From fa8e6ff90074708ec19fcdcdfd79c1dce03cb128 Mon Sep 17 00:00:00 2001 From: Kegan Dougal Date: Tue, 27 Jan 2015 14:01:51 +0000 Subject: [PATCH 001/112] Add stub application services REST API. --- synapse/api/urls.py | 1 + synapse/app/homeserver.py | 7 +++- synapse/rest/appservice/__init__.py | 14 ++++++++ synapse/rest/appservice/v1/__init__.py | 29 ++++++++++++++++ synapse/rest/appservice/v1/base.py | 47 ++++++++++++++++++++++++++ synapse/rest/appservice/v1/register.py | 37 ++++++++++++++++++++ synapse/server.py | 1 + 7 files changed, 135 insertions(+), 1 deletion(-) create mode 100644 synapse/rest/appservice/__init__.py create mode 100644 synapse/rest/appservice/v1/__init__.py create mode 100644 synapse/rest/appservice/v1/base.py create mode 100644 synapse/rest/appservice/v1/register.py diff --git a/synapse/api/urls.py b/synapse/api/urls.py index 693c0efda6..9485719332 100644 --- a/synapse/api/urls.py +++ b/synapse/api/urls.py @@ -22,3 +22,4 @@ WEB_CLIENT_PREFIX = "/_matrix/client" CONTENT_REPO_PREFIX = "/_matrix/content" SERVER_KEY_PREFIX = "/_matrix/key/v1" MEDIA_PREFIX = "/_matrix/media/v1" +APP_SERVICE_PREFIX = "/_matrix/appservice/v1" diff --git a/synapse/app/homeserver.py b/synapse/app/homeserver.py index 40d28dcbdc..f7e24d0cc6 100755 --- a/synapse/app/homeserver.py +++ b/synapse/app/homeserver.py @@ -26,13 +26,14 @@ from twisted.web.resource import Resource from twisted.web.static import File from twisted.web.server import Site from synapse.http.server import JsonResource, RootRedirect +from synapse.rest.appservice.v1 import AppServiceRestResource from synapse.rest.media.v0.content_repository import ContentRepoResource from synapse.rest.media.v1.media_repository import MediaRepositoryResource from synapse.http.server_key_resource import LocalKey from synapse.http.matrixfederationclient import MatrixFederationHttpClient from synapse.api.urls import ( CLIENT_PREFIX, FEDERATION_PREFIX, WEB_CLIENT_PREFIX, CONTENT_REPO_PREFIX, - SERVER_KEY_PREFIX, MEDIA_PREFIX, CLIENT_V2_ALPHA_PREFIX, + SERVER_KEY_PREFIX, MEDIA_PREFIX, CLIENT_V2_ALPHA_PREFIX, APP_SERVICE_PREFIX ) from synapse.config.homeserver import HomeServerConfig from synapse.crypto import context_factory @@ -69,6 +70,9 @@ class SynapseHomeServer(HomeServer): def build_resource_for_federation(self): return JsonResource() + def build_resource_for_app_services(self): + return AppServiceRestResource(self) + def build_resource_for_web_client(self): syweb_path = os.path.dirname(syweb.__file__) webclient_path = os.path.join(syweb_path, "webclient") @@ -114,6 +118,7 @@ class SynapseHomeServer(HomeServer): (CONTENT_REPO_PREFIX, self.get_resource_for_content_repo()), (SERVER_KEY_PREFIX, self.get_resource_for_server_key()), (MEDIA_PREFIX, self.get_resource_for_media_repository()), + (APP_SERVICE_PREFIX, self.get_resource_for_app_services()), ] if web_client: logger.info("Adding the web client.") diff --git a/synapse/rest/appservice/__init__.py b/synapse/rest/appservice/__init__.py new file mode 100644 index 0000000000..1a84d94cd9 --- /dev/null +++ b/synapse/rest/appservice/__init__.py @@ -0,0 +1,14 @@ +# -*- coding: utf-8 -*- +# Copyright 2015 OpenMarket Ltd +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. diff --git a/synapse/rest/appservice/v1/__init__.py b/synapse/rest/appservice/v1/__init__.py new file mode 100644 index 0000000000..bf243b6180 --- /dev/null +++ b/synapse/rest/appservice/v1/__init__.py @@ -0,0 +1,29 @@ +# -*- coding: utf-8 -*- +# Copyright 2015 OpenMarket Ltd +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +from . import register + +from synapse.http.server import JsonResource + + +class AppServiceRestResource(JsonResource): + """A resource for version 1 of the matrix application service API.""" + + def __init__(self, hs): + JsonResource.__init__(self) + self.register_servlets(self, hs) + + @staticmethod + def register_servlets(appservice_resource, hs): + register.register_servlets(hs, appservice_resource) \ No newline at end of file diff --git a/synapse/rest/appservice/v1/base.py b/synapse/rest/appservice/v1/base.py new file mode 100644 index 0000000000..46c9a444c0 --- /dev/null +++ b/synapse/rest/appservice/v1/base.py @@ -0,0 +1,47 @@ +# -*- coding: utf-8 -*- +# Copyright 2015 OpenMarket Ltd +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""This module contains base REST classes for constructing client v1 servlets. +""" + +from synapse.http.servlet import RestServlet +from synapse.api.urls import APP_SERVICE_PREFIX +import re + +import logging + + +logger = logging.getLogger(__name__) + + +def as_path_pattern(path_regex): + """Creates a regex compiled appservice path with the correct path + prefix. + + Args: + path_regex (str): The regex string to match. This should NOT have a ^ + as this will be prefixed. + Returns: + SRE_Pattern + """ + return re.compile("^" + APP_SERVICE_PREFIX + path_regex) + + +class AppServiceRestServlet(RestServlet): + """A base Synapse REST Servlet for the application services version 1 API. 
+ """ + + def __init__(self, hs): + self.hs = hs \ No newline at end of file diff --git a/synapse/rest/appservice/v1/register.py b/synapse/rest/appservice/v1/register.py new file mode 100644 index 0000000000..17b6dc5664 --- /dev/null +++ b/synapse/rest/appservice/v1/register.py @@ -0,0 +1,37 @@ +# -*- coding: utf-8 -*- +# Copyright 2015 OpenMarket Ltd +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""This module contains REST servlets to do with registration: /register""" + +from base import AppServiceRestServlet, as_path_pattern +from synapse.api.errors import CodeMessageException + +import logging + +logger = logging.getLogger(__name__) + + +class RegisterRestServlet(AppServiceRestServlet): + """Handles AS registration with the home server. + """ + + PATTERN = as_path_pattern("/register$") + + def on_POST(self, request): + raise CodeMessageException(500, "Not implemented") + + +def register_servlets(hs, http_server): + RegisterRestServlet(hs).register(http_server) \ No newline at end of file diff --git a/synapse/server.py b/synapse/server.py index f09d5d581e..891c5aa13d 100644 --- a/synapse/server.py +++ b/synapse/server.py @@ -75,6 +75,7 @@ class BaseHomeServer(object): 'resource_for_content_repo', 'resource_for_server_key', 'resource_for_media_repository', + 'resource_for_app_services', 'event_sources', 'ratelimiter', 'keyring', From 6efdc11cc811fedd5c989dc06df64fd334b4b033 Mon Sep 17 00:00:00 2001 From: Kegan Dougal Date: Tue, 27 Jan 2015 15:03:19 +0000 Subject: [PATCH 002/112] Parse /register and /unregister request JSON. --- synapse/rest/appservice/v1/register.py | 80 +++++++++++++++++++++++++- 1 file changed, 78 insertions(+), 2 deletions(-) diff --git a/synapse/rest/appservice/v1/register.py b/synapse/rest/appservice/v1/register.py index 17b6dc5664..779447ac6a 100644 --- a/synapse/rest/appservice/v1/register.py +++ b/synapse/rest/appservice/v1/register.py @@ -16,8 +16,9 @@ """This module contains REST servlets to do with registration: /register""" from base import AppServiceRestServlet, as_path_pattern -from synapse.api.errors import CodeMessageException +from synapse.api.errors import CodeMessageException, SynapseError +import json import logging logger = logging.getLogger(__name__) @@ -30,8 +31,83 @@ class RegisterRestServlet(AppServiceRestServlet): PATTERN = as_path_pattern("/register$") def on_POST(self, request): + params = _parse_json(request) + + # sanity check required params + try: + as_token = params["as_token"] + as_url = params["url"] + if (not isinstance(as_token, basestring) or + not isinstance(as_url, basestring)): + raise ValueError + except (KeyError, ValueError): + raise SynapseError( + 400, "Missed required keys: as_token(str) / url(str)." 
+ ) + + namespaces = { + "users": [], + "rooms": [], + "aliases": [] + } + + if "namespaces" in params: + self._parse_namespace(namespaces, params["namespaces"], "users") + self._parse_namespace(namespaces, params["namespaces"], "rooms") + self._parse_namespace(namespaces, params["namespaces"], "aliases") + + # TODO: pass to the appservice handler + + raise CodeMessageException(500, "Not implemented.") + + def _parse_namespace(self, target_ns, origin_ns, ns): + if ns not in target_ns or ns not in origin_ns: + return # nothing to parse / map through to. + + possible_regex_list = origin_ns[ns] + if not type(possible_regex_list) == list: + raise SynapseError(400, "Namespace %s isn't an array." % ns) + + for regex in possible_regex_list: + if not isinstance(regex, basestring): + raise SynapseError( + 400, "Regex '%s' isn't a string in namespace %s" % + (regex, ns) + ) + + target_ns[ns] = origin_ns[ns] + + +class UnregisterRestServlet(AppServiceRestServlet): + """Handles AS registration with the home server. + """ + + PATTERN = as_path_pattern("/unregister$") + + def on_POST(self, request): + params = _parse_json(request) + try: + as_token = params["as_token"] + if not isinstance(as_token, basestring): + raise ValueError + except (KeyError, ValueError): + raise SynapseError(400, "Missing required key: as_token(str)") + + # TODO: pass to the appservice handler + raise CodeMessageException(500, "Not implemented") +def _parse_json(request): + try: + content = json.loads(request.content.read()) + if type(content) != dict: + raise SynapseError(400, "Content must be a JSON object.") + return content + except ValueError: + raise SynapseError(400, "Content not JSON.") + + def register_servlets(hs, http_server): - RegisterRestServlet(hs).register(http_server) \ No newline at end of file + RegisterRestServlet(hs).register(http_server) + UnregisterRestServlet(hs).register(http_server) From 51449e06654c4af7a645124dc64e1f0cc1678b24 Mon Sep 17 00:00:00 2001 From: Kegan Dougal Date: Tue, 27 Jan 2015 15:50:28 +0000 Subject: [PATCH 003/112] Add appservice handler and store. Glue together rest > handler > store. 
--- synapse/handlers/__init__.py | 2 ++ synapse/handlers/appservice.py | 49 ++++++++++++++++++++++++++ synapse/rest/appservice/v1/base.py | 3 +- synapse/rest/appservice/v1/register.py | 8 +++-- synapse/storage/__init__.py | 5 +-- synapse/storage/appservice.py | 45 +++++++++++++++++++++++ 6 files changed, 107 insertions(+), 5 deletions(-) create mode 100644 synapse/handlers/appservice.py create mode 100644 synapse/storage/appservice.py diff --git a/synapse/handlers/__init__.py b/synapse/handlers/__init__.py index fe071a4bc2..96a9b143ca 100644 --- a/synapse/handlers/__init__.py +++ b/synapse/handlers/__init__.py @@ -26,6 +26,7 @@ from .presence import PresenceHandler from .directory import DirectoryHandler from .typing import TypingNotificationHandler from .admin import AdminHandler +from .appservice import ApplicationServicesHandler class Handlers(object): @@ -51,3 +52,4 @@ class Handlers(object): self.directory_handler = DirectoryHandler(hs) self.typing_notification_handler = TypingNotificationHandler(hs) self.admin_handler = AdminHandler(hs) + self.appservice_handler = ApplicationServicesHandler(hs) diff --git a/synapse/handlers/appservice.py b/synapse/handlers/appservice.py new file mode 100644 index 0000000000..55a653476f --- /dev/null +++ b/synapse/handlers/appservice.py @@ -0,0 +1,49 @@ +# -*- coding: utf-8 -*- +# Copyright 2015 OpenMarket Ltd +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from twisted.internet import defer + +from ._base import BaseHandler +from synapse.api.errors import StoreError, SynapseError + +import logging + + +logger = logging.getLogger(__name__) + + +class ApplicationServicesHandler(BaseHandler): + + def __init__(self, hs): + super(ApplicationServicesHandler, self).__init__(hs) + + @defer.inlineCallbacks + def register(self, base_url, token, namespaces): + # check the token is recognised + try: + app_service = yield self.store.get_app_service(token) + if not app_service: + raise StoreError + except StoreError: + raise SynapseError( + 403, "Unrecognised application services token. " + "Consult the home server admin." + ) + + # update AS entry with base URL + + # store namespaces for this AS + + defer.returnValue("not_implemented_yet") diff --git a/synapse/rest/appservice/v1/base.py b/synapse/rest/appservice/v1/base.py index 46c9a444c0..65d5bcf9be 100644 --- a/synapse/rest/appservice/v1/base.py +++ b/synapse/rest/appservice/v1/base.py @@ -44,4 +44,5 @@ class AppServiceRestServlet(RestServlet): """ def __init__(self, hs): - self.hs = hs \ No newline at end of file + self.hs = hs + self.handler = hs.get_handlers().appservice_handler diff --git a/synapse/rest/appservice/v1/register.py b/synapse/rest/appservice/v1/register.py index 779447ac6a..142f09a638 100644 --- a/synapse/rest/appservice/v1/register.py +++ b/synapse/rest/appservice/v1/register.py @@ -14,6 +14,7 @@ # limitations under the License. 
"""This module contains REST servlets to do with registration: /register""" +from twisted.internet import defer from base import AppServiceRestServlet, as_path_pattern from synapse.api.errors import CodeMessageException, SynapseError @@ -30,6 +31,7 @@ class RegisterRestServlet(AppServiceRestServlet): PATTERN = as_path_pattern("/register$") + @defer.inlineCallbacks def on_POST(self, request): params = _parse_json(request) @@ -56,9 +58,11 @@ class RegisterRestServlet(AppServiceRestServlet): self._parse_namespace(namespaces, params["namespaces"], "rooms") self._parse_namespace(namespaces, params["namespaces"], "aliases") - # TODO: pass to the appservice handler + hs_token = yield self.handler.register(as_url, as_token, namespaces) - raise CodeMessageException(500, "Not implemented.") + defer.returnValue({ + "hs_token": hs_token + }) def _parse_namespace(self, target_ns, origin_ns, ns): if ns not in target_ns or ns not in origin_ns: diff --git a/synapse/storage/__init__.py b/synapse/storage/__init__.py index 4beb951b9f..9431c1a32d 100644 --- a/synapse/storage/__init__.py +++ b/synapse/storage/__init__.py @@ -18,6 +18,7 @@ from twisted.internet import defer from synapse.util.logutils import log_function from synapse.api.constants import EventTypes +from .appservice import ApplicationServiceStore from .directory import DirectoryStore from .feedback import FeedbackStore from .presence import PresenceStore @@ -80,8 +81,8 @@ class DataStore(RoomMemberStore, RoomStore, RegistrationStore, StreamStore, ProfileStore, FeedbackStore, PresenceStore, TransactionStore, DirectoryStore, KeyStore, StateStore, SignatureStore, - EventFederationStore, - MediaRepositoryStore, + EventFederationStore, MediaRepositoryStore, + ApplicationServiceStore ): def __init__(self, hs): diff --git a/synapse/storage/appservice.py b/synapse/storage/appservice.py new file mode 100644 index 0000000000..99f58b4c62 --- /dev/null +++ b/synapse/storage/appservice.py @@ -0,0 +1,45 @@ +# -*- coding: utf-8 -*- +# Copyright 2015 OpenMarket Ltd +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from twisted.internet import defer + +from synapse.api.errors import StoreError + +from ._base import SQLBaseStore + + +class ApplicationServiceStore(SQLBaseStore): + + def __init__(self, hs): + super(ApplicationServiceStore, self).__init__(hs) + + self.clock = hs.get_clock() + + @defer.inlineCallbacks + def get_app_service(self, as_token): + """Get the application service with the given token. + + Args: + token (str): The application service token. + Raises: + StoreError if there was a problem retrieving this. + """ + row = self._simple_select_one( + "application_services", {"token": as_token}, + ["url", "token"] + ) + if not row: + raise StoreError(400, "Bad application services token supplied.") + defer.returnValue(row) From 7331d34839dca468b9e396e1d2952b0bb32011bf Mon Sep 17 00:00:00 2001 From: Kegan Dougal Date: Tue, 27 Jan 2015 16:23:46 +0000 Subject: [PATCH 004/112] Add AS specific classes with docstrings. 
--- synapse/storage/appservice.py | 60 ++++++++++++++++++++++++++++++++++- 1 file changed, 59 insertions(+), 1 deletion(-) diff --git a/synapse/storage/appservice.py b/synapse/storage/appservice.py index 99f58b4c62..4c11191fe8 100644 --- a/synapse/storage/appservice.py +++ b/synapse/storage/appservice.py @@ -20,11 +20,69 @@ from synapse.api.errors import StoreError from ._base import SQLBaseStore +# XXX: This feels like it should belong in a "models" module, not storage. +class ApplicationService(object): + """Defines an application service. + + Provides methods to check if this service is "interested" in events. + """ + + def __init__(self, token, url=None, namespaces=None): + self.token = token + if url: + self.url = url + if namespaces: + self.namespaces = namespaces + + def is_interested(self, event): + """Check if this service is interested in this event. + + Args: + event(Event): The event to check. + Returns: + bool: True if this service would like to know about this event. + """ + # NB: This does not check room alias regex matches because that requires + # more context that an Event can provide. Room alias matches are checked + # in the ApplicationServiceHandler. + + # TODO check if event.room_id regex matches + # TODO check if event.user_id regex matches (or m.room.member state_key) + + return True + + +class ApplicationServiceCache(object): + """Caches ApplicationServices and provides utility functions on top. + + This class is designed to be invoked on incoming events in order to avoid + hammering the database every time to extract a list of application service + regexes. + """ + + def __init__(self): + self.services = [] + + def get_services_for_event(self, event): + """Retrieve a list of application services interested in this event. + + Args: + event(Event): The event to check. + Returns: + list: A list of services interested in this + event based on the service regex. + """ + interested_list = [ + s for s in self.services if s.is_event_claimed(event) + ] + return interested_list + + class ApplicationServiceStore(SQLBaseStore): def __init__(self, hs): super(ApplicationServiceStore, self).__init__(hs) - + self.cache = ApplicationServiceCache() self.clock = hs.get_clock() @defer.inlineCallbacks From 92171f9dd1ecac24aeae2f46729f3cbbbe94f91e Mon Sep 17 00:00:00 2001 From: Kegan Dougal Date: Tue, 27 Jan 2015 16:53:59 +0000 Subject: [PATCH 005/112] Add stub methods, TODOs and docstrings for application services. --- synapse/handlers/appservice.py | 25 ++++++++++++++-- synapse/storage/appservice.py | 52 ++++++++++++++++++++++++++++++++-- 2 files changed, 72 insertions(+), 5 deletions(-) diff --git a/synapse/handlers/appservice.py b/synapse/handlers/appservice.py index 55a653476f..25e1cece56 100644 --- a/synapse/handlers/appservice.py +++ b/synapse/handlers/appservice.py @@ -42,8 +42,27 @@ class ApplicationServicesHandler(BaseHandler): "Consult the home server admin." ) - # update AS entry with base URL - - # store namespaces for this AS + # store this AS defer.returnValue("not_implemented_yet") + + def unregister(self, token): + yield self.store.unregister_app_service(token) + + def notify_interested_services(self, event): + """Notifies (pushes) all application services interested in this event. + + Pushing is done asynchronously, so this method won't block for any + prolonged length of time. + + Args: + event(Event): The event to push out to interested services. 
+ """ + # TODO: Gather interested services + # get_services_for_event(event) <-- room IDs and user IDs + # Get a list of room aliases. Check regex. + # TODO: If unknown user: poke User Query API. + # TODO: If unknown room alias: poke Room Alias Query API. + + # TODO: Fork off pushes to these services - XXX First cut, best effort + pass diff --git a/synapse/storage/appservice.py b/synapse/storage/appservice.py index 4c11191fe8..fbad17cb9e 100644 --- a/synapse/storage/appservice.py +++ b/synapse/storage/appservice.py @@ -84,16 +84,60 @@ class ApplicationServiceStore(SQLBaseStore): super(ApplicationServiceStore, self).__init__(hs) self.cache = ApplicationServiceCache() self.clock = hs.get_clock() + self._populate_cache() + + def unregister_app_service(self, token): + """Unregisters this service. + + This removes all AS specific regex and the base URL. The token is the + only thing preserved for future registration attempts. + """ + # TODO: DELETE FROM application_services_regex WHERE id=this service + # TODO: SET url=NULL WHERE token=token + # TODO: Update cache + pass + + def update_app_service(self, service): + """Update an application service, clobbering what was previously there. + + Args: + service(ApplicationService): The updated service. + """ + # NB: There is no "insert" since we provide no public-facing API to + # allocate new ASes. It relies on the server admin inserting the AS + # token into the database manually. + + # TODO: UPDATE application_services, SET url WHERE token=service.token + # TODO: DELETE FROM application_services_regex WHERE id=this service + # TODO: INSERT INTO application_services_regex + # TODO: Update cache + pass + + def get_services_for_event(self, event): + return self.cache.get_services_for_event(event) @defer.inlineCallbacks - def get_app_service(self, as_token): + def get_app_service(self, as_token, from_cache=True): """Get the application service with the given token. Args: token (str): The application service token. + from_cache (bool): True to get this service from the cache, False to + check the database. Raises: - StoreError if there was a problem retrieving this. + StoreError if there was a problem retrieving this service. """ + + if from_cache: + for service in self.cache.services: + if service.token == as_token: + defer.returnValue(service) + return + defer.returnValue(None) + return + + + # TODO: This should be JOINed with the application_services_regex table. row = self._simple_select_one( "application_services", {"token": as_token}, ["url", "token"] @@ -101,3 +145,7 @@ class ApplicationServiceStore(SQLBaseStore): if not row: raise StoreError(400, "Bad application services token supplied.") defer.returnValue(row) + + def _populate_cache(self): + """Populates the ApplicationServiceCache from the database.""" + pass From ec3719b583c6fbbc56dbd313b858054e535ae733 Mon Sep 17 00:00:00 2001 From: Kegan Dougal Date: Tue, 27 Jan 2015 17:15:06 +0000 Subject: [PATCH 006/112] Use ApplicationService when registering. 
--- synapse/handlers/appservice.py | 13 +++++-------- synapse/rest/appservice/v1/register.py | 8 ++++++-- synapse/storage/appservice.py | 16 ++++++---------- 3 files changed, 17 insertions(+), 20 deletions(-) diff --git a/synapse/handlers/appservice.py b/synapse/handlers/appservice.py index 25e1cece56..1890ca06aa 100644 --- a/synapse/handlers/appservice.py +++ b/synapse/handlers/appservice.py @@ -30,21 +30,18 @@ class ApplicationServicesHandler(BaseHandler): super(ApplicationServicesHandler, self).__init__(hs) @defer.inlineCallbacks - def register(self, base_url, token, namespaces): + def register(self, app_service): # check the token is recognised try: - app_service = yield self.store.get_app_service(token) - if not app_service: - raise StoreError + stored_service = yield self.store.get_app_service(app_service.token) + if not stored_service: + raise StoreError(404, "Not found") except StoreError: raise SynapseError( 403, "Unrecognised application services token. " "Consult the home server admin." ) - - # store this AS - - defer.returnValue("not_implemented_yet") + # TODO store this AS def unregister(self, token): yield self.store.unregister_app_service(token) diff --git a/synapse/rest/appservice/v1/register.py b/synapse/rest/appservice/v1/register.py index 142f09a638..5786cf873e 100644 --- a/synapse/rest/appservice/v1/register.py +++ b/synapse/rest/appservice/v1/register.py @@ -18,6 +18,7 @@ from twisted.internet import defer from base import AppServiceRestServlet, as_path_pattern from synapse.api.errors import CodeMessageException, SynapseError +from synapse.storage.appservice import ApplicationService import json import logging @@ -58,7 +59,10 @@ class RegisterRestServlet(AppServiceRestServlet): self._parse_namespace(namespaces, params["namespaces"], "rooms") self._parse_namespace(namespaces, params["namespaces"], "aliases") - hs_token = yield self.handler.register(as_url, as_token, namespaces) + app_service = ApplicationService(as_token, as_url, namespaces) + + yield self.handler.register(app_service) + hs_token = "_not_implemented_yet" # TODO: Pull this from self.hs? defer.returnValue({ "hs_token": hs_token @@ -97,7 +101,7 @@ class UnregisterRestServlet(AppServiceRestServlet): except (KeyError, ValueError): raise SynapseError(400, "Missing required key: as_token(str)") - # TODO: pass to the appservice handler + yield self.handler.unregister(as_token) raise CodeMessageException(500, "Not implemented") diff --git a/synapse/storage/appservice.py b/synapse/storage/appservice.py index fbad17cb9e..f84f026b7b 100644 --- a/synapse/storage/appservice.py +++ b/synapse/storage/appservice.py @@ -116,8 +116,7 @@ class ApplicationServiceStore(SQLBaseStore): def get_services_for_event(self, event): return self.cache.get_services_for_event(event) - @defer.inlineCallbacks - def get_app_service(self, as_token, from_cache=True): + def get_app_service(self, token, from_cache=True): """Get the application service with the given token. Args: @@ -130,21 +129,18 @@ class ApplicationServiceStore(SQLBaseStore): if from_cache: for service in self.cache.services: - if service.token == as_token: - defer.returnValue(service) - return - defer.returnValue(None) - return - + if service.token == token: + return service + return None # TODO: This should be JOINed with the application_services_regex table. 
row = self._simple_select_one( - "application_services", {"token": as_token}, + "application_services", {"token": token}, ["url", "token"] ) if not row: raise StoreError(400, "Bad application services token supplied.") - defer.returnValue(row) + return row def _populate_cache(self): """Populates the ApplicationServiceCache from the database.""" From fbeaeb868960099c3682802275d5a222c0cc2d8b Mon Sep 17 00:00:00 2001 From: Kegan Dougal Date: Tue, 27 Jan 2015 17:34:40 +0000 Subject: [PATCH 007/112] Log when ASes are registered/unregistered. --- synapse/handlers/appservice.py | 2 ++ synapse/storage/appservice.py | 4 +++- 2 files changed, 5 insertions(+), 1 deletion(-) diff --git a/synapse/handlers/appservice.py b/synapse/handlers/appservice.py index 1890ca06aa..c9f56c41eb 100644 --- a/synapse/handlers/appservice.py +++ b/synapse/handlers/appservice.py @@ -31,6 +31,7 @@ class ApplicationServicesHandler(BaseHandler): @defer.inlineCallbacks def register(self, app_service): + logger.info("Register -> %s", app_service) # check the token is recognised try: stored_service = yield self.store.get_app_service(app_service.token) @@ -44,6 +45,7 @@ class ApplicationServicesHandler(BaseHandler): # TODO store this AS def unregister(self, token): + logger.info("Unregister as_token=%s", token) yield self.store.unregister_app_service(token) def notify_interested_services(self, event): diff --git a/synapse/storage/appservice.py b/synapse/storage/appservice.py index f84f026b7b..cd15843ba3 100644 --- a/synapse/storage/appservice.py +++ b/synapse/storage/appservice.py @@ -51,6 +51,9 @@ class ApplicationService(object): return True + def __str__(self): + return "ApplicationService: %s" % (self.__dict__,) + class ApplicationServiceCache(object): """Caches ApplicationServices and provides utility functions on top. @@ -83,7 +86,6 @@ class ApplicationServiceStore(SQLBaseStore): def __init__(self, hs): super(ApplicationServiceStore, self).__init__(hs) self.cache = ApplicationServiceCache() - self.clock = hs.get_clock() self._populate_cache() def unregister_app_service(self, token): From b46fa8603e8e0726ea12310a77ade5cea59c3ae2 Mon Sep 17 00:00:00 2001 From: Kegan Dougal Date: Wed, 28 Jan 2015 09:17:48 +0000 Subject: [PATCH 008/112] Remove unused import --- synapse/storage/appservice.py | 2 -- 1 file changed, 2 deletions(-) diff --git a/synapse/storage/appservice.py b/synapse/storage/appservice.py index cd15843ba3..533fac4972 100644 --- a/synapse/storage/appservice.py +++ b/synapse/storage/appservice.py @@ -13,8 +13,6 @@ # See the License for the specific language governing permissions and # limitations under the License. -from twisted.internet import defer - from synapse.api.errors import StoreError from ._base import SQLBaseStore From 42876969b99b6bad146b44a734e8d4a1a14d6835 Mon Sep 17 00:00:00 2001 From: Kegan Dougal Date: Wed, 28 Jan 2015 11:59:38 +0000 Subject: [PATCH 009/112] Add basic application_services SQL, and hook up parts of the appservice store to read from it. 
--- synapse/handlers/appservice.py | 7 +- synapse/rest/appservice/v1/register.py | 4 +- synapse/storage/__init__.py | 1 + synapse/storage/appservice.py | 89 ++++++++++++++++--- .../storage/schema/application_services.sql | 32 +++++++ 5 files changed, 117 insertions(+), 16 deletions(-) create mode 100644 synapse/storage/schema/application_services.sql diff --git a/synapse/handlers/appservice.py b/synapse/handlers/appservice.py index c9f56c41eb..8bd475cbfd 100644 --- a/synapse/handlers/appservice.py +++ b/synapse/handlers/appservice.py @@ -16,7 +16,7 @@ from twisted.internet import defer from ._base import BaseHandler -from synapse.api.errors import StoreError, SynapseError +from synapse.api.errors import Codes, StoreError, SynapseError import logging @@ -36,11 +36,12 @@ class ApplicationServicesHandler(BaseHandler): try: stored_service = yield self.store.get_app_service(app_service.token) if not stored_service: - raise StoreError(404, "Not found") + raise StoreError(404, "Application Service Not found") except StoreError: raise SynapseError( 403, "Unrecognised application services token. " - "Consult the home server admin." + "Consult the home server admin.", + errcode=Codes.FORBIDDEN ) # TODO store this AS diff --git a/synapse/rest/appservice/v1/register.py b/synapse/rest/appservice/v1/register.py index 5786cf873e..e374d538e7 100644 --- a/synapse/rest/appservice/v1/register.py +++ b/synapse/rest/appservice/v1/register.py @@ -64,9 +64,9 @@ class RegisterRestServlet(AppServiceRestServlet): yield self.handler.register(app_service) hs_token = "_not_implemented_yet" # TODO: Pull this from self.hs? - defer.returnValue({ + defer.returnValue((200, { "hs_token": hs_token - }) + })) def _parse_namespace(self, target_ns, origin_ns, ns): if ns not in target_ns or ns not in origin_ns: diff --git a/synapse/storage/__init__.py b/synapse/storage/__init__.py index 9431c1a32d..e86b981b47 100644 --- a/synapse/storage/__init__.py +++ b/synapse/storage/__init__.py @@ -62,6 +62,7 @@ SCHEMAS = [ "event_edges", "event_signatures", "media_repository", + "application_services" ] diff --git a/synapse/storage/appservice.py b/synapse/storage/appservice.py index 533fac4972..5a0e47e0d4 100644 --- a/synapse/storage/appservice.py +++ b/synapse/storage/appservice.py @@ -12,12 +12,15 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. - -from synapse.api.errors import StoreError +import logging +from twisted.internet import defer from ._base import SQLBaseStore +logger = logging.getLogger(__name__) + + # XXX: This feels like it should belong in a "models" module, not storage. class ApplicationService(object): """Defines an application service. @@ -30,7 +33,22 @@ class ApplicationService(object): if url: self.url = url if namespaces: - self.namespaces = namespaces + self._set_namespaces(namespaces) + + def _set_namespaces(self, namespaces): + # Sanity check that it is of the form: + # { + # users: ["regex",...], + # aliases: ["regex",...], + # rooms: ["regex",...], + # } + for ns in ["users", "rooms", "aliases"]: + if type(namespaces[ns]) != list: + raise ValueError("Bad namespace value for '%s'", ns) + for regex in namespaces[ns]: + if not isinstance(regex, basestring): + raise ValueError("Expected string regex for ns '%s'", ns) + self.namespaces = namespaces def is_interested(self, event): """Check if this service is interested in this event. 
@@ -133,15 +151,64 @@ class ApplicationServiceStore(SQLBaseStore): return service return None + # TODO: The from_cache=False impl # TODO: This should be JOINed with the application_services_regex table. - row = self._simple_select_one( - "application_services", {"token": token}, - ["url", "token"] - ) - if not row: - raise StoreError(400, "Bad application services token supplied.") - return row + + @defer.inlineCallbacks def _populate_cache(self): """Populates the ApplicationServiceCache from the database.""" - pass + sql = ("SELECT * FROM application_services LEFT JOIN " + "application_services_regex ON application_services.id = " + "application_services_regex.as_id") + + namespace_enum = [ + "users", # 0 + "aliases", # 1 + "rooms" # 2 + ] + # SQL results in the form: + # [ + # { + # 'regex': "something", + # 'url': "something", + # 'namespace': enum, + # 'as_id': 0, + # 'token': "something", + # 'id': 0 + # } + # ] + services = {} + results = yield self._execute_and_decode(sql) + for res in results: + as_token = res["token"] + if as_token not in services: + # add the service + services[as_token] = { + "url": res["url"], + "token": as_token, + "namespaces": { + "users": [], + "aliases": [], + "rooms": [] + } + } + # add the namespace regex if one exists + ns_int = res["namespace"] + if ns_int is None: + continue + try: + services[as_token]["namespaces"][namespace_enum[ns_int]].append( + res["regex"] + ) + except IndexError: + logger.error("Bad namespace enum '%s'. %s", ns_int, res) + + for service in services.values(): + logger.info("Found application service: %s", service) + self.cache.services.append(ApplicationService( + service["token"], + service["url"], + service["namespaces"] + )) + diff --git a/synapse/storage/schema/application_services.sql b/synapse/storage/schema/application_services.sql new file mode 100644 index 0000000000..6d245fc807 --- /dev/null +++ b/synapse/storage/schema/application_services.sql @@ -0,0 +1,32 @@ +/* Copyright 2015 OpenMarket Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +CREATE TABLE IF NOT EXISTS application_services( + id INTEGER PRIMARY KEY AUTOINCREMENT, + url TEXT, + token TEXT, + UNIQUE(token) ON CONFLICT ROLLBACK +); + +CREATE TABLE IF NOT EXISTS application_services_regex( + id INTEGER PRIMARY KEY AUTOINCREMENT, + as_id INTEGER NOT NULL, + namespace INTEGER, /* enum[room_id|room_alias|user_id] */ + regex TEXT, + FOREIGN KEY(as_id) REFERENCES application_services(id) +); + + + From a006d168c556ca71ad0bbb680c60f1ba170338fb Mon Sep 17 00:00:00 2001 From: Kegan Dougal Date: Mon, 2 Feb 2015 16:05:34 +0000 Subject: [PATCH 010/112] Actually merge into develop. 
--- synapse/storage/__init__.py | 9 +-------- 1 file changed, 1 insertion(+), 8 deletions(-) diff --git a/synapse/storage/__init__.py b/synapse/storage/__init__.py index 9bbd553dfc..1f207495f6 100644 --- a/synapse/storage/__init__.py +++ b/synapse/storage/__init__.py @@ -67,12 +67,9 @@ SCHEMAS = [ "event_signatures", "pusher", "media_repository", -<<<<<<< HEAD "application_services" -======= "filtering", "rejections", ->>>>>>> develop ] @@ -92,17 +89,13 @@ class DataStore(RoomMemberStore, RoomStore, RegistrationStore, StreamStore, ProfileStore, FeedbackStore, PresenceStore, TransactionStore, DirectoryStore, KeyStore, StateStore, SignatureStore, -<<<<<<< HEAD - EventFederationStore, MediaRepositoryStore, - ApplicationServiceStore -======= + ApplicationServiceStore, EventFederationStore, MediaRepositoryStore, RejectionsStore, FilteringStore, PusherStore, PushRuleStore ->>>>>>> develop ): def __init__(self, hs): From 1a2de0c5feb1183b35045bb7fb9e379a9598d1cb Mon Sep 17 00:00:00 2001 From: Kegan Dougal Date: Mon, 2 Feb 2015 17:39:41 +0000 Subject: [PATCH 011/112] Implement txns for AS (un)registration. --- synapse/handlers/appservice.py | 3 +- synapse/storage/__init__.py | 2 +- synapse/storage/appservice.py | 120 +++++++++++++++++++++++++++------ 3 files changed, 102 insertions(+), 23 deletions(-) diff --git a/synapse/handlers/appservice.py b/synapse/handlers/appservice.py index 8bd475cbfd..da994ba8e0 100644 --- a/synapse/handlers/appservice.py +++ b/synapse/handlers/appservice.py @@ -43,7 +43,8 @@ class ApplicationServicesHandler(BaseHandler): "Consult the home server admin.", errcode=Codes.FORBIDDEN ) - # TODO store this AS + logger.info("Updating application service info...") + yield self.store.update_app_service(app_service) def unregister(self, token): logger.info("Unregister as_token=%s", token) diff --git a/synapse/storage/__init__.py b/synapse/storage/__init__.py index 1f207495f6..6ff0093136 100644 --- a/synapse/storage/__init__.py +++ b/synapse/storage/__init__.py @@ -67,7 +67,7 @@ SCHEMAS = [ "event_signatures", "pusher", "media_repository", - "application_services" + "application_services", "filtering", "rejections", ] diff --git a/synapse/storage/appservice.py b/synapse/storage/appservice.py index 5a0e47e0d4..db0c546211 100644 --- a/synapse/storage/appservice.py +++ b/synapse/storage/appservice.py @@ -15,11 +15,17 @@ import logging from twisted.internet import defer +from synapse.api.errors import StoreError from ._base import SQLBaseStore logger = logging.getLogger(__name__) +namespace_enum = [ + "users", # 0 + "aliases", # 1 + "rooms" # 2 +] # XXX: This feels like it should belong in a "models" module, not storage. 
class ApplicationService(object): @@ -30,25 +36,26 @@ class ApplicationService(object): def __init__(self, token, url=None, namespaces=None): self.token = token - if url: - self.url = url - if namespaces: - self._set_namespaces(namespaces) + self.url = url + self.namespaces = self._get_namespaces(namespaces) - def _set_namespaces(self, namespaces): + def _get_namespaces(self, namespaces): # Sanity check that it is of the form: # { # users: ["regex",...], # aliases: ["regex",...], # rooms: ["regex",...], # } + if not namespaces: + return None + for ns in ["users", "rooms", "aliases"]: if type(namespaces[ns]) != list: raise ValueError("Bad namespace value for '%s'", ns) for regex in namespaces[ns]: if not isinstance(regex, basestring): raise ValueError("Expected string regex for ns '%s'", ns) - self.namespaces = namespaces + return namespaces def is_interested(self, event): """Check if this service is interested in this event. @@ -110,10 +117,38 @@ class ApplicationServiceStore(SQLBaseStore): This removes all AS specific regex and the base URL. The token is the only thing preserved for future registration attempts. """ - # TODO: DELETE FROM application_services_regex WHERE id=this service - # TODO: SET url=NULL WHERE token=token - # TODO: Update cache - pass + yield self.runInteraction( + "unregister_app_service", + self._unregister_app_service_txn, + token, + ) + # update cache TODO: Should this be in the txn? + for service in self.cache.services: + if service.token == token: + service.url = None + service.namespaces = None + + def _unregister_app_service_txn(self, txn, token): + # kill the url to prevent pushes + txn.execute( + "UPDATE application_services SET url=NULL WHERE token=?", + (token,) + ) + + # cleanup regex + as_id = self._get_as_id_txn(txn, token) + if not as_id: + logger.warning( + "unregister_app_service_txn: Failed to find as_id for token=", + token + ) + return False + + txn.execute( + "DELETE FROM application_services_regex WHERE as_id=?", + (as_id,) + ) + return True def update_app_service(self, service): """Update an application service, clobbering what was previously there. @@ -124,12 +159,61 @@ class ApplicationServiceStore(SQLBaseStore): # NB: There is no "insert" since we provide no public-facing API to # allocate new ASes. It relies on the server admin inserting the AS # token into the database manually. + if not service.token or not service.url: + raise StoreError(400, "Token and url must be specified.") - # TODO: UPDATE application_services, SET url WHERE token=service.token - # TODO: DELETE FROM application_services_regex WHERE id=this service - # TODO: INSERT INTO application_services_regex - # TODO: Update cache - pass + yield self.runInteraction( + "update_app_service", + self._update_app_service_txn, + service + ) + + # update cache TODO: Should this be in the txn? + for (index, cache_service) in enumerate(self.cache.services): + if service.token == cache_service.token: + self.cache.services[index] = service + logger.info("Updated: %s", service) + return + # new entry + self.cache.services.append(service) + logger.info("Updated(new): %s", service) + + def _update_app_service_txn(self, txn, service): + as_id = self._get_as_id_txn(txn, service.token) + if not as_id: + logger.warning( + "update_app_service_txn: Failed to find as_id for token=", + service.token + ) + return False + + txn.execute( + "UPDATE application_services SET url=? 
WHERE id=?", + (service.url, as_id,) + ) + # cleanup regex + txn.execute( + "DELETE FROM application_services_regex WHERE id=?", + (as_id,) + ) + for (ns_int, ns_str) in enumerate(namespace_enum): + if ns_str in service.namespaces: + for regex in service.namespaces[ns_str]: + txn.execute( + "INSERT INTO application_services_regex(" + "as_id, namespace, regex) values(?,?,?)", + (as_id, ns_int, regex) + ) + return True + + def _get_as_id_txn(self, txn, token): + cursor = txn.execute( + "SELECT id FROM application_services WHERE token=?", + (token,) + ) + res = cursor.fetchone() + if res: + return res[0] def get_services_for_event(self, event): return self.cache.get_services_for_event(event) @@ -161,12 +245,6 @@ class ApplicationServiceStore(SQLBaseStore): sql = ("SELECT * FROM application_services LEFT JOIN " "application_services_regex ON application_services.id = " "application_services_regex.as_id") - - namespace_enum = [ - "users", # 0 - "aliases", # 1 - "rooms" # 2 - ] # SQL results in the form: # [ # { From 9ff349a3cb1868bb2827047ed0f0d01a9a4c38c7 Mon Sep 17 00:00:00 2001 From: Kegan Dougal Date: Mon, 2 Feb 2015 17:42:49 +0000 Subject: [PATCH 012/112] Add defers in the right places. --- synapse/storage/appservice.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/synapse/storage/appservice.py b/synapse/storage/appservice.py index db0c546211..dd9b349370 100644 --- a/synapse/storage/appservice.py +++ b/synapse/storage/appservice.py @@ -111,6 +111,7 @@ class ApplicationServiceStore(SQLBaseStore): self.cache = ApplicationServiceCache() self._populate_cache() + @defer.inlineCallbacks def unregister_app_service(self, token): """Unregisters this service. @@ -150,6 +151,7 @@ class ApplicationServiceStore(SQLBaseStore): ) return True + @defer.inlineCallbacks def update_app_service(self, service): """Update an application service, clobbering what was previously there. From 197f3ea4bad066da251c7925336baab8bee296c9 Mon Sep 17 00:00:00 2001 From: Kegan Dougal Date: Tue, 3 Feb 2015 11:26:33 +0000 Subject: [PATCH 013/112] Implement regex checks for app services. Expose handler.get_services_for_event which manages the checks for all services. --- synapse/handlers/appservice.py | 25 +++++++++++- synapse/storage/appservice.py | 71 +++++++++++++++++++++------------- 2 files changed, 67 insertions(+), 29 deletions(-) diff --git a/synapse/handlers/appservice.py b/synapse/handlers/appservice.py index da994ba8e0..9b8dd1bb49 100644 --- a/synapse/handlers/appservice.py +++ b/synapse/handlers/appservice.py @@ -34,9 +34,11 @@ class ApplicationServicesHandler(BaseHandler): logger.info("Register -> %s", app_service) # check the token is recognised try: - stored_service = yield self.store.get_app_service(app_service.token) + stored_service = yield self.store.get_app_service_by_token( + app_service.token + ) if not stored_service: - raise StoreError(404, "Application Service Not found") + raise StoreError(404, "Application service not found") except StoreError: raise SynapseError( 403, "Unrecognised application services token. " @@ -50,6 +52,25 @@ class ApplicationServicesHandler(BaseHandler): logger.info("Unregister as_token=%s", token) yield self.store.unregister_app_service(token) + def get_services_for_event(self, event): + """Retrieve a list of application services interested in this event. + + Args: + event(Event): The event to check. + Returns: + list: A list of services interested in this + event based on the service regex. 
+ """ + # We need to know the aliases associated with this event.room_id, if any + alias_list = [] # TODO + + interested_list = [ + s for s in self.store.get_app_services() if ( + s.is_interested(event, alias_list) + ) + ] + return interested_list + def notify_interested_services(self, event): """Notifies (pushes) all application services interested in this event. diff --git a/synapse/storage/appservice.py b/synapse/storage/appservice.py index dd9b349370..c4e50be4c6 100644 --- a/synapse/storage/appservice.py +++ b/synapse/storage/appservice.py @@ -13,8 +13,10 @@ # See the License for the specific language governing permissions and # limitations under the License. import logging +import re from twisted.internet import defer +from synapse.api.constants import EventTypes from synapse.api.errors import StoreError from ._base import SQLBaseStore @@ -27,6 +29,7 @@ namespace_enum = [ "rooms" # 2 ] + # XXX: This feels like it should belong in a "models" module, not storage. class ApplicationService(object): """Defines an application service. @@ -37,9 +40,9 @@ class ApplicationService(object): def __init__(self, token, url=None, namespaces=None): self.token = token self.url = url - self.namespaces = self._get_namespaces(namespaces) + self.namespaces = self._check_namespaces(namespaces) - def _get_namespaces(self, namespaces): + def _check_namespaces(self, namespaces): # Sanity check that it is of the form: # { # users: ["regex",...], @@ -57,22 +60,50 @@ class ApplicationService(object): raise ValueError("Expected string regex for ns '%s'", ns) return namespaces - def is_interested(self, event): + def _matches_regex(self, test_string, namespace_key): + for regex in self.namespaces[namespace_key]: + if re.match(regex, test_string): + return True + return False + + def _matches_user(self, event): + if (hasattr(event, "user_id") and + self._matches_regex(event.user_id, "users")): + return True + # also check m.room.member state key + if (hasattr(event, "type") and event.type == EventTypes.Member + and hasattr(event, "state_key") + and self._matches_regex(event.state_key, "users")): + return True + return False + + def _matches_room_id(self, event): + if hasattr(event, "room_id"): + return self._matches_regex(event.room_id, "rooms") + return False + + def _matches_aliases(self, event, alias_list): + for alias in alias_list: + if self._matches_regex(alias, "aliases"): + return True + return False + + def is_interested(self, event, aliases_for_event=None): """Check if this service is interested in this event. Args: event(Event): The event to check. + aliases_for_event(list): A list of all the known room aliases for + this event. Returns: bool: True if this service would like to know about this event. """ - # NB: This does not check room alias regex matches because that requires - # more context that an Event can provide. Room alias matches are checked - # in the ApplicationServiceHandler. + if aliases_for_event is None: + aliases_for_event = [] - # TODO check if event.room_id regex matches - # TODO check if event.user_id regex matches (or m.room.member state_key) - - return True + return (self._matches_user(event) + or self._matches_aliases(event, aliases_for_event) + or self._matches_room_id(event)) def __str__(self): return "ApplicationService: %s" % (self.__dict__,) @@ -89,20 +120,6 @@ class ApplicationServiceCache(object): def __init__(self): self.services = [] - def get_services_for_event(self, event): - """Retrieve a list of application services interested in this event. 
- - Args: - event(Event): The event to check. - Returns: - list: A list of services interested in this - event based on the service regex. - """ - interested_list = [ - s for s in self.services if s.is_event_claimed(event) - ] - return interested_list - class ApplicationServiceStore(SQLBaseStore): @@ -217,10 +234,10 @@ class ApplicationServiceStore(SQLBaseStore): if res: return res[0] - def get_services_for_event(self, event): - return self.cache.get_services_for_event(event) + def get_app_services(self): + return self.cache.services - def get_app_service(self, token, from_cache=True): + def get_app_service_by_token(self, token, from_cache=True): """Get the application service with the given token. Args: From 3bd2841fdbbbc2e290d13cbd1aa9becc315d2f1c Mon Sep 17 00:00:00 2001 From: Kegan Dougal Date: Tue, 3 Feb 2015 11:37:52 +0000 Subject: [PATCH 014/112] Everyone loves SQL typos --- synapse/storage/appservice.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/synapse/storage/appservice.py b/synapse/storage/appservice.py index c4e50be4c6..07ed0adcf8 100644 --- a/synapse/storage/appservice.py +++ b/synapse/storage/appservice.py @@ -212,7 +212,7 @@ class ApplicationServiceStore(SQLBaseStore): ) # cleanup regex txn.execute( - "DELETE FROM application_services_regex WHERE id=?", + "DELETE FROM application_services_regex WHERE as_id=?", (as_id,) ) for (ns_int, ns_str) in enumerate(namespace_enum): From a060b47b13037da56ed8db2978a297133c23fc7f Mon Sep 17 00:00:00 2001 From: Kegan Dougal Date: Tue, 3 Feb 2015 13:17:28 +0000 Subject: [PATCH 015/112] Add namespace constants. Add restrict_to option to limit namespace checks. --- synapse/handlers/appservice.py | 23 +++++++++++-------- synapse/storage/appservice.py | 41 ++++++++++++++++++++-------------- 2 files changed, 38 insertions(+), 26 deletions(-) diff --git a/synapse/handlers/appservice.py b/synapse/handlers/appservice.py index 9b8dd1bb49..bf68b33398 100644 --- a/synapse/handlers/appservice.py +++ b/synapse/handlers/appservice.py @@ -52,11 +52,12 @@ class ApplicationServicesHandler(BaseHandler): logger.info("Unregister as_token=%s", token) yield self.store.unregister_app_service(token) - def get_services_for_event(self, event): + def get_services_for_event(self, event, restrict_to=""): """Retrieve a list of application services interested in this event. Args: event(Event): The event to check. + restrict_to(str): The namespace to restrict regex tests to. Returns: list: A list of services interested in this event based on the service regex. @@ -66,7 +67,7 @@ class ApplicationServicesHandler(BaseHandler): interested_list = [ s for s in self.store.get_app_services() if ( - s.is_interested(event, alias_list) + s.is_interested(event, restrict_to, alias_list) ) ] return interested_list @@ -80,11 +81,15 @@ class ApplicationServicesHandler(BaseHandler): Args: event(Event): The event to push out to interested services. """ - # TODO: Gather interested services - # get_services_for_event(event) <-- room IDs and user IDs - # Get a list of room aliases. Check regex. - # TODO: If unknown user: poke User Query API. - # TODO: If unknown room alias: poke Room Alias Query API. + # Gather interested services + services = self.get_services_for_event(event) + if len(services) == 0: + return # no services need notifying - # TODO: Fork off pushes to these services - XXX First cut, best effort - pass + # Do we know this user exists? If not, poke the user query API for + # all services which match that user regex. 
+ + # Do we know this room alias exists? If not, poke the room alias query + # API for all services which match that room alias regex. + + # Fork off pushes to these services - XXX First cut, best effort diff --git a/synapse/storage/appservice.py b/synapse/storage/appservice.py index 07ed0adcf8..277741fced 100644 --- a/synapse/storage/appservice.py +++ b/synapse/storage/appservice.py @@ -23,12 +23,6 @@ from ._base import SQLBaseStore logger = logging.getLogger(__name__) -namespace_enum = [ - "users", # 0 - "aliases", # 1 - "rooms" # 2 -] - # XXX: This feels like it should belong in a "models" module, not storage. class ApplicationService(object): @@ -36,6 +30,10 @@ class ApplicationService(object): Provides methods to check if this service is "interested" in events. """ + NS_USERS = "users" + NS_ALIASES = "aliases" + NS_ROOMS = "rooms" + NS_LIST = [NS_USERS, NS_ALIASES, NS_ROOMS] def __init__(self, token, url=None, namespaces=None): self.token = token @@ -52,7 +50,7 @@ class ApplicationService(object): if not namespaces: return None - for ns in ["users", "rooms", "aliases"]: + for ns in ApplicationService.NS_LIST: if type(namespaces[ns]) != list: raise ValueError("Bad namespace value for '%s'", ns) for regex in namespaces[ns]: @@ -68,31 +66,36 @@ class ApplicationService(object): def _matches_user(self, event): if (hasattr(event, "user_id") and - self._matches_regex(event.user_id, "users")): + self._matches_regex( + event.user_id, ApplicationService.NS_USERS)): return True # also check m.room.member state key if (hasattr(event, "type") and event.type == EventTypes.Member and hasattr(event, "state_key") - and self._matches_regex(event.state_key, "users")): + and self._matches_regex( + event.state_key, ApplicationService.NS_USERS)): return True return False def _matches_room_id(self, event): if hasattr(event, "room_id"): - return self._matches_regex(event.room_id, "rooms") + return self._matches_regex( + event.room_id, ApplicationService.NS_ROOMS + ) return False def _matches_aliases(self, event, alias_list): for alias in alias_list: - if self._matches_regex(alias, "aliases"): + if self._matches_regex(alias, ApplicationService.NS_ALIASES): return True return False - def is_interested(self, event, aliases_for_event=None): + def is_interested(self, event, restrict_to=None, aliases_for_event=None): """Check if this service is interested in this event. Args: event(Event): The event to check. + restrict_to(str): The namespace to restrict regex tests to. aliases_for_event(list): A list of all the known room aliases for this event. Returns: @@ -100,6 +103,9 @@ class ApplicationService(object): """ if aliases_for_event is None: aliases_for_event = [] + if restrict_to not in ApplicationService.NS_LIST: + # this is a programming error, so raise a general exception + raise Exception("Unexpected restrict_to value: %s". 
restrict_to) return (self._matches_user(event) or self._matches_aliases(event, aliases_for_event) @@ -215,7 +221,7 @@ class ApplicationServiceStore(SQLBaseStore): "DELETE FROM application_services_regex WHERE as_id=?", (as_id,) ) - for (ns_int, ns_str) in enumerate(namespace_enum): + for (ns_int, ns_str) in enumerate(ApplicationService.NS_LIST): if ns_str in service.namespaces: for regex in service.namespaces[ns_str]: txn.execute( @@ -285,9 +291,9 @@ class ApplicationServiceStore(SQLBaseStore): "url": res["url"], "token": as_token, "namespaces": { - "users": [], - "aliases": [], - "rooms": [] + ApplicationService.NS_USERS: [], + ApplicationService.NS_ALIASES: [], + ApplicationService.NS_ROOMS: [] } } # add the namespace regex if one exists @@ -295,7 +301,8 @@ class ApplicationServiceStore(SQLBaseStore): if ns_int is None: continue try: - services[as_token]["namespaces"][namespace_enum[ns_int]].append( + services[as_token]["namespaces"][ + ApplicationService.NS_LIST[ns_int]].append( res["regex"] ) except IndexError: From f2c039bfb958ed349bce42098e296995786374cc Mon Sep 17 00:00:00 2001 From: Kegan Dougal Date: Tue, 3 Feb 2015 13:29:27 +0000 Subject: [PATCH 016/112] Implement restricted namespace checks. Begin fleshing out the main hook for notifying application services. --- synapse/handlers/appservice.py | 19 +++++++++++++++++++ synapse/storage/appservice.py | 21 ++++++++++++++++----- 2 files changed, 35 insertions(+), 5 deletions(-) diff --git a/synapse/handlers/appservice.py b/synapse/handlers/appservice.py index bf68b33398..dac63e2245 100644 --- a/synapse/handlers/appservice.py +++ b/synapse/handlers/appservice.py @@ -17,6 +17,7 @@ from twisted.internet import defer from ._base import BaseHandler from synapse.api.errors import Codes, StoreError, SynapseError +from synapse.storage.appservice import ApplicationService import logging @@ -88,8 +89,26 @@ class ApplicationServicesHandler(BaseHandler): # Do we know this user exists? If not, poke the user query API for # all services which match that user regex. + unknown_user = False # TODO check + if unknown_user: + user_query_services = self.get_services_for_event( + event=event, + restrict_to=ApplicationService.NS_USERS + ) + for user_service in user_query_services: + pass # TODO poke User Query API # Do we know this room alias exists? If not, poke the room alias query # API for all services which match that room alias regex. + unknown_room_alias = False # TODO check + if unknown_room_alias: + alias_query_services = self.get_services_for_event( + event=event, + restrict_to=ApplicationService.NS_ALIASES + ) + for alias_service in alias_query_services: + pass # TODO poke Room Alias Query API # Fork off pushes to these services - XXX First cut, best effort + for service in services: + pass # TODO push event to service diff --git a/synapse/storage/appservice.py b/synapse/storage/appservice.py index 277741fced..cdf26ee434 100644 --- a/synapse/storage/appservice.py +++ b/synapse/storage/appservice.py @@ -33,6 +33,9 @@ class ApplicationService(object): NS_USERS = "users" NS_ALIASES = "aliases" NS_ROOMS = "rooms" + # The ordering here is important as it is used to map database values (which + # are stored as ints representing the position in this list) to namespace + # values. 
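Because the regex table stores the namespace as an integer, both the write path and the read path depend on NS_LIST keeping exactly this order. A minimal, self-contained illustration of the round trip (the namespaces dict is invented for the example):

    NS_LIST = ["users", "aliases", "rooms"]
    namespaces = {"users": ["@irc_.*"], "aliases": ["#irc_.*:example.com"], "rooms": []}

    # Writing: enumerate(NS_LIST) turns each namespace name into its index.
    rows = []
    for ns_int, ns_str in enumerate(NS_LIST):
        for regex in namespaces[ns_str]:
            rows.append((ns_int, regex))          # e.g. (0, "@irc_.*")

    # Reading: the stored integer indexes straight back into NS_LIST.
    for ns_int, regex in rows:
        print("%s -> %s" % (NS_LIST[ns_int], regex))
    # users -> @irc_.*
    # aliases -> #irc_.*:example.com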
NS_LIST = [NS_USERS, NS_ALIASES, NS_ROOMS] def __init__(self, token, url=None, namespaces=None): @@ -103,13 +106,21 @@ class ApplicationService(object): """ if aliases_for_event is None: aliases_for_event = [] - if restrict_to not in ApplicationService.NS_LIST: - # this is a programming error, so raise a general exception + if restrict_to and restrict_to not in ApplicationService.NS_LIST: + # this is a programming error, so fail early and raise a general + # exception raise Exception("Unexpected restrict_to value: %s". restrict_to) - return (self._matches_user(event) - or self._matches_aliases(event, aliases_for_event) - or self._matches_room_id(event)) + if not restrict_to: + return (self._matches_user(event) + or self._matches_aliases(event, aliases_for_event) + or self._matches_room_id(event)) + elif restrict_to == ApplicationService.NS_ALIASES: + return self._matches_aliases(event, aliases_for_event) + elif restrict_to == ApplicationService.NS_ROOMS: + return self._matches_room_id(event) + elif restrict_to == ApplicationService.NS_USERS: + return self._matches_user(event) def __str__(self): return "ApplicationService: %s" % (self.__dict__,) From 94a5db9f4d400a345c5d8b9f7bacb0c9ccf99959 Mon Sep 17 00:00:00 2001 From: Kegan Dougal Date: Tue, 3 Feb 2015 14:44:16 +0000 Subject: [PATCH 017/112] Add appservice package and move ApplicationService into it. --- synapse/appservice/__init__.py | 119 +++++++++++++++++++++++++++++++++ synapse/appservice/api.py | 15 +++++ synapse/handlers/appservice.py | 4 +- synapse/storage/appservice.py | 105 +---------------------------- 4 files changed, 138 insertions(+), 105 deletions(-) create mode 100644 synapse/appservice/__init__.py create mode 100644 synapse/appservice/api.py diff --git a/synapse/appservice/__init__.py b/synapse/appservice/__init__.py new file mode 100644 index 0000000000..f801fb5324 --- /dev/null +++ b/synapse/appservice/__init__.py @@ -0,0 +1,119 @@ +# -*- coding: utf-8 -*- +# Copyright 2015 OpenMarket Ltd +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +from synapse.api.constants import EventTypes + +import re + + +class ApplicationService(object): + """Defines an application service. This definition is mostly what is + provided to the /register AS API. + + Provides methods to check if this service is "interested" in events. + """ + NS_USERS = "users" + NS_ALIASES = "aliases" + NS_ROOMS = "rooms" + # The ordering here is important as it is used to map database values (which + # are stored as ints representing the position in this list) to namespace + # values. 
+ NS_LIST = [NS_USERS, NS_ALIASES, NS_ROOMS] + + def __init__(self, token, url=None, namespaces=None): + self.token = token + self.url = url + self.namespaces = self._check_namespaces(namespaces) + + def _check_namespaces(self, namespaces): + # Sanity check that it is of the form: + # { + # users: ["regex",...], + # aliases: ["regex",...], + # rooms: ["regex",...], + # } + if not namespaces: + return None + + for ns in ApplicationService.NS_LIST: + if type(namespaces[ns]) != list: + raise ValueError("Bad namespace value for '%s'", ns) + for regex in namespaces[ns]: + if not isinstance(regex, basestring): + raise ValueError("Expected string regex for ns '%s'", ns) + return namespaces + + def _matches_regex(self, test_string, namespace_key): + for regex in self.namespaces[namespace_key]: + if re.match(regex, test_string): + return True + return False + + def _matches_user(self, event): + if (hasattr(event, "user_id") and + self._matches_regex( + event.user_id, ApplicationService.NS_USERS)): + return True + # also check m.room.member state key + if (hasattr(event, "type") and event.type == EventTypes.Member + and hasattr(event, "state_key") + and self._matches_regex( + event.state_key, ApplicationService.NS_USERS)): + return True + return False + + def _matches_room_id(self, event): + if hasattr(event, "room_id"): + return self._matches_regex( + event.room_id, ApplicationService.NS_ROOMS + ) + return False + + def _matches_aliases(self, event, alias_list): + for alias in alias_list: + if self._matches_regex(alias, ApplicationService.NS_ALIASES): + return True + return False + + def is_interested(self, event, restrict_to=None, aliases_for_event=None): + """Check if this service is interested in this event. + + Args: + event(Event): The event to check. + restrict_to(str): The namespace to restrict regex tests to. + aliases_for_event(list): A list of all the known room aliases for + this event. + Returns: + bool: True if this service would like to know about this event. + """ + if aliases_for_event is None: + aliases_for_event = [] + if restrict_to and restrict_to not in ApplicationService.NS_LIST: + # this is a programming error, so fail early and raise a general + # exception + raise Exception("Unexpected restrict_to value: %s". restrict_to) + + if not restrict_to: + return (self._matches_user(event) + or self._matches_aliases(event, aliases_for_event) + or self._matches_room_id(event)) + elif restrict_to == ApplicationService.NS_ALIASES: + return self._matches_aliases(event, aliases_for_event) + elif restrict_to == ApplicationService.NS_ROOMS: + return self._matches_room_id(event) + elif restrict_to == ApplicationService.NS_USERS: + return self._matches_user(event) + + def __str__(self): + return "ApplicationService: %s" % (self.__dict__,) diff --git a/synapse/appservice/api.py b/synapse/appservice/api.py new file mode 100644 index 0000000000..803f97ea4f --- /dev/null +++ b/synapse/appservice/api.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# Copyright 2015 OpenMarket Ltd +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
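The class moved above is exercised properly by the unit tests added in patch 019, but a short, hedged usage sketch may help (the token, URL and regexes are invented, and FakeEvent merely stands in for a real event object, which at this point is matched on its user_id attribute). Note in passing that the carried-over line `raise Exception("Unexpected restrict_to value: %s". restrict_to)` has a `.` where a `%` was presumably intended, so an invalid restrict_to surfaces as an AttributeError rather than the intended message.

    from synapse.appservice import ApplicationService

    class FakeEvent(object):
        def __init__(self, user_id, type, room_id):
            self.user_id = user_id
            self.type = type
            self.room_id = room_id

    service = ApplicationService(
        token="some_as_token",
        url="https://bridge.example.com",
        namespaces={
            ApplicationService.NS_USERS: ["@irc_.*"],
            ApplicationService.NS_ALIASES: ["#irc_.*:example.com"],
            ApplicationService.NS_ROOMS: [],
        },
    )

    event = FakeEvent("@irc_alice:example.com", "m.room.message", "!abc:example.com")
    print(service.is_interested(event))   # True: the users namespace matches
    # False: only the (empty) rooms namespace is consulted.
    print(service.is_interested(event, restrict_to=ApplicationService.NS_ROOMS))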
+# See the License for the specific language governing permissions and +# limitations under the License. + diff --git a/synapse/handlers/appservice.py b/synapse/handlers/appservice.py index dac63e2245..f05b57bcb9 100644 --- a/synapse/handlers/appservice.py +++ b/synapse/handlers/appservice.py @@ -17,7 +17,7 @@ from twisted.internet import defer from ._base import BaseHandler from synapse.api.errors import Codes, StoreError, SynapseError -from synapse.storage.appservice import ApplicationService +from synapse.appservice import ApplicationService import logging @@ -96,6 +96,7 @@ class ApplicationServicesHandler(BaseHandler): restrict_to=ApplicationService.NS_USERS ) for user_service in user_query_services: + # this needs to block XXX: Need to feed response back to caller pass # TODO poke User Query API # Do we know this room alias exists? If not, poke the room alias query @@ -107,6 +108,7 @@ class ApplicationServicesHandler(BaseHandler): restrict_to=ApplicationService.NS_ALIASES ) for alias_service in alias_query_services: + # this needs to block XXX: Need to feed response back to caller pass # TODO poke Room Alias Query API # Fork off pushes to these services - XXX First cut, best effort diff --git a/synapse/storage/appservice.py b/synapse/storage/appservice.py index cdf26ee434..48bc7e0fe6 100644 --- a/synapse/storage/appservice.py +++ b/synapse/storage/appservice.py @@ -13,119 +13,16 @@ # See the License for the specific language governing permissions and # limitations under the License. import logging -import re from twisted.internet import defer -from synapse.api.constants import EventTypes from synapse.api.errors import StoreError +from synapse.appservice import ApplicationService from ._base import SQLBaseStore logger = logging.getLogger(__name__) -# XXX: This feels like it should belong in a "models" module, not storage. -class ApplicationService(object): - """Defines an application service. - - Provides methods to check if this service is "interested" in events. - """ - NS_USERS = "users" - NS_ALIASES = "aliases" - NS_ROOMS = "rooms" - # The ordering here is important as it is used to map database values (which - # are stored as ints representing the position in this list) to namespace - # values. 
- NS_LIST = [NS_USERS, NS_ALIASES, NS_ROOMS] - - def __init__(self, token, url=None, namespaces=None): - self.token = token - self.url = url - self.namespaces = self._check_namespaces(namespaces) - - def _check_namespaces(self, namespaces): - # Sanity check that it is of the form: - # { - # users: ["regex",...], - # aliases: ["regex",...], - # rooms: ["regex",...], - # } - if not namespaces: - return None - - for ns in ApplicationService.NS_LIST: - if type(namespaces[ns]) != list: - raise ValueError("Bad namespace value for '%s'", ns) - for regex in namespaces[ns]: - if not isinstance(regex, basestring): - raise ValueError("Expected string regex for ns '%s'", ns) - return namespaces - - def _matches_regex(self, test_string, namespace_key): - for regex in self.namespaces[namespace_key]: - if re.match(regex, test_string): - return True - return False - - def _matches_user(self, event): - if (hasattr(event, "user_id") and - self._matches_regex( - event.user_id, ApplicationService.NS_USERS)): - return True - # also check m.room.member state key - if (hasattr(event, "type") and event.type == EventTypes.Member - and hasattr(event, "state_key") - and self._matches_regex( - event.state_key, ApplicationService.NS_USERS)): - return True - return False - - def _matches_room_id(self, event): - if hasattr(event, "room_id"): - return self._matches_regex( - event.room_id, ApplicationService.NS_ROOMS - ) - return False - - def _matches_aliases(self, event, alias_list): - for alias in alias_list: - if self._matches_regex(alias, ApplicationService.NS_ALIASES): - return True - return False - - def is_interested(self, event, restrict_to=None, aliases_for_event=None): - """Check if this service is interested in this event. - - Args: - event(Event): The event to check. - restrict_to(str): The namespace to restrict regex tests to. - aliases_for_event(list): A list of all the known room aliases for - this event. - Returns: - bool: True if this service would like to know about this event. - """ - if aliases_for_event is None: - aliases_for_event = [] - if restrict_to and restrict_to not in ApplicationService.NS_LIST: - # this is a programming error, so fail early and raise a general - # exception - raise Exception("Unexpected restrict_to value: %s". restrict_to) - - if not restrict_to: - return (self._matches_user(event) - or self._matches_aliases(event, aliases_for_event) - or self._matches_room_id(event)) - elif restrict_to == ApplicationService.NS_ALIASES: - return self._matches_aliases(event, aliases_for_event) - elif restrict_to == ApplicationService.NS_ROOMS: - return self._matches_room_id(event) - elif restrict_to == ApplicationService.NS_USERS: - return self._matches_user(event) - - def __str__(self): - return "ApplicationService: %s" % (self.__dict__,) - - class ApplicationServiceCache(object): """Caches ApplicationServices and provides utility functions on top. From 17753f0c20d0d8190095c5a3183630b78bf9650c Mon Sep 17 00:00:00 2001 From: Kegan Dougal Date: Wed, 4 Feb 2015 11:19:18 +0000 Subject: [PATCH 018/112] Add stub ApplicationServiceApi and glue it with the handler. 
--- synapse/appservice/__init__.py | 3 ++- synapse/appservice/api.py | 21 +++++++++++++++++++++ synapse/handlers/appservice.py | 18 +++++++++++++++--- synapse/storage/appservice.py | 1 + 4 files changed, 39 insertions(+), 4 deletions(-) diff --git a/synapse/appservice/__init__.py b/synapse/appservice/__init__.py index f801fb5324..92f64619c9 100644 --- a/synapse/appservice/__init__.py +++ b/synapse/appservice/__init__.py @@ -31,10 +31,11 @@ class ApplicationService(object): # values. NS_LIST = [NS_USERS, NS_ALIASES, NS_ROOMS] - def __init__(self, token, url=None, namespaces=None): + def __init__(self, token, url=None, namespaces=None, txn_id=None): self.token = token self.url = url self.namespaces = self._check_namespaces(namespaces) + self.txn_id = None def _check_namespaces(self, namespaces): # Sanity check that it is of the form: diff --git a/synapse/appservice/api.py b/synapse/appservice/api.py index 803f97ea4f..158aded66e 100644 --- a/synapse/appservice/api.py +++ b/synapse/appservice/api.py @@ -13,3 +13,24 @@ # See the License for the specific language governing permissions and # limitations under the License. + +class ApplicationServiceApi(object): + """This class manages HS -> AS communications, including querying and + pushing. + """ + + def __init__(self, hs): + self.hs_token = "_hs_token_" # TODO extract hs token + + def query_user(self, service, user_id): + pass + + def query_alias(self, service, alias): + pass + + def push_bulk(self, service, events): + pass + + def push(self, service, event): + pass + diff --git a/synapse/handlers/appservice.py b/synapse/handlers/appservice.py index f05b57bcb9..9cdeaa2d94 100644 --- a/synapse/handlers/appservice.py +++ b/synapse/handlers/appservice.py @@ -18,6 +18,7 @@ from twisted.internet import defer from ._base import BaseHandler from synapse.api.errors import Codes, StoreError, SynapseError from synapse.appservice import ApplicationService +from synapse.appservice.api import ApplicationServiceApi import logging @@ -29,6 +30,7 @@ class ApplicationServicesHandler(BaseHandler): def __init__(self, hs): super(ApplicationServicesHandler, self).__init__(hs) + self.appservice_api = ApplicationServiceApi(hs) @defer.inlineCallbacks def register(self, app_service): @@ -97,7 +99,12 @@ class ApplicationServicesHandler(BaseHandler): ) for user_service in user_query_services: # this needs to block XXX: Need to feed response back to caller - pass # TODO poke User Query API + is_known_user = self.appservice_api.query_user( + user_service, event + ) + if is_known_user: + # the user exists now,so don't query more ASes. + break # Do we know this room alias exists? If not, poke the room alias query # API for all services which match that room alias regex. @@ -109,8 +116,13 @@ class ApplicationServicesHandler(BaseHandler): ) for alias_service in alias_query_services: # this needs to block XXX: Need to feed response back to caller - pass # TODO poke Room Alias Query API + is_known_alias = self.appservice_api.query_alias( + alias_service, event + ) + if is_known_alias: + # the alias exists now so don't query more ASes. 
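Both query loops above need to block on each AS response before deciding whether to try the next service; patch 021 later turns the calls into yields. The underlying "first service that recognises it wins" pattern, sketched generically (the function and argument names are illustrative, and query_fn is assumed to return a Deferred firing with a boolean):

    from twisted.internet import defer

    @defer.inlineCallbacks
    def first_service_that_knows(query_fn, services, key):
        # Ask each candidate AS in turn and stop as soon as one of them
        # recognises the user ID or room alias we were given.
        for service in services:
            known = yield query_fn(service, key)
            if known:
                defer.returnValue(service)
        defer.returnValue(None)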
+ break # Fork off pushes to these services - XXX First cut, best effort for service in services: - pass # TODO push event to service + self.appservice_api.push(service, event) diff --git a/synapse/storage/appservice.py b/synapse/storage/appservice.py index 48bc7e0fe6..abb617f049 100644 --- a/synapse/storage/appservice.py +++ b/synapse/storage/appservice.py @@ -216,6 +216,7 @@ class ApplicationServiceStore(SQLBaseStore): except IndexError: logger.error("Bad namespace enum '%s'. %s", ns_int, res) + # TODO get last successful txn id f.e. service for service in services.values(): logger.info("Found application service: %s", service) self.cache.services.append(ApplicationService( From 525a218b2b072b24721c9c9efae42aae21388fc8 Mon Sep 17 00:00:00 2001 From: Kegan Dougal Date: Wed, 4 Feb 2015 12:24:20 +0000 Subject: [PATCH 019/112] Begin to add unit tests for appservice glue and regex testing. --- synapse/appservice/__init__.py | 14 +++++- synapse/handlers/appservice.py | 8 +++- tests/appservice/__init__.py | 14 ++++++ tests/appservice/test_appservice.py | 58 ++++++++++++++++++++++++ tests/handlers/test_appservice.py | 68 +++++++++++++++++++++++++++++ 5 files changed, 158 insertions(+), 4 deletions(-) create mode 100644 tests/appservice/__init__.py create mode 100644 tests/appservice/test_appservice.py create mode 100644 tests/handlers/test_appservice.py diff --git a/synapse/appservice/__init__.py b/synapse/appservice/__init__.py index 92f64619c9..0c7f58574e 100644 --- a/synapse/appservice/__init__.py +++ b/synapse/appservice/__init__.py @@ -14,8 +14,11 @@ # limitations under the License. from synapse.api.constants import EventTypes +import logging import re +logger = logging.getLogger(__name__) + class ApplicationService(object): """Defines an application service. This definition is mostly what is @@ -56,15 +59,22 @@ class ApplicationService(object): return namespaces def _matches_regex(self, test_string, namespace_key): + if not isinstance(test_string, basestring): + logger.warning( + "Expected a string to test regex against, but got %s", + test_string + ) + return False + for regex in self.namespaces[namespace_key]: if re.match(regex, test_string): return True return False def _matches_user(self, event): - if (hasattr(event, "user_id") and + if (hasattr(event, "sender") and self._matches_regex( - event.user_id, ApplicationService.NS_USERS)): + event.sender, ApplicationService.NS_USERS)): return True # also check m.room.member state key if (hasattr(event, "type") and event.type == EventTypes.Member diff --git a/synapse/handlers/appservice.py b/synapse/handlers/appservice.py index 9cdeaa2d94..3188c60f3d 100644 --- a/synapse/handlers/appservice.py +++ b/synapse/handlers/appservice.py @@ -26,10 +26,14 @@ import logging logger = logging.getLogger(__name__) -class ApplicationServicesHandler(BaseHandler): +# NB: Purposefully not inheriting BaseHandler since that contains way too much +# setup code which this handler does not need or use. This makes testing a lot +# easier. 
+class ApplicationServicesHandler(object): def __init__(self, hs): - super(ApplicationServicesHandler, self).__init__(hs) + self.store = hs.get_datastore() + self.hs = hs self.appservice_api = ApplicationServiceApi(hs) @defer.inlineCallbacks diff --git a/tests/appservice/__init__.py b/tests/appservice/__init__.py new file mode 100644 index 0000000000..1a84d94cd9 --- /dev/null +++ b/tests/appservice/__init__.py @@ -0,0 +1,14 @@ +# -*- coding: utf-8 -*- +# Copyright 2015 OpenMarket Ltd +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. diff --git a/tests/appservice/test_appservice.py b/tests/appservice/test_appservice.py new file mode 100644 index 0000000000..5cfd26daa6 --- /dev/null +++ b/tests/appservice/test_appservice.py @@ -0,0 +1,58 @@ +# -*- coding: utf-8 -*- +# Copyright 2015 OpenMarket Ltd +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +from synapse.appservice import ApplicationService + +from mock import Mock, PropertyMock +from tests import unittest + + +class ApplicationServiceTestCase(unittest.TestCase): + + def setUp(self): + self.service = ApplicationService( + url="some_url", + token="some_token", + namespaces={ + ApplicationService.NS_USERS: [], + ApplicationService.NS_ROOMS: [], + ApplicationService.NS_ALIASES: [] + } + ) + self.event = Mock( + type="m.something", room_id="!foo:bar", sender="@someone:somewhere" + ) + + def test_regex_user_id_prefix_match(self): + self.service.namespaces[ApplicationService.NS_USERS].append( + "@irc_.*" + ) + self.event.sender = "@irc_foobar:matrix.org" + self.assertTrue(self.service.is_interested(self.event)) + + def test_regex_user_id_prefix_no_match(self): + self.service.namespaces[ApplicationService.NS_USERS].append( + "@irc_.*" + ) + self.event.sender = "@someone_else:matrix.org" + self.assertFalse(self.service.is_interested(self.event)) + + def test_regex_room_member_is_checked(self): + self.service.namespaces[ApplicationService.NS_USERS].append( + "@irc_.*" + ) + self.event.sender = "@someone_else:matrix.org" + self.event.type = "m.room.member" + self.event.state_key = "@irc_foobar:matrix.org" + self.assertTrue(self.service.is_interested(self.event)) diff --git a/tests/handlers/test_appservice.py b/tests/handlers/test_appservice.py new file mode 100644 index 0000000000..9c464e7fbc --- /dev/null +++ b/tests/handlers/test_appservice.py @@ -0,0 +1,68 @@ +# -*- coding: utf-8 -*- +# Copyright 2015 OpenMarket Ltd +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
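The prefix tests above work because _matches_regex uses re.match, which anchors only at the start of the string: a pattern like "@irc_.*" therefore behaves as a prefix check without an explicit ^, and nothing constrains the end of the string.

    import re

    pattern = "@irc_.*"
    print(bool(re.match(pattern, "@irc_foobar:matrix.org")))    # True  - anchored at the start
    print(bool(re.match(pattern, "@someone_else:matrix.org")))  # False - prefix does not match
    print(bool(re.match(pattern, "x@irc_foobar:matrix.org")))   # False - match() never skips ahead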
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from twisted.internet import defer +from .. import unittest + +from synapse.handlers.appservice import ApplicationServicesHandler + +from collections import namedtuple +from mock import Mock + +# TODO: Should this be a more general thing? tests/api/test_filtering.py uses it +MockEvent = namedtuple("MockEvent", "sender type room_id") + + +class AppServiceHandlerTestCase(unittest.TestCase): + """ Tests the ApplicationServicesHandler. """ + + def setUp(self): + self.mock_store = Mock() + self.mock_as_api = Mock() + hs = Mock() + hs.get_datastore = Mock(return_value=self.mock_store) + self.handler = ApplicationServicesHandler(hs) # thing being tested + + # FIXME Would be nice to DI this rather than monkey patch:( + if not hasattr(self.handler, "appservice_api"): + # someone probably updated the handler but not the tests. Fail fast. + raise Exception("Test expected handler.appservice_api to exist.") + self.handler.appservice_api = self.mock_as_api + + def test_notify_interested_services(self): + interested_service = self._mkservice(is_interested=True) + services = [ + self._mkservice(is_interested=False), + interested_service, + self._mkservice(is_interested=False) + ] + + self.mock_store.get_app_services = Mock(return_value=services) + + event = MockEvent( + sender="@someone:anywhere", + type="m.room.message", + room_id="!foo:bar" + ) + self.mock_as_api.push = Mock() + self.handler.notify_interested_services(event) + self.mock_as_api.push.assert_called_once_with(interested_service, event) + + def _mkservice(self, is_interested): + service = Mock() + service.is_interested = Mock(return_value=is_interested) + service.token = "mock_service_token" + service.url = "mock_service_url" + return service From 89f2e8fbdf7965d02426ef17ca6a9490219a2ec4 Mon Sep 17 00:00:00 2001 From: Kegan Dougal Date: Wed, 4 Feb 2015 15:21:03 +0000 Subject: [PATCH 020/112] Fix bug in store defer. Add more unit tests. --- synapse/storage/appservice.py | 18 +++-- tests/appservice/test_appservice.py | 87 +++++++++++++++++++++++ tests/handlers/test_appservice.py | 6 +- tests/storage/test_appservice.py | 105 ++++++++++++++++++++++++++++ 4 files changed, 207 insertions(+), 9 deletions(-) create mode 100644 tests/storage/test_appservice.py diff --git a/synapse/storage/appservice.py b/synapse/storage/appservice.py index abb617f049..b64416de28 100644 --- a/synapse/storage/appservice.py +++ b/synapse/storage/appservice.py @@ -40,7 +40,7 @@ class ApplicationServiceStore(SQLBaseStore): def __init__(self, hs): super(ApplicationServiceStore, self).__init__(hs) self.cache = ApplicationServiceCache() - self._populate_cache() + self.cache_defer = self._populate_cache() @defer.inlineCallbacks def unregister_app_service(self, token): @@ -49,6 +49,7 @@ class ApplicationServiceStore(SQLBaseStore): This removes all AS specific regex and the base URL. The token is the only thing preserved for future registration attempts. 
""" + yield self.cache_defer # make sure the cache is ready yield self.runInteraction( "unregister_app_service", self._unregister_app_service_txn, @@ -89,9 +90,13 @@ class ApplicationServiceStore(SQLBaseStore): Args: service(ApplicationService): The updated service. """ + yield self.cache_defer # make sure the cache is ready + # NB: There is no "insert" since we provide no public-facing API to # allocate new ASes. It relies on the server admin inserting the AS # token into the database manually. + + if not service.token or not service.url: raise StoreError(400, "Token and url must be specified.") @@ -148,9 +153,12 @@ class ApplicationServiceStore(SQLBaseStore): if res: return res[0] + @defer.inlineCallbacks def get_app_services(self): - return self.cache.services + yield self.cache_defer # make sure the cache is ready + defer.returnValue(self.cache.services) + @defer.inlineCallbacks def get_app_service_by_token(self, token, from_cache=True): """Get the application service with the given token. @@ -161,12 +169,14 @@ class ApplicationServiceStore(SQLBaseStore): Raises: StoreError if there was a problem retrieving this service. """ + yield self.cache_defer # make sure the cache is ready if from_cache: for service in self.cache.services: if service.token == token: - return service - return None + defer.returnValue(service) + return + defer.returnValue(None) # TODO: The from_cache=False impl # TODO: This should be JOINed with the application_services_regex table. diff --git a/tests/appservice/test_appservice.py b/tests/appservice/test_appservice.py index 5cfd26daa6..c0aaf12785 100644 --- a/tests/appservice/test_appservice.py +++ b/tests/appservice/test_appservice.py @@ -56,3 +56,90 @@ class ApplicationServiceTestCase(unittest.TestCase): self.event.type = "m.room.member" self.event.state_key = "@irc_foobar:matrix.org" self.assertTrue(self.service.is_interested(self.event)) + + def test_regex_room_id_match(self): + self.service.namespaces[ApplicationService.NS_ROOMS].append( + "!some_prefix.*some_suffix:matrix.org" + ) + self.event.room_id = "!some_prefixs0m3th1nGsome_suffix:matrix.org" + self.assertTrue(self.service.is_interested(self.event)) + + def test_regex_room_id_no_match(self): + self.service.namespaces[ApplicationService.NS_ROOMS].append( + "!some_prefix.*some_suffix:matrix.org" + ) + self.event.room_id = "!XqBunHwQIXUiqCaoxq:matrix.org" + self.assertFalse(self.service.is_interested(self.event)) + + def test_regex_alias_match(self): + self.service.namespaces[ApplicationService.NS_ALIASES].append( + "#irc_.*:matrix.org" + ) + self.assertTrue(self.service.is_interested( + self.event, + aliases_for_event=["#irc_foobar:matrix.org", "#athing:matrix.org"] + )) + + def test_regex_alias_no_match(self): + self.service.namespaces[ApplicationService.NS_ALIASES].append( + "#irc_.*:matrix.org" + ) + self.assertFalse(self.service.is_interested( + self.event, + aliases_for_event=["#xmpp_foobar:matrix.org", "#athing:matrix.org"] + )) + + def test_regex_multiple_matches(self): + self.service.namespaces[ApplicationService.NS_ALIASES].append( + "#irc_.*:matrix.org" + ) + self.service.namespaces[ApplicationService.NS_USERS].append( + "@irc_.*" + ) + self.event.sender = "@irc_foobar:matrix.org" + self.assertTrue(self.service.is_interested( + self.event, + aliases_for_event=["#irc_barfoo:matrix.org"] + )) + + def test_restrict_to_rooms(self): + self.service.namespaces[ApplicationService.NS_ROOMS].append( + "!flibble_.*:matrix.org" + ) + self.service.namespaces[ApplicationService.NS_USERS].append( + "@irc_.*" 
+ ) + self.event.sender = "@irc_foobar:matrix.org" + self.event.room_id = "!wibblewoo:matrix.org" + self.assertFalse(self.service.is_interested( + self.event, + restrict_to=ApplicationService.NS_ROOMS + )) + + def test_restrict_to_aliases(self): + self.service.namespaces[ApplicationService.NS_ALIASES].append( + "#xmpp_.*:matrix.org" + ) + self.service.namespaces[ApplicationService.NS_USERS].append( + "@irc_.*" + ) + self.event.sender = "@irc_foobar:matrix.org" + self.assertFalse(self.service.is_interested( + self.event, + restrict_to=ApplicationService.NS_ALIASES, + aliases_for_event=["#irc_barfoo:matrix.org"] + )) + + def test_restrict_to_senders(self): + self.service.namespaces[ApplicationService.NS_ALIASES].append( + "#xmpp_.*:matrix.org" + ) + self.service.namespaces[ApplicationService.NS_USERS].append( + "@irc_.*" + ) + self.event.sender = "@xmpp_foobar:matrix.org" + self.assertFalse(self.service.is_interested( + self.event, + restrict_to=ApplicationService.NS_USERS, + aliases_for_event=["#xmpp_barfoo:matrix.org"] + )) diff --git a/tests/handlers/test_appservice.py b/tests/handlers/test_appservice.py index 9c464e7fbc..1daa314f20 100644 --- a/tests/handlers/test_appservice.py +++ b/tests/handlers/test_appservice.py @@ -18,12 +18,8 @@ from .. import unittest from synapse.handlers.appservice import ApplicationServicesHandler -from collections import namedtuple from mock import Mock -# TODO: Should this be a more general thing? tests/api/test_filtering.py uses it -MockEvent = namedtuple("MockEvent", "sender type room_id") - class AppServiceHandlerTestCase(unittest.TestCase): """ Tests the ApplicationServicesHandler. """ @@ -51,7 +47,7 @@ class AppServiceHandlerTestCase(unittest.TestCase): self.mock_store.get_app_services = Mock(return_value=services) - event = MockEvent( + event = Mock( sender="@someone:anywhere", type="m.room.message", room_id="!foo:bar" diff --git a/tests/storage/test_appservice.py b/tests/storage/test_appservice.py new file mode 100644 index 0000000000..56fdda377c --- /dev/null +++ b/tests/storage/test_appservice.py @@ -0,0 +1,105 @@ +# -*- coding: utf-8 -*- +# Copyright 2015 OpenMarket Ltd +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
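Patch 020 above guards the store's public methods with `yield self.cache_defer`, the Deferred returned by the one-off _populate_cache() call made in __init__. Sketched in isolation (load_everything is a placeholder for the real database query), the pattern builds the cache exactly once while still making every caller wait until it is ready; once the Deferred has fired, later yields on it resume immediately. Only completion matters here, not the Deferred's result.

    from twisted.internet import defer

    class CachedStore(object):
        def __init__(self, load_everything):
            self.values = None
            # Kick off population once and keep the Deferred around.
            self.cache_defer = self._populate_cache(load_everything)

        @defer.inlineCallbacks
        def _populate_cache(self, load_everything):
            self.values = yield load_everything()

        @defer.inlineCallbacks
        def get_values(self):
            yield self.cache_defer      # make sure the cache is ready
            defer.returnValue(self.values)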
+from tests import unittest +from twisted.internet import defer + +from synapse.appservice import ApplicationService +from synapse.server import HomeServer +from synapse.storage.appservice import ApplicationServiceStore + +from tests.utils import SQLiteMemoryDbPool, MockClock + + +class ApplicationServiceStoreTestCase(unittest.TestCase): + + @defer.inlineCallbacks + def setUp(self): + db_pool = SQLiteMemoryDbPool() + yield db_pool.prepare() + hs = HomeServer("test", db_pool=db_pool, clock=MockClock()) + self.as_token = "token1" + db_pool.runQuery( + "INSERT INTO application_services(token) VALUES(?)", + (self.as_token,) + ) + db_pool.runQuery( + "INSERT INTO application_services(token) VALUES(?)", ("token2",) + ) + db_pool.runQuery( + "INSERT INTO application_services(token) VALUES(?)", ("token3",) + ) + # must be done after inserts + self.store = ApplicationServiceStore(hs) + + @defer.inlineCallbacks + def test_update_and_retrieval_of_service(self): + url = "https://matrix.org/appservices/foobar" + user_regex = ["@foobar_.*:matrix.org"] + alias_regex = ["#foobar_.*:matrix.org"] + room_regex = [] + service = ApplicationService(url=url, token=self.as_token, namespaces={ + ApplicationService.NS_USERS: user_regex, + ApplicationService.NS_ALIASES: alias_regex, + ApplicationService.NS_ROOMS: room_regex + }) + yield self.store.update_app_service(service) + + stored_service = yield self.store.get_app_service_by_token( + self.as_token + ) + self.assertEquals(stored_service.token, self.as_token) + self.assertEquals(stored_service.url, url) + self.assertEquals( + stored_service.namespaces[ApplicationService.NS_ALIASES], + alias_regex + ) + self.assertEquals( + stored_service.namespaces[ApplicationService.NS_ROOMS], + room_regex + ) + self.assertEquals( + stored_service.namespaces[ApplicationService.NS_USERS], + user_regex + ) + + @defer.inlineCallbacks + def test_retrieve_unknown_service_token(self): + service = yield self.store.get_app_service_by_token("invalid_token") + self.assertEquals(service, None) + + @defer.inlineCallbacks + def test_retrieval_of_service(self): + stored_service = yield self.store.get_app_service_by_token( + self.as_token + ) + self.assertEquals(stored_service.token, self.as_token) + self.assertEquals(stored_service.url, None) + self.assertEquals( + stored_service.namespaces[ApplicationService.NS_ALIASES], + [] + ) + self.assertEquals( + stored_service.namespaces[ApplicationService.NS_ROOMS], + [] + ) + self.assertEquals( + stored_service.namespaces[ApplicationService.NS_USERS], + [] + ) + + @defer.inlineCallbacks + def test_retrieval_of_all_services(self): + services = yield self.store.get_app_services() + self.assertEquals(len(services), 3) From aa8cce58bf3d3dbd1a0d512dbbd41b6545d2b1f9 Mon Sep 17 00:00:00 2001 From: Kegan Dougal Date: Wed, 4 Feb 2015 16:44:53 +0000 Subject: [PATCH 021/112] Add query_user/alias APIs. --- synapse/appservice/api.py | 47 +++++++++++++++++++++++++++++++--- synapse/handlers/appservice.py | 26 +++++++++++-------- 2 files changed, 59 insertions(+), 14 deletions(-) diff --git a/synapse/appservice/api.py b/synapse/appservice/api.py index 158aded66e..799ada96df 100644 --- a/synapse/appservice/api.py +++ b/synapse/appservice/api.py @@ -12,25 +12,64 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. 
+from twisted.internet import defer +from twisted.web.client import PartialDownloadError + +from synapse.http.client import SimpleHttpClient + +import logging +import urllib + +logger = logging.getLogger(__name__) -class ApplicationServiceApi(object): +class ApplicationServiceApi(SimpleHttpClient): """This class manages HS -> AS communications, including querying and pushing. """ def __init__(self, hs): + super(ApplicationServiceApi, self).__init__(hs) self.hs_token = "_hs_token_" # TODO extract hs token + @defer.inlineCallbacks def query_user(self, service, user_id): - pass + uri = service.url + ("/users/%s" % urllib.quote(user_id)) + response = None + try: + response = yield self.get_json(uri, { + "access_token": self.hs_token + }) + if response: # just an empty json object + defer.returnValue(True) + except PartialDownloadError as e: + if e.status == 404: + defer.returnValue(False) + return + logger.warning("query_user to %s received %s", (uri, e.status)) + @defer.inlineCallbacks def query_alias(self, service, alias): - pass + uri = service.url + ("/rooms/%s" % urllib.quote(alias)) + response = None + try: + response = yield self.get_json(uri, { + "access_token": self.hs_token + }) + logger.info("%s", response[0]) + if response: # just an empty json object + defer.returnValue(True) + except PartialDownloadError as e: + if e.status == 404: + defer.returnValue(False) + return + logger.warning("query_alias to %s received %s", (uri, e.status)) def push_bulk(self, service, events): pass + @defer.inlineCallbacks def push(self, service, event): - pass + response = yield self.push_bulk(service, [event]) + defer.returnValue(response) diff --git a/synapse/handlers/appservice.py b/synapse/handlers/appservice.py index 3188c60f3d..dd860b2244 100644 --- a/synapse/handlers/appservice.py +++ b/synapse/handlers/appservice.py @@ -55,10 +55,14 @@ class ApplicationServicesHandler(object): logger.info("Updating application service info...") yield self.store.update_app_service(app_service) + logger.info("Sending ping to %s...", app_service.url) + yield self.appservice_api.query_alias(app_service, "ping") + def unregister(self, token): logger.info("Unregister as_token=%s", token) yield self.store.unregister_app_service(token) + @defer.inlineCallbacks def get_services_for_event(self, event, restrict_to=""): """Retrieve a list of application services interested in this event. @@ -71,14 +75,15 @@ class ApplicationServicesHandler(object): """ # We need to know the aliases associated with this event.room_id, if any alias_list = [] # TODO - + services = yield self.store.get_app_services() interested_list = [ - s for s in self.store.get_app_services() if ( + s for s in services if ( s.is_interested(event, restrict_to, alias_list) ) ] - return interested_list + defer.returnValue(interested_list) + @defer.inlineCallbacks def notify_interested_services(self, event): """Notifies (pushes) all application services interested in this event. @@ -89,7 +94,7 @@ class ApplicationServicesHandler(object): event(Event): The event to push out to interested services. """ # Gather interested services - services = self.get_services_for_event(event) + services = yield self.get_services_for_event(event) if len(services) == 0: return # no services need notifying @@ -97,14 +102,14 @@ class ApplicationServicesHandler(object): # all services which match that user regex. 
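query_user and query_alias above define the HS -> AS query contract: a GET against the AS's own base URL (/users/... or /rooms/...) with the home server's token passed as access_token, where a 2xx response means "known" and a 404 means "unknown". One caveat worth noting: the comments expect the AS to reply with an empty JSON object, but an empty dict is falsy in Python, so the `if response:` checks as written only treat a non-empty body as confirmation. The URI construction itself, with an invented base URL and user ID:

    import urllib

    def user_query_uri(as_base_url, user_id):
        # Mirrors query_user() above: the user ID is percent-encoded into the path.
        return as_base_url + "/users/%s" % urllib.quote(user_id)

    print(user_query_uri("https://bridge.example.com", "@irc_alice:example.com"))
    # https://bridge.example.com/users/%40irc_alice%3Aexample.com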
unknown_user = False # TODO check if unknown_user: - user_query_services = self.get_services_for_event( + user_query_services = yield self.get_services_for_event( event=event, restrict_to=ApplicationService.NS_USERS ) for user_service in user_query_services: # this needs to block XXX: Need to feed response back to caller - is_known_user = self.appservice_api.query_user( - user_service, event + is_known_user = yield self.appservice_api.query_user( + user_service, event.sender ) if is_known_user: # the user exists now,so don't query more ASes. @@ -114,14 +119,15 @@ class ApplicationServicesHandler(object): # API for all services which match that room alias regex. unknown_room_alias = False # TODO check if unknown_room_alias: - alias_query_services = self.get_services_for_event( + alias = "something" # TODO + alias_query_services = yield self.get_services_for_event( event=event, restrict_to=ApplicationService.NS_ALIASES ) for alias_service in alias_query_services: # this needs to block XXX: Need to feed response back to caller - is_known_alias = self.appservice_api.query_alias( - alias_service, event + is_known_alias = yield self.appservice_api.query_alias( + alias_service, alias ) if is_known_alias: # the alias exists now so don't query more ASes. From 96d4bf90120a07faa5163c2e5af542358554dd61 Mon Sep 17 00:00:00 2001 From: Kegan Dougal Date: Wed, 4 Feb 2015 17:07:31 +0000 Subject: [PATCH 022/112] Modify API for SimpleHttpClient.get_json and update usages. Previously, this would only return the HTTP body as JSON, and discard other response information (e.g. the HTTP response code). This has now been changed to throw a CodeMessageException on a non-2xx response, with the response code and body, which can then be parsed as JSON. Affected modules include: - Registration/Login (when using an email for IS auth) --- synapse/handlers/login.py | 33 ++++++++++++--------- synapse/handlers/register.py | 56 +++++++++++++++++++++--------------- synapse/http/client.py | 19 ++++++------ 3 files changed, 61 insertions(+), 47 deletions(-) diff --git a/synapse/handlers/login.py b/synapse/handlers/login.py index d297d71c03..7447800460 100644 --- a/synapse/handlers/login.py +++ b/synapse/handlers/login.py @@ -16,12 +16,13 @@ from twisted.internet import defer from ._base import BaseHandler -from synapse.api.errors import LoginError, Codes +from synapse.api.errors import LoginError, Codes, CodeMessageException from synapse.http.client import SimpleHttpClient from synapse.util.emailutils import EmailException import synapse.util.emailutils as emailutils import bcrypt +import json import logging logger = logging.getLogger(__name__) @@ -96,16 +97,20 @@ class LoginHandler(BaseHandler): @defer.inlineCallbacks def _query_email(self, email): - httpCli = SimpleHttpClient(self.hs) - data = yield httpCli.get_json( - # TODO FIXME This should be configurable. - # XXX: ID servers need to use HTTPS - "http://%s%s" % ( - "matrix.org:8090", "/_matrix/identity/api/v1/lookup" - ), - { - 'medium': 'email', - 'address': email - } - ) - defer.returnValue(data) + http_client = SimpleHttpClient(self.hs) + try: + data = yield http_client.get_json( + # TODO FIXME This should be configurable. 
+ # XXX: ID servers need to use HTTPS + "http://%s%s" % ( + "matrix.org:8090", "/_matrix/identity/api/v1/lookup" + ), + { + 'medium': 'email', + 'address': email + } + ) + defer.returnValue(data) + except CodeMessageException as e: + data = json.loads(e.msg) + defer.returnValue(data) diff --git a/synapse/handlers/register.py b/synapse/handlers/register.py index 66a89c10b2..08cd5fd720 100644 --- a/synapse/handlers/register.py +++ b/synapse/handlers/register.py @@ -18,7 +18,7 @@ from twisted.internet import defer from synapse.types import UserID from synapse.api.errors import ( - SynapseError, RegistrationError, InvalidCaptchaError + SynapseError, RegistrationError, InvalidCaptchaError, CodeMessageException ) from ._base import BaseHandler import synapse.util.stringutils as stringutils @@ -28,6 +28,7 @@ from synapse.http.client import CaptchaServerHttpClient import base64 import bcrypt +import json import logging logger = logging.getLogger(__name__) @@ -161,21 +162,26 @@ class RegistrationHandler(BaseHandler): def _threepid_from_creds(self, creds): # TODO: get this from the homeserver rather than creating a new one for # each request - httpCli = SimpleHttpClient(self.hs) + http_client = SimpleHttpClient(self.hs) # XXX: make this configurable! trustedIdServers = ['matrix.org:8090', 'matrix.org'] if not creds['idServer'] in trustedIdServers: logger.warn('%s is not a trusted ID server: rejecting 3pid ' + 'credentials', creds['idServer']) defer.returnValue(None) - data = yield httpCli.get_json( - # XXX: This should be HTTPS - "http://%s%s" % ( - creds['idServer'], - "/_matrix/identity/api/v1/3pid/getValidated3pid" - ), - {'sid': creds['sid'], 'clientSecret': creds['clientSecret']} - ) + + data = {} + try: + data = yield http_client.get_json( + # XXX: This should be HTTPS + "http://%s%s" % ( + creds['idServer'], + "/_matrix/identity/api/v1/3pid/getValidated3pid" + ), + {'sid': creds['sid'], 'clientSecret': creds['clientSecret']} + ) + except CodeMessageException as e: + data = json.loads(e.msg) if 'medium' in data: defer.returnValue(data) @@ -185,19 +191,23 @@ class RegistrationHandler(BaseHandler): def _bind_threepid(self, creds, mxid): yield logger.debug("binding threepid") - httpCli = SimpleHttpClient(self.hs) - data = yield httpCli.post_urlencoded_get_json( - # XXX: Change when ID servers are all HTTPS - "http://%s%s" % ( - creds['idServer'], "/_matrix/identity/api/v1/3pid/bind" - ), - { - 'sid': creds['sid'], - 'clientSecret': creds['clientSecret'], - 'mxid': mxid, - } - ) - logger.debug("bound threepid") + http_client = SimpleHttpClient(self.hs) + data = None + try: + data = yield http_client.post_urlencoded_get_json( + # XXX: Change when ID servers are all HTTPS + "http://%s%s" % ( + creds['idServer'], "/_matrix/identity/api/v1/3pid/bind" + ), + { + 'sid': creds['sid'], + 'clientSecret': creds['clientSecret'], + 'mxid': mxid, + } + ) + logger.debug("bound threepid") + except CodeMessageException as e: + data = json.loads(e.msg) defer.returnValue(data) @defer.inlineCallbacks diff --git a/synapse/http/client.py b/synapse/http/client.py index 198f575cfa..5f4558be47 100644 --- a/synapse/http/client.py +++ b/synapse/http/client.py @@ -13,7 +13,7 @@ # See the License for the specific language governing permissions and # limitations under the License. 
- +from synapse.api.errors import CodeMessageException from synapse.http.agent_name import AGENT_NAME from twisted.internet import defer, reactor from twisted.web.client import ( @@ -83,7 +83,7 @@ class SimpleHttpClient(object): @defer.inlineCallbacks def get_json(self, uri, args={}): - """ Get's some json from the given host and path + """ Gets some json from the given host and path Args: uri (str): The URI to request, not including query parameters @@ -91,15 +91,11 @@ class SimpleHttpClient(object): None. **Note**: The value of each key is assumed to be an iterable and *not* a string. - Returns: - Deferred: Succeeds when we get *any* HTTP response. - - The result of the deferred is a tuple of `(code, response)`, - where `response` is a dict representing the decoded JSON body. + Deferred: Succeeds when we get *any* 2xx HTTP response. + Raises: + On a non-2xx HTTP response. """ - - yield if len(args): query_bytes = urllib.urlencode(args, True) uri = "%s?%s" % (uri, query_bytes) @@ -114,7 +110,10 @@ class SimpleHttpClient(object): body = yield readBody(response) - defer.returnValue(json.loads(body)) + if 200 <= response.code < 300: + defer.returnValue(json.loads(body)) + else: + raise CodeMessageException(response.code, body) class CaptchaServerHttpClient(SimpleHttpClient): From 6d3e4f4d0aad4ad9b44b28349838ff48aef39440 Mon Sep 17 00:00:00 2001 From: Kegan Dougal Date: Wed, 4 Feb 2015 17:32:44 +0000 Subject: [PATCH 023/112] Update user/alias query APIs to use new format of SimpleHttpClient.get_json --- synapse/appservice/api.py | 15 +++++++-------- synapse/http/client.py | 3 +++ 2 files changed, 10 insertions(+), 8 deletions(-) diff --git a/synapse/appservice/api.py b/synapse/appservice/api.py index 799ada96df..74508ecddf 100644 --- a/synapse/appservice/api.py +++ b/synapse/appservice/api.py @@ -13,8 +13,8 @@ # See the License for the specific language governing permissions and # limitations under the License. 
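With the change above, callers of get_json distinguish outcomes purely by HTTP status: a 2xx resolves the Deferred with the parsed body, anything else raises CodeMessageException carrying the status code and the raw body string. The calling idiom used throughout these patches, as a standalone sketch (uri and args are placeholders):

    import json

    from twisted.internet import defer
    from synapse.api.errors import CodeMessageException

    @defer.inlineCallbacks
    def fetch_json(http_client, uri, args):
        try:
            data = yield http_client.get_json(uri, args)   # 2xx: already-parsed JSON
        except CodeMessageException as e:
            # Non-2xx: e.code is the status code, e.msg is the raw body string.
            data = json.loads(e.msg)
        defer.returnValue(data)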
from twisted.internet import defer -from twisted.web.client import PartialDownloadError +from synapse.api.errors import CodeMessageException from synapse.http.client import SimpleHttpClient import logging @@ -42,11 +42,11 @@ class ApplicationServiceApi(SimpleHttpClient): }) if response: # just an empty json object defer.returnValue(True) - except PartialDownloadError as e: - if e.status == 404: + except CodeMessageException as e: + if e.code == 404: defer.returnValue(False) return - logger.warning("query_user to %s received %s", (uri, e.status)) + logger.warning("query_user to %s received %s", uri, e.code) @defer.inlineCallbacks def query_alias(self, service, alias): @@ -56,14 +56,13 @@ class ApplicationServiceApi(SimpleHttpClient): response = yield self.get_json(uri, { "access_token": self.hs_token }) - logger.info("%s", response[0]) if response: # just an empty json object defer.returnValue(True) - except PartialDownloadError as e: - if e.status == 404: + except CodeMessageException as e: + if e.code == 404: defer.returnValue(False) return - logger.warning("query_alias to %s received %s", (uri, e.status)) + logger.warning("query_alias to %s received %s", uri, e.code) def push_bulk(self, service, events): pass diff --git a/synapse/http/client.py b/synapse/http/client.py index 5f4558be47..fee8c901a2 100644 --- a/synapse/http/client.py +++ b/synapse/http/client.py @@ -113,6 +113,9 @@ class SimpleHttpClient(object): if 200 <= response.code < 300: defer.returnValue(json.loads(body)) else: + # NB: This is explicitly not json.loads(body)'d because the contract + # of CodeMessageException is a *string* message. Callers can always + # load it into JSON if they want. raise CodeMessageException(response.code, body) From 543e84fe70bdc0af6be5feb237de2d18adf352ab Mon Sep 17 00:00:00 2001 From: Kegan Dougal Date: Wed, 4 Feb 2015 17:39:51 +0000 Subject: [PATCH 024/112] Add SimpleHttpClient.put_json with the same semantics as get_json. --- synapse/http/client.py | 48 ++++++++++++++++++++++++++++++++++++++++-- 1 file changed, 46 insertions(+), 2 deletions(-) diff --git a/synapse/http/client.py b/synapse/http/client.py index fee8c901a2..575510637e 100644 --- a/synapse/http/client.py +++ b/synapse/http/client.py @@ -83,7 +83,7 @@ class SimpleHttpClient(object): @defer.inlineCallbacks def get_json(self, uri, args={}): - """ Gets some json from the given host and path + """ Gets some json from the given URI. Args: uri (str): The URI to request, not including query parameters @@ -92,7 +92,8 @@ class SimpleHttpClient(object): **Note**: The value of each key is assumed to be an iterable and *not* a string. Returns: - Deferred: Succeeds when we get *any* 2xx HTTP response. + Deferred: Succeeds when we get *any* 2xx HTTP response, with the + HTTP body as JSON. Raises: On a non-2xx HTTP response. """ @@ -118,6 +119,49 @@ class SimpleHttpClient(object): # load it into JSON if they want. raise CodeMessageException(response.code, body) + @defer.inlineCallbacks + def put_json(self, uri, json_body, args={}): + """ Puts some json to the given URI. + + Args: + uri (str): The URI to request, not including query parameters + json_body (dict): The JSON to put in the HTTP body, + args (dict): A dictionary used to create query strings, defaults to + None. + **Note**: The value of each key is assumed to be an iterable + and *not* a string. + Returns: + Deferred: Succeeds when we get *any* 2xx HTTP response, with the + HTTP body as JSON. + Raises: + On a non-2xx HTTP response. 
+ """ + if len(args): + query_bytes = urllib.urlencode(args, True) + uri = "%s?%s" % (uri, query_bytes) + + json_str = json.dumps(json_body) + + response = yield self.agent.request( + "PUT", + uri.encode("ascii"), + headers=Headers({ + b"User-Agent": [AGENT_NAME], + "Content-Type": ["application/json"] + }), + bodyProducer=FileBodyProducer(StringIO(json_str)) + ) + + body = yield readBody(response) + + if 200 <= response.code < 300: + defer.returnValue(json.loads(body)) + else: + # NB: This is explicitly not json.loads(body)'d because the contract + # of CodeMessageException is a *string* message. Callers can always + # load it into JSON if they want. + raise CodeMessageException(response.code, body) + class CaptchaServerHttpClient(SimpleHttpClient): """ From a1a4960baf78c1a24f76c603076c832a1947360f Mon Sep 17 00:00:00 2001 From: Kegan Dougal Date: Thu, 5 Feb 2015 09:43:22 +0000 Subject: [PATCH 025/112] Impl push_bulk function --- synapse/appservice/api.py | 19 ++++++++++++++++++- synapse/handlers/appservice.py | 2 +- 2 files changed, 19 insertions(+), 2 deletions(-) diff --git a/synapse/appservice/api.py b/synapse/appservice/api.py index 74508ecddf..fbf4abc526 100644 --- a/synapse/appservice/api.py +++ b/synapse/appservice/api.py @@ -64,8 +64,25 @@ class ApplicationServiceApi(SimpleHttpClient): return logger.warning("query_alias to %s received %s", uri, e.code) + @defer.inlineCallbacks def push_bulk(self, service, events): - pass + uri = service.url + ("/transactions/%s" % + urllib.quote(str(0))) # TODO txn_ids + response = None + try: + response = yield self.put_json( + uri, + { + "events": events + }, + { + "access_token": self.hs_token + }) + if response: # just an empty json object + defer.returnValue(True) + except CodeMessageException as e: + logger.warning("push_bulk to %s received %s", uri, e.code) + defer.returnValue(False) @defer.inlineCallbacks def push(self, service, event): diff --git a/synapse/handlers/appservice.py b/synapse/handlers/appservice.py index dd860b2244..2b2761682f 100644 --- a/synapse/handlers/appservice.py +++ b/synapse/handlers/appservice.py @@ -56,7 +56,7 @@ class ApplicationServicesHandler(object): yield self.store.update_app_service(app_service) logger.info("Sending ping to %s...", app_service.url) - yield self.appservice_api.query_alias(app_service, "ping") + yield self.appservice_api.push(app_service, "pinger") def unregister(self, token): logger.info("Unregister as_token=%s", token) From 27091f146a0ebdbfe1ae7c5cd30de51515cfbebc Mon Sep 17 00:00:00 2001 From: Kegan Dougal Date: Thu, 5 Feb 2015 10:08:12 +0000 Subject: [PATCH 026/112] Add hs_token column and generate a different token f.e application service. --- synapse/appservice/__init__.py | 6 ++++-- synapse/appservice/api.py | 8 ++++---- synapse/handlers/appservice.py | 9 ++++++--- synapse/rest/appservice/v1/register.py | 4 ++-- synapse/storage/appservice.py | 17 ++++++++++++----- synapse/storage/schema/application_services.sql | 1 + tests/storage/test_appservice.py | 10 ++++++---- 7 files changed, 35 insertions(+), 20 deletions(-) diff --git a/synapse/appservice/__init__.py b/synapse/appservice/__init__.py index 0c7f58574e..f7baf578f0 100644 --- a/synapse/appservice/__init__.py +++ b/synapse/appservice/__init__.py @@ -34,11 +34,13 @@ class ApplicationService(object): # values. 
NS_LIST = [NS_USERS, NS_ALIASES, NS_ROOMS] - def __init__(self, token, url=None, namespaces=None, txn_id=None): + def __init__(self, token, url=None, namespaces=None, hs_token=None, + txn_id=None): self.token = token self.url = url + self.hs_token = hs_token self.namespaces = self._check_namespaces(namespaces) - self.txn_id = None + self.txn_id = txn_id def _check_namespaces(self, namespaces): # Sanity check that it is of the form: diff --git a/synapse/appservice/api.py b/synapse/appservice/api.py index fbf4abc526..29bb35d61b 100644 --- a/synapse/appservice/api.py +++ b/synapse/appservice/api.py @@ -30,7 +30,6 @@ class ApplicationServiceApi(SimpleHttpClient): def __init__(self, hs): super(ApplicationServiceApi, self).__init__(hs) - self.hs_token = "_hs_token_" # TODO extract hs token @defer.inlineCallbacks def query_user(self, service, user_id): @@ -38,7 +37,7 @@ class ApplicationServiceApi(SimpleHttpClient): response = None try: response = yield self.get_json(uri, { - "access_token": self.hs_token + "access_token": service.hs_token }) if response: # just an empty json object defer.returnValue(True) @@ -54,7 +53,7 @@ class ApplicationServiceApi(SimpleHttpClient): response = None try: response = yield self.get_json(uri, { - "access_token": self.hs_token + "access_token": service.hs_token }) if response: # just an empty json object defer.returnValue(True) @@ -76,9 +75,10 @@ class ApplicationServiceApi(SimpleHttpClient): "events": events }, { - "access_token": self.hs_token + "access_token": service.hs_token }) if response: # just an empty json object + # TODO: Mark txn as sent successfully defer.returnValue(True) except CodeMessageException as e: logger.warning("push_bulk to %s received %s", uri, e.code) diff --git a/synapse/handlers/appservice.py b/synapse/handlers/appservice.py index 2b2761682f..7b0599c71e 100644 --- a/synapse/handlers/appservice.py +++ b/synapse/handlers/appservice.py @@ -19,6 +19,7 @@ from ._base import BaseHandler from synapse.api.errors import Codes, StoreError, SynapseError from synapse.appservice import ApplicationService from synapse.appservice.api import ApplicationServiceApi +import synapse.util.stringutils as stringutils import logging @@ -53,10 +54,9 @@ class ApplicationServicesHandler(object): errcode=Codes.FORBIDDEN ) logger.info("Updating application service info...") + app_service.hs_token = self._generate_hs_token() yield self.store.update_app_service(app_service) - - logger.info("Sending ping to %s...", app_service.url) - yield self.appservice_api.push(app_service, "pinger") + defer.returnValue(app_service) def unregister(self, token): logger.info("Unregister as_token=%s", token) @@ -136,3 +136,6 @@ class ApplicationServicesHandler(object): # Fork off pushes to these services - XXX First cut, best effort for service in services: self.appservice_api.push(service, event) + + def _generate_hs_token(self): + return stringutils.random_string(18) diff --git a/synapse/rest/appservice/v1/register.py b/synapse/rest/appservice/v1/register.py index e374d538e7..d3d5aef220 100644 --- a/synapse/rest/appservice/v1/register.py +++ b/synapse/rest/appservice/v1/register.py @@ -61,8 +61,8 @@ class RegisterRestServlet(AppServiceRestServlet): app_service = ApplicationService(as_token, as_url, namespaces) - yield self.handler.register(app_service) - hs_token = "_not_implemented_yet" # TODO: Pull this from self.hs? 
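Patch 026 completes the token handshake: the AS authenticates to the HS with its as_token, and the HS now mints a per-service hs_token (stringutils.random_string) which is handed back from the register servlet and then presented as access_token whenever the HS calls into the AS, including the PUT /transactions push from patch 025. A small sketch with invented values (the transaction ID is still hard-coded to 0 at this point, as the TODO in push_bulk notes):

    import urllib

    hs_token = "ABCDEFGHIJKLMNOPQR"             # stand-in for stringutils.random_string(18)
    as_base_url = "https://bridge.example.com"  # invented AS base URL

    # Shape of the registration response body returned to the AS:
    register_response = {"hs_token": hs_token}

    # HS -> AS push from patch 025: the hs_token rides along as access_token,
    # and the body is the list of events for this transaction.
    push_uri = "%s/transactions/%s?access_token=%s" % (
        as_base_url, urllib.quote(str(0)), hs_token
    )
    push_body = {"events": []}
    print(push_uri)
    # https://bridge.example.com/transactions/0?access_token=ABCDEFGHIJKLMNOPQR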
+ app_service = yield self.handler.register(app_service) + hs_token = app_service.hs_token defer.returnValue((200, { "hs_token": hs_token diff --git a/synapse/storage/appservice.py b/synapse/storage/appservice.py index b64416de28..3c8bf9ad0d 100644 --- a/synapse/storage/appservice.py +++ b/synapse/storage/appservice.py @@ -60,6 +60,7 @@ class ApplicationServiceStore(SQLBaseStore): if service.token == token: service.url = None service.namespaces = None + service.hs_token = None def _unregister_app_service_txn(self, txn, token): # kill the url to prevent pushes @@ -100,6 +101,9 @@ class ApplicationServiceStore(SQLBaseStore): if not service.token or not service.url: raise StoreError(400, "Token and url must be specified.") + if not service.hs_token: + raise StoreError(500, "No HS token") + yield self.runInteraction( "update_app_service", self._update_app_service_txn, @@ -126,8 +130,8 @@ class ApplicationServiceStore(SQLBaseStore): return False txn.execute( - "UPDATE application_services SET url=? WHERE id=?", - (service.url, as_id,) + "UPDATE application_services SET url=?, hs_token=? WHERE id=?", + (service.url, service.hs_token, as_id,) ) # cleanup regex txn.execute( @@ -196,6 +200,7 @@ class ApplicationServiceStore(SQLBaseStore): # 'namespace': enum, # 'as_id': 0, # 'token': "something", + # 'hs_token': "otherthing", # 'id': 0 # } # ] @@ -208,6 +213,7 @@ class ApplicationServiceStore(SQLBaseStore): services[as_token] = { "url": res["url"], "token": as_token, + "hs_token": res["hs_token"], "namespaces": { ApplicationService.NS_USERS: [], ApplicationService.NS_ALIASES: [], @@ -230,8 +236,9 @@ class ApplicationServiceStore(SQLBaseStore): for service in services.values(): logger.info("Found application service: %s", service) self.cache.services.append(ApplicationService( - service["token"], - service["url"], - service["namespaces"] + token=service["token"], + url=service["url"], + namespaces=service["namespaces"], + hs_token=service["hs_token"] )) diff --git a/synapse/storage/schema/application_services.sql b/synapse/storage/schema/application_services.sql index 6d245fc807..03b5a10c8a 100644 --- a/synapse/storage/schema/application_services.sql +++ b/synapse/storage/schema/application_services.sql @@ -17,6 +17,7 @@ CREATE TABLE IF NOT EXISTS application_services( id INTEGER PRIMARY KEY AUTOINCREMENT, url TEXT, token TEXT, + hs_token TEXT, UNIQUE(token) ON CONFLICT ROLLBACK ); diff --git a/tests/storage/test_appservice.py b/tests/storage/test_appservice.py index 56fdda377c..b9ecfb3384 100644 --- a/tests/storage/test_appservice.py +++ b/tests/storage/test_appservice.py @@ -46,13 +46,15 @@ class ApplicationServiceStoreTestCase(unittest.TestCase): @defer.inlineCallbacks def test_update_and_retrieval_of_service(self): url = "https://matrix.org/appservices/foobar" + hs_token = "hstok" user_regex = ["@foobar_.*:matrix.org"] alias_regex = ["#foobar_.*:matrix.org"] room_regex = [] - service = ApplicationService(url=url, token=self.as_token, namespaces={ - ApplicationService.NS_USERS: user_regex, - ApplicationService.NS_ALIASES: alias_regex, - ApplicationService.NS_ROOMS: room_regex + service = ApplicationService( + url=url, hs_token=hs_token, token=self.as_token, namespaces={ + ApplicationService.NS_USERS: user_regex, + ApplicationService.NS_ALIASES: alias_regex, + ApplicationService.NS_ROOMS: room_regex }) yield self.store.update_app_service(service) From f0c730252fcb198ad925d6406f0f37ccee29e720 Mon Sep 17 00:00:00 2001 From: Kegan Dougal Date: Thu, 5 Feb 2015 11:25:32 +0000 Subject: [PATCH 027/112] 
Add unknown user ID check. Use store.get_aliases_for_room(room_id) when searching for services by alias. --- synapse/handlers/appservice.py | 24 +++++++++++++++++++----- tests/handlers/test_appservice.py | 1 + 2 files changed, 20 insertions(+), 5 deletions(-) diff --git a/synapse/handlers/appservice.py b/synapse/handlers/appservice.py index 7b0599c71e..39976b9629 100644 --- a/synapse/handlers/appservice.py +++ b/synapse/handlers/appservice.py @@ -15,10 +15,10 @@ from twisted.internet import defer -from ._base import BaseHandler from synapse.api.errors import Codes, StoreError, SynapseError from synapse.appservice import ApplicationService from synapse.appservice.api import ApplicationServiceApi +from synapse.types import UserID import synapse.util.stringutils as stringutils import logging @@ -74,7 +74,7 @@ class ApplicationServicesHandler(object): event based on the service regex. """ # We need to know the aliases associated with this event.room_id, if any - alias_list = [] # TODO + alias_list = yield self.store.get_aliases_for_room(event.room_id) services = yield self.store.get_app_services() interested_list = [ s for s in services if ( @@ -100,7 +100,7 @@ class ApplicationServicesHandler(object): # Do we know this user exists? If not, poke the user query API for # all services which match that user regex. - unknown_user = False # TODO check + unknown_user = yield self._is_unknown_user(event.sender) if unknown_user: user_query_services = yield self.get_services_for_event( event=event, @@ -117,7 +117,7 @@ class ApplicationServicesHandler(object): # Do we know this room alias exists? If not, poke the room alias query # API for all services which match that room alias regex. - unknown_room_alias = False # TODO check + unknown_room_alias = False # TODO if unknown_room_alias: alias = "something" # TODO alias_query_services = yield self.get_services_for_event( @@ -137,5 +137,19 @@ class ApplicationServicesHandler(object): for service in services: self.appservice_api.push(service, event) + @defer.inlineCallbacks + def _is_unknown_user(self, user_id): + user = UserID.from_string(user_id) + if not self.hs.is_mine(user): + # we don't know if they are unknown or not since it isn't one of our + # users. We can't poke ASes. + defer.returnValue(False) + return + + user_info = yield self.store.get_user_by_id(user_id) + defer.returnValue(len(user_info) == 0) + + + def _generate_hs_token(self): - return stringutils.random_string(18) + return stringutils.random_string(24) diff --git a/tests/handlers/test_appservice.py b/tests/handlers/test_appservice.py index 1daa314f20..f92dc1c89a 100644 --- a/tests/handlers/test_appservice.py +++ b/tests/handlers/test_appservice.py @@ -46,6 +46,7 @@ class AppServiceHandlerTestCase(unittest.TestCase): ] self.mock_store.get_app_services = Mock(return_value=services) + self.mock_store.get_user_by_id = Mock(return_value=[]) event = Mock( sender="@someone:anywhere", From b932600653a4585968a7d177b7ea7cb2ca33642a Mon Sep 17 00:00:00 2001 From: Kegan Dougal Date: Thu, 5 Feb 2015 11:47:11 +0000 Subject: [PATCH 028/112] Add unknown room alias check. 
Call it from directory_handler.get_association --- synapse/handlers/appservice.py | 97 +++++++++++++++++++--------------- synapse/handlers/directory.py | 7 +++ 2 files changed, 61 insertions(+), 43 deletions(-) diff --git a/synapse/handlers/appservice.py b/synapse/handlers/appservice.py index 39976b9629..2c6d4e2815 100644 --- a/synapse/handlers/appservice.py +++ b/synapse/handlers/appservice.py @@ -62,27 +62,6 @@ class ApplicationServicesHandler(object): logger.info("Unregister as_token=%s", token) yield self.store.unregister_app_service(token) - @defer.inlineCallbacks - def get_services_for_event(self, event, restrict_to=""): - """Retrieve a list of application services interested in this event. - - Args: - event(Event): The event to check. - restrict_to(str): The namespace to restrict regex tests to. - Returns: - list: A list of services interested in this - event based on the service regex. - """ - # We need to know the aliases associated with this event.room_id, if any - alias_list = yield self.store.get_aliases_for_room(event.room_id) - services = yield self.store.get_app_services() - interested_list = [ - s for s in services if ( - s.is_interested(event, restrict_to, alias_list) - ) - ] - defer.returnValue(interested_list) - @defer.inlineCallbacks def notify_interested_services(self, event): """Notifies (pushes) all application services interested in this event. @@ -94,7 +73,7 @@ class ApplicationServicesHandler(object): event(Event): The event to push out to interested services. """ # Gather interested services - services = yield self.get_services_for_event(event) + services = yield self._get_services_for_event(event) if len(services) == 0: return # no services need notifying @@ -102,7 +81,7 @@ class ApplicationServicesHandler(object): # all services which match that user regex. unknown_user = yield self._is_unknown_user(event.sender) if unknown_user: - user_query_services = yield self.get_services_for_event( + user_query_services = yield self._get_services_for_event( event=event, restrict_to=ApplicationService.NS_USERS ) @@ -115,28 +94,62 @@ class ApplicationServicesHandler(object): # the user exists now,so don't query more ASes. break - # Do we know this room alias exists? If not, poke the room alias query - # API for all services which match that room alias regex. - unknown_room_alias = False # TODO - if unknown_room_alias: - alias = "something" # TODO - alias_query_services = yield self.get_services_for_event( - event=event, - restrict_to=ApplicationService.NS_ALIASES - ) - for alias_service in alias_query_services: - # this needs to block XXX: Need to feed response back to caller - is_known_alias = yield self.appservice_api.query_alias( - alias_service, alias - ) - if is_known_alias: - # the alias exists now so don't query more ASes. - break - # Fork off pushes to these services - XXX First cut, best effort for service in services: self.appservice_api.push(service, event) + + @defer.inlineCallbacks + def query_room_alias_exists(self, room_alias): + """Check if an application service knows this room alias exists. + + Args: + room_alias(str): The room alias to query. + Returns: + namedtuple: with keys "room_id" and "servers" or None if no + association can be found. 
+ """ + alias_query_services = yield self._get_services_for_event( + event=None, + restrict_to=ApplicationService.NS_ALIASES, + alias_list=[room_alias] + ) + for alias_service in alias_query_services: + is_known_alias = yield self.appservice_api.query_alias( + alias_service, room_alias + ) + if is_known_alias: + # the alias exists now so don't query more ASes. + result = yield self.store.get_association_from_room_alias( + room_alias + ) + defer.returnValue(result) + + @defer.inlineCallbacks + def _get_services_for_event(self, event, restrict_to="", alias_list=None): + """Retrieve a list of application services interested in this event. + + Args: + event(Event): The event to check. Can be None if alias_list is not. + restrict_to(str): The namespace to restrict regex tests to. + alias_list: A list of aliases to get services for. If None, this + list is obtained from the database. + Returns: + list: A list of services interested in this + event based on the service regex. + """ + # We need to know the aliases associated with this event.room_id, if any + if not alias_list: + alias_list = yield self.store.get_aliases_for_room(event.room_id) + + services = yield self.store.get_app_services() + interested_list = [ + s for s in services if ( + s.is_interested(event, restrict_to, alias_list) + ) + ] + defer.returnValue(interested_list) + @defer.inlineCallbacks def _is_unknown_user(self, user_id): user = UserID.from_string(user_id) @@ -149,7 +162,5 @@ class ApplicationServicesHandler(object): user_info = yield self.store.get_user_by_id(user_id) defer.returnValue(len(user_info) == 0) - - def _generate_hs_token(self): return stringutils.random_string(24) diff --git a/synapse/handlers/directory.py b/synapse/handlers/directory.py index 58e9a91562..000bf5793c 100644 --- a/synapse/handlers/directory.py +++ b/synapse/handlers/directory.py @@ -84,6 +84,13 @@ class DirectoryHandler(BaseHandler): if result: room_id = result.room_id servers = result.servers + else: + # Query AS to see if it exists + as_handler = self.hs.get_handlers().appservice_handler + result = yield as_handler.query_room_alias_exists(room_alias) + if result: + room_id = result.room_id + servers = result.servers else: try: result = yield self.federation.make_query( From bc658907f0195f4f5f3ec9f7730884bbd516ce79 Mon Sep 17 00:00:00 2001 From: Kegan Dougal Date: Thu, 5 Feb 2015 11:54:36 +0000 Subject: [PATCH 029/112] Add unit test for appservice_handler.query_room_alias_exists --- tests/handlers/test_appservice.py | 31 ++++++++++++++++++++++++++++++- 1 file changed, 30 insertions(+), 1 deletion(-) diff --git a/tests/handlers/test_appservice.py b/tests/handlers/test_appservice.py index f92dc1c89a..fd6bcbbce6 100644 --- a/tests/handlers/test_appservice.py +++ b/tests/handlers/test_appservice.py @@ -37,6 +37,7 @@ class AppServiceHandlerTestCase(unittest.TestCase): raise Exception("Test expected handler.appservice_api to exist.") self.handler.appservice_api = self.mock_as_api + @defer.inlineCallbacks def test_notify_interested_services(self): interested_service = self._mkservice(is_interested=True) services = [ @@ -54,9 +55,37 @@ class AppServiceHandlerTestCase(unittest.TestCase): room_id="!foo:bar" ) self.mock_as_api.push = Mock() - self.handler.notify_interested_services(event) + yield self.handler.notify_interested_services(event) self.mock_as_api.push.assert_called_once_with(interested_service, event) + @defer.inlineCallbacks + def test_query_room_alias_exists(self): + room_alias = "#foo:bar" + room_id = "!alpha:bet" + servers = ["aperture"] + 
interested_service = self._mkservice(is_interested=True) + services = [ + self._mkservice(is_interested=False), + interested_service, + self._mkservice(is_interested=False) + ] + + self.mock_store.get_app_services = Mock(return_value=services) + self.mock_store.get_association_from_room_alias = Mock( + return_value=Mock(room_id=room_id, servers=servers) + ) + + result = yield self.handler.query_room_alias_exists(room_alias) + + self.mock_as_api.query_alias.assert_called_once_with( + interested_service, + room_alias + ) + self.assertEquals(result.room_id, room_id) + self.assertEquals(result.servers, servers) + + + def _mkservice(self, is_interested): service = Mock() service.is_interested = Mock(return_value=is_interested) From 51d63ac329a2613c0a7195e6183d70f789b7d823 Mon Sep 17 00:00:00 2001 From: Kegan Dougal Date: Thu, 5 Feb 2015 13:19:46 +0000 Subject: [PATCH 030/112] Glue AS work to general event notifications. Add more exception handling when poking ASes. --- synapse/appservice/__init__.py | 2 +- synapse/appservice/api.py | 11 +++++++++- synapse/handlers/appservice.py | 39 ++++++++++++++++++++++------------ synapse/notifier.py | 6 ++++++ 4 files changed, 42 insertions(+), 16 deletions(-) diff --git a/synapse/appservice/__init__.py b/synapse/appservice/__init__.py index f7baf578f0..d9ca856c8b 100644 --- a/synapse/appservice/__init__.py +++ b/synapse/appservice/__init__.py @@ -62,7 +62,7 @@ class ApplicationService(object): def _matches_regex(self, test_string, namespace_key): if not isinstance(test_string, basestring): - logger.warning( + logger.error( "Expected a string to test regex against, but got %s", test_string ) diff --git a/synapse/appservice/api.py b/synapse/appservice/api.py index 29bb35d61b..d96caf7f58 100644 --- a/synapse/appservice/api.py +++ b/synapse/appservice/api.py @@ -46,6 +46,9 @@ class ApplicationServiceApi(SimpleHttpClient): defer.returnValue(False) return logger.warning("query_user to %s received %s", uri, e.code) + except Exception as ex: + logger.warning("query_user to %s threw exception %s", uri, ex) + defer.returnValue(False) @defer.inlineCallbacks def query_alias(self, service, alias): @@ -62,6 +65,10 @@ class ApplicationServiceApi(SimpleHttpClient): defer.returnValue(False) return logger.warning("query_alias to %s received %s", uri, e.code) + except Exception as ex: + logger.warning("query_alias to %s threw exception %s", uri, ex) + defer.returnValue(False) + @defer.inlineCallbacks def push_bulk(self, service, events): @@ -82,7 +89,9 @@ class ApplicationServiceApi(SimpleHttpClient): defer.returnValue(True) except CodeMessageException as e: logger.warning("push_bulk to %s received %s", uri, e.code) - defer.returnValue(False) + except Exception as ex: + logger.warning("push_bulk to %s threw exception %s", uri, ex) + defer.returnValue(False) @defer.inlineCallbacks def push(self, service, event): diff --git a/synapse/handlers/appservice.py b/synapse/handlers/appservice.py index 2c6d4e2815..ef94215133 100644 --- a/synapse/handlers/appservice.py +++ b/synapse/handlers/appservice.py @@ -23,7 +23,6 @@ import synapse.util.stringutils as stringutils import logging - logger = logging.getLogger(__name__) @@ -58,6 +57,7 @@ class ApplicationServicesHandler(object): yield self.store.update_app_service(app_service) defer.returnValue(app_service) + @defer.inlineCallbacks def unregister(self, token): logger.info("Unregister as_token=%s", token) yield self.store.unregister_app_service(token) @@ -81,34 +81,45 @@ class ApplicationServicesHandler(object): # all services which 
match that user regex. unknown_user = yield self._is_unknown_user(event.sender) if unknown_user: - user_query_services = yield self._get_services_for_event( - event=event, - restrict_to=ApplicationService.NS_USERS - ) - for user_service in user_query_services: - # this needs to block XXX: Need to feed response back to caller - is_known_user = yield self.appservice_api.query_user( - user_service, event.sender - ) - if is_known_user: - # the user exists now,so don't query more ASes. - break + yield self.query_user_exists(event) # Fork off pushes to these services - XXX First cut, best effort for service in services: self.appservice_api.push(service, event) + @defer.inlineCallbacks + def query_user_exists(self, event): + """Check if an application services knows this event.sender exists. + + Args: + event: An event sent by the user to query + Returns: + True if this user exists. + """ + # TODO Would be nice for this to accept a user ID instead of an event. + user_query_services = yield self._get_services_for_event( + event=event, + restrict_to=ApplicationService.NS_USERS + ) + for user_service in user_query_services: + is_known_user = yield self.appservice_api.query_user( + user_service, event.sender + ) + if is_known_user: + defer.returnValue(True) + defer.returnValue(False) @defer.inlineCallbacks def query_room_alias_exists(self, room_alias): """Check if an application service knows this room alias exists. Args: - room_alias(str): The room alias to query. + room_alias(RoomAlias): The room alias to query. Returns: namedtuple: with keys "room_id" and "servers" or None if no association can be found. """ + room_alias = room_alias.to_string() alias_query_services = yield self._get_services_for_event( event=None, restrict_to=ApplicationService.NS_ALIASES, diff --git a/synapse/notifier.py b/synapse/notifier.py index e3b6ead620..c7f75ab801 100644 --- a/synapse/notifier.py +++ b/synapse/notifier.py @@ -99,6 +99,12 @@ class Notifier(object): `extra_users` param. """ yield run_on_reactor() + + # poke any interested application service. + self.hs.get_handlers().appservice_handler.notify_interested_services( + event + ) + room_id = event.room_id room_source = self.event_sources.sources["room"] From 131e0364022735ab63d749c158e6875b7b11700e Mon Sep 17 00:00:00 2001 From: Kegan Dougal Date: Thu, 5 Feb 2015 13:22:20 +0000 Subject: [PATCH 031/112] Fix unit tests. 
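
A minimal sketch of the user/alias query contract that the query_user() and query_alias() calls above rely on: a 2xx response with a JSON body (typically just "{}") means the AS knows the entity, a 404 means it does not, and other errors are logged and treated as "unknown". The helper name below is illustrative, not part of the patch.

import json

def interpret_query_response(status_code, body):
    # 2xx with a JSON body (usually just "{}") means the AS knows the entity
    if 200 <= status_code < 300:
        json.loads(body)
        return True
    # 404 means unknown; other errors are logged by the patch and also
    # treated as unknown
    return False

print(interpret_query_response(200, "{}"))          # True
print(interpret_query_response(404, "Not Found"))   # False
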
--- tests/handlers/test_appservice.py | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/tests/handlers/test_appservice.py b/tests/handlers/test_appservice.py index fd6bcbbce6..e16e511587 100644 --- a/tests/handlers/test_appservice.py +++ b/tests/handlers/test_appservice.py @@ -60,7 +60,10 @@ class AppServiceHandlerTestCase(unittest.TestCase): @defer.inlineCallbacks def test_query_room_alias_exists(self): - room_alias = "#foo:bar" + room_alias_str = "#foo:bar" + room_alias = Mock() + room_alias.to_string = Mock(return_value=room_alias_str) + room_id = "!alpha:bet" servers = ["aperture"] interested_service = self._mkservice(is_interested=True) @@ -79,7 +82,7 @@ class AppServiceHandlerTestCase(unittest.TestCase): self.mock_as_api.query_alias.assert_called_once_with( interested_service, - room_alias + room_alias_str ) self.assertEquals(result.room_id, room_id) self.assertEquals(result.servers, servers) From 0613666d9c417005393636b935593c423f4417b9 Mon Sep 17 00:00:00 2001 From: Kegan Dougal Date: Thu, 5 Feb 2015 13:42:35 +0000 Subject: [PATCH 032/112] Serialize events before sending to ASes --- synapse/appservice/api.py | 10 ++++++++++ 1 file changed, 10 insertions(+) diff --git a/synapse/appservice/api.py b/synapse/appservice/api.py index d96caf7f58..9cce4e0973 100644 --- a/synapse/appservice/api.py +++ b/synapse/appservice/api.py @@ -16,6 +16,7 @@ from twisted.internet import defer from synapse.api.errors import CodeMessageException from synapse.http.client import SimpleHttpClient +from synapse.events.utils import serialize_event import logging import urllib @@ -30,6 +31,7 @@ class ApplicationServiceApi(SimpleHttpClient): def __init__(self, hs): super(ApplicationServiceApi, self).__init__(hs) + self.clock = hs.get_clock() @defer.inlineCallbacks def query_user(self, service, user_id): @@ -72,6 +74,8 @@ class ApplicationServiceApi(SimpleHttpClient): @defer.inlineCallbacks def push_bulk(self, service, events): + events = self._serialize(events) + uri = service.url + ("/transactions/%s" % urllib.quote(str(0))) # TODO txn_ids response = None @@ -98,3 +102,9 @@ class ApplicationServiceApi(SimpleHttpClient): response = yield self.push_bulk(service, [event]) defer.returnValue(response) + def _serialize(self, events): + time_now = self.clock.time_msec() + return [ + serialize_event(e, time_now, as_client_event=True) for e in events + ] + From c71456117dc70dfe0bfa15e3f655a3ac1dfc66ee Mon Sep 17 00:00:00 2001 From: Kegan Dougal Date: Thu, 5 Feb 2015 14:17:08 +0000 Subject: [PATCH 033/112] Fix user query checks. HS>AS pushing now works. 
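
A sketch of the HS -> AS push that push_bulk() above performs, assuming a hypothetical service URL and hs_token; the transaction id is fixed at 0, matching the TODO in the patch. Only the request shape is shown; no network call is made.

import json
try:
    from urllib import quote          # Python 2, as used in these patches
except ImportError:
    from urllib.parse import quote    # Python 3

service_url = "http://localhost:8445/appservice"   # hypothetical AS base URL
hs_token = "abcdef1234567890abcdef42"              # hypothetical hs_token
events = [{"type": "m.room.message", "room_id": "!foo:bar"}]

uri = service_url + "/transactions/%s" % quote(str(0))  # fixed txn id 0, per the TODO
body = json.dumps({"events": events})

print("PUT %s?access_token=%s" % (uri, hs_token))
print(body)
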
--- synapse/appservice/__init__.py | 21 +++++++++------- synapse/handlers/appservice.py | 44 ++++++++++++++++++++++++---------- 2 files changed, 44 insertions(+), 21 deletions(-) diff --git a/synapse/appservice/__init__.py b/synapse/appservice/__init__.py index d9ca856c8b..46d46a5a48 100644 --- a/synapse/appservice/__init__.py +++ b/synapse/appservice/__init__.py @@ -75,27 +75,23 @@ class ApplicationService(object): def _matches_user(self, event): if (hasattr(event, "sender") and - self._matches_regex( - event.sender, ApplicationService.NS_USERS)): + self.is_interested_in_user(event.sender)): return True # also check m.room.member state key if (hasattr(event, "type") and event.type == EventTypes.Member and hasattr(event, "state_key") - and self._matches_regex( - event.state_key, ApplicationService.NS_USERS)): + and self.is_interested_in_user(event.state_key)): return True return False def _matches_room_id(self, event): if hasattr(event, "room_id"): - return self._matches_regex( - event.room_id, ApplicationService.NS_ROOMS - ) + return self.is_interested_in_room(event.room_id) return False def _matches_aliases(self, event, alias_list): for alias in alias_list: - if self._matches_regex(alias, ApplicationService.NS_ALIASES): + if self.is_interested_in_alias(alias): return True return False @@ -128,5 +124,14 @@ class ApplicationService(object): elif restrict_to == ApplicationService.NS_USERS: return self._matches_user(event) + def is_interested_in_user(self, user_id): + return self._matches_regex(user_id, ApplicationService.NS_USERS) + + def is_interested_in_alias(self, alias): + return self._matches_regex(alias, ApplicationService.NS_ALIASES) + + def is_interested_in_room(self, room_id): + return self._matches_regex(room_id, ApplicationService.NS_ROOMS) + def __str__(self): return "ApplicationService: %s" % (self.__dict__,) diff --git a/synapse/handlers/appservice.py b/synapse/handlers/appservice.py index ef94215133..8d0cdd528c 100644 --- a/synapse/handlers/appservice.py +++ b/synapse/handlers/appservice.py @@ -15,6 +15,7 @@ from twisted.internet import defer +from synapse.api.constants import EventTypes from synapse.api.errors import Codes, StoreError, SynapseError from synapse.appservice import ApplicationService from synapse.appservice.api import ApplicationServiceApi @@ -78,32 +79,31 @@ class ApplicationServicesHandler(object): return # no services need notifying # Do we know this user exists? If not, poke the user query API for - # all services which match that user regex. - unknown_user = yield self._is_unknown_user(event.sender) - if unknown_user: - yield self.query_user_exists(event) + # all services which match that user regex. This needs to block as these + # user queries need to be made BEFORE pushing the event. + yield self._check_user_exists(event.sender) + if event.type == EventTypes.Member: + yield self._check_user_exists(event.state_key) # Fork off pushes to these services - XXX First cut, best effort for service in services: self.appservice_api.push(service, event) @defer.inlineCallbacks - def query_user_exists(self, event): - """Check if an application services knows this event.sender exists. + def query_user_exists(self, user_id): + """Check if any application service knows this user_id exists. Args: - event: An event sent by the user to query + user_id(str): The user to query if they exist on any AS. Returns: - True if this user exists. + True if this user exists on at least one application service. 
""" - # TODO Would be nice for this to accept a user ID instead of an event. - user_query_services = yield self._get_services_for_event( - event=event, - restrict_to=ApplicationService.NS_USERS + user_query_services = yield self._get_services_for_user( + user_id=user_id ) for user_service in user_query_services: is_known_user = yield self.appservice_api.query_user( - user_service, event.sender + user_service, user_id ) if is_known_user: defer.returnValue(True) @@ -161,6 +161,16 @@ class ApplicationServicesHandler(object): ] defer.returnValue(interested_list) + @defer.inlineCallbacks + def _get_services_for_user(self, user_id): + services = yield self.store.get_app_services() + interested_list = [ + s for s in services if ( + s.is_interested_in_user(user_id) + ) + ] + defer.returnValue(interested_list) + @defer.inlineCallbacks def _is_unknown_user(self, user_id): user = UserID.from_string(user_id) @@ -173,5 +183,13 @@ class ApplicationServicesHandler(object): user_info = yield self.store.get_user_by_id(user_id) defer.returnValue(len(user_info) == 0) + @defer.inlineCallbacks + def _check_user_exists(self, user_id): + unknown_user = yield self._is_unknown_user(user_id) + if unknown_user: + exists = yield self.query_user_exists(user_id) + defer.returnValue(exists) + defer.returnValue(True) + def _generate_hs_token(self): return stringutils.random_string(24) From c163357f387788afa8fc1d4160e806eccbc5220a Mon Sep 17 00:00:00 2001 From: Kegan Dougal Date: Thu, 5 Feb 2015 15:00:33 +0000 Subject: [PATCH 034/112] Add CS extension for masquerading as users within the namespaces specified by the AS. --- synapse/api/auth.py | 23 +++++++++++++++++++++++ 1 file changed, 23 insertions(+) diff --git a/synapse/api/auth.py b/synapse/api/auth.py index 7105ee21dc..86f963c190 100644 --- a/synapse/api/auth.py +++ b/synapse/api/auth.py @@ -299,6 +299,29 @@ class Auth(object): # Can optionally look elsewhere in the request (e.g. headers) try: access_token = request.args["access_token"][0] + + # Check for application service tokens with a user_id override + try: + masquerade_user_id = request.args["user_id"][0] + app_service = yield self.store.get_app_service_by_token( + access_token + ) + if not app_service: + raise AuthError( + 403, "Invalid application service access token" + ) + if not app_service.is_interested_in_user(masquerade_user_id): + raise AuthError( + 403, + "Application service cannot masquerade as this user." + ) + defer.returnValue( + (UserID.from_string(masquerade_user_id), ClientInfo("", "")) + ) + return + except KeyError: + pass # normal users won't have this query parameter set + user_info = yield self.get_user_by_token(access_token) user = user_info["user"] device_id = user_info["device_id"] From 5b99b471b2ba9a9c9da801e9a9d6e1801f02068c Mon Sep 17 00:00:00 2001 From: Kegan Dougal Date: Thu, 5 Feb 2015 15:12:36 +0000 Subject: [PATCH 035/112] Fix unit tests. 
--- synapse/api/auth.py | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/synapse/api/auth.py b/synapse/api/auth.py index 86f963c190..4f116184c9 100644 --- a/synapse/api/auth.py +++ b/synapse/api/auth.py @@ -302,6 +302,12 @@ class Auth(object): # Check for application service tokens with a user_id override try: + if "user_id" not in request.args: + # This has to be done like this rather than relying on it + # natively throwing because tests use a Mock for the request + # object which doesn't throw :/ + raise KeyError + masquerade_user_id = request.args["user_id"][0] app_service = yield self.store.get_app_service_by_token( access_token From e9484d6a9514d5c9aea3839bcc3fb25c9cb88cb0 Mon Sep 17 00:00:00 2001 From: Kegan Dougal Date: Thu, 5 Feb 2015 16:29:56 +0000 Subject: [PATCH 036/112] Prevent aliases in AS namespaces being created/deleted by users. Check with ASes when queried for room aliases via federation. --- synapse/handlers/directory.py | 42 +++++++++++++++++++++++++++-------- 1 file changed, 33 insertions(+), 9 deletions(-) diff --git a/synapse/handlers/directory.py b/synapse/handlers/directory.py index 24ea3573d3..842f075fe0 100644 --- a/synapse/handlers/directory.py +++ b/synapse/handlers/directory.py @@ -49,6 +49,12 @@ class DirectoryHandler(BaseHandler): # TODO(erikj): Check if there is a current association. + is_claimed = yield self.is_alias_exclusive_to_appservices(room_alias) + if is_claimed: + raise SynapseError( + 400, "This alias is reserved by an application service." + ) + if not servers: servers = yield self.store.get_joined_hosts_for_room(room_id) @@ -68,6 +74,12 @@ class DirectoryHandler(BaseHandler): if not self.hs.is_mine(room_alias): raise SynapseError(400, "Room alias must be local") + is_claimed = yield self.is_alias_exclusive_to_appservices(room_alias) + if is_claimed: + raise SynapseError( + 400, "This alias is reserved by an application service." 
+ ) + room_id = yield self.store.delete_room_alias(room_alias) if room_id: @@ -77,20 +89,13 @@ class DirectoryHandler(BaseHandler): def get_association(self, room_alias): room_id = None if self.hs.is_mine(room_alias): - result = yield self.store.get_association_from_room_alias( + result = yield self.get_association_from_room_alias( room_alias ) if result: room_id = result.room_id servers = result.servers - else: - # Query AS to see if it exists - as_handler = self.hs.get_handlers().appservice_handler - result = yield as_handler.query_room_alias_exists(room_alias) - if result: - room_id = result.room_id - servers = result.servers else: try: result = yield self.federation.make_query( @@ -145,7 +150,7 @@ class DirectoryHandler(BaseHandler): 400, "Room Alias is not hosted on this Home Server" ) - result = yield self.store.get_association_from_room_alias( + result = yield self.get_association_from_room_alias( room_alias ) @@ -173,3 +178,22 @@ class DirectoryHandler(BaseHandler): "sender": user_id, "content": {"aliases": aliases}, }, ratelimit=False) + + @defer.inlineCallbacks + def get_association_from_room_alias(self, room_alias): + result = yield self.store.get_association_from_room_alias( + room_alias + ) + if not result: + # Query AS to see if it exists + as_handler = self.hs.get_handlers().appservice_handler + result = yield as_handler.query_room_alias_exists(room_alias) + defer.returnValue(result) + + @defer.inlineCallbacks + def is_alias_exclusive_to_appservices(self, alias): + services = yield self.store.get_app_services() + interested_services = [ + s for s in services if s.is_interested_in_alias(alias.to_string()) + ] + defer.returnValue(len(interested_services) > 0) From cab4c730885dcb5c95a50d425d6b6f655154a173 Mon Sep 17 00:00:00 2001 From: Kegan Dougal Date: Thu, 5 Feb 2015 16:46:56 +0000 Subject: [PATCH 037/112] Prevent user IDs in AS namespaces being created/deleted by humans. --- synapse/handlers/register.py | 16 ++++++++++++++++ 1 file changed, 16 insertions(+) diff --git a/synapse/handlers/register.py b/synapse/handlers/register.py index 08cd5fd720..b6e19d498c 100644 --- a/synapse/handlers/register.py +++ b/synapse/handlers/register.py @@ -65,6 +65,8 @@ class RegistrationHandler(BaseHandler): user = UserID(localpart, self.hs.hostname) user_id = user.to_string() + yield self.check_user_id_is_valid(user_id) + token = self._generate_token(user_id) yield self.store.register( user_id=user_id, @@ -83,6 +85,7 @@ class RegistrationHandler(BaseHandler): localpart = self._generate_user_id() user = UserID(localpart, self.hs.hostname) user_id = user.to_string() + yield self.check_user_id_is_valid(user_id) token = self._generate_token(user_id) yield self.store.register( @@ -148,6 +151,19 @@ class RegistrationHandler(BaseHandler): # XXX: This should be a deferred list, shouldn't it? yield self._bind_threepid(c, user_id) + @defer.inlineCallbacks + def check_user_id_is_valid(self, user_id): + # valid user IDs must not clash with any user ID namespaces claimed by + # application services. + services = yield self.store.get_app_services() + interested_services = [ + s for s in services if s.is_interested_in_user(user_id) + ] + if len(interested_services) > 0: + raise SynapseError( + 400, "This user ID is reserved by an application service." + ) + def _generate_token(self, user_id): # urlsafe variant uses _ and - so use . 
as the separator and replace # all =s with .s so http clients don't quote =s when it is used as From a3c6010718c2749bd446bb63f3cf03bae09b0d20 Mon Sep 17 00:00:00 2001 From: Kegan Dougal Date: Thu, 5 Feb 2015 16:48:57 +0000 Subject: [PATCH 038/112] Add delta sql file. --- synapse/storage/schema/delta/v14.sql | 33 ++++++++++++++++++++++++++++ 1 file changed, 33 insertions(+) create mode 100644 synapse/storage/schema/delta/v14.sql diff --git a/synapse/storage/schema/delta/v14.sql b/synapse/storage/schema/delta/v14.sql new file mode 100644 index 0000000000..03b5a10c8a --- /dev/null +++ b/synapse/storage/schema/delta/v14.sql @@ -0,0 +1,33 @@ +/* Copyright 2015 OpenMarket Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +CREATE TABLE IF NOT EXISTS application_services( + id INTEGER PRIMARY KEY AUTOINCREMENT, + url TEXT, + token TEXT, + hs_token TEXT, + UNIQUE(token) ON CONFLICT ROLLBACK +); + +CREATE TABLE IF NOT EXISTS application_services_regex( + id INTEGER PRIMARY KEY AUTOINCREMENT, + as_id INTEGER NOT NULL, + namespace INTEGER, /* enum[room_id|room_alias|user_id] */ + regex TEXT, + FOREIGN KEY(as_id) REFERENCES application_services(id) +); + + + From 11e6b3d18b5ae95857e01884960a01b9ea4d307d Mon Sep 17 00:00:00 2001 From: Kegan Dougal Date: Thu, 5 Feb 2015 17:04:59 +0000 Subject: [PATCH 039/112] Dependency inject ApplicationServiceApi when creating ApplicationServicesHandler. --- synapse/handlers/__init__.py | 5 ++++- synapse/handlers/appservice.py | 5 ++--- tests/handlers/test_appservice.py | 10 +++------- 3 files changed, 9 insertions(+), 11 deletions(-) diff --git a/synapse/handlers/__init__.py b/synapse/handlers/__init__.py index b31518bf62..8d345bf936 100644 --- a/synapse/handlers/__init__.py +++ b/synapse/handlers/__init__.py @@ -13,6 +13,7 @@ # See the License for the specific language governing permissions and # limitations under the License. 
+from synapse.appservice.api import ApplicationServiceApi from .register import RegistrationHandler from .room import ( RoomCreationHandler, RoomMemberHandler, RoomListHandler @@ -53,5 +54,7 @@ class Handlers(object): self.directory_handler = DirectoryHandler(hs) self.typing_notification_handler = TypingNotificationHandler(hs) self.admin_handler = AdminHandler(hs) - self.appservice_handler = ApplicationServicesHandler(hs) + self.appservice_handler = ApplicationServicesHandler( + hs, ApplicationServiceApi(hs) + ) self.sync_handler = SyncHandler(hs) diff --git a/synapse/handlers/appservice.py b/synapse/handlers/appservice.py index 8d0cdd528c..fa810b9a98 100644 --- a/synapse/handlers/appservice.py +++ b/synapse/handlers/appservice.py @@ -18,7 +18,6 @@ from twisted.internet import defer from synapse.api.constants import EventTypes from synapse.api.errors import Codes, StoreError, SynapseError from synapse.appservice import ApplicationService -from synapse.appservice.api import ApplicationServiceApi from synapse.types import UserID import synapse.util.stringutils as stringutils @@ -32,10 +31,10 @@ logger = logging.getLogger(__name__) # easier. class ApplicationServicesHandler(object): - def __init__(self, hs): + def __init__(self, hs, appservice_api): self.store = hs.get_datastore() self.hs = hs - self.appservice_api = ApplicationServiceApi(hs) + self.appservice_api = appservice_api @defer.inlineCallbacks def register(self, app_service): diff --git a/tests/handlers/test_appservice.py b/tests/handlers/test_appservice.py index e16e511587..a2c541317c 100644 --- a/tests/handlers/test_appservice.py +++ b/tests/handlers/test_appservice.py @@ -29,13 +29,9 @@ class AppServiceHandlerTestCase(unittest.TestCase): self.mock_as_api = Mock() hs = Mock() hs.get_datastore = Mock(return_value=self.mock_store) - self.handler = ApplicationServicesHandler(hs) # thing being tested - - # FIXME Would be nice to DI this rather than monkey patch:( - if not hasattr(self.handler, "appservice_api"): - # someone probably updated the handler but not the tests. Fail fast. - raise Exception("Test expected handler.appservice_api to exist.") - self.handler.appservice_api = self.mock_as_api + self.handler = ApplicationServicesHandler( + hs, self.mock_as_api + ) @defer.inlineCallbacks def test_notify_interested_services(self): From 0227618d3c3bea6c85a922d5605f526719573121 Mon Sep 17 00:00:00 2001 From: Kegan Dougal Date: Thu, 5 Feb 2015 17:29:27 +0000 Subject: [PATCH 040/112] Add m.login.application_service registration procedure. This allows known application services to register any user ID under their own user namespace(s). 
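
A sketch of the register call an AS would make under the new m.login.application_service flow added below: the AS token goes in the access_token query parameter, the body names the desired localpart, and the response mirrors a normal registration response. All concrete values here are placeholders.

import json

register_body = {
    "type": "m.login.application_service",
    "user": "irc_alice",   # localpart; the full user ID must be in the AS namespace
}
expected_response = {
    "user_id": "@irc_alice:example.org",
    "access_token": "<generated token>",
    "home_server": "example.org",
}
print(json.dumps(register_body))
print(json.dumps(expected_response))
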
--- synapse/api/constants.py | 1 + synapse/handlers/register.py | 20 ++++++++++++++++++++ synapse/rest/client/v1/register.py | 24 +++++++++++++++++++++++- 3 files changed, 44 insertions(+), 1 deletion(-) diff --git a/synapse/api/constants.py b/synapse/api/constants.py index 0d3fc629af..420f963d91 100644 --- a/synapse/api/constants.py +++ b/synapse/api/constants.py @@ -59,6 +59,7 @@ class LoginType(object): EMAIL_URL = u"m.login.email.url" EMAIL_IDENTITY = u"m.login.email.identity" RECAPTCHA = u"m.login.recaptcha" + APPLICATION_SERVICE = u"m.login.application_service" class EventTypes(object): diff --git a/synapse/handlers/register.py b/synapse/handlers/register.py index b6e19d498c..60821edb05 100644 --- a/synapse/handlers/register.py +++ b/synapse/handlers/register.py @@ -105,6 +105,26 @@ class RegistrationHandler(BaseHandler): defer.returnValue((user_id, token)) + @defer.inlineCallbacks + def appservice_register(self, user_localpart, as_token): + user = UserID(user_localpart, self.hs.hostname) + user_id = user.to_string() + service = yield self.store.get_app_service_by_token(as_token) + if not service: + raise SynapseError(403, "Invalid application service token.") + if not service.is_interested_in_user(user_id): + raise SynapseError( + 400, "Invalid user localpart for this application service." + ) + token = self._generate_token(user_id) + yield self.store.register( + user_id=user_id, + token=token, + password_hash="" + ) + self.distributor.fire("registered_user", user) + defer.returnValue((user_id, token)) + @defer.inlineCallbacks def check_recaptcha(self, ip, private_key, challenge, response): """Checks a recaptcha is correct.""" diff --git a/synapse/rest/client/v1/register.py b/synapse/rest/client/v1/register.py index c0423c2d45..1ab32b53ea 100644 --- a/synapse/rest/client/v1/register.py +++ b/synapse/rest/client/v1/register.py @@ -110,7 +110,8 @@ class RegisterRestServlet(ClientV1RestServlet): stages = { LoginType.RECAPTCHA: self._do_recaptcha, LoginType.PASSWORD: self._do_password, - LoginType.EMAIL_IDENTITY: self._do_email_identity + LoginType.EMAIL_IDENTITY: self._do_email_identity, + LoginType.APPLICATION_SERVICE: self._do_app_service } session_info = self._get_session_info(request, session) @@ -276,6 +277,27 @@ class RegisterRestServlet(ClientV1RestServlet): self._remove_session(session) defer.returnValue(result) + @defer.inlineCallbacks + def _do_app_service(self, request, register_json, session): + if "access_token" not in request.args: + raise SynapseError(400, "Expected application service token.") + if "user" not in register_json: + raise SynapseError(400, "Expected 'user' key.") + + as_token = request.args["access_token"][0] + user_localpart = register_json["user"].encode("utf-8") + + handler = self.handlers.registration_handler + (user_id, token) = yield handler.appservice_register( + user_localpart, as_token + ) + self._remove_session(session) + defer.returnValue({ + "user_id": user_id, + "access_token": token, + "home_server": self.hs.hostname, + }) + def _parse_json(request): try: From e426df8e106aef0b213928afb6189569474ac5d9 Mon Sep 17 00:00:00 2001 From: Kegan Dougal Date: Fri, 6 Feb 2015 10:57:14 +0000 Subject: [PATCH 041/112] Grant ASes the ability to create alias in their own namespace. Add a new errcode type M_EXCLUSIVE when users try to create aliases inside AS namespaces, and when ASes try to create aliases outside their own namespace. 
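
The two exclusivity failures introduced here, shown as the error bodies a client would see; the JSON shape is assumed from how SynapseError and the new M_EXCLUSIVE code are used in the diff below.

import json

human_hits_reserved_alias = {
    "errcode": "M_EXCLUSIVE",
    "error": "This alias is reserved by an application service.",
}
as_outside_its_namespace = {
    "errcode": "M_EXCLUSIVE",
    "error": "This application service has not reserved this kind of alias.",
}
print(json.dumps(human_hits_reserved_alias))
print(json.dumps(as_outside_its_namespace))
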
--- synapse/api/auth.py | 12 ++++++++ synapse/api/errors.py | 3 +- synapse/handlers/directory.py | 43 +++++++++++++++++++++-------- synapse/rest/client/v1/directory.py | 29 +++++++++++-------- 4 files changed, 63 insertions(+), 24 deletions(-) diff --git a/synapse/api/auth.py b/synapse/api/auth.py index 4f116184c9..ea8c461729 100644 --- a/synapse/api/auth.py +++ b/synapse/api/auth.py @@ -380,6 +380,18 @@ class Auth(object): raise AuthError(403, "Unrecognised access token.", errcode=Codes.UNKNOWN_TOKEN) + @defer.inlineCallbacks + def get_appservice_by_req(self, request): + try: + token = request.args["access_token"][0] + service = yield self.store.get_app_service_by_token(token) + if not service: + raise AuthError(403, "Unrecognised access token.", + errcode=Codes.UNKNOWN_TOKEN) + defer.returnValue(service) + except KeyError: + raise AuthError(403, "Missing access token.") + def is_server_admin(self, user): return self.store.is_server_admin(user) diff --git a/synapse/api/errors.py b/synapse/api/errors.py index 5041828f18..eddd889778 100644 --- a/synapse/api/errors.py +++ b/synapse/api/errors.py @@ -36,7 +36,8 @@ class Codes(object): CAPTCHA_NEEDED = "M_CAPTCHA_NEEDED" CAPTCHA_INVALID = "M_CAPTCHA_INVALID" MISSING_PARAM = "M_MISSING_PARAM", - TOO_LARGE = "M_TOO_LARGE" + TOO_LARGE = "M_TOO_LARGE", + EXCLUSIVE = "M_EXCLUSIVE" class CodeMessageException(RuntimeError): diff --git a/synapse/handlers/directory.py b/synapse/handlers/directory.py index 842f075fe0..4c15e57fa6 100644 --- a/synapse/handlers/directory.py +++ b/synapse/handlers/directory.py @@ -37,24 +37,15 @@ class DirectoryHandler(BaseHandler): ) @defer.inlineCallbacks - def create_association(self, user_id, room_alias, room_id, servers=None): - - # TODO(erikj): Do auth. + def _create_association(self, room_alias, room_id, servers=None): + # general association creation for both human users and app services if not self.hs.is_mine(room_alias): raise SynapseError(400, "Room alias must be local") # TODO(erikj): Change this. # TODO(erikj): Add transactions. - # TODO(erikj): Check if there is a current association. - - is_claimed = yield self.is_alias_exclusive_to_appservices(room_alias) - if is_claimed: - raise SynapseError( - 400, "This alias is reserved by an application service." - ) - if not servers: servers = yield self.store.get_joined_hosts_for_room(room_id) @@ -67,6 +58,33 @@ class DirectoryHandler(BaseHandler): servers ) + + @defer.inlineCallbacks + def create_association(self, user_id, room_alias, room_id, servers=None): + # association creation for human users + # TODO(erikj): Do user auth. 
+ + is_claimed = yield self.is_alias_exclusive_to_appservices(room_alias) + if is_claimed: + raise SynapseError( + 400, "This alias is reserved by an application service.", + errcode=Codes.EXCLUSIVE + ) + yield self._create_association(room_alias, room_id, servers) + + + @defer.inlineCallbacks + def create_appservice_association(self, service, room_alias, room_id, + servers=None): + if not service.is_interested_in_alias(room_alias.to_string()): + raise SynapseError( + 400, "This application service has not reserved" + " this kind of alias.", errcode=Codes.EXCLUSIVE + ) + + # association creation for app services + yield self._create_association(room_alias, room_id, servers) + @defer.inlineCallbacks def delete_association(self, user_id, room_alias): # TODO Check if server admin @@ -77,7 +95,8 @@ class DirectoryHandler(BaseHandler): is_claimed = yield self.is_alias_exclusive_to_appservices(room_alias) if is_claimed: raise SynapseError( - 400, "This alias is reserved by an application service." + 400, "This alias is reserved by an application service.", + errcode=Codes.EXCLUSIVE ) room_id = yield self.store.delete_room_alias(room_alias) diff --git a/synapse/rest/client/v1/directory.py b/synapse/rest/client/v1/directory.py index 8f65efec5f..f7e910bb40 100644 --- a/synapse/rest/client/v1/directory.py +++ b/synapse/rest/client/v1/directory.py @@ -45,8 +45,6 @@ class ClientDirectoryServer(ClientV1RestServlet): @defer.inlineCallbacks def on_PUT(self, request, room_alias): - user, client = yield self.auth.get_user_by_req(request) - content = _parse_json(request) if not "room_id" in content: raise SynapseError(400, "Missing room_id key", @@ -70,16 +68,25 @@ class ClientDirectoryServer(ClientV1RestServlet): dir_handler = self.handlers.directory_handler try: - user_id = user.to_string() - yield dir_handler.create_association( - user_id, room_alias, room_id, servers + # try to auth as a user + user, client = yield self.auth.get_user_by_req(request) + try: + user_id = user.to_string() + yield dir_handler.create_association( + user_id, room_alias, room_id, servers + ) + yield dir_handler.send_room_alias_update_event(user_id, room_id) + except SynapseError as e: + raise e + except: + logger.exception("Failed to create association") + raise + except AuthError: + # try to auth as an application service + service = yield self.auth.get_appservice_by_req(request) + yield dir_handler.create_appservice_association( + service, room_alias, room_id, servers ) - yield dir_handler.send_room_alias_update_event(user_id, room_id) - except SynapseError as e: - raise e - except: - logger.exception("Failed to create association") - raise defer.returnValue((200, {})) From c3ae8def755a043283e945e6653970599d227a43 Mon Sep 17 00:00:00 2001 From: Kegan Dougal Date: Fri, 6 Feb 2015 11:32:07 +0000 Subject: [PATCH 042/112] Grant ASes the ability to delete aliases in their own namespace. 
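
A sketch of the auth fallback order the on_DELETE handler below follows: try AS authentication first, and only if that fails fall back to the human path, which additionally requires a server admin. The stub functions stand in for the real auth and directory calls.

class AuthError(Exception):
    pass

def get_appservice_by_req(request):
    # stand-in for Auth.get_appservice_by_req: raise when the token does not
    # belong to a registered application service
    raise AuthError("not an application service token")

def handle_delete(request, room_alias):
    try:
        service = get_appservice_by_req(request)
        return "delete_appservice_association(%r, %r)" % (service, room_alias)
    except AuthError:
        # fall back to the human path, which also requires server admin rights
        return "delete_association(<admin user>, %r)" % room_alias

print(handle_delete(object(), "#irc_channel:example.org"))
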
--- synapse/handlers/directory.py | 28 +++++++++++++++++++++----- synapse/rest/client/v1/directory.py | 31 +++++++++++++++++++++++++++-- 2 files changed, 52 insertions(+), 7 deletions(-) diff --git a/synapse/handlers/directory.py b/synapse/handlers/directory.py index 4c15e57fa6..c3c95996e7 100644 --- a/synapse/handlers/directory.py +++ b/synapse/handlers/directory.py @@ -87,10 +87,9 @@ class DirectoryHandler(BaseHandler): @defer.inlineCallbacks def delete_association(self, user_id, room_alias): - # TODO Check if server admin + # association deletion for human users - if not self.hs.is_mine(room_alias): - raise SynapseError(400, "Room alias must be local") + # TODO Check if server admin is_claimed = yield self.is_alias_exclusive_to_appservices(room_alias) if is_claimed: @@ -99,10 +98,29 @@ class DirectoryHandler(BaseHandler): errcode=Codes.EXCLUSIVE ) + yield self._delete_association(room_alias) + + @defer.inlineCallbacks + def delete_appservice_association(self, service, room_alias): + if not service.is_interested_in_alias(room_alias.to_string()): + raise SynapseError( + 400, + "This application service has not reserved this kind of alias", + errcode=Codes.EXCLUSIVE + ) + yield self._delete_association(room_alias) + + @defer.inlineCallbacks + def _delete_association(self, room_alias): + if not self.hs.is_mine(room_alias): + raise SynapseError(400, "Room alias must be local") + room_id = yield self.store.delete_room_alias(room_alias) - if room_id: - yield self._update_room_alias_events(user_id, room_id) + # TODO - Looks like _update_room_alias_event has never been implemented + # if room_id: + # yield self._update_room_alias_events(user_id, room_id) + @defer.inlineCallbacks def get_association(self, room_alias): diff --git a/synapse/rest/client/v1/directory.py b/synapse/rest/client/v1/directory.py index f7e910bb40..8e83548bbf 100644 --- a/synapse/rest/client/v1/directory.py +++ b/synapse/rest/client/v1/directory.py @@ -87,24 +87,51 @@ class ClientDirectoryServer(ClientV1RestServlet): yield dir_handler.create_appservice_association( service, room_alias, room_id, servers ) + logger.info( + "Application service at %s created alias %s pointing to %s", + service.url, + room_alias.to_string(), + room_id + ) defer.returnValue((200, {})) @defer.inlineCallbacks def on_DELETE(self, request, room_alias): + dir_handler = self.handlers.directory_handler + + try: + service = yield self.auth.get_appservice_by_req(request) + room_alias = RoomAlias.from_string(room_alias) + yield dir_handler.delete_appservice_association( + service, room_alias + ) + logger.info( + "Application service at %s deleted alias %s", + service.url, + room_alias.to_string() + ) + defer.returnValue((200, {})) + except AuthError: + # fallback to default user behaviour if they aren't an AS + pass + user, client = yield self.auth.get_user_by_req(request) is_admin = yield self.auth.is_server_admin(user) if not is_admin: raise AuthError(403, "You need to be a server admin") - dir_handler = self.handlers.directory_handler - room_alias = RoomAlias.from_string(room_alias) yield dir_handler.delete_association( user.to_string(), room_alias ) + logger.info( + "User %s deleted alias %s", + user.to_string(), + room_alias.to_string() + ) defer.returnValue((200, {})) From 0995810273ddbc7e5e89b56ad3768fa8f0583bc4 Mon Sep 17 00:00:00 2001 From: Kegan Dougal Date: Fri, 6 Feb 2015 11:45:19 +0000 Subject: [PATCH 043/112] Pyflakes: unused variable. 
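
The exclusivity and interest checks in the preceding patches all reduce to regex matching against the namespaces an AS registered. A minimal standalone sketch of that matching, with made-up regex values; the real code lives in ApplicationService._matches_regex and the is_interested_in_* helpers.

import re

namespaces = {
    "users":   ["@irc_.*:example\\.org"],
    "aliases": ["#irc_.*:example\\.org"],
    "rooms":   [],
}

def matches(namespace_key, test_string):
    return any(re.match(r, test_string) for r in namespaces[namespace_key])

print(matches("aliases", "#irc_python:example.org"))  # True
print(matches("users", "@bob:example.org"))           # False
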
--- synapse/handlers/directory.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/synapse/handlers/directory.py b/synapse/handlers/directory.py index c3c95996e7..22c6391a15 100644 --- a/synapse/handlers/directory.py +++ b/synapse/handlers/directory.py @@ -115,7 +115,7 @@ class DirectoryHandler(BaseHandler): if not self.hs.is_mine(room_alias): raise SynapseError(400, "Room alias must be local") - room_id = yield self.store.delete_room_alias(room_alias) + yield self.store.delete_room_alias(room_alias) # TODO - Looks like _update_room_alias_event has never been implemented # if room_id: From 73a680b2a87b309bf05b4afe2685aaba70201305 Mon Sep 17 00:00:00 2001 From: Kegan Dougal Date: Fri, 6 Feb 2015 17:10:04 +0000 Subject: [PATCH 044/112] Add errcodes for appservice registrations. --- synapse/handlers/register.py | 11 +++++++---- 1 file changed, 7 insertions(+), 4 deletions(-) diff --git a/synapse/handlers/register.py b/synapse/handlers/register.py index 60821edb05..10ab9bc99e 100644 --- a/synapse/handlers/register.py +++ b/synapse/handlers/register.py @@ -18,7 +18,8 @@ from twisted.internet import defer from synapse.types import UserID from synapse.api.errors import ( - SynapseError, RegistrationError, InvalidCaptchaError, CodeMessageException + AuthError, Codes, SynapseError, RegistrationError, InvalidCaptchaError, + CodeMessageException ) from ._base import BaseHandler import synapse.util.stringutils as stringutils @@ -111,10 +112,11 @@ class RegistrationHandler(BaseHandler): user_id = user.to_string() service = yield self.store.get_app_service_by_token(as_token) if not service: - raise SynapseError(403, "Invalid application service token.") + raise AuthError(403, "Invalid application service token.") if not service.is_interested_in_user(user_id): raise SynapseError( - 400, "Invalid user localpart for this application service." + 400, "Invalid user localpart for this application service.", + errcode=Codes.EXCLUSIVE ) token = self._generate_token(user_id) yield self.store.register( @@ -181,7 +183,8 @@ class RegistrationHandler(BaseHandler): ] if len(interested_services) > 0: raise SynapseError( - 400, "This user ID is reserved by an application service." + 400, "This user ID is reserved by an application service.", + errcode=Codes.EXCLUSIVE ) def _generate_token(self, user_id): From ac3183caaa66b750996d90c0ac9ed430f623909c Mon Sep 17 00:00:00 2001 From: Kegan Dougal Date: Mon, 9 Feb 2015 12:03:37 +0000 Subject: [PATCH 045/112] Register a user account for the AS when the AS registers. Add 'sender' column to AS table. 
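
An illustrative end state of an AS record after registration under the handler change below: a freshly generated hs_token and an auto-registered sender user alongside the token the AS presents. All concrete values here are made up; the sender localpart is whatever the registration handler generates.

import json

app_service_row = {
    "url": "http://localhost:8445/appservice",   # hypothetical AS base URL
    "token": "as_token_abc",                      # token the AS presents to the HS
    "hs_token": "Jrx0Blm4yyRCO1lVSzq3AA2B",       # random_string(24)-style value
    "sender": "@randomlocalpart:example.org",     # auto-registered sender user
}
print(json.dumps(app_service_row, indent=2))
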
--- synapse/appservice/__init__.py | 3 ++- synapse/handlers/appservice.py | 8 +++++++- synapse/storage/appservice.py | 5 +++-- synapse/storage/schema/application_services.sql | 1 + synapse/storage/schema/delta/v14.sql | 1 + 5 files changed, 14 insertions(+), 4 deletions(-) diff --git a/synapse/appservice/__init__.py b/synapse/appservice/__init__.py index 46d46a5a48..fb9bfffe5d 100644 --- a/synapse/appservice/__init__.py +++ b/synapse/appservice/__init__.py @@ -35,10 +35,11 @@ class ApplicationService(object): NS_LIST = [NS_USERS, NS_ALIASES, NS_ROOMS] def __init__(self, token, url=None, namespaces=None, hs_token=None, - txn_id=None): + sender=None, txn_id=None): self.token = token self.url = url self.hs_token = hs_token + self.sender = sender self.namespaces = self._check_namespaces(namespaces) self.txn_id = txn_id diff --git a/synapse/handlers/appservice.py b/synapse/handlers/appservice.py index fa810b9a98..5071a12eb1 100644 --- a/synapse/handlers/appservice.py +++ b/synapse/handlers/appservice.py @@ -52,8 +52,14 @@ class ApplicationServicesHandler(object): "Consult the home server admin.", errcode=Codes.FORBIDDEN ) - logger.info("Updating application service info...") + app_service.hs_token = self._generate_hs_token() + + # create a sender for this application service which is used when + # creating rooms, etc.. + account = yield self.hs.get_handlers().registration_handler.register() + app_service.sender = account[0] + yield self.store.update_app_service(app_service) defer.returnValue(app_service) diff --git a/synapse/storage/appservice.py b/synapse/storage/appservice.py index 3c8bf9ad0d..eef77e737e 100644 --- a/synapse/storage/appservice.py +++ b/synapse/storage/appservice.py @@ -130,8 +130,9 @@ class ApplicationServiceStore(SQLBaseStore): return False txn.execute( - "UPDATE application_services SET url=?, hs_token=? WHERE id=?", - (service.url, service.hs_token, as_id,) + "UPDATE application_services SET url=?, hs_token=?, sender=? " + "WHERE id=?", + (service.url, service.hs_token, service.sender, as_id,) ) # cleanup regex txn.execute( diff --git a/synapse/storage/schema/application_services.sql b/synapse/storage/schema/application_services.sql index 03b5a10c8a..e491ad5aec 100644 --- a/synapse/storage/schema/application_services.sql +++ b/synapse/storage/schema/application_services.sql @@ -18,6 +18,7 @@ CREATE TABLE IF NOT EXISTS application_services( url TEXT, token TEXT, hs_token TEXT, + sender TEXT, UNIQUE(token) ON CONFLICT ROLLBACK ); diff --git a/synapse/storage/schema/delta/v14.sql b/synapse/storage/schema/delta/v14.sql index 03b5a10c8a..e491ad5aec 100644 --- a/synapse/storage/schema/delta/v14.sql +++ b/synapse/storage/schema/delta/v14.sql @@ -18,6 +18,7 @@ CREATE TABLE IF NOT EXISTS application_services( url TEXT, token TEXT, hs_token TEXT, + sender TEXT, UNIQUE(token) ON CONFLICT ROLLBACK ); From 5a7dd058184613c70041a61fdbc2ccce104bb500 Mon Sep 17 00:00:00 2001 From: Kegan Dougal Date: Mon, 9 Feb 2015 14:14:15 +0000 Subject: [PATCH 046/112] Modify auth.get_user_by_req for authing appservices directly. Add logic to map the appservice token to the autogenned appservice user ID. 
Add unit tests for all forms of get_user_by_req (user/appservice, valid/bad/missing tokens) --- synapse/api/auth.py | 34 +++--- synapse/storage/appservice.py | 4 +- tests/api/test_auth.py | 139 +++++++++++++++++++++++++ tests/rest/client/v1/test_presence.py | 3 + tests/rest/client/v2_alpha/__init__.py | 4 +- 5 files changed, 164 insertions(+), 20 deletions(-) create mode 100644 tests/api/test_auth.py diff --git a/synapse/api/auth.py b/synapse/api/auth.py index ea8c461729..310a428066 100644 --- a/synapse/api/auth.py +++ b/synapse/api/auth.py @@ -302,27 +302,26 @@ class Auth(object): # Check for application service tokens with a user_id override try: - if "user_id" not in request.args: - # This has to be done like this rather than relying on it - # natively throwing because tests use a Mock for the request - # object which doesn't throw :/ - raise KeyError - - masquerade_user_id = request.args["user_id"][0] app_service = yield self.store.get_app_service_by_token( access_token ) if not app_service: - raise AuthError( - 403, "Invalid application service access token" - ) - if not app_service.is_interested_in_user(masquerade_user_id): - raise AuthError( - 403, - "Application service cannot masquerade as this user." - ) + raise KeyError + + user_id = app_service.sender + if "user_id" in request.args: + user_id = request.args["user_id"][0] + if not app_service.is_interested_in_user(user_id): + raise AuthError( + 403, + "Application service cannot masquerade as this user." + ) + + if not user_id: + raise KeyError + defer.returnValue( - (UserID.from_string(masquerade_user_id), ClientInfo("", "")) + (UserID.from_string(user_id), ClientInfo("", "")) ) return except KeyError: @@ -366,8 +365,7 @@ class Auth(object): try: ret = yield self.store.get_user_by_token(token=token) if not ret: - raise StoreError() - + raise StoreError(400, "Unknown token") user_info = { "admin": bool(ret.get("admin", False)), "device_id": ret.get("device_id"), diff --git a/synapse/storage/appservice.py b/synapse/storage/appservice.py index eef77e737e..ba31c68595 100644 --- a/synapse/storage/appservice.py +++ b/synapse/storage/appservice.py @@ -215,6 +215,7 @@ class ApplicationServiceStore(SQLBaseStore): "url": res["url"], "token": as_token, "hs_token": res["hs_token"], + "sender": res["sender"], "namespaces": { ApplicationService.NS_USERS: [], ApplicationService.NS_ALIASES: [], @@ -240,6 +241,7 @@ class ApplicationServiceStore(SQLBaseStore): token=service["token"], url=service["url"], namespaces=service["namespaces"], - hs_token=service["hs_token"] + hs_token=service["hs_token"], + sender=service["sender"] )) diff --git a/tests/api/test_auth.py b/tests/api/test_auth.py new file mode 100644 index 0000000000..1d8367ce42 --- /dev/null +++ b/tests/api/test_auth.py @@ -0,0 +1,139 @@ +# -*- coding: utf-8 -*- +# Copyright 2015 OpenMarket Ltd +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+from tests import unittest +from twisted.internet import defer + +from mock import Mock, NonCallableMock + +from synapse.api.auth import Auth +from synapse.api.errors import AuthError +from synapse.types import UserID + +class AuthTestCase(unittest.TestCase): + + def setUp(self): + self.state_handler = Mock() + self.store = Mock() + + self.hs = Mock() + self.hs.get_datastore = Mock(return_value=self.store) + self.hs.get_state_handler = Mock(return_value=self.state_handler) + self.auth = Auth(self.hs) + + self.test_user = "@foo:bar" + self.test_token = "_test_token_" + + @defer.inlineCallbacks + def test_get_user_by_req_user_valid_token(self): + self.store.get_app_service_by_token = Mock(return_value=None) + user_info = { + "name": self.test_user, + "device_id": "nothing", + "token_id": "ditto", + "admin": False + } + self.store.get_user_by_token = Mock(return_value=user_info) + + request = Mock(args={}) + request.args["access_token"] = [self.test_token] + request.requestHeaders.getRawHeaders = Mock(return_value=[""]) + (user, info) = yield self.auth.get_user_by_req(request) + self.assertEquals(user.to_string(), self.test_user) + + def test_get_user_by_req_user_bad_token(self): + self.store.get_app_service_by_token = Mock(return_value=None) + self.store.get_user_by_token = Mock(return_value=None) + + request = Mock(args={}) + request.args["access_token"] = [self.test_token] + request.requestHeaders.getRawHeaders = Mock(return_value=[""]) + d = self.auth.get_user_by_req(request) + self.failureResultOf(d, AuthError) + + def test_get_user_by_req_user_missing_token(self): + self.store.get_app_service_by_token = Mock(return_value=None) + user_info = { + "name": self.test_user, + "device_id": "nothing", + "token_id": "ditto", + "admin": False + } + self.store.get_user_by_token = Mock(return_value=user_info) + + request = Mock(args={}) + request.requestHeaders.getRawHeaders = Mock(return_value=[""]) + d = self.auth.get_user_by_req(request) + self.failureResultOf(d, AuthError) + + @defer.inlineCallbacks + def test_get_user_by_req_appservice_valid_token(self): + app_service = Mock(token="foobar", url="a_url", sender=self.test_user) + self.store.get_app_service_by_token = Mock(return_value=app_service) + self.store.get_user_by_token = Mock(return_value=None) + + request = Mock(args={}) + request.args["access_token"] = [self.test_token] + request.requestHeaders.getRawHeaders = Mock(return_value=[""]) + (user, info) = yield self.auth.get_user_by_req(request) + self.assertEquals(user.to_string(), self.test_user) + + def test_get_user_by_req_appservice_bad_token(self): + self.store.get_app_service_by_token = Mock(return_value=None) + self.store.get_user_by_token = Mock(return_value=None) + + request = Mock(args={}) + request.args["access_token"] = [self.test_token] + request.requestHeaders.getRawHeaders = Mock(return_value=[""]) + d = self.auth.get_user_by_req(request) + self.failureResultOf(d, AuthError) + + def test_get_user_by_req_appservice_missing_token(self): + app_service = Mock(token="foobar", url="a_url", sender=self.test_user) + self.store.get_app_service_by_token = Mock(return_value=app_service) + self.store.get_user_by_token = Mock(return_value=None) + + request = Mock(args={}) + request.requestHeaders.getRawHeaders = Mock(return_value=[""]) + d = self.auth.get_user_by_req(request) + self.failureResultOf(d, AuthError) + + @defer.inlineCallbacks + def test_get_user_by_req_appservice_valid_token_valid_user_id(self): + masquerading_user_id = "@doppelganger:matrix.org" + app_service = 
Mock(token="foobar", url="a_url", sender=self.test_user) + app_service.is_interested_in_user = Mock(return_value=True) + self.store.get_app_service_by_token = Mock(return_value=app_service) + self.store.get_user_by_token = Mock(return_value=None) + + request = Mock(args={}) + request.args["access_token"] = [self.test_token] + request.args["user_id"] = [masquerading_user_id] + request.requestHeaders.getRawHeaders = Mock(return_value=[""]) + (user, info) = yield self.auth.get_user_by_req(request) + self.assertEquals(user.to_string(), masquerading_user_id) + + def test_get_user_by_req_appservice_valid_token_bad_user_id(self): + masquerading_user_id = "@doppelganger:matrix.org" + app_service = Mock(token="foobar", url="a_url", sender=self.test_user) + app_service.is_interested_in_user = Mock(return_value=False) + self.store.get_app_service_by_token = Mock(return_value=app_service) + self.store.get_user_by_token = Mock(return_value=None) + + request = Mock(args={}) + request.args["access_token"] = [self.test_token] + request.args["user_id"] = [masquerading_user_id] + request.requestHeaders.getRawHeaders = Mock(return_value=[""]) + d = self.auth.get_user_by_req(request) + self.failureResultOf(d, AuthError) diff --git a/tests/rest/client/v1/test_presence.py b/tests/rest/client/v1/test_presence.py index f849120a3e..e5d876d89a 100644 --- a/tests/rest/client/v1/test_presence.py +++ b/tests/rest/client/v1/test_presence.py @@ -65,6 +65,7 @@ class PresenceStateTestCase(unittest.TestCase): hs.handlers = JustPresenceHandlers(hs) self.datastore = hs.get_datastore() + self.datastore.get_app_service_by_token = Mock(return_value=None) def get_presence_list(*a, **kw): return defer.succeed([]) @@ -154,6 +155,7 @@ class PresenceListTestCase(unittest.TestCase): hs.handlers = JustPresenceHandlers(hs) self.datastore = hs.get_datastore() + self.datastore.get_app_service_by_token = Mock(return_value=None) def has_presence_state(user_localpart): return defer.succeed( @@ -303,6 +305,7 @@ class PresenceEventStreamTestCase(unittest.TestCase): hs.handlers.room_member_handler.get_rooms_for_user = get_rooms_for_user self.mock_datastore = hs.get_datastore() + self.mock_datastore.get_app_service_by_token = Mock(return_value=None) def get_profile_displayname(user_id): return defer.succeed("Frank") diff --git a/tests/rest/client/v2_alpha/__init__.py b/tests/rest/client/v2_alpha/__init__.py index fa70575c57..7c2b0dfa0e 100644 --- a/tests/rest/client/v2_alpha/__init__.py +++ b/tests/rest/client/v2_alpha/__init__.py @@ -59,6 +59,8 @@ class V2AlphaRestTestCase(unittest.TestCase): r.register_servlets(hs, self.mock_resource) def make_datastore_mock(self): - return Mock(spec=[ + store = Mock(spec=[ "insert_client_ip", ]) + store.get_app_service_by_token = Mock(return_value=None) + return store From ab3c897ce123506bfff8a6a67e0530ca90ae2f15 Mon Sep 17 00:00:00 2001 From: Kegan Dougal Date: Mon, 9 Feb 2015 14:16:36 +0000 Subject: [PATCH 047/112] Remove unused imports. 
--- tests/api/test_auth.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/tests/api/test_auth.py b/tests/api/test_auth.py index 1d8367ce42..4f83db5e84 100644 --- a/tests/api/test_auth.py +++ b/tests/api/test_auth.py @@ -15,11 +15,11 @@ from tests import unittest from twisted.internet import defer -from mock import Mock, NonCallableMock +from mock import Mock from synapse.api.auth import Auth from synapse.api.errors import AuthError -from synapse.types import UserID + class AuthTestCase(unittest.TestCase): From f7cac2f7b699fb3bb58c02fa1828aeec6c319f01 Mon Sep 17 00:00:00 2001 From: Kegan Dougal Date: Mon, 9 Feb 2015 15:01:28 +0000 Subject: [PATCH 048/112] Fix bugs so lazy room joining works as intended. --- synapse/appservice/api.py | 6 +++--- synapse/handlers/appservice.py | 6 +++--- synapse/handlers/directory.py | 23 +++++++++++++++++------ 3 files changed, 23 insertions(+), 12 deletions(-) diff --git a/synapse/appservice/api.py b/synapse/appservice/api.py index 9cce4e0973..15ac1e27fc 100644 --- a/synapse/appservice/api.py +++ b/synapse/appservice/api.py @@ -41,7 +41,7 @@ class ApplicationServiceApi(SimpleHttpClient): response = yield self.get_json(uri, { "access_token": service.hs_token }) - if response: # just an empty json object + if response is not None: # just an empty json object defer.returnValue(True) except CodeMessageException as e: if e.code == 404: @@ -60,13 +60,13 @@ class ApplicationServiceApi(SimpleHttpClient): response = yield self.get_json(uri, { "access_token": service.hs_token }) - if response: # just an empty json object + if response is not None: # just an empty json object defer.returnValue(True) except CodeMessageException as e: + logger.warning("query_alias to %s received %s", uri, e.code) if e.code == 404: defer.returnValue(False) return - logger.warning("query_alias to %s received %s", uri, e.code) except Exception as ex: logger.warning("query_alias to %s threw exception %s", uri, ex) defer.returnValue(False) diff --git a/synapse/handlers/appservice.py b/synapse/handlers/appservice.py index 5071a12eb1..8591a77bf3 100644 --- a/synapse/handlers/appservice.py +++ b/synapse/handlers/appservice.py @@ -124,15 +124,15 @@ class ApplicationServicesHandler(object): namedtuple: with keys "room_id" and "servers" or None if no association can be found. """ - room_alias = room_alias.to_string() + room_alias_str = room_alias.to_string() alias_query_services = yield self._get_services_for_event( event=None, restrict_to=ApplicationService.NS_ALIASES, - alias_list=[room_alias] + alias_list=[room_alias_str] ) for alias_service in alias_query_services: is_known_alias = yield self.appservice_api.query_alias( - alias_service, room_alias + alias_service, room_alias_str ) if is_known_alias: # the alias exists now so don't query more ASes. diff --git a/synapse/handlers/directory.py b/synapse/handlers/directory.py index 22c6391a15..87bc12c983 100644 --- a/synapse/handlers/directory.py +++ b/synapse/handlers/directory.py @@ -64,8 +64,11 @@ class DirectoryHandler(BaseHandler): # association creation for human users # TODO(erikj): Do user auth. 
- is_claimed = yield self.is_alias_exclusive_to_appservices(room_alias) - if is_claimed: + can_create = yield self.can_modify_alias( + room_alias, + user_id=user_id + ) + if not can_create: raise SynapseError( 400, "This alias is reserved by an application service.", errcode=Codes.EXCLUSIVE @@ -91,8 +94,11 @@ class DirectoryHandler(BaseHandler): # TODO Check if server admin - is_claimed = yield self.is_alias_exclusive_to_appservices(room_alias) - if is_claimed: + can_delete = yield self.can_modify_alias( + room_alias, + user_id=user_id + ) + if not can_delete: raise SynapseError( 400, "This alias is reserved by an application service.", errcode=Codes.EXCLUSIVE @@ -228,9 +234,14 @@ class DirectoryHandler(BaseHandler): defer.returnValue(result) @defer.inlineCallbacks - def is_alias_exclusive_to_appservices(self, alias): + def can_modify_alias(self, alias, user_id=None): services = yield self.store.get_app_services() interested_services = [ s for s in services if s.is_interested_in_alias(alias.to_string()) ] - defer.returnValue(len(interested_services) > 0) + for service in interested_services: + if user_id == service.sender: + # this user IS the app service + defer.returnValue(True) + return + defer.returnValue(len(interested_services) == 0) From c7783d6feec9b69c24f3303cbb51cce3e6b8ffb3 Mon Sep 17 00:00:00 2001 From: Kegan Dougal Date: Wed, 11 Feb 2015 10:36:08 +0000 Subject: [PATCH 049/112] Notify ASes for events sent by other users in a room which an AS user is a part of. --- synapse/appservice/__init__.py | 17 +++++++++++++---- synapse/handlers/appservice.py | 21 ++++++++++++++++----- synapse/rest/appservice/v1/__init__.py | 4 ++-- tests/appservice/test_appservice.py | 25 +++++++++++++++++++++++++ 4 files changed, 56 insertions(+), 11 deletions(-) diff --git a/synapse/appservice/__init__.py b/synapse/appservice/__init__.py index fb9bfffe5d..381b4cfc4a 100644 --- a/synapse/appservice/__init__.py +++ b/synapse/appservice/__init__.py @@ -74,7 +74,7 @@ class ApplicationService(object): return True return False - def _matches_user(self, event): + def _matches_user(self, event, member_list): if (hasattr(event, "sender") and self.is_interested_in_user(event.sender)): return True @@ -83,6 +83,10 @@ class ApplicationService(object): and hasattr(event, "state_key") and self.is_interested_in_user(event.state_key)): return True + # check joined member events + for member in member_list: + if self.is_interested_in_user(member.state_key): + return True return False def _matches_room_id(self, event): @@ -96,7 +100,8 @@ class ApplicationService(object): return True return False - def is_interested(self, event, restrict_to=None, aliases_for_event=None): + def is_interested(self, event, restrict_to=None, aliases_for_event=None, + member_list=None): """Check if this service is interested in this event. Args: @@ -104,18 +109,22 @@ class ApplicationService(object): restrict_to(str): The namespace to restrict regex tests to. aliases_for_event(list): A list of all the known room aliases for this event. + member_list(list): A list of all joined room members in this room. Returns: bool: True if this service would like to know about this event. """ if aliases_for_event is None: aliases_for_event = [] + if member_list is None: + member_list = [] + if restrict_to and restrict_to not in ApplicationService.NS_LIST: # this is a programming error, so fail early and raise a general # exception raise Exception("Unexpected restrict_to value: %s". 
restrict_to) if not restrict_to: - return (self._matches_user(event) + return (self._matches_user(event, member_list) or self._matches_aliases(event, aliases_for_event) or self._matches_room_id(event)) elif restrict_to == ApplicationService.NS_ALIASES: @@ -123,7 +132,7 @@ class ApplicationService(object): elif restrict_to == ApplicationService.NS_ROOMS: return self._matches_room_id(event) elif restrict_to == ApplicationService.NS_USERS: - return self._matches_user(event) + return self._matches_user(event, member_list) def is_interested_in_user(self, user_id): return self._matches_regex(user_id, ApplicationService.NS_USERS) diff --git a/synapse/handlers/appservice.py b/synapse/handlers/appservice.py index 8591a77bf3..2c488a46f6 100644 --- a/synapse/handlers/appservice.py +++ b/synapse/handlers/appservice.py @@ -15,7 +15,7 @@ from twisted.internet import defer -from synapse.api.constants import EventTypes +from synapse.api.constants import EventTypes, Membership from synapse.api.errors import Codes, StoreError, SynapseError from synapse.appservice import ApplicationService from synapse.types import UserID @@ -154,14 +154,25 @@ class ApplicationServicesHandler(object): list: A list of services interested in this event based on the service regex. """ - # We need to know the aliases associated with this event.room_id, if any - if not alias_list: - alias_list = yield self.store.get_aliases_for_room(event.room_id) + member_list = None + if hasattr(event, "room_id"): + # We need to know the aliases associated with this event.room_id, + # if any. + if not alias_list: + alias_list = yield self.store.get_aliases_for_room( + event.room_id + ) + # We need to know the members associated with this event.room_id, + # if any. + member_list = yield self.store.get_room_members( + room_id=event.room_id, + membership=Membership.JOIN + ) services = yield self.store.get_app_services() interested_list = [ s for s in services if ( - s.is_interested(event, restrict_to, alias_list) + s.is_interested(event, restrict_to, alias_list, member_list) ) ] defer.returnValue(interested_list) diff --git a/synapse/rest/appservice/v1/__init__.py b/synapse/rest/appservice/v1/__init__.py index bf243b6180..a7877609ad 100644 --- a/synapse/rest/appservice/v1/__init__.py +++ b/synapse/rest/appservice/v1/__init__.py @@ -21,9 +21,9 @@ class AppServiceRestResource(JsonResource): """A resource for version 1 of the matrix application service API.""" def __init__(self, hs): - JsonResource.__init__(self) + JsonResource.__init__(self, hs) self.register_servlets(self, hs) @staticmethod def register_servlets(appservice_resource, hs): - register.register_servlets(hs, appservice_resource) \ No newline at end of file + register.register_servlets(hs, appservice_resource) diff --git a/tests/appservice/test_appservice.py b/tests/appservice/test_appservice.py index c0aaf12785..d12e4f2644 100644 --- a/tests/appservice/test_appservice.py +++ b/tests/appservice/test_appservice.py @@ -143,3 +143,28 @@ class ApplicationServiceTestCase(unittest.TestCase): restrict_to=ApplicationService.NS_USERS, aliases_for_event=["#xmpp_barfoo:matrix.org"] )) + + def test_member_list_match(self): + self.service.namespaces[ApplicationService.NS_USERS].append( + "@irc_.*" + ) + join_list = [ + Mock( + type="m.room.member", room_id="!foo:bar", sender="@alice:here", + state_key="@alice:here" + ), + Mock( + type="m.room.member", room_id="!foo:bar", sender="@irc_fo:here", + state_key="@irc_fo:here" # AS user + ), + Mock( + type="m.room.member", room_id="!foo:bar", 
sender="@bob:here", + state_key="@bob:here" + ) + ] + + self.event.sender = "@xmpp_foobar:matrix.org" + self.assertTrue(self.service.is_interested( + event=self.event, + member_list=join_list + )) From 8beb61391609d8dd55022a14e1ef11bf353c1fdc Mon Sep 17 00:00:00 2001 From: Kegan Dougal Date: Wed, 11 Feb 2015 10:36:48 +0000 Subject: [PATCH 050/112] Add newline to EOF --- synapse/rest/appservice/v1/__init__.py | 1 + 1 file changed, 1 insertion(+) diff --git a/synapse/rest/appservice/v1/__init__.py b/synapse/rest/appservice/v1/__init__.py index a7877609ad..a9d6d72566 100644 --- a/synapse/rest/appservice/v1/__init__.py +++ b/synapse/rest/appservice/v1/__init__.py @@ -27,3 +27,4 @@ class AppServiceRestResource(JsonResource): @staticmethod def register_servlets(appservice_resource, hs): register.register_servlets(hs, appservice_resource) + From fd40d992adfb8b63f6e925dad030c63498501408 Mon Sep 17 00:00:00 2001 From: Kegan Dougal Date: Wed, 11 Feb 2015 10:41:33 +0000 Subject: [PATCH 051/112] PEP8-ify --- synapse/appservice/api.py | 2 -- synapse/handlers/directory.py | 3 --- synapse/rest/appservice/v1/register.py | 2 +- synapse/storage/appservice.py | 3 --- 4 files changed, 1 insertion(+), 9 deletions(-) diff --git a/synapse/appservice/api.py b/synapse/appservice/api.py index 15ac1e27fc..6192813c03 100644 --- a/synapse/appservice/api.py +++ b/synapse/appservice/api.py @@ -71,7 +71,6 @@ class ApplicationServiceApi(SimpleHttpClient): logger.warning("query_alias to %s threw exception %s", uri, ex) defer.returnValue(False) - @defer.inlineCallbacks def push_bulk(self, service, events): events = self._serialize(events) @@ -107,4 +106,3 @@ class ApplicationServiceApi(SimpleHttpClient): return [ serialize_event(e, time_now, as_client_event=True) for e in events ] - diff --git a/synapse/handlers/directory.py b/synapse/handlers/directory.py index 87bc12c983..20ab9e269c 100644 --- a/synapse/handlers/directory.py +++ b/synapse/handlers/directory.py @@ -58,7 +58,6 @@ class DirectoryHandler(BaseHandler): servers ) - @defer.inlineCallbacks def create_association(self, user_id, room_alias, room_id, servers=None): # association creation for human users @@ -75,7 +74,6 @@ class DirectoryHandler(BaseHandler): ) yield self._create_association(room_alias, room_id, servers) - @defer.inlineCallbacks def create_appservice_association(self, service, room_alias, room_id, servers=None): @@ -127,7 +125,6 @@ class DirectoryHandler(BaseHandler): # if room_id: # yield self._update_room_alias_events(user_id, room_id) - @defer.inlineCallbacks def get_association(self, room_alias): room_id = None diff --git a/synapse/rest/appservice/v1/register.py b/synapse/rest/appservice/v1/register.py index d3d5aef220..3bd0c1220c 100644 --- a/synapse/rest/appservice/v1/register.py +++ b/synapse/rest/appservice/v1/register.py @@ -65,7 +65,7 @@ class RegisterRestServlet(AppServiceRestServlet): hs_token = app_service.hs_token defer.returnValue((200, { - "hs_token": hs_token + "hs_token": hs_token })) def _parse_namespace(self, target_ns, origin_ns, ns): diff --git a/synapse/storage/appservice.py b/synapse/storage/appservice.py index ba31c68595..d941b1f387 100644 --- a/synapse/storage/appservice.py +++ b/synapse/storage/appservice.py @@ -97,7 +97,6 @@ class ApplicationServiceStore(SQLBaseStore): # allocate new ASes. It relies on the server admin inserting the AS # token into the database manually. 
- if not service.token or not service.url: raise StoreError(400, "Token and url must be specified.") @@ -186,7 +185,6 @@ class ApplicationServiceStore(SQLBaseStore): # TODO: The from_cache=False impl # TODO: This should be JOINed with the application_services_regex table. - @defer.inlineCallbacks def _populate_cache(self): """Populates the ApplicationServiceCache from the database.""" @@ -244,4 +242,3 @@ class ApplicationServiceStore(SQLBaseStore): hs_token=service["hs_token"], sender=service["sender"] )) - From 14d413752bc7f656e9df5363a8aa29bae404d10a Mon Sep 17 00:00:00 2001 From: Kegan Dougal Date: Wed, 11 Feb 2015 10:53:47 +0000 Subject: [PATCH 052/112] Fix newline on __init__ --- synapse/rest/appservice/v1/__init__.py | 1 - 1 file changed, 1 deletion(-) diff --git a/synapse/rest/appservice/v1/__init__.py b/synapse/rest/appservice/v1/__init__.py index a9d6d72566..a7877609ad 100644 --- a/synapse/rest/appservice/v1/__init__.py +++ b/synapse/rest/appservice/v1/__init__.py @@ -27,4 +27,3 @@ class AppServiceRestResource(JsonResource): @staticmethod def register_servlets(appservice_resource, hs): register.register_servlets(hs, appservice_resource) - From f51832442674a1f72c41ffce2279880109fc7ff0 Mon Sep 17 00:00:00 2001 From: Kegan Dougal Date: Wed, 11 Feb 2015 16:41:16 +0000 Subject: [PATCH 053/112] Minor tweaks based on PR feedback. --- synapse/appservice/api.py | 6 +++--- synapse/http/client.py | 3 ++- 2 files changed, 5 insertions(+), 4 deletions(-) diff --git a/synapse/appservice/api.py b/synapse/appservice/api.py index 6192813c03..c2179f8d55 100644 --- a/synapse/appservice/api.py +++ b/synapse/appservice/api.py @@ -80,11 +80,11 @@ class ApplicationServiceApi(SimpleHttpClient): response = None try: response = yield self.put_json( - uri, - { + uri=uri, + json_body={ "events": events }, - { + args={ "access_token": service.hs_token }) if response: # just an empty json object diff --git a/synapse/http/client.py b/synapse/http/client.py index 575510637e..7b23116556 100644 --- a/synapse/http/client.py +++ b/synapse/http/client.py @@ -95,7 +95,8 @@ class SimpleHttpClient(object): Deferred: Succeeds when we get *any* 2xx HTTP response, with the HTTP body as JSON. Raises: - On a non-2xx HTTP response. + On a non-2xx HTTP response. The response body will be used as the + error message. """ if len(args): query_bytes = urllib.urlencode(args, True) From cb43fbeeb47e86e2f572dd647f9e59ed181bbf79 Mon Sep 17 00:00:00 2001 From: Kegan Dougal Date: Wed, 11 Feb 2015 16:46:01 +0000 Subject: [PATCH 054/112] Fix tests which broke when event caching was introduced. 
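The fix presumably works because the event-caching change now reads something off the homeserver config while the datastore is being built, and mock.Mock() will synthesise any attribute that is asked of it. A tiny, standalone illustration of that property (the attribute name below is made up, it is not the real config key):

    from mock import Mock

    config = Mock()
    # Attribute access on a Mock never raises AttributeError; it returns a
    # child Mock instead, so code reading e.g. config.some_cache_size keeps
    # working even though the test never defined it.
    print(config.some_cache_size)                    # <Mock name='mock.some_cache_size' ...>
    print(Mock(some_cache_size=10).some_cache_size)  # 10, when a real value is pinned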
--- tests/storage/test_appservice.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/tests/storage/test_appservice.py b/tests/storage/test_appservice.py index b9ecfb3384..fc733d4c79 100644 --- a/tests/storage/test_appservice.py +++ b/tests/storage/test_appservice.py @@ -19,6 +19,7 @@ from synapse.appservice import ApplicationService from synapse.server import HomeServer from synapse.storage.appservice import ApplicationServiceStore +from mock import Mock from tests.utils import SQLiteMemoryDbPool, MockClock @@ -28,7 +29,9 @@ class ApplicationServiceStoreTestCase(unittest.TestCase): def setUp(self): db_pool = SQLiteMemoryDbPool() yield db_pool.prepare() - hs = HomeServer("test", db_pool=db_pool, clock=MockClock()) + hs = HomeServer( + "test", db_pool=db_pool, clock=MockClock(), config=Mock() + ) self.as_token = "token1" db_pool.runQuery( "INSERT INTO application_services(token) VALUES(?)", From 183b3d4e47dd2fc0e0ca88714d0e0bd415f81736 Mon Sep 17 00:00:00 2001 From: Mark Haines Date: Fri, 13 Feb 2015 14:29:49 +0000 Subject: [PATCH 055/112] Prepare the database whenever a connection is opened from the db_pool so that in-memory databases will work --- synapse/app/homeserver.py | 14 ++++---------- synapse/storage/__init__.py | 3 +++ 2 files changed, 7 insertions(+), 10 deletions(-) diff --git a/synapse/app/homeserver.py b/synapse/app/homeserver.py index f5681fac20..6f39819d3a 100755 --- a/synapse/app/homeserver.py +++ b/synapse/app/homeserver.py @@ -90,7 +90,9 @@ class SynapseHomeServer(HomeServer): "sqlite3", self.get_db_name(), check_same_thread=False, cp_min=1, - cp_max=1 + cp_max=1, + cp_openfun=prepare_database, # Prepare the database for each conn + # so that :memory: sqlite works ) def create_resource_tree(self, web_client, redirect_root_to_web_client): @@ -252,14 +254,6 @@ def setup(): logger.info("Database prepared in %s.", db_name) - db_pool = hs.get_db_pool() - - if db_name == ":memory:": - # Memory databases will need to be setup each time they are opened. 
- reactor.callWhenRunning( - db_pool.runWithConnection, prepare_database - ) - if config.manhole: f = twisted.manhole.telnet.ShellFactory() f.username = "matrix" @@ -270,10 +264,10 @@ def setup(): bind_port = config.bind_port if config.no_tls: bind_port = None + hs.start_listening(bind_port, config.unsecure_port) hs.get_pusherpool().start() - hs.get_state_handler().start_caching() hs.get_datastore().start_profiling() diff --git a/synapse/storage/__init__.py b/synapse/storage/__init__.py index 02b1f06854..1c22e19ab0 100644 --- a/synapse/storage/__init__.py +++ b/synapse/storage/__init__.py @@ -637,10 +637,13 @@ def prepare_database(db_conn): c.executescript(sql_script) db_conn.commit() + else: + logger.info("Database is at version %r", user_version) else: sql_script = "BEGIN TRANSACTION;\n" for sql_loc in SCHEMAS: + logger.debug("Applying schema %r", sql_loc) sql_script += read_schema(sql_loc) sql_script += "\n" sql_script += "COMMIT TRANSACTION;" From a5ad6f862c75a2d4c9ed5d57537c529053002daa Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Mon, 16 Feb 2015 13:15:41 +0000 Subject: [PATCH 056/112] Fix contrib/graph/graph2.py to handle FrozenDict --- contrib/graph/graph2.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/contrib/graph/graph2.py b/contrib/graph/graph2.py index 6b551d42e5..d0d2cfe7c0 100644 --- a/contrib/graph/graph2.py +++ b/contrib/graph/graph2.py @@ -21,6 +21,7 @@ import datetime import argparse from synapse.events import FrozenEvent +from synapse.util.frozenutils import unfreeze def make_graph(db_name, room_id, file_prefix, limit): @@ -70,7 +71,7 @@ def make_graph(db_name, room_id, file_prefix, limit): float(event.origin_server_ts) / 1000 ).strftime('%Y-%m-%d %H:%M:%S,%f') - content = json.dumps(event.get_dict()["content"]) + content = json.dumps(unfreeze(event.get_dict()["content"])) label = ( "<" From 61385846510ceed1bff571dde1233a70e02b4748 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Mon, 16 Feb 2015 14:08:02 +0000 Subject: [PATCH 057/112] Don't return anything from _handle_new_pdu, since we ignore the return value anyway --- synapse/federation/federation_server.py | 5 +---- 1 file changed, 1 insertion(+), 4 deletions(-) diff --git a/synapse/federation/federation_server.py b/synapse/federation/federation_server.py index 9f5c98694c..7ca8d49c54 100644 --- a/synapse/federation/federation_server.py +++ b/synapse/federation/federation_server.py @@ -331,7 +331,6 @@ class FederationServer(FederationBase): ) if already_seen: logger.debug("Already seen pdu %s", pdu.event_id) - defer.returnValue({}) return # Check signature. @@ -418,7 +417,7 @@ class FederationServer(FederationBase): except: logger.warn("Failed to get state for event: %s", pdu.event_id) - ret = yield self.handler.on_receive_pdu( + yield self.handler.on_receive_pdu( origin, pdu, backfilled=False, @@ -426,8 +425,6 @@ class FederationServer(FederationBase): auth_chain=auth_chain, ) - defer.returnValue(ret) - def __str__(self): return "" % self.server_name From 91fc5eef1deb2f756d16f3d5ea20ede8d967f6df Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Mon, 16 Feb 2015 14:27:40 +0000 Subject: [PATCH 058/112] Mark old events as outliers. This is to fix the issue where if a remote server sends an event that references a really "old" event, then the local server will pull that in and send to all clients. We decide if an event is old if its depth is less than the minimum depth of the room. 
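Roughly, the rule being added amounts to the following standalone sketch (made-up function and names, not the actual _handle_new_pdu code):

    def classify_incoming_pdu(pdu_depth, room_min_depth):
        """Decide how to treat a newly received remote event.

        Events below the room's minimum known depth are almost certainly
        ancient history dragged in via a stray prev_event/auth reference,
        so they are kept as outliers and never pushed out to clients.
        """
        if room_min_depth and pdu_depth < room_min_depth:
            return "outlier"   # persist for reference, but do not notify
        return "normal"        # handle (and backfill any gap) as usual

    print(classify_incoming_pdu(pdu_depth=3, room_min_depth=100))    # outlier
    print(classify_incoming_pdu(pdu_depth=150, room_min_depth=100))  # normal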
--- synapse/federation/federation_server.py | 8 +++++++- 1 file changed, 7 insertions(+), 1 deletion(-) diff --git a/synapse/federation/federation_server.py b/synapse/federation/federation_server.py index 7ca8d49c54..4391a60c02 100644 --- a/synapse/federation/federation_server.py +++ b/synapse/federation/federation_server.py @@ -366,7 +366,13 @@ class FederationServer(FederationBase): pdu.room_id, min_depth ) - if min_depth and pdu.depth > min_depth and max_recursion > 0: + if min_depth and pdu.depth < min_depth: + # This is so that we don't notify the user about this + # message, to work around the fact that some events will + # reference really really old events we really don't want to + # send to the clients. + pdu.internal_metadata.outlier = True + elif min_depth and pdu.depth > min_depth and max_recursion > 0: for event_id, hashes in pdu.prev_events: if event_id not in have_seen: logger.debug( From 2674aeb96a1b75583dc3ea514cbce580e8ae35c8 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Mon, 16 Feb 2015 16:16:47 +0000 Subject: [PATCH 059/112] Factor out ExpiringCache from StateHandler --- synapse/state.py | 46 +++----------- synapse/util/expiringcache.py | 115 ++++++++++++++++++++++++++++++++++ 2 files changed, 124 insertions(+), 37 deletions(-) create mode 100644 synapse/util/expiringcache.py diff --git a/synapse/state.py b/synapse/state.py index fe5f3dc84b..80cced351d 100644 --- a/synapse/state.py +++ b/synapse/state.py @@ -18,6 +18,7 @@ from twisted.internet import defer from synapse.util.logutils import log_function from synapse.util.async import run_on_reactor +from synapse.util.expiringcache import ExpiringCache from synapse.api.constants import EventTypes from synapse.api.errors import AuthError from synapse.events.snapshot import EventContext @@ -51,7 +52,6 @@ class _StateCacheEntry(object): def __init__(self, state, state_group, ts): self.state = state self.state_group = state_group - self.ts = ts class StateHandler(object): @@ -69,12 +69,15 @@ class StateHandler(object): def start_caching(self): logger.debug("start_caching") - self._state_cache = {} + self._state_cache = ExpiringCache( + cache_name="state_cache", + clock=self.clock, + max_len=SIZE_OF_CACHE, + expiry_ms=EVICTION_TIMEOUT_SECONDS*1000, + reset_expiry_on_get=True, + ) - def f(): - self._prune_cache() - - self.clock.looping_call(f, 5*1000) + self._state_cache.start() @defer.inlineCallbacks def get_current_state(self, room_id, event_type=None, state_key=""): @@ -409,34 +412,3 @@ class StateHandler(object): return -int(e.depth), hashlib.sha1(e.event_id).hexdigest() return sorted(events, key=key_func) - - def _prune_cache(self): - logger.debug( - "_prune_cache. before len: %d", - len(self._state_cache.keys()) - ) - - now = self.clock.time_msec() - - if len(self._state_cache.keys()) > SIZE_OF_CACHE: - sorted_entries = sorted( - self._state_cache.items(), - key=lambda k, v: v.ts, - ) - - for k, _ in sorted_entries[SIZE_OF_CACHE:]: - self._state_cache.pop(k) - - keys_to_delete = set() - - for key, cache_entry in self._state_cache.items(): - if now - cache_entry.ts > EVICTION_TIMEOUT_SECONDS*1000: - keys_to_delete.add(key) - - for k in keys_to_delete: - self._state_cache.pop(k) - - logger.debug( - "_prune_cache. 
after len: %d", - len(self._state_cache.keys()) - ) diff --git a/synapse/util/expiringcache.py b/synapse/util/expiringcache.py new file mode 100644 index 0000000000..fb5b27ad10 --- /dev/null +++ b/synapse/util/expiringcache.py @@ -0,0 +1,115 @@ +# -*- coding: utf-8 -*- +# Copyright 2015 OpenMarket Ltd +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import logging + + +logger = logging.getLogger(__name__) + + +class ExpiringCache(object): + def __init__(self, cache_name, clock, max_len=0, expiry_ms=0, + reset_expiry_on_get=False): + """ + Args: + cache_name (str): Name of this cache, used for logging. + clock (Clock) + max_len (int): Max size of dict. If the dict grows larger than this + then the oldest items get automatically evicted. Default is 0, + which indicates there is no max limit. + expiry_ms (int): How long before an item is evicted from the cache + in milliseconds. Default is 0, indicating items never get + evicted based on time. + reset_expiry_on_get (bool): If true, will reset the expiry time for + an item on access. Defaults to False. + + """ + self._cache_name = cache_name + + self._clock = clock + + self._max_len = max_len + self._expiry_ms = expiry_ms + + self._reset_expiry_on_get = reset_expiry_on_get + + self._cache = {} + + def start(self): + if not self._expiry_ms: + # Don't bother starting the loop if things never expire + return + + def f(): + self._prune_cache() + + self._clock.looping_call(f, self._expiry_ms) + + def __setitem__(self, key, value): + now = self._clock.time_msec() + self._cache[key] = _CacheEntry(now, value) + + # Evict if there are now too many items + if self._max_len and len(self._cache.keys()) > self._max_len: + sorted_entries = sorted( + self._cache.items(), + key=lambda k, v: v.time, + ) + + for k, _ in sorted_entries[self._max_len:]: + self._cache.pop(k) + + def __getitem__(self, key): + entry = self._cache[key] + + if self._reset_expiry_on_get: + entry.time = self._clock.time_msec() + + return entry.value + + def get(self, key, default=None): + try: + return self[key] + except KeyError: + return default + + def _purge_cache(self): + if not self._expiry_ms: + # zero expiry time means don't expire. This should never get called + # since we have this check in start too. + return + begin_length = len(self._cache) + + now = self._clock.time_msec() + + keys_to_delete = set() + + for key, cache_entry in self._cache.items(): + if now - cache_entry.time > self._expiry_ms: + keys_to_delete.add(key) + + for k in keys_to_delete: + self._cache.pop(k) + + logger.debug( + "[%s] _prune_cache before: %d, after len: %d", + self._cache_name, begin_length, len(self._cache.keys()) + ) + + +class _CacheEntry(object): + def __init__(self, time, value): + self.time = time + self.value = value From baa5b9a97582d4b3c825be1225aba7863230c047 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Mon, 16 Feb 2015 18:02:39 +0000 Subject: [PATCH 060/112] Cache results of get_pdu. 
--- synapse/federation/federation_client.py | 42 +++++++++++++++++++++++-- 1 file changed, 39 insertions(+), 3 deletions(-) diff --git a/synapse/federation/federation_client.py b/synapse/federation/federation_client.py index 70c9a6f46b..83b4947b99 100644 --- a/synapse/federation/federation_client.py +++ b/synapse/federation/federation_client.py @@ -19,7 +19,8 @@ from twisted.internet import defer from .federation_base import FederationBase from .units import Edu -from synapse.api.errors import CodeMessageException +from synapse.api.errors import CodeMessageException, SynapseError +from synapse.util.expiringcache import ExpiringCache from synapse.util.logutils import log_function from synapse.events import FrozenEvent @@ -30,6 +31,20 @@ logger = logging.getLogger(__name__) class FederationClient(FederationBase): + def __init__(self): + self._fail_fetch_pdu_cache = None + + def start_pdu_fail_cache(self): + self._fail_fetch_pdu_cache = ExpiringCache( + cache_name="get_pdu_cache", + clock=self._clock, + max_len=1000, + expiry_ms=120*1000, + reset_expiry_on_get=False, + ) + + self._fail_fetch_pdu_cache.start() + @log_function def send_pdu(self, pdu, destinations): """Informs the replication layer about a new PDU generated within the @@ -160,6 +175,11 @@ class FederationClient(FederationBase): # TODO: Rate limit the number of times we try and get the same event. + if self._fail_fetch_pdu_cache: + e = self._fail_fetch_pdu_cache.get(event_id) + if e: + defer.returnValue(e) + pdu = None for destination in destinations: try: @@ -181,8 +201,21 @@ class FederationClient(FederationBase): pdu = yield self._check_sigs_and_hash(pdu) break - except CodeMessageException: - raise + except SynapseError: + logger.info( + "Failed to get PDU %s from %s because %s", + event_id, destination, e, + ) + continue + except CodeMessageException as e: + if 400 <= e.code < 500: + raise + + logger.info( + "Failed to get PDU %s from %s because %s", + event_id, destination, e, + ) + continue except Exception as e: logger.info( "Failed to get PDU %s from %s because %s", @@ -190,6 +223,9 @@ class FederationClient(FederationBase): ) continue + if self._fail_fetch_pdu_cache is not None: + self._fail_fetch_pdu_cache[event_id] = pdu + defer.returnValue(pdu) @defer.inlineCallbacks From 72a4de2ce627528a13bda480403344ffde6275d3 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Tue, 17 Feb 2015 10:03:23 +0000 Subject: [PATCH 061/112] Use consumeErrors=True on all DeferredLists. This is so that the DeferredLists actually consume the error instead of propogating down the non-existent errback chain. This should reduce the number of unhandled errors we are seeing. 
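For the Twisted detail being relied on here: a DeferredList built with the default consumeErrors=False leaves each failure on the original Deferred's errback chain, where, if nothing else handles it, it is eventually logged as an unhandled error; with consumeErrors=True the list takes ownership of the failure and only reports it in its result tuples. A standalone illustration (not code from this patch):

    from twisted.internet import defer

    d_ok = defer.succeed("ok")
    d_bad = defer.fail(RuntimeError("boom"))

    # consumeErrors=True: d_bad's failure is swallowed by the list rather
    # than being left dangling on d_bad's own errback chain.
    dlist = defer.DeferredList([d_ok, d_bad], consumeErrors=True)

    def report(results):
        for succeeded, value in results:
            print(succeeded, value)   # (True, 'ok') then (False, <Failure ...>)

    dlist.addCallback(report)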
--- synapse/federation/federation_server.py | 2 +- synapse/federation/transaction_queue.py | 2 +- synapse/handlers/presence.py | 8 ++++---- synapse/notifier.py | 6 ++++-- synapse/storage/roommember.py | 2 +- 5 files changed, 11 insertions(+), 9 deletions(-) diff --git a/synapse/federation/federation_server.py b/synapse/federation/federation_server.py index 9f5c98694c..e94d0411b4 100644 --- a/synapse/federation/federation_server.py +++ b/synapse/federation/federation_server.py @@ -124,7 +124,7 @@ class FederationServer(FederationBase): edu.content ) - results = yield defer.DeferredList(dl) + results = yield defer.DeferredList(dl, consumeErrors=True) ret = [] for r in results: diff --git a/synapse/federation/transaction_queue.py b/synapse/federation/transaction_queue.py index 731019ad9f..bb20f2ebab 100644 --- a/synapse/federation/transaction_queue.py +++ b/synapse/federation/transaction_queue.py @@ -98,7 +98,7 @@ class TransactionQueue(object): deferreds.append(deferred) - yield defer.DeferredList(deferreds) + yield defer.DeferredList(deferreds, consumeErrors=True) # NO inlineCallbacks def enqueue_edu(self, edu): diff --git a/synapse/handlers/presence.py b/synapse/handlers/presence.py index 59287010ed..8ef248ecf2 100644 --- a/synapse/handlers/presence.py +++ b/synapse/handlers/presence.py @@ -492,7 +492,7 @@ class PresenceHandler(BaseHandler): user, domain, remoteusers )) - yield defer.DeferredList(deferreds) + yield defer.DeferredList(deferreds, consumeErrors=True) def _start_polling_local(self, user, target_user): target_localpart = target_user.localpart @@ -548,7 +548,7 @@ class PresenceHandler(BaseHandler): self._stop_polling_remote(user, domain, remoteusers) ) - return defer.DeferredList(deferreds) + return defer.DeferredList(deferreds, consumeErrors=True) def _stop_polling_local(self, user, target_user): for localpart in self._local_pushmap.keys(): @@ -729,7 +729,7 @@ class PresenceHandler(BaseHandler): del self._remote_sendmap[user] with PreserveLoggingContext(): - yield defer.DeferredList(deferreds) + yield defer.DeferredList(deferreds, consumeErrors=True) @defer.inlineCallbacks def push_update_to_local_and_remote(self, observed_user, statuscache, @@ -768,7 +768,7 @@ class PresenceHandler(BaseHandler): ) ) - yield defer.DeferredList(deferreds) + yield defer.DeferredList(deferreds, consumeErrors=True) defer.returnValue((localusers, remote_domains)) diff --git a/synapse/notifier.py b/synapse/notifier.py index e3b6ead620..f5a394596d 100644 --- a/synapse/notifier.py +++ b/synapse/notifier.py @@ -135,7 +135,8 @@ class Notifier(object): with PreserveLoggingContext(): yield defer.DeferredList( - [notify(l).addErrback(eb) for l in listeners] + [notify(l).addErrback(eb) for l in listeners], + consumeErrors=True, ) @defer.inlineCallbacks @@ -203,7 +204,8 @@ class Notifier(object): with PreserveLoggingContext(): yield defer.DeferredList( - [notify(l).addErrback(eb) for l in listeners] + [notify(l).addErrback(eb) for l in listeners], + consumeErrors=True, ) @defer.inlineCallbacks diff --git a/synapse/storage/roommember.py b/synapse/storage/roommember.py index 779f9ce544..9bf608bc90 100644 --- a/synapse/storage/roommember.py +++ b/synapse/storage/roommember.py @@ -288,7 +288,7 @@ class RoomMemberStore(SQLBaseStore): deferreds = [self.get_rooms_for_user(u) for u in user_id_list] - results = yield defer.DeferredList(deferreds) + results = yield defer.DeferredList(deferreds, consumeErrors=True) # A list of sets of strings giving room IDs for each user room_id_lists = [set([r.room_id for r in 
result[1]]) for result in results] From ea1d6c16cd2b91ec2980eef31446990cf305b796 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Tue, 17 Feb 2015 10:54:06 +0000 Subject: [PATCH 062/112] Don't write bytecode --- synapse/app/homeserver.py | 4 +++- synapse/app/synctl.py | 2 +- 2 files changed, 4 insertions(+), 2 deletions(-) diff --git a/synapse/app/homeserver.py b/synapse/app/homeserver.py index f5681fac20..18585e87f2 100755 --- a/synapse/app/homeserver.py +++ b/synapse/app/homeserver.py @@ -14,6 +14,9 @@ # See the License for the specific language governing permissions and # limitations under the License. +import sys +sys.dont_write_bytecode = True + from synapse.storage import prepare_database, UpgradeDatabaseException from synapse.server import HomeServer @@ -48,7 +51,6 @@ import synapse import logging import os import re -import sys import sqlite3 import syweb diff --git a/synapse/app/synctl.py b/synapse/app/synctl.py index 363c20f994..3a70a248dc 100755 --- a/synapse/app/synctl.py +++ b/synapse/app/synctl.py @@ -19,7 +19,7 @@ import os import subprocess import signal -SYNAPSE = ["python", "-m", "synapse.app.homeserver"] +SYNAPSE = ["python", "-B", "-m", "synapse.app.homeserver"] CONFIGFILE = "homeserver.yaml" PIDFILE = "homeserver.pid" From c2b1dbd84c27be8759b14502b124504cba4c9295 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Tue, 17 Feb 2015 11:11:11 +0000 Subject: [PATCH 063/112] We do want to consumeError --- synapse/handlers/typing.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/synapse/handlers/typing.py b/synapse/handlers/typing.py index c69787005f..c2762f92c7 100644 --- a/synapse/handlers/typing.py +++ b/synapse/handlers/typing.py @@ -181,7 +181,7 @@ class TypingNotificationHandler(BaseHandler): }, )) - yield defer.DeferredList(deferreds, consumeErrors=False) + yield defer.DeferredList(deferreds, consumeErrors=True) @defer.inlineCallbacks def _recv_edu(self, origin, content): From 02bfa889de1990eb8dc47770834bef777252dc82 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Tue, 17 Feb 2015 13:08:27 +0000 Subject: [PATCH 064/112] Handle recieving failures in transactions --- synapse/federation/federation_server.py | 13 ++++++++++++- 1 file changed, 12 insertions(+), 1 deletion(-) diff --git a/synapse/federation/federation_server.py b/synapse/federation/federation_server.py index e94d0411b4..34f5b17446 100644 --- a/synapse/federation/federation_server.py +++ b/synapse/federation/federation_server.py @@ -114,7 +114,15 @@ class FederationServer(FederationBase): with PreserveLoggingContext(): dl = [] for pdu in pdu_list: - dl.append(self._handle_new_pdu(transaction.origin, pdu)) + d = self._handle_new_pdu(transaction.origin, pdu) + + def handle_failure(failure): + failure.trap(FederationError) + self.enqueue_failure(failure.value, transaction.origin) + + d.addErrback(handle_failure) + + dl.append(d) if hasattr(transaction, "edus"): for edu in [Edu(**x) for x in transaction.edus]: @@ -124,6 +132,9 @@ class FederationServer(FederationBase): edu.content ) + for failure in getattr(transaction, "failures", []): + logger.info("Got failure %r", failure) + results = yield defer.DeferredList(dl, consumeErrors=True) ret = [] From 47281f8fa4b3f529f84409367736483da66416ae Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Tue, 17 Feb 2015 11:51:22 +0000 Subject: [PATCH 065/112] Change some debug logging to info --- synapse/handlers/federation.py | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/synapse/handlers/federation.py 
b/synapse/handlers/federation.py index 3aecc1ca51..0eb2ff95ca 100644 --- a/synapse/handlers/federation.py +++ b/synapse/handlers/federation.py @@ -802,7 +802,7 @@ class FederationHandler(BaseHandler): missing_auth = event_auth_events - seen_events if missing_auth: - logger.debug("Missing auth: %s", missing_auth) + logger.info("Missing auth: %s", missing_auth) # If we don't have all the auth events, we need to get them. try: remote_auth_chain = yield self.replication_layer.get_event_auth( @@ -856,7 +856,7 @@ class FederationHandler(BaseHandler): if different_auth and not event.internal_metadata.is_outlier(): # Do auth conflict res. - logger.debug("Different auth: %s", different_auth) + logger.info("Different auth: %s", different_auth) different_events = yield defer.gatherResults( [ @@ -892,6 +892,8 @@ class FederationHandler(BaseHandler): context.state_group = None if different_auth and not event.internal_metadata.is_outlier(): + logger.info("Different auth after resolution: %s", different_auth) + # Only do auth resolution if we have something new to say. # We can't rove an auth failure. do_resolution = False From c82e26ad4ba80742adc68a7e8c52441d760e4746 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Tue, 17 Feb 2015 13:24:13 +0000 Subject: [PATCH 066/112] Actually respond with JSON to incoming transaction --- synapse/federation/federation_server.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/synapse/federation/federation_server.py b/synapse/federation/federation_server.py index 34f5b17446..c48a41b5d5 100644 --- a/synapse/federation/federation_server.py +++ b/synapse/federation/federation_server.py @@ -147,6 +147,8 @@ class FederationServer(FederationBase): logger.debug("Returning: %s", str(ret)) + response = ret + yield self.transaction_actions.set_response( transaction, 200, response From 659ead082ff11842fea803e44d75667e0ca38d71 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Tue, 17 Feb 2015 13:58:52 +0000 Subject: [PATCH 067/112] Format the response of transaction request in a nicer way --- synapse/federation/federation_server.py | 19 +++++++++++++++---- synapse/federation/transaction_queue.py | 22 ++++++++++++++++++++-- 2 files changed, 35 insertions(+), 6 deletions(-) diff --git a/synapse/federation/federation_server.py b/synapse/federation/federation_server.py index c48a41b5d5..d1ec0b9eac 100644 --- a/synapse/federation/federation_server.py +++ b/synapse/federation/federation_server.py @@ -118,7 +118,7 @@ class FederationServer(FederationBase): def handle_failure(failure): failure.trap(FederationError) - self.enqueue_failure(failure.value, transaction.origin) + self.send_failure(failure.value, transaction.origin) d.addErrback(handle_failure) @@ -132,7 +132,7 @@ class FederationServer(FederationBase): edu.content ) - for failure in getattr(transaction, "failures", []): + for failure in getattr(transaction, "pdu_failures", []): logger.info("Got failure %r", failure) results = yield defer.DeferredList(dl, consumeErrors=True) @@ -143,11 +143,15 @@ class FederationServer(FederationBase): ret.append({}) else: logger.exception(r[1]) - ret.append({"error": str(r[1])}) + ret.append({"error": str(r[1].value)}) logger.debug("Returning: %s", str(ret)) - response = ret + response = { + "pdus": dict(zip( + (p.event_id for p in pdu_list), ret + )), + } yield self.transaction_actions.set_response( transaction, @@ -358,6 +362,13 @@ class FederationServer(FederationBase): affected=pdu.event_id, ) + raise FederationError( + "ERROR", + 403, + "Forbidden", + affected=pdu.event_id, + ) + 
state = None auth_chain = [] diff --git a/synapse/federation/transaction_queue.py b/synapse/federation/transaction_queue.py index bb20f2ebab..6faaa066fb 100644 --- a/synapse/federation/transaction_queue.py +++ b/synapse/federation/transaction_queue.py @@ -91,7 +91,7 @@ class TransactionQueue(object): if not deferred.called: deferred.errback(failure) else: - logger.warn("Failed to send pdu", failure) + logger.warn("Failed to send pdu", failure.value) with PreserveLoggingContext(): self._attempt_new_transaction(destination).addErrback(eb) @@ -116,7 +116,7 @@ class TransactionQueue(object): if not deferred.called: deferred.errback(failure) else: - logger.warn("Failed to send edu", failure) + logger.warn("Failed to send edu", failure.value) with PreserveLoggingContext(): self._attempt_new_transaction(destination).addErrback(eb) @@ -133,6 +133,15 @@ class TransactionQueue(object): (failure, deferred) ) + def eb(failure): + if not deferred.called: + deferred.errback(failure) + else: + logger.warn("Failed to send failure", failure.value) + + with PreserveLoggingContext(): + self._attempt_new_transaction(destination).addErrback(eb) + yield deferred @defer.inlineCallbacks @@ -249,6 +258,15 @@ class TransactionQueue(object): transaction, json_data_cb ) code = 200 + + if response: + for e_id, r in getattr(response, "pdus", {}).items(): + if "error" in r: + logger.warn( + "Transaction returned error for %s: %s", + e_id, r, + ) + except HttpResponseException as e: code = e.code response = e.response From 4de93001bf6a7d8e770b990ea3546237b2569609 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Tue, 17 Feb 2015 15:12:06 +0000 Subject: [PATCH 068/112] Make matrixfederationclient log more nicely --- synapse/http/matrixfederationclient.py | 23 ++++++++++++----------- 1 file changed, 12 insertions(+), 11 deletions(-) diff --git a/synapse/http/matrixfederationclient.py b/synapse/http/matrixfederationclient.py index 1927948001..764b151d9a 100644 --- a/synapse/http/matrixfederationclient.py +++ b/synapse/http/matrixfederationclient.py @@ -16,6 +16,7 @@ from twisted.internet import defer, reactor, protocol from twisted.internet.error import DNSLookupError +from twisted.python.failure import Failure from twisted.web.client import readBody, _AgentBase, _URI from twisted.web.http_headers import Headers from twisted.web._newclient import ResponseDone @@ -146,14 +147,22 @@ class MatrixFederationHttpClient(object): ) raise SynapseError(400, "Domain specified not found.") + if hasattr(e, "reasons"): + reasons = ", ".join( + f.value.message + for f in e.reasons + ) + else: + reasons = e.message + logger.warn( - "Sending request failed to %s: %s %s : %s", + "Sending request failed to %s: %s %s: %s - %s", destination, method, url_bytes, - e + type(e). 
__name__, + reasons, ) - _print_ex(e) if retries_left: yield sleep(2 ** (5 - retries_left)) @@ -447,14 +456,6 @@ def _readBodyToFile(response, stream, max_size): return d -def _print_ex(e): - if hasattr(e, "reasons") and e.reasons: - for ex in e.reasons: - _print_ex(ex) - else: - logger.warn(e) - - class _JsonProducer(object): """ Used by the twisted http client to create the HTTP body from json """ From 472734a8cc19d4e4b9f5e311b59f58a37142f8a2 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Tue, 17 Feb 2015 15:13:50 +0000 Subject: [PATCH 069/112] Consume errors in time_bound_deferred --- synapse/util/__init__.py | 2 -- 1 file changed, 2 deletions(-) diff --git a/synapse/util/__init__.py b/synapse/util/__init__.py index e77eba90ad..79109d0b19 100644 --- a/synapse/util/__init__.py +++ b/synapse/util/__init__.py @@ -99,8 +99,6 @@ class Clock(object): except: pass - return res - given_deferred.addCallbacks(callback=sucess, errback=err) timer = self.call_later(time_out, timed_out_fn) From 0647e27a414e5a86cab57bba65931376e855c289 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Tue, 17 Feb 2015 15:19:54 +0000 Subject: [PATCH 070/112] Remove unused import --- synapse/http/matrixfederationclient.py | 1 - 1 file changed, 1 deletion(-) diff --git a/synapse/http/matrixfederationclient.py b/synapse/http/matrixfederationclient.py index 764b151d9a..454c3d4ab1 100644 --- a/synapse/http/matrixfederationclient.py +++ b/synapse/http/matrixfederationclient.py @@ -16,7 +16,6 @@ from twisted.internet import defer, reactor, protocol from twisted.internet.error import DNSLookupError -from twisted.python.failure import Failure from twisted.web.client import readBody, _AgentBase, _URI from twisted.web.http_headers import Headers from twisted.web._newclient import ResponseDone From 676e8ee78ae3f51cbc0113c8d810d4bc1e81cdab Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Tue, 17 Feb 2015 15:22:45 +0000 Subject: [PATCH 071/112] Remove debug raise --- synapse/federation/federation_server.py | 7 ------- 1 file changed, 7 deletions(-) diff --git a/synapse/federation/federation_server.py b/synapse/federation/federation_server.py index 679fb141e7..22b9663831 100644 --- a/synapse/federation/federation_server.py +++ b/synapse/federation/federation_server.py @@ -361,13 +361,6 @@ class FederationServer(FederationBase): affected=pdu.event_id, ) - raise FederationError( - "ERROR", - 403, - "Forbidden", - affected=pdu.event_id, - ) - state = None auth_chain = [] From 8b919c00f30c03ddae257f0129f58f2d0285723c Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Tue, 17 Feb 2015 15:44:01 +0000 Subject: [PATCH 072/112] Start the get_pdu cache --- synapse/app/homeserver.py | 1 + 1 file changed, 1 insertion(+) diff --git a/synapse/app/homeserver.py b/synapse/app/homeserver.py index 27b478a1c3..7565d94449 100755 --- a/synapse/app/homeserver.py +++ b/synapse/app/homeserver.py @@ -275,6 +275,7 @@ def setup(): hs.get_pusherpool().start() hs.get_state_handler().start_caching() hs.get_datastore().start_profiling() + hs.get_replication_layer().start_pdu_fail_cache() if config.daemonize: print config.pid_file From e7e20417ca468c1afe2e27162b9790f860d2da51 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Tue, 17 Feb 2015 15:44:26 +0000 Subject: [PATCH 073/112] ExpiringCache: purge every 1/2 interval --- synapse/util/expiringcache.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/synapse/util/expiringcache.py b/synapse/util/expiringcache.py index fb5b27ad10..3c8409b164 100644 --- a/synapse/util/expiringcache.py +++ 
b/synapse/util/expiringcache.py @@ -55,7 +55,7 @@ class ExpiringCache(object): def f(): self._prune_cache() - self._clock.looping_call(f, self._expiry_ms) + self._clock.looping_call(f, self._expiry_ms/2) def __setitem__(self, key, value): now = self._clock.time_msec() From 964bb43fbe15105c58f9550f376fa76709734cd8 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Tue, 17 Feb 2015 15:44:41 +0000 Subject: [PATCH 074/112] Fix typo in function name --- synapse/util/expiringcache.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/synapse/util/expiringcache.py b/synapse/util/expiringcache.py index 3c8409b164..1c7859297a 100644 --- a/synapse/util/expiringcache.py +++ b/synapse/util/expiringcache.py @@ -85,7 +85,7 @@ class ExpiringCache(object): except KeyError: return default - def _purge_cache(self): + def _prune_cache(self): if not self._expiry_ms: # zero expiry time means don't expire. This should never get called # since we have this check in start too. From 1a989c436cda4926f11b04b4f26d83e7d3ce9ef5 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Tue, 17 Feb 2015 15:45:55 +0000 Subject: [PATCH 075/112] Bump schema version --- synapse/storage/__init__.py | 2 +- synapse/storage/schema/delta/{v14.sql => v13.sql} | 0 2 files changed, 1 insertion(+), 1 deletion(-) rename synapse/storage/schema/delta/{v14.sql => v13.sql} (100%) diff --git a/synapse/storage/__init__.py b/synapse/storage/__init__.py index c6e96b842f..ec701014a9 100644 --- a/synapse/storage/__init__.py +++ b/synapse/storage/__init__.py @@ -74,7 +74,7 @@ SCHEMAS = [ # Remember to update this number every time an incompatible change is made to # database schema files, so the users will be informed on server restarts. -SCHEMA_VERSION = 12 +SCHEMA_VERSION = 13 class _RollbackButIsFineException(Exception): diff --git a/synapse/storage/schema/delta/v14.sql b/synapse/storage/schema/delta/v13.sql similarity index 100% rename from synapse/storage/schema/delta/v14.sql rename to synapse/storage/schema/delta/v13.sql From 5025305fb250c358236584cae06bf0855f02b0a4 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Tue, 17 Feb 2015 15:57:42 +0000 Subject: [PATCH 076/112] Rate limit retries when fetching server keys. --- synapse/crypto/keyring.py | 136 +++++++++++++++++++++----------------- 1 file changed, 76 insertions(+), 60 deletions(-) diff --git a/synapse/crypto/keyring.py b/synapse/crypto/keyring.py index 3fb99f7125..3250cff595 100644 --- a/synapse/crypto/keyring.py +++ b/synapse/crypto/keyring.py @@ -87,69 +87,85 @@ class Keyring(object): return # Try to fetch the key from the remote server. - # TODO(markjh): Ratelimit requests to a given server. - (response, tls_certificate) = yield fetch_server_key( - server_name, self.hs.tls_context_factory + retry_last_ts, retry_interval = (0, 0) + retry_timings = yield self.store.get_destination_retry_timings( + server_name ) + if retry_timings: + retry_last_ts, retry_interval = ( + retry_timings.retry_last_ts, retry_timings.retry_interval + ) + if retry_last_ts + retry_interval > int(self.clock.time_msec()): + logger.info("%s not ready for retry", server_name) + raise ValueError("No verification key found for given key ids") - # Check the response. 
- - x509_certificate_bytes = crypto.dump_certificate( - crypto.FILETYPE_ASN1, tls_certificate - ) - - if ("signatures" not in response - or server_name not in response["signatures"]): - raise ValueError("Key response not signed by remote server") - - if "tls_certificate" not in response: - raise ValueError("Key response missing TLS certificate") - - tls_certificate_b64 = response["tls_certificate"] - - if encode_base64(x509_certificate_bytes) != tls_certificate_b64: - raise ValueError("TLS certificate doesn't match") - - verify_keys = {} - for key_id, key_base64 in response["verify_keys"].items(): - if is_signing_algorithm_supported(key_id): - key_bytes = decode_base64(key_base64) - verify_key = decode_verify_key_bytes(key_id, key_bytes) - verify_keys[key_id] = verify_key - - for key_id in response["signatures"][server_name]: - if key_id not in response["verify_keys"]: - raise ValueError( - "Key response must include verification keys for all" - " signatures" - ) - if key_id in verify_keys: - verify_signed_json( - response, - server_name, - verify_keys[key_id] - ) - - # Cache the result in the datastore. - - time_now_ms = self.clock.time_msec() - - yield self.store.store_server_certificate( - server_name, - server_name, - time_now_ms, - tls_certificate, - ) - - for key_id, key in verify_keys.items(): - yield self.store.store_server_verify_key( - server_name, server_name, time_now_ms, key + try: + (response, tls_certificate) = yield fetch_server_key( + server_name, self.hs.tls_context_factory ) - for key_id in key_ids: - if key_id in verify_keys: - defer.returnValue(verify_keys[key_id]) - return + # Check the response. - raise ValueError("No verification key found for given key ids") + x509_certificate_bytes = crypto.dump_certificate( + crypto.FILETYPE_ASN1, tls_certificate + ) + + if ("signatures" not in response + or server_name not in response["signatures"]): + raise ValueError("Key response not signed by remote server") + + if "tls_certificate" not in response: + raise ValueError("Key response missing TLS certificate") + + tls_certificate_b64 = response["tls_certificate"] + + if encode_base64(x509_certificate_bytes) != tls_certificate_b64: + raise ValueError("TLS certificate doesn't match") + + verify_keys = {} + for key_id, key_base64 in response["verify_keys"].items(): + if is_signing_algorithm_supported(key_id): + key_bytes = decode_base64(key_base64) + verify_key = decode_verify_key_bytes(key_id, key_bytes) + verify_keys[key_id] = verify_key + + for key_id in response["signatures"][server_name]: + if key_id not in response["verify_keys"]: + raise ValueError( + "Key response must include verification keys for all" + " signatures" + ) + if key_id in verify_keys: + verify_signed_json( + response, + server_name, + verify_keys[key_id] + ) + + # Cache the result in the datastore. + + time_now_ms = self.clock.time_msec() + + yield self.store.store_server_certificate( + server_name, + server_name, + time_now_ms, + tls_certificate, + ) + + for key_id, key in verify_keys.items(): + yield self.store.store_server_verify_key( + server_name, server_name, time_now_ms, key + ) + + for key_id in key_ids: + if key_id in verify_keys: + defer.returnValue(verify_keys[key_id]) + return + + raise ValueError("No verification key found for given key ids") + + except: + self.set_retrying(server_name, retry_interval) + raise From 2b8f1a956c6a1d767a3b60e84e7d0afe5857fb0d Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Tue, 17 Feb 2015 17:20:56 +0000 Subject: [PATCH 077/112] Add per server retry limiting. 
Factor out the pre destination retry logic from TransactionQueue so it can be reused in both get_pdu and crypto.keyring --- synapse/crypto/keyring.py | 22 ++-- synapse/federation/federation_client.py | 36 ++++-- synapse/federation/transaction_queue.py | 161 ++++++++++-------------- synapse/util/retryutils.py | 108 ++++++++++++++++ 4 files changed, 205 insertions(+), 122 deletions(-) create mode 100644 synapse/util/retryutils.py diff --git a/synapse/crypto/keyring.py b/synapse/crypto/keyring.py index 3250cff595..ea00c830c0 100644 --- a/synapse/crypto/keyring.py +++ b/synapse/crypto/keyring.py @@ -22,6 +22,8 @@ from syutil.crypto.signing_key import ( from syutil.base64util import decode_base64, encode_base64 from synapse.api.errors import SynapseError, Codes +from synapse.util.retryutils import get_retry_limiter + from OpenSSL import crypto import logging @@ -88,19 +90,13 @@ class Keyring(object): # Try to fetch the key from the remote server. - retry_last_ts, retry_interval = (0, 0) - retry_timings = yield self.store.get_destination_retry_timings( - server_name + limiter = yield get_retry_limiter( + server_name, + self.clock, + self.store, ) - if retry_timings: - retry_last_ts, retry_interval = ( - retry_timings.retry_last_ts, retry_timings.retry_interval - ) - if retry_last_ts + retry_interval > int(self.clock.time_msec()): - logger.info("%s not ready for retry", server_name) - raise ValueError("No verification key found for given key ids") - try: + with limiter: (response, tls_certificate) = yield fetch_server_key( server_name, self.hs.tls_context_factory ) @@ -165,7 +161,3 @@ class Keyring(object): return raise ValueError("No verification key found for given key ids") - - except: - self.set_retrying(server_name, retry_interval) - raise diff --git a/synapse/federation/federation_client.py b/synapse/federation/federation_client.py index 70c9a6f46b..c5b0274c2f 100644 --- a/synapse/federation/federation_client.py +++ b/synapse/federation/federation_client.py @@ -23,6 +23,8 @@ from synapse.api.errors import CodeMessageException from synapse.util.logutils import log_function from synapse.events import FrozenEvent +from synapse.util.retryutils import get_retry_limiter, NotRetryingDestination + import logging @@ -163,24 +165,34 @@ class FederationClient(FederationBase): pdu = None for destination in destinations: try: - transaction_data = yield self.transport_layer.get_event( - destination, event_id + limiter = yield get_retry_limiter( + destination, + self._clock, + self.store, ) - logger.debug("transaction_data %r", transaction_data) + with limiter: + transaction_data = yield self.transport_layer.get_event( + destination, event_id + ) - pdu_list = [ - self.event_from_pdu_json(p, outlier=outlier) - for p in transaction_data["pdus"] - ] + logger.debug("transaction_data %r", transaction_data) - if pdu_list: - pdu = pdu_list[0] + pdu_list = [ + self.event_from_pdu_json(p, outlier=outlier) + for p in transaction_data["pdus"] + ] - # Check signatures are correct. - pdu = yield self._check_sigs_and_hash(pdu) + if pdu_list: + pdu = pdu_list[0] - break + # Check signatures are correct. 
+ pdu = yield self._check_sigs_and_hash(pdu) + + break + except NotRetryingDestination as e: + logger.info(e.message) + continue except CodeMessageException: raise except Exception as e: diff --git a/synapse/federation/transaction_queue.py b/synapse/federation/transaction_queue.py index bb20f2ebab..7d02afe163 100644 --- a/synapse/federation/transaction_queue.py +++ b/synapse/federation/transaction_queue.py @@ -22,6 +22,9 @@ from .units import Transaction from synapse.api.errors import HttpResponseException from synapse.util.logutils import log_function from synapse.util.logcontext import PreserveLoggingContext +from synapse.util.retryutils import ( + get_retry_limiter, NotRetryingDestination, +) import logging @@ -138,25 +141,6 @@ class TransactionQueue(object): @defer.inlineCallbacks @log_function def _attempt_new_transaction(self, destination): - - (retry_last_ts, retry_interval) = (0, 0) - retry_timings = yield self.store.get_destination_retry_timings( - destination - ) - if retry_timings: - (retry_last_ts, retry_interval) = ( - retry_timings.retry_last_ts, retry_timings.retry_interval - ) - if retry_last_ts + retry_interval > int(self._clock.time_msec()): - logger.info( - "TX [%s] not ready for retry yet - " - "dropping transaction for now", - destination, - ) - return - else: - logger.info("TX [%s] is ready for retry", destination) - if destination in self.pending_transactions: # XXX: pending_transactions can get stuck on by a never-ending # request at which point pending_pdus_by_dest just keeps growing. @@ -204,77 +188,79 @@ class TransactionQueue(object): ] try: - self.pending_transactions[destination] = 1 - - logger.debug("TX [%s] Persisting transaction...", destination) - - transaction = Transaction.create_new( - origin_server_ts=int(self._clock.time_msec()), - transaction_id=str(self._next_txn_id), - origin=self.server_name, - destination=destination, - pdus=pdus, - edus=edus, - pdu_failures=failures, - ) - - self._next_txn_id += 1 - - yield self.transaction_actions.prepare_to_send(transaction) - - logger.debug("TX [%s] Persisted transaction", destination) - logger.info( - "TX [%s] Sending transaction [%s]", + limiter = yield get_retry_limiter( destination, - transaction.transaction_id, + self._clock, + self.store, ) - # Actually send the transaction + with limiter: + self.pending_transactions[destination] = 1 - # FIXME (erikj): This is a bit of a hack to make the Pdu age - # keys work - def json_data_cb(): - data = transaction.get_dict() - now = int(self._clock.time_msec()) - if "pdus" in data: - for p in data["pdus"]: - if "age_ts" in p: - unsigned = p.setdefault("unsigned", {}) - unsigned["age"] = now - int(p["age_ts"]) - del p["age_ts"] - return data + logger.debug("TX [%s] Persisting transaction...", destination) - try: - response = yield self.transport_layer.send_transaction( - transaction, json_data_cb + transaction = Transaction.create_new( + origin_server_ts=int(self._clock.time_msec()), + transaction_id=str(self._next_txn_id), + origin=self.server_name, + destination=destination, + pdus=pdus, + edus=edus, + pdu_failures=failures, ) - code = 200 - except HttpResponseException as e: - code = e.code - response = e.response - logger.info("TX [%s] got %d response", destination, code) + self._next_txn_id += 1 - logger.debug("TX [%s] Sent transaction", destination) - logger.debug("TX [%s] Marking as delivered...", destination) + yield self.transaction_actions.prepare_to_send(transaction) - yield self.transaction_actions.delivered( - transaction, code, response - ) + 
logger.debug("TX [%s] Persisted transaction", destination) + logger.info( + "TX [%s] Sending transaction [%s]", + destination, + transaction.transaction_id, + ) + + # Actually send the transaction + + # FIXME (erikj): This is a bit of a hack to make the Pdu age + # keys work + def json_data_cb(): + data = transaction.get_dict() + now = int(self._clock.time_msec()) + if "pdus" in data: + for p in data["pdus"]: + if "age_ts" in p: + unsigned = p.setdefault("unsigned", {}) + unsigned["age"] = now - int(p["age_ts"]) + del p["age_ts"] + return data + + try: + response = yield self.transport_layer.send_transaction( + transaction, json_data_cb + ) + code = 200 + except HttpResponseException as e: + code = e.code + response = e.response + + logger.info("TX [%s] got %d response", destination, code) + + logger.debug("TX [%s] Sent transaction", destination) + logger.debug("TX [%s] Marking as delivered...", destination) + + yield self.transaction_actions.delivered( + transaction, code, response + ) + + logger.debug("TX [%s] Marked as delivered", destination) - logger.debug("TX [%s] Marked as delivered", destination) logger.debug("TX [%s] Yielding to callbacks...", destination) for deferred in deferreds: if code == 200: - if retry_last_ts: - # this host is alive! reset retry schedule - yield self.store.set_destination_retry_timings( - destination, 0, 0 - ) deferred.callback(None) else: - self.set_retrying(destination, retry_interval) deferred.errback(RuntimeError("Got status %d" % code)) # Ensures we don't continue until all callbacks on that @@ -285,6 +271,12 @@ class TransactionQueue(object): pass logger.debug("TX [%s] Yielded to callbacks", destination) + except NotRetryingDestination: + logger.info( + "TX [%s] not ready for retry yet - " + "dropping transaction for now", + destination, + ) except RuntimeError as e: # We capture this here as there as nothing actually listens # for this finishing functions deferred. @@ -302,8 +294,6 @@ class TransactionQueue(object): e, ) - self.set_retrying(destination, retry_interval) - for deferred in deferreds: if not deferred.called: deferred.errback(e) @@ -314,22 +304,3 @@ class TransactionQueue(object): # Check to see if there is anything else to send. self._attempt_new_transaction(destination) - - @defer.inlineCallbacks - def set_retrying(self, destination, retry_interval): - # track that this destination is having problems and we should - # give it a chance to recover before trying it again - - if retry_interval: - retry_interval *= 2 - # plateau at hourly retries for now - if retry_interval >= 60 * 60 * 1000: - retry_interval = 60 * 60 * 1000 - else: - retry_interval = 2000 # try again at first after 2 seconds - - yield self.store.set_destination_retry_timings( - destination, - int(self._clock.time_msec()), - retry_interval - ) diff --git a/synapse/util/retryutils.py b/synapse/util/retryutils.py new file mode 100644 index 0000000000..bba524ebd8 --- /dev/null +++ b/synapse/util/retryutils.py @@ -0,0 +1,108 @@ +# -*- coding: utf-8 -*- +# Copyright 2015 OpenMarket Ltd +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. + +from twisted.internet import defer + +import logging + + +logger = logging.getLogger(__name__) + + +class NotRetryingDestination(Exception): + def __init__(self, retry_last_ts, retry_interval, destination): + msg = "Not retrying server %s." % (destination,) + super(NotRetryingDestination, self).__init__(msg) + + self.retry_last_ts = retry_last_ts + self.retry_interval = retry_interval + self.destination = destination + + +@defer.inlineCallbacks +def get_retry_limiter(destination, clock, store, **kwargs): + retry_last_ts, retry_interval = (0, 0) + + retry_timings = yield store.get_destination_retry_timings( + destination + ) + + if retry_timings: + retry_last_ts, retry_interval = ( + retry_timings.retry_last_ts, retry_timings.retry_interval + ) + + now = int(clock.time_msec()) + + if retry_last_ts + retry_interval > now: + raise NotRetryingDestination( + retry_last_ts=retry_last_ts, + retry_interval=retry_interval, + destination=destination, + ) + + defer.returnValue( + RetryDestinationLimiter( + destination, + clock, + store, + retry_interval, + **kwargs + ) + ) + + +class RetryDestinationLimiter(object): + def __init__(self, destination, clock, store, retry_interval, + min_retry_interval=20000, max_retry_interval=60 * 60 * 1000, + multiplier_retry_interval=2): + self.clock = clock + self.store = store + self.destination = destination + + self.retry_interval = retry_interval + self.min_retry_interval = min_retry_interval + self.max_retry_interval = max_retry_interval + self.multiplier_retry_interval = multiplier_retry_interval + + def __enter__(self): + pass + + def __exit__(self, exc_type, exc_val, exc_tb): + def err(self, failure): + logger.exception( + "Failed to store set_destination_retry_timings", + failure.value + ) + + if exc_type is None and exc_val is None and exc_tb is None: + # We connected successfully. + retry_last_ts = 0 + self.retry_interval = 0 + else: + # We couldn't connect. + if self.retry_interval: + self.retry_interval *= self.multiplier_retry_interval + + if self.retry_interval >= self.max_retry_interval: + self.retry_interval = self.max_retry_interval + else: + self.retry_interval = self.min_retry_interval + + retry_last_ts = int(self._clock.time_msec()), + + self.store.set_destination_retry_timings( + self.destination, retry_last_ts, self.retry_interval + ).addErrback(err) From 2c29ed3e847285973ab552b9617750e1ba6693e0 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Tue, 17 Feb 2015 17:22:24 +0000 Subject: [PATCH 078/112] Use absolute path when loading delta sql files --- synapse/storage/__init__.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/synapse/storage/__init__.py b/synapse/storage/__init__.py index ec701014a9..d16e7b8fac 100644 --- a/synapse/storage/__init__.py +++ b/synapse/storage/__init__.py @@ -76,6 +76,8 @@ SCHEMAS = [ # database schema files, so the users will be informed on server restarts. SCHEMA_VERSION = 13 +dir_path = os.path.abspath(os.path.dirname(__file__)) + class _RollbackButIsFineException(Exception): """ This exception is used to rollback a transaction without implying @@ -583,7 +585,6 @@ def schema_path(schema): A filesystem path pointing at a ".sql" file. 
""" - dir_path = os.path.dirname(__file__) schemaPath = os.path.join(dir_path, "schema", schema + ".sql") return schemaPath From f91263b1e03a6e30480af1ece357c27439a8c6b3 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Tue, 17 Feb 2015 17:37:51 +0000 Subject: [PATCH 079/112] Remove spurious self --- synapse/util/retryutils.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/synapse/util/retryutils.py b/synapse/util/retryutils.py index bba524ebd8..9ab119f29e 100644 --- a/synapse/util/retryutils.py +++ b/synapse/util/retryutils.py @@ -81,7 +81,7 @@ class RetryDestinationLimiter(object): pass def __exit__(self, exc_type, exc_val, exc_tb): - def err(self, failure): + def err(failure): logger.exception( "Failed to store set_destination_retry_timings", failure.value From c8436b38a08c29fd7027e040564678ec8a635a14 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Tue, 17 Feb 2015 17:38:38 +0000 Subject: [PATCH 080/112] Only update destination_retry_timings if we have succeeded when retrying --- synapse/util/retryutils.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/synapse/util/retryutils.py b/synapse/util/retryutils.py index 9ab119f29e..888b7ef2e9 100644 --- a/synapse/util/retryutils.py +++ b/synapse/util/retryutils.py @@ -89,6 +89,9 @@ class RetryDestinationLimiter(object): if exc_type is None and exc_val is None and exc_tb is None: # We connected successfully. + if not self.retry_interval: + return + retry_last_ts = 0 self.retry_interval = 0 else: From 9371019133bf16cec163d58fe69aa701c8ca5305 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Tue, 17 Feb 2015 18:13:34 +0000 Subject: [PATCH 081/112] Try to only back off if we think we failed to connect to the remote --- synapse/crypto/keyring.py | 100 ++++++++++++------------ synapse/federation/transaction_queue.py | 76 +++++++++--------- synapse/util/retryutils.py | 10 ++- 3 files changed, 96 insertions(+), 90 deletions(-) diff --git a/synapse/crypto/keyring.py b/synapse/crypto/keyring.py index ea00c830c0..828aced44a 100644 --- a/synapse/crypto/keyring.py +++ b/synapse/crypto/keyring.py @@ -101,63 +101,63 @@ class Keyring(object): server_name, self.hs.tls_context_factory ) - # Check the response. + # Check the response. 
- x509_certificate_bytes = crypto.dump_certificate( - crypto.FILETYPE_ASN1, tls_certificate - ) + x509_certificate_bytes = crypto.dump_certificate( + crypto.FILETYPE_ASN1, tls_certificate + ) - if ("signatures" not in response - or server_name not in response["signatures"]): - raise ValueError("Key response not signed by remote server") + if ("signatures" not in response + or server_name not in response["signatures"]): + raise ValueError("Key response not signed by remote server") - if "tls_certificate" not in response: - raise ValueError("Key response missing TLS certificate") + if "tls_certificate" not in response: + raise ValueError("Key response missing TLS certificate") - tls_certificate_b64 = response["tls_certificate"] + tls_certificate_b64 = response["tls_certificate"] - if encode_base64(x509_certificate_bytes) != tls_certificate_b64: - raise ValueError("TLS certificate doesn't match") + if encode_base64(x509_certificate_bytes) != tls_certificate_b64: + raise ValueError("TLS certificate doesn't match") - verify_keys = {} - for key_id, key_base64 in response["verify_keys"].items(): - if is_signing_algorithm_supported(key_id): - key_bytes = decode_base64(key_base64) - verify_key = decode_verify_key_bytes(key_id, key_bytes) - verify_keys[key_id] = verify_key + verify_keys = {} + for key_id, key_base64 in response["verify_keys"].items(): + if is_signing_algorithm_supported(key_id): + key_bytes = decode_base64(key_base64) + verify_key = decode_verify_key_bytes(key_id, key_bytes) + verify_keys[key_id] = verify_key - for key_id in response["signatures"][server_name]: - if key_id not in response["verify_keys"]: - raise ValueError( - "Key response must include verification keys for all" - " signatures" - ) - if key_id in verify_keys: - verify_signed_json( - response, - server_name, - verify_keys[key_id] - ) - - # Cache the result in the datastore. - - time_now_ms = self.clock.time_msec() - - yield self.store.store_server_certificate( - server_name, - server_name, - time_now_ms, - tls_certificate, - ) - - for key_id, key in verify_keys.items(): - yield self.store.store_server_verify_key( - server_name, server_name, time_now_ms, key + for key_id in response["signatures"][server_name]: + if key_id not in response["verify_keys"]: + raise ValueError( + "Key response must include verification keys for all" + " signatures" + ) + if key_id in verify_keys: + verify_signed_json( + response, + server_name, + verify_keys[key_id] ) - for key_id in key_ids: - if key_id in verify_keys: - defer.returnValue(verify_keys[key_id]) - return + # Cache the result in the datastore. 
- raise ValueError("No verification key found for given key ids") + time_now_ms = self.clock.time_msec() + + yield self.store.store_server_certificate( + server_name, + server_name, + time_now_ms, + tls_certificate, + ) + + for key_id, key in verify_keys.items(): + yield self.store.store_server_verify_key( + server_name, server_name, time_now_ms, key + ) + + for key_id in key_ids: + if key_id in verify_keys: + defer.returnValue(verify_keys[key_id]) + return + + raise ValueError("No verification key found for given key ids") diff --git a/synapse/federation/transaction_queue.py b/synapse/federation/transaction_queue.py index 7d02afe163..dc0f30cb64 100644 --- a/synapse/federation/transaction_queue.py +++ b/synapse/federation/transaction_queue.py @@ -167,15 +167,6 @@ class TransactionQueue(object): logger.info("TX [%s] Nothing to send", destination) return - logger.debug( - "TX [%s] Attempting new transaction" - " (pdus: %d, edus: %d, failures: %d)", - destination, - len(pending_pdus), - len(pending_edus), - len(pending_failures) - ) - # Sort based on the order field pending_pdus.sort(key=lambda t: t[2]) @@ -194,32 +185,41 @@ class TransactionQueue(object): self.store, ) + logger.debug( + "TX [%s] Attempting new transaction" + " (pdus: %d, edus: %d, failures: %d)", + destination, + len(pending_pdus), + len(pending_edus), + len(pending_failures) + ) + + self.pending_transactions[destination] = 1 + + logger.debug("TX [%s] Persisting transaction...", destination) + + transaction = Transaction.create_new( + origin_server_ts=int(self._clock.time_msec()), + transaction_id=str(self._next_txn_id), + origin=self.server_name, + destination=destination, + pdus=pdus, + edus=edus, + pdu_failures=failures, + ) + + self._next_txn_id += 1 + + yield self.transaction_actions.prepare_to_send(transaction) + + logger.debug("TX [%s] Persisted transaction", destination) + logger.info( + "TX [%s] Sending transaction [%s]", + destination, + transaction.transaction_id, + ) + with limiter: - self.pending_transactions[destination] = 1 - - logger.debug("TX [%s] Persisting transaction...", destination) - - transaction = Transaction.create_new( - origin_server_ts=int(self._clock.time_msec()), - transaction_id=str(self._next_txn_id), - origin=self.server_name, - destination=destination, - pdus=pdus, - edus=edus, - pdu_failures=failures, - ) - - self._next_txn_id += 1 - - yield self.transaction_actions.prepare_to_send(transaction) - - logger.debug("TX [%s] Persisted transaction", destination) - logger.info( - "TX [%s] Sending transaction [%s]", - destination, - transaction.transaction_id, - ) - # Actually send the transaction # FIXME (erikj): This is a bit of a hack to make the Pdu age @@ -249,11 +249,11 @@ class TransactionQueue(object): logger.debug("TX [%s] Sent transaction", destination) logger.debug("TX [%s] Marking as delivered...", destination) - yield self.transaction_actions.delivered( - transaction, code, response - ) + yield self.transaction_actions.delivered( + transaction, code, response + ) - logger.debug("TX [%s] Marked as delivered", destination) + logger.debug("TX [%s] Marked as delivered", destination) logger.debug("TX [%s] Yielding to callbacks...", destination) diff --git a/synapse/util/retryutils.py b/synapse/util/retryutils.py index 888b7ef2e9..d190100c8c 100644 --- a/synapse/util/retryutils.py +++ b/synapse/util/retryutils.py @@ -15,6 +15,8 @@ from twisted.internet import defer +from synapse.api.errors import CodeMessageException + import logging @@ -67,7 +69,7 @@ def get_retry_limiter(destination, 
clock, store, **kwargs): class RetryDestinationLimiter(object): def __init__(self, destination, clock, store, retry_interval, min_retry_interval=20000, max_retry_interval=60 * 60 * 1000, - multiplier_retry_interval=2): + multiplier_retry_interval=2,): self.clock = clock self.store = store self.destination = destination @@ -87,7 +89,11 @@ class RetryDestinationLimiter(object): failure.value ) - if exc_type is None and exc_val is None and exc_tb is None: + valid_err_code = False + if exc_type is CodeMessageException: + valid_err_code = 0 <= exc_val.code < 500 + + if exc_type is None or valid_err_code: # We connected successfully. if not self.retry_interval: return From d77912ff4483fa63308eeefb70da93b8c327b740 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Wed, 18 Feb 2015 10:09:54 +0000 Subject: [PATCH 082/112] Docs. --- synapse/util/retryutils.py | 34 +++++++++++++++++++++++++++++++++- 1 file changed, 33 insertions(+), 1 deletion(-) diff --git a/synapse/util/retryutils.py b/synapse/util/retryutils.py index d190100c8c..08285e811d 100644 --- a/synapse/util/retryutils.py +++ b/synapse/util/retryutils.py @@ -35,6 +35,23 @@ class NotRetryingDestination(Exception): @defer.inlineCallbacks def get_retry_limiter(destination, clock, store, **kwargs): + """For a given destination check if we have previously failed to + send a request there and are waiting before retrying the destination. + If we are not ready to retry the destination, this will raise a + NotRetryingDestination exception. Otherwise, will return a Context Manager + that will mark the destination as down if an exception is thrown (excluding + CodeMessageException with code < 500) + + Example usage: + + try: + limiter = yield get_retry_limiter(destination, clock, store) + with limiter: + response = yield do_request() + except NotRetryingDestination: + # We aren't ready to retry that destination. + raise + """ retry_last_ts, retry_interval = (0, 0) retry_timings = yield store.get_destination_retry_timings( @@ -68,8 +85,23 @@ def get_retry_limiter(destination, clock, store, **kwargs): class RetryDestinationLimiter(object): def __init__(self, destination, clock, store, retry_interval, - min_retry_interval=20000, max_retry_interval=60 * 60 * 1000, + min_retry_interval=5000, max_retry_interval=60 * 60 * 1000, multiplier_retry_interval=2,): + """ + Args: + destination (str) + clock (Clock) + store (DataStore) + retry_interval (int): The next retry interval taken from the + database in milliseconds, or zero if the last request was + successful. + min_retry_interval (int): The minimum retry interval to use after + a failed request, in milliseconds. + max_retry_interval (int): The maximum retry interval to use after + a failed request, in milliseconds. + multiplier_retry_interval (int): The multiplier to use to increase + the retry interval after a failed request. 
+ """ self.clock = clock self.store = store self.destination = destination From 4fd176a41db9f3b9073db172e20c8ede40b5f5f4 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Wed, 18 Feb 2015 10:11:24 +0000 Subject: [PATCH 083/112] More docs --- synapse/util/retryutils.py | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/synapse/util/retryutils.py b/synapse/util/retryutils.py index 08285e811d..366f7c48a7 100644 --- a/synapse/util/retryutils.py +++ b/synapse/util/retryutils.py @@ -87,7 +87,11 @@ class RetryDestinationLimiter(object): def __init__(self, destination, clock, store, retry_interval, min_retry_interval=5000, max_retry_interval=60 * 60 * 1000, multiplier_retry_interval=2,): - """ + """Marks the destination as "down" if an exception is thrown in the + context, except for CodeMessageException with code < 500. + + If no exception is raised, marks the destination as "up". + Args: destination (str) clock (Clock) From ec847059f3e9b9b5de62aa2f7ad2366c4e883fac Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Wed, 18 Feb 2015 10:14:10 +0000 Subject: [PATCH 084/112] Rename _fail_fetch_pdu_cache to _get_pdu_cache --- synapse/app/homeserver.py | 2 +- synapse/federation/federation_client.py | 16 ++++++++-------- 2 files changed, 9 insertions(+), 9 deletions(-) diff --git a/synapse/app/homeserver.py b/synapse/app/homeserver.py index 7565d94449..7be82d0576 100755 --- a/synapse/app/homeserver.py +++ b/synapse/app/homeserver.py @@ -275,7 +275,7 @@ def setup(): hs.get_pusherpool().start() hs.get_state_handler().start_caching() hs.get_datastore().start_profiling() - hs.get_replication_layer().start_pdu_fail_cache() + hs.get_replication_layer().start_get_pdu_cache() if config.daemonize: print config.pid_file diff --git a/synapse/federation/federation_client.py b/synapse/federation/federation_client.py index 83b4947b99..6042e366bd 100644 --- a/synapse/federation/federation_client.py +++ b/synapse/federation/federation_client.py @@ -32,10 +32,10 @@ logger = logging.getLogger(__name__) class FederationClient(FederationBase): def __init__(self): - self._fail_fetch_pdu_cache = None + self._get_pdu_cache = None - def start_pdu_fail_cache(self): - self._fail_fetch_pdu_cache = ExpiringCache( + def start_get_pdu_cache(self): + self._get_pdu_cache = ExpiringCache( cache_name="get_pdu_cache", clock=self._clock, max_len=1000, @@ -43,7 +43,7 @@ class FederationClient(FederationBase): reset_expiry_on_get=False, ) - self._fail_fetch_pdu_cache.start() + self._get_pdu_cache.start() @log_function def send_pdu(self, pdu, destinations): @@ -175,8 +175,8 @@ class FederationClient(FederationBase): # TODO: Rate limit the number of times we try and get the same event. 
- if self._fail_fetch_pdu_cache: - e = self._fail_fetch_pdu_cache.get(event_id) + if self._get_pdu_cache: + e = self._get_pdu_cache.get(event_id) if e: defer.returnValue(e) @@ -223,8 +223,8 @@ class FederationClient(FederationBase): ) continue - if self._fail_fetch_pdu_cache is not None: - self._fail_fetch_pdu_cache[event_id] = pdu + if self._get_pdu_cache is not None: + self._get_pdu_cache[event_id] = pdu defer.returnValue(pdu) From 859fbd4423c57a0d64dde5322dde46120692f64b Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Wed, 18 Feb 2015 10:39:14 +0000 Subject: [PATCH 085/112] s/self._clock/self.clock/ --- synapse/util/retryutils.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/synapse/util/retryutils.py b/synapse/util/retryutils.py index 366f7c48a7..ac30545079 100644 --- a/synapse/util/retryutils.py +++ b/synapse/util/retryutils.py @@ -146,7 +146,7 @@ class RetryDestinationLimiter(object): else: self.retry_interval = self.min_retry_interval - retry_last_ts = int(self._clock.time_msec()), + retry_last_ts = int(self.clock.time_msec()), self.store.set_destination_retry_timings( self.destination, retry_last_ts, self.retry_interval From 0db52d43fa41526b83d0c70141c43bfd89388820 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Wed, 18 Feb 2015 10:41:46 +0000 Subject: [PATCH 086/112] strings.join() expects iterable of strings --- synapse/http/matrixfederationclient.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/synapse/http/matrixfederationclient.py b/synapse/http/matrixfederationclient.py index 454c3d4ab1..b5e201b5f1 100644 --- a/synapse/http/matrixfederationclient.py +++ b/synapse/http/matrixfederationclient.py @@ -148,7 +148,7 @@ class MatrixFederationHttpClient(object): if hasattr(e, "reasons"): reasons = ", ".join( - f.value.message + str(f.value.message) for f in e.reasons ) else: From e482541e1d77cb179dafa44037ee1e04003501ab Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Wed, 18 Feb 2015 10:44:22 +0000 Subject: [PATCH 087/112] Fix pyflakes --- synapse/federation/transaction_queue.py | 1 - 1 file changed, 1 deletion(-) diff --git a/synapse/federation/transaction_queue.py b/synapse/federation/transaction_queue.py index a77201ad49..ae04774c76 100644 --- a/synapse/federation/transaction_queue.py +++ b/synapse/federation/transaction_queue.py @@ -266,7 +266,6 @@ class TransactionQueue(object): logger.debug("TX [%s] Sent transaction", destination) logger.debug("TX [%s] Marking as delivered...", destination) - yield self.transaction_actions.delivered( transaction, code, response ) From 5e2447146982a30d0d4ea1334297cc0bfb32c3c5 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Wed, 18 Feb 2015 10:50:10 +0000 Subject: [PATCH 088/112] Fix up ResponseNeverReceived to str --- synapse/http/matrixfederationclient.py | 20 +++++++++++--------- 1 file changed, 11 insertions(+), 9 deletions(-) diff --git a/synapse/http/matrixfederationclient.py b/synapse/http/matrixfederationclient.py index b5e201b5f1..304efcdd56 100644 --- a/synapse/http/matrixfederationclient.py +++ b/synapse/http/matrixfederationclient.py @@ -146,21 +146,13 @@ class MatrixFederationHttpClient(object): ) raise SynapseError(400, "Domain specified not found.") - if hasattr(e, "reasons"): - reasons = ", ".join( - str(f.value.message) - for f in e.reasons - ) - else: - reasons = e.message - logger.warn( "Sending request failed to %s: %s %s: %s - %s", destination, method, url_bytes, type(e). 
__name__, - reasons, + _flatten_response_never_received(e), ) if retries_left: @@ -474,3 +466,13 @@ class _JsonProducer(object): def stopProducing(self): pass + + +def _flatten_response_never_received(e): + if hasattr(e, "reasons"): + return ", ".join( + _flatten_response_never_received(f.value) + for f in e.reasons + ) + else: + return "%s: %s" % (type(e), e.message,) From 65ca713ff53794bd517a85de7df063e631fb255a Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Wed, 18 Feb 2015 10:51:32 +0000 Subject: [PATCH 089/112] Add .__name__ after type(e) --- synapse/http/matrixfederationclient.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/synapse/http/matrixfederationclient.py b/synapse/http/matrixfederationclient.py index 304efcdd56..49a4c7d9d3 100644 --- a/synapse/http/matrixfederationclient.py +++ b/synapse/http/matrixfederationclient.py @@ -151,7 +151,7 @@ class MatrixFederationHttpClient(object): destination, method, url_bytes, - type(e). __name__, + type(e).__name__, _flatten_response_never_received(e), ) @@ -475,4 +475,4 @@ def _flatten_response_never_received(e): for f in e.reasons ) else: - return "%s: %s" % (type(e), e.message,) + return "%s: %s" % (type(e).__name__, e.message,) From cedad8fbd688eef70da4d758987bb39f46fe84bf Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Wed, 18 Feb 2015 10:54:34 +0000 Subject: [PATCH 090/112] Bump version --- synapse/__init__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/synapse/__init__.py b/synapse/__init__.py index 792b8089f6..a804a25748 100644 --- a/synapse/__init__.py +++ b/synapse/__init__.py @@ -16,4 +16,4 @@ """ This is a reference implementation of a Matrix home server. """ -__version__ = "0.7.0f" +__version__ = "0.7.1" From 7e9d59f3b4628981d5e53ac0cc71325ad896287c Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Wed, 18 Feb 2015 10:58:13 +0000 Subject: [PATCH 091/112] Don't convert DNSLookupError to a 4xx SynapseError --- synapse/http/matrixfederationclient.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/synapse/http/matrixfederationclient.py b/synapse/http/matrixfederationclient.py index 49a4c7d9d3..fe57363388 100644 --- a/synapse/http/matrixfederationclient.py +++ b/synapse/http/matrixfederationclient.py @@ -144,7 +144,7 @@ class MatrixFederationHttpClient(object): destination, e ) - raise SynapseError(400, "Domain specified not found.") + raise logger.warn( "Sending request failed to %s: %s %s: %s - %s", From 87e9aeb9143a3dab2dcac24797c822dedde845f1 Mon Sep 17 00:00:00 2001 From: Mark Haines Date: Wed, 18 Feb 2015 10:59:30 +0000 Subject: [PATCH 092/112] Move pynacl to the top of the depedency link list so that it is installed before syutil --- synapse/python_dependencies.py | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/synapse/python_dependencies.py b/synapse/python_dependencies.py index ec78fc3627..a30a570552 100644 --- a/synapse/python_dependencies.py +++ b/synapse/python_dependencies.py @@ -24,6 +24,11 @@ def github_link(project, version, egg): return "https://github.com/%s/tarball/%s/#egg=%s" % (project, version, egg) DEPENDENCY_LINKS = [ + github_link( + project="pyca/pynacl", + version="d4d3175589b892f6ea7c22f466e0e223853516fa", + egg="pynacl-0.3.0", + ) github_link( project="matrix-org/syutil", version="v0.0.3", @@ -34,11 +39,6 @@ DEPENDENCY_LINKS = [ version="v0.6.2", egg="matrix_angular_sdk-0.6.2", ), - github_link( - project="pyca/pynacl", - version="d4d3175589b892f6ea7c22f466e0e223853516fa", - egg="pynacl-0.3.0", - ) ] From 
5806d52423a97e219b710ca638866e0763fa416e Mon Sep 17 00:00:00 2001 From: Mark Haines Date: Wed, 18 Feb 2015 11:01:37 +0000 Subject: [PATCH 093/112] Fix syntax --- synapse/python_dependencies.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/synapse/python_dependencies.py b/synapse/python_dependencies.py index a30a570552..9464b4fb62 100644 --- a/synapse/python_dependencies.py +++ b/synapse/python_dependencies.py @@ -28,7 +28,7 @@ DEPENDENCY_LINKS = [ project="pyca/pynacl", version="d4d3175589b892f6ea7c22f466e0e223853516fa", egg="pynacl-0.3.0", - ) + ), github_link( project="matrix-org/syutil", version="v0.0.3", From b17bd31da0574cf335f90763dee2708af40ac87f Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Wed, 18 Feb 2015 11:17:26 +0000 Subject: [PATCH 094/112] Temporarily add a run_on_reactor() call --- synapse/util/retryutils.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/synapse/util/retryutils.py b/synapse/util/retryutils.py index ac30545079..ee1194f1de 100644 --- a/synapse/util/retryutils.py +++ b/synapse/util/retryutils.py @@ -17,6 +17,8 @@ from twisted.internet import defer from synapse.api.errors import CodeMessageException +from synapse.util.async import run_on_reactor + import logging @@ -52,6 +54,7 @@ def get_retry_limiter(destination, clock, store, **kwargs): # We aren't ready to retry that destination. raise """ + yield run_on_reactor() retry_last_ts, retry_interval = (0, 0) retry_timings = yield store.get_destination_retry_timings( From 36e144091b3be995754f7100a9c5322b224f2692 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Wed, 18 Feb 2015 11:25:20 +0000 Subject: [PATCH 095/112] Remove spurious comma. Remove temp run_on_reactor --- synapse/util/retryutils.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/synapse/util/retryutils.py b/synapse/util/retryutils.py index ee1194f1de..ab58eb528b 100644 --- a/synapse/util/retryutils.py +++ b/synapse/util/retryutils.py @@ -54,7 +54,6 @@ def get_retry_limiter(destination, clock, store, **kwargs): # We aren't ready to retry that destination. 
raise """ - yield run_on_reactor() retry_last_ts, retry_interval = (0, 0) retry_timings = yield store.get_destination_retry_timings( @@ -149,7 +148,7 @@ class RetryDestinationLimiter(object): else: self.retry_interval = self.min_retry_interval - retry_last_ts = int(self.clock.time_msec()), + retry_last_ts = int(self.clock.time_msec()) self.store.set_destination_retry_timings( self.destination, retry_last_ts, self.retry_interval From 47d3ff4cf8ebceacc680c9e5b414150c6edbf85b Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Wed, 18 Feb 2015 11:30:37 +0000 Subject: [PATCH 096/112] Don't send failure to self --- synapse/federation/transaction_queue.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/synapse/federation/transaction_queue.py b/synapse/federation/transaction_queue.py index ae04774c76..f0041d4d12 100644 --- a/synapse/federation/transaction_queue.py +++ b/synapse/federation/transaction_queue.py @@ -128,6 +128,9 @@ class TransactionQueue(object): @defer.inlineCallbacks def enqueue_failure(self, failure, destination): + if destination == self.server_name: + return + deferred = defer.Deferred() self.pending_failures_by_dest.setdefault( From b68e4a729ff91372a3a7269acc08c1f77584b0f4 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Wed, 18 Feb 2015 11:32:39 +0000 Subject: [PATCH 097/112] Discard destination 'localhost' --- synapse/federation/transaction_queue.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/synapse/federation/transaction_queue.py b/synapse/federation/transaction_queue.py index f0041d4d12..06944eefb9 100644 --- a/synapse/federation/transaction_queue.py +++ b/synapse/federation/transaction_queue.py @@ -107,7 +107,7 @@ class TransactionQueue(object): def enqueue_edu(self, edu): destination = edu.destination - if destination == self.server_name: + if destination == self.server_name or destination == "localhost": return deferred = defer.Deferred() @@ -128,7 +128,7 @@ class TransactionQueue(object): @defer.inlineCallbacks def enqueue_failure(self, failure, destination): - if destination == self.server_name: + if destination == self.server_name or destination == "localhost": return deferred = defer.Deferred() From 2462aacd77963431507bb97769acee3ed9d65ceb Mon Sep 17 00:00:00 2001 From: Mark Haines Date: Wed, 18 Feb 2015 11:51:00 +0000 Subject: [PATCH 098/112] Restrict the destinations that synapse can talk to --- synapse/federation/transaction_queue.py | 30 ++++++++++++++++++++++--- 1 file changed, 27 insertions(+), 3 deletions(-) diff --git a/synapse/federation/transaction_queue.py b/synapse/federation/transaction_queue.py index ae04774c76..4b5460c797 100644 --- a/synapse/federation/transaction_queue.py +++ b/synapse/federation/transaction_queue.py @@ -66,6 +66,26 @@ class TransactionQueue(object): # HACK to get unique tx id self._next_txn_id = int(self._clock.time_msec()) + def can_send_to(self, destination): + """Can we send messages to the given server? + + We can't send messages to ourselves. If we are running on localhost + then we can only federation with other servers running on localhost. + Otherwise we only federate with servers on a public domain. + + Args: + destination(str): The server we are possibly trying to send to. + Returns: + bool: True if we can send to the server. 
+ """ + + if destination == self.server_name: + return False + if self.server_name.startswith("localhost"): + return destination.startswith("localhost") + else: + return not destination.startswith("localhost") + @defer.inlineCallbacks @log_function def enqueue_pdu(self, pdu, destinations, order): @@ -74,8 +94,9 @@ class TransactionQueue(object): # table and we'll get back to it later. destinations = set(destinations) - destinations.discard(self.server_name) - destinations.discard("localhost") + destinations = set( + dest for dest in destinations if self.can_send_to(dest) + ) logger.debug("Sending to: %s", str(destinations)) @@ -107,7 +128,7 @@ class TransactionQueue(object): def enqueue_edu(self, edu): destination = edu.destination - if destination == self.server_name: + if not self.can_send_to(destination): return deferred = defer.Deferred() @@ -130,6 +151,9 @@ class TransactionQueue(object): def enqueue_failure(self, failure, destination): deferred = defer.Deferred() + if not self.can_send_to(destination): + return + self.pending_failures_by_dest.setdefault( destination, [] ).append( From 6375bd3e33f687499e6d15298e74416c77ab499e Mon Sep 17 00:00:00 2001 From: Mark Haines Date: Wed, 18 Feb 2015 12:01:37 +0000 Subject: [PATCH 099/112] SYN-282: Don't log tracebacks for client errors --- synapse/rest/media/v1/base_resource.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/synapse/rest/media/v1/base_resource.py b/synapse/rest/media/v1/base_resource.py index d44d5f1298..b10cbddb81 100644 --- a/synapse/rest/media/v1/base_resource.py +++ b/synapse/rest/media/v1/base_resource.py @@ -54,7 +54,7 @@ class BaseMediaResource(Resource): try: yield request_handler(self, request) except CodeMessageException as e: - logger.exception(e) + logger.info("Responding with error: %r", e) respond_with_json( request, e.code, cs_exception(e), send_cors=True ) From cc3d3babb0c04b21501f59c25739b9e48fa5b490 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Wed, 18 Feb 2015 12:01:41 +0000 Subject: [PATCH 100/112] Remove unused import --- synapse/util/retryutils.py | 2 -- 1 file changed, 2 deletions(-) diff --git a/synapse/util/retryutils.py b/synapse/util/retryutils.py index ab58eb528b..4e82232796 100644 --- a/synapse/util/retryutils.py +++ b/synapse/util/retryutils.py @@ -17,8 +17,6 @@ from twisted.internet import defer from synapse.api.errors import CodeMessageException -from synapse.util.async import run_on_reactor - import logging From 446ef589920a9c3c4738873023494bed79d1f6c3 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Wed, 18 Feb 2015 12:03:12 +0000 Subject: [PATCH 101/112] Add errback to all deferreds in transaction_queue --- synapse/federation/transaction_queue.py | 37 +++++++++++++++---------- 1 file changed, 23 insertions(+), 14 deletions(-) diff --git a/synapse/federation/transaction_queue.py b/synapse/federation/transaction_queue.py index 06944eefb9..1e5505a4bf 100644 --- a/synapse/federation/transaction_queue.py +++ b/synapse/federation/transaction_queue.py @@ -90,14 +90,17 @@ class TransactionQueue(object): (pdu, deferred, order) ) - def eb(failure): + def chain(failure): if not deferred.called: deferred.errback(failure) - else: - logger.warn("Failed to send pdu", failure.value) + + def log_failure(failure): + logger.warn("Failed to send pdu", failure.value) + + deferred.addErrback(log_failure) with PreserveLoggingContext(): - self._attempt_new_transaction(destination).addErrback(eb) + self._attempt_new_transaction(destination).addErrback(chain) deferreds.append(deferred) @@ 
-115,14 +118,17 @@ class TransactionQueue(object): (edu, deferred) ) - def eb(failure): + def chain(failure): if not deferred.called: deferred.errback(failure) - else: - logger.warn("Failed to send edu", failure.value) + + def log_failure(failure): + logger.warn("Failed to send pdu", failure.value) + + deferred.addErrback(log_failure) with PreserveLoggingContext(): - self._attempt_new_transaction(destination).addErrback(eb) + self._attempt_new_transaction(destination).addErrback(chain) return deferred @@ -139,14 +145,17 @@ class TransactionQueue(object): (failure, deferred) ) - def eb(failure): + def chain(f): if not deferred.called: - deferred.errback(failure) - else: - logger.warn("Failed to send failure", failure.value) + deferred.errback(f) + + def log_failure(f): + logger.warn("Failed to send pdu", f.value) + + deferred.addErrback(log_failure) with PreserveLoggingContext(): - self._attempt_new_transaction(destination).addErrback(eb) + self._attempt_new_transaction(destination).addErrback(chain) yield deferred @@ -308,7 +317,7 @@ class TransactionQueue(object): except Exception as e: # We capture this here as there as nothing actually listens # for this finishing functions deferred. - logger.exception( + logger.warn( "TX [%s] Problem in _attempt_transaction: %s", destination, e, From d122e215ff459648d0352627c91adb00c6a34213 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Wed, 18 Feb 2015 16:21:35 +0000 Subject: [PATCH 102/112] Generate a version string that includes git details if run from git checkout --- synapse/app/homeserver.py | 62 ++++++++++++++++++++++++++++++++++++++- 1 file changed, 61 insertions(+), 1 deletion(-) diff --git a/synapse/app/homeserver.py b/synapse/app/homeserver.py index 351fc69143..166a0169ab 100755 --- a/synapse/app/homeserver.py +++ b/synapse/app/homeserver.py @@ -52,6 +52,7 @@ import synapse import logging import os import re +import subprocess import sqlite3 import syweb @@ -208,6 +209,61 @@ class SynapseHomeServer(HomeServer): logger.info("Synapse now listening on port %d", unsecure_port) +def get_version_string(): + null = open(os.devnull, 'w') + try: + git_branch = subprocess.check_output( + ['git', 'rev-parse', '--abbrev-ref', 'HEAD'], + stderr=null, + ).strip() + git_branch = "b=" + git_branch + except subprocess.CalledProcessError: + git_branch = "" + + try: + git_tag = subprocess.check_output( + ['git', 'describe', '--exact-match'], + stderr=null, + ).strip() + git_tag = "t=" + git_tag + except subprocess.CalledProcessError: + git_tag = "" + + try: + git_commit = subprocess.check_output( + ['git', 'rev-parse', '--short', 'HEAD'], + stderr=null, + ).strip() + except subprocess.CalledProcessError: + git_commit = "" + + try: + dirty_string = "-this_is_a_dirty_checkout" + is_dirty = subprocess.check_output( + ['git', 'describe', '--dirty=' + dirty_string], + stderr=null, + ).strip().endswith(dirty_string) + + git_dirty = "dirty" if is_dirty else "" + except subprocess.CalledProcessError: + git_dirty = "" + + if git_branch or git_tag or git_commit or git_dirty: + git_version = ",".join( + s for s in + (git_branch, git_tag, git_commit, git_dirty,) + if s + ) + + return ( + "Synapse/v=%s,%s" % ( + synapse.__version__, git_version, + ) + ).encode("ascii") + + return ("Synapse/%s" % (synapse.__version__,)).encode("ascii") + + def setup(): config = HomeServerConfig.load_config( "Synapse Homeserver", @@ -219,8 +275,10 @@ def setup(): check_requirements() + version_string = get_version_string() + logger.info("Server hostname: %s", config.server_name) - 
logger.info("Server version: %s", synapse.__version__) + logger.info("Server version: %s", version_string) if re.search(":[0-9]+$", config.server_name): domain_with_port = config.server_name @@ -229,6 +287,7 @@ def setup(): tls_context_factory = context_factory.ServerContextFactory(config) + hs = SynapseHomeServer( config.server_name, domain_with_port=domain_with_port, @@ -237,6 +296,7 @@ def setup(): tls_context_factory=tls_context_factory, config=config, content_addr=config.content_addr, + version_string=version_string, ) hs.create_resource_tree( From aa577df06405c1574084389bcd189baedc3a7397 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Wed, 18 Feb 2015 16:26:30 +0000 Subject: [PATCH 103/112] When computing git version run git commands in same dir as source files --- synapse/app/homeserver.py | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/synapse/app/homeserver.py b/synapse/app/homeserver.py index 166a0169ab..6acb3b2f4e 100755 --- a/synapse/app/homeserver.py +++ b/synapse/app/homeserver.py @@ -211,10 +211,12 @@ class SynapseHomeServer(HomeServer): def get_version_string(): null = open(os.devnull, 'w') + cwd = os.path.dirname(os.path.abspath(__file__)) try: git_branch = subprocess.check_output( ['git', 'rev-parse', '--abbrev-ref', 'HEAD'], stderr=null, + cwd=cwd, ).strip() git_branch = "b=" + git_branch except subprocess.CalledProcessError: @@ -224,6 +226,7 @@ def get_version_string(): git_tag = subprocess.check_output( ['git', 'describe', '--exact-match'], stderr=null, + cwd=cwd, ).strip() git_tag = "t=" + git_tag except subprocess.CalledProcessError: @@ -233,6 +236,7 @@ def get_version_string(): git_commit = subprocess.check_output( ['git', 'rev-parse', '--short', 'HEAD'], stderr=null, + cwd=cwd, ).strip() except subprocess.CalledProcessError: git_commit = "" @@ -242,6 +246,7 @@ def get_version_string(): is_dirty = subprocess.check_output( ['git', 'describe', '--dirty=' + dirty_string], stderr=null, + cwd=cwd, ).strip().endswith(dirty_string) git_dirty = "dirty" if is_dirty else "" From 5358966a878aa543659c10be09f4bf58b1568f2f Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Wed, 18 Feb 2015 16:51:33 +0000 Subject: [PATCH 104/112] Use git aware version string in User-Agent and Server headers --- synapse/http/agent_name.py | 18 ----------------- synapse/http/client.py | 10 +++++----- synapse/http/matrixfederationclient.py | 4 ++-- synapse/http/server.py | 27 +++++++++++++++++--------- tests/utils.py | 10 ++++++++-- 5 files changed, 33 insertions(+), 36 deletions(-) delete mode 100644 synapse/http/agent_name.py diff --git a/synapse/http/agent_name.py b/synapse/http/agent_name.py deleted file mode 100644 index d761890863..0000000000 --- a/synapse/http/agent_name.py +++ /dev/null @@ -1,18 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2014, 2015 OpenMarket Ltd -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -from synapse import __version__ - -AGENT_NAME = ("Synapse/%s" % (__version__,)).encode("ascii") diff --git a/synapse/http/client.py b/synapse/http/client.py index e46e7db146..22b7145ac1 100644 --- a/synapse/http/client.py +++ b/synapse/http/client.py @@ -14,7 +14,6 @@ # limitations under the License. from synapse.api.errors import CodeMessageException -from synapse.http.agent_name import AGENT_NAME from syutil.jsonutil import encode_canonical_json from twisted.internet import defer, reactor @@ -44,6 +43,7 @@ class SimpleHttpClient(object): # BrowserLikePolicyForHTTPS which will do regular cert validation # 'like a browser' self.agent = Agent(reactor) + self.version_string = hs.version_string @defer.inlineCallbacks def post_urlencoded_get_json(self, uri, args={}): @@ -55,7 +55,7 @@ class SimpleHttpClient(object): uri.encode("ascii"), headers=Headers({ b"Content-Type": [b"application/x-www-form-urlencoded"], - b"User-Agent": [AGENT_NAME], + b"User-Agent": [self.version_string], }), bodyProducer=FileBodyProducer(StringIO(query_bytes)) ) @@ -108,7 +108,7 @@ class SimpleHttpClient(object): "GET", uri.encode("ascii"), headers=Headers({ - b"User-Agent": [AGENT_NAME], + b"User-Agent": [self.version_string], }) ) @@ -149,7 +149,7 @@ class SimpleHttpClient(object): "PUT", uri.encode("ascii"), headers=Headers({ - b"User-Agent": [AGENT_NAME], + b"User-Agent": [self.version_string], "Content-Type": ["application/json"] }), bodyProducer=FileBodyProducer(StringIO(json_str)) @@ -182,7 +182,7 @@ class CaptchaServerHttpClient(SimpleHttpClient): bodyProducer=FileBodyProducer(StringIO(query_bytes)), headers=Headers({ b"Content-Type": [b"application/x-www-form-urlencoded"], - b"User-Agent": [AGENT_NAME], + b"User-Agent": [self.version_string], }) ) diff --git a/synapse/http/matrixfederationclient.py b/synapse/http/matrixfederationclient.py index 49a4c7d9d3..9d483032a0 100644 --- a/synapse/http/matrixfederationclient.py +++ b/synapse/http/matrixfederationclient.py @@ -20,7 +20,6 @@ from twisted.web.client import readBody, _AgentBase, _URI from twisted.web.http_headers import Headers from twisted.web._newclient import ResponseDone -from synapse.http.agent_name import AGENT_NAME from synapse.http.endpoint import matrix_federation_endpoint from synapse.util.async import sleep from synapse.util.logcontext import PreserveLoggingContext @@ -80,6 +79,7 @@ class MatrixFederationHttpClient(object): self.server_name = hs.hostname self.agent = MatrixFederationHttpAgent(reactor) self.clock = hs.get_clock() + self.version_string = hs.version_string @defer.inlineCallbacks def _create_request(self, destination, method, path_bytes, @@ -87,7 +87,7 @@ class MatrixFederationHttpClient(object): query_bytes=b"", retry_on_dns_fail=True): """ Creates and sends a request to the given url """ - headers_dict[b"User-Agent"] = [AGENT_NAME] + headers_dict[b"User-Agent"] = [self.version_string] headers_dict[b"Host"] = [destination] url_bytes = urlparse.urlunparse( diff --git a/synapse/http/server.py b/synapse/http/server.py index 589c30c199..74a101a5d7 100644 --- a/synapse/http/server.py +++ b/synapse/http/server.py @@ -14,7 +14,6 @@ # limitations under the License. 
-from synapse.http.agent_name import AGENT_NAME from synapse.api.errors import ( cs_exception, SynapseError, CodeMessageException, UnrecognizedRequestError ) @@ -74,6 +73,7 @@ class JsonResource(HttpServer, resource.Resource): self.clock = hs.get_clock() self.path_regexs = {} + self.version_string = hs.version_string def register_path(self, method, path_pattern, callback): self.path_regexs.setdefault(method, []).append( @@ -189,9 +189,13 @@ class JsonResource(HttpServer, resource.Resource): return # TODO: Only enable CORS for the requests that need it. - respond_with_json(request, code, response_json_object, send_cors=True, - response_code_message=response_code_message, - pretty_print=self._request_user_agent_is_curl) + respond_with_json( + request, code, response_json_object, + send_cors=True, + response_code_message=response_code_message, + pretty_print=self._request_user_agent_is_curl, + version_string=self.version_string, + ) @staticmethod def _request_user_agent_is_curl(request): @@ -221,18 +225,23 @@ class RootRedirect(resource.Resource): def respond_with_json(request, code, json_object, send_cors=False, - response_code_message=None, pretty_print=False): + response_code_message=None, pretty_print=False, + version_string=""): if not pretty_print: json_bytes = encode_pretty_printed_json(json_object) else: json_bytes = encode_canonical_json(json_object) - return respond_with_json_bytes(request, code, json_bytes, send_cors, - response_code_message=response_code_message) + return respond_with_json_bytes( + request, code, json_bytes, + send_cors=send_cors, + response_code_message=response_code_message, + version_string=version_string + ) def respond_with_json_bytes(request, code, json_bytes, send_cors=False, - response_code_message=None): + version_string="", response_code_message=None): """Sends encoded JSON in response to the given request. 
Args: @@ -246,7 +255,7 @@ def respond_with_json_bytes(request, code, json_bytes, send_cors=False, request.setResponseCode(code, message=response_code_message) request.setHeader(b"Content-Type", b"application/json") - request.setHeader(b"Server", AGENT_NAME) + request.setHeader(b"Server", version_string) request.setHeader(b"Content-Length", b"%d" % (len(json_bytes),)) if send_cors: diff --git a/tests/utils.py b/tests/utils.py index 39895c739f..110b9f86b8 100644 --- a/tests/utils.py +++ b/tests/utils.py @@ -46,10 +46,16 @@ def setup_test_homeserver(name="test", datastore=None, config=None, **kargs): if datastore is None: db_pool = SQLiteMemoryDbPool() yield db_pool.prepare() - hs = HomeServer(name, db_pool=db_pool, config=config, **kargs) + hs = HomeServer( + name, db_pool=db_pool, config=config, + version_string="Synapse/tests", + **kargs + ) else: hs = HomeServer( - name, db_pool=None, datastore=datastore, config=config, **kargs + name, db_pool=None, datastore=datastore, config=config, + version_string="Synapse/tests", + **kargs ) defer.returnValue(hs) From 1df8bad63e7f55dca79f20919a6873157c76d8b7 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Wed, 18 Feb 2015 16:54:25 +0000 Subject: [PATCH 105/112] pyflakes --- synapse/app/homeserver.py | 1 - 1 file changed, 1 deletion(-) diff --git a/synapse/app/homeserver.py b/synapse/app/homeserver.py index 6acb3b2f4e..3368ec6759 100755 --- a/synapse/app/homeserver.py +++ b/synapse/app/homeserver.py @@ -292,7 +292,6 @@ def setup(): tls_context_factory = context_factory.ServerContextFactory(config) - hs = SynapseHomeServer( config.server_name, domain_with_port=domain_with_port, From 5b753d472bd4ffba124103753f83b55c567889fb Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Wed, 18 Feb 2015 17:02:40 +0000 Subject: [PATCH 106/112] Bump matrix-angular-sdk version --- synapse/python_dependencies.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/synapse/python_dependencies.py b/synapse/python_dependencies.py index 9464b4fb62..c71df75404 100644 --- a/synapse/python_dependencies.py +++ b/synapse/python_dependencies.py @@ -5,7 +5,7 @@ logger = logging.getLogger(__name__) REQUIREMENTS = { "syutil>=0.0.3": ["syutil"], - "matrix_angular_sdk>=0.6.2": ["syweb>=0.6.2"], + "matrix_angular_sdk>=0.6.3": ["syweb>=0.6.3"], "Twisted==14.0.2": ["twisted==14.0.2"], "service_identity>=1.0.0": ["service_identity>=1.0.0"], "pyopenssl>=0.14": ["OpenSSL>=0.14"], @@ -36,8 +36,8 @@ DEPENDENCY_LINKS = [ ), github_link( project="matrix-org/matrix-angular-sdk", - version="v0.6.2", - egg="matrix_angular_sdk-0.6.2", + version="v0.6.3", + egg="matrix_angular_sdk-0.6.3", ), ] From b6771037a6d726803f76d486bf82075553d4a7b7 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Wed, 18 Feb 2015 17:31:50 +0000 Subject: [PATCH 107/112] Make version_string conform to User-Agent and Server spec --- synapse/app/homeserver.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/synapse/app/homeserver.py b/synapse/app/homeserver.py index 3368ec6759..bf01483b85 100755 --- a/synapse/app/homeserver.py +++ b/synapse/app/homeserver.py @@ -261,7 +261,7 @@ def get_version_string(): ) return ( - "Synapse/v=%s,%s" % ( + "Synapse/%s/(%s)" % ( synapse.__version__, git_version, ) ).encode("ascii") From 3f1871021e4efd1bbc513790e5c82f09b4c04503 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Wed, 18 Feb 2015 17:32:12 +0000 Subject: [PATCH 108/112] Make /keys/ return correct Server version --- synapse/http/server_key_resource.py | 6 +++++- 1 file changed, 5 insertions(+), 
1 deletion(-) diff --git a/synapse/http/server_key_resource.py b/synapse/http/server_key_resource.py index 4fc491dc82..71e9a51f5c 100644 --- a/synapse/http/server_key_resource.py +++ b/synapse/http/server_key_resource.py @@ -50,6 +50,7 @@ class LocalKey(Resource): def __init__(self, hs): self.hs = hs + self.version_string = hs.version_string self.response_body = encode_canonical_json( self.response_json_object(hs.config) ) @@ -82,7 +83,10 @@ class LocalKey(Resource): return json_object def render_GET(self, request): - return respond_with_json_bytes(request, 200, self.response_body) + return respond_with_json_bytes( + request, 200, self.response_body, + version_string=self.version_string + ) def getChild(self, name, request): if name == '': From 6311ae8968d5f2e3b867789376933de83678e036 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Wed, 18 Feb 2015 17:34:26 +0000 Subject: [PATCH 109/112] Conform to header spec take two --- synapse/app/homeserver.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/synapse/app/homeserver.py b/synapse/app/homeserver.py index bf01483b85..ea20de1434 100755 --- a/synapse/app/homeserver.py +++ b/synapse/app/homeserver.py @@ -261,7 +261,7 @@ def get_version_string(): ) return ( - "Synapse/%s/(%s)" % ( + "Synapse/%s (%s)" % ( synapse.__version__, git_version, ) ).encode("ascii") From faf4f678474feb5d9964df6aa621990e73d1792d Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Wed, 18 Feb 2015 18:02:54 +0000 Subject: [PATCH 110/112] Update CHANGES --- CHANGES.rst | 11 +++++++++++ 1 file changed, 11 insertions(+) diff --git a/CHANGES.rst b/CHANGES.rst index 8835107594..04f9d94405 100644 --- a/CHANGES.rst +++ b/CHANGES.rst @@ -1,3 +1,14 @@ +Changes in synapse v0.7.1 (2015-02-18) +====================================== + +* Add cache when fetching events from remote servers to stop repeatedly + fetching events with bad signatures. +* Respect the per remote server retry scheme when fetching both events and + server keys to reduce the number of times we send requests to dead servers. +* Inform remote servers when the local server fails to handle a received event. +* Turn off python bytecode generation due to problems experienced when + upgrading from previous versions. + Changes in synapse v0.7.0 (2015-02-12) ====================================== From b457f1677c0798894140eb6b7e091800c7c7bf2d Mon Sep 17 00:00:00 2001 From: David Baker Date: Thu, 19 Feb 2015 10:06:17 +0000 Subject: [PATCH 111/112] Send room ID in http notifications so clients know which room to go to if the user responds to the notification. 
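For context, a receiving push gateway could consume the new field roughly as
sketched below. This is an illustrative Python sketch only; the helper name
and any surrounding gateway plumbing are hypothetical and not part of this
patch.

    def extract_room_id(body):
        # 'notification' is the dict built by HttpPusher below; this patch
        # adds 'room_id' alongside the existing event id, type and sender.
        notif = body.get("notification", {})
        # Older homeservers will not send the field, so fall back gracefully.
        return notif.get("room_id")

    # e.g. extract_room_id({"notification": {"room_id": "!abc:example.org"}})
    # returns "!abc:example.org"

A client that receives the push can then deep-link straight into that room
rather than just opening the app.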
--- synapse/push/httppusher.py | 1 + 1 file changed, 1 insertion(+) diff --git a/synapse/push/httppusher.py b/synapse/push/httppusher.py index 5788db4eba..202fcb42f4 100644 --- a/synapse/push/httppusher.py +++ b/synapse/push/httppusher.py @@ -66,6 +66,7 @@ class HttpPusher(Pusher): d = { 'notification': { 'id': event['event_id'], + 'room_id': event['room_id'], 'type': event['type'], 'sender': event['user_id'], 'counts': { # -- we don't mark messages as read yet so From 63c1f4fa987874e2bde76bd2c2d8cb35beef7d99 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Thu, 19 Feb 2015 10:33:31 +0000 Subject: [PATCH 112/112] Update release date --- CHANGES.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/CHANGES.rst b/CHANGES.rst index 04f9d94405..d4474ca5e4 100644 --- a/CHANGES.rst +++ b/CHANGES.rst @@ -1,4 +1,4 @@ -Changes in synapse v0.7.1 (2015-02-18) +Changes in synapse v0.7.1 (2015-02-19) ====================================== * Add cache when fetching events from remote servers to stop repeatedly