From 5f02017aead05adf127b89d7e203849982bd7fdc Mon Sep 17 00:00:00 2001
From: Erik Johnston
Date: Wed, 5 Sep 2018 10:20:40 +0100
Subject: [PATCH 01/45] Improve performance of getting typing updates for replication

Fetching the list of all new typing notifications involved iterating
over all rooms and comparing their serial. Let's move to using a stream
change cache, like we do for other streams.
---
 synapse/handlers/typing.py | 22 +++++++++++++++++++---
 1 file changed, 19 insertions(+), 3 deletions(-)

diff --git a/synapse/handlers/typing.py b/synapse/handlers/typing.py
index 2d2d3d5a0..65f475d63 100644
--- a/synapse/handlers/typing.py
+++ b/synapse/handlers/typing.py
@@ -20,6 +20,7 @@ from twisted.internet import defer
 
 from synapse.api.errors import AuthError, SynapseError
 from synapse.types import UserID, get_domain_from_id
+from synapse.util.caches.stream_change_cache import StreamChangeCache
 from synapse.util.logcontext import run_in_background
 from synapse.util.metrics import Measure
 from synapse.util.wheel_timer import WheelTimer
@@ -68,6 +69,11 @@ class TypingHandler(object):
         # map room IDs to sets of users currently typing
         self._room_typing = {}
 
+        # caches which room_ids changed at which serials
+        self._typing_stream_change_cache = StreamChangeCache(
+            "TypingStreamChangeCache", self._latest_room_serial,
+        )
+
         self.clock.looping_call(
             self._handle_timeouts,
             5000,
         )
@@ -274,19 +280,29 @@ class TypingHandler(object):
 
         self._latest_room_serial += 1
         self._room_serials[member.room_id] = self._latest_room_serial
+        self._typing_stream_change_cache.entity_has_changed(
+            member.room_id, self._latest_room_serial,
+        )
 
         self.notifier.on_new_event(
             "typing_key", self._latest_room_serial, rooms=[member.room_id]
         )
 
     def get_all_typing_updates(self, last_id, current_id):
-        # TODO: Work out a way to do this without scanning the entire state.
         if last_id == current_id:
             return []
 
+        changed_rooms = self._typing_stream_change_cache.get_all_entities_changed(
+            last_id,
+        )
+
+        if changed_rooms is None:
+            changed_rooms = self._room_serials
+
         rows = []
-        for room_id, serial in self._room_serials.items():
-            if last_id < serial and serial <= current_id:
+        for room_id in changed_rooms:
+            serial = self._room_serials[room_id]
+            if last_id < serial <= current_id:
                 typing = self._room_typing[room_id]
                 rows.append((serial, room_id, list(typing)))
         rows.sort()

From 1a24d4effaeadaa14e30d56c1a710fff296c2391 Mon Sep 17 00:00:00 2001
From: Erik Johnston
Date: Wed, 5 Sep 2018 16:22:26 +0100
Subject: [PATCH 02/45] Newsfile

---
 changelog.d/3794.misc | 1 +
 1 file changed, 1 insertion(+)
 create mode 100644 changelog.d/3794.misc

diff --git a/changelog.d/3794.misc b/changelog.d/3794.misc
new file mode 100644
index 000000000..6b98c9609
--- /dev/null
+++ b/changelog.d/3794.misc
@@ -0,0 +1 @@
+Speed up calculation of typing updates for replication

From 79eded1ae4f0d341918f5c58076f1e60cd400e8a Mon Sep 17 00:00:00 2001
From: Erik Johnston
Date: Fri, 21 Sep 2018 14:52:21 +0100
Subject: [PATCH 03/45] Make ExpiringCache slightly more performant

---
 synapse/util/caches/expiringcache.py | 6 +++++-
 1 file changed, 5 insertions(+), 1 deletion(-)

diff --git a/synapse/util/caches/expiringcache.py b/synapse/util/caches/expiringcache.py
index 921a9c5b2..48ca2d634 100644
--- a/synapse/util/caches/expiringcache.py
+++ b/synapse/util/caches/expiringcache.py
@@ -16,6 +16,8 @@
 import logging
 from collections import OrderedDict
 
+from six import iteritems
+
 from synapse.metrics.background_process_metrics import run_as_background_process
 from synapse.util.caches import register_cache
 
@@ -127,7 +129,7 @@ class ExpiringCache(object):
 
         keys_to_delete = set()
 
-        for key, cache_entry in self._cache.items():
+        for key, cache_entry in iteritems(self._cache):
             if now - cache_entry.time > self._expiry_ms:
                 keys_to_delete.add(key)
 
@@ -149,6 +151,8 @@ class ExpiringCache(object):
 
 
 class _CacheEntry(object):
+    __slots__ = ["time", "value"]
+
     def __init__(self, time, value):
         self.time = time
         self.value = value

From fdd1a62e8d09ddccbe685fe7d7840990a9c06241 Mon Sep 17 00:00:00 2001
From: Erik Johnston
Date: Fri, 21 Sep 2018 14:55:47 +0100
Subject: [PATCH 04/45] Add a five minute cache to get_destination_retry_timings

Hopefully helps with #3931
---
 synapse/storage/transactions.py | 23 ++++++++++++++++++++++-
 synapse/util/caches/expiringcache.py | 13 +++++++++++++
 2 files changed, 35 insertions(+), 1 deletion(-)

diff --git a/synapse/storage/transactions.py b/synapse/storage/transactions.py
index baf0379a6..ab54977a7 100644
--- a/synapse/storage/transactions.py
+++ b/synapse/storage/transactions.py
@@ -23,6 +23,7 @@ from canonicaljson import encode_canonical_json
 
 from twisted.internet import defer
 
 from synapse.metrics.background_process_metrics import run_as_background_process
+from synapse.util.caches.expiringcache import ExpiringCache
 
 from ._base import SQLBaseStore, db_to_json
@@ -49,6 +50,8 @@ _UpdateTransactionRow = namedtuple(
     )
 )
 
+SENTINEL = object()
+
 
 class TransactionStore(SQLBaseStore):
     """A collection of queries for handling PDUs.
@@ -59,6 +62,12 @@ class TransactionStore(SQLBaseStore):
 
         self._clock.looping_call(self._start_cleanup_transactions, 30 * 60 * 1000)
 
+        self._destination_retry_cache = ExpiringCache(
+            cache_name="get_destination_retry_timings",
+            clock=self._clock,
+            expiry_ms=5 * 60 * 1000,
+        )
+
     def get_received_txn_response(self, transaction_id, origin):
         """For an incoming transaction from a given origin, check if we have
         already responded to it. If so, return the response code and response
@@ -155,6 +164,7 @@ class TransactionStore(SQLBaseStore):
         """
         pass
 
+    @defer.inlineCallbacks
     def get_destination_retry_timings(self, destination):
         """Gets the current retry timings (if any) for a given destination.
 
@@ -165,10 +175,20 @@ class TransactionStore(SQLBaseStore):
             None if not retrying
             Otherwise a dict for the retry scheme
         """
-        return self.runInteraction(
+
+        result = self._destination_retry_cache.get(destination, SENTINEL)
+        if result is not SENTINEL:
+            defer.returnValue(result)
+
+        result = yield self.runInteraction(
             "get_destination_retry_timings",
             self._get_destination_retry_timings, destination)
 
+        # We don't hugely care about race conditions between getting and
+        # invalidating the cache, since we time out fairly quickly anyway.
+        self._destination_retry_cache[destination] = result
+        defer.returnValue(result)
+
     def _get_destination_retry_timings(self, txn, destination):
         result = self._simple_select_one_txn(
             txn,
@@ -196,6 +216,7 @@ class TransactionStore(SQLBaseStore):
             retry_interval (int) - how long until next retry in ms
         """
 
+        self._destination_retry_cache.pop(destination)
         return self.runInteraction(
             "set_destination_retry_timings",
             self._set_destination_retry_timings,
diff --git a/synapse/util/caches/expiringcache.py b/synapse/util/caches/expiringcache.py
index 48ca2d634..346669e4c 100644
--- a/synapse/util/caches/expiringcache.py
+++ b/synapse/util/caches/expiringcache.py
@@ -24,6 +24,9 @@ from synapse.util.caches import register_cache
 
 logger = logging.getLogger(__name__)
 
+SENTINEL = object()
+
+
 class ExpiringCache(object):
     def __init__(self, cache_name, clock, max_len=0, expiry_ms=0,
                  reset_expiry_on_get=False, iterable=False):
@@ -102,6 +105,16 @@ class ExpiringCache(object):
 
         return entry.value
 
+    def pop(self, key, default=None):
+        value = self._cache.pop(key, SENTINEL)
+        if value is SENTINEL:
+            return default
+
+        if self.iterable:
+            self._size_estimate -= len(value.value)
+
+        return value
+
     def __contains__(self, key):
         return key in self._cache

From 5230bc147159d6ffbe04d27e695064cac44166d2 Mon Sep 17 00:00:00 2001
From: Erik Johnston
Date: Fri, 21 Sep 2018 15:05:37 +0100
Subject: [PATCH 05/45] Newsfile

---
 changelog.d/3933.misc | 1 +
 1 file changed, 1 insertion(+)
 create mode 100644 changelog.d/3933.misc

diff --git a/changelog.d/3933.misc b/changelog.d/3933.misc
new file mode 100644
index 000000000..6545871f5
--- /dev/null
+++ b/changelog.d/3933.misc
@@ -0,0 +1 @@
+Add a cache to get_destination_retry_timings

From 19dc676d1aaf66a45c9a1810132952f7ae3ca2aa Mon Sep 17 00:00:00 2001
From: Erik Johnston
Date: Fri, 21 Sep 2018 16:25:42 +0100
Subject: [PATCH 06/45] Fix ExpiringCache.__len__ to be accurate

It used to try and produce an estimate, which was sometimes negative.
This caused metrics to be sad, so let's always just calculate it from
scratch.
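To see why the estimate could go wrong, consider the bookkeeping an
incremental counter needs. The sketch below is a minimal, self-contained
illustration (an invented toy class, not Synapse's actual ExpiringCache):
overwriting an entry without first subtracting the old entry's length is
exactly the kind of slip that lets a running estimate drift, eventually
below zero, whereas recomputing on demand is always right.

```python
from collections import OrderedDict


class IterableCacheSketch(object):
    """Toy 'iterable' cache: its length is the total item count across entries."""

    def __init__(self):
        self._cache = OrderedDict()  # key -> list of values

    def __setitem__(self, key, values):
        # No running counter to maintain. An incremental size estimate would
        # have to subtract the old entry's length before this assignment;
        # forgetting that on overwrite is how an estimate drifts out of sync.
        self._cache[key] = values

    def __len__(self):
        # O(number of entries) per call, but it can never disagree with
        # the cache's real contents.
        return sum(len(v) for v in self._cache.values())


cache = IterableCacheSketch()
cache["room1"] = ["ev1", "ev2", "ev3"]
cache["room1"] = ["ev4"]  # the overwrite silently discards three items
assert len(cache) == 1
```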
---
 synapse/util/caches/expiringcache.py | 21 +++++++++------------
 1 file changed, 9 insertions(+), 12 deletions(-)

diff --git a/synapse/util/caches/expiringcache.py b/synapse/util/caches/expiringcache.py
index 346669e4c..8ac56d85f 100644
--- a/synapse/util/caches/expiringcache.py
+++ b/synapse/util/caches/expiringcache.py
@@ -16,7 +16,7 @@
 import logging
 from collections import OrderedDict
 
-from six import iteritems
+from six import iteritems, itervalues
 
 from synapse.metrics.background_process_metrics import run_as_background_process
 from synapse.util.caches import register_cache
@@ -59,8 +59,6 @@ class ExpiringCache(object):
 
         self.iterable = iterable
 
-        self._size_estimate = 0
-
         self.metrics = register_cache("expiring", cache_name, self)
 
         if not self._expiry_ms:
@@ -79,16 +77,11 @@ class ExpiringCache(object):
         now = self._clock.time_msec()
         self._cache[key] = _CacheEntry(now, value)
 
-        if self.iterable:
-            self._size_estimate += len(value)
-
         # Evict if there are now too many items
         while self._max_len and len(self) > self._max_len:
             _key, value = self._cache.popitem(last=False)
             if self.iterable:
-                removed_len = len(value.value)
-                self.metrics.inc_evictions(removed_len)
-                self._size_estimate -= removed_len
+                self.metrics.inc_evictions(len(value.value))
             else:
                 self.metrics.inc_evictions()
 
@@ -111,7 +104,9 @@ class ExpiringCache(object):
             return default
 
         if self.iterable:
-            self._size_estimate -= len(value.value)
+            self.metrics.inc_evictions(len(value.value))
+        else:
+            self.metrics.inc_evictions()
 
         return value
 
@@ -149,7 +144,9 @@ class ExpiringCache(object):
         for k in keys_to_delete:
             value = self._cache.pop(k)
             if self.iterable:
-                self._size_estimate -= len(value.value)
+                self.metrics.inc_evictions(len(value.value))
+            else:
+                self.metrics.inc_evictions()
 
         logger.debug(
             "[%s] _prune_cache before: %d, after len: %d",
@@ -158,7 +155,7 @@
 
     def __len__(self):
         if self.iterable:
-            return self._size_estimate
+            return sum(len(entry.value) for entry in itervalues(self._cache))
         else:
             return len(self._cache)

From d4e0861ff94a809ad36c6b951c1e903457c44def Mon Sep 17 00:00:00 2001
From: Amber Brown
Date: Thu, 27 Sep 2018 00:23:21 +1000
Subject: [PATCH 07/45] Reduce the load on our CI (#3957)

* changelog

* reduce circleci config

* plus a handy script

* fix regex
---
 .circleci/config.yml | 28 ++++++++++++++++++++--------
 changelog.d/3957.misc | 1 +
 scripts-dev/next_github_number.sh | 9 +++++++++
 3 files changed, 30 insertions(+), 8 deletions(-)
 create mode 100644 changelog.d/3957.misc
 create mode 100755 scripts-dev/next_github_number.sh

diff --git a/.circleci/config.yml b/.circleci/config.yml
index 605430fb3..521aca22e 100644
--- a/.circleci/config.yml
+++ b/.circleci/config.yml
@@ -99,23 +99,35 @@ workflows:
   version: 2
   build:
     jobs:
-      - sytestpy2
-      - sytestpy2postgres
-      - sytestpy3
-      - sytestpy3postgres
+      - sytestpy2:
+          filters:
+            branches:
+              only: /develop|master|release-.*/
+      - sytestpy2postgres:
+          filters:
+            branches:
+              only: /develop|master|release-.*/
+      - sytestpy3:
+          filters:
+            branches:
+              only: /develop|master|release-.*/
+      - sytestpy3postgres:
+          filters:
+            branches:
+              only: /develop|master|release-.*/
       - sytestpy2merged:
          filters:
            branches:
-              ignore: /develop|master/
+              ignore: /develop|master|release-.*/
       - sytestpy2postgresmerged:
          filters:
            branches:
-              ignore: /develop|master/
+              ignore: /develop|master|release-.*/
      - sytestpy3merged:
          filters:
            branches:
-              ignore: /develop|master/
+              ignore: /develop|master|release-.*/
      - sytestpy3postgresmerged:
          filters:
            branches:
-              ignore: /develop|master/
+              ignore: /develop|master|release-.*/
diff --git a/changelog.d/3957.misc b/changelog.d/3957.misc
new file mode 100644
index 000000000..69d647f11
--- /dev/null
+++ b/changelog.d/3957.misc
@@ -0,0 +1 @@
+CircleCI now only runs merged jobs on PRs, and commit jobs on develop, master, and release branches.
diff --git a/scripts-dev/next_github_number.sh b/scripts-dev/next_github_number.sh
new file mode 100755
index 000000000..376280025
--- /dev/null
+++ b/scripts-dev/next_github_number.sh
@@ -0,0 +1,9 @@
+#!/bin/bash
+
+set -e
+
+# Fetch the current GitHub issue number, add one to it -- presto! The likely
+# next PR number.
+CURRENT_NUMBER=`curl -s "https://api.github.com/repos/matrix-org/synapse/issues?state=all&per_page=1" | jq -r ".[0].number"`
+CURRENT_NUMBER=$((CURRENT_NUMBER+1))
+echo $CURRENT_NUMBER
\ No newline at end of file

From 28781b65e7cb8b0d1fc409d0d78253662b49fe06 Mon Sep 17 00:00:00 2001
From: Neil Johnson
Date: Wed, 26 Sep 2018 16:16:41 +0100
Subject: [PATCH 08/45] fix #3854

---
 synapse/storage/monthly_active_users.py | 5 ++++-
 1 file changed, 4 insertions(+), 1 deletion(-)

diff --git a/synapse/storage/monthly_active_users.py b/synapse/storage/monthly_active_users.py
index 59580949f..0fe8c8e24 100644
--- a/synapse/storage/monthly_active_users.py
+++ b/synapse/storage/monthly_active_users.py
@@ -172,6 +172,10 @@ class MonthlyActiveUsersStore(SQLBaseStore):
             Deferred[bool]: True if a new entry was created, False if an
                 existing one was updated.
         """
+        # Am consciously deciding to lock the table on the basis that it ought
+        # never be a big table and alternative approaches (batching multiple
+        # upserts into a single txn) introduced a lot of extra complexity.
+        # See https://github.com/matrix-org/synapse/issues/3854 for more
         is_insert = yield self._simple_upsert(
             desc="upsert_monthly_active_user",
             table="monthly_active_users",
@@ -181,7 +185,6 @@ class MonthlyActiveUsersStore(SQLBaseStore):
             values={
                 "timestamp": int(self._clock.time_msec()),
             },
-            lock=False,
         )
         if is_insert:
             self.user_last_seen_monthly_active.invalidate((user_id,))

From d514608b5cc80cfe1f93a9953eb8264162c4a47b Mon Sep 17 00:00:00 2001
From: Neil Johnson
Date: Wed, 26 Sep 2018 16:19:53 +0100
Subject: [PATCH 09/45] towncrier

---
 changelog.d/3961.bugfix | 1 +
 1 file changed, 1 insertion(+)
 create mode 100644 changelog.d/3961.bugfix

diff --git a/changelog.d/3961.bugfix b/changelog.d/3961.bugfix
new file mode 100644
index 000000000..e46b5834a
--- /dev/null
+++ b/changelog.d/3961.bugfix
@@ -0,0 +1 @@
+Fix errors due to concurrent monthly_active_user upserts

From 607eec0456dde3b7e5b33c3e9297af234659caae Mon Sep 17 00:00:00 2001
From: Richard van der Hoff
Date: Tue, 25 Sep 2018 16:53:55 +0100
Subject: [PATCH 10/45] fix docstring for FederationClient.get_state_for_room

trivial fixes for docstring
---
 synapse/federation/federation_client.py | 8 ++++----
 1 file changed, 4 insertions(+), 4 deletions(-)

diff --git a/synapse/federation/federation_client.py b/synapse/federation/federation_client.py
index 5a92428f5..8bf1ad0c1 100644
--- a/synapse/federation/federation_client.py
+++ b/synapse/federation/federation_client.py
@@ -289,8 +289,7 @@ class FederationClient(FederationBase):
     @defer.inlineCallbacks
     @log_function
     def get_state_for_room(self, destination, room_id, event_id):
-        """Requests all of the `current` state PDUs for a given room from
-        a remote home server.
+        """Requests all of the room state at a given event from a remote home server.
 
         Args:
             destination (str): The remote homeserver to query for the state.
@@ -298,9 +297,10 @@
             event_id (str): The id of the event we want the state at.
 
         Returns:
-            Deferred: Results in a list of PDUs.
+            Deferred[Tuple[List[EventBase], List[EventBase]]]:
+                A list of events in the state, and a list of events in the auth chain
+                for the given event.
         """
-
         try:
             # First we try and ask for just the IDs, as thats far quicker if
             # we have most of the state and auth_chain already.

From ab59f3d8dab9b0a1c25c8f136ced9f28861a86ec Mon Sep 17 00:00:00 2001
From: Richard van der Hoff
Date: Wed, 26 Sep 2018 16:51:28 +0100
Subject: [PATCH 11/45] changelog

---
 changelog.d/3963.misc | 1 +
 1 file changed, 1 insertion(+)
 create mode 100644 changelog.d/3963.misc

diff --git a/changelog.d/3963.misc b/changelog.d/3963.misc
new file mode 100644
index 000000000..f1e0eaf18
--- /dev/null
+++ b/changelog.d/3963.misc
@@ -0,0 +1 @@
+fix docstring for FederationClient.get_state_for_room

From 9453c65948bce69f259c1ba1005565d86b1d8730 Mon Sep 17 00:00:00 2001
From: Richard van der Hoff
Date: Tue, 25 Sep 2018 17:38:54 +0100
Subject: [PATCH 12/45] remove spurious federation checks on localhost

There's really no point in checking for destinations called "localhost"
because there is nothing stopping people creating other DNS entries
which point to 127.0.0.1. The right fix for this is
https://github.com/matrix-org/synapse/issues/3953.

Blocking localhost, on the other hand, means that you get a surprise
when trying to connect a test server on localhost to an existing server
(with a 'normal' server_name).
---
 synapse/federation/transaction_queue.py | 38 ++++---------------------
 1 file changed, 7 insertions(+), 31 deletions(-)

diff --git a/synapse/federation/transaction_queue.py b/synapse/federation/transaction_queue.py
index 8cbf8c4f7..ae47aaae0 100644
--- a/synapse/federation/transaction_queue.py
+++ b/synapse/federation/transaction_queue.py
@@ -137,26 +137,6 @@ class TransactionQueue(object):
 
         self._processing_pending_presence = False
 
-    def can_send_to(self, destination):
-        """Can we send messages to the given server?
-
-        We can't send messages to ourselves. If we are running on localhost
-        then we can only federation with other servers running on localhost.
-        Otherwise we only federate with servers on a public domain.
-
-        Args:
-            destination(str): The server we are possibly trying to send to.
-        Returns:
-            bool: True if we can send to the server.
-        """
-
-        if destination == self.server_name:
-            return False
-        if self.server_name.startswith("localhost"):
-            return destination.startswith("localhost")
-        else:
-            return not destination.startswith("localhost")
-
     def notify_new_events(self, current_id):
         """This gets called when we have some new events we might want to
         send out to other servers.
@@ -279,10 +259,7 @@ class TransactionQueue(object):
         self._order += 1
 
         destinations = set(destinations)
-        destinations = set(
-            dest for dest in destinations if self.can_send_to(dest)
-        )
-
+        destinations.discard(self.server_name)
         logger.debug("Sending to: %s", str(destinations))
 
         if not destinations:
@@ -358,7 +335,7 @@ class TransactionQueue(object):
 
         for destinations, states in hosts_and_states:
             for destination in destinations:
-                if not self.can_send_to(destination):
+                if destination == self.server_name:
                     continue
 
                 self.pending_presence_by_dest.setdefault(
@@ -377,7 +354,8 @@ class TransactionQueue(object):
             content=content,
         )
 
-        if not self.can_send_to(destination):
+        if destination == self.server_name:
+            logger.info("Not sending EDU to ourselves")
             return
 
         sent_edus_counter.inc()
@@ -392,11 +370,9 @@ class TransactionQueue(object):
         self._attempt_new_transaction(destination)
 
     def send_device_messages(self, destination):
-        if destination == self.server_name or destination == "localhost":
-            return
-
-        if not self.can_send_to(destination):
-            return
+        if destination == self.server_name:
+            logger.info("Not sending device update to ourselves")
+            return
 
         self._attempt_new_transaction(destination)

From 0c4a99ea2d6046c815c8040d8d060795ab787b5f Mon Sep 17 00:00:00 2001
From: Richard van der Hoff
Date: Wed, 26 Sep 2018 16:54:54 +0100
Subject: [PATCH 13/45] changelog

---
 changelog.d/3964.feature | 1 +
 1 file changed, 1 insertion(+)
 create mode 100644 changelog.d/3964.feature

diff --git a/changelog.d/3964.feature b/changelog.d/3964.feature
new file mode 100644
index 000000000..599222eb5
--- /dev/null
+++ b/changelog.d/3964.feature
@@ -0,0 +1 @@
+Remove spurious check which made 'localhost' servers not work

From a87d419a85e1c966da5bb4fc08f3309eb78ecb7a Mon Sep 17 00:00:00 2001
From: Richard van der Hoff
Date: Wed, 26 Sep 2018 07:48:35 +0100
Subject: [PATCH 14/45] Run notify_app_services as a bg process

This ensures that its resource usage metrics get recorded somewhere
rather than getting lost.

(It also fixes an error when called from a nested logging context
which completes before the bg process)
---
 synapse/notifier.py | 8 ++++++--
 1 file changed, 6 insertions(+), 2 deletions(-)

diff --git a/synapse/notifier.py b/synapse/notifier.py
index f1d92c139..340b16ce2 100644
--- a/synapse/notifier.py
+++ b/synapse/notifier.py
@@ -24,9 +24,10 @@ from synapse.api.constants import EventTypes, Membership
 from synapse.api.errors import AuthError
 from synapse.handlers.presence import format_user_presence_state
 from synapse.metrics import LaterGauge
+from synapse.metrics.background_process_metrics import run_as_background_process
 from synapse.types import StreamToken
 from synapse.util.async_helpers import ObservableDeferred, timeout_deferred
-from synapse.util.logcontext import PreserveLoggingContext, run_in_background
+from synapse.util.logcontext import PreserveLoggingContext
 from synapse.util.logutils import log_function
 from synapse.util.metrics import Measure
 from synapse.visibility import filter_events_for_client
@@ -248,7 +249,10 @@ class Notifier(object):
     def _on_new_room_event(self, event, room_stream_id, extra_users=[]):
         """Notify any user streams that are interested in this room event"""
         # poke any interested application service.
-        run_in_background(self._notify_app_services, room_stream_id)
+        run_as_background_process(
+            "notify_app_services",
+            self._notify_app_services, room_stream_id,
+        )
 
         if self.federation_sender:
             self.federation_sender.notify_new_events(room_stream_id)

From 3c37c7e45b1befc6728fc0b26724bdea2e0845bd Mon Sep 17 00:00:00 2001
From: Richard van der Hoff
Date: Wed, 26 Sep 2018 17:01:30 +0100
Subject: [PATCH 15/45] changelog

---
 changelog.d/3965.misc | 1 +
 1 file changed, 1 insertion(+)
 create mode 100644 changelog.d/3965.misc

diff --git a/changelog.d/3965.misc b/changelog.d/3965.misc
new file mode 100644
index 000000000..e7e4a9c5a
--- /dev/null
+++ b/changelog.d/3965.misc
@@ -0,0 +1 @@
+Run notify_app_services as a bg process

From e70b4ce06920102e2460dfb65bc357e2d7e8b794 Mon Sep 17 00:00:00 2001
From: Richard van der Hoff
Date: Wed, 26 Sep 2018 07:56:06 +0100
Subject: [PATCH 16/45] Logging improvements

Some logging tweaks to help with debugging incoming federation
transactions
---
 changelog.d/3966.misc | 1 +
 synapse/handlers/federation.py | 4 ++++
 synapse/state/v1.py | 14 ++++++++++++--
 3 files changed, 17 insertions(+), 2 deletions(-)
 create mode 100644 changelog.d/3966.misc

diff --git a/changelog.d/3966.misc b/changelog.d/3966.misc
new file mode 100644
index 000000000..1e3c8e170
--- /dev/null
+++ b/changelog.d/3966.misc
@@ -0,0 +1 @@
+Improve the logging when handling a federation transaction
\ No newline at end of file
diff --git a/synapse/handlers/federation.py b/synapse/handlers/federation.py
index 2ccdc3bfa..a70ae8c83 100644
--- a/synapse/handlers/federation.py
+++ b/synapse/handlers/federation.py
@@ -572,6 +572,10 @@ class FederationHandler(BaseHandler):
                 })
                 seen_ids.add(e.event_id)
 
+        logger.info(
+            "[%s %s] persisting newly-received auth/state events %s",
+            room_id, event_id, [e["event"].event_id for e in event_infos]
+        )
         yield self._handle_new_events(origin, event_infos)
 
         try:
diff --git a/synapse/state/v1.py b/synapse/state/v1.py
index c95477d31..7a7157b35 100644
--- a/synapse/state/v1.py
+++ b/synapse/state/v1.py
@@ -65,10 +65,15 @@ def resolve_events_with_factory(state_sets, event_map, state_map_factory):
         for event_ids in itervalues(conflicted_state)
         for event_id in event_ids
     )
+    needed_event_count = len(needed_events)
     if event_map is not None:
         needed_events -= set(iterkeys(event_map))
 
-    logger.info("Asking for %d conflicted events", len(needed_events))
+    logger.info(
+        "Asking for %d/%d conflicted events",
+        len(needed_events),
+        needed_event_count,
+    )
 
     # dict[str, FrozenEvent]: a map from state event id to event. Only includes
     # the state events which are in conflict (and those in event_map)
@@ -85,11 +90,16 @@ def resolve_events_with_factory(state_sets, event_map, state_map_factory):
     )
 
     new_needed_events = set(itervalues(auth_events))
+    new_needed_event_count = len(new_needed_events)
     new_needed_events -= needed_events
     if event_map is not None:
         new_needed_events -= set(iterkeys(event_map))
 
-    logger.info("Asking for %d auth events", len(new_needed_events))
+    logger.info(
+        "Asking for %d/%d auth events",
+        len(new_needed_events),
+        new_needed_event_count,
+    )
 
     state_map_new = yield state_map_factory(new_needed_events)
     state_map.update(state_map_new)

From ae6ad4cf4190dbd2dadd72fc8131e4c139f8eb4a Mon Sep 17 00:00:00 2001
From: Richard van der Hoff <1389908+richvdh@users.noreply.github.com>
Date: Thu, 27 Sep 2018 11:22:25 +0100
Subject: [PATCH 17/45] docstrings and unittests for storage.state (#3958)

I spent ages trying to figure out how I was going mad...
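The shapes that kept tripping people up are easiest to pin down with
literals. A rough sketch of the two return values the corrected docstrings
describe (the state group id and event ids below are invented for
illustration):

```python
# get_state_groups_ids:
#   state_group_id -> ((event_type, state_key) -> event_id)
state_group_ids = {
    368: {
        ("m.room.create", ""): "$create_event:example.com",
        ("m.room.name", ""): "$name_event:example.com",
    },
}

# get_state_groups:
#   state_group_id -> list of the state events themselves
# (EventBase objects in Synapse; plain event-id strings stand in here)
state_groups = {
    368: ["$create_event:example.com", "$name_event:example.com"],
}

# The id-map's values name exactly the events in the event list.
assert set(state_group_ids[368].values()) == set(state_groups[368])
```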
---
 changelog.d/3958.misc | 1 +
 synapse/storage/state.py | 30 ++++++++++++++++++++--------
 tests/storage/test_state.py | 39 +++++++++++++++++++++++++++++++++++++
 3 files changed, 62 insertions(+), 8 deletions(-)
 create mode 100644 changelog.d/3958.misc

diff --git a/changelog.d/3958.misc b/changelog.d/3958.misc
new file mode 100644
index 000000000..5931d06dc
--- /dev/null
+++ b/changelog.d/3958.misc
@@ -0,0 +1 @@
+Fix docstrings and add tests for state store methods
diff --git a/synapse/storage/state.py b/synapse/storage/state.py
index 4b971efdb..3f4cbd61c 100644
--- a/synapse/storage/state.py
+++ b/synapse/storage/state.py
@@ -255,7 +255,17 @@ class StateGroupWorkerStore(EventsWorkerStore, SQLBaseStore):
         )
 
     @defer.inlineCallbacks
-    def get_state_groups_ids(self, room_id, event_ids):
+    def get_state_groups_ids(self, _room_id, event_ids):
+        """Get the event IDs of all the state for the state groups for the given events
+
+        Args:
+            _room_id (str): id of the room for these events
+            event_ids (iterable[str]): ids of the events
+
+        Returns:
+            Deferred[dict[int, dict[tuple[str, str], str]]]:
+                dict of state_group_id -> (dict of (type, state_key) -> event id)
+        """
         if not event_ids:
             defer.returnValue({})
 
@@ -270,7 +280,7 @@ class StateGroupWorkerStore(EventsWorkerStore, SQLBaseStore):
 
     @defer.inlineCallbacks
     def get_state_ids_for_group(self, state_group):
-        """Get the state IDs for the given state group
+        """Get the event IDs of all the state in the given state group
 
         Args:
             state_group (int)
@@ -286,7 +296,9 @@ class StateGroupWorkerStore(EventsWorkerStore, SQLBaseStore):
     def get_state_groups(self, room_id, event_ids):
         """ Get the state groups for the given list of event_ids
 
-        The return value is a dict mapping group names to lists of events.
+        Returns:
+            Deferred[dict[int, list[EventBase]]]:
+                dict of state_group_id -> list of state events.
         """
         if not event_ids:
             defer.returnValue({})
@@ -324,7 +336,9 @@ class StateGroupWorkerStore(EventsWorkerStore, SQLBaseStore):
             member events (if True), or to exclude member events (if False)
 
         Returns:
-            dictionary state_group -> (dict of (type, state_key) -> event id)
+        Returns:
+            Deferred[dict[int, dict[tuple[str, str], str]]]:
+                dict of state_group_id -> (dict of (type, state_key) -> event id)
         """
         results = {}
 
@@ -732,8 +746,8 @@ class StateGroupWorkerStore(EventsWorkerStore, SQLBaseStore):
                 If None, `types` filtering is applied to all events.
 
         Returns:
-            Deferred[dict[int, dict[(type, state_key), EventBase]]]
-            a dictionary mapping from state group to state dictionary.
+            Deferred[dict[int, dict[tuple[str, str], str]]]:
+                dict of state_group_id -> (dict of (type, state_key) -> event id)
         """
         if types is not None:
             non_member_types = [t for t in types if t[0] != EventTypes.Member]
@@ -788,8 +802,8 @@ class StateGroupWorkerStore(EventsWorkerStore, SQLBaseStore):
                 If None, `types` filtering is applied to all events.
 
         Returns:
-            Deferred[dict[int, dict[(type, state_key), EventBase]]]
-            a dictionary mapping from state group to state dictionary.
+            Deferred[dict[int, dict[tuple[str, str], str]]]:
+                dict of state_group_id -> (dict of (type, state_key) -> event id)
         """
         if types:
             types = frozenset(types)
diff --git a/tests/storage/test_state.py b/tests/storage/test_state.py
index b91096593..b9c5b39d5 100644
--- a/tests/storage/test_state.py
+++ b/tests/storage/test_state.py
@@ -74,6 +74,45 @@ class StateStoreTestCase(tests.unittest.TestCase):
             self.assertEqual(s1[t].event_id, s2[t].event_id)
         self.assertEqual(len(s1), len(s2))
 
+    @defer.inlineCallbacks
+    def test_get_state_groups_ids(self):
+        e1 = yield self.inject_state_event(
+            self.room, self.u_alice, EventTypes.Create, '', {}
+        )
+        e2 = yield self.inject_state_event(
+            self.room, self.u_alice, EventTypes.Name, '', {"name": "test room"}
+        )
+
+        state_group_map = yield self.store.get_state_groups_ids(self.room, [e2.event_id])
+        self.assertEqual(len(state_group_map), 1)
+        state_map = list(state_group_map.values())[0]
+        self.assertDictEqual(
+            state_map,
+            {
+                (EventTypes.Create, ''): e1.event_id,
+                (EventTypes.Name, ''): e2.event_id,
+            },
+        )
+
+    @defer.inlineCallbacks
+    def test_get_state_groups(self):
+        e1 = yield self.inject_state_event(
+            self.room, self.u_alice, EventTypes.Create, '', {}
+        )
+        e2 = yield self.inject_state_event(
+            self.room, self.u_alice, EventTypes.Name, '', {"name": "test room"}
+        )
+
+        state_group_map = yield self.store.get_state_groups(
+            self.room, [e2.event_id])
+        self.assertEqual(len(state_group_map), 1)
+        state_list = list(state_group_map.values())[0]
+
+        self.assertEqual(
+            {ev.event_id for ev in state_list},
+            {e1.event_id, e2.event_id},
+        )
+
     @defer.inlineCallbacks
     def test_get_state_for_event(self):

From 4a15a3e4d539dcea9a4a57e7cd800a926f2a17c3 Mon Sep 17 00:00:00 2001
From: Richard van der Hoff <1389908+richvdh@users.noreply.github.com>
Date: Thu, 27 Sep 2018 11:25:34 +0100
Subject: [PATCH 18/45] Include eventid in log lines when processing incoming
 federation transactions (#3959)

when processing incoming transactions, it can be hard to see what's
going on, because we process a bunch of stuff in parallel, and because
we may end up recursively working our way through a chain of three or
four events.

This commit creates a way to use logcontexts to add the relevant event
ids to the log lines.
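The mechanism itself is small: a nested context inherits from its parent
and appends a suffix to the parent's request identifier, so every log line
written inside the inner with block is tagged with the event id being
processed. A stripped-down sketch of just that behaviour (a toy class, not
Synapse's real LoggingContext, which also tracks CPU/db usage):

```python
class ToyContext(object):
    current = None  # stand-in for the thread-local "current context"

    def __init__(self, request):
        self.request = request

    def __enter__(self):
        self._previous, ToyContext.current = ToyContext.current, self
        return self

    def __exit__(self, *exc_info):
        ToyContext.current = self._previous


def nested_logging_context(suffix):
    # child request = parent request + "-" + suffix, as in the patch
    return ToyContext(ToyContext.current.request + "-" + suffix)


with ToyContext("PUT-42"):
    with nested_logging_context("$event1:example.com"):
        assert ToyContext.current.request == "PUT-42-$event1:example.com"
    assert ToyContext.current.request == "PUT-42"
```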
---
 changelog.d/3959.feature | 1 +
 synapse/federation/federation_server.py | 32 ++++++------
 synapse/handlers/federation.py | 65 +++++++++++++++----------
 synapse/util/logcontext.py | 41 ++++++++++++++--
 tests/test_federation.py | 28 ++++++-----
 tests/util/test_logcontext.py | 5 ++
 6 files changed, 115 insertions(+), 57 deletions(-)
 create mode 100644 changelog.d/3959.feature

diff --git a/changelog.d/3959.feature b/changelog.d/3959.feature
new file mode 100644
index 000000000..b3a4f37a8
--- /dev/null
+++ b/changelog.d/3959.feature
@@ -0,0 +1 @@
+Include eventid in log lines when processing incoming federation transactions
\ No newline at end of file
diff --git a/synapse/federation/federation_server.py b/synapse/federation/federation_server.py
index 9a571e4fc..819e8f733 100644
--- a/synapse/federation/federation_server.py
+++ b/synapse/federation/federation_server.py
@@ -46,6 +46,7 @@ from synapse.replication.http.federation import (
 from synapse.types import get_domain_from_id
 from synapse.util.async_helpers import Linearizer, concurrently_execute
 from synapse.util.caches.response_cache import ResponseCache
+from synapse.util.logcontext import nested_logging_context
 from synapse.util.logutils import log_function
 
 # when processing incoming transactions, we try to handle multiple rooms in
@@ -187,21 +188,22 @@ class FederationServer(FederationBase):
 
             for pdu in pdus_by_room[room_id]:
                 event_id = pdu.event_id
-                try:
-                    yield self._handle_received_pdu(
-                        origin, pdu
-                    )
-                    pdu_results[event_id] = {}
-                except FederationError as e:
-                    logger.warn("Error handling PDU %s: %s", event_id, e)
-                    pdu_results[event_id] = {"error": str(e)}
-                except Exception as e:
-                    f = failure.Failure()
-                    pdu_results[event_id] = {"error": str(e)}
-                    logger.error(
-                        "Failed to handle PDU %s: %s",
-                        event_id, f.getTraceback().rstrip(),
-                    )
+                with nested_logging_context(event_id):
+                    try:
+                        yield self._handle_received_pdu(
+                            origin, pdu
+                        )
+                        pdu_results[event_id] = {}
+                    except FederationError as e:
+                        logger.warn("Error handling PDU %s: %s", event_id, e)
+                        pdu_results[event_id] = {"error": str(e)}
+                    except Exception as e:
+                        f = failure.Failure()
+                        pdu_results[event_id] = {"error": str(e)}
+                        logger.error(
+                            "Failed to handle PDU %s: %s",
+                            event_id, f.getTraceback().rstrip(),
+                        )
 
         yield concurrently_execute(
             process_pdus_for_room, pdus_by_room.keys(),
diff --git a/synapse/handlers/federation.py b/synapse/handlers/federation.py
index 2ccdc3bfa..993546387 100644
--- a/synapse/handlers/federation.py
+++ b/synapse/handlers/federation.py
@@ -339,14 +339,18 @@ class FederationHandler(BaseHandler):
                     "[%s %s] Requesting state at missing prev_event %s",
                     room_id, event_id, p,
                 )
-                state, got_auth_chain = (
-                    yield self.federation_client.get_state_for_room(
-                        origin, room_id, p,
+
+                with logcontext.nested_logging_context(p):
+                    state, got_auth_chain = (
+                        yield self.federation_client.get_state_for_room(
+                            origin, room_id, p,
+                        )
                     )
-                )
-                auth_chains.update(got_auth_chain)
-                state_group = {(x.type, x.state_key): x.event_id for x in state}
-                state_groups.append(state_group)
+                    auth_chains.update(got_auth_chain)
+                    state_group = {
+                        (x.type, x.state_key): x.event_id for x in state
+                    }
+                    state_groups.append(state_group)
 
         # Resolve any conflicting state
         def fetch(ev_ids):
@@ -483,20 +487,21 @@ class FederationHandler(BaseHandler):
                 "[%s %s] Handling received prev_event %s",
                 room_id, event_id, ev.event_id,
             )
-            try:
-                yield self.on_receive_pdu(
-                    origin,
-                    ev,
-                    sent_to_us_directly=False,
-                )
-            except FederationError as e:
-                if e.code == 403:
-                    logger.warn(
-                        "[%s %s] Received prev_event %s failed history check.",
-                        room_id, event_id, ev.event_id,
-                    )
-                else:
-                    raise
+            with logcontext.nested_logging_context(ev.event_id):
+                try:
+                    yield self.on_receive_pdu(
+                        origin,
+                        ev,
+                        sent_to_us_directly=False,
+                    )
+                except FederationError as e:
+                    if e.code == 403:
+                        logger.warn(
+                            "[%s %s] Received prev_event %s failed history check.",
+                            room_id, event_id, ev.event_id,
+                        )
+                    else:
+                        raise
@@ -1135,7 +1140,8 @@ class FederationHandler(BaseHandler):
             try:
                 logger.info("Processing queued PDU %s which was received "
                             "while we were joining %s", p.event_id, p.room_id)
-                yield self.on_receive_pdu(origin, p, sent_to_us_directly=True)
+                with logcontext.nested_logging_context(p.event_id):
+                    yield self.on_receive_pdu(origin, p, sent_to_us_directly=True)
             except Exception as e:
                 logger.warn(
                     "Error handling queued PDU %s from %s: %s",
@@ -1581,15 +1587,22 @@ class FederationHandler(BaseHandler):
 
         Notifies about the events where appropriate.
         """
-        contexts = yield logcontext.make_deferred_yieldable(defer.gatherResults(
-            [
-                logcontext.run_in_background(
-                    self._prep_event,
+
+        @defer.inlineCallbacks
+        def prep(ev_info):
+            event = ev_info["event"]
+            with logcontext.nested_logging_context(suffix=event.event_id):
+                res = yield self._prep_event(
                     origin,
-                    ev_info["event"],
+                    event,
                     state=ev_info.get("state"),
                     auth_events=ev_info.get("auth_events"),
                 )
+            defer.returnValue(res)
+
+        contexts = yield logcontext.make_deferred_yieldable(defer.gatherResults(
+            [
+                logcontext.run_in_background(prep, ev_info)
                 for ev_info in event_infos
             ], consumeErrors=True,
        ))
diff --git a/synapse/util/logcontext.py b/synapse/util/logcontext.py
index a0c2d3761..89224b26c 100644
--- a/synapse/util/logcontext.py
+++ b/synapse/util/logcontext.py
@@ -200,7 +200,7 @@ class LoggingContext(object):
 
     sentinel = Sentinel()
 
-    def __init__(self, name=None, parent_context=None):
+    def __init__(self, name=None, parent_context=None, request=None):
         self.previous_context = LoggingContext.current_context()
         self.name = name
 
@@ -218,6 +218,13 @@ class LoggingContext(object):
 
         self.parent_context = parent_context
 
+        if self.parent_context is not None:
+            self.parent_context.copy_to(self)
+
+        if request is not None:
+            # the request param overrides the request from the parent context
+            self.request = request
+
     def __str__(self):
         return "%s@%x" % (self.name, id(self))
 
@@ -256,9 +263,6 @@ class LoggingContext(object):
         )
         self.alive = True
 
-        if self.parent_context is not None:
-            self.parent_context.copy_to(self)
-
         return self
 
     def __exit__(self, type, value, traceback):
@@ -439,6 +443,35 @@ class PreserveLoggingContext(object):
         )
 
 
+def nested_logging_context(suffix, parent_context=None):
+    """Creates a new logging context as a child of another.
+
+    The nested logging context will have a 'request' made up of the parent context's
+    request, plus the given suffix.
+
+    CPU/db usage stats will be added to the parent context's on exit.
+
+    Normal usage looks like:
+
+        with nested_logging_context(suffix):
+            # ... do stuff
+
+    Args:
+        suffix (str): suffix to add to the parent context's 'request'.
+        parent_context (LoggingContext|None): parent context. Will use the current context
+            if None.
+
+    Returns:
+        LoggingContext: new logging context.
+    """
+    if parent_context is None:
+        parent_context = LoggingContext.current_context()
+    return LoggingContext(
+        parent_context=parent_context,
+        request=parent_context.request + "-" + suffix,
+    )
+
+
 def preserve_fn(f):
     """Function decorator which wraps the function with run_in_background"""
     def g(*args, **kwargs):
diff --git a/tests/test_federation.py b/tests/test_federation.py
index 2540604fc..ff55c7a62 100644
--- a/tests/test_federation.py
+++ b/tests/test_federation.py
@@ -6,6 +6,7 @@ from twisted.internet.defer import maybeDeferred, succeed
 from synapse.events import FrozenEvent
 from synapse.types import Requester, UserID
 from synapse.util import Clock
+from synapse.util.logcontext import LoggingContext
 
 from tests import unittest
 from tests.server import ThreadedMemoryReactorClock, setup_test_homeserver
@@ -117,9 +118,10 @@ class MessageAcceptTests(unittest.TestCase):
             }
         )
 
-        d = self.handler.on_receive_pdu(
-            "test.serv", lying_event, sent_to_us_directly=True
-        )
+        with LoggingContext(request="lying_event"):
+            d = self.handler.on_receive_pdu(
+                "test.serv", lying_event, sent_to_us_directly=True
+            )
 
         # Step the reactor, so the database fetches come back
         self.reactor.advance(1)
@@ -209,11 +211,12 @@ class MessageAcceptTests(unittest.TestCase):
             }
         )
 
-        d = self.handler.on_receive_pdu(
-            "test.serv", good_event, sent_to_us_directly=True
-        )
-        self.reactor.advance(1)
-        self.assertEqual(self.successResultOf(d), None)
+        with LoggingContext(request="good_event"):
+            d = self.handler.on_receive_pdu(
+                "test.serv", good_event, sent_to_us_directly=True
+            )
+            self.reactor.advance(1)
+            self.assertEqual(self.successResultOf(d), None)
 
         bad_event = FrozenEvent(
             {
@@ -230,10 +233,11 @@ class MessageAcceptTests(unittest.TestCase):
             }
         )
 
-        d = self.handler.on_receive_pdu(
-            "test.serv", bad_event, sent_to_us_directly=True
-        )
-        self.reactor.advance(1)
+        with LoggingContext(request="bad_event"):
+            d = self.handler.on_receive_pdu(
+                "test.serv", bad_event, sent_to_us_directly=True
+            )
+            self.reactor.advance(1)
 
         extrem = maybeDeferred(
             self.homeserver.datastore.get_latest_event_ids_in_room, self.room_id
diff --git a/tests/util/test_logcontext.py b/tests/util/test_logcontext.py
index 4633db77b..8adaee3c8 100644
--- a/tests/util/test_logcontext.py
+++ b/tests/util/test_logcontext.py
@@ -159,6 +159,11 @@ class LoggingContextTestCase(unittest.TestCase):
             self.assertEqual(r, "bum")
             self._check_test_key("one")
 
+    def test_nested_logging_context(self):
+        with LoggingContext(request="foo"):
+            nested_context = logcontext.nested_logging_context(suffix="bar")
+            self.assertEqual(nested_context.request, "foo-bar")
+
     # a function which returns a deferred which has been "called", but
     # which had a function which returned another incomplete deferred on

From e3c159863d72ba1628394497bba45dd96b9cc1ac Mon Sep 17 00:00:00 2001
From: Richard van der Hoff
Date: Wed, 26 Sep 2018 08:09:07 +0100
Subject: [PATCH 19/45] Clarifications in FederationHandler

* add some comments on things that look a bit bogus

* rename this `state` variable to avoid confusion with the `state` used
  elsewhere in this function. (There was no actual conflict, but it was
  a confusing bit of spaghetti.)
---
 synapse/handlers/federation.py | 13 +++++++++++--
 1 file changed, 11 insertions(+), 2 deletions(-)

diff --git a/synapse/handlers/federation.py b/synapse/handlers/federation.py
index 128926e71..6793f9b6c 100644
--- a/synapse/handlers/federation.py
+++ b/synapse/handlers/federation.py
@@ -341,14 +341,23 @@ class FederationHandler(BaseHandler):
                 )
 
                 with logcontext.nested_logging_context(p):
-                    state, got_auth_chain = (
+                    # XXX if any of the missing prevs share missing state or auth
+                    # events, we'll end up requesting those missing events for
+                    # *each* missing prev, contributing to the hammering of /event
+                    # as per https://github.com/matrix-org/synapse/issues/2164.
+                    remote_state, got_auth_chain = (
                         yield self.federation_client.get_state_for_room(
                             origin, room_id, p,
                         )
                     )
+
+                    # XXX hrm I'm not convinced that duplicate events will compare
+                    # for equality, so I'm not sure this does what the author
+                    # hoped.
                     auth_chains.update(got_auth_chain)
+
                     state_group = {
-                        (x.type, x.state_key): x.event_id for x in state
+                        (x.type, x.state_key): x.event_id for x in remote_state
                     }
                     state_groups.append(state_group)

From ad8e137062deb70ab39bcade147e943abbeb6839 Mon Sep 17 00:00:00 2001
From: Richard van der Hoff
Date: Wed, 26 Sep 2018 17:43:30 +0100
Subject: [PATCH 20/45] changelog

---
 changelog.d/3967.misc | 1 +
 1 file changed, 1 insertion(+)
 create mode 100644 changelog.d/3967.misc

diff --git a/changelog.d/3967.misc b/changelog.d/3967.misc
new file mode 100644
index 000000000..dc808aec7
--- /dev/null
+++ b/changelog.d/3967.misc
@@ -0,0 +1 @@
+Clarifications in FederationHandler

From 28223841e05a77a44ec2c0b29d1e930c68974913 Mon Sep 17 00:00:00 2001
From: Richard van der Hoff
Date: Wed, 26 Sep 2018 19:17:36 +0100
Subject: [PATCH 21/45] more comments

---
 synapse/federation/federation_client.py | 2 --
 synapse/handlers/federation.py | 7 +++----
 2 files changed, 3 insertions(+), 6 deletions(-)

diff --git a/synapse/federation/federation_client.py b/synapse/federation/federation_client.py
index 8bf1ad0c1..d05ed91d6 100644
--- a/synapse/federation/federation_client.py
+++ b/synapse/federation/federation_client.py
@@ -209,8 +209,6 @@ class FederationClient(FederationBase):
         Will attempt to get the PDU from each destination in the list until
         one succeeds.
 
-        This will persist the PDU locally upon receipt.
-
         Args:
             destinations (list): Which home servers to query
             event_id (str): event to fetch
diff --git a/synapse/handlers/federation.py b/synapse/handlers/federation.py
index 6793f9b6c..38bebbf59 100644
--- a/synapse/handlers/federation.py
+++ b/synapse/handlers/federation.py
@@ -341,10 +341,9 @@ class FederationHandler(BaseHandler):
                 )
 
                 with logcontext.nested_logging_context(p):
-                    # XXX if any of the missing prevs share missing state or auth
-                    # events, we'll end up requesting those missing events for
-                    # *each* missing prev, contributing to the hammering of /event
-                    # as per https://github.com/matrix-org/synapse/issues/2164.
+                    # note that if any of the missing prevs share missing state or
+                    # auth events, the requests to fetch those events are deduped
+                    # by the get_pdu_cache in federation_client.
                     remote_state, got_auth_chain = (
                         yield self.federation_client.get_state_for_room(
                             origin, room_id, p,

From a215b698c43f4cfe8a2fdb9c160c8ecd1c1297c5 Mon Sep 17 00:00:00 2001
From: Richard van der Hoff
Date: Wed, 26 Sep 2018 09:52:56 +0100
Subject: [PATCH 22/45] Fix "unhashable type: 'list'" exception in federation
 handling

get_state_groups returns a map from state_group_id to a list of
FrozenEvents, so was very much the wrong thing to be putting as one of
the entries in the list passed to resolve_events_with_factory (which
expects maps from (event_type, state_key) to event id).

We actually want get_state_groups_ids().values() rather than
get_state_groups().

This fixes the main problem in #3923, but there are other problems with
this bit of code which get discovered once you do so.
---
 synapse/handlers/federation.py | 19 ++++++++++++-------
 1 file changed, 12 insertions(+), 7 deletions(-)

diff --git a/synapse/handlers/federation.py b/synapse/handlers/federation.py
index 38bebbf59..2d6b8edec 100644
--- a/synapse/handlers/federation.py
+++ b/synapse/handlers/federation.py
@@ -106,7 +106,7 @@ class FederationHandler(BaseHandler):
 
         self.hs = hs
 
-        self.store = hs.get_datastore()
+        self.store = hs.get_datastore()  # type: synapse.storage.DataStore
         self.federation_client = hs.get_federation_client()
         self.state_handler = hs.get_state_handler()
         self.server_name = hs.hostname
@@ -325,12 +325,17 @@ class FederationHandler(BaseHandler):
 
         # Calculate the state of the previous events, and
         # de-conflict them to find the current state.
-        state_groups = []
         auth_chains = set()
         try:
             # Get the state of the events we know about
-            ours = yield self.store.get_state_groups(room_id, list(seen))
-            state_groups.append(ours)
+            ours = yield self.store.get_state_groups_ids(room_id, seen)
+
+            # state_maps is a list of mappings from (type, state_key) to event_id
+            # type: list[dict[tuple[str, str], str]]
+            state_maps = list(ours.values())
+
+            # we don't need this any more, let's delete it.
+            del ours
 
             # Ask the remote server for the states we don't
             # know about
@@ -355,10 +360,10 @@ class FederationHandler(BaseHandler):
                     # hoped.
                     auth_chains.update(got_auth_chain)
 
-                    state_group = {
+                    remote_state_map = {
                         (x.type, x.state_key): x.event_id for x in remote_state
                     }
-                    state_groups.append(state_group)
+                    state_maps.append(remote_state_map)
 
         # Resolve any conflicting state
         def fetch(ev_ids):
@@ -368,7 +373,7 @@ class FederationHandler(BaseHandler):
 
             room_version = yield self.store.get_room_version(room_id)
             state_map = yield resolve_events_with_factory(
-                room_version, state_groups, {event_id: pdu}, fetch
+                room_version, state_maps, {event_id: pdu}, fetch,
             )
 
             state = (yield self.store.get_events(state_map.values())).values()

From bd61c82bdf97460a33080bcb6b2c836616a3b415 Mon Sep 17 00:00:00 2001
From: Richard van der Hoff
Date: Wed, 26 Sep 2018 12:16:13 +0100
Subject: [PATCH 23/45] Include state from remote servers in pdu handling

If we've fetched state events from remote servers in order to resolve
the state for a new event, we need to actually pass those events into
resolve_events_with_factory (so that it can do the state res) and then
persist the ones we need - otherwise other bits of the codebase get
confused about why we have state groups pointing to non-existent events.
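The supporting pattern here is a single event_map: state resolution works
on event ids, so every event fetched along the way is recorded in one
dict, which lets the caller turn the resolved id-map back into real events
(and know which ones to persist). A rough sketch of that flow, with toy
ids and strings standing in for the store and for PDU objects:

```python
# starts out holding just the newly-received PDU
event_map = {"$new_event": "<pdu $new_event>"}


def fetch(ev_ids):
    """Stand-in for the store's get_events(): resolve ids to event objects,
    and record everything fetched in event_map for use afterwards."""
    fetched = {ev_id: "<pdu %s>" % (ev_id,) for ev_id in ev_ids}
    event_map.update(fetched)  # the crucial addition
    return fetched


# state resolution hands back (type, state_key) -> event_id ...
resolved = {("m.room.create", ""): "$create", ("m.room.name", ""): "$name"}
fetch(resolved.values())  # ... fetching any ids it needed along the way

# ... so the caller can now materialise the actual state events
state = [event_map[e] for e in resolved.values()]
assert state == ["<pdu $create>", "<pdu $name>"]
```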
---
 synapse/handlers/federation.py | 23 +++++++++++++++++++----
 1 file changed, 19 insertions(+), 4 deletions(-)

diff --git a/synapse/handlers/federation.py b/synapse/handlers/federation.py
index 2d6b8edec..cdad565d0 100644
--- a/synapse/handlers/federation.py
+++ b/synapse/handlers/federation.py
@@ -326,6 +326,9 @@ class FederationHandler(BaseHandler):
         # Calculate the state of the previous events, and
         # de-conflict them to find the current state.
         auth_chains = set()
+        event_map = {
+            event_id: pdu,
+        }
         try:
             # Get the state of the events we know about
             ours = yield self.store.get_state_groups_ids(room_id, seen)
@@ -365,18 +368,30 @@ class FederationHandler(BaseHandler):
                     }
                     state_maps.append(remote_state_map)
 
+                    for x in remote_state:
+                        event_map[x.event_id] = x
+
             # Resolve any conflicting state
+            @defer.inlineCallbacks
             def fetch(ev_ids):
-                return self.store.get_events(
-                    ev_ids, get_prev_content=False, check_redacted=False
+                fetched = yield self.store.get_events(
+                    ev_ids, get_prev_content=False, check_redacted=False,
                 )
+                # add any events we fetch here to the `event_map` so that we
+                # can use them to build the state event list below.
+                event_map.update(fetched)
+                defer.returnValue(fetched)
 
             room_version = yield self.store.get_room_version(room_id)
             state_map = yield resolve_events_with_factory(
-                room_version, state_maps, {event_id: pdu}, fetch,
+                room_version, state_maps, event_map, fetch,
             )
 
-            state = (yield self.store.get_events(state_map.values())).values()
+            # we need to give _process_received_pdu the actual state events
+            # rather than event ids, so generate that now.
+            state = [
+                event_map[e] for e in six.itervalues(state_map)
+            ]
             auth_chain = list(auth_chains)
         except Exception:
             logger.warn(

From 333bee27f53916bf5354a39a79aa468967730326 Mon Sep 17 00:00:00 2001
From: Richard van der Hoff
Date: Wed, 26 Sep 2018 19:49:59 +0100
Subject: [PATCH 24/45] Include event when resolving state for missing prevs

If we have a forward extremity for a room as `E`, and you receive `A`,
`B`, s.t. `A -> B -> E`, and `B` also points to an unknown event `X`,
then we need to do state res between `X` and `E`.

When that happens, we need to make sure we include `X` in the state that
goes into the state res alg.

Fixes #3934.
---
 synapse/handlers/federation.py | 18 ++++++++++++++++--
 1 file changed, 16 insertions(+), 2 deletions(-)

diff --git a/synapse/handlers/federation.py b/synapse/handlers/federation.py
index cdad565d0..d05b63673 100644
--- a/synapse/handlers/federation.py
+++ b/synapse/handlers/federation.py
@@ -323,8 +323,8 @@ class FederationHandler(BaseHandler):
                 affected=pdu.event_id,
             )
 
-        # Calculate the state of the previous events, and
-        # de-conflict them to find the current state.
+        # Calculate the state after each of the previous events, and
+        # resolve them to find the correct state at the current event.
         auth_chains = set()
         event_map = {
             event_id: pdu,
@@ -358,6 +358,20 @@ class FederationHandler(BaseHandler):
                     )
                 )
 
+                # we want the state *after* p; get_state_for_room returns the
+                # state *before* p.
+                remote_event = yield self.federation_client.get_pdu(
+                    [origin], p, outlier=True,
+                )
+
+                if remote_event is None:
+                    raise Exception(
+                        "Unable to get missing prev_event %s" % (p, )
+                    )
+
+                if remote_event.is_state():
+                    remote_state.append(remote_event)
+
                 # XXX hrm I'm not convinced that duplicate events will compare
                 # for equality, so I'm not sure this does what the author
                 # hoped.

From 948a6d877606deb4ccc8e42fd25f50e2edcc068d Mon Sep 17 00:00:00 2001
From: Richard van der Hoff
Date: Wed, 26 Sep 2018 19:56:51 +0100
Subject: [PATCH 25/45] changelog

---
 changelog.d/3968.bugfix | 1 +
 1 file changed, 1 insertion(+)
 create mode 100644 changelog.d/3968.bugfix

diff --git a/changelog.d/3968.bugfix b/changelog.d/3968.bugfix
new file mode 100644
index 000000000..18d43cd64
--- /dev/null
+++ b/changelog.d/3968.bugfix
@@ -0,0 +1 @@
+Fix exceptions when processing incoming events over federation
\ No newline at end of file

From 0d36fe35634cc44212073f2f6fab764ee7d8c0a9 Mon Sep 17 00:00:00 2001
From: Michael Kaye <1917473+michaelkaye@users.noreply.github.com>
Date: Tue, 25 Sep 2018 09:43:21 +0100
Subject: [PATCH 26/45] Build and push docker image to hub

---
 .circleci/config.yml | 11 +++++++++++
 1 file changed, 11 insertions(+)

diff --git a/.circleci/config.yml b/.circleci/config.yml
index 521aca22e..ff9d2a0c2 100644
--- a/.circleci/config.yml
+++ b/.circleci/config.yml
@@ -1,5 +1,12 @@
 version: 2
 jobs:
+  dockerhubupload:
+    machine: true
+    steps:
+      - checkout
+      - run: docker build -f docker/Dockerfile -t matrixdotorg/synapse:$CIRCLE_TAG .
+      - run: docker login --username matrixdotorg --password $DOCKER_HUB_PASSWORD
+      - run: docker push
   sytestpy2:
     machine: true
     steps:
@@ -131,3 +138,7 @@ workflows:
         filters:
           branches:
             ignore: /develop|master|release-.*/
+    - dockerhub:
+        filters:
+          tags:
+            only: /v[0-9].[0-9]+.[0-9]+(.[0-9]+)?/

From 3852c2bc3900813affe7adf761238694fb6a1fd4 Mon Sep 17 00:00:00 2001
From: Michael Kaye <1917473+michaelkaye@users.noreply.github.com>
Date: Thu, 27 Sep 2018 12:00:01 +0100
Subject: [PATCH 27/45] Fix typo

---
 .circleci/config.yml | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/.circleci/config.yml b/.circleci/config.yml
index ff9d2a0c2..e404818a5 100644
--- a/.circleci/config.yml
+++ b/.circleci/config.yml
@@ -138,7 +138,7 @@ workflows:
         filters:
           branches:
             ignore: /develop|master|release-.*/
-    - dockerhub:
+    - dockerhubupload:
        filters:
          tags:
            only: /v[0-9].[0-9]+.[0-9]+(.[0-9]+)?/

From 73a089f461dce236ce84054e591eff873200dbbb Mon Sep 17 00:00:00 2001
From: Michael Kaye <1917473+michaelkaye@users.noreply.github.com>
Date: Tue, 25 Sep 2018 10:29:38 +0100
Subject: [PATCH 28/45] Make username configurable in the UI too

---
 .circleci/config.yml | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/.circleci/config.yml b/.circleci/config.yml
index e404818a5..9a4347718 100644
--- a/.circleci/config.yml
+++ b/.circleci/config.yml
@@ -5,7 +5,7 @@ jobs:
     steps:
       - checkout
       - run: docker build -f docker/Dockerfile -t matrixdotorg/synapse:$CIRCLE_TAG .
-      - run: docker login --username matrixdotorg --password $DOCKER_HUB_PASSWORD
+      - run: docker login --username $DOCKER_HUB_USERNAME --password $DOCKER_HUB_PASSWORD
       - run: docker push
   sytestpy2:
     machine: true

From a3c11f73204857a363f78a26412f5aa46adf8650 Mon Sep 17 00:00:00 2001
From: Michael Kaye <1917473+michaelkaye@users.noreply.github.com>
Date: Tue, 25 Sep 2018 10:37:08 +0100
Subject: [PATCH 29/45] Also, don't run this job on any branches

---
 .circleci/config.yml | 4 +++-
 1 file changed, 3 insertions(+), 1 deletion(-)

diff --git a/.circleci/config.yml b/.circleci/config.yml
index 9a4347718..cb329aa24 100644
--- a/.circleci/config.yml
+++ b/.circleci/config.yml
@@ -141,4 +141,6 @@ workflows:
     - dockerhubupload:
         filters:
           tags:
-            only: /v[0-9].[0-9]+.[0-9]+(.[0-9]+)?/
+            only: /^v[0-9].[0-9]+.[0-9]+(.[0-9]+)?/
+          branches:
+            ignore: /.*/

From 8607397ea763cedaafe51124063504436a268fa9 Mon Sep 17 00:00:00 2001
From: Michael Kaye <1917473+michaelkaye@users.noreply.github.com>
Date: Thu, 27 Sep 2018 11:44:21 +0100
Subject: [PATCH 30/45] Make a ":latest" tag, and a SHA1 commit ID one too.

Latest is horrible and makes debugging what has happened anywhere a
nightmare. We push a latest because of demand for it, but we'll also
push a SHA1 commit id so those wanting to know what they're running
(and be able to roll back if required) can use those instead.

Note that latest here is defined as "most recent master commit" not
"most recent released version", as the actual semantics of making
latest correct while still being able to build bugfixed releases of
previous versions is just ARGH. So we define it as "master" not
"latest release".
---
 .circleci/config.yml | 19 ++++++++++++++++---
 1 file changed, 16 insertions(+), 3 deletions(-)

diff --git a/.circleci/config.yml b/.circleci/config.yml
index cb329aa24..3cb14793f 100644
--- a/.circleci/config.yml
+++ b/.circleci/config.yml
@@ -1,12 +1,21 @@
 version: 2
 jobs:
-  dockerhubupload:
+  dockerhubuploadrelease:
     machine: true
     steps:
       - checkout
       - run: docker build -f docker/Dockerfile -t matrixdotorg/synapse:$CIRCLE_TAG .
       - run: docker login --username $DOCKER_HUB_USERNAME --password $DOCKER_HUB_PASSWORD
-      - run: docker push
+      - run: docker push matrixdotorg/synapse:$CIRCLE_TAG
+  dockerhubuploadlatest:
+    machine: true
+    steps:
+      - checkout
+      - run: docker build -f docker/Dockerfile -t matrixdotorg/synapse:$CIRCLE_SHA1 .
+      - run: docker login --username $DOCKER_HUB_USERNAME --password $DOCKER_HUB_PASSWORD
+      - run: docker tag matrixdotorg/synapse:$CIRCLE_SHA1 matrixdotorg/synapse:latest
+      - run: docker push matrixdotorg/synapse:$CIRCLE_SHA1
+      - run: docker push matrixdotorg/synapse:latest
   sytestpy2:
     machine: true
     steps:
@@ -147,9 +156,13 @@ workflows:
         filters:
          branches:
            ignore: /develop|master|release-.*/
     - dockerhubuploadrelease:
         filters:
           tags:
             only: /^v[0-9].[0-9]+.[0-9]+(.[0-9]+)?/
           branches:
             ignore: /.*/
+    - dockerhubuploadlatest:
+        filters:
+          branches:
+            only: master

From 74bbdd0412b8e35994c82cbbccad4af2122039fb Mon Sep 17 00:00:00 2001
From: Michael Kaye <1917473+michaelkaye@users.noreply.github.com>
Date: Thu, 27 Sep 2018 11:56:18 +0100
Subject: [PATCH 31/45] Do the changelog.d dance

---
 changelog.d/3946.misc | 1 +
 1 file changed, 1 insertion(+)
 create mode 100644 changelog.d/3946.misc

diff --git a/changelog.d/3946.misc b/changelog.d/3946.misc
new file mode 100644
index 000000000..803857a29
--- /dev/null
+++ b/changelog.d/3946.misc
@@ -0,0 +1 @@
+Automate pushes to docker hub

From dc5db01ff25dac6ec74ceea9b4d815a8c43cd7dd Mon Sep 17 00:00:00 2001
From: Schnuffle
Date: Thu, 27 Sep 2018 13:38:50 +0200
Subject: [PATCH 32/45] Replaced all occurrences of e.message with str(e)

Signed-off-by: Schnuffle
---
 scripts-dev/dump_macaroon.py | 2 +-
 synapse/api/filtering.py | 2 +-
 synapse/app/__init__.py | 2 +-
 synapse/app/appservice.py | 2 +-
 synapse/app/client_reader.py | 2 +-
 synapse/app/event_creator.py | 2 +-
 synapse/app/federation_reader.py | 2 +-
 synapse/app/federation_sender.py | 2 +-
 synapse/app/frontend_proxy.py | 2 +-
 synapse/app/homeserver.py | 4 ++--
 synapse/app/media_repository.py | 2 +-
 synapse/app/pusher.py | 2 +-
 synapse/app/synchrotron.py | 2 +-
 synapse/app/user_dir.py | 2 +-
 synapse/config/__main__.py | 2 +-
 synapse/handlers/e2e_keys.py | 2 +-
 synapse/handlers/profile.py | 2 +-
 tests/unittest.py | 2 +-
 18 files changed, 19 insertions(+), 19 deletions(-)

diff --git a/scripts-dev/dump_macaroon.py b/scripts-dev/dump_macaroon.py
index 6e45be75d..fcc556883 100755
--- a/scripts-dev/dump_macaroon.py
+++ b/scripts-dev/dump_macaroon.py
@@ -21,4 +21,4 @@ try:
     verifier.verify(macaroon, key)
     print "Signature is correct"
 except Exception as e:
-    print e.message
+    print str(e)
diff --git a/synapse/api/filtering.py b/synapse/api/filtering.py
index a31a9a17e..eed8c67e6 100644
--- a/synapse/api/filtering.py
+++ b/synapse/api/filtering.py
@@ -226,7 +226,7 @@ class Filtering(object):
             jsonschema.validate(user_filter_json, USER_FILTER_SCHEMA,
                                 format_checker=FormatChecker())
         except jsonschema.ValidationError as e:
-            raise SynapseError(400, e.message)
+            raise SynapseError(400, str(e))
 
 
 class FilterCollection(object):
diff --git a/synapse/app/__init__.py b/synapse/app/__init__.py
index 3b6b9368b..c3afcc573 100644
--- a/synapse/app/__init__.py
+++ b/synapse/app/__init__.py
@@ -24,7 +24,7 @@ try:
     python_dependencies.check_requirements()
 except python_dependencies.MissingRequirementError as e:
     message = "\n".join([
-        "Missing Requirement: %s" % (e.message,),
+        "Missing Requirement: %s" % (str(e),),
         "To install run:",
         "    pip install --upgrade --force \"%s\"" % (e.dependency,),
         "",
diff --git a/synapse/app/appservice.py b/synapse/app/appservice.py
index 02039f7e7..8559e141a 100644
--- a/synapse/app/appservice.py
+++ b/synapse/app/appservice.py
@@ -136,7 +136,7 @@ def start(config_options):
             "Synapse appservice", config_options
         )
     except ConfigError as e:
-        sys.stderr.write("\n" + e.message + "\n")
+        sys.stderr.write("\n" + str(e) + "\n")
         sys.exit(1)
 
     assert config.worker_app == "synapse.app.appservice"
diff --git a/synapse/app/client_reader.py b/synapse/app/client_reader.py
index 4c73c637b..76aed8c60 100644
--- a/synapse/app/client_reader.py
+++ b/synapse/app/client_reader.py
@@ -153,7 +153,7 @@ def start(config_options):
             "Synapse client reader", config_options
         )
     except ConfigError as e:
-        sys.stderr.write("\n" + e.message + "\n")
+        sys.stderr.write("\n" + str(e) + "\n")
         sys.exit(1)
 
     assert config.worker_app == "synapse.app.client_reader"
diff --git a/synapse/app/event_creator.py b/synapse/app/event_creator.py
index bc82197b2..9060ab14f 100644
--- a/synapse/app/event_creator.py
+++ b/synapse/app/event_creator.py
@@ -169,7 +169,7 @@ def start(config_options):
             "Synapse event creator", config_options
         )
     except ConfigError as e:
-        sys.stderr.write("\n" + e.message + "\n")
+        sys.stderr.write("\n" + str(e) + "\n")
         sys.exit(1)
 
     assert config.worker_app == "synapse.app.event_creator"
diff --git a/synapse/app/federation_reader.py b/synapse/app/federation_reader.py
index 18ca71ef9..228a297fb 100644
--- a/synapse/app/federation_reader.py
+++ b/synapse/app/federation_reader.py
@@ -140,7 +140,7 @@ def start(config_options):
             "Synapse federation reader", config_options
         )
     except ConfigError as e:
-        sys.stderr.write("\n" + e.message + "\n")
+        sys.stderr.write("\n" + str(e) + "\n")
         sys.exit(1)
 
     assert config.worker_app == "synapse.app.federation_reader"
diff --git a/synapse/app/federation_sender.py b/synapse/app/federation_sender.py
index 6501c5779..e9a99d76e 100644
--- a/synapse/app/federation_sender.py
+++ b/synapse/app/federation_sender.py
@@ -160,7 +160,7 @@ def start(config_options):
             "Synapse federation sender", config_options
         )
     except ConfigError as e:
-        sys.stderr.write("\n" + e.message + "\n")
+        sys.stderr.write("\n" + str(e) + "\n")
         sys.exit(1)
 
     assert config.worker_app == "synapse.app.federation_sender"
diff --git a/synapse/app/frontend_proxy.py b/synapse/app/frontend_proxy.py
index b076fbe52..fc4b25de1 100644
--- a/synapse/app/frontend_proxy.py
+++ b/synapse/app/frontend_proxy.py
@@ -228,7 +228,7 @@ def start(config_options):
             "Synapse frontend proxy", config_options
         )
     except ConfigError as e:
-        sys.stderr.write("\n" + e.message + "\n")
+        sys.stderr.write("\n" + str(e) + "\n")
         sys.exit(1)
 
     assert config.worker_app == "synapse.app.frontend_proxy"
diff --git a/synapse/app/homeserver.py b/synapse/app/homeserver.py
index 8c5d858b0..a98fdbd21 100755
--- a/synapse/app/homeserver.py
+++ b/synapse/app/homeserver.py
@@ -301,7 +301,7 @@ class SynapseHomeServer(HomeServer):
         try:
             database_engine.check_database(db_conn.cursor())
         except IncorrectDatabaseSetup as e:
-            quit_with_error(e.message)
+            quit_with_error(str(e))
 
 
 # Gauges to expose monthly active user control metrics
@@ -328,7 +328,7 @@ def setup(config_options):
             config_options,
         )
     except ConfigError as e:
-        sys.stderr.write("\n" + e.message + "\n")
+        sys.stderr.write("\n" + str(e) + "\n")
         sys.exit(1)
 
     if not config:
diff --git a/synapse/app/media_repository.py b/synapse/app/media_repository.py
index 992d182db..acc0487ad 100644
--- a/synapse/app/media_repository.py
+++ b/synapse/app/media_repository.py
@@ -133,7 +133,7 @@ def start(config_options):
             "Synapse media repository", config_options
         )
     except ConfigError as e:
-        sys.stderr.write("\n" + e.message + "\n")
+        sys.stderr.write("\n" + str(e) + "\n")
         sys.exit(1)
 
     assert config.worker_app == "synapse.app.media_repository"
diff --git a/synapse/app/pusher.py b/synapse/app/pusher.py
index 2ec4c7def..630dcda47 100644
100644 --- a/synapse/app/pusher.py +++ b/synapse/app/pusher.py @@ -191,7 +191,7 @@ def start(config_options): "Synapse pusher", config_options ) except ConfigError as e: - sys.stderr.write("\n" + e.message + "\n") + sys.stderr.write("\n" + str(e) + "\n") sys.exit(1) assert config.worker_app == "synapse.app.pusher" diff --git a/synapse/app/synchrotron.py b/synapse/app/synchrotron.py index df81b7bcb..9a7fc6ee9 100644 --- a/synapse/app/synchrotron.py +++ b/synapse/app/synchrotron.py @@ -410,7 +410,7 @@ def start(config_options): "Synapse synchrotron", config_options ) except ConfigError as e: - sys.stderr.write("\n" + e.message + "\n") + sys.stderr.write("\n" + str(e) + "\n") sys.exit(1) assert config.worker_app == "synapse.app.synchrotron" diff --git a/synapse/app/user_dir.py b/synapse/app/user_dir.py index b383e79c1..0a5f62b50 100644 --- a/synapse/app/user_dir.py +++ b/synapse/app/user_dir.py @@ -188,7 +188,7 @@ def start(config_options): "Synapse user directory", config_options ) except ConfigError as e: - sys.stderr.write("\n" + e.message + "\n") + sys.stderr.write("\n" + str(e) + "\n") sys.exit(1) assert config.worker_app == "synapse.app.user_dir" diff --git a/synapse/config/__main__.py b/synapse/config/__main__.py index 58c97a70a..8fccf573e 100644 --- a/synapse/config/__main__.py +++ b/synapse/config/__main__.py @@ -25,7 +25,7 @@ if __name__ == "__main__": try: config = HomeServerConfig.load_config("", sys.argv[3:]) except ConfigError as e: - sys.stderr.write("\n" + e.message + "\n") + sys.stderr.write("\n" + str(e) + "\n") sys.exit(1) print (getattr(config, key)) diff --git a/synapse/handlers/e2e_keys.py b/synapse/handlers/e2e_keys.py index 578e9250f..9dc46aa15 100644 --- a/synapse/handlers/e2e_keys.py +++ b/synapse/handlers/e2e_keys.py @@ -341,7 +341,7 @@ class E2eKeysHandler(object): def _exception_to_failure(e): if isinstance(e, CodeMessageException): return { - "status": e.code, "message": e.message, + "status": e.code, "message": str(e), } if isinstance(e, NotRetryingDestination): diff --git a/synapse/handlers/profile.py b/synapse/handlers/profile.py index 75b8b7ce6..f284d5a38 100644 --- a/synapse/handlers/profile.py +++ b/synapse/handlers/profile.py @@ -278,7 +278,7 @@ class BaseProfileHandler(BaseHandler): except Exception as e: logger.warn( "Failed to update join event for room %s - %s", - room_id, str(e.message) + room_id, str(e) ) diff --git a/tests/unittest.py b/tests/unittest.py index ef905e638..043710afa 100644 --- a/tests/unittest.py +++ b/tests/unittest.py @@ -121,7 +121,7 @@ class TestCase(unittest.TestCase): try: self.assertEquals(attrs[key], getattr(obj, key)) except AssertionError as e: - raise (type(e))(e.message + " for '.%s'" % key) + raise (type(e))(str(e) + " for '.%s'" % key) def assert_dict(self, required, actual): """Does a partial assert of a dict. From a873896096bd05b608d7db80c9f0c12090877644 Mon Sep 17 00:00:00 2001 From: Schnuffle Date: Thu, 27 Sep 2018 14:01:44 +0200 Subject: [PATCH 33/45] Added changelog fragment --- changelog.d/3970.bugfix | 1 + 1 file changed, 1 insertion(+) create mode 100644 changelog.d/3970.bugfix diff --git a/changelog.d/3970.bugfix b/changelog.d/3970.bugfix new file mode 100644 index 000000000..c4caa31ed --- /dev/null +++ b/changelog.d/3970.bugfix @@ -0,0 +1 @@ +Replaced all occurences of e.message with str(e)Replaced all occurences of e.message with str(e). 
Contributed by Schnuffle From 484a9b8c81a2315a05a00c7094b7f5fb4a9d5794 Mon Sep 17 00:00:00 2001 From: Richard van der Hoff Date: Thu, 27 Sep 2018 12:48:06 +0100 Subject: [PATCH 34/45] Remove redundant, failing, test This test didn't do what it claimed to do, and what it claimed to do was the same as test_cant_hide_direct_ancestors anyway. This stuff is tested by sytest anyway. --- tests/test_federation.py | 106 --------------------------------------- 1 file changed, 106 deletions(-) diff --git a/tests/test_federation.py b/tests/test_federation.py index ff55c7a62..952a0a7b5 100644 --- a/tests/test_federation.py +++ b/tests/test_federation.py @@ -141,109 +141,3 @@ class MessageAcceptTests(unittest.TestCase): self.homeserver.datastore.get_latest_event_ids_in_room, self.room_id ) self.assertEqual(self.successResultOf(extrem)[0], "$join:test.serv") - - def test_cant_hide_past_history(self): - """ - If you send a message, you must be able to provide the direct - prev_events that said event references. - """ - - def post_json(destination, path, data, headers=None, timeout=0): - if path.startswith("/_matrix/federation/v1/get_missing_events/"): - return { - "events": [ - { - "room_id": self.room_id, - "sender": "@baduser:test.serv", - "event_id": "three:test.serv", - "depth": 1000, - "origin_server_ts": 1, - "type": "m.room.message", - "origin": "test.serv", - "content": "hewwo?", - "auth_events": [], - "prev_events": [("four:test.serv", {})], - } - ] - } - - self.http_client.post_json = post_json - - def get_json(destination, path, args, headers=None): - if path.startswith("/_matrix/federation/v1/state_ids/"): - d = self.successResultOf( - self.homeserver.datastore.get_state_ids_for_event("one:test.serv") - ) - - return succeed( - { - "pdu_ids": [ - y - for x, y in d.items() - if x == ("m.room.member", "@us:test") - ], - "auth_chain_ids": list(d.values()), - } - ) - - self.http_client.get_json = get_json - - # Figure out what the most recent event is - most_recent = self.successResultOf( - maybeDeferred( - self.homeserver.datastore.get_latest_event_ids_in_room, self.room_id - ) - )[0] - - # Make a good event - good_event = FrozenEvent( - { - "room_id": self.room_id, - "sender": "@baduser:test.serv", - "event_id": "one:test.serv", - "depth": 1000, - "origin_server_ts": 1, - "type": "m.room.message", - "origin": "test.serv", - "content": "hewwo?", - "auth_events": [], - "prev_events": [(most_recent, {})], - } - ) - - with LoggingContext(request="good_event"): - d = self.handler.on_receive_pdu( - "test.serv", good_event, sent_to_us_directly=True - ) - self.reactor.advance(1) - self.assertEqual(self.successResultOf(d), None) - - bad_event = FrozenEvent( - { - "room_id": self.room_id, - "sender": "@baduser:test.serv", - "event_id": "two:test.serv", - "depth": 1000, - "origin_server_ts": 1, - "type": "m.room.message", - "origin": "test.serv", - "content": "hewwo?", - "auth_events": [], - "prev_events": [("one:test.serv", {}), ("three:test.serv", {})], - } - ) - - with LoggingContext(request="bad_event"): - d = self.handler.on_receive_pdu( - "test.serv", bad_event, sent_to_us_directly=True - ) - self.reactor.advance(1) - - extrem = maybeDeferred( - self.homeserver.datastore.get_latest_event_ids_in_room, self.room_id - ) - self.assertEqual(self.successResultOf(extrem)[0], "two:test.serv") - - state = self.homeserver.get_state_handler().get_current_state_ids(self.room_id) - self.reactor.advance(1) - self.assertIn(("m.room.member", "@us:test"), self.successResultOf(state).keys()) From 
861c063ebc8989e6b25d2e00e7aedf0ecc780f41 Mon Sep 17 00:00:00 2001 From: Amber Brown Date: Thu, 27 Sep 2018 22:47:01 +1000 Subject: [PATCH 35/45] Update 3970.bugfix --- changelog.d/3970.bugfix | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/changelog.d/3970.bugfix b/changelog.d/3970.bugfix index c4caa31ed..562531549 100644 --- a/changelog.d/3970.bugfix +++ b/changelog.d/3970.bugfix @@ -1 +1 @@ -Replaced all occurences of e.message with str(e)Replaced all occurences of e.message with str(e). Contributed by Schnuffle +Replaced all occurences of e.message with str(e). Contributed by Schnuffle From b3064532d005e08a3f5ec0c2a2decf688208f5e6 Mon Sep 17 00:00:00 2001 From: Amber Brown Date: Thu, 27 Sep 2018 23:21:54 +1000 Subject: [PATCH 36/45] Run our oldest supported configuration in CI (#3952) --- .travis.yml | 3 +++ changelog.d/3952.misc | 1 + synapse/python_dependencies.py | 27 ++++++++++++++------------- tox.ini | 20 ++++++++++++++++++++ 4 files changed, 38 insertions(+), 13 deletions(-) create mode 100644 changelog.d/3952.misc diff --git a/.travis.yml b/.travis.yml index b6faca4b9..2077f6af7 100644 --- a/.travis.yml +++ b/.travis.yml @@ -20,6 +20,9 @@ matrix: - python: 2.7 env: TOX_ENV=py27 + - python: 2.7 + env: TOX_ENV=py27-old + - python: 2.7 env: TOX_ENV=py27-postgres TRIAL_FLAGS="-j 4" services: diff --git a/changelog.d/3952.misc b/changelog.d/3952.misc new file mode 100644 index 000000000..015e4a43e --- /dev/null +++ b/changelog.d/3952.misc @@ -0,0 +1 @@ +Run the test suite on the oldest supported versions of our dependencies in CI. \ No newline at end of file diff --git a/synapse/python_dependencies.py b/synapse/python_dependencies.py index c779f69fa..0f339a032 100644 --- a/synapse/python_dependencies.py +++ b/synapse/python_dependencies.py @@ -33,31 +33,32 @@ logger = logging.getLogger(__name__) # [2] https://setuptools.readthedocs.io/en/latest/setuptools.html#declaring-dependencies REQUIREMENTS = { "jsonschema>=2.5.1": ["jsonschema>=2.5.1"], - "frozendict>=0.4": ["frozendict"], + "frozendict>=1": ["frozendict"], "unpaddedbase64>=1.1.0": ["unpaddedbase64>=1.1.0"], "canonicaljson>=1.1.3": ["canonicaljson>=1.1.3"], "signedjson>=1.0.0": ["signedjson>=1.0.0"], "pynacl>=1.2.1": ["nacl>=1.2.1", "nacl.bindings"], - "service_identity>=1.0.0": ["service_identity>=1.0.0"], + "service_identity>=16.0.0": ["service_identity>=16.0.0"], "Twisted>=17.1.0": ["twisted>=17.1.0"], "treq>=15.1": ["treq>=15.1"], # Twisted has required pyopenssl 16.0 since about Twisted 16.6. 
"pyopenssl>=16.0.0": ["OpenSSL>=16.0.0"], - "pyyaml": ["yaml"], - "pyasn1": ["pyasn1"], - "daemonize": ["daemonize"], - "bcrypt": ["bcrypt>=3.1.0"], - "pillow": ["PIL"], - "pydenticon": ["pydenticon"], - "sortedcontainers": ["sortedcontainers"], - "pysaml2>=3.0.0": ["saml2>=3.0.0"], - "pymacaroons-pynacl": ["pymacaroons"], + "pyyaml>=3.11": ["yaml"], + "pyasn1>=0.1.9": ["pyasn1"], + "pyasn1-modules>=0.0.7": ["pyasn1_modules"], + "daemonize>=2.3.1": ["daemonize"], + "bcrypt>=3.1.0": ["bcrypt>=3.1.0"], + "pillow>=3.1.2": ["PIL"], + "pydenticon>=0.2": ["pydenticon"], + "sortedcontainers>=1.4.4": ["sortedcontainers"], + "pysaml2>=3.0.0": ["saml2"], + "pymacaroons-pynacl>=0.9.3": ["pymacaroons"], "msgpack-python>=0.3.0": ["msgpack"], "phonenumbers>=8.2.0": ["phonenumbers"], - "six": ["six"], - "prometheus_client": ["prometheus_client"], + "six>=1.10": ["six"], + "prometheus_client>=0.0.18": ["prometheus_client"], # we use attr.s(slots), which arrived in 16.0.0 "attrs>=16.0.0": ["attr>=16.0.0"], diff --git a/tox.ini b/tox.ini index e4db563b4..87b5e4782 100644 --- a/tox.ini +++ b/tox.ini @@ -64,6 +64,26 @@ setenv = {[base]setenv} SYNAPSE_POSTGRES = 1 +# A test suite for the oldest supported versions of Python libraries, to catch +# any uses of APIs not available in them. +[testenv:py27-old] +skip_install=True +deps = + # Old automat version for Twisted + Automat == 0.3.0 + + mock + lxml +commands = + /usr/bin/find "{toxinidir}" -name '*.pyc' -delete + # Make all greater-thans equals so we test the oldest version of our direct + # dependencies, but make the pyopenssl 17.0, which can work against an + # OpenSSL 1.1 compiled cryptography (as older ones don't compile on Travis). + /bin/sh -c 'python -m synapse.python_dependencies | sed -e "s/>=/==/g" -e "s/psycopg2==2.6//" -e "s/pyopenssl==16.0.0/pyopenssl==17.0.0/" | xargs pip install' + # Install Synapse itself. This won't update any libraries. + pip install -e . + {envbindir}/trial {env:TRIAL_FLAGS:} {posargs:tests} {env:TOXSUFFIX:} + [testenv:py35] usedevelop=true From 19475cf3371819170737515c414e3cc7c2a32d43 Mon Sep 17 00:00:00 2001 From: Richard van der Hoff Date: Fri, 28 Sep 2018 11:55:57 +0100 Subject: [PATCH 37/45] Remove redundant call to start_get_pdu_cache I think this got forgotten in #3932. We were getting away with it because it was the last call in this function. 
--- changelog.d/3980.bugfix | 1 + synapse/app/homeserver.py | 1 - 2 files changed, 1 insertion(+), 1 deletion(-) create mode 100644 changelog.d/3980.bugfix diff --git a/changelog.d/3980.bugfix b/changelog.d/3980.bugfix new file mode 100644 index 000000000..7578414ed --- /dev/null +++ b/changelog.d/3980.bugfix @@ -0,0 +1 @@ +Fix some instances of ExpiringCache not expiring cache items diff --git a/synapse/app/homeserver.py b/synapse/app/homeserver.py index a98fdbd21..e3f0d99a3 100755 --- a/synapse/app/homeserver.py +++ b/synapse/app/homeserver.py @@ -386,7 +386,6 @@ def setup(config_options): hs.get_pusherpool().start() hs.get_datastore().start_profiling() hs.get_datastore().start_doing_background_updates() - hs.get_federation_client().start_get_pdu_cache() reactor.callWhenRunning(start) From 5304449738b0193012a613edf74437e4af2ff416 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jan=20Christian=20Gr=C3=BCnhage?= Date: Fri, 28 Sep 2018 13:36:35 +0200 Subject: [PATCH 38/45] build python 3 docker images on circle CI (#3976) --- .circleci/config.yml | 16 +++++++++++----- changelog.d/3976.misc | 1 + 2 files changed, 12 insertions(+), 5 deletions(-) create mode 100644 changelog.d/3976.misc diff --git a/.circleci/config.yml b/.circleci/config.yml index 3cb14793f..6ae3a4223 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -4,18 +4,24 @@ jobs: machine: true steps: - checkout - - run: docker build -f docker/Dockerfile -t matrixdotorg/synapse:$CIRCLE_TAG . + - run: docker build -f docker/Dockerfile -t matrixdotorg/synapse:${CIRCLE_TAG} . + - run: docker build -f docker/Dockerfile -t matrixdotorg/synapse:${CIRCLE_TAG}-py3 --build-arg PYTHON_VERSION=3.6 . - run: docker login --username $DOCKER_HUB_USERNAME --password $DOCKER_HUB_PASSWORD - - run: docker push matrixdotorg/synapse:$CIRCLE_TAG + - run: docker push matrixdotorg/synapse:${CIRCLE_TAG} + - run: docker push matrixdotorg/synapse:${CIRCLE_TAG}-py3 dockerhubuploadlatest: machine: true steps: - checkout - - run: docker build -f docker/Dockerfile -t matrixdotorg/synapse:$CIRCLE_SHA1 . + - run: docker build -f docker/Dockerfile -t matrixdotorg/synapse:${CIRCLE_SHA1} . + - run: docker build -f docker/Dockerfile -t matrixdotorg/synapse:${CIRCLE_SHA1}-py3 --build-arg PYTHON_VERSION=3.6 . 
- run: docker login --username $DOCKER_HUB_USERNAME --password $DOCKER_HUB_PASSWORD - - run: docker tag matrixdotorg/synapse:$CIRCLE_SHA1 matrixdotorg/synapse:latest - - run: docker push matrixdotorg/synapse:$CIRCLE_SHA1 + - run: docker tag matrixdotorg/synapse:${CIRCLE_SHA1} matrixdotorg/synapse:latest + - run: docker tag matrixdotorg/synapse:${CIRCLE_SHA1}-py3 matrixdotorg/synapse:latest-py3 + - run: docker push matrixdotorg/synapse:${CIRCLE_SHA1} + - run: docker push matrixdotorg/synapse:${CIRCLE_SHA1}-py3 - run: docker push matrixdotorg/synapse:latest + - run: docker push matrixdotorg/synapse:latest-py3 sytestpy2: machine: true steps: diff --git a/changelog.d/3976.misc b/changelog.d/3976.misc new file mode 100644 index 000000000..282148c98 --- /dev/null +++ b/changelog.d/3976.misc @@ -0,0 +1 @@ +Build py3 docker images for docker hub too From 965154d60af59b69eac01f7cfcf821a757ae93fa Mon Sep 17 00:00:00 2001 From: Richard van der Hoff Date: Fri, 28 Sep 2018 12:45:54 +0100 Subject: [PATCH 39/45] Fix complete fail to do the right thing --- synapse/federation/transaction_queue.py | 3 ++- synapse/handlers/typing.py | 1 + 2 files changed, 3 insertions(+), 1 deletion(-) diff --git a/synapse/federation/transaction_queue.py b/synapse/federation/transaction_queue.py index ae47aaae0..98b595080 100644 --- a/synapse/federation/transaction_queue.py +++ b/synapse/federation/transaction_queue.py @@ -354,7 +354,7 @@ class TransactionQueue(object): content=content, ) - if not destination == self.server_name: + if destination == self.server_name: logger.info("Not sending EDU to ourselves") return @@ -372,6 +372,7 @@ class TransactionQueue(object): def send_device_messages(self, destination): if destination == self.server_name: logger.info("Not sending device update to ourselves") + return self._attempt_new_transaction(destination) diff --git a/synapse/handlers/typing.py b/synapse/handlers/typing.py index 2d2d3d5a0..bf82b3f86 100644 --- a/synapse/handlers/typing.py +++ b/synapse/handlers/typing.py @@ -218,6 +218,7 @@ class TypingHandler(object): for domain in set(get_domain_from_id(u) for u in users): if domain != self.server_name: + logger.debug("sending typing update to %s", domain) self.federation.send_edu( destination=domain, edu_type="m.typing", From 9a8bbc9a59e4f5534225d0d07c86652242c88efe Mon Sep 17 00:00:00 2001 From: Bruno Windels Date: Fri, 28 Sep 2018 13:36:56 +0100 Subject: [PATCH 40/45] add --no-admin flag to registration script (#3836) --- changelog.d/3836.bugfix | 1 + scripts/register_new_matrix_user | 18 ++++++++++++++---- 2 files changed, 15 insertions(+), 4 deletions(-) create mode 100644 changelog.d/3836.bugfix diff --git a/changelog.d/3836.bugfix b/changelog.d/3836.bugfix new file mode 100644 index 000000000..add49fbec --- /dev/null +++ b/changelog.d/3836.bugfix @@ -0,0 +1 @@ +support registering regular users non-interactively with register_new_matrix_user script \ No newline at end of file diff --git a/scripts/register_new_matrix_user b/scripts/register_new_matrix_user index 8c3d42935..91bdb3a25 100755 --- a/scripts/register_new_matrix_user +++ b/scripts/register_new_matrix_user @@ -133,7 +133,7 @@ def register_new_user(user, password, server_location, shared_secret, admin): print "Passwords do not match" sys.exit(1) - if not admin: + if admin is None: admin = raw_input("Make admin [no]: ") if admin in ("y", "yes", "true"): admin = True @@ -160,10 +160,16 @@ if __name__ == "__main__": default=None, help="New password for user. 
Will prompt if omitted.", ) - parser.add_argument( + admin_group = parser.add_mutually_exclusive_group() + admin_group.add_argument( "-a", "--admin", action="store_true", - help="Register new user as an admin. Will prompt if omitted.", + help="Register new user as an admin. Will prompt if --no-admin is not set either.", + ) + admin_group.add_argument( + "--no-admin", + action="store_true", + help="Register new user as a regular user. Will prompt if --admin is not set either.", ) group = parser.add_mutually_exclusive_group(required=True) @@ -197,4 +203,8 @@ if __name__ == "__main__": else: secret = args.shared_secret - register_new_user(args.user, args.password, args.server_url, secret, args.admin) + admin = None + if args.admin or args.no_admin: + admin = args.admin + + register_new_user(args.user, args.password, args.server_url, secret, admin) From 8ea887856c1bcb97c90d88fe20f7f82ed7f48967 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Mon, 1 Oct 2018 11:59:52 +0100 Subject: [PATCH 41/45] Don't update eviction metrics on explicit removal --- synapse/util/caches/expiringcache.py | 5 ----- 1 file changed, 5 deletions(-) diff --git a/synapse/util/caches/expiringcache.py b/synapse/util/caches/expiringcache.py index 8ac56d85f..f2f55ba6c 100644 --- a/synapse/util/caches/expiringcache.py +++ b/synapse/util/caches/expiringcache.py @@ -103,11 +103,6 @@ class ExpiringCache(object): if value is SENTINEL: return default - if self.iterable: - self.metrics.inc_evictions(len(value.value)) - else: - self.metrics.inc_evictions() - return value def __contains__(self, key): From 4f3e3ac192f2c5ff37198f50af99bc8fbd57beca Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Mon, 1 Oct 2018 12:25:27 +0100 Subject: [PATCH 42/45] Correctly match 'dict.pop' api --- synapse/util/caches/expiringcache.py | 14 +++++++++++--- 1 file changed, 11 insertions(+), 3 deletions(-) diff --git a/synapse/util/caches/expiringcache.py b/synapse/util/caches/expiringcache.py index f2f55ba6c..f36978027 100644 --- a/synapse/util/caches/expiringcache.py +++ b/synapse/util/caches/expiringcache.py @@ -98,10 +98,18 @@ class ExpiringCache(object): return entry.value - def pop(self, key, default=None): - value = self._cache.pop(key, SENTINEL) + def pop(self, key, default=SENTINEL): + """Removes and returns the value with the given key from the cache. + + If the key isn't in the cache then `default` will be returned if + specified, otherwise `KeyError` will get raised. + + Identical functionality to `dict.pop(..)`. + """ + + value = self._cache.pop(key, default) if value is SENTINEL: - return default + raise KeyError(key) return value From 53c5fa4e6c179c6ed855169dcb99b69699db75be Mon Sep 17 00:00:00 2001 From: Richard van der Hoff <1389908+richvdh@users.noreply.github.com> Date: Mon, 1 Oct 2018 12:29:17 +0100 Subject: [PATCH 43/45] Further reduce the size of the docker image (#3972) Rewrite the dockerfile as a multistage build: this means we can get rid of a whole load of cruft which we don't need. 
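As an aside on the ExpiringCache.pop change in PATCH 42 above: the sentinel
default is what lets pop tell "no default supplied" apart from "a default of
None", which is exactly how dict.pop behaves. A self-contained sketch of the
idea (illustrative, not Synapse's code):

    SENTINEL = object()

    class TinyCache(object):
        def __init__(self):
            self._cache = {}

        def pop(self, key, default=SENTINEL):
            value = self._cache.pop(key, default)
            if value is SENTINEL:
                # No default was supplied and the key was absent, so match
                # dict.pop by raising instead of leaking the sentinel.
                raise KeyError(key)
            return value

    c = TinyCache()
    c._cache["a"] = 1
    assert c.pop("a") == 1           # removes and returns the value
    assert c.pop("a", None) is None  # an explicit default suppresses the error
    try:
        c.pop("missing")
    except KeyError:
        pass                         # same behaviour as dict.pop

Using a private module-level object() as the sentinel is safe because no
caller can ever pass it in by accident.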
--- changelog.d/3972.misc | 1 + docker/Dockerfile | 71 ++++++++++++++++++++++++++++--------------- 2 files changed, 47 insertions(+), 25 deletions(-) create mode 100644 changelog.d/3972.misc diff --git a/changelog.d/3972.misc b/changelog.d/3972.misc new file mode 100644 index 000000000..e56299ee7 --- /dev/null +++ b/changelog.d/3972.misc @@ -0,0 +1 @@ +Further reduce the docker image size diff --git a/docker/Dockerfile b/docker/Dockerfile index 1d00defc2..db44c02a9 100644 --- a/docker/Dockerfile +++ b/docker/Dockerfile @@ -1,9 +1,13 @@ ARG PYTHON_VERSION=2 -FROM docker.io/python:${PYTHON_VERSION}-alpine3.8 -COPY . /synapse +### +### Stage 0: builder +### +FROM docker.io/python:${PYTHON_VERSION}-alpine3.8 as builder -RUN apk add --no-cache --virtual .build_deps \ +# install the OS build deps + +RUN apk add \ build-base \ libffi-dev \ libjpeg-turbo-dev \ @@ -11,30 +15,47 @@ RUN apk add --no-cache --virtual .build_deps \ libxslt-dev \ linux-headers \ postgresql-dev \ - zlib-dev \ - && cd /synapse \ - && apk add --no-cache --virtual .runtime_deps \ - libffi \ - libjpeg-turbo \ - libressl \ - libxslt \ - libpq \ - zlib \ - su-exec \ - && pip install --upgrade \ + zlib-dev + +# build things which have slow build steps, before we copy synapse, so that +# the layer can be cached. +# +# (we really just care about caching a wheel here, as the "pip install" below +# will install them again.) + +RUN pip install --prefix="/install" --no-warn-script-location \ + cryptography \ + msgpack-python \ + pillow \ + pynacl + +# now install synapse and all of the python deps to /install. + +COPY . /synapse +RUN pip install --prefix="/install" --no-warn-script-location \ lxml \ - pip \ psycopg2 \ - setuptools \ - && mkdir -p /synapse/cache \ - && pip install -f /synapse/cache --upgrade --process-dependency-links . \ - && mv /synapse/docker/start.py /synapse/docker/conf / \ - && rm -rf \ - setup.cfg \ - setup.py \ - synapse \ - && apk del .build_deps - + /synapse + +### +### Stage 1: runtime +### + +FROM docker.io/python:${PYTHON_VERSION}-alpine3.8 + +RUN apk add --no-cache --virtual .runtime_deps \ + libffi \ + libjpeg-turbo \ + libressl \ + libxslt \ + libpq \ + zlib \ + su-exec + +COPY --from=builder /install /usr/local +COPY ./docker/start.py /start.py +COPY ./docker/conf /conf + VOLUME ["/data"] EXPOSE 8008/tcp 8448/tcp From 6e05fd032c670080f9e9f99f2e2e8b8eccf24c7d Mon Sep 17 00:00:00 2001 From: Amber Brown Date: Tue, 2 Oct 2018 00:11:58 +1000 Subject: [PATCH 44/45] Fix userconsent on Python 3 (#3938) --- changelog.d/3938.bugfix | 1 + synapse/api/urls.py | 2 +- tests/server_notices/test_consent.py | 100 +++++++++++++++++++++++++ tests/storage/test_client_ips.py | 48 +----------- tests/unittest.py | 80 +++++++++++++++++++- tests/utils.py | 105 +++++++++++++++------------ 6 files changed, 240 insertions(+), 96 deletions(-) create mode 100644 changelog.d/3938.bugfix create mode 100644 tests/server_notices/test_consent.py diff --git a/changelog.d/3938.bugfix b/changelog.d/3938.bugfix new file mode 100644 index 000000000..01ccca21a --- /dev/null +++ b/changelog.d/3938.bugfix @@ -0,0 +1 @@ +Sending server notices regarding user consent now works on Python 3. 
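The heart of the fix in the synapse/api/urls.py hunk below is that on
Python 3 hmac.new() only accepts bytes, so a str user_id must be encoded
before being fed in as the message. A minimal sketch with illustrative values:

    import hmac
    from hashlib import sha256

    hmac_secret = b"form-secret"  # illustrative value, not a real secret
    user_id = "@bob:example.com"  # a str on Python 3

    # Without the .encode() this raises TypeError on Python 3:
    mac = hmac.new(
        key=hmac_secret,
        msg=user_id.encode('ascii'),
        digestmod=sha256,
    ).hexdigest()

hmac.new() is equally strict about the key, which is why the secret above is
bytes too.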
diff --git a/synapse/api/urls.py b/synapse/api/urls.py index 71347912f..6d9f1ca0e 100644 --- a/synapse/api/urls.py +++ b/synapse/api/urls.py @@ -64,7 +64,7 @@ class ConsentURIBuilder(object): """ mac = hmac.new( key=self._hmac_secret, - msg=user_id, + msg=user_id.encode('ascii'), digestmod=sha256, ).hexdigest() consent_uri = "%s_matrix/consent?%s" % ( diff --git a/tests/server_notices/test_consent.py b/tests/server_notices/test_consent.py new file mode 100644 index 000000000..95badc985 --- /dev/null +++ b/tests/server_notices/test_consent.py @@ -0,0 +1,100 @@ +# -*- coding: utf-8 -*- +# Copyright 2018 New Vector Ltd +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from synapse.rest.client.v1 import admin, login, room +from synapse.rest.client.v2_alpha import sync + +from tests import unittest + + +class ConsentNoticesTests(unittest.HomeserverTestCase): + + servlets = [ + sync.register_servlets, + admin.register_servlets, + login.register_servlets, + room.register_servlets, + ] + + def make_homeserver(self, reactor, clock): + + self.consent_notice_message = "consent %(consent_uri)s" + config = self.default_config() + config.user_consent_version = "1" + config.user_consent_server_notice_content = { + "msgtype": "m.text", + "body": self.consent_notice_message, + } + config.public_baseurl = "https://example.com/" + config.form_secret = "123abc" + + config.server_notices_mxid = "@notices:test" + config.server_notices_mxid_display_name = "test display name" + config.server_notices_mxid_avatar_url = None + config.server_notices_room_name = "Server Notices" + + hs = self.setup_test_homeserver(config=config) + + return hs + + def prepare(self, reactor, clock, hs): + self.user_id = self.register_user("bob", "abc123") + self.access_token = self.login("bob", "abc123") + + def test_get_sync_message(self): + """ + When user consent server notices are enabled, a sync will cause a notice + to fire (in a room which the user is invited to). The notice contains + the notice URL + an authentication code. 
+ """ + # Initial sync, to get the user consent room invite + request, channel = self.make_request( + "GET", "/_matrix/client/r0/sync", access_token=self.access_token + ) + self.render(request) + self.assertEqual(channel.code, 200) + + # Get the Room ID to join + room_id = list(channel.json_body["rooms"]["invite"].keys())[0] + + # Join the room + request, channel = self.make_request( + "POST", + "/_matrix/client/r0/rooms/" + room_id + "/join", + access_token=self.access_token, + ) + self.render(request) + self.assertEqual(channel.code, 200) + + # Sync again, to get the message in the room + request, channel = self.make_request( + "GET", "/_matrix/client/r0/sync", access_token=self.access_token + ) + self.render(request) + self.assertEqual(channel.code, 200) + + # Get the message + room = channel.json_body["rooms"]["join"][room_id] + messages = [ + x for x in room["timeline"]["events"] if x["type"] == "m.room.message" + ] + + # One message, with the consent URL + self.assertEqual(len(messages), 1) + self.assertTrue( + messages[0]["content"]["body"].startswith( + "consent https://example.com/_matrix/consent" + ) + ) diff --git a/tests/storage/test_client_ips.py b/tests/storage/test_client_ips.py index 2ffbb9f14..4577e9422 100644 --- a/tests/storage/test_client_ips.py +++ b/tests/storage/test_client_ips.py @@ -14,10 +14,6 @@ # See the License for the specific language governing permissions and # limitations under the License. -import hashlib -import hmac -import json - from mock import Mock from twisted.internet import defer @@ -145,34 +141,8 @@ class ClientIpAuthTestCase(unittest.HomeserverTestCase): return hs def prepare(self, hs, reactor, clock): - self.hs.config.registration_shared_secret = u"shared" self.store = self.hs.get_datastore() - - # Create the user - request, channel = self.make_request("GET", "/_matrix/client/r0/admin/register") - self.render(request) - nonce = channel.json_body["nonce"] - - want_mac = hmac.new(key=b"shared", digestmod=hashlib.sha1) - want_mac.update(nonce.encode('ascii') + b"\x00bob\x00abc123\x00admin") - want_mac = want_mac.hexdigest() - - body = json.dumps( - { - "nonce": nonce, - "username": "bob", - "password": "abc123", - "admin": True, - "mac": want_mac, - } - ) - request, channel = self.make_request( - "POST", "/_matrix/client/r0/admin/register", body.encode('utf8') - ) - self.render(request) - - self.assertEqual(channel.code, 200) - self.user_id = channel.json_body["user_id"] + self.user_id = self.register_user("bob", "abc123", True) def test_request_with_xforwarded(self): """ @@ -194,20 +164,7 @@ class ClientIpAuthTestCase(unittest.HomeserverTestCase): def _runtest(self, headers, expected_ip, make_request_args): device_id = "bleb" - body = json.dumps( - { - "type": "m.login.password", - "user": "bob", - "password": "abc123", - "device_id": device_id, - } - ) - request, channel = self.make_request( - "POST", "/_matrix/client/r0/login", body.encode('utf8'), **make_request_args - ) - self.render(request) - self.assertEqual(channel.code, 200) - access_token = channel.json_body["access_token"].encode('ascii') + access_token = self.login("bob", "abc123", device_id=device_id) # Advance to a known time self.reactor.advance(123456 - self.reactor.seconds()) @@ -215,7 +172,6 @@ class ClientIpAuthTestCase(unittest.HomeserverTestCase): request, channel = self.make_request( "GET", "/_matrix/client/r0/admin/users/" + self.user_id, - body.encode('utf8'), access_token=access_token, **make_request_args ) diff --git a/tests/unittest.py b/tests/unittest.py index 
043710afa..a59291cc6 100644 --- a/tests/unittest.py +++ b/tests/unittest.py @@ -14,6 +14,8 @@ # See the License for the specific language governing permissions and # limitations under the License. +import hashlib +import hmac import logging from mock import Mock @@ -32,6 +34,7 @@ from synapse.types import UserID, create_requester from synapse.util.logcontext import LoggingContextFilter from tests.server import get_clock, make_request, render, setup_test_homeserver +from tests.utils import default_config # Set up putting Synapse's logs into Trial's. rootLogger = logging.getLogger() @@ -121,7 +124,7 @@ class TestCase(unittest.TestCase): try: self.assertEquals(attrs[key], getattr(obj, key)) except AssertionError as e: - raise (type(e))(str(e) + " for '.%s'" % key) + raise (type(e))(e.message + " for '.%s'" % key) def assert_dict(self, required, actual): """Does a partial assert of a dict. @@ -223,6 +226,15 @@ class HomeserverTestCase(TestCase): hs = self.setup_test_homeserver() return hs + def default_config(self, name="test"): + """ + Get a default HomeServer config object. + + Args: + name (str): The homeserver name/domain. + """ + return default_config(name) + def prepare(self, reactor, clock, homeserver): """ Prepare for the test. This involves things like mocking out parts of @@ -297,3 +309,69 @@ class HomeserverTestCase(TestCase): return d self.pump() return self.successResultOf(d) + + def register_user(self, username, password, admin=False): + """ + Register a user. Requires the Admin API be registered. + + Args: + username (bytes/unicode): The user part of the new user. + password (bytes/unicode): The password of the new user. + admin (bool): Whether the user should be created as an admin + or not. + + Returns: + The MXID of the new user (unicode). + """ + self.hs.config.registration_shared_secret = u"shared" + + # Create the user + request, channel = self.make_request("GET", "/_matrix/client/r0/admin/register") + self.render(request) + nonce = channel.json_body["nonce"] + + want_mac = hmac.new(key=b"shared", digestmod=hashlib.sha1) + nonce_str = b"\x00".join([username.encode('utf8'), password.encode('utf8')]) + if admin: + nonce_str += b"\x00admin" + else: + nonce_str += b"\x00notadmin" + want_mac.update(nonce.encode('ascii') + b"\x00" + nonce_str) + want_mac = want_mac.hexdigest() + + body = json.dumps( + { + "nonce": nonce, + "username": username, + "password": password, + "admin": admin, + "mac": want_mac, + } + ) + request, channel = self.make_request( + "POST", "/_matrix/client/r0/admin/register", body.encode('utf8') + ) + self.render(request) + self.assertEqual(channel.code, 200) + + user_id = channel.json_body["user_id"] + return user_id + + def login(self, username, password, device_id=None): + """ + Log in a user, and get an access token. Requires the Login API be + registered. + + """ + body = {"type": "m.login.password", "user": username, "password": password} + if device_id: + body["device_id"] = device_id + + request, channel = self.make_request( + "POST", "/_matrix/client/r0/login", json.dumps(body).encode('utf8') + ) + self.render(request) + self.assertEqual(channel.code, 200) + + access_token = channel.json_body["access_token"].encode('ascii') + return access_token diff --git a/tests/utils.py b/tests/utils.py index aaed1149c..1ef80e7b7 100644 --- a/tests/utils.py +++ b/tests/utils.py @@ -96,6 +96,62 @@ def setupdb(): atexit.register(_cleanup) +def default_config(name): + """ + Create a reasonable test config. 
+ """ + config = Mock() + config.signing_key = [MockKey()] + config.event_cache_size = 1 + config.enable_registration = True + config.macaroon_secret_key = "not even a little secret" + config.expire_access_token = False + config.server_name = name + config.trusted_third_party_id_servers = [] + config.room_invite_state_types = [] + config.password_providers = [] + config.worker_replication_url = "" + config.worker_app = None + config.email_enable_notifs = False + config.block_non_admin_invites = False + config.federation_domain_whitelist = None + config.federation_rc_reject_limit = 10 + config.federation_rc_sleep_limit = 10 + config.federation_rc_sleep_delay = 100 + config.federation_rc_concurrent = 10 + config.filter_timeline_limit = 5000 + config.user_directory_search_all_users = False + config.user_consent_server_notice_content = None + config.block_events_without_consent_error = None + config.media_storage_providers = [] + config.auto_join_rooms = [] + config.limit_usage_by_mau = False + config.hs_disabled = False + config.hs_disabled_message = "" + config.hs_disabled_limit_type = "" + config.max_mau_value = 50 + config.mau_trial_days = 0 + config.mau_limits_reserved_threepids = [] + config.admin_contact = None + config.rc_messages_per_second = 10000 + config.rc_message_burst_count = 10000 + + # we need a sane default_room_version, otherwise attempts to create rooms will + # fail. + config.default_room_version = "1" + + # disable user directory updates, because they get done in the + # background, which upsets the test runner. + config.update_user_directory = False + + def is_threepid_reserved(threepid): + return ServerConfig.is_threepid_reserved(config, threepid) + + config.is_threepid_reserved.side_effect = is_threepid_reserved + + return config + + class TestHomeServer(HomeServer): DATASTORE_CLASS = DataStore @@ -124,54 +180,7 @@ def setup_test_homeserver( from twisted.internet import reactor if config is None: - config = Mock() - config.signing_key = [MockKey()] - config.event_cache_size = 1 - config.enable_registration = True - config.macaroon_secret_key = "not even a little secret" - config.expire_access_token = False - config.server_name = name - config.trusted_third_party_id_servers = [] - config.room_invite_state_types = [] - config.password_providers = [] - config.worker_replication_url = "" - config.worker_app = None - config.email_enable_notifs = False - config.block_non_admin_invites = False - config.federation_domain_whitelist = None - config.federation_rc_reject_limit = 10 - config.federation_rc_sleep_limit = 10 - config.federation_rc_sleep_delay = 100 - config.federation_rc_concurrent = 10 - config.filter_timeline_limit = 5000 - config.user_directory_search_all_users = False - config.user_consent_server_notice_content = None - config.block_events_without_consent_error = None - config.media_storage_providers = [] - config.auto_join_rooms = [] - config.limit_usage_by_mau = False - config.hs_disabled = False - config.hs_disabled_message = "" - config.hs_disabled_limit_type = "" - config.max_mau_value = 50 - config.mau_trial_days = 0 - config.mau_limits_reserved_threepids = [] - config.admin_contact = None - config.rc_messages_per_second = 10000 - config.rc_message_burst_count = 10000 - - # we need a sane default_room_version, otherwise attempts to create rooms will - # fail. - config.default_room_version = "1" - - # disable user directory updates, because they get done in the - # background, which upsets the test runner. 
- config.update_user_directory = False - - def is_threepid_reserved(threepid): - return ServerConfig.is_threepid_reserved(config, threepid) - - config.is_threepid_reserved.side_effect = is_threepid_reserved + config = default_config(name) config.use_frozen_dicts = True config.ldap_enabled = False From abdc141c2bc872e7db6761f652551eb1bbf0859d Mon Sep 17 00:00:00 2001 From: Amber Brown Date: Tue, 2 Oct 2018 01:08:38 +1000 Subject: [PATCH 45/45] Update instructions to point to pip install (#3985) --- .circleci/merge_base_branch.sh | 13 ++++--- README.rst | 70 +++++++++------------------------- UPGRADE.rst | 6 +-- changelog.d/3985.misc | 1 + 4 files changed, 29 insertions(+), 61 deletions(-) create mode 100644 changelog.d/3985.misc diff --git a/.circleci/merge_base_branch.sh b/.circleci/merge_base_branch.sh index 9614eb91b..6b0bf3aa4 100755 --- a/.circleci/merge_base_branch.sh +++ b/.circleci/merge_base_branch.sh @@ -9,12 +9,15 @@ source $BASH_ENV if [[ -z "${CIRCLE_PR_NUMBER}" ]] then - echo "Can't figure out what the PR number is!" - exit 1 -fi + echo "Can't figure out what the PR number is! Assuming merge target is develop." -# Get the reference, using the GitHub API -GITBASE=`curl -q https://api.github.com/repos/matrix-org/synapse/pulls/${CIRCLE_PR_NUMBER} | jq -r '.base.ref'` + # It probably hasn't had a PR opened yet. Since all PRs land on develop, we + # can probably assume it's based on it and will be merged into it. + GITBASE="develop" +else + # Get the reference, using the GitHub API + GITBASE=`curl -q https://api.github.com/repos/matrix-org/synapse/pulls/${CIRCLE_PR_NUMBER} | jq -r '.base.ref'` +fi # Show what we are before git show -s diff --git a/README.rst b/README.rst index 5547f617b..e1ea351f8 100644 --- a/README.rst +++ b/README.rst @@ -81,7 +81,7 @@ Thanks for using Matrix! Synapse Installation ==================== -Synapse is the reference python/twisted Matrix homeserver implementation. +Synapse is the reference Python/Twisted Matrix homeserver implementation. System requirements: @@ -91,12 +91,13 @@ System requirements: Installing from source ---------------------- + (Prebuilt packages are available for some platforms - see `Platform-Specific Instructions`_.) -Synapse is written in python but some of the libraries it uses are written in -C. So before we can install synapse itself we need a working C compiler and the -header files for python C extensions. +Synapse is written in Python but some of the libraries it uses are written in +C. So before we can install Synapse itself we need a working C compiler and the +header files for Python C extensions. Installing prerequisites on Ubuntu or Debian:: @@ -143,18 +144,24 @@ Installing prerequisites on OpenBSD:: doas pkg_add python libffi py-pip py-setuptools sqlite3 py-virtualenv \ libxslt -To install the synapse homeserver run:: +To install the Synapse homeserver run:: virtualenv -p python2.7 ~/.synapse source ~/.synapse/bin/activate pip install --upgrade pip pip install --upgrade setuptools - pip install https://github.com/matrix-org/synapse/tarball/master + pip install matrix-synapse -This installs synapse, along with the libraries it uses, into a virtual +This installs Synapse, along with the libraries it uses, into a virtual environment under ``~/.synapse``. Feel free to pick a different directory if you prefer. 
+This Synapse installation can then be later upgraded by using pip again with the +update flag:: + + source ~/.synapse/bin/activate + pip install -U matrix-synapse + In case of problems, please see the _`Troubleshooting` section below. There is an offical synapse image available at @@ -167,7 +174,7 @@ Alternatively, Andreas Peters (previously Silvio Fricke) has contributed a Dockerfile to automate a synapse server in a single Docker image, at https://hub.docker.com/r/avhost/docker-matrix/tags/ -Configuring synapse +Configuring Synapse ------------------- Before you can start Synapse, you will need to generate a configuration @@ -249,26 +256,6 @@ Setting up a TURN server For reliable VoIP calls to be routed via this homeserver, you MUST configure a TURN server. See ``_ for details. -IPv6 ----- - -As of Synapse 0.19 we finally support IPv6, many thanks to @kyrias and @glyph -for providing PR #1696. - -However, for federation to work on hosts with IPv6 DNS servers you **must** -be running Twisted 17.1.0 or later - see https://github.com/matrix-org/synapse/issues/1002 -for details. We can't make Synapse depend on Twisted 17.1 by default -yet as it will break most older distributions (see https://github.com/matrix-org/synapse/pull/1909) -so if you are using operating system dependencies you'll have to install your -own Twisted 17.1 package via pip or backports etc. - -If you're running in a virtualenv then pip should have installed the newest -Twisted automatically, but if your virtualenv is old you will need to manually -upgrade to a newer Twisted dependency via: - - pip install Twisted>=17.1.0 - - Running Synapse =============== @@ -444,8 +431,7 @@ settings require a slightly more difficult installation process. using the ``.`` command, rather than ``bash``'s ``source``. 5) Optionally, use ``pip`` to install ``lxml``, which Synapse needs to parse webpages for their titles. -6) Use ``pip`` to install this repository: ``pip install - https://github.com/matrix-org/synapse/tarball/master`` +6) Use ``pip`` to install this repository: ``pip install matrix-synapse`` 7) Optionally, change ``_synapse``'s shell to ``/bin/false`` to reduce the chance of a compromised Synapse server being used to take over your box. @@ -473,7 +459,7 @@ Troubleshooting Troubleshooting Installation ---------------------------- -Synapse requires pip 1.7 or later, so if your OS provides too old a version you +Synapse requires pip 8 or later, so if your OS provides too old a version you may need to manually upgrade it:: sudo pip install --upgrade pip @@ -508,28 +494,6 @@ failing, e.g.:: pip install twisted -On OS X, if you encounter clang: error: unknown argument: '-mno-fused-madd' you -will need to export CFLAGS=-Qunused-arguments. - -Troubleshooting Running ------------------------ - -If synapse fails with ``missing "sodium.h"`` crypto errors, you may need -to manually upgrade PyNaCL, as synapse uses NaCl (https://nacl.cr.yp.to/) for -encryption and digital signatures. -Unfortunately PyNACL currently has a few issues -(https://github.com/pyca/pynacl/issues/53) and -(https://github.com/pyca/pynacl/issues/79) that mean it may not install -correctly, causing all tests to fail with errors about missing "sodium.h". 
To -fix try re-installing from PyPI or directly from -(https://github.com/pyca/pynacl):: - - # Install from PyPI - pip install --user --upgrade --force pynacl - - # Install from github - pip install --user https://github.com/pyca/pynacl/tarball/master - Running out of File Handles ~~~~~~~~~~~~~~~~~~~~~~~~~~~ diff --git a/UPGRADE.rst b/UPGRADE.rst index f6bb1070b..6cf3169f7 100644 --- a/UPGRADE.rst +++ b/UPGRADE.rst @@ -18,7 +18,7 @@ instructions that may be required are listed later in this document. .. code:: bash - pip install --upgrade --process-dependency-links https://github.com/matrix-org/synapse/tarball/master + pip install --upgrade --process-dependency-links matrix-synapse # restart synapse synctl restart @@ -48,11 +48,11 @@ returned by the Client-Server API: # configured on port 443. curl -kv https:///_matrix/client/versions 2>&1 | grep "Server:" -Upgrading to $NEXT_VERSION +Upgrading to v0.27.3 ==================== This release expands the anonymous usage stats sent if the opt-in -``report_stats`` configuration is set to ``true``. We now capture RSS memory +``report_stats`` configuration is set to ``true``. We now capture RSS memory and cpu use at a very coarse level. This requires administrators to install the optional ``psutil`` python module. diff --git a/changelog.d/3985.misc b/changelog.d/3985.misc new file mode 100644 index 000000000..ba935caf3 --- /dev/null +++ b/changelog.d/3985.misc @@ -0,0 +1 @@ +Updated the installation instructions to point to the matrix-synapse package on PyPI.
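As a footnote on the merge_base_branch.sh change in this patch: the fallback
logic is small enough to restate as a Python sketch (illustrative only; the
real script remains in bash):

    import json
    import os
    from urllib.request import urlopen

    pr_number = os.environ.get("CIRCLE_PR_NUMBER")
    if not pr_number:
        # No PR opened yet; all PRs land on develop, so assume that target.
        base = "develop"
    else:
        url = "https://api.github.com/repos/matrix-org/synapse/pulls/" + pr_number
        with urlopen(url) as resp:
            base = json.load(resp)["base"]["ref"]
    print(base)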