From 3a30846bd088bbfecd9ddfa1ee82a6951670ade7 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Thu, 11 Apr 2024 16:03:45 +0100 Subject: [PATCH 01/26] Fix mypy on latest Twisted release (#17036) `ITransport.abortConnection` isn't a thing, but `HTTPChannel.forceAbortClient` calls it, so lets just use that Fixes https://github.com/element-hq/synapse/issues/16728 --- changelog.d/17036.misc | 1 + synapse/http/proxy.py | 3 ++- synapse/http/server.py | 4 ++-- synapse/http/site.py | 3 ++- 4 files changed, 7 insertions(+), 4 deletions(-) create mode 100644 changelog.d/17036.misc diff --git a/changelog.d/17036.misc b/changelog.d/17036.misc new file mode 100644 index 000000000..329666805 --- /dev/null +++ b/changelog.d/17036.misc @@ -0,0 +1 @@ +Fix mypy with latest Twisted release. diff --git a/synapse/http/proxy.py b/synapse/http/proxy.py index 6cbbd5741..5b5ded757 100644 --- a/synapse/http/proxy.py +++ b/synapse/http/proxy.py @@ -262,7 +262,8 @@ class _ProxyResponseBody(protocol.Protocol): self._request.finish() else: # Abort the underlying request since our remote request also failed. - self._request.transport.abortConnection() + if self._request.channel: + self._request.channel.forceAbortClient() class ProxySite(Site): diff --git a/synapse/http/server.py b/synapse/http/server.py index 632284712..c76500e14 100644 --- a/synapse/http/server.py +++ b/synapse/http/server.py @@ -153,9 +153,9 @@ def return_json_error( # Only respond with an error response if we haven't already started writing, # otherwise lets just kill the connection if request.startedWriting: - if request.transport: + if request.channel: try: - request.transport.abortConnection() + request.channel.forceAbortClient() except Exception: # abortConnection throws if the connection is already closed pass diff --git a/synapse/http/site.py b/synapse/http/site.py index 682b28e4c..a5b578067 100644 --- a/synapse/http/site.py +++ b/synapse/http/site.py @@ -150,7 +150,8 @@ class SynapseRequest(Request): self.get_method(), self.get_redacted_uri(), ) - self.transport.abortConnection() + if self.channel: + self.channel.forceAbortClient() return super().handleContentChunk(data) From fe4719a2683dcd0d9c9deb606a1895d222c1b001 Mon Sep 17 00:00:00 2001 From: Nick Mills-Barrett Date: Fri, 12 Apr 2024 09:28:44 +0100 Subject: [PATCH 02/26] Use receipts `event_stream_ordering` instead of joins (#17032) Resurrecting https://github.com/matrix-org/synapse/pull/13918. This should reduce IOPs incurred by joining to the events table to lookup stream ordering, which happens in many receipt handling code paths. Like the previous PR I believe sufficient time has passed between the original migration in DB schema 72 and now to merge this as-is. It's highly unlikely that both the migration is still ongoing AND (active) users still have any receipts prior to that date. In the unlikely event there is a receipt without a populated `event_stream_ordering` synapse will behave just as it does now when receipts exist for events that don't (yet): for push action calculation the receipts are just ignored. 
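To make the shape of the change concrete, here is a simplified sketch of one of the affected queries, before and after (not the exact statements in the diff below, which also filter on receipt type and stream position):

```python
# Before: the receipt's stream ordering had to be looked up by joining to the
# events table on (room_id, event_id).
OLD_SQL = """
    SELECT thread_id, MAX(stream_ordering) AS threaded_receipt_stream_ordering
    FROM receipts_linearized
    LEFT JOIN events USING (room_id, event_id)
    WHERE user_id = ? AND room_id = ?
    GROUP BY thread_id
"""

# After: the denormalised event_stream_ordering column (populated since the
# schema 72 migration) is read directly, so the join is no longer needed.
NEW_SQL = """
    SELECT thread_id, MAX(event_stream_ordering) AS threaded_receipt_stream_ordering
    FROM receipts_linearized
    WHERE user_id = ? AND room_id = ?
    GROUP BY thread_id
"""
```

The same substitution applies throughout the diff below: wherever `stream_ordering` previously came from the `events` join, the `event_stream_ordering` column on `receipts_linearized` is read instead.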
I've removed the validation on event IDs as this is already covered here: https://github.com/element-hq/synapse/blob/59ceabcb9798793cd4312fdbcced4e612aeda84d/synapse/handlers/receipts.py#L189-L192 --- changelog.d/17032.misc | 1 + .../databases/main/event_push_actions.py | 22 +++++++------------ synapse/storage/databases/main/receipts.py | 8 +++---- 3 files changed, 12 insertions(+), 19 deletions(-) create mode 100644 changelog.d/17032.misc diff --git a/changelog.d/17032.misc b/changelog.d/17032.misc new file mode 100644 index 000000000..b03f6f42e --- /dev/null +++ b/changelog.d/17032.misc @@ -0,0 +1 @@ +Use new receipts column to optimise receipt and push action SQL queries. Contributed by Nick @ Beeper (@fizzadar). diff --git a/synapse/storage/databases/main/event_push_actions.py b/synapse/storage/databases/main/event_push_actions.py index 40bf000e9..bdd0781c4 100644 --- a/synapse/storage/databases/main/event_push_actions.py +++ b/synapse/storage/databases/main/event_push_actions.py @@ -385,7 +385,6 @@ class EventPushActionsWorkerStore(ReceiptsWorkerStore, StreamWorkerStore, SQLBas WITH all_receipts AS ( SELECT room_id, thread_id, MAX(event_stream_ordering) AS max_receipt_stream_ordering FROM receipts_linearized - LEFT JOIN events USING (room_id, event_id) WHERE {receipt_types_clause} AND user_id = ? @@ -621,13 +620,12 @@ class EventPushActionsWorkerStore(ReceiptsWorkerStore, StreamWorkerStore, SQLBas SELECT notif_count, COALESCE(unread_count, 0), thread_id FROM event_push_summary LEFT JOIN ( - SELECT thread_id, MAX(stream_ordering) AS threaded_receipt_stream_ordering + SELECT thread_id, MAX(event_stream_ordering) AS threaded_receipt_stream_ordering FROM receipts_linearized - LEFT JOIN events USING (room_id, event_id) WHERE user_id = ? AND room_id = ? - AND stream_ordering > ? + AND event_stream_ordering > ? AND {receipt_types_clause} GROUP BY thread_id ) AS receipts USING (thread_id) @@ -659,13 +657,12 @@ class EventPushActionsWorkerStore(ReceiptsWorkerStore, StreamWorkerStore, SQLBas sql = f""" SELECT COUNT(*), thread_id FROM event_push_actions LEFT JOIN ( - SELECT thread_id, MAX(stream_ordering) AS threaded_receipt_stream_ordering + SELECT thread_id, MAX(event_stream_ordering) AS threaded_receipt_stream_ordering FROM receipts_linearized - LEFT JOIN events USING (room_id, event_id) WHERE user_id = ? AND room_id = ? - AND stream_ordering > ? + AND event_stream_ordering > ? AND {receipt_types_clause} GROUP BY thread_id ) AS receipts USING (thread_id) @@ -738,13 +735,12 @@ class EventPushActionsWorkerStore(ReceiptsWorkerStore, StreamWorkerStore, SQLBas thread_id FROM event_push_actions LEFT JOIN ( - SELECT thread_id, MAX(stream_ordering) AS threaded_receipt_stream_ordering + SELECT thread_id, MAX(event_stream_ordering) AS threaded_receipt_stream_ordering FROM receipts_linearized - LEFT JOIN events USING (room_id, event_id) WHERE user_id = ? AND room_id = ? - AND stream_ordering > ? + AND event_stream_ordering > ? AND {receipt_types_clause} GROUP BY thread_id ) AS receipts USING (thread_id) @@ -910,9 +906,8 @@ class EventPushActionsWorkerStore(ReceiptsWorkerStore, StreamWorkerStore, SQLBas # given this function generally gets called with only one room and # thread ID. 
sql = f""" - SELECT room_id, thread_id, MAX(stream_ordering) + SELECT room_id, thread_id, MAX(event_stream_ordering) FROM receipts_linearized - INNER JOIN events USING (room_id, event_id) WHERE {receipt_types_clause} AND {thread_ids_clause} AND {room_ids_clause} @@ -1442,9 +1437,8 @@ class EventPushActionsWorkerStore(ReceiptsWorkerStore, StreamWorkerStore, SQLBas ) sql = """ - SELECT r.stream_id, r.room_id, r.user_id, r.thread_id, e.stream_ordering + SELECT r.stream_id, r.room_id, r.user_id, r.thread_id, r.event_stream_ordering FROM receipts_linearized AS r - INNER JOIN events AS e USING (event_id) WHERE ? < r.stream_id AND r.stream_id <= ? AND user_id LIKE ? ORDER BY r.stream_id ASC LIMIT ? diff --git a/synapse/storage/databases/main/receipts.py b/synapse/storage/databases/main/receipts.py index d513c4253..9660fc469 100644 --- a/synapse/storage/databases/main/receipts.py +++ b/synapse/storage/databases/main/receipts.py @@ -178,14 +178,13 @@ class ReceiptsWorkerStore(SQLBaseStore): ) sql = f""" - SELECT event_id, stream_ordering + SELECT event_id, event_stream_ordering FROM receipts_linearized - INNER JOIN events USING (room_id, event_id) WHERE {clause} AND user_id = ? AND room_id = ? AND thread_id IS NULL - ORDER BY stream_ordering DESC + ORDER BY event_stream_ordering DESC LIMIT 1 """ @@ -736,8 +735,7 @@ class ReceiptsWorkerStore(SQLBaseStore): thread_args = (thread_id,) sql = f""" - SELECT stream_ordering, event_id FROM events - INNER JOIN receipts_linearized AS r USING (event_id, room_id) + SELECT r.event_stream_ordering, r.event_id FROM receipts_linearized AS r WHERE r.room_id = ? AND r.receipt_type = ? AND r.user_id = ? AND {thread_clause} """ txn.execute( From 259442fa4c476b32de1e8a0739f5909403c820e4 Mon Sep 17 00:00:00 2001 From: Kegan Dougal <7190048+kegsay@users.noreply.github.com> Date: Mon, 15 Apr 2024 11:57:56 +0100 Subject: [PATCH 03/26] bugfix: make msc3967 idempotent (#16943) MSC3967 was updated recently to make it more robust to network failures: > there is an existing cross-signing master key and it exactly matches the cross-signing master key provided in the request body. If there are any additional keys provided in the request (self signing key, user signing key) they MUST also match the existing keys stored on the server. In other words, the request contains no new keys. If there are new keys, UIA MUST be performed. https://github.com/matrix-org/matrix-spec-proposals/blob/hughns/device-signing-upload-uia/proposals/3967-device-signing-upload-uia.md#proposal This covers the case where the 200 OK is lost in transit so the client retries the upload, only to then get UIA'd. Complement tests: https://github.com/matrix-org/complement/pull/713 - passing example https://github.com/element-hq/synapse/actions/runs/7976948122/job/21778795094?pr=16943#step:7:8820 ### Pull Request Checklist * [x] Pull request is based on the develop branch * [x] Pull request includes a [changelog file](https://element-hq.github.io/synapse/latest/development/contributing_guide.html#changelog). The entry should: - Be a short description of your change which makes sense to users. "Fixed a bug that prevented receiving messages from other servers." instead of "Moved X method from `EventStore` to `EventWorkerStore`.". - Use markdown where necessary, mostly for `code blocks`. - End with either a period (.) or an exclamation mark (!). - Start with a capital letter. - Feel free to credit yourself, by adding a sentence "Contributed by @github_username." or "Contributed by [Your Name]." 
to the end of the entry. * [x] [Code style](https://element-hq.github.io/synapse/latest/code_style.html) is correct (run the [linters](https://element-hq.github.io/synapse/latest/development/contributing_guide.html#run-the-linters)) --------- Co-authored-by: reivilibre --- changelog.d/16943.bugfix | 1 + .../conf/workers-shared-extra.yaml.j2 | 2 + scripts-dev/complement.sh | 2 +- synapse/handlers/e2e_keys.py | 36 +++++++++++++ synapse/rest/client/keys.py | 14 +++++- tests/handlers/test_e2e_keys.py | 50 +++++++++++++++++++ 6 files changed, 102 insertions(+), 3 deletions(-) create mode 100644 changelog.d/16943.bugfix diff --git a/changelog.d/16943.bugfix b/changelog.d/16943.bugfix new file mode 100644 index 000000000..436074113 --- /dev/null +++ b/changelog.d/16943.bugfix @@ -0,0 +1 @@ +Make the CSAPI endpoint `/keys/device_signing/upload` idempotent. \ No newline at end of file diff --git a/docker/complement/conf/workers-shared-extra.yaml.j2 b/docker/complement/conf/workers-shared-extra.yaml.j2 index 2b11b487f..32eada441 100644 --- a/docker/complement/conf/workers-shared-extra.yaml.j2 +++ b/docker/complement/conf/workers-shared-extra.yaml.j2 @@ -102,6 +102,8 @@ experimental_features: msc3391_enabled: true # Filtering /messages by relation type. msc3874_enabled: true + # no UIA for x-signing upload for the first time + msc3967_enabled: true server_notices: system_mxid_localpart: _server diff --git a/scripts-dev/complement.sh b/scripts-dev/complement.sh index b1a8724b7..2a779f825 100755 --- a/scripts-dev/complement.sh +++ b/scripts-dev/complement.sh @@ -214,7 +214,7 @@ fi extra_test_args=() -test_packages="./tests/csapi ./tests ./tests/msc3874 ./tests/msc3890 ./tests/msc3391 ./tests/msc3930 ./tests/msc3902" +test_packages="./tests/csapi ./tests ./tests/msc3874 ./tests/msc3890 ./tests/msc3391 ./tests/msc3930 ./tests/msc3902 ./tests/msc3967" # Enable dirty runs, so tests will reuse the same container where possible. # This significantly speeds up tests, but increases the possibility of test pollution. diff --git a/synapse/handlers/e2e_keys.py b/synapse/handlers/e2e_keys.py index 63e00f102..1ece54ccf 100644 --- a/synapse/handlers/e2e_keys.py +++ b/synapse/handlers/e2e_keys.py @@ -1476,6 +1476,42 @@ class E2eKeysHandler: else: return exists, self.clock.time_msec() < ts_replacable_without_uia_before + async def has_different_keys(self, user_id: str, body: JsonDict) -> bool: + """ + Check if a key provided in `body` differs from the same key stored in the DB. Returns + true on the first difference. If a key exists in `body` but does not exist in the DB, + returns True. If `body` has no keys, this always returns False. + Note by 'key' we mean Matrix key rather than JSON key. + + The purpose of this function is to detect whether or not we need to apply UIA checks. + We must apply UIA checks if any key in the database is being overwritten. If a key is + being inserted for the first time, or if the key exactly matches what is in the database, + then no UIA check needs to be performed. + + Args: + user_id: The user who sent the `body`. + body: The JSON request body from POST /keys/device_signing/upload + Returns: + True if any key in `body` has a different value in the database. + """ + # Ensure that each key provided in the request body exactly matches the one we have stored. + # The first time we see the DB having a different key to the matching request key, bail. 
+ # Note: we do not care if the DB has a key which the request does not specify, as we only + # care about *replacements* or *insertions* (i.e UPSERT) + req_body_key_to_db_key = { + "master_key": "master", + "self_signing_key": "self_signing", + "user_signing_key": "user_signing", + } + for req_body_key, db_key in req_body_key_to_db_key.items(): + if req_body_key in body: + existing_key = await self.store.get_e2e_cross_signing_key( + user_id, db_key + ) + if existing_key != body[req_body_key]: + return True + return False + def _check_cross_signing_key( key: JsonDict, user_id: str, key_type: str, signing_key: Optional[VerifyKey] = None diff --git a/synapse/rest/client/keys.py b/synapse/rest/client/keys.py index b6d9ee074..86c951585 100644 --- a/synapse/rest/client/keys.py +++ b/synapse/rest/client/keys.py @@ -409,7 +409,18 @@ class SigningKeyUploadServlet(RestServlet): # But first-time setup is fine elif self.hs.config.experimental.msc3967_enabled: - # If we already have a master key then cross signing is set up and we require UIA to reset + # MSC3967 allows this endpoint to 200 OK for idempotency. Resending exactly the same + # keys should just 200 OK without doing a UIA prompt. + keys_are_different = await self.e2e_keys_handler.has_different_keys( + user_id, body + ) + if not keys_are_different: + # FIXME: we do not fallthrough to upload_signing_keys_for_user because confusingly + # if we do, we 500 as it looks like it tries to INSERT the same key twice, causing a + # unique key constraint violation. This sounds like a bug? + return 200, {} + # the keys are different, is x-signing set up? If no, then the keys don't exist which is + # why they are different. If yes, then we need to UIA to change them. if is_cross_signing_setup: await self.auth_handler.validate_user_via_ui_auth( requester, @@ -420,7 +431,6 @@ class SigningKeyUploadServlet(RestServlet): can_skip_ui_auth=False, ) # Otherwise we don't require UIA since we are setting up cross signing for first time - else: # Previous behaviour is to always require UIA but allow it to be skipped await self.auth_handler.validate_user_via_ui_auth( diff --git a/tests/handlers/test_e2e_keys.py b/tests/handlers/test_e2e_keys.py index 3d931abb0..0e6352ff4 100644 --- a/tests/handlers/test_e2e_keys.py +++ b/tests/handlers/test_e2e_keys.py @@ -1101,6 +1101,56 @@ class E2eKeysHandlerTestCase(unittest.HomeserverTestCase): }, ) + def test_has_different_keys(self) -> None: + """check that has_different_keys returns True when the keys provided are different to what + is in the database.""" + local_user = "@boris:" + self.hs.hostname + keys1 = { + "master_key": { + # private key: 2lonYOM6xYKdEsO+6KrC766xBcHnYnim1x/4LFGF8B0 + "user_id": local_user, + "usage": ["master"], + "keys": { + "ed25519:nqOvzeuGWT/sRx3h7+MHoInYj3Uk2LD/unI9kDYcHwk": "nqOvzeuGWT/sRx3h7+MHoInYj3Uk2LD/unI9kDYcHwk" + }, + } + } + self.get_success(self.handler.upload_signing_keys_for_user(local_user, keys1)) + is_different = self.get_success( + self.handler.has_different_keys( + local_user, + { + "master_key": keys1["master_key"], + }, + ) + ) + self.assertEqual(is_different, False) + # change the usage => different keys + keys1["master_key"]["usage"] = ["develop"] + is_different = self.get_success( + self.handler.has_different_keys( + local_user, + { + "master_key": keys1["master_key"], + }, + ) + ) + self.assertEqual(is_different, True) + keys1["master_key"]["usage"] = ["master"] # reset + # change the key => different keys + keys1["master_key"]["keys"] = { + 
"ed25519:nqOvzeuGWT/sRx3h7+MHoInYj3Uk2LD/unIc0rncs": "nqOvzeuGWT/sRx3h7+MHoInYj3Uk2LD/unIc0rncs" + } + is_different = self.get_success( + self.handler.has_different_keys( + local_user, + { + "master_key": keys1["master_key"], + }, + ) + ) + self.assertEqual(is_different, True) + def test_query_devices_remote_sync(self) -> None: """Tests that querying keys for a remote user that we share a room with, but haven't yet fetched the keys for, returns the cross signing keys From f0d6f140479d24754993b7fcaeb33e07f26e1c88 Mon Sep 17 00:00:00 2001 From: Gordan Trevis Date: Tue, 16 Apr 2024 21:12:36 +0200 Subject: [PATCH 04/26] Parse Integer negative value validation (#16920) --- changelog.d/16920.bugfix | 1 + synapse/http/servlet.py | 90 ++++++++++++++++------ synapse/rest/admin/federation.py | 38 ++------- synapse/rest/admin/media.py | 54 ++----------- synapse/rest/admin/statistics.py | 34 +------- synapse/rest/admin/users.py | 18 +---- synapse/rest/client/room.py | 2 +- synapse/rest/media/preview_url_resource.py | 5 +- tests/rest/admin/test_media.py | 5 +- 9 files changed, 89 insertions(+), 158 deletions(-) create mode 100644 changelog.d/16920.bugfix diff --git a/changelog.d/16920.bugfix b/changelog.d/16920.bugfix new file mode 100644 index 000000000..460f4f716 --- /dev/null +++ b/changelog.d/16920.bugfix @@ -0,0 +1 @@ +Adds validation to ensure that the `limit` parameter on `/publicRooms` is non-negative. diff --git a/synapse/http/servlet.py b/synapse/http/servlet.py index b73d06f1d..0ca08038f 100644 --- a/synapse/http/servlet.py +++ b/synapse/http/servlet.py @@ -19,7 +19,8 @@ # # -""" This module contains base REST classes for constructing REST servlets. """ +"""This module contains base REST classes for constructing REST servlets.""" + import enum import logging from http import HTTPStatus @@ -65,17 +66,49 @@ def parse_integer(request: Request, name: str, default: int) -> int: ... @overload -def parse_integer(request: Request, name: str, *, required: Literal[True]) -> int: ... +def parse_integer( + request: Request, name: str, *, default: int, negative: bool +) -> int: ... @overload def parse_integer( - request: Request, name: str, default: Optional[int] = None, required: bool = False + request: Request, name: str, *, default: int, negative: bool = False +) -> int: ... + + +@overload +def parse_integer( + request: Request, name: str, *, required: Literal[True], negative: bool = False +) -> int: ... + + +@overload +def parse_integer( + request: Request, name: str, *, default: Literal[None], negative: bool = False +) -> None: ... + + +@overload +def parse_integer(request: Request, name: str, *, negative: bool) -> Optional[int]: ... + + +@overload +def parse_integer( + request: Request, + name: str, + default: Optional[int] = None, + required: bool = False, + negative: bool = False, ) -> Optional[int]: ... def parse_integer( - request: Request, name: str, default: Optional[int] = None, required: bool = False + request: Request, + name: str, + default: Optional[int] = None, + required: bool = False, + negative: bool = False, ) -> Optional[int]: """Parse an integer parameter from the request string @@ -85,16 +118,17 @@ def parse_integer( default: value to use if the parameter is absent, defaults to None. required: whether to raise a 400 SynapseError if the parameter is absent, defaults to False. - + negative: whether to allow negative integers, defaults to True. Returns: An int value or the default. 
Raises: - SynapseError: if the parameter is absent and required, or if the - parameter is present and not an integer. + SynapseError: if the parameter is absent and required, if the + parameter is present and not an integer, or if the + parameter is illegitimate negative. """ args: Mapping[bytes, Sequence[bytes]] = request.args # type: ignore - return parse_integer_from_args(args, name, default, required) + return parse_integer_from_args(args, name, default, required, negative) @overload @@ -120,6 +154,7 @@ def parse_integer_from_args( name: str, default: Optional[int] = None, required: bool = False, + negative: bool = False, ) -> Optional[int]: ... @@ -128,6 +163,7 @@ def parse_integer_from_args( name: str, default: Optional[int] = None, required: bool = False, + negative: bool = True, ) -> Optional[int]: """Parse an integer parameter from the request string @@ -137,33 +173,37 @@ def parse_integer_from_args( default: value to use if the parameter is absent, defaults to None. required: whether to raise a 400 SynapseError if the parameter is absent, defaults to False. + negative: whether to allow negative integers, defaults to True. Returns: An int value or the default. Raises: - SynapseError: if the parameter is absent and required, or if the - parameter is present and not an integer. + SynapseError: if the parameter is absent and required, if the + parameter is present and not an integer, or if the + parameter is illegitimate negative. """ name_bytes = name.encode("ascii") - if name_bytes in args: - try: - return int(args[name_bytes][0]) - except Exception: - message = "Query parameter %r must be an integer" % (name,) - raise SynapseError( - HTTPStatus.BAD_REQUEST, message, errcode=Codes.INVALID_PARAM - ) - else: - if required: - message = "Missing integer query parameter %r" % (name,) - raise SynapseError( - HTTPStatus.BAD_REQUEST, message, errcode=Codes.MISSING_PARAM - ) - else: + if name_bytes not in args: + if not required: return default + message = f"Missing required integer query parameter {name}" + raise SynapseError(HTTPStatus.BAD_REQUEST, message, errcode=Codes.MISSING_PARAM) + + try: + integer = int(args[name_bytes][0]) + except Exception: + message = f"Query parameter {name} must be an integer" + raise SynapseError(HTTPStatus.BAD_REQUEST, message, errcode=Codes.INVALID_PARAM) + + if not negative and integer < 0: + message = f"Query parameter {name} must be a positive integer." + raise SynapseError(HTTPStatus.BAD_REQUEST, message, errcode=Codes.INVALID_PARAM) + + return integer + @overload def parse_boolean(request: Request, name: str, default: bool) -> bool: ... 
diff --git a/synapse/rest/admin/federation.py b/synapse/rest/admin/federation.py index 045153e0c..14ab4644c 100644 --- a/synapse/rest/admin/federation.py +++ b/synapse/rest/admin/federation.py @@ -23,7 +23,7 @@ from http import HTTPStatus from typing import TYPE_CHECKING, Tuple from synapse.api.constants import Direction -from synapse.api.errors import Codes, NotFoundError, SynapseError +from synapse.api.errors import NotFoundError, SynapseError from synapse.federation.transport.server import Authenticator from synapse.http.servlet import RestServlet, parse_enum, parse_integer, parse_string from synapse.http.site import SynapseRequest @@ -61,22 +61,8 @@ class ListDestinationsRestServlet(RestServlet): async def on_GET(self, request: SynapseRequest) -> Tuple[int, JsonDict]: await assert_requester_is_admin(self._auth, request) - start = parse_integer(request, "from", default=0) - limit = parse_integer(request, "limit", default=100) - - if start < 0: - raise SynapseError( - HTTPStatus.BAD_REQUEST, - "Query parameter from must be a string representing a positive integer.", - errcode=Codes.INVALID_PARAM, - ) - - if limit < 0: - raise SynapseError( - HTTPStatus.BAD_REQUEST, - "Query parameter limit must be a string representing a positive integer.", - errcode=Codes.INVALID_PARAM, - ) + start = parse_integer(request, "from", default=0, negative=False) + limit = parse_integer(request, "limit", default=100, negative=False) destination = parse_string(request, "destination") @@ -195,22 +181,8 @@ class DestinationMembershipRestServlet(RestServlet): if not await self._store.is_destination_known(destination): raise NotFoundError("Unknown destination") - start = parse_integer(request, "from", default=0) - limit = parse_integer(request, "limit", default=100) - - if start < 0: - raise SynapseError( - HTTPStatus.BAD_REQUEST, - "Query parameter from must be a string representing a positive integer.", - errcode=Codes.INVALID_PARAM, - ) - - if limit < 0: - raise SynapseError( - HTTPStatus.BAD_REQUEST, - "Query parameter limit must be a string representing a positive integer.", - errcode=Codes.INVALID_PARAM, - ) + start = parse_integer(request, "from", default=0, negative=False) + limit = parse_integer(request, "limit", default=100, negative=False) direction = parse_enum(request, "dir", Direction, default=Direction.FORWARDS) diff --git a/synapse/rest/admin/media.py b/synapse/rest/admin/media.py index 27f080865..a05b7252e 100644 --- a/synapse/rest/admin/media.py +++ b/synapse/rest/admin/media.py @@ -311,29 +311,17 @@ class DeleteMediaByDateSize(RestServlet): ) -> Tuple[int, JsonDict]: await assert_requester_is_admin(self.auth, request) - before_ts = parse_integer(request, "before_ts", required=True) - size_gt = parse_integer(request, "size_gt", default=0) + before_ts = parse_integer(request, "before_ts", required=True, negative=False) + size_gt = parse_integer(request, "size_gt", default=0, negative=False) keep_profiles = parse_boolean(request, "keep_profiles", default=True) - if before_ts < 0: - raise SynapseError( - HTTPStatus.BAD_REQUEST, - "Query parameter before_ts must be a positive integer.", - errcode=Codes.INVALID_PARAM, - ) - elif before_ts < 30000000000: # Dec 1970 in milliseconds, Aug 2920 in seconds + if before_ts < 30000000000: # Dec 1970 in milliseconds, Aug 2920 in seconds raise SynapseError( HTTPStatus.BAD_REQUEST, "Query parameter before_ts you provided is from the year 1970. 
" + "Double check that you are providing a timestamp in milliseconds.", errcode=Codes.INVALID_PARAM, ) - if size_gt < 0: - raise SynapseError( - HTTPStatus.BAD_REQUEST, - "Query parameter size_gt must be a string representing a positive integer.", - errcode=Codes.INVALID_PARAM, - ) # This check is useless, we keep it for the legacy endpoint only. if server_name is not None and self.server_name != server_name: @@ -389,22 +377,8 @@ class UserMediaRestServlet(RestServlet): if user is None: raise NotFoundError("Unknown user") - start = parse_integer(request, "from", default=0) - limit = parse_integer(request, "limit", default=100) - - if start < 0: - raise SynapseError( - HTTPStatus.BAD_REQUEST, - "Query parameter from must be a string representing a positive integer.", - errcode=Codes.INVALID_PARAM, - ) - - if limit < 0: - raise SynapseError( - HTTPStatus.BAD_REQUEST, - "Query parameter limit must be a string representing a positive integer.", - errcode=Codes.INVALID_PARAM, - ) + start = parse_integer(request, "from", default=0, negative=False) + limit = parse_integer(request, "limit", default=100, negative=False) # If neither `order_by` nor `dir` is set, set the default order # to newest media is on top for backward compatibility. @@ -447,22 +421,8 @@ class UserMediaRestServlet(RestServlet): if user is None: raise NotFoundError("Unknown user") - start = parse_integer(request, "from", default=0) - limit = parse_integer(request, "limit", default=100) - - if start < 0: - raise SynapseError( - HTTPStatus.BAD_REQUEST, - "Query parameter from must be a string representing a positive integer.", - errcode=Codes.INVALID_PARAM, - ) - - if limit < 0: - raise SynapseError( - HTTPStatus.BAD_REQUEST, - "Query parameter limit must be a string representing a positive integer.", - errcode=Codes.INVALID_PARAM, - ) + start = parse_integer(request, "from", default=0, negative=False) + limit = parse_integer(request, "limit", default=100, negative=False) # If neither `order_by` nor `dir` is set, set the default order # to newest media is on top for backward compatibility. 
diff --git a/synapse/rest/admin/statistics.py b/synapse/rest/admin/statistics.py index 832f20402..dc27a41dd 100644 --- a/synapse/rest/admin/statistics.py +++ b/synapse/rest/admin/statistics.py @@ -63,38 +63,12 @@ class UserMediaStatisticsRestServlet(RestServlet): ), ) - start = parse_integer(request, "from", default=0) - if start < 0: - raise SynapseError( - HTTPStatus.BAD_REQUEST, - "Query parameter from must be a string representing a positive integer.", - errcode=Codes.INVALID_PARAM, - ) + start = parse_integer(request, "from", default=0, negative=False) + limit = parse_integer(request, "limit", default=100, negative=False) + from_ts = parse_integer(request, "from_ts", default=0, negative=False) + until_ts = parse_integer(request, "until_ts", negative=False) - limit = parse_integer(request, "limit", default=100) - if limit < 0: - raise SynapseError( - HTTPStatus.BAD_REQUEST, - "Query parameter limit must be a string representing a positive integer.", - errcode=Codes.INVALID_PARAM, - ) - - from_ts = parse_integer(request, "from_ts", default=0) - if from_ts < 0: - raise SynapseError( - HTTPStatus.BAD_REQUEST, - "Query parameter from_ts must be a string representing a positive integer.", - errcode=Codes.INVALID_PARAM, - ) - - until_ts = parse_integer(request, "until_ts") if until_ts is not None: - if until_ts < 0: - raise SynapseError( - HTTPStatus.BAD_REQUEST, - "Query parameter until_ts must be a string representing a positive integer.", - errcode=Codes.INVALID_PARAM, - ) if until_ts <= from_ts: raise SynapseError( HTTPStatus.BAD_REQUEST, diff --git a/synapse/rest/admin/users.py b/synapse/rest/admin/users.py index 4e34e4651..5bf12c497 100644 --- a/synapse/rest/admin/users.py +++ b/synapse/rest/admin/users.py @@ -90,22 +90,8 @@ class UsersRestServletV2(RestServlet): async def on_GET(self, request: SynapseRequest) -> Tuple[int, JsonDict]: await assert_requester_is_admin(self.auth, request) - start = parse_integer(request, "from", default=0) - limit = parse_integer(request, "limit", default=100) - - if start < 0: - raise SynapseError( - HTTPStatus.BAD_REQUEST, - "Query parameter from must be a string representing a positive integer.", - errcode=Codes.INVALID_PARAM, - ) - - if limit < 0: - raise SynapseError( - HTTPStatus.BAD_REQUEST, - "Query parameter limit must be a string representing a positive integer.", - errcode=Codes.INVALID_PARAM, - ) + start = parse_integer(request, "from", default=0, negative=False) + limit = parse_integer(request, "limit", default=100, negative=False) user_id = parse_string(request, "user_id") name = parse_string(request, "name", encoding="utf-8") diff --git a/synapse/rest/client/room.py b/synapse/rest/client/room.py index 65dedb8b9..4eeadf877 100644 --- a/synapse/rest/client/room.py +++ b/synapse/rest/client/room.py @@ -499,7 +499,7 @@ class PublicRoomListRestServlet(RestServlet): if server: raise e - limit: Optional[int] = parse_integer(request, "limit", 0) + limit: Optional[int] = parse_integer(request, "limit", 0, negative=False) since_token = parse_string(request, "since") if limit == 0: diff --git a/synapse/rest/media/preview_url_resource.py b/synapse/rest/media/preview_url_resource.py index 6724986fc..bfeff2179 100644 --- a/synapse/rest/media/preview_url_resource.py +++ b/synapse/rest/media/preview_url_resource.py @@ -72,9 +72,6 @@ class PreviewUrlResource(RestServlet): # XXX: if get_user_by_req fails, what should we do in an async render? 
requester = await self.auth.get_user_by_req(request) url = parse_string(request, "url", required=True) - ts = parse_integer(request, "ts") - if ts is None: - ts = self.clock.time_msec() - + ts = parse_integer(request, "ts", default=self.clock.time_msec()) og = await self.url_previewer.preview(url, requester.user, ts) respond_with_json_bytes(request, 200, og, send_cors=True) diff --git a/tests/rest/admin/test_media.py b/tests/rest/admin/test_media.py index 493e1d191..f37816551 100644 --- a/tests/rest/admin/test_media.py +++ b/tests/rest/admin/test_media.py @@ -277,7 +277,8 @@ class DeleteMediaByDateSizeTestCase(_AdminMediaTests): self.assertEqual(400, channel.code, msg=channel.json_body) self.assertEqual(Codes.MISSING_PARAM, channel.json_body["errcode"]) self.assertEqual( - "Missing integer query parameter 'before_ts'", channel.json_body["error"] + "Missing required integer query parameter before_ts", + channel.json_body["error"], ) def test_invalid_parameter(self) -> None: @@ -320,7 +321,7 @@ class DeleteMediaByDateSizeTestCase(_AdminMediaTests): self.assertEqual(400, channel.code, msg=channel.json_body) self.assertEqual(Codes.INVALID_PARAM, channel.json_body["errcode"]) self.assertEqual( - "Query parameter size_gt must be a string representing a positive integer.", + "Query parameter size_gt must be a positive integer.", channel.json_body["error"], ) From 28f5ad07d37a9f82c896fa1722d8c47980adc89e Mon Sep 17 00:00:00 2001 From: Quentin Gliech Date: Wed, 17 Apr 2024 15:44:40 +0200 Subject: [PATCH 05/26] Bump minimum required Rust version to 1.66.0 (#17079) --- .github/workflows/tests.yml | 18 +++++++++--------- changelog.d/17079.misc | 1 + rust/Cargo.toml | 2 +- 3 files changed, 11 insertions(+), 10 deletions(-) create mode 100644 changelog.d/17079.misc diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml index 295461aad..20afe311f 100644 --- a/.github/workflows/tests.yml +++ b/.github/workflows/tests.yml @@ -81,7 +81,7 @@ jobs: steps: - uses: actions/checkout@v4 - name: Install Rust - uses: dtolnay/rust-toolchain@1.65.0 + uses: dtolnay/rust-toolchain@1.66.0 - uses: Swatinem/rust-cache@v2 - uses: matrix-org/setup-python-poetry@v1 with: @@ -148,7 +148,7 @@ jobs: uses: actions/checkout@v4 - name: Install Rust - uses: dtolnay/rust-toolchain@1.65.0 + uses: dtolnay/rust-toolchain@1.66.0 - uses: Swatinem/rust-cache@v2 - name: Setup Poetry @@ -208,7 +208,7 @@ jobs: with: ref: ${{ github.event.pull_request.head.sha }} - name: Install Rust - uses: dtolnay/rust-toolchain@1.65.0 + uses: dtolnay/rust-toolchain@1.66.0 - uses: Swatinem/rust-cache@v2 - uses: matrix-org/setup-python-poetry@v1 with: @@ -225,7 +225,7 @@ jobs: - uses: actions/checkout@v4 - name: Install Rust - uses: dtolnay/rust-toolchain@1.65.0 + uses: dtolnay/rust-toolchain@1.66.0 with: components: clippy - uses: Swatinem/rust-cache@v2 @@ -344,7 +344,7 @@ jobs: postgres:${{ matrix.job.postgres-version }} - name: Install Rust - uses: dtolnay/rust-toolchain@1.65.0 + uses: dtolnay/rust-toolchain@1.66.0 - uses: Swatinem/rust-cache@v2 - uses: matrix-org/setup-python-poetry@v1 @@ -386,7 +386,7 @@ jobs: - uses: actions/checkout@v4 - name: Install Rust - uses: dtolnay/rust-toolchain@1.65.0 + uses: dtolnay/rust-toolchain@1.66.0 - uses: Swatinem/rust-cache@v2 # There aren't wheels for some of the older deps, so we need to install @@ -498,7 +498,7 @@ jobs: run: cat sytest-blacklist .ci/worker-blacklist > synapse-blacklist-with-workers - name: Install Rust - uses: dtolnay/rust-toolchain@1.65.0 + uses: dtolnay/rust-toolchain@1.66.0 - 
uses: Swatinem/rust-cache@v2 - name: Run SyTest @@ -642,7 +642,7 @@ jobs: path: synapse - name: Install Rust - uses: dtolnay/rust-toolchain@1.65.0 + uses: dtolnay/rust-toolchain@1.66.0 - uses: Swatinem/rust-cache@v2 - name: Prepare Complement's Prerequisites @@ -674,7 +674,7 @@ jobs: - uses: actions/checkout@v4 - name: Install Rust - uses: dtolnay/rust-toolchain@1.65.0 + uses: dtolnay/rust-toolchain@1.66.0 - uses: Swatinem/rust-cache@v2 - run: cargo test diff --git a/changelog.d/17079.misc b/changelog.d/17079.misc new file mode 100644 index 000000000..340e40d19 --- /dev/null +++ b/changelog.d/17079.misc @@ -0,0 +1 @@ +Bump minimum supported Rust version to 1.66.0. diff --git a/rust/Cargo.toml b/rust/Cargo.toml index d89def184..ba293f8d4 100644 --- a/rust/Cargo.toml +++ b/rust/Cargo.toml @@ -7,7 +7,7 @@ name = "synapse" version = "0.1.0" edition = "2021" -rust-version = "1.65.0" +rust-version = "1.66.0" [lib] name = "synapse" From c8e0bed4269106c49ffd733eaad5cdb3576f55d2 Mon Sep 17 00:00:00 2001 From: Quentin Gliech Date: Wed, 17 Apr 2024 16:47:35 +0200 Subject: [PATCH 06/26] Support for MSC4108 via delegation (#17086) This adds support for MSC4108 via delegation, similar to what has been done for MSC3886 --------- Co-authored-by: Hugh Nimmo-Smith --- changelog.d/17086.feature | 1 + synapse/config/experimental.py | 11 +++++++++ synapse/http/server.py | 13 ++++++++++- synapse/rest/client/rendezvous.py | 30 +++++++++++++++++++++--- synapse/rest/client/versions.py | 3 +++ tests/rest/client/test_rendezvous.py | 34 ++++++++++++++++++++++++---- 6 files changed, 84 insertions(+), 8 deletions(-) create mode 100644 changelog.d/17086.feature diff --git a/changelog.d/17086.feature b/changelog.d/17086.feature new file mode 100644 index 000000000..08b407d31 --- /dev/null +++ b/changelog.d/17086.feature @@ -0,0 +1 @@ +Support delegating the rendezvous mechanism described MSC4108 to an external implementation. 
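For context before the config and servlet changes below: the experimental settings this feature expects look roughly like the override used in the new test at the end of this patch. This is a sketch only — the delegation endpoint URL is a placeholder, and MSC3861 must be enabled alongside it, otherwise the config check raises a `ConfigError`:

```python
# Sketch of the experimental config for MSC4108 delegation (mirrors the test
# override below). The endpoint URL is a placeholder; MSC3861 must be enabled
# as well, or loading the config raises a ConfigError.
experimental_features = {
    "msc3861": {
        "enabled": True,
        "issuer": "https://issuer",
        "client_id": "client_id",
        "client_auth_method": "client_secret_post",
        "client_secret": "client_secret",
        "admin_token": "admin_token_value",
    },
    "msc4108_delegation_endpoint": "https://example.com/rendezvous",
}
```

With this set, a `POST` to `/_matrix/client/unstable/org.matrix.msc4108/rendezvous` is answered with a 307 redirect to the configured endpoint, which is what the new servlet and test below implement.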
diff --git a/synapse/config/experimental.py b/synapse/config/experimental.py index fcc78d2d8..353ae23f9 100644 --- a/synapse/config/experimental.py +++ b/synapse/config/experimental.py @@ -411,3 +411,14 @@ class ExperimentalConfig(Config): self.msc4069_profile_inhibit_propagation = experimental.get( "msc4069_profile_inhibit_propagation", False ) + + # MSC4108: Mechanism to allow OIDC sign in and E2EE set up via QR code + self.msc4108_delegation_endpoint: Optional[str] = experimental.get( + "msc4108_delegation_endpoint", None + ) + + if self.msc4108_delegation_endpoint is not None and not self.msc3861.enabled: + raise ConfigError( + "MSC4108 requires MSC3861 to be enabled", + ("experimental", "msc4108_delegation_endpoint"), + ) diff --git a/synapse/http/server.py b/synapse/http/server.py index c76500e14..45b2cbffc 100644 --- a/synapse/http/server.py +++ b/synapse/http/server.py @@ -909,7 +909,18 @@ def set_cors_headers(request: "SynapseRequest") -> None: request.setHeader( b"Access-Control-Allow-Methods", b"GET, HEAD, POST, PUT, DELETE, OPTIONS" ) - if request.experimental_cors_msc3886: + if request.path is not None and request.path.startswith( + b"/_matrix/client/unstable/org.matrix.msc4108/rendezvous" + ): + request.setHeader( + b"Access-Control-Allow-Headers", + b"Content-Type, If-Match, If-None-Match", + ) + request.setHeader( + b"Access-Control-Expose-Headers", + b"Synapse-Trace-Id, Server, ETag", + ) + elif request.experimental_cors_msc3886: request.setHeader( b"Access-Control-Allow-Headers", b"X-Requested-With, Content-Type, Authorization, Date, If-Match, If-None-Match", diff --git a/synapse/rest/client/rendezvous.py b/synapse/rest/client/rendezvous.py index dee7c37ec..ed06a2998 100644 --- a/synapse/rest/client/rendezvous.py +++ b/synapse/rest/client/rendezvous.py @@ -2,7 +2,7 @@ # This file is licensed under the Affero General Public License (AGPL) version 3. # # Copyright 2022 The Matrix.org Foundation C.I.C. -# Copyright (C) 2023 New Vector, Ltd +# Copyright (C) 2023-2024 New Vector, Ltd # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU Affero General Public License as @@ -34,7 +34,7 @@ if TYPE_CHECKING: logger = logging.getLogger(__name__) -class RendezvousServlet(RestServlet): +class MSC3886RendezvousServlet(RestServlet): """ This is a placeholder implementation of [MSC3886](https://github.com/matrix-org/matrix-spec-proposals/pull/3886) simple client rendezvous capability that is used by the "Sign in with QR" functionality. @@ -76,6 +76,30 @@ class RendezvousServlet(RestServlet): # PUT, GET and DELETE are not implemented as they should be fulfilled by the redirect target. 
+class MSC4108DelegationRendezvousServlet(RestServlet): + PATTERNS = client_patterns( + "/org.matrix.msc4108/rendezvous$", releases=[], v1=False, unstable=True + ) + + def __init__(self, hs: "HomeServer"): + super().__init__() + redirection_target: Optional[str] = ( + hs.config.experimental.msc4108_delegation_endpoint + ) + assert ( + redirection_target is not None + ), "Servlet is only registered if there is a delegation target" + self.endpoint = redirection_target.encode("utf-8") + + async def on_POST(self, request: SynapseRequest) -> None: + respond_with_redirect( + request, self.endpoint, statusCode=TEMPORARY_REDIRECT, cors=True + ) + + def register_servlets(hs: "HomeServer", http_server: HttpServer) -> None: if hs.config.experimental.msc3886_endpoint is not None: - RendezvousServlet(hs).register(http_server) + MSC3886RendezvousServlet(hs).register(http_server) + + if hs.config.experimental.msc4108_delegation_endpoint is not None: + MSC4108DelegationRendezvousServlet(hs).register(http_server) diff --git a/synapse/rest/client/versions.py b/synapse/rest/client/versions.py index c46d4fe8c..638d4c45a 100644 --- a/synapse/rest/client/versions.py +++ b/synapse/rest/client/versions.py @@ -140,6 +140,9 @@ class VersionsRestServlet(RestServlet): "org.matrix.msc4069": self.config.experimental.msc4069_profile_inhibit_propagation, # Allows clients to handle push for encrypted events. "org.matrix.msc4028": self.config.experimental.msc4028_push_encrypted_events, + # MSC4108: Mechanism to allow OIDC sign in and E2EE set up via QR code + "org.matrix.msc4108": self.config.experimental.msc4108_delegation_endpoint + is not None, }, }, ) diff --git a/tests/rest/client/test_rendezvous.py b/tests/rest/client/test_rendezvous.py index 294b39f17..c84704c09 100644 --- a/tests/rest/client/test_rendezvous.py +++ b/tests/rest/client/test_rendezvous.py @@ -27,8 +27,10 @@ from synapse.util import Clock from tests import unittest from tests.unittest import override_config +from tests.utils import HAS_AUTHLIB -endpoint = "/_matrix/client/unstable/org.matrix.msc3886/rendezvous" +msc3886_endpoint = "/_matrix/client/unstable/org.matrix.msc3886/rendezvous" +msc4108_endpoint = "/_matrix/client/unstable/org.matrix.msc4108/rendezvous" class RendezvousServletTestCase(unittest.HomeserverTestCase): @@ -41,11 +43,35 @@ class RendezvousServletTestCase(unittest.HomeserverTestCase): return self.hs def test_disabled(self) -> None: - channel = self.make_request("POST", endpoint, {}, access_token=None) + channel = self.make_request("POST", msc3886_endpoint, {}, access_token=None) + self.assertEqual(channel.code, 404) + channel = self.make_request("POST", msc4108_endpoint, {}, access_token=None) self.assertEqual(channel.code, 404) @override_config({"experimental_features": {"msc3886_endpoint": "/asd"}}) - def test_redirect(self) -> None: - channel = self.make_request("POST", endpoint, {}, access_token=None) + def test_msc3886_redirect(self) -> None: + channel = self.make_request("POST", msc3886_endpoint, {}, access_token=None) self.assertEqual(channel.code, 307) self.assertEqual(channel.headers.getRawHeaders("Location"), ["/asd"]) + + @unittest.skip_unless(HAS_AUTHLIB, "requires authlib") + @override_config( + { + "disable_registration": True, + "experimental_features": { + "msc4108_delegation_endpoint": "https://asd", + "msc3861": { + "enabled": True, + "issuer": "https://issuer", + "client_id": "client_id", + "client_auth_method": "client_secret_post", + "client_secret": "client_secret", + "admin_token": "admin_token_value", + }, + 
}, + } + ) + def test_msc4108_delegation(self) -> None: + channel = self.make_request("POST", msc4108_endpoint, {}, access_token=None) + self.assertEqual(channel.code, 307) + self.assertEqual(channel.headers.getRawHeaders("Location"), ["https://asd"]) From 803f05f60caab050e68bfc022a6da3dac5a9a75f Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Wed, 17 Apr 2024 16:08:40 +0100 Subject: [PATCH 07/26] Fix remote receipts for events we don't have (#17096) Introduced in #17032 --- changelog.d/17096.misc | 1 + synapse/storage/databases/main/receipts.py | 6 +++++- 2 files changed, 6 insertions(+), 1 deletion(-) create mode 100644 changelog.d/17096.misc diff --git a/changelog.d/17096.misc b/changelog.d/17096.misc new file mode 100644 index 000000000..b03f6f42e --- /dev/null +++ b/changelog.d/17096.misc @@ -0,0 +1 @@ +Use new receipts column to optimise receipt and push action SQL queries. Contributed by Nick @ Beeper (@fizzadar). diff --git a/synapse/storage/databases/main/receipts.py b/synapse/storage/databases/main/receipts.py index 9660fc469..13387a383 100644 --- a/synapse/storage/databases/main/receipts.py +++ b/synapse/storage/databases/main/receipts.py @@ -734,9 +734,13 @@ class ReceiptsWorkerStore(SQLBaseStore): thread_clause = "r.thread_id = ?" thread_args = (thread_id,) + # If the receipt doesn't have a stream ordering it is because we + # don't have the associated event, and so must be a remote receipt. + # Hence it's safe to just allow new receipts to clobber it. sql = f""" SELECT r.event_stream_ordering, r.event_id FROM receipts_linearized AS r - WHERE r.room_id = ? AND r.receipt_type = ? AND r.user_id = ? AND {thread_clause} + WHERE r.room_id = ? AND r.receipt_type = ? AND r.user_id = ? + AND r.event_stream_ordering IS NOT NULL AND {thread_clause} """ txn.execute( sql, From 09f0957b36cf1b4e9a89f5594df51a853d0dfffe Mon Sep 17 00:00:00 2001 From: Quentin Gliech Date: Thu, 18 Apr 2024 12:20:30 +0200 Subject: [PATCH 08/26] Helpers to transform Twisted requests to Rust http Requests/Responses (#17081) This adds functions to transform a Twisted request to the `http::Request`, and then to send back an `http::Response` through it. 
It also imports the SynapseError exception so that we can throw that from Rust code directly Example usage of this would be: ```rust use crate::http::{http_request_from_twisted, http_response_to_twisted, HeaderMapPyExt}; fn handler(twisted_request: &PyAny) -> PyResult<()> { let request = http_request_from_twisted(twisted_request)?; let ua: headers::UserAgent = request.headers().typed_get_required()?; if whatever { return Err((crate::errors::SynapseError::new( StatusCode::UNAUTHORIZED, "Whatever".to_owned "M_UNAUTHORIZED", None, None, ))); } let response = Response::new("hello".as_bytes()); http_response_to_twisted(twisted_request, response)?; Ok(()) } ``` --- Cargo.lock | 92 ++++++++++++++++++++++- changelog.d/17081.misc | 1 + rust/Cargo.toml | 3 + rust/src/errors.rs | 60 +++++++++++++++ rust/src/http.rs | 165 +++++++++++++++++++++++++++++++++++++++++ rust/src/lib.rs | 2 + 6 files changed, 321 insertions(+), 2 deletions(-) create mode 100644 changelog.d/17081.misc create mode 100644 rust/src/errors.rs create mode 100644 rust/src/http.rs diff --git a/Cargo.lock b/Cargo.lock index 630d38c2f..65f4807c6 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -29,6 +29,12 @@ version = "1.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d468802bab17cbc0cc575e9b053f41e72aa36bfa6b7f55e3529ffa43161b97fa" +[[package]] +name = "base64" +version = "0.21.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9d297deb1925b89f2ccc13d7635fa0714f12c87adce1c75356b39ca9b7178567" + [[package]] name = "bitflags" version = "1.3.2" @@ -53,12 +59,27 @@ dependencies = [ "generic-array", ] +[[package]] +name = "bytes" +version = "1.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "514de17de45fdb8dc022b1a7975556c53c86f9f0aa5f534b98977b171857c2c9" + [[package]] name = "cfg-if" version = "1.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd" +[[package]] +name = "cpufeatures" +version = "0.2.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "53fe5e26ff1b7aef8bca9c6080520cfb8d9333c7568e1829cef191a9723e5504" +dependencies = [ + "libc", +] + [[package]] name = "crypto-common" version = "0.1.6" @@ -80,6 +101,12 @@ dependencies = [ "subtle", ] +[[package]] +name = "fnv" +version = "1.0.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3f9eec918d3f24069decb9af1554cad7c880e2da24a9afd88aca000531ab82c1" + [[package]] name = "generic-array" version = "0.14.6" @@ -90,6 +117,30 @@ dependencies = [ "version_check", ] +[[package]] +name = "headers" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "322106e6bd0cba2d5ead589ddb8150a13d7c4217cf80d7c4f682ca994ccc6aa9" +dependencies = [ + "base64", + "bytes", + "headers-core", + "http", + "httpdate", + "mime", + "sha1", +] + +[[package]] +name = "headers-core" +version = "0.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "54b4a22553d4242c49fddb9ba998a99962b5cc6f22cb5a3482bec22522403ce4" +dependencies = [ + "http", +] + [[package]] name = "heck" version = "0.4.1" @@ -102,6 +153,23 @@ version = "0.4.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7f24254aa9a54b5c858eaee2f5bccdb46aaf0e486a595ed5fd8f86ba55232a70" +[[package]] +name = "http" +version = "1.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"21b9ddb458710bc376481b842f5da65cdf31522de232c1ca8146abce2a358258" +dependencies = [ + "bytes", + "fnv", + "itoa", +] + +[[package]] +name = "httpdate" +version = "1.0.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "df3b46402a9d5adb4c86a0cf463f42e19994e3ee891101b1841f30a545cb49a9" + [[package]] name = "indoc" version = "2.0.4" @@ -122,9 +190,9 @@ checksum = "e2abad23fbc42b3700f2f279844dc832adb2b2eb069b2df918f455c4e18cc646" [[package]] name = "libc" -version = "0.2.135" +version = "0.2.153" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "68783febc7782c6c5cb401fbda4de5a9898be1762314da0bb2c10ced61f18b0c" +checksum = "9c198f91728a82281a64e1f4f9eeb25d82cb32a5de251c6bd1b5154d63a8e7bd" [[package]] name = "lock_api" @@ -157,6 +225,12 @@ dependencies = [ "autocfg", ] +[[package]] +name = "mime" +version = "0.3.17" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6877bb514081ee2a7ff5ef9de3281f14a4dd4bceac4c09388074a6b5df8a139a" + [[package]] name = "once_cell" version = "1.15.0" @@ -376,6 +450,17 @@ dependencies = [ "serde", ] +[[package]] +name = "sha1" +version = "0.10.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f04293dc80c3993519f2d7f6f511707ee7094fe0c6d3406feb330cdb3540eba3" +dependencies = [ + "cfg-if", + "cpufeatures", + "digest", +] + [[package]] name = "smallvec" version = "1.10.0" @@ -405,7 +490,10 @@ version = "0.1.0" dependencies = [ "anyhow", "blake2", + "bytes", + "headers", "hex", + "http", "lazy_static", "log", "pyo3", diff --git a/changelog.d/17081.misc b/changelog.d/17081.misc new file mode 100644 index 000000000..d1ab69126 --- /dev/null +++ b/changelog.d/17081.misc @@ -0,0 +1 @@ +Add helpers to transform Twisted requests to Rust http Requests/Responses. diff --git a/rust/Cargo.toml b/rust/Cargo.toml index ba293f8d4..9ac766182 100644 --- a/rust/Cargo.toml +++ b/rust/Cargo.toml @@ -23,6 +23,9 @@ name = "synapse.synapse_rust" [dependencies] anyhow = "1.0.63" +bytes = "1.6.0" +headers = "0.4.0" +http = "1.1.0" lazy_static = "1.4.0" log = "0.4.17" pyo3 = { version = "0.20.0", features = [ diff --git a/rust/src/errors.rs b/rust/src/errors.rs new file mode 100644 index 000000000..4e580e3e8 --- /dev/null +++ b/rust/src/errors.rs @@ -0,0 +1,60 @@ +/* + * This file is licensed under the Affero General Public License (AGPL) version 3. + * + * Copyright (C) 2024 New Vector, Ltd + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the GNU Affero General Public License as + * published by the Free Software Foundation, either version 3 of the + * License, or (at your option) any later version. + * + * See the GNU Affero General Public License for more details: + * . 
+ */ + +#![allow(clippy::new_ret_no_self)] + +use std::collections::HashMap; + +use http::{HeaderMap, StatusCode}; +use pyo3::{exceptions::PyValueError, import_exception}; + +import_exception!(synapse.api.errors, SynapseError); + +impl SynapseError { + pub fn new( + code: StatusCode, + message: String, + errcode: &'static str, + additional_fields: Option>, + headers: Option, + ) -> pyo3::PyErr { + // Transform the HeaderMap into a HashMap + let headers = if let Some(headers) = headers { + let mut map = HashMap::with_capacity(headers.len()); + for (key, value) in headers.iter() { + let Ok(value) = value.to_str() else { + // This should never happen, but we don't want to panic in case it does + return PyValueError::new_err( + "Could not construct SynapseError: header value is not valid ASCII", + ); + }; + + map.insert(key.as_str().to_owned(), value.to_owned()); + } + Some(map) + } else { + None + }; + + SynapseError::new_err((code.as_u16(), message, errcode, additional_fields, headers)) + } +} + +import_exception!(synapse.api.errors, NotFoundError); + +impl NotFoundError { + pub fn new() -> pyo3::PyErr { + NotFoundError::new_err(()) + } +} diff --git a/rust/src/http.rs b/rust/src/http.rs new file mode 100644 index 000000000..74098f4c8 --- /dev/null +++ b/rust/src/http.rs @@ -0,0 +1,165 @@ +/* + * This file is licensed under the Affero General Public License (AGPL) version 3. + * + * Copyright (C) 2024 New Vector, Ltd + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the GNU Affero General Public License as + * published by the Free Software Foundation, either version 3 of the + * License, or (at your option) any later version. + * + * See the GNU Affero General Public License for more details: + * . 
+ */ + +use bytes::{Buf, BufMut, Bytes, BytesMut}; +use headers::{Header, HeaderMapExt}; +use http::{HeaderName, HeaderValue, Method, Request, Response, StatusCode, Uri}; +use pyo3::{ + exceptions::PyValueError, + types::{PyBytes, PySequence, PyTuple}, + PyAny, PyResult, +}; + +use crate::errors::SynapseError; + +/// Read a file-like Python object by chunks +/// +/// # Errors +/// +/// Returns an error if calling the `read` on the Python object failed +fn read_io_body(body: &PyAny, chunk_size: usize) -> PyResult { + let mut buf = BytesMut::new(); + loop { + let bytes: &PyBytes = body.call_method1("read", (chunk_size,))?.downcast()?; + if bytes.as_bytes().is_empty() { + return Ok(buf.into()); + } + buf.put(bytes.as_bytes()); + } +} + +/// Transform a Twisted `IRequest` to an [`http::Request`] +/// +/// It uses the following members of `IRequest`: +/// - `content`, which is expected to be a file-like object with a `read` method +/// - `uri`, which is expected to be a valid URI as `bytes` +/// - `method`, which is expected to be a valid HTTP method as `bytes` +/// - `requestHeaders`, which is expected to have a `getAllRawHeaders` method +/// +/// # Errors +/// +/// Returns an error if the Python object doesn't properly implement `IRequest` +pub fn http_request_from_twisted(request: &PyAny) -> PyResult> { + let content = request.getattr("content")?; + let body = read_io_body(content, 4096)?; + + let mut req = Request::new(body); + + let uri: &PyBytes = request.getattr("uri")?.downcast()?; + *req.uri_mut() = + Uri::try_from(uri.as_bytes()).map_err(|_| PyValueError::new_err("invalid uri"))?; + + let method: &PyBytes = request.getattr("method")?.downcast()?; + *req.method_mut() = Method::from_bytes(method.as_bytes()) + .map_err(|_| PyValueError::new_err("invalid method"))?; + + let headers_iter = request + .getattr("requestHeaders")? + .call_method0("getAllRawHeaders")? + .iter()?; + + for header in headers_iter { + let header = header?; + let header: &PyTuple = header.downcast()?; + let name: &PyBytes = header.get_item(0)?.downcast()?; + let name = HeaderName::from_bytes(name.as_bytes()) + .map_err(|_| PyValueError::new_err("invalid header name"))?; + + let values: &PySequence = header.get_item(1)?.downcast()?; + for index in 0..values.len()? 
{ + let value: &PyBytes = values.get_item(index)?.downcast()?; + let value = HeaderValue::from_bytes(value.as_bytes()) + .map_err(|_| PyValueError::new_err("invalid header value"))?; + req.headers_mut().append(name.clone(), value); + } + } + + Ok(req) +} + +/// Send an [`http::Response`] through a Twisted `IRequest` +/// +/// It uses the following members of `IRequest`: +/// +/// - `responseHeaders`, which is expected to have a `addRawHeader(bytes, bytes)` method +/// - `setResponseCode(int)` method +/// - `write(bytes)` method +/// - `finish()` method +/// +/// # Errors +/// +/// Returns an error if the Python object doesn't properly implement `IRequest` +pub fn http_response_to_twisted(request: &PyAny, response: Response) -> PyResult<()> +where + B: Buf, +{ + let (parts, mut body) = response.into_parts(); + + request.call_method1("setResponseCode", (parts.status.as_u16(),))?; + + let response_headers = request.getattr("responseHeaders")?; + for (name, value) in parts.headers.iter() { + response_headers.call_method1("addRawHeader", (name.as_str(), value.as_bytes()))?; + } + + while body.remaining() != 0 { + let chunk = body.chunk(); + request.call_method1("write", (chunk,))?; + body.advance(chunk.len()); + } + + request.call_method0("finish")?; + + Ok(()) +} + +/// An extension trait for [`HeaderMap`] that provides typed access to headers, and throws the +/// right python exceptions when the header is missing or fails to parse. +/// +/// [`HeaderMap`]: headers::HeaderMap +pub trait HeaderMapPyExt: HeaderMapExt { + /// Get a header from the map, returning an error if it is missing or invalid. + fn typed_get_required(&self) -> PyResult + where + H: Header, + { + self.typed_get_optional::()?.ok_or_else(|| { + SynapseError::new( + StatusCode::BAD_REQUEST, + format!("Missing required header: {}", H::name()), + "M_MISSING_PARAM", + None, + None, + ) + }) + } + + /// Get a header from the map, returning `None` if it is missing and an error if it is invalid. + fn typed_get_optional(&self) -> PyResult> + where + H: Header, + { + self.typed_try_get::().map_err(|_| { + SynapseError::new( + StatusCode::BAD_REQUEST, + format!("Invalid header: {}", H::name()), + "M_INVALID_PARAM", + None, + None, + ) + }) + } +} + +impl HeaderMapPyExt for T {} diff --git a/rust/src/lib.rs b/rust/src/lib.rs index 7b3b579e5..36a3d6452 100644 --- a/rust/src/lib.rs +++ b/rust/src/lib.rs @@ -3,7 +3,9 @@ use pyo3::prelude::*; use pyo3_log::ResetHandle; pub mod acl; +pub mod errors; pub mod events; +pub mod http; pub mod push; lazy_static! { From 1d4753231021cfb3cb8a2af7e4fdef543559851a Mon Sep 17 00:00:00 2001 From: Gordan Trevis Date: Thu, 18 Apr 2024 14:57:38 +0200 Subject: [PATCH 09/26] Parse json validation (#16923) Co-authored-by: Andrew Morgan <1342360+anoadragon453@users.noreply.github.com> --- changelog.d/16923.bugfix | 1 + synapse/http/servlet.py | 82 +++++++++++++++++++++++++++++++++ synapse/rest/admin/rooms.py | 36 +++++---------- synapse/rest/client/room.py | 35 +++++--------- tests/rest/admin/test_room.py | 61 ++++++++++++++++++++++++ tests/rest/client/test_rooms.py | 52 +++++++++++++++++++++ 6 files changed, 220 insertions(+), 47 deletions(-) create mode 100644 changelog.d/16923.bugfix diff --git a/changelog.d/16923.bugfix b/changelog.d/16923.bugfix new file mode 100644 index 000000000..bd6f24925 --- /dev/null +++ b/changelog.d/16923.bugfix @@ -0,0 +1 @@ +Return `400 M_NOT_JSON` upon receiving invalid JSON in query parameters across various client and admin endpoints, rather than an internal server error. 
\ No newline at end of file diff --git a/synapse/http/servlet.py b/synapse/http/servlet.py index 0ca08038f..ab12951da 100644 --- a/synapse/http/servlet.py +++ b/synapse/http/servlet.py @@ -23,6 +23,7 @@ import enum import logging +import urllib.parse as urlparse from http import HTTPStatus from typing import ( TYPE_CHECKING, @@ -450,6 +451,87 @@ def parse_string( ) +def parse_json( + request: Request, + name: str, + default: Optional[dict] = None, + required: bool = False, + encoding: str = "ascii", +) -> Optional[JsonDict]: + """ + Parse a JSON parameter from the request query string. + + Args: + request: the twisted HTTP request. + name: the name of the query parameter. + default: value to use if the parameter is absent, + defaults to None. + required: whether to raise a 400 SynapseError if the + parameter is absent, defaults to False. + encoding: The encoding to decode the string content with. + + Returns: + A JSON value, or `default` if the named query parameter was not found + and `required` was False. + + Raises: + SynapseError if the parameter is absent and required, or if the + parameter is present and not a JSON object. + """ + args: Mapping[bytes, Sequence[bytes]] = request.args # type: ignore + return parse_json_from_args( + args, + name, + default, + required=required, + encoding=encoding, + ) + + +def parse_json_from_args( + args: Mapping[bytes, Sequence[bytes]], + name: str, + default: Optional[dict] = None, + required: bool = False, + encoding: str = "ascii", +) -> Optional[JsonDict]: + """ + Parse a JSON parameter from the request query string. + + Args: + args: a mapping of request args as bytes to a list of bytes (e.g. request.args). + name: the name of the query parameter. + default: value to use if the parameter is absent, + defaults to None. + required: whether to raise a 400 SynapseError if the + parameter is absent, defaults to False. + encoding: the encoding to decode the string content with. + + A JSON value, or `default` if the named query parameter was not found + and `required` was False. + + Raises: + SynapseError if the parameter is absent and required, or if the + parameter is present and not a JSON object. 
+ """ + name_bytes = name.encode("ascii") + + if name_bytes not in args: + if not required: + return default + + message = f"Missing required integer query parameter {name}" + raise SynapseError(HTTPStatus.BAD_REQUEST, message, errcode=Codes.MISSING_PARAM) + + json_str = parse_string_from_args(args, name, required=True, encoding=encoding) + + try: + return json_decoder.decode(urlparse.unquote(json_str)) + except Exception: + message = f"Query parameter {name} must be a valid JSON object" + raise SynapseError(HTTPStatus.BAD_REQUEST, message, errcode=Codes.NOT_JSON) + + EnumT = TypeVar("EnumT", bound=enum.Enum) diff --git a/synapse/rest/admin/rooms.py b/synapse/rest/admin/rooms.py index 4252f98a6..0d86a4e15 100644 --- a/synapse/rest/admin/rooms.py +++ b/synapse/rest/admin/rooms.py @@ -21,7 +21,6 @@ import logging from http import HTTPStatus from typing import TYPE_CHECKING, List, Optional, Tuple, cast -from urllib import parse as urlparse import attr @@ -38,6 +37,7 @@ from synapse.http.servlet import ( assert_params_in_dict, parse_enum, parse_integer, + parse_json, parse_json_object_from_request, parse_string, ) @@ -51,7 +51,6 @@ from synapse.storage.databases.main.room import RoomSortOrder from synapse.streams.config import PaginationConfig from synapse.types import JsonDict, RoomID, ScheduledTask, UserID, create_requester from synapse.types.state import StateFilter -from synapse.util import json_decoder if TYPE_CHECKING: from synapse.api.auth import Auth @@ -776,14 +775,8 @@ class RoomEventContextServlet(RestServlet): limit = parse_integer(request, "limit", default=10) # picking the API shape for symmetry with /messages - filter_str = parse_string(request, "filter", encoding="utf-8") - if filter_str: - filter_json = urlparse.unquote(filter_str) - event_filter: Optional[Filter] = Filter( - self._hs, json_decoder.decode(filter_json) - ) - else: - event_filter = None + filter_json = parse_json(request, "filter", encoding="utf-8") + event_filter = Filter(self._hs, filter_json) if filter_json else None event_context = await self.room_context_handler.get_event_context( requester, @@ -914,21 +907,16 @@ class RoomMessagesRestServlet(RestServlet): ) # Twisted will have processed the args by now. 
assert request.args is not None + + filter_json = parse_json(request, "filter", encoding="utf-8") + event_filter = Filter(self._hs, filter_json) if filter_json else None + as_client_event = b"raw" not in request.args - filter_str = parse_string(request, "filter", encoding="utf-8") - if filter_str: - filter_json = urlparse.unquote(filter_str) - event_filter: Optional[Filter] = Filter( - self._hs, json_decoder.decode(filter_json) - ) - if ( - event_filter - and event_filter.filter_json.get("event_format", "client") - == "federation" - ): - as_client_event = False - else: - event_filter = None + if ( + event_filter + and event_filter.filter_json.get("event_format", "client") == "federation" + ): + as_client_event = False msgs = await self._pagination_handler.get_messages( room_id=room_id, diff --git a/synapse/rest/client/room.py b/synapse/rest/client/room.py index 4eeadf877..e4c7dd1a5 100644 --- a/synapse/rest/client/room.py +++ b/synapse/rest/client/room.py @@ -52,6 +52,7 @@ from synapse.http.servlet import ( parse_boolean, parse_enum, parse_integer, + parse_json, parse_json_object_from_request, parse_string, parse_strings_from_args, @@ -65,7 +66,6 @@ from synapse.rest.client.transactions import HttpTransactionCache from synapse.streams.config import PaginationConfig from synapse.types import JsonDict, Requester, StreamToken, ThirdPartyInstanceID, UserID from synapse.types.state import StateFilter -from synapse.util import json_decoder from synapse.util.cancellation import cancellable from synapse.util.stringutils import parse_and_validate_server_name, random_string @@ -703,21 +703,16 @@ class RoomMessageListRestServlet(RestServlet): ) # Twisted will have processed the args by now. assert request.args is not None + + filter_json = parse_json(request, "filter", encoding="utf-8") + event_filter = Filter(self._hs, filter_json) if filter_json else None + as_client_event = b"raw" not in request.args - filter_str = parse_string(request, "filter", encoding="utf-8") - if filter_str: - filter_json = urlparse.unquote(filter_str) - event_filter: Optional[Filter] = Filter( - self._hs, json_decoder.decode(filter_json) - ) - if ( - event_filter - and event_filter.filter_json.get("event_format", "client") - == "federation" - ): - as_client_event = False - else: - event_filter = None + if ( + event_filter + and event_filter.filter_json.get("event_format", "client") == "federation" + ): + as_client_event = False msgs = await self.pagination_handler.get_messages( room_id=room_id, @@ -898,14 +893,8 @@ class RoomEventContextServlet(RestServlet): limit = parse_integer(request, "limit", default=10) # picking the API shape for symmetry with /messages - filter_str = parse_string(request, "filter", encoding="utf-8") - if filter_str: - filter_json = urlparse.unquote(filter_str) - event_filter: Optional[Filter] = Filter( - self._hs, json_decoder.decode(filter_json) - ) - else: - event_filter = None + filter_json = parse_json(request, "filter", encoding="utf-8") + event_filter = Filter(self._hs, filter_json) if filter_json else None event_context = await self.room_context_handler.get_event_context( requester, room_id, event_id, limit, event_filter diff --git a/tests/rest/admin/test_room.py b/tests/rest/admin/test_room.py index 0b669b6ee..756274726 100644 --- a/tests/rest/admin/test_room.py +++ b/tests/rest/admin/test_room.py @@ -21,6 +21,7 @@ import json import time import urllib.parse +from http import HTTPStatus from typing import List, Optional from unittest.mock import AsyncMock, Mock @@ -2190,6 +2191,33 @@ class 
RoomMessagesTestCase(unittest.HomeserverTestCase): chunk = channel.json_body["chunk"] self.assertEqual(len(chunk), 0, [event["content"] for event in chunk]) + def test_room_message_filter_query_validation(self) -> None: + # Test json validation in (filter) query parameter. + # Does not test the validity of the filter, only the json validation. + + # Check Get with valid json filter parameter, expect 200. + valid_filter_str = '{"types": ["m.room.message"]}' + channel = self.make_request( + "GET", + f"/_synapse/admin/v1/rooms/{self.room_id}/messages?dir=b&filter={valid_filter_str}", + access_token=self.admin_user_tok, + ) + + self.assertEqual(channel.code, HTTPStatus.OK, channel.json_body) + + # Check Get with invalid json filter parameter, expect 400 NOT_JSON. + invalid_filter_str = "}}}{}" + channel = self.make_request( + "GET", + f"/_synapse/admin/v1/rooms/{self.room_id}/messages?dir=b&filter={invalid_filter_str}", + access_token=self.admin_user_tok, + ) + + self.assertEqual(channel.code, HTTPStatus.BAD_REQUEST, channel.json_body) + self.assertEqual( + channel.json_body["errcode"], Codes.NOT_JSON, channel.json_body + ) + class JoinAliasRoomTestCase(unittest.HomeserverTestCase): servlets = [ @@ -2522,6 +2550,39 @@ class JoinAliasRoomTestCase(unittest.HomeserverTestCase): else: self.fail("Event %s from events_after not found" % j) + def test_room_event_context_filter_query_validation(self) -> None: + # Test json validation in (filter) query parameter. + # Does not test the validity of the filter, only the json validation. + + # Create a user with room and event_id. + user_id = self.register_user("test", "test") + user_tok = self.login("test", "test") + room_id = self.helper.create_room_as(user_id, tok=user_tok) + event_id = self.helper.send(room_id, "message 1", tok=user_tok)["event_id"] + + # Check Get with valid json filter parameter, expect 200. + valid_filter_str = '{"types": ["m.room.message"]}' + channel = self.make_request( + "GET", + f"/_synapse/admin/v1/rooms/{room_id}/context/{event_id}?filter={valid_filter_str}", + access_token=self.admin_user_tok, + ) + + self.assertEqual(channel.code, HTTPStatus.OK, channel.json_body) + + # Check Get with invalid json filter parameter, expect 400 NOT_JSON. + invalid_filter_str = "}}}{}" + channel = self.make_request( + "GET", + f"/_synapse/admin/v1/rooms/{room_id}/context/{event_id}?filter={invalid_filter_str}", + access_token=self.admin_user_tok, + ) + + self.assertEqual(channel.code, HTTPStatus.BAD_REQUEST, channel.json_body) + self.assertEqual( + channel.json_body["errcode"], Codes.NOT_JSON, channel.json_body + ) + class MakeRoomAdminTestCase(unittest.HomeserverTestCase): servlets = [ diff --git a/tests/rest/client/test_rooms.py b/tests/rest/client/test_rooms.py index 136461508..b796163dc 100644 --- a/tests/rest/client/test_rooms.py +++ b/tests/rest/client/test_rooms.py @@ -2175,6 +2175,31 @@ class RoomMessageListTestCase(RoomBase): chunk = channel.json_body["chunk"] self.assertEqual(len(chunk), 0, [event["content"] for event in chunk]) + def test_room_message_filter_query_validation(self) -> None: + # Test json validation in (filter) query parameter. + # Does not test the validity of the filter, only the json validation. + + # Check Get with valid json filter parameter, expect 200. 
+ valid_filter_str = '{"types": ["m.room.message"]}' + channel = self.make_request( + "GET", + f"/rooms/{self.room_id}/messages?access_token=x&dir=b&filter={valid_filter_str}", + ) + + self.assertEqual(channel.code, HTTPStatus.OK, channel.json_body) + + # Check Get with invalid json filter parameter, expect 400 NOT_JSON. + invalid_filter_str = "}}}{}" + channel = self.make_request( + "GET", + f"/rooms/{self.room_id}/messages?access_token=x&dir=b&filter={invalid_filter_str}", + ) + + self.assertEqual(channel.code, HTTPStatus.BAD_REQUEST, channel.json_body) + self.assertEqual( + channel.json_body["errcode"], Codes.NOT_JSON, channel.json_body + ) + class RoomMessageFilterTestCase(RoomBase): """Tests /rooms/$room_id/messages REST events.""" @@ -3213,6 +3238,33 @@ class ContextTestCase(unittest.HomeserverTestCase): self.assertDictEqual(events_after[0].get("content"), {}, events_after[0]) self.assertEqual(events_after[1].get("content"), {}, events_after[1]) + def test_room_event_context_filter_query_validation(self) -> None: + # Test json validation in (filter) query parameter. + # Does not test the validity of the filter, only the json validation. + event_id = self.helper.send(self.room_id, "message 7", tok=self.tok)["event_id"] + + # Check Get with valid json filter parameter, expect 200. + valid_filter_str = '{"types": ["m.room.message"]}' + channel = self.make_request( + "GET", + f"/rooms/{self.room_id}/context/{event_id}?filter={valid_filter_str}", + access_token=self.tok, + ) + self.assertEqual(channel.code, HTTPStatus.OK, channel.json_body) + + # Check Get with invalid json filter parameter, expect 400 NOT_JSON. + invalid_filter_str = "}}}{}" + channel = self.make_request( + "GET", + f"/rooms/{self.room_id}/context/{event_id}?filter={invalid_filter_str}", + access_token=self.tok, + ) + + self.assertEqual(channel.code, HTTPStatus.BAD_REQUEST, channel.json_body) + self.assertEqual( + channel.json_body["errcode"], Codes.NOT_JSON, channel.json_body + ) + class RoomAliasListTestCase(unittest.HomeserverTestCase): servlets = [ From 6d64f1b2b89c3b4efdefbb5748443533f4377e5a Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Fri, 19 Apr 2024 09:40:55 +0100 Subject: [PATCH 10/26] Bump anyhow from 1.0.81 to 1.0.82 (#17095) Bumps [anyhow](https://github.com/dtolnay/anyhow) from 1.0.81 to 1.0.82.
Release notes

Sourced from anyhow's releases.

1.0.82

  • Documentation improvements
Commits
  • 074bdea Release 1.0.82
  • 47a4fbf Merge pull request #360 from dtolnay/docensure
  • c5af1db Make ensure's doc comment apply to the cfg(not(doc)) macro too
  • bebc7a2 Revert "Temporarily disable miri on doctests"
  • f2c4db9 Update ui test suite to nightly-2024-03-31
  • 028cbee Explicitly install a Rust toolchain for cargo-outdated job
  • 7a4cac5 Merge pull request #358 from dtolnay/workspacewrapper
  • 939db01 Apply RUSTC_WORKSPACE_WRAPPER
  • 9f84a37 Temporarily disable miri on doctests
  • 45e5a58 Ignore dead code lint in test
  • Additional commits viewable in compare view

[![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=anyhow&package-manager=cargo&previous-version=1.0.81&new-version=1.0.82)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) ---
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- Cargo.lock | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 65f4807c6..faac6b3c8 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -13,9 +13,9 @@ dependencies = [ [[package]] name = "anyhow" -version = "1.0.81" +version = "1.0.82" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0952808a6c2afd1aa8947271f3a60f1a6763c7b912d210184c5149b5cf147247" +checksum = "f538837af36e6f6a9be0faa67f9a314f8119e4e4b5867c6ab40ed60360142519" [[package]] name = "arc-swap" From 47f3870894847d6f29a6b9d7ee049f1ec69aecf0 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Fri, 19 Apr 2024 09:41:03 +0100 Subject: [PATCH 11/26] Bump ruff from 0.3.5 to 0.3.7 (#17094) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Bumps [ruff](https://github.com/astral-sh/ruff) from 0.3.5 to 0.3.7.
Release notes

Sourced from ruff's releases.

v0.3.7

Changes

Preview features

  • [flake8-bugbear] Implement loop-iterator-mutation (B909) (#9578)
  • [pylint] Implement rule to prefer augmented assignment (PLR6104) (#9932)

Bug fixes

  • Avoid TOCTOU errors in cache initialization (#10884)
  • [pylint] Recode nan-comparison rule to W0177 (#10894)
  • [pylint] Reverse min-max logic in if-stmt-min-max (#10890)

Contributors

v0.3.6

Changes

Preview features

  • [pylint] Implement bad-staticmethod-argument (PLW0211) (#10781)
  • [pylint] Implement if-stmt-min-max (PLR1730, PLR1731) (#10002)
  • [pyupgrade] Replace str,Enum multiple inheritance with StrEnum UP042 (#10713)
  • [refurb] Implement if-expr-instead-of-or-operator (FURB110) (#10687)
  • [refurb] Implement int-on-sliced-str (FURB166) (#10650)
  • [refurb] Implement write-whole-file (FURB103) (#10802)
  • [refurb] Support itemgetter in reimplemented-operator (FURB118) (#10526)
  • [flake8_comprehensions] Add sum/min/max to unnecessary comprehension check (C419) (#10759)

Rule changes

  • [pydocstyle] Require capitalizing docstrings where the first sentence is a single word (D403) (#10776)
  • [pycodestyle] Ignore annotated lambdas in class scopes (E731) (#10720)
  • [flake8-pyi] Various improvements to PYI034 (#10807)
  • [flake8-slots] Flag subclasses of call-based typing.NamedTuples as well as subclasses of collections.namedtuple() (SLOT002) (#10808)
  • [pyflakes] Allow forward references in class bases in stub files (F821) (#10779)
  • [pygrep-hooks] Improve blanket-noqa error message (PGH004) (#10851)

CLI

  • Support FORCE_COLOR env var (#10839)

Configuration

  • Support negated patterns in [extend-]per-file-ignores (#10852)

... (truncated)

Changelog

Sourced from ruff's changelog.

0.3.7

Preview features

  • [flake8-bugbear] Implement loop-iterator-mutation (B909) (#9578)
  • [pylint] Implement rule to prefer augmented assignment (PLR6104) (#9932)

Bug fixes

  • Avoid TOCTOU errors in cache initialization (#10884)
  • [pylint] Recode nan-comparison rule to W0177 (#10894)
  • [pylint] Reverse min-max logic in if-stmt-min-max (#10890)

0.3.6

Preview features

  • [pylint] Implement bad-staticmethod-argument (PLW0211) (#10781)
  • [pylint] Implement if-stmt-min-max (PLR1730, PLR1731) (#10002)
  • [pyupgrade] Replace str,Enum multiple inheritance with StrEnum UP042 (#10713)
  • [refurb] Implement if-expr-instead-of-or-operator (FURB110) (#10687)
  • [refurb] Implement int-on-sliced-str (FURB166) (#10650)
  • [refurb] Implement write-whole-file (FURB103) (#10802)
  • [refurb] Support itemgetter in reimplemented-operator (FURB118) (#10526)
  • [flake8_comprehensions] Add sum/min/max to unnecessary comprehension check (C419) (#10759)

Rule changes

  • [pydocstyle] Require capitalizing docstrings where the first sentence is a single word (D403) (#10776)
  • [pycodestyle] Ignore annotated lambdas in class scopes (E731) (#10720)
  • [flake8-pyi] Various improvements to PYI034 (#10807)
  • [flake8-slots] Flag subclasses of call-based typing.NamedTuples as well as subclasses of collections.namedtuple() (SLOT002) (#10808)
  • [pyflakes] Allow forward references in class bases in stub files (F821) (#10779)
  • [pygrep-hooks] Improve blanket-noqa error message (PGH004) (#10851)

CLI

  • Support FORCE_COLOR env var (#10839)

Configuration

  • Support negated patterns in [extend-]per-file-ignores (#10852)

Bug fixes

  • [flake8-import-conventions] Accept non-aliased (but correct) import in unconventional-import-alias (ICN001) (#10729)
  • [flake8-quotes] Add semantic model flag when inside f-string replacement field (#10766)
  • [pep8-naming] Recursively resolve TypeDicts for N815 violations (#10719)
  • [flake8-quotes] Respect Q00* ignores in flake8-quotes rules (#10728)
  • [flake8-simplify] Show negated condition in needless-bool diagnostics (SIM103) (#10854)

... (truncated)

Commits

[![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=ruff&package-manager=pip&previous-version=0.3.5&new-version=0.3.7)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) ---
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- poetry.lock | 38 +++++++++++++++++++------------------- pyproject.toml | 2 +- 2 files changed, 20 insertions(+), 20 deletions(-) diff --git a/poetry.lock b/poetry.lock index 814877b70..643197592 100644 --- a/poetry.lock +++ b/poetry.lock @@ -2444,28 +2444,28 @@ files = [ [[package]] name = "ruff" -version = "0.3.5" +version = "0.3.7" description = "An extremely fast Python linter and code formatter, written in Rust." optional = false python-versions = ">=3.7" files = [ - {file = "ruff-0.3.5-py3-none-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:aef5bd3b89e657007e1be6b16553c8813b221ff6d92c7526b7e0227450981eac"}, - {file = "ruff-0.3.5-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:89b1e92b3bd9fca249153a97d23f29bed3992cff414b222fcd361d763fc53f12"}, - {file = "ruff-0.3.5-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5e55771559c89272c3ebab23326dc23e7f813e492052391fe7950c1a5a139d89"}, - {file = "ruff-0.3.5-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:dabc62195bf54b8a7876add6e789caae0268f34582333cda340497c886111c39"}, - {file = "ruff-0.3.5-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3a05f3793ba25f194f395578579c546ca5d83e0195f992edc32e5907d142bfa3"}, - {file = "ruff-0.3.5-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:dfd3504e881082959b4160ab02f7a205f0fadc0a9619cc481982b6837b2fd4c0"}, - {file = "ruff-0.3.5-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:87258e0d4b04046cf1d6cc1c56fadbf7a880cc3de1f7294938e923234cf9e498"}, - {file = "ruff-0.3.5-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:712e71283fc7d9f95047ed5f793bc019b0b0a29849b14664a60fd66c23b96da1"}, - {file = "ruff-0.3.5-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a532a90b4a18d3f722c124c513ffb5e5eaff0cc4f6d3aa4bda38e691b8600c9f"}, - {file = "ruff-0.3.5-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:122de171a147c76ada00f76df533b54676f6e321e61bd8656ae54be326c10296"}, - {file = "ruff-0.3.5-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:d80a6b18a6c3b6ed25b71b05eba183f37d9bc8b16ace9e3d700997f00b74660b"}, - {file = "ruff-0.3.5-py3-none-musllinux_1_2_i686.whl", hash = "sha256:a7b6e63194c68bca8e71f81de30cfa6f58ff70393cf45aab4c20f158227d5936"}, - {file = "ruff-0.3.5-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:a759d33a20c72f2dfa54dae6e85e1225b8e302e8ac655773aff22e542a300985"}, - {file = "ruff-0.3.5-py3-none-win32.whl", hash = "sha256:9d8605aa990045517c911726d21293ef4baa64f87265896e491a05461cae078d"}, - {file = "ruff-0.3.5-py3-none-win_amd64.whl", hash = "sha256:dc56bb16a63c1303bd47563c60482a1512721053d93231cf7e9e1c6954395a0e"}, - {file = "ruff-0.3.5-py3-none-win_arm64.whl", hash = "sha256:faeeae9905446b975dcf6d4499dc93439b131f1443ee264055c5716dd947af55"}, - {file = "ruff-0.3.5.tar.gz", hash = "sha256:a067daaeb1dc2baf9b82a32dae67d154d95212080c80435eb052d95da647763d"}, + {file = "ruff-0.3.7-py3-none-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:0e8377cccb2f07abd25e84fc5b2cbe48eeb0fea9f1719cad7caedb061d70e5ce"}, + {file = "ruff-0.3.7-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:15a4d1cc1e64e556fa0d67bfd388fed416b7f3b26d5d1c3e7d192c897e39ba4b"}, + {file = "ruff-0.3.7-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:d28bdf3d7dc71dd46929fafeec98ba89b7c3550c3f0978e36389b5631b793663"}, + {file = "ruff-0.3.7-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:379b67d4f49774ba679593b232dcd90d9e10f04d96e3c8ce4a28037ae473f7bb"}, + {file = "ruff-0.3.7-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c060aea8ad5ef21cdfbbe05475ab5104ce7827b639a78dd55383a6e9895b7c51"}, + {file = "ruff-0.3.7-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:ebf8f615dde968272d70502c083ebf963b6781aacd3079081e03b32adfe4d58a"}, + {file = "ruff-0.3.7-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d48098bd8f5c38897b03604f5428901b65e3c97d40b3952e38637b5404b739a2"}, + {file = "ruff-0.3.7-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:da8a4fda219bf9024692b1bc68c9cff4b80507879ada8769dc7e985755d662ea"}, + {file = "ruff-0.3.7-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c44e0149f1d8b48c4d5c33d88c677a4aa22fd09b1683d6a7ff55b816b5d074f"}, + {file = "ruff-0.3.7-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:3050ec0af72b709a62ecc2aca941b9cd479a7bf2b36cc4562f0033d688e44fa1"}, + {file = "ruff-0.3.7-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:a29cc38e4c1ab00da18a3f6777f8b50099d73326981bb7d182e54a9a21bb4ff7"}, + {file = "ruff-0.3.7-py3-none-musllinux_1_2_i686.whl", hash = "sha256:5b15cc59c19edca917f51b1956637db47e200b0fc5e6e1878233d3a938384b0b"}, + {file = "ruff-0.3.7-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:e491045781b1e38b72c91247cf4634f040f8d0cb3e6d3d64d38dcf43616650b4"}, + {file = "ruff-0.3.7-py3-none-win32.whl", hash = "sha256:bc931de87593d64fad3a22e201e55ad76271f1d5bfc44e1a1887edd0903c7d9f"}, + {file = "ruff-0.3.7-py3-none-win_amd64.whl", hash = "sha256:5ef0e501e1e39f35e03c2acb1d1238c595b8bb36cf7a170e7c1df1b73da00e74"}, + {file = "ruff-0.3.7-py3-none-win_arm64.whl", hash = "sha256:789e144f6dc7019d1f92a812891c645274ed08af6037d11fc65fcbc183b7d59f"}, + {file = "ruff-0.3.7.tar.gz", hash = "sha256:d5c1aebee5162c2226784800ae031f660c350e7a3402c4d1f8ea4e97e232e3ba"}, ] [[package]] @@ -3451,4 +3451,4 @@ user-search = ["pyicu"] [metadata] lock-version = "2.0" python-versions = "^3.8.0" -content-hash = "4abda113a01f162bb3978b0372956d569364533aa39f57863c234363f8449a4f" +content-hash = "1951f2b4623138d47db08a405edd970e67599d05804bb459af21a085e1665f69" diff --git a/pyproject.toml b/pyproject.toml index f0f025645..fb310589f 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -321,7 +321,7 @@ all = [ # This helps prevents merge conflicts when running a batch of dependabot updates. isort = ">=5.10.1" black = ">=22.7.0" -ruff = "0.3.5" +ruff = "0.3.7" # Type checking only works with the pydantic.v1 compat module from pydantic v2 pydantic = "^2" From f5b6005559784df9383bcc4218375b27d64e4651 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Fri, 19 Apr 2024 09:41:15 +0100 Subject: [PATCH 12/26] Bump pyasn1-modules from 0.3.0 to 0.4.0 (#17093) Bumps [pyasn1-modules](https://github.com/pyasn1/pyasn1-modules) from 0.3.0 to 0.4.0.
Release notes

Sourced from pyasn1-modules's releases.

Release 0.4.0

It's a major release where we drop Python 2 support entirely. The most significant changes are:

  • Added support for Python 3.11, 3.12
  • Removed support for EOL Pythons 2.7, 3.6, 3.7

A full list of changes can be seen in the CHANGELOG.

Changelog

Sourced from pyasn1-modules's changelog.

Revision 0.4.0, released 26-03-2024

  • Added support for Python 3.11, 3.12
  • Removed support for EOL Pythons 2.7, 3.6, 3.7
Commits

[![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=pyasn1-modules&package-manager=pip&previous-version=0.3.0&new-version=0.4.0)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) ---
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- poetry.lock | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/poetry.lock b/poetry.lock index 643197592..90f592f53 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1848,17 +1848,17 @@ files = [ [[package]] name = "pyasn1-modules" -version = "0.3.0" +version = "0.4.0" description = "A collection of ASN.1-based protocols modules" optional = false -python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,>=2.7" +python-versions = ">=3.8" files = [ - {file = "pyasn1_modules-0.3.0-py2.py3-none-any.whl", hash = "sha256:d3ccd6ed470d9ffbc716be08bd90efbd44d0734bc9303818f7336070984a162d"}, - {file = "pyasn1_modules-0.3.0.tar.gz", hash = "sha256:5bd01446b736eb9d31512a30d46c1ac3395d676c6f3cafa4c03eb54b9925631c"}, + {file = "pyasn1_modules-0.4.0-py3-none-any.whl", hash = "sha256:be04f15b66c206eed667e0bb5ab27e2b1855ea54a842e5037738099e8ca4ae0b"}, + {file = "pyasn1_modules-0.4.0.tar.gz", hash = "sha256:831dbcea1b177b28c9baddf4c6d1013c24c3accd14a1873fffaa6a2e905f17b6"}, ] [package.dependencies] -pyasn1 = ">=0.4.6,<0.6.0" +pyasn1 = ">=0.4.6,<0.7.0" [[package]] name = "pycparser" From 98f57ea3f2ffa94bf66310674bea5ff554df277f Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Fri, 19 Apr 2024 09:41:24 +0100 Subject: [PATCH 13/26] Bump pygithub from 2.2.0 to 2.3.0 (#17092) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Bumps [pygithub](https://github.com/pygithub/pygithub) from 2.2.0 to 2.3.0.
Release notes

Sourced from pygithub's releases.

v2.3.0

New features

Improvements

Bug Fixes

Maintenance

Changelog

Sourced from pygithub's changelog.

Version 2.3.0 (March 21, 2024)

New features ^^^^^^^^^^^^

  • Support OAuth for enterprise (#2780) (e4106e00)
  • Support creation of Dependabot Organization and Repository Secrets (#2874) (0784f835)

Improvements ^^^^^^^^^^^^

  • Create release with optional name and message when generate_release_notes is true (#2868) (d65fc30d)
  • Add missing attributes to WorkflowJob (#2921) (9e092458)
  • Add created and check_suite_id filter for Repository WorkflowRuns (#2891) (c788985c)
  • Assert requester argument type in Auth (#2912) (0b8435fc)

Bug Fixes ^^^^^^^^^

  • Revert having allowed values for add_to_collaborators (#2905) (b542438e)

Maintenance ^^^^^^^^^^^

  • Fix imports in authentication docs (#2923) (e3d36535)
  • CI: add docformatter to precommit (#2614) (96ad19ae)
  • Add .swp files to gitignore (#2903) (af529abe)
  • Fix instructions building docs in CONTRIBUTING.md (#2900) (cd8e528d)
  • Explicitly name the modules built in pyproject.toml (#2894) (4d461734)
Commits
  • 7266e81 Release v2.3.0 (#2926)
  • e4106e0 Support oauth for enterprise (#2780)
  • d65fc30 Create release with optional name and message when generate_release_notes is ...
  • 0784f83 Support creation of Dependabot Organization and Repository Secrets (#2874)
  • 9e09245 Add missing attributes to WorkflowJob (#2921)
  • e3d3653 Fix imports in authentication docs (#2923)
  • c788985 Add created and check_suite_id filter for Repository WorkflowRuns (#2891)
  • 0b8435f Assert requester argument type in Auth (#2912)
  • 96ad19a CI: add docformatter to precommit (#2614)
  • b542438 Revert having allowed values for add_to_collaborators (#2905)
  • Additional commits viewable in compare view

[![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=pygithub&package-manager=pip&previous-version=2.2.0&new-version=2.3.0)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) ---
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- poetry.lock | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/poetry.lock b/poetry.lock index 90f592f53..52351a569 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1983,13 +1983,13 @@ typing-extensions = ">=4.6.0,<4.7.0 || >4.7.0" [[package]] name = "pygithub" -version = "2.2.0" +version = "2.3.0" description = "Use the full Github API v3" optional = false python-versions = ">=3.7" files = [ - {file = "PyGithub-2.2.0-py3-none-any.whl", hash = "sha256:41042ea53e4c372219db708c38d2ca1fd4fadab75475bac27d89d339596cfad1"}, - {file = "PyGithub-2.2.0.tar.gz", hash = "sha256:e39be7c4dc39418bdd6e3ecab5931c636170b8b21b4d26f9ecf7e6102a3b51c3"}, + {file = "PyGithub-2.3.0-py3-none-any.whl", hash = "sha256:65b499728be3ce7b0cd2cd760da3b32f0f4d7bc55e5e0677617f90f6564e793e"}, + {file = "PyGithub-2.3.0.tar.gz", hash = "sha256:0148d7347a1cdeed99af905077010aef81a4dad988b0ba51d4108bf66b443f7e"}, ] [package.dependencies] From dcae2b4ba445f519ed1b3f3369a4661920f6752e Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Fri, 19 Apr 2024 09:41:45 +0100 Subject: [PATCH 14/26] Bump twine from 4.0.2 to 5.0.0 (#17091) Bumps [twine](https://github.com/pypa/twine) from 4.0.2 to 5.0.0.
Changelog

Sourced from twine's changelog.

Twine 5.0.0 (2024-02-10)

Bugfixes ^^^^^^^^

  • Use email.message instead of cgi as cgi has been deprecated ([#969](https://github.com/pypa/twine/issues/969))

Misc ^^^^

  • [#931](https://github.com/pypa/twine/issues/931), [#991](https://github.com/pypa/twine/issues/991), [#1028](https://github.com/pypa/twine/issues/1028), [#1040](https://github.com/pypa/twine/issues/1040)
Commits
  • 94f810c Merge pull request #1047 from pypa/new-release
  • 09d993a Update linkcheck_ignore setting for docs
  • ab0ed19 Apply 2024 black format
  • 407e6cc Build changelog for 5.0.0
  • 6644b86 Add missing changelog entries
  • fe1885f Merge pull request #1034 from DimitriPapadopoulos/codespell
  • 694bdcf Fix typos found by codespell
  • 89ec78c Merge pull request #1040 from woodruffw-forks/ww/pypi-mandatory-api-tokens
  • b3b363a tests: lintage
  • 6e94d20 tests: more non-PyPI tests
  • Additional commits viewable in compare view

[![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=twine&package-manager=pip&previous-version=4.0.2&new-version=5.0.0)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) ---
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- poetry.lock | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/poetry.lock b/poetry.lock index 52351a569..eddeee301 100644 --- a/poetry.lock +++ b/poetry.lock @@ -2954,13 +2954,13 @@ docs = ["sphinx (<7.0.0)"] [[package]] name = "twine" -version = "4.0.2" +version = "5.0.0" description = "Collection of utilities for publishing packages on PyPI" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "twine-4.0.2-py3-none-any.whl", hash = "sha256:929bc3c280033347a00f847236564d1c52a3e61b1ac2516c97c48f3ceab756d8"}, - {file = "twine-4.0.2.tar.gz", hash = "sha256:9e102ef5fdd5a20661eb88fad46338806c3bd32cf1db729603fe3697b1bc83c8"}, + {file = "twine-5.0.0-py3-none-any.whl", hash = "sha256:a262933de0b484c53408f9edae2e7821c1c45a3314ff2df9bdd343aa7ab8edc0"}, + {file = "twine-5.0.0.tar.gz", hash = "sha256:89b0cc7d370a4b66421cc6102f269aa910fe0f1861c124f573cf2ddedbc10cf4"}, ] [package.dependencies] From 20c8991a94afb21ad176ef30089235b6051cc43e Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Fri, 19 Apr 2024 09:42:21 +0100 Subject: [PATCH 15/26] Bump peaceiris/actions-mdbook from 1.2.0 to 2.0.0 (#17089) Bumps [peaceiris/actions-mdbook](https://github.com/peaceiris/actions-mdbook) from 1.2.0 to 2.0.0.
Release notes

Sourced from peaceiris/actions-mdbook's releases.

actions-mdbook v2.0.0

See CHANGELOG.md for more details.

Changelog

Sourced from peaceiris/actions-mdbook's changelog.

Changelog

All notable changes to this project will be documented in this file. See standard-version for commit guidelines.

2.0.0 (2024-04-08)

build

chore

ci

feat

1.2.0 (2022-10-23)

chore

... (truncated)

Commits
  • ee69d23 chore(release): 2.0.0
  • 2d79d45 chore(release): Add build assets
  • c95f05c chore: revert build
  • cb4d902 build: bump node to 20.12.1 (#504)
  • 46c97c2 feat: bump to node20 runtime (#500)
  • 7b0c98f ci: bump codecov/codecov-action from 3 to 4 (#490)
  • 60cc2ff ci: bump actions/dependency-review-action from 3.0.8 to 3.1.0 (#488)
  • c0c1ffe ci: bump actions/checkout from 3 to 4 (#487)
  • c6c9e0f ci: bump actions/setup-node from 3.8.0 to 3.8.1 (#486)
  • 162a198 ci: bump actions/dependency-review-action from 3.0.7 to 3.0.8 (#485)
  • Additional commits viewable in compare view

[![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=peaceiris/actions-mdbook&package-manager=github_actions&previous-version=1.2.0&new-version=2.0.0)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) ---
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- .github/workflows/docs-pr.yaml | 4 ++-- .github/workflows/docs.yaml | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/.github/workflows/docs-pr.yaml b/.github/workflows/docs-pr.yaml index 652ef9009..07dc301b1 100644 --- a/.github/workflows/docs-pr.yaml +++ b/.github/workflows/docs-pr.yaml @@ -19,7 +19,7 @@ jobs: fetch-depth: 0 - name: Setup mdbook - uses: peaceiris/actions-mdbook@adeb05db28a0c0004681db83893d56c0388ea9ea # v1.2.0 + uses: peaceiris/actions-mdbook@ee69d230fe19748b7abf22df32acaa93833fad08 # v2.0.0 with: mdbook-version: '0.4.17' @@ -53,7 +53,7 @@ jobs: - uses: actions/checkout@v4 - name: Setup mdbook - uses: peaceiris/actions-mdbook@adeb05db28a0c0004681db83893d56c0388ea9ea # v1.2.0 + uses: peaceiris/actions-mdbook@ee69d230fe19748b7abf22df32acaa93833fad08 # v2.0.0 with: mdbook-version: '0.4.17' diff --git a/.github/workflows/docs.yaml b/.github/workflows/docs.yaml index d611fdc92..4ddee9ad0 100644 --- a/.github/workflows/docs.yaml +++ b/.github/workflows/docs.yaml @@ -56,7 +56,7 @@ jobs: fetch-depth: 0 - name: Setup mdbook - uses: peaceiris/actions-mdbook@adeb05db28a0c0004681db83893d56c0388ea9ea # v1.2.0 + uses: peaceiris/actions-mdbook@ee69d230fe19748b7abf22df32acaa93833fad08 # v2.0.0 with: mdbook-version: '0.4.17' From 14e9ab19be56f6daa429b36c215db22079f0f111 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Fri, 19 Apr 2024 09:42:35 +0100 Subject: [PATCH 16/26] Bump sigstore/cosign-installer from 3.4.0 to 3.5.0 (#17088) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Bumps [sigstore/cosign-installer](https://github.com/sigstore/cosign-installer) from 3.4.0 to 3.5.0.
Release notes

Sourced from sigstore/cosign-installer's releases.

v3.5.0

What's Changed

Full Changelog: https://github.com/sigstore/cosign-installer/compare/v3.4.0...v3.5.0

Commits

[![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=sigstore/cosign-installer&package-manager=github_actions&previous-version=3.4.0&new-version=3.5.0)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) ---
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- .github/workflows/docker.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/docker.yml b/.github/workflows/docker.yml index 657455044..391e9c96f 100644 --- a/.github/workflows/docker.yml +++ b/.github/workflows/docker.yml @@ -30,7 +30,7 @@ jobs: run: docker buildx inspect - name: Install Cosign - uses: sigstore/cosign-installer@v3.4.0 + uses: sigstore/cosign-installer@v3.5.0 - name: Checkout repository uses: actions/checkout@v4 From 8c667759ad7983774b3937778731fd485af54417 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Fri, 19 Apr 2024 09:43:04 +0100 Subject: [PATCH 17/26] Bump peaceiris/actions-gh-pages from 3.9.3 to 4.0.0 (#17087) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Bumps [peaceiris/actions-gh-pages](https://github.com/peaceiris/actions-gh-pages) from 3.9.3 to 4.0.0.
Release notes

Sourced from peaceiris/actions-gh-pages's releases.

actions-github-pages v4.0.0

See CHANGELOG.md for more details.

Changelog

Sourced from peaceiris/actions-gh-pages's changelog.

Changelog

All notable changes to this project will be documented in this file. See standard-version for commit guidelines.

4.0.0 (2024-04-08)

build

chore

ci

docs

3.9.3 (2023-03-30)

docs

fix

3.9.2 (2023-01-17)

chore

... (truncated)

Commits
  • 4f9cc66 chore(release): 4.0.0
  • 9c75028 chore(release): Add build assets
  • 5049354 build: node 20.11.1
  • 4eb285e chore: bump node16 to node20 (#1067)
  • cdc09a3 chore(deps): update dependency @​types/node to v16.18.77 (#1065)
  • d830378 chore(deps): update dependency @​types/node to v16.18.76 (#1063)
  • 80daa1d chore(deps): update dependency @​types/node to v16.18.75 (#1061)
  • 108285e chore(deps): update dependency ts-jest to v29.1.2 (#1060)
  • 99c95ff chore(deps): update dependency @​types/node to v16.18.74 (#1058)
  • 1f46537 chore(deps): update dependency @​types/node to v16.18.73 (#1057)
  • Additional commits viewable in compare view

[![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=peaceiris/actions-gh-pages&package-manager=github_actions&previous-version=3.9.3&new-version=4.0.0)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) ---
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- .github/workflows/docs.yaml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/docs.yaml b/.github/workflows/docs.yaml index 4ddee9ad0..fe3212f82 100644 --- a/.github/workflows/docs.yaml +++ b/.github/workflows/docs.yaml @@ -80,7 +80,7 @@ jobs: # Deploy to the target directory. - name: Deploy to gh pages - uses: peaceiris/actions-gh-pages@373f7f263a76c20808c831209c920827a82a2847 # v3.9.3 + uses: peaceiris/actions-gh-pages@4f9cc6602d3f66b9c108549d475ec49e8ef4d45e # v4.0.0 with: github_token: ${{ secrets.GITHUB_TOKEN }} publish_dir: ./book @@ -110,7 +110,7 @@ jobs: # Deploy to the target directory. - name: Deploy to gh pages - uses: peaceiris/actions-gh-pages@373f7f263a76c20808c831209c920827a82a2847 # v3.9.3 + uses: peaceiris/actions-gh-pages@4f9cc6602d3f66b9c108549d475ec49e8ef4d45e # v4.0.0 with: github_token: ${{ secrets.GITHUB_TOKEN }} publish_dir: ./dev-docs/_build/html From 800a5b6ef33076e677a8bf6cf6090b213e42855d Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Fri, 19 Apr 2024 09:43:25 +0100 Subject: [PATCH 18/26] Bump types-pillow from 10.2.0.20240406 to 10.2.0.20240415 (#17090) Bumps [types-pillow](https://github.com/python/typeshed) from 10.2.0.20240406 to 10.2.0.20240415.
Commits

[![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=types-pillow&package-manager=pip&previous-version=10.2.0.20240406&new-version=10.2.0.20240415)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) ---
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- poetry.lock | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/poetry.lock b/poetry.lock index eddeee301..d916c627a 100644 --- a/poetry.lock +++ b/poetry.lock @@ -3109,13 +3109,13 @@ files = [ [[package]] name = "types-pillow" -version = "10.2.0.20240406" +version = "10.2.0.20240415" description = "Typing stubs for Pillow" optional = false python-versions = ">=3.8" files = [ - {file = "types-Pillow-10.2.0.20240406.tar.gz", hash = "sha256:62e0cc1f17caba40e72e7154a483f4c7f3bea0e1c34c0ebba9de3c7745bc306d"}, - {file = "types_Pillow-10.2.0.20240406-py3-none-any.whl", hash = "sha256:5ac182e8afce53de30abca2fdf9cbec7b2500e549d0be84da035a729a84c7c47"}, + {file = "types-Pillow-10.2.0.20240415.tar.gz", hash = "sha256:dd6058027639bcdc66ba78b228cc25fdae42524c2150c78c804da427e7e76e70"}, + {file = "types_Pillow-10.2.0.20240415-py3-none-any.whl", hash = "sha256:f933332b7e96010bae9b9cf82a4c9979ff0c270d63f5c5bbffb2d789b85cd00b"}, ] [[package]] From 301c9771c41108218b0efab43f30982bf76dc349 Mon Sep 17 00:00:00 2001 From: devonh Date: Fri, 19 Apr 2024 15:26:28 +0000 Subject: [PATCH 19/26] Clarify what part of message retention is still experimental (#17099) ### Pull Request Checklist * [X] Pull request is based on the develop branch * [x] Pull request includes a [changelog file](https://element-hq.github.io/synapse/latest/development/contributing_guide.html#changelog). The entry should: - Be a short description of your change which makes sense to users. "Fixed a bug that prevented receiving messages from other servers." instead of "Moved X method from `EventStore` to `EventWorkerStore`.". - Use markdown where necessary, mostly for `code blocks`. - End with either a period (.) or an exclamation mark (!). - Start with a capital letter. - Feel free to credit yourself, by adding a sentence "Contributed by @github_username." or "Contributed by [Your Name]." to the end of the entry. * [X] [Code style](https://element-hq.github.io/synapse/latest/code_style.html) is correct (run the [linters](https://element-hq.github.io/synapse/latest/development/contributing_guide.html#run-the-linters)) --- changelog.d/17099.doc | 1 + docs/message_retention_policies.md | 6 ++++-- 2 files changed, 5 insertions(+), 2 deletions(-) create mode 100644 changelog.d/17099.doc diff --git a/changelog.d/17099.doc b/changelog.d/17099.doc new file mode 100644 index 000000000..d8d10fa53 --- /dev/null +++ b/changelog.d/17099.doc @@ -0,0 +1 @@ +Clarify what part of message retention is still experimental. diff --git a/docs/message_retention_policies.md b/docs/message_retention_policies.md index 2746a106b..c64d1539b 100644 --- a/docs/message_retention_policies.md +++ b/docs/message_retention_policies.md @@ -7,8 +7,10 @@ follow the semantics described in and allow server and room admins to configure how long messages should be kept in a homeserver's database before being purged from it. **Please note that, as this feature isn't part of the Matrix -specification yet, this implementation is to be considered as -experimental.** +specification yet, the use of `m.room.retention` events for per-room +retention policies is to be considered as experimental. 
However, the use +of a default message retention policy is considered a stable feature +in Synapse.** A message retention policy is mainly defined by its `max_lifetime` parameter, which defines how long a message can be kept around after From 074ef4d75f9439036119c9874e42f8a92c9bc4fb Mon Sep 17 00:00:00 2001 From: Neil Johnson Date: Fri, 19 Apr 2024 17:10:44 +0100 Subject: [PATCH 20/26] Add an OSX prompt to manually configure icu4c. (#17069) Documentation fix. --- changelog.d/17069.doc | 1 + docs/development/contributing_guide.md | 2 ++ 2 files changed, 3 insertions(+) create mode 100644 changelog.d/17069.doc diff --git a/changelog.d/17069.doc b/changelog.d/17069.doc new file mode 100644 index 000000000..f5a7f599d --- /dev/null +++ b/changelog.d/17069.doc @@ -0,0 +1 @@ +Add a prompt in the contributing guide to manually configure icu4c. diff --git a/docs/development/contributing_guide.md b/docs/development/contributing_guide.md index ac8a7039d..76c3e790c 100644 --- a/docs/development/contributing_guide.md +++ b/docs/development/contributing_guide.md @@ -86,6 +86,8 @@ poetry install --extras all This will install the runtime and developer dependencies for the project. Be sure to check that the `poetry install` step completed cleanly. +For OSX users, be sure to set `PKG_CONFIG_PATH` to support `icu4c`. Run `brew info icu4c` for more details. + ## Running Synapse via poetry To start a local instance of Synapse in the locked poetry environment, create a config file: From ae181233aa4c296d5d973eedfc599145ac0d5918 Mon Sep 17 00:00:00 2001 From: mcalinghee Date: Tue, 23 Apr 2024 17:45:24 +0200 Subject: [PATCH 21/26] Send an email if the address is already bound to an user account (#16819) Co-authored-by: Mathieu Velten Co-authored-by: Olivier D --- changelog.d/16819.feature | 1 + synapse/config/emailconfig.py | 12 ++++++++++++ synapse/push/mailer.py | 16 ++++++++++++++++ synapse/res/templates/already_in_use.html | 12 ++++++++++++ synapse/res/templates/already_in_use.txt | 10 ++++++++++ synapse/rest/client/register.py | 12 ++++++++++-- tests/rest/client/test_register.py | 9 +++++++++ 7 files changed, 70 insertions(+), 2 deletions(-) create mode 100644 changelog.d/16819.feature create mode 100644 synapse/res/templates/already_in_use.html create mode 100644 synapse/res/templates/already_in_use.txt diff --git a/changelog.d/16819.feature b/changelog.d/16819.feature new file mode 100644 index 000000000..1af6f466b --- /dev/null +++ b/changelog.d/16819.feature @@ -0,0 +1 @@ +Send an email if the address is already bound to an user account. 
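To illustrate the distinction drawn in the retention documentation above: the per-room policies that remain experimental are expressed as `m.room.retention` state events, whereas the default policy is part of the homeserver config and is the piece now documented as stable. A minimal sketch of such a state event as a Python dict (structure per the Matrix proposal; the lifetime value is an example, not a Synapse default):

    # Hypothetical per-room retention policy: purge events roughly one day
    # after they were sent. max_lifetime is expressed in milliseconds.
    retention_event = {
        "type": "m.room.retention",
        "state_key": "",
        "content": {
            "max_lifetime": 24 * 60 * 60 * 1000,
        },
    }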
diff --git a/synapse/config/emailconfig.py b/synapse/config/emailconfig.py index a4dc9db03..8033fa2e5 100644 --- a/synapse/config/emailconfig.py +++ b/synapse/config/emailconfig.py @@ -52,6 +52,7 @@ DEFAULT_SUBJECTS = { "invite_from_person_to_space": "[%(app)s] %(person)s has invited you to join the %(space)s space on %(app)s...", "password_reset": "[%(server_name)s] Password reset", "email_validation": "[%(server_name)s] Validate your email", + "email_already_in_use": "[%(server_name)s] Email already in use", } LEGACY_TEMPLATE_DIR_WARNING = """ @@ -76,6 +77,7 @@ class EmailSubjectConfig: invite_from_person_to_space: str password_reset: str email_validation: str + email_already_in_use: str class EmailConfig(Config): @@ -180,6 +182,12 @@ class EmailConfig(Config): registration_template_text = email_config.get( "registration_template_text", "registration.txt" ) + already_in_use_template_html = email_config.get( + "already_in_use_template_html", "already_in_use.html" + ) + already_in_use_template_text = email_config.get( + "already_in_use_template_html", "already_in_use.txt" + ) add_threepid_template_html = email_config.get( "add_threepid_template_html", "add_threepid.html" ) @@ -215,6 +223,8 @@ class EmailConfig(Config): self.email_password_reset_template_text, self.email_registration_template_html, self.email_registration_template_text, + self.email_already_in_use_template_html, + self.email_already_in_use_template_text, self.email_add_threepid_template_html, self.email_add_threepid_template_text, self.email_password_reset_template_confirmation_html, @@ -230,6 +240,8 @@ class EmailConfig(Config): password_reset_template_text, registration_template_html, registration_template_text, + already_in_use_template_html, + already_in_use_template_text, add_threepid_template_html, add_threepid_template_text, "password_reset_confirmation.html", diff --git a/synapse/push/mailer.py b/synapse/push/mailer.py index f1ffc8115..7c15eb744 100644 --- a/synapse/push/mailer.py +++ b/synapse/push/mailer.py @@ -205,6 +205,22 @@ class Mailer: template_vars, ) + emails_sent_counter.labels("already_in_use") + + async def send_already_in_use_mail(self, email_address: str) -> None: + """Send an email if the address is already bound to an user account + + Args: + email_address: Email address we're sending to the "already in use" mail + """ + + await self.send_email( + email_address, + self.email_subjects.email_already_in_use + % {"server_name": self.hs.config.server.server_name, "app": self.app_name}, + {}, + ) + emails_sent_counter.labels("add_threepid") async def send_add_threepid_mail( diff --git a/synapse/res/templates/already_in_use.html b/synapse/res/templates/already_in_use.html new file mode 100644 index 000000000..4c4c3c36a --- /dev/null +++ b/synapse/res/templates/already_in_use.html @@ -0,0 +1,12 @@ +{% extends "_base.html" %} +{% block title %}Email already in use{% endblock %} + +{% block body %} +

<p>You have asked us to register this email with a new Matrix account, but this email is already registered with an existing account.</p>
+ +<p>Please reset your password if needed.</p>
+ +<p>If this was not you, you can safely disregard this email.</p>
+ +<p>Thank you.</p>
+{% endblock %} diff --git a/synapse/res/templates/already_in_use.txt b/synapse/res/templates/already_in_use.txt new file mode 100644 index 000000000..c60401a94 --- /dev/null +++ b/synapse/res/templates/already_in_use.txt @@ -0,0 +1,10 @@ +Hello there, + +You have asked us to register this email with a new Matrix account, +but this email is already registered with an existing account. + +Please reset your password if needed. + +If this was not you, you can safely disregard this email. + +Thank you. diff --git a/synapse/rest/client/register.py b/synapse/rest/client/register.py index 634ebed2b..5dddbc69b 100644 --- a/synapse/rest/client/register.py +++ b/synapse/rest/client/register.py @@ -86,12 +86,18 @@ class EmailRegisterRequestTokenRestServlet(RestServlet): self.config = hs.config if self.hs.config.email.can_verify_email: - self.mailer = Mailer( + self.registration_mailer = Mailer( hs=self.hs, app_name=self.config.email.email_app_name, template_html=self.config.email.email_registration_template_html, template_text=self.config.email.email_registration_template_text, ) + self.already_in_use_mailer = Mailer( + hs=self.hs, + app_name=self.config.email.email_app_name, + template_html=self.config.email.email_already_in_use_template_html, + template_text=self.config.email.email_already_in_use_template_text, + ) async def on_POST(self, request: SynapseRequest) -> Tuple[int, JsonDict]: if not self.hs.config.email.can_verify_email: @@ -139,8 +145,10 @@ class EmailRegisterRequestTokenRestServlet(RestServlet): if self.hs.config.server.request_token_inhibit_3pid_errors: # Make the client think the operation succeeded. See the rationale in the # comments for request_token_inhibit_3pid_errors. + # Still send an email to warn the user that an account already exists. # Also wait for some random amount of time between 100ms and 1s to make it # look like we did something. 
+ await self.already_in_use_mailer.send_already_in_use_mail(email) await self.hs.get_clock().sleep(random.randint(1, 10) / 10) return 200, {"sid": random_string(16)} @@ -151,7 +159,7 @@ class EmailRegisterRequestTokenRestServlet(RestServlet): email, client_secret, send_attempt, - self.mailer.send_registration_mail, + self.registration_mailer.send_registration_mail, next_link, ) diff --git a/tests/rest/client/test_register.py b/tests/rest/client/test_register.py index 859051cdd..694f143ef 100644 --- a/tests/rest/client/test_register.py +++ b/tests/rest/client/test_register.py @@ -22,6 +22,7 @@ import datetime import os from typing import Any, Dict, List, Tuple +from unittest.mock import AsyncMock import pkg_resources @@ -42,6 +43,7 @@ from synapse.types import JsonDict from synapse.util import Clock from tests import unittest +from tests.server import ThreadedMemoryReactorClock from tests.unittest import override_config @@ -58,6 +60,13 @@ class RegisterRestServletTestCase(unittest.HomeserverTestCase): config["allow_guest_access"] = True return config + def make_homeserver( + self, reactor: ThreadedMemoryReactorClock, clock: Clock + ) -> HomeServer: + hs = super().make_homeserver(reactor, clock) + hs.get_send_email_handler()._sendmail = AsyncMock() + return hs + def test_POST_appservice_registration_valid(self) -> None: user_id = "@as_user_kermit:test" as_token = "i_am_an_app_service" From 646cb6ff2412bfc5180b5d748b95dbe6ef790a0b Mon Sep 17 00:00:00 2001 From: Andrew Morgan <1342360+anoadragon453@users.noreply.github.com> Date: Thu, 25 Apr 2024 13:25:26 +0100 Subject: [PATCH 22/26] Add type annotation to `visited_chains` (#17125) This should fix CI on `develop`. Broke in https://github.com/element-hq/synapse/commit/0fe9e1f7dafa80f3e02762f7ae75cefee5b3316c, presumably due to a `mypy` dependency upgrade. --- changelog.d/17125.misc | 1 + synapse/storage/databases/main/events.py | 2 +- 2 files changed, 2 insertions(+), 1 deletion(-) create mode 100644 changelog.d/17125.misc diff --git a/changelog.d/17125.misc b/changelog.d/17125.misc new file mode 100644 index 000000000..a7d9ce649 --- /dev/null +++ b/changelog.d/17125.misc @@ -0,0 +1 @@ +Fix type annotation for `visited_chains` after `mypy` upgrade. \ No newline at end of file diff --git a/synapse/storage/databases/main/events.py b/synapse/storage/databases/main/events.py index 1e731d56b..990698aa5 100644 --- a/synapse/storage/databases/main/events.py +++ b/synapse/storage/databases/main/events.py @@ -2454,7 +2454,7 @@ class _LinkMap: return target_seq <= src_seq # We have to graph traverse the links to check for indirect paths. 
- visited_chains = collections.Counter() + visited_chains: Dict[int, int] = collections.Counter() search = [(src_chain, src_seq)] while search: chain, seq = search.pop() From 2e92b718d5ea063af4b2dc9412dcd2ce625b4987 Mon Sep 17 00:00:00 2001 From: Quentin Gliech Date: Thu, 25 Apr 2024 14:50:12 +0200 Subject: [PATCH 23/26] MSC4108 implementation (#17056) Co-authored-by: Hugh Nimmo-Smith Co-authored-by: Hugh Nimmo-Smith Co-authored-by: Andrew Morgan <1342360+anoadragon453@users.noreply.github.com> --- Cargo.lock | 164 ++++++++- changelog.d/17056.feature | 1 + rust/Cargo.toml | 4 + rust/src/lib.rs | 2 + rust/src/rendezvous/mod.rs | 315 +++++++++++++++++ rust/src/rendezvous/session.rs | 91 +++++ synapse/config/experimental.py | 12 +- synapse/http/server.py | 5 +- synapse/rest/client/rendezvous.py | 16 + synapse/rest/client/versions.py | 9 +- synapse/rest/synapse/client/__init__.py | 4 + synapse/rest/synapse/client/rendezvous.py | 58 ++++ synapse/server.py | 5 + synapse/synapse_rust/rendezvous.pyi | 30 ++ tests/rest/client/test_rendezvous.py | 401 +++++++++++++++++++++- tests/server.py | 7 +- tests/unittest.py | 5 + 17 files changed, 1120 insertions(+), 9 deletions(-) create mode 100644 changelog.d/17056.feature create mode 100644 rust/src/rendezvous/mod.rs create mode 100644 rust/src/rendezvous/session.rs create mode 100644 synapse/rest/synapse/client/rendezvous.py create mode 100644 synapse/synapse_rust/rendezvous.pyi diff --git a/Cargo.lock b/Cargo.lock index faac6b3c8..4474dfb90 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -59,6 +59,12 @@ dependencies = [ "generic-array", ] +[[package]] +name = "bumpalo" +version = "3.16.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "79296716171880943b8470b5f8d03aa55eb2e645a4874bdbb28adb49162e012c" + [[package]] name = "bytes" version = "1.6.0" @@ -92,9 +98,9 @@ dependencies = [ [[package]] name = "digest" -version = "0.10.5" +version = "0.10.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "adfbc57365a37acbd2ebf2b64d7e69bb766e2fea813521ed536f5d0520dcf86c" +checksum = "9ed9a281f7bc9b7576e61468ba615a66a5c8cfdff42420a70aa82701a3b1e292" dependencies = [ "block-buffer", "crypto-common", @@ -117,6 +123,19 @@ dependencies = [ "version_check", ] +[[package]] +name = "getrandom" +version = "0.2.14" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "94b22e06ecb0110981051723910cbf0b5f5e09a2062dd7663334ee79a9d1286c" +dependencies = [ + "cfg-if", + "js-sys", + "libc", + "wasi", + "wasm-bindgen", +] + [[package]] name = "headers" version = "0.4.0" @@ -182,6 +201,15 @@ version = "1.0.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4217ad341ebadf8d8e724e264f13e593e0648f5b3e94b3896a5df283be015ecc" +[[package]] +name = "js-sys" +version = "0.3.69" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "29c15563dc2726973df627357ce0c9ddddbea194836909d655df6a75d2cf296d" +dependencies = [ + "wasm-bindgen", +] + [[package]] name = "lazy_static" version = "1.4.0" @@ -266,6 +294,12 @@ version = "1.6.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7170ef9988bc169ba16dd36a7fa041e5c4cbeb6a35b76d4c03daded371eae7c0" +[[package]] +name = "ppv-lite86" +version = "0.2.17" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5b40af805b3121feab8a3c29f04d8ad262fa8e0561883e7653e024ae4479e6de" + [[package]] name = "proc-macro2" version = "1.0.76" @@ -369,6 +403,36 @@ dependencies = [ 
"proc-macro2", ] +[[package]] +name = "rand" +version = "0.8.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "34af8d1a0e25924bc5b7c43c079c942339d8f0a8b57c39049bef581b46327404" +dependencies = [ + "libc", + "rand_chacha", + "rand_core", +] + +[[package]] +name = "rand_chacha" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e6c10a63a0fa32252be49d21e7709d4d4baf8d231c2dbce1eaa8141b9b127d88" +dependencies = [ + "ppv-lite86", + "rand_core", +] + +[[package]] +name = "rand_core" +version = "0.6.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ec0be4795e2f6a28069bec0b5ff3e2ac9bafc99e6a9a7dc3547996c5c816922c" +dependencies = [ + "getrandom", +] + [[package]] name = "redox_syscall" version = "0.2.16" @@ -461,6 +525,17 @@ dependencies = [ "digest", ] +[[package]] +name = "sha2" +version = "0.10.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "793db75ad2bcafc3ffa7c68b215fee268f537982cd901d132f89c6343f3a3dc8" +dependencies = [ + "cfg-if", + "cpufeatures", + "digest", +] + [[package]] name = "smallvec" version = "1.10.0" @@ -489,6 +564,7 @@ name = "synapse" version = "0.1.0" dependencies = [ "anyhow", + "base64", "blake2", "bytes", "headers", @@ -496,12 +572,15 @@ dependencies = [ "http", "lazy_static", "log", + "mime", "pyo3", "pyo3-log", "pythonize", "regex", "serde", "serde_json", + "sha2", + "ulid", ] [[package]] @@ -516,6 +595,17 @@ version = "1.15.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "dcf81ac59edc17cc8697ff311e8f5ef2d99fcbd9817b34cec66f90b6c3dfd987" +[[package]] +name = "ulid" +version = "1.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "34778c17965aa2a08913b57e1f34db9b4a63f5de31768b55bf20d2795f921259" +dependencies = [ + "getrandom", + "rand", + "web-time", +] + [[package]] name = "unicode-ident" version = "1.0.5" @@ -534,6 +624,76 @@ version = "0.9.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "49874b5167b65d7193b8aba1567f5c7d93d001cafc34600cee003eda787e483f" +[[package]] +name = "wasi" +version = "0.11.0+wasi-snapshot-preview1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9c8d87e72b64a3b4db28d11ce29237c246188f4f51057d65a7eab63b7987e423" + +[[package]] +name = "wasm-bindgen" +version = "0.2.92" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4be2531df63900aeb2bca0daaaddec08491ee64ceecbee5076636a3b026795a8" +dependencies = [ + "cfg-if", + "wasm-bindgen-macro", +] + +[[package]] +name = "wasm-bindgen-backend" +version = "0.2.92" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "614d787b966d3989fa7bb98a654e369c762374fd3213d212cfc0251257e747da" +dependencies = [ + "bumpalo", + "log", + "once_cell", + "proc-macro2", + "quote", + "syn", + "wasm-bindgen-shared", +] + +[[package]] +name = "wasm-bindgen-macro" +version = "0.2.92" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a1f8823de937b71b9460c0c34e25f3da88250760bec0ebac694b49997550d726" +dependencies = [ + "quote", + "wasm-bindgen-macro-support", +] + +[[package]] +name = "wasm-bindgen-macro-support" +version = "0.2.92" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e94f17b526d0a461a191c78ea52bbce64071ed5c04c9ffe424dcb38f74171bb7" +dependencies = [ + "proc-macro2", + "quote", + "syn", + "wasm-bindgen-backend", + "wasm-bindgen-shared", +] + 
+[[package]] +name = "wasm-bindgen-shared" +version = "0.2.92" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "af190c94f2773fdb3729c55b007a722abb5384da03bc0986df4c289bf5567e96" + +[[package]] +name = "web-time" +version = "1.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5a6580f308b1fad9207618087a65c04e7a10bc77e02c8e84e9b00dd4b12fa0bb" +dependencies = [ + "js-sys", + "wasm-bindgen", +] + [[package]] name = "windows-sys" version = "0.36.1" diff --git a/changelog.d/17056.feature b/changelog.d/17056.feature new file mode 100644 index 000000000..b4cbe849e --- /dev/null +++ b/changelog.d/17056.feature @@ -0,0 +1 @@ +Implement the rendezvous mechanism described by MSC4108. diff --git a/rust/Cargo.toml b/rust/Cargo.toml index 9ac766182..d41a216d1 100644 --- a/rust/Cargo.toml +++ b/rust/Cargo.toml @@ -23,11 +23,13 @@ name = "synapse.synapse_rust" [dependencies] anyhow = "1.0.63" +base64 = "0.21.7" bytes = "1.6.0" headers = "0.4.0" http = "1.1.0" lazy_static = "1.4.0" log = "0.4.17" +mime = "0.3.17" pyo3 = { version = "0.20.0", features = [ "macros", "anyhow", @@ -37,8 +39,10 @@ pyo3 = { version = "0.20.0", features = [ pyo3-log = "0.9.0" pythonize = "0.20.0" regex = "1.6.0" +sha2 = "0.10.8" serde = { version = "1.0.144", features = ["derive"] } serde_json = "1.0.85" +ulid = "1.1.2" [features] extension-module = ["pyo3/extension-module"] diff --git a/rust/src/lib.rs b/rust/src/lib.rs index 36a3d6452..9bd1f17ad 100644 --- a/rust/src/lib.rs +++ b/rust/src/lib.rs @@ -7,6 +7,7 @@ pub mod errors; pub mod events; pub mod http; pub mod push; +pub mod rendezvous; lazy_static! { static ref LOGGING_HANDLE: ResetHandle = pyo3_log::init(); @@ -45,6 +46,7 @@ fn synapse_rust(py: Python<'_>, m: &PyModule) -> PyResult<()> { acl::register_module(py, m)?; push::register_module(py, m)?; events::register_module(py, m)?; + rendezvous::register_module(py, m)?; Ok(()) } diff --git a/rust/src/rendezvous/mod.rs b/rust/src/rendezvous/mod.rs new file mode 100644 index 000000000..c0f5d8b60 --- /dev/null +++ b/rust/src/rendezvous/mod.rs @@ -0,0 +1,315 @@ +/* + * This file is licensed under the Affero General Public License (AGPL) version 3. + * + * Copyright (C) 2024 New Vector, Ltd + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the GNU Affero General Public License as + * published by the Free Software Foundation, either version 3 of the + * License, or (at your option) any later version. + * + * See the GNU Affero General Public License for more details: + * . + * + */ + +use std::{ + collections::{BTreeMap, HashMap}, + time::{Duration, SystemTime}, +}; + +use bytes::Bytes; +use headers::{ + AccessControlAllowOrigin, AccessControlExposeHeaders, CacheControl, ContentLength, ContentType, + HeaderMapExt, IfMatch, IfNoneMatch, Pragma, +}; +use http::{header::ETAG, HeaderMap, Response, StatusCode, Uri}; +use mime::Mime; +use pyo3::{ + exceptions::PyValueError, pyclass, pymethods, types::PyModule, Py, PyAny, PyObject, PyResult, + Python, ToPyObject, +}; +use ulid::Ulid; + +use self::session::Session; +use crate::{ + errors::{NotFoundError, SynapseError}, + http::{http_request_from_twisted, http_response_to_twisted, HeaderMapPyExt}, +}; + +mod session; + +// n.b. Because OPTIONS requests are handled by the Python code, we don't need to set Access-Control-Allow-Headers. 
+fn prepare_headers(headers: &mut HeaderMap, session: &Session) { + headers.typed_insert(AccessControlAllowOrigin::ANY); + headers.typed_insert(AccessControlExposeHeaders::from_iter([ETAG])); + headers.typed_insert(Pragma::no_cache()); + headers.typed_insert(CacheControl::new().with_no_store()); + headers.typed_insert(session.etag()); + headers.typed_insert(session.expires()); + headers.typed_insert(session.last_modified()); +} + +#[pyclass] +struct RendezvousHandler { + base: Uri, + clock: PyObject, + sessions: BTreeMap, + capacity: usize, + max_content_length: u64, + ttl: Duration, +} + +impl RendezvousHandler { + /// Check the input headers of a request which sets data for a session, and return the content type. + fn check_input_headers(&self, headers: &HeaderMap) -> PyResult { + let ContentLength(content_length) = headers.typed_get_required()?; + + if content_length > self.max_content_length { + return Err(SynapseError::new( + StatusCode::PAYLOAD_TOO_LARGE, + "Payload too large".to_owned(), + "M_TOO_LARGE", + None, + None, + )); + } + + let content_type: ContentType = headers.typed_get_required()?; + + // Content-Type must be text/plain + if content_type != ContentType::text() { + return Err(SynapseError::new( + StatusCode::BAD_REQUEST, + "Content-Type must be text/plain".to_owned(), + "M_INVALID_PARAM", + None, + None, + )); + } + + Ok(content_type.into()) + } + + /// Evict expired sessions and remove the oldest sessions until we're under the capacity. + fn evict(&mut self, now: SystemTime) { + // First remove all the entries which expired + self.sessions.retain(|_, session| !session.expired(now)); + + // Then we remove the oldest entires until we're under the limit + while self.sessions.len() > self.capacity { + self.sessions.pop_first(); + } + } +} + +#[pymethods] +impl RendezvousHandler { + #[new] + #[pyo3(signature = (homeserver, /, capacity=100, max_content_length=4*1024, eviction_interval=60*1000, ttl=60*1000))] + fn new( + py: Python<'_>, + homeserver: &PyAny, + capacity: usize, + max_content_length: u64, + eviction_interval: u64, + ttl: u64, + ) -> PyResult> { + let base: String = homeserver + .getattr("config")? + .getattr("server")? + .getattr("public_baseurl")? + .extract()?; + let base = Uri::try_from(format!("{base}_synapse/client/rendezvous")) + .map_err(|_| PyValueError::new_err("Invalid base URI"))?; + + let clock = homeserver.call_method0("get_clock")?.to_object(py); + + // Construct a Python object so that we can get a reference to the + // evict method and schedule it to run. 
+ let self_ = Py::new( + py, + Self { + base, + clock, + sessions: BTreeMap::new(), + capacity, + max_content_length, + ttl: Duration::from_millis(ttl), + }, + )?; + + let evict = self_.getattr(py, "_evict")?; + homeserver.call_method0("get_clock")?.call_method( + "looping_call", + (evict, eviction_interval), + None, + )?; + + Ok(self_) + } + + fn _evict(&mut self, py: Python<'_>) -> PyResult<()> { + let clock = self.clock.as_ref(py); + let now: u64 = clock.call_method0("time_msec")?.extract()?; + let now = SystemTime::UNIX_EPOCH + Duration::from_millis(now); + self.evict(now); + + Ok(()) + } + + fn handle_post(&mut self, py: Python<'_>, twisted_request: &PyAny) -> PyResult<()> { + let request = http_request_from_twisted(twisted_request)?; + + let content_type = self.check_input_headers(request.headers())?; + + let clock = self.clock.as_ref(py); + let now: u64 = clock.call_method0("time_msec")?.extract()?; + let now = SystemTime::UNIX_EPOCH + Duration::from_millis(now); + + // We trigger an immediate eviction if we're at 2x the capacity + if self.sessions.len() >= self.capacity * 2 { + self.evict(now); + } + + // Generate a new ULID for the session from the current time. + let id = Ulid::from_datetime(now); + + let uri = format!("{base}/{id}", base = self.base); + + let body = request.into_body(); + + let session = Session::new(body, content_type, now, self.ttl); + + let response = serde_json::json!({ + "url": uri, + }) + .to_string(); + + let mut response = Response::new(response.as_bytes()); + *response.status_mut() = StatusCode::CREATED; + response.headers_mut().typed_insert(ContentType::json()); + prepare_headers(response.headers_mut(), &session); + http_response_to_twisted(twisted_request, response)?; + + self.sessions.insert(id, session); + + Ok(()) + } + + fn handle_get(&mut self, py: Python<'_>, twisted_request: &PyAny, id: &str) -> PyResult<()> { + let request = http_request_from_twisted(twisted_request)?; + + let if_none_match: Option = request.headers().typed_get_optional()?; + + let now: u64 = self.clock.call_method0(py, "time_msec")?.extract(py)?; + let now = SystemTime::UNIX_EPOCH + Duration::from_millis(now); + + let id: Ulid = id.parse().map_err(|_| NotFoundError::new())?; + let session = self + .sessions + .get(&id) + .filter(|s| !s.expired(now)) + .ok_or_else(NotFoundError::new)?; + + if let Some(if_none_match) = if_none_match { + if !if_none_match.precondition_passes(&session.etag()) { + let mut response = Response::new(Bytes::new()); + *response.status_mut() = StatusCode::NOT_MODIFIED; + prepare_headers(response.headers_mut(), session); + http_response_to_twisted(twisted_request, response)?; + return Ok(()); + } + } + + let mut response = Response::new(session.data()); + *response.status_mut() = StatusCode::OK; + let headers = response.headers_mut(); + prepare_headers(headers, session); + headers.typed_insert(session.content_type()); + headers.typed_insert(session.content_length()); + http_response_to_twisted(twisted_request, response)?; + + Ok(()) + } + + fn handle_put(&mut self, py: Python<'_>, twisted_request: &PyAny, id: &str) -> PyResult<()> { + let request = http_request_from_twisted(twisted_request)?; + + let content_type = self.check_input_headers(request.headers())?; + + let if_match: IfMatch = request.headers().typed_get_required()?; + + let data = request.into_body(); + + let now: u64 = self.clock.call_method0(py, "time_msec")?.extract(py)?; + let now = SystemTime::UNIX_EPOCH + Duration::from_millis(now); + + let id: Ulid = id.parse().map_err(|_| 
NotFoundError::new())?; + let session = self + .sessions + .get_mut(&id) + .filter(|s| !s.expired(now)) + .ok_or_else(NotFoundError::new)?; + + if !if_match.precondition_passes(&session.etag()) { + let mut headers = HeaderMap::new(); + prepare_headers(&mut headers, session); + + let mut additional_fields = HashMap::with_capacity(1); + additional_fields.insert( + String::from("org.matrix.msc4108.errcode"), + String::from("M_CONCURRENT_WRITE"), + ); + + return Err(SynapseError::new( + StatusCode::PRECONDITION_FAILED, + "ETag does not match".to_owned(), + "M_UNKNOWN", // Would be M_CONCURRENT_WRITE + Some(additional_fields), + Some(headers), + )); + } + + session.update(data, content_type, now); + + let mut response = Response::new(Bytes::new()); + *response.status_mut() = StatusCode::ACCEPTED; + prepare_headers(response.headers_mut(), session); + http_response_to_twisted(twisted_request, response)?; + + Ok(()) + } + + fn handle_delete(&mut self, twisted_request: &PyAny, id: &str) -> PyResult<()> { + let _request = http_request_from_twisted(twisted_request)?; + + let id: Ulid = id.parse().map_err(|_| NotFoundError::new())?; + let _session = self.sessions.remove(&id).ok_or_else(NotFoundError::new)?; + + let mut response = Response::new(Bytes::new()); + *response.status_mut() = StatusCode::NO_CONTENT; + response + .headers_mut() + .typed_insert(AccessControlAllowOrigin::ANY); + http_response_to_twisted(twisted_request, response)?; + + Ok(()) + } +} + +pub fn register_module(py: Python<'_>, m: &PyModule) -> PyResult<()> { + let child_module = PyModule::new(py, "rendezvous")?; + + child_module.add_class::()?; + + m.add_submodule(child_module)?; + + // We need to manually add the module to sys.modules to make `from + // synapse.synapse_rust import rendezvous` work. + py.import("sys")? + .getattr("modules")? + .set_item("synapse.synapse_rust.rendezvous", child_module)?; + + Ok(()) +} diff --git a/rust/src/rendezvous/session.rs b/rust/src/rendezvous/session.rs new file mode 100644 index 000000000..179304edf --- /dev/null +++ b/rust/src/rendezvous/session.rs @@ -0,0 +1,91 @@ +/* + * This file is licensed under the Affero General Public License (AGPL) version 3. + * + * Copyright (C) 2024 New Vector, Ltd + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the GNU Affero General Public License as + * published by the Free Software Foundation, either version 3 of the + * License, or (at your option) any later version. + * + * See the GNU Affero General Public License for more details: + * . + */ + +use std::time::{Duration, SystemTime}; + +use base64::{engine::general_purpose::URL_SAFE_NO_PAD, Engine as _}; +use bytes::Bytes; +use headers::{ContentLength, ContentType, ETag, Expires, LastModified}; +use mime::Mime; +use sha2::{Digest, Sha256}; + +/// A single session, containing data, metadata, and expiry information. +pub struct Session { + hash: [u8; 32], + data: Bytes, + content_type: Mime, + last_modified: SystemTime, + expires: SystemTime, +} + +impl Session { + /// Create a new session with the given data, content type, and time-to-live. + pub fn new(data: Bytes, content_type: Mime, now: SystemTime, ttl: Duration) -> Self { + let hash = Sha256::digest(&data).into(); + Self { + hash, + data, + content_type, + expires: now + ttl, + last_modified: now, + } + } + + /// Returns true if the session has expired at the given time. 
+ pub fn expired(&self, now: SystemTime) -> bool { + self.expires <= now + } + + /// Update the session with new data, content type, and last modified time. + pub fn update(&mut self, data: Bytes, content_type: Mime, now: SystemTime) { + self.hash = Sha256::digest(&data).into(); + self.data = data; + self.content_type = content_type; + self.last_modified = now; + } + + /// Returns the Content-Type header of the session. + pub fn content_type(&self) -> ContentType { + self.content_type.clone().into() + } + + /// Returns the Content-Length header of the session. + pub fn content_length(&self) -> ContentLength { + ContentLength(self.data.len() as _) + } + + /// Returns the ETag header of the session. + pub fn etag(&self) -> ETag { + let encoded = URL_SAFE_NO_PAD.encode(self.hash); + // SAFETY: Base64 encoding is URL-safe, so ETag-safe + format!("\"{encoded}\"") + .parse() + .expect("base64-encoded hash should be URL-safe") + } + + /// Returns the Last-Modified header of the session. + pub fn last_modified(&self) -> LastModified { + self.last_modified.into() + } + + /// Returns the Expires header of the session. + pub fn expires(&self) -> Expires { + self.expires.into() + } + + /// Returns the current data stored in the session. + pub fn data(&self) -> Bytes { + self.data.clone() + } +} diff --git a/synapse/config/experimental.py b/synapse/config/experimental.py index 353ae23f9..baa3580f2 100644 --- a/synapse/config/experimental.py +++ b/synapse/config/experimental.py @@ -413,12 +413,22 @@ class ExperimentalConfig(Config): ) # MSC4108: Mechanism to allow OIDC sign in and E2EE set up via QR code + self.msc4108_enabled = experimental.get("msc4108_enabled", False) + self.msc4108_delegation_endpoint: Optional[str] = experimental.get( "msc4108_delegation_endpoint", None ) - if self.msc4108_delegation_endpoint is not None and not self.msc3861.enabled: + if ( + self.msc4108_enabled or self.msc4108_delegation_endpoint is not None + ) and not self.msc3861.enabled: raise ConfigError( "MSC4108 requires MSC3861 to be enabled", ("experimental", "msc4108_delegation_endpoint"), ) + + if self.msc4108_delegation_endpoint is not None and self.msc4108_enabled: + raise ConfigError( + "You cannot have MSC4108 both enabled and delegated at the same time", + ("experimental", "msc4108_delegation_endpoint"), + ) diff --git a/synapse/http/server.py b/synapse/http/server.py index 45b2cbffc..211795dc3 100644 --- a/synapse/http/server.py +++ b/synapse/http/server.py @@ -909,8 +909,9 @@ def set_cors_headers(request: "SynapseRequest") -> None: request.setHeader( b"Access-Control-Allow-Methods", b"GET, HEAD, POST, PUT, DELETE, OPTIONS" ) - if request.path is not None and request.path.startswith( - b"/_matrix/client/unstable/org.matrix.msc4108/rendezvous" + if request.path is not None and ( + request.path == b"/_matrix/client/unstable/org.matrix.msc4108/rendezvous" + or request.path.startswith(b"/_synapse/client/rendezvous") ): request.setHeader( b"Access-Control-Allow-Headers", diff --git a/synapse/rest/client/rendezvous.py b/synapse/rest/client/rendezvous.py index ed06a2998..143f05765 100644 --- a/synapse/rest/client/rendezvous.py +++ b/synapse/rest/client/rendezvous.py @@ -97,9 +97,25 @@ class MSC4108DelegationRendezvousServlet(RestServlet): ) +class MSC4108RendezvousServlet(RestServlet): + PATTERNS = client_patterns( + "/org.matrix.msc4108/rendezvous$", releases=[], v1=False, unstable=True + ) + + def __init__(self, hs: "HomeServer") -> None: + super().__init__() + self._handler = hs.get_rendezvous_handler() + + def 
on_POST(self, request: SynapseRequest) -> None: + self._handler.handle_post(request) + + def register_servlets(hs: "HomeServer", http_server: HttpServer) -> None: if hs.config.experimental.msc3886_endpoint is not None: MSC3886RendezvousServlet(hs).register(http_server) + if hs.config.experimental.msc4108_enabled: + MSC4108RendezvousServlet(hs).register(http_server) + if hs.config.experimental.msc4108_delegation_endpoint is not None: MSC4108DelegationRendezvousServlet(hs).register(http_server) diff --git a/synapse/rest/client/versions.py b/synapse/rest/client/versions.py index 638d4c45a..fa453a3b0 100644 --- a/synapse/rest/client/versions.py +++ b/synapse/rest/client/versions.py @@ -141,8 +141,13 @@ class VersionsRestServlet(RestServlet): # Allows clients to handle push for encrypted events. "org.matrix.msc4028": self.config.experimental.msc4028_push_encrypted_events, # MSC4108: Mechanism to allow OIDC sign in and E2EE set up via QR code - "org.matrix.msc4108": self.config.experimental.msc4108_delegation_endpoint - is not None, + "org.matrix.msc4108": ( + self.config.experimental.msc4108_enabled + or ( + self.config.experimental.msc4108_delegation_endpoint + is not None + ) + ), }, }, ) diff --git a/synapse/rest/synapse/client/__init__.py b/synapse/rest/synapse/client/__init__.py index 31544867d..ba6576d4d 100644 --- a/synapse/rest/synapse/client/__init__.py +++ b/synapse/rest/synapse/client/__init__.py @@ -26,6 +26,7 @@ from twisted.web.resource import Resource from synapse.rest.synapse.client.new_user_consent import NewUserConsentResource from synapse.rest.synapse.client.pick_idp import PickIdpResource from synapse.rest.synapse.client.pick_username import pick_username_resource +from synapse.rest.synapse.client.rendezvous import MSC4108RendezvousSessionResource from synapse.rest.synapse.client.sso_register import SsoRegisterResource from synapse.rest.synapse.client.unsubscribe import UnsubscribeResource @@ -76,6 +77,9 @@ def build_synapse_client_resource_tree(hs: "HomeServer") -> Mapping[str, Resourc # To be removed in Synapse v1.32.0. resources["/_matrix/saml2"] = res + if hs.config.experimental.msc4108_enabled: + resources["/_synapse/client/rendezvous"] = MSC4108RendezvousSessionResource(hs) + return resources diff --git a/synapse/rest/synapse/client/rendezvous.py b/synapse/rest/synapse/client/rendezvous.py new file mode 100644 index 000000000..5216d30d1 --- /dev/null +++ b/synapse/rest/synapse/client/rendezvous.py @@ -0,0 +1,58 @@ +# +# This file is licensed under the Affero General Public License (AGPL) version 3. +# +# Copyright (C) 2024 New Vector, Ltd +# +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU Affero General Public License as +# published by the Free Software Foundation, either version 3 of the +# License, or (at your option) any later version. +# +# See the GNU Affero General Public License for more details: +# . 
+# +# + +import logging +from typing import TYPE_CHECKING, List + +from synapse.api.errors import UnrecognizedRequestError +from synapse.http.server import DirectServeJsonResource +from synapse.http.site import SynapseRequest + +if TYPE_CHECKING: + from synapse.server import HomeServer + +logger = logging.getLogger(__name__) + + +class MSC4108RendezvousSessionResource(DirectServeJsonResource): + isLeaf = True + + def __init__(self, hs: "HomeServer") -> None: + super().__init__() + self._handler = hs.get_rendezvous_handler() + + async def _async_render_GET(self, request: SynapseRequest) -> None: + postpath: List[bytes] = request.postpath # type: ignore + if len(postpath) != 1: + raise UnrecognizedRequestError() + session_id = postpath[0].decode("ascii") + + self._handler.handle_get(request, session_id) + + def _async_render_PUT(self, request: SynapseRequest) -> None: + postpath: List[bytes] = request.postpath # type: ignore + if len(postpath) != 1: + raise UnrecognizedRequestError() + session_id = postpath[0].decode("ascii") + + self._handler.handle_put(request, session_id) + + def _async_render_DELETE(self, request: SynapseRequest) -> None: + postpath: List[bytes] = request.postpath # type: ignore + if len(postpath) != 1: + raise UnrecognizedRequestError() + session_id = postpath[0].decode("ascii") + + self._handler.handle_delete(request, session_id) diff --git a/synapse/server.py b/synapse/server.py index 6d5a18fb1..95e319d2e 100644 --- a/synapse/server.py +++ b/synapse/server.py @@ -143,6 +143,7 @@ from synapse.state import StateHandler, StateResolutionHandler from synapse.storage import Databases from synapse.storage.controllers import StorageControllers from synapse.streams.events import EventSources +from synapse.synapse_rust.rendezvous import RendezvousHandler from synapse.types import DomainSpecificString, ISynapseReactor from synapse.util import Clock from synapse.util.distributor import Distributor @@ -859,6 +860,10 @@ class HomeServer(metaclass=abc.ABCMeta): def get_room_forgetter_handler(self) -> RoomForgetterHandler: return RoomForgetterHandler(self) + @cache_in_self + def get_rendezvous_handler(self) -> RendezvousHandler: + return RendezvousHandler(self) + @cache_in_self def get_outbound_redis_connection(self) -> "ConnectionHandler": """ diff --git a/synapse/synapse_rust/rendezvous.pyi b/synapse/synapse_rust/rendezvous.pyi new file mode 100644 index 000000000..03eae3a19 --- /dev/null +++ b/synapse/synapse_rust/rendezvous.pyi @@ -0,0 +1,30 @@ +# This file is licensed under the Affero General Public License (AGPL) version 3. +# +# Copyright (C) 2024 New Vector, Ltd +# +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU Affero General Public License as +# published by the Free Software Foundation, either version 3 of the +# License, or (at your option) any later version. +# +# See the GNU Affero General Public License for more details: +# . + +from twisted.web.iweb import IRequest + +from synapse.server import HomeServer + +class RendezvousHandler: + def __init__( + self, + homeserver: HomeServer, + /, + capacity: int = 100, + max_content_length: int = 4 * 1024, # MSC4108 specifies 4KB + eviction_interval: int = 60 * 1000, + ttl: int = 60 * 1000, + ) -> None: ... + def handle_post(self, request: IRequest) -> None: ... + def handle_get(self, request: IRequest, session_id: str) -> None: ... + def handle_put(self, request: IRequest, session_id: str) -> None: ... 
+ def handle_delete(self, request: IRequest, session_id: str) -> None: ... diff --git a/tests/rest/client/test_rendezvous.py b/tests/rest/client/test_rendezvous.py index c84704c09..0ab754a11 100644 --- a/tests/rest/client/test_rendezvous.py +++ b/tests/rest/client/test_rendezvous.py @@ -2,7 +2,7 @@ # This file is licensed under the Affero General Public License (AGPL) version 3. # # Copyright 2022 The Matrix.org Foundation C.I.C. -# Copyright (C) 2023 New Vector, Ltd +# Copyright (C) 2023-2024 New Vector, Ltd # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU Affero General Public License as @@ -19,9 +19,14 @@ # # +from typing import Dict +from urllib.parse import urlparse + from twisted.test.proto_helpers import MemoryReactor +from twisted.web.resource import Resource from synapse.rest.client import rendezvous +from synapse.rest.synapse.client.rendezvous import MSC4108RendezvousSessionResource from synapse.server import HomeServer from synapse.util import Clock @@ -42,6 +47,12 @@ class RendezvousServletTestCase(unittest.HomeserverTestCase): self.hs = self.setup_test_homeserver() return self.hs + def create_resource_dict(self) -> Dict[str, Resource]: + return { + **super().create_resource_dict(), + "/_synapse/client/rendezvous": MSC4108RendezvousSessionResource(self.hs), + } + def test_disabled(self) -> None: channel = self.make_request("POST", msc3886_endpoint, {}, access_token=None) self.assertEqual(channel.code, 404) @@ -75,3 +86,391 @@ class RendezvousServletTestCase(unittest.HomeserverTestCase): channel = self.make_request("POST", msc4108_endpoint, {}, access_token=None) self.assertEqual(channel.code, 307) self.assertEqual(channel.headers.getRawHeaders("Location"), ["https://asd"]) + + @unittest.skip_unless(HAS_AUTHLIB, "requires authlib") + @override_config( + { + "disable_registration": True, + "experimental_features": { + "msc4108_enabled": True, + "msc3861": { + "enabled": True, + "issuer": "https://issuer", + "client_id": "client_id", + "client_auth_method": "client_secret_post", + "client_secret": "client_secret", + "admin_token": "admin_token_value", + }, + }, + } + ) + def test_msc4108(self) -> None: + """ + Test the MSC4108 rendezvous endpoint, including: + - Creating a session + - Getting the data back + - Updating the data + - Deleting the data + - ETag handling + """ + # We can post arbitrary data to the endpoint + channel = self.make_request( + "POST", + msc4108_endpoint, + "foo=bar", + content_type=b"text/plain", + access_token=None, + ) + self.assertEqual(channel.code, 201) + self.assertSubstring("/_synapse/client/rendezvous/", channel.json_body["url"]) + headers = dict(channel.headers.getAllRawHeaders()) + self.assertIn(b"ETag", headers) + self.assertIn(b"Expires", headers) + self.assertEqual(headers[b"Content-Type"], [b"application/json"]) + self.assertEqual(headers[b"Access-Control-Allow-Origin"], [b"*"]) + self.assertEqual(headers[b"Access-Control-Expose-Headers"], [b"etag"]) + self.assertEqual(headers[b"Cache-Control"], [b"no-store"]) + self.assertEqual(headers[b"Pragma"], [b"no-cache"]) + self.assertIn("url", channel.json_body) + self.assertTrue(channel.json_body["url"].startswith("https://")) + + url = urlparse(channel.json_body["url"]) + session_endpoint = url.path + etag = headers[b"ETag"][0] + + # We can get the data back + channel = self.make_request( + "GET", + session_endpoint, + access_token=None, + ) + + self.assertEqual(channel.code, 200) + headers = dict(channel.headers.getAllRawHeaders()) + 
self.assertEqual(headers[b"ETag"], [etag]) + self.assertIn(b"Expires", headers) + self.assertEqual(headers[b"Content-Type"], [b"text/plain"]) + self.assertEqual(headers[b"Access-Control-Allow-Origin"], [b"*"]) + self.assertEqual(headers[b"Access-Control-Expose-Headers"], [b"etag"]) + self.assertEqual(headers[b"Cache-Control"], [b"no-store"]) + self.assertEqual(headers[b"Pragma"], [b"no-cache"]) + self.assertEqual(channel.text_body, "foo=bar") + + # We can make sure the data hasn't changed + channel = self.make_request( + "GET", + session_endpoint, + access_token=None, + custom_headers=[("If-None-Match", etag)], + ) + + self.assertEqual(channel.code, 304) + + # We can update the data + channel = self.make_request( + "PUT", + session_endpoint, + "foo=baz", + content_type=b"text/plain", + access_token=None, + custom_headers=[("If-Match", etag)], + ) + + self.assertEqual(channel.code, 202) + headers = dict(channel.headers.getAllRawHeaders()) + old_etag = etag + new_etag = headers[b"ETag"][0] + + # If we try to update it again with the old etag, it should fail + channel = self.make_request( + "PUT", + session_endpoint, + "bar=baz", + content_type=b"text/plain", + access_token=None, + custom_headers=[("If-Match", old_etag)], + ) + + self.assertEqual(channel.code, 412) + self.assertEqual(channel.json_body["errcode"], "M_UNKNOWN") + self.assertEqual( + channel.json_body["org.matrix.msc4108.errcode"], "M_CONCURRENT_WRITE" + ) + + # If we try to get with the old etag, we should get the updated data + channel = self.make_request( + "GET", + session_endpoint, + access_token=None, + custom_headers=[("If-None-Match", old_etag)], + ) + + self.assertEqual(channel.code, 200) + headers = dict(channel.headers.getAllRawHeaders()) + self.assertEqual(headers[b"ETag"], [new_etag]) + self.assertEqual(channel.text_body, "foo=baz") + + # We can delete the data + channel = self.make_request( + "DELETE", + session_endpoint, + access_token=None, + ) + + self.assertEqual(channel.code, 204) + + # If we try to get the data again, it should fail + channel = self.make_request( + "GET", + session_endpoint, + access_token=None, + ) + + self.assertEqual(channel.code, 404) + self.assertEqual(channel.json_body["errcode"], "M_NOT_FOUND") + + @unittest.skip_unless(HAS_AUTHLIB, "requires authlib") + @override_config( + { + "disable_registration": True, + "experimental_features": { + "msc4108_enabled": True, + "msc3861": { + "enabled": True, + "issuer": "https://issuer", + "client_id": "client_id", + "client_auth_method": "client_secret_post", + "client_secret": "client_secret", + "admin_token": "admin_token_value", + }, + }, + } + ) + def test_msc4108_expiration(self) -> None: + """ + Test that entries are evicted after a TTL. 
+ """ + # Start a new session + channel = self.make_request( + "POST", + msc4108_endpoint, + "foo=bar", + content_type=b"text/plain", + access_token=None, + ) + self.assertEqual(channel.code, 201) + session_endpoint = urlparse(channel.json_body["url"]).path + + # Sanity check that we can get the data back + channel = self.make_request( + "GET", + session_endpoint, + access_token=None, + ) + self.assertEqual(channel.code, 200) + self.assertEqual(channel.text_body, "foo=bar") + + # Advance the clock, TTL of entries is 1 minute + self.reactor.advance(60) + + # Get the data back, it should be gone + channel = self.make_request( + "GET", + session_endpoint, + access_token=None, + ) + self.assertEqual(channel.code, 404) + + @unittest.skip_unless(HAS_AUTHLIB, "requires authlib") + @override_config( + { + "disable_registration": True, + "experimental_features": { + "msc4108_enabled": True, + "msc3861": { + "enabled": True, + "issuer": "https://issuer", + "client_id": "client_id", + "client_auth_method": "client_secret_post", + "client_secret": "client_secret", + "admin_token": "admin_token_value", + }, + }, + } + ) + def test_msc4108_capacity(self) -> None: + """ + Test that a capacity limit is enforced on the rendezvous sessions, as old + entries are evicted at an interval when the limit is reached. + """ + # Start a new session + channel = self.make_request( + "POST", + msc4108_endpoint, + "foo=bar", + content_type=b"text/plain", + access_token=None, + ) + self.assertEqual(channel.code, 201) + session_endpoint = urlparse(channel.json_body["url"]).path + + # Sanity check that we can get the data back + channel = self.make_request( + "GET", + session_endpoint, + access_token=None, + ) + self.assertEqual(channel.code, 200) + self.assertEqual(channel.text_body, "foo=bar") + + # Start a lot of new sessions + for _ in range(100): + channel = self.make_request( + "POST", + msc4108_endpoint, + "foo=bar", + content_type=b"text/plain", + access_token=None, + ) + self.assertEqual(channel.code, 201) + + # Get the data back, it should still be there, as the eviction hasn't run yet + channel = self.make_request( + "GET", + session_endpoint, + access_token=None, + ) + + self.assertEqual(channel.code, 200) + + # Advance the clock, as it will trigger the eviction + self.reactor.advance(1) + + # Get the data back, it should be gone + channel = self.make_request( + "GET", + session_endpoint, + access_token=None, + ) + + @unittest.skip_unless(HAS_AUTHLIB, "requires authlib") + @override_config( + { + "disable_registration": True, + "experimental_features": { + "msc4108_enabled": True, + "msc3861": { + "enabled": True, + "issuer": "https://issuer", + "client_id": "client_id", + "client_auth_method": "client_secret_post", + "client_secret": "client_secret", + "admin_token": "admin_token_value", + }, + }, + } + ) + def test_msc4108_hard_capacity(self) -> None: + """ + Test that a hard capacity limit is enforced on the rendezvous sessions, as old + entries are evicted immediately when the limit is reached. 
+ """ + # Start a new session + channel = self.make_request( + "POST", + msc4108_endpoint, + "foo=bar", + content_type=b"text/plain", + access_token=None, + ) + self.assertEqual(channel.code, 201) + session_endpoint = urlparse(channel.json_body["url"]).path + # We advance the clock to make sure that this entry is the "lowest" in the session list + self.reactor.advance(1) + + # Sanity check that we can get the data back + channel = self.make_request( + "GET", + session_endpoint, + access_token=None, + ) + self.assertEqual(channel.code, 200) + self.assertEqual(channel.text_body, "foo=bar") + + # Start a lot of new sessions + for _ in range(200): + channel = self.make_request( + "POST", + msc4108_endpoint, + "foo=bar", + content_type=b"text/plain", + access_token=None, + ) + self.assertEqual(channel.code, 201) + + # Get the data back, it should already be gone as we hit the hard limit + channel = self.make_request( + "GET", + session_endpoint, + access_token=None, + ) + + self.assertEqual(channel.code, 404) + + @unittest.skip_unless(HAS_AUTHLIB, "requires authlib") + @override_config( + { + "disable_registration": True, + "experimental_features": { + "msc4108_enabled": True, + "msc3861": { + "enabled": True, + "issuer": "https://issuer", + "client_id": "client_id", + "client_auth_method": "client_secret_post", + "client_secret": "client_secret", + "admin_token": "admin_token_value", + }, + }, + } + ) + def test_msc4108_content_type(self) -> None: + """ + Test that the content-type is restricted to text/plain. + """ + # We cannot post invalid content-type arbitrary data to the endpoint + channel = self.make_request( + "POST", + msc4108_endpoint, + "foo=bar", + content_is_form=True, + access_token=None, + ) + self.assertEqual(channel.code, 400) + self.assertEqual(channel.json_body["errcode"], "M_INVALID_PARAM") + + # Make a valid request + channel = self.make_request( + "POST", + msc4108_endpoint, + "foo=bar", + content_type=b"text/plain", + access_token=None, + ) + self.assertEqual(channel.code, 201) + url = urlparse(channel.json_body["url"]) + session_endpoint = url.path + headers = dict(channel.headers.getAllRawHeaders()) + etag = headers[b"ETag"][0] + + # We can't update the data with invalid content-type + channel = self.make_request( + "PUT", + session_endpoint, + "foo=baz", + content_is_form=True, + access_token=None, + custom_headers=[("If-Match", etag)], + ) + self.assertEqual(channel.code, 400) + self.assertEqual(channel.json_body["errcode"], "M_INVALID_PARAM") diff --git a/tests/server.py b/tests/server.py index 4aaa91e95..434be3d22 100644 --- a/tests/server.py +++ b/tests/server.py @@ -351,6 +351,7 @@ def make_request( request: Type[Request] = SynapseRequest, shorthand: bool = True, federation_auth_origin: Optional[bytes] = None, + content_type: Optional[bytes] = None, content_is_form: bool = False, await_result: bool = True, custom_headers: Optional[Iterable[CustomHeaderType]] = None, @@ -373,6 +374,8 @@ def make_request( with the usual REST API path, if it doesn't contain it. federation_auth_origin: if set to not-None, we will add a fake Authorization header pretenting to be the given server name. + content_type: The content-type to use for the request. If not set then will default to + application/json unless content_is_form is true. content_is_form: Whether the content is URL encoded form data. Adds the 'Content-Type': 'application/x-www-form-urlencoded' header. await_result: whether to wait for the request to complete rendering. 
If true, @@ -436,7 +439,9 @@ def make_request( ) if content: - if content_is_form: + if content_type is not None: + req.requestHeaders.addRawHeader(b"Content-Type", content_type) + elif content_is_form: req.requestHeaders.addRawHeader( b"Content-Type", b"application/x-www-form-urlencoded" ) diff --git a/tests/unittest.py b/tests/unittest.py index 6fe0cd4a2..e6aad9ed4 100644 --- a/tests/unittest.py +++ b/tests/unittest.py @@ -523,6 +523,7 @@ class HomeserverTestCase(TestCase): request: Type[Request] = SynapseRequest, shorthand: bool = True, federation_auth_origin: Optional[bytes] = None, + content_type: Optional[bytes] = None, content_is_form: bool = False, await_result: bool = True, custom_headers: Optional[Iterable[CustomHeaderType]] = None, @@ -541,6 +542,9 @@ class HomeserverTestCase(TestCase): with the usual REST API path, if it doesn't contain it. federation_auth_origin: if set to not-None, we will add a fake Authorization header pretenting to be the given server name. + + content_type: The content-type to use for the request. If not set then will default to + application/json unless content_is_form is true. content_is_form: Whether the content is URL encoded form data. Adds the 'Content-Type': 'application/x-www-form-urlencoded' header. @@ -566,6 +570,7 @@ class HomeserverTestCase(TestCase): request, shorthand, federation_auth_origin, + content_type, content_is_form, await_result, custom_headers, From 47773232b034c0d7b72bb7419a01e772509c8814 Mon Sep 17 00:00:00 2001 From: Till <2353100+S7evinK@users.noreply.github.com> Date: Thu, 25 Apr 2024 15:25:31 +0200 Subject: [PATCH 24/26] Redact membership events if the user requested erasure upon deactivating (#17076) Fixes #15355 by redacting all membership events before leaving rooms. --- changelog.d/17076.bugfix | 1 + synapse/handlers/deactivate_account.py | 13 ++++++- synapse/storage/databases/main/roommember.py | 22 ++++++++++++ tests/handlers/test_deactivate_account.py | 37 ++++++++++++++++++++ 4 files changed, 72 insertions(+), 1 deletion(-) create mode 100644 changelog.d/17076.bugfix diff --git a/changelog.d/17076.bugfix b/changelog.d/17076.bugfix new file mode 100644 index 000000000..a111ea2b8 --- /dev/null +++ b/changelog.d/17076.bugfix @@ -0,0 +1 @@ +Redact membership events if the user requested erasure upon deactivating. 
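The handler change below redacts a deactivated user's membership events before parting them from each room, but only when the user requested erasure. A self-contained sketch of that ordering, using toy in-memory stand-ins for the storage layer (the store method names mirror the diff below; everything else here is illustrative):

    import asyncio

    class FakeStore:
        """Toy stand-in for the Synapse data store, for illustration only."""

        async def is_user_erased(self, user_id: str) -> bool:
            return True

        async def get_rooms_for_user(self, user_id: str) -> list:
            return ["!room:example.org"]

        async def get_membership_event_ids_for_user(self, user_id: str, room_id: str) -> list:
            return ["$membership-event"]

        async def expire_event(self, event_id: str) -> None:
            print(f"redacting {event_id}")

    async def part_user(store: FakeStore, user_id: str) -> None:
        # Redact membership events first (only if erasure was requested),
        # then leave each room, mirroring the order in the diff below.
        should_erase = await store.is_user_erased(user_id)
        for room_id in await store.get_rooms_for_user(user_id):
            if should_erase:
                for event_id in await store.get_membership_event_ids_for_user(user_id, room_id):
                    await store.expire_event(event_id)
            print(f"leaving {room_id}")  # the real handler calls update_membership(..., "leave")

    asyncio.run(part_user(FakeStore(), "@alice:example.org"))

In the real handler the leave also goes through the usual error handling; the sketch only shows the new ordering.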
\ No newline at end of file diff --git a/synapse/handlers/deactivate_account.py b/synapse/handlers/deactivate_account.py index b13c4b6cb..11ac37768 100644 --- a/synapse/handlers/deactivate_account.py +++ b/synapse/handlers/deactivate_account.py @@ -261,11 +261,22 @@ class DeactivateAccountHandler: user = UserID.from_string(user_id) rooms_for_user = await self.store.get_rooms_for_user(user_id) + requester = create_requester(user, authenticated_entity=self._server_name) + should_erase = await self.store.is_user_erased(user_id) + for room_id in rooms_for_user: logger.info("User parter parting %r from %r", user_id, room_id) try: + # Before parting the user, redact all membership events if requested + if should_erase: + event_ids = await self.store.get_membership_event_ids_for_user( + user_id, room_id + ) + for event_id in event_ids: + await self.store.expire_event(event_id) + await self._room_member_handler.update_membership( - create_requester(user, authenticated_entity=self._server_name), + requester, user, room_id, "leave", diff --git a/synapse/storage/databases/main/roommember.py b/synapse/storage/databases/main/roommember.py index 5d5150259..9fddbb2ca 100644 --- a/synapse/storage/databases/main/roommember.py +++ b/synapse/storage/databases/main/roommember.py @@ -1234,6 +1234,28 @@ class RoomMemberWorkerStore(EventsWorkerStore, CacheInvalidationWorkerStore): return set(room_ids) + async def get_membership_event_ids_for_user( + self, user_id: str, room_id: str + ) -> Set[str]: + """Get all event_ids for the given user and room. + + Args: + user_id: The user ID to get the event IDs for. + room_id: The room ID to look up events for. + + Returns: + Set of event IDs + """ + + event_ids = await self.db_pool.simple_select_onecol( + table="room_memberships", + keyvalues={"user_id": user_id, "room_id": room_id}, + retcol="event_id", + desc="get_membership_event_ids_for_user", + ) + + return set(event_ids) + @cached(max_entries=5000) async def _get_membership_from_event_id( self, member_event_id: str diff --git a/tests/handlers/test_deactivate_account.py b/tests/handlers/test_deactivate_account.py index b3f9e50f0..c698771a0 100644 --- a/tests/handlers/test_deactivate_account.py +++ b/tests/handlers/test_deactivate_account.py @@ -424,3 +424,40 @@ class DeactivateAccountTestCase(HomeserverTestCase): self._store.get_knocked_at_rooms_for_local_user(self.user) ) self.assertEqual(len(after_deactivate_knocks), 0) + + def test_membership_is_redacted_upon_deactivation(self) -> None: + """ + Tests that room membership events are redacted if erasure is requested. + """ + # Create a room + room_id = self.helper.create_room_as( + self.user, + is_public=True, + tok=self.token, + ) + + # Change the displayname + membership_event, _ = self.get_success( + self.handler.update_membership( + requester=create_requester(self.user), + target=UserID.from_string(self.user), + room_id=room_id, + action=Membership.JOIN, + content={"displayname": "Hello World!"}, + ) + ) + + # Deactivate the account + self._deactivate_my_account() + + # Get the all membership event IDs + membership_event_ids = self.get_success( + self._store.get_membership_event_ids_for_user(self.user, room_id=room_id) + ) + + # Get the events incl. 
JSON + events = self.get_success(self._store.get_events_as_list(membership_event_ids)) + + # Validate that there is no displayname in any of the events + for event in events: + self.assertTrue("displayname" not in event.content) From 48a90c697b7d6faf1d44273dfe5c4e76467a0bc4 Mon Sep 17 00:00:00 2001 From: Olivier 'reivilibre Date: Thu, 25 Apr 2024 15:55:18 +0100 Subject: [PATCH 25/26] 1.106.0rc1 --- CHANGES.md | 42 +++++++++++++++++++++++++++++++++++++++ changelog.d/16819.feature | 1 - changelog.d/16920.bugfix | 1 - changelog.d/16923.bugfix | 1 - changelog.d/16943.bugfix | 1 - changelog.d/17032.misc | 1 - changelog.d/17036.misc | 1 - changelog.d/17056.feature | 1 - changelog.d/17069.doc | 1 - changelog.d/17076.bugfix | 1 - changelog.d/17079.misc | 1 - changelog.d/17081.misc | 1 - changelog.d/17086.feature | 1 - changelog.d/17096.misc | 1 - changelog.d/17099.doc | 1 - changelog.d/17125.misc | 1 - debian/changelog | 6 ++++++ pyproject.toml | 2 +- 18 files changed, 49 insertions(+), 16 deletions(-) delete mode 100644 changelog.d/16819.feature delete mode 100644 changelog.d/16920.bugfix delete mode 100644 changelog.d/16923.bugfix delete mode 100644 changelog.d/16943.bugfix delete mode 100644 changelog.d/17032.misc delete mode 100644 changelog.d/17036.misc delete mode 100644 changelog.d/17056.feature delete mode 100644 changelog.d/17069.doc delete mode 100644 changelog.d/17076.bugfix delete mode 100644 changelog.d/17079.misc delete mode 100644 changelog.d/17081.misc delete mode 100644 changelog.d/17086.feature delete mode 100644 changelog.d/17096.misc delete mode 100644 changelog.d/17099.doc delete mode 100644 changelog.d/17125.misc diff --git a/CHANGES.md b/CHANGES.md index ec5bc22a9..913e6fbc8 100644 --- a/CHANGES.md +++ b/CHANGES.md @@ -1,3 +1,45 @@ +# Synapse 1.106.0rc1 (2024-04-25) + +### Features + +- Send an email if the address is already bound to an user account. ([\#16819](https://github.com/element-hq/synapse/issues/16819)) +- Implement the rendezvous mechanism described by MSC4108. ([\#17056](https://github.com/element-hq/synapse/issues/17056)) +- Support delegating the rendezvous mechanism described MSC4108 to an external implementation. ([\#17086](https://github.com/element-hq/synapse/issues/17086)) + +### Bugfixes + +- Add validation to ensure that the `limit` parameter on `/publicRooms` is non-negative. ([\#16920](https://github.com/element-hq/synapse/issues/16920)) +- Return `400 M_NOT_JSON` upon receiving invalid JSON in query parameters across various client and admin endpoints, rather than an internal server error. ([\#16923](https://github.com/element-hq/synapse/issues/16923)) +- Make the CSAPI endpoint `/keys/device_signing/upload` idempotent. ([\#16943](https://github.com/element-hq/synapse/issues/16943)) +- Redact membership events if the user requested erasure upon deactivating. ([\#17076](https://github.com/element-hq/synapse/issues/17076)) + +### Improved Documentation + +- Add a prompt in the contributing guide to manually configure icu4c. ([\#17069](https://github.com/element-hq/synapse/issues/17069)) +- Clarify what part of message retention is still experimental. ([\#17099](https://github.com/element-hq/synapse/issues/17099)) + +### Internal Changes + +- Use new receipts column to optimise receipt and push action SQL queries. Contributed by Nick @ Beeper (@fizzadar). ([\#17032](https://github.com/element-hq/synapse/issues/17032), [\#17096](https://github.com/element-hq/synapse/issues/17096)) +- Fix mypy with latest Twisted release. 
([\#17036](https://github.com/element-hq/synapse/issues/17036)) +- Bump minimum supported Rust version to 1.66.0. ([\#17079](https://github.com/element-hq/synapse/issues/17079)) +- Add helpers to transform Twisted requests to Rust http Requests/Responses. ([\#17081](https://github.com/element-hq/synapse/issues/17081)) +- Fix type annotation for `visited_chains` after `mypy` upgrade. ([\#17125](https://github.com/element-hq/synapse/issues/17125)) + + + +### Updates to locked dependencies + +* Bump anyhow from 1.0.81 to 1.0.82. ([\#17095](https://github.com/element-hq/synapse/issues/17095)) +* Bump peaceiris/actions-gh-pages from 3.9.3 to 4.0.0. ([\#17087](https://github.com/element-hq/synapse/issues/17087)) +* Bump peaceiris/actions-mdbook from 1.2.0 to 2.0.0. ([\#17089](https://github.com/element-hq/synapse/issues/17089)) +* Bump pyasn1-modules from 0.3.0 to 0.4.0. ([\#17093](https://github.com/element-hq/synapse/issues/17093)) +* Bump pygithub from 2.2.0 to 2.3.0. ([\#17092](https://github.com/element-hq/synapse/issues/17092)) +* Bump ruff from 0.3.5 to 0.3.7. ([\#17094](https://github.com/element-hq/synapse/issues/17094)) +* Bump sigstore/cosign-installer from 3.4.0 to 3.5.0. ([\#17088](https://github.com/element-hq/synapse/issues/17088)) +* Bump twine from 4.0.2 to 5.0.0. ([\#17091](https://github.com/element-hq/synapse/issues/17091)) +* Bump types-pillow from 10.2.0.20240406 to 10.2.0.20240415. ([\#17090](https://github.com/element-hq/synapse/issues/17090)) + # Synapse 1.105.1 (2024-04-23) ## Security advisory diff --git a/changelog.d/16819.feature b/changelog.d/16819.feature deleted file mode 100644 index 1af6f466b..000000000 --- a/changelog.d/16819.feature +++ /dev/null @@ -1 +0,0 @@ -Send an email if the address is already bound to an user account. diff --git a/changelog.d/16920.bugfix b/changelog.d/16920.bugfix deleted file mode 100644 index 460f4f716..000000000 --- a/changelog.d/16920.bugfix +++ /dev/null @@ -1 +0,0 @@ -Adds validation to ensure that the `limit` parameter on `/publicRooms` is non-negative. diff --git a/changelog.d/16923.bugfix b/changelog.d/16923.bugfix deleted file mode 100644 index bd6f24925..000000000 --- a/changelog.d/16923.bugfix +++ /dev/null @@ -1 +0,0 @@ -Return `400 M_NOT_JSON` upon receiving invalid JSON in query parameters across various client and admin endpoints, rather than an internal server error. \ No newline at end of file diff --git a/changelog.d/16943.bugfix b/changelog.d/16943.bugfix deleted file mode 100644 index 436074113..000000000 --- a/changelog.d/16943.bugfix +++ /dev/null @@ -1 +0,0 @@ -Make the CSAPI endpoint `/keys/device_signing/upload` idempotent. \ No newline at end of file diff --git a/changelog.d/17032.misc b/changelog.d/17032.misc deleted file mode 100644 index b03f6f42e..000000000 --- a/changelog.d/17032.misc +++ /dev/null @@ -1 +0,0 @@ -Use new receipts column to optimise receipt and push action SQL queries. Contributed by Nick @ Beeper (@fizzadar). diff --git a/changelog.d/17036.misc b/changelog.d/17036.misc deleted file mode 100644 index 329666805..000000000 --- a/changelog.d/17036.misc +++ /dev/null @@ -1 +0,0 @@ -Fix mypy with latest Twisted release. diff --git a/changelog.d/17056.feature b/changelog.d/17056.feature deleted file mode 100644 index b4cbe849e..000000000 --- a/changelog.d/17056.feature +++ /dev/null @@ -1 +0,0 @@ -Implement the rendezvous mechanism described by MSC4108. 
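For anyone unfamiliar with the release mechanics visible in this commit: the one-line `changelog.d/*.feature|bugfix|doc|misc` fragments are folded into a new `CHANGES.md` section and then deleted, and the version is bumped in `pyproject.toml` and `debian/changelog`. A rough sketch of the folding step, illustrative only (the real process is driven by Synapse's release tooling; the section titles below are taken from the generated `CHANGES.md` above):

```python
# Rough sketch of the changelog folding done at release time: gather the
# one-line changelog.d fragments into sections keyed by fragment extension,
# matching the section titles used in CHANGES.md. Illustrative only.
from collections import defaultdict
from pathlib import Path

SECTION_TITLES = {
    "feature": "Features",
    "bugfix": "Bugfixes",
    "doc": "Improved Documentation",
    "misc": "Internal Changes",
}


def collect_fragments(changelog_dir: str = "changelog.d") -> dict:
    sections = defaultdict(list)
    for fragment in sorted(Path(changelog_dir).glob("*.*")):
        kind = fragment.suffix.lstrip(".")
        if kind in SECTION_TITLES:
            # e.g. ("17076", "Redact membership events if the user requested erasure upon deactivating.")
            sections[SECTION_TITLES[kind]].append(
                (fragment.stem, fragment.read_text().strip())
            )
    return dict(sections)
```

The deletions of the `changelog.d/*` files in this commit are the other half of that process.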
diff --git a/changelog.d/17069.doc b/changelog.d/17069.doc deleted file mode 100644 index f5a7f599d..000000000 --- a/changelog.d/17069.doc +++ /dev/null @@ -1 +0,0 @@ -Add a prompt in the contributing guide to manually configure icu4c. diff --git a/changelog.d/17076.bugfix b/changelog.d/17076.bugfix deleted file mode 100644 index a111ea2b8..000000000 --- a/changelog.d/17076.bugfix +++ /dev/null @@ -1 +0,0 @@ -Redact membership events if the user requested erasure upon deactivating. \ No newline at end of file diff --git a/changelog.d/17079.misc b/changelog.d/17079.misc deleted file mode 100644 index 340e40d19..000000000 --- a/changelog.d/17079.misc +++ /dev/null @@ -1 +0,0 @@ -Bump minimum supported Rust version to 1.66.0. diff --git a/changelog.d/17081.misc b/changelog.d/17081.misc deleted file mode 100644 index d1ab69126..000000000 --- a/changelog.d/17081.misc +++ /dev/null @@ -1 +0,0 @@ -Add helpers to transform Twisted requests to Rust http Requests/Responses. diff --git a/changelog.d/17086.feature b/changelog.d/17086.feature deleted file mode 100644 index 08b407d31..000000000 --- a/changelog.d/17086.feature +++ /dev/null @@ -1 +0,0 @@ -Support delegating the rendezvous mechanism described MSC4108 to an external implementation. diff --git a/changelog.d/17096.misc b/changelog.d/17096.misc deleted file mode 100644 index b03f6f42e..000000000 --- a/changelog.d/17096.misc +++ /dev/null @@ -1 +0,0 @@ -Use new receipts column to optimise receipt and push action SQL queries. Contributed by Nick @ Beeper (@fizzadar). diff --git a/changelog.d/17099.doc b/changelog.d/17099.doc deleted file mode 100644 index d8d10fa53..000000000 --- a/changelog.d/17099.doc +++ /dev/null @@ -1 +0,0 @@ -Clarify what part of message retention is still experimental. diff --git a/changelog.d/17125.misc b/changelog.d/17125.misc deleted file mode 100644 index a7d9ce649..000000000 --- a/changelog.d/17125.misc +++ /dev/null @@ -1 +0,0 @@ -Fix type annotation for `visited_chains` after `mypy` upgrade. \ No newline at end of file diff --git a/debian/changelog b/debian/changelog index 214ed5942..de912c2ac 100644 --- a/debian/changelog +++ b/debian/changelog @@ -1,3 +1,9 @@ +matrix-synapse-py3 (1.106.0~rc1) stable; urgency=medium + + * New Synapse release 1.106.0rc1. + + -- Synapse Packaging team Thu, 25 Apr 2024 15:54:59 +0100 + matrix-synapse-py3 (1.105.1) stable; urgency=medium * New Synapse release 1.105.1. diff --git a/pyproject.toml b/pyproject.toml index ed0f5ef4b..5e47a46cd 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -96,7 +96,7 @@ module-name = "synapse.synapse_rust" [tool.poetry] name = "matrix-synapse" -version = "1.105.1" +version = "1.106.0rc1" description = "Homeserver for the Matrix decentralised comms protocol" authors = ["Matrix.org Team and Contributors "] license = "AGPL-3.0-or-later" From 30c50e024075f7046baa5465d27a1c490b54dc21 Mon Sep 17 00:00:00 2001 From: Olivier 'reivilibre Date: Thu, 25 Apr 2024 16:00:37 +0100 Subject: [PATCH 26/26] Tweak changelog --- CHANGES.md | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/CHANGES.md b/CHANGES.md index 913e6fbc8..451581fa6 100644 --- a/CHANGES.md +++ b/CHANGES.md @@ -3,8 +3,8 @@ ### Features - Send an email if the address is already bound to an user account. ([\#16819](https://github.com/element-hq/synapse/issues/16819)) -- Implement the rendezvous mechanism described by MSC4108. 
([\#17056](https://github.com/element-hq/synapse/issues/17056)) -- Support delegating the rendezvous mechanism described MSC4108 to an external implementation. ([\#17086](https://github.com/element-hq/synapse/issues/17086)) +- Implement the rendezvous mechanism described by [MSC4108](https://github.com/matrix-org/matrix-spec-proposals/issues/4108). ([\#17056](https://github.com/element-hq/synapse/issues/17056)) +- Support delegating the rendezvous mechanism described [MSC4108](https://github.com/matrix-org/matrix-spec-proposals/issues/4108) to an external implementation. ([\#17086](https://github.com/element-hq/synapse/issues/17086)) ### Bugfixes
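To tie the MSC4108 pieces in this series together: the tests earlier in the series create a rendezvous session by POSTing a `text/plain` payload (anything else is rejected with `400 M_INVALID_PARAM`), receive a session URL and an `ETag`, and can then GET the payload back or update it with a conditional PUT. A hedged client-side sketch of that flow follows; the endpoint path and homeserver URL are assumptions for illustration, not values taken from these patches.

```python
# Hedged client-side sketch of the MSC4108 rendezvous flow exercised by the
# tests in this series. The homeserver URL and endpoint path below are
# illustrative assumptions.
import requests

HOMESERVER = "https://synapse.example.com"  # assumption
RENDEZVOUS = f"{HOMESERVER}/_matrix/client/unstable/org.matrix.msc4108/rendezvous"  # assumption

# Create a session: only text/plain payloads are accepted; anything else is
# rejected with 400 M_INVALID_PARAM.
resp = requests.post(
    RENDEZVOUS, data="foo=bar", headers={"Content-Type": "text/plain"}
)
assert resp.status_code == 201
session_url = resp.json()["url"]
etag = resp.headers["ETag"]

# Read the payload back, then update it using the ETag for optimistic
# concurrency control via If-Match.
assert requests.get(session_url).text == "foo=bar"
requests.put(
    session_url,
    data="foo=baz",
    headers={"Content-Type": "text/plain", "If-Match": etag},
)
```

Sessions can also expire or be evicted once the hard cap on concurrent sessions is hit, so a client should be prepared for a 404 on GET, as the capacity test above exercises.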