diff --git a/poetry.lock b/poetry.lock
index 9257d2ccf..6212961fb 100644
--- a/poetry.lock
+++ b/poetry.lock
@@ -169,29 +169,33 @@ lxml = ["lxml"]
 
 [[package]]
 name = "black"
-version = "23.10.1"
+version = "24.2.0"
 description = "The uncompromising code formatter."
 optional = false
 python-versions = ">=3.8"
 files = [
-    {file = "black-23.10.1-cp310-cp310-macosx_10_16_arm64.whl", hash = "sha256:ec3f8e6234c4e46ff9e16d9ae96f4ef69fa328bb4ad08198c8cee45bb1f08c69"},
-    {file = "black-23.10.1-cp310-cp310-macosx_10_16_x86_64.whl", hash = "sha256:1b917a2aa020ca600483a7b340c165970b26e9029067f019e3755b56e8dd5916"},
-    {file = "black-23.10.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9c74de4c77b849e6359c6f01987e94873c707098322b91490d24296f66d067dc"},
-    {file = "black-23.10.1-cp310-cp310-win_amd64.whl", hash = "sha256:7b4d10b0f016616a0d93d24a448100adf1699712fb7a4efd0e2c32bbb219b173"},
-    {file = "black-23.10.1-cp311-cp311-macosx_10_16_arm64.whl", hash = "sha256:b15b75fc53a2fbcac8a87d3e20f69874d161beef13954747e053bca7a1ce53a0"},
-    {file = "black-23.10.1-cp311-cp311-macosx_10_16_x86_64.whl", hash = "sha256:e293e4c2f4a992b980032bbd62df07c1bcff82d6964d6c9496f2cd726e246ace"},
-    {file = "black-23.10.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7d56124b7a61d092cb52cce34182a5280e160e6aff3137172a68c2c2c4b76bcb"},
-    {file = "black-23.10.1-cp311-cp311-win_amd64.whl", hash = "sha256:3f157a8945a7b2d424da3335f7ace89c14a3b0625e6593d21139c2d8214d55ce"},
-    {file = "black-23.10.1-cp38-cp38-macosx_10_16_arm64.whl", hash = "sha256:cfcce6f0a384d0da692119f2d72d79ed07c7159879d0bb1bb32d2e443382bf3a"},
-    {file = "black-23.10.1-cp38-cp38-macosx_10_16_x86_64.whl", hash = "sha256:33d40f5b06be80c1bbce17b173cda17994fbad096ce60eb22054da021bf933d1"},
-    {file = "black-23.10.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:840015166dbdfbc47992871325799fd2dc0dcf9395e401ada6d88fe11498abad"},
-    {file = "black-23.10.1-cp38-cp38-win_amd64.whl", hash = "sha256:037e9b4664cafda5f025a1728c50a9e9aedb99a759c89f760bd83730e76ba884"},
-    {file = "black-23.10.1-cp39-cp39-macosx_10_16_arm64.whl", hash = "sha256:7cb5936e686e782fddb1c73f8aa6f459e1ad38a6a7b0e54b403f1f05a1507ee9"},
-    {file = "black-23.10.1-cp39-cp39-macosx_10_16_x86_64.whl", hash = "sha256:7670242e90dc129c539e9ca17665e39a146a761e681805c54fbd86015c7c84f7"},
-    {file = "black-23.10.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5ed45ac9a613fb52dad3b61c8dea2ec9510bf3108d4db88422bacc7d1ba1243d"},
-    {file = "black-23.10.1-cp39-cp39-win_amd64.whl", hash = "sha256:6d23d7822140e3fef190734216cefb262521789367fbdc0b3f22af6744058982"},
-    {file = "black-23.10.1-py3-none-any.whl", hash = "sha256:d431e6739f727bb2e0495df64a6c7a5310758e87505f5f8cde9ff6c0f2d7e4fe"},
-    {file = "black-23.10.1.tar.gz", hash = "sha256:1f8ce316753428ff68749c65a5f7844631aa18c8679dfd3ca9dc1a289979c258"},
+    {file = "black-24.2.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:6981eae48b3b33399c8757036c7f5d48a535b962a7c2310d19361edeef64ce29"},
+    {file = "black-24.2.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:d533d5e3259720fdbc1b37444491b024003e012c5173f7d06825a77508085430"},
+    {file = "black-24.2.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:61a0391772490ddfb8a693c067df1ef5227257e72b0e4108482b8d41b5aee13f"},
+    {file = "black-24.2.0-cp310-cp310-win_amd64.whl", hash = "sha256:992e451b04667116680cb88f63449267c13e1ad134f30087dec8527242e9862a"},
+    {file = "black-24.2.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:163baf4ef40e6897a2a9b83890e59141cc8c2a98f2dda5080dc15c00ee1e62cd"},
+    {file = "black-24.2.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:e37c99f89929af50ffaf912454b3e3b47fd64109659026b678c091a4cd450fb2"},
+    {file = "black-24.2.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4f9de21bafcba9683853f6c96c2d515e364aee631b178eaa5145fc1c61a3cc92"},
+    {file = "black-24.2.0-cp311-cp311-win_amd64.whl", hash = "sha256:9db528bccb9e8e20c08e716b3b09c6bdd64da0dd129b11e160bf082d4642ac23"},
+    {file = "black-24.2.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:d84f29eb3ee44859052073b7636533ec995bd0f64e2fb43aeceefc70090e752b"},
+    {file = "black-24.2.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:1e08fb9a15c914b81dd734ddd7fb10513016e5ce7e6704bdd5e1251ceee51ac9"},
+    {file = "black-24.2.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:810d445ae6069ce64030c78ff6127cd9cd178a9ac3361435708b907d8a04c693"},
+    {file = "black-24.2.0-cp312-cp312-win_amd64.whl", hash = "sha256:ba15742a13de85e9b8f3239c8f807723991fbfae24bad92d34a2b12e81904982"},
+    {file = "black-24.2.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:7e53a8c630f71db01b28cd9602a1ada68c937cbf2c333e6ed041390d6968faf4"},
+    {file = "black-24.2.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:93601c2deb321b4bad8f95df408e3fb3943d85012dddb6121336b8e24a0d1218"},
+    {file = "black-24.2.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a0057f800de6acc4407fe75bb147b0c2b5cbb7c3ed110d3e5999cd01184d53b0"},
+    {file = "black-24.2.0-cp38-cp38-win_amd64.whl", hash = "sha256:faf2ee02e6612577ba0181f4347bcbcf591eb122f7841ae5ba233d12c39dcb4d"},
+    {file = "black-24.2.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:057c3dc602eaa6fdc451069bd027a1b2635028b575a6c3acfd63193ced20d9c8"},
+    {file = "black-24.2.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:08654d0797e65f2423f850fc8e16a0ce50925f9337fb4a4a176a7aa4026e63f8"},
+    {file = "black-24.2.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ca610d29415ee1a30a3f30fab7a8f4144e9d34c89a235d81292a1edb2b55f540"},
+    {file = "black-24.2.0-cp39-cp39-win_amd64.whl", hash = "sha256:4dd76e9468d5536abd40ffbc7a247f83b2324f0c050556d9c371c2b9a9a95e31"},
+    {file = "black-24.2.0-py3-none-any.whl", hash = "sha256:e8a6ae970537e67830776488bca52000eaa37fa63b9988e8c487458d9cd5ace6"},
+    {file = "black-24.2.0.tar.gz", hash = "sha256:bce4f25c27c3435e4dace4815bcb2008b87e167e3bf4ee47ccdc5ce906eb4894"},
 ]
 
 [package.dependencies]
@@ -205,7 +209,7 @@ typing-extensions = {version = ">=4.0.1", markers = "python_version < \"3.11\""}
 
 [package.extras]
 colorama = ["colorama (>=0.4.3)"]
-d = ["aiohttp (>=3.7.4)"]
+d = ["aiohttp (>=3.7.4)", "aiohttp (>=3.7.4,!=3.9.0)"]
 jupyter = ["ipython (>=7.8.0)", "tokenize-rt (>=3.2.0)"]
 uvloop = ["uvloop (>=0.15.2)"]
 
diff --git a/synapse/_scripts/synapse_port_db.py b/synapse/_scripts/synapse_port_db.py
index 1dcc289df..a533cad5a 100755
--- a/synapse/_scripts/synapse_port_db.py
+++ b/synapse/_scripts/synapse_port_db.py
@@ -1040,10 +1040,10 @@ class Porter:
         return done, remaining + done
 
     async def _setup_state_group_id_seq(self) -> None:
-        curr_id: Optional[
-            int
-        ] = await self.sqlite_store.db_pool.simple_select_one_onecol(
-            table="state_groups", keyvalues={}, retcol="MAX(id)", allow_none=True
+        curr_id: Optional[int] = (
+            await self.sqlite_store.db_pool.simple_select_one_onecol(
+                table="state_groups", keyvalues={}, retcol="MAX(id)", allow_none=True
+            )
         )
 
         if not curr_id:
@@ -1132,13 +1132,13 @@ class Porter:
         )
 
     async def _setup_auth_chain_sequence(self) -> None:
-        curr_chain_id: Optional[
-            int
-        ] = await self.sqlite_store.db_pool.simple_select_one_onecol(
-            table="event_auth_chains",
-            keyvalues={},
-            retcol="MAX(chain_id)",
-            allow_none=True,
+        curr_chain_id: Optional[int] = (
+            await self.sqlite_store.db_pool.simple_select_one_onecol(
+                table="event_auth_chains",
+                keyvalues={},
+                retcol="MAX(chain_id)",
+                allow_none=True,
+            )
         )
 
         def r(txn: LoggingTransaction) -> None:
diff --git a/synapse/api/constants.py b/synapse/api/constants.py
index f3d2c8073..d25aff98f 100644
--- a/synapse/api/constants.py
+++ b/synapse/api/constants.py
@@ -43,7 +43,6 @@ MAIN_TIMELINE: Final = "main"
 
 
 class Membership:
-
     """Represents the membership states of a user in a room."""
 
     INVITE: Final = "invite"
diff --git a/synapse/api/room_versions.py b/synapse/api/room_versions.py
index 7ff8ad2d5..fbc1d58ec 100644
--- a/synapse/api/room_versions.py
+++ b/synapse/api/room_versions.py
@@ -370,9 +370,11 @@ class RoomVersionCapability:
 
 MSC3244_CAPABILITIES = {
     cap.identifier: {
-        "preferred": cap.preferred_version.identifier
-        if cap.preferred_version is not None
-        else None,
+        "preferred": (
+            cap.preferred_version.identifier
+            if cap.preferred_version is not None
+            else None
+        ),
         "support": [
             v.identifier
             for v in KNOWN_ROOM_VERSIONS.values()
diff --git a/synapse/app/homeserver.py b/synapse/app/homeserver.py
index b241dbf62..8a545a86c 100644
--- a/synapse/app/homeserver.py
+++ b/synapse/app/homeserver.py
@@ -188,9 +188,9 @@ class SynapseHomeServer(HomeServer):
                 PasswordResetSubmitTokenResource,
             )
 
-            resources[
-                "/_synapse/client/password_reset/email/submit_token"
-            ] = PasswordResetSubmitTokenResource(self)
+            resources["/_synapse/client/password_reset/email/submit_token"] = (
+                PasswordResetSubmitTokenResource(self)
+            )
 
         if name == "consent":
             from synapse.rest.consent.consent_resource import ConsentResource
diff --git a/synapse/appservice/api.py b/synapse/appservice/api.py
index 34fa2bb65..19322471d 100644
--- a/synapse/appservice/api.py
+++ b/synapse/appservice/api.py
@@ -362,16 +362,16 @@ class ApplicationServiceApi(SimpleHttpClient):
         # TODO: Update to stable prefixes once MSC3202 completes FCP merge
         if service.msc3202_transaction_extensions:
             if one_time_keys_count:
-                body[
-                    "org.matrix.msc3202.device_one_time_key_counts"
-                ] = one_time_keys_count
-                body[
-                    "org.matrix.msc3202.device_one_time_keys_count"
-                ] = one_time_keys_count
+                body["org.matrix.msc3202.device_one_time_key_counts"] = (
+                    one_time_keys_count
+                )
+                body["org.matrix.msc3202.device_one_time_keys_count"] = (
+                    one_time_keys_count
+                )
             if unused_fallback_keys:
-                body[
-                    "org.matrix.msc3202.device_unused_fallback_key_types"
-                ] = unused_fallback_keys
+                body["org.matrix.msc3202.device_unused_fallback_key_types"] = (
+                    unused_fallback_keys
+                )
             if device_list_summary:
                 body["org.matrix.msc3202.device_lists"] = {
                     "changed": list(device_list_summary.changed),
diff --git a/synapse/config/registration.py b/synapse/config/registration.py
index 3fe0f050c..c7f3e6d35 100644
--- a/synapse/config/registration.py
+++ b/synapse/config/registration.py
@@ -171,9 +171,9 @@ class RegistrationConfig(Config):
         refreshable_access_token_lifetime = self.parse_duration(
             refreshable_access_token_lifetime
         )
-        self.refreshable_access_token_lifetime: Optional[
-            int
-        ] = refreshable_access_token_lifetime
+        self.refreshable_access_token_lifetime: Optional[int] = (
+            refreshable_access_token_lifetime
+        )
 
         if (
             self.session_lifetime is not None
diff --git a/synapse/config/repository.py b/synapse/config/repository.py
index 4655882b4..164547049 100644
--- a/synapse/config/repository.py
+++ b/synapse/config/repository.py
@@ -199,9 +199,9 @@ class ContentRepositoryConfig(Config):
                 provider_config["module"] == "file_system"
                 or provider_config["module"] == "synapse.rest.media.v1.storage_provider"
             ):
-                provider_config[
-                    "module"
-                ] = "synapse.media.storage_provider.FileStorageProviderBackend"
+                provider_config["module"] = (
+                    "synapse.media.storage_provider.FileStorageProviderBackend"
+                )
 
             provider_class, parsed_config = load_module(
                 provider_config, ("media_storage_providers", "<item %i>" % i)
diff --git a/synapse/event_auth.py b/synapse/event_auth.py
index c8b06f760..f5abcde2d 100644
--- a/synapse/event_auth.py
+++ b/synapse/event_auth.py
@@ -88,8 +88,7 @@ class _EventSourceStore(Protocol):
         redact_behaviour: EventRedactBehaviour,
         get_prev_content: bool = False,
         allow_rejected: bool = False,
-    ) -> Dict[str, "EventBase"]:
-        ...
+    ) -> Dict[str, "EventBase"]: ...
 
 
 def validate_event_for_room_version(event: "EventBase") -> None:
diff --git a/synapse/events/__init__.py b/synapse/events/__init__.py
index 7ec696c6c..36e0f47e5 100644
--- a/synapse/events/__init__.py
+++ b/synapse/events/__init__.py
@@ -93,16 +93,14 @@ class DictProperty(Generic[T]):
         self,
         instance: Literal[None],
         owner: Optional[Type[_DictPropertyInstance]] = None,
-    ) -> "DictProperty":
-        ...
+    ) -> "DictProperty": ...
 
     @overload
     def __get__(
         self,
         instance: _DictPropertyInstance,
         owner: Optional[Type[_DictPropertyInstance]] = None,
-    ) -> T:
-        ...
+    ) -> T: ...
 
     def __get__(
         self,
@@ -161,16 +159,14 @@ class DefaultDictProperty(DictProperty, Generic[T]):
         self,
         instance: Literal[None],
         owner: Optional[Type[_DictPropertyInstance]] = None,
-    ) -> "DefaultDictProperty":
-        ...
+    ) -> "DefaultDictProperty": ...
 
     @overload
     def __get__(
         self,
         instance: _DictPropertyInstance,
         owner: Optional[Type[_DictPropertyInstance]] = None,
-    ) -> T:
-        ...
+    ) -> T: ...
 
     def __get__(
         self,
diff --git a/synapse/events/utils.py b/synapse/events/utils.py
index cc52d0d1e..e0613d0db 100644
--- a/synapse/events/utils.py
+++ b/synapse/events/utils.py
@@ -612,9 +612,9 @@ class EventClientSerializer:
         serialized_aggregations = {}
 
         if event_aggregations.references:
-            serialized_aggregations[
-                RelationTypes.REFERENCE
-            ] = event_aggregations.references
+            serialized_aggregations[RelationTypes.REFERENCE] = (
+                event_aggregations.references
+            )
 
         if event_aggregations.replace:
             # Include information about it in the relations dict.
diff --git a/synapse/federation/federation_server.py b/synapse/federation/federation_server.py
index dc8cd5ec9..65d3a661f 100644
--- a/synapse/federation/federation_server.py
+++ b/synapse/federation/federation_server.py
@@ -169,9 +169,9 @@ class FederationServer(FederationBase):
 
         # We cache responses to state queries, as they take a while and often
         # come in waves.
-        self._state_resp_cache: ResponseCache[
-            Tuple[str, Optional[str]]
-        ] = ResponseCache(hs.get_clock(), "state_resp", timeout_ms=30000)
+        self._state_resp_cache: ResponseCache[Tuple[str, Optional[str]]] = (
+            ResponseCache(hs.get_clock(), "state_resp", timeout_ms=30000)
+        )
         self._state_ids_resp_cache: ResponseCache[Tuple[str, str]] = ResponseCache(
             hs.get_clock(), "state_ids_resp", timeout_ms=30000
         )
diff --git a/synapse/federation/send_queue.py b/synapse/federation/send_queue.py
index e9a2386a5..b5c9fcff7 100644
--- a/synapse/federation/send_queue.py
+++ b/synapse/federation/send_queue.py
@@ -88,9 +88,9 @@ class FederationRemoteSendQueue(AbstractFederationSender):
         # Stores the destinations we need to explicitly send presence to about a
         # given user.
         # Stream position -> (user_id, destinations)
-        self.presence_destinations: SortedDict[
-            int, Tuple[str, Iterable[str]]
-        ] = SortedDict()
+        self.presence_destinations: SortedDict[int, Tuple[str, Iterable[str]]] = (
+            SortedDict()
+        )
 
         # (destination, key) -> EDU
         self.keyed_edu: Dict[Tuple[str, tuple], Edu] = {}
diff --git a/synapse/handlers/account.py b/synapse/handlers/account.py
index 37cc3d3ff..89e944bc1 100644
--- a/synapse/handlers/account.py
+++ b/synapse/handlers/account.py
@@ -118,10 +118,10 @@ class AccountHandler:
         }
 
         if self._use_account_validity_in_account_status:
-            status[
-                "org.matrix.expired"
-            ] = await self._account_validity_handler.is_user_expired(
-                user_id.to_string()
+            status["org.matrix.expired"] = (
+                await self._account_validity_handler.is_user_expired(
+                    user_id.to_string()
+                )
             )
 
         return status
diff --git a/synapse/handlers/directory.py b/synapse/handlers/directory.py
index 5f3dc30b6..ad2b0f5fc 100644
--- a/synapse/handlers/directory.py
+++ b/synapse/handlers/directory.py
@@ -265,9 +265,9 @@ class DirectoryHandler:
     async def get_association(self, room_alias: RoomAlias) -> JsonDict:
         room_id = None
         if self.hs.is_mine(room_alias):
-            result: Optional[
-                RoomAliasMapping
-            ] = await self.get_association_from_room_alias(room_alias)
+            result: Optional[RoomAliasMapping] = (
+                await self.get_association_from_room_alias(room_alias)
+            )
 
             if result:
                 room_id = result.room_id
diff --git a/synapse/handlers/federation.py b/synapse/handlers/federation.py
index 2b7aad5b5..299588e47 100644
--- a/synapse/handlers/federation.py
+++ b/synapse/handlers/federation.py
@@ -1001,11 +1001,11 @@ class FederationHandler:
             )
 
             if include_auth_user_id:
-                event_content[
-                    EventContentFields.AUTHORISING_USER
-                ] = await self._event_auth_handler.get_user_which_could_invite(
-                    room_id,
-                    state_ids,
+                event_content[EventContentFields.AUTHORISING_USER] = (
+                    await self._event_auth_handler.get_user_which_could_invite(
+                        room_id,
+                        state_ids,
+                    )
                 )
 
         builder = self.event_builder_factory.for_room_version(
diff --git a/synapse/handlers/federation_event.py b/synapse/handlers/federation_event.py
index 83f6a2598..c85deaed5 100644
--- a/synapse/handlers/federation_event.py
+++ b/synapse/handlers/federation_event.py
@@ -1367,9 +1367,9 @@ class FederationEventHandler:
             )
 
             if remote_event.is_state() and remote_event.rejected_reason is None:
-                state_map[
-                    (remote_event.type, remote_event.state_key)
-                ] = remote_event.event_id
+                state_map[(remote_event.type, remote_event.state_key)] = (
+                    remote_event.event_id
+                )
 
         return state_map
diff --git a/synapse/handlers/message.py b/synapse/handlers/message.py
index 7e5bb97f2..0ce6eeee1 100644
--- a/synapse/handlers/message.py
+++ b/synapse/handlers/message.py
@@ -1654,9 +1654,9 @@ class EventCreationHandler:
                         expiry_ms=60 * 60 * 1000,
                     )
 
-                    self._external_cache_joined_hosts_updates[
-                        state_entry.state_group
-                    ] = None
+                    self._external_cache_joined_hosts_updates[state_entry.state_group] = (
+                        None
+                    )
 
     async def _validate_canonical_alias(
         self,
diff --git a/synapse/handlers/presence.py b/synapse/handlers/presence.py
index 21d3c71d8..37ee625f7 100644
--- a/synapse/handlers/presence.py
+++ b/synapse/handlers/presence.py
@@ -493,9 +493,9 @@ class WorkerPresenceHandler(BasePresenceHandler):
 
         # The number of ongoing syncs on this process, by (user ID, device ID).
         # Empty if _presence_enabled is false.
-        self._user_device_to_num_current_syncs: Dict[
-            Tuple[str, Optional[str]], int
-        ] = {}
+        self._user_device_to_num_current_syncs: Dict[Tuple[str, Optional[str]], int] = (
+            {}
+        )
 
         self.notifier = hs.get_notifier()
         self.instance_id = hs.get_instance_id()
@@ -818,9 +818,9 @@ class PresenceHandler(BasePresenceHandler):
 
         # Keeps track of the number of *ongoing* syncs on this process. While
         # this is non zero a user will never go offline.
-        self._user_device_to_num_current_syncs: Dict[
-            Tuple[str, Optional[str]], int
-        ] = {}
+        self._user_device_to_num_current_syncs: Dict[Tuple[str, Optional[str]], int] = (
+            {}
+        )
 
         # Keeps track of the number of *ongoing* syncs on other processes.
         #
diff --git a/synapse/handlers/profile.py b/synapse/handlers/profile.py
index 279d393a5..e51e282a9 100644
--- a/synapse/handlers/profile.py
+++ b/synapse/handlers/profile.py
@@ -320,9 +320,9 @@ class ProfileHandler:
             server_name = host
 
         if self._is_mine_server_name(server_name):
-            media_info: Optional[
-                Union[LocalMedia, RemoteMedia]
-            ] = await self.store.get_local_media(media_id)
+            media_info: Optional[Union[LocalMedia, RemoteMedia]] = (
+                await self.store.get_local_media(media_id)
+            )
         else:
             media_info = await self.store.get_cached_remote_media(server_name, media_id)
 
diff --git a/synapse/handlers/relations.py b/synapse/handlers/relations.py
index 828a4b4cb..931ac0c81 100644
--- a/synapse/handlers/relations.py
+++ b/synapse/handlers/relations.py
@@ -188,13 +188,13 @@ class RelationsHandler:
         if include_original_event:
             # Do not bundle aggregations when retrieving the original event because
             # we want the content before relations are applied to it.
-            return_value[
-                "original_event"
-            ] = await self._event_serializer.serialize_event(
-                event,
-                now,
-                bundle_aggregations=None,
-                config=serialize_options,
+            return_value["original_event"] = (
+                await self._event_serializer.serialize_event(
+                    event,
+                    now,
+                    bundle_aggregations=None,
+                    config=serialize_options,
+                )
             )
 
         if next_token:
diff --git a/synapse/handlers/room.py b/synapse/handlers/room.py
index 6b116dce8..3278426ca 100644
--- a/synapse/handlers/room.py
+++ b/synapse/handlers/room.py
@@ -538,10 +538,10 @@ class RoomCreationHandler:
         # deep-copy the power-levels event before we start modifying it
         # note that if frozen_dicts are enabled, `power_levels` will be a frozen
         # dict so we can't just copy.deepcopy it.
-        initial_state[
-            (EventTypes.PowerLevels, "")
-        ] = power_levels = copy_and_fixup_power_levels_contents(
-            initial_state[(EventTypes.PowerLevels, "")]
+        initial_state[(EventTypes.PowerLevels, "")] = power_levels = (
+            copy_and_fixup_power_levels_contents(
+                initial_state[(EventTypes.PowerLevels, "")]
+            )
         )
 
         # Resolve the minimum power level required to send any state event
@@ -1362,9 +1362,11 @@ class RoomCreationHandler:
         visibility = room_config.get("visibility", "private")
         preset_name = room_config.get(
             "preset",
-            RoomCreationPreset.PRIVATE_CHAT
-            if visibility == "private"
-            else RoomCreationPreset.PUBLIC_CHAT,
+            (
+                RoomCreationPreset.PRIVATE_CHAT
+                if visibility == "private"
+                else RoomCreationPreset.PUBLIC_CHAT
+            ),
         )
 
         try:
             preset_config = self._presets_dict[preset_name]
diff --git a/synapse/handlers/room_member.py b/synapse/handlers/room_member.py
index d238c40bc..9e9f6cd06 100644
--- a/synapse/handlers/room_member.py
+++ b/synapse/handlers/room_member.py
@@ -1236,11 +1236,11 @@ class RoomMemberHandler(metaclass=abc.ABCMeta):
                 # If this is going to be a local join, additional information must
                 # be included in the event content in order to efficiently validate
                 # the event.
-                content[
-                    EventContentFields.AUTHORISING_USER
-                ] = await self.event_auth_handler.get_user_which_could_invite(
-                    room_id,
-                    state_before_join,
+                content[EventContentFields.AUTHORISING_USER] = (
+                    await self.event_auth_handler.get_user_which_could_invite(
+                        room_id,
+                        state_before_join,
+                    )
                 )
 
             return False, []
diff --git a/synapse/handlers/sync.py b/synapse/handlers/sync.py
index 9122a79b4..5bb8a1439 100644
--- a/synapse/handlers/sync.py
+++ b/synapse/handlers/sync.py
@@ -1333,9 +1333,9 @@ class SyncHandler:
                         and auth_event.state_key == member
                     ):
                         missing_members.discard(member)
-                        additional_state_ids[
-                            (EventTypes.Member, member)
-                        ] = auth_event.event_id
+                        additional_state_ids[(EventTypes.Member, member)] = (
+                            auth_event.event_id
+                        )
                         break
 
         if missing_members:
diff --git a/synapse/http/matrixfederationclient.py b/synapse/http/matrixfederationclient.py
index 884ecdacd..c73a589e6 100644
--- a/synapse/http/matrixfederationclient.py
+++ b/synapse/http/matrixfederationclient.py
@@ -931,8 +931,7 @@ class MatrixFederationHttpClient:
         try_trailing_slash_on_400: bool = False,
         parser: Literal[None] = None,
         backoff_on_all_error_codes: bool = False,
-    ) -> JsonDict:
-        ...
+    ) -> JsonDict: ...
 
     @overload
     async def put_json(
@@ -949,8 +948,7 @@ class MatrixFederationHttpClient:
         try_trailing_slash_on_400: bool = False,
         parser: Optional[ByteParser[T]] = None,
         backoff_on_all_error_codes: bool = False,
-    ) -> T:
-        ...
+    ) -> T: ...
 
     async def put_json(
         self,
@@ -1140,8 +1138,7 @@ class MatrixFederationHttpClient:
         ignore_backoff: bool = False,
         try_trailing_slash_on_400: bool = False,
         parser: Literal[None] = None,
-    ) -> JsonDict:
-        ...
+    ) -> JsonDict: ...
 
     @overload
     async def get_json(
@@ -1154,8 +1151,7 @@ class MatrixFederationHttpClient:
         ignore_backoff: bool = ...,
         try_trailing_slash_on_400: bool = ...,
         parser: ByteParser[T] = ...,
-    ) -> T:
-        ...
+    ) -> T: ...
 
     async def get_json(
         self,
@@ -1236,8 +1232,7 @@ class MatrixFederationHttpClient:
         ignore_backoff: bool = False,
         try_trailing_slash_on_400: bool = False,
         parser: Literal[None] = None,
-    ) -> Tuple[JsonDict, Dict[bytes, List[bytes]]]:
-        ...
+    ) -> Tuple[JsonDict, Dict[bytes, List[bytes]]]: ...
 
     @overload
     async def get_json_with_headers(
@@ -1250,8 +1245,7 @@ class MatrixFederationHttpClient:
         ignore_backoff: bool = ...,
         try_trailing_slash_on_400: bool = ...,
         parser: ByteParser[T] = ...,
-    ) -> Tuple[T, Dict[bytes, List[bytes]]]:
-        ...
+    ) -> Tuple[T, Dict[bytes, List[bytes]]]: ...
 
     async def get_json_with_headers(
         self,
diff --git a/synapse/http/servlet.py b/synapse/http/servlet.py
index b22eb727b..b73d06f1d 100644
--- a/synapse/http/servlet.py
+++ b/synapse/http/servlet.py
@@ -61,20 +61,17 @@ logger = logging.getLogger(__name__)
 
 
 @overload
-def parse_integer(request: Request, name: str, default: int) -> int:
-    ...
+def parse_integer(request: Request, name: str, default: int) -> int: ...
 
 
 @overload
-def parse_integer(request: Request, name: str, *, required: Literal[True]) -> int:
-    ...
+def parse_integer(request: Request, name: str, *, required: Literal[True]) -> int: ...
 
 
 @overload
 def parse_integer(
     request: Request, name: str, default: Optional[int] = None, required: bool = False
-) -> Optional[int]:
-    ...
+) -> Optional[int]: ...
 
 
 def parse_integer(
@@ -105,8 +102,7 @@ def parse_integer_from_args(
     args: Mapping[bytes, Sequence[bytes]],
     name: str,
     default: Optional[int] = None,
-) -> Optional[int]:
-    ...
+) -> Optional[int]: ...
 
 
 @overload
@@ -115,8 +111,7 @@ def parse_integer_from_args(
     name: str,
     *,
     required: Literal[True],
-) -> int:
-    ...
+) -> int: ...
 
 
 @overload
@@ -125,8 +120,7 @@ def parse_integer_from_args(
     name: str,
     default: Optional[int] = None,
    required: bool = False,
-) -> Optional[int]:
-    ...
+) -> Optional[int]: ...
 
 
 def parse_integer_from_args(
@@ -172,20 +166,17 @@ def parse_integer_from_args(
 
 
 @overload
-def parse_boolean(request: Request, name: str, default: bool) -> bool:
-    ...
+def parse_boolean(request: Request, name: str, default: bool) -> bool: ...
 
 
 @overload
-def parse_boolean(request: Request, name: str, *, required: Literal[True]) -> bool:
-    ...
+def parse_boolean(request: Request, name: str, *, required: Literal[True]) -> bool: ...
 
 
 @overload
 def parse_boolean(
     request: Request, name: str, default: Optional[bool] = None, required: bool = False
-) -> Optional[bool]:
-    ...
+) -> Optional[bool]: ...
 
 
 def parse_boolean(
@@ -216,8 +207,7 @@ def parse_boolean_from_args(
     args: Mapping[bytes, Sequence[bytes]],
     name: str,
     default: bool,
-) -> bool:
-    ...
+) -> bool: ...
 
 
 @overload
@@ -226,8 +216,7 @@ def parse_boolean_from_args(
     name: str,
    *,
    required: Literal[True],
-) -> bool:
-    ...
+) -> bool: ...
 
 
 @overload
@@ -236,8 +225,7 @@ def parse_boolean_from_args(
     name: str,
     default: Optional[bool] = None,
     required: bool = False,
-) -> Optional[bool]:
-    ...
+) -> Optional[bool]: ...
 
 
 def parse_boolean_from_args(
@@ -289,8 +277,7 @@ def parse_bytes_from_args(
     args: Mapping[bytes, Sequence[bytes]],
     name: str,
     default: Optional[bytes] = None,
-) -> Optional[bytes]:
-    ...
+) -> Optional[bytes]: ...
 
 
 @overload
@@ -300,8 +287,7 @@ def parse_bytes_from_args(
     default: Literal[None] = None,
     *,
     required: Literal[True],
-) -> bytes:
-    ...
+) -> bytes: ...
 
 
 @overload
@@ -310,8 +296,7 @@ def parse_bytes_from_args(
     name: str,
     default: Optional[bytes] = None,
     required: bool = False,
-) -> Optional[bytes]:
-    ...
+) -> Optional[bytes]: ...
 
 
 def parse_bytes_from_args(
@@ -355,8 +340,7 @@ def parse_string(
     *,
     allowed_values: Optional[StrCollection] = None,
     encoding: str = "ascii",
-) -> str:
-    ...
+) -> str: ...
 
 
 @overload
@@ -367,8 +351,7 @@ def parse_string(
     required: Literal[True],
     allowed_values: Optional[StrCollection] = None,
    encoding: str = "ascii",
-) -> str:
-    ...
+) -> str: ...
 
 
 @overload
@@ -380,8 +363,7 @@ def parse_string(
     required: bool = False,
     allowed_values: Optional[StrCollection] = None,
     encoding: str = "ascii",
-) -> Optional[str]:
-    ...
+) -> Optional[str]: ...
 
 
 def parse_string(
@@ -437,8 +419,7 @@ def parse_enum(
     name: str,
     E: Type[EnumT],
     default: EnumT,
-) -> EnumT:
-    ...
+) -> EnumT: ...
 
 
 @overload
@@ -448,8 +429,7 @@ def parse_enum(
     E: Type[EnumT],
     *,
     required: Literal[True],
-) -> EnumT:
-    ...
+) -> EnumT: ...
 
 
 def parse_enum(
@@ -526,8 +506,7 @@ def parse_strings_from_args(
     *,
     allowed_values: Optional[StrCollection] = None,
     encoding: str = "ascii",
-) -> Optional[List[str]]:
-    ...
+) -> Optional[List[str]]: ...
 
 
 @overload
@@ -538,8 +517,7 @@ def parse_strings_from_args(
     *,
     allowed_values: Optional[StrCollection] = None,
     encoding: str = "ascii",
-) -> List[str]:
-    ...
+) -> List[str]: ...
 
 
 @overload
@@ -550,8 +528,7 @@ def parse_strings_from_args(
     required: Literal[True],
     allowed_values: Optional[StrCollection] = None,
     encoding: str = "ascii",
-) -> List[str]:
-    ...
+) -> List[str]: ...
 
 
 @overload
@@ -563,8 +540,7 @@ def parse_strings_from_args(
     required: bool = False,
     allowed_values: Optional[StrCollection] = None,
     encoding: str = "ascii",
-) -> Optional[List[str]]:
-    ...
+) -> Optional[List[str]]: ...
 
 
 def parse_strings_from_args(
@@ -625,8 +601,7 @@ def parse_string_from_args(
     *,
     allowed_values: Optional[StrCollection] = None,
     encoding: str = "ascii",
-) -> Optional[str]:
-    ...
+) -> Optional[str]: ...
 
 
 @overload
@@ -638,8 +613,7 @@ def parse_string_from_args(
     required: Literal[True],
     allowed_values: Optional[StrCollection] = None,
     encoding: str = "ascii",
-) -> str:
-    ...
+) -> str: ...
 
 
 @overload
@@ -650,8 +624,7 @@ def parse_string_from_args(
     required: bool = False,
     allowed_values: Optional[StrCollection] = None,
     encoding: str = "ascii",
-) -> Optional[str]:
-    ...
+) -> Optional[str]: ...
 
 
 def parse_string_from_args(
@@ -704,22 +677,19 @@ def parse_string_from_args(
 
 
 @overload
-def parse_json_value_from_request(request: Request) -> JsonDict:
-    ...
+def parse_json_value_from_request(request: Request) -> JsonDict: ...
 
 
 @overload
 def parse_json_value_from_request(
     request: Request, allow_empty_body: Literal[False]
-) -> JsonDict:
-    ...
+) -> JsonDict: ...
 
 
 @overload
 def parse_json_value_from_request(
     request: Request, allow_empty_body: bool = False
-) -> Optional[JsonDict]:
-    ...
+) -> Optional[JsonDict]: ...
 
 
 def parse_json_value_from_request(
@@ -847,7 +817,6 @@ def assert_params_in_dict(body: JsonDict, required: StrCollection) -> None:
 
 
 class RestServlet:
-
     """A Synapse REST Servlet.
 
     An implementing class can either provide its own custom 'register' method,
diff --git a/synapse/logging/context.py b/synapse/logging/context.py
index 548d255b6..b36f39757 100644
--- a/synapse/logging/context.py
+++ b/synapse/logging/context.py
@@ -744,8 +744,7 @@ def preserve_fn(
 
 
 @overload
-def preserve_fn(f: Callable[P, R]) -> Callable[P, "defer.Deferred[R]"]:
-    ...
+def preserve_fn(f: Callable[P, R]) -> Callable[P, "defer.Deferred[R]"]: ...
 
 
 def preserve_fn(
@@ -774,8 +773,7 @@ def run_in_background(
 
 
 @overload
 def run_in_background(
     f: Callable[P, R], *args: P.args, **kwargs: P.kwargs
-) -> "defer.Deferred[R]":
-    ...
+) -> "defer.Deferred[R]": ...
 
 
 def run_in_background(  # type: ignore[misc]
diff --git a/synapse/logging/opentracing.py b/synapse/logging/opentracing.py
index 78b9fffbf..7a3c805cc 100644
--- a/synapse/logging/opentracing.py
+++ b/synapse/logging/opentracing.py
@@ -388,15 +388,13 @@ def only_if_tracing(func: Callable[P, R]) -> Callable[P, Optional[R]]:
 @overload
 def ensure_active_span(
     message: str,
-) -> Callable[[Callable[P, R]], Callable[P, Optional[R]]]:
-    ...
+) -> Callable[[Callable[P, R]], Callable[P, Optional[R]]]: ...
 
 
 @overload
 def ensure_active_span(
     message: str, ret: T
-) -> Callable[[Callable[P, R]], Callable[P, Union[T, R]]]:
-    ...
+) -> Callable[[Callable[P, R]], Callable[P, Union[T, R]]]: ...
 
 
 def ensure_active_span(
diff --git a/synapse/media/media_repository.py b/synapse/media/media_repository.py
index 52859ed49..0e875132f 100644
--- a/synapse/media/media_repository.py
+++ b/synapse/media/media_repository.py
@@ -1002,9 +1002,9 @@ class MediaRepository:
                 )
                 t_width = min(m_width, t_width)
                 t_height = min(m_height, t_height)
-                thumbnails[
-                    (t_width, t_height, requirement.media_type)
-                ] = requirement.method
+                thumbnails[(t_width, t_height, requirement.media_type)] = (
+                    requirement.method
+                )
 
         # Now we generate the thumbnails for each dimension, store it
         for (t_width, t_height, t_type), t_method in thumbnails.items():
diff --git a/synapse/metrics/jemalloc.py b/synapse/metrics/jemalloc.py
index 6b4c64f7a..bd2598568 100644
--- a/synapse/metrics/jemalloc.py
+++ b/synapse/metrics/jemalloc.py
@@ -42,14 +42,12 @@ class JemallocStats:
     @overload
     def _mallctl(
         self, name: str, read: Literal[True] = True, write: Optional[int] = None
-    ) -> int:
-        ...
+    ) -> int: ...
 
     @overload
     def _mallctl(
         self, name: str, read: Literal[False], write: Optional[int] = None
-    ) -> None:
-        ...
+    ) -> None: ...
 
     def _mallctl(
         self, name: str, read: bool = True, write: Optional[int] = None
diff --git a/synapse/notifier.py b/synapse/notifier.py
index 62d954298..e87333a80 100644
--- a/synapse/notifier.py
+++ b/synapse/notifier.py
@@ -469,8 +469,7 @@ class Notifier:
         new_token: RoomStreamToken,
         users: Optional[Collection[Union[str, UserID]]] = None,
         rooms: Optional[StrCollection] = None,
-    ) -> None:
-        ...
+    ) -> None: ...
 
     @overload
     def on_new_event(
@@ -479,8 +478,7 @@ class Notifier:
         new_token: MultiWriterStreamToken,
         users: Optional[Collection[Union[str, UserID]]] = None,
         rooms: Optional[StrCollection] = None,
-    ) -> None:
-        ...
+    ) -> None: ...
 
     @overload
     def on_new_event(
@@ -497,8 +495,7 @@ class Notifier:
         new_token: int,
         users: Optional[Collection[Union[str, UserID]]] = None,
         rooms: Optional[StrCollection] = None,
-    ) -> None:
-        ...
+    ) -> None: ...
 
     def on_new_event(
         self,
diff --git a/synapse/push/mailer.py b/synapse/push/mailer.py
index b4bd88f30..f1ffc8115 100644
--- a/synapse/push/mailer.py
+++ b/synapse/push/mailer.py
@@ -377,12 +377,14 @@ class Mailer:
             #
             # Note that many email clients will not render the unsubscribe link
             # unless DKIM, etc. is properly setup.
-            additional_headers={
-                "List-Unsubscribe-Post": "List-Unsubscribe=One-Click",
-                "List-Unsubscribe": f"<{unsubscribe_link}>",
-            }
-            if unsubscribe_link
-            else None,
+            additional_headers=(
+                {
+                    "List-Unsubscribe-Post": "List-Unsubscribe=One-Click",
+                    "List-Unsubscribe": f"<{unsubscribe_link}>",
+                }
+                if unsubscribe_link
+                else None
+            ),
         )
 
     async def _get_room_vars(
diff --git a/synapse/replication/http/_base.py b/synapse/replication/http/_base.py
index a82ad49e0..9aa8d90bf 100644
--- a/synapse/replication/http/_base.py
+++ b/synapse/replication/http/_base.py
@@ -259,9 +259,9 @@ class ReplicationEndpoint(metaclass=abc.ABCMeta):
                 url_args.append(txn_id)
 
             if cls.METHOD == "POST":
-                request_func: Callable[
-                    ..., Awaitable[Any]
-                ] = client.post_json_get_json
+                request_func: Callable[..., Awaitable[Any]] = (
+                    client.post_json_get_json
+                )
             elif cls.METHOD == "PUT":
                 request_func = client.put_json
             elif cls.METHOD == "GET":
diff --git a/synapse/replication/tcp/external_cache.py b/synapse/replication/tcp/external_cache.py
index ce47d8035..a95771b5f 100644
--- a/synapse/replication/tcp/external_cache.py
+++ b/synapse/replication/tcp/external_cache.py
@@ -70,9 +70,9 @@ class ExternalCache:
     def __init__(self, hs: "HomeServer"):
         if hs.config.redis.redis_enabled:
-            self._redis_connection: Optional[
-                "ConnectionHandler"
-            ] = hs.get_outbound_redis_connection()
+            self._redis_connection: Optional["ConnectionHandler"] = (
+                hs.get_outbound_redis_connection()
+            )
         else:
             self._redis_connection = None
 
diff --git a/synapse/rest/admin/__init__.py b/synapse/rest/admin/__init__.py
index 07e0fb71f..6da1d7916 100644
--- a/synapse/rest/admin/__init__.py
+++ b/synapse/rest/admin/__init__.py
@@ -237,10 +237,12 @@ class PurgeHistoryStatusRestServlet(RestServlet):
             raise NotFoundError("purge id '%s' not found" % purge_id)
 
         result: JsonDict = {
-            "status": purge_task.status
-            if purge_task.status == TaskStatus.COMPLETE
-            or purge_task.status == TaskStatus.FAILED
-            else "active",
+            "status": (
+                purge_task.status
+                if purge_task.status == TaskStatus.COMPLETE
+                or purge_task.status == TaskStatus.FAILED
+                else "active"
+            ),
         }
         if purge_task.error:
             result["error"] = purge_task.error
diff --git a/synapse/rest/admin/users.py b/synapse/rest/admin/users.py
index a9645e4af..4e34e4651 100644
--- a/synapse/rest/admin/users.py
+++ b/synapse/rest/admin/users.py
@@ -1184,12 +1184,14 @@ class RateLimitRestServlet(RestServlet):
             # convert `null` to `0` for consistency
             # both values do the same in retelimit handler
             ret = {
-                "messages_per_second": 0
-                if ratelimit.messages_per_second is None
-                else ratelimit.messages_per_second,
-                "burst_count": 0
-                if ratelimit.burst_count is None
-                else ratelimit.burst_count,
+                "messages_per_second": (
+                    0
+                    if ratelimit.messages_per_second is None
+                    else ratelimit.messages_per_second
+                ),
+                "burst_count": (
+                    0 if ratelimit.burst_count is None else ratelimit.burst_count
+                ),
             }
         else:
             ret = {}
diff --git a/synapse/rest/client/account_data.py b/synapse/rest/client/account_data.py
index 0cdc4cc4f..12ffca984 100644
--- a/synapse/rest/client/account_data.py
+++ b/synapse/rest/client/account_data.py
@@ -112,9 +112,9 @@ class AccountDataServlet(RestServlet):
             self._hs.config.experimental.msc4010_push_rules_account_data
             and account_data_type == AccountDataTypes.PUSH_RULES
         ):
-            account_data: Optional[
-                JsonMapping
-            ] = await self._push_rules_handler.push_rules_for_user(requester.user)
+            account_data: Optional[JsonMapping] = (
+                await self._push_rules_handler.push_rules_for_user(requester.user)
+            )
         else:
             account_data = await self.store.get_global_account_data_by_type_for_user(
                 user_id, account_data_type
diff --git a/synapse/rest/client/sync.py b/synapse/rest/client/sync.py
index 3af2b7dfd..2b103ca6a 100644
--- a/synapse/rest/client/sync.py
+++ b/synapse/rest/client/sync.py
@@ -313,12 +313,12 @@ class SyncRestServlet(RestServlet):
 
         # https://github.com/matrix-org/matrix-doc/blob/54255851f642f84a4f1aaf7bc063eebe3d76752b/proposals/2732-olm-fallback-keys.md
         # states that this field should always be included, as long as the server supports the feature.
-        response[
-            "org.matrix.msc2732.device_unused_fallback_key_types"
-        ] = sync_result.device_unused_fallback_key_types
-        response[
-            "device_unused_fallback_key_types"
-        ] = sync_result.device_unused_fallback_key_types
+        response["org.matrix.msc2732.device_unused_fallback_key_types"] = (
+            sync_result.device_unused_fallback_key_types
+        )
+        response["device_unused_fallback_key_types"] = (
+            sync_result.device_unused_fallback_key_types
+        )
 
         if joined:
             response["rooms"][Membership.JOIN] = joined
@@ -543,9 +543,9 @@ class SyncRestServlet(RestServlet):
         if room.unread_thread_notifications:
             result["unread_thread_notifications"] = room.unread_thread_notifications
             if self._msc3773_enabled:
-                result[
-                    "org.matrix.msc3773.unread_thread_notifications"
-                ] = room.unread_thread_notifications
+                result["org.matrix.msc3773.unread_thread_notifications"] = (
+                    room.unread_thread_notifications
+                )
         result["summary"] = room.summary
         if self._msc2654_enabled:
             result["org.matrix.msc2654.unread_count"] = room.unread_count
diff --git a/synapse/rest/key/v2/remote_key_resource.py b/synapse/rest/key/v2/remote_key_resource.py
index 6afe4a7bc..dc7325fc5 100644
--- a/synapse/rest/key/v2/remote_key_resource.py
+++ b/synapse/rest/key/v2/remote_key_resource.py
@@ -191,10 +191,10 @@ class RemoteKey(RestServlet):
         server_keys: Dict[Tuple[str, str], Optional[FetchKeyResultForRemote]] = {}
         for server_name, key_ids in query.items():
             if key_ids:
-                results: Mapping[
-                    str, Optional[FetchKeyResultForRemote]
-                ] = await self.store.get_server_keys_json_for_remote(
-                    server_name, key_ids
+                results: Mapping[str, Optional[FetchKeyResultForRemote]] = (
+                    await self.store.get_server_keys_json_for_remote(
+                        server_name, key_ids
+                    )
                 )
             else:
                 results = await self.store.get_all_server_keys_json_for_remote(
diff --git a/synapse/state/__init__.py b/synapse/state/__init__.py
index 015e49ab8..72b291889 100644
--- a/synapse/state/__init__.py
+++ b/synapse/state/__init__.py
@@ -603,15 +603,15 @@ class StateResolutionHandler:
         self.resolve_linearizer = Linearizer(name="state_resolve_lock")
 
         # dict of set of event_ids -> _StateCacheEntry.
-        self._state_cache: ExpiringCache[
-            FrozenSet[int], _StateCacheEntry
-        ] = ExpiringCache(
-            cache_name="state_cache",
-            clock=self.clock,
-            max_len=100000,
-            expiry_ms=EVICTION_TIMEOUT_SECONDS * 1000,
-            iterable=True,
-            reset_expiry_on_get=True,
+        self._state_cache: ExpiringCache[FrozenSet[int], _StateCacheEntry] = (
+            ExpiringCache(
+                cache_name="state_cache",
+                clock=self.clock,
+                max_len=100000,
+                expiry_ms=EVICTION_TIMEOUT_SECONDS * 1000,
+                iterable=True,
+                reset_expiry_on_get=True,
+            )
         )
 
         #
diff --git a/synapse/state/v2.py b/synapse/state/v2.py
index 8de16db1d..da926ad14 100644
--- a/synapse/state/v2.py
+++ b/synapse/state/v2.py
@@ -52,8 +52,7 @@ class Clock(Protocol):
     # This is usually synapse.util.Clock, but it's replaced with a FakeClock in tests.
     # We only ever sleep(0) though, so that other async functions can make forward
     # progress without waiting for stateres to complete.
-    def sleep(self, duration_ms: float) -> Awaitable[None]:
-        ...
+    def sleep(self, duration_ms: float) -> Awaitable[None]: ...
 
 
 class StateResolutionStore(Protocol):
@@ -61,13 +60,11 @@ class StateResolutionStore(Protocol):
     # TestStateResolutionStore in tests.
     def get_events(
         self, event_ids: StrCollection, allow_rejected: bool = False
-    ) -> Awaitable[Dict[str, EventBase]]:
-        ...
+    ) -> Awaitable[Dict[str, EventBase]]: ...
 
     def get_auth_chain_difference(
         self, room_id: str, state_sets: List[Set[str]]
-    ) -> Awaitable[Set[str]]:
-        ...
+    ) -> Awaitable[Set[str]]: ...
 
 
 # We want to await to the reactor occasionally during state res when dealing
@@ -742,8 +739,7 @@ async def _get_event(
     event_map: Dict[str, EventBase],
     state_res_store: StateResolutionStore,
     allow_none: Literal[False] = False,
-) -> EventBase:
-    ...
+) -> EventBase: ...
 
 
 @overload
@@ -753,8 +749,7 @@ async def _get_event(
     event_map: Dict[str, EventBase],
     state_res_store: StateResolutionStore,
     allow_none: Literal[True],
-) -> Optional[EventBase]:
-    ...
+) -> Optional[EventBase]: ...
 
 
 async def _get_event(
diff --git a/synapse/storage/background_updates.py b/synapse/storage/background_updates.py
index 9df4edee3..f47329407 100644
--- a/synapse/storage/background_updates.py
+++ b/synapse/storage/background_updates.py
@@ -836,9 +836,9 @@ class BackgroundUpdater:
                 c.execute(sql)
 
         if isinstance(self.db_pool.engine, engines.PostgresEngine):
-            runner: Optional[
-                Callable[[LoggingDatabaseConnection], None]
-            ] = create_index_psql
+            runner: Optional[Callable[[LoggingDatabaseConnection], None]] = (
+                create_index_psql
+            )
         elif psql_only:
             runner = None
         else:
diff --git a/synapse/storage/controllers/persist_events.py b/synapse/storage/controllers/persist_events.py
index 69d5999c0..84699a2ee 100644
--- a/synapse/storage/controllers/persist_events.py
+++ b/synapse/storage/controllers/persist_events.py
@@ -773,9 +773,9 @@ class EventsPersistenceStorageController:
             )
 
             # Remove any events which are prev_events of any existing events.
-            existing_prevs: Collection[
-                str
-            ] = await self.persist_events_store._get_events_which_are_prevs(result)
+            existing_prevs: Collection[str] = (
+                await self.persist_events_store._get_events_which_are_prevs(result)
+            )
             result.difference_update(existing_prevs)
 
             # Finally handle the case where the new events have soft-failed prev
diff --git a/synapse/storage/database.py b/synapse/storage/database.py
index 8dc908084..d7d202f02 100644
--- a/synapse/storage/database.py
+++ b/synapse/storage/database.py
@@ -111,8 +111,7 @@ class _PoolConnection(Connection):
     A Connection from twisted.enterprise.adbapi.Connection.
     """
 
-    def reconnect(self) -> None:
-        ...
+    def reconnect(self) -> None: ...
 
 
 def make_pool(
@@ -1603,8 +1602,7 @@ class DatabasePool:
         retcols: Collection[str],
         allow_none: Literal[False] = False,
         desc: str = "simple_select_one",
-    ) -> Tuple[Any, ...]:
-        ...
+    ) -> Tuple[Any, ...]: ...
 
     @overload
     async def simple_select_one(
@@ -1614,8 +1612,7 @@ class DatabasePool:
        retcols: Collection[str],
        allow_none: Literal[True] = True,
        desc: str = "simple_select_one",
-    ) -> Optional[Tuple[Any, ...]]:
-        ...
+    ) -> Optional[Tuple[Any, ...]]: ...
 
     async def simple_select_one(
         self,
@@ -1654,8 +1651,7 @@ class DatabasePool:
         retcol: str,
         allow_none: Literal[False] = False,
         desc: str = "simple_select_one_onecol",
-    ) -> Any:
-        ...
+    ) -> Any: ...
 
     @overload
     async def simple_select_one_onecol(
@@ -1665,8 +1661,7 @@ class DatabasePool:
         retcol: str,
         allow_none: Literal[True] = True,
         desc: str = "simple_select_one_onecol",
-    ) -> Optional[Any]:
-        ...
+    ) -> Optional[Any]: ...
 
     async def simple_select_one_onecol(
         self,
@@ -1706,8 +1701,7 @@ class DatabasePool:
         keyvalues: Dict[str, Any],
         retcol: str,
         allow_none: Literal[False] = False,
-    ) -> Any:
-        ...
+    ) -> Any: ...
 
     @overload
     @classmethod
@@ -1718,8 +1712,7 @@ class DatabasePool:
         keyvalues: Dict[str, Any],
         retcol: str,
         allow_none: Literal[True] = True,
-    ) -> Optional[Any]:
-        ...
+    ) -> Optional[Any]: ...
 
     @classmethod
     def simple_select_one_onecol_txn(
@@ -2501,8 +2494,7 @@ def make_tuple_in_list_sql_clause(
     database_engine: BaseDatabaseEngine,
     columns: Tuple[str, str],
     iterable: Collection[Tuple[Any, Any]],
-) -> Tuple[str, list]:
-    ...
+) -> Tuple[str, list]: ...
 
 
 def make_tuple_in_list_sql_clause(
diff --git a/synapse/storage/databases/main/devices.py b/synapse/storage/databases/main/devices.py
index 3e011f334..8dbcb3f5a 100644
--- a/synapse/storage/databases/main/devices.py
+++ b/synapse/storage/databases/main/devices.py
@@ -1701,9 +1701,9 @@ class DeviceStore(DeviceWorkerStore, DeviceBackgroundUpdateStore):
 
         # Map of (user_id, device_id) -> bool. If there is an entry that implies
         # the device exists.
-        self.device_id_exists_cache: LruCache[
-            Tuple[str, str], Literal[True]
-        ] = LruCache(cache_name="device_id_exists", max_size=10000)
+        self.device_id_exists_cache: LruCache[Tuple[str, str], Literal[True]] = (
+            LruCache(cache_name="device_id_exists", max_size=10000)
+        )
 
     async def store_device(
         self,
diff --git a/synapse/storage/databases/main/end_to_end_keys.py b/synapse/storage/databases/main/end_to_end_keys.py
index c96371a0d..b219ea70e 100644
--- a/synapse/storage/databases/main/end_to_end_keys.py
+++ b/synapse/storage/databases/main/end_to_end_keys.py
@@ -256,8 +256,7 @@ class EndToEndKeyWorkerStore(EndToEndKeyBackgroundStore, CacheInvalidationWorker
         self,
         query_list: Collection[Tuple[str, Optional[str]]],
         include_all_devices: Literal[False] = False,
-    ) -> Dict[str, Dict[str, DeviceKeyLookupResult]]:
-        ...
+    ) -> Dict[str, Dict[str, DeviceKeyLookupResult]]: ...
 
     @overload
     async def get_e2e_device_keys_and_signatures(
@@ -265,8 +264,7 @@ class EndToEndKeyWorkerStore(EndToEndKeyBackgroundStore, CacheInvalidationWorker
         query_list: Collection[Tuple[str, Optional[str]]],
         include_all_devices: bool = False,
         include_deleted_devices: Literal[False] = False,
-    ) -> Dict[str, Dict[str, DeviceKeyLookupResult]]:
-        ...
+    ) -> Dict[str, Dict[str, DeviceKeyLookupResult]]: ...
 
     @overload
     async def get_e2e_device_keys_and_signatures(
@@ -274,8 +272,7 @@ class EndToEndKeyWorkerStore(EndToEndKeyBackgroundStore, CacheInvalidationWorker
         query_list: Collection[Tuple[str, Optional[str]]],
         include_all_devices: Literal[True],
         include_deleted_devices: Literal[True],
-    ) -> Dict[str, Dict[str, Optional[DeviceKeyLookupResult]]]:
-        ...
+    ) -> Dict[str, Dict[str, Optional[DeviceKeyLookupResult]]]: ...
 
     @trace
     @cancellable
diff --git a/synapse/storage/databases/main/events.py b/synapse/storage/databases/main/events.py
index d5942a10b..a6fda3f43 100644
--- a/synapse/storage/databases/main/events.py
+++ b/synapse/storage/databases/main/events.py
@@ -1292,9 +1292,9 @@ class PersistEventsStore:
         Returns:
             filtered list
         """
-        new_events_and_contexts: OrderedDict[
-            str, Tuple[EventBase, EventContext]
-        ] = OrderedDict()
+        new_events_and_contexts: OrderedDict[str, Tuple[EventBase, EventContext]] = (
+            OrderedDict()
+        )
         for event, context in events_and_contexts:
             prev_event_context = new_events_and_contexts.get(event.event_id)
             if prev_event_context:
diff --git a/synapse/storage/databases/main/events_worker.py b/synapse/storage/databases/main/events_worker.py
index 9c3775bb7..4c09567a7 100644
--- a/synapse/storage/databases/main/events_worker.py
+++ b/synapse/storage/databases/main/events_worker.py
@@ -263,13 +263,13 @@ class EventsWorkerStore(SQLBaseStore):
             5 * 60 * 1000,
         )
 
-        self._get_event_cache: AsyncLruCache[
-            Tuple[str], EventCacheEntry
-        ] = AsyncLruCache(
-            cache_name="*getEvent*",
-            max_size=hs.config.caches.event_cache_size,
-            # `extra_index_cb` Returns a tuple as that is the key type
-            extra_index_cb=lambda _, v: (v.event.room_id,),
+        self._get_event_cache: AsyncLruCache[Tuple[str], EventCacheEntry] = (
+            AsyncLruCache(
+                cache_name="*getEvent*",
+                max_size=hs.config.caches.event_cache_size,
+                # `extra_index_cb` Returns a tuple as that is the key type
+                extra_index_cb=lambda _, v: (v.event.room_id,),
+            )
         )
 
         # Map from event ID to a deferred that will result in a map from event
@@ -459,8 +459,7 @@ class EventsWorkerStore(SQLBaseStore):
         allow_rejected: bool = ...,
         allow_none: Literal[False] = ...,
         check_room_id: Optional[str] = ...,
-    ) -> EventBase:
-        ...
+    ) -> EventBase: ...
 
     @overload
     async def get_event(
@@ -471,8 +470,7 @@ class EventsWorkerStore(SQLBaseStore):
         allow_rejected: bool = ...,
         allow_none: Literal[True] = ...,
         check_room_id: Optional[str] = ...,
-    ) -> Optional[EventBase]:
-        ...
+    ) -> Optional[EventBase]: ...
 
     @cancellable
     async def get_event(
@@ -800,9 +798,9 @@ class EventsWorkerStore(SQLBaseStore):
         # to all the events we pulled from the DB (this will result in this
         # function returning more events than requested, but that can happen
         # already due to `_get_events_from_db`).
-        fetching_deferred: ObservableDeferred[
-            Dict[str, EventCacheEntry]
-        ] = ObservableDeferred(defer.Deferred(), consumeErrors=True)
+        fetching_deferred: ObservableDeferred[Dict[str, EventCacheEntry]] = (
+            ObservableDeferred(defer.Deferred(), consumeErrors=True)
+        )
         for event_id in missing_events_ids:
             self._current_event_fetches[event_id] = fetching_deferred
 
@@ -1871,9 +1869,9 @@ class EventsWorkerStore(SQLBaseStore):
             " LIMIT ?"
         )
         txn.execute(sql, (-last_id, -current_id, instance_name, limit))
-        new_event_updates: List[
-            Tuple[int, Tuple[str, str, str, str, str, str]]
-        ] = []
+        new_event_updates: List[Tuple[int, Tuple[str, str, str, str, str, str]]] = (
+            []
+        )
         row: Tuple[int, str, str, str, str, str, str]
         # Type safety: iterating over `txn` yields `Tuple`, i.e.
         # `Tuple[Any, ...]` of arbitrary length. Mypy detects assigning a
diff --git a/synapse/storage/databases/main/lock.py b/synapse/storage/databases/main/lock.py
index 0794cc6d2..8277ad8c3 100644
--- a/synapse/storage/databases/main/lock.py
+++ b/synapse/storage/databases/main/lock.py
@@ -79,9 +79,9 @@ class LockStore(SQLBaseStore):
 
         # A map from `(lock_name, lock_key)` to lock that we think we
         # currently hold.
-        self._live_lock_tokens: WeakValueDictionary[
-            Tuple[str, str], Lock
-        ] = WeakValueDictionary()
+        self._live_lock_tokens: WeakValueDictionary[Tuple[str, str], Lock] = (
+            WeakValueDictionary()
+        )
 
         # A map from `(lock_name, lock_key, token)` to read/write lock that we
         # think we currently hold. For a given lock_name/lock_key, there can be
diff --git a/synapse/storage/databases/main/media_repository.py b/synapse/storage/databases/main/media_repository.py
index b5ed1bf9c..6128332af 100644
--- a/synapse/storage/databases/main/media_repository.py
+++ b/synapse/storage/databases/main/media_repository.py
@@ -158,9 +158,9 @@ class MediaRepositoryBackgroundUpdateStore(SQLBaseStore):
         )
 
         if hs.config.media.can_load_media_repo:
-            self.unused_expiration_time: Optional[
-                int
-            ] = hs.config.media.unused_expiration_time
+            self.unused_expiration_time: Optional[int] = (
+                hs.config.media.unused_expiration_time
+            )
         else:
             self.unused_expiration_time = None
 
diff --git a/synapse/storage/databases/main/receipts.py b/synapse/storage/databases/main/receipts.py
index 8a426d287..d513c4253 100644
--- a/synapse/storage/databases/main/receipts.py
+++ b/synapse/storage/databases/main/receipts.py
@@ -394,9 +394,9 @@ class ReceiptsWorkerStore(SQLBaseStore):
 
         content: JsonDict = {}
         for receipt_type, user_id, event_id, data in rows:
-            content.setdefault(event_id, {}).setdefault(receipt_type, {})[
-                user_id
-            ] = db_to_json(data)
+            content.setdefault(event_id, {}).setdefault(receipt_type, {})[user_id] = (
+                db_to_json(data)
+            )
 
         return [{"type": EduTypes.RECEIPT, "room_id": room_id, "content": content}]
 
@@ -483,9 +483,9 @@ class ReceiptsWorkerStore(SQLBaseStore):
             if user_id in receipt_type_dict:  # existing receipt
                 # is the existing receipt threaded and we are currently processing an unthreaded one?
                 if "thread_id" in receipt_type_dict[user_id] and not thread_id:
-                    receipt_type_dict[
-                        user_id
-                    ] = receipt_data  # replace with unthreaded one
+                    receipt_type_dict[user_id] = (
+                        receipt_data  # replace with unthreaded one
+                    )
             else:  # receipt does not exist, just set it
                 receipt_type_dict[user_id] = receipt_data
                 if thread_id:
diff --git a/synapse/storage/databases/main/state.py b/synapse/storage/databases/main/state.py
index 3220d515d..b2a67aff8 100644
--- a/synapse/storage/databases/main/state.py
+++ b/synapse/storage/databases/main/state.py
@@ -768,12 +768,10 @@ class StateMapWrapper(Dict[StateKey, str]):
         return super().__getitem__(key)
 
     @overload
-    def get(self, key: Tuple[str, str]) -> Optional[str]:
-        ...
+    def get(self, key: Tuple[str, str]) -> Optional[str]: ...
 
     @overload
-    def get(self, key: Tuple[str, str], default: Union[str, _T]) -> Union[str, _T]:
-        ...
+    def get(self, key: Tuple[str, str], default: Union[str, _T]) -> Union[str, _T]: ...
 
     def get(
         self, key: StateKey, default: Union[str, _T, None] = None
diff --git a/synapse/storage/databases/main/stream.py b/synapse/storage/databases/main/stream.py
index 19041cc35..7ab6003f6 100644
--- a/synapse/storage/databases/main/stream.py
+++ b/synapse/storage/databases/main/stream.py
@@ -988,8 +988,7 @@ class StreamWorkerStore(EventsWorkerStore, SQLBaseStore):
         txn: LoggingTransaction,
         event_id: str,
         allow_none: Literal[False] = False,
-    ) -> int:
-        ...
+    ) -> int: ...
 
     @overload
     def get_stream_id_for_event_txn(
@@ -997,8 +996,7 @@ class StreamWorkerStore(EventsWorkerStore, SQLBaseStore):
         txn: LoggingTransaction,
         event_id: str,
         allow_none: bool = False,
-    ) -> Optional[int]:
-        ...
+    ) -> Optional[int]: ...
 
     def get_stream_id_for_event_txn(
         self,
@@ -1476,12 +1474,12 @@ class StreamWorkerStore(EventsWorkerStore, SQLBaseStore):
             _EventDictReturn(event_id, topological_ordering, stream_ordering)
             for event_id, instance_name, topological_ordering, stream_ordering in txn
             if _filter_results(
-                lower_token=to_token
-                if direction == Direction.BACKWARDS
-                else from_token,
-                upper_token=from_token
-                if direction == Direction.BACKWARDS
-                else to_token,
+                lower_token=(
+                    to_token if direction == Direction.BACKWARDS else from_token
+                ),
+                upper_token=(
+                    from_token if direction == Direction.BACKWARDS else to_token
+                ),
                 instance_name=instance_name,
                 topological_ordering=topological_ordering,
                 stream_ordering=stream_ordering,
diff --git a/synapse/storage/databases/main/task_scheduler.py b/synapse/storage/databases/main/task_scheduler.py
index 7b9561643..4956870b1 100644
--- a/synapse/storage/databases/main/task_scheduler.py
+++ b/synapse/storage/databases/main/task_scheduler.py
@@ -136,12 +136,12 @@ class TaskSchedulerWorkerStore(SQLBaseStore):
                 "status": task.status,
                 "timestamp": task.timestamp,
                 "resource_id": task.resource_id,
-                "params": None
-                if task.params is None
-                else json_encoder.encode(task.params),
-                "result": None
-                if task.result is None
-                else json_encoder.encode(task.result),
+                "params": (
+                    None if task.params is None else json_encoder.encode(task.params)
+                ),
+                "result": (
+                    None if task.result is None else json_encoder.encode(task.result)
+                ),
                 "error": task.error,
             },
             desc="insert_scheduled_task",
diff --git a/synapse/storage/databases/main/user_directory.py b/synapse/storage/databases/main/user_directory.py
index a1c4b8c6c..0513e7dc0 100644
--- a/synapse/storage/databases/main/user_directory.py
+++ b/synapse/storage/databases/main/user_directory.py
@@ -745,9 +745,11 @@ class UserDirectoryBackgroundUpdateStore(StateDeltasStore):
                     p.user_id,
                    get_localpart_from_id(p.user_id),
                    get_domain_from_id(p.user_id),
-                    _filter_text_for_index(p.display_name)
-                    if p.display_name
-                    else None,
+                    (
+                        _filter_text_for_index(p.display_name)
+                        if p.display_name
+                        else None
+                    ),
                 )
                 for p in profiles
             ],
diff --git a/synapse/storage/databases/state/store.py b/synapse/storage/databases/state/store.py
index e64495ba8..d4ac74c1e 100644
--- a/synapse/storage/databases/state/store.py
+++ b/synapse/storage/databases/state/store.py
@@ -120,11 +120,11 @@ class StateGroupDataStore(StateBackgroundUpdateStore, SQLBaseStore):
             # TODO: this hasn't been tuned yet
             50000,
        )
-        self._state_group_members_cache: DictionaryCache[
-            int, StateKey, str
-        ] = DictionaryCache(
-            "*stateGroupMembersCache*",
-            500000,
+        self._state_group_members_cache: DictionaryCache[int, StateKey, str] = (
+            DictionaryCache(
+                "*stateGroupMembersCache*",
+                500000,
+            )
         )
 
         def get_max_state_group_txn(txn: Cursor) -> int:
diff --git a/synapse/storage/engines/_base.py b/synapse/storage/engines/_base.py
index 8c29236b5..ad222e7e2 100644
--- a/synapse/storage/engines/_base.py
+++ b/synapse/storage/engines/_base.py
@@ -48,8 +48,7 @@ class BaseDatabaseEngine(Generic[ConnectionType, CursorType], metaclass=abc.ABCM
 
     @property
     @abc.abstractmethod
-    def single_threaded(self) -> bool:
-        ...
+    def single_threaded(self) -> bool: ...
 
     @property
     @abc.abstractmethod
@@ -68,8 +67,7 @@ class BaseDatabaseEngine(Generic[ConnectionType, CursorType], metaclass=abc.ABCM
     @abc.abstractmethod
     def check_database(
         self, db_conn: ConnectionType, allow_outdated_version: bool = False
-    ) -> None:
-        ...
+    ) -> None: ...
@abc.abstractmethod def check_new_database(self, txn: CursorType) -> None: @@ -79,27 +77,22 @@ class BaseDatabaseEngine(Generic[ConnectionType, CursorType], metaclass=abc.ABCM ... @abc.abstractmethod - def convert_param_style(self, sql: str) -> str: - ... + def convert_param_style(self, sql: str) -> str: ... # This method would ideally take a plain ConnectionType, but it seems that # the Sqlite engine expects to use LoggingDatabaseConnection.cursor # instead of sqlite3.Connection.cursor: only the former takes a txn_name. @abc.abstractmethod - def on_new_connection(self, db_conn: "LoggingDatabaseConnection") -> None: - ... + def on_new_connection(self, db_conn: "LoggingDatabaseConnection") -> None: ... @abc.abstractmethod - def is_deadlock(self, error: Exception) -> bool: - ... + def is_deadlock(self, error: Exception) -> bool: ... @abc.abstractmethod - def is_connection_closed(self, conn: ConnectionType) -> bool: - ... + def is_connection_closed(self, conn: ConnectionType) -> bool: ... @abc.abstractmethod - def lock_table(self, txn: Cursor, table: str) -> None: - ... + def lock_table(self, txn: Cursor, table: str) -> None: ... @property @abc.abstractmethod diff --git a/synapse/storage/types.py b/synapse/storage/types.py index b4e0a8f57..74f60cc59 100644 --- a/synapse/storage/types.py +++ b/synapse/storage/types.py @@ -42,20 +42,17 @@ SQLQueryParameters = Union[Sequence[Any], Mapping[str, Any]] class Cursor(Protocol): - def execute(self, sql: str, parameters: SQLQueryParameters = ...) -> Any: - ... + def execute(self, sql: str, parameters: SQLQueryParameters = ...) -> Any: ... - def executemany(self, sql: str, parameters: Sequence[SQLQueryParameters]) -> Any: - ... + def executemany( + self, sql: str, parameters: Sequence[SQLQueryParameters] + ) -> Any: ... - def fetchone(self) -> Optional[Tuple]: - ... + def fetchone(self) -> Optional[Tuple]: ... - def fetchmany(self, size: Optional[int] = ...) -> List[Tuple]: - ... + def fetchmany(self, size: Optional[int] = ...) -> List[Tuple]: ... - def fetchall(self) -> List[Tuple]: - ... + def fetchall(self) -> List[Tuple]: ... @property def description( @@ -70,36 +67,28 @@ class Cursor(Protocol): def rowcount(self) -> int: return 0 - def __iter__(self) -> Iterator[Tuple]: - ... + def __iter__(self) -> Iterator[Tuple]: ... - def close(self) -> None: - ... + def close(self) -> None: ... class Connection(Protocol): - def cursor(self) -> Cursor: - ... + def cursor(self) -> Cursor: ... - def close(self) -> None: - ... + def close(self) -> None: ... - def commit(self) -> None: - ... + def commit(self) -> None: ... - def rollback(self) -> None: - ... + def rollback(self) -> None: ... - def __enter__(self) -> "Connection": - ... + def __enter__(self) -> "Connection": ... def __exit__( self, exc_type: Optional[Type[BaseException]], exc_value: Optional[BaseException], traceback: Optional[TracebackType], - ) -> Optional[bool]: - ... + ) -> Optional[bool]: ... class DBAPI2Module(Protocol): @@ -129,24 +118,20 @@ class DBAPI2Module(Protocol): # explain why this is necessary for safety. TL;DR: we shouldn't be able to write # to `x`, only read from it. See also https://github.com/python/mypy/issues/6002 . @property - def Warning(self) -> Type[Exception]: - ... + def Warning(self) -> Type[Exception]: ... @property - def Error(self) -> Type[Exception]: - ... + def Error(self) -> Type[Exception]: ... # Errors are divided into `InterfaceError`s (something went wrong in the database # driver) and `DatabaseError`s (something went wrong in the database). 
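The storage/types.py hunks lean on typing.Protocol for structural typing: sqlite3 and psycopg2 objects satisfy Cursor and Connection without inheriting from them. A reduced sketch of the idea — MiniCursor and FakeCursor are invented for illustration:

from typing import Any, List, Protocol, Tuple

class MiniCursor(Protocol):
    # One-line `...` bodies are the Black 24 style applied throughout this diff.
    def execute(self, sql: str) -> Any: ...
    def fetchall(self) -> List[Tuple]: ...

class FakeCursor:
    """Satisfies MiniCursor structurally, with no inheritance."""

    def __init__(self) -> None:
        self._rows: List[Tuple] = []

    def execute(self, sql: str) -> Any:
        self._rows = [(sql,)]  # pretend every query returns itself

    def fetchall(self) -> List[Tuple]:
        return self._rows

def run(cursor: MiniCursor, sql: str) -> List[Tuple]:
    cursor.execute(sql)
    return cursor.fetchall()

print(run(FakeCursor(), "SELECT 1"))  # type-checks: the shape matches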
These are # both subclasses of `Error`, but we can't currently express this in type # annotations due to https://github.com/python/mypy/issues/8397 @property - def InterfaceError(self) -> Type[Exception]: - ... + def InterfaceError(self) -> Type[Exception]: ... @property - def DatabaseError(self) -> Type[Exception]: - ... + def DatabaseError(self) -> Type[Exception]: ... # Everything below is a subclass of `DatabaseError`. @@ -155,8 +140,7 @@ class DBAPI2Module(Protocol): # - An invalid date time was provided. # - A string contained a null code point. @property - def DataError(self) -> Type[Exception]: - ... + def DataError(self) -> Type[Exception]: ... # Roughly: something went wrong in the database, but it's not within the application # programmer's control. Examples: @@ -167,21 +151,18 @@ class DBAPI2Module(Protocol): # - The database ran out of resources, such as storage, memory, connections, etc. # - The database encountered an error from the operating system. @property - def OperationalError(self) -> Type[Exception]: - ... + def OperationalError(self) -> Type[Exception]: ... # Roughly: we've given the database data which breaks a rule we asked it to enforce. # Examples: # - Stop, criminal scum! You violated the foreign key constraint # - Also check constraints, non-null constraints, etc. @property - def IntegrityError(self) -> Type[Exception]: - ... + def IntegrityError(self) -> Type[Exception]: ... # Roughly: something went wrong within the database server itself. @property - def InternalError(self) -> Type[Exception]: - ... + def InternalError(self) -> Type[Exception]: ... # Roughly: the application did something silly that needs to be fixed. Examples: # - We don't have permissions to do something. @@ -189,13 +170,11 @@ class DBAPI2Module(Protocol): # - We tried to use a reserved name. # - We referred to a column that doesn't exist. @property - def ProgrammingError(self) -> Type[Exception]: - ... + def ProgrammingError(self) -> Type[Exception]: ... # Roughly: we've tried to do something that this database doesn't support. @property - def NotSupportedError(self) -> Type[Exception]: - ... + def NotSupportedError(self) -> Type[Exception]: ... # We originally wrote # def connect(self, *args, **kwargs) -> Connection: ... @@ -204,8 +183,7 @@ class DBAPI2Module(Protocol): # psycopg2.connect doesn't have a mandatory positional argument. Instead, we use # the following slightly unusual workaround. @property - def connect(self) -> Callable[..., Connection]: - ... + def connect(self) -> Callable[..., Connection]: ... __all__ = ["Cursor", "Connection", "DBAPI2Module"] diff --git a/synapse/synapse_rust/events.pyi b/synapse/synapse_rust/events.pyi index 423ede596..6ec07e2d7 100644 --- a/synapse/synapse_rust/events.pyi +++ b/synapse/synapse_rust/events.pyi @@ -57,6 +57,7 @@ class EventInternalMetadata: (Added in synapse 0.99.0, so may be unreliable for events received before that) """ ... + def get_send_on_behalf_of(self) -> Optional[str]: """Whether this server should send the event on behalf of another server. This is used by the federation "send_join" API to forward the initial join @@ -65,6 +66,7 @@ class EventInternalMetadata: returns a str with the name of the server this event is sent on behalf of. """ ... + def need_to_check_redaction(self) -> bool: """Whether the redaction event needs to be rechecked when fetching from the database. @@ -76,6 +78,7 @@ class EventInternalMetadata: due to auth rules, then this will always return false. """ ... 
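The DBAPI2Module hunks keep the read-only @property pattern the comments describe: declaring `Error: Type[Exception]` as a plain protocol attribute would let callers assign to it (and makes the member invariant), whereas a property getter keeps it read-only, per the mypy issue cited above. A compact sketch — FakeDriver and FakeDriverError are hypothetical:

from typing import Callable, Type

class FakeDriverError(Exception):
    pass

class FakeDriver:
    """Duck-types the read-only members a DBAPI2Module-style protocol wants."""

    @property
    def Error(self) -> Type[Exception]:
        # Read-only: `driver.Error = ...` now raises AttributeError.
        return FakeDriverError

    @property
    def connect(self) -> Callable[..., str]:
        # Same trick as the `connect` member above: exposing the callable
        # through a property avoids pinning down its exact signature.
        return lambda dsn: f"connection to {dsn}"

driver = FakeDriver()
try:
    raise driver.Error("boom")
except driver.Error as e:
    print("caught:", e)
print(driver.connect("postgres://localhost"))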
+ def is_soft_failed(self) -> bool: """Whether the event has been soft failed. @@ -86,6 +89,7 @@ class EventInternalMetadata: therefore not to current state). """ ... + def should_proactively_send(self) -> bool: """Whether the event, if ours, should be sent to other clients and servers. @@ -94,6 +98,7 @@ class EventInternalMetadata: can still explicitly fetch the event. """ ... + def is_redacted(self) -> bool: """Whether the event has been redacted. @@ -101,6 +106,7 @@ class EventInternalMetadata: marked as redacted without needing to make another database call. """ ... + def is_notifiable(self) -> bool: """Whether this event can trigger a push notification""" ... diff --git a/synapse/types/__init__.py b/synapse/types/__init__.py index d3ee71837..a88982a04 100644 --- a/synapse/types/__init__.py +++ b/synapse/types/__init__.py @@ -976,12 +976,12 @@ class StreamToken: return attr.evolve(self, **{key.value: new_value}) @overload - def get_field(self, key: Literal[StreamKeyType.ROOM]) -> RoomStreamToken: - ... + def get_field(self, key: Literal[StreamKeyType.ROOM]) -> RoomStreamToken: ... @overload - def get_field(self, key: Literal[StreamKeyType.RECEIPT]) -> MultiWriterStreamToken: - ... + def get_field( + self, key: Literal[StreamKeyType.RECEIPT] + ) -> MultiWriterStreamToken: ... @overload def get_field( @@ -995,14 +995,12 @@ class StreamToken: StreamKeyType.TYPING, StreamKeyType.UN_PARTIAL_STATED_ROOMS, ], - ) -> int: - ... + ) -> int: ... @overload def get_field( self, key: StreamKeyType - ) -> Union[int, RoomStreamToken, MultiWriterStreamToken]: - ... + ) -> Union[int, RoomStreamToken, MultiWriterStreamToken]: ... def get_field( self, key: StreamKeyType diff --git a/synapse/util/async_helpers.py b/synapse/util/async_helpers.py index 914d4fd74..d50b5dd34 100644 --- a/synapse/util/async_helpers.py +++ b/synapse/util/async_helpers.py @@ -357,24 +357,21 @@ T4 = TypeVar("T4") @overload def gather_results( deferredList: Tuple[()], consumeErrors: bool = ... -) -> "defer.Deferred[Tuple[()]]": - ... +) -> "defer.Deferred[Tuple[()]]": ... @overload def gather_results( deferredList: Tuple["defer.Deferred[T1]"], consumeErrors: bool = ..., -) -> "defer.Deferred[Tuple[T1]]": - ... +) -> "defer.Deferred[Tuple[T1]]": ... @overload def gather_results( deferredList: Tuple["defer.Deferred[T1]", "defer.Deferred[T2]"], consumeErrors: bool = ..., -) -> "defer.Deferred[Tuple[T1, T2]]": - ... +) -> "defer.Deferred[Tuple[T1, T2]]": ... @overload @@ -383,8 +380,7 @@ def gather_results( "defer.Deferred[T1]", "defer.Deferred[T2]", "defer.Deferred[T3]" ], consumeErrors: bool = ..., -) -> "defer.Deferred[Tuple[T1, T2, T3]]": - ... +) -> "defer.Deferred[Tuple[T1, T2, T3]]": ... @overload @@ -396,8 +392,7 @@ def gather_results( "defer.Deferred[T4]", ], consumeErrors: bool = ..., -) -> "defer.Deferred[Tuple[T1, T2, T3, T4]]": - ... +) -> "defer.Deferred[Tuple[T1, T2, T3, T4]]": ... def gather_results( # type: ignore[misc] @@ -782,18 +777,15 @@ def stop_cancellation(deferred: "defer.Deferred[T]") -> "defer.Deferred[T]": @overload -def delay_cancellation(awaitable: "defer.Deferred[T]") -> "defer.Deferred[T]": - ... +def delay_cancellation(awaitable: "defer.Deferred[T]") -> "defer.Deferred[T]": ... @overload -def delay_cancellation(awaitable: Coroutine[Any, Any, T]) -> "defer.Deferred[T]": - ... +def delay_cancellation(awaitable: Coroutine[Any, Any, T]) -> "defer.Deferred[T]": ... @overload -def delay_cancellation(awaitable: Awaitable[T]) -> Awaitable[T]: - ... 
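StreamToken.get_field illustrates overloads driven by Literal types: the checker narrows the return type from the key, while one runtime implementation (returning the Union, as in the final get_field above) serves every case. A self-contained sketch with invented keys:

from typing import Dict, Literal, Union, overload

@overload
def get_field(key: Literal["room"]) -> str: ...

@overload
def get_field(key: Literal["receipt"]) -> int: ...

def get_field(key: str) -> Union[str, int]:
    # A single runtime implementation backs every overload stub.
    fields: Dict[str, Union[str, int]] = {"room": "s123_456", "receipt": 789}
    return fields[key]

token: str = get_field("room")     # the checker narrows this to str
count: int = get_field("receipt")  # ...and this to int
print(token, count)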
+def delay_cancellation(awaitable: Awaitable[T]) -> Awaitable[T]: ... def delay_cancellation(awaitable: Awaitable[T]) -> Awaitable[T]: diff --git a/synapse/util/caches/expiringcache.py b/synapse/util/caches/expiringcache.py index a52ba59a3..462016f82 100644 --- a/synapse/util/caches/expiringcache.py +++ b/synapse/util/caches/expiringcache.py @@ -152,12 +152,10 @@ class ExpiringCache(Generic[KT, VT]): return key in self._cache @overload - def get(self, key: KT, default: Literal[None] = None) -> Optional[VT]: - ... + def get(self, key: KT, default: Literal[None] = None) -> Optional[VT]: ... @overload - def get(self, key: KT, default: T) -> Union[VT, T]: - ... + def get(self, key: KT, default: T) -> Union[VT, T]: ... def get(self, key: KT, default: Optional[T] = None) -> Union[VT, Optional[T]]: try: diff --git a/synapse/util/caches/lrucache.py b/synapse/util/caches/lrucache.py index a1b4f5b6a..481a1a621 100644 --- a/synapse/util/caches/lrucache.py +++ b/synapse/util/caches/lrucache.py @@ -580,8 +580,7 @@ class LruCache(Generic[KT, VT]): callbacks: Collection[Callable[[], None]] = ..., update_metrics: bool = ..., update_last_access: bool = ..., - ) -> Optional[VT]: - ... + ) -> Optional[VT]: ... @overload def cache_get( @@ -590,8 +589,7 @@ class LruCache(Generic[KT, VT]): callbacks: Collection[Callable[[], None]] = ..., update_metrics: bool = ..., update_last_access: bool = ..., - ) -> Union[T, VT]: - ... + ) -> Union[T, VT]: ... @synchronized def cache_get( @@ -634,16 +632,14 @@ class LruCache(Generic[KT, VT]): key: tuple, default: Literal[None] = None, update_metrics: bool = True, - ) -> Union[None, Iterable[Tuple[KT, VT]]]: - ... + ) -> Union[None, Iterable[Tuple[KT, VT]]]: ... @overload def cache_get_multi( key: tuple, default: T, update_metrics: bool = True, - ) -> Union[T, Iterable[Tuple[KT, VT]]]: - ... + ) -> Union[T, Iterable[Tuple[KT, VT]]]: ... @synchronized def cache_get_multi( @@ -728,12 +724,10 @@ class LruCache(Generic[KT, VT]): return value @overload - def cache_pop(key: KT, default: Literal[None] = None) -> Optional[VT]: - ... + def cache_pop(key: KT, default: Literal[None] = None) -> Optional[VT]: ... @overload - def cache_pop(key: KT, default: T) -> Union[T, VT]: - ... + def cache_pop(key: KT, default: T) -> Union[T, VT]: ... @synchronized def cache_pop(key: KT, default: Optional[T] = None) -> Union[None, T, VT]: diff --git a/synapse/util/iterutils.py b/synapse/util/iterutils.py index 082ad8ced..b73f690b8 100644 --- a/synapse/util/iterutils.py +++ b/synapse/util/iterutils.py @@ -50,8 +50,7 @@ class _SelfSlice(Sized, Protocol): returned. """ - def __getitem__(self: S, i: slice) -> S: - ... + def __getitem__(self: S, i: slice) -> S: ... 
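The ExpiringCache and LruCache hunks all use the classic dict.get-style overload pair: a Literal[None] default yields Optional[VT], any other default yields Union[VT, T]. A minimal generic sketch of that pattern (TinyCache is invented for the example):

from typing import Dict, Generic, Literal, Optional, TypeVar, Union, overload

KT = TypeVar("KT")
VT = TypeVar("VT")
T = TypeVar("T")

class TinyCache(Generic[KT, VT]):
    def __init__(self) -> None:
        self._data: Dict[KT, VT] = {}

    def set(self, key: KT, value: VT) -> None:
        self._data[key] = value

    @overload
    def get(self, key: KT, default: Literal[None] = None) -> Optional[VT]: ...

    @overload
    def get(self, key: KT, default: T) -> Union[VT, T]: ...

    def get(self, key: KT, default: Optional[T] = None) -> Union[VT, Optional[T]]:
        # Mirrors ExpiringCache.get above: EAFP lookup with a typed default.
        try:
            return self._data[key]
        except KeyError:
            return default

cache: TinyCache[str, int] = TinyCache()
cache.set("hits", 3)
print(cache.get("hits"), cache.get("misses", 0))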
def batch_iter(iterable: Iterable[T], size: int) -> Iterator[Tuple[T, ...]]: diff --git a/synapse/util/ratelimitutils.py b/synapse/util/ratelimitutils.py index dc9bddb00..8ead72bb7 100644 --- a/synapse/util/ratelimitutils.py +++ b/synapse/util/ratelimitutils.py @@ -177,9 +177,9 @@ class FederationRateLimiter: clock=clock, config=config, metrics_name=metrics_name ) - self.ratelimiters: DefaultDict[ - str, "_PerHostRatelimiter" - ] = collections.defaultdict(new_limiter) + self.ratelimiters: DefaultDict[str, "_PerHostRatelimiter"] = ( + collections.defaultdict(new_limiter) + ) with _rate_limiter_instances_lock: _rate_limiter_instances.add(self) diff --git a/synapse/visibility.py b/synapse/visibility.py index e58f649aa..d1d478129 100644 --- a/synapse/visibility.py +++ b/synapse/visibility.py @@ -129,9 +129,9 @@ async def filter_events_for_client( retention_policies: Dict[str, RetentionPolicy] = {} for room_id in room_ids: - retention_policies[ - room_id - ] = await storage.main.get_retention_policy_for_room(room_id) + retention_policies[room_id] = ( + await storage.main.get_retention_policy_for_room(room_id) + ) def allowed(event: EventBase) -> Optional[EventBase]: return _check_client_allowed_to_see_event( diff --git a/tests/replication/_base.py b/tests/replication/_base.py index d2220f819..8437da1cd 100644 --- a/tests/replication/_base.py +++ b/tests/replication/_base.py @@ -495,9 +495,9 @@ class FakeRedisPubSubServer: """A fake Redis server for pub/sub.""" def __init__(self) -> None: - self._subscribers_by_channel: Dict[ - bytes, Set["FakeRedisPubSubProtocol"] - ] = defaultdict(set) + self._subscribers_by_channel: Dict[bytes, Set["FakeRedisPubSubProtocol"]] = ( + defaultdict(set) + ) def add_subscriber(self, conn: "FakeRedisPubSubProtocol", channel: bytes) -> None: """A connection has called SUBSCRIBE""" diff --git a/tests/rest/client/test_rooms.py b/tests/rest/client/test_rooms.py index b11a73e92..d2f2ded48 100644 --- a/tests/rest/client/test_rooms.py +++ b/tests/rest/client/test_rooms.py @@ -1222,9 +1222,9 @@ class RoomJoinTestCase(RoomBase): """ # Register a dummy callback. Make it allow all room joins for now. - return_value: Union[ - Literal["NOT_SPAM"], Tuple[Codes, dict], Codes - ] = synapse.module_api.NOT_SPAM + return_value: Union[Literal["NOT_SPAM"], Tuple[Codes, dict], Codes] = ( + synapse.module_api.NOT_SPAM + ) async def user_may_join_room( userid: str, @@ -1664,9 +1664,9 @@ class RoomMessagesTestCase(RoomBase): expected_fields: dict, ) -> None: class SpamCheck: - mock_return_value: Union[ - str, bool, Codes, Tuple[Codes, JsonDict], bool - ] = "NOT_SPAM" + mock_return_value: Union[str, bool, Codes, Tuple[Codes, JsonDict], bool] = ( + "NOT_SPAM" + ) mock_content: Optional[JsonDict] = None async def check_event_for_spam( diff --git a/tests/rest/client/utils.py b/tests/rest/client/utils.py index 10cfe22d8..daa68d78b 100644 --- a/tests/rest/client/utils.py +++ b/tests/rest/client/utils.py @@ -87,8 +87,7 @@ class RestHelper: expect_code: Literal[200] = ..., extra_content: Optional[Dict] = ..., custom_headers: Optional[Iterable[Tuple[AnyStr, AnyStr]]] = ..., - ) -> str: - ... + ) -> str: ... @overload def create_room_as( @@ -100,8 +99,7 @@ class RestHelper: expect_code: int = ..., extra_content: Optional[Dict] = ..., custom_headers: Optional[Iterable[Tuple[AnyStr, AnyStr]]] = ..., - ) -> Optional[str]: - ... + ) -> Optional[str]: ... 
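FederationRateLimiter and FakeRedisPubSubServer both build per-key state with collections.defaultdict and a zero-argument factory, so the first access to an unseen key creates its limiter (or subscriber set) on demand. A sketch of the same pattern, with PerHostLimiter invented as a stand-in:

import collections
from typing import DefaultDict, Set

class PerHostLimiter:
    def __init__(self, burst: int) -> None:
        self.remaining = burst

def make_limiters(burst: int) -> DefaultDict[str, PerHostLimiter]:
    # The factory takes no arguments; configuration is closed over,
    # as with `new_limiter` in the FederationRateLimiter hunk.
    def new_limiter() -> PerHostLimiter:
        return PerHostLimiter(burst)

    return collections.defaultdict(new_limiter)

limiters = make_limiters(burst=10)
limiters["matrix.org"].remaining -= 1  # entry created on first access
print(limiters["matrix.org"].remaining, limiters["example.com"].remaining)

subscribers: DefaultDict[bytes, Set[str]] = collections.defaultdict(set)
subscribers[b"channel"].add("conn-1")
print(subscribers[b"channel"])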
def create_room_as( self, diff --git a/tests/storage/test_cleanup_extrems.py b/tests/storage/test_cleanup_extrems.py index 249c6b39f..d5b999628 100644 --- a/tests/storage/test_cleanup_extrems.py +++ b/tests/storage/test_cleanup_extrems.py @@ -337,15 +337,15 @@ class CleanupExtremDummyEventsTestCase(HomeserverTestCase): """Simple test to ensure that _expire_rooms_to_exclude_from_dummy_event_insertion() expires old entries correctly. """ - self.event_creator_handler._rooms_to_exclude_from_dummy_event_insertion[ - "1" - ] = 100000 - self.event_creator_handler._rooms_to_exclude_from_dummy_event_insertion[ - "2" - ] = 200000 - self.event_creator_handler._rooms_to_exclude_from_dummy_event_insertion[ - "3" - ] = 300000 + self.event_creator_handler._rooms_to_exclude_from_dummy_event_insertion["1"] = ( + 100000 + ) + self.event_creator_handler._rooms_to_exclude_from_dummy_event_insertion["2"] = ( + 200000 + ) + self.event_creator_handler._rooms_to_exclude_from_dummy_event_insertion["3"] = ( + 300000 + ) self.event_creator_handler._expire_rooms_to_exclude_from_dummy_event_insertion() # All entries within time frame diff --git a/tests/storage/test_room_search.py b/tests/storage/test_room_search.py index 01c532480..1eab89f14 100644 --- a/tests/storage/test_room_search.py +++ b/tests/storage/test_room_search.py @@ -328,9 +328,11 @@ class MessageSearchTest(HomeserverTestCase): self.assertEqual( result["count"], 1 if expect_to_contain else 0, - f"expected '{query}' to match '{self.PHRASE}'" - if expect_to_contain - else f"'{query}' unexpectedly matched '{self.PHRASE}'", + ( + f"expected '{query}' to match '{self.PHRASE}'" + if expect_to_contain + else f"'{query}' unexpectedly matched '{self.PHRASE}'" + ), ) self.assertEqual( len(result["results"]), @@ -346,9 +348,11 @@ class MessageSearchTest(HomeserverTestCase): self.assertEqual( result["count"], 1 if expect_to_contain else 0, - f"expected '{query}' to match '{self.PHRASE}'" - if expect_to_contain - else f"'{query}' unexpectedly matched '{self.PHRASE}'", + ( + f"expected '{query}' to match '{self.PHRASE}'" + if expect_to_contain + else f"'{query}' unexpectedly matched '{self.PHRASE}'" + ), ) self.assertEqual( len(result["results"]), diff --git a/tests/unittest.py b/tests/unittest.py index 33c9a384e..6fe0cd4a2 100644 --- a/tests/unittest.py +++ b/tests/unittest.py @@ -109,8 +109,7 @@ class _TypedFailure(Generic[_ExcType], Protocol): """Extension to twisted.Failure, where the 'value' has a certain type.""" @property - def value(self) -> _ExcType: - ... + def value(self) -> _ExcType: ... def around(target: TV) -> Callable[[Callable[Concatenate[S, P], R]], None]: diff --git a/tests/util/test_linearizer.py b/tests/util/test_linearizer.py index d4268bc2e..7cbb1007d 100644 --- a/tests/util/test_linearizer.py +++ b/tests/util/test_linearizer.py @@ -34,8 +34,7 @@ from tests import unittest class UnblockFunction(Protocol): - def __call__(self, pump_reactor: bool = True) -> None: - ... + def __call__(self, pump_reactor: bool = True) -> None: ... class LinearizerTestCase(unittest.TestCase): diff --git a/tests/utils.py b/tests/utils.py index b5dbd60a9..757320ebe 100644 --- a/tests/utils.py +++ b/tests/utils.py @@ -121,13 +121,11 @@ def setupdb() -> None: @overload -def default_config(name: str, parse: Literal[False] = ...) -> Dict[str, object]: - ... +def default_config(name: str, parse: Literal[False] = ...) -> Dict[str, object]: ... @overload -def default_config(name: str, parse: Literal[True]) -> HomeServerConfig: - ... 
+def default_config(name: str, parse: Literal[True]) -> HomeServerConfig: ... def default_config(