
Merge remote-tracking branch 'upstream/release-v1.71'

Tulir Asokan 2022-11-01 16:18:58 +02:00
commit 2337ca829d
135 changed files with 5192 additions and 2356 deletions


@@ -8,4 +8,11 @@
# E203: whitespace before ':' (which is contrary to pep8?)
# E731: do not assign a lambda expression, use a def
# E501: Line too long (black enforces this for us)
ignore=W503,W504,E203,E731,E501
#
# flake8-bugbear runs extra checks. Its error codes are described at
# https://github.com/PyCQA/flake8-bugbear#list-of-warnings
# B019: Use of functools.lru_cache or functools.cache on methods can lead to memory leaks
# B023: Functions defined inside a loop must not use variables redefined in the loop
# B024: Abstract base class with no abstract method.
ignore=W503,W504,E203,E731,E501,B019,B023,B024


@@ -20,7 +20,7 @@ jobs:
- uses: actions/checkout@v3
- name: Setup mdbook
uses: peaceiris/actions-mdbook@4b5ef36b314c2599664ca107bb8c02412548d79d # v1.1.14
uses: peaceiris/actions-mdbook@adeb05db28a0c0004681db83893d56c0388ea9ea # v1.2.0
with:
mdbook-version: '0.4.17'
@@ -58,7 +58,7 @@ jobs:
# Deploy to the target directory.
- name: Deploy to gh pages
uses: peaceiris/actions-gh-pages@068dc23d9710f1ba62e86896f84735d869951305 # v3.8.0
uses: peaceiris/actions-gh-pages@de7ea6f8efb354206b205ef54722213d99067935 # v3.9.0
with:
github_token: ${{ secrets.GITHUB_TOKEN }}
publish_dir: ./book


@@ -99,7 +99,7 @@ jobs:
runs-on: ${{ matrix.os }}
strategy:
matrix:
os: [ubuntu-20.04, macos-10.15]
os: [ubuntu-20.04, macos-11]
arch: [x86_64, aarch64]
# is_pr is a flag used to exclude certain jobs from the matrix on PRs.
# It is not read by the rest of the workflow.
@@ -109,9 +109,9 @@
exclude:
# Don't build macos wheels on PR CI.
- is_pr: true
os: "macos-10.15"
os: "macos-11"
# Don't build aarch64 wheels on mac.
- os: "macos-10.15"
- os: "macos-11"
arch: aarch64
# Don't build aarch64 wheels on PR CI.
- is_pr: true


@@ -167,6 +167,14 @@ jobs:
-e POSTGRES_PASSWORD=postgres \
-e POSTGRES_INITDB_ARGS="--lc-collate C --lc-ctype C --encoding UTF8" \
postgres:${{ matrix.job.postgres-version }}
- name: Install Rust
uses: actions-rs/toolchain@v1
with:
toolchain: 1.58.1
override: true
- uses: Swatinem/rust-cache@v2
- uses: matrix-org/setup-python-poetry@v1
with:
python-version: ${{ matrix.job.python-version }}


@@ -5,24 +5,11 @@ on:
types: [ opened ]
jobs:
add_new_issues:
name: Add new issues to the triage board
runs-on: ubuntu-latest
steps:
- uses: octokit/graphql-action@v2.x
id: add_to_project
with:
headers: '{"GraphQL-Features": "projects_next_graphql"}'
query: |
mutation add_to_project($projectid:ID!,$contentid:ID!) {
addProjectV2ItemById(input: {projectId: $projectid contentId: $contentid}) {
item {
id
}
}
}
projectid: ${{ env.PROJECT_ID }}
contentid: ${{ github.event.issue.node_id }}
env:
PROJECT_ID: "PVT_kwDOAIB0Bs4AFDdZ"
GITHUB_TOKEN: ${{ secrets.ELEMENT_BOT_TOKEN }}
triage:
uses: matrix-org/backend-meta/.github/workflows/triage-incoming.yml@v1
with:
project_id: 'PVT_kwDOAIB0Bs4AFDdZ'
content_id: ${{ github.event.issue.node_id }}
secrets:
github_access_token: ${{ secrets.ELEMENT_BOT_TOKEN }}


@@ -151,12 +151,11 @@ jobs:
run: |
set -x
DEBIAN_FRONTEND=noninteractive sudo apt-get install -yqq python3 pipx
pipx install poetry==1.1.14
pipx install poetry==1.2.0
poetry remove -n twisted
poetry add -n --extras tls git+https://github.com/twisted/twisted.git#trunk
poetry lock --no-update
# NOT IN 1.1.14 poetry lock --check
working-directory: synapse
- run: |


@@ -1,6 +1,96 @@
Synapse 1.71.0rc1 (2022-11-01)
==============================
Features
--------
- Support back-channel logouts from OpenID Connect providers. ([\#11414](https://github.com/matrix-org/synapse/issues/11414))
- Allow use of Postgres and SQLite full-text search operators in search queries. ([\#11635](https://github.com/matrix-org/synapse/issues/11635), [\#14310](https://github.com/matrix-org/synapse/issues/14310), [\#14311](https://github.com/matrix-org/synapse/issues/14311))
- Implement [MSC3664](https://github.com/matrix-org/matrix-doc/pull/3664), Pushrules for relations. Contributed by Nico. ([\#11804](https://github.com/matrix-org/synapse/issues/11804))
- Improve aesthetics of HTML templates. Note that these changes do not retroactively apply to templates which have been [customised](https://matrix-org.github.io/synapse/latest/templates.html#templates) by server admins. ([\#13652](https://github.com/matrix-org/synapse/issues/13652))
- Enable write-ahead logging for SQLite installations. Contributed by [@asymmetric](https://github.com/asymmetric). ([\#13897](https://github.com/matrix-org/synapse/issues/13897))
- Show erasure status when [listing users](https://matrix-org.github.io/synapse/latest/admin_api/user_admin_api.html#query-user-account) in the Admin API. ([\#14205](https://github.com/matrix-org/synapse/issues/14205))
- Provide a specific error code when a `/sync` request provides a filter which doesn't represent a JSON object. ([\#14262](https://github.com/matrix-org/synapse/issues/14262))
Bugfixes
--------
- Fix a long-standing bug where the `update_synapse_database` script could not be run with multiple databases. Contributed by @thefinn93 @ Beeper. ([\#13422](https://github.com/matrix-org/synapse/issues/13422))
- Fix a bug which prevented setting an avatar on homeservers which have an explicit port in their `server_name` and have `max_avatar_size` and/or `allowed_avatar_mimetypes` configuration. Contributed by @ashfame. ([\#13927](https://github.com/matrix-org/synapse/issues/13927))
- Check appservice user interest against the local users instead of all users in the room to align with [MSC3905](https://github.com/matrix-org/matrix-spec-proposals/pull/3905). ([\#13958](https://github.com/matrix-org/synapse/issues/13958))
- Fix a long-standing bug where Synapse would accidentally include extra information in the response to [`PUT /_matrix/federation/v2/invite/{roomId}/{eventId}`](https://spec.matrix.org/v1.4/server-server-api/#put_matrixfederationv2inviteroomideventid). ([\#14064](https://github.com/matrix-org/synapse/issues/14064))
- Fix a bug introduced in Synapse 1.64.0 where presence updates could be missing from `/sync` responses. ([\#14243](https://github.com/matrix-org/synapse/issues/14243))
- Fix a bug introduced in Synapse 1.60.0 which caused an error to be logged when Synapse received a SIGHUP signal if debug logging was enabled. ([\#14258](https://github.com/matrix-org/synapse/issues/14258))
- Prevent history insertion ([MSC2716](https://github.com/matrix-org/matrix-spec-proposals/pull/2716)) during a partial join ([MSC3706](https://github.com/matrix-org/matrix-spec-proposals/pull/3706)). ([\#14291](https://github.com/matrix-org/synapse/issues/14291))
- Fix a bug introduced in Synapse 1.34.0 where device names would be returned via a federation user key query request when `allow_device_name_lookup_over_federation` was set to `false`. ([\#14304](https://github.com/matrix-org/synapse/issues/14304))
- Fix a bug introduced in Synapse 0.34.0 where logs could include error spam when background processes are measured as taking a negative amount of time. ([\#14323](https://github.com/matrix-org/synapse/issues/14323))
- Fix a bug introduced in Synapse 1.70.0 where clients were unable to PUT new [dehydrated devices](https://github.com/matrix-org/matrix-spec-proposals/pull/2697). ([\#14336](https://github.com/matrix-org/synapse/issues/14336))
Improved Documentation
----------------------
- Explain how to disable the use of [`trusted_key_servers`](https://matrix-org.github.io/synapse/latest/usage/configuration/config_documentation.html#trusted_key_servers). ([\#13999](https://github.com/matrix-org/synapse/issues/13999))
- Add workers settings to [configuration manual](https://matrix-org.github.io/synapse/latest/usage/configuration/config_documentation.html#individual-worker-configuration). ([\#14086](https://github.com/matrix-org/synapse/issues/14086))
- Correct the name of the config option [`encryption_enabled_by_default_for_room_type`](https://matrix-org.github.io/synapse/latest/usage/configuration/config_documentation.html#encryption_enabled_by_default_for_room_type). ([\#14110](https://github.com/matrix-org/synapse/issues/14110))
- Update docstrings of `SynapseError` and `FederationError` to better describe what they are used for and the effects of using them. ([\#14191](https://github.com/matrix-org/synapse/issues/14191))
Internal Changes
----------------
- Remove unused `@lru_cache` decorator. ([\#13595](https://github.com/matrix-org/synapse/issues/13595))
- Save login tokens in database and prevent login token reuse. ([\#13844](https://github.com/matrix-org/synapse/issues/13844))
- Refactor OIDC tests to better mimic an actual OIDC provider. ([\#13910](https://github.com/matrix-org/synapse/issues/13910))
- Fix type annotation causing import time error in the Complement forking launcher. ([\#14084](https://github.com/matrix-org/synapse/issues/14084))
- Refactor [MSC3030](https://github.com/matrix-org/matrix-spec-proposals/pull/3030) `/timestamp_to_event` endpoint to loop over federation destinations with standard pattern and error handling. ([\#14096](https://github.com/matrix-org/synapse/issues/14096))
- Add initial power level event to batch of bulk persisted events when creating a new room. ([\#14228](https://github.com/matrix-org/synapse/issues/14228))
- Refactor `/key/` endpoints to use `RestServlet` classes. ([\#14229](https://github.com/matrix-org/synapse/issues/14229))
- Switch to using the `matrix-org/backend-meta` version of `triage-incoming` for new issues in CI. ([\#14230](https://github.com/matrix-org/synapse/issues/14230))
- Build wheels on macOS 11, not macOS 10.15. ([\#14249](https://github.com/matrix-org/synapse/issues/14249))
- Add debugging to help diagnose lost device list updates. ([\#14268](https://github.com/matrix-org/synapse/issues/14268))
- Add Rust cache to CI for `trial` runs. ([\#14287](https://github.com/matrix-org/synapse/issues/14287))
- Improve type hinting of `RawHeaders`. ([\#14303](https://github.com/matrix-org/synapse/issues/14303))
- Use Poetry 1.2.0 in the Twisted Trunk CI job. ([\#14305](https://github.com/matrix-org/synapse/issues/14305))
<details>
<summary>Dependency updates</summary>
Runtime:
- Bump anyhow from 1.0.65 to 1.0.66. ([\#14278](https://github.com/matrix-org/synapse/issues/14278))
- Bump jinja2 from 3.0.3 to 3.1.2. ([\#14271](https://github.com/matrix-org/synapse/issues/14271))
- Bump prometheus-client from 0.14.0 to 0.15.0. ([\#14274](https://github.com/matrix-org/synapse/issues/14274))
- Bump psycopg2 from 2.9.4 to 2.9.5. ([\#14331](https://github.com/matrix-org/synapse/issues/14331))
- Bump pysaml2 from 7.1.2 to 7.2.1. ([\#14270](https://github.com/matrix-org/synapse/issues/14270))
- Bump sentry-sdk from 1.5.11 to 1.10.1. ([\#14330](https://github.com/matrix-org/synapse/issues/14330))
- Bump serde from 1.0.145 to 1.0.147. ([\#14277](https://github.com/matrix-org/synapse/issues/14277))
- Bump serde_json from 1.0.86 to 1.0.87. ([\#14279](https://github.com/matrix-org/synapse/issues/14279))
Tooling and CI:
- Bump black from 22.3.0 to 22.10.0. ([\#14328](https://github.com/matrix-org/synapse/issues/14328))
- Bump flake8-bugbear from 21.3.2 to 22.9.23. ([\#14042](https://github.com/matrix-org/synapse/issues/14042))
- Bump peaceiris/actions-gh-pages from 3.8.0 to 3.9.0. ([\#14276](https://github.com/matrix-org/synapse/issues/14276))
- Bump peaceiris/actions-mdbook from 1.1.14 to 1.2.0. ([\#14275](https://github.com/matrix-org/synapse/issues/14275))
- Bump setuptools-rust from 1.5.1 to 1.5.2. ([\#14273](https://github.com/matrix-org/synapse/issues/14273))
- Bump twine from 3.8.0 to 4.0.1. ([\#14332](https://github.com/matrix-org/synapse/issues/14332))
- Bump types-opentracing from 2.4.7 to 2.4.10. ([\#14133](https://github.com/matrix-org/synapse/issues/14133))
- Bump types-requests from 2.28.11 to 2.28.11.2. ([\#14272](https://github.com/matrix-org/synapse/issues/14272))
</details>
Synapse 1.70.1 (2022-10-28)
===========================
This release fixes some regressions that were discovered in 1.70.0.
[#14300](https://github.com/matrix-org/synapse/issues/14300)
was previously reported to be a regression in 1.70.0 as well. However, we have
since concluded that it was limited to the reporter and thus have not needed
to include any fix for it in 1.70.1.
Bugfixes
--------

Cargo.lock (generated)

@@ -13,9 +13,9 @@ dependencies = [
[[package]]
name = "anyhow"
version = "1.0.65"
version = "1.0.66"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "98161a4e3e2184da77bb14f02184cdd111e83bbbcc9979dfee3c44b9a85f5602"
checksum = "216261ddc8289130e551ddcd5ce8a064710c0d064a4d2895c67151c92b5443f6"
[[package]]
name = "arc-swap"
@@ -323,18 +323,18 @@ checksum = "d29ab0c6d3fc0ee92fe66e2d99f700eab17a8d57d1c1d3b748380fb20baa78cd"
[[package]]
name = "serde"
version = "1.0.145"
version = "1.0.147"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "728eb6351430bccb993660dfffc5a72f91ccc1295abaa8ce19b27ebe4f75568b"
checksum = "d193d69bae983fc11a79df82342761dfbf28a99fc8d203dca4c3c1b590948965"
dependencies = [
"serde_derive",
]
[[package]]
name = "serde_derive"
version = "1.0.145"
version = "1.0.147"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "81fa1584d3d1bcacd84c277a0dfe21f5b0f6accf4a23d04d4c6d61f1af522b4c"
checksum = "4f1d362ca8fc9c3e3a7484440752472d68a6caa98f1ab81d99b5dfe517cec852"
dependencies = [
"proc-macro2",
"quote",
@@ -343,9 +343,9 @@ dependencies = [
[[package]]
name = "serde_json"
version = "1.0.86"
version = "1.0.87"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "41feea4228a6f1cd09ec7a3593a682276702cd67b5273544757dae23c096f074"
checksum = "6ce777b7b150d76b9cf60d28b55f5847135a003f7d7350c6be7a773508ce7d45"
dependencies = [
"itoa",
"ryu",

debian/changelog (vendored)

@@ -1,3 +1,9 @@
matrix-synapse-py3 (1.71.0~rc1) stable; urgency=medium
* New Synapse release 1.71.0rc1.
-- Synapse Packaging team <packages@matrix.org> Tue, 01 Nov 2022 12:10:17 +0000
matrix-synapse-py3 (1.70.1) stable; urgency=medium
* New Synapse release 1.70.1.


@@ -37,6 +37,7 @@ It returns a JSON body like the following:
"is_guest": 0,
"admin": 0,
"deactivated": 0,
"erased": false,
"shadow_banned": 0,
"creation_ts": 1560432506,
"appservice_id": null,
@@ -167,6 +168,7 @@ A response body like the following is returned:
"admin": 0,
"user_type": null,
"deactivated": 0,
"erased": false,
"shadow_banned": 0,
"displayname": "<User One>",
"avatar_url": null,
@@ -177,6 +179,7 @@ A response body like the following is returned:
"admin": 1,
"user_type": null,
"deactivated": 0,
"erased": false,
"shadow_banned": 0,
"displayname": "<User Two>",
"avatar_url": "<avatar_url>",
@@ -247,6 +250,7 @@ The following fields are returned in the JSON response body:
- `user_type` - string - Type of the user. Normal users are type `None`.
This allows user type specific behaviour. There are also types `support` and `bot`.
- `deactivated` - bool - Whether that user has been marked as deactivated.
- `erased` - bool - Whether that user has been marked as erased.
- `shadow_banned` - bool - Whether that user has been marked as shadow banned.
- `displayname` - string - The user's display name if they have set one.
- `avatar_url` - string - The user's avatar URL if they have set one.


@@ -49,6 +49,13 @@ setting in your configuration file.
See the [configuration manual](usage/configuration/config_documentation.md#oidc_providers) for some sample settings, as well as
the text below for example configurations for specific providers.
## OIDC Back-Channel Logout
Synapse supports receiving [OpenID Connect Back-Channel Logout](https://openid.net/specs/openid-connect-backchannel-1_0.html) notifications.
This lets the OpenID Connect Provider notify Synapse when a user logs out, so that Synapse can end that user's session.
This feature can be enabled by setting the `backchannel_logout_enabled` property to `true` in the provider configuration, and by setting the following URL as the destination for Back-Channel Logout notifications in your OpenID Connect Provider: `[synapse public baseurl]/_synapse/client/oidc/backchannel_logout`
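For instance, a provider entry with Back-Channel Logout enabled might look like the following sketch, where the provider details (`idp_id`, `idp_name`, `issuer` and the client credentials) are placeholders:

```yaml
oidc_providers:
  - idp_id: my_idp                        # placeholder
    idp_name: My IdP                      # placeholder
    issuer: "https://idp.example.com/"    # placeholder
    client_id: "synapse"
    client_secret: "your client secret"
    scopes: ["openid", "profile"]
    backchannel_logout_enabled: true      # process Back-Channel Logout notifications
```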
## Sample configs
Here are a few configs for providers that should work with Synapse.
@@ -123,6 +130,9 @@ oidc_providers:
[Keycloak][keycloak-idp] is an opensource IdP maintained by Red Hat.
Keycloak supports OIDC Back-Channel Logout, which sends logout notifications to Synapse so that Synapse users are logged out when they log out from Keycloak.
This can be optionally enabled by setting `backchannel_logout_enabled` to `true` in the Synapse configuration, and by setting the "Backchannel Logout URL" in Keycloak.
Follow the [Getting Started Guide](https://www.keycloak.org/getting-started) to install Keycloak and set up a realm.
1. Click `Clients` in the sidebar and click `Create`
@@ -144,6 +154,8 @@ Follow the [Getting Started Guide](https://www.keycloak.org/getting-started) to
| Client Protocol | `openid-connect` |
| Access Type | `confidential` |
| Valid Redirect URIs | `[synapse public baseurl]/_synapse/client/oidc/callback` |
| Backchannel Logout URL (optional) | `[synapse public baseurl]/_synapse/client/oidc/backchannel_logout` |
| Backchannel Logout Session Required (optional) | `On` |
5. Click `Save`
6. On the Credentials tab, update the fields:
@@ -167,7 +179,9 @@ oidc_providers:
config:
localpart_template: "{{ user.preferred_username }}"
display_name_template: "{{ user.name }}"
backchannel_logout_enabled: true # Optional
```
### Auth0
[Auth0][auth0] is a hosted SaaS IdP solution.


@@ -6,7 +6,7 @@
# Synapse also supports structured logging for machine readable logs which can
# be ingested by ELK stacks. See [2] for details.
#
# [1]: https://docs.python.org/3.7/library/logging.config.html#configuration-dictionary-schema
# [1]: https://docs.python.org/3/library/logging.config.html#configuration-dictionary-schema
# [2]: https://matrix-org.github.io/synapse/latest/structured_logging.html
version: 1


@@ -88,6 +88,34 @@ process, for example:
dpkg -i matrix-synapse-py3_1.3.0+stretch1_amd64.deb
```
# Upgrading to v1.71.0
## Removal of the `generate_short_term_login_token` module API method
As announced with the release of [Synapse 1.69.0](#deprecation-of-the-generate_short_term_login_token-module-api-method), the deprecated `generate_short_term_login_token` module method has been removed.
Modules relying on it can instead use the `create_login_token` method.
## Changes to the events received by application services (interest)
To align with spec (changed in
[MSC3905](https://github.com/matrix-org/matrix-spec-proposals/pull/3905)), Synapse now
only considers local users to be interesting. In other words, the `users` namespace
regex is now only applied against local users of the homeserver.
Please note, this probably doesn't affect the expected behavior of your application
service: an interesting local user in a room still means that all messages in the room
(from local or remote users) will be considered interesting. Likewise, matching a room
with the `rooms` or `aliases` namespace regex will still consider all events sent in the
room to be interesting to the application service.
If one of your application service's `users` regexes was intended to match a remote user,
it will no longer match as you expect. The behavioral mismatch of matching all local
users but only some remote users is why the spec was changed/clarified, and this
caveat is no longer supported.
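For reference, the `users` namespace lives in the application service's registration file. A minimal sketch with placeholder values; under the new behavior, the `regex` below is only matched against local users:

```yaml
id: my_bridge                       # placeholder values throughout
url: "http://localhost:9000"
as_token: <as_token>
hs_token: <hs_token>
sender_localpart: _bridge_bot
namespaces:
  users:
    - exclusive: true
      regex: "@_bridge_.*:example\\.org"   # no longer matches remote users
```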
# Upgrading to v1.69.0
## Changes to the receipts replication streams


@@ -99,7 +99,7 @@ modules:
config: {}
```
---
## Server ##
## Server
Define your homeserver name and other base options.
@@ -159,7 +159,7 @@ including _matrix/...). This is the same URL a user might enter into the
'Custom Homeserver URL' field on their client. If you use Synapse with a
reverse proxy, this should be the URL to reach Synapse via the proxy.
Otherwise, it should be the URL to reach Synapse's client HTTP listener (see
'listeners' below).
['listeners'](#listeners) below).
Defaults to `https://<server_name>/`.
@@ -570,7 +570,7 @@ Example configuration:
delete_stale_devices_after: 1y
```
## Homeserver blocking ##
## Homeserver blocking
Useful options for Synapse admins.
---
@@ -922,7 +922,7 @@ retention:
interval: 1d
```
---
## TLS ##
## TLS
Options related to TLS.
@@ -1012,7 +1012,7 @@ federation_custom_ca_list:
- myCA3.pem
```
---
## Federation ##
## Federation
Options related to federation.
@@ -1071,7 +1071,7 @@ Example configuration:
allow_device_name_lookup_over_federation: true
```
---
## Caching ##
## Caching
Options related to caching.
@@ -1185,7 +1185,7 @@ file in Synapse's `contrib` directory, you can send a `SIGHUP` signal by using
`systemctl reload matrix-synapse`.
---
## Database ##
## Database
Config options related to database settings.
---
@@ -1332,20 +1332,21 @@ databases:
cp_max: 10
```
---
## Logging ##
## Logging
Config options related to logging.
---
### `log_config`
This option specifies a yaml python logging config file as described [here](https://docs.python.org/3.7/library/logging.config.html#configuration-dictionary-schema).
This option specifies a yaml python logging config file as described
[here](https://docs.python.org/3/library/logging.config.html#configuration-dictionary-schema).
Example configuration:
```yaml
log_config: "CONFDIR/SERVERNAME.log.config"
```
---
## Ratelimiting ##
## Ratelimiting
Options related to ratelimiting in Synapse.
Each ratelimiting configuration is made of two parameters:
@@ -1576,7 +1577,7 @@ Example configuration:
federation_rr_transactions_per_room_per_second: 40
```
---
## Media Store ##
## Media Store
Config options related to Synapse's media store.
---
@@ -1766,7 +1767,7 @@ url_preview_ip_range_blacklist:
- 'ff00::/8'
- 'fec0::/10'
```
----
---
### `url_preview_ip_range_whitelist`
This option sets a list of IP address CIDR ranges that the URL preview spider is allowed
@@ -1860,7 +1861,7 @@ Example configuration:
- 'fr;q=0.8'
- '*;q=0.7'
```
----
---
### `oembed`
oEmbed allows for easier embedding content from a website. It can be
@@ -1877,7 +1878,7 @@ oembed:
- oembed/my_providers.json
```
---
## Captcha ##
## Captcha
See [here](../../CAPTCHA_SETUP.md) for full details on setting up captcha.
@@ -1926,7 +1927,7 @@ Example configuration:
recaptcha_siteverify_api: "https://my.recaptcha.site"
```
---
## TURN ##
## TURN
Options related to adding a TURN server to Synapse.
---
@@ -1947,7 +1948,7 @@ Example configuration:
```yaml
turn_shared_secret: "YOUR_SHARED_SECRET"
```
----
---
### `turn_username` and `turn_password`
The Username and password if the TURN server needs them and does not use a token.
@@ -2366,7 +2367,7 @@ Example configuration:
```yaml
session_lifetime: 24h
```
----
---
### `refresh_access_token_lifetime`
Time that an access token remains valid for, if the session is using refresh tokens.
@@ -2422,7 +2423,7 @@ nonrefreshable_access_token_lifetime: 24h
```
---
## Metrics ###
## Metrics
Config options related to metrics.
---
@@ -2519,7 +2520,7 @@ Example configuration:
report_stats_endpoint: https://example.com/report-usage-stats/push
```
---
## API Configuration ##
## API Configuration
Config settings related to the client/server API
---
@@ -2619,7 +2620,7 @@ Example configuration:
form_secret: <PRIVATE STRING>
```
---
## Signing Keys ##
## Signing Keys
Config options relating to signing keys
---
@@ -2680,6 +2681,12 @@ is still supported for backwards-compatibility, but it is deprecated.
warning on start-up. To suppress this warning, set
`suppress_key_server_warning` to true.
If the use of a trusted key server has to be deactivated, e.g. in a private
federation or for privacy reasons, this can be done by setting an empty list
(`trusted_key_servers: []`). Synapse will then request the keys directly from
the server that owns the keys. If Synapse cannot get the keys directly from
that server, events from that server will be rejected.
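For example, to disable the use of trusted key servers entirely:

```yaml
trusted_key_servers: []
```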
Options for each entry in the list include:
* `server_name`: the name of the server. Required.
* `verify_keys`: an optional map from key id to base64-encoded public key.
@@ -2728,7 +2735,7 @@ Example configuration:
key_server_signing_keys_path: "key_server_signing_keys.key"
```
---
## Single sign-on integration ##
## Single sign-on integration
The following settings can be used to make Synapse use a single sign-on
provider for authentication, instead of its internal password database.
@@ -3014,6 +3021,15 @@ Options for each entry include:
which is set to the claims returned by the UserInfo Endpoint and/or
in the ID Token.
* `backchannel_logout_enabled`: set to `true` to process OIDC Back-Channel Logout notifications.
Those notifications are expected to be received on `/_synapse/client/oidc/backchannel_logout`.
Defaults to `false`.
* `backchannel_logout_ignore_sub`: by default, the OIDC Back-Channel Logout feature checks that the
`sub` claim matches the subject claim received during login. This check can be disabled by setting
this to `true`. Defaults to `false`.
You might want to disable this if the `subject_claim` returned by the mapping provider is not `sub`.
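A sketch of how these two options sit in a provider entry (the other provider details are placeholders):

```yaml
oidc_providers:
  - idp_id: my_idp                         # placeholder
    issuer: "https://idp.example.com/"     # placeholder
    client_id: "synapse"
    client_secret: "your client secret"
    backchannel_logout_enabled: true
    backchannel_logout_ignore_sub: false   # keep the `sub` consistency check
```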
It is possible to configure Synapse to only allow logins if certain attributes
match particular values in the OIDC userinfo. The requirements can be listed under
@@ -3348,7 +3364,7 @@ email:
email_validation: "[%(server_name)s] Validate your email"
```
---
## Push ##
## Push
Configuration settings related to push notifications
---
@@ -3381,11 +3397,11 @@ push:
group_unread_count_by_room: false
```
---
## Rooms ##
## Rooms
Config options relating to rooms.
---
### `encryption_enabled_by_default`
### `encryption_enabled_by_default_for_room_type`
Controls whether locally-created rooms should be end-to-end encrypted by
default.
@@ -3627,7 +3643,7 @@ default_power_level_content_override:
```
---
## Opentracing ##
## Opentracing
Configuration options related to Opentracing support.
---
@@ -3670,14 +3686,71 @@ opentracing:
false
```
---
## Workers ##
Configuration options related to workers.
## Coordinating workers
Configuration options related to workers which belong in the main config file
(usually called `homeserver.yaml`).
A Synapse deployment can scale horizontally by running multiple Synapse processes
called _workers_. Incoming requests are distributed between workers to handle higher
loads. Some workers are privileged and can accept requests from other workers.
As a result, the worker configuration is divided into two parts.
1. The first part (in this section of the manual) defines which shardable tasks
are delegated to privileged workers. This allows unprivileged workers to
request that a privileged worker act on their behalf.
1. [The second part](#individual-worker-configuration)
controls the behaviour of individual workers in isolation.
For guidance on setting up workers, see the [worker documentation](../../workers.md).
---
### `worker_replication_secret`
A shared secret used by the replication APIs on the main process to authenticate
HTTP requests from workers.
By default, this value is omitted (equivalently `null`), which means that
traffic between the workers and the main process is not authenticated.
Example configuration:
```yaml
worker_replication_secret: "secret_secret"
```
---
### `start_pushers`
Controls sending of push notifications on the main process. Set to `false`
if using a [pusher worker](../../workers.md#synapseapppusher). Defaults to `true`.
Example configuration:
```yaml
start_pushers: false
```
---
### `pusher_instances`
It is possible to run multiple [pusher workers](../../workers.md#synapseapppusher),
in which case the work is balanced across them. Use this setting to list the pushers by
[`worker_name`](#worker_name). Ensure the main process and all pusher workers are
restarted after changing this option.
If no or only one pusher worker is configured, this setting is not necessary.
The main process will send out push notifications by default if you do not disable
it by setting [`start_pushers: false`](#start_pushers).
Example configuration:
```yaml
start_pushers: false
pusher_instances:
- pusher_worker1
- pusher_worker2
```
---
### `send_federation`
Controls sending of outbound federation transactions on the main process.
Set to false if using a federation sender worker. Defaults to true.
Set to `false` if using a [federation sender worker](../../workers.md#synapseappfederation_sender).
Defaults to `true`.
Example configuration:
```yaml
@@ -3686,8 +3759,9 @@ send_federation: false
---
### `federation_sender_instances`
It is possible to run multiple federation sender workers, in which case the
work is balanced across them. Use this setting to list the senders.
It is possible to run multiple
[federation sender workers](../../workers.md#synapseappfederation_sender), in which
case the work is balanced across them. Use this setting to list the senders.
This configuration setting must be shared between all federation sender workers, and if
changed all federation sender workers must be stopped at the same time and then
@@ -3696,14 +3770,19 @@ events may be dropped).
Example configuration:
```yaml
send_federation: false
federation_sender_instances:
- federation_sender1
```
---
### `instance_map`
When using workers this should be a map from worker name to the
When using workers this should be a map from [`worker_name`](#worker_name) to the
HTTP replication listener of the worker, if configured.
Each worker declared under [`stream_writers`](../../workers.md#stream-writers) needs
a HTTP replication listener, and that listener should be included in the `instance_map`.
(The main process also needs an HTTP replication listener, but it should not be
listed in the `instance_map`.)
Example configuration:
```yaml
@@ -3716,8 +3795,11 @@ instance_map:
### `stream_writers`
Experimental: When using workers you can define which workers should
handle event persistence and typing notifications. Any worker
specified here must also be in the `instance_map`.
handle writing to streams such as event persistence and typing notifications.
Any worker specified here must also be in the [`instance_map`](#instance_map).
See the list of available streams in the
[worker documentation](../../workers.md#stream-writers).
Example configuration:
```yaml
@@ -3728,29 +3810,18 @@ stream_writers:
---
### `run_background_tasks_on`
The worker that is used to run background tasks (e.g. cleaning up expired
data). If not provided this defaults to the main process.
The [worker](../../workers.md#background-tasks) that is used to run
background tasks (e.g. cleaning up expired data). If not provided this
defaults to the main process.
Example configuration:
```yaml
run_background_tasks_on: worker1
```
---
### `worker_replication_secret`
A shared secret used by the replication APIs to authenticate HTTP requests
from workers.
By default this is unused and traffic is not authenticated.
Example configuration:
```yaml
worker_replication_secret: "secret_secret"
```
### `redis`
Configuration for Redis when using workers. This *must* be enabled when
using workers (unless using old style direct TCP configuration).
Configuration for Redis when using workers. This *must* be enabled when using workers.
This setting has the following sub-options:
* `enabled`: whether to use Redis support. Defaults to false.
* `host` and `port`: Optional host and port to use to connect to redis. Defaults to
@@ -3765,7 +3836,123 @@ redis:
port: 6379
password: <secret_password>
```
## Background Updates ##
---
## Individual worker configuration
These options configure an individual worker, in its worker configuration file.
They should not be provided when configuring the main process.
Note also the configuration above for
[coordinating a cluster of workers](#coordinating-workers).
For guidance on setting up workers, see the [worker documentation](../../workers.md).
---
### `worker_app`
The type of worker. The currently available worker applications are listed
in [worker documentation](../../workers.md#available-worker-applications).
The most common worker is the
[`synapse.app.generic_worker`](../../workers.md#synapseappgeneric_worker).
Example configuration:
```yaml
worker_app: synapse.app.generic_worker
```
---
### `worker_name`
A unique name for the worker. The worker needs a name so that it can be referenced
in further configuration parameters and identified in log files. We strongly recommend
giving each worker a unique `worker_name`.
Example configuration:
```yaml
worker_name: generic_worker1
```
---
### `worker_replication_host`
The HTTP replication endpoint that it should talk to on the main Synapse process.
The main Synapse process defines this with a `replication` resource in
[`listeners` option](#listeners).
Example configuration:
```yaml
worker_replication_host: 127.0.0.1
```
---
### `worker_replication_http_port`
The HTTP replication port that it should talk to on the main Synapse process.
The main Synapse process defines this with a `replication` resource in
[`listeners` option](#listeners).
Example configuration:
```yaml
worker_replication_http_port: 9093
```
---
### `worker_listeners`
A worker can handle HTTP requests. To do so, a `worker_listeners` option
must be declared, in the same way as the [`listeners` option](#listeners)
in the shared config.
Workers declared in [`stream_writers`](#stream_writers) will need to include a
`replication` listener here, in order to accept internal HTTP requests from
other workers.
Example configuration:
```yaml
worker_listeners:
- type: http
port: 8083
resources:
- names: [client, federation]
```
---
### `worker_daemonize`
Specifies whether the worker should be started as a daemon process.
If Synapse is being managed by [systemd](../../systemd-with-workers/README.md), this option
must be omitted or set to `false`.
Defaults to `false`.
Example configuration:
```yaml
worker_daemonize: true
```
---
### `worker_pid_file`
When running a worker as a daemon, we need a place to store the
[PID](https://en.wikipedia.org/wiki/Process_identifier) of the worker.
This option defines the location of that "pid file".
This option is required if `worker_daemonize` is `true` and ignored
otherwise. It has no default.
See also the [`pid_file`](#pid_file) option for the main Synapse process.
Example configuration:
```yaml
worker_pid_file: DATADIR/generic_worker1.pid
```
---
### `worker_log_config`
This option specifies a yaml python logging config file as described
[here](https://docs.python.org/3/library/logging.config.html#configuration-dictionary-schema).
See also the [`log_config`](#log_config) option for the main Synapse process.
Example configuration:
```yaml
worker_log_config: /etc/matrix-synapse/generic-worker-log.yaml
```
---
## Background Updates
Configuration settings related to background updates.
---


@@ -88,10 +88,12 @@ shared configuration file.
### Shared configuration
Normally, only a couple of changes are needed to make an existing configuration
file suitable for use with workers. First, you need to enable an "HTTP replication
listener" for the main process; and secondly, you need to enable redis-based
replication. Optionally, a shared secret can be used to authenticate HTTP
traffic between workers. For example:
file suitable for use with workers. First, you need to enable an
["HTTP replication listener"](usage/configuration/config_documentation.md#listeners)
for the main process; and secondly, you need to enable
[redis-based replication](usage/configuration/config_documentation.md#redis).
Optionally, a [shared secret](usage/configuration/config_documentation.md#worker_replication_secret)
can be used to authenticate HTTP traffic between workers. For example:
```yaml
# extend the existing `listeners` section. This defines the ports that the
@@ -111,25 +113,28 @@ redis:
enabled: true
```
See the [configuration manual](usage/configuration/config_documentation.html) for the full documentation of each option.
See the [configuration manual](usage/configuration/config_documentation.md)
for the full documentation of each option.
Under **no circumstances** should the replication listener be exposed to the
public internet; replication traffic is:
* always unencrypted
* unauthenticated, unless `worker_replication_secret` is configured
* unauthenticated, unless [`worker_replication_secret`](usage/configuration/config_documentation.md#worker_replication_secret)
is configured
### Worker configuration
In the config file for each worker, you must specify:
* The type of worker (`worker_app`). The currently available worker applications are listed below.
* A unique name for the worker (`worker_name`).
* The type of worker ([`worker_app`](usage/configuration/config_documentation.md#worker_app)).
The currently available worker applications are listed [below](#available-worker-applications).
* A unique name for the worker ([`worker_name`](usage/configuration/config_documentation.md#worker_name)).
* The HTTP replication endpoint that it should talk to on the main synapse process
(`worker_replication_host` and `worker_replication_http_port`)
* If handling HTTP requests, a `worker_listeners` option with an `http`
listener, in the same way as the [`listeners`](usage/configuration/config_documentation.md#listeners)
option in the shared config.
([`worker_replication_host`](usage/configuration/config_documentation.md#worker_replication_host) and
[`worker_replication_http_port`](usage/configuration/config_documentation.md#worker_replication_http_port)).
* If handling HTTP requests, a [`worker_listeners`](usage/configuration/config_documentation.md#worker_listeners) option
with an `http` listener.
* If handling the `^/_matrix/client/v3/keys/upload` endpoint, the HTTP URI for
the main process (`worker_main_http_uri`).
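Putting these options together, a worker's configuration file might look like the following sketch; the names, host and ports are placeholders, assembled from the option examples in the [configuration manual](usage/configuration/config_documentation.md):

```yaml
worker_app: synapse.app.generic_worker
worker_name: generic_worker1

# The main process's HTTP replication listener (placeholder host/port).
worker_replication_host: 127.0.0.1
worker_replication_http_port: 9093

# Only needed if this worker serves HTTP requests.
worker_listeners:
  - type: http
    port: 8083
    resources:
      - names: [client, federation]

worker_log_config: /etc/matrix-synapse/generic-worker-log.yaml
```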
@@ -146,7 +151,6 @@ plain HTTP endpoint on port 8083 separately serving various endpoints, e.g.
Obviously you should configure your reverse-proxy to route the relevant
endpoints to the worker (`localhost:8083` in the above example).
### Running Synapse with workers
Finally, you need to start your worker processes. This can be done with either
@@ -288,7 +292,8 @@ For multiple workers not handling the SSO endpoints properly, see
[#9427](https://github.com/matrix-org/synapse/issues/9427).
Note that a [HTTP listener](usage/configuration/config_documentation.md#listeners)
with `client` and `federation` `resources` must be configured in the `worker_listeners`
with `client` and `federation` `resources` must be configured in the
[`worker_listeners`](usage/configuration/config_documentation.md#worker_listeners)
option in the worker config.
#### Load balancing
@@ -331,9 +336,10 @@ of the main process to a particular worker.
To enable this, the worker must have a
[HTTP `replication` listener](usage/configuration/config_documentation.md#listeners) configured,
have a `worker_name` and be listed in the `instance_map` config. The same worker
can handle multiple streams, but unless otherwise documented, each stream can only
have a single writer.
have a [`worker_name`](usage/configuration/config_documentation.md#worker_name)
and be listed in the [`instance_map`](usage/configuration/config_documentation.md#instance_map)
config. The same worker can handle multiple streams, but unless otherwise documented,
each stream can only have a single writer.
For example, to move event persistence off to a dedicated worker, the shared
configuration would include:
@@ -360,9 +366,26 @@ streams and the endpoints associated with them:
##### The `events` stream
The `events` stream experimentally supports having multiple writers, where work
is sharded between them by room ID. Note that you *must* restart all worker
instances when adding or removing event persisters. An example `stream_writers`
The `events` stream experimentally supports having multiple writer workers, where load
is sharded between them by room ID. Each writer is called an _event persister_. They are
responsible for
- receiving new events,
- linking them to those already in the room [DAG](development/room-dag-concepts.md),
- persisting them to the DB, and finally
- updating the events stream.
Because load is sharded in this way, you *must* restart all worker instances when
adding or removing event persisters.
An `event_persister` should not be mistaken for an `event_creator`.
An `event_creator` listens for requests from clients to create new events and does
so. It will then pass those events over HTTP replication to any configured event
persisters (or the main process if none are configured).
Note that `event_creator`s and `event_persister`s are implemented using the same
[`synapse.app.generic_worker`](#synapse.app.generic_worker).
An example [`stream_writers`](usage/configuration/config_documentation.md#stream_writers)
configuration with multiple writers:
```yaml
@@ -416,16 +439,18 @@ worker. Background tasks are run periodically or started via replication. Exactl
which tasks are configured to run depends on your Synapse configuration (e.g. if
stats is enabled). This worker doesn't handle any REST endpoints itself.
To enable this, the worker must have a `worker_name` and can be configured to run
background tasks. For example, to move background tasks to a dedicated worker,
the shared configuration would include:
To enable this, the worker must have a unique
[`worker_name`](usage/configuration/config_documentation.md#worker_name)
and can be configured to run background tasks. For example, to move background tasks
to a dedicated worker, the shared configuration would include:
```yaml
run_background_tasks_on: background_worker
```
You might also wish to investigate the `update_user_directory_from_worker` and
`media_instance_running_background_jobs` settings.
You might also wish to investigate the
[`update_user_directory_from_worker`](#updating-the-user-directory) and
[`media_instance_running_background_jobs`](#synapseappmedia_repository) settings.
An example for a dedicated background worker instance:
@@ -478,13 +503,17 @@ worker application type.
### `synapse.app.pusher`
Handles sending push notifications to sygnal and email. Doesn't handle any
REST endpoints itself, but you should set `start_pushers: False` in the
REST endpoints itself, but you should set
[`start_pushers: false`](usage/configuration/config_documentation.md#start_pushers) in the
shared configuration file to stop the main synapse sending push notifications.
To run multiple instances at once the `pusher_instances` option should list all
pusher instances by their worker name, e.g.:
To run multiple instances at once the
[`pusher_instances`](usage/configuration/config_documentation.md#pusher_instances)
option should list all pusher instances by their
[`worker_name`](usage/configuration/config_documentation.md#worker_name), e.g.:
```yaml
start_pushers: false
pusher_instances:
- pusher_worker1
- pusher_worker2
@@ -512,15 +541,20 @@ Note this worker cannot be load-balanced: only one instance should be active.
### `synapse.app.federation_sender`
Handles sending federation traffic to other servers. Doesn't handle any
REST endpoints itself, but you should set `send_federation: False` in the
shared configuration file to stop the main synapse sending this traffic.
REST endpoints itself, but you should set
[`send_federation: false`](usage/configuration/config_documentation.md#send_federation)
in the shared configuration file to stop the main synapse sending this traffic.
If running multiple federation senders then you must list each
instance in the `federation_sender_instances` option by their `worker_name`.
instance in the
[`federation_sender_instances`](usage/configuration/config_documentation.md#federation_sender_instances)
option by their
[`worker_name`](usage/configuration/config_documentation.md#worker_name).
All instances must be stopped and started when adding or removing instances.
For example:
```yaml
send_federation: false
federation_sender_instances:
- federation_sender1
- federation_sender2
@@ -547,7 +581,9 @@ Handles the media repository. It can handle all endpoints starting with:
^/_synapse/admin/v1/quarantine_media/.*$
^/_synapse/admin/v1/users/.*/media$
You should also set `enable_media_repo: False` in the shared configuration
You should also set
[`enable_media_repo: False`](usage/configuration/config_documentation.md#enable_media_repo)
in the shared configuration
file to stop the main synapse running background jobs related to managing the
media repository. Note that doing so will prevent the main process from being
able to handle the above endpoints.
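For example, the shared configuration might contain the following sketch, where `media_worker` is a placeholder worker name:

```yaml
enable_media_repo: false
media_instance_running_background_jobs: media_worker
```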


@@ -56,7 +56,6 @@ exclude = (?x)
|tests/rest/media/v1/test_media_storage.py
|tests/server.py
|tests/server_notices/test_resource_limits_server_notices.py
|tests/test_metrics.py
|tests/test_state.py
|tests/test_terms_auth.py
|tests/util/caches/test_cached_call.py
@@ -106,6 +105,9 @@ disallow_untyped_defs = False
[mypy-tests.handlers.test_user_directory]
disallow_untyped_defs = True
[mypy-tests.metrics.test_background_process_metrics]
disallow_untyped_defs = True
[mypy-tests.push.test_bulk_push_rule_evaluator]
disallow_untyped_defs = True

poetry.lock (generated)

@@ -52,18 +52,18 @@ typecheck = ["mypy"]
[[package]]
name = "black"
version = "22.3.0"
version = "22.10.0"
description = "The uncompromising code formatter."
category = "dev"
optional = false
python-versions = ">=3.6.2"
python-versions = ">=3.7"
[package.dependencies]
click = ">=8.0.0"
mypy-extensions = ">=0.4.3"
pathspec = ">=0.9.0"
platformdirs = ">=2"
tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""}
tomli = {version = ">=1.1.0", markers = "python_full_version < \"3.11.0a7\""}
typed-ast = {version = ">=1.4.2", markers = "python_version < \"3.8\" and implementation_name == \"cpython\""}
typing-extensions = {version = ">=3.10.0.0", markers = "python_version < \"3.10\""}
@@ -260,7 +260,7 @@ pyflakes = ">=2.4.0,<2.5.0"
[[package]]
name = "flake8-bugbear"
version = "21.3.2"
version = "22.9.23"
description = "A plugin for flake8 finding likely bugs and design problems in your program. Contains warnings that don't belong in pyflakes and pycodestyle."
category = "dev"
optional = false
@@ -271,7 +271,7 @@ attrs = ">=19.2.0"
flake8 = ">=3.0.0"
[package.extras]
dev = ["black", "coverage", "hypothesis", "hypothesmith"]
dev = ["coverage", "hypothesis", "hypothesmith (>=0.2)", "pre-commit"]
[[package]]
name = "flake8-comprehensions"
@@ -438,11 +438,11 @@ trio = ["async_generator", "trio"]
[[package]]
name = "jinja2"
version = "3.0.3"
version = "3.1.2"
description = "A very fast and expressive template engine."
category = "main"
optional = false
python-versions = ">=3.6"
python-versions = ">=3.7"
[package.dependencies]
MarkupSafe = ">=2.0"
@@ -710,7 +710,7 @@ test = ["appdirs (==1.4.4)", "pytest (>=6)", "pytest-cov (>=2.7)", "pytest-mock
[[package]]
name = "prometheus-client"
version = "0.14.0"
version = "0.15.0"
description = "Python client for the Prometheus monitoring system."
category = "main"
optional = false
@@ -721,7 +721,7 @@ twisted = ["twisted"]
[[package]]
name = "psycopg2"
version = "2.9.4"
version = "2.9.5"
description = "psycopg2 - Python-PostgreSQL Database Adapter"
category = "main"
optional = true
@@ -918,14 +918,14 @@ python-versions = ">=3.7"
[[package]]
name = "pysaml2"
version = "7.1.2"
version = "7.2.1"
description = "Python implementation of SAML Version 2 Standard"
category = "main"
optional = true
python-versions = "<4,>=3.6"
[package.dependencies]
cryptography = ">=1.4"
cryptography = ">=3.1"
defusedxml = "*"
importlib-resources = {version = "*", markers = "python_version < \"3.9\""}
pyOpenSSL = "*"
@@ -976,11 +976,11 @@ python-versions = ">=3.6"
[[package]]
name = "readme-renderer"
version = "33.0"
version = "37.2"
description = "readme_renderer is a library for rendering \"readme\" descriptions for Warehouse"
category = "dev"
optional = false
python-versions = ">=3.6"
python-versions = ">=3.7"
[package.dependencies]
bleach = ">=2.1.0"
@@ -1030,6 +1030,22 @@ python-versions = ">=3.7"
[package.extras]
idna2008 = ["idna"]
[[package]]
name = "rich"
version = "12.6.0"
description = "Render rich text, tables, progress bars, syntax highlighting, markdown and more to the terminal"
category = "dev"
optional = false
python-versions = ">=3.6.3,<4.0.0"
[package.dependencies]
commonmark = ">=0.9.0,<0.10.0"
pygments = ">=2.6.0,<3.0.0"
typing-extensions = {version = ">=4.0.0,<5.0", markers = "python_version < \"3.9\""}
[package.extras]
jupyter = ["ipywidgets (>=7.5.1,<8.0.0)"]
[[package]]
name = "secretstorage"
version = "3.3.1"
@@ -1056,7 +1072,7 @@ doc = ["Sphinx", "sphinx-rtd-theme"]
[[package]]
name = "sentry-sdk"
version = "1.5.11"
version = "1.10.1"
description = "Python client for Sentry (https://sentry.io)"
category = "main"
optional = true
@@ -1064,7 +1080,7 @@ python-versions = "*"
[package.dependencies]
certifi = "*"
urllib3 = ">=1.10.0"
urllib3 = {version = ">=1.26.11", markers = "python_version >= \"3.6\""}
[package.extras]
aiohttp = ["aiohttp (>=3.5)"]
@@ -1074,6 +1090,7 @@ celery = ["celery (>=3)"]
chalice = ["chalice (>=1.16.0)"]
django = ["django (>=1.8)"]
falcon = ["falcon (>=1.4)"]
fastapi = ["fastapi (>=0.79.0)"]
flask = ["blinker (>=1.1)", "flask (>=0.11)"]
httpx = ["httpx (>=0.16.0)"]
pure-eval = ["asttokens", "executing", "pure-eval"]
@@ -1082,6 +1099,7 @@ quart = ["blinker (>=1.1)", "quart (>=0.16.1)"]
rq = ["rq (>=0.6)"]
sanic = ["sanic (>=0.8)"]
sqlalchemy = ["sqlalchemy (>=1.2)"]
starlette = ["starlette (>=0.19.1)"]
tornado = ["tornado (>=5)"]
[[package]]
@@ -1120,7 +1138,7 @@ testing-integration = ["build[virtualenv]", "filelock (>=3.4.0)", "jaraco.envs (
[[package]]
name = "setuptools-rust"
version = "1.5.1"
version = "1.5.2"
description = "Setuptools Rust extension plugin"
category = "main"
optional = false
@@ -1251,22 +1269,6 @@ tomli = {version = "*", markers = "python_version >= \"3.6\""}
[package.extras]
dev = ["packaging"]
[[package]]
name = "tqdm"
version = "4.63.0"
description = "Fast, Extensible Progress Meter"
category = "dev"
optional = false
python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,>=2.7"
[package.dependencies]
colorama = {version = "*", markers = "platform_system == \"Windows\""}
[package.extras]
dev = ["py-make (>=0.1.0)", "twine", "wheel"]
notebook = ["ipywidgets (>=6)"]
telegram = ["requests"]
[[package]]
name = "treq"
version = "22.2.0"
@@ -1288,22 +1290,21 @@ docs = ["sphinx (>=1.4.8)"]
[[package]]
name = "twine"
version = "3.8.0"
version = "4.0.1"
description = "Collection of utilities for publishing packages on PyPI"
category = "dev"
optional = false
python-versions = ">=3.6"
python-versions = ">=3.7"
[package.dependencies]
colorama = ">=0.4.3"
importlib-metadata = ">=3.6"
keyring = ">=15.1"
pkginfo = ">=1.8.1"
readme-renderer = ">=21.0"
readme-renderer = ">=35.0"
requests = ">=2.20"
requests-toolbelt = ">=0.8.0,<0.9.0 || >0.9.0"
rfc3986 = ">=1.4.0"
tqdm = ">=4.14"
rich = ">=12.0.0"
urllib3 = ">=1.26.0"
[[package]]
@@ -1426,7 +1427,7 @@ python-versions = "*"
[[package]]
name = "types-opentracing"
version = "2.4.7"
version = "2.4.10"
description = "Typing stubs for opentracing"
category = "dev"
optional = false
@@ -1469,7 +1470,7 @@ python-versions = "*"
[[package]]
name = "types-requests"
version = "2.28.11"
version = "2.28.11.2"
description = "Typing stubs for requests"
category = "dev"
optional = false
@@ -1512,15 +1513,15 @@ python-versions = ">=3.6,<4.0"
[[package]]
name = "urllib3"
version = "1.26.8"
version = "1.26.12"
description = "HTTP library with thread-safe connection pooling, file post, and more."
category = "main"
optional = false
python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, <4"
python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*, <4"
[package.extras]
brotli = ["brotlipy (>=0.6.0)"]
secure = ["certifi", "cryptography (>=1.3.4)", "idna (>=2.0.0)", "ipaddress", "pyOpenSSL (>=0.14)"]
brotli = ["brotli (>=1.0.9)", "brotlicffi (>=0.8.0)", "brotlipy (>=0.6.0)"]
secure = ["certifi", "cryptography (>=1.3.4)", "idna (>=2.0.0)", "ipaddress", "pyOpenSSL (>=0.14)", "urllib3-secure-extra"]
socks = ["PySocks (>=1.5.6,!=1.5.7,<2.0)"]
[[package]]
@@ -1633,7 +1634,7 @@ url-preview = ["lxml"]
[metadata]
lock-version = "1.1"
python-versions = "^3.7.1"
content-hash = "9400cb5c92bb4648238f652f5e7f81df51cdcf9b7c69d645f35beaa4acb2f420"
content-hash = "27811bd21d56ceeb0f68ded5a00375efcd1a004928f0736f5b02927ce8594cb0"
[metadata.files]
attrs = [
@@ -1672,29 +1673,27 @@ bcrypt = [
{file = "bcrypt-4.0.1.tar.gz", hash = "sha256:27d375903ac8261cfe4047f6709d16f7d18d39b1ec92aaf72af989552a650ebd"},
]
black = [
{file = "black-22.3.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:2497f9c2386572e28921fa8bec7be3e51de6801f7459dffd6e62492531c47e09"},
{file = "black-22.3.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:5795a0375eb87bfe902e80e0c8cfaedf8af4d49694d69161e5bd3206c18618bb"},
{file = "black-22.3.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:e3556168e2e5c49629f7b0f377070240bd5511e45e25a4497bb0073d9dda776a"},
{file = "black-22.3.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:67c8301ec94e3bcc8906740fe071391bce40a862b7be0b86fb5382beefecd968"},
{file = "black-22.3.0-cp310-cp310-win_amd64.whl", hash = "sha256:fd57160949179ec517d32ac2ac898b5f20d68ed1a9c977346efbac9c2f1e779d"},
{file = "black-22.3.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:cc1e1de68c8e5444e8f94c3670bb48a2beef0e91dddfd4fcc29595ebd90bb9ce"},
{file = "black-22.3.0-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6d2fc92002d44746d3e7db7cf9313cf4452f43e9ea77a2c939defce3b10b5c82"},
{file = "black-22.3.0-cp36-cp36m-win_amd64.whl", hash = "sha256:a6342964b43a99dbc72f72812bf88cad8f0217ae9acb47c0d4f141a6416d2d7b"},
{file = "black-22.3.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:328efc0cc70ccb23429d6be184a15ce613f676bdfc85e5fe8ea2a9354b4e9015"},
{file = "black-22.3.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:06f9d8846f2340dfac80ceb20200ea5d1b3f181dd0556b47af4e8e0b24fa0a6b"},
{file = "black-22.3.0-cp37-cp37m-win_amd64.whl", hash = "sha256:ad4efa5fad66b903b4a5f96d91461d90b9507a812b3c5de657d544215bb7877a"},
{file = "black-22.3.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:e8477ec6bbfe0312c128e74644ac8a02ca06bcdb8982d4ee06f209be28cdf163"},
{file = "black-22.3.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:637a4014c63fbf42a692d22b55d8ad6968a946b4a6ebc385c5505d9625b6a464"},
{file = "black-22.3.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:863714200ada56cbc366dc9ae5291ceb936573155f8bf8e9de92aef51f3ad0f0"},
{file = "black-22.3.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:10dbe6e6d2988049b4655b2b739f98785a884d4d6b85bc35133a8fb9a2233176"},
{file = "black-22.3.0-cp38-cp38-win_amd64.whl", hash = "sha256:cee3e11161dde1b2a33a904b850b0899e0424cc331b7295f2a9698e79f9a69a0"},
{file = "black-22.3.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:5891ef8abc06576985de8fa88e95ab70641de6c1fca97e2a15820a9b69e51b20"},
{file = "black-22.3.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:30d78ba6bf080eeaf0b7b875d924b15cd46fec5fd044ddfbad38c8ea9171043a"},
{file = "black-22.3.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:ee8f1f7228cce7dffc2b464f07ce769f478968bfb3dd1254a4c2eeed84928aad"},
{file = "black-22.3.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6ee227b696ca60dd1c507be80a6bc849a5a6ab57ac7352aad1ffec9e8b805f21"},
{file = "black-22.3.0-cp39-cp39-win_amd64.whl", hash = "sha256:9b542ced1ec0ceeff5b37d69838106a6348e60db7b8fdd245294dc1d26136265"},
{file = "black-22.3.0-py3-none-any.whl", hash = "sha256:bc58025940a896d7e5356952228b68f793cf5fcb342be703c3a2669a1488cb72"},
{file = "black-22.3.0.tar.gz", hash = "sha256:35020b8886c022ced9282b51b5a875b6d1ab0c387b31a065b84db7c33085ca79"},
{file = "black-22.10.0-1fixedarch-cp310-cp310-macosx_11_0_x86_64.whl", hash = "sha256:5cc42ca67989e9c3cf859e84c2bf014f6633db63d1cbdf8fdb666dcd9e77e3fa"},
{file = "black-22.10.0-1fixedarch-cp311-cp311-macosx_11_0_x86_64.whl", hash = "sha256:5d8f74030e67087b219b032aa33a919fae8806d49c867846bfacde57f43972ef"},
{file = "black-22.10.0-1fixedarch-cp37-cp37m-macosx_10_16_x86_64.whl", hash = "sha256:197df8509263b0b8614e1df1756b1dd41be6738eed2ba9e9769f3880c2b9d7b6"},
{file = "black-22.10.0-1fixedarch-cp38-cp38-macosx_10_16_x86_64.whl", hash = "sha256:2644b5d63633702bc2c5f3754b1b475378fbbfb481f62319388235d0cd104c2d"},
{file = "black-22.10.0-1fixedarch-cp39-cp39-macosx_11_0_x86_64.whl", hash = "sha256:e41a86c6c650bcecc6633ee3180d80a025db041a8e2398dcc059b3afa8382cd4"},
{file = "black-22.10.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:2039230db3c6c639bd84efe3292ec7b06e9214a2992cd9beb293d639c6402edb"},
{file = "black-22.10.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:14ff67aec0a47c424bc99b71005202045dc09270da44a27848d534600ac64fc7"},
{file = "black-22.10.0-cp310-cp310-win_amd64.whl", hash = "sha256:819dc789f4498ecc91438a7de64427c73b45035e2e3680c92e18795a839ebb66"},
{file = "black-22.10.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:5b9b29da4f564ba8787c119f37d174f2b69cdfdf9015b7d8c5c16121ddc054ae"},
{file = "black-22.10.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b8b49776299fece66bffaafe357d929ca9451450f5466e997a7285ab0fe28e3b"},
{file = "black-22.10.0-cp311-cp311-win_amd64.whl", hash = "sha256:21199526696b8f09c3997e2b4db8d0b108d801a348414264d2eb8eb2532e540d"},
{file = "black-22.10.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1e464456d24e23d11fced2bc8c47ef66d471f845c7b7a42f3bd77bf3d1789650"},
{file = "black-22.10.0-cp37-cp37m-win_amd64.whl", hash = "sha256:9311e99228ae10023300ecac05be5a296f60d2fd10fff31cf5c1fa4ca4b1988d"},
{file = "black-22.10.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:fba8a281e570adafb79f7755ac8721b6cf1bbf691186a287e990c7929c7692ff"},
{file = "black-22.10.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:915ace4ff03fdfff953962fa672d44be269deb2eaf88499a0f8805221bc68c87"},
{file = "black-22.10.0-cp38-cp38-win_amd64.whl", hash = "sha256:444ebfb4e441254e87bad00c661fe32df9969b2bf224373a448d8aca2132b395"},
{file = "black-22.10.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:974308c58d057a651d182208a484ce80a26dac0caef2895836a92dd6ebd725e0"},
{file = "black-22.10.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:72ef3925f30e12a184889aac03d77d031056860ccae8a1e519f6cbb742736383"},
{file = "black-22.10.0-cp39-cp39-win_amd64.whl", hash = "sha256:432247333090c8c5366e69627ccb363bc58514ae3e63f7fc75c54b1ea80fa7de"},
{file = "black-22.10.0-py3-none-any.whl", hash = "sha256:c957b2b4ea88587b46cf49d1dc17681c1e672864fd7af32fc1e9664d572b3458"},
{file = "black-22.10.0.tar.gz", hash = "sha256:f513588da599943e0cde4e32cc9879e825d58720d6557062d1098c5ad80080e1"},
]
bleach = [
{file = "bleach-5.0.1-py3-none-any.whl", hash = "sha256:085f7f33c15bd408dd9b17a4ad77c577db66d76203e5984b1bd59baeee948b2a"},
@ -1826,8 +1825,8 @@ flake8 = [
{file = "flake8-4.0.1.tar.gz", hash = "sha256:806e034dda44114815e23c16ef92f95c91e4c71100ff52813adf7132a6ad870d"},
]
flake8-bugbear = [
{file = "flake8-bugbear-21.3.2.tar.gz", hash = "sha256:cadce434ceef96463b45a7c3000f23527c04ea4b531d16c7ac8886051f516ca0"},
{file = "flake8_bugbear-21.3.2-py36.py37.py38-none-any.whl", hash = "sha256:5d6ccb0c0676c738a6e066b4d50589c408dcc1c5bf1d73b464b18b73cd6c05c2"},
{file = "flake8-bugbear-22.9.23.tar.gz", hash = "sha256:17b9623325e6e0dcdcc80ed9e4aa811287fcc81d7e03313b8736ea5733759937"},
{file = "flake8_bugbear-22.9.23-py3-none-any.whl", hash = "sha256:cd2779b2b7ada212d7a322814a1e5651f1868ab0d3f24cc9da66169ab8fda474"},
]
flake8-comprehensions = [
{file = "flake8-comprehensions-3.8.0.tar.gz", hash = "sha256:8e108707637b1d13734f38e03435984f6b7854fa6b5a4e34f93e69534be8e521"},
@ -1999,8 +1998,8 @@ jeepney = [
{file = "jeepney-0.7.1.tar.gz", hash = "sha256:fa9e232dfa0c498bd0b8a3a73b8d8a31978304dcef0515adc859d4e096f96f4f"},
]
jinja2 = [
{file = "Jinja2-3.0.3-py3-none-any.whl", hash = "sha256:077ce6014f7b40d03b47d1f1ca4b0fc8328a692bd284016f806ed0eaca390ad8"},
{file = "Jinja2-3.0.3.tar.gz", hash = "sha256:611bb273cd68f3b993fabdc4064fc858c5b47a973cb5aa7999ec1ba405c87cd7"},
{file = "Jinja2-3.1.2-py3-none-any.whl", hash = "sha256:6088930bfe239f0e6710546ab9c19c9ef35e29792895fed6e6e31a023a182a61"},
{file = "Jinja2-3.1.2.tar.gz", hash = "sha256:31351a702a408a9e7595a8fc6150fc3f43bb6bf7e319770cbc0db9df9437e852"},
]
jsonschema = [
{file = "jsonschema-4.16.0-py3-none-any.whl", hash = "sha256:9e74b8f9738d6a946d70705dc692b74b5429cd0960d58e79ffecfc43b2221eb9"},
@ -2301,21 +2300,21 @@ platformdirs = [
{file = "platformdirs-2.5.1.tar.gz", hash = "sha256:7535e70dfa32e84d4b34996ea99c5e432fa29a708d0f4e394bbcb2a8faa4f16d"},
]
prometheus-client = [
{file = "prometheus_client-0.14.0-py3-none-any.whl", hash = "sha256:f4aba3fdd1735852049f537c1f0ab177159b7ab76f271ecc4d2f45aa2a1d01f2"},
{file = "prometheus_client-0.14.0.tar.gz", hash = "sha256:8f7a922dd5455ad524b6ba212ce8eb2b4b05e073f4ec7218287f88b1cac34750"},
{file = "prometheus_client-0.15.0-py3-none-any.whl", hash = "sha256:db7c05cbd13a0f79975592d112320f2605a325969b270a94b71dcabc47b931d2"},
{file = "prometheus_client-0.15.0.tar.gz", hash = "sha256:be26aa452490cfcf6da953f9436e95a9f2b4d578ca80094b4458930e5f584ab1"},
]
psycopg2 = [
{file = "psycopg2-2.9.4-cp310-cp310-win32.whl", hash = "sha256:8de6a9fc5f42fa52f559e65120dcd7502394692490c98fed1221acf0819d7797"},
{file = "psycopg2-2.9.4-cp310-cp310-win_amd64.whl", hash = "sha256:1da77c061bdaab450581458932ae5e469cc6e36e0d62f988376e9f513f11cb5c"},
{file = "psycopg2-2.9.4-cp36-cp36m-win32.whl", hash = "sha256:a11946bad3557ca254f17357d5a4ed63bdca45163e7a7d2bfb8e695df069cc3a"},
{file = "psycopg2-2.9.4-cp36-cp36m-win_amd64.whl", hash = "sha256:46361c054df612c3cc813fdb343733d56543fb93565cff0f8ace422e4da06acb"},
{file = "psycopg2-2.9.4-cp37-cp37m-win32.whl", hash = "sha256:aafa96f2da0071d6dd0cbb7633406d99f414b40ab0f918c9d9af7df928a1accb"},
{file = "psycopg2-2.9.4-cp37-cp37m-win_amd64.whl", hash = "sha256:aa184d551a767ad25df3b8d22a0a62ef2962e0e374c04f6cbd1204947f540d61"},
{file = "psycopg2-2.9.4-cp38-cp38-win32.whl", hash = "sha256:839f9ea8f6098e39966d97fcb8d08548fbc57c523a1e27a1f0609addf40f777c"},
{file = "psycopg2-2.9.4-cp38-cp38-win_amd64.whl", hash = "sha256:c7fa041b4acb913f6968fce10169105af5200f296028251d817ab37847c30184"},
{file = "psycopg2-2.9.4-cp39-cp39-win32.whl", hash = "sha256:07b90a24d5056687781ddaef0ea172fd951f2f7293f6ffdd03d4f5077801f426"},
{file = "psycopg2-2.9.4-cp39-cp39-win_amd64.whl", hash = "sha256:849bd868ae3369932127f0771c08d1109b254f08d48dc42493c3d1b87cb2d308"},
{file = "psycopg2-2.9.4.tar.gz", hash = "sha256:d529926254e093a1b669f692a3aa50069bc71faf5b0ecd91686a78f62767d52f"},
{file = "psycopg2-2.9.5-cp310-cp310-win32.whl", hash = "sha256:d3ef67e630b0de0779c42912fe2cbae3805ebaba30cda27fea2a3de650a9414f"},
{file = "psycopg2-2.9.5-cp310-cp310-win_amd64.whl", hash = "sha256:4cb9936316d88bfab614666eb9e32995e794ed0f8f6b3b718666c22819c1d7ee"},
{file = "psycopg2-2.9.5-cp36-cp36m-win32.whl", hash = "sha256:b9ac1b0d8ecc49e05e4e182694f418d27f3aedcfca854ebd6c05bb1cffa10d6d"},
{file = "psycopg2-2.9.5-cp36-cp36m-win_amd64.whl", hash = "sha256:fc04dd5189b90d825509caa510f20d1d504761e78b8dfb95a0ede180f71d50e5"},
{file = "psycopg2-2.9.5-cp37-cp37m-win32.whl", hash = "sha256:922cc5f0b98a5f2b1ff481f5551b95cd04580fd6f0c72d9b22e6c0145a4840e0"},
{file = "psycopg2-2.9.5-cp37-cp37m-win_amd64.whl", hash = "sha256:1e5a38aa85bd660c53947bd28aeaafb6a97d70423606f1ccb044a03a1203fe4a"},
{file = "psycopg2-2.9.5-cp38-cp38-win32.whl", hash = "sha256:f5b6320dbc3cf6cfb9f25308286f9f7ab464e65cfb105b64cc9c52831748ced2"},
{file = "psycopg2-2.9.5-cp38-cp38-win_amd64.whl", hash = "sha256:1a5c7d7d577e0eabfcf15eb87d1e19314c8c4f0e722a301f98e0e3a65e238b4e"},
{file = "psycopg2-2.9.5-cp39-cp39-win32.whl", hash = "sha256:322fd5fca0b1113677089d4ebd5222c964b1760e361f151cbb2706c4912112c5"},
{file = "psycopg2-2.9.5-cp39-cp39-win_amd64.whl", hash = "sha256:190d51e8c1b25a47484e52a79638a8182451d6f6dff99f26ad9bd81e5359a0fa"},
{file = "psycopg2-2.9.5.tar.gz", hash = "sha256:a5246d2e683a972e2187a8714b5c2cf8156c064629f9a9b1a873c1730d9e245a"},
]
psycopg2cffi = [
{file = "psycopg2cffi-2.9.0.tar.gz", hash = "sha256:7e272edcd837de3a1d12b62185eb85c45a19feda9e62fa1b120c54f9e8d35c52"},
@ -2445,8 +2444,8 @@ pyrsistent = [
{file = "pyrsistent-0.18.1.tar.gz", hash = "sha256:d4d61f8b993a7255ba714df3aca52700f8125289f84f704cf80916517c46eb96"},
]
pysaml2 = [
{file = "pysaml2-7.1.2-py2.py3-none-any.whl", hash = "sha256:d915961aaa4d4d97d952b30fe5d18d64cf053465acf3e38d8090b36c5ff08325"},
{file = "pysaml2-7.1.2.tar.gz", hash = "sha256:1ec94442306511b93fe7a5710f224e05e0aba948682d506614d1e04f3232f827"},
{file = "pysaml2-7.2.1-py2.py3-none-any.whl", hash = "sha256:2ca155f4eeb1471b247a7b0cc79ccfd5780046d33d0b201e1199a00698dce795"},
{file = "pysaml2-7.2.1.tar.gz", hash = "sha256:f40f9576dce9afef156469179277ffeeca36829248be333252af0517a26d0b1f"},
]
python-dateutil = [
{file = "python-dateutil-2.8.2.tar.gz", hash = "sha256:0123cacc1627ae19ddf3c27a5de5bd67ee4586fbdd6440d9748f8abb483d3e86"},
@ -2503,8 +2502,8 @@ pyyaml = [
{file = "PyYAML-6.0.tar.gz", hash = "sha256:68fb519c14306fec9720a2a5b45bc9f0c8d1b9c72adf45c37baedfcd949c35a2"},
]
readme-renderer = [
{file = "readme_renderer-33.0-py3-none-any.whl", hash = "sha256:f02cee0c4de9636b5a62b6be50c9742427ba1b956aad1d938bfb087d0d72ccdf"},
{file = "readme_renderer-33.0.tar.gz", hash = "sha256:e3b53bc84bd6af054e4cc1fe3567dc1ae19f554134221043a3f8c674e22209db"},
{file = "readme_renderer-37.2-py3-none-any.whl", hash = "sha256:d3f06a69e8c40fca9ab3174eca48f96d9771eddb43517b17d96583418427b106"},
{file = "readme_renderer-37.2.tar.gz", hash = "sha256:e8ad25293c98f781dbc2c5a36a309929390009f902f99e1798c761aaf04a7923"},
]
requests = [
{file = "requests-2.27.1-py2.py3-none-any.whl", hash = "sha256:f22fa1e554c9ddfd16e6e41ac79759e17be9e492b3587efa038054674760e72d"},
@ -2518,6 +2517,10 @@ rfc3986 = [
{file = "rfc3986-2.0.0-py2.py3-none-any.whl", hash = "sha256:50b1502b60e289cb37883f3dfd34532b8873c7de9f49bb546641ce9cbd256ebd"},
{file = "rfc3986-2.0.0.tar.gz", hash = "sha256:97aacf9dbd4bfd829baad6e6309fa6573aaf1be3f6fa735c8ab05e46cecb261c"},
]
rich = [
{file = "rich-12.6.0-py3-none-any.whl", hash = "sha256:a4eb26484f2c82589bd9a17c73d32a010b1e29d89f1604cd9bf3a2097b81bb5e"},
{file = "rich-12.6.0.tar.gz", hash = "sha256:ba3a3775974105c221d31141f2c116f4fd65c5ceb0698657a11e9f295ec93fd0"},
]
secretstorage = [
{file = "SecretStorage-3.3.1-py3-none-any.whl", hash = "sha256:422d82c36172d88d6a0ed5afdec956514b189ddbfb72fefab0c8a1cee4eaf71f"},
{file = "SecretStorage-3.3.1.tar.gz", hash = "sha256:fd666c51a6bf200643495a04abb261f83229dcb6fd8472ec393df7ffc8b6f195"},
@ -2527,8 +2530,8 @@ semantic-version = [
{file = "semantic_version-2.10.0.tar.gz", hash = "sha256:bdabb6d336998cbb378d4b9db3a4b56a1e3235701dc05ea2690d9a997ed5041c"},
]
sentry-sdk = [
{file = "sentry-sdk-1.5.11.tar.gz", hash = "sha256:6c01d9d0b65935fd275adc120194737d1df317dce811e642cbf0394d0d37a007"},
{file = "sentry_sdk-1.5.11-py2.py3-none-any.whl", hash = "sha256:c17179183cac614e900cbd048dab03f49a48e2820182ec686c25e7ce46f8548f"},
{file = "sentry-sdk-1.10.1.tar.gz", hash = "sha256:105faf7bd7b7fa25653404619ee261527266b14103fe1389e0ce077bd23a9691"},
{file = "sentry_sdk-1.10.1-py2.py3-none-any.whl", hash = "sha256:06c0fa9ccfdc80d7e3b5d2021978d6eb9351fa49db9b5847cf4d1f2a473414ad"},
]
service-identity = [
{file = "service-identity-21.1.0.tar.gz", hash = "sha256:6e6c6086ca271dc11b033d17c3a8bea9f24ebff920c587da090afc9519419d34"},
@ -2539,8 +2542,8 @@ setuptools = [
{file = "setuptools-65.3.0.tar.gz", hash = "sha256:7732871f4f7fa58fb6bdcaeadb0161b2bd046c85905dbaa066bdcbcc81953b57"},
]
setuptools-rust = [
{file = "setuptools-rust-1.5.1.tar.gz", hash = "sha256:0e05e456645d59429cb1021370aede73c0760e9360bbfdaaefb5bced530eb9d7"},
{file = "setuptools_rust-1.5.1-py3-none-any.whl", hash = "sha256:306b236ff3aa5229180e58292610d0c2c51bb488191122d2fc559ae4caeb7d5e"},
{file = "setuptools-rust-1.5.2.tar.gz", hash = "sha256:d8daccb14dc0eae1b6b6eb3ecef79675bd37b4065369f79c35393dd5c55652c7"},
{file = "setuptools_rust-1.5.2-py3-none-any.whl", hash = "sha256:8eb45851e34288f2296cd5ab9e924535ac1757318b730a13fe6836867843f206"},
]
signedjson = [
{file = "signedjson-1.1.4-py3-none-any.whl", hash = "sha256:45569ec54241c65d2403fe3faf7169be5322547706a231e884ca2b427f23d228"},
@ -2682,17 +2685,13 @@ towncrier = [
{file = "towncrier-21.9.0-py2.py3-none-any.whl", hash = "sha256:fc5a88a2a54988e3a8ed2b60d553599da8330f65722cc607c839614ed87e0f92"},
{file = "towncrier-21.9.0.tar.gz", hash = "sha256:9cb6f45c16e1a1eec9d0e7651165e7be60cd0ab81d13a5c96ca97a498ae87f48"},
]
tqdm = [
{file = "tqdm-4.63.0-py2.py3-none-any.whl", hash = "sha256:e643e071046f17139dea55b880dc9b33822ce21613b4a4f5ea57f202833dbc29"},
{file = "tqdm-4.63.0.tar.gz", hash = "sha256:1d9835ede8e394bb8c9dcbffbca02d717217113adc679236873eeaac5bc0b3cd"},
]
treq = [
{file = "treq-22.2.0-py3-none-any.whl", hash = "sha256:27d95b07c5c14be3e7b280416139b036087617ad5595be913b1f9b3ce981b9b2"},
{file = "treq-22.2.0.tar.gz", hash = "sha256:df757e3f141fc782ede076a604521194ffcb40fa2645cf48e5a37060307f52ec"},
]
twine = [
{file = "twine-3.8.0-py3-none-any.whl", hash = "sha256:d0550fca9dc19f3d5e8eadfce0c227294df0a2a951251a4385797c8a6198b7c8"},
{file = "twine-3.8.0.tar.gz", hash = "sha256:8efa52658e0ae770686a13b675569328f1fba9837e5de1867bfe5f46a9aefe19"},
{file = "twine-4.0.1-py3-none-any.whl", hash = "sha256:42026c18e394eac3e06693ee52010baa5313e4811d5a11050e7d48436cf41b9e"},
{file = "twine-4.0.1.tar.gz", hash = "sha256:96b1cf12f7ae611a4a40b6ae8e9570215daff0611828f5fe1f37a16255ab24a0"},
]
twisted = [
{file = "Twisted-22.8.0-py3-none-any.whl", hash = "sha256:8d4718d1e48dcc28933f8beb48dc71cfe77a125e37ad1eb7a3d0acc49baf6c99"},
@ -2767,8 +2766,8 @@ types-jsonschema = [
{file = "types_jsonschema-4.4.6-py3-none-any.whl", hash = "sha256:1db9031ca49a8444d01bd2ce8cf2f89318382b04610953b108321e6f8fb03390"},
]
types-opentracing = [
{file = "types-opentracing-2.4.7.tar.gz", hash = "sha256:be60e9618355aa892571ace002e6b353702538b1c0dc4fbc1c921219d6658830"},
{file = "types_opentracing-2.4.7-py3-none-any.whl", hash = "sha256:861fb8103b07cf717f501dd400cb274ca9992552314d4d6c7a824b11a215e512"},
{file = "types-opentracing-2.4.10.tar.gz", hash = "sha256:6101414f3b6d3b9c10f1c510a261e8439b6c8d67c723d5c2872084697b4580a7"},
{file = "types_opentracing-2.4.10-py3-none-any.whl", hash = "sha256:66d9cfbbdc4a6f8ca8189a15ad26f0fe41cee84c07057759c5d194e2505b84c2"},
]
types-pillow = [
{file = "types-Pillow-9.2.2.1.tar.gz", hash = "sha256:85c139e06e1c46ec5f9c634d5c54a156b0958d5d0e8be024ed353db0c804b426"},
@ -2787,8 +2786,8 @@ types-PyYAML = [
{file = "types_PyYAML-6.0.12-py3-none-any.whl", hash = "sha256:29228db9f82df4f1b7febee06bbfb601677882e98a3da98132e31c6874163e15"},
]
types-requests = [
{file = "types-requests-2.28.11.tar.gz", hash = "sha256:7ee827eb8ce611b02b5117cfec5da6455365b6a575f5e3ff19f655ba603e6b4e"},
{file = "types_requests-2.28.11-py3-none-any.whl", hash = "sha256:af5f55e803cabcfb836dad752bd6d8a0fc8ef1cd84243061c0e27dee04ccf4fd"},
{file = "types-requests-2.28.11.2.tar.gz", hash = "sha256:fdcd7bd148139fb8eef72cf4a41ac7273872cad9e6ada14b11ff5dfdeee60ed3"},
{file = "types_requests-2.28.11.2-py3-none-any.whl", hash = "sha256:14941f8023a80b16441b3b46caffcbfce5265fd14555844d6029697824b5a2ef"},
]
types-setuptools = [
{file = "types-setuptools-65.5.0.1.tar.gz", hash = "sha256:5b297081c8f1fbd992cd8b305a97ed96ee6ffc765e9115124029597dd10b8a71"},
@ -2807,8 +2806,8 @@ unpaddedbase64 = [
{file = "unpaddedbase64-2.1.0.tar.gz", hash = "sha256:7273c60c089de39d90f5d6d4a7883a79e319dc9d9b1c8924a7fab96178a5f005"},
]
urllib3 = [
{file = "urllib3-1.26.8-py2.py3-none-any.whl", hash = "sha256:000ca7f471a233c2251c6c7023ee85305721bfdf18621ebff4fd17a8653427ed"},
{file = "urllib3-1.26.8.tar.gz", hash = "sha256:0e7c33d9a63e7ddfcb86780aac87befc2fbddf46c58dbb487e0855f7ceec283c"},
{file = "urllib3-1.26.12-py2.py3-none-any.whl", hash = "sha256:b930dd878d5a8afb066a637fbb35144fe7901e3b209d1cd4f524bd0e9deee997"},
{file = "urllib3-1.26.12.tar.gz", hash = "sha256:3fa96cf423e6987997fc326ae8df396db2a8b7c667747d47ddd8ecba91f4a74e"},
]
webencodings = [
{file = "webencodings-0.5.1-py2.py3-none-any.whl", hash = "sha256:a0af1213f3c2226497a97e2b3aa01a7e4bee4f403f95be16fc9acd2947514a78"},


@ -57,7 +57,7 @@ manifest-path = "rust/Cargo.toml"
[tool.poetry]
name = "matrix-synapse"
version = "1.70.1"
version = "1.71.0rc1"
description = "Homeserver for the Matrix decentralised comms protocol"
authors = ["Matrix.org Team and Contributors <packages@matrix.org>"]
license = "Apache-2.0"
@ -192,7 +192,7 @@ psycopg2 = { version = ">=2.8", markers = "platform_python_implementation != 'Py
psycopg2cffi = { version = ">=2.8", markers = "platform_python_implementation == 'PyPy'", optional = true }
psycopg2cffi-compat = { version = "==1.1", markers = "platform_python_implementation == 'PyPy'", optional = true }
pysaml2 = { version = ">=4.5.0", optional = true }
authlib = { version = ">=0.14.0", optional = true }
authlib = { version = ">=0.15.1", optional = true }
# systemd-python is necessary for logging to the systemd journal via
# `systemd.journal.JournalHandler`, as is documented in
# `contrib/systemd/log_config.yaml`.

View file

@ -254,9 +254,9 @@ importlib-resources==5.4.0 ; python_full_version >= "3.7.1" and python_version <
incremental==21.3.0 ; python_full_version >= "3.7.1" and python_full_version < "4.0.0" \
--hash=sha256:02f5de5aff48f6b9f665d99d48bfc7ec03b6e3943210de7cfc88856d755d6f57 \
--hash=sha256:92014aebc6a20b78a8084cdd5645eeaa7f74b8933f70fa3ada2cfbd1e3b54321
jinja2==3.0.3 ; python_full_version >= "3.7.1" and python_full_version < "4.0.0" \
--hash=sha256:077ce6014f7b40d03b47d1f1ca4b0fc8328a692bd284016f806ed0eaca390ad8 \
--hash=sha256:611bb273cd68f3b993fabdc4064fc858c5b47a973cb5aa7999ec1ba405c87cd7
jinja2==3.1.2 ; python_full_version >= "3.7.1" and python_full_version < "4.0.0" \
--hash=sha256:31351a702a408a9e7595a8fc6150fc3f43bb6bf7e319770cbc0db9df9437e852 \
--hash=sha256:6088930bfe239f0e6710546ab9c19c9ef35e29792895fed6e6e31a023a182a61
jsonschema==4.16.0 ; python_full_version >= "3.7.1" and python_full_version < "4.0.0" \
--hash=sha256:165059f076eff6971bae5b742fc029a7b4ef3f9bcf04c14e4776a7605de14b23 \
--hash=sha256:9e74b8f9738d6a946d70705dc692b74b5429cd0960d58e79ffecfc43b2221eb9
@ -479,21 +479,21 @@ pillow==9.0.1 ; python_full_version >= "3.7.1" and python_full_version < "4.0.0"
pkgutil-resolve-name==1.3.10 ; python_full_version >= "3.7.1" and python_version < "3.9" \
--hash=sha256:357d6c9e6a755653cfd78893817c0853af365dd51ec97f3d358a819373bbd174 \
--hash=sha256:ca27cc078d25c5ad71a9de0a7a330146c4e014c2462d9af19c6b828280649c5e
prometheus-client==0.14.0 ; python_full_version >= "3.7.1" and python_full_version < "4.0.0" \
--hash=sha256:8f7a922dd5455ad524b6ba212ce8eb2b4b05e073f4ec7218287f88b1cac34750 \
--hash=sha256:f4aba3fdd1735852049f537c1f0ab177159b7ab76f271ecc4d2f45aa2a1d01f2
psycopg2==2.9.4 ; python_full_version >= "3.7.1" and python_full_version < "4.0.0" \
--hash=sha256:07b90a24d5056687781ddaef0ea172fd951f2f7293f6ffdd03d4f5077801f426 \
--hash=sha256:1da77c061bdaab450581458932ae5e469cc6e36e0d62f988376e9f513f11cb5c \
--hash=sha256:46361c054df612c3cc813fdb343733d56543fb93565cff0f8ace422e4da06acb \
--hash=sha256:839f9ea8f6098e39966d97fcb8d08548fbc57c523a1e27a1f0609addf40f777c \
--hash=sha256:849bd868ae3369932127f0771c08d1109b254f08d48dc42493c3d1b87cb2d308 \
--hash=sha256:8de6a9fc5f42fa52f559e65120dcd7502394692490c98fed1221acf0819d7797 \
--hash=sha256:a11946bad3557ca254f17357d5a4ed63bdca45163e7a7d2bfb8e695df069cc3a \
--hash=sha256:aa184d551a767ad25df3b8d22a0a62ef2962e0e374c04f6cbd1204947f540d61 \
--hash=sha256:aafa96f2da0071d6dd0cbb7633406d99f414b40ab0f918c9d9af7df928a1accb \
--hash=sha256:c7fa041b4acb913f6968fce10169105af5200f296028251d817ab37847c30184 \
--hash=sha256:d529926254e093a1b669f692a3aa50069bc71faf5b0ecd91686a78f62767d52f
prometheus-client==0.15.0 ; python_full_version >= "3.7.1" and python_full_version < "4.0.0" \
--hash=sha256:be26aa452490cfcf6da953f9436e95a9f2b4d578ca80094b4458930e5f584ab1 \
--hash=sha256:db7c05cbd13a0f79975592d112320f2605a325969b270a94b71dcabc47b931d2
psycopg2==2.9.5 ; python_full_version >= "3.7.1" and python_full_version < "4.0.0" \
--hash=sha256:190d51e8c1b25a47484e52a79638a8182451d6f6dff99f26ad9bd81e5359a0fa \
--hash=sha256:1a5c7d7d577e0eabfcf15eb87d1e19314c8c4f0e722a301f98e0e3a65e238b4e \
--hash=sha256:1e5a38aa85bd660c53947bd28aeaafb6a97d70423606f1ccb044a03a1203fe4a \
--hash=sha256:322fd5fca0b1113677089d4ebd5222c964b1760e361f151cbb2706c4912112c5 \
--hash=sha256:4cb9936316d88bfab614666eb9e32995e794ed0f8f6b3b718666c22819c1d7ee \
--hash=sha256:922cc5f0b98a5f2b1ff481f5551b95cd04580fd6f0c72d9b22e6c0145a4840e0 \
--hash=sha256:a5246d2e683a972e2187a8714b5c2cf8156c064629f9a9b1a873c1730d9e245a \
--hash=sha256:b9ac1b0d8ecc49e05e4e182694f418d27f3aedcfca854ebd6c05bb1cffa10d6d \
--hash=sha256:d3ef67e630b0de0779c42912fe2cbae3805ebaba30cda27fea2a3de650a9414f \
--hash=sha256:f5b6320dbc3cf6cfb9f25308286f9f7ab464e65cfb105b64cc9c52831748ced2 \
--hash=sha256:fc04dd5189b90d825509caa510f20d1d504761e78b8dfb95a0ede180f71d50e5
psycopg2cffi-compat==1.1 ; python_full_version >= "3.7.1" and python_full_version < "4.0.0" and platform_python_implementation == "PyPy" \
--hash=sha256:d25e921748475522b33d13420aad5c2831c743227dc1f1f2585e0fdb5c914e05
psycopg2cffi==2.9.0 ; python_full_version >= "3.7.1" and python_full_version < "4.0.0" and platform_python_implementation == "PyPy" \
@ -636,9 +636,9 @@ semantic-version==2.10.0 ; python_full_version >= "3.7.1" and python_full_versio
service-identity==21.1.0 ; python_full_version >= "3.7.1" and python_full_version < "4.0.0" \
--hash=sha256:6e6c6086ca271dc11b033d17c3a8bea9f24ebff920c587da090afc9519419d34 \
--hash=sha256:f0b0caac3d40627c3c04d7a51b6e06721857a0e10a8775f2d1d7e72901b3a7db
setuptools-rust==1.5.1 ; python_full_version >= "3.7.1" and python_full_version < "4.0.0" \
--hash=sha256:0e05e456645d59429cb1021370aede73c0760e9360bbfdaaefb5bced530eb9d7 \
--hash=sha256:306b236ff3aa5229180e58292610d0c2c51bb488191122d2fc559ae4caeb7d5e
setuptools-rust==1.5.2 ; python_full_version >= "3.7.1" and python_full_version < "4.0.0" \
--hash=sha256:8eb45851e34288f2296cd5ab9e924535ac1757318b730a13fe6836867843f206 \
--hash=sha256:d8daccb14dc0eae1b6b6eb3ecef79675bd37b4065369f79c35393dd5c55652c7
setuptools==65.3.0 ; python_full_version >= "3.7.1" and python_full_version < "4.0.0" \
--hash=sha256:2e24e0bec025f035a2e72cdd1961119f557d78ad331bb00ff82efb2ab8da8e82 \
--hash=sha256:7732871f4f7fa58fb6bdcaeadb0161b2bd046c85905dbaa066bdcbcc81953b57
@ -744,9 +744,9 @@ typing-extensions==4.4.0 ; python_full_version >= "3.7.1" and python_full_versio
unpaddedbase64==2.1.0 ; python_full_version >= "3.7.1" and python_version < "4.0" \
--hash=sha256:485eff129c30175d2cd6f0cd8d2310dff51e666f7f36175f738d75dfdbd0b1c6 \
--hash=sha256:7273c60c089de39d90f5d6d4a7883a79e319dc9d9b1c8924a7fab96178a5f005
urllib3==1.26.8 ; python_full_version >= "3.7.1" and python_version < "4" \
--hash=sha256:000ca7f471a233c2251c6c7023ee85305721bfdf18621ebff4fd17a8653427ed \
--hash=sha256:0e7c33d9a63e7ddfcb86780aac87befc2fbddf46c58dbb487e0855f7ceec283c
urllib3==1.26.12 ; python_full_version >= "3.7.1" and python_version < "4" \
--hash=sha256:3fa96cf423e6987997fc326ae8df396db2a8b7c667747d47ddd8ecba91f4a74e \
--hash=sha256:b930dd878d5a8afb066a637fbb35144fe7901e3b209d1cd4f524bd0e9deee997
webencodings==0.5.1 ; python_full_version >= "3.7.1" and python_full_version < "4.0.0" \
--hash=sha256:a0af1213f3c2226497a97e2b3aa01a7e4bee4f403f95be16fc9acd2947514a78 \
--hash=sha256:b36a1c245f2d304965eb4e0a82848379241dc04b865afcc4aab16748587e1923


@ -20,15 +20,15 @@ crate-type = ["lib", "cdylib"]
name = "synapse.synapse_rust"
[dependencies]
anyhow = "1.0.63"
anyhow = "1.0.66"
lazy_static = "1.4.0"
log = "0.4.17"
pyo3 = { version = "0.17.1", features = ["extension-module", "macros", "anyhow", "abi3", "abi3-py37"] }
pyo3-log = "0.7.0"
pythonize = "0.17.0"
regex = "1.6.0"
serde = { version = "1.0.144", features = ["derive"] }
serde_json = "1.0.85"
serde = { version = "1.0.147", features = ["derive"] }
serde_json = "1.0.87"
[build-dependencies]
blake2 = "0.10.4"


@ -25,6 +25,7 @@ use crate::push::Action;
use crate::push::Condition;
use crate::push::EventMatchCondition;
use crate::push::PushRule;
use crate::push::RelatedEventMatchCondition;
use crate::push::SetTweak;
use crate::push::TweakValue;
@ -114,6 +115,22 @@ pub const BASE_APPEND_OVERRIDE_RULES: &[PushRule] = &[
default: true,
default_enabled: true,
},
PushRule {
rule_id: Cow::Borrowed("global/override/.im.nheko.msc3664.reply"),
priority_class: 5,
conditions: Cow::Borrowed(&[Condition::Known(KnownCondition::RelatedEventMatch(
RelatedEventMatchCondition {
key: Some(Cow::Borrowed("sender")),
pattern: None,
pattern_type: Some(Cow::Borrowed("user_id")),
rel_type: Cow::Borrowed("m.in_reply_to"),
include_fallbacks: None,
},
))]),
actions: Cow::Borrowed(&[Action::Notify, HIGHLIGHT_ACTION, SOUND_ACTION]),
default: true,
default_enabled: true,
},
PushRule {
rule_id: Cow::Borrowed("global/override/.m.rule.contains_display_name"),
priority_class: 5,

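For reference, the condition on the new `.im.nheko.msc3664.reply` rule above serialises to the unstable client-facing JSON sketched below (assembled from the `RelatedEventMatchCondition` serde attributes later in this diff; `pattern` and `include_fallbacks` are omitted because they are `None`):

```python
# Client-visible form of the reply rule's condition (MSC3664, unstable names).
# pattern_type "user_id" tells the evaluator to substitute the notifying
# user's full Matrix ID as the glob pattern.
condition = {
    "kind": "im.nheko.msc3664.related_event_match",
    "key": "sender",
    "pattern_type": "user_id",
    "rel_type": "m.in_reply_to",
}
```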

@ -23,6 +23,7 @@ use regex::Regex;
use super::{
utils::{get_glob_matcher, get_localpart_from_id, GlobMatchType},
Action, Condition, EventMatchCondition, FilteredPushRules, KnownCondition,
RelatedEventMatchCondition,
};
lazy_static! {
@ -49,6 +50,13 @@ pub struct PushRuleEvaluator {
/// The power level of the sender of the event, or None if event is an
/// outlier.
sender_power_level: Option<i64>,
/// The related events, indexed by relation type. Flattened in the same manner as
/// `flattened_keys`.
related_events_flattened: BTreeMap<String, BTreeMap<String, String>>,
/// Whether MSC3664 (push rules for related events) is enabled.
related_event_match_enabled: bool,
}
#[pymethods]
@ -60,6 +68,8 @@ impl PushRuleEvaluator {
room_member_count: u64,
sender_power_level: Option<i64>,
notification_power_levels: BTreeMap<String, i64>,
related_events_flattened: BTreeMap<String, BTreeMap<String, String>>,
related_event_match_enabled: bool,
) -> Result<Self, Error> {
let body = flattened_keys
.get("content.body")
@ -72,6 +82,8 @@ impl PushRuleEvaluator {
room_member_count,
notification_power_levels,
sender_power_level,
related_events_flattened,
related_event_match_enabled,
})
}
@ -156,6 +168,9 @@ impl PushRuleEvaluator {
KnownCondition::EventMatch(event_match) => {
self.match_event_match(event_match, user_id)?
}
KnownCondition::RelatedEventMatch(event_match) => {
self.match_related_event_match(event_match, user_id)?
}
KnownCondition::ContainsDisplayName => {
if let Some(dn) = display_name {
if !dn.is_empty() {
@ -239,6 +254,79 @@ impl PushRuleEvaluator {
compiled_pattern.is_match(haystack)
}
/// Evaluates a `related_event_match` condition. (MSC3664)
fn match_related_event_match(
&self,
event_match: &RelatedEventMatchCondition,
user_id: Option<&str>,
) -> Result<bool, Error> {
// First check if related event matching is enabled...
if !self.related_event_match_enabled {
return Ok(false);
}
// get the related event, fail if there is none.
let event = if let Some(event) = self.related_events_flattened.get(&*event_match.rel_type) {
event
} else {
return Ok(false);
};
// If we are not matching fallbacks, don't match if our special key indicating this is a
// fallback relation is not present.
if !event_match.include_fallbacks.unwrap_or(false)
&& event.contains_key("im.vector.is_falling_back")
{
return Ok(false);
}
// If we have no key, accept the event as matching simply because it exists,
// without matching on any fields.
let key = if let Some(key) = &event_match.key {
key
} else {
return Ok(true);
};
let pattern = if let Some(pattern) = &event_match.pattern {
pattern
} else if let Some(pattern_type) = &event_match.pattern_type {
// The `pattern_type` can either be "user_id" or "user_localpart",
// either way if we don't have a `user_id` then the condition can't
// match.
let user_id = if let Some(user_id) = user_id {
user_id
} else {
return Ok(false);
};
match &**pattern_type {
"user_id" => user_id,
"user_localpart" => get_localpart_from_id(user_id)?,
_ => return Ok(false),
}
} else {
return Ok(false);
};
let haystack = if let Some(haystack) = event.get(&**key) {
haystack
} else {
return Ok(false);
};
// For the content.body we match against "words", but for everything
// else we match against the entire value.
let match_type = if key == "content.body" {
GlobMatchType::Word
} else {
GlobMatchType::Whole
};
let mut compiled_pattern = get_glob_matcher(pattern, match_type)?;
compiled_pattern.is_match(haystack)
}
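To make the lookup above concrete, here is a hedged sketch (in Python, mirroring the `related_events_flattened` argument exposed through the bindings) of the map the evaluator consults; per the code above, the presence of the `im.vector.is_falling_back` key suppresses a match unless `include_fallbacks` is true:

```python
# Hypothetical flattened view of a reply's related event, keyed by rel_type.
# The inner keys are flattened in the same manner as flattened_keys.
related_events_flattened = {
    "m.in_reply_to": {
        "sender": "@alice:example.org",
        "content.body": "the original message",
        # adding "im.vector.is_falling_back": "" here would mark a thread
        # fallback and stop the match when include_fallbacks is unset
    }
}
```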
/// Match the member count against an 'is' condition
/// The `is` condition can be things like '>2', '==3' or even just '4'.
fn match_member_count(&self, is: &str) -> Result<bool, Error> {
@ -267,8 +355,15 @@ impl PushRuleEvaluator {
fn push_rule_evaluator() {
let mut flattened_keys = BTreeMap::new();
flattened_keys.insert("content.body".to_string(), "foo bar bob hello".to_string());
let evaluator =
PushRuleEvaluator::py_new(flattened_keys, 10, Some(0), BTreeMap::new()).unwrap();
let evaluator = PushRuleEvaluator::py_new(
flattened_keys,
10,
Some(0),
BTreeMap::new(),
BTreeMap::new(),
true,
)
.unwrap();
let result = evaluator.run(&FilteredPushRules::default(), None, Some("bob"));
assert_eq!(result.len(), 3);


@ -267,6 +267,8 @@ pub enum Condition {
#[serde(tag = "kind")]
pub enum KnownCondition {
EventMatch(EventMatchCondition),
#[serde(rename = "im.nheko.msc3664.related_event_match")]
RelatedEventMatch(RelatedEventMatchCondition),
ContainsDisplayName,
RoomMemberCount {
#[serde(skip_serializing_if = "Option::is_none")]
@ -299,6 +301,20 @@ pub struct EventMatchCondition {
pub pattern_type: Option<Cow<'static, str>>,
}
/// The body of a [`Condition::RelatedEventMatch`]
#[derive(Serialize, Deserialize, Debug, Clone)]
pub struct RelatedEventMatchCondition {
#[serde(skip_serializing_if = "Option::is_none")]
pub key: Option<Cow<'static, str>>,
#[serde(skip_serializing_if = "Option::is_none")]
pub pattern: Option<Cow<'static, str>>,
#[serde(skip_serializing_if = "Option::is_none")]
pub pattern_type: Option<Cow<'static, str>>,
pub rel_type: Cow<'static, str>,
#[serde(skip_serializing_if = "Option::is_none")]
pub include_fallbacks: Option<bool>,
}
/// The collection of push rules for a user.
#[derive(Debug, Clone, Default)]
#[pyclass(frozen)]
@ -391,15 +407,21 @@ impl PushRules {
pub struct FilteredPushRules {
push_rules: PushRules,
enabled_map: BTreeMap<String, bool>,
msc3664_enabled: bool,
}
#[pymethods]
impl FilteredPushRules {
#[new]
pub fn py_new(push_rules: PushRules, enabled_map: BTreeMap<String, bool>) -> Self {
pub fn py_new(
push_rules: PushRules,
enabled_map: BTreeMap<String, bool>,
msc3664_enabled: bool,
) -> Self {
Self {
push_rules,
enabled_map,
msc3664_enabled,
}
}
@ -414,13 +436,25 @@ impl FilteredPushRules {
/// Iterates over all the rules and their enabled state, including base
/// rules, in the order they should be executed in.
fn iter(&self) -> impl Iterator<Item = (&PushRule, bool)> {
self.push_rules.iter().map(|r| {
let enabled = *self
.enabled_map
.get(&*r.rule_id)
.unwrap_or(&r.default_enabled);
(r, enabled)
})
self.push_rules
.iter()
.filter(|rule| {
// Ignore disabled experimental push rules
if !self.msc3664_enabled
&& rule.rule_id == "global/override/.im.nheko.msc3664.reply"
{
return false;
}
true
})
.map(|r| {
let enabled = *self
.enabled_map
.get(&*r.rule_id)
.unwrap_or(&r.default_enabled);
(r, enabled)
})
}
}
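A minimal sketch of the resulting behaviour through the Python bindings (constructor arguments per the `.pyi` stub changed later in this diff; the `rule_id` attribute on `PushRule` is assumed for illustration):

```python
from synapse.synapse_rust.push import FilteredPushRules, PushRules  # built extension assumed

def reply_rule_is_exposed(push_rules: PushRules) -> bool:
    # With msc3664_enabled=False, the experimental reply rule is filtered out
    # of iteration entirely, before any enabled_map lookup happens.
    filtered = FilteredPushRules(push_rules, {}, False)
    return any(
        rule.rule_id == "global/override/.im.nheko.msc3664.reply"  # attr name assumed
        for rule, _enabled in filtered.rules()
    )
```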
@ -446,6 +480,17 @@ fn test_deserialize_condition() {
let _: Condition = serde_json::from_str(json).unwrap();
}
#[test]
fn test_deserialize_unstable_msc3664_condition() {
let json = r#"{"kind":"im.nheko.msc3664.related_event_match","key":"content.body","pattern":"coffee","rel_type":"m.in_reply_to"}"#;
let condition: Condition = serde_json::from_str(json).unwrap();
assert!(matches!(
condition,
Condition::Known(KnownCondition::RelatedEventMatch(_))
));
}
#[test]
fn test_deserialize_custom_condition() {
let json = r#"{"kind":"custom_tag"}"#;


@ -25,7 +25,9 @@ class PushRules:
def rules(self) -> Collection[PushRule]: ...
class FilteredPushRules:
def __init__(self, push_rules: PushRules, enabled_map: Dict[str, bool]): ...
def __init__(
self, push_rules: PushRules, enabled_map: Dict[str, bool], msc3664_enabled: bool
): ...
def rules(self) -> Collection[Tuple[PushRule, bool]]: ...
def get_base_rule_ids() -> Collection[str]: ...
@ -37,6 +39,8 @@ class PushRuleEvaluator:
room_member_count: int,
sender_power_level: Optional[int],
notification_power_levels: Mapping[str, int],
related_events_flattened: Mapping[str, Mapping[str, str]],
related_event_match_enabled: bool,
): ...
def run(
self,

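Putting the stub together, a minimal construction sketch (assuming the `synapse.synapse_rust` extension is built; argument order per the stub above, values echoing the Rust unit test in this diff):

```python
from synapse.synapse_rust.push import PushRuleEvaluator

evaluator = PushRuleEvaluator(
    {"content.body": "foo bar bob hello"},  # flattened event keys
    10,                                      # room_member_count
    0,                                       # sender_power_level
    {},                                      # notification_power_levels
    {},                                      # related_events_flattened (new)
    True,                                    # related_event_match_enabled (new)
)
```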
synapse/_scripts/update_synapse_database.py Executable file → Normal file
View file

@ -15,7 +15,6 @@
import argparse
import logging
import sys
from typing import cast
import yaml
@ -100,13 +99,6 @@ def main() -> None:
# Load, process and sanity-check the config.
hs_config = yaml.safe_load(args.database_config)
if "database" not in hs_config and "databases" not in hs_config:
sys.stderr.write(
"The configuration file must have a 'database' or 'databases' section. "
"See https://matrix-org.github.io/synapse/latest/usage/configuration/config_documentation.html#database"
)
sys.exit(4)
config = HomeServerConfig()
config.parse_config_dict(hs_config, "", "")


@ -155,7 +155,13 @@ class RedirectException(CodeMessageException):
class SynapseError(CodeMessageException):
"""A base exception type for matrix errors which have an errcode and error
message (as well as an HTTP status code).
message (as well as an HTTP status code). These often bubble all the way up to the
client API response, so the error code and status defined here frequently reach the
client directly. If the error doesn't make sense to present to a client, then it
probably shouldn't be a `SynapseError`. For example, if we contact another
homeserver over federation, we shouldn't automatically ferry response errors back to
the client on our end (a 500 from a remote server does not make sense to a client
when our server did not experience a 500).
Attributes:
errcode: Matrix error code e.g 'M_FORBIDDEN'
@ -600,8 +606,20 @@ def cs_error(msg: str, code: str = Codes.UNKNOWN, **kwargs: Any) -> "JsonDict":
class FederationError(RuntimeError):
"""This class is used to inform remote homeservers about erroneous
PDUs they sent us.
"""
Raised when we process an erroneous PDU.
There are two kinds of scenarios where this exception can be raised:
1. We may pull an invalid PDU from a remote homeserver (e.g. during backfill). We
raise this exception to signal an error to the rest of the application.
2. We may be pushed an invalid PDU as part of a `/send` transaction from a remote
homeserver. We raise so that we can respond to the transaction and include the
error string in the "PDU Processing Result". The message will likely be
ignored by the remote homeserver, and it is not machine-parseable since it's
just a string.
TODO: In the future, we should split these usage scenarios into their own error types.
FATAL: The remote server could not interpret the source event.
(e.g., it was missing a required field)


@ -28,7 +28,7 @@ FEDERATION_V1_PREFIX = FEDERATION_PREFIX + "/v1"
FEDERATION_V2_PREFIX = FEDERATION_PREFIX + "/v2"
FEDERATION_UNSTABLE_PREFIX = FEDERATION_PREFIX + "/unstable"
STATIC_PREFIX = "/_matrix/static"
SERVER_KEY_V2_PREFIX = "/_matrix/key/v2"
SERVER_KEY_PREFIX = "/_matrix/key"
MEDIA_R0_PREFIX = "/_matrix/media/r0"
MEDIA_V3_PREFIX = "/_matrix/media/v3"
LEGACY_MEDIA_PREFIX = "/_matrix/media/v1"


@ -558,7 +558,7 @@ def reload_cache_config(config: HomeServerConfig) -> None:
logger.warning(f)
else:
logger.debug(
"New cache config. Was:\n %s\nNow:\n",
"New cache config. Was:\n %s\nNow:\n %s",
previous_cache_config.__dict__,
config.caches.__dict__,
)


@ -55,13 +55,13 @@ import os
import signal
import sys
from types import FrameType
from typing import Any, Callable, List, Optional
from typing import Any, Callable, Dict, List, Optional
from twisted.internet.main import installReactor
# a list of the original signal handlers, before we installed our custom ones.
# We restore these in our child processes.
_original_signal_handlers: dict[int, Any] = {}
_original_signal_handlers: Dict[int, Any] = {}
class ProxiedReactor:


@ -28,7 +28,7 @@ from synapse.api.urls import (
LEGACY_MEDIA_PREFIX,
MEDIA_R0_PREFIX,
MEDIA_V3_PREFIX,
SERVER_KEY_V2_PREFIX,
SERVER_KEY_PREFIX,
)
from synapse.app import _base
from synapse.app._base import (
@ -89,7 +89,7 @@ from synapse.rest.client.register import (
RegistrationTokenValidityRestServlet,
)
from synapse.rest.health import HealthResource
from synapse.rest.key.v2 import KeyApiV2Resource
from synapse.rest.key.v2 import KeyResource
from synapse.rest.synapse.client import build_synapse_client_resource_tree
from synapse.rest.well_known import well_known_resource
from synapse.server import HomeServer
@ -178,13 +178,13 @@ class KeyUploadServlet(RestServlet):
# Proxy headers from the original request, such as the auth headers
# (in case the access token is there) and the original IP /
# User-Agent of the request.
headers = {
header: request.requestHeaders.getRawHeaders(header, [])
headers: Dict[bytes, List[bytes]] = {
header: list(request.requestHeaders.getRawHeaders(header, []))
for header in (b"Authorization", b"User-Agent")
}
# Add the previous hop to the X-Forwarded-For header.
x_forwarded_for = request.requestHeaders.getRawHeaders(
b"X-Forwarded-For", []
x_forwarded_for = list(
request.requestHeaders.getRawHeaders(b"X-Forwarded-For", [])
)
# we use request.client here, since we want the previous hop, not the
# original client (as returned by request.getClientAddress()).
@ -325,13 +325,13 @@ class GenericWorkerServer(HomeServer):
presence.register_servlets(self, resource)
resources.update({CLIENT_API_PREFIX: resource})
resources[CLIENT_API_PREFIX] = resource
resources.update(build_synapse_client_resource_tree(self))
resources.update({"/.well-known": well_known_resource(self)})
resources["/.well-known"] = well_known_resource(self)
elif name == "federation":
resources.update({FEDERATION_PREFIX: TransportLayerServer(self)})
resources[FEDERATION_PREFIX] = TransportLayerServer(self)
elif name == "media":
if self.config.media.can_load_media_repo:
media_repo = self.get_media_repository_resource()
@ -359,16 +359,12 @@ class GenericWorkerServer(HomeServer):
# Only load the openid resource separately if federation resource
# is not specified since federation resource includes openid
# resource.
resources.update(
{
FEDERATION_PREFIX: TransportLayerServer(
self, servlet_groups=["openid"]
)
}
resources[FEDERATION_PREFIX] = TransportLayerServer(
self, servlet_groups=["openid"]
)
if name in ["keys", "federation"]:
resources[SERVER_KEY_V2_PREFIX] = KeyApiV2Resource(self)
resources[SERVER_KEY_PREFIX] = KeyResource(self)
if name == "replication":
resources[REPLICATION_PREFIX] = ReplicationRestResource(self)


@ -31,7 +31,7 @@ from synapse.api.urls import (
LEGACY_MEDIA_PREFIX,
MEDIA_R0_PREFIX,
MEDIA_V3_PREFIX,
SERVER_KEY_V2_PREFIX,
SERVER_KEY_PREFIX,
STATIC_PREFIX,
)
from synapse.app import _base
@ -60,7 +60,7 @@ from synapse.replication.http import REPLICATION_PREFIX, ReplicationRestResource
from synapse.rest import ClientRestResource
from synapse.rest.admin import AdminRestResource
from synapse.rest.health import HealthResource
from synapse.rest.key.v2 import KeyApiV2Resource
from synapse.rest.key.v2 import KeyResource
from synapse.rest.synapse.client import build_synapse_client_resource_tree
from synapse.rest.well_known import well_known_resource
from synapse.server import HomeServer
@ -215,30 +215,22 @@ class SynapseHomeServer(HomeServer):
consent_resource: Resource = ConsentResource(self)
if compress:
consent_resource = gz_wrap(consent_resource)
resources.update({"/_matrix/consent": consent_resource})
resources["/_matrix/consent"] = consent_resource
if name == "federation":
federation_resource: Resource = TransportLayerServer(self)
if compress:
federation_resource = gz_wrap(federation_resource)
resources.update({FEDERATION_PREFIX: federation_resource})
resources[FEDERATION_PREFIX] = federation_resource
if name == "openid":
resources.update(
{
FEDERATION_PREFIX: TransportLayerServer(
self, servlet_groups=["openid"]
)
}
resources[FEDERATION_PREFIX] = TransportLayerServer(
self, servlet_groups=["openid"]
)
if name in ["static", "client"]:
resources.update(
{
STATIC_PREFIX: StaticResource(
os.path.join(os.path.dirname(synapse.__file__), "static")
)
}
resources[STATIC_PREFIX] = StaticResource(
os.path.join(os.path.dirname(synapse.__file__), "static")
)
if name in ["media", "federation", "client"]:
@ -257,7 +249,7 @@ class SynapseHomeServer(HomeServer):
)
if name in ["keys", "federation"]:
resources[SERVER_KEY_V2_PREFIX] = KeyApiV2Resource(self)
resources[SERVER_KEY_PREFIX] = KeyResource(self)
if name == "metrics" and self.config.metrics.enable_metrics:
metrics_resource: Resource = MetricsResource(RegistryProxy)


@ -172,12 +172,24 @@ class ApplicationService:
Returns:
True if this service would like to know about this room.
"""
member_list = await store.get_users_in_room(
# We can use `get_local_users_in_room(...)` here because an application service
# can only be interested in local users of the server it's on (ignore any remote
# users that might match the user namespace regex).
#
# In the future, we can consider re-using
# `store.get_app_service_users_in_room`, which is very similar to this
# function but slightly slower, because here we have an early escape-hatch
# if we find a single user that the appservice is interested in. The juice
# would be worth the squeeze if `store.get_app_service_users_in_room` was
# used in more places besides an experimental MSC. But for now we can avoid
# doing extra work that would barely be used later.
local_user_ids = await store.get_local_users_in_room(
room_id, on_invalidate=cache_context.invalidate
)
# check joined member events
for user_id in member_list:
for user_id in local_user_ids:
if self.is_interested_in_user(user_id):
return True
return False


@ -98,6 +98,9 @@ class ExperimentalConfig(Config):
# MSC3773: Thread notifications
self.msc3773_enabled: bool = experimental.get("msc3773_enabled", False)
# MSC3664: Pushrules to match on related events
self.msc3664_enabled: bool = experimental.get("msc3664_enabled", False)
# MSC3848: Introduce errcodes for specific event sending failures
self.msc3848_enabled: bool = experimental.get("msc3848_enabled", False)
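These flags are read from the `experimental_features` section of the homeserver config; a hedged sketch of the round trip for the new MSC3664 flag:

```python
# homeserver.yaml equivalent (shown as a comment, since this is the parsed form):
#   experimental_features:
#     msc3664_enabled: true
experimental = {"msc3664_enabled": True}
msc3664_enabled = experimental.get("msc3664_enabled", False)  # defaults to off
```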


@ -53,7 +53,7 @@ DEFAULT_LOG_CONFIG = Template(
# Synapse also supports structured logging for machine readable logs which can
# be ingested by ELK stacks. See [2] for details.
#
# [1]: https://docs.python.org/3.7/library/logging.config.html#configuration-dictionary-schema
# [1]: https://docs.python.org/3/library/logging.config.html#configuration-dictionary-schema
# [2]: https://matrix-org.github.io/synapse/latest/structured_logging.html
version: 1


@ -123,6 +123,8 @@ OIDC_PROVIDER_CONFIG_SCHEMA = {
"userinfo_endpoint": {"type": "string"},
"jwks_uri": {"type": "string"},
"skip_verification": {"type": "boolean"},
"backchannel_logout_enabled": {"type": "boolean"},
"backchannel_logout_ignore_sub": {"type": "boolean"},
"user_profile_method": {
"type": "string",
"enum": ["auto", "userinfo_endpoint"],
@ -292,6 +294,10 @@ def _parse_oidc_config_dict(
token_endpoint=oidc_config.get("token_endpoint"),
userinfo_endpoint=oidc_config.get("userinfo_endpoint"),
jwks_uri=oidc_config.get("jwks_uri"),
backchannel_logout_enabled=oidc_config.get("backchannel_logout_enabled", False),
backchannel_logout_ignore_sub=oidc_config.get(
"backchannel_logout_ignore_sub", False
),
skip_verification=oidc_config.get("skip_verification", False),
user_profile_method=oidc_config.get("user_profile_method", "auto"),
allow_existing_users=oidc_config.get("allow_existing_users", False),
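A hedged sketch of one `oidc_providers` entry exercising the two new options (the `idp_id` value is hypothetical, and the other required provider fields such as `issuer` and client credentials are omitted for brevity):

```python
# Parsed form of an oidc_providers entry enabling back-channel logout.
oidc_provider = {
    "idp_id": "example_op",                  # hypothetical provider id
    "backchannel_logout_enabled": True,      # react to back-channel logouts
    "backchannel_logout_ignore_sub": False,  # still match on the `sub` claim
}
```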
@ -368,6 +374,12 @@ class OidcProviderConfig:
# "openid" scope is used.
jwks_uri: Optional[str]
# Whether Synapse should react to backchannel logouts
backchannel_logout_enabled: bool
# Whether Synapse should ignore the `sub` claim in backchannel logouts or not.
backchannel_logout_ignore_sub: bool
# Whether to skip metadata verification
skip_verification: bool


@ -80,6 +80,18 @@ PDU_RETRY_TIME_MS = 1 * 60 * 1000
T = TypeVar("T")
@attr.s(frozen=True, slots=True, auto_attribs=True)
class PulledPduInfo:
"""
A result object that stores the PDU along with info about it, such as which
homeserver we pulled it from (`pull_origin`).
"""
pdu: EventBase
# Which homeserver we pulled the PDU from
pull_origin: str
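Callers of `get_pdu` now unwrap this object instead of receiving the event directly; a minimal calling-convention sketch (argument names per this diff, wrapped in a function so the `await` is valid):

```python
async def fetch_pdu(federation_client, destinations, event_id, room_version):
    # get_pdu now returns Optional[PulledPduInfo] rather than Optional[EventBase].
    pulled = await federation_client.get_pdu(
        destinations=destinations,
        event_id=event_id,
        room_version=room_version,
        timeout=10000,
    )
    if pulled is None:
        return None
    # pull_origin records which homeserver actually served us the event.
    return pulled.pdu, pulled.pull_origin
```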
class InvalidResponseError(RuntimeError):
"""Helper for _try_destination_list: indicates that the server returned a response
we couldn't parse
@ -114,7 +126,9 @@ class FederationClient(FederationBase):
self.hostname = hs.hostname
self.signing_key = hs.signing_key
self._get_pdu_cache: ExpiringCache[str, EventBase] = ExpiringCache(
# Cache mapping `event_id` to a tuple of the event itself and the `pull_origin`
# (which server we pulled the event from)
self._get_pdu_cache: ExpiringCache[str, Tuple[EventBase, str]] = ExpiringCache(
cache_name="get_pdu_cache",
clock=self._clock,
max_len=1000,
@ -352,11 +366,11 @@ class FederationClient(FederationBase):
@tag_args
async def get_pdu(
self,
destinations: Iterable[str],
destinations: Collection[str],
event_id: str,
room_version: RoomVersion,
timeout: Optional[int] = None,
) -> Optional[EventBase]:
) -> Optional[PulledPduInfo]:
"""Requests the PDU with given origin and ID from the remote home
servers.
@ -371,11 +385,11 @@ class FederationClient(FederationBase):
moving to the next destination. None indicates no timeout.
Returns:
The requested PDU, or None if we were unable to find it.
The requested PDU wrapped in `PulledPduInfo`, or None if we were unable to find it.
"""
logger.debug(
"get_pdu: event_id=%s from destinations=%s", event_id, destinations
"get_pdu(event_id=%s): from destinations=%s", event_id, destinations
)
# TODO: Rate limit the number of times we try and get the same event.
@ -384,19 +398,25 @@ class FederationClient(FederationBase):
# it gets persisted to the database), so we cache the results of the lookup.
# Note that this is separate to the regular get_event cache which caches
# events once they have been persisted.
event = self._get_pdu_cache.get(event_id)
get_pdu_cache_entry = self._get_pdu_cache.get(event_id)
event = None
pull_origin = None
if get_pdu_cache_entry:
event, pull_origin = get_pdu_cache_entry
# If we don't see the event in the cache, go try to fetch it from the
# provided remote federated destinations
if not event:
else:
pdu_attempts = self.pdu_destination_tried.setdefault(event_id, {})
# TODO: We can probably refactor this to use `_try_destination_list`
for destination in destinations:
now = self._clock.time_msec()
last_attempt = pdu_attempts.get(destination, 0)
if last_attempt + PDU_RETRY_TIME_MS > now:
logger.debug(
"get_pdu: skipping destination=%s because we tried it recently last_attempt=%s and we only check every %s (now=%s)",
"get_pdu(event_id=%s): skipping destination=%s because we tried it recently last_attempt=%s and we only check every %s (now=%s)",
event_id,
destination,
last_attempt,
PDU_RETRY_TIME_MS,
@ -411,43 +431,48 @@ class FederationClient(FederationBase):
room_version=room_version,
timeout=timeout,
)
pull_origin = destination
pdu_attempts[destination] = now
if event:
# Prime the cache
self._get_pdu_cache[event.event_id] = event
self._get_pdu_cache[event.event_id] = (event, pull_origin)
# Now that we have an event, we can break out of this
# loop and stop asking other destinations.
break
except NotRetryingDestination as e:
logger.info("get_pdu(event_id=%s): %s", event_id, e)
continue
except FederationDeniedError:
logger.info(
"get_pdu(event_id=%s): Not attempting to fetch PDU from %s because the homeserver is not on our federation whitelist",
event_id,
destination,
)
continue
except SynapseError as e:
logger.info(
"Failed to get PDU %s from %s because %s",
"get_pdu(event_id=%s): Failed to get PDU from %s because %s",
event_id,
destination,
e,
)
continue
except NotRetryingDestination as e:
logger.info(str(e))
continue
except FederationDeniedError as e:
logger.info(str(e))
continue
except Exception as e:
pdu_attempts[destination] = now
logger.info(
"Failed to get PDU %s from %s because %s",
"get_pdu(event_id=): Failed to get PDU from %s because %s",
event_id,
destination,
e,
)
continue
if not event:
if not event or not pull_origin:
return None
# `event` now refers to an object stored in `get_pdu_cache`. Our
@ -459,7 +484,7 @@ class FederationClient(FederationBase):
event.room_version,
)
return event_copy
return PulledPduInfo(event_copy, pull_origin)
@trace
@tag_args
@ -699,12 +724,14 @@ class FederationClient(FederationBase):
pdu_origin = get_domain_from_id(pdu.sender)
if not res and pdu_origin != origin:
try:
res = await self.get_pdu(
pulled_pdu_info = await self.get_pdu(
destinations=[pdu_origin],
event_id=pdu.event_id,
room_version=room_version,
timeout=10000,
)
if pulled_pdu_info is not None:
res = pulled_pdu_info.pdu
except SynapseError:
pass
@ -806,6 +833,7 @@ class FederationClient(FederationBase):
)
for destination in destinations:
# We don't want to ask our own server for information we don't have
if destination == self.server_name:
continue
@ -814,9 +842,21 @@ class FederationClient(FederationBase):
except (
RequestSendFailed,
InvalidResponseError,
NotRetryingDestination,
) as e:
logger.warning("Failed to %s via %s: %s", description, destination, e)
# Skip to the next homeserver in the list to try.
continue
except NotRetryingDestination as e:
logger.info("%s: %s", description, e)
continue
except FederationDeniedError:
logger.info(
"%s: Not attempting to %s from %s because the homeserver is not on our federation whitelist",
description,
description,
destination,
)
continue
except UnsupportedRoomVersionError:
raise
except HttpResponseException as e:
@ -1609,6 +1649,54 @@ class FederationClient(FederationBase):
return result
async def timestamp_to_event(
self, *, destinations: List[str], room_id: str, timestamp: int, direction: str
) -> Optional["TimestampToEventResponse"]:
"""
Calls each remote federating server from `destinations` asking for their closest
event to the given timestamp in the given direction until we get a response.
Also validates the response, raising an error if it does not contain the
expected keys.
Args:
destinations: The domains of homeservers to try fetching from
room_id: Room to fetch the event from
timestamp: The point in time (inclusive) we should navigate from in
the given direction to find the closest event.
direction: ["f"|"b"] to indicate whether we should navigate forward
or backward from the given timestamp to find the closest event.
Returns:
A parsed TimestampToEventResponse including the closest event_id
and origin_server_ts or None if no destination has a response.
"""
async def _timestamp_to_event_from_destination(
destination: str,
) -> TimestampToEventResponse:
return await self._timestamp_to_event_from_destination(
destination, room_id, timestamp, direction
)
try:
# Loop through each homeserver candidate until we get a successful response
timestamp_to_event_response = await self._try_destination_list(
"timestamp_to_event",
destinations,
# TODO: The requested timestamp may lie in a part of the
# event graph that the remote server *also* didn't have,
# in which case they will have returned another event
# which may be nowhere near the requested timestamp. In
# the future, we may need to reconcile that gap and ask
# other homeservers, and/or extend `/timestamp_to_event`
# to return events on *both* sides of the timestamp to
# help reconcile the gap faster.
_timestamp_to_event_from_destination,
)
return timestamp_to_event_response
except SynapseError:
return None
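A short usage sketch of the new helper (the attribute access on the response assumes the fields named in the docstring above):

```python
async def closest_event_after(client, destinations, room_id, ts_ms):
    # Ask each candidate homeserver in turn for its closest event at or
    # after ts_ms ("f" = forwards); None means nobody gave a usable answer.
    response = await client.timestamp_to_event(
        destinations=destinations, room_id=room_id, timestamp=ts_ms, direction="f"
    )
    if response is None:
        return None
    return response.event_id, response.origin_server_ts  # assumed field names
```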
async def _timestamp_to_event_from_destination(
self, destination: str, room_id: str, timestamp: int, direction: str
) -> "TimestampToEventResponse":
"""


@ -481,6 +481,14 @@ class FederationServer(FederationBase):
pdu_results[pdu.event_id] = await process_pdu(pdu)
async def process_pdu(pdu: EventBase) -> JsonDict:
"""
Processes a pushed PDU sent to us via a `/send` transaction
Returns:
JsonDict representing a "PDU Processing Result" that will be bundled up
with the other processed PDUs in the `/send` transaction and sent back
to the remote homeserver.
"""
event_id = pdu.event_id
with nested_logging_context(event_id):
try:


@ -499,6 +499,11 @@ class FederationV2InviteServlet(BaseFederationServerServlet):
result = await self.handler.on_invite_request(
origin, event, room_version_id=room_version
)
# We only store invite_room_state for internal use, so remove it before
# returning the event to the remote homeserver.
result["event"].get("unsigned", {}).pop("invite_room_state", None)
return 200, result


@ -100,6 +100,7 @@ class AdminHandler:
user_info_dict["avatar_url"] = profile.avatar_url
user_info_dict["threepids"] = threepids
user_info_dict["external_ids"] = external_ids
user_info_dict["erased"] = await self.store.is_user_erased(user.to_string())
return user_info_dict


@ -38,6 +38,7 @@ from typing import (
import attr
import bcrypt
import unpaddedbase64
from prometheus_client import Counter
from twisted.internet.defer import CancelledError
from twisted.web.server import Request
@ -48,6 +49,7 @@ from synapse.api.errors import (
Codes,
InteractiveAuthIncompleteError,
LoginError,
NotFoundError,
StoreError,
SynapseError,
UserDeactivatedError,
@ -63,10 +65,14 @@ from synapse.http.server import finish_request, respond_with_html
from synapse.http.site import SynapseRequest
from synapse.logging.context import defer_to_thread
from synapse.metrics.background_process_metrics import run_as_background_process
from synapse.storage.databases.main.registration import (
LoginTokenExpired,
LoginTokenLookupResult,
LoginTokenReused,
)
from synapse.types import JsonDict, Requester, UserID
from synapse.util import stringutils as stringutils
from synapse.util.async_helpers import delay_cancellation, maybe_awaitable
from synapse.util.macaroons import LoginTokenAttributes
from synapse.util.msisdn import phone_number_to_msisdn
from synapse.util.stringutils import base62_encode
from synapse.util.threepids import canonicalise_email
@ -80,6 +86,12 @@ logger = logging.getLogger(__name__)
INVALID_USERNAME_OR_PASSWORD = "Invalid username or password"
invalid_login_token_counter = Counter(
"synapse_user_login_invalid_login_tokens",
"Counts the number of rejected m.login.token on /login",
["reason"],
)
def convert_client_dict_legacy_fields_to_identifier(
submission: JsonDict,
@ -883,6 +895,25 @@ class AuthHandler:
return True
async def create_login_token_for_user_id(
self,
user_id: str,
duration_ms: int = (2 * 60 * 1000),
auth_provider_id: Optional[str] = None,
auth_provider_session_id: Optional[str] = None,
) -> str:
login_token = self.generate_login_token()
now = self._clock.time_msec()
expiry_ts = now + duration_ms
await self.store.add_login_token_to_user(
user_id=user_id,
token=login_token,
expiry_ts=expiry_ts,
auth_provider_id=auth_provider_id,
auth_provider_session_id=auth_provider_session_id,
)
return login_token
async def create_refresh_token_for_user_id(
self,
user_id: str,
@ -1401,6 +1432,18 @@ class AuthHandler:
return None
return user_id
def generate_login_token(self) -> str:
"""Generates an opaque string, for use as an short-term login token"""
# we use the following format for access tokens:
# syl_<random string>_<base62 crc check>
random_string = stringutils.random_string(20)
base = f"syl_{random_string}"
crc = base62_encode(crc32(base.encode("ascii")), minwidth=6)
return f"{base}_{crc}"
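For illustration, a self-contained sketch of the token shape produced above (the `base62_encode` here is a hypothetical stand-in for `synapse.util.stringutils.base62_encode`, whose exact alphabet is an assumption):

```python
import string
from random import SystemRandom
from zlib import crc32

_ALPHABET = string.digits + string.ascii_letters  # assumed base62 alphabet

def base62_encode(num: int, minwidth: int = 1) -> str:
    # Hypothetical stand-in for synapse.util.stringutils.base62_encode.
    out = ""
    while num:
        num, rem = divmod(num, 62)
        out = _ALPHABET[rem] + out
    return out.rjust(minwidth, "0")

def generate_login_token() -> str:
    # "syl_" + 20 random characters + "_" + base62-encoded CRC32 of the prefix.
    rand = "".join(SystemRandom().choices(string.ascii_letters + string.digits, k=20))
    base = f"syl_{rand}"
    return f"{base}_{base62_encode(crc32(base.encode('ascii')), minwidth=6)}"
```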
def generate_access_token(self, for_user: UserID) -> str:
"""Generates an opaque string, for use as an access token"""
@ -1427,16 +1470,17 @@ class AuthHandler:
crc = base62_encode(crc32(base.encode("ascii")), minwidth=6)
return f"{base}_{crc}"
async def validate_short_term_login_token(
self, login_token: str
) -> LoginTokenAttributes:
async def consume_login_token(self, login_token: str) -> LoginTokenLookupResult:
try:
res = self.macaroon_gen.verify_short_term_login_token(login_token)
except Exception:
raise AuthError(403, "Invalid login token", errcode=Codes.FORBIDDEN)
return await self.store.consume_login_token(login_token)
except LoginTokenExpired:
invalid_login_token_counter.labels("expired").inc()
except LoginTokenReused:
invalid_login_token_counter.labels("reused").inc()
except NotFoundError:
invalid_login_token_counter.labels("not found").inc()
await self.auth_blocking.check_auth_blocking(res.user_id)
return res
raise AuthError(403, "Invalid login token", errcode=Codes.FORBIDDEN)
async def delete_access_token(self, access_token: str) -> None:
"""Invalidate a single access token
@ -1711,7 +1755,7 @@ class AuthHandler:
)
# Create a login token
login_token = self.macaroon_gen.generate_short_term_login_token(
login_token = await self.create_login_token_for_user_id(
registered_user_id,
auth_provider_id=auth_provider_id,
auth_provider_session_id=auth_provider_session_id,


@ -49,6 +49,7 @@ logger = logging.getLogger(__name__)
class E2eKeysHandler:
def __init__(self, hs: "HomeServer"):
self.config = hs.config
self.store = hs.get_datastores().main
self.federation = hs.get_federation_client()
self.device_handler = hs.get_device_handler()
@ -431,13 +432,17 @@ class E2eKeysHandler:
@trace
@cancellable
async def query_local_devices(
self, query: Mapping[str, Optional[List[str]]]
self,
query: Mapping[str, Optional[List[str]]],
include_displaynames: bool = True,
) -> Dict[str, Dict[str, dict]]:
"""Get E2E device keys for local users
Args:
query: map from user_id to a list
of devices to query (None for all devices)
include_displaynames: Whether to include device displaynames in the returned
device details.
Returns:
A map from user_id -> device_id -> device details
@ -469,7 +474,9 @@ class E2eKeysHandler:
# make sure that each queried user appears in the result dict
result_dict[user_id] = {}
results = await self.store.get_e2e_device_keys_for_cs_api(local_query)
results = await self.store.get_e2e_device_keys_for_cs_api(
local_query, include_displaynames
)
# Build the result structure
for user_id, device_keys in results.items():
@ -482,11 +489,33 @@ class E2eKeysHandler:
async def on_federation_query_client_keys(
self, query_body: Dict[str, Dict[str, Optional[List[str]]]]
) -> JsonDict:
"""Handle a device key query from a federated server"""
"""Handle a device key query from a federated server:
Handles the path: GET /_matrix/federation/v1/users/keys/query
Args:
            query_body: The body of the query request. Should contain a key
                "device_keys" that maps to a dictionary of user IDs -> list of
                device IDs. If the list of device IDs is empty, all devices of
that user will be queried.
Returns:
A json dictionary containing the following:
- device_keys: A dictionary containing the requested device information.
- master_keys: An optional dictionary of user ID -> master cross-signing
key info.
- self_signing_key: An optional dictionary of user ID -> self-signing
key info.
"""
device_keys_query: Dict[str, Optional[List[str]]] = query_body.get(
"device_keys", {}
)
res = await self.query_local_devices(device_keys_query)
res = await self.query_local_devices(
device_keys_query,
include_displaynames=(
self.config.federation.allow_device_name_lookup_over_federation
),
)
ret = {"device_keys": res}
# add in the cross-signing keys


@ -442,6 +442,15 @@ class FederationHandler:
# appropriate stuff.
# TODO: We can probably do something more intelligent here.
return True
except NotRetryingDestination as e:
logger.info("_maybe_backfill_inner: %s", e)
continue
except FederationDeniedError:
logger.info(
"_maybe_backfill_inner: Not attempting to backfill from %s because the homeserver is not on our federation whitelist",
dom,
)
continue
except (SynapseError, InvalidResponseError) as e:
logger.info("Failed to backfill from %s because %s", dom, e)
continue
@ -477,15 +486,9 @@ class FederationHandler:
logger.info("Failed to backfill from %s because %s", dom, e)
continue
except NotRetryingDestination as e:
logger.info(str(e))
continue
except RequestSendFailed as e:
logger.info("Failed to get backfill from %s because %s", dom, e)
continue
except FederationDeniedError as e:
logger.info(e)
continue
except Exception as e:
logger.exception("Failed to backfill from %s because %s", dom, e)
continue
@ -1017,7 +1020,9 @@ class FederationHandler:
context = EventContext.for_outlier(self._storage_controllers)
await self._bulk_push_rule_evaluator.action_for_event_by_user(event, context)
await self._bulk_push_rule_evaluator.action_for_events_by_user(
[(event, context)]
)
try:
await self._federation_event_handler.persist_events_and_notify(
event.room_id, [(event, context)]


@ -58,7 +58,7 @@ from synapse.event_auth import (
)
from synapse.events import EventBase
from synapse.events.snapshot import EventContext
from synapse.federation.federation_client import InvalidResponseError
from synapse.federation.federation_client import InvalidResponseError, PulledPduInfo
from synapse.logging.context import nested_logging_context
from synapse.logging.opentracing import (
SynapseTags,
@ -1517,8 +1517,8 @@ class FederationEventHandler:
)
async def backfill_event_id(
self, destination: str, room_id: str, event_id: str
) -> EventBase:
self, destinations: List[str], room_id: str, event_id: str
) -> PulledPduInfo:
"""Backfill a single event and persist it as a non-outlier which means
we also pull in all of the state and auth events necessary for it.
@ -1530,24 +1530,21 @@ class FederationEventHandler:
Raises:
FederationError if we are unable to find the event from the destination
"""
logger.info(
"backfill_event_id: event_id=%s from destination=%s", event_id, destination
)
logger.info("backfill_event_id: event_id=%s", event_id)
room_version = await self._store.get_room_version(room_id)
event_from_response = await self._federation_client.get_pdu(
[destination],
pulled_pdu_info = await self._federation_client.get_pdu(
destinations,
event_id,
room_version,
)
if not event_from_response:
if not pulled_pdu_info:
raise FederationError(
"ERROR",
404,
"Unable to find event_id=%s from destination=%s to backfill."
% (event_id, destination),
f"Unable to find event_id={event_id} from remote servers to backfill.",
affected=event_id,
)
@ -1555,13 +1552,13 @@ class FederationEventHandler:
# and auth events to de-outlier it. This also sets up the necessary
# `state_groups` for the event.
await self._process_pulled_events(
destination,
[event_from_response],
pulled_pdu_info.pull_origin,
[pulled_pdu_info.pdu],
# Prevent notifications going to clients
backfilled=True,
)
return event_from_response
return pulled_pdu_info
@trace
@tag_args
@ -1584,19 +1581,19 @@ class FederationEventHandler:
async def get_event(event_id: str) -> None:
with nested_logging_context(event_id):
try:
event = await self._federation_client.get_pdu(
pulled_pdu_info = await self._federation_client.get_pdu(
[destination],
event_id,
room_version,
)
if event is None:
if pulled_pdu_info is None:
logger.warning(
"Server %s didn't return event %s",
destination,
event_id,
)
return
events.append(event)
events.append(pulled_pdu_info.pdu)
except Exception as e:
logger.warning(
@ -2171,8 +2168,8 @@ class FederationEventHandler:
min_depth,
)
else:
await self._bulk_push_rule_evaluator.action_for_event_by_user(
event, context
await self._bulk_push_rule_evaluator.action_for_events_by_user(
[(event, context)]
)
try:


@ -1442,17 +1442,9 @@ class EventCreationHandler:
a room that has been un-partial stated.
"""
for event, context in events_and_context:
# Skip push notification actions for historical messages
# because we don't want to notify people about old history back in time.
# The historical messages also do not have the proper `context.current_state_ids`
# and `state_groups` because they have `prev_events` that aren't persisted yet
# (historical messages persisted in reverse-chronological order).
if not event.internal_metadata.is_historical() and not event.content.get(EventContentFields.MSC2716_HISTORICAL):
with opentracing.start_active_span("calculate_push_actions"):
await self._bulk_push_rule_evaluator.action_for_event_by_user(
event, context
)
await self._bulk_push_rule_evaluator.action_for_events_by_user(
events_and_context
)
try:
# If we're a worker we need to hit out to the master.


@ -12,14 +12,28 @@
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import binascii
import inspect
import json
import logging
from typing import TYPE_CHECKING, Any, Dict, Generic, List, Optional, TypeVar, Union
from typing import (
TYPE_CHECKING,
Any,
Dict,
Generic,
List,
Optional,
Type,
TypeVar,
Union,
)
from urllib.parse import urlencode, urlparse
import attr
import unpaddedbase64
from authlib.common.security import generate_token
from authlib.jose import JsonWebToken, jwt
from authlib.jose import JsonWebToken, JWTClaims
from authlib.jose.errors import InvalidClaimError, JoseError, MissingClaimError
from authlib.oauth2.auth import ClientAuth
from authlib.oauth2.rfc6749.parameters import prepare_grant_uri
from authlib.oidc.core import CodeIDToken, UserInfo
@ -35,9 +49,12 @@ from typing_extensions import TypedDict
from twisted.web.client import readBody
from twisted.web.http_headers import Headers
from synapse.api.errors import SynapseError
from synapse.config import ConfigError
from synapse.config.oidc import OidcProviderClientSecretJwtKey, OidcProviderConfig
from synapse.handlers.sso import MappingException, UserAttributes
from synapse.http.server import finish_request
from synapse.http.servlet import parse_string
from synapse.http.site import SynapseRequest
from synapse.logging.context import make_deferred_yieldable
from synapse.types import JsonDict, UserID, map_username_to_mxid_localpart
@ -88,6 +105,8 @@ class Token(TypedDict):
#: there is no real point of doing this in our case.
JWK = Dict[str, str]
C = TypeVar("C")
#: A JWK Set, as per RFC7517 sec 5.
class JWKS(TypedDict):
@ -247,6 +266,80 @@ class OidcHandler:
await oidc_provider.handle_oidc_callback(request, session_data, code)
async def handle_backchannel_logout(self, request: SynapseRequest) -> None:
"""Handle an incoming request to /_synapse/client/oidc/backchannel_logout
        This extracts the logout_token from the request and tries to figure out
        which OpenID Provider it is coming from. This works by matching the iss claim
        with the issuer and the aud claim with the client_id.

        Since at this point we don't know who signed the JWT, we can't just
        decode it using authlib, which always verifies the signature. We
        have to decode it manually without validating the signature. The actual JWT
        verification is done in the `OidcProvider.handle_backchannel_logout` method,
        once we have figured out which provider sent the request.
Args:
request: the incoming request from the browser.
"""
logout_token = parse_string(request, "logout_token")
if logout_token is None:
raise SynapseError(400, "Missing logout_token in request")
# A JWT looks like this:
# header.payload.signature
# where all parts are encoded with urlsafe base64.
# The aud and iss claims we care about are in the payload part, which
# is a JSON object.
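        # For illustration, a decoded logout token payload (values are the
        # hypothetical ones from the Back-Channel Logout spec) looks roughly like:
        #
        #     {
        #         "iss": "https://server.example.com",
        #         "aud": "s6BhdRkqt3",
        #         "iat": 1471566154,
        #         "jti": "bWJq",
        #         "sid": "08a5019c-17e1-4977-8f42-65a12843ea02",
        #         "events": {
        #             "http://schemas.openid.net/event/backchannel-logout": {}
        #         }
        #     }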
try:
# By destructuring the list after splitting, we ensure that we have
# exactly 3 segments
_, payload, _ = logout_token.split(".")
except ValueError:
raise SynapseError(400, "Invalid logout_token in request")
try:
payload_bytes = unpaddedbase64.decode_base64(payload)
claims = json_decoder.decode(payload_bytes.decode("utf-8"))
except (json.JSONDecodeError, binascii.Error, UnicodeError):
raise SynapseError(400, "Invalid logout_token payload in request")
try:
# Let's extract the iss and aud claims
iss = claims["iss"]
aud = claims["aud"]
            # The aud claim can be either a string or a list of strings. Here we
            # normalize it to a list of strings.
if isinstance(aud, str):
aud = [aud]
# Check that we have the right types for the aud and the iss claims
if not isinstance(iss, str) or not isinstance(aud, list):
raise TypeError()
for a in aud:
if not isinstance(a, str):
raise TypeError()
            # At this point we have properly checked the types of both claims
issuer: str = iss
audience: List[str] = aud
except (TypeError, KeyError):
raise SynapseError(400, "Invalid issuer/audience in logout_token")
# Now that we know the audience and the issuer, we can figure out from
# what provider it is coming from
oidc_provider: Optional[OidcProvider] = None
for provider in self._providers.values():
if provider.issuer == issuer and provider.client_id in audience:
oidc_provider = provider
break
if oidc_provider is None:
raise SynapseError(400, "Could not find the OP that issued this event")
# Ask the provider to handle the logout request.
await oidc_provider.handle_backchannel_logout(request, logout_token)
class OidcError(Exception):
"""Used to catch errors when calling the token_endpoint"""
@ -275,6 +368,7 @@ class OidcProvider:
provider: OidcProviderConfig,
):
self._store = hs.get_datastores().main
self._clock = hs.get_clock()
self._macaroon_generaton = macaroon_generator
@ -341,6 +435,7 @@ class OidcProvider:
self.idp_brand = provider.idp_brand
self._sso_handler = hs.get_sso_handler()
self._device_handler = hs.get_device_handler()
self._sso_handler.register_identity_provider(self)
@ -399,6 +494,41 @@ class OidcProvider:
# If we're not using userinfo, we need a valid jwks to validate the ID token
m.validate_jwks_uri()
if self._config.backchannel_logout_enabled:
if not m.get("backchannel_logout_supported", False):
                logger.warning(
                    "OIDC Back-Channel Logout is enabled for issuer %r "
                    "but it does not advertise support for it",
self.issuer,
)
elif not m.get("backchannel_logout_session_supported", False):
logger.warning(
"OIDC Back-Channel Logout is enabled and supported "
"by issuer %r but it might not send a session ID with "
"logout tokens, which is required for the logouts to work",
self.issuer,
)
if not self._config.backchannel_logout_ignore_sub:
            # If OIDC backchannel logouts are enabled, the user mapping provider
            # should use the `sub` claim. We verify that by mapping a dummy user and
            # checking that we get back the sub claim.
user = UserInfo({"sub": "thisisasubject"})
try:
subject = self._user_mapping_provider.get_remote_user_id(user)
if subject != user["sub"]:
raise ValueError("Unexpected subject")
except Exception:
logger.warning(
f"OIDC Back-Channel Logout is enabled for issuer {self.issuer!r} "
"but it looks like the configured `user_mapping_provider` "
"does not use the `sub` claim as subject. If it is the case, "
"and you want Synapse to ignore the `sub` claim in OIDC "
"Back-Channel Logouts, set `backchannel_logout_ignore_sub` "
"to `true` in the issuer config."
)
@property
def _uses_userinfo(self) -> bool:
"""Returns True if the ``userinfo_endpoint`` should be used.
@ -414,6 +544,16 @@ class OidcProvider:
or self._user_profile_method == "userinfo_endpoint"
)
@property
def issuer(self) -> str:
"""The issuer identifying this provider."""
return self._config.issuer
@property
def client_id(self) -> str:
"""The client_id used when interacting with this provider."""
return self._config.client_id
async def load_metadata(self, force: bool = False) -> OpenIDProviderMetadata:
"""Return the provider metadata.
@ -661,6 +801,59 @@ class OidcProvider:
return UserInfo(resp)
async def _verify_jwt(
self,
alg_values: List[str],
token: str,
claims_cls: Type[C],
claims_options: Optional[dict] = None,
claims_params: Optional[dict] = None,
) -> C:
"""Decode and validate a JWT, re-fetching the JWKS as needed.
Args:
alg_values: list of `alg` values allowed when verifying the JWT.
token: the JWT.
claims_cls: the JWTClaims class to use to validate the claims.
claims_options: dict of options passed to the `claims_cls` constructor.
claims_params: dict of params passed to the `claims_cls` constructor.
Returns:
The decoded claims in the JWT.
"""
jwt = JsonWebToken(alg_values)
logger.debug("Attempting to decode JWT (%s) %r", claims_cls.__name__, token)
# Try to decode the keys in cache first, then retry by forcing the keys
# to be reloaded
jwk_set = await self.load_jwks()
try:
claims = jwt.decode(
token,
key=jwk_set,
claims_cls=claims_cls,
claims_options=claims_options,
claims_params=claims_params,
)
except ValueError:
logger.info("Reloading JWKS after decode error")
jwk_set = await self.load_jwks(force=True) # try reloading the jwks
claims = jwt.decode(
token,
key=jwk_set,
claims_cls=claims_cls,
claims_options=claims_options,
claims_params=claims_params,
)
logger.debug("Decoded JWT (%s) %r; validating", claims_cls.__name__, claims)
claims.validate(
now=self._clock.time(), leeway=120
) # allows 2 min of clock skew
return claims
async def _parse_id_token(self, token: Token, nonce: str) -> CodeIDToken:
"""Return an instance of UserInfo from token's ``id_token``.
@ -673,7 +866,14 @@ class OidcProvider:
Returns:
The decoded claims in the ID token.
"""
id_token = token.get("id_token")
        # That has theoretically been checked by the caller, so even though
        # assertions are not enabled in production, this is mainly here to appease mypy
assert id_token is not None
metadata = await self.load_metadata()
claims_params = {
"nonce": nonce,
"client_id": self._client_auth.client_id,
@ -683,39 +883,17 @@ class OidcProvider:
# in the `id_token` that we can check against.
claims_params["access_token"] = token["access_token"]
claims_options = {"iss": {"values": [metadata["issuer"]]}}
alg_values = metadata.get("id_token_signing_alg_values_supported", ["RS256"])
jwt = JsonWebToken(alg_values)
claim_options = {"iss": {"values": [metadata["issuer"]]}}
id_token = token["id_token"]
logger.debug("Attempting to decode JWT id_token %r", id_token)
# Try to decode the keys in cache first, then retry by forcing the keys
# to be reloaded
jwk_set = await self.load_jwks()
try:
claims = jwt.decode(
id_token,
key=jwk_set,
claims_cls=CodeIDToken,
claims_options=claim_options,
claims_params=claims_params,
)
except ValueError:
logger.info("Reloading JWKS after decode error")
jwk_set = await self.load_jwks(force=True) # try reloading the jwks
claims = jwt.decode(
id_token,
key=jwk_set,
claims_cls=CodeIDToken,
claims_options=claim_options,
claims_params=claims_params,
)
logger.debug("Decoded id_token JWT %r; validating", claims)
claims.validate(leeway=120) # allows 2 min of clock skew
claims = await self._verify_jwt(
alg_values=alg_values,
token=id_token,
claims_cls=CodeIDToken,
claims_options=claims_options,
claims_params=claims_params,
)
return claims
@ -1036,6 +1214,146 @@ class OidcProvider:
# to be strings.
return str(remote_user_id)
async def handle_backchannel_logout(
self, request: SynapseRequest, logout_token: str
) -> None:
"""Handle an incoming request to /_synapse/client/oidc/backchannel_logout
The OIDC Provider posts a logout token to this endpoint when a user
session ends. That token is a JWT signed with the same keys as
ID tokens. The OpenID Connect Back-Channel Logout draft explains how to
validate the JWT and figure out what session to end.
Args:
request: The request to respond to
logout_token: The logout token (a JWT) extracted from the request body
"""
# Back-Channel Logout can be disabled in the config, hence this check.
# This is not that important for now since Synapse is registered
        # manually with the OP, so not specifying the backchannel-logout URI is
        # as effective as disabling it here. It might make more sense if we
# support dynamic registration in Synapse at some point.
if not self._config.backchannel_logout_enabled:
logger.warning(
f"Received an OIDC Back-Channel Logout request from issuer {self.issuer!r} but it is disabled in config"
)
# TODO: this responds with a 400 status code, which is what the OIDC
            # Back-Channel Logout spec expects, but the spec also suggests answering with
# a JSON object, with the `error` and `error_description` fields set, which
# we are not doing here.
# See https://openid.net/specs/openid-connect-backchannel-1_0.html#BCResponse
raise SynapseError(
400, "OpenID Connect Back-Channel Logout is disabled for this provider"
)
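        # Illustrative request shape (hypothetical token value): the OP delivers
        # the logout token as a form-encoded POST, per the Back-Channel Logout spec:
        #
        #     POST /_synapse/client/oidc/backchannel_logout HTTP/1.1
        #     Content-Type: application/x-www-form-urlencoded
        #
        #     logout_token=eyJhbGci...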
metadata = await self.load_metadata()
# As per OIDC Back-Channel Logout 1.0 sec. 2.4:
# A Logout Token MUST be signed and MAY also be encrypted. The same
# keys are used to sign and encrypt Logout Tokens as are used for ID
# Tokens. If the Logout Token is encrypted, it SHOULD replicate the
# iss (issuer) claim in the JWT Header Parameters, as specified in
# Section 5.3 of [JWT].
alg_values = metadata.get("id_token_signing_alg_values_supported", ["RS256"])
# As per sec. 2.6:
# 3. Validate the iss, aud, and iat Claims in the same way they are
# validated in ID Tokens.
# Which means the audience should contain Synapse's client_id and the
# issuer should be the IdP issuer
claims_options = {
"iss": {"values": [metadata["issuer"]]},
"aud": {"values": [self.client_id]},
}
try:
claims = await self._verify_jwt(
alg_values=alg_values,
token=logout_token,
claims_cls=LogoutToken,
claims_options=claims_options,
)
except JoseError:
logger.exception("Invalid logout_token")
raise SynapseError(400, "Invalid logout_token")
# As per sec. 2.6:
# 4. Verify that the Logout Token contains a sub Claim, a sid Claim,
# or both.
# 5. Verify that the Logout Token contains an events Claim whose
        #     value is a JSON object containing the member name
# http://schemas.openid.net/event/backchannel-logout.
# 6. Verify that the Logout Token does not contain a nonce Claim.
# This is all verified by the LogoutToken claims class, so at this
# point the `sid` claim exists and is a string.
sid: str = claims.get("sid")
        # If the `sub` claim was included in the logout token, we check that it
        # matches the right user. We can have cases where the `sub` claim is not
        # the ID saved in the database, so we let admins disable this check in config.
sub: Optional[str] = claims.get("sub")
expected_user_id: Optional[str] = None
if sub is not None and not self._config.backchannel_logout_ignore_sub:
expected_user_id = await self._store.get_user_by_external_id(
self.idp_id, sub
)
# Invalidate any running user-mapping sessions, in-flight login tokens and
# active devices
await self._sso_handler.revoke_sessions_for_provider_session_id(
auth_provider_id=self.idp_id,
auth_provider_session_id=sid,
expected_user_id=expected_user_id,
)
request.setResponseCode(200)
request.setHeader(b"Cache-Control", b"no-cache, no-store")
request.setHeader(b"Pragma", b"no-cache")
finish_request(request)
class LogoutToken(JWTClaims):
"""
    Holds and verifies claims of a logout token, as per
https://openid.net/specs/openid-connect-backchannel-1_0.html#LogoutToken
"""
REGISTERED_CLAIMS = ["iss", "sub", "aud", "iat", "jti", "events", "sid"]
def validate(self, now: Optional[int] = None, leeway: int = 0) -> None:
"""Validate everything in claims payload."""
super().validate(now, leeway)
self.validate_sid()
self.validate_events()
self.validate_nonce()
def validate_sid(self) -> None:
"""Ensure the sid claim is present"""
sid = self.get("sid")
if not sid:
raise MissingClaimError("sid")
if not isinstance(sid, str):
raise InvalidClaimError("sid")
def validate_nonce(self) -> None:
"""Ensure the nonce claim is absent"""
if "nonce" in self:
raise InvalidClaimError("nonce")
def validate_events(self) -> None:
"""Ensure the events claim is present and with the right value"""
events = self.get("events")
if not events:
raise MissingClaimError("events")
if not isinstance(events, dict):
raise InvalidClaimError("events")
if "http://schemas.openid.net/event/backchannel-logout" not in events:
raise InvalidClaimError("events")
# number of seconds a newly-generated client secret should be valid for
CLIENT_SECRET_VALIDITY_SECONDS = 3600
@ -1105,6 +1423,7 @@ class JwtClientSecret:
logger.info(
"Generating new JWT for %s: %s %s", self._oauth_issuer, header, payload
)
jwt = JsonWebToken(header["alg"])
self._cached_secret = jwt.encode(header, payload, self._key.key)
self._cached_secret_replacement_time = (
expires_at - CLIENT_SECRET_MIN_VALIDITY_SECONDS
@ -1119,9 +1438,6 @@ class UserAttributeDict(TypedDict):
emails: List[str]
C = TypeVar("C")
class OidcMappingProvider(Generic[C]):
"""A mapping provider maps a UserInfo object to user attributes.


@ -307,7 +307,11 @@ class ProfileHandler:
if not self.max_avatar_size and not self.allowed_avatar_mimetypes:
return True
server_name, _, media_id = parse_and_validate_mxc_uri(mxc)
host, port, media_id = parse_and_validate_mxc_uri(mxc)
if port is not None:
server_name = host + ":" + str(port)
else:
server_name = host
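        # e.g. an mxc URI "mxc://matrix.example.com:8448/abcdef" (hypothetical)
        # parses to host="matrix.example.com", port=8448, media_id="abcdef",
        # giving server_name="matrix.example.com:8448".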
if server_name == self.server_name:
media_info = await self.store.get_local_media(media_id)


@ -49,7 +49,6 @@ from synapse.api.constants import (
from synapse.api.errors import (
AuthError,
Codes,
HttpResponseException,
LimitExceededError,
NotFoundError,
StoreError,
@ -60,7 +59,6 @@ from synapse.api.room_versions import KNOWN_ROOM_VERSIONS, RoomVersion
from synapse.event_auth import validate_event_for_room_version
from synapse.events import EventBase
from synapse.events.utils import copy_and_fixup_power_levels_contents
from synapse.federation.federation_client import InvalidResponseError
from synapse.handlers.relations import BundledAggregations
from synapse.module_api import NOT_SPAM
from synapse.rest.admin._base import assert_user_is_admin
@ -1070,9 +1068,6 @@ class RoomCreationHandler:
event_keys = {"room_id": room_id, "sender": creator_id, "state_key": ""}
depth = 1
# the last event sent/persisted to the db
last_sent_event_id: Optional[str] = None
# the most recently created event
prev_event: List[str] = []
        # a map of event types, state keys -> event_ids. We collect these mappings as events are
@ -1117,26 +1112,6 @@ class RoomCreationHandler:
return new_event, new_context
async def send(
event: EventBase,
context: synapse.events.snapshot.EventContext,
creator: Requester,
) -> int:
nonlocal last_sent_event_id
ev = await self.event_creation_handler.handle_new_client_event(
requester=creator,
events_and_context=[(event, context)],
ratelimit=False,
ignore_shadow_ban=True,
)
last_sent_event_id = ev.event_id
# we know it was persisted, so must have a stream ordering
assert ev.internal_metadata.stream_ordering
return ev.internal_metadata.stream_ordering
try:
config = self._presets_dict[preset_config]
except KeyError:
@ -1150,10 +1125,14 @@ class RoomCreationHandler:
)
logger.debug("Sending %s in new room", EventTypes.Member)
await send(creation_event, creation_context, creator)
ev = await self.event_creation_handler.handle_new_client_event(
requester=creator,
events_and_context=[(creation_event, creation_context)],
ratelimit=False,
ignore_shadow_ban=True,
)
last_sent_event_id = ev.event_id
# Room create event must exist at this point
assert last_sent_event_id is not None
member_event_id, _ = await self.room_member_handler.update_membership(
creator,
creator.user,
@ -1172,6 +1151,7 @@ class RoomCreationHandler:
depth += 1
state_map[(EventTypes.Member, creator.user.to_string())] = member_event_id
events_to_send = []
# We treat the power levels override specially as this needs to be one
# of the first events that get sent into a room.
pl_content = initial_state.pop((EventTypes.PowerLevels, ""), None)
@ -1180,7 +1160,7 @@ class RoomCreationHandler:
EventTypes.PowerLevels, pl_content, False
)
current_state_group = power_context._state_group
await send(power_event, power_context, creator)
events_to_send.append((power_event, power_context))
else:
power_level_content: JsonDict = {
"users": {creator_id: 9001},
@ -1229,9 +1209,8 @@ class RoomCreationHandler:
False,
)
current_state_group = pl_context._state_group
await send(pl_event, pl_context, creator)
events_to_send.append((pl_event, pl_context))
events_to_send = []
if room_alias and (EventTypes.CanonicalAlias, "") not in initial_state:
room_alias_event, room_alias_context = await create_event(
EventTypes.CanonicalAlias, {"alias": room_alias.to_string()}, True
@ -1509,7 +1488,12 @@ class TimestampLookupHandler:
Raises:
SynapseError if unable to find any event locally in the given direction
"""
logger.debug(
"get_event_for_timestamp(room_id=%s, timestamp=%s, direction=%s) Finding closest event...",
room_id,
timestamp,
direction,
)
local_event_id = await self.store.get_event_id_for_timestamp(
room_id, timestamp, direction
)
@ -1561,85 +1545,54 @@ class TimestampLookupHandler:
)
)
        # Loop through each homeserver candidate until we get a successful response
        for domain in likely_domains:
            # We don't want to ask our own server for information we don't have
            if domain == self.server_name:
                continue

            try:
                remote_response = await self.federation_client.timestamp_to_event(
                    domain, room_id, timestamp, direction
                )
                logger.debug(
                    "get_event_for_timestamp: response from domain(%s)=%s",
                    domain,
                    remote_response,
                )

                remote_event_id = remote_response.event_id
                remote_origin_server_ts = remote_response.origin_server_ts

                # Backfill this event so we can get a pagination token for
                # it with `/context` and paginate `/messages` from this
                # point.
                #
                # TODO: The requested timestamp may lie in a part of the
                #   event graph that the remote server *also* didn't have,
                #   in which case they will have returned another event
                #   which may be nowhere near the requested timestamp. In
                #   the future, we may need to reconcile that gap and ask
                #   other homeservers, and/or extend `/timestamp_to_event`
                #   to return events on *both* sides of the timestamp to
                #   help reconcile the gap faster.
                remote_event = (
                    await self.federation_event_handler.backfill_event_id(
                        domain, room_id, remote_event_id
                    )
                )

                # XXX: When we see that the remote server is not trustworthy,
                # maybe we should not ask them first in the future.
                if remote_origin_server_ts != remote_event.origin_server_ts:
                    logger.info(
                        "get_event_for_timestamp: Remote server (%s) claimed that remote_event_id=%s occurred at remote_origin_server_ts=%s but that isn't true (actually occurred at %s). Their claims are dubious and we should consider not trusting them.",
                        domain,
                        remote_event_id,
                        remote_origin_server_ts,
                        remote_event.origin_server_ts,
                    )

                # Only return the remote event if it's closer than the local event
                if not local_event or (
                    abs(remote_event.origin_server_ts - timestamp)
                    < abs(local_event.origin_server_ts - timestamp)
                ):
                    logger.info(
                        "get_event_for_timestamp: returning remote_event_id=%s (%s) since it's closer to timestamp=%s than local_event=%s (%s)",
                        remote_event_id,
                        remote_event.origin_server_ts,
                        timestamp,
                        local_event.event_id if local_event else None,
                        local_event.origin_server_ts if local_event else None,
                    )
                    return remote_event_id, remote_origin_server_ts
            except (HttpResponseException, InvalidResponseError) as ex:
                # Let's not put a high priority on some other homeserver
                # failing to respond or giving a random response
                logger.debug(
                    "get_event_for_timestamp: Failed to fetch /timestamp_to_event from %s because of exception(%s) %s args=%s",
                    domain,
                    type(ex).__name__,
                    ex,
                    ex.args,
                )
            except Exception:
                # But we do want to see some exceptions in our code
                logger.warning(
                    "get_event_for_timestamp: Failed to fetch /timestamp_to_event from %s because of exception",
                    domain,
                    exc_info=True,
                )

        remote_response = await self.federation_client.timestamp_to_event(
            destinations=likely_domains,
            room_id=room_id,
            timestamp=timestamp,
            direction=direction,
        )
        if remote_response is not None:
            logger.debug(
                "get_event_for_timestamp: remote_response=%s",
                remote_response,
            )

            remote_event_id = remote_response.event_id
            remote_origin_server_ts = remote_response.origin_server_ts

            # Backfill this event so we can get a pagination token for
            # it with `/context` and paginate `/messages` from this
            # point.
            pulled_pdu_info = await self.federation_event_handler.backfill_event_id(
                likely_domains, room_id, remote_event_id
            )
            remote_event = pulled_pdu_info.pdu

            # XXX: When we see that the remote server is not trustworthy,
            # maybe we should not ask them first in the future.
            if remote_origin_server_ts != remote_event.origin_server_ts:
                logger.info(
                    "get_event_for_timestamp: Remote server (%s) claimed that remote_event_id=%s occurred at remote_origin_server_ts=%s but that isn't true (actually occurred at %s). Their claims are dubious and we should consider not trusting them.",
                    pulled_pdu_info.pull_origin,
                    remote_event_id,
                    remote_origin_server_ts,
                    remote_event.origin_server_ts,
                )

            # Only return the remote event if it's closer than the local event
            if not local_event or (
                abs(remote_event.origin_server_ts - timestamp)
                < abs(local_event.origin_server_ts - timestamp)
            ):
                logger.info(
                    "get_event_for_timestamp: returning remote_event_id=%s (%s) since it's closer to timestamp=%s than local_event=%s (%s)",
                    remote_event_id,
                    remote_event.origin_server_ts,
                    timestamp,
                    local_event.event_id if local_event else None,
                    local_event.origin_server_ts if local_event else None,
                )
                return remote_event_id, remote_origin_server_ts
# To appease mypy, we have to add both of these conditions to check for
# `None`. We only expect `local_event` to be `None` when


@ -191,6 +191,7 @@ class SsoHandler:
self._server_name = hs.hostname
self._registration_handler = hs.get_registration_handler()
self._auth_handler = hs.get_auth_handler()
self._device_handler = hs.get_device_handler()
self._error_template = hs.config.sso.sso_error_template
self._bad_user_template = hs.config.sso.sso_auth_bad_user_template
self._profile_handler = hs.get_profile_handler()
@ -1026,6 +1027,76 @@ class SsoHandler:
return True
async def revoke_sessions_for_provider_session_id(
self,
auth_provider_id: str,
auth_provider_session_id: str,
expected_user_id: Optional[str] = None,
) -> None:
"""Revoke any devices and in-flight logins tied to a provider session.
Args:
auth_provider_id: A unique identifier for this SSO provider, e.g.
"oidc" or "saml".
            auth_provider_session_id: The session ID from the provider to log out
            expected_user_id: The user we're expecting to log out. If set, sessions
                belonging to other users are ignored and an error is logged.
"""
# Invalidate any running user-mapping sessions
to_delete = []
for session_id, session in self._username_mapping_sessions.items():
if (
session.auth_provider_id == auth_provider_id
and session.auth_provider_session_id == auth_provider_session_id
):
to_delete.append(session_id)
for session_id in to_delete:
logger.info("Revoking mapping session %s", session_id)
del self._username_mapping_sessions[session_id]
# Invalidate any in-flight login tokens
await self._store.invalidate_login_tokens_by_session_id(
auth_provider_id=auth_provider_id,
auth_provider_session_id=auth_provider_session_id,
)
# Fetch any device(s) in the store associated with the session ID.
devices = await self._store.get_devices_by_auth_provider_session_id(
auth_provider_id=auth_provider_id,
auth_provider_session_id=auth_provider_session_id,
)
# We have no guarantee that all the devices of that session are for the same
# `user_id`. Hence, we have to iterate over the list of devices and log them out
# one by one.
for device in devices:
user_id = device["user_id"]
device_id = device["device_id"]
# If the user_id associated with that device/session is not the one we got
            # out of the `sub` claim, skip that device and log an error.
if expected_user_id is not None and user_id != expected_user_id:
                logger.error(
                    "Received a logout notification from SSO provider "
                    f"{auth_provider_id!r} for the user {expected_user_id!r}, but with "
                    f"a session ID ({auth_provider_session_id!r}) which belongs to "
                    f"{user_id!r}. This may happen when the SSO provider user mapper "
                    "uses something other than the standard attribute as mapping ID. "
                    "For OIDC providers, set `backchannel_logout_ignore_sub` to `true` "
                    "in the provider config if that is the case."
                )
continue
logger.info(
"Logging out %r (device %r) via SSO (%r) logout notification (session %r).",
user_id,
device_id,
auth_provider_id,
auth_provider_session_id,
)
await self._device_handler.delete_devices(user_id, [device_id])
def get_username_mapping_session_cookie_from_request(request: IRequest) -> str:
"""Extract the session ID from the cookie


@ -25,7 +25,6 @@ from typing import (
List,
Mapping,
Optional,
Sequence,
Tuple,
Union,
)
@ -90,14 +89,29 @@ incoming_responses_counter = Counter(
"synapse_http_client_responses", "", ["method", "code"]
)
# the type of the headers list, to be passed to the t.w.h.Headers.
# Actually we can mix str and bytes keys, but Mapping treats 'key' as invariant so
# we simplify.
# the type of the headers map, to be passed to the t.w.h.Headers.
#
# The actual type accepted by Twisted is
#   Mapping[Union[str, bytes], Sequence[Union[str, bytes]]],
# allowing us to mix and match str and bytes freely. However: any str is also a
# Sequence[str]; passing a header string value which is a
# standalone str is interpreted as a sequence of 1-codepoint strings. This is a disastrous footgun.
# We use a narrower value type (RawHeaderValue) to avoid this footgun.
#
# We also simplify the keys to be either all str or all bytes. This helps because
# Dict[K, V] is invariant in K (and indeed V).
RawHeaders = Union[Mapping[str, "RawHeaderValue"], Mapping[bytes, "RawHeaderValue"]]
# the value actually has to be a List, but List is invariant so we can't specify that
# the entries can either be Lists or bytes.
RawHeaderValue = Sequence[Union[str, bytes]]
RawHeaderValue = Union[
List[str],
List[bytes],
List[Union[str, bytes]],
Tuple[str, ...],
Tuple[bytes, ...],
Tuple[Union[str, bytes], ...],
]
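# To illustrate the footgun with a hypothetical header: a bare str satisfies
# Sequence[Union[str, bytes]], so a naive annotation would accept
#
#     Headers({"User-Agent": "Synapse"})
#
# which Twisted then treats as seven one-character header values rather than
# one. The narrowed RawHeaderValue rejects the bare str at type-check time;
# the intended spelling is
#
#     Headers({"User-Agent": ["Synapse"]})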
def check_against_blacklist(


@ -174,8 +174,10 @@ class _BackgroundProcess:
diff = new_stats - self._reported_stats
self._reported_stats = new_stats
_background_process_ru_utime.labels(self.desc).inc(diff.ru_utime)
_background_process_ru_stime.labels(self.desc).inc(diff.ru_stime)
# For unknown reasons, the difference in times can be negative. See comment in
# synapse.http.request_metrics.RequestMetrics.update_metrics.
_background_process_ru_utime.labels(self.desc).inc(max(diff.ru_utime, 0))
_background_process_ru_stime.labels(self.desc).inc(max(diff.ru_stime, 0))
_background_process_db_txn_count.labels(self.desc).inc(diff.db_txn_count)
_background_process_db_txn_duration.labels(self.desc).inc(
diff.db_txn_duration_sec


@ -771,50 +771,11 @@ class ModuleApi:
            auth_provider_session_id: The session ID obtained during login from the SSO IdP,
if any.
"""
# The deprecated `generate_short_term_login_token` method defaulted to an empty
# string for the `auth_provider_id` because of how the underlying macaroon was
# generated. This will change to a proper NULL-able field when the tokens get
# moved to the database.
return self._hs.get_macaroon_generator().generate_short_term_login_token(
return await self._hs.get_auth_handler().create_login_token_for_user_id(
user_id,
auth_provider_id or "",
auth_provider_session_id,
duration_in_ms,
)
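    # Module usage sketch (illustrative): a module holding a ModuleApi reference
    # can mint a token and hand it to a client, which exchanges it via the
    # m.login.token flow on /login:
    #
    #     token = await module_api.create_login_token("@alice:example.com")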
def generate_short_term_login_token(
self,
user_id: str,
duration_in_ms: int = (2 * 60 * 1000),
auth_provider_id: str = "",
auth_provider_session_id: Optional[str] = None,
) -> str:
"""Generate a login token suitable for m.login.token authentication
Added in Synapse v1.9.0.
This was deprecated in Synapse v1.69.0 in favor of create_login_token, and will
be removed in Synapse 1.71.0.
Args:
user_id: gives the ID of the user that the token is for
duration_in_ms: the time that the token will be valid for
auth_provider_id: the ID of the SSO IdP that the user used to authenticate
to get this token, if any. This is encoded in the token so that
/login can report stats on number of successful logins by IdP.
"""
logger.warn(
"A module configured on this server uses ModuleApi.generate_short_term_login_token(), "
"which is deprecated in favor of ModuleApi.create_login_token(), and will be removed in "
"Synapse 1.71.0",
)
return self._hs.get_macaroon_generator().generate_short_term_login_token(
user_id,
auth_provider_id,
auth_provider_session_id,
duration_in_ms,
)
@defer.inlineCallbacks


@ -28,7 +28,7 @@ from typing import (
from prometheus_client import Counter
from synapse.api.constants import MAIN_TIMELINE, EventTypes, Membership, RelationTypes
from synapse.api.constants import MAIN_TIMELINE, EventTypes, Membership, RelationTypes, EventContentFields
from synapse.event_auth import auth_types_for_event, get_user_power_level
from synapse.events import EventBase, relation_from_event
from synapse.events.snapshot import EventContext
@ -45,7 +45,6 @@ if TYPE_CHECKING:
logger = logging.getLogger(__name__)
push_rules_invalidation_counter = Counter(
"synapse_push_bulk_push_rule_evaluator_push_rules_invalidation_counter", ""
)
@ -107,6 +106,8 @@ class BulkPushRuleEvaluator:
self.clock = hs.get_clock()
self._event_auth_handler = hs.get_event_auth_handler()
self._related_event_match_enabled = self.hs.config.experimental.msc3664_enabled
self.room_push_rule_cache_metrics = register_cache(
"cache",
"room_push_rule_cache",
@ -165,8 +166,21 @@ class BulkPushRuleEvaluator:
return rules_by_user
async def _get_power_levels_and_sender_level(
self, event: EventBase, context: EventContext
self,
event: EventBase,
context: EventContext,
event_id_to_event: Mapping[str, EventBase],
) -> Tuple[dict, Optional[int]]:
"""
Given an event and an event context, get the power level event relevant to the event
and the power level of the sender of the event.
Args:
event: event to check
context: context of event to check
event_id_to_event: a mapping of event_id to event for a set of events being
batch persisted. This is needed as the sought-after power level event may
be in this batch rather than the DB
"""
# There are no power levels and sender levels possible to get from outlier
if event.internal_metadata.is_outlier():
return {}, None
@ -177,15 +191,26 @@ class BulkPushRuleEvaluator:
)
pl_event_id = prev_state_ids.get(POWER_KEY)
# fastpath: if there's a power level event, that's all we need, and
# not having a power level event is an extreme edge case
if pl_event_id:
# fastpath: if there's a power level event, that's all we need, and
# not having a power level event is an extreme edge case
auth_events = {POWER_KEY: await self.store.get_event(pl_event_id)}
# Get the power level event from the batch, or fall back to the database.
pl_event = event_id_to_event.get(pl_event_id)
if pl_event:
auth_events = {POWER_KEY: pl_event}
else:
auth_events = {POWER_KEY: await self.store.get_event(pl_event_id)}
else:
auth_events_ids = self._event_auth_handler.compute_auth_events(
event, prev_state_ids, for_verification=False
)
auth_events_dict = await self.store.get_events(auth_events_ids)
# Some needed auth events might be in the batch, combine them with those
# fetched from the database.
for auth_event_id in auth_events_ids:
auth_event = event_id_to_event.get(auth_event_id)
if auth_event:
auth_events_dict[auth_event_id] = auth_event
auth_events = {(e.type, e.state_key): e for e in auth_events_dict.values()}
sender_level = get_user_power_level(event.sender, auth_events)
@ -194,16 +219,81 @@ class BulkPushRuleEvaluator:
return pl_event.content if pl_event else {}, sender_level
    @measure_func("action_for_event_by_user")
    async def action_for_event_by_user(
        self, event: EventBase, context: EventContext
    ) -> None:
        """Given an event and context, evaluate the push rules, check if the message
        should increment the unread count, and insert the results into the
        event_push_actions_staging table.
        """
        if not event.internal_metadata.is_notifiable():
            # Push rules for events that aren't notifiable can't be processed by this
            return

    async def _related_events(self, event: EventBase) -> Dict[str, Dict[str, str]]:
        """Fetches the related events for 'event'. Sets the im.vector.is_falling_back
        key if the event is from a fallback relation.

        Returns:
            Mapping of relation type to flattened events.
        """
        related_events: Dict[str, Dict[str, str]] = {}
        if self._related_event_match_enabled:
            related_event_id = event.content.get("m.relates_to", {}).get("event_id")
            relation_type = event.content.get("m.relates_to", {}).get("rel_type")
            if related_event_id is not None and relation_type is not None:
                related_event = await self.store.get_event(
                    related_event_id, allow_none=True
                )
                if related_event is not None:
                    related_events[relation_type] = _flatten_dict(related_event)

            reply_event_id = (
                event.content.get("m.relates_to", {})
                .get("m.in_reply_to", {})
                .get("event_id")
            )

            # convert replies to pseudo relations
            if reply_event_id is not None:
                related_event = await self.store.get_event(
                    reply_event_id, allow_none=True
                )

                if related_event is not None:
                    related_events["m.in_reply_to"] = _flatten_dict(related_event)

                    # indicate that this is from a fallback relation.
                    if relation_type == "m.thread" and event.content.get(
                        "m.relates_to", {}
                    ).get("is_falling_back", False):
                        related_events["m.in_reply_to"][
                            "im.vector.is_falling_back"
                        ] = ""

        return related_events

    async def action_for_events_by_user(
        self, events_and_context: List[Tuple[EventBase, EventContext]]
    ) -> None:
        """Given a list of events and their associated contexts, evaluate the push rules
        for each event, check if the message should increment the unread count, and
        insert the results into the event_push_actions_staging table.
        """
        # For batched events the power level events may not have been persisted yet,
        # so we pass in the batched events. Thus if the event cannot be found in the
        # database we can check in the batch.
        event_id_to_event = {e.event_id: e for e, _ in events_and_context}
        for event, context in events_and_context:
            await self._action_for_event_by_user(event, context, event_id_to_event)

    @measure_func("action_for_event_by_user")
    async def _action_for_event_by_user(
        self,
        event: EventBase,
        context: EventContext,
        event_id_to_event: Mapping[str, EventBase],
    ) -> None:
        if (
            not event.internal_metadata.is_notifiable()
            or event.internal_metadata.is_historical()
            or event.content.get(EventContentFields.MSC2716_HISTORICAL)
        ):
            # Push rules for events that aren't notifiable can't be processed by this,
            # and we want to skip push notification actions for historical messages
            # because we don't want to notify people about old history back in time.
            # The historical messages also do not have the proper `context.current_state_ids`
            # and `state_groups` because they have `prev_events` that aren't persisted yet
            # (historical messages persisted in reverse-chronological order).
            return
# Disable counting as unread unless the experimental configuration is
@ -223,7 +313,9 @@ class BulkPushRuleEvaluator:
(
power_levels,
sender_power_level,
) = await self._get_power_levels_and_sender_level(event, context)
) = await self._get_power_levels_and_sender_level(
event, context, event_id_to_event
)
# Find the event's thread ID.
relation = relation_from_event(event)
@ -238,6 +330,8 @@ class BulkPushRuleEvaluator:
# the parent is part of a thread.
thread_id = await self.store.get_thread_id(relation.parent_id)
related_events = await self._related_events(event)
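        # e.g. (hypothetical flattened shape) for a reply sent with the thread
        # fallback, this may look like:
        #
        #     {"m.in_reply_to": {"type": "m.room.message",
        #                        "content.body": "original text",
        #                        "im.vector.is_falling_back": ""}}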
# It's possible that old room versions have non-integer power levels (floats or
# strings). Workaround this by explicitly converting to int.
notification_levels = power_levels.get("notifications", {})
@ -250,6 +344,8 @@ class BulkPushRuleEvaluator:
room_member_count,
sender_power_level,
notification_levels,
related_events,
self._related_event_match_enabled,
)
users = rules_by_user.keys()


@ -0,0 +1,29 @@
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="UTF-8">
<meta http-equiv="X-UA-Compatible" content="IE=edge">
<meta name="viewport" content="width=device-width, initial-scale=1.0">
<title>{% block title %}{% endblock %}</title>
<style type="text/css">
{%- include 'style.css' without context %}
</style>
{% block header %}{% endblock %}
</head>
<body>
<header class="mx_Header">
{% if app_name == "Riot" %}
<img src="http://riot.im/img/external/riot-logo-email.png" width="83" height="83" alt="[Riot]"/>
{% elif app_name == "Vector" %}
<img src="http://matrix.org/img/vector-logo-email.png" width="64" height="83" alt="[Vector]"/>
{% elif app_name == "Element" %}
<img src="https://static.element.io/images/email-logo.png" width="83" height="83" alt="[Element]"/>
{% else %}
<img src="http://matrix.org/img/matrix-120x51.png" width="120" height="51" alt="[matrix]"/>
{% endif %}
</header>
{% block body %}{% endblock %}
</body>
</html>


@ -1,12 +1,6 @@
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="UTF-8">
<meta http-equiv="X-UA-Compatible" content="IE=edge">
<meta name="viewport" content="width=device-width, initial-scale=1.0">
<title>Your account is valid until {{ expiration_ts|format_ts("%d-%m-%Y") }}.</title>
</head>
<body>
Your account is valid until {{ expiration_ts|format_ts("%d-%m-%Y") }}.
</body>
</html>
{% extends "_base.html" %}
{% block title %}Your account is valid until {{ expiration_ts|format_ts("%d-%m-%Y") }}.{% endblock %}
{% block body %}
<p>Your account is valid until {{ expiration_ts|format_ts("%d-%m-%Y") }}.</p>
{% endblock %}


@ -1,12 +1,6 @@
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="UTF-8">
<meta http-equiv="X-UA-Compatible" content="IE=edge">
<meta name="viewport" content="width=device-width, initial-scale=1.0">
<title>Your account has been successfully renewed and is valid until {{ expiration_ts|format_ts("%d-%m-%Y") }}.</title>
</head>
<body>
Your account has been successfully renewed and is valid until {{ expiration_ts|format_ts("%d-%m-%Y") }}.
</body>
</html>
{% extends "_base.html" %}
{% block title %}Your account has been successfully renewed and is valid until {{ expiration_ts|format_ts("%d-%m-%Y") }}.{% endblock %}
{% block body %}
<p>Your account has been successfully renewed and is valid until {{ expiration_ts|format_ts("%d-%m-%Y") }}.</p>
{% endblock %}


@ -1,14 +1,8 @@
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="UTF-8">
<meta http-equiv="X-UA-Compatible" content="IE=edge">
<meta name="viewport" content="width=device-width, initial-scale=1.0">
<title>Request to add an email address to your Matrix account</title>
</head>
<body>
<p>A request to add an email address to your Matrix account has been received. If this was you, please click the link below to confirm adding this email:</p>
<a href="{{ link }}">{{ link }}</a>
<p>If this was not you, you can safely ignore this email. Thank you.</p>
</body>
</html>
{% extends "_base.html" %}
{% block title %}Request to add an email address to your Matrix account{% endblock %}
{% block body %}
<p>A request to add an email address to your Matrix account has been received. If this was you, please click the link below to confirm adding this email:</p>
<a href="{{ link }}">{{ link }}</a>
<p>If this was not you, you can safely ignore this email. Thank you.</p>
{% endblock %}


@ -1,13 +1,7 @@
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="UTF-8">
<meta http-equiv="X-UA-Compatible" content="IE=edge">
<meta name="viewport" content="width=device-width, initial-scale=1.0">
<title>Request failed</title>
</head>
<body>
<p>The request failed for the following reason: {{ failure_reason }}.</p>
<p>No changes have been made to your account.</p>
</body>
</html>
{% extends "_base.html" %}
{% block title %}Request failed{% endblock %}
{% block body %}
<p>The request failed for the following reason: {{ failure_reason }}.</p>
<p>No changes have been made to your account.</p>
{% endblock %}


@ -1,12 +1,6 @@
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="UTF-8">
<meta http-equiv="X-UA-Compatible" content="IE=edge">
<meta name="viewport" content="width=device-width, initial-scale=1.0">
<title>Your email has now been validated</title>
</head>
<body>
<p>Your email has now been validated, please return to your client. You may now close this window.</p>
</body>
</html>
{% extends "_base.html" %}
{% block title %}Your email has now been validated{% endblock %}
{% block body %}
<p>Your email has now been validated, please return to your client. You may now close this window.</p>
{% endblock %}


@ -1,21 +1,21 @@
<html>
<head>
<title>Success!</title>
<meta http-equiv="X-UA-Compatible" content="IE=edge">
<meta name="viewport" content="width=device-width, initial-scale=1.0">
{% extends "_base.html" %}
{% block title %}Success!{% endblock %}
{% block header %}
<link rel="stylesheet" href="/_matrix/static/client/register/style.css">
<script>
if (window.onAuthDone) {
window.onAuthDone();
} else if (window.opener && window.opener.postMessage) {
window.opener.postMessage("authDone", "*");
window.opener.postMessage("authDone", "*");
}
</script>
</head>
<body>
<div>
<p>Thank you</p>
<p>You may now close this window and return to the application</p>
</div>
</body>
</html>
{% endblock %}
{% block body %}
<div>
<p>Thank you</p>
<p>You may now close this window and return to the application</p>
</div>
{% endblock %}


@ -1,12 +1,5 @@
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="UTF-8">
<meta http-equiv="X-UA-Compatible" content="IE=edge">
<meta name="viewport" content="width=device-width, initial-scale=1.0">
<title>Invalid renewal token.</title>
</head>
<body>
Invalid renewal token.
</body>
</html>
{% extends "_base.html" %}
{% block title %}Invalid renewal token.{% endblock %}
{% block body %}
<p>Invalid renewal token.</p>
{% endblock %}


@ -1,47 +1,46 @@
<!doctype html>
<html lang="en">
<head>
<meta http-equiv="X-UA-Compatible" content="IE=edge">
<meta name="viewport" content="width=device-width, initial-scale=1.0">
<style type="text/css">
{% include 'mail.css' without context %}
{% include "mail-%s.css" % app_name ignore missing without context %}
{% include 'mail-expiry.css' without context %}
</style>
</head>
<body>
<table id="page">
<tr>
<td> </td>
<td id="inner">
<table class="header">
<tr>
<td>
<div class="salutation">Hi {{ display_name }},</div>
</td>
<td class="logo">
{% if app_name == "Riot" %}
<img src="http://riot.im/img/external/riot-logo-email.png" width="83" height="83" alt="[Riot]"/>
{% elif app_name == "Vector" %}
<img src="http://matrix.org/img/vector-logo-email.png" width="64" height="83" alt="[Vector]"/>
{% elif app_name == "Element" %}
<img src="https://static.element.io/images/email-logo.png" width="83" height="83" alt="[Element]"/>
{% else %}
<img src="http://matrix.org/img/matrix-120x51.png" width="120" height="51" alt="[matrix]"/>
{% endif %}
</td>
</tr>
<tr>
<td colspan="2">
<div class="noticetext">Your account will expire on {{ expiration_ts|format_ts("%d-%m-%Y") }}. This means that you will lose access to your account after this date.</div>
<div class="noticetext">To extend the validity of your account, please click on the link below (or copy and paste it into a new browser tab):</div>
<div class="noticetext"><a href="{{ url }}">{{ url }}</a></div>
</td>
</tr>
</table>
</td>
<td> </td>
</tr>
</table>
</body>
</html>
{% extends "_base.html" %}
{% block title %}Notice of expiry{% endblock %}
{% block header %}
<style type="text/css">
{% include 'mail.css' without context %}
{% include "mail-%s.css" % app_name ignore missing without context %}
{% include 'mail-expiry.css' without context %}
</style>
{% endblock %}
{% block body %}
<table id="page">
<tr>
<td> </td>
<td id="inner">
<table class="header">
<tr>
<td>
<div class="salutation">Hi {{ display_name }},</div>
</td>
<td class="logo">
{% if app_name == "Riot" %}
<img src="http://riot.im/img/external/riot-logo-email.png" width="83" height="83" alt="[Riot]"/>
{% elif app_name == "Vector" %}
<img src="http://matrix.org/img/vector-logo-email.png" width="64" height="83" alt="[Vector]"/>
{% elif app_name == "Element" %}
<img src="https://static.element.io/images/email-logo.png" width="83" height="83" alt="[Element]"/>
{% else %}
<img src="http://matrix.org/img/matrix-120x51.png" width="120" height="51" alt="[matrix]"/>
{% endif %}
</td>
</tr>
<tr>
<td colspan="2">
<div class="noticetext">Your account will expire on {{ expiration_ts|format_ts("%d-%m-%Y") }}. This means that you will lose access to your account after this date.</div>
<div class="noticetext">To extend the validity of your account, please click on the link below (or copy and paste it into a new browser tab):</div>
<div class="noticetext"><a href="{{ url }}">{{ url }}</a></div>
</td>
</tr>
</table>
</td>
<td> </td>
</tr>
</table>
{% endblock %}


@ -1,59 +1,57 @@
<!doctype html>
<html lang="en">
<head>
<meta http-equiv="X-UA-Compatible" content="IE=edge">
<meta name="viewport" content="width=device-width, initial-scale=1.0">
<style type="text/css">
{%- include 'mail.css' without context %}
{%- include "mail-%s.css" % app_name ignore missing without context %}
</style>
</head>
<body>
<table id="page">
<tr>
<td> </td>
<td id="inner">
<table class="header">
<tr>
<td>
<div class="salutation">Hi {{ user_display_name }},</div>
<div class="summarytext">{{ summary_text }}</div>
</td>
<td class="logo">
{%- if app_name == "Riot" %}
<img src="http://riot.im/img/external/riot-logo-email.png" width="83" height="83" alt="[Riot]"/>
{%- elif app_name == "Vector" %}
<img src="http://matrix.org/img/vector-logo-email.png" width="64" height="83" alt="[Vector]"/>
{%- elif app_name == "Element" %}
<img src="https://static.element.io/images/email-logo.png" width="83" height="83" alt="[Element]"/>
{%- else %}
<img src="http://matrix.org/img/matrix-120x51.png" width="120" height="51" alt="[matrix]"/>
{%- endif %}
</td>
</tr>
</table>
{%- for room in rooms %}
{%- include 'room.html' with context %}
{%- endfor %}
<div class="footer">
<a href="{{ unsubscribe_link }}">Unsubscribe</a>
<br/>
<br/>
<div class="debug">
Sending email at {{ reason.now|format_ts("%c") }} due to activity in room {{ reason.room_name }} because
an event was received at {{ reason.received_at|format_ts("%c") }}
which is more than {{ "%.1f"|format(reason.delay_before_mail_ms / (60*1000)) }} ({{ reason.delay_before_mail_ms }}) mins ago,
{%- if reason.last_sent_ts %}
and the last time we sent a mail for this room was {{ reason.last_sent_ts|format_ts("%c") }},
which is more than {{ "%.1f"|format(reason.throttle_ms / (60*1000)) }} (current throttle_ms) mins ago.
{%- else %}
and we don't have a last time we sent a mail for this room.
{%- endif %}
</div>
</div>
</td>
<td> </td>
</tr>
</table>
</body>
</html>
{% block title %}New activity in room{% endblock %}
{% block header %}
<style type="text/css">
{%- include 'mail.css' without context %}
{%- include "mail-%s.css" % app_name ignore missing without context %}
</style>
{% endblock %}
{% block body %}
<table id="page">
<tr>
<td> </td>
<td id="inner">
<table class="header">
<tr>
<td>
<div class="salutation">Hi {{ user_display_name }},</div>
<div class="summarytext">{{ summary_text }}</div>
</td>
<td class="logo">
{%- if app_name == "Riot" %}
<img src="http://riot.im/img/external/riot-logo-email.png" width="83" height="83" alt="[Riot]"/>
{%- elif app_name == "Vector" %}
<img src="http://matrix.org/img/vector-logo-email.png" width="64" height="83" alt="[Vector]"/>
{%- elif app_name == "Element" %}
<img src="https://static.element.io/images/email-logo.png" width="83" height="83" alt="[Element]"/>
{%- else %}
<img src="http://matrix.org/img/matrix-120x51.png" width="120" height="51" alt="[matrix]"/>
{%- endif %}
</td>
</tr>
</table>
{%- for room in rooms %}
{%- include 'room.html' with context %}
{%- endfor %}
<div class="footer">
<a href="{{ unsubscribe_link }}">Unsubscribe</a>
<br/>
<br/>
<div class="debug">
Sending email at {{ reason.now|format_ts("%c") }} due to activity in room {{ reason.room_name }} because
an event was received at {{ reason.received_at|format_ts("%c") }}
which is more than {{ "%.1f"|format(reason.delay_before_mail_ms / (60*1000)) }} ({{ reason.delay_before_mail_ms }}) mins ago,
{%- if reason.last_sent_ts %}
and the last time we sent a mail for this room was {{ reason.last_sent_ts|format_ts("%c") }},
which is more than {{ "%.1f"|format(reason.throttle_ms / (60*1000)) }} (current throttle_ms) mins ago.
{%- else %}
and we don't have a last time we sent a mail for this room.
{%- endif %}
</div>
</div>
</td>
<td> </td>
</tr>
</table>
{% endblock %}

View file

@ -1,14 +1,9 @@
<html lang="en">
<head>
<title>Password reset</title>
<meta http-equiv="X-UA-Compatible" content="IE=edge">
<meta name="viewport" content="width=device-width, initial-scale=1.0">
</head>
<body>
<p>A password reset request has been received for your Matrix account. If this was you, please click the link below to confirm resetting your password:</p>
{% block title %}Password reset{% endblock %}
<a href="{{ link }}">{{ link }}</a>
{% block body %}
<p>A password reset request has been received for your Matrix account. If this was you, please click the link below to confirm resetting your password:</p>
<p>If this was not you, <strong>do not</strong> click the link above and instead contact your server administrator. Thank you.</p>
</body>
</html>
<a href="{{ link }}">{{ link }}</a>
<p>If this was not you, <strong>do not</strong> click the link above and instead contact your server administrator. Thank you.</p>
{% endblock %}

View file

@ -1,10 +1,6 @@
<html lang="en">
<head>
<title>Password reset confirmation</title>
<meta http-equiv="X-UA-Compatible" content="IE=edge">
<meta name="viewport" content="width=device-width, initial-scale=1.0">
</head>
<body>
{% block title %}Password reset confirmation{% endblock %}
{% block body %}
<!--Use a hidden form to resubmit the information necessary to reset the password-->
<form method="post">
<input type="hidden" name="sid" value="{{ sid }}">
@ -15,6 +11,4 @@
If you did not mean to do this, please close this page and your password will not be changed.</p>
<p><button type="submit">Confirm changing my password</button></p>
</form>
</body>
</html>
{% endblock %}

View file

@ -1,12 +1,6 @@
<html lang="en">
<head>
<title>Password reset failure</title>
<meta http-equiv="X-UA-Compatible" content="IE=edge">
<meta name="viewport" content="width=device-width, initial-scale=1.0">
</head>
<body>
<p>The request failed for the following reason: {{ failure_reason }}.</p>
{% block title %}Password reset failure{% endblock %}
{% block body %}
<p>The request failed for the following reason: {{ failure_reason }}.</p>
<p>Your password has not been reset.</p>
</body>
</html>
{% endblock %}

View file

@ -1,9 +1,5 @@
<html lang="en">
<head>
<meta http-equiv="X-UA-Compatible" content="IE=edge">
<meta name="viewport" content="width=device-width, initial-scale=1.0">
</head>
<body>
{% block title %}Password reset success{% endblock %}
{% block body %}
<p>Your email has now been validated, please return to your client to reset your password. You may now close this window.</p>
</body>
</html>
{% endblock %}

View file

@ -1,10 +1,7 @@
<html>
<head>
<title>Authentication</title>
<meta http-equiv="X-UA-Compatible" content="IE=edge">
<meta name="viewport" content="width=device-width, initial-scale=1.0">
<script src="https://www.recaptcha.net/recaptcha/api.js"
async defer></script>
{% block title %}Authentication{% endblock %}
{% block header %}
<script src="https://www.recaptcha.net/recaptcha/api.js" async defer></script>
<script src="//code.jquery.com/jquery-1.11.2.min.js"></script>
<link rel="stylesheet" href="/_matrix/static/client/register/style.css">
<script>
@ -12,8 +9,9 @@ function captchaDone() {
$('#registrationForm').submit();
}
</script>
</head>
<body>
{% endblock %}
{% block body %}
<form id="registrationForm" method="post" action="{{ myurl }}">
<div>
{% if error is defined %}
@ -37,5 +35,4 @@ function captchaDone() {
</div>
</div>
</form>
</body>
</html>
{% endblock %}

View file

@ -1,16 +1,11 @@
<html lang="en">
<head>
<title>Registration</title>
<meta http-equiv="X-UA-Compatible" content="IE=edge">
<meta name="viewport" content="width=device-width, initial-scale=1.0">
</head>
<body>
<p>You have asked us to register this email with a new Matrix account. If this was you, please click the link below to confirm your email address:</p>
{% block title %}Registration{% endblock %}
<a href="{{ link }}">Verify Your Email Address</a>
{% block body %}
<p>You have asked us to register this email with a new Matrix account. If this was you, please click the link below to confirm your email address:</p>
<p>If this was not you, you can safely disregard this email.</p>
<a href="{{ link }}">Verify Your Email Address</a>
<p>Thank you.</p>
</body>
</html>
<p>If this was not you, you can safely disregard this email.</p>
<p>Thank you.</p>
{% endblock %}

View file

@ -1,9 +1,5 @@
<html lang="en">
<head>
<meta http-equiv="X-UA-Compatible" content="IE=edge">
<meta name="viewport" content="width=device-width, initial-scale=1.0">
</head>
<body>
{% block title %}Registration failure{% endblock %}
{% block body %}
<p>Validation failed for the following reason: {{ failure_reason }}.</p>
</body>
</html>
{% endblock %}

View file

@ -1,10 +1,5 @@
<html lang="en">
<head>
<title>Your email has now been validated</title>
<meta http-equiv="X-UA-Compatible" content="IE=edge">
<meta name="viewport" content="width=device-width, initial-scale=1.0">
</head>
<body>
{% block title %}Your email has now been validated{% endblock %}
{% block body %}
<p>Your email has now been validated, please return to your client. You may now close this window.</p>
</body>
</html>
{% endblock %}

View file

@ -1,11 +1,10 @@
<html lang="en">
<head>
<title>Authentication</title>
<meta http-equiv="X-UA-Compatible" content="IE=edge">
<meta name="viewport" content="width=device-width, initial-scale=1.0">
{% block title %}Authentication{% endblock %}
{% block header %}
<link rel="stylesheet" href="/_matrix/static/client/register/style.css">
</head>
<body>
{% endblock %}
{% block body %}
<form id="registrationForm" method="post" action="{{ myurl }}">
<div>
{% if error is defined %}
@ -19,5 +18,4 @@
<input type="submit" value="Authenticate" />
</div>
</form>
</body>
</html>
{% endblock %}

View file

@ -1,25 +1,24 @@
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="UTF-8">
<title>SSO account deactivated</title>
<meta http-equiv="X-UA-Compatible" content="IE=edge">
<meta name="viewport" content="width=device-width, initial-scale=1.0"> <style type="text/css">
{% include "sso.css" without context %}
</style>
</head>
<body class="error_page">
<header>
<h1>Your account has been deactivated</h1>
<p>
<strong>No account found</strong>
</p>
<p>
Your account might have been deactivated by the server administrator.
You can either try to create a new account or contact the server's
administrator.
</p>
</header>
{% include "sso_footer.html" without context %}
</body>
</html>
{% block title %}SSO account deactivated{% endblock %}
{% block header %}
<style type="text/css">
{% include "sso.css" without context %}
</style>
{% endblock %}
{% block body %}
<div class="error_page">
<header>
<h1>Your account has been deactivated</h1>
<p>
<strong>No account found</strong>
</p>
<p>
Your account might have been deactivated by the server administrator.
You can either try to create a new account or contact the server's
administrator.
</p>
</header>
</div>
{% include "sso_footer.html" without context %}
{% endblock %}

View file

@ -1,189 +1,185 @@
<!DOCTYPE html>
<html lang="en">
<head>
<title>Create your account</title>
<meta charset="utf-8">
<meta http-equiv="X-UA-Compatible" content="IE=edge">
<meta name="viewport" content="width=device-width, initial-scale=1.0">
<script type="text/javascript">
let wasKeyboard = false;
document.addEventListener("mousedown", function() { wasKeyboard = false; });
document.addEventListener("keydown", function() { wasKeyboard = true; });
document.addEventListener("focusin", function() {
if (wasKeyboard) {
document.body.classList.add("keyboard-focus");
} else {
document.body.classList.remove("keyboard-focus");
}
});
</script>
<style type="text/css">
{% include "sso.css" without context %}
{% block title %}Create your account{% endblock %}
body.keyboard-focus :focus, body.keyboard-focus .username_input:focus-within {
outline: 3px solid #17191C;
outline-offset: 4px;
}
{% block header %}
<script type="text/javascript">
let wasKeyboard = false;
document.addEventListener("mousedown", function() { wasKeyboard = false; });
document.addEventListener("keydown", function() { wasKeyboard = true; });
document.addEventListener("focusin", function() {
if (wasKeyboard) {
document.body.classList.add("keyboard-focus");
} else {
document.body.classList.remove("keyboard-focus");
}
});
</script>
<style type="text/css">
{% include "sso.css" without context %}
.username_input {
display: flex;
border: 2px solid #418DED;
border-radius: 8px;
padding: 12px;
position: relative;
margin: 16px 0;
align-items: center;
font-size: 12px;
}
body.keyboard-focus :focus, body.keyboard-focus .username_input:focus-within {
outline: 3px solid #17191C;
outline-offset: 4px;
}
.username_input.invalid {
border-color: #FE2928;
}
.username_input {
display: flex;
border: 2px solid #418DED;
border-radius: 8px;
padding: 12px;
position: relative;
margin: 16px 0;
align-items: center;
font-size: 12px;
}
.username_input.invalid input, .username_input.invalid label {
color: #FE2928;
}
.username_input.invalid {
border-color: #FE2928;
}
.username_input div, .username_input input {
line-height: 18px;
font-size: 14px;
}
.username_input.invalid input, .username_input.invalid label {
color: #FE2928;
}
.username_input label {
position: absolute;
top: -5px;
left: 14px;
font-size: 10px;
line-height: 10px;
background: white;
padding: 0 2px;
}
.username_input div, .username_input input {
line-height: 18px;
font-size: 14px;
}
.username_input input {
flex: 1;
display: block;
min-width: 0;
border: none;
}
.username_input label {
position: absolute;
top: -5px;
left: 14px;
font-size: 10px;
line-height: 10px;
background: white;
padding: 0 2px;
}
/* only clear the outline if we know it will be shown on the parent div using :focus-within */
@supports selector(:focus-within) {
.username_input input {
outline: none !important;
}
}
.username_input input {
flex: 1;
display: block;
min-width: 0;
border: none;
}
.username_input div {
color: #8D99A5;
}
/* only clear the outline if we know it will be shown on the parent div using :focus-within */
@supports selector(:focus-within) {
.username_input input {
outline: none !important;
}
}
.idp-pick-details {
border: 1px solid #E9ECF1;
border-radius: 8px;
margin: 24px 0;
}
.username_input div {
color: #8D99A5;
}
.idp-pick-details h2 {
margin: 0;
padding: 8px 12px;
}
.idp-pick-details {
border: 1px solid #E9ECF1;
border-radius: 8px;
margin: 24px 0;
}
.idp-pick-details .idp-detail {
border-top: 1px solid #E9ECF1;
padding: 12px;
display: block;
}
.idp-pick-details .check-row {
display: flex;
align-items: center;
}
.idp-pick-details h2 {
margin: 0;
padding: 8px 12px;
}
.idp-pick-details .check-row .name {
flex: 1;
}
.idp-pick-details .idp-detail {
border-top: 1px solid #E9ECF1;
padding: 12px;
display: block;
}
.idp-pick-details .check-row {
display: flex;
align-items: center;
}
.idp-pick-details .use, .idp-pick-details .idp-value {
color: #737D8C;
}
.idp-pick-details .check-row .name {
flex: 1;
}
.idp-pick-details .idp-value {
margin: 0;
margin-top: 8px;
}
.idp-pick-details .use, .idp-pick-details .idp-value {
color: #737D8C;
}
.idp-pick-details .avatar {
width: 53px;
height: 53px;
border-radius: 100%;
display: block;
margin-top: 8px;
}
.idp-pick-details .idp-value {
margin: 0;
margin-top: 8px;
}
output {
padding: 0 14px;
display: block;
}
.idp-pick-details .avatar {
width: 53px;
height: 53px;
border-radius: 100%;
display: block;
margin-top: 8px;
}
output.error {
color: #FE2928;
}
</style>
</head>
<body>
<header>
<h1>Create your account</h1>
<p>This is required. Continue to create your account on {{ server_name }}. You can't change this later.</p>
</header>
<main>
<form method="post" class="form__input" id="form">
<div class="username_input" id="username_input">
<label for="field-username">Username (required)</label>
<div class="prefix">@</div>
<input type="text" name="username" id="field-username" value="{{ user_attributes.localpart }}" autofocus autocorrect="off" autocapitalize="none">
<div class="postfix">:{{ server_name }}</div>
output {
padding: 0 14px;
display: block;
}
output.error {
color: #FE2928;
}
</style>
{% endblock %}
{% block body %}
<header>
<h1>Create your account</h1>
<p>This is required. Continue to create your account on {{ server_name }}. You can't change this later.</p>
</header>
<main>
<form method="post" class="form__input" id="form">
<div class="username_input" id="username_input">
<label for="field-username">Username (required)</label>
<div class="prefix">@</div>
<input type="text" name="username" id="field-username" value="{{ user_attributes.localpart }}" autofocus autocorrect="off" autocapitalize="none">
<div class="postfix">:{{ server_name }}</div>
</div>
<output for="username_input" id="field-username-output"></output>
<input type="submit" value="Continue" class="primary-button">
{% if user_attributes.avatar_url or user_attributes.display_name or user_attributes.emails %}
<section class="idp-pick-details">
<h2>{% if idp.idp_icon %}<img src="{{ idp.idp_icon | mxc_to_http(24, 24) }}"/>{% endif %}Optional data from {{ idp.idp_name }}</h2>
{% if user_attributes.avatar_url %}
<label class="idp-detail idp-avatar" for="idp-avatar">
<div class="check-row">
<span class="name">Avatar</span>
<span class="use">Use</span>
<input type="checkbox" name="use_avatar" id="idp-avatar" value="true" checked>
</div>
<output for="username_input" id="field-username-output"></output>
<input type="submit" value="Continue" class="primary-button">
{% if user_attributes.avatar_url or user_attributes.display_name or user_attributes.emails %}
<section class="idp-pick-details">
<h2>{% if idp.idp_icon %}<img src="{{ idp.idp_icon | mxc_to_http(24, 24) }}"/>{% endif %}Optional data from {{ idp.idp_name }}</h2>
{% if user_attributes.avatar_url %}
<label class="idp-detail idp-avatar" for="idp-avatar">
<div class="check-row">
<span class="name">Avatar</span>
<span class="use">Use</span>
<input type="checkbox" name="use_avatar" id="idp-avatar" value="true" checked>
</div>
<img src="{{ user_attributes.avatar_url }}" class="avatar" />
</label>
{% endif %}
{% if user_attributes.display_name %}
<label class="idp-detail" for="idp-displayname">
<div class="check-row">
<span class="name">Display name</span>
<span class="use">Use</span>
<input type="checkbox" name="use_display_name" id="idp-displayname" value="true" checked>
</div>
<p class="idp-value">{{ user_attributes.display_name }}</p>
</label>
{% endif %}
{% for email in user_attributes.emails %}
<label class="idp-detail" for="idp-email{{ loop.index }}">
<div class="check-row">
<span class="name">E-mail</span>
<span class="use">Use</span>
<input type="checkbox" name="use_email" id="idp-email{{ loop.index }}" value="{{ email }}" checked>
</div>
<p class="idp-value">{{ email }}</p>
</label>
{% endfor %}
</section>
{% endif %}
</form>
</main>
{% include "sso_footer.html" without context %}
<script type="text/javascript">
{% include "sso_auth_account_details.js" without context %}
</script>
</body>
</html>
<img src="{{ user_attributes.avatar_url }}" class="avatar" />
</label>
{% endif %}
{% if user_attributes.display_name %}
<label class="idp-detail" for="idp-displayname">
<div class="check-row">
<span class="name">Display name</span>
<span class="use">Use</span>
<input type="checkbox" name="use_display_name" id="idp-displayname" value="true" checked>
</div>
<p class="idp-value">{{ user_attributes.display_name }}</p>
</label>
{% endif %}
{% for email in user_attributes.emails %}
<label class="idp-detail" for="idp-email{{ loop.index }}">
<div class="check-row">
<span class="name">E-mail</span>
<span class="use">Use</span>
<input type="checkbox" name="use_email" id="idp-email{{ loop.index }}" value="{{ email }}" checked>
</div>
<p class="idp-value">{{ email }}</p>
</label>
{% endfor %}
</section>
{% endif %}
</form>
</main>
{% include "sso_footer.html" without context %}
<script type="text/javascript">
{% include "sso_auth_account_details.js" without context %}
</script>
{% endblock %}

View file

@ -1,27 +1,25 @@
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="UTF-8">
<title>Authentication failed</title>
<meta http-equiv="X-UA-Compatible" content="IE=edge">
<meta name="viewport" content="width=device-width, initial-scale=1.0">
<style type="text/css">
{% include "sso.css" without context %}
</style>
</head>
<body class="error_page">
<header>
<h1>That doesn't look right</h1>
<p>
<strong>We were unable to validate your {{ server_name }} account</strong>
via single&nbsp;sign&#8209;on&nbsp;(SSO), because the SSO Identity
Provider returned different details than when you logged in.
</p>
<p>
Try the operation again, and ensure that you use the same details on
the Identity Provider as when you log into your account.
</p>
</header>
{% include "sso_footer.html" without context %}
</body>
</html>
{% block title %}Authentication failed{% endblock %}
{% block header %}
<style type="text/css">
{% include "sso.css" without context %}
</style>
{% endblock %}
{% block body %}
<div class="error_page">
<header>
<h1>That doesn't look right</h1>
<p>
<strong>We were unable to validate your {{ server_name }} account</strong>
via single&nbsp;sign&#8209;on&nbsp;(SSO), because the SSO Identity
Provider returned different details than when you logged in.
</p>
<p>
Try the operation again, and ensure that you use the same details on
the Identity Provider as when you log into your account.
</p>
</header>
</div>
{% include "sso_footer.html" without context %}
{% endblock %}

View file

@ -1,30 +1,26 @@
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="UTF-8">
<title>Confirm it's you</title>
<meta http-equiv="X-UA-Compatible" content="IE=edge">
<meta name="viewport" content="width=device-width, initial-scale=1.0">
<style type="text/css">
{% include "sso.css" without context %}
</style>
</head>
<body>
<header>
<h1>Confirm it's you to continue</h1>
<p>
A client is trying to {{ description }}. To confirm this action,
re-authorize your account with single sign-on.
</p>
<p><strong>
If you did not expect this, your account may be compromised.
</strong></p>
</header>
<main>
<a href="{{ redirect_url }}" class="primary-button">
Continue with {{ idp.idp_name }}
</a>
</main>
{% include "sso_footer.html" without context %}
</body>
</html>
{% block title %}Confirm it's you{% endblock %}
{% block header %}
<style type="text/css">
{% include "sso.css" without context %}
</style>
{% endblock %}
{% block body %}
<header>
<h1>Confirm it's you to continue</h1>
<p>
A client is trying to {{ description }}. To confirm this action,
re-authorize your account with single sign-on.
</p>
<p><strong>
If you did not expect this, your account may be compromised.
</strong></p>
</header>
<main>
<a href="{{ redirect_url }}" class="primary-button">
Continue with {{ idp.idp_name }}
</a>
</main>
{% include "sso_footer.html" without context %}
{% endblock %}

View file

@ -1,29 +1,25 @@
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="UTF-8">
<title>Authentication successful</title>
<meta http-equiv="X-UA-Compatible" content="IE=edge">
<meta name="viewport" content="width=device-width, initial-scale=1.0">
<style type="text/css">
{% include "sso.css" without context %}
</style>
<script>
if (window.onAuthDone) {
window.onAuthDone();
} else if (window.opener && window.opener.postMessage) {
window.opener.postMessage("authDone", "*");
}
</script>
</head>
<body>
<header>
<h1>Thank you</h1>
<p>
Now we know it's you, you can close this window and return to the
application.
</p>
</header>
{% include "sso_footer.html" without context %}
</body>
</html>
{% block title %}Authentication successful{% endblock %}
{% block header %}
<style type="text/css">
{% include "sso.css" without context %}
</style>
<script>
if (window.onAuthDone) {
window.onAuthDone();
} else if (window.opener && window.opener.postMessage) {
window.opener.postMessage("authDone", "*");
}
</script>
{% endblock %}
{% block body %}
<header>
<h1>Thank you</h1>
<p>
Now we know it's you, you can close this window and return to the
application.
</p>
</header>
{% include "sso_footer.html" without context %}
{% endblock %}

View file

@ -1,19 +1,19 @@
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="UTF-8">
<title>Authentication failed</title>
<meta http-equiv="X-UA-Compatible" content="IE=edge">
<meta name="viewport" content="width=device-width, initial-scale=1.0">
<style type="text/css">
{% include "sso.css" without context %}
{% block title %}Authentication failed{% endblock %}
#error_code {
margin-top: 56px;
}
</style>
</head>
<body class="error_page">
{% block header %}
{% if error == "unauthorised" %}
<style type="text/css">
{% include "sso.css" without context %}
#error_code {
margin-top: 56px;
}
</style>
{% endif %}
{% endblock %}
{% block body %}
<div class="error_page">
{# If an error of unauthorised is returned it means we have actively rejected their login #}
{% if error == "unauthorised" %}
<header>
@ -66,5 +66,5 @@
}
</script>
{% endif %}
</body>
</html>
</div>
{% endblock %}

View file

@ -1,63 +1,59 @@
<!DOCTYPE html>
<html lang="en">
<head>
<meta http-equiv="X-UA-Compatible" content="IE=edge">
<meta name="viewport" content="width=device-width, initial-scale=1.0">
<meta charset="UTF-8">
<title>Choose identity provider</title>
<style type="text/css">
{% include "sso.css" without context %}
{% block title %}Choose identity provider{% endblock %}
.providers {
list-style: none;
padding: 0;
}
{% block header %}
<style type="text/css">
{% include "sso.css" without context %}
.providers li {
margin: 12px;
}
.providers {
list-style: none;
padding: 0;
}
.providers a {
display: block;
border-radius: 4px;
border: 1px solid #17191C;
padding: 8px;
text-align: center;
text-decoration: none;
color: #17191C;
display: flex;
align-items: center;
font-weight: bold;
}
.providers li {
margin: 12px;
}
.providers a img {
width: 24px;
height: 24px;
}
.providers a span {
flex: 1;
}
</style>
</head>
<body>
<header>
<h1>Log in to {{ server_name }} </h1>
<p>Choose an identity provider to log in</p>
</header>
<main>
<ul class="providers">
{% for p in providers %}
<li>
<a href="pick_idp?idp={{ p.idp_id }}&redirectUrl={{ redirect_url | urlencode }}">
{% if p.idp_icon %}
<img src="{{ p.idp_icon | mxc_to_http(32, 32) }}"/>
{% endif %}
<span>{{ p.idp_name }}</span>
</a>
</li>
{% endfor %}
</ul>
</main>
{% include "sso_footer.html" without context %}
</body>
</html>
.providers a {
display: block;
border-radius: 4px;
border: 1px solid #17191C;
padding: 8px;
text-align: center;
text-decoration: none;
color: #17191C;
display: flex;
align-items: center;
font-weight: bold;
}
.providers a img {
width: 24px;
height: 24px;
}
.providers a span {
flex: 1;
}
</style>
{% endblock %}
{% block body %}
<header>
<h1>Log in to {{ server_name }} </h1>
<p>Choose an identity provider to log in</p>
</header>
<main>
<ul class="providers">
{% for p in providers %}
<li>
<a href="pick_idp?idp={{ p.idp_id }}&redirectUrl={{ redirect_url | urlencode }}">
{% if p.idp_icon %}
<img src="{{ p.idp_icon | mxc_to_http(32, 32) }}"/>
{% endif %}
<span>{{ p.idp_name }}</span>
</a>
</li>
{% endfor %}
</ul>
</main>
{% include "sso_footer.html" without context %}
{% endblock %}

View file

@ -1,33 +1,29 @@
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="UTF-8">
<title>Agree to terms and conditions</title>
<meta http-equiv="X-UA-Compatible" content="IE=edge">
<meta name="viewport" content="width=device-width, initial-scale=1.0">
<style type="text/css">
{% include "sso.css" without context %}
{% block title %}Agree to terms and conditions{% endblock %}
#consent_form {
margin-top: 56px;
}
</style>
</head>
<body>
<header>
<h1>Your account is nearly ready</h1>
<p>Agree to the terms to create your account.</p>
</header>
<main>
{% include "sso_partial_profile.html" %}
<form method="post" action="{{my_url}}" id="consent_form">
<p>
<input id="accepted_version" type="checkbox" name="accepted_version" value="{{ consent_version }}" required>
<label for="accepted_version">I have read and agree to the <a href="{{ terms_url }}" target="_blank" rel="noopener">terms and conditions</a>.</label>
</p>
<input type="submit" class="primary-button" value="Continue"/>
</form>
</main>
{% include "sso_footer.html" without context %}
</body>
</html>
{% block header %}
<style type="text/css">
{% include "sso.css" without context %}
#consent_form {
margin-top: 56px;
}
</style>
{% endblock %}
{% block body %}
<header>
<h1>Your account is nearly ready</h1>
<p>Agree to the terms to create your account.</p>
</header>
<main>
{% include "sso_partial_profile.html" %}
<form method="post" action="{{my_url}}" id="consent_form">
<p>
<input id="accepted_version" type="checkbox" name="accepted_version" value="{{ consent_version }}" required>
<label for="accepted_version">I have read and agree to the <a href="{{ terms_url }}" target="_blank" rel="noopener">terms and conditions</a>.</label>
</p>
<input type="submit" class="primary-button" value="Continue"/>
</form>
</main>
{% include "sso_footer.html" without context %}
{% endblock %}

View file

@ -1,41 +1,38 @@
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="UTF-8">
<title>Continue to your account</title>
<meta http-equiv="X-UA-Compatible" content="IE=edge">
<meta name="viewport" content="width=device-width, initial-scale=1.0">
<style type="text/css">
{% include "sso.css" without context %}
{% block title %}Continue to your account{% endblock %}
.confirm-trust {
margin: 34px 0;
color: #8D99A5;
}
.confirm-trust strong {
color: #17191C;
}
{% block header %}
<style type="text/css">
{% include "sso.css" without context %}
.confirm-trust::before {
content: "";
background-image: url('data:image/svg+xml;base64,PHN2ZyB3aWR0aD0iMTgiIGhlaWdodD0iMTgiIHZpZXdCb3g9IjAgMCAxOCAxOCIgZmlsbD0ibm9uZSIgeG1sbnM9Imh0dHA6Ly93d3cudzMub3JnLzIwMDAvc3ZnIj4KPHBhdGggZmlsbC1ydWxlPSJldmVub2RkIiBjbGlwLXJ1bGU9ImV2ZW5vZGQiIGQ9Ik0xNi41IDlDMTYuNSAxMy4xNDIxIDEzLjE0MjEgMTYuNSA5IDE2LjVDNC44NTc4NiAxNi41IDEuNSAxMy4xNDIxIDEuNSA5QzEuNSA0Ljg1Nzg2IDQuODU3ODYgMS41IDkgMS41QzEzLjE0MjEgMS41IDE2LjUgNC44NTc4NiAxNi41IDlaTTcuMjUgOUM3LjI1IDkuNDY1OTYgNy41Njg2OSA5Ljg1NzQ4IDggOS45Njg1VjEyLjM3NUM4IDEyLjkyNzMgOC40NDc3MiAxMy4zNzUgOSAxMy4zNzVIMTAuMTI1QzEwLjY3NzMgMTMuMzc1IDExLjEyNSAxMi45MjczIDExLjEyNSAxMi4zNzVDMTEuMTI1IDExLjgyMjcgMTAuNjc3MyAxMS4zNzUgMTAuMTI1IDExLjM3NUgxMFY5QzEwIDguOTY1NDggOS45OTgyNSA4LjkzMTM3IDkuOTk0ODQgOC44OTc3NkM5Ljk0MzYzIDguMzkzNSA5LjUxNzc3IDggOSA4SDguMjVDNy42OTc3MiA4IDcuMjUgOC40NDc3MiA3LjI1IDlaTTkgNy41QzkuNjIxMzIgNy41IDEwLjEyNSA2Ljk5NjMyIDEwLjEyNSA2LjM3NUMxMC4xMjUgNS43NTM2OCA5LjYyMTMyIDUuMjUgOSA1LjI1QzguMzc4NjggNS4yNSA3Ljg3NSA1Ljc1MzY4IDcuODc1IDYuMzc1QzcuODc1IDYuOTk2MzIgOC4zNzg2OCA3LjUgOSA3LjVaIiBmaWxsPSIjQzFDNkNEIi8+Cjwvc3ZnPgoK');
background-repeat: no-repeat;
width: 24px;
height: 24px;
display: block;
float: left;
}
</style>
</head>
<body>
<header>
<h1>Continue to your account</h1>
</header>
<main>
{% include "sso_partial_profile.html" %}
<p class="confirm-trust">Continuing will grant <strong>{{ display_url }}</strong> access to your account.</p>
<a href="{{ redirect_url }}" class="primary-button">Continue</a>
</main>
{% include "sso_footer.html" without context %}
</body>
</html>
.confirm-trust {
margin: 34px 0;
color: #8D99A5;
}
.confirm-trust strong {
color: #17191C;
}
.confirm-trust::before {
content: "";
background-image: url('data:image/svg+xml;base64,PHN2ZyB3aWR0aD0iMTgiIGhlaWdodD0iMTgiIHZpZXdCb3g9IjAgMCAxOCAxOCIgZmlsbD0ibm9uZSIgeG1sbnM9Imh0dHA6Ly93d3cudzMub3JnLzIwMDAvc3ZnIj4KPHBhdGggZmlsbC1ydWxlPSJldmVub2RkIiBjbGlwLXJ1bGU9ImV2ZW5vZGQiIGQ9Ik0xNi41IDlDMTYuNSAxMy4xNDIxIDEzLjE0MjEgMTYuNSA5IDE2LjVDNC44NTc4NiAxNi41IDEuNSAxMy4xNDIxIDEuNSA5QzEuNSA0Ljg1Nzg2IDQuODU3ODYgMS41IDkgMS41QzEzLjE0MjEgMS41IDE2LjUgNC44NTc4NiAxNi41IDlaTTcuMjUgOUM3LjI1IDkuNDY1OTYgNy41Njg2OSA5Ljg1NzQ4IDggOS45Njg1VjEyLjM3NUM4IDEyLjkyNzMgOC40NDc3MiAxMy4zNzUgOSAxMy4zNzVIMTAuMTI1QzEwLjY3NzMgMTMuMzc1IDExLjEyNSAxMi45MjczIDExLjEyNSAxMi4zNzVDMTEuMTI1IDExLjgyMjcgMTAuNjc3MyAxMS4zNzUgMTAuMTI1IDExLjM3NUgxMFY5QzEwIDguOTY1NDggOS45OTgyNSA4LjkzMTM3IDkuOTk0ODQgOC44OTc3NkM5Ljk0MzYzIDguMzkzNSA5LjUxNzc3IDggOSA4SDguMjVDNy42OTc3MiA4IDcuMjUgOC40NDc3MiA3LjI1IDlaTTkgNy41QzkuNjIxMzIgNy41IDEwLjEyNSA2Ljk5NjMyIDEwLjEyNSA2LjM3NUMxMC4xMjUgNS43NTM2OCA5LjYyMTMyIDUuMjUgOSA1LjI1QzguMzc4NjggNS4yNSA3Ljg3NSA1Ljc1MzY4IDcuODc1IDYuMzc1QzcuODc1IDYuOTk2MzIgOC4zNzg2OCA3LjUgOSA3LjVaIiBmaWxsPSIjQzFDNkNEIi8+Cjwvc3ZnPgoK');
background-repeat: no-repeat;
width: 24px;
height: 24px;
display: block;
float: left;
}
</style>
{% endblock %}
{% block body %}
<header>
<h1>Continue to your account</h1>
</header>
<main>
{% include "sso_partial_profile.html" %}
<p class="confirm-trust">Continuing will grant <strong>{{ display_url }}</strong> access to your account.</p>
<a href="{{ redirect_url }}" class="primary-button">Continue</a>
</main>
{% include "sso_footer.html" without context %}
{% endblock %}

View file

@ -0,0 +1,29 @@
html {
height: 100%;
}
body {
background: #f9fafb;
max-width: 680px;
margin: auto;
font-family: -apple-system, BlinkMacSystemFont, "Segoe UI", Roboto, Helvetica, Arial, sans-serif, "Apple Color Emoji", "Segoe UI Emoji", "Segoe UI Symbol";
}
.mx_Header {
border-bottom: 3px solid #ddd;
margin-bottom: 1rem;
padding-top: 1rem;
padding-bottom: 1rem;
text-align: center;
}
@media screen and (max-width: 1120px) {
body {
font-size: 20px;
}
h1 { font-size: 1rem; }
h2 { font-size: .9rem; }
h3 { font-size: .85rem; }
h4 { font-size: .8rem; }
}

View file

@ -1,11 +1,10 @@
<html>
<head>
<title>Authentication</title>
<meta http-equiv="X-UA-Compatible" content="IE=edge">
<meta name="viewport" content="width=device-width, initial-scale=1.0">
{% block title %}Authentication{% endblock %}
{% block header %}
<link rel="stylesheet" href="/_matrix/static/client/register/style.css">
</head>
<body>
{% endblock %}
{% block body %}
<form id="registrationForm" method="post" action="{{ myurl }}">
<div>
{% if error is defined %}
@ -19,5 +18,4 @@
<input type="submit" value="Agree" />
</div>
</form>
</body>
</html>
{% endblock %}

View file

@ -77,6 +77,11 @@ class CapabilitiesRestServlet(RestServlet):
"enabled": True,
}
if self.config.experimental.msc3664_enabled:
response["capabilities"]["im.nheko.msc3664.related_event_match"] = {
"enabled": self.config.experimental.msc3664_enabled,
}
return HTTPStatus.OK, response
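
A hedged client-side sketch of checking the capability flag added above; the base URL and access token are placeholders, and `/_matrix/client/v3/capabilities` is the standard capabilities endpoint:

import requests

def supports_related_event_match(base_url: str, access_token: str) -> bool:
    """Return True if the homeserver advertises the MSC3664 capability."""
    resp = requests.get(
        f"{base_url}/_matrix/client/v3/capabilities",
        headers={"Authorization": f"Bearer {access_token}"},
    )
    resp.raise_for_status()
    caps = resp.json().get("capabilities", {})
    # Unstable prefix used while MSC3664 remains experimental, as in the diff.
    return caps.get("im.nheko.msc3664.related_event_match", {}).get("enabled", False)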

View file

@ -231,7 +231,7 @@ class DehydratedDeviceServlet(RestServlet):
}
}
PUT /org.matrix.msc2697/dehydrated_device
PUT /org.matrix.msc2697.v2/dehydrated_device
Content-Type: application/json
{
@ -271,7 +271,6 @@ class DehydratedDeviceServlet(RestServlet):
raise errors.NotFoundError("No dehydrated device available")
class PutBody(RequestBodyModel):
device_id: StrictStr
device_data: DehydratedDeviceDataModel
initial_device_display_name: Optional[StrictStr]
@ -281,7 +280,7 @@ class DehydratedDeviceServlet(RestServlet):
device_id = await self.device_handler.store_dehydrated_device(
requester.user.to_string(),
submission.device_data,
submission.device_data.dict(),
submission.initial_device_display_name,
)
return 200, {"device_id": device_id}
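
A minimal sketch of why `.dict()` is now passed to `store_dehydrated_device`: the request body is parsed into a Pydantic model, while the storage layer expects a plain JSON-able dict. The field name below is illustrative, not necessarily Synapse's real model:

from pydantic import BaseModel, StrictStr

class DehydratedDeviceDataModel(BaseModel):
    algorithm: StrictStr  # illustrative field; the real model may differ

data = DehydratedDeviceDataModel(algorithm="m.dehydration.v1.olm")
assert data.dict() == {"algorithm": "m.dehydration.v1.olm"}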

View file

@ -436,8 +436,7 @@ class LoginRestServlet(RestServlet):
The body of the JSON response.
"""
token = login_submission["token"]
auth_handler = self.auth_handler
res = await auth_handler.validate_short_term_login_token(token)
res = await self.auth_handler.consume_login_token(token)
return await self._complete_login(
res.user_id,

View file

@ -57,7 +57,6 @@ class LoginTokenRequestServlet(RestServlet):
self.store = hs.get_datastores().main
self.clock = hs.get_clock()
self.server_name = hs.config.server.server_name
self.macaroon_gen = hs.get_macaroon_generator()
self.auth_handler = hs.get_auth_handler()
self.token_timeout = hs.config.experimental.msc3882_token_timeout
self.ui_auth = hs.config.experimental.msc3882_ui_auth
@ -76,10 +75,10 @@ class LoginTokenRequestServlet(RestServlet):
can_skip_ui_auth=False, # Don't allow skipping of UI auth
)
login_token = self.macaroon_gen.generate_short_term_login_token(
login_token = await self.auth_handler.create_login_token_for_user_id(
user_id=requester.user.to_string(),
auth_provider_id="org.matrix.msc3882.login_token_request",
duration_in_ms=self.token_timeout,
duration_ms=self.token_timeout,
)
return (

View file

@ -110,6 +110,13 @@ class RoomBatchSendEventRestServlet(RestServlet):
errcode=Codes.MISSING_PARAM,
)
if await self.store.is_partial_state_room(room_id):
raise SynapseError(
HTTPStatus.BAD_REQUEST,
"Cannot insert history batches until we have fully joined the room",
errcode=Codes.UNABLE_DUE_TO_PARTIAL_STATE,
)
# Verify the batch_id_from_query corresponds to an actual insertion event
# and have the batch connected.
if batch_id_from_query:

View file

@ -146,12 +146,12 @@ class SyncRestServlet(RestServlet):
elif filter_id.startswith("{"):
try:
filter_object = json_decoder.decode(filter_id)
set_timeline_upper_limit(
filter_object, self.hs.config.server.filter_timeline_limit
)
except Exception:
raise SynapseError(400, "Invalid filter JSON")
raise SynapseError(400, "Invalid filter JSON", errcode=Codes.NOT_JSON)
self.filtering.check_valid_filter(filter_object)
set_timeline_upper_limit(
filter_object, self.hs.config.server.filter_timeline_limit
)
filter_collection = FilterCollection(self.hs, filter_object)
else:
try:
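
A standalone sketch of the reordering above, assuming the intent is that validation sees the raw filter before the timeline limit is clamped:

import json

def parse_inline_filter(filter_id: str, max_timeline_limit: int) -> dict:
    try:
        filter_object = json.loads(filter_id)
    except Exception:
        # Synapse raises SynapseError(400, ..., errcode=Codes.NOT_JSON) here.
        raise ValueError("Invalid filter JSON")
    # check_valid_filter(filter_object) would run at this point in Synapse;
    # only a filter that parsed (and validated) gets its timeline limit clamped.
    timeline = filter_object.setdefault("room", {}).setdefault("timeline", {})
    timeline["limit"] = min(timeline.get("limit", max_timeline_limit), max_timeline_limit)
    return filter_object

clamped = parse_inline_filter('{"room": {"timeline": {"limit": 500}}}', 100)
assert clamped["room"]["timeline"]["limit"] == 100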

View file

@ -14,17 +14,20 @@
from typing import TYPE_CHECKING
from twisted.web.resource import Resource
from .local_key_resource import LocalKey
from .remote_key_resource import RemoteKey
from synapse.http.server import HttpServer, JsonResource
from synapse.rest.key.v2.local_key_resource import LocalKey
from synapse.rest.key.v2.remote_key_resource import RemoteKey
if TYPE_CHECKING:
from synapse.server import HomeServer
class KeyApiV2Resource(Resource):
class KeyResource(JsonResource):
def __init__(self, hs: "HomeServer"):
Resource.__init__(self)
self.putChild(b"server", LocalKey(hs))
self.putChild(b"query", RemoteKey(hs))
super().__init__(hs, canonical_json=True)
self.register_servlets(self, hs)
@staticmethod
def register_servlets(http_server: HttpServer, hs: "HomeServer") -> None:
LocalKey(hs).register(http_server)
RemoteKey(hs).register(http_server)

View file

@ -13,16 +13,15 @@
# limitations under the License.
import logging
from typing import TYPE_CHECKING, Optional
import re
from typing import TYPE_CHECKING, Optional, Tuple
from canonicaljson import encode_canonical_json
from signedjson.sign import sign_json
from unpaddedbase64 import encode_base64
from twisted.web.resource import Resource
from twisted.web.server import Request
from synapse.http.server import respond_with_json_bytes
from synapse.http.site import SynapseRequest
from synapse.http.servlet import RestServlet
from synapse.types import JsonDict
if TYPE_CHECKING:
@ -31,7 +30,7 @@ if TYPE_CHECKING:
logger = logging.getLogger(__name__)
class LocalKey(Resource):
class LocalKey(RestServlet):
"""HTTP resource containing encoding the TLS X.509 certificate and NACL
signature verification keys for this server::
@ -61,18 +60,17 @@ class LocalKey(Resource):
}
"""
isLeaf = True
PATTERNS = (re.compile("^/_matrix/key/v2/server(/(?P<key_id>[^/]*))?$"),)
def __init__(self, hs: "HomeServer"):
self.config = hs.config
self.clock = hs.get_clock()
self.update_response_body(self.clock.time_msec())
Resource.__init__(self)
def update_response_body(self, time_now_msec: int) -> None:
refresh_interval = self.config.key.key_refresh_interval
self.valid_until_ts = int(time_now_msec + refresh_interval)
self.response_body = encode_canonical_json(self.response_json_object())
self.response_body = self.response_json_object()
def response_json_object(self) -> JsonDict:
verify_keys = {}
@ -99,9 +97,11 @@ class LocalKey(Resource):
json_object = sign_json(json_object, self.config.server.server_name, key)
return json_object
def render_GET(self, request: SynapseRequest) -> Optional[int]:
def on_GET(
self, request: Request, key_id: Optional[str] = None
) -> Tuple[int, JsonDict]:
time_now = self.clock.time_msec()
# Update the expiry time if less than half the interval remains.
if time_now + self.config.key.key_refresh_interval / 2 > self.valid_until_ts:
self.update_response_body(time_now)
return respond_with_json_bytes(request, 200, self.response_body)
return 200, self.response_body
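
A quick check of the route pattern introduced above: the key_id path segment is optional, so both URL forms resolve to the same handler:

import re

PATTERN = re.compile("^/_matrix/key/v2/server(/(?P<key_id>[^/]*))?$")

assert PATTERN.match("/_matrix/key/v2/server").group("key_id") is None
assert PATTERN.match("/_matrix/key/v2/server/ed25519:abc").group("key_id") == "ed25519:abc"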

View file

@ -13,15 +13,20 @@
# limitations under the License.
import logging
from typing import TYPE_CHECKING, Dict, Set
import re
from typing import TYPE_CHECKING, Dict, Optional, Set, Tuple
from signedjson.sign import sign_json
from synapse.api.errors import Codes, SynapseError
from twisted.web.server import Request
from synapse.crypto.keyring import ServerKeyFetcher
from synapse.http.server import DirectServeJsonResource, respond_with_json
from synapse.http.servlet import parse_integer, parse_json_object_from_request
from synapse.http.site import SynapseRequest
from synapse.http.server import HttpServer
from synapse.http.servlet import (
RestServlet,
parse_integer,
parse_json_object_from_request,
)
from synapse.types import JsonDict
from synapse.util import json_decoder
from synapse.util.async_helpers import yieldable_gather_results
@ -32,7 +37,7 @@ if TYPE_CHECKING:
logger = logging.getLogger(__name__)
class RemoteKey(DirectServeJsonResource):
class RemoteKey(RestServlet):
"""HTTP resource for retrieving the TLS certificate and NACL signature
verification keys for a collection of servers. Checks that the reported
X.509 TLS certificate matches the one used in the HTTPS connection. Checks
@ -88,11 +93,7 @@ class RemoteKey(DirectServeJsonResource):
}
"""
isLeaf = True
def __init__(self, hs: "HomeServer"):
super().__init__()
self.fetcher = ServerKeyFetcher(hs)
self.store = hs.get_datastores().main
self.clock = hs.get_clock()
@ -101,36 +102,48 @@ class RemoteKey(DirectServeJsonResource):
)
self.config = hs.config
async def _async_render_GET(self, request: SynapseRequest) -> None:
assert request.postpath is not None
if len(request.postpath) == 1:
(server,) = request.postpath
query: dict = {server.decode("ascii"): {}}
elif len(request.postpath) == 2:
server, key_id = request.postpath
def register(self, http_server: HttpServer) -> None:
http_server.register_paths(
"GET",
(
re.compile(
"^/_matrix/key/v2/query/(?P<server>[^/]*)(/(?P<key_id>[^/]*))?$"
),
),
self.on_GET,
self.__class__.__name__,
)
http_server.register_paths(
"POST",
(re.compile("^/_matrix/key/v2/query$"),),
self.on_POST,
self.__class__.__name__,
)
async def on_GET(
self, request: Request, server: str, key_id: Optional[str] = None
) -> Tuple[int, JsonDict]:
if server and key_id:
minimum_valid_until_ts = parse_integer(request, "minimum_valid_until_ts")
arguments = {}
if minimum_valid_until_ts is not None:
arguments["minimum_valid_until_ts"] = minimum_valid_until_ts
query = {server.decode("ascii"): {key_id.decode("ascii"): arguments}}
query = {server: {key_id: arguments}}
else:
raise SynapseError(404, "Not found %r" % request.postpath, Codes.NOT_FOUND)
query = {server: {}}
await self.query_keys(request, query, query_remote_on_cache_miss=True)
return 200, await self.query_keys(query, query_remote_on_cache_miss=True)
async def _async_render_POST(self, request: SynapseRequest) -> None:
async def on_POST(self, request: Request) -> Tuple[int, JsonDict]:
content = parse_json_object_from_request(request)
query = content["server_keys"]
await self.query_keys(request, query, query_remote_on_cache_miss=True)
return 200, await self.query_keys(query, query_remote_on_cache_miss=True)
async def query_keys(
self,
request: SynapseRequest,
query: JsonDict,
query_remote_on_cache_miss: bool = False,
) -> None:
self, query: JsonDict, query_remote_on_cache_miss: bool = False
) -> JsonDict:
logger.info("Handling query for keys %r", query)
store_queries = []
@ -232,7 +245,7 @@ class RemoteKey(DirectServeJsonResource):
for server_name, keys in cache_misses.items()
),
)
await self.query_keys(request, query, query_remote_on_cache_miss=False)
return await self.query_keys(query, query_remote_on_cache_miss=False)
else:
signed_keys = []
for key_json_raw in json_results:
@ -244,6 +257,4 @@ class RemoteKey(DirectServeJsonResource):
signed_keys.append(key_json)
response = {"server_keys": signed_keys}
respond_with_json(request, 200, response, canonical_json=True)
return {"server_keys": signed_keys}
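
A sketch of the two routes registered above: GET addresses a single server (key_id optional), while POST takes a batch query body. The server name, key id, and timestamp below are illustrative:

import re

GET_PATTERN = re.compile("^/_matrix/key/v2/query/(?P<server>[^/]*)(/(?P<key_id>[^/]*))?$")
m = GET_PATTERN.match("/_matrix/key/v2/query/matrix.org/ed25519:auto")
assert m is not None and m.group("server") == "matrix.org" and m.group("key_id") == "ed25519:auto"

# Shape of a batch request accepted by the POST route.
post_body = {
    "server_keys": {
        "matrix.org": {"ed25519:auto": {"minimum_valid_until_ts": 1667300000000}},
    }
}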

View file

@ -17,6 +17,9 @@ from typing import TYPE_CHECKING
from twisted.web.resource import Resource
from synapse.rest.synapse.client.oidc.backchannel_logout_resource import (
OIDCBackchannelLogoutResource,
)
from synapse.rest.synapse.client.oidc.callback_resource import OIDCCallbackResource
if TYPE_CHECKING:
@ -29,6 +32,7 @@ class OIDCResource(Resource):
def __init__(self, hs: "HomeServer"):
Resource.__init__(self)
self.putChild(b"callback", OIDCCallbackResource(hs))
self.putChild(b"backchannel_logout", OIDCBackchannelLogoutResource(hs))
__all__ = ["OIDCResource"]

View file

@ -0,0 +1,35 @@
# Copyright 2022 The Matrix.org Foundation C.I.C.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import logging
from typing import TYPE_CHECKING
from synapse.http.server import DirectServeJsonResource
from synapse.http.site import SynapseRequest
if TYPE_CHECKING:
from synapse.server import HomeServer
logger = logging.getLogger(__name__)
class OIDCBackchannelLogoutResource(DirectServeJsonResource):
isLeaf = 1
def __init__(self, hs: "HomeServer"):
super().__init__()
self._oidc_handler = hs.get_oidc_handler()
async def _async_render_POST(self, request: SynapseRequest) -> None:
await self._oidc_handler.handle_backchannel_logout(request)
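
A hedged sketch of how an OpenID Provider would call the new endpoint, following the OIDC Back-Channel Logout spec: a form-encoded POST carrying a signed `logout_token` JWT. The URL and token below are illustrative:

import requests

requests.post(
    "https://synapse.example.org/_synapse/client/oidc/backchannel_logout",
    data={"logout_token": "eyJhbGciOiJSUzI1NiJ9.examplePayload.exampleSignature"},
)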

View file

@ -201,7 +201,7 @@ class DataStore(
name: Optional[str] = None,
guests: bool = True,
deactivated: bool = False,
order_by: str = UserSortOrder.USER_ID.value,
order_by: str = UserSortOrder.NAME.value,
direction: str = "f",
approved: bool = True,
) -> Tuple[List[JsonDict], int]:
@ -261,6 +261,7 @@ class DataStore(
sql_base = f"""
FROM users as u
LEFT JOIN profiles AS p ON u.name = '@' || p.user_id || ':' || ?
LEFT JOIN erased_users AS eu ON u.name = eu.user_id
{where_clause}
"""
sql = "SELECT COUNT(*) as total_users " + sql_base
@ -269,7 +270,8 @@ class DataStore(
sql = f"""
SELECT name, user_type, is_guest, admin, deactivated, shadow_banned,
displayname, avatar_url, creation_ts * 1000 as creation_ts, approved
displayname, avatar_url, creation_ts * 1000 as creation_ts, approved,
eu.user_id is not null as erased
{sql_base}
ORDER BY {order_by_column} {order}, u.name ASC
LIMIT ? OFFSET ?
@ -277,6 +279,13 @@ class DataStore(
args += [limit, start]
txn.execute(sql, args)
users = self.db_pool.cursor_to_dict(txn)
# some of those boolean values are returned as integers when we're on SQLite
columns_to_boolify = ["erased"]
for user in users:
for column in columns_to_boolify:
user[column] = bool(user[column])
return users, count
return await self.db_pool.runInteraction(
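
A quick demonstration of the SQLite quirk noted in the comment above: boolean expressions come back as 0/1 integers, hence the explicit bool() coercion:

import sqlite3

conn = sqlite3.connect(":memory:")
(erased,) = conn.execute("SELECT 1 IS NOT NULL").fetchone()
assert erased == 1 and bool(erased) is True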

View file

@ -157,10 +157,23 @@ class ApplicationServiceWorkerStore(RoomMemberWorkerStore):
app_service: "ApplicationService",
cache_context: _CacheContext,
) -> List[str]:
users_in_room = await self.get_users_in_room(
"""
Get all users in a room that the appservice controls.
Args:
room_id: The room to check in.
app_service: The application service to check interest/control against
Returns:
List of user IDs that the appservice controls.
"""
# We can use `get_local_users_in_room(...)` here because an application service
# can only be interested in local users of the server it's on (ignore any remote
# users that might match the user namespace regex).
local_users_in_room = await self.get_local_users_in_room(
room_id, on_invalidate=cache_context.invalidate
)
return list(filter(app_service.is_interested_in_user, users_in_room))
return list(filter(app_service.is_interested_in_user, local_users_in_room))
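
A toy illustration of the filtering step above: the appservice's user-namespace regex is applied to each local user in the room (user IDs and namespace are illustrative):

import re

namespace = re.compile(r"@irc_.*:example\.org")

def is_interested_in_user(user_id: str) -> bool:
    return bool(namespace.match(user_id))

local_users_in_room = ["@alice:example.org", "@irc_bob:example.org"]
assert list(filter(is_interested_in_user, local_users_in_room)) == ["@irc_bob:example.org"]
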
class ApplicationServiceStore(ApplicationServiceWorkerStore):

View file

@ -274,6 +274,13 @@ class DeviceWorkerStore(RoomMemberWorkerStore, EndToEndKeyWorkerStore):
destination, int(from_stream_id)
)
if not has_changed:
# debugging for https://github.com/matrix-org/synapse/issues/14251
issue_8631_logger.debug(
"%s: no change between %i and %i",
destination,
from_stream_id,
now_stream_id,
)
return now_stream_id, []
updates = await self.db_pool.runInteraction(
@ -1848,7 +1855,7 @@ class DeviceStore(DeviceWorkerStore, DeviceBackgroundUpdateStore):
self,
txn: LoggingTransaction,
user_id: str,
device_ids: Iterable[str],
device_id: str,
hosts: Collection[str],
stream_ids: List[int],
context: Optional[Dict[str, str]],
@ -1864,6 +1871,21 @@ class DeviceStore(DeviceWorkerStore, DeviceBackgroundUpdateStore):
stream_id_iterator = iter(stream_ids)
encoded_context = json_encoder.encode(context)
mark_sent = not self.hs.is_mine_id(user_id)
values = [
(
destination,
next(stream_id_iterator),
user_id,
device_id,
mark_sent,
now,
encoded_context if whitelisted_homeserver(destination) else "{}",
)
for destination in hosts
]
self.db_pool.simple_insert_many_txn(
txn,
table="device_lists_outbound_pokes",
@ -1876,23 +1898,21 @@ class DeviceStore(DeviceWorkerStore, DeviceBackgroundUpdateStore):
"ts",
"opentracing_context",
),
values=[
(
destination,
next(stream_id_iterator),
user_id,
device_id,
not self.hs.is_mine_id(
user_id
), # We only need to send out update for *our* users
now,
encoded_context if whitelisted_homeserver(destination) else "{}",
)
for destination in hosts
for device_id in device_ids
],
values=values,
)
# debugging for https://github.com/matrix-org/synapse/issues/14251
if issue_8631_logger.isEnabledFor(logging.DEBUG):
issue_8631_logger.debug(
"Recorded outbound pokes for %s:%s with device stream ids %s",
user_id,
device_id,
{
stream_id: destination
for (destination, stream_id, _, _, _, _, _) in values
},
)
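
A minimal sketch of the fan-out built in `values` above: one outbound-poke row per destination host, each consuming the next pre-allocated stream id (hostnames and ids are illustrative):

hosts = ["hs1.example", "hs2.example"]
stream_ids = [101, 102]
stream_id_iterator = iter(stream_ids)
values = [(destination, next(stream_id_iterator)) for destination in hosts]
assert values == [("hs1.example", 101), ("hs2.example", 102)]
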
def _add_device_outbound_room_poke_txn(
self,
txn: LoggingTransaction,
@ -1997,7 +2017,7 @@ class DeviceStore(DeviceWorkerStore, DeviceBackgroundUpdateStore):
self._add_device_outbound_poke_to_stream_txn(
txn,
user_id=user_id,
device_ids=[device_id],
device_id=device_id,
hosts=hosts,
stream_ids=stream_ids,
context=context,

View file

@ -139,11 +139,15 @@ class EndToEndKeyWorkerStore(EndToEndKeyBackgroundStore, CacheInvalidationWorker
@trace
@cancellable
async def get_e2e_device_keys_for_cs_api(
self, query_list: List[Tuple[str, Optional[str]]]
self,
query_list: List[Tuple[str, Optional[str]]],
include_displaynames: bool = True,
) -> Dict[str, Dict[str, JsonDict]]:
"""Fetch a list of device keys, formatted suitably for the C/S API.
Args:
query_list(list): List of pairs of user_ids and device_ids.
query_list: List of pairs of user_ids and device_ids.
include_displaynames: Whether to include the displayname of returned devices
(if one exists).
Returns:
Dict mapping from user-id to dict mapping from device_id to
key data. The key data will be a dict in the same format as the
@ -166,9 +170,12 @@ class EndToEndKeyWorkerStore(EndToEndKeyBackgroundStore, CacheInvalidationWorker
continue
r["unsigned"] = {}
display_name = device_info.display_name
if display_name is not None:
r["unsigned"]["device_display_name"] = display_name
if include_displaynames:
# Include the device's display name in the "unsigned" dictionary
display_name = device_info.display_name
if display_name is not None:
r["unsigned"]["device_display_name"] = display_name
rv[user_id][device_id] = r
return rv
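
A self-contained sketch of the effect of `include_displaynames` above: the display name only lands in the "unsigned" dictionary when requested:

from typing import Optional

def format_device(key_json: dict, display_name: Optional[str], include_displaynames: bool) -> dict:
    r = dict(key_json)
    r["unsigned"] = {}
    if include_displaynames and display_name is not None:
        r["unsigned"]["device_display_name"] = display_name
    return r

assert format_device({}, "Phone", include_displaynames=False)["unsigned"] == {}
assert format_device({}, "Phone", include_displaynames=True)["unsigned"] == {"device_display_name": "Phone"}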

View file

@ -29,6 +29,7 @@ from typing import (
)
from synapse.api.errors import StoreError
from synapse.config.homeserver import ExperimentalConfig
from synapse.replication.slave.storage._slaved_id_tracker import SlavedIdTracker
from synapse.storage._base import SQLBaseStore
from synapse.storage.database import (
@ -62,7 +63,9 @@ logger = logging.getLogger(__name__)
def _load_rules(
rawrules: List[JsonDict], enabled_map: Dict[str, bool]
rawrules: List[JsonDict],
enabled_map: Dict[str, bool],
experimental_config: ExperimentalConfig,
) -> FilteredPushRules:
"""Take the DB rows returned from the DB and convert them into a full
`FilteredPushRules` object.
@ -80,7 +83,9 @@ def _load_rules(
push_rules = PushRules(ruleslist)
filtered_rules = FilteredPushRules(push_rules, enabled_map)
filtered_rules = FilteredPushRules(
push_rules, enabled_map, msc3664_enabled=experimental_config.msc3664_enabled
)
return filtered_rules
@ -160,7 +165,7 @@ class PushRulesWorkerStore(
enabled_map = await self.get_push_rules_enabled_for_user(user_id)
return _load_rules(rows, enabled_map)
return _load_rules(rows, enabled_map, self.hs.config.experimental)
async def get_push_rules_enabled_for_user(self, user_id: str) -> Dict[str, bool]:
results = await self.db_pool.simple_select_list(
@ -219,7 +224,9 @@ class PushRulesWorkerStore(
results: Dict[str, FilteredPushRules] = {}
for user_id, rules in raw_rules.items():
results[user_id] = _load_rules(rules, enabled_map_by_user.get(user_id, {}))
results[user_id] = _load_rules(
rules, enabled_map_by_user.get(user_id, {}), self.hs.config.experimental
)
return results

View file

@ -21,7 +21,13 @@ from typing import TYPE_CHECKING, Any, Dict, List, Optional, Tuple, Union, cast
import attr
from synapse.api.constants import UserTypes
from synapse.api.errors import Codes, StoreError, SynapseError, ThreepidValidationError
from synapse.api.errors import (
Codes,
NotFoundError,
StoreError,
SynapseError,
ThreepidValidationError,
)
from synapse.config.homeserver import HomeServerConfig
from synapse.metrics.background_process_metrics import wrap_as_background_process
from synapse.storage.database import (
@ -50,6 +56,14 @@ class ExternalIDReuseException(Exception):
because this external id is given to another user."""
class LoginTokenExpired(Exception):
"""Exception if the login token sent expired"""
class LoginTokenReused(Exception):
"""Exception if the login token sent was already used"""
@attr.s(frozen=True, slots=True, auto_attribs=True)
class TokenLookupResult:
"""Result of looking up an access token.
@ -115,6 +129,20 @@ class RefreshTokenLookupResult:
If None, the session can be refreshed indefinitely."""
@attr.s(auto_attribs=True, frozen=True, slots=True)
class LoginTokenLookupResult:
"""Result of looking up a login token."""
user_id: str
"""The user this token belongs to."""
auth_provider_id: Optional[str]
"""The SSO Identity Provider that the user authenticated with, to get this token."""
auth_provider_session_id: Optional[str]
"""The session ID advertised by the SSO Identity Provider."""
class RegistrationWorkerStore(CacheInvalidationWorkerStore):
def __init__(
self,
@ -1789,6 +1817,130 @@ class RegistrationWorkerStore(CacheInvalidationWorkerStore):
"replace_refresh_token", _replace_refresh_token_txn
)
async def add_login_token_to_user(
self,
user_id: str,
token: str,
expiry_ts: int,
auth_provider_id: Optional[str],
auth_provider_session_id: Optional[str],
) -> None:
"""Adds a short-term login token for the given user.
Args:
user_id: The user ID.
token: The new login token to add.
expiry_ts (milliseconds since the epoch): Time after which the login token
cannot be used.
auth_provider_id: The SSO Identity Provider that the user authenticated with
to get this token, if any
auth_provider_session_id: The session ID advertised by the SSO Identity
Provider, if any.
"""
await self.db_pool.simple_insert(
"login_tokens",
{
"token": token,
"user_id": user_id,
"expiry_ts": expiry_ts,
"auth_provider_id": auth_provider_id,
"auth_provider_session_id": auth_provider_session_id,
},
desc="add_login_token_to_user",
)
def _consume_login_token(
self,
txn: LoggingTransaction,
token: str,
ts: int,
) -> LoginTokenLookupResult:
values = self.db_pool.simple_select_one_txn(
txn,
"login_tokens",
keyvalues={"token": token},
retcols=(
"user_id",
"expiry_ts",
"used_ts",
"auth_provider_id",
"auth_provider_session_id",
),
allow_none=True,
)
if values is None:
raise NotFoundError()
self.db_pool.simple_update_one_txn(
txn,
"login_tokens",
keyvalues={"token": token},
updatevalues={"used_ts": ts},
)
user_id = values["user_id"]
expiry_ts = values["expiry_ts"]
used_ts = values["used_ts"]
auth_provider_id = values["auth_provider_id"]
auth_provider_session_id = values["auth_provider_session_id"]
# Token was already used
if used_ts is not None:
raise LoginTokenReused()
# Token expired
if ts > int(expiry_ts):
raise LoginTokenExpired()
return LoginTokenLookupResult(
user_id=user_id,
auth_provider_id=auth_provider_id,
auth_provider_session_id=auth_provider_session_id,
)
async def consume_login_token(self, token: str) -> LoginTokenLookupResult:
"""Lookup a login token and consume it.
Args:
token: The login token.
Returns:
The data stored with that token, including the `user_id`. A missing,
expired, or already-used token raises one of the exceptions below
rather than returning `None`.
Raises:
NotFoundError if the login token was not found in the database
LoginTokenExpired if the login token expired
LoginTokenReused if the login token was already used
"""
return await self.db_pool.runInteraction(
"consume_login_token",
self._consume_login_token,
token,
self._clock.time_msec(),
)
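
A self-contained sketch of the consume-once check implemented in `_consume_login_token` above (timestamps are milliseconds since the epoch, as in the surrounding code):

class LoginTokenExpired(Exception):
    """The token's expiry_ts has passed."""

class LoginTokenReused(Exception):
    """The token already has a used_ts set."""

def check_login_token(expiry_ts: int, used_ts, now_ms: int) -> None:
    if used_ts is not None:
        raise LoginTokenReused()  # a login token may only be consumed once
    if now_ms > expiry_ts:
        raise LoginTokenExpired()

check_login_token(expiry_ts=2_000, used_ts=None, now_ms=1_000)  # ok: fresh and unused
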
async def invalidate_login_tokens_by_session_id(
self, auth_provider_id: str, auth_provider_session_id: str
) -> None:
"""Invalidate login tokens with the given IdP session ID.
Args:
auth_provider_id: The SSO Identity Provider that the user authenticated with
to get this token
auth_provider_session_id: The session ID advertised by the SSO Identity
Provider
"""
await self.db_pool.simple_update(
table="login_tokens",
keyvalues={
"auth_provider_id": auth_provider_id,
"auth_provider_session_id": auth_provider_session_id,
},
updatevalues={"used_ts": self._clock.time_msec()},
desc="invalidate_login_tokens_by_session_id",
)
@cached()
async def is_guest(self, user_id: str) -> bool:
res = await self.db_pool.simple_select_one_onecol(
@ -2019,6 +2171,12 @@ class RegistrationStore(StatsStore, RegistrationBackgroundUpdateStore):
and hs.config.experimental.msc3866.require_approval_for_new_accounts
)
# Create a background job for removing expired login tokens
if hs.config.worker.run_background_tasks:
self._clock.looping_call(
self._delete_expired_login_tokens, THIRTY_MINUTES_IN_MS
)
async def add_access_token_to_user(
self,
user_id: str,
@ -2617,6 +2775,23 @@ class RegistrationStore(StatsStore, RegistrationBackgroundUpdateStore):
approved,
)
@wrap_as_background_process("delete_expired_login_tokens")
async def _delete_expired_login_tokens(self) -> None:
"""Remove login tokens with expiry dates that have passed."""
def _delete_expired_login_tokens_txn(txn: LoggingTransaction, ts: int) -> None:
sql = "DELETE FROM login_tokens WHERE expiry_ts <= ?"
txn.execute(sql, (ts,))
# We keep the expired tokens for an extra 5 minutes so we can measure how many
# times a token is being used after its expiry
now = self._clock.time_msec()
await self.db_pool.runInteraction(
"delete_expired_login_tokens",
_delete_expired_login_tokens_txn,
now - (5 * 60 * 1000),
)
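
A worked example of the grace period above: a token is only deleted once it has been expired for at least five minutes, so late reuse can still be observed (the clock reading is illustrative):

FIVE_MINUTES_MS = 5 * 60 * 1000
now = 1_667_300_000_000            # example clock reading, ms since epoch
cutoff = now - FIVE_MINUTES_MS     # rows with expiry_ts <= cutoff are deleted
assert cutoff == 1_667_299_700_000
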
def find_max_generated_user_id_localpart(cur: Cursor) -> int:
"""

View file

@ -152,6 +152,9 @@ class RoomMemberWorkerStore(EventsWorkerStore):
the forward extremities of those rooms will exclude most members. We may also
calculate room state incorrectly for such rooms and believe that a member is or
is not in the room when the opposite is true.
Note: If you only care about users in the room local to the homeserver, use
`get_local_users_in_room(...)` instead which will be more performant.
"""
return await self.db_pool.simple_select_onecol(
table="current_state_events",
@ -707,8 +710,8 @@ class RoomMemberWorkerStore(EventsWorkerStore):
# 250 users is pretty arbitrary but the data can be quite large if users
# are in many rooms.
for user_ids in batch_iter(user_ids, 250):
all_user_rooms.update(await self._get_rooms_for_users(user_ids))
for batch_user_ids in batch_iter(user_ids, 250):
all_user_rooms.update(await self._get_rooms_for_users(batch_user_ids))
return all_user_rooms
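
A hedged reimplementation of `batch_iter` to illustrate the loop above, and why the loop variable was renamed: rebinding `user_ids` inside the loop shadows the function argument, which linters flag as confusing even though the generator captured the original iterable:

from itertools import islice
from typing import Iterable, Iterator, Tuple, TypeVar

T = TypeVar("T")

def batch_iter(iterable: Iterable[T], size: int) -> Iterator[Tuple[T, ...]]:
    it = iter(iterable)
    while batch := tuple(islice(it, size)):
        yield batch

assert list(batch_iter(range(5), 2)) == [(0, 1), (2, 3), (4,)]
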
@ -742,7 +745,7 @@ class RoomMemberWorkerStore(EventsWorkerStore):
# user and the set of other users, and then checking if there is any
# overlap.
sql = f"""
SELECT b.state_key
SELECT DISTINCT b.state_key
FROM (
SELECT room_id FROM current_state_events
WHERE type = 'm.room.member' AND membership = 'join' AND state_key = ?
@ -751,7 +754,6 @@ class RoomMemberWorkerStore(EventsWorkerStore):
SELECT room_id, state_key FROM current_state_events
WHERE type = 'm.room.member' AND membership = 'join' AND {clause}
) AS b using (room_id)
LIMIT 1
"""
txn.execute(sql, (user_id, *args))

Some files were not shown because too many files have changed in this diff.