mirror of
https://mau.dev/maunium/synapse.git
synced 2024-05-20 04:23:44 +02:00
Merge remote-tracking branch 'upstream/release-v1.106'
This commit is contained in:
commit
ef1db42843
2
.github/workflows/docker.yml
vendored
2
.github/workflows/docker.yml
vendored
|
@ -30,7 +30,7 @@ jobs:
|
||||||
run: docker buildx inspect
|
run: docker buildx inspect
|
||||||
|
|
||||||
- name: Install Cosign
|
- name: Install Cosign
|
||||||
uses: sigstore/cosign-installer@v3.4.0
|
uses: sigstore/cosign-installer@v3.5.0
|
||||||
|
|
||||||
- name: Checkout repository
|
- name: Checkout repository
|
||||||
uses: actions/checkout@v4
|
uses: actions/checkout@v4
|
||||||
|
|
4
.github/workflows/docs-pr.yaml
vendored
4
.github/workflows/docs-pr.yaml
vendored
|
@ -19,7 +19,7 @@ jobs:
|
||||||
fetch-depth: 0
|
fetch-depth: 0
|
||||||
|
|
||||||
- name: Setup mdbook
|
- name: Setup mdbook
|
||||||
uses: peaceiris/actions-mdbook@adeb05db28a0c0004681db83893d56c0388ea9ea # v1.2.0
|
uses: peaceiris/actions-mdbook@ee69d230fe19748b7abf22df32acaa93833fad08 # v2.0.0
|
||||||
with:
|
with:
|
||||||
mdbook-version: '0.4.17'
|
mdbook-version: '0.4.17'
|
||||||
|
|
||||||
|
@ -53,7 +53,7 @@ jobs:
|
||||||
- uses: actions/checkout@v4
|
- uses: actions/checkout@v4
|
||||||
|
|
||||||
- name: Setup mdbook
|
- name: Setup mdbook
|
||||||
uses: peaceiris/actions-mdbook@adeb05db28a0c0004681db83893d56c0388ea9ea # v1.2.0
|
uses: peaceiris/actions-mdbook@ee69d230fe19748b7abf22df32acaa93833fad08 # v2.0.0
|
||||||
with:
|
with:
|
||||||
mdbook-version: '0.4.17'
|
mdbook-version: '0.4.17'
|
||||||
|
|
||||||
|
|
6
.github/workflows/docs.yaml
vendored
6
.github/workflows/docs.yaml
vendored
|
@ -56,7 +56,7 @@ jobs:
|
||||||
fetch-depth: 0
|
fetch-depth: 0
|
||||||
|
|
||||||
- name: Setup mdbook
|
- name: Setup mdbook
|
||||||
uses: peaceiris/actions-mdbook@adeb05db28a0c0004681db83893d56c0388ea9ea # v1.2.0
|
uses: peaceiris/actions-mdbook@ee69d230fe19748b7abf22df32acaa93833fad08 # v2.0.0
|
||||||
with:
|
with:
|
||||||
mdbook-version: '0.4.17'
|
mdbook-version: '0.4.17'
|
||||||
|
|
||||||
|
@ -80,7 +80,7 @@ jobs:
|
||||||
|
|
||||||
# Deploy to the target directory.
|
# Deploy to the target directory.
|
||||||
- name: Deploy to gh pages
|
- name: Deploy to gh pages
|
||||||
uses: peaceiris/actions-gh-pages@373f7f263a76c20808c831209c920827a82a2847 # v3.9.3
|
uses: peaceiris/actions-gh-pages@4f9cc6602d3f66b9c108549d475ec49e8ef4d45e # v4.0.0
|
||||||
with:
|
with:
|
||||||
github_token: ${{ secrets.GITHUB_TOKEN }}
|
github_token: ${{ secrets.GITHUB_TOKEN }}
|
||||||
publish_dir: ./book
|
publish_dir: ./book
|
||||||
|
@ -110,7 +110,7 @@ jobs:
|
||||||
|
|
||||||
# Deploy to the target directory.
|
# Deploy to the target directory.
|
||||||
- name: Deploy to gh pages
|
- name: Deploy to gh pages
|
||||||
uses: peaceiris/actions-gh-pages@373f7f263a76c20808c831209c920827a82a2847 # v3.9.3
|
uses: peaceiris/actions-gh-pages@4f9cc6602d3f66b9c108549d475ec49e8ef4d45e # v4.0.0
|
||||||
with:
|
with:
|
||||||
github_token: ${{ secrets.GITHUB_TOKEN }}
|
github_token: ${{ secrets.GITHUB_TOKEN }}
|
||||||
publish_dir: ./dev-docs/_build/html
|
publish_dir: ./dev-docs/_build/html
|
||||||
|
|
18
.github/workflows/tests.yml
vendored
18
.github/workflows/tests.yml
vendored
|
@ -81,7 +81,7 @@ jobs:
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@v4
|
- uses: actions/checkout@v4
|
||||||
- name: Install Rust
|
- name: Install Rust
|
||||||
uses: dtolnay/rust-toolchain@1.65.0
|
uses: dtolnay/rust-toolchain@1.66.0
|
||||||
- uses: Swatinem/rust-cache@v2
|
- uses: Swatinem/rust-cache@v2
|
||||||
- uses: matrix-org/setup-python-poetry@v1
|
- uses: matrix-org/setup-python-poetry@v1
|
||||||
with:
|
with:
|
||||||
|
@ -148,7 +148,7 @@ jobs:
|
||||||
uses: actions/checkout@v4
|
uses: actions/checkout@v4
|
||||||
|
|
||||||
- name: Install Rust
|
- name: Install Rust
|
||||||
uses: dtolnay/rust-toolchain@1.65.0
|
uses: dtolnay/rust-toolchain@1.66.0
|
||||||
- uses: Swatinem/rust-cache@v2
|
- uses: Swatinem/rust-cache@v2
|
||||||
|
|
||||||
- name: Setup Poetry
|
- name: Setup Poetry
|
||||||
|
@ -208,7 +208,7 @@ jobs:
|
||||||
with:
|
with:
|
||||||
ref: ${{ github.event.pull_request.head.sha }}
|
ref: ${{ github.event.pull_request.head.sha }}
|
||||||
- name: Install Rust
|
- name: Install Rust
|
||||||
uses: dtolnay/rust-toolchain@1.65.0
|
uses: dtolnay/rust-toolchain@1.66.0
|
||||||
- uses: Swatinem/rust-cache@v2
|
- uses: Swatinem/rust-cache@v2
|
||||||
- uses: matrix-org/setup-python-poetry@v1
|
- uses: matrix-org/setup-python-poetry@v1
|
||||||
with:
|
with:
|
||||||
|
@ -225,7 +225,7 @@ jobs:
|
||||||
- uses: actions/checkout@v4
|
- uses: actions/checkout@v4
|
||||||
|
|
||||||
- name: Install Rust
|
- name: Install Rust
|
||||||
uses: dtolnay/rust-toolchain@1.65.0
|
uses: dtolnay/rust-toolchain@1.66.0
|
||||||
with:
|
with:
|
||||||
components: clippy
|
components: clippy
|
||||||
- uses: Swatinem/rust-cache@v2
|
- uses: Swatinem/rust-cache@v2
|
||||||
|
@ -344,7 +344,7 @@ jobs:
|
||||||
postgres:${{ matrix.job.postgres-version }}
|
postgres:${{ matrix.job.postgres-version }}
|
||||||
|
|
||||||
- name: Install Rust
|
- name: Install Rust
|
||||||
uses: dtolnay/rust-toolchain@1.65.0
|
uses: dtolnay/rust-toolchain@1.66.0
|
||||||
- uses: Swatinem/rust-cache@v2
|
- uses: Swatinem/rust-cache@v2
|
||||||
|
|
||||||
- uses: matrix-org/setup-python-poetry@v1
|
- uses: matrix-org/setup-python-poetry@v1
|
||||||
|
@ -386,7 +386,7 @@ jobs:
|
||||||
- uses: actions/checkout@v4
|
- uses: actions/checkout@v4
|
||||||
|
|
||||||
- name: Install Rust
|
- name: Install Rust
|
||||||
uses: dtolnay/rust-toolchain@1.65.0
|
uses: dtolnay/rust-toolchain@1.66.0
|
||||||
- uses: Swatinem/rust-cache@v2
|
- uses: Swatinem/rust-cache@v2
|
||||||
|
|
||||||
# There aren't wheels for some of the older deps, so we need to install
|
# There aren't wheels for some of the older deps, so we need to install
|
||||||
|
@ -498,7 +498,7 @@ jobs:
|
||||||
run: cat sytest-blacklist .ci/worker-blacklist > synapse-blacklist-with-workers
|
run: cat sytest-blacklist .ci/worker-blacklist > synapse-blacklist-with-workers
|
||||||
|
|
||||||
- name: Install Rust
|
- name: Install Rust
|
||||||
uses: dtolnay/rust-toolchain@1.65.0
|
uses: dtolnay/rust-toolchain@1.66.0
|
||||||
- uses: Swatinem/rust-cache@v2
|
- uses: Swatinem/rust-cache@v2
|
||||||
|
|
||||||
- name: Run SyTest
|
- name: Run SyTest
|
||||||
|
@ -642,7 +642,7 @@ jobs:
|
||||||
path: synapse
|
path: synapse
|
||||||
|
|
||||||
- name: Install Rust
|
- name: Install Rust
|
||||||
uses: dtolnay/rust-toolchain@1.65.0
|
uses: dtolnay/rust-toolchain@1.66.0
|
||||||
- uses: Swatinem/rust-cache@v2
|
- uses: Swatinem/rust-cache@v2
|
||||||
|
|
||||||
- name: Prepare Complement's Prerequisites
|
- name: Prepare Complement's Prerequisites
|
||||||
|
@ -674,7 +674,7 @@ jobs:
|
||||||
- uses: actions/checkout@v4
|
- uses: actions/checkout@v4
|
||||||
|
|
||||||
- name: Install Rust
|
- name: Install Rust
|
||||||
uses: dtolnay/rust-toolchain@1.65.0
|
uses: dtolnay/rust-toolchain@1.66.0
|
||||||
- uses: Swatinem/rust-cache@v2
|
- uses: Swatinem/rust-cache@v2
|
||||||
|
|
||||||
- run: cargo test
|
- run: cargo test
|
||||||
|
|
42
CHANGES.md
42
CHANGES.md
|
@ -1,3 +1,45 @@
|
||||||
|
# Synapse 1.106.0rc1 (2024-04-25)
|
||||||
|
|
||||||
|
### Features
|
||||||
|
|
||||||
|
- Send an email if the address is already bound to an user account. ([\#16819](https://github.com/element-hq/synapse/issues/16819))
|
||||||
|
- Implement the rendezvous mechanism described by [MSC4108](https://github.com/matrix-org/matrix-spec-proposals/issues/4108). ([\#17056](https://github.com/element-hq/synapse/issues/17056))
|
||||||
|
- Support delegating the rendezvous mechanism described [MSC4108](https://github.com/matrix-org/matrix-spec-proposals/issues/4108) to an external implementation. ([\#17086](https://github.com/element-hq/synapse/issues/17086))
|
||||||
|
|
||||||
|
### Bugfixes
|
||||||
|
|
||||||
|
- Add validation to ensure that the `limit` parameter on `/publicRooms` is non-negative. ([\#16920](https://github.com/element-hq/synapse/issues/16920))
|
||||||
|
- Return `400 M_NOT_JSON` upon receiving invalid JSON in query parameters across various client and admin endpoints, rather than an internal server error. ([\#16923](https://github.com/element-hq/synapse/issues/16923))
|
||||||
|
- Make the CSAPI endpoint `/keys/device_signing/upload` idempotent. ([\#16943](https://github.com/element-hq/synapse/issues/16943))
|
||||||
|
- Redact membership events if the user requested erasure upon deactivating. ([\#17076](https://github.com/element-hq/synapse/issues/17076))
|
||||||
|
|
||||||
|
### Improved Documentation
|
||||||
|
|
||||||
|
- Add a prompt in the contributing guide to manually configure icu4c. ([\#17069](https://github.com/element-hq/synapse/issues/17069))
|
||||||
|
- Clarify what part of message retention is still experimental. ([\#17099](https://github.com/element-hq/synapse/issues/17099))
|
||||||
|
|
||||||
|
### Internal Changes
|
||||||
|
|
||||||
|
- Use new receipts column to optimise receipt and push action SQL queries. Contributed by Nick @ Beeper (@fizzadar). ([\#17032](https://github.com/element-hq/synapse/issues/17032), [\#17096](https://github.com/element-hq/synapse/issues/17096))
|
||||||
|
- Fix mypy with latest Twisted release. ([\#17036](https://github.com/element-hq/synapse/issues/17036))
|
||||||
|
- Bump minimum supported Rust version to 1.66.0. ([\#17079](https://github.com/element-hq/synapse/issues/17079))
|
||||||
|
- Add helpers to transform Twisted requests to Rust http Requests/Responses. ([\#17081](https://github.com/element-hq/synapse/issues/17081))
|
||||||
|
- Fix type annotation for `visited_chains` after `mypy` upgrade. ([\#17125](https://github.com/element-hq/synapse/issues/17125))
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
### Updates to locked dependencies
|
||||||
|
|
||||||
|
* Bump anyhow from 1.0.81 to 1.0.82. ([\#17095](https://github.com/element-hq/synapse/issues/17095))
|
||||||
|
* Bump peaceiris/actions-gh-pages from 3.9.3 to 4.0.0. ([\#17087](https://github.com/element-hq/synapse/issues/17087))
|
||||||
|
* Bump peaceiris/actions-mdbook from 1.2.0 to 2.0.0. ([\#17089](https://github.com/element-hq/synapse/issues/17089))
|
||||||
|
* Bump pyasn1-modules from 0.3.0 to 0.4.0. ([\#17093](https://github.com/element-hq/synapse/issues/17093))
|
||||||
|
* Bump pygithub from 2.2.0 to 2.3.0. ([\#17092](https://github.com/element-hq/synapse/issues/17092))
|
||||||
|
* Bump ruff from 0.3.5 to 0.3.7. ([\#17094](https://github.com/element-hq/synapse/issues/17094))
|
||||||
|
* Bump sigstore/cosign-installer from 3.4.0 to 3.5.0. ([\#17088](https://github.com/element-hq/synapse/issues/17088))
|
||||||
|
* Bump twine from 4.0.2 to 5.0.0. ([\#17091](https://github.com/element-hq/synapse/issues/17091))
|
||||||
|
* Bump types-pillow from 10.2.0.20240406 to 10.2.0.20240415. ([\#17090](https://github.com/element-hq/synapse/issues/17090))
|
||||||
|
|
||||||
# Synapse 1.105.1 (2024-04-23)
|
# Synapse 1.105.1 (2024-04-23)
|
||||||
|
|
||||||
## Security advisory
|
## Security advisory
|
||||||
|
|
260
Cargo.lock
generated
260
Cargo.lock
generated
|
@ -13,9 +13,9 @@ dependencies = [
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "anyhow"
|
name = "anyhow"
|
||||||
version = "1.0.81"
|
version = "1.0.82"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "0952808a6c2afd1aa8947271f3a60f1a6763c7b912d210184c5149b5cf147247"
|
checksum = "f538837af36e6f6a9be0faa67f9a314f8119e4e4b5867c6ab40ed60360142519"
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "arc-swap"
|
name = "arc-swap"
|
||||||
|
@ -29,6 +29,12 @@ version = "1.1.0"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "d468802bab17cbc0cc575e9b053f41e72aa36bfa6b7f55e3529ffa43161b97fa"
|
checksum = "d468802bab17cbc0cc575e9b053f41e72aa36bfa6b7f55e3529ffa43161b97fa"
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "base64"
|
||||||
|
version = "0.21.7"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "9d297deb1925b89f2ccc13d7635fa0714f12c87adce1c75356b39ca9b7178567"
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "bitflags"
|
name = "bitflags"
|
||||||
version = "1.3.2"
|
version = "1.3.2"
|
||||||
|
@ -53,12 +59,33 @@ dependencies = [
|
||||||
"generic-array",
|
"generic-array",
|
||||||
]
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "bumpalo"
|
||||||
|
version = "3.16.0"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "79296716171880943b8470b5f8d03aa55eb2e645a4874bdbb28adb49162e012c"
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "bytes"
|
||||||
|
version = "1.6.0"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "514de17de45fdb8dc022b1a7975556c53c86f9f0aa5f534b98977b171857c2c9"
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "cfg-if"
|
name = "cfg-if"
|
||||||
version = "1.0.0"
|
version = "1.0.0"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd"
|
checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd"
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "cpufeatures"
|
||||||
|
version = "0.2.12"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "53fe5e26ff1b7aef8bca9c6080520cfb8d9333c7568e1829cef191a9723e5504"
|
||||||
|
dependencies = [
|
||||||
|
"libc",
|
||||||
|
]
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "crypto-common"
|
name = "crypto-common"
|
||||||
version = "0.1.6"
|
version = "0.1.6"
|
||||||
|
@ -71,15 +98,21 @@ dependencies = [
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "digest"
|
name = "digest"
|
||||||
version = "0.10.5"
|
version = "0.10.7"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "adfbc57365a37acbd2ebf2b64d7e69bb766e2fea813521ed536f5d0520dcf86c"
|
checksum = "9ed9a281f7bc9b7576e61468ba615a66a5c8cfdff42420a70aa82701a3b1e292"
|
||||||
dependencies = [
|
dependencies = [
|
||||||
"block-buffer",
|
"block-buffer",
|
||||||
"crypto-common",
|
"crypto-common",
|
||||||
"subtle",
|
"subtle",
|
||||||
]
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "fnv"
|
||||||
|
version = "1.0.7"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "3f9eec918d3f24069decb9af1554cad7c880e2da24a9afd88aca000531ab82c1"
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "generic-array"
|
name = "generic-array"
|
||||||
version = "0.14.6"
|
version = "0.14.6"
|
||||||
|
@ -90,6 +123,43 @@ dependencies = [
|
||||||
"version_check",
|
"version_check",
|
||||||
]
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "getrandom"
|
||||||
|
version = "0.2.14"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "94b22e06ecb0110981051723910cbf0b5f5e09a2062dd7663334ee79a9d1286c"
|
||||||
|
dependencies = [
|
||||||
|
"cfg-if",
|
||||||
|
"js-sys",
|
||||||
|
"libc",
|
||||||
|
"wasi",
|
||||||
|
"wasm-bindgen",
|
||||||
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "headers"
|
||||||
|
version = "0.4.0"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "322106e6bd0cba2d5ead589ddb8150a13d7c4217cf80d7c4f682ca994ccc6aa9"
|
||||||
|
dependencies = [
|
||||||
|
"base64",
|
||||||
|
"bytes",
|
||||||
|
"headers-core",
|
||||||
|
"http",
|
||||||
|
"httpdate",
|
||||||
|
"mime",
|
||||||
|
"sha1",
|
||||||
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "headers-core"
|
||||||
|
version = "0.3.0"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "54b4a22553d4242c49fddb9ba998a99962b5cc6f22cb5a3482bec22522403ce4"
|
||||||
|
dependencies = [
|
||||||
|
"http",
|
||||||
|
]
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "heck"
|
name = "heck"
|
||||||
version = "0.4.1"
|
version = "0.4.1"
|
||||||
|
@ -102,6 +172,23 @@ version = "0.4.3"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "7f24254aa9a54b5c858eaee2f5bccdb46aaf0e486a595ed5fd8f86ba55232a70"
|
checksum = "7f24254aa9a54b5c858eaee2f5bccdb46aaf0e486a595ed5fd8f86ba55232a70"
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "http"
|
||||||
|
version = "1.1.0"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "21b9ddb458710bc376481b842f5da65cdf31522de232c1ca8146abce2a358258"
|
||||||
|
dependencies = [
|
||||||
|
"bytes",
|
||||||
|
"fnv",
|
||||||
|
"itoa",
|
||||||
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "httpdate"
|
||||||
|
version = "1.0.3"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "df3b46402a9d5adb4c86a0cf463f42e19994e3ee891101b1841f30a545cb49a9"
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "indoc"
|
name = "indoc"
|
||||||
version = "2.0.4"
|
version = "2.0.4"
|
||||||
|
@ -114,6 +201,15 @@ version = "1.0.4"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "4217ad341ebadf8d8e724e264f13e593e0648f5b3e94b3896a5df283be015ecc"
|
checksum = "4217ad341ebadf8d8e724e264f13e593e0648f5b3e94b3896a5df283be015ecc"
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "js-sys"
|
||||||
|
version = "0.3.69"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "29c15563dc2726973df627357ce0c9ddddbea194836909d655df6a75d2cf296d"
|
||||||
|
dependencies = [
|
||||||
|
"wasm-bindgen",
|
||||||
|
]
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "lazy_static"
|
name = "lazy_static"
|
||||||
version = "1.4.0"
|
version = "1.4.0"
|
||||||
|
@ -122,9 +218,9 @@ checksum = "e2abad23fbc42b3700f2f279844dc832adb2b2eb069b2df918f455c4e18cc646"
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "libc"
|
name = "libc"
|
||||||
version = "0.2.135"
|
version = "0.2.153"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "68783febc7782c6c5cb401fbda4de5a9898be1762314da0bb2c10ced61f18b0c"
|
checksum = "9c198f91728a82281a64e1f4f9eeb25d82cb32a5de251c6bd1b5154d63a8e7bd"
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "lock_api"
|
name = "lock_api"
|
||||||
|
@ -157,6 +253,12 @@ dependencies = [
|
||||||
"autocfg",
|
"autocfg",
|
||||||
]
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "mime"
|
||||||
|
version = "0.3.17"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "6877bb514081ee2a7ff5ef9de3281f14a4dd4bceac4c09388074a6b5df8a139a"
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "once_cell"
|
name = "once_cell"
|
||||||
version = "1.15.0"
|
version = "1.15.0"
|
||||||
|
@ -192,6 +294,12 @@ version = "1.6.0"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "7170ef9988bc169ba16dd36a7fa041e5c4cbeb6a35b76d4c03daded371eae7c0"
|
checksum = "7170ef9988bc169ba16dd36a7fa041e5c4cbeb6a35b76d4c03daded371eae7c0"
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "ppv-lite86"
|
||||||
|
version = "0.2.17"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "5b40af805b3121feab8a3c29f04d8ad262fa8e0561883e7653e024ae4479e6de"
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "proc-macro2"
|
name = "proc-macro2"
|
||||||
version = "1.0.76"
|
version = "1.0.76"
|
||||||
|
@ -295,6 +403,36 @@ dependencies = [
|
||||||
"proc-macro2",
|
"proc-macro2",
|
||||||
]
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "rand"
|
||||||
|
version = "0.8.5"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "34af8d1a0e25924bc5b7c43c079c942339d8f0a8b57c39049bef581b46327404"
|
||||||
|
dependencies = [
|
||||||
|
"libc",
|
||||||
|
"rand_chacha",
|
||||||
|
"rand_core",
|
||||||
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "rand_chacha"
|
||||||
|
version = "0.3.1"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "e6c10a63a0fa32252be49d21e7709d4d4baf8d231c2dbce1eaa8141b9b127d88"
|
||||||
|
dependencies = [
|
||||||
|
"ppv-lite86",
|
||||||
|
"rand_core",
|
||||||
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "rand_core"
|
||||||
|
version = "0.6.4"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "ec0be4795e2f6a28069bec0b5ff3e2ac9bafc99e6a9a7dc3547996c5c816922c"
|
||||||
|
dependencies = [
|
||||||
|
"getrandom",
|
||||||
|
]
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "redox_syscall"
|
name = "redox_syscall"
|
||||||
version = "0.2.16"
|
version = "0.2.16"
|
||||||
|
@ -376,6 +514,28 @@ dependencies = [
|
||||||
"serde",
|
"serde",
|
||||||
]
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "sha1"
|
||||||
|
version = "0.10.5"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "f04293dc80c3993519f2d7f6f511707ee7094fe0c6d3406feb330cdb3540eba3"
|
||||||
|
dependencies = [
|
||||||
|
"cfg-if",
|
||||||
|
"cpufeatures",
|
||||||
|
"digest",
|
||||||
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "sha2"
|
||||||
|
version = "0.10.8"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "793db75ad2bcafc3ffa7c68b215fee268f537982cd901d132f89c6343f3a3dc8"
|
||||||
|
dependencies = [
|
||||||
|
"cfg-if",
|
||||||
|
"cpufeatures",
|
||||||
|
"digest",
|
||||||
|
]
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "smallvec"
|
name = "smallvec"
|
||||||
version = "1.10.0"
|
version = "1.10.0"
|
||||||
|
@ -404,16 +564,23 @@ name = "synapse"
|
||||||
version = "0.1.0"
|
version = "0.1.0"
|
||||||
dependencies = [
|
dependencies = [
|
||||||
"anyhow",
|
"anyhow",
|
||||||
|
"base64",
|
||||||
"blake2",
|
"blake2",
|
||||||
|
"bytes",
|
||||||
|
"headers",
|
||||||
"hex",
|
"hex",
|
||||||
|
"http",
|
||||||
"lazy_static",
|
"lazy_static",
|
||||||
"log",
|
"log",
|
||||||
|
"mime",
|
||||||
"pyo3",
|
"pyo3",
|
||||||
"pyo3-log",
|
"pyo3-log",
|
||||||
"pythonize",
|
"pythonize",
|
||||||
"regex",
|
"regex",
|
||||||
"serde",
|
"serde",
|
||||||
"serde_json",
|
"serde_json",
|
||||||
|
"sha2",
|
||||||
|
"ulid",
|
||||||
]
|
]
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
|
@ -428,6 +595,17 @@ version = "1.15.0"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "dcf81ac59edc17cc8697ff311e8f5ef2d99fcbd9817b34cec66f90b6c3dfd987"
|
checksum = "dcf81ac59edc17cc8697ff311e8f5ef2d99fcbd9817b34cec66f90b6c3dfd987"
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "ulid"
|
||||||
|
version = "1.1.2"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "34778c17965aa2a08913b57e1f34db9b4a63f5de31768b55bf20d2795f921259"
|
||||||
|
dependencies = [
|
||||||
|
"getrandom",
|
||||||
|
"rand",
|
||||||
|
"web-time",
|
||||||
|
]
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "unicode-ident"
|
name = "unicode-ident"
|
||||||
version = "1.0.5"
|
version = "1.0.5"
|
||||||
|
@ -446,6 +624,76 @@ version = "0.9.4"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "49874b5167b65d7193b8aba1567f5c7d93d001cafc34600cee003eda787e483f"
|
checksum = "49874b5167b65d7193b8aba1567f5c7d93d001cafc34600cee003eda787e483f"
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "wasi"
|
||||||
|
version = "0.11.0+wasi-snapshot-preview1"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "9c8d87e72b64a3b4db28d11ce29237c246188f4f51057d65a7eab63b7987e423"
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "wasm-bindgen"
|
||||||
|
version = "0.2.92"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "4be2531df63900aeb2bca0daaaddec08491ee64ceecbee5076636a3b026795a8"
|
||||||
|
dependencies = [
|
||||||
|
"cfg-if",
|
||||||
|
"wasm-bindgen-macro",
|
||||||
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "wasm-bindgen-backend"
|
||||||
|
version = "0.2.92"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "614d787b966d3989fa7bb98a654e369c762374fd3213d212cfc0251257e747da"
|
||||||
|
dependencies = [
|
||||||
|
"bumpalo",
|
||||||
|
"log",
|
||||||
|
"once_cell",
|
||||||
|
"proc-macro2",
|
||||||
|
"quote",
|
||||||
|
"syn",
|
||||||
|
"wasm-bindgen-shared",
|
||||||
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "wasm-bindgen-macro"
|
||||||
|
version = "0.2.92"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "a1f8823de937b71b9460c0c34e25f3da88250760bec0ebac694b49997550d726"
|
||||||
|
dependencies = [
|
||||||
|
"quote",
|
||||||
|
"wasm-bindgen-macro-support",
|
||||||
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "wasm-bindgen-macro-support"
|
||||||
|
version = "0.2.92"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "e94f17b526d0a461a191c78ea52bbce64071ed5c04c9ffe424dcb38f74171bb7"
|
||||||
|
dependencies = [
|
||||||
|
"proc-macro2",
|
||||||
|
"quote",
|
||||||
|
"syn",
|
||||||
|
"wasm-bindgen-backend",
|
||||||
|
"wasm-bindgen-shared",
|
||||||
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "wasm-bindgen-shared"
|
||||||
|
version = "0.2.92"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "af190c94f2773fdb3729c55b007a722abb5384da03bc0986df4c289bf5567e96"
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "web-time"
|
||||||
|
version = "1.1.0"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "5a6580f308b1fad9207618087a65c04e7a10bc77e02c8e84e9b00dd4b12fa0bb"
|
||||||
|
dependencies = [
|
||||||
|
"js-sys",
|
||||||
|
"wasm-bindgen",
|
||||||
|
]
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "windows-sys"
|
name = "windows-sys"
|
||||||
version = "0.36.1"
|
version = "0.36.1"
|
||||||
|
|
6
debian/changelog
vendored
6
debian/changelog
vendored
|
@ -1,3 +1,9 @@
|
||||||
|
matrix-synapse-py3 (1.106.0~rc1) stable; urgency=medium
|
||||||
|
|
||||||
|
* New Synapse release 1.106.0rc1.
|
||||||
|
|
||||||
|
-- Synapse Packaging team <packages@matrix.org> Thu, 25 Apr 2024 15:54:59 +0100
|
||||||
|
|
||||||
matrix-synapse-py3 (1.105.1) stable; urgency=medium
|
matrix-synapse-py3 (1.105.1) stable; urgency=medium
|
||||||
|
|
||||||
* New Synapse release 1.105.1.
|
* New Synapse release 1.105.1.
|
||||||
|
|
|
@ -102,6 +102,8 @@ experimental_features:
|
||||||
msc3391_enabled: true
|
msc3391_enabled: true
|
||||||
# Filtering /messages by relation type.
|
# Filtering /messages by relation type.
|
||||||
msc3874_enabled: true
|
msc3874_enabled: true
|
||||||
|
# no UIA for x-signing upload for the first time
|
||||||
|
msc3967_enabled: true
|
||||||
|
|
||||||
server_notices:
|
server_notices:
|
||||||
system_mxid_localpart: _server
|
system_mxid_localpart: _server
|
||||||
|
|
|
@ -86,6 +86,8 @@ poetry install --extras all
|
||||||
This will install the runtime and developer dependencies for the project. Be sure to check
|
This will install the runtime and developer dependencies for the project. Be sure to check
|
||||||
that the `poetry install` step completed cleanly.
|
that the `poetry install` step completed cleanly.
|
||||||
|
|
||||||
|
For OSX users, be sure to set `PKG_CONFIG_PATH` to support `icu4c`. Run `brew info icu4c` for more details.
|
||||||
|
|
||||||
## Running Synapse via poetry
|
## Running Synapse via poetry
|
||||||
|
|
||||||
To start a local instance of Synapse in the locked poetry environment, create a config file:
|
To start a local instance of Synapse in the locked poetry environment, create a config file:
|
||||||
|
|
|
@ -7,8 +7,10 @@ follow the semantics described in
|
||||||
and allow server and room admins to configure how long messages should
|
and allow server and room admins to configure how long messages should
|
||||||
be kept in a homeserver's database before being purged from it.
|
be kept in a homeserver's database before being purged from it.
|
||||||
**Please note that, as this feature isn't part of the Matrix
|
**Please note that, as this feature isn't part of the Matrix
|
||||||
specification yet, this implementation is to be considered as
|
specification yet, the use of `m.room.retention` events for per-room
|
||||||
experimental.**
|
retention policies is to be considered as experimental. However, the use
|
||||||
|
of a default message retention policy is considered a stable feature
|
||||||
|
in Synapse.**
|
||||||
|
|
||||||
A message retention policy is mainly defined by its `max_lifetime`
|
A message retention policy is mainly defined by its `max_lifetime`
|
||||||
parameter, which defines how long a message can be kept around after
|
parameter, which defines how long a message can be kept around after
|
||||||
|
|
68
poetry.lock
generated
68
poetry.lock
generated
|
@ -1848,17 +1848,17 @@ files = [
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "pyasn1-modules"
|
name = "pyasn1-modules"
|
||||||
version = "0.3.0"
|
version = "0.4.0"
|
||||||
description = "A collection of ASN.1-based protocols modules"
|
description = "A collection of ASN.1-based protocols modules"
|
||||||
optional = false
|
optional = false
|
||||||
python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,>=2.7"
|
python-versions = ">=3.8"
|
||||||
files = [
|
files = [
|
||||||
{file = "pyasn1_modules-0.3.0-py2.py3-none-any.whl", hash = "sha256:d3ccd6ed470d9ffbc716be08bd90efbd44d0734bc9303818f7336070984a162d"},
|
{file = "pyasn1_modules-0.4.0-py3-none-any.whl", hash = "sha256:be04f15b66c206eed667e0bb5ab27e2b1855ea54a842e5037738099e8ca4ae0b"},
|
||||||
{file = "pyasn1_modules-0.3.0.tar.gz", hash = "sha256:5bd01446b736eb9d31512a30d46c1ac3395d676c6f3cafa4c03eb54b9925631c"},
|
{file = "pyasn1_modules-0.4.0.tar.gz", hash = "sha256:831dbcea1b177b28c9baddf4c6d1013c24c3accd14a1873fffaa6a2e905f17b6"},
|
||||||
]
|
]
|
||||||
|
|
||||||
[package.dependencies]
|
[package.dependencies]
|
||||||
pyasn1 = ">=0.4.6,<0.6.0"
|
pyasn1 = ">=0.4.6,<0.7.0"
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "pycparser"
|
name = "pycparser"
|
||||||
|
@ -1983,13 +1983,13 @@ typing-extensions = ">=4.6.0,<4.7.0 || >4.7.0"
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "pygithub"
|
name = "pygithub"
|
||||||
version = "2.2.0"
|
version = "2.3.0"
|
||||||
description = "Use the full Github API v3"
|
description = "Use the full Github API v3"
|
||||||
optional = false
|
optional = false
|
||||||
python-versions = ">=3.7"
|
python-versions = ">=3.7"
|
||||||
files = [
|
files = [
|
||||||
{file = "PyGithub-2.2.0-py3-none-any.whl", hash = "sha256:41042ea53e4c372219db708c38d2ca1fd4fadab75475bac27d89d339596cfad1"},
|
{file = "PyGithub-2.3.0-py3-none-any.whl", hash = "sha256:65b499728be3ce7b0cd2cd760da3b32f0f4d7bc55e5e0677617f90f6564e793e"},
|
||||||
{file = "PyGithub-2.2.0.tar.gz", hash = "sha256:e39be7c4dc39418bdd6e3ecab5931c636170b8b21b4d26f9ecf7e6102a3b51c3"},
|
{file = "PyGithub-2.3.0.tar.gz", hash = "sha256:0148d7347a1cdeed99af905077010aef81a4dad988b0ba51d4108bf66b443f7e"},
|
||||||
]
|
]
|
||||||
|
|
||||||
[package.dependencies]
|
[package.dependencies]
|
||||||
|
@ -2444,28 +2444,28 @@ files = [
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "ruff"
|
name = "ruff"
|
||||||
version = "0.3.5"
|
version = "0.3.7"
|
||||||
description = "An extremely fast Python linter and code formatter, written in Rust."
|
description = "An extremely fast Python linter and code formatter, written in Rust."
|
||||||
optional = false
|
optional = false
|
||||||
python-versions = ">=3.7"
|
python-versions = ">=3.7"
|
||||||
files = [
|
files = [
|
||||||
{file = "ruff-0.3.5-py3-none-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:aef5bd3b89e657007e1be6b16553c8813b221ff6d92c7526b7e0227450981eac"},
|
{file = "ruff-0.3.7-py3-none-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:0e8377cccb2f07abd25e84fc5b2cbe48eeb0fea9f1719cad7caedb061d70e5ce"},
|
||||||
{file = "ruff-0.3.5-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:89b1e92b3bd9fca249153a97d23f29bed3992cff414b222fcd361d763fc53f12"},
|
{file = "ruff-0.3.7-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:15a4d1cc1e64e556fa0d67bfd388fed416b7f3b26d5d1c3e7d192c897e39ba4b"},
|
||||||
{file = "ruff-0.3.5-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5e55771559c89272c3ebab23326dc23e7f813e492052391fe7950c1a5a139d89"},
|
{file = "ruff-0.3.7-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d28bdf3d7dc71dd46929fafeec98ba89b7c3550c3f0978e36389b5631b793663"},
|
||||||
{file = "ruff-0.3.5-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:dabc62195bf54b8a7876add6e789caae0268f34582333cda340497c886111c39"},
|
{file = "ruff-0.3.7-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:379b67d4f49774ba679593b232dcd90d9e10f04d96e3c8ce4a28037ae473f7bb"},
|
||||||
{file = "ruff-0.3.5-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3a05f3793ba25f194f395578579c546ca5d83e0195f992edc32e5907d142bfa3"},
|
{file = "ruff-0.3.7-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c060aea8ad5ef21cdfbbe05475ab5104ce7827b639a78dd55383a6e9895b7c51"},
|
||||||
{file = "ruff-0.3.5-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:dfd3504e881082959b4160ab02f7a205f0fadc0a9619cc481982b6837b2fd4c0"},
|
{file = "ruff-0.3.7-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:ebf8f615dde968272d70502c083ebf963b6781aacd3079081e03b32adfe4d58a"},
|
||||||
{file = "ruff-0.3.5-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:87258e0d4b04046cf1d6cc1c56fadbf7a880cc3de1f7294938e923234cf9e498"},
|
{file = "ruff-0.3.7-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d48098bd8f5c38897b03604f5428901b65e3c97d40b3952e38637b5404b739a2"},
|
||||||
{file = "ruff-0.3.5-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:712e71283fc7d9f95047ed5f793bc019b0b0a29849b14664a60fd66c23b96da1"},
|
{file = "ruff-0.3.7-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:da8a4fda219bf9024692b1bc68c9cff4b80507879ada8769dc7e985755d662ea"},
|
||||||
{file = "ruff-0.3.5-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a532a90b4a18d3f722c124c513ffb5e5eaff0cc4f6d3aa4bda38e691b8600c9f"},
|
{file = "ruff-0.3.7-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c44e0149f1d8b48c4d5c33d88c677a4aa22fd09b1683d6a7ff55b816b5d074f"},
|
||||||
{file = "ruff-0.3.5-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:122de171a147c76ada00f76df533b54676f6e321e61bd8656ae54be326c10296"},
|
{file = "ruff-0.3.7-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:3050ec0af72b709a62ecc2aca941b9cd479a7bf2b36cc4562f0033d688e44fa1"},
|
||||||
{file = "ruff-0.3.5-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:d80a6b18a6c3b6ed25b71b05eba183f37d9bc8b16ace9e3d700997f00b74660b"},
|
{file = "ruff-0.3.7-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:a29cc38e4c1ab00da18a3f6777f8b50099d73326981bb7d182e54a9a21bb4ff7"},
|
||||||
{file = "ruff-0.3.5-py3-none-musllinux_1_2_i686.whl", hash = "sha256:a7b6e63194c68bca8e71f81de30cfa6f58ff70393cf45aab4c20f158227d5936"},
|
{file = "ruff-0.3.7-py3-none-musllinux_1_2_i686.whl", hash = "sha256:5b15cc59c19edca917f51b1956637db47e200b0fc5e6e1878233d3a938384b0b"},
|
||||||
{file = "ruff-0.3.5-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:a759d33a20c72f2dfa54dae6e85e1225b8e302e8ac655773aff22e542a300985"},
|
{file = "ruff-0.3.7-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:e491045781b1e38b72c91247cf4634f040f8d0cb3e6d3d64d38dcf43616650b4"},
|
||||||
{file = "ruff-0.3.5-py3-none-win32.whl", hash = "sha256:9d8605aa990045517c911726d21293ef4baa64f87265896e491a05461cae078d"},
|
{file = "ruff-0.3.7-py3-none-win32.whl", hash = "sha256:bc931de87593d64fad3a22e201e55ad76271f1d5bfc44e1a1887edd0903c7d9f"},
|
||||||
{file = "ruff-0.3.5-py3-none-win_amd64.whl", hash = "sha256:dc56bb16a63c1303bd47563c60482a1512721053d93231cf7e9e1c6954395a0e"},
|
{file = "ruff-0.3.7-py3-none-win_amd64.whl", hash = "sha256:5ef0e501e1e39f35e03c2acb1d1238c595b8bb36cf7a170e7c1df1b73da00e74"},
|
||||||
{file = "ruff-0.3.5-py3-none-win_arm64.whl", hash = "sha256:faeeae9905446b975dcf6d4499dc93439b131f1443ee264055c5716dd947af55"},
|
{file = "ruff-0.3.7-py3-none-win_arm64.whl", hash = "sha256:789e144f6dc7019d1f92a812891c645274ed08af6037d11fc65fcbc183b7d59f"},
|
||||||
{file = "ruff-0.3.5.tar.gz", hash = "sha256:a067daaeb1dc2baf9b82a32dae67d154d95212080c80435eb052d95da647763d"},
|
{file = "ruff-0.3.7.tar.gz", hash = "sha256:d5c1aebee5162c2226784800ae031f660c350e7a3402c4d1f8ea4e97e232e3ba"},
|
||||||
]
|
]
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
|
@ -2954,13 +2954,13 @@ docs = ["sphinx (<7.0.0)"]
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "twine"
|
name = "twine"
|
||||||
version = "4.0.2"
|
version = "5.0.0"
|
||||||
description = "Collection of utilities for publishing packages on PyPI"
|
description = "Collection of utilities for publishing packages on PyPI"
|
||||||
optional = false
|
optional = false
|
||||||
python-versions = ">=3.7"
|
python-versions = ">=3.8"
|
||||||
files = [
|
files = [
|
||||||
{file = "twine-4.0.2-py3-none-any.whl", hash = "sha256:929bc3c280033347a00f847236564d1c52a3e61b1ac2516c97c48f3ceab756d8"},
|
{file = "twine-5.0.0-py3-none-any.whl", hash = "sha256:a262933de0b484c53408f9edae2e7821c1c45a3314ff2df9bdd343aa7ab8edc0"},
|
||||||
{file = "twine-4.0.2.tar.gz", hash = "sha256:9e102ef5fdd5a20661eb88fad46338806c3bd32cf1db729603fe3697b1bc83c8"},
|
{file = "twine-5.0.0.tar.gz", hash = "sha256:89b0cc7d370a4b66421cc6102f269aa910fe0f1861c124f573cf2ddedbc10cf4"},
|
||||||
]
|
]
|
||||||
|
|
||||||
[package.dependencies]
|
[package.dependencies]
|
||||||
|
@ -3109,13 +3109,13 @@ files = [
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "types-pillow"
|
name = "types-pillow"
|
||||||
version = "10.2.0.20240406"
|
version = "10.2.0.20240415"
|
||||||
description = "Typing stubs for Pillow"
|
description = "Typing stubs for Pillow"
|
||||||
optional = false
|
optional = false
|
||||||
python-versions = ">=3.8"
|
python-versions = ">=3.8"
|
||||||
files = [
|
files = [
|
||||||
{file = "types-Pillow-10.2.0.20240406.tar.gz", hash = "sha256:62e0cc1f17caba40e72e7154a483f4c7f3bea0e1c34c0ebba9de3c7745bc306d"},
|
{file = "types-Pillow-10.2.0.20240415.tar.gz", hash = "sha256:dd6058027639bcdc66ba78b228cc25fdae42524c2150c78c804da427e7e76e70"},
|
||||||
{file = "types_Pillow-10.2.0.20240406-py3-none-any.whl", hash = "sha256:5ac182e8afce53de30abca2fdf9cbec7b2500e549d0be84da035a729a84c7c47"},
|
{file = "types_Pillow-10.2.0.20240415-py3-none-any.whl", hash = "sha256:f933332b7e96010bae9b9cf82a4c9979ff0c270d63f5c5bbffb2d789b85cd00b"},
|
||||||
]
|
]
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
|
@ -3451,4 +3451,4 @@ user-search = ["pyicu"]
|
||||||
[metadata]
|
[metadata]
|
||||||
lock-version = "2.0"
|
lock-version = "2.0"
|
||||||
python-versions = "^3.8.0"
|
python-versions = "^3.8.0"
|
||||||
content-hash = "4abda113a01f162bb3978b0372956d569364533aa39f57863c234363f8449a4f"
|
content-hash = "1951f2b4623138d47db08a405edd970e67599d05804bb459af21a085e1665f69"
|
||||||
|
|
|
@ -96,7 +96,7 @@ module-name = "synapse.synapse_rust"
|
||||||
|
|
||||||
[tool.poetry]
|
[tool.poetry]
|
||||||
name = "matrix-synapse"
|
name = "matrix-synapse"
|
||||||
version = "1.105.1"
|
version = "1.106.0rc1"
|
||||||
description = "Homeserver for the Matrix decentralised comms protocol"
|
description = "Homeserver for the Matrix decentralised comms protocol"
|
||||||
authors = ["Matrix.org Team and Contributors <packages@matrix.org>"]
|
authors = ["Matrix.org Team and Contributors <packages@matrix.org>"]
|
||||||
license = "AGPL-3.0-or-later"
|
license = "AGPL-3.0-or-later"
|
||||||
|
@ -321,7 +321,7 @@ all = [
|
||||||
# This helps prevents merge conflicts when running a batch of dependabot updates.
|
# This helps prevents merge conflicts when running a batch of dependabot updates.
|
||||||
isort = ">=5.10.1"
|
isort = ">=5.10.1"
|
||||||
black = ">=22.7.0"
|
black = ">=22.7.0"
|
||||||
ruff = "0.3.5"
|
ruff = "0.3.7"
|
||||||
# Type checking only works with the pydantic.v1 compat module from pydantic v2
|
# Type checking only works with the pydantic.v1 compat module from pydantic v2
|
||||||
pydantic = "^2"
|
pydantic = "^2"
|
||||||
|
|
||||||
|
|
|
@ -743,9 +743,9 @@ psycopg2cffi-compat==1.1 ; python_full_version >= "3.8.0" and python_full_versio
|
||||||
--hash=sha256:d25e921748475522b33d13420aad5c2831c743227dc1f1f2585e0fdb5c914e05
|
--hash=sha256:d25e921748475522b33d13420aad5c2831c743227dc1f1f2585e0fdb5c914e05
|
||||||
psycopg2cffi==2.9.0 ; python_full_version >= "3.8.0" and python_full_version < "4.0.0" and platform_python_implementation == "PyPy" \
|
psycopg2cffi==2.9.0 ; python_full_version >= "3.8.0" and python_full_version < "4.0.0" and platform_python_implementation == "PyPy" \
|
||||||
--hash=sha256:7e272edcd837de3a1d12b62185eb85c45a19feda9e62fa1b120c54f9e8d35c52
|
--hash=sha256:7e272edcd837de3a1d12b62185eb85c45a19feda9e62fa1b120c54f9e8d35c52
|
||||||
pyasn1-modules==0.3.0 ; python_version >= "3.8" and python_full_version < "4.0.0" \
|
pyasn1-modules==0.4.0 ; python_version >= "3.8" and python_full_version < "4.0.0" \
|
||||||
--hash=sha256:5bd01446b736eb9d31512a30d46c1ac3395d676c6f3cafa4c03eb54b9925631c \
|
--hash=sha256:831dbcea1b177b28c9baddf4c6d1013c24c3accd14a1873fffaa6a2e905f17b6 \
|
||||||
--hash=sha256:d3ccd6ed470d9ffbc716be08bd90efbd44d0734bc9303818f7336070984a162d
|
--hash=sha256:be04f15b66c206eed667e0bb5ab27e2b1855ea54a842e5037738099e8ca4ae0b
|
||||||
pyasn1==0.5.1 ; python_version >= "3.8" and python_full_version < "4.0.0" \
|
pyasn1==0.5.1 ; python_version >= "3.8" and python_full_version < "4.0.0" \
|
||||||
--hash=sha256:4439847c58d40b1d0a573d07e3856e95333f1976294494c325775aeca506eb58 \
|
--hash=sha256:4439847c58d40b1d0a573d07e3856e95333f1976294494c325775aeca506eb58 \
|
||||||
--hash=sha256:6d391a96e59b23130a5cfa74d6fd7f388dbbe26cc8f1edf39fdddf08d9d6676c
|
--hash=sha256:6d391a96e59b23130a5cfa74d6fd7f388dbbe26cc8f1edf39fdddf08d9d6676c
|
||||||
|
|
|
@ -7,7 +7,7 @@ name = "synapse"
|
||||||
version = "0.1.0"
|
version = "0.1.0"
|
||||||
|
|
||||||
edition = "2021"
|
edition = "2021"
|
||||||
rust-version = "1.65.0"
|
rust-version = "1.66.0"
|
||||||
|
|
||||||
[lib]
|
[lib]
|
||||||
name = "synapse"
|
name = "synapse"
|
||||||
|
@ -23,8 +23,13 @@ name = "synapse.synapse_rust"
|
||||||
|
|
||||||
[dependencies]
|
[dependencies]
|
||||||
anyhow = "1.0.63"
|
anyhow = "1.0.63"
|
||||||
|
base64 = "0.21.7"
|
||||||
|
bytes = "1.6.0"
|
||||||
|
headers = "0.4.0"
|
||||||
|
http = "1.1.0"
|
||||||
lazy_static = "1.4.0"
|
lazy_static = "1.4.0"
|
||||||
log = "0.4.17"
|
log = "0.4.17"
|
||||||
|
mime = "0.3.17"
|
||||||
pyo3 = { version = "0.20.0", features = [
|
pyo3 = { version = "0.20.0", features = [
|
||||||
"macros",
|
"macros",
|
||||||
"anyhow",
|
"anyhow",
|
||||||
|
@ -34,8 +39,10 @@ pyo3 = { version = "0.20.0", features = [
|
||||||
pyo3-log = "0.9.0"
|
pyo3-log = "0.9.0"
|
||||||
pythonize = "0.20.0"
|
pythonize = "0.20.0"
|
||||||
regex = "1.6.0"
|
regex = "1.6.0"
|
||||||
|
sha2 = "0.10.8"
|
||||||
serde = { version = "1.0.144", features = ["derive"] }
|
serde = { version = "1.0.144", features = ["derive"] }
|
||||||
serde_json = "1.0.85"
|
serde_json = "1.0.85"
|
||||||
|
ulid = "1.1.2"
|
||||||
|
|
||||||
[features]
|
[features]
|
||||||
extension-module = ["pyo3/extension-module"]
|
extension-module = ["pyo3/extension-module"]
|
||||||
|
|
60
rust/src/errors.rs
Normal file
60
rust/src/errors.rs
Normal file
|
@ -0,0 +1,60 @@
|
||||||
|
/*
|
||||||
|
* This file is licensed under the Affero General Public License (AGPL) version 3.
|
||||||
|
*
|
||||||
|
* Copyright (C) 2024 New Vector, Ltd
|
||||||
|
*
|
||||||
|
* This program is free software: you can redistribute it and/or modify
|
||||||
|
* it under the terms of the GNU Affero General Public License as
|
||||||
|
* published by the Free Software Foundation, either version 3 of the
|
||||||
|
* License, or (at your option) any later version.
|
||||||
|
*
|
||||||
|
* See the GNU Affero General Public License for more details:
|
||||||
|
* <https://www.gnu.org/licenses/agpl-3.0.html>.
|
||||||
|
*/
|
||||||
|
|
||||||
|
#![allow(clippy::new_ret_no_self)]
|
||||||
|
|
||||||
|
use std::collections::HashMap;
|
||||||
|
|
||||||
|
use http::{HeaderMap, StatusCode};
|
||||||
|
use pyo3::{exceptions::PyValueError, import_exception};
|
||||||
|
|
||||||
|
import_exception!(synapse.api.errors, SynapseError);
|
||||||
|
|
||||||
|
impl SynapseError {
|
||||||
|
pub fn new(
|
||||||
|
code: StatusCode,
|
||||||
|
message: String,
|
||||||
|
errcode: &'static str,
|
||||||
|
additional_fields: Option<HashMap<String, String>>,
|
||||||
|
headers: Option<HeaderMap>,
|
||||||
|
) -> pyo3::PyErr {
|
||||||
|
// Transform the HeaderMap into a HashMap<String, String>
|
||||||
|
let headers = if let Some(headers) = headers {
|
||||||
|
let mut map = HashMap::with_capacity(headers.len());
|
||||||
|
for (key, value) in headers.iter() {
|
||||||
|
let Ok(value) = value.to_str() else {
|
||||||
|
// This should never happen, but we don't want to panic in case it does
|
||||||
|
return PyValueError::new_err(
|
||||||
|
"Could not construct SynapseError: header value is not valid ASCII",
|
||||||
|
);
|
||||||
|
};
|
||||||
|
|
||||||
|
map.insert(key.as_str().to_owned(), value.to_owned());
|
||||||
|
}
|
||||||
|
Some(map)
|
||||||
|
} else {
|
||||||
|
None
|
||||||
|
};
|
||||||
|
|
||||||
|
SynapseError::new_err((code.as_u16(), message, errcode, additional_fields, headers))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
import_exception!(synapse.api.errors, NotFoundError);
|
||||||
|
|
||||||
|
impl NotFoundError {
|
||||||
|
pub fn new() -> pyo3::PyErr {
|
||||||
|
NotFoundError::new_err(())
|
||||||
|
}
|
||||||
|
}
|
165
rust/src/http.rs
Normal file
165
rust/src/http.rs
Normal file
|
@ -0,0 +1,165 @@
|
||||||
|
/*
|
||||||
|
* This file is licensed under the Affero General Public License (AGPL) version 3.
|
||||||
|
*
|
||||||
|
* Copyright (C) 2024 New Vector, Ltd
|
||||||
|
*
|
||||||
|
* This program is free software: you can redistribute it and/or modify
|
||||||
|
* it under the terms of the GNU Affero General Public License as
|
||||||
|
* published by the Free Software Foundation, either version 3 of the
|
||||||
|
* License, or (at your option) any later version.
|
||||||
|
*
|
||||||
|
* See the GNU Affero General Public License for more details:
|
||||||
|
* <https://www.gnu.org/licenses/agpl-3.0.html>.
|
||||||
|
*/
|
||||||
|
|
||||||
|
use bytes::{Buf, BufMut, Bytes, BytesMut};
|
||||||
|
use headers::{Header, HeaderMapExt};
|
||||||
|
use http::{HeaderName, HeaderValue, Method, Request, Response, StatusCode, Uri};
|
||||||
|
use pyo3::{
|
||||||
|
exceptions::PyValueError,
|
||||||
|
types::{PyBytes, PySequence, PyTuple},
|
||||||
|
PyAny, PyResult,
|
||||||
|
};
|
||||||
|
|
||||||
|
use crate::errors::SynapseError;
|
||||||
|
|
||||||
|
/// Read a file-like Python object by chunks
|
||||||
|
///
|
||||||
|
/// # Errors
|
||||||
|
///
|
||||||
|
/// Returns an error if calling the `read` on the Python object failed
|
||||||
|
fn read_io_body(body: &PyAny, chunk_size: usize) -> PyResult<Bytes> {
|
||||||
|
let mut buf = BytesMut::new();
|
||||||
|
loop {
|
||||||
|
let bytes: &PyBytes = body.call_method1("read", (chunk_size,))?.downcast()?;
|
||||||
|
if bytes.as_bytes().is_empty() {
|
||||||
|
return Ok(buf.into());
|
||||||
|
}
|
||||||
|
buf.put(bytes.as_bytes());
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Transform a Twisted `IRequest` to an [`http::Request`]
|
||||||
|
///
|
||||||
|
/// It uses the following members of `IRequest`:
|
||||||
|
/// - `content`, which is expected to be a file-like object with a `read` method
|
||||||
|
/// - `uri`, which is expected to be a valid URI as `bytes`
|
||||||
|
/// - `method`, which is expected to be a valid HTTP method as `bytes`
|
||||||
|
/// - `requestHeaders`, which is expected to have a `getAllRawHeaders` method
|
||||||
|
///
|
||||||
|
/// # Errors
|
||||||
|
///
|
||||||
|
/// Returns an error if the Python object doesn't properly implement `IRequest`
|
||||||
|
pub fn http_request_from_twisted(request: &PyAny) -> PyResult<Request<Bytes>> {
|
||||||
|
let content = request.getattr("content")?;
|
||||||
|
let body = read_io_body(content, 4096)?;
|
||||||
|
|
||||||
|
let mut req = Request::new(body);
|
||||||
|
|
||||||
|
let uri: &PyBytes = request.getattr("uri")?.downcast()?;
|
||||||
|
*req.uri_mut() =
|
||||||
|
Uri::try_from(uri.as_bytes()).map_err(|_| PyValueError::new_err("invalid uri"))?;
|
||||||
|
|
||||||
|
let method: &PyBytes = request.getattr("method")?.downcast()?;
|
||||||
|
*req.method_mut() = Method::from_bytes(method.as_bytes())
|
||||||
|
.map_err(|_| PyValueError::new_err("invalid method"))?;
|
||||||
|
|
||||||
|
let headers_iter = request
|
||||||
|
.getattr("requestHeaders")?
|
||||||
|
.call_method0("getAllRawHeaders")?
|
||||||
|
.iter()?;
|
||||||
|
|
||||||
|
for header in headers_iter {
|
||||||
|
let header = header?;
|
||||||
|
let header: &PyTuple = header.downcast()?;
|
||||||
|
let name: &PyBytes = header.get_item(0)?.downcast()?;
|
||||||
|
let name = HeaderName::from_bytes(name.as_bytes())
|
||||||
|
.map_err(|_| PyValueError::new_err("invalid header name"))?;
|
||||||
|
|
||||||
|
let values: &PySequence = header.get_item(1)?.downcast()?;
|
||||||
|
for index in 0..values.len()? {
|
||||||
|
let value: &PyBytes = values.get_item(index)?.downcast()?;
|
||||||
|
let value = HeaderValue::from_bytes(value.as_bytes())
|
||||||
|
.map_err(|_| PyValueError::new_err("invalid header value"))?;
|
||||||
|
req.headers_mut().append(name.clone(), value);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
Ok(req)
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Send an [`http::Response`] through a Twisted `IRequest`
|
||||||
|
///
|
||||||
|
/// It uses the following members of `IRequest`:
|
||||||
|
///
|
||||||
|
/// - `responseHeaders`, which is expected to have a `addRawHeader(bytes, bytes)` method
|
||||||
|
/// - `setResponseCode(int)` method
|
||||||
|
/// - `write(bytes)` method
|
||||||
|
/// - `finish()` method
|
||||||
|
///
|
||||||
|
/// # Errors
|
||||||
|
///
|
||||||
|
/// Returns an error if the Python object doesn't properly implement `IRequest`
|
||||||
|
pub fn http_response_to_twisted<B>(request: &PyAny, response: Response<B>) -> PyResult<()>
|
||||||
|
where
|
||||||
|
B: Buf,
|
||||||
|
{
|
||||||
|
let (parts, mut body) = response.into_parts();
|
||||||
|
|
||||||
|
request.call_method1("setResponseCode", (parts.status.as_u16(),))?;
|
||||||
|
|
||||||
|
let response_headers = request.getattr("responseHeaders")?;
|
||||||
|
for (name, value) in parts.headers.iter() {
|
||||||
|
response_headers.call_method1("addRawHeader", (name.as_str(), value.as_bytes()))?;
|
||||||
|
}
|
||||||
|
|
||||||
|
while body.remaining() != 0 {
|
||||||
|
let chunk = body.chunk();
|
||||||
|
request.call_method1("write", (chunk,))?;
|
||||||
|
body.advance(chunk.len());
|
||||||
|
}
|
||||||
|
|
||||||
|
request.call_method0("finish")?;
|
||||||
|
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
|
||||||
|
/// An extension trait for [`HeaderMap`] that provides typed access to headers, and throws the
|
||||||
|
/// right python exceptions when the header is missing or fails to parse.
|
||||||
|
///
|
||||||
|
/// [`HeaderMap`]: headers::HeaderMap
|
||||||
|
pub trait HeaderMapPyExt: HeaderMapExt {
|
||||||
|
/// Get a header from the map, returning an error if it is missing or invalid.
|
||||||
|
fn typed_get_required<H>(&self) -> PyResult<H>
|
||||||
|
where
|
||||||
|
H: Header,
|
||||||
|
{
|
||||||
|
self.typed_get_optional::<H>()?.ok_or_else(|| {
|
||||||
|
SynapseError::new(
|
||||||
|
StatusCode::BAD_REQUEST,
|
||||||
|
format!("Missing required header: {}", H::name()),
|
||||||
|
"M_MISSING_PARAM",
|
||||||
|
None,
|
||||||
|
None,
|
||||||
|
)
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Get a header from the map, returning `None` if it is missing and an error if it is invalid.
|
||||||
|
fn typed_get_optional<H>(&self) -> PyResult<Option<H>>
|
||||||
|
where
|
||||||
|
H: Header,
|
||||||
|
{
|
||||||
|
self.typed_try_get::<H>().map_err(|_| {
|
||||||
|
SynapseError::new(
|
||||||
|
StatusCode::BAD_REQUEST,
|
||||||
|
format!("Invalid header: {}", H::name()),
|
||||||
|
"M_INVALID_PARAM",
|
||||||
|
None,
|
||||||
|
None,
|
||||||
|
)
|
||||||
|
})
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl<T: HeaderMapExt> HeaderMapPyExt for T {}
|
|
@ -3,8 +3,11 @@ use pyo3::prelude::*;
|
||||||
use pyo3_log::ResetHandle;
|
use pyo3_log::ResetHandle;
|
||||||
|
|
||||||
pub mod acl;
|
pub mod acl;
|
||||||
|
pub mod errors;
|
||||||
pub mod events;
|
pub mod events;
|
||||||
|
pub mod http;
|
||||||
pub mod push;
|
pub mod push;
|
||||||
|
pub mod rendezvous;
|
||||||
|
|
||||||
lazy_static! {
|
lazy_static! {
|
||||||
static ref LOGGING_HANDLE: ResetHandle = pyo3_log::init();
|
static ref LOGGING_HANDLE: ResetHandle = pyo3_log::init();
|
||||||
|
@ -43,6 +46,7 @@ fn synapse_rust(py: Python<'_>, m: &PyModule) -> PyResult<()> {
|
||||||
acl::register_module(py, m)?;
|
acl::register_module(py, m)?;
|
||||||
push::register_module(py, m)?;
|
push::register_module(py, m)?;
|
||||||
events::register_module(py, m)?;
|
events::register_module(py, m)?;
|
||||||
|
rendezvous::register_module(py, m)?;
|
||||||
|
|
||||||
Ok(())
|
Ok(())
|
||||||
}
|
}
|
||||||
|
|
315
rust/src/rendezvous/mod.rs
Normal file
315
rust/src/rendezvous/mod.rs
Normal file
|
@ -0,0 +1,315 @@
|
||||||
|
/*
|
||||||
|
* This file is licensed under the Affero General Public License (AGPL) version 3.
|
||||||
|
*
|
||||||
|
* Copyright (C) 2024 New Vector, Ltd
|
||||||
|
*
|
||||||
|
* This program is free software: you can redistribute it and/or modify
|
||||||
|
* it under the terms of the GNU Affero General Public License as
|
||||||
|
* published by the Free Software Foundation, either version 3 of the
|
||||||
|
* License, or (at your option) any later version.
|
||||||
|
*
|
||||||
|
* See the GNU Affero General Public License for more details:
|
||||||
|
* <https://www.gnu.org/licenses/agpl-3.0.html>.
|
||||||
|
*
|
||||||
|
*/
|
||||||
|
|
||||||
|
use std::{
|
||||||
|
collections::{BTreeMap, HashMap},
|
||||||
|
time::{Duration, SystemTime},
|
||||||
|
};
|
||||||
|
|
||||||
|
use bytes::Bytes;
|
||||||
|
use headers::{
|
||||||
|
AccessControlAllowOrigin, AccessControlExposeHeaders, CacheControl, ContentLength, ContentType,
|
||||||
|
HeaderMapExt, IfMatch, IfNoneMatch, Pragma,
|
||||||
|
};
|
||||||
|
use http::{header::ETAG, HeaderMap, Response, StatusCode, Uri};
|
||||||
|
use mime::Mime;
|
||||||
|
use pyo3::{
|
||||||
|
exceptions::PyValueError, pyclass, pymethods, types::PyModule, Py, PyAny, PyObject, PyResult,
|
||||||
|
Python, ToPyObject,
|
||||||
|
};
|
||||||
|
use ulid::Ulid;
|
||||||
|
|
||||||
|
use self::session::Session;
|
||||||
|
use crate::{
|
||||||
|
errors::{NotFoundError, SynapseError},
|
||||||
|
http::{http_request_from_twisted, http_response_to_twisted, HeaderMapPyExt},
|
||||||
|
};
|
||||||
|
|
||||||
|
mod session;
|
||||||
|
|
||||||
|
// n.b. Because OPTIONS requests are handled by the Python code, we don't need to set Access-Control-Allow-Headers.
|
||||||
|
fn prepare_headers(headers: &mut HeaderMap, session: &Session) {
|
||||||
|
headers.typed_insert(AccessControlAllowOrigin::ANY);
|
||||||
|
headers.typed_insert(AccessControlExposeHeaders::from_iter([ETAG]));
|
||||||
|
headers.typed_insert(Pragma::no_cache());
|
||||||
|
headers.typed_insert(CacheControl::new().with_no_store());
|
||||||
|
headers.typed_insert(session.etag());
|
||||||
|
headers.typed_insert(session.expires());
|
||||||
|
headers.typed_insert(session.last_modified());
|
||||||
|
}
|
||||||
|
|
||||||
|
#[pyclass]
|
||||||
|
struct RendezvousHandler {
|
||||||
|
base: Uri,
|
||||||
|
clock: PyObject,
|
||||||
|
sessions: BTreeMap<Ulid, Session>,
|
||||||
|
capacity: usize,
|
||||||
|
max_content_length: u64,
|
||||||
|
ttl: Duration,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl RendezvousHandler {
|
||||||
|
/// Check the input headers of a request which sets data for a session, and return the content type.
|
||||||
|
fn check_input_headers(&self, headers: &HeaderMap) -> PyResult<Mime> {
|
||||||
|
let ContentLength(content_length) = headers.typed_get_required()?;
|
||||||
|
|
||||||
|
if content_length > self.max_content_length {
|
||||||
|
return Err(SynapseError::new(
|
||||||
|
StatusCode::PAYLOAD_TOO_LARGE,
|
||||||
|
"Payload too large".to_owned(),
|
||||||
|
"M_TOO_LARGE",
|
||||||
|
None,
|
||||||
|
None,
|
||||||
|
));
|
||||||
|
}
|
||||||
|
|
||||||
|
let content_type: ContentType = headers.typed_get_required()?;
|
||||||
|
|
||||||
|
// Content-Type must be text/plain
|
||||||
|
if content_type != ContentType::text() {
|
||||||
|
return Err(SynapseError::new(
|
||||||
|
StatusCode::BAD_REQUEST,
|
||||||
|
"Content-Type must be text/plain".to_owned(),
|
||||||
|
"M_INVALID_PARAM",
|
||||||
|
None,
|
||||||
|
None,
|
||||||
|
));
|
||||||
|
}
|
||||||
|
|
||||||
|
Ok(content_type.into())
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Evict expired sessions and remove the oldest sessions until we're under the capacity.
|
||||||
|
fn evict(&mut self, now: SystemTime) {
|
||||||
|
// First remove all the entries which expired
|
||||||
|
self.sessions.retain(|_, session| !session.expired(now));
|
||||||
|
|
||||||
|
// Then we remove the oldest entires until we're under the limit
|
||||||
|
while self.sessions.len() > self.capacity {
|
||||||
|
self.sessions.pop_first();
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[pymethods]
|
||||||
|
impl RendezvousHandler {
|
||||||
|
#[new]
|
||||||
|
#[pyo3(signature = (homeserver, /, capacity=100, max_content_length=4*1024, eviction_interval=60*1000, ttl=60*1000))]
|
||||||
|
fn new(
|
||||||
|
py: Python<'_>,
|
||||||
|
homeserver: &PyAny,
|
||||||
|
capacity: usize,
|
||||||
|
max_content_length: u64,
|
||||||
|
eviction_interval: u64,
|
||||||
|
ttl: u64,
|
||||||
|
) -> PyResult<Py<Self>> {
|
||||||
|
let base: String = homeserver
|
||||||
|
.getattr("config")?
|
||||||
|
.getattr("server")?
|
||||||
|
.getattr("public_baseurl")?
|
||||||
|
.extract()?;
|
||||||
|
let base = Uri::try_from(format!("{base}_synapse/client/rendezvous"))
|
||||||
|
.map_err(|_| PyValueError::new_err("Invalid base URI"))?;
|
||||||
|
|
||||||
|
let clock = homeserver.call_method0("get_clock")?.to_object(py);
|
||||||
|
|
||||||
|
// Construct a Python object so that we can get a reference to the
|
||||||
|
// evict method and schedule it to run.
|
||||||
|
let self_ = Py::new(
|
||||||
|
py,
|
||||||
|
Self {
|
||||||
|
base,
|
||||||
|
clock,
|
||||||
|
sessions: BTreeMap::new(),
|
||||||
|
capacity,
|
||||||
|
max_content_length,
|
||||||
|
ttl: Duration::from_millis(ttl),
|
||||||
|
},
|
||||||
|
)?;
|
||||||
|
|
||||||
|
let evict = self_.getattr(py, "_evict")?;
|
||||||
|
homeserver.call_method0("get_clock")?.call_method(
|
||||||
|
"looping_call",
|
||||||
|
(evict, eviction_interval),
|
||||||
|
None,
|
||||||
|
)?;
|
||||||
|
|
||||||
|
Ok(self_)
|
||||||
|
}
|
||||||
|
|
||||||
|
fn _evict(&mut self, py: Python<'_>) -> PyResult<()> {
|
||||||
|
let clock = self.clock.as_ref(py);
|
||||||
|
let now: u64 = clock.call_method0("time_msec")?.extract()?;
|
||||||
|
let now = SystemTime::UNIX_EPOCH + Duration::from_millis(now);
|
||||||
|
self.evict(now);
|
||||||
|
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
|
||||||
|
fn handle_post(&mut self, py: Python<'_>, twisted_request: &PyAny) -> PyResult<()> {
|
||||||
|
let request = http_request_from_twisted(twisted_request)?;
|
||||||
|
|
||||||
|
let content_type = self.check_input_headers(request.headers())?;
|
||||||
|
|
||||||
|
let clock = self.clock.as_ref(py);
|
||||||
|
let now: u64 = clock.call_method0("time_msec")?.extract()?;
|
||||||
|
let now = SystemTime::UNIX_EPOCH + Duration::from_millis(now);
|
||||||
|
|
||||||
|
// We trigger an immediate eviction if we're at 2x the capacity
|
||||||
|
if self.sessions.len() >= self.capacity * 2 {
|
||||||
|
self.evict(now);
|
||||||
|
}
|
||||||
|
|
||||||
|
// Generate a new ULID for the session from the current time.
|
||||||
|
let id = Ulid::from_datetime(now);
|
||||||
|
|
||||||
|
let uri = format!("{base}/{id}", base = self.base);
|
||||||
|
|
||||||
|
let body = request.into_body();
|
||||||
|
|
||||||
|
let session = Session::new(body, content_type, now, self.ttl);
|
||||||
|
|
||||||
|
let response = serde_json::json!({
|
||||||
|
"url": uri,
|
||||||
|
})
|
||||||
|
.to_string();
|
||||||
|
|
||||||
|
let mut response = Response::new(response.as_bytes());
|
||||||
|
*response.status_mut() = StatusCode::CREATED;
|
||||||
|
response.headers_mut().typed_insert(ContentType::json());
|
||||||
|
prepare_headers(response.headers_mut(), &session);
|
||||||
|
http_response_to_twisted(twisted_request, response)?;
|
||||||
|
|
||||||
|
self.sessions.insert(id, session);
|
||||||
|
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
|
||||||
|
fn handle_get(&mut self, py: Python<'_>, twisted_request: &PyAny, id: &str) -> PyResult<()> {
|
||||||
|
let request = http_request_from_twisted(twisted_request)?;
|
||||||
|
|
||||||
|
let if_none_match: Option<IfNoneMatch> = request.headers().typed_get_optional()?;
|
||||||
|
|
||||||
|
let now: u64 = self.clock.call_method0(py, "time_msec")?.extract(py)?;
|
||||||
|
let now = SystemTime::UNIX_EPOCH + Duration::from_millis(now);
|
||||||
|
|
||||||
|
let id: Ulid = id.parse().map_err(|_| NotFoundError::new())?;
|
||||||
|
let session = self
|
||||||
|
.sessions
|
||||||
|
.get(&id)
|
||||||
|
.filter(|s| !s.expired(now))
|
||||||
|
.ok_or_else(NotFoundError::new)?;
|
||||||
|
|
||||||
|
if let Some(if_none_match) = if_none_match {
|
||||||
|
if !if_none_match.precondition_passes(&session.etag()) {
|
||||||
|
let mut response = Response::new(Bytes::new());
|
||||||
|
*response.status_mut() = StatusCode::NOT_MODIFIED;
|
||||||
|
prepare_headers(response.headers_mut(), session);
|
||||||
|
http_response_to_twisted(twisted_request, response)?;
|
||||||
|
return Ok(());
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
let mut response = Response::new(session.data());
|
||||||
|
*response.status_mut() = StatusCode::OK;
|
||||||
|
let headers = response.headers_mut();
|
||||||
|
prepare_headers(headers, session);
|
||||||
|
headers.typed_insert(session.content_type());
|
||||||
|
headers.typed_insert(session.content_length());
|
||||||
|
http_response_to_twisted(twisted_request, response)?;
|
||||||
|
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
|
||||||
|
fn handle_put(&mut self, py: Python<'_>, twisted_request: &PyAny, id: &str) -> PyResult<()> {
|
||||||
|
let request = http_request_from_twisted(twisted_request)?;
|
||||||
|
|
||||||
|
let content_type = self.check_input_headers(request.headers())?;
|
||||||
|
|
||||||
|
let if_match: IfMatch = request.headers().typed_get_required()?;
|
||||||
|
|
||||||
|
let data = request.into_body();
|
||||||
|
|
||||||
|
let now: u64 = self.clock.call_method0(py, "time_msec")?.extract(py)?;
|
||||||
|
let now = SystemTime::UNIX_EPOCH + Duration::from_millis(now);
|
||||||
|
|
||||||
|
let id: Ulid = id.parse().map_err(|_| NotFoundError::new())?;
|
||||||
|
let session = self
|
||||||
|
.sessions
|
||||||
|
.get_mut(&id)
|
||||||
|
.filter(|s| !s.expired(now))
|
||||||
|
.ok_or_else(NotFoundError::new)?;
|
||||||
|
|
||||||
|
if !if_match.precondition_passes(&session.etag()) {
|
||||||
|
let mut headers = HeaderMap::new();
|
||||||
|
prepare_headers(&mut headers, session);
|
||||||
|
|
||||||
|
let mut additional_fields = HashMap::with_capacity(1);
|
||||||
|
additional_fields.insert(
|
||||||
|
String::from("org.matrix.msc4108.errcode"),
|
||||||
|
String::from("M_CONCURRENT_WRITE"),
|
||||||
|
);
|
||||||
|
|
||||||
|
return Err(SynapseError::new(
|
||||||
|
StatusCode::PRECONDITION_FAILED,
|
||||||
|
"ETag does not match".to_owned(),
|
||||||
|
"M_UNKNOWN", // Would be M_CONCURRENT_WRITE
|
||||||
|
Some(additional_fields),
|
||||||
|
Some(headers),
|
||||||
|
));
|
||||||
|
}
|
||||||
|
|
||||||
|
session.update(data, content_type, now);
|
||||||
|
|
||||||
|
let mut response = Response::new(Bytes::new());
|
||||||
|
*response.status_mut() = StatusCode::ACCEPTED;
|
||||||
|
prepare_headers(response.headers_mut(), session);
|
||||||
|
http_response_to_twisted(twisted_request, response)?;
|
||||||
|
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
|
||||||
|
fn handle_delete(&mut self, twisted_request: &PyAny, id: &str) -> PyResult<()> {
|
||||||
|
let _request = http_request_from_twisted(twisted_request)?;
|
||||||
|
|
||||||
|
let id: Ulid = id.parse().map_err(|_| NotFoundError::new())?;
|
||||||
|
let _session = self.sessions.remove(&id).ok_or_else(NotFoundError::new)?;
|
||||||
|
|
||||||
|
let mut response = Response::new(Bytes::new());
|
||||||
|
*response.status_mut() = StatusCode::NO_CONTENT;
|
||||||
|
response
|
||||||
|
.headers_mut()
|
||||||
|
.typed_insert(AccessControlAllowOrigin::ANY);
|
||||||
|
http_response_to_twisted(twisted_request, response)?;
|
||||||
|
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn register_module(py: Python<'_>, m: &PyModule) -> PyResult<()> {
|
||||||
|
let child_module = PyModule::new(py, "rendezvous")?;
|
||||||
|
|
||||||
|
child_module.add_class::<RendezvousHandler>()?;
|
||||||
|
|
||||||
|
m.add_submodule(child_module)?;
|
||||||
|
|
||||||
|
// We need to manually add the module to sys.modules to make `from
|
||||||
|
// synapse.synapse_rust import rendezvous` work.
|
||||||
|
py.import("sys")?
|
||||||
|
.getattr("modules")?
|
||||||
|
.set_item("synapse.synapse_rust.rendezvous", child_module)?;
|
||||||
|
|
||||||
|
Ok(())
|
||||||
|
}
|
91
rust/src/rendezvous/session.rs
Normal file
91
rust/src/rendezvous/session.rs
Normal file
|
@ -0,0 +1,91 @@
|
||||||
|
/*
|
||||||
|
* This file is licensed under the Affero General Public License (AGPL) version 3.
|
||||||
|
*
|
||||||
|
* Copyright (C) 2024 New Vector, Ltd
|
||||||
|
*
|
||||||
|
* This program is free software: you can redistribute it and/or modify
|
||||||
|
* it under the terms of the GNU Affero General Public License as
|
||||||
|
* published by the Free Software Foundation, either version 3 of the
|
||||||
|
* License, or (at your option) any later version.
|
||||||
|
*
|
||||||
|
* See the GNU Affero General Public License for more details:
|
||||||
|
* <https://www.gnu.org/licenses/agpl-3.0.html>.
|
||||||
|
*/
|
||||||
|
|
||||||
|
use std::time::{Duration, SystemTime};
|
||||||
|
|
||||||
|
use base64::{engine::general_purpose::URL_SAFE_NO_PAD, Engine as _};
|
||||||
|
use bytes::Bytes;
|
||||||
|
use headers::{ContentLength, ContentType, ETag, Expires, LastModified};
|
||||||
|
use mime::Mime;
|
||||||
|
use sha2::{Digest, Sha256};
|
||||||
|
|
||||||
|
/// A single session, containing data, metadata, and expiry information.
|
||||||
|
pub struct Session {
|
||||||
|
hash: [u8; 32],
|
||||||
|
data: Bytes,
|
||||||
|
content_type: Mime,
|
||||||
|
last_modified: SystemTime,
|
||||||
|
expires: SystemTime,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl Session {
|
||||||
|
/// Create a new session with the given data, content type, and time-to-live.
|
||||||
|
pub fn new(data: Bytes, content_type: Mime, now: SystemTime, ttl: Duration) -> Self {
|
||||||
|
let hash = Sha256::digest(&data).into();
|
||||||
|
Self {
|
||||||
|
hash,
|
||||||
|
data,
|
||||||
|
content_type,
|
||||||
|
expires: now + ttl,
|
||||||
|
last_modified: now,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Returns true if the session has expired at the given time.
|
||||||
|
pub fn expired(&self, now: SystemTime) -> bool {
|
||||||
|
self.expires <= now
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Update the session with new data, content type, and last modified time.
|
||||||
|
pub fn update(&mut self, data: Bytes, content_type: Mime, now: SystemTime) {
|
||||||
|
self.hash = Sha256::digest(&data).into();
|
||||||
|
self.data = data;
|
||||||
|
self.content_type = content_type;
|
||||||
|
self.last_modified = now;
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Returns the Content-Type header of the session.
|
||||||
|
pub fn content_type(&self) -> ContentType {
|
||||||
|
self.content_type.clone().into()
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Returns the Content-Length header of the session.
|
||||||
|
pub fn content_length(&self) -> ContentLength {
|
||||||
|
ContentLength(self.data.len() as _)
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Returns the ETag header of the session.
|
||||||
|
pub fn etag(&self) -> ETag {
|
||||||
|
let encoded = URL_SAFE_NO_PAD.encode(self.hash);
|
||||||
|
// SAFETY: Base64 encoding is URL-safe, so ETag-safe
|
||||||
|
format!("\"{encoded}\"")
|
||||||
|
.parse()
|
||||||
|
.expect("base64-encoded hash should be URL-safe")
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Returns the Last-Modified header of the session.
|
||||||
|
pub fn last_modified(&self) -> LastModified {
|
||||||
|
self.last_modified.into()
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Returns the Expires header of the session.
|
||||||
|
pub fn expires(&self) -> Expires {
|
||||||
|
self.expires.into()
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Returns the current data stored in the session.
|
||||||
|
pub fn data(&self) -> Bytes {
|
||||||
|
self.data.clone()
|
||||||
|
}
|
||||||
|
}
|
|
@ -214,7 +214,7 @@ fi
|
||||||
|
|
||||||
extra_test_args=()
|
extra_test_args=()
|
||||||
|
|
||||||
test_packages="./tests/csapi ./tests ./tests/msc3874 ./tests/msc3890 ./tests/msc3391 ./tests/msc3930 ./tests/msc3902"
|
test_packages="./tests/csapi ./tests ./tests/msc3874 ./tests/msc3890 ./tests/msc3391 ./tests/msc3930 ./tests/msc3902 ./tests/msc3967"
|
||||||
|
|
||||||
# Enable dirty runs, so tests will reuse the same container where possible.
|
# Enable dirty runs, so tests will reuse the same container where possible.
|
||||||
# This significantly speeds up tests, but increases the possibility of test pollution.
|
# This significantly speeds up tests, but increases the possibility of test pollution.
|
||||||
|
|
|
@ -52,6 +52,7 @@ DEFAULT_SUBJECTS = {
|
||||||
"invite_from_person_to_space": "[%(app)s] %(person)s has invited you to join the %(space)s space on %(app)s...",
|
"invite_from_person_to_space": "[%(app)s] %(person)s has invited you to join the %(space)s space on %(app)s...",
|
||||||
"password_reset": "[%(server_name)s] Password reset",
|
"password_reset": "[%(server_name)s] Password reset",
|
||||||
"email_validation": "[%(server_name)s] Validate your email",
|
"email_validation": "[%(server_name)s] Validate your email",
|
||||||
|
"email_already_in_use": "[%(server_name)s] Email already in use",
|
||||||
}
|
}
|
||||||
|
|
||||||
LEGACY_TEMPLATE_DIR_WARNING = """
|
LEGACY_TEMPLATE_DIR_WARNING = """
|
||||||
|
@ -76,6 +77,7 @@ class EmailSubjectConfig:
|
||||||
invite_from_person_to_space: str
|
invite_from_person_to_space: str
|
||||||
password_reset: str
|
password_reset: str
|
||||||
email_validation: str
|
email_validation: str
|
||||||
|
email_already_in_use: str
|
||||||
|
|
||||||
|
|
||||||
class EmailConfig(Config):
|
class EmailConfig(Config):
|
||||||
|
@ -180,6 +182,12 @@ class EmailConfig(Config):
|
||||||
registration_template_text = email_config.get(
|
registration_template_text = email_config.get(
|
||||||
"registration_template_text", "registration.txt"
|
"registration_template_text", "registration.txt"
|
||||||
)
|
)
|
||||||
|
already_in_use_template_html = email_config.get(
|
||||||
|
"already_in_use_template_html", "already_in_use.html"
|
||||||
|
)
|
||||||
|
already_in_use_template_text = email_config.get(
|
||||||
|
"already_in_use_template_text", "already_in_use.txt"
|
||||||
|
)
|
||||||
add_threepid_template_html = email_config.get(
|
add_threepid_template_html = email_config.get(
|
||||||
"add_threepid_template_html", "add_threepid.html"
|
"add_threepid_template_html", "add_threepid.html"
|
||||||
)
|
)
|
||||||
|
@ -215,6 +223,8 @@ class EmailConfig(Config):
|
||||||
self.email_password_reset_template_text,
|
self.email_password_reset_template_text,
|
||||||
self.email_registration_template_html,
|
self.email_registration_template_html,
|
||||||
self.email_registration_template_text,
|
self.email_registration_template_text,
|
||||||
|
self.email_already_in_use_template_html,
|
||||||
|
self.email_already_in_use_template_text,
|
||||||
self.email_add_threepid_template_html,
|
self.email_add_threepid_template_html,
|
||||||
self.email_add_threepid_template_text,
|
self.email_add_threepid_template_text,
|
||||||
self.email_password_reset_template_confirmation_html,
|
self.email_password_reset_template_confirmation_html,
|
||||||
|
@ -230,6 +240,8 @@ class EmailConfig(Config):
|
||||||
password_reset_template_text,
|
password_reset_template_text,
|
||||||
registration_template_html,
|
registration_template_html,
|
||||||
registration_template_text,
|
registration_template_text,
|
||||||
|
already_in_use_template_html,
|
||||||
|
already_in_use_template_text,
|
||||||
add_threepid_template_html,
|
add_threepid_template_html,
|
||||||
add_threepid_template_text,
|
add_threepid_template_text,
|
||||||
"password_reset_confirmation.html",
|
"password_reset_confirmation.html",
|
||||||
|
|
|
@ -411,3 +411,24 @@ class ExperimentalConfig(Config):
|
||||||
self.msc4069_profile_inhibit_propagation = experimental.get(
|
self.msc4069_profile_inhibit_propagation = experimental.get(
|
||||||
"msc4069_profile_inhibit_propagation", False
|
"msc4069_profile_inhibit_propagation", False
|
||||||
)
|
)
|
||||||
|
|
||||||
|
# MSC4108: Mechanism to allow OIDC sign in and E2EE set up via QR code
|
||||||
|
self.msc4108_enabled = experimental.get("msc4108_enabled", False)
|
||||||
|
|
||||||
|
self.msc4108_delegation_endpoint: Optional[str] = experimental.get(
|
||||||
|
"msc4108_delegation_endpoint", None
|
||||||
|
)
|
||||||
|
|
||||||
|
if (
|
||||||
|
self.msc4108_enabled or self.msc4108_delegation_endpoint is not None
|
||||||
|
) and not self.msc3861.enabled:
|
||||||
|
raise ConfigError(
|
||||||
|
"MSC4108 requires MSC3861 to be enabled",
|
||||||
|
("experimental", "msc4108_delegation_endpoint"),
|
||||||
|
)
|
||||||
|
|
||||||
|
if self.msc4108_delegation_endpoint is not None and self.msc4108_enabled:
|
||||||
|
raise ConfigError(
|
||||||
|
"You cannot have MSC4108 both enabled and delegated at the same time",
|
||||||
|
("experimental", "msc4108_delegation_endpoint"),
|
||||||
|
)
|
||||||
|
|
|
@ -261,11 +261,22 @@ class DeactivateAccountHandler:
|
||||||
user = UserID.from_string(user_id)
|
user = UserID.from_string(user_id)
|
||||||
|
|
||||||
rooms_for_user = await self.store.get_rooms_for_user(user_id)
|
rooms_for_user = await self.store.get_rooms_for_user(user_id)
|
||||||
|
requester = create_requester(user, authenticated_entity=self._server_name)
|
||||||
|
should_erase = await self.store.is_user_erased(user_id)
|
||||||
|
|
||||||
for room_id in rooms_for_user:
|
for room_id in rooms_for_user:
|
||||||
logger.info("User parter parting %r from %r", user_id, room_id)
|
logger.info("User parter parting %r from %r", user_id, room_id)
|
||||||
try:
|
try:
|
||||||
|
# Before parting the user, redact all membership events if requested
|
||||||
|
if should_erase:
|
||||||
|
event_ids = await self.store.get_membership_event_ids_for_user(
|
||||||
|
user_id, room_id
|
||||||
|
)
|
||||||
|
for event_id in event_ids:
|
||||||
|
await self.store.expire_event(event_id)
|
||||||
|
|
||||||
await self._room_member_handler.update_membership(
|
await self._room_member_handler.update_membership(
|
||||||
create_requester(user, authenticated_entity=self._server_name),
|
requester,
|
||||||
user,
|
user,
|
||||||
room_id,
|
room_id,
|
||||||
"leave",
|
"leave",
|
||||||
|
|
|
@ -1476,6 +1476,42 @@ class E2eKeysHandler:
|
||||||
else:
|
else:
|
||||||
return exists, self.clock.time_msec() < ts_replacable_without_uia_before
|
return exists, self.clock.time_msec() < ts_replacable_without_uia_before
|
||||||
|
|
||||||
|
async def has_different_keys(self, user_id: str, body: JsonDict) -> bool:
|
||||||
|
"""
|
||||||
|
Check if a key provided in `body` differs from the same key stored in the DB. Returns
|
||||||
|
true on the first difference. If a key exists in `body` but does not exist in the DB,
|
||||||
|
returns True. If `body` has no keys, this always returns False.
|
||||||
|
Note by 'key' we mean Matrix key rather than JSON key.
|
||||||
|
|
||||||
|
The purpose of this function is to detect whether or not we need to apply UIA checks.
|
||||||
|
We must apply UIA checks if any key in the database is being overwritten. If a key is
|
||||||
|
being inserted for the first time, or if the key exactly matches what is in the database,
|
||||||
|
then no UIA check needs to be performed.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
user_id: The user who sent the `body`.
|
||||||
|
body: The JSON request body from POST /keys/device_signing/upload
|
||||||
|
Returns:
|
||||||
|
True if any key in `body` has a different value in the database.
|
||||||
|
"""
|
||||||
|
# Ensure that each key provided in the request body exactly matches the one we have stored.
|
||||||
|
# The first time we see the DB having a different key to the matching request key, bail.
|
||||||
|
# Note: we do not care if the DB has a key which the request does not specify, as we only
|
||||||
|
# care about *replacements* or *insertions* (i.e UPSERT)
|
||||||
|
req_body_key_to_db_key = {
|
||||||
|
"master_key": "master",
|
||||||
|
"self_signing_key": "self_signing",
|
||||||
|
"user_signing_key": "user_signing",
|
||||||
|
}
|
||||||
|
for req_body_key, db_key in req_body_key_to_db_key.items():
|
||||||
|
if req_body_key in body:
|
||||||
|
existing_key = await self.store.get_e2e_cross_signing_key(
|
||||||
|
user_id, db_key
|
||||||
|
)
|
||||||
|
if existing_key != body[req_body_key]:
|
||||||
|
return True
|
||||||
|
return False
|
||||||
|
|
||||||
|
|
||||||
def _check_cross_signing_key(
|
def _check_cross_signing_key(
|
||||||
key: JsonDict, user_id: str, key_type: str, signing_key: Optional[VerifyKey] = None
|
key: JsonDict, user_id: str, key_type: str, signing_key: Optional[VerifyKey] = None
|
||||||
|
|
|
@ -262,7 +262,8 @@ class _ProxyResponseBody(protocol.Protocol):
|
||||||
self._request.finish()
|
self._request.finish()
|
||||||
else:
|
else:
|
||||||
# Abort the underlying request since our remote request also failed.
|
# Abort the underlying request since our remote request also failed.
|
||||||
self._request.transport.abortConnection()
|
if self._request.channel:
|
||||||
|
self._request.channel.forceAbortClient()
|
||||||
|
|
||||||
|
|
||||||
class ProxySite(Site):
|
class ProxySite(Site):
|
||||||
|
|
|
@ -153,9 +153,9 @@ def return_json_error(
|
||||||
# Only respond with an error response if we haven't already started writing,
|
# Only respond with an error response if we haven't already started writing,
|
||||||
# otherwise lets just kill the connection
|
# otherwise lets just kill the connection
|
||||||
if request.startedWriting:
|
if request.startedWriting:
|
||||||
if request.transport:
|
if request.channel:
|
||||||
try:
|
try:
|
||||||
request.transport.abortConnection()
|
request.channel.forceAbortClient()
|
||||||
except Exception:
|
except Exception:
|
||||||
# abortConnection throws if the connection is already closed
|
# abortConnection throws if the connection is already closed
|
||||||
pass
|
pass
|
||||||
|
@ -909,7 +909,19 @@ def set_cors_headers(request: "SynapseRequest") -> None:
|
||||||
request.setHeader(
|
request.setHeader(
|
||||||
b"Access-Control-Allow-Methods", b"GET, HEAD, POST, PUT, DELETE, OPTIONS"
|
b"Access-Control-Allow-Methods", b"GET, HEAD, POST, PUT, DELETE, OPTIONS"
|
||||||
)
|
)
|
||||||
if request.experimental_cors_msc3886:
|
if request.path is not None and (
|
||||||
|
request.path == b"/_matrix/client/unstable/org.matrix.msc4108/rendezvous"
|
||||||
|
or request.path.startswith(b"/_synapse/client/rendezvous")
|
||||||
|
):
|
||||||
|
request.setHeader(
|
||||||
|
b"Access-Control-Allow-Headers",
|
||||||
|
b"Content-Type, If-Match, If-None-Match",
|
||||||
|
)
|
||||||
|
request.setHeader(
|
||||||
|
b"Access-Control-Expose-Headers",
|
||||||
|
b"Synapse-Trace-Id, Server, ETag",
|
||||||
|
)
|
||||||
|
elif request.experimental_cors_msc3886:
|
||||||
request.setHeader(
|
request.setHeader(
|
||||||
b"Access-Control-Allow-Headers",
|
b"Access-Control-Allow-Headers",
|
||||||
b"X-Requested-With, Content-Type, Authorization, Date, If-Match, If-None-Match",
|
b"X-Requested-With, Content-Type, Authorization, Date, If-Match, If-None-Match",
|
||||||
|
|
|
@ -19,9 +19,11 @@
|
||||||
#
|
#
|
||||||
#
|
#
|
||||||
|
|
||||||
""" This module contains base REST classes for constructing REST servlets. """
|
"""This module contains base REST classes for constructing REST servlets."""
|
||||||
|
|
||||||
import enum
|
import enum
|
||||||
import logging
|
import logging
|
||||||
|
import urllib.parse as urlparse
|
||||||
from http import HTTPStatus
|
from http import HTTPStatus
|
||||||
from typing import (
|
from typing import (
|
||||||
TYPE_CHECKING,
|
TYPE_CHECKING,
|
||||||
|
@ -65,17 +67,49 @@ def parse_integer(request: Request, name: str, default: int) -> int: ...
|
||||||
|
|
||||||
|
|
||||||
@overload
|
@overload
|
||||||
def parse_integer(request: Request, name: str, *, required: Literal[True]) -> int: ...
|
def parse_integer(
|
||||||
|
request: Request, name: str, *, default: int, negative: bool
|
||||||
|
) -> int: ...
|
||||||
|
|
||||||
|
|
||||||
@overload
|
@overload
|
||||||
def parse_integer(
|
def parse_integer(
|
||||||
request: Request, name: str, default: Optional[int] = None, required: bool = False
|
request: Request, name: str, *, default: int, negative: bool = False
|
||||||
|
) -> int: ...
|
||||||
|
|
||||||
|
|
||||||
|
@overload
|
||||||
|
def parse_integer(
|
||||||
|
request: Request, name: str, *, required: Literal[True], negative: bool = False
|
||||||
|
) -> int: ...
|
||||||
|
|
||||||
|
|
||||||
|
@overload
|
||||||
|
def parse_integer(
|
||||||
|
request: Request, name: str, *, default: Literal[None], negative: bool = False
|
||||||
|
) -> None: ...
|
||||||
|
|
||||||
|
|
||||||
|
@overload
|
||||||
|
def parse_integer(request: Request, name: str, *, negative: bool) -> Optional[int]: ...
|
||||||
|
|
||||||
|
|
||||||
|
@overload
|
||||||
|
def parse_integer(
|
||||||
|
request: Request,
|
||||||
|
name: str,
|
||||||
|
default: Optional[int] = None,
|
||||||
|
required: bool = False,
|
||||||
|
negative: bool = False,
|
||||||
) -> Optional[int]: ...
|
) -> Optional[int]: ...
|
||||||
|
|
||||||
|
|
||||||
def parse_integer(
|
def parse_integer(
|
||||||
request: Request, name: str, default: Optional[int] = None, required: bool = False
|
request: Request,
|
||||||
|
name: str,
|
||||||
|
default: Optional[int] = None,
|
||||||
|
required: bool = False,
|
||||||
|
negative: bool = False,
|
||||||
) -> Optional[int]:
|
) -> Optional[int]:
|
||||||
"""Parse an integer parameter from the request string
|
"""Parse an integer parameter from the request string
|
||||||
|
|
||||||
|
@ -85,16 +119,17 @@ def parse_integer(
|
||||||
default: value to use if the parameter is absent, defaults to None.
|
default: value to use if the parameter is absent, defaults to None.
|
||||||
required: whether to raise a 400 SynapseError if the parameter is absent,
|
required: whether to raise a 400 SynapseError if the parameter is absent,
|
||||||
defaults to False.
|
defaults to False.
|
||||||
|
negative: whether to allow negative integers, defaults to False.
|
||||||
Returns:
|
Returns:
|
||||||
An int value or the default.
|
An int value or the default.
|
||||||
|
|
||||||
Raises:
|
Raises:
|
||||||
SynapseError: if the parameter is absent and required, or if the
|
SynapseError: if the parameter is absent and required, if the
|
||||||
parameter is present and not an integer.
|
parameter is present and not an integer, or if the
|
||||||
|
parameter is illegitimately negative.
|
||||||
"""
|
"""
|
||||||
args: Mapping[bytes, Sequence[bytes]] = request.args # type: ignore
|
args: Mapping[bytes, Sequence[bytes]] = request.args # type: ignore
|
||||||
return parse_integer_from_args(args, name, default, required)
|
return parse_integer_from_args(args, name, default, required, negative)
|
||||||
|
|
||||||
|
|
||||||
@overload
|
@overload
|
||||||
|
@ -120,6 +155,7 @@ def parse_integer_from_args(
|
||||||
name: str,
|
name: str,
|
||||||
default: Optional[int] = None,
|
default: Optional[int] = None,
|
||||||
required: bool = False,
|
required: bool = False,
|
||||||
|
negative: bool = False,
|
||||||
) -> Optional[int]: ...
|
) -> Optional[int]: ...
|
||||||
|
|
||||||
|
|
||||||
|
@ -128,6 +164,7 @@ def parse_integer_from_args(
|
||||||
name: str,
|
name: str,
|
||||||
default: Optional[int] = None,
|
default: Optional[int] = None,
|
||||||
required: bool = False,
|
required: bool = False,
|
||||||
|
negative: bool = True,
|
||||||
) -> Optional[int]:
|
) -> Optional[int]:
|
||||||
"""Parse an integer parameter from the request string
|
"""Parse an integer parameter from the request string
|
||||||
|
|
||||||
|
@ -137,33 +174,37 @@ def parse_integer_from_args(
|
||||||
default: value to use if the parameter is absent, defaults to None.
|
default: value to use if the parameter is absent, defaults to None.
|
||||||
required: whether to raise a 400 SynapseError if the parameter is absent,
|
required: whether to raise a 400 SynapseError if the parameter is absent,
|
||||||
defaults to False.
|
defaults to False.
|
||||||
|
negative: whether to allow negative integers, defaults to True.
|
||||||
|
|
||||||
Returns:
|
Returns:
|
||||||
An int value or the default.
|
An int value or the default.
|
||||||
|
|
||||||
Raises:
|
Raises:
|
||||||
SynapseError: if the parameter is absent and required, or if the
|
SynapseError: if the parameter is absent and required, if the
|
||||||
parameter is present and not an integer.
|
parameter is present and not an integer, or if the
|
||||||
|
parameter is illegitimately negative.
|
||||||
"""
|
"""
|
||||||
name_bytes = name.encode("ascii")
|
name_bytes = name.encode("ascii")
|
||||||
|
|
||||||
if name_bytes in args:
|
if name_bytes not in args:
|
||||||
try:
|
if not required:
|
||||||
return int(args[name_bytes][0])
|
|
||||||
except Exception:
|
|
||||||
message = "Query parameter %r must be an integer" % (name,)
|
|
||||||
raise SynapseError(
|
|
||||||
HTTPStatus.BAD_REQUEST, message, errcode=Codes.INVALID_PARAM
|
|
||||||
)
|
|
||||||
else:
|
|
||||||
if required:
|
|
||||||
message = "Missing integer query parameter %r" % (name,)
|
|
||||||
raise SynapseError(
|
|
||||||
HTTPStatus.BAD_REQUEST, message, errcode=Codes.MISSING_PARAM
|
|
||||||
)
|
|
||||||
else:
|
|
||||||
return default
|
return default
|
||||||
|
|
||||||
|
message = f"Missing required integer query parameter {name}"
|
||||||
|
raise SynapseError(HTTPStatus.BAD_REQUEST, message, errcode=Codes.MISSING_PARAM)
|
||||||
|
|
||||||
|
try:
|
||||||
|
integer = int(args[name_bytes][0])
|
||||||
|
except Exception:
|
||||||
|
message = f"Query parameter {name} must be an integer"
|
||||||
|
raise SynapseError(HTTPStatus.BAD_REQUEST, message, errcode=Codes.INVALID_PARAM)
|
||||||
|
|
||||||
|
if not negative and integer < 0:
|
||||||
|
message = f"Query parameter {name} must be a positive integer."
|
||||||
|
raise SynapseError(HTTPStatus.BAD_REQUEST, message, errcode=Codes.INVALID_PARAM)
|
||||||
|
|
||||||
|
return integer
|
||||||
|
|
||||||
|
|
||||||
@overload
|
@overload
|
||||||
def parse_boolean(request: Request, name: str, default: bool) -> bool: ...
|
def parse_boolean(request: Request, name: str, default: bool) -> bool: ...
|
||||||
|
@ -410,6 +451,87 @@ def parse_string(
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
|
def parse_json(
|
||||||
|
request: Request,
|
||||||
|
name: str,
|
||||||
|
default: Optional[dict] = None,
|
||||||
|
required: bool = False,
|
||||||
|
encoding: str = "ascii",
|
||||||
|
) -> Optional[JsonDict]:
|
||||||
|
"""
|
||||||
|
Parse a JSON parameter from the request query string.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
request: the twisted HTTP request.
|
||||||
|
name: the name of the query parameter.
|
||||||
|
default: value to use if the parameter is absent,
|
||||||
|
defaults to None.
|
||||||
|
required: whether to raise a 400 SynapseError if the
|
||||||
|
parameter is absent, defaults to False.
|
||||||
|
encoding: The encoding to decode the string content with.
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
A JSON value, or `default` if the named query parameter was not found
|
||||||
|
and `required` was False.
|
||||||
|
|
||||||
|
Raises:
|
||||||
|
SynapseError if the parameter is absent and required, or if the
|
||||||
|
parameter is present and not a JSON object.
|
||||||
|
"""
|
||||||
|
args: Mapping[bytes, Sequence[bytes]] = request.args # type: ignore
|
||||||
|
return parse_json_from_args(
|
||||||
|
args,
|
||||||
|
name,
|
||||||
|
default,
|
||||||
|
required=required,
|
||||||
|
encoding=encoding,
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
def parse_json_from_args(
|
||||||
|
args: Mapping[bytes, Sequence[bytes]],
|
||||||
|
name: str,
|
||||||
|
default: Optional[dict] = None,
|
||||||
|
required: bool = False,
|
||||||
|
encoding: str = "ascii",
|
||||||
|
) -> Optional[JsonDict]:
|
||||||
|
"""
|
||||||
|
Parse a JSON parameter from the request query string.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
args: a mapping of request args as bytes to a list of bytes (e.g. request.args).
|
||||||
|
name: the name of the query parameter.
|
||||||
|
default: value to use if the parameter is absent,
|
||||||
|
defaults to None.
|
||||||
|
required: whether to raise a 400 SynapseError if the
|
||||||
|
parameter is absent, defaults to False.
|
||||||
|
encoding: the encoding to decode the string content with.
|
||||||
|
|
||||||
|
A JSON value, or `default` if the named query parameter was not found
|
||||||
|
and `required` was False.
|
||||||
|
|
||||||
|
Raises:
|
||||||
|
SynapseError if the parameter is absent and required, or if the
|
||||||
|
parameter is present and not a JSON object.
|
||||||
|
"""
|
||||||
|
name_bytes = name.encode("ascii")
|
||||||
|
|
||||||
|
if name_bytes not in args:
|
||||||
|
if not required:
|
||||||
|
return default
|
||||||
|
|
||||||
|
message = f"Missing required JSON query parameter {name}"
|
||||||
|
raise SynapseError(HTTPStatus.BAD_REQUEST, message, errcode=Codes.MISSING_PARAM)
|
||||||
|
|
||||||
|
json_str = parse_string_from_args(args, name, required=True, encoding=encoding)
|
||||||
|
|
||||||
|
try:
|
||||||
|
return json_decoder.decode(urlparse.unquote(json_str))
|
||||||
|
except Exception:
|
||||||
|
message = f"Query parameter {name} must be a valid JSON object"
|
||||||
|
raise SynapseError(HTTPStatus.BAD_REQUEST, message, errcode=Codes.NOT_JSON)
|
||||||
|
|
||||||
|
|
||||||
EnumT = TypeVar("EnumT", bound=enum.Enum)
|
EnumT = TypeVar("EnumT", bound=enum.Enum)
|
||||||
|
|
||||||
|
|
||||||
|
|
|
@ -150,7 +150,8 @@ class SynapseRequest(Request):
|
||||||
self.get_method(),
|
self.get_method(),
|
||||||
self.get_redacted_uri(),
|
self.get_redacted_uri(),
|
||||||
)
|
)
|
||||||
self.transport.abortConnection()
|
if self.channel:
|
||||||
|
self.channel.forceAbortClient()
|
||||||
return
|
return
|
||||||
super().handleContentChunk(data)
|
super().handleContentChunk(data)
|
||||||
|
|
||||||
|
|
|
@ -205,6 +205,22 @@ class Mailer:
|
||||||
template_vars,
|
template_vars,
|
||||||
)
|
)
|
||||||
|
|
||||||
|
emails_sent_counter.labels("already_in_use")
|
||||||
|
|
||||||
|
async def send_already_in_use_mail(self, email_address: str) -> None:
|
||||||
|
"""Send an email if the address is already bound to an user account
|
||||||
|
|
||||||
|
Args:
|
||||||
|
email_address: Email address we're sending to the "already in use" mail
|
||||||
|
"""
|
||||||
|
|
||||||
|
await self.send_email(
|
||||||
|
email_address,
|
||||||
|
self.email_subjects.email_already_in_use
|
||||||
|
% {"server_name": self.hs.config.server.server_name, "app": self.app_name},
|
||||||
|
{},
|
||||||
|
)
|
||||||
|
|
||||||
emails_sent_counter.labels("add_threepid")
|
emails_sent_counter.labels("add_threepid")
|
||||||
|
|
||||||
async def send_add_threepid_mail(
|
async def send_add_threepid_mail(
|
||||||
|
|
12
synapse/res/templates/already_in_use.html
Normal file
12
synapse/res/templates/already_in_use.html
Normal file
|
@ -0,0 +1,12 @@
|
||||||
|
{% extends "_base.html" %}
|
||||||
|
{% block title %}Email already in use{% endblock %}
|
||||||
|
|
||||||
|
{% block body %}
|
||||||
|
<p>You have asked us to register this email with a new Matrix account, but this email is already registered with an existing account.</p>
|
||||||
|
|
||||||
|
<p>Please reset your password if needed.</p>
|
||||||
|
|
||||||
|
<p>If this was not you, you can safely disregard this email.</p>
|
||||||
|
|
||||||
|
<p>Thank you.</p>
|
||||||
|
{% endblock %}
|
10
synapse/res/templates/already_in_use.txt
Normal file
10
synapse/res/templates/already_in_use.txt
Normal file
|
@ -0,0 +1,10 @@
|
||||||
|
Hello there,
|
||||||
|
|
||||||
|
You have asked us to register this email with a new Matrix account,
|
||||||
|
but this email is already registered with an existing account.
|
||||||
|
|
||||||
|
Please reset your password if needed.
|
||||||
|
|
||||||
|
If this was not you, you can safely disregard this email.
|
||||||
|
|
||||||
|
Thank you.
|
|
@ -23,7 +23,7 @@ from http import HTTPStatus
|
||||||
from typing import TYPE_CHECKING, Tuple
|
from typing import TYPE_CHECKING, Tuple
|
||||||
|
|
||||||
from synapse.api.constants import Direction
|
from synapse.api.constants import Direction
|
||||||
from synapse.api.errors import Codes, NotFoundError, SynapseError
|
from synapse.api.errors import NotFoundError, SynapseError
|
||||||
from synapse.federation.transport.server import Authenticator
|
from synapse.federation.transport.server import Authenticator
|
||||||
from synapse.http.servlet import RestServlet, parse_enum, parse_integer, parse_string
|
from synapse.http.servlet import RestServlet, parse_enum, parse_integer, parse_string
|
||||||
from synapse.http.site import SynapseRequest
|
from synapse.http.site import SynapseRequest
|
||||||
|
@ -61,22 +61,8 @@ class ListDestinationsRestServlet(RestServlet):
|
||||||
async def on_GET(self, request: SynapseRequest) -> Tuple[int, JsonDict]:
|
async def on_GET(self, request: SynapseRequest) -> Tuple[int, JsonDict]:
|
||||||
await assert_requester_is_admin(self._auth, request)
|
await assert_requester_is_admin(self._auth, request)
|
||||||
|
|
||||||
start = parse_integer(request, "from", default=0)
|
start = parse_integer(request, "from", default=0, negative=False)
|
||||||
limit = parse_integer(request, "limit", default=100)
|
limit = parse_integer(request, "limit", default=100, negative=False)
|
||||||
|
|
||||||
if start < 0:
|
|
||||||
raise SynapseError(
|
|
||||||
HTTPStatus.BAD_REQUEST,
|
|
||||||
"Query parameter from must be a string representing a positive integer.",
|
|
||||||
errcode=Codes.INVALID_PARAM,
|
|
||||||
)
|
|
||||||
|
|
||||||
if limit < 0:
|
|
||||||
raise SynapseError(
|
|
||||||
HTTPStatus.BAD_REQUEST,
|
|
||||||
"Query parameter limit must be a string representing a positive integer.",
|
|
||||||
errcode=Codes.INVALID_PARAM,
|
|
||||||
)
|
|
||||||
|
|
||||||
destination = parse_string(request, "destination")
|
destination = parse_string(request, "destination")
|
||||||
|
|
||||||
|
@ -195,22 +181,8 @@ class DestinationMembershipRestServlet(RestServlet):
|
||||||
if not await self._store.is_destination_known(destination):
|
if not await self._store.is_destination_known(destination):
|
||||||
raise NotFoundError("Unknown destination")
|
raise NotFoundError("Unknown destination")
|
||||||
|
|
||||||
start = parse_integer(request, "from", default=0)
|
start = parse_integer(request, "from", default=0, negative=False)
|
||||||
limit = parse_integer(request, "limit", default=100)
|
limit = parse_integer(request, "limit", default=100, negative=False)
|
||||||
|
|
||||||
if start < 0:
|
|
||||||
raise SynapseError(
|
|
||||||
HTTPStatus.BAD_REQUEST,
|
|
||||||
"Query parameter from must be a string representing a positive integer.",
|
|
||||||
errcode=Codes.INVALID_PARAM,
|
|
||||||
)
|
|
||||||
|
|
||||||
if limit < 0:
|
|
||||||
raise SynapseError(
|
|
||||||
HTTPStatus.BAD_REQUEST,
|
|
||||||
"Query parameter limit must be a string representing a positive integer.",
|
|
||||||
errcode=Codes.INVALID_PARAM,
|
|
||||||
)
|
|
||||||
|
|
||||||
direction = parse_enum(request, "dir", Direction, default=Direction.FORWARDS)
|
direction = parse_enum(request, "dir", Direction, default=Direction.FORWARDS)
|
||||||
|
|
||||||
|
|
|
@ -311,29 +311,17 @@ class DeleteMediaByDateSize(RestServlet):
|
||||||
) -> Tuple[int, JsonDict]:
|
) -> Tuple[int, JsonDict]:
|
||||||
await assert_requester_is_admin(self.auth, request)
|
await assert_requester_is_admin(self.auth, request)
|
||||||
|
|
||||||
before_ts = parse_integer(request, "before_ts", required=True)
|
before_ts = parse_integer(request, "before_ts", required=True, negative=False)
|
||||||
size_gt = parse_integer(request, "size_gt", default=0)
|
size_gt = parse_integer(request, "size_gt", default=0, negative=False)
|
||||||
keep_profiles = parse_boolean(request, "keep_profiles", default=True)
|
keep_profiles = parse_boolean(request, "keep_profiles", default=True)
|
||||||
|
|
||||||
if before_ts < 0:
|
if before_ts < 30000000000: # Dec 1970 in milliseconds, Aug 2920 in seconds
|
||||||
raise SynapseError(
|
|
||||||
HTTPStatus.BAD_REQUEST,
|
|
||||||
"Query parameter before_ts must be a positive integer.",
|
|
||||||
errcode=Codes.INVALID_PARAM,
|
|
||||||
)
|
|
||||||
elif before_ts < 30000000000: # Dec 1970 in milliseconds, Aug 2920 in seconds
|
|
||||||
raise SynapseError(
|
raise SynapseError(
|
||||||
HTTPStatus.BAD_REQUEST,
|
HTTPStatus.BAD_REQUEST,
|
||||||
"Query parameter before_ts you provided is from the year 1970. "
|
"Query parameter before_ts you provided is from the year 1970. "
|
||||||
+ "Double check that you are providing a timestamp in milliseconds.",
|
+ "Double check that you are providing a timestamp in milliseconds.",
|
||||||
errcode=Codes.INVALID_PARAM,
|
errcode=Codes.INVALID_PARAM,
|
||||||
)
|
)
|
||||||
if size_gt < 0:
|
|
||||||
raise SynapseError(
|
|
||||||
HTTPStatus.BAD_REQUEST,
|
|
||||||
"Query parameter size_gt must be a string representing a positive integer.",
|
|
||||||
errcode=Codes.INVALID_PARAM,
|
|
||||||
)
|
|
||||||
|
|
||||||
# This check is useless, we keep it for the legacy endpoint only.
|
# This check is useless, we keep it for the legacy endpoint only.
|
||||||
if server_name is not None and self.server_name != server_name:
|
if server_name is not None and self.server_name != server_name:
|
||||||
|
@ -389,22 +377,8 @@ class UserMediaRestServlet(RestServlet):
|
||||||
if user is None:
|
if user is None:
|
||||||
raise NotFoundError("Unknown user")
|
raise NotFoundError("Unknown user")
|
||||||
|
|
||||||
start = parse_integer(request, "from", default=0)
|
start = parse_integer(request, "from", default=0, negative=False)
|
||||||
limit = parse_integer(request, "limit", default=100)
|
limit = parse_integer(request, "limit", default=100, negative=False)
|
||||||
|
|
||||||
if start < 0:
|
|
||||||
raise SynapseError(
|
|
||||||
HTTPStatus.BAD_REQUEST,
|
|
||||||
"Query parameter from must be a string representing a positive integer.",
|
|
||||||
errcode=Codes.INVALID_PARAM,
|
|
||||||
)
|
|
||||||
|
|
||||||
if limit < 0:
|
|
||||||
raise SynapseError(
|
|
||||||
HTTPStatus.BAD_REQUEST,
|
|
||||||
"Query parameter limit must be a string representing a positive integer.",
|
|
||||||
errcode=Codes.INVALID_PARAM,
|
|
||||||
)
|
|
||||||
|
|
||||||
# If neither `order_by` nor `dir` is set, set the default order
|
# If neither `order_by` nor `dir` is set, set the default order
|
||||||
# to newest media is on top for backward compatibility.
|
# to newest media is on top for backward compatibility.
|
||||||
|
@ -447,22 +421,8 @@ class UserMediaRestServlet(RestServlet):
|
||||||
if user is None:
|
if user is None:
|
||||||
raise NotFoundError("Unknown user")
|
raise NotFoundError("Unknown user")
|
||||||
|
|
||||||
start = parse_integer(request, "from", default=0)
|
start = parse_integer(request, "from", default=0, negative=False)
|
||||||
limit = parse_integer(request, "limit", default=100)
|
limit = parse_integer(request, "limit", default=100, negative=False)
|
||||||
|
|
||||||
if start < 0:
|
|
||||||
raise SynapseError(
|
|
||||||
HTTPStatus.BAD_REQUEST,
|
|
||||||
"Query parameter from must be a string representing a positive integer.",
|
|
||||||
errcode=Codes.INVALID_PARAM,
|
|
||||||
)
|
|
||||||
|
|
||||||
if limit < 0:
|
|
||||||
raise SynapseError(
|
|
||||||
HTTPStatus.BAD_REQUEST,
|
|
||||||
"Query parameter limit must be a string representing a positive integer.",
|
|
||||||
errcode=Codes.INVALID_PARAM,
|
|
||||||
)
|
|
||||||
|
|
||||||
# If neither `order_by` nor `dir` is set, set the default order
|
# If neither `order_by` nor `dir` is set, set the default order
|
||||||
# to newest media is on top for backward compatibility.
|
# to newest media is on top for backward compatibility.
|
||||||
|
|
|
@ -21,7 +21,6 @@
|
||||||
import logging
|
import logging
|
||||||
from http import HTTPStatus
|
from http import HTTPStatus
|
||||||
from typing import TYPE_CHECKING, List, Optional, Tuple, cast
|
from typing import TYPE_CHECKING, List, Optional, Tuple, cast
|
||||||
from urllib import parse as urlparse
|
|
||||||
|
|
||||||
import attr
|
import attr
|
||||||
|
|
||||||
|
@ -38,6 +37,7 @@ from synapse.http.servlet import (
|
||||||
assert_params_in_dict,
|
assert_params_in_dict,
|
||||||
parse_enum,
|
parse_enum,
|
||||||
parse_integer,
|
parse_integer,
|
||||||
|
parse_json,
|
||||||
parse_json_object_from_request,
|
parse_json_object_from_request,
|
||||||
parse_string,
|
parse_string,
|
||||||
)
|
)
|
||||||
|
@ -51,7 +51,6 @@ from synapse.storage.databases.main.room import RoomSortOrder
|
||||||
from synapse.streams.config import PaginationConfig
|
from synapse.streams.config import PaginationConfig
|
||||||
from synapse.types import JsonDict, RoomID, ScheduledTask, UserID, create_requester
|
from synapse.types import JsonDict, RoomID, ScheduledTask, UserID, create_requester
|
||||||
from synapse.types.state import StateFilter
|
from synapse.types.state import StateFilter
|
||||||
from synapse.util import json_decoder
|
|
||||||
|
|
||||||
if TYPE_CHECKING:
|
if TYPE_CHECKING:
|
||||||
from synapse.api.auth import Auth
|
from synapse.api.auth import Auth
|
||||||
|
@ -776,14 +775,8 @@ class RoomEventContextServlet(RestServlet):
|
||||||
limit = parse_integer(request, "limit", default=10)
|
limit = parse_integer(request, "limit", default=10)
|
||||||
|
|
||||||
# picking the API shape for symmetry with /messages
|
# picking the API shape for symmetry with /messages
|
||||||
filter_str = parse_string(request, "filter", encoding="utf-8")
|
filter_json = parse_json(request, "filter", encoding="utf-8")
|
||||||
if filter_str:
|
event_filter = Filter(self._hs, filter_json) if filter_json else None
|
||||||
filter_json = urlparse.unquote(filter_str)
|
|
||||||
event_filter: Optional[Filter] = Filter(
|
|
||||||
self._hs, json_decoder.decode(filter_json)
|
|
||||||
)
|
|
||||||
else:
|
|
||||||
event_filter = None
|
|
||||||
|
|
||||||
event_context = await self.room_context_handler.get_event_context(
|
event_context = await self.room_context_handler.get_event_context(
|
||||||
requester,
|
requester,
|
||||||
|
@ -914,21 +907,16 @@ class RoomMessagesRestServlet(RestServlet):
|
||||||
)
|
)
|
||||||
# Twisted will have processed the args by now.
|
# Twisted will have processed the args by now.
|
||||||
assert request.args is not None
|
assert request.args is not None
|
||||||
|
|
||||||
|
filter_json = parse_json(request, "filter", encoding="utf-8")
|
||||||
|
event_filter = Filter(self._hs, filter_json) if filter_json else None
|
||||||
|
|
||||||
as_client_event = b"raw" not in request.args
|
as_client_event = b"raw" not in request.args
|
||||||
filter_str = parse_string(request, "filter", encoding="utf-8")
|
if (
|
||||||
if filter_str:
|
event_filter
|
||||||
filter_json = urlparse.unquote(filter_str)
|
and event_filter.filter_json.get("event_format", "client") == "federation"
|
||||||
event_filter: Optional[Filter] = Filter(
|
):
|
||||||
self._hs, json_decoder.decode(filter_json)
|
as_client_event = False
|
||||||
)
|
|
||||||
if (
|
|
||||||
event_filter
|
|
||||||
and event_filter.filter_json.get("event_format", "client")
|
|
||||||
== "federation"
|
|
||||||
):
|
|
||||||
as_client_event = False
|
|
||||||
else:
|
|
||||||
event_filter = None
|
|
||||||
|
|
||||||
msgs = await self._pagination_handler.get_messages(
|
msgs = await self._pagination_handler.get_messages(
|
||||||
room_id=room_id,
|
room_id=room_id,
|
||||||
|
|
|
@ -63,38 +63,12 @@ class UserMediaStatisticsRestServlet(RestServlet):
|
||||||
),
|
),
|
||||||
)
|
)
|
||||||
|
|
||||||
start = parse_integer(request, "from", default=0)
|
start = parse_integer(request, "from", default=0, negative=False)
|
||||||
if start < 0:
|
limit = parse_integer(request, "limit", default=100, negative=False)
|
||||||
raise SynapseError(
|
from_ts = parse_integer(request, "from_ts", default=0, negative=False)
|
||||||
HTTPStatus.BAD_REQUEST,
|
until_ts = parse_integer(request, "until_ts", negative=False)
|
||||||
"Query parameter from must be a string representing a positive integer.",
|
|
||||||
errcode=Codes.INVALID_PARAM,
|
|
||||||
)
|
|
||||||
|
|
||||||
limit = parse_integer(request, "limit", default=100)
|
|
||||||
if limit < 0:
|
|
||||||
raise SynapseError(
|
|
||||||
HTTPStatus.BAD_REQUEST,
|
|
||||||
"Query parameter limit must be a string representing a positive integer.",
|
|
||||||
errcode=Codes.INVALID_PARAM,
|
|
||||||
)
|
|
||||||
|
|
||||||
from_ts = parse_integer(request, "from_ts", default=0)
|
|
||||||
if from_ts < 0:
|
|
||||||
raise SynapseError(
|
|
||||||
HTTPStatus.BAD_REQUEST,
|
|
||||||
"Query parameter from_ts must be a string representing a positive integer.",
|
|
||||||
errcode=Codes.INVALID_PARAM,
|
|
||||||
)
|
|
||||||
|
|
||||||
until_ts = parse_integer(request, "until_ts")
|
|
||||||
if until_ts is not None:
|
if until_ts is not None:
|
||||||
if until_ts < 0:
|
|
||||||
raise SynapseError(
|
|
||||||
HTTPStatus.BAD_REQUEST,
|
|
||||||
"Query parameter until_ts must be a string representing a positive integer.",
|
|
||||||
errcode=Codes.INVALID_PARAM,
|
|
||||||
)
|
|
||||||
if until_ts <= from_ts:
|
if until_ts <= from_ts:
|
||||||
raise SynapseError(
|
raise SynapseError(
|
||||||
HTTPStatus.BAD_REQUEST,
|
HTTPStatus.BAD_REQUEST,
|
||||||
|
|
|
@ -90,22 +90,8 @@ class UsersRestServletV2(RestServlet):
|
||||||
async def on_GET(self, request: SynapseRequest) -> Tuple[int, JsonDict]:
|
async def on_GET(self, request: SynapseRequest) -> Tuple[int, JsonDict]:
|
||||||
await assert_requester_is_admin(self.auth, request)
|
await assert_requester_is_admin(self.auth, request)
|
||||||
|
|
||||||
start = parse_integer(request, "from", default=0)
|
start = parse_integer(request, "from", default=0, negative=False)
|
||||||
limit = parse_integer(request, "limit", default=100)
|
limit = parse_integer(request, "limit", default=100, negative=False)
|
||||||
|
|
||||||
if start < 0:
|
|
||||||
raise SynapseError(
|
|
||||||
HTTPStatus.BAD_REQUEST,
|
|
||||||
"Query parameter from must be a string representing a positive integer.",
|
|
||||||
errcode=Codes.INVALID_PARAM,
|
|
||||||
)
|
|
||||||
|
|
||||||
if limit < 0:
|
|
||||||
raise SynapseError(
|
|
||||||
HTTPStatus.BAD_REQUEST,
|
|
||||||
"Query parameter limit must be a string representing a positive integer.",
|
|
||||||
errcode=Codes.INVALID_PARAM,
|
|
||||||
)
|
|
||||||
|
|
||||||
user_id = parse_string(request, "user_id")
|
user_id = parse_string(request, "user_id")
|
||||||
name = parse_string(request, "name", encoding="utf-8")
|
name = parse_string(request, "name", encoding="utf-8")
|
||||||
|
|
|
@ -409,7 +409,18 @@ class SigningKeyUploadServlet(RestServlet):
|
||||||
# But first-time setup is fine
|
# But first-time setup is fine
|
||||||
|
|
||||||
elif self.hs.config.experimental.msc3967_enabled:
|
elif self.hs.config.experimental.msc3967_enabled:
|
||||||
# If we already have a master key then cross signing is set up and we require UIA to reset
|
# MSC3967 allows this endpoint to 200 OK for idempotency. Resending exactly the same
|
||||||
|
# keys should just 200 OK without doing a UIA prompt.
|
||||||
|
keys_are_different = await self.e2e_keys_handler.has_different_keys(
|
||||||
|
user_id, body
|
||||||
|
)
|
||||||
|
if not keys_are_different:
|
||||||
|
# FIXME: we do not fallthrough to upload_signing_keys_for_user because confusingly
|
||||||
|
# if we do, we 500 as it looks like it tries to INSERT the same key twice, causing a
|
||||||
|
# unique key constraint violation. This sounds like a bug?
|
||||||
|
return 200, {}
|
||||||
|
# the keys are different, is x-signing set up? If no, then the keys don't exist which is
|
||||||
|
# why they are different. If yes, then we need to UIA to change them.
|
||||||
if is_cross_signing_setup:
|
if is_cross_signing_setup:
|
||||||
await self.auth_handler.validate_user_via_ui_auth(
|
await self.auth_handler.validate_user_via_ui_auth(
|
||||||
requester,
|
requester,
|
||||||
|
@ -420,7 +431,6 @@ class SigningKeyUploadServlet(RestServlet):
|
||||||
can_skip_ui_auth=False,
|
can_skip_ui_auth=False,
|
||||||
)
|
)
|
||||||
# Otherwise we don't require UIA since we are setting up cross signing for first time
|
# Otherwise we don't require UIA since we are setting up cross signing for first time
|
||||||
|
|
||||||
else:
|
else:
|
||||||
# Previous behaviour is to always require UIA but allow it to be skipped
|
# Previous behaviour is to always require UIA but allow it to be skipped
|
||||||
await self.auth_handler.validate_user_via_ui_auth(
|
await self.auth_handler.validate_user_via_ui_auth(
|
||||||
|
|
|
@ -86,12 +86,18 @@ class EmailRegisterRequestTokenRestServlet(RestServlet):
|
||||||
self.config = hs.config
|
self.config = hs.config
|
||||||
|
|
||||||
if self.hs.config.email.can_verify_email:
|
if self.hs.config.email.can_verify_email:
|
||||||
self.mailer = Mailer(
|
self.registration_mailer = Mailer(
|
||||||
hs=self.hs,
|
hs=self.hs,
|
||||||
app_name=self.config.email.email_app_name,
|
app_name=self.config.email.email_app_name,
|
||||||
template_html=self.config.email.email_registration_template_html,
|
template_html=self.config.email.email_registration_template_html,
|
||||||
template_text=self.config.email.email_registration_template_text,
|
template_text=self.config.email.email_registration_template_text,
|
||||||
)
|
)
|
||||||
|
self.already_in_use_mailer = Mailer(
|
||||||
|
hs=self.hs,
|
||||||
|
app_name=self.config.email.email_app_name,
|
||||||
|
template_html=self.config.email.email_already_in_use_template_html,
|
||||||
|
template_text=self.config.email.email_already_in_use_template_text,
|
||||||
|
)
|
||||||
|
|
||||||
async def on_POST(self, request: SynapseRequest) -> Tuple[int, JsonDict]:
|
async def on_POST(self, request: SynapseRequest) -> Tuple[int, JsonDict]:
|
||||||
if not self.hs.config.email.can_verify_email:
|
if not self.hs.config.email.can_verify_email:
|
||||||
|
@ -139,8 +145,10 @@ class EmailRegisterRequestTokenRestServlet(RestServlet):
|
||||||
if self.hs.config.server.request_token_inhibit_3pid_errors:
|
if self.hs.config.server.request_token_inhibit_3pid_errors:
|
||||||
# Make the client think the operation succeeded. See the rationale in the
|
# Make the client think the operation succeeded. See the rationale in the
|
||||||
# comments for request_token_inhibit_3pid_errors.
|
# comments for request_token_inhibit_3pid_errors.
|
||||||
|
# Still send an email to warn the user that an account already exists.
|
||||||
# Also wait for some random amount of time between 100ms and 1s to make it
|
# Also wait for some random amount of time between 100ms and 1s to make it
|
||||||
# look like we did something.
|
# look like we did something.
|
||||||
|
await self.already_in_use_mailer.send_already_in_use_mail(email)
|
||||||
await self.hs.get_clock().sleep(random.randint(1, 10) / 10)
|
await self.hs.get_clock().sleep(random.randint(1, 10) / 10)
|
||||||
return 200, {"sid": random_string(16)}
|
return 200, {"sid": random_string(16)}
|
||||||
|
|
||||||
|
@ -151,7 +159,7 @@ class EmailRegisterRequestTokenRestServlet(RestServlet):
|
||||||
email,
|
email,
|
||||||
client_secret,
|
client_secret,
|
||||||
send_attempt,
|
send_attempt,
|
||||||
self.mailer.send_registration_mail,
|
self.registration_mailer.send_registration_mail,
|
||||||
next_link,
|
next_link,
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|
|
@ -2,7 +2,7 @@
|
||||||
# This file is licensed under the Affero General Public License (AGPL) version 3.
|
# This file is licensed under the Affero General Public License (AGPL) version 3.
|
||||||
#
|
#
|
||||||
# Copyright 2022 The Matrix.org Foundation C.I.C.
|
# Copyright 2022 The Matrix.org Foundation C.I.C.
|
||||||
# Copyright (C) 2023 New Vector, Ltd
|
# Copyright (C) 2023-2024 New Vector, Ltd
|
||||||
#
|
#
|
||||||
# This program is free software: you can redistribute it and/or modify
|
# This program is free software: you can redistribute it and/or modify
|
||||||
# it under the terms of the GNU Affero General Public License as
|
# it under the terms of the GNU Affero General Public License as
|
||||||
|
@ -34,7 +34,7 @@ if TYPE_CHECKING:
|
||||||
logger = logging.getLogger(__name__)
|
logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
|
||||||
class RendezvousServlet(RestServlet):
|
class MSC3886RendezvousServlet(RestServlet):
|
||||||
"""
|
"""
|
||||||
This is a placeholder implementation of [MSC3886](https://github.com/matrix-org/matrix-spec-proposals/pull/3886)
|
This is a placeholder implementation of [MSC3886](https://github.com/matrix-org/matrix-spec-proposals/pull/3886)
|
||||||
simple client rendezvous capability that is used by the "Sign in with QR" functionality.
|
simple client rendezvous capability that is used by the "Sign in with QR" functionality.
|
||||||
|
@ -76,6 +76,46 @@ class RendezvousServlet(RestServlet):
|
||||||
# PUT, GET and DELETE are not implemented as they should be fulfilled by the redirect target.
|
# PUT, GET and DELETE are not implemented as they should be fulfilled by the redirect target.
|
||||||
|
|
||||||
|
|
||||||
|
class MSC4108DelegationRendezvousServlet(RestServlet):
|
||||||
|
PATTERNS = client_patterns(
|
||||||
|
"/org.matrix.msc4108/rendezvous$", releases=[], v1=False, unstable=True
|
||||||
|
)
|
||||||
|
|
||||||
|
def __init__(self, hs: "HomeServer"):
|
||||||
|
super().__init__()
|
||||||
|
redirection_target: Optional[str] = (
|
||||||
|
hs.config.experimental.msc4108_delegation_endpoint
|
||||||
|
)
|
||||||
|
assert (
|
||||||
|
redirection_target is not None
|
||||||
|
), "Servlet is only registered if there is a delegation target"
|
||||||
|
self.endpoint = redirection_target.encode("utf-8")
|
||||||
|
|
||||||
|
async def on_POST(self, request: SynapseRequest) -> None:
|
||||||
|
respond_with_redirect(
|
||||||
|
request, self.endpoint, statusCode=TEMPORARY_REDIRECT, cors=True
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
class MSC4108RendezvousServlet(RestServlet):
|
||||||
|
PATTERNS = client_patterns(
|
||||||
|
"/org.matrix.msc4108/rendezvous$", releases=[], v1=False, unstable=True
|
||||||
|
)
|
||||||
|
|
||||||
|
def __init__(self, hs: "HomeServer") -> None:
|
||||||
|
super().__init__()
|
||||||
|
self._handler = hs.get_rendezvous_handler()
|
||||||
|
|
||||||
|
def on_POST(self, request: SynapseRequest) -> None:
|
||||||
|
self._handler.handle_post(request)
|
||||||
|
|
||||||
|
|
||||||
def register_servlets(hs: "HomeServer", http_server: HttpServer) -> None:
|
def register_servlets(hs: "HomeServer", http_server: HttpServer) -> None:
|
||||||
if hs.config.experimental.msc3886_endpoint is not None:
|
if hs.config.experimental.msc3886_endpoint is not None:
|
||||||
RendezvousServlet(hs).register(http_server)
|
MSC3886RendezvousServlet(hs).register(http_server)
|
||||||
|
|
||||||
|
if hs.config.experimental.msc4108_enabled:
|
||||||
|
MSC4108RendezvousServlet(hs).register(http_server)
|
||||||
|
|
||||||
|
if hs.config.experimental.msc4108_delegation_endpoint is not None:
|
||||||
|
MSC4108DelegationRendezvousServlet(hs).register(http_server)
|
||||||
|
|
|
@ -52,6 +52,7 @@ from synapse.http.servlet import (
|
||||||
parse_boolean,
|
parse_boolean,
|
||||||
parse_enum,
|
parse_enum,
|
||||||
parse_integer,
|
parse_integer,
|
||||||
|
parse_json,
|
||||||
parse_json_object_from_request,
|
parse_json_object_from_request,
|
||||||
parse_string,
|
parse_string,
|
||||||
parse_strings_from_args,
|
parse_strings_from_args,
|
||||||
|
@ -65,7 +66,6 @@ from synapse.rest.client.transactions import HttpTransactionCache
|
||||||
from synapse.streams.config import PaginationConfig
|
from synapse.streams.config import PaginationConfig
|
||||||
from synapse.types import JsonDict, Requester, StreamToken, ThirdPartyInstanceID, UserID
|
from synapse.types import JsonDict, Requester, StreamToken, ThirdPartyInstanceID, UserID
|
||||||
from synapse.types.state import StateFilter
|
from synapse.types.state import StateFilter
|
||||||
from synapse.util import json_decoder
|
|
||||||
from synapse.util.cancellation import cancellable
|
from synapse.util.cancellation import cancellable
|
||||||
from synapse.util.stringutils import parse_and_validate_server_name, random_string
|
from synapse.util.stringutils import parse_and_validate_server_name, random_string
|
||||||
|
|
||||||
|
@ -503,7 +503,7 @@ class PublicRoomListRestServlet(RestServlet):
|
||||||
if server:
|
if server:
|
||||||
raise e
|
raise e
|
||||||
|
|
||||||
limit: Optional[int] = parse_integer(request, "limit", 0)
|
limit: Optional[int] = parse_integer(request, "limit", 0, negative=False)
|
||||||
since_token = parse_string(request, "since")
|
since_token = parse_string(request, "since")
|
||||||
|
|
||||||
if limit == 0:
|
if limit == 0:
|
||||||
|
@ -707,21 +707,16 @@ class RoomMessageListRestServlet(RestServlet):
|
||||||
)
|
)
|
||||||
# Twisted will have processed the args by now.
|
# Twisted will have processed the args by now.
|
||||||
assert request.args is not None
|
assert request.args is not None
|
||||||
|
|
||||||
|
filter_json = parse_json(request, "filter", encoding="utf-8")
|
||||||
|
event_filter = Filter(self._hs, filter_json) if filter_json else None
|
||||||
|
|
||||||
as_client_event = b"raw" not in request.args
|
as_client_event = b"raw" not in request.args
|
||||||
filter_str = parse_string(request, "filter", encoding="utf-8")
|
if (
|
||||||
if filter_str:
|
event_filter
|
||||||
filter_json = urlparse.unquote(filter_str)
|
and event_filter.filter_json.get("event_format", "client") == "federation"
|
||||||
event_filter: Optional[Filter] = Filter(
|
):
|
||||||
self._hs, json_decoder.decode(filter_json)
|
as_client_event = False
|
||||||
)
|
|
||||||
if (
|
|
||||||
event_filter
|
|
||||||
and event_filter.filter_json.get("event_format", "client")
|
|
||||||
== "federation"
|
|
||||||
):
|
|
||||||
as_client_event = False
|
|
||||||
else:
|
|
||||||
event_filter = None
|
|
||||||
|
|
||||||
msgs = await self.pagination_handler.get_messages(
|
msgs = await self.pagination_handler.get_messages(
|
||||||
room_id=room_id,
|
room_id=room_id,
|
||||||
|
@ -902,14 +897,8 @@ class RoomEventContextServlet(RestServlet):
|
||||||
limit = parse_integer(request, "limit", default=10)
|
limit = parse_integer(request, "limit", default=10)
|
||||||
|
|
||||||
# picking the API shape for symmetry with /messages
|
# picking the API shape for symmetry with /messages
|
||||||
filter_str = parse_string(request, "filter", encoding="utf-8")
|
filter_json = parse_json(request, "filter", encoding="utf-8")
|
||||||
if filter_str:
|
event_filter = Filter(self._hs, filter_json) if filter_json else None
|
||||||
filter_json = urlparse.unquote(filter_str)
|
|
||||||
event_filter: Optional[Filter] = Filter(
|
|
||||||
self._hs, json_decoder.decode(filter_json)
|
|
||||||
)
|
|
||||||
else:
|
|
||||||
event_filter = None
|
|
||||||
|
|
||||||
event_context = await self.room_context_handler.get_event_context(
|
event_context = await self.room_context_handler.get_event_context(
|
||||||
requester, room_id, event_id, limit, event_filter
|
requester, room_id, event_id, limit, event_filter
|
||||||
|
|
|
@ -140,6 +140,14 @@ class VersionsRestServlet(RestServlet):
|
||||||
"org.matrix.msc4069": self.config.experimental.msc4069_profile_inhibit_propagation,
|
"org.matrix.msc4069": self.config.experimental.msc4069_profile_inhibit_propagation,
|
||||||
# Allows clients to handle push for encrypted events.
|
# Allows clients to handle push for encrypted events.
|
||||||
"org.matrix.msc4028": self.config.experimental.msc4028_push_encrypted_events,
|
"org.matrix.msc4028": self.config.experimental.msc4028_push_encrypted_events,
|
||||||
|
# MSC4108: Mechanism to allow OIDC sign in and E2EE set up via QR code
|
||||||
|
"org.matrix.msc4108": (
|
||||||
|
self.config.experimental.msc4108_enabled
|
||||||
|
or (
|
||||||
|
self.config.experimental.msc4108_delegation_endpoint
|
||||||
|
is not None
|
||||||
|
)
|
||||||
|
),
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
)
|
)
|
||||||
|
|
|
@ -72,9 +72,6 @@ class PreviewUrlResource(RestServlet):
|
||||||
# XXX: if get_user_by_req fails, what should we do in an async render?
|
# XXX: if get_user_by_req fails, what should we do in an async render?
|
||||||
requester = await self.auth.get_user_by_req(request)
|
requester = await self.auth.get_user_by_req(request)
|
||||||
url = parse_string(request, "url", required=True)
|
url = parse_string(request, "url", required=True)
|
||||||
ts = parse_integer(request, "ts")
|
ts = parse_integer(request, "ts", default=self.clock.time_msec())
|
||||||
if ts is None:
|
|
||||||
ts = self.clock.time_msec()
|
|
||||||
|
|
||||||
og = await self.url_previewer.preview(url, requester.user, ts)
|
og = await self.url_previewer.preview(url, requester.user, ts)
|
||||||
respond_with_json_bytes(request, 200, og, send_cors=True)
|
respond_with_json_bytes(request, 200, og, send_cors=True)
|
||||||
|
|
|
@ -26,6 +26,7 @@ from twisted.web.resource import Resource
|
||||||
from synapse.rest.synapse.client.new_user_consent import NewUserConsentResource
|
from synapse.rest.synapse.client.new_user_consent import NewUserConsentResource
|
||||||
from synapse.rest.synapse.client.pick_idp import PickIdpResource
|
from synapse.rest.synapse.client.pick_idp import PickIdpResource
|
||||||
from synapse.rest.synapse.client.pick_username import pick_username_resource
|
from synapse.rest.synapse.client.pick_username import pick_username_resource
|
||||||
|
from synapse.rest.synapse.client.rendezvous import MSC4108RendezvousSessionResource
|
||||||
from synapse.rest.synapse.client.sso_register import SsoRegisterResource
|
from synapse.rest.synapse.client.sso_register import SsoRegisterResource
|
||||||
from synapse.rest.synapse.client.unsubscribe import UnsubscribeResource
|
from synapse.rest.synapse.client.unsubscribe import UnsubscribeResource
|
||||||
|
|
||||||
|
@ -76,6 +77,9 @@ def build_synapse_client_resource_tree(hs: "HomeServer") -> Mapping[str, Resourc
|
||||||
# To be removed in Synapse v1.32.0.
|
# To be removed in Synapse v1.32.0.
|
||||||
resources["/_matrix/saml2"] = res
|
resources["/_matrix/saml2"] = res
|
||||||
|
|
||||||
|
if hs.config.experimental.msc4108_enabled:
|
||||||
|
resources["/_synapse/client/rendezvous"] = MSC4108RendezvousSessionResource(hs)
|
||||||
|
|
||||||
return resources
|
return resources
|
||||||
|
|
||||||
|
|
||||||
|
|
58
synapse/rest/synapse/client/rendezvous.py
Normal file
58
synapse/rest/synapse/client/rendezvous.py
Normal file
|
@ -0,0 +1,58 @@
|
||||||
|
#
|
||||||
|
# This file is licensed under the Affero General Public License (AGPL) version 3.
|
||||||
|
#
|
||||||
|
# Copyright (C) 2024 New Vector, Ltd
|
||||||
|
#
|
||||||
|
# This program is free software: you can redistribute it and/or modify
|
||||||
|
# it under the terms of the GNU Affero General Public License as
|
||||||
|
# published by the Free Software Foundation, either version 3 of the
|
||||||
|
# License, or (at your option) any later version.
|
||||||
|
#
|
||||||
|
# See the GNU Affero General Public License for more details:
|
||||||
|
# <https://www.gnu.org/licenses/agpl-3.0.html>.
|
||||||
|
#
|
||||||
|
#
|
||||||
|
|
||||||
|
import logging
|
||||||
|
from typing import TYPE_CHECKING, List
|
||||||
|
|
||||||
|
from synapse.api.errors import UnrecognizedRequestError
|
||||||
|
from synapse.http.server import DirectServeJsonResource
|
||||||
|
from synapse.http.site import SynapseRequest
|
||||||
|
|
||||||
|
if TYPE_CHECKING:
|
||||||
|
from synapse.server import HomeServer
|
||||||
|
|
||||||
|
logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
|
||||||
|
class MSC4108RendezvousSessionResource(DirectServeJsonResource):
    """Serves a single MSC4108 rendezvous session.

    Mounted so that the remaining path component is the session ID; GET, PUT
    and DELETE requests are delegated to the homeserver's rendezvous handler.
    """

    # Handle the full remaining path ourselves: the session ID is parsed out
    # of request.postpath rather than registered as a child resource.
    isLeaf = True

    def __init__(self, hs: "HomeServer") -> None:
        super().__init__()
        self._handler = hs.get_rendezvous_handler()

    @staticmethod
    def _extract_session_id(request: SynapseRequest) -> str:
        """Return the session ID from the remaining request path.

        Args:
            request: the incoming request.

        Returns:
            The single trailing path component, decoded as ASCII.

        Raises:
            UnrecognizedRequestError: if the remaining path is not exactly one
                component.
        """
        postpath: List[bytes] = request.postpath  # type: ignore
        if len(postpath) != 1:
            raise UnrecognizedRequestError()
        return postpath[0].decode("ascii")

    async def _async_render_GET(self, request: SynapseRequest) -> None:
        self._handler.handle_get(request, self._extract_session_id(request))

    def _async_render_PUT(self, request: SynapseRequest) -> None:
        self._handler.handle_put(request, self._extract_session_id(request))

    def _async_render_DELETE(self, request: SynapseRequest) -> None:
        self._handler.handle_delete(request, self._extract_session_id(request))
|
|
@ -143,6 +143,7 @@ from synapse.state import StateHandler, StateResolutionHandler
|
||||||
from synapse.storage import Databases
|
from synapse.storage import Databases
|
||||||
from synapse.storage.controllers import StorageControllers
|
from synapse.storage.controllers import StorageControllers
|
||||||
from synapse.streams.events import EventSources
|
from synapse.streams.events import EventSources
|
||||||
|
from synapse.synapse_rust.rendezvous import RendezvousHandler
|
||||||
from synapse.types import DomainSpecificString, ISynapseReactor
|
from synapse.types import DomainSpecificString, ISynapseReactor
|
||||||
from synapse.util import Clock
|
from synapse.util import Clock
|
||||||
from synapse.util.distributor import Distributor
|
from synapse.util.distributor import Distributor
|
||||||
|
@ -859,6 +860,10 @@ class HomeServer(metaclass=abc.ABCMeta):
|
||||||
def get_room_forgetter_handler(self) -> RoomForgetterHandler:
|
def get_room_forgetter_handler(self) -> RoomForgetterHandler:
|
||||||
return RoomForgetterHandler(self)
|
return RoomForgetterHandler(self)
|
||||||
|
|
||||||
|
@cache_in_self
|
||||||
|
def get_rendezvous_handler(self) -> RendezvousHandler:
|
||||||
|
return RendezvousHandler(self)
|
||||||
|
|
||||||
@cache_in_self
|
@cache_in_self
|
||||||
def get_outbound_redis_connection(self) -> "ConnectionHandler":
|
def get_outbound_redis_connection(self) -> "ConnectionHandler":
|
||||||
"""
|
"""
|
||||||
|
|
|
@ -385,7 +385,6 @@ class EventPushActionsWorkerStore(ReceiptsWorkerStore, StreamWorkerStore, SQLBas
|
||||||
WITH all_receipts AS (
|
WITH all_receipts AS (
|
||||||
SELECT room_id, thread_id, MAX(event_stream_ordering) AS max_receipt_stream_ordering
|
SELECT room_id, thread_id, MAX(event_stream_ordering) AS max_receipt_stream_ordering
|
||||||
FROM receipts_linearized
|
FROM receipts_linearized
|
||||||
LEFT JOIN events USING (room_id, event_id)
|
|
||||||
WHERE
|
WHERE
|
||||||
{receipt_types_clause}
|
{receipt_types_clause}
|
||||||
AND user_id = ?
|
AND user_id = ?
|
||||||
|
@ -621,13 +620,12 @@ class EventPushActionsWorkerStore(ReceiptsWorkerStore, StreamWorkerStore, SQLBas
|
||||||
SELECT notif_count, COALESCE(unread_count, 0), thread_id
|
SELECT notif_count, COALESCE(unread_count, 0), thread_id
|
||||||
FROM event_push_summary
|
FROM event_push_summary
|
||||||
LEFT JOIN (
|
LEFT JOIN (
|
||||||
SELECT thread_id, MAX(stream_ordering) AS threaded_receipt_stream_ordering
|
SELECT thread_id, MAX(event_stream_ordering) AS threaded_receipt_stream_ordering
|
||||||
FROM receipts_linearized
|
FROM receipts_linearized
|
||||||
LEFT JOIN events USING (room_id, event_id)
|
|
||||||
WHERE
|
WHERE
|
||||||
user_id = ?
|
user_id = ?
|
||||||
AND room_id = ?
|
AND room_id = ?
|
||||||
AND stream_ordering > ?
|
AND event_stream_ordering > ?
|
||||||
AND {receipt_types_clause}
|
AND {receipt_types_clause}
|
||||||
GROUP BY thread_id
|
GROUP BY thread_id
|
||||||
) AS receipts USING (thread_id)
|
) AS receipts USING (thread_id)
|
||||||
|
@ -659,13 +657,12 @@ class EventPushActionsWorkerStore(ReceiptsWorkerStore, StreamWorkerStore, SQLBas
|
||||||
sql = f"""
|
sql = f"""
|
||||||
SELECT COUNT(*), thread_id FROM event_push_actions
|
SELECT COUNT(*), thread_id FROM event_push_actions
|
||||||
LEFT JOIN (
|
LEFT JOIN (
|
||||||
SELECT thread_id, MAX(stream_ordering) AS threaded_receipt_stream_ordering
|
SELECT thread_id, MAX(event_stream_ordering) AS threaded_receipt_stream_ordering
|
||||||
FROM receipts_linearized
|
FROM receipts_linearized
|
||||||
LEFT JOIN events USING (room_id, event_id)
|
|
||||||
WHERE
|
WHERE
|
||||||
user_id = ?
|
user_id = ?
|
||||||
AND room_id = ?
|
AND room_id = ?
|
||||||
AND stream_ordering > ?
|
AND event_stream_ordering > ?
|
||||||
AND {receipt_types_clause}
|
AND {receipt_types_clause}
|
||||||
GROUP BY thread_id
|
GROUP BY thread_id
|
||||||
) AS receipts USING (thread_id)
|
) AS receipts USING (thread_id)
|
||||||
|
@ -738,13 +735,12 @@ class EventPushActionsWorkerStore(ReceiptsWorkerStore, StreamWorkerStore, SQLBas
|
||||||
thread_id
|
thread_id
|
||||||
FROM event_push_actions
|
FROM event_push_actions
|
||||||
LEFT JOIN (
|
LEFT JOIN (
|
||||||
SELECT thread_id, MAX(stream_ordering) AS threaded_receipt_stream_ordering
|
SELECT thread_id, MAX(event_stream_ordering) AS threaded_receipt_stream_ordering
|
||||||
FROM receipts_linearized
|
FROM receipts_linearized
|
||||||
LEFT JOIN events USING (room_id, event_id)
|
|
||||||
WHERE
|
WHERE
|
||||||
user_id = ?
|
user_id = ?
|
||||||
AND room_id = ?
|
AND room_id = ?
|
||||||
AND stream_ordering > ?
|
AND event_stream_ordering > ?
|
||||||
AND {receipt_types_clause}
|
AND {receipt_types_clause}
|
||||||
GROUP BY thread_id
|
GROUP BY thread_id
|
||||||
) AS receipts USING (thread_id)
|
) AS receipts USING (thread_id)
|
||||||
|
@ -910,9 +906,8 @@ class EventPushActionsWorkerStore(ReceiptsWorkerStore, StreamWorkerStore, SQLBas
|
||||||
# given this function generally gets called with only one room and
|
# given this function generally gets called with only one room and
|
||||||
# thread ID.
|
# thread ID.
|
||||||
sql = f"""
|
sql = f"""
|
||||||
SELECT room_id, thread_id, MAX(stream_ordering)
|
SELECT room_id, thread_id, MAX(event_stream_ordering)
|
||||||
FROM receipts_linearized
|
FROM receipts_linearized
|
||||||
INNER JOIN events USING (room_id, event_id)
|
|
||||||
WHERE {receipt_types_clause}
|
WHERE {receipt_types_clause}
|
||||||
AND {thread_ids_clause}
|
AND {thread_ids_clause}
|
||||||
AND {room_ids_clause}
|
AND {room_ids_clause}
|
||||||
|
@ -1442,9 +1437,8 @@ class EventPushActionsWorkerStore(ReceiptsWorkerStore, StreamWorkerStore, SQLBas
|
||||||
)
|
)
|
||||||
|
|
||||||
sql = """
|
sql = """
|
||||||
SELECT r.stream_id, r.room_id, r.user_id, r.thread_id, e.stream_ordering
|
SELECT r.stream_id, r.room_id, r.user_id, r.thread_id, r.event_stream_ordering
|
||||||
FROM receipts_linearized AS r
|
FROM receipts_linearized AS r
|
||||||
INNER JOIN events AS e USING (event_id)
|
|
||||||
WHERE ? < r.stream_id AND r.stream_id <= ? AND user_id LIKE ?
|
WHERE ? < r.stream_id AND r.stream_id <= ? AND user_id LIKE ?
|
||||||
ORDER BY r.stream_id ASC
|
ORDER BY r.stream_id ASC
|
||||||
LIMIT ?
|
LIMIT ?
|
||||||
|
|
|
@ -2454,7 +2454,7 @@ class _LinkMap:
|
||||||
return target_seq <= src_seq
|
return target_seq <= src_seq
|
||||||
|
|
||||||
# We have to graph traverse the links to check for indirect paths.
|
# We have to graph traverse the links to check for indirect paths.
|
||||||
visited_chains = collections.Counter()
|
visited_chains: Dict[int, int] = collections.Counter()
|
||||||
search = [(src_chain, src_seq)]
|
search = [(src_chain, src_seq)]
|
||||||
while search:
|
while search:
|
||||||
chain, seq = search.pop()
|
chain, seq = search.pop()
|
||||||
|
|
|
@ -178,14 +178,13 @@ class ReceiptsWorkerStore(SQLBaseStore):
|
||||||
)
|
)
|
||||||
|
|
||||||
sql = f"""
|
sql = f"""
|
||||||
SELECT event_id, stream_ordering
|
SELECT event_id, event_stream_ordering
|
||||||
FROM receipts_linearized
|
FROM receipts_linearized
|
||||||
INNER JOIN events USING (room_id, event_id)
|
|
||||||
WHERE {clause}
|
WHERE {clause}
|
||||||
AND user_id = ?
|
AND user_id = ?
|
||||||
AND room_id = ?
|
AND room_id = ?
|
||||||
AND thread_id IS NULL
|
AND thread_id IS NULL
|
||||||
ORDER BY stream_ordering DESC
|
ORDER BY event_stream_ordering DESC
|
||||||
LIMIT 1
|
LIMIT 1
|
||||||
"""
|
"""
|
||||||
|
|
||||||
|
@ -735,10 +734,13 @@ class ReceiptsWorkerStore(SQLBaseStore):
|
||||||
thread_clause = "r.thread_id = ?"
|
thread_clause = "r.thread_id = ?"
|
||||||
thread_args = (thread_id,)
|
thread_args = (thread_id,)
|
||||||
|
|
||||||
|
# If the receipt doesn't have a stream ordering it is because we
|
||||||
|
# don't have the associated event, and so must be a remote receipt.
|
||||||
|
# Hence it's safe to just allow new receipts to clobber it.
|
||||||
sql = f"""
|
sql = f"""
|
||||||
SELECT stream_ordering, event_id FROM events
|
SELECT r.event_stream_ordering, r.event_id FROM receipts_linearized AS r
|
||||||
INNER JOIN receipts_linearized AS r USING (event_id, room_id)
|
WHERE r.room_id = ? AND r.receipt_type = ? AND r.user_id = ?
|
||||||
WHERE r.room_id = ? AND r.receipt_type = ? AND r.user_id = ? AND {thread_clause}
|
AND r.event_stream_ordering IS NOT NULL AND {thread_clause}
|
||||||
"""
|
"""
|
||||||
txn.execute(
|
txn.execute(
|
||||||
sql,
|
sql,
|
||||||
|
|
|
@ -1234,6 +1234,28 @@ class RoomMemberWorkerStore(EventsWorkerStore, CacheInvalidationWorkerStore):
|
||||||
|
|
||||||
return set(room_ids)
|
return set(room_ids)
|
||||||
|
|
||||||
|
async def get_membership_event_ids_for_user(
|
||||||
|
self, user_id: str, room_id: str
|
||||||
|
) -> Set[str]:
|
||||||
|
"""Get all event_ids for the given user and room.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
user_id: The user ID to get the event IDs for.
|
||||||
|
room_id: The room ID to look up events for.
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
Set of event IDs
|
||||||
|
"""
|
||||||
|
|
||||||
|
event_ids = await self.db_pool.simple_select_onecol(
|
||||||
|
table="room_memberships",
|
||||||
|
keyvalues={"user_id": user_id, "room_id": room_id},
|
||||||
|
retcol="event_id",
|
||||||
|
desc="get_membership_event_ids_for_user",
|
||||||
|
)
|
||||||
|
|
||||||
|
return set(event_ids)
|
||||||
|
|
||||||
@cached(max_entries=5000)
|
@cached(max_entries=5000)
|
||||||
async def _get_membership_from_event_id(
|
async def _get_membership_from_event_id(
|
||||||
self, member_event_id: str
|
self, member_event_id: str
|
||||||
|
|
30
synapse/synapse_rust/rendezvous.pyi
Normal file
30
synapse/synapse_rust/rendezvous.pyi
Normal file
|
@ -0,0 +1,30 @@
|
||||||
|
# This file is licensed under the Affero General Public License (AGPL) version 3.
|
||||||
|
#
|
||||||
|
# Copyright (C) 2024 New Vector, Ltd
|
||||||
|
#
|
||||||
|
# This program is free software: you can redistribute it and/or modify
|
||||||
|
# it under the terms of the GNU Affero General Public License as
|
||||||
|
# published by the Free Software Foundation, either version 3 of the
|
||||||
|
# License, or (at your option) any later version.
|
||||||
|
#
|
||||||
|
# See the GNU Affero General Public License for more details:
|
||||||
|
# <https://www.gnu.org/licenses/agpl-3.0.html>.
|
||||||
|
|
||||||
|
from twisted.web.iweb import IRequest
|
||||||
|
|
||||||
|
from synapse.server import HomeServer
|
||||||
|
|
||||||
|
class RendezvousHandler:
    """Type stub for the Rust-implemented MSC4108 rendezvous handler.

    The real implementation lives in the ``synapse_rust`` native extension;
    this stub only declares the signatures for type checking.
    """

    def __init__(
        self,
        homeserver: HomeServer,
        /,
        capacity: int = 100,
        max_content_length: int = 4 * 1024,  # MSC4108 specifies 4KB
        eviction_interval: int = 60 * 1000,
        ttl: int = 60 * 1000,
    ) -> None: ...
    def handle_post(self, request: IRequest) -> None: ...
    def handle_get(self, request: IRequest, session_id: str) -> None: ...
    def handle_put(self, request: IRequest, session_id: str) -> None: ...
    def handle_delete(self, request: IRequest, session_id: str) -> None: ...
|
|
@ -424,3 +424,40 @@ class DeactivateAccountTestCase(HomeserverTestCase):
|
||||||
self._store.get_knocked_at_rooms_for_local_user(self.user)
|
self._store.get_knocked_at_rooms_for_local_user(self.user)
|
||||||
)
|
)
|
||||||
self.assertEqual(len(after_deactivate_knocks), 0)
|
self.assertEqual(len(after_deactivate_knocks), 0)
|
||||||
|
|
||||||
|
def test_membership_is_redacted_upon_deactivation(self) -> None:
|
||||||
|
"""
|
||||||
|
Tests that room membership events are redacted if erasure is requested.
|
||||||
|
"""
|
||||||
|
# Create a room
|
||||||
|
room_id = self.helper.create_room_as(
|
||||||
|
self.user,
|
||||||
|
is_public=True,
|
||||||
|
tok=self.token,
|
||||||
|
)
|
||||||
|
|
||||||
|
# Change the displayname
|
||||||
|
membership_event, _ = self.get_success(
|
||||||
|
self.handler.update_membership(
|
||||||
|
requester=create_requester(self.user),
|
||||||
|
target=UserID.from_string(self.user),
|
||||||
|
room_id=room_id,
|
||||||
|
action=Membership.JOIN,
|
||||||
|
content={"displayname": "Hello World!"},
|
||||||
|
)
|
||||||
|
)
|
||||||
|
|
||||||
|
# Deactivate the account
|
||||||
|
self._deactivate_my_account()
|
||||||
|
|
||||||
|
# Get the all membership event IDs
|
||||||
|
membership_event_ids = self.get_success(
|
||||||
|
self._store.get_membership_event_ids_for_user(self.user, room_id=room_id)
|
||||||
|
)
|
||||||
|
|
||||||
|
# Get the events incl. JSON
|
||||||
|
events = self.get_success(self._store.get_events_as_list(membership_event_ids))
|
||||||
|
|
||||||
|
# Validate that there is no displayname in any of the events
|
||||||
|
for event in events:
|
||||||
|
self.assertTrue("displayname" not in event.content)
|
||||||
|
|
|
@ -1101,6 +1101,56 @@ class E2eKeysHandlerTestCase(unittest.HomeserverTestCase):
|
||||||
},
|
},
|
||||||
)
|
)
|
||||||
|
|
||||||
|
def test_has_different_keys(self) -> None:
|
||||||
|
"""check that has_different_keys returns True when the keys provided are different to what
|
||||||
|
is in the database."""
|
||||||
|
local_user = "@boris:" + self.hs.hostname
|
||||||
|
keys1 = {
|
||||||
|
"master_key": {
|
||||||
|
# private key: 2lonYOM6xYKdEsO+6KrC766xBcHnYnim1x/4LFGF8B0
|
||||||
|
"user_id": local_user,
|
||||||
|
"usage": ["master"],
|
||||||
|
"keys": {
|
||||||
|
"ed25519:nqOvzeuGWT/sRx3h7+MHoInYj3Uk2LD/unI9kDYcHwk": "nqOvzeuGWT/sRx3h7+MHoInYj3Uk2LD/unI9kDYcHwk"
|
||||||
|
},
|
||||||
|
}
|
||||||
|
}
|
||||||
|
self.get_success(self.handler.upload_signing_keys_for_user(local_user, keys1))
|
||||||
|
is_different = self.get_success(
|
||||||
|
self.handler.has_different_keys(
|
||||||
|
local_user,
|
||||||
|
{
|
||||||
|
"master_key": keys1["master_key"],
|
||||||
|
},
|
||||||
|
)
|
||||||
|
)
|
||||||
|
self.assertEqual(is_different, False)
|
||||||
|
# change the usage => different keys
|
||||||
|
keys1["master_key"]["usage"] = ["develop"]
|
||||||
|
is_different = self.get_success(
|
||||||
|
self.handler.has_different_keys(
|
||||||
|
local_user,
|
||||||
|
{
|
||||||
|
"master_key": keys1["master_key"],
|
||||||
|
},
|
||||||
|
)
|
||||||
|
)
|
||||||
|
self.assertEqual(is_different, True)
|
||||||
|
keys1["master_key"]["usage"] = ["master"] # reset
|
||||||
|
# change the key => different keys
|
||||||
|
keys1["master_key"]["keys"] = {
|
||||||
|
"ed25519:nqOvzeuGWT/sRx3h7+MHoInYj3Uk2LD/unIc0rncs": "nqOvzeuGWT/sRx3h7+MHoInYj3Uk2LD/unIc0rncs"
|
||||||
|
}
|
||||||
|
is_different = self.get_success(
|
||||||
|
self.handler.has_different_keys(
|
||||||
|
local_user,
|
||||||
|
{
|
||||||
|
"master_key": keys1["master_key"],
|
||||||
|
},
|
||||||
|
)
|
||||||
|
)
|
||||||
|
self.assertEqual(is_different, True)
|
||||||
|
|
||||||
def test_query_devices_remote_sync(self) -> None:
|
def test_query_devices_remote_sync(self) -> None:
|
||||||
"""Tests that querying keys for a remote user that we share a room with,
|
"""Tests that querying keys for a remote user that we share a room with,
|
||||||
but haven't yet fetched the keys for, returns the cross signing keys
|
but haven't yet fetched the keys for, returns the cross signing keys
|
||||||
|
|
|
@ -277,7 +277,8 @@ class DeleteMediaByDateSizeTestCase(_AdminMediaTests):
|
||||||
self.assertEqual(400, channel.code, msg=channel.json_body)
|
self.assertEqual(400, channel.code, msg=channel.json_body)
|
||||||
self.assertEqual(Codes.MISSING_PARAM, channel.json_body["errcode"])
|
self.assertEqual(Codes.MISSING_PARAM, channel.json_body["errcode"])
|
||||||
self.assertEqual(
|
self.assertEqual(
|
||||||
"Missing integer query parameter 'before_ts'", channel.json_body["error"]
|
"Missing required integer query parameter before_ts",
|
||||||
|
channel.json_body["error"],
|
||||||
)
|
)
|
||||||
|
|
||||||
def test_invalid_parameter(self) -> None:
|
def test_invalid_parameter(self) -> None:
|
||||||
|
@ -320,7 +321,7 @@ class DeleteMediaByDateSizeTestCase(_AdminMediaTests):
|
||||||
self.assertEqual(400, channel.code, msg=channel.json_body)
|
self.assertEqual(400, channel.code, msg=channel.json_body)
|
||||||
self.assertEqual(Codes.INVALID_PARAM, channel.json_body["errcode"])
|
self.assertEqual(Codes.INVALID_PARAM, channel.json_body["errcode"])
|
||||||
self.assertEqual(
|
self.assertEqual(
|
||||||
"Query parameter size_gt must be a string representing a positive integer.",
|
"Query parameter size_gt must be a positive integer.",
|
||||||
channel.json_body["error"],
|
channel.json_body["error"],
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|
|
@ -21,6 +21,7 @@
|
||||||
import json
|
import json
|
||||||
import time
|
import time
|
||||||
import urllib.parse
|
import urllib.parse
|
||||||
|
from http import HTTPStatus
|
||||||
from typing import List, Optional
|
from typing import List, Optional
|
||||||
from unittest.mock import AsyncMock, Mock
|
from unittest.mock import AsyncMock, Mock
|
||||||
|
|
||||||
|
@ -2190,6 +2191,33 @@ class RoomMessagesTestCase(unittest.HomeserverTestCase):
|
||||||
chunk = channel.json_body["chunk"]
|
chunk = channel.json_body["chunk"]
|
||||||
self.assertEqual(len(chunk), 0, [event["content"] for event in chunk])
|
self.assertEqual(len(chunk), 0, [event["content"] for event in chunk])
|
||||||
|
|
||||||
|
def test_room_message_filter_query_validation(self) -> None:
|
||||||
|
# Test json validation in (filter) query parameter.
|
||||||
|
# Does not test the validity of the filter, only the json validation.
|
||||||
|
|
||||||
|
# Check Get with valid json filter parameter, expect 200.
|
||||||
|
valid_filter_str = '{"types": ["m.room.message"]}'
|
||||||
|
channel = self.make_request(
|
||||||
|
"GET",
|
||||||
|
f"/_synapse/admin/v1/rooms/{self.room_id}/messages?dir=b&filter={valid_filter_str}",
|
||||||
|
access_token=self.admin_user_tok,
|
||||||
|
)
|
||||||
|
|
||||||
|
self.assertEqual(channel.code, HTTPStatus.OK, channel.json_body)
|
||||||
|
|
||||||
|
# Check Get with invalid json filter parameter, expect 400 NOT_JSON.
|
||||||
|
invalid_filter_str = "}}}{}"
|
||||||
|
channel = self.make_request(
|
||||||
|
"GET",
|
||||||
|
f"/_synapse/admin/v1/rooms/{self.room_id}/messages?dir=b&filter={invalid_filter_str}",
|
||||||
|
access_token=self.admin_user_tok,
|
||||||
|
)
|
||||||
|
|
||||||
|
self.assertEqual(channel.code, HTTPStatus.BAD_REQUEST, channel.json_body)
|
||||||
|
self.assertEqual(
|
||||||
|
channel.json_body["errcode"], Codes.NOT_JSON, channel.json_body
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
class JoinAliasRoomTestCase(unittest.HomeserverTestCase):
|
class JoinAliasRoomTestCase(unittest.HomeserverTestCase):
|
||||||
servlets = [
|
servlets = [
|
||||||
|
@ -2522,6 +2550,39 @@ class JoinAliasRoomTestCase(unittest.HomeserverTestCase):
|
||||||
else:
|
else:
|
||||||
self.fail("Event %s from events_after not found" % j)
|
self.fail("Event %s from events_after not found" % j)
|
||||||
|
|
||||||
|
def test_room_event_context_filter_query_validation(self) -> None:
|
||||||
|
# Test json validation in (filter) query parameter.
|
||||||
|
# Does not test the validity of the filter, only the json validation.
|
||||||
|
|
||||||
|
# Create a user with room and event_id.
|
||||||
|
user_id = self.register_user("test", "test")
|
||||||
|
user_tok = self.login("test", "test")
|
||||||
|
room_id = self.helper.create_room_as(user_id, tok=user_tok)
|
||||||
|
event_id = self.helper.send(room_id, "message 1", tok=user_tok)["event_id"]
|
||||||
|
|
||||||
|
# Check Get with valid json filter parameter, expect 200.
|
||||||
|
valid_filter_str = '{"types": ["m.room.message"]}'
|
||||||
|
channel = self.make_request(
|
||||||
|
"GET",
|
||||||
|
f"/_synapse/admin/v1/rooms/{room_id}/context/{event_id}?filter={valid_filter_str}",
|
||||||
|
access_token=self.admin_user_tok,
|
||||||
|
)
|
||||||
|
|
||||||
|
self.assertEqual(channel.code, HTTPStatus.OK, channel.json_body)
|
||||||
|
|
||||||
|
# Check Get with invalid json filter parameter, expect 400 NOT_JSON.
|
||||||
|
invalid_filter_str = "}}}{}"
|
||||||
|
channel = self.make_request(
|
||||||
|
"GET",
|
||||||
|
f"/_synapse/admin/v1/rooms/{room_id}/context/{event_id}?filter={invalid_filter_str}",
|
||||||
|
access_token=self.admin_user_tok,
|
||||||
|
)
|
||||||
|
|
||||||
|
self.assertEqual(channel.code, HTTPStatus.BAD_REQUEST, channel.json_body)
|
||||||
|
self.assertEqual(
|
||||||
|
channel.json_body["errcode"], Codes.NOT_JSON, channel.json_body
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
class MakeRoomAdminTestCase(unittest.HomeserverTestCase):
|
class MakeRoomAdminTestCase(unittest.HomeserverTestCase):
|
||||||
servlets = [
|
servlets = [
|
||||||
|
|
|
@ -22,6 +22,7 @@
|
||||||
import datetime
|
import datetime
|
||||||
import os
|
import os
|
||||||
from typing import Any, Dict, List, Tuple
|
from typing import Any, Dict, List, Tuple
|
||||||
|
from unittest.mock import AsyncMock
|
||||||
|
|
||||||
import pkg_resources
|
import pkg_resources
|
||||||
|
|
||||||
|
@ -42,6 +43,7 @@ from synapse.types import JsonDict
|
||||||
from synapse.util import Clock
|
from synapse.util import Clock
|
||||||
|
|
||||||
from tests import unittest
|
from tests import unittest
|
||||||
|
from tests.server import ThreadedMemoryReactorClock
|
||||||
from tests.unittest import override_config
|
from tests.unittest import override_config
|
||||||
|
|
||||||
|
|
||||||
|
@ -58,6 +60,13 @@ class RegisterRestServletTestCase(unittest.HomeserverTestCase):
|
||||||
config["allow_guest_access"] = True
|
config["allow_guest_access"] = True
|
||||||
return config
|
return config
|
||||||
|
|
||||||
|
def make_homeserver(
|
||||||
|
self, reactor: ThreadedMemoryReactorClock, clock: Clock
|
||||||
|
) -> HomeServer:
|
||||||
|
hs = super().make_homeserver(reactor, clock)
|
||||||
|
hs.get_send_email_handler()._sendmail = AsyncMock()
|
||||||
|
return hs
|
||||||
|
|
||||||
def test_POST_appservice_registration_valid(self) -> None:
|
def test_POST_appservice_registration_valid(self) -> None:
|
||||||
user_id = "@as_user_kermit:test"
|
user_id = "@as_user_kermit:test"
|
||||||
as_token = "i_am_an_app_service"
|
as_token = "i_am_an_app_service"
|
||||||
|
|
|
@ -2,7 +2,7 @@
|
||||||
# This file is licensed under the Affero General Public License (AGPL) version 3.
|
# This file is licensed under the Affero General Public License (AGPL) version 3.
|
||||||
#
|
#
|
||||||
# Copyright 2022 The Matrix.org Foundation C.I.C.
|
# Copyright 2022 The Matrix.org Foundation C.I.C.
|
||||||
# Copyright (C) 2023 New Vector, Ltd
|
# Copyright (C) 2023-2024 New Vector, Ltd
|
||||||
#
|
#
|
||||||
# This program is free software: you can redistribute it and/or modify
|
# This program is free software: you can redistribute it and/or modify
|
||||||
# it under the terms of the GNU Affero General Public License as
|
# it under the terms of the GNU Affero General Public License as
|
||||||
|
@ -19,16 +19,23 @@
|
||||||
#
|
#
|
||||||
#
|
#
|
||||||
|
|
||||||
|
from typing import Dict
|
||||||
|
from urllib.parse import urlparse
|
||||||
|
|
||||||
from twisted.test.proto_helpers import MemoryReactor
|
from twisted.test.proto_helpers import MemoryReactor
|
||||||
|
from twisted.web.resource import Resource
|
||||||
|
|
||||||
from synapse.rest.client import rendezvous
|
from synapse.rest.client import rendezvous
|
||||||
|
from synapse.rest.synapse.client.rendezvous import MSC4108RendezvousSessionResource
|
||||||
from synapse.server import HomeServer
|
from synapse.server import HomeServer
|
||||||
from synapse.util import Clock
|
from synapse.util import Clock
|
||||||
|
|
||||||
from tests import unittest
|
from tests import unittest
|
||||||
from tests.unittest import override_config
|
from tests.unittest import override_config
|
||||||
|
from tests.utils import HAS_AUTHLIB
|
||||||
|
|
||||||
endpoint = "/_matrix/client/unstable/org.matrix.msc3886/rendezvous"
|
msc3886_endpoint = "/_matrix/client/unstable/org.matrix.msc3886/rendezvous"
|
||||||
|
msc4108_endpoint = "/_matrix/client/unstable/org.matrix.msc4108/rendezvous"
|
||||||
|
|
||||||
|
|
||||||
class RendezvousServletTestCase(unittest.HomeserverTestCase):
|
class RendezvousServletTestCase(unittest.HomeserverTestCase):
|
||||||
|
@ -40,12 +47,430 @@ class RendezvousServletTestCase(unittest.HomeserverTestCase):
|
||||||
self.hs = self.setup_test_homeserver()
|
self.hs = self.setup_test_homeserver()
|
||||||
return self.hs
|
return self.hs
|
||||||
|
|
||||||
|
def create_resource_dict(self) -> Dict[str, Resource]:
|
||||||
|
return {
|
||||||
|
**super().create_resource_dict(),
|
||||||
|
"/_synapse/client/rendezvous": MSC4108RendezvousSessionResource(self.hs),
|
||||||
|
}
|
||||||
|
|
||||||
def test_disabled(self) -> None:
|
def test_disabled(self) -> None:
|
||||||
channel = self.make_request("POST", endpoint, {}, access_token=None)
|
channel = self.make_request("POST", msc3886_endpoint, {}, access_token=None)
|
||||||
|
self.assertEqual(channel.code, 404)
|
||||||
|
channel = self.make_request("POST", msc4108_endpoint, {}, access_token=None)
|
||||||
self.assertEqual(channel.code, 404)
|
self.assertEqual(channel.code, 404)
|
||||||
|
|
||||||
@override_config({"experimental_features": {"msc3886_endpoint": "/asd"}})
|
@override_config({"experimental_features": {"msc3886_endpoint": "/asd"}})
|
||||||
def test_redirect(self) -> None:
|
def test_msc3886_redirect(self) -> None:
|
||||||
channel = self.make_request("POST", endpoint, {}, access_token=None)
|
channel = self.make_request("POST", msc3886_endpoint, {}, access_token=None)
|
||||||
self.assertEqual(channel.code, 307)
|
self.assertEqual(channel.code, 307)
|
||||||
self.assertEqual(channel.headers.getRawHeaders("Location"), ["/asd"])
|
self.assertEqual(channel.headers.getRawHeaders("Location"), ["/asd"])
|
||||||
|
|
||||||
|
@unittest.skip_unless(HAS_AUTHLIB, "requires authlib")
|
||||||
|
@override_config(
|
||||||
|
{
|
||||||
|
"disable_registration": True,
|
||||||
|
"experimental_features": {
|
||||||
|
"msc4108_delegation_endpoint": "https://asd",
|
||||||
|
"msc3861": {
|
||||||
|
"enabled": True,
|
||||||
|
"issuer": "https://issuer",
|
||||||
|
"client_id": "client_id",
|
||||||
|
"client_auth_method": "client_secret_post",
|
||||||
|
"client_secret": "client_secret",
|
||||||
|
"admin_token": "admin_token_value",
|
||||||
|
},
|
||||||
|
},
|
||||||
|
}
|
||||||
|
)
|
||||||
|
def test_msc4108_delegation(self) -> None:
|
||||||
|
channel = self.make_request("POST", msc4108_endpoint, {}, access_token=None)
|
||||||
|
self.assertEqual(channel.code, 307)
|
||||||
|
self.assertEqual(channel.headers.getRawHeaders("Location"), ["https://asd"])
|
||||||
|
|
||||||
|
@unittest.skip_unless(HAS_AUTHLIB, "requires authlib")
|
||||||
|
@override_config(
|
||||||
|
{
|
||||||
|
"disable_registration": True,
|
||||||
|
"experimental_features": {
|
||||||
|
"msc4108_enabled": True,
|
||||||
|
"msc3861": {
|
||||||
|
"enabled": True,
|
||||||
|
"issuer": "https://issuer",
|
||||||
|
"client_id": "client_id",
|
||||||
|
"client_auth_method": "client_secret_post",
|
||||||
|
"client_secret": "client_secret",
|
||||||
|
"admin_token": "admin_token_value",
|
||||||
|
},
|
||||||
|
},
|
||||||
|
}
|
||||||
|
)
|
||||||
|
def test_msc4108(self) -> None:
|
||||||
|
"""
|
||||||
|
Test the MSC4108 rendezvous endpoint, including:
|
||||||
|
- Creating a session
|
||||||
|
- Getting the data back
|
||||||
|
- Updating the data
|
||||||
|
- Deleting the data
|
||||||
|
- ETag handling
|
||||||
|
"""
|
||||||
|
# We can post arbitrary data to the endpoint
|
||||||
|
channel = self.make_request(
|
||||||
|
"POST",
|
||||||
|
msc4108_endpoint,
|
||||||
|
"foo=bar",
|
||||||
|
content_type=b"text/plain",
|
||||||
|
access_token=None,
|
||||||
|
)
|
||||||
|
self.assertEqual(channel.code, 201)
|
||||||
|
self.assertSubstring("/_synapse/client/rendezvous/", channel.json_body["url"])
|
||||||
|
headers = dict(channel.headers.getAllRawHeaders())
|
||||||
|
self.assertIn(b"ETag", headers)
|
||||||
|
self.assertIn(b"Expires", headers)
|
||||||
|
self.assertEqual(headers[b"Content-Type"], [b"application/json"])
|
||||||
|
self.assertEqual(headers[b"Access-Control-Allow-Origin"], [b"*"])
|
||||||
|
self.assertEqual(headers[b"Access-Control-Expose-Headers"], [b"etag"])
|
||||||
|
self.assertEqual(headers[b"Cache-Control"], [b"no-store"])
|
||||||
|
self.assertEqual(headers[b"Pragma"], [b"no-cache"])
|
||||||
|
self.assertIn("url", channel.json_body)
|
||||||
|
self.assertTrue(channel.json_body["url"].startswith("https://"))
|
||||||
|
|
||||||
|
url = urlparse(channel.json_body["url"])
|
||||||
|
session_endpoint = url.path
|
||||||
|
etag = headers[b"ETag"][0]
|
||||||
|
|
||||||
|
# We can get the data back
|
||||||
|
channel = self.make_request(
|
||||||
|
"GET",
|
||||||
|
session_endpoint,
|
||||||
|
access_token=None,
|
||||||
|
)
|
||||||
|
|
||||||
|
self.assertEqual(channel.code, 200)
|
||||||
|
headers = dict(channel.headers.getAllRawHeaders())
|
||||||
|
self.assertEqual(headers[b"ETag"], [etag])
|
||||||
|
self.assertIn(b"Expires", headers)
|
||||||
|
self.assertEqual(headers[b"Content-Type"], [b"text/plain"])
|
||||||
|
self.assertEqual(headers[b"Access-Control-Allow-Origin"], [b"*"])
|
||||||
|
self.assertEqual(headers[b"Access-Control-Expose-Headers"], [b"etag"])
|
||||||
|
self.assertEqual(headers[b"Cache-Control"], [b"no-store"])
|
||||||
|
self.assertEqual(headers[b"Pragma"], [b"no-cache"])
|
||||||
|
self.assertEqual(channel.text_body, "foo=bar")
|
||||||
|
|
||||||
|
# We can make sure the data hasn't changed
|
||||||
|
channel = self.make_request(
|
||||||
|
"GET",
|
||||||
|
session_endpoint,
|
||||||
|
access_token=None,
|
||||||
|
custom_headers=[("If-None-Match", etag)],
|
||||||
|
)
|
||||||
|
|
||||||
|
self.assertEqual(channel.code, 304)
|
||||||
|
|
||||||
|
# We can update the data
|
||||||
|
channel = self.make_request(
|
||||||
|
"PUT",
|
||||||
|
session_endpoint,
|
||||||
|
"foo=baz",
|
||||||
|
content_type=b"text/plain",
|
||||||
|
access_token=None,
|
||||||
|
custom_headers=[("If-Match", etag)],
|
||||||
|
)
|
||||||
|
|
||||||
|
self.assertEqual(channel.code, 202)
|
||||||
|
headers = dict(channel.headers.getAllRawHeaders())
|
||||||
|
old_etag = etag
|
||||||
|
new_etag = headers[b"ETag"][0]
|
||||||
|
|
||||||
|
# If we try to update it again with the old etag, it should fail
|
||||||
|
channel = self.make_request(
|
||||||
|
"PUT",
|
||||||
|
session_endpoint,
|
||||||
|
"bar=baz",
|
||||||
|
content_type=b"text/plain",
|
||||||
|
access_token=None,
|
||||||
|
custom_headers=[("If-Match", old_etag)],
|
||||||
|
)
|
||||||
|
|
||||||
|
self.assertEqual(channel.code, 412)
|
||||||
|
self.assertEqual(channel.json_body["errcode"], "M_UNKNOWN")
|
||||||
|
self.assertEqual(
|
||||||
|
channel.json_body["org.matrix.msc4108.errcode"], "M_CONCURRENT_WRITE"
|
||||||
|
)
|
||||||
|
|
||||||
|
# If we try to get with the old etag, we should get the updated data
|
||||||
|
channel = self.make_request(
|
||||||
|
"GET",
|
||||||
|
session_endpoint,
|
||||||
|
access_token=None,
|
||||||
|
custom_headers=[("If-None-Match", old_etag)],
|
||||||
|
)
|
||||||
|
|
||||||
|
self.assertEqual(channel.code, 200)
|
||||||
|
headers = dict(channel.headers.getAllRawHeaders())
|
||||||
|
self.assertEqual(headers[b"ETag"], [new_etag])
|
||||||
|
self.assertEqual(channel.text_body, "foo=baz")
|
||||||
|
|
||||||
|
# We can delete the data
|
||||||
|
channel = self.make_request(
|
||||||
|
"DELETE",
|
||||||
|
session_endpoint,
|
||||||
|
access_token=None,
|
||||||
|
)
|
||||||
|
|
||||||
|
self.assertEqual(channel.code, 204)
|
||||||
|
|
||||||
|
# If we try to get the data again, it should fail
|
||||||
|
channel = self.make_request(
|
||||||
|
"GET",
|
||||||
|
session_endpoint,
|
||||||
|
access_token=None,
|
||||||
|
)
|
||||||
|
|
||||||
|
self.assertEqual(channel.code, 404)
|
||||||
|
self.assertEqual(channel.json_body["errcode"], "M_NOT_FOUND")
|
||||||
|
|
||||||
|
@unittest.skip_unless(HAS_AUTHLIB, "requires authlib")
|
||||||
|
@override_config(
|
||||||
|
{
|
||||||
|
"disable_registration": True,
|
||||||
|
"experimental_features": {
|
||||||
|
"msc4108_enabled": True,
|
||||||
|
"msc3861": {
|
||||||
|
"enabled": True,
|
||||||
|
"issuer": "https://issuer",
|
||||||
|
"client_id": "client_id",
|
||||||
|
"client_auth_method": "client_secret_post",
|
||||||
|
"client_secret": "client_secret",
|
||||||
|
"admin_token": "admin_token_value",
|
||||||
|
},
|
||||||
|
},
|
||||||
|
}
|
||||||
|
)
|
||||||
|
def test_msc4108_expiration(self) -> None:
|
||||||
|
"""
|
||||||
|
Test that entries are evicted after a TTL.
|
||||||
|
"""
|
||||||
|
# Start a new session
|
||||||
|
channel = self.make_request(
|
||||||
|
"POST",
|
||||||
|
msc4108_endpoint,
|
||||||
|
"foo=bar",
|
||||||
|
content_type=b"text/plain",
|
||||||
|
access_token=None,
|
||||||
|
)
|
||||||
|
self.assertEqual(channel.code, 201)
|
||||||
|
session_endpoint = urlparse(channel.json_body["url"]).path
|
||||||
|
|
||||||
|
# Sanity check that we can get the data back
|
||||||
|
channel = self.make_request(
|
||||||
|
"GET",
|
||||||
|
session_endpoint,
|
||||||
|
access_token=None,
|
||||||
|
)
|
||||||
|
self.assertEqual(channel.code, 200)
|
||||||
|
self.assertEqual(channel.text_body, "foo=bar")
|
||||||
|
|
||||||
|
# Advance the clock, TTL of entries is 1 minute
|
||||||
|
self.reactor.advance(60)
|
||||||
|
|
||||||
|
# Get the data back, it should be gone
|
||||||
|
channel = self.make_request(
|
||||||
|
"GET",
|
||||||
|
session_endpoint,
|
||||||
|
access_token=None,
|
||||||
|
)
|
||||||
|
self.assertEqual(channel.code, 404)
|
||||||
|
|
||||||
|
@unittest.skip_unless(HAS_AUTHLIB, "requires authlib")
|
||||||
|
@override_config(
|
||||||
|
{
|
||||||
|
"disable_registration": True,
|
||||||
|
"experimental_features": {
|
||||||
|
"msc4108_enabled": True,
|
||||||
|
"msc3861": {
|
||||||
|
"enabled": True,
|
||||||
|
"issuer": "https://issuer",
|
||||||
|
"client_id": "client_id",
|
||||||
|
"client_auth_method": "client_secret_post",
|
||||||
|
"client_secret": "client_secret",
|
||||||
|
"admin_token": "admin_token_value",
|
||||||
|
},
|
||||||
|
},
|
||||||
|
}
|
||||||
|
)
|
||||||
|
def test_msc4108_capacity(self) -> None:
|
||||||
|
"""
|
||||||
|
Test that a capacity limit is enforced on the rendezvous sessions, as old
|
||||||
|
entries are evicted at an interval when the limit is reached.
|
||||||
|
"""
|
||||||
|
# Start a new session
|
||||||
|
channel = self.make_request(
|
||||||
|
"POST",
|
||||||
|
msc4108_endpoint,
|
||||||
|
"foo=bar",
|
||||||
|
content_type=b"text/plain",
|
||||||
|
access_token=None,
|
||||||
|
)
|
||||||
|
self.assertEqual(channel.code, 201)
|
||||||
|
session_endpoint = urlparse(channel.json_body["url"]).path
|
||||||
|
|
||||||
|
# Sanity check that we can get the data back
|
||||||
|
channel = self.make_request(
|
||||||
|
"GET",
|
||||||
|
session_endpoint,
|
||||||
|
access_token=None,
|
||||||
|
)
|
||||||
|
self.assertEqual(channel.code, 200)
|
||||||
|
self.assertEqual(channel.text_body, "foo=bar")
|
||||||
|
|
||||||
|
# Start a lot of new sessions
|
||||||
|
for _ in range(100):
|
||||||
|
channel = self.make_request(
|
||||||
|
"POST",
|
||||||
|
msc4108_endpoint,
|
||||||
|
"foo=bar",
|
||||||
|
content_type=b"text/plain",
|
||||||
|
access_token=None,
|
||||||
|
)
|
||||||
|
self.assertEqual(channel.code, 201)
|
||||||
|
|
||||||
|
# Get the data back, it should still be there, as the eviction hasn't run yet
|
||||||
|
channel = self.make_request(
|
||||||
|
"GET",
|
||||||
|
session_endpoint,
|
||||||
|
access_token=None,
|
||||||
|
)
|
||||||
|
|
||||||
|
self.assertEqual(channel.code, 200)
|
||||||
|
|
||||||
|
# Advance the clock, as it will trigger the eviction
|
||||||
|
self.reactor.advance(1)
|
||||||
|
|
||||||
|
# Get the data back, it should be gone
|
||||||
|
channel = self.make_request(
|
||||||
|
"GET",
|
||||||
|
session_endpoint,
|
||||||
|
access_token=None,
|
||||||
|
)
|
||||||
|
|
||||||
|
@unittest.skip_unless(HAS_AUTHLIB, "requires authlib")
|
||||||
|
@override_config(
|
||||||
|
{
|
||||||
|
"disable_registration": True,
|
||||||
|
"experimental_features": {
|
||||||
|
"msc4108_enabled": True,
|
||||||
|
"msc3861": {
|
||||||
|
"enabled": True,
|
||||||
|
"issuer": "https://issuer",
|
||||||
|
"client_id": "client_id",
|
||||||
|
"client_auth_method": "client_secret_post",
|
||||||
|
"client_secret": "client_secret",
|
||||||
|
"admin_token": "admin_token_value",
|
||||||
|
},
|
||||||
|
},
|
||||||
|
}
|
||||||
|
)
|
||||||
|
def test_msc4108_hard_capacity(self) -> None:
|
||||||
|
"""
|
||||||
|
Test that a hard capacity limit is enforced on the rendezvous sessions, as old
|
||||||
|
entries are evicted immediately when the limit is reached.
|
||||||
|
"""
|
||||||
|
# Start a new session
|
||||||
|
channel = self.make_request(
|
||||||
|
"POST",
|
||||||
|
msc4108_endpoint,
|
||||||
|
"foo=bar",
|
||||||
|
content_type=b"text/plain",
|
||||||
|
access_token=None,
|
||||||
|
)
|
||||||
|
self.assertEqual(channel.code, 201)
|
||||||
|
session_endpoint = urlparse(channel.json_body["url"]).path
|
||||||
|
# We advance the clock to make sure that this entry is the "lowest" in the session list
|
||||||
|
self.reactor.advance(1)
|
||||||
|
|
||||||
|
# Sanity check that we can get the data back
|
||||||
|
channel = self.make_request(
|
||||||
|
"GET",
|
||||||
|
session_endpoint,
|
||||||
|
access_token=None,
|
||||||
|
)
|
||||||
|
self.assertEqual(channel.code, 200)
|
||||||
|
self.assertEqual(channel.text_body, "foo=bar")
|
||||||
|
|
||||||
|
# Start a lot of new sessions
|
||||||
|
for _ in range(200):
|
||||||
|
channel = self.make_request(
|
||||||
|
"POST",
|
||||||
|
msc4108_endpoint,
|
||||||
|
"foo=bar",
|
||||||
|
content_type=b"text/plain",
|
||||||
|
access_token=None,
|
||||||
|
)
|
||||||
|
self.assertEqual(channel.code, 201)
|
||||||
|
|
||||||
|
# Get the data back, it should already be gone as we hit the hard limit
|
||||||
|
channel = self.make_request(
|
||||||
|
"GET",
|
||||||
|
session_endpoint,
|
||||||
|
access_token=None,
|
||||||
|
)
|
||||||
|
|
||||||
|
self.assertEqual(channel.code, 404)
|
||||||
|
|
||||||
|
@unittest.skip_unless(HAS_AUTHLIB, "requires authlib")
|
||||||
|
@override_config(
|
||||||
|
{
|
||||||
|
"disable_registration": True,
|
||||||
|
"experimental_features": {
|
||||||
|
"msc4108_enabled": True,
|
||||||
|
"msc3861": {
|
||||||
|
"enabled": True,
|
||||||
|
"issuer": "https://issuer",
|
||||||
|
"client_id": "client_id",
|
||||||
|
"client_auth_method": "client_secret_post",
|
||||||
|
"client_secret": "client_secret",
|
||||||
|
"admin_token": "admin_token_value",
|
||||||
|
},
|
||||||
|
},
|
||||||
|
}
|
||||||
|
)
|
||||||
|
def test_msc4108_content_type(self) -> None:
|
||||||
|
"""
|
||||||
|
Test that the content-type is restricted to text/plain.
|
||||||
|
"""
|
||||||
|
# We cannot post invalid content-type arbitrary data to the endpoint
|
||||||
|
channel = self.make_request(
|
||||||
|
"POST",
|
||||||
|
msc4108_endpoint,
|
||||||
|
"foo=bar",
|
||||||
|
content_is_form=True,
|
||||||
|
access_token=None,
|
||||||
|
)
|
||||||
|
self.assertEqual(channel.code, 400)
|
||||||
|
self.assertEqual(channel.json_body["errcode"], "M_INVALID_PARAM")
|
||||||
|
|
||||||
|
# Make a valid request
|
||||||
|
channel = self.make_request(
|
||||||
|
"POST",
|
||||||
|
msc4108_endpoint,
|
||||||
|
"foo=bar",
|
||||||
|
content_type=b"text/plain",
|
||||||
|
access_token=None,
|
||||||
|
)
|
||||||
|
self.assertEqual(channel.code, 201)
|
||||||
|
url = urlparse(channel.json_body["url"])
|
||||||
|
session_endpoint = url.path
|
||||||
|
headers = dict(channel.headers.getAllRawHeaders())
|
||||||
|
etag = headers[b"ETag"][0]
|
||||||
|
|
||||||
|
# We can't update the data with invalid content-type
|
||||||
|
channel = self.make_request(
|
||||||
|
"PUT",
|
||||||
|
session_endpoint,
|
||||||
|
"foo=baz",
|
||||||
|
content_is_form=True,
|
||||||
|
access_token=None,
|
||||||
|
custom_headers=[("If-Match", etag)],
|
||||||
|
)
|
||||||
|
self.assertEqual(channel.code, 400)
|
||||||
|
self.assertEqual(channel.json_body["errcode"], "M_INVALID_PARAM")
|
||||||
|
|
|
@ -2175,6 +2175,31 @@ class RoomMessageListTestCase(RoomBase):
|
||||||
chunk = channel.json_body["chunk"]
|
chunk = channel.json_body["chunk"]
|
||||||
self.assertEqual(len(chunk), 0, [event["content"] for event in chunk])
|
self.assertEqual(len(chunk), 0, [event["content"] for event in chunk])
|
||||||
|
|
||||||
|
def test_room_message_filter_query_validation(self) -> None:
|
||||||
|
# Test json validation in (filter) query parameter.
|
||||||
|
# Does not test the validity of the filter, only the json validation.
|
||||||
|
|
||||||
|
# Check Get with valid json filter parameter, expect 200.
|
||||||
|
valid_filter_str = '{"types": ["m.room.message"]}'
|
||||||
|
channel = self.make_request(
|
||||||
|
"GET",
|
||||||
|
f"/rooms/{self.room_id}/messages?access_token=x&dir=b&filter={valid_filter_str}",
|
||||||
|
)
|
||||||
|
|
||||||
|
self.assertEqual(channel.code, HTTPStatus.OK, channel.json_body)
|
||||||
|
|
||||||
|
# Check Get with invalid json filter parameter, expect 400 NOT_JSON.
|
||||||
|
invalid_filter_str = "}}}{}"
|
||||||
|
channel = self.make_request(
|
||||||
|
"GET",
|
||||||
|
f"/rooms/{self.room_id}/messages?access_token=x&dir=b&filter={invalid_filter_str}",
|
||||||
|
)
|
||||||
|
|
||||||
|
self.assertEqual(channel.code, HTTPStatus.BAD_REQUEST, channel.json_body)
|
||||||
|
self.assertEqual(
|
||||||
|
channel.json_body["errcode"], Codes.NOT_JSON, channel.json_body
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
class RoomMessageFilterTestCase(RoomBase):
|
class RoomMessageFilterTestCase(RoomBase):
|
||||||
"""Tests /rooms/$room_id/messages REST events."""
|
"""Tests /rooms/$room_id/messages REST events."""
|
||||||
|
@ -3213,6 +3238,33 @@ class ContextTestCase(unittest.HomeserverTestCase):
|
||||||
self.assertDictEqual(events_after[0].get("content"), {}, events_after[0])
|
self.assertDictEqual(events_after[0].get("content"), {}, events_after[0])
|
||||||
self.assertEqual(events_after[1].get("content"), {}, events_after[1])
|
self.assertEqual(events_after[1].get("content"), {}, events_after[1])
|
||||||
|
|
||||||
|
def test_room_event_context_filter_query_validation(self) -> None:
|
||||||
|
# Test json validation in (filter) query parameter.
|
||||||
|
# Does not test the validity of the filter, only the json validation.
|
||||||
|
event_id = self.helper.send(self.room_id, "message 7", tok=self.tok)["event_id"]
|
||||||
|
|
||||||
|
# Check Get with valid json filter parameter, expect 200.
|
||||||
|
valid_filter_str = '{"types": ["m.room.message"]}'
|
||||||
|
channel = self.make_request(
|
||||||
|
"GET",
|
||||||
|
f"/rooms/{self.room_id}/context/{event_id}?filter={valid_filter_str}",
|
||||||
|
access_token=self.tok,
|
||||||
|
)
|
||||||
|
self.assertEqual(channel.code, HTTPStatus.OK, channel.json_body)
|
||||||
|
|
||||||
|
# Check Get with invalid json filter parameter, expect 400 NOT_JSON.
|
||||||
|
invalid_filter_str = "}}}{}"
|
||||||
|
channel = self.make_request(
|
||||||
|
"GET",
|
||||||
|
f"/rooms/{self.room_id}/context/{event_id}?filter={invalid_filter_str}",
|
||||||
|
access_token=self.tok,
|
||||||
|
)
|
||||||
|
|
||||||
|
self.assertEqual(channel.code, HTTPStatus.BAD_REQUEST, channel.json_body)
|
||||||
|
self.assertEqual(
|
||||||
|
channel.json_body["errcode"], Codes.NOT_JSON, channel.json_body
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
class RoomAliasListTestCase(unittest.HomeserverTestCase):
|
class RoomAliasListTestCase(unittest.HomeserverTestCase):
|
||||||
servlets = [
|
servlets = [
|
||||||
|
|
|
@ -351,6 +351,7 @@ def make_request(
|
||||||
request: Type[Request] = SynapseRequest,
|
request: Type[Request] = SynapseRequest,
|
||||||
shorthand: bool = True,
|
shorthand: bool = True,
|
||||||
federation_auth_origin: Optional[bytes] = None,
|
federation_auth_origin: Optional[bytes] = None,
|
||||||
|
content_type: Optional[bytes] = None,
|
||||||
content_is_form: bool = False,
|
content_is_form: bool = False,
|
||||||
await_result: bool = True,
|
await_result: bool = True,
|
||||||
custom_headers: Optional[Iterable[CustomHeaderType]] = None,
|
custom_headers: Optional[Iterable[CustomHeaderType]] = None,
|
||||||
|
@ -373,6 +374,8 @@ def make_request(
|
||||||
with the usual REST API path, if it doesn't contain it.
|
with the usual REST API path, if it doesn't contain it.
|
||||||
federation_auth_origin: if set to not-None, we will add a fake
|
federation_auth_origin: if set to not-None, we will add a fake
|
||||||
Authorization header pretenting to be the given server name.
|
Authorization header pretenting to be the given server name.
|
||||||
|
content_type: The content-type to use for the request. If not set then will default to
|
||||||
|
application/json unless content_is_form is true.
|
||||||
content_is_form: Whether the content is URL encoded form data. Adds the
|
content_is_form: Whether the content is URL encoded form data. Adds the
|
||||||
'Content-Type': 'application/x-www-form-urlencoded' header.
|
'Content-Type': 'application/x-www-form-urlencoded' header.
|
||||||
await_result: whether to wait for the request to complete rendering. If true,
|
await_result: whether to wait for the request to complete rendering. If true,
|
||||||
|
@ -436,7 +439,9 @@ def make_request(
|
||||||
)
|
)
|
||||||
|
|
||||||
if content:
|
if content:
|
||||||
if content_is_form:
|
if content_type is not None:
|
||||||
|
req.requestHeaders.addRawHeader(b"Content-Type", content_type)
|
||||||
|
elif content_is_form:
|
||||||
req.requestHeaders.addRawHeader(
|
req.requestHeaders.addRawHeader(
|
||||||
b"Content-Type", b"application/x-www-form-urlencoded"
|
b"Content-Type", b"application/x-www-form-urlencoded"
|
||||||
)
|
)
|
||||||
|
|
|
@ -523,6 +523,7 @@ class HomeserverTestCase(TestCase):
|
||||||
request: Type[Request] = SynapseRequest,
|
request: Type[Request] = SynapseRequest,
|
||||||
shorthand: bool = True,
|
shorthand: bool = True,
|
||||||
federation_auth_origin: Optional[bytes] = None,
|
federation_auth_origin: Optional[bytes] = None,
|
||||||
|
content_type: Optional[bytes] = None,
|
||||||
content_is_form: bool = False,
|
content_is_form: bool = False,
|
||||||
await_result: bool = True,
|
await_result: bool = True,
|
||||||
custom_headers: Optional[Iterable[CustomHeaderType]] = None,
|
custom_headers: Optional[Iterable[CustomHeaderType]] = None,
|
||||||
|
@ -541,6 +542,9 @@ class HomeserverTestCase(TestCase):
|
||||||
with the usual REST API path, if it doesn't contain it.
|
with the usual REST API path, if it doesn't contain it.
|
||||||
federation_auth_origin: if set to not-None, we will add a fake
|
federation_auth_origin: if set to not-None, we will add a fake
|
||||||
Authorization header pretenting to be the given server name.
|
Authorization header pretenting to be the given server name.
|
||||||
|
|
||||||
|
content_type: The content-type to use for the request. If not set then will default to
|
||||||
|
application/json unless content_is_form is true.
|
||||||
content_is_form: Whether the content is URL encoded form data. Adds the
|
content_is_form: Whether the content is URL encoded form data. Adds the
|
||||||
'Content-Type': 'application/x-www-form-urlencoded' header.
|
'Content-Type': 'application/x-www-form-urlencoded' header.
|
||||||
|
|
||||||
|
@ -566,6 +570,7 @@ class HomeserverTestCase(TestCase):
|
||||||
request,
|
request,
|
||||||
shorthand,
|
shorthand,
|
||||||
federation_auth_origin,
|
federation_auth_origin,
|
||||||
|
content_type,
|
||||||
content_is_form,
|
content_is_form,
|
||||||
await_result,
|
await_result,
|
||||||
custom_headers,
|
custom_headers,
|
||||||
|
|
Loading…
Reference in a new issue