Merge branch 'release-v1.52'
Commit 0b561a0ea1

102 changed files with 2743 additions and 814 deletions

.ci/scripts/test_old_deps.sh

@@ -1,12 +1,14 @@
 #!/usr/bin/env bash
-# this script is run by GitHub Actions in a plain `bionic` container; it installs the
+# this script is run by GitHub Actions in a plain `focal` container; it installs the
 # minimal requirements for tox and hands over to the py3-old tox environment.

+# Prevent tzdata from asking for user input
+export DEBIAN_FRONTEND=noninteractive
+
 set -ex

 apt-get update
-apt-get install -y python3 python3-dev python3-pip libxml2-dev libxslt-dev xmlsec1 zlib1g-dev tox
+apt-get install -y python3 python3-dev python3-pip libxml2-dev libxslt-dev xmlsec1 zlib1g-dev tox libjpeg-dev libwebp-dev

 export LANG="C.UTF-8"

.github/workflows/docker.yml (14 lines changed)

@@ -34,6 +34,8 @@ jobs:
           username: ${{ secrets.DOCKERHUB_USERNAME }}
           password: ${{ secrets.DOCKERHUB_TOKEN }}

+      # TODO: consider using https://github.com/docker/metadata-action instead of this
+      # custom magic
       - name: Calculate docker image tag
         id: set-tag
         run: |

@@ -53,18 +55,6 @@ jobs:
           esac
           echo "::set-output name=tag::$tag"

-      # for release builds, we want to get the amd64 image out asap, so first
-      # we do an amd64-only build, before following up with a multiarch build.
-      - name: Build and push amd64
-        uses: docker/build-push-action@v2
-        if: "${{ startsWith(github.ref, 'refs/tags/v') }}"
-        with:
-          push: true
-          labels: "gitsha1=${{ github.sha }}"
-          tags: "matrixdotorg/synapse:${{ steps.set-tag.outputs.tag }}"
-          file: "docker/Dockerfile"
-          platforms: linux/amd64
-
       - name: Build and push all platforms
         uses: docker/build-push-action@v2
         with:

.github/workflows/tests.yml (36 lines changed)

@@ -141,7 +141,7 @@ jobs:
     steps:
       - uses: actions/checkout@v2
       - name: Test with old deps
-        uses: docker://ubuntu:bionic # For old python and sqlite
+        uses: docker://ubuntu:focal # For old python and sqlite
         with:
           workdir: /github/workspace
           entrypoint: .ci/scripts/test_old_deps.sh

@@ -213,15 +213,15 @@ jobs:
       fail-fast: false
       matrix:
         include:
-          - sytest-tag: bionic
+          - sytest-tag: focal

-          - sytest-tag: bionic
+          - sytest-tag: focal
             postgres: postgres

           - sytest-tag: testing
             postgres: postgres

-          - sytest-tag: bionic
+          - sytest-tag: focal
             postgres: multi-postgres
             workers: workers

@@ -323,17 +323,22 @@ jobs:
     if: ${{ !failure() && !cancelled() }}
     needs: linting-done
     runs-on: ubuntu-latest
-    container:
-      # https://github.com/matrix-org/complement/blob/master/dockerfiles/ComplementCIBuildkite.Dockerfile
-      image: matrixdotorg/complement:latest
-      env:
-        CI: true
-      ports:
-        - 8448:8448
-      volumes:
-        - /var/run/docker.sock:/var/run/docker.sock
-
     steps:
+      # The path is set via a file given by $GITHUB_PATH. We need both Go 1.17 and GOPATH on the path to run Complement.
+      # See https://docs.github.com/en/actions/using-workflows/workflow-commands-for-github-actions#adding-a-system-path
+      - name: "Set Go Version"
+        run: |
+          # Add Go 1.17 to the PATH: see https://github.com/actions/virtual-environments/blob/main/images/linux/Ubuntu2004-Readme.md#environment-variables-2
+          echo "$GOROOT_1_17_X64/bin" >> $GITHUB_PATH
+          # Add the Go path to the PATH: We need this so we can call gotestfmt
+          echo "~/go/bin" >> $GITHUB_PATH
+
+      - name: "Install Complement Dependencies"
+        run: |
+          sudo apt-get update && sudo apt-get install -y libolm3 libolm-dev
+          go get -v github.com/haveyoudebuggedit/gotestfmt/v2/cmd/gotestfmt@latest
+
       - name: Run actions/checkout@v2 for synapse
         uses: actions/checkout@v2
         with:

@@ -376,8 +381,11 @@ jobs:
         working-directory: complement/dockerfiles

       # Run Complement
-      - run: set -o pipefail && go test -v -json -tags synapse_blacklist,msc2403 ./tests/... 2>&1 | gotestfmt
+      - run: |
+          set -o pipefail
+          go test -v -json -tags synapse_blacklist,msc2403 ./tests/... 2>&1 | gotestfmt
         shell: bash
+        name: Run Complement Tests
         env:
           COMPLEMENT_BASE_IMAGE: complement-synapse:latest
         working-directory: complement

.github/workflows/twisted_trunk.yml (2 lines changed)

@@ -25,7 +25,7 @@ jobs:
       - run: sudo apt-get -qq install xmlsec1
       - uses: actions/setup-python@v2
         with:
-          python-version: 3.6
+          python-version: 3.7
       - run: .ci/patch_for_twisted_trunk.sh
       - run: pip install tox
       - run: tox -e py

CHANGES.md (74 lines changed)

@@ -1,3 +1,77 @@
+Synapse 1.52.0 (2022-02-08)
+===========================
+
+No significant changes since 1.52.0rc1.
+
+Note that [Twisted 22.1.0](https://github.com/twisted/twisted/releases/tag/twisted-22.1.0)
+has recently been released, which fixes a [security issue](https://github.com/twisted/twisted/security/advisories/GHSA-92x2-jw7w-xvvx)
+within the Twisted library. We do not believe Synapse is affected by this vulnerability,
+though we advise server administrators who installed Synapse via pip to upgrade Twisted
+with `pip install --upgrade Twisted` as a matter of good practice. The Docker image
+`matrixdotorg/synapse` and the Debian packages from `packages.matrix.org` are using the
+updated library.
+
+
+Synapse 1.52.0rc1 (2022-02-01)
+==============================
+
+Features
+--------
+
+- Remove account data (including client config, push rules and ignored users) upon user deactivation. ([\#11621](https://github.com/matrix-org/synapse/issues/11621), [\#11788](https://github.com/matrix-org/synapse/issues/11788), [\#11789](https://github.com/matrix-org/synapse/issues/11789))
+- Add an admin API to reset connection timeouts for remote server. ([\#11639](https://github.com/matrix-org/synapse/issues/11639))
+- Add an admin API to get a list of rooms that federate with a given remote homeserver. ([\#11658](https://github.com/matrix-org/synapse/issues/11658))
+- Add a config flag to inhibit `M_USER_IN_USE` during registration. ([\#11743](https://github.com/matrix-org/synapse/issues/11743))
+- Add a module callback to set username at registration. ([\#11790](https://github.com/matrix-org/synapse/issues/11790))
+- Allow configuring a maximum file size as well as a list of allowed content types for avatars. ([\#11846](https://github.com/matrix-org/synapse/issues/11846))
+
+
+Bugfixes
+--------
+
+- Include the bundled aggregations in the `/sync` response, per [MSC2675](https://github.com/matrix-org/matrix-doc/pull/2675). ([\#11612](https://github.com/matrix-org/synapse/issues/11612))
+- Fix a long-standing bug when previewing Reddit URLs which do not contain an image. ([\#11767](https://github.com/matrix-org/synapse/issues/11767))
+- Fix a long-standing bug that media streams could cause long-lived connections when generating URL previews. ([\#11784](https://github.com/matrix-org/synapse/issues/11784))
+- Include a `prev_content` field in state events sent to Application Services. Contributed by @totallynotvaishnav. ([\#11798](https://github.com/matrix-org/synapse/issues/11798))
+- Fix a bug introduced in Synapse 0.33.3 causing requests to sometimes log strings such as `HTTPStatus.OK` instead of integer status codes. ([\#11827](https://github.com/matrix-org/synapse/issues/11827))
+
+
+Improved Documentation
+----------------------
+
+- Update pypi installation docs to indicate that we now support Python 3.10. ([\#11820](https://github.com/matrix-org/synapse/issues/11820))
+- Add missing steps to the contribution submission process in the documentation. Contributed by @sequentialread. ([\#11821](https://github.com/matrix-org/synapse/issues/11821))
+- Remove not needed old table of contents in documentation. ([\#11860](https://github.com/matrix-org/synapse/issues/11860))
+- Consolidate the `access_token` information at the top of each relevant page in the Admin API documentation. ([\#11861](https://github.com/matrix-org/synapse/issues/11861))
+
+
+Deprecations and Removals
+-------------------------
+
+- Drop support for Python 3.6, which is EOL. ([\#11683](https://github.com/matrix-org/synapse/issues/11683))
+- Remove the `experimental_msc1849_support_enabled` flag as the features are now stable. ([\#11843](https://github.com/matrix-org/synapse/issues/11843))
+
+
+Internal Changes
+----------------
+
+- Preparation for database schema simplifications: add `state_key` and `rejection_reason` columns to `events` table. ([\#11792](https://github.com/matrix-org/synapse/issues/11792))
+- Add `FrozenEvent.get_state_key` and use it in a couple of places. ([\#11793](https://github.com/matrix-org/synapse/issues/11793))
+- Preparation for database schema simplifications: stop reading from `event_reference_hashes`. ([\#11794](https://github.com/matrix-org/synapse/issues/11794))
+- Drop unused table `public_room_list_stream`. ([\#11795](https://github.com/matrix-org/synapse/issues/11795))
+- Preparation for reducing Postgres serialization errors: allow setting transaction isolation level. Contributed by Nick @ Beeper. ([\#11799](https://github.com/matrix-org/synapse/issues/11799), [\#11847](https://github.com/matrix-org/synapse/issues/11847))
+- Docker: skip the initial amd64-only build and go straight to multiarch. ([\#11810](https://github.com/matrix-org/synapse/issues/11810))
+- Run Complement on the Github Actions VM and not inside a Docker container. ([\#11811](https://github.com/matrix-org/synapse/issues/11811))
+- Log module names at startup. ([\#11813](https://github.com/matrix-org/synapse/issues/11813))
+- Improve type safety of bundled aggregations code. ([\#11815](https://github.com/matrix-org/synapse/issues/11815))
+- Correct a type annotation in the event validation logic. ([\#11817](https://github.com/matrix-org/synapse/issues/11817), [\#11830](https://github.com/matrix-org/synapse/issues/11830))
+- Minor updates and documentation for database schema delta files. ([\#11823](https://github.com/matrix-org/synapse/issues/11823))
+- Workaround a type annotation problem in `prometheus_client` 0.13.0. ([\#11834](https://github.com/matrix-org/synapse/issues/11834))
+- Minor performance improvement in room state lookup. ([\#11836](https://github.com/matrix-org/synapse/issues/11836))
+- Fix some indentation inconsistencies in the sample config. ([\#11838](https://github.com/matrix-org/synapse/issues/11838))
+- Add type hints to `tests/rest/admin`. ([\#11851](https://github.com/matrix-org/synapse/issues/11851))
+
+
 Synapse 1.51.0 (2022-01-25)
 ===========================

debian/changelog (12 lines changed)

@@ -1,3 +1,15 @@
+matrix-synapse-py3 (1.52.0) stable; urgency=medium
+
+  * New synapse release 1.52.0.
+
+ -- Synapse Packaging team <packages@matrix.org>  Tue, 08 Feb 2022 11:34:54 +0000
+
+matrix-synapse-py3 (1.52.0~rc1) stable; urgency=medium
+
+  * New synapse release 1.52.0~rc1.
+
+ -- Synapse Packaging team <packages@matrix.org>  Tue, 01 Feb 2022 11:04:09 +0000
+
 matrix-synapse-py3 (1.51.0) stable; urgency=medium

   * New synapse release 1.51.0.

docker/Dockerfile-pgtests

@@ -1,6 +1,6 @@
 # Use the Sytest image that comes with a lot of the build dependencies
 # pre-installed
-FROM matrixdotorg/sytest:bionic
+FROM matrixdotorg/sytest:focal

 # The Sytest image doesn't come with python, so install that
 RUN apt-get update && apt-get -qq install -y python3 python3-dev python3-pip

docker/run_pg_tests.sh

@@ -16,4 +16,4 @@ sudo -u postgres /usr/lib/postgresql/10/bin/pg_ctl -w -D /var/lib/postgresql/dat
 # Run the tests
 cd /src
 export TRIAL_FLAGS="-j 4"
-tox --workdir=./.tox-pg-container -e py36-postgres "$@"
+tox --workdir=./.tox-pg-container -e py37-postgres "$@"

docs/MSC1711_certificates_FAQ.md

@@ -44,27 +44,6 @@ For more details and context on the release of the r0.1 Server/Server API and
 imminent Matrix 1.0 release, you can also see our
 [main talk from FOSDEM 2019](https://matrix.org/blog/2019/02/04/matrix-at-fosdem-2019/).

-## Contents
-* Timeline
-* Configuring certificates for compatibility with Synapse 1.0
-* FAQ
-  * Synapse 0.99.0 has just been released, what do I need to do right now?
-  * How do I upgrade?
-  * What will happen if I do not set up a valid federation certificate
-    immediately?
-  * What will happen if I do nothing at all?
-  * When do I need a SRV record or .well-known URI?
-  * Can I still use an SRV record?
-  * I have created a .well-known URI. Do I still need an SRV record?
-  * It used to work just fine, why are you breaking everything?
-  * Can I manage my own certificates rather than having Synapse renew
-    certificates itself?
-  * Do you still recommend against using a reverse proxy on the federation port?
-  * Do I still need to give my TLS certificates to Synapse if I am using a
-    reverse proxy?
-  * Do I need the same certificate for the client and federation port?
-  * How do I tell Synapse to reload my keys/certificates after I replace them?
-
 ## Timeline

 **5th Feb 2019 - Synapse 0.99.0 is released.**

docs/admin_api/account_validity.md

@@ -4,6 +4,9 @@ This API allows a server administrator to manage the validity of an account. To
 use it, you must enable the account validity feature (under
 `account_validity`) in Synapse's configuration.

+To use it, you will need to authenticate by providing an `access_token`
+for a server admin: see [Admin API](../usage/administration/admin_api).
+
 ## Renew account

 This API extends the validity of an account by as much time as configured in the

docs/admin_api/delete_group.md

@@ -4,11 +4,11 @@ This API lets a server admin delete a local group. Doing so will kick all
 users out of the group so that their clients will correctly handle the group
 being deleted.

+To use it, you will need to authenticate by providing an `access_token`
+for a server admin: see [Admin API](../usage/administration/admin_api).
+
 The API is:

 ```
 POST /_synapse/admin/v1/delete_group/<group_id>
 ```
-
-To use it, you will need to authenticate by providing an `access_token` for a
-server admin: see [Admin API](../usage/administration/admin_api).

docs/admin_api/event_reports.md

@@ -2,12 +2,13 @@

 This API returns information about reported events.

+To use it, you will need to authenticate by providing an `access_token`
+for a server admin: see [Admin API](../usage/administration/admin_api).
+
 The api is:
 ```
 GET /_synapse/admin/v1/event_reports?from=0&limit=10
 ```
-To use it, you will need to authenticate by providing an `access_token` for a
-server admin: see [Admin API](../usage/administration/admin_api).

 It returns a JSON body like the following:

@@ -94,8 +95,6 @@ The api is:
 ```
 GET /_synapse/admin/v1/event_reports/<report_id>
 ```
-To use it, you will need to authenticate by providing an `access_token` for a
-server admin: see [Admin API](../usage/administration/admin_api).

 It returns a JSON body like the following:

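All of the admin endpoints touched in this commit authenticate the same way: a server admin's `access_token` is sent as a bearer token. A minimal sketch of such a call, using the event reports endpoint shown above (the homeserver URL and token are placeholder values, and `requests` is just one convenient HTTP client):

```python
import requests

HOMESERVER = "https://synapse.example.com"  # placeholder homeserver URL
ADMIN_TOKEN = "<admin access_token>"        # access_token of a server admin

# Fetch the first page of event reports via the admin API.
resp = requests.get(
    f"{HOMESERVER}/_synapse/admin/v1/event_reports",
    params={"from": 0, "limit": 10},
    headers={"Authorization": f"Bearer {ADMIN_TOKEN}"},
)
resp.raise_for_status()
print(resp.json())
```
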
docs/admin_api/media_admin_api.md

@@ -1,24 +1,10 @@
-# Contents
-- [Querying media](#querying-media)
-  * [List all media in a room](#list-all-media-in-a-room)
-  * [List all media uploaded by a user](#list-all-media-uploaded-by-a-user)
-- [Quarantine media](#quarantine-media)
-  * [Quarantining media by ID](#quarantining-media-by-id)
-  * [Remove media from quarantine by ID](#remove-media-from-quarantine-by-id)
-  * [Quarantining media in a room](#quarantining-media-in-a-room)
-  * [Quarantining all media of a user](#quarantining-all-media-of-a-user)
-  * [Protecting media from being quarantined](#protecting-media-from-being-quarantined)
-  * [Unprotecting media from being quarantined](#unprotecting-media-from-being-quarantined)
-- [Delete local media](#delete-local-media)
-  * [Delete a specific local media](#delete-a-specific-local-media)
-  * [Delete local media by date or size](#delete-local-media-by-date-or-size)
-  * [Delete media uploaded by a user](#delete-media-uploaded-by-a-user)
-- [Purge Remote Media API](#purge-remote-media-api)
-
 # Querying media

 These APIs allow extracting media information from the homeserver.

+To use it, you will need to authenticate by providing an `access_token`
+for a server admin: see [Admin API](../usage/administration/admin_api).
+
 ## List all media in a room

 This API gets a list of known media in a room.

@@ -28,8 +14,6 @@ The API is:
 ```
 GET /_synapse/admin/v1/room/<room_id>/media
 ```
-To use it, you will need to authenticate by providing an `access_token` for a
-server admin: see [Admin API](../usage/administration/admin_api).

 The API returns a JSON body like the following:
 ```json

@@ -317,8 +301,5 @@ The following fields are returned in the JSON response body:

 * `deleted`: integer - The number of media items successfully deleted

-To use it, you will need to authenticate by providing an `access_token` for a
-server admin: see [Admin API](../usage/administration/admin_api).
-
 If the user re-requests purged remote media, synapse will re-request the media
 from the originating server.

docs/admin_api/purge_history_api.md

@@ -10,15 +10,15 @@ paginate further back in the room from the point being purged from.
 Note that Synapse requires at least one message in each room, so it will never
 delete the last message in a room.

+To use it, you will need to authenticate by providing an `access_token`
+for a server admin: see [Admin API](../usage/administration/admin_api).
+
 The API is:

 ```
 POST /_synapse/admin/v1/purge_history/<room_id>[/<event_id>]
 ```

-To use it, you will need to authenticate by providing an `access_token` for a
-server admin: [Admin API](../usage/administration/admin_api)
-
 By default, events sent by local users are not deleted, as they may represent
 the only copies of this content in existence. (Events sent by remote users are
 deleted.)

@@ -57,9 +57,6 @@ It is possible to poll for updates on recent purges with a second API;
 GET /_synapse/admin/v1/purge_history_status/<purge_id>
 ```

-Again, you will need to authenticate by providing an `access_token` for a
-server admin.
-
 This API returns a JSON body like the following:

 ```json

docs/admin_api/room_membership.md

@@ -5,6 +5,9 @@ to a room with a given `room_id_or_alias`. You can only modify the membership of
 local users. The server administrator must be in the room and have permission to
 invite users.

+To use it, you will need to authenticate by providing an `access_token`
+for a server admin: see [Admin API](../usage/administration/admin_api).
+
 ## Parameters

 The following parameters are available:

@@ -23,9 +26,6 @@ POST /_synapse/admin/v1/join/<room_id_or_alias>
 }
 ```

-To use it, you will need to authenticate by providing an `access_token` for a
-server admin: see [Admin API](../usage/administration/admin_api).
-
 Response:

 ```json

docs/admin_api/rooms.md

@@ -1,24 +1,12 @@
-# Contents
-- [List Room API](#list-room-api)
-- [Room Details API](#room-details-api)
-- [Room Members API](#room-members-api)
-- [Room State API](#room-state-api)
-- [Block Room API](#block-room-api)
-- [Delete Room API](#delete-room-api)
-  * [Version 1 (old version)](#version-1-old-version)
-  * [Version 2 (new version)](#version-2-new-version)
-  * [Status of deleting rooms](#status-of-deleting-rooms)
-  * [Undoing room shutdowns](#undoing-room-shutdowns)
-- [Make Room Admin API](#make-room-admin-api)
-- [Forward Extremities Admin API](#forward-extremities-admin-api)
-- [Event Context API](#event-context-api)
-
 # List Room API

 The List Room admin API allows server admins to get a list of rooms on their
 server. There are various parameters available that allow for filtering and
 sorting the returned list. This API supports pagination.

+To use it, you will need to authenticate by providing an `access_token`
+for a server admin: see [Admin API](../usage/administration/admin_api).
+
 **Parameters**

 The following query parameters are available:

@@ -493,9 +481,6 @@ several minutes or longer.
 The local server will only have the power to move local user and room aliases to
 the new room. Users on other servers will be unaffected.

-To use it, you will need to authenticate by providing an ``access_token`` for a
-server admin: see [Admin API](../usage/administration/admin_api).
-
 ## Version 1 (old version)

 This version works synchronously. That means you only get the response once the server has

docs/admin_api/statistics.md

@@ -3,15 +3,15 @@
 Returns information about all local media usage of users. Gives the
 possibility to filter them by time and user.

+To use it, you will need to authenticate by providing an `access_token`
+for a server admin: see [Admin API](../usage/administration/admin_api).
+
 The API is:

 ```
 GET /_synapse/admin/v1/statistics/users/media
 ```

-To use it, you will need to authenticate by providing an `access_token`
-for a server admin: see [Admin API](../usage/administration/admin_api).
-
 A response body like the following is returned:

 ```json

docs/admin_api/user_admin_api.md

@@ -1,5 +1,8 @@
 # User Admin API

+To use it, you will need to authenticate by providing an `access_token`
+for a server admin: see [Admin API](../usage/administration/admin_api).
+
 ## Query User Account

 This API returns information about a specific user account.

@@ -10,9 +13,6 @@ The api is:
 GET /_synapse/admin/v2/users/<user_id>
 ```

-To use it, you will need to authenticate by providing an `access_token` for a
-server admin: [Admin API](../usage/administration/admin_api)
-
 It returns a JSON body like the following:

 ```jsonc

@@ -104,9 +104,6 @@ with a body of:
 }
 ```

-To use it, you will need to authenticate by providing an `access_token` for a
-server admin: [Admin API](../usage/administration/admin_api)
-
 Returns HTTP status code:
 - `201` - When a new user object was created.
 - `200` - When a user was modified.

@@ -156,9 +153,6 @@ By default, the response is ordered by ascending user ID.
 GET /_synapse/admin/v2/users?from=0&limit=10&guests=false
 ```

-To use it, you will need to authenticate by providing an `access_token` for a
-server admin: [Admin API](../usage/administration/admin_api)
-
 A response body like the following is returned:

 ```json

@@ -278,9 +272,6 @@ GET /_matrix/client/r0/admin/whois/<userId>
 See also: [Client Server
 API Whois](https://matrix.org/docs/spec/client_server/r0.6.1#get-matrix-client-r0-admin-whois-userid).

-To use it, you will need to authenticate by providing an `access_token` for a
-server admin: [Admin API](../usage/administration/admin_api)
-
 It returns a JSON body like the following:

 ```json

@@ -335,9 +326,6 @@ with a body of:
 }
 ```

-To use it, you will need to authenticate by providing an `access_token` for a
-server admin: [Admin API](../usage/administration/admin_api)
-
 The erase parameter is optional and defaults to `false`.
 An empty body may be passed for backwards compatibility.

@@ -353,6 +341,11 @@ The following actions are performed when deactivating an user:
 - Remove the user from the user directory
 - Reject all pending invites
 - Remove all account validity information related to the user
+- Remove the arbitrary data store known as *account data*. For example, this includes:
+    - list of ignored users;
+    - push rules;
+    - secret storage keys; and
+    - cross-signing keys.

 The following additional actions are performed during deactivation if `erase`
 is set to `true`:

@@ -366,7 +359,6 @@ The following actions are **NOT** performed. The list may be incomplete.
 - Remove mappings of SSO IDs
 - [Delete media uploaded](#delete-media-uploaded-by-a-user) by user (included avatar images)
 - Delete sent and received messages
-- Delete E2E cross-signing keys
 - Remove the user's creation (registration) timestamp
 - [Remove rate limit overrides](#override-ratelimiting-for-users)
 - Remove from monthly active users

@@ -390,9 +382,6 @@ with a body of:
 }
 ```

-To use it, you will need to authenticate by providing an `access_token` for a
-server admin: [Admin API](../usage/administration/admin_api)
-
 The parameter `new_password` is required.
 The parameter `logout_devices` is optional and defaults to `true`.

@@ -405,9 +394,6 @@ The api is:
 GET /_synapse/admin/v1/users/<user_id>/admin
 ```

-To use it, you will need to authenticate by providing an `access_token` for a
-server admin: [Admin API](../usage/administration/admin_api)
-
 A response body like the following is returned:

 ```json

@@ -435,10 +421,6 @@ with a body of:
 }
 ```

-To use it, you will need to authenticate by providing an `access_token` for a
-server admin: [Admin API](../usage/administration/admin_api)
-
-
 ## List room memberships of a user

 Gets a list of all `room_id` that a specific `user_id` is member.

@@ -449,9 +431,6 @@ The API is:
 GET /_synapse/admin/v1/users/<user_id>/joined_rooms
 ```

-To use it, you will need to authenticate by providing an `access_token` for a
-server admin: [Admin API](../usage/administration/admin_api)
-
 A response body like the following is returned:

 ```json

@@ -570,9 +549,6 @@ The API is:
 GET /_synapse/admin/v1/users/<user_id>/media
 ```

-To use it, you will need to authenticate by providing an `access_token` for a
-server admin: [Admin API](../usage/administration/admin_api)
-
 A response body like the following is returned:

 ```json

@@ -687,9 +663,6 @@ The API is:
 DELETE /_synapse/admin/v1/users/<user_id>/media
 ```

-To use it, you will need to authenticate by providing an `access_token` for a
-server admin: [Admin API](../usage/administration/admin_api)
-
 A response body like the following is returned:

 ```json

@@ -762,9 +735,6 @@ The API is:
 GET /_synapse/admin/v2/users/<user_id>/devices
 ```

-To use it, you will need to authenticate by providing an `access_token` for a
-server admin: [Admin API](../usage/administration/admin_api)
-
 A response body like the following is returned:

 ```json

@@ -830,9 +800,6 @@ POST /_synapse/admin/v2/users/<user_id>/delete_devices
 }
 ```

-To use it, you will need to authenticate by providing an `access_token` for a
-server admin: [Admin API](../usage/administration/admin_api)
-
 An empty JSON dict is returned.

 **Parameters**

@@ -854,9 +821,6 @@ The API is:
 GET /_synapse/admin/v2/users/<user_id>/devices/<device_id>
 ```

-To use it, you will need to authenticate by providing an `access_token` for a
-server admin: [Admin API](../usage/administration/admin_api)
-
 A response body like the following is returned:

 ```json

@@ -902,9 +866,6 @@ PUT /_synapse/admin/v2/users/<user_id>/devices/<device_id>
 }
 ```

-To use it, you will need to authenticate by providing an `access_token` for a
-server admin: [Admin API](../usage/administration/admin_api)
-
 An empty JSON dict is returned.

 **Parameters**

@@ -931,9 +892,6 @@ DELETE /_synapse/admin/v2/users/<user_id>/devices/<device_id>
 {}
 ```

-To use it, you will need to authenticate by providing an `access_token` for a
-server admin: [Admin API](../usage/administration/admin_api)
-
 An empty JSON dict is returned.

 **Parameters**

@@ -952,9 +910,6 @@ The API is:
 GET /_synapse/admin/v1/users/<user_id>/pushers
 ```

-To use it, you will need to authenticate by providing an `access_token` for a
-server admin: [Admin API](../usage/administration/admin_api)
-
 A response body like the following is returned:

 ```json

@@ -1049,9 +1004,6 @@ To un-shadow-ban a user the API is:
 DELETE /_synapse/admin/v1/users/<user_id>/shadow_ban
 ```

-To use it, you will need to authenticate by providing an `access_token` for a
-server admin: [Admin API](../usage/administration/admin_api)
-
 An empty JSON dict is returned in both cases.

 **Parameters**

@@ -1074,9 +1026,6 @@ The API is:
 GET /_synapse/admin/v1/users/<user_id>/override_ratelimit
 ```

-To use it, you will need to authenticate by providing an `access_token` for a
-server admin: [Admin API](../usage/administration/admin_api)
-
 A response body like the following is returned:

 ```json

@@ -1116,9 +1065,6 @@ The API is:
 POST /_synapse/admin/v1/users/<user_id>/override_ratelimit
 ```

-To use it, you will need to authenticate by providing an `access_token` for a
-server admin: [Admin API](../usage/administration/admin_api)
-
 A response body like the following is returned:

 ```json

@@ -1161,9 +1107,6 @@ The API is:
 DELETE /_synapse/admin/v1/users/<user_id>/override_ratelimit
 ```

-To use it, you will need to authenticate by providing an `access_token` for a
-server admin: [Admin API](../usage/administration/admin_api)
-
 An empty JSON dict is returned.

 ```json

@@ -1192,7 +1135,5 @@ The API is:
 GET /_synapse/admin/v1/username_available?username=$localpart
 ```

-The request and response format is the same as the [/_matrix/client/r0/register/available](https://matrix.org/docs/spec/client_server/r0.6.0#get-matrix-client-r0-register-available) API.
-To use it, you will need to authenticate by providing an `access_token` for a
-server admin: [Admin API](../usage/administration/admin_api)
+The request and response format is the same as the
+[/_matrix/client/r0/register/available](https://matrix.org/docs/spec/client_server/r0.6.0#get-matrix-client-r0-register-available) API.

docs/admin_api/version_api.md

@@ -16,6 +16,6 @@ It returns a JSON body like the following:
 ```json
 {
     "server_version": "0.99.2rc1 (b=develop, abcdef123)",
-    "python_version": "3.6.8"
+    "python_version": "3.7.8"
 }
 ```

docs/development/contributing_guide.md

@@ -55,6 +55,7 @@ setup a *virtualenv*, as follows:
 cd path/where/you/have/cloned/the/repository
 python3 -m venv ./env
 source ./env/bin/activate
+pip install wheel
 pip install -e ".[all,dev]"
 pip install tox
 ```

@@ -116,7 +117,7 @@ The linters look at your code and do two things:
 - ensure that your code follows the coding style adopted by the project;
 - catch a number of errors in your code.

-They're pretty fast, don't hesitate!
+The linter takes no time at all to run as soon as you've [downloaded the dependencies into your python virtual environment](#4-install-the-dependencies).

 ```sh
 source ./env/bin/activate

docs/development/database_schema.md

@@ -96,6 +96,60 @@ Ensure postgres is installed, then run:
 NB at the time of writing, this script predates the split into separate `state`/`main`
 databases so will require updates to handle that correctly.

+## Delta files
+
+Delta files define the steps required to upgrade the database from an earlier version.
+They can be written as either a file containing a series of SQL statements, or a Python
+module.
+
+Synapse remembers which delta files it has applied to a database (they are stored in the
+`applied_schema_deltas` table) and will not re-apply them (even if a given file is
+subsequently updated).
+
+Delta files should be placed in a directory named `synapse/storage/schema/<database>/delta/<version>/`.
+They are applied in alphanumeric order, so by convention the first two characters
+of the filename should be an integer such as `01`, to put the file in the right order.
+
+### SQL delta files
+
+These should be named `*.sql`, or — for changes which should only be applied for a
+given database engine — `*.sql.posgres` or `*.sql.sqlite`. For example, a delta which
+adds a new column to the `foo` table might be called `01add_bar_to_foo.sql`.
+
+Note that our SQL parser is a bit simple - it understands comments (`--` and `/*...*/`),
+but complex statements which require a `;` in the middle of them (such as `CREATE
+TRIGGER`) are beyond it and you'll have to use a Python delta file.
+
+### Python delta files
+
+For more flexibility, a delta file can take the form of a python module. These should
+be named `*.py`. Note that database-engine-specific modules are not supported here –
+instead you can write `if isinstance(database_engine, PostgresEngine)` or similar.
+
+A Python delta module should define either or both of the following functions:
+
+```python
+import synapse.config.homeserver
+import synapse.storage.engines
+import synapse.storage.types
+
+
+def run_create(
+    cur: synapse.storage.types.Cursor,
+    database_engine: synapse.storage.engines.BaseDatabaseEngine,
+) -> None:
+    """Called whenever an existing or new database is to be upgraded"""
+    ...
+
+def run_upgrade(
+    cur: synapse.storage.types.Cursor,
+    database_engine: synapse.storage.engines.BaseDatabaseEngine,
+    config: synapse.config.homeserver.HomeServerConfig,
+) -> None:
+    """Called whenever an existing database is to be upgraded."""
+    ...
+```
+
 ## Boolean columns

 Boolean columns require special treatment, since SQLite treats booleans the

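To make the new section concrete, here is what a complete Python delta file might look like under the conventions it describes (the version directory, `foo` table, and `bar` column are invented for illustration):

```python
import synapse.storage.engines
import synapse.storage.types

# Hypothetical path: synapse/storage/schema/main/delta/68/01add_bar_to_foo.py


def run_create(
    cur: synapse.storage.types.Cursor,
    database_engine: synapse.storage.engines.BaseDatabaseEngine,
) -> None:
    """Called whenever an existing or new database is to be upgraded."""
    # Engine-specific behaviour is expressed with an isinstance check rather
    # than separate .sql.posgres / .sql.sqlite files, as described above.
    if isinstance(database_engine, synapse.storage.engines.PostgresEngine):
        cur.execute("ALTER TABLE foo ADD COLUMN bar BIGINT DEFAULT 0")
    else:
        cur.execute("ALTER TABLE foo ADD COLUMN bar INTEGER DEFAULT 0")
```
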
docs/modules/password_auth_provider_callbacks.md

@@ -105,6 +105,68 @@ device ID), and the (now deactivated) access token.

 If multiple modules implement this callback, Synapse runs them all in order.

+### `get_username_for_registration`
+
+_First introduced in Synapse v1.52.0_
+
+```python
+async def get_username_for_registration(
+    uia_results: Dict[str, Any],
+    params: Dict[str, Any],
+) -> Optional[str]
+```
+
+Called when registering a new user. The module can return a username to set for the user
+being registered by returning it as a string, or `None` if it doesn't wish to force a
+username for this user. If a username is returned, it will be used as the local part of a
+user's full Matrix ID (e.g. it's `alice` in `@alice:example.com`).
+
+This callback is called once [User-Interactive Authentication](https://spec.matrix.org/latest/client-server-api/#user-interactive-authentication-api)
+has been completed by the user. It is not called when registering a user via SSO. It is
+passed two dictionaries, which include the information that the user has provided during
+the registration process.
+
+The first dictionary contains the results of the [User-Interactive Authentication](https://spec.matrix.org/latest/client-server-api/#user-interactive-authentication-api)
+flow followed by the user. Its keys are the identifiers of every step involved in the flow,
+associated with either a boolean value indicating whether the step was correctly completed,
+or additional information (e.g. email address, phone number...). A list of most existing
+identifiers can be found in the [Matrix specification](https://spec.matrix.org/v1.1/client-server-api/#authentication-types).
+Here's an example featuring all currently supported keys:
+
+```python
+{
+    "m.login.dummy": True,  # Dummy authentication
+    "m.login.terms": True,  # User has accepted the terms of service for the homeserver
+    "m.login.recaptcha": True,  # User has completed the recaptcha challenge
+    "m.login.email.identity": {  # User has provided and verified an email address
+        "medium": "email",
+        "address": "alice@example.com",
+        "validated_at": 1642701357084,
+    },
+    "m.login.msisdn": {  # User has provided and verified a phone number
+        "medium": "msisdn",
+        "address": "33123456789",
+        "validated_at": 1642701357084,
+    },
+    "org.matrix.msc3231.login.registration_token": "sometoken",  # User has registered through the flow described in MSC3231
+}
+```
+
+The second dictionary contains the parameters provided by the user's client in the request
+to `/_matrix/client/v3/register`. See the [Matrix specification](https://spec.matrix.org/latest/client-server-api/#post_matrixclientv3register)
+for a complete list of these parameters.
+
+If the module cannot, or does not wish to, generate a username for this user, it must
+return `None`.
+
+If multiple modules implement this callback, they will be considered in order. If a
+callback returns `None`, Synapse falls through to the next one. The value of the first
+callback that does not return `None` will be used. If this happens, Synapse will not call
+any of the subsequent implementations of this callback. If every callback return `None`,
+the username provided by the user is used, if any (otherwise one is automatically
+generated).
+
+
 ## Example

 The example module below implements authentication checkers for two different login types:

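For illustration, a module implementing the new `get_username_for_registration` callback might look like the following sketch. The class name and the email-based username scheme are hypothetical; the registration mechanism shown (`register_password_auth_provider_callbacks` on the module API, called from the module's constructor) is the one these callbacks use:

```python
from typing import Any, Dict, Optional


class UsernameFromEmail:
    """Hypothetical module deriving the localpart from a verified email."""

    def __init__(self, config: dict, api):
        self._api = api
        # Register the callback documented above.
        api.register_password_auth_provider_callbacks(
            get_username_for_registration=self.get_username_for_registration,
        )

    async def get_username_for_registration(
        self,
        uia_results: Dict[str, Any],
        params: Dict[str, Any],
    ) -> Optional[str]:
        # Only act when the UIA flow verified an email address.
        threepid = uia_results.get("m.login.email.identity")
        if not threepid:
            return None  # fall through to the next module / default behaviour
        # e.g. "alice@example.com" -> "alice"
        return threepid["address"].split("@")[0]
```
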
docs/sample_config.yaml

@@ -41,11 +41,11 @@
 # documentation on how to configure or create custom modules for Synapse.
 #
 modules:
-  # - module: my_super_module.MySuperClass
+  #- module: my_super_module.MySuperClass
   #   config:
   #     do_thing: true
-  # - module: my_other_super_module.SomeClass
+  #- module: my_other_super_module.SomeClass
   #   config: {}


 ## Server ##

@@ -471,6 +471,20 @@ limit_remote_rooms:
 #
 #allow_per_room_profiles: false

+# The largest allowed file size for a user avatar. Defaults to no restriction.
+#
+# Note that user avatar changes will not work if this is set without
+# using Synapse's media repository.
+#
+#max_avatar_size: 10M
+
+# The MIME types allowed for user avatars. Defaults to no restriction.
+#
+# Note that user avatar changes will not work if this is set without
+# using Synapse's media repository.
+#
+#allowed_avatar_mimetypes: ["image/png", "image/jpeg", "image/gif"]
+
 # How long to keep redacted events in unredacted form in the database. After
 # this period redacted events get replaced with their redacted form in the DB.
 #

@@ -1428,6 +1442,16 @@ account_threepid_delegates:
 #
 #auto_join_rooms_for_guests: false

+# Whether to inhibit errors raised when registering a new account if the user ID
+# already exists. If turned on, that requests to /register/available will always
+# show a user ID as available, and Synapse won't raise an error when starting
+# a registration with a user ID that already exists. However, Synapse will still
+# raise an error if the registration completes and the username conflicts.
+#
+# Defaults to false.
+#
+#inhibit_user_in_use_error: true
+

 ## Metrics ###

docs/setup/installation.md

@@ -194,7 +194,7 @@ When following this route please make sure that the [Platform-specific prerequis
 System requirements:

 - POSIX-compliant system (tested on Linux & OS X)
-- Python 3.7 or later, up to Python 3.9.
+- Python 3.7 or later, up to Python 3.10.
 - At least 1GB of free RAM if you want to join large public rooms like #matrix:matrix.org

 To install the Synapse homeserver run:

docs/upgrade.md

@@ -85,6 +85,18 @@ process, for example:
 dpkg -i matrix-synapse-py3_1.3.0+stretch1_amd64.deb
 ```

+# Upgrading to v1.52.0
+
+## Twisted security release
+
+Note that [Twisted 22.1.0](https://github.com/twisted/twisted/releases/tag/twisted-22.1.0)
+has recently been released, which fixes a [security issue](https://github.com/twisted/twisted/security/advisories/GHSA-92x2-jw7w-xvvx)
+within the Twisted library. We do not believe Synapse is affected by this vulnerability,
+though we advise server administrators who installed Synapse via pip to upgrade Twisted
+with `pip install --upgrade Twisted` as a matter of good practice. The Docker image
+`matrixdotorg/synapse` and the Debian packages from `packages.matrix.org` are using the
+updated library.
+
 # Upgrading to v1.51.0

 ## Deprecation of `webclient` listeners and non-HTTP(S) `web_client_location`

docs/usage/administration/admin_api/federation.md

@@ -86,7 +86,7 @@ The following fields are returned in the JSON response body:
 - `next_token`: string representing a positive integer - Indication for pagination. See above.
 - `total` - integer - Total number of destinations.

-# Destination Details API
+## Destination Details API

 This API gets the retry timing info for a specific remote server.

@@ -108,7 +108,105 @@ A response body like the following is returned:
 }
 ```

+**Parameters**
+
+The following parameters should be set in the URL:
+
+- `destination` - Name of the remote server.
+
 **Response**

 The response fields are the same as in the `destinations` array in the
 [List of destinations](#list-of-destinations) response.

+## Destination rooms
+
+This API gets the rooms that federate with a specific remote server.
+
+The API is:
+
+```
+GET /_synapse/admin/v1/federation/destinations/<destination>/rooms
+```
+
+A response body like the following is returned:
+
+```json
+{
+   "rooms":[
+      {
+         "room_id": "!OGEhHVWSdvArJzumhm:matrix.org",
+         "stream_ordering": 8326
+      },
+      {
+         "room_id": "!xYvNcQPhnkrdUmYczI:matrix.org",
+         "stream_ordering": 93534
+      }
+   ],
+   "total": 2
+}
+```
+
+To paginate, check for `next_token` and, if present, call the endpoint again
+with `from` set to the value of `next_token`. This will return a new page.
+
+If the endpoint does not return a `next_token` then there are no more rooms
+to paginate through.
+
+**Parameters**
+
+The following parameters should be set in the URL:
+
+- `destination` - Name of the remote server.
+
+The following query parameters are available:
+
+- `from` - Offset in the returned list. Defaults to `0`.
+- `limit` - Maximum number of rooms to return. Defaults to `100`.
+- `dir` - Direction of room order by `room_id`. Either `f` for forwards or `b` for
+  backwards. Defaults to `f`.
+
+**Response**
+
+The following fields are returned in the JSON response body:
+
+- `rooms` - An array of objects, each containing information about a room.
+  Room objects contain the following fields:
+  - `room_id` - string - The ID of the room.
+  - `stream_ordering` - integer - The stream ordering of the most recent
+    successfully-sent [PDU](understanding_synapse_through_grafana_graphs.md#federation)
+    to this destination in this room.
+- `next_token`: string representing a positive integer - Indication for pagination. See above.
+- `total` - integer - Total number of rooms.
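By way of illustration, here is a short Python sketch (not part of the commit) that pages through the rooms endpoint with the `requests` library; the base URL, admin token, and destination name are placeholders you would substitute for your own deployment:

```python
import requests

BASE = "https://homeserver.example.com"  # assumption: your homeserver's base URL
TOKEN = "<admin access token>"           # assumption: an admin user's access token


def iter_destination_rooms(destination: str):
    """Yield every room federating with `destination`, following next_token."""
    from_ = 0
    while True:
        resp = requests.get(
            f"{BASE}/_synapse/admin/v1/federation/destinations/{destination}/rooms",
            headers={"Authorization": f"Bearer {TOKEN}"},
            params={"from": from_, "limit": 100},
        )
        resp.raise_for_status()
        body = resp.json()
        yield from body["rooms"]
        # No next_token in the body means there are no further pages.
        if "next_token" not in body:
            break
        from_ = int(body["next_token"])


for room in iter_destination_rooms("matrix.org"):
    print(room["room_id"], room["stream_ordering"])
```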
+## Reset connection timeout
+
+Synapse makes federation requests to other homeservers. If a federation request fails,
+Synapse will mark the destination homeserver as offline, preventing any future requests
+to that server for a "cooldown" period. This period grows over time if the server
+continues to fail to respond
+([exponential backoff](https://en.wikipedia.org/wiki/Exponential_backoff)).
+
+Admins can cancel the cooldown period with this API.
+
+This API resets the retry timing for a specific remote server and tries to connect to
+the remote server again. It does not wait for the next `retry_interval`.
+The connection must have previously run into an error and `retry_last_ts`
+([Destination Details API](#destination-details-api)) must not be equal to `0`.
+
+The connection attempt is carried out in the background and can take some time,
+even though the API returns HTTP status 200 immediately.
+
+The API is:
+
+```
+POST /_synapse/admin/v1/federation/destinations/<destination>/reset_connection
+
+{}
+```
+
+**Parameters**
+
+The following parameters should be set in the URL:
+
+- `destination` - Name of the remote server.
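A companion sketch (again not part of the commit) for triggering a reset; note the empty JSON object body, and that a 200 response only means the background attempt was scheduled:

```python
import requests

BASE = "https://homeserver.example.com"  # assumption: your homeserver's base URL
TOKEN = "<admin access token>"           # assumption: an admin user's access token


def reset_destination(destination: str) -> None:
    # The request body must be a (possibly empty) JSON object.
    resp = requests.post(
        f"{BASE}/_synapse/admin/v1/federation/destinations/{destination}/reset_connection",
        headers={"Authorization": f"Bearer {TOKEN}"},
        json={},
    )
    resp.raise_for_status()  # 200 means scheduled, not that the server is reachable


reset_destination("matrix.org")
```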
mypy.ini

@@ -77,9 +77,6 @@ exclude = (?x)
   |tests/push/test_http.py
   |tests/push/test_presentable_names.py
   |tests/push/test_push_rule_evaluator.py
-  |tests/rest/admin/test_admin.py
-  |tests/rest/admin/test_user.py
-  |tests/rest/admin/test_username_available.py
   |tests/rest/client/test_account.py
   |tests/rest/client/test_events.py
   |tests/rest/client/test_filter.py
setup.py

@@ -150,7 +150,7 @@ setup(
     zip_safe=False,
     long_description=long_description,
     long_description_content_type="text/x-rst",
-    python_requires="~=3.6",
+    python_requires="~=3.7",
     entry_points={
         "console_scripts": [
             "synapse_homeserver = synapse.app.homeserver:main",
synapse/__init__.py

@@ -21,8 +21,8 @@ import os
 import sys

 # Check that we're not running on an unsupported Python version.
-if sys.version_info < (3, 6):
-    print("Synapse requires Python 3.6 or above.")
+if sys.version_info < (3, 7):
+    print("Synapse requires Python 3.7 or above.")
     sys.exit(1)

 # Twisted and canonicaljson will fail to import when this file is executed to

@@ -47,7 +47,7 @@ try:
 except ImportError:
     pass

-__version__ = "1.51.0"
+__version__ = "1.52.0"

 if bool(os.environ.get("SYNAPSE_TEST_PATCH_LOG_CONTEXTS", False)):
     # We import here so that we don't have to install a bunch of deps when
synapse/app/_base.py

@@ -16,7 +16,6 @@ import atexit
 import gc
 import logging
 import os
-import platform
 import signal
 import socket
 import sys

@@ -436,7 +435,8 @@ async def start(hs: "HomeServer") -> None:
     # before we start the listeners.
     module_api = hs.get_module_api()
     for module, config in hs.config.modules.loaded_modules:
-        module(config=config, api=module_api)
+        m = module(config=config, api=module_api)
+        logger.info("Loaded module %s", m)

     load_legacy_spam_checkers(hs)
     load_legacy_third_party_event_rules(hs)

@@ -468,15 +468,13 @@ async def start(hs: "HomeServer") -> None:
     # everything currently allocated are things that will be used for the
     # rest of time. Doing so means less work each GC (hopefully).
     #
-    # This only works on Python 3.7
-    if platform.python_implementation() == "CPython" and sys.version_info >= (3, 7):
+    # PyPy does not (yet?) implement gc.freeze()
+    if hasattr(gc, "freeze"):
         gc.collect()
         gc.freeze()

         # Speed up shutdowns by freezing all allocated objects. This moves everything
         # into the permanent generation and excludes them from the final GC.
-        # Unfortunately only works on Python 3.7
-        if platform.python_implementation() == "CPython" and sys.version_info >= (3, 7):
         atexit.register(gc.freeze)
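The `_base.py` hunk above replaces an interpreter/version check with feature detection. A self-contained sketch of the same pattern, for illustration only:

```python
import atexit
import gc

# Feature-detect gc.freeze() rather than checking interpreter and version:
# CPython 3.7+ provides it, PyPy (at the time of this release) does not.
if hasattr(gc, "freeze"):
    gc.collect()  # collect garbage first, so only live objects get frozen
    gc.freeze()   # move survivors to the permanent generation, shrinking later GC passes
    # Freezing again at exit excludes objects allocated since startup
    # from the final collection, speeding up shutdown.
    atexit.register(gc.freeze)
```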
synapse/config/experimental.py

@@ -24,8 +24,6 @@ class ExperimentalConfig(Config):
     def read_config(self, config: JsonDict, **kwargs):
         experimental = config.get("experimental_features") or {}

-        # Whether to enable experimental MSC1849 (aka relations) support
-        self.msc1849_enabled = config.get("experimental_msc1849_support_enabled", True)
-
         # MSC3440 (thread relation)
         self.msc3440_enabled: bool = experimental.get("msc3440_enabled", False)
synapse/config/modules.py

@@ -41,9 +41,9 @@ class ModulesConfig(Config):
        # documentation on how to configure or create custom modules for Synapse.
        #
        modules:
-          # - module: my_super_module.MySuperClass
+          #- module: my_super_module.MySuperClass
           #  config:
           #    do_thing: true
-          # - module: my_other_super_module.SomeClass
+          #- module: my_other_super_module.SomeClass
           #  config: {}
        """
synapse/config/registration.py

@@ -190,6 +190,8 @@ class RegistrationConfig(Config):
         # The success template used during fallback auth.
         self.fallback_success_template = self.read_template("auth_success.html")

+        self.inhibit_user_in_use_error = config.get("inhibit_user_in_use_error", False)
+
     def generate_config_section(self, generate_secrets=False, **kwargs):
         if generate_secrets:
             registration_shared_secret = 'registration_shared_secret: "%s"' % (

@@ -446,6 +448,16 @@ class RegistrationConfig(Config):
         # Defaults to true.
         #
         #auto_join_rooms_for_guests: false

+        # Whether to inhibit errors raised when registering a new account if the user ID
+        # already exists. If turned on, requests to /register/available will always
+        # show a user ID as available, and Synapse won't raise an error when starting
+        # a registration with a user ID that already exists. However, Synapse will still
+        # raise an error if the registration completes and the username conflicts.
+        #
+        # Defaults to false.
+        #
+        #inhibit_user_in_use_error: true
         """
         % locals()
         )
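To see what `inhibit_user_in_use_error` changes from a client's point of view, here is a hedged sketch (not from the commit) probing the client-server spec's `/register/available` endpoint; the base URL is a placeholder:

```python
import requests

BASE = "https://homeserver.example.com"  # assumption: your homeserver's base URL


def username_available(localpart: str) -> bool:
    # With inhibit_user_in_use_error enabled, this reports True even for taken
    # usernames; the conflict only surfaces when registration completes.
    resp = requests.get(
        f"{BASE}/_matrix/client/v3/register/available",
        params={"username": localpart},
    )
    if resp.status_code == 200:
        return resp.json().get("available", False)
    return False  # e.g. 400 with M_USER_IN_USE when the option is off


print(username_available("alice"))
```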
synapse/config/server.py

@@ -489,6 +489,19 @@ class ServerConfig(Config):
         # events with profile information that differ from the target's global profile.
         self.allow_per_room_profiles = config.get("allow_per_room_profiles", True)

+        # The maximum size an avatar can have, in bytes.
+        self.max_avatar_size = config.get("max_avatar_size")
+        if self.max_avatar_size is not None:
+            self.max_avatar_size = self.parse_size(self.max_avatar_size)
+
+        # The MIME types allowed for an avatar.
+        self.allowed_avatar_mimetypes = config.get("allowed_avatar_mimetypes")
+        if self.allowed_avatar_mimetypes and not isinstance(
+            self.allowed_avatar_mimetypes,
+            list,
+        ):
+            raise ConfigError("allowed_avatar_mimetypes must be a list")
+
         self.listeners = [parse_listener_def(x) for x in config.get("listeners", [])]

         # no_tls is not really supported any more, but let's grandfather it in

@@ -1168,6 +1181,20 @@ class ServerConfig(Config):
         #
         #allow_per_room_profiles: false

+        # The largest allowed file size for a user avatar. Defaults to no restriction.
+        #
+        # Note that user avatar changes will not work if this is set without
+        # using Synapse's media repository.
+        #
+        #max_avatar_size: 10M
+
+        # The MIME types allowed for user avatars. Defaults to no restriction.
+        #
+        # Note that user avatar changes will not work if this is set without
+        # using Synapse's media repository.
+        #
+        #allowed_avatar_mimetypes: ["image/png", "image/jpeg", "image/gif"]
+
         # How long to keep redacted events in unredacted form in the database. After
         # this period redacted events get replaced with their redacted form in the DB.
         #
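The `max_avatar_size` option accepts size strings such as `10M`. As a rough illustration of the kind of parsing the `parse_size` helper performs (the real helper may differ in detail; this is a sketch, not Synapse's implementation):

```python
def parse_size(value) -> int:
    """Parse '10M'-style size strings into a byte count (illustrative sketch)."""
    if isinstance(value, int):
        return value
    multipliers = {"K": 1024, "M": 1024 * 1024}
    suffix = value[-1].upper()
    if suffix in multipliers:
        return int(value[:-1]) * multipliers[suffix]
    return int(value)


assert parse_size("10M") == 10 * 1024 * 1024
assert parse_size("512K") == 512 * 1024
```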
synapse/events/__init__.py

@@ -315,10 +315,11 @@ class EventBase(metaclass=abc.ABCMeta):
     redacts: DefaultDictProperty[Optional[str]] = DefaultDictProperty("redacts", None)
     room_id: DictProperty[str] = DictProperty("room_id")
     sender: DictProperty[str] = DictProperty("sender")
-    # TODO state_key should be Optional[str], this is generally asserted in Synapse
-    # by calling is_state() first (which ensures this), but it is hard (not possible?)
+    # TODO state_key should be Optional[str]. This is generally asserted in Synapse
+    # by calling is_state() first (which ensures it is not None), but it is hard (not possible?)
     # to properly annotate that calling is_state() asserts that state_key exists
-    # and is non-None.
+    # and is non-None. It would be better to replace such direct references with
+    # get_state_key() (and a check for None).
     state_key: DictProperty[str] = DictProperty("state_key")
     type: DictProperty[str] = DictProperty("type")
     user_id: DictProperty[str] = DictProperty("sender")

@@ -332,7 +333,11 @@ class EventBase(metaclass=abc.ABCMeta):
         return self.content["membership"]

     def is_state(self) -> bool:
-        return hasattr(self, "state_key") and self.state_key is not None
+        return self.get_state_key() is not None
+
+    def get_state_key(self) -> Optional[str]:
+        """Get the state key of this event, or None if it's not a state event"""
+        return self._dict.get("state_key")

     def get_dict(self) -> JsonDict:
         d = dict(self._dict)
synapse/events/snapshot.py

@@ -163,7 +163,7 @@ class EventContext:
         return {
             "prev_state_id": prev_state_id,
             "event_type": event.type,
-            "event_state_key": event.state_key if event.is_state() else None,
+            "event_state_key": event.get_state_key(),
             "state_group": self._state_group,
             "state_group_before_event": self.state_group_before_event,
             "rejected": self.rejected,
synapse/events/utils.py

@@ -14,7 +14,17 @@
 # limitations under the License.
 import collections.abc
 import re
-from typing import Any, Callable, Dict, Iterable, List, Mapping, Optional, Union
+from typing import (
+    TYPE_CHECKING,
+    Any,
+    Callable,
+    Dict,
+    Iterable,
+    List,
+    Mapping,
+    Optional,
+    Union,
+)

 from frozendict import frozendict

@@ -26,6 +36,10 @@ from synapse.util.frozenutils import unfreeze

 from . import EventBase

+if TYPE_CHECKING:
+    from synapse.storage.databases.main.relations import BundledAggregations
+
+
 # Split strings on "." but not "\." This uses a negative lookbehind assertion for '\'
 # (?<!stuff) matches if the current position in the string is not preceded
 # by a match for 'stuff'.

@@ -376,7 +390,7 @@ class EventClientSerializer:
         event: Union[JsonDict, EventBase],
         time_now: int,
         *,
-        bundle_aggregations: Optional[Dict[str, JsonDict]] = None,
+        bundle_aggregations: Optional[Dict[str, "BundledAggregations"]] = None,
         **kwargs: Any,
     ) -> JsonDict:
         """Serializes a single event.

@@ -415,7 +429,7 @@ class EventClientSerializer:
         self,
         event: EventBase,
         time_now: int,
-        aggregations: JsonDict,
+        aggregations: "BundledAggregations",
         serialized_event: JsonDict,
     ) -> None:
         """Potentially injects bundled aggregations into the unsigned portion of the serialized event.

@@ -427,13 +441,18 @@ class EventClientSerializer:
             serialized_event: The serialized event which may be modified.

         """
-        # Make a copy in-case the object is cached.
-        aggregations = aggregations.copy()
+        serialized_aggregations = {}

-        if RelationTypes.REPLACE in aggregations:
+        if aggregations.annotations:
+            serialized_aggregations[RelationTypes.ANNOTATION] = aggregations.annotations
+
+        if aggregations.references:
+            serialized_aggregations[RelationTypes.REFERENCE] = aggregations.references
+
+        if aggregations.replace:
             # If there is an edit replace the content, preserving existing
             # relations.
-            edit = aggregations[RelationTypes.REPLACE]
+            edit = aggregations.replace

             # Ensure we take copies of the edit content, otherwise we risk modifying
             # the original event.

@@ -451,24 +470,28 @@ class EventClientSerializer:
             else:
                 serialized_event["content"].pop("m.relates_to", None)

-            aggregations[RelationTypes.REPLACE] = {
+            serialized_aggregations[RelationTypes.REPLACE] = {
                 "event_id": edit.event_id,
                 "origin_server_ts": edit.origin_server_ts,
                 "sender": edit.sender,
             }

         # If this event is the start of a thread, include a summary of the replies.
-        if RelationTypes.THREAD in aggregations:
-            # Serialize the latest thread event.
-            latest_thread_event = aggregations[RelationTypes.THREAD]["latest_event"]
-            # Don't bundle aggregations as this could recurse forever.
-            aggregations[RelationTypes.THREAD]["latest_event"] = self.serialize_event(
-                latest_thread_event, time_now, bundle_aggregations=None
-            )
+        if aggregations.thread:
+            serialized_aggregations[RelationTypes.THREAD] = {
+                # Don't bundle aggregations as this could recurse forever.
+                "latest_event": self.serialize_event(
+                    aggregations.thread.latest_event, time_now, bundle_aggregations=None
+                ),
+                "count": aggregations.thread.count,
+                "current_user_participated": aggregations.thread.current_user_participated,
+            }

         # Include the bundled aggregations in the event.
-        serialized_event["unsigned"].setdefault("m.relations", {}).update(aggregations)
+        if serialized_aggregations:
+            serialized_event["unsigned"].setdefault("m.relations", {}).update(
+                serialized_aggregations
+            )

     def serialize_events(
         self, events: Iterable[Union[JsonDict, EventBase]], time_now: int, **kwargs: Any
synapse/events/validator.py

@@ -12,7 +12,7 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 import collections.abc
-from typing import Iterable, Union
+from typing import Iterable, Type, Union

 import jsonschema

@@ -246,7 +246,7 @@ POWER_LEVELS_SCHEMA = {

 # This could return something newer than Draft 7, but that's the current "latest"
 # validator.
-def _create_power_level_validator() -> jsonschema.Draft7Validator:
+def _create_power_level_validator() -> Type[jsonschema.Draft7Validator]:
     validator = jsonschema.validators.validator_for(POWER_LEVELS_SCHEMA)

     # by default jsonschema does not consider a frozendict to be an object so
synapse/federation/transport/server/__init__.py

@@ -13,7 +13,7 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 import logging
-from typing import Dict, Iterable, List, Optional, Tuple, Type
+from typing import TYPE_CHECKING, Dict, Iterable, List, Optional, Tuple, Type

 from typing_extensions import Literal

@@ -36,17 +36,19 @@ from synapse.http.servlet import (
     parse_integer_from_args,
     parse_string_from_args,
 )
-from synapse.server import HomeServer
 from synapse.types import JsonDict, ThirdPartyInstanceID
 from synapse.util.ratelimitutils import FederationRateLimiter

+if TYPE_CHECKING:
+    from synapse.server import HomeServer
+
 logger = logging.getLogger(__name__)


 class TransportLayerServer(JsonResource):
     """Handles incoming federation HTTP requests"""

-    def __init__(self, hs: HomeServer, servlet_groups: Optional[List[str]] = None):
+    def __init__(self, hs: "HomeServer", servlet_groups: Optional[List[str]] = None):
         """Initialize the TransportLayerServer

         Will by default register all servlets. For custom behaviour, pass in

@@ -113,7 +115,7 @@ class PublicRoomList(BaseFederationServlet):

     def __init__(
         self,
-        hs: HomeServer,
+        hs: "HomeServer",
         authenticator: Authenticator,
         ratelimiter: FederationRateLimiter,
         server_name: str,

@@ -203,7 +205,7 @@ class FederationGroupsRenewAttestaionServlet(BaseFederationServlet):

     def __init__(
         self,
-        hs: HomeServer,
+        hs: "HomeServer",
         authenticator: Authenticator,
         ratelimiter: FederationRateLimiter,
         server_name: str,

@@ -251,7 +253,7 @@ class OpenIdUserInfo(BaseFederationServlet):

     def __init__(
         self,
-        hs: HomeServer,
+        hs: "HomeServer",
         authenticator: Authenticator,
         ratelimiter: FederationRateLimiter,
         server_name: str,

@@ -297,7 +299,7 @@ DEFAULT_SERVLET_GROUPS: Dict[str, Iterable[Type[BaseFederationServlet]]] = {


 def register_servlets(
-    hs: HomeServer,
+    hs: "HomeServer",
     resource: HttpServer,
     authenticator: Authenticator,
     ratelimiter: FederationRateLimiter,
synapse/federation/transport/server/_base.py

@@ -15,7 +15,7 @@
 import functools
 import logging
 import re
-from typing import Any, Awaitable, Callable, Optional, Tuple, cast
+from typing import TYPE_CHECKING, Any, Awaitable, Callable, Optional, Tuple, cast

 from synapse.api.errors import Codes, FederationDeniedError, SynapseError
 from synapse.api.urls import FEDERATION_V1_PREFIX

@@ -29,11 +29,13 @@ from synapse.logging.opentracing import (
     start_active_span_follows_from,
     whitelisted_homeserver,
 )
-from synapse.server import HomeServer
 from synapse.types import JsonDict
 from synapse.util.ratelimitutils import FederationRateLimiter
 from synapse.util.stringutils import parse_and_validate_server_name

+if TYPE_CHECKING:
+    from synapse.server import HomeServer
+
 logger = logging.getLogger(__name__)


@@ -46,7 +48,7 @@ class NoAuthenticationError(AuthenticationError):


 class Authenticator:
-    def __init__(self, hs: HomeServer):
+    def __init__(self, hs: "HomeServer"):
         self._clock = hs.get_clock()
         self.keyring = hs.get_keyring()
         self.server_name = hs.hostname

@@ -114,11 +116,11 @@ class Authenticator:
         # alive
         retry_timings = await self.store.get_destination_retry_timings(origin)
         if retry_timings and retry_timings.retry_last_ts:
-            run_in_background(self._reset_retry_timings, origin)
+            run_in_background(self.reset_retry_timings, origin)

         return origin

-    async def _reset_retry_timings(self, origin: str) -> None:
+    async def reset_retry_timings(self, origin: str) -> None:
         try:
             logger.info("Marking origin %r as up", origin)
             await self.store.set_destination_retry_timings(origin, None, 0, 0)

@@ -227,7 +229,7 @@ class BaseFederationServlet:

     def __init__(
         self,
-        hs: HomeServer,
+        hs: "HomeServer",
         authenticator: Authenticator,
         ratelimiter: FederationRateLimiter,
         server_name: str,
synapse/federation/transport/server/federation.py

@@ -12,7 +12,17 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 import logging
-from typing import Dict, List, Mapping, Optional, Sequence, Tuple, Type, Union
+from typing import (
+    TYPE_CHECKING,
+    Dict,
+    List,
+    Mapping,
+    Optional,
+    Sequence,
+    Tuple,
+    Type,
+    Union,
+)

 from typing_extensions import Literal

@@ -30,11 +40,13 @@ from synapse.http.servlet import (
     parse_string_from_args,
     parse_strings_from_args,
 )
-from synapse.server import HomeServer
 from synapse.types import JsonDict
 from synapse.util.ratelimitutils import FederationRateLimiter
 from synapse.util.versionstring import get_version_string

+if TYPE_CHECKING:
+    from synapse.server import HomeServer
+
 logger = logging.getLogger(__name__)
 issue_8631_logger = logging.getLogger("synapse.8631_debug")

@@ -47,7 +59,7 @@ class BaseFederationServerServlet(BaseFederationServlet):

     def __init__(
         self,
-        hs: HomeServer,
+        hs: "HomeServer",
         authenticator: Authenticator,
         ratelimiter: FederationRateLimiter,
         server_name: str,

@@ -596,7 +608,7 @@ class FederationSpaceSummaryServlet(BaseFederationServlet):

     def __init__(
         self,
-        hs: HomeServer,
+        hs: "HomeServer",
         authenticator: Authenticator,
         ratelimiter: FederationRateLimiter,
         server_name: str,

@@ -670,7 +682,7 @@ class FederationRoomHierarchyServlet(BaseFederationServlet):

     def __init__(
         self,
-        hs: HomeServer,
+        hs: "HomeServer",
         authenticator: Authenticator,
         ratelimiter: FederationRateLimiter,
         server_name: str,

@@ -706,7 +718,7 @@ class RoomComplexityServlet(BaseFederationServlet):

     def __init__(
         self,
-        hs: HomeServer,
+        hs: "HomeServer",
         authenticator: Authenticator,
         ratelimiter: FederationRateLimiter,
         server_name: str,
synapse/federation/transport/server/groups_local.py

@@ -11,7 +11,7 @@
 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 # See the License for the specific language governing permissions and
 # limitations under the License.
-from typing import Dict, List, Tuple, Type
+from typing import TYPE_CHECKING, Dict, List, Tuple, Type

 from synapse.api.errors import SynapseError
 from synapse.federation.transport.server._base import (

@@ -19,10 +19,12 @@ from synapse.federation.transport.server._base import (
     BaseFederationServlet,
 )
 from synapse.handlers.groups_local import GroupsLocalHandler
-from synapse.server import HomeServer
 from synapse.types import JsonDict, get_domain_from_id
 from synapse.util.ratelimitutils import FederationRateLimiter

+if TYPE_CHECKING:
+    from synapse.server import HomeServer
+

 class BaseGroupsLocalServlet(BaseFederationServlet):
     """Abstract base class for federation servlet classes which provides a groups local handler.

@@ -32,7 +34,7 @@ class BaseGroupsLocalServlet(BaseFederationServlet):

     def __init__(
         self,
-        hs: HomeServer,
+        hs: "HomeServer",
         authenticator: Authenticator,
         ratelimiter: FederationRateLimiter,
         server_name: str,
synapse/federation/transport/server/groups_server.py

@@ -11,7 +11,7 @@
 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 # See the License for the specific language governing permissions and
 # limitations under the License.
-from typing import Dict, List, Tuple, Type
+from typing import TYPE_CHECKING, Dict, List, Tuple, Type

 from typing_extensions import Literal

@@ -22,10 +22,12 @@ from synapse.federation.transport.server._base import (
     BaseFederationServlet,
 )
 from synapse.http.servlet import parse_string_from_args
-from synapse.server import HomeServer
 from synapse.types import JsonDict, get_domain_from_id
 from synapse.util.ratelimitutils import FederationRateLimiter

+if TYPE_CHECKING:
+    from synapse.server import HomeServer
+

 class BaseGroupsServerServlet(BaseFederationServlet):
     """Abstract base class for federation servlet classes which provides a groups server handler.

@@ -35,7 +37,7 @@ class BaseGroupsServerServlet(BaseFederationServlet):

     def __init__(
         self,
-        hs: HomeServer,
+        hs: "HomeServer",
         authenticator: Authenticator,
         ratelimiter: FederationRateLimiter,
         server_name: str,
synapse/handlers/auth.py

@@ -2060,6 +2060,10 @@ CHECK_AUTH_CALLBACK = Callable[
         Optional[Tuple[str, Optional[Callable[["LoginResponse"], Awaitable[None]]]]]
     ],
 ]
+GET_USERNAME_FOR_REGISTRATION_CALLBACK = Callable[
+    [JsonDict, JsonDict],
+    Awaitable[Optional[str]],
+]


 class PasswordAuthProvider:

@@ -2072,6 +2076,9 @@ class PasswordAuthProvider:
         # lists of callbacks
         self.check_3pid_auth_callbacks: List[CHECK_3PID_AUTH_CALLBACK] = []
         self.on_logged_out_callbacks: List[ON_LOGGED_OUT_CALLBACK] = []
+        self.get_username_for_registration_callbacks: List[
+            GET_USERNAME_FOR_REGISTRATION_CALLBACK
+        ] = []

         # Mapping from login type to login parameters
         self._supported_login_types: Dict[str, Iterable[str]] = {}

@@ -2086,6 +2093,9 @@ class PasswordAuthProvider:
         auth_checkers: Optional[
             Dict[Tuple[str, Tuple[str, ...]], CHECK_AUTH_CALLBACK]
         ] = None,
+        get_username_for_registration: Optional[
+            GET_USERNAME_FOR_REGISTRATION_CALLBACK
+        ] = None,
     ) -> None:
         # Register check_3pid_auth callback
         if check_3pid_auth is not None:

@@ -2130,6 +2140,11 @@ class PasswordAuthProvider:
             # Add the new method to the list of auth_checker_callbacks for this login type
             self.auth_checker_callbacks.setdefault(login_type, []).append(callback)

+        if get_username_for_registration is not None:
+            self.get_username_for_registration_callbacks.append(
+                get_username_for_registration,
+            )
+
     def get_supported_login_types(self) -> Mapping[str, Iterable[str]]:
         """Get the login types supported by this password provider

@@ -2285,3 +2300,46 @@ class PasswordAuthProvider:
             except Exception as e:
                 logger.warning("Failed to run module API callback %s: %s", callback, e)
                 continue
+
+    async def get_username_for_registration(
+        self,
+        uia_results: JsonDict,
+        params: JsonDict,
+    ) -> Optional[str]:
+        """Defines the username to use when registering the user, using the credentials
+        and parameters provided during the UIA flow.
+
+        Stops at the first callback that returns a string.
+
+        Args:
+            uia_results: The credentials provided during the UIA flow.
+            params: The parameters provided by the registration request.
+
+        Returns:
+            The localpart to use when registering this user, or None if no module
+            returned a localpart.
+        """
+        for callback in self.get_username_for_registration_callbacks:
+            try:
+                res = await callback(uia_results, params)
+
+                if isinstance(res, str):
+                    return res
+                elif res is not None:
+                    # mypy complains that this line is unreachable because it assumes the
+                    # data returned by the module fits the expected type. We just want
+                    # to make sure this is the case.
+                    logger.warning(  # type: ignore[unreachable]
+                        "Ignoring non-string value returned by"
+                        " get_username_for_registration callback %s: %s",
+                        callback,
+                        res,
+                    )
+            except Exception as e:
+                logger.error(
+                    "Module raised an exception in get_username_for_registration: %s",
+                    e,
+                )
+                raise SynapseError(code=500, msg="Internal Server Error")
+
+        return None
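To illustrate how a module would hook into the new callback, here is a minimal sketch (not from the commit); the module class and naming policy are invented, though `register_password_auth_provider_callbacks` is the module-API registration hook this change extends:

```python
from typing import Any, Dict, Optional

from synapse.module_api import ModuleApi


class UsernamePickerModule:
    """Derives the registration localpart from UIA results (illustrative only)."""

    def __init__(self, config: Dict[str, Any], api: ModuleApi):
        # Hook the new callback into the password auth provider machinery.
        api.register_password_auth_provider_callbacks(
            get_username_for_registration=self.pick_username,
        )

    async def pick_username(
        self, uia_results: Dict[str, Any], params: Dict[str, Any]
    ) -> Optional[str]:
        # Hypothetical policy: prefix whatever localpart the client requested.
        requested = params.get("username")
        if isinstance(requested, str) and requested:
            return f"corp-{requested.lower()}"
        # Returning None falls through to the next callback / default behaviour.
        return None
```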
synapse/handlers/deactivate_account.py

@@ -157,6 +157,9 @@ class DeactivateAccountHandler:
         # Mark the user as deactivated.
         await self.store.set_user_deactivated_status(user_id, True)

+        # Remove account data (including ignored users and push rules).
+        await self.store.purge_account_data_for_user(user_id)
+
         return identity_server_supports_unbinding

     async def _reject_pending_invites_for_user(self, user_id: str) -> None:
synapse/handlers/profile.py

@@ -31,6 +31,8 @@ from synapse.types import (
     create_requester,
     get_domain_from_id,
 )
+from synapse.util.caches.descriptors import cached
+from synapse.util.stringutils import parse_and_validate_mxc_uri

 if TYPE_CHECKING:
     from synapse.server import HomeServer

@@ -64,6 +66,11 @@ class ProfileHandler:
         self.user_directory_handler = hs.get_user_directory_handler()
         self.request_ratelimiter = hs.get_request_ratelimiter()

+        self.max_avatar_size = hs.config.server.max_avatar_size
+        self.allowed_avatar_mimetypes = hs.config.server.allowed_avatar_mimetypes
+
+        self.server_name = hs.config.server.server_name
+
         if hs.config.worker.run_background_tasks:
             self.clock.looping_call(
                 self._update_remote_profile_cache, self.PROFILE_UPDATE_MS

@@ -286,6 +293,9 @@ class ProfileHandler:
                 400, "Avatar URL is too long (max %i)" % (MAX_AVATAR_URL_LEN,)
             )

+        if not await self.check_avatar_size_and_mime_type(new_avatar_url):
+            raise SynapseError(403, "This avatar is not allowed", Codes.FORBIDDEN)
+
         avatar_url_to_set: Optional[str] = new_avatar_url
         if new_avatar_url == "":
             avatar_url_to_set = None

@@ -307,6 +317,63 @@ class ProfileHandler:

         await self._update_join_states(requester, target_user)

+    @cached()
+    async def check_avatar_size_and_mime_type(self, mxc: str) -> bool:
+        """Check that the size and content type of the avatar at the given MXC URI are
+        within the configured limits.
+
+        Args:
+            mxc: The MXC URI at which the avatar can be found.
+
+        Returns:
+            A boolean indicating whether the file can be allowed to be set as an avatar.
+        """
+        if not self.max_avatar_size and not self.allowed_avatar_mimetypes:
+            return True
+
+        server_name, _, media_id = parse_and_validate_mxc_uri(mxc)
+
+        if server_name == self.server_name:
+            media_info = await self.store.get_local_media(media_id)
+        else:
+            media_info = await self.store.get_cached_remote_media(server_name, media_id)
+
+        if media_info is None:
+            # Both configuration options need to access the file's metadata, and
+            # retrieving remote avatars just for this becomes a bit of a faff, especially
+            # if e.g. the file is too big. It's also generally safe to assume most files
+            # used as avatars are uploaded locally, or if the upload didn't happen as part
+            # of a PUT request on /avatar_url that the file was at least previewed by the
+            # user locally (and therefore downloaded to the remote media cache).
+            logger.warning("Forbidding avatar change to %s: avatar not on server", mxc)
+            return False
+
+        if self.max_avatar_size:
+            # Ensure the avatar does not exceed the maximum allowed avatar size
+            if media_info["media_length"] > self.max_avatar_size:
+                logger.warning(
+                    "Forbidding avatar change to %s: %d bytes is above the allowed size "
+                    "limit",
+                    mxc,
+                    media_info["media_length"],
+                )
+                return False
+
+        if self.allowed_avatar_mimetypes:
+            # Ensure the avatar's file type is allowed
+            if (
+                self.allowed_avatar_mimetypes
+                and media_info["media_type"] not in self.allowed_avatar_mimetypes
+            ):
+                logger.warning(
+                    "Forbidding avatar change to %s: mimetype %s not allowed",
+                    mxc,
+                    media_info["media_type"],
+                )
+                return False
+
+        return True
+
     async def on_profile_query(self, args: JsonDict) -> JsonDict:
         """Handles federation profile query requests."""
synapse/handlers/register.py

@@ -132,6 +132,7 @@ class RegistrationHandler:
         localpart: str,
         guest_access_token: Optional[str] = None,
         assigned_user_id: Optional[str] = None,
+        inhibit_user_in_use_error: bool = False,
     ) -> None:
         if types.contains_invalid_mxid_characters(localpart):
             raise SynapseError(

@@ -171,21 +172,22 @@ class RegistrationHandler:

         users = await self.store.get_users_by_id_case_insensitive(user_id)
         if users:
-            if not guest_access_token:
+            if not inhibit_user_in_use_error and not guest_access_token:
                 raise SynapseError(
                     400, "User ID already taken.", errcode=Codes.USER_IN_USE
                 )
-            user_data = await self.auth.get_user_by_access_token(guest_access_token)
-            if (
-                not user_data.is_guest
-                or UserID.from_string(user_data.user_id).localpart != localpart
-            ):
-                raise AuthError(
-                    403,
-                    "Cannot register taken user ID without valid guest "
-                    "credentials for that user.",
-                    errcode=Codes.FORBIDDEN,
-                )
+            if guest_access_token:
+                user_data = await self.auth.get_user_by_access_token(guest_access_token)
+                if (
+                    not user_data.is_guest
+                    or UserID.from_string(user_data.user_id).localpart != localpart
+                ):
+                    raise AuthError(
+                        403,
+                        "Cannot register taken user ID without valid guest "
+                        "credentials for that user.",
+                        errcode=Codes.FORBIDDEN,
+                    )

         if guest_access_token is None:
             try:
synapse/handlers/room.py

@@ -30,6 +30,7 @@ from typing import (
     Tuple,
 )

+import attr
 from typing_extensions import TypedDict

 from synapse.api.constants import (

@@ -60,6 +61,7 @@ from synapse.events.utils import copy_power_levels_contents
 from synapse.federation.federation_client import InvalidResponseError
 from synapse.handlers.federation import get_domains_from_state
 from synapse.rest.admin._base import assert_user_is_admin
+from synapse.storage.databases.main.relations import BundledAggregations
 from synapse.storage.state import StateFilter
 from synapse.streams import EventSource
 from synapse.types import (

@@ -90,6 +92,17 @@ id_server_scheme = "https://"
 FIVE_MINUTES_IN_MS = 5 * 60 * 1000


+@attr.s(slots=True, frozen=True, auto_attribs=True)
+class EventContext:
+    events_before: List[EventBase]
+    event: EventBase
+    events_after: List[EventBase]
+    state: List[EventBase]
+    aggregations: Dict[str, BundledAggregations]
+    start: str
+    end: str
+
+
 class RoomCreationHandler:
     def __init__(self, hs: "HomeServer"):
         self.store = hs.get_datastore()

@@ -1119,7 +1132,7 @@ class RoomContextHandler:
         limit: int,
         event_filter: Optional[Filter],
         use_admin_priviledge: bool = False,
-    ) -> Optional[JsonDict]:
+    ) -> Optional[EventContext]:
         """Retrieves events, pagination tokens and state around a given event
         in a room.

@@ -1167,38 +1180,28 @@ class RoomContextHandler:
         results = await self.store.get_events_around(
             room_id, event_id, before_limit, after_limit, event_filter
         )
+        events_before = results.events_before
+        events_after = results.events_after

         if event_filter:
-            results["events_before"] = await event_filter.filter(
-                results["events_before"]
-            )
-            results["events_after"] = await event_filter.filter(results["events_after"])
+            events_before = await event_filter.filter(events_before)
+            events_after = await event_filter.filter(events_after)

-        results["events_before"] = await filter_evts(results["events_before"])
-        results["events_after"] = await filter_evts(results["events_after"])
+        events_before = await filter_evts(events_before)
+        events_after = await filter_evts(events_after)
         # filter_evts can return a pruned event in case the user is allowed to see that
         # there's something there but not see the content, so use the event that's in
         # `filtered` rather than the event we retrieved from the datastore.
-        results["event"] = filtered[0]
+        event = filtered[0]

         # Fetch the aggregations.
         aggregations = await self.store.get_bundled_aggregations(
-            [results["event"]], user.to_string()
+            itertools.chain(events_before, (event,), events_after),
+            user.to_string(),
         )
-        aggregations.update(
-            await self.store.get_bundled_aggregations(
-                results["events_before"], user.to_string()
-            )
-        )
-        aggregations.update(
-            await self.store.get_bundled_aggregations(
-                results["events_after"], user.to_string()
-            )
-        )
-        results["aggregations"] = aggregations

-        if results["events_after"]:
-            last_event_id = results["events_after"][-1].event_id
+        if events_after:
+            last_event_id = events_after[-1].event_id
         else:
             last_event_id = event_id

@@ -1206,9 +1209,9 @@ class RoomContextHandler:
             state_filter = StateFilter.from_lazy_load_member_list(
                 ev.sender
                 for ev in itertools.chain(
-                    results["events_before"],
-                    (results["event"],),
-                    results["events_after"],
+                    events_before,
+                    (event,),
+                    events_after,
                 )
             )
         else:

@@ -1226,21 +1229,23 @@ class RoomContextHandler:
         if event_filter:
             state_events = await event_filter.filter(state_events)

-        results["state"] = await filter_evts(state_events)
-
         # We use a dummy token here as we only care about the room portion of
         # the token, which we replace.
         token = StreamToken.START

-        results["start"] = await token.copy_and_replace(
-            "room_key", results["start"]
-        ).to_string(self.store)
-
-        results["end"] = await token.copy_and_replace(
-            "room_key", results["end"]
-        ).to_string(self.store)
-
-        return results
+        return EventContext(
+            events_before=events_before,
+            event=event,
+            events_after=events_after,
+            state=await filter_evts(state_events),
+            aggregations=aggregations,
+            start=await token.copy_and_replace("room_key", results.start).to_string(
+                self.store
+            ),
+            end=await token.copy_and_replace("room_key", results.end).to_string(
+                self.store
+            ),
+        )


 class TimestampLookupHandler:
@@ -590,6 +590,12 @@ class RoomMemberHandler(metaclass=abc.ABCMeta):
                    errcode=Codes.BAD_JSON,
                )
 
+        if "avatar_url" in content:
+            if not await self.profile_handler.check_avatar_size_and_mime_type(
+                content["avatar_url"],
+            ):
+                raise SynapseError(403, "This avatar is not allowed", Codes.FORBIDDEN)
+
        # The event content should *not* include the authorising user as
        # it won't be properly signed. Strip it out since it might come
        # back from a client updating a display name / avatar.
@@ -361,36 +361,37 @@ class SearchHandler:
 
            logger.info(
                "Context for search returned %d and %d events",
-                len(res["events_before"]),
-                len(res["events_after"]),
+                len(res.events_before),
+                len(res.events_after),
            )
 
-            res["events_before"] = await filter_events_for_client(
-                self.storage, user.to_string(), res["events_before"]
+            events_before = await filter_events_for_client(
+                self.storage, user.to_string(), res.events_before
            )
 
-            res["events_after"] = await filter_events_for_client(
-                self.storage, user.to_string(), res["events_after"]
+            events_after = await filter_events_for_client(
+                self.storage, user.to_string(), res.events_after
            )
 
-            res["start"] = await now_token.copy_and_replace(
-                "room_key", res["start"]
-            ).to_string(self.store)
-
-            res["end"] = await now_token.copy_and_replace(
-                "room_key", res["end"]
-            ).to_string(self.store)
+            context = {
+                "events_before": events_before,
+                "events_after": events_after,
+                "start": await now_token.copy_and_replace(
+                    "room_key", res.start
+                ).to_string(self.store),
+                "end": await now_token.copy_and_replace(
+                    "room_key", res.end
+                ).to_string(self.store),
+            }
 
            if include_profile:
                senders = {
                    ev.sender
-                    for ev in itertools.chain(
-                        res["events_before"], [event], res["events_after"]
-                    )
+                    for ev in itertools.chain(events_before, [event], events_after)
                }
 
-                if res["events_after"]:
-                    last_event_id = res["events_after"][-1].event_id
+                if events_after:
+                    last_event_id = events_after[-1].event_id
                else:
                    last_event_id = event.event_id
 
@@ -402,7 +403,7 @@ class SearchHandler:
                    last_event_id, state_filter
                )
 
-                res["profile_info"] = {
+                context["profile_info"] = {
                    s.state_key: {
                        "displayname": s.content.get("displayname", None),
                        "avatar_url": s.content.get("avatar_url", None),
@@ -411,7 +412,7 @@ class SearchHandler:
                    if s.type == EventTypes.Member and s.state_key in senders
                }
 
-            contexts[event.event_id] = res
+            contexts[event.event_id] = context
        else:
            contexts = {}
 
@@ -421,10 +422,10 @@ class SearchHandler:
 
        for context in contexts.values():
            context["events_before"] = self._event_serializer.serialize_events(
-                context["events_before"], time_now
+                context["events_before"], time_now  # type: ignore[arg-type]
            )
            context["events_after"] = self._event_serializer.serialize_events(
-                context["events_after"], time_now
+                context["events_after"], time_now  # type: ignore[arg-type]
            )
 
        state_results = {}
@@ -37,6 +37,7 @@ from synapse.logging.context import current_context
 from synapse.logging.opentracing import SynapseTags, log_kv, set_tag, start_active_span
 from synapse.push.clientformat import format_push_rules_for_user
 from synapse.storage.databases.main.event_push_actions import NotifCounts
+from synapse.storage.databases.main.relations import BundledAggregations
 from synapse.storage.roommember import MemberSummary
 from synapse.storage.state import StateFilter
 from synapse.types import (
@@ -100,7 +101,7 @@ class TimelineBatch:
     limited: bool
     # A mapping of event ID to the bundled aggregations for the above events.
     # This is only calculated if limited is true.
-    bundled_aggregations: Optional[Dict[str, Dict[str, Any]]] = None
+    bundled_aggregations: Optional[Dict[str, BundledAggregations]] = None
 
     def __bool__(self) -> bool:
         """Make the result appear empty if there are no updates. This is used
@@ -1619,7 +1620,7 @@ class SyncHandler:
        # TODO: Can we `SELECT ignored_user_id FROM ignored_users WHERE ignorer_user_id=?;` instead?
        ignored_account_data = (
            await self.store.get_global_account_data_by_type_for_user(
-                AccountDataTypes.IGNORED_USER_LIST, user_id=user_id
+                user_id=user_id, data_type=AccountDataTypes.IGNORED_USER_LIST
            )
        )
 
@@ -731,15 +731,24 @@ class SimpleHttpClient:
        # straight back in again
 
        try:
-            length = await make_deferred_yieldable(
-                read_body_with_max_size(response, output_stream, max_size)
-            )
+            d = read_body_with_max_size(response, output_stream, max_size)
+
+            # Ensure that the body is not read forever.
+            d = timeout_deferred(d, 30, self.hs.get_reactor())
+
+            length = await make_deferred_yieldable(d)
        except BodyExceededMaxSize:
            raise SynapseError(
                HTTPStatus.BAD_GATEWAY,
                "Requested file is too large > %r bytes" % (max_size,),
                Codes.TOO_LARGE,
            )
+        except defer.TimeoutError:
+            raise SynapseError(
+                HTTPStatus.BAD_GATEWAY,
+                "Requested file took too long to download",
+                Codes.TOO_LARGE,
+            )
        except Exception as e:
            raise SynapseError(
                HTTPStatus.BAD_GATEWAY, ("Failed to download remote body: %s" % e)
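The change above works because Twisted Deferreds compose: read_body_with_max_size returns a Deferred, and timeout_deferred wraps it so it errbacks with defer.TimeoutError (surfaced here as a 502) if the remote end stalls. A minimal sketch of the same pattern, assuming synapse.util.async_helpers.timeout_deferred and a reactor in scope; slow_read stands in for the body read:

from twisted.internet import defer

from synapse.util.async_helpers import timeout_deferred


async def read_with_deadline(slow_read: "defer.Deferred", reactor) -> bytes:
    # Wrap the in-flight read so it is cancelled after 30s rather than
    # hanging forever on a stalled connection.
    d = timeout_deferred(slow_read, 30, reactor)
    try:
        return await d
    except defer.TimeoutError:
        raise RuntimeError("remote body took too long to download")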
@@ -407,7 +407,10 @@ class SynapseRequest(Request):
 
        user_agent = get_request_user_agent(self, "-")
 
-        code = str(self.code)
+        # int(self.code) looks redundant, because self.code is already an int.
+        # But self.code might be an HTTPStatus (which inherits from int)---which has
+        # a different string representation. So ensure we really have an integer.
+        code = str(int(self.code))
        if not self.finished:
            # we didn't send the full response before we gave up (presumably because
            # the connection dropped)
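The three-line comment is easy to check in a REPL. On the Python versions Synapse supported when this landed (before 3.11, where IntEnum's str() was not yet the plain number), the behaviour being guarded against looks like this:

from http import HTTPStatus

code = HTTPStatus.OK
print(str(code))       # 'HTTPStatus.OK' -- wrong for an access log
print(str(int(code)))  # '200'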
@@ -71,6 +71,7 @@ from synapse.handlers.account_validity import (
 from synapse.handlers.auth import (
     CHECK_3PID_AUTH_CALLBACK,
     CHECK_AUTH_CALLBACK,
+    GET_USERNAME_FOR_REGISTRATION_CALLBACK,
     ON_LOGGED_OUT_CALLBACK,
     AuthHandler,
 )
@@ -177,6 +178,7 @@ class ModuleApi:
        self._presence_stream = hs.get_event_sources().sources.presence
        self._state = hs.get_state_handler()
        self._clock: Clock = hs.get_clock()
+        self._registration_handler = hs.get_registration_handler()
        self._send_email_handler = hs.get_send_email_handler()
        self.custom_template_dir = hs.config.server.custom_template_directory
 
@@ -310,6 +312,9 @@ class ModuleApi:
        auth_checkers: Optional[
            Dict[Tuple[str, Tuple[str, ...]], CHECK_AUTH_CALLBACK]
        ] = None,
+        get_username_for_registration: Optional[
+            GET_USERNAME_FOR_REGISTRATION_CALLBACK
+        ] = None,
    ) -> None:
        """Registers callbacks for password auth provider capabilities.
 
@@ -319,6 +324,7 @@ class ModuleApi:
            check_3pid_auth=check_3pid_auth,
            on_logged_out=on_logged_out,
            auth_checkers=auth_checkers,
+            get_username_for_registration=get_username_for_registration,
        )
 
    def register_background_update_controller_callbacks(
@@ -1202,6 +1208,22 @@ class ModuleApi:
        """
        return await defer_to_thread(self._hs.get_reactor(), f, *args, **kwargs)
 
+    async def check_username(self, username: str) -> None:
+        """Checks if the provided username uses the grammar defined in the Matrix
+        specification, and is already being used by an existing user.
+
+        Added in Synapse v1.52.0.
+
+        Args:
+            username: The username to check. This is the local part of the user's full
+                Matrix user ID, i.e. it's "alice" if the full user ID is "@alice:foo.com".
+
+        Raises:
+            SynapseError with the errcode "M_USER_IN_USE" if the username is already in
+            use.
+        """
+        await self._registration_handler.check_username(username)
+
 
 class PublicRoomListManager:
     """Contains methods for adding to, removing from and querying whether a room
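Together, get_username_for_registration and check_username give password-auth-provider modules control over the registered localpart. A hedged sketch of a module using the new hooks (the module class and its username-derivation logic are illustrative, not part of this commit):

from typing import Any, Dict, Optional

from synapse.module_api import ModuleApi


class UsernamePickerExample:
    def __init__(self, config: dict, api: ModuleApi):
        self._api = api
        api.register_password_auth_provider_callbacks(
            get_username_for_registration=self.pick_username,
        )

    async def pick_username(
        self, uia_results: Dict[str, Any], params: Dict[str, Any]
    ) -> Optional[str]:
        username = (params.get("username") or "").lower()
        if not username:
            return None  # fall back to whatever the client asked for
        # Raises SynapseError with M_USER_IN_USE if the name is taken.
        await self._api.check_username(username)
        return username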
@@ -455,7 +455,7 @@ class Mailer:
        }
 
        the_events = await filter_events_for_client(
-            self.storage, user_id, results["events_before"]
+            self.storage, user_id, results.events_before
        )
        the_events.append(notif_event)
 
@@ -70,13 +70,14 @@ REQUIREMENTS = [
     "pyasn1>=0.1.9",
     "pyasn1-modules>=0.0.7",
     "bcrypt>=3.1.0",
-    "pillow>=4.3.0",
+    "pillow>=5.4.0",
     "sortedcontainers>=1.4.4",
     "pymacaroons>=0.13.0",
     "msgpack>=0.5.2",
     "phonenumbers>=8.2.0",
     # we use GaugeHistogramMetric, which was added in prom-client 0.4.0.
-    "prometheus_client>=0.4.0",
+    # 0.13.0 has an incorrect type annotation, see #11832.
+    "prometheus_client>=0.4.0,<0.13.0",
     # we use `order`, which arrived in attrs 19.2.0.
     # Note: 21.1.0 broke `/sync`, see #9936
     "attrs>=19.2.0,!=21.1.0",
@@ -107,7 +108,7 @@ CONDITIONAL_REQUIREMENTS = {
     # `systemd.journal.JournalHandler`, as is documented in
     # `contrib/systemd/log_config.yaml`.
     "systemd": ["systemd-python>=231"],
-    "url_preview": ["lxml>=3.5.0"],
+    "url_preview": ["lxml>=4.2.0"],
     "sentry": ["sentry-sdk>=0.7.2"],
     "opentracing": ["jaeger-client>=4.0.0", "opentracing>=2.2.0"],
     "jwt": ["pyjwt>=1.6.4"],
@@ -52,8 +52,8 @@ class SlavedEventStore(
     EventPushActionsWorkerStore,
     StreamWorkerStore,
     StateGroupWorkerStore,
-    EventsWorkerStore,
     SignatureWorkerStore,
+    EventsWorkerStore,
     UserErasureWorkerStore,
     RelationsWorkerStore,
     BaseSlavedStore,
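Swapping EventsWorkerStore and SignatureWorkerStore changes Python's method resolution order, so that where the two bases overlap, SignatureWorkerStore's definitions now win (the same reordering is applied to EventFederationWorkerStore later in this commit). A toy illustration of the mechanism, unrelated to Synapse's actual stores:

class A:
    def who(self) -> str:
        return "A"


class B:
    def who(self) -> str:
        return "B"


class Before(A, B):  # mirrors EventsWorkerStore listed first
    pass


class After(B, A):  # mirrors SignatureWorkerStore listed first
    pass


print(Before().who())  # 'A' -- the leftmost base wins under the MRO
print(After().who())   # 'B'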
@@ -41,7 +41,9 @@ from synapse.rest.admin.event_reports import (
     EventReportsRestServlet,
 )
 from synapse.rest.admin.federation import (
-    DestinationsRestServlet,
+    DestinationMembershipRestServlet,
+    DestinationResetConnectionRestServlet,
+    DestinationRestServlet,
     ListDestinationsRestServlet,
 )
 from synapse.rest.admin.groups import DeleteGroupAdminRestServlet
@@ -267,7 +269,9 @@ def register_servlets(hs: "HomeServer", http_server: HttpServer) -> None:
     ListRegistrationTokensRestServlet(hs).register(http_server)
     NewRegistrationTokenRestServlet(hs).register(http_server)
     RegistrationTokenRestServlet(hs).register(http_server)
-    DestinationsRestServlet(hs).register(http_server)
+    DestinationMembershipRestServlet(hs).register(http_server)
+    DestinationResetConnectionRestServlet(hs).register(http_server)
+    DestinationRestServlet(hs).register(http_server)
     ListDestinationsRestServlet(hs).register(http_server)
 
     # Some servlets only get registered for the main process.
@@ -16,6 +16,7 @@ from http import HTTPStatus
 from typing import TYPE_CHECKING, Tuple
 
 from synapse.api.errors import Codes, NotFoundError, SynapseError
+from synapse.federation.transport.server import Authenticator
 from synapse.http.servlet import RestServlet, parse_integer, parse_string
 from synapse.http.site import SynapseRequest
 from synapse.rest.admin._base import admin_patterns, assert_requester_is_admin
@@ -90,7 +91,7 @@ class ListDestinationsRestServlet(RestServlet):
        return HTTPStatus.OK, response
 
 
-class DestinationsRestServlet(RestServlet):
+class DestinationRestServlet(RestServlet):
    """Get details of a destination.
    This needs user to have administrator access in Synapse.
 
@@ -145,3 +146,100 @@ class DestinationsRestServlet(RestServlet):
        }
 
        return HTTPStatus.OK, response
+
+
+class DestinationMembershipRestServlet(RestServlet):
+    """Get list of rooms of a destination.
+    This needs user to have administrator access in Synapse.
+
+    GET /_synapse/admin/v1/federation/destinations/<destination>/rooms?from=0&limit=10
+
+    returns:
+        200 OK with a list of rooms if success otherwise an error.
+
+    The parameters `from` and `limit` are required only for pagination.
+    By default, a `limit` of 100 is used.
+    """
+
+    PATTERNS = admin_patterns("/federation/destinations/(?P<destination>[^/]*)/rooms$")
+
+    def __init__(self, hs: "HomeServer"):
+        self._auth = hs.get_auth()
+        self._store = hs.get_datastore()
+
+    async def on_GET(
+        self, request: SynapseRequest, destination: str
+    ) -> Tuple[int, JsonDict]:
+        await assert_requester_is_admin(self._auth, request)
+
+        if not await self._store.is_destination_known(destination):
+            raise NotFoundError("Unknown destination")
+
+        start = parse_integer(request, "from", default=0)
+        limit = parse_integer(request, "limit", default=100)
+
+        if start < 0:
+            raise SynapseError(
+                HTTPStatus.BAD_REQUEST,
+                "Query parameter from must be a string representing a positive integer.",
+                errcode=Codes.INVALID_PARAM,
+            )
+
+        if limit < 0:
+            raise SynapseError(
+                HTTPStatus.BAD_REQUEST,
+                "Query parameter limit must be a string representing a positive integer.",
+                errcode=Codes.INVALID_PARAM,
+            )
+
+        direction = parse_string(request, "dir", default="f", allowed_values=("f", "b"))
+
+        rooms, total = await self._store.get_destination_rooms_paginate(
+            destination, start, limit, direction
+        )
+        response = {"rooms": rooms, "total": total}
+        if (start + limit) < total:
+            response["next_token"] = str(start + len(rooms))
+
+        return HTTPStatus.OK, response
+
+
+class DestinationResetConnectionRestServlet(RestServlet):
+    """Reset destinations' connection timeouts and wake it up.
+    This needs user to have administrator access in Synapse.
+
+    POST /_synapse/admin/v1/federation/destinations/<destination>/reset_connection
+    {}
+
+    returns:
+        200 OK otherwise an error.
+    """
+
+    PATTERNS = admin_patterns(
+        "/federation/destinations/(?P<destination>[^/]+)/reset_connection$"
+    )
+
+    def __init__(self, hs: "HomeServer"):
+        self._auth = hs.get_auth()
+        self._store = hs.get_datastore()
+        self._authenticator = Authenticator(hs)
+
+    async def on_POST(
+        self, request: SynapseRequest, destination: str
+    ) -> Tuple[int, JsonDict]:
+        await assert_requester_is_admin(self._auth, request)
+
+        if not await self._store.is_destination_known(destination):
+            raise NotFoundError("Unknown destination")
+
+        retry_timings = await self._store.get_destination_retry_timings(destination)
+        if not (retry_timings and retry_timings.retry_last_ts):
+            raise SynapseError(
+                HTTPStatus.BAD_REQUEST,
+                "The retry timing does not need to be reset for this destination.",
+            )
+
+        # reset timings and wake up
+        await self._authenticator.reset_retry_timings(destination)
+
+        return HTTPStatus.OK, {}
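The two new servlets are exercised over the admin API paths documented in their docstrings. A hedged sketch of client-side usage with the requests library (the homeserver URL, destination name and token are placeholders):

import requests

BASE = "https://synapse.example.com"
AUTH = {"Authorization": "Bearer <admin access token>"}
DEST = "remote.example.org"

# Page through the rooms this server shares with the destination.
rooms = requests.get(
    f"{BASE}/_synapse/admin/v1/federation/destinations/{DEST}/rooms",
    headers=AUTH,
    params={"from": 0, "limit": 10},
).json()

# Clear the destination's retry timings and wake it up.
requests.post(
    f"{BASE}/_synapse/admin/v1/federation/destinations/{DEST}/reset_connection",
    headers=AUTH,
    json={},
)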
@@ -729,7 +729,7 @@ class RoomEventContextServlet(RestServlet):
        else:
            event_filter = None
 
-        results = await self.room_context_handler.get_event_context(
+        event_context = await self.room_context_handler.get_event_context(
            requester,
            room_id,
            event_id,
@@ -738,25 +738,34 @@ class RoomEventContextServlet(RestServlet):
            use_admin_priviledge=True,
        )
 
-        if not results:
+        if not event_context:
            raise SynapseError(
                HTTPStatus.NOT_FOUND, "Event not found.", errcode=Codes.NOT_FOUND
            )
 
        time_now = self.clock.time_msec()
-        aggregations = results.pop("aggregations", None)
-        results["events_before"] = self._event_serializer.serialize_events(
-            results["events_before"], time_now, bundle_aggregations=aggregations
-        )
-        results["event"] = self._event_serializer.serialize_event(
-            results["event"], time_now, bundle_aggregations=aggregations
-        )
-        results["events_after"] = self._event_serializer.serialize_events(
-            results["events_after"], time_now, bundle_aggregations=aggregations
-        )
-        results["state"] = self._event_serializer.serialize_events(
-            results["state"], time_now
-        )
+        results = {
+            "events_before": self._event_serializer.serialize_events(
+                event_context.events_before,
+                time_now,
+                bundle_aggregations=event_context.aggregations,
+            ),
+            "event": self._event_serializer.serialize_event(
+                event_context.event,
+                time_now,
+                bundle_aggregations=event_context.aggregations,
+            ),
+            "events_after": self._event_serializer.serialize_events(
+                event_context.events_after,
+                time_now,
+                bundle_aggregations=event_context.aggregations,
+            ),
+            "state": self._event_serializer.serialize_events(
+                event_context.state, time_now
+            ),
+            "start": event_context.start,
+            "end": event_context.end,
+        }
 
        return HTTPStatus.OK, results
 
@@ -66,7 +66,7 @@ class AccountDataServlet(RestServlet):
            raise AuthError(403, "Cannot get account data for other users.")
 
        event = await self.store.get_global_account_data_by_type_for_user(
-            account_data_type, user_id
+            user_id, account_data_type
        )
 
        if event is None:
@@ -339,12 +339,19 @@ class UsernameAvailabilityRestServlet(RestServlet):
            ),
        )
 
+        self.inhibit_user_in_use_error = (
+            hs.config.registration.inhibit_user_in_use_error
+        )
+
    async def on_GET(self, request: Request) -> Tuple[int, JsonDict]:
        if not self.hs.config.registration.enable_registration:
            raise SynapseError(
                403, "Registration has been disabled", errcode=Codes.FORBIDDEN
            )
 
+        if self.inhibit_user_in_use_error:
+            return 200, {"available": True}
+
        ip = request.getClientIP()
        with self.ratelimiter.ratelimit(ip) as wait_deferred:
            await wait_deferred
@@ -418,10 +425,14 @@ class RegisterRestServlet(RestServlet):
        self.ratelimiter = hs.get_registration_ratelimiter()
        self.password_policy_handler = hs.get_password_policy_handler()
        self.clock = hs.get_clock()
+        self.password_auth_provider = hs.get_password_auth_provider()
        self._registration_enabled = self.hs.config.registration.enable_registration
        self._refresh_tokens_enabled = (
            hs.config.registration.refreshable_access_token_lifetime is not None
        )
+        self._inhibit_user_in_use_error = (
+            hs.config.registration.inhibit_user_in_use_error
+        )
 
        self._registration_flows = _calculate_registration_flows(
            hs.config, self.auth_handler
@@ -564,6 +575,7 @@ class RegisterRestServlet(RestServlet):
                    desired_username,
                    guest_access_token=guest_access_token,
                    assigned_user_id=registered_user_id,
+                    inhibit_user_in_use_error=self._inhibit_user_in_use_error,
                )
 
        # Check if the user-interactive authentication flows are complete, if
@@ -627,7 +639,16 @@ class RegisterRestServlet(RestServlet):
        if not password_hash:
            raise SynapseError(400, "Missing params: password", Codes.MISSING_PARAM)
 
-        desired_username = params.get("username", None)
+        desired_username = await (
+            self.password_auth_provider.get_username_for_registration(
+                auth_result,
+                params,
+            )
+        )
+
+        if desired_username is None:
+            desired_username = params.get("username", None)
+
        guest_access_token = params.get("guest_access_token", None)
 
        if desired_username is not None:
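With inhibit_user_in_use_error enabled, the availability endpoint short-circuits before any datastore lookup, which stops a client enumerating taken usernames; the real in-use check still happens when registration completes. A hedged sketch of what a client now sees (endpoint per the Matrix client-server API; the homeserver URL is a placeholder):

import requests

resp = requests.get(
    "https://synapse.example.com/_matrix/client/v3/register/available",
    params={"username": "alice"},
)
# With the option enabled this reports available regardless of whether
# "alice" is taken; without it, a taken name yields an M_USER_IN_USE error.
print(resp.json())  # {"available": True}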
@@ -706,27 +706,36 @@ class RoomEventContextServlet(RestServlet):
        else:
            event_filter = None
 
-        results = await self.room_context_handler.get_event_context(
+        event_context = await self.room_context_handler.get_event_context(
            requester, room_id, event_id, limit, event_filter
        )
 
-        if not results:
+        if not event_context:
            raise SynapseError(404, "Event not found.", errcode=Codes.NOT_FOUND)
 
        time_now = self.clock.time_msec()
-        aggregations = results.pop("aggregations", None)
-        results["events_before"] = self._event_serializer.serialize_events(
-            results["events_before"], time_now, bundle_aggregations=aggregations
-        )
-        results["event"] = self._event_serializer.serialize_event(
-            results["event"], time_now, bundle_aggregations=aggregations
-        )
-        results["events_after"] = self._event_serializer.serialize_events(
-            results["events_after"], time_now, bundle_aggregations=aggregations
-        )
-        results["state"] = self._event_serializer.serialize_events(
-            results["state"], time_now
-        )
+        results = {
+            "events_before": self._event_serializer.serialize_events(
+                event_context.events_before,
+                time_now,
+                bundle_aggregations=event_context.aggregations,
+            ),
+            "event": self._event_serializer.serialize_event(
+                event_context.event,
+                time_now,
+                bundle_aggregations=event_context.aggregations,
+            ),
+            "events_after": self._event_serializer.serialize_events(
+                event_context.events_after,
+                time_now,
+                bundle_aggregations=event_context.aggregations,
+            ),
+            "state": self._event_serializer.serialize_events(
+                event_context.state, time_now
+            ),
+            "start": event_context.start,
+            "end": event_context.end,
+        }
 
        return 200, results
 
@@ -48,6 +48,7 @@ from synapse.http.server import HttpServer
 from synapse.http.servlet import RestServlet, parse_boolean, parse_integer, parse_string
 from synapse.http.site import SynapseRequest
 from synapse.logging.opentracing import trace
+from synapse.storage.databases.main.relations import BundledAggregations
 from synapse.types import JsonDict, StreamToken
 from synapse.util import json_decoder
 
@@ -526,7 +527,7 @@ class SyncRestServlet(RestServlet):
 
        def serialize(
            events: Iterable[EventBase],
-            aggregations: Optional[Dict[str, Dict[str, Any]]] = None,
+            aggregations: Optional[Dict[str, BundledAggregations]] = None,
        ) -> List[JsonDict]:
            return self._event_serializer.serialize_events(
                events,
@@ -321,14 +321,33 @@ def _iterate_over_text(
 
 
 def rebase_url(url: str, base: str) -> str:
-    base_parts = list(urlparse.urlparse(base))
+    """
+    Resolves a potentially relative `url` against an absolute `base` URL.
+
+    For example:
+
+    >>> rebase_url("subpage", "https://example.com/foo/")
+    'https://example.com/foo/subpage'
+    >>> rebase_url("sibling", "https://example.com/foo")
+    'https://example.com/sibling'
+    >>> rebase_url("/bar", "https://example.com/foo/")
+    'https://example.com/bar'
+    >>> rebase_url("https://alice.com/a/", "https://example.com/foo/")
+    'https://alice.com/a'
+    """
+    base_parts = urlparse.urlparse(base)
+    # Convert the parsed URL to a list for (potential) modification.
     url_parts = list(urlparse.urlparse(url))
-    if not url_parts[0]:  # fix up schema
-        url_parts[0] = base_parts[0] or "http"
-    if not url_parts[1]:  # fix up hostname
-        url_parts[1] = base_parts[1]
+    # Add a scheme, if one does not exist.
+    if not url_parts[0]:
+        url_parts[0] = base_parts.scheme or "http"
+    # Fix up the hostname, if this is not a data URL.
+    if url_parts[0] != "data" and not url_parts[1]:
+        url_parts[1] = base_parts.netloc
+    # If the path does not start with a /, nest it under the base path's last
+    # directory.
     if not url_parts[2].startswith("/"):
-        url_parts[2] = re.sub(r"/[^/]+$", "/", base_parts[2]) + url_parts[2]
+        url_parts[2] = re.sub(r"/[^/]+$", "/", base_parts.path) + url_parts[2]
     return urlparse.urlunparse(url_parts)
 
 
@@ -21,8 +21,9 @@ import re
 import shutil
 import sys
 import traceback
-from typing import TYPE_CHECKING, Iterable, Optional, Tuple
+from typing import TYPE_CHECKING, BinaryIO, Iterable, Optional, Tuple
 from urllib import parse as urlparse
+from urllib.request import urlopen
 
 import attr
 
@@ -70,6 +71,17 @@ ONE_DAY = 24 * ONE_HOUR
 IMAGE_CACHE_EXPIRY_MS = 2 * ONE_DAY
 
 
+@attr.s(slots=True, frozen=True, auto_attribs=True)
+class DownloadResult:
+    length: int
+    uri: str
+    response_code: int
+    media_type: str
+    download_name: Optional[str]
+    expires: int
+    etag: Optional[str]
+
+
 @attr.s(slots=True, frozen=True, auto_attribs=True)
 class MediaInfo:
     """
@@ -256,7 +268,7 @@ class PreviewUrlResource(DirectServeJsonResource):
        if oembed_url:
            url_to_download = oembed_url
 
-        media_info = await self._download_url(url_to_download, user)
+        media_info = await self._handle_url(url_to_download, user)
 
        logger.debug("got media_info of '%s'", media_info)
 
@@ -297,7 +309,9 @@ class PreviewUrlResource(DirectServeJsonResource):
            oembed_url = self._oembed.autodiscover_from_html(tree)
            og_from_oembed: JsonDict = {}
            if oembed_url:
-                oembed_info = await self._download_url(oembed_url, user)
+                oembed_info = await self._handle_url(
+                    oembed_url, user, allow_data_urls=True
+                )
                (
                    og_from_oembed,
                    author_name,
@@ -367,7 +381,135 @@ class PreviewUrlResource(DirectServeJsonResource):
 
        return jsonog.encode("utf8")
 
-    async def _download_url(self, url: str, user: UserID) -> MediaInfo:
+    async def _download_url(self, url: str, output_stream: BinaryIO) -> DownloadResult:
+        """
+        Fetches a remote URL and parses the headers.
+
+        Args:
+            url: The URL to fetch.
+            output_stream: The stream to write the content to.
+
+        Returns:
+            A tuple of:
+                Media length, URL downloaded, the HTTP response code,
+                the media type, the downloaded file name, the number of
+                milliseconds the result is valid for, the etag header.
+        """
+
+        try:
+            logger.debug("Trying to get preview for url '%s'", url)
+            length, headers, uri, code = await self.client.get_file(
+                url,
+                output_stream=output_stream,
+                max_size=self.max_spider_size,
+                headers={"Accept-Language": self.url_preview_accept_language},
+            )
+        except SynapseError:
+            # Pass SynapseErrors through directly, so that the servlet
+            # handler will return a SynapseError to the client instead of
+            # blank data or a 500.
+            raise
+        except DNSLookupError:
+            # DNS lookup returned no results
+            # Note: This will also be the case if one of the resolved IP
+            # addresses is blacklisted
+            raise SynapseError(
+                502,
+                "DNS resolution failure during URL preview generation",
+                Codes.UNKNOWN,
+            )
+        except Exception as e:
+            # FIXME: pass through 404s and other error messages nicely
+            logger.warning("Error downloading %s: %r", url, e)
+
+            raise SynapseError(
+                500,
+                "Failed to download content: %s"
+                % (traceback.format_exception_only(sys.exc_info()[0], e),),
+                Codes.UNKNOWN,
+            )
+
+        if b"Content-Type" in headers:
+            media_type = headers[b"Content-Type"][0].decode("ascii")
+        else:
+            media_type = "application/octet-stream"
+
+        download_name = get_filename_from_headers(headers)
+
+        # FIXME: we should calculate a proper expiration based on the
+        # Cache-Control and Expire headers. But for now, assume 1 hour.
+        expires = ONE_HOUR
+        etag = headers[b"ETag"][0].decode("ascii") if b"ETag" in headers else None
+
+        return DownloadResult(
+            length, uri, code, media_type, download_name, expires, etag
+        )
+
+    async def _parse_data_url(
+        self, url: str, output_stream: BinaryIO
+    ) -> DownloadResult:
+        """
+        Parses a data: URL.
+
+        Args:
+            url: The URL to parse.
+            output_stream: The stream to write the content to.
+
+        Returns:
+            A tuple of:
+                Media length, URL downloaded, the HTTP response code,
+                the media type, the downloaded file name, the number of
+                milliseconds the result is valid for, the etag header.
+        """
+
+        try:
+            logger.debug("Trying to parse data url '%s'", url)
+            with urlopen(url) as url_info:
+                # TODO Can this be more efficient.
+                output_stream.write(url_info.read())
+        except Exception as e:
+            logger.warning("Error parsing data: URL %s: %r", url, e)
+
+            raise SynapseError(
+                500,
+                "Failed to parse data URL: %s"
+                % (traceback.format_exception_only(sys.exc_info()[0], e),),
+                Codes.UNKNOWN,
+            )
+
+        return DownloadResult(
+            # Read back the length that has been written.
+            length=output_stream.tell(),
+            uri=url,
+            # If it was parsed, consider this a 200 OK.
+            response_code=200,
+            # urlopen shoves the media-type from the data URL into the content type
+            # header object.
+            media_type=url_info.headers.get_content_type(),
+            # Some features are not supported by data: URLs.
+            download_name=None,
+            expires=ONE_HOUR,
+            etag=None,
+        )
+
+    async def _handle_url(
+        self, url: str, user: UserID, allow_data_urls: bool = False
+    ) -> MediaInfo:
+        """
+        Fetches content from a URL and parses the result to generate a MediaInfo.
+
+        It uses the media storage provider to persist the fetched content and
+        stores the mapping into the database.
+
+        Args:
+            url: The URL to fetch.
+            user: The user who ahs requested this URL.
+            allow_data_urls: True if data URLs should be allowed.
+
+        Returns:
+            A MediaInfo object describing the fetched content.
+        """
+
        # TODO: we should probably honour robots.txt... except in practice
        # we're most likely being explicitly triggered by a human rather than a
        # bot, so are we really a robot?
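_parse_data_url leans on the standard library: urllib.request.urlopen already understands RFC 2397 data: URLs and exposes the declared media type through the response headers, which is exactly what the method reads back. A standalone demonstration (not Synapse code):

import io
from urllib.request import urlopen

buf = io.BytesIO()
with urlopen("data:text/plain;base64,aGVsbG8=") as url_info:
    buf.write(url_info.read())

print(buf.getvalue())                       # b'hello'
print(url_info.headers.get_content_type())  # 'text/plain'
print(buf.tell())                           # 5 -- the length DownloadResult records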
@@ -377,61 +519,27 @@ class PreviewUrlResource(DirectServeJsonResource):
        file_info = FileInfo(server_name=None, file_id=file_id, url_cache=True)
 
        with self.media_storage.store_into_file(file_info) as (f, fname, finish):
-            try:
-                logger.debug("Trying to get preview for url '%s'", url)
-                length, headers, uri, code = await self.client.get_file(
-                    url,
-                    output_stream=f,
-                    max_size=self.max_spider_size,
-                    headers={"Accept-Language": self.url_preview_accept_language},
-                )
-            except SynapseError:
-                # Pass SynapseErrors through directly, so that the servlet
-                # handler will return a SynapseError to the client instead of
-                # blank data or a 500.
-                raise
-            except DNSLookupError:
-                # DNS lookup returned no results
-                # Note: This will also be the case if one of the resolved IP
-                # addresses is blacklisted
-                raise SynapseError(
-                    502,
-                    "DNS resolution failure during URL preview generation",
-                    Codes.UNKNOWN,
-                )
-            except Exception as e:
-                # FIXME: pass through 404s and other error messages nicely
-                logger.warning("Error downloading %s: %r", url, e)
-
-                raise SynapseError(
-                    500,
-                    "Failed to download content: %s"
-                    % (traceback.format_exception_only(sys.exc_info()[0], e),),
-                    Codes.UNKNOWN,
-                )
-            await finish()
+            if url.startswith("data:"):
+                if not allow_data_urls:
+                    raise SynapseError(
+                        500, "Previewing of data: URLs is forbidden", Codes.UNKNOWN
+                    )
 
-            if b"Content-Type" in headers:
-                media_type = headers[b"Content-Type"][0].decode("ascii")
+                download_result = await self._parse_data_url(url, f)
            else:
-                media_type = "application/octet-stream"
+                download_result = await self._download_url(url, f)
 
-            download_name = get_filename_from_headers(headers)
-
-            # FIXME: we should calculate a proper expiration based on the
-            # Cache-Control and Expire headers. But for now, assume 1 hour.
-            expires = ONE_HOUR
-            etag = headers[b"ETag"][0].decode("ascii") if b"ETag" in headers else None
+            await finish()
 
            try:
                time_now_ms = self.clock.time_msec()
 
                await self.store.store_local_media(
                    media_id=file_id,
-                    media_type=media_type,
+                    media_type=download_result.media_type,
                    time_now_ms=time_now_ms,
-                    upload_name=download_name,
-                    media_length=length,
+                    upload_name=download_result.download_name,
+                    media_length=download_result.length,
                    user_id=user,
                    url_cache=url,
                )
@@ -444,16 +552,16 @@ class PreviewUrlResource(DirectServeJsonResource):
                raise
 
        return MediaInfo(
-            media_type=media_type,
-            media_length=length,
-            download_name=download_name,
+            media_type=download_result.media_type,
+            media_length=download_result.length,
+            download_name=download_result.download_name,
            created_ts_ms=time_now_ms,
            filesystem_id=file_id,
            filename=fname,
-            uri=uri,
-            response_code=code,
-            expires=expires,
-            etag=etag,
+            uri=download_result.uri,
+            response_code=download_result.response_code,
+            expires=download_result.expires,
+            etag=download_result.etag,
        )
 
    async def _precache_image_url(
@@ -474,8 +582,8 @@ class PreviewUrlResource(DirectServeJsonResource):
        # FIXME: it might be cleaner to use the same flow as the main /preview_url
        # request itself and benefit from the same caching etc. But for now we
        # just rely on the caching on the master request to speed things up.
-        image_info = await self._download_url(
-            rebase_url(og["og:image"], media_info.uri), user
+        image_info = await self._handle_url(
+            rebase_url(og["og:image"], media_info.uri), user, allow_data_urls=True
        )
 
        if _is_media(image_info.media_type):
@@ -702,6 +702,7 @@ class DatabasePool:
        func: Callable[..., R],
        *args: Any,
        db_autocommit: bool = False,
+        isolation_level: Optional[int] = None,
        **kwargs: Any,
    ) -> R:
        """Starts a transaction on the database and runs a given function
@@ -724,6 +725,7 @@ class DatabasePool:
                called multiple times if the transaction is retried, so must
                correctly handle that case.
 
+            isolation_level: Set the server isolation level for this transaction.
            args: positional args to pass to `func`
            kwargs: named args to pass to `func`
 
@@ -746,6 +748,7 @@ class DatabasePool:
                func,
                *args,
                db_autocommit=db_autocommit,
+                isolation_level=isolation_level,
                **kwargs,
            )
 
@@ -763,6 +766,7 @@ class DatabasePool:
        func: Callable[..., R],
        *args: Any,
        db_autocommit: bool = False,
+        isolation_level: Optional[int] = None,
        **kwargs: Any,
    ) -> R:
        """Wraps the .runWithConnection() method on the underlying db_pool.
@@ -775,6 +779,7 @@ class DatabasePool:
            db_autocommit: Whether to run the function in "autocommit" mode,
                i.e. outside of a transaction. This is useful for transaction
                that are only a single query. Currently only affects postgres.
+            isolation_level: Set the server isolation level for this transaction.
            kwargs: named args to pass to `func`
 
        Returns:
@@ -834,6 +839,10 @@ class DatabasePool:
            try:
                if db_autocommit:
                    self.engine.attempt_to_set_autocommit(conn, True)
+                if isolation_level is not None:
+                    self.engine.attempt_to_set_isolation_level(
+                        conn, isolation_level
+                    )
 
                db_conn = LoggingDatabaseConnection(
                    conn, self.engine, "runWithConnection"
@@ -842,6 +851,8 @@ class DatabasePool:
            finally:
                if db_autocommit:
                    self.engine.attempt_to_set_autocommit(conn, False)
+                if isolation_level:
+                    self.engine.attempt_to_set_isolation_level(conn, None)
 
        return await make_deferred_yieldable(
            self._db_pool.runWithConnection(inner_func, *args, **kwargs)
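The plumbing above lets an individual interaction opt into a stricter transaction isolation level while everything else keeps the engine default. A hedged sketch of a call site, assuming the IsolationLevel constants that accompany this change in synapse.storage.engines; the query is illustrative:

from synapse.storage.engines import IsolationLevel  # assumed import path


async def count_events(store) -> int:
    def count_txn(txn) -> int:
        txn.execute("SELECT COUNT(*) FROM events")
        return txn.fetchone()[0]

    # Only this transaction runs at SERIALIZABLE; the engine resets the
    # connection's level afterwards in the `finally` block above.
    return await store.db_pool.runInteraction(
        "count_events", count_txn, isolation_level=IsolationLevel.SERIALIZABLE
    )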
@@ -26,6 +26,7 @@ from synapse.storage.database import (
     LoggingTransaction,
 )
 from synapse.storage.databases.main.cache import CacheInvalidationWorkerStore
+from synapse.storage.databases.main.push_rule import PushRulesWorkerStore
 from synapse.storage.engines import PostgresEngine
 from synapse.storage.util.id_generators import (
     AbstractStreamIdGenerator,
@@ -44,7 +45,7 @@ if TYPE_CHECKING:
 logger = logging.getLogger(__name__)
 
 
-class AccountDataWorkerStore(CacheInvalidationWorkerStore):
+class AccountDataWorkerStore(PushRulesWorkerStore, CacheInvalidationWorkerStore):
    def __init__(
        self,
        database: DatabasePool,
@@ -158,9 +159,9 @@ class AccountDataWorkerStore(CacheInvalidationWorkerStore):
            "get_account_data_for_user", get_account_data_for_user_txn
        )
 
-    @cached(num_args=2, max_entries=5000)
+    @cached(num_args=2, max_entries=5000, tree=True)
    async def get_global_account_data_by_type_for_user(
-        self, data_type: str, user_id: str
+        self, user_id: str, data_type: str
    ) -> Optional[JsonDict]:
        """
        Returns:
@@ -179,7 +180,7 @@ class AccountDataWorkerStore(CacheInvalidationWorkerStore):
        else:
            return None
 
-    @cached(num_args=2)
+    @cached(num_args=2, tree=True)
    async def get_account_data_for_room(
        self, user_id: str, room_id: str
    ) -> Dict[str, JsonDict]:
@@ -210,7 +211,7 @@ class AccountDataWorkerStore(CacheInvalidationWorkerStore):
            "get_account_data_for_room", get_account_data_for_room_txn
        )
 
-    @cached(num_args=3, max_entries=5000)
+    @cached(num_args=3, max_entries=5000, tree=True)
    async def get_account_data_for_room_and_type(
        self, user_id: str, room_id: str, account_data_type: str
    ) -> Optional[JsonDict]:
@@ -392,7 +393,7 @@ class AccountDataWorkerStore(CacheInvalidationWorkerStore):
            for row in rows:
                if not row.room_id:
                    self.get_global_account_data_by_type_for_user.invalidate(
-                        (row.data_type, row.user_id)
+                        (row.user_id, row.data_type)
                    )
                self.get_account_data_for_user.invalidate((row.user_id,))
                self.get_account_data_for_room.invalidate((row.user_id, row.room_id))
@@ -476,7 +477,7 @@ class AccountDataWorkerStore(CacheInvalidationWorkerStore):
            self._account_data_stream_cache.entity_has_changed(user_id, next_id)
            self.get_account_data_for_user.invalidate((user_id,))
            self.get_global_account_data_by_type_for_user.invalidate(
-                (account_data_type, user_id)
+                (user_id, account_data_type)
            )
 
        return self._account_data_id_gen.get_current_token()
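Switching these @cached decorators to tree=True stores entries in a nested, per-argument structure, so a caller can invalidate every entry sharing a key prefix rather than supplying the full key; the argument reorder puts user_id first so the prefix is the user. The purge logic added in the next hunk depends on exactly this. A short sketch of the semantics (toy store; illustrative use of Synapse's cached decorator):

from synapse.util.caches.descriptors import cached


class ToyStore:
    @cached(num_args=3, max_entries=5000, tree=True)
    async def get_account_data_for_room_and_type(
        self, user_id: str, room_id: str, data_type: str
    ) -> dict:
        ...


# With tree=True, a partial key removes the whole subtree for that user:
#     store.get_account_data_for_room_and_type.invalidate((user_id,))
# Without it, invalidate() would need every full (user_id, room_id, type) key.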
@@ -546,6 +547,74 @@ class AccountDataWorkerStore(CacheInvalidationWorkerStore):
         for ignored_user_id in previously_ignored_users ^ currently_ignored_users:
             self._invalidate_cache_and_stream(txn, self.ignored_by, (ignored_user_id,))
 
+    async def purge_account_data_for_user(self, user_id: str) -> None:
+        """
+        Removes the account data for a user.
+
+        This is intended to be used upon user deactivation and also removes any
+        derived information from account data (e.g. push rules and ignored users).
+
+        Args:
+            user_id: The user ID to remove data for.
+        """
+
+        def purge_account_data_for_user_txn(txn: LoggingTransaction) -> None:
+            # Purge from the primary account_data tables.
+            self.db_pool.simple_delete_txn(
+                txn, table="account_data", keyvalues={"user_id": user_id}
+            )
+
+            self.db_pool.simple_delete_txn(
+                txn, table="room_account_data", keyvalues={"user_id": user_id}
+            )
+
+            # Purge from ignored_users where this user is the ignorer.
+            # N.B. We don't purge where this user is the ignoree, because that
+            # interferes with other users' account data.
+            # It's also not this user's data to delete!
+            self.db_pool.simple_delete_txn(
+                txn, table="ignored_users", keyvalues={"ignorer_user_id": user_id}
+            )
+
+            # Remove the push rules
+            self.db_pool.simple_delete_txn(
+                txn, table="push_rules", keyvalues={"user_name": user_id}
+            )
+            self.db_pool.simple_delete_txn(
+                txn, table="push_rules_enable", keyvalues={"user_name": user_id}
+            )
+            self.db_pool.simple_delete_txn(
+                txn, table="push_rules_stream", keyvalues={"user_id": user_id}
+            )
+
+            # Invalidate caches as appropriate
+            self._invalidate_cache_and_stream(
+                txn, self.get_account_data_for_room_and_type, (user_id,)
+            )
+            self._invalidate_cache_and_stream(
+                txn, self.get_account_data_for_user, (user_id,)
+            )
+            self._invalidate_cache_and_stream(
+                txn, self.get_global_account_data_by_type_for_user, (user_id,)
+            )
+            self._invalidate_cache_and_stream(
+                txn, self.get_account_data_for_room, (user_id,)
+            )
+            self._invalidate_cache_and_stream(
+                txn, self.get_push_rules_for_user, (user_id,)
+            )
+            self._invalidate_cache_and_stream(
+                txn, self.get_push_rules_enabled_for_user, (user_id,)
+            )
+            # This user might be contained in the ignored_by cache for other users,
+            # so we have to invalidate it all.
+            self._invalidate_all_cache_and_stream(txn, self.ignored_by)
+
+        await self.db_pool.runInteraction(
+            "purge_account_data_for_user_txn",
+            purge_account_data_for_user_txn,
+        )
+
+
 class AccountDataStore(AccountDataWorkerStore):
     pass
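The new store method above is the mechanism behind the deactivation tests added later in this commit. A minimal sketch of how a caller might invoke it; the surrounding handler and the `hs` handle are assumptions of this example, not code from the commit:

    # Hedged sketch: wiring purge_account_data_for_user into deactivation.
    async def deactivate_account(hs, user_id: str) -> None:
        store = hs.get_datastore()  # assumed HomeServer accessor
        # One transaction removes account_data, room_account_data, push rules
        # and ignored_users rows, then invalidates the related caches.
        await store.purge_account_data_for_user(user_id)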
@@ -384,7 +384,7 @@ class ApplicationServiceTransactionWorkerStore(
             "get_new_events_for_appservice", get_new_events_for_appservice_txn
         )
 
-        events = await self.get_events_as_list(event_ids)
+        events = await self.get_events_as_list(event_ids, get_prev_content=True)
 
         return upper_bound, events
@@ -65,7 +65,7 @@ class _NoChainCoverIndex(Exception):
         super().__init__("Unexpectedly no chain cover for events in %s" % (room_id,))
 
 
-class EventFederationWorkerStore(EventsWorkerStore, SignatureWorkerStore, SQLBaseStore):
+class EventFederationWorkerStore(SignatureWorkerStore, EventsWorkerStore, SQLBaseStore):
     def __init__(
         self,
         database: DatabasePool,
@@ -1389,6 +1389,8 @@ class PersistEventsStore:
                 "received_ts",
                 "sender",
                 "contains_url",
+                "state_key",
+                "rejection_reason",
             ),
             values=(
                 (
@@ -1405,8 +1407,10 @@ class PersistEventsStore:
                     self._clock.time_msec(),
                     event.sender,
                     "url" in event.content and isinstance(event.content["url"], str),
+                    event.get_state_key(),
+                    context.rejected or None,
                 )
-                for event, _ in events_and_contexts
+                for event, context in events_and_contexts
             ),
         )
 
@@ -1456,6 +1460,7 @@ class PersistEventsStore:
         for event, context in events_and_contexts:
             if context.rejected:
                 # Insert the event_id into the rejections table
+                # (events.rejection_reason has already been done)
                 self._store_rejections_txn(txn, event.event_id, context.rejected)
                 to_remove.add(event)
 
@@ -390,7 +390,6 @@ class PurgeEventsStore(StateGroupWorkerStore, CacheInvalidationWorkerStore):
             "event_search",
             "events",
             "group_rooms",
-            "public_room_list_stream",
             "receipts_graph",
             "receipts_linearized",
             "room_aliases",
@@ -13,17 +13,7 @@
 # limitations under the License.
 
 import logging
-from typing import (
-    TYPE_CHECKING,
-    Any,
-    Dict,
-    Iterable,
-    List,
-    Optional,
-    Tuple,
-    Union,
-    cast,
-)
+from typing import TYPE_CHECKING, Dict, Iterable, List, Optional, Tuple, Union, cast
 
 import attr
 from frozendict import frozendict
@@ -43,6 +33,7 @@ from synapse.storage.relations import (
     PaginationChunk,
     RelationPaginationToken,
 )
+from synapse.types import JsonDict
 from synapse.util.caches.descriptors import cached
 
 if TYPE_CHECKING:
@@ -51,6 +42,30 @@ if TYPE_CHECKING:
 logger = logging.getLogger(__name__)
 
 
+@attr.s(slots=True, frozen=True, auto_attribs=True)
+class _ThreadAggregation:
+    latest_event: EventBase
+    count: int
+    current_user_participated: bool
+
+
+@attr.s(slots=True, auto_attribs=True)
+class BundledAggregations:
+    """
+    The bundled aggregations for an event.
+
+    Some values require additional processing during serialization.
+    """
+
+    annotations: Optional[JsonDict] = None
+    references: Optional[JsonDict] = None
+    replace: Optional[EventBase] = None
+    thread: Optional[_ThreadAggregation] = None
+
+    def __bool__(self) -> bool:
+        return bool(self.annotations or self.references or self.replace or self.thread)
+
+
 class RelationsWorkerStore(SQLBaseStore):
     def __init__(
         self,
@@ -60,7 +75,6 @@ class RelationsWorkerStore(SQLBaseStore):
     ):
         super().__init__(database, db_conn, hs)
 
-        self._msc1849_enabled = hs.config.experimental.msc1849_enabled
         self._msc3440_enabled = hs.config.experimental.msc3440_enabled
 
     @cached(tree=True)
@@ -585,7 +599,7 @@ class RelationsWorkerStore(SQLBaseStore):
 
     async def _get_bundled_aggregation_for_event(
         self, event: EventBase, user_id: str
-    ) -> Optional[Dict[str, Any]]:
+    ) -> Optional[BundledAggregations]:
         """Generate bundled aggregations for an event.
 
         Note that this does not use a cache, but depends on cached methods.
@@ -616,24 +630,24 @@ class RelationsWorkerStore(SQLBaseStore):
         # The bundled aggregations to include, a mapping of relation type to a
         # type-specific value. Some types include the direct return type here
         # while others need more processing during serialization.
-        aggregations: Dict[str, Any] = {}
+        aggregations = BundledAggregations()
 
         annotations = await self.get_aggregation_groups_for_event(event_id, room_id)
         if annotations.chunk:
-            aggregations[RelationTypes.ANNOTATION] = annotations.to_dict()
+            aggregations.annotations = annotations.to_dict()
 
         references = await self.get_relations_for_event(
             event_id, room_id, RelationTypes.REFERENCE, direction="f"
         )
         if references.chunk:
-            aggregations[RelationTypes.REFERENCE] = references.to_dict()
+            aggregations.references = references.to_dict()
 
         edit = None
         if event.type == EventTypes.Message:
             edit = await self.get_applicable_edit(event_id, room_id)
 
         if edit:
-            aggregations[RelationTypes.REPLACE] = edit
+            aggregations.replace = edit
 
         # If this event is the start of a thread, include a summary of the replies.
         if self._msc3440_enabled:
@@ -644,11 +658,11 @@ class RelationsWorkerStore(SQLBaseStore):
                 event_id, room_id, user_id
             )
             if latest_thread_event:
-                aggregations[RelationTypes.THREAD] = {
-                    "latest_event": latest_thread_event,
-                    "count": thread_count,
-                    "current_user_participated": participated,
-                }
+                aggregations.thread = _ThreadAggregation(
+                    latest_event=latest_thread_event,
+                    count=thread_count,
+                    current_user_participated=participated,
+                )
 
         # Store the bundled aggregations in the event metadata for later use.
         return aggregations
@@ -657,7 +671,7 @@ class RelationsWorkerStore(SQLBaseStore):
         self,
         events: Iterable[EventBase],
         user_id: str,
-    ) -> Dict[str, Dict[str, Any]]:
+    ) -> Dict[str, BundledAggregations]:
         """Generate bundled aggregations for events.
 
         Args:
@@ -668,15 +682,12 @@ class RelationsWorkerStore(SQLBaseStore):
             A map of event ID to the bundled aggregation for the event. Not all
             events may have bundled aggregations in the results.
         """
-        # If bundled aggregations are disabled, nothing to do.
-        if not self._msc1849_enabled:
-            return {}
 
         # TODO Parallelize.
         results = {}
         for event in events:
             event_result = await self._get_bundled_aggregation_for_event(event, user_id)
-            if event_result is not None:
+            if event_result:
                 results[event.event_id] = event_result
 
         return results
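The `__bool__` hook on BundledAggregations is what lets the loop above write `if event_result:` instead of `if event_result is not None:`. A standalone sketch of the same pattern, using a stand-in class rather than Synapse's own:

    from typing import Optional

    import attr


    @attr.s(slots=True, auto_attribs=True)
    class Aggregations:  # stand-in for the BundledAggregations class above
        annotations: Optional[dict] = None
        replace: Optional[str] = None

        def __bool__(self) -> bool:
            # Falsy when nothing is bundled, so empty results are skipped.
            return bool(self.annotations or self.replace)


    print(bool(Aggregations()))                    # False: nothing bundled
    print(bool(Aggregations(replace="$edit_id")))  # True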
@@ -12,16 +12,19 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-from typing import Dict, Iterable, List, Tuple
+from typing import Collection, Dict, List, Tuple
 
 from unpaddedbase64 import encode_base64
 
-from synapse.storage._base import SQLBaseStore
-from synapse.storage.types import Cursor
+from synapse.crypto.event_signing import compute_event_reference_hash
+from synapse.storage.databases.main.events_worker import (
+    EventRedactBehaviour,
+    EventsWorkerStore,
+)
 from synapse.util.caches.descriptors import cached, cachedList
 
 
-class SignatureWorkerStore(SQLBaseStore):
+class SignatureWorkerStore(EventsWorkerStore):
     @cached()
     def get_event_reference_hash(self, event_id):
         # This is a dummy function to allow get_event_reference_hashes
@@ -32,7 +35,7 @@ class SignatureWorkerStore(SQLBaseStore):
         cached_method_name="get_event_reference_hash", list_name="event_ids", num_args=1
     )
     async def get_event_reference_hashes(
-        self, event_ids: Iterable[str]
+        self, event_ids: Collection[str]
     ) -> Dict[str, Dict[str, bytes]]:
         """Get all hashes for given events.
 
@@ -41,18 +44,27 @@ class SignatureWorkerStore(SQLBaseStore):
 
         Returns:
             A mapping of event ID to a mapping of algorithm to hash.
+            Returns an empty dict for a given event id if that event is unknown.
         """
+        events = await self.get_events(
+            event_ids,
+            redact_behaviour=EventRedactBehaviour.AS_IS,
+            allow_rejected=True,
+        )
 
-        def f(txn):
-            return {
-                event_id: self._get_event_reference_hashes_txn(txn, event_id)
-                for event_id in event_ids
-            }
+        hashes: Dict[str, Dict[str, bytes]] = {}
+        for event_id in event_ids:
+            event = events.get(event_id)
+            if event is None:
+                hashes[event_id] = {}
+            else:
+                ref_alg, ref_hash_bytes = compute_event_reference_hash(event)
+                hashes[event_id] = {ref_alg: ref_hash_bytes}
 
-        return await self.db_pool.runInteraction("get_event_reference_hashes", f)
+        return hashes
 
     async def add_event_hashes(
-        self, event_ids: Iterable[str]
+        self, event_ids: Collection[str]
     ) -> List[Tuple[str, Dict[str, str]]]:
         """
 
@@ -70,24 +82,6 @@ class SignatureWorkerStore(SQLBaseStore):
 
         return list(encoded_hashes.items())
 
-    def _get_event_reference_hashes_txn(
-        self, txn: Cursor, event_id: str
-    ) -> Dict[str, bytes]:
-        """Get all the hashes for a given PDU.
-        Args:
-            txn:
-            event_id: Id for the Event.
-        Returns:
-            A mapping of algorithm -> hash.
-        """
-        query = (
-            "SELECT algorithm, hash"
-            " FROM event_reference_hashes"
-            " WHERE event_id = ?"
-        )
-        txn.execute(query, (event_id,))
-        return {k: v for k, v in txn}
-
 
 class SignatureStore(SignatureWorkerStore):
     """Persistence for event signatures and hashes"""
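get_event_reference_hashes now recomputes hashes from the events themselves rather than reading the event_reference_hashes table. A hedged standalone sketch of roughly what a reference hash is; the real compute_event_reference_hash also redacts ("prunes") the event first, which is elided here:

    import hashlib
    from typing import Dict, Tuple

    from canonicaljson import encode_canonical_json  # already a Synapse dependency


    def reference_hash(event_dict: Dict) -> Tuple[str, bytes]:
        # Strip the keys that are excluded from the hash; pruning is omitted.
        stripped = {
            k: v
            for k, v in event_dict.items()
            if k not in ("signatures", "unsigned", "age_ts")
        }
        return "sha256", hashlib.sha256(encode_canonical_json(stripped)).digest()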
@@ -81,6 +81,14 @@ class _EventDictReturn:
     stream_ordering: int
 
 
+@attr.s(slots=True, frozen=True, auto_attribs=True)
+class _EventsAround:
+    events_before: List[EventBase]
+    events_after: List[EventBase]
+    start: RoomStreamToken
+    end: RoomStreamToken
+
+
 def generate_pagination_where_clause(
     direction: str,
     column_names: Tuple[str, str],
@@ -846,7 +854,7 @@ class StreamWorkerStore(EventsWorkerStore, SQLBaseStore):
         before_limit: int,
         after_limit: int,
         event_filter: Optional[Filter] = None,
-    ) -> dict:
+    ) -> _EventsAround:
         """Retrieve events and pagination tokens around a given event in a
         room.
         """
@@ -869,12 +877,12 @@ class StreamWorkerStore(EventsWorkerStore, SQLBaseStore):
             list(results["after"]["event_ids"]), get_prev_content=True
         )
 
-        return {
-            "events_before": events_before,
-            "events_after": events_after,
-            "start": results["before"]["token"],
-            "end": results["after"]["token"],
-        }
+        return _EventsAround(
+            events_before=events_before,
+            events_after=events_after,
+            start=results["before"]["token"],
+            end=results["after"]["token"],
+        )
 
     def _get_events_around_txn(
         self,
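Returning a frozen attrs object instead of a plain dict gives callers attribute access and catches typos at lookup time. A standalone illustration of the pattern with a stand-in class:

    import attr


    @attr.s(slots=True, frozen=True, auto_attribs=True)
    class EventsAround:  # stand-in for the _EventsAround class above
        events_before: list
        events_after: list


    around = EventsAround(events_before=[], events_after=["$e1"])
    print(around.events_after)  # attribute access replaces results["events_after"]
    # around.events_aftre would raise AttributeError instead of a silent KeyError path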
@@ -561,6 +561,54 @@ class TransactionWorkerStore(CacheInvalidationWorkerStore):
             "get_destinations_paginate_txn", get_destinations_paginate_txn
         )
 
+    async def get_destination_rooms_paginate(
+        self, destination: str, start: int, limit: int, direction: str = "f"
+    ) -> Tuple[List[JsonDict], int]:
+        """Function to retrieve a paginated list of destination's rooms.
+        This will return a json list of rooms and the
+        total number of rooms.
+
+        Args:
+            destination: the destination to query
+            start: start number to begin the query from
+            limit: number of rows to retrieve
+            direction: sort ascending or descending by room_id
+        Returns:
+            A tuple of a dict of rooms and a count of total rooms.
+        """
+
+        def get_destination_rooms_paginate_txn(
+            txn: LoggingTransaction,
+        ) -> Tuple[List[JsonDict], int]:
+
+            if direction == "b":
+                order = "DESC"
+            else:
+                order = "ASC"
+
+            sql = """
+                SELECT COUNT(*) as total_rooms
+                FROM destination_rooms
+                WHERE destination = ?
+            """
+            txn.execute(sql, [destination])
+            count = cast(Tuple[int], txn.fetchone())[0]
+
+            rooms = self.db_pool.simple_select_list_paginate_txn(
+                txn=txn,
+                table="destination_rooms",
+                orderby="room_id",
+                start=start,
+                limit=limit,
+                retcols=("room_id", "stream_ordering"),
+                order_direction=order,
+            )
+            return rooms, count
+
+        return await self.db_pool.runInteraction(
+            "get_destination_rooms_paginate_txn", get_destination_rooms_paginate_txn
+        )
+
     async def is_destination_known(self, destination: str) -> bool:
         """Check if a destination is known to the server."""
         result = await self.db_pool.simple_select_one_onecol(
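A hypothetical usage sketch for the new pagination method; the `store` handle and the page size are assumptions of this example, not part of the commit:

    async def first_page_of_destination_rooms(store, destination: str) -> int:
        rooms, total = await store.get_destination_rooms_paginate(
            destination, start=0, limit=10, direction="f"
        )
        for room in rooms:
            # Each entry carries the retcols above: room_id and stream_ordering.
            print(room["room_id"], room["stream_ordering"])
        return total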
@@ -12,11 +12,18 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 import abc
-from typing import Generic, TypeVar
+from enum import IntEnum
+from typing import Generic, Optional, TypeVar
 
 from synapse.storage.types import Connection
 
 
+class IsolationLevel(IntEnum):
+    READ_COMMITTED: int = 1
+    REPEATABLE_READ: int = 2
+    SERIALIZABLE: int = 3
+
+
 class IncorrectDatabaseSetup(RuntimeError):
     pass
 
@@ -109,3 +116,13 @@ class BaseDatabaseEngine(Generic[ConnectionType], metaclass=abc.ABCMeta):
         commit/rollback the connections.
         """
         ...
+
+    @abc.abstractmethod
+    def attempt_to_set_isolation_level(
+        self, conn: Connection, isolation_level: Optional[int]
+    ):
+        """Attempt to set the connections isolation level.
+
+        Note: This has no effect on SQLite3, as transactions are SERIALIZABLE by default.
+        """
+        ...
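Because IsolationLevel is an IntEnum, its members compare equal to plain ints, which is why the new abstract method can accept Optional[int] rather than the enum type itself. A standalone check:

    from enum import IntEnum


    class IsolationLevel(IntEnum):  # mirrors the enum added above
        READ_COMMITTED = 1
        REPEATABLE_READ = 2
        SERIALIZABLE = 3


    print(IsolationLevel.SERIALIZABLE == 3)  # True: usable wherever an int is expected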
@@ -13,8 +13,13 @@
 # limitations under the License.
 
 import logging
+from typing import Mapping, Optional
 
-from synapse.storage.engines._base import BaseDatabaseEngine, IncorrectDatabaseSetup
+from synapse.storage.engines._base import (
+    BaseDatabaseEngine,
+    IncorrectDatabaseSetup,
+    IsolationLevel,
+)
 from synapse.storage.types import Connection
 
 logger = logging.getLogger(__name__)
@@ -34,6 +39,15 @@ class PostgresEngine(BaseDatabaseEngine):
         self.synchronous_commit = database_config.get("synchronous_commit", True)
         self._version = None  # unknown as yet
 
+        self.isolation_level_map: Mapping[int, int] = {
+            IsolationLevel.READ_COMMITTED: self.module.extensions.ISOLATION_LEVEL_READ_COMMITTED,
+            IsolationLevel.REPEATABLE_READ: self.module.extensions.ISOLATION_LEVEL_REPEATABLE_READ,
+            IsolationLevel.SERIALIZABLE: self.module.extensions.ISOLATION_LEVEL_SERIALIZABLE,
+        }
+        self.default_isolation_level = (
+            self.module.extensions.ISOLATION_LEVEL_REPEATABLE_READ
+        )
+
     @property
     def single_threaded(self) -> bool:
         return False
@@ -46,8 +60,8 @@ class PostgresEngine(BaseDatabaseEngine):
             self._version = db_conn.server_version
 
         # Are we on a supported PostgreSQL version?
-        if not allow_outdated_version and self._version < 90600:
-            raise RuntimeError("Synapse requires PostgreSQL 9.6 or above.")
+        if not allow_outdated_version and self._version < 100000:
+            raise RuntimeError("Synapse requires PostgreSQL 10 or above.")
 
         with db_conn.cursor() as txn:
             txn.execute("SHOW SERVER_ENCODING")
@@ -104,9 +118,7 @@ class PostgresEngine(BaseDatabaseEngine):
         return sql.replace("?", "%s")
 
     def on_new_connection(self, db_conn):
-        db_conn.set_isolation_level(
-            self.module.extensions.ISOLATION_LEVEL_REPEATABLE_READ
-        )
+        db_conn.set_isolation_level(self.default_isolation_level)
 
         # Set the bytea output to escape, vs the default of hex
         cursor = db_conn.cursor()
@@ -175,3 +187,12 @@ class PostgresEngine(BaseDatabaseEngine):
 
     def attempt_to_set_autocommit(self, conn: Connection, autocommit: bool):
         return conn.set_session(autocommit=autocommit)  # type: ignore
+
+    def attempt_to_set_isolation_level(
+        self, conn: Connection, isolation_level: Optional[int]
+    ):
+        if isolation_level is None:
+            isolation_level = self.default_isolation_level
+        else:
+            isolation_level = self.isolation_level_map[isolation_level]
+        return conn.set_isolation_level(isolation_level)  # type: ignore
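For reference, the psycopg2 half of the mapping above, as a standalone sketch; the DSN is a placeholder and running it requires a live PostgreSQL:

    import psycopg2
    import psycopg2.extensions as ext

    conn = psycopg2.connect("dbname=synapse")  # placeholder DSN
    # The same driver call the engine makes once it has translated the enum.
    conn.set_isolation_level(ext.ISOLATION_LEVEL_REPEATABLE_READ)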
@@ -15,6 +15,7 @@ import platform
 import struct
 import threading
 import typing
+from typing import Optional
 
 from synapse.storage.engines import BaseDatabaseEngine
 from synapse.storage.types import Connection
@@ -122,6 +123,12 @@ class Sqlite3Engine(BaseDatabaseEngine["sqlite3.Connection"]):
         # set the connection to autocommit mode.
         pass
 
+    def attempt_to_set_isolation_level(
+        self, conn: Connection, isolation_level: Optional[int]
+    ):
+        # All transactions are SERIALIZABLE by default in sqllite
+        pass
+
 
 # Following functions taken from: https://github.com/coleifer/peewee
 
@@ -499,9 +499,12 @@ def _upgrade_existing_database(
                     module = importlib.util.module_from_spec(spec)
                     spec.loader.exec_module(module)  # type: ignore
 
-                    logger.info("Running script %s", relative_path)
-                    module.run_create(cur, database_engine)  # type: ignore
-                    if not is_empty:
+                    if hasattr(module, "run_create"):
+                        logger.info("Running %s:run_create", relative_path)
+                        module.run_create(cur, database_engine)  # type: ignore
+
+                    if not is_empty and hasattr(module, "run_upgrade"):
+                        logger.info("Running %s:run_upgrade", relative_path)
                         module.run_upgrade(cur, database_engine, config=config)  # type: ignore
                 elif ext == ".pyc" or file_name == "__pycache__":
                     # Sometimes .pyc files turn up anyway even though we've
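With the loader change above, run_create and run_upgrade become independent, optional hooks. A hypothetical delta script showing both; the file name and table are invented for illustration:

    # e.g. synapse/storage/schema/main/delta/68/05example.py (hypothetical name)


    def run_create(cur, database_engine):
        # Runs whenever the script is applied, on empty and existing databases alike.
        cur.execute("CREATE TABLE IF NOT EXISTS example_table (id BIGINT)")


    def run_upgrade(cur, database_engine, config):
        # Only runs when upgrading an existing (non-empty) database.
        cur.execute("INSERT INTO example_table (id) VALUES (1)")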
@@ -12,7 +12,7 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-SCHEMA_VERSION = 67  # remember to update the list below when updating
+SCHEMA_VERSION = 68  # remember to update the list below when updating
 """Represents the expectations made by the codebase about the database schema
 
 This should be incremented whenever the codebase changes its requirements on the
@@ -53,11 +53,18 @@ Changes in SCHEMA_VERSION = 66:
 
 Changes in SCHEMA_VERSION = 67:
     - state_events.prev_state is no longer written to.
+
+Changes in SCHEMA_VERSION = 68:
+    - event_reference_hashes is no longer read.
+    - `events` has `state_key` and `rejection_reason` columns, which are populated for
+      new events.
 """
 
 
 SCHEMA_COMPAT_VERSION = (
-    61  # 61: Remove unused tables `user_stats_historical` and `room_stats_historical`
+    # we now have `state_key` columns in both `events` and `state_events`, so
+    # now incompatible with synapses wth SCHEMA_VERSION < 66.
+    66
 )
 """Limit on how far the synapse codebase can be rolled back without breaking db compat
@@ -0,0 +1,18 @@
+/* Copyright 2022 The Matrix.org Foundation C.I.C
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+-- this table is unused as of Synapse 1.41
+DROP TABLE public_room_list_stream;

26 synapse/storage/schema/main/delta/68/01event_columns.sql Normal file

@@ -0,0 +1,26 @@
+/* Copyright 2022 The Matrix.org Foundation C.I.C
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+-- Add new colums to the `events` table which will (one day) make the `state_events`
+-- and `rejections` tables redundant.
+
+ALTER TABLE events
+    -- if this event is a state event, its state key
+    ADD COLUMN state_key TEXT DEFAULT NULL;
+
+
+ALTER TABLE events
+    -- if this event was rejected, the reason it was rejected.
+    ADD COLUMN rejection_reason TEXT DEFAULT NULL;
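A standalone illustration (sqlite3, toy schema) of what the two ALTERs do: nullable columns that default to NULL for existing rows, so only newly-persisted events carry values:

    import sqlite3

    con = sqlite3.connect(":memory:")
    con.execute("CREATE TABLE events (event_id TEXT PRIMARY KEY)")  # toy schema
    con.execute("ALTER TABLE events ADD COLUMN state_key TEXT DEFAULT NULL")
    con.execute("ALTER TABLE events ADD COLUMN rejection_reason TEXT DEFAULT NULL")
    con.execute(
        "INSERT INTO events VALUES (?, ?, ?)",
        ("$event1", "", None),  # a state event with an empty state key, not rejected
    )
    print(con.execute("SELECT * FROM events").fetchall())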
@@ -74,21 +74,21 @@ class StateFilter:
 
     @staticmethod
     def all() -> "StateFilter":
-        """Creates a filter that fetches everything.
+        """Returns a filter that fetches everything.
 
         Returns:
-            The new state filter.
+            The state filter.
         """
-        return StateFilter(types=frozendict(), include_others=True)
+        return _ALL_STATE_FILTER
 
     @staticmethod
     def none() -> "StateFilter":
-        """Creates a filter that fetches nothing.
+        """Returns a filter that fetches nothing.
 
         Returns:
             The new state filter.
         """
-        return StateFilter(types=frozendict(), include_others=False)
+        return _NONE_STATE_FILTER
 
     @staticmethod
     def from_types(types: Iterable[Tuple[str, Optional[str]]]) -> "StateFilter":
@@ -527,6 +527,10 @@ class StateFilter:
         )
 
 
+_ALL_STATE_FILTER = StateFilter(types=frozendict(), include_others=True)
+_NONE_STATE_FILTER = StateFilter(types=frozendict(), include_others=False)
+
+
 class StateGroupStorage:
     """High level interface to fetching state for event."""
 
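The payoff of the module-level singletons: repeated calls return the same frozen object, so the hot paths stop allocating and identity comparison becomes possible. A short sketch, assuming Synapse is importable:

    from synapse.storage.state import StateFilter

    assert StateFilter.all() is StateFilter.all()    # same object every call
    assert StateFilter.none() is StateFilter.none()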
@@ -87,7 +87,7 @@ async def filter_events_for_client(
     )
 
     ignore_dict_content = await storage.main.get_global_account_data_by_type_for_user(
-        AccountDataTypes.IGNORED_USER_LIST, user_id
+        user_id, AccountDataTypes.IGNORED_USER_LIST
     )
 
     ignore_list: FrozenSet[str] = frozenset()

219 tests/handlers/test_deactivate_account.py Normal file

@@ -0,0 +1,219 @@
+# Copyright 2021 The Matrix.org Foundation C.I.C.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+from http import HTTPStatus
+from typing import Any, Dict
+
+from twisted.test.proto_helpers import MemoryReactor
+
+from synapse.api.constants import AccountDataTypes
+from synapse.push.rulekinds import PRIORITY_CLASS_MAP
+from synapse.rest import admin
+from synapse.rest.client import account, login
+from synapse.server import HomeServer
+from synapse.util import Clock
+
+from tests.unittest import HomeserverTestCase
+
+
+class DeactivateAccountTestCase(HomeserverTestCase):
+    servlets = [
+        login.register_servlets,
+        admin.register_servlets,
+        account.register_servlets,
+    ]
+
+    def prepare(self, reactor: MemoryReactor, clock: Clock, hs: HomeServer) -> None:
+        self._store = hs.get_datastore()
+
+        self.user = self.register_user("user", "pass")
+        self.token = self.login("user", "pass")
+
+    def _deactivate_my_account(self):
+        """
+        Deactivates the account `self.user` using `self.token` and asserts
+        that it returns a 200 success code.
+        """
+        req = self.get_success(
+            self.make_request(
+                "POST",
+                "account/deactivate",
+                {
+                    "auth": {
+                        "type": "m.login.password",
+                        "user": self.user,
+                        "password": "pass",
+                    },
+                    "erase": True,
+                },
+                access_token=self.token,
+            )
+        )
+        self.assertEqual(req.code, HTTPStatus.OK, req)
+
+    def test_global_account_data_deleted_upon_deactivation(self) -> None:
+        """
+        Tests that global account data is removed upon deactivation.
+        """
+        # Add some account data
+        self.get_success(
+            self._store.add_account_data_for_user(
+                self.user,
+                AccountDataTypes.DIRECT,
+                {"@someone:remote": ["!somewhere:remote"]},
+            )
+        )
+
+        # Check that we actually added some.
+        self.assertIsNotNone(
+            self.get_success(
+                self._store.get_global_account_data_by_type_for_user(
+                    self.user, AccountDataTypes.DIRECT
+                )
+            ),
+        )
+
+        # Request the deactivation of our account
+        self._deactivate_my_account()
+
+        # Check that the account data does not persist.
+        self.assertIsNone(
+            self.get_success(
+                self._store.get_global_account_data_by_type_for_user(
+                    self.user, AccountDataTypes.DIRECT
+                )
+            ),
+        )
+
+    def test_room_account_data_deleted_upon_deactivation(self) -> None:
+        """
+        Tests that room account data is removed upon deactivation.
+        """
+        room_id = "!room:test"
+
+        # Add some room account data
+        self.get_success(
+            self._store.add_account_data_to_room(
+                self.user,
+                room_id,
+                "m.fully_read",
+                {"event_id": "$aaaa:test"},
+            )
+        )
+
+        # Check that we actually added some.
+        self.assertIsNotNone(
+            self.get_success(
+                self._store.get_account_data_for_room_and_type(
+                    self.user, room_id, "m.fully_read"
+                )
+            ),
+        )
+
+        # Request the deactivation of our account
+        self._deactivate_my_account()
+
+        # Check that the account data does not persist.
+        self.assertIsNone(
+            self.get_success(
+                self._store.get_account_data_for_room_and_type(
+                    self.user, room_id, "m.fully_read"
+                )
+            ),
+        )
+
+    def _is_custom_rule(self, push_rule: Dict[str, Any]) -> bool:
+        """
+        Default rules start with a dot: such as .m.rule and .im.vector.
+        This function returns true iff a rule is custom (not default).
+        """
+        return "/." not in push_rule["rule_id"]
+
+    def test_push_rules_deleted_upon_account_deactivation(self) -> None:
+        """
+        Push rules are a special case of account data.
+        They are stored separately but get sent to the client as account data in /sync.
+        This tests that deactivating a user deletes push rules along with the rest
+        of their account data.
+        """
+
+        # Add a push rule
+        self.get_success(
+            self._store.add_push_rule(
+                self.user,
+                "personal.override.rule1",
+                PRIORITY_CLASS_MAP["override"],
+                [],
+                [],
+            )
+        )
+
+        # Test the rule exists
+        push_rules = self.get_success(self._store.get_push_rules_for_user(self.user))
+        # Filter out default rules; we don't care
+        push_rules = list(filter(self._is_custom_rule, push_rules))
+        # Check our rule made it
+        self.assertEqual(
+            push_rules,
+            [
+                {
+                    "user_name": "@user:test",
+                    "rule_id": "personal.override.rule1",
+                    "priority_class": 5,
+                    "priority": 0,
+                    "conditions": [],
+                    "actions": [],
+                    "default": False,
+                }
+            ],
+            push_rules,
+        )
+
+        # Request the deactivation of our account
+        self._deactivate_my_account()
+
+        push_rules = self.get_success(self._store.get_push_rules_for_user(self.user))
+        # Filter out default rules; we don't care
+        push_rules = list(filter(self._is_custom_rule, push_rules))
+        # Check our rule no longer exists
+        self.assertEqual(push_rules, [], push_rules)
+
+    def test_ignored_users_deleted_upon_deactivation(self) -> None:
+        """
+        Ignored users are a special case of account data.
+        They get denormalised into the `ignored_users` table upon being stored as
+        account data.
+        Test that a user's list of ignored users is deleted upon deactivation.
+        """
+
+        # Add an ignored user
+        self.get_success(
+            self._store.add_account_data_for_user(
+                self.user,
+                AccountDataTypes.IGNORED_USER_LIST,
+                {"ignored_users": {"@sheltie:test": {}}},
+            )
+        )
+
+        # Test the user is ignored
+        self.assertEqual(
+            self.get_success(self._store.ignored_by("@sheltie:test")), {self.user}
+        )
+
+        # Request the deactivation of our account
+        self._deactivate_my_account()
+
+        # Test the user is no longer ignored by the user that was deactivated
+        self.assertEqual(
+            self.get_success(self._store.ignored_by("@sheltie:test")), set()
+        )
@@ -20,10 +20,11 @@ from unittest.mock import Mock
 from twisted.internet import defer
 
 import synapse
+from synapse.api.constants import LoginType
 from synapse.handlers.auth import load_legacy_password_auth_providers
 from synapse.module_api import ModuleApi
-from synapse.rest.client import devices, login, logout
-from synapse.types import JsonDict
+from synapse.rest.client import devices, login, logout, register
+from synapse.types import JsonDict, UserID
 
 from tests import unittest
 from tests.server import FakeChannel
@@ -156,6 +157,7 @@ class PasswordAuthProviderTests(unittest.HomeserverTestCase):
         login.register_servlets,
         devices.register_servlets,
         logout.register_servlets,
+        register.register_servlets,
     ]
 
     def setUp(self):
@@ -745,6 +747,79 @@ class PasswordAuthProviderTests(unittest.HomeserverTestCase):
         on_logged_out.assert_called_once()
         self.assertTrue(self.called)
 
+    def test_username(self):
+        """Tests that the get_username_for_registration callback can define the username
+        of a user when registering.
+        """
+        self._setup_get_username_for_registration()
+
+        username = "rin"
+        channel = self.make_request(
+            "POST",
+            "/register",
+            {
+                "username": username,
+                "password": "bar",
+                "auth": {"type": LoginType.DUMMY},
+            },
+        )
+        self.assertEqual(channel.code, 200)
+
+        # Our callback takes the username and appends "-foo" to it, check that's what we
+        # have.
+        mxid = channel.json_body["user_id"]
+        self.assertEqual(UserID.from_string(mxid).localpart, username + "-foo")
+
+    def test_username_uia(self):
+        """Tests that the get_username_for_registration callback is only called at the
+        end of the UIA flow.
+        """
+        m = self._setup_get_username_for_registration()
+
+        # Initiate the UIA flow.
+        username = "rin"
+        channel = self.make_request(
+            "POST",
+            "register",
+            {"username": username, "type": "m.login.password", "password": "bar"},
+        )
+        self.assertEqual(channel.code, 401)
+        self.assertIn("session", channel.json_body)
+
+        # Check that the callback hasn't been called yet.
+        m.assert_not_called()
+
+        # Finish the UIA flow.
+        session = channel.json_body["session"]
+        channel = self.make_request(
+            "POST",
+            "register",
+            {"auth": {"session": session, "type": LoginType.DUMMY}},
+        )
+        self.assertEqual(channel.code, 200, channel.json_body)
+        mxid = channel.json_body["user_id"]
+        self.assertEqual(UserID.from_string(mxid).localpart, username + "-foo")
+
+        # Check that the callback has been called.
+        m.assert_called_once()
+
+    def _setup_get_username_for_registration(self) -> Mock:
+        """Registers a get_username_for_registration callback that appends "-foo" to the
+        username the client is trying to register.
+        """
+
+        async def get_username_for_registration(uia_results, params):
+            self.assertIn(LoginType.DUMMY, uia_results)
+            username = params["username"]
+            return username + "-foo"
+
+        m = Mock(side_effect=get_username_for_registration)
+
+        password_auth_provider = self.hs.get_password_auth_provider()
+        password_auth_provider.get_username_for_registration_callbacks.append(m)
+
+        return m
+
     def _get_login_flows(self) -> JsonDict:
         channel = self.make_request("GET", "/_matrix/client/r0/login")
         self.assertEqual(channel.code, 200, channel.result)
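The tests above exercise the new get_username_for_registration callback. A hedged sketch of a third-party module registering one: the registration call is assumed from Synapse's module API, and the "-staff" suffix is invented for illustration:

    from synapse.module_api import ModuleApi


    class UsernamePicker:
        def __init__(self, config: dict, api: ModuleApi):
            # Assumed module-API entry point for this callback family.
            api.register_password_auth_provider_callbacks(
                get_username_for_registration=self.pick_username,
            )

        async def pick_username(self, uia_results: dict, params: dict):
            # Invoked once UIA completes; returning None keeps the default.
            username = params.get("username")
            return username + "-staff" if username else None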
@ -11,12 +11,13 @@
|
||||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||||
# See the License for the specific language governing permissions and
|
# See the License for the specific language governing permissions and
|
||||||
# limitations under the License.
|
# limitations under the License.
|
||||||
|
from typing import Any, Dict
|
||||||
from unittest.mock import Mock
|
from unittest.mock import Mock
|
||||||
|
|
||||||
import synapse.types
|
import synapse.types
|
||||||
from synapse.api.errors import AuthError, SynapseError
|
from synapse.api.errors import AuthError, SynapseError
|
||||||
from synapse.rest import admin
|
from synapse.rest import admin
|
||||||
|
from synapse.server import HomeServer
|
||||||
from synapse.types import UserID
|
from synapse.types import UserID
|
||||||
|
|
||||||
from tests import unittest
|
from tests import unittest
|
||||||
|
@ -46,7 +47,7 @@ class ProfileTestCase(unittest.HomeserverTestCase):
|
||||||
)
|
)
|
||||||
return hs
|
return hs
|
||||||
|
|
||||||
def prepare(self, reactor, clock, hs):
|
def prepare(self, reactor, clock, hs: HomeServer):
|
||||||
self.store = hs.get_datastore()
|
self.store = hs.get_datastore()
|
||||||
|
|
||||||
self.frank = UserID.from_string("@1234abcd:test")
|
self.frank = UserID.from_string("@1234abcd:test")
|
||||||
|
@ -248,3 +249,92 @@ class ProfileTestCase(unittest.HomeserverTestCase):
|
||||||
),
|
),
|
||||||
SynapseError,
|
SynapseError,
|
||||||
)
|
)
|
||||||
|
|
||||||
|
def test_avatar_constraints_no_config(self):
|
||||||
|
"""Tests that the method to check an avatar against configured constraints skips
|
||||||
|
all of its check if no constraint is configured.
|
||||||
|
"""
|
||||||
|
# The first check that's done by this method is whether the file exists; if we
|
||||||
|
# don't get an error on a non-existing file then it means all of the checks were
|
||||||
|
# successfully skipped.
|
||||||
|
res = self.get_success(
|
||||||
|
self.handler.check_avatar_size_and_mime_type("mxc://test/unknown_file")
|
||||||
|
)
|
||||||
|
self.assertTrue(res)
|
||||||
|
|
||||||
|
@unittest.override_config({"max_avatar_size": 50})
|
||||||
|
def test_avatar_constraints_missing(self):
|
||||||
|
"""Tests that an avatar isn't allowed if the file at the given MXC URI couldn't
|
||||||
|
be found.
|
||||||
|
"""
|
||||||
|
res = self.get_success(
|
||||||
|
self.handler.check_avatar_size_and_mime_type("mxc://test/unknown_file")
|
||||||
|
)
|
||||||
|
self.assertFalse(res)
|
||||||
|
|
||||||
|
@unittest.override_config({"max_avatar_size": 50})
|
||||||
|
def test_avatar_constraints_file_size(self):
|
||||||
|
"""Tests that a file that's above the allowed file size is forbidden but one
|
||||||
|
that's below it is allowed.
|
||||||
|
"""
|
||||||
|
self._setup_local_files(
|
||||||
|
{
|
||||||
|
"small": {"size": 40},
|
||||||
|
"big": {"size": 60},
|
||||||
|
}
|
||||||
|
)
|
||||||
|
|
||||||
|
res = self.get_success(
|
||||||
|
self.handler.check_avatar_size_and_mime_type("mxc://test/small")
|
||||||
|
)
|
||||||
|
self.assertTrue(res)
|
||||||
|
|
||||||
|
res = self.get_success(
|
||||||
|
self.handler.check_avatar_size_and_mime_type("mxc://test/big")
|
||||||
|
)
|
||||||
|
self.assertFalse(res)
|
||||||
|
|
||||||
|
@unittest.override_config({"allowed_avatar_mimetypes": ["image/png"]})
|
||||||
|
def test_avatar_constraint_mime_type(self):
|
||||||
|
"""Tests that a file with an unauthorised MIME type is forbidden but one with
|
||||||
|
an authorised content type is allowed.
|
||||||
|
"""
|
||||||
|
self._setup_local_files(
|
||||||
|
{
|
||||||
|
"good": {"mimetype": "image/png"},
|
||||||
|
"bad": {"mimetype": "application/octet-stream"},
|
||||||
|
}
|
||||||
|
)
|
||||||
|
|
||||||
|
res = self.get_success(
|
||||||
|
self.handler.check_avatar_size_and_mime_type("mxc://test/good")
|
||||||
|
)
|
||||||
|
self.assertTrue(res)
|
||||||
|
|
||||||
|
res = self.get_success(
|
||||||
|
self.handler.check_avatar_size_and_mime_type("mxc://test/bad")
|
||||||
|
)
|
||||||
|
self.assertFalse(res)
|
||||||
|
|
||||||
|
def _setup_local_files(self, names_and_props: Dict[str, Dict[str, Any]]):
|
||||||
|
"""Stores metadata about files in the database.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
names_and_props: A dictionary with one entry per file, with the key being the
|
||||||
|
file's name, and the value being a dictionary of properties. Supported
|
||||||
|
properties are "mimetype" (for the file's type) and "size" (for the
|
||||||
|
file's size).
|
||||||
|
"""
|
||||||
|
store = self.hs.get_datastore()
|
||||||
|
|
||||||
|
for name, props in names_and_props.items():
|
||||||
|
self.get_success(
|
||||||
|
store.store_local_media(
|
||||||
|
media_id=name,
|
||||||
|
media_type=props.get("mimetype", "image/png"),
|
||||||
|
time_now_ms=self.clock.time_msec(),
|
||||||
|
upload_name=None,
|
||||||
|
media_length=props.get("size", 50),
|
||||||
|
user_id=UserID.from_string("@rin:test"),
|
||||||
|
)
|
||||||
|
)
|
||||||
|
|
|
@ -30,7 +30,7 @@ class SlavedAccountDataStoreTestCase(BaseSlavedStoreTestCase):
|
||||||
)
|
)
|
||||||
self.replicate()
|
self.replicate()
|
||||||
self.check(
|
self.check(
|
||||||
"get_global_account_data_by_type_for_user", [TYPE, USER_ID], {"a": 1}
|
"get_global_account_data_by_type_for_user", [USER_ID, TYPE], {"a": 1}
|
||||||
)
|
)
|
||||||
|
|
||||||
self.get_success(
|
self.get_success(
|
||||||
|
@ -38,5 +38,5 @@ class SlavedAccountDataStoreTestCase(BaseSlavedStoreTestCase):
|
||||||
)
|
)
|
||||||
self.replicate()
|
self.replicate()
|
||||||
self.check(
|
self.check(
|
||||||
"get_global_account_data_by_type_for_user", [TYPE, USER_ID], {"a": 2}
|
"get_global_account_data_by_type_for_user", [USER_ID, TYPE], {"a": 2}
|
||||||
)
|
)
|
||||||
|
|
|
@ -12,18 +12,20 @@
|
||||||
# See the License for the specific language governing permissions and
|
# See the License for the specific language governing permissions and
|
||||||
# limitations under the License.
|
# limitations under the License.
|
||||||
|
|
||||||
import os
|
|
||||||
import urllib.parse
|
import urllib.parse
|
||||||
from http import HTTPStatus
|
from http import HTTPStatus
|
||||||
from unittest.mock import Mock
|
from typing import List
|
||||||
|
|
||||||
from twisted.internet.defer import Deferred
|
from parameterized import parameterized
|
||||||
|
|
||||||
|
from twisted.test.proto_helpers import MemoryReactor
|
||||||
|
|
||||||
import synapse.rest.admin
|
import synapse.rest.admin
|
||||||
from synapse.http.server import JsonResource
|
from synapse.http.server import JsonResource
|
||||||
from synapse.logging.context import make_deferred_yieldable
|
|
||||||
from synapse.rest.admin import VersionServlet
|
from synapse.rest.admin import VersionServlet
|
||||||
from synapse.rest.client import groups, login, room
|
from synapse.rest.client import groups, login, room
|
||||||
|
from synapse.server import HomeServer
|
||||||
|
from synapse.util import Clock
|
||||||
|
|
||||||
from tests import unittest
|
from tests import unittest
|
||||||
from tests.server import FakeSite, make_request
|
from tests.server import FakeSite, make_request
|
||||||
|
@ -33,12 +35,12 @@ from tests.test_utils import SMALL_PNG
|
||||||
class VersionTestCase(unittest.HomeserverTestCase):
|
class VersionTestCase(unittest.HomeserverTestCase):
|
||||||
url = "/_synapse/admin/v1/server_version"
|
url = "/_synapse/admin/v1/server_version"
|
||||||
|
|
||||||
def create_test_resource(self):
|
def create_test_resource(self) -> JsonResource:
|
||||||
resource = JsonResource(self.hs)
|
resource = JsonResource(self.hs)
|
||||||
VersionServlet(self.hs).register(resource)
|
VersionServlet(self.hs).register(resource)
|
||||||
return resource
|
return resource
|
||||||
|
|
||||||
def test_version_string(self):
|
def test_version_string(self) -> None:
|
||||||
channel = self.make_request("GET", self.url, shorthand=False)
|
channel = self.make_request("GET", self.url, shorthand=False)
|
||||||
|
|
||||||
         self.assertEqual(HTTPStatus.OK, channel.code, msg=channel.json_body)

@@ -54,14 +56,14 @@ class DeleteGroupTestCase(unittest.HomeserverTestCase):
         groups.register_servlets,
     ]

-    def prepare(self, reactor, clock, hs):
+    def prepare(self, reactor: MemoryReactor, clock: Clock, hs: HomeServer) -> None:
         self.admin_user = self.register_user("admin", "pass", admin=True)
         self.admin_user_tok = self.login("admin", "pass")

         self.other_user = self.register_user("user", "pass")
         self.other_user_token = self.login("user", "pass")

-    def test_delete_group(self):
+    def test_delete_group(self) -> None:
         # Create a new group
         channel = self.make_request(
             "POST",
@@ -112,7 +114,7 @@ class DeleteGroupTestCase(unittest.HomeserverTestCase):
         self.assertNotIn(group_id, self._get_groups_user_is_in(self.admin_user_tok))
         self.assertNotIn(group_id, self._get_groups_user_is_in(self.other_user_token))

-    def _check_group(self, group_id, expect_code):
+    def _check_group(self, group_id: str, expect_code: int) -> None:
         """Assert that trying to fetch the given group results in the given
         HTTP status code
         """
@@ -124,7 +126,7 @@ class DeleteGroupTestCase(unittest.HomeserverTestCase):

         self.assertEqual(expect_code, channel.code, msg=channel.json_body)

-    def _get_groups_user_is_in(self, access_token):
+    def _get_groups_user_is_in(self, access_token: str) -> List[str]:
         """Returns the list of groups the user is in (given their access token)"""
         channel = self.make_request("GET", b"/joined_groups", access_token=access_token)

@@ -143,59 +145,15 @@ class QuarantineMediaTestCase(unittest.HomeserverTestCase):
         room.register_servlets,
     ]

-    def prepare(self, reactor, clock, hs):
+    def prepare(self, reactor: MemoryReactor, clock: Clock, hs: HomeServer) -> None:
         # Allow for uploading and downloading to/from the media repo
         self.media_repo = hs.get_media_repository_resource()
         self.download_resource = self.media_repo.children[b"download"]
         self.upload_resource = self.media_repo.children[b"upload"]

-    def make_homeserver(self, reactor, clock):
-
-        self.fetches = []
-
-        async def get_file(destination, path, output_stream, args=None, max_size=None):
-            """
-            Returns tuple[int,dict,str,int] of file length, response headers,
-            absolute URI, and response code.
-            """
-
-            def write_to(r):
-                data, response = r
-                output_stream.write(data)
-                return response
-
-            d = Deferred()
-            d.addCallback(write_to)
-            self.fetches.append((d, destination, path, args))
-            return await make_deferred_yieldable(d)
-
-        client = Mock()
-        client.get_file = get_file
-
-        self.storage_path = self.mktemp()
-        self.media_store_path = self.mktemp()
-        os.mkdir(self.storage_path)
-        os.mkdir(self.media_store_path)
-
-        config = self.default_config()
-        config["media_store_path"] = self.media_store_path
-        config["thumbnail_requirements"] = {}
-        config["max_image_pixels"] = 2000000
-
-        provider_config = {
-            "module": "synapse.rest.media.v1.storage_provider.FileStorageProviderBackend",
-            "store_local": True,
-            "store_synchronous": False,
-            "store_remote": True,
-            "config": {"directory": self.storage_path},
-        }
-        config["media_storage_providers"] = [provider_config]
-
-        hs = self.setup_test_homeserver(config=config, federation_http_client=client)
-
-        return hs
-
-    def _ensure_quarantined(self, admin_user_tok, server_and_media_id):
+    def _ensure_quarantined(
+        self, admin_user_tok: str, server_and_media_id: str
+    ) -> None:
         """Ensure a piece of media is quarantined when trying to access it."""
         channel = make_request(
             self.reactor,
@@ -216,12 +174,18 @@ class QuarantineMediaTestCase(unittest.HomeserverTestCase):
             ),
         )

-    def test_quarantine_media_requires_admin(self):
+    @parameterized.expand(
+        [
+            # Attempt quarantine media APIs as non-admin
+            "/_synapse/admin/v1/media/quarantine/example.org/abcde12345",
+            # And the roomID/userID endpoint
+            "/_synapse/admin/v1/room/!room%3Aexample.com/media/quarantine",
+        ]
+    )
+    def test_quarantine_media_requires_admin(self, url: str) -> None:
         self.register_user("nonadmin", "pass", admin=False)
         non_admin_user_tok = self.login("nonadmin", "pass")

-        # Attempt quarantine media APIs as non-admin
-        url = "/_synapse/admin/v1/media/quarantine/example.org/abcde12345"
         channel = self.make_request(
             "POST",
             url.encode("ascii"),
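A note on the pattern introduced above: `parameterized.expand` (from the
`parameterized` package these tests already use) generates one test case per
list entry. A bare string is passed to the test as a single extra argument,
while a tuple is unpacked into several arguments. A minimal self-contained
sketch (the class name and assertions are illustrative, not part of this
change):

    import unittest

    from parameterized import parameterized

    class ExpandSketch(unittest.TestCase):
        @parameterized.expand(
            [
                # each string becomes its own test case, bound to `url`
                "/_synapse/admin/v1/media/quarantine/example.org/abcde12345",
                "/_synapse/admin/v1/room/!room%3Aexample.com/media/quarantine",
            ]
        )
        def test_single_argument(self, url: str) -> None:
            self.assertTrue(url.startswith("/_synapse/admin/v1/"))

        @parameterized.expand(
            [
                # tuples are unpacked, matching (method, url) test signatures
                ("GET", "/_synapse/admin/v1/federation/destinations"),
                ("POST", "/_synapse/admin/v1/federation/destinations/dummy/reset_connection"),
            ]
        )
        def test_method_and_url(self, method: str, url: str) -> None:
            self.assertIn(method, ("GET", "POST"))

    if __name__ == "__main__":
        unittest.main()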
@@ -235,22 +199,7 @@ class QuarantineMediaTestCase(unittest.HomeserverTestCase):
             msg="Expected forbidden on quarantining media as a non-admin",
         )

-        # And the roomID/userID endpoint
-        url = "/_synapse/admin/v1/room/!room%3Aexample.com/media/quarantine"
-        channel = self.make_request(
-            "POST",
-            url.encode("ascii"),
-            access_token=non_admin_user_tok,
-        )
-
-        # Expect a forbidden error
-        self.assertEqual(
-            HTTPStatus.FORBIDDEN,
-            channel.code,
-            msg="Expected forbidden on quarantining media as a non-admin",
-        )
-
-    def test_quarantine_media_by_id(self):
+    def test_quarantine_media_by_id(self) -> None:
         self.register_user("id_admin", "pass", admin=True)
         admin_user_tok = self.login("id_admin", "pass")

@@ -295,7 +244,15 @@ class QuarantineMediaTestCase(unittest.HomeserverTestCase):
         # Attempt to access the media
         self._ensure_quarantined(admin_user_tok, server_name_and_media_id)

-    def test_quarantine_all_media_in_room(self, override_url_template=None):
+    @parameterized.expand(
+        [
+            # regular API path
+            "/_synapse/admin/v1/room/%s/media/quarantine",
+            # deprecated API path
+            "/_synapse/admin/v1/quarantine_media/%s",
+        ]
+    )
+    def test_quarantine_all_media_in_room(self, url: str) -> None:
         self.register_user("room_admin", "pass", admin=True)
         admin_user_tok = self.login("room_admin", "pass")

@@ -333,16 +290,9 @@ class QuarantineMediaTestCase(unittest.HomeserverTestCase):
             tok=non_admin_user_tok,
         )

-        # Quarantine all media in the room
-        if override_url_template:
-            url = override_url_template % urllib.parse.quote(room_id)
-        else:
-            url = "/_synapse/admin/v1/room/%s/media/quarantine" % urllib.parse.quote(
-                room_id
-            )
         channel = self.make_request(
             "POST",
-            url,
+            url % urllib.parse.quote(room_id),
             access_token=admin_user_tok,
         )
         self.pump(1.0)
@@ -359,11 +309,7 @@ class QuarantineMediaTestCase(unittest.HomeserverTestCase):
         self._ensure_quarantined(admin_user_tok, server_and_media_id_1)
         self._ensure_quarantined(admin_user_tok, server_and_media_id_2)

-    def test_quarantine_all_media_in_room_deprecated_api_path(self):
-        # Perform the above test with the deprecated API path
-        self.test_quarantine_all_media_in_room("/_synapse/admin/v1/quarantine_media/%s")
-
-    def test_quarantine_all_media_by_user(self):
+    def test_quarantine_all_media_by_user(self) -> None:
         self.register_user("user_admin", "pass", admin=True)
         admin_user_tok = self.login("user_admin", "pass")

@@ -401,7 +347,7 @@ class QuarantineMediaTestCase(unittest.HomeserverTestCase):
         self._ensure_quarantined(admin_user_tok, server_and_media_id_1)
         self._ensure_quarantined(admin_user_tok, server_and_media_id_2)

-    def test_cannot_quarantine_safe_media(self):
+    def test_cannot_quarantine_safe_media(self) -> None:
         self.register_user("user_admin", "pass", admin=True)
         admin_user_tok = self.login("user_admin", "pass")

@@ -475,7 +421,7 @@ class PurgeHistoryTestCase(unittest.HomeserverTestCase):
         room.register_servlets,
     ]

-    def prepare(self, reactor, clock, hs):
+    def prepare(self, reactor: MemoryReactor, clock: Clock, hs: HomeServer) -> None:
         self.admin_user = self.register_user("admin", "pass", admin=True)
         self.admin_user_tok = self.login("admin", "pass")

@@ -488,7 +434,7 @@ class PurgeHistoryTestCase(unittest.HomeserverTestCase):
         self.url = f"/_synapse/admin/v1/purge_history/{self.room_id}"
         self.url_status = "/_synapse/admin/v1/purge_history_status/"

-    def test_purge_history(self):
+    def test_purge_history(self) -> None:
         """
         Simple test of purge history API.
         Test only that is is possible to call, get status HTTPStatus.OK and purge_id.

@@ -20,7 +20,7 @@ from twisted.test.proto_helpers import MemoryReactor

 import synapse.rest.admin
 from synapse.api.errors import Codes
-from synapse.rest.client import login
+from synapse.rest.client import login, room
 from synapse.server import HomeServer
 from synapse.types import JsonDict
 from synapse.util import Clock
@@ -43,20 +43,22 @@ class FederationTestCase(unittest.HomeserverTestCase):

     @parameterized.expand(
         [
-            ("/_synapse/admin/v1/federation/destinations",),
-            ("/_synapse/admin/v1/federation/destinations/dummy",),
+            ("GET", "/_synapse/admin/v1/federation/destinations"),
+            ("GET", "/_synapse/admin/v1/federation/destinations/dummy"),
+            (
+                "POST",
+                "/_synapse/admin/v1/federation/destinations/dummy/reset_connection",
+            ),
         ]
     )
-    def test_requester_is_no_admin(self, url: str) -> None:
-        """
-        If the user is not a server admin, an error 403 is returned.
-        """
+    def test_requester_is_no_admin(self, method: str, url: str) -> None:
+        """If the user is not a server admin, an error 403 is returned."""
         self.register_user("user", "pass", admin=False)
         other_user_tok = self.login("user", "pass")

         channel = self.make_request(
-            "GET",
+            method,
             url,
             content={},
             access_token=other_user_tok,
@@ -66,9 +68,7 @@ class FederationTestCase(unittest.HomeserverTestCase):
         self.assertEqual(Codes.FORBIDDEN, channel.json_body["errcode"])

     def test_invalid_parameter(self) -> None:
-        """
-        If parameters are invalid, an error is returned.
-        """
+        """If parameters are invalid, an error is returned."""

         # negative limit
         channel = self.make_request(
@@ -120,10 +120,18 @@ class FederationTestCase(unittest.HomeserverTestCase):
         self.assertEqual(HTTPStatus.NOT_FOUND, channel.code, msg=channel.json_body)
         self.assertEqual(Codes.NOT_FOUND, channel.json_body["errcode"])

+        # invalid destination
+        channel = self.make_request(
+            "POST",
+            self.url + "/dummy/reset_connection",
+            access_token=self.admin_user_tok,
+        )
+
+        self.assertEqual(HTTPStatus.NOT_FOUND, channel.code, msg=channel.json_body)
+        self.assertEqual(Codes.NOT_FOUND, channel.json_body["errcode"])
+
     def test_limit(self) -> None:
-        """
-        Testing list of destinations with limit
-        """
+        """Testing list of destinations with limit"""

         number_destinations = 20
         self._create_destinations(number_destinations)
@@ -141,9 +149,7 @@ class FederationTestCase(unittest.HomeserverTestCase):
         self._check_fields(channel.json_body["destinations"])

     def test_from(self) -> None:
-        """
-        Testing list of destinations with a defined starting point (from)
-        """
+        """Testing list of destinations with a defined starting point (from)"""

         number_destinations = 20
         self._create_destinations(number_destinations)
@@ -161,9 +167,7 @@ class FederationTestCase(unittest.HomeserverTestCase):
         self._check_fields(channel.json_body["destinations"])

     def test_limit_and_from(self) -> None:
-        """
-        Testing list of destinations with a defined starting point and limit
-        """
+        """Testing list of destinations with a defined starting point and limit"""

         number_destinations = 20
         self._create_destinations(number_destinations)
@@ -181,9 +185,7 @@ class FederationTestCase(unittest.HomeserverTestCase):
         self._check_fields(channel.json_body["destinations"])

     def test_next_token(self) -> None:
-        """
-        Testing that `next_token` appears at the right place
-        """
+        """Testing that `next_token` appears at the right place"""

         number_destinations = 20
         self._create_destinations(number_destinations)
@@ -242,9 +244,7 @@ class FederationTestCase(unittest.HomeserverTestCase):
         self.assertNotIn("next_token", channel.json_body)

     def test_list_all_destinations(self) -> None:
-        """
-        List all destinations.
-        """
+        """List all destinations."""
         number_destinations = 5
         self._create_destinations(number_destinations)

@@ -263,9 +263,7 @@ class FederationTestCase(unittest.HomeserverTestCase):
         self._check_fields(channel.json_body["destinations"])

     def test_order_by(self) -> None:
-        """
-        Testing order list with parameter `order_by`
-        """
+        """Testing order list with parameter `order_by`"""

         def _order_test(
             expected_destination_list: List[str],
@@ -444,6 +442,39 @@ class FederationTestCase(unittest.HomeserverTestCase):
         self.assertIsNone(channel.json_body["failure_ts"])
         self.assertIsNone(channel.json_body["last_successful_stream_ordering"])

+    def test_destination_reset_connection(self) -> None:
+        """Reset timeouts and wake up destination."""
+        self._create_destination("sub0.example.com", 100, 100, 100)
+
+        channel = self.make_request(
+            "POST",
+            self.url + "/sub0.example.com/reset_connection",
+            access_token=self.admin_user_tok,
+        )
+
+        self.assertEqual(HTTPStatus.OK, channel.code, msg=channel.json_body)
+
+        retry_timings = self.get_success(
+            self.store.get_destination_retry_timings("sub0.example.com")
+        )
+        self.assertIsNone(retry_timings)
+
+    def test_destination_reset_connection_not_required(self) -> None:
+        """Try to reset timeouts of a destination with no timeouts and get an error."""
+        self._create_destination("sub0.example.com", None, 0, 0)
+
+        channel = self.make_request(
+            "POST",
+            self.url + "/sub0.example.com/reset_connection",
+            access_token=self.admin_user_tok,
+        )
+
+        self.assertEqual(HTTPStatus.BAD_REQUEST, channel.code, msg=channel.json_body)
+        self.assertEqual(
+            "The retry timing does not need to be reset for this destination.",
+            channel.json_body["error"],
+        )
+
     def _create_destination(
         self,
         destination: str,
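For context, the `reset_connection` endpoint exercised above can also be driven
against a running homeserver; a hedged sketch using `requests`, where the base
URL and the token are placeholders rather than anything from this change:

    import requests

    # POST with a server-admin access token clears the destination's retry timings.
    resp = requests.post(
        "http://localhost:8008/_synapse/admin/v1/federation/destinations"
        "/sub0.example.com/reset_connection",
        headers={"Authorization": "Bearer <admin_access_token>"},
    )
    print(resp.status_code, resp.json())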
@@ -496,3 +527,271 @@ class FederationTestCase(unittest.HomeserverTestCase):
         self.assertIn("retry_interval", c)
         self.assertIn("failure_ts", c)
         self.assertIn("last_successful_stream_ordering", c)
+
+
+class DestinationMembershipTestCase(unittest.HomeserverTestCase):
+    servlets = [
+        synapse.rest.admin.register_servlets,
+        login.register_servlets,
+        room.register_servlets,
+    ]
+
+    def prepare(self, reactor: MemoryReactor, clock: Clock, hs: HomeServer) -> None:
+        self.store = hs.get_datastore()
+        self.admin_user = self.register_user("admin", "pass", admin=True)
+        self.admin_user_tok = self.login("admin", "pass")
+
+        self.dest = "sub0.example.com"
+        self.url = f"/_synapse/admin/v1/federation/destinations/{self.dest}/rooms"
+
+        # Record that we successfully contacted a destination in the DB.
+        self.get_success(
+            self.store.set_destination_retry_timings(self.dest, None, 0, 0)
+        )
+
+    def test_requester_is_no_admin(self) -> None:
+        """If the user is not a server admin, an error 403 is returned."""
+
+        self.register_user("user", "pass", admin=False)
+        other_user_tok = self.login("user", "pass")
+
+        channel = self.make_request(
+            "GET",
+            self.url,
+            access_token=other_user_tok,
+        )
+
+        self.assertEqual(HTTPStatus.FORBIDDEN, channel.code, msg=channel.json_body)
+        self.assertEqual(Codes.FORBIDDEN, channel.json_body["errcode"])
+
+    def test_invalid_parameter(self) -> None:
+        """If parameters are invalid, an error is returned."""
+
+        # negative limit
+        channel = self.make_request(
+            "GET",
+            self.url + "?limit=-5",
+            access_token=self.admin_user_tok,
+        )
+
+        self.assertEqual(HTTPStatus.BAD_REQUEST, channel.code, msg=channel.json_body)
+        self.assertEqual(Codes.INVALID_PARAM, channel.json_body["errcode"])
+
+        # negative from
+        channel = self.make_request(
+            "GET",
+            self.url + "?from=-5",
+            access_token=self.admin_user_tok,
+        )
+
+        self.assertEqual(HTTPStatus.BAD_REQUEST, channel.code, msg=channel.json_body)
+        self.assertEqual(Codes.INVALID_PARAM, channel.json_body["errcode"])
+
+        # invalid search order
+        channel = self.make_request(
+            "GET",
+            self.url + "?dir=bar",
+            access_token=self.admin_user_tok,
+        )
+
+        self.assertEqual(HTTPStatus.BAD_REQUEST, channel.code, msg=channel.json_body)
+        self.assertEqual(Codes.INVALID_PARAM, channel.json_body["errcode"])
+
+        # invalid destination
+        channel = self.make_request(
+            "GET",
+            "/_synapse/admin/v1/federation/destinations/%s/rooms" % ("invalid",),
+            access_token=self.admin_user_tok,
+        )
+
+        self.assertEqual(HTTPStatus.NOT_FOUND, channel.code, msg=channel.json_body)
+        self.assertEqual(Codes.NOT_FOUND, channel.json_body["errcode"])
+
+    def test_limit(self) -> None:
+        """Testing list of destinations with limit"""
+
+        number_rooms = 5
+        self._create_destination_rooms(number_rooms)
+
+        channel = self.make_request(
+            "GET",
+            self.url + "?limit=3",
+            access_token=self.admin_user_tok,
+        )
+
+        self.assertEqual(HTTPStatus.OK, channel.code, msg=channel.json_body)
+        self.assertEqual(channel.json_body["total"], number_rooms)
+        self.assertEqual(len(channel.json_body["rooms"]), 3)
+        self.assertEqual(channel.json_body["next_token"], "3")
+        self._check_fields(channel.json_body["rooms"])
+
+    def test_from(self) -> None:
+        """Testing list of rooms with a defined starting point (from)"""
+
+        number_rooms = 10
+        self._create_destination_rooms(number_rooms)
+
+        channel = self.make_request(
+            "GET",
+            self.url + "?from=5",
+            access_token=self.admin_user_tok,
+        )
+
+        self.assertEqual(HTTPStatus.OK, channel.code, msg=channel.json_body)
+        self.assertEqual(channel.json_body["total"], number_rooms)
+        self.assertEqual(len(channel.json_body["rooms"]), 5)
+        self.assertNotIn("next_token", channel.json_body)
+        self._check_fields(channel.json_body["rooms"])
+
+    def test_limit_and_from(self) -> None:
+        """Testing list of rooms with a defined starting point and limit"""
+
+        number_rooms = 10
+        self._create_destination_rooms(number_rooms)
+
+        channel = self.make_request(
+            "GET",
+            self.url + "?from=3&limit=5",
+            access_token=self.admin_user_tok,
+        )
+
+        self.assertEqual(HTTPStatus.OK, channel.code, msg=channel.json_body)
+        self.assertEqual(channel.json_body["total"], number_rooms)
+        self.assertEqual(channel.json_body["next_token"], "8")
+        self.assertEqual(len(channel.json_body["rooms"]), 5)
+        self._check_fields(channel.json_body["rooms"])
+
+    def test_order_direction(self) -> None:
+        """Testing order list with parameter `dir`"""
+        number_rooms = 4
+        self._create_destination_rooms(number_rooms)
+
+        # get list in forward direction
+        channel_asc = self.make_request(
+            "GET",
+            self.url + "?dir=f",
+            access_token=self.admin_user_tok,
+        )
+
+        self.assertEqual(HTTPStatus.OK, channel_asc.code, msg=channel_asc.json_body)
+        self.assertEqual(channel_asc.json_body["total"], number_rooms)
+        self.assertEqual(number_rooms, len(channel_asc.json_body["rooms"]))
+        self._check_fields(channel_asc.json_body["rooms"])
+
+        # get list in backward direction
+        channel_desc = self.make_request(
+            "GET",
+            self.url + "?dir=b",
+            access_token=self.admin_user_tok,
+        )
+
+        self.assertEqual(HTTPStatus.OK, channel_desc.code, msg=channel_desc.json_body)
+        self.assertEqual(channel_desc.json_body["total"], number_rooms)
+        self.assertEqual(number_rooms, len(channel_desc.json_body["rooms"]))
+        self._check_fields(channel_desc.json_body["rooms"])
+
+        # test that both lists have different directions
+        for i in range(0, number_rooms):
+            self.assertEqual(
+                channel_asc.json_body["rooms"][i]["room_id"],
+                channel_desc.json_body["rooms"][number_rooms - 1 - i]["room_id"],
+            )
+
+    def test_next_token(self) -> None:
+        """Testing that `next_token` appears at the right place"""
+
+        number_rooms = 5
+        self._create_destination_rooms(number_rooms)
+
+        #  `next_token` does not appear
+        # Number of results is the number of entries
+        channel = self.make_request(
+            "GET",
+            self.url + "?limit=5",
+            access_token=self.admin_user_tok,
+        )
+
+        self.assertEqual(HTTPStatus.OK, channel.code, msg=channel.json_body)
+        self.assertEqual(channel.json_body["total"], number_rooms)
+        self.assertEqual(len(channel.json_body["rooms"]), number_rooms)
+        self.assertNotIn("next_token", channel.json_body)
+
+        #  `next_token` does not appear
+        # Number of max results is larger than the number of entries
+        channel = self.make_request(
+            "GET",
+            self.url + "?limit=6",
+            access_token=self.admin_user_tok,
+        )
+
+        self.assertEqual(HTTPStatus.OK, channel.code, msg=channel.json_body)
+        self.assertEqual(channel.json_body["total"], number_rooms)
+        self.assertEqual(len(channel.json_body["rooms"]), number_rooms)
+        self.assertNotIn("next_token", channel.json_body)
+
+        #  `next_token` does appear
+        # Number of max results is smaller than the number of entries
+        channel = self.make_request(
+            "GET",
+            self.url + "?limit=4",
+            access_token=self.admin_user_tok,
+        )
+
+        self.assertEqual(HTTPStatus.OK, channel.code, msg=channel.json_body)
+        self.assertEqual(channel.json_body["total"], number_rooms)
+        self.assertEqual(len(channel.json_body["rooms"]), 4)
+        self.assertEqual(channel.json_body["next_token"], "4")
+
+        # Check
+        # Set `from` to value of `next_token` for request remaining entries
+        # `next_token` does not appear
+        channel = self.make_request(
+            "GET",
+            self.url + "?from=4",
+            access_token=self.admin_user_tok,
+        )
+
+        self.assertEqual(HTTPStatus.OK, channel.code, msg=channel.json_body)
+        self.assertEqual(channel.json_body["total"], number_rooms)
+        self.assertEqual(len(channel.json_body["rooms"]), 1)
+        self.assertNotIn("next_token", channel.json_body)
+
+    def test_destination_rooms(self) -> None:
+        """Testing that request the list of rooms is successfully."""
+        number_rooms = 3
+        self._create_destination_rooms(number_rooms)
+
+        channel = self.make_request(
+            "GET",
+            self.url,
+            access_token=self.admin_user_tok,
+        )
+
+        self.assertEqual(HTTPStatus.OK, channel.code, msg=channel.json_body)
+        self.assertEqual(channel.json_body["total"], number_rooms)
+        self.assertEqual(number_rooms, len(channel.json_body["rooms"]))
+        self._check_fields(channel.json_body["rooms"])
+
+    def _create_destination_rooms(self, number_rooms: int) -> None:
+        """Create a number rooms for destination
+
+        Args:
+            number_rooms: Number of rooms to be created
+        """
+        for _ in range(0, number_rooms):
+            room_id = self.helper.create_room_as(
+                self.admin_user, tok=self.admin_user_tok
+            )
+            self.get_success(
+                self.store.store_destination_rooms_entries((self.dest,), room_id, 1234)
+            )
+
+    def _check_fields(self, content: List[JsonDict]) -> None:
+        """Checks that the expected room attributes are present in content
+
+        Args:
+            content: List that is checked for content
+        """
+        for c in content:
+            self.assertIn("room_id", c)
+            self.assertIn("stream_ordering", c)

@@ -2468,7 +2468,6 @@ PURGE_TABLES = [
     "event_search",
     "events",
     "group_rooms",
-    "public_room_list_stream",
     "receipts_graph",
     "receipts_linearized",
     "room_aliases",

File diff suppressed because it is too large.

@@ -14,9 +14,13 @@

 from http import HTTPStatus

+from twisted.test.proto_helpers import MemoryReactor
+
 import synapse.rest.admin
 from synapse.api.errors import Codes, SynapseError
 from synapse.rest.client import login
+from synapse.server import HomeServer
+from synapse.util import Clock

 from tests import unittest

@@ -28,11 +32,11 @@ class UsernameAvailableTestCase(unittest.HomeserverTestCase):
     ]
     url = "/_synapse/admin/v1/username_available"

-    def prepare(self, reactor, clock, hs):
+    def prepare(self, reactor: MemoryReactor, clock: Clock, hs: HomeServer) -> None:
         self.register_user("admin", "pass", admin=True)
         self.admin_user_tok = self.login("admin", "pass")

-        async def check_username(username):
+        async def check_username(username: str) -> bool:
            if username == "allowed":
                return True
            raise SynapseError(
@@ -44,24 +48,24 @@ class UsernameAvailableTestCase(unittest.HomeserverTestCase):
         handler = self.hs.get_registration_handler()
         handler.check_username = check_username

-    def test_username_available(self):
+    def test_username_available(self) -> None:
         """
         The endpoint should return a HTTPStatus.OK response if the username does not exist
         """

         url = "%s?username=%s" % (self.url, "allowed")
-        channel = self.make_request("GET", url, None, self.admin_user_tok)
+        channel = self.make_request("GET", url, access_token=self.admin_user_tok)

         self.assertEqual(HTTPStatus.OK, channel.code, msg=channel.json_body)
         self.assertTrue(channel.json_body["available"])

-    def test_username_unavailable(self):
+    def test_username_unavailable(self) -> None:
         """
         The endpoint should return a HTTPStatus.OK response if the username does not exist
         """

         url = "%s?username=%s" % (self.url, "disallowed")
-        channel = self.make_request("GET", url, None, self.admin_user_tok)
+        channel = self.make_request("GET", url, access_token=self.admin_user_tok)

         self.assertEqual(
             HTTPStatus.BAD_REQUEST,

@@ -13,8 +13,12 @@
 # limitations under the License.

 """Tests REST events for /profile paths."""
+from typing import Any, Dict

+from synapse.api.errors import Codes
 from synapse.rest import admin
 from synapse.rest.client import login, profile, room
+from synapse.types import UserID

 from tests import unittest

@@ -25,6 +29,7 @@ class ProfileTestCase(unittest.HomeserverTestCase):
         admin.register_servlets_for_client_rest_resource,
         login.register_servlets,
         profile.register_servlets,
+        room.register_servlets,
     ]

     def make_homeserver(self, reactor, clock):
@@ -150,6 +155,157 @@ class ProfileTestCase(unittest.HomeserverTestCase):
         self.assertEqual(channel.code, 200, channel.result)
         return channel.json_body.get("avatar_url")

+    @unittest.override_config({"max_avatar_size": 50})
+    def test_avatar_size_limit_global(self):
+        """Tests that the maximum size limit for avatars is enforced when updating a
+        global profile.
+        """
+        self._setup_local_files(
+            {
+                "small": {"size": 40},
+                "big": {"size": 60},
+            }
+        )
+
+        channel = self.make_request(
+            "PUT",
+            f"/profile/{self.owner}/avatar_url",
+            content={"avatar_url": "mxc://test/big"},
+            access_token=self.owner_tok,
+        )
+        self.assertEqual(channel.code, 403, channel.result)
+        self.assertEqual(
+            channel.json_body["errcode"], Codes.FORBIDDEN, channel.json_body
+        )
+
+        channel = self.make_request(
+            "PUT",
+            f"/profile/{self.owner}/avatar_url",
+            content={"avatar_url": "mxc://test/small"},
+            access_token=self.owner_tok,
+        )
+        self.assertEqual(channel.code, 200, channel.result)
+
+    @unittest.override_config({"max_avatar_size": 50})
+    def test_avatar_size_limit_per_room(self):
+        """Tests that the maximum size limit for avatars is enforced when updating a
+        per-room profile.
+        """
+        self._setup_local_files(
+            {
+                "small": {"size": 40},
+                "big": {"size": 60},
+            }
+        )
+
+        room_id = self.helper.create_room_as(tok=self.owner_tok)
+
+        channel = self.make_request(
+            "PUT",
+            f"/rooms/{room_id}/state/m.room.member/{self.owner}",
+            content={"membership": "join", "avatar_url": "mxc://test/big"},
+            access_token=self.owner_tok,
+        )
+        self.assertEqual(channel.code, 403, channel.result)
+        self.assertEqual(
+            channel.json_body["errcode"], Codes.FORBIDDEN, channel.json_body
+        )
+
+        channel = self.make_request(
+            "PUT",
+            f"/rooms/{room_id}/state/m.room.member/{self.owner}",
+            content={"membership": "join", "avatar_url": "mxc://test/small"},
+            access_token=self.owner_tok,
+        )
+        self.assertEqual(channel.code, 200, channel.result)
+
+    @unittest.override_config({"allowed_avatar_mimetypes": ["image/png"]})
+    def test_avatar_allowed_mime_type_global(self):
+        """Tests that the MIME type whitelist for avatars is enforced when updating a
+        global profile.
+        """
+        self._setup_local_files(
+            {
+                "good": {"mimetype": "image/png"},
+                "bad": {"mimetype": "application/octet-stream"},
+            }
+        )
+
+        channel = self.make_request(
+            "PUT",
+            f"/profile/{self.owner}/avatar_url",
+            content={"avatar_url": "mxc://test/bad"},
+            access_token=self.owner_tok,
+        )
+        self.assertEqual(channel.code, 403, channel.result)
+        self.assertEqual(
+            channel.json_body["errcode"], Codes.FORBIDDEN, channel.json_body
+        )
+
+        channel = self.make_request(
+            "PUT",
+            f"/profile/{self.owner}/avatar_url",
+            content={"avatar_url": "mxc://test/good"},
+            access_token=self.owner_tok,
+        )
+        self.assertEqual(channel.code, 200, channel.result)
+
+    @unittest.override_config({"allowed_avatar_mimetypes": ["image/png"]})
+    def test_avatar_allowed_mime_type_per_room(self):
+        """Tests that the MIME type whitelist for avatars is enforced when updating a
+        per-room profile.
+        """
+        self._setup_local_files(
+            {
+                "good": {"mimetype": "image/png"},
+                "bad": {"mimetype": "application/octet-stream"},
+            }
+        )
+
+        room_id = self.helper.create_room_as(tok=self.owner_tok)
+
+        channel = self.make_request(
+            "PUT",
+            f"/rooms/{room_id}/state/m.room.member/{self.owner}",
+            content={"membership": "join", "avatar_url": "mxc://test/bad"},
+            access_token=self.owner_tok,
+        )
+        self.assertEqual(channel.code, 403, channel.result)
+        self.assertEqual(
+            channel.json_body["errcode"], Codes.FORBIDDEN, channel.json_body
+        )
+
+        channel = self.make_request(
+            "PUT",
+            f"/rooms/{room_id}/state/m.room.member/{self.owner}",
+            content={"membership": "join", "avatar_url": "mxc://test/good"},
+            access_token=self.owner_tok,
+        )
+        self.assertEqual(channel.code, 200, channel.result)
+
+    def _setup_local_files(self, names_and_props: Dict[str, Dict[str, Any]]):
+        """Stores metadata about files in the database.
+
+        Args:
+            names_and_props: A dictionary with one entry per file, with the key being the
+                file's name, and the value being a dictionary of properties. Supported
+                properties are "mimetype" (for the file's type) and "size" (for the
+                file's size).
+        """
+        store = self.hs.get_datastore()
+
+        for name, props in names_and_props.items():
+            self.get_success(
+                store.store_local_media(
+                    media_id=name,
+                    media_type=props.get("mimetype", "image/png"),
+                    time_now_ms=self.clock.time_msec(),
+                    upload_name=None,
+                    media_length=props.get("size", 50),
+                    user_id=UserID.from_string("@rin:test"),
+                )
+            )
+
+
 class ProfilesRestrictedTestCase(unittest.HomeserverTestCase):

@@ -726,6 +726,47 @@ class RegisterRestServletTestCase(unittest.HomeserverTestCase):
             {"errcode": "M_UNKNOWN", "error": "Unable to parse email address"},
         )

+    @override_config(
+        {
+            "inhibit_user_in_use_error": True,
+        }
+    )
+    def test_inhibit_user_in_use_error(self):
+        """Tests that the 'inhibit_user_in_use_error' configuration flag behaves
+        correctly.
+        """
+        username = "arthur"
+
+        # Manually register the user, so we know the test isn't passing because of a lack
+        # of clashing.
+        reg_handler = self.hs.get_registration_handler()
+        self.get_success(reg_handler.register_user(username))
+
+        # Check that /available correctly ignores the username provided despite the
+        # username being already registered.
+        channel = self.make_request("GET", "register/available?username=" + username)
+        self.assertEquals(200, channel.code, channel.result)
+
+        # Test that when starting a UIA registration flow the request doesn't fail because
+        # of a conflicting username
+        channel = self.make_request(
+            "POST",
+            "register",
+            {"username": username, "type": "m.login.password", "password": "foo"},
+        )
+        self.assertEqual(channel.code, 401)
+        self.assertIn("session", channel.json_body)
+
+        # Test that finishing the registration fails because of a conflicting username.
+        session = channel.json_body["session"]
+        channel = self.make_request(
+            "POST",
+            "register",
+            {"auth": {"session": session, "type": LoginType.DUMMY}},
+        )
+        self.assertEqual(channel.code, 400, channel.json_body)
+        self.assertEqual(channel.json_body["errcode"], Codes.USER_IN_USE)
+
+
 class AccountValidityTestCase(unittest.HomeserverTestCase):

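The new test above leans on the standard two-step User-Interactive Auth
registration exchange; a sketch of the request/response shapes involved, with
illustrative values only:

    # Step 1: opening the flow returns 401 plus an opaque session id.
    first_request = {"username": "arthur", "type": "m.login.password", "password": "foo"}
    # -> 401, body contains {"session": "<opaque id>", "flows": [...]}

    # Step 2: completing auth with that session is when the username clash surfaces.
    second_request = {"auth": {"session": "<opaque id>", "type": "m.login.dummy"}}
    # -> 400, {"errcode": "M_USER_IN_USE"}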
@@ -577,7 +577,7 @@ class RelationsTestCase(unittest.HomeserverTestCase):
         self.assertEquals(200, channel.code, channel.json_body)
         room_timeline = channel.json_body["rooms"]["join"][self.room]["timeline"]
         self.assertTrue(room_timeline["limited"])
-        self._find_event_in_chunk(room_timeline["events"])
+        assert_bundle(self._find_event_in_chunk(room_timeline["events"]))

     def test_aggregation_get_event_for_annotation(self):
         """Test that annotations do not get bundled aggregations included

@@ -16,10 +16,11 @@ from synapse.rest.media.v1.preview_html import (
     _get_html_media_encodings,
     decode_body,
     parse_html_to_open_graph,
+    rebase_url,
     summarize_paragraphs,
 )

-from . import unittest
+from tests import unittest

 try:
     import lxml
@@ -447,3 +448,34 @@ class MediaEncodingTestCase(unittest.TestCase):
             'text/html; charset="invalid"',
         )
         self.assertEqual(list(encodings), ["utf-8", "cp1252"])
+
+
+class RebaseUrlTestCase(unittest.TestCase):
+    def test_relative(self):
+        """Relative URLs should be resolved based on the context of the base URL."""
+        self.assertEqual(
+            rebase_url("subpage", "https://example.com/foo/"),
+            "https://example.com/foo/subpage",
+        )
+        self.assertEqual(
+            rebase_url("sibling", "https://example.com/foo"),
+            "https://example.com/sibling",
+        )
+        self.assertEqual(
+            rebase_url("/bar", "https://example.com/foo/"),
+            "https://example.com/bar",
+        )
+
+    def test_absolute(self):
+        """Absolute URLs should not be modified."""
+        self.assertEqual(
+            rebase_url("https://alice.com/a/", "https://example.com/foo/"),
+            "https://alice.com/a/",
+        )
+
+    def test_data(self):
+        """Data URLs should not be modified."""
+        self.assertEqual(
+            rebase_url("data:,Hello%2C%20World%21", "https://example.com/foo/"),
+            "data:,Hello%2C%20World%21",
+        )
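For intuition, the expected values in these new tests match what the standard
library's `urllib.parse.urljoin` produces for the same inputs; `rebase_url` is
Synapse's own helper, so this is an analogy rather than its implementation:

    from urllib.parse import urljoin

    assert urljoin("https://example.com/foo/", "subpage") == "https://example.com/foo/subpage"
    assert urljoin("https://example.com/foo", "sibling") == "https://example.com/sibling"
    assert urljoin("https://example.com/foo/", "/bar") == "https://example.com/bar"
    assert urljoin("https://example.com/foo/", "https://alice.com/a/") == "https://alice.com/a/"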
@@ -12,9 +12,11 @@
 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 # See the License for the specific language governing permissions and
 # limitations under the License.
+import base64
 import json
 import os
 import re
+from urllib.parse import urlencode

 from twisted.internet._resolver import HostResolution
 from twisted.internet.address import IPv4Address, IPv6Address
@@ -23,6 +25,7 @@ from twisted.test.proto_helpers import AccumulatingProtocol

 from synapse.config.oembed import OEmbedEndpointConfig
 from synapse.rest.media.v1.preview_url_resource import IMAGE_CACHE_EXPIRY_MS
+from synapse.types import JsonDict
 from synapse.util.stringutils import parse_and_validate_mxc_uri

 from tests import unittest
@@ -142,6 +145,14 @@ class URLPreviewTests(unittest.HomeserverTestCase):
     def create_test_resource(self):
         return self.hs.get_media_repository_resource()

+    def _assert_small_png(self, json_body: JsonDict) -> None:
+        """Assert properties from the SMALL_PNG test image."""
+        self.assertTrue(json_body["og:image"].startswith("mxc://"))
+        self.assertEqual(json_body["og:image:height"], 1)
+        self.assertEqual(json_body["og:image:width"], 1)
+        self.assertEqual(json_body["og:image:type"], "image/png")
+        self.assertEqual(json_body["matrix:image:size"], 67)
+
     def test_cache_returns_correct_type(self):
         self.lookups["matrix.org"] = [(IPv4Address, "10.1.2.3")]

@@ -569,6 +580,66 @@ class URLPreviewTests(unittest.HomeserverTestCase):
             server.data,
         )

+    def test_data_url(self):
+        """
+        Requesting to preview a data URL is not supported.
+        """
+        self.lookups["matrix.org"] = [(IPv4Address, "10.1.2.3")]
+
+        data = base64.b64encode(SMALL_PNG).decode()
+
+        query_params = urlencode(
+            {
+                "url": f'<html><head><img src="data:image/png;base64,{data}" /></head></html>'
+            }
+        )
+
+        channel = self.make_request(
+            "GET",
+            f"preview_url?{query_params}",
+            shorthand=False,
+        )
+        self.pump()
+
+        self.assertEqual(channel.code, 500)
+
+    def test_inline_data_url(self):
+        """
+        An inline image (as a data URL) should be parsed properly.
+        """
+        self.lookups["matrix.org"] = [(IPv4Address, "10.1.2.3")]
+
+        data = base64.b64encode(SMALL_PNG)
+
+        end_content = (
+            b"<html><head>" b'<img src="data:image/png;base64,%s" />' b"</head></html>"
+        ) % (data,)
+
+        channel = self.make_request(
+            "GET",
+            "preview_url?url=http://matrix.org",
+            shorthand=False,
+            await_result=False,
+        )
+        self.pump()
+
+        client = self.reactor.tcpClients[0][2].buildProtocol(None)
+        server = AccumulatingProtocol()
+        server.makeConnection(FakeTransport(client, self.reactor))
+        client.makeConnection(FakeTransport(server, self.reactor))
+        client.dataReceived(
+            (
+                b"HTTP/1.0 200 OK\r\nContent-Length: %d\r\n"
+                b'Content-Type: text/html; charset="utf8"\r\n\r\n'
+            )
+            % (len(end_content),)
+            + end_content
+        )
+
+        self.pump()
+        self.assertEqual(channel.code, 200)
+        self._assert_small_png(channel.json_body)
+
     def test_oembed_photo(self):
         """Test an oEmbed endpoint which returns a 'photo' type which redirects the preview to a new URL."""
         self.lookups["publish.twitter.com"] = [(IPv4Address, "10.1.2.3")]
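Both data-URL tests above hinge on embedding base64-encoded image bytes in an
<img> tag; a standalone sketch of that construction, using a placeholder PNG
signature instead of Synapse's 67-byte SMALL_PNG fixture:

    import base64

    png_bytes = b"\x89PNG\r\n\x1a\n"  # PNG signature only, standing in for real image bytes
    data = base64.b64encode(png_bytes).decode()
    html = f'<html><head><img src="data:image/png;base64,{data}" /></head></html>'
    print(html)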
@@ -626,10 +697,7 @@ class URLPreviewTests(unittest.HomeserverTestCase):
         self.assertEqual(channel.code, 200)
         body = channel.json_body
         self.assertEqual(body["og:url"], "http://twitter.com/matrixdotorg/status/12345")
-        self.assertTrue(body["og:image"].startswith("mxc://"))
-        self.assertEqual(body["og:image:height"], 1)
-        self.assertEqual(body["og:image:width"], 1)
-        self.assertEqual(body["og:image:type"], "image/png")
+        self._assert_small_png(body)

     def test_oembed_rich(self):
         """Test an oEmbed endpoint which returns HTML content via the 'rich' type."""
@@ -820,10 +888,7 @@ class URLPreviewTests(unittest.HomeserverTestCase):
         self.assertEqual(
             body["og:url"], "http://www.twitter.com/matrixdotorg/status/12345"
         )
-        self.assertTrue(body["og:image"].startswith("mxc://"))
-        self.assertEqual(body["og:image:height"], 1)
-        self.assertEqual(body["og:image:width"], 1)
-        self.assertEqual(body["og:image:type"], "image/png")
+        self._assert_small_png(body)

     def _download_image(self):
         """Downloads an image into the URL cache.

@@ -313,7 +313,7 @@ def make_request(
     req = request(channel, site)
     req.content = BytesIO(content)
     # Twisted expects to be at the end of the content when parsing the request.
-    req.content.seek(SEEK_END)
+    req.content.seek(0, SEEK_END)

     if access_token:
         req.requestHeaders.addRawHeader(
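The one-line fix above matters because io's seek() treats a lone argument as an
absolute offset: SEEK_END is just the integer 2, so seek(SEEK_END) lands at
byte 2 instead of at the end of the buffer. A self-contained demonstration:

    from io import BytesIO, SEEK_END

    buf = BytesIO(b"request body")
    buf.seek(SEEK_END)       # bug: absolute position 2 (SEEK_END == 2)
    assert buf.tell() == 2
    buf.seek(0, SEEK_END)    # fix: offset 0 relative to the end of the stream
    assert buf.tell() == len(b"request body")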
Some files were not shown because too many files have changed in this diff.