diff --git a/.ci/complement_package.gotpl b/.ci/complement_package.gotpl index e1625fd31..2dd5e5e0e 100644 --- a/.ci/complement_package.gotpl +++ b/.ci/complement_package.gotpl @@ -27,10 +27,10 @@ which is under the Unlicense licence. {{- . -}}{{- "\n" -}} {{- end -}} {{- with .TestCases -}} - {{- /* Failing tests are first */ -}} + {{- /* Passing tests are first */ -}} {{- range . -}} - {{- if and (ne .Result "PASS") (ne .Result "SKIP") -}} - ::group::{{ "\033" }}[0;31m❌{{ " " }}{{- .Name -}} + {{- if eq .Result "PASS" -}} + ::group::{{ "\033" }}[0;32m✅{{ " " }}{{- .Name -}} {{- "\033" -}}[0;37m ({{if $settings.ShowTestStatus}}{{.Result}}; {{end}}{{ .Duration -}} {{- with .Coverage -}} , coverage: {{ . }}% @@ -47,7 +47,6 @@ which is under the Unlicense licence. {{- end -}} {{- end -}} - {{- /* Then skipped tests are second */ -}} {{- range . -}} {{- if eq .Result "SKIP" -}} @@ -68,11 +67,10 @@ which is under the Unlicense licence. {{- end -}} {{- end -}} - - {{- /* Then passing tests are last */ -}} + {{- /* and failing tests are last */ -}} {{- range . -}} - {{- if eq .Result "PASS" -}} - ::group::{{ "\033" }}[0;32m✅{{ " " }}{{- .Name -}} + {{- if and (ne .Result "PASS") (ne .Result "SKIP") -}} + ::group::{{ "\033" }}[0;31m❌{{ " " }}{{- .Name -}} {{- "\033" -}}[0;37m ({{if $settings.ShowTestStatus}}{{.Result}}; {{end}}{{ .Duration -}} {{- with .Coverage -}} , coverage: {{ . }}% diff --git a/.ci/scripts/calculate_jobs.py b/.ci/scripts/calculate_jobs.py new file mode 100755 index 000000000..b1f604eeb --- /dev/null +++ b/.ci/scripts/calculate_jobs.py @@ -0,0 +1,128 @@ +#!/usr/bin/env python +# Copyright 2022 The Matrix.org Foundation C.I.C. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# Calculate the trial jobs to run based on if we're in a PR or not. + +import json +import os + +IS_PR = os.environ["GITHUB_REF"].startswith("refs/pull/") + +# First calculate the various trial jobs. +# +# For each type of test we only run on Py3.7 on PRs + +trial_sqlite_tests = [ + { + "python-version": "3.7", + "database": "sqlite", + "extras": "all", + } +] + +if not IS_PR: + trial_sqlite_tests.extend( + { + "python-version": version, + "database": "sqlite", + "extras": "all", + } + for version in ("3.8", "3.9", "3.10") + ) + + +trial_postgres_tests = [ + { + "python-version": "3.7", + "database": "postgres", + "postgres-version": "10", + "extras": "all", + } +] + +if not IS_PR: + trial_postgres_tests.append( + { + "python-version": "3.10", + "database": "postgres", + "postgres-version": "14", + "extras": "all", + } + ) + +trial_no_extra_tests = [ + { + "python-version": "3.7", + "database": "sqlite", + "extras": "", + } +] + +print("::group::Calculated trial jobs") +print( + json.dumps( + trial_sqlite_tests + trial_postgres_tests + trial_no_extra_tests, indent=4 + ) +) +print("::endgroup::") + +test_matrix = json.dumps( + trial_sqlite_tests + trial_postgres_tests + trial_no_extra_tests +) +print(f"::set-output name=trial_test_matrix::{test_matrix}") + + +# First calculate the various sytest jobs. 
+# +# For each type of test we only run on focal on PRs + + +sytest_tests = [ + { + "sytest-tag": "focal", + }, + { + "sytest-tag": "focal", + "postgres": "postgres", + }, + { + "sytest-tag": "focal", + "postgres": "multi-postgres", + "workers": "workers", + }, +] + +if not IS_PR: + sytest_tests.extend( + [ + { + "sytest-tag": "testing", + "postgres": "postgres", + }, + { + "sytest-tag": "buster", + "postgres": "multi-postgres", + "workers": "workers", + }, + ] + ) + + +print("::group::Calculated sytest jobs") +print(json.dumps(sytest_tests, indent=4)) +print("::endgroup::") + +test_matrix = json.dumps(sytest_tests) +print(f"::set-output name=sytest_test_matrix::{test_matrix}") diff --git a/.ci/scripts/gotestfmt b/.ci/scripts/gotestfmt new file mode 100755 index 000000000..83e0ec636 --- /dev/null +++ b/.ci/scripts/gotestfmt @@ -0,0 +1,21 @@ +#!/bin/bash +# +# wraps `gotestfmt`, hiding output from successful packages unless +# all tests passed. + +set -o pipefail +set -e + +# tee the test results to a log, whilst also piping them into gotestfmt, +# telling it to hide successful results, so that we can clearly see +# unsuccessful results. +tee complement.log | gotestfmt -hide successful-packages + +# gotestfmt will exit non-zero if there were any failures, so if we got to this +# point, we must have had a successful result. +echo "All tests successful; showing all test results" + +# Pipe the test results back through gotestfmt, showing all results. +# The log file consists of JSON lines giving the test results, interspersed +# with regular stdout lines (including reports of downloaded packages). +grep '^{"Time":' complement.log | gotestfmt diff --git a/.github/workflows/latest_deps.yml b/.github/workflows/latest_deps.yml index f263cf612..7dac617c4 100644 --- a/.github/workflows/latest_deps.yml +++ b/.github/workflows/latest_deps.yml @@ -163,7 +163,7 @@ jobs: - run: | set -o pipefail - TEST_ONLY_IGNORE_POETRY_LOCKFILE=1 POSTGRES=${{ (matrix.database == 'Postgres') && 1 || '' }} WORKERS=${{ (matrix.arrangement == 'workers') && 1 || '' }} COMPLEMENT_DIR=`pwd`/complement synapse/scripts-dev/complement.sh -json 2>&1 | gotestfmt + TEST_ONLY_IGNORE_POETRY_LOCKFILE=1 POSTGRES=${{ (matrix.database == 'Postgres') && 1 || '' }} WORKERS=${{ (matrix.arrangement == 'workers') && 1 || '' }} COMPLEMENT_DIR=`pwd`/complement synapse/scripts-dev/complement.sh -json 2>&1 | synapse/.ci/scripts/gotestfmt shell: bash name: Run Complement Tests diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml index 144cb9ffa..bc1de2893 100644 --- a/.github/workflows/tests.yml +++ b/.github/workflows/tests.yml @@ -73,53 +73,48 @@ jobs: steps: - run: "true" - trial: + calculate-test-jobs: if: ${{ !cancelled() && !failure() }} # Allow previous steps to be skipped, but not fail needs: linting-done runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v2 + - uses: actions/setup-python@v2 + - id: get-matrix + run: .ci/scripts/calculate_jobs.py + outputs: + trial_test_matrix: ${{ steps.get-matrix.outputs.trial_test_matrix }} + sytest_test_matrix: ${{ steps.get-matrix.outputs.sytest_test_matrix }} + + trial: + if: ${{ !cancelled() && !failure() }} # Allow previous steps to be skipped, but not fail + needs: calculate-test-jobs + runs-on: ubuntu-latest strategy: matrix: - python-version: ["3.7", "3.8", "3.9", "3.10"] - database: ["sqlite"] - extras: ["all"] - include: - # Newest Python without optional deps - - python-version: "3.10" - extras: "" - - # Oldest Python with PostgreSQL - - python-version: "3.7" - 
database: "postgres" - postgres-version: "10" - extras: "all" - - # Newest Python with newest PostgreSQL - - python-version: "3.10" - database: "postgres" - postgres-version: "14" - extras: "all" + job: ${{ fromJson(needs.calculate-test-jobs.outputs.trial_test_matrix) }} steps: - uses: actions/checkout@v2 - run: sudo apt-get -qq install xmlsec1 - - name: Set up PostgreSQL ${{ matrix.postgres-version }} - if: ${{ matrix.postgres-version }} + - name: Set up PostgreSQL ${{ matrix.job.postgres-version }} + if: ${{ matrix.job.postgres-version }} run: | docker run -d -p 5432:5432 \ -e POSTGRES_PASSWORD=postgres \ -e POSTGRES_INITDB_ARGS="--lc-collate C --lc-ctype C --encoding UTF8" \ - postgres:${{ matrix.postgres-version }} + postgres:${{ matrix.job.postgres-version }} - uses: matrix-org/setup-python-poetry@v1 with: - python-version: ${{ matrix.python-version }} - extras: ${{ matrix.extras }} + python-version: ${{ matrix.job.python-version }} + extras: ${{ matrix.job.extras }} - name: Await PostgreSQL - if: ${{ matrix.postgres-version }} + if: ${{ matrix.job.postgres-version }} timeout-minutes: 2 run: until pg_isready -h localhost; do sleep 1; done - run: poetry run trial --jobs=2 tests env: - SYNAPSE_POSTGRES: ${{ matrix.database == 'postgres' || '' }} + SYNAPSE_POSTGRES: ${{ matrix.job.database == 'postgres' || '' }} SYNAPSE_POSTGRES_HOST: localhost SYNAPSE_POSTGRES_USER: postgres SYNAPSE_POSTGRES_PASSWORD: postgres @@ -198,45 +193,24 @@ jobs: sytest: if: ${{ !failure() && !cancelled() }} - needs: linting-done + needs: calculate-test-jobs runs-on: ubuntu-latest container: - image: matrixdotorg/sytest-synapse:${{ matrix.sytest-tag }} + image: matrixdotorg/sytest-synapse:${{ matrix.job.sytest-tag }} volumes: - ${{ github.workspace }}:/src env: SYTEST_BRANCH: ${{ github.head_ref }} - POSTGRES: ${{ matrix.postgres && 1}} - MULTI_POSTGRES: ${{ (matrix.postgres == 'multi-postgres') && 1}} - WORKERS: ${{ matrix.workers && 1 }} - REDIS: ${{ matrix.redis && 1 }} - BLACKLIST: ${{ matrix.workers && 'synapse-blacklist-with-workers' }} + POSTGRES: ${{ matrix.job.postgres && 1}} + MULTI_POSTGRES: ${{ (matrix.job.postgres == 'multi-postgres') && 1}} + WORKERS: ${{ matrix.job.workers && 1 }} + BLACKLIST: ${{ matrix.job.workers && 'synapse-blacklist-with-workers' }} TOP: ${{ github.workspace }} strategy: fail-fast: false matrix: - include: - - sytest-tag: focal - - - sytest-tag: focal - postgres: postgres - - - sytest-tag: testing - postgres: postgres - - - sytest-tag: focal - postgres: multi-postgres - workers: workers - - - sytest-tag: buster - postgres: multi-postgres - workers: workers - - - sytest-tag: buster - postgres: postgres - workers: workers - redis: redis + job: ${{ fromJson(needs.calculate-test-jobs.outputs.sytest_test_matrix) }} steps: - uses: actions/checkout@v2 @@ -252,7 +226,7 @@ jobs: uses: actions/upload-artifact@v2 if: ${{ always() }} with: - name: Sytest Logs - ${{ job.status }} - (${{ join(matrix.*, ', ') }}) + name: Sytest Logs - ${{ job.status }} - (${{ join(matrix.job.*, ', ') }}) path: | /logs/results.tap /logs/**/*.log* @@ -283,7 +257,6 @@ jobs: - run: sudo apt-get -qq install xmlsec1 - uses: matrix-org/setup-python-poetry@v1 with: - python-version: ${{ matrix.python-version }} extras: "postgres" - run: .ci/scripts/test_export_data_command.sh @@ -354,7 +327,7 @@ jobs: - run: | set -o pipefail - POSTGRES=${{ (matrix.database == 'Postgres') && 1 || '' }} WORKERS=${{ (matrix.arrangement == 'workers') && 1 || '' }} COMPLEMENT_DIR=`pwd`/complement synapse/scripts-dev/complement.sh -json 
2>&1 | gotestfmt + POSTGRES=${{ (matrix.database == 'Postgres') && 1 || '' }} WORKERS=${{ (matrix.arrangement == 'workers') && 1 || '' }} COMPLEMENT_DIR=`pwd`/complement synapse/scripts-dev/complement.sh -json 2>&1 | synapse/.ci/scripts/gotestfmt shell: bash name: Run Complement Tests diff --git a/.github/workflows/triage-incoming.yml b/.github/workflows/triage-incoming.yml new file mode 100644 index 000000000..f926bcb75 --- /dev/null +++ b/.github/workflows/triage-incoming.yml @@ -0,0 +1,28 @@ +name: Move new issues into the issue triage board + +on: + issues: + types: [ opened ] + +jobs: + add_new_issues: + name: Add new issues to the triage board + runs-on: ubuntu-latest + steps: + - uses: octokit/graphql-action@v2.x + id: add_to_project + with: + headers: '{"GraphQL-Features": "projects_next_graphql"}' + query: | + mutation add_to_project($projectid:ID!,$contentid:ID!) { + addProjectV2ItemById(input: {projectId: $projectid contentId: $contentid}) { + item { + id + } + } + } + projectid: ${{ env.PROJECT_ID }} + contentid: ${{ github.event.issue.node_id }} + env: + PROJECT_ID: "PVT_kwDOAIB0Bs4AFDdZ" + GITHUB_TOKEN: ${{ secrets.ELEMENT_BOT_TOKEN }} diff --git a/.github/workflows/triage_labelled.yml b/.github/workflows/triage_labelled.yml new file mode 100644 index 000000000..fbd55de17 --- /dev/null +++ b/.github/workflows/triage_labelled.yml @@ -0,0 +1,44 @@ +name: Move labelled issues to correct projects + +on: + issues: + types: [ labeled ] + +jobs: + move_needs_info: + name: Move X-Needs-Info on the triage board + runs-on: ubuntu-latest + if: > + contains(github.event.issue.labels.*.name, 'X-Needs-Info') + steps: + - uses: octokit/graphql-action@v2.x + id: add_to_project + with: + headers: '{"GraphQL-Features": "projects_next_graphql"}' + query: | + mutation { + updateProjectV2ItemFieldValue( + input: { + projectId: $projectid + itemId: $contentid + fieldId: $fieldid + value: { + singleSelectOptionId: "Todo" + } + } + ) { + projectV2Item { + id + } + } + } + + projectid: ${{ env.PROJECT_ID }} + contentid: ${{ github.event.issue.node_id }} + fieldid: ${{ env.FIELD_ID }} + optionid: ${{ env.OPTION_ID }} + env: + PROJECT_ID: "PVT_kwDOAIB0Bs4AFDdZ" + GITHUB_TOKEN: ${{ secrets.ELEMENT_BOT_TOKEN }} + FIELD_ID: "PVTSSF_lADOAIB0Bs4AFDdZzgC6ZA4" + OPTION_ID: "ba22e43c" diff --git a/.github/workflows/twisted_trunk.yml b/.github/workflows/twisted_trunk.yml index dd8e6fbb1..0906101cc 100644 --- a/.github/workflows/twisted_trunk.yml +++ b/.github/workflows/twisted_trunk.yml @@ -137,7 +137,7 @@ jobs: - run: | set -o pipefail - TEST_ONLY_SKIP_DEP_HASH_VERIFICATION=1 POSTGRES=${{ (matrix.database == 'Postgres') && 1 || '' }} WORKERS=${{ (matrix.arrangement == 'workers') && 1 || '' }} COMPLEMENT_DIR=`pwd`/complement synapse/scripts-dev/complement.sh -json 2>&1 | gotestfmt + TEST_ONLY_SKIP_DEP_HASH_VERIFICATION=1 POSTGRES=${{ (matrix.database == 'Postgres') && 1 || '' }} WORKERS=${{ (matrix.arrangement == 'workers') && 1 || '' }} COMPLEMENT_DIR=`pwd`/complement synapse/scripts-dev/complement.sh -json 2>&1 | synapse/.ci/scripts/gotestfmt shell: bash name: Run Complement Tests diff --git a/CHANGES.md b/CHANGES.md index 0b10e9018..ad277f32e 100644 --- a/CHANGES.md +++ b/CHANGES.md @@ -1,3 +1,100 @@ +Synapse 1.67.0rc1 (2022-09-06) +============================== + +This release removes using the deprecated direct TCP replication configuration +for workers. Server admins should use Redis instead. See the [upgrade +notes](https://matrix-org.github.io/synapse/v1.67/upgrade.html#upgrading-to-v1670). 
+
+The minimum version of `poetry` supported for managing source checkouts is now
+1.2.0.
+
+Notice: from the next major release (v1.68.0) installing Synapse from a source
+checkout will require a recent Rust compiler. Those using packages or
+`pip install matrix-synapse` will not be affected. See the [upgrade
+notes](https://matrix-org.github.io/synapse/v1.67/upgrade.html#upgrading-to-v1670).
+
+
+Features
+--------
+
+- Support setting the registration shared secret in a file, via a new `registration_shared_secret_path` configuration option. ([\#13614](https://github.com/matrix-org/synapse/issues/13614))
+- Change the default startup behaviour so that any missing "additional" configuration files (signing key, etc) are generated automatically. ([\#13615](https://github.com/matrix-org/synapse/issues/13615))
+- Improve performance of sending messages in rooms with thousands of local users. ([\#13634](https://github.com/matrix-org/synapse/issues/13634))
+
+
+Bugfixes
+--------
+
+- Fix a bug introduced in Synapse 1.13 where the [List Rooms admin API](https://matrix-org.github.io/synapse/develop/admin_api/rooms.html#list-room-api) would return integers instead of booleans for the `federatable` and `public` fields when using a Sqlite database. ([\#13509](https://github.com/matrix-org/synapse/issues/13509))
+- Fix a bug where a user could not `/forget` a room after its last member had left. ([\#13546](https://github.com/matrix-org/synapse/issues/13546))
+- Faster Room Joins: fix `/make_knock` blocking indefinitely when the room in question is a partial-stated room. ([\#13583](https://github.com/matrix-org/synapse/issues/13583))
+- Fix a bug where the current stream position could be loaded behind the actual position. ([\#13585](https://github.com/matrix-org/synapse/issues/13585))
+- Fix a longstanding bug in `register_new_matrix_user` which meant it was always necessary to explicitly give a server URL. ([\#13616](https://github.com/matrix-org/synapse/issues/13616))
+- Fix the running of [MSC1763](https://github.com/matrix-org/matrix-spec-proposals/pull/1763) retention purge_jobs in deployments with background jobs running on a worker by forcing them back onto the main worker. Contributed by Brad @ Beeper. ([\#13632](https://github.com/matrix-org/synapse/issues/13632))
+- Fix a long-standing bug where downloaded media for URL previews was not deleted while database background updates were running. ([\#13657](https://github.com/matrix-org/synapse/issues/13657))
+- Fix the [MSC3030](https://github.com/matrix-org/matrix-spec-proposals/pull/3030) `/timestamp_to_event` endpoint to return the correct next event when the events have the same timestamp. ([\#13658](https://github.com/matrix-org/synapse/issues/13658))
+- Fix a bug introduced in v1.22.0 where media plugins could wedge if clients disconnected early. ([\#13660](https://github.com/matrix-org/synapse/issues/13660))
+- Fix a long-standing bug which meant that keys for unwhitelisted servers were not returned by `/_matrix/key/v2/query`. ([\#13683](https://github.com/matrix-org/synapse/issues/13683))
+- Fix a bug introduced in Synapse v1.20.0 that would cause the unstable unread counts from [MSC2654](https://github.com/matrix-org/matrix-spec-proposals/pull/2654) to be calculated even if the feature is disabled. ([\#13694](https://github.com/matrix-org/synapse/issues/13694))
+
+
+Updates to the Docker image
+---------------------------
+
+- Update the Docker image to use a stable version of poetry.
([\#13688](https://github.com/matrix-org/synapse/issues/13688)) + + +Improved Documentation +---------------------- + +- Improve the description of the ["chain cover index"](https://matrix-org.github.io/synapse/latest/auth_chain_difference_algorithm.html) used internally by Synapse. ([\#13602](https://github.com/matrix-org/synapse/issues/13602)) +- Document how ["monthly active users"](https://matrix-org.github.io/synapse/latest/usage/administration/monthly_active_users.html) is calculated and used. ([\#13617](https://github.com/matrix-org/synapse/issues/13617)) +- Improve documentation around user registration. ([\#13640](https://github.com/matrix-org/synapse/issues/13640)) +- Remove documentation of legacy `frontend_proxy` worker app. ([\#13645](https://github.com/matrix-org/synapse/issues/13645)) +- Clarify documentation that HTTP replication traffic can be protected with a shared secret. ([\#13656](https://github.com/matrix-org/synapse/issues/13656)) +- Remove unintentional colons from [config manual](https://matrix-org.github.io/synapse/latest/usage/configuration/config_documentation.html) headers. ([\#13665](https://github.com/matrix-org/synapse/issues/13665)) +- Update docs to make enabling metrics more clear. ([\#13678](https://github.com/matrix-org/synapse/issues/13678)) +- Clarify `(room_id, event_id)` global uniqueness and how we should scope our database schemas. ([\#13701](https://github.com/matrix-org/synapse/issues/13701)) + + +Deprecations and Removals +------------------------- + +- Drop support for calling `/_matrix/client/v3/rooms/{roomId}/invite` without an `id_access_token`, which was not permitted by the spec. Contributed by @Vetchu. ([\#13241](https://github.com/matrix-org/synapse/issues/13241)) +- Remove redundant `_get_joined_users_from_context` cache. Contributed by Nick @ Beeper (@fizzadar). ([\#13569](https://github.com/matrix-org/synapse/issues/13569)) +- Remove the ability to use direct TCP replication with workers. Direct TCP replication was deprecated in Synapse v1.18.0. Workers now require using Redis. ([\#13647](https://github.com/matrix-org/synapse/issues/13647)) +- Remove support for unstable [private read receipts](https://github.com/matrix-org/matrix-spec-proposals/pull/2285). ([\#13653](https://github.com/matrix-org/synapse/issues/13653), [\#13692](https://github.com/matrix-org/synapse/issues/13692)) + + +Internal Changes +---------------- + +- Extend the release script to wait for GitHub Actions to finish and to be usable as a guide for the whole process. ([\#13483](https://github.com/matrix-org/synapse/issues/13483)) +- Add experimental configuration option to allow disabling legacy Prometheus metric names. ([\#13540](https://github.com/matrix-org/synapse/issues/13540)) +- Cache user IDs instead of profiles to reduce cache memory usage. Contributed by Nick @ Beeper (@fizzadar). ([\#13573](https://github.com/matrix-org/synapse/issues/13573), [\#13600](https://github.com/matrix-org/synapse/issues/13600)) +- Optimize how Synapse calculates domains to fetch from during backfill. ([\#13575](https://github.com/matrix-org/synapse/issues/13575)) +- Comment about a better future where we can get the state diff between two events. ([\#13586](https://github.com/matrix-org/synapse/issues/13586)) +- Instrument `_check_sigs_and_hash_and_fetch` to trace time spent in child concurrent calls for understandable traces in Jaeger. ([\#13588](https://github.com/matrix-org/synapse/issues/13588)) +- Improve performance of `@cachedList`. 
([\#13591](https://github.com/matrix-org/synapse/issues/13591)) +- Minor speed up of fetching large numbers of push rules. ([\#13592](https://github.com/matrix-org/synapse/issues/13592)) +- Optimise push action fetching queries. Contributed by Nick @ Beeper (@fizzadar). ([\#13597](https://github.com/matrix-org/synapse/issues/13597)) +- Rename `event_map` to `unpersisted_events` when computing the auth differences. ([\#13603](https://github.com/matrix-org/synapse/issues/13603)) +- Refactor `get_users_in_room(room_id)` mis-use with dedicated `get_current_hosts_in_room(room_id)` function. ([\#13605](https://github.com/matrix-org/synapse/issues/13605)) +- Use dedicated `get_local_users_in_room(room_id)` function to find local users when calculating `join_authorised_via_users_server` of a `/make_join` request. ([\#13606](https://github.com/matrix-org/synapse/issues/13606)) +- Refactor `get_users_in_room(room_id)` mis-use to lookup single local user with dedicated `check_local_user_in_room(...)` function. ([\#13608](https://github.com/matrix-org/synapse/issues/13608)) +- Drop unused column `application_services_state.last_txn`. ([\#13627](https://github.com/matrix-org/synapse/issues/13627)) +- Improve readability of Complement CI logs by printing failure results last. ([\#13639](https://github.com/matrix-org/synapse/issues/13639)) +- Generalise the `@cancellable` annotation so it can be used on functions other than just servlet methods. ([\#13662](https://github.com/matrix-org/synapse/issues/13662)) +- Introduce a `CommonUsageMetrics` class to share some usage metrics between the Prometheus exporter and the phone home stats. ([\#13671](https://github.com/matrix-org/synapse/issues/13671)) +- Add some logging to help track down #13444. ([\#13679](https://github.com/matrix-org/synapse/issues/13679)) +- Update poetry lock file for v1.2.0. ([\#13689](https://github.com/matrix-org/synapse/issues/13689)) +- Add cache to `is_partial_state_room`. ([\#13693](https://github.com/matrix-org/synapse/issues/13693)) +- Update the Grafana dashboard that is included with Synapse in the `contrib` directory. ([\#13697](https://github.com/matrix-org/synapse/issues/13697)) +- Only run trial CI on all python versions on non-PRs. ([\#13698](https://github.com/matrix-org/synapse/issues/13698)) +- Fix typechecking with latest types-jsonschema. ([\#13712](https://github.com/matrix-org/synapse/issues/13712)) +- Reduce number of CI checks we run for PRs. ([\#13713](https://github.com/matrix-org/synapse/issues/13713)) + + Synapse 1.66.0 (2022-08-31) =========================== diff --git a/README.rst b/README.rst index 84e531030..d116cd51f 100644 --- a/README.rst +++ b/README.rst @@ -3,7 +3,7 @@ Synapse |support| |development| |documentation| |license| |pypi| |python| ========================================================================= Synapse is an open-source `Matrix `_ homeserver written and -maintained by the Matrix.org Foundation. We began rapid development began in 2014, +maintained by the Matrix.org Foundation. We began rapid development in 2014, reaching v1.0.0 in 2019. Development on Synapse and the Matrix protocol itself continues in earnest today. 
diff --git a/contrib/grafana/synapse.json b/contrib/grafana/synapse.json index 0f23fc17e..248cd6d9a 100644 --- a/contrib/grafana/synapse.json +++ b/contrib/grafana/synapse.json @@ -3244,6 +3244,104 @@ "yaxis": { "align": false } + }, + { + "datasource": { + "type": "prometheus", + "uid": "${DS_PROMETHEUS}" + }, + "description": "Average number of hosts being rate limited across each worker type.", + "fieldConfig": { + "defaults": { + "color": { + "mode": "palette-classic" + }, + "custom": { + "axisLabel": "", + "axisPlacement": "auto", + "barAlignment": 0, + "drawStyle": "line", + "fillOpacity": 0, + "gradientMode": "none", + "hideFrom": { + "legend": false, + "tooltip": false, + "viz": false + }, + "lineInterpolation": "linear", + "lineWidth": 1, + "pointSize": 5, + "scaleDistribution": { + "type": "linear" + }, + "showPoints": "auto", + "spanNulls": false, + "stacking": { + "group": "A", + "mode": "none" + }, + "thresholdsStyle": { + "mode": "off" + } + }, + "mappings": [], + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "green" + } + ] + }, + "unit": "none" + }, + "overrides": [] + }, + "gridPos": { + "h": 9, + "w": 12, + "x": 12, + "y": 53 + }, + "id": 225, + "options": { + "legend": { + "calcs": [], + "displayMode": "list", + "placement": "bottom" + }, + "tooltip": { + "mode": "single", + "sort": "desc" + } + }, + "targets": [ + { + "datasource": { + "type": "prometheus", + "uid": "${DS_PROMETHEUS}" + }, + "editorMode": "builder", + "expr": "avg by(job, rate_limiter_name) (synapse_rate_limit_sleep_affected_hosts{instance=\"$instance\", job=~\"$job\", index=~\"$index\"})", + "hide": false, + "legendFormat": "Slept by {{job}}:{{rate_limiter_name}}", + "range": true, + "refId": "B" + }, + { + "datasource": { + "type": "prometheus", + "uid": "${DS_PROMETHEUS}" + }, + "editorMode": "builder", + "expr": "avg by(job, rate_limiter_name) (synapse_rate_limit_reject_affected_hosts{instance=\"$instance\", job=~\"$job\", index=~\"$index\"})", + "legendFormat": "Rejected by {{job}}:{{rate_limiter_name}}", + "range": true, + "refId": "A" + } + ], + "title": "Hosts being rate limited", + "type": "timeseries" } ], "targets": [ @@ -6404,7 +6502,7 @@ "h": 13, "w": 12, "x": 0, - "y": 10 + "y": 35 }, "hiddenSeries": false, "id": 12, @@ -6502,7 +6600,7 @@ "h": 13, "w": 12, "x": 12, - "y": 10 + "y": 35 }, "hiddenSeries": false, "id": 26, @@ -6601,7 +6699,7 @@ "h": 13, "w": 12, "x": 0, - "y": 23 + "y": 48 }, "hiddenSeries": false, "id": 13, @@ -6705,7 +6803,7 @@ "h": 13, "w": 12, "x": 12, - "y": 23 + "y": 48 }, "hiddenSeries": false, "id": 27, @@ -6803,7 +6901,7 @@ "h": 13, "w": 12, "x": 0, - "y": 36 + "y": 61 }, "hiddenSeries": false, "id": 28, @@ -6900,7 +6998,7 @@ "h": 13, "w": 12, "x": 12, - "y": 36 + "y": 61 }, "hiddenSeries": false, "id": 25, @@ -6935,7 +7033,7 @@ "datasource": { "uid": "$datasource" }, - "expr": "rate(synapse_util_metrics_block_time_seconds{instance=\"$instance\",job=~\"$job\",index=~\"$index\"}[$bucket_size]) / rate(synapse_util_metrics_block_count[$bucket_size])", + "expr": "rate(synapse_util_metrics_block_time_seconds{instance=\"$instance\",job=~\"$job\",index=~\"$index\"}[$bucket_size]) / rate(synapse_util_metrics_block_count{instance=\"$instance\",job=~\"$job\",index=~\"$index\"}[$bucket_size])", "format": "time_series", "interval": "", "intervalFactor": 2, @@ -6960,11 +7058,13 @@ }, "yaxes": [ { - "format": "ms", + "$$hashKey": "object:180", + "format": "s", "logBase": 1, "show": true }, { + "$$hashKey": "object:181", "format": "short", "logBase": 1, "show": 
true @@ -6988,7 +7088,7 @@ "h": 15, "w": 12, "x": 0, - "y": 49 + "y": 74 }, "hiddenSeries": false, "id": 154, @@ -7009,7 +7109,7 @@ "alertThreshold": true }, "percentage": false, - "pluginVersion": "8.4.3", + "pluginVersion": "9.0.4", "pointradius": 2, "points": false, "renderer": "flot", @@ -7109,7 +7209,7 @@ "h": 10, "w": 12, "x": 0, - "y": 36 + "y": 69 }, "hiddenSeries": false, "id": 1, @@ -7211,7 +7311,7 @@ "h": 10, "w": 12, "x": 12, - "y": 36 + "y": 69 }, "hiddenSeries": false, "id": 8, @@ -7311,7 +7411,7 @@ "h": 10, "w": 12, "x": 0, - "y": 46 + "y": 79 }, "hiddenSeries": false, "id": 38, @@ -7407,7 +7507,7 @@ "h": 10, "w": 12, "x": 12, - "y": 46 + "y": 79 }, "hiddenSeries": false, "id": 39, @@ -7415,11 +7515,16 @@ "alignAsTable": true, "avg": false, "current": false, - "max": false, + "hideEmpty": false, + "hideZero": false, + "max": true, "min": false, + "rightSide": false, "show": true, + "sort": "max", + "sortDesc": true, "total": false, - "values": false + "values": true }, "lines": true, "linewidth": 1, @@ -7467,11 +7572,13 @@ }, "yaxes": [ { + "$$hashKey": "object:101", "format": "rps", "logBase": 1, "show": true }, { + "$$hashKey": "object:102", "format": "short", "logBase": 1, "show": true @@ -7501,7 +7608,7 @@ "h": 9, "w": 12, "x": 0, - "y": 56 + "y": 89 }, "hiddenSeries": false, "id": 65, @@ -11757,8 +11864,8 @@ ] }, "time": { - "from": "2022-07-22T04:08:13.716Z", - "to": "2022-07-22T18:44:27.863Z" + "from": "now-3h", + "to": "now" }, "timepicker": { "now": true, @@ -11789,6 +11896,6 @@ "timezone": "", "title": "Synapse", "uid": "000000012", - "version": 124, + "version": 132, "weekStart": "" } \ No newline at end of file diff --git a/debian/build_virtualenv b/debian/build_virtualenv index f1ec60916..ed916ac97 100755 --- a/debian/build_virtualenv +++ b/debian/build_virtualenv @@ -36,7 +36,7 @@ TEMP_VENV="$(mktemp -d)" python3 -m venv "$TEMP_VENV" source "$TEMP_VENV/bin/activate" pip install -U pip -pip install poetry==1.2.0b1 +pip install poetry==1.2.0 poetry export \ --extras all \ --extras test \ diff --git a/debian/changelog b/debian/changelog index 7ff142884..4fa54dc5e 100644 --- a/debian/changelog +++ b/debian/changelog @@ -1,9 +1,28 @@ +matrix-synapse-py3 (1.67.0~rc1) stable; urgency=medium + + [ Erik Johnston ] + * Use stable poetry 1.2.0 version, rather than a prerelease. + + [ Synapse Packaging team ] + * New Synapse release 1.67.0rc1. + + -- Synapse Packaging team Tue, 06 Sep 2022 09:01:06 +0100 + matrix-synapse-py3 (1.66.0) stable; urgency=medium * New Synapse release 1.66.0. -- Synapse Packaging team Wed, 31 Aug 2022 11:20:17 +0100 +matrix-synapse-py3 (1.66.0~rc2+nmu1) UNRELEASED; urgency=medium + + * Update debhelper to compatibility level 12. + * Drop the preinst script stopping synapse. + * Allocate a group for the system user. + * Change dpkg-statoverride to --force-statoverride-add. + + -- Jörg Behrmann Tue, 23 Aug 2022 17:17:00 +0100 + matrix-synapse-py3 (1.66.0~rc2) stable; urgency=medium * New Synapse release 1.66.0rc2. diff --git a/debian/compat b/debian/compat deleted file mode 100644 index f599e28b8..000000000 --- a/debian/compat +++ /dev/null @@ -1 +0,0 @@ -10 diff --git a/debian/control b/debian/control index 412a9e1d4..86f5a66d0 100644 --- a/debian/control +++ b/debian/control @@ -4,7 +4,7 @@ Priority: extra Maintainer: Synapse Packaging team # keep this list in sync with the build dependencies in docker/Dockerfile-dhvirtualenv. 
Build-Depends: - debhelper (>= 10), + debhelper-compat (= 12), dh-virtualenv (>= 1.1), libsystemd-dev, libpq-dev, diff --git a/debian/matrix-synapse-py3.postinst b/debian/matrix-synapse-py3.postinst index 029b9e024..acab0877a 100644 --- a/debian/matrix-synapse-py3.postinst +++ b/debian/matrix-synapse-py3.postinst @@ -40,12 +40,12 @@ EOF /opt/venvs/matrix-synapse/lib/manage_debconf.pl update if ! getent passwd $USER >/dev/null; then - adduser --quiet --system --no-create-home --home /var/lib/matrix-synapse $USER + adduser --quiet --system --group --no-create-home --home /var/lib/matrix-synapse $USER fi for DIR in /var/lib/matrix-synapse /var/log/matrix-synapse /etc/matrix-synapse; do if ! dpkg-statoverride --list --quiet $DIR >/dev/null; then - dpkg-statoverride --force --quiet --update --add $USER nogroup 0755 $DIR + dpkg-statoverride --force-statoverride-add --quiet --update --add $USER "$(id -gn $USER)" 0755 $DIR fi done diff --git a/debian/matrix-synapse-py3.preinst b/debian/matrix-synapse-py3.preinst deleted file mode 100644 index 4b5612f05..000000000 --- a/debian/matrix-synapse-py3.preinst +++ /dev/null @@ -1,31 +0,0 @@ -#!/bin/sh -e - -# Attempt to undo some of the braindamage caused by -# https://github.com/matrix-org/package-synapse-debian/issues/18. -# -# Due to reasons [1], the old python2 matrix-synapse package will not stop the -# service when the package is uninstalled. Our maintainer scripts will do the -# right thing in terms of ensuring the service is enabled and unmasked, but -# then do a `systemctl start matrix-synapse`, which of course does nothing - -# leaving the old (py2) service running. -# -# There should normally be no reason for the service to be running during our -# preinst, so we assume that if it *is* running, it's due to that situation, -# and stop it. -# -# [1] dh_systemd_start doesn't do anything because it sees that there is an -# init.d script with the same name, so leaves it to dh_installinit. -# -# dh_installinit doesn't do anything because somebody gave it a --no-start -# for unknown reasons. - -if [ -x /bin/systemctl ]; then - if /bin/systemctl --quiet is-active -- matrix-synapse; then - echo >&2 "stopping existing matrix-synapse service" - /bin/systemctl stop matrix-synapse || true - fi -fi - -#DEBHELPER# - -exit 0 diff --git a/debian/matrix-synapse.default b/debian/matrix-synapse.default deleted file mode 100644 index f402d73bb..000000000 --- a/debian/matrix-synapse.default +++ /dev/null @@ -1,2 +0,0 @@ -# Specify environment variables used when running Synapse -# SYNAPSE_CACHE_FACTOR=0.5 (default) diff --git a/debian/matrix-synapse.service b/debian/matrix-synapse.service index bde1c6cb9..c3f966028 100644 --- a/debian/matrix-synapse.service +++ b/debian/matrix-synapse.service @@ -5,7 +5,6 @@ Description=Synapse Matrix homeserver Type=notify User=matrix-synapse WorkingDirectory=/var/lib/matrix-synapse -EnvironmentFile=-/etc/default/matrix-synapse ExecStartPre=/opt/venvs/matrix-synapse/bin/python -m synapse.app.homeserver --config-path=/etc/matrix-synapse/homeserver.yaml --config-path=/etc/matrix-synapse/conf.d/ --generate-keys ExecStart=/opt/venvs/matrix-synapse/bin/python -m synapse.app.homeserver --config-path=/etc/matrix-synapse/homeserver.yaml --config-path=/etc/matrix-synapse/conf.d/ ExecReload=/bin/kill -HUP $MAINPID @@ -13,5 +12,10 @@ Restart=always RestartSec=3 SyslogIdentifier=matrix-synapse +# The environment file is not shipped by default anymore and the below directive +# is for backwards compatibility only. 
Please use your homeserver.yaml if +# possible. +EnvironmentFile=-/etc/default/matrix-synapse + [Install] WantedBy=multi-user.target diff --git a/debian/rules b/debian/rules index 5baf2475f..3b79d5607 100755 --- a/debian/rules +++ b/debian/rules @@ -6,15 +6,17 @@ # assume we only have one package PACKAGE_NAME:=`dh_listpackages` -override_dh_systemd_enable: - dh_systemd_enable --name=matrix-synapse - -override_dh_installinit: - dh_installinit --name=matrix-synapse +override_dh_installsystemd: + dh_installsystemd --name=matrix-synapse # we don't really want to strip the symbols from our object files. override_dh_strip: +# many libraries pulled from PyPI have allocatable sections after +# non-allocatable ones on which dwz errors out. For those without the issue the +# gains are only marginal +override_dh_dwz: + # dh_shlibdeps calls dpkg-shlibdeps, which finds all the binary files # (executables and shared libs) in the package, and looks for the shared # libraries that they depend on. It then adds a dependency on the package that diff --git a/docker/Dockerfile b/docker/Dockerfile index fa58ae3ac..b87d263cf 100644 --- a/docker/Dockerfile +++ b/docker/Dockerfile @@ -46,17 +46,8 @@ RUN \ # We install poetry in its own build stage to avoid its dependencies conflicting with # synapse's dependencies. -# We use a specific commit from poetry's master branch instead of our usual 1.1.14, -# to incorporate fixes to some bugs in `poetry export`. This commit corresponds to -# https://github.com/python-poetry/poetry/pull/5156 and -# https://github.com/python-poetry/poetry/issues/5141 ; -# without it, we generate a requirements.txt with incorrect environment markers, -# which causes necessary packages to be omitted when we `pip install`. -# -# NB: In poetry 1.2 `poetry export` will be moved into a plugin; we'll need to also -# pip install poetry-plugin-export (https://github.com/python-poetry/poetry-plugin-export). RUN --mount=type=cache,target=/root/.cache/pip \ - pip install --user "poetry-core==1.1.0a7" "git+https://github.com/python-poetry/poetry.git@fb13b3a676f476177f7937ffa480ee5cff9a90a5" + pip install --user "poetry==1.2.0" WORKDIR /synapse diff --git a/docs/SUMMARY.md b/docs/SUMMARY.md index 2d56b084e..16720bceb 100644 --- a/docs/SUMMARY.md +++ b/docs/SUMMARY.md @@ -69,6 +69,7 @@ - [Manhole](manhole.md) - [Monitoring](metrics-howto.md) - [Reporting Homeserver Usage Statistics](usage/administration/monitoring/reporting_homeserver_usage_statistics.md) + - [Monthly Active Users](usage/administration/monthly_active_users.md) - [Understanding Synapse Through Grafana Graphs](usage/administration/understanding_synapse_through_grafana_graphs.md) - [Useful SQL for Admins](usage/administration/useful_sql_for_admins.md) - [Database Maintenance Tools](usage/administration/database_maintenance_tools.md) diff --git a/docs/admin_api/register_api.md b/docs/admin_api/register_api.md index d7b7cf6a7..f6be31b44 100644 --- a/docs/admin_api/register_api.md +++ b/docs/admin_api/register_api.md @@ -5,9 +5,9 @@ non-interactive way. This is generally used for bootstrapping a Synapse instance with administrator accounts. To authenticate yourself to the server, you will need both the shared secret -(`registration_shared_secret` in the homeserver configuration), and a -one-time nonce. If the registration shared secret is not configured, this API -is not enabled. +([`registration_shared_secret`](../configuration/config_documentation.md#registration_shared_secret) +in the homeserver configuration), and a one-time nonce. 
If the registration
+shared secret is not configured, this API is not enabled.
+
 To fetch the nonce, you need to request one from the API:
 
diff --git a/docs/auth_chain_difference_algorithm.md b/docs/auth_chain_difference_algorithm.md
index 30f72a70d..ebc9de25b 100644
--- a/docs/auth_chain_difference_algorithm.md
+++ b/docs/auth_chain_difference_algorithm.md
@@ -34,13 +34,45 @@ the process of indexing it).
 
 ## Chain Cover Index
 
 Synapse computes auth chain differences by pre-computing a "chain cover" index
-for the auth chain in a room, allowing efficient reachability queries like "is
-event A in the auth chain of event B". This is done by assigning every event a
-*chain ID* and *sequence number* (e.g. `(5,3)`), and having a map of *links*
-between chains (e.g. `(5,3) -> (2,4)`) such that A is reachable by B (i.e. `A`
-is in the auth chain of `B`) if and only if either:
+for the auth chain in a room, allowing us to efficiently make reachability queries
+like "is event `A` in the auth chain of event `B`?". We could do this with an index
+that tracks all pairs `(A, B)` such that `A` is in the auth chain of `B`. However, this
+would be prohibitively large, scaling poorly as the room accumulates more state
+events.
 
-1. A and B have the same chain ID and `A`'s sequence number is less than `B`'s
+Instead, we break down the graph into *chains*. A chain is a subset of a DAG
+with the following property: for any pair of events `E` and `F` in the chain,
+the chain contains a path `E -> F` or a path `F -> E`. This forces a chain to be
+linear (without forks), e.g. `E -> F -> G -> ... -> H`. Each event in the chain
+is given a *sequence number* local to that chain. The oldest event `E` in the
+chain has sequence number 1. If `E` has a child `F` in the chain, then `F` has
+sequence number 2. If `E` has a grandchild `G` in the chain, then `G` has
+sequence number 3; and so on.
+
+Synapse ensures that each persisted event belongs to exactly one chain, and
+tracks how the chains are connected to one another. This allows us to
+efficiently answer reachability queries. Doing so uses less storage than
+tracking reachability on an event-by-event basis, particularly when we have
+fewer and longer chains. See
+
+> Jagadish, H. (1990). [A compression technique to materialize transitive closure](https://doi.org/10.1145/99935.99944).
+> *ACM Transactions on Database Systems (TODS)*, 15*(4)*, 558-598.
+
+for the original idea or
+
+> Y. Chen, Y. Chen, [An efficient algorithm for answering graph
+> reachability queries](https://doi.org/10.1109/ICDE.2008.4497498),
+> in: 2008 IEEE 24th International Conference on Data Engineering, April 2008,
+> pp. 893–902. (PDF available via [Google Scholar](https://scholar.google.com/scholar?q=Y.%20Chen,%20Y.%20Chen,%20An%20efficient%20algorithm%20for%20answering%20graph%20reachability%20queries,%20in:%202008%20IEEE%2024th%20International%20Conference%20on%20Data%20Engineering,%20April%202008,%20pp.%20893902.).)
+
+for a more modern take.
+
+In practical terms, the chain cover assigns every event a
+*chain ID* and *sequence number* (e.g. `(5,3)`), and maintains a map of *links*
+between events in chains (e.g. `(5,3) -> (2,4)`) such that `A` is reachable by `B`
+(i.e. `A` is in the auth chain of `B`) if and only if either:
+
+1. `A` and `B` have the same chain ID and `A`'s sequence number is less than `B`'s
    sequence number; or
 2. there is a link `L` between `B`'s chain ID and `A`'s chain ID such that
    `L.start_seq_no` <= `B.seq_no` and `A.seq_no` <= `L.end_seq_no`.
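To make the two rules above concrete, here is a minimal Python sketch of the reachability test they describe. The data structures are illustrative assumptions for the example, not Synapse's actual schema:

```python
from typing import Dict, List, Tuple

Position = Tuple[int, int]  # (chain ID, sequence number)

# links[(chain_b, chain_a)] is a list of (start_seq_no, end_seq_no) pairs,
# meaning: events at or after start_seq_no in chain_b reach events at or
# before end_seq_no in chain_a.
Links = Dict[Tuple[int, int], List[Tuple[int, int]]]


def is_in_auth_chain(a: Position, b: Position, links: Links) -> bool:
    """Return True if event A is in the auth chain of event B."""
    a_chain, a_seq = a
    b_chain, b_seq = b

    # Rule 1: same chain, and A comes strictly earlier in it.
    if a_chain == b_chain:
        return a_seq < b_seq

    # Rule 2: some link L from B's chain to A's chain satisfies
    # L.start_seq_no <= B.seq_no and A.seq_no <= L.end_seq_no.
    return any(
        start <= b_seq and a_seq <= end
        for start, end in links.get((b_chain, a_chain), [])
    )


# Example: the link (5,3) -> (2,4) means events in chain 5 from seq 3
# onwards reach events in chain 2 up to seq 4.
links: Links = {(5, 2): [(3, 4)]}
assert is_in_auth_chain((2, 2), (5, 3), links)       # reachable via the link
assert not is_in_auth_chain((2, 5), (5, 3), links)   # seq 5 is past the link's end
assert is_in_auth_chain((5, 1), (5, 3), links)       # same chain, earlier seq
```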
@@ -49,8 +81,9 @@ There are actually two potential implementations, one where we store links from
 each chain to every other reachable chain (the transitive closure of the links
 graph), and one where we remove redundant links (the transitive reduction of the
 links graph) e.g. if we have chains `C3 -> C2 -> C1` then the link `C3 -> C1`
-would not be stored. Synapse uses the former implementations so that it doesn't
-need to recurse to test reachability between chains.
+would not be stored. Synapse uses the former implementation so that it doesn't
+need to recurse to test reachability between chains. This trades off extra storage
+in order to save CPU cycles and DB queries.
 
 ### Example
 
diff --git a/docs/development/contributing_guide.md b/docs/development/contributing_guide.md
index ab320cbd7..4e1df5116 100644
--- a/docs/development/contributing_guide.md
+++ b/docs/development/contributing_guide.md
@@ -62,6 +62,8 @@ pipx install poetry
 but see poetry's [installation instructions](https://python-poetry.org/docs/#installation)
 for other installation methods.
 
+Synapse requires Poetry version 1.2.0 or later.
+
 Next, open a terminal and install dependencies as follows:
 
 ```sh
diff --git a/docs/development/database_schema.md b/docs/development/database_schema.md
index d996a7caa..e9b925ddd 100644
--- a/docs/development/database_schema.md
+++ b/docs/development/database_schema.md
@@ -191,3 +191,27 @@ There are three separate aspects to this:
   flavour will be accepted by SQLite 3.22, but will give a column whose
   default value is the **string** `"FALSE"` - which, when cast back to a boolean
   in Python, evaluates to `True`.
+
+
+## `event_id` global uniqueness
+
+In room versions `1` and `2` it's possible to end up with two events with the
+same `event_id` (in the same or different rooms). After room version `3`, that
+can only happen with a hash collision, which we basically hope will never
+happen.
+
+There are several places in Synapse and even Matrix APIs like [`GET
+/_matrix/federation/v1/event/{eventId}`](https://spec.matrix.org/v1.1/server-server-api/#get_matrixfederationv1eventeventid)
+where we assume that event IDs are globally unique.
+
+But hash collisions are still possible, and by treating event IDs as room
+scoped, we can reduce the possibility of a hash collision. When scoping
+`event_id` in the database schema, it should also be accompanied by `room_id`
+(`PRIMARY KEY (room_id, event_id)`) and lookups should be done through the pair
+`(room_id, event_id)`.
+
+There has been a lot of debate on this in places like
+https://github.com/matrix-org/matrix-spec-proposals/issues/2779 and
+[MSC2848](https://github.com/matrix-org/matrix-spec-proposals/pull/2848) which
+has no resolution yet (as of 2022-09-01).
+
diff --git a/docs/development/dependencies.md b/docs/development/dependencies.md
index 236856a6b..b356870f2 100644
--- a/docs/development/dependencies.md
+++ b/docs/development/dependencies.md
@@ -243,14 +243,11 @@ doesn't require poetry. (It's what we use in CI too). However, you could try
 
 ## Check the version of poetry with `poetry --version`.
 
-At the time of writing, the 1.2 series is beta only. We have seen some examples
-where the lockfiles generated by 1.2 prereleasese aren't interpreted correctly
-by poetry 1.1.x. For now, use poetry 1.1.14, which includes a critical
-[change](https://github.com/python-poetry/poetry/pull/5973) needed to remain
-[compatible with PyPI](https://github.com/pypi/warehouse/pull/11775).
+The minimum version of poetry supported by Synapse is 1.2.
It can also be useful to check the version of `poetry-core` in use. If you've -installed `poetry` with `pipx`, try `pipx runpip poetry list | grep poetry-core`. +installed `poetry` with `pipx`, try `pipx runpip poetry list | grep +poetry-core`. ## Clear caches: `poetry cache clear --all pypi`. diff --git a/docs/metrics-howto.md b/docs/metrics-howto.md index 4a77d5604..279303a79 100644 --- a/docs/metrics-howto.md +++ b/docs/metrics-howto.md @@ -7,7 +7,13 @@ 1. Enable Synapse metrics: - There are two methods of enabling metrics in Synapse. + In `homeserver.yaml`, make sure `enable_metrics` is + set to `True`. + +1. Enable the `/_synapse/metrics` Synapse endpoint that Prometheus uses to + collect data: + + There are two methods of enabling the metrics endpoint in Synapse. The first serves the metrics as a part of the usual web server and can be enabled by adding the \"metrics\" resource to the existing @@ -41,9 +47,6 @@ - '0.0.0.0' ``` - For both options, you will need to ensure that `enable_metrics` is - set to `True`. - 1. Restart Synapse. 1. Add a Prometheus target for Synapse. diff --git a/docs/openid.md b/docs/openid.md index d0ccf36f7..ce9b02622 100644 --- a/docs/openid.md +++ b/docs/openid.md @@ -174,7 +174,9 @@ oidc_providers: 1. Create a regular web application for Synapse 2. Set the Allowed Callback URLs to `[synapse public baseurl]/_synapse/client/oidc/callback` -3. Add a rule to add the `preferred_username` claim. +3. Add a rule with any name to add the `preferred_username` claim. +(See https://auth0.com/docs/customize/rules/create-rules for more information on how to create rules.) +
Code sample
 
diff --git a/docs/setup/installation.md b/docs/setup/installation.md
index 260e50577..bb78b3267 100644
--- a/docs/setup/installation.md
+++ b/docs/setup/installation.md
@@ -506,9 +506,13 @@ email will be disabled.
 
 ### Registering a user
 
-The easiest way to create a new user is to do so from a client like [Element](https://element.io/).
+One way to create a new user is to do so from a client like
+[Element](https://element.io/). This requires registration to be enabled via
+the
+[`enable_registration`](../usage/configuration/config_documentation.md#enable_registration)
+setting.
 
-Alternatively, you can do so from the command line. This can be done as follows:
+Alternatively, you can create new users from the command line. This can be done as follows:
 
 1. If synapse was installed via pip, activate the virtualenv as follows (if Synapse was
    installed via a prebuilt package, `register_new_matrix_user` should already be
@@ -520,7 +524,7 @@ Alternatively, you can do so from the command line. This can be done as follows:
    ```
 2. Run the following command:
    ```sh
-   register_new_matrix_user -c homeserver.yaml http://localhost:8008
+   register_new_matrix_user -c homeserver.yaml
    ```
 
 This will prompt you to add details for the new user, and will then connect to
@@ -533,12 +537,13 @@ Make admin [no]:
 Success!
 ```
 
-This process uses a setting `registration_shared_secret` in
-`homeserver.yaml`, which is shared between Synapse itself and the
-`register_new_matrix_user` script. It doesn't matter what it is (a random
-value is generated by `--generate-config`), but it should be kept secret, as
-anyone with knowledge of it can register users, including admin accounts,
-on your server even if `enable_registration` is `false`.
+This process uses a setting
+[`registration_shared_secret`](../usage/configuration/config_documentation.md#registration_shared_secret),
+which is shared between Synapse itself and the `register_new_matrix_user`
+script. It doesn't matter what it is (a random value is generated by
+`--generate-config`), but it should be kept secret, as anyone with knowledge of
+it can register users, including admin accounts, on your server even if
+`enable_registration` is `false`.
 
 ### Setting up a TURN server
 
diff --git a/docs/systemd-with-workers/workers/generic_worker.yaml b/docs/systemd-with-workers/workers/generic_worker.yaml
index a82f9c161..6e7b60886 100644
--- a/docs/systemd-with-workers/workers/generic_worker.yaml
+++ b/docs/systemd-with-workers/workers/generic_worker.yaml
@@ -5,6 +5,8 @@ worker_name: generic_worker1
 worker_replication_host: 127.0.0.1
 worker_replication_http_port: 9093
 
+worker_main_http_uri: http://localhost:8008/
+
 worker_listeners:
   - type: http
     port: 8083
diff --git a/docs/upgrade.md b/docs/upgrade.md
index 51719f8c7..023ca0a30 100644
--- a/docs/upgrade.md
+++ b/docs/upgrade.md
@@ -89,6 +89,37 @@ process, for example:
     dpkg -i matrix-synapse-py3_1.3.0+stretch1_amd64.deb
     ```
 
+# Upgrading to v1.67.0
+
+## Direct TCP replication is no longer supported: migrate to Redis
+
+Redis support was added in v1.13.0, and it became the recommended method in
+v1.18.0. It replaced the old direct TCP connections (which were deprecated as of
+v1.18.0) to the main process. With Redis, rather than all the workers connecting
+to the main process, all the workers and the main process connect to Redis,
+which relays replication commands between processes. This can give a significant
+CPU saving on the main process and is a prerequisite for upcoming
+performance improvements.
+
+To migrate to Redis, add the [`redis` config](./workers.md#shared-configuration),
+remove the TCP `replication` listener from the master's config, and remove
+`worker_replication_port` from worker config. Note that an HTTP listener with a
+`replication` resource is still required.
+
+## Minimum version of Poetry is now v1.2.0
+
+The minimum supported version of poetry is now 1.2. This should only affect
+those installing from a source checkout.
+
+## Rust requirement in the next release
+
+From the next major release (v1.68.0) installing Synapse from a source checkout
+will require a recent Rust compiler. Those using packages or
+`pip install matrix-synapse` will not be affected.
+
+The simplest way of installing Rust is via [rustup.rs](https://rustup.rs/).
+
+
 # Upgrading to v1.66.0
 
 ## Delegation of email validation no longer supported
 
diff --git a/docs/usage/administration/monthly_active_users.md b/docs/usage/administration/monthly_active_users.md
new file mode 100644
index 000000000..d4e903728
--- /dev/null
+++ b/docs/usage/administration/monthly_active_users.md
@@ -0,0 +1,84 @@
+# Monthly Active Users
+
+Synapse can be configured to record the number of monthly active users (also referred to as MAU) on a given homeserver.
+For clarity's sake, MAU only tracks local users.
+
+Please note that the metrics recorded by the [Homeserver Usage Stats](../../usage/administration/monitoring/reporting_homeserver_usage_statistics.md)
+are calculated differently. The `monthly_active_users` from the usage stats does not take into account any
+of the rules below, and counts any users who have made a request to the homeserver in the last 30 days.
+
+See the [configuration manual](../../usage/configuration/config_documentation.md#limit_usage_by_mau) for details on how to configure MAU.
+
+## Calculating active users
+
+Individual user activity is measured in active days. If a user performs an action, the exact time of that action is then recorded. When
+calculating the MAU figure, any users with a recorded action in the last 30 days are considered part of the cohort. Days are measured
+as a rolling window from the current system time to 30 days ago.
+
+So for example, if Synapse were to calculate the active users on the 15th July at 13:25, it would include any activity from 15th June 13:25 onwards.
+
+A user is **never** considered active if they are either:
+ - Part of the trial day cohort (described below)
+ - Owned by an application service.
+   - Note: This **only** covers users that are part of an application service `namespaces.users` registration. The namespace
+     must also be marked as `exclusive`.
+
+Otherwise, any request to Synapse will mark the user as active. Please note that registration will not mark a user as active *unless*
+they register with a 3pid that is included in the config field `mau_limits_reserved_threepids`.
+
+The Prometheus metric for MAU is refreshed every 5 minutes.
+
+Once an hour, Synapse checks to see if any users are inactive (that is, whose most recent activity timestamp is more than 30 days old). These users
+are removed from the active users cohort. If they then become active, they are immediately restored to the cohort.
+
+It is important to note that **deactivated** users are not immediately removed from the pool of active users, but as these users won't
+perform actions they will eventually be removed from the cohort.
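To make the rolling window above concrete, here is a minimal Python sketch of the calculation it describes. The data structures are illustrative assumptions, not Synapse's actual implementation:

```python
import time
from typing import Mapping, Optional

THIRTY_DAYS_SEC = 30 * 24 * 60 * 60


def calculate_mau(last_active_ts: Mapping[str, float], now: Optional[float] = None) -> int:
    """Count users whose most recent action falls inside the 30-day window."""
    now = time.time() if now is None else now
    cutoff = now - THIRTY_DAYS_SEC
    return sum(1 for ts in last_active_ts.values() if ts >= cutoff)


# Usage: Alice acted 5 days ago (counted); Bob acted 45 days ago (not counted).
now = time.time()
users = {
    "@alice:example.com": now - 5 * 24 * 3600,
    "@bob:example.com": now - 45 * 24 * 3600,
}
assert calculate_mau(users, now) == 1
```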
+### Trial days
+
+If the config option `mau_trial_days` is set, a user must have been active this many days **after** registration to be active. A user is in the
+trial period if their registration timestamp (also known as the `creation_ts`) is less than `mau_trial_days` old.
+
+As an example, if `mau_trial_days` is set to `3` and a user is active **after** 3 days (72 hours from registration time) then they will be counted as active.
+
+The `mau_appservice_trial_days` config further extends this rule by applying different durations depending on the `appservice_id` of the user.
+Users registered by an application service will be recorded with an `appservice_id` matching the `id` key in the registration file for that service.
+
+
+## Limiting usage of the homeserver when the maximum MAU is reached
+
+If both config options `limit_usage_by_mau` and `max_mau_value` are set, and the current MAU value exceeds the maximum value, the
+homeserver will begin to block some actions.
+
+Individual users matching **any** of the below criteria never have their actions blocked:
+ - Considered part of the cohort of MAU users.
+ - Considered part of the trial period.
+ - Registered as a `support` user.
+ - Application service users if `track_appservice_user_ips` is NOT set.
+
+Please note that server admins are **not** exempt from blocking.
+
+The following actions are blocked when the MAU limit is exceeded:
+ - Logging in
+ - Sending events
+ - Creating rooms
+ - Syncing
+
+Registration is also blocked for all new signups *unless* the user is registering with a threepid included in the `mau_limits_reserved_threepids`
+config value.
+
+When a request is blocked, the response will have the `errcode` `M_RESOURCE_LIMIT_EXCEEDED`.
+
+## Metrics
+
+Synapse records several different Prometheus metrics for MAU.
+
+`synapse_admin_mau:current` records the current MAU figure for native (non-application-service) users.
+
+`synapse_admin_mau:max` records the maximum MAU as dictated by the `max_mau_value` config value.
+
+`synapse_admin_mau_current_mau_by_service` records the current MAU including application service users. The label `app_service` can be used
+to filter by a specific service ID. This *also* includes non-application-service users under `app_service=native`.
+
+`synapse_admin_mau:registered_reserved_users` records the number of users specified in `mau_limits_reserved_threepids` which have
+registered accounts on the homeserver.
diff --git a/docs/usage/configuration/config_documentation.md b/docs/usage/configuration/config_documentation.md
index 5dee38d28..757957a1d 100644
--- a/docs/usage/configuration/config_documentation.md
+++ b/docs/usage/configuration/config_documentation.md
@@ -431,8 +431,6 @@ Sub-options for each listener include:
 
   * `metrics`: (see the docs [here](../../metrics-howto.md)),
 
-  * `replication`: (deprecated as of Synapse 1.18, see the docs [here](../../workers.md)).
-
   * `tls`: set to true to enable TLS for this listener. Will use the TLS key/cert specified in tls_private_key_path / tls_certificate_path.
 
   * `x_forwarded`: Only valid for an 'http' listener. Set to true to use the X-Forwarded-For header as the client IP. Useful when Synapse is
@@ -595,6 +593,8 @@ server owner wants to limit to the number of monthly active users. When enabled
 reached the server returns a `ResourceLimitError` with error type
 `Codes.RESOURCE_LIMIT_EXCEEDED`. Defaults to false. If this is enabled, a value
 for `max_mau_value` must also be set.
+See [Monthly Active Users](../administration/monthly_active_users.md) for details on how to configure MAU.
+
 Example configuration:
 ```yaml
 limit_usage_by_mau: true
 ```
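As a hypothetical illustration of the `M_RESOURCE_LIMIT_EXCEEDED` behaviour documented above, a client could detect MAU-based blocking like so (this is assumed client-side code, not part of Synapse):

```python
import json


def is_mau_blocked(response_body: bytes) -> bool:
    """Return True if a Matrix error response indicates the MAU limit was hit."""
    try:
        error = json.loads(response_body)
    except ValueError:
        return False
    return error.get("errcode") == "M_RESOURCE_LIMIT_EXCEEDED"


assert is_mau_blocked(b'{"errcode": "M_RESOURCE_LIMIT_EXCEEDED", "error": "MAU limit"}')
assert not is_mau_blocked(b'{"errcode": "M_FORBIDDEN"}')
```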
+
 
 Example configuration:
 ```yaml
 limit_usage_by_mau: true
 ```
@@ -1873,8 +1873,8 @@ See [here](../../CAPTCHA_SETUP.md) for full details on setting up captcha.
 ---
 ### `recaptcha_public_key`

-This homeserver's ReCAPTCHA public key. Must be specified if `enable_registration_captcha` is
-enabled.
+This homeserver's ReCAPTCHA public key. Must be specified if
+[`enable_registration_captcha`](#enable_registration_captcha) is enabled.

 Example configuration:
 ```yaml
@@ -1883,7 +1883,8 @@ recaptcha_public_key: "YOUR_PUBLIC_KEY"
 ---
 ### `recaptcha_private_key`

-This homeserver's ReCAPTCHA private key. Must be specified if `enable_registration_captcha` is
+This homeserver's ReCAPTCHA private key. Must be specified if
+[`enable_registration_captcha`](#enable_registration_captcha) is
 enabled.

 Example configuration:
@@ -1893,9 +1894,11 @@ recaptcha_private_key: "YOUR_PRIVATE_KEY"
 ---
 ### `enable_registration_captcha`

-Set to true to enable ReCaptcha checks when registering, preventing signup
-unless a captcha is answered. Requires a valid ReCaptcha public/private key.
-Defaults to false.
+Set to `true` to require users to complete a CAPTCHA test when registering an account.
+Requires a valid ReCaptcha public/private key.
+Defaults to `false`.
+
+Note that [`enable_registration`](#enable_registration) must also be set to allow account registration.

 Example configuration:
 ```yaml
@@ -1971,9 +1974,21 @@ Registration can be rate-limited using the parameters in the [Ratelimiting](#ratelimiting)
 ---
 ### `enable_registration`

-Enable registration for new users. Defaults to false. It is highly recommended that if you enable registration,
-you use either captcha, email, or token-based verification to verify that new users are not bots. In order to enable registration
-without any verification, you must also set `enable_registration_without_verification` to true.
+Enable registration for new users. Defaults to `false`.
+
+It is highly recommended that if you enable registration, you set one or more
+of the following options, to avoid abuse of your server by "bots":
+
+ * [`enable_registration_captcha`](#enable_registration_captcha)
+ * [`registrations_require_3pid`](#registrations_require_3pid)
+ * [`registration_requires_token`](#registration_requires_token)
+
+(In order to enable registration without any verification, you must also set
+[`enable_registration_without_verification`](#enable_registration_without_verification).)
+
+Note that even if this setting is disabled, new accounts can still be created
+via the admin API if
+[`registration_shared_secret`](#registration_shared_secret) is set.

 Example configuration:
 ```yaml
@@ -1981,88 +1996,21 @@ enable_registration: true
 ```
 ---
 ### `enable_registration_without_verification`
+
 Enable registration without email or captcha verification. Note: this option is *not* recommended,
-as registration without verification is a known vector for spam and abuse. Defaults to false. Has no effect
-unless `enable_registration` is also enabled.
+as registration without verification is a known vector for spam and abuse. Defaults to `false`. Has no effect
+unless [`enable_registration`](#enable_registration) is also enabled.

 Example configuration:
 ```yaml
 enable_registration_without_verification: true
 ```
 ---
-### `session_lifetime`
-
-Time that a user's session remains valid for, after they log in.
-
-Note that this is not currently compatible with guest logins.
-
-Note also that this is calculated at login time: changes are not applied retrospectively to users who have already
-logged in.
- -By default, this is infinite. - -Example configuration: -```yaml -session_lifetime: 24h -``` ----- -### `refresh_access_token_lifetime` - -Time that an access token remains valid for, if the session is using refresh tokens. - -For more information about refresh tokens, please see the [manual](user_authentication/refresh_tokens.md). - -Note that this only applies to clients which advertise support for refresh tokens. - -Note also that this is calculated at login time and refresh time: changes are not applied to -existing sessions until they are refreshed. - -By default, this is 5 minutes. - -Example configuration: -```yaml -refreshable_access_token_lifetime: 10m -``` ---- -### `refresh_token_lifetime: 24h` - -Time that a refresh token remains valid for (provided that it is not -exchanged for another one first). -This option can be used to automatically log-out inactive sessions. -Please see the manual for more information. - -Note also that this is calculated at login time and refresh time: -changes are not applied to existing sessions until they are refreshed. - -By default, this is infinite. - -Example configuration: -```yaml -refresh_token_lifetime: 24h -``` ---- -### `nonrefreshable_access_token_lifetime` - -Time that an access token remains valid for, if the session is NOT -using refresh tokens. - -Please note that not all clients support refresh tokens, so setting -this to a short value may be inconvenient for some users who will -then be logged out frequently. - -Note also that this is calculated at login time: changes are not applied -retrospectively to existing sessions for users that have already logged in. - -By default, this is infinite. - -Example configuration: -```yaml -nonrefreshable_access_token_lifetime: 24h -``` ---- ### `registrations_require_3pid` -If this is set, the user must provide all of the specified types of 3PID when registering. +If this is set, users must provide all of the specified types of 3PID when registering an account. + +Note that [`enable_registration`](#enable_registration) must also be set to allow account registration. Example configuration: ```yaml @@ -2110,9 +2058,11 @@ enable_3pid_lookup: false Require users to submit a token during registration. Tokens can be managed using the admin [API](../administration/admin_api/registration_tokens.md). -Note that `enable_registration` must be set to true. Disabling this option will not delete any tokens previously generated. -Defaults to false. Set to true to enable. +Defaults to `false`. Set to `true` to enable. + + +Note that [`enable_registration`](#enable_registration) must also be set to allow account registration. Example configuration: ```yaml @@ -2121,13 +2071,39 @@ registration_requires_token: true --- ### `registration_shared_secret` -If set, allows registration of standard or admin accounts by anyone who -has the shared secret, even if registration is otherwise disabled. +If set, allows registration of standard or admin accounts by anyone who has the +shared secret, even if [`enable_registration`](#enable_registration) is not +set. + +This is primarily intended for use with the `register_new_matrix_user` script +(see [Registering a user](../../setup/installation.md#registering-a-user)); +however, the interface is [documented](../admin_api/register_api.html). + +See also [`registration_shared_secret_path`](#registration_shared_secret_path). 
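+
+As a rough illustration of the interface documented above: the MAC sent to the
+admin registration endpoint is an HMAC-SHA1 over the nonce, username, password
+and admin flag, joined by NUL bytes and keyed with the shared secret. The
+sketch below follows that documented flow; the helper name and the use of the
+`requests` library are illustrative, not part of Synapse:
+
+```python
+import hashlib
+import hmac
+
+import requests
+
+
+def register_with_shared_secret(base_url, shared_secret, username, password, admin=False):
+    # Fetch a one-time nonce from the admin registration endpoint.
+    nonce = requests.get(f"{base_url}/_synapse/admin/v1/register").json()["nonce"]
+
+    # MAC the nonce, username, password and admin flag, joined by NUL bytes.
+    mac = hmac.new(shared_secret.encode("utf8"), digestmod=hashlib.sha1)
+    mac.update(nonce.encode("utf8"))
+    mac.update(b"\x00")
+    mac.update(username.encode("utf8"))
+    mac.update(b"\x00")
+    mac.update(password.encode("utf8"))
+    mac.update(b"\x00")
+    mac.update(b"admin" if admin else b"notadmin")
+
+    return requests.post(
+        f"{base_url}/_synapse/admin/v1/register",
+        json={
+            "nonce": nonce,
+            "username": username,
+            "password": password,
+            "admin": admin,
+            "mac": mac.hexdigest(),
+        },
+    ).json()
+```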
 Example configuration:
 ```yaml
 registration_shared_secret: <PRIVATE STRING>
 ```
+
+---
+### `registration_shared_secret_path`
+
+An alternative to [`registration_shared_secret`](#registration_shared_secret):
+allows the shared secret to be specified in an external file.
+
+The file should be a plain text file, containing only the shared secret.
+
+If this file does not exist, Synapse will create a new shared
+secret on startup and store it in this file.
+
+Example configuration:
+```yaml
+registration_shared_secret_path: /path/to/secrets/file
+```
+
+_Added in Synapse 1.67.0._
+
 ---
 ### `bcrypt_rounds`
@@ -2358,6 +2334,79 @@ Example configuration:
 ```yaml
 inhibit_user_in_use_error: true
 ```
+---
+## User session management
+---
+### `session_lifetime`
+
+Time that a user's session remains valid for, after they log in.
+
+Note that this is not currently compatible with guest logins.
+
+Note also that this is calculated at login time: changes are not applied retrospectively to users who have already
+logged in.
+
+By default, this is infinite.
+
+Example configuration:
+```yaml
+session_lifetime: 24h
+```
+---
+### `refreshable_access_token_lifetime`
+
+Time that an access token remains valid for, if the session is using refresh tokens.
+
+For more information about refresh tokens, please see the [manual](user_authentication/refresh_tokens.md).
+
+Note that this only applies to clients which advertise support for refresh tokens.
+
+Note also that this is calculated at login time and refresh time: changes are not applied to
+existing sessions until they are refreshed.
+
+By default, this is 5 minutes.
+
+Example configuration:
+```yaml
+refreshable_access_token_lifetime: 10m
+```
+---
+### `refresh_token_lifetime`
+
+Time that a refresh token remains valid for (provided that it is not
+exchanged for another one first).
+This option can be used to automatically log out inactive sessions.
+Please see the manual for more information.
+
+Note also that this is calculated at login time and refresh time:
+changes are not applied to existing sessions until they are refreshed.
+
+By default, this is infinite.
+
+Example configuration:
+```yaml
+refresh_token_lifetime: 24h
+```
+---
+### `nonrefreshable_access_token_lifetime`
+
+Time that an access token remains valid for, if the session is NOT
+using refresh tokens.
+
+Please note that not all clients support refresh tokens, so setting
+this to a short value may be inconvenient for some users who will
+then be logged out frequently.
+
+Note also that this is calculated at login time: changes are not applied
+retrospectively to existing sessions for users that have already logged in.
+
+By default, this is infinite.
+
+Example configuration:
+```yaml
+nonrefreshable_access_token_lifetime: 24h
+```
+
 ---
 ## Metrics
 ### Config options related to metrics.
@@ -2435,7 +2484,7 @@ report_stats_endpoint: https://example.com/report-usage-stats/push
 Config settings related to the client/server API
 ---
-### `room_prejoin_state:`
+### `room_prejoin_state`

 Controls for the state that is shared with users who receive an invite
 to a room. By default, the following state event types are shared with users who
@@ -2537,7 +2586,10 @@ Config options relating to signing keys
 ---
 ### `signing_key_path`

-Path to the signing key to sign messages with.
+Path to the signing key to sign events and federation requests with.
+
+*New in Synapse 1.67*: If this file does not exist, Synapse will create a new signing
+key on startup and store it in this file.
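+
+If you would rather pre-generate the key than let Synapse create it, a minimal
+sketch using the `signedjson` library (which Synapse itself depends on) might
+look like the following; the key version `"a"` and the output filename are
+illustrative:
+
+```python
+from signedjson.key import generate_signing_key, write_signing_keys
+
+# Generate an ed25519 signing key and write it in the one-line format
+# that Synapse reads from `signing_key_path`.
+signing_key = generate_signing_key("a")
+with open("my.signing.key", "w") as f:
+    write_signing_keys(f, [signing_key])
+```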
Example configuration: ```yaml @@ -2572,7 +2624,7 @@ Example configuration: key_refresh_interval: 2d ``` --- -### `trusted_key_servers:` +### `trusted_key_servers` The trusted servers to download signing keys from. @@ -2642,13 +2694,10 @@ key_server_signing_keys_path: "key_server_signing_keys.key" The following settings can be used to make Synapse use a single sign-on provider for authentication, instead of its internal password database. -You will probably also want to set the following options to false to +You will probably also want to set the following options to `false` to disable the regular login/registration flows: - * `enable_registration` - * `password_config.enabled` - -You will also want to investigate the settings under the "sso" configuration -section below. + * [`enable_registration`](#enable_registration) + * [`password_config.enabled`](#password_config) --- ### `saml2_config` diff --git a/docs/workers.md b/docs/workers.md index 6969c424d..40b185231 100644 --- a/docs/workers.md +++ b/docs/workers.md @@ -32,13 +32,8 @@ stream between all configured Synapse processes. Additionally, processes may make HTTP requests to each other, primarily for operations which need to wait for a reply ─ such as sending an event. -Redis support was added in v1.13.0 with it becoming the recommended method in -v1.18.0. It replaced the old direct TCP connections (which is deprecated as of -v1.18.0) to the main process. With Redis, rather than all the workers connecting -to the main process, all the workers and the main process connect to Redis, -which relays replication commands between processes. This can give a significant -cpu saving on the main process and will be a prerequisite for upcoming -performance improvements. +All the workers and the main process connect to Redis, which relays replication +commands between processes. If Redis support is enabled Synapse will use it as a shared cache, as well as a pub/sub mechanism. @@ -117,23 +112,26 @@ redis: enabled: true ``` -See the sample config for the full documentation of each option. +See the [configuration manual](usage/configuration/config_documentation.html) for the full documentation of each option. Under **no circumstances** should the replication listener be exposed to the -public internet; it has no authentication and is unencrypted. +public internet; replication traffic is: + +* always unencrypted +* unauthenticated, unless `worker_replication_secret` is configured ### Worker configuration -In the config file for each worker, you must specify the type of worker -application (`worker_app`), and you should specify a unique name for the worker -(`worker_name`). The currently available worker applications are listed below. -You must also specify the HTTP replication endpoint that it should talk to on -the main synapse process. `worker_replication_host` should specify the host of -the main synapse and `worker_replication_http_port` should point to the HTTP -replication port. If the worker will handle HTTP requests then the -`worker_listeners` option should be set with a `http` listener, in the same way -as the `listeners` option in the shared config. +In the config file for each worker, you must specify: + * The type of worker (`worker_app`). The currently available worker applications are listed below. + * A unique name for the worker (`worker_name`). 
+ * The HTTP replication endpoint that it should talk to on the main synapse process
+   (`worker_replication_host` and `worker_replication_http_port`).
+ * If handling HTTP requests, a `worker_listeners` option with an `http`
+   listener, in the same way as the `listeners` option in the shared config.
+ * If handling the `^/_matrix/client/v3/keys/upload` endpoint, the HTTP URI for
+   the main process (`worker_main_http_uri`).

 For example:

@@ -217,10 +215,12 @@ information.
     ^/_matrix/client/(api/v1|r0|v3|unstable)/search$

     # Encryption requests
+    # Note that ^/_matrix/client/(r0|v3|unstable)/keys/upload/ requires `worker_main_http_uri`
     ^/_matrix/client/(r0|v3|unstable)/keys/query$
     ^/_matrix/client/(r0|v3|unstable)/keys/changes$
     ^/_matrix/client/(r0|v3|unstable)/keys/claim$
     ^/_matrix/client/(r0|v3|unstable)/room_keys/
+    ^/_matrix/client/(r0|v3|unstable)/keys/upload/

     # Registration/login requests
     ^/_matrix/client/(api/v1|r0|v3|unstable)/login$
@@ -325,7 +325,6 @@ effects of bursts of events from that bridge on events sent by normal users.

 Additionally, the writing of specific streams (such as events) can be moved off
 of the main process to a particular worker.
-(This is only supported with Redis-based replication.)

 To enable this, the worker must have a HTTP replication listener configured,
 have a `worker_name` and be listed in the `instance_map` config. The same worker
@@ -581,52 +580,23 @@ handle it, and are online.
 If `update_user_directory` is set to `false`, and this worker is not running,
 the above endpoint may give outdated results.

-### `synapse.app.frontend_proxy`
-
-Proxies some frequently-requested client endpoints to add caching and remove
-load from the main synapse. It can handle REST endpoints matching the following
-regular expressions:
-
-    ^/_matrix/client/(r0|v3|unstable)/keys/upload
-
-If `use_presence` is False in the homeserver config, it can also handle REST
-endpoints matching the following regular expressions:
-
-    ^/_matrix/client/(api/v1|r0|v3|unstable)/presence/[^/]+/status
-
-This "stub" presence handler will pass through `GET` request but make the
-`PUT` effectively a no-op.
-
-It will proxy any requests it cannot handle to the main synapse instance. It
-must therefore be configured with the location of the main instance, via
-the `worker_main_http_uri` setting in the `frontend_proxy` worker configuration
-file. For example:
-
-```yaml
-worker_main_http_uri: http://127.0.0.1:8008
-```
-
 ### Historical apps

-*Note:* Historically there used to be more apps, however they have been
-amalgamated into a single `synapse.app.generic_worker` app. The remaining apps
-are ones that do specific processing unrelated to requests, e.g. the `pusher`
-that handles sending out push notifications for new events. The intention is for
-all these to be folded into the `generic_worker` app and to use config to define
-which processes handle the various proccessing such as push notifications.
+The following used to be separate worker application types, but are now
+equivalent to `synapse.app.generic_worker`:
+
+ * `synapse.app.client_reader`
+ * `synapse.app.event_creator`
+ * `synapse.app.federation_reader`
+ * `synapse.app.frontend_proxy`
+ * `synapse.app.synchrotron`

 ## Migration from old config

-There are two main independent changes that have been made: introducing Redis
-support and merging apps into `synapse.app.generic_worker`.
Both these changes -are backwards compatible and so no changes to the config are required, however -server admins are encouraged to plan to migrate to Redis as the old style direct -TCP replication config is deprecated. - -To migrate to Redis add the `redis` config as above, and optionally remove the -TCP `replication` listener from master and `worker_replication_port` from worker -config. +A main change that has occurred is the merging of worker apps into +`synapse.app.generic_worker`. This change is backwards compatible and so no +changes to the config are required. To migrate apps to use `synapse.app.generic_worker` simply update the `worker_app` option in the worker configs, and where worker are started (e.g. diff --git a/poetry.lock b/poetry.lock index 651659ec9..35021390b 100644 --- a/poetry.lock +++ b/poetry.lock @@ -7,10 +7,10 @@ optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" [package.extras] -dev = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "six", "mypy", "pytest-mypy-plugins", "zope.interface", "furo", "sphinx", "sphinx-notfound-page", "pre-commit", "cloudpickle"] -docs = ["furo", "sphinx", "zope.interface", "sphinx-notfound-page"] -tests = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "six", "mypy", "pytest-mypy-plugins", "zope.interface", "cloudpickle"] -tests_no_zope = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "six", "mypy", "pytest-mypy-plugins", "cloudpickle"] +dev = ["cloudpickle", "coverage[toml] (>=5.0.2)", "furo", "hypothesis", "mypy", "pre-commit", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "six", "sphinx", "sphinx-notfound-page", "zope.interface"] +docs = ["furo", "sphinx", "sphinx-notfound-page", "zope.interface"] +tests = ["cloudpickle", "coverage[toml] (>=5.0.2)", "hypothesis", "mypy", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "six", "zope.interface"] +tests_no_zope = ["cloudpickle", "coverage[toml] (>=5.0.2)", "hypothesis", "mypy", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "six"] [[package]] name = "authlib" @@ -39,7 +39,7 @@ attrs = ">=19.2.0" six = "*" [package.extras] -visualize = ["graphviz (>0.5.1)", "Twisted (>=16.1.1)"] +visualize = ["Twisted (>=16.1.1)", "graphviz (>0.5.1)"] [[package]] name = "bcrypt" @@ -177,7 +177,7 @@ optional = false python-versions = "*" [package.extras] -test = ["hypothesis (==3.55.3)", "flake8 (==3.7.8)"] +test = ["flake8 (==3.7.8)", "hypothesis (==3.55.3)"] [[package]] name = "constantly" @@ -199,12 +199,12 @@ python-versions = ">=3.6" cffi = ">=1.12" [package.extras] -docs = ["sphinx (>=1.6.5,!=1.8.0,!=3.1.0,!=3.1.1)", "sphinx-rtd-theme"] -docstest = ["pyenchant (>=1.6.11)", "twine (>=1.12.0)", "sphinxcontrib-spelling (>=4.0.1)"] +docs = ["sphinx (>=1.6.5,!=1.8.0,!=3.1.0,!=3.1.1)", "sphinx_rtd_theme"] +docstest = ["pyenchant (>=1.6.11)", "sphinxcontrib-spelling (>=4.0.1)", "twine (>=1.12.0)"] pep8test = ["black", "flake8", "flake8-import-order", "pep8-naming"] sdist = ["setuptools_rust (>=0.11.4)"] ssh = ["bcrypt (>=3.1.5)"] -test = ["pytest (>=6.2.0)", "pytest-cov", "pytest-subtests", "pytest-xdist", "pretend", "iso8601", "pytz", "hypothesis (>=1.11.4,!=3.79.2)"] +test = ["hypothesis (>=1.11.4,!=3.79.2)", "iso8601", "pretend", "pytest (>=6.2.0)", "pytest-cov", "pytest-subtests", "pytest-xdist", "pytz"] [[package]] name = "defusedxml" @@ -226,7 +226,7 @@ python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" wrapt = ">=1.10,<2" [package.extras] -dev = ["tox", 
"bump2version (<1)", "sphinx (<2)", "importlib-metadata (<3)", "importlib-resources (<4)", "configparser (<5)", "sphinxcontrib-websupport (<2)", "zipp (<2)", "PyTest (<5)", "PyTest-Cov (<2.6)", "pytest", "pytest-cov"] +dev = ["PyTest", "PyTest (<5)", "PyTest-Cov", "PyTest-Cov (<2.6)", "bump2version (<1)", "configparser (<5)", "importlib-metadata (<3)", "importlib-resources (<4)", "sphinx (<2)", "sphinxcontrib-websupport (<2)", "tox", "zipp (<2)"] [[package]] name = "docutils" @@ -245,7 +245,7 @@ optional = true python-versions = ">=3.7" [package.extras] -dev = ["tox", "coverage", "lxml", "xmlschema (>=1.8.0)", "sphinx", "memory-profiler", "flake8", "mypy (==0.910)"] +dev = ["Sphinx", "coverage", "flake8", "lxml", "memory-profiler", "mypy (==0.910)", "tox", "xmlschema (>=1.8.0)"] [[package]] name = "flake8" @@ -274,7 +274,7 @@ attrs = ">=19.2.0" flake8 = ">=3.0.0" [package.extras] -dev = ["coverage", "black", "hypothesis", "hypothesmith"] +dev = ["black", "coverage", "hypothesis", "hypothesmith"] [[package]] name = "flake8-comprehensions" @@ -367,8 +367,8 @@ typing-extensions = {version = ">=3.6.4", markers = "python_version < \"3.8\""} zipp = ">=0.5" [package.extras] -docs = ["sphinx", "jaraco.packaging (>=8.2)", "rst.linker (>=1.9)"] -testing = ["pytest (>=4.6)", "pytest-checkdocs (>=2.4)", "pytest-flake8", "pytest-cov", "pytest-enabler (>=1.0.1)", "packaging", "pep517", "pyfakefs", "flufl.flake8", "pytest-black (>=0.3.7)", "pytest-mypy", "importlib-resources (>=1.3)"] +docs = ["jaraco.packaging (>=8.2)", "rst.linker (>=1.9)", "sphinx"] +testing = ["flufl.flake8", "importlib-resources (>=1.3)", "packaging", "pep517", "pyfakefs", "pytest (>=4.6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.0.1)", "pytest-flake8", "pytest-mypy"] [[package]] name = "importlib-resources" @@ -382,8 +382,8 @@ python-versions = ">=3.6" zipp = {version = ">=3.1.0", markers = "python_version < \"3.10\""} [package.extras] -docs = ["sphinx", "jaraco.packaging (>=8.2)", "rst.linker (>=1.9)"] -testing = ["pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-flake8", "pytest-cov", "pytest-enabler (>=1.0.1)", "pytest-black (>=0.3.7)", "pytest-mypy"] +docs = ["jaraco.packaging (>=8.2)", "rst.linker (>=1.9)", "sphinx"] +testing = ["pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.0.1)", "pytest-flake8", "pytest-mypy"] [[package]] name = "incremental" @@ -405,9 +405,9 @@ optional = false python-versions = ">=3.6,<4.0" [package.extras] -pipfile_deprecated_finder = ["pipreqs", "requirementslib"] -requirements_deprecated_finder = ["pipreqs", "pip-api"] colors = ["colorama (>=0.4.3,<0.5.0)"] +pipfile_deprecated_finder = ["pipreqs", "requirementslib"] +requirements_deprecated_finder = ["pip-api", "pipreqs"] [[package]] name = "jaeger-client" @@ -424,7 +424,7 @@ thrift = "*" tornado = ">=4.3" [package.extras] -tests = ["mock", "pycurl", "pytest", "pytest-cov", "coverage", "pytest-timeout", "pytest-tornado", "pytest-benchmark", "pytest-localserver", "flake8", "flake8-quotes", "flake8-typing-imports", "codecov", "tchannel (==2.1.0)", "opentracing_instrumentation (>=3,<4)", "prometheus_client (==0.11.0)", "mypy"] +tests = ["codecov", "coverage", "flake8", "flake8-quotes", "flake8-typing-imports", "mock", "mypy", "opentracing_instrumentation (>=3,<4)", "prometheus_client (==0.11.0)", "pycurl", "pytest", "pytest-benchmark[histogram]", "pytest-cov", "pytest-localserver", "pytest-timeout", "pytest-tornado", "tchannel (==2.1.0)"] 
[[package]] name = "jeepney" @@ -435,8 +435,8 @@ optional = false python-versions = ">=3.6" [package.extras] -trio = ["async-generator", "trio"] -test = ["async-timeout", "trio", "testpath", "pytest-asyncio", "pytest-trio", "pytest"] +test = ["async-timeout", "pytest", "pytest-asyncio", "pytest-trio", "testpath", "trio"] +trio = ["async_generator", "trio"] [[package]] name = "jinja2" @@ -486,8 +486,8 @@ pywin32-ctypes = {version = "<0.1.0 || >0.1.0,<0.1.1 || >0.1.1", markers = "sys_ SecretStorage = {version = ">=3.2", markers = "sys_platform == \"linux\""} [package.extras] -docs = ["sphinx", "jaraco.packaging (>=8.2)", "rst.linker (>=1.9)", "jaraco.tidelift (>=1.4)"] -testing = ["pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-flake8", "pytest-cov", "pytest-enabler (>=1.0.1)", "pytest-black (>=0.3.7)", "pytest-mypy"] +docs = ["jaraco.packaging (>=8.2)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx"] +testing = ["pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.0.1)", "pytest-flake8", "pytest-mypy"] [[package]] name = "ldap3" @@ -511,7 +511,7 @@ python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, != 3.4.*" [package.extras] cssselect = ["cssselect (>=0.7)"] html5 = ["html5lib"] -htmlsoup = ["beautifulsoup4"] +htmlsoup = ["BeautifulSoup4"] source = ["Cython (>=0.29.7)"] [[package]] @@ -535,8 +535,8 @@ attrs = "*" importlib-metadata = {version = ">=1.4", markers = "python_version < \"3.8\""} [package.extras] -test = ["aiounittest", "twisted", "tox"] -dev = ["twine (==4.0.1)", "build (==0.8.0)", "isort (==5.9.3)", "flake8 (==4.0.1)", "black (==22.3.0)", "mypy (==0.910)", "aiounittest", "twisted", "tox"] +dev = ["aiounittest", "black (==22.3.0)", "build (==0.8.0)", "flake8 (==4.0.1)", "isort (==5.9.3)", "mypy (==0.910)", "tox", "twine (==4.0.1)", "twisted"] +test = ["aiounittest", "tox", "twisted"] [[package]] name = "matrix-synapse-ldap3" @@ -552,7 +552,7 @@ service-identity = "*" Twisted = ">=15.1.0" [package.extras] -dev = ["isort (==5.9.3)", "flake8 (==4.0.1)", "black (==22.3.0)", "types-setuptools", "mypy (==0.910)", "ldaptor", "tox", "matrix-synapse"] +dev = ["black (==22.3.0)", "flake8 (==4.0.1)", "isort (==5.9.3)", "ldaptor", "matrix-synapse", "mypy (==0.910)", "tox", "types-setuptools"] [[package]] name = "mccabe" @@ -611,7 +611,7 @@ mypy = "0.950" "zope.schema" = "*" [package.extras] -test = ["pytest (>=4.6)", "pytest-cov", "lxml"] +test = ["lxml", "pytest (>=4.6)", "pytest-cov"] [[package]] name = "netaddr" @@ -630,7 +630,7 @@ optional = true python-versions = "*" [package.extras] -tests = ["doubles", "flake8", "flake8-quotes", "mock", "pytest", "pytest-cov", "pytest-mock", "sphinx", "sphinx-rtd-theme", "six (>=1.10.0,<2.0)", "gevent", "tornado"] +tests = ["Sphinx", "doubles", "flake8", "flake8-quotes", "gevent", "mock", "pytest", "pytest-cov", "pytest-mock", "six (>=1.10.0,<2.0)", "sphinx_rtd_theme", "tornado"] [[package]] name = "packaging" @@ -835,10 +835,10 @@ optional = false python-versions = ">=3.6" [package.extras] -tests = ["coverage[toml] (==5.0.4)", "pytest (>=6.0.0,<7.0.0)"] -docs = ["zope.interface", "sphinx-rtd-theme", "sphinx"] -dev = ["pre-commit", "mypy", "coverage[toml] (==5.0.4)", "pytest (>=6.0.0,<7.0.0)", "cryptography (>=3.3.1)", "zope.interface", "sphinx-rtd-theme", "sphinx"] crypto = ["cryptography (>=3.3.1)"] +dev = ["coverage[toml] (==5.0.4)", "cryptography (>=3.3.1)", "mypy", "pre-commit", "pytest (>=6.0.0,<7.0.0)", "sphinx", "sphinx-rtd-theme", "zope.interface"] +docs = 
["sphinx", "sphinx-rtd-theme", "zope.interface"] +tests = ["coverage[toml] (==5.0.4)", "pytest (>=6.0.0,<7.0.0)"] [[package]] name = "pymacaroons" @@ -872,8 +872,8 @@ python-versions = ">=3.6" cffi = ">=1.4.1" [package.extras] -docs = ["sphinx (>=1.6.5)", "sphinx-rtd-theme"] -tests = ["pytest (>=3.2.1,!=3.3.0)", "hypothesis (>=3.27.0)"] +docs = ["sphinx (>=1.6.5)", "sphinx_rtd_theme"] +tests = ["hypothesis (>=3.27.0)", "pytest (>=3.2.1,!=3.3.0)"] [[package]] name = "pyopenssl" @@ -925,11 +925,12 @@ pyOpenSSL = "*" python-dateutil = "*" pytz = "*" requests = ">=1.0.0" +setuptools = "*" six = "*" xmlschema = ">=1.2.1" [package.extras] -s2repoze = ["paste", "zope.interface", "repoze.who"] +s2repoze = ["paste", "repoze.who", "zope.interface"] [[package]] name = "python-dateutil" @@ -1054,11 +1055,11 @@ celery = ["celery (>=3)"] chalice = ["chalice (>=1.16.0)"] django = ["django (>=1.8)"] falcon = ["falcon (>=1.4)"] -flask = ["flask (>=0.11)", "blinker (>=1.1)"] +flask = ["blinker (>=1.1)", "flask (>=0.11)"] httpx = ["httpx (>=0.16.0)"] -pure_eval = ["pure-eval", "executing", "asttokens"] +pure_eval = ["asttokens", "executing", "pure-eval"] pyspark = ["pyspark (>=2.4.4)"] -quart = ["quart (>=0.16.1)", "blinker (>=1.1)"] +quart = ["blinker (>=1.1)", "quart (>=0.16.1)"] rq = ["rq (>=0.6)"] sanic = ["sanic (>=0.8)"] sqlalchemy = ["sqlalchemy (>=1.2)"] @@ -1080,11 +1081,24 @@ pyasn1-modules = "*" six = "*" [package.extras] -dev = ["coverage[toml] (>=5.0.2)", "pytest", "sphinx", "furo", "idna", "pyopenssl"] -docs = ["sphinx", "furo"] +dev = ["coverage[toml] (>=5.0.2)", "furo", "idna", "pyOpenSSL", "pytest", "sphinx"] +docs = ["furo", "sphinx"] idna = ["idna"] tests = ["coverage[toml] (>=5.0.2)", "pytest"] +[[package]] +name = "setuptools" +version = "65.3.0" +description = "Easily download, build, install, upgrade, and uninstall Python packages" +category = "main" +optional = false +python-versions = ">=3.7" + +[package.extras] +docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "rst.linker (>=1.9)", "sphinx", "sphinx-favicon", "sphinx-hoverxref (<2)", "sphinx-inline-tabs", "sphinx-notfound-page (==0.8.3)", "sphinx-reredirects", "sphinxcontrib-towncrier"] +testing = ["build[virtualenv]", "filelock (>=3.4.0)", "flake8 (<5)", "flake8-2020", "ini2toml[lite] (>=0.9)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "mock", "pip (>=19.1)", "pip-run (>=8.8)", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-flake8", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-xdist", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] +testing-integration = ["build[virtualenv]", "filelock (>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"] + [[package]] name = "signedjson" version = "1.1.4" @@ -1199,6 +1213,7 @@ click = "*" click-default-group = "*" incremental = "*" jinja2 = "*" +setuptools = "*" tomli = {version = "*", markers = "python_version >= \"3.6\""} [package.extras] @@ -1236,7 +1251,7 @@ requests = ">=2.1.0" Twisted = {version = ">=18.7.0", extras = ["tls"]} [package.extras] -dev = ["pep8", "pyflakes", "httpbin (==0.5.0)"] +dev = ["httpbin (==0.5.0)", "pep8", "pyflakes"] docs = ["sphinx (>=1.4.8)"] [[package]] @@ -1281,20 +1296,20 @@ typing-extensions = ">=3.6.5" "zope.interface" = ">=4.4.2" [package.extras] -all_non_platform = ["cython-test-exception-raiser (>=1.0.2,<2)", "PyHamcrest 
(>=1.9.0)", "pyopenssl (>=16.0.0)", "service-identity (>=18.1.0)", "idna (>=2.4)", "pyasn1", "cryptography (>=2.6)", "appdirs (>=1.4.0)", "bcrypt (>=3.0.0)", "pyserial (>=3.0)", "h2 (>=3.0,<5.0)", "priority (>=1.1.0,<2.0)", "pywin32 (!=226)", "contextvars (>=2.4,<3)"] -conch = ["pyasn1", "cryptography (>=2.6)", "appdirs (>=1.4.0)", "bcrypt (>=3.0.0)"] -conch_nacl = ["pyasn1", "cryptography (>=2.6)", "appdirs (>=1.4.0)", "bcrypt (>=3.0.0)", "pynacl"] +all_non_platform = ["PyHamcrest (>=1.9.0)", "appdirs (>=1.4.0)", "bcrypt (>=3.0.0)", "contextvars (>=2.4,<3)", "cryptography (>=2.6)", "cython-test-exception-raiser (>=1.0.2,<2)", "h2 (>=3.0,<5.0)", "idna (>=2.4)", "priority (>=1.1.0,<2.0)", "pyasn1", "pyopenssl (>=16.0.0)", "pyserial (>=3.0)", "pywin32 (!=226)", "service-identity (>=18.1.0)"] +conch = ["appdirs (>=1.4.0)", "bcrypt (>=3.0.0)", "cryptography (>=2.6)", "pyasn1"] +conch_nacl = ["PyNaCl", "appdirs (>=1.4.0)", "bcrypt (>=3.0.0)", "cryptography (>=2.6)", "pyasn1"] contextvars = ["contextvars (>=2.4,<3)"] -dev = ["towncrier (>=19.2,<20.0)", "sphinx-rtd-theme (>=0.5,<1.0)", "readthedocs-sphinx-ext (>=2.1,<3.0)", "sphinx (>=4.1.2,<6)", "pyflakes (>=2.2,<3.0)", "twistedchecker (>=0.7,<1.0)", "coverage (>=6b1,<7)", "python-subunit (>=1.4,<2.0)", "pydoctor (>=21.9.0,<21.10.0)"] -dev_release = ["towncrier (>=19.2,<20.0)", "sphinx-rtd-theme (>=0.5,<1.0)", "readthedocs-sphinx-ext (>=2.1,<3.0)", "sphinx (>=4.1.2,<6)", "pydoctor (>=21.9.0,<21.10.0)"] +dev = ["coverage (>=6b1,<7)", "pydoctor (>=21.9.0,<21.10.0)", "pyflakes (>=2.2,<3.0)", "python-subunit (>=1.4,<2.0)", "readthedocs-sphinx-ext (>=2.1,<3.0)", "sphinx (>=4.1.2,<6)", "sphinx-rtd-theme (>=0.5,<1.0)", "towncrier (>=19.2,<20.0)", "twistedchecker (>=0.7,<1.0)"] +dev_release = ["pydoctor (>=21.9.0,<21.10.0)", "readthedocs-sphinx-ext (>=2.1,<3.0)", "sphinx (>=4.1.2,<6)", "sphinx-rtd-theme (>=0.5,<1.0)", "towncrier (>=19.2,<20.0)"] http2 = ["h2 (>=3.0,<5.0)", "priority (>=1.1.0,<2.0)"] -macos_platform = ["pyobjc-core", "pyobjc-framework-cfnetwork", "pyobjc-framework-cocoa", "cython-test-exception-raiser (>=1.0.2,<2)", "PyHamcrest (>=1.9.0)", "pyopenssl (>=16.0.0)", "service-identity (>=18.1.0)", "idna (>=2.4)", "pyasn1", "cryptography (>=2.6)", "appdirs (>=1.4.0)", "bcrypt (>=3.0.0)", "pyserial (>=3.0)", "h2 (>=3.0,<5.0)", "priority (>=1.1.0,<2.0)", "pywin32 (!=226)", "contextvars (>=2.4,<3)"] -mypy = ["mypy (==0.930)", "mypy-zope (==0.3.4)", "types-setuptools", "types-pyopenssl", "towncrier (>=19.2,<20.0)", "sphinx-rtd-theme (>=0.5,<1.0)", "readthedocs-sphinx-ext (>=2.1,<3.0)", "sphinx (>=4.1.2,<6)", "pyflakes (>=2.2,<3.0)", "twistedchecker (>=0.7,<1.0)", "coverage (>=6b1,<7)", "cython-test-exception-raiser (>=1.0.2,<2)", "PyHamcrest (>=1.9.0)", "pyopenssl (>=16.0.0)", "service-identity (>=18.1.0)", "idna (>=2.4)", "pyasn1", "cryptography (>=2.6)", "appdirs (>=1.4.0)", "bcrypt (>=3.0.0)", "pyserial (>=3.0)", "h2 (>=3.0,<5.0)", "priority (>=1.1.0,<2.0)", "pynacl", "pywin32 (!=226)", "python-subunit (>=1.4,<2.0)", "contextvars (>=2.4,<3)", "pydoctor (>=21.9.0,<21.10.0)"] -osx_platform = ["pyobjc-core", "pyobjc-framework-cfnetwork", "pyobjc-framework-cocoa", "cython-test-exception-raiser (>=1.0.2,<2)", "PyHamcrest (>=1.9.0)", "pyopenssl (>=16.0.0)", "service-identity (>=18.1.0)", "idna (>=2.4)", "pyasn1", "cryptography (>=2.6)", "appdirs (>=1.4.0)", "bcrypt (>=3.0.0)", "pyserial (>=3.0)", "h2 (>=3.0,<5.0)", "priority (>=1.1.0,<2.0)", "pywin32 (!=226)", "contextvars (>=2.4,<3)"] +macos_platform = ["PyHamcrest (>=1.9.0)", "appdirs (>=1.4.0)", 
"bcrypt (>=3.0.0)", "contextvars (>=2.4,<3)", "cryptography (>=2.6)", "cython-test-exception-raiser (>=1.0.2,<2)", "h2 (>=3.0,<5.0)", "idna (>=2.4)", "priority (>=1.1.0,<2.0)", "pyasn1", "pyobjc-core", "pyobjc-framework-CFNetwork", "pyobjc-framework-Cocoa", "pyopenssl (>=16.0.0)", "pyserial (>=3.0)", "pywin32 (!=226)", "service-identity (>=18.1.0)"] +mypy = ["PyHamcrest (>=1.9.0)", "PyNaCl", "appdirs (>=1.4.0)", "bcrypt (>=3.0.0)", "contextvars (>=2.4,<3)", "coverage (>=6b1,<7)", "cryptography (>=2.6)", "cython-test-exception-raiser (>=1.0.2,<2)", "h2 (>=3.0,<5.0)", "idna (>=2.4)", "mypy (==0.930)", "mypy-zope (==0.3.4)", "priority (>=1.1.0,<2.0)", "pyasn1", "pydoctor (>=21.9.0,<21.10.0)", "pyflakes (>=2.2,<3.0)", "pyopenssl (>=16.0.0)", "pyserial (>=3.0)", "python-subunit (>=1.4,<2.0)", "pywin32 (!=226)", "readthedocs-sphinx-ext (>=2.1,<3.0)", "service-identity (>=18.1.0)", "sphinx (>=4.1.2,<6)", "sphinx-rtd-theme (>=0.5,<1.0)", "towncrier (>=19.2,<20.0)", "twistedchecker (>=0.7,<1.0)", "types-pyOpenSSL", "types-setuptools"] +osx_platform = ["PyHamcrest (>=1.9.0)", "appdirs (>=1.4.0)", "bcrypt (>=3.0.0)", "contextvars (>=2.4,<3)", "cryptography (>=2.6)", "cython-test-exception-raiser (>=1.0.2,<2)", "h2 (>=3.0,<5.0)", "idna (>=2.4)", "priority (>=1.1.0,<2.0)", "pyasn1", "pyobjc-core", "pyobjc-framework-CFNetwork", "pyobjc-framework-Cocoa", "pyopenssl (>=16.0.0)", "pyserial (>=3.0)", "pywin32 (!=226)", "service-identity (>=18.1.0)"] serial = ["pyserial (>=3.0)", "pywin32 (!=226)"] -test = ["cython-test-exception-raiser (>=1.0.2,<2)", "PyHamcrest (>=1.9.0)"] -tls = ["pyopenssl (>=16.0.0)", "service-identity (>=18.1.0)", "idna (>=2.4)"] -windows_platform = ["pywin32 (!=226)", "cython-test-exception-raiser (>=1.0.2,<2)", "PyHamcrest (>=1.9.0)", "pyopenssl (>=16.0.0)", "service-identity (>=18.1.0)", "idna (>=2.4)", "pyasn1", "cryptography (>=2.6)", "appdirs (>=1.4.0)", "bcrypt (>=3.0.0)", "pyserial (>=3.0)", "h2 (>=3.0,<5.0)", "priority (>=1.1.0,<2.0)", "pywin32 (!=226)", "contextvars (>=2.4,<3)"] +test = ["PyHamcrest (>=1.9.0)", "cython-test-exception-raiser (>=1.0.2,<2)"] +tls = ["idna (>=2.4)", "pyopenssl (>=16.0.0)", "service-identity (>=18.1.0)"] +windows_platform = ["PyHamcrest (>=1.9.0)", "appdirs (>=1.4.0)", "bcrypt (>=3.0.0)", "contextvars (>=2.4,<3)", "cryptography (>=2.6)", "cython-test-exception-raiser (>=1.0.2,<2)", "h2 (>=3.0,<5.0)", "idna (>=2.4)", "priority (>=1.1.0,<2.0)", "pyasn1", "pyopenssl (>=16.0.0)", "pyserial (>=3.0)", "pywin32 (!=226)", "pywin32 (!=226)", "service-identity (>=18.1.0)"] [[package]] name = "twisted-iocpsupport" @@ -1472,7 +1487,7 @@ python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, <4" [package.extras] brotli = ["brotlipy (>=0.6.0)"] -secure = ["pyOpenSSL (>=0.14)", "cryptography (>=1.3.4)", "idna (>=2.0.0)", "certifi", "ipaddress"] +secure = ["certifi", "cryptography (>=1.3.4)", "idna (>=2.0.0)", "ipaddress", "pyOpenSSL (>=0.14)"] socks = ["PySocks (>=1.5.6,!=1.5.7,<2.0)"] [[package]] @@ -1504,8 +1519,8 @@ elementpath = ">=2.5.0,<3.0.0" [package.extras] codegen = ["elementpath (>=2.5.0,<3.0.0)", "jinja2"] -dev = ["tox", "coverage", "lxml", "elementpath (>=2.5.0,<3.0.0)", "memory-profiler", "sphinx", "sphinx-rtd-theme", "jinja2", "flake8", "mypy", "lxml-stubs"] -docs = ["elementpath (>=2.5.0,<3.0.0)", "sphinx", "sphinx-rtd-theme", "jinja2"] +dev = ["Sphinx", "coverage", "elementpath (>=2.5.0,<3.0.0)", "flake8", "jinja2", "lxml", "lxml-stubs", "memory-profiler", "mypy", "sphinx-rtd-theme", "tox"] +docs = ["Sphinx", "elementpath 
(>=2.5.0,<3.0.0)", "jinja2", "sphinx-rtd-theme"] [[package]] name = "zipp" @@ -1516,8 +1531,8 @@ optional = false python-versions = ">=3.7" [package.extras] -docs = ["sphinx", "jaraco.packaging (>=8.2)", "rst.linker (>=1.9)"] -testing = ["pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-flake8", "pytest-cov", "pytest-enabler (>=1.0.1)", "jaraco.itertools", "func-timeout", "pytest-black (>=0.3.7)", "pytest-mypy"] +docs = ["jaraco.packaging (>=8.2)", "rst.linker (>=1.9)", "sphinx"] +testing = ["func-timeout", "jaraco.itertools", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.0.1)", "pytest-flake8", "pytest-mypy"] [[package]] name = "zope.event" @@ -1527,8 +1542,11 @@ category = "dev" optional = false python-versions = "*" +[package.dependencies] +setuptools = "*" + [package.extras] -docs = ["sphinx"] +docs = ["Sphinx"] test = ["zope.testrunner"] [[package]] @@ -1539,8 +1557,11 @@ category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +[package.dependencies] +setuptools = "*" + [package.extras] -docs = ["sphinx", "repoze.sphinx.autointerface"] +docs = ["Sphinx", "repoze.sphinx.autointerface"] test = ["coverage (>=5.0.3)", "zope.event", "zope.testing"] testing = ["coverage (>=5.0.3)", "zope.event", "zope.testing"] @@ -1553,11 +1574,12 @@ optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" [package.dependencies] +setuptools = "*" "zope.event" = "*" "zope.interface" = ">=5.0.0" [package.extras] -docs = ["sphinx", "repoze.sphinx.autointerface"] +docs = ["Sphinx", "repoze.sphinx.autointerface"] test = ["zope.i18nmessageid", "zope.testing", "zope.testrunner"] [extras] @@ -2458,6 +2480,10 @@ service-identity = [ {file = "service-identity-21.1.0.tar.gz", hash = "sha256:6e6c6086ca271dc11b033d17c3a8bea9f24ebff920c587da090afc9519419d34"}, {file = "service_identity-21.1.0-py2.py3-none-any.whl", hash = "sha256:f0b0caac3d40627c3c04d7a51b6e06721857a0e10a8775f2d1d7e72901b3a7db"}, ] +setuptools = [ + {file = "setuptools-65.3.0-py3-none-any.whl", hash = "sha256:2e24e0bec025f035a2e72cdd1961119f557d78ad331bb00ff82efb2ab8da8e82"}, + {file = "setuptools-65.3.0.tar.gz", hash = "sha256:7732871f4f7fa58fb6bdcaeadb0161b2bd046c85905dbaa066bdcbcc81953b57"}, +] signedjson = [ {file = "signedjson-1.1.4-py3-none-any.whl", hash = "sha256:45569ec54241c65d2403fe3faf7169be5322547706a231e884ca2b427f23d228"}, {file = "signedjson-1.1.4.tar.gz", hash = "sha256:cd91c56af53f169ef032c62e9c4a3292dc158866933318d0592e3462db3d6492"}, diff --git a/pyproject.toml b/pyproject.toml index a41d88ea7..dfb5c788b 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -54,7 +54,7 @@ skip_gitignore = true [tool.poetry] name = "matrix-synapse" -version = "1.66.0" +version = "1.67.0rc1" description = "Homeserver for the Matrix decentralised comms protocol" authors = ["Matrix.org Team and Contributors "] license = "Apache-2.0" diff --git a/requirements.txt b/requirements.txt index 7a6c0398e..70dd5928e 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,777 +1,783 @@ -attrs==21.4.0 \ +attrs==21.4.0 ; python_full_version >= "3.7.1" and python_full_version < "4.0.0" \ --hash=sha256:2d27e3784d7a565d36ab851fe94887c5eccd6a463168875832a1be79c82828b4 \ --hash=sha256:626ba8234211db98e869df76230a137c4c40a12d72445c45d5f5b716f076e2fd -authlib==0.15.5 \ - --hash=sha256:ecf4a7a9f2508c0bb07e93a752dd3c495cfaffc20e864ef0ffc95e3f40d2abaf \ - --hash=sha256:b83cf6360c8e92b0e9df0d1f32d675790bcc4e3c03977499b1eed24dcdef4252 
-automat==20.2.0 ; python_full_version >= "3.6.7" \ - --hash=sha256:b6feb6455337df834f6c9962d6ccf771515b7d939bca142b29c20c2376bc6111 \ - --hash=sha256:7979803c74610e11ef0c0d68a2942b152df52da55336e0c9d58daf1831cbdf33 -bcrypt==3.2.0 \ +authlib==0.15.5 ; python_full_version >= "3.7.1" and python_full_version < "4.0.0" \ + --hash=sha256:b83cf6360c8e92b0e9df0d1f32d675790bcc4e3c03977499b1eed24dcdef4252 \ + --hash=sha256:ecf4a7a9f2508c0bb07e93a752dd3c495cfaffc20e864ef0ffc95e3f40d2abaf +automat==20.2.0 ; python_full_version >= "3.7.1" and python_full_version < "4.0.0" \ + --hash=sha256:7979803c74610e11ef0c0d68a2942b152df52da55336e0c9d58daf1831cbdf33 \ + --hash=sha256:b6feb6455337df834f6c9962d6ccf771515b7d939bca142b29c20c2376bc6111 +bcrypt==3.2.0 ; python_full_version >= "3.7.1" and python_full_version < "4.0.0" \ + --hash=sha256:56e5da069a76470679f312a7d3d23deb3ac4519991a0361abc11da837087b61d \ + --hash=sha256:5b93c1726e50a93a033c36e5ca7fdcd29a5c7395af50a6892f5d9e7c6cfbfb29 \ + --hash=sha256:63d4e3ff96188e5898779b6057878fecf3f11cfe6ec3b313ea09955d587ec7a7 \ + --hash=sha256:81fec756feff5b6818ea7ab031205e1d323d8943d237303baca2c5f9c7846f34 \ + --hash=sha256:a0584a92329210fcd75eb8a3250c5a941633f8bfaf2a18f81009b097732839b7 \ + --hash=sha256:a67fb841b35c28a59cebed05fbd3e80eea26e6d75851f0574a9273c80f3e9b55 \ --hash=sha256:b589229207630484aefe5899122fb938a5b017b0f4349f769b8c13e78d99a8fd \ --hash=sha256:c95d4cbebffafcdd28bd28bb4e25b31c50f6da605c81ffd9ad8a3d1b2ab7b1b6 \ - --hash=sha256:63d4e3ff96188e5898779b6057878fecf3f11cfe6ec3b313ea09955d587ec7a7 \ --hash=sha256:cd1ea2ff3038509ea95f687256c46b79f5fc382ad0aa3664d200047546d511d1 \ - --hash=sha256:cdcdcb3972027f83fe24a48b1e90ea4b584d35f1cc279d76de6fc4b13376239d \ - --hash=sha256:a0584a92329210fcd75eb8a3250c5a941633f8bfaf2a18f81009b097732839b7 \ - --hash=sha256:56e5da069a76470679f312a7d3d23deb3ac4519991a0361abc11da837087b61d \ - --hash=sha256:a67fb841b35c28a59cebed05fbd3e80eea26e6d75851f0574a9273c80f3e9b55 \ - --hash=sha256:81fec756feff5b6818ea7ab031205e1d323d8943d237303baca2c5f9c7846f34 \ - --hash=sha256:5b93c1726e50a93a033c36e5ca7fdcd29a5c7395af50a6892f5d9e7c6cfbfb29 -bleach==4.1.0 \ - --hash=sha256:4d2651ab93271d1129ac9cbc679f524565cc8a1b791909c4a51eac4446a15994 \ - --hash=sha256:0900d8b37eba61a802ee40ac0061f8c2b5dee29c1927dd1d233e075ebf5a71da -canonicaljson==1.6.0 \ + --hash=sha256:cdcdcb3972027f83fe24a48b1e90ea4b584d35f1cc279d76de6fc4b13376239d +bleach==4.1.0 ; python_full_version >= "3.7.1" and python_full_version < "4.0.0" \ + --hash=sha256:0900d8b37eba61a802ee40ac0061f8c2b5dee29c1927dd1d233e075ebf5a71da \ + --hash=sha256:4d2651ab93271d1129ac9cbc679f524565cc8a1b791909c4a51eac4446a15994 +canonicaljson==1.6.0 ; python_full_version >= "3.7.1" and python_version < "4.0" \ --hash=sha256:7230c2a2a3db07874f622af84effe41a655e07bf23734830e18a454e65d5b998 \ --hash=sha256:8739d5fd91aca7281d425660ae65af7663808c8177778965f67e90b16a2b2427 -certifi==2021.10.8 ; python_full_version >= "3.6.0" \ - --hash=sha256:d62a0163eb4c2344ac042ab2bdf75399a71a2d8c7d47eac2e2ee91b9d6339569 \ - --hash=sha256:78884e7c1d4b00ce3cea67b44566851c4343c120abd683433ce934a68ea58872 -cffi==1.15.0 ; python_version >= "3.6" or platform_python_implementation == "PyPy" \ - --hash=sha256:c2502a1a03b6312837279c8c1bd3ebedf6c12c4228ddbad40912d671ccc8a962 \ - --hash=sha256:23cfe892bd5dd8941608f93348c0737e369e51c100d03718f108bf1add7bd6d0 \ - --hash=sha256:41d45de54cd277a7878919867c0f08b0cf817605e4eb94093e7516505d3c8d14 \ - --hash=sha256:4a306fa632e8f0928956a41fa8e1d6243c71e7eb59ffbd165fc0b41e316b2474 \ - 
--hash=sha256:e7022a66d9b55e93e1a845d8c9eba2a1bebd4966cd8bfc25d9cd07d515b33fa6 \ - --hash=sha256:14cd121ea63ecdae71efa69c15c5543a4b5fbcd0bbe2aad864baca0063cecf27 \ - --hash=sha256:d4d692a89c5cf08a8557fdeb329b82e7bf609aadfaed6c0d79f5a449a3c7c023 \ - --hash=sha256:0104fb5ae2391d46a4cb082abdd5c69ea4eab79d8d44eaaf79f1b1fd806ee4c2 \ - --hash=sha256:91ec59c33514b7c7559a6acda53bbfe1b283949c34fe7440bcf917f96ac0723e \ - --hash=sha256:f5c7150ad32ba43a07c4479f40241756145a1f03b43480e058cfd862bf5041c7 \ +certifi==2021.10.8 ; python_full_version >= "3.7.1" and python_full_version < "4.0.0" \ + --hash=sha256:78884e7c1d4b00ce3cea67b44566851c4343c120abd683433ce934a68ea58872 \ + --hash=sha256:d62a0163eb4c2344ac042ab2bdf75399a71a2d8c7d47eac2e2ee91b9d6339569 +cffi==1.15.0 ; python_full_version >= "3.7.1" and python_full_version < "4.0.0" \ --hash=sha256:00c878c90cb53ccfaae6b8bc18ad05d2036553e6d9d1d9dbcf323bbe83854ca3 \ - --hash=sha256:abb9a20a72ac4e0fdb50dae135ba5e77880518e742077ced47eb1499e29a443c \ - --hash=sha256:a5263e363c27b653a90078143adb3d076c1a748ec9ecc78ea2fb916f9b861962 \ - --hash=sha256:f54a64f8b0c8ff0b64d18aa76675262e1700f3995182267998c31ae974fbc382 \ - --hash=sha256:c21c9e3896c23007803a875460fb786118f0cdd4434359577ea25eb556e34c55 \ + --hash=sha256:0104fb5ae2391d46a4cb082abdd5c69ea4eab79d8d44eaaf79f1b1fd806ee4c2 \ + --hash=sha256:06c48159c1abed75c2e721b1715c379fa3200c7784271b3c46df01383b593636 \ + --hash=sha256:0808014eb713677ec1292301ea4c81ad277b6cdf2fdd90fd540af98c0b101d20 \ + --hash=sha256:10dffb601ccfb65262a27233ac273d552ddc4d8ae1bf93b21c94b8511bffe728 \ + --hash=sha256:14cd121ea63ecdae71efa69c15c5543a4b5fbcd0bbe2aad864baca0063cecf27 \ + --hash=sha256:17771976e82e9f94976180f76468546834d22a7cc404b17c22df2a2c81db0c66 \ + --hash=sha256:181dee03b1170ff1969489acf1c26533710231c58f95534e3edac87fff06c443 \ + --hash=sha256:23cfe892bd5dd8941608f93348c0737e369e51c100d03718f108bf1add7bd6d0 \ + --hash=sha256:263cc3d821c4ab2213cbe8cd8b355a7f72a8324577dc865ef98487c1aeee2bc7 \ + --hash=sha256:2756c88cbb94231c7a147402476be2c4df2f6078099a6f4a480d239a8817ae39 \ + --hash=sha256:27c219baf94952ae9d50ec19651a687b826792055353d07648a5695413e0c605 \ + --hash=sha256:2a23af14f408d53d5e6cd4e3d9a24ff9e05906ad574822a10563efcef137979a \ + --hash=sha256:31fb708d9d7c3f49a60f04cf5b119aeefe5644daba1cd2a0fe389b674fd1de37 \ + --hash=sha256:3415c89f9204ee60cd09b235810be700e993e343a408693e80ce7f6a40108029 \ + --hash=sha256:3773c4d81e6e818df2efbc7dd77325ca0dcb688116050fb2b3011218eda36139 \ + --hash=sha256:3b96a311ac60a3f6be21d2572e46ce67f09abcf4d09344c49274eb9e0bf345fc \ + --hash=sha256:3f7d084648d77af029acb79a0ff49a0ad7e9d09057a9bf46596dac9514dc07df \ + --hash=sha256:41d45de54cd277a7878919867c0f08b0cf817605e4eb94093e7516505d3c8d14 \ + --hash=sha256:4238e6dab5d6a8ba812de994bbb0a79bddbdf80994e4ce802b6f6f3142fcc880 \ + --hash=sha256:45db3a33139e9c8f7c09234b5784a5e33d31fd6907800b316decad50af323ff2 \ + --hash=sha256:45e8636704eacc432a206ac7345a5d3d2c62d95a507ec70d62f23cd91770482a \ + --hash=sha256:4958391dbd6249d7ad855b9ca88fae690783a6be9e86df65865058ed81fc860e \ + --hash=sha256:4a306fa632e8f0928956a41fa8e1d6243c71e7eb59ffbd165fc0b41e316b2474 \ + --hash=sha256:57e9ac9ccc3101fac9d6014fba037473e4358ef4e89f8e181f8951a2c0162024 \ + --hash=sha256:59888172256cac5629e60e72e86598027aca6bf01fa2465bdb676d37636573e8 \ --hash=sha256:5e069f72d497312b24fcc02073d70cb989045d1c91cbd53979366077959933e0 \ --hash=sha256:64d4ec9f448dfe041705426000cc13e34e6e5bb13736e9fd62e34a0b0c41566e \ - 
--hash=sha256:2756c88cbb94231c7a147402476be2c4df2f6078099a6f4a480d239a8817ae39 \ - --hash=sha256:3b96a311ac60a3f6be21d2572e46ce67f09abcf4d09344c49274eb9e0bf345fc \ - --hash=sha256:75e4024375654472cc27e91cbe9eaa08567f7fbdf822638be2814ce059f58032 \ - --hash=sha256:59888172256cac5629e60e72e86598027aca6bf01fa2465bdb676d37636573e8 \ - --hash=sha256:27c219baf94952ae9d50ec19651a687b826792055353d07648a5695413e0c605 \ - --hash=sha256:4958391dbd6249d7ad855b9ca88fae690783a6be9e86df65865058ed81fc860e \ - --hash=sha256:f6f824dc3bce0edab5f427efcfb1d63ee75b6fcb7282900ccaf925be84efb0fc \ - --hash=sha256:06c48159c1abed75c2e721b1715c379fa3200c7784271b3c46df01383b593636 \ - --hash=sha256:c2051981a968d7de9dd2d7b87bcb9c939c74a34626a6e2f8181455dd49ed69e4 \ - --hash=sha256:fd8a250edc26254fe5b33be00402e6d287f562b6a5b2152dec302fa15bb3e997 \ - --hash=sha256:91d77d2a782be4274da750752bb1650a97bfd8f291022b379bb8e01c66b4e96b \ - --hash=sha256:45db3a33139e9c8f7c09234b5784a5e33d31fd6907800b316decad50af323ff2 \ - --hash=sha256:263cc3d821c4ab2213cbe8cd8b355a7f72a8324577dc865ef98487c1aeee2bc7 \ - --hash=sha256:17771976e82e9f94976180f76468546834d22a7cc404b17c22df2a2c81db0c66 \ - --hash=sha256:3415c89f9204ee60cd09b235810be700e993e343a408693e80ce7f6a40108029 \ - --hash=sha256:4238e6dab5d6a8ba812de994bbb0a79bddbdf80994e4ce802b6f6f3142fcc880 \ - --hash=sha256:0808014eb713677ec1292301ea4c81ad277b6cdf2fdd90fd540af98c0b101d20 \ - --hash=sha256:57e9ac9ccc3101fac9d6014fba037473e4358ef4e89f8e181f8951a2c0162024 \ - --hash=sha256:8b6c2ea03845c9f501ed1313e78de148cd3f6cad741a75d43a29b43da27f2e1e \ - --hash=sha256:10dffb601ccfb65262a27233ac273d552ddc4d8ae1bf93b21c94b8511bffe728 \ - --hash=sha256:786902fb9ba7433aae840e0ed609f45c7bcd4e225ebb9c753aa39725bb3e6ad6 \ - --hash=sha256:da5db4e883f1ce37f55c667e5c0de439df76ac4cb55964655906306918e7363c \ - --hash=sha256:181dee03b1170ff1969489acf1c26533710231c58f95534e3edac87fff06c443 \ - --hash=sha256:45e8636704eacc432a206ac7345a5d3d2c62d95a507ec70d62f23cd91770482a \ - --hash=sha256:31fb708d9d7c3f49a60f04cf5b119aeefe5644daba1cd2a0fe389b674fd1de37 \ --hash=sha256:6dc2737a3674b3e344847c8686cf29e500584ccad76204efea14f451d4cc669a \ --hash=sha256:74fdfdbfdc48d3f47148976f49fab3251e550a8720bebc99bf1483f5bfb5db3e \ - --hash=sha256:ffaa5c925128e29efbde7301d8ecaf35c8c60ffbcd6a1ffd3a552177c8e5e796 \ - --hash=sha256:3f7d084648d77af029acb79a0ff49a0ad7e9d09057a9bf46596dac9514dc07df \ + --hash=sha256:75e4024375654472cc27e91cbe9eaa08567f7fbdf822638be2814ce059f58032 \ + --hash=sha256:786902fb9ba7433aae840e0ed609f45c7bcd4e225ebb9c753aa39725bb3e6ad6 \ + --hash=sha256:8b6c2ea03845c9f501ed1313e78de148cd3f6cad741a75d43a29b43da27f2e1e \ + --hash=sha256:91d77d2a782be4274da750752bb1650a97bfd8f291022b379bb8e01c66b4e96b \ + --hash=sha256:91ec59c33514b7c7559a6acda53bbfe1b283949c34fe7440bcf917f96ac0723e \ + --hash=sha256:920f0d66a896c2d99f0adbb391f990a84091179542c205fa53ce5787aff87954 \ + --hash=sha256:a5263e363c27b653a90078143adb3d076c1a748ec9ecc78ea2fb916f9b861962 \ + --hash=sha256:abb9a20a72ac4e0fdb50dae135ba5e77880518e742077ced47eb1499e29a443c \ + --hash=sha256:c2051981a968d7de9dd2d7b87bcb9c939c74a34626a6e2f8181455dd49ed69e4 \ + --hash=sha256:c21c9e3896c23007803a875460fb786118f0cdd4434359577ea25eb556e34c55 \ + --hash=sha256:c2502a1a03b6312837279c8c1bd3ebedf6c12c4228ddbad40912d671ccc8a962 \ + --hash=sha256:d4d692a89c5cf08a8557fdeb329b82e7bf609aadfaed6c0d79f5a449a3c7c023 \ + --hash=sha256:da5db4e883f1ce37f55c667e5c0de439df76ac4cb55964655906306918e7363c \ + 
--hash=sha256:e7022a66d9b55e93e1a845d8c9eba2a1bebd4966cd8bfc25d9cd07d515b33fa6 \ --hash=sha256:ef1f279350da2c586a69d32fc8733092fd32cc8ac95139a00377841f59a3f8d8 \ - --hash=sha256:2a23af14f408d53d5e6cd4e3d9a24ff9e05906ad574822a10563efcef137979a \ - --hash=sha256:3773c4d81e6e818df2efbc7dd77325ca0dcb688116050fb2b3011218eda36139 \ - --hash=sha256:920f0d66a896c2d99f0adbb391f990a84091179542c205fa53ce5787aff87954 -charset-normalizer==2.0.12 ; python_full_version >= "3.6.0" \ + --hash=sha256:f54a64f8b0c8ff0b64d18aa76675262e1700f3995182267998c31ae974fbc382 \ + --hash=sha256:f5c7150ad32ba43a07c4479f40241756145a1f03b43480e058cfd862bf5041c7 \ + --hash=sha256:f6f824dc3bce0edab5f427efcfb1d63ee75b6fcb7282900ccaf925be84efb0fc \ + --hash=sha256:fd8a250edc26254fe5b33be00402e6d287f562b6a5b2152dec302fa15bb3e997 \ + --hash=sha256:ffaa5c925128e29efbde7301d8ecaf35c8c60ffbcd6a1ffd3a552177c8e5e796 +charset-normalizer==2.0.12 ; python_full_version >= "3.7.1" and python_full_version < "4.0.0" \ --hash=sha256:2857e29ff0d34db842cd7ca3230549d1a697f96ee6d3fb071cfa6c7393832597 \ --hash=sha256:6881edbebdb17b39b4eaaa821b438bf6eddffb4468cf344f09f89def34a8b1df -constantly==15.1.0 ; python_full_version >= "3.6.7" \ - --hash=sha256:dd2fa9d6b1a51a83f0d7dd76293d734046aa176e384bf6e33b7e44880eb37c5d \ - --hash=sha256:586372eb92059873e29eba4f9dec8381541b4d3834660707faf8ba59146dfc35 -cryptography==36.0.1 \ - --hash=sha256:73bc2d3f2444bcfeac67dd130ff2ea598ea5f20b40e36d19821b4df8c9c5037b \ - --hash=sha256:2d87cdcb378d3cfed944dac30596da1968f88fb96d7fc34fdae30a99054b2e31 \ - --hash=sha256:74d6c7e80609c0f4c2434b97b80c7f8fdfaa072ca4baab7e239a15d6d70ed73a \ - --hash=sha256:6c0c021f35b421ebf5976abf2daacc47e235f8b6082d3396a2fe3ccd537ab173 \ - --hash=sha256:5d59a9d55027a8b88fd9fd2826c4392bd487d74bf628bb9d39beecc62a644c12 \ +constantly==15.1.0 ; python_full_version >= "3.7.1" and python_full_version < "4.0.0" \ + --hash=sha256:586372eb92059873e29eba4f9dec8381541b4d3834660707faf8ba59146dfc35 \ + --hash=sha256:dd2fa9d6b1a51a83f0d7dd76293d734046aa176e384bf6e33b7e44880eb37c5d +cryptography==36.0.1 ; python_full_version >= "3.7.1" and python_full_version < "4.0.0" \ --hash=sha256:0a817b961b46894c5ca8a66b599c745b9a3d9f822725221f0e0fe49dc043a3a3 \ - --hash=sha256:94ae132f0e40fe48f310bba63f477f14a43116f05ddb69d6fa31e93f05848ae2 \ - --hash=sha256:7be0eec337359c155df191d6ae00a5e8bbb63933883f4f5dffc439dac5348c3f \ - --hash=sha256:e0344c14c9cb89e76eb6a060e67980c9e35b3f36691e15e1b7a9e58a0a6c6dc3 \ - --hash=sha256:4caa4b893d8fad33cf1964d3e51842cd78ba87401ab1d2e44556826df849a8ca \ - --hash=sha256:391432971a66cfaf94b21c24ab465a4cc3e8bf4a939c1ca5c3e3a6e0abebdbcf \ - --hash=sha256:bb5829d027ff82aa872d76158919045a7c1e91fbf241aec32cb07956e9ebd3c9 \ - --hash=sha256:ebc15b1c22e55c4d5566e3ca4db8689470a0ca2babef8e3a9ee057a8b82ce4b1 \ - --hash=sha256:596f3cd67e1b950bc372c33f1a28a0692080625592ea6392987dba7f09f17a94 \ + --hash=sha256:2d87cdcb378d3cfed944dac30596da1968f88fb96d7fc34fdae30a99054b2e31 \ --hash=sha256:30ee1eb3ebe1644d1c3f183d115a8c04e4e603ed6ce8e394ed39eea4a98469ac \ - --hash=sha256:ec63da4e7e4a5f924b90af42eddf20b698a70e58d86a72d943857c4c6045b3ee \ + --hash=sha256:391432971a66cfaf94b21c24ab465a4cc3e8bf4a939c1ca5c3e3a6e0abebdbcf \ + --hash=sha256:39bdf8e70eee6b1c7b289ec6e5d84d49a6bfa11f8b8646b5b3dfe41219153316 \ + --hash=sha256:4caa4b893d8fad33cf1964d3e51842cd78ba87401ab1d2e44556826df849a8ca \ + --hash=sha256:53e5c1dc3d7a953de055d77bef2ff607ceef7a2aac0353b5d630ab67f7423638 \ + --hash=sha256:596f3cd67e1b950bc372c33f1a28a0692080625592ea6392987dba7f09f17a94 \ + 
--hash=sha256:5d59a9d55027a8b88fd9fd2826c4392bd487d74bf628bb9d39beecc62a644c12 \ + --hash=sha256:6c0c021f35b421ebf5976abf2daacc47e235f8b6082d3396a2fe3ccd537ab173 \ + --hash=sha256:73bc2d3f2444bcfeac67dd130ff2ea598ea5f20b40e36d19821b4df8c9c5037b \ + --hash=sha256:74d6c7e80609c0f4c2434b97b80c7f8fdfaa072ca4baab7e239a15d6d70ed73a \ + --hash=sha256:7be0eec337359c155df191d6ae00a5e8bbb63933883f4f5dffc439dac5348c3f \ + --hash=sha256:94ae132f0e40fe48f310bba63f477f14a43116f05ddb69d6fa31e93f05848ae2 \ + --hash=sha256:bb5829d027ff82aa872d76158919045a7c1e91fbf241aec32cb07956e9ebd3c9 \ --hash=sha256:ca238ceb7ba0bdf6ce88c1b74a87bffcee5afbfa1e41e173b1ceb095b39add46 \ --hash=sha256:ca28641954f767f9822c24e927ad894d45d5a1e501767599647259cbf030b903 \ - --hash=sha256:39bdf8e70eee6b1c7b289ec6e5d84d49a6bfa11f8b8646b5b3dfe41219153316 \ - --hash=sha256:53e5c1dc3d7a953de055d77bef2ff607ceef7a2aac0353b5d630ab67f7423638 -frozendict==2.3.3 \ + --hash=sha256:e0344c14c9cb89e76eb6a060e67980c9e35b3f36691e15e1b7a9e58a0a6c6dc3 \ + --hash=sha256:ebc15b1c22e55c4d5566e3ca4db8689470a0ca2babef8e3a9ee057a8b82ce4b1 \ + --hash=sha256:ec63da4e7e4a5f924b90af42eddf20b698a70e58d86a72d943857c4c6045b3ee +frozendict==2.3.3 ; python_full_version >= "3.7.1" and python_full_version < "4.0.0" \ + --hash=sha256:027952d1698ac9c766ef43711226b178cdd49d2acbdff396936639ad1d2a5615 \ + --hash=sha256:15b5f82aad108125336593cec1b6420c638bf45f449c57e50949fc7654ea5a41 \ + --hash=sha256:310aaf81793abf4f471895e6fe65e0e74a28a2aaf7b25c2ba6ccd4e35af06842 \ + --hash=sha256:35eb7e59e287c41f4f712d4d3d2333354175b155d217b97c99c201d2d8920790 \ + --hash=sha256:38c4660f37fcc70a32ff997fe58e40b3fcc60b2017b286e33828efaa16b01308 \ + --hash=sha256:398539c52af3c647d103185bbaa1291679f0507ad035fe3bab2a8b0366d52cf1 \ --hash=sha256:39942914c1217a5a49c7551495a103b3dbd216e19413687e003b859c6b0ebc12 \ --hash=sha256:5589256058b31f2b91419fa30b8dc62dbdefe7710e688a3fd5b43849161eecc9 \ - --hash=sha256:35eb7e59e287c41f4f712d4d3d2333354175b155d217b97c99c201d2d8920790 \ - --hash=sha256:310aaf81793abf4f471895e6fe65e0e74a28a2aaf7b25c2ba6ccd4e35af06842 \ - --hash=sha256:c353c11010a986566a0cb37f9a783c560ffff7d67d5e7fd52221fb03757cdc43 \ - --hash=sha256:15b5f82aad108125336593cec1b6420c638bf45f449c57e50949fc7654ea5a41 \ - --hash=sha256:a4737e5257756bd6b877504ff50185b705db577b5330d53040a6cf6417bb3cdb \ --hash=sha256:80a14c11e33e8b0bc09e07bba3732c77a502c39edb8c3959fd9a0e490e031158 \ - --hash=sha256:027952d1698ac9c766ef43711226b178cdd49d2acbdff396936639ad1d2a5615 \ - --hash=sha256:ef818d66c85098a37cf42509545a4ba7dd0c4c679d6262123a8dc14cc474bab7 \ --hash=sha256:812279f2b270c980112dc4e367b168054f937108f8044eced4199e0ab2945a37 \ - --hash=sha256:c1fb7efbfebc2075f781be3d9774e4ba6ce4fc399148b02097f68d4b3c4bc00a \ - --hash=sha256:a0b46d4bf95bce843c0151959d54c3e5b8d0ce29cb44794e820b3ec980d63eee \ - --hash=sha256:38c4660f37fcc70a32ff997fe58e40b3fcc60b2017b286e33828efaa16b01308 \ --hash=sha256:919e3609844fece11ab18bcbf28a3ed20f8108ad4149d7927d413687f281c6c9 \ - --hash=sha256:f988b482d08972a196664718167a993a61c9e9f6fe7b0ca2443570b5f20ca44a \ - --hash=sha256:398539c52af3c647d103185bbaa1291679f0507ad035fe3bab2a8b0366d52cf1 -hiredis==2.0.0 \ - --hash=sha256:b4c8b0bc5841e578d5fb32a16e0c305359b987b850a06964bd5a62739d688048 \ - --hash=sha256:0adea425b764a08270820531ec2218d0508f8ae15a448568109ffcae050fee26 \ - --hash=sha256:3d55e36715ff06cdc0ab62f9591607c4324297b6b6ce5b58cb9928b3defe30ea \ - --hash=sha256:5d2a48c80cf5a338d58aae3c16872f4d452345e18350143b3bf7216d33ba7b99 \ - 
--hash=sha256:240ce6dc19835971f38caf94b5738092cb1e641f8150a9ef9251b7825506cb05 \ - --hash=sha256:5dc7a94bb11096bc4bffd41a3c4f2b958257085c01522aa81140c68b8bf1630a \ - --hash=sha256:139705ce59d94eef2ceae9fd2ad58710b02aee91e7fa0ccb485665ca0ecbec63 \ - --hash=sha256:c39c46d9e44447181cd502a35aad2bb178dbf1b1f86cf4db639d7b9614f837c6 \ - --hash=sha256:adf4dd19d8875ac147bf926c727215a0faf21490b22c053db464e0bf0deb0485 \ - --hash=sha256:0f41827028901814c709e744060843c77e78a3aca1e0d6875d2562372fcb405a \ - --hash=sha256:508999bec4422e646b05c95c598b64bdbef1edf0d2b715450a078ba21b385bcc \ - --hash=sha256:0d5109337e1db373a892fdcf78eb145ffb6bbd66bb51989ec36117b9f7f9b579 \ + --hash=sha256:a0b46d4bf95bce843c0151959d54c3e5b8d0ce29cb44794e820b3ec980d63eee \ + --hash=sha256:a4737e5257756bd6b877504ff50185b705db577b5330d53040a6cf6417bb3cdb \ + --hash=sha256:c1fb7efbfebc2075f781be3d9774e4ba6ce4fc399148b02097f68d4b3c4bc00a \ + --hash=sha256:c353c11010a986566a0cb37f9a783c560ffff7d67d5e7fd52221fb03757cdc43 \ + --hash=sha256:ef818d66c85098a37cf42509545a4ba7dd0c4c679d6262123a8dc14cc474bab7 \ + --hash=sha256:f988b482d08972a196664718167a993a61c9e9f6fe7b0ca2443570b5f20ca44a +hiredis==2.0.0 ; python_full_version >= "3.7.1" and python_full_version < "4.0.0" \ --hash=sha256:04026461eae67fdefa1949b7332e488224eac9e8f2b5c58c98b54d29af22093e \ - --hash=sha256:a00514362df15af041cc06e97aebabf2895e0a7c42c83c21894be12b84402d79 \ - --hash=sha256:09004096e953d7ebd508cded79f6b21e05dff5d7361771f59269425108e703bc \ - --hash=sha256:f8196f739092a78e4f6b1b2172679ed3343c39c61a3e9d722ce6fcf1dac2824a \ - --hash=sha256:294a6697dfa41a8cba4c365dd3715abc54d29a86a40ec6405d677ca853307cfb \ - --hash=sha256:3dddf681284fe16d047d3ad37415b2e9ccdc6c8986c8062dbe51ab9a358b50a5 \ - --hash=sha256:dcef843f8de4e2ff5e35e96ec2a4abbdf403bd0f732ead127bd27e51f38ac298 \ - --hash=sha256:87c7c10d186f1743a8fd6a971ab6525d60abd5d5d200f31e073cd5e94d7e7a9d \ - --hash=sha256:7f0055f1809b911ab347a25d786deff5e10e9cf083c3c3fd2dd04e8612e8d9db \ - --hash=sha256:11d119507bb54e81f375e638225a2c057dda748f2b1deef05c2b1a5d42686048 \ - --hash=sha256:7492af15f71f75ee93d2a618ca53fea8be85e7b625e323315169977fae752426 \ - --hash=sha256:65d653df249a2f95673976e4e9dd7ce10de61cfc6e64fa7eeaa6891a9559c581 \ - --hash=sha256:ae8427a5e9062ba66fc2c62fb19a72276cf12c780e8db2b0956ea909c48acff5 \ - --hash=sha256:3f5f7e3a4ab824e3de1e1700f05ad76ee465f5f11f5db61c4b297ec29e692b2e \ - --hash=sha256:e3447d9e074abf0e3cd85aef8131e01ab93f9f0e86654db7ac8a3f73c63706ce \ - --hash=sha256:8b42c0dc927b8d7c0eb59f97e6e34408e53bc489f9f90e66e568f329bff3e443 \ - --hash=sha256:b84f29971f0ad4adaee391c6364e6f780d5aae7e9226d41964b26b49376071d0 \ - --hash=sha256:0b39ec237459922c6544d071cdcf92cbb5bc6685a30e7c6d985d8a3e3a75326e \ - --hash=sha256:a7928283143a401e72a4fad43ecc85b35c27ae699cf5d54d39e1e72d97460e1d \ - --hash=sha256:a4ee8000454ad4486fb9f28b0cab7fa1cd796fc36d639882d0b34109b5b3aec9 \ - --hash=sha256:1f03d4dadd595f7a69a75709bc81902673fa31964c75f93af74feac2f134cc54 \ --hash=sha256:04927a4c651a0e9ec11c68e4427d917e44ff101f761cd3b5bc76f86aaa431d27 \ - --hash=sha256:a39efc3ade8c1fb27c097fd112baf09d7fd70b8cb10ef1de4da6efbe066d381d \ --hash=sha256:07bbf9bdcb82239f319b1f09e8ef4bdfaec50ed7d7ea51a56438f39193271163 \ - --hash=sha256:807b3096205c7cec861c8803a6738e33ed86c9aae76cac0e19454245a6bbbc0a \ + --hash=sha256:09004096e953d7ebd508cded79f6b21e05dff5d7361771f59269425108e703bc \ + --hash=sha256:0adea425b764a08270820531ec2218d0508f8ae15a448568109ffcae050fee26 \ + 
--hash=sha256:0b39ec237459922c6544d071cdcf92cbb5bc6685a30e7c6d985d8a3e3a75326e \ + --hash=sha256:0d5109337e1db373a892fdcf78eb145ffb6bbd66bb51989ec36117b9f7f9b579 \ + --hash=sha256:0f41827028901814c709e744060843c77e78a3aca1e0d6875d2562372fcb405a \ + --hash=sha256:11d119507bb54e81f375e638225a2c057dda748f2b1deef05c2b1a5d42686048 \ --hash=sha256:1233e303645f468e399ec906b6b48ab7cd8391aae2d08daadbb5cad6ace4bd87 \ + --hash=sha256:139705ce59d94eef2ceae9fd2ad58710b02aee91e7fa0ccb485665ca0ecbec63 \ + --hash=sha256:1f03d4dadd595f7a69a75709bc81902673fa31964c75f93af74feac2f134cc54 \ + --hash=sha256:240ce6dc19835971f38caf94b5738092cb1e641f8150a9ef9251b7825506cb05 \ + --hash=sha256:294a6697dfa41a8cba4c365dd3715abc54d29a86a40ec6405d677ca853307cfb \ + --hash=sha256:3d55e36715ff06cdc0ab62f9591607c4324297b6b6ce5b58cb9928b3defe30ea \ + --hash=sha256:3dddf681284fe16d047d3ad37415b2e9ccdc6c8986c8062dbe51ab9a358b50a5 \ + --hash=sha256:3f5f7e3a4ab824e3de1e1700f05ad76ee465f5f11f5db61c4b297ec29e692b2e \ + --hash=sha256:508999bec4422e646b05c95c598b64bdbef1edf0d2b715450a078ba21b385bcc \ + --hash=sha256:5d2a48c80cf5a338d58aae3c16872f4d452345e18350143b3bf7216d33ba7b99 \ + --hash=sha256:5dc7a94bb11096bc4bffd41a3c4f2b958257085c01522aa81140c68b8bf1630a \ + --hash=sha256:65d653df249a2f95673976e4e9dd7ce10de61cfc6e64fa7eeaa6891a9559c581 \ + --hash=sha256:7492af15f71f75ee93d2a618ca53fea8be85e7b625e323315169977fae752426 \ + --hash=sha256:7f0055f1809b911ab347a25d786deff5e10e9cf083c3c3fd2dd04e8612e8d9db \ + --hash=sha256:807b3096205c7cec861c8803a6738e33ed86c9aae76cac0e19454245a6bbbc0a \ + --hash=sha256:81d6d8e39695f2c37954d1011c0480ef7cf444d4e3ae24bc5e89ee5de360139a \ + --hash=sha256:87c7c10d186f1743a8fd6a971ab6525d60abd5d5d200f31e073cd5e94d7e7a9d \ + --hash=sha256:8b42c0dc927b8d7c0eb59f97e6e34408e53bc489f9f90e66e568f329bff3e443 \ + --hash=sha256:a00514362df15af041cc06e97aebabf2895e0a7c42c83c21894be12b84402d79 \ + --hash=sha256:a39efc3ade8c1fb27c097fd112baf09d7fd70b8cb10ef1de4da6efbe066d381d \ + --hash=sha256:a4ee8000454ad4486fb9f28b0cab7fa1cd796fc36d639882d0b34109b5b3aec9 \ + --hash=sha256:a7928283143a401e72a4fad43ecc85b35c27ae699cf5d54d39e1e72d97460e1d \ + --hash=sha256:adf4dd19d8875ac147bf926c727215a0faf21490b22c053db464e0bf0deb0485 \ + --hash=sha256:ae8427a5e9062ba66fc2c62fb19a72276cf12c780e8db2b0956ea909c48acff5 \ + --hash=sha256:b4c8b0bc5841e578d5fb32a16e0c305359b987b850a06964bd5a62739d688048 \ + --hash=sha256:b84f29971f0ad4adaee391c6364e6f780d5aae7e9226d41964b26b49376071d0 \ + --hash=sha256:c39c46d9e44447181cd502a35aad2bb178dbf1b1f86cf4db639d7b9614f837c6 \ --hash=sha256:cb2126603091902767d96bcb74093bd8b14982f41809f85c9b96e519c7e1dc41 \ + --hash=sha256:dcef843f8de4e2ff5e35e96ec2a4abbdf403bd0f732ead127bd27e51f38ac298 \ + --hash=sha256:e3447d9e074abf0e3cd85aef8131e01ab93f9f0e86654db7ac8a3f73c63706ce \ --hash=sha256:f52010e0a44e3d8530437e7da38d11fb822acfb0d5b12e9cd5ba655509937ca0 \ - --hash=sha256:81d6d8e39695f2c37954d1011c0480ef7cf444d4e3ae24bc5e89ee5de360139a -hyperlink==21.0.0 ; python_version >= "3.6" \ - --hash=sha256:e6b14c37ecb73e89c77d78cdb4c2cc8f3fb59a885c5b3f819ff4ed80f25af1b4 \ - --hash=sha256:427af957daa58bc909471c6c40f74c5450fa123dd093fc53efd2e91d2705a56b -idna==3.3 \ + --hash=sha256:f8196f739092a78e4f6b1b2172679ed3343c39c61a3e9d722ce6fcf1dac2824a +hyperlink==21.0.0 ; python_full_version >= "3.7.1" and python_full_version < "4.0.0" \ + --hash=sha256:427af957daa58bc909471c6c40f74c5450fa123dd093fc53efd2e91d2705a56b \ + --hash=sha256:e6b14c37ecb73e89c77d78cdb4c2cc8f3fb59a885c5b3f819ff4ed80f25af1b4 +idna==3.3 ; 
python_full_version >= "3.7.1" and python_full_version < "4.0.0" \ --hash=sha256:84d9dd047ffa80596e0f246e2eab0b391788b0503584e8945f2368256d2735ff \ --hash=sha256:9d643ff0a55b762d5cdb124b8eaa99c66322e2157b69160bc32796e824360e6d -ijson==3.1.4 \ - --hash=sha256:6c1a777096be5f75ffebb335c6d2ebc0e489b231496b7f2ca903aa061fe7d381 \ - --hash=sha256:475fc25c3d2a86230b85777cae9580398b42eed422506bf0b6aacfa936f7bfcd \ - --hash=sha256:f587699b5a759e30accf733e37950cc06c4118b72e3e146edcea77dded467426 \ - --hash=sha256:339b2b4c7bbd64849dd69ef94ee21e29dcd92c831f47a281fdd48122bb2a715a \ - --hash=sha256:446ef8980504da0af8d20d3cb6452c4dc3d8aa5fd788098985e899b913191fe6 \ - --hash=sha256:3997a2fdb28bc04b9ab0555db5f3b33ed28d91e9d42a3bf2c1842d4990beb158 \ - --hash=sha256:fa10a1d88473303ec97aae23169d77c5b92657b7fb189f9c584974c00a79f383 \ - --hash=sha256:9a5bf5b9d8f2ceaca131ee21fc7875d0f34b95762f4f32e4d65109ca46472147 \ - --hash=sha256:81cc8cee590c8a70cca3c9aefae06dd7cb8e9f75f3a7dc12b340c2e332d33a2a \ - --hash=sha256:4ea5fc50ba158f72943d5174fbc29ebefe72a2adac051c814c87438dc475cf78 \ - --hash=sha256:3b98861a4280cf09d267986cefa46c3bd80af887eae02aba07488d80eb798afa \ +ijson==3.1.4 ; python_full_version >= "3.7.1" and python_full_version < "4.0.0" \ --hash=sha256:068c692efba9692406b86736dcc6803e4a0b6280d7f0b7534bff3faec677ff38 \ - --hash=sha256:86884ac06ac69cea6d89ab7b84683b3b4159c4013e4a20276d3fc630fe9b7588 \ - --hash=sha256:41e5886ff6fade26f10b87edad723d2db14dcbb1178717790993fcbbb8ccd333 \ - --hash=sha256:24b58933bf777d03dc1caa3006112ec7f9e6f6db6ffe1f5f5bd233cb1281f719 \ - --hash=sha256:13f80aad0b84d100fb6a88ced24bade21dc6ddeaf2bba3294b58728463194f50 \ - --hash=sha256:fa9a25d0bd32f9515e18a3611690f1de12cb7d1320bd93e9da835936b41ad3ff \ - --hash=sha256:c4c1bf98aaab4c8f60d238edf9bcd07c896cfcc51c2ca84d03da22aad88957c5 \ - --hash=sha256:f0f2a87c423e8767368aa055310024fa28727f4454463714fef22230c9717f64 \ - --hash=sha256:15507de59d74d21501b2a076d9c49abf927eb58a51a01b8f28a0a0565db0a99f \ - --hash=sha256:2e6bd6ad95ab40c858592b905e2bbb4fe79bbff415b69a4923dafe841ffadcb4 \ - --hash=sha256:68e295bb12610d086990cedc89fb8b59b7c85740d66e9515aed062649605d0bf \ - --hash=sha256:3bb461352c0f0f2ec460a4b19400a665b8a5a3a2da663a32093df1699642ee3f \ - --hash=sha256:f91c75edd6cf1a66f02425bafc59a22ec29bc0adcbc06f4bfd694d92f424ceb3 \ - --hash=sha256:4c53cc72f79a4c32d5fc22efb85aa22f248e8f4f992707a84bdc896cc0b1ecf9 \ - --hash=sha256:ac9098470c1ff6e5c23ec0946818bc102bfeeeea474554c8d081dc934be20988 \ - --hash=sha256:dcd6f04df44b1945b859318010234651317db2c4232f75e3933f8bb41c4fa055 \ - --hash=sha256:5a2f40c053c837591636dc1afb79d85e90b9a9d65f3d9963aae31d1eb11bfed2 \ - --hash=sha256:f50337e3b8e72ec68441b573c2848f108a8976a57465c859b227ebd2a2342901 \ - --hash=sha256:454918f908abbed3c50a0a05c14b20658ab711b155e4f890900e6f60746dd7cc \ - --hash=sha256:387c2ec434cc1bc7dc9bd33ec0b70d95d443cc1e5934005f26addc2284a437ab \ - --hash=sha256:179ed6fd42e121d252b43a18833df2de08378fac7bce380974ef6f5e522afefa \ - --hash=sha256:26a6a550b270df04e3f442e2bf0870c9362db4912f0e7bdfd300f30ea43115a2 \ - --hash=sha256:ff8cf7507d9d8939264068c2cff0a23f99703fa2f31eb3cb45a9a52798843586 \ --hash=sha256:09c9d7913c88a6059cd054ff854958f34d757402b639cf212ffbec201a705a0d \ - --hash=sha256:702ba9a732116d659a5e950ee176be6a2e075998ef1bcde11cbf79a77ed0f717 \ - --hash=sha256:667841591521158770adc90793c2bdbb47c94fe28888cb802104b8bbd61f3d51 \ - --hash=sha256:df641dd07b38c63eecd4f454db7b27aa5201193df160f06b48111ba97ab62504 \ - 
--hash=sha256:9348e7d507eb40b52b12eecff3d50934fcc3d2a15a2f54ec1127a36063b9ba8f \ - --hash=sha256:93455902fdc33ba9485c7fae63ac95d96e0ab8942224a357113174bbeaff92e9 \ - --hash=sha256:5b725f2e984ce70d464b195f206fa44bebbd744da24139b61fec72de77c03a16 \ - --hash=sha256:a5965c315fbb2dc9769dfdf046eb07daf48ae20b637da95ec8d62b629be09df4 \ - --hash=sha256:b8ee7dbb07cec9ba29d60cfe4954b3cc70adb5f85bba1f72225364b59c1cf82b \ - --hash=sha256:d9e01c55d501e9c3d686b6ee3af351c9c0c8c3e45c5576bd5601bee3e1300b09 \ - --hash=sha256:297f26f27a04cd0d0a2f865d154090c48ea11b239cabe0a17a6c65f0314bd1ca \ - --hash=sha256:9239973100338a4138d09d7a4602bd289861e553d597cd67390c33bfc452253e \ - --hash=sha256:2a64c66a08f56ed45a805691c2fd2e1caef00edd6ccf4c4e5eff02cd94ad8364 \ - --hash=sha256:d17fd199f0d0a4ab6e0d541b4eec1b68b5bd5bb5d8104521e22243015b51049b \ - --hash=sha256:70ee3c8fa0eba18c80c5911639c01a8de4089a4361bad2862a9949e25ec9b1c8 \ - --hash=sha256:6bf2b64304321705d03fa5e403ec3f36fa5bb27bf661849ad62e0a3a49bc23e3 \ - --hash=sha256:5d7e3fcc3b6de76a9dba1e9fc6ca23dad18f0fa6b4e6499415e16b684b2e9af1 \ - --hash=sha256:a72eb0359ebff94754f7a2f00a6efe4c57716f860fc040c606dedcb40f49f233 \ - --hash=sha256:28fc168f5faf5759fdfa2a63f85f1f7a148bbae98f34404a6ba19f3d08e89e87 \ - --hash=sha256:2844d4a38d27583897ed73f7946e205b16926b4cab2525d1ce17e8b08064c706 \ - --hash=sha256:252defd1f139b5fb8c764d78d5e3a6df81543d9878c58992a89b261369ea97a7 \ + --hash=sha256:13f80aad0b84d100fb6a88ced24bade21dc6ddeaf2bba3294b58728463194f50 \ + --hash=sha256:15507de59d74d21501b2a076d9c49abf927eb58a51a01b8f28a0a0565db0a99f \ --hash=sha256:15d5356b4d090c699f382c8eb6a2bcd5992a8c8e8b88c88bc6e54f686018328a \ - --hash=sha256:6774ec0a39647eea70d35fb76accabe3d71002a8701c0545b9120230c182b75b \ - --hash=sha256:f11da15ec04cc83ff0f817a65a3392e169be8d111ba81f24d6e09236597bb28c \ - --hash=sha256:ee13ceeed9b6cf81b3b8197ef15595fc43fd54276842ed63840ddd49db0603da \ - --hash=sha256:97e4df67235fae40d6195711223520d2c5bf1f7f5087c2963fcde44d72ebf448 \ + --hash=sha256:179ed6fd42e121d252b43a18833df2de08378fac7bce380974ef6f5e522afefa \ + --hash=sha256:1d1003ae3c6115ec9b587d29dd136860a81a23c7626b682e2b5b12c9fd30e4ea \ + --hash=sha256:24b58933bf777d03dc1caa3006112ec7f9e6f6db6ffe1f5f5bd233cb1281f719 \ + --hash=sha256:252defd1f139b5fb8c764d78d5e3a6df81543d9878c58992a89b261369ea97a7 \ + --hash=sha256:26a6a550b270df04e3f442e2bf0870c9362db4912f0e7bdfd300f30ea43115a2 \ + --hash=sha256:2844d4a38d27583897ed73f7946e205b16926b4cab2525d1ce17e8b08064c706 \ + --hash=sha256:28fc168f5faf5759fdfa2a63f85f1f7a148bbae98f34404a6ba19f3d08e89e87 \ + --hash=sha256:297f26f27a04cd0d0a2f865d154090c48ea11b239cabe0a17a6c65f0314bd1ca \ + --hash=sha256:2a64c66a08f56ed45a805691c2fd2e1caef00edd6ccf4c4e5eff02cd94ad8364 \ + --hash=sha256:2e6bd6ad95ab40c858592b905e2bbb4fe79bbff415b69a4923dafe841ffadcb4 \ + --hash=sha256:339b2b4c7bbd64849dd69ef94ee21e29dcd92c831f47a281fdd48122bb2a715a \ + --hash=sha256:387c2ec434cc1bc7dc9bd33ec0b70d95d443cc1e5934005f26addc2284a437ab \ + --hash=sha256:3997a2fdb28bc04b9ab0555db5f3b33ed28d91e9d42a3bf2c1842d4990beb158 \ + --hash=sha256:3b98861a4280cf09d267986cefa46c3bd80af887eae02aba07488d80eb798afa \ + --hash=sha256:3bb461352c0f0f2ec460a4b19400a665b8a5a3a2da663a32093df1699642ee3f \ --hash=sha256:3d10eee52428f43f7da28763bb79f3d90bbbeea1accb15de01e40a00885b6e89 \ - --hash=sha256:1d1003ae3c6115ec9b587d29dd136860a81a23c7626b682e2b5b12c9fd30e4ea -importlib-metadata==4.2.0 ; python_version < "3.8" and python_version >= "3.7" or python_version >= "3.6" and python_version < "3.8" or python_version < "3.8" 
\ + --hash=sha256:41e5886ff6fade26f10b87edad723d2db14dcbb1178717790993fcbbb8ccd333 \ + --hash=sha256:446ef8980504da0af8d20d3cb6452c4dc3d8aa5fd788098985e899b913191fe6 \ + --hash=sha256:454918f908abbed3c50a0a05c14b20658ab711b155e4f890900e6f60746dd7cc \ + --hash=sha256:475fc25c3d2a86230b85777cae9580398b42eed422506bf0b6aacfa936f7bfcd \ + --hash=sha256:4c53cc72f79a4c32d5fc22efb85aa22f248e8f4f992707a84bdc896cc0b1ecf9 \ + --hash=sha256:4ea5fc50ba158f72943d5174fbc29ebefe72a2adac051c814c87438dc475cf78 \ + --hash=sha256:5a2f40c053c837591636dc1afb79d85e90b9a9d65f3d9963aae31d1eb11bfed2 \ + --hash=sha256:5b725f2e984ce70d464b195f206fa44bebbd744da24139b61fec72de77c03a16 \ + --hash=sha256:5d7e3fcc3b6de76a9dba1e9fc6ca23dad18f0fa6b4e6499415e16b684b2e9af1 \ + --hash=sha256:667841591521158770adc90793c2bdbb47c94fe28888cb802104b8bbd61f3d51 \ + --hash=sha256:6774ec0a39647eea70d35fb76accabe3d71002a8701c0545b9120230c182b75b \ + --hash=sha256:68e295bb12610d086990cedc89fb8b59b7c85740d66e9515aed062649605d0bf \ + --hash=sha256:6bf2b64304321705d03fa5e403ec3f36fa5bb27bf661849ad62e0a3a49bc23e3 \ + --hash=sha256:6c1a777096be5f75ffebb335c6d2ebc0e489b231496b7f2ca903aa061fe7d381 \ + --hash=sha256:702ba9a732116d659a5e950ee176be6a2e075998ef1bcde11cbf79a77ed0f717 \ + --hash=sha256:70ee3c8fa0eba18c80c5911639c01a8de4089a4361bad2862a9949e25ec9b1c8 \ + --hash=sha256:81cc8cee590c8a70cca3c9aefae06dd7cb8e9f75f3a7dc12b340c2e332d33a2a \ + --hash=sha256:86884ac06ac69cea6d89ab7b84683b3b4159c4013e4a20276d3fc630fe9b7588 \ + --hash=sha256:9239973100338a4138d09d7a4602bd289861e553d597cd67390c33bfc452253e \ + --hash=sha256:93455902fdc33ba9485c7fae63ac95d96e0ab8942224a357113174bbeaff92e9 \ + --hash=sha256:9348e7d507eb40b52b12eecff3d50934fcc3d2a15a2f54ec1127a36063b9ba8f \ + --hash=sha256:97e4df67235fae40d6195711223520d2c5bf1f7f5087c2963fcde44d72ebf448 \ + --hash=sha256:9a5bf5b9d8f2ceaca131ee21fc7875d0f34b95762f4f32e4d65109ca46472147 \ + --hash=sha256:a5965c315fbb2dc9769dfdf046eb07daf48ae20b637da95ec8d62b629be09df4 \ + --hash=sha256:a72eb0359ebff94754f7a2f00a6efe4c57716f860fc040c606dedcb40f49f233 \ + --hash=sha256:ac9098470c1ff6e5c23ec0946818bc102bfeeeea474554c8d081dc934be20988 \ + --hash=sha256:b8ee7dbb07cec9ba29d60cfe4954b3cc70adb5f85bba1f72225364b59c1cf82b \ + --hash=sha256:c4c1bf98aaab4c8f60d238edf9bcd07c896cfcc51c2ca84d03da22aad88957c5 \ + --hash=sha256:d17fd199f0d0a4ab6e0d541b4eec1b68b5bd5bb5d8104521e22243015b51049b \ + --hash=sha256:d9e01c55d501e9c3d686b6ee3af351c9c0c8c3e45c5576bd5601bee3e1300b09 \ + --hash=sha256:dcd6f04df44b1945b859318010234651317db2c4232f75e3933f8bb41c4fa055 \ + --hash=sha256:df641dd07b38c63eecd4f454db7b27aa5201193df160f06b48111ba97ab62504 \ + --hash=sha256:ee13ceeed9b6cf81b3b8197ef15595fc43fd54276842ed63840ddd49db0603da \ + --hash=sha256:f0f2a87c423e8767368aa055310024fa28727f4454463714fef22230c9717f64 \ + --hash=sha256:f11da15ec04cc83ff0f817a65a3392e169be8d111ba81f24d6e09236597bb28c \ + --hash=sha256:f50337e3b8e72ec68441b573c2848f108a8976a57465c859b227ebd2a2342901 \ + --hash=sha256:f587699b5a759e30accf733e37950cc06c4118b72e3e146edcea77dded467426 \ + --hash=sha256:f91c75edd6cf1a66f02425bafc59a22ec29bc0adcbc06f4bfd694d92f424ceb3 \ + --hash=sha256:fa10a1d88473303ec97aae23169d77c5b92657b7fb189f9c584974c00a79f383 \ + --hash=sha256:fa9a25d0bd32f9515e18a3611690f1de12cb7d1320bd93e9da835936b41ad3ff \ + --hash=sha256:ff8cf7507d9d8939264068c2cff0a23f99703fa2f31eb3cb45a9a52798843586 +importlib-metadata==4.2.0 ; python_full_version >= "3.7.1" and python_version < "3.8" \ 
--hash=sha256:057e92c15bc8d9e8109738a48db0ccb31b4d9d5cfbee5a8670879a30be66304b \ --hash=sha256:b7e52a1f8dec14a75ea73e0891f3060099ca1d8e6a462a4dff11c3e119ea1b31 -importlib-resources==5.4.0 ; python_version < "3.9" and python_version >= "3.7" \ +importlib-resources==5.4.0 ; python_full_version >= "3.7.1" and python_version < "3.9" \ --hash=sha256:33a95faed5fc19b4bc16b29a6eeae248a3fe69dd55d4d229d2b480e23eeaad45 \ --hash=sha256:d756e2f85dd4de2ba89be0b21dba2a3bbec2e871a42a3a16719258a11f87506b -incremental==21.3.0 ; python_version >= "3.6" \ - --hash=sha256:92014aebc6a20b78a8084cdd5645eeaa7f74b8933f70fa3ada2cfbd1e3b54321 \ - --hash=sha256:02f5de5aff48f6b9f665d99d48bfc7ec03b6e3943210de7cfc88856d755d6f57 -jinja2==3.0.3 \ +incremental==21.3.0 ; python_full_version >= "3.7.1" and python_full_version < "4.0.0" \ + --hash=sha256:02f5de5aff48f6b9f665d99d48bfc7ec03b6e3943210de7cfc88856d755d6f57 \ + --hash=sha256:92014aebc6a20b78a8084cdd5645eeaa7f74b8933f70fa3ada2cfbd1e3b54321 +jinja2==3.0.3 ; python_full_version >= "3.7.1" and python_full_version < "4.0.0" \ --hash=sha256:077ce6014f7b40d03b47d1f1ca4b0fc8328a692bd284016f806ed0eaca390ad8 \ --hash=sha256:611bb273cd68f3b993fabdc4064fc858c5b47a973cb5aa7999ec1ba405c87cd7 -jsonschema==4.4.0 \ - --hash=sha256:77281a1f71684953ee8b3d488371b162419767973789272434bbc3f29d9c8823 \ - --hash=sha256:636694eb41b3535ed608fe04129f26542b59ed99808b4f688aa32dcf55317a83 -lxml==4.9.1 \ - --hash=sha256:98cafc618614d72b02185ac583c6f7796202062c41d2eeecdf07820bad3295ed \ - --hash=sha256:c62e8dd9754b7debda0c5ba59d34509c4688f853588d75b53c3791983faa96fc \ - --hash=sha256:21fb3d24ab430fc538a96e9fbb9b150029914805d551deeac7d7822f64631dfc \ - --hash=sha256:86e92728ef3fc842c50a5cb1d5ba2bc66db7da08a7af53fb3da79e202d1b2cd3 \ - --hash=sha256:4cfbe42c686f33944e12f45a27d25a492cc0e43e1dc1da5d6a87cbcaf2e95627 \ - --hash=sha256:dad7b164905d3e534883281c050180afcf1e230c3d4a54e8038aa5cfcf312b84 \ - --hash=sha256:a614e4afed58c14254e67862456d212c4dcceebab2eaa44d627c2ca04bf86837 \ - --hash=sha256:f9ced82717c7ec65a67667bb05865ffe38af0e835cdd78728f1209c8fffe0cad \ - --hash=sha256:d9fc0bf3ff86c17348dfc5d322f627d78273eba545db865c3cd14b3f19e57fa5 \ - --hash=sha256:e5f66bdf0976ec667fc4594d2812a00b07ed14d1b44259d19a41ae3fff99f2b8 \ - --hash=sha256:fe17d10b97fdf58155f858606bddb4e037b805a60ae023c009f760d8361a4eb8 \ - --hash=sha256:8caf4d16b31961e964c62194ea3e26a0e9561cdf72eecb1781458b67ec83423d \ - --hash=sha256:4780677767dd52b99f0af1f123bc2c22873d30b474aa0e2fc3fe5e02217687c7 \ - --hash=sha256:b122a188cd292c4d2fcd78d04f863b789ef43aa129b233d7c9004de08693728b \ - --hash=sha256:be9eb06489bc975c38706902cbc6888f39e946b81383abc2838d186f0e8b6a9d \ - --hash=sha256:f1be258c4d3dc609e654a1dc59d37b17d7fef05df912c01fc2e15eb43a9735f3 \ - --hash=sha256:927a9dd016d6033bc12e0bf5dee1dde140235fc8d0d51099353c76081c03dc29 \ - --hash=sha256:9232b09f5efee6a495a99ae6824881940d6447debe272ea400c02e3b68aad85d \ +jsonschema==4.4.0 ; python_full_version >= "3.7.1" and python_full_version < "4.0.0" \ + --hash=sha256:636694eb41b3535ed608fe04129f26542b59ed99808b4f688aa32dcf55317a83 \ + --hash=sha256:77281a1f71684953ee8b3d488371b162419767973789272434bbc3f29d9c8823 +lxml==4.9.1 ; python_full_version >= "3.7.1" and python_full_version < "4.0.0" \ --hash=sha256:04da965dfebb5dac2619cb90fcf93efdb35b3c6994fea58a157a834f2f94b318 \ - --hash=sha256:4d5bae0a37af799207140652a700f21a85946f107a199bcb06720b13a4f1f0b7 \ - --hash=sha256:4878e667ebabe9b65e785ac8da4d48886fe81193a84bbe49f12acff8f7a383a4 \ - 
--hash=sha256:1355755b62c28950f9ce123c7a41460ed9743c699905cbe664a5bcc5c9c7c7fb \ - --hash=sha256:bcaa1c495ce623966d9fc8a187da80082334236a2a1c7e141763ffaf7a405067 \ - --hash=sha256:6eafc048ea3f1b3c136c71a86db393be36b5b3d9c87b1c25204e7d397cee9536 \ - --hash=sha256:13c90064b224e10c14dcdf8086688d3f0e612db53766e7478d7754703295c7c8 \ - --hash=sha256:206a51077773c6c5d2ce1991327cda719063a47adc02bd703c56a662cdb6c58b \ - --hash=sha256:e8f0c9d65da595cfe91713bc1222af9ecabd37971762cb830dea2fc3b3bb2acf \ - --hash=sha256:8f0a4d179c9a941eb80c3a63cdb495e539e064f8054230844dcf2fcb812b71d3 \ - --hash=sha256:830c88747dce8a3e7525defa68afd742b4580df6aa2fdd6f0855481e3994d391 \ - --hash=sha256:1e1cf47774373777936c5aabad489fef7b1c087dcd1f426b621fda9dcc12994e \ - --hash=sha256:5974895115737a74a00b321e339b9c3f45c20275d226398ae79ac008d908bff7 \ - --hash=sha256:1423631e3d51008871299525b541413c9b6c6423593e89f9c4cfbe8460afc0a2 \ - --hash=sha256:2aaf6a0a6465d39b5ca69688fce82d20088c1838534982996ec46633dc7ad6cc \ - --hash=sha256:9f36de4cd0c262dd9927886cc2305aa3f2210db437aa4fed3fb4940b8bf4592c \ - --hash=sha256:ae06c1e4bc60ee076292e582a7512f304abdf6c70db59b56745cca1684f875a4 \ - --hash=sha256:57e4d637258703d14171b54203fd6822fda218c6c2658a7d30816b10995f29f3 \ - --hash=sha256:6d279033bf614953c3fc4a0aa9ac33a21e8044ca72d4fa8b9273fe75359d5cca \ - --hash=sha256:a60f90bba4c37962cbf210f0188ecca87daafdf60271f4c6948606e4dabf8785 \ - --hash=sha256:6ca2264f341dd81e41f3fffecec6e446aa2121e0b8d026fb5130e02de1402785 \ - --hash=sha256:27e590352c76156f50f538dbcebd1925317a0f70540f7dc8c97d2931c595783a \ - --hash=sha256:eea5d6443b093e1545ad0210e6cf27f920482bfcf5c77cdc8596aec73523bb7e \ - --hash=sha256:f05251bbc2145349b8d0b77c0d4e5f3b228418807b1ee27cefb11f69ed3d233b \ - --hash=sha256:487c8e61d7acc50b8be82bda8c8d21d20e133c3cbf41bd8ad7eb1aaeb3f07c97 \ - --hash=sha256:8d1a92d8e90b286d491e5626af53afef2ba04da33e82e30744795c71880eaa21 \ - --hash=sha256:b570da8cd0012f4af9fa76a5635cd31f707473e65a5a335b186069d5c7121ff2 \ - --hash=sha256:5ef87fca280fb15342726bd5f980f6faf8b84a5287fcc2d4962ea8af88b35130 \ - --hash=sha256:93e414e3206779ef41e5ff2448067213febf260ba747fc65389a3ddaa3fb8715 \ - --hash=sha256:6653071f4f9bac46fbc30f3c7838b0e9063ee335908c5d61fb7a4a86c8fd2036 \ - --hash=sha256:32a73c53783becdb7eaf75a2a1525ea8e49379fb7248c3eeefb9412123536387 \ - --hash=sha256:1a7c59c6ffd6ef5db362b798f350e24ab2cfa5700d53ac6681918f314a4d3b94 \ - --hash=sha256:1436cf0063bba7888e43f1ba8d58824f085410ea2025befe81150aceb123e345 \ - --hash=sha256:4beea0f31491bc086991b97517b9683e5cfb369205dac0148ef685ac12a20a67 \ - --hash=sha256:41fb58868b816c202e8881fd0f179a4644ce6e7cbbb248ef0283a34b73ec73bb \ - --hash=sha256:bd34f6d1810d9354dc7e35158aa6cc33456be7706df4420819af6ed966e85448 \ - --hash=sha256:edffbe3c510d8f4bf8640e02ca019e48a9b72357318383ca60e3330c23aaffc7 \ - --hash=sha256:6d949f53ad4fc7cf02c44d6678e7ff05ec5f5552b235b9e136bd52e9bf730b91 \ - --hash=sha256:079b68f197c796e42aa80b1f739f058dcee796dc725cc9a1be0cdb08fc45b000 \ - --hash=sha256:9c3a88d20e4fe4a2a4a84bf439a5ac9c9aba400b85244c63a1ab7088f85d9d25 \ - --hash=sha256:4e285b5f2bf321fc0857b491b5028c5f276ec0c873b985d58d7748ece1d770dd \ - --hash=sha256:ef72013e20dd5ba86a8ae1aed7f56f31d3374189aa8b433e7b12ad182c0d2dfb \ - --hash=sha256:10d2017f9150248563bb579cd0d07c61c58da85c922b780060dcc9a3aa9f432d \ --hash=sha256:0538747a9d7827ce3e16a8fdd201a99e661c7dee3c96c885d8ecba3c35d1032c \ --hash=sha256:0645e934e940107e2fdbe7c5b6fb8ec6232444260752598bc4d09511bd056c0b \ - 
--hash=sha256:6daa662aba22ef3258934105be2dd9afa5bb45748f4f702a3b39a5bf53a1f4dc \ - --hash=sha256:603a464c2e67d8a546ddaa206d98e3246e5db05594b97db844c2f0a1af37cf5b \ - --hash=sha256:c4b2e0559b68455c085fb0f6178e9752c4be3bba104d6e881eb5573b399d1eb2 \ + --hash=sha256:079b68f197c796e42aa80b1f739f058dcee796dc725cc9a1be0cdb08fc45b000 \ --hash=sha256:0f3f0059891d3254c7b5fb935330d6db38d6519ecd238ca4fce93c234b4a0f73 \ - --hash=sha256:c852b1530083a620cb0de5f3cd6826f19862bafeaf77586f1aef326e49d95f0c \ + --hash=sha256:10d2017f9150248563bb579cd0d07c61c58da85c922b780060dcc9a3aa9f432d \ + --hash=sha256:1355755b62c28950f9ce123c7a41460ed9743c699905cbe664a5bcc5c9c7c7fb \ + --hash=sha256:13c90064b224e10c14dcdf8086688d3f0e612db53766e7478d7754703295c7c8 \ + --hash=sha256:1423631e3d51008871299525b541413c9b6c6423593e89f9c4cfbe8460afc0a2 \ + --hash=sha256:1436cf0063bba7888e43f1ba8d58824f085410ea2025befe81150aceb123e345 \ + --hash=sha256:1a7c59c6ffd6ef5db362b798f350e24ab2cfa5700d53ac6681918f314a4d3b94 \ + --hash=sha256:1e1cf47774373777936c5aabad489fef7b1c087dcd1f426b621fda9dcc12994e \ + --hash=sha256:206a51077773c6c5d2ce1991327cda719063a47adc02bd703c56a662cdb6c58b \ + --hash=sha256:21fb3d24ab430fc538a96e9fbb9b150029914805d551deeac7d7822f64631dfc \ + --hash=sha256:27e590352c76156f50f538dbcebd1925317a0f70540f7dc8c97d2931c595783a \ --hash=sha256:287605bede6bd36e930577c5925fcea17cb30453d96a7b4c63c14a257118dbb9 \ + --hash=sha256:2aaf6a0a6465d39b5ca69688fce82d20088c1838534982996ec46633dc7ad6cc \ + --hash=sha256:32a73c53783becdb7eaf75a2a1525ea8e49379fb7248c3eeefb9412123536387 \ + --hash=sha256:41fb58868b816c202e8881fd0f179a4644ce6e7cbbb248ef0283a34b73ec73bb \ + --hash=sha256:4780677767dd52b99f0af1f123bc2c22873d30b474aa0e2fc3fe5e02217687c7 \ + --hash=sha256:4878e667ebabe9b65e785ac8da4d48886fe81193a84bbe49f12acff8f7a383a4 \ + --hash=sha256:487c8e61d7acc50b8be82bda8c8d21d20e133c3cbf41bd8ad7eb1aaeb3f07c97 \ + --hash=sha256:4beea0f31491bc086991b97517b9683e5cfb369205dac0148ef685ac12a20a67 \ + --hash=sha256:4cfbe42c686f33944e12f45a27d25a492cc0e43e1dc1da5d6a87cbcaf2e95627 \ + --hash=sha256:4d5bae0a37af799207140652a700f21a85946f107a199bcb06720b13a4f1f0b7 \ + --hash=sha256:4e285b5f2bf321fc0857b491b5028c5f276ec0c873b985d58d7748ece1d770dd \ + --hash=sha256:57e4d637258703d14171b54203fd6822fda218c6c2658a7d30816b10995f29f3 \ + --hash=sha256:5974895115737a74a00b321e339b9c3f45c20275d226398ae79ac008d908bff7 \ + --hash=sha256:5ef87fca280fb15342726bd5f980f6faf8b84a5287fcc2d4962ea8af88b35130 \ + --hash=sha256:603a464c2e67d8a546ddaa206d98e3246e5db05594b97db844c2f0a1af37cf5b \ + --hash=sha256:6653071f4f9bac46fbc30f3c7838b0e9063ee335908c5d61fb7a4a86c8fd2036 \ + --hash=sha256:6ca2264f341dd81e41f3fffecec6e446aa2121e0b8d026fb5130e02de1402785 \ + --hash=sha256:6d279033bf614953c3fc4a0aa9ac33a21e8044ca72d4fa8b9273fe75359d5cca \ + --hash=sha256:6d949f53ad4fc7cf02c44d6678e7ff05ec5f5552b235b9e136bd52e9bf730b91 \ + --hash=sha256:6daa662aba22ef3258934105be2dd9afa5bb45748f4f702a3b39a5bf53a1f4dc \ + --hash=sha256:6eafc048ea3f1b3c136c71a86db393be36b5b3d9c87b1c25204e7d397cee9536 \ + --hash=sha256:830c88747dce8a3e7525defa68afd742b4580df6aa2fdd6f0855481e3994d391 \ + --hash=sha256:86e92728ef3fc842c50a5cb1d5ba2bc66db7da08a7af53fb3da79e202d1b2cd3 \ + --hash=sha256:8caf4d16b31961e964c62194ea3e26a0e9561cdf72eecb1781458b67ec83423d \ + --hash=sha256:8d1a92d8e90b286d491e5626af53afef2ba04da33e82e30744795c71880eaa21 \ + --hash=sha256:8f0a4d179c9a941eb80c3a63cdb495e539e064f8054230844dcf2fcb812b71d3 \ + 
--hash=sha256:9232b09f5efee6a495a99ae6824881940d6447debe272ea400c02e3b68aad85d \ + --hash=sha256:927a9dd016d6033bc12e0bf5dee1dde140235fc8d0d51099353c76081c03dc29 \ + --hash=sha256:93e414e3206779ef41e5ff2448067213febf260ba747fc65389a3ddaa3fb8715 \ + --hash=sha256:98cafc618614d72b02185ac583c6f7796202062c41d2eeecdf07820bad3295ed \ + --hash=sha256:9c3a88d20e4fe4a2a4a84bf439a5ac9c9aba400b85244c63a1ab7088f85d9d25 \ + --hash=sha256:9f36de4cd0c262dd9927886cc2305aa3f2210db437aa4fed3fb4940b8bf4592c \ + --hash=sha256:a60f90bba4c37962cbf210f0188ecca87daafdf60271f4c6948606e4dabf8785 \ + --hash=sha256:a614e4afed58c14254e67862456d212c4dcceebab2eaa44d627c2ca04bf86837 \ + --hash=sha256:ae06c1e4bc60ee076292e582a7512f304abdf6c70db59b56745cca1684f875a4 \ + --hash=sha256:b122a188cd292c4d2fcd78d04f863b789ef43aa129b233d7c9004de08693728b \ + --hash=sha256:b570da8cd0012f4af9fa76a5635cd31f707473e65a5a335b186069d5c7121ff2 \ + --hash=sha256:bcaa1c495ce623966d9fc8a187da80082334236a2a1c7e141763ffaf7a405067 \ + --hash=sha256:bd34f6d1810d9354dc7e35158aa6cc33456be7706df4420819af6ed966e85448 \ + --hash=sha256:be9eb06489bc975c38706902cbc6888f39e946b81383abc2838d186f0e8b6a9d \ + --hash=sha256:c4b2e0559b68455c085fb0f6178e9752c4be3bba104d6e881eb5573b399d1eb2 \ + --hash=sha256:c62e8dd9754b7debda0c5ba59d34509c4688f853588d75b53c3791983faa96fc \ + --hash=sha256:c852b1530083a620cb0de5f3cd6826f19862bafeaf77586f1aef326e49d95f0c \ + --hash=sha256:d9fc0bf3ff86c17348dfc5d322f627d78273eba545db865c3cd14b3f19e57fa5 \ + --hash=sha256:dad7b164905d3e534883281c050180afcf1e230c3d4a54e8038aa5cfcf312b84 \ + --hash=sha256:e5f66bdf0976ec667fc4594d2812a00b07ed14d1b44259d19a41ae3fff99f2b8 \ + --hash=sha256:e8f0c9d65da595cfe91713bc1222af9ecabd37971762cb830dea2fc3b3bb2acf \ + --hash=sha256:edffbe3c510d8f4bf8640e02ca019e48a9b72357318383ca60e3330c23aaffc7 \ + --hash=sha256:eea5d6443b093e1545ad0210e6cf27f920482bfcf5c77cdc8596aec73523bb7e \ + --hash=sha256:ef72013e20dd5ba86a8ae1aed7f56f31d3374189aa8b433e7b12ad182c0d2dfb \ + --hash=sha256:f05251bbc2145349b8d0b77c0d4e5f3b228418807b1ee27cefb11f69ed3d233b \ + --hash=sha256:f1be258c4d3dc609e654a1dc59d37b17d7fef05df912c01fc2e15eb43a9735f3 \ + --hash=sha256:f9ced82717c7ec65a67667bb05865ffe38af0e835cdd78728f1209c8fffe0cad \ + --hash=sha256:fe17d10b97fdf58155f858606bddb4e037b805a60ae023c009f760d8361a4eb8 \ --hash=sha256:fe749b052bb7233fe5d072fcb549221a8cb1a16725c47c37e42b0b9cb3ff2c3f -markupsafe==2.1.0 ; python_version >= "3.7" \ - --hash=sha256:3028252424c72b2602a323f70fbf50aa80a5d3aa616ea6add4ba21ae9cc9da4c \ - --hash=sha256:290b02bab3c9e216da57c1d11d2ba73a9f73a614bbdcc027d299a60cdfabb11a \ - --hash=sha256:6e104c0c2b4cd765b4e83909cde7ec61a1e313f8a75775897db321450e928cce \ - --hash=sha256:24c3be29abb6b34052fd26fc7a8e0a49b1ee9d282e3665e8ad09a0a68faee5b3 \ - --hash=sha256:204730fd5fe2fe3b1e9ccadb2bd18ba8712b111dcabce185af0b3b5285a7c989 \ - --hash=sha256:d3b64c65328cb4cd252c94f83e66e3d7acf8891e60ebf588d7b493a55a1dbf26 \ - --hash=sha256:96de1932237abe0a13ba68b63e94113678c379dca45afa040a17b6e1ad7ed076 \ - --hash=sha256:75bb36f134883fdbe13d8e63b8675f5f12b80bb6627f7714c7d6c5becf22719f \ - --hash=sha256:4056f752015dfa9828dce3140dbadd543b555afb3252507348c493def166d454 \ - --hash=sha256:d4e702eea4a2903441f2735799d217f4ac1b55f7d8ad96ab7d4e25417cb0827c \ - --hash=sha256:f0eddfcabd6936558ec020130f932d479930581171368fd728efcfb6ef0dd357 \ - --hash=sha256:5ddea4c352a488b5e1069069f2f501006b1a4362cb906bee9a193ef1245a7a61 \ - --hash=sha256:09c86c9643cceb1d87ca08cdc30160d1b7ab49a8a21564868921959bd16441b8 \ - 
--hash=sha256:a0a0abef2ca47b33fb615b491ce31b055ef2430de52c5b3fb19a4042dbc5cadb \ - --hash=sha256:736895a020e31b428b3382a7887bfea96102c529530299f426bf2e636aacec9e \ - --hash=sha256:679cbb78914ab212c49c67ba2c7396dc599a8479de51b9a87b174700abd9ea49 \ - --hash=sha256:84ad5e29bf8bab3ad70fd707d3c05524862bddc54dc040982b0dbcff36481de7 \ - --hash=sha256:8da5924cb1f9064589767b0f3fc39d03e3d0fb5aa29e0cb21d43106519bd624a \ - --hash=sha256:454ffc1cbb75227d15667c09f164a0099159da0c1f3d2636aa648f12675491ad \ - --hash=sha256:142119fb14a1ef6d758912b25c4e803c3ff66920635c44078666fe7cc3f8f759 \ - --hash=sha256:b2a5a856019d2833c56a3dcac1b80fe795c95f401818ea963594b345929dffa7 \ - --hash=sha256:1d1fb9b2eec3c9714dd936860850300b51dbaa37404209c8d4cb66547884b7ed \ - --hash=sha256:62c0285e91414f5c8f621a17b69fc0088394ccdaa961ef469e833dbff64bd5ea \ - --hash=sha256:fc3150f85e2dbcf99e65238c842d1cfe69d3e7649b19864c1cc043213d9cd730 \ - --hash=sha256:f02cf7221d5cd915d7fa58ab64f7ee6dd0f6cddbb48683debf5d04ae9b1c2cc1 \ - --hash=sha256:d5653619b3eb5cbd35bfba3c12d575db2a74d15e0e1c08bf1db788069d410ce8 \ - --hash=sha256:7d2f5d97fcbd004c03df8d8fe2b973fe2b14e7bfeb2cfa012eaa8759ce9a762f \ - --hash=sha256:3cace1837bc84e63b3fd2dfce37f08f8c18aeb81ef5cf6bb9b51f625cb4e6cd8 \ - --hash=sha256:fabbe18087c3d33c5824cb145ffca52eccd053061df1d79d4b66dafa5ad2a5ea \ +markupsafe==2.1.0 ; python_full_version >= "3.7.1" and python_full_version < "4.0.0" \ --hash=sha256:023af8c54fe63530545f70dd2a2a7eed18d07a9a77b94e8bf1e2ff7f252db9a3 \ - --hash=sha256:d66624f04de4af8bbf1c7f21cc06649c1c69a7f84109179add573ce35e46d448 \ - --hash=sha256:c532d5ab79be0199fa2658e24a02fce8542df196e60665dd322409a03db6a52c \ - --hash=sha256:e67ec74fada3841b8c5f4c4f197bea916025cb9aa3fe5abf7d52b655d042f956 \ + --hash=sha256:09c86c9643cceb1d87ca08cdc30160d1b7ab49a8a21564868921959bd16441b8 \ + --hash=sha256:142119fb14a1ef6d758912b25c4e803c3ff66920635c44078666fe7cc3f8f759 \ + --hash=sha256:1d1fb9b2eec3c9714dd936860850300b51dbaa37404209c8d4cb66547884b7ed \ + --hash=sha256:204730fd5fe2fe3b1e9ccadb2bd18ba8712b111dcabce185af0b3b5285a7c989 \ + --hash=sha256:24c3be29abb6b34052fd26fc7a8e0a49b1ee9d282e3665e8ad09a0a68faee5b3 \ + --hash=sha256:290b02bab3c9e216da57c1d11d2ba73a9f73a614bbdcc027d299a60cdfabb11a \ + --hash=sha256:3028252424c72b2602a323f70fbf50aa80a5d3aa616ea6add4ba21ae9cc9da4c \ --hash=sha256:30c653fde75a6e5eb814d2a0a89378f83d1d3f502ab710904ee585c38888816c \ - --hash=sha256:961eb86e5be7d0973789f30ebcf6caab60b844203f4396ece27310295a6082c7 \ + --hash=sha256:3cace1837bc84e63b3fd2dfce37f08f8c18aeb81ef5cf6bb9b51f625cb4e6cd8 \ + --hash=sha256:4056f752015dfa9828dce3140dbadd543b555afb3252507348c493def166d454 \ + --hash=sha256:454ffc1cbb75227d15667c09f164a0099159da0c1f3d2636aa648f12675491ad \ --hash=sha256:598b65d74615c021423bd45c2bc5e9b59539c875a9bdb7e5f2a6b92dfcfc268d \ --hash=sha256:599941da468f2cf22bf90a84f6e2a65524e87be2fce844f96f2dd9a6c9d1e635 \ - --hash=sha256:e6f7f3f41faffaea6596da86ecc2389672fa949bd035251eab26dc6697451d05 \ + --hash=sha256:5ddea4c352a488b5e1069069f2f501006b1a4362cb906bee9a193ef1245a7a61 \ + --hash=sha256:62c0285e91414f5c8f621a17b69fc0088394ccdaa961ef469e833dbff64bd5ea \ + --hash=sha256:679cbb78914ab212c49c67ba2c7396dc599a8479de51b9a87b174700abd9ea49 \ + --hash=sha256:6e104c0c2b4cd765b4e83909cde7ec61a1e313f8a75775897db321450e928cce \ + --hash=sha256:736895a020e31b428b3382a7887bfea96102c529530299f426bf2e636aacec9e \ + --hash=sha256:75bb36f134883fdbe13d8e63b8675f5f12b80bb6627f7714c7d6c5becf22719f \ + 
--hash=sha256:7d2f5d97fcbd004c03df8d8fe2b973fe2b14e7bfeb2cfa012eaa8759ce9a762f \ + --hash=sha256:80beaf63ddfbc64a0452b841d8036ca0611e049650e20afcb882f5d3c266d65f \ + --hash=sha256:84ad5e29bf8bab3ad70fd707d3c05524862bddc54dc040982b0dbcff36481de7 \ + --hash=sha256:8da5924cb1f9064589767b0f3fc39d03e3d0fb5aa29e0cb21d43106519bd624a \ + --hash=sha256:961eb86e5be7d0973789f30ebcf6caab60b844203f4396ece27310295a6082c7 \ + --hash=sha256:96de1932237abe0a13ba68b63e94113678c379dca45afa040a17b6e1ad7ed076 \ + --hash=sha256:a0a0abef2ca47b33fb615b491ce31b055ef2430de52c5b3fb19a4042dbc5cadb \ + --hash=sha256:b2a5a856019d2833c56a3dcac1b80fe795c95f401818ea963594b345929dffa7 \ --hash=sha256:b8811d48078d1cf2a6863dafb896e68406c5f513048451cd2ded0473133473c7 \ - --hash=sha256:80beaf63ddfbc64a0452b841d8036ca0611e049650e20afcb882f5d3c266d65f -matrix-common==1.2.1 \ + --hash=sha256:c532d5ab79be0199fa2658e24a02fce8542df196e60665dd322409a03db6a52c \ + --hash=sha256:d3b64c65328cb4cd252c94f83e66e3d7acf8891e60ebf588d7b493a55a1dbf26 \ + --hash=sha256:d4e702eea4a2903441f2735799d217f4ac1b55f7d8ad96ab7d4e25417cb0827c \ + --hash=sha256:d5653619b3eb5cbd35bfba3c12d575db2a74d15e0e1c08bf1db788069d410ce8 \ + --hash=sha256:d66624f04de4af8bbf1c7f21cc06649c1c69a7f84109179add573ce35e46d448 \ + --hash=sha256:e67ec74fada3841b8c5f4c4f197bea916025cb9aa3fe5abf7d52b655d042f956 \ + --hash=sha256:e6f7f3f41faffaea6596da86ecc2389672fa949bd035251eab26dc6697451d05 \ + --hash=sha256:f02cf7221d5cd915d7fa58ab64f7ee6dd0f6cddbb48683debf5d04ae9b1c2cc1 \ + --hash=sha256:f0eddfcabd6936558ec020130f932d479930581171368fd728efcfb6ef0dd357 \ + --hash=sha256:fabbe18087c3d33c5824cb145ffca52eccd053061df1d79d4b66dafa5ad2a5ea \ + --hash=sha256:fc3150f85e2dbcf99e65238c842d1cfe69d3e7649b19864c1cc043213d9cd730 +matrix-common==1.2.1 ; python_full_version >= "3.7.1" and python_full_version < "4.0.0" \ --hash=sha256:946709c405944a0d4b1d73207b77eb064b6dbfc5d70a69471320b06d8ce98b20 \ --hash=sha256:a99dcf02a6bd95b24a5a61b354888a2ac92bf2b4b839c727b8dd9da2cdfa3853 -msgpack==1.0.3 \ - --hash=sha256:96acc674bb9c9be63fa8b6dabc3248fdc575c4adc005c440ad02f87ca7edd079 \ - --hash=sha256:2c3ca57c96c8e69c1a0d2926a6acf2d9a522b41dc4253a8945c4c6cd4981a4e3 \ - --hash=sha256:b0a792c091bac433dfe0a70ac17fc2087d4595ab835b47b89defc8bbabcf5c73 \ +msgpack==1.0.3 ; python_full_version >= "3.7.1" and python_full_version < "4.0.0" \ + --hash=sha256:0d8c332f53ffff01953ad25131272506500b14750c1d0ce8614b17d098252fbc \ --hash=sha256:1c58cdec1cb5fcea8c2f1771d7b5fec79307d056874f746690bd2bdd609ab147 \ + --hash=sha256:2c3ca57c96c8e69c1a0d2926a6acf2d9a522b41dc4253a8945c4c6cd4981a4e3 \ + --hash=sha256:2f30dd0dc4dfe6231ad253b6f9f7128ac3202ae49edd3f10d311adc358772dba \ --hash=sha256:2f97c0f35b3b096a330bb4a1a9247d0bd7e1f3a2eba7ab69795501504b1c2c39 \ --hash=sha256:36a64a10b16c2ab31dcd5f32d9787ed41fe68ab23dd66957ca2826c7f10d0b85 \ - --hash=sha256:c1ba333b4024c17c7591f0f372e2daa3c31db495a9b2af3cf664aef3c14354f7 \ - --hash=sha256:c2140cf7a3ec475ef0938edb6eb363fa704159e0bf71dde15d953bacc1cf9d7d \ - --hash=sha256:6f4c22717c74d44bcd7af353024ce71c6b55346dad5e2cc1ddc17ce8c4507c6b \ - --hash=sha256:47d733a15ade190540c703de209ffbc42a3367600421b62ac0c09fde594da6ec \ - --hash=sha256:c7e03b06f2982aa98d4ddd082a210c3db200471da523f9ac197f2828e80e7770 \ --hash=sha256:3d875631ecab42f65f9dce6f55ce6d736696ced240f2634633188de2f5f21af9 \ --hash=sha256:40fb89b4625d12d6027a19f4df18a4de5c64f6f3314325049f219683e07e678a \ - --hash=sha256:6eef0cf8db3857b2b556213d97dd82de76e28a6524853a9beb3264983391dc1a \ - 
--hash=sha256:0d8c332f53ffff01953ad25131272506500b14750c1d0ce8614b17d098252fbc \ - --hash=sha256:9c0903bd93cbd34653dd63bbfcb99d7539c372795201f39d16fdfde4418de43a \ - --hash=sha256:bf1e6bfed4860d72106f4e0a1ab519546982b45689937b40257cfd820650b920 \ - --hash=sha256:d02cea2252abc3756b2ac31f781f7a98e89ff9759b2e7450a1c7a0d13302ff50 \ - --hash=sha256:2f30dd0dc4dfe6231ad253b6f9f7128ac3202ae49edd3f10d311adc358772dba \ - --hash=sha256:f201d34dc89342fabb2a10ed7c9a9aaaed9b7af0f16a5923f1ae562b31258dea \ - --hash=sha256:bb87f23ae7d14b7b3c21009c4b1705ec107cb21ee71975992f6aca571fb4a42a \ - --hash=sha256:8a3a5c4b16e9d0edb823fe54b59b5660cc8d4782d7bf2c214cb4b91a1940a8ef \ - --hash=sha256:f74da1e5fcf20ade12c6bf1baa17a2dc3604958922de8dc83cbe3eff22e8b611 \ - --hash=sha256:73a80bd6eb6bcb338c1ec0da273f87420829c266379c8c82fa14c23fb586cfa1 \ - --hash=sha256:9fce00156e79af37bb6db4e7587b30d11e7ac6a02cb5bac387f023808cd7d7f4 \ - --hash=sha256:9b6f2d714c506e79cbead331de9aae6837c8dd36190d02da74cb409b36162e8a \ - --hash=sha256:89908aea5f46ee1474cc37fbc146677f8529ac99201bc2faf4ef8edc023c2bf3 \ - --hash=sha256:973ad69fd7e31159eae8f580f3f707b718b61141838321c6fa4d891c4a2cca52 \ - --hash=sha256:da24375ab4c50e5b7486c115a3198d207954fe10aaa5708f7b65105df09109b2 \ - --hash=sha256:a598d0685e4ae07a0672b59792d2cc767d09d7a7f39fd9bd37ff84e060b1a996 \ - --hash=sha256:e4c309a68cb5d6bbd0c50d5c71a25ae81f268c2dc675c6f4ea8ab2feec2ac4e2 \ + --hash=sha256:47d733a15ade190540c703de209ffbc42a3367600421b62ac0c09fde594da6ec \ --hash=sha256:494471d65b25a8751d19c83f1a482fd411d7ca7a3b9e17d25980a74075ba0e88 \ + --hash=sha256:51fdc7fb93615286428ee7758cecc2f374d5ff363bdd884c7ea622a7a327a81e \ + --hash=sha256:6eef0cf8db3857b2b556213d97dd82de76e28a6524853a9beb3264983391dc1a \ + --hash=sha256:6f4c22717c74d44bcd7af353024ce71c6b55346dad5e2cc1ddc17ce8c4507c6b \ + --hash=sha256:73a80bd6eb6bcb338c1ec0da273f87420829c266379c8c82fa14c23fb586cfa1 \ + --hash=sha256:89908aea5f46ee1474cc37fbc146677f8529ac99201bc2faf4ef8edc023c2bf3 \ + --hash=sha256:8a3a5c4b16e9d0edb823fe54b59b5660cc8d4782d7bf2c214cb4b91a1940a8ef \ + --hash=sha256:96acc674bb9c9be63fa8b6dabc3248fdc575c4adc005c440ad02f87ca7edd079 \ + --hash=sha256:973ad69fd7e31159eae8f580f3f707b718b61141838321c6fa4d891c4a2cca52 \ + --hash=sha256:9b6f2d714c506e79cbead331de9aae6837c8dd36190d02da74cb409b36162e8a \ + --hash=sha256:9c0903bd93cbd34653dd63bbfcb99d7539c372795201f39d16fdfde4418de43a \ + --hash=sha256:9fce00156e79af37bb6db4e7587b30d11e7ac6a02cb5bac387f023808cd7d7f4 \ + --hash=sha256:a598d0685e4ae07a0672b59792d2cc767d09d7a7f39fd9bd37ff84e060b1a996 \ + --hash=sha256:b0a792c091bac433dfe0a70ac17fc2087d4595ab835b47b89defc8bbabcf5c73 \ + --hash=sha256:bb87f23ae7d14b7b3c21009c4b1705ec107cb21ee71975992f6aca571fb4a42a \ + --hash=sha256:bf1e6bfed4860d72106f4e0a1ab519546982b45689937b40257cfd820650b920 \ + --hash=sha256:c1ba333b4024c17c7591f0f372e2daa3c31db495a9b2af3cf664aef3c14354f7 \ + --hash=sha256:c2140cf7a3ec475ef0938edb6eb363fa704159e0bf71dde15d953bacc1cf9d7d \ + --hash=sha256:c7e03b06f2982aa98d4ddd082a210c3db200471da523f9ac197f2828e80e7770 \ + --hash=sha256:d02cea2252abc3756b2ac31f781f7a98e89ff9759b2e7450a1c7a0d13302ff50 \ + --hash=sha256:da24375ab4c50e5b7486c115a3198d207954fe10aaa5708f7b65105df09109b2 \ + --hash=sha256:e4c309a68cb5d6bbd0c50d5c71a25ae81f268c2dc675c6f4ea8ab2feec2ac4e2 \ --hash=sha256:f01b26c2290cbd74316990ba84a14ac3d599af9cebefc543d241a66e785cf17d \ - --hash=sha256:51fdc7fb93615286428ee7758cecc2f374d5ff363bdd884c7ea622a7a327a81e -netaddr==0.8.0 \ + 
--hash=sha256:f201d34dc89342fabb2a10ed7c9a9aaaed9b7af0f16a5923f1ae562b31258dea \ + --hash=sha256:f74da1e5fcf20ade12c6bf1baa17a2dc3604958922de8dc83cbe3eff22e8b611 +netaddr==0.8.0 ; python_full_version >= "3.7.1" and python_full_version < "4.0.0" \ --hash=sha256:9666d0232c32d2656e5e5f8d735f58fd6c7457ce52fc21c98d45f2af78f990ac \ --hash=sha256:d6cc57c7a07b1d9d2e917aa8b36ae8ce61c35ba3fcd1b83ca31c5a0ee2b5a243 -packaging==21.3 \ - --hash=sha256:ef103e05f519cdc783ae24ea4e2e0f508a9c99b2d4969652eed6a2e1ea5bd522 \ - --hash=sha256:dd47c42927d89ab911e606518907cc2d3a1f38bbd026385970643f9c5b8ecfeb -parameterized==0.8.1 \ - --hash=sha256:9cbb0b69a03e8695d68b3399a8a5825200976536fe1cb79db60ed6a4c8c9efe9 \ - --hash=sha256:41bbff37d6186430f77f900d777e5bb6a24928a1c46fb1de692f8b52b8833b5c -phonenumbers==8.12.44 \ - --hash=sha256:cc1299cf37b309ecab6214297663ab86cb3d64ae37fd5b88e904fe7983a874a6 \ - --hash=sha256:26cfd0257d1704fe2f88caff2caabb70d16a877b1e65b6aae51f9fbbe10aa8ce -pillow==9.0.1 \ - --hash=sha256:a5d24e1d674dd9d72c66ad3ea9131322819ff86250b30dc5821cbafcfa0b96b4 \ - --hash=sha256:2632d0f846b7c7600edf53c48f8f9f1e13e62f66a6dbc15191029d950bfed976 \ - --hash=sha256:b9618823bd237c0d2575283f2939655f54d51b4527ec3972907a927acbcc5bfc \ - --hash=sha256:9bfdb82cdfeccec50aad441afc332faf8606dfa5e8efd18a6692b5d6e79f00fd \ - --hash=sha256:5100b45a4638e3c00e4d2320d3193bdabb2d75e79793af7c3eb139e4f569f16f \ - --hash=sha256:528a2a692c65dd5cafc130de286030af251d2ee0483a5bf50c9348aefe834e8a \ +packaging==21.3 ; python_full_version >= "3.7.1" and python_full_version < "4.0.0" \ + --hash=sha256:dd47c42927d89ab911e606518907cc2d3a1f38bbd026385970643f9c5b8ecfeb \ + --hash=sha256:ef103e05f519cdc783ae24ea4e2e0f508a9c99b2d4969652eed6a2e1ea5bd522 +parameterized==0.8.1 ; python_full_version >= "3.7.1" and python_full_version < "4.0.0" \ + --hash=sha256:41bbff37d6186430f77f900d777e5bb6a24928a1c46fb1de692f8b52b8833b5c \ + --hash=sha256:9cbb0b69a03e8695d68b3399a8a5825200976536fe1cb79db60ed6a4c8c9efe9 +phonenumbers==8.12.44 ; python_full_version >= "3.7.1" and python_full_version < "4.0.0" \ + --hash=sha256:26cfd0257d1704fe2f88caff2caabb70d16a877b1e65b6aae51f9fbbe10aa8ce \ + --hash=sha256:cc1299cf37b309ecab6214297663ab86cb3d64ae37fd5b88e904fe7983a874a6 +pillow==9.0.1 ; python_full_version >= "3.7.1" and python_full_version < "4.0.0" \ + --hash=sha256:011233e0c42a4a7836498e98c1acf5e744c96a67dd5032a6f666cc1fb97eab97 \ --hash=sha256:0f29d831e2151e0b7b39981756d201f7108d3d215896212ffe2e992d06bfe049 \ - --hash=sha256:855c583f268edde09474b081e3ddcd5cf3b20c12f26e0d434e1386cc5d318e7a \ - --hash=sha256:d9d7942b624b04b895cb95af03a23407f17646815495ce4547f0e60e0b06f58e \ - --hash=sha256:81c4b81611e3a3cb30e59b0cf05b888c675f97e3adb2c8672c3154047980726b \ - --hash=sha256:413ce0bbf9fc6278b2d63309dfeefe452835e1c78398efb431bab0672fe9274e \ - --hash=sha256:80fe64a6deb6fcfdf7b8386f2cf216d329be6f2781f7d90304351811fb591360 \ - --hash=sha256:cef9c85ccbe9bee00909758936ea841ef12035296c748aaceee535969e27d31b \ - --hash=sha256:1d19397351f73a88904ad1aee421e800fe4bbcd1aeee6435fb62d0a05ccd1030 \ - --hash=sha256:d21237d0cd37acded35154e29aec853e945950321dd2ffd1a7d86fe686814669 \ - --hash=sha256:ede5af4a2702444a832a800b8eb7f0a7a1c0eed55b644642e049c98d589e5092 \ - --hash=sha256:b5b3f092fe345c03bca1e0b687dfbb39364b21ebb8ba90e3fa707374b7915204 \ - --hash=sha256:335ace1a22325395c4ea88e00ba3dc89ca029bd66bd5a3c382d53e44f0ccd77e \ - --hash=sha256:db6d9fac65bd08cea7f3540b899977c6dee9edad959fa4eaf305940d9cbd861c \ - 
--hash=sha256:f154d173286a5d1863637a7dcd8c3437bb557520b01bddb0be0258dcb72696b5 \ + --hash=sha256:12875d118f21cf35604176872447cdb57b07126750a33748bac15e77f90f1f9c \ --hash=sha256:14d4b1341ac07ae07eb2cc682f459bec932a380c3b122f5540432d8977e64eae \ - --hash=sha256:effb7749713d5317478bb3acb3f81d9d7c7f86726d41c1facca068a04cf5bb4c \ + --hash=sha256:1c3c33ac69cf059bbb9d1a71eeaba76781b450bc307e2291f8a4764d779a6b28 \ + --hash=sha256:1d19397351f73a88904ad1aee421e800fe4bbcd1aeee6435fb62d0a05ccd1030 \ + --hash=sha256:253e8a302a96df6927310a9d44e6103055e8fb96a6822f8b7f514bb7ef77de56 \ + --hash=sha256:2632d0f846b7c7600edf53c48f8f9f1e13e62f66a6dbc15191029d950bfed976 \ + --hash=sha256:335ace1a22325395c4ea88e00ba3dc89ca029bd66bd5a3c382d53e44f0ccd77e \ + --hash=sha256:413ce0bbf9fc6278b2d63309dfeefe452835e1c78398efb431bab0672fe9274e \ + --hash=sha256:5100b45a4638e3c00e4d2320d3193bdabb2d75e79793af7c3eb139e4f569f16f \ + --hash=sha256:514ceac913076feefbeaf89771fd6febde78b0c4c1b23aaeab082c41c694e81b \ + --hash=sha256:528a2a692c65dd5cafc130de286030af251d2ee0483a5bf50c9348aefe834e8a \ + --hash=sha256:6295f6763749b89c994fcb6d8a7f7ce03c3992e695f89f00b741b4580b199b7e \ + --hash=sha256:6c8bc8238a7dfdaf7a75f5ec5a663f4173f8c367e5a39f87e720495e1eed75fa \ + --hash=sha256:718856856ba31f14f13ba885ff13874be7fefc53984d2832458f12c38205f7f7 \ --hash=sha256:7f7609a718b177bf171ac93cea9fd2ddc0e03e84d8fa4e887bdfc39671d46b00 \ --hash=sha256:80ca33961ced9c63358056bd08403ff866512038883e74f3a4bf88ad3eb66838 \ - --hash=sha256:1c3c33ac69cf059bbb9d1a71eeaba76781b450bc307e2291f8a4764d779a6b28 \ - --hash=sha256:12875d118f21cf35604176872447cdb57b07126750a33748bac15e77f90f1f9c \ - --hash=sha256:514ceac913076feefbeaf89771fd6febde78b0c4c1b23aaeab082c41c694e81b \ - --hash=sha256:d3c5c79ab7dfce6d88f1ba639b77e77a17ea33a01b07b99840d6ed08031cb2a7 \ - --hash=sha256:718856856ba31f14f13ba885ff13874be7fefc53984d2832458f12c38205f7f7 \ - --hash=sha256:f25ed6e28ddf50de7e7ea99d7a976d6a9c415f03adcaac9c41ff6ff41b6d86ac \ - --hash=sha256:011233e0c42a4a7836498e98c1acf5e744c96a67dd5032a6f666cc1fb97eab97 \ - --hash=sha256:253e8a302a96df6927310a9d44e6103055e8fb96a6822f8b7f514bb7ef77de56 \ - --hash=sha256:6295f6763749b89c994fcb6d8a7f7ce03c3992e695f89f00b741b4580b199b7e \ + --hash=sha256:80fe64a6deb6fcfdf7b8386f2cf216d329be6f2781f7d90304351811fb591360 \ + --hash=sha256:81c4b81611e3a3cb30e59b0cf05b888c675f97e3adb2c8672c3154047980726b \ + --hash=sha256:855c583f268edde09474b081e3ddcd5cf3b20c12f26e0d434e1386cc5d318e7a \ + --hash=sha256:9bfdb82cdfeccec50aad441afc332faf8606dfa5e8efd18a6692b5d6e79f00fd \ + --hash=sha256:a5d24e1d674dd9d72c66ad3ea9131322819ff86250b30dc5821cbafcfa0b96b4 \ --hash=sha256:a9f44cd7e162ac6191491d7249cceb02b8116b0f7e847ee33f739d7cb1ea1f70 \ - --hash=sha256:6c8bc8238a7dfdaf7a75f5ec5a663f4173f8c367e5a39f87e720495e1eed75fa -prometheus-client==0.14.0 \ - --hash=sha256:f4aba3fdd1735852049f537c1f0ab177159b7ab76f271ecc4d2f45aa2a1d01f2 \ - --hash=sha256:8f7a922dd5455ad524b6ba212ce8eb2b4b05e073f4ec7218287f88b1cac34750 -psycopg2==2.9.3 ; python_version >= "3.6" and platform_python_implementation == "PyPy" or platform_python_implementation != "PyPy" \ + --hash=sha256:b5b3f092fe345c03bca1e0b687dfbb39364b21ebb8ba90e3fa707374b7915204 \ + --hash=sha256:b9618823bd237c0d2575283f2939655f54d51b4527ec3972907a927acbcc5bfc \ + --hash=sha256:cef9c85ccbe9bee00909758936ea841ef12035296c748aaceee535969e27d31b \ + --hash=sha256:d21237d0cd37acded35154e29aec853e945950321dd2ffd1a7d86fe686814669 \ + --hash=sha256:d3c5c79ab7dfce6d88f1ba639b77e77a17ea33a01b07b99840d6ed08031cb2a7 \ + 
--hash=sha256:d9d7942b624b04b895cb95af03a23407f17646815495ce4547f0e60e0b06f58e \ + --hash=sha256:db6d9fac65bd08cea7f3540b899977c6dee9edad959fa4eaf305940d9cbd861c \ + --hash=sha256:ede5af4a2702444a832a800b8eb7f0a7a1c0eed55b644642e049c98d589e5092 \ + --hash=sha256:effb7749713d5317478bb3acb3f81d9d7c7f86726d41c1facca068a04cf5bb4c \ + --hash=sha256:f154d173286a5d1863637a7dcd8c3437bb557520b01bddb0be0258dcb72696b5 \ + --hash=sha256:f25ed6e28ddf50de7e7ea99d7a976d6a9c415f03adcaac9c41ff6ff41b6d86ac +prometheus-client==0.14.0 ; python_full_version >= "3.7.1" and python_full_version < "4.0.0" \ + --hash=sha256:8f7a922dd5455ad524b6ba212ce8eb2b4b05e073f4ec7218287f88b1cac34750 \ + --hash=sha256:f4aba3fdd1735852049f537c1f0ab177159b7ab76f271ecc4d2f45aa2a1d01f2 +psycopg2==2.9.3 ; python_full_version >= "3.7.1" and python_full_version < "4.0.0" \ + --hash=sha256:06f32425949bd5fe8f625c49f17ebb9784e1e4fe928b7cce72edc36fb68e4c0c \ + --hash=sha256:0762c27d018edbcb2d34d51596e4346c983bd27c330218c56c4dc25ef7e819bf \ --hash=sha256:083707a696e5e1c330af2508d8fab36f9700b26621ccbcb538abe22e15485362 \ - --hash=sha256:d3ca6421b942f60c008f81a3541e8faf6865a28d5a9b48544b0ee4f40cac7fca \ + --hash=sha256:34b33e0162cfcaad151f249c2649fd1030010c16f4bbc40a604c1cb77173dcf7 \ + --hash=sha256:4295093a6ae3434d33ec6baab4ca5512a5082cc43c0505293087b8a46d108461 \ + --hash=sha256:8cf3878353cc04b053822896bc4922b194792df9df2f1ad8da01fb3043602126 \ + --hash=sha256:8e841d1bf3434da985cc5ef13e6f75c8981ced601fd70cc6bf33351b91562981 \ --hash=sha256:9572e08b50aed176ef6d66f15a21d823bb6f6d23152d35e8451d7d2d18fdac56 \ --hash=sha256:a81e3866f99382dfe8c15a151f1ca5fde5815fde879348fe5a9884a7c092a305 \ --hash=sha256:cb10d44e6694d763fa1078a26f7f6137d69f555a78ec85dc2ef716c37447e4b2 \ - --hash=sha256:4295093a6ae3434d33ec6baab4ca5512a5082cc43c0505293087b8a46d108461 \ - --hash=sha256:34b33e0162cfcaad151f249c2649fd1030010c16f4bbc40a604c1cb77173dcf7 \ - --hash=sha256:0762c27d018edbcb2d34d51596e4346c983bd27c330218c56c4dc25ef7e819bf \ - --hash=sha256:8cf3878353cc04b053822896bc4922b194792df9df2f1ad8da01fb3043602126 \ - --hash=sha256:06f32425949bd5fe8f625c49f17ebb9784e1e4fe928b7cce72edc36fb68e4c0c \ - --hash=sha256:8e841d1bf3434da985cc5ef13e6f75c8981ced601fd70cc6bf33351b91562981 -psycopg2cffi-compat==1.1 ; platform_python_implementation == "PyPy" \ + --hash=sha256:d3ca6421b942f60c008f81a3541e8faf6865a28d5a9b48544b0ee4f40cac7fca +psycopg2cffi-compat==1.1 ; python_full_version >= "3.7.1" and python_full_version < "4.0.0" and platform_python_implementation == "PyPy" \ --hash=sha256:d25e921748475522b33d13420aad5c2831c743227dc1f1f2585e0fdb5c914e05 -psycopg2cffi==2.9.0 ; platform_python_implementation == "PyPy" \ +psycopg2cffi==2.9.0 ; python_full_version >= "3.7.1" and python_full_version < "4.0.0" and platform_python_implementation == "PyPy" \ --hash=sha256:7e272edcd837de3a1d12b62185eb85c45a19feda9e62fa1b120c54f9e8d35c52 -pyasn1-modules==0.2.8 \ - --hash=sha256:905f84c712230b2c592c19470d3ca8d552de726050d1d1716282a1f6146be65e \ - --hash=sha256:0fe1b68d1e486a1ed5473f1302bd991c1611d319bba158e98b106ff86e1d7199 \ - --hash=sha256:fe0644d9ab041506b62782e92b06b8c68cca799e1a9636ec398675459e031405 \ - --hash=sha256:a99324196732f53093a84c4369c996713eb8c89d360a496b599fb1a9c47fc3eb \ +pyasn1-modules==0.2.8 ; python_full_version >= "3.7.1" and python_full_version < "4.0.0" \ --hash=sha256:0845a5582f6a02bb3e1bde9ecfc4bfcae6ec3210dd270522fee602365430c3f8 \ - --hash=sha256:a50b808ffeb97cb3601dd25981f6b016cbb3d31fbf57a8b8a87428e6158d0c74 \ - 
--hash=sha256:f39edd8c4ecaa4556e989147ebf219227e2cd2e8a43c7e7fcb1f1c18c5fd6a3d \ - --hash=sha256:b80486a6c77252ea3a3e9b1e360bc9cf28eaac41263d173c032581ad2f20fe45 \ - --hash=sha256:65cebbaffc913f4fe9e4808735c95ea22d7a7775646ab690518c056784bc21b4 \ + --hash=sha256:0fe1b68d1e486a1ed5473f1302bd991c1611d319bba158e98b106ff86e1d7199 \ --hash=sha256:15b7c67fabc7fc240d87fb9aabf999cf82311a6d6fb2c70d00d3d0604878c811 \ --hash=sha256:426edb7a5e8879f1ec54a1864f16b882c2837bfd06eee62f2c982315ee2473ed \ + --hash=sha256:65cebbaffc913f4fe9e4808735c95ea22d7a7775646ab690518c056784bc21b4 \ + --hash=sha256:905f84c712230b2c592c19470d3ca8d552de726050d1d1716282a1f6146be65e \ + --hash=sha256:a50b808ffeb97cb3601dd25981f6b016cbb3d31fbf57a8b8a87428e6158d0c74 \ + --hash=sha256:a99324196732f53093a84c4369c996713eb8c89d360a496b599fb1a9c47fc3eb \ + --hash=sha256:b80486a6c77252ea3a3e9b1e360bc9cf28eaac41263d173c032581ad2f20fe45 \ + --hash=sha256:c29a5e5cc7a3f05926aff34e097e84f8589cd790ce0ed41b67aed6857b26aafd \ --hash=sha256:cbac4bc38d117f2a49aeedec4407d23e8866ea4ac27ff2cf7fb3e5b570df19e0 \ - --hash=sha256:c29a5e5cc7a3f05926aff34e097e84f8589cd790ce0ed41b67aed6857b26aafd -pyasn1==0.4.8 \ - --hash=sha256:fec3e9d8e36808a28efb59b489e4528c10ad0f480e57dcc32b4de5c9d8c9fdf3 \ + --hash=sha256:f39edd8c4ecaa4556e989147ebf219227e2cd2e8a43c7e7fcb1f1c18c5fd6a3d \ + --hash=sha256:fe0644d9ab041506b62782e92b06b8c68cca799e1a9636ec398675459e031405 +pyasn1==0.4.8 ; python_full_version >= "3.7.1" and python_full_version < "4.0.0" \ + --hash=sha256:014c0e9976956a08139dc0712ae195324a75e142284d5f87f1a87ee1b068a359 \ + --hash=sha256:03840c999ba71680a131cfaee6fab142e1ed9bbd9c693e285cc6aca0d555e576 \ --hash=sha256:0458773cfe65b153891ac249bcf1b5f8f320b7c2ce462151f8fa74de8934becf \ + --hash=sha256:08c3c53b75eaa48d71cf8c710312316392ed40899cb34710d092e96745a358b7 \ + --hash=sha256:39c7e2ec30515947ff4e87fb6f456dfc6e84857d34be479c9d4a4ba4bf46aa5d \ --hash=sha256:5c9414dcfede6e441f7e8f81b43b34e834731003427e5b09e4e00e3172a10f00 \ --hash=sha256:6e7545f1a61025a4e58bb336952c5061697da694db1cae97b116e9c46abcf7c8 \ - --hash=sha256:39c7e2ec30515947ff4e87fb6f456dfc6e84857d34be479c9d4a4ba4bf46aa5d \ --hash=sha256:78fa6da68ed2727915c4767bb386ab32cdba863caa7dbe473eaae45f9959da86 \ - --hash=sha256:08c3c53b75eaa48d71cf8c710312316392ed40899cb34710d092e96745a358b7 \ - --hash=sha256:03840c999ba71680a131cfaee6fab142e1ed9bbd9c693e285cc6aca0d555e576 \ --hash=sha256:7ab8a544af125fb704feadb008c99a88805126fb525280b2270bb25cc1d78a12 \ - --hash=sha256:e89bf84b5437b532b0803ba5c9a5e054d21fec423a89952a74f87fa2c9b7bce2 \ - --hash=sha256:014c0e9976956a08139dc0712ae195324a75e142284d5f87f1a87ee1b068a359 \ --hash=sha256:99fcc3c8d804d1bc6d9a099921e39d827026409a58f2a720dcdb89374ea0c776 \ - --hash=sha256:aef77c9fb94a3ac588e87841208bdec464471d9871bd5050a287cc9a475cd0ba -pycparser==2.21 ; python_full_version >= "3.6.7" and platform_python_implementation == "PyPy" \ + --hash=sha256:aef77c9fb94a3ac588e87841208bdec464471d9871bd5050a287cc9a475cd0ba \ + --hash=sha256:e89bf84b5437b532b0803ba5c9a5e054d21fec423a89952a74f87fa2c9b7bce2 \ + --hash=sha256:fec3e9d8e36808a28efb59b489e4528c10ad0f480e57dcc32b4de5c9d8c9fdf3 +pycparser==2.21 ; python_full_version >= "3.7.1" and python_full_version < "4.0.0" \ --hash=sha256:8ee45429555515e1f6b185e78100aea234072576aa43ab53aefcae078162fca9 \ --hash=sha256:e644fdec12f7872f86c58ff790da456218b10f863970249516d60a5eaca77206 -pydantic==1.9.1 \ - --hash=sha256:c8098a724c2784bf03e8070993f6d46aa2eeca031f8d8a048dff277703e6e193 \ - 
--hash=sha256:c320c64dd876e45254bdd350f0179da737463eea41c43bacbee9d8c9d1021f11 \ - --hash=sha256:18f3e912f9ad1bdec27fb06b8198a2ccc32f201e24174cec1b3424dda605a310 \ - --hash=sha256:c11951b404e08b01b151222a1cb1a9f0a860a8153ce8334149ab9199cd198131 \ - --hash=sha256:8bc541a405423ce0e51c19f637050acdbdf8feca34150e0d17f675e72d119580 \ - --hash=sha256:e565a785233c2d03724c4dc55464559639b1ba9ecf091288dd47ad9c629433bd \ - --hash=sha256:a4a88dcd6ff8fd47c18b3a3709a89adb39a6373f4482e04c1b765045c7e282fd \ - --hash=sha256:447d5521575f18e18240906beadc58551e97ec98142266e521c34968c76c8761 \ - --hash=sha256:985ceb5d0a86fcaa61e45781e567a59baa0da292d5ed2e490d612d0de5796918 \ - --hash=sha256:059b6c1795170809103a1538255883e1983e5b831faea6558ef873d4955b4a74 \ - --hash=sha256:d12f96b5b64bec3f43c8e82b4aab7599d0157f11c798c9f9c528a72b9e0b339a \ - --hash=sha256:ae72f8098acb368d877b210ebe02ba12585e77bd0db78ac04a1ee9b9f5dd2166 \ - --hash=sha256:79b485767c13788ee314669008d01f9ef3bc05db9ea3298f6a50d3ef596a154b \ - --hash=sha256:494f7c8537f0c02b740c229af4cb47c0d39840b829ecdcfc93d91dcbb0779892 \ - --hash=sha256:f0f047e11febe5c3198ed346b507e1d010330d56ad615a7e0a89fae604065a0e \ - --hash=sha256:969dd06110cb780da01336b281f53e2e7eb3a482831df441fb65dd30403f4608 \ - --hash=sha256:177071dfc0df6248fd22b43036f936cfe2508077a72af0933d0c1fa269b18537 \ - --hash=sha256:9bcf8b6e011be08fb729d110f3e22e654a50f8a826b0575c7196616780683380 \ - --hash=sha256:a955260d47f03df08acf45689bd163ed9df82c0e0124beb4251b1290fa7ae728 \ - --hash=sha256:9ce157d979f742a915b75f792dbd6aa63b8eccaf46a1005ba03aa8a986bde34a \ - --hash=sha256:0bf07cab5b279859c253d26a9194a8906e6f4a210063b84b433cf90a569de0c1 \ - --hash=sha256:5d93d4e95eacd313d2c765ebe40d49ca9dd2ed90e5b37d0d421c597af830c195 \ - --hash=sha256:1542636a39c4892c4f4fa6270696902acb186a9aaeac6f6cf92ce6ae2e88564b \ - --hash=sha256:a9af62e9b5b9bc67b2a195ebc2c2662fdf498a822d62f902bf27cccb52dbbf49 \ - --hash=sha256:fe4670cb32ea98ffbf5a1262f14c3e102cccd92b1869df3bb09538158ba90fe6 \ - --hash=sha256:9f659a5ee95c8baa2436d392267988fd0f43eb774e5eb8739252e5a7e9cf07e0 \ - --hash=sha256:b83ba3825bc91dfa989d4eed76865e71aea3a6ca1388b59fc801ee04c4d8d0d6 \ - --hash=sha256:1dd8fecbad028cd89d04a46688d2fcc14423e8a196d5b0a5c65105664901f810 \ +pydantic==1.9.1 ; python_full_version >= "3.7.1" and python_full_version < "4.0.0" \ --hash=sha256:02eefd7087268b711a3ff4db528e9916ac9aa18616da7bca69c1871d0b7a091f \ - --hash=sha256:7eb57ba90929bac0b6cc2af2373893d80ac559adda6933e562dcfb375029acee \ - --hash=sha256:4ce9ae9e91f46c344bec3b03d6ee9612802682c1551aaf627ad24045ce090761 \ - --hash=sha256:72ccb318bf0c9ab97fc04c10c37683d9eea952ed526707fabf9ac5ae59b701fd \ - --hash=sha256:61b6760b08b7c395975d893e0b814a11cf011ebb24f7d869e7118f5a339a82e1 \ + --hash=sha256:059b6c1795170809103a1538255883e1983e5b831faea6558ef873d4955b4a74 \ + --hash=sha256:0bf07cab5b279859c253d26a9194a8906e6f4a210063b84b433cf90a569de0c1 \ + --hash=sha256:1542636a39c4892c4f4fa6270696902acb186a9aaeac6f6cf92ce6ae2e88564b \ + --hash=sha256:177071dfc0df6248fd22b43036f936cfe2508077a72af0933d0c1fa269b18537 \ + --hash=sha256:18f3e912f9ad1bdec27fb06b8198a2ccc32f201e24174cec1b3424dda605a310 \ + --hash=sha256:1dd8fecbad028cd89d04a46688d2fcc14423e8a196d5b0a5c65105664901f810 \ + --hash=sha256:1ed987c3ff29fff7fd8c3ea3a3ea877ad310aae2ef9889a119e22d3f2db0691a \ + --hash=sha256:447d5521575f18e18240906beadc58551e97ec98142266e521c34968c76c8761 \ + --hash=sha256:494f7c8537f0c02b740c229af4cb47c0d39840b829ecdcfc93d91dcbb0779892 \ 
--hash=sha256:4988c0f13c42bfa9ddd2fe2f569c9d54646ce84adc5de84228cfe83396f3bd58 \ - --hash=sha256:1ed987c3ff29fff7fd8c3ea3a3ea877ad310aae2ef9889a119e22d3f2db0691a -pymacaroons==0.13.0 \ - --hash=sha256:3e14dff6a262fdbf1a15e769ce635a8aea72e6f8f91e408f9a97166c53b91907 \ - --hash=sha256:1e6bba42a5f66c245adf38a5a4006a99dcc06a0703786ea636098667d42903b8 -pynacl==1.5.0 ; python_version >= "3.6" \ + --hash=sha256:4ce9ae9e91f46c344bec3b03d6ee9612802682c1551aaf627ad24045ce090761 \ + --hash=sha256:5d93d4e95eacd313d2c765ebe40d49ca9dd2ed90e5b37d0d421c597af830c195 \ + --hash=sha256:61b6760b08b7c395975d893e0b814a11cf011ebb24f7d869e7118f5a339a82e1 \ + --hash=sha256:72ccb318bf0c9ab97fc04c10c37683d9eea952ed526707fabf9ac5ae59b701fd \ + --hash=sha256:79b485767c13788ee314669008d01f9ef3bc05db9ea3298f6a50d3ef596a154b \ + --hash=sha256:7eb57ba90929bac0b6cc2af2373893d80ac559adda6933e562dcfb375029acee \ + --hash=sha256:8bc541a405423ce0e51c19f637050acdbdf8feca34150e0d17f675e72d119580 \ + --hash=sha256:969dd06110cb780da01336b281f53e2e7eb3a482831df441fb65dd30403f4608 \ + --hash=sha256:985ceb5d0a86fcaa61e45781e567a59baa0da292d5ed2e490d612d0de5796918 \ + --hash=sha256:9bcf8b6e011be08fb729d110f3e22e654a50f8a826b0575c7196616780683380 \ + --hash=sha256:9ce157d979f742a915b75f792dbd6aa63b8eccaf46a1005ba03aa8a986bde34a \ + --hash=sha256:9f659a5ee95c8baa2436d392267988fd0f43eb774e5eb8739252e5a7e9cf07e0 \ + --hash=sha256:a4a88dcd6ff8fd47c18b3a3709a89adb39a6373f4482e04c1b765045c7e282fd \ + --hash=sha256:a955260d47f03df08acf45689bd163ed9df82c0e0124beb4251b1290fa7ae728 \ + --hash=sha256:a9af62e9b5b9bc67b2a195ebc2c2662fdf498a822d62f902bf27cccb52dbbf49 \ + --hash=sha256:ae72f8098acb368d877b210ebe02ba12585e77bd0db78ac04a1ee9b9f5dd2166 \ + --hash=sha256:b83ba3825bc91dfa989d4eed76865e71aea3a6ca1388b59fc801ee04c4d8d0d6 \ + --hash=sha256:c11951b404e08b01b151222a1cb1a9f0a860a8153ce8334149ab9199cd198131 \ + --hash=sha256:c320c64dd876e45254bdd350f0179da737463eea41c43bacbee9d8c9d1021f11 \ + --hash=sha256:c8098a724c2784bf03e8070993f6d46aa2eeca031f8d8a048dff277703e6e193 \ + --hash=sha256:d12f96b5b64bec3f43c8e82b4aab7599d0157f11c798c9f9c528a72b9e0b339a \ + --hash=sha256:e565a785233c2d03724c4dc55464559639b1ba9ecf091288dd47ad9c629433bd \ + --hash=sha256:f0f047e11febe5c3198ed346b507e1d010330d56ad615a7e0a89fae604065a0e \ + --hash=sha256:fe4670cb32ea98ffbf5a1262f14c3e102cccd92b1869df3bb09538158ba90fe6 +pymacaroons==0.13.0 ; python_full_version >= "3.7.1" and python_full_version < "4.0.0" \ + --hash=sha256:1e6bba42a5f66c245adf38a5a4006a99dcc06a0703786ea636098667d42903b8 \ + --hash=sha256:3e14dff6a262fdbf1a15e769ce635a8aea72e6f8f91e408f9a97166c53b91907 +pynacl==1.5.0 ; python_full_version >= "3.7.1" and python_full_version < "4.0.0" \ + --hash=sha256:06b8f6fa7f5de8d5d2f7573fe8c863c051225a27b61e6860fd047b1775807858 \ + --hash=sha256:0c84947a22519e013607c9be43706dd42513f9e6ae5d39d3613ca1e142fba44d \ + --hash=sha256:20f42270d27e1b6a29f54032090b972d97f0a1b0948cc52392041ef7831fee93 \ --hash=sha256:401002a4aaa07c9414132aaed7f6836ff98f59277a234704ff66878c2ee4a0d1 \ --hash=sha256:52cb72a79269189d4e0dc537556f4740f7f0a9ec41c1322598799b0bdad4ef92 \ - --hash=sha256:a36d4a9dda1f19ce6e03c9a784a2921a4b726b02e1c736600ca9c22029474394 \ - --hash=sha256:0c84947a22519e013607c9be43706dd42513f9e6ae5d39d3613ca1e142fba44d \ - --hash=sha256:06b8f6fa7f5de8d5d2f7573fe8c863c051225a27b61e6860fd047b1775807858 \ - --hash=sha256:a422368fc821589c228f4c49438a368831cb5bbc0eab5ebe1d7fac9dded6567b \ --hash=sha256:61f642bf2378713e2c2e1de73444a3778e5f0a38be6fee0fe532fe30060282ff \ - 
--hash=sha256:e46dae94e34b085175f8abb3b0aaa7da40767865ac82c928eeb9e57e1ea8a543 \ - --hash=sha256:20f42270d27e1b6a29f54032090b972d97f0a1b0948cc52392041ef7831fee93 \ - --hash=sha256:8ac7448f09ab85811607bdd21ec2464495ac8b7c66d146bf545b0f08fb9220ba -pyopenssl==22.0.0 \ - --hash=sha256:ea252b38c87425b64116f808355e8da644ef9b07e429398bfece610f893ee2e0 \ - --hash=sha256:660b1b1425aac4a1bea1d94168a85d99f0b3144c869dd4390d27629d0087f1bf -pyparsing==3.0.7 ; python_version >= "3.6" \ - --hash=sha256:a6c06a88f252e6c322f65faf8f418b16213b51bdfaece0524c1c1bc30c63c484 \ - --hash=sha256:18ee9022775d270c55187733956460083db60b37d0d0fb357445f3094eed3eea -pyrsistent==0.18.1 ; python_version >= "3.7" \ - --hash=sha256:df46c854f490f81210870e509818b729db4488e1f30f2a1ce1698b2295a878d1 \ - --hash=sha256:5d45866ececf4a5fff8742c25722da6d4c9e180daa7b405dc0a2a2790d668c26 \ - --hash=sha256:4ed6784ceac462a7d6fcb7e9b663e93b9a6fb373b7f43594f9ff68875788e01e \ - --hash=sha256:e4f3149fd5eb9b285d6bfb54d2e5173f6a116fe19172686797c056672689daf6 \ - --hash=sha256:636ce2dc235046ccd3d8c56a7ad54e99d5c1cd0ef07d9ae847306c91d11b5fec \ - --hash=sha256:e92a52c166426efbe0d1ec1332ee9119b6d32fc1f0bbfd55d5c1088070e7fc1b \ - --hash=sha256:d7a096646eab884bf8bed965bad63ea327e0d0c38989fc83c5ea7b8a87037bfc \ - --hash=sha256:cdfd2c361b8a8e5d9499b9082b501c452ade8bbf42aef97ea04854f4a3f43b22 \ - --hash=sha256:7ec335fc998faa4febe75cc5268a9eac0478b3f681602c1f27befaf2a1abe1d8 \ - --hash=sha256:6455fc599df93d1f60e1c5c4fe471499f08d190d57eca040c0ea182301321286 \ - --hash=sha256:fd8da6d0124efa2f67d86fa70c851022f87c98e205f0594e1fae044e7119a5a6 \ - --hash=sha256:7bfe2388663fd18bd8ce7db2c91c7400bf3e1a9e8bd7d63bf7e77d39051b85ec \ + --hash=sha256:8ac7448f09ab85811607bdd21ec2464495ac8b7c66d146bf545b0f08fb9220ba \ + --hash=sha256:a36d4a9dda1f19ce6e03c9a784a2921a4b726b02e1c736600ca9c22029474394 \ + --hash=sha256:a422368fc821589c228f4c49438a368831cb5bbc0eab5ebe1d7fac9dded6567b \ + --hash=sha256:e46dae94e34b085175f8abb3b0aaa7da40767865ac82c928eeb9e57e1ea8a543 +pyopenssl==22.0.0 ; python_full_version >= "3.7.1" and python_full_version < "4.0.0" \ + --hash=sha256:660b1b1425aac4a1bea1d94168a85d99f0b3144c869dd4390d27629d0087f1bf \ + --hash=sha256:ea252b38c87425b64116f808355e8da644ef9b07e429398bfece610f893ee2e0 +pyparsing==3.0.7 ; python_full_version >= "3.7.1" and python_full_version < "4.0.0" \ + --hash=sha256:18ee9022775d270c55187733956460083db60b37d0d0fb357445f3094eed3eea \ + --hash=sha256:a6c06a88f252e6c322f65faf8f418b16213b51bdfaece0524c1c1bc30c63c484 +pyrsistent==0.18.1 ; python_full_version >= "3.7.1" and python_full_version < "4.0.0" \ --hash=sha256:0e3e1fcc45199df76053026a51cc59ab2ea3fc7c094c6627e93b7b44cdae2c8c \ - --hash=sha256:b568f35ad53a7b07ed9b1b2bae09eb15cdd671a5ba5d2c66caee40dbf91c68ca \ - --hash=sha256:d1b96547410f76078eaf66d282ddca2e4baae8964364abb4f4dcdde855cd123a \ - --hash=sha256:f87cc2863ef33c709e237d4b5f4502a62a00fab450c9e020892e8e2ede5847f5 \ - --hash=sha256:6bc66318fb7ee012071b2792024564973ecc80e9522842eb4e17743604b5e045 \ - --hash=sha256:914474c9f1d93080338ace89cb2acee74f4f666fb0424896fcfb8d86058bf17c \ --hash=sha256:1b34eedd6812bf4d33814fca1b66005805d3640ce53140ab8bbb1e2651b0d9bc \ + --hash=sha256:4ed6784ceac462a7d6fcb7e9b663e93b9a6fb373b7f43594f9ff68875788e01e \ + --hash=sha256:5d45866ececf4a5fff8742c25722da6d4c9e180daa7b405dc0a2a2790d668c26 \ + --hash=sha256:636ce2dc235046ccd3d8c56a7ad54e99d5c1cd0ef07d9ae847306c91d11b5fec \ + --hash=sha256:6455fc599df93d1f60e1c5c4fe471499f08d190d57eca040c0ea182301321286 \ + 
--hash=sha256:6bc66318fb7ee012071b2792024564973ecc80e9522842eb4e17743604b5e045 \ + --hash=sha256:7bfe2388663fd18bd8ce7db2c91c7400bf3e1a9e8bd7d63bf7e77d39051b85ec \ + --hash=sha256:7ec335fc998faa4febe75cc5268a9eac0478b3f681602c1f27befaf2a1abe1d8 \ + --hash=sha256:914474c9f1d93080338ace89cb2acee74f4f666fb0424896fcfb8d86058bf17c \ + --hash=sha256:b568f35ad53a7b07ed9b1b2bae09eb15cdd671a5ba5d2c66caee40dbf91c68ca \ + --hash=sha256:cdfd2c361b8a8e5d9499b9082b501c452ade8bbf42aef97ea04854f4a3f43b22 \ + --hash=sha256:d1b96547410f76078eaf66d282ddca2e4baae8964364abb4f4dcdde855cd123a \ + --hash=sha256:d4d61f8b993a7255ba714df3aca52700f8125289f84f704cf80916517c46eb96 \ + --hash=sha256:d7a096646eab884bf8bed965bad63ea327e0d0c38989fc83c5ea7b8a87037bfc \ + --hash=sha256:df46c854f490f81210870e509818b729db4488e1f30f2a1ce1698b2295a878d1 \ --hash=sha256:e24a828f57e0c337c8d8bb9f6b12f09dfdf0273da25fda9e314f0b684b415a07 \ - --hash=sha256:d4d61f8b993a7255ba714df3aca52700f8125289f84f704cf80916517c46eb96 -pyyaml==6.0 \ - --hash=sha256:d4db7c7aef085872ef65a8fd7d6d09a14ae91f691dec3e87ee5ee0539d516f53 \ - --hash=sha256:9df7ed3b3d2e0ecfe09e14741b857df43adb5a3ddadc919a2d94fbdf78fea53c \ - --hash=sha256:77f396e6ef4c73fdc33a9157446466f1cff553d979bd00ecb64385760c6babdc \ - --hash=sha256:a80a78046a72361de73f8f395f1f1e49f956c6be882eed58505a15f3e430962b \ - --hash=sha256:f84fbc98b019fef2ee9a1cb3ce93e3187a6df0b2538a651bfb890254ba9f90b5 \ - --hash=sha256:2cd5df3de48857ed0544b34e2d40e9fac445930039f3cfe4bcc592a1f836d513 \ - --hash=sha256:daf496c58a8c52083df09b80c860005194014c3698698d1a57cbcfa182142a3a \ - --hash=sha256:897b80890765f037df3403d22bab41627ca8811ae55e9a722fd0392850ec4d86 \ - --hash=sha256:50602afada6d6cbfad699b0c7bb50d5ccffa7e46a3d738092afddc1f9758427f \ - --hash=sha256:48c346915c114f5fdb3ead70312bd042a953a8ce5c7106d5bfb1a5254e47da92 \ - --hash=sha256:98c4d36e99714e55cfbaaee6dd5badbc9a1ec339ebfc3b1f52e293aee6bb71a4 \ + --hash=sha256:e4f3149fd5eb9b285d6bfb54d2e5173f6a116fe19172686797c056672689daf6 \ + --hash=sha256:e92a52c166426efbe0d1ec1332ee9119b6d32fc1f0bbfd55d5c1088070e7fc1b \ + --hash=sha256:f87cc2863ef33c709e237d4b5f4502a62a00fab450c9e020892e8e2ede5847f5 \ + --hash=sha256:fd8da6d0124efa2f67d86fa70c851022f87c98e205f0594e1fae044e7119a5a6 +pyyaml==6.0 ; python_full_version >= "3.7.1" and python_full_version < "4.0.0" \ --hash=sha256:0283c35a6a9fbf047493e3a0ce8d79ef5030852c51e9d911a27badfde0605293 \ - --hash=sha256:07751360502caac1c067a8132d150cf3d61339af5691fe9e87803040dbc5db57 \ - --hash=sha256:819b3830a1543db06c4d4b865e70ded25be52a2e0631ccd2f6a47a2822f2fd7c \ - --hash=sha256:473f9edb243cb1935ab5a084eb238d842fb8f404ed2193a915d1784b5a6b5fc0 \ - --hash=sha256:0ce82d761c532fe4ec3f87fc45688bdd3a4c1dc5e0b4a19814b9009a29baefd4 \ - --hash=sha256:231710d57adfd809ef5d34183b8ed1eeae3f76459c18fb4a0b373ad56bedcdd9 \ - --hash=sha256:c5687b8d43cf58545ade1fe3e055f70eac7a5a1a0bf42824308d868289a95737 \ - --hash=sha256:d15a181d1ecd0d4270dc32edb46f7cb7733c7c508857278d3d378d14d606db2d \ - --hash=sha256:0b4624f379dab24d3725ffde76559cff63d9ec94e1736b556dacdfebe5ab6d4b \ - --hash=sha256:213c60cd50106436cc818accf5baa1aba61c0189ff610f64f4a3e8c6726218ba \ - --hash=sha256:9fa600030013c4de8165339db93d182b9431076eb98eb40ee068700c9c813e34 \ - --hash=sha256:277a0ef2981ca40581a47093e9e2d13b3f1fbbeffae064c1d21bfceba2030287 \ - --hash=sha256:d4eccecf9adf6fbcc6861a38015c2a64f38b9d94838ac1810a9023a0609e1b78 \ - --hash=sha256:1e4747bc279b4f613a09eb64bba2ba602d8a6664c6ce6396a4d0cd413a50ce07 \ 
--hash=sha256:055d937d65826939cb044fc8c9b08889e8c743fdc6a32b33e2390f66013e449b \ - --hash=sha256:e61ceaab6f49fb8bdfaa0f92c4b57bcfbea54c09277b1b4f7ac376bfb7a7c174 \ - --hash=sha256:d67d839ede4ed1b28a4e8909735fc992a923cdb84e618544973d7dfc71540803 \ - --hash=sha256:cba8c411ef271aa037d7357a2bc8f9ee8b58b9965831d9e51baf703280dc73d3 \ + --hash=sha256:07751360502caac1c067a8132d150cf3d61339af5691fe9e87803040dbc5db57 \ + --hash=sha256:0b4624f379dab24d3725ffde76559cff63d9ec94e1736b556dacdfebe5ab6d4b \ + --hash=sha256:0ce82d761c532fe4ec3f87fc45688bdd3a4c1dc5e0b4a19814b9009a29baefd4 \ + --hash=sha256:1e4747bc279b4f613a09eb64bba2ba602d8a6664c6ce6396a4d0cd413a50ce07 \ + --hash=sha256:213c60cd50106436cc818accf5baa1aba61c0189ff610f64f4a3e8c6726218ba \ + --hash=sha256:231710d57adfd809ef5d34183b8ed1eeae3f76459c18fb4a0b373ad56bedcdd9 \ + --hash=sha256:277a0ef2981ca40581a47093e9e2d13b3f1fbbeffae064c1d21bfceba2030287 \ + --hash=sha256:2cd5df3de48857ed0544b34e2d40e9fac445930039f3cfe4bcc592a1f836d513 \ --hash=sha256:40527857252b61eacd1d9af500c3337ba8deb8fc298940291486c465c8b46ec0 \ - --hash=sha256:b5b9eccad747aabaaffbc6064800670f0c297e52c12754eb1d976c57e4f74dcb \ + --hash=sha256:473f9edb243cb1935ab5a084eb238d842fb8f404ed2193a915d1784b5a6b5fc0 \ + --hash=sha256:48c346915c114f5fdb3ead70312bd042a953a8ce5c7106d5bfb1a5254e47da92 \ + --hash=sha256:50602afada6d6cbfad699b0c7bb50d5ccffa7e46a3d738092afddc1f9758427f \ + --hash=sha256:68fb519c14306fec9720a2a5b45bc9f0c8d1b9c72adf45c37baedfcd949c35a2 \ + --hash=sha256:77f396e6ef4c73fdc33a9157446466f1cff553d979bd00ecb64385760c6babdc \ + --hash=sha256:819b3830a1543db06c4d4b865e70ded25be52a2e0631ccd2f6a47a2822f2fd7c \ + --hash=sha256:897b80890765f037df3403d22bab41627ca8811ae55e9a722fd0392850ec4d86 \ + --hash=sha256:98c4d36e99714e55cfbaaee6dd5badbc9a1ec339ebfc3b1f52e293aee6bb71a4 \ + --hash=sha256:9df7ed3b3d2e0ecfe09e14741b857df43adb5a3ddadc919a2d94fbdf78fea53c \ + --hash=sha256:9fa600030013c4de8165339db93d182b9431076eb98eb40ee068700c9c813e34 \ + --hash=sha256:a80a78046a72361de73f8f395f1f1e49f956c6be882eed58505a15f3e430962b \ --hash=sha256:b3d267842bf12586ba6c734f89d1f5b871df0273157918b0ccefa29deb05c21c \ - --hash=sha256:68fb519c14306fec9720a2a5b45bc9f0c8d1b9c72adf45c37baedfcd949c35a2 -requests==2.27.1 ; python_full_version >= "3.6.0" \ - --hash=sha256:f22fa1e554c9ddfd16e6e41ac79759e17be9e492b3587efa038054674760e72d \ - --hash=sha256:68d7c56fd5a8999887728ef304a6d12edc7be74f1cfa47714fc8b414525c9a61 -service-identity==21.1.0 \ + --hash=sha256:b5b9eccad747aabaaffbc6064800670f0c297e52c12754eb1d976c57e4f74dcb \ + --hash=sha256:c5687b8d43cf58545ade1fe3e055f70eac7a5a1a0bf42824308d868289a95737 \ + --hash=sha256:cba8c411ef271aa037d7357a2bc8f9ee8b58b9965831d9e51baf703280dc73d3 \ + --hash=sha256:d15a181d1ecd0d4270dc32edb46f7cb7733c7c508857278d3d378d14d606db2d \ + --hash=sha256:d4db7c7aef085872ef65a8fd7d6d09a14ae91f691dec3e87ee5ee0539d516f53 \ + --hash=sha256:d4eccecf9adf6fbcc6861a38015c2a64f38b9d94838ac1810a9023a0609e1b78 \ + --hash=sha256:d67d839ede4ed1b28a4e8909735fc992a923cdb84e618544973d7dfc71540803 \ + --hash=sha256:daf496c58a8c52083df09b80c860005194014c3698698d1a57cbcfa182142a3a \ + --hash=sha256:e61ceaab6f49fb8bdfaa0f92c4b57bcfbea54c09277b1b4f7ac376bfb7a7c174 \ + --hash=sha256:f84fbc98b019fef2ee9a1cb3ce93e3187a6df0b2538a651bfb890254ba9f90b5 +requests==2.27.1 ; python_full_version >= "3.7.1" and python_full_version < "4.0.0" \ + --hash=sha256:68d7c56fd5a8999887728ef304a6d12edc7be74f1cfa47714fc8b414525c9a61 \ + 
--hash=sha256:f22fa1e554c9ddfd16e6e41ac79759e17be9e492b3587efa038054674760e72d +service-identity==21.1.0 ; python_full_version >= "3.7.1" and python_full_version < "4.0.0" \ --hash=sha256:6e6c6086ca271dc11b033d17c3a8bea9f24ebff920c587da090afc9519419d34 \ --hash=sha256:f0b0caac3d40627c3c04d7a51b6e06721857a0e10a8775f2d1d7e72901b3a7db -signedjson==1.1.4 \ +setuptools==65.3.0 ; python_full_version >= "3.7.1" and python_full_version < "4.0.0" \ + --hash=sha256:2e24e0bec025f035a2e72cdd1961119f557d78ad331bb00ff82efb2ab8da8e82 \ + --hash=sha256:7732871f4f7fa58fb6bdcaeadb0161b2bd046c85905dbaa066bdcbcc81953b57 +signedjson==1.1.4 ; python_full_version >= "3.7.1" and python_full_version < "4.0.0" \ --hash=sha256:45569ec54241c65d2403fe3faf7169be5322547706a231e884ca2b427f23d228 \ --hash=sha256:cd91c56af53f169ef032c62e9c4a3292dc158866933318d0592e3462db3d6492 -simplejson==3.17.6 ; python_version >= "3.7" and python_version < "4.0" \ - --hash=sha256:a89acae02b2975b1f8e4974cb8cdf9bf9f6c91162fb8dec50c259ce700f2770a \ - --hash=sha256:82ff356ff91be0ab2293fc6d8d262451eb6ac4fd999244c4b5f863e049ba219c \ - --hash=sha256:0de783e9c2b87bdd75b57efa2b6260c24b94605b5c9843517577d40ee0c3cc8a \ - --hash=sha256:d24a9e61df7a7787b338a58abfba975414937b609eb6b18973e25f573bc0eeeb \ - --hash=sha256:e8603e691580487f11306ecb066c76f1f4a8b54fb3bdb23fa40643a059509366 \ - --hash=sha256:9b01e7b00654115965a206e3015f0166674ec1e575198a62a977355597c0bef5 \ - --hash=sha256:37bc0cf0e5599f36072077e56e248f3336917ded1d33d2688624d8ed3cefd7d2 \ - --hash=sha256:cf6e7d5fe2aeb54898df18db1baf479863eae581cce05410f61f6b4188c8ada1 \ - --hash=sha256:bdfc54b4468ed4cd7415928cbe782f4d782722a81aeb0f81e2ddca9932632211 \ - --hash=sha256:dd16302d39c4d6f4afde80edd0c97d4db643327d355a312762ccd9bd2ca515ed \ - --hash=sha256:deac4bdafa19bbb89edfb73b19f7f69a52d0b5bd3bb0c4ad404c1bbfd7b4b7fd \ - --hash=sha256:a8bbdb166e2fb816e43ab034c865147edafe28e1b19c72433147789ac83e2dda \ - --hash=sha256:a7854326920d41c3b5d468154318fe6ba4390cb2410480976787c640707e0180 \ +simplejson==3.17.6 ; python_full_version >= "3.7.1" and python_version < "4.0" \ --hash=sha256:04e31fa6ac8e326480703fb6ded1488bfa6f1d3f760d32e29dbf66d0838982ce \ - --hash=sha256:f63600ec06982cdf480899026f4fda622776f5fabed9a869fdb32d72bc17e99a \ - --hash=sha256:e03c3b8cc7883a54c3f34a6a135c4a17bc9088a33f36796acdb47162791b02f6 \ - --hash=sha256:a2d30d6c1652140181dc6861f564449ad71a45e4f165a6868c27d36745b65d40 \ - --hash=sha256:a1aa6e4cae8e3b8d5321be4f51c5ce77188faf7baa9fe1e78611f93a8eed2882 \ - --hash=sha256:97202f939c3ff341fc3fa84d15db86156b1edc669424ba20b0a1fcd4a796a045 \ - --hash=sha256:80d3bc9944be1d73e5b1726c3bbfd2628d3d7fe2880711b1eb90b617b9b8ac70 \ - --hash=sha256:9fa621b3c0c05d965882c920347b6593751b7ab20d8fa81e426f1735ca1a9fc7 \ - --hash=sha256:dd2fb11922f58df8528adfca123f6a84748ad17d066007e7ac977720063556bd \ - --hash=sha256:724c1fe135aa437d5126138d977004d165a3b5e2ee98fc4eb3e7c0ef645e7e27 \ - --hash=sha256:4ff4ac6ff3aa8f814ac0f50bf218a2e1a434a17aafad4f0400a57a8cc62ef17f \ - --hash=sha256:67093a526e42981fdd954868062e56c9b67fdd7e712616cc3265ad0c210ecb51 \ - --hash=sha256:5d6b4af7ad7e4ac515bc6e602e7b79e2204e25dbd10ab3aa2beef3c5a9cad2c7 \ - --hash=sha256:1c9b1ed7ed282b36571638297525f8ef80f34b3e2d600a56f962c6044f24200d \ - --hash=sha256:632ecbbd2228575e6860c9e49ea3cc5423764d5aa70b92acc4e74096fb434044 \ - --hash=sha256:4c09868ddb86bf79b1feb4e3e7e4a35cd6e61ddb3452b54e20cf296313622566 \ - --hash=sha256:4b6bd8144f15a491c662f06814bd8eaa54b17f26095bb775411f39bacaf66837 \ - 
--hash=sha256:5decdc78849617917c206b01e9fc1d694fd58caa961be816cb37d3150d613d9a \ - --hash=sha256:521877c7bd060470806eb6335926e27453d740ac1958eaf0d8c00911bc5e1802 \ - --hash=sha256:65b998193bd7b0c7ecdfffbc825d808eac66279313cb67d8892bb259c9d91494 \ - --hash=sha256:ac786f6cb7aa10d44e9641c7a7d16d7f6e095b138795cd43503769d4154e0dc2 \ - --hash=sha256:3ff5b3464e1ce86a8de8c88e61d4836927d5595c2162cab22e96ff551b916e81 \ - --hash=sha256:69bd56b1d257a91e763256d63606937ae4eb890b18a789b66951c00062afec33 \ - --hash=sha256:b81076552d34c27e5149a40187a8f7e2abb2d3185576a317aaf14aeeedad862a \ - --hash=sha256:07ecaafc1b1501f275bf5acdee34a4ad33c7c24ede287183ea77a02dc071e0c0 \ --hash=sha256:068670af975247acbb9fc3d5393293368cda17026db467bf7a51548ee8f17ee1 \ - --hash=sha256:4d1c135af0c72cb28dd259cf7ba218338f4dc027061262e46fe058b4e6a4c6a3 \ - --hash=sha256:23fe704da910ff45e72543cbba152821685a889cf00fc58d5c8ee96a9bad5f94 \ - --hash=sha256:f444762fed1bc1fd75187ef14a20ed900c1fbb245d45be9e834b822a0223bc81 \ - --hash=sha256:681eb4d37c9a9a6eb9b3245a5e89d7f7b2b9895590bb08a20aa598c1eb0a1d9d \ - --hash=sha256:8e8607d8f6b4f9d46fee11447e334d6ab50e993dd4dbfb22f674616ce20907ab \ - --hash=sha256:b10556817f09d46d420edd982dd0653940b90151d0576f09143a8e773459f6fe \ - --hash=sha256:e1ec8a9ee0987d4524ffd6299e778c16cc35fef6d1a2764e609f90962f0b293a \ + --hash=sha256:07ecaafc1b1501f275bf5acdee34a4ad33c7c24ede287183ea77a02dc071e0c0 \ --hash=sha256:0b4126cac7d69ac06ff22efd3e0b3328a4a70624fcd6bca4fc1b4e6d9e2e12bf \ - --hash=sha256:35a49ebef25f1ebdef54262e54ae80904d8692367a9f208cdfbc38dbf649e00a \ - --hash=sha256:743cd768affaa508a21499f4858c5b824ffa2e1394ed94eb85caf47ac0732198 \ - --hash=sha256:fb62d517a516128bacf08cb6a86ecd39fb06d08e7c4980251f5d5601d29989ba \ + --hash=sha256:0de783e9c2b87bdd75b57efa2b6260c24b94605b5c9843517577d40ee0c3cc8a \ --hash=sha256:12133863178a8080a3dccbf5cb2edfab0001bc41e5d6d2446af2a1131105adfe \ - --hash=sha256:5540fba2d437edaf4aa4fbb80f43f42a8334206ad1ad3b27aef577fd989f20d9 \ - --hash=sha256:d74ee72b5071818a1a5dab47338e87f08a738cb938a3b0653b9e4d959ddd1fd9 \ + --hash=sha256:1c9b1ed7ed282b36571638297525f8ef80f34b3e2d600a56f962c6044f24200d \ + --hash=sha256:23fe704da910ff45e72543cbba152821685a889cf00fc58d5c8ee96a9bad5f94 \ --hash=sha256:28221620f4dcabdeac310846629b976e599a13f59abb21616356a85231ebd6ad \ - --hash=sha256:b09bc62e5193e31d7f9876220fb429ec13a6a181a24d897b9edfbbdbcd678851 \ - --hash=sha256:7255a37ff50593c9b2f1afa8fafd6ef5763213c1ed5a9e2c6f5b9cc925ab979f \ - --hash=sha256:401d40969cee3df7bda211e57b903a534561b77a7ade0dd622a8d1a31eaa8ba7 \ - --hash=sha256:a649d0f66029c7eb67042b15374bd93a26aae202591d9afd71e111dd0006b198 \ - --hash=sha256:522fad7be85de57430d6d287c4b635813932946ebf41b913fe7e880d154ade2e \ + --hash=sha256:35a49ebef25f1ebdef54262e54ae80904d8692367a9f208cdfbc38dbf649e00a \ + --hash=sha256:37bc0cf0e5599f36072077e56e248f3336917ded1d33d2688624d8ed3cefd7d2 \ --hash=sha256:3fe87570168b2ae018391e2b43fbf66e8593a86feccb4b0500d134c998983ccc \ - --hash=sha256:cf98038d2abf63a1ada5730e91e84c642ba6c225b0198c3684151b1f80c5f8a6 -six==1.16.0 ; python_version >= "2.7" and python_full_version < "3.0.0" or python_full_version >= "3.3.0" or python_version >= "2.7" and python_full_version < "3.0.0" and platform_python_implementation == "PyPy" or python_full_version >= "3.3.0" and platform_python_implementation == "PyPy" \ - --hash=sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254 \ - --hash=sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926 -sortedcontainers==2.4.0 \ - 
--hash=sha256:a163dcaede0f1c021485e957a39245190e74249897e2ae4b2aa38595db237ee0 \ - --hash=sha256:25caa5a06cc30b6b83d11423433f65d1f9d76c4c6a0c90e3379eaa43b9bfdb88 -treq==22.2.0 \ + --hash=sha256:3ff5b3464e1ce86a8de8c88e61d4836927d5595c2162cab22e96ff551b916e81 \ + --hash=sha256:401d40969cee3df7bda211e57b903a534561b77a7ade0dd622a8d1a31eaa8ba7 \ + --hash=sha256:4b6bd8144f15a491c662f06814bd8eaa54b17f26095bb775411f39bacaf66837 \ + --hash=sha256:4c09868ddb86bf79b1feb4e3e7e4a35cd6e61ddb3452b54e20cf296313622566 \ + --hash=sha256:4d1c135af0c72cb28dd259cf7ba218338f4dc027061262e46fe058b4e6a4c6a3 \ + --hash=sha256:4ff4ac6ff3aa8f814ac0f50bf218a2e1a434a17aafad4f0400a57a8cc62ef17f \ + --hash=sha256:521877c7bd060470806eb6335926e27453d740ac1958eaf0d8c00911bc5e1802 \ + --hash=sha256:522fad7be85de57430d6d287c4b635813932946ebf41b913fe7e880d154ade2e \ + --hash=sha256:5540fba2d437edaf4aa4fbb80f43f42a8334206ad1ad3b27aef577fd989f20d9 \ + --hash=sha256:5d6b4af7ad7e4ac515bc6e602e7b79e2204e25dbd10ab3aa2beef3c5a9cad2c7 \ + --hash=sha256:5decdc78849617917c206b01e9fc1d694fd58caa961be816cb37d3150d613d9a \ + --hash=sha256:632ecbbd2228575e6860c9e49ea3cc5423764d5aa70b92acc4e74096fb434044 \ + --hash=sha256:65b998193bd7b0c7ecdfffbc825d808eac66279313cb67d8892bb259c9d91494 \ + --hash=sha256:67093a526e42981fdd954868062e56c9b67fdd7e712616cc3265ad0c210ecb51 \ + --hash=sha256:681eb4d37c9a9a6eb9b3245a5e89d7f7b2b9895590bb08a20aa598c1eb0a1d9d \ + --hash=sha256:69bd56b1d257a91e763256d63606937ae4eb890b18a789b66951c00062afec33 \ + --hash=sha256:724c1fe135aa437d5126138d977004d165a3b5e2ee98fc4eb3e7c0ef645e7e27 \ + --hash=sha256:7255a37ff50593c9b2f1afa8fafd6ef5763213c1ed5a9e2c6f5b9cc925ab979f \ + --hash=sha256:743cd768affaa508a21499f4858c5b824ffa2e1394ed94eb85caf47ac0732198 \ + --hash=sha256:80d3bc9944be1d73e5b1726c3bbfd2628d3d7fe2880711b1eb90b617b9b8ac70 \ + --hash=sha256:82ff356ff91be0ab2293fc6d8d262451eb6ac4fd999244c4b5f863e049ba219c \ + --hash=sha256:8e8607d8f6b4f9d46fee11447e334d6ab50e993dd4dbfb22f674616ce20907ab \ + --hash=sha256:97202f939c3ff341fc3fa84d15db86156b1edc669424ba20b0a1fcd4a796a045 \ + --hash=sha256:9b01e7b00654115965a206e3015f0166674ec1e575198a62a977355597c0bef5 \ + --hash=sha256:9fa621b3c0c05d965882c920347b6593751b7ab20d8fa81e426f1735ca1a9fc7 \ + --hash=sha256:a1aa6e4cae8e3b8d5321be4f51c5ce77188faf7baa9fe1e78611f93a8eed2882 \ + --hash=sha256:a2d30d6c1652140181dc6861f564449ad71a45e4f165a6868c27d36745b65d40 \ + --hash=sha256:a649d0f66029c7eb67042b15374bd93a26aae202591d9afd71e111dd0006b198 \ + --hash=sha256:a7854326920d41c3b5d468154318fe6ba4390cb2410480976787c640707e0180 \ + --hash=sha256:a89acae02b2975b1f8e4974cb8cdf9bf9f6c91162fb8dec50c259ce700f2770a \ + --hash=sha256:a8bbdb166e2fb816e43ab034c865147edafe28e1b19c72433147789ac83e2dda \ + --hash=sha256:ac786f6cb7aa10d44e9641c7a7d16d7f6e095b138795cd43503769d4154e0dc2 \ + --hash=sha256:b09bc62e5193e31d7f9876220fb429ec13a6a181a24d897b9edfbbdbcd678851 \ + --hash=sha256:b10556817f09d46d420edd982dd0653940b90151d0576f09143a8e773459f6fe \ + --hash=sha256:b81076552d34c27e5149a40187a8f7e2abb2d3185576a317aaf14aeeedad862a \ + --hash=sha256:bdfc54b4468ed4cd7415928cbe782f4d782722a81aeb0f81e2ddca9932632211 \ + --hash=sha256:cf6e7d5fe2aeb54898df18db1baf479863eae581cce05410f61f6b4188c8ada1 \ + --hash=sha256:cf98038d2abf63a1ada5730e91e84c642ba6c225b0198c3684151b1f80c5f8a6 \ + --hash=sha256:d24a9e61df7a7787b338a58abfba975414937b609eb6b18973e25f573bc0eeeb \ + --hash=sha256:d74ee72b5071818a1a5dab47338e87f08a738cb938a3b0653b9e4d959ddd1fd9 \ + 
--hash=sha256:dd16302d39c4d6f4afde80edd0c97d4db643327d355a312762ccd9bd2ca515ed \ + --hash=sha256:dd2fb11922f58df8528adfca123f6a84748ad17d066007e7ac977720063556bd \ + --hash=sha256:deac4bdafa19bbb89edfb73b19f7f69a52d0b5bd3bb0c4ad404c1bbfd7b4b7fd \ + --hash=sha256:e03c3b8cc7883a54c3f34a6a135c4a17bc9088a33f36796acdb47162791b02f6 \ + --hash=sha256:e1ec8a9ee0987d4524ffd6299e778c16cc35fef6d1a2764e609f90962f0b293a \ + --hash=sha256:e8603e691580487f11306ecb066c76f1f4a8b54fb3bdb23fa40643a059509366 \ + --hash=sha256:f444762fed1bc1fd75187ef14a20ed900c1fbb245d45be9e834b822a0223bc81 \ + --hash=sha256:f63600ec06982cdf480899026f4fda622776f5fabed9a869fdb32d72bc17e99a \ + --hash=sha256:fb62d517a516128bacf08cb6a86ecd39fb06d08e7c4980251f5d5601d29989ba +six==1.16.0 ; python_full_version >= "3.7.1" and python_full_version < "4.0.0" \ + --hash=sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926 \ + --hash=sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254 +sortedcontainers==2.4.0 ; python_full_version >= "3.7.1" and python_full_version < "4.0.0" \ + --hash=sha256:25caa5a06cc30b6b83d11423433f65d1f9d76c4c6a0c90e3379eaa43b9bfdb88 \ + --hash=sha256:a163dcaede0f1c021485e957a39245190e74249897e2ae4b2aa38595db237ee0 +treq==22.2.0 ; python_full_version >= "3.7.1" and python_full_version < "4.0.0" \ --hash=sha256:27d95b07c5c14be3e7b280416139b036087617ad5595be913b1f9b3ce981b9b2 \ --hash=sha256:df757e3f141fc782ede076a604521194ffcb40fa2645cf48e5a37060307f52ec -twisted-iocpsupport==1.0.2 ; platform_system == "Windows" and python_full_version >= "3.6.7" \ - --hash=sha256:72068b206ee809c9c596b57b5287259ea41ddb4774d86725b19f35bf56aa32a9 \ - --hash=sha256:985c06a33f5c0dae92c71a036d1ea63872ee86a21dd9b01e1f287486f15524b4 \ - --hash=sha256:81b3abe3527b367da0220482820cb12a16c661672b7bcfcde328902890d63323 \ - --hash=sha256:9dbb8823b49f06d4de52721b47de4d3b3026064ef4788ce62b1a21c57c3fff6f \ - --hash=sha256:b9fed67cf0f951573f06d560ac2f10f2a4bbdc6697770113a2fc396ea2cb2565 \ - --hash=sha256:b76b4eed9b27fd63ddb0877efdd2d15835fdcb6baa745cb85b66e5d016ac2878 \ - --hash=sha256:851b3735ca7e8102e661872390e3bce88f8901bece95c25a0c8bb9ecb8a23d32 \ - --hash=sha256:bf4133139d77fc706d8f572e6b7d82871d82ec7ef25d685c2351bdacfb701415 \ +twisted-iocpsupport==1.0.2 ; python_full_version >= "3.7.1" and python_full_version < "4.0.0" and platform_system == "Windows" \ --hash=sha256:306becd6e22ab6e8e4f36b6bdafd9c92e867c98a5ce517b27fdd27760ee7ae41 \ --hash=sha256:3c61742cb0bc6c1ac117a7e5f422c129832f0c295af49e01d8a6066df8cfc04d \ + --hash=sha256:72068b206ee809c9c596b57b5287259ea41ddb4774d86725b19f35bf56aa32a9 \ + --hash=sha256:7d972cfa8439bdcb35a7be78b7ef86d73b34b808c74be56dfa785c8a93b851bf \ + --hash=sha256:81b3abe3527b367da0220482820cb12a16c661672b7bcfcde328902890d63323 \ + --hash=sha256:851b3735ca7e8102e661872390e3bce88f8901bece95c25a0c8bb9ecb8a23d32 \ + --hash=sha256:985c06a33f5c0dae92c71a036d1ea63872ee86a21dd9b01e1f287486f15524b4 \ + --hash=sha256:9dbb8823b49f06d4de52721b47de4d3b3026064ef4788ce62b1a21c57c3fff6f \ --hash=sha256:b435857b9efcbfc12f8c326ef0383f26416272260455bbca2cd8d8eca470c546 \ - --hash=sha256:7d972cfa8439bdcb35a7be78b7ef86d73b34b808c74be56dfa785c8a93b851bf -twisted==22.4.0 \ - --hash=sha256:f9f7a91f94932477a9fc3b169d57f54f96c6e74a23d78d9ce54039a7f48928a2 \ - --hash=sha256:a047990f57dfae1e0bd2b7df2526d4f16dcdc843774dc108b78c52f2a5f13680 -txredisapi==1.4.7 \ + --hash=sha256:b76b4eed9b27fd63ddb0877efdd2d15835fdcb6baa745cb85b66e5d016ac2878 \ + 
--hash=sha256:b9fed67cf0f951573f06d560ac2f10f2a4bbdc6697770113a2fc396ea2cb2565 \ + --hash=sha256:bf4133139d77fc706d8f572e6b7d82871d82ec7ef25d685c2351bdacfb701415 +twisted==22.4.0 ; python_full_version >= "3.7.1" and python_full_version < "4.0.0" \ + --hash=sha256:a047990f57dfae1e0bd2b7df2526d4f16dcdc843774dc108b78c52f2a5f13680 \ + --hash=sha256:f9f7a91f94932477a9fc3b169d57f54f96c6e74a23d78d9ce54039a7f48928a2 +twisted[tls]==22.4.0 ; python_full_version >= "3.7.1" and python_full_version < "4.0.0" \ + --hash=sha256:a047990f57dfae1e0bd2b7df2526d4f16dcdc843774dc108b78c52f2a5f13680 \ + --hash=sha256:f9f7a91f94932477a9fc3b169d57f54f96c6e74a23d78d9ce54039a7f48928a2 +txredisapi==1.4.7 ; python_full_version >= "3.7.1" and python_full_version < "4.0.0" \ --hash=sha256:34c9eba8d34f452d30661f073b67b8cd42b695e3d31678ec1bbf628a65a0f059 \ --hash=sha256:e6cc43f51e35d608abdca8f8c7d20e148fe1d82679f6e584baea613ebec812bb -typing-extensions==4.1.1 \ - --hash=sha256:21c85e0fe4b9a155d0799430b0ad741cdce7e359660ccbd8b530613e8df88ce2 \ - --hash=sha256:1a9462dcc3347a79b1f1c0271fbe79e844580bb598bafa1ed208b94da3cdcd42 -unpaddedbase64==2.1.0 \ +typing-extensions==4.1.1 ; python_full_version >= "3.7.1" and python_full_version < "4.0.0" \ + --hash=sha256:1a9462dcc3347a79b1f1c0271fbe79e844580bb598bafa1ed208b94da3cdcd42 \ + --hash=sha256:21c85e0fe4b9a155d0799430b0ad741cdce7e359660ccbd8b530613e8df88ce2 +unpaddedbase64==2.1.0 ; python_full_version >= "3.7.1" and python_version < "4.0" \ --hash=sha256:485eff129c30175d2cd6f0cd8d2310dff51e666f7f36175f738d75dfdbd0b1c6 \ --hash=sha256:7273c60c089de39d90f5d6d4a7883a79e319dc9d9b1c8924a7fab96178a5f005 -urllib3==1.26.8 ; python_full_version >= "3.6.0" and python_version < "4" \ +urllib3==1.26.8 ; python_full_version >= "3.7.1" and python_version < "4" \ --hash=sha256:000ca7f471a233c2251c6c7023ee85305721bfdf18621ebff4fd17a8653427ed \ --hash=sha256:0e7c33d9a63e7ddfcb86780aac87befc2fbddf46c58dbb487e0855f7ceec283c -webencodings==0.5.1 ; python_version >= "3.6" \ +webencodings==0.5.1 ; python_full_version >= "3.7.1" and python_full_version < "4.0.0" \ --hash=sha256:a0af1213f3c2226497a97e2b3aa01a7e4bee4f403f95be16fc9acd2947514a78 \ --hash=sha256:b36a1c245f2d304965eb4e0a82848379241dc04b865afcc4aab16748587e1923 -zipp==3.7.0 ; python_version >= "3.7" and python_version < "3.8" or python_version < "3.9" and python_version >= "3.7" \ - --hash=sha256:b47250dd24f92b7dd6a0a8fc5244da14608f3ca90a5efcd37a3b1642fac9a375 \ - --hash=sha256:9f50f446828eb9d45b267433fd3e9da8d801f614129124863f9c51ebceafb87d -zope.interface==5.4.0 ; python_full_version >= "3.6.7" \ - --hash=sha256:7df1e1c05304f26faa49fa752a8c690126cf98b40b91d54e6e9cc3b7d6ffe8b7 \ - --hash=sha256:2c98384b254b37ce50eddd55db8d381a5c53b4c10ee66e1e7fe749824f894021 \ +zipp==3.7.0 ; python_full_version >= "3.7.1" and python_version < "3.9" \ + --hash=sha256:9f50f446828eb9d45b267433fd3e9da8d801f614129124863f9c51ebceafb87d \ + --hash=sha256:b47250dd24f92b7dd6a0a8fc5244da14608f3ca90a5efcd37a3b1642fac9a375 +zope-interface==5.4.0 ; python_full_version >= "3.7.1" and python_full_version < "4.0.0" \ --hash=sha256:08f9636e99a9d5410181ba0729e0408d3d8748026ea938f3b970a0249daa8192 \ - --hash=sha256:0ea1d73b7c9dcbc5080bb8aaffb776f1c68e807767069b9ccdd06f27a161914a \ - --hash=sha256:273f158fabc5ea33cbc936da0ab3d4ba80ede5351babc4f577d768e057651531 \ - --hash=sha256:a1e6e96217a0f72e2b8629e271e1b280c6fa3fe6e59fa8f6701bec14e3354325 \ - --hash=sha256:877473e675fdcc113c138813a5dd440da0769a2d81f4d86614e5d62b69497155 \ - 
--hash=sha256:f7ee479e96f7ee350db1cf24afa5685a5899e2b34992fb99e1f7c1b0b758d263 \ - --hash=sha256:b0297b1e05fd128d26cc2460c810d42e205d16d76799526dfa8c8ccd50e74959 \ - --hash=sha256:af310ec8335016b5e52cae60cda4a4f2a60a788cbb949a4fbea13d441aa5a09e \ - --hash=sha256:9a9845c4c6bb56e508651f005c4aeb0404e518c6f000d5a1123ab077ab769f5c \ --hash=sha256:0b465ae0962d49c68aa9733ba92a001b2a0933c317780435f00be7ecb959c702 \ - --hash=sha256:5dd9ca406499444f4c8299f803d4a14edf7890ecc595c8b1c7115c2342cadc5f \ - --hash=sha256:469e2407e0fe9880ac690a3666f03eb4c3c444411a5a5fddfdabc5d184a79f05 \ - --hash=sha256:52de7fc6c21b419078008f697fd4103dbc763288b1406b4562554bd47514c004 \ - --hash=sha256:3dd4952748521205697bc2802e4afac5ed4b02909bb799ba1fe239f77fd4e117 \ - --hash=sha256:dd93ea5c0c7f3e25335ab7d22a507b1dc43976e1345508f845efc573d3d779d8 \ - --hash=sha256:3748fac0d0f6a304e674955ab1365d515993b3a0a865e16a11ec9d86fb307f63 \ - --hash=sha256:66c0061c91b3b9cf542131148ef7ecbecb2690d48d1612ec386de9d36766058f \ - --hash=sha256:d0c1bc2fa9a7285719e5678584f6b92572a5b639d0e471bb8d4b650a1a910920 \ - --hash=sha256:2876246527c91e101184f63ccd1d716ec9c46519cc5f3d5375a3351c46467c46 \ - --hash=sha256:334701327f37c47fa628fc8b8d28c7d7730ce7daaf4bda1efb741679c2b087fc \ - --hash=sha256:71aace0c42d53abe6fc7f726c5d3b60d90f3c5c055a447950ad6ea9cec2e37d9 \ - --hash=sha256:5bb3489b4558e49ad2c5118137cfeaf59434f9737fa9c5deefc72d22c23822e2 \ - --hash=sha256:1c0e316c9add0db48a5b703833881351444398b04111188069a26a61cfb4df78 \ - --hash=sha256:6f0c02cbb9691b7c91d5009108f975f8ffeab5dff8f26d62e21c493060eff2a1 \ - --hash=sha256:7d97a4306898b05404a0dcdc32d9709b7d8832c0c542b861d9a826301719794e \ - --hash=sha256:867a5ad16892bf20e6c4ea2aab1971f45645ff3102ad29bd84c86027fa99997b \ - --hash=sha256:5f931a1c21dfa7a9c573ec1f50a31135ccce84e32507c54e1ea404894c5eb96f \ - --hash=sha256:194d0bcb1374ac3e1e023961610dc8f2c78a0f5f634d0c737691e215569e640d \ - --hash=sha256:8270252effc60b9642b423189a2fe90eb6b59e87cbee54549db3f5562ff8d1b8 \ - --hash=sha256:15e7d1f7a6ee16572e21e3576d2012b2778cbacf75eb4b7400be37455f5ca8bf \ - --hash=sha256:8892f89999ffd992208754851e5a052f6b5db70a1e3f7d54b17c5211e37a98c7 \ - --hash=sha256:2e5a26f16503be6c826abca904e45f1a44ff275fdb7e9d1b75c10671c26f8b94 \ - --hash=sha256:0f91b5b948686659a8e28b728ff5e74b1be6bf40cb04704453617e5f1e945ef3 \ - --hash=sha256:4de4bc9b6d35c5af65b454d3e9bc98c50eb3960d5a3762c9438df57427134b8e \ - --hash=sha256:bf68f4b2b6683e52bec69273562df15af352e5ed25d1b6641e7efddc5951d1a7 \ - --hash=sha256:63b82bb63de7c821428d513607e84c6d97d58afd1fe2eb645030bdc185440120 \ - --hash=sha256:db1fa631737dab9fa0b37f3979d8d2631e348c3b4e8325d6873c2541d0ae5a48 \ - --hash=sha256:f44e517131a98f7a76696a7b21b164bcb85291cee106a23beccce454e1f433a4 \ - --hash=sha256:a9506a7e80bcf6eacfff7f804c0ad5350c8c95b9010e4356a4b36f5322f09abb \ - --hash=sha256:3c02411a3b62668200910090a0dff17c0b25aaa36145082a5a6adf08fa281e54 \ - --hash=sha256:0cee5187b60ed26d56eb2960136288ce91bcf61e2a9405660d271d1f122a69a4 \ - --hash=sha256:a8156e6a7f5e2a0ff0c5b21d6bcb45145efece1909efcbbbf48c56f8da68221d \ - --hash=sha256:205e40ccde0f37496904572035deea747390a8b7dc65146d30b96e2dd1359a83 \ - --hash=sha256:3f24df7124c323fceb53ff6168da70dbfbae1442b4f3da439cd441681f54fe25 \ - --hash=sha256:5208ebd5152e040640518a77827bdfcc73773a15a33d6644015b763b9c9febc1 \ - --hash=sha256:17776ecd3a1fdd2b2cd5373e5ef8b307162f581c693575ec62e7c5399d80794c \ - --hash=sha256:d4d9d6c1a455d4babd320203b918ccc7fcbefe308615c521062bc2ba1aa4d26e \ 
--hash=sha256:0cba8477e300d64a11a9789ed40ee8932b59f9ee05f85276dbb4b59acee5dd09 \ - --hash=sha256:5dba5f530fec3f0988d83b78cc591b58c0b6eb8431a85edd1569a0539a8a5a0e + --hash=sha256:0cee5187b60ed26d56eb2960136288ce91bcf61e2a9405660d271d1f122a69a4 \ + --hash=sha256:0ea1d73b7c9dcbc5080bb8aaffb776f1c68e807767069b9ccdd06f27a161914a \ + --hash=sha256:0f91b5b948686659a8e28b728ff5e74b1be6bf40cb04704453617e5f1e945ef3 \ + --hash=sha256:15e7d1f7a6ee16572e21e3576d2012b2778cbacf75eb4b7400be37455f5ca8bf \ + --hash=sha256:17776ecd3a1fdd2b2cd5373e5ef8b307162f581c693575ec62e7c5399d80794c \ + --hash=sha256:194d0bcb1374ac3e1e023961610dc8f2c78a0f5f634d0c737691e215569e640d \ + --hash=sha256:1c0e316c9add0db48a5b703833881351444398b04111188069a26a61cfb4df78 \ + --hash=sha256:205e40ccde0f37496904572035deea747390a8b7dc65146d30b96e2dd1359a83 \ + --hash=sha256:273f158fabc5ea33cbc936da0ab3d4ba80ede5351babc4f577d768e057651531 \ + --hash=sha256:2876246527c91e101184f63ccd1d716ec9c46519cc5f3d5375a3351c46467c46 \ + --hash=sha256:2c98384b254b37ce50eddd55db8d381a5c53b4c10ee66e1e7fe749824f894021 \ + --hash=sha256:2e5a26f16503be6c826abca904e45f1a44ff275fdb7e9d1b75c10671c26f8b94 \ + --hash=sha256:334701327f37c47fa628fc8b8d28c7d7730ce7daaf4bda1efb741679c2b087fc \ + --hash=sha256:3748fac0d0f6a304e674955ab1365d515993b3a0a865e16a11ec9d86fb307f63 \ + --hash=sha256:3c02411a3b62668200910090a0dff17c0b25aaa36145082a5a6adf08fa281e54 \ + --hash=sha256:3dd4952748521205697bc2802e4afac5ed4b02909bb799ba1fe239f77fd4e117 \ + --hash=sha256:3f24df7124c323fceb53ff6168da70dbfbae1442b4f3da439cd441681f54fe25 \ + --hash=sha256:469e2407e0fe9880ac690a3666f03eb4c3c444411a5a5fddfdabc5d184a79f05 \ + --hash=sha256:4de4bc9b6d35c5af65b454d3e9bc98c50eb3960d5a3762c9438df57427134b8e \ + --hash=sha256:5208ebd5152e040640518a77827bdfcc73773a15a33d6644015b763b9c9febc1 \ + --hash=sha256:52de7fc6c21b419078008f697fd4103dbc763288b1406b4562554bd47514c004 \ + --hash=sha256:5bb3489b4558e49ad2c5118137cfeaf59434f9737fa9c5deefc72d22c23822e2 \ + --hash=sha256:5dba5f530fec3f0988d83b78cc591b58c0b6eb8431a85edd1569a0539a8a5a0e \ + --hash=sha256:5dd9ca406499444f4c8299f803d4a14edf7890ecc595c8b1c7115c2342cadc5f \ + --hash=sha256:5f931a1c21dfa7a9c573ec1f50a31135ccce84e32507c54e1ea404894c5eb96f \ + --hash=sha256:63b82bb63de7c821428d513607e84c6d97d58afd1fe2eb645030bdc185440120 \ + --hash=sha256:66c0061c91b3b9cf542131148ef7ecbecb2690d48d1612ec386de9d36766058f \ + --hash=sha256:6f0c02cbb9691b7c91d5009108f975f8ffeab5dff8f26d62e21c493060eff2a1 \ + --hash=sha256:71aace0c42d53abe6fc7f726c5d3b60d90f3c5c055a447950ad6ea9cec2e37d9 \ + --hash=sha256:7d97a4306898b05404a0dcdc32d9709b7d8832c0c542b861d9a826301719794e \ + --hash=sha256:7df1e1c05304f26faa49fa752a8c690126cf98b40b91d54e6e9cc3b7d6ffe8b7 \ + --hash=sha256:8270252effc60b9642b423189a2fe90eb6b59e87cbee54549db3f5562ff8d1b8 \ + --hash=sha256:867a5ad16892bf20e6c4ea2aab1971f45645ff3102ad29bd84c86027fa99997b \ + --hash=sha256:877473e675fdcc113c138813a5dd440da0769a2d81f4d86614e5d62b69497155 \ + --hash=sha256:8892f89999ffd992208754851e5a052f6b5db70a1e3f7d54b17c5211e37a98c7 \ + --hash=sha256:9a9845c4c6bb56e508651f005c4aeb0404e518c6f000d5a1123ab077ab769f5c \ + --hash=sha256:a1e6e96217a0f72e2b8629e271e1b280c6fa3fe6e59fa8f6701bec14e3354325 \ + --hash=sha256:a8156e6a7f5e2a0ff0c5b21d6bcb45145efece1909efcbbbf48c56f8da68221d \ + --hash=sha256:a9506a7e80bcf6eacfff7f804c0ad5350c8c95b9010e4356a4b36f5322f09abb \ + --hash=sha256:af310ec8335016b5e52cae60cda4a4f2a60a788cbb949a4fbea13d441aa5a09e \ + 
--hash=sha256:b0297b1e05fd128d26cc2460c810d42e205d16d76799526dfa8c8ccd50e74959 \ + --hash=sha256:bf68f4b2b6683e52bec69273562df15af352e5ed25d1b6641e7efddc5951d1a7 \ + --hash=sha256:d0c1bc2fa9a7285719e5678584f6b92572a5b639d0e471bb8d4b650a1a910920 \ + --hash=sha256:d4d9d6c1a455d4babd320203b918ccc7fcbefe308615c521062bc2ba1aa4d26e \ + --hash=sha256:db1fa631737dab9fa0b37f3979d8d2631e348c3b4e8325d6873c2541d0ae5a48 \ + --hash=sha256:dd93ea5c0c7f3e25335ab7d22a507b1dc43976e1345508f845efc573d3d779d8 \ + --hash=sha256:f44e517131a98f7a76696a7b21b164bcb85291cee106a23beccce454e1f433a4 \ + --hash=sha256:f7ee479e96f7ee350db1cf24afa5685a5899e2b34992fb99e1f7c1b0b758d263 diff --git a/scripts-dev/release.py b/scripts-dev/release.py index 46220c4dd..6603bc593 100755 --- a/scripts-dev/release.py +++ b/scripts-dev/release.py @@ -18,10 +18,12 @@ """ import glob +import json import os import re import subprocess import sys +import time import urllib.request from os import path from tempfile import TemporaryDirectory @@ -71,18 +73,21 @@ def cli() -> None: ./scripts-dev/release.py tag - # ... wait for assets to build ... + # wait for assets to build, either manually or with: + ./scripts-dev/release.py wait-for-actions ./scripts-dev/release.py publish ./scripts-dev/release.py upload - # Optional: generate some nice links for the announcement - ./scripts-dev/release.py merge-back + # Optional: generate some nice links for the announcement ./scripts-dev/release.py announce + Alternatively, `./scripts-dev/release.py full` will do all the above + as well as guiding you through the manual steps. + If the env var GH_TOKEN (or GITHUB_TOKEN) is set, or passed into the `tag`/`publish` command, then a new draft release will be created/published. """ @@ -90,6 +95,10 @@ def cli() -> None: @cli.command() def prepare() -> None: + _prepare() + + +def _prepare() -> None: """Do the initial stages of creating a release, including creating release branch, updating changelog and pushing to GitHub. """ @@ -284,6 +293,10 @@ def prepare() -> None: @cli.command() @click.option("--gh-token", envvar=["GH_TOKEN", "GITHUB_TOKEN"]) def tag(gh_token: Optional[str]) -> None: + _tag(gh_token) + + +def _tag(gh_token: Optional[str]) -> None: """Tags the release and generates a draft GitHub release""" # Make sure we're in a git repo. @@ -374,6 +387,10 @@ def tag(gh_token: Optional[str]) -> None: @cli.command() @click.option("--gh-token", envvar=["GH_TOKEN", "GITHUB_TOKEN"], required=True) def publish(gh_token: str) -> None: + _publish(gh_token) + + +def _publish(gh_token: str) -> None: """Publish release on GitHub.""" # Make sure we're in a git repo. @@ -411,6 +428,10 @@ def publish(gh_token: str) -> None: @cli.command() def upload() -> None: + _upload() + + +def _upload() -> None: """Upload release to pypi.""" current_version = get_package_version() @@ -479,8 +500,75 @@ def _merge_into(repo: Repo, source: str, target: str) -> None: repo.remote().push() +@cli.command() +@click.option("--gh-token", envvar=["GH_TOKEN", "GITHUB_TOKEN"], required=False) +def wait_for_actions(gh_token: Optional[str]) -> None: + _wait_for_actions(gh_token) + + +def _wait_for_actions(gh_token: Optional[str]) -> None: + # Find out the version and tag name. + current_version = get_package_version() + tag_name = f"v{current_version}" + + # Authentication is optional on this endpoint, + # but use a token if we have one to reduce the chance of being rate-limited. 
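+    # (A sketch of the response shape we rely on, from GitHub's "list workflow
+    # runs for a repository" REST endpoint; only these two fields are assumed:
+    #     {"workflow_runs": [{"status": "completed", "conclusion": "success"}]}
+    # "status" may be e.g. "queued", "in_progress" or "completed", and
+    # "conclusion" reports the outcome once a run has finished.)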
+ url = f"https://api.github.com/repos/matrix-org/synapse/actions/runs?branch={tag_name}" + headers = {"Accept": "application/vnd.github+json"} + if gh_token is not None: + headers["authorization"] = f"token {gh_token}" + req = urllib.request.Request(url, headers=headers) + + time.sleep(10 * 60) + while True: + time.sleep(5 * 60) + response = urllib.request.urlopen(req) + resp = json.loads(response.read()) + + if len(resp["workflow_runs"]) == 0: + continue + + if all( + workflow["status"] != "in_progress" for workflow in resp["workflow_runs"] + ): + success = ( + workflow["status"] == "completed" for workflow in resp["workflow_runs"] + ) + if success: + _notify("Workflows successful. You can now continue the release.") + else: + _notify("Workflows failed.") + click.confirm("Continue anyway?", abort=True) + + break + + +def _notify(message: str) -> None: + # Send a bell character. Most terminals will play a sound or show a notification + # for this. + click.echo(f"\a{message}") + + # Try and run notify-send, but don't raise an Exception if this fails + # (This is best-effort) + # TODO Support other platforms? + subprocess.run( + [ + "notify-send", + "--app-name", + "Synapse Release Script", + "--expire-time", + "3600000", + message, + ] + ) + + @cli.command() def merge_back() -> None: + _merge_back() + + +def _merge_back() -> None: """Merge the release branch back into the appropriate branches. All branches will be automatically pulled from the remote and the results will be pushed to the remote.""" @@ -519,6 +607,10 @@ def merge_back() -> None: @cli.command() def announce() -> None: + _announce() + + +def _announce() -> None: """Generate markdown to announce the release.""" current_version = get_package_version() @@ -548,10 +640,56 @@ Announce the release in - #homeowners:matrix.org (Synapse Announcements), bumping the version in the topic - #synapse:matrix.org (Synapse Admins), bumping the version in the topic - #synapse-dev:matrix.org -- #synapse-package-maintainers:matrix.org""" +- #synapse-package-maintainers:matrix.org + +Ask the designated people to do the blog and tweets.""" ) +@cli.command() +@click.option("--gh-token", envvar=["GH_TOKEN", "GITHUB_TOKEN"], required=True) +def full(gh_token: str) -> None: + click.echo("1. If this is a security release, read the security wiki page.") + click.echo("2. Check for any release blockers before proceeding.") + click.echo(" https://github.com/matrix-org/synapse/labels/X-Release-Blocker") + + click.confirm("Ready?", abort=True) + + click.echo("\n*** prepare ***") + _prepare() + + click.echo("Deploy to matrix.org and ensure that it hasn't fallen over.") + click.echo("Remember to silence the alerts to prevent alert spam.") + click.confirm("Deployed?", abort=True) + + click.echo("\n*** tag ***") + _tag(gh_token) + + click.echo("\n*** wait for actions ***") + _wait_for_actions(gh_token) + + click.echo("\n*** publish ***") + _publish(gh_token) + + click.echo("\n*** upload ***") + _upload() + + click.echo("\n*** merge back ***") + _merge_back() + + click.echo("\nUpdate the Debian repository") + click.confirm("Started updating Debian repository?", abort=True) + + click.echo("\nWait for all release methods to be ready.") + # Docker should be ready because it was done by the workflows earlier + # PyPI should be ready because we just ran upload(). 
+    # TODO Automatically poll until the Debs have made it to packages.matrix.org
+    click.confirm("Debs ready?", abort=True)
+
+    click.echo("\n*** announce ***")
+    _announce()
+
+
 def get_package_version() -> version.Version:
     version_string = subprocess.check_output(["poetry", "version", "--short"]).decode(
         "utf-8"
diff --git a/synapse/_scripts/register_new_matrix_user.py b/synapse/_scripts/register_new_matrix_user.py
index 092601f53..0c4504d5d 100644
--- a/synapse/_scripts/register_new_matrix_user.py
+++ b/synapse/_scripts/register_new_matrix_user.py
@@ -1,6 +1,6 @@
 # Copyright 2015, 2016 OpenMarket Ltd
 # Copyright 2018 New Vector
-# Copyright 2021 The Matrix.org Foundation C.I.C.
+# Copyright 2021-22 The Matrix.org Foundation C.I.C.
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
@@ -20,11 +20,22 @@ import hashlib
 import hmac
 import logging
 import sys
-from typing import Callable, Optional
+from typing import Any, Callable, Dict, Optional
 
 import requests
 import yaml
 
+_CONFLICTING_SHARED_SECRET_OPTS_ERROR = """\
+Conflicting options 'registration_shared_secret' and 'registration_shared_secret_path'
+are both defined in the config file.
+"""
+
+_NO_SHARED_SECRET_OPTS_ERROR = """\
+No 'registration_shared_secret' or 'registration_shared_secret_path' defined in the
+config file.
+"""
+
+_DEFAULT_SERVER_URL = "http://localhost:8008"
+
 
 def request_registration(
     user: str,
@@ -203,31 +214,104 @@ def main() -> None:
 
     parser.add_argument(
         "server_url",
-        default="https://localhost:8448",
         nargs="?",
-        help="URL to use to talk to the homeserver. Defaults to "
-        " 'https://localhost:8448'.",
+        help="URL to use to talk to the homeserver. By default, tries to find a "
+        "suitable URL from the configuration file. Otherwise, defaults to "
+        f"'{_DEFAULT_SERVER_URL}'.",
     )
 
     args = parser.parse_args()
 
+    # may stay None if no config file was given on the command line
+    config: Optional[Dict[str, Any]] = None
     if "config" in args and args.config:
         config = yaml.safe_load(args.config)
-        secret = config.get("registration_shared_secret", None)
-        if not secret:
-            print("No 'registration_shared_secret' defined in config.")
-            sys.exit(1)
-    else:
+
+    if args.shared_secret:
         secret = args.shared_secret
+    else:
+        # argparse should check that we have either config or shared secret
+        assert config
+
+        secret = config.get("registration_shared_secret")
+        secret_file = config.get("registration_shared_secret_path")
+        if secret_file:
+            if secret:
+                print(_CONFLICTING_SHARED_SECRET_OPTS_ERROR, file=sys.stderr)
+                sys.exit(1)
+            secret = _read_file(secret_file, "registration_shared_secret_path").strip()
+        if not secret:
+            print(_NO_SHARED_SECRET_OPTS_ERROR, file=sys.stderr)
+            sys.exit(1)
+
+    if args.server_url:
+        server_url = args.server_url
+    elif config:
+        server_url = _find_client_listener(config)
+        if not server_url:
+            server_url = _DEFAULT_SERVER_URL
+            print(
+                "Unable to find a suitable HTTP listener in the configuration file. "
+                f"Trying {server_url} as a last resort.",
+                file=sys.stderr,
+            )
+    else:
+        server_url = _DEFAULT_SERVER_URL
+        print(
+            f"No server URL or configuration file given. Defaulting to {server_url}.",
+            file=sys.stderr,
+        )
 
     admin = None
     if args.admin or args.no_admin:
         admin = args.admin
 
     register_new_user(
-        args.user, args.password, args.server_url, secret, admin, args.user_type
+        args.user, args.password, server_url, secret, admin, args.user_type
     )
 
 
+def _read_file(file_path: Any, config_path: str) -> str:
+    """Check the given file exists, and read it into a string
+
+    If it does not exist, exit with an error indicating the problem.
+
+    Args:
+        file_path: the file to be read
+        config_path: the config setting that file_path came from, so that a useful
+            error can be emitted if it does not exist.
+    Returns:
+        content of the file.
+    """
+    if not isinstance(file_path, str):
+        print(f"{config_path} setting is not a string", file=sys.stderr)
+        sys.exit(1)
+
+    try:
+        with open(file_path) as file_stream:
+            return file_stream.read()
+    except OSError as e:
+        print(f"Error accessing file {file_path}: {e}", file=sys.stderr)
+        sys.exit(1)
+
+
+def _find_client_listener(config: Dict[str, Any]) -> Optional[str]:
+    # Try to find a suitable client listener in the config. Returns a base URL
+    # such as "http://localhost:8008" (not a bare host:port pair), or None if
+    # no suitable listener is configured.
+    #
+    # As a sketch, this would match a typical homeserver.yaml listener block
+    # along these lines:
+    #
+    #     listeners:
+    #       - port: 8008
+    #         type: http
+    #         tls: false
+    #         resources:
+    #           - names: [client, federation]
+    for listener in config.get("listeners", []):
+        if listener.get("type") != "http" or listener.get("tls", False):
+            continue
+
+        if not any(
+            name == "client"
+            for resource in listener.get("resources", [])
+            for name in resource.get("names", [])
+        ):
+            continue
+
+        # TODO: consider bind_addresses
+        return f"http://localhost:{listener['port']}"
+
+    # no suitable listeners?
+    return None
+
+
 if __name__ == "__main__":
     main()
diff --git a/synapse/api/constants.py b/synapse/api/constants.py
index c73aea622..c178ddf07 100644
--- a/synapse/api/constants.py
+++ b/synapse/api/constants.py
@@ -258,7 +258,6 @@ class GuestAccess:
 class ReceiptTypes:
     READ: Final = "m.read"
     READ_PRIVATE: Final = "m.read.private"
-    UNSTABLE_READ_PRIVATE: Final = "org.matrix.msc2285.read.private"
     FULLY_READ: Final = "m.fully_read"
 
 
diff --git a/synapse/api/filtering.py b/synapse/api/filtering.py
index b00714751..102889ac4 100644
--- a/synapse/api/filtering.py
+++ b/synapse/api/filtering.py
@@ -140,13 +140,13 @@ USER_FILTER_SCHEMA = {
 
 
 @FormatChecker.cls_checks("matrix_room_id")
-def matrix_room_id_validator(room_id_str: str) -> RoomID:
-    return RoomID.from_string(room_id_str)
+def matrix_room_id_validator(room_id_str: str) -> bool:
+    return RoomID.is_valid(room_id_str)
 
 
 @FormatChecker.cls_checks("matrix_user_id")
-def matrix_user_id_validator(user_id_str: str) -> UserID:
-    return UserID.from_string(user_id_str)
+def matrix_user_id_validator(user_id_str: str) -> bool:
+    return UserID.is_valid(user_id_str)
 
 
 class Filtering:
diff --git a/synapse/app/_base.py b/synapse/app/_base.py
index 923891ae0..9a24bed0a 100644
--- a/synapse/app/_base.py
+++ b/synapse/app/_base.py
@@ -266,15 +266,48 @@ def register_start(
     reactor.callWhenRunning(lambda: defer.ensureDeferred(wrapper()))
 
 
-def listen_metrics(bind_addresses: Iterable[str], port: int) -> None:
+def listen_metrics(
+    bind_addresses: Iterable[str], port: int, enable_legacy_metric_names: bool
+) -> None:
     """
     Start Prometheus metrics server.
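+
+    When enable_legacy_metric_names is set, the legacy exporter is used, which
+    publishes both the legacy and the current metric names; otherwise only the
+    standard prometheus_client names are served.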
""" - from synapse.metrics import RegistryProxy, start_http_server + from prometheus_client import start_http_server as start_http_server_prometheus + + from synapse.metrics import ( + RegistryProxy, + start_http_server as start_http_server_legacy, + ) for host in bind_addresses: logger.info("Starting metrics listener on %s:%d", host, port) - start_http_server(port, addr=host, registry=RegistryProxy) + if enable_legacy_metric_names: + start_http_server_legacy(port, addr=host, registry=RegistryProxy) + else: + _set_prometheus_client_use_created_metrics(False) + start_http_server_prometheus(port, addr=host, registry=RegistryProxy) + + +def _set_prometheus_client_use_created_metrics(new_value: bool) -> None: + """ + Sets whether prometheus_client should expose `_created`-suffixed metrics for + all gauges, histograms and summaries. + There is no programmatic way to disable this without poking at internals; + the proper way is to use an environment variable which prometheus_client + loads at import time. + + The motivation for disabling these `_created` metrics is that they're + a waste of space as they're not useful but they take up space in Prometheus. + """ + + import prometheus_client.metrics + + if hasattr(prometheus_client.metrics, "_use_created"): + prometheus_client.metrics._use_created = new_value + else: + logger.error( + "Can't disable `_created` metrics in prometheus_client (brittle hack broken?)" + ) def listen_manhole( @@ -478,9 +511,10 @@ async def start(hs: "HomeServer") -> None: setup_sentry(hs) setup_sdnotify(hs) - # If background tasks are running on the main process, start collecting the - # phone home stats. + # If background tasks are running on the main process or this is the worker in + # charge of them, start collecting the phone home stats and shared usage metrics. if hs.config.worker.run_background_tasks: + await hs.get_common_usage_metrics_manager().setup() start_phone_stats_home(hs) # We now freeze all allocated objects in the hopes that (almost) diff --git a/synapse/app/generic_worker.py b/synapse/app/generic_worker.py index 30e21d970..5e3825fca 100644 --- a/synapse/app/generic_worker.py +++ b/synapse/app/generic_worker.py @@ -412,7 +412,11 @@ class GenericWorkerServer(HomeServer): "enable_metrics is not True!" 
) else: - _base.listen_metrics(listener.bind_addresses, listener.port) + _base.listen_metrics( + listener.bind_addresses, + listener.port, + enable_legacy_metric_names=self.config.metrics.enable_legacy_metrics, + ) else: logger.warning("Unsupported listener type: %s", listener.type) diff --git a/synapse/app/homeserver.py b/synapse/app/homeserver.py index 68993d91a..883f2fd2e 100644 --- a/synapse/app/homeserver.py +++ b/synapse/app/homeserver.py @@ -57,7 +57,6 @@ from synapse.http.site import SynapseSite from synapse.logging.context import LoggingContext from synapse.metrics import METRICS_PREFIX, MetricsResource, RegistryProxy from synapse.replication.http import REPLICATION_PREFIX, ReplicationRestResource -from synapse.replication.tcp.resource import ReplicationStreamProtocolFactory from synapse.rest import ClientRestResource from synapse.rest.admin import AdminRestResource from synapse.rest.health import HealthResource @@ -290,16 +289,6 @@ class SynapseHomeServer(HomeServer): manhole_settings=self.config.server.manhole_settings, manhole_globals={"hs": self}, ) - elif listener.type == "replication": - services = listen_tcp( - listener.bind_addresses, - listener.port, - ReplicationStreamProtocolFactory(self), - ) - for s in services: - self.get_reactor().addSystemEventTrigger( - "before", "shutdown", s.stopListening - ) elif listener.type == "metrics": if not self.config.metrics.enable_metrics: logger.warning( @@ -307,7 +296,11 @@ class SynapseHomeServer(HomeServer): "enable_metrics is not True!" ) else: - _base.listen_metrics(listener.bind_addresses, listener.port) + _base.listen_metrics( + listener.bind_addresses, + listener.port, + enable_legacy_metric_names=self.config.metrics.enable_legacy_metrics, + ) else: # this shouldn't happen, as the listener type should have been checked # during parsing diff --git a/synapse/app/phone_stats_home.py b/synapse/app/phone_stats_home.py index 40dbdace8..51c8d1571 100644 --- a/synapse/app/phone_stats_home.py +++ b/synapse/app/phone_stats_home.py @@ -51,6 +51,16 @@ async def phone_stats_home( stats: JsonDict, stats_process: List[Tuple[int, "resource.struct_rusage"]] = _stats_process, ) -> None: + """Collect usage statistics and send them to the configured endpoint. + + Args: + hs: the HomeServer object to use for gathering usage data. + stats: the dict in which to store the statistics sent to the configured + endpoint. Mostly used in tests to figure out the data that is supposed to + be sent. + stats_process: statistics about resource usage of the process. + """ + logger.info("Gathering stats for reporting") now = int(hs.get_clock().time()) # Ensure the homeserver has started. 
@@ -83,6 +93,7 @@ async def phone_stats_home( # store = hs.get_datastores().main + common_metrics = await hs.get_common_usage_metrics_manager().get_metrics() stats["homeserver"] = hs.config.server.server_name stats["server_context"] = hs.config.server.server_context @@ -104,7 +115,7 @@ async def phone_stats_home( room_count = await store.get_room_count() stats["total_room_count"] = room_count - stats["daily_active_users"] = await store.count_daily_users() + stats["daily_active_users"] = common_metrics.daily_active_users stats["monthly_active_users"] = await store.count_monthly_users() daily_active_e2ee_rooms = await store.count_daily_active_e2ee_rooms() stats["daily_active_e2ee_rooms"] = daily_active_e2ee_rooms diff --git a/synapse/config/_base.py b/synapse/config/_base.py index 7c9cf403e..1f6362aed 100644 --- a/synapse/config/_base.py +++ b/synapse/config/_base.py @@ -20,6 +20,7 @@ import logging import os import re from collections import OrderedDict +from enum import Enum, auto from hashlib import sha256 from textwrap import dedent from typing import ( @@ -603,18 +604,44 @@ class RootConfig: " may specify directories containing *.yaml files.", ) - generate_group = parser.add_argument_group("Config generation") - generate_group.add_argument( - "--generate-config", - action="store_true", - help="Generate a config file, then exit.", + # we nest the mutually-exclusive group inside another group so that the help + # text shows them in their own group. + generate_mode_group = parser.add_argument_group( + "Config generation mode", ) - generate_group.add_argument( + generate_mode_exclusive = generate_mode_group.add_mutually_exclusive_group() + generate_mode_exclusive.add_argument( + # hidden option to make the type and default work + "--generate-mode", + help=argparse.SUPPRESS, + type=_ConfigGenerateMode, + default=_ConfigGenerateMode.GENERATE_MISSING_AND_RUN, + ) + generate_mode_exclusive.add_argument( + "--generate-config", + help="Generate a config file, then exit.", + action="store_const", + const=_ConfigGenerateMode.GENERATE_EVERYTHING_AND_EXIT, + dest="generate_mode", + ) + generate_mode_exclusive.add_argument( "--generate-missing-configs", "--generate-keys", - action="store_true", help="Generate any missing additional config files, then exit.", + action="store_const", + const=_ConfigGenerateMode.GENERATE_MISSING_AND_EXIT, + dest="generate_mode", ) + generate_mode_exclusive.add_argument( + "--generate-missing-and-run", + help="Generate any missing additional config files, then run. This is the " + "default behaviour.", + action="store_const", + const=_ConfigGenerateMode.GENERATE_MISSING_AND_RUN, + dest="generate_mode", + ) + + generate_group = parser.add_argument_group("Details for --generate-config") generate_group.add_argument( "-H", "--server-name", help="The server name to generate a config file for." 
         )
@@ -670,11 +697,12 @@ class RootConfig:
             config_dir_path = os.path.abspath(config_dir_path)
             data_dir_path = os.getcwd()
 
-        generate_missing_configs = config_args.generate_missing_configs
-
         obj = cls(config_files)
 
-        if config_args.generate_config:
+        if (
+            config_args.generate_mode
+            == _ConfigGenerateMode.GENERATE_EVERYTHING_AND_EXIT
+        ):
             if config_args.report_stats is None:
                 parser.error(
                     "Please specify either --report-stats=yes or --report-stats=no\n\n"
@@ -732,11 +760,14 @@ class RootConfig:
                 )
                 % (config_path,)
             )
-            generate_missing_configs = True
 
         config_dict = read_config_files(config_files)
-        if generate_missing_configs:
-            obj.generate_missing_files(config_dict, config_dir_path)
+        obj.generate_missing_files(config_dict, config_dir_path)
+
+        if config_args.generate_mode in (
+            _ConfigGenerateMode.GENERATE_EVERYTHING_AND_EXIT,
+            _ConfigGenerateMode.GENERATE_MISSING_AND_EXIT,
+        ):
             return None
 
         obj.parse_config_dict(
@@ -965,6 +996,12 @@ def read_file(file_path: Any, config_path: Iterable[str]) -> str:
         raise ConfigError("Error accessing file %r" % (file_path,), config_path) from e
 
 
+class _ConfigGenerateMode(Enum):
+    GENERATE_MISSING_AND_RUN = auto()
+    GENERATE_MISSING_AND_EXIT = auto()
+    GENERATE_EVERYTHING_AND_EXIT = auto()
+
+
 __all__ = [
     "Config",
     "RootConfig",
diff --git a/synapse/config/experimental.py b/synapse/config/experimental.py
index c1ff41753..702b81e63 100644
--- a/synapse/config/experimental.py
+++ b/synapse/config/experimental.py
@@ -32,9 +32,6 @@ class ExperimentalConfig(Config):
         # MSC2716 (importing historical messages)
         self.msc2716_enabled: bool = experimental.get("msc2716_enabled", False)
 
-        # MSC2285 (unstable private read receipts)
-        self.msc2285_enabled: bool = experimental.get("msc2285_enabled", False)
-
         # MSC3244 (room version capabilities)
         self.msc3244_enabled: bool = experimental.get("msc3244_enabled", True)
 
@@ -74,6 +71,9 @@ class ExperimentalConfig(Config):
         self.msc3720_enabled: bool = experimental.get("msc3720_enabled", False)
 
         # MSC2654: Unread counts
+        #
+        # Note that enabling this will result in an incorrect unread count for
+        # previously calculated push actions.
         self.msc2654_enabled: bool = experimental.get("msc2654_enabled", False)
 
         # MSC2815 (allow room moderators to view redacted event content)
diff --git a/synapse/config/metrics.py b/synapse/config/metrics.py
index 3b42be5b5..f3134834e 100644
--- a/synapse/config/metrics.py
+++ b/synapse/config/metrics.py
@@ -42,6 +42,35 @@ class MetricsConfig(Config):
     def read_config(self, config: JsonDict, **kwargs: Any) -> None:
         self.enable_metrics = config.get("enable_metrics", False)
+
+        """
+        ### `enable_legacy_metrics` (experimental)
+
+        **Experimental: this option may be removed or have its behaviour
+        changed at any time, with no notice.**
+
+        Set to `true` to publish both legacy and non-legacy Prometheus metric names,
+        or to `false` to only publish non-legacy Prometheus metric names.
+        Defaults to `true`. Has no effect if `enable_metrics` is `false`.
+
+        Legacy metric names include:
+        - metrics containing colons in the name, such as `synapse_util_caches_response_cache:hits`, because colons are supposed to be reserved for user-defined recording rules;
+        - counters that don't end with the `_total` suffix, such as `synapse_federation_client_sent_edus`, therefore not adhering to the OpenMetrics standard.
+
+        These legacy metric names are unconventional and not compliant with OpenMetrics standards.
+        They are included for backwards compatibility.
+
+        Example configuration:
+        ```yaml
+        enable_legacy_metrics: false
+        ```
+
+        See https://github.com/matrix-org/synapse/issues/11106 for context.
+
+        *Since v1.67.0.*
+        """
+        self.enable_legacy_metrics = config.get("enable_legacy_metrics", True)
+
         self.report_stats = config.get("report_stats", None)
         self.report_stats_endpoint = config.get(
             "report_stats_endpoint", "https://matrix.org/report-usage-stats/push"
diff --git a/synapse/config/registration.py b/synapse/config/registration.py
index a888d976f..df1d83dfa 100644
--- a/synapse/config/registration.py
+++ b/synapse/config/registration.py
@@ -13,10 +13,10 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 import argparse
-from typing import Any, Optional
+from typing import Any, Dict, Optional
 
 from synapse.api.constants import RoomCreationPreset
-from synapse.config._base import Config, ConfigError
+from synapse.config._base import Config, ConfigError, read_file
 from synapse.types import JsonDict, RoomAlias, UserID
 from synapse.util.stringutils import random_string_with_symbols, strtobool
 
@@ -27,6 +27,11 @@ password resets, configure Synapse with an SMTP server via the `email` setting,
 remove `account_threepid_delegates.email`.
 """
 
+CONFLICTING_SHARED_SECRET_OPTS_ERROR = """\
+You have configured both `registration_shared_secret` and
+`registration_shared_secret_path`. These are mutually incompatible.
+"""
+
 
 class RegistrationConfig(Config):
     section = "registration"
@@ -53,7 +58,16 @@ class RegistrationConfig(Config):
         self.enable_registration_token_3pid_bypass = config.get(
             "enable_registration_token_3pid_bypass", False
         )
+
+        # read the shared secret, either inline or from an external file
         self.registration_shared_secret = config.get("registration_shared_secret")
+        registration_shared_secret_path = config.get("registration_shared_secret_path")
+        if registration_shared_secret_path:
+            if self.registration_shared_secret:
+                raise ConfigError(CONFLICTING_SHARED_SECRET_OPTS_ERROR)
+            self.registration_shared_secret = read_file(
+                registration_shared_secret_path, ("registration_shared_secret_path",)
+            ).strip()
 
         self.bcrypt_rounds = config.get("bcrypt_rounds", 12)
 
@@ -218,6 +232,21 @@ class RegistrationConfig(Config):
         else:
             return ""
 
+    def generate_files(self, config: Dict[str, Any], config_dir_path: str) -> None:
+        # if 'registration_shared_secret_path' is specified, and the target file
+        # does not exist, generate it.
+        registration_shared_secret_path = config.get("registration_shared_secret_path")
+        if registration_shared_secret_path and not self.path_exists(
+            registration_shared_secret_path
+        ):
+            print(
+                "Generating registration shared secret file "
+                + registration_shared_secret_path
+            )
+            secret = random_string_with_symbols(50)
+            with open(registration_shared_secret_path, "w") as f:
+                f.write(f"{secret}\n")
+
     @staticmethod
     def add_arguments(parser: argparse.ArgumentParser) -> None:
         reg_group = parser.add_argument_group("registration")
diff --git a/synapse/config/server.py b/synapse/config/server.py
index 085fe22c5..c91df636d 100644
--- a/synapse/config/server.py
+++ b/synapse/config/server.py
@@ -36,6 +36,12 @@ from ._util import validate_config
 
 logger = logging.Logger(__name__)
 
+DIRECT_TCP_ERROR = """
+Using direct TCP replication for workers is no longer supported.
+
+Please see https://matrix-org.github.io/synapse/latest/upgrade.html#direct-tcp-replication-is-no-longer-supported-migrate-to-redis
+"""
+
 # by default, we attempt to listen on both '::' *and* '0.0.0.0' because some OSes
 # (Windows, macOS, other BSD/Linux where net.ipv6.bindv6only is set) will only listen
 # on IPv6 when '::' is set.
@@ -165,7 +171,6 @@ KNOWN_LISTENER_TYPES = {
     "http",
     "metrics",
     "manhole",
-    "replication",
 }
 
 KNOWN_RESOURCES = {
@@ -515,7 +520,9 @@ class ServerConfig(Config):
         ):
             raise ConfigError("allowed_avatar_mimetypes must be a list")
 
-        self.listeners = [parse_listener_def(x) for x in config.get("listeners", [])]
+        self.listeners = [
+            parse_listener_def(i, x) for i, x in enumerate(config.get("listeners", []))
+        ]
 
         # no_tls is not really supported any more, but let's grandfather it in
         # here.
@@ -880,9 +887,12 @@ def read_gc_thresholds(
     )
 
 
-def parse_listener_def(listener: Any) -> ListenerConfig:
+def parse_listener_def(num: int, listener: Any) -> ListenerConfig:
     """parse a listener config from the config file"""
     listener_type = listener["type"]
+    # Raise a helpful error if direct TCP replication is still configured.
+    if listener_type == "replication":
+        raise ConfigError(DIRECT_TCP_ERROR, ("listeners", str(num), "type"))
 
     port = listener.get("port")
     if not isinstance(port, int):
diff --git a/synapse/config/workers.py b/synapse/config/workers.py
index f2716422b..0fb725dd8 100644
--- a/synapse/config/workers.py
+++ b/synapse/config/workers.py
@@ -27,7 +27,7 @@ from ._base import (
     RoutableShardedWorkerHandlingConfig,
     ShardedWorkerHandlingConfig,
 )
-from .server import ListenerConfig, parse_listener_def
+from .server import DIRECT_TCP_ERROR, ListenerConfig, parse_listener_def
 
 _FEDERATION_SENDER_WITH_SEND_FEDERATION_ENABLED_ERROR = """
 The send_federation config option must be disabled in the main
@@ -128,7 +128,8 @@ class WorkerConfig(Config):
         self.worker_app = None
 
         self.worker_listeners = [
-            parse_listener_def(x) for x in config.get("worker_listeners", [])
+            parse_listener_def(i, x)
+            for i, x in enumerate(config.get("worker_listeners", []))
         ]
         self.worker_daemonize = bool(config.get("worker_daemonize"))
         self.worker_pid_file = config.get("worker_pid_file")
@@ -142,7 +143,8 @@ class WorkerConfig(Config):
         self.worker_replication_host = config.get("worker_replication_host", None)
 
         # The port on the main synapse for TCP replication
-        self.worker_replication_port = config.get("worker_replication_port", None)
+        if "worker_replication_port" in config:
+            raise ConfigError(DIRECT_TCP_ERROR, ("worker_replication_port",))
 
         # The port on the main synapse for HTTP replication endpoint
         self.worker_replication_http_port = config.get("worker_replication_http_port")
diff --git a/synapse/crypto/event_signing.py b/synapse/crypto/event_signing.py
index 7520647d1..23b799ac3 100644
--- a/synapse/crypto/event_signing.py
+++ b/synapse/crypto/event_signing.py
@@ -28,6 +28,7 @@ from synapse.api.errors import Codes, SynapseError
 from synapse.api.room_versions import RoomVersion
 from synapse.events import EventBase
 from synapse.events.utils import prune_event, prune_event_dict
+from synapse.logging.opentracing import trace
 from synapse.types import JsonDict
 
 logger = logging.getLogger(__name__)
@@ -35,6 +36,7 @@ logger = logging.getLogger(__name__)
 Hasher = Callable[[bytes], "hashlib._Hash"]
 
 
+@trace
 def check_event_content_hash(
     event: EventBase, hash_algorithm: Hasher = hashlib.sha256
 ) -> bool:
diff --git a/synapse/events/spamcheck.py b/synapse/events/spamcheck.py
index 4a3bfb38f..623a2c71e 100644
--- a/synapse/events/spamcheck.py
+++ b/synapse/events/spamcheck.py
@@ -32,6 +32,7 @@ from typing_extensions import Literal
 
 import synapse
 from synapse.api.errors import Codes
+from synapse.logging.opentracing import trace
 from synapse.rest.media.v1._base import FileInfo
 from synapse.rest.media.v1.media_storage import ReadableFileWrapper
 from synapse.spam_checker_api import RegistrationBehaviour
@@ -378,6 +379,7 @@ class SpamChecker:
         if check_media_file_for_spam is not None:
             self._check_media_file_for_spam_callbacks.append(check_media_file_for_spam)
 
+    @trace
     async def check_event_for_spam(
         self, event: "synapse.events.EventBase"
     ) -> Union[Tuple[Codes, JsonDict], str]:
diff --git a/synapse/federation/federation_base.py b/synapse/federation/federation_base.py
index 2522bf78f..4269a98db 100644
--- a/synapse/federation/federation_base.py
+++ b/synapse/federation/federation_base.py
@@ -23,6 +23,7 @@ from synapse.crypto.keyring import Keyring
 from synapse.events import EventBase, make_event_from_dict
 from synapse.events.utils import prune_event, validate_canonicaljson
 from synapse.http.servlet import assert_params_in_dict
+from synapse.logging.opentracing import log_kv, trace
 from synapse.types import JsonDict, get_domain_from_id
 
 if TYPE_CHECKING:
@@ -55,6 +56,7 @@ class FederationBase:
         self._clock = hs.get_clock()
         self._storage_controllers = hs.get_storage_controllers()
 
+    @trace
     async def _check_sigs_and_hash(
         self, room_version: RoomVersion, pdu: EventBase
     ) -> EventBase:
@@ -97,17 +99,36 @@ class FederationBase:
                 "Event %s seems to have been redacted; using our redacted copy",
                 pdu.event_id,
             )
+            log_kv(
+                {
+                    "message": "Event seems to have been redacted; using our redacted copy",
+                    "event_id": pdu.event_id,
+                }
+            )
         else:
             logger.warning(
                 "Event %s content has been tampered, redacting",
                 pdu.event_id,
             )
+            log_kv(
+                {
+                    "message": "Event content has been tampered, redacting",
+                    "event_id": pdu.event_id,
+                }
+            )
             return redacted_event
 
         spam_check = await self.spam_checker.check_event_for_spam(pdu)
         if spam_check != self.spam_checker.NOT_SPAM:
             logger.warning("Event contains spam, soft-failing %s", pdu.event_id)
+            log_kv(
+                {
+                    "message": "Event contains spam, redacting (to save disk space) "
+                    "as well as soft-failing (to stop using the event in prev_events)",
+                    "event_id": pdu.event_id,
+                }
+            )
             # we redact (to save disk space) as well as soft-failing (to stop
             # using the event in prev_events).
             redacted_event = prune_event(pdu)
@@ -117,6 +138,7 @@ class FederationBase:
         return pdu
 
 
+@trace
 async def _check_sigs_on_pdu(
     keyring: Keyring, room_version: RoomVersion, pdu: EventBase
 ) -> None:
diff --git a/synapse/federation/federation_client.py b/synapse/federation/federation_client.py
index 987f6dad4..7ee2974bb 100644
--- a/synapse/federation/federation_client.py
+++ b/synapse/federation/federation_client.py
@@ -61,7 +61,7 @@ from synapse.federation.federation_base import (
 )
 from synapse.federation.transport.client import SendJoinResponse
 from synapse.http.types import QueryParams
-from synapse.logging.opentracing import SynapseTags, set_tag, tag_args, trace
+from synapse.logging.opentracing import SynapseTags, log_kv, set_tag, tag_args, trace
 from synapse.types import JsonDict, UserID, get_domain_from_id
 from synapse.util.async_helpers import concurrently_execute
 from synapse.util.caches.expiringcache import ExpiringCache
@@ -587,11 +587,15 @@ class FederationClient(FederationBase):
         Returns:
             A list of PDUs that have valid signatures and hashes.
""" + set_tag( + SynapseTags.RESULT_PREFIX + "pdus.length", + str(len(pdus)), + ) # We limit how many PDUs we check at once, as if we try to do hundreds # of thousands of PDUs at once we see large memory spikes. - valid_pdus = [] + valid_pdus: List[EventBase] = [] async def _execute(pdu: EventBase) -> None: valid_pdu = await self._check_sigs_and_hash_and_fetch_one( @@ -607,6 +611,8 @@ class FederationClient(FederationBase): return valid_pdus + @trace + @tag_args async def _check_sigs_and_hash_and_fetch_one( self, pdu: EventBase, @@ -639,16 +645,27 @@ class FederationClient(FederationBase): except InvalidEventSignatureError as e: logger.warning( "Signature on retrieved event %s was invalid (%s). " - "Checking local store/orgin server", + "Checking local store/origin server", pdu.event_id, e, ) + log_kv( + { + "message": "Signature on retrieved event was invalid. " + "Checking local store/origin server", + "event_id": pdu.event_id, + "InvalidEventSignatureError": e, + } + ) # Check local db. res = await self.store.get_event( pdu.event_id, allow_rejected=True, allow_none=True ) + # If the PDU fails its signature check and we don't have it in our + # database, we then request it from sender's server (if that is not the + # same as `origin`). pdu_origin = get_domain_from_id(pdu.sender) if not res and pdu_origin != origin: try: diff --git a/synapse/federation/federation_server.py b/synapse/federation/federation_server.py index 75fbc6073..3bf84cf62 100644 --- a/synapse/federation/federation_server.py +++ b/synapse/federation/federation_server.py @@ -763,6 +763,17 @@ class FederationServer(FederationBase): The partial knock event. """ origin_host, _ = parse_server_name(origin) + + if await self.store.is_partial_state_room(room_id): + # Before we do anything: check if the room is partial-stated. + # Note that at the time this check was added, `on_make_knock_request` would + # block due to https://github.com/matrix-org/synapse/issues/12997. 
+            raise SynapseError(
+                404,
+                "Unable to handle /make_knock right now; this server is not fully joined.",
+                errcode=Codes.NOT_FOUND,
+            )
+
         await self.check_server_matches_acl(origin_host, room_id)
         room_version = await self.store.get_room_version(room_id)
diff --git a/synapse/federation/sender/__init__.py b/synapse/federation/sender/__init__.py
index 94a65ac65..8bc60e3e3 100644
--- a/synapse/federation/sender/__init__.py
+++ b/synapse/federation/sender/__init__.py
@@ -441,6 +441,19 @@ class FederationSender(AbstractFederationSender):
                         destinations = await self._external_cache.get(
                             "get_joined_hosts", str(sg)
                         )
+                        if destinations is None:
+                            # Add logging to help track down #13444
+                            logger.info(
+                                "Unexpectedly did not have cached destinations for %s / %s",
+                                sg,
+                                event.event_id,
+                            )
+                    else:
+                        # Add logging to help track down #13444
+                        logger.info(
+                            "Unexpectedly did not have cached prev group for %s",
+                            event.event_id,
+                        )
 
                 if destinations is None:
                     try:
diff --git a/synapse/federation/transport/server/_base.py b/synapse/federation/transport/server/_base.py
index bb0f8d6b7..1db8009d6 100644
--- a/synapse/federation/transport/server/_base.py
+++ b/synapse/federation/transport/server/_base.py
@@ -21,7 +21,7 @@ from typing import TYPE_CHECKING, Any, Awaitable, Callable, Dict, Optional, Tupl
 
 from synapse.api.errors import Codes, FederationDeniedError, SynapseError
 from synapse.api.urls import FEDERATION_V1_PREFIX
-from synapse.http.server import HttpServer, ServletCallback, is_method_cancellable
+from synapse.http.server import HttpServer, ServletCallback
 from synapse.http.servlet import parse_json_object_from_request
 from synapse.http.site import SynapseRequest
 from synapse.logging.context import run_in_background
@@ -34,6 +34,7 @@ from synapse.logging.opentracing import (
     whitelisted_homeserver,
 )
 from synapse.types import JsonDict
+from synapse.util.cancellation import is_function_cancellable
 from synapse.util.ratelimitutils import FederationRateLimiter
 from synapse.util.stringutils import parse_and_validate_server_name
 
@@ -375,7 +376,7 @@ class BaseFederationServlet:
             if code is None:
                 continue
 
-            if is_method_cancellable(code):
+            if is_function_cancellable(code):
                 # The wrapper added by `self._wrap` will inherit the cancellable flag,
                 # but the wrapper itself does not support cancellation yet.
                 # Once resolved, the cancellation tests in
diff --git a/synapse/handlers/device.py b/synapse/handlers/device.py
index f5c586f65..9c2c3a0e6 100644
--- a/synapse/handlers/device.py
+++ b/synapse/handlers/device.py
@@ -310,6 +310,7 @@ class DeviceHandler(DeviceWorkerHandler):
         super().__init__(hs)
 
         self.federation_sender = hs.get_federation_sender()
+        self._storage_controllers = hs.get_storage_controllers()
 
         self.device_list_updater = DeviceListUpdater(hs, self)
 
@@ -694,8 +695,11 @@ class DeviceHandler(DeviceWorkerHandler):
 
                 # Ignore any users that aren't ours
                 if self.hs.is_mine_id(user_id):
-                    joined_user_ids = await self.store.get_users_in_room(room_id)
-                    hosts = {get_domain_from_id(u) for u in joined_user_ids}
+                    hosts = set(
+                        await self._storage_controllers.state.get_current_hosts_in_room(
+                            room_id
+                        )
+                    )
                     hosts.discard(self.server_name)
 
                 # Check if we've already sent this update to some hosts
diff --git a/synapse/handlers/directory.py b/synapse/handlers/directory.py
index f3b3dbd34..7ad4003f2 100644
--- a/synapse/handlers/directory.py
+++ b/synapse/handlers/directory.py
@@ -30,7 +30,7 @@ from synapse.api.errors import (
 from synapse.appservice import ApplicationService
 from synapse.module_api import NOT_SPAM
 from synapse.storage.databases.main.directory import RoomAliasMapping
-from synapse.types import JsonDict, Requester, RoomAlias, get_domain_from_id
+from synapse.types import JsonDict, Requester, RoomAlias
 
 if TYPE_CHECKING:
     from synapse.server import HomeServer
@@ -85,8 +85,9 @@ class DirectoryHandler:
         # TODO(erikj): Add transactions.
         # TODO(erikj): Check if there is a current association.
         if not servers:
-            users = await self.store.get_users_in_room(room_id)
-            servers = {get_domain_from_id(u) for u in users}
+            servers = await self._storage_controllers.state.get_current_hosts_in_room(
+                room_id
+            )
 
         if not servers:
             raise SynapseError(400, "Failed to get server list")
@@ -292,8 +293,9 @@ class DirectoryHandler:
                 Codes.NOT_FOUND,
             )
 
-        users = await self.store.get_users_in_room(room_id)
-        extra_servers = {get_domain_from_id(u) for u in users}
+        extra_servers = await self._storage_controllers.state.get_current_hosts_in_room(
+            room_id
+        )
         servers_set = set(extra_servers) | set(servers)
 
         # If this server is in the list of servers, return it first.
diff --git a/synapse/handlers/event_auth.py b/synapse/handlers/event_auth.py
index a2dd9c7ef..c3ddc5d18 100644
--- a/synapse/handlers/event_auth.py
+++ b/synapse/handlers/event_auth.py
@@ -129,12 +129,9 @@ class EventAuthHandler:
         else:
             users = {}
 
-        # Find the user with the highest power level.
-        users_in_room = await self._store.get_users_in_room(room_id)
-        # Only interested in local users.
-        local_users_in_room = [
-            u for u in users_in_room if get_domain_from_id(u) == self._server_name
-        ]
+        # Find the user with the highest power level (only interested in local
+        # users).
+        local_users_in_room = await self._store.get_local_users_in_room(room_id)
         chosen_user = max(
             local_users_in_room,
             key=lambda user: users.get(user, users_default_level),
diff --git a/synapse/handlers/events.py b/synapse/handlers/events.py
index ac13340d3..949b69cb4 100644
--- a/synapse/handlers/events.py
+++ b/synapse/handlers/events.py
@@ -151,7 +151,7 @@ class EventHandler:
         """Retrieve a single specified event.
 
         Args:
-            user: The user requesting the event
+            user: The local user requesting the event
             room_id: The expected room id. We'll return None if the
                 event's room does not match.
             event_id: The event ID to obtain.
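
The handler hunks above and below all apply the same optimisation: stop fetching the full member list (`get_users_in_room`) just to derive a small answer, and instead ask the storage layer a targeted question. A hedged before/after sketch of the pattern (method names come from this diff; the wrapper functions are illustrative only):

```python
from typing import Any, Set

from synapse.types import get_domain_from_id


async def hosts_in_room_before(store: Any, room_id: str) -> Set[str]:
    # Old pattern: fetch every joined user ID, then reduce in Python.
    users = await store.get_users_in_room(room_id)
    return {get_domain_from_id(u) for u in users}


async def hosts_in_room_after(state_controller: Any, room_id: str) -> Set[str]:
    # New pattern: ask the storage layer directly, avoiding materialising a
    # potentially huge member list in large rooms.
    return set(await state_controller.get_current_hosts_in_room(room_id))
```

The same idea drives `check_local_user_in_room` (a membership test without listing everyone) and `get_local_users_in_room` (filtering to local users in SQL rather than via `get_domain_from_id` in Python).
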
@@ -173,8 +173,11 @@ class EventHandler:
         if not event:
             return None
 
-        users = await self.store.get_users_in_room(event.room_id)
-        is_peeking = user.to_string() not in users
+        is_user_in_room = await self.store.check_local_user_in_room(
+            user_id=user.to_string(), room_id=event.room_id
+        )
+        # The user is peeking if they aren't in the room already
+        is_peeking = not is_user_in_room
 
         filtered = await filter_events_for_client(
             self._storage_controllers, user.to_string(), [event], is_peeking=is_peeking
diff --git a/synapse/handlers/federation.py b/synapse/handlers/federation.py
index a3098669f..4372ebeaa 100644
--- a/synapse/handlers/federation.py
+++ b/synapse/handlers/federation.py
@@ -70,7 +70,7 @@ from synapse.replication.http.federation import (
 from synapse.storage.databases.main.events import PartialStateConflictError
 from synapse.storage.databases.main.events_worker import EventRedactBehaviour
 from synapse.storage.state import StateFilter
-from synapse.types import JsonDict, StateMap, get_domain_from_id
+from synapse.types import JsonDict, get_domain_from_id
 from synapse.util.async_helpers import Linearizer
 from synapse.util.retryutils import NotRetryingDestination
 from synapse.visibility import filter_events_for_server
@@ -104,37 +104,6 @@ backfill_processing_before_timer = Histogram(
 )
 
 
-def get_domains_from_state(state: StateMap[EventBase]) -> List[Tuple[str, int]]:
-    """Get joined domains from state
-
-    Args:
-        state: State map from type/state key to event.
-
-    Returns:
-        Returns a list of servers with the lowest depth of their joins.
-        Sorted by lowest depth first.
-    """
-    joined_users = [
-        (state_key, int(event.depth))
-        for (e_type, state_key), event in state.items()
-        if e_type == EventTypes.Member and event.membership == Membership.JOIN
-    ]
-
-    joined_domains: Dict[str, int] = {}
-    for u, d in joined_users:
-        try:
-            dom = get_domain_from_id(u)
-            old_d = joined_domains.get(dom)
-            if old_d:
-                joined_domains[dom] = min(d, old_d)
-            else:
-                joined_domains[dom] = d
-        except Exception:
-            pass
-
-    return sorted(joined_domains.items(), key=lambda d: d[1])
-
-
 class _BackfillPointType(Enum):
     # a regular backwards extremity (ie, an event which we don't yet have, but which
     # is referred to by other events in the DAG)
@@ -432,21 +401,19 @@ class FederationHandler:
         )
 
         # Now we need to decide which hosts to hit first.
-
-        # First we try hosts that are already in the room
+        # First we try hosts that are already in the room.
         # TODO: HEURISTIC ALERT.
+        likely_domains = (
+            await self._storage_controllers.state.get_current_hosts_in_room(room_id)
+        )
 
-        curr_state = await self._storage_controllers.state.get_current_state(room_id)
-
-        curr_domains = get_domains_from_state(curr_state)
-
-        likely_domains = [
-            domain for domain, depth in curr_domains if domain != self.server_name
-        ]
-
-        async def try_backfill(domains: List[str]) -> bool:
+        async def try_backfill(domains: Collection[str]) -> bool:
             # TODO: Should we try multiple of these at a time?
             for dom in domains:
+                # We don't want to ask our own server for information we don't have
+                if dom == self.server_name:
+                    continue
+
                 try:
                     await self._federation_event_handler.backfill(
                         dom, room_id, limit=100, extremities=extremities_to_request
diff --git a/synapse/handlers/federation_event.py b/synapse/handlers/federation_event.py
index 048c4111f..ace7adcff 100644
--- a/synapse/handlers/federation_event.py
+++ b/synapse/handlers/federation_event.py
@@ -1041,6 +1041,14 @@ class FederationEventHandler:
             InvalidResponseError: if the remote homeserver's response contains fields
                 of the wrong type.
         """
+
+        # It would be better if we could query the difference from our known
+        # state to the given `event_id` so the sending server doesn't have to
+        # send as much and we don't have to process as many events. For example
+        # in a room like #matrix:matrix.org, we get 200k events (77k state_events, 122k
+        # auth_events) from this call.
+        #
+        # Tracked by https://github.com/matrix-org/synapse/issues/13618
         (
             state_event_ids,
             auth_event_ids,
diff --git a/synapse/handlers/identity.py b/synapse/handlers/identity.py
index 9571d461c..93d09e993 100644
--- a/synapse/handlers/identity.py
+++ b/synapse/handlers/identity.py
@@ -538,11 +538,7 @@ class IdentityHandler:
             raise SynapseError(400, "Error contacting the identity server")
 
     async def lookup_3pid(
-        self,
-        id_server: str,
-        medium: str,
-        address: str,
-        id_access_token: Optional[str] = None,
+        self, id_server: str, medium: str, address: str, id_access_token: str
     ) -> Optional[str]:
         """Looks up a 3pid in the passed identity server.
 
@@ -557,60 +553,15 @@ class IdentityHandler:
         Returns:
             the matrix ID of the 3pid, or None if it is not recognized.
         """
-        if id_access_token is not None:
-            try:
-                results = await self._lookup_3pid_v2(
-                    id_server, id_access_token, medium, address
-                )
-                return results
-            except Exception as e:
-                # Catch HttpResponseExcept for a non-200 response code
-                # Check if this identity server does not know about v2 lookups
-                if isinstance(e, HttpResponseException) and e.code == 404:
-                    # This is an old identity server that does not yet support v2 lookups
-                    logger.warning(
-                        "Attempted v2 lookup on v1 identity server %s. Falling "
-                        "back to v1",
-                        id_server,
-                    )
-                else:
-                    logger.warning("Error when looking up hashing details: %s", e)
-                    return None
-
-        return await self._lookup_3pid_v1(id_server, medium, address)
-
-    async def _lookup_3pid_v1(
-        self, id_server: str, medium: str, address: str
-    ) -> Optional[str]:
-        """Looks up a 3pid in the passed identity server using v1 lookup.
-
-        Args:
-            id_server: The server name (including port, if required)
-                of the identity server to use.
-            medium: The type of the third party identifier (e.g. "email").
-            address: The third party identifier (e.g. "foo@example.com").
-
-        Returns:
-            the matrix ID of the 3pid, or None if it is not recognized.
-        """
         try:
-            data = await self.blacklisting_http_client.get_json(
-                "%s%s/_matrix/identity/api/v1/lookup" % (id_server_scheme, id_server),
-                {"medium": medium, "address": address},
+            results = await self._lookup_3pid_v2(
+                id_server, id_access_token, medium, address
             )
-
-            if "mxid" in data:
-                # note: we used to verify the identity server's signature here, but no longer
-                # require or validate it. See the following for context:
-                # https://github.com/matrix-org/synapse/issues/5253#issuecomment-666246950
-                return data["mxid"]
-        except RequestTimedOutError:
-            raise SynapseError(500, "Timed out contacting identity server")
-        except OSError as e:
-            logger.warning("Error from v1 identity server lookup: %s" % (e,))
-
-        return None
+            return results
+        except Exception as e:
+            logger.warning("Error when looking up hashing details: %s", e)
+            return None
 
     async def _lookup_3pid_v2(
         self, id_server: str, id_access_token: str, medium: str, address: str
@@ -739,7 +690,7 @@ class IdentityHandler:
         room_type: Optional[str],
         inviter_display_name: str,
         inviter_avatar_url: str,
-        id_access_token: Optional[str] = None,
+        id_access_token: str,
     ) -> Tuple[str, List[Dict[str, str]], Dict[str, str], str]:
         """
         Asks an identity server for a third party invite.
@@ -760,7 +711,7 @@ class IdentityHandler:
             inviter_display_name: The current display name of the inviter.
             inviter_avatar_url: The URL of the inviter's avatar.
 
-            id_access_token (str|None): The access token to authenticate to the identity
+            id_access_token (str): The access token to authenticate to the identity
                 server with
 
         Returns:
@@ -792,71 +743,24 @@ class IdentityHandler:
         invite_config["org.matrix.web_client_location"] = self._web_client_location
 
         # Add the identity service access token to the JSON body and use the v2
-        # Identity Service endpoints if id_access_token is present
+        # Identity Service endpoints
         data = None
-        base_url = "%s%s/_matrix/identity" % (id_server_scheme, id_server)
 
-        if id_access_token:
-            key_validity_url = "%s%s/_matrix/identity/v2/pubkey/isvalid" % (
-                id_server_scheme,
-                id_server,
+        key_validity_url = "%s%s/_matrix/identity/v2/pubkey/isvalid" % (
+            id_server_scheme,
+            id_server,
+        )
+
+        url = "%s%s/_matrix/identity/v2/store-invite" % (id_server_scheme, id_server)
+        try:
+            data = await self.blacklisting_http_client.post_json_get_json(
+                url,
+                invite_config,
+                {"Authorization": create_id_access_token_header(id_access_token)},
             )
+        except RequestTimedOutError:
+            raise SynapseError(500, "Timed out contacting identity server")
 
-            # Attempt a v2 lookup
-            url = base_url + "/v2/store-invite"
-            try:
-                data = await self.blacklisting_http_client.post_json_get_json(
-                    url,
-                    invite_config,
-                    {"Authorization": create_id_access_token_header(id_access_token)},
-                )
-            except RequestTimedOutError:
-                raise SynapseError(500, "Timed out contacting identity server")
-            except HttpResponseException as e:
-                if e.code != 404:
-                    logger.info("Failed to POST %s with JSON: %s", url, e)
-                    raise e
-
-        if data is None:
-            key_validity_url = "%s%s/_matrix/identity/api/v1/pubkey/isvalid" % (
-                id_server_scheme,
-                id_server,
-            )
-            url = base_url + "/api/v1/store-invite"
-
-            try:
-                data = await self.blacklisting_http_client.post_json_get_json(
-                    url, invite_config
-                )
-            except RequestTimedOutError:
-                raise SynapseError(500, "Timed out contacting identity server")
-            except HttpResponseException as e:
-                logger.warning(
-                    "Error trying to call /store-invite on %s%s: %s",
-                    id_server_scheme,
-                    id_server,
-                    e,
-                )
-
-            if data is None:
-                # Some identity servers may only support application/x-www-form-urlencoded
-                # types. This is especially true with old instances of Sydent, see
-                # https://github.com/matrix-org/sydent/pull/170
-                try:
-                    data = await self.blacklisting_http_client.post_urlencoded_get_json(
-                        url, invite_config
-                    )
-                except HttpResponseException as e:
-                    logger.warning(
-                        "Error calling /store-invite on %s%s with fallback "
-                        "encoding: %s",
-                        id_server_scheme,
-                        id_server,
-                        e,
-                    )
-                    raise e
-
-        # TODO: Check for success
         token = data["token"]
         public_keys = data.get("public_keys", [])
         if "public_key" in data:
diff --git a/synapse/handlers/message.py b/synapse/handlers/message.py
index 08ee7caf2..ca3fc5577 100644
--- a/synapse/handlers/message.py
+++ b/synapse/handlers/message.py
@@ -763,8 +763,10 @@ class EventCreationHandler:
     async def _is_server_notices_room(self, room_id: str) -> bool:
         if self.config.servernotices.server_notices_mxid is None:
             return False
-        user_ids = await self.store.get_users_in_room(room_id)
-        return self.config.servernotices.server_notices_mxid in user_ids
+        is_server_notices_room = await self.store.check_local_user_in_room(
+            user_id=self.config.servernotices.server_notices_mxid, room_id=room_id
+        )
+        return is_server_notices_room
 
     async def assert_accepted_privacy_policy(self, requester: Requester) -> None:
         """Check if a user has accepted the privacy policy
diff --git a/synapse/handlers/pagination.py b/synapse/handlers/pagination.py
index 74e944bce..a0c39778a 100644
--- a/synapse/handlers/pagination.py
+++ b/synapse/handlers/pagination.py
@@ -159,11 +159,9 @@ class PaginationHandler:
         self._retention_allowed_lifetime_max = (
             hs.config.retention.retention_allowed_lifetime_max
         )
+        self._is_master = hs.config.worker.worker_app is None
 
-        if (
-            hs.config.worker.run_background_tasks
-            and hs.config.retention.retention_enabled
-        ):
+        if hs.config.retention.retention_enabled and self._is_master:
             # Run the purge jobs described in the configuration file.
             for job in hs.config.retention.retention_purge_jobs:
                 logger.info("Setting up purge job with config: %s", job)
diff --git a/synapse/handlers/presence.py b/synapse/handlers/presence.py
index 741504ba9..4e575ffba 100644
--- a/synapse/handlers/presence.py
+++ b/synapse/handlers/presence.py
@@ -2051,8 +2051,7 @@ async def get_interested_remotes(
     )
 
     for room_id, states in room_ids_to_states.items():
-        user_ids = await store.get_users_in_room(room_id)
-        hosts = {get_domain_from_id(user_id) for user_id in user_ids}
+        hosts = await store.get_current_hosts_in_room(room_id)
         for host in hosts:
             hosts_and_states.setdefault(host, set()).update(states)
 
diff --git a/synapse/handlers/receipts.py b/synapse/handlers/receipts.py
index f4e22311b..8a4500f53 100644
--- a/synapse/handlers/receipts.py
+++ b/synapse/handlers/receipts.py
@@ -164,10 +164,7 @@ class ReceiptsHandler:
         if not is_new:
             return
 
-        if self.federation_sender and receipt_type not in (
-            ReceiptTypes.READ_PRIVATE,
-            ReceiptTypes.UNSTABLE_READ_PRIVATE,
-        ):
+        if self.federation_sender and receipt_type != ReceiptTypes.READ_PRIVATE:
             await self.federation_sender.send_read_receipt(receipt)
 
 
@@ -207,38 +204,24 @@ class ReceiptEventSource(EventSource[int, JsonDict]):
         for event_id, orig_event_content in room.get("content", {}).items():
             event_content = orig_event_content
             # If there are private read receipts, additional logic is necessary.
-            if (
-                ReceiptTypes.READ_PRIVATE in event_content
-                or ReceiptTypes.UNSTABLE_READ_PRIVATE in event_content
-            ):
+            if ReceiptTypes.READ_PRIVATE in event_content:
                 # Make a copy without private read receipts to avoid leaking
                 # other users' private read receipts.
                 event_content = {
                     receipt_type: receipt_value
                     for receipt_type, receipt_value in event_content.items()
-                    if receipt_type
-                    not in (
-                        ReceiptTypes.READ_PRIVATE,
-                        ReceiptTypes.UNSTABLE_READ_PRIVATE,
-                    )
+                    if receipt_type != ReceiptTypes.READ_PRIVATE
                 }
 
                 # Copy the current user's private read receipt from the
                 # original content, if it exists.
-                user_private_read_receipt = orig_event_content.get(
-                    ReceiptTypes.READ_PRIVATE, {}
-                ).get(user_id, None)
+                user_private_read_receipt = orig_event_content[
+                    ReceiptTypes.READ_PRIVATE
+                ].get(user_id, None)
                 if user_private_read_receipt:
                     event_content[ReceiptTypes.READ_PRIVATE] = {
                         user_id: user_private_read_receipt
                     }
-                user_unstable_private_read_receipt = orig_event_content.get(
-                    ReceiptTypes.UNSTABLE_READ_PRIVATE, {}
-                ).get(user_id, None)
-                if user_unstable_private_read_receipt:
-                    event_content[ReceiptTypes.UNSTABLE_READ_PRIVATE] = {
-                        user_id: user_unstable_private_read_receipt
-                    }
 
             # Include the event if there is at least one non-private read
             # receipt or the current user has a private read receipt.
diff --git a/synapse/handlers/room.py b/synapse/handlers/room.py
index 16c120301..d6bb4810b 100644
--- a/synapse/handlers/room.py
+++ b/synapse/handlers/room.py
@@ -19,6 +19,7 @@ import math
 import random
 import string
 from collections import OrderedDict
+from http import HTTPStatus
 from typing import (
     TYPE_CHECKING,
     Any,
@@ -60,7 +61,6 @@ from synapse.event_auth import validate_event_for_room_version
 from synapse.events import EventBase
 from synapse.events.utils import copy_and_fixup_power_levels_contents
 from synapse.federation.federation_client import InvalidResponseError
-from synapse.handlers.federation import get_domains_from_state
 from synapse.handlers.relations import BundledAggregations
 from synapse.module_api import NOT_SPAM
 from synapse.rest.admin._base import assert_user_is_admin
@@ -705,8 +705,8 @@ class RoomCreationHandler:
             was, requested, `room_alias`. Secondly, the stream_id of the last persisted
             event.
         Raises:
-            SynapseError if the room ID couldn't be stored, or something went
-            horribly wrong.
+            SynapseError if the room ID couldn't be stored, 3pid invitation config
+            validation failed, or something went horribly wrong.
             ResourceLimitError if server is blocked to some resource being exceeded
         """
@@ -732,6 +732,19 @@ class RoomCreationHandler:
         invite_3pid_list = config.get("invite_3pid", [])
         invite_list = config.get("invite", [])
 
+        # validate each entry for correctness
+        for invite_3pid in invite_3pid_list:
+            if not all(
+                key in invite_3pid
+                for key in ("medium", "address", "id_server", "id_access_token")
+            ):
+                raise SynapseError(
+                    HTTPStatus.BAD_REQUEST,
+                    "all of `medium`, `address`, `id_server` and `id_access_token` "
+                    "are required when making a 3pid invite",
+                    Codes.MISSING_PARAM,
+                )
+
         if not is_requester_admin:
             spam_check = await self.spam_checker.user_may_create_room(user_id)
             if spam_check != NOT_SPAM:
@@ -991,7 +1004,7 @@ class RoomCreationHandler:
 
         for invite_3pid in invite_3pid_list:
             id_server = invite_3pid["id_server"]
-            id_access_token = invite_3pid.get("id_access_token")  # optional
+            id_access_token = invite_3pid["id_access_token"]
             address = invite_3pid["address"]
             medium = invite_3pid["medium"]
             # Note that do_3pid_invite can raise a ShadowBanError, but this was
@@ -1296,8 +1309,11 @@ class RoomContextHandler:
         before_limit = math.floor(limit / 2.0)
         after_limit = limit - before_limit
 
-        users = await self.store.get_users_in_room(room_id)
-        is_peeking = user.to_string() not in users
+        is_user_in_room = await self.store.check_local_user_in_room(
+            user_id=user.to_string(), room_id=room_id
+        )
+        # The user is peeking if they aren't in the room already
+        is_peeking = not is_user_in_room
 
         async def filter_evts(events: List[EventBase]) -> List[EventBase]:
             if use_admin_priviledge:
@@ -1471,17 +1487,16 @@ class TimestampLookupHandler:
                 timestamp,
             )
 
-            # Find other homeservers from the given state in the room
-            curr_state = await self._storage_controllers.state.get_current_state(
-                room_id
+            likely_domains = (
+                await self._storage_controllers.state.get_current_hosts_in_room(room_id)
             )
-            curr_domains = get_domains_from_state(curr_state)
-            likely_domains = [
-                domain for domain, depth in curr_domains if domain != self.server_name
-            ]
 
             # Loop through each homeserver candidate until we get a successful response
             for domain in likely_domains:
+                # We don't want to ask our own server for information we don't have
+                if domain == self.server_name:
+                    continue
+
                 try:
                     remote_response = await self.federation_client.timestamp_to_event(
                         domain, room_id, timestamp, direction
diff --git a/synapse/handlers/room_member.py b/synapse/handlers/room_member.py
index 65b9a655d..5d4adf5bf 100644
--- a/synapse/handlers/room_member.py
+++ b/synapse/handlers/room_member.py
@@ -1382,7 +1382,7 @@ class RoomMemberHandler(metaclass=abc.ABCMeta):
         id_server: str,
         requester: Requester,
         txn_id: Optional[str],
-        id_access_token: Optional[str] = None,
+        id_access_token: str,
         prev_event_ids: Optional[List[str]] = None,
         depth: Optional[int] = None,
     ) -> Tuple[str, int]:
@@ -1397,7 +1397,7 @@ class RoomMemberHandler(metaclass=abc.ABCMeta):
             requester: The user making the request.
             txn_id: The transaction ID this is part of, or None if this is not
                 part of a transaction.
-            id_access_token: The optional identity server access token.
+            id_access_token: Identity server access token.
             depth: Override the depth used to order the event in the DAG.
             prev_event_ids: The event IDs to use as the prev events
                 Should normally be set to None, which will cause the depth to be calculated
@@ -1494,7 +1494,7 @@ class RoomMemberHandler(metaclass=abc.ABCMeta):
         room_id: str,
         user: UserID,
         txn_id: Optional[str],
-        id_access_token: Optional[str] = None,
+        id_access_token: str,
         prev_event_ids: Optional[List[str]] = None,
         depth: Optional[int] = None,
     ) -> Tuple[EventBase, int]:
@@ -1620,8 +1620,10 @@ class RoomMemberHandler(metaclass=abc.ABCMeta):
     async def _is_server_notice_room(self, room_id: str) -> bool:
         if self._server_notices_mxid is None:
             return False
-        user_ids = await self.store.get_users_in_room(room_id)
-        return self._server_notices_mxid in user_ids
+        is_server_notices_room = await self.store.check_local_user_in_room(
+            user_id=self._server_notices_mxid, room_id=room_id
+        )
+        return is_server_notices_room
 
 
 class RoomMemberMasterHandler(RoomMemberHandler):
@@ -1923,8 +1925,11 @@ class RoomMemberMasterHandler(RoomMemberHandler):
         ]:
             raise SynapseError(400, "User %s in room %s" % (user_id, room_id))
 
-        if membership:
-            await self.store.forget(user_id, room_id)
+        # In the normal case this call is only required if `membership` is not `None`.
+        # But: after the last member has left the room, the background update
+        # `_background_remove_left_rooms` deletes rows related to this room from the
+        # table `current_state_events`, at which point `get_current_state_events`
+        # returns `None`.
+        await self.store.forget(user_id, room_id)
 
 
 def get_users_which_can_issue_invite(auth_events: StateMap[EventBase]) -> List[str]:
diff --git a/synapse/handlers/sync.py b/synapse/handlers/sync.py
index 15c00b9f2..5025f6696 100644
--- a/synapse/handlers/sync.py
+++ b/synapse/handlers/sync.py
@@ -2420,10 +2420,10 @@ class SyncHandler:
                     joined_room.room_id, joined_room.event_pos.stream
                 )
             )
-            users_in_room = await self.state.get_current_users_in_room(
+            user_ids_in_room = await self.state.get_current_user_ids_in_room(
                 joined_room.room_id, extrems
             )
-            if user_id in users_in_room:
+            if user_id in user_ids_in_room:
                 joined_room_ids.add(joined_room.room_id)
 
         return frozenset(joined_room_ids)
diff --git a/synapse/handlers/typing.py b/synapse/handlers/typing.py
index bcac3372a..a4cd8b8f0 100644
--- a/synapse/handlers/typing.py
+++ b/synapse/handlers/typing.py
@@ -26,7 +26,7 @@ from synapse.metrics.background_process_metrics import (
 )
 from synapse.replication.tcp.streams import TypingStream
 from synapse.streams import EventSource
-from synapse.types import JsonDict, Requester, StreamKeyType, UserID, get_domain_from_id
+from synapse.types import JsonDict, Requester, StreamKeyType, UserID
 from synapse.util.caches.stream_change_cache import StreamChangeCache
 from synapse.util.metrics import Measure
 from synapse.util.wheel_timer import WheelTimer
@@ -362,8 +362,9 @@ class TypingWriterHandler(FollowerTypingHandler):
             )
             return
 
-        users = await self.store.get_users_in_room(room_id)
-        domains = {get_domain_from_id(u) for u in users}
+        domains = await self._storage_controllers.state.get_current_hosts_in_room(
+            room_id
+        )
 
         if self.server_name in domains:
             logger.info("Got typing update from %s: %r", user_id, content)
diff --git a/synapse/http/server.py b/synapse/http/server.py
index 19f42159b..6068a94b4 100644
--- a/synapse/http/server.py
+++ b/synapse/http/server.py
@@ -33,7 +33,6 @@ from typing import (
     Optional,
     Pattern,
     Tuple,
-    TypeVar,
     Union,
 )
 
@@ -64,6 +63,7 @@ from synapse.logging.context import defer_to_thread, preserve_fn, run_in_backgro
 from synapse.logging.opentracing import active_span, start_active_span, trace_servlet
 from synapse.util import json_encoder
 from synapse.util.caches import intern_dict
+from synapse.util.cancellation import is_function_cancellable
 from synapse.util.iterutils import chunk_seq
 
 if TYPE_CHECKING:
@@ -94,68 +94,6 @@ HTML_ERROR_TEMPLATE = """
 
 HTTP_STATUS_REQUEST_CANCELLED = 499
 
-F = TypeVar("F", bound=Callable[..., Any])
-
-
-_cancellable_method_names = frozenset(
-    {
-        # `RestServlet`, `BaseFederationServlet` and `BaseFederationServerServlet`
-        # methods
-        "on_GET",
-        "on_PUT",
-        "on_POST",
-        "on_DELETE",
-        # `_AsyncResource`, `DirectServeHtmlResource` and `DirectServeJsonResource`
-        # methods
-        "_async_render_GET",
-        "_async_render_PUT",
-        "_async_render_POST",
-        "_async_render_DELETE",
-        "_async_render_OPTIONS",
-        # `ReplicationEndpoint` methods
-        "_handle_request",
-    }
-)
-
-
-def cancellable(method: F) -> F:
-    """Marks a servlet method as cancellable.
-
-    Methods with this decorator will be cancelled if the client disconnects before we
-    finish processing the request.
-
-    During cancellation, `Deferred.cancel()` will be invoked on the `Deferred` wrapping
-    the method. The `cancel()` call will propagate down to the `Deferred` that is
-    currently being waited on. That `Deferred` will raise a `CancelledError`, which will
-    propagate up, as per normal exception handling.
-
-    Before applying this decorator to a new endpoint, you MUST recursively check
-    that all `await`s in the function are on `async` functions or `Deferred`s that
-    handle cancellation cleanly, otherwise a variety of bugs may occur, ranging from
-    premature logging context closure, to stuck requests, to database corruption.
-
-    Usage:
-        class SomeServlet(RestServlet):
-            @cancellable
-            async def on_GET(self, request: SynapseRequest) -> ...:
-                ...
-    """
-    if method.__name__ not in _cancellable_method_names and not any(
-        method.__name__.startswith(prefix) for prefix in _cancellable_method_names
-    ):
-        raise ValueError(
-            "@cancellable decorator can only be applied to servlet methods."
-        )
-
-    method.cancellable = True  # type: ignore[attr-defined]
-    return method
-
-
-def is_method_cancellable(method: Callable[..., Any]) -> bool:
-    """Checks whether a servlet method has the `@cancellable` flag."""
-    return getattr(method, "cancellable", False)
-
-
 def return_json_error(
     f: failure.Failure, request: SynapseRequest, config: Optional[HomeServerConfig]
 ) -> None:
@@ -389,7 +327,7 @@ class _AsyncResource(resource.Resource, metaclass=abc.ABCMeta):
         method_handler = getattr(self, "_async_render_%s" % (request_method,), None)
         if method_handler:
-            request.is_render_cancellable = is_method_cancellable(method_handler)
+            request.is_render_cancellable = is_function_cancellable(method_handler)
 
             raw_callback_return = method_handler(request)
 
@@ -551,7 +489,7 @@ class JsonResource(DirectServeJsonResource):
     async def _async_render(self, request: SynapseRequest) -> Tuple[int, Any]:
         callback, servlet_classname, group_dict = self._get_handler_for_request(request)
 
-        request.is_render_cancellable = is_method_cancellable(callback)
+        request.is_render_cancellable = is_function_cancellable(callback)
 
         # Make sure we have an appropriate name for this handler in prometheus
         # (rather than the default of JsonResource).
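
Only the import path of the `@cancellable` machinery changes here; behaviour and usage stay exactly as the removed docstring above describes. A minimal usage sketch with the new `synapse.util.cancellation` import (the servlet itself is hypothetical):

```python
from typing import Tuple

from synapse.http.servlet import RestServlet
from synapse.http.site import SynapseRequest
from synapse.types import JsonDict
from synapse.util.cancellation import cancellable


class SomeServlet(RestServlet):
    """Hypothetical servlet; only the decorator usage matters here."""

    @cancellable
    async def on_GET(self, request: SynapseRequest) -> Tuple[int, JsonDict]:
        # Marked cancellable: if the client disconnects, the Deferred wrapping
        # this handler is cancelled, raising CancelledError at the current await.
        return 200, {}
```
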
diff --git a/synapse/metrics/__init__.py b/synapse/metrics/__init__.py
index 496fce2ec..c3d3daf87 100644
--- a/synapse/metrics/__init__.py
+++ b/synapse/metrics/__init__.py
@@ -46,12 +46,12 @@ from twisted.python.threadpool import ThreadPool
 
 # This module is imported for its side effects; flake8 needn't warn that it's unused.
 import synapse.metrics._reactor_metrics  # noqa: F401
-from synapse.metrics._exposition import (
+from synapse.metrics._gc import MIN_TIME_BETWEEN_GCS, install_gc_manager
+from synapse.metrics._legacy_exposition import (
     MetricsResource,
     generate_latest,
     start_http_server,
 )
-from synapse.metrics._gc import MIN_TIME_BETWEEN_GCS, install_gc_manager
 from synapse.metrics._types import Collector
 from synapse.util import SYNAPSE_VERSION
 
diff --git a/synapse/metrics/_exposition.py b/synapse/metrics/_legacy_exposition.py
similarity index 85%
rename from synapse/metrics/_exposition.py
rename to synapse/metrics/_legacy_exposition.py
index 353d0a63b..ff640a49a 100644
--- a/synapse/metrics/_exposition.py
+++ b/synapse/metrics/_legacy_exposition.py
@@ -80,7 +80,27 @@ def sample_line(line: Sample, name: str) -> str:
     return "{}{} {}{}\n".format(name, labelstr, floatToGoString(line.value), timestamp)
 
 
+# Mapping from new metric names to legacy metric names.
+# We translate these back to their old names when exposing them through our
+# legacy vendored exporter.
+# Only this legacy exposition module applies these name changes.
+LEGACY_METRIC_NAMES = {
+    "synapse_util_caches_cache_hits": "synapse_util_caches_cache:hits",
+    "synapse_util_caches_cache_size": "synapse_util_caches_cache:size",
+    "synapse_util_caches_cache_evicted_size": "synapse_util_caches_cache:evicted_size",
+    "synapse_util_caches_cache_total": "synapse_util_caches_cache:total",
+    "synapse_util_caches_response_cache_size": "synapse_util_caches_response_cache:size",
+    "synapse_util_caches_response_cache_hits": "synapse_util_caches_response_cache:hits",
+    "synapse_util_caches_response_cache_evicted_size": "synapse_util_caches_response_cache:evicted_size",
+    "synapse_util_caches_response_cache_total": "synapse_util_caches_response_cache:total",
+}
+
+
 def generate_latest(registry: CollectorRegistry, emit_help: bool = False) -> bytes:
+    """
+    Generate metrics in legacy format. Modern metrics are generated directly
+    by prometheus-client.
+    """
 
     # Trigger the cache metrics to be rescraped, which updates the common
     # metrics but do not produce metrics themselves
@@ -94,7 +114,8 @@ def generate_latest(registry: CollectorRegistry, emit_help: bool = False) -> byt
             # No samples, don't bother.
             continue
 
-        mname = metric.name
+        # Translate to legacy metric name if it has one.
+        mname = LEGACY_METRIC_NAMES.get(metric.name, metric.name)
         mnewname = metric.name
         mtype = metric.type
 
@@ -124,7 +145,7 @@ def generate_latest(registry: CollectorRegistry, emit_help: bool = False) -> byt
         om_samples: Dict[str, List[str]] = {}
         for s in metric.samples:
             for suffix in ["_created", "_gsum", "_gcount"]:
-                if s.name == metric.name + suffix:
+                if s.name == mname + suffix:
                     # OpenMetrics specific sample, put in a gauge at the end.
                     # (these come from gaugehistograms which don't get renamed,
                     # so no need to faff with mnewname)
@@ -140,12 +161,12 @@ def generate_latest(registry: CollectorRegistry, emit_help: bool = False) -> byt
             if emit_help:
                 output.append(
                     "# HELP {}{} {}\n".format(
-                        metric.name,
+                        mname,
                         suffix,
                         metric.documentation.replace("\\", r"\\").replace("\n", r"\n"),
                     )
                 )
-            output.append(f"# TYPE {metric.name}{suffix} gauge\n")
+            output.append(f"# TYPE {mname}{suffix} gauge\n")
             output.extend(lines)
 
     # Get rid of the weird colon things while we're at it
@@ -170,11 +191,12 @@ def generate_latest(registry: CollectorRegistry, emit_help: bool = False) -> byt
         # Get rid of the OpenMetrics specific samples (we should already have
         # dealt with them above anyway.)
         for suffix in ["_created", "_gsum", "_gcount"]:
-            if s.name == metric.name + suffix:
+            if s.name == mname + suffix:
                 break
         else:
+            sample_name = LEGACY_METRIC_NAMES.get(s.name, s.name)
             output.append(
-                sample_line(s, s.name.replace(":total", "").replace(":", "_"))
+                sample_line(s, sample_name.replace(":total", "").replace(":", "_"))
             )
 
     return "".join(output).encode("utf-8")
diff --git a/synapse/metrics/common_usage_metrics.py b/synapse/metrics/common_usage_metrics.py
new file mode 100644
index 000000000..0a22ea3d9
--- /dev/null
+++ b/synapse/metrics/common_usage_metrics.py
@@ -0,0 +1,79 @@
+# Copyright 2022 The Matrix.org Foundation C.I.C
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+from typing import TYPE_CHECKING
+
+import attr
+
+from synapse.metrics.background_process_metrics import run_as_background_process
+
+if TYPE_CHECKING:
+    from synapse.server import HomeServer
+
+from prometheus_client import Gauge
+
+# Gauge to expose daily active users metrics
+current_dau_gauge = Gauge(
+    "synapse_admin_daily_active_users",
+    "Current daily active users count",
+)
+
+
+@attr.s(auto_attribs=True)
+class CommonUsageMetrics:
+    """Usage metrics shared between the phone home stats and the prometheus exporter."""
+
+    daily_active_users: int
+
+
+class CommonUsageMetricsManager:
+    """Collects common usage metrics."""
+
+    def __init__(self, hs: "HomeServer") -> None:
+        self._store = hs.get_datastores().main
+        self._clock = hs.get_clock()
+
+    async def get_metrics(self) -> CommonUsageMetrics:
+        """Get the CommonUsageMetrics object. If no collection has happened yet, do it
+        before returning the metrics.
+
+        Returns:
+            The CommonUsageMetrics object to read common metrics from.
+        """
+        return await self._collect()
+
+    async def setup(self) -> None:
+        """Keep the gauges for common usage metrics up to date."""
+        await self._update_gauges()
+        self._clock.looping_call(
+            run_as_background_process,
+            5 * 60 * 1000,
+            desc="common_usage_metrics_update_gauges",
+            func=self._update_gauges,
+        )
+
+    async def _collect(self) -> CommonUsageMetrics:
+        """Collect the common metrics and either create the CommonUsageMetrics object to
+        use if it doesn't exist yet, or update it.
+ """ + dau_count = await self._store.count_daily_users() + + return CommonUsageMetrics( + daily_active_users=dau_count, + ) + + async def _update_gauges(self) -> None: + """Update the Prometheus gauges.""" + metrics = await self._collect() + + current_dau_gauge.set(float(metrics.daily_active_users)) diff --git a/synapse/push/bulk_push_rule_evaluator.py b/synapse/push/bulk_push_rule_evaluator.py index ccd512be5..d1caf8a0f 100644 --- a/synapse/push/bulk_push_rule_evaluator.py +++ b/synapse/push/bulk_push_rule_evaluator.py @@ -262,7 +262,12 @@ class BulkPushRuleEvaluator: # This can happen due to out of band memberships return - count_as_unread = _should_count_as_unread(event, context) + # Disable counting as unread unless the experimental configuration is + # enabled, as it can cause additional (unwanted) rows to be added to the + # event_push_actions table. + count_as_unread = False + if self.hs.config.experimental.msc2654_enabled: + count_as_unread = _should_count_as_unread(event, context) rules_by_user = await self._get_rules_for_event(event) actions_by_user: Dict[str, Collection[Union[Mapping, str]]] = {} diff --git a/synapse/replication/http/_base.py b/synapse/replication/http/_base.py index 561ad5bf0..acb0bd18f 100644 --- a/synapse/replication/http/_base.py +++ b/synapse/replication/http/_base.py @@ -26,12 +26,13 @@ from twisted.web.server import Request from synapse.api.errors import HttpResponseException, SynapseError from synapse.http import RequestTimedOutError -from synapse.http.server import HttpServer, is_method_cancellable +from synapse.http.server import HttpServer from synapse.http.site import SynapseRequest from synapse.logging import opentracing from synapse.logging.opentracing import trace_with_opname from synapse.types import JsonDict from synapse.util.caches.response_cache import ResponseCache +from synapse.util.cancellation import is_function_cancellable from synapse.util.stringutils import random_string if TYPE_CHECKING: @@ -311,7 +312,7 @@ class ReplicationEndpoint(metaclass=abc.ABCMeta): url_args = list(self.PATH_ARGS) method = self.METHOD - if self.CACHE and is_method_cancellable(self._handle_request): + if self.CACHE and is_function_cancellable(self._handle_request): raise Exception( f"{self.__class__.__name__} has been marked as cancellable, but CACHE " "is set. The cancellable flag would have no effect." @@ -359,6 +360,6 @@ class ReplicationEndpoint(metaclass=abc.ABCMeta): # The `@cancellable` decorator may be applied to `_handle_request`. But we # told `HttpServer.register_paths` that our handler is `_check_auth_and_handle`, # so we have to set up the cancellable flag ourselves. 
-        request.is_render_cancellable = is_method_cancellable(self._handle_request)
+        request.is_render_cancellable = is_function_cancellable(self._handle_request)
 
         return await self._handle_request(request, **kwargs)
diff --git a/synapse/replication/slave/storage/push_rule.py b/synapse/replication/slave/storage/push_rule.py
index 52ee3f7e5..5e65eaf1e 100644
--- a/synapse/replication/slave/storage/push_rule.py
+++ b/synapse/replication/slave/storage/push_rule.py
@@ -31,6 +31,5 @@ class SlavedPushRuleStore(SlavedEventStore, PushRulesWorkerStore):
             self._push_rules_stream_id_gen.advance(instance_name, token)
             for row in rows:
                 self.get_push_rules_for_user.invalidate((row.user_id,))
-                self.get_push_rules_enabled_for_user.invalidate((row.user_id,))
                 self.push_rules_stream_cache.entity_has_changed(row.user_id, token)
         return super().process_replication_rows(stream_name, instance_name, token, rows)
diff --git a/synapse/replication/tcp/client.py b/synapse/replication/tcp/client.py
index 1ed7230e3..e4f2201c9 100644
--- a/synapse/replication/tcp/client.py
+++ b/synapse/replication/tcp/client.py
@@ -416,10 +416,7 @@ class FederationSenderHandler:
             if not self._is_mine_id(receipt.user_id):
                 continue
             # Private read receipts never get sent over federation.
-            if receipt.receipt_type in (
-                ReceiptTypes.READ_PRIVATE,
-                ReceiptTypes.UNSTABLE_READ_PRIVATE,
-            ):
+            if receipt.receipt_type == ReceiptTypes.READ_PRIVATE:
                 continue
             receipt_info = ReadReceipt(
                 receipt.room_id,
diff --git a/synapse/replication/tcp/handler.py b/synapse/replication/tcp/handler.py
index e1cbfa50e..0f166d16a 100644
--- a/synapse/replication/tcp/handler.py
+++ b/synapse/replication/tcp/handler.py
@@ -35,7 +35,6 @@ from twisted.internet.protocol import ReconnectingClientFactory
 
 from synapse.metrics import LaterGauge
 from synapse.metrics.background_process_metrics import run_as_background_process
-from synapse.replication.tcp.client import DirectTcpReplicationClientFactory
 from synapse.replication.tcp.commands import (
     ClearUserSyncsCommand,
     Command,
@@ -332,46 +331,31 @@ class ReplicationCommandHandler:
 
     def start_replication(self, hs: "HomeServer") -> None:
         """Helper method to start replication."""
-        if hs.config.redis.redis_enabled:
-            from synapse.replication.tcp.redis import (
-                RedisDirectTcpReplicationClientFactory,
-            )
+        from synapse.replication.tcp.redis import RedisDirectTcpReplicationClientFactory
 
-            # First let's ensure that we have a ReplicationStreamer started.
-            hs.get_replication_streamer()
+        # First let's ensure that we have a ReplicationStreamer started.
+        hs.get_replication_streamer()
 
-            # We need two connections to redis, one for the subscription stream and
-            # one to send commands to (as you can't send further redis commands to a
-            # connection after SUBSCRIBE is called).
+        # We need two connections to redis, one for the subscription stream and
+        # one to send commands to (as you can't send further redis commands to a
+        # connection after SUBSCRIBE is called).
 
-            # First create the connection for sending commands.
-            outbound_redis_connection = hs.get_outbound_redis_connection()
+        # First create the connection for sending commands.
+        outbound_redis_connection = hs.get_outbound_redis_connection()
 
-            # Now create the factory/connection for the subscription stream.
- self._factory = RedisDirectTcpReplicationClientFactory( - hs, - outbound_redis_connection, - channel_names=self._channels_to_subscribe_to, - ) - hs.get_reactor().connectTCP( - hs.config.redis.redis_host, - hs.config.redis.redis_port, - self._factory, - timeout=30, - bindAddress=None, - ) - else: - client_name = hs.get_instance_name() - self._factory = DirectTcpReplicationClientFactory(hs, client_name, self) - host = hs.config.worker.worker_replication_host - port = hs.config.worker.worker_replication_port - hs.get_reactor().connectTCP( - host, - port, - self._factory, - timeout=30, - bindAddress=None, - ) + # Now create the factory/connection for the subscription stream. + self._factory = RedisDirectTcpReplicationClientFactory( + hs, + outbound_redis_connection, + channel_names=self._channels_to_subscribe_to, + ) + hs.get_reactor().connectTCP( + hs.config.redis.redis_host, + hs.config.redis.redis_port, + self._factory, + timeout=30, + bindAddress=None, + ) def get_streams(self) -> Dict[str, Stream]: """Get a map from stream name to all streams.""" diff --git a/synapse/rest/client/notifications.py b/synapse/rest/client/notifications.py index a73322a6a..61268e3af 100644 --- a/synapse/rest/client/notifications.py +++ b/synapse/rest/client/notifications.py @@ -62,7 +62,6 @@ class NotificationsServlet(RestServlet): [ ReceiptTypes.READ, ReceiptTypes.READ_PRIVATE, - ReceiptTypes.UNSTABLE_READ_PRIVATE, ], ) diff --git a/synapse/rest/client/read_marker.py b/synapse/rest/client/read_marker.py index 6a3bf2cab..ad0d4af70 100644 --- a/synapse/rest/client/read_marker.py +++ b/synapse/rest/client/read_marker.py @@ -45,8 +45,6 @@ class ReadMarkerRestServlet(RestServlet): ReceiptTypes.FULLY_READ, ReceiptTypes.READ_PRIVATE, } - if hs.config.experimental.msc2285_enabled: - self._known_receipt_types.add(ReceiptTypes.UNSTABLE_READ_PRIVATE) async def on_POST( self, request: SynapseRequest, room_id: str diff --git a/synapse/rest/client/receipts.py b/synapse/rest/client/receipts.py index 77593d574..cba8cbc1b 100644 --- a/synapse/rest/client/receipts.py +++ b/synapse/rest/client/receipts.py @@ -49,8 +49,6 @@ class ReceiptRestServlet(RestServlet): ReceiptTypes.READ_PRIVATE, ReceiptTypes.FULLY_READ, } - if hs.config.experimental.msc2285_enabled: - self._known_receipt_types.add(ReceiptTypes.UNSTABLE_READ_PRIVATE) async def on_POST( self, request: SynapseRequest, room_id: str, receipt_type: str, event_id: str diff --git a/synapse/rest/client/room.py b/synapse/rest/client/room.py index 05c87b71a..0511bb4be 100644 --- a/synapse/rest/client/room.py +++ b/synapse/rest/client/room.py @@ -17,6 +17,7 @@ import logging import re from enum import Enum +from http import HTTPStatus from typing import TYPE_CHECKING, Awaitable, Dict, List, Optional, Tuple from urllib import parse as urlparse @@ -37,7 +38,7 @@ from synapse.api.errors import ( ) from synapse.api.filtering import Filter from synapse.events.utils import format_event_for_client_v2 -from synapse.http.server import HttpServer, cancellable +from synapse.http.server import HttpServer from synapse.http.servlet import ( ResolveRoomIdMixin, RestServlet, @@ -57,6 +58,7 @@ from synapse.storage.state import StateFilter from synapse.streams.config import PaginationConfig from synapse.types import JsonDict, StreamToken, ThirdPartyInstanceID, UserID from synapse.util import json_decoder +from synapse.util.cancellation import cancellable from synapse.util.stringutils import parse_and_validate_server_name, random_string if TYPE_CHECKING: @@ -950,7 +952,16 @@ class 
RoomMembershipRestServlet(TransactionRestServlet):
             # cheekily send invalid bodies.
             content = {}

-        if membership_action == "invite" and self._has_3pid_invite_keys(content):
+        if membership_action == "invite" and all(
+            key in content for key in ("medium", "address")
+        ):
+            if not all(key in content for key in ("id_server", "id_access_token")):
+                raise SynapseError(
+                    HTTPStatus.BAD_REQUEST,
+                    "`id_server` and `id_access_token` are required when doing 3pid invite",
+                    Codes.MISSING_PARAM,
+                )
+
             try:
                 await self.room_member_handler.do_3pid_invite(
                     room_id,
@@ -960,7 +971,7 @@ class RoomMembershipRestServlet(TransactionRestServlet):
                     content["id_server"],
                     requester,
                     txn_id,
-                    content.get("id_access_token"),
+                    content["id_access_token"],
                 )
             except ShadowBanError:
                 # Pretend the request succeeded.
@@ -997,12 +1008,6 @@ class RoomMembershipRestServlet(TransactionRestServlet):

         return 200, return_value

-    def _has_3pid_invite_keys(self, content: JsonDict) -> bool:
-        for key in {"id_server", "medium", "address"}:
-            if key not in content:
-                return False
-        return True
-
     def on_PUT(
         self, request: SynapseRequest, room_id: str, membership_action: str, txn_id: str
     ) -> Awaitable[Tuple[int, JsonDict]]:
diff --git a/synapse/rest/client/versions.py b/synapse/rest/client/versions.py
index c9a830cba..c516cda95 100644
--- a/synapse/rest/client/versions.py
+++ b/synapse/rest/client/versions.py
@@ -95,7 +95,6 @@ class VersionsRestServlet(RestServlet):
                     "org.matrix.msc3026.busy_presence": self.config.experimental.msc3026_enabled,
                     # Supports receiving private read receipts as per MSC2285
                     "org.matrix.msc2285.stable": True,  # TODO: Remove when MSC2285 becomes a part of the spec
-                    "org.matrix.msc2285": self.config.experimental.msc2285_enabled,
                     # Supports filtering of /publicRooms by room type as per MSC3827
                     "org.matrix.msc3827.stable": True,
                     # Adds support for importing historical messages as per MSC2716
diff --git a/synapse/rest/key/v2/remote_key_resource.py b/synapse/rest/key/v2/remote_key_resource.py
index f59715758..7f8ad2956 100644
--- a/synapse/rest/key/v2/remote_key_resource.py
+++ b/synapse/rest/key/v2/remote_key_resource.py
@@ -135,13 +135,6 @@ class RemoteKey(DirectServeJsonResource):

         store_queries = []
         for server_name, key_ids in query.items():
-            if (
-                self.federation_domain_whitelist is not None
-                and server_name not in self.federation_domain_whitelist
-            ):
-                logger.debug("Federation denied with %s", server_name)
-                continue
-
             if not key_ids:
                 key_ids = (None,)
             for key_id in key_ids:
@@ -153,21 +146,28 @@ class RemoteKey(DirectServeJsonResource):

         time_now_ms = self.clock.time_msec()

-        # Note that the value is unused.
+        # Map server_name->key_id->int. Note that the value of the int is unused.
+        # XXX: why don't we just use a set?
         cache_misses: Dict[str, Dict[str, int]] = {}
         for (server_name, key_id, _), key_results in cached.items():
             results = [(result["ts_added_ms"], result) for result in key_results]

-            if not results and key_id is not None:
-                cache_misses.setdefault(server_name, {})[key_id] = 0
+            if key_id is None:
+                # all keys were requested. Just return what we have without worrying
+                # about validity
+                for _, result in results:
+                    # Cast to bytes since postgresql returns a memoryview.
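Stepping back to the `RoomMembershipRestServlet` change at the top of this hunk: the inlined check makes `id_server` and `id_access_token` hard requirements once a body looks like a third-party invite. A condensed sketch of that validation, with `ValueError` standing in for `SynapseError`:

```python
def validate_3pid_invite(content: dict) -> bool:
    """Return True if `content` is a (valid) 3pid invite; raise if malformed."""
    if not all(key in content for key in ("medium", "address")):
        return False  # an ordinary invite by user ID, not a 3pid invite
    if not all(key in content for key in ("id_server", "id_access_token")):
        # Mirrors the 400 / MISSING_PARAM error raised above.
        raise ValueError(
            "`id_server` and `id_access_token` are required when doing 3pid invite"
        )
    return True

assert validate_3pid_invite({"user_id": "@alice:example.org"}) is False
assert validate_3pid_invite(
    {
        "medium": "email",
        "address": "alice@example.org",
        "id_server": "id.example.org",
        "id_access_token": "token",
    }
)
```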
+ json_results.add(bytes(result["key_json"])) continue - if key_id is not None: + miss = False + if not results: + miss = True + else: ts_added_ms, most_recent_result = max(results) ts_valid_until_ms = most_recent_result["ts_valid_until_ms"] req_key = query.get(server_name, {}).get(key_id, {}) req_valid_until = req_key.get("minimum_valid_until_ts") - miss = False if req_valid_until is not None: if ts_valid_until_ms < req_valid_until: logger.debug( @@ -211,19 +211,20 @@ class RemoteKey(DirectServeJsonResource): ts_valid_until_ms, time_now_ms, ) - - if miss: - cache_misses.setdefault(server_name, {})[key_id] = 0 # Cast to bytes since postgresql returns a memoryview. json_results.add(bytes(most_recent_result["key_json"])) - else: - for _, result in results: - # Cast to bytes since postgresql returns a memoryview. - json_results.add(bytes(result["key_json"])) + + if miss and query_remote_on_cache_miss: + # only bother attempting to fetch keys from servers on our whitelist + if ( + self.federation_domain_whitelist is None + or server_name in self.federation_domain_whitelist + ): + cache_misses.setdefault(server_name, {})[key_id] = 0 # If there is a cache miss, request the missing keys, then recurse (and # ensure the result is sent). - if cache_misses and query_remote_on_cache_miss: + if cache_misses: await yieldable_gather_results( lambda t: self.fetcher.get_keys(*t), ( diff --git a/synapse/rest/media/v1/_base.py b/synapse/rest/media/v1/_base.py index 477b0ca6c..f3018b922 100644 --- a/synapse/rest/media/v1/_base.py +++ b/synapse/rest/media/v1/_base.py @@ -254,30 +254,32 @@ async def respond_with_responder( file_size: Size in bytes of the media. If not known it should be None upload_name: The name of the requested file, if any. """ - if request._disconnected: - logger.warning( - "Not sending response to request %s, already disconnected.", request - ) - return - if not responder: respond_404(request) return - logger.debug("Responding to media request with responder %s", responder) - add_file_headers(request, media_type, file_size, upload_name) - try: - with responder: - await responder.write_to_consumer(request) - except Exception as e: - # The majority of the time this will be due to the client having gone - # away. Unfortunately, Twisted simply throws a generic exception at us - # in that case. - logger.warning("Failed to write to consumer: %s %s", type(e), e) + # If we have a responder we *must* use it as a context manager. + with responder: + if request._disconnected: + logger.warning( + "Not sending response to request %s, already disconnected.", request + ) + return - # Unregister the producer, if it has one, so Twisted doesn't complain - if request.producer: - request.unregisterProducer() + logger.debug("Responding to media request with responder %s", responder) + add_file_headers(request, media_type, file_size, upload_name) + try: + + await responder.write_to_consumer(request) + except Exception as e: + # The majority of the time this will be due to the client having gone + # away. Unfortunately, Twisted simply throws a generic exception at us + # in that case. 
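The `RemoteKey` rework above inverts the old order of operations: every requested key is looked up in the cache first, and the federation whitelist is only consulted when deciding whether a miss should be fetched upstream. The decision logic in condensed form (simplified stand-in helpers, not the real resource):

```python
from typing import Optional, Set

def is_cache_miss(
    cached_valid_until_ms: Optional[int], req_valid_until: Optional[int]
) -> bool:
    if cached_valid_until_ms is None:
        return True  # no cached results at all
    if req_valid_until is None:
        return False  # requester does not care about freshness
    return cached_valid_until_ms < req_valid_until

def should_fetch(server_name: str, whitelist: Optional[Set[str]], miss: bool) -> bool:
    # Only bother attempting to fetch keys from servers on our whitelist.
    return miss and (whitelist is None or server_name in whitelist)

assert should_fetch("other.org", None, is_cache_miss(1000, 2000))
assert not should_fetch("other.org", {"friend.org"}, True)
```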
+ logger.warning("Failed to write to consumer: %s %s", type(e), e) + + # Unregister the producer, if it has one, so Twisted doesn't complain + if request.producer: + request.unregisterProducer() finish_request(request) diff --git a/synapse/rest/media/v1/media_repository.py b/synapse/rest/media/v1/media_repository.py index 7435fd913..9dd3c8d4b 100644 --- a/synapse/rest/media/v1/media_repository.py +++ b/synapse/rest/media/v1/media_repository.py @@ -64,7 +64,6 @@ if TYPE_CHECKING: logger = logging.getLogger(__name__) - # How often to run the background job to update the "recently accessed" # attribute of local and remote media. UPDATE_RECENTLY_ACCESSED_TS = 60 * 1000 # 1 minute diff --git a/synapse/rest/media/v1/preview_url_resource.py b/synapse/rest/media/v1/preview_url_resource.py index 82121f42c..3959fb440 100644 --- a/synapse/rest/media/v1/preview_url_resource.py +++ b/synapse/rest/media/v1/preview_url_resource.py @@ -732,10 +732,6 @@ class PreviewUrlResource(DirectServeJsonResource): logger.debug("Running url preview cache expiry") - if not (await self.store.db_pool.updates.has_completed_background_updates()): - logger.debug("Still running DB updates; skipping url preview cache expiry") - return - def try_remove_parent_dirs(dirs: Iterable[str]) -> None: """Attempt to remove the given chain of parent directories diff --git a/synapse/server.py b/synapse/server.py index c2e55bf0b..5a99c0b34 100644 --- a/synapse/server.py +++ b/synapse/server.py @@ -105,6 +105,7 @@ from synapse.handlers.typing import FollowerTypingHandler, TypingWriterHandler from synapse.handlers.user_directory import UserDirectoryHandler from synapse.http.client import InsecureInterceptableContextFactory, SimpleHttpClient from synapse.http.matrixfederationclient import MatrixFederationHttpClient +from synapse.metrics.common_usage_metrics import CommonUsageMetricsManager from synapse.module_api import ModuleApi from synapse.notifier import Notifier from synapse.push.bulk_push_rule_evaluator import BulkPushRuleEvaluator @@ -829,3 +830,8 @@ class HomeServer(metaclass=abc.ABCMeta): self.config.ratelimiting.rc_message, self.config.ratelimiting.rc_admin_redaction, ) + + @cache_in_self + def get_common_usage_metrics_manager(self) -> CommonUsageMetricsManager: + """Usage metrics shared between phone home stats and the prometheus exporter.""" + return CommonUsageMetricsManager(self) diff --git a/synapse/server_notices/server_notices_manager.py b/synapse/server_notices/server_notices_manager.py index 70d054a8f..564e3705c 100644 --- a/synapse/server_notices/server_notices_manager.py +++ b/synapse/server_notices/server_notices_manager.py @@ -102,6 +102,10 @@ class ServerNoticesManager: Returns: The room's ID, or None if no room could be found. """ + # If there is no server notices MXID, then there is no server notices room + if self.server_notices_mxid is None: + return None + rooms = await self._store.get_rooms_for_local_user_where_membership_is( user_id, [Membership.INVITE, Membership.JOIN] ) @@ -111,8 +115,10 @@ class ServerNoticesManager: # be joined. This is kinda deliberate, in that if somebody somehow # manages to invite the system user to a room, that doesn't make it # the server notices room. 
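The media change above hinges on the responder being a context manager whose `__exit__` releases the underlying file handle, which is why the early-disconnect check now lives inside the `with` block. A toy illustration of that contract:

```python
import io
from types import TracebackType
from typing import Optional, Type

class ToyResponder:
    """Stands in for Synapse's media Responder; purely illustrative."""

    def __init__(self) -> None:
        self.source = io.BytesIO(b"media bytes")  # pretend this is an open file

    def __enter__(self) -> "ToyResponder":
        return self

    def __exit__(
        self,
        exc_type: Optional[Type[BaseException]],
        exc: Optional[BaseException],
        tb: Optional[TracebackType],
    ) -> None:
        # Runs on success, on exceptions, and on early `return` alike, so the
        # handle is released even when the request has already disconnected.
        self.source.close()

with ToyResponder() as responder:
    pass  # even a bail-out on this line would still trigger __exit__

assert responder.source.closed
```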
-            user_ids = await self._store.get_users_in_room(room.room_id)
-            if len(user_ids) <= 2 and self.server_notices_mxid in user_ids:
+            is_server_notices_room = await self._store.check_local_user_in_room(
+                user_id=self.server_notices_mxid, room_id=room.room_id
+            )
+            if is_server_notices_room:
                 # we found a room which our user shares with the system notice
                 # user
                 return room.room_id
diff --git a/synapse/state/__init__.py b/synapse/state/__init__.py
index c355e4f98..3787d35b2 100644
--- a/synapse/state/__init__.py
+++ b/synapse/state/__init__.py
@@ -44,7 +44,6 @@ from synapse.logging.context import ContextResourceUsage
 from synapse.replication.http.state import ReplicationUpdateCurrentStateRestServlet
 from synapse.state import v1, v2
 from synapse.storage.databases.main.events_worker import EventRedactBehaviour
-from synapse.storage.roommember import ProfileInfo
 from synapse.storage.state import StateFilter
 from synapse.types import StateMap
 from synapse.util.async_helpers import Linearizer
@@ -210,11 +209,11 @@ class StateHandler:
         ret = await self.resolve_state_groups_for_events(room_id, event_ids)
         return await ret.get_state(self._state_storage_controller, state_filter)

-    async def get_current_users_in_room(
+    async def get_current_user_ids_in_room(
         self, room_id: str, latest_event_ids: List[str]
-    ) -> Dict[str, ProfileInfo]:
+    ) -> Set[str]:
         """
-        Get the users who are currently in a room.
+        Get the user IDs who are currently in a room.

         Note: This is much slower than using the equivalent method
         `DataStore.get_users_in_room` or `DataStore.get_users_in_room_with_profiles`,
@@ -225,15 +224,15 @@ class StateHandler:
             room_id: The ID of the room.
             latest_event_ids: Precomputed list of latest event IDs. Will be computed if None.
         Returns:
-            Dictionary of user IDs to their profileinfo.
+            Set of user IDs in the room.
         """

         assert latest_event_ids is not None

-        logger.debug("calling resolve_state_groups from get_current_users_in_room")
+        logger.debug("calling resolve_state_groups from get_current_user_ids_in_room")
         entry = await self.resolve_state_groups_for_events(room_id, latest_event_ids)
         state = await entry.get_state(self._state_storage_controller, StateFilter.all())

-        return await self.store.get_joined_users_from_state(room_id, state, entry)
+        return await self.store.get_joined_user_ids_from_state(room_id, state)

     async def get_hosts_in_room_at_events(
         self, room_id: str, event_ids: Collection[str]
diff --git a/synapse/state/v2.py b/synapse/state/v2.py
index cf3045f82..af03851c7 100644
--- a/synapse/state/v2.py
+++ b/synapse/state/v2.py
@@ -271,40 +271,41 @@ async def _get_power_level_for_sender(
 async def _get_auth_chain_difference(
     room_id: str,
     state_sets: Sequence[Mapping[Any, str]],
-    event_map: Dict[str, EventBase],
+    unpersisted_events: Dict[str, EventBase],
     state_res_store: StateResolutionStore,
 ) -> Set[str]:
     """Compare the auth chains of each state set and return the set of events
-    that only appear in some but not all of the auth chains.
+    that only appear in some, but not all of the auth chains.

     Args:
-        state_sets
-        event_map
-        state_res_store
+        state_sets: The input state sets we are trying to resolve across.
+        unpersisted_events: A map from event ID to EventBase containing all unpersisted
+            events involved in this resolution.
+        state_res_store:

     Returns:
-        Set of event IDs
+        The auth difference of the given state sets, as a set of event IDs.
""" # The `StateResolutionStore.get_auth_chain_difference` function assumes that # all events passed to it (and their auth chains) have been persisted - # previously. This is not the case for any events in the `event_map`, and so - # we need to manually handle those events. + # previously. We need to manually handle any other events that are yet to be + # persisted. # - # We do this by: - # 1. calculating the auth chain difference for the state sets based on the - # events in `event_map` alone - # 2. replacing any events in the state_sets that are also in `event_map` - # with their auth events (recursively), and then calling - # `store.get_auth_chain_difference` as normal - # 3. adding the results of 1 and 2 together. + # We do this in three steps: + # 1. Compute the set of unpersisted events belonging to the auth difference. + # 2. Replacing any unpersisted events in the state_sets with their auth events, + # recursively, until the state_sets contain only persisted events. + # Then we call `store.get_auth_chain_difference` as normal, which computes + # the set of persisted events belonging to the auth difference. + # 3. Adding the results of 1 and 2 together. - # Map from event ID in `event_map` to their auth event IDs, and their auth - # event IDs if they appear in the `event_map`. This is the intersection of - # the event's auth chain with the events in the `event_map` *plus* their + # Map from event ID in `unpersisted_events` to their auth event IDs, and their auth + # event IDs if they appear in the `unpersisted_events`. This is the intersection of + # the event's auth chain with the events in `unpersisted_events` *plus* their # auth event IDs. events_to_auth_chain: Dict[str, Set[str]] = {} - for event in event_map.values(): + for event in unpersisted_events.values(): chain = {event.event_id} events_to_auth_chain[event.event_id] = chain @@ -312,16 +313,16 @@ async def _get_auth_chain_difference( while to_search: for auth_id in to_search.pop().auth_event_ids(): chain.add(auth_id) - auth_event = event_map.get(auth_id) + auth_event = unpersisted_events.get(auth_id) if auth_event: to_search.append(auth_event) - # We now a) calculate the auth chain difference for the unpersisted events - # and b) work out the state sets to pass to the store. + # We now 1) calculate the auth chain difference for the unpersisted events + # and 2) work out the state sets to pass to the store. # - # Note: If the `event_map` is empty (which is the common case), we can do a + # Note: If there are no `unpersisted_events` (which is the common case), we can do a # much simpler calculation. - if event_map: + if unpersisted_events: # The list of state sets to pass to the store, where each state set is a set # of the event ids making up the state. This is similar to `state_sets`, # except that (a) we only have event ids, not the complete @@ -344,14 +345,18 @@ async def _get_auth_chain_difference( for event_id in state_set.values(): event_chain = events_to_auth_chain.get(event_id) if event_chain is not None: - # We have an event in `event_map`. We add all the auth - # events that it references (that aren't also in `event_map`). - set_ids.update(e for e in event_chain if e not in event_map) + # We have an unpersisted event. We add all the auth + # events that it references which are also unpersisted. 
+ set_ids.update( + e for e in event_chain if e not in unpersisted_events + ) # We also add the full chain of unpersisted event IDs # referenced by this state set, so that we can work out the # auth chain difference of the unpersisted events. - unpersisted_ids.update(e for e in event_chain if e in event_map) + unpersisted_ids.update( + e for e in event_chain if e in unpersisted_events + ) else: set_ids.add(event_id) @@ -361,15 +366,15 @@ async def _get_auth_chain_difference( union = unpersisted_set_ids[0].union(*unpersisted_set_ids[1:]) intersection = unpersisted_set_ids[0].intersection(*unpersisted_set_ids[1:]) - difference_from_event_map: Collection[str] = union - intersection + auth_difference_unpersisted_part: Collection[str] = union - intersection else: - difference_from_event_map = () + auth_difference_unpersisted_part = () state_sets_ids = [set(state_set.values()) for state_set in state_sets] difference = await state_res_store.get_auth_chain_difference( room_id, state_sets_ids ) - difference.update(difference_from_event_map) + difference.update(auth_difference_unpersisted_part) return difference diff --git a/synapse/storage/controllers/state.py b/synapse/storage/controllers/state.py index f9ffd0e29..ba5380ce3 100644 --- a/synapse/storage/controllers/state.py +++ b/synapse/storage/controllers/state.py @@ -23,7 +23,6 @@ from typing import ( List, Mapping, Optional, - Set, Tuple, ) @@ -520,7 +519,7 @@ class StateStorageController: ) return state_map.get(key) - async def get_current_hosts_in_room(self, room_id: str) -> Set[str]: + async def get_current_hosts_in_room(self, room_id: str) -> List[str]: """Get current hosts in room based on current state.""" await self._partial_state_room_tracker.await_full_state(room_id) diff --git a/synapse/storage/databases/main/account_data.py b/synapse/storage/databases/main/account_data.py index 9af9f4f18..c38b8a9e5 100644 --- a/synapse/storage/databases/main/account_data.py +++ b/synapse/storage/databases/main/account_data.py @@ -650,9 +650,6 @@ class AccountDataWorkerStore(PushRulesWorkerStore, CacheInvalidationWorkerStore) txn, self.get_account_data_for_room, (user_id,) ) self._invalidate_cache_and_stream(txn, self.get_push_rules_for_user, (user_id,)) - self._invalidate_cache_and_stream( - txn, self.get_push_rules_enabled_for_user, (user_id,) - ) # This user might be contained in the ignored_by cache for other users, # so we have to invalidate it all. 
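Returning to the `_get_auth_chain_difference` rework above: step 1 boils down to a union-minus-intersection over the per-state-set chains of unpersisted events. A worked toy example:

```python
# Chains of unpersisted event IDs reachable from two state sets.
unpersisted_set_ids = [
    {"$a", "$b", "$c"},  # reachable from state set 1
    {"$a", "$c", "$d"},  # reachable from state set 2
]

union = unpersisted_set_ids[0].union(*unpersisted_set_ids[1:])
intersection = unpersisted_set_ids[0].intersection(*unpersisted_set_ids[1:])

# The auth difference is whatever appears in some, but not all, of the chains.
assert union - intersection == {"$b", "$d"}
```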
self._invalidate_all_cache_and_stream(txn, self.ignored_by) diff --git a/synapse/storage/databases/main/event_push_actions.py b/synapse/storage/databases/main/event_push_actions.py index eabf9c973..f4a07de2a 100644 --- a/synapse/storage/databases/main/event_push_actions.py +++ b/synapse/storage/databases/main/event_push_actions.py @@ -274,7 +274,6 @@ class EventPushActionsWorkerStore(ReceiptsWorkerStore, StreamWorkerStore, SQLBas receipt_types=( ReceiptTypes.READ, ReceiptTypes.READ_PRIVATE, - ReceiptTypes.UNSTABLE_READ_PRIVATE, ), ) @@ -459,6 +458,31 @@ class EventPushActionsWorkerStore(ReceiptsWorkerStore, StreamWorkerStore, SQLBas return await self.db_pool.runInteraction("get_push_action_users_in_range", f) + def _get_receipts_by_room_txn( + self, txn: LoggingTransaction, user_id: str + ) -> List[Tuple[str, int]]: + receipt_types_clause, args = make_in_list_sql_clause( + self.database_engine, + "receipt_type", + ( + ReceiptTypes.READ, + ReceiptTypes.READ_PRIVATE, + ), + ) + + sql = f""" + SELECT room_id, MAX(stream_ordering) + FROM receipts_linearized + INNER JOIN events USING (room_id, event_id) + WHERE {receipt_types_clause} + AND user_id = ? + GROUP BY room_id + """ + + args.extend((user_id,)) + txn.execute(sql, args) + return cast(List[Tuple[str, int]], txn.fetchall()) + async def get_unread_push_actions_for_user_in_range_for_http( self, user_id: str, @@ -482,106 +506,45 @@ class EventPushActionsWorkerStore(ReceiptsWorkerStore, StreamWorkerStore, SQLBas The list will have between 0~limit entries. """ - # find rooms that have a read receipt in them and return the next - # push actions - def get_after_receipt( - txn: LoggingTransaction, - ) -> List[Tuple[str, str, int, str, bool]]: - # find rooms that have a read receipt in them and return the next - # push actions - - receipt_types_clause, args = make_in_list_sql_clause( - self.database_engine, - "receipt_type", - ( - ReceiptTypes.READ, - ReceiptTypes.READ_PRIVATE, - ReceiptTypes.UNSTABLE_READ_PRIVATE, - ), - ) - - sql = f""" - SELECT ep.event_id, ep.room_id, ep.stream_ordering, ep.actions, - ep.highlight - FROM ( - SELECT room_id, - MAX(stream_ordering) as stream_ordering - FROM events - INNER JOIN receipts_linearized USING (room_id, event_id) - WHERE {receipt_types_clause} AND user_id = ? - GROUP BY room_id - ) AS rl, - event_push_actions AS ep - WHERE - ep.room_id = rl.room_id - AND ep.stream_ordering > rl.stream_ordering - AND ep.user_id = ? - AND ep.stream_ordering > ? - AND ep.stream_ordering <= ? - AND ep.notif = 1 - ORDER BY ep.stream_ordering ASC LIMIT ? - """ - args.extend( - (user_id, user_id, min_stream_ordering, max_stream_ordering, limit) - ) - txn.execute(sql, args) - return cast(List[Tuple[str, str, int, str, bool]], txn.fetchall()) - - after_read_receipt = await self.db_pool.runInteraction( - "get_unread_push_actions_for_user_in_range_http_arr", get_after_receipt + receipts_by_room = dict( + await self.db_pool.runInteraction( + "get_unread_push_actions_for_user_in_range_http_receipts", + self._get_receipts_by_room_txn, + user_id=user_id, + ), ) - # There are rooms with push actions in them but you don't have a read receipt in - # them e.g. rooms you've been invited to, so get push actions for rooms which do - # not have read receipts in them too. 
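The two correlated queries being deleted in this hunk ("after receipt" and "no receipt") are replaced by the single `_get_receipts_by_room_txn` lookup above plus a Python-side filter that appears further down. A toy version of that filter, assuming `(room_id, stream_ordering)` rows:

```python
from typing import Dict, List, Tuple

def filter_unread(
    push_actions: List[Tuple[str, int]],   # (room_id, stream_ordering)
    receipts_by_room: Dict[str, int],      # room_id -> highest receipted ordering
) -> List[Tuple[str, int]]:
    # Keep actions after the room's receipt, or in rooms with no receipt at
    # all (e.g. rooms we were invited to but never read).
    return [
        (room_id, so)
        for room_id, so in push_actions
        if so > receipts_by_room.get(room_id, 0)
    ]

actions = [("!a:x", 5), ("!a:x", 12), ("!b:x", 3)]
assert filter_unread(actions, {"!a:x": 10}) == [("!a:x", 12), ("!b:x", 3)]
```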
- def get_no_receipt( + def get_push_actions_txn( txn: LoggingTransaction, ) -> List[Tuple[str, str, int, str, bool]]: - receipt_types_clause, args = make_in_list_sql_clause( - self.database_engine, - "receipt_type", - ( - ReceiptTypes.READ, - ReceiptTypes.READ_PRIVATE, - ReceiptTypes.UNSTABLE_READ_PRIVATE, - ), - ) - - sql = f""" - SELECT ep.event_id, ep.room_id, ep.stream_ordering, ep.actions, - ep.highlight + sql = """ + SELECT ep.event_id, ep.room_id, ep.stream_ordering, ep.actions, ep.highlight FROM event_push_actions AS ep - INNER JOIN events AS e USING (room_id, event_id) WHERE - ep.room_id NOT IN ( - SELECT room_id FROM receipts_linearized - WHERE {receipt_types_clause} AND user_id = ? - GROUP BY room_id - ) - AND ep.user_id = ? + ep.user_id = ? AND ep.stream_ordering > ? AND ep.stream_ordering <= ? AND ep.notif = 1 ORDER BY ep.stream_ordering ASC LIMIT ? """ - args.extend( - (user_id, user_id, min_stream_ordering, max_stream_ordering, limit) - ) - txn.execute(sql, args) + txn.execute(sql, (user_id, min_stream_ordering, max_stream_ordering, limit)) return cast(List[Tuple[str, str, int, str, bool]], txn.fetchall()) - no_read_receipt = await self.db_pool.runInteraction( - "get_unread_push_actions_for_user_in_range_http_nrr", get_no_receipt + push_actions = await self.db_pool.runInteraction( + "get_unread_push_actions_for_user_in_range_http", get_push_actions_txn ) notifs = [ HttpPushAction( - event_id=row[0], - room_id=row[1], - stream_ordering=row[2], - actions=_deserialize_action(row[3], row[4]), + event_id=event_id, + room_id=room_id, + stream_ordering=stream_ordering, + actions=_deserialize_action(actions, highlight), ) - for row in after_read_receipt + no_read_receipt + for event_id, room_id, stream_ordering, actions, highlight in push_actions + # Only include push actions with a stream ordering after any receipt, or without any + # receipt present (invited to but never read rooms). + if stream_ordering > receipts_by_room.get(room_id, 0) ] # Now sort it so it's ordered correctly, since currently it will @@ -617,106 +580,49 @@ class EventPushActionsWorkerStore(ReceiptsWorkerStore, StreamWorkerStore, SQLBas The list will have between 0~limit entries. """ - # find rooms that have a read receipt in them and return the most recent - # push actions - def get_after_receipt( - txn: LoggingTransaction, - ) -> List[Tuple[str, str, int, str, bool, int]]: - receipt_types_clause, args = make_in_list_sql_clause( - self.database_engine, - "receipt_type", - ( - ReceiptTypes.READ, - ReceiptTypes.READ_PRIVATE, - ReceiptTypes.UNSTABLE_READ_PRIVATE, - ), - ) - - sql = f""" - SELECT ep.event_id, ep.room_id, ep.stream_ordering, ep.actions, - ep.highlight, e.received_ts - FROM ( - SELECT room_id, - MAX(stream_ordering) as stream_ordering - FROM events - INNER JOIN receipts_linearized USING (room_id, event_id) - WHERE {receipt_types_clause} AND user_id = ? - GROUP BY room_id - ) AS rl, - event_push_actions AS ep - INNER JOIN events AS e USING (room_id, event_id) - WHERE - ep.room_id = rl.room_id - AND ep.stream_ordering > rl.stream_ordering - AND ep.user_id = ? - AND ep.stream_ordering > ? - AND ep.stream_ordering <= ? - AND ep.notif = 1 - ORDER BY ep.stream_ordering DESC LIMIT ? 
- """ - args.extend( - (user_id, user_id, min_stream_ordering, max_stream_ordering, limit) - ) - txn.execute(sql, args) - return cast(List[Tuple[str, str, int, str, bool, int]], txn.fetchall()) - - after_read_receipt = await self.db_pool.runInteraction( - "get_unread_push_actions_for_user_in_range_email_arr", get_after_receipt + receipts_by_room = dict( + await self.db_pool.runInteraction( + "get_unread_push_actions_for_user_in_range_email_receipts", + self._get_receipts_by_room_txn, + user_id=user_id, + ), ) - # There are rooms with push actions in them but you don't have a read receipt in - # them e.g. rooms you've been invited to, so get push actions for rooms which do - # not have read receipts in them too. - def get_no_receipt( + def get_push_actions_txn( txn: LoggingTransaction, ) -> List[Tuple[str, str, int, str, bool, int]]: - receipt_types_clause, args = make_in_list_sql_clause( - self.database_engine, - "receipt_type", - ( - ReceiptTypes.READ, - ReceiptTypes.READ_PRIVATE, - ReceiptTypes.UNSTABLE_READ_PRIVATE, - ), - ) - - sql = f""" + sql = """ SELECT ep.event_id, ep.room_id, ep.stream_ordering, ep.actions, ep.highlight, e.received_ts FROM event_push_actions AS ep INNER JOIN events AS e USING (room_id, event_id) WHERE - ep.room_id NOT IN ( - SELECT room_id FROM receipts_linearized - WHERE {receipt_types_clause} AND user_id = ? - GROUP BY room_id - ) - AND ep.user_id = ? + ep.user_id = ? AND ep.stream_ordering > ? AND ep.stream_ordering <= ? AND ep.notif = 1 ORDER BY ep.stream_ordering DESC LIMIT ? """ - args.extend( - (user_id, user_id, min_stream_ordering, max_stream_ordering, limit) - ) - txn.execute(sql, args) + txn.execute(sql, (user_id, min_stream_ordering, max_stream_ordering, limit)) return cast(List[Tuple[str, str, int, str, bool, int]], txn.fetchall()) - no_read_receipt = await self.db_pool.runInteraction( - "get_unread_push_actions_for_user_in_range_email_nrr", get_no_receipt + push_actions = await self.db_pool.runInteraction( + "get_unread_push_actions_for_user_in_range_email", get_push_actions_txn ) # Make a list of dicts from the two sets of results. notifs = [ EmailPushAction( - event_id=row[0], - room_id=row[1], - stream_ordering=row[2], - actions=_deserialize_action(row[3], row[4]), - received_ts=row[5], + event_id=event_id, + room_id=room_id, + stream_ordering=stream_ordering, + actions=_deserialize_action(actions, highlight), + received_ts=received_ts, ) - for row in after_read_receipt + no_read_receipt + for event_id, room_id, stream_ordering, actions, highlight, received_ts in push_actions + # Only include push actions with a stream ordering after any receipt, or without any + # receipt present (invited to but never read rooms). + if stream_ordering > receipts_by_room.get(room_id, 0) ] # Now sort it so it's ordered correctly, since currently it will @@ -792,26 +698,14 @@ class EventPushActionsWorkerStore(ReceiptsWorkerStore, StreamWorkerStore, SQLBas int(count_as_unread), # unread column ) - def _add_push_actions_to_staging_txn(txn: LoggingTransaction) -> None: - # We don't use simple_insert_many here to avoid the overhead - # of generating lists of dicts. - - sql = """ - INSERT INTO event_push_actions_staging - (event_id, user_id, actions, notif, highlight, unread) - VALUES (?, ?, ?, ?, ?, ?) 
- """ - - txn.execute_batch( - sql, - ( - _gen_entry(user_id, actions) - for user_id, actions in user_id_actions.items() - ), - ) - - return await self.db_pool.runInteraction( - "add_push_actions_to_staging", _add_push_actions_to_staging_txn + await self.db_pool.simple_insert_many( + "event_push_actions_staging", + keys=("event_id", "user_id", "actions", "notif", "highlight", "unread"), + values=[ + _gen_entry(user_id, actions) + for user_id, actions in user_id_actions.items() + ], + desc="add_push_actions_to_staging", ) async def remove_push_actions_from_staging(self, event_id: str) -> None: diff --git a/synapse/storage/databases/main/events_worker.py b/synapse/storage/databases/main/events_worker.py index 8a7cdb024..9b997c304 100644 --- a/synapse/storage/databases/main/events_worker.py +++ b/synapse/storage/databases/main/events_worker.py @@ -2111,7 +2111,14 @@ class EventsWorkerStore(SQLBaseStore): AND room_id = ? /* Make sure event is not rejected */ AND rejections.event_id IS NULL - ORDER BY origin_server_ts %s + /** + * First sort by the message timestamp. If the message timestamps are the + * same, we want the message that logically comes "next" (before/after + * the given timestamp) based on the DAG and its topological order (`depth`). + * Finally, we can tie-break based on when it was received on the server + * (`stream_ordering`). + */ + ORDER BY origin_server_ts %s, depth %s, stream_ordering %s LIMIT 1; """ @@ -2130,7 +2137,8 @@ class EventsWorkerStore(SQLBaseStore): order = "ASC" txn.execute( - sql_template % (comparison_operator, order), (timestamp, room_id) + sql_template % (comparison_operator, order, order, order), + (timestamp, room_id), ) row = txn.fetchone() if row: diff --git a/synapse/storage/databases/main/push_rule.py b/synapse/storage/databases/main/push_rule.py index 255620f99..5079edd1e 100644 --- a/synapse/storage/databases/main/push_rule.py +++ b/synapse/storage/databases/main/push_rule.py @@ -165,7 +165,6 @@ class PushRulesWorkerStore( return _load_rules(rows, enabled_map, self.hs.config.experimental) - @cached(max_entries=5000) async def get_push_rules_enabled_for_user(self, user_id: str) -> Dict[str, bool]: results = await self.db_pool.simple_select_list( table="push_rules_enable", @@ -229,9 +228,6 @@ class PushRulesWorkerStore( return results - @cachedList( - cached_method_name="get_push_rules_enabled_for_user", list_name="user_ids" - ) async def bulk_get_push_rules_enabled( self, user_ids: Collection[str] ) -> Dict[str, Dict[str, bool]]: @@ -246,6 +242,7 @@ class PushRulesWorkerStore( iterable=user_ids, retcols=("user_name", "rule_id", "enabled"), desc="bulk_get_push_rules_enabled", + batch_size=1000, ) for row in rows: enabled = bool(row["enabled"]) @@ -792,7 +789,6 @@ class PushRuleStore(PushRulesWorkerStore): self.db_pool.simple_insert_txn(txn, "push_rules_stream", values=values) txn.call_after(self.get_push_rules_for_user.invalidate, (user_id,)) - txn.call_after(self.get_push_rules_enabled_for_user.invalidate, (user_id,)) txn.call_after( self.push_rules_stream_cache.entity_has_changed, user_id, stream_id ) diff --git a/synapse/storage/databases/main/room.py b/synapse/storage/databases/main/room.py index b7d4baa6b..bef66f199 100644 --- a/synapse/storage/databases/main/room.py +++ b/synapse/storage/databases/main/room.py @@ -641,8 +641,10 @@ class RoomWorkerStore(CacheInvalidationWorkerStore): "version": room[5], "creator": room[6], "encryption": room[7], - "federatable": room[8], - "public": room[9], + # room_stats_state.federatable is an integer on 
sqlite. + "federatable": bool(room[8]), + # rooms.is_public is an integer on sqlite. + "public": bool(room[9]), "join_rules": room[10], "guest_access": room[11], "history_visibility": room[12], @@ -1183,8 +1185,9 @@ class RoomWorkerStore(CacheInvalidationWorkerStore): ) return False - @staticmethod - def _clear_partial_state_room_txn(txn: LoggingTransaction, room_id: str) -> None: + def _clear_partial_state_room_txn( + self, txn: LoggingTransaction, room_id: str + ) -> None: DatabasePool.simple_delete_txn( txn, table="partial_state_rooms_servers", @@ -1195,7 +1198,9 @@ class RoomWorkerStore(CacheInvalidationWorkerStore): table="partial_state_rooms", keyvalues={"room_id": room_id}, ) + self._invalidate_cache_and_stream(txn, self.is_partial_state_room, (room_id,)) + @cached() async def is_partial_state_room(self, room_id: str) -> bool: """Checks if this room has partial state. @@ -1769,9 +1774,8 @@ class RoomStore(RoomBackgroundUpdateStore, RoomWorkerStore): servers, ) - @staticmethod def _store_partial_state_room_txn( - txn: LoggingTransaction, room_id: str, servers: Collection[str] + self, txn: LoggingTransaction, room_id: str, servers: Collection[str] ) -> None: DatabasePool.simple_insert_txn( txn, @@ -1786,6 +1790,7 @@ class RoomStore(RoomBackgroundUpdateStore, RoomWorkerStore): keys=("room_id", "server_name"), values=((room_id, s) for s in servers), ) + self._invalidate_cache_and_stream(txn, self.is_partial_state_room, (room_id,)) async def maybe_store_room_on_outlier_membership( self, room_id: str, room_version: RoomVersion diff --git a/synapse/storage/databases/main/roommember.py b/synapse/storage/databases/main/roommember.py index 827c1f1ef..4f0adb136 100644 --- a/synapse/storage/databases/main/roommember.py +++ b/synapse/storage/databases/main/roommember.py @@ -31,7 +31,6 @@ from typing import ( import attr from synapse.api.constants import EventTypes, Membership -from synapse.events import EventBase from synapse.metrics import LaterGauge from synapse.metrics.background_process_metrics import ( run_as_background_process, @@ -187,27 +186,48 @@ class RoomMemberWorkerStore(EventsWorkerStore): @cached(max_entries=100000, iterable=True) async def get_users_in_room(self, room_id: str) -> List[str]: + """ + Returns a list of users in the room sorted by longest in the room first + (aka. with the lowest depth). This is done to match the sort in + `get_current_hosts_in_room()` and so we can re-use the cache but it's + not horrible to have here either. + """ + return await self.db_pool.runInteraction( "get_users_in_room", self.get_users_in_room_txn, room_id ) def get_users_in_room_txn(self, txn: LoggingTransaction, room_id: str) -> List[str]: + """ + Returns a list of users in the room sorted by longest in the room first + (aka. with the lowest depth). This is done to match the sort in + `get_current_hosts_in_room()` and so we can re-use the cache but it's + not horrible to have here either. + """ # If we can assume current_state_events.membership is up to date # then we can avoid a join, which is a Very Good Thing given how # frequently this function gets called. if self._current_state_events_membership_up_to_date: sql = """ - SELECT state_key FROM current_state_events - WHERE type = 'm.room.member' AND room_id = ? AND membership = ? + SELECT c.state_key FROM current_state_events as c + /* Get the depth of the event from the events table */ + INNER JOIN events AS e USING (event_id) + WHERE c.type = 'm.room.member' AND c.room_id = ? AND membership = ? 
+                /* Sorted by lowest depth first */
+                ORDER BY e.depth ASC;
             """
         else:
             sql = """
-                SELECT state_key FROM room_memberships as m
+                SELECT c.state_key FROM room_memberships as m
+                /* Get the depth of the event from the events table */
+                INNER JOIN events AS e USING (event_id)
                 INNER JOIN current_state_events as c
                 ON m.event_id = c.event_id
                 AND m.room_id = c.room_id
                 AND m.user_id = c.state_key
                 WHERE c.type = 'm.room.member' AND c.room_id = ? AND m.membership = ?
+                /* Sorted by lowest depth first */
+                ORDER BY e.depth ASC;
             """

         txn.execute(sql, (room_id, Membership.JOIN))
@@ -534,6 +554,32 @@ class RoomMemberWorkerStore(EventsWorkerStore):
             desc="get_local_users_in_room",
         )

+    async def check_local_user_in_room(self, user_id: str, room_id: str) -> bool:
+        """
+        Check whether a given local user is currently joined to the given room.
+
+        Returns:
+            A boolean indicating whether the user is currently joined to the room
+
+        Raises:
+            Exception when called with a user that is not local to this homeserver
+        """
+        if not self.hs.is_mine_id(user_id):
+            raise Exception(
+                "Cannot call 'check_local_user_in_room' on "
+                "non-local user %s" % (user_id,),
+            )
+
+        (
+            membership,
+            member_event_id,
+        ) = await self.get_local_current_membership_for_user_in_room(
+            user_id=user_id,
+            room_id=room_id,
+        )
+
+        return membership == Membership.JOIN
+
     async def get_local_current_membership_for_user_in_room(
         self, user_id: str, room_id: str
     ) -> Tuple[Optional[str], Optional[str]]:
@@ -835,144 +881,92 @@ class RoomMemberWorkerStore(EventsWorkerStore):

         return shared_room_ids or frozenset()

-    async def get_joined_users_from_state(
-        self, room_id: str, state: StateMap[str], state_entry: "_StateCacheEntry"
-    ) -> Dict[str, ProfileInfo]:
-        state_group: Union[object, int] = state_entry.state_group
-        if not state_group:
-            # If state_group is None it means it has yet to be assigned a
-            # state group, i.e. we need to make sure that calls with a state_group
-            # of None don't hit previous cached calls with a None state_group.
-            # To do this we set the state_group to a new object as object() != object()
-            state_group = object()
+    async def get_joined_user_ids_from_state(
+        self, room_id: str, state: StateMap[str]
+    ) -> Set[str]:
+        """
+        For a given set of state IDs, get a set of user IDs in the room.

-        assert state_group is not None
-        with Measure(self._clock, "get_joined_users_from_state"):
-            return await self._get_joined_users_from_context(
-                room_id, state_group, state, context=state_entry
+        This method checks the local event cache, before calling
+        `_get_user_ids_from_membership_event_ids` for any uncached events.
+        """
+
+        with Measure(self._clock, "get_joined_user_ids_from_state"):
+            users_in_room = set()
+            member_event_ids = [
+                e_id for key, e_id in state.items() if key[0] == EventTypes.Member
+            ]
+
+            # We check if we have any of the member event ids in the event cache
+            # before we ask the DB
+
+            # We don't update the event cache hit ratio as it completely throws off
+            # the hit ratio counts. After all, we don't populate the cache if we
+            # miss it here
+            event_map = self._get_events_from_local_cache(
+                member_event_ids, update_metrics=False
             )

-    @cached(num_args=2, iterable=True, max_entries=100000)
-    async def _get_joined_users_from_context(
-        self,
-        room_id: str,
-        state_group: Union[object, int],
-        current_state_ids: StateMap[str],
-        event: Optional[EventBase] = None,
-        context: Optional["_StateCacheEntry"] = None,
-    ) -> Dict[str, ProfileInfo]:
-        # We don't use `state_group`, it's there so that we can cache based
-        # on it.
However, it's important that it's never None, since two current_states - # with a state_group of None are likely to be different. - assert state_group is not None + missing_member_event_ids = [] + for event_id in member_event_ids: + ev_entry = event_map.get(event_id) + if ev_entry and not ev_entry.event.rejected_reason: + if ev_entry.event.membership == Membership.JOIN: + users_in_room.add(ev_entry.event.state_key) + else: + missing_member_event_ids.append(event_id) - users_in_room = {} - member_event_ids = [ - e_id - for key, e_id in current_state_ids.items() - if key[0] == EventTypes.Member - ] - - if context is not None: - # If we have a context with a delta from a previous state group, - # check if we also have the result from the previous group in cache. - # If we do then we can reuse that result and simply update it with - # any membership changes in `delta_ids` - if context.prev_group and context.delta_ids: - prev_res = self._get_joined_users_from_context.cache.get_immediate( - (room_id, context.prev_group), None + if missing_member_event_ids: + event_to_memberships = ( + await self._get_user_ids_from_membership_event_ids( + missing_member_event_ids + ) + ) + users_in_room.update( + user_id for user_id in event_to_memberships.values() if user_id ) - if prev_res and isinstance(prev_res, dict): - users_in_room = dict(prev_res) - member_event_ids = [ - e_id - for key, e_id in context.delta_ids.items() - if key[0] == EventTypes.Member - ] - for etype, state_key in context.delta_ids: - if etype == EventTypes.Member: - users_in_room.pop(state_key, None) - # We check if we have any of the member event ids in the event cache - # before we ask the DB + return users_in_room - # We don't update the event cache hit ratio as it completely throws off - # the hit ratio counts. After all, we don't populate the cache if we - # miss it here - event_map = self._get_events_from_local_cache( - member_event_ids, update_metrics=False - ) - - missing_member_event_ids = [] - for event_id in member_event_ids: - ev_entry = event_map.get(event_id) - if ev_entry and not ev_entry.event.rejected_reason: - if ev_entry.event.membership == Membership.JOIN: - users_in_room[ev_entry.event.state_key] = ProfileInfo( - display_name=ev_entry.event.content.get("displayname", None), - avatar_url=ev_entry.event.content.get("avatar_url", None), - ) - else: - missing_member_event_ids.append(event_id) - - if missing_member_event_ids: - event_to_memberships = await self._get_joined_profiles_from_event_ids( - missing_member_event_ids - ) - users_in_room.update(row for row in event_to_memberships.values() if row) - - if event is not None and event.type == EventTypes.Member: - if event.membership == Membership.JOIN: - if event.event_id in member_event_ids: - users_in_room[event.state_key] = ProfileInfo( - display_name=event.content.get("displayname", None), - avatar_url=event.content.get("avatar_url", None), - ) - - return users_in_room - - @cached(max_entries=10000) - def _get_joined_profile_from_event_id( + @cached( + max_entries=10000, + # This name matches the old function that has been replaced - the cache name + # is kept here to maintain backwards compatibility. 
+ name="_get_joined_profile_from_event_id", + ) + def _get_user_id_from_membership_event_id( self, event_id: str ) -> Optional[Tuple[str, ProfileInfo]]: raise NotImplementedError() @cachedList( - cached_method_name="_get_joined_profile_from_event_id", + cached_method_name="_get_user_id_from_membership_event_id", list_name="event_ids", ) - async def _get_joined_profiles_from_event_ids( + async def _get_user_ids_from_membership_event_ids( self, event_ids: Iterable[str] - ) -> Dict[str, Optional[Tuple[str, ProfileInfo]]]: + ) -> Dict[str, Optional[str]]: """For given set of member event_ids check if they point to a join - event and if so return the associated user and profile info. + event. Args: event_ids: The member event IDs to lookup Returns: - Map from event ID to `user_id` and ProfileInfo (or None if not join event). + Map from event ID to `user_id`, or None if event is not a join. """ rows = await self.db_pool.simple_select_many_batch( table="room_memberships", column="event_id", iterable=event_ids, - retcols=("user_id", "display_name", "avatar_url", "event_id"), + retcols=("user_id", "event_id"), keyvalues={"membership": Membership.JOIN}, batch_size=1000, - desc="_get_joined_profiles_from_event_ids", + desc="_get_user_ids_from_membership_event_ids", ) - return { - row["event_id"]: ( - row["user_id"], - ProfileInfo( - avatar_url=row["avatar_url"], display_name=row["display_name"] - ), - ) - for row in rows - } + return {row["event_id"]: row["user_id"] for row in rows} @cached(max_entries=10000) async def is_host_joined(self, room_id: str, host: str) -> bool: @@ -1018,37 +1012,70 @@ class RoomMemberWorkerStore(EventsWorkerStore): return True @cached(iterable=True, max_entries=10000) - async def get_current_hosts_in_room(self, room_id: str) -> Set[str]: - """Get current hosts in room based on current state.""" + async def get_current_hosts_in_room(self, room_id: str) -> List[str]: + """ + Get current hosts in room based on current state. + + The heuristic of sorting by servers who have been in the room the + longest is good because they're most likely to have anything we ask + about. + + Returns: + Returns a list of servers sorted by longest in the room first. (aka. + sorted by join with the lowest depth first). + """ # First we check if we already have `get_users_in_room` in the cache, as # we can just calculate result from that users = self.get_users_in_room.cache.get_immediate( (room_id,), None, update_metrics=False ) - if users is not None: - return {get_domain_from_id(u) for u in users} - - if isinstance(self.database_engine, Sqlite3Engine): + if users is None and isinstance(self.database_engine, Sqlite3Engine): # If we're using SQLite then let's just always use # `get_users_in_room` rather than funky SQL. users = await self.get_users_in_room(room_id) - return {get_domain_from_id(u) for u in users} + + if users is not None: + # Because `users` is sorted from lowest -> highest depth, the list + # of domains will also be sorted that way. + domains: List[str] = [] + # We use a `Set` just for fast lookups + domain_set: Set[str] = set() + for u in users: + domain = get_domain_from_id(u) + if domain not in domain_set: + domain_set.add(domain) + domains.append(domain) + return domains # For PostgreSQL we can use a regex to pull out the domains from the # joined users in `current_state_events` via regex. 
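The SQLite branch above dedupes domains while preserving the depth ordering established by `get_users_in_room`. The same first-seen dedupe as a standalone helper (with a crude stand-in for `get_domain_from_id`):

```python
from typing import Iterable, List

def domains_in_join_order(user_ids: Iterable[str]) -> List[str]:
    domains: List[str] = []
    seen = set()  # a set purely for O(1) membership checks
    for user_id in user_ids:
        domain = user_id.split(":", 1)[1]  # stand-in for get_domain_from_id
        if domain not in seen:
            seen.add(domain)
            domains.append(domain)
    return domains

assert domains_in_join_order(["@a:one.org", "@b:two.org", "@c:one.org"]) == [
    "one.org",
    "two.org",
]
```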
- def get_current_hosts_in_room_txn(txn: LoggingTransaction) -> Set[str]: + def get_current_hosts_in_room_txn(txn: LoggingTransaction) -> List[str]: + # Returns a list of servers currently joined in the room sorted by + # longest in the room first (aka. with the lowest depth). The + # heuristic of sorting by servers who have been in the room the + # longest is good because they're most likely to have anything we + # ask about. sql = """ - SELECT DISTINCT substring(state_key FROM '@[^:]*:(.*)$') - FROM current_state_events + SELECT + /* Match the domain part of the MXID */ + substring(c.state_key FROM '@[^:]*:(.*)$') as server_domain + FROM current_state_events c + /* Get the depth of the event from the events table */ + INNER JOIN events AS e USING (event_id) WHERE - type = 'm.room.member' - AND membership = 'join' - AND room_id = ? + /* Find any join state events in the room */ + c.type = 'm.room.member' + AND c.membership = 'join' + AND c.room_id = ? + /* Group all state events from the same domain into their own buckets (groups) */ + GROUP BY server_domain + /* Sorted by lowest depth first */ + ORDER BY min(e.depth) ASC; """ txn.execute(sql, (room_id,)) - return {d for d, in txn} + return [d for d, in txn] return await self.db_pool.runInteraction( "get_current_hosts_in_room", get_current_hosts_in_room_txn @@ -1131,12 +1158,12 @@ class RoomMemberWorkerStore(EventsWorkerStore): else: # The cache doesn't match the state group or prev state group, # so we calculate the result from first principles. - joined_users = await self.get_joined_users_from_state( - room_id, state, state_entry + joined_user_ids = await self.get_joined_user_ids_from_state( + room_id, state ) cache.hosts_to_joined_users = {} - for user_id in joined_users: + for user_id in joined_user_ids: host = intern_string(get_domain_from_id(user_id)) cache.hosts_to_joined_users.setdefault(host, set()).add(user_id) diff --git a/synapse/storage/schema/__init__.py b/synapse/storage/schema/__init__.py index a9a88c8bf..256f745dc 100644 --- a/synapse/storage/schema/__init__.py +++ b/synapse/storage/schema/__init__.py @@ -75,6 +75,7 @@ Changes in SCHEMA_VERSION = 71: Changes in SCHEMA_VERSION = 72: - event_edges.(room_id, is_state) are no longer written to. - Tables related to groups are dropped. + - Unused column application_services_state.last_txn is dropped """ diff --git a/synapse/storage/schema/main/delta/72/04drop_column_application_services_state_last_txn.sql.postgres b/synapse/storage/schema/main/delta/72/04drop_column_application_services_state_last_txn.sql.postgres new file mode 100644 index 000000000..13d47de9e --- /dev/null +++ b/synapse/storage/schema/main/delta/72/04drop_column_application_services_state_last_txn.sql.postgres @@ -0,0 +1,17 @@ +/* Copyright 2022 The Matrix.org Foundation C.I.C + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +-- Drop unused column application_services_state.last_txn +ALTER table application_services_state DROP COLUMN last_txn; \ No newline at end of file diff --git a/synapse/storage/schema/main/delta/72/04drop_column_application_services_state_last_txn.sql.sqlite b/synapse/storage/schema/main/delta/72/04drop_column_application_services_state_last_txn.sql.sqlite new file mode 100644 index 000000000..3be1c88d7 --- /dev/null +++ b/synapse/storage/schema/main/delta/72/04drop_column_application_services_state_last_txn.sql.sqlite @@ -0,0 +1,40 @@ +/* Copyright 2022 The Matrix.org Foundation C.I.C + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +-- Drop unused column application_services_state.last_txn + +CREATE TABLE application_services_state2 ( + as_id TEXT PRIMARY KEY NOT NULL, + state VARCHAR(5), + read_receipt_stream_id BIGINT, + presence_stream_id BIGINT, + to_device_stream_id BIGINT, + device_list_stream_id BIGINT +); + + +INSERT INTO application_services_state2 ( + as_id, + state, + read_receipt_stream_id, + presence_stream_id, + to_device_stream_id, + device_list_stream_id +) +SELECT as_id, state, read_receipt_stream_id, presence_stream_id, to_device_stream_id, device_list_stream_id +FROM application_services_state; + +DROP TABLE application_services_state; +ALTER TABLE application_services_state2 RENAME TO application_services_state; diff --git a/synapse/storage/schema/main/delta/72/05remove_unstable_private_read_receipts.sql b/synapse/storage/schema/main/delta/72/05remove_unstable_private_read_receipts.sql new file mode 100644 index 000000000..36b41049c --- /dev/null +++ b/synapse/storage/schema/main/delta/72/05remove_unstable_private_read_receipts.sql @@ -0,0 +1,19 @@ +/* Copyright 2022 The Matrix.org Foundation C.I.C + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +-- Drop previously received private read receipts so they do not accidentally +-- get leaked to other users. +DELETE FROM receipts_linearized WHERE receipt_type = 'org.matrix.msc2285.read.private'; +DELETE FROM receipts_graph WHERE receipt_type = 'org.matrix.msc2285.read.private'; diff --git a/synapse/storage/util/id_generators.py b/synapse/storage/util/id_generators.py index 3c13859fa..2dfe4c0b6 100644 --- a/synapse/storage/util/id_generators.py +++ b/synapse/storage/util/id_generators.py @@ -460,8 +460,17 @@ class MultiWriterIdGenerator(AbstractStreamIdGenerator): # Cast safety: this corresponds to the types returned by the query above. 
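The SQLite delta above rebuilds the whole table because the SQLite versions Synapse supports cannot reliably `ALTER TABLE ... DROP COLUMN`. The copy-and-rename dance in miniature, using the stdlib `sqlite3` module:

```python
import sqlite3

con = sqlite3.connect(":memory:")
con.executescript(
    """
    CREATE TABLE t (as_id TEXT PRIMARY KEY NOT NULL, state VARCHAR(5), last_txn INTEGER);
    INSERT INTO t VALUES ('as1', 'up', 7);

    -- Recreate without the unwanted column, copy the data, then swap names.
    CREATE TABLE t2 (as_id TEXT PRIMARY KEY NOT NULL, state VARCHAR(5));
    INSERT INTO t2 (as_id, state) SELECT as_id, state FROM t;
    DROP TABLE t;
    ALTER TABLE t2 RENAME TO t;
    """
)
assert con.execute("SELECT * FROM t").fetchall() == [("as1", "up")]
```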
rows.extend(cast(Iterable[Tuple[str, int]], cur)) - # Sort so that we handle rows in order for each instance. - rows.sort() + # Sort by stream_id (ascending, lowest -> highest) so that we handle + # rows in order for each instance because we don't want to overwrite + # the current_position of an instance to a lower stream ID than + # we're actually at. + def sort_by_stream_id_key_func(row: Tuple[str, int]) -> int: + (instance, stream_id) = row + # If `stream_id` is ever `None`, we will see a `TypeError: '<' + # not supported between instances of 'NoneType' and 'X'` error. + return stream_id + + rows.sort(key=sort_by_stream_id_key_func) with self._lock: for ( diff --git a/synapse/util/caches/__init__.py b/synapse/util/caches/__init__.py index 42f6abb5e..bdf9b0dc8 100644 --- a/synapse/util/caches/__init__.py +++ b/synapse/util/caches/__init__.py @@ -34,10 +34,10 @@ TRACK_MEMORY_USAGE = False caches_by_name: Dict[str, Sized] = {} collectors_by_name: Dict[str, "CacheMetric"] = {} -cache_size = Gauge("synapse_util_caches_cache:size", "", ["name"]) -cache_hits = Gauge("synapse_util_caches_cache:hits", "", ["name"]) -cache_evicted = Gauge("synapse_util_caches_cache:evicted_size", "", ["name", "reason"]) -cache_total = Gauge("synapse_util_caches_cache:total", "", ["name"]) +cache_size = Gauge("synapse_util_caches_cache_size", "", ["name"]) +cache_hits = Gauge("synapse_util_caches_cache_hits", "", ["name"]) +cache_evicted = Gauge("synapse_util_caches_cache_evicted_size", "", ["name", "reason"]) +cache_total = Gauge("synapse_util_caches_cache_total", "", ["name"]) cache_max_size = Gauge("synapse_util_caches_cache_max_size", "", ["name"]) cache_memory_usage = Gauge( "synapse_util_caches_cache_size_bytes", @@ -45,12 +45,12 @@ cache_memory_usage = Gauge( ["name"], ) -response_cache_size = Gauge("synapse_util_caches_response_cache:size", "", ["name"]) -response_cache_hits = Gauge("synapse_util_caches_response_cache:hits", "", ["name"]) +response_cache_size = Gauge("synapse_util_caches_response_cache_size", "", ["name"]) +response_cache_hits = Gauge("synapse_util_caches_response_cache_hits", "", ["name"]) response_cache_evicted = Gauge( - "synapse_util_caches_response_cache:evicted_size", "", ["name", "reason"] + "synapse_util_caches_response_cache_evicted_size", "", ["name", "reason"] ) -response_cache_total = Gauge("synapse_util_caches_response_cache:total", "", ["name"]) +response_cache_total = Gauge("synapse_util_caches_response_cache_total", "", ["name"]) class EvictionReason(Enum): diff --git a/synapse/util/caches/deferred_cache.py b/synapse/util/caches/deferred_cache.py index 1d6ec2219..6425f851e 100644 --- a/synapse/util/caches/deferred_cache.py +++ b/synapse/util/caches/deferred_cache.py @@ -14,15 +14,19 @@ # See the License for the specific language governing permissions and # limitations under the License. +import abc import enum import threading from typing import ( Callable, + Collection, + Dict, Generic, - Iterable, MutableMapping, Optional, + Set, Sized, + Tuple, TypeVar, Union, cast, @@ -31,7 +35,6 @@ from typing import ( from prometheus_client import Gauge from twisted.internet import defer -from twisted.python import failure from twisted.python.failure import Failure from synapse.util.async_helpers import ObservableDeferred @@ -94,7 +97,7 @@ class DeferredCache(Generic[KT, VT]): # _pending_deferred_cache maps from the key value to a `CacheEntry` object. 
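The explicit sort key above matters because a bare `rows.sort()` on `(instance_name, stream_id)` tuples compares instance names first, whereas advancing the current positions wants rows in globally ascending stream ID order. In isolation:

```python
rows = [("worker2", 5), ("worker1", 7), ("worker2", 3)]

# Lexicographic tuple sort groups by instance name first.
assert sorted(rows) == [("worker1", 7), ("worker2", 3), ("worker2", 5)]

# Keying on stream_id alone yields the order the positions logic wants.
rows.sort(key=lambda row: row[1])
assert rows == [("worker2", 3), ("worker2", 5), ("worker1", 7)]
```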
self._pending_deferred_cache: Union[ - TreeCache, "MutableMapping[KT, CacheEntry]" + TreeCache, "MutableMapping[KT, CacheEntry[KT, VT]]" ] = cache_type() def metrics_cb() -> None: @@ -159,15 +162,16 @@ class DeferredCache(Generic[KT, VT]): Raises: KeyError if the key is not found in the cache """ - callbacks = [callback] if callback else [] val = self._pending_deferred_cache.get(key, _Sentinel.sentinel) if val is not _Sentinel.sentinel: - val.callbacks.update(callbacks) + val.add_invalidation_callback(key, callback) if update_metrics: m = self.cache.metrics assert m # we always have a name, so should always have metrics m.inc_hits() - return val.deferred.observe() + return val.deferred(key) + + callbacks = (callback,) if callback else () val2 = self.cache.get( key, _Sentinel.sentinel, callbacks=callbacks, update_metrics=update_metrics @@ -177,6 +181,73 @@ class DeferredCache(Generic[KT, VT]): else: return defer.succeed(val2) + def get_bulk( + self, + keys: Collection[KT], + callback: Optional[Callable[[], None]] = None, + ) -> Tuple[Dict[KT, VT], Optional["defer.Deferred[Dict[KT, VT]]"], Collection[KT]]: + """Bulk lookup of items in the cache. + + Returns: + A 3-tuple of: + 1. a dict of key/value of items already cached; + 2. a deferred that resolves to a dict of key/value of items + we're already fetching; and + 3. a collection of keys that don't appear in the previous two. + """ + + # The cached results + cached = {} + + # List of pending deferreds + pending = [] + + # Dict that gets filled out when the pending deferreds complete + pending_results = {} + + # List of keys that aren't in either cache + missing = [] + + callbacks = (callback,) if callback else () + + for key in keys: + # Check if its in the main cache. + immediate_value = self.cache.get( + key, + _Sentinel.sentinel, + callbacks=callbacks, + ) + if immediate_value is not _Sentinel.sentinel: + cached[key] = immediate_value + continue + + # Check if its in the pending cache + pending_value = self._pending_deferred_cache.get(key, _Sentinel.sentinel) + if pending_value is not _Sentinel.sentinel: + pending_value.add_invalidation_callback(key, callback) + + def completed_cb(value: VT, key: KT) -> VT: + pending_results[key] = value + return value + + # Add a callback to fill out `pending_results` when that completes + d = pending_value.deferred(key).addCallback(completed_cb, key) + pending.append(d) + continue + + # Not in either cache + missing.append(key) + + # If we've got pending deferreds, squash them into a single one that + # returns `pending_results`. + pending_deferred = None + if pending: + pending_deferred = defer.gatherResults( + pending, consumeErrors=True + ).addCallback(lambda _: pending_results) + + return (cached, pending_deferred, missing) + def get_immediate( self, key: KT, default: T, update_metrics: bool = True ) -> Union[VT, T]: @@ -218,84 +289,89 @@ class DeferredCache(Generic[KT, VT]): value: a deferred which will complete with a result to add to the cache callback: An optional callback to be called when the entry is invalidated """ - if not isinstance(value, defer.Deferred): - raise TypeError("not a Deferred") - - callbacks = [callback] if callback else [] self.check_thread() - existing_entry = self._pending_deferred_cache.pop(key, None) - if existing_entry: - existing_entry.invalidate() + self._pending_deferred_cache.pop(key, None) # XXX: why don't we invalidate the entry in `self.cache` yet? - # we can save a whole load of effort if the deferred is ready. 
- if value.called: - result = value.result - if not isinstance(result, failure.Failure): - self.cache.set(key, cast(VT, result), callbacks) - return value - # otherwise, we'll add an entry to the _pending_deferred_cache for now, # and add callbacks to add it to the cache properly later. - - observable = ObservableDeferred(value, consumeErrors=True) - observer = observable.observe() - entry = CacheEntry(deferred=observable, callbacks=callbacks) - + entry = CacheEntrySingle[KT, VT](value) + entry.add_invalidation_callback(key, callback) self._pending_deferred_cache[key] = entry - - def compare_and_pop() -> bool: - """Check if our entry is still the one in _pending_deferred_cache, and - if so, pop it. - - Returns true if the entries matched. - """ - existing_entry = self._pending_deferred_cache.pop(key, None) - if existing_entry is entry: - return True - - # oops, the _pending_deferred_cache has been updated since - # we started our query, so we are out of date. - # - # Better put back whatever we took out. (We do it this way - # round, rather than peeking into the _pending_deferred_cache - # and then removing on a match, to make the common case faster) - if existing_entry is not None: - self._pending_deferred_cache[key] = existing_entry - - return False - - def cb(result: VT) -> None: - if compare_and_pop(): - self.cache.set(key, result, entry.callbacks) - else: - # we're not going to put this entry into the cache, so need - # to make sure that the invalidation callbacks are called. - # That was probably done when _pending_deferred_cache was - # updated, but it's possible that `set` was called without - # `invalidate` being previously called, in which case it may - # not have been. Either way, let's double-check now. - entry.invalidate() - - def eb(_fail: Failure) -> None: - compare_and_pop() - entry.invalidate() - - # once the deferred completes, we can move the entry from the - # _pending_deferred_cache to the real cache. - # - observer.addCallbacks(cb, eb) + deferred = entry.deferred(key).addCallbacks( + self._completed_callback, + self._error_callback, + callbackArgs=(entry, key), + errbackArgs=(entry, key), + ) # we return a new Deferred which will be called before any subsequent observers. - return observable.observe() + return deferred + + def start_bulk_input( + self, + keys: Collection[KT], + callback: Optional[Callable[[], None]] = None, + ) -> "CacheMultipleEntries[KT, VT]": + """Bulk set API for use when fetching multiple keys at once from the DB. + + Called *before* starting the fetch from the DB, and the caller *must* + call either `complete_bulk(..)` or `error_bulk(..)` on the return value. + """ + + entry = CacheMultipleEntries[KT, VT]() + entry.add_global_invalidation_callback(callback) + + for key in keys: + self._pending_deferred_cache[key] = entry + + return entry + + def _completed_callback( + self, value: VT, entry: "CacheEntry[KT, VT]", key: KT + ) -> VT: + """Called when a deferred is completed.""" + # We check if the current entry matches the entry associated with the + # deferred. If they don't match then it got invalidated. 
+ current_entry = self._pending_deferred_cache.pop(key, None) + if current_entry is not entry: + if current_entry: + self._pending_deferred_cache[key] = current_entry + return value + + self.cache.set(key, value, entry.get_invalidation_callbacks(key)) + + return value + + def _error_callback( + self, + failure: Failure, + entry: "CacheEntry[KT, VT]", + key: KT, + ) -> Failure: + """Called when a deferred errors.""" + + # We check if the current entry matches the entry associated with the + # deferred. If they don't match then it got invalidated. + current_entry = self._pending_deferred_cache.pop(key, None) + if current_entry is not entry: + if current_entry: + self._pending_deferred_cache[key] = current_entry + return failure + + for cb in entry.get_invalidation_callbacks(key): + cb() + + return failure def prefill( self, key: KT, value: VT, callback: Optional[Callable[[], None]] = None ) -> None: - callbacks = [callback] if callback else [] + callbacks = (callback,) if callback else () self.cache.set(key, value, callbacks=callbacks) + self._pending_deferred_cache.pop(key, None) def invalidate(self, key: KT) -> None: """Delete a key, or tree of entries @@ -311,41 +387,129 @@ class DeferredCache(Generic[KT, VT]): self.cache.del_multi(key) # if we have a pending lookup for this key, remove it from the - # _pending_deferred_cache, which will (a) stop it being returned - # for future queries and (b) stop it being persisted as a proper entry + # _pending_deferred_cache, which will (a) stop it being returned for + # future queries and (b) stop it being persisted as a proper entry # in self.cache. entry = self._pending_deferred_cache.pop(key, None) - - # run the invalidation callbacks now, rather than waiting for the - # deferred to resolve. if entry: # _pending_deferred_cache.pop should either return a CacheEntry, or, in the # case of a TreeCache, a dict of keys to cache entries. Either way calling # iterate_tree_cache_entry on it will do the right thing. for entry in iterate_tree_cache_entry(entry): - entry.invalidate() + for cb in entry.get_invalidation_callbacks(key): + cb() def invalidate_all(self) -> None: self.check_thread() self.cache.clear() - for entry in self._pending_deferred_cache.values(): - entry.invalidate() + for key, entry in self._pending_deferred_cache.items(): + for cb in entry.get_invalidation_callbacks(key): + cb() + self._pending_deferred_cache.clear() -class CacheEntry: - __slots__ = ["deferred", "callbacks", "invalidated"] +class CacheEntry(Generic[KT, VT], metaclass=abc.ABCMeta): + """Abstract class for entries in `DeferredCache[KT, VT]`""" - def __init__( - self, deferred: ObservableDeferred, callbacks: Iterable[Callable[[], None]] - ): - self.deferred = deferred - self.callbacks = set(callbacks) - self.invalidated = False + @abc.abstractmethod + def deferred(self, key: KT) -> "defer.Deferred[VT]": + """Get a deferred that a caller can wait on to get the value at the + given key""" + ... - def invalidate(self) -> None: - if not self.invalidated: - self.invalidated = True - for callback in self.callbacks: - callback() - self.callbacks.clear() + @abc.abstractmethod + def add_invalidation_callback( + self, key: KT, callback: Optional[Callable[[], None]] + ) -> None: + """Add an invalidation callback""" + ... + + @abc.abstractmethod + def get_invalidation_callbacks(self, key: KT) -> Collection[Callable[[], None]]: + """Get all invalidation callbacks""" + ... 
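To make the shape of the new bulk API concrete, here is a hedged sketch of a caller combining `get_bulk` and `start_bulk_input` as the docstrings above describe (`fetch_from_db` is a hypothetical stand-in, and an entry returned by `start_bulk_input` must always be completed or errored):

    cached, pending, missing = cache.get_bulk(keys)
    results = dict(cached)

    if missing:
        entry = cache.start_bulk_input(missing)
        try:
            # Hypothetical DB call; should produce a value for each missing key.
            fetched = await fetch_from_db(missing)
            entry.complete_bulk(cache, fetched)
            results.update(fetched)
        except Exception:
            # Failure() captures the in-flight exception (twisted.python.failure).
            entry.error_bulk(cache, missing, Failure())
            raise

    if pending:
        # Keys that some other caller is already fetching.
        results.update(await pending)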
+ + +class CacheEntrySingle(CacheEntry[KT, VT]): + """An implementation of `CacheEntry` wrapping a deferred that results in a + single cache entry. + """ + + __slots__ = ["_deferred", "_callbacks"] + + def __init__(self, deferred: "defer.Deferred[VT]") -> None: + self._deferred = ObservableDeferred(deferred, consumeErrors=True) + self._callbacks: Set[Callable[[], None]] = set() + + def deferred(self, key: KT) -> "defer.Deferred[VT]": + return self._deferred.observe() + + def add_invalidation_callback( + self, key: KT, callback: Optional[Callable[[], None]] + ) -> None: + if callback is None: + return + + self._callbacks.add(callback) + + def get_invalidation_callbacks(self, key: KT) -> Collection[Callable[[], None]]: + return self._callbacks + + +class CacheMultipleEntries(CacheEntry[KT, VT]): + """Cache entry that is used for bulk lookups and insertions.""" + + __slots__ = ["_deferred", "_callbacks", "_global_callbacks"] + + def __init__(self) -> None: + self._deferred: Optional[ObservableDeferred[Dict[KT, VT]]] = None + self._callbacks: Dict[KT, Set[Callable[[], None]]] = {} + self._global_callbacks: Set[Callable[[], None]] = set() + + def deferred(self, key: KT) -> "defer.Deferred[VT]": + if not self._deferred: + self._deferred = ObservableDeferred(defer.Deferred(), consumeErrors=True) + return self._deferred.observe().addCallback(lambda res: res.get(key)) + + def add_invalidation_callback( + self, key: KT, callback: Optional[Callable[[], None]] + ) -> None: + if callback is None: + return + + self._callbacks.setdefault(key, set()).add(callback) + + def get_invalidation_callbacks(self, key: KT) -> Collection[Callable[[], None]]: + return self._callbacks.get(key, set()) | self._global_callbacks + + def add_global_invalidation_callback( + self, callback: Optional[Callable[[], None]] + ) -> None: + """Add a callback for when any keys get invalidated.""" + if callback is None: + return + + self._global_callbacks.add(callback) + + def complete_bulk( + self, + cache: DeferredCache[KT, VT], + result: Dict[KT, VT], + ) -> None: + """Called when there is a result""" + for key, value in result.items(): + cache._completed_callback(value, self, key) + + if self._deferred: + self._deferred.callback(result) + + def error_bulk( + self, cache: DeferredCache[KT, VT], keys: Collection[KT], failure: Failure + ) -> None: + """Called when bulk lookup failed.""" + for key in keys: + cache._error_callback(failure, self, key) + + if self._deferred: + self._deferred.errback(failure) diff --git a/synapse/util/caches/descriptors.py b/synapse/util/caches/descriptors.py index 867f315b2..10aff4d04 100644 --- a/synapse/util/caches/descriptors.py +++ b/synapse/util/caches/descriptors.py @@ -25,6 +25,7 @@ from typing import ( Generic, Hashable, Iterable, + List, Mapping, Optional, Sequence, @@ -73,8 +74,10 @@ class _CacheDescriptorBase: num_args: Optional[int], uncached_args: Optional[Collection[str]] = None, cache_context: bool = False, + name: Optional[str] = None, ): self.orig = orig + self.name = name or orig.__name__ arg_spec = inspect.getfullargspec(orig) all_args = arg_spec.args @@ -211,7 +214,7 @@ class LruCacheDescriptor(_CacheDescriptorBase): def __get__(self, obj: Optional[Any], owner: Optional[Type]) -> Callable[..., Any]: cache: LruCache[CacheKey, Any] = LruCache( - cache_name=self.orig.__name__, + cache_name=self.name, max_size=self.max_entries, ) @@ -241,7 +244,7 @@ class LruCacheDescriptor(_CacheDescriptorBase): wrapped = cast(_CachedFunction, _wrapped) wrapped.cache = cache - 
obj.__dict__[self.orig.__name__] = wrapped + obj.__dict__[self.name] = wrapped return wrapped @@ -301,12 +304,14 @@ class DeferredCacheDescriptor(_CacheDescriptorBase): cache_context: bool = False, iterable: bool = False, prune_unread_entries: bool = True, + name: Optional[str] = None, ): super().__init__( orig, num_args=num_args, uncached_args=uncached_args, cache_context=cache_context, + name=name, ) if tree and self.num_args < 2: @@ -321,7 +326,7 @@ class DeferredCacheDescriptor(_CacheDescriptorBase): def __get__(self, obj: Optional[Any], owner: Optional[Type]) -> Callable[..., Any]: cache: DeferredCache[CacheKey, Any] = DeferredCache( - name=self.orig.__name__, + name=self.name, max_entries=self.max_entries, tree=self.tree, iterable=self.iterable, @@ -372,7 +377,7 @@ class DeferredCacheDescriptor(_CacheDescriptorBase): wrapped.cache = cache wrapped.num_args = self.num_args - obj.__dict__[self.orig.__name__] = wrapped + obj.__dict__[self.name] = wrapped return wrapped @@ -393,6 +398,7 @@ class DeferredCacheListDescriptor(_CacheDescriptorBase): cached_method_name: str, list_name: str, num_args: Optional[int] = None, + name: Optional[str] = None, ): """ Args: @@ -403,7 +409,7 @@ class DeferredCacheListDescriptor(_CacheDescriptorBase): but including list_name) to use as cache keys. Defaults to all named args of the function. """ - super().__init__(orig, num_args=num_args, uncached_args=None) + super().__init__(orig, num_args=num_args, uncached_args=None, name=name) self.list_name = list_name @@ -435,16 +441,6 @@ class DeferredCacheListDescriptor(_CacheDescriptorBase): keyargs = [arg_dict[arg_nm] for arg_nm in self.arg_names] list_args = arg_dict[self.list_name] - results = {} - - def update_results_dict(res: Any, arg: Hashable) -> None: - results[arg] = res - - # list of deferreds to wait for - cached_defers = [] - - missing = set() - # If the cache takes a single arg then that is used as the key, # otherwise a tuple is used. if num_args == 1: @@ -452,6 +448,9 @@ class DeferredCacheListDescriptor(_CacheDescriptorBase): def arg_to_cache_key(arg: Hashable) -> Hashable: return arg + def cache_key_to_arg(key: tuple) -> Hashable: + return key + else: keylist = list(keyargs) @@ -459,58 +458,53 @@ class DeferredCacheListDescriptor(_CacheDescriptorBase): keylist[self.list_pos] = arg return tuple(keylist) - for arg in list_args: - try: - res = cache.get(arg_to_cache_key(arg), callback=invalidate_callback) - if not res.called: - res.addCallback(update_results_dict, arg) - cached_defers.append(res) - else: - results[arg] = res.result - except KeyError: - missing.add(arg) + def cache_key_to_arg(key: tuple) -> Hashable: + return key[self.list_pos] + + cache_keys = [arg_to_cache_key(arg) for arg in list_args] + immediate_results, pending_deferred, missing = cache.get_bulk( + cache_keys, callback=invalidate_callback + ) + + results = {cache_key_to_arg(key): v for key, v in immediate_results.items()} + + cached_defers: List["defer.Deferred[Any]"] = [] + if pending_deferred: + + def update_results(r: Dict) -> None: + for k, v in r.items(): + results[cache_key_to_arg(k)] = v + + pending_deferred.addCallback(update_results) + cached_defers.append(pending_deferred) if missing: - # we need a deferred for each entry in the list, - # which we put in the cache. Each deferred resolves with the - # relevant result for that key. 
- deferreds_map = {} - for arg in missing: - deferred: "defer.Deferred[Any]" = defer.Deferred() - deferreds_map[arg] = deferred - key = arg_to_cache_key(arg) - cached_defers.append( - cache.set(key, deferred, callback=invalidate_callback) - ) + cache_entry = cache.start_bulk_input(missing, invalidate_callback) def complete_all(res: Dict[Hashable, Any]) -> None: - # the wrapped function has completed. It returns a dict. - # We can now update our own result map, and then resolve the - # observable deferreds in the cache. - for e, d1 in deferreds_map.items(): - val = res.get(e, None) - # make sure we update the results map before running the - # deferreds, because as soon as we run the last deferred, the - # gatherResults() below will complete and return the result - # dict to our caller. - results[e] = val - d1.callback(val) + missing_results = {} + for key in missing: + arg = cache_key_to_arg(key) + val = res.get(arg, None) + + results[arg] = val + missing_results[key] = val + + cache_entry.complete_bulk(cache, missing_results) def errback_all(f: Failure) -> None: - # the wrapped function has failed. Propagate the failure into - # the cache, which will invalidate the entry, and cause the - # relevant cached_deferreds to fail, which will propagate the - # failure to our caller. - for d1 in deferreds_map.values(): - d1.errback(f) + cache_entry.error_bulk(cache, missing, f) args_to_call = dict(arg_dict) - args_to_call[self.list_name] = missing + args_to_call[self.list_name] = { + cache_key_to_arg(key) for key in missing + } # dispatch the call, and attach the two handlers - defer.maybeDeferred( + missing_d = defer.maybeDeferred( preserve_fn(self.orig), **args_to_call ).addCallbacks(complete_all, errback_all) + cached_defers.append(missing_d) if cached_defers: d = defer.gatherResults(cached_defers, consumeErrors=True).addCallbacks( @@ -525,7 +519,7 @@ class DeferredCacheListDescriptor(_CacheDescriptorBase): else: return defer.succeed(results) - obj.__dict__[self.orig.__name__] = wrapped + obj.__dict__[self.name] = wrapped return wrapped @@ -577,6 +571,7 @@ def cached( cache_context: bool = False, iterable: bool = False, prune_unread_entries: bool = True, + name: Optional[str] = None, ) -> Callable[[F], _CachedFunction[F]]: func = lambda orig: DeferredCacheDescriptor( orig, @@ -587,13 +582,18 @@ def cached( cache_context=cache_context, iterable=iterable, prune_unread_entries=prune_unread_entries, + name=name, ) return cast(Callable[[F], _CachedFunction[F]], func) def cachedList( - *, cached_method_name: str, list_name: str, num_args: Optional[int] = None + *, + cached_method_name: str, + list_name: str, + num_args: Optional[int] = None, + name: Optional[str] = None, ) -> Callable[[F], _CachedFunction[F]]: """Creates a descriptor that wraps a function in a `DeferredCacheListDescriptor`. 
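For orientation, a hedged sketch of how the two decorators pair up on a store class (illustrative names only; the `name=` argument added in this change overrides the cache name, which otherwise defaults to the wrapped function's `__name__`):

    from typing import Collection, Dict

    from synapse.util.caches.descriptors import cached, cachedList

    class ExampleStore:
        @cached(name="get_widget")
        async def get_widget(self, widget_id: str) -> str:
            ...

        @cachedList(cached_method_name="get_widget", list_name="widget_ids")
        async def get_widgets(self, widget_ids: Collection[str]) -> Dict[str, str]:
            # Must return a dict keyed by the requested widget_ids; per the
            # rewrite above, any key absent from the result is filled with None.
            ...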
@@ -628,6 +628,7 @@ def cachedList( cached_method_name=cached_method_name, list_name=list_name, num_args=num_args, + name=name, ) return cast(Callable[[F], _CachedFunction[F]], func) diff --git a/synapse/util/caches/treecache.py b/synapse/util/caches/treecache.py index c1b8ec0c7..fec31da2b 100644 --- a/synapse/util/caches/treecache.py +++ b/synapse/util/caches/treecache.py @@ -135,6 +135,9 @@ class TreeCache: def values(self): return iterate_tree_cache_entry(self.root) + def items(self): + return iterate_tree_cache_items((), self.root) + def __len__(self) -> int: return self.size diff --git a/synapse/util/cancellation.py b/synapse/util/cancellation.py new file mode 100644 index 000000000..472d2e3ae --- /dev/null +++ b/synapse/util/cancellation.py @@ -0,0 +1,56 @@ +# Copyright 2022 The Matrix.org Foundation C.I.C. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +from typing import Any, Callable, TypeVar + +F = TypeVar("F", bound=Callable[..., Any]) + + +def cancellable(function: F) -> F: + """Marks a function as cancellable. + + Servlet methods with this decorator will be cancelled if the client disconnects before we + finish processing the request. + + Although this annotation is particularly useful for servlet methods, it's also + useful for intermediate functions, where it documents the fact that the function has + been audited for cancellation safety and needs to preserve that. + This then simplifies auditing new functions that call those same intermediate + functions. + + During cancellation, `Deferred.cancel()` will be invoked on the `Deferred` wrapping + the method. The `cancel()` call will propagate down to the `Deferred` that is + currently being waited on. That `Deferred` will raise a `CancelledError`, which will + propagate up, as per normal exception handling. + + Before applying this decorator to a new function, you MUST recursively check + that all `await`s in the function are on `async` functions or `Deferred`s that + handle cancellation cleanly, otherwise a variety of bugs may occur, ranging from + premature logging context closure, to stuck requests, to database corruption. + + See the documentation page on Cancellation for more information. + + Usage: + class SomeServlet(RestServlet): + @cancellable + async def on_GET(self, request: SynapseRequest) -> ...: + ... 
+    """
+
+    function.cancellable = True  # type: ignore[attr-defined]
+    return function
+
+
+def is_function_cancellable(function: Callable[..., Any]) -> bool:
+    """Checks whether a servlet method has the `@cancellable` flag."""
+    return getattr(function, "cancellable", False)
diff --git a/tests/app/test_openid_listener.py b/tests/app/test_openid_listener.py
index 264e10108..c7dae58eb 100644
--- a/tests/app/test_openid_listener.py
+++ b/tests/app/test_openid_listener.py
@@ -61,7 +61,7 @@ class FederationReaderOpenIDListenerTests(HomeserverTestCase):
         }
 
         # Listen with the config
-        self.hs._listen_http(parse_listener_def(config))
+        self.hs._listen_http(parse_listener_def(0, config))
 
         # Grab the resource from the site that was told to listen
         site = self.reactor.tcpServers[0][1]
@@ -109,7 +109,7 @@ class SynapseHomeserverOpenIDListenerTests(HomeserverTestCase):
         }
 
         # Listen with the config
-        self.hs._listener_http(self.hs.config, parse_listener_def(config))
+        self.hs._listener_http(self.hs.config, parse_listener_def(0, config))
 
         # Grab the resource from the site that was told to listen
         site = self.reactor.tcpServers[0][1]
diff --git a/tests/federation/test_federation_sender.py b/tests/federation/test_federation_sender.py
index 01a1db611..a5aa500ef 100644
--- a/tests/federation/test_federation_sender.py
+++ b/tests/federation/test_federation_sender.py
@@ -173,17 +173,24 @@ class FederationSenderDevicesTestCases(HomeserverTestCase):
         return c
 
     def prepare(self, reactor, clock, hs):
-        # stub out `get_rooms_for_user` and `get_users_in_room` so that the
+        test_room_id = "!room:host1"
+
+        # stub out `get_rooms_for_user` and `get_current_hosts_in_room` so that the
         # server thinks the user shares a room with `@user2:host2`
         def get_rooms_for_user(user_id):
-            return defer.succeed({"!room:host1"})
+            return defer.succeed({test_room_id})
 
         hs.get_datastores().main.get_rooms_for_user = get_rooms_for_user
 
-        def get_users_in_room(room_id):
-            return defer.succeed({"@user2:host2"})
+        async def get_current_hosts_in_room(room_id):
+            if room_id == test_room_id:
+                return ["host2"]
 
-        hs.get_datastores().main.get_users_in_room = get_users_in_room
+            # TODO: We should fail the test when we encounter an unexpected room ID.
+            # We can't just use `self.fail(...)` here because the app code is greedy
+            # with `Exception` and will catch it before the test can see it.
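Stepping back to `synapse/util/cancellation.py`, added above: the decorator and its checker amount to a simple attribute round-trip, e.g. (illustrative sketch, not part of the diff):

    from synapse.util.cancellation import cancellable, is_function_cancellable

    @cancellable
    async def on_GET(request):
        ...

    async def on_POST(request):
        ...

    assert is_function_cancellable(on_GET)       # True: the decorator set the flag
    assert not is_function_cancellable(on_POST)  # False: attribute is absent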
+ + hs.get_datastores().main.get_current_hosts_in_room = get_current_hosts_in_room # whenever send_transaction is called, record the edu data self.edus = [] diff --git a/tests/federation/transport/server/test__base.py b/tests/federation/transport/server/test__base.py index d33e86db4..e88e5d8bb 100644 --- a/tests/federation/transport/server/test__base.py +++ b/tests/federation/transport/server/test__base.py @@ -18,9 +18,10 @@ from typing import Dict, List, Tuple from synapse.api.errors import Codes from synapse.federation.transport.server import BaseFederationServlet from synapse.federation.transport.server._base import Authenticator, _parse_auth_header -from synapse.http.server import JsonResource, cancellable +from synapse.http.server import JsonResource from synapse.server import HomeServer from synapse.types import JsonDict +from synapse.util.cancellation import cancellable from synapse.util.ratelimitutils import FederationRateLimiter from tests import unittest diff --git a/tests/handlers/test_receipts.py b/tests/handlers/test_receipts.py index 5f70a2db7..b55238650 100644 --- a/tests/handlers/test_receipts.py +++ b/tests/handlers/test_receipts.py @@ -15,8 +15,6 @@ from copy import deepcopy from typing import List -from parameterized import parameterized - from synapse.api.constants import EduTypes, ReceiptTypes from synapse.types import JsonDict @@ -27,16 +25,13 @@ class ReceiptsTestCase(unittest.HomeserverTestCase): def prepare(self, reactor, clock, hs): self.event_source = hs.get_event_sources().sources.receipt - @parameterized.expand( - [ReceiptTypes.READ_PRIVATE, ReceiptTypes.UNSTABLE_READ_PRIVATE] - ) - def test_filters_out_private_receipt(self, receipt_type: str) -> None: + def test_filters_out_private_receipt(self) -> None: self._test_filters_private( [ { "content": { "$1435641916114394fHBLK:matrix.org": { - receipt_type: { + ReceiptTypes.READ_PRIVATE: { "@rikj:jki.re": { "ts": 1436451550453, } @@ -50,18 +45,13 @@ class ReceiptsTestCase(unittest.HomeserverTestCase): [], ) - @parameterized.expand( - [ReceiptTypes.READ_PRIVATE, ReceiptTypes.UNSTABLE_READ_PRIVATE] - ) - def test_filters_out_private_receipt_and_ignores_rest( - self, receipt_type: str - ) -> None: + def test_filters_out_private_receipt_and_ignores_rest(self) -> None: self._test_filters_private( [ { "content": { "$1dgdgrd5641916114394fHBLK:matrix.org": { - receipt_type: { + ReceiptTypes.READ_PRIVATE: { "@rikj:jki.re": { "ts": 1436451550453, }, @@ -94,18 +84,15 @@ class ReceiptsTestCase(unittest.HomeserverTestCase): ], ) - @parameterized.expand( - [ReceiptTypes.READ_PRIVATE, ReceiptTypes.UNSTABLE_READ_PRIVATE] - ) def test_filters_out_event_with_only_private_receipts_and_ignores_the_rest( - self, receipt_type: str + self, ) -> None: self._test_filters_private( [ { "content": { "$14356419edgd14394fHBLK:matrix.org": { - receipt_type: { + ReceiptTypes.READ_PRIVATE: { "@rikj:jki.re": { "ts": 1436451550453, }, @@ -175,18 +162,15 @@ class ReceiptsTestCase(unittest.HomeserverTestCase): ], ) - @parameterized.expand( - [ReceiptTypes.READ_PRIVATE, ReceiptTypes.UNSTABLE_READ_PRIVATE] - ) def test_filters_out_receipt_event_with_only_private_receipt_and_ignores_rest( - self, receipt_type: str + self, ) -> None: self._test_filters_private( [ { "content": { "$14356419edgd14394fHBLK:matrix.org": { - receipt_type: { + ReceiptTypes.READ_PRIVATE: { "@rikj:jki.re": { "ts": 1436451550453, }, @@ -262,16 +246,13 @@ class ReceiptsTestCase(unittest.HomeserverTestCase): ], ) - @parameterized.expand( - [ReceiptTypes.READ_PRIVATE, 
ReceiptTypes.UNSTABLE_READ_PRIVATE]
-    )
-    def test_leaves_our_private_and_their_public(self, receipt_type: str) -> None:
+    def test_leaves_our_private_and_their_public(self) -> None:
         self._test_filters_private(
             [
                 {
                     "content": {
                         "$1dgdgrd5641916114394fHBLK:matrix.org": {
-                            receipt_type: {
+                            ReceiptTypes.READ_PRIVATE: {
                                 "@me:server.org": {
                                     "ts": 1436451550453,
                                 },
@@ -296,7 +277,7 @@ class ReceiptsTestCase(unittest.HomeserverTestCase):
                 {
                     "content": {
                         "$1dgdgrd5641916114394fHBLK:matrix.org": {
-                            receipt_type: {
+                            ReceiptTypes.READ_PRIVATE: {
                                 "@me:server.org": {
                                     "ts": 1436451550453,
                                 },
@@ -319,16 +300,13 @@ class ReceiptsTestCase(unittest.HomeserverTestCase):
             ],
         )
 
-    @parameterized.expand(
-        [ReceiptTypes.READ_PRIVATE, ReceiptTypes.UNSTABLE_READ_PRIVATE]
-    )
-    def test_we_do_not_mutate(self, receipt_type: str) -> None:
+    def test_we_do_not_mutate(self) -> None:
        """Ensure the input values are not modified."""
         events = [
             {
                 "content": {
                     "$1435641916114394fHBLK:matrix.org": {
-                        receipt_type: {
+                        ReceiptTypes.READ_PRIVATE: {
                             "@rikj:jki.re": {
                                 "ts": 1436451550453,
                             }
diff --git a/tests/handlers/test_room_member.py b/tests/handlers/test_room_member.py
index 1d13ed1e8..6bbfd5dc8 100644
--- a/tests/handlers/test_room_member.py
+++ b/tests/handlers/test_room_member.py
@@ -6,7 +6,7 @@ import synapse.rest.admin
 import synapse.rest.client.login
 import synapse.rest.client.room
 from synapse.api.constants import EventTypes, Membership
-from synapse.api.errors import LimitExceededError
+from synapse.api.errors import LimitExceededError, SynapseError
 from synapse.crypto.event_signing import add_hashes_and_signatures
 from synapse.events import FrozenEventV3
 from synapse.federation.federation_client import SendJoinResult
@@ -17,7 +17,11 @@ from synapse.util import Clock
 from tests.replication._base import BaseMultiWorkerStreamTestCase
 from tests.server import make_request
 from tests.test_utils import make_awaitable
-from tests.unittest import FederatingHomeserverTestCase, override_config
+from tests.unittest import (
+    FederatingHomeserverTestCase,
+    HomeserverTestCase,
+    override_config,
+)
 
 
 class TestJoinsLimitedByPerRoomRateLimiter(FederatingHomeserverTestCase):
@@ -287,3 +291,88 @@ class TestReplicatedJoinsLimitedByPerRoomRateLimiter(BaseMultiWorkerStreamTestCa
             ),
             LimitExceededError,
         )
+
+
+class RoomMemberMasterHandlerTestCase(HomeserverTestCase):
+    servlets = [
+        synapse.rest.admin.register_servlets,
+        synapse.rest.client.login.register_servlets,
+        synapse.rest.client.room.register_servlets,
+    ]
+
+    def prepare(self, reactor: MemoryReactor, clock: Clock, hs: HomeServer) -> None:
+        self.handler = hs.get_room_member_handler()
+        self.store = hs.get_datastores().main
+
+        # Create two users.
+        self.alice = self.register_user("alice", "pass")
+        self.alice_ID = UserID.from_string(self.alice)
+        self.alice_token = self.login("alice", "pass")
+        self.bob = self.register_user("bob", "pass")
+        self.bob_ID = UserID.from_string(self.bob)
+        self.bob_token = self.login("bob", "pass")
+
+        # Create a room on this homeserver.
+        self.room_id = self.helper.create_room_as(self.alice, tok=self.alice_token)
+
+    def test_leave_and_forget(self) -> None:
+        """Tests that forgetting a room works correctly.
The test is performed with two users,
+        as forgetting by the last user (respectively after all users have left
+        the room) is a special edge case."""
+        self.helper.join(self.room_id, user=self.bob, tok=self.bob_token)
+
+        # alice is not the last room member that leaves and forgets the room
+        self.helper.leave(self.room_id, user=self.alice, tok=self.alice_token)
+        self.get_success(self.handler.forget(self.alice_ID, self.room_id))
+        self.assertTrue(
+            self.get_success(self.store.did_forget(self.alice, self.room_id))
+        )
+
+        # the server has not forgotten the room
+        self.assertFalse(
+            self.get_success(self.store.is_locally_forgotten_room(self.room_id))
+        )
+
+    def test_leave_and_forget_last_user(self) -> None:
+        """Tests that forgetting a room works correctly when the last user has left the room."""
+
+        # alice is the last room member that leaves and forgets the room
+        self.helper.leave(self.room_id, user=self.alice, tok=self.alice_token)
+        self.get_success(self.handler.forget(self.alice_ID, self.room_id))
+        self.assertTrue(
+            self.get_success(self.store.did_forget(self.alice, self.room_id))
+        )
+
+        # the server has forgotten the room
+        self.assertTrue(
+            self.get_success(self.store.is_locally_forgotten_room(self.room_id))
+        )
+
+    def test_forget_when_not_left(self) -> None:
+        """Tests that a user cannot forget a room they have not left."""
+        self.get_failure(self.handler.forget(self.alice_ID, self.room_id), SynapseError)
+
+    def test_rejoin_forgotten_by_user(self) -> None:
+        """Test that a user who has forgotten a room can rejoin it.
+        The room is not forgotten by the local server, as one local user
+        is still a member of the room."""
+        self.helper.join(self.room_id, user=self.bob, tok=self.bob_token)
+
+        self.helper.leave(self.room_id, user=self.alice, tok=self.alice_token)
+        self.get_success(self.handler.forget(self.alice_ID, self.room_id))
+        self.assertTrue(
+            self.get_success(self.store.did_forget(self.alice, self.room_id))
+        )
+
+        # the server has not forgotten the room
+        self.assertFalse(
+            self.get_success(self.store.is_locally_forgotten_room(self.room_id))
+        )
+
+        self.helper.join(self.room_id, user=self.alice, tok=self.alice_token)
+        # TODO: A join to a room does not invalidate the forgotten cache
+        # see https://github.com/matrix-org/synapse/issues/13262
+        self.store.did_forget.invalidate_all()
+        self.assertFalse(
+            self.get_success(self.store.did_forget(self.alice, self.room_id))
+        )
diff --git a/tests/http/test_servlet.py b/tests/http/test_servlet.py
index bb966c80c..3cbca0f5a 100644
--- a/tests/http/test_servlet.py
+++ b/tests/http/test_servlet.py
@@ -18,7 +18,6 @@ from typing import Tuple
 from unittest.mock import Mock
 
 from synapse.api.errors import Codes, SynapseError
-from synapse.http.server import cancellable
 from synapse.http.servlet import (
     RestServlet,
     parse_json_object_from_request,
@@ -28,6 +27,7 @@ from synapse.http.site import SynapseRequest
 from synapse.rest.client._base import client_patterns
 from synapse.server import HomeServer
 from synapse.types import JsonDict
+from synapse.util.cancellation import cancellable
 
 from tests import unittest
 from tests.http.server._base import test_disconnect
diff --git a/tests/replication/http/test__base.py b/tests/replication/http/test__base.py
index 822a957c3..936ab4504 100644
--- a/tests/replication/http/test__base.py
+++ b/tests/replication/http/test__base.py
@@ -18,11 +18,12 @@ from typing import Tuple
 
 from twisted.web.server import Request
 
 from synapse.api.errors import Codes
-from synapse.http.server import JsonResource,
cancellable +from synapse.http.server import JsonResource from synapse.replication.http import REPLICATION_PREFIX from synapse.replication.http._base import ReplicationEndpoint from synapse.server import HomeServer from synapse.types import JsonDict +from synapse.util.cancellation import cancellable from tests import unittest from tests.http.server._base import test_disconnect diff --git a/tests/rest/admin/test_room.py b/tests/rest/admin/test_room.py index fd6da557c..9d71a9752 100644 --- a/tests/rest/admin/test_room.py +++ b/tests/rest/admin/test_room.py @@ -1080,7 +1080,9 @@ class RoomTestCase(unittest.HomeserverTestCase): room_ids = [] for _ in range(total_rooms): room_id = self.helper.create_room_as( - self.admin_user, tok=self.admin_user_tok + self.admin_user, + tok=self.admin_user_tok, + is_public=True, ) room_ids.append(room_id) @@ -1119,8 +1121,8 @@ class RoomTestCase(unittest.HomeserverTestCase): self.assertIn("version", r) self.assertIn("creator", r) self.assertIn("encryption", r) - self.assertIn("federatable", r) - self.assertIn("public", r) + self.assertIs(r["federatable"], True) + self.assertIs(r["public"], True) self.assertIn("join_rules", r) self.assertIn("guest_access", r) self.assertIn("history_visibility", r) @@ -1587,8 +1589,12 @@ class RoomTestCase(unittest.HomeserverTestCase): def test_single_room(self) -> None: """Test that a single room can be requested correctly""" # Create two test rooms - room_id_1 = self.helper.create_room_as(self.admin_user, tok=self.admin_user_tok) - room_id_2 = self.helper.create_room_as(self.admin_user, tok=self.admin_user_tok) + room_id_1 = self.helper.create_room_as( + self.admin_user, tok=self.admin_user_tok, is_public=True + ) + room_id_2 = self.helper.create_room_as( + self.admin_user, tok=self.admin_user_tok, is_public=False + ) room_name_1 = "something" room_name_2 = "else" @@ -1634,7 +1640,10 @@ class RoomTestCase(unittest.HomeserverTestCase): self.assertIn("state_events", channel.json_body) self.assertIn("room_type", channel.json_body) self.assertIn("forgotten", channel.json_body) + self.assertEqual(room_id_1, channel.json_body["room_id"]) + self.assertIs(True, channel.json_body["federatable"]) + self.assertIs(True, channel.json_body["public"]) def test_single_room_devices(self) -> None: """Test that `joined_local_devices` can be requested correctly""" diff --git a/tests/rest/client/test_identity.py b/tests/rest/client/test_identity.py index dc17c9d11..b0c821574 100644 --- a/tests/rest/client/test_identity.py +++ b/tests/rest/client/test_identity.py @@ -25,7 +25,6 @@ from tests import unittest class IdentityTestCase(unittest.HomeserverTestCase): - servlets = [ synapse.rest.admin.register_servlets_for_client_rest_resource, room.register_servlets, @@ -33,7 +32,6 @@ class IdentityTestCase(unittest.HomeserverTestCase): ] def make_homeserver(self, reactor: MemoryReactor, clock: Clock) -> HomeServer: - config = self.default_config() config["enable_3pid_lookup"] = False self.hs = self.setup_test_homeserver(config=config) @@ -54,6 +52,7 @@ class IdentityTestCase(unittest.HomeserverTestCase): "id_server": "testis", "medium": "email", "address": "test@example.com", + "id_access_token": tok, } request_url = ("/rooms/%s/invite" % (room_id)).encode("ascii") channel = self.make_request( diff --git a/tests/rest/client/test_relations.py b/tests/rest/client/test_relations.py index d589f0731..651f4f415 100644 --- a/tests/rest/client/test_relations.py +++ b/tests/rest/client/test_relations.py @@ -999,7 +999,7 @@ class 
BundledAggregationsTestCase(BaseRelationsTestCase): bundled_aggregations, ) - self._test_bundled_aggregations(RelationTypes.ANNOTATION, assert_annotations, 6) + self._test_bundled_aggregations(RelationTypes.ANNOTATION, assert_annotations, 7) def test_annotation_to_annotation(self) -> None: """Any relation to an annotation should be ignored.""" @@ -1035,7 +1035,7 @@ class BundledAggregationsTestCase(BaseRelationsTestCase): bundled_aggregations, ) - self._test_bundled_aggregations(RelationTypes.REFERENCE, assert_annotations, 6) + self._test_bundled_aggregations(RelationTypes.REFERENCE, assert_annotations, 7) def test_thread(self) -> None: """ @@ -1080,21 +1080,21 @@ class BundledAggregationsTestCase(BaseRelationsTestCase): # The "user" sent the root event and is making queries for the bundled # aggregations: they have participated. - self._test_bundled_aggregations(RelationTypes.THREAD, _gen_assert(True), 8) + self._test_bundled_aggregations(RelationTypes.THREAD, _gen_assert(True), 9) # The "user2" sent replies in the thread and is making queries for the # bundled aggregations: they have participated. # # Note that this re-uses some cached values, so the total number of # queries is much smaller. self._test_bundled_aggregations( - RelationTypes.THREAD, _gen_assert(True), 2, access_token=self.user2_token + RelationTypes.THREAD, _gen_assert(True), 3, access_token=self.user2_token ) # A user with no interactions with the thread: they have not participated. user3_id, user3_token = self._create_user("charlie") self.helper.join(self.room, user=user3_id, tok=user3_token) self._test_bundled_aggregations( - RelationTypes.THREAD, _gen_assert(False), 2, access_token=user3_token + RelationTypes.THREAD, _gen_assert(False), 3, access_token=user3_token ) def test_thread_with_bundled_aggregations_for_latest(self) -> None: @@ -1142,7 +1142,7 @@ class BundledAggregationsTestCase(BaseRelationsTestCase): bundled_aggregations["latest_event"].get("unsigned"), ) - self._test_bundled_aggregations(RelationTypes.THREAD, assert_thread, 8) + self._test_bundled_aggregations(RelationTypes.THREAD, assert_thread, 9) def test_nested_thread(self) -> None: """ diff --git a/tests/rest/client/test_rooms.py b/tests/rest/client/test_rooms.py index aa2f57844..c7eb88d33 100644 --- a/tests/rest/client/test_rooms.py +++ b/tests/rest/client/test_rooms.py @@ -3461,3 +3461,21 @@ class ThreepidInviteTestCase(unittest.HomeserverTestCase): # Also check that it stopped before calling _make_and_store_3pid_invite. make_invite_mock.assert_called_once() + + def test_400_missing_param_without_id_access_token(self) -> None: + """ + Test that a 3pid invite request returns 400 M_MISSING_PARAM + if we do not include id_access_token. 
+ """ + channel = self.make_request( + method="POST", + path="/rooms/" + self.room_id + "/invite", + content={ + "id_server": "example.com", + "medium": "email", + "address": "teresa@example.com", + }, + access_token=self.tok, + ) + self.assertEqual(channel.code, 400) + self.assertEqual(channel.json_body["errcode"], "M_MISSING_PARAM") diff --git a/tests/rest/client/test_shadow_banned.py b/tests/rest/client/test_shadow_banned.py index c50f034b3..c807a37bc 100644 --- a/tests/rest/client/test_shadow_banned.py +++ b/tests/rest/client/test_shadow_banned.py @@ -97,7 +97,12 @@ class RoomTestCase(_ShadowBannedBase): channel = self.make_request( "POST", "/rooms/%s/invite" % (room_id,), - {"id_server": "test", "medium": "email", "address": "test@test.test"}, + { + "id_server": "test", + "medium": "email", + "address": "test@test.test", + "id_access_token": "anytoken", + }, access_token=self.banned_access_token, ) self.assertEqual(200, channel.code, channel.result) diff --git a/tests/rest/client/test_sync.py b/tests/rest/client/test_sync.py index de0dec853..0af643ecd 100644 --- a/tests/rest/client/test_sync.py +++ b/tests/rest/client/test_sync.py @@ -391,7 +391,6 @@ class ReadReceiptsTestCase(unittest.HomeserverTestCase): def make_homeserver(self, reactor: MemoryReactor, clock: Clock) -> HomeServer: config = self.default_config() - config["experimental_features"] = {"msc2285_enabled": True} return self.setup_test_homeserver(config=config) @@ -413,17 +412,14 @@ class ReadReceiptsTestCase(unittest.HomeserverTestCase): # Join the second user self.helper.join(room=self.room_id, user=self.user2, tok=self.tok2) - @parameterized.expand( - [ReceiptTypes.READ_PRIVATE, ReceiptTypes.UNSTABLE_READ_PRIVATE] - ) - def test_private_read_receipts(self, receipt_type: str) -> None: + def test_private_read_receipts(self) -> None: # Send a message as the first user res = self.helper.send(self.room_id, body="hello", tok=self.tok) # Send a private read receipt to tell the server the first user's message was read channel = self.make_request( "POST", - f"/rooms/{self.room_id}/receipt/{receipt_type}/{res['event_id']}", + f"/rooms/{self.room_id}/receipt/{ReceiptTypes.READ_PRIVATE}/{res['event_id']}", {}, access_token=self.tok2, ) @@ -432,10 +428,7 @@ class ReadReceiptsTestCase(unittest.HomeserverTestCase): # Test that the first user can't see the other user's private read receipt self.assertIsNone(self._get_read_receipt()) - @parameterized.expand( - [ReceiptTypes.READ_PRIVATE, ReceiptTypes.UNSTABLE_READ_PRIVATE] - ) - def test_public_receipt_can_override_private(self, receipt_type: str) -> None: + def test_public_receipt_can_override_private(self) -> None: """ Sending a public read receipt to the same event which has a private read receipt should cause that receipt to become public. 
@@ -446,7 +439,7 @@ class ReadReceiptsTestCase(unittest.HomeserverTestCase): # Send a private read receipt channel = self.make_request( "POST", - f"/rooms/{self.room_id}/receipt/{receipt_type}/{res['event_id']}", + f"/rooms/{self.room_id}/receipt/{ReceiptTypes.READ_PRIVATE}/{res['event_id']}", {}, access_token=self.tok2, ) @@ -465,10 +458,7 @@ class ReadReceiptsTestCase(unittest.HomeserverTestCase): # Test that we did override the private read receipt self.assertNotEqual(self._get_read_receipt(), None) - @parameterized.expand( - [ReceiptTypes.READ_PRIVATE, ReceiptTypes.UNSTABLE_READ_PRIVATE] - ) - def test_private_receipt_cannot_override_public(self, receipt_type: str) -> None: + def test_private_receipt_cannot_override_public(self) -> None: """ Sending a private read receipt to the same event which has a public read receipt should cause no change. @@ -489,7 +479,7 @@ class ReadReceiptsTestCase(unittest.HomeserverTestCase): # Send a private read receipt channel = self.make_request( "POST", - f"/rooms/{self.room_id}/receipt/{receipt_type}/{res['event_id']}", + f"/rooms/{self.room_id}/receipt/{ReceiptTypes.READ_PRIVATE}/{res['event_id']}", {}, access_token=self.tok2, ) @@ -554,7 +544,6 @@ class UnreadMessagesTestCase(unittest.HomeserverTestCase): config = super().default_config() config["experimental_features"] = { "msc2654_enabled": True, - "msc2285_enabled": True, } return config @@ -601,10 +590,7 @@ class UnreadMessagesTestCase(unittest.HomeserverTestCase): tok=self.tok, ) - @parameterized.expand( - [ReceiptTypes.READ_PRIVATE, ReceiptTypes.UNSTABLE_READ_PRIVATE] - ) - def test_unread_counts(self, receipt_type: str) -> None: + def test_unread_counts(self) -> None: """Tests that /sync returns the right value for the unread count (MSC2654).""" # Check that our own messages don't increase the unread count. @@ -638,7 +624,7 @@ class UnreadMessagesTestCase(unittest.HomeserverTestCase): # Send a read receipt to tell the server we've read the latest event. 
channel = self.make_request( "POST", - f"/rooms/{self.room_id}/receipt/{receipt_type}/{res['event_id']}", + f"/rooms/{self.room_id}/receipt/{ReceiptTypes.READ_PRIVATE}/{res['event_id']}", {}, access_token=self.tok, ) @@ -726,7 +712,7 @@ class UnreadMessagesTestCase(unittest.HomeserverTestCase): channel = self.make_request( "POST", - f"/rooms/{self.room_id}/receipt/{receipt_type}/{res2['event_id']}", + f"/rooms/{self.room_id}/receipt/{ReceiptTypes.READ_PRIVATE}/{res2['event_id']}", {}, access_token=self.tok, ) @@ -738,7 +724,6 @@ class UnreadMessagesTestCase(unittest.HomeserverTestCase): [ ReceiptTypes.READ, ReceiptTypes.READ_PRIVATE, - ReceiptTypes.UNSTABLE_READ_PRIVATE, ] ) def test_read_receipts_only_go_down(self, receipt_type: str) -> None: @@ -752,7 +737,7 @@ class UnreadMessagesTestCase(unittest.HomeserverTestCase): # Read last event channel = self.make_request( "POST", - f"/rooms/{self.room_id}/receipt/{receipt_type}/{res2['event_id']}", + f"/rooms/{self.room_id}/receipt/{ReceiptTypes.READ_PRIVATE}/{res2['event_id']}", {}, access_token=self.tok, ) @@ -763,7 +748,7 @@ class UnreadMessagesTestCase(unittest.HomeserverTestCase): # read receipt go up to an older event channel = self.make_request( "POST", - f"/rooms/{self.room_id}/receipt/{receipt_type}/{res1['event_id']}", + f"/rooms/{self.room_id}/receipt/{ReceiptTypes.READ_PRIVATE}/{res1['event_id']}", {}, access_token=self.tok, ) diff --git a/tests/storage/test_event_push_actions.py b/tests/storage/test_event_push_actions.py index 62fd4aeb2..fc43d7edd 100644 --- a/tests/storage/test_event_push_actions.py +++ b/tests/storage/test_event_push_actions.py @@ -67,9 +67,7 @@ class EventPushActionsStoreTestCase(HomeserverTestCase): last_event_id: str - def _assert_counts( - noitf_count: int, unread_count: int, highlight_count: int - ) -> None: + def _assert_counts(noitf_count: int, highlight_count: int) -> None: counts = self.get_success( self.store.db_pool.runInteraction( "get-unread-counts", @@ -82,7 +80,7 @@ class EventPushActionsStoreTestCase(HomeserverTestCase): counts, NotifCounts( notify_count=noitf_count, - unread_count=unread_count, + unread_count=0, highlight_count=highlight_count, ), ) @@ -112,27 +110,27 @@ class EventPushActionsStoreTestCase(HomeserverTestCase): ) ) - _assert_counts(0, 0, 0) + _assert_counts(0, 0) _create_event() - _assert_counts(1, 1, 0) + _assert_counts(1, 0) _rotate() - _assert_counts(1, 1, 0) + _assert_counts(1, 0) event_id = _create_event() - _assert_counts(2, 2, 0) + _assert_counts(2, 0) _rotate() - _assert_counts(2, 2, 0) + _assert_counts(2, 0) _create_event() _mark_read(event_id) - _assert_counts(1, 1, 0) + _assert_counts(1, 0) _mark_read(last_event_id) - _assert_counts(0, 0, 0) + _assert_counts(0, 0) _create_event() _rotate() - _assert_counts(1, 1, 0) + _assert_counts(1, 0) # Delete old event push actions, this should not affect the (summarised) count. # @@ -151,35 +149,35 @@ class EventPushActionsStoreTestCase(HomeserverTestCase): ) ) self.assertEqual(result, []) - _assert_counts(1, 1, 0) + _assert_counts(1, 0) _mark_read(last_event_id) - _assert_counts(0, 0, 0) + _assert_counts(0, 0) event_id = _create_event(True) - _assert_counts(1, 1, 1) + _assert_counts(1, 1) _rotate() - _assert_counts(1, 1, 1) + _assert_counts(1, 1) # Check that adding another notification and rotating after highlight # works. _create_event() _rotate() - _assert_counts(2, 2, 1) + _assert_counts(2, 1) # Check that sending read receipts at different points results in the # right counts. 
_mark_read(event_id) - _assert_counts(1, 1, 0) + _assert_counts(1, 0) _mark_read(last_event_id) - _assert_counts(0, 0, 0) + _assert_counts(0, 0) _create_event(True) - _assert_counts(1, 1, 1) + _assert_counts(1, 1) _mark_read(last_event_id) - _assert_counts(0, 0, 0) + _assert_counts(0, 0) _rotate() - _assert_counts(0, 0, 0) + _assert_counts(0, 0) def test_find_first_stream_ordering_after_ts(self) -> None: def add_event(so: int, ts: int) -> None: diff --git a/tests/storage/test_receipts.py b/tests/storage/test_receipts.py index 191c957fb..c89bfff24 100644 --- a/tests/storage/test_receipts.py +++ b/tests/storage/test_receipts.py @@ -12,7 +12,6 @@ # See the License for the specific language governing permissions and # limitations under the License. -from parameterized import parameterized from synapse.api.constants import ReceiptTypes from synapse.types import UserID, create_requester @@ -92,7 +91,6 @@ class ReceiptTestCase(HomeserverTestCase): [ ReceiptTypes.READ, ReceiptTypes.READ_PRIVATE, - ReceiptTypes.UNSTABLE_READ_PRIVATE, ], ) ) @@ -104,7 +102,6 @@ class ReceiptTestCase(HomeserverTestCase): [ ReceiptTypes.READ, ReceiptTypes.READ_PRIVATE, - ReceiptTypes.UNSTABLE_READ_PRIVATE, ], ) ) @@ -117,16 +114,12 @@ class ReceiptTestCase(HomeserverTestCase): [ ReceiptTypes.READ, ReceiptTypes.READ_PRIVATE, - ReceiptTypes.UNSTABLE_READ_PRIVATE, ], ) ) self.assertEqual(res, None) - @parameterized.expand( - [ReceiptTypes.READ_PRIVATE, ReceiptTypes.UNSTABLE_READ_PRIVATE] - ) - def test_get_receipts_for_user(self, receipt_type: str) -> None: + def test_get_receipts_for_user(self) -> None: # Send some events into the first room event1_1_id = self.create_and_send_event( self.room_id1, UserID.from_string(OTHER_USER_ID) @@ -144,14 +137,14 @@ class ReceiptTestCase(HomeserverTestCase): # Send private read receipt for the second event self.get_success( self.store.insert_receipt( - self.room_id1, receipt_type, OUR_USER_ID, [event1_2_id], {} + self.room_id1, ReceiptTypes.READ_PRIVATE, OUR_USER_ID, [event1_2_id], {} ) ) # Test we get the latest event when we want both private and public receipts res = self.get_success( self.store.get_receipts_for_user( - OUR_USER_ID, [ReceiptTypes.READ, receipt_type] + OUR_USER_ID, [ReceiptTypes.READ, ReceiptTypes.READ_PRIVATE] ) ) self.assertEqual(res, {self.room_id1: event1_2_id}) @@ -164,7 +157,7 @@ class ReceiptTestCase(HomeserverTestCase): # Test we get the latest event when we want only the public receipt res = self.get_success( - self.store.get_receipts_for_user(OUR_USER_ID, [receipt_type]) + self.store.get_receipts_for_user(OUR_USER_ID, [ReceiptTypes.READ_PRIVATE]) ) self.assertEqual(res, {self.room_id1: event1_2_id}) @@ -187,20 +180,17 @@ class ReceiptTestCase(HomeserverTestCase): # Test new room is reflected in what the method returns self.get_success( self.store.insert_receipt( - self.room_id2, receipt_type, OUR_USER_ID, [event2_1_id], {} + self.room_id2, ReceiptTypes.READ_PRIVATE, OUR_USER_ID, [event2_1_id], {} ) ) res = self.get_success( self.store.get_receipts_for_user( - OUR_USER_ID, [ReceiptTypes.READ, receipt_type] + OUR_USER_ID, [ReceiptTypes.READ, ReceiptTypes.READ_PRIVATE] ) ) self.assertEqual(res, {self.room_id1: event1_2_id, self.room_id2: event2_1_id}) - @parameterized.expand( - [ReceiptTypes.READ_PRIVATE, ReceiptTypes.UNSTABLE_READ_PRIVATE] - ) - def test_get_last_receipt_event_id_for_user(self, receipt_type: str) -> None: + def test_get_last_receipt_event_id_for_user(self) -> None: # Send some events into the first room event1_1_id = 
self.create_and_send_event( self.room_id1, UserID.from_string(OTHER_USER_ID) @@ -218,7 +208,7 @@ class ReceiptTestCase(HomeserverTestCase): # Send private read receipt for the second event self.get_success( self.store.insert_receipt( - self.room_id1, receipt_type, OUR_USER_ID, [event1_2_id], {} + self.room_id1, ReceiptTypes.READ_PRIVATE, OUR_USER_ID, [event1_2_id], {} ) ) @@ -227,7 +217,7 @@ class ReceiptTestCase(HomeserverTestCase): self.store.get_last_receipt_event_id_for_user( OUR_USER_ID, self.room_id1, - [ReceiptTypes.READ, receipt_type], + [ReceiptTypes.READ, ReceiptTypes.READ_PRIVATE], ) ) self.assertEqual(res, event1_2_id) @@ -243,7 +233,7 @@ class ReceiptTestCase(HomeserverTestCase): # Test we get the latest event when we want only the private receipt res = self.get_success( self.store.get_last_receipt_event_id_for_user( - OUR_USER_ID, self.room_id1, [receipt_type] + OUR_USER_ID, self.room_id1, [ReceiptTypes.READ_PRIVATE] ) ) self.assertEqual(res, event1_2_id) @@ -269,14 +259,14 @@ class ReceiptTestCase(HomeserverTestCase): # Test new room is reflected in what the method returns self.get_success( self.store.insert_receipt( - self.room_id2, receipt_type, OUR_USER_ID, [event2_1_id], {} + self.room_id2, ReceiptTypes.READ_PRIVATE, OUR_USER_ID, [event2_1_id], {} ) ) res = self.get_success( self.store.get_last_receipt_event_id_for_user( OUR_USER_ID, self.room_id2, - [ReceiptTypes.READ, receipt_type], + [ReceiptTypes.READ, ReceiptTypes.READ_PRIVATE], ) ) self.assertEqual(res, event2_1_id) diff --git a/tests/storage/test_roommember.py b/tests/storage/test_roommember.py index ceec69028..879440182 100644 --- a/tests/storage/test_roommember.py +++ b/tests/storage/test_roommember.py @@ -158,7 +158,7 @@ class RoomMemberStoreTestCase(unittest.HomeserverTestCase): # Check that alice's display name is now None self.assertEqual(row[0]["display_name"], None) - def test_room_is_locally_forgotten(self): + def test_room_is_locally_forgotten(self) -> None: """Test that when the last local user has forgotten a room it is known as forgotten.""" # join two local and one remote user self.room = self.helper.create_room_as(self.u_alice, tok=self.t_alice) @@ -199,7 +199,7 @@ class RoomMemberStoreTestCase(unittest.HomeserverTestCase): self.get_success(self.store.is_locally_forgotten_room(self.room)) ) - def test_join_locally_forgotten_room(self): + def test_join_locally_forgotten_room(self) -> None: """Tests if a user joins a forgotten room the room is not forgotten anymore.""" self.room = self.helper.create_room_as(self.u_alice, tok=self.t_alice) self.assertFalse( diff --git a/tests/test_metrics.py b/tests/test_metrics.py index b4574b2ff..1a70eddc9 100644 --- a/tests/test_metrics.py +++ b/tests/test_metrics.py @@ -12,7 +12,16 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. 
+try: + from importlib import metadata +except ImportError: + import importlib_metadata as metadata # type: ignore[no-redef] +from unittest.mock import patch + +from pkg_resources import parse_version + +from synapse.app._base import _set_prometheus_client_use_created_metrics from synapse.metrics import REGISTRY, InFlightGauge, generate_latest from synapse.util.caches.deferred_cache import DeferredCache @@ -162,3 +171,30 @@ class CacheMetricsTests(unittest.HomeserverTestCase): self.assertEqual(items["synapse_util_caches_cache_size"], "1.0") self.assertEqual(items["synapse_util_caches_cache_max_size"], "777.0") + + +class PrometheusMetricsHackTestCase(unittest.HomeserverTestCase): + if parse_version(metadata.version("prometheus_client")) < parse_version("0.14.0"): + skip = "prometheus-client too old" + + def test_created_metrics_disabled(self) -> None: + """ + Tests that a brittle hack, to disable `_created` metrics, works. + This involves poking at the internals of prometheus-client. + It's not the end of the world if this doesn't work. + + This test gives us a way to notice if prometheus-client changes + their internals. + """ + import prometheus_client.metrics + + PRIVATE_FLAG_NAME = "_use_created" + + # By default, the pesky `_created` metrics are enabled. + # Check this assumption is still valid. + self.assertTrue(getattr(prometheus_client.metrics, PRIVATE_FLAG_NAME)) + + with patch("prometheus_client.metrics") as mock: + setattr(mock, PRIVATE_FLAG_NAME, True) + _set_prometheus_client_use_created_metrics(False) + self.assertFalse(getattr(mock, PRIVATE_FLAG_NAME, False)) diff --git a/tests/test_phone_home.py b/tests/test_phone_home.py index b01cae6e5..cc1a98f1c 100644 --- a/tests/test_phone_home.py +++ b/tests/test_phone_home.py @@ -15,8 +15,14 @@ import resource from unittest import mock +from twisted.test.proto_helpers import MemoryReactor + from synapse.app.phone_stats_home import phone_stats_home +from synapse.rest import admin +from synapse.rest.client import login, sync +from synapse.server import HomeServer from synapse.types import JsonDict +from synapse.util import Clock from tests.unittest import HomeserverTestCase @@ -47,5 +53,43 @@ class PhoneHomeStatsTestCase(HomeserverTestCase): stats: JsonDict = {} self.reactor.advance(1) # `old_resource` has type `Mock` instead of `struct_rusage` - self.get_success(phone_stats_home(self.hs, stats, past_stats)) # type: ignore[arg-type] + self.get_success( + phone_stats_home(self.hs, stats, past_stats) # type: ignore[arg-type] + ) self.assertApproximates(stats["cpu_average"], 100, tolerance=2.5) + + +class CommonMetricsTestCase(HomeserverTestCase): + servlets = [ + admin.register_servlets, + login.register_servlets, + sync.register_servlets, + ] + + def prepare(self, reactor: MemoryReactor, clock: Clock, hs: HomeServer) -> None: + self.metrics_manager = hs.get_common_usage_metrics_manager() + self.get_success(self.metrics_manager.setup()) + + def test_dau(self) -> None: + """Tests that the daily active users count is correctly updated.""" + self._assert_metric_value("daily_active_users", 0) + + self.register_user("user", "password") + tok = self.login("user", "password") + self.make_request("GET", "/sync", access_token=tok) + + self.pump(1) + + self._assert_metric_value("daily_active_users", 1) + + def _assert_metric_value(self, metric_name: str, expected: int) -> None: + """Compare the given value to the current value of the common usage metric with + the given name. + + Args: + metric_name: The metric to look up. 
+ expected: Expected value for this metric. + """ + metrics = self.get_success(self.metrics_manager.get_metrics()) + value = getattr(metrics, metric_name) + self.assertEqual(value, expected) diff --git a/tests/test_server.py b/tests/test_server.py index d2b2d8344..7c6644824 100644 --- a/tests/test_server.py +++ b/tests/test_server.py @@ -26,12 +26,12 @@ from synapse.http.server import ( DirectServeJsonResource, JsonResource, OptionsResource, - cancellable, ) from synapse.http.site import SynapseRequest, SynapseSite from synapse.logging.context import make_deferred_yieldable from synapse.types import JsonDict from synapse.util import Clock +from synapse.util.cancellation import cancellable from tests import unittest from tests.http.server._base import test_disconnect @@ -228,7 +228,7 @@ class OptionsResourceTests(unittest.TestCase): site = SynapseSite( "test", "site_tag", - parse_listener_def({"type": "http", "port": 0}), + parse_listener_def(0, {"type": "http", "port": 0}), self.resource, "1.0", max_request_body_size=4096, diff --git a/tests/utils.py b/tests/utils.py index d2c6d1e85..65db43769 100644 --- a/tests/utils.py +++ b/tests/utils.py @@ -135,7 +135,6 @@ def default_config( "enable_registration_captcha": False, "macaroon_secret_key": "not even a little secret", "password_providers": [], - "worker_replication_url": "", "worker_app": None, "block_non_admin_invites": False, "federation_domain_whitelist": None,
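One closing observation: the `parse_listener_def` call sites changed above gained a leading integer argument. From these tests it appears to be the listener's position within the `listeners` config list, presumably used to identify the listener in error messages; that reading is an assumption, since this diff only shows the call sites. A sketch:

    # 0 = assumed index of this listener in the homeserver's `listeners` list;
    # not confirmed by this diff.
    listener = parse_listener_def(0, {"type": "http", "port": 0})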