
Merge pull request #5769 from matrix-org/uhoreg/e2e_cross-signing2-part1

Cross-signing [2/4] - upload/download keys
Hubert Chathi authored on 2019-09-04 16:56:29 -07:00 (committed via GitHub)
commit 19bb5c8024
141 changed files with 2806 additions and 1711 deletions

View file

@@ -17,6 +17,6 @@ services:
       SYNAPSE_POSTGRES_HOST: postgres
       SYNAPSE_POSTGRES_USER: postgres
       SYNAPSE_POSTGRES_PASSWORD: postgres
-    working_dir: /app
+    working_dir: /src
     volumes:
-      - ..:/app
+      - ..:/src

View file

@@ -17,6 +17,6 @@ services:
       SYNAPSE_POSTGRES_HOST: postgres
       SYNAPSE_POSTGRES_USER: postgres
       SYNAPSE_POSTGRES_PASSWORD: postgres
-    working_dir: /app
+    working_dir: /src
     volumes:
-      - ..:/app
+      - ..:/src

View file

@@ -17,6 +17,6 @@ services:
       SYNAPSE_POSTGRES_HOST: postgres
       SYNAPSE_POSTGRES_USER: postgres
       SYNAPSE_POSTGRES_PASSWORD: postgres
-    working_dir: /app
+    working_dir: /src
     volumes:
-      - ..:/app
+      - ..:/src

View file

@@ -1,3 +1,18 @@
+# -*- coding: utf-8 -*-
+# Copyright 2019 The Matrix.org Foundation C.I.C.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
 import sys
 from tap.parser import Parser
 from tap.line import Result, Unknown, Diagnostic

View file

@@ -27,7 +27,7 @@ git config --global user.name "A robot"

 # Fetch and merge. If it doesn't work, it will raise due to set -e.
 git fetch -u origin $GITBASE
-git merge --no-edit origin/$GITBASE
+git merge --no-edit --no-commit origin/$GITBASE

 # Show what we are after.
 git --no-pager show -s

View file

@ -1,248 +0,0 @@
env:
CODECOV_TOKEN: "2dd7eb9b-0eda-45fe-a47c-9b5ac040045f"
steps:
- command:
- "python -m pip install tox"
- "tox -e check_codestyle"
label: "\U0001F9F9 Check Style"
plugins:
- docker#v3.0.1:
image: "python:3.6"
- command:
- "python -m pip install tox"
- "tox -e packaging"
label: "\U0001F9F9 packaging"
plugins:
- docker#v3.0.1:
image: "python:3.6"
- command:
- "python -m pip install tox"
- "tox -e check_isort"
label: "\U0001F9F9 isort"
plugins:
- docker#v3.0.1:
image: "python:3.6"
- command:
- "python -m pip install tox"
- "scripts-dev/check-newsfragment"
label: ":newspaper: Newsfile"
branches: "!master !develop !release-*"
plugins:
- docker#v3.0.1:
image: "python:3.6"
propagate-environment: true
- command:
- "python -m pip install tox"
- "tox -e check-sampleconfig"
label: "\U0001F9F9 check-sample-config"
plugins:
- docker#v3.0.1:
image: "python:3.6"
- command:
- "python -m pip install tox"
- "tox -e mypy"
label: ":mypy: mypy"
plugins:
- docker#v3.0.1:
image: "python:3.5"
- wait
- command:
- "apt-get update && apt-get install -y python3.5 python3.5-dev python3-pip libxml2-dev libxslt-dev zlib1g-dev"
- "python3.5 -m pip install tox"
- "tox -e py35-old,codecov"
label: ":python: 3.5 / SQLite / Old Deps"
env:
TRIAL_FLAGS: "-j 2"
LANG: "C.UTF-8"
plugins:
- docker#v3.0.1:
image: "ubuntu:xenial"  # We use xenial to get an old sqlite and python
propagate-environment: true
retry:
automatic:
- exit_status: -1
limit: 2
- exit_status: 2
limit: 2
- command:
- "python -m pip install tox"
- "tox -e py35,codecov"
label: ":python: 3.5 / SQLite"
env:
TRIAL_FLAGS: "-j 2"
plugins:
- docker#v3.0.1:
image: "python:3.5"
propagate-environment: true
retry:
automatic:
- exit_status: -1
limit: 2
- exit_status: 2
limit: 2
- command:
- "python -m pip install tox"
- "tox -e py36,codecov"
label: ":python: 3.6 / SQLite"
env:
TRIAL_FLAGS: "-j 2"
plugins:
- docker#v3.0.1:
image: "python:3.6"
propagate-environment: true
retry:
automatic:
- exit_status: -1
limit: 2
- exit_status: 2
limit: 2
- command:
- "python -m pip install tox"
- "tox -e py37,codecov"
label: ":python: 3.7 / SQLite"
env:
TRIAL_FLAGS: "-j 2"
plugins:
- docker#v3.0.1:
image: "python:3.7"
propagate-environment: true
retry:
automatic:
- exit_status: -1
limit: 2
- exit_status: 2
limit: 2
- label: ":python: 3.5 / :postgres: 9.5"
agents:
queue: "medium"
env:
TRIAL_FLAGS: "-j 8"
command:
- "bash -c 'python -m pip install tox && python -m tox -e py35-postgres,codecov'"
plugins:
- docker-compose#v2.1.0:
run: testenv
config:
- .buildkite/docker-compose.py35.pg95.yaml
retry:
automatic:
- exit_status: -1
limit: 2
- exit_status: 2
limit: 2
- label: ":python: 3.7 / :postgres: 9.5"
agents:
queue: "medium"
env:
TRIAL_FLAGS: "-j 8"
command:
- "bash -c 'python -m pip install tox && python -m tox -e py37-postgres,codecov'"
plugins:
- docker-compose#v2.1.0:
run: testenv
config:
- .buildkite/docker-compose.py37.pg95.yaml
retry:
automatic:
- exit_status: -1
limit: 2
- exit_status: 2
limit: 2
- label: ":python: 3.7 / :postgres: 11"
agents:
queue: "medium"
env:
TRIAL_FLAGS: "-j 8"
command:
- "bash -c 'python -m pip install tox && python -m tox -e py37-postgres,codecov'"
plugins:
- docker-compose#v2.1.0:
run: testenv
config:
- .buildkite/docker-compose.py37.pg11.yaml
retry:
automatic:
- exit_status: -1
limit: 2
- exit_status: 2
limit: 2
- label: "SyTest - :python: 3.5 / SQLite / Monolith"
agents:
queue: "medium"
command:
- "bash .buildkite/merge_base_branch.sh"
- "bash /synapse_sytest.sh"
plugins:
- docker#v3.0.1:
image: "matrixdotorg/sytest-synapse:py35"
propagate-environment: true
always-pull: true
workdir: "/src"
retry:
automatic:
- exit_status: -1
limit: 2
- exit_status: 2
limit: 2
- label: "SyTest - :python: 3.5 / :postgres: 9.6 / Monolith"
agents:
queue: "medium"
env:
POSTGRES: "1"
command:
- "bash .buildkite/merge_base_branch.sh"
- "bash /synapse_sytest.sh"
plugins:
- docker#v3.0.1:
image: "matrixdotorg/sytest-synapse:py35"
propagate-environment: true
always-pull: true
workdir: "/src"
retry:
automatic:
- exit_status: -1
limit: 2
- exit_status: 2
limit: 2
- label: "SyTest - :python: 3.5 / :postgres: 9.6 / Workers"
agents:
queue: "medium"
env:
POSTGRES: "1"
WORKERS: "1"
BLACKLIST: "synapse-blacklist-with-workers"
command:
- "bash .buildkite/merge_base_branch.sh"
- "bash -c 'cat /src/sytest-blacklist /src/.buildkite/worker-blacklist > /src/synapse-blacklist-with-workers'"
- "bash /synapse_sytest.sh"
plugins:
- docker#v3.0.1:
image: "matrixdotorg/sytest-synapse:py35"
propagate-environment: true
always-pull: true
workdir: "/src"
retry:
automatic:
- exit_status: -1
limit: 2
- exit_status: 2
limit: 2

View file

@@ -1,7 +1,8 @@
 [run]
 branch = True
 parallel = True
-include = synapse/*
+include=$TOP/synapse/*
+data_file = $TOP/.coverage

 [report]
 precision = 2

View file

@@ -36,7 +36,7 @@ that your email address is probably `user@example.com` rather than
 System requirements:

 - POSIX-compliant system (tested on Linux & OS X)
-- Python 3.5, 3.6, 3.7, or 2.7
+- Python 3.5, 3.6, or 3.7
 - At least 1GB of free RAM if you want to join large public rooms like #matrix:matrix.org

 Synapse is written in Python but some of the libraries it uses are written in
@@ -421,7 +421,7 @@ If Synapse is not configured with an SMTP server, password reset via email will
 The easiest way to create a new user is to do so from a client like [Riot](https://riot.im).
 Alternatively you can do so from the command line if you have installed via pip.
 This can be done as follows:

changelog.d/5769.feature (new file)
View file

@ -0,0 +1 @@
Allow uploading of cross-signing keys.

changelog.d/5853.feature (new file)
View file

@ -0,0 +1 @@
Opentracing for device list updates.

View file

@ -1 +0,0 @@
Switch to the v2 lookup API for 3PID invites.

changelog.d/5915.bugfix (new file)
View file

@ -0,0 +1 @@
Fix 404 for thumbnail download when `dynamic_thumbnails` is `false` and the thumbnail was dynamically generated. Fix reported by rkfg.

changelog.d/5922.misc (new file)
View file

@ -0,0 +1 @@
Update Buildkite pipeline to use plugins instead of buildkite-agent commands.

changelog.d/5931.misc (new file)
View file

@ -0,0 +1 @@
Remove unnecessary parentheses in return statements.

changelog.d/5938.misc (new file)
View file

@ -0,0 +1 @@
Remove unused jenkins/prepare_sytest.sh file.

changelog.d/5943.misc (new file)
View file

@ -0,0 +1 @@
Move Buildkite pipeline config to the pipelines repo.

changelog.d/5953.misc (new file)
View file

@ -0,0 +1 @@
Update INSTALL.md to say that Python 2 is no longer supported.

changelog.d/5962.misc (new file)
View file

@ -0,0 +1 @@
Remove unnecessary return statements in the codebase which were the result of a regex run.

changelog.d/5963.misc (new file)
View file

@ -0,0 +1 @@
Remove left-over methods from C/S registration API.

changelog.d/5964.feature (new file)
View file

@ -0,0 +1 @@
Remove `bind_email` and `bind_msisdn` parameters from /register ala MSC2140.

changelog.d/5966.bugfix (new file)
View file

@ -0,0 +1 @@
Fix admin API for listing media in a room not being available with an external media repo.

changelog.d/5967.bugfix (new file)
View file

@ -0,0 +1 @@
Fix list media admin API always returning an error.

changelog.d/5970.docker (new file)
View file

@ -0,0 +1 @@
Avoid changing UID/GID if they are already correct.

changelog.d/5971.bugfix (new file)
View file

@ -0,0 +1 @@
Fix room and user stats tracking.

changelog.d/5975.misc (new file)
View file

@ -0,0 +1 @@
Cleanup event auth type initialisation.

View file

@@ -41,8 +41,8 @@ def generate_config_from_template(config_dir, config_path, environ, ownership):
         config_dir (str): where to put generated config files
         config_path (str): where to put the main config file
         environ (dict): environment dictionary
-        ownership (str): "<user>:<group>" string which will be used to set
-            ownership of the generated configs
+        ownership (str|None): "<user>:<group>" string which will be used to set
+            ownership of the generated configs. If None, ownership will not change.
     """
     for v in ("SYNAPSE_SERVER_NAME", "SYNAPSE_REPORT_STATS"):
         if v not in environ:
@@ -105,24 +105,24 @@ def generate_config_from_template(config_dir, config_path, environ, ownership):
     log("Generating log config file " + log_config_file)
     convert("/conf/log.config", log_config_file, environ)

-    subprocess.check_output(["chown", "-R", ownership, "/data"])
-
     # Hopefully we already have a signing key, but generate one if not.
-    subprocess.check_output(
-        [
-            "su-exec",
-            ownership,
-            "python",
-            "-m",
-            "synapse.app.homeserver",
-            "--config-path",
-            config_path,
-            # tell synapse to put generated keys in /data rather than /compiled
-            "--keys-directory",
-            config_dir,
-            "--generate-keys",
-        ]
-    )
+    args = [
+        "python",
+        "-m",
+        "synapse.app.homeserver",
+        "--config-path",
+        config_path,
+        # tell synapse to put generated keys in /data rather than /compiled
+        "--keys-directory",
+        config_dir,
+        "--generate-keys",
+    ]
+
+    if ownership is not None:
+        subprocess.check_output(["chown", "-R", ownership, "/data"])
+        args = ["su-exec", ownership] + args
+
+    subprocess.check_output(args)


 def run_generate_config(environ, ownership):
@@ -130,7 +130,7 @@ def run_generate_config(environ, ownership):
     Args:
         environ (dict): env var dict
-        ownership (str): "userid:groupid" arg for chmod
+        ownership (str|None): "userid:groupid" arg for chmod. If None, ownership will not change.

     Never returns.
     """
@@ -149,9 +149,6 @@ def run_generate_config(environ, ownership):
     log("Creating log config %s" % (log_config_file,))
     convert("/conf/log.config", log_config_file, environ)

-    # make sure that synapse has perms to write to the data dir.
-    subprocess.check_output(["chown", ownership, data_dir])
-
     args = [
         "python",
         "-m",
@@ -170,12 +167,33 @@ def run_generate_config(environ, ownership):
         "--open-private-ports",
     ]
     # log("running %s" % (args, ))
-    os.execv("/usr/local/bin/python", args)
+
+    if ownership is not None:
+        args = ["su-exec", ownership] + args
+        os.execv("/sbin/su-exec", args)
+
+        # make sure that synapse has perms to write to the data dir.
+        subprocess.check_output(["chown", ownership, data_dir])
+    else:
+        os.execv("/usr/local/bin/python", args)


 def main(args, environ):
     mode = args[1] if len(args) > 1 else None
-    ownership = "{}:{}".format(environ.get("UID", 991), environ.get("GID", 991))
+    desired_uid = int(environ.get("UID", "991"))
+    desired_gid = int(environ.get("GID", "991"))
+    if (desired_uid == os.getuid()) and (desired_gid == os.getgid()):
+        ownership = None
+    else:
+        ownership = "{}:{}".format(desired_uid, desired_gid)
+    log(
+        "Container running as UserID %s:%s, ENV (or defaults) requests %s:%s"
+        % (os.getuid(), os.getgid(), desired_uid, desired_gid)
+    )
+
+    if ownership is None:
+        log("Will not perform chmod/su-exec as UserID already matches request")

     # In generate mode, generate a configuration and missing keys, then exit
     if mode == "generate":
@@ -227,16 +245,12 @@ def main(args, environ):

     log("Starting synapse with config file " + config_path)

-    args = [
-        "su-exec",
-        ownership,
-        "python",
-        "-m",
-        "synapse.app.homeserver",
-        "--config-path",
-        config_path,
-    ]
-    os.execv("/sbin/su-exec", args)
+    args = ["python", "-m", "synapse.app.homeserver", "--config-path", config_path]
+
+    if ownership is not None:
+        args = ["su-exec", ownership] + args
+        os.execv("/sbin/su-exec", args)
+    else:
+        os.execv("/usr/local/bin/python", args)


 if __name__ == "__main__":

View file

@@ -0,0 +1,62 @@
Room and User Statistics
========================

Synapse maintains room and user statistics (as well as a cache of room state),
in various tables. These can be used for administrative purposes but are also
used when generating the public room directory.


# Synapse Developer Documentation

## High-Level Concepts

### Definitions

* **subject**: Something we are tracking stats about, currently a room or user.
* **current row**: An entry for a subject in the appropriate current statistics
  table. Each subject can have only one.
* **historical row**: An entry for a subject in the appropriate historical
  statistics table. Each subject can have any number of these.

### Overview

Stats are maintained as time series. There are two kinds of column:

* absolute columns, where the value is correct for the time given by `end_ts`
  in the stats row. (Imagine a line graph for these values.)
    * They can also be thought of as 'gauges' in Prometheus, if you are familiar.
* per-slice columns, where the value corresponds to how many of the occurrences
  occurred within the time slice given by `(end_ts - bucket_size)…end_ts`
  or `start_ts…end_ts`. (Imagine a histogram for these values.)

Stats are maintained in two tables (for each type): current and historical.

Current stats correspond to the present values. Each subject can only have one
entry.

Historical stats correspond to values in the past. Subjects may have multiple
entries.

## Concepts around the management of stats

### Current rows

Current rows contain the most up-to-date statistics for a room.
They only contain absolute columns.

### Historical rows

Historical rows can always be considered to be valid for the time slice and
end time specified.

* Historical rows will not exist for every time slice; they will be omitted
  if there were no changes. In this case, the following assumptions can be
  made to interpolate/recreate missing rows:
    - absolute fields have the same values as in the preceding row
    - per-slice fields are zero (`0`)
* Historical rows will not be retained forever; rows older than a configurable
  time will be purged.

#### Purge

The purging of historical rows is not yet implemented.
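
To make the interpolation rules concrete, here is a minimal Python sketch; the row layout and field names are illustrative assumptions for this example, not the actual stats schema.

# A minimal sketch of the interpolation rules above. The table layout and field
# names here are assumptions for illustration, not Synapse's actual schema.
def fill_missing_slices(rows, bucket_size, absolute_fields, per_slice_fields):
    """Recreate omitted historical rows between the ones we have.

    `rows` is a list of dicts sorted by `end_ts`: absolute fields carry forward
    from the preceding row, and per-slice fields are zero for the gap."""
    if not rows:
        return []
    filled = [rows[0]]
    for prev, cur in zip(rows, rows[1:]):
        end_ts = prev["end_ts"] + bucket_size
        while end_ts < cur["end_ts"]:
            gap = {field: prev[field] for field in absolute_fields}  # same as preceding row
            gap.update({field: 0 for field in per_slice_fields})  # no occurrences in the gap
            gap["end_ts"] = end_ts
            filled.append(gap)
            end_ts += bucket_size
        filled.append(cur)
    return filled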

View file

@ -1,16 +0,0 @@
#! /bin/bash
set -eux
cd "`dirname $0`/.."
TOX_DIR=$WORKSPACE/.tox
mkdir -p $TOX_DIR
if ! [ $TOX_DIR -ef .tox ]; then
ln -s "$TOX_DIR" .tox
fi
# set up the virtualenv
tox -e py27 --notest -v

View file

@@ -276,25 +276,25 @@ class Auth(object):
             self.get_access_token_from_request(request)
         )
         if app_service is None:
-            return (None, None)
+            return None, None

         if app_service.ip_range_whitelist:
             ip_address = IPAddress(self.hs.get_ip_from_request(request))
             if ip_address not in app_service.ip_range_whitelist:
-                return (None, None)
+                return None, None

         if b"user_id" not in request.args:
-            return (app_service.sender, app_service)
+            return app_service.sender, app_service

         user_id = request.args[b"user_id"][0].decode("utf8")
         if app_service.sender == user_id:
-            return (app_service.sender, app_service)
+            return app_service.sender, app_service

         if not app_service.is_interested_in_user(user_id):
             raise AuthError(403, "Application service cannot masquerade as this user.")
         if not (yield self.store.get_user_by_id(user_id)):
             raise AuthError(403, "Application service has not registered this user")
-        return (user_id, app_service)
+        return user_id, app_service

     @defer.inlineCallbacks
     def get_user_by_access_token(self, token, rights="access"):
@@ -694,7 +694,7 @@ class Auth(object):
             #  * The user is a guest user, and has joined the room
             # else it will throw.
             member_event = yield self.check_user_was_in_room(room_id, user_id)
-            return (member_event.membership, member_event.event_id)
+            return member_event.membership, member_event.event_id
         except AuthError:
             visibility = yield self.state.get_current_state(
                 room_id, EventTypes.RoomHistoryVisibility, ""
@@ -703,8 +703,7 @@ class Auth(object):
                 visibility
                 and visibility.content["history_visibility"] == "world_readable"
             ):
-                return (Membership.JOIN, None)
-                return
+                return Membership.JOIN, None
             raise AuthError(
                 403, "Guest access not allowed", errcode=Codes.GUEST_ACCESS_FORBIDDEN
             )

View file

@@ -61,6 +61,7 @@ class Codes(object):
     INCOMPATIBLE_ROOM_VERSION = "M_INCOMPATIBLE_ROOM_VERSION"
     WRONG_ROOM_KEYS_VERSION = "M_WRONG_ROOM_KEYS_VERSION"
     EXPIRED_ACCOUNT = "ORG_MATRIX_EXPIRED_ACCOUNT"
+    INVALID_SIGNATURE = "M_INVALID_SIGNATURE"
     USER_DEACTIVATED = "M_USER_DEACTIVATED"

View file

@@ -70,12 +70,12 @@ class PresenceStatusStubServlet(RestServlet):
         except HttpResponseException as e:
             raise e.to_synapse_error()

-        return (200, result)
+        return 200, result

     @defer.inlineCallbacks
     def on_PUT(self, request, user_id):
         yield self.auth.get_user_by_req(request)
-        return (200, {})
+        return 200, {}


 class KeyUploadServlet(RestServlet):
@@ -126,11 +126,11 @@ class KeyUploadServlet(RestServlet):
                 self.main_uri + request.uri.decode("ascii"), body, headers=headers
             )
-            return (200, result)
+            return 200, result
         else:
             # Just interested in counts.
             result = yield self.store.count_e2e_one_time_keys(user_id, device_id)
-            return (200, {"one_time_key_counts": result})
+            return 200, {"one_time_key_counts": result}


 class FrontendProxySlavedStore(

View file

@@ -107,7 +107,7 @@ class ApplicationServiceApi(SimpleHttpClient):
         except CodeMessageException as e:
             if e.code == 404:
                 return False
-                return
             logger.warning("query_user to %s received %s", uri, e.code)
         except Exception as ex:
             logger.warning("query_user to %s threw exception %s", uri, ex)
@@ -127,7 +126,6 @@ class ApplicationServiceApi(SimpleHttpClient):
             logger.warning("query_alias to %s received %s", uri, e.code)
             if e.code == 404:
                 return False
-                return
         except Exception as ex:
             logger.warning("query_alias to %s threw exception %s", uri, ex)
         return False
@@ -230,7 +228,6 @@ class ApplicationServiceApi(SimpleHttpClient):
                 sent_transactions_counter.labels(service.id).inc()
                 sent_events_counter.labels(service.id).inc(len(events))
                 return True
-                return
         except CodeMessageException as e:
             logger.warning("push_bulk to %s received %s", uri, e.code)
         except Exception as ex:

View file

@@ -27,19 +27,16 @@ class StatsConfig(Config):
     def read_config(self, config, **kwargs):
         self.stats_enabled = True
-        self.stats_bucket_size = 86400
+        self.stats_bucket_size = 86400 * 1000
         self.stats_retention = sys.maxsize
         stats_config = config.get("stats", None)
         if stats_config:
             self.stats_enabled = stats_config.get("enabled", self.stats_enabled)
-            self.stats_bucket_size = (
-                self.parse_duration(stats_config.get("bucket_size", "1d")) / 1000
-            )
-            self.stats_retention = (
-                self.parse_duration(
-                    stats_config.get("retention", "%ds" % (sys.maxsize,))
-                )
-                / 1000
-            )
+            self.stats_bucket_size = self.parse_duration(
+                stats_config.get("bucket_size", "1d")
+            )
+            self.stats_retention = self.parse_duration(
+                stats_config.get("retention", "%ds" % (sys.maxsize,))
+            )

     def generate_config_section(self, config_dir_path, server_name, **kwargs):
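
In effect, the stats durations are now kept in the milliseconds that parse_duration returns instead of being divided down to seconds. A rough sketch of the resulting default bucket size, assuming parse_duration("1d") yields 86,400,000 ms:

# Hedged illustration of the unit change; values assume parse_duration returns milliseconds.
bucket_size_before = 86400  # seconds: parse_duration("1d") / 1000 on the old code path
bucket_size_after = 86400 * 1000  # milliseconds: parse_duration("1d") kept as-is
assert bucket_size_after == bucket_size_before * 1000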

View file

@@ -83,7 +83,7 @@ def compute_content_hash(event_dict, hash_algorithm):

     event_json_bytes = encode_canonical_json(event_dict)
     hashed = hash_algorithm(event_json_bytes)
-    return (hashed.name, hashed.digest())
+    return hashed.name, hashed.digest()


 def compute_event_reference_hash(event, hash_algorithm=hashlib.sha256):
@@ -106,7 +106,7 @@ def compute_event_reference_hash(event, hash_algorithm=hashlib.sha256):
     event_dict.pop("unsigned", None)
     event_json_bytes = encode_canonical_json(event_dict)
     hashed = hash_algorithm(event_json_bytes)
-    return (hashed.name, hashed.digest())
+    return hashed.name, hashed.digest()


 def compute_event_signature(event_dict, signature_name, signing_key):

View file

@@ -637,11 +637,11 @@ def auth_types_for_event(event):
     if event.type == EventTypes.Create:
         return []

-    auth_types = []
-
-    auth_types.append((EventTypes.PowerLevels, ""))
-    auth_types.append((EventTypes.Member, event.sender))
-    auth_types.append((EventTypes.Create, ""))
+    auth_types = [
+        (EventTypes.PowerLevels, ""),
+        (EventTypes.Member, event.sender),
+        (EventTypes.Create, ""),
+    ]

     if event.type == EventTypes.Member:
         membership = event.content["membership"]

View file

@@ -355,7 +355,7 @@ class FederationClient(FederationBase):

             auth_chain.sort(key=lambda e: e.depth)

-            return (pdus, auth_chain)
+            return pdus, auth_chain
         except HttpResponseException as e:
             if e.code == 400 or e.code == 404:
                 logger.info("Failed to use get_room_state_ids API, falling back")
@@ -404,7 +404,7 @@ class FederationClient(FederationBase):

         signed_auth.sort(key=lambda e: e.depth)

-        return (signed_pdus, signed_auth)
+        return signed_pdus, signed_auth

     @defer.inlineCallbacks
     def get_events_from_store_or_dest(self, destination, room_id, event_ids):
@@ -429,7 +429,7 @@ class FederationClient(FederationBase):
                 missing_events.discard(k)

         if not missing_events:
-            return (signed_events, failed_to_fetch)
+            return signed_events, failed_to_fetch

         logger.debug(
             "Fetching unknown state/auth events %s for room %s",
@@ -465,7 +465,7 @@ class FederationClient(FederationBase):
         # We removed all events we successfully fetched from `batch`
         failed_to_fetch.update(batch)

-        return (signed_events, failed_to_fetch)
+        return signed_events, failed_to_fetch

     @defer.inlineCallbacks
     @log_function

View file

@@ -100,7 +100,7 @@ class FederationServer(FederationBase):

         res = self._transaction_from_pdus(pdus).get_dict()

-        return (200, res)
+        return 200, res

     @defer.inlineCallbacks
     @log_function
@@ -163,7 +163,7 @@ class FederationServer(FederationBase):
             yield self.transaction_actions.set_response(
                 origin, transaction, 400, response
             )
-            return (400, response)
+            return 400, response

         received_pdus_counter.inc(len(transaction.pdus))

@@ -265,7 +265,7 @@ class FederationServer(FederationBase):
         logger.debug("Returning: %s", str(response))

         yield self.transaction_actions.set_response(origin, transaction, 200, response)
-        return (200, response)
+        return 200, response

     @defer.inlineCallbacks
     def received_edu(self, origin, edu_type, content):
@@ -298,7 +298,7 @@ class FederationServer(FederationBase):
             event_id,
         )

-        return (200, resp)
+        return 200, resp

     @defer.inlineCallbacks
     def on_state_ids_request(self, origin, room_id, event_id):
@@ -315,7 +315,7 @@ class FederationServer(FederationBase):
         state_ids = yield self.handler.get_state_ids_for_pdu(room_id, event_id)
         auth_chain_ids = yield self.store.get_auth_chain_ids(state_ids)

-        return (200, {"pdu_ids": state_ids, "auth_chain_ids": auth_chain_ids})
+        return 200, {"pdu_ids": state_ids, "auth_chain_ids": auth_chain_ids}

     @defer.inlineCallbacks
     def _on_context_state_request_compute(self, room_id, event_id):
@@ -345,15 +345,15 @@ class FederationServer(FederationBase):
         pdu = yield self.handler.get_persisted_pdu(origin, event_id)

         if pdu:
-            return (200, self._transaction_from_pdus([pdu]).get_dict())
+            return 200, self._transaction_from_pdus([pdu]).get_dict()
         else:
-            return (404, "")
+            return 404, ""

     @defer.inlineCallbacks
     def on_query_request(self, query_type, args):
         received_queries_counter.labels(query_type).inc()
         resp = yield self.registry.on_query(query_type, args)
-        return (200, resp)
+        return 200, resp

     @defer.inlineCallbacks
     def on_make_join_request(self, origin, room_id, user_id, supported_versions):
@@ -435,7 +435,7 @@ class FederationServer(FederationBase):
         logger.debug("on_send_leave_request: pdu sigs: %s", pdu.signatures)
         yield self.handler.on_send_leave_request(origin, pdu)
-        return (200, {})
+        return 200, {}

     @defer.inlineCallbacks
     def on_event_auth(self, origin, room_id, event_id):
@@ -446,7 +446,7 @@ class FederationServer(FederationBase):
         time_now = self._clock.time_msec()
         auth_pdus = yield self.handler.on_event_auth(event_id)
         res = {"auth_chain": [a.get_pdu_json(time_now) for a in auth_pdus]}
-        return (200, res)
+        return 200, res

     @defer.inlineCallbacks
     def on_query_auth_request(self, origin, content, room_id, event_id):
@@ -499,7 +499,7 @@ class FederationServer(FederationBase):
             "missing": ret.get("missing", []),
         }

-        return (200, send_content)
+        return 200, send_content

     @log_function
     def on_query_client_keys(self, origin, content):

View file

@@ -51,8 +51,8 @@ class AccountDataEventSource(object):
                     {"type": account_data_type, "content": content, "room_id": room_id}
                 )

-        return (results, current_stream_id)
+        return results, current_stream_id

     @defer.inlineCallbacks
     def get_pagination_rows(self, user, config, key):
-        return ([], config.to_id)
+        return [], config.to_id

View file

@@ -294,12 +294,10 @@ class ApplicationServicesHandler(object):
             # we don't know if they are unknown or not since it isn't one of our
            # users. We can't poke ASes.
             return False
-            return

         user_info = yield self.store.get_user_by_id(user_id)
         if user_info:
             return False
-            return

         # user not found; could be the AS though, so check.
         services = self.store.get_app_services()

View file

@@ -280,7 +280,7 @@ class AuthHandler(BaseHandler):
                 creds,
                 list(clientdict),
             )
-            return (creds, clientdict, session["id"])
+            return creds, clientdict, session["id"]

         ret = self._auth_dict_for_flows(flows, session)
         ret["completed"] = list(creds)
@@ -722,7 +722,7 @@ class AuthHandler(BaseHandler):
                 known_login_type = True
                 is_valid = yield provider.check_password(qualified_user_id, password)
                 if is_valid:
-                    return (qualified_user_id, None)
+                    return qualified_user_id, None

             if not hasattr(provider, "get_supported_login_types") or not hasattr(
                 provider, "check_auth"
@@ -766,7 +766,7 @@ class AuthHandler(BaseHandler):
             )

             if canonical_user_id:
-                return (canonical_user_id, None)
+                return canonical_user_id, None

         if not known_login_type:
             raise SynapseError(400, "Unknown login type %s" % login_type)
@@ -816,7 +816,7 @@ class AuthHandler(BaseHandler):
                 result = (result, None)
             return result

-        return (None, None)
+        return None, None

     @defer.inlineCallbacks
     def _check_local_password(self, user_id, password):

View file

@@ -1,5 +1,7 @@
 # -*- coding: utf-8 -*-
 # Copyright 2016 OpenMarket Ltd
+# Copyright 2019 New Vector Ltd
+# Copyright 2019 The Matrix.org Foundation C.I.C.
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
@@ -25,6 +27,7 @@ from synapse.api.errors import (
     HttpResponseException,
     RequestSendFailed,
 )
+from synapse.logging.opentracing import log_kv, set_tag, trace
 from synapse.types import RoomStreamToken, get_domain_from_id
 from synapse.util import stringutils
 from synapse.util.async_helpers import Linearizer
@@ -45,6 +48,7 @@ class DeviceWorkerHandler(BaseHandler):
         self.state = hs.get_state_handler()
         self._auth_handler = hs.get_auth_handler()

+    @trace
     @defer.inlineCallbacks
     def get_devices_by_user(self, user_id):
         """
@@ -56,6 +60,7 @@ class DeviceWorkerHandler(BaseHandler):
             defer.Deferred: list[dict[str, X]]: info on each device
         """

+        set_tag("user_id", user_id)
         device_map = yield self.store.get_devices_by_user(user_id)

         ips = yield self.store.get_last_client_ip_by_device(user_id, device_id=None)
@@ -64,8 +69,10 @@ class DeviceWorkerHandler(BaseHandler):
         for device in devices:
             _update_device_from_client_ips(device, ips)

+        log_kv(device_map)
         return devices

+    @trace
     @defer.inlineCallbacks
     def get_device(self, user_id, device_id):
         """ Retrieve the given device
@@ -85,9 +92,14 @@ class DeviceWorkerHandler(BaseHandler):
             raise errors.NotFoundError
         ips = yield self.store.get_last_client_ip_by_device(user_id, device_id)
         _update_device_from_client_ips(device, ips)
+
+        set_tag("device", device)
+        set_tag("ips", ips)
+
         return device

     @measure_func("device.get_user_ids_changed")
+    @trace
     @defer.inlineCallbacks
     def get_user_ids_changed(self, user_id, from_token):
         """Get list of users that have had the devices updated, or have newly
@@ -97,6 +109,9 @@ class DeviceWorkerHandler(BaseHandler):
             user_id (str)
             from_token (StreamToken)
         """
+
+        set_tag("user_id", user_id)
+        set_tag("from_token", from_token)
         now_room_key = yield self.store.get_room_events_max_id()

         room_ids = yield self.store.get_rooms_for_user(user_id)
@@ -148,6 +163,9 @@ class DeviceWorkerHandler(BaseHandler):
                 # special-case for an empty prev state: include all members
                 # in the changed list
                 if not event_ids:
+                    log_kv(
+                        {"event": "encountered empty previous state", "room_id": room_id}
+                    )
                     for key, event_id in iteritems(current_state_ids):
                         etype, state_key = key
                         if etype != EventTypes.Member:
@@ -200,7 +218,11 @@ class DeviceWorkerHandler(BaseHandler):
             possibly_joined = []
             possibly_left = []

-        return {"changed": list(possibly_joined), "left": list(possibly_left)}
+        result = {"changed": list(possibly_joined), "left": list(possibly_left)}
+
+        log_kv(result)
+
+        return result


 class DeviceHandler(DeviceWorkerHandler):
@@ -267,6 +289,7 @@ class DeviceHandler(DeviceWorkerHandler):

         raise errors.StoreError(500, "Couldn't generate a device ID.")

+    @trace
     @defer.inlineCallbacks
     def delete_device(self, user_id, device_id):
         """ Delete the given device
@@ -284,6 +307,10 @@ class DeviceHandler(DeviceWorkerHandler):
         except errors.StoreError as e:
             if e.code == 404:
                 # no match
+                set_tag("error", True)
+                log_kv(
+                    {"reason": "User doesn't have device id.", "device_id": device_id}
+                )
                 pass
             else:
                 raise
@@ -296,6 +323,7 @@ class DeviceHandler(DeviceWorkerHandler):

         yield self.notify_device_update(user_id, [device_id])

+    @trace
     @defer.inlineCallbacks
     def delete_all_devices_for_user(self, user_id, except_device_id=None):
         """Delete all of the user's devices
@@ -331,6 +359,8 @@ class DeviceHandler(DeviceWorkerHandler):
         except errors.StoreError as e:
             if e.code == 404:
                 # no match
+                set_tag("error", True)
+                set_tag("reason", "User doesn't have that device id.")
                 pass
             else:
                 raise
@@ -371,6 +401,7 @@ class DeviceHandler(DeviceWorkerHandler):
             else:
                 raise

+    @trace
     @measure_func("notify_device_update")
     @defer.inlineCallbacks
     def notify_device_update(self, user_id, device_ids):
@@ -386,6 +417,8 @@ class DeviceHandler(DeviceWorkerHandler):
             hosts.update(get_domain_from_id(u) for u in users_who_share_room)
             hosts.discard(self.server_name)

+            set_tag("target_hosts", hosts)
+
         position = yield self.store.add_device_change_to_streams(
             user_id, device_ids, list(hosts)
         )
@@ -405,6 +438,22 @@ class DeviceHandler(DeviceWorkerHandler):
             )
             for host in hosts:
                 self.federation_sender.send_device_messages(host)
+                log_kv({"message": "sent device update to host", "host": host})
+
+    @defer.inlineCallbacks
+    def notify_user_signature_update(self, from_user_id, user_ids):
+        """Notify a user that they have made new signatures of other users.
+
+        Args:
+            from_user_id (str): the user who made the signature
+            user_ids (list[str]): the users IDs that have new signatures
+        """
+
+        position = yield self.store.add_user_signature_change_to_streams(
+            from_user_id, user_ids
+        )
+
+        self.notifier.on_new_event("device_list_key", position, users=[from_user_id])

     @defer.inlineCallbacks
     def on_federation_query_user_devices(self, user_id):
@@ -451,12 +500,15 @@ class DeviceListUpdater(object):
             iterable=True,
         )

+    @trace
     @defer.inlineCallbacks
     def incoming_device_list_update(self, origin, edu_content):
         """Called on incoming device list update from federation. Responsible
         for parsing the EDU and adding to pending updates list.
         """

+        set_tag("origin", origin)
+        set_tag("edu_content", edu_content)
         user_id = edu_content.pop("user_id")
         device_id = edu_content.pop("device_id")
         stream_id = str(edu_content.pop("stream_id"))  # They may come as ints
@@ -471,12 +523,30 @@ class DeviceListUpdater(object):
                 device_id,
                 origin,
             )
+
+            set_tag("error", True)
+            log_kv(
+                {
+                    "message": "Got a device list update edu from a user and "
+                    "device which does not match the origin of the request.",
+                    "user_id": user_id,
+                    "device_id": device_id,
+                }
+            )
             return

         room_ids = yield self.store.get_rooms_for_user(user_id)
         if not room_ids:
             # We don't share any rooms with this user. Ignore update, as we
             # probably won't get any further updates.
+            set_tag("error", True)
+            log_kv(
+                {
+                    "message": "Got an update from a user for which "
+                    "we don't share any rooms",
+                    "other user_id": user_id,
+                }
+            )
             logger.warning(
                 "Got device list update edu for %r/%r, but don't share a room",
                 user_id,
@@ -578,6 +648,7 @@ class DeviceListUpdater(object):
         request:
             https://matrix.org/docs/spec/server_server/r0.1.2#get-matrix-federation-v1-user-devices-userid
         """
+        log_kv({"message": "Doing resync to update device list."})
         # Fetch all devices for the user.
         origin = get_domain_from_id(user_id)
         try:
@@ -594,13 +665,20 @@ class DeviceListUpdater(object):
             # eventually become consistent.
             return
         except FederationDeniedError as e:
+            set_tag("error", True)
+            log_kv({"reason": "FederationDeniedError"})
             logger.info(e)
             return
-        except Exception:
+        except Exception as e:
             # TODO: Remember that we are now out of sync and try again
             # later
+            set_tag("error", True)
+            log_kv(
+                {"message": "Exception raised by federation request", "exception": e}
+            )
             logger.exception("Failed to handle device list update for %s", user_id)
             return

+        log_kv({"result": result})
         stream_id = result["stream_id"]
         devices = result["devices"]

View file

@@ -22,6 +22,7 @@ from twisted.internet import defer
 from synapse.api.errors import SynapseError
 from synapse.logging.opentracing import (
     get_active_span_text_map,
+    log_kv,
     set_tag,
     start_active_span,
     whitelisted_homeserver,
@@ -86,7 +87,8 @@ class DeviceMessageHandler(object):

     @defer.inlineCallbacks
     def send_device_message(self, sender_user_id, message_type, messages):
+        set_tag("number_of_messages", len(messages))
+        set_tag("sender", sender_user_id)
         local_messages = {}
         remote_messages = {}
         for user_id, by_device in messages.items():
@@ -124,6 +126,7 @@ class DeviceMessageHandler(object):
                 else None,
             }

+        log_kv({"local_messages": local_messages})
         stream_id = yield self.store.add_messages_to_device_inbox(
             local_messages, remote_edu_contents
         )
@@ -132,6 +135,7 @@ class DeviceMessageHandler(object):
             "to_device_key", stream_id, users=local_messages.keys()
         )

+        log_kv({"remote_messages": remote_messages})
         for destination in remote_messages.keys():
             # Enqueue a new federation transaction to send the new
             # device messages to each remote destination.

View file

@@ -1,6 +1,7 @@
 # -*- coding: utf-8 -*-
 # Copyright 2016 OpenMarket Ltd
-# Copyright 2018 New Vector Ltd
+# Copyright 2018-2019 New Vector Ltd
+# Copyright 2019 The Matrix.org Foundation C.I.C.
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
@@ -19,13 +20,18 @@ import logging
 from six import iteritems

 from canonicaljson import encode_canonical_json, json
+from signedjson.sign import SignatureVerifyException, verify_signed_json

 from twisted.internet import defer

-from synapse.api.errors import CodeMessageException, SynapseError
+from synapse.api.errors import CodeMessageException, Codes, SynapseError
 from synapse.logging.context import make_deferred_yieldable, run_in_background
 from synapse.logging.opentracing import log_kv, set_tag, tag_args, trace
-from synapse.types import UserID, get_domain_from_id
+from synapse.types import (
+    UserID,
+    get_domain_from_id,
+    get_verify_key_from_cross_signing_key,
+)
 from synapse.util import unwrapFirstError
 from synapse.util.retryutils import NotRetryingDestination
@@ -49,7 +55,7 @@ class E2eKeysHandler(object):

     @trace
     @defer.inlineCallbacks
-    def query_devices(self, query_body, timeout):
+    def query_devices(self, query_body, timeout, from_user_id):
         """ Handle a device key query from a client

         {
@@ -67,6 +73,11 @@ class E2eKeysHandler(object):
                 }
             }
         }
+
+        Args:
+            from_user_id (str): the user making the query. This is used when
+                adding cross-signing signatures to limit what signatures users
+                can see.
         """

         device_keys_query = query_body.get("device_keys", {})
@@ -125,6 +136,11 @@ class E2eKeysHandler(object):
                 r = remote_queries_not_in_cache.setdefault(domain, {})
                 r[user_id] = remote_queries[user_id]

+        # Get cached cross-signing keys
+        cross_signing_keys = yield self.get_cross_signing_keys_from_cache(
+            device_keys_query, from_user_id
+        )
+
         # Now fetch any devices that we don't have in our cache
         @trace
         @defer.inlineCallbacks
@@ -188,6 +204,14 @@ class E2eKeysHandler(object):
                     if user_id in destination_query:
                         results[user_id] = keys

+                for user_id, key in remote_result["master_keys"].items():
+                    if user_id in destination_query:
+                        cross_signing_keys["master_keys"][user_id] = key
+
+                for user_id, key in remote_result["self_signing_keys"].items():
+                    if user_id in destination_query:
+                        cross_signing_keys["self_signing_keys"][user_id] = key
+
             except Exception as e:
                 failure = _exception_to_failure(e)
                 failures[destination] = failure
@@ -204,7 +228,61 @@ class E2eKeysHandler(object):
             ).addErrback(unwrapFirstError)
         )

-        return {"device_keys": results, "failures": failures}
+        ret = {"device_keys": results, "failures": failures}
+        ret.update(cross_signing_keys)
+
+        return ret
@defer.inlineCallbacks
def get_cross_signing_keys_from_cache(self, query, from_user_id):
"""Get cross-signing keys for users from the database
Args:
query (Iterable[string]) an iterable of user IDs. A dict whose keys
are user IDs satisfies this, so the query format used for
query_devices can be used here.
from_user_id (str): the user making the query. This is used when
adding cross-signing signatures to limit what signatures users
can see.
Returns:
defer.Deferred[dict[str, dict[str, dict]]]: map from
(master|self_signing|user_signing) -> user_id -> key
"""
master_keys = {}
self_signing_keys = {}
user_signing_keys = {}
for user_id in query:
# XXX: consider changing the store functions to allow querying
# multiple users simultaneously.
key = yield self.store.get_e2e_cross_signing_key(
user_id, "master", from_user_id
)
if key:
master_keys[user_id] = key
key = yield self.store.get_e2e_cross_signing_key(
user_id, "self_signing", from_user_id
)
if key:
self_signing_keys[user_id] = key
# users can see other users' master and self-signing keys, but can
# only see their own user-signing keys
if from_user_id == user_id:
key = yield self.store.get_e2e_cross_signing_key(
user_id, "user_signing", from_user_id
)
if key:
user_signing_keys[user_id] = key
return {
"master_keys": master_keys,
"self_signing_keys": self_signing_keys,
"user_signing_keys": user_signing_keys,
}
     @trace
     @defer.inlineCallbacks
@@ -447,6 +525,116 @@ class E2eKeysHandler(object):
         log_kv({"message": "Inserting new one_time_keys.", "keys": new_keys})
         yield self.store.add_e2e_one_time_keys(user_id, device_id, time_now, new_keys)
@defer.inlineCallbacks
def upload_signing_keys_for_user(self, user_id, keys):
"""Upload signing keys for cross-signing
Args:
user_id (string): the user uploading the keys
keys (dict[string, dict]): the signing keys
"""
# if a master key is uploaded, then check it. Otherwise, load the
# stored master key, to check signatures on other keys
if "master_key" in keys:
master_key = keys["master_key"]
_check_cross_signing_key(master_key, user_id, "master")
else:
master_key = yield self.store.get_e2e_cross_signing_key(user_id, "master")
# if there is no master key, then we can't do anything, because all the
# other cross-signing keys need to be signed by the master key
if not master_key:
raise SynapseError(400, "No master key available", Codes.MISSING_PARAM)
try:
master_key_id, master_verify_key = get_verify_key_from_cross_signing_key(
master_key
)
except ValueError:
if "master_key" in keys:
# the invalid key came from the request
raise SynapseError(400, "Invalid master key", Codes.INVALID_PARAM)
else:
# the invalid key came from the database
logger.error("Invalid master key found for user %s", user_id)
raise SynapseError(500, "Invalid master key")
# for the other cross-signing keys, make sure that they have valid
# signatures from the master key
if "self_signing_key" in keys:
self_signing_key = keys["self_signing_key"]
_check_cross_signing_key(
self_signing_key, user_id, "self_signing", master_verify_key
)
if "user_signing_key" in keys:
user_signing_key = keys["user_signing_key"]
_check_cross_signing_key(
user_signing_key, user_id, "user_signing", master_verify_key
)
# if everything checks out, then store the keys and send notifications
deviceids = []
if "master_key" in keys:
yield self.store.set_e2e_cross_signing_key(user_id, "master", master_key)
deviceids.append(master_verify_key.version)
if "self_signing_key" in keys:
yield self.store.set_e2e_cross_signing_key(
user_id, "self_signing", self_signing_key
)
try:
deviceids.append(
get_verify_key_from_cross_signing_key(self_signing_key)[1].version
)
except ValueError:
raise SynapseError(400, "Invalid self-signing key", Codes.INVALID_PARAM)
if "user_signing_key" in keys:
yield self.store.set_e2e_cross_signing_key(
user_id, "user_signing", user_signing_key
)
# the signature stream matches the semantics that we want for
# user-signing key updates: only the user themselves is notified of
# their own user-signing key updates
yield self.device_handler.notify_user_signature_update(user_id, [user_id])
# master key and self-signing key updates match the semantics of device
# list updates: all users who share an encrypted room are notified
if len(deviceids):
yield self.device_handler.notify_device_update(user_id, deviceids)
return {}
def _check_cross_signing_key(key, user_id, key_type, signing_key=None):
"""Check a cross-signing key uploaded by a user. Performs some basic sanity
checking, and ensures that it is signed, if a signature is required.
Args:
key (dict): the key data to verify
user_id (str): the user whose key is being checked
key_type (str): the type of key that the key should be
signing_key (VerifyKey): (optional) the signing key that the key should
be signed with. If omitted, signatures will not be checked.
"""
if (
key.get("user_id") != user_id
or key_type not in key.get("usage", [])
or len(key.get("keys", {})) != 1
):
raise SynapseError(400, ("Invalid %s key" % (key_type,)), Codes.INVALID_PARAM)
if signing_key:
try:
verify_signed_json(key, user_id, signing_key)
except SignatureVerifyException:
raise SynapseError(
400, ("Invalid signature on %s key" % key_type), Codes.INVALID_SIGNATURE
)
 def _exception_to_failure(e):
     if isinstance(e, CodeMessageException):
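
For reference, the keys accepted by upload_signing_keys_for_user above have roughly the shape sketched below; the user ID and key material are placeholders for illustration, not values from this change.

# Illustrative request body for uploading cross-signing keys (placeholder values).
keys = {
    "master_key": {
        "user_id": "@alice:example.com",
        "usage": ["master"],
        "keys": {"ed25519:base64+master+public+key": "base64+master+public+key"},
    },
    "self_signing_key": {
        "user_id": "@alice:example.com",
        "usage": ["self_signing"],
        "keys": {"ed25519:base64+self+signing+key": "base64+self+signing+key"},
        # must carry a valid signature from the master key, or the upload is rejected
        "signatures": {
            "@alice:example.com": {
                "ed25519:base64+master+public+key": "base64+signature"
            }
        },
    },
}
# upload_signing_keys_for_user(user_id, keys) checks each key's user_id, usage and
# single-entry "keys" map via _check_cross_signing_key, verifies the self-signing
# key's signature with verify_signed_json, stores the keys, and then notifies
# devices and the user's own signature stream of the update.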
@@ -167,7 +167,6 @@ class EventHandler(BaseHandler):
         if not event:
             return None
-            return
 
         users = yield self.store.get_users_in_room(event.room_id)
         is_peeking = user.to_string() not in users
@@ -1428,7 +1428,7 @@ class FederationHandler(BaseHandler):
         assert event.user_id == user_id
         assert event.state_key == user_id
         assert event.room_id == room_id
-        return (origin, event, format_ver)
+        return origin, event, format_ver
 
     @defer.inlineCallbacks
     @log_function
@@ -282,16 +282,3 @@ class IdentityHandler(BaseHandler):
         except HttpResponseException as e:
             logger.info("Proxied requestToken failed: %r", e)
             raise e.to_synapse_error()
-
-
-class LookupAlgorithm:
-    """
-    Supported hashing algorithms when performing a 3PID lookup.
-
-    SHA256 - Hashing an (address, medium, pepper) combo with sha256, then url-safe base64
-        encoding
-    NONE - Not performing any hashing. Simply sending an (address, medium) combo in plaintext
-    """
-
-    SHA256 = "sha256"
-    NONE = "none"
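The removed class's docstring describes hashing an (address, medium, pepper) combo with sha256 followed by url-safe base64. A self-contained approximation of that step (the helper name mirrors synapse.util.hash's sha256_and_url_safe_base64; the address and pepper values are made up):

import base64
import hashlib

def sha256_and_url_safe_base64(value):
    # sha256 the input, then unpadded url-safe base64, as described above.
    digest = hashlib.sha256(value.encode("utf8")).digest()
    return base64.urlsafe_b64encode(digest).decode("ascii").rstrip("=")

# Hash an (address, medium, pepper) combo for a SHA256-based lookup.
address, medium, lookup_pepper = "foo@example.com", "email", "some_pepper"
lookup_value = sha256_and_url_safe_base64(" ".join((address, medium, lookup_pepper)))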
@@ -449,8 +449,7 @@ class InitialSyncHandler(BaseHandler):
             # * The user is a guest user, and has joined the room
             # else it will throw.
             member_event = yield self.auth.check_user_was_in_room(room_id, user_id)
-            return (member_event.membership, member_event.event_id)
-            return
+            return member_event.membership, member_event.event_id
         except AuthError:
             visibility = yield self.state_handler.get_current_state(
                 room_id, EventTypes.RoomHistoryVisibility, ""
@@ -459,8 +458,7 @@ class InitialSyncHandler(BaseHandler):
                 visibility
                 and visibility.content["history_visibility"] == "world_readable"
             ):
-                return (Membership.JOIN, None)
-                return
+                return Membership.JOIN, None
             raise AuthError(
                 403, "Guest access not allowed", errcode=Codes.GUEST_ACCESS_FORBIDDEN
             )
@@ -255,7 +255,7 @@ class PresenceHandler(object):
         self.unpersisted_users_changes = set()
 
         if unpersisted:
-            logger.info("Persisting %d upersisted presence updates", len(unpersisted))
+            logger.info("Persisting %d unpersisted presence updates", len(unpersisted))
             yield self.store.update_presence(
                 [self.user_to_current_state[user_id] for user_id in unpersisted]
             )
@@ -1032,7 +1032,7 @@ class PresenceEventSource(object):
             #
             # Hence this guard where we just return nothing so that the sync
             # doesn't return. C.f. #5503.
-            return ([], max_token)
+            return [], max_token
 
         presence = self.get_presence_handler()
         stream_change_cache = self.store.presence_stream_cache
@@ -1279,7 +1279,7 @@ def get_interested_parties(store, states):
         # Always notify self
         users_to_states.setdefault(state.user_id, []).append(state)
 
-    return (room_ids_to_states, users_to_states)
+    return room_ids_to_states, users_to_states
 
 
 @defer.inlineCallbacks
@@ -148,7 +148,7 @@ class ReceiptEventSource(object):
         to_key = yield self.get_current_key()
 
         if from_key == to_key:
-            return ([], to_key)
+            return [], to_key
 
         events = yield self.store.get_linearized_receipts_for_rooms(
             room_ids, from_key=from_key, to_key=to_key
@@ -24,13 +24,11 @@ from synapse.api.errors import (
     AuthError,
     Codes,
     ConsentNotGivenError,
-    InvalidCaptchaError,
     LimitExceededError,
     RegistrationError,
     SynapseError,
 )
 from synapse.config.server import is_threepid_reserved
-from synapse.http.client import CaptchaServerHttpClient
 from synapse.http.servlet import assert_params_in_dict
 from synapse.replication.http.login import RegisterDeviceReplicationServlet
 from synapse.replication.http.register import (
@@ -39,7 +37,6 @@ from synapse.replication.http.register import (
 )
 from synapse.types import RoomAlias, RoomID, UserID, create_requester
 from synapse.util.async_helpers import Linearizer
-from synapse.util.threepids import check_3pid_allowed
 
 from ._base import BaseHandler
@@ -59,7 +56,6 @@ class RegistrationHandler(BaseHandler):
         self._auth_handler = hs.get_auth_handler()
         self.profile_handler = hs.get_profile_handler()
         self.user_directory_handler = hs.get_user_directory_handler()
-        self.captcha_client = CaptchaServerHttpClient(hs)
         self.identity_handler = self.hs.get_handlers().identity_handler
         self.ratelimiter = hs.get_registration_ratelimiter()
@@ -362,70 +358,6 @@ class RegistrationHandler(BaseHandler):
         )
 
         return user_id
@defer.inlineCallbacks
def check_recaptcha(self, ip, private_key, challenge, response):
"""
Checks a recaptcha is correct.
Used only by c/s api v1
"""
captcha_response = yield self._validate_captcha(
ip, private_key, challenge, response
)
if not captcha_response["valid"]:
logger.info(
"Invalid captcha entered from %s. Error: %s",
ip,
captcha_response["error_url"],
)
raise InvalidCaptchaError(error_url=captcha_response["error_url"])
else:
logger.info("Valid captcha entered from %s", ip)
@defer.inlineCallbacks
def register_email(self, threepidCreds):
"""
Registers emails with an identity server.
Used only by c/s api v1
"""
for c in threepidCreds:
logger.info(
"validating threepidcred sid %s on id server %s",
c["sid"],
c["idServer"],
)
try:
threepid = yield self.identity_handler.threepid_from_creds(c)
except Exception:
logger.exception("Couldn't validate 3pid")
raise RegistrationError(400, "Couldn't validate 3pid")
if not threepid:
raise RegistrationError(400, "Couldn't validate 3pid")
logger.info(
"got threepid with medium '%s' and address '%s'",
threepid["medium"],
threepid["address"],
)
if not check_3pid_allowed(self.hs, threepid["medium"], threepid["address"]):
raise RegistrationError(403, "Third party identifier is not allowed")
@defer.inlineCallbacks
def bind_emails(self, user_id, threepidCreds):
"""Links emails with a user ID and informs an identity server.
Used only by c/s api v1
"""
# Now we have a matrix ID, bind it to the threepids we were given
for c in threepidCreds:
# XXX: This should be a deferred list, shouldn't it?
yield self.identity_handler.bind_threepid(c, user_id)
     def check_user_id_not_appservice_exclusive(self, user_id, allowed_appservice=None):
         # don't allow people to register the server notices mxid
         if self._server_notices_mxid is not None:
@@ -463,45 +395,8 @@ class RegistrationHandler(BaseHandler):
             self._next_generated_user_id += 1
         return str(id)
@defer.inlineCallbacks
def _validate_captcha(self, ip_addr, private_key, challenge, response):
"""Validates the captcha provided.
Used only by c/s api v1
Returns:
dict: Containing 'valid'(bool) and 'error_url'(str) if invalid.
"""
response = yield self._submit_captcha(ip_addr, private_key, challenge, response)
# parse Google's response. Lovely format..
lines = response.split("\n")
json = {
"valid": lines[0] == "true",
"error_url": "http://www.recaptcha.net/recaptcha/api/challenge?"
+ "error=%s" % lines[1],
}
return json
@defer.inlineCallbacks
def _submit_captcha(self, ip_addr, private_key, challenge, response):
"""
Used only by c/s api v1
"""
data = yield self.captcha_client.post_urlencoded_get_raw(
"http://www.recaptcha.net:80/recaptcha/api/verify",
args={
"privatekey": private_key,
"remoteip": ip_addr,
"challenge": challenge,
"response": response,
},
)
return data
     @defer.inlineCallbacks
     def _join_user_to_room(self, requester, room_identifier):
-        room_id = None
         room_member_handler = self.hs.get_room_member_handler()
         if RoomID.is_valid(room_identifier):
             room_id = room_identifier
@@ -622,7 +517,7 @@ class RegistrationHandler(BaseHandler):
                 initial_display_name=initial_display_name,
                 is_guest=is_guest,
             )
-            return (r["device_id"], r["access_token"])
+            return r["device_id"], r["access_token"]
 
         valid_until_ms = None
         if self.session_lifetime is not None:
@@ -648,9 +543,7 @@
         return (device_id, access_token)
 
     @defer.inlineCallbacks
-    def post_registration_actions(
-        self, user_id, auth_result, access_token, bind_email, bind_msisdn
-    ):
+    def post_registration_actions(self, user_id, auth_result, access_token):
         """A user has completed registration
 
         Args:
@@ -659,18 +552,10 @@
                 registered user.
             access_token (str|None): The access token of the newly logged in
                 device, or None if `inhibit_login` enabled.
-            bind_email (bool): Whether to bind the email with the identity
-                server.
-            bind_msisdn (bool): Whether to bind the msisdn with the identity
-                server.
         """
         if self.hs.config.worker_app:
             yield self._post_registration_client(
-                user_id=user_id,
-                auth_result=auth_result,
-                access_token=access_token,
-                bind_email=bind_email,
-                bind_msisdn=bind_msisdn,
+                user_id=user_id, auth_result=auth_result, access_token=access_token
             )
             return
@@ -683,13 +568,11 @@
             ):
                 yield self.store.upsert_monthly_active_user(user_id)
 
-            yield self._register_email_threepid(
-                user_id, threepid, access_token, bind_email
-            )
+            yield self._register_email_threepid(user_id, threepid, access_token)
 
         if auth_result and LoginType.MSISDN in auth_result:
             threepid = auth_result[LoginType.MSISDN]
-            yield self._register_msisdn_threepid(user_id, threepid, bind_msisdn)
+            yield self._register_msisdn_threepid(user_id, threepid)
 
         if auth_result and LoginType.TERMS in auth_result:
             yield self._on_user_consented(user_id, self.hs.config.user_consent_version)
@@ -708,14 +591,12 @@
         yield self.post_consent_actions(user_id)
 
     @defer.inlineCallbacks
-    def _register_email_threepid(self, user_id, threepid, token, bind_email):
+    def _register_email_threepid(self, user_id, threepid, token):
         """Add an email address as a 3pid identifier
 
         Also adds an email pusher for the email address, if configured in the
         HS config
 
-        Also optionally binds emails to the given user_id on the identity server
-
         Must be called on master.
 
         Args:
@@ -723,8 +604,6 @@
             threepid (object): m.login.email.identity auth response
             token (str|None): access_token for the user, or None if not logged
                 in.
-            bind_email (bool): true if the client requested the email to be
-                bound at the identity server
 
         Returns:
             defer.Deferred:
         """
@@ -766,29 +645,15 @@
                 data={},
             )
 
-        if bind_email:
-            logger.info("bind_email specified: binding")
-            logger.debug("Binding emails %s to %s" % (threepid, user_id))
-            yield self.identity_handler.bind_threepid(
-                threepid["threepid_creds"], user_id
-            )
-        else:
-            logger.info("bind_email not specified: not binding email")
-
     @defer.inlineCallbacks
-    def _register_msisdn_threepid(self, user_id, threepid, bind_msisdn):
+    def _register_msisdn_threepid(self, user_id, threepid):
         """Add a phone number as a 3pid identifier
 
-        Also optionally binds msisdn to the given user_id on the identity server
-
         Must be called on master.
 
         Args:
             user_id (str): id of user
             threepid (object): m.login.msisdn auth response
-            token (str): access_token for the user
-            bind_email (bool): true if the client requested the email to be
-                bound at the identity server
 
         Returns:
             defer.Deferred:
         """
@@ -804,12 +669,3 @@
         yield self._auth_handler.add_threepid(
             user_id, threepid["medium"], threepid["address"], threepid["validated_at"]
         )
-
-        if bind_msisdn:
-            logger.info("bind_msisdn specified: binding")
-            logger.debug("Binding msisdn %s to %s", threepid, user_id)
-            yield self.identity_handler.bind_threepid(
-                threepid["threepid_creds"], user_id
-            )
-        else:
-            logger.info("bind_msisdn not specified: not binding msisdn")
@@ -852,7 +852,6 @@ class RoomContextHandler(object):
         )
         if not event:
             return None
-            return
 
         filtered = yield (filter_evts([event]))
         if not filtered:
@@ -29,11 +29,9 @@ from twisted.internet import defer
 from synapse import types
 from synapse.api.constants import EventTypes, Membership
 from synapse.api.errors import AuthError, Codes, HttpResponseException, SynapseError
-from synapse.handlers.identity import LookupAlgorithm
 from synapse.types import RoomID, UserID
 from synapse.util.async_helpers import Linearizer
 from synapse.util.distributor import user_joined_room, user_left_room
-from synapse.util.hash import sha256_and_url_safe_base64
 
 from ._base import BaseHandler
@@ -525,7 +523,7 @@ class RoomMemberHandler(object):
             event (SynapseEvent): The membership event.
             context: The context of the event.
             is_guest (bool): Whether the sender is a guest.
-            remote_room_hosts (list[str]|None): Homeservers which are likely to already be in
+            room_hosts ([str]): Homeservers which are likely to already be in
                 the room, and could be danced with in order to join this
                 homeserver for the first time.
             ratelimit (bool): Whether to rate limit this request.
@@ -636,7 +634,7 @@
             servers.remove(room_alias.domain)
         servers.insert(0, room_alias.domain)
 
-        return RoomID.from_string(room_id), servers
+        return (RoomID.from_string(room_id), servers)
 
     @defer.inlineCallbacks
     def _get_inviter(self, user_id, room_id):
@@ -699,44 +697,6 @@
             raise SynapseError(
                 403, "Looking up third-party identifiers is denied from this server"
             )
# Check what hashing details are supported by this identity server
use_v1 = False
hash_details = None
try:
hash_details = yield self.simple_http_client.get_json(
"%s%s/_matrix/identity/v2/hash_details" % (id_server_scheme, id_server)
)
except (HttpResponseException, ValueError) as e:
# Catch HttpResponseExcept for a non-200 response code
# Catch ValueError for non-JSON response body
# Check if this identity server does not know about v2 lookups
if e.code == 404:
# This is an old identity server that does not yet support v2 lookups
use_v1 = True
else:
logger.warn("Error when looking up hashing details: %s" % (e,))
return None
if use_v1:
return (yield self._lookup_3pid_v1(id_server, medium, address))
return (yield self._lookup_3pid_v2(id_server, medium, address, hash_details))
@defer.inlineCallbacks
def _lookup_3pid_v1(self, id_server, medium, address):
"""Looks up a 3pid in the passed identity server using v1 lookup.
Args:
id_server (str): The server name (including port, if required)
of the identity server to use.
medium (str): The type of the third party identifier (e.g. "email").
address (str): The third party identifier (e.g. "foo@example.com").
Returns:
str: the matrix ID of the 3pid, or None if it is not recognized.
"""
try: try:
data = yield self.simple_http_client.get_json( data = yield self.simple_http_client.get_json(
"%s%s/_matrix/identity/api/v1/lookup" % (id_server_scheme, id_server), "%s%s/_matrix/identity/api/v1/lookup" % (id_server_scheme, id_server),
@ -751,83 +711,8 @@ class RoomMemberHandler(object):
except IOError as e: except IOError as e:
logger.warn("Error from identity server lookup: %s" % (e,)) logger.warn("Error from identity server lookup: %s" % (e,))
return None
@defer.inlineCallbacks
def _lookup_3pid_v2(self, id_server, medium, address, hash_details):
"""Looks up a 3pid in the passed identity server using v2 lookup.
Args:
id_server (str): The server name (including port, if required)
of the identity server to use.
medium (str): The type of the third party identifier (e.g. "email").
address (str): The third party identifier (e.g. "foo@example.com").
hash_details (dict[str, str|list]): A dictionary containing hashing information
provided by an identity server.
Returns:
Deferred[str|None]: the matrix ID of the 3pid, or None if it is not recognised.
"""
# Extract information from hash_details
supported_lookup_algorithms = hash_details["algorithms"]
lookup_pepper = hash_details["lookup_pepper"]
# Check if any of the supported lookup algorithms are present
if LookupAlgorithm.SHA256 in supported_lookup_algorithms:
# Perform a hashed lookup
lookup_algorithm = LookupAlgorithm.SHA256
# Hash address, medium and the pepper with sha256
to_hash = "%s %s %s" % (address, medium, lookup_pepper)
lookup_value = sha256_and_url_safe_base64(to_hash)
elif LookupAlgorithm.NONE in supported_lookup_algorithms:
# Perform a non-hashed lookup
lookup_algorithm = LookupAlgorithm.NONE
# Combine together plaintext address and medium
lookup_value = "%s %s" % (address, medium)
else:
logger.warn(
"None of the provided lookup algorithms of %s%s are supported: %s",
id_server_scheme,
id_server,
hash_details["algorithms"],
)
raise SynapseError(
400,
"Provided identity server does not support any v2 lookup "
"algorithms that this homeserver supports.",
)
try:
lookup_results = yield self.simple_http_client.post_json_get_json(
"%s%s/_matrix/identity/v2/lookup" % (id_server_scheme, id_server),
{
"addresses": [lookup_value],
"algorithm": lookup_algorithm,
"pepper": lookup_pepper,
},
)
except (HttpResponseException, ValueError) as e:
# Catch HttpResponseExcept for a non-200 response code
# Catch ValueError for non-JSON response body
logger.warn("Error when performing a 3pid lookup: %s" % (e,))
return None return None
# Check for a mapping from what we looked up to an MXID
if "mappings" not in lookup_results or not isinstance(
lookup_results["mappings"], dict
):
logger.debug("No results from 3pid lookup")
return None
# Return the MXID if it's available, or None otherwise
mxid = lookup_results["mappings"].get(lookup_value)
return mxid
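Putting the removed v2 lookup together, the request and response bodies have roughly this shape (endpoint and field names as referenced above; the hash and MXID values are made up for illustration):

# Illustrative shapes only, for POST .../_matrix/identity/v2/lookup.
lookup_request = {
    "addresses": ["<url-safe sha256 of 'address medium pepper'>"],
    "algorithm": "sha256",
    "pepper": "some_pepper",
}
lookup_response = {
    "mappings": {"<url-safe sha256 of 'address medium pepper'>": "@alice:example.com"}
}
mxid = lookup_response["mappings"].get(lookup_request["addresses"][0])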
     @defer.inlineCallbacks
     def _verify_any_signature(self, data, server_hostname):
         if server_hostname not in data["signatures"]:
@@ -1018,7 +903,7 @@
         if not public_keys:
             public_keys.append(fallback_public_key)
         display_name = data["display_name"]
-        return (token, public_keys, fallback_public_key, display_name)
+        return token, public_keys, fallback_public_key, display_name
 
     @defer.inlineCallbacks
     def _is_host_in_room(self, current_state_ids):
@@ -14,15 +14,14 @@
 # limitations under the License.
 
 import logging
+from collections import Counter
 
 from twisted.internet import defer
 
-from synapse.api.constants import EventTypes, JoinRules, Membership
+from synapse.api.constants import EventTypes, Membership
 from synapse.handlers.state_deltas import StateDeltasHandler
 from synapse.metrics import event_processing_positions
 from synapse.metrics.background_process_metrics import run_as_background_process
-from synapse.types import UserID
-from synapse.util.metrics import Measure
 
 logger = logging.getLogger(__name__)
@@ -62,11 +61,10 @@ class StatsHandler(StateDeltasHandler):
     def notify_new_event(self):
         """Called when there may be more deltas to process
         """
-        if not self.hs.config.stats_enabled:
+        if not self.hs.config.stats_enabled or self._is_processing:
             return
 
-        if self._is_processing:
-            return
+        self._is_processing = True
 
         @defer.inlineCallbacks
         def process():
@@ -75,39 +73,72 @@
             finally:
                 self._is_processing = False
 
-        self._is_processing = True
         run_as_background_process("stats.notify_new_event", process)
@defer.inlineCallbacks @defer.inlineCallbacks
def _unsafe_process(self): def _unsafe_process(self):
# If self.pos is None then means we haven't fetched it from DB # If self.pos is None then means we haven't fetched it from DB
if self.pos is None: if self.pos is None:
self.pos = yield self.store.get_stats_stream_pos() self.pos = yield self.store.get_stats_positions()
# If still None then the initial background update hasn't happened yet
if self.pos is None:
return None
# Loop round handling deltas until we're up to date # Loop round handling deltas until we're up to date
while True: while True:
with Measure(self.clock, "stats_delta"): deltas = yield self.store.get_current_state_deltas(self.pos)
deltas = yield self.store.get_current_state_deltas(self.pos)
if not deltas:
return
logger.info("Handling %d state deltas", len(deltas)) if deltas:
yield self._handle_deltas(deltas) logger.debug("Handling %d state deltas", len(deltas))
room_deltas, user_deltas = yield self._handle_deltas(deltas)
self.pos = deltas[-1]["stream_id"] max_pos = deltas[-1]["stream_id"]
yield self.store.update_stats_stream_pos(self.pos) else:
room_deltas = {}
user_deltas = {}
max_pos = yield self.store.get_room_max_stream_ordering()
event_processing_positions.labels("stats").set(self.pos) # Then count deltas for total_events and total_event_bytes.
room_count, user_count = yield self.store.get_changes_room_total_events_and_bytes(
self.pos, max_pos
)
for room_id, fields in room_count.items():
room_deltas.setdefault(room_id, {}).update(fields)
for user_id, fields in user_count.items():
user_deltas.setdefault(user_id, {}).update(fields)
logger.debug("room_deltas: %s", room_deltas)
logger.debug("user_deltas: %s", user_deltas)
# Always call this so that we update the stats position.
yield self.store.bulk_update_stats_delta(
self.clock.time_msec(),
updates={"room": room_deltas, "user": user_deltas},
stream_id=max_pos,
)
event_processing_positions.labels("stats").set(max_pos)
if self.pos == max_pos:
break
self.pos = max_pos
     @defer.inlineCallbacks
     def _handle_deltas(self, deltas):
-        """
-        Called with the state deltas to process
+        """Called with the state deltas to process
+
+        Returns:
+            Deferred[tuple[dict[str, Counter], dict[str, Counter]]]
+            Resolves to two dicts, the room deltas and the user deltas,
+            mapping from room/user ID to changes in the various fields.
         """
+
+        room_to_stats_deltas = {}
+        user_to_stats_deltas = {}
+
+        room_to_state_updates = {}
+
         for delta in deltas:
             typ = delta["type"]
             state_key = delta["state_key"]
@ -115,11 +146,10 @@ class StatsHandler(StateDeltasHandler):
event_id = delta["event_id"] event_id = delta["event_id"]
stream_id = delta["stream_id"] stream_id = delta["stream_id"]
prev_event_id = delta["prev_event_id"] prev_event_id = delta["prev_event_id"]
stream_pos = delta["stream_id"]
logger.debug("Handling: %r %r, %s", typ, state_key, event_id) logger.debug("Handling: %r, %r %r, %s", room_id, typ, state_key, event_id)
token = yield self.store.get_earliest_token_for_room_stats(room_id) token = yield self.store.get_earliest_token_for_stats("room", room_id)
# If the earliest token to begin from is larger than our current # If the earliest token to begin from is larger than our current
# stream ID, skip processing this delta. # stream ID, skip processing this delta.
@ -131,203 +161,130 @@ class StatsHandler(StateDeltasHandler):
continue continue
if event_id is None and prev_event_id is None: if event_id is None and prev_event_id is None:
# Errr... logger.error(
"event ID is None and so is the previous event ID. stream_id: %s",
stream_id,
)
continue continue
event_content = {} event_content = {}
sender = None
if event_id is not None: if event_id is not None:
event = yield self.store.get_event(event_id, allow_none=True) event = yield self.store.get_event(event_id, allow_none=True)
if event: if event:
event_content = event.content or {} event_content = event.content or {}
sender = event.sender
# We use stream_pos here rather than fetch by event_id as event_id # All the values in this dict are deltas (RELATIVE changes)
# may be None room_stats_delta = room_to_stats_deltas.setdefault(room_id, Counter())
now = yield self.store.get_received_ts_by_stream_pos(stream_pos)
# quantise time to the nearest bucket room_state = room_to_state_updates.setdefault(room_id, {})
now = (now // 1000 // self.stats_bucket_size) * self.stats_bucket_size
if prev_event_id is None:
# this state event doesn't overwrite another,
# so it is a new effective/current state event
room_stats_delta["current_state_events"] += 1
if typ == EventTypes.Member: if typ == EventTypes.Member:
# we could use _get_key_change here but it's a bit inefficient # we could use _get_key_change here but it's a bit inefficient
# given we're not testing for a specific result; might as well # given we're not testing for a specific result; might as well
# just grab the prev_membership and membership strings and # just grab the prev_membership and membership strings and
# compare them. # compare them.
prev_event_content = {} # We take None rather than leave as a previous membership
# in the absence of a previous event because we do not want to
# reduce the leave count when a new-to-the-room user joins.
prev_membership = None
if prev_event_id is not None: if prev_event_id is not None:
prev_event = yield self.store.get_event( prev_event = yield self.store.get_event(
prev_event_id, allow_none=True prev_event_id, allow_none=True
) )
if prev_event: if prev_event:
prev_event_content = prev_event.content prev_event_content = prev_event.content
prev_membership = prev_event_content.get(
"membership", Membership.LEAVE
)
membership = event_content.get("membership", Membership.LEAVE) membership = event_content.get("membership", Membership.LEAVE)
prev_membership = prev_event_content.get("membership", Membership.LEAVE)
if prev_membership == membership: if prev_membership is None:
continue logger.debug("No previous membership for this user.")
elif membership == prev_membership:
if prev_membership == Membership.JOIN: pass # noop
yield self.store.update_stats_delta( elif prev_membership == Membership.JOIN:
now, "room", room_id, "joined_members", -1 room_stats_delta["joined_members"] -= 1
)
elif prev_membership == Membership.INVITE: elif prev_membership == Membership.INVITE:
yield self.store.update_stats_delta( room_stats_delta["invited_members"] -= 1
now, "room", room_id, "invited_members", -1
)
elif prev_membership == Membership.LEAVE: elif prev_membership == Membership.LEAVE:
yield self.store.update_stats_delta( room_stats_delta["left_members"] -= 1
now, "room", room_id, "left_members", -1
)
elif prev_membership == Membership.BAN: elif prev_membership == Membership.BAN:
yield self.store.update_stats_delta( room_stats_delta["banned_members"] -= 1
now, "room", room_id, "banned_members", -1
)
else: else:
err = "%s is not a valid prev_membership" % (repr(prev_membership),) raise ValueError(
logger.error(err) "%r is not a valid prev_membership" % (prev_membership,)
raise ValueError(err) )
if membership == prev_membership:
pass # noop
if membership == Membership.JOIN: if membership == Membership.JOIN:
yield self.store.update_stats_delta( room_stats_delta["joined_members"] += 1
now, "room", room_id, "joined_members", +1
)
elif membership == Membership.INVITE: elif membership == Membership.INVITE:
yield self.store.update_stats_delta( room_stats_delta["invited_members"] += 1
now, "room", room_id, "invited_members", +1
) if sender and self.is_mine_id(sender):
user_to_stats_deltas.setdefault(sender, Counter())[
"invites_sent"
] += 1
elif membership == Membership.LEAVE: elif membership == Membership.LEAVE:
yield self.store.update_stats_delta( room_stats_delta["left_members"] += 1
now, "room", room_id, "left_members", +1
)
elif membership == Membership.BAN: elif membership == Membership.BAN:
yield self.store.update_stats_delta( room_stats_delta["banned_members"] += 1
now, "room", room_id, "banned_members", +1
)
else: else:
err = "%s is not a valid membership" % (repr(membership),) raise ValueError("%r is not a valid membership" % (membership,))
logger.error(err)
raise ValueError(err)
user_id = state_key user_id = state_key
if self.is_mine_id(user_id): if self.is_mine_id(user_id):
# update user_stats as it's one of our users # this accounts for transitions like leave → ban and so on.
public = yield self._is_public_room(room_id) has_changed_joinedness = (prev_membership == Membership.JOIN) != (
membership == Membership.JOIN
)
if membership == Membership.LEAVE: if has_changed_joinedness:
yield self.store.update_stats_delta( delta = +1 if membership == Membership.JOIN else -1
now,
"user", user_to_stats_deltas.setdefault(user_id, Counter())[
user_id, "joined_rooms"
"public_rooms" if public else "private_rooms", ] += delta
-1,
) room_stats_delta["local_users_in_room"] += delta
elif membership == Membership.JOIN:
yield self.store.update_stats_delta(
now,
"user",
user_id,
"public_rooms" if public else "private_rooms",
+1,
)
elif typ == EventTypes.Create: elif typ == EventTypes.Create:
# Newly created room. Add it with all blank portions. room_state["is_federatable"] = event_content.get("m.federate", True)
yield self.store.update_room_state( if sender and self.is_mine_id(sender):
room_id, user_to_stats_deltas.setdefault(sender, Counter())[
{ "rooms_created"
"join_rules": None, ] += 1
"history_visibility": None,
"encryption": None,
"name": None,
"topic": None,
"avatar": None,
"canonical_alias": None,
},
)
elif typ == EventTypes.JoinRules: elif typ == EventTypes.JoinRules:
yield self.store.update_room_state( room_state["join_rules"] = event_content.get("join_rule")
room_id, {"join_rules": event_content.get("join_rule")}
)
is_public = yield self._get_key_change(
prev_event_id, event_id, "join_rule", JoinRules.PUBLIC
)
if is_public is not None:
yield self.update_public_room_stats(now, room_id, is_public)
elif typ == EventTypes.RoomHistoryVisibility: elif typ == EventTypes.RoomHistoryVisibility:
yield self.store.update_room_state( room_state["history_visibility"] = event_content.get(
room_id, "history_visibility"
{"history_visibility": event_content.get("history_visibility")},
) )
is_public = yield self._get_key_change(
prev_event_id, event_id, "history_visibility", "world_readable"
)
if is_public is not None:
yield self.update_public_room_stats(now, room_id, is_public)
elif typ == EventTypes.Encryption: elif typ == EventTypes.Encryption:
yield self.store.update_room_state( room_state["encryption"] = event_content.get("algorithm")
room_id, {"encryption": event_content.get("algorithm")}
)
elif typ == EventTypes.Name: elif typ == EventTypes.Name:
yield self.store.update_room_state( room_state["name"] = event_content.get("name")
room_id, {"name": event_content.get("name")}
)
elif typ == EventTypes.Topic: elif typ == EventTypes.Topic:
yield self.store.update_room_state( room_state["topic"] = event_content.get("topic")
room_id, {"topic": event_content.get("topic")}
)
elif typ == EventTypes.RoomAvatar: elif typ == EventTypes.RoomAvatar:
yield self.store.update_room_state( room_state["avatar"] = event_content.get("url")
room_id, {"avatar": event_content.get("url")}
)
elif typ == EventTypes.CanonicalAlias: elif typ == EventTypes.CanonicalAlias:
yield self.store.update_room_state( room_state["canonical_alias"] = event_content.get("alias")
room_id, {"canonical_alias": event_content.get("alias")} elif typ == EventTypes.GuestAccess:
) room_state["guest_access"] = event_content.get("guest_access")
@defer.inlineCallbacks for room_id, state in room_to_state_updates.items():
def update_public_room_stats(self, ts, room_id, is_public): yield self.store.update_room_state(room_id, state)
"""
Increment/decrement a user's number of public rooms when a room they are
in changes to/from public visibility.
Args: return room_to_stats_deltas, user_to_stats_deltas
ts (int): Timestamp in seconds
room_id (str)
is_public (bool)
"""
# For now, blindly iterate over all local users in the room so that
# we can handle the whole problem of copying buckets over as needed
user_ids = yield self.store.get_users_in_room(room_id)
for user_id in user_ids:
if self.hs.is_mine(UserID.from_string(user_id)):
yield self.store.update_stats_delta(
ts, "user", user_id, "public_rooms", +1 if is_public else -1
)
yield self.store.update_stats_delta(
ts, "user", user_id, "private_rooms", -1 if is_public else +1
)
@defer.inlineCallbacks
def _is_public_room(self, room_id):
join_rules = yield self.state.get_current_state(room_id, EventTypes.JoinRules)
history_visibility = yield self.state.get_current_state(
room_id, EventTypes.RoomHistoryVisibility
)
if (join_rules and join_rules.content.get("join_rule") == JoinRules.PUBLIC) or (
(
history_visibility
and history_visibility.content.get("history_visibility")
== "world_readable"
)
):
return True
else:
return False
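The rewritten handler above folds membership changes into collections.Counter deltas per room (and per user) and flushes them in one bulk update. A minimal standalone sketch of that bookkeeping, tracking only the joined_members field (room ID and membership strings are illustrative):

from collections import Counter

room_to_stats_deltas = {}

def record_membership_change(room_id, prev_membership, membership):
    # Fold a single membership transition into the per-room delta.
    delta = room_to_stats_deltas.setdefault(room_id, Counter())
    if prev_membership == "join":
        delta["joined_members"] -= 1
    if membership == "join":
        delta["joined_members"] += 1

record_membership_change("!room:example.com", None, "join")
record_membership_change("!room:example.com", "join", "leave")
print(room_to_stats_deltas)  # {'!room:example.com': Counter({'joined_members': 0})}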
@@ -1,6 +1,6 @@
 # -*- coding: utf-8 -*-
 # Copyright 2015, 2016 OpenMarket Ltd
-# Copyright 2018 New Vector Ltd
+# Copyright 2018, 2019 New Vector Ltd
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
@@ -378,7 +378,7 @@ class SyncHandler(object):
             event_copy = {k: v for (k, v) in iteritems(event) if k != "room_id"}
             ephemeral_by_room.setdefault(room_id, []).append(event_copy)
 
-        return (now_token, ephemeral_by_room)
+        return now_token, ephemeral_by_room
 
     @defer.inlineCallbacks
     def _load_filtered_recents(
@@ -578,7 +578,6 @@
             if not last_events:
                 return None
-                return
 
             last_event = last_events[-1]
             state_ids = yield self.store.get_state_ids_for_event(
@@ -1125,6 +1124,11 @@
                 # weren't in the previous sync *or* they left and rejoined.
                 users_that_have_changed.update(newly_joined_or_invited_users)
 
+                user_signatures_changed = yield self.store.get_users_whose_signatures_changed(
+                    user_id, since_token.device_list_key
+                )
+
+                users_that_have_changed.update(user_signatures_changed)
+
                 # Now find users that we no longer track
                 for room_id in newly_left_rooms:
                     left_users = yield self.state.get_current_users_in_room(room_id)
@@ -1332,7 +1336,7 @@
         )
         if not tags_by_room:
             logger.debug("no-oping sync")
-            return ([], [], [], [])
+            return [], [], [], []
 
         ignored_account_data = yield self.store.get_global_account_data_by_type_for_user(
             "m.ignored_user_list", user_id=user_id
@@ -1642,7 +1646,7 @@
             )
             room_entries.append(entry)
 
-        return (room_entries, invited, newly_joined_rooms, newly_left_rooms)
+        return room_entries, invited, newly_joined_rooms, newly_left_rooms
 
     @defer.inlineCallbacks
     def _get_all_rooms(self, sync_result_builder, ignored_users):
@@ -1716,7 +1720,7 @@
             )
         )
 
-        return (room_entries, invited, [])
+        return room_entries, invited, []
 
     @defer.inlineCallbacks
     def _generate_room_entry(
@@ -319,4 +319,4 @@ class TypingNotificationEventSource(object):
         return self.get_typing_handler()._latest_room_serial
 
     def get_pagination_rows(self, user, pagination_config, key):
-        return ([], pagination_config.from_key)
+        return [], pagination_config.from_key
@@ -35,7 +35,7 @@ from twisted.internet.interfaces import (
 )
 from twisted.python.failure import Failure
 from twisted.web._newclient import ResponseDone
-from twisted.web.client import Agent, HTTPConnectionPool, PartialDownloadError, readBody
+from twisted.web.client import Agent, HTTPConnectionPool, readBody
 from twisted.web.http import PotentialDataLoss
 from twisted.web.http_headers import Headers
@@ -599,38 +599,6 @@ def _readBodyToFile(response, stream, max_size):
     return d
class CaptchaServerHttpClient(SimpleHttpClient):
"""
Separate HTTP client for talking to google's captcha servers
Only slightly special because accepts partial download responses
used only by c/s api v1
"""
@defer.inlineCallbacks
def post_urlencoded_get_raw(self, url, args={}):
query_bytes = urllib.parse.urlencode(encode_urlencode_args(args), True)
response = yield self.request(
"POST",
url,
data=query_bytes,
headers=Headers(
{
b"Content-Type": [b"application/x-www-form-urlencoded"],
b"User-Agent": [self.user_agent],
}
),
)
try:
body = yield make_deferred_yieldable(readBody(response))
return body
except PartialDownloadError as e:
# twisted dislikes google's response, no content length.
return e.response
def encode_urlencode_args(args):
    return {k: encode_urlencode_arg(v) for k, v in args.items()}
@@ -207,7 +207,7 @@ class WellKnownResolver(object):
                 cache_period + WELL_KNOWN_REMEMBER_DOMAIN_HAD_VALID,
             )
 
-        return (result, cache_period)
+        return result, cache_period
 
     @defer.inlineCallbacks
     def _make_well_known_request(self, server_name, retry):
@@ -85,14 +85,14 @@ the function becomes the operation name for the span.
       return something_usual_and_useful
 
-Operation names can be explicitly set for functions by using
-``trace_using_operation_name``
+Operation names can be explicitly set for a function by passing the
+operation name to ``trace``
 
 .. code-block:: python
 
-   from synapse.logging.opentracing import trace_using_operation_name
+   from synapse.logging.opentracing import trace
 
-   @trace_using_operation_name("A *much* better operation name")
+   @trace(opname="a_better_operation_name")
    def interesting_badly_named_function(*args, **kwargs):
       # Does all kinds of cool and expected things
       return something_usual_and_useful
@ -641,66 +641,26 @@ def extract_text_map(carrier):
# Tracing decorators # Tracing decorators
def trace(func): def trace(func=None, opname=None):
""" """
Decorator to trace a function. Decorator to trace a function.
Sets the operation name to that of the function's. Sets the operation name to that of the function's or that given
as operation_name. See the module's doc string for usage
examples.
""" """
if opentracing is None:
return func
@wraps(func) def decorator(func):
def _trace_inner(self, *args, **kwargs):
if opentracing is None:
return func(self, *args, **kwargs)
scope = start_active_span(func.__name__)
scope.__enter__()
try:
result = func(self, *args, **kwargs)
if isinstance(result, defer.Deferred):
def call_back(result):
scope.__exit__(None, None, None)
return result
def err_back(result):
scope.span.set_tag(tags.ERROR, True)
scope.__exit__(None, None, None)
return result
result.addCallbacks(call_back, err_back)
else:
scope.__exit__(None, None, None)
return result
except Exception as e:
scope.__exit__(type(e), None, e.__traceback__)
raise
return _trace_inner
def trace_using_operation_name(operation_name):
"""Decorator to trace a function. Explicitely sets the operation_name."""
def trace(func):
"""
Decorator to trace a function.
Sets the operation name to that of the function's.
"""
if opentracing is None: if opentracing is None:
return func return func
_opname = opname if opname else func.__name__
@wraps(func) @wraps(func)
def _trace_inner(self, *args, **kwargs): def _trace_inner(self, *args, **kwargs):
if opentracing is None: if opentracing is None:
return func(self, *args, **kwargs) return func(self, *args, **kwargs)
scope = start_active_span(operation_name) scope = start_active_span(_opname)
scope.__enter__() scope.__enter__()
try: try:
@ -717,6 +677,7 @@ def trace_using_operation_name(operation_name):
return result return result
result.addCallbacks(call_back, err_back) result.addCallbacks(call_back, err_back)
else: else:
scope.__exit__(None, None, None) scope.__exit__(None, None, None)
@ -728,7 +689,10 @@ def trace_using_operation_name(operation_name):
return _trace_inner return _trace_inner
return trace if func:
return decorator(func)
else:
return decorator
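The rewritten trace decorator can be applied bare or with an explicit operation name. A generic, self-contained sketch of the same "decorator with optional arguments" pattern it uses (print statements stand in for starting and finishing a span):

from functools import wraps

def trace(func=None, opname=None):
    def decorator(func):
        _opname = opname if opname else func.__name__

        @wraps(func)
        def _trace_inner(*args, **kwargs):
            print("start span:", _opname)
            try:
                return func(*args, **kwargs)
            finally:
                print("finish span:", _opname)

        return _trace_inner

    # @trace passes the function directly; @trace(opname=...) passes None.
    return decorator(func) if func else decorator

@trace
def plainly_named():
    return 1

@trace(opname="a_better_operation_name")
def interesting_badly_named_function():
    return 2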
def tag_args(func):
@@ -101,7 +101,7 @@ class ModuleApi(object):
         )
         user_id = yield self.register_user(localpart, displayname, emails)
         _, access_token = yield self.register_device(user_id)
-        return (user_id, access_token)
+        return user_id, access_token
 
     def register_user(self, localpart, displayname=None, emails=[]):
         """Registers a new user with given localpart and optional displayname, emails.
@@ -472,11 +472,11 @@ class Notifier(object):
             joined_room_ids = yield self.store.get_rooms_for_user(user.to_string())
             if explicit_room_id:
                 if explicit_room_id in joined_room_ids:
-                    return ([explicit_room_id], True)
+                    return [explicit_room_id], True
                 if (yield self._is_world_readable(explicit_room_id)):
-                    return ([explicit_room_id], False)
+                    return [explicit_room_id], False
                 raise AuthError(403, "Non-joined access not allowed")
-            return (joined_room_ids, True)
+            return joined_room_ids, True
 
     @defer.inlineCallbacks
     def _is_world_readable(self, room_id):
@@ -134,7 +134,7 @@ class BulkPushRuleEvaluator(object):
         pl_event = auth_events.get(POWER_KEY)
 
-        return (pl_event.content if pl_event else {}, sender_level)
+        return pl_event.content if pl_event else {}, sender_level
 
     @defer.inlineCallbacks
     def action_for_event_by_user(self, event, context):
@@ -113,7 +113,7 @@ class ReplicationFederationSendEventsRestServlet(ReplicationEndpoint):
             event_and_contexts, backfilled
         )
 
-        return (200, {})
+        return 200, {}
 
 
 class ReplicationFederationSendEduRestServlet(ReplicationEndpoint):
@@ -156,7 +156,7 @@ class ReplicationFederationSendEduRestServlet(ReplicationEndpoint):
         result = yield self.registry.on_edu(edu_type, origin, edu_content)
 
-        return (200, result)
+        return 200, result
 
 
 class ReplicationGetQueryRestServlet(ReplicationEndpoint):
@@ -204,7 +204,7 @@ class ReplicationGetQueryRestServlet(ReplicationEndpoint):
         result = yield self.registry.on_query(query_type, args)
 
-        return (200, result)
+        return 200, result
 
 
 class ReplicationCleanRoomRestServlet(ReplicationEndpoint):
@@ -238,7 +238,7 @@ class ReplicationCleanRoomRestServlet(ReplicationEndpoint):
     def _handle_request(self, request, room_id):
         yield self.store.clean_room_for_join(room_id)
 
-        return (200, {})
+        return 200, {}
 
 
 def register_servlets(hs, http_server):
@@ -64,7 +64,7 @@ class RegisterDeviceReplicationServlet(ReplicationEndpoint):
             user_id, device_id, initial_display_name, is_guest
         )
 
-        return (200, {"device_id": device_id, "access_token": access_token})
+        return 200, {"device_id": device_id, "access_token": access_token}
 
 
 def register_servlets(hs, http_server):
@@ -83,7 +83,7 @@ class ReplicationRemoteJoinRestServlet(ReplicationEndpoint):
             remote_room_hosts, room_id, user_id, event_content
         )
 
-        return (200, {})
+        return 200, {}
 
 
 class ReplicationRemoteRejectInviteRestServlet(ReplicationEndpoint):
@@ -153,7 +153,7 @@ class ReplicationRemoteRejectInviteRestServlet(ReplicationEndpoint):
             yield self.store.locally_reject_invite(user_id, room_id)
             ret = {}
 
-        return (200, ret)
+        return 200, ret
 
 
 class ReplicationUserJoinedLeftRoomRestServlet(ReplicationEndpoint):
@@ -202,7 +202,7 @@ class ReplicationUserJoinedLeftRoomRestServlet(ReplicationEndpoint):
         else:
             raise Exception("Unrecognized change: %r", change)
 
-        return (200, {})
+        return 200, {}
 
 
 def register_servlets(hs, http_server):
@@ -90,7 +90,7 @@ class ReplicationRegisterServlet(ReplicationEndpoint):
             address=content["address"],
         )
 
-        return (200, {})
+        return 200, {}
 
 
 class ReplicationPostRegisterActionsServlet(ReplicationEndpoint):
@@ -106,7 +106,7 @@ class ReplicationPostRegisterActionsServlet(ReplicationEndpoint):
         self.registration_handler = hs.get_registration_handler()
 
     @staticmethod
-    def _serialize_payload(user_id, auth_result, access_token, bind_email, bind_msisdn):
+    def _serialize_payload(user_id, auth_result, access_token):
         """
         Args:
             user_id (str): The user ID that consented
@@ -114,17 +114,8 @@ class ReplicationPostRegisterActionsServlet(ReplicationEndpoint):
                 registered user.
             access_token (str|None): The access token of the newly logged in
                 device, or None if `inhibit_login` enabled.
-            bind_email (bool): Whether to bind the email with the identity
-                server
-            bind_msisdn (bool): Whether to bind the msisdn with the identity
-                server
         """
-        return {
-            "auth_result": auth_result,
-            "access_token": access_token,
-            "bind_email": bind_email,
-            "bind_msisdn": bind_msisdn,
-        }
+        return {"auth_result": auth_result, "access_token": access_token}
 
     @defer.inlineCallbacks
     def _handle_request(self, request, user_id):
@@ -132,18 +123,12 @@ class ReplicationPostRegisterActionsServlet(ReplicationEndpoint):
         auth_result = content["auth_result"]
         access_token = content["access_token"]
-        bind_email = content["bind_email"]
-        bind_msisdn = content["bind_msisdn"]
 
         yield self.registration_handler.post_registration_actions(
-            user_id=user_id,
-            auth_result=auth_result,
-            access_token=access_token,
-            bind_email=bind_email,
-            bind_msisdn=bind_msisdn,
+            user_id=user_id, auth_result=auth_result, access_token=access_token
         )
 
-        return (200, {})
+        return 200, {}
 
 
 def register_servlets(hs, http_server):
@@ -117,7 +117,7 @@ class ReplicationSendEventRestServlet(ReplicationEndpoint):
             requester, event, context, ratelimit=ratelimit, extra_users=extra_users
         )
 
-        return (200, {})
+        return 200, {}
 
 
 def register_servlets(hs, http_server):
@@ -33,6 +33,9 @@ class SlavedDeviceStore(EndToEndKeyWorkerStore, DeviceWorkerStore, BaseSlavedSto
         self._device_list_stream_cache = StreamChangeCache(
             "DeviceListStreamChangeCache", device_list_max
         )
+        self._user_signature_stream_cache = StreamChangeCache(
+            "UserSignatureStreamChangeCache", device_list_max
+        )
         self._device_list_federation_stream_cache = StreamChangeCache(
             "DeviceListFederationStreamChangeCache", device_list_max
         )
@@ -158,7 +158,7 @@ class Stream(object):
         updates, current_token = yield self.get_updates_since(self.last_token)
         self.last_token = current_token
 
-        return (updates, current_token)
+        return updates, current_token
 
     @defer.inlineCallbacks
     def get_updates_since(self, from_token):
@@ -172,14 +172,14 @@ class Stream(object):
             sent over the replication steam.
         """
         if from_token in ("NOW", "now"):
-            return ([], self.upto_token)
+            return [], self.upto_token
 
         current_token = self.upto_token
 
         from_token = int(from_token)
 
         if from_token == current_token:
-            return ([], current_token)
+            return [], current_token
 
         if self._LIMITED:
             rows = yield self.update_function(
@@ -198,7 +198,7 @@ class Stream(object):
         if self._LIMITED and len(updates) >= MAX_EVENTS_BEHIND:
             raise Exception("stream %s has fallen behind" % (self.NAME))
 
-        return (updates, current_token)
+        return updates, current_token
 
     def current_token(self):
         """Gets the current token of the underlying streams. Should be provided
@@ -41,7 +41,7 @@ from synapse.rest.admin._base import (
     assert_user_is_admin,
     historical_admin_path_patterns,
 )
-from synapse.rest.admin.media import register_servlets_for_media_repo
+from synapse.rest.admin.media import ListMediaInRoom, register_servlets_for_media_repo
 from synapse.rest.admin.purge_room_servlet import PurgeRoomServlet
 from synapse.rest.admin.server_notice_servlet import SendServerNoticeServlet
 from synapse.rest.admin.users import UserAdminServlet
@@ -69,7 +69,7 @@ class UsersRestServlet(RestServlet):
         ret = yield self.handlers.admin_handler.get_users()
 
-        return (200, ret)
+        return 200, ret
 
 
 class VersionServlet(RestServlet):
@@ -120,7 +120,7 @@ class UserRegisterServlet(RestServlet):
         nonce = self.hs.get_secrets().token_hex(64)
         self.nonces[nonce] = int(self.reactor.seconds())
-        return (200, {"nonce": nonce})
+        return 200, {"nonce": nonce}
 
     @defer.inlineCallbacks
     def on_POST(self, request):
@@ -212,7 +212,7 @@ class UserRegisterServlet(RestServlet):
         )
 
         result = yield register._create_registration_details(user_id, body)
-        return (200, result)
+        return 200, result
 
 
 class WhoisRestServlet(RestServlet):
@@ -237,7 +237,7 @@ class WhoisRestServlet(RestServlet):
         ret = yield self.handlers.admin_handler.get_whois(target_user)
 
-        return (200, ret)
+        return 200, ret
 
 
 class PurgeHistoryRestServlet(RestServlet):
@@ -322,7 +322,7 @@ class PurgeHistoryRestServlet(RestServlet):
             room_id, token, delete_local_events=delete_local_events
         )
 
-        return (200, {"purge_id": purge_id})
+        return 200, {"purge_id": purge_id}
 
 
 class PurgeHistoryStatusRestServlet(RestServlet):
@@ -347,7 +347,7 @@ class PurgeHistoryStatusRestServlet(RestServlet):
         if purge_status is None:
             raise NotFoundError("purge id '%s' not found" % purge_id)
 
-        return (200, purge_status.asdict())
+        return 200, purge_status.asdict()
 
 
 class DeactivateAccountRestServlet(RestServlet):
@@ -379,7 +379,7 @@ class DeactivateAccountRestServlet(RestServlet):
         else:
             id_server_unbind_result = "no-support"
 
-        return (200, {"id_server_unbind_result": id_server_unbind_result})
+        return 200, {"id_server_unbind_result": id_server_unbind_result}
 
 
 class ShutdownRoomRestServlet(RestServlet):
@@ -549,7 +549,7 @@ class ResetPasswordRestServlet(RestServlet):
         yield self._set_password_handler.set_password(
             target_user_id, new_password, requester
         )
-        return (200, {})
+        return 200, {}
 
 
 class GetUsersPaginatedRestServlet(RestServlet):
@@ -591,7 +591,7 @@ class GetUsersPaginatedRestServlet(RestServlet):
         logger.info("limit: %s, start: %s", limit, start)
 
         ret = yield self.handlers.admin_handler.get_users_paginate(order, start, limit)
-        return (200, ret)
+        return 200, ret
 
     @defer.inlineCallbacks
     def on_POST(self, request, target_user_id):
@@ -619,7 +619,7 @@ class GetUsersPaginatedRestServlet(RestServlet):
         logger.info("limit: %s, start: %s", limit, start)
 
         ret = yield self.handlers.admin_handler.get_users_paginate(order, start, limit)
-        return (200, ret)
+        return 200, ret
 
 
 class SearchUsersRestServlet(RestServlet):
@@ -662,7 +662,7 @@ class SearchUsersRestServlet(RestServlet):
         logger.info("term: %s ", term)
         ret = yield self.handlers.admin_handler.search_users(term)
-        return (200, ret)
+        return 200, ret
 
 
 class DeleteGroupAdminRestServlet(RestServlet):
@@ -685,7 +685,7 @@ class DeleteGroupAdminRestServlet(RestServlet):
             raise SynapseError(400, "Can only delete local groups")
 
         yield self.group_server.delete_group(group_id, requester.user.to_string())
return (200, {}) return 200, {}
class AccountValidityRenewServlet(RestServlet): class AccountValidityRenewServlet(RestServlet):
@ -716,7 +716,7 @@ class AccountValidityRenewServlet(RestServlet):
) )
res = {"expiration_ts": expiration_ts} res = {"expiration_ts": expiration_ts}
return (200, res) return 200, res
######################################################################################## ########################################################################################
@@ -761,9 +761,12 @@ def register_servlets_for_client_rest_resource(hs, http_server):
     DeleteGroupAdminRestServlet(hs).register(http_server)
     AccountValidityRenewServlet(hs).register(http_server)

-    # Load the media repo ones if we're using them.
+    # Load the media repo ones if we're using them. Otherwise load the servlets which
+    # don't need a media repo (typically readonly admin APIs).
     if hs.config.can_load_media_repo:
         register_servlets_for_media_repo(hs, http_server)
+    else:
+        ListMediaInRoom(hs).register(http_server)

     # don't add more things here: new servlets should only be exposed on
     # /_synapse/admin so should not go here. Instead register them in AdminRestResource.


@@ -49,7 +49,7 @@ class QuarantineMediaInRoom(RestServlet):
             room_id, requester.user.to_string()
         )

-        return (200, {"num_quarantined": num_quarantined})
+        return 200, {"num_quarantined": num_quarantined}


 class ListMediaInRoom(RestServlet):
@@ -60,6 +60,7 @@ class ListMediaInRoom(RestServlet):
     def __init__(self, hs):
         self.store = hs.get_datastore()
+        self.auth = hs.get_auth()

     @defer.inlineCallbacks
     def on_GET(self, request, room_id):
@@ -70,7 +71,7 @@ class ListMediaInRoom(RestServlet):
         local_mxcs, remote_mxcs = yield self.store.get_media_mxcs_in_room(room_id)

-        return (200, {"local": local_mxcs, "remote": remote_mxcs})
+        return 200, {"local": local_mxcs, "remote": remote_mxcs}


 class PurgeMediaCacheRestServlet(RestServlet):
@@ -89,7 +90,7 @@ class PurgeMediaCacheRestServlet(RestServlet):
         ret = yield self.media_repository.delete_old_remote_media(before_ts)

-        return (200, ret)
+        return 200, ret


 def register_servlets_for_media_repo(hs, http_server):
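With the change above, ListMediaInRoom can also be registered on homeservers that do not run the media repository (see the registration hunk earlier in this diff). A rough client-side sketch of calling it; the URL prefix, token and room ID below are assumptions for illustration, not taken from this diff:

import requests

# Assumed values -- none of these appear in the diff itself.
HOMESERVER = "https://homeserver.example"
ADMIN_TOKEN = "admin_access_token"
ROOM_ID = "!someroom:example.com"

# Per the hunk above, the servlet responds with
# {"local": [...], "remote": [...]} listing media MXC URIs seen in the room.
resp = requests.get(
    f"{HOMESERVER}/_synapse/admin/v1/room/{ROOM_ID}/media",
    headers={"Authorization": f"Bearer {ADMIN_TOKEN}"},
)
print(resp.status_code, resp.json())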


@ -54,4 +54,4 @@ class PurgeRoomServlet(RestServlet):
await self.pagination_handler.purge_room(body["room_id"]) await self.pagination_handler.purge_room(body["room_id"])
return (200, {}) return 200, {}


@ -92,7 +92,7 @@ class SendServerNoticeServlet(RestServlet):
event_content=body["content"], event_content=body["content"],
) )
return (200, {"event_id": event.event_id}) return 200, {"event_id": event.event_id}
def on_PUT(self, request, txn_id): def on_PUT(self, request, txn_id):
return self.txns.fetch_or_execute_request( return self.txns.fetch_or_execute_request(


@ -71,7 +71,7 @@ class UserAdminServlet(RestServlet):
is_admin = yield self.handlers.admin_handler.get_user_server_admin(target_user) is_admin = yield self.handlers.admin_handler.get_user_server_admin(target_user)
is_admin = bool(is_admin) is_admin = bool(is_admin)
return (200, {"admin": is_admin}) return 200, {"admin": is_admin}
@defer.inlineCallbacks @defer.inlineCallbacks
def on_PUT(self, request, user_id): def on_PUT(self, request, user_id):
@ -97,4 +97,4 @@ class UserAdminServlet(RestServlet):
target_user, set_admin_to target_user, set_admin_to
) )
return (200, {}) return 200, {}


@ -54,7 +54,7 @@ class ClientDirectoryServer(RestServlet):
dir_handler = self.handlers.directory_handler dir_handler = self.handlers.directory_handler
res = yield dir_handler.get_association(room_alias) res = yield dir_handler.get_association(room_alias)
return (200, res) return 200, res
@defer.inlineCallbacks @defer.inlineCallbacks
def on_PUT(self, request, room_alias): def on_PUT(self, request, room_alias):
@ -87,7 +87,7 @@ class ClientDirectoryServer(RestServlet):
requester, room_alias, room_id, servers requester, room_alias, room_id, servers
) )
return (200, {}) return 200, {}
@defer.inlineCallbacks @defer.inlineCallbacks
def on_DELETE(self, request, room_alias): def on_DELETE(self, request, room_alias):
@ -102,7 +102,7 @@ class ClientDirectoryServer(RestServlet):
service.url, service.url,
room_alias.to_string(), room_alias.to_string(),
) )
return (200, {}) return 200, {}
except InvalidClientCredentialsError: except InvalidClientCredentialsError:
# fallback to default user behaviour if they aren't an AS # fallback to default user behaviour if they aren't an AS
pass pass
@ -118,7 +118,7 @@ class ClientDirectoryServer(RestServlet):
"User %s deleted alias %s", user.to_string(), room_alias.to_string() "User %s deleted alias %s", user.to_string(), room_alias.to_string()
) )
return (200, {}) return 200, {}
class ClientDirectoryListServer(RestServlet): class ClientDirectoryListServer(RestServlet):
@ -136,7 +136,7 @@ class ClientDirectoryListServer(RestServlet):
if room is None: if room is None:
raise NotFoundError("Unknown room") raise NotFoundError("Unknown room")
return (200, {"visibility": "public" if room["is_public"] else "private"}) return 200, {"visibility": "public" if room["is_public"] else "private"}
@defer.inlineCallbacks @defer.inlineCallbacks
def on_PUT(self, request, room_id): def on_PUT(self, request, room_id):
@ -149,7 +149,7 @@ class ClientDirectoryListServer(RestServlet):
requester, room_id, visibility requester, room_id, visibility
) )
return (200, {}) return 200, {}
@defer.inlineCallbacks @defer.inlineCallbacks
def on_DELETE(self, request, room_id): def on_DELETE(self, request, room_id):
@ -159,7 +159,7 @@ class ClientDirectoryListServer(RestServlet):
requester, room_id, "private" requester, room_id, "private"
) )
return (200, {}) return 200, {}
class ClientAppserviceDirectoryListServer(RestServlet): class ClientAppserviceDirectoryListServer(RestServlet):
@ -193,4 +193,4 @@ class ClientAppserviceDirectoryListServer(RestServlet):
requester.app_service.id, network_id, room_id, visibility requester.app_service.id, network_id, room_id, visibility
) )
return (200, {}) return 200, {}


@ -67,10 +67,10 @@ class EventStreamRestServlet(RestServlet):
is_guest=is_guest, is_guest=is_guest,
) )
return (200, chunk) return 200, chunk
def on_OPTIONS(self, request): def on_OPTIONS(self, request):
return (200, {}) return 200, {}
# TODO: Unit test gets, with and without auth, with different kinds of events. # TODO: Unit test gets, with and without auth, with different kinds of events.
@ -91,9 +91,9 @@ class EventRestServlet(RestServlet):
time_now = self.clock.time_msec() time_now = self.clock.time_msec()
if event: if event:
event = yield self._event_serializer.serialize_event(event, time_now) event = yield self._event_serializer.serialize_event(event, time_now)
return (200, event) return 200, event
else: else:
return (404, "Event not found.") return 404, "Event not found."
def register_servlets(hs, http_server): def register_servlets(hs, http_server):


@ -42,7 +42,7 @@ class InitialSyncRestServlet(RestServlet):
include_archived=include_archived, include_archived=include_archived,
) )
return (200, content) return 200, content
def register_servlets(hs, http_server): def register_servlets(hs, http_server):


@ -121,10 +121,10 @@ class LoginRestServlet(RestServlet):
({"type": t} for t in self.auth_handler.get_supported_login_types()) ({"type": t} for t in self.auth_handler.get_supported_login_types())
) )
return (200, {"flows": flows}) return 200, {"flows": flows}
def on_OPTIONS(self, request): def on_OPTIONS(self, request):
return (200, {}) return 200, {}
@defer.inlineCallbacks @defer.inlineCallbacks
def on_POST(self, request): def on_POST(self, request):
@ -152,7 +152,7 @@ class LoginRestServlet(RestServlet):
well_known_data = self._well_known_builder.get_well_known() well_known_data = self._well_known_builder.get_well_known()
if well_known_data: if well_known_data:
result["well_known"] = well_known_data result["well_known"] = well_known_data
return (200, result) return 200, result
@defer.inlineCallbacks @defer.inlineCallbacks
def _do_other_login(self, login_submission): def _do_other_login(self, login_submission):


@ -33,7 +33,7 @@ class LogoutRestServlet(RestServlet):
self._device_handler = hs.get_device_handler() self._device_handler = hs.get_device_handler()
def on_OPTIONS(self, request): def on_OPTIONS(self, request):
return (200, {}) return 200, {}
@defer.inlineCallbacks @defer.inlineCallbacks
def on_POST(self, request): def on_POST(self, request):
@ -49,7 +49,7 @@ class LogoutRestServlet(RestServlet):
requester.user.to_string(), requester.device_id requester.user.to_string(), requester.device_id
) )
return (200, {}) return 200, {}
class LogoutAllRestServlet(RestServlet): class LogoutAllRestServlet(RestServlet):
@ -62,7 +62,7 @@ class LogoutAllRestServlet(RestServlet):
self._device_handler = hs.get_device_handler() self._device_handler = hs.get_device_handler()
def on_OPTIONS(self, request): def on_OPTIONS(self, request):
return (200, {}) return 200, {}
@defer.inlineCallbacks @defer.inlineCallbacks
def on_POST(self, request): def on_POST(self, request):
@ -75,7 +75,7 @@ class LogoutAllRestServlet(RestServlet):
# .. and then delete any access tokens which weren't associated with # .. and then delete any access tokens which weren't associated with
# devices. # devices.
yield self._auth_handler.delete_access_tokens_for_user(user_id) yield self._auth_handler.delete_access_tokens_for_user(user_id)
return (200, {}) return 200, {}
def register_servlets(hs, http_server): def register_servlets(hs, http_server):


@ -56,7 +56,7 @@ class PresenceStatusRestServlet(RestServlet):
state = yield self.presence_handler.get_state(target_user=user) state = yield self.presence_handler.get_state(target_user=user)
state = format_user_presence_state(state, self.clock.time_msec()) state = format_user_presence_state(state, self.clock.time_msec())
return (200, state) return 200, state
@defer.inlineCallbacks @defer.inlineCallbacks
def on_PUT(self, request, user_id): def on_PUT(self, request, user_id):
@ -88,10 +88,10 @@ class PresenceStatusRestServlet(RestServlet):
if self.hs.config.use_presence: if self.hs.config.use_presence:
yield self.presence_handler.set_state(user, state) yield self.presence_handler.set_state(user, state)
return (200, {}) return 200, {}
def on_OPTIONS(self, request): def on_OPTIONS(self, request):
return (200, {}) return 200, {}
def register_servlets(hs, http_server): def register_servlets(hs, http_server):


@ -48,7 +48,7 @@ class ProfileDisplaynameRestServlet(RestServlet):
if displayname is not None: if displayname is not None:
ret["displayname"] = displayname ret["displayname"] = displayname
return (200, ret) return 200, ret
@defer.inlineCallbacks @defer.inlineCallbacks
def on_PUT(self, request, user_id): def on_PUT(self, request, user_id):
@ -61,14 +61,14 @@ class ProfileDisplaynameRestServlet(RestServlet):
try: try:
new_name = content["displayname"] new_name = content["displayname"]
except Exception: except Exception:
return (400, "Unable to parse name") return 400, "Unable to parse name"
yield self.profile_handler.set_displayname(user, requester, new_name, is_admin) yield self.profile_handler.set_displayname(user, requester, new_name, is_admin)
return (200, {}) return 200, {}
def on_OPTIONS(self, request, user_id): def on_OPTIONS(self, request, user_id):
return (200, {}) return 200, {}
class ProfileAvatarURLRestServlet(RestServlet): class ProfileAvatarURLRestServlet(RestServlet):
@ -98,7 +98,7 @@ class ProfileAvatarURLRestServlet(RestServlet):
if avatar_url is not None: if avatar_url is not None:
ret["avatar_url"] = avatar_url ret["avatar_url"] = avatar_url
return (200, ret) return 200, ret
@defer.inlineCallbacks @defer.inlineCallbacks
def on_PUT(self, request, user_id): def on_PUT(self, request, user_id):
@ -110,14 +110,14 @@ class ProfileAvatarURLRestServlet(RestServlet):
try: try:
new_name = content["avatar_url"] new_name = content["avatar_url"]
except Exception: except Exception:
return (400, "Unable to parse name") return 400, "Unable to parse name"
yield self.profile_handler.set_avatar_url(user, requester, new_name, is_admin) yield self.profile_handler.set_avatar_url(user, requester, new_name, is_admin)
return (200, {}) return 200, {}
def on_OPTIONS(self, request, user_id): def on_OPTIONS(self, request, user_id):
return (200, {}) return 200, {}
class ProfileRestServlet(RestServlet): class ProfileRestServlet(RestServlet):
@ -150,7 +150,7 @@ class ProfileRestServlet(RestServlet):
if avatar_url is not None: if avatar_url is not None:
ret["avatar_url"] = avatar_url ret["avatar_url"] = avatar_url
return (200, ret) return 200, ret
def register_servlets(hs, http_server): def register_servlets(hs, http_server):


@ -69,7 +69,7 @@ class PushRuleRestServlet(RestServlet):
if "attr" in spec: if "attr" in spec:
yield self.set_rule_attr(user_id, spec, content) yield self.set_rule_attr(user_id, spec, content)
self.notify_user(user_id) self.notify_user(user_id)
return (200, {}) return 200, {}
if spec["rule_id"].startswith("."): if spec["rule_id"].startswith("."):
# Rule ids starting with '.' are reserved for server default rules. # Rule ids starting with '.' are reserved for server default rules.
@ -106,7 +106,7 @@ class PushRuleRestServlet(RestServlet):
except RuleNotFoundException as e: except RuleNotFoundException as e:
raise SynapseError(400, str(e)) raise SynapseError(400, str(e))
return (200, {}) return 200, {}
@defer.inlineCallbacks @defer.inlineCallbacks
def on_DELETE(self, request, path): def on_DELETE(self, request, path):
@ -123,7 +123,7 @@ class PushRuleRestServlet(RestServlet):
try: try:
yield self.store.delete_push_rule(user_id, namespaced_rule_id) yield self.store.delete_push_rule(user_id, namespaced_rule_id)
self.notify_user(user_id) self.notify_user(user_id)
return (200, {}) return 200, {}
except StoreError as e: except StoreError as e:
if e.code == 404: if e.code == 404:
raise NotFoundError() raise NotFoundError()
@ -151,10 +151,10 @@ class PushRuleRestServlet(RestServlet):
) )
if path[0] == "": if path[0] == "":
return (200, rules) return 200, rules
elif path[0] == "global": elif path[0] == "global":
result = _filter_ruleset_with_path(rules["global"], path[1:]) result = _filter_ruleset_with_path(rules["global"], path[1:])
return (200, result) return 200, result
else: else:
raise UnrecognizedRequestError() raise UnrecognizedRequestError()


@ -62,7 +62,7 @@ class PushersRestServlet(RestServlet):
if k not in allowed_keys: if k not in allowed_keys:
del p[k] del p[k]
return (200, {"pushers": pushers}) return 200, {"pushers": pushers}
def on_OPTIONS(self, _): def on_OPTIONS(self, _):
return 200, {} return 200, {}
@ -94,7 +94,7 @@ class PushersSetRestServlet(RestServlet):
yield self.pusher_pool.remove_pusher( yield self.pusher_pool.remove_pusher(
content["app_id"], content["pushkey"], user_id=user.to_string() content["app_id"], content["pushkey"], user_id=user.to_string()
) )
return (200, {}) return 200, {}
assert_params_in_dict( assert_params_in_dict(
content, content,
@ -143,7 +143,7 @@ class PushersSetRestServlet(RestServlet):
self.notifier.on_new_replication_data() self.notifier.on_new_replication_data()
return (200, {}) return 200, {}
def on_OPTIONS(self, _): def on_OPTIONS(self, _):
return 200, {} return 200, {}


@ -91,14 +91,14 @@ class RoomCreateRestServlet(TransactionRestServlet):
requester, self.get_room_config(request) requester, self.get_room_config(request)
) )
return (200, info) return 200, info
def get_room_config(self, request): def get_room_config(self, request):
user_supplied_config = parse_json_object_from_request(request) user_supplied_config = parse_json_object_from_request(request)
return user_supplied_config return user_supplied_config
def on_OPTIONS(self, request): def on_OPTIONS(self, request):
return (200, {}) return 200, {}
# TODO: Needs unit testing for generic events # TODO: Needs unit testing for generic events
@ -173,9 +173,9 @@ class RoomStateEventRestServlet(TransactionRestServlet):
if format == "event": if format == "event":
event = format_event_for_client_v2(data.get_dict()) event = format_event_for_client_v2(data.get_dict())
return (200, event) return 200, event
elif format == "content": elif format == "content":
return (200, data.get_dict()["content"]) return 200, data.get_dict()["content"]
@defer.inlineCallbacks @defer.inlineCallbacks
def on_PUT(self, request, room_id, event_type, state_key, txn_id=None): def on_PUT(self, request, room_id, event_type, state_key, txn_id=None):
@ -210,7 +210,7 @@ class RoomStateEventRestServlet(TransactionRestServlet):
ret = {} ret = {}
if event: if event:
ret = {"event_id": event.event_id} ret = {"event_id": event.event_id}
return (200, ret) return 200, ret
# TODO: Needs unit testing for generic events + feedback # TODO: Needs unit testing for generic events + feedback
@ -244,10 +244,10 @@ class RoomSendEventRestServlet(TransactionRestServlet):
requester, event_dict, txn_id=txn_id requester, event_dict, txn_id=txn_id
) )
return (200, {"event_id": event.event_id}) return 200, {"event_id": event.event_id}
def on_GET(self, request, room_id, event_type, txn_id): def on_GET(self, request, room_id, event_type, txn_id):
return (200, "Not implemented") return 200, "Not implemented"
def on_PUT(self, request, room_id, event_type, txn_id): def on_PUT(self, request, room_id, event_type, txn_id):
return self.txns.fetch_or_execute_request( return self.txns.fetch_or_execute_request(
@ -307,7 +307,7 @@ class JoinRoomAliasServlet(TransactionRestServlet):
third_party_signed=content.get("third_party_signed", None), third_party_signed=content.get("third_party_signed", None),
) )
return (200, {"room_id": room_id}) return 200, {"room_id": room_id}
def on_PUT(self, request, room_identifier, txn_id): def on_PUT(self, request, room_identifier, txn_id):
return self.txns.fetch_or_execute_request( return self.txns.fetch_or_execute_request(
@ -360,7 +360,7 @@ class PublicRoomListRestServlet(TransactionRestServlet):
limit=limit, since_token=since_token limit=limit, since_token=since_token
) )
return (200, data) return 200, data
@defer.inlineCallbacks @defer.inlineCallbacks
def on_POST(self, request): def on_POST(self, request):
@ -405,7 +405,7 @@ class PublicRoomListRestServlet(TransactionRestServlet):
network_tuple=network_tuple, network_tuple=network_tuple,
) )
return (200, data) return 200, data
# TODO: Needs unit testing # TODO: Needs unit testing
@ -456,7 +456,7 @@ class RoomMemberListRestServlet(RestServlet):
continue continue
chunk.append(event) chunk.append(event)
return (200, {"chunk": chunk}) return 200, {"chunk": chunk}
# deprecated in favour of /members?membership=join? # deprecated in favour of /members?membership=join?
@ -477,7 +477,7 @@ class JoinedRoomMemberListRestServlet(RestServlet):
requester, room_id requester, room_id
) )
return (200, {"joined": users_with_profile}) return 200, {"joined": users_with_profile}
# TODO: Needs better unit testing # TODO: Needs better unit testing
@ -510,7 +510,7 @@ class RoomMessageListRestServlet(RestServlet):
event_filter=event_filter, event_filter=event_filter,
) )
return (200, msgs) return 200, msgs
# TODO: Needs unit testing # TODO: Needs unit testing
@ -531,7 +531,7 @@ class RoomStateRestServlet(RestServlet):
user_id=requester.user.to_string(), user_id=requester.user.to_string(),
is_guest=requester.is_guest, is_guest=requester.is_guest,
) )
return (200, events) return 200, events
# TODO: Needs unit testing # TODO: Needs unit testing
@ -550,7 +550,7 @@ class RoomInitialSyncRestServlet(RestServlet):
content = yield self.initial_sync_handler.room_initial_sync( content = yield self.initial_sync_handler.room_initial_sync(
room_id=room_id, requester=requester, pagin_config=pagination_config room_id=room_id, requester=requester, pagin_config=pagination_config
) )
return (200, content) return 200, content
class RoomEventServlet(RestServlet): class RoomEventServlet(RestServlet):
@ -581,7 +581,7 @@ class RoomEventServlet(RestServlet):
time_now = self.clock.time_msec() time_now = self.clock.time_msec()
if event: if event:
event = yield self._event_serializer.serialize_event(event, time_now) event = yield self._event_serializer.serialize_event(event, time_now)
return (200, event) return 200, event
return SynapseError(404, "Event not found.", errcode=Codes.NOT_FOUND) return SynapseError(404, "Event not found.", errcode=Codes.NOT_FOUND)
@ -633,7 +633,7 @@ class RoomEventContextServlet(RestServlet):
results["state"], time_now results["state"], time_now
) )
return (200, results) return 200, results
class RoomForgetRestServlet(TransactionRestServlet): class RoomForgetRestServlet(TransactionRestServlet):
@ -652,7 +652,7 @@ class RoomForgetRestServlet(TransactionRestServlet):
yield self.room_member_handler.forget(user=requester.user, room_id=room_id) yield self.room_member_handler.forget(user=requester.user, room_id=room_id)
return (200, {}) return 200, {}
def on_PUT(self, request, room_id, txn_id): def on_PUT(self, request, room_id, txn_id):
return self.txns.fetch_or_execute_request( return self.txns.fetch_or_execute_request(
@@ -702,8 +702,7 @@ class RoomMembershipRestServlet(TransactionRestServlet):
                 requester,
                 txn_id,
             )
-            return (200, {})
-            return
+            return 200, {}

         target = requester.user
         if membership_action in ["invite", "ban", "unban", "kick"]:
@@ -729,7 +728,7 @@ class RoomMembershipRestServlet(TransactionRestServlet):
         if membership_action == "join":
             return_value["room_id"] = room_id

-        return (200, return_value)
+        return 200, return_value

     def _has_3pid_invite_keys(self, content):
         for key in {"id_server", "medium", "address"}:
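The first hunk above also deletes a bare "return" that had become unreachable: once the old defer.returnValue(...) idiom is rewritten as a plain value return, nothing after it in the generator can execute. A minimal sketch of the before/after, assuming Twisted; the function names are illustrative only:

from twisted.internet import defer


@defer.inlineCallbacks
def on_post_old_style():
    yield defer.succeed(None)
    defer.returnValue((200, {}))
    return  # unreachable: returnValue() raises to end the generator


@defer.inlineCallbacks
def on_post_new_style():
    yield defer.succeed(None)
    return 200, {}  # a trailing bare "return" here would be dead code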
@ -771,7 +770,7 @@ class RoomRedactEventRestServlet(TransactionRestServlet):
txn_id=txn_id, txn_id=txn_id,
) )
return (200, {"event_id": event.event_id}) return 200, {"event_id": event.event_id}
def on_PUT(self, request, room_id, event_id, txn_id): def on_PUT(self, request, room_id, event_id, txn_id):
return self.txns.fetch_or_execute_request( return self.txns.fetch_or_execute_request(
@ -816,7 +815,7 @@ class RoomTypingRestServlet(RestServlet):
target_user=target_user, auth_user=requester.user, room_id=room_id target_user=target_user, auth_user=requester.user, room_id=room_id
) )
return (200, {}) return 200, {}
class SearchRestServlet(RestServlet): class SearchRestServlet(RestServlet):
@ -838,7 +837,7 @@ class SearchRestServlet(RestServlet):
requester.user, content, batch requester.user, content, batch
) )
return (200, results) return 200, results
class JoinedRoomsRestServlet(RestServlet): class JoinedRoomsRestServlet(RestServlet):
@ -854,7 +853,7 @@ class JoinedRoomsRestServlet(RestServlet):
requester = yield self.auth.get_user_by_req(request, allow_guest=True) requester = yield self.auth.get_user_by_req(request, allow_guest=True)
room_ids = yield self.store.get_rooms_for_user(requester.user.to_string()) room_ids = yield self.store.get_rooms_for_user(requester.user.to_string())
return (200, {"joined_rooms": list(room_ids)}) return 200, {"joined_rooms": list(room_ids)}
def register_txn_path(servlet, regex_string, http_server, with_get=False): def register_txn_path(servlet, regex_string, http_server, with_get=False):


@ -60,7 +60,7 @@ class VoipRestServlet(RestServlet):
password = turnPassword password = turnPassword
else: else:
return (200, {}) return 200, {}
return ( return (
200, 200,
@ -73,7 +73,7 @@ class VoipRestServlet(RestServlet):
) )
def on_OPTIONS(self, request): def on_OPTIONS(self, request):
return (200, {}) return 200, {}
def register_servlets(hs, http_server): def register_servlets(hs, http_server):


@ -117,7 +117,7 @@ class EmailPasswordRequestTokenRestServlet(RestServlet):
# Wrap the session id in a JSON object # Wrap the session id in a JSON object
ret = {"sid": sid} ret = {"sid": sid}
return (200, ret) return 200, ret
@defer.inlineCallbacks @defer.inlineCallbacks
def send_password_reset(self, email, client_secret, send_attempt, next_link=None): def send_password_reset(self, email, client_secret, send_attempt, next_link=None):
@ -221,7 +221,7 @@ class MsisdnPasswordRequestTokenRestServlet(RestServlet):
raise SynapseError(400, "MSISDN not found", Codes.THREEPID_NOT_FOUND) raise SynapseError(400, "MSISDN not found", Codes.THREEPID_NOT_FOUND)
ret = yield self.identity_handler.requestMsisdnToken(**body) ret = yield self.identity_handler.requestMsisdnToken(**body)
return (200, ret) return 200, ret
class PasswordResetSubmitTokenServlet(RestServlet): class PasswordResetSubmitTokenServlet(RestServlet):
@ -330,7 +330,7 @@ class PasswordResetSubmitTokenServlet(RestServlet):
) )
response_code = 200 if valid else 400 response_code = 200 if valid else 400
return (response_code, {"success": valid}) return response_code, {"success": valid}
class PasswordRestServlet(RestServlet): class PasswordRestServlet(RestServlet):
@ -399,7 +399,7 @@ class PasswordRestServlet(RestServlet):
yield self._set_password_handler.set_password(user_id, new_password, requester) yield self._set_password_handler.set_password(user_id, new_password, requester)
return (200, {}) return 200, {}
def on_OPTIONS(self, _): def on_OPTIONS(self, _):
return 200, {} return 200, {}
@ -434,7 +434,7 @@ class DeactivateAccountRestServlet(RestServlet):
yield self._deactivate_account_handler.deactivate_account( yield self._deactivate_account_handler.deactivate_account(
requester.user.to_string(), erase requester.user.to_string(), erase
) )
return (200, {}) return 200, {}
yield self.auth_handler.validate_user_via_ui_auth( yield self.auth_handler.validate_user_via_ui_auth(
requester, body, self.hs.get_ip_from_request(request) requester, body, self.hs.get_ip_from_request(request)
@ -447,7 +447,7 @@ class DeactivateAccountRestServlet(RestServlet):
else: else:
id_server_unbind_result = "no-support" id_server_unbind_result = "no-support"
return (200, {"id_server_unbind_result": id_server_unbind_result}) return 200, {"id_server_unbind_result": id_server_unbind_result}
class EmailThreepidRequestTokenRestServlet(RestServlet): class EmailThreepidRequestTokenRestServlet(RestServlet):
@ -481,7 +481,7 @@ class EmailThreepidRequestTokenRestServlet(RestServlet):
raise SynapseError(400, "Email is already in use", Codes.THREEPID_IN_USE) raise SynapseError(400, "Email is already in use", Codes.THREEPID_IN_USE)
ret = yield self.identity_handler.requestEmailToken(**body) ret = yield self.identity_handler.requestEmailToken(**body)
return (200, ret) return 200, ret
class MsisdnThreepidRequestTokenRestServlet(RestServlet): class MsisdnThreepidRequestTokenRestServlet(RestServlet):
@ -516,7 +516,7 @@ class MsisdnThreepidRequestTokenRestServlet(RestServlet):
raise SynapseError(400, "MSISDN is already in use", Codes.THREEPID_IN_USE) raise SynapseError(400, "MSISDN is already in use", Codes.THREEPID_IN_USE)
ret = yield self.identity_handler.requestMsisdnToken(**body) ret = yield self.identity_handler.requestMsisdnToken(**body)
return (200, ret) return 200, ret
class ThreepidRestServlet(RestServlet): class ThreepidRestServlet(RestServlet):
@ -536,7 +536,7 @@ class ThreepidRestServlet(RestServlet):
threepids = yield self.datastore.user_get_threepids(requester.user.to_string()) threepids = yield self.datastore.user_get_threepids(requester.user.to_string())
return (200, {"threepids": threepids}) return 200, {"threepids": threepids}
@defer.inlineCallbacks @defer.inlineCallbacks
def on_POST(self, request): def on_POST(self, request):
@ -568,7 +568,7 @@ class ThreepidRestServlet(RestServlet):
logger.debug("Binding threepid %s to %s", threepid, user_id) logger.debug("Binding threepid %s to %s", threepid, user_id)
yield self.identity_handler.bind_threepid(threePidCreds, user_id) yield self.identity_handler.bind_threepid(threePidCreds, user_id)
return (200, {}) return 200, {}
class ThreepidDeleteRestServlet(RestServlet): class ThreepidDeleteRestServlet(RestServlet):
@ -603,7 +603,7 @@ class ThreepidDeleteRestServlet(RestServlet):
else: else:
id_server_unbind_result = "no-support" id_server_unbind_result = "no-support"
return (200, {"id_server_unbind_result": id_server_unbind_result}) return 200, {"id_server_unbind_result": id_server_unbind_result}
class WhoamiRestServlet(RestServlet): class WhoamiRestServlet(RestServlet):
@ -617,7 +617,7 @@ class WhoamiRestServlet(RestServlet):
def on_GET(self, request): def on_GET(self, request):
requester = yield self.auth.get_user_by_req(request) requester = yield self.auth.get_user_by_req(request)
return (200, {"user_id": requester.user.to_string()}) return 200, {"user_id": requester.user.to_string()}
def register_servlets(hs, http_server): def register_servlets(hs, http_server):


@ -55,7 +55,7 @@ class AccountDataServlet(RestServlet):
self.notifier.on_new_event("account_data_key", max_id, users=[user_id]) self.notifier.on_new_event("account_data_key", max_id, users=[user_id])
return (200, {}) return 200, {}
@defer.inlineCallbacks @defer.inlineCallbacks
def on_GET(self, request, user_id, account_data_type): def on_GET(self, request, user_id, account_data_type):
@ -70,7 +70,7 @@ class AccountDataServlet(RestServlet):
if event is None: if event is None:
raise NotFoundError("Account data not found") raise NotFoundError("Account data not found")
return (200, event) return 200, event
class RoomAccountDataServlet(RestServlet): class RoomAccountDataServlet(RestServlet):
@ -112,7 +112,7 @@ class RoomAccountDataServlet(RestServlet):
self.notifier.on_new_event("account_data_key", max_id, users=[user_id]) self.notifier.on_new_event("account_data_key", max_id, users=[user_id])
return (200, {}) return 200, {}
@defer.inlineCallbacks @defer.inlineCallbacks
def on_GET(self, request, user_id, room_id, account_data_type): def on_GET(self, request, user_id, room_id, account_data_type):
@ -127,7 +127,7 @@ class RoomAccountDataServlet(RestServlet):
if event is None: if event is None:
raise NotFoundError("Room account data not found") raise NotFoundError("Room account data not found")
return (200, event) return 200, event
def register_servlets(hs, http_server): def register_servlets(hs, http_server):


@ -58,7 +58,7 @@ class CapabilitiesRestServlet(RestServlet):
"m.change_password": {"enabled": change_password}, "m.change_password": {"enabled": change_password},
} }
} }
return (200, response) return 200, response
def register_servlets(hs, http_server): def register_servlets(hs, http_server):


@ -48,7 +48,7 @@ class DevicesRestServlet(RestServlet):
devices = yield self.device_handler.get_devices_by_user( devices = yield self.device_handler.get_devices_by_user(
requester.user.to_string() requester.user.to_string()
) )
return (200, {"devices": devices}) return 200, {"devices": devices}
class DeleteDevicesRestServlet(RestServlet): class DeleteDevicesRestServlet(RestServlet):
@ -91,7 +91,7 @@ class DeleteDevicesRestServlet(RestServlet):
yield self.device_handler.delete_devices( yield self.device_handler.delete_devices(
requester.user.to_string(), body["devices"] requester.user.to_string(), body["devices"]
) )
return (200, {}) return 200, {}
class DeviceRestServlet(RestServlet): class DeviceRestServlet(RestServlet):
@ -114,7 +114,7 @@ class DeviceRestServlet(RestServlet):
device = yield self.device_handler.get_device( device = yield self.device_handler.get_device(
requester.user.to_string(), device_id requester.user.to_string(), device_id
) )
return (200, device) return 200, device
@interactive_auth_handler @interactive_auth_handler
@defer.inlineCallbacks @defer.inlineCallbacks
@ -137,7 +137,7 @@ class DeviceRestServlet(RestServlet):
) )
yield self.device_handler.delete_device(requester.user.to_string(), device_id) yield self.device_handler.delete_device(requester.user.to_string(), device_id)
return (200, {}) return 200, {}
@defer.inlineCallbacks @defer.inlineCallbacks
def on_PUT(self, request, device_id): def on_PUT(self, request, device_id):
@ -147,7 +147,7 @@ class DeviceRestServlet(RestServlet):
yield self.device_handler.update_device( yield self.device_handler.update_device(
requester.user.to_string(), device_id, body requester.user.to_string(), device_id, body
) )
return (200, {}) return 200, {}
def register_servlets(hs, http_server): def register_servlets(hs, http_server):


@ -56,7 +56,7 @@ class GetFilterRestServlet(RestServlet):
user_localpart=target_user.localpart, filter_id=filter_id user_localpart=target_user.localpart, filter_id=filter_id
) )
return (200, filter.get_filter_json()) return 200, filter.get_filter_json()
except (KeyError, StoreError): except (KeyError, StoreError):
raise SynapseError(400, "No such filter", errcode=Codes.NOT_FOUND) raise SynapseError(400, "No such filter", errcode=Codes.NOT_FOUND)
@ -89,7 +89,7 @@ class CreateFilterRestServlet(RestServlet):
user_localpart=target_user.localpart, user_filter=content user_localpart=target_user.localpart, user_filter=content
) )
return (200, {"filter_id": str(filter_id)}) return 200, {"filter_id": str(filter_id)}
def register_servlets(hs, http_server): def register_servlets(hs, http_server):


@ -47,7 +47,7 @@ class GroupServlet(RestServlet):
group_id, requester_user_id group_id, requester_user_id
) )
return (200, group_description) return 200, group_description
@defer.inlineCallbacks @defer.inlineCallbacks
def on_POST(self, request, group_id): def on_POST(self, request, group_id):
@ -59,7 +59,7 @@ class GroupServlet(RestServlet):
group_id, requester_user_id, content group_id, requester_user_id, content
) )
return (200, {}) return 200, {}
class GroupSummaryServlet(RestServlet): class GroupSummaryServlet(RestServlet):
@ -83,7 +83,7 @@ class GroupSummaryServlet(RestServlet):
group_id, requester_user_id group_id, requester_user_id
) )
return (200, get_group_summary) return 200, get_group_summary
class GroupSummaryRoomsCatServlet(RestServlet): class GroupSummaryRoomsCatServlet(RestServlet):
@ -120,7 +120,7 @@ class GroupSummaryRoomsCatServlet(RestServlet):
content=content, content=content,
) )
return (200, resp) return 200, resp
@defer.inlineCallbacks @defer.inlineCallbacks
def on_DELETE(self, request, group_id, category_id, room_id): def on_DELETE(self, request, group_id, category_id, room_id):
@ -131,7 +131,7 @@ class GroupSummaryRoomsCatServlet(RestServlet):
group_id, requester_user_id, room_id=room_id, category_id=category_id group_id, requester_user_id, room_id=room_id, category_id=category_id
) )
return (200, resp) return 200, resp
class GroupCategoryServlet(RestServlet): class GroupCategoryServlet(RestServlet):
@ -157,7 +157,7 @@ class GroupCategoryServlet(RestServlet):
group_id, requester_user_id, category_id=category_id group_id, requester_user_id, category_id=category_id
) )
return (200, category) return 200, category
@defer.inlineCallbacks @defer.inlineCallbacks
def on_PUT(self, request, group_id, category_id): def on_PUT(self, request, group_id, category_id):
@ -169,7 +169,7 @@ class GroupCategoryServlet(RestServlet):
group_id, requester_user_id, category_id=category_id, content=content group_id, requester_user_id, category_id=category_id, content=content
) )
return (200, resp) return 200, resp
@defer.inlineCallbacks @defer.inlineCallbacks
def on_DELETE(self, request, group_id, category_id): def on_DELETE(self, request, group_id, category_id):
@ -180,7 +180,7 @@ class GroupCategoryServlet(RestServlet):
group_id, requester_user_id, category_id=category_id group_id, requester_user_id, category_id=category_id
) )
return (200, resp) return 200, resp
class GroupCategoriesServlet(RestServlet): class GroupCategoriesServlet(RestServlet):
@ -204,7 +204,7 @@ class GroupCategoriesServlet(RestServlet):
group_id, requester_user_id group_id, requester_user_id
) )
return (200, category) return 200, category
class GroupRoleServlet(RestServlet): class GroupRoleServlet(RestServlet):
@ -228,7 +228,7 @@ class GroupRoleServlet(RestServlet):
group_id, requester_user_id, role_id=role_id group_id, requester_user_id, role_id=role_id
) )
return (200, category) return 200, category
@defer.inlineCallbacks @defer.inlineCallbacks
def on_PUT(self, request, group_id, role_id): def on_PUT(self, request, group_id, role_id):
@ -240,7 +240,7 @@ class GroupRoleServlet(RestServlet):
group_id, requester_user_id, role_id=role_id, content=content group_id, requester_user_id, role_id=role_id, content=content
) )
return (200, resp) return 200, resp
@defer.inlineCallbacks @defer.inlineCallbacks
def on_DELETE(self, request, group_id, role_id): def on_DELETE(self, request, group_id, role_id):
@ -251,7 +251,7 @@ class GroupRoleServlet(RestServlet):
group_id, requester_user_id, role_id=role_id group_id, requester_user_id, role_id=role_id
) )
return (200, resp) return 200, resp
class GroupRolesServlet(RestServlet): class GroupRolesServlet(RestServlet):
@ -275,7 +275,7 @@ class GroupRolesServlet(RestServlet):
group_id, requester_user_id group_id, requester_user_id
) )
return (200, category) return 200, category
class GroupSummaryUsersRoleServlet(RestServlet): class GroupSummaryUsersRoleServlet(RestServlet):
@ -312,7 +312,7 @@ class GroupSummaryUsersRoleServlet(RestServlet):
content=content, content=content,
) )
return (200, resp) return 200, resp
@defer.inlineCallbacks @defer.inlineCallbacks
def on_DELETE(self, request, group_id, role_id, user_id): def on_DELETE(self, request, group_id, role_id, user_id):
@ -323,7 +323,7 @@ class GroupSummaryUsersRoleServlet(RestServlet):
group_id, requester_user_id, user_id=user_id, role_id=role_id group_id, requester_user_id, user_id=user_id, role_id=role_id
) )
return (200, resp) return 200, resp
class GroupRoomServlet(RestServlet): class GroupRoomServlet(RestServlet):
@ -347,7 +347,7 @@ class GroupRoomServlet(RestServlet):
group_id, requester_user_id group_id, requester_user_id
) )
return (200, result) return 200, result
class GroupUsersServlet(RestServlet): class GroupUsersServlet(RestServlet):
@ -371,7 +371,7 @@ class GroupUsersServlet(RestServlet):
group_id, requester_user_id group_id, requester_user_id
) )
return (200, result) return 200, result
class GroupInvitedUsersServlet(RestServlet): class GroupInvitedUsersServlet(RestServlet):
@ -395,7 +395,7 @@ class GroupInvitedUsersServlet(RestServlet):
group_id, requester_user_id group_id, requester_user_id
) )
return (200, result) return 200, result
class GroupSettingJoinPolicyServlet(RestServlet): class GroupSettingJoinPolicyServlet(RestServlet):
@ -420,7 +420,7 @@ class GroupSettingJoinPolicyServlet(RestServlet):
group_id, requester_user_id, content group_id, requester_user_id, content
) )
return (200, result) return 200, result
class GroupCreateServlet(RestServlet): class GroupCreateServlet(RestServlet):
@ -450,7 +450,7 @@ class GroupCreateServlet(RestServlet):
group_id, requester_user_id, content group_id, requester_user_id, content
) )
return (200, result) return 200, result
class GroupAdminRoomsServlet(RestServlet): class GroupAdminRoomsServlet(RestServlet):
@ -477,7 +477,7 @@ class GroupAdminRoomsServlet(RestServlet):
group_id, requester_user_id, room_id, content group_id, requester_user_id, room_id, content
) )
return (200, result) return 200, result
@defer.inlineCallbacks @defer.inlineCallbacks
def on_DELETE(self, request, group_id, room_id): def on_DELETE(self, request, group_id, room_id):
@ -488,7 +488,7 @@ class GroupAdminRoomsServlet(RestServlet):
group_id, requester_user_id, room_id group_id, requester_user_id, room_id
) )
return (200, result) return 200, result
class GroupAdminRoomsConfigServlet(RestServlet): class GroupAdminRoomsConfigServlet(RestServlet):
@ -516,7 +516,7 @@ class GroupAdminRoomsConfigServlet(RestServlet):
group_id, requester_user_id, room_id, config_key, content group_id, requester_user_id, room_id, config_key, content
) )
return (200, result) return 200, result
class GroupAdminUsersInviteServlet(RestServlet): class GroupAdminUsersInviteServlet(RestServlet):
@ -546,7 +546,7 @@ class GroupAdminUsersInviteServlet(RestServlet):
group_id, user_id, requester_user_id, config group_id, user_id, requester_user_id, config
) )
return (200, result) return 200, result
class GroupAdminUsersKickServlet(RestServlet): class GroupAdminUsersKickServlet(RestServlet):
@ -573,7 +573,7 @@ class GroupAdminUsersKickServlet(RestServlet):
group_id, user_id, requester_user_id, content group_id, user_id, requester_user_id, content
) )
return (200, result) return 200, result
class GroupSelfLeaveServlet(RestServlet): class GroupSelfLeaveServlet(RestServlet):
@ -598,7 +598,7 @@ class GroupSelfLeaveServlet(RestServlet):
group_id, requester_user_id, requester_user_id, content group_id, requester_user_id, requester_user_id, content
) )
return (200, result) return 200, result
class GroupSelfJoinServlet(RestServlet): class GroupSelfJoinServlet(RestServlet):
@ -623,7 +623,7 @@ class GroupSelfJoinServlet(RestServlet):
group_id, requester_user_id, content group_id, requester_user_id, content
) )
return (200, result) return 200, result
class GroupSelfAcceptInviteServlet(RestServlet): class GroupSelfAcceptInviteServlet(RestServlet):
@ -648,7 +648,7 @@ class GroupSelfAcceptInviteServlet(RestServlet):
group_id, requester_user_id, content group_id, requester_user_id, content
) )
return (200, result) return 200, result
class GroupSelfUpdatePublicityServlet(RestServlet): class GroupSelfUpdatePublicityServlet(RestServlet):
@ -672,7 +672,7 @@ class GroupSelfUpdatePublicityServlet(RestServlet):
publicise = content["publicise"] publicise = content["publicise"]
yield self.store.update_group_publicity(group_id, requester_user_id, publicise) yield self.store.update_group_publicity(group_id, requester_user_id, publicise)
return (200, {}) return 200, {}
class PublicisedGroupsForUserServlet(RestServlet): class PublicisedGroupsForUserServlet(RestServlet):
@ -694,7 +694,7 @@ class PublicisedGroupsForUserServlet(RestServlet):
result = yield self.groups_handler.get_publicised_groups_for_user(user_id) result = yield self.groups_handler.get_publicised_groups_for_user(user_id)
return (200, result) return 200, result
class PublicisedGroupsForUsersServlet(RestServlet): class PublicisedGroupsForUsersServlet(RestServlet):
@ -719,7 +719,7 @@ class PublicisedGroupsForUsersServlet(RestServlet):
result = yield self.groups_handler.bulk_get_publicised_groups(user_ids) result = yield self.groups_handler.bulk_get_publicised_groups(user_ids)
return (200, result) return 200, result
class GroupsForUserServlet(RestServlet): class GroupsForUserServlet(RestServlet):
@ -741,7 +741,7 @@ class GroupsForUserServlet(RestServlet):
result = yield self.groups_handler.get_joined_groups(requester_user_id) result = yield self.groups_handler.get_joined_groups(requester_user_id)
return (200, result) return 200, result
def register_servlets(hs, http_server): def register_servlets(hs, http_server):


@@ -1,5 +1,6 @@
 # -*- coding: utf-8 -*-
 # Copyright 2015, 2016 OpenMarket Ltd
+# Copyright 2019 New Vector Ltd
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
@@ -24,10 +25,10 @@ from synapse.http.servlet import (
     parse_json_object_from_request,
     parse_string,
 )
-from synapse.logging.opentracing import log_kv, set_tag, trace_using_operation_name
+from synapse.logging.opentracing import log_kv, set_tag, trace
 from synapse.types import StreamToken

-from ._base import client_patterns
+from ._base import client_patterns, interactive_auth_handler

 logger = logging.getLogger(__name__)

@@ -69,7 +70,7 @@ class KeyUploadServlet(RestServlet):
         self.auth = hs.get_auth()
         self.e2e_keys_handler = hs.get_e2e_keys_handler()

-    @trace_using_operation_name("upload_keys")
+    @trace(opname="upload_keys")
     @defer.inlineCallbacks
     def on_POST(self, request, device_id):
         requester = yield self.auth.get_user_by_req(request, allow_guest=True)
@@ -105,7 +106,7 @@ class KeyUploadServlet(RestServlet):
         result = yield self.e2e_keys_handler.upload_keys_for_user(
             user_id, device_id, body
         )
-        return (200, result)
+        return 200, result


 class KeyQueryServlet(RestServlet):
@@ -155,11 +156,12 @@ class KeyQueryServlet(RestServlet):

     @defer.inlineCallbacks
     def on_POST(self, request):
-        yield self.auth.get_user_by_req(request, allow_guest=True)
+        requester = yield self.auth.get_user_by_req(request, allow_guest=True)
+        user_id = requester.user.to_string()
         timeout = parse_integer(request, "timeout", 10 * 1000)
         body = parse_json_object_from_request(request)
-        result = yield self.e2e_keys_handler.query_devices(body, timeout)
-        return (200, result)
+        result = yield self.e2e_keys_handler.query_devices(body, timeout, user_id)
+        return 200, result


 class KeyChangesServlet(RestServlet):
@@ -200,7 +202,7 @@ class KeyChangesServlet(RestServlet):
         results = yield self.device_handler.get_user_ids_changed(user_id, from_token)

-        return (200, results)
+        return 200, results


 class OneTimeKeyServlet(RestServlet):
@@ -235,6 +237,43 @@ class OneTimeKeyServlet(RestServlet):
         timeout = parse_integer(request, "timeout", 10 * 1000)
         body = parse_json_object_from_request(request)
         result = yield self.e2e_keys_handler.claim_one_time_keys(body, timeout)
-        return (200, result)
+        return 200, result
+
+
+class SigningKeyUploadServlet(RestServlet):
+    """
+    POST /keys/device_signing/upload HTTP/1.1
+    Content-Type: application/json
+
+    {
+    }
+    """
+
+    PATTERNS = client_patterns("/keys/device_signing/upload$", releases=())
+
+    def __init__(self, hs):
+        """
+        Args:
+            hs (synapse.server.HomeServer): server
+        """
+        super(SigningKeyUploadServlet, self).__init__()
+        self.hs = hs
+        self.auth = hs.get_auth()
+        self.e2e_keys_handler = hs.get_e2e_keys_handler()
+        self.auth_handler = hs.get_auth_handler()
+
+    @interactive_auth_handler
+    @defer.inlineCallbacks
+    def on_POST(self, request):
+        requester = yield self.auth.get_user_by_req(request)
+        user_id = requester.user.to_string()
+        body = parse_json_object_from_request(request)
+
+        yield self.auth_handler.validate_user_via_ui_auth(
+            requester, body, self.hs.get_ip_from_request(request)
+        )
+
+        result = yield self.e2e_keys_handler.upload_signing_keys_for_user(user_id, body)
+        return (200, result)
@@ -243,3 +282,4 @@ def register_servlets(hs, http_server):
     KeyQueryServlet(hs).register(http_server)
     KeyChangesServlet(hs).register(http_server)
     OneTimeKeyServlet(hs).register(http_server)
+    SigningKeyUploadServlet(hs).register(http_server)
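The new servlet's docstring above leaves the example body empty. For orientation, a sketch of what a client might POST to it: the body shape follows the cross-signing proposal (MSC1756) that this series implements, every concrete key, token and value below is invented for illustration, and the unstable URL prefix is inferred from the releases=() pattern registration above.

import requests

HOMESERVER = "https://homeserver.example"  # assumed
ACCESS_TOKEN = "alice_access_token"        # assumed

body = {
    # User-interactive auth dict consumed by validate_user_via_ui_auth above.
    "auth": {
        "type": "m.login.password",
        "user": "@alice:example.com",
        "password": "correct horse battery staple",
    },
    # Cross-signing keys as sketched in MSC1756; base64 values are placeholders.
    "master_key": {
        "user_id": "@alice:example.com",
        "usage": ["master"],
        "keys": {"ed25519:masterpubkey": "masterpubkey"},
    },
    "self_signing_key": {
        "user_id": "@alice:example.com",
        "usage": ["self_signing"],
        "keys": {"ed25519:selfsigningpubkey": "selfsigningpubkey"},
        "signatures": {
            "@alice:example.com": {"ed25519:masterpubkey": "signaturebase64"}
        },
    },
}

resp = requests.post(
    f"{HOMESERVER}/_matrix/client/unstable/keys/device_signing/upload",
    headers={"Authorization": f"Bearer {ACCESS_TOKEN}"},
    json=body,
)
print(resp.status_code, resp.json())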


@ -88,7 +88,7 @@ class NotificationsServlet(RestServlet):
returned_push_actions.append(returned_pa) returned_push_actions.append(returned_pa)
next_token = str(pa["stream_ordering"]) next_token = str(pa["stream_ordering"])
return (200, {"notifications": returned_push_actions, "next_token": next_token}) return 200, {"notifications": returned_push_actions, "next_token": next_token}
def register_servlets(hs, http_server): def register_servlets(hs, http_server):


@ -59,7 +59,7 @@ class ReadMarkerRestServlet(RestServlet):
event_id=read_marker_event_id, event_id=read_marker_event_id,
) )
return (200, {}) return 200, {}
def register_servlets(hs, http_server): def register_servlets(hs, http_server):


@ -52,7 +52,7 @@ class ReceiptRestServlet(RestServlet):
room_id, receipt_type, user_id=requester.user.to_string(), event_id=event_id room_id, receipt_type, user_id=requester.user.to_string(), event_id=event_id
) )
return (200, {}) return 200, {}
def register_servlets(hs, http_server): def register_servlets(hs, http_server):


@ -94,7 +94,7 @@ class EmailRegisterRequestTokenRestServlet(RestServlet):
raise SynapseError(400, "Email is already in use", Codes.THREEPID_IN_USE) raise SynapseError(400, "Email is already in use", Codes.THREEPID_IN_USE)
ret = yield self.identity_handler.requestEmailToken(**body) ret = yield self.identity_handler.requestEmailToken(**body)
return (200, ret) return 200, ret
class MsisdnRegisterRequestTokenRestServlet(RestServlet): class MsisdnRegisterRequestTokenRestServlet(RestServlet):
@ -137,7 +137,7 @@ class MsisdnRegisterRequestTokenRestServlet(RestServlet):
) )
ret = yield self.identity_handler.requestMsisdnToken(**body) ret = yield self.identity_handler.requestMsisdnToken(**body)
return (200, ret) return 200, ret
class UsernameAvailabilityRestServlet(RestServlet): class UsernameAvailabilityRestServlet(RestServlet):
@ -177,7 +177,7 @@ class UsernameAvailabilityRestServlet(RestServlet):
yield self.registration_handler.check_username(username) yield self.registration_handler.check_username(username)
return (200, {"available": True}) return 200, {"available": True}
class RegisterRestServlet(RestServlet): class RegisterRestServlet(RestServlet):
@@ -230,7 +230,6 @@ class RegisterRestServlet(RestServlet):
         if kind == b"guest":
             ret = yield self._do_guest_registration(body, address=client_addr)
             return ret
-            return
         elif kind != b"user":
             raise UnrecognizedRequestError(
                 "Do not understand membership kind: %s" % (kind,)
@@ -279,8 +278,7 @@ class RegisterRestServlet(RestServlet):
             result = yield self._do_appservice_registration(
                 desired_username, access_token, body
             )
-            return (200, result)  # we throw for non 200 responses
-            return
+            return 200, result  # we throw for non 200 responses

         # for regular registration, downcase the provided username before
         # attempting to register it. This should mean
@@ -483,11 +481,9 @@ class RegisterRestServlet(RestServlet):
                 user_id=registered_user_id,
                 auth_result=auth_result,
                 access_token=return_dict.get("access_token"),
-                bind_email=params.get("bind_email"),
-                bind_msisdn=params.get("bind_msisdn"),
             )
-        return (200, return_dict)
+        return 200, return_dict

     def on_OPTIONS(self, _):
         return 200, {}


@@ -118,7 +118,7 @@ class RelationSendServlet(RestServlet):
             requester, event_dict=event_dict, txn_id=txn_id
         )
-        return (200, {"event_id": event.event_id})
+        return 200, {"event_id": event.event_id}
 class RelationPaginationServlet(RestServlet):
@@ -198,7 +198,7 @@ class RelationPaginationServlet(RestServlet):
         return_value["chunk"] = events
         return_value["original_event"] = original_event
-        return (200, return_value)
+        return 200, return_value
 class RelationAggregationPaginationServlet(RestServlet):
@@ -270,7 +270,7 @@ class RelationAggregationPaginationServlet(RestServlet):
             to_token=to_token,
         )
-        return (200, pagination_chunk.to_dict())
+        return 200, pagination_chunk.to_dict()
 class RelationAggregationGroupPaginationServlet(RestServlet):
@@ -356,7 +356,7 @@ class RelationAggregationGroupPaginationServlet(RestServlet):
         return_value = result.to_dict()
         return_value["chunk"] = events
-        return (200, return_value)
+        return 200, return_value
 def register_servlets(hs, http_server):
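Each of these modules ends with a register_servlets hook. A minimal sketch of that pattern (illustrative, not copied from the diff; it assumes the servlet classes shown above are defined in the same module and that RestServlet provides a register method, as in Synapse):

# Sketch of the register_servlets hook these modules expose: instantiate each
# servlet with the homeserver and attach it to the HTTP server.
def register_servlets(hs, http_server):
    RelationSendServlet(hs).register(http_server)
    RelationPaginationServlet(hs).register(http_server)
    RelationAggregationPaginationServlet(hs).register(http_server)
    RelationAggregationGroupPaginationServlet(hs).register(http_server)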

View file

@@ -72,7 +72,7 @@ class ReportEventRestServlet(RestServlet):
             received_ts=self.clock.time_msec(),
         )
-        return (200, {})
+        return 200, {}
 def register_servlets(hs, http_server):

View file

@@ -135,7 +135,7 @@ class RoomKeysServlet(RestServlet):
             body = {"rooms": {room_id: body}}
         yield self.e2e_room_keys_handler.upload_room_keys(user_id, version, body)
-        return (200, {})
+        return 200, {}
     @defer.inlineCallbacks
     def on_GET(self, request, room_id, session_id):
@@ -218,7 +218,7 @@ class RoomKeysServlet(RestServlet):
             else:
                 room_keys = room_keys["rooms"][room_id]
-        return (200, room_keys)
+        return 200, room_keys
     @defer.inlineCallbacks
     def on_DELETE(self, request, room_id, session_id):
@@ -242,7 +242,7 @@ class RoomKeysServlet(RestServlet):
         yield self.e2e_room_keys_handler.delete_room_keys(
             user_id, version, room_id, session_id
         )
-        return (200, {})
+        return 200, {}
 class RoomKeysNewVersionServlet(RestServlet):
@@ -293,7 +293,7 @@ class RoomKeysNewVersionServlet(RestServlet):
         info = parse_json_object_from_request(request)
         new_version = yield self.e2e_room_keys_handler.create_version(user_id, info)
-        return (200, {"version": new_version})
+        return 200, {"version": new_version}
 # we deliberately don't have a PUT /version, as these things really should
 # be immutable to avoid people footgunning
@@ -338,7 +338,7 @@ class RoomKeysVersionServlet(RestServlet):
         except SynapseError as e:
             if e.code == 404:
                 raise SynapseError(404, "No backup found", Codes.NOT_FOUND)
-        return (200, info)
+        return 200, info
     @defer.inlineCallbacks
     def on_DELETE(self, request, version):
@@ -358,7 +358,7 @@ class RoomKeysVersionServlet(RestServlet):
         user_id = requester.user.to_string()
         yield self.e2e_room_keys_handler.delete_version(user_id, version)
-        return (200, {})
+        return 200, {}
     @defer.inlineCallbacks
     def on_PUT(self, request, version):
@@ -392,7 +392,7 @@ class RoomKeysVersionServlet(RestServlet):
         )
         yield self.e2e_room_keys_handler.update_version(user_id, version, info)
-        return (200, {})
+        return 200, {}
 def register_servlets(hs, http_server):

View file

@@ -80,7 +80,7 @@ class RoomUpgradeRestServlet(RestServlet):
         ret = {"replacement_room": new_room_id}
-        return (200, ret)
+        return 200, ret
 def register_servlets(hs, http_server):

View file

@@ -19,6 +19,7 @@ from twisted.internet import defer
 from synapse.http import servlet
 from synapse.http.servlet import parse_json_object_from_request
+from synapse.logging.opentracing import set_tag, trace
 from synapse.rest.client.transactions import HttpTransactionCache
 from ._base import client_patterns
@@ -42,7 +43,10 @@ class SendToDeviceRestServlet(servlet.RestServlet):
         self.txns = HttpTransactionCache(hs)
         self.device_message_handler = hs.get_device_message_handler()
+    @trace(opname="sendToDevice")
     def on_PUT(self, request, message_type, txn_id):
+        set_tag("message_type", message_type)
+        set_tag("txn_id", txn_id)
         return self.txns.fetch_or_execute_request(
             request, self._put, request, message_type, txn_id
         )
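The sendtodevice change adds OpenTracing instrumentation: on_PUT is wrapped in a span named sendToDevice and the request parameters are recorded as tags on that span. A minimal standalone sketch of the same pattern (the class name and return value here are assumed for illustration, not taken from the diff):

# Sketch only: wrap a handler in a named tracing span and tag it with the
# request parameters, mirroring the pattern added above.
from synapse.logging.opentracing import set_tag, trace

class ExampleServlet:
    @trace(opname="sendToDevice")
    def on_PUT(self, request, message_type, txn_id):
        set_tag("message_type", message_type)  # record parameters on the active span
        set_tag("txn_id", txn_id)
        return 200, {}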

Some files were not shown because too many files have changed in this diff.