Merge branch 'develop' of github.com:matrix-org/synapse into erikj/censor_redactions

This commit is contained in:
Erik Johnston 2019-09-05 17:27:46 +01:00
commit 591d82f06b
144 changed files with 2394 additions and 1780 deletions

View file

@ -17,6 +17,6 @@ services:
SYNAPSE_POSTGRES_HOST: postgres
SYNAPSE_POSTGRES_USER: postgres
SYNAPSE_POSTGRES_PASSWORD: postgres
working_dir: /app
working_dir: /src
volumes:
- ..:/app
- ..:/src

View file

@ -17,6 +17,6 @@ services:
SYNAPSE_POSTGRES_HOST: postgres
SYNAPSE_POSTGRES_USER: postgres
SYNAPSE_POSTGRES_PASSWORD: postgres
working_dir: /app
working_dir: /src
volumes:
- ..:/app
- ..:/src

View file

@ -17,6 +17,6 @@ services:
SYNAPSE_POSTGRES_HOST: postgres
SYNAPSE_POSTGRES_USER: postgres
SYNAPSE_POSTGRES_PASSWORD: postgres
working_dir: /app
working_dir: /src
volumes:
- ..:/app
- ..:/src

View file

@ -1,3 +1,18 @@
# -*- coding: utf-8 -*-
# Copyright 2019 The Matrix.org Foundation C.I.C.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import sys
from tap.parser import Parser
from tap.line import Result, Unknown, Diagnostic

View file

@ -27,7 +27,7 @@ git config --global user.name "A robot"
# Fetch and merge. If it doesn't work, it will raise due to set -e.
git fetch -u origin $GITBASE
git merge --no-edit origin/$GITBASE
git merge --no-edit --no-commit origin/$GITBASE
# Show what we are after.
git --no-pager show -s

View file

@ -1,248 +0,0 @@
env:
CODECOV_TOKEN: "2dd7eb9b-0eda-45fe-a47c-9b5ac040045f"
steps:
- command:
- "python -m pip install tox"
- "tox -e check_codestyle"
label: "\U0001F9F9 Check Style"
plugins:
- docker#v3.0.1:
image: "python:3.6"
- command:
- "python -m pip install tox"
- "tox -e packaging"
label: "\U0001F9F9 packaging"
plugins:
- docker#v3.0.1:
image: "python:3.6"
- command:
- "python -m pip install tox"
- "tox -e check_isort"
label: "\U0001F9F9 isort"
plugins:
- docker#v3.0.1:
image: "python:3.6"
- command:
- "python -m pip install tox"
- "scripts-dev/check-newsfragment"
label: ":newspaper: Newsfile"
branches: "!master !develop !release-*"
plugins:
- docker#v3.0.1:
image: "python:3.6"
propagate-environment: true
- command:
- "python -m pip install tox"
- "tox -e check-sampleconfig"
label: "\U0001F9F9 check-sample-config"
plugins:
- docker#v3.0.1:
image: "python:3.6"
- command:
- "python -m pip install tox"
- "tox -e mypy"
label: ":mypy: mypy"
plugins:
- docker#v3.0.1:
image: "python:3.5"
- wait
- command:
- "apt-get update && apt-get install -y python3.5 python3.5-dev python3-pip libxml2-dev libxslt-dev zlib1g-dev"
- "python3.5 -m pip install tox"
- "tox -e py35-old,codecov"
label: ":python: 3.5 / SQLite / Old Deps"
env:
TRIAL_FLAGS: "-j 2"
LANG: "C.UTF-8"
plugins:
- docker#v3.0.1:
image: "ubuntu:xenial" # We use xenail to get an old sqlite and python
propagate-environment: true
retry:
automatic:
- exit_status: -1
limit: 2
- exit_status: 2
limit: 2
- command:
- "python -m pip install tox"
- "tox -e py35,codecov"
label: ":python: 3.5 / SQLite"
env:
TRIAL_FLAGS: "-j 2"
plugins:
- docker#v3.0.1:
image: "python:3.5"
propagate-environment: true
retry:
automatic:
- exit_status: -1
limit: 2
- exit_status: 2
limit: 2
- command:
- "python -m pip install tox"
- "tox -e py36,codecov"
label: ":python: 3.6 / SQLite"
env:
TRIAL_FLAGS: "-j 2"
plugins:
- docker#v3.0.1:
image: "python:3.6"
propagate-environment: true
retry:
automatic:
- exit_status: -1
limit: 2
- exit_status: 2
limit: 2
- command:
- "python -m pip install tox"
- "tox -e py37,codecov"
label: ":python: 3.7 / SQLite"
env:
TRIAL_FLAGS: "-j 2"
plugins:
- docker#v3.0.1:
image: "python:3.7"
propagate-environment: true
retry:
automatic:
- exit_status: -1
limit: 2
- exit_status: 2
limit: 2
- label: ":python: 3.5 / :postgres: 9.5"
agents:
queue: "medium"
env:
TRIAL_FLAGS: "-j 8"
command:
- "bash -c 'python -m pip install tox && python -m tox -e py35-postgres,codecov'"
plugins:
- docker-compose#v2.1.0:
run: testenv
config:
- .buildkite/docker-compose.py35.pg95.yaml
retry:
automatic:
- exit_status: -1
limit: 2
- exit_status: 2
limit: 2
- label: ":python: 3.7 / :postgres: 9.5"
agents:
queue: "medium"
env:
TRIAL_FLAGS: "-j 8"
command:
- "bash -c 'python -m pip install tox && python -m tox -e py37-postgres,codecov'"
plugins:
- docker-compose#v2.1.0:
run: testenv
config:
- .buildkite/docker-compose.py37.pg95.yaml
retry:
automatic:
- exit_status: -1
limit: 2
- exit_status: 2
limit: 2
- label: ":python: 3.7 / :postgres: 11"
agents:
queue: "medium"
env:
TRIAL_FLAGS: "-j 8"
command:
- "bash -c 'python -m pip install tox && python -m tox -e py37-postgres,codecov'"
plugins:
- docker-compose#v2.1.0:
run: testenv
config:
- .buildkite/docker-compose.py37.pg11.yaml
retry:
automatic:
- exit_status: -1
limit: 2
- exit_status: 2
limit: 2
- label: "SyTest - :python: 3.5 / SQLite / Monolith"
agents:
queue: "medium"
command:
- "bash .buildkite/merge_base_branch.sh"
- "bash /synapse_sytest.sh"
plugins:
- docker#v3.0.1:
image: "matrixdotorg/sytest-synapse:py35"
propagate-environment: true
always-pull: true
workdir: "/src"
retry:
automatic:
- exit_status: -1
limit: 2
- exit_status: 2
limit: 2
- label: "SyTest - :python: 3.5 / :postgres: 9.6 / Monolith"
agents:
queue: "medium"
env:
POSTGRES: "1"
command:
- "bash .buildkite/merge_base_branch.sh"
- "bash /synapse_sytest.sh"
plugins:
- docker#v3.0.1:
image: "matrixdotorg/sytest-synapse:py35"
propagate-environment: true
always-pull: true
workdir: "/src"
retry:
automatic:
- exit_status: -1
limit: 2
- exit_status: 2
limit: 2
- label: "SyTest - :python: 3.5 / :postgres: 9.6 / Workers"
agents:
queue: "medium"
env:
POSTGRES: "1"
WORKERS: "1"
BLACKLIST: "synapse-blacklist-with-workers"
command:
- "bash .buildkite/merge_base_branch.sh"
- "bash -c 'cat /src/sytest-blacklist /src/.buildkite/worker-blacklist > /src/synapse-blacklist-with-workers'"
- "bash /synapse_sytest.sh"
plugins:
- docker#v3.0.1:
image: "matrixdotorg/sytest-synapse:py35"
propagate-environment: true
always-pull: true
workdir: "/src"
retry:
automatic:
- exit_status: -1
limit: 2
- exit_status: 2
limit: 2

View file

@ -1,7 +1,8 @@
[run]
branch = True
parallel = True
include = synapse/*
include=$TOP/synapse/*
data_file = $TOP/.coverage
[report]
precision = 2

View file

@ -36,7 +36,7 @@ that your email address is probably `user@example.com` rather than
System requirements:
- POSIX-compliant system (tested on Linux & OS X)
- Python 3.5, 3.6, 3.7, or 2.7
- Python 3.5, 3.6, or 3.7
- At least 1GB of free RAM if you want to join large public rooms like #matrix:matrix.org
Synapse is written in Python but some of the libraries it uses are written in
@ -421,7 +421,7 @@ If Synapse is not configured with an SMTP server, password reset via email will
The easiest way to create a new user is to do so from a client like [Riot](https://riot.im).
Alternatively you can do so from the command line if you have installed via pip.
Alternatively you can do so from the command line if you have installed via pip.
This can be done as follows:

1
changelog.d/5853.feature Normal file
View file

@ -0,0 +1 @@
Opentracing for device list updates.

1
changelog.d/5892.misc Normal file
View file

@ -0,0 +1 @@
Compatibility with v2 Identity Service APIs other than /lookup.

View file

@ -1 +0,0 @@
Switch to the v2 lookup API for 3PID invites.

1
changelog.d/5915.bugfix Normal file
View file

@ -0,0 +1 @@
Fix 404 for thumbnail download when `dynamic_thumbnails` is `false` and the thumbnail was dynamically generated. Fix reported by rkfg.

1
changelog.d/5922.misc Normal file
View file

@ -0,0 +1 @@
Update Buildkite pipeline to use plugins instead of buildkite-agent commands.

1
changelog.d/5931.misc Normal file
View file

@ -0,0 +1 @@
Remove unnecessary parentheses in return statements.

1
changelog.d/5938.misc Normal file
View file

@ -0,0 +1 @@
Remove unused jenkins/prepare_sytest.sh file.

1
changelog.d/5943.misc Normal file
View file

@ -0,0 +1 @@
Move Buildkite pipeline config to the pipelines repo.

1
changelog.d/5953.misc Normal file
View file

@ -0,0 +1 @@
Update INSTALL.md to say that Python 2 is no longer supported.

1
changelog.d/5962.misc Normal file
View file

@ -0,0 +1 @@
Remove unnecessary return statements in the codebase which were the result of a regex run.

1
changelog.d/5963.misc Normal file
View file

@ -0,0 +1 @@
Remove left-over methods from C/S registration API.

1
changelog.d/5964.feature Normal file
View file

@ -0,0 +1 @@
Remove `bind_email` and `bind_msisdn` parameters from /register as per MSC2140.

1
changelog.d/5966.bugfix Normal file
View file

@ -0,0 +1 @@
Fix admin API for listing media in a room not being available with an external media repo.

1
changelog.d/5967.bugfix Normal file
View file

@ -0,0 +1 @@
Fix list media admin API always returning an error.

1
changelog.d/5970.docker Normal file
View file

@ -0,0 +1 @@
Avoid changing UID/GID if they are already correct.

1
changelog.d/5971.bugfix Normal file
View file

@ -0,0 +1 @@
Fix room and user stats tracking.

1
changelog.d/5975.misc Normal file
View file

@ -0,0 +1 @@
Cleanup event auth type initialisation.

1
changelog.d/5980.feature Normal file
View file

@ -0,0 +1 @@
Add POST /_matrix/client/r0/account/3pid/unbind endpoint from MSC2140 for unbinding a 3PID from an identity server without removing it from the homeserver user account.

1
changelog.d/5982.bugfix Normal file
View file

@ -0,0 +1 @@
Include missing opentracing contexts in outbound replication requests.

1
changelog.d/5983.feature Normal file
View file

@ -0,0 +1 @@
Add minimum opentracing for client servlets.

1
changelog.d/5984.bugfix Normal file
View file

@ -0,0 +1 @@
Fix sending of EDUs when opentracing is enabled with an empty whitelist.

View file

@ -268,6 +268,7 @@ class SynapseCmd(cmd.Cmd):
@defer.inlineCallbacks
def _do_emailrequest(self, args):
# TODO: Update to use v2 Identity Service API endpoint
url = (
self._identityServerUrl()
+ "/_matrix/identity/api/v1/validate/email/requestToken"
@ -302,6 +303,7 @@ class SynapseCmd(cmd.Cmd):
@defer.inlineCallbacks
def _do_emailvalidate(self, args):
# TODO: Update to use v2 Identity Service API endpoint
url = (
self._identityServerUrl()
+ "/_matrix/identity/api/v1/validate/email/submitToken"
@ -330,6 +332,7 @@ class SynapseCmd(cmd.Cmd):
@defer.inlineCallbacks
def _do_3pidbind(self, args):
# TODO: Update to use v2 Identity Service API endpoint
url = self._identityServerUrl() + "/_matrix/identity/api/v1/3pid/bind"
json_res = yield self.http_client.do_request(
@ -398,6 +401,7 @@ class SynapseCmd(cmd.Cmd):
@defer.inlineCallbacks
def _do_invite(self, roomid, userstring):
if not userstring.startswith("@") and self._is_on("complete_usernames"):
# TODO: Update to use v2 Identity Service API endpoint
url = self._identityServerUrl() + "/_matrix/identity/api/v1/lookup"
json_res = yield self.http_client.do_request(
@ -407,6 +411,7 @@ class SynapseCmd(cmd.Cmd):
mxid = None
if "mxid" in json_res and "signatures" in json_res:
# TODO: Update to use v2 Identity Service API endpoint
url = (
self._identityServerUrl()
+ "/_matrix/identity/api/v1/pubkey/ed25519"

View file

@ -41,8 +41,8 @@ def generate_config_from_template(config_dir, config_path, environ, ownership):
config_dir (str): where to put generated config files
config_path (str): where to put the main config file
environ (dict): environment dictionary
ownership (str): "<user>:<group>" string which will be used to set
ownership of the generated configs
ownership (str|None): "<user>:<group>" string which will be used to set
ownership of the generated configs. If None, ownership will not change.
"""
for v in ("SYNAPSE_SERVER_NAME", "SYNAPSE_REPORT_STATS"):
if v not in environ:
@ -105,24 +105,24 @@ def generate_config_from_template(config_dir, config_path, environ, ownership):
log("Generating log config file " + log_config_file)
convert("/conf/log.config", log_config_file, environ)
subprocess.check_output(["chown", "-R", ownership, "/data"])
# Hopefully we already have a signing key, but generate one if not.
subprocess.check_output(
[
"su-exec",
ownership,
"python",
"-m",
"synapse.app.homeserver",
"--config-path",
config_path,
# tell synapse to put generated keys in /data rather than /compiled
"--keys-directory",
config_dir,
"--generate-keys",
]
)
args = [
"python",
"-m",
"synapse.app.homeserver",
"--config-path",
config_path,
# tell synapse to put generated keys in /data rather than /compiled
"--keys-directory",
config_dir,
"--generate-keys",
]
if ownership is not None:
subprocess.check_output(["chown", "-R", ownership, "/data"])
args = ["su-exec", ownership] + args
subprocess.check_output(args)
def run_generate_config(environ, ownership):
@ -130,7 +130,7 @@ def run_generate_config(environ, ownership):
Args:
environ (dict): env var dict
ownership (str): "userid:groupid" arg for chmod
ownership (str|None): "userid:groupid" arg for chmod. If None, ownership will not change.
Never returns.
"""
@ -149,9 +149,6 @@ def run_generate_config(environ, ownership):
log("Creating log config %s" % (log_config_file,))
convert("/conf/log.config", log_config_file, environ)
# make sure that synapse has perms to write to the data dir.
subprocess.check_output(["chown", ownership, data_dir])
args = [
"python",
"-m",
@ -170,12 +167,33 @@ def run_generate_config(environ, ownership):
"--open-private-ports",
]
# log("running %s" % (args, ))
os.execv("/usr/local/bin/python", args)
if ownership is not None:
args = ["su-exec", ownership] + args
os.execv("/sbin/su-exec", args)
# make sure that synapse has perms to write to the data dir.
subprocess.check_output(["chown", ownership, data_dir])
else:
os.execv("/usr/local/bin/python", args)
def main(args, environ):
mode = args[1] if len(args) > 1 else None
ownership = "{}:{}".format(environ.get("UID", 991), environ.get("GID", 991))
desired_uid = int(environ.get("UID", "991"))
desired_gid = int(environ.get("GID", "991"))
if (desired_uid == os.getuid()) and (desired_gid == os.getgid()):
ownership = None
else:
ownership = "{}:{}".format(desired_uid, desired_gid)
log(
"Container running as UserID %s:%s, ENV (or defaults) requests %s:%s"
% (os.getuid(), os.getgid(), desired_uid, desired_gid)
)
if ownership is None:
log("Will not perform chmod/su-exec as UserID already matches request")
# In generate mode, generate a configuration and missing keys, then exit
if mode == "generate":
@ -227,16 +245,12 @@ def main(args, environ):
log("Starting synapse with config file " + config_path)
args = [
"su-exec",
ownership,
"python",
"-m",
"synapse.app.homeserver",
"--config-path",
config_path,
]
os.execv("/sbin/su-exec", args)
args = ["python", "-m", "synapse.app.homeserver", "--config-path", config_path]
if ownership is not None:
args = ["su-exec", ownership] + args
os.execv("/sbin/su-exec", args)
else:
os.execv("/usr/local/bin/python", args)
if __name__ == "__main__":

View file

@ -0,0 +1,62 @@
Room and User Statistics
========================
Synapse maintains room and user statistics (as well as a cache of room state),
in various tables. These can be used for administrative purposes but are also
used when generating the public room directory.
# Synapse Developer Documentation
## High-Level Concepts
### Definitions
* **subject**: Something we are tracking stats about, currently a room or user.
* **current row**: An entry for a subject in the appropriate current statistics
table. Each subject can have only one.
* **historical row**: An entry for a subject in the appropriate historical
statistics table. Each subject can have any number of these.
### Overview
Stats are maintained as time series. There are two kinds of column:
* absolute columns where the value is correct for the time given by `end_ts`
in the stats row. (Imagine a line graph for these values)
* They can also be thought of as 'gauges' in Prometheus, if you are familiar.
* per-slice columns where the value corresponds to how many of the occurrences
occurred within the time slice given by `(end_ts - bucket_size)…end_ts`
or `start_ts…end_ts`. (Imagine a histogram for these values)
Stats are maintained in two tables (for each type): current and historical.
Current stats correspond to the present values. Each subject can only have one
entry.
Historical stats correspond to values in the past. Subjects may have multiple
entries.
## Concepts around the management of stats
### Current rows
Current rows contain the most up-to-date statistics for a room.
They only contain absolute columns.
### Historical rows
Historical rows can always be considered to be valid for the time slice and
end time specified.
* historical rows will not exist for every time slice; they will be omitted
if there were no changes. In this case, the following assumptions can be
made to interpolate/recreate missing rows:
- absolute fields have the same values as in the preceding row
- per-slice fields are zero (`0`)
* historical rows will not be retained forever; rows older than a configurable
time will be purged.
#### Purge
The purging of historical rows is not yet implemented.
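To make the interpolation rules above concrete, the following is a minimal sketch (not part of this commit) of reconstructing a missing historical row from the preceding one. The split into absolute and per-slice fields follows this document; the helper and the example column names are purely illustrative.
# Sketch only: rebuild a missing historical stats row from the preceding row,
# following the rules above (absolute fields carry over, per-slice fields are 0).
ABSOLUTE_FIELDS = {"current_state_events", "joined_members"}  # illustrative names
PER_SLICE_FIELDS = {"total_events", "total_event_bytes"}      # illustrative names

def interpolate_missing_row(previous_row, bucket_size):
    """previous_row: dict for the last slice that was actually recorded."""
    row = {"end_ts": previous_row["end_ts"] + bucket_size, "bucket_size": bucket_size}
    for field in ABSOLUTE_FIELDS:
        row[field] = previous_row[field]  # same value as in the preceding row
    for field in PER_SLICE_FIELDS:
        row[field] = 0                    # nothing recorded in this slice
    return row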

View file

@ -1,16 +0,0 @@
#! /bin/bash
set -eux
cd "`dirname $0`/.."
TOX_DIR=$WORKSPACE/.tox
mkdir -p $TOX_DIR
if ! [ $TOX_DIR -ef .tox ]; then
ln -s "$TOX_DIR" .tox
fi
# set up the virtualenv
tox -e py27 --notest -v

View file

@ -276,25 +276,25 @@ class Auth(object):
self.get_access_token_from_request(request)
)
if app_service is None:
return (None, None)
return None, None
if app_service.ip_range_whitelist:
ip_address = IPAddress(self.hs.get_ip_from_request(request))
if ip_address not in app_service.ip_range_whitelist:
return (None, None)
return None, None
if b"user_id" not in request.args:
return (app_service.sender, app_service)
return app_service.sender, app_service
user_id = request.args[b"user_id"][0].decode("utf8")
if app_service.sender == user_id:
return (app_service.sender, app_service)
return app_service.sender, app_service
if not app_service.is_interested_in_user(user_id):
raise AuthError(403, "Application service cannot masquerade as this user.")
if not (yield self.store.get_user_by_id(user_id)):
raise AuthError(403, "Application service has not registered this user")
return (user_id, app_service)
return user_id, app_service
@defer.inlineCallbacks
def get_user_by_access_token(self, token, rights="access"):
@ -694,7 +694,7 @@ class Auth(object):
# * The user is a guest user, and has joined the room
# else it will throw.
member_event = yield self.check_user_was_in_room(room_id, user_id)
return (member_event.membership, member_event.event_id)
return member_event.membership, member_event.event_id
except AuthError:
visibility = yield self.state.get_current_state(
room_id, EventTypes.RoomHistoryVisibility, ""
@ -703,8 +703,7 @@ class Auth(object):
visibility
and visibility.content["history_visibility"] == "world_readable"
):
return (Membership.JOIN, None)
return
return Membership.JOIN, None
raise AuthError(
403, "Guest access not allowed", errcode=Codes.GUEST_ACCESS_FORBIDDEN
)

View file

@ -70,12 +70,12 @@ class PresenceStatusStubServlet(RestServlet):
except HttpResponseException as e:
raise e.to_synapse_error()
return (200, result)
return 200, result
@defer.inlineCallbacks
def on_PUT(self, request, user_id):
yield self.auth.get_user_by_req(request)
return (200, {})
return 200, {}
class KeyUploadServlet(RestServlet):
@ -126,11 +126,11 @@ class KeyUploadServlet(RestServlet):
self.main_uri + request.uri.decode("ascii"), body, headers=headers
)
return (200, result)
return 200, result
else:
# Just interested in counts.
result = yield self.store.count_e2e_one_time_keys(user_id, device_id)
return (200, {"one_time_key_counts": result})
return 200, {"one_time_key_counts": result}
class FrontendProxySlavedStore(

View file

@ -107,7 +107,6 @@ class ApplicationServiceApi(SimpleHttpClient):
except CodeMessageException as e:
if e.code == 404:
return False
return
logger.warning("query_user to %s received %s", uri, e.code)
except Exception as ex:
logger.warning("query_user to %s threw exception %s", uri, ex)
@ -127,7 +126,6 @@ class ApplicationServiceApi(SimpleHttpClient):
logger.warning("query_alias to %s received %s", uri, e.code)
if e.code == 404:
return False
return
except Exception as ex:
logger.warning("query_alias to %s threw exception %s", uri, ex)
return False
@ -230,7 +228,6 @@ class ApplicationServiceApi(SimpleHttpClient):
sent_transactions_counter.labels(service.id).inc()
sent_events_counter.labels(service.id).inc(len(events))
return True
return
except CodeMessageException as e:
logger.warning("push_bulk to %s received %s", uri, e.code)
except Exception as ex:

View file

@ -27,19 +27,16 @@ class StatsConfig(Config):
def read_config(self, config, **kwargs):
self.stats_enabled = True
self.stats_bucket_size = 86400
self.stats_bucket_size = 86400 * 1000
self.stats_retention = sys.maxsize
stats_config = config.get("stats", None)
if stats_config:
self.stats_enabled = stats_config.get("enabled", self.stats_enabled)
self.stats_bucket_size = (
self.parse_duration(stats_config.get("bucket_size", "1d")) / 1000
self.stats_bucket_size = self.parse_duration(
stats_config.get("bucket_size", "1d")
)
self.stats_retention = (
self.parse_duration(
stats_config.get("retention", "%ds" % (sys.maxsize,))
)
/ 1000
self.stats_retention = self.parse_duration(
stats_config.get("retention", "%ds" % (sys.maxsize,))
)
def generate_config_section(self, config_dir_path, server_name, **kwargs):
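# Context for the change above (not part of the diff): parse_duration returns
# milliseconds, which is why the "/ 1000" division is dropped and the default
# moves from 86400 to 86400 * 1000, i.e. stats_bucket_size is now kept in ms.
# A rough, self-contained illustration using a simplified stand-in helper
# (not Synapse's own parse_duration):
def parse_duration_ms(value):
    units = {"s": 1000, "m": 60 * 1000, "h": 3600 * 1000, "d": 86400 * 1000}
    return int(value[:-1]) * units[value[-1]]

assert parse_duration_ms("1d") == 86400 * 1000  # matches the new default above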

View file

@ -83,7 +83,7 @@ def compute_content_hash(event_dict, hash_algorithm):
event_json_bytes = encode_canonical_json(event_dict)
hashed = hash_algorithm(event_json_bytes)
return (hashed.name, hashed.digest())
return hashed.name, hashed.digest()
def compute_event_reference_hash(event, hash_algorithm=hashlib.sha256):
@ -106,7 +106,7 @@ def compute_event_reference_hash(event, hash_algorithm=hashlib.sha256):
event_dict.pop("unsigned", None)
event_json_bytes = encode_canonical_json(event_dict)
hashed = hash_algorithm(event_json_bytes)
return (hashed.name, hashed.digest())
return hashed.name, hashed.digest()
def compute_event_signature(event_dict, signature_name, signing_key):

View file

@ -637,11 +637,11 @@ def auth_types_for_event(event):
if event.type == EventTypes.Create:
return []
auth_types = []
auth_types.append((EventTypes.PowerLevels, ""))
auth_types.append((EventTypes.Member, event.sender))
auth_types.append((EventTypes.Create, ""))
auth_types = [
(EventTypes.PowerLevels, ""),
(EventTypes.Member, event.sender),
(EventTypes.Create, ""),
]
if event.type == EventTypes.Member:
membership = event.content["membership"]

View file

@ -355,7 +355,7 @@ class FederationClient(FederationBase):
auth_chain.sort(key=lambda e: e.depth)
return (pdus, auth_chain)
return pdus, auth_chain
except HttpResponseException as e:
if e.code == 400 or e.code == 404:
logger.info("Failed to use get_room_state_ids API, falling back")
@ -404,7 +404,7 @@ class FederationClient(FederationBase):
signed_auth.sort(key=lambda e: e.depth)
return (signed_pdus, signed_auth)
return signed_pdus, signed_auth
@defer.inlineCallbacks
def get_events_from_store_or_dest(self, destination, room_id, event_ids):
@ -429,7 +429,7 @@ class FederationClient(FederationBase):
missing_events.discard(k)
if not missing_events:
return (signed_events, failed_to_fetch)
return signed_events, failed_to_fetch
logger.debug(
"Fetching unknown state/auth events %s for room %s",
@ -465,7 +465,7 @@ class FederationClient(FederationBase):
# We removed all events we successfully fetched from `batch`
failed_to_fetch.update(batch)
return (signed_events, failed_to_fetch)
return signed_events, failed_to_fetch
@defer.inlineCallbacks
@log_function

View file

@ -100,7 +100,7 @@ class FederationServer(FederationBase):
res = self._transaction_from_pdus(pdus).get_dict()
return (200, res)
return 200, res
@defer.inlineCallbacks
@log_function
@ -163,7 +163,7 @@ class FederationServer(FederationBase):
yield self.transaction_actions.set_response(
origin, transaction, 400, response
)
return (400, response)
return 400, response
received_pdus_counter.inc(len(transaction.pdus))
@ -265,7 +265,7 @@ class FederationServer(FederationBase):
logger.debug("Returning: %s", str(response))
yield self.transaction_actions.set_response(origin, transaction, 200, response)
return (200, response)
return 200, response
@defer.inlineCallbacks
def received_edu(self, origin, edu_type, content):
@ -298,7 +298,7 @@ class FederationServer(FederationBase):
event_id,
)
return (200, resp)
return 200, resp
@defer.inlineCallbacks
def on_state_ids_request(self, origin, room_id, event_id):
@ -315,7 +315,7 @@ class FederationServer(FederationBase):
state_ids = yield self.handler.get_state_ids_for_pdu(room_id, event_id)
auth_chain_ids = yield self.store.get_auth_chain_ids(state_ids)
return (200, {"pdu_ids": state_ids, "auth_chain_ids": auth_chain_ids})
return 200, {"pdu_ids": state_ids, "auth_chain_ids": auth_chain_ids}
@defer.inlineCallbacks
def _on_context_state_request_compute(self, room_id, event_id):
@ -345,15 +345,15 @@ class FederationServer(FederationBase):
pdu = yield self.handler.get_persisted_pdu(origin, event_id)
if pdu:
return (200, self._transaction_from_pdus([pdu]).get_dict())
return 200, self._transaction_from_pdus([pdu]).get_dict()
else:
return (404, "")
return 404, ""
@defer.inlineCallbacks
def on_query_request(self, query_type, args):
received_queries_counter.labels(query_type).inc()
resp = yield self.registry.on_query(query_type, args)
return (200, resp)
return 200, resp
@defer.inlineCallbacks
def on_make_join_request(self, origin, room_id, user_id, supported_versions):
@ -435,7 +435,7 @@ class FederationServer(FederationBase):
logger.debug("on_send_leave_request: pdu sigs: %s", pdu.signatures)
yield self.handler.on_send_leave_request(origin, pdu)
return (200, {})
return 200, {}
@defer.inlineCallbacks
def on_event_auth(self, origin, room_id, event_id):
@ -446,7 +446,7 @@ class FederationServer(FederationBase):
time_now = self._clock.time_msec()
auth_pdus = yield self.handler.on_event_auth(event_id)
res = {"auth_chain": [a.get_pdu_json(time_now) for a in auth_pdus]}
return (200, res)
return 200, res
@defer.inlineCallbacks
def on_query_auth_request(self, origin, content, room_id, event_id):
@ -499,7 +499,7 @@ class FederationServer(FederationBase):
"missing": ret.get("missing", []),
}
return (200, send_content)
return 200, send_content
@log_function
def on_query_client_keys(self, origin, content):

View file

@ -26,6 +26,7 @@ from synapse.logging.opentracing import (
set_tag,
start_active_span_follows_from,
tags,
whitelisted_homeserver,
)
from synapse.util.metrics import measure_func
@ -59,9 +60,15 @@ class TransactionManager(object):
# The span_contexts is a generator so that it won't be evaluated if
# opentracing is disabled. (Yay speed!)
span_contexts = (
extract_text_map(json.loads(edu.get_context())) for edu in pending_edus
)
span_contexts = []
keep_destination = whitelisted_homeserver(destination)
for edu in pending_edus:
context = edu.get_context()
if context:
span_contexts.append(extract_text_map(json.loads(context)))
if keep_destination:
edu.strip_context()
with start_active_span_follows_from("send_transaction", span_contexts):

View file

@ -342,7 +342,11 @@ class BaseFederationServlet(object):
continue
server.register_paths(
method, (pattern,), self._wrap(code), self.__class__.__name__
method,
(pattern,),
self._wrap(code),
self.__class__.__name__,
trace=False,
)

View file

@ -41,6 +41,9 @@ class Edu(JsonEncodedObject):
def get_context(self):
return getattr(self, "content", {}).get("org.matrix.opentracing_context", "{}")
def strip_context(self):
getattr(self, "content", {})["org.matrix.opentracing_context"] = "{}"
class Transaction(JsonEncodedObject):
""" A transaction is a list of Pdus and Edus to be sent to a remote home

View file

@ -51,8 +51,8 @@ class AccountDataEventSource(object):
{"type": account_data_type, "content": content, "room_id": room_id}
)
return (results, current_stream_id)
return results, current_stream_id
@defer.inlineCallbacks
def get_pagination_rows(self, user, config, key):
return ([], config.to_id)
return [], config.to_id

View file

@ -294,12 +294,10 @@ class ApplicationServicesHandler(object):
# we don't know if they are unknown or not since it isn't one of our
# users. We can't poke ASes.
return False
return
user_info = yield self.store.get_user_by_id(user_id)
if user_info:
return False
return
# user not found; could be the AS though, so check.
services = self.store.get_app_services()

View file

@ -280,7 +280,7 @@ class AuthHandler(BaseHandler):
creds,
list(clientdict),
)
return (creds, clientdict, session["id"])
return creds, clientdict, session["id"]
ret = self._auth_dict_for_flows(flows, session)
ret["completed"] = list(creds)
@ -722,7 +722,7 @@ class AuthHandler(BaseHandler):
known_login_type = True
is_valid = yield provider.check_password(qualified_user_id, password)
if is_valid:
return (qualified_user_id, None)
return qualified_user_id, None
if not hasattr(provider, "get_supported_login_types") or not hasattr(
provider, "check_auth"
@ -766,7 +766,7 @@ class AuthHandler(BaseHandler):
)
if canonical_user_id:
return (canonical_user_id, None)
return canonical_user_id, None
if not known_login_type:
raise SynapseError(400, "Unknown login type %s" % login_type)
@ -816,7 +816,7 @@ class AuthHandler(BaseHandler):
result = (result, None)
return result
return (None, None)
return None, None
@defer.inlineCallbacks
def _check_local_password(self, user_id, password):

View file

@ -25,6 +25,7 @@ from synapse.api.errors import (
HttpResponseException,
RequestSendFailed,
)
from synapse.logging.opentracing import log_kv, set_tag, trace
from synapse.types import RoomStreamToken, get_domain_from_id
from synapse.util import stringutils
from synapse.util.async_helpers import Linearizer
@ -45,6 +46,7 @@ class DeviceWorkerHandler(BaseHandler):
self.state = hs.get_state_handler()
self._auth_handler = hs.get_auth_handler()
@trace
@defer.inlineCallbacks
def get_devices_by_user(self, user_id):
"""
@ -56,6 +58,7 @@ class DeviceWorkerHandler(BaseHandler):
defer.Deferred: list[dict[str, X]]: info on each device
"""
set_tag("user_id", user_id)
device_map = yield self.store.get_devices_by_user(user_id)
ips = yield self.store.get_last_client_ip_by_device(user_id, device_id=None)
@ -64,8 +67,10 @@ class DeviceWorkerHandler(BaseHandler):
for device in devices:
_update_device_from_client_ips(device, ips)
log_kv(device_map)
return devices
@trace
@defer.inlineCallbacks
def get_device(self, user_id, device_id):
""" Retrieve the given device
@ -85,9 +90,14 @@ class DeviceWorkerHandler(BaseHandler):
raise errors.NotFoundError
ips = yield self.store.get_last_client_ip_by_device(user_id, device_id)
_update_device_from_client_ips(device, ips)
set_tag("device", device)
set_tag("ips", ips)
return device
@measure_func("device.get_user_ids_changed")
@trace
@defer.inlineCallbacks
def get_user_ids_changed(self, user_id, from_token):
"""Get list of users that have had the devices updated, or have newly
@ -97,6 +107,9 @@ class DeviceWorkerHandler(BaseHandler):
user_id (str)
from_token (StreamToken)
"""
set_tag("user_id", user_id)
set_tag("from_token", from_token)
now_room_key = yield self.store.get_room_events_max_id()
room_ids = yield self.store.get_rooms_for_user(user_id)
@ -148,6 +161,9 @@ class DeviceWorkerHandler(BaseHandler):
# special-case for an empty prev state: include all members
# in the changed list
if not event_ids:
log_kv(
{"event": "encountered empty previous state", "room_id": room_id}
)
for key, event_id in iteritems(current_state_ids):
etype, state_key = key
if etype != EventTypes.Member:
@ -200,7 +216,11 @@ class DeviceWorkerHandler(BaseHandler):
possibly_joined = []
possibly_left = []
return {"changed": list(possibly_joined), "left": list(possibly_left)}
result = {"changed": list(possibly_joined), "left": list(possibly_left)}
log_kv(result)
return result
class DeviceHandler(DeviceWorkerHandler):
@ -267,6 +287,7 @@ class DeviceHandler(DeviceWorkerHandler):
raise errors.StoreError(500, "Couldn't generate a device ID.")
@trace
@defer.inlineCallbacks
def delete_device(self, user_id, device_id):
""" Delete the given device
@ -284,6 +305,10 @@ class DeviceHandler(DeviceWorkerHandler):
except errors.StoreError as e:
if e.code == 404:
# no match
set_tag("error", True)
log_kv(
{"reason": "User doesn't have device id.", "device_id": device_id}
)
pass
else:
raise
@ -296,6 +321,7 @@ class DeviceHandler(DeviceWorkerHandler):
yield self.notify_device_update(user_id, [device_id])
@trace
@defer.inlineCallbacks
def delete_all_devices_for_user(self, user_id, except_device_id=None):
"""Delete all of the user's devices
@ -331,6 +357,8 @@ class DeviceHandler(DeviceWorkerHandler):
except errors.StoreError as e:
if e.code == 404:
# no match
set_tag("error", True)
set_tag("reason", "User doesn't have that device id.")
pass
else:
raise
@ -371,6 +399,7 @@ class DeviceHandler(DeviceWorkerHandler):
else:
raise
@trace
@measure_func("notify_device_update")
@defer.inlineCallbacks
def notify_device_update(self, user_id, device_ids):
@ -386,6 +415,8 @@ class DeviceHandler(DeviceWorkerHandler):
hosts.update(get_domain_from_id(u) for u in users_who_share_room)
hosts.discard(self.server_name)
set_tag("target_hosts", hosts)
position = yield self.store.add_device_change_to_streams(
user_id, device_ids, list(hosts)
)
@ -405,6 +436,7 @@ class DeviceHandler(DeviceWorkerHandler):
)
for host in hosts:
self.federation_sender.send_device_messages(host)
log_kv({"message": "sent device update to host", "host": host})
@defer.inlineCallbacks
def on_federation_query_user_devices(self, user_id):
@ -451,12 +483,15 @@ class DeviceListUpdater(object):
iterable=True,
)
@trace
@defer.inlineCallbacks
def incoming_device_list_update(self, origin, edu_content):
"""Called on incoming device list update from federation. Responsible
for parsing the EDU and adding to pending updates list.
"""
set_tag("origin", origin)
set_tag("edu_content", edu_content)
user_id = edu_content.pop("user_id")
device_id = edu_content.pop("device_id")
stream_id = str(edu_content.pop("stream_id")) # They may come as ints
@ -471,12 +506,30 @@ class DeviceListUpdater(object):
device_id,
origin,
)
set_tag("error", True)
log_kv(
{
"message": "Got a device list update edu from a user and "
"device which does not match the origin of the request.",
"user_id": user_id,
"device_id": device_id,
}
)
return
room_ids = yield self.store.get_rooms_for_user(user_id)
if not room_ids:
# We don't share any rooms with this user. Ignore update, as we
# probably won't get any further updates.
set_tag("error", True)
log_kv(
{
"message": "Got an update from a user for which "
"we don't share any rooms",
"other user_id": user_id,
}
)
logger.warning(
"Got device list update edu for %r/%r, but don't share a room",
user_id,
@ -578,6 +631,7 @@ class DeviceListUpdater(object):
request:
https://matrix.org/docs/spec/server_server/r0.1.2#get-matrix-federation-v1-user-devices-userid
"""
log_kv({"message": "Doing resync to update device list."})
# Fetch all devices for the user.
origin = get_domain_from_id(user_id)
try:
@ -594,13 +648,20 @@ class DeviceListUpdater(object):
# eventually become consistent.
return
except FederationDeniedError as e:
set_tag("error", True)
log_kv({"reason": "FederationDeniedError"})
logger.info(e)
return
except Exception:
except Exception as e:
# TODO: Remember that we are now out of sync and try again
# later
set_tag("error", True)
log_kv(
{"message": "Exception raised by federation request", "exception": e}
)
logger.exception("Failed to handle device list update for %s", user_id)
return
log_kv({"result": result})
stream_id = result["stream_id"]
devices = result["devices"]

View file

@ -22,9 +22,9 @@ from twisted.internet import defer
from synapse.api.errors import SynapseError
from synapse.logging.opentracing import (
get_active_span_text_map,
log_kv,
set_tag,
start_active_span,
whitelisted_homeserver,
)
from synapse.types import UserID, get_domain_from_id
from synapse.util.stringutils import random_string
@ -86,7 +86,8 @@ class DeviceMessageHandler(object):
@defer.inlineCallbacks
def send_device_message(self, sender_user_id, message_type, messages):
set_tag("number_of_messages", len(messages))
set_tag("sender", sender_user_id)
local_messages = {}
remote_messages = {}
for user_id, by_device in messages.items():
@ -119,11 +120,10 @@ class DeviceMessageHandler(object):
"sender": sender_user_id,
"type": message_type,
"message_id": message_id,
"org.matrix.opentracing_context": json.dumps(context)
if whitelisted_homeserver(destination)
else None,
"org.matrix.opentracing_context": json.dumps(context),
}
log_kv({"local_messages": local_messages})
stream_id = yield self.store.add_messages_to_device_inbox(
local_messages, remote_edu_contents
)
@ -132,6 +132,7 @@ class DeviceMessageHandler(object):
"to_device_key", stream_id, users=local_messages.keys()
)
log_kv({"remote_messages": remote_messages})
for destination in remote_messages.keys():
# Enqueue a new federation transaction to send the new
# device messages to each remote destination.

View file

@ -167,7 +167,6 @@ class EventHandler(BaseHandler):
if not event:
return None
return
users = yield self.store.get_users_in_room(event.room_id)
is_peeking = user.to_string() not in users

View file

@ -1428,7 +1428,7 @@ class FederationHandler(BaseHandler):
assert event.user_id == user_id
assert event.state_key == user_id
assert event.room_id == room_id
return (origin, event, format_ver)
return origin, event, format_ver
@defer.inlineCallbacks
@log_function

View file

@ -61,21 +61,76 @@ class IdentityHandler(BaseHandler):
return False
return True
@defer.inlineCallbacks
def threepid_from_creds(self, creds):
if "id_server" in creds:
id_server = creds["id_server"]
elif "idServer" in creds:
id_server = creds["idServer"]
else:
raise SynapseError(400, "No id_server in creds")
def _extract_items_from_creds_dict(self, creds):
"""
Retrieve entries from a "credentials" dictionary
if "client_secret" in creds:
client_secret = creds["client_secret"]
elif "clientSecret" in creds:
client_secret = creds["clientSecret"]
Args:
creds (dict[str, str]): Dictionary of credentials that contain the following keys:
* client_secret|clientSecret: A unique secret str provided by the client
* id_server|idServer: the domain of the identity server to query
* id_access_token: The access token to authenticate to the identity
server with.
Returns:
tuple(str, str, str|None): A tuple containing the client_secret, the id_server,
and the id_access_token value if available.
"""
client_secret = creds.get("client_secret") or creds.get("clientSecret")
if not client_secret:
raise SynapseError(
400, "No client_secret in creds", errcode=Codes.MISSING_PARAM
)
id_server = creds.get("id_server") or creds.get("idServer")
if not id_server:
raise SynapseError(
400, "No id_server in creds", errcode=Codes.MISSING_PARAM
)
id_access_token = creds.get("id_access_token")
return client_secret, id_server, id_access_token
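# Illustration (not part of the diff): a self-contained version of the key
# handling above, showing the accepted key variants and the returned tuple.
# The credential values are made up.
def extract_creds(creds):
    client_secret = creds.get("client_secret") or creds.get("clientSecret")
    id_server = creds.get("id_server") or creds.get("idServer")
    return client_secret, id_server, creds.get("id_access_token")

print(extract_creds({"clientSecret": "abc", "id_server": "id.example.com", "sid": "42"}))
# -> ('abc', 'id.example.com', None): no id_access_token, so callers fall back to v1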
@defer.inlineCallbacks
def threepid_from_creds(self, creds, use_v2=True):
"""
Retrieve and validate a threepid identifier from a "credentials" dictionary
Args:
creds (dict[str, str]): Dictionary of credentials that contain the following keys:
* client_secret|clientSecret: A unique secret str provided by the client
* id_server|idServer: the domain of the identity server to query
* id_access_token: The access token to authenticate to the identity
server with. Required if use_v2 is true
use_v2 (bool): Whether to use v2 Identity Service API endpoints
Returns:
Deferred[dict[str,str|int]|None]: A dictionary consisting of response params to
the /getValidated3pid endpoint of the Identity Service API, or None if the
threepid was not found
"""
client_secret, id_server, id_access_token = self._extract_items_from_creds_dict(
creds
)
# If an id_access_token is not supplied, force usage of v1
if id_access_token is None:
use_v2 = False
query_params = {"sid": creds["sid"], "client_secret": client_secret}
# Decide which API endpoint URLs and query parameters to use
if use_v2:
url = "https://%s%s" % (
id_server,
"/_matrix/identity/v2/3pid/getValidated3pid",
)
query_params["id_access_token"] = id_access_token
else:
raise SynapseError(400, "No client_secret in creds")
url = "https://%s%s" % (
id_server,
"/_matrix/identity/api/v1/3pid/getValidated3pid",
)
if not self._should_trust_id_server(id_server):
logger.warn(
@ -85,43 +140,55 @@ class IdentityHandler(BaseHandler):
return None
try:
data = yield self.http_client.get_json(
"https://%s%s"
% (id_server, "/_matrix/identity/api/v1/3pid/getValidated3pid"),
{"sid": creds["sid"], "client_secret": client_secret},
)
data = yield self.http_client.get_json(url, query_params)
return data if "medium" in data else None
except HttpResponseException as e:
logger.info("getValidated3pid failed with Matrix error: %r", e)
raise e.to_synapse_error()
if e.code != 404 or not use_v2:
# Generic failure
logger.info("getValidated3pid failed with Matrix error: %r", e)
raise e.to_synapse_error()
if "medium" in data:
return data
return None
# This identity server is too old to understand Identity Service API v2
# Attempt v1 endpoint
logger.info("Got 404 when POSTing JSON %s, falling back to v1 URL", url)
return (yield self.threepid_from_creds(creds, use_v2=False))
@defer.inlineCallbacks
def bind_threepid(self, creds, mxid):
def bind_threepid(self, creds, mxid, use_v2=True):
"""Bind a 3PID to an identity server
Args:
creds (dict[str, str]): Dictionary of credentials that contain the following keys:
* client_secret|clientSecret: A unique secret str provided by the client
* id_server|idServer: the domain of the identity server to query
* id_access_token: The access token to authenticate to the identity
server with. Required if use_v2 is true
mxid (str): The MXID to bind the 3PID to
use_v2 (bool): Whether to use v2 Identity Service API endpoints
Returns:
Deferred[dict]: The response from the identity server
"""
logger.debug("binding threepid %r to %s", creds, mxid)
data = None
if "id_server" in creds:
id_server = creds["id_server"]
elif "idServer" in creds:
id_server = creds["idServer"]
else:
raise SynapseError(400, "No id_server in creds")
client_secret, id_server, id_access_token = self._extract_items_from_creds_dict(
creds
)
if "client_secret" in creds:
client_secret = creds["client_secret"]
elif "clientSecret" in creds:
client_secret = creds["clientSecret"]
# If an id_access_token is not supplied, force usage of v1
if id_access_token is None:
use_v2 = False
# Decide which API endpoint URLs to use
bind_data = {"sid": creds["sid"], "client_secret": client_secret, "mxid": mxid}
if use_v2:
bind_url = "https://%s/_matrix/identity/v2/3pid/bind" % (id_server,)
bind_data["id_access_token"] = id_access_token
else:
raise SynapseError(400, "No client_secret in creds")
bind_url = "https://%s/_matrix/identity/api/v1/3pid/bind" % (id_server,)
try:
data = yield self.http_client.post_json_get_json(
"https://%s%s" % (id_server, "/_matrix/identity/api/v1/3pid/bind"),
{"sid": creds["sid"], "client_secret": client_secret, "mxid": mxid},
)
data = yield self.http_client.post_json_get_json(bind_url, bind_data)
logger.debug("bound threepid %r to %s", creds, mxid)
# Remember where we bound the threepid
@ -131,13 +198,23 @@ class IdentityHandler(BaseHandler):
address=data["address"],
id_server=id_server,
)
return data
except HttpResponseException as e:
if e.code != 404 or not use_v2:
logger.error("3PID bind failed with Matrix error: %r", e)
raise e.to_synapse_error()
except CodeMessageException as e:
data = json.loads(e.msg) # XXX WAT?
return data
return data
logger.info("Got 404 when POSTing JSON %s, falling back to v1 URL", bind_url)
return (yield self.bind_threepid(creds, mxid, use_v2=False))
@defer.inlineCallbacks
def try_unbind_threepid(self, mxid, threepid):
"""Removes a binding from an identity server
"""Attempt to remove a 3PID from an identity server, or if one is not provided, all
identity servers we're aware the binding is present on
Args:
mxid (str): Matrix user ID of binding to be removed
@ -188,6 +265,8 @@ class IdentityHandler(BaseHandler):
server doesn't support unbinding
"""
url = "https://%s/_matrix/identity/api/v1/3pid/unbind" % (id_server,)
url_bytes = "/_matrix/identity/api/v1/3pid/unbind".encode("ascii")
content = {
"mxid": mxid,
"threepid": {"medium": threepid["medium"], "address": threepid["address"]},
@ -199,7 +278,7 @@ class IdentityHandler(BaseHandler):
auth_headers = self.federation_http_client.build_auth_headers(
destination=None,
method="POST",
url_bytes="/_matrix/identity/api/v1/3pid/unbind".encode("ascii"),
url_bytes=url_bytes,
content=content,
destination_is=id_server,
)
@ -282,16 +361,3 @@ class IdentityHandler(BaseHandler):
except HttpResponseException as e:
logger.info("Proxied requestToken failed: %r", e)
raise e.to_synapse_error()
class LookupAlgorithm:
"""
Supported hashing algorithms when performing a 3PID lookup.
SHA256 - Hashing an (address, medium, pepper) combo with sha256, then url-safe base64
encoding
NONE - Not performing any hashing. Simply sending an (address, medium) combo in plaintext
"""
SHA256 = "sha256"
NONE = "none"

View file

@ -449,8 +449,7 @@ class InitialSyncHandler(BaseHandler):
# * The user is a guest user, and has joined the room
# else it will throw.
member_event = yield self.auth.check_user_was_in_room(room_id, user_id)
return (member_event.membership, member_event.event_id)
return
return member_event.membership, member_event.event_id
except AuthError:
visibility = yield self.state_handler.get_current_state(
room_id, EventTypes.RoomHistoryVisibility, ""
@ -459,8 +458,7 @@ class InitialSyncHandler(BaseHandler):
visibility
and visibility.content["history_visibility"] == "world_readable"
):
return (Membership.JOIN, None)
return
return Membership.JOIN, None
raise AuthError(
403, "Guest access not allowed", errcode=Codes.GUEST_ACCESS_FORBIDDEN
)

View file

@ -255,7 +255,7 @@ class PresenceHandler(object):
self.unpersisted_users_changes = set()
if unpersisted:
logger.info("Persisting %d upersisted presence updates", len(unpersisted))
logger.info("Persisting %d unpersisted presence updates", len(unpersisted))
yield self.store.update_presence(
[self.user_to_current_state[user_id] for user_id in unpersisted]
)
@ -1032,7 +1032,7 @@ class PresenceEventSource(object):
#
# Hence this guard where we just return nothing so that the sync
# doesn't return. C.f. #5503.
return ([], max_token)
return [], max_token
presence = self.get_presence_handler()
stream_change_cache = self.store.presence_stream_cache
@ -1279,7 +1279,7 @@ def get_interested_parties(store, states):
# Always notify self
users_to_states.setdefault(state.user_id, []).append(state)
return (room_ids_to_states, users_to_states)
return room_ids_to_states, users_to_states
@defer.inlineCallbacks

View file

@ -148,7 +148,7 @@ class ReceiptEventSource(object):
to_key = yield self.get_current_key()
if from_key == to_key:
return ([], to_key)
return [], to_key
events = yield self.store.get_linearized_receipts_for_rooms(
room_ids, from_key=from_key, to_key=to_key

View file

@ -24,13 +24,11 @@ from synapse.api.errors import (
AuthError,
Codes,
ConsentNotGivenError,
InvalidCaptchaError,
LimitExceededError,
RegistrationError,
SynapseError,
)
from synapse.config.server import is_threepid_reserved
from synapse.http.client import CaptchaServerHttpClient
from synapse.http.servlet import assert_params_in_dict
from synapse.replication.http.login import RegisterDeviceReplicationServlet
from synapse.replication.http.register import (
@ -39,7 +37,6 @@ from synapse.replication.http.register import (
)
from synapse.types import RoomAlias, RoomID, UserID, create_requester
from synapse.util.async_helpers import Linearizer
from synapse.util.threepids import check_3pid_allowed
from ._base import BaseHandler
@ -59,7 +56,6 @@ class RegistrationHandler(BaseHandler):
self._auth_handler = hs.get_auth_handler()
self.profile_handler = hs.get_profile_handler()
self.user_directory_handler = hs.get_user_directory_handler()
self.captcha_client = CaptchaServerHttpClient(hs)
self.identity_handler = self.hs.get_handlers().identity_handler
self.ratelimiter = hs.get_registration_ratelimiter()
@ -362,70 +358,6 @@ class RegistrationHandler(BaseHandler):
)
return user_id
@defer.inlineCallbacks
def check_recaptcha(self, ip, private_key, challenge, response):
"""
Checks a recaptcha is correct.
Used only by c/s api v1
"""
captcha_response = yield self._validate_captcha(
ip, private_key, challenge, response
)
if not captcha_response["valid"]:
logger.info(
"Invalid captcha entered from %s. Error: %s",
ip,
captcha_response["error_url"],
)
raise InvalidCaptchaError(error_url=captcha_response["error_url"])
else:
logger.info("Valid captcha entered from %s", ip)
@defer.inlineCallbacks
def register_email(self, threepidCreds):
"""
Registers emails with an identity server.
Used only by c/s api v1
"""
for c in threepidCreds:
logger.info(
"validating threepidcred sid %s on id server %s",
c["sid"],
c["idServer"],
)
try:
threepid = yield self.identity_handler.threepid_from_creds(c)
except Exception:
logger.exception("Couldn't validate 3pid")
raise RegistrationError(400, "Couldn't validate 3pid")
if not threepid:
raise RegistrationError(400, "Couldn't validate 3pid")
logger.info(
"got threepid with medium '%s' and address '%s'",
threepid["medium"],
threepid["address"],
)
if not check_3pid_allowed(self.hs, threepid["medium"], threepid["address"]):
raise RegistrationError(403, "Third party identifier is not allowed")
@defer.inlineCallbacks
def bind_emails(self, user_id, threepidCreds):
"""Links emails with a user ID and informs an identity server.
Used only by c/s api v1
"""
# Now we have a matrix ID, bind it to the threepids we were given
for c in threepidCreds:
# XXX: This should be a deferred list, shouldn't it?
yield self.identity_handler.bind_threepid(c, user_id)
def check_user_id_not_appservice_exclusive(self, user_id, allowed_appservice=None):
# don't allow people to register the server notices mxid
if self._server_notices_mxid is not None:
@ -463,45 +395,8 @@ class RegistrationHandler(BaseHandler):
self._next_generated_user_id += 1
return str(id)
@defer.inlineCallbacks
def _validate_captcha(self, ip_addr, private_key, challenge, response):
"""Validates the captcha provided.
Used only by c/s api v1
Returns:
dict: Containing 'valid'(bool) and 'error_url'(str) if invalid.
"""
response = yield self._submit_captcha(ip_addr, private_key, challenge, response)
# parse Google's response. Lovely format..
lines = response.split("\n")
json = {
"valid": lines[0] == "true",
"error_url": "http://www.recaptcha.net/recaptcha/api/challenge?"
+ "error=%s" % lines[1],
}
return json
@defer.inlineCallbacks
def _submit_captcha(self, ip_addr, private_key, challenge, response):
"""
Used only by c/s api v1
"""
data = yield self.captcha_client.post_urlencoded_get_raw(
"http://www.recaptcha.net:80/recaptcha/api/verify",
args={
"privatekey": private_key,
"remoteip": ip_addr,
"challenge": challenge,
"response": response,
},
)
return data
@defer.inlineCallbacks
def _join_user_to_room(self, requester, room_identifier):
room_id = None
room_member_handler = self.hs.get_room_member_handler()
if RoomID.is_valid(room_identifier):
room_id = room_identifier
@ -622,7 +517,7 @@ class RegistrationHandler(BaseHandler):
initial_display_name=initial_display_name,
is_guest=is_guest,
)
return (r["device_id"], r["access_token"])
return r["device_id"], r["access_token"]
valid_until_ms = None
if self.session_lifetime is not None:
@ -648,9 +543,7 @@ class RegistrationHandler(BaseHandler):
return (device_id, access_token)
@defer.inlineCallbacks
def post_registration_actions(
self, user_id, auth_result, access_token, bind_email, bind_msisdn
):
def post_registration_actions(self, user_id, auth_result, access_token):
"""A user has completed registration
Args:
@ -659,18 +552,10 @@ class RegistrationHandler(BaseHandler):
registered user.
access_token (str|None): The access token of the newly logged in
device, or None if `inhibit_login` enabled.
bind_email (bool): Whether to bind the email with the identity
server.
bind_msisdn (bool): Whether to bind the msisdn with the identity
server.
"""
if self.hs.config.worker_app:
yield self._post_registration_client(
user_id=user_id,
auth_result=auth_result,
access_token=access_token,
bind_email=bind_email,
bind_msisdn=bind_msisdn,
user_id=user_id, auth_result=auth_result, access_token=access_token
)
return
@ -683,13 +568,11 @@ class RegistrationHandler(BaseHandler):
):
yield self.store.upsert_monthly_active_user(user_id)
yield self._register_email_threepid(
user_id, threepid, access_token, bind_email
)
yield self._register_email_threepid(user_id, threepid, access_token)
if auth_result and LoginType.MSISDN in auth_result:
threepid = auth_result[LoginType.MSISDN]
yield self._register_msisdn_threepid(user_id, threepid, bind_msisdn)
yield self._register_msisdn_threepid(user_id, threepid)
if auth_result and LoginType.TERMS in auth_result:
yield self._on_user_consented(user_id, self.hs.config.user_consent_version)
@ -708,14 +591,12 @@ class RegistrationHandler(BaseHandler):
yield self.post_consent_actions(user_id)
@defer.inlineCallbacks
def _register_email_threepid(self, user_id, threepid, token, bind_email):
def _register_email_threepid(self, user_id, threepid, token):
"""Add an email address as a 3pid identifier
Also adds an email pusher for the email address, if configured in the
HS config
Also optionally binds emails to the given user_id on the identity server
Must be called on master.
Args:
@ -723,8 +604,6 @@ class RegistrationHandler(BaseHandler):
threepid (object): m.login.email.identity auth response
token (str|None): access_token for the user, or None if not logged
in.
bind_email (bool): true if the client requested the email to be
bound at the identity server
Returns:
defer.Deferred:
"""
@ -766,29 +645,15 @@ class RegistrationHandler(BaseHandler):
data={},
)
if bind_email:
logger.info("bind_email specified: binding")
logger.debug("Binding emails %s to %s" % (threepid, user_id))
yield self.identity_handler.bind_threepid(
threepid["threepid_creds"], user_id
)
else:
logger.info("bind_email not specified: not binding email")
@defer.inlineCallbacks
def _register_msisdn_threepid(self, user_id, threepid, bind_msisdn):
def _register_msisdn_threepid(self, user_id, threepid):
"""Add a phone number as a 3pid identifier
Also optionally binds msisdn to the given user_id on the identity server
Must be called on master.
Args:
user_id (str): id of user
threepid (object): m.login.msisdn auth response
token (str): access_token for the user
bind_email (bool): true if the client requested the email to be
bound at the identity server
Returns:
defer.Deferred:
"""
@ -804,12 +669,3 @@ class RegistrationHandler(BaseHandler):
yield self._auth_handler.add_threepid(
user_id, threepid["medium"], threepid["address"], threepid["validated_at"]
)
if bind_msisdn:
logger.info("bind_msisdn specified: binding")
logger.debug("Binding msisdn %s to %s", threepid, user_id)
yield self.identity_handler.bind_threepid(
threepid["threepid_creds"], user_id
)
else:
logger.info("bind_msisdn not specified: not binding msisdn")

View file

@ -852,7 +852,6 @@ class RoomContextHandler(object):
)
if not event:
return None
return
filtered = yield (filter_evts([event]))
if not filtered:

View file

@ -29,11 +29,9 @@ from twisted.internet import defer
from synapse import types
from synapse.api.constants import EventTypes, Membership
from synapse.api.errors import AuthError, Codes, HttpResponseException, SynapseError
from synapse.handlers.identity import LookupAlgorithm
from synapse.types import RoomID, UserID
from synapse.util.async_helpers import Linearizer
from synapse.util.distributor import user_joined_room, user_left_room
from synapse.util.hash import sha256_and_url_safe_base64
from ._base import BaseHandler
@ -525,7 +523,7 @@ class RoomMemberHandler(object):
event (SynapseEvent): The membership event.
context: The context of the event.
is_guest (bool): Whether the sender is a guest.
remote_room_hosts (list[str]|None): Homeservers which are likely to already be in
room_hosts ([str]): Homeservers which are likely to already be in
the room, and could be danced with in order to join this
homeserver for the first time.
ratelimit (bool): Whether to rate limit this request.
@ -636,7 +634,7 @@ class RoomMemberHandler(object):
servers.remove(room_alias.domain)
servers.insert(0, room_alias.domain)
return RoomID.from_string(room_id), servers
return (RoomID.from_string(room_id), servers)
@defer.inlineCallbacks
def _get_inviter(self, user_id, room_id):
@ -699,44 +697,6 @@ class RoomMemberHandler(object):
raise SynapseError(
403, "Looking up third-party identifiers is denied from this server"
)
# Check what hashing details are supported by this identity server
use_v1 = False
hash_details = None
try:
hash_details = yield self.simple_http_client.get_json(
"%s%s/_matrix/identity/v2/hash_details" % (id_server_scheme, id_server)
)
except (HttpResponseException, ValueError) as e:
# Catch HttpResponseException for a non-200 response code
# Catch ValueError for non-JSON response body
# Check if this identity server does not know about v2 lookups
if e.code == 404:
# This is an old identity server that does not yet support v2 lookups
use_v1 = True
else:
logger.warn("Error when looking up hashing details: %s" % (e,))
return None
if use_v1:
return (yield self._lookup_3pid_v1(id_server, medium, address))
return (yield self._lookup_3pid_v2(id_server, medium, address, hash_details))
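For reference, the hash_details document fetched above only needs the two fields that _lookup_3pid_v2 reads; a minimal illustrative response (values invented, not taken from any real identity server):

hash_details = {
    "algorithms": ["none", "sha256"],
    "lookup_pepper": "matrixrocks",
}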
@defer.inlineCallbacks
def _lookup_3pid_v1(self, id_server, medium, address):
"""Looks up a 3pid in the passed identity server using v1 lookup.
Args:
id_server (str): The server name (including port, if required)
of the identity server to use.
medium (str): The type of the third party identifier (e.g. "email").
address (str): The third party identifier (e.g. "foo@example.com").
Returns:
str: the matrix ID of the 3pid, or None if it is not recognized.
"""
try:
data = yield self.simple_http_client.get_json(
"%s%s/_matrix/identity/api/v1/lookup" % (id_server_scheme, id_server),
@ -751,83 +711,8 @@ class RoomMemberHandler(object):
except IOError as e:
logger.warn("Error from identity server lookup: %s" % (e,))
return None
@defer.inlineCallbacks
def _lookup_3pid_v2(self, id_server, medium, address, hash_details):
"""Looks up a 3pid in the passed identity server using v2 lookup.
Args:
id_server (str): The server name (including port, if required)
of the identity server to use.
medium (str): The type of the third party identifier (e.g. "email").
address (str): The third party identifier (e.g. "foo@example.com").
hash_details (dict[str, str|list]): A dictionary containing hashing information
provided by an identity server.
Returns:
Deferred[str|None]: the matrix ID of the 3pid, or None if it is not recognised.
"""
# Extract information from hash_details
supported_lookup_algorithms = hash_details["algorithms"]
lookup_pepper = hash_details["lookup_pepper"]
# Check if any of the supported lookup algorithms are present
if LookupAlgorithm.SHA256 in supported_lookup_algorithms:
# Perform a hashed lookup
lookup_algorithm = LookupAlgorithm.SHA256
# Hash address, medium and the pepper with sha256
to_hash = "%s %s %s" % (address, medium, lookup_pepper)
lookup_value = sha256_and_url_safe_base64(to_hash)
elif LookupAlgorithm.NONE in supported_lookup_algorithms:
# Perform a non-hashed lookup
lookup_algorithm = LookupAlgorithm.NONE
# Combine together plaintext address and medium
lookup_value = "%s %s" % (address, medium)
else:
logger.warn(
"None of the provided lookup algorithms of %s%s are supported: %s",
id_server_scheme,
id_server,
hash_details["algorithms"],
)
raise SynapseError(
400,
"Provided identity server does not support any v2 lookup "
"algorithms that this homeserver supports.",
)
try:
lookup_results = yield self.simple_http_client.post_json_get_json(
"%s%s/_matrix/identity/v2/lookup" % (id_server_scheme, id_server),
{
"addresses": [lookup_value],
"algorithm": lookup_algorithm,
"pepper": lookup_pepper,
},
)
except (HttpResponseException, ValueError) as e:
# Catch HttpResponseException for a non-200 response code
# Catch ValueError for non-JSON response body
logger.warn("Error when performing a 3pid lookup: %s" % (e,))
return None
# Check for a mapping from what we looked up to an MXID
if "mappings" not in lookup_results or not isinstance(
lookup_results["mappings"], dict
):
logger.debug("No results from 3pid lookup")
return None
# Return the MXID if it's available, or None otherwise
mxid = lookup_results["mappings"].get(lookup_value)
return mxid
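The hashed lookup value used above can be reproduced with the standard library alone; a minimal sketch (not Synapse code, assuming sha256_and_url_safe_base64 means sha256 followed by unpadded URL-safe base64, as its name suggests):

import base64
import hashlib

def hashed_lookup_value(address, medium, lookup_pepper):
    # sha256 over "address medium pepper", then unpadded URL-safe base64
    to_hash = "%s %s %s" % (address, medium, lookup_pepper)
    digest = hashlib.sha256(to_hash.encode("utf8")).digest()
    return base64.urlsafe_b64encode(digest).decode("ascii").rstrip("=")

# e.g. hashed_lookup_value("foo@example.com", "email", "matrixrocks") gives the
# value placed in the "addresses" list of the /v2/lookup request.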
@defer.inlineCallbacks
def _verify_any_signature(self, data, server_hostname):
if server_hostname not in data["signatures"]:
@ -1018,7 +903,7 @@ class RoomMemberHandler(object):
if not public_keys:
public_keys.append(fallback_public_key)
display_name = data["display_name"]
return (token, public_keys, fallback_public_key, display_name)
return token, public_keys, fallback_public_key, display_name
@defer.inlineCallbacks
def _is_host_in_room(self, current_state_ids):


@ -14,15 +14,14 @@
# limitations under the License.
import logging
from collections import Counter
from twisted.internet import defer
from synapse.api.constants import EventTypes, JoinRules, Membership
from synapse.api.constants import EventTypes, Membership
from synapse.handlers.state_deltas import StateDeltasHandler
from synapse.metrics import event_processing_positions
from synapse.metrics.background_process_metrics import run_as_background_process
from synapse.types import UserID
from synapse.util.metrics import Measure
logger = logging.getLogger(__name__)
@ -62,11 +61,10 @@ class StatsHandler(StateDeltasHandler):
def notify_new_event(self):
"""Called when there may be more deltas to process
"""
if not self.hs.config.stats_enabled:
if not self.hs.config.stats_enabled or self._is_processing:
return
if self._is_processing:
return
self._is_processing = True
@defer.inlineCallbacks
def process():
@ -75,39 +73,72 @@ class StatsHandler(StateDeltasHandler):
finally:
self._is_processing = False
self._is_processing = True
run_as_background_process("stats.notify_new_event", process)
@defer.inlineCallbacks
def _unsafe_process(self):
# If self.pos is None then it means we haven't fetched it from the DB
if self.pos is None:
self.pos = yield self.store.get_stats_stream_pos()
# If still None then the initial background update hasn't happened yet
if self.pos is None:
return None
self.pos = yield self.store.get_stats_positions()
# Loop round handling deltas until we're up to date
while True:
with Measure(self.clock, "stats_delta"):
deltas = yield self.store.get_current_state_deltas(self.pos)
if not deltas:
return
deltas = yield self.store.get_current_state_deltas(self.pos)
logger.info("Handling %d state deltas", len(deltas))
yield self._handle_deltas(deltas)
if deltas:
logger.debug("Handling %d state deltas", len(deltas))
room_deltas, user_deltas = yield self._handle_deltas(deltas)
self.pos = deltas[-1]["stream_id"]
yield self.store.update_stats_stream_pos(self.pos)
max_pos = deltas[-1]["stream_id"]
else:
room_deltas = {}
user_deltas = {}
max_pos = yield self.store.get_room_max_stream_ordering()
event_processing_positions.labels("stats").set(self.pos)
# Then count deltas for total_events and total_event_bytes.
room_count, user_count = yield self.store.get_changes_room_total_events_and_bytes(
self.pos, max_pos
)
for room_id, fields in room_count.items():
room_deltas.setdefault(room_id, {}).update(fields)
for user_id, fields in user_count.items():
user_deltas.setdefault(user_id, {}).update(fields)
logger.debug("room_deltas: %s", room_deltas)
logger.debug("user_deltas: %s", user_deltas)
# Always call this so that we update the stats position.
yield self.store.bulk_update_stats_delta(
self.clock.time_msec(),
updates={"room": room_deltas, "user": user_deltas},
stream_id=max_pos,
)
event_processing_positions.labels("stats").set(max_pos)
if self.pos == max_pos:
break
self.pos = max_pos
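The updates argument handed to bulk_update_stats_delta above is plain nested dicts keyed by entity ID; an illustrative payload (room ID, user ID and counts invented):

updates = {
    "room": {
        "!abcdef:example.com": {"joined_members": 1, "total_events": 5},
    },
    "user": {
        "@alice:example.com": {"joined_rooms": 1, "invites_sent": 2},
    },
}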
@defer.inlineCallbacks
def _handle_deltas(self, deltas):
"""Called with the state deltas to process
Returns:
Deferred[tuple[dict[str, Counter], dict[str, Counter]]]
Resolves to two dicts, the room deltas and the user deltas,
mapping from room/user ID to changes in the various fields.
"""
Called with the state deltas to process
"""
room_to_stats_deltas = {}
user_to_stats_deltas = {}
room_to_state_updates = {}
for delta in deltas:
typ = delta["type"]
state_key = delta["state_key"]
@ -115,11 +146,10 @@ class StatsHandler(StateDeltasHandler):
event_id = delta["event_id"]
stream_id = delta["stream_id"]
prev_event_id = delta["prev_event_id"]
stream_pos = delta["stream_id"]
logger.debug("Handling: %r %r, %s", typ, state_key, event_id)
logger.debug("Handling: %r, %r %r, %s", room_id, typ, state_key, event_id)
token = yield self.store.get_earliest_token_for_room_stats(room_id)
token = yield self.store.get_earliest_token_for_stats("room", room_id)
# If the earliest token to begin from is larger than our current
# stream ID, skip processing this delta.
@ -131,203 +161,130 @@ class StatsHandler(StateDeltasHandler):
continue
if event_id is None and prev_event_id is None:
# Errr...
logger.error(
"event ID is None and so is the previous event ID. stream_id: %s",
stream_id,
)
continue
event_content = {}
sender = None
if event_id is not None:
event = yield self.store.get_event(event_id, allow_none=True)
if event:
event_content = event.content or {}
sender = event.sender
# We use stream_pos here rather than fetch by event_id as event_id
# may be None
now = yield self.store.get_received_ts_by_stream_pos(stream_pos)
# All the values in this dict are deltas (RELATIVE changes)
room_stats_delta = room_to_stats_deltas.setdefault(room_id, Counter())
# quantise time to the nearest bucket
now = (now // 1000 // self.stats_bucket_size) * self.stats_bucket_size
room_state = room_to_state_updates.setdefault(room_id, {})
if prev_event_id is None:
# this state event doesn't overwrite another,
# so it is a new effective/current state event
room_stats_delta["current_state_events"] += 1
if typ == EventTypes.Member:
# we could use _get_key_change here but it's a bit inefficient
# given we're not testing for a specific result; might as well
# just grab the prev_membership and membership strings and
# compare them.
prev_event_content = {}
# We take None rather than leave as a previous membership
# in the absence of a previous event because we do not want to
# reduce the leave count when a new-to-the-room user joins.
prev_membership = None
if prev_event_id is not None:
prev_event = yield self.store.get_event(
prev_event_id, allow_none=True
)
if prev_event:
prev_event_content = prev_event.content
prev_membership = prev_event_content.get(
"membership", Membership.LEAVE
)
membership = event_content.get("membership", Membership.LEAVE)
prev_membership = prev_event_content.get("membership", Membership.LEAVE)
if prev_membership == membership:
continue
if prev_membership == Membership.JOIN:
yield self.store.update_stats_delta(
now, "room", room_id, "joined_members", -1
)
if prev_membership is None:
logger.debug("No previous membership for this user.")
elif membership == prev_membership:
pass # noop
elif prev_membership == Membership.JOIN:
room_stats_delta["joined_members"] -= 1
elif prev_membership == Membership.INVITE:
yield self.store.update_stats_delta(
now, "room", room_id, "invited_members", -1
)
room_stats_delta["invited_members"] -= 1
elif prev_membership == Membership.LEAVE:
yield self.store.update_stats_delta(
now, "room", room_id, "left_members", -1
)
room_stats_delta["left_members"] -= 1
elif prev_membership == Membership.BAN:
yield self.store.update_stats_delta(
now, "room", room_id, "banned_members", -1
)
room_stats_delta["banned_members"] -= 1
else:
err = "%s is not a valid prev_membership" % (repr(prev_membership),)
logger.error(err)
raise ValueError(err)
raise ValueError(
"%r is not a valid prev_membership" % (prev_membership,)
)
if membership == prev_membership:
pass # noop
if membership == Membership.JOIN:
yield self.store.update_stats_delta(
now, "room", room_id, "joined_members", +1
)
room_stats_delta["joined_members"] += 1
elif membership == Membership.INVITE:
yield self.store.update_stats_delta(
now, "room", room_id, "invited_members", +1
)
room_stats_delta["invited_members"] += 1
if sender and self.is_mine_id(sender):
user_to_stats_deltas.setdefault(sender, Counter())[
"invites_sent"
] += 1
elif membership == Membership.LEAVE:
yield self.store.update_stats_delta(
now, "room", room_id, "left_members", +1
)
room_stats_delta["left_members"] += 1
elif membership == Membership.BAN:
yield self.store.update_stats_delta(
now, "room", room_id, "banned_members", +1
)
room_stats_delta["banned_members"] += 1
else:
err = "%s is not a valid membership" % (repr(membership),)
logger.error(err)
raise ValueError(err)
raise ValueError("%r is not a valid membership" % (membership,))
user_id = state_key
if self.is_mine_id(user_id):
# update user_stats as it's one of our users
public = yield self._is_public_room(room_id)
# this accounts for transitions like leave → ban and so on.
has_changed_joinedness = (prev_membership == Membership.JOIN) != (
membership == Membership.JOIN
)
if membership == Membership.LEAVE:
yield self.store.update_stats_delta(
now,
"user",
user_id,
"public_rooms" if public else "private_rooms",
-1,
)
elif membership == Membership.JOIN:
yield self.store.update_stats_delta(
now,
"user",
user_id,
"public_rooms" if public else "private_rooms",
+1,
)
if has_changed_joinedness:
delta = +1 if membership == Membership.JOIN else -1
user_to_stats_deltas.setdefault(user_id, Counter())[
"joined_rooms"
] += delta
room_stats_delta["local_users_in_room"] += delta
elif typ == EventTypes.Create:
# Newly created room. Add it with all blank portions.
yield self.store.update_room_state(
room_id,
{
"join_rules": None,
"history_visibility": None,
"encryption": None,
"name": None,
"topic": None,
"avatar": None,
"canonical_alias": None,
},
)
room_state["is_federatable"] = event_content.get("m.federate", True)
if sender and self.is_mine_id(sender):
user_to_stats_deltas.setdefault(sender, Counter())[
"rooms_created"
] += 1
elif typ == EventTypes.JoinRules:
yield self.store.update_room_state(
room_id, {"join_rules": event_content.get("join_rule")}
)
is_public = yield self._get_key_change(
prev_event_id, event_id, "join_rule", JoinRules.PUBLIC
)
if is_public is not None:
yield self.update_public_room_stats(now, room_id, is_public)
room_state["join_rules"] = event_content.get("join_rule")
elif typ == EventTypes.RoomHistoryVisibility:
yield self.store.update_room_state(
room_id,
{"history_visibility": event_content.get("history_visibility")},
room_state["history_visibility"] = event_content.get(
"history_visibility"
)
is_public = yield self._get_key_change(
prev_event_id, event_id, "history_visibility", "world_readable"
)
if is_public is not None:
yield self.update_public_room_stats(now, room_id, is_public)
elif typ == EventTypes.Encryption:
yield self.store.update_room_state(
room_id, {"encryption": event_content.get("algorithm")}
)
room_state["encryption"] = event_content.get("algorithm")
elif typ == EventTypes.Name:
yield self.store.update_room_state(
room_id, {"name": event_content.get("name")}
)
room_state["name"] = event_content.get("name")
elif typ == EventTypes.Topic:
yield self.store.update_room_state(
room_id, {"topic": event_content.get("topic")}
)
room_state["topic"] = event_content.get("topic")
elif typ == EventTypes.RoomAvatar:
yield self.store.update_room_state(
room_id, {"avatar": event_content.get("url")}
)
room_state["avatar"] = event_content.get("url")
elif typ == EventTypes.CanonicalAlias:
yield self.store.update_room_state(
room_id, {"canonical_alias": event_content.get("alias")}
)
room_state["canonical_alias"] = event_content.get("alias")
elif typ == EventTypes.GuestAccess:
room_state["guest_access"] = event_content.get("guest_access")
@defer.inlineCallbacks
def update_public_room_stats(self, ts, room_id, is_public):
"""
Increment/decrement a user's number of public rooms when a room they are
in changes to/from public visibility.
for room_id, state in room_to_state_updates.items():
yield self.store.update_room_state(room_id, state)
Args:
ts (int): Timestamp in seconds
room_id (str)
is_public (bool)
"""
# For now, blindly iterate over all local users in the room so that
# we can handle the whole problem of copying buckets over as needed
user_ids = yield self.store.get_users_in_room(room_id)
for user_id in user_ids:
if self.hs.is_mine(UserID.from_string(user_id)):
yield self.store.update_stats_delta(
ts, "user", user_id, "public_rooms", +1 if is_public else -1
)
yield self.store.update_stats_delta(
ts, "user", user_id, "private_rooms", -1 if is_public else +1
)
@defer.inlineCallbacks
def _is_public_room(self, room_id):
join_rules = yield self.state.get_current_state(room_id, EventTypes.JoinRules)
history_visibility = yield self.state.get_current_state(
room_id, EventTypes.RoomHistoryVisibility
)
if (join_rules and join_rules.content.get("join_rule") == JoinRules.PUBLIC) or (
(
history_visibility
and history_visibility.content.get("history_visibility")
== "world_readable"
)
):
return True
else:
return False
return room_to_stats_deltas, user_to_stats_deltas
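As a worked example of the counter arithmetic above (IDs and ordering invented), an invite followed by a join for the same local user nets out like this:

from collections import Counter

room_stats_delta = Counter()

# delta 1: no previous membership -> invite
room_stats_delta["invited_members"] += 1

# delta 2: invite -> join; joinedness changed, so the local-user counters move too
room_stats_delta["invited_members"] -= 1
room_stats_delta["joined_members"] += 1
room_stats_delta["local_users_in_room"] += 1  # plus joined_rooms +1 for that user

assert room_stats_delta["joined_members"] == 1
assert room_stats_delta["invited_members"] == 0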


@ -378,7 +378,7 @@ class SyncHandler(object):
event_copy = {k: v for (k, v) in iteritems(event) if k != "room_id"}
ephemeral_by_room.setdefault(room_id, []).append(event_copy)
return (now_token, ephemeral_by_room)
return now_token, ephemeral_by_room
@defer.inlineCallbacks
def _load_filtered_recents(
@ -578,7 +578,6 @@ class SyncHandler(object):
if not last_events:
return None
return
last_event = last_events[-1]
state_ids = yield self.store.get_state_ids_for_event(
@ -1332,7 +1331,7 @@ class SyncHandler(object):
)
if not tags_by_room:
logger.debug("no-oping sync")
return ([], [], [], [])
return [], [], [], []
ignored_account_data = yield self.store.get_global_account_data_by_type_for_user(
"m.ignored_user_list", user_id=user_id
@ -1642,7 +1641,7 @@ class SyncHandler(object):
)
room_entries.append(entry)
return (room_entries, invited, newly_joined_rooms, newly_left_rooms)
return room_entries, invited, newly_joined_rooms, newly_left_rooms
@defer.inlineCallbacks
def _get_all_rooms(self, sync_result_builder, ignored_users):
@ -1716,7 +1715,7 @@ class SyncHandler(object):
)
)
return (room_entries, invited, [])
return room_entries, invited, []
@defer.inlineCallbacks
def _generate_room_entry(


@ -319,4 +319,4 @@ class TypingNotificationEventSource(object):
return self.get_typing_handler()._latest_room_serial
def get_pagination_rows(self, user, pagination_config, key):
return ([], pagination_config.from_key)
return [], pagination_config.from_key


@ -35,7 +35,7 @@ from twisted.internet.interfaces import (
)
from twisted.python.failure import Failure
from twisted.web._newclient import ResponseDone
from twisted.web.client import Agent, HTTPConnectionPool, PartialDownloadError, readBody
from twisted.web.client import Agent, HTTPConnectionPool, readBody
from twisted.web.http import PotentialDataLoss
from twisted.web.http_headers import Headers
@ -599,38 +599,6 @@ def _readBodyToFile(response, stream, max_size):
return d
class CaptchaServerHttpClient(SimpleHttpClient):
"""
Separate HTTP client for talking to google's captcha servers
Only slightly special because it accepts partial download responses;
used only by the c/s API v1
"""
@defer.inlineCallbacks
def post_urlencoded_get_raw(self, url, args={}):
query_bytes = urllib.parse.urlencode(encode_urlencode_args(args), True)
response = yield self.request(
"POST",
url,
data=query_bytes,
headers=Headers(
{
b"Content-Type": [b"application/x-www-form-urlencoded"],
b"User-Agent": [self.user_agent],
}
),
)
try:
body = yield make_deferred_yieldable(readBody(response))
return body
except PartialDownloadError as e:
# twisted dislikes google's response, no content length.
return e.response
def encode_urlencode_args(args):
return {k: encode_urlencode_arg(v) for k, v in args.items()}


@ -207,7 +207,7 @@ class WellKnownResolver(object):
cache_period + WELL_KNOWN_REMEMBER_DOMAIN_HAD_VALID,
)
return (result, cache_period)
return result, cache_period
@defer.inlineCallbacks
def _make_well_known_request(self, server_name, retry):


@ -40,6 +40,7 @@ from synapse.api.errors import (
UnrecognizedRequestError,
)
from synapse.logging.context import preserve_fn
from synapse.logging.opentracing import trace_servlet
from synapse.util.caches import intern_dict
logger = logging.getLogger(__name__)
@ -257,7 +258,9 @@ class JsonResource(HttpServer, resource.Resource):
self.path_regexs = {}
self.hs = hs
def register_paths(self, method, path_patterns, callback, servlet_classname):
def register_paths(
self, method, path_patterns, callback, servlet_classname, trace=True
):
"""
Registers a request handler against a regular expression. Later request URLs are
checked against these regular expressions in order to identify an appropriate
@ -273,8 +276,16 @@ class JsonResource(HttpServer, resource.Resource):
servlet_classname (str): The name of the handler to be used in prometheus
and opentracing logs.
trace (bool): Whether we should start a span to trace the servlet.
"""
method = method.encode("utf-8") # method is bytes on py3
if trace:
# We don't extract the context from the servlet because we can't
# trust the sender
callback = trace_servlet(servlet_classname)(callback)
for path_pattern in path_patterns:
logger.debug("Registering for %s %s", method, path_pattern.pattern)
self.path_regexs.setdefault(method, []).append(


@ -20,7 +20,6 @@ import logging
from canonicaljson import json
from synapse.api.errors import Codes, SynapseError
from synapse.logging.opentracing import trace_servlet
logger = logging.getLogger(__name__)
@ -298,10 +297,7 @@ class RestServlet(object):
servlet_classname = self.__class__.__name__
method_handler = getattr(self, "on_%s" % (method,))
http_server.register_paths(
method,
patterns,
trace_servlet(servlet_classname)(method_handler),
servlet_classname,
method, patterns, method_handler, servlet_classname
)
else:


@ -85,14 +85,14 @@ the function becomes the operation name for the span.
return something_usual_and_useful
Operation names can be explicitly set for functions by using
``trace_using_operation_name``
Operation names can be explicitly set for a function by passing the
operation name to ``trace``
.. code-block:: python
from synapse.logging.opentracing import trace_using_operation_name
from synapse.logging.opentracing import trace
@trace_using_operation_name("A *much* better operation name")
@trace(opname="a_better_operation_name")
def interesting_badly_named_function(*args, **kwargs):
# Does all kinds of cool and expected things
return something_usual_and_useful
@ -319,7 +319,7 @@ def whitelisted_homeserver(destination):
Args:
destination (str)
"""
_homeserver_whitelist
if _homeserver_whitelist:
return _homeserver_whitelist.match(destination)
return False
@ -493,6 +493,11 @@ def inject_active_span_twisted_headers(headers, destination, check_destination=T
Args:
headers (twisted.web.http_headers.Headers)
destination (str): address of entity receiving the span context. If check_destination
is true the context will only be injected if the destination matches the
opentracing whitelist
check_destination (bool): If false, destination will be ignored and the context
will always be injected.
span (opentracing.Span)
Returns:
@ -525,6 +530,11 @@ def inject_active_span_byte_dict(headers, destination, check_destination=True):
Args:
headers (dict)
destination (str): address of entity receiving the span context. If check_destination
is true the context will only be injected if the destination matches the
opentracing whitelist
check_destination (bool): If false, destination will be ignored and the context
will always be injected.
span (opentracing.Span)
Returns:
@ -537,7 +547,7 @@ def inject_active_span_byte_dict(headers, destination, check_destination=True):
here:
https://github.com/jaegertracing/jaeger-client-python/blob/master/jaeger_client/constants.py
"""
if not whitelisted_homeserver(destination):
if check_destination and not whitelisted_homeserver(destination):
return
span = opentracing.tracer.active_span
@ -556,9 +566,11 @@ def inject_active_span_text_map(carrier, destination, check_destination=True):
Args:
carrier (dict)
destination (str): the name of the remote server. The span context
will only be injected if the destination matches the homeserver_whitelist
or destination is None.
destination (str): address of entity receiving the span context. If check_destination
is true the context will only be injected if the destination matches the
opentracing whitelist
check_destination (bool): If false, destination will be ignored and the context
will always be injected.
Returns:
In-place modification of carrier
@ -641,66 +653,26 @@ def extract_text_map(carrier):
# Tracing decorators
def trace(func):
def trace(func=None, opname=None):
"""
Decorator to trace a function.
Sets the operation name to that of the function's.
Sets the operation name to the function's name, or to the name given
as opname. See the module's docstring for usage examples.
"""
if opentracing is None:
return func
@wraps(func)
def _trace_inner(self, *args, **kwargs):
if opentracing is None:
return func(self, *args, **kwargs)
scope = start_active_span(func.__name__)
scope.__enter__()
try:
result = func(self, *args, **kwargs)
if isinstance(result, defer.Deferred):
def call_back(result):
scope.__exit__(None, None, None)
return result
def err_back(result):
scope.span.set_tag(tags.ERROR, True)
scope.__exit__(None, None, None)
return result
result.addCallbacks(call_back, err_back)
else:
scope.__exit__(None, None, None)
return result
except Exception as e:
scope.__exit__(type(e), None, e.__traceback__)
raise
return _trace_inner
def trace_using_operation_name(operation_name):
"""Decorator to trace a function. Explicitely sets the operation_name."""
def trace(func):
"""
Decorator to trace a function.
Sets the operation name to that of the function's.
"""
def decorator(func):
if opentracing is None:
return func
_opname = opname if opname else func.__name__
@wraps(func)
def _trace_inner(self, *args, **kwargs):
if opentracing is None:
return func(self, *args, **kwargs)
scope = start_active_span(operation_name)
scope = start_active_span(_opname)
scope.__enter__()
try:
@ -717,6 +689,7 @@ def trace_using_operation_name(operation_name):
return result
result.addCallbacks(call_back, err_back)
else:
scope.__exit__(None, None, None)
@ -728,7 +701,10 @@ def trace_using_operation_name(operation_name):
return _trace_inner
return trace
if func:
return decorator(func)
else:
return decorator
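Both calling conventions of the reworked decorator then look like this (the handler class and method names are illustrative only):

from synapse.logging.opentracing import trace

class ExampleHandler(object):
    @trace  # span is named after the function: "get_thing"
    def get_thing(self, thing_id):
        return thing_id

    @trace(opname="a_better_operation_name")  # span name set explicitly
    def badly_named_thing(self, thing_id):
        return thing_id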
def tag_args(func):


@ -101,7 +101,7 @@ class ModuleApi(object):
)
user_id = yield self.register_user(localpart, displayname, emails)
_, access_token = yield self.register_device(user_id)
return (user_id, access_token)
return user_id, access_token
def register_user(self, localpart, displayname=None, emails=[]):
"""Registers a new user with given localpart and optional displayname, emails.


@ -472,11 +472,11 @@ class Notifier(object):
joined_room_ids = yield self.store.get_rooms_for_user(user.to_string())
if explicit_room_id:
if explicit_room_id in joined_room_ids:
return ([explicit_room_id], True)
return [explicit_room_id], True
if (yield self._is_world_readable(explicit_room_id)):
return ([explicit_room_id], False)
return [explicit_room_id], False
raise AuthError(403, "Non-joined access not allowed")
return (joined_room_ids, True)
return joined_room_ids, True
@defer.inlineCallbacks
def _is_world_readable(self, room_id):


@ -134,7 +134,7 @@ class BulkPushRuleEvaluator(object):
pl_event = auth_events.get(POWER_KEY)
return (pl_event.content if pl_event else {}, sender_level)
return pl_event.content if pl_event else {}, sender_level
@defer.inlineCallbacks
def action_for_event_by_user(self, event, context):


@ -22,13 +22,13 @@ from six.moves import urllib
from twisted.internet import defer
import synapse.logging.opentracing as opentracing
from synapse.api.errors import (
CodeMessageException,
HttpResponseException,
RequestSendFailed,
SynapseError,
)
from synapse.logging.opentracing import inject_active_span_byte_dict, trace_servlet
from synapse.util.caches.response_cache import ResponseCache
from synapse.util.stringutils import random_string
@ -167,9 +167,7 @@ class ReplicationEndpoint(object):
# the master, and so whether we should clean up or not.
while True:
headers = {}
opentracing.inject_active_span_byte_dict(
headers, None, check_destination=False
)
inject_active_span_byte_dict(headers, None, check_destination=False)
try:
result = yield request_func(uri, data, headers=headers)
break
@ -210,13 +208,11 @@ class ReplicationEndpoint(object):
args = "/".join("(?P<%s>[^/]+)" % (arg,) for arg in url_args)
pattern = re.compile("^/_synapse/replication/%s/%s$" % (self.NAME, args))
handler = trace_servlet(self.__class__.__name__, extract_context=True)(handler)
# We don't let register paths trace this servlet using the default tracing
# options because we wish to extract the context explicitly.
http_server.register_paths(
method,
[pattern],
opentracing.trace_servlet(self.__class__.__name__, extract_context=True)(
handler
),
self.__class__.__name__,
method, [pattern], handler, self.__class__.__name__, trace=False
)
def _cached_handler(self, request, txn_id, **kwargs):


@ -113,7 +113,7 @@ class ReplicationFederationSendEventsRestServlet(ReplicationEndpoint):
event_and_contexts, backfilled
)
return (200, {})
return 200, {}
class ReplicationFederationSendEduRestServlet(ReplicationEndpoint):
@ -156,7 +156,7 @@ class ReplicationFederationSendEduRestServlet(ReplicationEndpoint):
result = yield self.registry.on_edu(edu_type, origin, edu_content)
return (200, result)
return 200, result
class ReplicationGetQueryRestServlet(ReplicationEndpoint):
@ -204,7 +204,7 @@ class ReplicationGetQueryRestServlet(ReplicationEndpoint):
result = yield self.registry.on_query(query_type, args)
return (200, result)
return 200, result
class ReplicationCleanRoomRestServlet(ReplicationEndpoint):
@ -238,7 +238,7 @@ class ReplicationCleanRoomRestServlet(ReplicationEndpoint):
def _handle_request(self, request, room_id):
yield self.store.clean_room_for_join(room_id)
return (200, {})
return 200, {}
def register_servlets(hs, http_server):


@ -64,7 +64,7 @@ class RegisterDeviceReplicationServlet(ReplicationEndpoint):
user_id, device_id, initial_display_name, is_guest
)
return (200, {"device_id": device_id, "access_token": access_token})
return 200, {"device_id": device_id, "access_token": access_token}
def register_servlets(hs, http_server):


@ -83,7 +83,7 @@ class ReplicationRemoteJoinRestServlet(ReplicationEndpoint):
remote_room_hosts, room_id, user_id, event_content
)
return (200, {})
return 200, {}
class ReplicationRemoteRejectInviteRestServlet(ReplicationEndpoint):
@ -153,7 +153,7 @@ class ReplicationRemoteRejectInviteRestServlet(ReplicationEndpoint):
yield self.store.locally_reject_invite(user_id, room_id)
ret = {}
return (200, ret)
return 200, ret
class ReplicationUserJoinedLeftRoomRestServlet(ReplicationEndpoint):
@ -202,7 +202,7 @@ class ReplicationUserJoinedLeftRoomRestServlet(ReplicationEndpoint):
else:
raise Exception("Unrecognized change: %r", change)
return (200, {})
return 200, {}
def register_servlets(hs, http_server):


@ -90,7 +90,7 @@ class ReplicationRegisterServlet(ReplicationEndpoint):
address=content["address"],
)
return (200, {})
return 200, {}
class ReplicationPostRegisterActionsServlet(ReplicationEndpoint):
@ -106,7 +106,7 @@ class ReplicationPostRegisterActionsServlet(ReplicationEndpoint):
self.registration_handler = hs.get_registration_handler()
@staticmethod
def _serialize_payload(user_id, auth_result, access_token, bind_email, bind_msisdn):
def _serialize_payload(user_id, auth_result, access_token):
"""
Args:
user_id (str): The user ID that consented
@ -114,17 +114,8 @@ class ReplicationPostRegisterActionsServlet(ReplicationEndpoint):
registered user.
access_token (str|None): The access token of the newly logged in
device, or None if `inhibit_login` enabled.
bind_email (bool): Whether to bind the email with the identity
server
bind_msisdn (bool): Whether to bind the msisdn with the identity
server
"""
return {
"auth_result": auth_result,
"access_token": access_token,
"bind_email": bind_email,
"bind_msisdn": bind_msisdn,
}
return {"auth_result": auth_result, "access_token": access_token}
@defer.inlineCallbacks
def _handle_request(self, request, user_id):
@ -132,18 +123,12 @@ class ReplicationPostRegisterActionsServlet(ReplicationEndpoint):
auth_result = content["auth_result"]
access_token = content["access_token"]
bind_email = content["bind_email"]
bind_msisdn = content["bind_msisdn"]
yield self.registration_handler.post_registration_actions(
user_id=user_id,
auth_result=auth_result,
access_token=access_token,
bind_email=bind_email,
bind_msisdn=bind_msisdn,
user_id=user_id, auth_result=auth_result, access_token=access_token
)
return (200, {})
return 200, {}
def register_servlets(hs, http_server):


@ -117,7 +117,7 @@ class ReplicationSendEventRestServlet(ReplicationEndpoint):
requester, event, context, ratelimit=ratelimit, extra_users=extra_users
)
return (200, {})
return 200, {}
def register_servlets(hs, http_server):


@ -158,7 +158,7 @@ class Stream(object):
updates, current_token = yield self.get_updates_since(self.last_token)
self.last_token = current_token
return (updates, current_token)
return updates, current_token
@defer.inlineCallbacks
def get_updates_since(self, from_token):
@ -172,14 +172,14 @@ class Stream(object):
sent over the replication stream.
"""
if from_token in ("NOW", "now"):
return ([], self.upto_token)
return [], self.upto_token
current_token = self.upto_token
from_token = int(from_token)
if from_token == current_token:
return ([], current_token)
return [], current_token
if self._LIMITED:
rows = yield self.update_function(
@ -198,7 +198,7 @@ class Stream(object):
if self._LIMITED and len(updates) >= MAX_EVENTS_BEHIND:
raise Exception("stream %s has fallen behind" % (self.NAME))
return (updates, current_token)
return updates, current_token
def current_token(self):
"""Gets the current token of the underlying streams. Should be provided


@ -41,7 +41,7 @@ from synapse.rest.admin._base import (
assert_user_is_admin,
historical_admin_path_patterns,
)
from synapse.rest.admin.media import register_servlets_for_media_repo
from synapse.rest.admin.media import ListMediaInRoom, register_servlets_for_media_repo
from synapse.rest.admin.purge_room_servlet import PurgeRoomServlet
from synapse.rest.admin.server_notice_servlet import SendServerNoticeServlet
from synapse.rest.admin.users import UserAdminServlet
@ -69,7 +69,7 @@ class UsersRestServlet(RestServlet):
ret = yield self.handlers.admin_handler.get_users()
return (200, ret)
return 200, ret
class VersionServlet(RestServlet):
@ -120,7 +120,7 @@ class UserRegisterServlet(RestServlet):
nonce = self.hs.get_secrets().token_hex(64)
self.nonces[nonce] = int(self.reactor.seconds())
return (200, {"nonce": nonce})
return 200, {"nonce": nonce}
@defer.inlineCallbacks
def on_POST(self, request):
@ -212,7 +212,7 @@ class UserRegisterServlet(RestServlet):
)
result = yield register._create_registration_details(user_id, body)
return (200, result)
return 200, result
class WhoisRestServlet(RestServlet):
@ -237,7 +237,7 @@ class WhoisRestServlet(RestServlet):
ret = yield self.handlers.admin_handler.get_whois(target_user)
return (200, ret)
return 200, ret
class PurgeHistoryRestServlet(RestServlet):
@ -322,7 +322,7 @@ class PurgeHistoryRestServlet(RestServlet):
room_id, token, delete_local_events=delete_local_events
)
return (200, {"purge_id": purge_id})
return 200, {"purge_id": purge_id}
class PurgeHistoryStatusRestServlet(RestServlet):
@ -347,7 +347,7 @@ class PurgeHistoryStatusRestServlet(RestServlet):
if purge_status is None:
raise NotFoundError("purge id '%s' not found" % purge_id)
return (200, purge_status.asdict())
return 200, purge_status.asdict()
class DeactivateAccountRestServlet(RestServlet):
@ -379,7 +379,7 @@ class DeactivateAccountRestServlet(RestServlet):
else:
id_server_unbind_result = "no-support"
return (200, {"id_server_unbind_result": id_server_unbind_result})
return 200, {"id_server_unbind_result": id_server_unbind_result}
class ShutdownRoomRestServlet(RestServlet):
@ -549,7 +549,7 @@ class ResetPasswordRestServlet(RestServlet):
yield self._set_password_handler.set_password(
target_user_id, new_password, requester
)
return (200, {})
return 200, {}
class GetUsersPaginatedRestServlet(RestServlet):
@ -591,7 +591,7 @@ class GetUsersPaginatedRestServlet(RestServlet):
logger.info("limit: %s, start: %s", limit, start)
ret = yield self.handlers.admin_handler.get_users_paginate(order, start, limit)
return (200, ret)
return 200, ret
@defer.inlineCallbacks
def on_POST(self, request, target_user_id):
@ -619,7 +619,7 @@ class GetUsersPaginatedRestServlet(RestServlet):
logger.info("limit: %s, start: %s", limit, start)
ret = yield self.handlers.admin_handler.get_users_paginate(order, start, limit)
return (200, ret)
return 200, ret
class SearchUsersRestServlet(RestServlet):
@ -662,7 +662,7 @@ class SearchUsersRestServlet(RestServlet):
logger.info("term: %s ", term)
ret = yield self.handlers.admin_handler.search_users(term)
return (200, ret)
return 200, ret
class DeleteGroupAdminRestServlet(RestServlet):
@ -685,7 +685,7 @@ class DeleteGroupAdminRestServlet(RestServlet):
raise SynapseError(400, "Can only delete local groups")
yield self.group_server.delete_group(group_id, requester.user.to_string())
return (200, {})
return 200, {}
class AccountValidityRenewServlet(RestServlet):
@ -716,7 +716,7 @@ class AccountValidityRenewServlet(RestServlet):
)
res = {"expiration_ts": expiration_ts}
return (200, res)
return 200, res
########################################################################################
@ -761,9 +761,12 @@ def register_servlets_for_client_rest_resource(hs, http_server):
DeleteGroupAdminRestServlet(hs).register(http_server)
AccountValidityRenewServlet(hs).register(http_server)
# Load the media repo ones if we're using them.
# Load the media repo ones if we're using them. Otherwise load the servlets which
# don't need a media repo (typically readonly admin APIs).
if hs.config.can_load_media_repo:
register_servlets_for_media_repo(hs, http_server)
else:
ListMediaInRoom(hs).register(http_server)
# don't add more things here: new servlets should only be exposed on
# /_synapse/admin so should not go here. Instead register them in AdminRestResource.


@ -49,7 +49,7 @@ class QuarantineMediaInRoom(RestServlet):
room_id, requester.user.to_string()
)
return (200, {"num_quarantined": num_quarantined})
return 200, {"num_quarantined": num_quarantined}
class ListMediaInRoom(RestServlet):
@ -60,6 +60,7 @@ class ListMediaInRoom(RestServlet):
def __init__(self, hs):
self.store = hs.get_datastore()
self.auth = hs.get_auth()
@defer.inlineCallbacks
def on_GET(self, request, room_id):
@ -70,7 +71,7 @@ class ListMediaInRoom(RestServlet):
local_mxcs, remote_mxcs = yield self.store.get_media_mxcs_in_room(room_id)
return (200, {"local": local_mxcs, "remote": remote_mxcs})
return 200, {"local": local_mxcs, "remote": remote_mxcs}
class PurgeMediaCacheRestServlet(RestServlet):
@ -89,7 +90,7 @@ class PurgeMediaCacheRestServlet(RestServlet):
ret = yield self.media_repository.delete_old_remote_media(before_ts)
return (200, ret)
return 200, ret
def register_servlets_for_media_repo(hs, http_server):


@ -54,4 +54,4 @@ class PurgeRoomServlet(RestServlet):
await self.pagination_handler.purge_room(body["room_id"])
return (200, {})
return 200, {}


@ -92,7 +92,7 @@ class SendServerNoticeServlet(RestServlet):
event_content=body["content"],
)
return (200, {"event_id": event.event_id})
return 200, {"event_id": event.event_id}
def on_PUT(self, request, txn_id):
return self.txns.fetch_or_execute_request(


@ -71,7 +71,7 @@ class UserAdminServlet(RestServlet):
is_admin = yield self.handlers.admin_handler.get_user_server_admin(target_user)
is_admin = bool(is_admin)
return (200, {"admin": is_admin})
return 200, {"admin": is_admin}
@defer.inlineCallbacks
def on_PUT(self, request, user_id):
@ -97,4 +97,4 @@ class UserAdminServlet(RestServlet):
target_user, set_admin_to
)
return (200, {})
return 200, {}


@ -54,7 +54,7 @@ class ClientDirectoryServer(RestServlet):
dir_handler = self.handlers.directory_handler
res = yield dir_handler.get_association(room_alias)
return (200, res)
return 200, res
@defer.inlineCallbacks
def on_PUT(self, request, room_alias):
@ -87,7 +87,7 @@ class ClientDirectoryServer(RestServlet):
requester, room_alias, room_id, servers
)
return (200, {})
return 200, {}
@defer.inlineCallbacks
def on_DELETE(self, request, room_alias):
@ -102,7 +102,7 @@ class ClientDirectoryServer(RestServlet):
service.url,
room_alias.to_string(),
)
return (200, {})
return 200, {}
except InvalidClientCredentialsError:
# fallback to default user behaviour if they aren't an AS
pass
@ -118,7 +118,7 @@ class ClientDirectoryServer(RestServlet):
"User %s deleted alias %s", user.to_string(), room_alias.to_string()
)
return (200, {})
return 200, {}
class ClientDirectoryListServer(RestServlet):
@ -136,7 +136,7 @@ class ClientDirectoryListServer(RestServlet):
if room is None:
raise NotFoundError("Unknown room")
return (200, {"visibility": "public" if room["is_public"] else "private"})
return 200, {"visibility": "public" if room["is_public"] else "private"}
@defer.inlineCallbacks
def on_PUT(self, request, room_id):
@ -149,7 +149,7 @@ class ClientDirectoryListServer(RestServlet):
requester, room_id, visibility
)
return (200, {})
return 200, {}
@defer.inlineCallbacks
def on_DELETE(self, request, room_id):
@ -159,7 +159,7 @@ class ClientDirectoryListServer(RestServlet):
requester, room_id, "private"
)
return (200, {})
return 200, {}
class ClientAppserviceDirectoryListServer(RestServlet):
@ -193,4 +193,4 @@ class ClientAppserviceDirectoryListServer(RestServlet):
requester.app_service.id, network_id, room_id, visibility
)
return (200, {})
return 200, {}


@ -67,10 +67,10 @@ class EventStreamRestServlet(RestServlet):
is_guest=is_guest,
)
return (200, chunk)
return 200, chunk
def on_OPTIONS(self, request):
return (200, {})
return 200, {}
# TODO: Unit test gets, with and without auth, with different kinds of events.
@ -91,9 +91,9 @@ class EventRestServlet(RestServlet):
time_now = self.clock.time_msec()
if event:
event = yield self._event_serializer.serialize_event(event, time_now)
return (200, event)
return 200, event
else:
return (404, "Event not found.")
return 404, "Event not found."
def register_servlets(hs, http_server):


@ -42,7 +42,7 @@ class InitialSyncRestServlet(RestServlet):
include_archived=include_archived,
)
return (200, content)
return 200, content
def register_servlets(hs, http_server):


@ -121,10 +121,10 @@ class LoginRestServlet(RestServlet):
({"type": t} for t in self.auth_handler.get_supported_login_types())
)
return (200, {"flows": flows})
return 200, {"flows": flows}
def on_OPTIONS(self, request):
return (200, {})
return 200, {}
@defer.inlineCallbacks
def on_POST(self, request):
@ -152,7 +152,7 @@ class LoginRestServlet(RestServlet):
well_known_data = self._well_known_builder.get_well_known()
if well_known_data:
result["well_known"] = well_known_data
return (200, result)
return 200, result
@defer.inlineCallbacks
def _do_other_login(self, login_submission):


@ -33,7 +33,7 @@ class LogoutRestServlet(RestServlet):
self._device_handler = hs.get_device_handler()
def on_OPTIONS(self, request):
return (200, {})
return 200, {}
@defer.inlineCallbacks
def on_POST(self, request):
@ -49,7 +49,7 @@ class LogoutRestServlet(RestServlet):
requester.user.to_string(), requester.device_id
)
return (200, {})
return 200, {}
class LogoutAllRestServlet(RestServlet):
@ -62,7 +62,7 @@ class LogoutAllRestServlet(RestServlet):
self._device_handler = hs.get_device_handler()
def on_OPTIONS(self, request):
return (200, {})
return 200, {}
@defer.inlineCallbacks
def on_POST(self, request):
@ -75,7 +75,7 @@ class LogoutAllRestServlet(RestServlet):
# .. and then delete any access tokens which weren't associated with
# devices.
yield self._auth_handler.delete_access_tokens_for_user(user_id)
return (200, {})
return 200, {}
def register_servlets(hs, http_server):


@ -56,7 +56,7 @@ class PresenceStatusRestServlet(RestServlet):
state = yield self.presence_handler.get_state(target_user=user)
state = format_user_presence_state(state, self.clock.time_msec())
return (200, state)
return 200, state
@defer.inlineCallbacks
def on_PUT(self, request, user_id):
@ -88,10 +88,10 @@ class PresenceStatusRestServlet(RestServlet):
if self.hs.config.use_presence:
yield self.presence_handler.set_state(user, state)
return (200, {})
return 200, {}
def on_OPTIONS(self, request):
return (200, {})
return 200, {}
def register_servlets(hs, http_server):


@ -48,7 +48,7 @@ class ProfileDisplaynameRestServlet(RestServlet):
if displayname is not None:
ret["displayname"] = displayname
return (200, ret)
return 200, ret
@defer.inlineCallbacks
def on_PUT(self, request, user_id):
@ -61,14 +61,14 @@ class ProfileDisplaynameRestServlet(RestServlet):
try:
new_name = content["displayname"]
except Exception:
return (400, "Unable to parse name")
return 400, "Unable to parse name"
yield self.profile_handler.set_displayname(user, requester, new_name, is_admin)
return (200, {})
return 200, {}
def on_OPTIONS(self, request, user_id):
return (200, {})
return 200, {}
class ProfileAvatarURLRestServlet(RestServlet):
@ -98,7 +98,7 @@ class ProfileAvatarURLRestServlet(RestServlet):
if avatar_url is not None:
ret["avatar_url"] = avatar_url
return (200, ret)
return 200, ret
@defer.inlineCallbacks
def on_PUT(self, request, user_id):
@ -110,14 +110,14 @@ class ProfileAvatarURLRestServlet(RestServlet):
try:
new_name = content["avatar_url"]
except Exception:
return (400, "Unable to parse name")
return 400, "Unable to parse name"
yield self.profile_handler.set_avatar_url(user, requester, new_name, is_admin)
return (200, {})
return 200, {}
def on_OPTIONS(self, request, user_id):
return (200, {})
return 200, {}
class ProfileRestServlet(RestServlet):
@ -150,7 +150,7 @@ class ProfileRestServlet(RestServlet):
if avatar_url is not None:
ret["avatar_url"] = avatar_url
return (200, ret)
return 200, ret
def register_servlets(hs, http_server):


@ -69,7 +69,7 @@ class PushRuleRestServlet(RestServlet):
if "attr" in spec:
yield self.set_rule_attr(user_id, spec, content)
self.notify_user(user_id)
return (200, {})
return 200, {}
if spec["rule_id"].startswith("."):
# Rule ids starting with '.' are reserved for server default rules.
@ -106,7 +106,7 @@ class PushRuleRestServlet(RestServlet):
except RuleNotFoundException as e:
raise SynapseError(400, str(e))
return (200, {})
return 200, {}
@defer.inlineCallbacks
def on_DELETE(self, request, path):
@ -123,7 +123,7 @@ class PushRuleRestServlet(RestServlet):
try:
yield self.store.delete_push_rule(user_id, namespaced_rule_id)
self.notify_user(user_id)
return (200, {})
return 200, {}
except StoreError as e:
if e.code == 404:
raise NotFoundError()
@ -151,10 +151,10 @@ class PushRuleRestServlet(RestServlet):
)
if path[0] == "":
return (200, rules)
return 200, rules
elif path[0] == "global":
result = _filter_ruleset_with_path(rules["global"], path[1:])
return (200, result)
return 200, result
else:
raise UnrecognizedRequestError()


@ -62,7 +62,7 @@ class PushersRestServlet(RestServlet):
if k not in allowed_keys:
del p[k]
return (200, {"pushers": pushers})
return 200, {"pushers": pushers}
def on_OPTIONS(self, _):
return 200, {}
@ -94,7 +94,7 @@ class PushersSetRestServlet(RestServlet):
yield self.pusher_pool.remove_pusher(
content["app_id"], content["pushkey"], user_id=user.to_string()
)
return (200, {})
return 200, {}
assert_params_in_dict(
content,
@ -143,7 +143,7 @@ class PushersSetRestServlet(RestServlet):
self.notifier.on_new_replication_data()
return (200, {})
return 200, {}
def on_OPTIONS(self, _):
return 200, {}


@ -91,14 +91,14 @@ class RoomCreateRestServlet(TransactionRestServlet):
requester, self.get_room_config(request)
)
return (200, info)
return 200, info
def get_room_config(self, request):
user_supplied_config = parse_json_object_from_request(request)
return user_supplied_config
def on_OPTIONS(self, request):
return (200, {})
return 200, {}
# TODO: Needs unit testing for generic events
@ -173,9 +173,9 @@ class RoomStateEventRestServlet(TransactionRestServlet):
if format == "event":
event = format_event_for_client_v2(data.get_dict())
return (200, event)
return 200, event
elif format == "content":
return (200, data.get_dict()["content"])
return 200, data.get_dict()["content"]
@defer.inlineCallbacks
def on_PUT(self, request, room_id, event_type, state_key, txn_id=None):
@ -210,7 +210,7 @@ class RoomStateEventRestServlet(TransactionRestServlet):
ret = {}
if event:
ret = {"event_id": event.event_id}
return (200, ret)
return 200, ret
# TODO: Needs unit testing for generic events + feedback
@ -244,10 +244,10 @@ class RoomSendEventRestServlet(TransactionRestServlet):
requester, event_dict, txn_id=txn_id
)
return (200, {"event_id": event.event_id})
return 200, {"event_id": event.event_id}
def on_GET(self, request, room_id, event_type, txn_id):
return (200, "Not implemented")
return 200, "Not implemented"
def on_PUT(self, request, room_id, event_type, txn_id):
return self.txns.fetch_or_execute_request(
@ -307,7 +307,7 @@ class JoinRoomAliasServlet(TransactionRestServlet):
third_party_signed=content.get("third_party_signed", None),
)
return (200, {"room_id": room_id})
return 200, {"room_id": room_id}
def on_PUT(self, request, room_identifier, txn_id):
return self.txns.fetch_or_execute_request(
@ -360,7 +360,7 @@ class PublicRoomListRestServlet(TransactionRestServlet):
limit=limit, since_token=since_token
)
return (200, data)
return 200, data
@defer.inlineCallbacks
def on_POST(self, request):
@ -405,7 +405,7 @@ class PublicRoomListRestServlet(TransactionRestServlet):
network_tuple=network_tuple,
)
return (200, data)
return 200, data
# TODO: Needs unit testing
@ -456,7 +456,7 @@ class RoomMemberListRestServlet(RestServlet):
continue
chunk.append(event)
return (200, {"chunk": chunk})
return 200, {"chunk": chunk}
# deprecated in favour of /members?membership=join?
@ -477,7 +477,7 @@ class JoinedRoomMemberListRestServlet(RestServlet):
requester, room_id
)
return (200, {"joined": users_with_profile})
return 200, {"joined": users_with_profile}
# TODO: Needs better unit testing
@ -510,7 +510,7 @@ class RoomMessageListRestServlet(RestServlet):
event_filter=event_filter,
)
return (200, msgs)
return 200, msgs
# TODO: Needs unit testing
@ -531,7 +531,7 @@ class RoomStateRestServlet(RestServlet):
user_id=requester.user.to_string(),
is_guest=requester.is_guest,
)
return (200, events)
return 200, events
# TODO: Needs unit testing
@ -550,7 +550,7 @@ class RoomInitialSyncRestServlet(RestServlet):
content = yield self.initial_sync_handler.room_initial_sync(
room_id=room_id, requester=requester, pagin_config=pagination_config
)
return (200, content)
return 200, content
class RoomEventServlet(RestServlet):
@ -581,7 +581,7 @@ class RoomEventServlet(RestServlet):
time_now = self.clock.time_msec()
if event:
event = yield self._event_serializer.serialize_event(event, time_now)
return (200, event)
return 200, event
raise SynapseError(404, "Event not found.", errcode=Codes.NOT_FOUND)
@ -633,7 +633,7 @@ class RoomEventContextServlet(RestServlet):
results["state"], time_now
)
return (200, results)
return 200, results
class RoomForgetRestServlet(TransactionRestServlet):
@ -652,7 +652,7 @@ class RoomForgetRestServlet(TransactionRestServlet):
yield self.room_member_handler.forget(user=requester.user, room_id=room_id)
return (200, {})
return 200, {}
def on_PUT(self, request, room_id, txn_id):
return self.txns.fetch_or_execute_request(
@ -702,8 +702,7 @@ class RoomMembershipRestServlet(TransactionRestServlet):
requester,
txn_id,
)
return (200, {})
return
return 200, {}
target = requester.user
if membership_action in ["invite", "ban", "unban", "kick"]:
@ -729,7 +728,7 @@ class RoomMembershipRestServlet(TransactionRestServlet):
if membership_action == "join":
return_value["room_id"] = room_id
return (200, return_value)
return 200, return_value
def _has_3pid_invite_keys(self, content):
for key in {"id_server", "medium", "address"}:
@ -771,7 +770,7 @@ class RoomRedactEventRestServlet(TransactionRestServlet):
txn_id=txn_id,
)
return (200, {"event_id": event.event_id})
return 200, {"event_id": event.event_id}
def on_PUT(self, request, room_id, event_id, txn_id):
return self.txns.fetch_or_execute_request(
@ -816,7 +815,7 @@ class RoomTypingRestServlet(RestServlet):
target_user=target_user, auth_user=requester.user, room_id=room_id
)
return (200, {})
return 200, {}
class SearchRestServlet(RestServlet):
@ -838,7 +837,7 @@ class SearchRestServlet(RestServlet):
requester.user, content, batch
)
return (200, results)
return 200, results
class JoinedRoomsRestServlet(RestServlet):
@ -854,7 +853,7 @@ class JoinedRoomsRestServlet(RestServlet):
requester = yield self.auth.get_user_by_req(request, allow_guest=True)
room_ids = yield self.store.get_rooms_for_user(requester.user.to_string())
return (200, {"joined_rooms": list(room_ids)})
return 200, {"joined_rooms": list(room_ids)}
def register_txn_path(servlet, regex_string, http_server, with_get=False):


@ -60,7 +60,7 @@ class VoipRestServlet(RestServlet):
password = turnPassword
else:
return (200, {})
return 200, {}
return (
200,
@ -73,7 +73,7 @@ class VoipRestServlet(RestServlet):
)
def on_OPTIONS(self, request):
return (200, {})
return 200, {}
def register_servlets(hs, http_server):


@ -117,7 +117,7 @@ class EmailPasswordRequestTokenRestServlet(RestServlet):
# Wrap the session id in a JSON object
ret = {"sid": sid}
return (200, ret)
return 200, ret
@defer.inlineCallbacks
def send_password_reset(self, email, client_secret, send_attempt, next_link=None):
@ -221,7 +221,7 @@ class MsisdnPasswordRequestTokenRestServlet(RestServlet):
raise SynapseError(400, "MSISDN not found", Codes.THREEPID_NOT_FOUND)
ret = yield self.identity_handler.requestMsisdnToken(**body)
return (200, ret)
return 200, ret
class PasswordResetSubmitTokenServlet(RestServlet):
@ -330,7 +330,7 @@ class PasswordResetSubmitTokenServlet(RestServlet):
)
response_code = 200 if valid else 400
return (response_code, {"success": valid})
return response_code, {"success": valid}
class PasswordRestServlet(RestServlet):
@ -399,7 +399,7 @@ class PasswordRestServlet(RestServlet):
yield self._set_password_handler.set_password(user_id, new_password, requester)
return (200, {})
return 200, {}
def on_OPTIONS(self, _):
return 200, {}
@ -434,7 +434,7 @@ class DeactivateAccountRestServlet(RestServlet):
yield self._deactivate_account_handler.deactivate_account(
requester.user.to_string(), erase
)
return (200, {})
return 200, {}
yield self.auth_handler.validate_user_via_ui_auth(
requester, body, self.hs.get_ip_from_request(request)
@ -447,7 +447,7 @@ class DeactivateAccountRestServlet(RestServlet):
else:
id_server_unbind_result = "no-support"
return (200, {"id_server_unbind_result": id_server_unbind_result})
return 200, {"id_server_unbind_result": id_server_unbind_result}
class EmailThreepidRequestTokenRestServlet(RestServlet):
@ -481,7 +481,7 @@ class EmailThreepidRequestTokenRestServlet(RestServlet):
raise SynapseError(400, "Email is already in use", Codes.THREEPID_IN_USE)
ret = yield self.identity_handler.requestEmailToken(**body)
return (200, ret)
return 200, ret
class MsisdnThreepidRequestTokenRestServlet(RestServlet):
@ -516,7 +516,7 @@ class MsisdnThreepidRequestTokenRestServlet(RestServlet):
raise SynapseError(400, "MSISDN is already in use", Codes.THREEPID_IN_USE)
ret = yield self.identity_handler.requestMsisdnToken(**body)
return (200, ret)
return 200, ret
class ThreepidRestServlet(RestServlet):
@ -536,21 +536,22 @@ class ThreepidRestServlet(RestServlet):
threepids = yield self.datastore.user_get_threepids(requester.user.to_string())
return (200, {"threepids": threepids})
return 200, {"threepids": threepids}
@defer.inlineCallbacks
def on_POST(self, request):
body = parse_json_object_from_request(request)
threePidCreds = body.get("threePidCreds")
threePidCreds = body.get("three_pid_creds", threePidCreds)
if threePidCreds is None:
raise SynapseError(400, "Missing param", Codes.MISSING_PARAM)
threepid_creds = body.get("threePidCreds") or body.get("three_pid_creds")
if threepid_creds is None:
raise SynapseError(
400, "Missing param three_pid_creds", Codes.MISSING_PARAM
)
requester = yield self.auth.get_user_by_req(request)
user_id = requester.user.to_string()
threepid = yield self.identity_handler.threepid_from_creds(threePidCreds)
threepid = yield self.identity_handler.threepid_from_creds(threepid_creds)
if not threepid:
raise SynapseError(400, "Failed to auth 3pid", Codes.THREEPID_AUTH_FAILED)
@ -566,9 +567,41 @@ class ThreepidRestServlet(RestServlet):
if "bind" in body and body["bind"]:
logger.debug("Binding threepid %s to %s", threepid, user_id)
yield self.identity_handler.bind_threepid(threePidCreds, user_id)
yield self.identity_handler.bind_threepid(threepid_creds, user_id)
return (200, {})
return 200, {}
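
The on_POST change above lets the servlet accept the credentials under either the legacy camelCase key threePidCreds or the newer snake_case key three_pid_creds, and only rejects the request when both are absent. A minimal sketch of that fallback pattern, where body stands in for the parsed request body and ValueError for Synapse's SynapseError:

# Sketch of the key-fallback pattern used above; "body" stands in for the
# parsed JSON request body, and ValueError for Synapse's SynapseError.
def extract_threepid_creds(body):
    # Prefer the legacy camelCase key, then fall back to the snake_case key.
    threepid_creds = body.get("threePidCreds") or body.get("three_pid_creds")
    if threepid_creds is None:
        raise ValueError("Missing param three_pid_creds")
    return threepid_creds

assert extract_threepid_creds({"three_pid_creds": {"sid": "1"}}) == {"sid": "1"}
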
class ThreepidUnbindRestServlet(RestServlet):
PATTERNS = client_patterns("/account/3pid/unbind$")
def __init__(self, hs):
super(ThreepidUnbindRestServlet, self).__init__()
self.hs = hs
self.identity_handler = hs.get_handlers().identity_handler
self.auth = hs.get_auth()
self.datastore = self.hs.get_datastore()
@defer.inlineCallbacks
def on_POST(self, request):
"""Unbind the given 3pid from a specific identity server, or identity servers that are
known to have this 3pid bound
"""
requester = yield self.auth.get_user_by_req(request)
body = parse_json_object_from_request(request)
assert_params_in_dict(body, ["medium", "address"])
medium = body.get("medium")
address = body.get("address")
id_server = body.get("id_server")
# Attempt to unbind the threepid from an identity server. If id_server is None, try to
# unbind from all identity servers this threepid has been added to in the past.
result = yield self.identity_handler.try_unbind_threepid(
requester.user.to_string(),
{"address": address, "medium": medium, "id_server": id_server},
)
return 200, {"id_server_unbind_result": "success" if result else "no-support"}
class ThreepidDeleteRestServlet(RestServlet):
@ -603,7 +636,7 @@ class ThreepidDeleteRestServlet(RestServlet):
else:
id_server_unbind_result = "no-support"
return (200, {"id_server_unbind_result": id_server_unbind_result})
return 200, {"id_server_unbind_result": id_server_unbind_result}
class WhoamiRestServlet(RestServlet):
@ -617,7 +650,7 @@ class WhoamiRestServlet(RestServlet):
def on_GET(self, request):
requester = yield self.auth.get_user_by_req(request)
return (200, {"user_id": requester.user.to_string()})
return 200, {"user_id": requester.user.to_string()}
def register_servlets(hs, http_server):
@ -629,5 +662,6 @@ def register_servlets(hs, http_server):
EmailThreepidRequestTokenRestServlet(hs).register(http_server)
MsisdnThreepidRequestTokenRestServlet(hs).register(http_server)
ThreepidRestServlet(hs).register(http_server)
ThreepidUnbindRestServlet(hs).register(http_server)
ThreepidDeleteRestServlet(hs).register(http_server)
WhoamiRestServlet(hs).register(http_server)


@ -55,7 +55,7 @@ class AccountDataServlet(RestServlet):
self.notifier.on_new_event("account_data_key", max_id, users=[user_id])
return (200, {})
return 200, {}
@defer.inlineCallbacks
def on_GET(self, request, user_id, account_data_type):
@ -70,7 +70,7 @@ class AccountDataServlet(RestServlet):
if event is None:
raise NotFoundError("Account data not found")
return (200, event)
return 200, event
class RoomAccountDataServlet(RestServlet):
@ -112,7 +112,7 @@ class RoomAccountDataServlet(RestServlet):
self.notifier.on_new_event("account_data_key", max_id, users=[user_id])
return (200, {})
return 200, {}
@defer.inlineCallbacks
def on_GET(self, request, user_id, room_id, account_data_type):
@ -127,7 +127,7 @@ class RoomAccountDataServlet(RestServlet):
if event is None:
raise NotFoundError("Room account data not found")
return (200, event)
return 200, event
def register_servlets(hs, http_server):


@ -58,7 +58,7 @@ class CapabilitiesRestServlet(RestServlet):
"m.change_password": {"enabled": change_password},
}
}
return (200, response)
return 200, response
def register_servlets(hs, http_server):


@ -48,7 +48,7 @@ class DevicesRestServlet(RestServlet):
devices = yield self.device_handler.get_devices_by_user(
requester.user.to_string()
)
return (200, {"devices": devices})
return 200, {"devices": devices}
class DeleteDevicesRestServlet(RestServlet):
@ -91,7 +91,7 @@ class DeleteDevicesRestServlet(RestServlet):
yield self.device_handler.delete_devices(
requester.user.to_string(), body["devices"]
)
return (200, {})
return 200, {}
class DeviceRestServlet(RestServlet):
@ -114,7 +114,7 @@ class DeviceRestServlet(RestServlet):
device = yield self.device_handler.get_device(
requester.user.to_string(), device_id
)
return (200, device)
return 200, device
@interactive_auth_handler
@defer.inlineCallbacks
@ -137,7 +137,7 @@ class DeviceRestServlet(RestServlet):
)
yield self.device_handler.delete_device(requester.user.to_string(), device_id)
return (200, {})
return 200, {}
@defer.inlineCallbacks
def on_PUT(self, request, device_id):
@ -147,7 +147,7 @@ class DeviceRestServlet(RestServlet):
yield self.device_handler.update_device(
requester.user.to_string(), device_id, body
)
return (200, {})
return 200, {}
def register_servlets(hs, http_server):


@ -56,7 +56,7 @@ class GetFilterRestServlet(RestServlet):
user_localpart=target_user.localpart, filter_id=filter_id
)
return (200, filter.get_filter_json())
return 200, filter.get_filter_json()
except (KeyError, StoreError):
raise SynapseError(400, "No such filter", errcode=Codes.NOT_FOUND)
@ -89,7 +89,7 @@ class CreateFilterRestServlet(RestServlet):
user_localpart=target_user.localpart, user_filter=content
)
return (200, {"filter_id": str(filter_id)})
return 200, {"filter_id": str(filter_id)}
def register_servlets(hs, http_server):


@ -47,7 +47,7 @@ class GroupServlet(RestServlet):
group_id, requester_user_id
)
return (200, group_description)
return 200, group_description
@defer.inlineCallbacks
def on_POST(self, request, group_id):
@ -59,7 +59,7 @@ class GroupServlet(RestServlet):
group_id, requester_user_id, content
)
return (200, {})
return 200, {}
class GroupSummaryServlet(RestServlet):
@ -83,7 +83,7 @@ class GroupSummaryServlet(RestServlet):
group_id, requester_user_id
)
return (200, get_group_summary)
return 200, get_group_summary
class GroupSummaryRoomsCatServlet(RestServlet):
@ -120,7 +120,7 @@ class GroupSummaryRoomsCatServlet(RestServlet):
content=content,
)
return (200, resp)
return 200, resp
@defer.inlineCallbacks
def on_DELETE(self, request, group_id, category_id, room_id):
@ -131,7 +131,7 @@ class GroupSummaryRoomsCatServlet(RestServlet):
group_id, requester_user_id, room_id=room_id, category_id=category_id
)
return (200, resp)
return 200, resp
class GroupCategoryServlet(RestServlet):
@ -157,7 +157,7 @@ class GroupCategoryServlet(RestServlet):
group_id, requester_user_id, category_id=category_id
)
return (200, category)
return 200, category
@defer.inlineCallbacks
def on_PUT(self, request, group_id, category_id):
@ -169,7 +169,7 @@ class GroupCategoryServlet(RestServlet):
group_id, requester_user_id, category_id=category_id, content=content
)
return (200, resp)
return 200, resp
@defer.inlineCallbacks
def on_DELETE(self, request, group_id, category_id):
@ -180,7 +180,7 @@ class GroupCategoryServlet(RestServlet):
group_id, requester_user_id, category_id=category_id
)
return (200, resp)
return 200, resp
class GroupCategoriesServlet(RestServlet):
@ -204,7 +204,7 @@ class GroupCategoriesServlet(RestServlet):
group_id, requester_user_id
)
return (200, category)
return 200, category
class GroupRoleServlet(RestServlet):
@ -228,7 +228,7 @@ class GroupRoleServlet(RestServlet):
group_id, requester_user_id, role_id=role_id
)
return (200, category)
return 200, category
@defer.inlineCallbacks
def on_PUT(self, request, group_id, role_id):
@ -240,7 +240,7 @@ class GroupRoleServlet(RestServlet):
group_id, requester_user_id, role_id=role_id, content=content
)
return (200, resp)
return 200, resp
@defer.inlineCallbacks
def on_DELETE(self, request, group_id, role_id):
@ -251,7 +251,7 @@ class GroupRoleServlet(RestServlet):
group_id, requester_user_id, role_id=role_id
)
return (200, resp)
return 200, resp
class GroupRolesServlet(RestServlet):
@ -275,7 +275,7 @@ class GroupRolesServlet(RestServlet):
group_id, requester_user_id
)
return (200, category)
return 200, category
class GroupSummaryUsersRoleServlet(RestServlet):
@ -312,7 +312,7 @@ class GroupSummaryUsersRoleServlet(RestServlet):
content=content,
)
return (200, resp)
return 200, resp
@defer.inlineCallbacks
def on_DELETE(self, request, group_id, role_id, user_id):
@ -323,7 +323,7 @@ class GroupSummaryUsersRoleServlet(RestServlet):
group_id, requester_user_id, user_id=user_id, role_id=role_id
)
return (200, resp)
return 200, resp
class GroupRoomServlet(RestServlet):
@ -347,7 +347,7 @@ class GroupRoomServlet(RestServlet):
group_id, requester_user_id
)
return (200, result)
return 200, result
class GroupUsersServlet(RestServlet):
@ -371,7 +371,7 @@ class GroupUsersServlet(RestServlet):
group_id, requester_user_id
)
return (200, result)
return 200, result
class GroupInvitedUsersServlet(RestServlet):
@ -395,7 +395,7 @@ class GroupInvitedUsersServlet(RestServlet):
group_id, requester_user_id
)
return (200, result)
return 200, result
class GroupSettingJoinPolicyServlet(RestServlet):
@ -420,7 +420,7 @@ class GroupSettingJoinPolicyServlet(RestServlet):
group_id, requester_user_id, content
)
return (200, result)
return 200, result
class GroupCreateServlet(RestServlet):
@ -450,7 +450,7 @@ class GroupCreateServlet(RestServlet):
group_id, requester_user_id, content
)
return (200, result)
return 200, result
class GroupAdminRoomsServlet(RestServlet):
@ -477,7 +477,7 @@ class GroupAdminRoomsServlet(RestServlet):
group_id, requester_user_id, room_id, content
)
return (200, result)
return 200, result
@defer.inlineCallbacks
def on_DELETE(self, request, group_id, room_id):
@ -488,7 +488,7 @@ class GroupAdminRoomsServlet(RestServlet):
group_id, requester_user_id, room_id
)
return (200, result)
return 200, result
class GroupAdminRoomsConfigServlet(RestServlet):
@ -516,7 +516,7 @@ class GroupAdminRoomsConfigServlet(RestServlet):
group_id, requester_user_id, room_id, config_key, content
)
return (200, result)
return 200, result
class GroupAdminUsersInviteServlet(RestServlet):
@ -546,7 +546,7 @@ class GroupAdminUsersInviteServlet(RestServlet):
group_id, user_id, requester_user_id, config
)
return (200, result)
return 200, result
class GroupAdminUsersKickServlet(RestServlet):
@ -573,7 +573,7 @@ class GroupAdminUsersKickServlet(RestServlet):
group_id, user_id, requester_user_id, content
)
return (200, result)
return 200, result
class GroupSelfLeaveServlet(RestServlet):
@ -598,7 +598,7 @@ class GroupSelfLeaveServlet(RestServlet):
group_id, requester_user_id, requester_user_id, content
)
return (200, result)
return 200, result
class GroupSelfJoinServlet(RestServlet):
@ -623,7 +623,7 @@ class GroupSelfJoinServlet(RestServlet):
group_id, requester_user_id, content
)
return (200, result)
return 200, result
class GroupSelfAcceptInviteServlet(RestServlet):
@ -648,7 +648,7 @@ class GroupSelfAcceptInviteServlet(RestServlet):
group_id, requester_user_id, content
)
return (200, result)
return 200, result
class GroupSelfUpdatePublicityServlet(RestServlet):
@ -672,7 +672,7 @@ class GroupSelfUpdatePublicityServlet(RestServlet):
publicise = content["publicise"]
yield self.store.update_group_publicity(group_id, requester_user_id, publicise)
return (200, {})
return 200, {}
class PublicisedGroupsForUserServlet(RestServlet):
@ -694,7 +694,7 @@ class PublicisedGroupsForUserServlet(RestServlet):
result = yield self.groups_handler.get_publicised_groups_for_user(user_id)
return (200, result)
return 200, result
class PublicisedGroupsForUsersServlet(RestServlet):
@ -719,7 +719,7 @@ class PublicisedGroupsForUsersServlet(RestServlet):
result = yield self.groups_handler.bulk_get_publicised_groups(user_ids)
return (200, result)
return 200, result
class GroupsForUserServlet(RestServlet):
@ -741,7 +741,7 @@ class GroupsForUserServlet(RestServlet):
result = yield self.groups_handler.get_joined_groups(requester_user_id)
return (200, result)
return 200, result
def register_servlets(hs, http_server):


@ -24,7 +24,7 @@ from synapse.http.servlet import (
parse_json_object_from_request,
parse_string,
)
from synapse.logging.opentracing import log_kv, set_tag, trace_using_operation_name
from synapse.logging.opentracing import log_kv, set_tag, trace
from synapse.types import StreamToken
from ._base import client_patterns
@ -69,7 +69,7 @@ class KeyUploadServlet(RestServlet):
self.auth = hs.get_auth()
self.e2e_keys_handler = hs.get_e2e_keys_handler()
@trace_using_operation_name("upload_keys")
@trace(opname="upload_keys")
@defer.inlineCallbacks
def on_POST(self, request, device_id):
requester = yield self.auth.get_user_by_req(request, allow_guest=True)
@ -105,7 +105,7 @@ class KeyUploadServlet(RestServlet):
result = yield self.e2e_keys_handler.upload_keys_for_user(
user_id, device_id, body
)
return (200, result)
return 200, result
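
The import and decorator change at the top of this file swaps the purpose-specific trace_using_operation_name decorator for the more general trace decorator, which takes the operation name as an opname keyword argument. The sketch below only illustrates that calling convention; it is not Synapse's actual opentracing implementation, which starts and finishes a real tracing span around the wrapped handler:

import functools

def trace(opname=None):
    # Decorator factory: wraps a function and records an operation name.
    # Purely illustrative; the real decorator manages an opentracing span.
    def decorator(func):
        name = opname or func.__name__

        @functools.wraps(func)
        def wrapper(*args, **kwargs):
            print("entering span:", name)
            try:
                return func(*args, **kwargs)
            finally:
                print("leaving span:", name)

        return wrapper

    return decorator

@trace(opname="upload_keys")
def on_POST():
    return 200, {}

The net effect for call sites is simply that trace_using_operation_name("upload_keys") becomes trace(opname="upload_keys"), with no other change to the decorated handlers.
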
class KeyQueryServlet(RestServlet):
@ -159,7 +159,7 @@ class KeyQueryServlet(RestServlet):
timeout = parse_integer(request, "timeout", 10 * 1000)
body = parse_json_object_from_request(request)
result = yield self.e2e_keys_handler.query_devices(body, timeout)
return (200, result)
return 200, result
class KeyChangesServlet(RestServlet):
@ -200,7 +200,7 @@ class KeyChangesServlet(RestServlet):
results = yield self.device_handler.get_user_ids_changed(user_id, from_token)
return (200, results)
return 200, results
class OneTimeKeyServlet(RestServlet):
@ -235,7 +235,7 @@ class OneTimeKeyServlet(RestServlet):
timeout = parse_integer(request, "timeout", 10 * 1000)
body = parse_json_object_from_request(request)
result = yield self.e2e_keys_handler.claim_one_time_keys(body, timeout)
return (200, result)
return 200, result
def register_servlets(hs, http_server):


@ -88,7 +88,7 @@ class NotificationsServlet(RestServlet):
returned_push_actions.append(returned_pa)
next_token = str(pa["stream_ordering"])
return (200, {"notifications": returned_push_actions, "next_token": next_token})
return 200, {"notifications": returned_push_actions, "next_token": next_token}
def register_servlets(hs, http_server):
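
The notifications handler above returns each page of push actions together with a next_token derived from the last stream_ordering, which a client feeds back in as the pagination token for its next request. A hypothetical client-side loop, where fetch_notifications(from_token) stands in for the HTTP GET and is not a real Synapse helper:

# Hypothetical client-side pagination; fetch_notifications(from_token) stands
# in for an HTTP GET of the notifications endpoint, not a Synapse helper.
def fetch_all_notifications(fetch_notifications):
    collected = []
    from_token = None
    while True:
        status, body = fetch_notifications(from_token)
        if status != 200 or not body["notifications"]:
            break
        collected.extend(body["notifications"])
        from_token = body["next_token"]
    return collected
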

Some files were not shown because too many files have changed in this diff.