
Merge branch 'develop' into redactyoself

Conflicts:
	synapse/handlers/_base.py
Commit 4d1ea40008 by Daniel Wagner-Hall, 2015-08-28 15:35:39 +01:00
17 changed files with 262 additions and 73 deletions

View file

@@ -1,4 +1,43 @@
-Changes in synapse v0.10.0-rc1 (2015-08-20)
+Changes in synapse v0.10.0-rc5 (2015-08-27)
+===========================================
+
+* Fix bug that broke downloading files with ascii filenames across federation.
+
+Changes in synapse v0.10.0-rc4 (2015-08-27)
+===========================================
+
+* Allow UTF-8 filenames for upload. (PR #259)
+
+Changes in synapse v0.10.0-rc3 (2015-08-25)
+===========================================
+
+* Add ``--keys-directory`` config option to specify where files such as
+  certs and signing keys should be stored in, when using ``--generate-config``
+  or ``--generate-keys``. (PR #250)
+* Allow ``--config-path`` to specify a directory, causing synapse to use all
+  \*.yaml files in the directory as config files. (PR #249)
+* Add ``web_client_location`` config option to specify static files to be
+  hosted by synapse under ``/_matrix/client``. (PR #245)
+* Add helper utility to synapse to read and parse the config files and extract
+  the value of a given key. For example::
+
+      $ python -m synapse.config read server_name -c homeserver.yaml
+      localhost
+
+  (PR #246)
+
+Changes in synapse v0.10.0-rc2 (2015-08-24)
+===========================================
+
+* Fix bug where we incorrectly populated the ``event_forward_extremities``
+  table, resulting in problems joining large remote rooms (e.g.
+  ``#matrix:matrix.org``)
+* Reduce the number of times we wake up pushers by not listening for presence
+  or typing events, reducing the CPU cost of each pusher.
+
+Changes in synapse v0.10.0-rc1 (2015-08-21)
 ===========================================
 
 Also see v0.9.4-rc1 changelog, which has been amalgamated into this release.
@@ -10,6 +49,9 @@ General:
   (PR #208)
 * Add support for logging in with email address (PR #234)
 * Add support for new ``m.room.canonical_alias`` event. (PR #233)
+* Change synapse to treat user IDs case insensitively during registration and
+  login. (If two users already exist with case insensitive matching user ids,
+  synapse will continue to require them to specify their user ids exactly.)
 * Error if a user tries to register with an email already in use. (PR #211)
 * Add extra and improve existing caches (PR #212, #219, #226, #228)
 * Batch various storage request (PR #226, #228)

View file

@@ -16,4 +16,4 @@
 """ This is a reference implementation of a Matrix home server.
 """
 
-__version__ = "0.10.0-rc1"
+__version__ = "0.10.0-rc5"

View file

@@ -16,7 +16,7 @@
 
 import sys
 sys.dont_write_bytecode = True
-from synapse.python_dependencies import check_requirements
+from synapse.python_dependencies import check_requirements, DEPENDENCY_LINKS
 
 if __name__ == '__main__':
     check_requirements()
@@ -97,9 +97,25 @@ class SynapseHomeServer(HomeServer):
         return JsonResource(self)
 
     def build_resource_for_web_client(self):
-        import syweb
-        syweb_path = os.path.dirname(syweb.__file__)
-        webclient_path = os.path.join(syweb_path, "webclient")
+        webclient_path = self.get_config().web_client_location
+        if not webclient_path:
+            try:
+                import syweb
+            except ImportError:
+                quit_with_error(
+                    "Could not find a webclient.\n\n"
+                    "Please either install the matrix-angular-sdk or configure\n"
+                    "the location of the source to serve via the configuration\n"
+                    "option `web_client_location`\n\n"
+                    "To install the `matrix-angular-sdk` via pip, run:\n\n"
+                    "    pip install '%(dep)s'\n"
+                    "\n"
+                    "You can also disable hosting of the webclient via the\n"
+                    "configuration option `web_client`\n"
+                    % {"dep": DEPENDENCY_LINKS["matrix-angular-sdk"]}
+                )
+            syweb_path = os.path.dirname(syweb.__file__)
+            webclient_path = os.path.join(syweb_path, "webclient")
         # GZip is disabled here due to
         # https://twistedmatrix.com/trac/ticket/7678
         # (It can stay enabled for the API resources: they call
@@ -259,11 +275,10 @@ class SynapseHomeServer(HomeServer):
 
 def quit_with_error(error_string):
     message_lines = error_string.split("\n")
-    line_length = max([len(l) for l in message_lines]) + 2
+    line_length = max([len(l) for l in message_lines if len(l) < 80]) + 2
     sys.stderr.write("*" * line_length + '\n')
     for line in message_lines:
-        if line.strip():
-            sys.stderr.write(" %s\n" % (line.strip(),))
+        sys.stderr.write(" %s\n" % (line.rstrip(),))
     sys.stderr.write("*" * line_length + '\n')
     sys.exit(1)
 
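Taken together, the new build_resource_for_web_client prefers an explicitly configured path, falls back to an installed syweb package (matrix-angular-sdk), and otherwise aborts via quit_with_error. A minimal standalone sketch of that resolution order (the function name and error text below are illustrative, not Synapse's API):

    import os

    def resolve_webclient_path(configured_path):
        """Pick the directory of static webclient files to serve."""
        if configured_path:
            # An explicit `web_client_location` config value wins.
            return configured_path
        try:
            import syweb  # provided by the matrix-angular-sdk package
        except ImportError:
            raise SystemExit(
                "No webclient found: install matrix-angular-sdk, set "
                "web_client_location, or disable web_client."
            )
        return os.path.join(os.path.dirname(syweb.__file__), "webclient")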

View file

@@ -0,0 +1,30 @@
+# -*- coding: utf-8 -*-
+# Copyright 2015 OpenMarket Ltd
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+if __name__ == "__main__":
+    import sys
+    from homeserver import HomeServerConfig
+
+    action = sys.argv[1]
+
+    if action == "read":
+        key = sys.argv[2]
+        config = HomeServerConfig.load_config("", sys.argv[3:])
+        print getattr(config, key)
+        sys.exit(0)
+    else:
+        sys.stderr.write("Unknown command %r\n" % (action,))
+        sys.exit(1)

View file

@@ -131,7 +131,8 @@ class Config(object):
             "-c", "--config-path",
             action="append",
             metavar="CONFIG_FILE",
-            help="Specify config file"
+            help="Specify config file. Can be given multiple times and"
+                 " may specify directories containing *.yaml files."
         )
         config_parser.add_argument(
             "--generate-config",
@@ -143,6 +144,13 @@
             action="store_true",
             help="Generate any missing key files then exit"
         )
+        config_parser.add_argument(
+            "--keys-directory",
+            metavar="DIRECTORY",
+            help="Used with 'generate-*' options to specify where files such as"
+                 " certs and signing keys should be stored in, unless explicitly"
+                 " specified in the config."
+        )
         config_parser.add_argument(
             "-H", "--server-name",
             help="The server name to generate a config file for"
@@ -151,16 +159,46 @@
 
         generate_keys = config_args.generate_keys
 
+        config_files = []
+        if config_args.config_path:
+            for config_path in config_args.config_path:
+                if os.path.isdir(config_path):
+                    # We accept specifying directories as config paths, we search
+                    # inside that directory for all files matching *.yaml, and then
+                    # we apply them in *sorted* order.
+                    files = []
+                    for entry in os.listdir(config_path):
+                        entry_path = os.path.join(config_path, entry)
+                        if not os.path.isfile(entry_path):
+                            print (
+                                "Found subdirectory in config directory: %r. IGNORING."
+                            ) % (entry_path, )
+                            continue
+
+                        if not entry.endswith(".yaml"):
+                            print (
+                                "Found file in config directory that does not"
+                                " end in '.yaml': %r. IGNORING."
+                            ) % (entry_path, )
+                            continue
+
+                        files.append(entry_path)
+
+                    config_files.extend(sorted(files))
+                else:
+                    config_files.append(config_path)
+
         if config_args.generate_config:
-            if not config_args.config_path:
+            if not config_files:
                 config_parser.error(
                     "Must supply a config file.\nA config file can be automatically"
                     " generated using \"--generate-config -H SERVER_NAME"
                     " -c CONFIG-FILE\""
                 )
-            (config_path,) = config_args.config_path
+            (config_path,) = config_files
             if not os.path.exists(config_path):
-                config_dir_path = os.path.dirname(config_path)
+                if config_args.keys_directory:
+                    config_dir_path = config_args.keys_directory
+                else:
+                    config_dir_path = os.path.dirname(config_path)
                 config_dir_path = os.path.abspath(config_dir_path)
 
                 server_name = config_args.server_name
@@ -202,19 +240,22 @@
             obj.invoke_all("add_arguments", parser)
             args = parser.parse_args(remaining_args)
 
-        if not config_args.config_path:
+        if not config_files:
             config_parser.error(
                 "Must supply a config file.\nA config file can be automatically"
                 " generated using \"--generate-config -H SERVER_NAME"
                 " -c CONFIG-FILE\""
             )
 
-        config_dir_path = os.path.dirname(config_args.config_path[-1])
+        if config_args.keys_directory:
+            config_dir_path = config_args.keys_directory
+        else:
+            config_dir_path = os.path.dirname(config_args.config_path[-1])
         config_dir_path = os.path.abspath(config_dir_path)
 
         specified_config = {}
-        for config_path in config_args.config_path:
-            yaml_config = cls.read_config_file(config_path)
+        for config_file in config_files:
+            yaml_config = cls.read_config_file(config_file)
             specified_config.update(yaml_config)
 
         server_name = specified_config["server_name"]
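The net effect of the two hunks above: a --config-path argument may now name a directory, whose *.yaml files are applied in sorted order, and every resulting file is merged into one mapping with later files overwriting earlier keys. A self-contained sketch of that behaviour (not Synapse's actual helper; assumes PyYAML, which Synapse already depends on):

    import os
    import yaml  # PyYAML

    def expand_config_paths(paths):
        """Expand directories to their *.yaml files, applied in sorted order."""
        config_files = []
        for path in paths:
            if os.path.isdir(path):
                entries = [
                    os.path.join(path, name)
                    for name in os.listdir(path)
                    if name.endswith(".yaml")
                ]
                config_files.extend(sorted(entries))
            else:
                config_files.append(path)
        return config_files

    def merge_configs(config_files):
        """Later files win: each file's keys overwrite earlier ones."""
        merged = {}
        for config_file in config_files:
            with open(config_file) as f:
                merged.update(yaml.safe_load(f) or {})
        return merged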

View file

@@ -22,6 +22,7 @@ class ServerConfig(Config):
         self.server_name = config["server_name"]
         self.pid_file = self.abspath(config.get("pid_file"))
         self.web_client = config["web_client"]
+        self.web_client_location = config.get("web_client_location", None)
         self.soft_file_limit = config["soft_file_limit"]
         self.daemonize = config.get("daemonize")
         self.print_pidfile = config.get("print_pidfile")

View file

@@ -107,6 +107,22 @@ class BaseHandler(object):
         if not suppress_auth:
             self.auth.check(event, auth_events=context.current_state)
 
+        if event.type == EventTypes.CanonicalAlias:
+            # Check the alias is acually valid (at this time at least)
+            room_alias_str = event.content.get("alias", None)
+            if room_alias_str:
+                room_alias = RoomAlias.from_string(room_alias_str)
+                directory_handler = self.hs.get_handlers().directory_handler
+                mapping = yield directory_handler.get_association(room_alias)
+
+                if mapping["room_id"] != event.room_id:
+                    raise SynapseError(
+                        400,
+                        "Room alias %s does not point to the room" % (
+                            room_alias_str,
+                        )
+                    )
+
         (event_stream_id, max_stream_id) = yield self.store.persist_event(
             event, context=context
         )
@@ -130,22 +146,6 @@
                 returned_invite.signatures
             )
 
-        if event.type == EventTypes.CanonicalAlias:
-            # Check the alias is actually valid (at this time at least)
-            room_alias_str = event.content.get("alias", None)
-            if room_alias_str:
-                room_alias = RoomAlias.from_string(room_alias_str)
-                directory_handler = self.hs.get_handlers().directory_handler
-                mapping = yield directory_handler.get_association(room_alias)
-
-                if mapping["room_id"] != event.room_id:
-                    raise SynapseError(
-                        400,
-                        "Room alias %s does not point to the room" % (
-                            room_alias_str,
-                        )
-                    )
-
         if event.type == EventTypes.Redaction:
             if self.auth.check_redaction(event, auth_events=context.current_state):
                 original_event = yield self.store.get_event(

View file

@@ -49,7 +49,12 @@ class EventStreamHandler(BaseHandler):
     @defer.inlineCallbacks
     @log_function
     def get_stream(self, auth_user_id, pagin_config, timeout=0,
-                   as_client_event=True, affect_presence=True):
+                   as_client_event=True, affect_presence=True,
+                   only_room_events=False):
+        """Fetches the events stream for a given user.
+
+        If `only_room_events` is `True` only room events will be returned.
+        """
         auth_user = UserID.from_string(auth_user_id)
 
         try:
@@ -89,7 +94,8 @@ class EventStreamHandler(BaseHandler):
                 timeout = random.randint(int(timeout*0.9), int(timeout*1.1))
 
             events, tokens = yield self.notifier.get_events_for(
-                auth_user, room_ids, pagin_config, timeout
+                auth_user, room_ids, pagin_config, timeout,
+                only_room_events=only_room_events
             )
 
             time_now = self.clock.time_msec()

View file

@@ -204,15 +204,11 @@ class TypingNotificationHandler(BaseHandler):
         )
 
     def _push_update_local(self, room_id, user, typing):
-        if room_id not in self._room_serials:
-            self._room_serials[room_id] = 0
-            self._room_typing[room_id] = set()
-        room_set = self._room_typing[room_id]
+        room_set = self._room_typing.setdefault(room_id, set())
 
         if typing:
             room_set.add(user)
-        elif user in room_set:
-            room_set.remove(user)
+        else:
+            room_set.discard(user)
 
         self._latest_room_serial += 1
         self._room_serials[room_id] = self._latest_room_serial
@@ -260,8 +256,8 @@ class TypingNotificationEventSource(object):
         )
 
         events = []
-        for room_id in handler._room_serials:
-            if room_id not in joined_room_ids:
+        for room_id in joined_room_ids:
+            if room_id not in handler._room_serials:
                 continue
             if handler._room_serials[room_id] <= from_key:
                 continue
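The _push_update_local rewrite leans on two dict/set conveniences: setdefault returns the existing set (creating an empty one only on first use), and discard removes a member without raising when it is absent, so no membership check is needed. A tiny illustration with made-up IDs:

    room_typing = {}

    # First touch of a room lazily creates its set of typing users.
    room_set = room_typing.setdefault("!room:example.org", set())
    room_set.add("@alice:example.org")

    # discard() is a no-op for users who were never in the set.
    room_set.discard("@bob:example.org")

    print(room_typing)  # {'!room:example.org': set(['@alice:example.org'])}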

View file

@@ -328,10 +328,13 @@ class Notifier(object):
         defer.returnValue(result)
 
     @defer.inlineCallbacks
-    def get_events_for(self, user, rooms, pagination_config, timeout):
+    def get_events_for(self, user, rooms, pagination_config, timeout,
+                       only_room_events=False):
         """ For the given user and rooms, return any new events for them. If
         there are no new events wait for up to `timeout` milliseconds for any
         new events to happen before returning.
+
+        If `only_room_events` is `True` only room events will be returned.
         """
         from_token = pagination_config.from_token
         if not from_token:
@@ -352,6 +355,8 @@
                     after_id = getattr(after_token, keyname)
                     if before_id == after_id:
                         continue
+                    if only_room_events and name != "room":
+                        continue
                     new_events, new_key = yield source.get_new_events_for_user(
                         user, getattr(from_token, keyname), limit,
                     )

View file

@@ -249,7 +249,9 @@
             # we fail to dispatch the push)
             config = PaginationConfig(from_token=None, limit='1')
             chunk = yield self.evStreamHandler.get_stream(
-                self.user_name, config, timeout=0)
+                self.user_name, config, timeout=0, affect_presence=False,
+                only_room_events=True
+            )
             self.last_token = chunk['end']
             self.store.update_pusher_last_token(
                 self.app_id, self.pushkey, self.user_name, self.last_token
@@ -280,8 +282,8 @@
             config = PaginationConfig(from_token=from_tok, limit='1')
             timeout = (300 + random.randint(-60, 60)) * 1000
             chunk = yield self.evStreamHandler.get_stream(
-                self.user_name, config,
-                timeout=timeout, affect_presence=False
+                self.user_name, config, timeout=timeout, affect_presence=False,
+                only_room_events=True
             )
 
             # limiting to 1 may get 1 event plus 1 presence event, so

View file

@@ -46,8 +46,8 @@ CONDITIONAL_REQUIREMENTS = {
 
 def requirements(config=None, include_conditional=False):
     reqs = REQUIREMENTS.copy()
-    for key, req in CONDITIONAL_REQUIREMENTS.items():
-        if (config and getattr(config, key)) or include_conditional:
+    if include_conditional:
+        for _, req in CONDITIONAL_REQUIREMENTS.items():
             reqs.update(req)
     return reqs
@@ -55,13 +55,13 @@ def requirements(config=None, include_conditional=False):
 
 def github_link(project, version, egg):
     return "https://github.com/%s/tarball/%s/#egg=%s" % (project, version, egg)
 
-DEPENDENCY_LINKS = [
-    github_link(
+DEPENDENCY_LINKS = {
+    "matrix-angular-sdk": github_link(
         project="matrix-org/matrix-angular-sdk",
         version="v0.6.6",
         egg="matrix_angular_sdk-0.6.6",
     ),
-]
+}
 
 
 class MissingRequirementError(Exception):
@@ -129,7 +129,7 @@ def check_requirements(config=None):
 def list_requirements():
     result = []
     linked = []
-    for link in DEPENDENCY_LINKS:
+    for link in DEPENDENCY_LINKS.values():
         egg = link.split("#egg=")[1]
         linked.append(egg.split('-')[0])
         result.append(link)
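Turning DEPENDENCY_LINKS from a list into a dict lets callers look a link up by name (as the webclient error message above does with DEPENDENCY_LINKS["matrix-angular-sdk"]), while iterating code only has to switch to .values(). A small sketch of both access patterns, using a made-up entry rather than the real table:

    # Hypothetical stand-in for the real table in the dependencies module.
    DEPENDENCY_LINKS = {
        "example-sdk": "https://github.com/example/sdk/tarball/v1.0/#egg=example_sdk-1.0",
    }

    # Keyed lookup, e.g. to build an install hint for one dependency:
    print("pip install '%s'" % DEPENDENCY_LINKS["example-sdk"])

    # Iteration over every link, as list_requirements() now does:
    for link in DEPENDENCY_LINKS.values():
        print(link.split("#egg=")[1])  # -> example_sdk-1.0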

View file

@@ -33,6 +33,8 @@ import os
 
 import cgi
 import logging
+import urllib
+import urlparse
 
 logger = logging.getLogger(__name__)
 
@@ -42,10 +44,13 @@ def parse_media_id(request):
         # This allows users to append e.g. /test.png to the URL. Useful for
         # clients that parse the URL to see content type.
         server_name, media_id = request.postpath[:2]
-        if len(request.postpath) > 2 and is_ascii(request.postpath[-1]):
-            return server_name, media_id, request.postpath[-1]
-        else:
-            return server_name, media_id, None
+        file_name = None
+        if len(request.postpath) > 2:
+            try:
+                file_name = urlparse.unquote(request.postpath[-1]).decode("utf-8")
+            except UnicodeDecodeError:
+                pass
+        return server_name, media_id, file_name
     except:
         raise SynapseError(
             404,
@@ -140,9 +145,26 @@ class BaseMediaResource(Resource):
 
             content_disposition = headers.get("Content-Disposition", None)
             if content_disposition:
                 _, params = cgi.parse_header(content_disposition[0],)
-                upload_name = params.get("filename", None)
-                if upload_name and not is_ascii(upload_name):
-                    upload_name = None
+                upload_name = None
+
+                # First check if there is a valid UTF-8 filename
+                upload_name_utf8 = params.get("filename*", None)
+                if upload_name_utf8:
+                    if upload_name_utf8.lower().startswith("utf-8''"):
+                        upload_name = upload_name_utf8[7:]
+
+                # If there isn't check for an ascii name.
+                if not upload_name:
+                    upload_name_ascii = params.get("filename", None)
+                    if upload_name_ascii and is_ascii(upload_name_ascii):
+                        upload_name = upload_name_ascii
+
+                if upload_name:
+                    upload_name = urlparse.unquote(upload_name)
+                    try:
+                        upload_name = upload_name.decode("utf-8")
+                    except UnicodeDecodeError:
+                        upload_name = None
             else:
                 upload_name = None
@@ -181,10 +203,20 @@
         if os.path.isfile(file_path):
             request.setHeader(b"Content-Type", media_type.encode("UTF-8"))
             if upload_name:
-                request.setHeader(
-                    b"Content-Disposition",
-                    b"inline; filename=%s" % (upload_name.encode("utf-8"),),
-                )
+                if is_ascii(upload_name):
+                    request.setHeader(
+                        b"Content-Disposition",
+                        b"inline; filename=%s" % (
+                            urllib.quote(upload_name.encode("utf-8")),
+                        ),
+                    )
+                else:
+                    request.setHeader(
+                        b"Content-Disposition",
+                        b"inline; filename*=utf-8''%s" % (
+                            urllib.quote(upload_name.encode("utf-8")),
+                        ),
+                    )
 
             # cache for at least a day.
             # XXX: we might want to turn this off for data we don't want to
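These two hunks cover both directions of RFC 6266/5987 filename handling: incoming Content-Disposition headers are checked for a filename*=utf-8''... parameter before falling back to a plain ascii filename, and outgoing headers pick whichever form the stored name requires. A small Python 2-style sketch of the round trip (the helper names below are illustrative, not Synapse's):

    import cgi
    import urllib

    def encode_disposition(upload_name):
        """Build an inline Content-Disposition value for a unicode filename."""
        quoted = urllib.quote(upload_name.encode("utf-8"))
        try:
            upload_name.encode("ascii")
            return "inline; filename=%s" % (quoted,)
        except UnicodeEncodeError:
            # Non-ascii names need the RFC 5987 extended parameter.
            return "inline; filename*=utf-8''%s" % (quoted,)

    def decode_disposition(header_value):
        """Recover the unicode filename from a Content-Disposition value."""
        _, params = cgi.parse_header(header_value)
        name = params.get("filename*")
        if name and name.lower().startswith("utf-8''"):
            return urllib.unquote(name[7:]).decode("utf-8")
        name = params.get("filename")
        if name:
            return urllib.unquote(name).decode("utf-8")
        return None

    # A non-ascii filename survives the round trip via the filename* form.
    header = encode_disposition(u"r\xe9sum\xe9.pdf")
    print(header)  # inline; filename*=utf-8''r%C3%A9sum%C3%A9.pdf
    assert decode_disposition(header) == u"r\xe9sum\xe9.pdf"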

View file

@@ -15,7 +15,7 @@
 
 from synapse.http.server import respond_with_json, request_handler
 
-from synapse.util.stringutils import random_string, is_ascii
+from synapse.util.stringutils import random_string
 from synapse.api.errors import SynapseError
 
 from twisted.web.server import NOT_DONE_YET
@@ -86,9 +86,13 @@ class UploadResource(BaseMediaResource):
 
         upload_name = request.args.get("filename", None)
         if upload_name:
-            upload_name = upload_name[0]
-            if upload_name and not is_ascii(upload_name):
-                raise SynapseError(400, "filename must be ascii")
+            try:
+                upload_name = upload_name[0].decode('UTF-8')
+            except UnicodeDecodeError:
+                raise SynapseError(
+                    msg="Invalid UTF-8 filename parameter: %r" % (upload_name),
+                    code=400,
+                )
 
         headers = request.requestHeaders

View file

@@ -331,7 +331,10 @@ class EventFederationStore(SQLBaseStore):
 
         txn.executemany(
             query,
-            [(ev.event_id, ev.room_id, ev.event_id) for ev in events]
+            [
+                (ev.event_id, ev.room_id, ev.event_id) for ev in events
+                if not ev.internal_metadata.is_outlier()
+            ]
         )
 
         query = (
@@ -358,7 +361,10 @@
         )
 
         txn.executemany(
             query,
-            [(ev.event_id, ev.room_id) for ev in events]
+            [
+                (ev.event_id, ev.room_id) for ev in events
+                if not ev.internal_metadata.is_outlier()
+            ]
         )
 
         for room_id in events_by_room:

View file

@@ -403,8 +403,15 @@ class StateStore(SQLBaseStore):
             state_dict = results[group]
 
             for event_id in state_ids:
-                state_event = state_events[event_id]
-                state_dict[(state_event.type, state_event.state_key)] = state_event
+                try:
+                    state_event = state_events[event_id]
+                    state_dict[(state_event.type, state_event.state_key)] = state_event
+                except KeyError:
+                    # Hmm. So we do don't have that state event? Interesting.
+                    logger.warn(
+                        "Can't find state event %r for state group %r",
+                        event_id, group,
+                    )
 
             self._state_group_cache.update(
                 cache_seq_num,
View file

@@ -38,6 +38,8 @@ def random_string_with_symbols(length):
 def is_ascii(s):
     try:
         s.encode("ascii")
+    except UnicodeEncodeError:
+        return False
     except UnicodeDecodeError:
         return False
     else:
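The extra except clause matters because of Python 2's two string types: calling .encode("ascii") on a unicode string raises UnicodeEncodeError for non-ascii characters, while calling it on a non-ascii byte string first performs an implicit ascii decode and raises UnicodeDecodeError instead. A quick illustration of both failure modes (Python 2 semantics, mirroring the patched helper rather than quoting it):

    def is_ascii(s):
        try:
            s.encode("ascii")
        except UnicodeEncodeError:
            return False
        except UnicodeDecodeError:
            return False
        else:
            return True

    print(is_ascii(u"caf\xe9"))      # False: unicode input -> UnicodeEncodeError
    print(is_ascii("caf\xc3\xa9"))   # False: byte string -> UnicodeDecodeError
    print(is_ascii("cafe"))          # True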