Merge branch 'release-v0.24.1' of github.com:matrix-org/synapse

Erik Johnston 2017-10-24 15:02:25 +01:00
commit f009df23ec
64 changed files with 221 additions and 161 deletions

View file

@@ -1,3 +1,11 @@
+Changes in synapse v0.24.1 (2017-10-24)
+=======================================
+
+Bug fixes:
+
+* Fix updating group profiles over federation (PR #2567)
+
+
 Changes in synapse v0.24.0 (2017-10-23)
 =======================================

View file

@@ -16,4 +16,4 @@
 """ This is a reference implementation of a Matrix home server.
 """
 
-__version__ = "0.24.0"
+__version__ = "0.24.1"

View file

@@ -19,7 +19,7 @@ import sys
 try:
     import affinity
-except:
+except Exception:
     affinity = None
 
 from daemonize import Daemonize

View file

@@ -123,7 +123,7 @@ class _ServiceQueuer(object):
         with Measure(self.clock, "servicequeuer.send"):
             try:
                 yield self.txn_ctrl.send(service, events)
-            except:
+            except Exception:
                 logger.exception("AS request failed")
             finally:
                 self.requests_in_flight.discard(service.id)

View file

@@ -303,7 +303,7 @@ def read_gc_thresholds(thresholds):
         return (
             int(thresholds[0]), int(thresholds[1]), int(thresholds[2]),
         )
-    except:
+    except Exception:
         raise ConfigError(
             "Value of `gc_threshold` must be a list of three integers if set"
         )

View file

@@ -34,7 +34,7 @@ class ServerContextFactory(ssl.ContextFactory):
         try:
             _ecCurve = _OpenSSLECCurve(_defaultCurveName)
             _ecCurve.addECKeyToContext(context)
-        except:
+        except Exception:
             logger.exception("Failed to enable elliptic curve for TLS")
         context.set_options(SSL.OP_NO_SSLv2 | SSL.OP_NO_SSLv3)
         context.use_certificate_chain_file(config.tls_certificate_file)

View file

@@ -43,7 +43,7 @@ def check_event_content_hash(event, hash_algorithm=hashlib.sha256):
     message_hash_base64 = event.hashes[name]
     try:
         message_hash_bytes = decode_base64(message_hash_base64)
-    except:
+    except Exception:
         raise SynapseError(
             400,
             "Invalid base64: %s" % (message_hash_base64,),

View file

@@ -759,7 +759,7 @@ def _handle_key_deferred(verify_request):
     ))
     try:
         verify_signed_json(json_object, server_name, verify_key)
-    except:
+    except Exception:
         raise SynapseError(
             401,
             "Invalid signature for server %s with key %s:%s" % (

View file

@@ -443,12 +443,12 @@ def _check_power_levels(event, auth_events):
         for k, v in user_list.items():
             try:
                 UserID.from_string(k)
-            except:
+            except Exception:
                 raise SynapseError(400, "Not a valid user_id: %s" % (k,))
 
             try:
                 int(v)
-            except:
+            except Exception:
                 raise SynapseError(400, "Not a valid power level: %s" % (v,))
 
     key = (event.type, event.state_key, )

View file

@@ -55,7 +55,7 @@ class EventBuilderFactory(object):
         local_part = str(int(self.clock.time())) + i + random_string(5)
 
-        e_id = EventID.create(local_part, self.hostname)
+        e_id = EventID(local_part, self.hostname)
 
         return e_id.to_string()

View file

@@ -22,7 +22,7 @@ class SpamChecker(object):
         config = None
         try:
             module, config = hs.config.spam_checker
-        except:
+        except Exception:
             pass
 
         if module is not None:

View file

@@ -485,6 +485,26 @@ class TransportLayerClient(object):
             ignore_backoff=True,
         )
 
+    @log_function
+    def update_group_profile(self, destination, group_id, requester_user_id, content):
+        """Update a remote group profile
+
+        Args:
+            destination (str)
+            group_id (str)
+            requester_user_id (str)
+            content (dict): The new profile of the group
+        """
+        path = PREFIX + "/groups/%s/profile" % (group_id,)
+
+        return self.client.post_json(
+            destination=destination,
+            path=path,
+            args={"requester_user_id": requester_user_id},
+            data=content,
+            ignore_backoff=True,
+        )
+
     @log_function
     def get_group_summary(self, destination, group_id, requester_user_id):
         """Get a group summary

View file

@@ -112,7 +112,7 @@ class Authenticator(object):
             key = strip_quotes(param_dict["key"])
             sig = strip_quotes(param_dict["sig"])
             return (origin, key, sig)
-        except:
+        except Exception:
             raise AuthenticationError(
                 400, "Malformed Authorization header", Codes.UNAUTHORIZED
             )
@@ -177,7 +177,7 @@ class BaseFederationServlet(object):
                 if self.REQUIRE_AUTH:
                     logger.exception("authenticate_request failed")
                     raise
-            except:
+            except Exception:
                 logger.exception("authenticate_request failed")
                 raise
@@ -270,7 +270,7 @@ class FederationSendServlet(BaseFederationServlet):
             code, response = yield self.handler.on_incoming_transaction(
                 transaction_data
             )
-        except:
+        except Exception:
             logger.exception("on_incoming_transaction failed")
             raise
@@ -610,7 +610,7 @@ class FederationVersionServlet(BaseFederationServlet):
 
 
 class FederationGroupsProfileServlet(BaseFederationServlet):
-    """Get the basic profile of a group on behalf of a user
+    """Get/set the basic profile of a group on behalf of a user
     """
     PATH = "/groups/(?P<group_id>[^/]*)/profile$"
@@ -626,6 +626,18 @@ class FederationGroupsProfileServlet(BaseFederationServlet):
 
         defer.returnValue((200, new_content))
 
+    @defer.inlineCallbacks
+    def on_POST(self, origin, content, query, group_id):
+        requester_user_id = parse_string_from_args(query, "requester_user_id")
+        if get_domain_from_id(requester_user_id) != origin:
+            raise SynapseError(403, "requester_user_id doesn't match origin")
+
+        new_content = yield self.handler.update_group_profile(
+            group_id, requester_user_id, content
+        )
+
+        defer.returnValue((200, new_content))
+
 
 class FederationGroupsSummaryServlet(BaseFederationServlet):
     PATH = "/groups/(?P<group_id>[^/]*)/summary$"
@@ -642,18 +654,6 @@ class FederationGroupsSummaryServlet(BaseFederationServlet):
 
         defer.returnValue((200, new_content))
 
-    @defer.inlineCallbacks
-    def on_POST(self, origin, content, query, group_id):
-        requester_user_id = parse_string_from_args(query, "requester_user_id")
-        if get_domain_from_id(requester_user_id) != origin:
-            raise SynapseError(403, "requester_user_id doesn't match origin")
-
-        new_content = yield self.handler.update_group_profile(
-            group_id, requester_user_id, content
-        )
-
-        defer.returnValue((200, new_content))
-
 
 class FederationGroupsRoomsServlet(BaseFederationServlet):
     """Get the rooms in a group on behalf of a user

View file

@@ -13,14 +13,11 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-from twisted.internet import defer
+import logging
 
 from synapse.api.errors import SynapseError
-from synapse.types import UserID, get_domain_from_id, RoomID, GroupID
-
-import logging
-import urllib
+from synapse.types import GroupID, RoomID, UserID, get_domain_from_id
+from twisted.internet import defer
 
 logger = logging.getLogger(__name__)
@@ -698,9 +695,11 @@ class GroupsServerHandler(object):
     def create_group(self, group_id, user_id, content):
         group = yield self.check_group_is_ours(group_id)
 
-        _validate_group_id(group_id)
-
         logger.info("Attempting to create group with ID: %r", group_id)
+
+        # parsing the id into a GroupID validates it.
+        group_id_obj = GroupID.from_string(group_id)
+
         if group:
             raise SynapseError(400, "Group already exists")
@@ -710,7 +709,7 @@ class GroupsServerHandler(object):
             raise SynapseError(
                 403, "Only server admin can create group on this server",
             )
-            localpart = GroupID.from_string(group_id).localpart
+            localpart = group_id_obj.localpart
             if not localpart.startswith(self.hs.config.group_creation_prefix):
                 raise SynapseError(
                     400,
@@ -786,18 +785,3 @@ def _parse_visibility_from_contents(content):
         is_public = True
 
     return is_public
-
-
-def _validate_group_id(group_id):
-    """Validates the group ID is valid for creation on this home server
-    """
-    localpart = GroupID.from_string(group_id).localpart
-
-    if localpart.lower() != localpart:
-        raise SynapseError(400, "Group ID must be lower case")
-
-    if urllib.quote(localpart.encode('utf-8')) != localpart:
-        raise SynapseError(
-            400,
-            "Group ID can only contain characters a-z, 0-9, or '_-./'",
-        )

View file

@@ -267,7 +267,7 @@ class AuthHandler(BaseHandler):
         user_id = authdict["user"]
         password = authdict["password"]
         if not user_id.startswith('@'):
-            user_id = UserID.create(user_id, self.hs.hostname).to_string()
+            user_id = UserID(user_id, self.hs.hostname).to_string()
 
         return self._check_password(user_id, password)
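This hunk, and several like it below, drop the DomainSpecificString.create helper (removed from synapse/types.py later in this commit) in favour of calling the namedtuple constructor directly; create was only a thin wrapper around cls(localpart=..., domain=...). A small sketch of the equivalence, with illustrative values:

from synapse.types import UserID

# was: UserID.create("alice", "example.com")
user = UserID("alice", "example.com")
assert user.localpart == "alice"
assert user.domain == "example.com"
assert user.to_string() == "@alice:example.com"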

View file

@@ -227,7 +227,7 @@ class FederationHandler(BaseHandler):
                 state, auth_chain = yield self.replication_layer.get_state_for_room(
                     origin, pdu.room_id, pdu.event_id,
                 )
-            except:
+            except Exception:
                 logger.exception("Failed to get state for event: %s", pdu.event_id)
 
         yield self._process_received_pdu(
@@ -461,7 +461,7 @@ class FederationHandler(BaseHandler):
         def check_match(id):
             try:
                 return server_name == get_domain_from_id(id)
-            except:
+            except Exception:
                 return False
 
         # Parses mapping `event_id -> (type, state_key) -> state event_id`
@@ -499,7 +499,7 @@ class FederationHandler(BaseHandler):
                 continue
 
             try:
                 domain = get_domain_from_id(ev.state_key)
-            except:
+            except Exception:
                 continue
 
             if domain != server_name:
@@ -738,7 +738,7 @@ class FederationHandler(BaseHandler):
                     joined_domains[dom] = min(d, old_d)
                 else:
                     joined_domains[dom] = d
-            except:
+            except Exception:
                 pass
 
         return sorted(joined_domains.items(), key=lambda d: d[1])
@@ -940,7 +940,7 @@ class FederationHandler(BaseHandler):
                 room_creator_user_id="",
                 is_public=False
             )
-        except:
+        except Exception:
             # FIXME
             pass
@@ -1775,7 +1775,7 @@ class FederationHandler(BaseHandler):
                 [e_id for e_id, _ in event.auth_events]
             )
             seen_events = set(have_events.keys())
-        except:
+        except Exception:
             # FIXME:
             logger.exception("Failed to get auth chain")
@@ -1899,7 +1899,7 @@ class FederationHandler(BaseHandler):
             except AuthError:
                 pass
 
-        except:
+        except Exception:
             # FIXME:
             logger.exception("Failed to query auth chain")
@@ -1966,7 +1966,7 @@ class FederationHandler(BaseHandler):
         def get_next(it, opt=None):
             try:
                 return it.next()
-            except:
+            except Exception:
                 return opt
 
         current_local = get_next(local_iter)

View file

@@ -214,7 +214,7 @@ class InitialSyncHandler(BaseHandler):
                 })
 
                 d["account_data"] = account_data_events
-            except:
+            except Exception:
                 logger.exception("Failed to get snapshot")
 
         yield concurrently_execute(handle_room, room_list, 10)

View file

@@ -563,7 +563,7 @@ class MessageHandler(BaseHandler):
         try:
             dump = ujson.dumps(unfreeze(event.content))
             ujson.loads(dump)
-        except:
+        except Exception:
             logger.exception("Failed to encode content: %r", event.content)
             raise

View file

@@ -364,7 +364,7 @@ class PresenceHandler(object):
             )
 
             preserve_fn(self._update_states)(changes)
-        except:
+        except Exception:
             logger.exception("Exception in _handle_timeouts loop")
 
     @defer.inlineCallbacks

View file

@@ -118,7 +118,7 @@ class ProfileHandler(BaseHandler):
                     logger.exception("Failed to get displayname")
                     raise
-            except:
+            except Exception:
                 logger.exception("Failed to get displayname")
             else:
                 defer.returnValue(result["displayname"])
@@ -165,7 +165,7 @@ class ProfileHandler(BaseHandler):
                 if e.code != 404:
                     logger.exception("Failed to get avatar_url")
                     raise
-            except:
+            except Exception:
                 logger.exception("Failed to get avatar_url")
 
             defer.returnValue(result["avatar_url"])
@@ -266,7 +266,7 @@ class ProfileHandler(BaseHandler):
                 },
                 ignore_backoff=True,
             )
-        except:
+        except Exception:
             logger.exception("Failed to get avatar_url")
 
         yield self.store.update_remote_profile_cache(

View file

@@ -15,7 +15,6 @@
 """Contains functions for registering clients."""
 import logging
-import urllib
 
 from twisted.internet import defer
@@ -23,6 +22,7 @@ from synapse.api.errors import (
     AuthError, Codes, SynapseError, RegistrationError, InvalidCaptchaError
 )
 from synapse.http.client import CaptchaServerHttpClient
+from synapse import types
 from synapse.types import UserID
 from synapse.util.async import run_on_reactor
 from ._base import BaseHandler
@@ -46,12 +46,10 @@ class RegistrationHandler(BaseHandler):
     @defer.inlineCallbacks
     def check_username(self, localpart, guest_access_token=None,
                        assigned_user_id=None):
-        yield run_on_reactor()
-
-        if urllib.quote(localpart.encode('utf-8')) != localpart:
+        if types.contains_invalid_mxid_characters(localpart):
             raise SynapseError(
                 400,
-                "User ID can only contain characters a-z, 0-9, or '_-./'",
+                "User ID can only contain characters a-z, 0-9, or '=_-./'",
                 Codes.INVALID_USERNAME
             )
@@ -81,7 +79,7 @@ class RegistrationHandler(BaseHandler):
                 "A different user ID has already been registered for this session",
             )
 
-        yield self.check_user_id_not_appservice_exclusive(user_id)
+        self.check_user_id_not_appservice_exclusive(user_id)
 
         users = yield self.store.get_users_by_id_case_insensitive(user_id)
         if users:
@@ -254,11 +252,10 @@
         """
         Registers email_id as SAML2 Based Auth.
         """
-        if urllib.quote(localpart) != localpart:
+        if types.contains_invalid_mxid_characters(localpart):
            raise SynapseError(
                 400,
-                "User ID must only contain characters which do not"
-                " require URL encoding."
+                "User ID can only contain characters a-z, 0-9, or '=_-./'",
             )
         user = UserID(localpart, self.hs.hostname)
         user_id = user.to_string()
@@ -292,7 +289,7 @@
         try:
             identity_handler = self.hs.get_handlers().identity_handler
             threepid = yield identity_handler.threepid_from_creds(c)
-        except:
+        except Exception:
             logger.exception("Couldn't validate 3pid")
             raise RegistrationError(400, "Couldn't validate 3pid")
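Both registration paths now delegate localpart validation to the new types.contains_invalid_mxid_characters helper (defined in the synapse/types.py hunk below), which also admits '=' in localparts. A sketch of the behaviour a caller sees; the wrapper function is hypothetical, the error shape matches the diff:

from synapse import types
from synapse.api.errors import Codes, SynapseError

def validate_localpart(localpart):
    if types.contains_invalid_mxid_characters(localpart):
        raise SynapseError(
            400,
            "User ID can only contain characters a-z, 0-9, or '=_-./'",
            Codes.INVALID_USERNAME,
        )

validate_localpart("alice.smith-1")   # accepted
# validate_localpart("Alice!") raises SynapseError(400, ..., M_INVALID_USERNAME)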

View file

@@ -91,7 +91,7 @@ class RoomCreationHandler(BaseHandler):
                 if wchar in config["room_alias_name"]:
                     raise SynapseError(400, "Invalid characters in room alias")
 
-            room_alias = RoomAlias.create(
+            room_alias = RoomAlias(
                 config["room_alias_name"],
                 self.hs.hostname,
             )
@@ -108,7 +108,7 @@ class RoomCreationHandler(BaseHandler):
             for i in invite_list:
                 try:
                     UserID.from_string(i)
-                except:
+                except Exception:
                     raise SynapseError(400, "Invalid user_id: %s" % (i,))
 
             invite_3pid_list = config.get("invite_3pid", [])
@@ -123,7 +123,7 @@ class RoomCreationHandler(BaseHandler):
             while attempts < 5:
                 try:
                     random_string = stringutils.random_string(18)
-                    gen_room_id = RoomID.create(
+                    gen_room_id = RoomID(
                         random_string,
                         self.hs.hostname,
                     )

View file

@@ -61,7 +61,7 @@ class SearchHandler(BaseHandler):
                 assert batch_group is not None
                 assert batch_group_key is not None
                 assert batch_token is not None
-            except:
+            except Exception:
                 raise SynapseError(400, "Invalid batch")
 
         try:

View file

@@ -550,7 +550,7 @@ class MatrixFederationHttpClient(object):
             length = yield _readBodyToFile(
                 response, output_stream, max_size
             )
-        except:
+        except Exception:
             logger.exception("Failed to download body")
             raise

View file

@@ -130,7 +130,7 @@ def wrap_request_handler(request_handler, include_metrics=False):
                     pretty_print=_request_user_agent_is_curl(request),
                     version_string=self.version_string,
                 )
-            except:
+            except Exception:
                 logger.exception(
                     "Failed handle request %s.%s on %r: %r",
                     request_handler.__module__,

View file

@@ -48,7 +48,7 @@ def parse_integer_from_args(args, name, default=None, required=False):
     if name in args:
         try:
             return int(args[name][0])
-        except:
+        except Exception:
             message = "Query parameter %r must be an integer" % (name,)
             raise SynapseError(400, message)
     else:
@@ -88,7 +88,7 @@ def parse_boolean_from_args(args, name, default=None, required=False):
                 "true": True,
                 "false": False,
             }[args[name][0]]
-        except:
+        except Exception:
             message = (
                 "Boolean query parameter %r must be one of"
                 " ['true', 'false']"
@@ -162,7 +162,7 @@ def parse_json_value_from_request(request):
     """
     try:
         content_bytes = request.content.read()
-    except:
+    except Exception:
         raise SynapseError(400, "Error reading JSON content.")
 
     try:

View file

@@ -67,7 +67,7 @@ class SynapseRequest(Request):
             ru_utime, ru_stime = context.get_resource_usage()
             db_txn_count = context.db_txn_count
             db_txn_duration = context.db_txn_duration
-        except:
+        except Exception:
             ru_utime, ru_stime = (0, 0)
             db_txn_count, db_txn_duration = (0, 0)

View file

@@ -289,7 +289,7 @@ class Notifier(object):
         for user_stream in user_streams:
             try:
                 user_stream.notify(stream_key, new_token, time_now_ms)
-            except:
+            except Exception:
                 logger.exception("Failed to notify listener")
 
         self.notify_replication()

View file

@@ -121,7 +121,7 @@ class EmailPusher(object):
                 starting_max_ordering = self.max_stream_ordering
                 try:
                     yield self._unsafe_process()
-                except:
+                except Exception:
                     logger.exception("Exception processing notifs")
                 if self.max_stream_ordering == starting_max_ordering:
                     break

View file

@@ -131,7 +131,7 @@ class HttpPusher(object):
                 starting_max_ordering = self.max_stream_ordering
                 try:
                     yield self._unsafe_process()
-                except:
+                except Exception:
                     logger.exception("Exception processing notifs")
                 if self.max_stream_ordering == starting_max_ordering:
                     break
@@ -314,7 +314,7 @@ class HttpPusher(object):
             defer.returnValue([])
         try:
             resp = yield self.http_client.post_json_get_json(self.url, notification_dict)
-        except:
+        except Exception:
             logger.warn("Failed to push %s ", self.url)
             defer.returnValue(False)
         rejected = []
@@ -345,7 +345,7 @@ class HttpPusher(object):
         }
         try:
             resp = yield self.http_client.post_json_get_json(self.url, d)
-        except:
+        except Exception:
             logger.exception("Failed to push %s ", self.url)
             defer.returnValue(False)
         rejected = []

View file

@@ -27,7 +27,7 @@ logger = logging.getLogger(__name__)
 try:
     from synapse.push.emailpusher import EmailPusher
     from synapse.push.mailer import Mailer, load_jinja2_templates
-except:
+except Exception:
     pass

View file

@@ -137,7 +137,7 @@ class PusherPool:
             )
 
             yield preserve_context_over_deferred(defer.gatherResults(deferreds))
-        except:
+        except Exception:
             logger.exception("Exception in pusher on_new_notifications")
 
     @defer.inlineCallbacks
@@ -162,7 +162,7 @@ class PusherPool:
             )
 
             yield preserve_context_over_deferred(defer.gatherResults(deferreds))
-        except:
+        except Exception:
             logger.exception("Exception in pusher on_new_receipts")
 
     @defer.inlineCallbacks
@@ -188,7 +188,7 @@ class PusherPool:
         for pusherdict in pushers:
             try:
                 p = self.pusher_factory.create_pusher(pusherdict)
-            except:
+            except Exception:
                 logger.exception("Couldn't start a pusher: caught Exception")
                 continue
             if p:

View file

@@ -162,7 +162,7 @@ class ReplicationStreamer(object):
                     )
                     try:
                         updates, current_token = yield stream.get_updates()
-                    except:
+                    except Exception:
                         logger.info("Failed to handle stream %s", stream.NAME)
                         raise

View file

@@ -93,7 +93,7 @@ class ClientDirectoryServer(ClientV1RestServlet):
                 )
             except SynapseError as e:
                 raise e
-            except:
+            except Exception:
                 logger.exception("Failed to create association")
                 raise
         except AuthError:

View file

@@ -211,7 +211,7 @@ class LoginRestServlet(ClientV1RestServlet):
             user_id = identifier["user"]
 
         if not user_id.startswith('@'):
-            user_id = UserID.create(
+            user_id = UserID(
                 user_id, self.hs.hostname
             ).to_string()
@@ -278,7 +278,7 @@ class LoginRestServlet(ClientV1RestServlet):
         if user is None:
             raise LoginError(401, "Invalid JWT", errcode=Codes.UNAUTHORIZED)
 
-        user_id = UserID.create(user, self.hs.hostname).to_string()
+        user_id = UserID(user, self.hs.hostname).to_string()
         auth_handler = self.auth_handler
         registered_user_id = yield auth_handler.check_user_exists(user_id)
         if registered_user_id:
@@ -444,7 +444,7 @@ class CasTicketServlet(ClientV1RestServlet):
                 if required_value != actual_value:
                     raise LoginError(401, "Unauthorized", errcode=Codes.UNAUTHORIZED)
 
-        user_id = UserID.create(user, self.hs.hostname).to_string()
+        user_id = UserID(user, self.hs.hostname).to_string()
         auth_handler = self.auth_handler
         registered_user_id = yield auth_handler.check_user_exists(user_id)
         if not registered_user_id:

View file

@@ -78,7 +78,7 @@ class PresenceStatusRestServlet(ClientV1RestServlet):
                     raise KeyError()
             except SynapseError as e:
                 raise e
-            except:
+            except Exception:
                 raise SynapseError(400, "Unable to parse state")
 
         yield self.presence_handler.set_state(user, state)

View file

@@ -52,7 +52,7 @@ class ProfileDisplaynameRestServlet(ClientV1RestServlet):
 
         try:
             new_name = content["displayname"]
-        except:
+        except Exception:
             defer.returnValue((400, "Unable to parse name"))
 
         yield self.profile_handler.set_displayname(
@@ -94,7 +94,7 @@ class ProfileAvatarURLRestServlet(ClientV1RestServlet):
         content = parse_json_object_from_request(request)
         try:
             new_name = content["avatar_url"]
-        except:
+        except Exception:
             defer.returnValue((400, "Unable to parse name"))
 
         yield self.profile_handler.set_avatar_url(

View file

@@ -238,7 +238,7 @@ class JoinRoomAliasServlet(ClientV1RestServlet):
         try:
             content = parse_json_object_from_request(request)
-        except:
+        except Exception:
             # Turns out we used to ignore the body entirely, and some clients
             # cheekily send invalid bodies.
             content = {}
@@ -247,7 +247,7 @@ class JoinRoomAliasServlet(ClientV1RestServlet):
             room_id = room_identifier
             try:
                 remote_room_hosts = request.args["server_name"]
-            except:
+            except Exception:
                 remote_room_hosts = None
         elif RoomAlias.is_valid(room_identifier):
             handler = self.handlers.room_member_handler
@@ -587,7 +587,7 @@ class RoomMembershipRestServlet(ClientV1RestServlet):
         try:
             content = parse_json_object_from_request(request)
-        except:
+        except Exception:
             # Turns out we used to ignore the body entirely, and some clients
             # cheekily send invalid bodies.
             content = {}

View file

@@ -50,7 +50,7 @@ class GetFilterRestServlet(RestServlet):
 
         try:
             filter_id = int(filter_id)
-        except:
+        except Exception:
             raise SynapseError(400, "Invalid filter_id")
 
         try:

View file

@@ -412,7 +412,7 @@ class GroupCreateServlet(RestServlet):
         # TODO: Create group on remote server
         content = parse_json_object_from_request(request)
         localpart = content.pop("localpart")
-        group_id = GroupID.create(localpart, self.server_name).to_string()
+        group_id = GroupID(localpart, self.server_name).to_string()
 
         result = yield self.groups_handler.create_group(group_id, user_id, content)

View file

@@ -125,7 +125,7 @@ class SyncRestServlet(RestServlet):
                 filter_object = json.loads(filter_id)
                 set_timeline_upper_limit(filter_object,
                                          self.hs.config.filter_timeline_limit)
-            except:
+            except Exception:
                 raise SynapseError(400, "Invalid filter JSON")
             self.filtering.check_valid_filter(filter_object)
             filter = FilterCollection(filter_object)

View file

@@ -65,7 +65,7 @@ class UserDirectorySearchRestServlet(RestServlet):
 
         try:
             search_term = body["search_term"]
-        except:
+        except Exception:
             raise SynapseError(400, "`search_term` is required field")
 
         results = yield self.user_directory_handler.search_users(

View file

@@ -213,7 +213,7 @@ class RemoteKey(Resource):
                     )
                 except KeyLookupError as e:
                     logger.info("Failed to fetch key: %s", e)
-                except:
+                except Exception:
                     logger.exception("Failed to get key for %r", server_name)
             yield self.query_keys(
                 request, query, query_remote_on_cache_miss=False

View file

@@ -17,6 +17,7 @@ from synapse.http.server import respond_with_json, finish_request
 from synapse.api.errors import (
     cs_error, Codes, SynapseError
 )
+from synapse.util import logcontext
 
 from twisted.internet import defer
 from twisted.protocols.basic import FileSender
@@ -44,7 +45,7 @@ def parse_media_id(request):
         except UnicodeDecodeError:
             pass
         return server_name, media_id, file_name
-    except:
+    except Exception:
         raise SynapseError(
             404,
             "Invalid media id token %r" % (request.postpath,),
@@ -103,7 +104,9 @@ def respond_with_file(request, media_type, file_path,
         )
 
         with open(file_path, "rb") as f:
-            yield FileSender().beginFileTransfer(f, request)
+            yield logcontext.make_deferred_yieldable(
+                FileSender().beginFileTransfer(f, request)
+            )
 
         finish_request(request)
     else:
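The respond_with_file change wraps the FileSender deferred in logcontext.make_deferred_yieldable before yielding on it, so the calling log context follows the callback. A minimal sketch of the wrapping pattern; some_twisted_api is a hypothetical stand-in for FileSender().beginFileTransfer(f, request):

from twisted.internet import defer

from synapse.util import logcontext

@defer.inlineCallbacks
def send_wrapped(some_twisted_api):
    # Wrap a deferred produced by "external" Twisted code before yielding,
    # so the current logcontext is restored when the generator resumes.
    result = yield logcontext.make_deferred_yieldable(some_twisted_api())
    defer.returnValue(result)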

View file

@@ -310,7 +310,7 @@ class MediaRepository(object):
                 media_length=length,
                 filesystem_id=file_id,
             )
-        except:
+        except Exception:
             os.remove(fname)
             raise

View file

@@ -367,7 +367,7 @@ class PreviewUrlResource(Resource):
                 dirs = self.filepaths.url_cache_filepath_dirs_to_delete(media_id)
                 for dir in dirs:
                     os.rmdir(dir)
-            except:
+            except Exception:
                 pass
 
         yield self.store.delete_url_cache(removed_media)
@@ -397,7 +397,7 @@ class PreviewUrlResource(Resource):
                 dirs = self.filepaths.url_cache_filepath_dirs_to_delete(media_id)
                 for dir in dirs:
                     os.rmdir(dir)
-            except:
+            except Exception:
                 pass
 
             thumbnail_dir = self.filepaths.url_cache_thumbnail_directory(media_id)
@@ -415,7 +415,7 @@ class PreviewUrlResource(Resource):
                 dirs = self.filepaths.url_cache_thumbnail_dirs_to_delete(media_id)
                 for dir in dirs:
                     os.rmdir(dir)
-            except:
+            except Exception:
                 pass
 
         yield self.store.delete_url_cache_media(removed_media)

View file

@@ -560,7 +560,7 @@ def _resolve_with_state(unconflicted_state_ids, conflicted_state_ds, auth_event_
         resolved_state = _resolve_state_events(
             conflicted_state, auth_events
         )
-    except:
+    except Exception:
         logger.exception("Failed to resolve state")
         raise

View file

@@ -103,7 +103,7 @@ class LoggingTransaction(object):
                     "[SQL values] {%s} %r",
                     self.name, args[0]
                 )
-            except:
+            except Exception:
                 # Don't let logging failures stop SQL from working
                 pass

View file

@@ -98,7 +98,7 @@ class BackgroundUpdateStore(SQLBaseStore):
                 result = yield self.do_next_background_update(
                     self.BACKGROUND_UPDATE_DURATION_MS
                 )
-            except:
+            except Exception:
                 logger.exception("Error doing update")
             else:
                 if result is None:

View file

@@ -1481,7 +1481,7 @@ class EventsStore(SQLBaseStore):
                         for i in ids
                         if i in res
                     ])
-                except:
+                except Exception:
                     logger.exception("Failed to callback")
             with PreserveLoggingContext():
                 reactor.callFromThread(fire, event_list, row_dict)

View file

@@ -66,7 +66,7 @@ def prepare_database(db_conn, database_engine, config):
 
         cur.close()
         db_conn.commit()
-    except:
+    except Exception:
         db_conn.rollback()
         raise

View file

@@ -636,7 +636,7 @@ class RoomMemberStore(SQLBaseStore):
             room_id = row["room_id"]
             try:
                 content = json.loads(row["content"])
-            except:
+            except Exception:
                 continue
 
             display_name = content.get("displayname", None)

View file

@@ -22,7 +22,7 @@ def run_create(cur, database_engine, *args, **kwargs):
     # NULL indicates user was not registered by an appservice.
     try:
         cur.execute("ALTER TABLE users ADD COLUMN appservice_id TEXT")
-    except:
+    except Exception:
         # Maybe we already added the column? Hope so...
         pass

View file

@@ -81,7 +81,7 @@ class SearchStore(BackgroundUpdateStore):
                 etype = row["type"]
                 try:
                     content = json.loads(row["content"])
-                except:
+                except Exception:
                     continue
 
                 if etype == "m.room.message":
@@ -407,7 +407,7 @@ class SearchStore(BackgroundUpdateStore):
                 origin_server_ts, stream = pagination_token.split(",")
                 origin_server_ts = int(origin_server_ts)
                 stream = int(stream)
-            except:
+            except Exception:
                 raise SynapseError(400, "Invalid pagination token")
 
             clauses.append(

View file

@@ -80,13 +80,13 @@ class PaginationConfig(object):
                 from_tok = None  # For backwards compat.
             elif from_tok:
                 from_tok = StreamToken.from_string(from_tok)
-        except:
+        except Exception:
             raise SynapseError(400, "'from' paramater is invalid")
 
         try:
             if to_tok:
                 to_tok = StreamToken.from_string(to_tok)
-        except:
+        except Exception:
             raise SynapseError(400, "'to' paramater is invalid")
 
         limit = get_param("limit", None)
@@ -98,7 +98,7 @@ class PaginationConfig(object):
         try:
             return PaginationConfig(from_tok, to_tok, direction, limit)
-        except:
+        except Exception:
             logger.exception("Failed to create pagination config")
             raise SynapseError(400, "Invalid request.")

View file

@@ -12,6 +12,7 @@
 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 # See the License for the specific language governing permissions and
 # limitations under the License.
+import string
 
 from synapse.api.errors import SynapseError
@@ -126,15 +127,11 @@ class DomainSpecificString(
         try:
             cls.from_string(s)
             return True
-        except:
+        except Exception:
             return False
 
     __str__ = to_string
 
-    @classmethod
-    def create(cls, localpart, domain,):
-        return cls(localpart=localpart, domain=domain)
-
 
 class UserID(DomainSpecificString):
     """Structure representing a user ID."""
@@ -160,6 +157,38 @@ class GroupID(DomainSpecificString):
     """Structure representing a group ID."""
     SIGIL = "+"
 
+    @classmethod
+    def from_string(cls, s):
+        group_id = super(GroupID, cls).from_string(s)
+        if not group_id.localpart:
+            raise SynapseError(
+                400,
+                "Group ID cannot be empty",
+            )
+
+        if contains_invalid_mxid_characters(group_id.localpart):
+            raise SynapseError(
+                400,
+                "Group ID can only contain characters a-z, 0-9, or '=_-./'",
+            )
+
+        return group_id
+
+
+mxid_localpart_allowed_characters = set("_-./=" + string.ascii_lowercase + string.digits)
+
+
+def contains_invalid_mxid_characters(localpart):
+    """Check for characters not allowed in an mxid or groupid localpart
+
+    Args:
+        localpart (basestring): the localpart to be checked
+
+    Returns:
+        bool: True if there are any naughty characters
+    """
+    return any(c not in mxid_localpart_allowed_characters for c in localpart)
+
+
 class StreamToken(
     namedtuple("Token", (
@@ -184,7 +213,7 @@ class StreamToken(
                 # i.e. old token from before receipt_key
                 keys.append("0")
             return cls(*keys)
-        except:
+        except Exception:
             raise SynapseError(400, "Invalid Token")
 
     def to_string(self):
@@ -270,7 +299,7 @@ class RoomStreamToken(namedtuple("_StreamToken", "topological stream")):
             if string[0] == 't':
                 parts = string[1:].split('-', 1)
                 return cls(topological=int(parts[0]), stream=int(parts[1]))
-        except:
+        except Exception:
             pass
 
         raise SynapseError(400, "Invalid token %r" % (string,))
@@ -279,7 +308,7 @@ class RoomStreamToken(namedtuple("_StreamToken", "topological stream")):
         try:
             if string[0] == 's':
                 return cls(topological=None, stream=int(string[1:]))
-        except:
+        except Exception:
             pass
 
         raise SynapseError(400, "Invalid token %r" % (string,))

View file

@@ -59,9 +59,9 @@ class Clock(object):
             f(function): The function to call repeatedly.
             msec(float): How long to wait between calls in milliseconds.
         """
-        l = task.LoopingCall(f)
-        l.start(msec / 1000.0, now=False)
-        return l
+        call = task.LoopingCall(f)
+        call.start(msec / 1000.0, now=False)
+        return call
 
     def call_later(self, delay, callback, *args, **kwargs):
         """Call something later
@@ -82,7 +82,7 @@ class Clock(object):
     def cancel_call_later(self, timer, ignore_errs=False):
         try:
             timer.cancel()
-        except:
+        except Exception:
             if not ignore_errs:
                 raise
@@ -97,12 +97,12 @@ class Clock(object):
             try:
                 ret_deferred.errback(e)
-            except:
+            except Exception:
                 pass
 
            try:
                 given_deferred.cancel()
-            except:
+            except Exception:
                 pass
 
         timer = None
@@ -110,7 +110,7 @@ class Clock(object):
         def cancel(res):
             try:
                 self.cancel_call_later(timer)
-            except:
+            except Exception:
                 pass
             return res
@@ -119,7 +119,7 @@ class Clock(object):
         def success(res):
             try:
                 ret_deferred.callback(res)
-            except:
+            except Exception:
                 pass
 
             return res
@@ -127,7 +127,7 @@ class Clock(object):
        def err(res):
            try:
                ret_deferred.errback(res)
-            except:
+            except Exception:
                pass
 
        given_deferred.addCallbacks(callback=success, errback=err)
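Aside from the bare-except fixes, the only change in the looping_call hunk is renaming the ambiguous variable l to call; the Twisted API usage is unchanged. A standalone sketch of the same pattern, with an illustrative interval (a running reactor is needed for it to actually fire):

from twisted.internet import task

def looping_call(f, msec):
    # Schedule f() every msec milliseconds, without firing immediately.
    call = task.LoopingCall(f)
    call.start(msec / 1000.0, now=False)
    return call

# handle = looping_call(lambda: print("tick"), 1000)
# ... later: handle.stop()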

View file

@@ -73,7 +73,7 @@ class ObservableDeferred(object):
                 try:
                     # TODO: Handle errors here.
                     self._observers.pop().callback(r)
-                except:
+                except Exception:
                     pass
             return r
@@ -83,7 +83,7 @@ class ObservableDeferred(object):
                 try:
                     # TODO: Handle errors here.
                     self._observers.pop().errback(f)
-                except:
+                except Exception:
                     pass
 
             if consumeErrors:
@@ -205,7 +205,7 @@ class Linearizer(object):
             try:
                 with PreserveLoggingContext():
                     yield current_defer
-            except:
+            except Exception:
                 logger.exception("Unexpected exception in Linearizer")
 
             logger.info("Acquired linearizer lock %r for key %r", self.name,

View file

@@ -42,7 +42,7 @@ try:
     def get_thread_resource_usage():
         return resource.getrusage(RUSAGE_THREAD)
-except:
+except Exception:
     # If the system doesn't support resource.getrusage(RUSAGE_THREAD) then we
     # won't track resource usage by returning None.
     def get_thread_resource_usage():

View file

@@ -189,7 +189,7 @@ class RetryDestinationLimiter(object):
                 yield self.store.set_destination_retry_timings(
                     self.destination, retry_last_ts, self.retry_interval
                 )
-            except:
+            except Exception:
                 logger.exception(
                     "Failed to store set_destination_retry_timings",
                 )

View file

@@ -91,7 +91,4 @@ class WheelTimer(object):
         return ret
 
     def __len__(self):
-        l = 0
-        for entry in self.entries:
-            l += len(entry.queue)
-        return l
+        return sum(len(entry.queue) for entry in self.entries)

View file

@@ -65,7 +65,7 @@ class ApplicationServiceStoreTestCase(unittest.TestCase):
         for f in self.as_yaml_files:
             try:
                 os.remove(f)
-            except:
+            except Exception:
                 pass
 
     def _add_appservice(self, as_token, id, url, hs_token, sender):

View file

@@ -17,7 +17,7 @@ from tests import unittest
 
 from synapse.api.errors import SynapseError
 from synapse.server import HomeServer
-from synapse.types import UserID, RoomAlias
+from synapse.types import UserID, RoomAlias, GroupID
 
 mock_homeserver = HomeServer(hostname="my.domain")
@@ -60,3 +60,25 @@ class RoomAliasTestCase(unittest.TestCase):
 
         room = RoomAlias("channel", "my.domain")
         self.assertEquals(room.to_string(), "#channel:my.domain")
+
+
+class GroupIDTestCase(unittest.TestCase):
+    def test_parse(self):
+        group_id = GroupID.from_string("+group/=_-.123:my.domain")
+        self.assertEqual("group/=_-.123", group_id.localpart)
+        self.assertEqual("my.domain", group_id.domain)
+
+    def test_validate(self):
+        bad_ids = [
+            "$badsigil:domain",
+            "+:empty",
+        ] + [
+            "+group" + c + ":domain" for c in "A%?æ£"
+        ]
+        for id_string in bad_ids:
+            try:
+                GroupID.from_string(id_string)
+                self.fail("Parsing '%s' should raise exception" % id_string)
+            except SynapseError as exc:
+                self.assertEqual(400, exc.code)
+                self.assertEqual("M_UNKNOWN", exc.errcode)

View file

@@ -184,7 +184,7 @@ class MockHttpResource(HttpServer):
             mock_request.args = urlparse.parse_qs(path.split('?')[1])
             mock_request.path = path.split('?')[0]
             path = mock_request.path
-        except:
+        except Exception:
             pass
 
         for (method, pattern, func) in self.callbacks:
@@ -364,13 +364,13 @@ class MemoryDataStore(object):
             return {
                 "name": self.tokens_to_users[token],
             }
-        except:
+        except Exception:
             raise StoreError(400, "User does not exist.")
 
     def get_room(self, room_id):
         try:
             return self.rooms[room_id]
-        except:
+        except Exception:
             return None
 
     def store_room(self, room_id, room_creator_user_id, is_public):
@@ -499,7 +499,7 @@ class DeferredMockCallable(object):
         for _, _, d in self.expectations:
             try:
                 d.errback(failure)
-            except:
+            except Exception:
                 pass
 
         raise failure