0
0
Fork 1
mirror of https://mau.dev/maunium/synapse.git synced 2024-12-14 21:03:51 +01:00

actually throw meaningful errors

This commit is contained in:
Matthew Hodgson 2016-04-08 21:36:48 +01:00
parent b36270b5e1
commit 83b2f83da0
2 changed files with 40 additions and 14 deletions

View file

@@ -271,12 +271,19 @@ class SimpleHttpClient(object):
if 'Content-Length' in headers and headers['Content-Length'] > max_size:
logger.warn("Requested URL is too large > %r bytes" % (self.max_size,))
# XXX: do we want to explicitly drop the connection here somehow? if so, how?
raise # what should we be raising here?
raise SynapseError(
502,
"Requested file is too large > %r bytes" % (self.max_size,),
Codes.TOO_LARGE,
)
if response.code > 299:
logger.warn("Got %d when downloading %s" % (response.code, url))
raise
raise SynapseError(
502,
"Got error %d" % (response.code,),
Codes.UNKNOWN,
)
# TODO: if our Content-Type is HTML or something, just read the first
# N bytes into RAM rather than saving it all to disk only to read it
@@ -287,9 +294,13 @@ class SimpleHttpClient(object):
_readBodyToFile,
response, output_stream, max_size
)
except:
except Exception as e:
logger.exception("Failed to download body")
raise
raise SynapseError(
502,
("Failed to download remote body: %s" % e),
Codes.UNKNOWN,
)
defer.returnValue((length, headers, response.request.absoluteURI, response.code))

View file

@@ -19,6 +19,9 @@ from twisted.web.server import NOT_DONE_YET
from twisted.internet import defer
from urlparse import urlparse, urlsplit, urlunparse
from synapse.api.errors import (
SynapseError, Codes,
)
from synapse.util.stringutils import random_string
from synapse.util.caches.expiringcache import ExpiringCache
from synapse.http.client import SpiderHttpClient
@@ -47,9 +50,11 @@ class PreviewUrlResource(BaseMediaResource):
isLeaf = True
def __init__(self, hs, filepaths):
if not html:
logger.warn("Disabling PreviewUrlResource as lxml not available")
raise
try:
if html:
pass
except:
raise RunTimeError("Disabling PreviewUrlResource as lxml not available")
if not hasattr(hs.config, "url_preview_ip_range_blacklist"):
logger.warn(
@@ -57,7 +62,10 @@ class PreviewUrlResource(BaseMediaResource):
"blacklist in url_preview_ip_range_blacklist for url previewing "
"to work"
)
raise
raise RunTimeError(
"Disabling PreviewUrlResource as "
"url_preview_ip_range_blacklist not specified"
)
BaseMediaResource.__init__(self, hs, filepaths)
self.client = SpiderHttpClient(hs)
@@ -121,7 +129,10 @@ class PreviewUrlResource(BaseMediaResource):
logger.warn(
"URL %s blocked by url_blacklist entry %s", url, entry
)
raise
raise SynapseError(
403, "URL blocked by url pattern blacklist entry",
Codes.UNKNOWN
)
# first check the memory cache - good to handle all the clients on this
# HS thundering away to preview the same URL at the same time.
@@ -229,8 +240,9 @@ class PreviewUrlResource(BaseMediaResource):
)
respond_with_json_bytes(request, 200, json.dumps(og), send_cors=True)
except:
raise
except Exception as e:
raise e
@defer.inlineCallbacks
def _calc_og(self, tree, media_info, requester):
@@ -418,9 +430,12 @@ class PreviewUrlResource(BaseMediaResource):
user_id=user,
)
except:
except Exception as e:
os.remove(fname)
raise
raise SynapseError(
500, ("Failed to download content: %s" % e),
Codes.UNKNOWN
)
defer.returnValue({
"media_type": media_type,