Mirror of https://mau.dev/maunium/synapse.git, synced 2024-10-01 01:36:05 -04:00
actually throw meaningful errors

parent b36270b5e1
commit 83b2f83da0
@@ -271,12 +271,19 @@ class SimpleHttpClient(object):
         if 'Content-Length' in headers and headers['Content-Length'] > max_size:
             logger.warn("Requested URL is too large > %r bytes" % (self.max_size,))
-            # XXX: do we want to explicitly drop the connection here somehow? if so, how?
-            raise # what should we be raising here?
+            raise SynapseError(
+                502,
+                "Requested file is too large > %r bytes" % (self.max_size,),
+                Codes.TOO_LARGE,
+            )
 
         if response.code > 299:
             logger.warn("Got %d when downloading %s" % (response.code, url))
-            raise
+            raise SynapseError(
+                502,
+                "Got error %d" % (response.code,),
+                Codes.UNKNOWN,
+            )
 
         # TODO: if our Content-Type is HTML or something, just read the first
         # N bytes into RAM rather than saving it all to disk only to read it
@@ -287,9 +294,13 @@ class SimpleHttpClient(object):
                 _readBodyToFile,
                 response, output_stream, max_size
             )
-        except:
+        except Exception as e:
             logger.exception("Failed to download body")
-            raise
+            raise SynapseError(
+                502,
+                ("Failed to download remote body: %s" % e),
+                Codes.UNKNOWN,
+            )
 
         defer.returnValue((length, headers, response.request.absoluteURI, response.code))
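With these two hunks, SimpleHttpClient.get_file reports an oversized or failed download as a structured SynapseError (HTTP status, message, Matrix error code) instead of a bare raise. A minimal sketch of a caller reacting to that, assuming only what the diff shows plus the usual SynapseError attributes (code, msg, errcode) from synapse.api.errors; the function itself is hypothetical and not part of this commit:

import logging

from twisted.internet import defer

from synapse.api.errors import Codes, SynapseError

logger = logging.getLogger(__name__)


@defer.inlineCallbacks
def download_for_preview(client, url, output_stream, max_size):
    # Hypothetical caller of SimpleHttpClient.get_file, showing how the new
    # structured errors can be inspected rather than lost in a bare raise.
    try:
        length, headers, uri, code = yield client.get_file(
            url, output_stream=output_stream, max_size=max_size,
        )
    except SynapseError as e:
        if e.errcode == Codes.TOO_LARGE:
            logger.warn("Not previewing %s: remote body exceeds %d bytes", url, max_size)
        raise  # propagate the 502 to whoever is serving the request
    defer.returnValue((length, headers, uri, code))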
@@ -19,6 +19,9 @@ from twisted.web.server import NOT_DONE_YET
 from twisted.internet import defer
 from urlparse import urlparse, urlsplit, urlunparse
 
+from synapse.api.errors import (
+    SynapseError, Codes,
+)
 from synapse.util.stringutils import random_string
 from synapse.util.caches.expiringcache import ExpiringCache
 from synapse.http.client import SpiderHttpClient
@@ -47,9 +50,11 @@ class PreviewUrlResource(BaseMediaResource):
     isLeaf = True
 
     def __init__(self, hs, filepaths):
-        if not html:
-            logger.warn("Disabling PreviewUrlResource as lxml not available")
-            raise
+        try:
+            if html:
+                pass
+        except:
+            raise RunTimeError("Disabling PreviewUrlResource as lxml not available")
 
         if not hasattr(hs.config, "url_preview_ip_range_blacklist"):
             logger.warn(
@@ -57,7 +62,10 @@ class PreviewUrlResource(BaseMediaResource):
                 "blacklist in url_preview_ip_range_blacklist for url previewing "
                 "to work"
             )
-            raise
+            raise RunTimeError(
+                "Disabling PreviewUrlResource as "
+                "url_preview_ip_range_blacklist not specified"
+            )
 
         BaseMediaResource.__init__(self, hs, filepaths)
         self.client = SpiderHttpClient(hs)
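One note on the two constructor checks above: Python's builtin exception is spelled RuntimeError, so the RunTimeError name used in both branches would itself fail with a NameError if ever reached. A minimal sketch of what the fail-fast checks appear to intend (spelling corrected, names otherwise taken from the diff):

    def __init__(self, hs, filepaths):
        # Refuse to start the resource if lxml-backed HTML parsing is missing
        # or the spider's IP range blacklist has not been configured.
        if not html:
            raise RuntimeError("Disabling PreviewUrlResource as lxml not available")

        if not hasattr(hs.config, "url_preview_ip_range_blacklist"):
            raise RuntimeError(
                "Disabling PreviewUrlResource as "
                "url_preview_ip_range_blacklist not specified"
            )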
@@ -121,7 +129,10 @@ class PreviewUrlResource(BaseMediaResource):
                     logger.warn(
                         "URL %s blocked by url_blacklist entry %s", url, entry
                     )
-                    raise
+                    raise SynapseError(
+                        403, "URL blocked by url pattern blacklist entry",
+                        Codes.UNKNOWN
+                    )
 
         # first check the memory cache - good to handle all the clients on this
         # HS thundering away to preview the same URL at the same time.
@@ -229,8 +240,9 @@ class PreviewUrlResource(BaseMediaResource):
             )
 
             respond_with_json_bytes(request, 200, json.dumps(og), send_cors=True)
-        except:
-            raise
+        except Exception as e:
+            raise e
+
 
     @defer.inlineCallbacks
     def _calc_og(self, tree, media_info, requester):
@@ -418,9 +430,12 @@ class PreviewUrlResource(BaseMediaResource):
                 user_id=user,
             )
 
-        except:
+        except Exception as e:
             os.remove(fname)
-            raise
+            raise SynapseError(
+                500, ("Failed to download content: %s" % e),
+                Codes.UNKNOWN
+            )
 
         defer.returnValue({
             "media_type": media_type,
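Since every one of these paths now raises SynapseError, the media request handlers can translate the failure into a standard Matrix JSON error body instead of an opaque internal error. A rough sketch of that mapping, assuming the cs_error helper in synapse.api.errors builds the body the way it does elsewhere in the codebase:

from synapse.api.errors import Codes, SynapseError, cs_error

try:
    raise SynapseError(502, "Got error 404", Codes.UNKNOWN)
except SynapseError as e:
    # cs_error() produces the JSON body that respond_with_json_bytes() would
    # send back with HTTP status e.code (502 in this example).
    body = cs_error(e.msg, e.errcode)
    # body == {"error": "Got error 404", "errcode": "M_UNKNOWN"}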