# -*- coding: utf-8 -*-
# Copyright 2014-2016 OpenMarket Ltd
# Copyright 2018 New Vector Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import cgi
import logging
import random
import sys
from io import BytesIO

from six import PY3, raise_from, string_types
from six.moves import urllib

import attr
import treq
from canonicaljson import encode_canonical_json
from prometheus_client import Counter
from signedjson.sign import sign_json

from twisted.internet import defer, protocol
from twisted.internet.error import DNSLookupError
from twisted.internet.task import _EPSILON, Cooperator
from twisted.web._newclient import ResponseDone
from twisted.web.http_headers import Headers

import synapse.metrics
import synapse.util.retryutils
from synapse.api.errors import (
    Codes,
    FederationDeniedError,
    HttpResponseException,
    RequestSendFailed,
    SynapseError,
)
from synapse.http import QuieterFileBodyProducer
from synapse.http.federation.matrix_federation_agent import MatrixFederationAgent
from synapse.util.async_helpers import timeout_deferred
from synapse.util.logcontext import make_deferred_yieldable
from synapse.util.metrics import Measure

logger = logging.getLogger(__name__)

outgoing_requests_counter = Counter("synapse_http_matrixfederationclient_requests",
                                    "", ["method"])
incoming_responses_counter = Counter("synapse_http_matrixfederationclient_responses",
                                     "", ["method", "code"])


MAX_LONG_RETRIES = 10
MAX_SHORT_RETRIES = 3

if PY3:
    MAXINT = sys.maxsize
else:
    MAXINT = sys.maxint


_next_id = 1
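
# `_next_id` feeds MatrixFederationRequest.__attrs_post_init__ below, which
# builds a per-request transaction ID of the form "<METHOD>-O-<n>" used for
# log correlation; the counter wraps before reaching MAXINT.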


@attr.s
class MatrixFederationRequest(object):
    method = attr.ib()
    """HTTP method
    :type: str
    """

    path = attr.ib()
    """HTTP path
    :type: str
    """

    destination = attr.ib()
    """The remote server to send the HTTP request to.
    :type: str"""

    json = attr.ib(default=None)
    """JSON to send in the body.
    :type: dict|None
    """

    json_callback = attr.ib(default=None)
    """A callback to generate the JSON.
    :type: func|None
    """

    query = attr.ib(default=None)
    """Query arguments.
    :type: dict|None
    """

    txn_id = attr.ib(default=None)
    """Unique ID for this request (for logging)
    :type: str|None
    """

    def __attrs_post_init__(self):
        global _next_id
        self.txn_id = "%s-O-%s" % (self.method, _next_id)
        _next_id = (_next_id + 1) % (MAXINT - 1)

    def get_json(self):
        if self.json_callback:
            return self.json_callback()
        return self.json
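
# Illustrative construction (hypothetical destination and path); the helper
# methods on MatrixFederationHttpClient below build these objects for you:
#
#   request = MatrixFederationRequest(
#       method="GET",
#       destination="remote.example.com",
#       path="/_matrix/federation/v1/version",
#   )
#   request.txn_id      # e.g. "GET-O-1", assigned in __attrs_post_init__
#   request.get_json()  # None, since no JSON body or callback was supplied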


@defer.inlineCallbacks
def _handle_json_response(reactor, timeout_sec, request, response):
    """
    Reads the JSON body of a response, with a timeout

    Args:
        reactor (IReactor): twisted reactor, for the timeout
        timeout_sec (float): number of seconds to wait for response to complete
        request (MatrixFederationRequest): the request that triggered the response
        response (IResponse): response to the request

    Returns:
        dict: parsed JSON response
    """
    try:
        check_content_type_is_json(response.headers)

        d = treq.json_content(response)
        d = timeout_deferred(
            d,
            timeout=timeout_sec,
            reactor=reactor,
        )

        body = yield make_deferred_yieldable(d)
    except Exception as e:
        logger.warn(
            "{%s} [%s] Error reading response: %s",
            request.txn_id,
            request.destination,
            e,
        )
        raise
    logger.info(
        "{%s} [%s] Completed: %d %s",
        request.txn_id,
        request.destination,
        response.code,
        response.phrase.decode('ascii', errors='replace'),
    )
    defer.returnValue(body)
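
# _handle_json_response is shared by the JSON helpers on
# MatrixFederationHttpClient below (put_json, post_json, get_json, delete_json):
# each of them sends the request via _send_request and then hands the response
# to this function to stream and parse the body.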


class MatrixFederationHttpClient(object):
    """HTTP client used to talk to other homeservers over the federation
    protocol. Sends client certificates and signs requests.

    Attributes:
        agent (twisted.web.client.Agent): The twisted Agent used to send the
            requests.
    """

    def __init__(self, hs, tls_client_options_factory):
        self.hs = hs
        self.signing_key = hs.config.signing_key[0]
        self.server_name = hs.hostname
        reactor = hs.get_reactor()

        self.agent = MatrixFederationAgent(
            hs.get_reactor(),
            tls_client_options_factory,
        )
        self.clock = hs.get_clock()
        self._store = hs.get_datastore()
        self.version_string_bytes = hs.version_string.encode('ascii')
        self.default_timeout = 60

        def schedule(x):
            reactor.callLater(_EPSILON, x)

        self._cooperator = Cooperator(scheduler=schedule)
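
    # The Cooperator above drives the chunked upload of request bodies: it is
    # handed to QuieterFileBodyProducer in _send_request, so body writes are
    # scheduled on the reactor via callLater rather than performed in one go.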

    @defer.inlineCallbacks
    def _send_request_with_optional_trailing_slash(
        self,
        request,
        try_trailing_slash_on_400=False,
        **send_request_args
    ):
        """Wrapper for _send_request which can optionally retry the request
        upon receiving a combination of a 400 HTTP response code and a
        'M_UNRECOGNIZED' errcode. This is a workaround for Synapse <= v0.99.3
        due to #3622.

        Args:
            request (MatrixFederationRequest): details of request to be sent
            try_trailing_slash_on_400 (bool): Whether, on receiving a 400
                'M_UNRECOGNIZED' from the server, to retry the request with a
                trailing slash appended to the request path.
            send_request_args (Dict): A dictionary of arguments to pass to
                `_send_request()`.

        Raises:
            HttpResponseException: If we get an HTTP response code >= 300
                (except 429).

        Returns:
            Deferred[twisted.web.client.Response]: resolves with the HTTP
            response object on success.
        """
        try:
            response = yield self._send_request(
                request, **send_request_args
            )
        except HttpResponseException as e:
            # Received an HTTP error >= 300. Check if it meets the requirements
            # to retry with a trailing slash
            if not try_trailing_slash_on_400:
                raise

            if e.code != 400 or e.to_synapse_error().errcode != "M_UNRECOGNIZED":
                raise

            # Retry with a trailing slash if we received a 400 with
            # 'M_UNRECOGNIZED', which some endpoints can return when omitting a
            # trailing slash on Synapse <= v0.99.3.
            logger.info("Retrying request with trailing slash")
            request.path += "/"

            response = yield self._send_request(
                request, **send_request_args
            )

        defer.returnValue(response)
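
    # Note that the trailing-slash retry above mutates request.path in place,
    # so the retried request (and any later logging of it) uses the
    # "/"-suffixed path.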

    @defer.inlineCallbacks
    def _send_request(
        self,
        request,
        retry_on_dns_fail=True,
        timeout=None,
        long_retries=False,
        ignore_backoff=False,
        backoff_on_404=False,
    ):
        """
        Sends a request to the given server.

        Args:
            request (MatrixFederationRequest): details of request to be sent

            timeout (int|None): number of milliseconds to wait for the response headers
                (including connecting to the server). 60s by default.

            ignore_backoff (bool): true to ignore the historical backoff data
                and try the request anyway.

            backoff_on_404 (bool): Back off if we get a 404

        Returns:
            Deferred[twisted.web.client.Response]: resolves with the HTTP
            response object on success.

        Raises:
            HttpResponseException: If we get an HTTP response code >= 300
                (except 429).
            NotRetryingDestination: If we are not yet ready to retry this
                server.
            FederationDeniedError: If this destination is not on our
                federation whitelist
            RequestSendFailed: If there were problems connecting to the
                remote, due to e.g. DNS failures, connection timeouts etc.
        """
        if timeout:
            _sec_timeout = timeout / 1000
        else:
            _sec_timeout = self.default_timeout

        if (
            self.hs.config.federation_domain_whitelist is not None and
            request.destination not in self.hs.config.federation_domain_whitelist
        ):
            raise FederationDeniedError(request.destination)

        limiter = yield synapse.util.retryutils.get_retry_limiter(
            request.destination,
            self.clock,
            self._store,
            backoff_on_404=backoff_on_404,
            ignore_backoff=ignore_backoff,
        )
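
        # get_retry_limiter consults the stored backoff state for this
        # destination; per the docstring above it raises NotRetryingDestination
        # if we are not yet ready to retry (unless ignore_backoff is set), and
        # the `with limiter:` block below is what records success or failure
        # for future backoff decisions.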

        method_bytes = request.method.encode("ascii")
        destination_bytes = request.destination.encode("ascii")
        path_bytes = request.path.encode("ascii")
        if request.query:
            query_bytes = encode_query_args(request.query)
        else:
            query_bytes = b""

        headers_dict = {
            b"User-Agent": [self.version_string_bytes],
        }

        with limiter:
            # XXX: Would be much nicer to retry only at the transaction-layer
            # (once we have reliable transactions in place)
            if long_retries:
                retries_left = MAX_LONG_RETRIES
            else:
                retries_left = MAX_SHORT_RETRIES

            url_bytes = urllib.parse.urlunparse((
                b"matrix", destination_bytes,
                path_bytes, None, query_bytes, b"",
            ))
            url_str = url_bytes.decode('ascii')

            url_to_sign_bytes = urllib.parse.urlunparse((
                b"", b"",
                path_bytes, None, query_bytes, b"",
            ))
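
            # Two URL forms are built here: `url_bytes` carries a matrix://
            # scheme with the destination as the authority and is what the
            # agent is asked to fetch, while `url_to_sign_bytes` is just the
            # path and query string, the form that becomes "uri" in the signed
            # auth headers built below.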

            while True:
                try:
                    json = request.get_json()
                    if json:
                        headers_dict[b"Content-Type"] = [b"application/json"]
                        auth_headers = self.build_auth_headers(
                            destination_bytes, method_bytes, url_to_sign_bytes,
                            json,
                        )
                        data = encode_canonical_json(json)
                        producer = QuieterFileBodyProducer(
                            BytesIO(data),
                            cooperator=self._cooperator,
                        )
                    else:
                        producer = None
                        auth_headers = self.build_auth_headers(
                            destination_bytes, method_bytes, url_to_sign_bytes,
                        )

                    headers_dict[b"Authorization"] = auth_headers

                    logger.info(
                        "{%s} [%s] Sending request: %s %s; timeout %fs",
                        request.txn_id, request.destination, request.method,
                        url_str, _sec_timeout,
                    )

                    try:
                        with Measure(self.clock, "outbound_request"):
                            # we don't want all the fancy cookie and redirect handling
                            # that treq.request gives: just use the raw Agent.
                            request_deferred = self.agent.request(
                                method_bytes,
                                url_bytes,
                                headers=Headers(headers_dict),
                                bodyProducer=producer,
                            )

                            request_deferred = timeout_deferred(
                                request_deferred,
                                timeout=_sec_timeout,
                                reactor=self.hs.get_reactor(),
                            )

                            response = yield request_deferred
                    except DNSLookupError as e:
                        raise_from(RequestSendFailed(e, can_retry=retry_on_dns_fail), e)
                    except Exception as e:
                        logger.info("Failed to send request: %s", e)
                        raise_from(RequestSendFailed(e, can_retry=True), e)

                    logger.info(
                        "{%s} [%s] Got response headers: %d %s",
                        request.txn_id,
                        request.destination,
                        response.code,
                        response.phrase.decode('ascii', errors='replace'),
                    )

                    if 200 <= response.code < 300:
                        pass
                    else:
                        # :'(
                        # Update transactions table?
                        d = treq.content(response)
                        d = timeout_deferred(
                            d,
                            timeout=_sec_timeout,
                            reactor=self.hs.get_reactor(),
                        )

                        try:
                            body = yield make_deferred_yieldable(d)
                        except Exception as e:
                            # Eh, we're already going to raise an exception, so let's
                            # ignore if this fails.
                            logger.warn(
                                "{%s} [%s] Failed to get error response: %s %s: %s",
                                request.txn_id,
                                request.destination,
                                request.method,
                                url_str,
                                _flatten_response_never_received(e),
                            )
                            body = None

                        e = HttpResponseException(
                            response.code, response.phrase, body
                        )

                        # Retry if the error is a 429 (Too Many Requests),
                        # otherwise just raise a standard HttpResponseException
                        if response.code == 429:
                            raise_from(RequestSendFailed(e, can_retry=True), e)
                        else:
                            raise e

                    break
                except RequestSendFailed as e:
                    logger.warn(
                        "{%s} [%s] Request failed: %s %s: %s",
                        request.txn_id,
                        request.destination,
                        request.method,
                        url_str,
                        _flatten_response_never_received(e.inner_exception),
                    )

                    if not e.can_retry:
                        raise

                    if retries_left and not timeout:
                        if long_retries:
                            delay = 4 ** (MAX_LONG_RETRIES + 1 - retries_left)
                            delay = min(delay, 60)
                            delay *= random.uniform(0.8, 1.4)
                        else:
                            delay = 0.5 * 2 ** (MAX_SHORT_RETRIES - retries_left)
                            delay = min(delay, 2)
                            delay *= random.uniform(0.8, 1.4)
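
                        # With the constants above this works out to roughly
                        # 0.5s, 1s, 2s between short retries and 4s, 16s, 60s
                        # (capped) between long retries, each scaled by a
                        # random factor in [0.8, 1.4).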

                        logger.debug(
                            "{%s} [%s] Waiting %ss before re-sending...",
                            request.txn_id,
                            request.destination,
                            delay,
                        )

                        yield self.clock.sleep(delay)
                        retries_left -= 1
                    else:
                        raise

                except Exception as e:
                    logger.warn(
                        "{%s} [%s] Request failed: %s %s: %s",
                        request.txn_id,
                        request.destination,
                        request.method,
                        url_str,
                        _flatten_response_never_received(e),
                    )
                    raise

        defer.returnValue(response)

    def build_auth_headers(
        self, destination, method, url_bytes, content=None, destination_is=None,
    ):
        """
        Builds the Authorization headers for a federation request

        Args:
            destination (bytes|None): The destination homeserver of the request.
                May be None if the destination is an identity server, in which case
                destination_is must be non-None.
            method (bytes): The HTTP method of the request
            url_bytes (bytes): The URI path of the request
            content (object): The body of the request
            destination_is (bytes): As 'destination', but if the destination is an
                identity server

        Returns:
            list[bytes]: a list of headers to be added as "Authorization:" headers
        """
        request = {
            "method": method,
            "uri": url_bytes,
            "origin": self.server_name,
        }

        if destination is not None:
            request["destination"] = destination

        if destination_is is not None:
            request["destination_is"] = destination_is

        if content is not None:
            request["content"] = content

        request = sign_json(request, self.server_name, self.signing_key)

        auth_headers = []

        for key, sig in request["signatures"][self.server_name].items():
            auth_headers.append((
                "X-Matrix origin=%s,key=\"%s\",sig=\"%s\"" % (
                    self.server_name, key, sig,
                )).encode('ascii')
            )
        return auth_headers
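
    # The resulting header values take the form (hypothetical key name and
    # signature shown):
    #
    #   X-Matrix origin=myserver.example.com,key="ed25519:a_key_id",sig="dGVzdA..."
    #
    # with one Authorization header produced per local signing key.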

    @defer.inlineCallbacks
    def put_json(self, destination, path, args={}, data={},
                 json_data_callback=None,
                 long_retries=False, timeout=None,
                 ignore_backoff=False,
                 backoff_on_404=False,
                 try_trailing_slash_on_400=False):
        """ Sends the specified json data using PUT

        Args:
            destination (str): The remote server to send the HTTP request
                to.
            path (str): The HTTP path.
            args (dict): query params
            data (dict): A dict containing the data that will be used as
                the request body. This will be encoded as JSON.
            json_data_callback (callable): A callable returning the dict to
                use as the request body.
            long_retries (bool): A boolean that indicates whether we should
                retry for a short or long time.
            timeout (int): How long to try (in ms) the destination for before
                giving up. None indicates no timeout.
            ignore_backoff (bool): true to ignore the historical backoff data
                and try the request anyway.
            backoff_on_404 (bool): True if we should count a 404 response as
                a failure of the server (and should therefore back off future
                requests).
            try_trailing_slash_on_400 (bool): True if on a 400 M_UNRECOGNIZED
                response we should try appending a trailing slash to the end
                of the request. Workaround for #3622 in Synapse <= v0.99.3. This
                will be attempted before backing off if backing off has been
                enabled.

        Returns:
            Deferred[dict|list]: Succeeds when we get a 2xx HTTP response. The
            result will be the decoded JSON body.

        Raises:
            HttpResponseException: If we get an HTTP response code >= 300
                (except 429).
            NotRetryingDestination: If we are not yet ready to retry this
                server.
            FederationDeniedError: If this destination is not on our
                federation whitelist
            RequestSendFailed: If there were problems connecting to the
                remote, due to e.g. DNS failures, connection timeouts etc.
        """
        request = MatrixFederationRequest(
            method="PUT",
            destination=destination,
            path=path,
            query=args,
            json_callback=json_data_callback,
            json=data,
        )

        response = yield self._send_request_with_optional_trailing_slash(
            request,
            try_trailing_slash_on_400,
            backoff_on_404=backoff_on_404,
            ignore_backoff=ignore_backoff,
            long_retries=long_retries,
            timeout=timeout,
        )

        body = yield _handle_json_response(
            self.hs.get_reactor(), self.default_timeout, request, response,
        )

        defer.returnValue(body)
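
    # Illustrative use (hypothetical destination, path and body):
    #
    #   content = yield client.put_json(
    #       "remote.example.com",
    #       path="/_matrix/federation/v1/send/%s" % (txn_id,),
    #       data={"pdus": [], "edus": []},
    #       long_retries=True,
    #   )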

    @defer.inlineCallbacks
    def post_json(self, destination, path, data={}, long_retries=False,
                  timeout=None, ignore_backoff=False, args={}):
        """ Sends the specified json data using POST

        Args:
            destination (str): The remote server to send the HTTP request
                to.
            path (str): The HTTP path.
            data (dict): A dict containing the data that will be used as
                the request body. This will be encoded as JSON.
            long_retries (bool): A boolean that indicates whether we should
                retry for a short or long time.
            timeout (int): How long to try (in ms) the destination for before
                giving up. None indicates no timeout.
            ignore_backoff (bool): true to ignore the historical backoff data and
                try the request anyway.
            args (dict): query params

        Returns:
            Deferred[dict|list]: Succeeds when we get a 2xx HTTP response. The
            result will be the decoded JSON body.

        Raises:
            HttpResponseException: If we get an HTTP response code >= 300
                (except 429).
            NotRetryingDestination: If we are not yet ready to retry this
                server.
            FederationDeniedError: If this destination is not on our
                federation whitelist
            RequestSendFailed: If there were problems connecting to the
                remote, due to e.g. DNS failures, connection timeouts etc.
        """

        request = MatrixFederationRequest(
            method="POST",
            destination=destination,
            path=path,
            query=args,
            json=data,
        )

        response = yield self._send_request(
            request,
            long_retries=long_retries,
            timeout=timeout,
            ignore_backoff=ignore_backoff,
        )
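
        # Unlike the other JSON helpers, the body read below honours the
        # caller-supplied timeout (converted to seconds) rather than
        # self.default_timeout.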

        if timeout:
            _sec_timeout = timeout / 1000
        else:
            _sec_timeout = self.default_timeout

        body = yield _handle_json_response(
            self.hs.get_reactor(), _sec_timeout, request, response,
        )
        defer.returnValue(body)

    @defer.inlineCallbacks
    def get_json(self, destination, path, args=None, retry_on_dns_fail=True,
                 timeout=None, ignore_backoff=False,
                 try_trailing_slash_on_400=False):
        """ GETs some json from the given homeserver and path

        Args:
            destination (str): The remote server to send the HTTP request
                to.
            path (str): The HTTP path.
            args (dict|None): A dictionary used to create query strings, defaults to
                None.
            timeout (int): How long to try (in ms) the destination for before
                giving up. None indicates no timeout and that the request will
                be retried.
            ignore_backoff (bool): true to ignore the historical backoff data
                and try the request anyway.
            try_trailing_slash_on_400 (bool): True if on a 400 M_UNRECOGNIZED
                response we should try appending a trailing slash to the end of
                the request. Workaround for #3622 in Synapse <= v0.99.3.

        Returns:
            Deferred[dict|list]: Succeeds when we get a 2xx HTTP response. The
            result will be the decoded JSON body.

        Raises:
            HttpResponseException: If we get an HTTP response code >= 300
                (except 429).
            NotRetryingDestination: If we are not yet ready to retry this
                server.
            FederationDeniedError: If this destination is not on our
                federation whitelist
            RequestSendFailed: If there were problems connecting to the
                remote, due to e.g. DNS failures, connection timeouts etc.
        """
        logger.debug("get_json args: %s", args)

        logger.debug("Query bytes: %s Retry DNS: %s", args, retry_on_dns_fail)

        request = MatrixFederationRequest(
            method="GET",
            destination=destination,
            path=path,
            query=args,
        )

        response = yield self._send_request_with_optional_trailing_slash(
            request,
            try_trailing_slash_on_400,
            backoff_on_404=False,
            ignore_backoff=ignore_backoff,
            retry_on_dns_fail=retry_on_dns_fail,
            timeout=timeout,
        )

        body = yield _handle_json_response(
            self.hs.get_reactor(), self.default_timeout, request, response,
        )

        defer.returnValue(body)

    @defer.inlineCallbacks
    def delete_json(self, destination, path, long_retries=False,
                    timeout=None, ignore_backoff=False, args={}):
        """Send a DELETE request to the remote expecting some json response

        Args:
            destination (str): The remote server to send the HTTP request
                to.
            path (str): The HTTP path.
            long_retries (bool): A boolean that indicates whether we should
                retry for a short or long time.
            timeout (int): How long to try (in ms) the destination for before
                giving up. None indicates no timeout.
            ignore_backoff (bool): true to ignore the historical backoff data and
                try the request anyway.

        Returns:
            Deferred[dict|list]: Succeeds when we get a 2xx HTTP response. The
            result will be the decoded JSON body.

        Raises:
            HttpResponseException: If we get an HTTP response code >= 300
                (except 429).
            NotRetryingDestination: If we are not yet ready to retry this
                server.
            FederationDeniedError: If this destination is not on our
                federation whitelist
            RequestSendFailed: If there were problems connecting to the
                remote, due to e.g. DNS failures, connection timeouts etc.
        """
        request = MatrixFederationRequest(
            method="DELETE",
            destination=destination,
            path=path,
            query=args,
        )

        response = yield self._send_request(
            request,
            long_retries=long_retries,
            timeout=timeout,
            ignore_backoff=ignore_backoff,
        )

        body = yield _handle_json_response(
            self.hs.get_reactor(), self.default_timeout, request, response,
        )
        defer.returnValue(body)

    @defer.inlineCallbacks
    def get_file(self, destination, path, output_stream, args={},
                 retry_on_dns_fail=True, max_size=None,
                 ignore_backoff=False):
        """GETs a file from a given homeserver

        Args:
            destination (str): The remote server to send the HTTP request to.
            path (str): The HTTP path to GET.
            output_stream (file): File to write the response body to.
            args (dict): Optional dictionary used to create the query string.
            ignore_backoff (bool): true to ignore the historical backoff data
                and try the request anyway.

        Returns:
            Deferred[tuple[int, dict]]: Resolves with an (int, dict) tuple of
            the file length and a dict of the response headers.

        Raises:
            HttpResponseException: If we get an HTTP response code >= 300
                (except 429).
            NotRetryingDestination: If we are not yet ready to retry this
                server.
            FederationDeniedError: If this destination is not on our
                federation whitelist
            RequestSendFailed: If there were problems connecting to the
                remote, due to e.g. DNS failures, connection timeouts etc.
        """
        request = MatrixFederationRequest(
            method="GET",
            destination=destination,
            path=path,
            query=args,
        )

        response = yield self._send_request(
            request,
            retry_on_dns_fail=retry_on_dns_fail,
            ignore_backoff=ignore_backoff,
        )

        headers = dict(response.headers.getAllRawHeaders())

        try:
            d = _readBodyToFile(response, output_stream, max_size)
            d.addTimeout(self.default_timeout, self.hs.get_reactor())
            length = yield make_deferred_yieldable(d)
        except Exception as e:
            logger.warn(
                "{%s} [%s] Error reading response: %s",
                request.txn_id,
                request.destination,
                e,
            )
            raise
        logger.info(
            "{%s} [%s] Completed: %d %s [%d bytes]",
            request.txn_id,
            request.destination,
            response.code,
            response.phrase.decode('ascii', errors='replace'),
            length,
        )
        defer.returnValue((length, headers))


class _ReadBodyToFileProtocol(protocol.Protocol):
    def __init__(self, stream, deferred, max_size):
        self.stream = stream
        self.deferred = deferred
        self.length = 0
        self.max_size = max_size

    def dataReceived(self, data):
        self.stream.write(data)
        self.length += len(data)
        if self.max_size is not None and self.length >= self.max_size:
            self.deferred.errback(SynapseError(
                502,
                "Requested file is too large > %r bytes" % (self.max_size,),
                Codes.TOO_LARGE,
            ))
            self.deferred = defer.Deferred()
            self.transport.loseConnection()

    def connectionLost(self, reason):
        if reason.check(ResponseDone):
            self.deferred.callback(self.length)
        else:
            self.deferred.errback(reason)


def _readBodyToFile(response, stream, max_size):
    d = defer.Deferred()
    response.deliverBody(_ReadBodyToFileProtocol(stream, d, max_size))
    return d
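
# deliverBody streams the response through _ReadBodyToFileProtocol.dataReceived
# above, so the file is written out chunk by chunk rather than buffered in
# memory. If max_size is exceeded, the deferred is errbacked with a 502
# SynapseError and the connection is dropped; a fresh (unused) Deferred is then
# swapped in so the eventual connectionLost has somewhere harmless to report to.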


def _flatten_response_never_received(e):
    if hasattr(e, "reasons"):
        reasons = ", ".join(
            _flatten_response_never_received(f.value)
            for f in e.reasons
        )

        return "%s:[%s]" % (type(e).__name__, reasons)
    else:
        return repr(e)
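
# Illustrative output (hypothetical failure): a twisted ResponseNeverReceived
# wrapping a single ConnectionRefusedError would flatten to something like
# "ResponseNeverReceived:[ConnectionRefusedError('Connection refused')]".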


def check_content_type_is_json(headers):
    """
    Check that a set of HTTP headers have a Content-Type header, and that it
    is application/json.

    Args:
        headers (twisted.web.http_headers.Headers): headers to check

    Raises:
        RequestSendFailed: if the Content-Type header is missing or isn't JSON
    """
    c_type = headers.getRawHeaders(b"Content-Type")
    if c_type is None:
        raise RequestSendFailed(RuntimeError(
            "No Content-Type header"
        ), can_retry=False)

    c_type = c_type[0].decode('ascii')  # only the first header
    val, options = cgi.parse_header(c_type)
    if val != "application/json":
        raise RequestSendFailed(RuntimeError(
            "Content-Type not application/json: was '%s'" % c_type
        ), can_retry=False)


def encode_query_args(args):
    if args is None:
        return b""

    encoded_args = {}
    for k, vs in args.items():
        if isinstance(vs, string_types):
            vs = [vs]
        encoded_args[k] = [v.encode("UTF-8") for v in vs]

    query_bytes = urllib.parse.urlencode(encoded_args, True)

    return query_bytes.encode('utf8')
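
# Illustrative behaviour (hypothetical arguments): single values and lists are
# both accepted, and the result becomes the query component of the federation
# URL built in _send_request:
#
#   encode_query_args({"limit": "10", "v": ["a", "b"]})
#   # -> b'limit=10&v=a&v=b' (pair order follows dict iteration order)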