# -*- coding: utf-8 -*-
# Copyright 2015, 2016 OpenMarket Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import logging

from six.moves import urllib

from prometheus_client import Counter

from twisted.internet import defer

from synapse.api.constants import ThirdPartyEntityKind
from synapse.api.errors import CodeMessageException
from synapse.events.utils import serialize_event
from synapse.http.client import SimpleHttpClient
from synapse.types import ThirdPartyInstanceID
from synapse.util.caches.response_cache import ResponseCache

logger = logging.getLogger(__name__)

# Metrics for outbound requests to application services, labelled by AS ID.
sent_transactions_counter = Counter(
    "synapse_appservice_api_sent_transactions",
    "Number of /transactions/ requests sent",
    ["service"],
)

failed_transactions_counter = Counter(
    "synapse_appservice_api_failed_transactions",
    "Number of /transactions/ requests that failed to send",
    ["service"],
)

sent_events_counter = Counter(
    "synapse_appservice_api_sent_events", "Number of events sent to the AS", ["service"]
)

HOUR_IN_MS = 60 * 60 * 1000


APP_SERVICE_PREFIX = "/_matrix/app/unstable"


def _is_valid_3pe_metadata(info):
    """Checks that third-party protocol metadata contains a list of instances."""
    if "instances" not in info:
        return False
    if not isinstance(info["instances"], list):
        return False
    return True


def _is_valid_3pe_result(r, field):
    """Checks that a single third-party lookup result is well-formed.

    The result must be a dict with string values for `field` and "protocol",
    plus a "fields" dict whose values are all strings.
    """
    if not isinstance(r, dict):
        return False

    for k in (field, "protocol"):
        if k not in r:
            return False
        if not isinstance(r[k], str):
            return False

    if "fields" not in r:
        return False
    fields = r["fields"]
    if not isinstance(fields, dict):
        return False
    for k in fields.keys():
        if not isinstance(fields[k], str):
            return False

    return True
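
# For illustration only: a lookup result of the following shape would pass
# _is_valid_3pe_result(r, field="userid"). The protocol name and values below
# are invented examples, not anything a particular AS is required to return.
#
#     {
#         "userid": "@_example_alice:example.org",
#         "protocol": "example",
#         "fields": {"nickname": "alice"},
#     }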


class ApplicationServiceApi(SimpleHttpClient):
    """Manages homeserver (HS) -> application service (AS) communications,
    including querying and pushing.
    """

    def __init__(self, hs):
        super(ApplicationServiceApi, self).__init__(hs)
        self.clock = hs.get_clock()

        # Cache third-party protocol metadata lookups (see get_3pe_protocol)
        # for an hour.
        self.protocol_meta_cache = ResponseCache(
            hs, "as_protocol_meta", timeout_ms=HOUR_IN_MS
        )

    @defer.inlineCallbacks
    def query_user(self, service, user_id):
        """Asks the application service whether it is interested in user_id.

        Returns (via Deferred) True if the AS confirms knowledge of the user,
        False otherwise (including on 404 or error).
        """
        if service.url is None:
            return False
        uri = service.url + ("/users/%s" % urllib.parse.quote(user_id))
        response = None
        try:
            response = yield self.get_json(uri, {"access_token": service.hs_token})
            if response is not None:  # just an empty json object
                return True
        except CodeMessageException as e:
            if e.code == 404:
                return False
            logger.warning("query_user to %s received %s", uri, e.code)
        except Exception as ex:
            logger.warning("query_user to %s threw exception %s", uri, ex)
        return False

    @defer.inlineCallbacks
    def query_alias(self, service, alias):
        """Asks the application service whether it is interested in the room alias.

        Returns (via Deferred) True if the AS confirms knowledge of the alias,
        False otherwise (including on 404 or error).
        """
        if service.url is None:
            return False
        uri = service.url + ("/rooms/%s" % urllib.parse.quote(alias))
        response = None
        try:
            response = yield self.get_json(uri, {"access_token": service.hs_token})
            if response is not None:  # just an empty json object
                return True
        except CodeMessageException as e:
            logger.warning("query_alias to %s received %s", uri, e.code)
            if e.code == 404:
                return False
        except Exception as ex:
            logger.warning("query_alias to %s threw exception %s", uri, ex)
        return False

    @defer.inlineCallbacks
    def query_3pe(self, service, kind, protocol, fields):
        """Queries the AS for third-party users or locations.

        Returns (via Deferred) the list of well-formed results; entries that
        fail _is_valid_3pe_result are dropped with a warning.
        """
        if kind == ThirdPartyEntityKind.USER:
            required_field = "userid"
        elif kind == ThirdPartyEntityKind.LOCATION:
            required_field = "alias"
        else:
            raise ValueError("Unrecognised 'kind' argument %r to query_3pe()" % (kind,))
        if service.url is None:
            return []

        uri = "%s%s/thirdparty/%s/%s" % (
            service.url,
            APP_SERVICE_PREFIX,
            kind,
            urllib.parse.quote(protocol),
        )
        try:
            response = yield self.get_json(uri, fields)
            if not isinstance(response, list):
                logger.warning(
                    "query_3pe to %s returned an invalid response %r", uri, response
                )
                return []

            ret = []
            for r in response:
                if _is_valid_3pe_result(r, field=required_field):
                    ret.append(r)
                else:
                    logger.warning(
                        "query_3pe to %s returned an invalid result %r", uri, r
                    )

            return ret
        except Exception as ex:
            logger.warning("query_3pe to %s threw exception %s", uri, ex)
            return []

    def get_3pe_protocol(self, service, protocol):
        """Fetches metadata for the given third-party protocol from the AS.

        Results are cached for an hour (see protocol_meta_cache); invalid
        responses and errors yield None.
        """
        if service.url is None:
            return {}

        @defer.inlineCallbacks
        def _get():
            uri = "%s%s/thirdparty/protocol/%s" % (
                service.url,
                APP_SERVICE_PREFIX,
                urllib.parse.quote(protocol),
            )
            try:
                info = yield self.get_json(uri, {})

                if not _is_valid_3pe_metadata(info):
                    logger.warning(
                        "query_3pe_protocol to %s did not return a valid result", uri
                    )
                    return None

                for instance in info.get("instances", []):
                    network_id = instance.get("network_id", None)
                    if network_id is not None:
                        instance["instance_id"] = ThirdPartyInstanceID(
                            service.id, network_id
                        ).to_string()

                return info
            except Exception as ex:
                logger.warning("query_3pe_protocol to %s threw exception %s", uri, ex)
                return None

        key = (service.id, protocol)
        return self.protocol_meta_cache.wrap(key, _get)

    @defer.inlineCallbacks
    def push_bulk(self, service, events, txn_id=None):
        """Pushes a transaction of events to the AS.

        Returns (via Deferred) True if the transaction was delivered (or the
        AS has no URL configured), False otherwise.
        """
        if service.url is None:
            return True

        events = self._serialize(events)

        if txn_id is None:
            logger.warning(
                "push_bulk: Missing txn ID sending events to %s", service.url
            )
            txn_id = str(0)
        txn_id = str(txn_id)

        uri = service.url + ("/transactions/%s" % urllib.parse.quote(txn_id))
        try:
            yield self.put_json(
                uri=uri,
                json_body={"events": events},
                args={"access_token": service.hs_token},
            )
            sent_transactions_counter.labels(service.id).inc()
            sent_events_counter.labels(service.id).inc(len(events))
            return True
        except CodeMessageException as e:
            logger.warning("push_bulk to %s received %s", uri, e.code)
        except Exception as ex:
            logger.warning("push_bulk to %s threw exception %s", uri, ex)
        failed_transactions_counter.labels(service.id).inc()
        return False

    def _serialize(self, events):
        time_now = self.clock.time_msec()
        return [serialize_event(e, time_now, as_client_event=True) for e in events]