# -*- coding: utf-8 -*-
# Copyright 2015 OpenMarket Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import logging

from simplejson import JSONDecodeError
import simplejson as json
from twisted.internet import defer

from synapse.api.constants import Membership
from synapse.api.errors import StoreError
from synapse.appservice import ApplicationService, AppServiceTransaction
from synapse.storage.roommember import RoomsForUser
from ._base import SQLBaseStore

logger = logging.getLogger(__name__)


def log_failure(failure):
    logger.error("Failed to detect application services: %s", failure.value)
    logger.error(failure.getTraceback())


class ApplicationServiceStore(SQLBaseStore):
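    """Stores application service configuration and keeps an in-memory cache
    of the known application services, populated from the database when the
    store is constructed.
    """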

    def __init__(self, hs):
        super(ApplicationServiceStore, self).__init__(hs)
        self.services_cache = []
        self.cache_defer = self._populate_cache()
        self.cache_defer.addErrback(log_failure)

    @defer.inlineCallbacks
    def unregister_app_service(self, token):
        """Unregisters this service.

        This removes all AS specific regex and the base URL. The token is the
        only thing preserved for future registration attempts.
        """
        yield self.cache_defer  # make sure the cache is ready
        yield self.runInteraction(
            "unregister_app_service",
            self._unregister_app_service_txn,
            token,
        )
        # update cache TODO: Should this be in the txn?
        for service in self.services_cache:
            if service.token == token:
                service.url = None
                service.namespaces = None
                service.hs_token = None

    def _unregister_app_service_txn(self, txn, token):
        # kill the url to prevent pushes
        txn.execute(
            "UPDATE application_services SET url=NULL WHERE token=?",
            (token,)
        )

        # cleanup regex
        as_id = self._get_as_id_txn(txn, token)
        if not as_id:
            logger.warning(
                "unregister_app_service_txn: Failed to find as_id for token=%s",
                token
            )
            return False

        txn.execute(
            "DELETE FROM application_services_regex WHERE as_id=?",
            (as_id,)
        )
        return True

    @defer.inlineCallbacks
    def update_app_service(self, service):
        """Update an application service, clobbering what was previously there.

        Args:
            service(ApplicationService): The updated service.
        """
        yield self.cache_defer  # make sure the cache is ready

        # NB: There is no "insert" since we provide no public-facing API to
        # allocate new ASes. It relies on the server admin inserting the AS
        # token into the database manually.
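        #
        # For illustration only (column constraints beyond `token` are not
        # visible in this file, so this is an assumption), such a manual
        # registration might look roughly like:
        #
        #     INSERT INTO application_services(token) VALUES ('the_as_token');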

        if not service.token or not service.url:
            raise StoreError(400, "Token and url must be specified.")

        if not service.hs_token:
            raise StoreError(500, "No HS token")

        yield self.runInteraction(
            "update_app_service",
            self._update_app_service_txn,
            service
        )

        # update cache TODO: Should this be in the txn?
        for (index, cache_service) in enumerate(self.services_cache):
            if service.token == cache_service.token:
                self.services_cache[index] = service
                logger.info("Updated: %s", service)
                return
        # new entry
        self.services_cache.append(service)
        logger.info("Updated(new): %s", service)

    def _update_app_service_txn(self, txn, service):
        as_id = self._get_as_id_txn(txn, service.token)
        if not as_id:
            logger.warning(
                "update_app_service_txn: Failed to find as_id for token=%s",
                service.token
            )
            return False

        txn.execute(
            "UPDATE application_services SET url=?, hs_token=?, sender=? "
            "WHERE id=?",
            (service.url, service.hs_token, service.sender, as_id,)
        )
        # cleanup regex
        txn.execute(
            "DELETE FROM application_services_regex WHERE as_id=?",
            (as_id,)
        )
        for (ns_int, ns_str) in enumerate(ApplicationService.NS_LIST):
            if ns_str in service.namespaces:
                for regex_obj in service.namespaces[ns_str]:
                    txn.execute(
                        "INSERT INTO application_services_regex("
                        "as_id, namespace, regex) values(?,?,?)",
                        (as_id, ns_int, json.dumps(regex_obj))
                    )
        return True

    def _get_as_id_txn(self, txn, token):
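        """Look up the database row ID for the service with this token.

        Returns None if no such service exists.
        """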
        cursor = txn.execute(
            "SELECT id FROM application_services WHERE token=?",
            (token,)
        )
        res = cursor.fetchone()
        if res:
            return res[0]

    @defer.inlineCallbacks
    def get_app_services(self):
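        """Get all known application services.

        Returns:
            A Deferred which resolves to the cached list of ApplicationServices.
        """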
        yield self.cache_defer  # make sure the cache is ready
        defer.returnValue(self.services_cache)

    @defer.inlineCallbacks
    def get_app_service_by_user_id(self, user_id):
        """Retrieve an application service from its user ID.

        Every application service has a particular user ID associated with it.
        There is no distinguishing feature on the user ID which indicates it
        represents an application service. This function allows you to map from
        a user ID to an application service.

        Args:
            user_id(str): The user ID to check for an application service.
        Returns:
            synapse.appservice.ApplicationService or None.
        """
        yield self.cache_defer  # make sure the cache is ready

        for service in self.services_cache:
            if service.sender == user_id:
                defer.returnValue(service)
                return
        defer.returnValue(None)

    @defer.inlineCallbacks
    def get_app_service_by_token(self, token, from_cache=True):
        """Get the application service with the given appservice token.

        Args:
            token (str): The application service token.
            from_cache (bool): True to get this service from the cache, False to
                check the database.
        Raises:
            StoreError if there was a problem retrieving this service.
        """
        yield self.cache_defer  # make sure the cache is ready

        if from_cache:
            for service in self.services_cache:
                if service.token == token:
                    defer.returnValue(service)
                    return
            defer.returnValue(None)

        # TODO: The from_cache=False impl
        # TODO: This should be JOINed with the application_services_regex table.

    def get_app_service_rooms(self, service):
        """Get a list of RoomsForUser for this application service.

        Application services may be "interested" in lots of rooms depending on
        the room ID, the room aliases, or the members in the room. This function
        takes all of these into account and returns a list of RoomsForUser which
        represents the entire list of room IDs that this application service
        wants to know about.

        Args:
            service(ApplicationService): The application service to get a room
                list for.
        Returns:
            A list of RoomsForUser.
        """
        return self.runInteraction(
            "get_app_service_rooms",
            self._get_app_service_rooms_txn,
            service,
        )

    def _get_app_service_rooms_txn(self, txn, service):
        # get all rooms matching the room ID regex.
        room_entries = self._simple_select_list_txn(
            txn=txn, table="rooms", keyvalues=None, retcols=["room_id"]
        )
        matching_room_list = set([
            r["room_id"] for r in room_entries if
            service.is_interested_in_room(r["room_id"])
        ])

        # resolve room IDs for matching room alias regex.
        room_alias_mappings = self._simple_select_list_txn(
            txn=txn, table="room_aliases", keyvalues=None,
            retcols=["room_id", "room_alias"]
        )
        matching_room_list |= set([
            r["room_id"] for r in room_alias_mappings if
            service.is_interested_in_alias(r["room_alias"])
        ])

        # get all rooms for every user for this AS. This is scoped to users on
        # this HS only.
        user_list = self._simple_select_list_txn(
            txn=txn, table="users", keyvalues=None, retcols=["name"]
        )
        user_list = [
            u["name"] for u in user_list if
            service.is_interested_in_user(u["name"])
        ]
        rooms_for_user_matching_user_id = set()  # RoomsForUser list
        for user_id in user_list:
            # FIXME: This assumes this store is linked with RoomMemberStore :(
            rooms_for_user = self._get_rooms_for_user_where_membership_is_txn(
                txn=txn,
                user_id=user_id,
                membership_list=[Membership.JOIN]
            )
            rooms_for_user_matching_user_id |= set(rooms_for_user)

        # make RoomsForUser tuples for room IDs and aliases which are not in
        # the main list, i.e. rooms which do not have any AS-registered users
        # in them.
        known_room_ids = [r.room_id for r in rooms_for_user_matching_user_id]
        missing_rooms_for_user = [
            RoomsForUser(r, service.sender, "join") for r in
            matching_room_list if r not in known_room_ids
        ]
        rooms_for_user_matching_user_id |= set(missing_rooms_for_user)

        return rooms_for_user_matching_user_id

    def _parse_services_dict(self, results):
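        """Turn raw rows from the application services tables into a list of
        ApplicationService objects, grouping the regex rows by service token.
        """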
        # SQL results in the form:
        # [
        #   {
        #     'regex': "something",
        #     'url': "something",
        #     'namespace': enum,
        #     'as_id': 0,
        #     'token': "something",
        #     'hs_token': "otherthing",
        #     'id': 0
        #   }
        # ]
        services = {}
        for res in results:
            as_token = res["token"]
            if as_token is None:
                continue
            if as_token not in services:
                # add the service
                services[as_token] = {
                    "id": res["id"],
                    "url": res["url"],
                    "token": as_token,
                    "hs_token": res["hs_token"],
                    "sender": res["sender"],
                    "namespaces": {
                        ApplicationService.NS_USERS: [],
                        ApplicationService.NS_ALIASES: [],
                        ApplicationService.NS_ROOMS: []
                    }
                }
            # add the namespace regex if one exists
            ns_int = res["namespace"]
            if ns_int is None:
                continue
            try:
                services[as_token]["namespaces"][
                    ApplicationService.NS_LIST[ns_int]].append(
                    json.loads(res["regex"])
                )
            except IndexError:
                logger.error("Bad namespace enum '%s'. %s", ns_int, res)
            except JSONDecodeError:
                logger.error("Bad regex object '%s'", res["regex"])

        service_list = []
        for service in services.values():
            service_list.append(ApplicationService(
                token=service["token"],
                url=service["url"],
                namespaces=service["namespaces"],
                hs_token=service["hs_token"],
                sender=service["sender"],
                id=service["id"]
            ))
        return service_list

    @defer.inlineCallbacks
    def _populate_cache(self):
        """Populates the services cache from the database."""
        sql = ("SELECT r.*, a.* FROM application_services AS a LEFT JOIN "
               "application_services_regex AS r ON a.id = r.as_id")

        results = yield self._execute_and_decode("appservice_cache", sql)
        services = self._parse_services_dict(results)

        for service in services:
            logger.info("Found application service: %s", service)
            self.services_cache.append(service)


class ApplicationServiceTransactionStore(SQLBaseStore):
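    """Stores outgoing application service transactions, together with
    per-service delivery state such as the last successfully sent
    transaction ID.
    """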

    def __init__(self, hs):
        super(ApplicationServiceTransactionStore, self).__init__(hs)

    @defer.inlineCallbacks
    def get_appservices_by_state(self, state):
        """Get a list of application services based on their state.

        Args:
            state(ApplicationServiceState): The state to filter on.
        Returns:
            A Deferred which resolves to a list of ApplicationServices, which
            may be empty.
        """
        sql = (
            "SELECT r.*, a.* FROM application_services_state AS s LEFT JOIN"
            " application_services AS a ON a.id=s.as_id LEFT JOIN"
            " application_services_regex AS r ON r.as_id=a.id WHERE state = ?"
        )
        results = yield self._execute_and_decode(
            "get_appservices_by_state", sql, state
        )
        # NB: This assumes this class is linked with ApplicationServiceStore
        defer.returnValue(self._parse_services_dict(results))

    @defer.inlineCallbacks
    def get_appservice_state(self, service):
        """Get the application service state.

        Args:
            service(ApplicationService): The service whose state to get.
        Returns:
            A Deferred which resolves to ApplicationServiceState, or None if
            no state is stored for this service.
        """
        result = yield self._simple_select_one(
            "application_services_state",
            dict(as_id=service.id),
            ["state"],
            allow_none=True
        )
        if result:
            defer.returnValue(result.get("state"))
            return
        defer.returnValue(None)

    def set_appservice_state(self, service, state):
        """Set the application service state.

        Args:
            service(ApplicationService): The service whose state to set.
            state(ApplicationServiceState): The connectivity state to apply.
        Returns:
            A Deferred which resolves when the state was set successfully.
        """
        return self._simple_upsert(
            "application_services_state",
            dict(as_id=service.id),
            dict(state=state)
        )

    def create_appservice_txn(self, service, events):
        """Atomically creates a new transaction for this application service
        with the given list of events.

        Args:
            service(ApplicationService): The service the transaction is for.
            events(list<Event>): A list of events to put in the transaction.
        Returns:
            AppServiceTransaction: A new transaction.
        """
        return self.runInteraction(
            "create_appservice_txn",
            self._create_appservice_txn,
            service, events
        )

    def _create_appservice_txn(self, txn, service, events):
        # work out new txn id (highest txn id for this service += 1)
        # The highest id may be the last one sent (in which case it is last_txn)
        # or it may be the highest in the txns list (which are waiting to be/are
        # being sent)
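        # e.g. if last_txn is 3 and transactions 4 and 5 are still queued in
        # the txns table, the new transaction gets ID 6.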
        last_txn_id = self._get_last_txn(txn, service.id)

        result = txn.execute(
            "SELECT MAX(txn_id) FROM application_services_txns WHERE as_id=?",
            (service.id,)
        )
        highest_txn_id = result.fetchone()[0]
        if highest_txn_id is None:
            highest_txn_id = 0

        new_txn_id = max(highest_txn_id, last_txn_id) + 1

        # Insert new txn into txn table
        event_ids = [e.event_id for e in events]
        txn.execute(
            "INSERT INTO application_services_txns(as_id, txn_id, event_ids) "
            "VALUES(?,?,?)",
            (service.id, new_txn_id, json.dumps(event_ids))
        )
        return AppServiceTransaction(
            service=service, id=new_txn_id, events=events
        )

    def complete_appservice_txn(self, txn_id, service):
        """Completes an application service transaction.

        Args:
            txn_id(str): The transaction ID being completed.
            service(ApplicationService): The application service which was sent
                this transaction.
        Returns:
            A Deferred which resolves when this transaction has been completed
            successfully.
        """
        return self.runInteraction(
            "complete_appservice_txn",
            self._complete_appservice_txn,
            txn_id, service
        )

    def _complete_appservice_txn(self, txn, txn_id, service):
        txn_id = int(txn_id)

        # Debugging check: Make sure the txn being completed is EXACTLY +1 from
        # what was there before. If it isn't, we've got problems (e.g. the AS
        # has probably missed some events), so whine loudly but still continue,
        # since it shouldn't fail completion of the transaction.
        last_txn_id = self._get_last_txn(txn, service.id)
        if (last_txn_id + 1) != txn_id:
            logger.error(
                "appservice: Completing a transaction whose ID is not exactly "
                "one more than the last ID sent to this AS. We've either "
                "dropped events or sent transactions to the AS out of order. "
                "FIX ME. last_txn=%s completing_txn=%s service_id=%s",
                last_txn_id, txn_id, service.id
            )

        # Set current txn_id for AS to 'txn_id'
        self._simple_upsert_txn(
            txn, "application_services_state", dict(as_id=service.id),
            dict(last_txn=txn_id)
        )

        # Delete txn
        self._simple_delete_txn(
            txn, "application_services_txns",
            dict(txn_id=txn_id, as_id=service.id)
        )

    def get_oldest_unsent_txn(self, service):
        """Get the oldest transaction which has not been sent for this
        service.

        Args:
            service(ApplicationService): The app service to get the oldest
                unsent transaction for.
        Returns:
            A Deferred which resolves to an AppServiceTransaction or
            None.
        """
        return self.runInteraction(
            "get_oldest_unsent_appservice_txn",
            self._get_oldest_unsent_txn,
            service
        )

    def _get_oldest_unsent_txn(self, txn, service):
        # Monotonically increasing txn ids, so just select the smallest
        # one in the txns table (we delete them when they are sent)
        result = txn.execute(
            "SELECT MIN(txn_id), * FROM application_services_txns WHERE as_id=?",
            (service.id,)
        )
        entry = self.cursor_to_dict(result)[0]
        if not entry or entry["txn_id"] is None:
            # the MIN(txn_id) aggregate forces a row to be returned even when
            # there are no matching rows, so check txn_id for NULL rather than
            # relying on the entry being absent.
            return None

        event_ids = json.loads(entry["event_ids"])
        events = self._get_events_txn(txn, event_ids)

        return AppServiceTransaction(
            service=service, id=entry["txn_id"], events=events
        )

    def _get_last_txn(self, txn, service_id):
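        """Return the last transaction ID successfully sent to this service,
        or 0 if no transaction has been recorded yet.
        """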
        result = txn.execute(
            "SELECT last_txn FROM application_services_state WHERE as_id=?",
            (service_id,)
        )
        last_txn_id = result.fetchone()
        if last_txn_id is None or last_txn_id[0] is None:  # no row exists
            return 0
        else:
            return int(last_txn_id[0])  # select 'last_txn' col