# -*- coding: utf-8 -*-
# Copyright 2015 OpenMarket Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import logging
import urllib
import yaml
from simplejson import JSONDecodeError
import simplejson as json
from twisted.internet import defer

from synapse.api.constants import Membership
from synapse.appservice import ApplicationService, AppServiceTransaction
from synapse.storage.roommember import RoomsForUser
from synapse.types import UserID
from ._base import SQLBaseStore


logger = logging.getLogger(__name__)


class ApplicationServiceStore(SQLBaseStore):

    def __init__(self, hs):
        super(ApplicationServiceStore, self).__init__(hs)
        self.hostname = hs.hostname
        self.services_cache = []
        self._populate_appservice_cache(
            hs.config.app_service_config_files
        )

    def get_app_services(self):
        return defer.succeed(self.services_cache)

    def get_app_service_by_user_id(self, user_id):
        """Retrieve an application service by its user ID.

        Every application service has a user ID associated with it, but there
        is no distinguishing feature on the user ID itself which indicates
        that it represents an application service. This function is how you
        map from a user ID to an application service.

        Args:
            user_id(str): The user ID to look up.
        Returns:
            A Deferred which resolves to the matching
            synapse.appservice.ApplicationService, or None.
        """
        for service in self.services_cache:
            if service.sender == user_id:
                return defer.succeed(service)
        return defer.succeed(None)

    def get_app_service_by_token(self, token):
        """Get the application service with the given appservice token.

        Args:
            token (str): The application service token.
        Returns:
            A Deferred which resolves to the matching
            synapse.appservice.ApplicationService, or None.
        """
        for service in self.services_cache:
            if service.token == token:
                return defer.succeed(service)
        return defer.succeed(None)

    def get_app_service_rooms(self, service):
        """Get a list of RoomsForUser for this application service.

        Application services may be "interested" in lots of rooms depending on
        the room ID, the room aliases, or the members in the room. This function
        takes all of these into account and returns a list of RoomsForUser which
        represent the entire list of room IDs that this application service
        wants to know about.

        Args:
            service: The application service to get a room list for.
        Returns:
            A Deferred which resolves to a list of RoomsForUser.
        """
        return self.runInteraction(
            "get_app_service_rooms",
            self._get_app_service_rooms_txn,
            service,
        )

    def _get_app_service_rooms_txn(self, txn, service):
        # get all rooms matching the room ID regex.
        room_entries = self._simple_select_list_txn(
            txn=txn, table="rooms", keyvalues=None, retcols=["room_id"]
        )
        matching_room_list = set([
            r["room_id"] for r in room_entries if
            service.is_interested_in_room(r["room_id"])
        ])

        # resolve room IDs for matching room alias regex.
        room_alias_mappings = self._simple_select_list_txn(
            txn=txn, table="room_aliases", keyvalues=None,
            retcols=["room_id", "room_alias"]
        )
        matching_room_list |= set([
            r["room_id"] for r in room_alias_mappings if
            service.is_interested_in_alias(r["room_alias"])
        ])

        # get all rooms for every user for this AS. This is scoped to users on
        # this HS only.
        user_list = self._simple_select_list_txn(
            txn=txn, table="users", keyvalues=None, retcols=["name"]
        )
        user_list = [
            u["name"] for u in user_list if
            service.is_interested_in_user(u["name"])
        ]
        rooms_for_user_matching_user_id = set()  # set of RoomsForUser
        for user_id in user_list:
            # FIXME: This assumes this store is linked with RoomMemberStore :(
            rooms_for_user = self._get_rooms_for_user_where_membership_is_txn(
                txn=txn,
                user_id=user_id,
                membership_list=[Membership.JOIN]
            )
            rooms_for_user_matching_user_id |= set(rooms_for_user)

        # make RoomsForUser tuples for room IDs and aliases which are not in
        # the rooms_for_user_matching_user_id set - i.e. rooms which do not
        # have any AS-registered users in them.
        known_room_ids = [r.room_id for r in rooms_for_user_matching_user_id]
        missing_rooms_for_user = [
            RoomsForUser(r, service.sender, "join") for r in
            matching_room_list if r not in known_room_ids
        ]
        rooms_for_user_matching_user_id |= set(missing_rooms_for_user)

        return rooms_for_user_matching_user_id

    def _parse_services_dict(self, results):
        # SQL results in the form:
        # [
        #   {
        #     'regex': "something",
        #     'url': "something",
        #     'namespace': enum,
        #     'as_id': 0,
        #     'token': "something",
        #     'hs_token': "otherthing",
        #     'id': 0
        #   }
        # ]
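        # Note: the 'regex' column appears to hold a JSON-encoded value (hence
        # the json.loads below); judging by _load_appservice, typically an
        # object of the form {"regex": "@irc_.*", "exclusive": true}.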
        services = {}
        for res in results:
            as_token = res["token"]
            if as_token is None:
                continue
            if as_token not in services:
                # add the service
                services[as_token] = {
                    "id": res["id"],
                    "url": res["url"],
                    "token": as_token,
                    "hs_token": res["hs_token"],
                    "sender": res["sender"],
                    "namespaces": {
                        ApplicationService.NS_USERS: [],
                        ApplicationService.NS_ALIASES: [],
                        ApplicationService.NS_ROOMS: []
                    }
                }
            # add the namespace regex if one exists
            ns_int = res["namespace"]
            if ns_int is None:
                continue
            try:
                services[as_token]["namespaces"][
                    ApplicationService.NS_LIST[ns_int]].append(
                        json.loads(res["regex"])
                )
            except IndexError:
                logger.error("Bad namespace enum '%s'. %s", ns_int, res)
            except JSONDecodeError:
                logger.error("Bad regex object '%s'", res["regex"])

        service_list = []
        for service in services.values():
            service_list.append(ApplicationService(
                token=service["token"],
                url=service["url"],
                namespaces=service["namespaces"],
                hs_token=service["hs_token"],
                sender=service["sender"],
                id=service["id"]
            ))
        return service_list

    def _load_appservice(self, as_info):
        required_string_fields = [
            "url", "as_token", "hs_token", "sender_localpart"
        ]
        for field in required_string_fields:
            if not isinstance(as_info.get(field), basestring):
                raise KeyError("Required string field: '%s'" % (field,))

        localpart = as_info["sender_localpart"]
        if urllib.quote(localpart) != localpart:
            raise ValueError(
                "sender_localpart must not contain characters which need "
                "URL encoding."
            )
        user = UserID(localpart, self.hostname)
        user_id = user.to_string()

        # namespace checks
        if not isinstance(as_info.get("namespaces"), dict):
            raise KeyError("Requires 'namespaces' object.")
        for ns in ApplicationService.NS_LIST:
            # specific namespaces are optional
            if ns in as_info["namespaces"]:
                # expect a list of dicts with exclusive and regex keys
                for regex_obj in as_info["namespaces"][ns]:
                    if not isinstance(regex_obj, dict):
                        raise ValueError(
                            "Expected namespace entry in %s to be an object,"
                            " but got %s" % (ns, regex_obj)
                        )
                    if not isinstance(regex_obj.get("regex"), basestring):
                        raise ValueError(
                            "Missing/bad type 'regex' key in %s" % (regex_obj,)
                        )
                    if not isinstance(regex_obj.get("exclusive"), bool):
                        raise ValueError(
                            "Missing/bad type 'exclusive' key in %s"
                            % (regex_obj,)
                        )
        return ApplicationService(
            token=as_info["as_token"],
            url=as_info["url"],
            namespaces=as_info["namespaces"],
            hs_token=as_info["hs_token"],
            sender=user_id,
            id=as_info["as_token"]  # the token is the only unique thing here
        )
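
    # For reference, a sketch of the YAML shape _load_appservice expects each
    # config file to parse to (values here are illustrative only, not taken
    # from any real registration):
    #
    #   url: "http://localhost:8009"
    #   as_token: "<application service token>"
    #   hs_token: "<homeserver token>"
    #   sender_localpart: "my_bridge"
    #   namespaces:
    #     users:
    #       - exclusive: true
    #         regex: "@bridge_.*"
    #     aliases: []
    #     rooms: []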

    def _populate_appservice_cache(self, config_files):
        """Populates a cache of Application Services from the config files."""
        if not isinstance(config_files, list):
            logger.warning(
                "Expected %s to be a list of AS config files.", config_files
            )
            return

        for config_file in config_files:
            try:
                with open(config_file, 'r') as f:
                    appservice = self._load_appservice(yaml.load(f))
                    logger.info("Loaded application service: %s", appservice)
                    self.services_cache.append(appservice)
            except Exception as e:
                logger.error("Failed to load appservice from '%s'", config_file)
                logger.exception(e)


class ApplicationServiceTransactionStore(SQLBaseStore):

    def __init__(self, hs):
        super(ApplicationServiceTransactionStore, self).__init__(hs)

    @defer.inlineCallbacks
    def get_appservices_by_state(self, state):
        """Get a list of application services based on their state.

        Args:
            state(ApplicationServiceState): The state to filter on.
        Returns:
            A Deferred which resolves to a list of ApplicationServices, which
            may be empty.
        """
        results = yield self._simple_select_list(
            "application_services_state",
            dict(state=state),
            ["as_id"]
        )
        # NB: This assumes this class is linked with ApplicationServiceStore
        as_list = yield self.get_app_services()
        services = []

        for res in results:
            for service in as_list:
                if service.id == res["as_id"]:
                    services.append(service)
        defer.returnValue(services)

    @defer.inlineCallbacks
    def get_appservice_state(self, service):
        """Get the application service state.

        Args:
            service(ApplicationService): The service whose state to get.
        Returns:
            A Deferred which resolves to an ApplicationServiceState, or None
            if no state has been stored for this service.
        """
        result = yield self._simple_select_one(
            "application_services_state",
            dict(as_id=service.id),
            ["state"],
            allow_none=True
        )
        if result:
            defer.returnValue(result.get("state"))
            return
        defer.returnValue(None)

    def set_appservice_state(self, service, state):
        """Set the application service state.

        Args:
            service(ApplicationService): The service whose state to set.
            state(ApplicationServiceState): The connectivity state to apply.
        Returns:
            A Deferred which resolves when the state was set successfully.
        """
        return self._simple_upsert(
            "application_services_state",
            dict(as_id=service.id),
            dict(state=state)
        )
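
    # For reference: the application_services_state table appears to hold one
    # row per application service, keyed on as_id, with a 'state' column (set
    # above) and a 'last_txn' column (updated in _complete_appservice_txn
    # below when a transaction has been sent successfully).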

    def create_appservice_txn(self, service, events):
        """Atomically creates a new transaction for this application service
        with the given list of events.

        Args:
            service(ApplicationService): The service the transaction is for.
            events(list<Event>): A list of events to put in the transaction.
        Returns:
            A Deferred which resolves to a new AppServiceTransaction.
        """
        return self.runInteraction(
            "create_appservice_txn",
            self._create_appservice_txn,
            service, events
        )

    def _create_appservice_txn(self, txn, service, events):
        # work out new txn id (highest txn id for this service += 1)
        # The highest id may be the last one sent (in which case it is last_txn)
        # or it may be the highest in the txns list (which are waiting to be/are
        # being sent)
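        # For example (illustrative numbers only): if last_txn is 3 and the
        # highest queued txn_id is 5, the new transaction gets id 6; if the
        # queue is empty, it gets id 4.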
        last_txn_id = self._get_last_txn(txn, service.id)

        result = txn.execute(
            "SELECT MAX(txn_id) FROM application_services_txns WHERE as_id=?",
            (service.id,)
        )
        highest_txn_id = result.fetchone()[0]
        if highest_txn_id is None:
            highest_txn_id = 0

        new_txn_id = max(highest_txn_id, last_txn_id) + 1

        # Insert new txn into txn table
        event_ids = json.dumps([e.event_id for e in events])
        txn.execute(
            "INSERT INTO application_services_txns(as_id, txn_id, event_ids) "
            "VALUES(?,?,?)",
            (service.id, new_txn_id, event_ids)
        )
        return AppServiceTransaction(
            service=service, id=new_txn_id, events=events
        )

    def complete_appservice_txn(self, txn_id, service):
        """Completes an application service transaction.

        Args:
            txn_id(str): The transaction ID being completed.
            service(ApplicationService): The application service which was sent
            this transaction.
        Returns:
            A Deferred which resolves when this transaction has been marked as
            completed successfully.
        """
        return self.runInteraction(
            "complete_appservice_txn",
            self._complete_appservice_txn,
            txn_id, service
        )

    def _complete_appservice_txn(self, txn, txn_id, service):
        txn_id = int(txn_id)

        # Debugging query: Make sure the txn being completed is EXACTLY +1 from
        # what was there before. If it isn't, we've got problems (e.g. the AS
        # has probably missed some events), so whine loudly but still continue,
        # since it shouldn't fail completion of the transaction.
        last_txn_id = self._get_last_txn(txn, service.id)
        if (last_txn_id + 1) != txn_id:
            logger.error(
                "appservice: Completing a transaction whose ID is not exactly "
                "1 more than the last ID sent to this AS. We've either dropped "
                "events or sent them to the AS out of order. FIX ME. "
                "last_txn=%s completing_txn=%s service_id=%s", last_txn_id,
                txn_id, service.id
            )

        # Set current txn_id for AS to 'txn_id'
        self._simple_upsert_txn(
            txn, "application_services_state", dict(as_id=service.id),
            dict(last_txn=txn_id)
        )

        # Delete txn
        self._simple_delete_txn(
            txn, "application_services_txns",
            dict(txn_id=txn_id, as_id=service.id)
        )

    def get_oldest_unsent_txn(self, service):
        """Get the oldest transaction which has not been sent for this
        service.

        Args:
            service(ApplicationService): The app service whose oldest unsent
            transaction to fetch.
        Returns:
            A Deferred which resolves to an AppServiceTransaction or
            None.
        """
        return self.runInteraction(
            "get_oldest_unsent_appservice_txn",
            self._get_oldest_unsent_txn,
            service
        )
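
    # A sketch of the intended consumer flow, presumably driven by the AS
    # scheduler elsewhere in synapse (variable names are hypothetical):
    #
    #   txn = yield store.get_oldest_unsent_txn(service)
    #   if txn:
    #       ... attempt delivery of txn's events to the AS ...
    #       yield store.complete_appservice_txn(txn.id, service)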

    def _get_oldest_unsent_txn(self, txn, service):
        # Monotonically increasing txn ids, so just select the smallest
        # one in the txns table (we delete them when they are sent)
        result = txn.execute(
            "SELECT * FROM application_services_txns WHERE as_id=?"
            " ORDER BY txn_id ASC LIMIT 1",
            (service.id,)
        )
        rows = self.cursor_to_dict(result)
        if not rows:
            return None

        entry = rows[0]

        event_ids = json.loads(entry["event_ids"])
        events = self._get_events_txn(txn, event_ids)

        return AppServiceTransaction(
            service=service, id=entry["txn_id"], events=events
        )

    def _get_last_txn(self, txn, service_id):
        # Returns the id of the last txn successfully sent to this AS, or 0 if
        # no transaction has been recorded for it yet.
        result = txn.execute(
            "SELECT last_txn FROM application_services_state WHERE as_id=?",
            (service_id,)
        )
        last_txn_id = result.fetchone()
        if last_txn_id is None or last_txn_id[0] is None:  # no row exists
            return 0
        else:
            return int(last_txn_id[0])  # select 'last_txn' col