2016-01-06 23:26:29 -05:00
|
|
|
# Copyright 2015, 2016 OpenMarket Ltd
|
2018-02-23 05:45:00 -05:00
|
|
|
# Copyright 2018 New Vector Ltd
|
2015-01-27 10:50:28 -05:00
|
|
|
#
|
|
|
|
# Licensed under the Apache License, Version 2.0 (the "License");
|
|
|
|
# you may not use this file except in compliance with the License.
|
|
|
|
# You may obtain a copy of the License at
|
|
|
|
#
|
|
|
|
# http://www.apache.org/licenses/LICENSE-2.0
|
|
|
|
#
|
|
|
|
# Unless required by applicable law or agreed to in writing, software
|
|
|
|
# distributed under the License is distributed on an "AS IS" BASIS,
|
|
|
|
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
|
|
|
# See the License for the specific language governing permissions and
|
|
|
|
# limitations under the License.
|
2015-01-28 06:59:38 -05:00
|
|
|
import logging
|
2017-05-18 11:34:41 -04:00
|
|
|
import re
|
2023-02-06 07:49:06 -05:00
|
|
|
from typing import (
|
|
|
|
TYPE_CHECKING,
|
|
|
|
Any,
|
|
|
|
Dict,
|
|
|
|
List,
|
|
|
|
Optional,
|
|
|
|
Pattern,
|
|
|
|
Sequence,
|
|
|
|
Tuple,
|
|
|
|
cast,
|
|
|
|
)
|
2018-07-09 02:09:20 -04:00
|
|
|
|
2020-10-28 11:12:21 -04:00
|
|
|
from synapse.appservice import (
|
|
|
|
ApplicationService,
|
|
|
|
ApplicationServiceState,
|
|
|
|
AppServiceTransaction,
|
2022-11-28 11:17:29 -05:00
|
|
|
TransactionOneTimeKeysCount,
|
2022-02-24 12:55:45 -05:00
|
|
|
TransactionUnusedFallbackKeys,
|
2020-10-28 11:12:21 -04:00
|
|
|
)
|
2016-05-17 06:28:58 -04:00
|
|
|
from synapse.config.appservice import load_appservices
|
2020-10-15 12:33:28 -04:00
|
|
|
from synapse.events import EventBase
|
2022-02-24 12:55:45 -05:00
|
|
|
from synapse.storage._base import db_to_json
|
2022-04-12 06:54:00 -04:00
|
|
|
from synapse.storage.database import (
|
|
|
|
DatabasePool,
|
|
|
|
LoggingDatabaseConnection,
|
|
|
|
LoggingTransaction,
|
|
|
|
)
|
2020-08-05 16:38:57 -04:00
|
|
|
from synapse.storage.databases.main.events_worker import EventsWorkerStore
|
2022-02-24 12:55:45 -05:00
|
|
|
from synapse.storage.databases.main.roommember import RoomMemberWorkerStore
|
2022-04-01 09:33:25 -04:00
|
|
|
from synapse.storage.types import Cursor
|
|
|
|
from synapse.storage.util.sequence import build_sequence_generator
|
2022-03-30 09:39:27 -04:00
|
|
|
from synapse.types import DeviceListUpdates, JsonDict
|
2020-08-20 10:32:33 -04:00
|
|
|
from synapse.util import json_encoder
|
2022-02-24 12:55:45 -05:00
|
|
|
from synapse.util.caches.descriptors import _CacheContext, cached
|
2015-01-27 10:50:28 -05:00
|
|
|
|
2020-10-28 11:12:21 -04:00
|
|
|
if TYPE_CHECKING:
|
2021-03-23 07:12:48 -04:00
|
|
|
from synapse.server import HomeServer
|
2020-10-28 11:12:21 -04:00
|
|
|
|
2015-01-28 06:59:38 -05:00
|
|
|
logger = logging.getLogger(__name__)
|
|
|
|
|
2015-02-03 06:26:33 -05:00
|
|
|
|
2020-10-28 11:12:21 -04:00
|
|
|
def _make_exclusive_regex(
    services_cache: List[ApplicationService],
) -> Optional[Pattern]:
    """Build one combined regex covering every exclusive user namespace.

    Each appservice may register regexes for the user IDs it exclusively
    controls; we precompile them all into a single alternation so that
    membership checks are a single `match` call.

    Returns:
        The compiled pattern, or None when no appservice has registered any
        exclusive user namespaces.
    """
    patterns = [
        regex.pattern
        for service in services_cache
        for regex in service.get_exclusive_user_regexes()
    ]
    if not patterns:
        # Special-cased: an alternation built from zero patterns would
        # otherwise match every string, so "no namespaces" is None.
        return None
    combined = "|".join("(" + p + ")" for p in patterns)
    return re.compile(combined)
|
2017-06-21 09:19:33 -04:00
|
|
|
|
|
|
|
|
2022-02-24 12:55:45 -05:00
|
|
|
class ApplicationServiceWorkerStore(RoomMemberWorkerStore):
    def __init__(
        self,
        database: DatabasePool,
        db_conn: LoggingDatabaseConnection,
        hs: "HomeServer",
    ):
        # Registered appservices are loaded once from config at startup and
        # cached for the lifetime of the process.
        self.services_cache = load_appservices(
            hs.hostname, hs.config.appservice.app_service_config_files
        )
        # Single precompiled alternation over all exclusive user namespaces,
        # or None if no appservice claims any exclusive users.
        self.exclusive_user_regex = _make_exclusive_regex(self.services_cache)

        def get_max_as_txn_id(txn: Cursor) -> int:
            # Fallback for engines without native sequences: scan the txns
            # table for the current maximum.
            logger.warning("Falling back to slow query, you should port to postgres")
            txn.execute(
                "SELECT COALESCE(max(txn_id), 0) FROM application_services_txns"
            )
            return cast(Tuple[int], txn.fetchone())[0]

        self._as_txn_seq_gen = build_sequence_generator(
            db_conn,
            database.engine,
            get_max_as_txn_id,
            "application_services_txn_id_seq",
            table="application_services_txns",
            id_column="txn_id",
        )

        super().__init__(database, db_conn, hs)

    def get_app_services(self) -> List[ApplicationService]:
        """Return the cached list of all registered application services."""
        return self.services_cache

    def get_if_app_services_interested_in_user(self, user_id: str) -> bool:
        """Check if the user is one associated with an app service (exclusively)"""
        pattern = self.exclusive_user_regex
        if pattern is None:
            return False
        return bool(pattern.match(user_id))

    def get_app_service_by_user_id(self, user_id: str) -> Optional[ApplicationService]:
        """Map a user ID to the application service whose sender it is.

        Every application service has an associated sender user ID; nothing
        about the ID itself marks it as belonging to an appservice, so we
        look it up in the cache.

        Args:
            user_id: The user ID to see if it is an application service.
        Returns:
            The application service or None.
        """
        return next(
            (svc for svc in self.services_cache if svc.sender == user_id),
            None,
        )

    def get_app_service_by_token(self, token: str) -> Optional[ApplicationService]:
        """Look up an application service by its appservice token.

        Args:
            token: The application service token.
        Returns:
            The application service or None.
        """
        return next(
            (svc for svc in self.services_cache if svc.token == token),
            None,
        )

    def get_app_service_by_id(self, as_id: str) -> Optional[ApplicationService]:
        """Look up an application service by its appservice ID.

        Args:
            as_id: The application service ID.
        Returns:
            The application service or None.
        """
        return next(
            (svc for svc in self.services_cache if svc.id == as_id),
            None,
        )

    @cached(iterable=True, cache_context=True)
    async def get_app_service_users_in_room(
        self,
        room_id: str,
        app_service: "ApplicationService",
        cache_context: _CacheContext,
    ) -> List[str]:
        """
        Get all users in a room that the appservice controls.

        Args:
            room_id: The room to check in.
            app_service: The application service to check interest/control against

        Returns:
            List of user IDs that the appservice controls.
        """
        # `get_local_users_in_room(...)` suffices here: an appservice can only
        # be interested in local users of the server it's on (any remote users
        # that happen to match the user namespace regex are ignored).
        local_users_in_room = await self.get_local_users_in_room(
            room_id, on_invalidate=cache_context.invalidate
        )
        return [
            user_id
            for user_id in local_users_in_room
            if app_service.is_interested_in_user(user_id)
        ]
|
2022-02-24 12:55:45 -05:00
|
|
|
|
2018-02-21 06:20:42 -05:00
|
|
|
|
|
|
|
class ApplicationServiceStore(ApplicationServiceWorkerStore):
    # Deliberately empty: every AS storage function can currently run on the
    # workers. The class is kept (rather than removed) for consistency with
    # the other stores and so master-only functions have a home if they are
    # ever needed again.
    pass
|
2015-02-25 10:00:59 -05:00
|
|
|
|
2015-03-06 09:53:35 -05:00
|
|
|
|
2019-04-03 05:07:29 -04:00
|
|
|
class ApplicationServiceTransactionWorkerStore(
    ApplicationServiceWorkerStore, EventsWorkerStore
):
    async def get_appservices_by_state(
        self, state: ApplicationServiceState
    ) -> List[ApplicationService]:
        """Get a list of application services based on their state.

        Args:
            state: The state to filter on.
        Returns:
            A list of ApplicationServices, which may be empty.
        """
        results = await self.db_pool.simple_select_list(
            "application_services_state", {"state": state.value}, ["as_id"]
        )
        # NB: This assumes this class is linked with ApplicationServiceStore
        as_list = self.get_app_services()
        services = []

        # Map each matching as_id row back to its cached ApplicationService.
        for res in results:
            for service in as_list:
                if service.id == res["as_id"]:
                    services.append(service)
        return services

    async def get_appservice_state(
        self, service: ApplicationService
    ) -> Optional[ApplicationServiceState]:
        """Get the application service state.

        Args:
            service: The service whose state to get.
        Returns:
            An ApplicationServiceState, or None if we have yet to attempt any
            transactions to the AS.
        """
        # if we have created transactions for this AS but not yet attempted to send
        # them, we will have a row in the table with state=NULL (recording the stream
        # positions we have processed up to).
        #
        # On the other hand, if we have yet to create any transactions for this AS at
        # all, then there will be no row for the AS.
        #
        # In either case, we return None to indicate "we don't yet know the state of
        # this AS".
        result = await self.db_pool.simple_select_one_onecol(
            "application_services_state",
            {"as_id": service.id},
            retcol="state",
            allow_none=True,
            desc="get_appservice_state",
        )
        if result:
            return ApplicationServiceState(result)
        return None

    async def set_appservice_state(
        self, service: ApplicationService, state: ApplicationServiceState
    ) -> None:
        """Set the application service state.

        Args:
            service: The service whose state to set.
            state: The connectivity state to apply.
        """
        # Upsert: the AS may not yet have a row in application_services_state.
        await self.db_pool.simple_upsert(
            "application_services_state", {"as_id": service.id}, {"state": state.value}
        )

    async def create_appservice_txn(
        self,
        service: ApplicationService,
        events: Sequence[EventBase],
        ephemeral: List[JsonDict],
        to_device_messages: List[JsonDict],
        one_time_keys_count: TransactionOneTimeKeysCount,
        unused_fallback_keys: TransactionUnusedFallbackKeys,
        device_list_summary: DeviceListUpdates,
    ) -> AppServiceTransaction:
        """Atomically creates a new transaction for this application service
        with the given list of events. Ephemeral events are NOT persisted to the
        database and are not resent if a transaction is retried.

        Args:
            service: The service who the transaction is for.
            events: A list of persistent events to put in the transaction.
            ephemeral: A list of ephemeral events to put in the transaction.
            to_device_messages: A list of to-device messages to put in the transaction.
            one_time_keys_count: Counts of remaining one-time keys for relevant
                appservice devices in the transaction.
            unused_fallback_keys: Lists of unused fallback keys for relevant
                appservice devices in the transaction.
            device_list_summary: The device list summary to include in the transaction.

        Returns:
            A new transaction.
        """

        def _create_appservice_txn(txn: LoggingTransaction) -> AppServiceTransaction:
            # Allocate the next txn id from the sequence (monotonic per AS).
            new_txn_id = self._as_txn_seq_gen.get_next_id_txn(txn)

            # Insert new txn into txn table. Only the event IDs are persisted;
            # everything else in the transaction is held in memory only.
            event_ids = json_encoder.encode([e.event_id for e in events])
            txn.execute(
                "INSERT INTO application_services_txns(as_id, txn_id, event_ids) "
                "VALUES(?,?,?)",
                (service.id, new_txn_id, event_ids),
            )
            return AppServiceTransaction(
                service=service,
                id=new_txn_id,
                events=events,
                ephemeral=ephemeral,
                to_device_messages=to_device_messages,
                one_time_keys_count=one_time_keys_count,
                unused_fallback_keys=unused_fallback_keys,
                device_list_summary=device_list_summary,
            )

        return await self.db_pool.runInteraction(
            "create_appservice_txn", _create_appservice_txn
        )

    async def complete_appservice_txn(
        self, txn_id: int, service: ApplicationService
    ) -> None:
        """Completes an application service transaction.

        Args:
            txn_id: The transaction ID being completed.
            service: The application service which was sent this transaction.
        """

        def _complete_appservice_txn(txn: LoggingTransaction) -> None:
            # Delete txn: completion is simply removal from the pending table.
            self.db_pool.simple_delete_txn(
                txn,
                "application_services_txns",
                {"txn_id": txn_id, "as_id": service.id},
            )

        await self.db_pool.runInteraction(
            "complete_appservice_txn", _complete_appservice_txn
        )

    async def get_oldest_unsent_txn(
        self, service: ApplicationService
    ) -> Optional[AppServiceTransaction]:
        """Get the oldest transaction which has not been sent for this service.

        Args:
            service: The app service to get the oldest txn.
        Returns:
            An AppServiceTransaction or None.
        """

        def _get_oldest_unsent_txn(
            txn: LoggingTransaction,
        ) -> Optional[Dict[str, Any]]:
            # Monotonically increasing txn ids, so just select the smallest
            # one in the txns table (we delete them when they are sent)
            txn.execute(
                "SELECT * FROM application_services_txns WHERE as_id=?"
                " ORDER BY txn_id ASC LIMIT 1",
                (service.id,),
            )
            rows = self.db_pool.cursor_to_dict(txn)
            if not rows:
                return None

            entry = rows[0]

            return entry

        entry = await self.db_pool.runInteraction(
            "get_oldest_unsent_appservice_txn", _get_oldest_unsent_txn
        )

        if not entry:
            return None

        # Rehydrate the persisted event IDs back into full events.
        event_ids = db_to_json(entry["event_ids"])

        events = await self.get_events_as_list(event_ids)

        # TODO: to-device messages, one-time key counts, device list summaries and unused
        # fallback keys are not yet populated for catch-up transactions.
        # We likely want to populate those for reliability.
        return AppServiceTransaction(
            service=service,
            id=entry["txn_id"],
            events=events,
            ephemeral=[],
            to_device_messages=[],
            one_time_keys_count={},
            unused_fallback_keys={},
            device_list_summary=DeviceListUpdates(),
        )

    async def get_appservice_last_pos(self) -> int:
        """
        Get the last stream ordering position for the appservice process.
        """

        return await self.db_pool.simple_select_one_onecol(
            table="appservice_stream_position",
            retcol="stream_ordering",
            keyvalues={},
            desc="get_appservice_last_pos",
        )

    async def set_appservice_last_pos(self, pos: int) -> None:
        """
        Set the last stream ordering position for the appservice process.
        """

        await self.db_pool.simple_update_one(
            table="appservice_stream_position",
            keyvalues={},
            updatevalues={"stream_ordering": pos},
            desc="set_appservice_last_pos",
        )

    async def get_type_stream_id_for_appservice(
        self, service: ApplicationService, type: str
    ) -> int:
        """Get the stored `<type>_stream_id` position for this appservice.

        Args:
            service: The application service to look up.
            type: One of "read_receipt", "presence", "to_device" or
                "device_list".

        Returns:
            The recorded stream position, or 1 when no position has been
            recorded yet.

        Raises:
            ValueError: if `type` is not one of the known stream types.
        """
        if type not in ("read_receipt", "presence", "to_device", "device_list"):
            raise ValueError(
                "Expected type to be a valid application stream id type, got %s"
                % (type,)
            )

        def get_type_stream_id_for_appservice_txn(txn: LoggingTransaction) -> int:
            # Safe to interpolate: `type` was validated against a fixed
            # allow-list above, so `stream_id_type` is a known column name.
            stream_id_type = "%s_stream_id" % type
            txn.execute(
                # We do NOT want to escape `stream_id_type`.
                "SELECT %s FROM application_services_state WHERE as_id=?"
                % stream_id_type,
                (service.id,),
            )
            last_stream_id = txn.fetchone()
            if last_stream_id is None or last_stream_id[0] is None:  # no row exists
                # Stream tokens always start from 1, to avoid foot guns around `0` being falsey.
                return 1
            else:
                return int(last_stream_id[0])

        return await self.db_pool.runInteraction(
            "get_type_stream_id_for_appservice", get_type_stream_id_for_appservice_txn
        )

    async def set_appservice_stream_type_pos(
        self, service: ApplicationService, stream_type: str, pos: Optional[int]
    ) -> None:
        """Record the `<stream_type>_stream_id` position for this appservice.

        Args:
            service: The application service to update.
            stream_type: One of "read_receipt", "presence", "to_device" or
                "device_list".
            pos: The stream position to record (may be None).

        Raises:
            ValueError: if `stream_type` is not one of the known stream types.
        """
        if stream_type not in ("read_receipt", "presence", "to_device", "device_list"):
            raise ValueError(
                "Expected type to be a valid application stream id type, got %s"
                % (stream_type,)
            )

        # this may be the first time that we're recording any state for this AS, so
        # we don't yet know if a row for it exists; hence we have to upsert here.
        await self.db_pool.simple_upsert(
            table="application_services_state",
            keyvalues={"as_id": service.id},
            values={f"{stream_type}_stream_id": pos},
            desc="set_appservice_stream_type_pos",
        )
|
|
|
|
|
2018-02-21 06:20:42 -05:00
|
|
|
|
|
|
|
class ApplicationServiceTransactionStore(ApplicationServiceTransactionWorkerStore):
    # Deliberately empty: every AS transaction storage function can currently
    # run on the workers. The class is kept for consistency with the other
    # stores and as a home for any future master-only functions.
    pass
|