#
# This file is licensed under the Affero General Public License (AGPL) version 3.
#
# Copyright 2020 The Matrix.org Foundation C.I.C.
# Copyright (C) 2023 New Vector, Ltd
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# See the GNU Affero General Public License for more details:
# <https://www.gnu.org/licenses/agpl-3.0.html>.
#
# Originally licensed under the Apache License, Version 2.0:
# <http://www.apache.org/licenses/LICENSE-2.0>.
#
# [This file includes modifications made by New Vector Limited]
#
#
|
|
|
import logging
|
2020-07-27 13:54:43 -04:00
|
|
|
from inspect import isawaitable
|
2022-05-19 11:29:08 -04:00
|
|
|
from typing import TYPE_CHECKING, Any, Generic, List, Optional, Type, TypeVar, cast
|
2020-04-22 08:07:41 -04:00
|
|
|
|
2021-02-11 11:06:29 -05:00
|
|
|
import attr
|
2023-05-26 15:28:39 -04:00
|
|
|
from txredisapi import (
|
|
|
|
ConnectionHandler,
|
|
|
|
RedisFactory,
|
|
|
|
SubscriberProtocol,
|
|
|
|
UnixConnectionHandler,
|
|
|
|
)
|
2021-03-12 11:37:57 -05:00
|
|
|
from zope.interface import implementer
|
2020-04-22 08:07:41 -04:00
|
|
|
|
2021-03-11 13:35:09 -05:00
|
|
|
from twisted.internet.address import IPv4Address, IPv6Address
|
|
|
|
from twisted.internet.interfaces import IAddress, IConnector
|
|
|
|
from twisted.python.failure import Failure
|
|
|
|
|
2020-07-21 19:40:42 -04:00
|
|
|
from synapse.logging.context import PreserveLoggingContext, make_deferred_yieldable
|
|
|
|
from synapse.metrics.background_process_metrics import (
|
|
|
|
BackgroundProcessLoggingContext,
|
|
|
|
run_as_background_process,
|
2021-01-26 05:54:54 -05:00
|
|
|
wrap_as_background_process,
|
2020-07-21 19:40:42 -04:00
|
|
|
)
|
2020-04-22 08:07:41 -04:00
|
|
|
from synapse.replication.tcp.commands import (
|
|
|
|
Command,
|
|
|
|
ReplicateCommand,
|
|
|
|
parse_command_from_line,
|
|
|
|
)
|
2023-05-11 08:02:51 -04:00
|
|
|
from synapse.replication.tcp.context import ClientContextFactory
|
2020-04-22 11:26:19 -04:00
|
|
|
from synapse.replication.tcp.protocol import (
|
2021-03-12 11:37:57 -05:00
|
|
|
IReplicationConnection,
|
2020-04-22 11:26:19 -04:00
|
|
|
tcp_inbound_commands_counter,
|
|
|
|
tcp_outbound_commands_counter,
|
|
|
|
)
|
2020-04-22 08:07:41 -04:00
|
|
|
|
|
|
|
if TYPE_CHECKING:
|
|
|
|
from synapse.replication.tcp.handler import ReplicationCommandHandler
|
|
|
|
from synapse.server import HomeServer
|
|
|
|
|
|
|
|
logger = logging.getLogger(__name__)

# Type variables used by the generic ConstantProperty descriptor below:
# T is the owning instance type, V the type of the constant value.
T = TypeVar("T")
V = TypeVar("V")
|
|
|
|
|
|
|
|
|
|
|
|
@attr.s
class ConstantProperty(Generic[T, V]):
    """A data descriptor that always yields a fixed constant value.

    Attribute reads resolve to the stored constant; attribute writes are
    silently discarded, so the value can never be overridden on an instance.
    """

    # The fixed value returned on every attribute access.
    constant: V = attr.ib()

    def __get__(self, obj: Optional[T], objtype: Optional[Type[T]] = None) -> V:
        return self.constant

    def __set__(self, obj: Optional[T], value: V) -> None:
        # Deliberately a no-op: assignments are ignored.
        pass
|
|
|
|
|
2020-04-22 08:07:41 -04:00
|
|
|
|
2021-03-12 11:37:57 -05:00
|
|
|
@implementer(IReplicationConnection)
class RedisSubscriber(SubscriberProtocol):
    """Connection to redis subscribed to replication stream.

    This class fulfils two functions:

    (a) it implements the twisted Protocol API, where it handles the SUBSCRIBEd redis
    connection, parsing *incoming* messages into replication commands, and passing them
    to `ReplicationCommandHandler`

    (b) it implements the IReplicationConnection API, where it sends *outgoing* commands
    onto outbound_redis_connection.

    Due to the vagaries of `txredisapi` we don't want to have a custom
    constructor, so instead we expect the defined attributes below to be set
    immediately after initialisation.

    Attributes:
        synapse_handler: The command handler to handle incoming commands.
        synapse_stream_prefix: The *redis* stream name to subscribe to and publish
            from (not anything to do with Synapse replication streams).
        synapse_outbound_redis_connection: The connection to redis to use to send
            commands.
    """

    synapse_handler: "ReplicationCommandHandler"
    synapse_stream_prefix: str
    synapse_channel_names: List[str]
    synapse_outbound_redis_connection: ConnectionHandler

    def __init__(self, *args: Any, **kwargs: Any):
        super().__init__(*args, **kwargs)

        # A logcontext used when processing incoming commands. It is declared
        # as a background process so that its CPU usage is reported to
        # prometheus.
        with PreserveLoggingContext():
            # thanks to `PreserveLoggingContext()`, the new logcontext is guaranteed to
            # capture the sentinel context as its containing context and won't prevent
            # GC of / unintentionally reactivate what would be the current context.
            self._logging_context = BackgroundProcessLoggingContext(
                "replication_command_handler"
            )

    def connectionMade(self) -> None:
        logger.info("Connected to redis")
        super().connectionMade()
        # Kick off the SUBSCRIBE/REPLICATE handshake in the background.
        run_as_background_process("subscribe-replication", self._send_subscribe)

    async def _send_subscribe(self) -> None:
        # it's important to make sure that we only send the REPLICATE command once we
        # have successfully subscribed to the stream - otherwise we might miss the
        # POSITION response sent back by the other end.
        channels = [
            f"{self.synapse_stream_prefix}/{stream_suffix}"
            for stream_suffix in self.synapse_channel_names
        ]
        channels.append(self.synapse_stream_prefix)
        logger.info("Sending redis SUBSCRIBE for %r", channels)
        await make_deferred_yieldable(self.subscribe(channels))

        logger.info(
            "Successfully subscribed to redis stream, sending REPLICATE command"
        )
        self.synapse_handler.new_connection(self)
        await self._async_send_command(ReplicateCommand())
        logger.info("REPLICATE successfully sent")

        # We send out our positions when there is a new connection in case the
        # other side missed updates. We do this for Redis connections as the
        # otherside won't know we've connected and so won't issue a REPLICATE.
        self.synapse_handler.send_positions_to_connection()

    def messageReceived(self, pattern: str, channel: str, message: str) -> None:
        """Received a message from redis."""
        with PreserveLoggingContext(self._logging_context):
            self._parse_and_dispatch_message(message)

    def _parse_and_dispatch_message(self, message: str) -> None:
        # Blank lines carry no command; drop them silently.
        if not message.strip():
            return

        try:
            cmd = parse_command_from_line(message)
        except Exception:
            logger.exception(
                "Failed to parse replication line: %r",
                message,
            )
            return

        # We use "redis" as the name here as we don't have 1:1 connections to
        # remote instances.
        tcp_inbound_commands_counter.labels(cmd.NAME, "redis").inc()

        self.handle_command(cmd)

    def handle_command(self, cmd: Command) -> None:
        """Handle a command we have received over the replication stream.

        Delegates to `self.handler.on_<COMMAND>` (which can optionally return an
        Awaitable).

        Args:
            cmd: received command
        """
        handler_fn = getattr(self.synapse_handler, "on_%s" % (cmd.NAME,), None)
        if not handler_fn:
            logger.warning("Unhandled command: %r", cmd)
            return

        result = handler_fn(self, cmd)

        # the handler might be a coroutine: fire it off as a background process
        # if so.
        if isawaitable(result):
            run_as_background_process(
                "replication-" + cmd.get_logcontext_id(), lambda: result
            )

    def connectionLost(self, reason: Failure) -> None:  # type: ignore[override]
        logger.info("Lost connection to redis")
        super().connectionLost(reason)
        self.synapse_handler.lost_connection(self)

        # mark the logging context as finished by triggering `__exit__()`
        with PreserveLoggingContext():
            with self._logging_context:
                pass
            # the sentinel context is now active, which may not be correct.
            # PreserveLoggingContext() will restore the correct logging context.

    def send_command(self, cmd: Command) -> None:
        """Send a command if connection has been established.

        Args:
            cmd: The command to send
        """
        run_as_background_process(
            "send-cmd", self._async_send_command, cmd, bg_start_span=False
        )

    async def _async_send_command(self, cmd: Command) -> None:
        """Encode a replication command and send it over our outbound connection"""
        line = "%s %s" % (cmd.NAME, cmd.to_line())
        if "\n" in line:
            raise Exception("Unexpected newline in command: %r", line)

        encoded = line.encode("utf-8")

        # We use "redis" as the name here as we don't have 1:1 connections to
        # remote instances.
        tcp_outbound_commands_counter.labels(cmd.NAME, "redis").inc()

        channel_name = cmd.redis_channel_name(self.synapse_stream_prefix)

        await make_deferred_yieldable(
            self.synapse_outbound_redis_connection.publish(channel_name, encoded)
        )
|
|
|
|
|
|
|
|
|
2023-05-26 15:28:39 -04:00
|
|
|
class SynapseRedisFactory(RedisFactory):
    """A RedisFactory subclass that periodically pings its connections, so that
    dead ones are detected promptly.
    """

    # We want to *always* retry connecting, txredisapi will stop if there is a
    # failure during certain operations, e.g. during AUTH.
    continueTrying = cast(bool, ConstantProperty(True))

    def __init__(
        self,
        hs: "HomeServer",
        uuid: str,
        dbid: Optional[int],
        poolsize: int,
        isLazy: bool = False,
        handler: Type = ConnectionHandler,
        charset: str = "utf-8",
        password: Optional[str] = None,
        replyTimeout: int = 30,
        convertNumbers: Optional[int] = True,
    ):
        super().__init__(
            uuid=uuid,
            dbid=dbid,
            poolsize=poolsize,
            isLazy=isLazy,
            handler=handler,
            charset=charset,
            password=password,
            replyTimeout=replyTimeout,
            convertNumbers=convertNumbers,
        )

        # Ping all pooled connections every 30 seconds.
        hs.get_clock().looping_call(self._send_ping, 30 * 1000)

    @wrap_as_background_process("redis_ping")
    async def _send_ping(self) -> None:
        # A failed ping is only logged here; reconnection is handled by the
        # factory's retry machinery.
        for connection in self.pool:
            try:
                await make_deferred_yieldable(connection.ping())
            except Exception:
                logger.warning("Failed to send ping to a redis connection")

    # ReconnectingClientFactory has some logging (if you enable `self.noisy`), but
    # it's rubbish. We add our own here.

    def startedConnecting(self, connector: IConnector) -> None:
        logger.info(
            "Connecting to redis server %s", format_address(connector.getDestination())
        )
        super().startedConnecting(connector)

    def clientConnectionFailed(self, connector: IConnector, reason: Failure) -> None:
        logger.info(
            "Connection to redis server %s failed: %s",
            format_address(connector.getDestination()),
            reason.value,
        )
        super().clientConnectionFailed(connector, reason)

    def clientConnectionLost(self, connector: IConnector, reason: Failure) -> None:
        logger.info(
            "Connection to redis server %s lost: %s",
            format_address(connector.getDestination()),
            reason.value,
        )
        super().clientConnectionLost(connector, reason)
|
|
|
|
|
|
|
|
|
|
|
|
def format_address(address: IAddress) -> str:
    """Render a twisted address for log output (``host:port`` for IP addresses)."""
    if not isinstance(address, (IPv4Address, IPv6Address)):
        return str(address)
    return "%s:%i" % (address.host, address.port)
|
|
|
|
|
2021-01-26 05:54:54 -05:00
|
|
|
|
|
|
|
class RedisDirectTcpReplicationClientFactory(SynapseRedisFactory):
    """This is a reconnecting factory that connects to redis and immediately
    subscribes to some streams.

    Args:
        hs
        outbound_redis_connection: A connection to redis that will be used to
            send outbound commands (this is separate to the redis connection
            used to subscribe).
        channel_names: A list of channel names to append to the base channel name
            to additionally subscribe to.
            e.g. if ['ABC', 'DEF'] is specified then we'll listen to:
            example.com; example.com/ABC; and example.com/DEF.
    """

    maxDelay = 5
    protocol = RedisSubscriber

    def __init__(
        self,
        hs: "HomeServer",
        outbound_redis_connection: ConnectionHandler,
        channel_names: List[str],
    ):
        super().__init__(
            hs,
            uuid="subscriber",
            dbid=None,
            poolsize=1,
            replyTimeout=30,
            password=hs.config.redis.redis_password,
        )

        self.synapse_handler = hs.get_replication_command_handler()
        self.synapse_stream_prefix = hs.hostname
        self.synapse_channel_names = channel_names
        self.synapse_outbound_redis_connection = outbound_redis_connection

    def buildProtocol(self, addr: IAddress) -> RedisSubscriber:
        proto = cast(RedisSubscriber, super().buildProtocol(addr))

        # We do this here rather than add to the constructor of `RedisSubcriber`
        # as to do so would involve overriding `buildProtocol` entirely, however
        # the base method does some other things than just instantiating the
        # protocol.
        proto.synapse_handler = self.synapse_handler
        proto.synapse_outbound_redis_connection = self.synapse_outbound_redis_connection
        proto.synapse_stream_prefix = self.synapse_stream_prefix
        proto.synapse_channel_names = self.synapse_channel_names

        return proto
|
2020-10-02 04:57:12 -04:00
|
|
|
|
|
|
|
|
|
|
|
def lazyConnection(
    hs: "HomeServer",
    host: str = "localhost",
    port: int = 6379,
    dbid: Optional[int] = None,
    reconnect: bool = True,
    password: Optional[str] = None,
    replyTimeout: int = 30,
) -> ConnectionHandler:
    """Creates a connection to Redis that is lazily set up and reconnects if the
    connections is lost.
    """
    factory = SynapseRedisFactory(
        hs,
        uuid="%s:%d" % (host, port),
        dbid=dbid,
        poolsize=1,
        isLazy=True,
        handler=ConnectionHandler,
        password=password,
        replyTimeout=replyTimeout,
    )
    factory.continueTrying = reconnect

    reactor = hs.get_reactor()

    if hs.config.redis.redis_use_tls:
        # TLS connections need an SSL context factory built from the redis config.
        ssl_context_factory = ClientContextFactory(hs.config.redis)
        reactor.connectSSL(
            host,
            port,
            factory,
            ssl_context_factory,
            timeout=30,
            bindAddress=None,
        )
    else:
        reactor.connectTCP(
            host,
            port,
            factory,
            timeout=30,
            bindAddress=None,
        )

    return factory.handler
|
2023-05-26 15:28:39 -04:00
|
|
|
|
|
|
|
|
|
|
|
def lazyUnixConnection(
    hs: "HomeServer",
    path: str = "/tmp/redis.sock",
    dbid: Optional[int] = None,
    reconnect: bool = True,
    password: Optional[str] = None,
    replyTimeout: int = 30,
) -> ConnectionHandler:
    """Creates a connection to Redis that is lazily set up and reconnects if the
    connection is lost.

    Returns:
        A subclass of ConnectionHandler, which is a UnixConnectionHandler in this case.
    """
    factory = SynapseRedisFactory(
        hs,
        uuid=path,
        dbid=dbid,
        poolsize=1,
        isLazy=True,
        handler=UnixConnectionHandler,
        password=password,
        replyTimeout=replyTimeout,
    )
    factory.continueTrying = reconnect

    hs.get_reactor().connectUNIX(
        path,
        factory,
        timeout=30,
        checkPID=False,
    )

    return factory.handler
|