Mirror of https://git.anonymousland.org/anonymousland/synapse.git (synced 2025-08-17 01:10:11 -04:00)

Merge remote-tracking branch 'upstream/release-v1.71'

Commit 2337ca829d: 135 changed files with 5192 additions and 2356 deletions
synapse/_scripts/update_synapse_database.py: 8 changes (Executable file → Normal file)
@@ -15,7 +15,6 @@
 import argparse
 import logging
-import sys
 from typing import cast

 import yaml
@@ -100,13 +99,6 @@ def main() -> None:
     # Load, process and sanity-check the config.
     hs_config = yaml.safe_load(args.database_config)

-    if "database" not in hs_config and "databases" not in hs_config:
-        sys.stderr.write(
-            "The configuration file must have a 'database' or 'databases' section. "
-            "See https://matrix-org.github.io/synapse/latest/usage/configuration/config_documentation.html#database"
-        )
-        sys.exit(4)
-
     config = HomeServerConfig()
     config.parse_config_dict(hs_config, "", "")
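The hunk above drops the script's own database-section sanity check. For reference, a minimal standalone sketch of the check being removed, should you need the same validation elsewhere (`load_config` is a hypothetical helper, not Synapse code):

```python
import sys

import yaml


def load_config(path: str) -> dict:
    """Load a homeserver YAML config and require a database section."""
    with open(path) as f:
        hs_config = yaml.safe_load(f)
    if "database" not in hs_config and "databases" not in hs_config:
        sys.stderr.write(
            "The configuration file must have a 'database' or 'databases' section.\n"
        )
        sys.exit(4)
    return hs_config
```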
@@ -155,7 +155,13 @@ class RedirectException(CodeMessageException):

 class SynapseError(CodeMessageException):
     """A base exception type for matrix errors which have an errcode and error
-    message (as well as an HTTP status code).
+    message (as well as an HTTP status code). These often bubble all the way up to the
+    client API response so the error code and status often reach the client directly as
+    defined here. If the error doesn't make sense to present to a client, then it
+    probably shouldn't be a `SynapseError`. For example, if we contact another
+    homeserver over federation, we shouldn't automatically ferry response errors back to
+    the client on our end (a 500 from a remote server does not make sense to a client
+    when our server did not experience a 500).

     Attributes:
         errcode: Matrix error code e.g 'M_FORBIDDEN'
@@ -600,8 +606,20 @@ def cs_error(msg: str, code: str = Codes.UNKNOWN, **kwargs: Any) -> "JsonDict":


 class FederationError(RuntimeError):
-    """This class is used to inform remote homeservers about erroneous
-    PDUs they sent us.
+    """
+    Raised when we process an erroneous PDU.
+
+    There are two kinds of scenarios where this exception can be raised:
+
+    1. We may pull an invalid PDU from a remote homeserver (e.g. during backfill). We
+       raise this exception to signal an error to the rest of the application.
+    2. We may be pushed an invalid PDU as part of a `/send` transaction from a remote
+       homeserver. We raise so that we can respond to the transaction and include the
+       error string in the "PDU Processing Result". The message which will likely be
+       ignored by the remote homeserver and is not machine parse-able since it's just a
+       string.
+
+    TODO: In the future, we should split these usage scenarios into their own error types.

     FATAL: The remote server could not interpret the source event.
         (e.g., it was missing a required field)
@@ -28,7 +28,7 @@ FEDERATION_V1_PREFIX = FEDERATION_PREFIX + "/v1"
 FEDERATION_V2_PREFIX = FEDERATION_PREFIX + "/v2"
 FEDERATION_UNSTABLE_PREFIX = FEDERATION_PREFIX + "/unstable"
 STATIC_PREFIX = "/_matrix/static"
-SERVER_KEY_V2_PREFIX = "/_matrix/key/v2"
+SERVER_KEY_PREFIX = "/_matrix/key"
 MEDIA_R0_PREFIX = "/_matrix/media/r0"
 MEDIA_V3_PREFIX = "/_matrix/media/v3"
 LEGACY_MEDIA_PREFIX = "/_matrix/media/v1"
@@ -558,7 +558,7 @@ def reload_cache_config(config: HomeServerConfig) -> None:
         logger.warning(f)
     else:
         logger.debug(
-            "New cache config. Was:\n %s\nNow:\n",
+            "New cache config. Was:\n %s\nNow:\n %s",
            previous_cache_config.__dict__,
            config.caches.__dict__,
        )
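The fix above is easy to miss: the debug call passes two arguments but the old format string has only one `%s`, so the logging module drops the message and prints a logging error instead. A quick standalone demonstration with stdlib logging:

```python
import logging

logging.basicConfig(level=logging.DEBUG)
logger = logging.getLogger("demo")

was, now = {"per_cache_factors": {}}, {"per_cache_factors": {"getEvent": 2.0}}

# One %s but two arguments: logging prints "--- Logging error ---" with
# "not all arguments converted during string formatting".
logger.debug("New cache config. Was:\n %s\nNow:\n", was, now)

# Matching placeholders, as in the fixed line: logs both dicts.
logger.debug("New cache config. Was:\n %s\nNow:\n %s", was, now)
```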
@@ -55,13 +55,13 @@ import os
 import signal
 import sys
 from types import FrameType
-from typing import Any, Callable, List, Optional
+from typing import Any, Callable, Dict, List, Optional

 from twisted.internet.main import installReactor

 # a list of the original signal handlers, before we installed our custom ones.
 # We restore these in our child processes.
-_original_signal_handlers: dict[int, Any] = {}
+_original_signal_handlers: Dict[int, Any] = {}


 class ProxiedReactor:
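Worth noting why this pair of changes matters: `dict[int, Any]` uses PEP 585 builtin generics, which only evaluate on Python 3.9+, while `typing.Dict` works on the older interpreters Synapse still supported at the time. A minimal illustration:

```python
from typing import Any, Dict

# Works on Python 3.7+: typing.Dict is subscriptable everywhere.
handlers_compat: Dict[int, Any] = {}

# Module-level annotations are evaluated at runtime, so on Python < 3.9 the
# next line raises "TypeError: 'type' object is not subscriptable".
# handlers_modern: dict[int, Any] = {}
```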
@@ -28,7 +28,7 @@ from synapse.api.urls import (
     LEGACY_MEDIA_PREFIX,
     MEDIA_R0_PREFIX,
     MEDIA_V3_PREFIX,
-    SERVER_KEY_V2_PREFIX,
+    SERVER_KEY_PREFIX,
 )
 from synapse.app import _base
 from synapse.app._base import (

@@ -89,7 +89,7 @@ from synapse.rest.client.register import (
     RegistrationTokenValidityRestServlet,
 )
 from synapse.rest.health import HealthResource
-from synapse.rest.key.v2 import KeyApiV2Resource
+from synapse.rest.key.v2 import KeyResource
 from synapse.rest.synapse.client import build_synapse_client_resource_tree
 from synapse.rest.well_known import well_known_resource
 from synapse.server import HomeServer
@@ -178,13 +178,13 @@ class KeyUploadServlet(RestServlet):
             # Proxy headers from the original request, such as the auth headers
             # (in case the access token is there) and the original IP /
             # User-Agent of the request.
-            headers = {
-                header: request.requestHeaders.getRawHeaders(header, [])
+            headers: Dict[bytes, List[bytes]] = {
+                header: list(request.requestHeaders.getRawHeaders(header, []))
                 for header in (b"Authorization", b"User-Agent")
             }
             # Add the previous hop to the X-Forwarded-For header.
-            x_forwarded_for = request.requestHeaders.getRawHeaders(
-                b"X-Forwarded-For", []
-            )
+            x_forwarded_for = list(
+                request.requestHeaders.getRawHeaders(b"X-Forwarded-For", [])
+            )
             # we use request.client here, since we want the previous hop, not the
             # original client (as returned by request.getClientAddress()).
@@ -325,13 +325,13 @@ class GenericWorkerServer(HomeServer):

                     presence.register_servlets(self, resource)

-                    resources.update({CLIENT_API_PREFIX: resource})
+                    resources[CLIENT_API_PREFIX] = resource

                     resources.update(build_synapse_client_resource_tree(self))
-                    resources.update({"/.well-known": well_known_resource(self)})
+                    resources["/.well-known"] = well_known_resource(self)

                 elif name == "federation":
-                    resources.update({FEDERATION_PREFIX: TransportLayerServer(self)})
+                    resources[FEDERATION_PREFIX] = TransportLayerServer(self)
                 elif name == "media":
                     if self.config.media.can_load_media_repo:
                         media_repo = self.get_media_repository_resource()

@@ -359,16 +359,12 @@ class GenericWorkerServer(HomeServer):
                     # Only load the openid resource separately if federation resource
                     # is not specified since federation resource includes openid
                     # resource.
-                    resources.update(
-                        {
-                            FEDERATION_PREFIX: TransportLayerServer(
-                                self, servlet_groups=["openid"]
-                            )
-                        }
-                    )
+                    resources[FEDERATION_PREFIX] = TransportLayerServer(
+                        self, servlet_groups=["openid"]
+                    )

                 if name in ["keys", "federation"]:
-                    resources[SERVER_KEY_V2_PREFIX] = KeyApiV2Resource(self)
+                    resources[SERVER_KEY_PREFIX] = KeyResource(self)

                 if name == "replication":
                     resources[REPLICATION_PREFIX] = ReplicationRestResource(self)
@@ -31,7 +31,7 @@ from synapse.api.urls import (
     LEGACY_MEDIA_PREFIX,
     MEDIA_R0_PREFIX,
     MEDIA_V3_PREFIX,
-    SERVER_KEY_V2_PREFIX,
+    SERVER_KEY_PREFIX,
     STATIC_PREFIX,
 )
 from synapse.app import _base

@@ -60,7 +60,7 @@ from synapse.replication.http import REPLICATION_PREFIX, ReplicationRestResource
 from synapse.rest import ClientRestResource
 from synapse.rest.admin import AdminRestResource
 from synapse.rest.health import HealthResource
-from synapse.rest.key.v2 import KeyApiV2Resource
+from synapse.rest.key.v2 import KeyResource
 from synapse.rest.synapse.client import build_synapse_client_resource_tree
 from synapse.rest.well_known import well_known_resource
 from synapse.server import HomeServer
@@ -215,30 +215,22 @@ class SynapseHomeServer(HomeServer):
             consent_resource: Resource = ConsentResource(self)
             if compress:
                 consent_resource = gz_wrap(consent_resource)
-            resources.update({"/_matrix/consent": consent_resource})
+            resources["/_matrix/consent"] = consent_resource

         if name == "federation":
             federation_resource: Resource = TransportLayerServer(self)
             if compress:
                 federation_resource = gz_wrap(federation_resource)
-            resources.update({FEDERATION_PREFIX: federation_resource})
+            resources[FEDERATION_PREFIX] = federation_resource

         if name == "openid":
-            resources.update(
-                {
-                    FEDERATION_PREFIX: TransportLayerServer(
-                        self, servlet_groups=["openid"]
-                    )
-                }
-            )
+            resources[FEDERATION_PREFIX] = TransportLayerServer(
+                self, servlet_groups=["openid"]
+            )

         if name in ["static", "client"]:
-            resources.update(
-                {
-                    STATIC_PREFIX: StaticResource(
-                        os.path.join(os.path.dirname(synapse.__file__), "static")
-                    )
-                }
-            )
+            resources[STATIC_PREFIX] = StaticResource(
+                os.path.join(os.path.dirname(synapse.__file__), "static")
+            )

         if name in ["media", "federation", "client"]:

@@ -257,7 +249,7 @@ class SynapseHomeServer(HomeServer):
         )

         if name in ["keys", "federation"]:
-            resources[SERVER_KEY_V2_PREFIX] = KeyApiV2Resource(self)
+            resources[SERVER_KEY_PREFIX] = KeyResource(self)

         if name == "metrics" and self.config.metrics.enable_metrics:
             metrics_resource: Resource = MetricsResource(RegistryProxy)
@@ -172,12 +172,24 @@ class ApplicationService:
         Returns:
             True if this service would like to know about this room.
         """
-        member_list = await store.get_users_in_room(
+        # We can use `get_local_users_in_room(...)` here because an application service
+        # can only be interested in local users of the server it's on (ignore any remote
+        # users that might match the user namespace regex).
+        #
+        # In the future, we can consider re-using
+        # `store.get_app_service_users_in_room` which is very similar to this
+        # function but has a slightly worse performance than this because we
+        # have an early escape-hatch if we find a single user that the
+        # appservice is interested in. The juice would be worth the squeeze if
+        # `store.get_app_service_users_in_room` was used in more places besides
+        # an experimental MSC. But for now we can avoid doing more work and
+        # barely using it later.
+        local_user_ids = await store.get_local_users_in_room(
             room_id, on_invalidate=cache_context.invalidate
         )

         # check joined member events
-        for user_id in member_list:
+        for user_id in local_user_ids:
             if self.is_interested_in_user(user_id):
                 return True
         return False
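The hunk above swaps the full membership list for local users only, keeping the early-exit scan. A standalone sketch of that pattern (the namespace regex and user IDs are illustrative, not Synapse's):

```python
import re
from typing import Iterable

# Hypothetical appservice user namespace, e.g. an IRC bridge.
USER_NAMESPACE = re.compile(r"@irc_.*:example\.org")


def is_interested_in_room(local_user_ids: Iterable[str]) -> bool:
    """Return True as soon as any local member matches the namespace."""
    for user_id in local_user_ids:
        if USER_NAMESPACE.match(user_id):
            return True  # early escape-hatch: one match is enough
    return False


print(is_interested_in_room(["@alice:example.org", "@irc_bob:example.org"]))  # True
print(is_interested_in_room(["@alice:example.org"]))  # False
```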
@@ -98,6 +98,9 @@ class ExperimentalConfig(Config):
         # MSC3773: Thread notifications
         self.msc3773_enabled: bool = experimental.get("msc3773_enabled", False)

+        # MSC3664: Pushrules to match on related events
+        self.msc3664_enabled: bool = experimental.get("msc3664_enabled", False)
+
         # MSC3848: Introduce errcodes for specific event sending failures
         self.msc3848_enabled: bool = experimental.get("msc3848_enabled", False)
@@ -53,7 +53,7 @@ DEFAULT_LOG_CONFIG = Template(
 # Synapse also supports structured logging for machine readable logs which can
 # be ingested by ELK stacks. See [2] for details.
 #
-# [1]: https://docs.python.org/3.7/library/logging.config.html#configuration-dictionary-schema
+# [1]: https://docs.python.org/3/library/logging.config.html#configuration-dictionary-schema
 # [2]: https://matrix-org.github.io/synapse/latest/structured_logging.html

 version: 1
@@ -123,6 +123,8 @@ OIDC_PROVIDER_CONFIG_SCHEMA = {
         "userinfo_endpoint": {"type": "string"},
         "jwks_uri": {"type": "string"},
         "skip_verification": {"type": "boolean"},
+        "backchannel_logout_enabled": {"type": "boolean"},
+        "backchannel_logout_ignore_sub": {"type": "boolean"},
         "user_profile_method": {
             "type": "string",
             "enum": ["auto", "userinfo_endpoint"],

@@ -292,6 +294,10 @@ def _parse_oidc_config_dict(
         token_endpoint=oidc_config.get("token_endpoint"),
         userinfo_endpoint=oidc_config.get("userinfo_endpoint"),
         jwks_uri=oidc_config.get("jwks_uri"),
+        backchannel_logout_enabled=oidc_config.get("backchannel_logout_enabled", False),
+        backchannel_logout_ignore_sub=oidc_config.get(
+            "backchannel_logout_ignore_sub", False
+        ),
         skip_verification=oidc_config.get("skip_verification", False),
         user_profile_method=oidc_config.get("user_profile_method", "auto"),
         allow_existing_users=oidc_config.get("allow_existing_users", False),

@@ -368,6 +374,12 @@ class OidcProviderConfig:
     # "openid" scope is used.
     jwks_uri: Optional[str]

+    # Whether Synapse should react to backchannel logouts
+    backchannel_logout_enabled: bool
+
+    # Whether Synapse should ignore the `sub` claim in backchannel logouts or not.
+    backchannel_logout_ignore_sub: bool
+
     # Whether to skip metadata verification
     skip_verification: bool
@@ -80,6 +80,18 @@ PDU_RETRY_TIME_MS = 1 * 60 * 1000
 T = TypeVar("T")


+@attr.s(frozen=True, slots=True, auto_attribs=True)
+class PulledPduInfo:
+    """
+    A result object that stores the PDU and info about it like which homeserver we
+    pulled it from (`pull_origin`)
+    """
+
+    pdu: EventBase
+    # Which homeserver we pulled the PDU from
+    pull_origin: str
+
+
 class InvalidResponseError(RuntimeError):
     """Helper for _try_destination_list: indicates that the server returned a response
     we couldn't parse

@@ -114,7 +126,9 @@ class FederationClient(FederationBase):
         self.hostname = hs.hostname
         self.signing_key = hs.signing_key

-        self._get_pdu_cache: ExpiringCache[str, EventBase] = ExpiringCache(
+        # Cache mapping `event_id` to a tuple of the event itself and the `pull_origin`
+        # (which server we pulled the event from)
+        self._get_pdu_cache: ExpiringCache[str, Tuple[EventBase, str]] = ExpiringCache(
             cache_name="get_pdu_cache",
             clock=self._clock,
             max_len=1000,
@@ -352,11 +366,11 @@
     @tag_args
     async def get_pdu(
         self,
-        destinations: Iterable[str],
+        destinations: Collection[str],
         event_id: str,
         room_version: RoomVersion,
         timeout: Optional[int] = None,
-    ) -> Optional[EventBase]:
+    ) -> Optional[PulledPduInfo]:
         """Requests the PDU with given origin and ID from the remote home
         servers.

@@ -371,11 +385,11 @@
             moving to the next destination. None indicates no timeout.

         Returns:
-            The requested PDU, or None if we were unable to find it.
+            The requested PDU wrapped in `PulledPduInfo`, or None if we were unable to find it.
         """

         logger.debug(
-            "get_pdu: event_id=%s from destinations=%s", event_id, destinations
+            "get_pdu(event_id=%s): from destinations=%s", event_id, destinations
         )

         # TODO: Rate limit the number of times we try and get the same event.
@@ -384,19 +398,25 @@
         # it gets persisted to the database), so we cache the results of the lookup.
         # Note that this is separate to the regular get_event cache which caches
         # events once they have been persisted.
-        event = self._get_pdu_cache.get(event_id)
+        get_pdu_cache_entry = self._get_pdu_cache.get(event_id)

+        event = None
+        pull_origin = None
+        if get_pdu_cache_entry:
+            event, pull_origin = get_pdu_cache_entry
         # If we don't see the event in the cache, go try to fetch it from the
         # provided remote federated destinations
-        if not event:
+        else:
             pdu_attempts = self.pdu_destination_tried.setdefault(event_id, {})

+            # TODO: We can probably refactor this to use `_try_destination_list`
             for destination in destinations:
                 now = self._clock.time_msec()
                 last_attempt = pdu_attempts.get(destination, 0)
                 if last_attempt + PDU_RETRY_TIME_MS > now:
                     logger.debug(
-                        "get_pdu: skipping destination=%s because we tried it recently last_attempt=%s and we only check every %s (now=%s)",
+                        "get_pdu(event_id=%s): skipping destination=%s because we tried it recently last_attempt=%s and we only check every %s (now=%s)",
+                        event_id,
                         destination,
                         last_attempt,
                         PDU_RETRY_TIME_MS,
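A small standalone illustration of the new cache shape: entries are now `(event, pull_origin)` tuples, so a hit hands back both values in one lookup (a plain dict stands in for Synapse's `ExpiringCache`, and the values are dummies):

```python
from typing import Dict, Optional, Tuple

# event_id -> (event, pull_origin); strings stand in for EventBase objects.
get_pdu_cache: Dict[str, Tuple[str, str]] = {
    "$abc123": ("<event $abc123>", "other.example.org"),
}

entry: Optional[Tuple[str, str]] = get_pdu_cache.get("$abc123")
if entry:
    event, pull_origin = entry
    print(f"cache hit: {event} pulled from {pull_origin}")
else:
    print("cache miss: fall back to asking the destinations")
```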
@@ -411,43 +431,48 @@
                         room_version=room_version,
                         timeout=timeout,
                     )
+                    pull_origin = destination

                     pdu_attempts[destination] = now

                     if event:
                         # Prime the cache
-                        self._get_pdu_cache[event.event_id] = event
+                        self._get_pdu_cache[event.event_id] = (event, pull_origin)

                         # Now that we have an event, we can break out of this
                         # loop and stop asking other destinations.
                         break

+                except NotRetryingDestination as e:
+                    logger.info("get_pdu(event_id=%s): %s", event_id, e)
+                    continue
+                except FederationDeniedError:
+                    logger.info(
+                        "get_pdu(event_id=%s): Not attempting to fetch PDU from %s because the homeserver is not on our federation whitelist",
+                        event_id,
+                        destination,
+                    )
+                    continue
                 except SynapseError as e:
                     logger.info(
-                        "Failed to get PDU %s from %s because %s",
+                        "get_pdu(event_id=%s): Failed to get PDU from %s because %s",
                         event_id,
                         destination,
                         e,
                     )
                     continue
-                except NotRetryingDestination as e:
-                    logger.info(str(e))
-                    continue
-                except FederationDeniedError as e:
-                    logger.info(str(e))
-                    continue
                 except Exception as e:
                     pdu_attempts[destination] = now

                     logger.info(
-                        "Failed to get PDU %s from %s because %s",
+                        "get_pdu(event_id=%s): Failed to get PDU from %s because %s",
                         event_id,
                         destination,
                         e,
                     )
                     continue

-            if not event:
+            if not event or not pull_origin:
                 return None

             # `event` now refers to an object stored in `get_pdu_cache`. Our
@@ -459,7 +484,7 @@
             event.room_version,
         )

-        return event_copy
+        return PulledPduInfo(event_copy, pull_origin)

     @trace
     @tag_args
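With that return type change, callers of `get_pdu` unwrap a `PulledPduInfo` instead of receiving a bare event. A hedged sketch of the new calling convention (standalone; the event is a plain string here rather than an `EventBase`):

```python
from typing import Optional

import attr


@attr.s(frozen=True, slots=True, auto_attribs=True)
class PulledPduInfo:
    pdu: str          # an EventBase in Synapse; a string here for brevity
    pull_origin: str  # which homeserver we pulled the PDU from


def handle_pulled_pdu(pulled_pdu_info: Optional[PulledPduInfo]) -> None:
    if pulled_pdu_info is None:
        return  # no destination could provide the event
    print(f"got {pulled_pdu_info.pdu} from {pulled_pdu_info.pull_origin}")


handle_pulled_pdu(PulledPduInfo("<event $abc123>", "other.example.org"))
```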
@@ -699,12 +724,14 @@
             pdu_origin = get_domain_from_id(pdu.sender)
             if not res and pdu_origin != origin:
                 try:
-                    res = await self.get_pdu(
+                    pulled_pdu_info = await self.get_pdu(
                         destinations=[pdu_origin],
                         event_id=pdu.event_id,
                         room_version=room_version,
                         timeout=10000,
                     )
+                    if pulled_pdu_info is not None:
+                        res = pulled_pdu_info.pdu
                 except SynapseError:
                     pass
@@ -806,6 +833,7 @@
         )

         for destination in destinations:
+            # We don't want to ask our own server for information we don't have
             if destination == self.server_name:
                 continue

@@ -814,9 +842,21 @@
             except (
                 RequestSendFailed,
                 InvalidResponseError,
-                NotRetryingDestination,
             ) as e:
                 logger.warning("Failed to %s via %s: %s", description, destination, e)
+                # Skip to the next homeserver in the list to try.
+                continue
+            except NotRetryingDestination as e:
+                logger.info("%s: %s", description, e)
+                continue
+            except FederationDeniedError:
+                logger.info(
+                    "%s: Not attempting to %s from %s because the homeserver is not on our federation whitelist",
+                    description,
+                    description,
+                    destination,
+                )
+                continue
             except UnsupportedRoomVersionError:
                 raise
             except HttpResponseException as e:
@@ -1609,6 +1649,54 @@
         return result

+    async def timestamp_to_event(
+        self, *, destinations: List[str], room_id: str, timestamp: int, direction: str
+    ) -> Optional["TimestampToEventResponse"]:
+        """
+        Calls each remote federating server from `destinations` asking for their closest
+        event to the given timestamp in the given direction until we get a response.
+        Also validates the response to always return the expected keys or raises an
+        error.
+
+        Args:
+            destinations: The domains of homeservers to try fetching from
+            room_id: Room to fetch the event from
+            timestamp: The point in time (inclusive) we should navigate from in
+                the given direction to find the closest event.
+            direction: ["f"|"b"] to indicate whether we should navigate forward
+                or backward from the given timestamp to find the closest event.
+
+        Returns:
+            A parsed TimestampToEventResponse including the closest event_id
+            and origin_server_ts or None if no destination has a response.
+        """
+
+        async def _timestamp_to_event_from_destination(
+            destination: str,
+        ) -> TimestampToEventResponse:
+            return await self._timestamp_to_event_from_destination(
+                destination, room_id, timestamp, direction
+            )
+
+        try:
+            # Loop through each homeserver candidate until we get a succesful response
+            timestamp_to_event_response = await self._try_destination_list(
+                "timestamp_to_event",
+                destinations,
+                # TODO: The requested timestamp may lie in a part of the
+                # event graph that the remote server *also* didn't have,
+                # in which case they will have returned another event
+                # which may be nowhere near the requested timestamp. In
+                # the future, we may need to reconcile that gap and ask
+                # other homeservers, and/or extend `/timestamp_to_event`
+                # to return events on *both* sides of the timestamp to
+                # help reconcile the gap faster.
+                _timestamp_to_event_from_destination,
+            )
+            return timestamp_to_event_response
+        except SynapseError:
+            return None
+
     async def _timestamp_to_event_from_destination(
         self, destination: str, room_id: str, timestamp: int, direction: str
     ) -> "TimestampToEventResponse":
         """
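The `timestamp_to_event` wrapper above leans on `_try_destination_list` to walk candidate homeservers until one answers. A distilled, standalone sketch of that pattern (exception types and names simplified; not Synapse's actual helper):

```python
import asyncio
from typing import Awaitable, Callable, Iterable, TypeVar

T = TypeVar("T")


async def try_destination_list(
    description: str,
    destinations: Iterable[str],
    callback: Callable[[str], Awaitable[T]],
) -> T:
    """Run `callback` against each destination, returning the first success."""
    for destination in destinations:
        try:
            return await callback(destination)
        except Exception as e:
            print(f"Failed to {description} via {destination}: {e}")
            continue  # skip to the next homeserver in the list
    raise RuntimeError(f"No destination could {description}")


async def main() -> None:
    async def fetch(destination: str) -> str:
        if destination == "flaky.example.org":
            raise ValueError("timed out")
        return f"closest event according to {destination}"

    print(await try_destination_list(
        "timestamp_to_event", ["flaky.example.org", "other.example.org"], fetch
    ))


asyncio.run(main())
```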
@@ -481,6 +481,14 @@ class FederationServer(FederationBase):
                 pdu_results[pdu.event_id] = await process_pdu(pdu)

         async def process_pdu(pdu: EventBase) -> JsonDict:
+            """
+            Processes a pushed PDU sent to us via a `/send` transaction
+
+            Returns:
+                JsonDict representing a "PDU Processing Result" that will be bundled up
+                with the other processed PDU's in the `/send` transaction and sent back
+                to remote homeserver.
+            """
             event_id = pdu.event_id
             with nested_logging_context(event_id):
                 try:
@@ -499,6 +499,11 @@ class FederationV2InviteServlet(BaseFederationServerServlet):
         result = await self.handler.on_invite_request(
             origin, event, room_version_id=room_version
         )

+        # We only store invite_room_state for internal use, so remove it before
+        # returning the event to the remote homeserver.
+        result["event"].get("unsigned", {}).pop("invite_room_state", None)
+
         return 200, result
@@ -100,6 +100,7 @@ class AdminHandler:
         user_info_dict["avatar_url"] = profile.avatar_url
         user_info_dict["threepids"] = threepids
         user_info_dict["external_ids"] = external_ids
+        user_info_dict["erased"] = await self.store.is_user_erased(user.to_string())

         return user_info_dict
@@ -38,6 +38,7 @@ from typing import (
 import attr
 import bcrypt
 import unpaddedbase64
+from prometheus_client import Counter

 from twisted.internet.defer import CancelledError
 from twisted.web.server import Request

@@ -48,6 +49,7 @@ from synapse.api.errors import (
     Codes,
     InteractiveAuthIncompleteError,
     LoginError,
+    NotFoundError,
     StoreError,
     SynapseError,
     UserDeactivatedError,

@@ -63,10 +65,14 @@ from synapse.http.server import finish_request, respond_with_html
 from synapse.http.site import SynapseRequest
 from synapse.logging.context import defer_to_thread
 from synapse.metrics.background_process_metrics import run_as_background_process
+from synapse.storage.databases.main.registration import (
+    LoginTokenExpired,
+    LoginTokenLookupResult,
+    LoginTokenReused,
+)
 from synapse.types import JsonDict, Requester, UserID
 from synapse.util import stringutils as stringutils
 from synapse.util.async_helpers import delay_cancellation, maybe_awaitable
-from synapse.util.macaroons import LoginTokenAttributes
 from synapse.util.msisdn import phone_number_to_msisdn
 from synapse.util.stringutils import base62_encode
 from synapse.util.threepids import canonicalise_email

@@ -80,6 +86,12 @@ logger = logging.getLogger(__name__)

 INVALID_USERNAME_OR_PASSWORD = "Invalid username or password"

+invalid_login_token_counter = Counter(
+    "synapse_user_login_invalid_login_tokens",
+    "Counts the number of rejected m.login.token on /login",
+    ["reason"],
+)
+

 def convert_client_dict_legacy_fields_to_identifier(
     submission: JsonDict,

@@ -883,6 +895,25 @@ class AuthHandler:

         return True

+    async def create_login_token_for_user_id(
+        self,
+        user_id: str,
+        duration_ms: int = (2 * 60 * 1000),
+        auth_provider_id: Optional[str] = None,
+        auth_provider_session_id: Optional[str] = None,
+    ) -> str:
+        login_token = self.generate_login_token()
+        now = self._clock.time_msec()
+        expiry_ts = now + duration_ms
+        await self.store.add_login_token_to_user(
+            user_id=user_id,
+            token=login_token,
+            expiry_ts=expiry_ts,
+            auth_provider_id=auth_provider_id,
+            auth_provider_session_id=auth_provider_session_id,
+        )
+        return login_token
+
     async def create_refresh_token_for_user_id(
         self,
         user_id: str,
@@ -1401,6 +1432,18 @@ class AuthHandler:
             return None
         return user_id

+    def generate_login_token(self) -> str:
+        """Generates an opaque string, for use as an short-term login token"""
+
+        # we use the following format for access tokens:
+        #    syl_<random string>_<base62 crc check>
+
+        random_string = stringutils.random_string(20)
+        base = f"syl_{random_string}"
+
+        crc = base62_encode(crc32(base.encode("ascii")), minwidth=6)
+        return f"{base}_{crc}"
+
     def generate_access_token(self, for_user: UserID) -> str:
         """Generates an opaque string, for use as an access token"""
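A self-contained sketch of the `syl_<random string>_<base62 crc check>` format above, with a local base62 encoder standing in for `synapse.util.stringutils.base62_encode` (the alphabet and padding here are assumptions, chosen only to make the sketch runnable):

```python
import secrets
import string
from zlib import crc32

# Assumed base62 alphabet; Synapse's own encoder may order characters differently.
BASE62 = string.digits + string.ascii_lowercase + string.ascii_uppercase


def base62_encode(num: int, minwidth: int = 1) -> str:
    digits = ""
    while num:
        num, rem = divmod(num, 62)
        digits = BASE62[rem] + digits
    return digits.rjust(minwidth, "0")


def generate_login_token() -> str:
    random_string = "".join(secrets.choice(BASE62) for _ in range(20))
    base = f"syl_{random_string}"
    crc = base62_encode(crc32(base.encode("ascii")), minwidth=6)
    return f"{base}_{crc}"


print(generate_login_token())  # e.g. syl_3fj9kqWm2xVbn07TghZp_04hGbQ
```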
@@ -1427,16 +1470,17 @@ class AuthHandler:
         crc = base62_encode(crc32(base.encode("ascii")), minwidth=6)
         return f"{base}_{crc}"

-    async def validate_short_term_login_token(
-        self, login_token: str
-    ) -> LoginTokenAttributes:
+    async def consume_login_token(self, login_token: str) -> LoginTokenLookupResult:
         try:
-            res = self.macaroon_gen.verify_short_term_login_token(login_token)
-        except Exception:
-            raise AuthError(403, "Invalid login token", errcode=Codes.FORBIDDEN)
+            return await self.store.consume_login_token(login_token)
+        except LoginTokenExpired:
+            invalid_login_token_counter.labels("expired").inc()
+        except LoginTokenReused:
+            invalid_login_token_counter.labels("reused").inc()
+        except NotFoundError:
+            invalid_login_token_counter.labels("not found").inc()

-        await self.auth_blocking.check_auth_blocking(res.user_id)
-        return res
+        raise AuthError(403, "Invalid login token", errcode=Codes.FORBIDDEN)

     async def delete_access_token(self, access_token: str) -> None:
         """Invalidate a single access token
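Each rejection path above increments a labelled Prometheus counter. A minimal sketch of that pattern with prometheus_client (the metric name is the one from the diff; the rest is illustrative):

```python
from prometheus_client import Counter, generate_latest

invalid_login_token_counter = Counter(
    "synapse_user_login_invalid_login_tokens",
    "Counts the number of rejected m.login.token on /login",
    ["reason"],
)

# One time series per label value; scrapes report each reason separately.
invalid_login_token_counter.labels("expired").inc()
invalid_login_token_counter.labels("reused").inc()
invalid_login_token_counter.labels("not found").inc()

print(generate_latest().decode())  # exposition-format dump, as Prometheus would scrape it
```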
@@ -1711,7 +1755,7 @@ class AuthHandler:
         )

         # Create a login token
-        login_token = self.macaroon_gen.generate_short_term_login_token(
+        login_token = await self.create_login_token_for_user_id(
             registered_user_id,
             auth_provider_id=auth_provider_id,
             auth_provider_session_id=auth_provider_session_id,
@@ -49,6 +49,7 @@ logger = logging.getLogger(__name__)

 class E2eKeysHandler:
     def __init__(self, hs: "HomeServer"):
+        self.config = hs.config
         self.store = hs.get_datastores().main
         self.federation = hs.get_federation_client()
         self.device_handler = hs.get_device_handler()

@@ -431,13 +432,17 @@ class E2eKeysHandler:
     @trace
     @cancellable
     async def query_local_devices(
-        self, query: Mapping[str, Optional[List[str]]]
+        self,
+        query: Mapping[str, Optional[List[str]]],
+        include_displaynames: bool = True,
     ) -> Dict[str, Dict[str, dict]]:
         """Get E2E device keys for local users

         Args:
             query: map from user_id to a list
                 of devices to query (None for all devices)
+            include_displaynames: Whether to include device displaynames in the returned
+                device details.

         Returns:
             A map from user_id -> device_id -> device details

@@ -469,7 +474,9 @@ class E2eKeysHandler:
             # make sure that each queried user appears in the result dict
             result_dict[user_id] = {}

-        results = await self.store.get_e2e_device_keys_for_cs_api(local_query)
+        results = await self.store.get_e2e_device_keys_for_cs_api(
+            local_query, include_displaynames
+        )

         # Build the result structure
         for user_id, device_keys in results.items():
@@ -482,11 +489,33 @@ class E2eKeysHandler:
     async def on_federation_query_client_keys(
         self, query_body: Dict[str, Dict[str, Optional[List[str]]]]
     ) -> JsonDict:
-        """Handle a device key query from a federated server"""
+        """Handle a device key query from a federated server:
+
+        Handles the path: GET /_matrix/federation/v1/users/keys/query
+
+        Args:
+            query_body: The body of the query request. Should contain a key
+                "device_keys" that map to a dictionary of user ID's -> list of
+                device IDs. If the list of device IDs is empty, all devices of
+                that user will be queried.
+
+        Returns:
+            A json dictionary containing the following:
+                - device_keys: A dictionary containing the requested device information.
+                - master_keys: An optional dictionary of user ID -> master cross-signing
+                    key info.
+                - self_signing_key: An optional dictionary of user ID -> self-signing
+                    key info.
+        """
         device_keys_query: Dict[str, Optional[List[str]]] = query_body.get(
             "device_keys", {}
         )
-        res = await self.query_local_devices(device_keys_query)
+        res = await self.query_local_devices(
+            device_keys_query,
+            include_displaynames=(
+                self.config.federation.allow_device_name_lookup_over_federation
+            ),
+        )
         ret = {"device_keys": res}

         # add in the cross-signing keys
@@ -442,6 +442,15 @@ class FederationHandler:
                 # appropriate stuff.
                 # TODO: We can probably do something more intelligent here.
                 return True
+            except NotRetryingDestination as e:
+                logger.info("_maybe_backfill_inner: %s", e)
+                continue
+            except FederationDeniedError:
+                logger.info(
+                    "_maybe_backfill_inner: Not attempting to backfill from %s because the homeserver is not on our federation whitelist",
+                    dom,
+                )
+                continue
             except (SynapseError, InvalidResponseError) as e:
                 logger.info("Failed to backfill from %s because %s", dom, e)
                 continue

@@ -477,15 +486,9 @@ class FederationHandler:

                 logger.info("Failed to backfill from %s because %s", dom, e)
                 continue
-            except NotRetryingDestination as e:
-                logger.info(str(e))
-                continue
             except RequestSendFailed as e:
                 logger.info("Failed to get backfill from %s because %s", dom, e)
                 continue
-            except FederationDeniedError as e:
-                logger.info(e)
-                continue
             except Exception as e:
                 logger.exception("Failed to backfill from %s because %s", dom, e)
                 continue

@@ -1017,7 +1020,9 @@ class FederationHandler:

         context = EventContext.for_outlier(self._storage_controllers)

-        await self._bulk_push_rule_evaluator.action_for_event_by_user(event, context)
+        await self._bulk_push_rule_evaluator.action_for_events_by_user(
+            [(event, context)]
+        )
         try:
             await self._federation_event_handler.persist_events_and_notify(
                 event.room_id, [(event, context)]
@@ -58,7 +58,7 @@ from synapse.event_auth import (
 )
 from synapse.events import EventBase
 from synapse.events.snapshot import EventContext
-from synapse.federation.federation_client import InvalidResponseError
+from synapse.federation.federation_client import InvalidResponseError, PulledPduInfo
 from synapse.logging.context import nested_logging_context
 from synapse.logging.opentracing import (
     SynapseTags,
@@ -1517,8 +1517,8 @@ class KeyUploadServlet(RestServlet):
         )

     async def backfill_event_id(
-        self, destination: str, room_id: str, event_id: str
-    ) -> EventBase:
+        self, destinations: List[str], room_id: str, event_id: str
+    ) -> PulledPduInfo:
         """Backfill a single event and persist it as a non-outlier which means
         we also pull in all of the state and auth events necessary for it.

@@ -1530,24 +1530,21 @@
         Raises:
             FederationError if we are unable to find the event from the destination
         """
-        logger.info(
-            "backfill_event_id: event_id=%s from destination=%s", event_id, destination
-        )
+        logger.info("backfill_event_id: event_id=%s", event_id)

         room_version = await self._store.get_room_version(room_id)

-        event_from_response = await self._federation_client.get_pdu(
-            [destination],
+        pulled_pdu_info = await self._federation_client.get_pdu(
+            destinations,
             event_id,
             room_version,
         )

-        if not event_from_response:
+        if not pulled_pdu_info:
             raise FederationError(
                 "ERROR",
                 404,
-                "Unable to find event_id=%s from destination=%s to backfill."
-                % (event_id, destination),
+                f"Unable to find event_id={event_id} from remote servers to backfill.",
                 affected=event_id,
             )

@@ -1555,13 +1552,13 @@
         # and auth events to de-outlier it. This also sets up the necessary
         # `state_groups` for the event.
         await self._process_pulled_events(
-            destination,
-            [event_from_response],
+            pulled_pdu_info.pull_origin,
+            [pulled_pdu_info.pdu],
             # Prevent notifications going to clients
             backfilled=True,
         )

-        return event_from_response
+        return pulled_pdu_info

     @trace
     @tag_args
@@ -1584,19 +1581,19 @@
         async def get_event(event_id: str) -> None:
             with nested_logging_context(event_id):
                 try:
-                    event = await self._federation_client.get_pdu(
+                    pulled_pdu_info = await self._federation_client.get_pdu(
                         [destination],
                         event_id,
                         room_version,
                     )
-                    if event is None:
+                    if pulled_pdu_info is None:
                         logger.warning(
                             "Server %s didn't return event %s",
                             destination,
                             event_id,
                         )
                         return
-                    events.append(event)
+                    events.append(pulled_pdu_info.pdu)

                 except Exception as e:
                     logger.warning(
@@ -2171,8 +2168,8 @@
                 min_depth,
             )
         else:
-            await self._bulk_push_rule_evaluator.action_for_event_by_user(
-                event, context
+            await self._bulk_push_rule_evaluator.action_for_events_by_user(
+                [(event, context)]
             )

         try:
@@ -1442,17 +1442,9 @@ class EventCreationHandler:
             a room that has been un-partial stated.
         """

-        for event, context in events_and_context:
-            # Skip push notification actions for historical messages
-            # because we don't want to notify people about old history back in time.
-            # The historical messages also do not have the proper `context.current_state_ids`
-            # and `state_groups` because they have `prev_events` that aren't persisted yet
-            # (historical messages persisted in reverse-chronological order).
-            if not event.internal_metadata.is_historical() and not event.content.get(EventContentFields.MSC2716_HISTORICAL):
-                with opentracing.start_active_span("calculate_push_actions"):
-                    await self._bulk_push_rule_evaluator.action_for_event_by_user(
-                        event, context
-                    )
+        with opentracing.start_active_span("calculate_push_actions"):
+            await self._bulk_push_rule_evaluator.action_for_events_by_user(
+                events_and_context
+            )

         try:
             # If we're a worker we need to hit out to the master.
@@ -12,14 +12,28 @@
 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 # See the License for the specific language governing permissions and
 # limitations under the License.
+import binascii
 import inspect
+import json
 import logging
-from typing import TYPE_CHECKING, Any, Dict, Generic, List, Optional, TypeVar, Union
+from typing import (
+    TYPE_CHECKING,
+    Any,
+    Dict,
+    Generic,
+    List,
+    Optional,
+    Type,
+    TypeVar,
+    Union,
+)
 from urllib.parse import urlencode, urlparse

 import attr
+import unpaddedbase64
 from authlib.common.security import generate_token
-from authlib.jose import JsonWebToken, jwt
+from authlib.jose import JsonWebToken, JWTClaims
 from authlib.jose.errors import InvalidClaimError, JoseError, MissingClaimError
 from authlib.oauth2.auth import ClientAuth
 from authlib.oauth2.rfc6749.parameters import prepare_grant_uri
 from authlib.oidc.core import CodeIDToken, UserInfo

@@ -35,9 +49,12 @@ from typing_extensions import TypedDict
 from twisted.web.client import readBody
 from twisted.web.http_headers import Headers

+from synapse.api.errors import SynapseError
 from synapse.config import ConfigError
 from synapse.config.oidc import OidcProviderClientSecretJwtKey, OidcProviderConfig
 from synapse.handlers.sso import MappingException, UserAttributes
+from synapse.http.server import finish_request
+from synapse.http.servlet import parse_string
 from synapse.http.site import SynapseRequest
 from synapse.logging.context import make_deferred_yieldable
 from synapse.types import JsonDict, UserID, map_username_to_mxid_localpart
@@ -88,6 +105,8 @@ class Token(TypedDict):
 #: there is no real point of doing this in our case.
 JWK = Dict[str, str]

+C = TypeVar("C")
+

 #: A JWK Set, as per RFC7517 sec 5.
 class JWKS(TypedDict):
@@ -247,6 +266,80 @@ class OidcHandler:

         await oidc_provider.handle_oidc_callback(request, session_data, code)

+    async def handle_backchannel_logout(self, request: SynapseRequest) -> None:
+        """Handle an incoming request to /_synapse/client/oidc/backchannel_logout
+
+        This extracts the logout_token from the request and tries to figure out
+        which OpenID Provider it is comming from. This works by matching the iss claim
+        with the issuer and the aud claim with the client_id.
+
+        Since at this point we don't know who signed the JWT, we can't just
+        decode it using authlib since it will always verifies the signature. We
+        have to decode it manually without validating the signature. The actual JWT
+        verification is done in the `OidcProvider.handler_backchannel_logout` method,
+        once we figured out which provider sent the request.
+
+        Args:
+            request: the incoming request from the browser.
+        """
+        logout_token = parse_string(request, "logout_token")
+        if logout_token is None:
+            raise SynapseError(400, "Missing logout_token in request")
+
+        # A JWT looks like this:
+        #    header.payload.signature
+        # where all parts are encoded with urlsafe base64.
+        # The aud and iss claims we care about are in the payload part, which
+        # is a JSON object.
+        try:
+            # By destructuring the list after splitting, we ensure that we have
+            # exactly 3 segments
+            _, payload, _ = logout_token.split(".")
+        except ValueError:
+            raise SynapseError(400, "Invalid logout_token in request")
+
+        try:
+            payload_bytes = unpaddedbase64.decode_base64(payload)
+            claims = json_decoder.decode(payload_bytes.decode("utf-8"))
+        except (json.JSONDecodeError, binascii.Error, UnicodeError):
+            raise SynapseError(400, "Invalid logout_token payload in request")
+
+        try:
+            # Let's extract the iss and aud claims
+            iss = claims["iss"]
+            aud = claims["aud"]
+            # The aud claim can be either a string or a list of string. Here we
+            # normalize it as a list of strings.
+            if isinstance(aud, str):
+                aud = [aud]
+
+            # Check that we have the right types for the aud and the iss claims
+            if not isinstance(iss, str) or not isinstance(aud, list):
+                raise TypeError()
+            for a in aud:
+                if not isinstance(a, str):
+                    raise TypeError()
+
+            # At this point we properly checked both claims types
+            issuer: str = iss
+            audience: List[str] = aud
+        except (TypeError, KeyError):
+            raise SynapseError(400, "Invalid issuer/audience in logout_token")
+
+        # Now that we know the audience and the issuer, we can figure out from
+        # what provider it is coming from
+        oidc_provider: Optional[OidcProvider] = None
+        for provider in self._providers.values():
+            if provider.issuer == issuer and provider.client_id in audience:
+                oidc_provider = provider
+                break
+
+        if oidc_provider is None:
+            raise SynapseError(400, "Could not find the OP that issued this event")
+
+        # Ask the provider to handle the logout request.
+        await oidc_provider.handle_backchannel_logout(request, logout_token)
+

 class OidcError(Exception):
     """Used to catch errors when calling the token_endpoint"""
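The handler above peeks at the JWT payload without verifying the signature, purely to route the token to the right provider; verification happens later in the provider. A standalone sketch of that peek (stdlib only; the sample token is constructed inline):

```python
import base64
import json


def peek_claims(jwt: str) -> dict:
    """Read a JWT's payload claims WITHOUT verifying its signature."""
    _, payload, _ = jwt.split(".")  # destructuring enforces exactly 3 segments
    padded = payload + "=" * (-len(payload) % 4)  # restore stripped base64 padding
    return json.loads(base64.urlsafe_b64decode(padded))


payload = {"iss": "https://op.example.com", "aud": "my-client-id"}
encoded = base64.urlsafe_b64encode(json.dumps(payload).encode()).rstrip(b"=").decode()
sample_token = f"eyJhbGciOiJSUzI1NiJ9.{encoded}.c2lnbmF0dXJl"

claims = peek_claims(sample_token)
print(claims["iss"], claims["aud"])  # route to the provider matching iss/aud
```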
@@ -275,6 +368,7 @@ class OidcProvider:
         provider: OidcProviderConfig,
     ):
         self._store = hs.get_datastores().main
+        self._clock = hs.get_clock()

         self._macaroon_generaton = macaroon_generator

@@ -341,6 +435,7 @@ class OidcProvider:
         self.idp_brand = provider.idp_brand

         self._sso_handler = hs.get_sso_handler()
+        self._device_handler = hs.get_device_handler()

         self._sso_handler.register_identity_provider(self)
@@ -399,6 +494,41 @@ class OidcProvider:
             # If we're not using userinfo, we need a valid jwks to validate the ID token
             m.validate_jwks_uri()

+        if self._config.backchannel_logout_enabled:
+            if not m.get("backchannel_logout_supported", False):
+                logger.warning(
+                    "OIDC Back-Channel Logout is enabled for issuer %r"
+                    "but it does not advertise support for it",
+                    self.issuer,
+                )
+
+            elif not m.get("backchannel_logout_session_supported", False):
+                logger.warning(
+                    "OIDC Back-Channel Logout is enabled and supported "
+                    "by issuer %r but it might not send a session ID with "
+                    "logout tokens, which is required for the logouts to work",
+                    self.issuer,
+                )
+
+            if not self._config.backchannel_logout_ignore_sub:
+                # If OIDC backchannel logouts are enabled, the provider mapping provider
+                # should use the `sub` claim. We verify that by mapping a dumb user and
+                # see if we get back the sub claim
+                user = UserInfo({"sub": "thisisasubject"})
+                try:
+                    subject = self._user_mapping_provider.get_remote_user_id(user)
+                    if subject != user["sub"]:
+                        raise ValueError("Unexpected subject")
+                except Exception:
+                    logger.warning(
+                        f"OIDC Back-Channel Logout is enabled for issuer {self.issuer!r} "
+                        "but it looks like the configured `user_mapping_provider` "
+                        "does not use the `sub` claim as subject. If it is the case, "
+                        "and you want Synapse to ignore the `sub` claim in OIDC "
+                        "Back-Channel Logouts, set `backchannel_logout_ignore_sub` "
+                        "to `true` in the issuer config."
+                    )
+
     @property
     def _uses_userinfo(self) -> bool:
         """Returns True if the ``userinfo_endpoint`` should be used.
@@ -414,6 +544,16 @@ class OidcProvider:
             or self._user_profile_method == "userinfo_endpoint"
         )

+    @property
+    def issuer(self) -> str:
+        """The issuer identifying this provider."""
+        return self._config.issuer
+
+    @property
+    def client_id(self) -> str:
+        """The client_id used when interacting with this provider."""
+        return self._config.client_id
+
     async def load_metadata(self, force: bool = False) -> OpenIDProviderMetadata:
         """Return the provider metadata.
@@ -661,6 +801,59 @@ class OidcProvider:

         return UserInfo(resp)

+    async def _verify_jwt(
+        self,
+        alg_values: List[str],
+        token: str,
+        claims_cls: Type[C],
+        claims_options: Optional[dict] = None,
+        claims_params: Optional[dict] = None,
+    ) -> C:
+        """Decode and validate a JWT, re-fetching the JWKS as needed.
+
+        Args:
+            alg_values: list of `alg` values allowed when verifying the JWT.
+            token: the JWT.
+            claims_cls: the JWTClaims class to use to validate the claims.
+            claims_options: dict of options passed to the `claims_cls` constructor.
+            claims_params: dict of params passed to the `claims_cls` constructor.
+
+        Returns:
+            The decoded claims in the JWT.
+        """
+        jwt = JsonWebToken(alg_values)
+
+        logger.debug("Attempting to decode JWT (%s) %r", claims_cls.__name__, token)
+
+        # Try to decode the keys in cache first, then retry by forcing the keys
+        # to be reloaded
+        jwk_set = await self.load_jwks()
+        try:
+            claims = jwt.decode(
+                token,
+                key=jwk_set,
+                claims_cls=claims_cls,
+                claims_options=claims_options,
+                claims_params=claims_params,
+            )
+        except ValueError:
+            logger.info("Reloading JWKS after decode error")
+            jwk_set = await self.load_jwks(force=True)  # try reloading the jwks
+            claims = jwt.decode(
+                token,
+                key=jwk_set,
+                claims_cls=claims_cls,
+                claims_options=claims_options,
+                claims_params=claims_params,
+            )
+
+        logger.debug("Decoded JWT (%s) %r; validating", claims_cls.__name__, claims)
+
+        claims.validate(
+            now=self._clock.time(), leeway=120
+        )  # allows 2 min of clock skew
+        return claims
+
     async def _parse_id_token(self, token: Token, nonce: str) -> CodeIDToken:
         """Return an instance of UserInfo from token's ``id_token``.
@@ -673,7 +866,14 @@ class OidcProvider:
         Returns:
             The decoded claims in the ID token.
         """
+        id_token = token.get("id_token")
+
+        # That has been theoritically been checked by the caller, so even though
+        # assertion are not enabled in production, it is mainly here to appease mypy
+        assert id_token is not None
+
         metadata = await self.load_metadata()
+
         claims_params = {
             "nonce": nonce,
             "client_id": self._client_auth.client_id,
@@ -683,39 +883,17 @@ class OidcProvider:
             # in the `id_token` that we can check against.
             claims_params["access_token"] = token["access_token"]

+        claims_options = {"iss": {"values": [metadata["issuer"]]}}
+
         alg_values = metadata.get("id_token_signing_alg_values_supported", ["RS256"])
-        jwt = JsonWebToken(alg_values)
-
-        claim_options = {"iss": {"values": [metadata["issuer"]]}}
-
-        id_token = token["id_token"]
-        logger.debug("Attempting to decode JWT id_token %r", id_token)
-
-        # Try to decode the keys in cache first, then retry by forcing the keys
-        # to be reloaded
-        jwk_set = await self.load_jwks()
-        try:
-            claims = jwt.decode(
-                id_token,
-                key=jwk_set,
-                claims_cls=CodeIDToken,
-                claims_options=claim_options,
-                claims_params=claims_params,
-            )
-        except ValueError:
-            logger.info("Reloading JWKS after decode error")
-            jwk_set = await self.load_jwks(force=True)  # try reloading the jwks
-            claims = jwt.decode(
-                id_token,
-                key=jwk_set,
-                claims_cls=CodeIDToken,
-                claims_options=claim_options,
-                claims_params=claims_params,
-            )
-
-        logger.debug("Decoded id_token JWT %r; validating", claims)
-
-        claims.validate(leeway=120)  # allows 2 min of clock skew
+        claims = await self._verify_jwt(
+            alg_values=alg_values,
+            token=id_token,
+            claims_cls=CodeIDToken,
+            claims_options=claims_options,
+            claims_params=claims_params,
+        )

         return claims
@ -1036,6 +1214,146 @@ class OidcProvider:
|
|||
# to be strings.
|
||||
return str(remote_user_id)
|
||||
|
||||
async def handle_backchannel_logout(
|
||||
self, request: SynapseRequest, logout_token: str
|
||||
) -> None:
|
||||
"""Handle an incoming request to /_synapse/client/oidc/backchannel_logout
|
||||
|
||||
The OIDC Provider posts a logout token to this endpoint when a user
|
||||
session ends. That token is a JWT signed with the same keys as
|
||||
ID tokens. The OpenID Connect Back-Channel Logout draft explains how to
|
||||
validate the JWT and figure out what session to end.
|
||||
|
||||
Args:
|
||||
request: The request to respond to
|
||||
logout_token: The logout token (a JWT) extracted from the request body
|
||||
"""
|
||||
# Back-Channel Logout can be disabled in the config, hence this check.
|
||||
# This is not that important for now since Synapse is registered
|
||||
# manually to the OP, so not specifying the backchannel-logout URI is
|
||||
# as effective than disabling it here. It might make more sense if we
|
||||
# support dynamic registration in Synapse at some point.
|
||||
if not self._config.backchannel_logout_enabled:
|
||||
logger.warning(
|
||||
f"Received an OIDC Back-Channel Logout request from issuer {self.issuer!r} but it is disabled in config"
|
||||
)
|
||||
|
||||
# TODO: this responds with a 400 status code, which is what the OIDC
|
||||
# Back-Channel Logout spec expects, but spec also suggests answering with
|
||||
# a JSON object, with the `error` and `error_description` fields set, which
|
||||
# we are not doing here.
|
||||
# See https://openid.net/specs/openid-connect-backchannel-1_0.html#BCResponse
|
||||
raise SynapseError(
|
||||
400, "OpenID Connect Back-Channel Logout is disabled for this provider"
|
||||
)
|
||||
|
||||
metadata = await self.load_metadata()
|
||||
|
||||
# As per OIDC Back-Channel Logout 1.0 sec. 2.4:
|
||||
# A Logout Token MUST be signed and MAY also be encrypted. The same
|
||||
# keys are used to sign and encrypt Logout Tokens as are used for ID
|
||||
# Tokens. If the Logout Token is encrypted, it SHOULD replicate the
|
||||
# iss (issuer) claim in the JWT Header Parameters, as specified in
|
||||
# Section 5.3 of [JWT].
|
||||
alg_values = metadata.get("id_token_signing_alg_values_supported", ["RS256"])
|
||||
|
||||
# As per sec. 2.6:
|
||||
# 3. Validate the iss, aud, and iat Claims in the same way they are
|
||||
# validated in ID Tokens.
|
||||
# Which means the audience should contain Synapse's client_id and the
|
||||
# issuer should be the IdP issuer
|
||||
claims_options = {
|
||||
"iss": {"values": [metadata["issuer"]]},
|
||||
"aud": {"values": [self.client_id]},
|
||||
}
|
||||
|
||||
try:
|
||||
claims = await self._verify_jwt(
|
||||
alg_values=alg_values,
|
||||
token=logout_token,
|
||||
claims_cls=LogoutToken,
|
||||
claims_options=claims_options,
|
||||
)
|
||||
except JoseError:
|
||||
logger.exception("Invalid logout_token")
|
||||
raise SynapseError(400, "Invalid logout_token")
|
||||
|
||||
# As per sec. 2.6:
|
||||
# 4. Verify that the Logout Token contains a sub Claim, a sid Claim,
|
||||
# or both.
|
||||
# 5. Verify that the Logout Token contains an events Claim whose
|
||||
# value is JSON object containing the member name
|
||||
# http://schemas.openid.net/event/backchannel-logout.
|
||||
# 6. Verify that the Logout Token does not contain a nonce Claim.
|
||||
# This is all verified by the LogoutToken claims class, so at this
|
||||
# point the `sid` claim exists and is a string.
|
||||
sid: str = claims.get("sid")
|
||||
|
||||
# If the `sub` claim was included in the logout token, we check that it matches
|
||||
# that it matches the right user. We can have cases where the `sub` claim is not
|
||||
# the ID saved in database, so we let admins disable this check in config.
|
||||
sub: Optional[str] = claims.get("sub")
|
||||
expected_user_id: Optional[str] = None
|
||||
if sub is not None and not self._config.backchannel_logout_ignore_sub:
|
||||
expected_user_id = await self._store.get_user_by_external_id(
|
||||
self.idp_id, sub
|
||||
)
|
||||
|
||||
# Invalidate any running user-mapping sessions, in-flight login tokens and
|
||||
# active devices
|
||||
await self._sso_handler.revoke_sessions_for_provider_session_id(
|
||||
auth_provider_id=self.idp_id,
|
||||
auth_provider_session_id=sid,
|
||||
expected_user_id=expected_user_id,
|
||||
)
|
||||
|
||||
request.setResponseCode(200)
|
||||
request.setHeader(b"Cache-Control", b"no-cache, no-store")
|
||||
request.setHeader(b"Pragma", b"no-cache")
|
||||
finish_request(request)
|
||||
|
||||
|
||||
class LogoutToken(JWTClaims):
    """
    Holds and verifies the claims of a logout token, as per
    https://openid.net/specs/openid-connect-backchannel-1_0.html#LogoutToken
    """

    REGISTERED_CLAIMS = ["iss", "sub", "aud", "iat", "jti", "events", "sid"]

    def validate(self, now: Optional[int] = None, leeway: int = 0) -> None:
        """Validate everything in the claims payload."""
        super().validate(now, leeway)
        self.validate_sid()
        self.validate_events()
        self.validate_nonce()

    def validate_sid(self) -> None:
        """Ensure the sid claim is present"""
        sid = self.get("sid")
        if not sid:
            raise MissingClaimError("sid")

        if not isinstance(sid, str):
            raise InvalidClaimError("sid")

    def validate_nonce(self) -> None:
        """Ensure the nonce claim is absent"""
        if "nonce" in self:
            raise InvalidClaimError("nonce")

    def validate_events(self) -> None:
        """Ensure the events claim is present and has the right value"""
        events = self.get("events")
        if not events:
            raise MissingClaimError("events")

        if not isinstance(events, dict):
            raise InvalidClaimError("events")

        if "http://schemas.openid.net/event/backchannel-logout" not in events:
            raise InvalidClaimError("events")


# number of seconds a newly-generated client secret should be valid for
CLIENT_SECRET_VALIDITY_SECONDS = 3600
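For context, here is a minimal sketch of how a claims class like `LogoutToken` can be exercised directly with authlib. The key set, token, issuer and client ID values are hypothetical stand-ins; Synapse's actual plumbing goes through `_verify_jwt` as shown above.

```python
from authlib.jose import JsonWebToken

def check_logout_token(logout_token: str, jwk_set, issuer: str, client_id: str) -> str:
    # Decode with the LogoutToken claims class so validate() enforces the
    # sid/events/nonce rules from the back-channel logout spec.
    jwt = JsonWebToken(["RS256"])
    claims = jwt.decode(
        logout_token,
        jwk_set,  # the IdP's public keys, e.g. fetched from its jwks_uri
        claims_cls=LogoutToken,
        claims_options={
            "iss": {"values": [issuer]},
            "aud": {"values": [client_id]},
        },
    )
    claims.validate()  # raises MissingClaimError / InvalidClaimError on failure
    return claims["sid"]
```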
@@ -1105,6 +1423,7 @@ class JwtClientSecret:
        logger.info(
            "Generating new JWT for %s: %s %s", self._oauth_issuer, header, payload
        )
        jwt = JsonWebToken(header["alg"])
        self._cached_secret = jwt.encode(header, payload, self._key.key)
        self._cached_secret_replacement_time = (
            expires_at - CLIENT_SECRET_MIN_VALIDITY_SECONDS

@@ -1119,9 +1438,6 @@ class UserAttributeDict(TypedDict):
    emails: List[str]


C = TypeVar("C")


class OidcMappingProvider(Generic[C]):
    """A mapping provider maps a UserInfo object to user attributes.
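The surrounding class (mostly elided by this hunk) caches the generated client secret and regenerates it only when it is close to expiring. A condensed, illustrative sketch of that caching rule, with hypothetical names where the diff does not show them:

```python
import time

class CachedClientSecret:
    """Illustrative only: regenerate a credential once it comes within
    MIN_VALIDITY seconds of expiring, mirroring the logic in the diff."""

    MIN_VALIDITY = 300

    def __init__(self, make_secret):
        self._make_secret = make_secret  # callable returning (secret, expires_at)
        self._cached_secret = None
        self._cached_secret_replacement_time = 0.0

    def secret(self) -> str:
        now = time.time()
        if self._cached_secret is None or now > self._cached_secret_replacement_time:
            secret, expires_at = self._make_secret()
            self._cached_secret = secret
            # Replace the secret a safety margin before it actually expires.
            self._cached_secret_replacement_time = expires_at - self.MIN_VALIDITY
        return self._cached_secret
```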
@@ -307,7 +307,11 @@ class ProfileHandler:
        if not self.max_avatar_size and not self.allowed_avatar_mimetypes:
            return True

        server_name, _, media_id = parse_and_validate_mxc_uri(mxc)
        host, port, media_id = parse_and_validate_mxc_uri(mxc)
        if port is not None:
            server_name = host + ":" + str(port)
        else:
            server_name = host

        if server_name == self.server_name:
            media_info = await self.store.get_local_media(media_id)
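The change above switches `parse_and_validate_mxc_uri` to return the host and port separately, so the caller reassembles the server name. The same logic in miniature, assuming the tuple shape shown in the diff:

```python
from typing import Optional

def mxc_server_name(host: str, port: Optional[int]) -> str:
    # An MXC URI like mxc://example.com:8448/abc parses to ("example.com", 8448, "abc")
    return f"{host}:{port}" if port is not None else host

assert mxc_server_name("example.com", None) == "example.com"
assert mxc_server_name("example.com", 8448) == "example.com:8448"
```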
@@ -49,7 +49,6 @@ from synapse.api.constants import (
from synapse.api.errors import (
    AuthError,
    Codes,
    HttpResponseException,
    LimitExceededError,
    NotFoundError,
    StoreError,

@@ -60,7 +59,6 @@ from synapse.api.room_versions import KNOWN_ROOM_VERSIONS, RoomVersion
from synapse.event_auth import validate_event_for_room_version
from synapse.events import EventBase
from synapse.events.utils import copy_and_fixup_power_levels_contents
from synapse.federation.federation_client import InvalidResponseError
from synapse.handlers.relations import BundledAggregations
from synapse.module_api import NOT_SPAM
from synapse.rest.admin._base import assert_user_is_admin
@@ -1070,9 +1068,6 @@ class RoomCreationHandler:
        event_keys = {"room_id": room_id, "sender": creator_id, "state_key": ""}
        depth = 1

        # the last event sent/persisted to the db
        last_sent_event_id: Optional[str] = None

        # the most recently created event
        prev_event: List[str] = []
        # a map of event types, state keys -> event_ids. We collect these mappings as events are

@@ -1117,26 +1112,6 @@ class RoomCreationHandler:

            return new_event, new_context

        async def send(
            event: EventBase,
            context: synapse.events.snapshot.EventContext,
            creator: Requester,
        ) -> int:
            nonlocal last_sent_event_id

            ev = await self.event_creation_handler.handle_new_client_event(
                requester=creator,
                events_and_context=[(event, context)],
                ratelimit=False,
                ignore_shadow_ban=True,
            )

            last_sent_event_id = ev.event_id

            # we know it was persisted, so must have a stream ordering
            assert ev.internal_metadata.stream_ordering
            return ev.internal_metadata.stream_ordering

        try:
            config = self._presets_dict[preset_config]
        except KeyError:

@@ -1150,10 +1125,14 @@ class RoomCreationHandler:
        )

        logger.debug("Sending %s in new room", EventTypes.Member)
        await send(creation_event, creation_context, creator)
        ev = await self.event_creation_handler.handle_new_client_event(
            requester=creator,
            events_and_context=[(creation_event, creation_context)],
            ratelimit=False,
            ignore_shadow_ban=True,
        )
        last_sent_event_id = ev.event_id

        # Room create event must exist at this point
        assert last_sent_event_id is not None
        member_event_id, _ = await self.room_member_handler.update_membership(
            creator,
            creator.user,

@@ -1172,6 +1151,7 @@ class RoomCreationHandler:
        depth += 1
        state_map[(EventTypes.Member, creator.user.to_string())] = member_event_id

        events_to_send = []
        # We treat the power levels override specially as this needs to be one
        # of the first events that get sent into a room.
        pl_content = initial_state.pop((EventTypes.PowerLevels, ""), None)

@@ -1180,7 +1160,7 @@ class RoomCreationHandler:
                EventTypes.PowerLevels, pl_content, False
            )
            current_state_group = power_context._state_group
            await send(power_event, power_context, creator)
            events_to_send.append((power_event, power_context))
        else:
            power_level_content: JsonDict = {
                "users": {creator_id: 9001},

@@ -1229,9 +1209,8 @@ class RoomCreationHandler:
                False,
            )
            current_state_group = pl_context._state_group
            await send(pl_event, pl_context, creator)
            events_to_send.append((pl_event, pl_context))

        events_to_send = []
        if room_alias and (EventTypes.CanonicalAlias, "") not in initial_state:
            room_alias_event, room_alias_context = await create_event(
                EventTypes.CanonicalAlias, {"alias": room_alias.to_string()}, True
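The refactor above replaces the per-event `send()` helper with an `events_to_send` list so the initial room state can be persisted as one batch. A schematic of the new shape; `create_event` and `handle_new_client_event` are the helpers visible in the diff, other arguments are elided, and the final batched persist happens later in the method than this hunk shows:

```python
# Schematic only: collect (event, context) pairs, then persist them together.
events_to_send = []

power_event, power_context = await create_event(
    EventTypes.PowerLevels, pl_content, False
)
events_to_send.append((power_event, power_context))

# ... further state events are appended the same way, then sent as one batch:
await self.event_creation_handler.handle_new_client_event(
    requester=creator,
    events_and_context=events_to_send,
    ratelimit=False,
    ignore_shadow_ban=True,
)
```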
@@ -1509,7 +1488,12 @@ class TimestampLookupHandler:
        Raises:
            SynapseError if unable to find any event locally in the given direction
        """

        logger.debug(
            "get_event_for_timestamp(room_id=%s, timestamp=%s, direction=%s) Finding closest event...",
            room_id,
            timestamp,
            direction,
        )
        local_event_id = await self.store.get_event_id_for_timestamp(
            room_id, timestamp, direction
        )
@@ -1561,85 +1545,54 @@ class TimestampLookupHandler:
            )
        )

        # Loop through each homeserver candidate until we get a successful response
        for domain in likely_domains:
            # We don't want to ask our own server for information we don't have
            if domain == self.server_name:
                continue
        remote_response = await self.federation_client.timestamp_to_event(
            destinations=likely_domains,
            room_id=room_id,
            timestamp=timestamp,
            direction=direction,
        )
        if remote_response is not None:
            logger.debug(
                "get_event_for_timestamp: remote_response=%s",
                remote_response,
            )

            try:
                remote_response = await self.federation_client.timestamp_to_event(
                    domain, room_id, timestamp, direction
                )
                logger.debug(
                    "get_event_for_timestamp: response from domain(%s)=%s",
                    domain,
                    remote_response,
            remote_event_id = remote_response.event_id
            remote_origin_server_ts = remote_response.origin_server_ts

            # Backfill this event so we can get a pagination token for
            # it with `/context` and paginate `/messages` from this
            # point.
            pulled_pdu_info = await self.federation_event_handler.backfill_event_id(
                likely_domains, room_id, remote_event_id
            )
            remote_event = pulled_pdu_info.pdu

            # XXX: When we see that the remote server is not trustworthy,
            # maybe we should not ask them first in the future.
            if remote_origin_server_ts != remote_event.origin_server_ts:
                logger.info(
                    "get_event_for_timestamp: Remote server (%s) claimed that remote_event_id=%s occurred at remote_origin_server_ts=%s but that isn't true (actually occurred at %s). Their claims are dubious and we should consider not trusting them.",
                    pulled_pdu_info.pull_origin,
                    remote_event_id,
                    remote_origin_server_ts,
                    remote_event.origin_server_ts,
                )

                remote_event_id = remote_response.event_id
                remote_origin_server_ts = remote_response.origin_server_ts

                # Backfill this event so we can get a pagination token for
                # it with `/context` and paginate `/messages` from this
                # point.
                #
                # TODO: The requested timestamp may lie in a part of the
                # event graph that the remote server *also* didn't have,
                # in which case they will have returned another event
                # which may be nowhere near the requested timestamp. In
                # the future, we may need to reconcile that gap and ask
                # other homeservers, and/or extend `/timestamp_to_event`
                # to return events on *both* sides of the timestamp to
                # help reconcile the gap faster.
                remote_event = (
                    await self.federation_event_handler.backfill_event_id(
                        domain, room_id, remote_event_id
                    )
                )

                # XXX: When we see that the remote server is not trustworthy,
                # maybe we should not ask them first in the future.
                if remote_origin_server_ts != remote_event.origin_server_ts:
                    logger.info(
                        "get_event_for_timestamp: Remote server (%s) claimed that remote_event_id=%s occurred at remote_origin_server_ts=%s but that isn't true (actually occurred at %s). Their claims are dubious and we should consider not trusting them.",
                        domain,
                        remote_event_id,
                        remote_origin_server_ts,
                        remote_event.origin_server_ts,
                    )

            # Only return the remote event if it's closer than the local event
            if not local_event or (
                abs(remote_event.origin_server_ts - timestamp)
                < abs(local_event.origin_server_ts - timestamp)
            ):
                logger.info(
                    "get_event_for_timestamp: returning remote_event_id=%s (%s) since it's closer to timestamp=%s than local_event=%s (%s)",
                    remote_event_id,
                    remote_event.origin_server_ts,
                    timestamp,
                    local_event.event_id if local_event else None,
                    local_event.origin_server_ts if local_event else None,
                )
                return remote_event_id, remote_origin_server_ts
            except (HttpResponseException, InvalidResponseError) as ex:
                # Let's not put a high priority on some other homeserver
                # failing to respond or giving a random response
                logger.debug(
                    "get_event_for_timestamp: Failed to fetch /timestamp_to_event from %s because of exception(%s) %s args=%s",
                    domain,
                    type(ex).__name__,
                    ex,
                    ex.args,
                )
            except Exception:
                # But we do want to see some exceptions in our code
                logger.warning(
                    "get_event_for_timestamp: Failed to fetch /timestamp_to_event from %s because of exception",
                    domain,
                    exc_info=True,
                # Only return the remote event if it's closer than the local event
                if not local_event or (
                    abs(remote_event.origin_server_ts - timestamp)
                    < abs(local_event.origin_server_ts - timestamp)
                ):
                    logger.info(
                        "get_event_for_timestamp: returning remote_event_id=%s (%s) since it's closer to timestamp=%s than local_event=%s (%s)",
                        remote_event_id,
                        remote_event.origin_server_ts,
                        timestamp,
                        local_event.event_id if local_event else None,
                        local_event.origin_server_ts if local_event else None,
                    )
                    return remote_event_id, remote_origin_server_ts

        # To appease mypy, we have to add both of these conditions to check for
        # `None`. We only expect `local_event` to be `None` when
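The new code above pushes the try-each-domain loop down into `federation_client.timestamp_to_event`, which now takes the whole `destinations` list. The underlying pattern, reduced to a sketch with hypothetical names:

```python
import logging

logger = logging.getLogger(__name__)

async def first_successful(destinations, request_one):
    """Try each destination in turn, returning the first successful
    response, or None if every candidate fails."""
    for domain in destinations:
        try:
            return await request_one(domain)
        except Exception:
            # A single flaky homeserver shouldn't abort the whole lookup.
            logger.debug("timestamp_to_event failed via %s", domain, exc_info=True)
    return None
```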
@@ -191,6 +191,7 @@ class SsoHandler:
        self._server_name = hs.hostname
        self._registration_handler = hs.get_registration_handler()
        self._auth_handler = hs.get_auth_handler()
        self._device_handler = hs.get_device_handler()
        self._error_template = hs.config.sso.sso_error_template
        self._bad_user_template = hs.config.sso.sso_auth_bad_user_template
        self._profile_handler = hs.get_profile_handler()
@@ -1026,6 +1027,76 @@ class SsoHandler:

        return True

    async def revoke_sessions_for_provider_session_id(
        self,
        auth_provider_id: str,
        auth_provider_session_id: str,
        expected_user_id: Optional[str] = None,
    ) -> None:
        """Revoke any devices and in-flight logins tied to a provider session.

        Args:
            auth_provider_id: A unique identifier for this SSO provider, e.g.
                "oidc" or "saml".
            auth_provider_session_id: The session ID from the provider to logout
            expected_user_id: The user we're expecting to logout. If set, it will
                ignore sessions belonging to other users and log an error.
        """
        # Invalidate any running user-mapping sessions
        to_delete = []
        for session_id, session in self._username_mapping_sessions.items():
            if (
                session.auth_provider_id == auth_provider_id
                and session.auth_provider_session_id == auth_provider_session_id
            ):
                to_delete.append(session_id)

        for session_id in to_delete:
            logger.info("Revoking mapping session %s", session_id)
            del self._username_mapping_sessions[session_id]

        # Invalidate any in-flight login tokens
        await self._store.invalidate_login_tokens_by_session_id(
            auth_provider_id=auth_provider_id,
            auth_provider_session_id=auth_provider_session_id,
        )

        # Fetch any device(s) in the store associated with the session ID.
        devices = await self._store.get_devices_by_auth_provider_session_id(
            auth_provider_id=auth_provider_id,
            auth_provider_session_id=auth_provider_session_id,
        )

        # We have no guarantee that all the devices of that session are for the
        # same `user_id`. Hence, we have to iterate over the list of devices and
        # log them out one by one.
        for device in devices:
            user_id = device["user_id"]
            device_id = device["device_id"]

            # If the user_id associated with that device/session is not the one
            # we got out of the `sub` claim, skip that device and log an error.
            if expected_user_id is not None and user_id != expected_user_id:
                logger.error(
                    "Received a logout notification from SSO provider "
                    f"{auth_provider_id!r} for the user {expected_user_id!r}, but with "
                    f"a session ID ({auth_provider_session_id!r}) which belongs to "
                    f"{user_id!r}. This may happen when the SSO provider user mapper "
                    "uses something else than the standard attribute as mapping ID. "
                    "For OIDC providers, set `backchannel_logout_ignore_sub` to `true` "
                    "in the provider config if that is the case."
                )
                continue

            logger.info(
                "Logging out %r (device %r) via SSO (%r) logout notification (session %r).",
                user_id,
                device_id,
                auth_provider_id,
                auth_provider_session_id,
            )
            await self._device_handler.delete_devices(user_id, [device_id])


def get_username_mapping_session_cookie_from_request(request: IRequest) -> str:
    """Extract the session ID from the cookie
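A minimal model of the device filtering performed above: devices sharing a provider session ID may belong to different users, so each one is checked against the expected user before being logged out. The data shapes follow the diff; the user IDs are invented for illustration.

```python
devices = [
    {"user_id": "@alice:example.com", "device_id": "DEV1"},
    {"user_id": "@mallory:example.com", "device_id": "DEV2"},
]
expected_user_id = "@alice:example.com"

# Only devices belonging to the expected user are revoked; others would be
# logged as errors and skipped, as in revoke_sessions_for_provider_session_id.
to_log_out = [
    d["device_id"]
    for d in devices
    if expected_user_id is None or d["user_id"] == expected_user_id
]
assert to_log_out == ["DEV1"]
```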
@@ -25,7 +25,6 @@ from typing import (
    List,
    Mapping,
    Optional,
    Sequence,
    Tuple,
    Union,
)
@@ -90,14 +89,29 @@ incoming_responses_counter = Counter(
    "synapse_http_client_responses", "", ["method", "code"]
)

# the type of the headers list, to be passed to the t.w.h.Headers.
# Actually we can mix str and bytes keys, but Mapping treats 'key' as invariant so
# we simplify.
# the type of the headers map, to be passed to the t.w.h.Headers.
#
# The actual type accepted by Twisted is
#    Mapping[Union[str, bytes], Sequence[Union[str, bytes]]],
# allowing us to mix and match str and bytes freely. However: any str is also a
# Sequence[str]; passing a header string value which is a standalone str is
# interpreted as a sequence of 1-codepoint strings. This is a disastrous footgun.
# We use a narrower value type (RawHeaderValue) to avoid this footgun.
#
# We also simplify the keys to be either all str or all bytes. This helps because
# Dict[K, V] is invariant in K (and indeed V).
RawHeaders = Union[Mapping[str, "RawHeaderValue"], Mapping[bytes, "RawHeaderValue"]]

# the value actually has to be a List, but List is invariant so we can't specify that
# the entries can either be Lists or bytes.
RawHeaderValue = Sequence[Union[str, bytes]]
RawHeaderValue = Union[
    List[str],
    List[bytes],
    List[Union[str, bytes]],
    Tuple[str, ...],
    Tuple[bytes, ...],
    Tuple[Union[str, bytes], ...],
]


def check_against_blacklist(
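The footgun described in that comment is easy to reproduce: `str` is itself a `Sequence[str]`, so a bare string header value type-checks against `Sequence[Union[str, bytes]]` but means something very different to Twisted. A small demonstration with the real `Headers` class:

```python
from twisted.web.http_headers import Headers

# Correct: the value is a one-element list.
good = Headers({b"User-Agent": [b"Synapse"]})
assert good.getRawHeaders(b"User-Agent") == [b"Synapse"]

# The trap the narrower RawHeaderValue union prevents: a bare str is a
# Sequence[str] of single characters, i.e. seven one-character "values".
bad_value = "Synapse"
assert list(bad_value) == ["S", "y", "n", "a", "p", "s", "e"]
```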
@@ -174,8 +174,10 @@ class _BackgroundProcess:
        diff = new_stats - self._reported_stats
        self._reported_stats = new_stats

        _background_process_ru_utime.labels(self.desc).inc(diff.ru_utime)
        _background_process_ru_stime.labels(self.desc).inc(diff.ru_stime)
        # For unknown reasons, the difference in times can be negative. See comment in
        # synapse.http.request_metrics.RequestMetrics.update_metrics.
        _background_process_ru_utime.labels(self.desc).inc(max(diff.ru_utime, 0))
        _background_process_ru_stime.labels(self.desc).inc(max(diff.ru_stime, 0))
        _background_process_db_txn_count.labels(self.desc).inc(diff.db_txn_count)
        _background_process_db_txn_duration.labels(self.desc).inc(
            diff.db_txn_duration_sec
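Prometheus counters must be monotonic, and `prometheus_client` enforces this by raising on negative increments, which is why the deltas are clamped above. For illustration (the metric name here is invented):

```python
from prometheus_client import Counter

ru_utime = Counter("example_ru_utime_seconds", "CPU seconds", ["name"])

diff = -0.004  # resource-usage deltas can occasionally come out negative
ru_utime.labels("some_process").inc(max(diff, 0))  # safe: increments by 0

try:
    ru_utime.labels("some_process").inc(diff)
except ValueError:
    pass  # counters can only be incremented by non-negative amounts
```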
@@ -771,50 +771,11 @@ class ModuleApi:
            auth_provider_session_id: The session ID obtained during login from the
                SSO IdP, if any.
        """
        # The deprecated `generate_short_term_login_token` method defaulted to an empty
        # string for the `auth_provider_id` because of how the underlying macaroon was
        # generated. This will change to a proper NULL-able field when the tokens get
        # moved to the database.
        return self._hs.get_macaroon_generator().generate_short_term_login_token(
        return await self._hs.get_auth_handler().create_login_token_for_user_id(
            user_id,
            auth_provider_id or "",
            auth_provider_session_id,
            duration_in_ms,
        )

    def generate_short_term_login_token(
        self,
        user_id: str,
        duration_in_ms: int = (2 * 60 * 1000),
        auth_provider_id: str = "",
        auth_provider_session_id: Optional[str] = None,
    ) -> str:
        """Generate a login token suitable for m.login.token authentication

        Added in Synapse v1.9.0.

        This was deprecated in Synapse v1.69.0 in favor of create_login_token, and will
        be removed in Synapse 1.71.0.

        Args:
            user_id: gives the ID of the user that the token is for

            duration_in_ms: the time that the token will be valid for

            auth_provider_id: the ID of the SSO IdP that the user used to authenticate
                to get this token, if any. This is encoded in the token so that
                /login can report stats on number of successful logins by IdP.
        """
        logger.warn(
            "A module configured on this server uses ModuleApi.generate_short_term_login_token(), "
            "which is deprecated in favor of ModuleApi.create_login_token(), and will be removed in "
            "Synapse 1.71.0",
        )
        return self._hs.get_macaroon_generator().generate_short_term_login_token(
            user_id,
            auth_provider_id,
            auth_provider_session_id,
            duration_in_ms,
        )

    @defer.inlineCallbacks
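For module authors, the migration implied by this hunk looks roughly like the following sketch; `module_api` is the `ModuleApi` instance handed to a module at start-up, and the keyword names mirror those shown in the diff:

```python
# Before (deprecated, removed in Synapse 1.71.0):
#     token = module_api.generate_short_term_login_token(user_id)

# After: create_login_token is a coroutine, so it must be awaited.
async def issue_login_token(module_api, user_id: str) -> str:
    return await module_api.create_login_token(
        user_id,
        duration_in_ms=2 * 60 * 1000,
    )
```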
@@ -28,7 +28,7 @@ from typing import (

from prometheus_client import Counter

from synapse.api.constants import MAIN_TIMELINE, EventTypes, Membership, RelationTypes
from synapse.api.constants import MAIN_TIMELINE, EventTypes, Membership, RelationTypes, EventContentFields
from synapse.event_auth import auth_types_for_event, get_user_power_level
from synapse.events import EventBase, relation_from_event
from synapse.events.snapshot import EventContext

@@ -45,7 +45,6 @@ if TYPE_CHECKING:

logger = logging.getLogger(__name__)


push_rules_invalidation_counter = Counter(
    "synapse_push_bulk_push_rule_evaluator_push_rules_invalidation_counter", ""
)

@@ -107,6 +106,8 @@ class BulkPushRuleEvaluator:
        self.clock = hs.get_clock()
        self._event_auth_handler = hs.get_event_auth_handler()

        self._related_event_match_enabled = self.hs.config.experimental.msc3664_enabled

        self.room_push_rule_cache_metrics = register_cache(
            "cache",
            "room_push_rule_cache",
@@ -165,8 +166,21 @@ class BulkPushRuleEvaluator:
        return rules_by_user

    async def _get_power_levels_and_sender_level(
        self, event: EventBase, context: EventContext
        self,
        event: EventBase,
        context: EventContext,
        event_id_to_event: Mapping[str, EventBase],
    ) -> Tuple[dict, Optional[int]]:
        """
        Given an event and an event context, get the power level event relevant to the
        event and the power level of the sender of the event.

        Args:
            event: event to check
            context: context of event to check
            event_id_to_event: a mapping of event_id to event for a set of events being
                batch persisted. This is needed as the sought-after power level event may
                be in this batch rather than the DB
        """
        # There are no power levels and sender levels possible to get from outlier
        if event.internal_metadata.is_outlier():
            return {}, None

@@ -177,15 +191,26 @@ class BulkPushRuleEvaluator:
        )
        pl_event_id = prev_state_ids.get(POWER_KEY)

        # fastpath: if there's a power level event, that's all we need, and
        # not having a power level event is an extreme edge case
        if pl_event_id:
            # fastpath: if there's a power level event, that's all we need, and
            # not having a power level event is an extreme edge case
            auth_events = {POWER_KEY: await self.store.get_event(pl_event_id)}
            # Get the power level event from the batch, or fall back to the database.
            pl_event = event_id_to_event.get(pl_event_id)
            if pl_event:
                auth_events = {POWER_KEY: pl_event}
            else:
                auth_events = {POWER_KEY: await self.store.get_event(pl_event_id)}
        else:
            auth_events_ids = self._event_auth_handler.compute_auth_events(
                event, prev_state_ids, for_verification=False
            )
            auth_events_dict = await self.store.get_events(auth_events_ids)
            # Some needed auth events might be in the batch, combine them with those
            # fetched from the database.
            for auth_event_id in auth_events_ids:
                auth_event = event_id_to_event.get(auth_event_id)
                if auth_event:
                    auth_events_dict[auth_event_id] = auth_event
            auth_events = {(e.type, e.state_key): e for e in auth_events_dict.values()}

        sender_level = get_user_power_level(event.sender, auth_events)
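The lookup rule added above, isolated: prefer an event from the in-flight batch (it may not be persisted yet), then fall back to the database. As a sketch:

```python
from typing import Mapping

async def get_event_preferring_batch(store, event_id_to_event: Mapping, event_id: str):
    """Return the event from the not-yet-persisted batch if present,
    otherwise fetch it from the store."""
    event = event_id_to_event.get(event_id)
    if event is not None:
        return event
    return await store.get_event(event_id)
```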
@@ -194,16 +219,81 @@ class BulkPushRuleEvaluator:

        return pl_event.content if pl_event else {}, sender_level

    @measure_func("action_for_event_by_user")
    async def action_for_event_by_user(
        self, event: EventBase, context: EventContext
    ) -> None:
        """Given an event and context, evaluate the push rules, check if the message
        should increment the unread count, and insert the results into the
        event_push_actions_staging table.
    async def _related_events(self, event: EventBase) -> Dict[str, Dict[str, str]]:
        """Fetches the related events for 'event'. Sets the im.vector.is_falling_back
        key if the event is from a fallback relation.

        Returns:
            Mapping of relation type to flattened events.
        """
        if not event.internal_metadata.is_notifiable():
            # Push rules for events that aren't notifiable can't be processed by this
        related_events: Dict[str, Dict[str, str]] = {}
        if self._related_event_match_enabled:
            related_event_id = event.content.get("m.relates_to", {}).get("event_id")
            relation_type = event.content.get("m.relates_to", {}).get("rel_type")
            if related_event_id is not None and relation_type is not None:
                related_event = await self.store.get_event(
                    related_event_id, allow_none=True
                )
                if related_event is not None:
                    related_events[relation_type] = _flatten_dict(related_event)

            reply_event_id = (
                event.content.get("m.relates_to", {})
                .get("m.in_reply_to", {})
                .get("event_id")
            )

            # convert replies to pseudo relations
            if reply_event_id is not None:
                related_event = await self.store.get_event(
                    reply_event_id, allow_none=True
                )

                if related_event is not None:
                    related_events["m.in_reply_to"] = _flatten_dict(related_event)

                    # indicate that this is from a fallback relation.
                    if relation_type == "m.thread" and event.content.get(
                        "m.relates_to", {}
                    ).get("is_falling_back", False):
                        related_events["m.in_reply_to"][
                            "im.vector.is_falling_back"
                        ] = ""

        return related_events

    async def action_for_events_by_user(
        self, events_and_context: List[Tuple[EventBase, EventContext]]
    ) -> None:
        """Given a list of events and their associated contexts, evaluate the push rules
        for each event, check if the message should increment the unread count, and
        insert the results into the event_push_actions_staging table.
        """
        # For batched events the power level events may not have been persisted yet,
        # so we pass in the batched events. Thus if the event cannot be found in the
        # database we can check in the batch.
        event_id_to_event = {e.event_id: e for e, _ in events_and_context}
        for event, context in events_and_context:
            await self._action_for_event_by_user(event, context, event_id_to_event)

    @measure_func("action_for_event_by_user")
    async def _action_for_event_by_user(
        self,
        event: EventBase,
        context: EventContext,
        event_id_to_event: Mapping[str, EventBase],
    ) -> None:

        if (
            not event.internal_metadata.is_notifiable()
            or event.internal_metadata.is_historical()
            or event.content.get(EventContentFields.MSC2716_HISTORICAL)
        ):
            # Push rules for events that aren't notifiable can't be processed by this and
            # we want to skip push notification actions for historical messages
            # because we don't want to notify people about old history back in time.
            # The historical messages also do not have the proper `context.current_state_ids`
            # and `state_groups` because they have `prev_events` that aren't persisted yet
            # (historical messages persisted in reverse-chronological order).
            return

        # Disable counting as unread unless the experimental configuration is
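The content shapes that `_related_events` pulls apart look like the following worked example; the event IDs are invented:

```python
content = {
    "m.relates_to": {
        "rel_type": "m.thread",
        "event_id": "$thread_root",  # the related (thread root) event
        "is_falling_back": True,     # the reply is only a thread fallback
        "m.in_reply_to": {"event_id": "$last_event_in_thread"},
    }
}

relates_to = content.get("m.relates_to", {})
assert relates_to.get("event_id") == "$thread_root"
assert relates_to.get("rel_type") == "m.thread"
# Replies are converted to a pseudo "m.in_reply_to" relation:
assert relates_to.get("m.in_reply_to", {}).get("event_id") == "$last_event_in_thread"
# ...and the thread-fallback case is what sets im.vector.is_falling_back above.
assert relates_to.get("is_falling_back", False) is True
```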
@@ -223,7 +313,9 @@ class BulkPushRuleEvaluator:
        (
            power_levels,
            sender_power_level,
        ) = await self._get_power_levels_and_sender_level(event, context)
        ) = await self._get_power_levels_and_sender_level(
            event, context, event_id_to_event
        )

        # Find the event's thread ID.
        relation = relation_from_event(event)
@@ -238,6 +330,8 @@ class BulkPushRuleEvaluator:
            # the parent is part of a thread.
            thread_id = await self.store.get_thread_id(relation.parent_id)

        related_events = await self._related_events(event)

        # It's possible that old room versions have non-integer power levels (floats or
        # strings). Workaround this by explicitly converting to int.
        notification_levels = power_levels.get("notifications", {})
@@ -250,6 +344,8 @@ class BulkPushRuleEvaluator:
            room_member_count,
            sender_power_level,
            notification_levels,
            related_events,
            self._related_event_match_enabled,
        )

        users = rules_by_user.keys()

29
synapse/res/templates/_base.html
Normal file
@@ -0,0 +1,29 @@
<!DOCTYPE html>
<html lang="en">
<head>
    <meta charset="UTF-8">
    <meta http-equiv="X-UA-Compatible" content="IE=edge">
    <meta name="viewport" content="width=device-width, initial-scale=1.0">
    <title>{% block title %}{% endblock %}</title>
    <style type="text/css">
        {%- include 'style.css' without context %}
    </style>
    {% block header %}{% endblock %}
</head>
<body>
    <header class="mx_Header">
        {% if app_name == "Riot" %}
            <img src="http://riot.im/img/external/riot-logo-email.png" width="83" height="83" alt="[Riot]"/>
        {% elif app_name == "Vector" %}
            <img src="http://matrix.org/img/vector-logo-email.png" width="64" height="83" alt="[Vector]"/>
        {% elif app_name == "Element" %}
            <img src="https://static.element.io/images/email-logo.png" width="83" height="83" alt="[Element]"/>
        {% else %}
            <img src="http://matrix.org/img/matrix-120x51.png" width="120" height="51" alt="[matrix]"/>
        {% endif %}
    </header>

{% block body %}{% endblock %}

</body>
</html>
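A quick way to see the new base template in action is to render a child template with a plain Jinja2 environment. This is a standalone sketch, not how Synapse wires its templates: the child template name is hypothetical, the directory is assumed to contain the included `style.css`, and Synapse's `format_ts` filter is stubbed here because Jinja2 does not provide it.

```python
import time
from jinja2 import Environment, FileSystemLoader

env = Environment(loader=FileSystemLoader("synapse/res/templates"))
# Stub of Synapse's format_ts filter (milliseconds -> formatted date).
env.filters["format_ts"] = lambda ts, fmt: time.strftime(fmt, time.localtime(ts / 1000))

# Hypothetical child template, e.g. the account-validity page in the next hunk.
template = env.get_template("account_valid.html")
print(template.render(app_name="Element", expiration_ts=1893456000000))
```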
@@ -1,12 +1,6 @@
<!DOCTYPE html>
<html lang="en">
<head>
    <meta charset="UTF-8">
    <meta http-equiv="X-UA-Compatible" content="IE=edge">
    <meta name="viewport" content="width=device-width, initial-scale=1.0">
    <title>Your account is valid until {{ expiration_ts|format_ts("%d-%m-%Y") }}.</title>
</head>
<body>
    Your account is valid until {{ expiration_ts|format_ts("%d-%m-%Y") }}.
</body>
</html>
{% extends "_base.html" %}
{% block title %}Your account is valid until {{ expiration_ts|format_ts("%d-%m-%Y") }}.{% endblock %}

{% block body %}
<p>Your account is valid until {{ expiration_ts|format_ts("%d-%m-%Y") }}.</p>
{% endblock %}
@@ -1,12 +1,6 @@
<!DOCTYPE html>
<html lang="en">
<head>
    <meta charset="UTF-8">
    <meta http-equiv="X-UA-Compatible" content="IE=edge">
    <meta name="viewport" content="width=device-width, initial-scale=1.0">
    <title>Your account has been successfully renewed and is valid until {{ expiration_ts|format_ts("%d-%m-%Y") }}.</title>
</head>
<body>
    Your account has been successfully renewed and is valid until {{ expiration_ts|format_ts("%d-%m-%Y") }}.
</body>
</html>
{% extends "_base.html" %}
{% block title %}Your account has been successfully renewed and is valid until {{ expiration_ts|format_ts("%d-%m-%Y") }}.{% endblock %}

{% block body %}
<p>Your account has been successfully renewed and is valid until {{ expiration_ts|format_ts("%d-%m-%Y") }}.</p>
{% endblock %}
@@ -1,14 +1,8 @@
<!DOCTYPE html>
<html lang="en">
<head>
    <meta charset="UTF-8">
    <meta http-equiv="X-UA-Compatible" content="IE=edge">
    <meta name="viewport" content="width=device-width, initial-scale=1.0">
    <title>Request to add an email address to your Matrix account</title>
</head>
<body>
    <p>A request to add an email address to your Matrix account has been received. If this was you, please click the link below to confirm adding this email:</p>
    <a href="{{ link }}">{{ link }}</a>
    <p>If this was not you, you can safely ignore this email. Thank you.</p>
</body>
</html>
{% extends "_base.html" %}
{% block title %}Request to add an email address to your Matrix account{% endblock %}

{% block body %}
<p>A request to add an email address to your Matrix account has been received. If this was you, please click the link below to confirm adding this email:</p>
<a href="{{ link }}">{{ link }}</a>
<p>If this was not you, you can safely ignore this email. Thank you.</p>
{% endblock %}
@@ -1,13 +1,7 @@
<!DOCTYPE html>
<html lang="en">
<head>
    <meta charset="UTF-8">
    <meta http-equiv="X-UA-Compatible" content="IE=edge">
    <meta name="viewport" content="width=device-width, initial-scale=1.0">
    <title>Request failed</title>
</head>
<body>
    <p>The request failed for the following reason: {{ failure_reason }}.</p>
    <p>No changes have been made to your account.</p>
</body>
</html>
{% extends "_base.html" %}
{% block title %}Request failed{% endblock %}

{% block body %}
<p>The request failed for the following reason: {{ failure_reason }}.</p>
<p>No changes have been made to your account.</p>
{% endblock %}
@@ -1,12 +1,6 @@
<!DOCTYPE html>
<html lang="en">
<head>
    <meta charset="UTF-8">
    <meta http-equiv="X-UA-Compatible" content="IE=edge">
    <meta name="viewport" content="width=device-width, initial-scale=1.0">
    <title>Your email has now been validated</title>
</head>
<body>
    <p>Your email has now been validated, please return to your client. You may now close this window.</p>
</body>
</html>
{% extends "_base.html" %}
{% block title %}Your email has now been validated{% endblock %}

{% block body %}
<p>Your email has now been validated, please return to your client. You may now close this window.</p>
{% endblock %}
@@ -1,21 +1,21 @@
<html>
<head>
    <title>Success!</title>
    <meta http-equiv="X-UA-Compatible" content="IE=edge">
    <meta name="viewport" content="width=device-width, initial-scale=1.0">
{% extends "_base.html" %}
{% block title %}Success!{% endblock %}

{% block header %}
    <link rel="stylesheet" href="/_matrix/static/client/register/style.css">
    <script>
    if (window.onAuthDone) {
        window.onAuthDone();
    } else if (window.opener && window.opener.postMessage) {
        window.opener.postMessage("authDone", "*");
        window.opener.postMessage("authDone", "*");
    }
    </script>
</head>
<body>
    <div>
        <p>Thank you</p>
        <p>You may now close this window and return to the application</p>
    </div>
</body>
</html>
{% endblock %}

{% block body %}
    <div>
        <p>Thank you</p>
        <p>You may now close this window and return to the application</p>
    </div>
{% endblock %}
@@ -1,12 +1,5 @@
<!DOCTYPE html>
<html lang="en">
<head>
    <meta charset="UTF-8">
    <meta http-equiv="X-UA-Compatible" content="IE=edge">
    <meta name="viewport" content="width=device-width, initial-scale=1.0">
    <title>Invalid renewal token.</title>
</head>
<body>
    Invalid renewal token.
</body>
</html>
{% block title %}Invalid renewal token.{% endblock %}

{% block body %}
<p>Invalid renewal token.</p>
{% endblock %}
@@ -1,47 +1,46 @@
<!doctype html>
<html lang="en">
<head>
    <meta http-equiv="X-UA-Compatible" content="IE=edge">
    <meta name="viewport" content="width=device-width, initial-scale=1.0">
    <style type="text/css">
        {% include 'mail.css' without context %}
        {% include "mail-%s.css" % app_name ignore missing without context %}
        {% include 'mail-expiry.css' without context %}
    </style>
</head>
<body>
    <table id="page">
        <tr>
            <td> </td>
            <td id="inner">
                <table class="header">
                    <tr>
                        <td>
                            <div class="salutation">Hi {{ display_name }},</div>
                        </td>
                        <td class="logo">
                            {% if app_name == "Riot" %}
                                <img src="http://riot.im/img/external/riot-logo-email.png" width="83" height="83" alt="[Riot]"/>
                            {% elif app_name == "Vector" %}
                                <img src="http://matrix.org/img/vector-logo-email.png" width="64" height="83" alt="[Vector]"/>
                            {% elif app_name == "Element" %}
                                <img src="https://static.element.io/images/email-logo.png" width="83" height="83" alt="[Element]"/>
                            {% else %}
                                <img src="http://matrix.org/img/matrix-120x51.png" width="120" height="51" alt="[matrix]"/>
                            {% endif %}
                        </td>
                    </tr>
                    <tr>
                        <td colspan="2">
                            <div class="noticetext">Your account will expire on {{ expiration_ts|format_ts("%d-%m-%Y") }}. This means that you will lose access to your account after this date.</div>
                            <div class="noticetext">To extend the validity of your account, please click on the link below (or copy and paste it into a new browser tab):</div>
                            <div class="noticetext"><a href="{{ url }}">{{ url }}</a></div>
                        </td>
                    </tr>
                </table>
            </td>
            <td> </td>
        </tr>
    </table>
</body>
</html>
{% extends "_base.html" %}
{% block title %}Notice of expiry{% endblock %}

{% block header %}
    <style type="text/css">
        {% include 'mail.css' without context %}
        {% include "mail-%s.css" % app_name ignore missing without context %}
        {% include 'mail-expiry.css' without context %}
    </style>
{% endblock %}

{% block body %}
    <table id="page">
        <tr>
            <td> </td>
            <td id="inner">
                <table class="header">
                    <tr>
                        <td>
                            <div class="salutation">Hi {{ display_name }},</div>
                        </td>
                        <td class="logo">
                            {% if app_name == "Riot" %}
                                <img src="http://riot.im/img/external/riot-logo-email.png" width="83" height="83" alt="[Riot]"/>
                            {% elif app_name == "Vector" %}
                                <img src="http://matrix.org/img/vector-logo-email.png" width="64" height="83" alt="[Vector]"/>
                            {% elif app_name == "Element" %}
                                <img src="https://static.element.io/images/email-logo.png" width="83" height="83" alt="[Element]"/>
                            {% else %}
                                <img src="http://matrix.org/img/matrix-120x51.png" width="120" height="51" alt="[matrix]"/>
                            {% endif %}
                        </td>
                    </tr>
                    <tr>
                        <td colspan="2">
                            <div class="noticetext">Your account will expire on {{ expiration_ts|format_ts("%d-%m-%Y") }}. This means that you will lose access to your account after this date.</div>
                            <div class="noticetext">To extend the validity of your account, please click on the link below (or copy and paste it into a new browser tab):</div>
                            <div class="noticetext"><a href="{{ url }}">{{ url }}</a></div>
                        </td>
                    </tr>
                </table>
            </td>
            <td> </td>
        </tr>
    </table>
{% endblock %}
@@ -1,59 +1,57 @@
<!doctype html>
<html lang="en">
<head>
    <meta http-equiv="X-UA-Compatible" content="IE=edge">
    <meta name="viewport" content="width=device-width, initial-scale=1.0">
    <style type="text/css">
        {%- include 'mail.css' without context %}
        {%- include "mail-%s.css" % app_name ignore missing without context %}
    </style>
</head>
<body>
    <table id="page">
        <tr>
            <td> </td>
            <td id="inner">
                <table class="header">
                    <tr>
                        <td>
                            <div class="salutation">Hi {{ user_display_name }},</div>
                            <div class="summarytext">{{ summary_text }}</div>
                        </td>
                        <td class="logo">
                            {%- if app_name == "Riot" %}
                                <img src="http://riot.im/img/external/riot-logo-email.png" width="83" height="83" alt="[Riot]"/>
                            {%- elif app_name == "Vector" %}
                                <img src="http://matrix.org/img/vector-logo-email.png" width="64" height="83" alt="[Vector]"/>
                            {%- elif app_name == "Element" %}
                                <img src="https://static.element.io/images/email-logo.png" width="83" height="83" alt="[Element]"/>
                            {%- else %}
                                <img src="http://matrix.org/img/matrix-120x51.png" width="120" height="51" alt="[matrix]"/>
                            {%- endif %}
                        </td>
                    </tr>
                </table>
                {%- for room in rooms %}
                    {%- include 'room.html' with context %}
                {%- endfor %}
                <div class="footer">
                    <a href="{{ unsubscribe_link }}">Unsubscribe</a>
                    <br/>
                    <br/>
                    <div class="debug">
                        Sending email at {{ reason.now|format_ts("%c") }} due to activity in room {{ reason.room_name }} because
                        an event was received at {{ reason.received_at|format_ts("%c") }}
                        which is more than {{ "%.1f"|format(reason.delay_before_mail_ms / (60*1000)) }} ({{ reason.delay_before_mail_ms }}) mins ago,
                        {%- if reason.last_sent_ts %}
                        and the last time we sent a mail for this room was {{ reason.last_sent_ts|format_ts("%c") }},
                        which is more than {{ "%.1f"|format(reason.throttle_ms / (60*1000)) }} (current throttle_ms) mins ago.
                        {%- else %}
                        and we don't have a last time we sent a mail for this room.
                        {%- endif %}
                    </div>
                </div>
            </td>
            <td> </td>
        </tr>
    </table>
</body>
</html>
{% block title %}New activity in room{% endblock %}

{% block header %}
    <style type="text/css">
        {%- include 'mail.css' without context %}
        {%- include "mail-%s.css" % app_name ignore missing without context %}
    </style>
{% endblock %}

{% block body %}
    <table id="page">
        <tr>
            <td> </td>
            <td id="inner">
                <table class="header">
                    <tr>
                        <td>
                            <div class="salutation">Hi {{ user_display_name }},</div>
                            <div class="summarytext">{{ summary_text }}</div>
                        </td>
                        <td class="logo">
                            {%- if app_name == "Riot" %}
                                <img src="http://riot.im/img/external/riot-logo-email.png" width="83" height="83" alt="[Riot]"/>
                            {%- elif app_name == "Vector" %}
                                <img src="http://matrix.org/img/vector-logo-email.png" width="64" height="83" alt="[Vector]"/>
                            {%- elif app_name == "Element" %}
                                <img src="https://static.element.io/images/email-logo.png" width="83" height="83" alt="[Element]"/>
                            {%- else %}
                                <img src="http://matrix.org/img/matrix-120x51.png" width="120" height="51" alt="[matrix]"/>
                            {%- endif %}
                        </td>
                    </tr>
                </table>
                {%- for room in rooms %}
                    {%- include 'room.html' with context %}
                {%- endfor %}
                <div class="footer">
                    <a href="{{ unsubscribe_link }}">Unsubscribe</a>
                    <br/>
                    <br/>
                    <div class="debug">
                        Sending email at {{ reason.now|format_ts("%c") }} due to activity in room {{ reason.room_name }} because
                        an event was received at {{ reason.received_at|format_ts("%c") }}
                        which is more than {{ "%.1f"|format(reason.delay_before_mail_ms / (60*1000)) }} ({{ reason.delay_before_mail_ms }}) mins ago,
                        {%- if reason.last_sent_ts %}
                        and the last time we sent a mail for this room was {{ reason.last_sent_ts|format_ts("%c") }},
                        which is more than {{ "%.1f"|format(reason.throttle_ms / (60*1000)) }} (current throttle_ms) mins ago.
                        {%- else %}
                        and we don't have a last time we sent a mail for this room.
                        {%- endif %}
                    </div>
                </div>
            </td>
            <td> </td>
        </tr>
    </table>
{% endblock %}
@@ -1,14 +1,9 @@
<html lang="en">
<head>
    <title>Password reset</title>
    <meta http-equiv="X-UA-Compatible" content="IE=edge">
    <meta name="viewport" content="width=device-width, initial-scale=1.0">
</head>
<body>
    <p>A password reset request has been received for your Matrix account. If this was you, please click the link below to confirm resetting your password:</p>
{% block title %}Password reset{% endblock %}

    <a href="{{ link }}">{{ link }}</a>
{% block body %}
<p>A password reset request has been received for your Matrix account. If this was you, please click the link below to confirm resetting your password:</p>

    <p>If this was not you, <strong>do not</strong> click the link above and instead contact your server administrator. Thank you.</p>
</body>
</html>
<a href="{{ link }}">{{ link }}</a>

<p>If this was not you, <strong>do not</strong> click the link above and instead contact your server administrator. Thank you.</p>
{% endblock %}
@@ -1,10 +1,6 @@
<html lang="en">
<head>
    <title>Password reset confirmation</title>
    <meta http-equiv="X-UA-Compatible" content="IE=edge">
    <meta name="viewport" content="width=device-width, initial-scale=1.0">
</head>
<body>
{% block title %}Password reset confirmation{% endblock %}

{% block body %}
<!--Use a hidden form to resubmit the information necessary to reset the password-->
<form method="post">
    <input type="hidden" name="sid" value="{{ sid }}">

@@ -15,6 +11,4 @@
    If you did not mean to do this, please close this page and your password will not be changed.</p>
    <p><button type="submit">Confirm changing my password</button></p>
</form>
</body>
</html>

{% endblock %}
@@ -1,12 +1,6 @@
<html lang="en">
<head>
    <title>Password reset failure</title>
    <meta http-equiv="X-UA-Compatible" content="IE=edge">
    <meta name="viewport" content="width=device-width, initial-scale=1.0">
</head>
<body>
    <p>The request failed for the following reason: {{ failure_reason }}.</p>
{% block title %}Password reset failure{% endblock %}

{% block body %}
<p>The request failed for the following reason: {{ failure_reason }}.</p>
<p>Your password has not been reset.</p>
</body>
</html>
{% endblock %}
@@ -1,9 +1,5 @@
<html lang="en">
<head>
    <meta http-equiv="X-UA-Compatible" content="IE=edge">
    <meta name="viewport" content="width=device-width, initial-scale=1.0">
</head>
<body>
{% block title %}Password reset success{% endblock %}

{% block body %}
<p>Your email has now been validated, please return to your client to reset your password. You may now close this window.</p>
</body>
</html>
{% endblock %}
@@ -1,10 +1,7 @@
<html>
<head>
    <title>Authentication</title>
    <meta http-equiv="X-UA-Compatible" content="IE=edge">
    <meta name="viewport" content="width=device-width, initial-scale=1.0">
    <script src="https://www.recaptcha.net/recaptcha/api.js"
        async defer></script>
{% block title %}Authentication{% endblock %}

{% block header %}
    <script src="https://www.recaptcha.net/recaptcha/api.js" async defer></script>
    <script src="//code.jquery.com/jquery-1.11.2.min.js"></script>
    <link rel="stylesheet" href="/_matrix/static/client/register/style.css">
    <script>

@@ -12,8 +9,9 @@ function captchaDone() {
        $('#registrationForm').submit();
    }
    </script>
</head>
<body>
{% endblock %}

{% block body %}
<form id="registrationForm" method="post" action="{{ myurl }}">
    <div>
        {% if error is defined %}

@@ -37,5 +35,4 @@ function captchaDone() {
        </div>
    </div>
</form>
</body>
</html>
{% endblock %}
@@ -1,16 +1,11 @@
<html lang="en">
<head>
    <title>Registration</title>
    <meta http-equiv="X-UA-Compatible" content="IE=edge">
    <meta name="viewport" content="width=device-width, initial-scale=1.0">
</head>
<body>
    <p>You have asked us to register this email with a new Matrix account. If this was you, please click the link below to confirm your email address:</p>
{% block title %}Registration{% endblock %}

    <a href="{{ link }}">Verify Your Email Address</a>
{% block body %}
<p>You have asked us to register this email with a new Matrix account. If this was you, please click the link below to confirm your email address:</p>

    <p>If this was not you, you can safely disregard this email.</p>
<a href="{{ link }}">Verify Your Email Address</a>

    <p>Thank you.</p>
</body>
</html>
<p>If this was not you, you can safely disregard this email.</p>

<p>Thank you.</p>
{% endblock %}
@@ -1,9 +1,5 @@
<html lang="en">
<head>
    <meta http-equiv="X-UA-Compatible" content="IE=edge">
    <meta name="viewport" content="width=device-width, initial-scale=1.0">
</head>
<body>
{% block title %}Registration failure{% endblock %}

{% block body %}
<p>Validation failed for the following reason: {{ failure_reason }}.</p>
</body>
</html>
{% endblock %}
@@ -1,10 +1,5 @@
<html lang="en">
<head>
    <title>Your email has now been validated</title>
    <meta http-equiv="X-UA-Compatible" content="IE=edge">
    <meta name="viewport" content="width=device-width, initial-scale=1.0">
</head>
<body>
{% block title %}Your email has now been validated{% endblock %}

{% block body %}
<p>Your email has now been validated, please return to your client. You may now close this window.</p>
</body>
</html>
{% endblock %}
@@ -1,11 +1,10 @@
<html lang="en">
<head>
    <title>Authentication</title>
    <meta http-equiv="X-UA-Compatible" content="IE=edge">
    <meta name="viewport" content="width=device-width, initial-scale=1.0">
{% block title %}Authentication{% endblock %}

{% block header %}
    <link rel="stylesheet" href="/_matrix/static/client/register/style.css">
</head>
<body>
{% endblock %}

{% block body %}
<form id="registrationForm" method="post" action="{{ myurl }}">
    <div>
        {% if error is defined %}

@@ -19,5 +18,4 @@
        <input type="submit" value="Authenticate" />
    </div>
</form>
</body>
</html>
{% endblock %}
@@ -1,25 +1,24 @@
<!DOCTYPE html>
<html lang="en">
<head>
    <meta charset="UTF-8">
    <title>SSO account deactivated</title>
    <meta http-equiv="X-UA-Compatible" content="IE=edge">
    <meta name="viewport" content="width=device-width, initial-scale=1.0">
    <style type="text/css">
        {% include "sso.css" without context %}
    </style>
</head>
<body class="error_page">
    <header>
        <h1>Your account has been deactivated</h1>
        <p>
            <strong>No account found</strong>
        </p>
        <p>
            Your account might have been deactivated by the server administrator.
            You can either try to create a new account or contact the server’s
            administrator.
        </p>
    </header>
    {% include "sso_footer.html" without context %}
</body>
</html>
{% block title %}SSO account deactivated{% endblock %}

{% block header %}
    <style type="text/css">
        {% include "sso.css" without context %}
    </style>
{% endblock %}

{% block body %}
<div class="error_page">
    <header>
        <h1>Your account has been deactivated</h1>
        <p>
            <strong>No account found</strong>
        </p>
        <p>
            Your account might have been deactivated by the server administrator.
            You can either try to create a new account or contact the server’s
            administrator.
        </p>
    </header>
</div>
{% include "sso_footer.html" without context %}
{% endblock %}
@@ -1,189 +1,185 @@
<!DOCTYPE html>
<html lang="en">
<head>
    <title>Create your account</title>
    <meta charset="utf-8">
    <meta http-equiv="X-UA-Compatible" content="IE=edge">
    <meta name="viewport" content="width=device-width, initial-scale=1.0">
    <script type="text/javascript">
        let wasKeyboard = false;
        document.addEventListener("mousedown", function() { wasKeyboard = false; });
        document.addEventListener("keydown", function() { wasKeyboard = true; });
        document.addEventListener("focusin", function() {
            if (wasKeyboard) {
                document.body.classList.add("keyboard-focus");
            } else {
                document.body.classList.remove("keyboard-focus");
            }
        });
    </script>
    <style type="text/css">
        {% include "sso.css" without context %}
{% block title %}Create your account{% endblock %}

        body.keyboard-focus :focus, body.keyboard-focus .username_input:focus-within {
            outline: 3px solid #17191C;
            outline-offset: 4px;
        }
{% block header %}
    <script type="text/javascript">
        let wasKeyboard = false;
        document.addEventListener("mousedown", function() { wasKeyboard = false; });
        document.addEventListener("keydown", function() { wasKeyboard = true; });
        document.addEventListener("focusin", function() {
            if (wasKeyboard) {
                document.body.classList.add("keyboard-focus");
            } else {
                document.body.classList.remove("keyboard-focus");
            }
        });
    </script>
    <style type="text/css">
        {% include "sso.css" without context %}

        .username_input {
            display: flex;
            border: 2px solid #418DED;
            border-radius: 8px;
            padding: 12px;
            position: relative;
            margin: 16px 0;
            align-items: center;
            font-size: 12px;
        }
        body.keyboard-focus :focus, body.keyboard-focus .username_input:focus-within {
            outline: 3px solid #17191C;
            outline-offset: 4px;
        }

        .username_input.invalid {
            border-color: #FE2928;
        }
        .username_input {
            display: flex;
            border: 2px solid #418DED;
            border-radius: 8px;
            padding: 12px;
            position: relative;
            margin: 16px 0;
            align-items: center;
            font-size: 12px;
        }

        .username_input.invalid input, .username_input.invalid label {
            color: #FE2928;
        }
        .username_input.invalid {
            border-color: #FE2928;
        }

        .username_input div, .username_input input {
            line-height: 18px;
            font-size: 14px;
        }
        .username_input.invalid input, .username_input.invalid label {
            color: #FE2928;
        }

        .username_input label {
            position: absolute;
            top: -5px;
            left: 14px;
            font-size: 10px;
            line-height: 10px;
            background: white;
            padding: 0 2px;
        }
        .username_input div, .username_input input {
            line-height: 18px;
            font-size: 14px;
        }

        .username_input input {
            flex: 1;
            display: block;
            min-width: 0;
            border: none;
        }
        .username_input label {
            position: absolute;
            top: -5px;
            left: 14px;
            font-size: 10px;
            line-height: 10px;
            background: white;
            padding: 0 2px;
        }

        /* only clear the outline if we know it will be shown on the parent div using :focus-within */
        @supports selector(:focus-within) {
            .username_input input {
                outline: none !important;
            }
        }
        .username_input input {
            flex: 1;
            display: block;
            min-width: 0;
            border: none;
        }

        .username_input div {
            color: #8D99A5;
        }
        /* only clear the outline if we know it will be shown on the parent div using :focus-within */
        @supports selector(:focus-within) {
            .username_input input {
                outline: none !important;
            }
        }

        .idp-pick-details {
            border: 1px solid #E9ECF1;
            border-radius: 8px;
            margin: 24px 0;
        }
        .username_input div {
            color: #8D99A5;
        }

        .idp-pick-details h2 {
            margin: 0;
            padding: 8px 12px;
        }
        .idp-pick-details {
            border: 1px solid #E9ECF1;
            border-radius: 8px;
            margin: 24px 0;
        }

        .idp-pick-details .idp-detail {
            border-top: 1px solid #E9ECF1;
            padding: 12px;
            display: block;
        }
        .idp-pick-details .check-row {
            display: flex;
            align-items: center;
        }
        .idp-pick-details h2 {
            margin: 0;
            padding: 8px 12px;
        }

        .idp-pick-details .check-row .name {
            flex: 1;
        }
        .idp-pick-details .idp-detail {
            border-top: 1px solid #E9ECF1;
            padding: 12px;
            display: block;
        }
        .idp-pick-details .check-row {
            display: flex;
            align-items: center;
        }

        .idp-pick-details .use, .idp-pick-details .idp-value {
            color: #737D8C;
        }
        .idp-pick-details .check-row .name {
            flex: 1;
        }

        .idp-pick-details .idp-value {
            margin: 0;
            margin-top: 8px;
        }
        .idp-pick-details .use, .idp-pick-details .idp-value {
            color: #737D8C;
        }

        .idp-pick-details .avatar {
            width: 53px;
            height: 53px;
            border-radius: 100%;
            display: block;
            margin-top: 8px;
        }
        .idp-pick-details .idp-value {
            margin: 0;
            margin-top: 8px;
        }

        output {
            padding: 0 14px;
            display: block;
        }
        .idp-pick-details .avatar {
            width: 53px;
            height: 53px;
            border-radius: 100%;
            display: block;
            margin-top: 8px;
        }

        output.error {
            color: #FE2928;
        }
    </style>
</head>
<body>
    <header>
        <h1>Create your account</h1>
        <p>This is required. Continue to create your account on {{ server_name }}. You can't change this later.</p>
    </header>
    <main>
        <form method="post" class="form__input" id="form">
            <div class="username_input" id="username_input">
                <label for="field-username">Username (required)</label>
                <div class="prefix">@</div>
                <input type="text" name="username" id="field-username" value="{{ user_attributes.localpart }}" autofocus autocorrect="off" autocapitalize="none">
                <div class="postfix">:{{ server_name }}</div>
        output {
            padding: 0 14px;
            display: block;
        }

        output.error {
            color: #FE2928;
        }
    </style>
{% endblock %}

{% block body %}
    <header>
        <h1>Create your account</h1>
        <p>This is required. Continue to create your account on {{ server_name }}. You can't change this later.</p>
    </header>
    <main>
        <form method="post" class="form__input" id="form">
            <div class="username_input" id="username_input">
                <label for="field-username">Username (required)</label>
                <div class="prefix">@</div>
                <input type="text" name="username" id="field-username" value="{{ user_attributes.localpart }}" autofocus autocorrect="off" autocapitalize="none">
                <div class="postfix">:{{ server_name }}</div>
            </div>
            <output for="username_input" id="field-username-output"></output>
|
||||
<input type="submit" value="Continue" class="primary-button">
|
||||
{% if user_attributes.avatar_url or user_attributes.display_name or user_attributes.emails %}
|
||||
<section class="idp-pick-details">
|
||||
<h2>{% if idp.idp_icon %}<img src="{{ idp.idp_icon | mxc_to_http(24, 24) }}"/>{% endif %}Optional data from {{ idp.idp_name }}</h2>
|
||||
{% if user_attributes.avatar_url %}
|
||||
<label class="idp-detail idp-avatar" for="idp-avatar">
|
||||
<div class="check-row">
|
||||
<span class="name">Avatar</span>
|
||||
<span class="use">Use</span>
|
||||
<input type="checkbox" name="use_avatar" id="idp-avatar" value="true" checked>
|
||||
</div>
|
||||
<output for="username_input" id="field-username-output"></output>
|
||||
<input type="submit" value="Continue" class="primary-button">
|
||||
{% if user_attributes.avatar_url or user_attributes.display_name or user_attributes.emails %}
|
||||
<section class="idp-pick-details">
|
||||
<h2>{% if idp.idp_icon %}<img src="{{ idp.idp_icon | mxc_to_http(24, 24) }}"/>{% endif %}Optional data from {{ idp.idp_name }}</h2>
|
||||
{% if user_attributes.avatar_url %}
|
||||
<label class="idp-detail idp-avatar" for="idp-avatar">
|
||||
<div class="check-row">
|
||||
<span class="name">Avatar</span>
|
||||
<span class="use">Use</span>
|
||||
<input type="checkbox" name="use_avatar" id="idp-avatar" value="true" checked>
|
||||
</div>
|
||||
<img src="{{ user_attributes.avatar_url }}" class="avatar" />
|
||||
</label>
|
||||
{% endif %}
|
||||
{% if user_attributes.display_name %}
|
||||
<label class="idp-detail" for="idp-displayname">
|
||||
<div class="check-row">
|
||||
<span class="name">Display name</span>
|
||||
<span class="use">Use</span>
|
||||
<input type="checkbox" name="use_display_name" id="idp-displayname" value="true" checked>
|
||||
</div>
|
||||
<p class="idp-value">{{ user_attributes.display_name }}</p>
|
||||
</label>
|
||||
{% endif %}
|
||||
{% for email in user_attributes.emails %}
|
||||
<label class="idp-detail" for="idp-email{{ loop.index }}">
|
||||
<div class="check-row">
|
||||
<span class="name">E-mail</span>
|
||||
<span class="use">Use</span>
|
||||
<input type="checkbox" name="use_email" id="idp-email{{ loop.index }}" value="{{ email }}" checked>
|
||||
</div>
|
||||
<p class="idp-value">{{ email }}</p>
|
||||
</label>
|
||||
{% endfor %}
|
||||
</section>
|
||||
{% endif %}
|
||||
</form>
|
||||
</main>
|
||||
{% include "sso_footer.html" without context %}
|
||||
<script type="text/javascript">
|
||||
{% include "sso_auth_account_details.js" without context %}
|
||||
</script>
|
||||
</body>
|
||||
</html>
|
||||
<img src="{{ user_attributes.avatar_url }}" class="avatar" />
|
||||
</label>
|
||||
{% endif %}
|
||||
{% if user_attributes.display_name %}
|
||||
<label class="idp-detail" for="idp-displayname">
|
||||
<div class="check-row">
|
||||
<span class="name">Display name</span>
|
||||
<span class="use">Use</span>
|
||||
<input type="checkbox" name="use_display_name" id="idp-displayname" value="true" checked>
|
||||
</div>
|
||||
<p class="idp-value">{{ user_attributes.display_name }}</p>
|
||||
</label>
|
||||
{% endif %}
|
||||
{% for email in user_attributes.emails %}
|
||||
<label class="idp-detail" for="idp-email{{ loop.index }}">
|
||||
<div class="check-row">
|
||||
<span class="name">E-mail</span>
|
||||
<span class="use">Use</span>
|
||||
<input type="checkbox" name="use_email" id="idp-email{{ loop.index }}" value="{{ email }}" checked>
|
||||
</div>
|
||||
<p class="idp-value">{{ email }}</p>
|
||||
</label>
|
||||
{% endfor %}
|
||||
</section>
|
||||
{% endif %}
|
||||
</form>
|
||||
</main>
|
||||
{% include "sso_footer.html" without context %}
|
||||
<script type="text/javascript">
|
||||
{% include "sso_auth_account_details.js" without context %}
|
||||
</script>
|
||||
{% endblock %}
|
||||
|
|
|
@ -1,27 +1,25 @@
-<!DOCTYPE html>
-<html lang="en">
-<head>
-    <meta charset="UTF-8">
-    <title>Authentication failed</title>
-    <meta http-equiv="X-UA-Compatible" content="IE=edge">
-    <meta name="viewport" content="width=device-width, initial-scale=1.0">
{% block title %}Authentication failed{% endblock %}

{% block header %}
<style type="text/css">
    {% include "sso.css" without context %}
</style>
-</head>
-<body class="error_page">
{% endblock %}

{% block body %}
<div class="error_page">
    <header>
        <h1>That doesn't look right</h1>
        <p>
            <strong>We were unable to validate your {{ server_name }} account</strong>
            via single sign‑on (SSO), because the SSO Identity
            Provider returned different details than when you logged in.
        </p>
        <p>
            Try the operation again, and ensure that you use the same details on
            the Identity Provider as when you log into your account.
        </p>
    </header>
</div>
{% include "sso_footer.html" without context %}
-</body>
-</html>
{% endblock %}

@ -1,30 +1,26 @@
-<!DOCTYPE html>
-<html lang="en">
-<head>
-    <meta charset="UTF-8">
-    <title>Confirm it's you</title>
-    <meta http-equiv="X-UA-Compatible" content="IE=edge">
-    <meta name="viewport" content="width=device-width, initial-scale=1.0">
{% block title %}Confirm it's you{% endblock %}

{% block header %}
<style type="text/css">
    {% include "sso.css" without context %}
</style>
-</head>
-<body>
{% endblock %}

{% block body %}
<header>
    <h1>Confirm it's you to continue</h1>
    <p>
        A client is trying to {{ description }}. To confirm this action
        re-authorize your account with single sign-on.
    </p>
    <p><strong>
        If you did not expect this, your account may be compromised.
    </strong></p>
</header>
<main>
    <a href="{{ redirect_url }}" class="primary-button">
        Continue with {{ idp.idp_name }}
    </a>
</main>
{% include "sso_footer.html" without context %}
-</body>
-</html>
{% endblock %}

@ -1,29 +1,25 @@
-<!DOCTYPE html>
-<html lang="en">
-<head>
-    <meta charset="UTF-8">
-    <title>Authentication successful</title>
-    <meta http-equiv="X-UA-Compatible" content="IE=edge">
-    <meta name="viewport" content="width=device-width, initial-scale=1.0">
{% block title %}Authentication successful{% endblock %}

{% block header %}
<style type="text/css">
    {% include "sso.css" without context %}
</style>
<script>
    if (window.onAuthDone) {
        window.onAuthDone();
    } else if (window.opener && window.opener.postMessage) {
        window.opener.postMessage("authDone", "*");
    }
</script>
-</head>
-<body>
{% endblock %}

{% block body %}
<header>
    <h1>Thank you</h1>
    <p>
        Now we know it’s you, you can close this window and return to the
        application.
    </p>
</header>
{% include "sso_footer.html" without context %}
-</body>
-</html>
{% endblock %}

@ -1,19 +1,19 @@
-<!DOCTYPE html>
-<html lang="en">
-<head>
-    <meta charset="UTF-8">
-    <title>Authentication failed</title>
-    <meta http-equiv="X-UA-Compatible" content="IE=edge">
-    <meta name="viewport" content="width=device-width, initial-scale=1.0">
-    <style type="text/css">
-        {% include "sso.css" without context %}
-
-        #error_code {
-            margin-top: 56px;
-        }
-    </style>
-</head>
-<body class="error_page">
{% block title %}Authentication failed{% endblock %}

{% block header %}
{% if error == "unauthorised" %}
<style type="text/css">
    {% include "sso.css" without context %}

    #error_code {
        margin-top: 56px;
    }
</style>
{% endif %}
{% endblock %}

{% block body %}
<div class="error_page">
{# If an error of unauthorised is returned it means we have actively rejected their login #}
{% if error == "unauthorised" %}
<header>

@ -66,5 +66,5 @@
    }
</script>
{% endif %}
-</body>
-</html>
</div>
{% endblock %}

@ -1,63 +1,59 @@
-<!DOCTYPE html>
-<html lang="en">
-<head>
-    <meta http-equiv="X-UA-Compatible" content="IE=edge">
-    <meta name="viewport" content="width=device-width, initial-scale=1.0">
-    <meta charset="UTF-8">
-    <title>Choose identity provider</title>
{% block title %}Choose identity provider{% endblock %}

{% block header %}
<style type="text/css">
    {% include "sso.css" without context %}

    .providers {
        list-style: none;
        padding: 0;
    }

    .providers li {
        margin: 12px;
    }

    .providers a {
        display: block;
        border-radius: 4px;
        border: 1px solid #17191C;
        padding: 8px;
        text-align: center;
        text-decoration: none;
        color: #17191C;
        display: flex;
        align-items: center;
        font-weight: bold;
    }

    .providers a img {
        width: 24px;
        height: 24px;
    }

    .providers a span {
        flex: 1;
    }
</style>
-</head>
-<body>
{% endblock %}

{% block body %}
<header>
    <h1>Log in to {{ server_name }} </h1>
    <p>Choose an identity provider to log in</p>
</header>
<main>
    <ul class="providers">
        {% for p in providers %}
        <li>
            <a href="pick_idp?idp={{ p.idp_id }}&redirectUrl={{ redirect_url | urlencode }}">
                {% if p.idp_icon %}
                <img src="{{ p.idp_icon | mxc_to_http(32, 32) }}"/>
                {% endif %}
                <span>{{ p.idp_name }}</span>
            </a>
        </li>
        {% endfor %}
    </ul>
</main>
{% include "sso_footer.html" without context %}
-</body>
-</html>
{% endblock %}

@ -1,33 +1,29 @@
-<!DOCTYPE html>
-<html lang="en">
-<head>
-    <meta charset="UTF-8">
-    <title>Agree to terms and conditions</title>
-    <meta http-equiv="X-UA-Compatible" content="IE=edge">
-    <meta name="viewport" content="width=device-width, initial-scale=1.0">
{% block title %}Agree to terms and conditions{% endblock %}

{% block header %}
<style type="text/css">
    {% include "sso.css" without context %}

    #consent_form {
        margin-top: 56px;
    }
</style>
-</head>
-<body>
{% endblock %}

{% block body %}
<header>
    <h1>Your account is nearly ready</h1>
    <p>Agree to the terms to create your account.</p>
</header>
<main>
    {% include "sso_partial_profile.html" %}
    <form method="post" action="{{my_url}}" id="consent_form">
        <p>
            <input id="accepted_version" type="checkbox" name="accepted_version" value="{{ consent_version }}" required>
            <label for="accepted_version">I have read and agree to the <a href="{{ terms_url }}" target="_blank" rel="noopener">terms and conditions</a>.</label>
        </p>
        <input type="submit" class="primary-button" value="Continue"/>
    </form>
</main>
{% include "sso_footer.html" without context %}
-</body>
-</html>
{% endblock %}

@ -1,41 +1,38 @@
-<!DOCTYPE html>
-<html lang="en">
-<head>
-    <meta charset="UTF-8">
-    <title>Continue to your account</title>
-    <meta http-equiv="X-UA-Compatible" content="IE=edge">
-    <meta name="viewport" content="width=device-width, initial-scale=1.0">
{% block title %}Continue to your account{% endblock %}

{% block header %}
<style type="text/css">
    {% include "sso.css" without context %}

    .confirm-trust {
        margin: 34px 0;
        color: #8D99A5;
    }
    .confirm-trust strong {
        color: #17191C;
    }

    .confirm-trust::before {
        content: "";
        background-image: url('data:image/svg+xml;base64,PHN2ZyB3aWR0aD0iMTgiIGhlaWdodD0iMTgiIHZpZXdCb3g9IjAgMCAxOCAxOCIgZmlsbD0ibm9uZSIgeG1sbnM9Imh0dHA6Ly93d3cudzMub3JnLzIwMDAvc3ZnIj4KPHBhdGggZmlsbC1ydWxlPSJldmVub2RkIiBjbGlwLXJ1bGU9ImV2ZW5vZGQiIGQ9Ik0xNi41IDlDMTYuNSAxMy4xNDIxIDEzLjE0MjEgMTYuNSA5IDE2LjVDNC44NTc4NiAxNi41IDEuNSAxMy4xNDIxIDEuNSA5QzEuNSA0Ljg1Nzg2IDQuODU3ODYgMS41IDkgMS41QzEzLjE0MjEgMS41IDE2LjUgNC44NTc4NiAxNi41IDlaTTcuMjUgOUM3LjI1IDkuNDY1OTYgNy41Njg2OSA5Ljg1NzQ4IDggOS45Njg1VjEyLjM3NUM4IDEyLjkyNzMgOC40NDc3MiAxMy4zNzUgOSAxMy4zNzVIMTAuMTI1QzEwLjY3NzMgMTMuMzc1IDExLjEyNSAxMi45MjczIDExLjEyNSAxMi4zNzVDMTEuMTI1IDExLjgyMjcgMTAuNjc3MyAxMS4zNzUgMTAuMTI1IDExLjM3NUgxMFY5QzEwIDguOTY1NDggOS45OTgyNSA4LjkzMTM3IDkuOTk0ODQgOC44OTc3NkM5Ljk0MzYzIDguMzkzNSA5LjUxNzc3IDggOSA4SDguMjVDNy42OTc3MiA4IDcuMjUgOC40NDc3MiA3LjI1IDlaTTkgNy41QzkuNjIxMzIgNy41IDEwLjEyNSA2Ljk5NjMyIDEwLjEyNSA2LjM3NUMxMC4xMjUgNS43NTM2OCA5LjYyMTMyIDUuMjUgOSA1LjI1QzguMzc4NjggNS4yNSA3Ljg3NSA1Ljc1MzY4IDcuODc1IDYuMzc1QzcuODc1IDYuOTk2MzIgOC4zNzg2OCA3LjUgOSA3LjVaIiBmaWxsPSIjQzFDNkNEIi8+Cjwvc3ZnPgoK');
        background-repeat: no-repeat;
        width: 24px;
        height: 24px;
        display: block;
        float: left;
    }
</style>
-</head>
-<body>
{% endblock %}

{% block body %}
<header>
    <h1>Continue to your account</h1>
</header>
<main>
    {% include "sso_partial_profile.html" %}
    <p class="confirm-trust">Continuing will grant <strong>{{ display_url }}</strong> access to your account.</p>
    <a href="{{ redirect_url }}" class="primary-button">Continue</a>
</main>
{% include "sso_footer.html" without context %}

{% endblock %}

29
synapse/res/templates/style.css
Normal file

@ -0,0 +1,29 @@
html {
    height: 100%;
}

body {
    background: #f9fafb;
    max-width: 680px;
    margin: auto;
    font-family: -apple-system, BlinkMacSystemFont, "Segoe UI", Roboto, Helvetica, Arial, sans-serif, "Apple Color Emoji", "Segoe UI Emoji", "Segoe UI Symbol";
}

.mx_Header {
    border-bottom: 3px solid #ddd;
    margin-bottom: 1rem;
    padding-top: 1rem;
    padding-bottom: 1rem;
    text-align: center;
}

@media screen and (max-width: 1120px) {
    body {
        font-size: 20px;
    }

    h1 { font-size: 1rem; }
    h2 { font-size: .9rem; }
    h3 { font-size: .85rem; }
    h4 { font-size: .8rem; }
}

@ -1,11 +1,10 @@
-<html>
-<head>
-<title>Authentication</title>
-<meta http-equiv="X-UA-Compatible" content="IE=edge">
-<meta name="viewport" content="width=device-width, initial-scale=1.0">
{% block title %}Authentication{% endblock %}

{% block header %}
<link rel="stylesheet" href="/_matrix/static/client/register/style.css">
-</head>
-<body>
{% endblock %}

{% block body %}
<form id="registrationForm" method="post" action="{{ myurl }}">
    <div>
        {% if error is defined %}

@ -19,5 +18,4 @@
        <input type="submit" value="Agree" />
    </div>
</form>
-</body>
-</html>
{% endblock %}

@ -77,6 +77,11 @@ class CapabilitiesRestServlet(RestServlet):
                "enabled": True,
            }

        if self.config.experimental.msc3664_enabled:
            response["capabilities"]["im.nheko.msc3664.related_event_match"] = {
                "enabled": self.config.experimental.msc3664_enabled,
            }

        return HTTPStatus.OK, response

@ -231,7 +231,7 @@ class DehydratedDeviceServlet(RestServlet):
            }
        }

    PUT /org.matrix.msc2697/dehydrated_device
    PUT /org.matrix.msc2697.v2/dehydrated_device
    Content-Type: application/json

    {

@ -271,7 +271,6 @@ class DehydratedDeviceServlet(RestServlet):
            raise errors.NotFoundError("No dehydrated device available")

    class PutBody(RequestBodyModel):
        device_id: StrictStr
        device_data: DehydratedDeviceDataModel
        initial_device_display_name: Optional[StrictStr]

@ -281,7 +280,7 @@ class DehydratedDeviceServlet(RestServlet):

        device_id = await self.device_handler.store_dehydrated_device(
            requester.user.to_string(),
            submission.device_data,
            submission.device_data.dict(),
            submission.initial_device_display_name,
        )
        return 200, {"device_id": device_id}

@ -436,8 +436,7 @@ class LoginRestServlet(RestServlet):
            The body of the JSON response.
        """
        token = login_submission["token"]
        auth_handler = self.auth_handler
        res = await auth_handler.validate_short_term_login_token(token)
        res = await self.auth_handler.consume_login_token(token)

        return await self._complete_login(
            res.user_id,

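The hunk above swaps macaroon validation (`validate_short_term_login_token`) for `consume_login_token`, which marks the token as used so it cannot be replayed. A minimal, self-contained sketch of that consume-once pattern; the class and in-memory storage here are illustrative stand-ins, not Synapse's actual schema:

import time
from dataclasses import dataclass
from typing import Dict, Optional


@dataclass
class LoginTokenRecord:
    user_id: str
    expiry_ts_ms: int          # token is invalid after this time
    used_ts_ms: Optional[int]  # set the first time the token is consumed


class LoginTokenStore:
    """In-memory stand-in for a `login_tokens` table."""

    def __init__(self) -> None:
        self._tokens: Dict[str, LoginTokenRecord] = {}

    def add(self, token: str, user_id: str, lifetime_ms: int) -> None:
        now_ms = int(time.time() * 1000)
        self._tokens[token] = LoginTokenRecord(user_id, now_ms + lifetime_ms, None)

    def consume(self, token: str) -> str:
        """Return the user ID for `token`, marking it as used exactly once."""
        now_ms = int(time.time() * 1000)
        record = self._tokens.get(token)
        if record is None:
            raise KeyError("unknown login token")
        if record.used_ts_ms is not None:
            raise ValueError("login token already used")
        if now_ms > record.expiry_ts_ms:
            raise ValueError("login token expired")
        # Mark as used *before* returning so a replayed token is rejected.
        record.used_ts_ms = now_ms
        return record.user_id


store = LoginTokenStore()
store.add("syl_abc123", "@alice:example.org", lifetime_ms=2 * 60 * 1000)
assert store.consume("syl_abc123") == "@alice:example.org"
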
@ -57,7 +57,6 @@ class LoginTokenRequestServlet(RestServlet):
        self.store = hs.get_datastores().main
        self.clock = hs.get_clock()
        self.server_name = hs.config.server.server_name
        self.macaroon_gen = hs.get_macaroon_generator()
        self.auth_handler = hs.get_auth_handler()
        self.token_timeout = hs.config.experimental.msc3882_token_timeout
        self.ui_auth = hs.config.experimental.msc3882_ui_auth

@ -76,10 +75,10 @@ class LoginTokenRequestServlet(RestServlet):
            can_skip_ui_auth=False,  # Don't allow skipping of UI auth
        )

        login_token = self.macaroon_gen.generate_short_term_login_token(
        login_token = await self.auth_handler.create_login_token_for_user_id(
            user_id=requester.user.to_string(),
            auth_provider_id="org.matrix.msc3882.login_token_request",
            duration_in_ms=self.token_timeout,
            duration_ms=self.token_timeout,
        )

        return (

|
@ -110,6 +110,13 @@ class RoomBatchSendEventRestServlet(RestServlet):
|
|||
errcode=Codes.MISSING_PARAM,
|
||||
)
|
||||
|
||||
if await self.store.is_partial_state_room(room_id):
|
||||
raise SynapseError(
|
||||
HTTPStatus.BAD_REQUEST,
|
||||
"Cannot insert history batches until we have fully joined the room",
|
||||
errcode=Codes.UNABLE_DUE_TO_PARTIAL_STATE,
|
||||
)
|
||||
|
||||
# Verify the batch_id_from_query corresponds to an actual insertion event
|
||||
# and have the batch connected.
|
||||
if batch_id_from_query:
|
||||
|
|
|
@ -146,12 +146,12 @@ class SyncRestServlet(RestServlet):
        elif filter_id.startswith("{"):
            try:
                filter_object = json_decoder.decode(filter_id)
                set_timeline_upper_limit(
                    filter_object, self.hs.config.server.filter_timeline_limit
                )
            except Exception:
                raise SynapseError(400, "Invalid filter JSON")
                raise SynapseError(400, "Invalid filter JSON", errcode=Codes.NOT_JSON)
            self.filtering.check_valid_filter(filter_object)
            set_timeline_upper_limit(
                filter_object, self.hs.config.server.filter_timeline_limit
            )
            filter_collection = FilterCollection(self.hs, filter_object)
        else:
            try:

@ -14,17 +14,20 @@

from typing import TYPE_CHECKING

from twisted.web.resource import Resource

from .local_key_resource import LocalKey
from .remote_key_resource import RemoteKey
from synapse.http.server import HttpServer, JsonResource
from synapse.rest.key.v2.local_key_resource import LocalKey
from synapse.rest.key.v2.remote_key_resource import RemoteKey

if TYPE_CHECKING:
    from synapse.server import HomeServer


class KeyApiV2Resource(Resource):
class KeyResource(JsonResource):
    def __init__(self, hs: "HomeServer"):
        Resource.__init__(self)
        self.putChild(b"server", LocalKey(hs))
        self.putChild(b"query", RemoteKey(hs))
        super().__init__(hs, canonical_json=True)
        self.register_servlets(self, hs)

    @staticmethod
    def register_servlets(http_server: HttpServer, hs: "HomeServer") -> None:
        LocalKey(hs).register(http_server)
        RemoteKey(hs).register(http_server)

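For context, the refactor above replaces one Twisted `Resource` per path segment with servlets that register compiled-regex `PATTERNS` on a shared JSON resource; named groups in the regex become handler arguments. A rough, framework-free sketch of that dispatch style; the router class is illustrative only, not Synapse's `JsonResource`:

import re
from typing import Callable, Dict, List, Pattern, Tuple

Handler = Callable[..., Tuple[int, dict]]


class RegexRouter:
    """Toy router: first matching pattern wins, named groups become kwargs."""

    def __init__(self) -> None:
        self._routes: List[Tuple[str, Pattern[str], Handler]] = []

    def register_paths(self, method: str, patterns, handler: Handler) -> None:
        for pattern in patterns:
            self._routes.append((method, pattern, handler))

    def handle(self, method: str, path: str) -> Tuple[int, dict]:
        for route_method, pattern, handler in self._routes:
            match = pattern.match(path)
            if route_method == method and match:
                return handler(**match.groupdict())
        return 404, {"errcode": "M_UNRECOGNIZED"}


def get_local_key(key_id=None):
    return 200, {"server_name": "example.org", "key_id": key_id}


router = RegexRouter()
router.register_paths(
    "GET",
    (re.compile(r"^/_matrix/key/v2/server(/(?P<key_id>[^/]*))?$"),),
    get_local_key,
)

print(router.handle("GET", "/_matrix/key/v2/server/ed25519:abc"))
# -> (200, {'server_name': 'example.org', 'key_id': 'ed25519:abc'})
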
@ -13,16 +13,15 @@
# limitations under the License.

import logging
from typing import TYPE_CHECKING, Optional
import re
from typing import TYPE_CHECKING, Optional, Tuple

from canonicaljson import encode_canonical_json
from signedjson.sign import sign_json
from unpaddedbase64 import encode_base64

from twisted.web.resource import Resource
from twisted.web.server import Request

from synapse.http.server import respond_with_json_bytes
from synapse.http.site import SynapseRequest
from synapse.http.servlet import RestServlet
from synapse.types import JsonDict

if TYPE_CHECKING:

@ -31,7 +30,7 @@ if TYPE_CHECKING:
logger = logging.getLogger(__name__)


class LocalKey(Resource):
class LocalKey(RestServlet):
    """HTTP resource containing encoding the TLS X.509 certificate and NACL
    signature verification keys for this server::

@ -61,18 +60,17 @@ class LocalKey(Resource):
    }
    """

    isLeaf = True
    PATTERNS = (re.compile("^/_matrix/key/v2/server(/(?P<key_id>[^/]*))?$"),)

    def __init__(self, hs: "HomeServer"):
        self.config = hs.config
        self.clock = hs.get_clock()
        self.update_response_body(self.clock.time_msec())
        Resource.__init__(self)

    def update_response_body(self, time_now_msec: int) -> None:
        refresh_interval = self.config.key.key_refresh_interval
        self.valid_until_ts = int(time_now_msec + refresh_interval)
        self.response_body = encode_canonical_json(self.response_json_object())
        self.response_body = self.response_json_object()

    def response_json_object(self) -> JsonDict:
        verify_keys = {}

@ -99,9 +97,11 @@ class LocalKey(Resource):
        json_object = sign_json(json_object, self.config.server.server_name, key)
        return json_object

    def render_GET(self, request: SynapseRequest) -> Optional[int]:
    def on_GET(
        self, request: Request, key_id: Optional[str] = None
    ) -> Tuple[int, JsonDict]:
        time_now = self.clock.time_msec()
        # Update the expiry time if less than half the interval remains.
        if time_now + self.config.key.key_refresh_interval / 2 > self.valid_until_ts:
            self.update_response_body(time_now)
        return respond_with_json_bytes(request, 200, self.response_body)
        return 200, self.response_body

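`on_GET` above keeps serving a cached response and only rebuilds it once less than half of `key_refresh_interval` remains before `valid_until_ts`. That refresh rule is easy to check in isolation; a sketch under the assumption that rebuilding just stamps a new expiry (the real resource also re-signs the key JSON):

class CachedKeyResponse:
    """Rebuild a cached response when under half its refresh interval remains."""

    def __init__(self, refresh_interval_ms: int, now_ms: int) -> None:
        self.refresh_interval_ms = refresh_interval_ms
        self.rebuild(now_ms)

    def rebuild(self, now_ms: int) -> None:
        self.valid_until_ts = now_ms + self.refresh_interval_ms
        self.response = {"valid_until_ts": self.valid_until_ts}  # plus keys, signatures...

    def get(self, now_ms: int) -> dict:
        # Update the expiry time if less than half the interval remains.
        if now_ms + self.refresh_interval_ms / 2 > self.valid_until_ts:
            self.rebuild(now_ms)
        return self.response


resp = CachedKeyResponse(refresh_interval_ms=60_000, now_ms=0)
assert resp.get(now_ms=10_000)["valid_until_ts"] == 60_000   # still fresh, served from cache
assert resp.get(now_ms=40_000)["valid_until_ts"] == 100_000  # under 30s left, rebuilt
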
@ -13,15 +13,20 @@
# limitations under the License.

import logging
from typing import TYPE_CHECKING, Dict, Set
import re
from typing import TYPE_CHECKING, Dict, Optional, Set, Tuple

from signedjson.sign import sign_json

from synapse.api.errors import Codes, SynapseError
from twisted.web.server import Request

from synapse.crypto.keyring import ServerKeyFetcher
from synapse.http.server import DirectServeJsonResource, respond_with_json
from synapse.http.servlet import parse_integer, parse_json_object_from_request
from synapse.http.site import SynapseRequest
from synapse.http.server import HttpServer
from synapse.http.servlet import (
    RestServlet,
    parse_integer,
    parse_json_object_from_request,
)
from synapse.types import JsonDict
from synapse.util import json_decoder
from synapse.util.async_helpers import yieldable_gather_results

@ -32,7 +37,7 @@ if TYPE_CHECKING:
logger = logging.getLogger(__name__)


class RemoteKey(DirectServeJsonResource):
class RemoteKey(RestServlet):
    """HTTP resource for retrieving the TLS certificate and NACL signature
    verification keys for a collection of servers. Checks that the reported
    X.509 TLS certificate matches the one used in the HTTPS connection. Checks

@ -88,11 +93,7 @@ class RemoteKey(DirectServeJsonResource):
    }
    """

    isLeaf = True

    def __init__(self, hs: "HomeServer"):
        super().__init__()

        self.fetcher = ServerKeyFetcher(hs)
        self.store = hs.get_datastores().main
        self.clock = hs.get_clock()

@ -101,36 +102,48 @@ class RemoteKey(DirectServeJsonResource):
        )
        self.config = hs.config

    async def _async_render_GET(self, request: SynapseRequest) -> None:
        assert request.postpath is not None
        if len(request.postpath) == 1:
            (server,) = request.postpath
            query: dict = {server.decode("ascii"): {}}
        elif len(request.postpath) == 2:
            server, key_id = request.postpath
    def register(self, http_server: HttpServer) -> None:
        http_server.register_paths(
            "GET",
            (
                re.compile(
                    "^/_matrix/key/v2/query/(?P<server>[^/]*)(/(?P<key_id>[^/]*))?$"
                ),
            ),
            self.on_GET,
            self.__class__.__name__,
        )
        http_server.register_paths(
            "POST",
            (re.compile("^/_matrix/key/v2/query$"),),
            self.on_POST,
            self.__class__.__name__,
        )

    async def on_GET(
        self, request: Request, server: str, key_id: Optional[str] = None
    ) -> Tuple[int, JsonDict]:
        if server and key_id:
            minimum_valid_until_ts = parse_integer(request, "minimum_valid_until_ts")
            arguments = {}
            if minimum_valid_until_ts is not None:
                arguments["minimum_valid_until_ts"] = minimum_valid_until_ts
            query = {server.decode("ascii"): {key_id.decode("ascii"): arguments}}
            query = {server: {key_id: arguments}}
        else:
            raise SynapseError(404, "Not found %r" % request.postpath, Codes.NOT_FOUND)
            query = {server: {}}

        await self.query_keys(request, query, query_remote_on_cache_miss=True)
        return 200, await self.query_keys(query, query_remote_on_cache_miss=True)

    async def _async_render_POST(self, request: SynapseRequest) -> None:
    async def on_POST(self, request: Request) -> Tuple[int, JsonDict]:
        content = parse_json_object_from_request(request)

        query = content["server_keys"]

        await self.query_keys(request, query, query_remote_on_cache_miss=True)
        return 200, await self.query_keys(query, query_remote_on_cache_miss=True)

    async def query_keys(
        self,
        request: SynapseRequest,
        query: JsonDict,
        query_remote_on_cache_miss: bool = False,
    ) -> None:
        self, query: JsonDict, query_remote_on_cache_miss: bool = False
    ) -> JsonDict:
        logger.info("Handling query for keys %r", query)

        store_queries = []

@ -232,7 +245,7 @@ class RemoteKey(DirectServeJsonResource):
                    for server_name, keys in cache_misses.items()
                ),
            )
            await self.query_keys(request, query, query_remote_on_cache_miss=False)
            return await self.query_keys(query, query_remote_on_cache_miss=False)
        else:
            signed_keys = []
            for key_json_raw in json_results:

@ -244,6 +257,4 @@ class RemoteKey(DirectServeJsonResource):

                signed_keys.append(key_json)

            response = {"server_keys": signed_keys}

            respond_with_json(request, 200, response, canonical_json=True)
            return {"server_keys": signed_keys}

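Both `on_GET` and `on_POST` above normalise their input into the same nested query mapping before calling `query_keys`. The shape, following the server-server key API, is roughly (hostnames and key IDs below are examples):

# GET /_matrix/key/v2/query/{server}/{key_id}?minimum_valid_until_ts=...
# becomes a single-entry mapping:
query_from_get = {
    "remote.example.org": {
        "ed25519:abc": {"minimum_valid_until_ts": 1234567890000},
    },
}

# GET without a key_id asks for all of a server's keys:
query_all_keys = {"remote.example.org": {}}

# POST /_matrix/key/v2/query carries the same structure in the request body,
# possibly covering several servers at once:
query_from_post = {
    "remote.example.org": {"ed25519:abc": {}},
    "other.example.net": {},
}
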
@ -17,6 +17,9 @@ from typing import TYPE_CHECKING

from twisted.web.resource import Resource

from synapse.rest.synapse.client.oidc.backchannel_logout_resource import (
    OIDCBackchannelLogoutResource,
)
from synapse.rest.synapse.client.oidc.callback_resource import OIDCCallbackResource

if TYPE_CHECKING:

@ -29,6 +32,7 @@ class OIDCResource(Resource):
    def __init__(self, hs: "HomeServer"):
        Resource.__init__(self)
        self.putChild(b"callback", OIDCCallbackResource(hs))
        self.putChild(b"backchannel_logout", OIDCBackchannelLogoutResource(hs))


__all__ = ["OIDCResource"]

@ -0,0 +1,35 @@
# Copyright 2022 The Matrix.org Foundation C.I.C.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import logging
from typing import TYPE_CHECKING

from synapse.http.server import DirectServeJsonResource
from synapse.http.site import SynapseRequest

if TYPE_CHECKING:
    from synapse.server import HomeServer

logger = logging.getLogger(__name__)


class OIDCBackchannelLogoutResource(DirectServeJsonResource):
    isLeaf = 1

    def __init__(self, hs: "HomeServer"):
        super().__init__()
        self._oidc_handler = hs.get_oidc_handler()

    async def _async_render_POST(self, request: SynapseRequest) -> None:
        await self._oidc_handler.handle_backchannel_logout(request)

@ -201,7 +201,7 @@ class DataStore(
        name: Optional[str] = None,
        guests: bool = True,
        deactivated: bool = False,
        order_by: str = UserSortOrder.USER_ID.value,
        order_by: str = UserSortOrder.NAME.value,
        direction: str = "f",
        approved: bool = True,
    ) -> Tuple[List[JsonDict], int]:

@ -261,6 +261,7 @@ class DataStore(
            sql_base = f"""
                FROM users as u
                LEFT JOIN profiles AS p ON u.name = '@' || p.user_id || ':' || ?
                LEFT JOIN erased_users AS eu ON u.name = eu.user_id
                {where_clause}
                """
            sql = "SELECT COUNT(*) as total_users " + sql_base

@ -269,7 +270,8 @@ class DataStore(

            sql = f"""
                SELECT name, user_type, is_guest, admin, deactivated, shadow_banned,
                displayname, avatar_url, creation_ts * 1000 as creation_ts, approved
                displayname, avatar_url, creation_ts * 1000 as creation_ts, approved,
                eu.user_id is not null as erased
                {sql_base}
                ORDER BY {order_by_column} {order}, u.name ASC
                LIMIT ? OFFSET ?

@ -277,6 +279,13 @@ class DataStore(
            args += [limit, start]
            txn.execute(sql, args)
            users = self.db_pool.cursor_to_dict(txn)

            # some of those boolean values are returned as integers when we're on SQLite
            columns_to_boolify = ["erased"]
            for user in users:
                for column in columns_to_boolify:
                    user[column] = bool(user[column])

            return users, count

        return await self.db_pool.runInteraction(

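The `columns_to_boolify` loop exists because SQLite has no boolean type: an expression such as `eu.user_id is not null` comes back as `0`/`1`, whereas PostgreSQL returns a real boolean. A standalone demonstration of the mismatch and the coercion:

import sqlite3

conn = sqlite3.connect(":memory:")
conn.execute("CREATE TABLE erased_users (user_id TEXT)")
conn.execute("INSERT INTO erased_users VALUES ('@alice:example.org')")

row = conn.execute(
    "SELECT user_id IS NOT NULL AS erased FROM erased_users"
).fetchone()
print(row[0], type(row[0]))  # 1 <class 'int'> -- not True

# Hence the post-processing step: coerce integer flags to real booleans
# so the admin API's JSON serialises them as true/false.
user = {"erased": row[0]}
for column in ("erased",):
    user[column] = bool(user[column])
assert user["erased"] is True
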
@ -157,10 +157,23 @@ class ApplicationServiceWorkerStore(RoomMemberWorkerStore):
        app_service: "ApplicationService",
        cache_context: _CacheContext,
    ) -> List[str]:
        users_in_room = await self.get_users_in_room(
        """
        Get all users in a room that the appservice controls.

        Args:
            room_id: The room to check in.
            app_service: The application service to check interest/control against

        Returns:
            List of user IDs that the appservice controls.
        """
        # We can use `get_local_users_in_room(...)` here because an application service
        # can only be interested in local users of the server it's on (ignore any remote
        # users that might match the user namespace regex).
        local_users_in_room = await self.get_local_users_in_room(
            room_id, on_invalidate=cache_context.invalidate
        )
        return list(filter(app_service.is_interested_in_user, users_in_room))
        return list(filter(app_service.is_interested_in_user, local_users_in_room))


class ApplicationServiceStore(ApplicationServiceWorkerStore):

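`get_local_users_in_room` suffices here because an appservice's user namespace can only match users on its own homeserver, so the interest check reduces to a regex test per local user. A self-contained sketch of that filter; the namespace pattern and user list are made-up examples:

import re
from typing import List

# An appservice user namespace, e.g. taken from its registration file.
user_namespace = re.compile(r"@irc_.*:example\.org")

def is_interested_in_user(user_id: str) -> bool:
    return bool(user_namespace.match(user_id))

local_users_in_room: List[str] = [
    "@alice:example.org",
    "@irc_bob:example.org",
    "@irc_carol:example.org",
]

controlled = list(filter(is_interested_in_user, local_users_in_room))
print(controlled)  # ['@irc_bob:example.org', '@irc_carol:example.org']
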
@ -274,6 +274,13 @@ class DeviceWorkerStore(RoomMemberWorkerStore, EndToEndKeyWorkerStore):
            destination, int(from_stream_id)
        )
        if not has_changed:
            # debugging for https://github.com/matrix-org/synapse/issues/14251
            issue_8631_logger.debug(
                "%s: no change between %i and %i",
                destination,
                from_stream_id,
                now_stream_id,
            )
            return now_stream_id, []

        updates = await self.db_pool.runInteraction(

@ -1848,7 +1855,7 @@ class DeviceStore(DeviceWorkerStore, DeviceBackgroundUpdateStore):
        self,
        txn: LoggingTransaction,
        user_id: str,
        device_ids: Iterable[str],
        device_id: str,
        hosts: Collection[str],
        stream_ids: List[int],
        context: Optional[Dict[str, str]],

@ -1864,6 +1871,21 @@ class DeviceStore(DeviceWorkerStore, DeviceBackgroundUpdateStore):
        stream_id_iterator = iter(stream_ids)

        encoded_context = json_encoder.encode(context)
        mark_sent = not self.hs.is_mine_id(user_id)

        values = [
            (
                destination,
                next(stream_id_iterator),
                user_id,
                device_id,
                mark_sent,
                now,
                encoded_context if whitelisted_homeserver(destination) else "{}",
            )
            for destination in hosts
        ]

        self.db_pool.simple_insert_many_txn(
            txn,
            table="device_lists_outbound_pokes",

@ -1876,23 +1898,21 @@ class DeviceStore(DeviceWorkerStore, DeviceBackgroundUpdateStore):
                "ts",
                "opentracing_context",
            ),
            values=[
                (
                    destination,
                    next(stream_id_iterator),
                    user_id,
                    device_id,
                    not self.hs.is_mine_id(
                        user_id
                    ),  # We only need to send out update for *our* users
                    now,
                    encoded_context if whitelisted_homeserver(destination) else "{}",
                )
                for destination in hosts
                for device_id in device_ids
            ],
            values=values,
        )

        # debugging for https://github.com/matrix-org/synapse/issues/14251
        if issue_8631_logger.isEnabledFor(logging.DEBUG):
            issue_8631_logger.debug(
                "Recorded outbound pokes for %s:%s with device stream ids %s",
                user_id,
                device_id,
                {
                    stream_id: destination
                    for (destination, stream_id, _, _, _, _, _) in values
                },
            )

    def _add_device_outbound_room_poke_txn(
        self,
        txn: LoggingTransaction,

@ -1997,7 +2017,7 @@ class DeviceStore(DeviceWorkerStore, DeviceBackgroundUpdateStore):
        self._add_device_outbound_poke_to_stream_txn(
            txn,
            user_id=user_id,
            device_ids=[device_id],
            device_id=device_id,
            hosts=hosts,
            stream_ids=stream_ids,
            context=context,

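Hoisting the row construction into a named `values` list also makes the pairing explicit: each destination host consumes the next pre-allocated stream ID, and the same list then feeds both the bulk insert and the debug log. A reduced sketch of that pattern (the IDs and hostnames are illustrative):

from typing import List, Tuple

user_id = "@alice:example.org"
device_id = "DEVICE1"
hosts = ["remote-a.example.net", "remote-b.example.net"]

# Stand-in for the stream IDs handed out by the ID generator.
stream_ids = [101, 102]
stream_id_iterator = iter(stream_ids)

# One row per destination, consuming one stream ID each.
values: List[Tuple[str, int, str, str]] = [
    (destination, next(stream_id_iterator), user_id, device_id)
    for destination in hosts
]

# The same list drives both the INSERT and the debug logging.
print({stream_id: destination for (destination, stream_id, _, _) in values})
# -> {101: 'remote-a.example.net', 102: 'remote-b.example.net'}
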
@ -139,11 +139,15 @@ class EndToEndKeyWorkerStore(EndToEndKeyBackgroundStore, CacheInvalidationWorker
    @trace
    @cancellable
    async def get_e2e_device_keys_for_cs_api(
        self, query_list: List[Tuple[str, Optional[str]]]
        self,
        query_list: List[Tuple[str, Optional[str]]],
        include_displaynames: bool = True,
    ) -> Dict[str, Dict[str, JsonDict]]:
        """Fetch a list of device keys, formatted suitably for the C/S API.
        Args:
            query_list(list): List of pairs of user_ids and device_ids.
            query_list: List of pairs of user_ids and device_ids.
            include_displaynames: Whether to include the displayname of returned devices
                (if one exists).
        Returns:
            Dict mapping from user-id to dict mapping from device_id to
            key data. The key data will be a dict in the same format as the

@ -166,9 +170,12 @@ class EndToEndKeyWorkerStore(EndToEndKeyBackgroundStore, CacheInvalidationWorker
                continue

            r["unsigned"] = {}
            display_name = device_info.display_name
            if display_name is not None:
                r["unsigned"]["device_display_name"] = display_name
            if include_displaynames:
                # Include the device's display name in the "unsigned" dictionary
                display_name = device_info.display_name
                if display_name is not None:
                    r["unsigned"]["device_display_name"] = display_name

            rv[user_id][device_id] = r

        return rv

@ -29,6 +29,7 @@ from typing import (
)

from synapse.api.errors import StoreError
from synapse.config.homeserver import ExperimentalConfig
from synapse.replication.slave.storage._slaved_id_tracker import SlavedIdTracker
from synapse.storage._base import SQLBaseStore
from synapse.storage.database import (

@ -62,7 +63,9 @@ logger = logging.getLogger(__name__)


def _load_rules(
    rawrules: List[JsonDict], enabled_map: Dict[str, bool]
    rawrules: List[JsonDict],
    enabled_map: Dict[str, bool],
    experimental_config: ExperimentalConfig,
) -> FilteredPushRules:
    """Take the DB rows returned from the DB and convert them into a full
    `FilteredPushRules` object.

@ -80,7 +83,9 @@ def _load_rules(

    push_rules = PushRules(ruleslist)

    filtered_rules = FilteredPushRules(push_rules, enabled_map)
    filtered_rules = FilteredPushRules(
        push_rules, enabled_map, msc3664_enabled=experimental_config.msc3664_enabled
    )

    return filtered_rules

@ -160,7 +165,7 @@ class PushRulesWorkerStore(

        enabled_map = await self.get_push_rules_enabled_for_user(user_id)

        return _load_rules(rows, enabled_map)
        return _load_rules(rows, enabled_map, self.hs.config.experimental)

    async def get_push_rules_enabled_for_user(self, user_id: str) -> Dict[str, bool]:
        results = await self.db_pool.simple_select_list(

@ -219,7 +224,9 @@ class PushRulesWorkerStore(
        results: Dict[str, FilteredPushRules] = {}

        for user_id, rules in raw_rules.items():
            results[user_id] = _load_rules(rules, enabled_map_by_user.get(user_id, {}))
            results[user_id] = _load_rules(
                rules, enabled_map_by_user.get(user_id, {}), self.hs.config.experimental
            )

        return results

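Threading the experimental config into `_load_rules` lets the filtered view hide rule kinds behind their feature flags. A toy version of that gating, with deliberately simplified rule shapes (not Synapse's actual `FilteredPushRules`):

from dataclasses import dataclass
from typing import Dict, List


@dataclass
class PushRule:
    rule_id: str
    kind: str  # e.g. "related_event_match" for MSC3664-style rules


def filter_rules(
    rules: List[PushRule],
    enabled_map: Dict[str, bool],
    msc3664_enabled: bool,
) -> List[PushRule]:
    filtered = []
    for rule in rules:
        # Drop experimental rule kinds unless the feature flag is on.
        if rule.kind == "related_event_match" and not msc3664_enabled:
            continue
        # Respect per-user enable/disable overrides.
        if not enabled_map.get(rule.rule_id, True):
            continue
        filtered.append(rule)
    return filtered


rules = [
    PushRule(".m.rule.reply", "related_event_match"),
    PushRule(".m.rule.message", "event_match"),
]
print([r.rule_id for r in filter_rules(rules, {}, msc3664_enabled=False)])
# -> ['.m.rule.message']
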
@ -21,7 +21,13 @@ from typing import TYPE_CHECKING, Any, Dict, List, Optional, Tuple, Union, cast
import attr

from synapse.api.constants import UserTypes
from synapse.api.errors import Codes, StoreError, SynapseError, ThreepidValidationError
from synapse.api.errors import (
    Codes,
    NotFoundError,
    StoreError,
    SynapseError,
    ThreepidValidationError,
)
from synapse.config.homeserver import HomeServerConfig
from synapse.metrics.background_process_metrics import wrap_as_background_process
from synapse.storage.database import (

@ -50,6 +56,14 @@ class ExternalIDReuseException(Exception):
    because this external id is given to an other user."""


class LoginTokenExpired(Exception):
    """Exception if the login token sent expired"""


class LoginTokenReused(Exception):
    """Exception if the login token sent was already used"""


@attr.s(frozen=True, slots=True, auto_attribs=True)
class TokenLookupResult:
    """Result of looking up an access token.

@ -115,6 +129,20 @@ class RefreshTokenLookupResult:
    If None, the session can be refreshed indefinitely."""


@attr.s(auto_attribs=True, frozen=True, slots=True)
class LoginTokenLookupResult:
    """Result of looking up a login token."""

    user_id: str
    """The user this token belongs to."""

    auth_provider_id: Optional[str]
    """The SSO Identity Provider that the user authenticated with, to get this token."""

    auth_provider_session_id: Optional[str]
    """The session ID advertised by the SSO Identity Provider."""


class RegistrationWorkerStore(CacheInvalidationWorkerStore):
    def __init__(
        self,

@ -1789,6 +1817,130 @@ class RegistrationWorkerStore(CacheInvalidationWorkerStore):
            "replace_refresh_token", _replace_refresh_token_txn
        )

    async def add_login_token_to_user(
        self,
        user_id: str,
        token: str,
        expiry_ts: int,
        auth_provider_id: Optional[str],
        auth_provider_session_id: Optional[str],
    ) -> None:
        """Adds a short-term login token for the given user.

        Args:
            user_id: The user ID.
            token: The new login token to add.
            expiry_ts (milliseconds since the epoch): Time after which the login token
                cannot be used.
            auth_provider_id: The SSO Identity Provider that the user authenticated with
                to get this token, if any
            auth_provider_session_id: The session ID advertised by the SSO Identity
                Provider, if any.
        """
        await self.db_pool.simple_insert(
            "login_tokens",
            {
                "token": token,
                "user_id": user_id,
                "expiry_ts": expiry_ts,
                "auth_provider_id": auth_provider_id,
                "auth_provider_session_id": auth_provider_session_id,
            },
            desc="add_login_token_to_user",
        )

    def _consume_login_token(
        self,
        txn: LoggingTransaction,
        token: str,
        ts: int,
    ) -> LoginTokenLookupResult:
        values = self.db_pool.simple_select_one_txn(
            txn,
            "login_tokens",
            keyvalues={"token": token},
            retcols=(
                "user_id",
                "expiry_ts",
                "used_ts",
                "auth_provider_id",
                "auth_provider_session_id",
            ),
            allow_none=True,
        )

        if values is None:
            raise NotFoundError()

        self.db_pool.simple_update_one_txn(
            txn,
            "login_tokens",
            keyvalues={"token": token},
            updatevalues={"used_ts": ts},
        )
        user_id = values["user_id"]
        expiry_ts = values["expiry_ts"]
        used_ts = values["used_ts"]
        auth_provider_id = values["auth_provider_id"]
        auth_provider_session_id = values["auth_provider_session_id"]

        # Token was already used
        if used_ts is not None:
            raise LoginTokenReused()

        # Token expired
        if ts > int(expiry_ts):
            raise LoginTokenExpired()

        return LoginTokenLookupResult(
            user_id=user_id,
            auth_provider_id=auth_provider_id,
            auth_provider_session_id=auth_provider_session_id,
        )

    async def consume_login_token(self, token: str) -> LoginTokenLookupResult:
        """Lookup a login token and consume it.

        Args:
            token: The login token.

        Returns:
            The data stored with that token, including the `user_id`. Returns `None` if
            the token does not exist or if it expired.

        Raises:
            NotFound if the login token was not found in database
            LoginTokenExpired if the login token expired
            LoginTokenReused if the login token was already used
        """
        return await self.db_pool.runInteraction(
            "consume_login_token",
            self._consume_login_token,
            token,
            self._clock.time_msec(),
        )

    async def invalidate_login_tokens_by_session_id(
        self, auth_provider_id: str, auth_provider_session_id: str
    ) -> None:
        """Invalidate login tokens with the given IdP session ID.

        Args:
            auth_provider_id: The SSO Identity Provider that the user authenticated with
                to get this token
            auth_provider_session_id: The session ID advertised by the SSO Identity
                Provider
        """
        await self.db_pool.simple_update(
            table="login_tokens",
            keyvalues={
                "auth_provider_id": auth_provider_id,
                "auth_provider_session_id": auth_provider_session_id,
            },
            updatevalues={"used_ts": self._clock.time_msec()},
            desc="invalidate_login_tokens_by_session_id",
        )

    @cached()
    async def is_guest(self, user_id: str) -> bool:
        res = await self.db_pool.simple_select_one_onecol(

@ -2019,6 +2171,12 @@ class RegistrationStore(StatsStore, RegistrationBackgroundUpdateStore):
            and hs.config.experimental.msc3866.require_approval_for_new_accounts
        )

        # Create a background job for removing expired login tokens
        if hs.config.worker.run_background_tasks:
            self._clock.looping_call(
                self._delete_expired_login_tokens, THIRTY_MINUTES_IN_MS
            )

    async def add_access_token_to_user(
        self,
        user_id: str,

@ -2617,6 +2775,23 @@ class RegistrationStore(StatsStore, RegistrationBackgroundUpdateStore):
            approved,
        )

    @wrap_as_background_process("delete_expired_login_tokens")
    async def _delete_expired_login_tokens(self) -> None:
        """Remove login tokens with expiry dates that have passed."""

        def _delete_expired_login_tokens_txn(txn: LoggingTransaction, ts: int) -> None:
            sql = "DELETE FROM login_tokens WHERE expiry_ts <= ?"
            txn.execute(sql, (ts,))

        # We keep the expired tokens for an extra 5 minutes so we can measure how many
        # times a token is being used after its expiry
        now = self._clock.time_msec()
        await self.db_pool.runInteraction(
            "delete_expired_login_tokens",
            _delete_expired_login_tokens_txn,
            now - (5 * 60 * 1000),
        )


def find_max_generated_user_id_localpart(cur: Cursor) -> int:
    """

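The background job above deliberately deletes tokens only once they are five minutes past expiry, so that late uses can still be observed rather than silently vanishing. A runnable sketch of just the deletion step, using SQLite as a stand-in for the `login_tokens` table:

import sqlite3

FIVE_MINUTES_MS = 5 * 60 * 1000

conn = sqlite3.connect(":memory:")
conn.execute("CREATE TABLE login_tokens (token TEXT, expiry_ts INTEGER)")
conn.executemany(
    "INSERT INTO login_tokens VALUES (?, ?)",
    [("old", 1_000), ("recent", 9_800_000), ("live", 20_000_000)],
)

def delete_expired_login_tokens(now_ms: int) -> None:
    # Keep tokens for an extra 5 minutes after expiry for observability.
    cutoff = now_ms - FIVE_MINUTES_MS
    conn.execute("DELETE FROM login_tokens WHERE expiry_ts <= ?", (cutoff,))

delete_expired_login_tokens(now_ms=10_000_000)
print([row[0] for row in conn.execute("SELECT token FROM login_tokens")])
# -> ['recent', 'live']  ('old' expired more than 5 minutes before now)
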
@@ -152,6 +152,9 @@ class RoomMemberWorkerStore(EventsWorkerStore):
        the forward extremities of those rooms will exclude most members. We may also
        calculate room state incorrectly for such rooms and believe that a member is or
        is not in the room when the opposite is true.

        Note: If you only care about users in the room local to the homeserver, use
        `get_local_users_in_room(...)` instead, which will be more performant.
        """
        return await self.db_pool.simple_select_onecol(
            table="current_state_events",

@@ -707,8 +710,8 @@ class RoomMemberWorkerStore(EventsWorkerStore):

        # 250 users is pretty arbitrary but the data can be quite large if users
        # are in many rooms.
        for user_ids in batch_iter(user_ids, 250):
            all_user_rooms.update(await self._get_rooms_for_users(user_ids))
        for batch_user_ids in batch_iter(user_ids, 250):
            all_user_rooms.update(await self._get_rooms_for_users(batch_user_ids))

        return all_user_rooms
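The rename above stops the loop from rebinding `user_ids`; since `batch_iter(user_ids, 250)` is evaluated once up front, the shadowing did not change behaviour, but it confused readers and type checkers. For context, a minimal sketch of the batching pattern being used (an assumption about the helper's shape, modelled on the obvious implementation, not a copy of Synapse's):

    from itertools import islice
    from typing import Iterable, Iterator, Tuple, TypeVar

    T = TypeVar("T")

    def batch_iter(iterable: Iterable[T], size: int) -> Iterator[Tuple[T, ...]]:
        # Yield consecutive tuples of at most `size` items from `iterable`.
        sourceiter = iter(iterable)
        while True:
            batch = tuple(islice(sourceiter, size))
            if not batch:
                return
            yield batch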
@@ -742,7 +745,7 @@ class RoomMemberWorkerStore(EventsWorkerStore):
            # user and the set of other users, and then checking if there is any
            # overlap.
            sql = f"""
                SELECT b.state_key
                SELECT DISTINCT b.state_key
                FROM (
                    SELECT room_id FROM current_state_events
                    WHERE type = 'm.room.member' AND membership = 'join' AND state_key = ?

@@ -751,7 +754,6 @@ class RoomMemberWorkerStore(EventsWorkerStore):
                    SELECT room_id, state_key FROM current_state_events
                    WHERE type = 'm.room.member' AND membership = 'join' AND {clause}
                ) AS b using (room_id)
                LIMIT 1
            """

            txn.execute(sql, (user_id, *args))
@@ -11,10 +11,22 @@
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import enum
import logging
import re
from typing import TYPE_CHECKING, Any, Collection, Iterable, List, Optional, Set, Tuple
from collections import deque
from dataclasses import dataclass
from typing import (
    TYPE_CHECKING,
    Any,
    Collection,
    Iterable,
    List,
    Optional,
    Set,
    Tuple,
    Union,
)

import attr

@@ -27,7 +39,7 @@ from synapse.storage.database import (
    LoggingTransaction,
)
from synapse.storage.databases.main.events_worker import EventRedactBehaviour
from synapse.storage.engines import BaseDatabaseEngine, PostgresEngine, Sqlite3Engine
from synapse.storage.engines import PostgresEngine, Sqlite3Engine
from synapse.types import JsonDict

if TYPE_CHECKING:
@@ -68,11 +80,11 @@ class SearchWorkerStore(SQLBaseStore):
        if not self.hs.config.server.enable_search:
            return
        if isinstance(self.database_engine, PostgresEngine):
            sql = (
                "INSERT INTO event_search"
                " (event_id, room_id, key, vector, stream_ordering, origin_server_ts)"
                " VALUES (?,?,?,to_tsvector('english', ?),?,?)"
            )
            sql = """
                INSERT INTO event_search
                (event_id, room_id, key, vector, stream_ordering, origin_server_ts)
                VALUES (?,?,?,to_tsvector('english', ?),?,?)
            """

            args1 = (
                (
@@ -89,20 +101,20 @@ class SearchWorkerStore(SQLBaseStore):
            txn.execute_batch(sql, args1)

        elif isinstance(self.database_engine, Sqlite3Engine):
            sql = (
                "INSERT INTO event_search (event_id, room_id, key, value)"
                " VALUES (?,?,?,?)"
            )
            self.db_pool.simple_insert_many_txn(
                txn,
                table="event_search",
                keys=("event_id", "room_id", "key", "value"),
                values=(
                    (
                        entry.event_id,
                        entry.room_id,
                        entry.key,
                        _clean_value_for_search(entry.value),
                    )
                    for entry in entries
                ),
            )
            args2 = (
                (
                    entry.event_id,
                    entry.room_id,
                    entry.key,
                    _clean_value_for_search(entry.value),
                )
                for entry in entries
            )
            txn.execute_batch(sql, args2)

        else:
            # This should be unreachable.
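The switch to `simple_insert_many_txn` replaces the hand-rolled INSERT with the shared database helper. As a rough mental model only (the real helper also handles engine differences and instrumentation), it behaves like:

    def simple_insert_many_txn_sketch(txn, table, keys, values):
        # Build "INSERT INTO t (a, b) VALUES (?, ?)" and run it for each row.
        sql = "INSERT INTO %s (%s) VALUES (%s)" % (
            table,
            ", ".join(keys),
            ", ".join("?" for _ in keys),
        )
        txn.execute_batch(sql, values)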
@@ -150,15 +162,17 @@ class SearchBackgroundUpdateStore(SearchWorkerStore):
        TYPES = ["m.room.name", "m.room.message", "m.room.topic"]

        def reindex_search_txn(txn: LoggingTransaction) -> int:
            sql = (
                "SELECT stream_ordering, event_id, room_id, type, json, "
                " origin_server_ts FROM events"
                " JOIN event_json USING (room_id, event_id)"
                " WHERE ? <= stream_ordering AND stream_ordering < ?"
                " AND (%s)"
                " ORDER BY stream_ordering DESC"
                " LIMIT ?"
            ) % (" OR ".join("type = '%s'" % (t,) for t in TYPES),)
            sql = """
            SELECT stream_ordering, event_id, room_id, type, json, origin_server_ts
            FROM events
            JOIN event_json USING (room_id, event_id)
            WHERE ? <= stream_ordering AND stream_ordering < ?
            AND (%s)
            ORDER BY stream_ordering DESC
            LIMIT ?
            """ % (
                " OR ".join("type = '%s'" % (t,) for t in TYPES),
            )

            txn.execute(sql, (target_min_stream_id, max_stream_id, batch_size))
@@ -272,8 +286,10 @@ class SearchBackgroundUpdateStore(SearchWorkerStore):

            try:
                c.execute(
                    "CREATE INDEX CONCURRENTLY event_search_fts_idx"
                    " ON event_search USING GIN (vector)"
                    """
                    CREATE INDEX CONCURRENTLY event_search_fts_idx
                    ON event_search USING GIN (vector)
                    """
                )
            except psycopg2.ProgrammingError as e:
                logger.warning(
@@ -311,12 +327,16 @@ class SearchBackgroundUpdateStore(SearchWorkerStore):
            # We create with NULLS FIRST so that when we search *backwards*
            # we get the ones with non null origin_server_ts *first*
            c.execute(
                "CREATE INDEX CONCURRENTLY event_search_room_order ON event_search("
                "room_id, origin_server_ts NULLS FIRST, stream_ordering NULLS FIRST)"
                """
                CREATE INDEX CONCURRENTLY event_search_room_order
                ON event_search(room_id, origin_server_ts NULLS FIRST, stream_ordering NULLS FIRST)
                """
            )
            c.execute(
                "CREATE INDEX CONCURRENTLY event_search_order ON event_search("
                "origin_server_ts NULLS FIRST, stream_ordering NULLS FIRST)"
                """
                CREATE INDEX CONCURRENTLY event_search_order
                ON event_search(origin_server_ts NULLS FIRST, stream_ordering NULLS FIRST)
                """
            )
            conn.set_session(autocommit=False)
@@ -333,14 +353,14 @@ class SearchBackgroundUpdateStore(SearchWorkerStore):
        )

        def reindex_search_txn(txn: LoggingTransaction) -> Tuple[int, bool]:
            sql = (
                "UPDATE event_search AS es SET stream_ordering = e.stream_ordering,"
                " origin_server_ts = e.origin_server_ts"
                " FROM events AS e"
                " WHERE e.event_id = es.event_id"
                " AND ? <= e.stream_ordering AND e.stream_ordering < ?"
                " RETURNING es.stream_ordering"
            )
            sql = """
            UPDATE event_search AS es
            SET stream_ordering = e.stream_ordering, origin_server_ts = e.origin_server_ts
            FROM events AS e
            WHERE e.event_id = es.event_id
            AND ? <= e.stream_ordering AND e.stream_ordering < ?
            RETURNING es.stream_ordering
            """

            min_stream_id = max_stream_id - batch_size
            txn.execute(sql, (min_stream_id, max_stream_id))
@@ -421,8 +441,6 @@ class SearchStore(SearchBackgroundUpdateStore):
        """
        clauses = []

        search_query = _parse_query(self.database_engine, search_term)

        args: List[Any] = []

        # Make sure we don't explode because the person is in too many rooms.
@@ -444,32 +462,36 @@ class SearchStore(SearchBackgroundUpdateStore):
        count_clauses = clauses

        if isinstance(self.database_engine, PostgresEngine):
            sql = (
                "SELECT ts_rank_cd(vector, to_tsquery('english', ?)) AS rank,"
                " room_id, event_id"
                " FROM event_search"
                " WHERE vector @@ to_tsquery('english', ?)"
            )
            search_query = search_term
            tsquery_func = self.database_engine.tsquery_func
            sql = f"""
                SELECT ts_rank_cd(vector, {tsquery_func}('english', ?)) AS rank,
                room_id, event_id
                FROM event_search
                WHERE vector @@ {tsquery_func}('english', ?)
            """
            args = [search_query, search_query] + args

            count_sql = (
                "SELECT room_id, count(*) as count FROM event_search"
                " WHERE vector @@ to_tsquery('english', ?)"
            )
            count_sql = f"""
                SELECT room_id, count(*) as count FROM event_search
                WHERE vector @@ {tsquery_func}('english', ?)
            """
            count_args = [search_query] + count_args
        elif isinstance(self.database_engine, Sqlite3Engine):
            sql = (
                "SELECT rank(matchinfo(event_search)) as rank, room_id, event_id"
                " FROM event_search"
                " WHERE value MATCH ?"
            )
            search_query = _parse_query_for_sqlite(search_term)

            sql = """
                SELECT rank(matchinfo(event_search)) as rank, room_id, event_id
                FROM event_search
                WHERE value MATCH ?
            """
            args = [search_query] + args

            count_sql = (
                "SELECT room_id, count(*) as count FROM event_search"
                " WHERE value MATCH ?"
            )
            count_args = [search_term] + count_args
            count_sql = """
                SELECT room_id, count(*) as count FROM event_search
                WHERE value MATCH ?
            """
            count_args = [search_query] + count_args
        else:
            # This should be unreachable.
            raise Exception("Unrecognized database engine")
@@ -501,7 +523,9 @@ class SearchStore(SearchBackgroundUpdateStore):

        highlights = None
        if isinstance(self.database_engine, PostgresEngine):
            highlights = await self._find_highlights_in_postgres(search_query, events)
            highlights = await self._find_highlights_in_postgres(
                search_query, events, tsquery_func
            )

        count_sql += " GROUP BY room_id"

@@ -510,7 +534,6 @@ class SearchStore(SearchBackgroundUpdateStore):
        )

        count = sum(row["count"] for row in count_results if row["room_id"] in room_ids)

        return {
            "results": [
                {"event": event_map[r["event_id"]], "rank": r["rank"]}
@@ -542,9 +565,6 @@ class SearchStore(SearchBackgroundUpdateStore):
            Each match as a dictionary.
        """
        clauses = []

        search_query = _parse_query(self.database_engine, search_term)

        args: List[Any] = []

        # Make sure we don't explode because the person is in too many rooms.
@@ -576,26 +596,30 @@ class SearchStore(SearchBackgroundUpdateStore):
                raise SynapseError(400, "Invalid pagination token")

            clauses.append(
                "(origin_server_ts < ?"
                " OR (origin_server_ts = ? AND stream_ordering < ?))"
                """
                (origin_server_ts < ? OR (origin_server_ts = ? AND stream_ordering < ?))
                """
            )
            args.extend([origin_server_ts, origin_server_ts, stream])

        if isinstance(self.database_engine, PostgresEngine):
            sql = (
                "SELECT ts_rank_cd(vector, to_tsquery('english', ?)) as rank,"
                " origin_server_ts, stream_ordering, room_id, event_id"
                " FROM event_search"
                " WHERE vector @@ to_tsquery('english', ?) AND "
            )
            search_query = search_term
            tsquery_func = self.database_engine.tsquery_func
            sql = f"""
                SELECT ts_rank_cd(vector, {tsquery_func}('english', ?)) as rank,
                origin_server_ts, stream_ordering, room_id, event_id
                FROM event_search
                WHERE vector @@ {tsquery_func}('english', ?) AND
            """
            args = [search_query, search_query] + args

            count_sql = (
                "SELECT room_id, count(*) as count FROM event_search"
                " WHERE vector @@ to_tsquery('english', ?) AND "
            )
            count_sql = f"""
                SELECT room_id, count(*) as count FROM event_search
                WHERE vector @@ {tsquery_func}('english', ?) AND
            """
            count_args = [search_query] + count_args
        elif isinstance(self.database_engine, Sqlite3Engine):

            # We use CROSS JOIN here to ensure we use the right indexes.
            # https://sqlite.org/optoverview.html#crossjoin
            #
@@ -604,23 +628,25 @@ class SearchStore(SearchBackgroundUpdateStore):
            # in the events table to get the topological ordering. We need
            # to use the indexes in this order because sqlite refuses to
            # MATCH unless it uses the full text search index
            sql = (
                "SELECT rank(matchinfo) as rank, room_id, event_id,"
                " origin_server_ts, stream_ordering"
                " FROM (SELECT key, event_id, matchinfo(event_search) as matchinfo"
                " FROM event_search"
                " WHERE value MATCH ?"
                " )"
                " CROSS JOIN events USING (event_id)"
                " WHERE "
            sql = """
                SELECT
                    rank(matchinfo) as rank, room_id, event_id, origin_server_ts, stream_ordering
                FROM (
                    SELECT key, event_id, matchinfo(event_search) as matchinfo
                    FROM event_search
                    WHERE value MATCH ?
                )
                CROSS JOIN events USING (event_id)
                WHERE
            """
            search_query = _parse_query_for_sqlite(search_term)
            args = [search_query] + args

            count_sql = (
                "SELECT room_id, count(*) as count FROM event_search"
                " WHERE value MATCH ? AND "
            )
            count_args = [search_term] + count_args
            count_sql = """
                SELECT room_id, count(*) as count FROM event_search
                WHERE value MATCH ? AND
            """
            count_args = [search_query] + count_args
        else:
            # This should be unreachable.
            raise Exception("Unrecognized database engine")
@@ -631,10 +657,10 @@ class SearchStore(SearchBackgroundUpdateStore):
        # We add an arbitrary limit here to ensure we don't try to pull the
        # entire table from the database.
        if isinstance(self.database_engine, PostgresEngine):
            sql += (
                " ORDER BY origin_server_ts DESC NULLS LAST,"
                " stream_ordering DESC NULLS LAST LIMIT ?"
            )
            sql += """
                ORDER BY origin_server_ts DESC NULLS LAST, stream_ordering DESC NULLS LAST
                LIMIT ?
            """
        elif isinstance(self.database_engine, Sqlite3Engine):
            sql += " ORDER BY origin_server_ts DESC, stream_ordering DESC LIMIT ?"
        else:
@@ -660,7 +686,9 @@ class SearchStore(SearchBackgroundUpdateStore):

        highlights = None
        if isinstance(self.database_engine, PostgresEngine):
            highlights = await self._find_highlights_in_postgres(search_query, events)
            highlights = await self._find_highlights_in_postgres(
                search_query, events, tsquery_func
            )

        count_sql += " GROUP BY room_id"
@@ -686,7 +714,7 @@ class SearchStore(SearchBackgroundUpdateStore):
        }

    async def _find_highlights_in_postgres(
        self, search_query: str, events: List[EventBase]
        self, search_query: str, events: List[EventBase], tsquery_func: str
    ) -> Set[str]:
        """Given a list of events and a search term, return a list of words
        that match from the content of the event.

@@ -697,6 +725,7 @@ class SearchStore(SearchBackgroundUpdateStore):
        Args:
            search_query
            events: A list of events
            tsquery_func: The tsquery_* function to use when making queries

        Returns:
            A set of strings.
@@ -729,7 +758,7 @@ class SearchStore(SearchBackgroundUpdateStore):
                while stop_sel in value:
                    stop_sel += ">"

                query = "SELECT ts_headline(?, to_tsquery('english', ?), %s)" % (
                query = f"SELECT ts_headline(?, {tsquery_func}('english', ?), %s)" % (
                    _to_postgres_options(
                        {
                            "StartSel": start_sel,
@@ -760,20 +789,127 @@ def _to_postgres_options(options_dict: JsonDict) -> str:
    return "'%s'" % (",".join("%s=%s" % (k, v) for k, v in options_dict.items()),)


def _parse_query(database_engine: BaseDatabaseEngine, search_term: str) -> str:
    """Takes a plain unicode string from the user and converts it into a form
    that can be passed to database.
    We use this so that we can add prefix matching, which isn't something
    that is supported by default.
    """

    # Pull out the individual words, discarding any non-word characters.
    results = re.findall(r"([\w\-]+)", search_term, re.UNICODE)

    if isinstance(database_engine, PostgresEngine):
        return " & ".join(result + ":*" for result in results)
    elif isinstance(database_engine, Sqlite3Engine):
        return " & ".join(result + "*" for result in results)
    else:
        # This should be unreachable.
        raise Exception("Unrecognized database engine")


@dataclass
class Phrase:
    phrase: List[str]


class SearchToken(enum.Enum):
    Not = enum.auto()
    Or = enum.auto()
    And = enum.auto()


Token = Union[str, Phrase, SearchToken]
TokenList = List[Token]


def _is_stop_word(word: str) -> bool:
    # TODO Pull these out of the dictionary:
    # https://github.com/postgres/postgres/blob/master/src/backend/snowball/stopwords/english.stop
    return word in {"the", "a", "you", "me", "and", "but"}


def _tokenize_query(query: str) -> TokenList:
    """
    Convert the user-supplied `query` into a TokenList, which can be translated into
    some DB-specific syntax.

    The following constructs are supported:

    - phrase queries using "double quotes"
    - case-insensitive `or` and `and` operators
    - negation of a keyword via unary `-`
    - unary hyphen to denote NOT e.g. 'include -exclude'

    The following differs from websearch_to_tsquery:

    - Stop words are not removed.
    - Unclosed phrases are treated differently.

    """
    tokens: TokenList = []

    # Find phrases.
    in_phrase = False
    parts = deque(query.split('"'))
    for i, part in enumerate(parts):
        # The contents inside double quotes is treated as a phrase.
        in_phrase = bool(i % 2)

        # Pull out the individual words, discarding any non-word characters.
        words = deque(re.findall(r"([\w\-]+)", part, re.UNICODE))

        # Phrases have simplified handling of words.
        if in_phrase:
            # Skip stop words.
            phrase = [word for word in words if not _is_stop_word(word)]

            # Consecutive words are implicitly ANDed together.
            if tokens and tokens[-1] not in (SearchToken.Not, SearchToken.Or):
                tokens.append(SearchToken.And)

            # Add the phrase.
            tokens.append(Phrase(phrase))
            continue

        # Otherwise, not in a phrase.
        while words:
            word = words.popleft()

            if word.startswith("-"):
                tokens.append(SearchToken.Not)

                # If there's more word, put it back to be processed again.
                word = word[1:]
                if word:
                    words.appendleft(word)
            elif word.lower() == "or":
                tokens.append(SearchToken.Or)
            else:
                # Skip stop words.
                if _is_stop_word(word):
                    continue

                # Consecutive words are implicitly ANDed together.
                if tokens and tokens[-1] not in (SearchToken.Not, SearchToken.Or):
                    tokens.append(SearchToken.And)

                # Add the search term.
                tokens.append(word)

    return tokens


def _tokens_to_sqlite_match_query(tokens: TokenList) -> str:
    """
    Convert the list of tokens to a string suitable for passing to sqlite's MATCH.
    Assume sqlite was compiled with enhanced query syntax.

    Ref: https://www.sqlite.org/fts3.html#full_text_index_queries
    """
    match_query = []
    for token in tokens:
        if isinstance(token, str):
            match_query.append(token)
        elif isinstance(token, Phrase):
            match_query.append('"' + " ".join(token.phrase) + '"')
        elif token == SearchToken.Not:
            # TODO: SQLite treats NOT as a *binary* operator. Hopefully a search
            # term has already been added before this.
            match_query.append(" NOT ")
        elif token == SearchToken.Or:
            match_query.append(" OR ")
        elif token == SearchToken.And:
            match_query.append(" AND ")
        else:
            raise ValueError(f"unknown token {token}")

    return "".join(match_query)


def _parse_query_for_sqlite(search_term: str) -> str:
    """Takes a plain unicode string from the user and converts it into a form
    that can be passed to sqlite's matchinfo().
    """
    return _tokens_to_sqlite_match_query(_tokenize_query(search_term))
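To make the tokenizer concrete, here is how a sample query flows through the helpers above on SQLite (a hand-traced illustration of the logic, not captured test output):

    tokens = _tokenize_query('run "quick fox" -jump')
    # -> ['run', SearchToken.And, Phrase(phrase=['quick', 'fox']),
    #     SearchToken.Not, 'jump']

    _tokens_to_sqlite_match_query(tokens)
    # -> 'run AND "quick fox" NOT jump'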
@@ -170,6 +170,22 @@ class PostgresEngine(
        """Do we support the `RETURNING` clause in insert/update/delete?"""
        return True

    @property
    def tsquery_func(self) -> str:
        """
        Selects a tsquery_* func to use.

        Ref: https://www.postgresql.org/docs/current/textsearch-controls.html

        Returns:
            The function name.
        """
        # Postgres 11 added support for websearch_to_tsquery.
        assert self._version is not None
        if self._version >= 110000:
            return "websearch_to_tsquery"
        return "plainto_tsquery"

    def is_deadlock(self, error: Exception) -> bool:
        if isinstance(error, psycopg2.DatabaseError):
            # https://www.postgresql.org/docs/current/static/errcodes-appendix.html
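The version gate matters because the two functions interpret user input very differently. Indicative psql behaviour (outputs recalled from the Postgres text-search documentation, so treat as illustrative rather than captured):

    -- Postgres 11+: quotes and unary '-' are understood.
    SELECT websearch_to_tsquery('english', 'run "quick fox" -jump');
    --  'run' & 'quick' <-> 'fox' & !'jump'

    -- Fallback on older Postgres: operators are stripped and everything is ANDed.
    SELECT plainto_tsquery('english', 'run "quick fox" -jump');
    --  'run' & 'quick' & 'fox' & 'jump'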
@@ -88,6 +88,10 @@ class Sqlite3Engine(BaseDatabaseEngine[sqlite3.Connection, sqlite3.Cursor]):

        db_conn.create_function("rank", 1, _rank)
        db_conn.execute("PRAGMA foreign_keys = ON;")

        # Enable WAL.
        # see https://www.sqlite.org/wal.html
        db_conn.execute("PRAGMA journal_mode = WAL;")
        db_conn.commit()

    def is_deadlock(self, error: Exception) -> bool:
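For reference, a standalone sketch of what the new pragma does outside Synapse (illustrative; WAL requires a file-backed database on a filesystem that supports it):

    import sqlite3

    conn = sqlite3.connect("/tmp/demo.db")
    # PRAGMA journal_mode returns the mode actually in effect.
    mode = conn.execute("PRAGMA journal_mode = WAL;").fetchone()[0]
    print(mode)  # "wal" -- readers no longer block the writer, and vice versa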
@@ -0,0 +1,62 @@
# Copyright 2022 The Matrix.org Foundation C.I.C.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import json

from synapse.storage.engines import BaseDatabaseEngine, Sqlite3Engine
from synapse.storage.types import Cursor


def run_create(cur: Cursor, database_engine: BaseDatabaseEngine) -> None:
    """
    Upgrade the event_search table to use the porter tokenizer if it isn't already.

    Applies only for sqlite.
    """
    if not isinstance(database_engine, Sqlite3Engine):
        return

    # Rebuild the event_search table with tokenize=porter configured.
    cur.execute("DROP TABLE event_search")
    cur.execute(
        """
        CREATE VIRTUAL TABLE event_search
        USING fts4 (tokenize=porter, event_id, room_id, sender, key, value )
        """
    )

    # Re-run the background job to re-populate the event_search table.
    cur.execute("SELECT MIN(stream_ordering) FROM events")
    row = cur.fetchone()
    min_stream_id = row[0]

    # If there are not any events, nothing to do.
    if min_stream_id is None:
        return

    cur.execute("SELECT MAX(stream_ordering) FROM events")
    row = cur.fetchone()
    max_stream_id = row[0]

    progress = {
        "target_min_stream_id_inclusive": min_stream_id,
        "max_stream_id_exclusive": max_stream_id + 1,
    }
    progress_json = json.dumps(progress)

    sql = """
    INSERT into background_updates (ordering, update_name, progress_json)
    VALUES (?, ?, ?)
    """

    cur.execute(sql, (7310, "event_search", progress_json))
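A standalone sketch of what `tokenize=porter` changes, for anyone unfamiliar with FTS4 stemming (illustrative only; assumes an SQLite build with FTS4 enabled):

    import sqlite3

    conn = sqlite3.connect(":memory:")
    conn.execute("CREATE VIRTUAL TABLE demo USING fts4(tokenize=porter, value)")
    conn.execute("INSERT INTO demo (value) VALUES ('the fox was jumping')")

    # Porter stemming maps 'jumping' and 'jumps' to the same stem, so the
    # query matches even though the exact word never appears in the row.
    rows = conn.execute("SELECT value FROM demo WHERE value MATCH 'jumps'").fetchall()
    print(rows)  # [('the fox was jumping',)]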
35 synapse/storage/schema/main/delta/73/10login_tokens.sql Normal file

@@ -0,0 +1,35 @@
/*
 * Copyright 2022 The Matrix.org Foundation C.I.C.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

-- Login tokens are short-lived tokens that are used for the m.login.token
-- login method, mainly during SSO logins
CREATE TABLE login_tokens (
    token TEXT PRIMARY KEY,
    user_id TEXT NOT NULL,
    expiry_ts BIGINT NOT NULL,
    used_ts BIGINT,
    auth_provider_id TEXT,
    auth_provider_session_id TEXT
);

-- We're sometimes querying them by the session ID we got from the IdP
CREATE INDEX login_tokens_auth_provider_idx
    ON login_tokens (auth_provider_id, auth_provider_session_id);

-- We're deleting them by their expiration time
CREATE INDEX login_tokens_expiry_time_idx
    ON login_tokens (expiry_ts);
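The two indexes line up with the two access patterns seen in the Python changes earlier in this diff; indicatively:

    -- Served by login_tokens_auth_provider_idx (SSO logout invalidation):
    UPDATE login_tokens SET used_ts = ?
    WHERE auth_provider_id = ? AND auth_provider_session_id = ?;

    -- Served by login_tokens_expiry_time_idx (the periodic cleanup job):
    DELETE FROM login_tokens WHERE expiry_ts <= ?;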
@@ -395,8 +395,8 @@ class DeferredCache(Generic[KT, VT]):
        # _pending_deferred_cache.pop should either return a CacheEntry, or, in the
        # case of a TreeCache, a dict of keys to cache entries. Either way calling
        # iterate_tree_cache_entry on it will do the right thing.
        for entry in iterate_tree_cache_entry(entry):
            for cb in entry.get_invalidation_callbacks(key):
        for iter_entry in iterate_tree_cache_entry(entry):
            for cb in iter_entry.get_invalidation_callbacks(key):
                cb()

    def invalidate_all(self) -> None:
@@ -12,7 +12,6 @@
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import enum
import functools
import inspect
import logging
@@ -146,109 +145,6 @@ class _CacheDescriptorBase:
    )


class _LruCachedFunction(Generic[F]):
    cache: LruCache[CacheKey, Any]
    __call__: F


def lru_cache(
    *, max_entries: int = 1000, cache_context: bool = False
) -> Callable[[F], _LruCachedFunction[F]]:
    """A method decorator that applies a memoizing cache around the function.

    This is more-or-less a drop-in equivalent to functools.lru_cache, although note
    that the signature is slightly different.

    The main differences with functools.lru_cache are:
        (a) the size of the cache can be controlled via the cache_factor mechanism
        (b) the wrapped function can request a "cache_context" which provides a
            callback mechanism to indicate that the result is no longer valid
        (c) prometheus metrics are exposed automatically.

    The function should take zero or more arguments, which are used as the key for the
    cache. Single-argument functions use that argument as the cache key; otherwise the
    arguments are built into a tuple.

    Cached functions can be "chained" (i.e. a cached function can call other cached
    functions and get appropriately invalidated when the caches they call are
    invalidated) by adding a special "cache_context" argument to the function
    and passing that as a kwarg to all caches called. For example:

        @lru_cache(cache_context=True)
        def foo(self, key, cache_context):
            r1 = self.bar1(key, on_invalidate=cache_context.invalidate)
            r2 = self.bar2(key, on_invalidate=cache_context.invalidate)
            return r1 + r2

    The wrapped function also has a 'cache' property which offers direct access to the
    underlying LruCache.
    """

    def func(orig: F) -> _LruCachedFunction[F]:
        desc = LruCacheDescriptor(
            orig,
            max_entries=max_entries,
            cache_context=cache_context,
        )
        return cast(_LruCachedFunction[F], desc)

    return func


class LruCacheDescriptor(_CacheDescriptorBase):
    """Helper for @lru_cache"""

    class _Sentinel(enum.Enum):
        sentinel = object()

    def __init__(
        self,
        orig: Callable[..., Any],
        max_entries: int = 1000,
        cache_context: bool = False,
    ):
        super().__init__(
            orig, num_args=None, uncached_args=None, cache_context=cache_context
        )
        self.max_entries = max_entries

    def __get__(self, obj: Optional[Any], owner: Optional[Type]) -> Callable[..., Any]:
        cache: LruCache[CacheKey, Any] = LruCache(
            cache_name=self.name,
            max_size=self.max_entries,
        )

        get_cache_key = self.cache_key_builder
        sentinel = LruCacheDescriptor._Sentinel.sentinel

        @functools.wraps(self.orig)
        def _wrapped(*args: Any, **kwargs: Any) -> Any:
            invalidate_callback = kwargs.pop("on_invalidate", None)
            callbacks = (invalidate_callback,) if invalidate_callback else ()

            cache_key = get_cache_key(args, kwargs)

            ret = cache.get(cache_key, default=sentinel, callbacks=callbacks)
            if ret != sentinel:
                return ret

            # Add our own `cache_context` to argument list if the wrapped function
            # has asked for one
            if self.add_cache_context:
                kwargs["cache_context"] = _CacheContext.get_instance(cache, cache_key)

            ret2 = self.orig(obj, *args, **kwargs)
            cache.set(cache_key, ret2, callbacks=callbacks)

            return ret2

        wrapped = cast(CachedFunction, _wrapped)
        wrapped.cache = cache
        obj.__dict__[self.name] = wrapped

        return wrapped


class DeferredCacheDescriptor(_CacheDescriptorBase):
    """A method decorator that applies a memoizing cache around the function.

@@ -432,7 +328,7 @@ class DeferredCacheListDescriptor(_CacheDescriptorBase):
        num_args = cached_method.num_args

        if num_args != self.num_args:
            raise Exception(
            raise TypeError(
                "Number of args (%s) does not match underlying cache_method_name=%s (%s)."
                % (self.num_args, self.cached_method_name, num_args)
            )
@@ -24,7 +24,7 @@ from typing_extensions import Literal

from synapse.util import Clock, stringutils

MacaroonType = Literal["access", "delete_pusher", "session", "login"]
MacaroonType = Literal["access", "delete_pusher", "session"]


def get_value_from_macaroon(macaroon: pymacaroons.Macaroon, key: str) -> str:
@@ -111,19 +111,6 @@ class OidcSessionData:
    """The session ID of the ongoing UI Auth ("" if this is a login)"""


@attr.s(slots=True, frozen=True, auto_attribs=True)
class LoginTokenAttributes:
    """Data we store in a short-term login token"""

    user_id: str

    auth_provider_id: str
    """The SSO Identity Provider that the user authenticated with, to get this token."""

    auth_provider_session_id: Optional[str]
    """The session ID advertised by the SSO Identity Provider."""


class MacaroonGenerator:
    def __init__(self, clock: Clock, location: str, secret_key: bytes):
        self._clock = clock
@@ -165,35 +152,6 @@ class MacaroonGenerator:
        macaroon.add_first_party_caveat(f"pushkey = {pushkey}")
        return macaroon.serialize()

    def generate_short_term_login_token(
        self,
        user_id: str,
        auth_provider_id: str,
        auth_provider_session_id: Optional[str] = None,
        duration_in_ms: int = (2 * 60 * 1000),
    ) -> str:
        """Generate a short-term login token used during SSO logins

        Args:
            user_id: The user for which the token is valid.
            auth_provider_id: The SSO IdP the user used.
            auth_provider_session_id: The session ID got during login from the SSO IdP.

        Returns:
            A signed token valid for use as an ``m.login.token`` token.
        """
        now = self._clock.time_msec()
        expiry = now + duration_in_ms
        macaroon = self._generate_base_macaroon("login")
        macaroon.add_first_party_caveat(f"user_id = {user_id}")
        macaroon.add_first_party_caveat(f"time < {expiry}")
        macaroon.add_first_party_caveat(f"auth_provider_id = {auth_provider_id}")
        if auth_provider_session_id is not None:
            macaroon.add_first_party_caveat(
                f"auth_provider_session_id = {auth_provider_session_id}"
            )
        return macaroon.serialize()

    def generate_oidc_session_token(
        self,
        state: str,
@@ -233,49 +191,6 @@ class MacaroonGenerator:

        return macaroon.serialize()

    def verify_short_term_login_token(self, token: str) -> LoginTokenAttributes:
        """Verify a short-term-login macaroon

        Checks that the given token is a valid, unexpired short-term-login token
        minted by this server.

        Args:
            token: The login token to verify.

        Returns:
            A set of attributes carried by this token, including the
            ``user_id`` and information about the SSO IdP used during that
            login.

        Raises:
            MacaroonVerificationFailedException if the verification failed
        """
        macaroon = pymacaroons.Macaroon.deserialize(token)

        v = self._base_verifier("login")
        v.satisfy_general(lambda c: c.startswith("user_id = "))
        v.satisfy_general(lambda c: c.startswith("auth_provider_id = "))
        v.satisfy_general(lambda c: c.startswith("auth_provider_session_id = "))
        satisfy_expiry(v, self._clock.time_msec)
        v.verify(macaroon, self._secret_key)

        user_id = get_value_from_macaroon(macaroon, "user_id")
        auth_provider_id = get_value_from_macaroon(macaroon, "auth_provider_id")

        auth_provider_session_id: Optional[str] = None
        try:
            auth_provider_session_id = get_value_from_macaroon(
                macaroon, "auth_provider_session_id"
            )
        except MacaroonVerificationFailedException:
            pass

        return LoginTokenAttributes(
            user_id=user_id,
            auth_provider_id=auth_provider_id,
            auth_provider_session_id=auth_provider_session_id,
        )

    def verify_guest_token(self, token: str) -> str:
        """Verify a guest access token macaroon

@@ -51,7 +51,7 @@ class NotRetryingDestination(Exception):
        destination: the domain in question
        """

        msg = "Not retrying server %s." % (destination,)
        msg = f"Not retrying server {destination} because we tried it recently retry_last_ts={retry_last_ts} and we won't check for another retry_interval={retry_interval}ms."
        super().__init__(msg)

        self.retry_last_ts = retry_last_ts