2015-03-23 10:20:28 -04:00
|
|
|
# -*- coding: utf-8 -*-
|
2016-01-05 13:01:18 -05:00
|
|
|
# Copyright 2014 - 2016 OpenMarket Ltd
|
2017-03-13 13:27:51 -04:00
|
|
|
# Copyright 2017 Vector Creations Ltd
|
2015-03-23 10:20:28 -04:00
|
|
|
#
|
|
|
|
# Licensed under the Apache License, Version 2.0 (the "License");
|
|
|
|
# you may not use this file except in compliance with the License.
|
|
|
|
# You may obtain a copy of the License at
|
|
|
|
#
|
|
|
|
# http://www.apache.org/licenses/LICENSE-2.0
|
|
|
|
#
|
|
|
|
# Unless required by applicable law or agreed to in writing, software
|
|
|
|
# distributed under the License is distributed on an "AS IS" BASIS,
|
|
|
|
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
|
|
|
# See the License for the specific language governing permissions and
|
|
|
|
# limitations under the License.
|
2018-06-14 04:27:37 -04:00
|
|
|
|
2018-07-09 02:09:20 -04:00
|
|
|
import logging
|
2019-07-12 12:26:02 -04:00
|
|
|
import time
|
2018-08-01 10:54:06 -04:00
|
|
|
import unicodedata
|
2020-03-03 05:54:44 -05:00
|
|
|
import urllib.parse
|
2020-03-12 11:36:27 -04:00
|
|
|
from typing import Any, Dict, Iterable, List, Optional
|
2015-03-23 10:20:28 -04:00
|
|
|
|
2018-07-09 02:09:20 -04:00
|
|
|
import attr
|
2020-03-12 11:36:27 -04:00
|
|
|
import bcrypt # type: ignore[import]
|
2018-07-09 02:09:20 -04:00
|
|
|
import pymacaroons
|
2018-06-28 09:49:57 -04:00
|
|
|
|
2018-10-23 08:12:32 -04:00
|
|
|
from twisted.internet import defer
|
2018-07-09 02:09:20 -04:00
|
|
|
|
|
|
|
import synapse.util.stringutils as stringutils
|
2015-03-23 10:20:28 -04:00
|
|
|
from synapse.api.constants import LoginType
|
2017-12-04 10:47:27 -05:00
|
|
|
from synapse.api.errors import (
|
2018-07-09 02:09:20 -04:00
|
|
|
AuthError,
|
|
|
|
Codes,
|
|
|
|
InteractiveAuthIncompleteError,
|
|
|
|
LoginError,
|
|
|
|
StoreError,
|
2017-12-04 10:47:27 -05:00
|
|
|
SynapseError,
|
2019-07-15 06:45:29 -04:00
|
|
|
UserDeactivatedError,
|
2017-12-04 10:47:27 -05:00
|
|
|
)
|
2019-03-15 13:46:16 -04:00
|
|
|
from synapse.api.ratelimiting import Ratelimiter
|
2019-09-25 06:33:03 -04:00
|
|
|
from synapse.handlers.ui_auth import INTERACTIVE_AUTH_CHECKERS
|
|
|
|
from synapse.handlers.ui_auth.checkers import UserInteractiveAuthChecker
|
2020-03-03 05:54:44 -05:00
|
|
|
from synapse.http.server import finish_request
|
|
|
|
from synapse.http.site import SynapseRequest
|
2019-07-03 10:07:04 -04:00
|
|
|
from synapse.logging.context import defer_to_thread
|
2017-11-02 10:13:25 -04:00
|
|
|
from synapse.module_api import ModuleApi
|
2020-03-03 05:54:44 -05:00
|
|
|
from synapse.push.mailer import load_jinja2_templates
|
2020-03-12 11:36:27 -04:00
|
|
|
from synapse.types import Requester, UserID
|
2017-06-29 09:08:33 -04:00
|
|
|
from synapse.util.caches.expiringcache import ExpiringCache
|
2015-04-01 10:05:30 -04:00
|
|
|
|
2018-07-09 02:09:20 -04:00
|
|
|
from ._base import BaseHandler
|
2015-03-23 10:20:28 -04:00
|
|
|
|
|
|
|
logger = logging.getLogger(__name__)
|
|
|
|
|
|
|
|
|
|
|
|
class AuthHandler(BaseHandler):
|
2016-03-16 11:42:35 -04:00
|
|
|
SESSION_EXPIRE_MS = 48 * 60 * 60 * 1000
|
2015-03-23 10:20:28 -04:00
|
|
|
|
|
|
|
def __init__(self, hs):
    """
    Args:
        hs (synapse.server.HomeServer): the homeserver, used for config,
            clock, macaroon generator and password-provider modules.
    """
    super(AuthHandler, self).__init__(hs)

    # Map from UI-auth type (e.g. m.login.recaptcha) to its checker
    # instance, for every checker enabled by the current config.
    self.checkers = {}  # type: Dict[str, UserInteractiveAuthChecker]
    for auth_checker_class in INTERACTIVE_AUTH_CHECKERS:
        inst = auth_checker_class(hs)
        if inst.is_enabled():
            self.checkers[inst.AUTH_TYPE] = inst  # type: ignore

    self.bcrypt_rounds = hs.config.bcrypt_rounds

    # This is not a cache per se, but a store of all current sessions that
    # expire after N hours
    self.sessions = ExpiringCache(
        cache_name="register_sessions",
        clock=hs.get_clock(),
        expiry_ms=self.SESSION_EXPIRE_MS,
        reset_expiry_on_get=True,
    )

    # NOTE: `self` is passed to the provider modules while this
    # constructor is still running; providers must not call back into the
    # handler during their own construction.
    account_handler = ModuleApi(hs, self)
    self.password_providers = [
        module(config=config, account_handler=account_handler)
        for module, config in hs.config.password_providers
    ]

    logger.info("Extra password_providers: %r", self.password_providers)

    self.hs = hs  # FIXME better possibility to access registrationHandler later?
    self.macaroon_gen = hs.get_macaroon_generator()
    self._password_enabled = hs.config.password_enabled

    # we keep this as a list despite the O(N^2) implication so that we can
    # keep PASSWORD first and avoid confusing clients which pick the first
    # type in the list. (NB that the spec doesn't require us to do so and
    # clients which favour types that they don't understand over those that
    # they do are technically broken)
    login_types = []
    if self._password_enabled:
        login_types.append(LoginType.PASSWORD)
    for provider in self.password_providers:
        if hasattr(provider, "get_supported_login_types"):
            for t in provider.get_supported_login_types().keys():
                if t not in login_types:
                    login_types.append(t)
    self._supported_login_types = login_types

    # Ratelimiter for failed auth during UIA. Uses same ratelimit config
    # as per `rc_login.failed_attempts`.
    self._failed_uia_attempts_ratelimiter = Ratelimiter()

    self._clock = self.hs.get_clock()

    # Load the SSO redirect confirmation page HTML template
    self._sso_redirect_confirm_template = load_jinja2_templates(
        hs.config.sso_redirect_confirm_template_dir, ["sso_redirect_confirm.html"],
    )[0]

    self._server_name = hs.config.server_name

    # cast to tuple for use with str.startswith
    self._whitelisted_sso_clients = tuple(hs.config.sso_client_whitelist)
|
|
|
|
|
2017-12-04 11:38:10 -05:00
|
|
|
@defer.inlineCallbacks
def validate_user_via_ui_auth(
    self, requester: Requester, request_body: Dict[str, Any], clientip: str
):
    """Re-authenticate a user who already holds a valid access token.

    Used for sensitive operations (device deletion, password reset, ...)
    where we want to be sure the token has not been stolen, by running the
    user through UI auth again.

    Args:
        requester: The user, as given by the access token
        request_body: The body of the request sent by the client
        clientip: The IP address of the client.

    Returns:
        defer.Deferred[dict]: the parameters for this request (which may
            have been given only in a previous call).

    Raises:
        InteractiveAuthIncompleteError if the client has not yet completed
            any of the permitted login flows

        AuthError if the client has completed a login flow, and it gives
            a different user to `requester`

        LimitExceededError if the ratelimiter's failed request count for this
            user is too high to proceed
    """
    requester_user_id = requester.user.to_string()

    # Refuse up front if there have been too many failed attempts recently.
    # update=False: merely checking must not itself count as an attempt.
    self._failed_uia_attempts_ratelimiter.ratelimit(
        requester_user_id,
        time_now_s=self._clock.time(),
        rate_hz=self.hs.config.rc_login_failed_attempts.per_second,
        burst_count=self.hs.config.rc_login_failed_attempts.burst_count,
        update=False,
    )

    # Each supported login type stands alone as a single-stage flow.
    flows = [[t] for t in self._supported_login_types]

    try:
        result, params, _ = yield self.check_auth(flows, request_body, clientip)
    except LoginError:
        # Record the failure against the ratelimiter (`can_do_action`
        # doesn't raise) before propagating the error.
        self._failed_uia_attempts_ratelimiter.can_do_action(
            requester_user_id,
            time_now_s=self._clock.time(),
            rate_hz=self.hs.config.rc_login_failed_attempts.per_second,
            burst_count=self.hs.config.rc_login_failed_attempts.burst_count,
            update=True,
        )
        raise

    # Work out which login type completed, and the user it authenticated.
    completed_type = next(
        (t for t in self._supported_login_types if t in result), None
    )
    if completed_type is None:
        # this can't happen
        raise Exception("check_auth returned True but no successful login type")
    authed_user_id = result[completed_type]

    # The UI auth must have been performed by the owner of the access token.
    if authed_user_id != requester_user_id:
        raise AuthError(403, "Invalid auth")

    return params
|
2017-12-04 11:38:10 -05:00
|
|
|
|
2019-09-25 07:10:26 -04:00
|
|
|
def get_enabled_auth_types(self):
|
|
|
|
"""Return the enabled user-interactive authentication types
|
|
|
|
|
|
|
|
Returns the UI-Auth types which are supported by the homeserver's current
|
|
|
|
config.
|
|
|
|
"""
|
|
|
|
return self.checkers.keys()
|
|
|
|
|
2015-03-23 10:20:28 -04:00
|
|
|
@defer.inlineCallbacks
def check_auth(
    self, flows: List[List[str]], clientdict: Dict[str, Any], clientip: str
):
    """
    Takes a dictionary sent by the client in the login / registration
    protocol and handles the User-Interactive Auth flow.

    As a side effect, this function fills in the 'creds' key on the user's
    session with a map, which maps each auth-type (str) to the relevant
    identity authenticated by that auth-type (mostly str, but for captcha, bool).

    If no auth flows have been completed successfully, raises an
    InteractiveAuthIncompleteError. To handle this, you can use
    synapse.rest.client.v2_alpha._base.interactive_auth_handler as a
    decorator.

    Args:
        flows: A list of login flows. Each flow is an ordered list of
            strings representing auth-types. At least one full
            flow must be completed in order for auth to be successful.

        clientdict: The dictionary from the client root level, not the
            'auth' key: this method prompts for auth if none is sent.

        clientip: The IP address of the client.

    Returns:
        defer.Deferred[dict, dict, str]: a deferred tuple of
            (creds, params, session_id).

            'creds' contains the authenticated credentials of each stage.

            'params' contains the parameters for this request (which may
            have been given only in a previous call).

            'session_id' is the ID of this session, either passed in by the
            client or assigned by this call

    Raises:
        InteractiveAuthIncompleteError if the client has not yet completed
            all the stages in any of the permitted flows.
    """

    # Pull the 'auth' submission out of the client's request body, noting
    # the session ID if one was supplied.
    authdict = None
    sid = None  # type: Optional[str]
    if clientdict and "auth" in clientdict:
        authdict = clientdict["auth"]
        del clientdict["auth"]
        if "session" in authdict:
            sid = authdict["session"]
    session = self._get_session_info(sid)

    if len(clientdict) > 0:
        # This was designed to allow the client to omit the parameters
        # and just supply the session in subsequent calls so it split
        # auth between devices by just sharing the session, (eg. so you
        # could continue registration from your phone having clicked the
        # email auth link on there). It's probably too open to abuse
        # because it lets unauthenticated clients store arbitrary objects
        # on a homeserver.
        # Revisit: Assumimg the REST APIs do sensible validation, the data
        # isn't arbintrary.
        session["clientdict"] = clientdict
        self._save_session(session)
    elif "clientdict" in session:
        # No parameters this time: fall back to those stashed on a
        # previous call for the same session.
        clientdict = session["clientdict"]

    if not authdict:
        # No auth submission at all: reply with the flows/params the
        # client needs in order to start UI auth.
        raise InteractiveAuthIncompleteError(
            self._auth_dict_for_flows(flows, session)
        )

    # 'creds' records which stages have been completed on this session.
    if "creds" not in session:
        session["creds"] = {}
    creds = session["creds"]

    # check auth type currently being presented
    errordict = {}  # type: Dict[str, Any]
    if "type" in authdict:
        login_type = authdict["type"]  # type: str
        try:
            result = yield self._check_auth_dict(authdict, clientip)
            if result:
                # Stage passed: record it and persist the session before
                # deciding whether a whole flow is now complete.
                creds[login_type] = result
                self._save_session(session)
        except LoginError as e:
            if login_type == LoginType.EMAIL_IDENTITY:
                # riot used to have a bug where it would request a new
                # validation token (thus sending a new email) each time it
                # got a 401 with a 'flows' field.
                # (https://github.com/vector-im/vector-web/issues/2447).
                #
                # Grandfather in the old behaviour for now to avoid
                # breaking old riot deployments.
                raise
            # this step failed. Merge the error dict into the response
            # so that the client can have another go.
            errordict = e.error_dict()

    # Auth succeeds as soon as every stage of any one flow is in 'creds'.
    for f in flows:
        if len(set(f) - set(creds)) == 0:
            # it's very useful to know what args are stored, but this can
            # include the password in the case of registering, so only log
            # the keys (confusingly, clientdict may contain a password
            # param, creds is just what the user authed as for UI auth
            # and is not sensitive).
            logger.info(
                "Auth completed with creds: %r. Client dict has keys: %r",
                creds,
                list(clientdict),
            )
            return creds, clientdict, session["id"]

    # Not done yet: tell the client what's been completed, and merge in
    # any error from the stage just attempted.
    ret = self._auth_dict_for_flows(flows, session)
    ret["completed"] = list(creds)
    ret.update(errordict)
    raise InteractiveAuthIncompleteError(ret)
|
2015-03-23 10:20:28 -04:00
|
|
|
|
|
|
|
@defer.inlineCallbacks
def add_oob_auth(self, stagetype: str, authdict: Dict[str, Any], clientip: str):
    """Record the outcome of an out-of-band auth stage on a session.

    Currently used to feed the result of fallback auth back into an
    existing UI-auth session.

    Returns:
        True if the stage validated successfully and was recorded on the
        session, False otherwise.
    """
    checker = self.checkers.get(stagetype)
    if checker is None:
        raise LoginError(400, "", Codes.MISSING_PARAM)
    if "session" not in authdict:
        raise LoginError(400, "", Codes.MISSING_PARAM)

    session = self._get_session_info(authdict["session"])
    creds = session.setdefault("creds", {})

    outcome = yield checker.check_auth(authdict, clientip)
    if not outcome:
        return False

    # Stage passed: record it against the session and persist.
    creds[stagetype] = outcome
    self._save_session(session)
    return True
|
2015-04-01 10:05:30 -04:00
|
|
|
|
2020-03-12 11:36:27 -04:00
|
|
|
def get_session_id(self, clientdict: Dict[str, Any]) -> Optional[str]:
    """Extract the UI-auth session ID from a client's request body.

    Args:
        clientdict: The dictionary sent by the client in the request

    Returns:
        The session ID the client supplied under auth.session, or None
        if no session ID was sent.
    """
    if clientdict and "auth" in clientdict:
        authdict = clientdict["auth"]
        if "session" in authdict:
            return authdict["session"]
    return None
|
|
|
|
|
2020-03-12 11:36:27 -04:00
|
|
|
def set_session_data(self, session_id: str, key: str, value: Any) -> None:
    """Attach a server-side key-value pair to a UI-auth session.

    The data lives on the server and cannot be modified by the client.

    Args:
        session_id: The ID of this session as returned from check_auth
        key: The key to store the data under
        value: The data to store
    """
    session = self._get_session_info(session_id)
    serverdict = session.setdefault("serverdict", {})
    serverdict[key] = value
    self._save_session(session)
|
|
|
|
|
2020-03-12 11:36:27 -04:00
|
|
|
def get_session_data(
    self, session_id: str, key: str, default: Optional[Any] = None
) -> Any:
    """Fetch a value previously stored with set_session_data.

    Args:
        session_id: The ID of this session as returned from check_auth
        key: The key the data was stored under
        default: Value to return if the key has not been set
    """
    session = self._get_session_info(session_id)
    serverdict = session.setdefault("serverdict", {})
    return serverdict.get(key, default)
|
2016-03-16 07:56:24 -04:00
|
|
|
|
2017-11-01 13:03:20 -04:00
|
|
|
@defer.inlineCallbacks
def _check_auth_dict(self, authdict: Dict[str, Any], clientip: str):
    """Attempt to validate the auth dict provided by a client.

    Args:
        authdict: auth dict provided by the client
        clientip: IP address of the client

    Returns:
        Deferred: result of the stage verification.

    Raises:
        StoreError if there was a problem accessing the database
        SynapseError if there was a problem with the request
        LoginError if there was an authentication problem.
    """
    auth_type = authdict["type"]

    # Prefer a dedicated UI-auth checker for this stage, if one exists.
    checker = self.checkers.get(auth_type)
    if checker is not None:
        outcome = yield checker.check_auth(authdict, clientip=clientip)
        return outcome

    # No checker: build a v1-login-style dict out of the authdict and fall
    # back to the v1 code
    username = authdict.get("user")
    if username is None:
        raise SynapseError(400, "", Codes.MISSING_PARAM)

    (canonical_id, callback) = yield self.validate_login(username, authdict)
    return canonical_id
|
2015-03-23 10:20:28 -04:00
|
|
|
|
2020-03-12 11:36:27 -04:00
|
|
|
def _get_params_recaptcha(self) -> dict:
    """Parameters the client needs for the recaptcha UI-auth stage."""
    public_key = self.hs.config.recaptcha_public_key
    return {"public_key": public_key}
|
|
|
|
|
2020-03-12 11:36:27 -04:00
|
|
|
def _get_params_terms(self) -> dict:
    """Parameters the client needs for the m.login.terms UI-auth stage."""
    config = self.hs.config
    consent_url = "%s_matrix/consent?v=%s" % (
        config.public_baseurl,
        config.user_consent_version,
    )
    return {
        "policies": {
            "privacy_policy": {
                "version": config.user_consent_version,
                "en": {
                    "name": config.user_consent_policy_name,
                    "url": consent_url,
                },
            }
        }
    }
|
|
|
|
|
2020-03-12 11:36:27 -04:00
|
|
|
def _auth_dict_for_flows(
    self, flows: List[List[str]], session: Dict[str, Any]
) -> Dict[str, Any]:
    """Build the body of a 401 response describing the permitted flows.

    Includes any stage-specific parameters (e.g. the recaptcha public key)
    that the client needs in order to complete a stage.
    """
    public_flows = list(flows)

    # Stages which need extra parameters sent to the client.
    get_params = {
        LoginType.RECAPTCHA: self._get_params_recaptcha,
        LoginType.TERMS: self._get_params_terms,
    }

    params = {}  # type: Dict[str, Any]
    for flow in public_flows:
        for stage in flow:
            if stage in get_params and stage not in params:
                params[stage] = get_params[stage]()

    return {
        "session": session["id"],
        "flows": [{"stages": flow} for flow in public_flows],
        "params": params,
    }
|
2015-04-01 10:05:30 -04:00
|
|
|
|
2020-03-12 11:36:27 -04:00
|
|
|
def _get_session_info(self, session_id: Optional[str]) -> dict:
    """
    Gets or creates a session given a session ID.

    The session can be used to track data across multiple requests, e.g. for
    interactive authentication.
    """
    if session_id and session_id in self.sessions:
        return self.sessions[session_id]

    # Unknown (or absent) session ID: mint a new session under a random,
    # unused ID.
    new_id = stringutils.random_string(24)
    while new_id in self.sessions:
        new_id = stringutils.random_string(24)
    self.sessions[new_id] = {"id": new_id}
    return self.sessions[new_id]
|
|
|
|
|
2015-10-07 09:45:57 -04:00
|
|
|
@defer.inlineCallbacks
def get_access_token_for_user_id(
    self, user_id: str, device_id: Optional[str], valid_until_ms: Optional[int]
):
    """
    Creates a new access token for the user with the given user ID.

    The user is assumed to have been authenticated by some other
    mechanism (e.g. CAS), and the user_id converted to the canonical case.

    The device will be recorded in the table if it is not there already.

    Args:
        user_id: canonical User ID
        device_id: the device ID to associate with the tokens.
            None to leave the tokens unassociated with a device (deprecated:
            we should always have a device ID)
        valid_until_ms: when the token is valid until. None for
            no expiry.
    Returns:
        The access token for the user's session.
    Raises:
        StoreError if there was a problem storing the token.
    """
    fmt_expiry = ""
    if valid_until_ms is not None:
        fmt_expiry = time.strftime(
            " until %Y-%m-%d %H:%M:%S", time.localtime(valid_until_ms / 1000.0)
        )
    logger.info("Logging in user %s on device %s%s", user_id, device_id, fmt_expiry)

    # May refuse to issue a token for this user — presumably resource/limit
    # checks; confirm against Auth.check_auth_blocking.
    yield self.auth.check_auth_blocking(user_id)

    access_token = self.macaroon_gen.generate_access_token(user_id)
    yield self.store.add_access_token_to_user(
        user_id, access_token, device_id, valid_until_ms
    )

    # the device *should* have been registered before we got here; however,
    # it's possible we raced against a DELETE operation. The thing we
    # really don't want is active access_tokens without a record of the
    # device, so we double-check it here.
    if device_id is not None:
        try:
            yield self.store.get_device(user_id, device_id)
        except StoreError:
            yield self.store.delete_access_token(access_token)
            raise StoreError(400, "Login raced against device deletion")

    return access_token
|
2015-10-07 09:45:57 -04:00
|
|
|
|
|
|
|
@defer.inlineCallbacks
def check_user_exists(self, user_id: str):
    """Look up a user by ID, case-insensitively.

    Returns None if there is no match, or if there are several inexact
    matches with no exact one.

    Args:
        user_id: complete @user:id

    Returns:
        defer.Deferred: (unicode) canonical_user_id, or None if zero or
            multiple matches

    Raises:
        UserDeactivatedError if a user is found but is deactivated.
    """
    match = yield self._find_user_id_and_pwd_hash(user_id)
    if match is None:
        return None
    canonical_id, _ = match
    return canonical_id
|
2015-10-07 09:45:57 -04:00
|
|
|
|
2015-08-19 04:30:52 -04:00
|
|
|
@defer.inlineCallbacks
def _find_user_id_and_pwd_hash(self, user_id: str):
    """Look up a user and their password hash, case-insensitively.

    Returns None unless there is exactly one match (or an exact match
    among several inexact ones).

    Returns:
        tuple: A 2-tuple of `(canonical_user_id, password_hash)`
        None: if there is not exactly one match
    """
    user_infos = yield self.store.get_users_by_id_case_insensitive(user_id)

    if not user_infos:
        logger.warning("Attempted to login as %s but they do not exist", user_id)
        return None

    if len(user_infos) == 1:
        # a single match, possibly inexact
        return user_infos.popitem()

    if user_id in user_infos:
        # several candidates, but one matches exactly
        return (user_id, user_infos[user_id])

    # several candidates, none exact: refuse to guess
    logger.warning(
        "Attempted to login as %s but it matches more than one user "
        "inexactly: %r",
        user_id,
        user_infos.keys(),
    )
    return None
|
2015-08-26 08:42:45 -04:00
|
|
|
|
2020-03-12 11:36:27 -04:00
|
|
|
def get_supported_login_types(self) -> Iterable[str]:
|
2017-10-31 06:38:40 -04:00
|
|
|
"""Get a the login types supported for the /login API
|
|
|
|
|
|
|
|
By default this is just 'm.login.password' (unless password_enabled is
|
|
|
|
False in the config file), but password auth providers can provide
|
|
|
|
other login types.
|
|
|
|
|
|
|
|
Returns:
|
2020-03-12 11:36:27 -04:00
|
|
|
login types
|
2017-10-31 06:38:40 -04:00
|
|
|
"""
|
|
|
|
return self._supported_login_types
|
|
|
|
|
2016-04-06 10:58:50 -04:00
|
|
|
@defer.inlineCallbacks
def validate_login(self, username: str, login_submission: Dict[str, Any]):
    """Authenticates the user for the /login API

    Also used by the user-interactive auth flow to validate
    m.login.password auth types.

    Tries each configured password auth provider in turn, then (for
    m.login.password, if enabled) falls back to the local password
    database.

    Args:
        username: username supplied by the user
        login_submission: the whole of the login submission
            (including 'type' and other relevant fields)
    Returns:
        Deferred[str, func]: canonical user id, and optional callback
            to be called once the access token and device id are issued
    Raises:
        StoreError if there was a problem accessing the database
        SynapseError if there was a problem with the request
        LoginError if there was an authentication problem.
    """

    # Accept either a fully-qualified @user:server id, or a bare
    # localpart which we qualify with our own server name.
    if username.startswith("@"):
        qualified_user_id = username
    else:
        qualified_user_id = UserID(username, self.hs.hostname).to_string()

    login_type = login_submission.get("type")
    # Tracks whether anything recognised this login type, so that at the
    # end we can distinguish "unknown type" (400) from "bad credentials"
    # (403).
    known_login_type = False

    # special case to check for "password" for the check_password interface
    # for the auth providers
    password = login_submission.get("password")

    if login_type == LoginType.PASSWORD:
        if not self._password_enabled:
            raise SynapseError(400, "Password login has been disabled.")
        if not password:
            raise SynapseError(400, "Missing parameter: password")

    for provider in self.password_providers:
        # Legacy provider API: a simple check_password hook, used only
        # for m.login.password, and given the *qualified* user id.
        if hasattr(provider, "check_password") and login_type == LoginType.PASSWORD:
            known_login_type = True
            is_valid = yield provider.check_password(qualified_user_id, password)
            if is_valid:
                return qualified_user_id, None

        if not hasattr(provider, "get_supported_login_types") or not hasattr(
            provider, "check_auth"
        ):
            # this password provider doesn't understand custom login types
            continue

        supported_login_types = provider.get_supported_login_types()
        if login_type not in supported_login_types:
            # this password provider doesn't understand this login type
            continue

        known_login_type = True
        login_fields = supported_login_types[login_type]

        # Gather the submission fields this provider requires, so that
        # all missing fields are reported in a single error.
        missing_fields = []
        login_dict = {}
        for f in login_fields:
            if f not in login_submission:
                missing_fields.append(f)
            else:
                login_dict[f] = login_submission[f]
        if missing_fields:
            raise SynapseError(
                400,
                "Missing parameters for login type %s: %s"
                % (login_type, missing_fields),
            )

        # NOTE: unlike the legacy check_password hook above, check_auth
        # receives the raw (possibly unqualified) username.
        result = yield provider.check_auth(username, login_type, login_dict)
        if result:
            if isinstance(result, str):
                # A bare user id means "no post-login callback".
                result = (result, None)
            return result

    if login_type == LoginType.PASSWORD and self.hs.config.password_localdb_enabled:
        known_login_type = True

        canonical_user_id = yield self._check_local_password(
            qualified_user_id, password
        )

        if canonical_user_id:
            return canonical_user_id, None

    if not known_login_type:
        raise SynapseError(400, "Unknown login type %s" % login_type)

    # We raise a 403 here, but note that if we're doing user-interactive
    # login, it turns all LoginErrors into a 401 anyway.
    raise LoginError(403, "Invalid password", errcode=Codes.FORBIDDEN)
|
2015-08-12 10:49:37 -04:00
|
|
|
|
2019-03-26 13:48:30 -04:00
|
|
|
@defer.inlineCallbacks
def check_password_provider_3pid(self, medium: str, address: str, password: str):
    """Ask the configured password providers to validate a thirdparty login.

    Args:
        medium: The medium of the 3pid (ex. email).
        address: The address of the 3pid (ex. jdoe@example.com).
        password: The password of the user.

    Returns:
        Deferred[(str|None, func|None)]: A tuple of `(user_id,
            callback)`. If authentication is successful, `user_id` is a `str`
            containing the authenticated, canonical user ID. `callback` is
            then either a function to be later run after the server has
            completed login/registration, or `None`. If authentication was
            unsuccessful, `user_id` and `callback` are both `None`.
    """
    for provider in self.password_providers:
        if not hasattr(provider, "check_3pid_auth"):
            continue

        # The provider's deferred resolves to None on failure, or on
        # success to either a user_id str or a (user_id, callback_func)
        # tuple; callback_func is run after everything else completes.
        result = yield provider.check_3pid_auth(medium, address, password)
        if not result:
            continue

        if isinstance(result, str):
            # Bare user id: normalise to a (user_id, no-callback) pair.
            result = (result, None)
        return result

    return None, None
|
2019-03-26 13:48:30 -04:00
|
|
|
|
2016-04-06 10:58:50 -04:00
|
|
|
@defer.inlineCallbacks
def _check_local_password(self, user_id: str, password: str):
    """Check a password against the local password database.

    user_id is matched case insensitively, but yields None when the id
    matches more than one stored user inexactly.

    Args:
        user_id: complete @user:id
        password: the provided password
    Returns:
        Deferred[unicode] the canonical_user_id, or Deferred[None] if
            unknown user/bad password
    """
    lookup_result = yield self._find_user_id_and_pwd_hash(user_id)
    if not lookup_result:
        return None
    (user_id, password_hash) = lookup_result

    # A missing hash usually means the account was deactivated; surface
    # that explicitly rather than as a generic login failure.
    if not password_hash:
        deactivated = yield self.store.get_user_deactivated_status(user_id)
        if deactivated:
            raise UserDeactivatedError("This account has been deactivated")

    hash_matches = yield self.validate_hash(password, password_hash)
    if hash_matches:
        return user_id

    logger.warning("Failed password login for user %s", user_id)
    return None
|
2016-04-06 07:02:49 -04:00
|
|
|
|
2018-08-01 06:47:58 -04:00
|
|
|
@defer.inlineCallbacks
def validate_short_term_login_token_and_get_user_id(self, login_token: str):
    """Verify a short-term login token and extract the user it was minted for.

    Args:
        login_token: the serialized login macaroon
    Returns:
        Deferred[str]: the user id encoded in the token
    Raises:
        AuthError: if the token is malformed or fails validation
    """
    auth_api = self.hs.get_auth()
    try:
        macaroon = pymacaroons.Macaroon.deserialize(login_token)
        user_id = auth_api.get_user_id_from_macaroon(macaroon)
        auth_api.validate_macaroon(macaroon, "login", user_id)
    except Exception:
        # Any parse/validation failure maps to the same opaque error.
        raise AuthError(403, "Invalid token", errcode=Codes.FORBIDDEN)

    # Enforce server resource limits / account blocking before letting
    # the login proceed.
    yield self.auth.check_auth_blocking(user_id)
    return user_id
|
2015-11-05 09:01:12 -05:00
|
|
|
|
2017-11-01 11:42:38 -04:00
|
|
|
@defer.inlineCallbacks
def delete_access_token(self, access_token: str):
    """Invalidate a single access token.

    Removes the token from the database, notifies any password provider
    exposing an ``on_logged_out`` hook, and removes pushers tied to the
    token.

    Args:
        access_token: access token to be deleted

    Returns:
        Deferred
    """
    user_info = yield self.auth.get_user_by_access_token(access_token)
    yield self.store.delete_access_token(access_token)

    # Give interested auth providers a chance to react to the logout.
    for provider in self.password_providers:
        if not hasattr(provider, "on_logged_out"):
            continue
        yield provider.on_logged_out(
            user_id=str(user_info["user"]),
            device_id=user_info["device_id"],
            access_token=access_token,
        )

    # Pushers are scoped to an access token; drop any that used this one.
    if user_info["token_id"] is not None:
        yield self.hs.get_pusherpool().remove_pushers_by_access_token(
            str(user_info["user"]), (user_info["token_id"],)
        )
|
|
|
|
|
2017-11-01 11:42:38 -04:00
|
|
|
@defer.inlineCallbacks
def delete_access_tokens_for_user(
    self,
    user_id: str,
    except_token_id: Optional[str] = None,
    device_id: Optional[str] = None,
):
    """Invalidate access tokens belonging to a user.

    Args:
        user_id: ID of user the tokens belong to
        except_token_id: access_token ID which should *not* be deleted
        device_id: ID of device the tokens are associated with.
            If None, tokens associated with any device (or no device) will
            be deleted
    Returns:
        Deferred
    """
    tokens_and_devices = yield self.store.user_delete_access_tokens(
        user_id, except_token_id=except_token_id, device_id=device_id
    )

    # Let interested auth providers observe each individual logout.
    for provider in self.password_providers:
        if not hasattr(provider, "on_logged_out"):
            continue
        for token, _token_id, dev_id in tokens_and_devices:
            yield provider.on_logged_out(
                user_id=user_id, device_id=dev_id, access_token=token
            )

    # Pushers are keyed on access tokens, so clean those up as well.
    yield self.hs.get_pusherpool().remove_pushers_by_access_token(
        user_id, (token_id for _, token_id, _ in tokens_and_devices)
    )
|
|
|
|
|
2015-08-12 10:49:37 -04:00
|
|
|
@defer.inlineCallbacks
def add_threepid(self, user_id: str, medium: str, address: str, validated_at: int):
    """Record a validated third-party identifier against a user.

    Args:
        user_id: user to associate the 3pid with
        medium: kind of 3pid; only "email" and "msisdn" are accepted
        address: the 3pid address itself
        validated_at: timestamp (ms) at which the 3pid was validated

    Raises:
        SynapseError: if the medium is not a recognised value
    """
    # check if medium has a valid value
    if medium not in ("email", "msisdn"):
        raise SynapseError(
            code=400,
            msg=("'%s' is not a valid value for 'medium'" % (medium,)),
            errcode=Codes.INVALID_PARAM,
        )

    # 'Canonicalise' email addresses down to lower case.
    # The homeserver is gradually becoming the entity responsible for
    # validating threepids used for resetting passwords on accounts, so
    # in future Synapse will gain knowledge of specific types (mediums)
    # of threepid. For now, we still use the existing infrastructure,
    # but this is the start of synapse gaining knowledge of specific
    # types of threepid (and fixes the fact that checking for the
    # presence of an email address during password reset was case
    # sensitive).
    if medium == "email":
        address = address.lower()

    yield self.store.user_add_threepid(
        user_id, medium, address, validated_at, self.hs.get_clock().time_msec()
    )
|
|
|
|
|
2016-12-20 13:27:30 -05:00
|
|
|
@defer.inlineCallbacks
def delete_threepid(
    self, user_id: str, medium: str, address: str, id_server: Optional[str] = None
):
    """Attempts to unbind the 3pid on the identity servers and deletes it
    from the local database.

    Args:
        user_id: ID of user to remove the 3pid from.
        medium: The medium of the 3pid being removed: "email" or "msisdn".
        address: The 3pid address to remove.
        id_server: Use the given identity server when unbinding
            any threepids. If None then will attempt to unbind using the
            identity server specified when binding (if known).

    Returns:
        Deferred[bool]: Returns True if successfully unbound the 3pid on
        the identity server, False if identity server doesn't support the
        unbind API.
    """
    # 'Canonicalise' email addresses as per above
    if medium == "email":
        address = address.lower()

    threepid = {"medium": medium, "address": address, "id_server": id_server}

    identity_handler = self.hs.get_handlers().identity_handler
    unbind_succeeded = yield identity_handler.try_unbind_threepid(
        user_id, threepid
    )

    # Remove it locally regardless of whether the remote unbind worked.
    yield self.store.user_delete_threepid(user_id, medium, address)
    return unbind_succeeded
|
2016-12-20 13:27:30 -05:00
|
|
|
|
2020-03-12 11:36:27 -04:00
|
|
|
def _save_session(self, session: Dict[str, Any]) -> None:
    """Stamp the session with the current time and put it back into the
    in-memory session store."""
    # TODO: Persistent storage
    logger.debug("Saving session %s", session)
    now_ms = self.hs.get_clock().time_msec()
    session["last_used"] = now_ms
    self.sessions[session["id"]] = session
|
2015-08-26 10:59:32 -04:00
|
|
|
|
2020-03-12 11:36:27 -04:00
|
|
|
def hash(self, password: str):
    """Compute a secure bcrypt hash of password.

    The hashing itself is deliberately slow, so it is pushed onto a
    background thread rather than blocking the reactor.

    Args:
        password: Password to hash.

    Returns:
        Deferred(unicode): Hashed password.
    """

    def _hash_blocking():
        # NFKC-normalise so visually-identical Unicode inputs hash alike.
        normalised = unicodedata.normalize("NFKC", password)
        salted = normalised.encode("utf8") + self.hs.config.password_pepper.encode(
            "utf8"
        )
        return bcrypt.hashpw(salted, bcrypt.gensalt(self.bcrypt_rounds)).decode(
            "ascii"
        )

    return defer_to_thread(self.hs.get_reactor(), _hash_blocking)
|
2015-08-26 10:59:32 -04:00
|
|
|
|
2020-03-12 11:36:27 -04:00
|
|
|
def validate_hash(self, password: str, stored_hash: bytes):
    """Validates that self.hash(password) == stored_hash.

    Args:
        password: Password to hash.
        stored_hash: Expected hash value.

    Returns:
        Deferred(bool): Whether self.hash(password) == stored_hash.
    """
    if not stored_hash:
        # Nothing on record can ever match.
        return defer.succeed(False)

    if not isinstance(stored_hash, bytes):
        stored_hash = stored_hash.encode("ascii")

    def _check_blocking():
        # Apply the same NFKC normalisation used when hashing.
        normalised = unicodedata.normalize("NFKC", password)
        return bcrypt.checkpw(
            normalised.encode("utf8") + self.hs.config.password_pepper.encode("utf8"),
            stored_hash,
        )

    # bcrypt comparison is slow; run it off the reactor thread.
    return defer_to_thread(self.hs.get_reactor(), _check_blocking)
|
2016-10-03 05:27:10 -04:00
|
|
|
|
2020-03-03 05:54:44 -05:00
|
|
|
def complete_sso_login(
    self,
    registered_user_id: str,
    request: SynapseRequest,
    client_redirect_url: str,
):
    """Having figured out a mxid for this user, complete the HTTP request

    Either redirects straight to the client (when it is whitelisted) or
    serves a confirmation page which redirects once clicked. In both
    cases the HTTP request is finished here.

    Args:
        registered_user_id: The registered user ID to complete SSO login for.
        request: The request to complete.
        client_redirect_url: The URL to which to redirect the user at the end of the
            process.
    """
    # Create a login token
    login_token = self.macaroon_gen.generate_short_term_login_token(
        registered_user_id
    )

    # Append the login token to the original redirect URL (i.e. with its query
    # parameters kept intact) to build the URL to which the template needs to
    # redirect the users once they have clicked on the confirmation link.
    redirect_url = self.add_query_param_to_url(
        client_redirect_url, "loginToken", login_token
    )

    # if the client is whitelisted, we can redirect straight to it
    if client_redirect_url.startswith(self._whitelisted_sso_clients):
        request.redirect(redirect_url)
        finish_request(request)
        return

    # Otherwise, serve the redirect confirmation page.

    # Remove the query parameters from the redirect URL to get a shorter version of
    # it. This is only to display a human-readable URL in the template, but not the
    # URL we redirect users to.
    redirect_url_no_params = client_redirect_url.split("?")[0]

    html = self._sso_redirect_confirm_template.render(
        display_url=redirect_url_no_params,
        redirect_url=redirect_url,
        server_name=self._server_name,
    ).encode("utf-8")

    # Write the rendered page out manually with an explicit
    # Content-Length and finish the request ourselves.
    request.setResponseCode(200)
    request.setHeader(b"Content-Type", b"text/html; charset=utf-8")
    request.setHeader(b"Content-Length", b"%d" % (len(html),))
    request.write(html)
    finish_request(request)
|
|
|
|
|
|
|
|
@staticmethod
def add_query_param_to_url(url: str, param_name: str, param: Any):
    """Return *url* with ``param_name=param`` set in its query string.

    Any existing value(s) for ``param_name`` are replaced; all other
    query parameters are preserved, including repeated keys. (The
    previous implementation round-tripped the query through a dict,
    which silently collapsed duplicate keys in the client's URL.)

    Args:
        url: the URL to modify
        param_name: name of the query parameter to set
        param: value for the query parameter

    Returns:
        The rebuilt URL.
    """
    url_parts = list(urllib.parse.urlparse(url))
    # Keep the query as a list of (name, value) pairs so duplicates of
    # *other* keys survive; drop existing values for param_name so it is
    # replaced rather than duplicated.
    query = [
        (name, value)
        for name, value in urllib.parse.parse_qsl(url_parts[4])
        if name != param_name
    ]
    query.append((param_name, param))
    url_parts[4] = urllib.parse.urlencode(query)
    return urllib.parse.urlunparse(url_parts)
|
|
|
|
|
2016-10-03 05:27:10 -04:00
|
|
|
|
2018-06-22 04:37:10 -04:00
|
|
|
@attr.s
class MacaroonGenerator(object):
    """Mints the macaroons (access tokens, short-term login tokens,
    delete-pusher tokens) used by this homeserver."""

    hs = attr.ib()

    def generate_access_token(
        self, user_id: str, extra_caveats: Optional[List[str]] = None
    ) -> str:
        macaroon = self._generate_base_macaroon(user_id)
        macaroon.add_first_party_caveat("type = access")
        # Include a nonce, to make sure that each login gets a different
        # access token.
        macaroon.add_first_party_caveat(
            "nonce = %s" % (stringutils.random_string_with_symbols(16),)
        )
        for caveat in extra_caveats or []:
            macaroon.add_first_party_caveat(caveat)
        return macaroon.serialize()

    def generate_short_term_login_token(
        self, user_id: str, duration_in_ms: int = (2 * 60 * 1000)
    ) -> str:
        macaroon = self._generate_base_macaroon(user_id)
        macaroon.add_first_party_caveat("type = login")
        # Short-term tokens expire quickly (two minutes by default).
        expiry = self.hs.get_clock().time_msec() + duration_in_ms
        macaroon.add_first_party_caveat("time < %d" % (expiry,))
        return macaroon.serialize()

    def generate_delete_pusher_token(self, user_id: str) -> str:
        macaroon = self._generate_base_macaroon(user_id)
        macaroon.add_first_party_caveat("type = delete_pusher")
        return macaroon.serialize()

    def _generate_base_macaroon(self, user_id: str) -> pymacaroons.Macaroon:
        # Every macaroon is rooted in the server's secret key and bound
        # to a generation number plus the user it was minted for.
        macaroon = pymacaroons.Macaroon(
            location=self.hs.config.server_name,
            identifier="key",
            key=self.hs.config.macaroon_secret_key,
        )
        macaroon.add_first_party_caveat("gen = 1")
        macaroon.add_first_party_caveat("user_id = %s" % (user_id,))
        return macaroon
|