# -*- coding: utf-8 -*-
# Copyright 2014 - 2016 OpenMarket Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

from twisted.internet import defer

from ._base import BaseHandler
from synapse.api.constants import LoginType
from synapse.types import UserID
from synapse.api.errors import AuthError, LoginError, Codes, StoreError, SynapseError
from synapse.util.async import run_on_reactor
from synapse.config.ldap import LDAPMode

from twisted.web.client import PartialDownloadError

import logging
import bcrypt
import pymacaroons
import simplejson

try:
    import ldap3
except ImportError:
    ldap3 = None

import synapse.util.stringutils as stringutils


logger = logging.getLogger(__name__)


class AuthHandler(BaseHandler):
    SESSION_EXPIRE_MS = 48 * 60 * 60 * 1000

    def __init__(self, hs):
        """
        Args:
            hs (synapse.server.HomeServer):
        """
        super(AuthHandler, self).__init__(hs)
        self.checkers = {
            LoginType.PASSWORD: self._check_password_auth,
            LoginType.RECAPTCHA: self._check_recaptcha,
            LoginType.EMAIL_IDENTITY: self._check_email_identity,
            LoginType.DUMMY: self._check_dummy_auth,
        }
        self.bcrypt_rounds = hs.config.bcrypt_rounds
        self.sessions = {}
        self.INVALID_TOKEN_HTTP_STATUS = 401

        self.ldap_enabled = hs.config.ldap_enabled
        if self.ldap_enabled:
            if not ldap3:
                raise RuntimeError(
                    'Missing ldap3 library. This is required for LDAP Authentication.'
                )
            self.ldap_mode = hs.config.ldap_mode
            self.ldap_uri = hs.config.ldap_uri
            self.ldap_start_tls = hs.config.ldap_start_tls
            self.ldap_base = hs.config.ldap_base
            self.ldap_filter = hs.config.ldap_filter
            self.ldap_attributes = hs.config.ldap_attributes
            if self.ldap_mode == LDAPMode.SEARCH:
                self.ldap_bind_dn = hs.config.ldap_bind_dn
                self.ldap_bind_password = hs.config.ldap_bind_password

        self.hs = hs  # FIXME better possibility to access registrationHandler later?
        self.device_handler = hs.get_device_handler()

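    # Illustrative sketch (not part of the original module): the hs.config
    # attributes consumed above, with hypothetical values. The 'uid', 'mail'
    # and 'name' keys of ldap_attributes are the ones this handler looks up
    # later when binding, searching and registering accounts.
    #
    #   hs.config.ldap_enabled       = True
    #   hs.config.ldap_mode          = LDAPMode.SEARCH
    #   hs.config.ldap_uri           = "ldap://ldap.example.com:389"
    #   hs.config.ldap_start_tls     = True
    #   hs.config.ldap_base          = "ou=users,dc=example,dc=com"
    #   hs.config.ldap_filter        = "(objectClass=posixAccount)"
    #   hs.config.ldap_attributes    = {"uid": "uid", "mail": "mail", "name": "cn"}
    #   hs.config.ldap_bind_dn       = "cn=synapse,ou=service,dc=example,dc=com"
    #   hs.config.ldap_bind_password = "secret"
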
    @defer.inlineCallbacks
    def check_auth(self, flows, clientdict, clientip):
        """
        Takes a dictionary sent by the client in the login / registration
        protocol and handles the login flow.

        As a side effect, this function fills in the 'creds' key on the user's
        session with a map, which maps each auth-type (str) to the relevant
        identity authenticated by that auth-type (mostly str, but for captcha, bool).

        Args:
            flows (list): A list of login flows. Each flow is an ordered list of
                          strings representing auth-types. At least one full
                          flow must be completed in order for auth to be successful.
            clientdict: The dictionary from the client root level, not the
                        'auth' key: this method prompts for auth if none is sent.
            clientip (str): The IP address of the client.
        Returns:
            A tuple of (authed, dict, dict, session_id) where authed is true if
            the client has successfully completed an auth flow. If it is true
            the first dict contains the authenticated credentials of each stage.

            If authed is false, the first dictionary is the server response to
            the login request and should be passed back to the client.

            In either case, the second dict contains the parameters for this
            request (which may have been given only in a previous call).

            session_id is the ID of this session, either passed in by the client
            or assigned by the call to check_auth
        """

        authdict = None
        sid = None
        if clientdict and 'auth' in clientdict:
            authdict = clientdict['auth']
            del clientdict['auth']
            if 'session' in authdict:
                sid = authdict['session']
        session = self._get_session_info(sid)

        if len(clientdict) > 0:
            # This was designed to allow the client to omit the parameters
            # and just supply the session in subsequent calls so it split
            # auth between devices by just sharing the session, (eg. so you
            # could continue registration from your phone having clicked the
            # email auth link on there). It's probably too open to abuse
            # because it lets unauthenticated clients store arbitrary objects
            # on a home server.
            # Revisit: Assuming the REST APIs do sensible validation, the data
            # isn't arbitrary.
            session['clientdict'] = clientdict
            self._save_session(session)
        elif 'clientdict' in session:
            clientdict = session['clientdict']

        if not authdict:
            defer.returnValue(
                (
                    False, self._auth_dict_for_flows(flows, session),
                    clientdict, session['id']
                )
            )

        if 'creds' not in session:
            session['creds'] = {}
        creds = session['creds']

        # check auth type currently being presented
        if 'type' in authdict:
            if authdict['type'] not in self.checkers:
                raise LoginError(400, "", Codes.UNRECOGNIZED)
            result = yield self.checkers[authdict['type']](authdict, clientip)
            if result:
                creds[authdict['type']] = result
                self._save_session(session)

        for f in flows:
            if len(set(f) - set(creds.keys())) == 0:
                logger.info("Auth completed with creds: %r", creds)
                defer.returnValue((True, creds, clientdict, session['id']))

        ret = self._auth_dict_for_flows(flows, session)
        ret['completed'] = creds.keys()
        defer.returnValue((False, ret, clientdict, session['id']))

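    # Illustrative sketch (not part of the original module): the shapes
    # check_auth works with, for a hypothetical registration that accepts
    # either a recaptcha stage or a dummy stage.
    #
    #   flows = [[LoginType.RECAPTCHA], [LoginType.DUMMY]]
    #   clientdict = {
    #       "username": "alice",
    #       "auth": {"type": LoginType.DUMMY, "session": "somesessionid"},
    #   }
    #   authed, creds, params, session_id = yield self.check_auth(
    #       flows, clientdict, "192.0.2.1"
    #   )
    #   # On success authed is True and creds maps each completed auth-type to
    #   # the identity it authenticated (True for the dummy/recaptcha stages).
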
    @defer.inlineCallbacks
    def add_oob_auth(self, stagetype, authdict, clientip):
        """
        Adds the result of out-of-band authentication into an existing auth
        session. Currently used for adding the result of fallback auth.
        """
        if stagetype not in self.checkers:
            raise LoginError(400, "", Codes.MISSING_PARAM)
        if 'session' not in authdict:
            raise LoginError(400, "", Codes.MISSING_PARAM)

        sess = self._get_session_info(
            authdict['session']
        )
        if 'creds' not in sess:
            sess['creds'] = {}
        creds = sess['creds']

        result = yield self.checkers[stagetype](authdict, clientip)
        if result:
            creds[stagetype] = result
            self._save_session(sess)
            defer.returnValue(True)
        defer.returnValue(False)

    def get_session_id(self, clientdict):
        """
        Gets the session ID for a client given the client dictionary

        Args:
            clientdict: The dictionary sent by the client in the request

        Returns:
            str|None: The string session ID the client sent. If the client did
                not send a session ID, returns None.
        """
        sid = None
        if clientdict and 'auth' in clientdict:
            authdict = clientdict['auth']
            if 'session' in authdict:
                sid = authdict['session']
        return sid

    def set_session_data(self, session_id, key, value):
        """
        Store a key-value pair into the sessions data associated with this
        request. This data is stored server-side and cannot be modified by
        the client.

        Args:
            session_id (string): The ID of this session as returned from check_auth
            key (string): The key to store the data under
            value (any): The data to store
        """
        sess = self._get_session_info(session_id)
        sess.setdefault('serverdict', {})[key] = value
        self._save_session(sess)

    def get_session_data(self, session_id, key, default=None):
        """
        Retrieve data stored with set_session_data

        Args:
            session_id (string): The ID of this session as returned from check_auth
            key (string): The key the data was stored under
            default (any): Value to return if the key has not been set
        """
        sess = self._get_session_info(session_id)
        return sess.setdefault('serverdict', {}).get(key, default)

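    # Illustrative sketch (not part of the original module): storing and later
    # retrieving server-side data tied to a UI-auth session. The key and value
    # shown are hypothetical.
    #
    #   self.set_session_data(session_id, "registered_user_id", "@alice:example.com")
    #   user_id = self.get_session_data(session_id, "registered_user_id", default=None)
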
    def _check_password_auth(self, authdict, _):
        if "user" not in authdict or "password" not in authdict:
            raise LoginError(400, "", Codes.MISSING_PARAM)

        user_id = authdict["user"]
        password = authdict["password"]
        if not user_id.startswith('@'):
            user_id = UserID.create(user_id, self.hs.hostname).to_string()

        return self._check_password(user_id, password)

    @defer.inlineCallbacks
    def _check_recaptcha(self, authdict, clientip):
        try:
            user_response = authdict["response"]
        except KeyError:
            # Client tried to provide captcha but didn't give the parameter:
            # bad request.
            raise LoginError(
                400, "Captcha response is required",
                errcode=Codes.CAPTCHA_NEEDED
            )

        logger.info(
            "Submitting recaptcha response %s with remoteip %s",
            user_response, clientip
        )

        # TODO: get this from the homeserver rather than creating a new one for
        # each request
        try:
            client = self.hs.get_simple_http_client()
            resp_body = yield client.post_urlencoded_get_json(
                self.hs.config.recaptcha_siteverify_api,
                args={
                    'secret': self.hs.config.recaptcha_private_key,
                    'response': user_response,
                    'remoteip': clientip,
                }
            )
        except PartialDownloadError as pde:
            # Twisted is silly
            data = pde.response
            resp_body = simplejson.loads(data)

        if 'success' in resp_body:
            # Note that we do NOT check the hostname here: we explicitly
            # intend the CAPTCHA to be presented by whatever client the
            # user is using, we just care that they have completed a CAPTCHA.
            logger.info(
                "%s reCAPTCHA from hostname %s",
                "Successful" if resp_body['success'] else "Failed",
                resp_body.get('hostname')
            )
            if resp_body['success']:
                defer.returnValue(True)
        raise LoginError(401, "", errcode=Codes.UNAUTHORIZED)

    @defer.inlineCallbacks
    def _check_email_identity(self, authdict, _):
        yield run_on_reactor()

        if 'threepid_creds' not in authdict:
            raise LoginError(400, "Missing threepid_creds", Codes.MISSING_PARAM)

        threepid_creds = authdict['threepid_creds']
        identity_handler = self.hs.get_handlers().identity_handler

        logger.info("Getting validated threepid. threepidcreds: %r" % (threepid_creds,))
        threepid = yield identity_handler.threepid_from_creds(threepid_creds)

        if not threepid:
            raise LoginError(401, "", errcode=Codes.UNAUTHORIZED)

        threepid['threepid_creds'] = authdict['threepid_creds']

        defer.returnValue(threepid)

    @defer.inlineCallbacks
    def _check_dummy_auth(self, authdict, _):
        yield run_on_reactor()
        defer.returnValue(True)

    def _get_params_recaptcha(self):
        return {"public_key": self.hs.config.recaptcha_public_key}

    def _auth_dict_for_flows(self, flows, session):
        public_flows = []
        for f in flows:
            public_flows.append(f)

        get_params = {
            LoginType.RECAPTCHA: self._get_params_recaptcha,
        }

        params = {}

        for f in public_flows:
            for stage in f:
                if stage in get_params and stage not in params:
                    params[stage] = get_params[stage]()

        return {
            "session": session['id'],
            "flows": [{"stages": f} for f in public_flows],
            "params": params
        }

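    # Illustrative sketch (not part of the original module): the shape of the
    # dict returned by _auth_dict_for_flows for a recaptcha-then-dummy flow.
    # The session ID and site key values are hypothetical.
    #
    #   {
    #       "session": "QWvxgPFRTeUCzPGsWbRKAbbV",
    #       "flows": [{"stages": ["m.login.recaptcha", "m.login.dummy"]}],
    #       "params": {"m.login.recaptcha": {"public_key": "<recaptcha site key>"}},
    #   }
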
    def _get_session_info(self, session_id):
        if session_id not in self.sessions:
            session_id = None

        if not session_id:
            # create a new session
            while session_id is None or session_id in self.sessions:
                session_id = stringutils.random_string(24)
            self.sessions[session_id] = {
                "id": session_id,
            }

        return self.sessions[session_id]

    def validate_password_login(self, user_id, password):
        """
        Authenticates the user with their username and password.

        Used only by the v1 login API.

        Args:
            user_id (str): complete @user:id
            password (str): Password
        Returns:
            defer.Deferred: (str) canonical user id
        Raises:
            StoreError if there was a problem accessing the database
            LoginError if there was an authentication problem.
        """
        return self._check_password(user_id, password)

    @defer.inlineCallbacks
    def get_login_tuple_for_user_id(self, user_id, device_id=None,
                                    initial_display_name=None):
        """
        Gets login tuple for the user with the given user ID.

        Creates a new access/refresh token for the user.

        The user is assumed to have been authenticated by some other
        mechanism (e.g. CAS), and the user_id converted to the canonical case.

        The device will be recorded in the table if it is not there already.

        Args:
            user_id (str): canonical User ID
            device_id (str|None): the device ID to associate with the tokens.
                None to leave the tokens unassociated with a device (deprecated:
                we should always have a device ID)
            initial_display_name (str): display name to associate with the
                device if it needs re-registering
        Returns:
            A tuple of:
                The access token for the user's session.
                The refresh token for the user's session.
        Raises:
            StoreError if there was a problem storing the token.
            LoginError if there was an authentication problem.
        """
        logger.info("Logging in user %s on device %s", user_id, device_id)
        access_token = yield self.issue_access_token(user_id, device_id)
        refresh_token = yield self.issue_refresh_token(user_id, device_id)

        # the device *should* have been registered before we got here; however,
        # it's possible we raced against a DELETE operation. The thing we
        # really don't want is active access_tokens without a record of the
        # device, so we double-check it here.
        if device_id is not None:
            yield self.device_handler.check_device_registered(
                user_id, device_id, initial_display_name
            )

        defer.returnValue((access_token, refresh_token))

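    # Illustrative sketch (not part of the original module): issuing tokens for
    # an already-authenticated user (e.g. after CAS). The IDs shown are
    # hypothetical.
    #
    #   access_token, refresh_token = yield self.get_login_tuple_for_user_id(
    #       "@alice:example.com", device_id="ABCDEFGH", initial_display_name="laptop"
    #   )
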
    @defer.inlineCallbacks
    def check_user_exists(self, user_id):
        """
        Checks to see if a user with the given id exists. Will check case
        insensitively, but return None if there are multiple inexact matches.

        Args:
            user_id (str): complete @user:id

        Returns:
            defer.Deferred: (str) canonical_user_id, or None if zero or
                multiple matches
        """
        try:
            res = yield self._find_user_id_and_pwd_hash(user_id)
            defer.returnValue(res[0])
        except LoginError:
            defer.returnValue(None)

    @defer.inlineCallbacks
    def _find_user_id_and_pwd_hash(self, user_id):
        """Checks to see if a user with the given id exists. Will check case
        insensitively, but will throw if there are multiple inexact matches.

        Returns:
            tuple: A 2-tuple of `(canonical_user_id, password_hash)`
        """
        user_infos = yield self.store.get_users_by_id_case_insensitive(user_id)
        if not user_infos:
            logger.warn("Attempted to login as %s but they do not exist", user_id)
            raise LoginError(403, "", errcode=Codes.FORBIDDEN)

        if len(user_infos) > 1:
            if user_id not in user_infos:
                logger.warn(
                    "Attempted to login as %s but it matches more than one user "
                    "inexactly: %r",
                    user_id, user_infos.keys()
                )
                raise LoginError(403, "", errcode=Codes.FORBIDDEN)

            defer.returnValue((user_id, user_infos[user_id]))
        else:
            defer.returnValue(user_infos.popitem())

    @defer.inlineCallbacks
    def _check_password(self, user_id, password):
        """Authenticate a user against the LDAP and local databases.

        user_id is checked case insensitively against the local database, but
        will throw if there are multiple inexact matches.

        Args:
            user_id (str): complete @user:id
        Returns:
            (str) the canonical_user_id
        Raises:
            LoginError if the password was incorrect
        """
        valid_ldap = yield self._check_ldap_password(user_id, password)
        if valid_ldap:
            defer.returnValue(user_id)

        result = yield self._check_local_password(user_id, password)
        defer.returnValue(result)

    @defer.inlineCallbacks
    def _check_local_password(self, user_id, password):
        """Authenticate a user against the local password database.

        user_id is checked case insensitively, but will throw if there are
        multiple inexact matches.

        Args:
            user_id (str): complete @user:id
        Returns:
            (str) the canonical_user_id
        Raises:
            LoginError if the password was incorrect
        """
        user_id, password_hash = yield self._find_user_id_and_pwd_hash(user_id)
        result = self.validate_hash(password, password_hash)
        if not result:
            logger.warn("Failed password login for user %s", user_id)
            raise LoginError(403, "", errcode=Codes.FORBIDDEN)
        defer.returnValue(user_id)

    @defer.inlineCallbacks
    def _check_ldap_password(self, user_id, password):
        """ Attempt to authenticate a user against an LDAP Server
        and register an account if none exists.

        Returns:
            True if authentication against LDAP was successful
        """

        if not ldap3 or not self.ldap_enabled:
            defer.returnValue(False)

        if self.ldap_mode not in LDAPMode.LIST:
            raise RuntimeError(
                'Invalid ldap mode specified: {mode}'.format(
                    mode=self.ldap_mode
                )
            )

        try:
            server = ldap3.Server(self.ldap_uri)
            logger.debug(
                "Attempting ldap connection with %s",
                self.ldap_uri
            )

            localpart = UserID.from_string(user_id).localpart
            if self.ldap_mode == LDAPMode.SIMPLE:
                # bind with the local user's ldap credentials
                bind_dn = "{prop}={value},{base}".format(
                    prop=self.ldap_attributes['uid'],
                    value=localpart,
                    base=self.ldap_base
                )
                conn = ldap3.Connection(server, bind_dn, password)
                logger.debug(
                    "Established ldap connection in simple mode: %s",
                    conn
                )

                if self.ldap_start_tls:
                    conn.start_tls()
                    logger.debug(
                        "Upgraded ldap connection in simple mode through StartTLS: %s",
                        conn
                    )

                conn.bind()

            elif self.ldap_mode == LDAPMode.SEARCH:
                # connect with preconfigured credentials and search for local user
                conn = ldap3.Connection(
                    server,
                    self.ldap_bind_dn,
                    self.ldap_bind_password
                )
                logger.debug(
                    "Established ldap connection in search mode: %s",
                    conn
                )

                if self.ldap_start_tls:
                    conn.start_tls()
                    logger.debug(
                        "Upgraded ldap connection in search mode through StartTLS: %s",
                        conn
                    )

                conn.bind()

                # find matching dn
                query = "({prop}={value})".format(
                    prop=self.ldap_attributes['uid'],
                    value=localpart
                )
                if self.ldap_filter:
                    query = "(&{query}{filter})".format(
                        query=query,
                        filter=self.ldap_filter
                    )
                logger.debug("ldap search filter: %s", query)
                result = conn.search(self.ldap_base, query)

                if result and len(conn.response) == 1:
                    # found exactly one result
                    user_dn = conn.response[0]['dn']
                    logger.debug('ldap search found dn: %s', user_dn)

                    # unbind and reconnect, rebind with found dn
                    conn.unbind()
                    conn = ldap3.Connection(
                        server,
                        user_dn,
                        password,
                        auto_bind=True
                    )
                else:
                    # found 0 or > 1 results, abort!
                    logger.warn(
                        "ldap search returned unexpected (%d!=1) amount of results",
                        len(conn.response)
                    )
                    defer.returnValue(False)

            logger.info(
                "User authenticated against ldap server: %s",
                conn
            )

            # check for existing account, if none exists, create one
            if not (yield self.check_user_exists(user_id)):
                # query user metadata for account creation
                query = "({prop}={value})".format(
                    prop=self.ldap_attributes['uid'],
                    value=localpart
                )

                if self.ldap_mode == LDAPMode.SEARCH and self.ldap_filter:
                    query = "(&{filter}{user_filter})".format(
                        filter=query,
                        user_filter=self.ldap_filter
                    )
                logger.debug("ldap registration filter: %s", query)

                result = conn.search(
                    search_base=self.ldap_base,
                    search_filter=query,
                    attributes=[
                        self.ldap_attributes['name'],
                        self.ldap_attributes['mail']
                    ]
                )

                if len(conn.response) == 1:
                    attrs = conn.response[0]['attributes']
                    mail = attrs[self.ldap_attributes['mail']][0]
                    name = attrs[self.ldap_attributes['name']][0]

                    # create account
                    registration_handler = self.hs.get_handlers().registration_handler
                    user_id, access_token = (
                        yield registration_handler.register(localpart=localpart)
                    )

                    # TODO: bind email, set displayname with data from ldap directory

                    logger.info(
                        "ldap registration successful: %s: %s (%s, %s)",
                        user_id,
                        localpart,
                        name,
                        mail
                    )
                else:
                    logger.warn(
                        "ldap registration failed: unexpected (%d!=1) amount of results",
                        len(conn.response)
                    )
                    defer.returnValue(False)

            defer.returnValue(True)
        except ldap3.core.exceptions.LDAPException as e:
            logger.warn("Error during ldap authentication: %s", e)
            defer.returnValue(False)

    @defer.inlineCallbacks
    def issue_access_token(self, user_id, device_id=None):
        access_token = self.generate_access_token(user_id)
        yield self.store.add_access_token_to_user(user_id, access_token,
                                                  device_id)
        defer.returnValue(access_token)

    @defer.inlineCallbacks
    def issue_refresh_token(self, user_id, device_id=None):
        refresh_token = self.generate_refresh_token(user_id)
        yield self.store.add_refresh_token_to_user(user_id, refresh_token,
                                                   device_id)
        defer.returnValue(refresh_token)

    def generate_access_token(self, user_id, extra_caveats=None,
                              duration_in_ms=(60 * 60 * 1000)):
        extra_caveats = extra_caveats or []
        macaroon = self._generate_base_macaroon(user_id)
        macaroon.add_first_party_caveat("type = access")
        now = self.hs.get_clock().time_msec()
        expiry = now + duration_in_ms
        macaroon.add_first_party_caveat("time < %d" % (expiry,))
        for caveat in extra_caveats:
            macaroon.add_first_party_caveat(caveat)
        return macaroon.serialize()

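    # Illustrative sketch (not part of the original module): the first-party
    # caveats carried by an access-token macaroon generated above. The user ID
    # and expiry timestamp are hypothetical.
    #
    #   gen = 1
    #   user_id = @alice:example.com
    #   type = access
    #   time < 1470000000000
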
    def generate_refresh_token(self, user_id):
        m = self._generate_base_macaroon(user_id)
        m.add_first_party_caveat("type = refresh")
        # Important to add a nonce, because otherwise every refresh token for a
        # user will be the same.
        m.add_first_party_caveat("nonce = %s" % (
            stringutils.random_string_with_symbols(16),
        ))
        return m.serialize()

    def generate_short_term_login_token(self, user_id, duration_in_ms=(2 * 60 * 1000)):
        macaroon = self._generate_base_macaroon(user_id)
        macaroon.add_first_party_caveat("type = login")
        now = self.hs.get_clock().time_msec()
        expiry = now + duration_in_ms
        macaroon.add_first_party_caveat("time < %d" % (expiry,))
        return macaroon.serialize()

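    # Illustrative sketch (not part of the original module): a short-term login
    # token can be handed to a client (e.g. after CAS/SSO) and later exchanged
    # for the user ID it was minted for. The user ID is hypothetical.
    #
    #   token = self.generate_short_term_login_token("@alice:example.com")
    #   user_id = self.validate_short_term_login_token_and_get_user_id(token)
    #   # user_id == "@alice:example.com", provided the token has not expired
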
    def generate_delete_pusher_token(self, user_id):
        macaroon = self._generate_base_macaroon(user_id)
        macaroon.add_first_party_caveat("type = delete_pusher")
        return macaroon.serialize()

    def validate_short_term_login_token_and_get_user_id(self, login_token):
        try:
            macaroon = pymacaroons.Macaroon.deserialize(login_token)
            auth_api = self.hs.get_auth()
            auth_api.validate_macaroon(macaroon, "login", True)
            return self.get_user_from_macaroon(macaroon)
        except (pymacaroons.exceptions.MacaroonException, TypeError, ValueError):
            raise AuthError(401, "Invalid token", errcode=Codes.UNKNOWN_TOKEN)

    def _generate_base_macaroon(self, user_id):
        macaroon = pymacaroons.Macaroon(
            location=self.hs.config.server_name,
            identifier="key",
            key=self.hs.config.macaroon_secret_key)
        macaroon.add_first_party_caveat("gen = 1")
        macaroon.add_first_party_caveat("user_id = %s" % (user_id,))
        return macaroon

    def get_user_from_macaroon(self, macaroon):
        user_prefix = "user_id = "
        for caveat in macaroon.caveats:
            if caveat.caveat_id.startswith(user_prefix):
                return caveat.caveat_id[len(user_prefix):]
        raise AuthError(
            self.INVALID_TOKEN_HTTP_STATUS, "No user_id found in token",
            errcode=Codes.UNKNOWN_TOKEN
        )

    @defer.inlineCallbacks
    def set_password(self, user_id, newpassword, requester=None):
        password_hash = self.hash(newpassword)

        except_access_token_ids = [requester.access_token_id] if requester else []

        try:
            yield self.store.user_set_password_hash(user_id, password_hash)
        except StoreError as e:
            if e.code == 404:
                raise SynapseError(404, "Unknown user", Codes.NOT_FOUND)
            raise e
        yield self.store.user_delete_access_tokens(
            user_id, except_access_token_ids
        )
        yield self.hs.get_pusherpool().remove_pushers_by_user(
            user_id, except_access_token_ids
        )

    @defer.inlineCallbacks
    def add_threepid(self, user_id, medium, address, validated_at):
        yield self.store.user_add_threepid(
            user_id, medium, address, validated_at,
            self.hs.get_clock().time_msec()
        )

    def _save_session(self, session):
        # TODO: Persistent storage
        logger.debug("Saving session %s", session)
        session["last_used"] = self.hs.get_clock().time_msec()
        self.sessions[session["id"]] = session
        self._prune_sessions()

    def _prune_sessions(self):
        for sid, sess in self.sessions.items():
            last_used = 0
            if 'last_used' in sess:
                last_used = sess['last_used']
            now = self.hs.get_clock().time_msec()
            if last_used < now - AuthHandler.SESSION_EXPIRE_MS:
                del self.sessions[sid]

    def hash(self, password):
        """Computes a secure hash of password.

        Args:
            password (str): Password to hash.

        Returns:
            Hashed password (str).
        """
        return bcrypt.hashpw(password + self.hs.config.password_pepper,
                             bcrypt.gensalt(self.bcrypt_rounds))

    def validate_hash(self, password, stored_hash):
        """Validates that self.hash(password) == stored_hash.

        Args:
            password (str): Password to hash.
            stored_hash (str): Expected hash value.

        Returns:
            Whether self.hash(password) == stored_hash (bool).
        """
        if stored_hash:
            return bcrypt.hashpw(password + self.hs.config.password_pepper,
                                 stored_hash.encode('utf-8')) == stored_hash
        else:
            return False
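

# Illustrative sketch (not part of the original module): the round trip between
# hash() and validate_hash(). The password value and the `auth_handler` instance
# are hypothetical; both calls incorporate the configured password pepper and
# bcrypt work factor.
#
#   stored_hash = auth_handler.hash("correct horse battery staple")
#   assert auth_handler.validate_hash("correct horse battery staple", stored_hash)
#   assert not auth_handler.validate_hash("wrong password", stored_hash)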