# -*- coding: utf-8 -*-
# Copyright 2019-2021 The Matrix.org Foundation C.I.C.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import time
import urllib.parse
from typing import Any, Dict, List, Optional, Union
from unittest.mock import Mock
from urllib.parse import urlencode

import pymacaroons

from twisted.web.resource import Resource

import synapse.rest.admin
from synapse.appservice import ApplicationService
from synapse.rest.client.v1 import login, logout
from synapse.rest.client.v2_alpha import devices, register
from synapse.rest.client.v2_alpha.account import WhoamiRestServlet
from synapse.rest.synapse.client import build_synapse_client_resource_tree
from synapse.types import create_requester

from tests import unittest
from tests.handlers.test_oidc import HAS_OIDC
from tests.handlers.test_saml import has_saml2
from tests.rest.client.v1.utils import TEST_OIDC_AUTH_ENDPOINT, TEST_OIDC_CONFIG
from tests.test_utils.html_parsers import TestHtmlParser
from tests.unittest import HomeserverTestCase, override_config, skip_unless

try:
    import jwt

    HAS_JWT = True
except ImportError:
    HAS_JWT = False
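
# HAS_JWT is used with @skip_unless further down so that the JWT test cases are
# skipped when PyJWT is not installed.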

# synapse server name: used to populate public_baseurl in some tests
SYNAPSE_SERVER_PUBLIC_HOSTNAME = "synapse"

# public_baseurl for some tests. It uses an http:// scheme because
# FakeChannel.isSecure() returns False, so synapse will see the requested uri as
# http://..., so using http in the public_baseurl stops Synapse trying to redirect to
# https://....
BASE_URL = "http://%s/" % (SYNAPSE_SERVER_PUBLIC_HOSTNAME,)
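# (with the hostname above this evaluates to "http://synapse/")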

# CAS server used in some tests
CAS_SERVER = "https://fake.test"

# just enough to tell pysaml2 where to redirect to
SAML_SERVER = "https://test.saml.server/idp/sso"
TEST_SAML_METADATA = """
<md:EntityDescriptor xmlns:md="urn:oasis:names:tc:SAML:2.0:metadata">
  <md:IDPSSODescriptor protocolSupportEnumeration="urn:oasis:names:tc:SAML:2.0:protocol">
      <md:SingleSignOnService Binding="urn:oasis:names:tc:SAML:2.0:bindings:HTTP-Redirect" Location="%(SAML_SERVER)s"/>
  </md:IDPSSODescriptor>
</md:EntityDescriptor>
""" % {
    "SAML_SERVER": SAML_SERVER,
}

LOGIN_URL = b"/_matrix/client/r0/login"
TEST_URL = b"/_matrix/client/r0/account/whoami"

# a (valid) url with some annoying characters in it. %3D is =, %26 is &, %2B is +
TEST_CLIENT_REDIRECT_URL = 'https://x?<ab c>&q"+%3D%2B"="fö%26=o"'

# the query params in TEST_CLIENT_REDIRECT_URL
EXPECTED_CLIENT_REDIRECT_URL_PARAMS = [("<ab c>", ""), ('q" =+"', '"fö&=o"')]
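# (The mapping above can be sanity-checked - outside of the tests themselves - by
# decoding the query string with the stdlib parser, e.g.:
#
#     urllib.parse.parse_qsl(
#         urllib.parse.urlsplit(TEST_CLIENT_REDIRECT_URL).query, keep_blank_values=True
#     )
#
# which yields the same two (name, value) pairs.)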

# (possibly experimental) login flows we expect to appear in the list after the normal
# ones
ADDITIONAL_LOGIN_FLOWS = [{"type": "uk.half-shot.msc2778.login.application_service"}]


class LoginRestServletTestCase(unittest.HomeserverTestCase):

    servlets = [
        synapse.rest.admin.register_servlets_for_client_rest_resource,
        login.register_servlets,
        logout.register_servlets,
        devices.register_servlets,
        lambda hs, http_server: WhoamiRestServlet(hs).register(http_server),
    ]

    def make_homeserver(self, reactor, clock):
        self.hs = self.setup_test_homeserver()
        self.hs.config.enable_registration = True
        self.hs.config.registrations_require_3pid = []
        self.hs.config.auto_join_rooms = []
        self.hs.config.enable_registration_captcha = False

        return self.hs

    @override_config(
        {
            "rc_login": {
                "address": {"per_second": 0.17, "burst_count": 5},
                # Prevent the account login ratelimiter from raising first
                #
                # This is normally covered by the default test homeserver config
                # which sets these values to 10000, but as we're overriding the entire
                # rc_login dict here, we need to set this manually as well
                "account": {"per_second": 10000, "burst_count": 10000},
            }
        }
    )
    def test_POST_ratelimiting_per_address(self):
        # Create different users so we're sure not to be bothered by the per-user
        # ratelimiter.
        for i in range(0, 6):
            self.register_user("kermit" + str(i), "monkey")

        for i in range(0, 6):
            params = {
                "type": "m.login.password",
                "identifier": {"type": "m.id.user", "user": "kermit" + str(i)},
                "password": "monkey",
            }
            channel = self.make_request(b"POST", LOGIN_URL, params)

            if i == 5:
                self.assertEquals(channel.result["code"], b"429", channel.result)
                retry_after_ms = int(channel.json_body["retry_after_ms"])
            else:
                self.assertEquals(channel.result["code"], b"200", channel.result)

        # Since we're ratelimiting at 0.17 requests/second (roughly one every 6
        # seconds), retry_after_ms should be lower than 6000ms.
        self.assertTrue(retry_after_ms < 6000)

        self.reactor.advance(retry_after_ms / 1000.0 + 1.0)

        params = {
            "type": "m.login.password",
            "identifier": {"type": "m.id.user", "user": "kermit" + str(i)},
            "password": "monkey",
        }
        channel = self.make_request(b"POST", LOGIN_URL, params)

        self.assertEquals(channel.result["code"], b"200", channel.result)

    @override_config(
        {
            "rc_login": {
                "account": {"per_second": 0.17, "burst_count": 5},
                # Prevent the address login ratelimiter from raising first
                #
                # This is normally covered by the default test homeserver config
                # which sets these values to 10000, but as we're overriding the entire
                # rc_login dict here, we need to set this manually as well
                "address": {"per_second": 10000, "burst_count": 10000},
            }
        }
    )
    def test_POST_ratelimiting_per_account(self):
        self.register_user("kermit", "monkey")

        for i in range(0, 6):
            params = {
                "type": "m.login.password",
                "identifier": {"type": "m.id.user", "user": "kermit"},
                "password": "monkey",
            }
            channel = self.make_request(b"POST", LOGIN_URL, params)

            if i == 5:
                self.assertEquals(channel.result["code"], b"429", channel.result)
                retry_after_ms = int(channel.json_body["retry_after_ms"])
            else:
                self.assertEquals(channel.result["code"], b"200", channel.result)

        # Since we're ratelimiting at 0.17 requests/second (roughly one every 6
        # seconds), retry_after_ms should be lower than 6000ms.
        self.assertTrue(retry_after_ms < 6000)

        self.reactor.advance(retry_after_ms / 1000.0)

        params = {
            "type": "m.login.password",
            "identifier": {"type": "m.id.user", "user": "kermit"},
            "password": "monkey",
        }
        channel = self.make_request(b"POST", LOGIN_URL, params)

        self.assertEquals(channel.result["code"], b"200", channel.result)

    @override_config(
        {
            "rc_login": {
                # Prevent the address login ratelimiter from raising first
                #
                # This is normally covered by the default test homeserver config
                # which sets these values to 10000, but as we're overriding the entire
                # rc_login dict here, we need to set this manually as well
                "address": {"per_second": 10000, "burst_count": 10000},
                "failed_attempts": {"per_second": 0.17, "burst_count": 5},
            }
        }
    )
    def test_POST_ratelimiting_per_account_failed_attempts(self):
        self.register_user("kermit", "monkey")

        for i in range(0, 6):
            params = {
                "type": "m.login.password",
                "identifier": {"type": "m.id.user", "user": "kermit"},
                "password": "notamonkey",
            }
            channel = self.make_request(b"POST", LOGIN_URL, params)

            if i == 5:
                self.assertEquals(channel.result["code"], b"429", channel.result)
                retry_after_ms = int(channel.json_body["retry_after_ms"])
            else:
                self.assertEquals(channel.result["code"], b"403", channel.result)

        # Since we're ratelimiting at 0.17 requests/second (roughly one every 6
        # seconds), retry_after_ms should be lower than 6000ms.
        self.assertTrue(retry_after_ms < 6000)

        self.reactor.advance(retry_after_ms / 1000.0 + 1.0)

        params = {
            "type": "m.login.password",
            "identifier": {"type": "m.id.user", "user": "kermit"},
            "password": "notamonkey",
        }
        channel = self.make_request(b"POST", LOGIN_URL, params)

        self.assertEquals(channel.result["code"], b"403", channel.result)

    @override_config({"session_lifetime": "24h"})
    def test_soft_logout(self):
        self.register_user("kermit", "monkey")

        # we shouldn't be able to make requests without an access token
        channel = self.make_request(b"GET", TEST_URL)
        self.assertEquals(channel.result["code"], b"401", channel.result)
        self.assertEquals(channel.json_body["errcode"], "M_MISSING_TOKEN")

        # log in as normal
        params = {
            "type": "m.login.password",
            "identifier": {"type": "m.id.user", "user": "kermit"},
            "password": "monkey",
        }
        channel = self.make_request(b"POST", LOGIN_URL, params)

        self.assertEquals(channel.code, 200, channel.result)
        access_token = channel.json_body["access_token"]
        device_id = channel.json_body["device_id"]

        # we should now be able to make requests with the access token
        channel = self.make_request(b"GET", TEST_URL, access_token=access_token)
        self.assertEquals(channel.code, 200, channel.result)

        # time passes
        self.reactor.advance(24 * 3600)

        # ... and we should be soft-logged out
        channel = self.make_request(b"GET", TEST_URL, access_token=access_token)
        self.assertEquals(channel.code, 401, channel.result)
        self.assertEquals(channel.json_body["errcode"], "M_UNKNOWN_TOKEN")
        self.assertEquals(channel.json_body["soft_logout"], True)

        #
        # test behaviour after deleting the expired device
        #

        # we now log in as a different device
        access_token_2 = self.login("kermit", "monkey")

        # more requests with the expired token should still return a soft-logout
        self.reactor.advance(3600)
        channel = self.make_request(b"GET", TEST_URL, access_token=access_token)
        self.assertEquals(channel.code, 401, channel.result)
        self.assertEquals(channel.json_body["errcode"], "M_UNKNOWN_TOKEN")
        self.assertEquals(channel.json_body["soft_logout"], True)

        # ... but if we delete that device, it will be a proper logout
        self._delete_device(access_token_2, "kermit", "monkey", device_id)

        channel = self.make_request(b"GET", TEST_URL, access_token=access_token)
        self.assertEquals(channel.code, 401, channel.result)
        self.assertEquals(channel.json_body["errcode"], "M_UNKNOWN_TOKEN")
        self.assertEquals(channel.json_body["soft_logout"], False)

    def _delete_device(self, access_token, user_id, password, device_id):
        """Perform the UI-Auth to delete a device"""
        channel = self.make_request(
            b"DELETE", "devices/" + device_id, access_token=access_token
        )
        self.assertEquals(channel.code, 401, channel.result)
        # check it's a UI-Auth fail
        self.assertEqual(
            set(channel.json_body.keys()),
            {"flows", "params", "session"},
            channel.result,
        )

        auth = {
            "type": "m.login.password",
            # https://github.com/matrix-org/synapse/issues/5665
            # "identifier": {"type": "m.id.user", "user": user_id},
            "user": user_id,
            "password": password,
            "session": channel.json_body["session"],
        }

        channel = self.make_request(
            b"DELETE",
            "devices/" + device_id,
            access_token=access_token,
            content={"auth": auth},
        )
        self.assertEquals(channel.code, 200, channel.result)

    @override_config({"session_lifetime": "24h"})
    def test_session_can_hard_logout_after_being_soft_logged_out(self):
        self.register_user("kermit", "monkey")

        # log in as normal
        access_token = self.login("kermit", "monkey")

        # we should now be able to make requests with the access token
        channel = self.make_request(b"GET", TEST_URL, access_token=access_token)
        self.assertEquals(channel.code, 200, channel.result)

        # time passes
        self.reactor.advance(24 * 3600)

        # ... and we should be soft-logged out
        channel = self.make_request(b"GET", TEST_URL, access_token=access_token)
        self.assertEquals(channel.code, 401, channel.result)
        self.assertEquals(channel.json_body["errcode"], "M_UNKNOWN_TOKEN")
        self.assertEquals(channel.json_body["soft_logout"], True)

        # Now try to hard logout this session
        channel = self.make_request(b"POST", "/logout", access_token=access_token)
        self.assertEquals(channel.result["code"], b"200", channel.result)

    @override_config({"session_lifetime": "24h"})
    def test_session_can_hard_logout_all_sessions_after_being_soft_logged_out(self):
        self.register_user("kermit", "monkey")

        # log in as normal
        access_token = self.login("kermit", "monkey")

        # we should now be able to make requests with the access token
        channel = self.make_request(b"GET", TEST_URL, access_token=access_token)
        self.assertEquals(channel.code, 200, channel.result)

        # time passes
        self.reactor.advance(24 * 3600)

        # ... and we should be soft-logged out
        channel = self.make_request(b"GET", TEST_URL, access_token=access_token)
        self.assertEquals(channel.code, 401, channel.result)
        self.assertEquals(channel.json_body["errcode"], "M_UNKNOWN_TOKEN")
        self.assertEquals(channel.json_body["soft_logout"], True)

        # Now try to hard log out all of the user's sessions
        channel = self.make_request(b"POST", "/logout/all", access_token=access_token)
        self.assertEquals(channel.result["code"], b"200", channel.result)


@skip_unless(has_saml2 and HAS_OIDC, "Requires SAML2 and OIDC")
class MultiSSOTestCase(unittest.HomeserverTestCase):
    """Tests for homeservers with multiple SSO providers enabled"""

    servlets = [
        login.register_servlets,
    ]

    def default_config(self) -> Dict[str, Any]:
        config = super().default_config()

        config["public_baseurl"] = BASE_URL

        config["cas_config"] = {
            "enabled": True,
            "server_url": CAS_SERVER,
            "service_url": "https://matrix.goodserver.com:8448",
        }

        config["saml2_config"] = {
            "sp_config": {
                "metadata": {"inline": [TEST_SAML_METADATA]},
                # use the XMLSecurity backend to avoid relying on xmlsec1
                "crypto_backend": "XMLSecurity",
            },
        }

        # default OIDC provider
        config["oidc_config"] = TEST_OIDC_CONFIG

        # additional OIDC providers
        config["oidc_providers"] = [
            {
                "idp_id": "idp1",
                "idp_name": "IDP1",
                "discover": False,
                "issuer": "https://issuer1",
                "client_id": "test-client-id",
                "client_secret": "test-client-secret",
                "scopes": ["profile"],
                "authorization_endpoint": "https://issuer1/auth",
                "token_endpoint": "https://issuer1/token",
                "userinfo_endpoint": "https://issuer1/userinfo",
                "user_mapping_provider": {
                    "config": {"localpart_template": "{{ user.sub }}"}
                },
            }
        ]
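        # (as the tests below expect, additional providers are surfaced to clients
        # with an "oidc-" prefix on their IdP id, e.g. "oidc-idp1", while the
        # default provider keeps the plain "oidc" id)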

        return config

    def create_resource_dict(self) -> Dict[str, Resource]:
        d = super().create_resource_dict()
        d.update(build_synapse_client_resource_tree(self.hs))
        return d

    def test_get_login_flows(self):
        """GET /login should return password and SSO flows"""
        channel = self.make_request("GET", "/_matrix/client/r0/login")
        self.assertEqual(channel.code, 200, channel.result)

        expected_flow_types = [
            "m.login.cas",
            "m.login.sso",
            "m.login.token",
            "m.login.password",
        ] + [f["type"] for f in ADDITIONAL_LOGIN_FLOWS]

        self.assertCountEqual(
            [f["type"] for f in channel.json_body["flows"]], expected_flow_types
        )

    @override_config({"experimental_features": {"msc2858_enabled": True}})
    def test_get_msc2858_login_flows(self):
        """The SSO flow should include IdP info if MSC2858 is enabled"""
        channel = self.make_request("GET", "/_matrix/client/r0/login")
        self.assertEqual(channel.code, 200, channel.result)

        # stick the flows results in a dict by type
        flow_results = {}  # type: Dict[str, Any]
        for f in channel.json_body["flows"]:
            flow_type = f["type"]
            self.assertNotIn(
                flow_type, flow_results, "duplicate flow type %s" % (flow_type,)
            )
            flow_results[flow_type] = f

        self.assertIn("m.login.sso", flow_results, "m.login.sso was not returned")
        sso_flow = flow_results.pop("m.login.sso")
        # we should have a set of IdPs
        self.assertCountEqual(
            sso_flow["org.matrix.msc2858.identity_providers"],
            [
                {"id": "cas", "name": "CAS"},
                {"id": "saml", "name": "SAML"},
                {"id": "oidc-idp1", "name": "IDP1"},
                {"id": "oidc", "name": "OIDC"},
            ],
        )

        # the rest of the flows are simple
        expected_flows = [
            {"type": "m.login.cas"},
            {"type": "m.login.token"},
            {"type": "m.login.password"},
        ] + ADDITIONAL_LOGIN_FLOWS

        self.assertCountEqual(flow_results.values(), expected_flows)

    def test_multi_sso_redirect(self):
        """/login/sso/redirect should redirect to an identity picker"""
        # first hit the redirect url, which should redirect to our idp picker
        channel = self._make_sso_redirect_request(False, None)
        self.assertEqual(channel.code, 302, channel.result)
        uri = channel.headers.getRawHeaders("Location")[0]

        # hitting that picker should give us some HTML
        channel = self.make_request("GET", uri)
        self.assertEqual(channel.code, 200, channel.result)

        # parse the form to check it has fields assumed elsewhere in this class
        html = channel.result["body"].decode("utf-8")
        p = TestHtmlParser()
        p.feed(html)
        p.close()

        # there should be a link for each href
        returned_idps = []  # type: List[str]
        for link in p.links:
            path, query = link.split("?", 1)
            self.assertEqual(path, "pick_idp")
            params = urllib.parse.parse_qs(query)
            self.assertEqual(params["redirectUrl"], [TEST_CLIENT_REDIRECT_URL])
            returned_idps.append(params["idp"][0])

        self.assertCountEqual(returned_idps, ["cas", "oidc", "oidc-idp1", "saml"])

    def test_multi_sso_redirect_to_cas(self):
        """If CAS is chosen, should redirect to the CAS server"""

        channel = self.make_request(
            "GET",
            "/_synapse/client/pick_idp?redirectUrl="
            + urllib.parse.quote_plus(TEST_CLIENT_REDIRECT_URL)
            + "&idp=cas",
            shorthand=False,
        )
        self.assertEqual(channel.code, 302, channel.result)
        location_headers = channel.headers.getRawHeaders("Location")
        assert location_headers
        cas_uri = location_headers[0]
        cas_uri_path, cas_uri_query = cas_uri.split("?", 1)

        # it should redirect us to the login page of the cas server
        self.assertEqual(cas_uri_path, CAS_SERVER + "/login")

        # check that the redirectUrl is correctly encoded in the service param - ie, the
        # place that CAS will redirect to
        cas_uri_params = urllib.parse.parse_qs(cas_uri_query)
        service_uri = cas_uri_params["service"][0]
        _, service_uri_query = service_uri.split("?", 1)
        service_uri_params = urllib.parse.parse_qs(service_uri_query)
        self.assertEqual(service_uri_params["redirectUrl"][0], TEST_CLIENT_REDIRECT_URL)

    def test_multi_sso_redirect_to_saml(self):
        """If SAML is chosen, should redirect to the SAML server"""
        channel = self.make_request(
            "GET",
            "/_synapse/client/pick_idp?redirectUrl="
            + urllib.parse.quote_plus(TEST_CLIENT_REDIRECT_URL)
            + "&idp=saml",
        )
        self.assertEqual(channel.code, 302, channel.result)
        location_headers = channel.headers.getRawHeaders("Location")
        assert location_headers
        saml_uri = location_headers[0]
        saml_uri_path, saml_uri_query = saml_uri.split("?", 1)

        # it should redirect us to the login page of the SAML server
        self.assertEqual(saml_uri_path, SAML_SERVER)

        # the RelayState is used to carry the client redirect url
        saml_uri_params = urllib.parse.parse_qs(saml_uri_query)
        relay_state_param = saml_uri_params["RelayState"][0]
        self.assertEqual(relay_state_param, TEST_CLIENT_REDIRECT_URL)

    def test_login_via_oidc(self):
        """If OIDC is chosen, should redirect to the OIDC auth endpoint"""

        # pick the default OIDC provider
        channel = self.make_request(
            "GET",
            "/_synapse/client/pick_idp?redirectUrl="
            + urllib.parse.quote_plus(TEST_CLIENT_REDIRECT_URL)
            + "&idp=oidc",
        )
        self.assertEqual(channel.code, 302, channel.result)
        location_headers = channel.headers.getRawHeaders("Location")
        assert location_headers
        oidc_uri = location_headers[0]
        oidc_uri_path, oidc_uri_query = oidc_uri.split("?", 1)

        # it should redirect us to the auth page of the OIDC server
        self.assertEqual(oidc_uri_path, TEST_OIDC_AUTH_ENDPOINT)

        # ... and should have set a cookie including the redirect url
        cookie_headers = channel.headers.getRawHeaders("Set-Cookie")
        assert cookie_headers
        cookies = {}  # type: Dict[str, str]
        for h in cookie_headers:
            key, value = h.split(";")[0].split("=", maxsplit=1)
            cookies[key] = value

        oidc_session_cookie = cookies["oidc_session"]
        macaroon = pymacaroons.Macaroon.deserialize(oidc_session_cookie)
        self.assertEqual(
            self._get_value_from_macaroon(macaroon, "client_redirect_url"),
            TEST_CLIENT_REDIRECT_URL,
        )

        channel = self.helper.complete_oidc_auth(oidc_uri, cookies, {"sub": "user1"})

        # that should serve a confirmation page
        self.assertEqual(channel.code, 200, channel.result)
        content_type_headers = channel.headers.getRawHeaders("Content-Type")
        assert content_type_headers
        self.assertTrue(content_type_headers[-1].startswith("text/html"))
        p = TestHtmlParser()
        p.feed(channel.text_body)
        p.close()

        # ... which should contain our redirect link
        self.assertEqual(len(p.links), 1)
        path, query = p.links[0].split("?", 1)
        self.assertEqual(path, "https://x")

        # it will have url-encoded the params properly, so we'll have to parse them
        params = urllib.parse.parse_qsl(
            query, keep_blank_values=True, strict_parsing=True, errors="strict"
        )
        self.assertEqual(params[0:2], EXPECTED_CLIENT_REDIRECT_URL_PARAMS)
        self.assertEqual(params[2][0], "loginToken")

        # finally, submit the matrix login token to the login API, which gives us our
        # matrix access token, mxid, and device id.
        login_token = params[2][1]
        chan = self.make_request(
            "POST",
            "/login",
            content={"type": "m.login.token", "token": login_token},
        )
        self.assertEqual(chan.code, 200, chan.result)
        self.assertEqual(chan.json_body["user_id"], "@user1:test")

    def test_multi_sso_redirect_to_unknown(self):
        """An unknown IdP should cause a 400"""
        channel = self.make_request(
            "GET",
            "/_synapse/client/pick_idp?redirectUrl=http://x&idp=xyz",
        )
        self.assertEqual(channel.code, 400, channel.result)

    def test_client_idp_redirect_to_unknown(self):
        """If the client tries to pick an unknown IdP, return a 404"""
        channel = self._make_sso_redirect_request(False, "xxx")
        self.assertEqual(channel.code, 404, channel.result)
        self.assertEqual(channel.json_body["errcode"], "M_NOT_FOUND")

    def test_client_idp_redirect_to_oidc(self):
        """If the client picks a known IdP, redirect to it"""
        channel = self._make_sso_redirect_request(False, "oidc")
        self.assertEqual(channel.code, 302, channel.result)
        oidc_uri = channel.headers.getRawHeaders("Location")[0]
        oidc_uri_path, oidc_uri_query = oidc_uri.split("?", 1)

        # it should redirect us to the auth page of the OIDC server
        self.assertEqual(oidc_uri_path, TEST_OIDC_AUTH_ENDPOINT)

    @override_config({"experimental_features": {"msc2858_enabled": True}})
    def test_client_msc2858_redirect_to_oidc(self):
        """Test the unstable API"""
        channel = self._make_sso_redirect_request(True, "oidc")
        self.assertEqual(channel.code, 302, channel.result)
        oidc_uri = channel.headers.getRawHeaders("Location")[0]
        oidc_uri_path, oidc_uri_query = oidc_uri.split("?", 1)

        # it should redirect us to the auth page of the OIDC server
        self.assertEqual(oidc_uri_path, TEST_OIDC_AUTH_ENDPOINT)

    def test_client_idp_redirect_msc2858_disabled(self):
        """If the client tries to use the MSC2858 endpoint but MSC2858 is disabled, return a 400"""
        channel = self._make_sso_redirect_request(True, "oidc")
        self.assertEqual(channel.code, 400, channel.result)
        self.assertEqual(channel.json_body["errcode"], "M_UNRECOGNIZED")

    def _make_sso_redirect_request(
        self, unstable_endpoint: bool = False, idp_prov: Optional[str] = None
    ):
        """Send a request to /_matrix/client/r0/login/sso/redirect

        ... or the unstable equivalent

        ... possibly specifying an IDP provider
        """
        endpoint = (
            "/_matrix/client/unstable/org.matrix.msc2858/login/sso/redirect"
            if unstable_endpoint
            else "/_matrix/client/r0/login/sso/redirect"
        )
        if idp_prov is not None:
            endpoint += "/" + idp_prov
        endpoint += "?redirectUrl=" + urllib.parse.quote_plus(TEST_CLIENT_REDIRECT_URL)

        return self.make_request(
            "GET",
            endpoint,
            custom_headers=[("Host", SYNAPSE_SERVER_PUBLIC_HOSTNAME)],
        )

    @staticmethod
    def _get_value_from_macaroon(macaroon: pymacaroons.Macaroon, key: str) -> str:
        prefix = key + " = "
        for caveat in macaroon.caveats:
            if caveat.caveat_id.startswith(prefix):
                return caveat.caveat_id[len(prefix) :]
        raise ValueError("No %s caveat in macaroon" % (key,))


class CASTestCase(unittest.HomeserverTestCase):

    servlets = [
        login.register_servlets,
    ]

    def make_homeserver(self, reactor, clock):
        self.base_url = "https://matrix.goodserver.com/"
        self.redirect_path = "_synapse/client/login/sso/redirect/confirm"

        config = self.default_config()
        config["public_baseurl"] = (
            config.get("public_baseurl") or "https://matrix.goodserver.com:8448"
        )
        config["cas_config"] = {
            "enabled": True,
            "server_url": CAS_SERVER,
        }

        cas_user_id = "username"
        self.user_id = "@%s:test" % cas_user_id

        async def get_raw(uri, args):
            """Return an example response payload from a call to the `/proxyValidate`
            endpoint of a CAS server, copied from
            https://apereo.github.io/cas/5.0.x/protocol/CAS-Protocol-V2-Specification.html#26-proxyvalidate-cas-20

            This needs to be returned by an async function (as opposed to set as the
            mock's return value) because the corresponding Synapse code awaits on it.
            """
            return (
                """
                <cas:serviceResponse xmlns:cas='http://www.yale.edu/tp/cas'>
                  <cas:authenticationSuccess>
                      <cas:user>%s</cas:user>
                      <cas:proxyGrantingTicket>PGTIOU-84678-8a9d...</cas:proxyGrantingTicket>
                      <cas:proxies>
                          <cas:proxy>https://proxy2/pgtUrl</cas:proxy>
                          <cas:proxy>https://proxy1/pgtUrl</cas:proxy>
                      </cas:proxies>
                  </cas:authenticationSuccess>
                </cas:serviceResponse>
                """
                % cas_user_id
            ).encode("utf-8")

        mocked_http_client = Mock(spec=["get_raw"])
        mocked_http_client.get_raw.side_effect = get_raw

        self.hs = self.setup_test_homeserver(
            config=config,
            proxied_http_client=mocked_http_client,
        )

        return self.hs

    def prepare(self, reactor, clock, hs):
        self.deactivate_account_handler = hs.get_deactivate_account_handler()

    def test_cas_redirect_confirm(self):
        """Tests that the SSO login flow serves a confirmation page before redirecting a
        user to the redirect URL.
        """
        base_url = "/_matrix/client/r0/login/cas/ticket?redirectUrl"
        redirect_url = "https://dodgy-site.com/"

        url_parts = list(urllib.parse.urlparse(base_url))
        query = dict(urllib.parse.parse_qsl(url_parts[4]))
        query.update({"redirectUrl": redirect_url})
        query.update({"ticket": "ticket"})
        url_parts[4] = urllib.parse.urlencode(query)
        cas_ticket_url = urllib.parse.urlunparse(url_parts)

        # Get Synapse to call the fake CAS and serve the template.
        channel = self.make_request("GET", cas_ticket_url)

        # Test that the response is HTML.
        self.assertEqual(channel.code, 200, channel.result)
        content_type_header_value = ""
        for header in channel.result.get("headers", []):
            if header[0] == b"Content-Type":
                content_type_header_value = header[1].decode("utf8")

        self.assertTrue(content_type_header_value.startswith("text/html"))

        # Test that the body isn't empty.
        self.assertTrue(len(channel.result["body"]) > 0)

        # And that it contains our redirect link
        self.assertIn(redirect_url, channel.result["body"].decode("UTF-8"))

    @override_config(
        {
            "sso": {
                "client_whitelist": [
                    "https://legit-site.com/",
                    "https://other-site.com/",
                ]
            }
        }
    )
    def test_cas_redirect_whitelisted(self):
        """Tests that the SSO login flow serves a redirect to a whitelisted url"""
        self._test_redirect("https://legit-site.com/")

    @override_config({"public_baseurl": "https://example.com"})
    def test_cas_redirect_login_fallback(self):
        self._test_redirect("https://example.com/_matrix/static/client/login")

    def _test_redirect(self, redirect_url):
        """Tests that the SSO login flow serves a redirect for the given redirect URL."""
        cas_ticket_url = (
            "/_matrix/client/r0/login/cas/ticket?redirectUrl=%s&ticket=ticket"
            % (urllib.parse.quote(redirect_url))
        )

        # Get Synapse to call the fake CAS and serve the template.
        channel = self.make_request("GET", cas_ticket_url)

        self.assertEqual(channel.code, 302)
        location_headers = channel.headers.getRawHeaders("Location")
        assert location_headers
        self.assertEqual(location_headers[0][: len(redirect_url)], redirect_url)

    @override_config({"sso": {"client_whitelist": ["https://legit-site.com/"]}})
    def test_deactivated_user(self):
        """Logging in as a deactivated account should error."""
        redirect_url = "https://legit-site.com/"

        # First login (to create the user).
        self._test_redirect(redirect_url)

        # Deactivate the account.
        self.get_success(
            self.deactivate_account_handler.deactivate_account(
                self.user_id, False, create_requester(self.user_id)
            )
        )

        # Request the CAS ticket.
        cas_ticket_url = (
            "/_matrix/client/r0/login/cas/ticket?redirectUrl=%s&ticket=ticket"
            % (urllib.parse.quote(redirect_url))
        )

        # Get Synapse to call the fake CAS and serve the template.
        channel = self.make_request("GET", cas_ticket_url)

        # Because the user is deactivated they are served an error template.
        self.assertEqual(channel.code, 403)
        self.assertIn(b"SSO account deactivated", channel.result["body"])


@skip_unless(HAS_JWT, "requires jwt")
class JWTTestCase(unittest.HomeserverTestCase):
    servlets = [
        synapse.rest.admin.register_servlets_for_client_rest_resource,
        login.register_servlets,
    ]

    jwt_secret = "secret"
    jwt_algorithm = "HS256"

    def make_homeserver(self, reactor, clock):
        self.hs = self.setup_test_homeserver()
        self.hs.config.jwt_enabled = True
        self.hs.config.jwt_secret = self.jwt_secret
        self.hs.config.jwt_algorithm = self.jwt_algorithm
        return self.hs

    def jwt_encode(self, payload: Dict[str, Any], secret: str = jwt_secret) -> str:
        # PyJWT 2.0.0 changed the return type of jwt.encode from bytes to str.
        result = jwt.encode(
            payload, secret, self.jwt_algorithm
        )  # type: Union[str, bytes]
        if isinstance(result, bytes):
            return result.decode("ascii")
        return result
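
    # (for example, self.jwt_encode({"sub": "kermit"}) produces a compact JWS signed
    # with the class-level jwt_secret using HS256; jwt_login below posts such a token
    # to /login.)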

    def jwt_login(self, *args):
        params = {"type": "org.matrix.login.jwt", "token": self.jwt_encode(*args)}
        channel = self.make_request(b"POST", LOGIN_URL, params)
        return channel

    def test_login_jwt_valid_registered(self):
        self.register_user("kermit", "monkey")
        channel = self.jwt_login({"sub": "kermit"})
        self.assertEqual(channel.result["code"], b"200", channel.result)
        self.assertEqual(channel.json_body["user_id"], "@kermit:test")

    def test_login_jwt_valid_unregistered(self):
        channel = self.jwt_login({"sub": "frog"})
        self.assertEqual(channel.result["code"], b"200", channel.result)
        self.assertEqual(channel.json_body["user_id"], "@frog:test")

    def test_login_jwt_invalid_signature(self):
        channel = self.jwt_login({"sub": "frog"}, "notsecret")
        self.assertEqual(channel.result["code"], b"403", channel.result)
        self.assertEqual(channel.json_body["errcode"], "M_FORBIDDEN")
        self.assertEqual(
            channel.json_body["error"],
            "JWT validation failed: Signature verification failed",
        )

    def test_login_jwt_expired(self):
        channel = self.jwt_login({"sub": "frog", "exp": 864000})
        self.assertEqual(channel.result["code"], b"403", channel.result)
        self.assertEqual(channel.json_body["errcode"], "M_FORBIDDEN")
        self.assertEqual(
            channel.json_body["error"], "JWT validation failed: Signature has expired"
        )

    def test_login_jwt_not_before(self):
        now = int(time.time())
        channel = self.jwt_login({"sub": "frog", "nbf": now + 3600})
        self.assertEqual(channel.result["code"], b"403", channel.result)
        self.assertEqual(channel.json_body["errcode"], "M_FORBIDDEN")
        self.assertEqual(
            channel.json_body["error"],
            "JWT validation failed: The token is not yet valid (nbf)",
        )

    def test_login_no_sub(self):
        channel = self.jwt_login({"username": "root"})
        self.assertEqual(channel.result["code"], b"403", channel.result)
        self.assertEqual(channel.json_body["errcode"], "M_FORBIDDEN")
        self.assertEqual(channel.json_body["error"], "Invalid JWT")

    @override_config(
        {
            "jwt_config": {
                "jwt_enabled": True,
                "secret": jwt_secret,
                "algorithm": jwt_algorithm,
                "issuer": "test-issuer",
            }
        }
    )
    def test_login_iss(self):
        """Test validating the issuer claim."""
        # A valid issuer.
        channel = self.jwt_login({"sub": "kermit", "iss": "test-issuer"})
        self.assertEqual(channel.result["code"], b"200", channel.result)
        self.assertEqual(channel.json_body["user_id"], "@kermit:test")

        # An invalid issuer.
        channel = self.jwt_login({"sub": "kermit", "iss": "invalid"})
        self.assertEqual(channel.result["code"], b"403", channel.result)
        self.assertEqual(channel.json_body["errcode"], "M_FORBIDDEN")
        self.assertEqual(
            channel.json_body["error"], "JWT validation failed: Invalid issuer"
        )

        # Not providing an issuer.
        channel = self.jwt_login({"sub": "kermit"})
        self.assertEqual(channel.result["code"], b"403", channel.result)
        self.assertEqual(channel.json_body["errcode"], "M_FORBIDDEN")
        self.assertEqual(
            channel.json_body["error"],
            'JWT validation failed: Token is missing the "iss" claim',
        )

    def test_login_iss_no_config(self):
        """Test providing an issuer claim without requiring it in the configuration."""
        channel = self.jwt_login({"sub": "kermit", "iss": "invalid"})
        self.assertEqual(channel.result["code"], b"200", channel.result)
        self.assertEqual(channel.json_body["user_id"], "@kermit:test")

    @override_config(
        {
            "jwt_config": {
                "jwt_enabled": True,
                "secret": jwt_secret,
                "algorithm": jwt_algorithm,
                "audiences": ["test-audience"],
            }
        }
    )
    def test_login_aud(self):
        """Test validating the audience claim."""
        # A valid audience.
        channel = self.jwt_login({"sub": "kermit", "aud": "test-audience"})
        self.assertEqual(channel.result["code"], b"200", channel.result)
        self.assertEqual(channel.json_body["user_id"], "@kermit:test")

        # An invalid audience.
        channel = self.jwt_login({"sub": "kermit", "aud": "invalid"})
        self.assertEqual(channel.result["code"], b"403", channel.result)
        self.assertEqual(channel.json_body["errcode"], "M_FORBIDDEN")
        self.assertEqual(
            channel.json_body["error"], "JWT validation failed: Invalid audience"
        )

        # Not providing an audience.
        channel = self.jwt_login({"sub": "kermit"})
        self.assertEqual(channel.result["code"], b"403", channel.result)
        self.assertEqual(channel.json_body["errcode"], "M_FORBIDDEN")
        self.assertEqual(
            channel.json_body["error"],
            'JWT validation failed: Token is missing the "aud" claim',
        )

    def test_login_aud_no_config(self):
        """Test providing an audience without requiring it in the configuration."""
        channel = self.jwt_login({"sub": "kermit", "aud": "invalid"})
        self.assertEqual(channel.result["code"], b"403", channel.result)
        self.assertEqual(channel.json_body["errcode"], "M_FORBIDDEN")
        self.assertEqual(
            channel.json_body["error"], "JWT validation failed: Invalid audience"
        )

    def test_login_no_token(self):
        params = {"type": "org.matrix.login.jwt"}
        channel = self.make_request(b"POST", LOGIN_URL, params)
        self.assertEqual(channel.result["code"], b"403", channel.result)
        self.assertEqual(channel.json_body["errcode"], "M_FORBIDDEN")
        self.assertEqual(channel.json_body["error"], "Token field for JWT is missing")


# The JWTPubKeyTestCase is a complement to JWTTestCase where we instead use
# RS256, with a public key configured in synapse as "jwt_secret", and tokens
# signed by the private key.
@skip_unless(HAS_JWT, "requires jwt")
class JWTPubKeyTestCase(unittest.HomeserverTestCase):
    servlets = [
        login.register_servlets,
    ]

    # This key's pubkey is used as the jwt_secret setting of synapse. Valid
    # tokens are signed by this and validated using the pubkey. It is generated
    # with `openssl genrsa 512` (not a secure way to generate real keys, but
    # good enough for tests!)
    jwt_privatekey = "\n".join(
        [
            "-----BEGIN RSA PRIVATE KEY-----",
            "MIIBPAIBAAJBAM50f1Q5gsdmzifLstzLHb5NhfajiOt7TKO1vSEWdq7u9x8SMFiB",
            "492RM9W/XFoh8WUfL9uL6Now6tPRDsWv3xsCAwEAAQJAUv7OOSOtiU+wzJq82rnk",
            "yR4NHqt7XX8BvkZPM7/+EjBRanmZNSp5kYZzKVaZ/gTOM9+9MwlmhidrUOweKfB/",
            "kQIhAPZwHazbjo7dYlJs7wPQz1vd+aHSEH+3uQKIysebkmm3AiEA1nc6mDdmgiUq",
            "TpIN8A4MBKmfZMWTLq6z05y/qjKyxb0CIQDYJxCwTEenIaEa4PdoJl+qmXFasVDN",
            "ZU0+XtNV7yul0wIhAMI9IhiStIjS2EppBa6RSlk+t1oxh2gUWlIh+YVQfZGRAiEA",
            "tqBR7qLZGJ5CVKxWmNhJZGt1QHoUtOch8t9C4IdOZ2g=",
            "-----END RSA PRIVATE KEY-----",
        ]
    )

    # Generated with `openssl rsa -in foo.key -pubout`, with the above
    # private key placed in foo.key (jwt_privatekey).
    jwt_pubkey = "\n".join(
        [
            "-----BEGIN PUBLIC KEY-----",
            "MFwwDQYJKoZIhvcNAQEBBQADSwAwSAJBAM50f1Q5gsdmzifLstzLHb5NhfajiOt7",
            "TKO1vSEWdq7u9x8SMFiB492RM9W/XFoh8WUfL9uL6Now6tPRDsWv3xsCAwEAAQ==",
            "-----END PUBLIC KEY-----",
        ]
    )

    # This key is used to sign tokens that shouldn't be accepted by synapse.
    # Generated just like jwt_privatekey.
    bad_privatekey = "\n".join(
        [
            "-----BEGIN RSA PRIVATE KEY-----",
            "MIIBOgIBAAJBAL//SQrKpKbjCCnv/FlasJCv+t3k/MPsZfniJe4DVFhsktF2lwQv",
            "gLjmQD3jBUTz+/FndLSBvr3F4OHtGL9O/osCAwEAAQJAJqH0jZJW7Smzo9ShP02L",
            "R6HRZcLExZuUrWI+5ZSP7TaZ1uwJzGFspDrunqaVoPobndw/8VsP8HFyKtceC7vY",
            "uQIhAPdYInDDSJ8rFKGiy3Ajv5KWISBicjevWHF9dbotmNO9AiEAxrdRJVU+EI9I",
            "eB4qRZpY6n4pnwyP0p8f/A3NBaQPG+cCIFlj08aW/PbxNdqYoBdeBA0xDrXKfmbb",
            "iwYxBkwL0JCtAiBYmsi94sJn09u2Y4zpuCbJeDPKzWkbuwQh+W1fhIWQJQIhAKR0",
            "KydN6cRLvphNQ9c/vBTdlzWxzcSxREpguC7F1J1m",
            "-----END RSA PRIVATE KEY-----",
        ]
    )

    def make_homeserver(self, reactor, clock):
        self.hs = self.setup_test_homeserver()
        self.hs.config.jwt_enabled = True
        self.hs.config.jwt_secret = self.jwt_pubkey
        self.hs.config.jwt_algorithm = "RS256"
        return self.hs

    def jwt_encode(self, payload: Dict[str, Any], secret: str = jwt_privatekey) -> str:
        # PyJWT 2.0.0 changed the return type of jwt.encode from bytes to str.
        result = jwt.encode(payload, secret, "RS256")  # type: Union[bytes, str]
        if isinstance(result, bytes):
            return result.decode("ascii")
        return result

    def jwt_login(self, *args):
        params = {"type": "org.matrix.login.jwt", "token": self.jwt_encode(*args)}
        channel = self.make_request(b"POST", LOGIN_URL, params)
        return channel

    def test_login_jwt_valid(self):
        channel = self.jwt_login({"sub": "kermit"})
        self.assertEqual(channel.result["code"], b"200", channel.result)
        self.assertEqual(channel.json_body["user_id"], "@kermit:test")

    def test_login_jwt_invalid_signature(self):
        channel = self.jwt_login({"sub": "frog"}, self.bad_privatekey)
        self.assertEqual(channel.result["code"], b"403", channel.result)
        self.assertEqual(channel.json_body["errcode"], "M_FORBIDDEN")
        self.assertEqual(
            channel.json_body["error"],
            "JWT validation failed: Signature verification failed",
        )


AS_USER = "as_user_alice"


class AppserviceLoginRestServletTestCase(unittest.HomeserverTestCase):
    servlets = [
        login.register_servlets,
        register.register_servlets,
    ]

    def register_as_user(self, username):
        self.make_request(
            b"POST",
            "/_matrix/client/r0/register?access_token=%s" % (self.service.token,),
            {"username": username},
        )

    def make_homeserver(self, reactor, clock):
        self.hs = self.setup_test_homeserver()

        self.service = ApplicationService(
            id="unique_identifier",
            token="some_token",
            hostname="example.com",
            sender="@asbot:example.com",
            namespaces={
                ApplicationService.NS_USERS: [
                    {"regex": r"@as_user.*", "exclusive": False}
                ],
                ApplicationService.NS_ROOMS: [],
                ApplicationService.NS_ALIASES: [],
            },
        )
        self.another_service = ApplicationService(
            id="another__identifier",
            token="another_token",
            hostname="example.com",
            sender="@as2bot:example.com",
            namespaces={
                ApplicationService.NS_USERS: [
                    {"regex": r"@as2_user.*", "exclusive": False}
                ],
                ApplicationService.NS_ROOMS: [],
                ApplicationService.NS_ALIASES: [],
            },
        )

        self.hs.get_datastore().services_cache.append(self.service)
        self.hs.get_datastore().services_cache.append(self.another_service)
        return self.hs

    def test_login_appservice_user(self):
        """Test that an appservice user can use /login"""
        self.register_as_user(AS_USER)

        params = {
            "type": login.LoginRestServlet.APPSERVICE_TYPE,
            "identifier": {"type": "m.id.user", "user": AS_USER},
        }
        channel = self.make_request(
            b"POST", LOGIN_URL, params, access_token=self.service.token
        )

        self.assertEquals(channel.result["code"], b"200", channel.result)

    def test_login_appservice_user_bot(self):
        """Test that the appservice bot can use /login"""
        self.register_as_user(AS_USER)

        params = {
            "type": login.LoginRestServlet.APPSERVICE_TYPE,
            "identifier": {"type": "m.id.user", "user": self.service.sender},
        }
        channel = self.make_request(
            b"POST", LOGIN_URL, params, access_token=self.service.token
        )

        self.assertEquals(channel.result["code"], b"200", channel.result)

    def test_login_appservice_wrong_user(self):
        """Test that non-as users cannot login with the as token"""
        self.register_as_user(AS_USER)

        params = {
            "type": login.LoginRestServlet.APPSERVICE_TYPE,
            "identifier": {"type": "m.id.user", "user": "fibble_wibble"},
        }
        channel = self.make_request(
            b"POST", LOGIN_URL, params, access_token=self.service.token
        )

        self.assertEquals(channel.result["code"], b"403", channel.result)

    def test_login_appservice_wrong_as(self):
        """Test that as users cannot login with wrong as token"""
        self.register_as_user(AS_USER)

        params = {
            "type": login.LoginRestServlet.APPSERVICE_TYPE,
            "identifier": {"type": "m.id.user", "user": AS_USER},
        }
        channel = self.make_request(
            b"POST", LOGIN_URL, params, access_token=self.another_service.token
        )

        self.assertEquals(channel.result["code"], b"403", channel.result)

    def test_login_appservice_no_token(self):
        """Test that users must provide a token when using the appservice
        login method
        """
        self.register_as_user(AS_USER)

        params = {
            "type": login.LoginRestServlet.APPSERVICE_TYPE,
            "identifier": {"type": "m.id.user", "user": AS_USER},
        }
        channel = self.make_request(b"POST", LOGIN_URL, params)

        self.assertEquals(channel.result["code"], b"401", channel.result)


@skip_unless(HAS_OIDC, "requires OIDC")
class UsernamePickerTestCase(HomeserverTestCase):
    """Tests for the username picker flow of SSO login"""

    servlets = [login.register_servlets]

    def default_config(self):
        config = super().default_config()
        config["public_baseurl"] = BASE_URL

        config["oidc_config"] = {}
        config["oidc_config"].update(TEST_OIDC_CONFIG)
        config["oidc_config"]["user_mapping_provider"] = {
            "config": {"display_name_template": "{{ user.displayname }}"}
        }

        # whitelist this client URI so we redirect straight to it rather than
        # serving a confirmation page
        config["sso"] = {"client_whitelist": ["https://x"]}
        return config

    def create_resource_dict(self) -> Dict[str, Resource]:
        d = super().create_resource_dict()
        d.update(build_synapse_client_resource_tree(self.hs))
        return d

    def test_username_picker(self):
        """Test the happy path of a username picker flow."""

        # do the start of the login flow
        channel = self.helper.auth_via_oidc(
            {"sub": "tester", "displayname": "Jonny"}, TEST_CLIENT_REDIRECT_URL
        )

        # that should redirect to the username picker
        self.assertEqual(channel.code, 302, channel.result)
        location_headers = channel.headers.getRawHeaders("Location")
        assert location_headers
        picker_url = location_headers[0]
        self.assertEqual(picker_url, "/_synapse/client/pick_username/account_details")

        # ... with a username_mapping_session cookie
        cookies = {}  # type: Dict[str, str]
        channel.extract_cookies(cookies)
        self.assertIn("username_mapping_session", cookies)
        session_id = cookies["username_mapping_session"]

        # introspect the sso handler a bit to check that the username mapping session
        # looks ok.
        username_mapping_sessions = self.hs.get_sso_handler()._username_mapping_sessions
        self.assertIn(
            session_id,
            username_mapping_sessions,
            "session id not found in map",
        )
        session = username_mapping_sessions[session_id]
        self.assertEqual(session.remote_user_id, "tester")
        self.assertEqual(session.display_name, "Jonny")
        self.assertEqual(session.client_redirect_url, TEST_CLIENT_REDIRECT_URL)

        # the expiry time should be about 15 minutes away
        expected_expiry = self.clock.time_msec() + (15 * 60 * 1000)
        self.assertApproximates(session.expiry_time_ms, expected_expiry, tolerance=1000)

        # Now, submit a username to the username picker, which should serve a redirect
        # to the completion page
        content = urlencode({b"username": b"bobby"}).encode("utf8")
        chan = self.make_request(
            "POST",
            path=picker_url,
            content=content,
            content_is_form=True,
            custom_headers=[
                ("Cookie", "username_mapping_session=" + session_id),
                # old versions of twisted don't do form-parsing without a valid
                # content-length header.
                ("Content-Length", str(len(content))),
            ],
        )
        self.assertEqual(chan.code, 302, chan.result)
        location_headers = chan.headers.getRawHeaders("Location")
        assert location_headers

        # send a request to the completion page, which should 302 to the client redirectUrl
        chan = self.make_request(
            "GET",
            path=location_headers[0],
            custom_headers=[("Cookie", "username_mapping_session=" + session_id)],
        )
        self.assertEqual(chan.code, 302, chan.result)
        location_headers = chan.headers.getRawHeaders("Location")
        assert location_headers

        # ensure that the returned location matches the requested redirect URL
        path, query = location_headers[0].split("?", 1)
        self.assertEqual(path, "https://x")

        # it will have url-encoded the params properly, so we'll have to parse them
        params = urllib.parse.parse_qsl(
            query, keep_blank_values=True, strict_parsing=True, errors="strict"
        )
        self.assertEqual(params[0:2], EXPECTED_CLIENT_REDIRECT_URL_PARAMS)
        self.assertEqual(params[2][0], "loginToken")

        # fish the login token out of the returned redirect uri
        login_token = params[2][1]

        # finally, submit the matrix login token to the login API, which gives us our
        # matrix access token, mxid, and device id.
        chan = self.make_request(
            "POST",
            "/login",
            content={"type": "m.login.token", "token": login_token},
        )
        self.assertEqual(chan.code, 200, chan.result)
        self.assertEqual(chan.json_body["user_id"], "@bobby:test")