2020-05-08 08:30:40 -04:00
|
|
|
# -*- coding: utf-8 -*-
|
|
|
|
# Copyright 2020 Quentin Gliech
|
|
|
|
#
|
|
|
|
# Licensed under the Apache License, Version 2.0 (the "License");
|
|
|
|
# you may not use this file except in compliance with the License.
|
|
|
|
# You may obtain a copy of the License at
|
|
|
|
#
|
|
|
|
# http://www.apache.org/licenses/LICENSE-2.0
|
|
|
|
#
|
|
|
|
# Unless required by applicable law or agreed to in writing, software
|
|
|
|
# distributed under the License is distributed on an "AS IS" BASIS,
|
|
|
|
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
|
|
|
# See the License for the specific language governing permissions and
|
|
|
|
# limitations under the License.
|
2020-11-25 10:04:22 -05:00
|
|
|
import inspect
|
2020-05-08 08:30:40 -04:00
|
|
|
import logging
|
2021-01-13 05:26:12 -05:00
|
|
|
from typing import TYPE_CHECKING, Dict, Generic, List, Optional, TypeVar
|
2020-05-08 08:30:40 -04:00
|
|
|
from urllib.parse import urlencode
|
|
|
|
|
|
|
|
import attr
|
|
|
|
import pymacaroons
|
|
|
|
from authlib.common.security import generate_token
|
|
|
|
from authlib.jose import JsonWebToken
|
|
|
|
from authlib.oauth2.auth import ClientAuth
|
|
|
|
from authlib.oauth2.rfc6749.parameters import prepare_grant_uri
|
|
|
|
from authlib.oidc.core import CodeIDToken, ImplicitIDToken, UserInfo
|
|
|
|
from authlib.oidc.discovery import OpenIDProviderMetadata, get_well_known_url
|
|
|
|
from jinja2 import Environment, Template
|
|
|
|
from pymacaroons.exceptions import (
|
|
|
|
MacaroonDeserializationException,
|
|
|
|
MacaroonInvalidSignatureException,
|
|
|
|
)
|
|
|
|
from typing_extensions import TypedDict
|
|
|
|
|
|
|
|
from twisted.web.client import readBody
|
|
|
|
|
|
|
|
from synapse.config import ConfigError
|
2020-11-25 10:04:22 -05:00
|
|
|
from synapse.handlers.sso import MappingException, UserAttributes
|
2020-05-08 08:30:40 -04:00
|
|
|
from synapse.http.site import SynapseRequest
|
2020-06-03 16:13:17 -04:00
|
|
|
from synapse.logging.context import make_deferred_yieldable
|
2020-12-02 07:45:42 -05:00
|
|
|
from synapse.types import JsonDict, UserID, map_username_to_mxid_localpart
|
2020-08-19 07:26:03 -04:00
|
|
|
from synapse.util import json_decoder
|
2020-05-08 08:30:40 -04:00
|
|
|
|
2020-08-11 13:00:17 -04:00
|
|
|
if TYPE_CHECKING:
|
|
|
|
from synapse.server import HomeServer
|
|
|
|
|
2020-05-08 08:30:40 -04:00
|
|
|
logger = logging.getLogger(__name__)

# Name of the cookie used to carry the signed OIDC session macaroon between
# the redirect and the callback (scoped to /_synapse/oidc by the handler).
SESSION_COOKIE_NAME = b"oidc_session"


#: A token exchanged from the token endpoint, as per RFC6749 sec 5.1. and
#: OpenID.Core sec 3.1.3.3.
Token = TypedDict(
    "Token",
    {
        "access_token": str,
        "token_type": str,
        "id_token": Optional[str],
        "refresh_token": Optional[str],
        "expires_in": int,
        "scope": Optional[str],
    },
)

#: A JWK, as per RFC7517 sec 4. The type could be more precise than that, but
#: there is no real point of doing this in our case.
JWK = Dict[str, str]

#: A JWK Set, as per RFC7517 sec 5.
JWKS = TypedDict("JWKS", {"keys": List[JWK]})
|
|
|
|
|
|
|
|
|
|
|
|
class OidcError(Exception):
    """Error raised when the OIDC ``token_endpoint`` returns an error response.

    Attributes:
        error: the OAuth2 error code returned by the provider.
        error_description: optional human-readable detail for the error.
    """

    def __init__(self, error, error_description=None):
        self.error = error
        self.error_description = error_description

    def __str__(self):
        # Render "code: description" when a description was given, otherwise
        # just the bare error code.
        if not self.error_description:
            return self.error
        return "{}: {}".format(self.error, self.error_description)
|
|
|
|
2021-01-13 05:26:12 -05:00
|
|
|
class OidcHandler:
|
2020-05-08 08:30:40 -04:00
|
|
|
"""Handles requests related to the OpenID Connect login flow.
|
|
|
|
"""
|
|
|
|
|
2020-08-11 13:00:17 -04:00
|
|
|
    def __init__(self, hs: "HomeServer"):
        # Datastore, used (e.g.) to look up existing users by mxid.
        self._store = hs.get_datastore()

        # Generates/verifies the signed macaroon carried in the session cookie.
        self._token_generator = OidcSessionTokenGenerator(hs)

        # Static OIDC client configuration, all sourced from the homeserver config.
        self._callback_url = hs.config.oidc_callback_url # type: str
        self._scopes = hs.config.oidc_scopes # type: List[str]
        self._user_profile_method = hs.config.oidc_user_profile_method # type: str
        # Authlib helper that signs/augments token-endpoint requests with the
        # client credentials, using the configured auth method.
        self._client_auth = ClientAuth(
            hs.config.oidc_client_id,
            hs.config.oidc_client_secret,
            hs.config.oidc_client_auth_method,
        ) # type: ClientAuth
        self._client_auth_method = hs.config.oidc_client_auth_method # type: str
        # Statically-configured provider metadata; may be augmented later by
        # OpenID discovery (see load_metadata) when oidc_discover is set.
        self._provider_metadata = OpenIDProviderMetadata(
            issuer=hs.config.oidc_issuer,
            authorization_endpoint=hs.config.oidc_authorization_endpoint,
            token_endpoint=hs.config.oidc_token_endpoint,
            userinfo_endpoint=hs.config.oidc_userinfo_endpoint,
            jwks_uri=hs.config.oidc_jwks_uri,
        ) # type: OpenIDProviderMetadata
        self._provider_needs_discovery = hs.config.oidc_discover # type: bool
        # User-configurable provider that maps OIDC userinfo to Matrix attributes.
        self._user_mapping_provider = hs.config.oidc_user_mapping_provider_class(
            hs.config.oidc_user_mapping_provider_config
        ) # type: OidcMappingProvider
        self._skip_verification = hs.config.oidc_skip_verification # type: bool
        self._allow_existing_users = hs.config.oidc_allow_existing_users # type: bool

        # Proxied client so outbound OIDC traffic honours the configured HTTP proxy.
        self._http_client = hs.get_proxied_http_client()
        self._server_name = hs.config.server_name # type: str

        # identifier for the external_ids table
        self.idp_id = "oidc"

        # user-facing name of this auth provider
        self.idp_name = "OIDC"

        self._sso_handler = hs.get_sso_handler()

        # Register with the common SSO machinery so it can route logins to us.
        self._sso_handler.register_identity_provider(self)
|
2020-05-08 08:30:40 -04:00
|
|
|
    def _validate_metadata(self):
        """Verifies the provider metadata.

        This checks the validity of the currently loaded provider. Not
        everything is checked, only:

          - ``issuer``
          - ``authorization_endpoint``
          - ``token_endpoint``
          - ``response_types_supported`` (checks if "code" is in it)
          - ``jwks_uri``

        Raises:
            ValueError: if something in the provider is not valid
        """
        # Skip verification to allow non-compliant providers (e.g. issuers not running on a secure origin)
        if self._skip_verification is True:
            return

        m = self._provider_metadata
        m.validate_issuer()
        m.validate_authorization_endpoint()
        m.validate_token_endpoint()

        # These metadata fields are optional, so only validate them when present.
        if m.get("token_endpoint_auth_methods_supported") is not None:
            m.validate_token_endpoint_auth_methods_supported()
            # Our configured client auth method must be one the provider accepts.
            if (
                self._client_auth_method
                not in m["token_endpoint_auth_methods_supported"]
            ):
                raise ValueError(
                    '"{auth_method}" not in "token_endpoint_auth_methods_supported" ({supported!r})'.format(
                        auth_method=self._client_auth_method,
                        supported=m["token_endpoint_auth_methods_supported"],
                    )
                )

        if m.get("response_types_supported") is not None:
            m.validate_response_types_supported()

            # We only implement the authorization code flow, so "code" is required.
            if "code" not in m["response_types_supported"]:
                raise ValueError(
                    '"code" not in "response_types_supported" (%r)'
                    % (m["response_types_supported"],)
                )

        # Ensure there's a userinfo endpoint to fetch from if it is required.
        if self._uses_userinfo:
            if m.get("userinfo_endpoint") is None:
                raise ValueError(
                    'provider has no "userinfo_endpoint", even though it is required'
                )
        else:
            # If we're not using userinfo, we need a valid jwks to validate the ID token
            if m.get("jwks") is None:
                if m.get("jwks_uri") is not None:
                    m.validate_jwks_uri()
                else:
                    raise ValueError('"jwks_uri" must be set')
|
|
|
|
@property
|
|
|
|
def _uses_userinfo(self) -> bool:
|
|
|
|
"""Returns True if the ``userinfo_endpoint`` should be used.
|
|
|
|
|
|
|
|
This is based on the requested scopes: if the scopes include
|
|
|
|
``openid``, the provider should give use an ID token containing the
|
2020-10-23 12:38:40 -04:00
|
|
|
user information. If not, we should fetch them using the
|
2020-05-08 08:30:40 -04:00
|
|
|
``access_token`` with the ``userinfo_endpoint``.
|
|
|
|
"""
|
|
|
|
|
2020-10-01 13:54:35 -04:00
|
|
|
return (
|
|
|
|
"openid" not in self._scopes
|
|
|
|
or self._user_profile_method == "userinfo_endpoint"
|
|
|
|
)
|
2020-05-08 08:30:40 -04:00
|
|
|
|
|
|
|
async def load_metadata(self) -> OpenIDProviderMetadata:
|
|
|
|
"""Load and validate the provider metadata.
|
|
|
|
|
|
|
|
The values metadatas are discovered if ``oidc_config.discovery`` is
|
|
|
|
``True`` and then cached.
|
|
|
|
|
|
|
|
Raises:
|
|
|
|
ValueError: if something in the provider is not valid
|
|
|
|
|
|
|
|
Returns:
|
|
|
|
The provider's metadata.
|
|
|
|
"""
|
|
|
|
# If we are using the OpenID Discovery documents, it needs to be loaded once
|
|
|
|
# FIXME: should there be a lock here?
|
|
|
|
if self._provider_needs_discovery:
|
|
|
|
url = get_well_known_url(self._provider_metadata["issuer"], external=True)
|
|
|
|
metadata_response = await self._http_client.get_json(url)
|
|
|
|
# TODO: maybe update the other way around to let user override some values?
|
|
|
|
self._provider_metadata.update(metadata_response)
|
|
|
|
self._provider_needs_discovery = False
|
|
|
|
|
|
|
|
self._validate_metadata()
|
|
|
|
|
|
|
|
return self._provider_metadata
|
|
|
|
|
|
|
|
async def load_jwks(self, force: bool = False) -> JWKS:
|
|
|
|
"""Load the JSON Web Key Set used to sign ID tokens.
|
|
|
|
|
|
|
|
If we're not using the ``userinfo_endpoint``, user infos are extracted
|
|
|
|
from the ID token, which is a JWT signed by keys given by the provider.
|
|
|
|
The keys are then cached.
|
|
|
|
|
|
|
|
Args:
|
|
|
|
force: Force reloading the keys.
|
|
|
|
|
|
|
|
Returns:
|
|
|
|
The key set
|
|
|
|
|
|
|
|
Looks like this::
|
|
|
|
|
|
|
|
{
|
|
|
|
'keys': [
|
|
|
|
{
|
|
|
|
'kid': 'abcdef',
|
|
|
|
'kty': 'RSA',
|
|
|
|
'alg': 'RS256',
|
|
|
|
'use': 'sig',
|
|
|
|
'e': 'XXXX',
|
|
|
|
'n': 'XXXX',
|
|
|
|
}
|
|
|
|
]
|
|
|
|
}
|
|
|
|
"""
|
|
|
|
if self._uses_userinfo:
|
|
|
|
# We're not using jwt signing, return an empty jwk set
|
|
|
|
return {"keys": []}
|
|
|
|
|
|
|
|
# First check if the JWKS are loaded in the provider metadata.
|
|
|
|
# It can happen either if the provider gives its JWKS in the discovery
|
|
|
|
# document directly or if it was already loaded once.
|
|
|
|
metadata = await self.load_metadata()
|
|
|
|
jwk_set = metadata.get("jwks")
|
|
|
|
if jwk_set is not None and not force:
|
|
|
|
return jwk_set
|
|
|
|
|
|
|
|
# Loading the JWKS using the `jwks_uri` metadata
|
|
|
|
uri = metadata.get("jwks_uri")
|
|
|
|
if not uri:
|
|
|
|
raise RuntimeError('Missing "jwks_uri" in metadata')
|
|
|
|
|
|
|
|
jwk_set = await self._http_client.get_json(uri)
|
|
|
|
|
|
|
|
# Caching the JWKS in the provider's metadata
|
|
|
|
self._provider_metadata["jwks"] = jwk_set
|
|
|
|
return jwk_set
|
|
|
|
|
|
|
|
async def _exchange_code(self, code: str) -> Token:
|
|
|
|
"""Exchange an authorization code for a token.
|
|
|
|
|
|
|
|
This calls the ``token_endpoint`` with the authorization code we
|
|
|
|
received in the callback to exchange it for a token. The call uses the
|
|
|
|
``ClientAuth`` to authenticate with the client with its ID and secret.
|
|
|
|
|
2020-06-03 16:13:17 -04:00
|
|
|
See:
|
|
|
|
https://tools.ietf.org/html/rfc6749#section-3.2
|
|
|
|
https://openid.net/specs/openid-connect-core-1_0.html#TokenEndpoint
|
|
|
|
|
2020-05-08 08:30:40 -04:00
|
|
|
Args:
|
2020-05-15 12:26:02 -04:00
|
|
|
code: The authorization code we got from the callback.
|
2020-05-08 08:30:40 -04:00
|
|
|
|
|
|
|
Returns:
|
|
|
|
A dict containing various tokens.
|
|
|
|
|
|
|
|
May look like this::
|
|
|
|
|
|
|
|
{
|
|
|
|
'token_type': 'bearer',
|
|
|
|
'access_token': 'abcdef',
|
|
|
|
'expires_in': 3599,
|
|
|
|
'id_token': 'ghijkl',
|
|
|
|
'refresh_token': 'mnopqr',
|
|
|
|
}
|
|
|
|
|
|
|
|
Raises:
|
|
|
|
OidcError: when the ``token_endpoint`` returned an error.
|
|
|
|
"""
|
|
|
|
metadata = await self.load_metadata()
|
|
|
|
token_endpoint = metadata.get("token_endpoint")
|
|
|
|
headers = {
|
|
|
|
"Content-Type": "application/x-www-form-urlencoded",
|
|
|
|
"User-Agent": self._http_client.user_agent,
|
|
|
|
"Accept": "application/json",
|
|
|
|
}
|
|
|
|
|
|
|
|
args = {
|
|
|
|
"grant_type": "authorization_code",
|
|
|
|
"code": code,
|
|
|
|
"redirect_uri": self._callback_url,
|
|
|
|
}
|
|
|
|
body = urlencode(args, True)
|
|
|
|
|
|
|
|
# Fill the body/headers with credentials
|
|
|
|
uri, headers, body = self._client_auth.prepare(
|
|
|
|
method="POST", uri=token_endpoint, headers=headers, body=body
|
|
|
|
)
|
|
|
|
headers = {k: [v] for (k, v) in headers.items()}
|
|
|
|
|
|
|
|
# Do the actual request
|
|
|
|
# We're not using the SimpleHttpClient util methods as we don't want to
|
|
|
|
# check the HTTP status code and we do the body encoding ourself.
|
|
|
|
response = await self._http_client.request(
|
|
|
|
method="POST", uri=uri, data=body.encode("utf-8"), headers=headers,
|
|
|
|
)
|
|
|
|
|
|
|
|
# This is used in multiple error messages below
|
|
|
|
status = "{code} {phrase}".format(
|
|
|
|
code=response.code, phrase=response.phrase.decode("utf-8")
|
|
|
|
)
|
|
|
|
|
2020-06-03 16:13:17 -04:00
|
|
|
resp_body = await make_deferred_yieldable(readBody(response))
|
2020-05-08 08:30:40 -04:00
|
|
|
|
|
|
|
if response.code >= 500:
|
|
|
|
# In case of a server error, we should first try to decode the body
|
|
|
|
# and check for an error field. If not, we respond with a generic
|
|
|
|
# error message.
|
|
|
|
try:
|
2020-08-19 07:26:03 -04:00
|
|
|
resp = json_decoder.decode(resp_body.decode("utf-8"))
|
2020-05-08 08:30:40 -04:00
|
|
|
error = resp["error"]
|
|
|
|
description = resp.get("error_description", error)
|
|
|
|
except (ValueError, KeyError):
|
|
|
|
# Catch ValueError for the JSON decoding and KeyError for the "error" field
|
|
|
|
error = "server_error"
|
|
|
|
description = (
|
|
|
|
(
|
|
|
|
'Authorization server responded with a "{status}" error '
|
|
|
|
"while exchanging the authorization code."
|
|
|
|
).format(status=status),
|
|
|
|
)
|
|
|
|
|
|
|
|
raise OidcError(error, description)
|
|
|
|
|
|
|
|
# Since it is a not a 5xx code, body should be a valid JSON. It will
|
|
|
|
# raise if not.
|
2020-08-19 07:26:03 -04:00
|
|
|
resp = json_decoder.decode(resp_body.decode("utf-8"))
|
2020-05-08 08:30:40 -04:00
|
|
|
|
|
|
|
if "error" in resp:
|
|
|
|
error = resp["error"]
|
|
|
|
# In case the authorization server responded with an error field,
|
|
|
|
# it should be a 4xx code. If not, warn about it but don't do
|
|
|
|
# anything special and report the original error message.
|
|
|
|
if response.code < 400:
|
|
|
|
logger.debug(
|
|
|
|
"Invalid response from the authorization server: "
|
|
|
|
'responded with a "{status}" '
|
|
|
|
"but body has an error field: {error!r}".format(
|
|
|
|
status=status, error=resp["error"]
|
|
|
|
)
|
|
|
|
)
|
|
|
|
|
|
|
|
description = resp.get("error_description", error)
|
|
|
|
raise OidcError(error, description)
|
|
|
|
|
|
|
|
# Now, this should not be an error. According to RFC6749 sec 5.1, it
|
|
|
|
# should be a 200 code. We're a bit more flexible than that, and will
|
|
|
|
# only throw on a 4xx code.
|
|
|
|
if response.code >= 400:
|
|
|
|
description = (
|
|
|
|
'Authorization server responded with a "{status}" error '
|
|
|
|
'but did not include an "error" field in its response.'.format(
|
|
|
|
status=status
|
|
|
|
)
|
|
|
|
)
|
|
|
|
logger.warning(description)
|
|
|
|
# Body was still valid JSON. Might be useful to log it for debugging.
|
|
|
|
logger.warning("Code exchange response: {resp!r}".format(resp=resp))
|
|
|
|
raise OidcError("server_error", description)
|
|
|
|
|
|
|
|
return resp
|
|
|
|
|
|
|
|
async def _fetch_userinfo(self, token: Token) -> UserInfo:
|
2020-10-23 12:38:40 -04:00
|
|
|
"""Fetch user information from the ``userinfo_endpoint``.
|
2020-05-08 08:30:40 -04:00
|
|
|
|
|
|
|
Args:
|
|
|
|
token: the token given by the ``token_endpoint``.
|
|
|
|
Must include an ``access_token`` field.
|
|
|
|
|
|
|
|
Returns:
|
|
|
|
UserInfo: an object representing the user.
|
|
|
|
"""
|
|
|
|
metadata = await self.load_metadata()
|
|
|
|
|
|
|
|
resp = await self._http_client.get_json(
|
|
|
|
metadata["userinfo_endpoint"],
|
|
|
|
headers={"Authorization": ["Bearer {}".format(token["access_token"])]},
|
|
|
|
)
|
|
|
|
|
|
|
|
return UserInfo(resp)
|
|
|
|
|
|
|
|
    async def _parse_id_token(self, token: Token, nonce: str) -> UserInfo:
        """Return an instance of UserInfo from token's ``id_token``.

        The ``id_token`` is a JWT; its signature is verified against the
        provider's JWKS and its claims (issuer, nonce, and ``at_hash`` when an
        access token is present) are validated.

        Args:
            token: the token given by the ``token_endpoint``.
                Must include an ``id_token`` field.
            nonce: the nonce value originally sent in the initial authorization
                request. This value should match the one inside the token.

        Returns:
            An object representing the user.
        """
        metadata = await self.load_metadata()
        claims_params = {
            "nonce": nonce,
            "client_id": self._client_auth.client_id,
        }
        if "access_token" in token:
            # If we got an `access_token`, there should be an `at_hash` claim
            # in the `id_token` that we can check against.
            claims_params["access_token"] = token["access_token"]
            claims_cls = CodeIDToken
        else:
            claims_cls = ImplicitIDToken

        # RS256 is the default mandated by OpenID Connect Core when the
        # provider doesn't advertise its supported algorithms.
        alg_values = metadata.get("id_token_signing_alg_values_supported", ["RS256"])

        jwt = JsonWebToken(alg_values)

        # The "iss" claim must match the issuer from the provider metadata.
        claim_options = {"iss": {"values": [metadata["issuer"]]}}

        # Try to decode the keys in cache first, then retry by forcing the keys
        # to be reloaded
        jwk_set = await self.load_jwks()
        try:
            claims = jwt.decode(
                token["id_token"],
                key=jwk_set,
                claims_cls=claims_cls,
                claims_options=claim_options,
                claims_params=claims_params,
            )
        except ValueError:
            # Likely a key rotation on the provider side: refresh the JWKS once
            # and retry the decode with the new keys.
            logger.info("Reloading JWKS after decode error")
            jwk_set = await self.load_jwks(force=True)  # try reloading the jwks
            claims = jwt.decode(
                token["id_token"],
                key=jwk_set,
                claims_cls=claims_cls,
                claims_options=claim_options,
                claims_params=claims_params,
            )

        claims.validate(leeway=120)  # allows 2 min of clock skew
        return UserInfo(claims)
|
|
|
|
    async def handle_redirect_request(
        self,
        request: SynapseRequest,
        client_redirect_url: Optional[bytes],
        ui_auth_session_id: Optional[str] = None,
    ) -> str:
        """Handle an incoming request to /login/sso/redirect

        It returns a redirect to the authorization endpoint with a few
        parameters:

          - ``client_id``: the client ID set in ``oidc_config.client_id``
          - ``response_type``: ``code``
          - ``redirect_uri``: the callback URL ; ``{base url}/_synapse/oidc/callback``
          - ``scope``: the list of scopes set in ``oidc_config.scopes``
          - ``state``: a random string
          - ``nonce``: a random string

        In addition generating a redirect URL, we are setting a cookie with
        a signed macaroon token containing the state, the nonce and the
        client_redirect_url params. Those are then checked when the client
        comes back from the provider.

        Args:
            request: the incoming request from the browser.
                We'll respond to it with a redirect and a cookie.
            client_redirect_url: the URL that we should redirect the client to
                when everything is done (or None for UI Auth)
            ui_auth_session_id: The session ID of the ongoing UI Auth (or
                None if this is a login).

        Returns:
            The redirect URL to the authorization endpoint.
        """

        # Random values binding this browser session to the eventual callback:
        # `state` round-trips via the query string, `nonce` via the ID token.
        state = generate_token()
        nonce = generate_token()

        # UI Auth flows pass no client redirect URL; normalise to empty bytes
        # so it can be decoded into the session token below.
        if not client_redirect_url:
            client_redirect_url = b""

        # Sign the session details into a macaroon, stored client-side in a
        # cookie and verified when the provider redirects back to us.
        cookie = self._token_generator.generate_oidc_session_token(
            state=state,
            session_data=OidcSessionData(
                nonce=nonce,
                client_redirect_url=client_redirect_url.decode(),
                ui_auth_session_id=ui_auth_session_id,
            ),
        )
        request.addCookie(
            SESSION_COOKIE_NAME,
            cookie,
            path="/_synapse/oidc",
            max_age="3600",
            httpOnly=True,
            sameSite="lax",
        )

        metadata = await self.load_metadata()
        authorization_endpoint = metadata.get("authorization_endpoint")
        # Build the RFC6749 authorization request URI.
        return prepare_grant_uri(
            authorization_endpoint,
            client_id=self._client_auth.client_id,
            response_type="code",
            redirect_uri=self._callback_url,
            scope=self._scopes,
            state=state,
            nonce=nonce,
        )
|
|
|
|
    async def handle_oidc_callback(self, request: SynapseRequest) -> None:
        """Handle an incoming request to /_synapse/oidc/callback

        Since we might want to display OIDC-related errors in a user-friendly
        way, we don't raise SynapseError from here. Instead, we call
        ``self._sso_handler.render_error`` which displays an HTML page for the error.

        Most of the OpenID Connect logic happens here:

          - first, we check if there was any error returned by the provider and
            display it
          - then we fetch the session cookie, decode and verify it
          - the ``state`` query parameter should match with the one stored in the
            session cookie
          - once we known this session is legit, exchange the code with the
            provider using the ``token_endpoint`` (see ``_exchange_code``)
          - once we have the token, use it to either extract the UserInfo from
            the ``id_token`` (``_parse_id_token``), or use the ``access_token``
            to fetch UserInfo from the ``userinfo_endpoint``
            (``_fetch_userinfo``)
          - map those UserInfo to a Matrix user (``_map_userinfo_to_user``) and
            finish the login

        Args:
            request: the incoming request from the browser.
        """

        # The provider might redirect with an error.
        # In that case, just display it as-is.
        if b"error" in request.args:
            # error response from the auth server. see:
            # https://tools.ietf.org/html/rfc6749#section-4.1.2.1
            # https://openid.net/specs/openid-connect-core-1_0.html#AuthError
            error = request.args[b"error"][0].decode()
            description = request.args.get(b"error_description", [b""])[0].decode()

            # Most of the errors returned by the provider could be due by
            # either the provider misbehaving or Synapse being misconfigured.
            # The only exception of that is "access_denied", where the user
            # probably cancelled the login flow. In other cases, log those errors.
            if error != "access_denied":
                logger.error("Error from the OIDC provider: %s %s", error, description)

            self._sso_handler.render_error(request, error, description)
            return

        # otherwise, it is presumably a successful response. see:
        # https://tools.ietf.org/html/rfc6749#section-4.1.2

        # Fetch the session cookie
        session = request.getCookie(SESSION_COOKIE_NAME)  # type: Optional[bytes]
        if session is None:
            logger.info("No session cookie found")
            self._sso_handler.render_error(
                request, "missing_session", "No session cookie found"
            )
            return

        # Remove the cookie. There is a good chance that if the callback failed
        # once, it will fail next time and the code will already be exchanged.
        # Removing it early avoids spamming the provider with token requests.
        request.addCookie(
            SESSION_COOKIE_NAME,
            b"",
            path="/_synapse/oidc",
            expires="Thu, Jan 01 1970 00:00:00 UTC",
            httpOnly=True,
            sameSite="lax",
        )

        # Check for the state query parameter
        if b"state" not in request.args:
            logger.info("State parameter is missing")
            self._sso_handler.render_error(
                request, "invalid_request", "State parameter is missing"
            )
            return

        state = request.args[b"state"][0].decode()

        # Deserialize the session token and verify it.
        try:
            session_data = self._token_generator.verify_oidc_session_token(
                session, state
            )
        except MacaroonDeserializationException as e:
            # Cookie was present but not a parseable macaroon.
            logger.exception("Invalid session")
            self._sso_handler.render_error(request, "invalid_session", str(e))
            return
        except MacaroonInvalidSignatureException as e:
            # Macaroon parsed but its signature (or the bound state) didn't match.
            logger.exception("Could not verify session")
            self._sso_handler.render_error(request, "mismatching_session", str(e))
            return

        # Exchange the code with the provider
        if b"code" not in request.args:
            logger.info("Code parameter is missing")
            self._sso_handler.render_error(
                request, "invalid_request", "Code parameter is missing"
            )
            return

        logger.debug("Exchanging code")
        code = request.args[b"code"][0].decode()
        try:
            token = await self._exchange_code(code)
        except OidcError as e:
            logger.exception("Could not exchange code")
            self._sso_handler.render_error(request, e.error, e.error_description)
            return

        logger.debug("Successfully obtained OAuth2 access token")

        # Now that we have a token, get the userinfo, either by decoding the
        # `id_token` or by fetching the `userinfo_endpoint`.
        if self._uses_userinfo:
            logger.debug("Fetching userinfo")
            try:
                userinfo = await self._fetch_userinfo(token)
            except Exception as e:
                logger.exception("Could not fetch userinfo")
                self._sso_handler.render_error(request, "fetch_error", str(e))
                return
        else:
            logger.debug("Extracting userinfo from id_token")
            try:
                userinfo = await self._parse_id_token(token, nonce=session_data.nonce)
            except Exception as e:
                logger.exception("Invalid id_token")
                self._sso_handler.render_error(request, "invalid_token", str(e))
                return

        # first check if we're doing a UIA
        if session_data.ui_auth_session_id:
            try:
                remote_user_id = self._remote_id_from_userinfo(userinfo)
            except Exception as e:
                logger.exception("Could not extract remote user id")
                self._sso_handler.render_error(request, "mapping_error", str(e))
                return

            # UI Auth only needs to confirm the external identity, not log in.
            return await self._sso_handler.complete_sso_ui_auth_request(
                self.idp_id, remote_user_id, session_data.ui_auth_session_id, request
            )

        # otherwise, it's a login

        # Call the mapper to register/login the user
        try:
            await self._complete_oidc_login(
                userinfo, token, request, session_data.client_redirect_url
            )
        except MappingException as e:
            logger.exception("Could not map user")
            self._sso_handler.render_error(request, "mapping_error", str(e))
|
2020-12-16 15:01:53 -05:00
|
|
|
async def _complete_oidc_login(
|
|
|
|
self,
|
|
|
|
userinfo: UserInfo,
|
|
|
|
token: Token,
|
|
|
|
request: SynapseRequest,
|
|
|
|
client_redirect_url: str,
|
|
|
|
) -> None:
|
|
|
|
"""Given a UserInfo response, complete the login flow
|
2020-05-08 08:30:40 -04:00
|
|
|
|
|
|
|
UserInfo should have a claim that uniquely identifies users. This claim
|
|
|
|
is usually `sub`, but can be configured with `oidc_config.subject_claim`.
|
|
|
|
It is then used as an `external_id`.
|
|
|
|
|
|
|
|
If we don't find the user that way, we should register the user,
|
|
|
|
mapping the localpart and the display name from the UserInfo.
|
|
|
|
|
2020-09-25 07:01:45 -04:00
|
|
|
If a user already exists with the mxid we've mapped and allow_existing_users
|
|
|
|
is disabled, raise an exception.
|
2020-05-08 08:30:40 -04:00
|
|
|
|
2020-12-16 15:01:53 -05:00
|
|
|
Otherwise, render a redirect back to the client_redirect_url with a loginToken.
|
|
|
|
|
2020-05-08 08:30:40 -04:00
|
|
|
Args:
|
|
|
|
userinfo: an object representing the user
|
|
|
|
token: a dict with the tokens obtained from the provider
|
2020-12-16 15:01:53 -05:00
|
|
|
request: The request to respond to
|
|
|
|
client_redirect_url: The redirect URL passed in by the client.
|
2020-05-08 08:30:40 -04:00
|
|
|
|
|
|
|
Raises:
|
|
|
|
MappingException: if there was an error while mapping some properties
|
|
|
|
"""
|
|
|
|
try:
|
2020-12-08 09:03:38 -05:00
|
|
|
remote_user_id = self._remote_id_from_userinfo(userinfo)
|
2020-05-08 08:30:40 -04:00
|
|
|
except Exception as e:
|
|
|
|
raise MappingException(
|
|
|
|
"Failed to extract subject from OIDC response: %s" % (e,)
|
|
|
|
)
|
|
|
|
|
2020-11-25 10:04:22 -05:00
|
|
|
# Older mapping providers don't accept the `failures` argument, so we
|
|
|
|
# try and detect support.
|
|
|
|
mapper_signature = inspect.signature(
|
|
|
|
self._user_mapping_provider.map_user_attributes
|
2020-05-08 08:30:40 -04:00
|
|
|
)
|
2020-11-25 10:04:22 -05:00
|
|
|
supports_failures = "failures" in mapper_signature.parameters
|
2020-05-08 08:30:40 -04:00
|
|
|
|
2020-11-25 10:04:22 -05:00
|
|
|
async def oidc_response_to_user_attributes(failures: int) -> UserAttributes:
|
|
|
|
"""
|
|
|
|
Call the mapping provider to map the OIDC userinfo and token to user attributes.
|
2020-05-08 08:30:40 -04:00
|
|
|
|
2020-11-25 10:04:22 -05:00
|
|
|
This is backwards compatibility for abstraction for the SSO handler.
|
|
|
|
"""
|
|
|
|
if supports_failures:
|
|
|
|
attributes = await self._user_mapping_provider.map_user_attributes(
|
|
|
|
userinfo, token, failures
|
|
|
|
)
|
|
|
|
else:
|
|
|
|
# If the mapping provider does not support processing failures,
|
|
|
|
# do not continually generate the same Matrix ID since it will
|
|
|
|
# continue to already be in use. Note that the error raised is
|
|
|
|
# arbitrary and will get turned into a MappingException.
|
|
|
|
if failures:
|
2020-12-04 08:25:15 -05:00
|
|
|
raise MappingException(
|
2020-11-25 10:04:22 -05:00
|
|
|
"Mapping provider does not support de-duplicating Matrix IDs"
|
|
|
|
)
|
2020-05-08 08:30:40 -04:00
|
|
|
|
2020-11-25 10:04:22 -05:00
|
|
|
attributes = await self._user_mapping_provider.map_user_attributes( # type: ignore
|
|
|
|
userinfo, token
|
|
|
|
)
|
2020-05-08 08:30:40 -04:00
|
|
|
|
2020-11-25 10:04:22 -05:00
|
|
|
return UserAttributes(**attributes)
|
2020-11-17 09:46:23 -05:00
|
|
|
|
2020-12-02 07:45:42 -05:00
|
|
|
async def grandfather_existing_users() -> Optional[str]:
|
|
|
|
if self._allow_existing_users:
|
|
|
|
# If allowing existing users we want to generate a single localpart
|
|
|
|
# and attempt to match it.
|
|
|
|
attributes = await oidc_response_to_user_attributes(failures=0)
|
|
|
|
|
2021-01-13 05:26:12 -05:00
|
|
|
user_id = UserID(attributes.localpart, self._server_name).to_string()
|
|
|
|
users = await self._store.get_users_by_id_case_insensitive(user_id)
|
2020-12-02 07:45:42 -05:00
|
|
|
if users:
|
|
|
|
# If an existing matrix ID is returned, then use it.
|
|
|
|
if len(users) == 1:
|
|
|
|
previously_registered_user_id = next(iter(users))
|
|
|
|
elif user_id in users:
|
|
|
|
previously_registered_user_id = user_id
|
|
|
|
else:
|
|
|
|
# Do not attempt to continue generating Matrix IDs.
|
|
|
|
raise MappingException(
|
|
|
|
"Attempted to login as '{}' but it matches more than one user inexactly: {}".format(
|
|
|
|
user_id, users
|
|
|
|
)
|
|
|
|
)
|
|
|
|
|
|
|
|
return previously_registered_user_id
|
|
|
|
|
|
|
|
return None
|
|
|
|
|
2020-12-16 15:01:53 -05:00
|
|
|
# Mapping providers might not have get_extra_attributes: only call this
|
|
|
|
# method if it exists.
|
|
|
|
extra_attributes = None
|
|
|
|
get_extra_attributes = getattr(
|
|
|
|
self._user_mapping_provider, "get_extra_attributes", None
|
|
|
|
)
|
|
|
|
if get_extra_attributes:
|
|
|
|
extra_attributes = await get_extra_attributes(userinfo, token)
|
|
|
|
|
|
|
|
await self._sso_handler.complete_sso_login_request(
|
2021-01-04 13:13:49 -05:00
|
|
|
self.idp_id,
|
2020-11-25 10:04:22 -05:00
|
|
|
remote_user_id,
|
2020-12-16 15:01:53 -05:00
|
|
|
request,
|
|
|
|
client_redirect_url,
|
2020-11-25 10:04:22 -05:00
|
|
|
oidc_response_to_user_attributes,
|
2020-12-02 07:45:42 -05:00
|
|
|
grandfather_existing_users,
|
2020-12-16 15:01:53 -05:00
|
|
|
extra_attributes,
|
2020-05-08 08:30:40 -04:00
|
|
|
)
|
|
|
|
|
2020-12-08 09:03:38 -05:00
|
|
|
def _remote_id_from_userinfo(self, userinfo: UserInfo) -> str:
|
|
|
|
"""Extract the unique remote id from an OIDC UserInfo block
|
|
|
|
|
|
|
|
Args:
|
|
|
|
userinfo: An object representing the user given by the OIDC provider
|
|
|
|
Returns:
|
|
|
|
remote user id
|
|
|
|
"""
|
|
|
|
remote_user_id = self._user_mapping_provider.get_remote_user_id(userinfo)
|
|
|
|
# Some OIDC providers use integer IDs, but Synapse expects external IDs
|
|
|
|
# to be strings.
|
|
|
|
return str(remote_user_id)
|
|
|
|
|
2020-05-08 08:30:40 -04:00
|
|
|
|
2021-01-13 05:26:12 -05:00
|
|
|
class OidcSessionTokenGenerator:
    """Creates and validates the macaroon-based OIDC session cookies.

    These tokens carry the ``state``/``nonce`` pair and the client redirect
    URL across the round-trip to the identity provider.
    """

    def __init__(self, hs: "HomeServer"):
        # The clock stamps the expiry caveat at generation time and checks it
        # again at verification time.
        self._clock = hs.get_clock()
        self._server_name = hs.hostname
        self._macaroon_secret_key = hs.config.key.macaroon_secret_key

    def generate_oidc_session_token(
        self,
        state: str,
        session_data: "OidcSessionData",
        duration_in_ms: int = (60 * 60 * 1000),
    ) -> str:
        """Build a signed macaroon recording the state of an OIDC session.

        Synapse hands the provider a random ``state`` and ``nonce`` at the
        start of the authorization flow; this token lets us check them when
        the client comes back, and also carries the ``client_redirect_url``
        needed to finish the SSO login flow.

        Args:
            state: The ``state`` parameter passed to the OIDC provider.
            session_data: data to include in the session token.
            duration_in_ms: An optional duration for the token in milliseconds.
                Defaults to an hour.

        Returns:
            A signed macaroon token with the session information.
        """
        expiry = self._clock.time_msec() + duration_in_ms

        macaroon = pymacaroons.Macaroon(
            location=self._server_name, identifier="key", key=self._macaroon_secret_key,
        )

        # Assemble the caveats first, then stamp them all onto the macaroon.
        # Order matters only in that verification expects these exact strings.
        caveats = [
            "gen = 1",
            "type = session",
            "state = %s" % (state,),
            "nonce = %s" % (session_data.nonce,),
            "client_redirect_url = %s" % (session_data.client_redirect_url,),
        ]
        if session_data.ui_auth_session_id:
            caveats.append(
                "ui_auth_session_id = %s" % (session_data.ui_auth_session_id,)
            )
        caveats.append("time < %d" % (expiry,))

        for caveat in caveats:
            macaroon.add_first_party_caveat(caveat)

        return macaroon.serialize()

    def verify_oidc_session_token(
        self, session: bytes, state: str
    ) -> "OidcSessionData":
        """Check that a session token was issued by us, and unpack it.

        Args:
            session: the serialized token from the session cookie
            state: the ``state`` value the OIDC provider gave back, which must
                match the one recorded in the token

        Returns:
            The data extracted from the session cookie.
        """
        macaroon = pymacaroons.Macaroon.deserialize(session)

        verifier = pymacaroons.Verifier()
        verifier.satisfy_exact("gen = 1")
        verifier.satisfy_exact("type = session")
        verifier.satisfy_exact("state = %s" % (state,))
        for known_prefix in (
            "nonce = ",
            "client_redirect_url = ",
            # A UI auth session ID is only present for UI-auth flows, but it
            # is harmless to accept the caveat unconditionally.
            "ui_auth_session_id = ",
        ):
            # Bind the prefix as a default argument to avoid the
            # late-binding-closure pitfall.
            verifier.satisfy_general(
                lambda c, prefix=known_prefix: c.startswith(prefix)
            )
        verifier.satisfy_general(self._verify_expiry)

        verifier.verify(macaroon, self._macaroon_secret_key)

        # Pull the interesting values back out of the caveats.
        try:
            ui_auth_session_id = self._get_value_from_macaroon(
                macaroon, "ui_auth_session_id"
            )  # type: Optional[str]
        except ValueError:
            # Not a UI-auth flow: the caveat is simply absent.
            ui_auth_session_id = None

        return OidcSessionData(
            nonce=self._get_value_from_macaroon(macaroon, "nonce"),
            client_redirect_url=self._get_value_from_macaroon(
                macaroon, "client_redirect_url"
            ),
            ui_auth_session_id=ui_auth_session_id,
        )

    def _get_value_from_macaroon(self, macaroon: pymacaroons.Macaroon, key: str) -> str:
        """Return the value of the first ``<key> = <value>`` caveat.

        Args:
            macaroon: the token to search
            key: the caveat name to look for

        Returns:
            The extracted value.

        Raises:
            ValueError: if no caveat with that key is present
        """
        prefix = key + " = "
        match = next(
            (
                caveat.caveat_id[len(prefix) :]
                for caveat in macaroon.caveats
                if caveat.caveat_id.startswith(prefix)
            ),
            None,
        )
        if match is None:
            raise ValueError("No %s caveat in macaroon" % (key,))
        return match

    def _verify_expiry(self, caveat: str) -> bool:
        """Caveat checker for ``time < <ms>``: true iff the deadline is ahead."""
        prefix = "time < "
        if not caveat.startswith(prefix):
            return False
        expiry = int(caveat[len(prefix) :])
        return self._clock.time_msec() < expiry
|
|
|
|
|
|
|
|
|
|
|
|
@attr.s(frozen=True, slots=True)
class OidcSessionData:
    """The attributes which are stored in an OIDC session cookie.

    NOTE: field order matters — it defines the positional ``__init__``
    signature generated by attrs.
    """

    # The `nonce` parameter passed to the OIDC provider.
    nonce = attr.ib(type=str)

    # The URL the client gave when it initiated the flow. ("" if this is a UI Auth)
    client_redirect_url = attr.ib(type=str)

    # The session ID of the ongoing UI Auth (None if this is a login)
    ui_auth_session_id = attr.ib(type=Optional[str], default=None)
|
|
|
|
|
|
|
|
|
2020-11-25 10:04:22 -05:00
|
|
|
class UserAttributeDict(TypedDict):
    """Shape of the dict a mapping provider returns from ``map_user_attributes``."""

    # Desired localpart of the Matrix ID (None if no localpart template is set).
    localpart: Optional[str]
    # Desired display name (None for no display name).
    display_name: Optional[str]


# Type variable for a mapping provider's parsed-config object.
C = TypeVar("C")
|
|
|
|
|
|
|
|
|
|
|
|
class OidcMappingProvider(Generic[C]):
    """Base interface for mapping an OIDC ``UserInfo`` object to user attributes.

    Concrete providers implement this API; ``C`` is the provider's parsed
    config type, produced by :meth:`parse_config`.
    """

    def __init__(self, config: C):
        """
        Args:
            config: A custom config object from this module, parsed by ``parse_config()``
        """

    @staticmethod
    def parse_config(config: dict) -> C:
        """Turn the raw homeserver config dict into this provider's config object.

        Args:
            config: A dictionary containing configuration options for this provider

        Returns:
            The provider-specific config object.
        """
        raise NotImplementedError()

    def get_remote_user_id(self, userinfo: UserInfo) -> str:
        """Return a unique identifier for this user.

        For an OIDC-compliant provider this is normally the ``sub`` claim of
        the UserInfo object.

        Args:
            userinfo: An object representing the user given by the OIDC provider

        Returns:
            A unique user ID.
        """
        raise NotImplementedError()

    async def map_user_attributes(
        self, userinfo: UserInfo, token: Token, failures: int
    ) -> UserAttributeDict:
        """Map a ``UserInfo`` object into user attributes.

        Args:
            userinfo: An object representing the user given by the OIDC provider
            token: A dict with the tokens returned by the provider
            failures: How many times mapping this UserInfo has already resulted
                in a failure (used to de-duplicate generated Matrix IDs).

        Returns:
            A dict containing the ``localpart`` and (optionally) the ``display_name``.
        """
        raise NotImplementedError()

    async def get_extra_attributes(self, userinfo: UserInfo, token: Token) -> JsonDict:
        """Map a ``UserInfo`` object into extra attributes passed to the client at login.

        Args:
            userinfo: An object representing the user given by the OIDC provider
            token: A dict with the tokens returned by the provider

        Returns:
            A dict of additional attributes; must be JSON serializable.
            Empty by default.
        """
        return {}
|
|
|
|
|
2020-05-08 08:30:40 -04:00
|
|
|
|
|
|
|
# Jinja `finalize` hook: substitute "" for None so templates never render the
# literal string "None" for missing values. Other falsy values pass through.
def jinja_finalize(thing):
    if thing is None:
        return ""
    return thing
|
|
|
|
|
|
|
|
|
|
|
|
# Shared Jinja environment for all mapping-provider templates; `finalize`
# replaces None with "" so absent claims render as empty strings.
env = Environment(finalize=jinja_finalize)
|
|
|
|
|
|
|
|
|
|
|
|
@attr.s
class JinjaOidcMappingConfig:
    """Parsed configuration for :class:`JinjaOidcMappingProvider`.

    Built by ``JinjaOidcMappingProvider.parse_config``. The templates are
    rendered with the OIDC ``UserInfo`` object bound to ``user``.
    """

    # Name of the claim in the UserInfo response holding the unique user ID.
    subject_claim = attr.ib(type=str)
    # Template producing the MXID localpart, or None if not configured.
    localpart_template = attr.ib(type=Optional[Template])
    # Template producing the display name, or None if not configured.
    display_name_template = attr.ib(type=Optional[Template])
    # Extra-attribute templates, keyed by attribute name.
    extra_attributes = attr.ib(type=Dict[str, Template])
|
2020-05-08 08:30:40 -04:00
|
|
|
|
|
|
|
|
|
|
|
class JinjaOidcMappingProvider(OidcMappingProvider[JinjaOidcMappingConfig]):
    """An implementation of a mapping provider based on Jinja templates.

    This is the default mapping provider. Each configured template is rendered
    with the OIDC ``UserInfo`` object bound to ``user``.
    """

    def __init__(self, config: JinjaOidcMappingConfig):
        self._config = config

    @staticmethod
    def parse_config(config: dict) -> JinjaOidcMappingConfig:
        """Parse the raw config dict into a :class:`JinjaOidcMappingConfig`.

        Args:
            config: the mapping-provider config dict from the homeserver
                configuration

        Returns:
            The parsed config object.

        Raises:
            ConfigError: if a template fails to compile, or if
                ``extra_attributes`` is present but not a dict.
        """

        def parse_template(option_name: str) -> Optional[Template]:
            # Compile the template configured under `option_name`, or return
            # None if the option is absent. Compilation failures become
            # ConfigErrors pointing at the offending option.
            if option_name not in config:
                return None
            try:
                return env.from_string(config[option_name])
            except Exception as e:
                raise ConfigError(
                    "invalid jinja template", path=[option_name]
                ) from e

        subject_claim = config.get("subject_claim", "sub")

        localpart_template = parse_template("localpart_template")
        display_name_template = parse_template("display_name_template")

        extra_attributes = {}  # type: Dict[str, Template]
        if "extra_attributes" in config:
            extra_attributes_config = config.get("extra_attributes") or {}
            if not isinstance(extra_attributes_config, dict):
                raise ConfigError("must be a dict", path=["extra_attributes"])

            for key, value in extra_attributes_config.items():
                try:
                    extra_attributes[key] = env.from_string(value)
                except Exception as e:
                    raise ConfigError(
                        "invalid jinja template", path=["extra_attributes", key]
                    ) from e

        return JinjaOidcMappingConfig(
            subject_claim=subject_claim,
            localpart_template=localpart_template,
            display_name_template=display_name_template,
            extra_attributes=extra_attributes,
        )

    def get_remote_user_id(self, userinfo: UserInfo) -> str:
        """Return the value of the configured subject claim (default ``sub``)."""
        return userinfo[self._config.subject_claim]

    async def map_user_attributes(
        self, userinfo: UserInfo, token: Token, failures: int
    ) -> UserAttributeDict:
        """Render the configured templates against the userinfo.

        Args:
            userinfo: An object representing the user given by the OIDC provider
            token: A dict with the tokens returned by the provider (unused by
                this implementation, but part of the provider API)
            failures: How many times a call to this function with this
                UserInfo has resulted in a failure; used to de-duplicate the
                generated localpart.

        Returns:
            A dict containing the ``localpart`` and ``display_name`` attributes.
        """
        localpart = None  # type: Optional[str]

        if self._config.localpart_template:
            localpart = self._config.localpart_template.render(user=userinfo).strip()

            # Ensure only valid characters are included in the MXID.
            localpart = map_username_to_mxid_localpart(localpart)

            # Append suffix integer if last call to this function failed to produce
            # a usable mxid.
            localpart += str(failures) if failures else ""

        display_name = None  # type: Optional[str]
        if self._config.display_name_template is not None:
            display_name = self._config.display_name_template.render(
                user=userinfo
            ).strip()

            # An empty rendering means "no display name", not an empty one.
            if display_name == "":
                display_name = None

        return UserAttributeDict(localpart=localpart, display_name=display_name)

    async def get_extra_attributes(self, userinfo: UserInfo, token: Token) -> JsonDict:
        """Render each configured extra-attribute template against the userinfo.

        Templates which fail to render are logged and skipped rather than
        failing the login.

        Args:
            userinfo: An object representing the user given by the OIDC provider
            token: A dict with the tokens returned by the provider

        Returns:
            A dict of the successfully rendered attributes.
        """
        extras = {}  # type: Dict[str, str]
        for key, template in self._config.extra_attributes.items():
            try:
                extras[key] = template.render(user=userinfo).strip()
            except Exception as e:
                # Log an error and skip this value (don't break login for this).
                # Pass key/e as lazy logger arguments rather than %-formatting
                # eagerly.
                logger.error("Failed to render OIDC extra attribute %s: %s", key, e)
        return extras
|