Merge pull request #6724 from matrix-org/rav/log_saml_attributes

Log saml assertions rather than the whole response
Richard van der Hoff, 2020-01-17 10:33:24 +00:00, committed via GitHub
commit 59dc87c618
11 changed files with 114 additions and 24 deletions

changelog.d/6724.misc (new file)

@@ -0,0 +1 @@
When processing a SAML response, log the assertions for easier configuration.


@@ -32,6 +32,7 @@ from synapse.types import (
     mxid_localpart_allowed_characters,
 )
 from synapse.util.async_helpers import Linearizer
+from synapse.util.iterutils import chunk_seq

 logger = logging.getLogger(__name__)
@@ -132,7 +133,17 @@ class SamlHandler:
             logger.warning("SAML2 response was not signed")
             raise SynapseError(400, "SAML2 response was not signed")

-        logger.info("SAML2 response: %s", saml2_auth.origxml)
+        logger.debug("SAML2 response: %s", saml2_auth.origxml)
+        for assertion in saml2_auth.assertions:
+            # kibana limits the length of a log field, whereas this is all rather
+            # useful, so split it up.
+            count = 0
+            for part in chunk_seq(str(assertion), 10000):
+                logger.info(
+                    "SAML2 assertion: %s%s", "(%i)..." % (count,) if count else "", part
+                )
+                count += 1
+
         logger.info("SAML2 mapped attributes: %s", saml2_auth.ava)

         self._outstanding_requests_dict.pop(saml2_auth.in_response_to, None)

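As a rough sketch (not part of this commit), the chunked-logging pattern in the hunk above can be factored into a small helper. `log_in_chunks` and its parameters are hypothetical names, and 10000 is simply the chunk size the diff uses to stay under kibana's log-field limit:

import logging

from synapse.util.iterutils import chunk_seq

logger = logging.getLogger(__name__)


def log_in_chunks(label, text, maxlen=10000):
    # Hypothetical helper: emit `text` as a series of INFO lines no longer than
    # `maxlen` characters, numbering every chunk after the first so the pieces
    # can be stitched back together when reading the logs.
    count = 0
    for part in chunk_seq(text, maxlen):
        logger.info("%s: %s%s", label, "(%i)..." % (count,) if count else "", part)
        count += 1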

@@ -21,7 +21,7 @@ from twisted.internet import defer
 from synapse.storage._base import SQLBaseStore
 from synapse.storage.engines import PostgresEngine
-from synapse.util import batch_iter
+from synapse.util.iterutils import batch_iter

 logger = logging.getLogger(__name__)


@@ -33,13 +33,13 @@ from synapse.metrics.background_process_metrics import run_as_background_process
 from synapse.storage._base import SQLBaseStore, db_to_json, make_in_list_sql_clause
 from synapse.storage.database import Database
 from synapse.types import get_verify_key_from_cross_signing_key
-from synapse.util import batch_iter
 from synapse.util.caches.descriptors import (
     Cache,
     cached,
     cachedInlineCallbacks,
     cachedList,
 )
+from synapse.util.iterutils import batch_iter

 logger = logging.getLogger(__name__)


@@ -43,9 +43,9 @@ from synapse.storage.data_stores.main.events_worker import EventsWorkerStore
 from synapse.storage.data_stores.main.state import StateGroupWorkerStore
 from synapse.storage.database import Database
 from synapse.types import RoomStreamToken, get_domain_from_id
-from synapse.util import batch_iter
 from synapse.util.caches.descriptors import cached, cachedInlineCallbacks
 from synapse.util.frozenutils import frozendict_json_encoder
+from synapse.util.iterutils import batch_iter

 logger = logging.getLogger(__name__)


@@ -37,8 +37,8 @@ from synapse.metrics.background_process_metrics import run_as_background_process
 from synapse.storage._base import SQLBaseStore, make_in_list_sql_clause
 from synapse.storage.database import Database
 from synapse.types import get_domain_from_id
-from synapse.util import batch_iter
 from synapse.util.caches.descriptors import Cache
+from synapse.util.iterutils import batch_iter
 from synapse.util.metrics import Measure

 logger = logging.getLogger(__name__)


@@ -23,8 +23,8 @@ from signedjson.key import decode_verify_key_bytes
 from synapse.storage._base import SQLBaseStore
 from synapse.storage.keys import FetchKeyResult
-from synapse.util import batch_iter
 from synapse.util.caches.descriptors import cached, cachedList
+from synapse.util.iterutils import batch_iter

 logger = logging.getLogger(__name__)


@@ -17,8 +17,8 @@ from twisted.internet import defer
 from synapse.storage._base import SQLBaseStore, make_in_list_sql_clause
 from synapse.storage.presence import UserPresenceState
-from synapse.util import batch_iter
 from synapse.util.caches.descriptors import cached, cachedList
+from synapse.util.iterutils import batch_iter


 class PresenceStore(SQLBaseStore):


@@ -15,7 +15,6 @@
 import logging
 import re
-from itertools import islice

 import attr

@@ -107,22 +106,6 @@ class Clock(object):
                 raise


-def batch_iter(iterable, size):
-    """batch an iterable up into tuples with a maximum size
-
-    Args:
-        iterable (iterable): the iterable to slice
-        size (int): the maximum batch size
-
-    Returns:
-        an iterator over the chunks
-    """
-    # make sure we can deal with iterables like lists too
-    sourceiter = iter(iterable)
-    # call islice until it returns an empty tuple
-    return iter(lambda: tuple(islice(sourceiter, size)), ())
-
-
 def log_failure(failure, msg, consumeErrors=True):
     """Creates a function suitable for passing to `Deferred.addErrback` that
     logs any failures that occur.

synapse/util/iterutils.py (new file)

@@ -0,0 +1,48 @@
# -*- coding: utf-8 -*-
# Copyright 2014-2016 OpenMarket Ltd
# Copyright 2020 The Matrix.org Foundation C.I.C.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from itertools import islice
from typing import Iterable, Iterator, Sequence, Tuple, TypeVar

T = TypeVar("T")


def batch_iter(iterable: Iterable[T], size: int) -> Iterator[Tuple[T]]:
    """batch an iterable up into tuples with a maximum size

    Args:
        iterable (iterable): the iterable to slice
        size (int): the maximum batch size

    Returns:
        an iterator over the chunks
    """
    # make sure we can deal with iterables like lists too
    sourceiter = iter(iterable)
    # call islice until it returns an empty tuple
    return iter(lambda: tuple(islice(sourceiter, size)), ())


ISeq = TypeVar("ISeq", bound=Sequence, covariant=True)


def chunk_seq(iseq: ISeq, maxlen: int) -> Iterable[ISeq]:
    """Split the given sequence into chunks of the given size

    The last chunk may be shorter than the given size.

    If the input is empty, no chunks are returned.
    """
    return (iseq[i : i + maxlen] for i in range(0, len(iseq), maxlen))

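For orientation, a quick usage sketch of the two helpers defined above; the snippet is illustrative and not part of the commit, though the expected output follows directly from the implementations:

from synapse.util.iterutils import batch_iter, chunk_seq

# batch_iter groups any iterable into tuples of at most `size` elements;
# a trailing partial batch is yielded as-is.
ids = ["a", "b", "c", "d", "e", "f", "g"]
print(list(batch_iter(ids, 3)))
# [('a', 'b', 'c'), ('d', 'e', 'f'), ('g',)]

# chunk_seq slices a sequence (here a string) into pieces of at most `maxlen`;
# the last piece may be shorter, and an empty input yields no pieces.
print(list(chunk_seq("abcdefghijklmnop", 5)))
# ['abcde', 'fghij', 'klmno', 'p']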

@@ -0,0 +1,47 @@
# -*- coding: utf-8 -*-
# Copyright 2020 The Matrix.org Foundation C.I.C.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from synapse.util.iterutils import chunk_seq

from tests.unittest import TestCase


class ChunkSeqTests(TestCase):
    def test_short_seq(self):
        parts = chunk_seq("123", 8)

        self.assertEqual(
            list(parts), ["123"],
        )

    def test_long_seq(self):
        parts = chunk_seq("abcdefghijklmnop", 8)

        self.assertEqual(
            list(parts), ["abcdefgh", "ijklmnop"],
        )

    def test_uneven_parts(self):
        parts = chunk_seq("abcdefghijklmnop", 5)

        self.assertEqual(
            list(parts), ["abcde", "fghij", "klmno", "p"],
        )

    def test_empty_input(self):
        parts = chunk_seq([], 5)

        self.assertEqual(
            list(parts), [],
        )