Rewrite KeyringTestCase as a HomeserverTestCase (#4986)

This is a bit fiddly due to the keyring doing weird things with logcontexts.
This commit is contained in:
Richard van der Hoff 2019-04-03 14:11:27 +01:00 committed by GitHub
parent e8419554ff
commit e4d473d855
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
2 changed files with 99 additions and 101 deletions

1
changelog.d/4985.misc Normal file
View File

@@ -0,0 +1 @@
Rewrite KeyringTestCase as a HomeserverTestCase.

View File

@@ -19,14 +19,14 @@ from mock import Mock
import signedjson.key import signedjson.key
import signedjson.sign import signedjson.sign
from twisted.internet import defer, reactor from twisted.internet import defer
from synapse.api.errors import SynapseError from synapse.api.errors import SynapseError
from synapse.crypto import keyring from synapse.crypto import keyring
from synapse.util import Clock, logcontext from synapse.util import logcontext
from synapse.util.logcontext import LoggingContext from synapse.util.logcontext import LoggingContext
from tests import unittest, utils from tests import unittest
class MockPerspectiveServer(object): class MockPerspectiveServer(object):
@@ -52,75 +52,50 @@ class MockPerspectiveServer(object):
return res return res
class KeyringTestCase(unittest.TestCase): class KeyringTestCase(unittest.HomeserverTestCase):
@defer.inlineCallbacks def make_homeserver(self, reactor, clock):
def setUp(self):
self.mock_perspective_server = MockPerspectiveServer() self.mock_perspective_server = MockPerspectiveServer()
self.http_client = Mock() self.http_client = Mock()
self.hs = yield utils.setup_test_homeserver( hs = self.setup_test_homeserver(handlers=None, http_client=self.http_client)
self.addCleanup, handlers=None, http_client=self.http_client
)
keys = self.mock_perspective_server.get_verify_keys() keys = self.mock_perspective_server.get_verify_keys()
self.hs.config.perspectives = {self.mock_perspective_server.server_name: keys} hs.config.perspectives = {self.mock_perspective_server.server_name: keys}
return hs
def assert_sentinel_context(self):
if LoggingContext.current_context() != LoggingContext.sentinel:
self.fail(
"Expected sentinel context but got %s" % (
LoggingContext.current_context(),
)
)
def check_context(self, _, expected): def check_context(self, _, expected):
self.assertEquals( self.assertEquals(
getattr(LoggingContext.current_context(), "request", None), expected getattr(LoggingContext.current_context(), "request", None), expected
) )
@defer.inlineCallbacks
def test_wait_for_previous_lookups(self): def test_wait_for_previous_lookups(self):
kr = keyring.Keyring(self.hs) kr = keyring.Keyring(self.hs)
lookup_1_deferred = defer.Deferred() lookup_1_deferred = defer.Deferred()
lookup_2_deferred = defer.Deferred() lookup_2_deferred = defer.Deferred()
with LoggingContext("one") as context_one: # we run the lookup in a logcontext so that the patched inlineCallbacks can check
context_one.request = "one" # it is doing the right thing with logcontexts.
wait_1_deferred = run_in_context(
wait_1_deferred = kr.wait_for_previous_lookups( kr.wait_for_previous_lookups, ["server1"], {"server1": lookup_1_deferred}
["server1"], {"server1": lookup_1_deferred}
) )
# there were no previous lookups, so the deferred should be ready # there were no previous lookups, so the deferred should be ready
self.assertTrue(wait_1_deferred.called) self.successResultOf(wait_1_deferred)
# ... so we should have preserved the LoggingContext.
self.assertIs(LoggingContext.current_context(), context_one)
wait_1_deferred.addBoth(self.check_context, "one")
with LoggingContext("two") as context_two:
context_two.request = "two"
# set off another wait. It should block because the first lookup # set off another wait. It should block because the first lookup
# hasn't yet completed. # hasn't yet completed.
wait_2_deferred = kr.wait_for_previous_lookups( wait_2_deferred = run_in_context(
["server1"], {"server1": lookup_2_deferred} kr.wait_for_previous_lookups, ["server1"], {"server1": lookup_2_deferred}
) )
self.assertFalse(wait_2_deferred.called) self.assertFalse(wait_2_deferred.called)
# ... so we should have reset the LoggingContext.
self.assert_sentinel_context()
wait_2_deferred.addBoth(self.check_context, "two")
# let the first lookup complete (in the sentinel context) # let the first lookup complete (in the sentinel context)
lookup_1_deferred.callback(None) lookup_1_deferred.callback(None)
# now the second wait should complete and restore our # now the second wait should complete.
# loggingcontext. self.successResultOf(wait_2_deferred)
yield wait_2_deferred
@defer.inlineCallbacks
def test_verify_json_objects_for_server_awaits_previous_requests(self): def test_verify_json_objects_for_server_awaits_previous_requests(self):
clock = Clock(reactor)
key1 = signedjson.key.generate_signing_key(1) key1 = signedjson.key.generate_signing_key(1)
kr = keyring.Keyring(self.hs) kr = keyring.Keyring(self.hs)
@@ -145,10 +120,12 @@ class KeyringTestCase(unittest.TestCase):
self.http_client.post_json.side_effect = get_perspectives self.http_client.post_json.side_effect = get_perspectives
# start off a first set of lookups
@defer.inlineCallbacks
def first_lookup():
with LoggingContext("11") as context_11: with LoggingContext("11") as context_11:
context_11.request = "11" context_11.request = "11"
# start off a first set of lookups
res_deferreds = kr.verify_json_objects_for_server( res_deferreds = kr.verify_json_objects_for_server(
[("server10", json1), ("server11", {})] [("server10", json1), ("server11", {})]
) )
@@ -164,62 +141,82 @@ class KeyringTestCase(unittest.TestCase):
self.assertFalse(res_deferreds[0].called) self.assertFalse(res_deferreds[0].called)
res_deferreds[0].addBoth(self.check_context, None) res_deferreds[0].addBoth(self.check_context, None)
yield logcontext.make_deferred_yieldable(res_deferreds[0])
# let verify_json_objects_for_server finish its work before we kill the
# logcontext
yield self.clock.sleep(0)
d0 = first_lookup()
# wait a tick for it to send the request to the perspectives server # wait a tick for it to send the request to the perspectives server
# (it first tries the datastore) # (it first tries the datastore)
yield clock.sleep(1) # XXX find out why this takes so long! self.pump()
self.http_client.post_json.assert_called_once() self.http_client.post_json.assert_called_once()
self.assertIs(LoggingContext.current_context(), context_11)
context_12 = LoggingContext("12")
context_12.request = "12"
with logcontext.PreserveLoggingContext(context_12):
# a second request for a server with outstanding requests # a second request for a server with outstanding requests
# should block rather than start a second call # should block rather than start a second call
@defer.inlineCallbacks
def second_lookup():
with LoggingContext("12") as context_12:
context_12.request = "12"
self.http_client.post_json.reset_mock() self.http_client.post_json.reset_mock()
self.http_client.post_json.return_value = defer.Deferred() self.http_client.post_json.return_value = defer.Deferred()
res_deferreds_2 = kr.verify_json_objects_for_server( res_deferreds_2 = kr.verify_json_objects_for_server(
[("server10", json1)] [("server10", json1, )]
) )
yield clock.sleep(1)
self.http_client.post_json.assert_not_called()
res_deferreds_2[0].addBoth(self.check_context, None) res_deferreds_2[0].addBoth(self.check_context, None)
yield logcontext.make_deferred_yieldable(res_deferreds_2[0])
# let verify_json_objects_for_server finish its work before we kill the
# logcontext
yield self.clock.sleep(0)
d2 = second_lookup()
self.pump()
self.http_client.post_json.assert_not_called()
# complete the first request # complete the first request
with logcontext.PreserveLoggingContext():
persp_deferred.callback(persp_resp) persp_deferred.callback(persp_resp)
self.assertIs(LoggingContext.current_context(), context_11) self.get_success(d0)
self.get_success(d2)
with logcontext.PreserveLoggingContext():
yield res_deferreds[0]
yield res_deferreds_2[0]
@defer.inlineCallbacks
def test_verify_json_for_server(self): def test_verify_json_for_server(self):
kr = keyring.Keyring(self.hs) kr = keyring.Keyring(self.hs)
key1 = signedjson.key.generate_signing_key(1) key1 = signedjson.key.generate_signing_key(1)
yield self.hs.datastore.store_server_verify_key( r = self.hs.datastore.store_server_verify_key(
"server9", "", time.time() * 1000, signedjson.key.get_verify_key(key1) "server9", "", time.time() * 1000, signedjson.key.get_verify_key(key1)
) )
self.get_success(r)
json1 = {} json1 = {}
signedjson.sign.sign_json(json1, "server9", key1) signedjson.sign.sign_json(json1, "server9", key1)
with LoggingContext("one") as context_one: # should fail immediately on an unsigned object
context_one.request = "one" d = _verify_json_for_server(kr, "server9", {})
self.failureResultOf(d, SynapseError)
defer = kr.verify_json_for_server("server9", {}) d = _verify_json_for_server(kr, "server9", json1)
try: self.assertFalse(d.called)
yield defer self.get_success(d)
self.fail("should fail on unsigned json")
except SynapseError:
pass
self.assertIs(LoggingContext.current_context(), context_one)
defer = kr.verify_json_for_server("server9", json1)
self.assertFalse(defer.called)
self.assert_sentinel_context()
yield defer
self.assertIs(LoggingContext.current_context(), context_one) @defer.inlineCallbacks
def run_in_context(f, *args, **kwargs):
with LoggingContext("testctx"):
rv = yield f(*args, **kwargs)
defer.returnValue(rv)
def _verify_json_for_server(keyring, server_name, json_object):
"""thin wrapper around verify_json_for_server which makes sure it is wrapped
with the patched defer.inlineCallbacks.
"""
@defer.inlineCallbacks
def v():
rv1 = yield keyring.verify_json_for_server(server_name, json_object)
defer.returnValue(rv1)
return run_in_context(v)