Rewrite KeyringTestCase as a HomeserverTestCase (#4986)

This is a bit fiddly due to the keyring doing weird things with logcontexts.
Richard van der Hoff 2019-04-03 14:11:27 +01:00 committed by GitHub
parent e8419554ff
commit e4d473d855
2 changed files with 99 additions and 101 deletions
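
The heart of the rewrite is a small run_in_context helper: each keyring call is run inside its own LoggingContext (so that the patched inlineCallbacks can check the logcontext handling), and the resulting deferred is then inspected with the test case's synchronous helpers (successResultOf, get_success, pump) rather than yielded from an inlineCallbacks test. A minimal sketch of the pattern, taken from the diff below:

from twisted.internet import defer

from synapse.util.logcontext import LoggingContext


@defer.inlineCallbacks
def run_in_context(f, *args, **kwargs):
    # run f under its own logcontext, so that the patched inlineCallbacks
    # can check that f does the right thing with logcontexts
    with LoggingContext("testctx"):
        rv = yield f(*args, **kwargs)
    defer.returnValue(rv)

# in a test, the returned deferred is then driven synchronously, e.g.:
#   wait_1_deferred = run_in_context(
#       kr.wait_for_previous_lookups, ["server1"], {"server1": lookup_1_deferred}
#   )
#   self.successResultOf(wait_1_deferred)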

changelog.d/4985.misc (new file)

@@ -0,0 +1 @@
+Rewrite KeyringTestCase as a HomeserverTestCase.

tests/crypto/test_keyring.py

@@ -19,14 +19,14 @@ from mock import Mock
 import signedjson.key
 import signedjson.sign

-from twisted.internet import defer, reactor
+from twisted.internet import defer

 from synapse.api.errors import SynapseError
 from synapse.crypto import keyring
-from synapse.util import Clock, logcontext
+from synapse.util import logcontext
 from synapse.util.logcontext import LoggingContext

-from tests import unittest, utils
+from tests import unittest


 class MockPerspectiveServer(object):
@@ -52,75 +52,50 @@ class MockPerspectiveServer(object):
         return res


-class KeyringTestCase(unittest.TestCase):
-    @defer.inlineCallbacks
-    def setUp(self):
+class KeyringTestCase(unittest.HomeserverTestCase):
+    def make_homeserver(self, reactor, clock):
         self.mock_perspective_server = MockPerspectiveServer()
         self.http_client = Mock()
-        self.hs = yield utils.setup_test_homeserver(
-            self.addCleanup, handlers=None, http_client=self.http_client
-        )
+        hs = self.setup_test_homeserver(handlers=None, http_client=self.http_client)
         keys = self.mock_perspective_server.get_verify_keys()
-        self.hs.config.perspectives = {self.mock_perspective_server.server_name: keys}
-
-    def assert_sentinel_context(self):
-        if LoggingContext.current_context() != LoggingContext.sentinel:
-            self.fail(
-                "Expected sentinel context but got %s" % (
-                    LoggingContext.current_context(),
-                )
-            )
+        hs.config.perspectives = {self.mock_perspective_server.server_name: keys}
+        return hs

     def check_context(self, _, expected):
         self.assertEquals(
             getattr(LoggingContext.current_context(), "request", None), expected
         )

-    @defer.inlineCallbacks
     def test_wait_for_previous_lookups(self):
         kr = keyring.Keyring(self.hs)

         lookup_1_deferred = defer.Deferred()
         lookup_2_deferred = defer.Deferred()

-        with LoggingContext("one") as context_one:
-            context_one.request = "one"
-
-            wait_1_deferred = kr.wait_for_previous_lookups(
-                ["server1"], {"server1": lookup_1_deferred}
-            )
-
-            # there were no previous lookups, so the deferred should be ready
-            self.assertTrue(wait_1_deferred.called)
-            # ... so we should have preserved the LoggingContext.
-            self.assertIs(LoggingContext.current_context(), context_one)
-            wait_1_deferred.addBoth(self.check_context, "one")
-
-        with LoggingContext("two") as context_two:
-            context_two.request = "two"
-
-            # set off another wait. It should block because the first lookup
-            # hasn't yet completed.
-            wait_2_deferred = kr.wait_for_previous_lookups(
-                ["server1"], {"server1": lookup_2_deferred}
-            )
-            self.assertFalse(wait_2_deferred.called)
-            # ... so we should have reset the LoggingContext.
-            self.assert_sentinel_context()
-            wait_2_deferred.addBoth(self.check_context, "two")
-
-            # let the first lookup complete (in the sentinel context)
-            lookup_1_deferred.callback(None)
-
-            # now the second wait should complete and restore our
-            # loggingcontext.
-            yield wait_2_deferred
+        # we run the lookup in a logcontext so that the patched inlineCallbacks can check
+        # it is doing the right thing with logcontexts.
+        wait_1_deferred = run_in_context(
+            kr.wait_for_previous_lookups, ["server1"], {"server1": lookup_1_deferred}
+        )
+
+        # there were no previous lookups, so the deferred should be ready
+        self.successResultOf(wait_1_deferred)
+
+        # set off another wait. It should block because the first lookup
+        # hasn't yet completed.
+        wait_2_deferred = run_in_context(
+            kr.wait_for_previous_lookups, ["server1"], {"server1": lookup_2_deferred}
+        )
+        self.assertFalse(wait_2_deferred.called)
+
+        # let the first lookup complete (in the sentinel context)
+        lookup_1_deferred.callback(None)
+
+        # now the second wait should complete.
+        self.successResultOf(wait_2_deferred)

-    @defer.inlineCallbacks
     def test_verify_json_objects_for_server_awaits_previous_requests(self):
-        clock = Clock(reactor)
         key1 = signedjson.key.generate_signing_key(1)

         kr = keyring.Keyring(self.hs)
@@ -145,81 +120,103 @@ class KeyringTestCase(unittest.TestCase):
         self.http_client.post_json.side_effect = get_perspectives

-        with LoggingContext("11") as context_11:
-            context_11.request = "11"
-
-            # start off a first set of lookups
-            res_deferreds = kr.verify_json_objects_for_server(
-                [("server10", json1), ("server11", {})]
-            )
-
-            # the unsigned json should be rejected pretty quickly
-            self.assertTrue(res_deferreds[1].called)
-            try:
-                yield res_deferreds[1]
-                self.assertFalse("unsigned json didn't cause a failure")
-            except SynapseError:
-                pass
-
-            self.assertFalse(res_deferreds[0].called)
-            res_deferreds[0].addBoth(self.check_context, None)
-
-            # wait a tick for it to send the request to the perspectives server
-            # (it first tries the datastore)
-            yield clock.sleep(1)  # XXX find out why this takes so long!
-            self.http_client.post_json.assert_called_once()
-
-            self.assertIs(LoggingContext.current_context(), context_11)
-
-            context_12 = LoggingContext("12")
-            context_12.request = "12"
-            with logcontext.PreserveLoggingContext(context_12):
-                # a second request for a server with outstanding requests
-                # should block rather than start a second call
-                self.http_client.post_json.reset_mock()
-                self.http_client.post_json.return_value = defer.Deferred()
-
-                res_deferreds_2 = kr.verify_json_objects_for_server(
-                    [("server10", json1)]
-                )
-                yield clock.sleep(1)
-                self.http_client.post_json.assert_not_called()
-                res_deferreds_2[0].addBoth(self.check_context, None)
-
-            # complete the first request
-            with logcontext.PreserveLoggingContext():
-                persp_deferred.callback(persp_resp)
-            self.assertIs(LoggingContext.current_context(), context_11)
-
-            with logcontext.PreserveLoggingContext():
-                yield res_deferreds[0]
-                yield res_deferreds_2[0]
+        # start off a first set of lookups
+        @defer.inlineCallbacks
+        def first_lookup():
+            with LoggingContext("11") as context_11:
+                context_11.request = "11"
+
+                res_deferreds = kr.verify_json_objects_for_server(
+                    [("server10", json1), ("server11", {})]
+                )
+
+                # the unsigned json should be rejected pretty quickly
+                self.assertTrue(res_deferreds[1].called)
+                try:
+                    yield res_deferreds[1]
+                    self.assertFalse("unsigned json didn't cause a failure")
+                except SynapseError:
+                    pass
+
+                self.assertFalse(res_deferreds[0].called)
+                res_deferreds[0].addBoth(self.check_context, None)
+
+                yield logcontext.make_deferred_yieldable(res_deferreds[0])
+
+                # let verify_json_objects_for_server finish its work before we kill the
+                # logcontext
+                yield self.clock.sleep(0)
+
+        d0 = first_lookup()
+
+        # wait a tick for it to send the request to the perspectives server
+        # (it first tries the datastore)
+        self.pump()
+        self.http_client.post_json.assert_called_once()
+
+        # a second request for a server with outstanding requests
+        # should block rather than start a second call
+        @defer.inlineCallbacks
+        def second_lookup():
+            with LoggingContext("12") as context_12:
+                context_12.request = "12"
+
+                self.http_client.post_json.reset_mock()
+                self.http_client.post_json.return_value = defer.Deferred()
+
+                res_deferreds_2 = kr.verify_json_objects_for_server(
+                    [("server10", json1, )]
+                )
+                res_deferreds_2[0].addBoth(self.check_context, None)
+                yield logcontext.make_deferred_yieldable(res_deferreds_2[0])
+
+                # let verify_json_objects_for_server finish its work before we kill the
+                # logcontext
+                yield self.clock.sleep(0)
+
+        d2 = second_lookup()
+
+        self.pump()
+        self.http_client.post_json.assert_not_called()
+
+        # complete the first request
+        persp_deferred.callback(persp_resp)
+        self.get_success(d0)
+        self.get_success(d2)

-    @defer.inlineCallbacks
     def test_verify_json_for_server(self):
         kr = keyring.Keyring(self.hs)

         key1 = signedjson.key.generate_signing_key(1)
-        yield self.hs.datastore.store_server_verify_key(
+        r = self.hs.datastore.store_server_verify_key(
             "server9", "", time.time() * 1000, signedjson.key.get_verify_key(key1)
         )
+        self.get_success(r)
         json1 = {}
         signedjson.sign.sign_json(json1, "server9", key1)

-        with LoggingContext("one") as context_one:
-            context_one.request = "one"
-
-            defer = kr.verify_json_for_server("server9", {})
-            try:
-                yield defer
-                self.fail("should fail on unsigned json")
-            except SynapseError:
-                pass
-            self.assertIs(LoggingContext.current_context(), context_one)
-
-            defer = kr.verify_json_for_server("server9", json1)
-            self.assertFalse(defer.called)
-            self.assert_sentinel_context()
-            yield defer
-
-            self.assertIs(LoggingContext.current_context(), context_one)
+        # should fail immediately on an unsigned object
+        d = _verify_json_for_server(kr, "server9", {})
+        self.failureResultOf(d, SynapseError)
+
+        d = _verify_json_for_server(kr, "server9", json1)
+        self.assertFalse(d.called)
+        self.get_success(d)
+
+
+@defer.inlineCallbacks
+def run_in_context(f, *args, **kwargs):
+    with LoggingContext("testctx"):
+        rv = yield f(*args, **kwargs)
+    defer.returnValue(rv)
+
+
+def _verify_json_for_server(keyring, server_name, json_object):
+    """thin wrapper around verify_json_for_server which makes sure it is wrapped
+    with the patched defer.inlineCallbacks.
+    """
+    @defer.inlineCallbacks
+    def v():
+        rv1 = yield keyring.verify_json_for_server(server_name, json_object)
+        defer.returnValue(rv1)
+
+    return run_in_context(v)
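
With the real reactor and utils.setup_test_homeserver gone, the rewritten case can be exercised on its own in the usual way; assuming a standard Synapse development checkout, where the unit tests are run with Twisted's trial runner, that is something like:

trial tests.crypto.test_keyring.KeyringTestCase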