Merge remote-tracking branch 'origin/develop' into dbkr/http_request_propagate_error

David Baker 2017-04-26 11:31:55 +01:00
commit 81804909d3
19 changed files with 309 additions and 122 deletions

View File

@ -245,6 +245,25 @@ Setting up a TURN server
For reliable VoIP calls to be routed via this homeserver, you MUST configure
a TURN server. See `<docs/turn-howto.rst>`_ for details.
IPv6
----
As of Synapse 0.19 we finally support IPv6, many thanks to @kyrias and @glyph
for providing PR #1696.
However, for federation to work on hosts with IPv6 DNS servers you **must**
be running Twisted 17.1.0 or later - see https://github.com/matrix-org/synapse/issues/1002
for details. We can't make Synapse depend on Twisted 17.1 by default
yet as it will break most older distributions (see https://github.com/matrix-org/synapse/pull/1909)
so if you are using operating system dependencies you'll have to install your
own Twisted 17.1 package via pip or backports etc.
If you're running in a virtualenv then pip should have installed the newest
Twisted automatically, but if your virtualenv is old you will need to manually
upgrade to a newer Twisted dependency via:
pip install Twisted>=17.1.0
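A quick way to confirm which Twisted your Synapse environment will actually pick up is a one-liner run with the same Python interpreter that runs Synapse (purely illustrative):

    # Prints something like "[twisted, version 17.1.0]"; anything older than
    # 17.1.0 means IPv6 federation will not work reliably.
    import twisted
    print(twisted.version)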
Running Synapse
===============

View File

@ -36,15 +36,13 @@ class HttpClient(object):
the request body. This will be encoded as JSON.
Returns:
- Deferred: Succeeds when we get *any* HTTP response.
- The result of the deferred is a tuple of `(code, response)`,
- where `response` is a dict representing the decoded JSON body.
+ Deferred: Succeeds when we get a 2xx HTTP response. The result
+ will be the decoded JSON body.
"""
pass
def get_json(self, url, args=None):
- """ Get's some json from the given host homeserver and path
+ """ Gets some json from the given host homeserver and path
Args:
url (str): The URL to GET data from.
@ -54,10 +52,8 @@ class HttpClient(object):
and *not* a string.
Returns:
- Deferred: Succeeds when we get *any* HTTP response.
- The result of the deferred is a tuple of `(code, response)`,
- where `response` is a dict representing the decoded JSON body.
+ Deferred: Succeeds when we get a 2xx HTTP response. The result
+ will be the decoded JSON body.
"""
pass
@ -214,4 +210,4 @@ class _JsonProducer(object):
pass
def stopProducing(self):
pass

View File

@ -0,0 +1,73 @@
Query Account
=============
This API returns information about a specific user account.
The API is::
GET /_matrix/client/r0/admin/whois/<user_id>
including an ``access_token`` of a server admin.
It returns a JSON body like the following:
.. code:: json
{
"user_id": "<user_id>",
"devices": {
"": {
"sessions": [
{
"connections": [
{
"ip": "1.2.3.4",
"last_seen": 1417222374433,
"user_agent": "Mozilla/5.0 ..."
},
{
"ip": "1.2.3.10",
"last_seen": 1417222374500,
"user_agent": "Dalvik/2.1.0 ..."
}
]
}
]
}
}
}
``last_seen`` is measured in milliseconds since the Unix epoch.
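For illustration, a minimal sketch of calling this endpoint with the ``requests`` library (the homeserver URL, user id and token here are made up; the access token is passed as a query parameter, as with other r0 client APIs):

    import requests

    resp = requests.get(
        "https://homeserver.example.com/_matrix/client/r0/admin/whois/@someone:example.com",
        params={"access_token": "ADMIN_ACCESS_TOKEN"},
    )
    resp.raise_for_status()
    # Walk the devices -> sessions -> connections structure shown above.
    for device_id, device in resp.json()["devices"].items():
        for session in device["sessions"]:
            for conn in session["connections"]:
                print(device_id, conn["ip"], conn["last_seen"], conn["user_agent"])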
Deactivate Account
==================
This API deactivates an account. It removes active access tokens, resets the
password, and deletes third-party IDs (to prevent the user requesting a
password reset).
The API is::
POST /_matrix/client/r0/admin/deactivate/<user_id>
including an ``access_token`` of a server admin, and an empty request body.
Reset password
==============
Changes the password of another user.
The API is::
POST /_matrix/client/r0/admin/reset_password/<user_id>
with a body of:
.. code:: json
{
"new_password": "<secret>"
}
including an ``access_token`` of a server admin.
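A similar sketch for the two POST endpoints above (again with made-up values):

    import requests

    base = "https://homeserver.example.com/_matrix/client/r0/admin"
    params = {"access_token": "ADMIN_ACCESS_TOKEN"}

    # Deactivate an account (empty JSON body).
    r = requests.post(base + "/deactivate/@someone:example.com", params=params, json={})
    r.raise_for_status()

    # Set a new password for another user.
    r = requests.post(
        base + "/reset_password/@someone:example.com",
        params=params,
        json={"new_password": "correct horse battery staple"},
    )
    r.raise_for_status()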

View File

@ -21,13 +21,12 @@ How to monitor Synapse metrics using Prometheus
3. Add a prometheus target for synapse.
- It needs to set the ``metrics_path`` to a non-default value::
+ It needs to set the ``metrics_path`` to a non-default value (under ``scrape_configs``)::
  - job_name: "synapse"
    metrics_path: "/_synapse/metrics"
    static_configs:
-     - targets:
-       "my.server.here:9092"
+     - targets: ["my.server.here:9092"]
If your prometheus is older than 1.5.2, you will need to replace
``static_configs`` in the above with ``target_groups``.
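Before wiring up Prometheus it can help to check that the metrics listener is actually reachable; a small sketch (host and port made up to match the example above):

    import requests

    r = requests.get("http://my.server.here:9092/_synapse/metrics")
    r.raise_for_status()
    # The body is plain-text Prometheus exposition format; show the first few lines.
    print("\n".join(r.text.splitlines()[:5]))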

View File

@ -474,8 +474,13 @@ class FederationClient(FederationBase):
content (object): Any additional data to put into the content field
of the event.
Return:
- A tuple of (origin (str), event (object)) where origin is the remote
- homeserver which generated the event.
+ Deferred: resolves to a tuple of (origin (str), event (object))
+ where origin is the remote homeserver which generated the event.
+ Fails with a ``CodeMessageException`` if the chosen remote server
+ returns a 300/400 code.
+ Fails with a ``RuntimeError`` if no servers were reachable.
""" """
valid_memberships = {Membership.JOIN, Membership.LEAVE} valid_memberships = {Membership.JOIN, Membership.LEAVE}
if membership not in valid_memberships: if membership not in valid_memberships:
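For context, a rough sketch of how a caller might react to the failure modes documented above (the helper name and the content argument are illustrative; it follows the inlineCallbacks style used throughout this file):

    from twisted.internet import defer
    from synapse.api.errors import CodeMessageException

    @defer.inlineCallbacks
    def make_leave_or_give_up(federation_client, destinations, room_id, user_id):
        """Illustrative only: try to build a leave event via federation,
        returning (origin, event), or (None, None) if every server failed."""
        try:
            origin, event = yield federation_client.make_membership_event(
                destinations, room_id, user_id, "leave", content={},
            )
        except CodeMessageException:
            # the chosen remote server answered with a 3xx/4xx code
            defer.returnValue((None, None))
        except RuntimeError:
            # none of the candidate servers were reachable
            defer.returnValue((None, None))
        defer.returnValue((origin, event))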
@ -528,6 +533,27 @@ class FederationClient(FederationBase):
@defer.inlineCallbacks
def send_join(self, destinations, pdu):
"""Sends a join event to one of a list of homeservers.
Doing so will cause the remote server to add the event to the graph,
and send the event out to the rest of the federation.
Args:
destinations (str): Candidate homeservers which are probably
participating in the room.
pdu (BaseEvent): event to be sent
Return:
Deferred: resolves to a dict with members ``origin`` (a string
giving the server the event was sent to), ``state`` (?) and
``auth_chain``.
Fails with a ``CodeMessageException`` if the chosen remote server
returns a 300/400 code.
Fails with a ``RuntimeError`` if no servers were reachable.
"""
for destination in destinations:
if destination == self.server_name:
continue
@ -635,6 +661,26 @@ class FederationClient(FederationBase):
@defer.inlineCallbacks
def send_leave(self, destinations, pdu):
"""Sends a leave event to one of a list of homeservers.
Doing so will cause the remote server to add the event to the graph,
and send the event out to the rest of the federation.
This is mostly useful to reject received invites.
Args:
destinations (str): Candidate homeservers which are probably
participating in the room.
pdu (BaseEvent): event to be sent
Return:
Deferred: resolves to None.
Fails with a ``CodeMessageException`` if the chosen remote server
returns a non-200 code.
Fails with a ``RuntimeError`` if no servers were reachable.
"""
for destination in destinations:
if destination == self.server_name:
continue

View File

@ -193,6 +193,26 @@ class TransportLayerClient(object):
@defer.inlineCallbacks
@log_function
def make_membership_event(self, destination, room_id, user_id, membership):
"""Asks a remote server to build and sign us a membership event
Note that this does not append any events to any graphs.
Args:
destination (str): address of remote homeserver
room_id (str): room to join/leave
user_id (str): user to be joined/left
membership (str): one of join/leave
Returns:
Deferred: Succeeds when we get a 2xx HTTP response. The result
will be the decoded JSON body (ie, the new event).
Fails with ``HTTPRequestException`` if we get an HTTP response
code >= 300.
Fails with ``NotRetryingDestination`` if we are not yet ready
to retry this server.
"""
valid_memberships = {Membership.JOIN, Membership.LEAVE}
if membership not in valid_memberships:
raise RuntimeError(
@ -201,11 +221,23 @@ class TransportLayerClient(object):
)
path = PREFIX + "/make_%s/%s/%s" % (membership, room_id, user_id)
ignore_backoff = False
retry_on_dns_fail = False
if membership == Membership.LEAVE:
# we particularly want to do our best to send leave events. The
# problem is that if it fails, we won't retry it later, so if the
# remote server was just having a momentary blip, the room will be
# out of sync.
ignore_backoff = True
retry_on_dns_fail = True
content = yield self.client.get_json(
destination=destination,
path=path,
- retry_on_dns_fail=False,
+ retry_on_dns_fail=retry_on_dns_fail,
timeout=20000,
ignore_backoff=ignore_backoff,
)
defer.returnValue(content)
@ -232,6 +264,12 @@ class TransportLayerClient(object):
destination=destination,
path=path,
data=content,
# we want to do our best to send this through. The problem is
# that if it fails, we won't retry it later, so if the remote
# server was just having a momentary blip, the room will be out of
# sync.
ignore_backoff=True,
)
defer.returnValue(response)

View File

@ -1090,19 +1090,13 @@ class FederationHandler(BaseHandler):
@defer.inlineCallbacks
def do_remotely_reject_invite(self, target_hosts, room_id, user_id):
- try:
- origin, event = yield self._make_and_verify_event(
- target_hosts,
- room_id,
- user_id,
- "leave"
- )
- event = self._sign_event(event)
- except SynapseError:
- raise
- except CodeMessageException as e:
- logger.warn("Failed to reject invite: %s", e)
- raise SynapseError(500, "Failed to reject invite")
+ origin, event = yield self._make_and_verify_event(
+ target_hosts,
+ room_id,
+ user_id,
+ "leave"
+ )
+ event = self._sign_event(event)
# Try the host that we successfully called /make_leave/ on first for
# the /send_leave/ request.
@ -1112,16 +1106,10 @@ class FederationHandler(BaseHandler):
except ValueError:
pass
- try:
- yield self.replication_layer.send_leave(
- target_hosts,
- event
- )
- except SynapseError:
- raise
- except CodeMessageException as e:
- logger.warn("Failed to reject invite: %s", e)
- raise SynapseError(500, "Failed to reject invite")
+ yield self.replication_layer.send_leave(
+ target_hosts,
+ event
+ )
context = yield self.state_handler.compute_event_context(event)

View File

@ -139,13 +139,6 @@ class RoomMemberHandler(BaseHandler):
)
yield user_joined_room(self.distributor, user, room_id)
- def reject_remote_invite(self, user_id, room_id, remote_room_hosts):
- return self.hs.get_handlers().federation_handler.do_remotely_reject_invite(
- remote_room_hosts,
- room_id,
- user_id
- )
@defer.inlineCallbacks
def update_membership(
self,
@ -286,13 +279,21 @@ class RoomMemberHandler(BaseHandler):
else:
# send the rejection to the inviter's HS.
remote_room_hosts = remote_room_hosts + [inviter.domain]
+ fed_handler = self.hs.get_handlers().federation_handler
try:
- ret = yield self.reject_remote_invite(
- target.to_string(), room_id, remote_room_hosts
+ ret = yield fed_handler.do_remotely_reject_invite(
+ remote_room_hosts,
+ room_id,
+ target.to_string(),
)
defer.returnValue(ret)
- except SynapseError as e:
+ except Exception as e:
+ # if we were unable to reject the invite, just mark
+ # it as rejected on our end and plough ahead.
+ #
+ # The 'except' clause is very broad, but we need to
+ # capture everything from DNS failures upwards
+ #
logger.warn("Failed to reject invite: %s", e)
yield self.store.locally_reject_invite(

View File

@ -125,6 +125,8 @@ class MatrixFederationHttpClient(object):
code >= 300.
Fails with ``NotRetryingDestination`` if we are not yet ready
to retry this server.
(May also fail with plenty of other Exceptions for things like DNS
failures, connection failures, SSL failures.)
""" """
limiter = yield synapse.util.retryutils.get_retry_limiter( limiter = yield synapse.util.retryutils.get_retry_limiter(
destination, destination,
@ -302,8 +304,10 @@ class MatrixFederationHttpClient(object):
Returns:
Deferred: Succeeds when we get a 2xx HTTP response. The result
- will be the decoded JSON body. On a 4xx or 5xx error response a
- CodeMessageException is raised.
+ will be the decoded JSON body.
+ Fails with ``HTTPRequestException`` if we get an HTTP response
+ code >= 300.
Fails with ``NotRetryingDestination`` if we are not yet ready
to retry this server.
@ -360,8 +364,10 @@ class MatrixFederationHttpClient(object):
try the request anyway.
Returns:
Deferred: Succeeds when we get a 2xx HTTP response. The result
- will be the decoded JSON body. On a 4xx or 5xx error response a
- CodeMessageException is raised.
+ will be the decoded JSON body.
+ Fails with ``HTTPRequestException`` if we get an HTTP response
+ code >= 300.
Fails with ``NotRetryingDestination`` if we are not yet ready
to retry this server.
@ -410,10 +416,11 @@ class MatrixFederationHttpClient(object):
ignore_backoff (bool): true to ignore the historical backoff data
and try the request anyway.
Returns:
- Deferred: Succeeds when we get *any* HTTP response.
- The result of the deferred is a tuple of `(code, response)`,
- where `response` is a dict representing the decoded JSON body.
+ Deferred: Succeeds when we get a 2xx HTTP response. The result
+ will be the decoded JSON body.
+ Fails with ``HTTPRequestException`` if we get an HTTP response
+ code >= 300.
Fails with ``NotRetryingDestination`` if we are not yet ready
to retry this server.

View File

@ -87,7 +87,11 @@ class BulkPushRuleEvaluator:
condition_cache = {}
for uid, rules in self.rules_by_user.items():
- display_name = room_members.get(uid, {}).get("display_name", None)
+ display_name = None
+ profile_info = room_members.get(uid)
+ if profile_info:
+ display_name = profile_info.display_name
if not display_name:
# Handle the case where we are pushing a membership event to
# that user, as they might not be already joined.

View File

@ -102,9 +102,6 @@ class SlavedEventStore(BaseSlavedStore):
_get_state_groups_from_groups_txn = (
DataStore._get_state_groups_from_groups_txn.__func__
)
- _get_state_group_from_group = (
- StateStore.__dict__["_get_state_group_from_group"]
- )
get_recent_event_ids_for_room = (
StreamStore.__dict__["get_recent_event_ids_for_room"]
)

View File

@ -406,7 +406,13 @@ class JoinedRoomMemberListRestServlet(ClientV1RestServlet):
users_with_profile = yield self.state.get_current_user_in_room(room_id)
defer.returnValue((200, {
- "joined": users_with_profile
+ "joined": {
+ user_id: {
+ "avatar_url": profile.avatar_url,
+ "display_name": profile.display_name,
+ }
+ for user_id, profile in users_with_profile.iteritems()
+ }
}))

View File

@ -84,12 +84,11 @@ class LocalKey(Resource):
}
old_verify_keys = {}
- for key in self.config.old_signing_keys:
- key_id = "%s:%s" % (key.alg, key.version)
+ for key_id, key in self.config.old_signing_keys.items():
verify_key_bytes = key.encode()
old_verify_keys[key_id] = {
u"key": encode_base64(verify_key_bytes),
- u"expired_ts": key.expired,
+ u"expired_ts": key.expired_ts,
}
tls_fingerprints = self.config.tls_fingerprints

View File

@ -47,10 +47,13 @@ class ReceiptsStore(SQLBaseStore):
# Returns an ObservableDeferred
res = self.get_users_with_read_receipts_in_room.cache.get((room_id,), None)
- if res and res.called and user_id in res.result:
- # We'd only be adding to the set, so no point invalidating if the
- # user is already there
- return
+ if res:
+ if isinstance(res, defer.Deferred) and res.called:
+ res = res.result
+ if user_id in res:
+ # We'd only be adding to the set, so no point invalidating if the
+ # user is already there
+ return
self.get_users_with_read_receipts_in_room.invalidate((room_id,))

View File

@ -19,6 +19,7 @@ from collections import namedtuple
from ._base import SQLBaseStore
from synapse.util.caches.descriptors import cached, cachedInlineCallbacks
from synapse.util.stringutils import to_ascii
from synapse.api.constants import Membership, EventTypes
from synapse.types import get_domain_from_id
@ -35,6 +36,13 @@ RoomsForUser = namedtuple(
)
# We store this using a namedtuple so that we save about 3x space over using a
# dict.
ProfileInfo = namedtuple(
"ProfileInfo", ("avatar_url", "display_name")
)
_MEMBERSHIP_PROFILE_UPDATE_NAME = "room_membership_profile_update"
@ -422,20 +430,20 @@ class RoomMemberStore(SQLBaseStore):
)
users_in_room = {
- row["user_id"]: {
- "display_name": row["display_name"],
- "avatar_url": row["avatar_url"],
- }
+ to_ascii(row["user_id"]): ProfileInfo(
+ avatar_url=to_ascii(row["avatar_url"]),
+ display_name=to_ascii(row["display_name"]),
+ )
for row in rows
}
if event is not None and event.type == EventTypes.Member:
if event.membership == Membership.JOIN:
if event.event_id in member_event_ids:
- users_in_room[event.state_key] = {
- "display_name": event.content.get("displayname", None),
- "avatar_url": event.content.get("avatar_url", None),
- }
+ users_in_room[to_ascii(event.state_key)] = ProfileInfo(
+ display_name=to_ascii(event.content.get("displayname", None)),
+ avatar_url=to_ascii(event.content.get("avatar_url", None)),
+ )
defer.returnValue(users_in_room)
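As a small illustration of what this means for callers (values made up), member profiles now come back as ProfileInfo namedtuples rather than dicts:

    from collections import namedtuple

    # Mirrors the definition added earlier in this file.
    ProfileInfo = namedtuple("ProfileInfo", ("avatar_url", "display_name"))

    profile = ProfileInfo(avatar_url="mxc://example.com/abc", display_name="Alice")
    print(profile.display_name)          # attribute access instead of profile["display_name"]
    avatar_url, display_name = profile   # still unpacks like a plain tuple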

View File

@ -279,12 +279,7 @@ class StateStore(SQLBaseStore):
return count
- @cached(num_args=2, max_entries=100000, iterable=True)
- def _get_state_group_from_group(self, group, types):
- raise NotImplementedError()
- @cachedList(cached_method_name="_get_state_group_from_group",
- list_name="groups", num_args=2, inlineCallbacks=True)
+ @defer.inlineCallbacks
def _get_state_groups_from_groups(self, groups, types):
"""Returns dictionary state_group -> (dict of (type, state_key) -> event id)
"""
@ -512,7 +507,7 @@ class StateStore(SQLBaseStore):
state_map = yield self.get_state_ids_for_events([event_id], types)
defer.returnValue(state_map[event_id])
- @cached(num_args=2, max_entries=100000)
+ @cached(num_args=2, max_entries=50000)
def _get_state_group_for_event(self, room_id, event_id):
return self._simple_select_one_onecol(
table="event_to_state_groups",

View File

@ -14,13 +14,10 @@
# limitations under the License.
import synapse.metrics
- from lrucache import LruCache
import os
CACHE_SIZE_FACTOR = float(os.environ.get("SYNAPSE_CACHE_FACTOR", 0.1))
- DEBUG_CACHES = False
metrics = synapse.metrics.get_metrics_for("synapse.util.caches")
caches_by_name = {}
@ -40,10 +37,6 @@ def register_cache(name, cache):
)
- _string_cache = LruCache(int(100000 * CACHE_SIZE_FACTOR))
- _stirng_cache_metrics = register_cache("string_cache", _string_cache)
KNOWN_KEYS = {
key: key for key in
(
@ -67,14 +60,16 @@ KNOWN_KEYS = {
def intern_string(string):
- """Takes a (potentially) unicode string and interns using custom cache
+ """Takes a (potentially) unicode string and interns it if it's ascii
"""
- new_str = _string_cache.setdefault(string, string)
- if new_str is string:
- _stirng_cache_metrics.inc_hits()
- else:
- _stirng_cache_metrics.inc_misses()
- return new_str
+ if string is None:
+ return None
+ try:
+ string = string.encode("ascii")
+ return intern(string)
+ except UnicodeEncodeError:
+ return string
def intern_dict(dictionary):
@ -87,13 +82,9 @@ def intern_dict(dictionary):
def _intern_known_values(key, value):
- intern_str_keys = ("event_id", "room_id")
- intern_unicode_keys = ("sender", "user_id", "type", "state_key")
- if key in intern_str_keys:
- return intern(value.encode('ascii'))
- if key in intern_unicode_keys:
+ intern_keys = ("event_id", "room_id", "sender", "user_id", "type", "state_key",)
+ if key in intern_keys:
return intern_string(value)
return value

View File

@ -19,7 +19,7 @@ from synapse.util import unwrapFirstError, logcontext
from synapse.util.caches.lrucache import LruCache
from synapse.util.caches.treecache import TreeCache, iterate_tree_cache_entry
- from . import DEBUG_CACHES, register_cache
+ from . import register_cache
from twisted.internet import defer
from collections import namedtuple
@ -76,7 +76,7 @@ class Cache(object):
self.cache = LruCache(
max_size=max_entries, keylen=keylen, cache_type=cache_type,
- size_callback=(lambda d: len(d.result)) if iterable else None,
+ size_callback=(lambda d: len(d)) if iterable else None,
)
self.name = name
@ -96,6 +96,17 @@ class Cache(object):
)
def get(self, key, default=_CacheSentinel, callback=None):
"""Looks the key up in the caches.
Args:
key(tuple)
default: What is returned if key is not in the caches. If not
specified then the function throws a KeyError instead
callback(fn): Gets called when the entry in the cache is invalidated
Returns:
Either a Deferred or the raw result
"""
callbacks = [callback] if callback else []
val = self._pending_deferred_cache.get(key, _CacheSentinel)
if val is not _CacheSentinel:
@ -137,7 +148,7 @@ class Cache(object):
if self.sequence == entry.sequence:
existing_entry = self._pending_deferred_cache.pop(key, None)
if existing_entry is entry:
- self.cache.set(key, entry.deferred, entry.callbacks)
+ self.cache.set(key, result, entry.callbacks)
else:
entry.invalidate()
else:
@ -335,20 +346,10 @@ class CacheDescriptor(_CacheDescriptorBase):
try:
cached_result_d = cache.get(cache_key, callback=invalidate_callback)
- observer = cached_result_d.observe()
- if DEBUG_CACHES:
- @defer.inlineCallbacks
- def check_result(cached_result):
- actual_result = yield self.function_to_call(obj, *args, **kwargs)
- if actual_result != cached_result:
- logger.error(
- "Stale cache entry %s%r: cached: %r, actual %r",
- self.orig.__name__, cache_key,
- cached_result, actual_result,
- )
- raise ValueError("Stale cache entry")
- defer.returnValue(cached_result)
- observer.addCallback(check_result)
+ if isinstance(cached_result_d, ObservableDeferred):
+ observer = cached_result_d.observe()
+ else:
+ observer = cached_result_d
except KeyError:
ret = defer.maybeDeferred(
@ -447,7 +448,9 @@ class CacheListDescriptor(_CacheDescriptorBase):
try:
res = cache.get(tuple(key), callback=invalidate_callback)
- if not res.has_succeeded():
+ if not isinstance(res, ObservableDeferred):
+ results[arg] = res
+ elif not res.has_succeeded():
res = res.observe()
res.addCallback(lambda r, arg: (arg, r), arg)
cached_defers[arg] = res

View File

@ -40,3 +40,17 @@ def is_ascii(s):
return False
else:
return True
def to_ascii(s):
"""Converts a string to ascii if it is ascii, otherwise leave it alone.
If given None then will return None.
"""
if s is None:
return None
try:
return s.encode("ascii")
except UnicodeEncodeError:
return s
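A quick illustration of the new helper's behaviour (values made up; assumes it is importable from synapse.util.stringutils as added above):

    from synapse.util.stringutils import to_ascii

    print(repr(to_ascii(u"@alice:example.com")))  # ascii input comes back encoded (a plain str on Python 2)
    print(repr(to_ascii(u"Zo\xeb")))              # non-ascii input is returned unchanged
    print(to_ascii(None))                         # None passes straight through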