Invalidate retry cache in both directions

Erik Johnston 2016-11-22 17:45:44 +00:00
parent 51e89709aa
commit 90565d015e
6 changed files with 132 additions and 27 deletions

synapse/replication/expire_cache.py (new file)

@@ -0,0 +1,60 @@
# Copyright 2016 OpenMarket Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

from synapse.http.server import respond_with_json_bytes, request_handler
from synapse.http.servlet import parse_json_object_from_request

from twisted.web.resource import Resource
from twisted.web.server import NOT_DONE_YET


class ExpireCacheResource(Resource):
    """
    HTTP endpoint for expiring storage caches.

    POST /_synapse/replication/expire_cache HTTP/1.1
    Content-Type: application/json

    {
        "invalidate": [
            {
                "name": "func_name",
                "keys": ["key1", "key2"]
            }
        ]
    }
    """

    def __init__(self, hs):
        Resource.__init__(self)  # Resource is old-style, so no super()

        self.store = hs.get_datastore()
        self.version_string = hs.version_string
        self.clock = hs.get_clock()

    def render_POST(self, request):
        self._async_render_POST(request)
        return NOT_DONE_YET

    @request_handler()
    def _async_render_POST(self, request):
        content = parse_json_object_from_request(request)

        for row in content["invalidate"]:
            name = row["name"]
            keys = tuple(row["keys"])

            getattr(self.store, name).invalidate(keys)

        respond_with_json_bytes(request, 200, "{}")
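For illustration, this is roughly what a poke at the endpoint looks like on the wire. The sketch is not part of the commit: the host/port and destination name are made up, and it uses only the request format documented in the docstring above (Python 2, matching Synapse's runtime at the time).

import json
import urllib2

# Hypothetical master address; in practice workers read it from
# hs.config.worker_replication_url.
url = "http://localhost:8008/_synapse/replication/expire_cache"

body = json.dumps({
    "invalidate": [{
        "name": "get_destination_retry_timings",  # a @cached store method
        "keys": ["example.org"],                   # its cache key
    }]
})

req = urllib2.Request(url, data=body, headers={"Content-Type": "application/json"})
print(urllib2.urlopen(req).read())  # the endpoint replies with "{}"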

synapse/replication/resource.py

@@ -17,6 +17,7 @@ from synapse.http.servlet import parse_integer, parse_string
 from synapse.http.server import request_handler, finish_request
 from synapse.replication.pusher_resource import PusherResource
 from synapse.replication.presence_resource import PresenceResource
+from synapse.replication.expire_cache import ExpireCacheResource
 from synapse.api.errors import SynapseError

 from twisted.web.resource import Resource
@@ -124,6 +125,7 @@ class ReplicationResource(Resource):
         self.putChild("remove_pushers", PusherResource(hs))
         self.putChild("syncing_users", PresenceResource(hs))
+        self.putChild("expire_cache", ExpireCacheResource(hs))

     def render_GET(self, request):
         self._async_render_GET(request)
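As a sketch of how that registration resolves (illustrative only; the mount point of ReplicationResource at /_synapse/replication is assumed from the docstring in expire_cache.py):

from twisted.web.resource import Resource

# Stand-ins for the real resources; ExpireCacheResource(hs) would sit
# where the leaf Resource() is.
replication = Resource()
replication.putChild("expire_cache", Resource())

synapse_root = Resource()
synapse_root.putChild("replication", replication)

root = Resource()
root.putChild("_synapse", synapse_root)
# A POST to /_synapse/replication/expire_cache now routes to the leaf resource.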

synapse/replication/slave/storage/_base.py

@@ -34,6 +34,9 @@ class BaseSlavedStore(SQLBaseStore):
         else:
             self._cache_id_gen = None

+        self.expire_cache_url = hs.config.worker_replication_url + "/expire_cache"
+        self.http_client = hs.get_simple_http_client()
+
     def stream_positions(self):
         pos = {}
         if self._cache_id_gen:
@@ -54,3 +57,19 @@ class BaseSlavedStore(SQLBaseStore):
                 logger.info("Got unexpected cache_func: %r", cache_func)
             self._cache_id_gen.advance(int(stream["position"]))
         return defer.succeed(None)
+
+    def _invalidate_cache_and_stream(self, txn, cache_func, keys):
+        txn.call_after(cache_func.invalidate, keys)
+        txn.call_after(self._send_invalidation_poke, cache_func, keys)
+
+    @defer.inlineCallbacks
+    def _send_invalidation_poke(self, cache_func, keys):
+        try:
+            yield self.http_client.post_json_get_json(self.expire_cache_url, {
+                "invalidate": [{
+                    "name": cache_func.__name__,
+                    "keys": list(keys),
+                }]
+            })
+        except Exception:
+            logger.exception("Failed to poke on expire_cache")
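The poke above and the handler in expire_cache.py share one contract: a cached store method can be looked up by name on the store and exposes .invalidate(key_tuple). A toy stand-in (not Synapse's actual descriptor) makes that concrete:

class FakeCachedFunction(object):
    """Minimal stand-in for the wrapper that @cached puts on store methods."""
    def __init__(self, name):
        self.__name__ = name   # what _send_invalidation_poke sends as "name"
        self.cache = {}

    def invalidate(self, keys):
        self.cache.pop(keys, None)

class FakeStore(object):
    get_destination_retry_timings = FakeCachedFunction(
        "get_destination_retry_timings"
    )

store = FakeStore()
store.get_destination_retry_timings.cache[("example.org",)] = {"retry_interval": 0}

# What ExpireCacheResource does with each row of the POSTed JSON:
row = {"name": "get_destination_retry_timings", "keys": ["example.org"]}
getattr(store, row["name"]).invalidate(tuple(row["keys"]))
assert ("example.org",) not in store.get_destination_retry_timings.cache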

synapse/replication/slave/storage/transactions.py

@@ -13,7 +13,6 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.

-from twisted.internet import defer
 from ._base import BaseSlavedStore
 from synapse.storage import DataStore
 from synapse.storage.transactions import TransactionStore
@@ -22,12 +21,10 @@ from synapse.storage.transactions import TransactionStore
 class TransactionStore(BaseSlavedStore):
     get_destination_retry_timings = TransactionStore.__dict__[
         "get_destination_retry_timings"
-    ].orig
+    ]
     _get_destination_retry_timings = DataStore._get_destination_retry_timings.__func__
+    set_destination_retry_timings = DataStore.set_destination_retry_timings.__func__
+    _set_destination_retry_timings = DataStore._set_destination_retry_timings.__func__

     prep_send_transaction = DataStore.prep_send_transaction.__func__
     delivered_txn = DataStore.delivered_txn.__func__
-
-    # For now, don't record the destination retry timings
-    def set_destination_retry_timings(*args, **kwargs):
-        return defer.succeed(None)
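The lines above lean on a Python 2 idiom worth spelling out: SomeClass.method.__func__ strips the unbound-method wrapper so the plain function can be re-attached to an unrelated class, while Class.__dict__["name"] fetches the raw descriptor (here the @cached wrapper, now kept rather than unwrapped via .orig) without triggering binding. A toy example of the borrowing half:

class Donor(object):
    def greet(self):
        return "hello from %s" % type(self).__name__

class Borrower(object):
    # Python 2: Donor.greet is an unbound method; .__func__ is the raw
    # function, which becomes an ordinary method of Borrower.
    greet = Donor.greet.__func__

print(Borrower().greet())  # hello from Borrower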

synapse/storage/transactions.py

@@ -14,6 +14,7 @@
 # limitations under the License.

 from ._base import SQLBaseStore
+from synapse.storage.engines import PostgresEngine
 from synapse.util.caches.descriptors import cached

 from twisted.internet import defer
@@ -200,23 +201,46 @@ class TransactionStore(SQLBaseStore):
     def _set_destination_retry_timings(self, txn, destination,
                                        retry_last_ts, retry_interval):
-        txn.call_after(self.get_destination_retry_timings.invalidate, (destination,))
-
-        self._simple_upsert_txn(
-            txn,
-            "destinations",
-            keyvalues={
-                "destination": destination,
-            },
-            values={
-                "retry_last_ts": retry_last_ts,
-                "retry_interval": retry_interval,
-            },
-            insertion_values={
-                "destination": destination,
-                "retry_last_ts": retry_last_ts,
-                "retry_interval": retry_interval,
-            }
-        )
+        self.database_engine.lock_table(txn, "destinations")
+
+        self._invalidate_cache_and_stream(
+            txn, self.get_destination_retry_timings, (destination,)
+        )
+
+        # We need to be careful here as the data may have changed from under us
+        # due to a worker setting the timings.
+
+        prev_row = self._simple_select_one_txn(
+            txn,
+            table="destinations",
+            keyvalues={
+                "destination": destination,
+            },
+            retcols=("retry_last_ts", "retry_interval"),
+            allow_none=True,
+        )
+
+        if not prev_row:
+            self._simple_insert_txn(
+                txn,
+                table="destinations",
+                values={
+                    "destination": destination,
+                    "retry_last_ts": retry_last_ts,
+                    "retry_interval": retry_interval,
+                }
+            )
+        elif retry_interval == 0 or prev_row["retry_interval"] < retry_interval:
+            self._simple_update_one_txn(
+                txn,
+                "destinations",
+                keyvalues={
+                    "destination": destination,
+                },
+                updatevalues={
+                    "retry_last_ts": retry_last_ts,
+                    "retry_interval": retry_interval,
+                },
+            )

     def get_destinations_needing_retry(self):
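The select-then-conditionally-write shape replaces a plain upsert because a worker may now also write timings: the row can change between read and write, so the transaction locks the table and only overwrites when the new value resets the backoff (retry_interval == 0) or increases it. The merge rule in isolation (a sketch, not Synapse code):

def should_overwrite(prev_interval, new_interval):
    """Mirror of the `elif` guard above, for a row that already exists."""
    return new_interval == 0 or prev_interval < new_interval

assert should_overwrite(5000, 0)          # a success resets the backoff
assert should_overwrite(5000, 10000)      # a longer backoff wins
assert not should_overwrite(10000, 5000)  # a stale, shorter backoff is ignored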

synapse/util/retryutils.py

@@ -121,12 +121,6 @@ class RetryDestinationLimiter(object):
         pass

     def __exit__(self, exc_type, exc_val, exc_tb):
-        def err(failure):
-            logger.exception(
-                "Failed to store set_destination_retry_timings",
-                failure.value
-            )
-
         valid_err_code = False
         if exc_type is not None and issubclass(exc_type, CodeMessageException):
             valid_err_code = 0 <= exc_val.code < 500
@@ -151,6 +145,15 @@ class RetryDestinationLimiter(object):
             retry_last_ts = int(self.clock.time_msec())

-        self.store.set_destination_retry_timings(
-            self.destination, retry_last_ts, self.retry_interval
-        ).addErrback(err)
+        @defer.inlineCallbacks
+        def store_retry_timings():
+            try:
+                yield self.store.set_destination_retry_timings(
+                    self.destination, retry_last_ts, self.retry_interval
+                )
+            except Exception:
+                logger.exception(
+                    "Failed to store set_destination_retry_timings",
+                )
+
+        store_retry_timings()
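The new wrapper is a fire-and-forget pattern: __exit__ cannot usefully wait on the Deferred, so any failure is caught inside an inlineCallbacks function and logged instead of becoming an unhandled error. A self-contained sketch of the same shape (stand-in names, assuming only Twisted is installed):

import logging

from twisted.internet import defer

logger = logging.getLogger(__name__)

def flaky_store_call():
    # Stand-in for store.set_destination_retry_timings(...) failing.
    return defer.fail(RuntimeError("db went away"))

@defer.inlineCallbacks
def store_retry_timings():
    try:
        yield flaky_store_call()
    except Exception:
        logger.exception("Failed to store set_destination_retry_timings")

store_retry_timings()  # returns a Deferred that always succeeds; errors are logged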