Merge remote-tracking branch 'upstream/release-v1.29.0'

Tulir Asokan 2021-03-04 12:49:37 +02:00
commit adb990d8ba
126 changed files with 2399 additions and 765 deletions

synapse/push/httppusher.py

@@ -15,11 +15,12 @@
# limitations under the License.
import logging
import urllib.parse
-from typing import TYPE_CHECKING, Any, Dict, Iterable, Union
+from typing import TYPE_CHECKING, Any, Dict, Iterable, Optional, Union
from prometheus_client import Counter
from twisted.internet.error import AlreadyCalled, AlreadyCancelled
+from twisted.internet.interfaces import IDelayedCall
from synapse.api.constants import EventTypes
from synapse.events import EventBase
@@ -71,9 +72,10 @@ class HttpPusher(Pusher):
self.data = pusher_config.data
self.backoff_delay = HttpPusher.INITIAL_BACKOFF_SEC
self.failing_since = pusher_config.failing_since
-self.timed_call = None
+self.timed_call = None  # type: Optional[IDelayedCall]
self._is_processing = False
self._group_unread_count_by_room = hs.config.push_group_unread_count_by_room
+self._pusherpool = hs.get_pusherpool()
self.data = pusher_config.data
if self.data is None:
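
The two hunks above are typing and wiring changes: the retry timer gains an explicit Optional[IDelayedCall] annotation, and the pusher now holds a reference to the pusher pool (used below to remove rejected pushkeys). A minimal, self-contained sketch of the timer pattern behind the annotation, using a hypothetical RetryTimer class rather than Synapse's own code:

```python
# A minimal, self-contained sketch (hypothetical RetryTimer class, not Synapse
# code) of the pattern the annotation describes: a Twisted delayed call stored
# as Optional[IDelayedCall] so it can be scheduled and cancelled safely.
from typing import Callable, Optional

from twisted.internet import reactor
from twisted.internet.error import AlreadyCalled, AlreadyCancelled
from twisted.internet.interfaces import IDelayedCall


class RetryTimer:
    def __init__(self) -> None:
        self.timed_call = None  # type: Optional[IDelayedCall]

    def schedule(self, delay_secs: float, fn: Callable[[], None]) -> None:
        # callLater returns an IDelayedCall that can later be cancelled.
        self.timed_call = reactor.callLater(delay_secs, fn)

    def cancel(self) -> None:
        if self.timed_call:
            try:
                self.timed_call.cancel()
            except (AlreadyCalled, AlreadyCancelled):
                pass
        self.timed_call = None
```
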
@@ -292,7 +294,7 @@ class HttpPusher(Pusher):
)
else:
logger.info("Pushkey %s was rejected: removing", pk)
-await self.hs.remove_pusher(self.app_id, pk, self.user_id)
+await self._pusherpool.remove_pusher(self.app_id, pk, self.user_id)
return True
async def _build_notification_dict(

synapse/push/pusherpool.py

@@ -19,12 +19,14 @@ from typing import TYPE_CHECKING, Dict, Iterable, Optional
from prometheus_client import Gauge
+from synapse.api.errors import Codes, SynapseError
from synapse.metrics.background_process_metrics import (
run_as_background_process,
wrap_as_background_process,
)
from synapse.push import Pusher, PusherConfig, PusherConfigException
from synapse.push.pusher import PusherFactory
+from synapse.replication.http.push import ReplicationRemovePusherRestServlet
from synapse.types import JsonDict, RoomStreamToken
from synapse.util.async_helpers import concurrently_execute
@@ -58,7 +60,6 @@
def __init__(self, hs: "HomeServer"):
self.hs = hs
self.pusher_factory = PusherFactory(hs)
-self._should_start_pushers = hs.config.start_pushers
self.store = self.hs.get_datastore()
self.clock = self.hs.get_clock()
@@ -67,6 +68,16 @@
# We shard the handling of push notifications by user ID.
self._pusher_shard_config = hs.config.push.pusher_shard_config
self._instance_name = hs.get_instance_name()
+self._should_start_pushers = (
+    self._instance_name in self._pusher_shard_config.instances
+)
+# We can only delete pushers on master.
+self._remove_pusher_client = None
+if hs.config.worker.worker_app:
+    self._remove_pusher_client = ReplicationRemovePusherRestServlet.make_client(
+        hs
+    )
# Record the last stream ID that we were poked about so we can get
# changes since then. We set this to the current max stream ID on
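
The new `_should_start_pushers` flag replaces the old `start_pushers` config read: an instance now runs pushers only if it is named in the pusher shard configuration, and workers additionally get a replication client for deleting pushers on the master. A self-contained sketch of the shard check, with a hypothetical ShardConfig class (and a CRC32-based mapping chosen only for illustration) standing in for Synapse's config objects:

```python
# A self-contained sketch (hypothetical ShardConfig class, not Synapse's actual
# config objects) of the check introduced above: an instance only starts
# pushers if its name appears in the pusher shard configuration.
import zlib
from dataclasses import dataclass
from typing import List


@dataclass
class ShardConfig:
    instances: List[str]

    def should_handle(self, instance_name: str, key: str) -> bool:
        # A stable hash of the key (e.g. a user ID) picks exactly one instance.
        index = zlib.crc32(key.encode("utf-8")) % len(self.instances)
        return self.instances[index] == instance_name


config = ShardConfig(instances=["pusher1", "pusher2"])
instance_name = "pusher1"

# Mirrors `self._should_start_pushers = self._instance_name in ...instances`.
should_start_pushers = instance_name in config.instances
print(should_start_pushers, config.should_handle(instance_name, "@alice:example.com"))
```
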
@@ -103,6 +114,11 @@
The newly created pusher.
"""
if kind == "email":
+    email_owner = await self.store.get_user_id_by_threepid("email", pushkey)
+    if email_owner != user_id:
+        raise SynapseError(400, "Email not found", Codes.THREEPID_NOT_FOUND)
time_now_msec = self.clock.time_msec()
# create the pusher setting last_stream_ordering to the current maximum
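
This hunk adds an ownership check before an email pusher is created: the pushkey of an email pusher is the email address itself, and the request is rejected with THREEPID_NOT_FOUND if that address is not bound to the requesting user. A self-contained sketch of the check, with a hypothetical in-memory store standing in for the real datastore:

```python
# A self-contained sketch (hypothetical FakeStore, not Synapse's datastore) of
# the ownership check added above before creating an email pusher.
import asyncio
from typing import Dict, Optional


class FakeStore:
    def __init__(self, owners: Dict[str, str]):
        self._owners = owners  # email address -> user ID

    async def get_user_id_by_threepid(self, medium: str, address: str) -> Optional[str]:
        return self._owners.get(address) if medium == "email" else None


async def check_email_pusher_owner(store: FakeStore, user_id: str, pushkey: str) -> None:
    # An email pusher's pushkey is the email address; refuse to create one for
    # an address that is not bound to the requesting user.
    email_owner = await store.get_user_id_by_threepid("email", pushkey)
    if email_owner != user_id:
        # Synapse raises SynapseError(400, "Email not found", THREEPID_NOT_FOUND) here.
        raise ValueError("Email not found")


store = FakeStore({"alice@example.com": "@alice:example.com"})
asyncio.run(check_email_pusher_owner(store, "@alice:example.com", "alice@example.com"))
```
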
@@ -175,9 +191,6 @@
user_id: user to remove pushers for
access_tokens: access token *ids* to remove pushers for
"""
-if not self._pusher_shard_config.should_handle(self._instance_name, user_id):
-    return
tokens = set(access_tokens)
for p in await self.store.get_pushers_by_user_id(user_id):
if p.access_token in tokens:
@@ -380,6 +393,12 @@
synapse_pushers.labels(type(pusher).__name__, pusher.app_id).dec()
-await self.store.delete_pusher_by_app_id_pushkey_user_id(
-    app_id, pushkey, user_id
-)
+# We can only delete pushers on master.
+if self._remove_pusher_client:
+    await self._remove_pusher_client(
+        app_id=app_id, pushkey=pushkey, user_id=user_id
+    )
+else:
+    await self.store.delete_pusher_by_app_id_pushkey_user_id(
+        app_id, pushkey, user_id
+    )
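
The final hunk, together with the removal of the shard-config early return above, routes all pusher deletions through the master: a worker proxies the request over the new replication endpoint, while the master (which has no replication client) deletes from the database directly. A minimal, self-contained sketch of that routing, using a hypothetical PusherDeleter class rather than Synapse's PusherPool:

```python
# A minimal, self-contained sketch (hypothetical PusherDeleter class, not
# Synapse code) of the deletion routing in the final hunk: workers hold a
# replication client and proxy deletion to the master; the master deletes
# from the database directly.
import asyncio
from typing import Awaitable, Callable, Optional

RemovePusherClient = Callable[..., Awaitable[None]]
DeleteInDb = Callable[[str, str, str], Awaitable[None]]


class PusherDeleter:
    def __init__(
        self,
        remove_pusher_client: Optional[RemovePusherClient],
        delete_in_db: DeleteInDb,
    ) -> None:
        # On workers, remove_pusher_client is the replication HTTP client;
        # on master it is None and deletion happens against the database.
        self._remove_pusher_client = remove_pusher_client
        self._delete_in_db = delete_in_db

    async def delete(self, app_id: str, pushkey: str, user_id: str) -> None:
        if self._remove_pusher_client:
            # We can only delete pushers on master, so proxy the request there.
            await self._remove_pusher_client(
                app_id=app_id, pushkey=pushkey, user_id=user_id
            )
        else:
            # Running on master: delete from the database directly.
            await self._delete_in_db(app_id, pushkey, user_id)


async def main() -> None:
    async def delete_in_db(app_id: str, pushkey: str, user_id: str) -> None:
        print("deleted", app_id, pushkey, "for", user_id)

    # Master-style instance: no replication client, so the local path is used.
    await PusherDeleter(None, delete_in_db).delete(
        "m.http", "a-push-key", "@alice:example.com"
    )


asyncio.run(main())
```
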