Add an API for listing threads in a room. (#13394)
Implement the /threads endpoint from MSC3856. This is currently unstable and behind an experimental configuration flag. It includes a background update to backfill data; results from the /threads endpoint will be partial until that background update finishes.
parent b6baa46db0
commit 3bbe532abb
10 changed files with 522 additions and 6 deletions
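For context, here is a rough sketch of how a client might call the new endpoint while it is unstable. The URL prefix and the include/limit/from query parameter names are assumptions based on MSC3856 and on the handler signature added below (they are not shown in this diff); the chunk/next_batch response shape follows from the handler code. The hunks shown are from the RelationsHandler.

import requests

HOMESERVER = "https://matrix.example.org"  # hypothetical homeserver
ACCESS_TOKEN = "syt_secret"                # hypothetical access token
ROOM_ID = "!abc123:example.org"            # hypothetical room ID

def list_threads(from_token=None, include="all", limit=5):
    """Fetch one page of thread roots for ROOM_ID (sketch, not a confirmed API)."""
    params = {"include": include, "limit": limit}
    if from_token is not None:
        params["from"] = from_token  # assumed name of the pagination parameter
    resp = requests.get(
        # Assumed unstable path while MSC3856 is experimental.
        f"{HOMESERVER}/_matrix/client/unstable/org.matrix.msc3856/rooms/{ROOM_ID}/threads",
        params=params,
        headers={"Authorization": f"Bearer {ACCESS_TOKEN}"},
        timeout=10,
    )
    resp.raise_for_status()
    return resp.json()  # e.g. {"chunk": [...], "next_batch": "..."}

page = list_threads(include="participated")
for thread_root in page["chunk"]:
    print(thread_root["event_id"])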
@@ -11,6 +11,7 @@
 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 # See the License for the specific language governing permissions and
 # limitations under the License.
+import enum
 import logging
 from typing import TYPE_CHECKING, Dict, FrozenSet, Iterable, List, Optional, Tuple
 
@@ -20,7 +21,7 @@ from synapse.api.constants import RelationTypes
 from synapse.api.errors import SynapseError
 from synapse.events import EventBase, relation_from_event
 from synapse.logging.opentracing import trace
-from synapse.storage.databases.main.relations import _RelatedEvent
+from synapse.storage.databases.main.relations import ThreadsNextBatch, _RelatedEvent
 from synapse.streams.config import PaginationConfig
 from synapse.types import JsonDict, Requester, StreamToken, UserID
 from synapse.visibility import filter_events_for_client
@@ -32,6 +33,13 @@ if TYPE_CHECKING:
 logger = logging.getLogger(__name__)
 
 
+class ThreadsListInclude(str, enum.Enum):
+    """Valid values for the 'include' flag of /threads."""
+
+    all = "all"
+    participated = "participated"
+
+
 @attr.s(slots=True, frozen=True, auto_attribs=True)
 class _ThreadAggregation:
     # The latest event in the thread.
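The include flag is modelled as a str/enum.Enum mixin, so the REST layer can turn the raw query value into an enum member and still compare it like a plain string. A minimal standalone sketch of that behaviour (not part of the diff):

import enum

class ThreadsListInclude(str, enum.Enum):
    """Valid values for the 'include' flag of /threads (copied from the hunk above)."""

    all = "all"
    participated = "participated"

# Value lookup parses a raw query-string value; anything else raises ValueError.
include = ThreadsListInclude("participated")
assert include is ThreadsListInclude.participated
# Because the enum also subclasses str, members compare equal to plain strings.
assert include == "participated"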
@@ -482,3 +490,79 @@ class RelationsHandler:
             results.setdefault(event_id, BundledAggregations()).replace = edit
 
         return results
+
+    async def get_threads(
+        self,
+        requester: Requester,
+        room_id: str,
+        include: ThreadsListInclude,
+        limit: int = 5,
+        from_token: Optional[ThreadsNextBatch] = None,
+    ) -> JsonDict:
+        """Get the threads in a room, ordered by topological ordering.
+
+        Args:
+            requester: The user requesting the relations.
+            room_id: The room the event belongs to.
+            include: One of "all" or "participated" to indicate which threads should
+                be returned.
+            limit: Only fetch the most recent `limit` events.
+            from_token: Fetch rows from the given token, or from the start if None.
+
+        Returns:
+            The pagination chunk.
+        """
+
+        user_id = requester.user.to_string()
+
+        # TODO Properly handle a user leaving a room.
+        (_, member_event_id) = await self._auth.check_user_in_room_or_world_readable(
+            room_id, requester, allow_departed_users=True
+        )
+
+        # Note that ignored users are not passed into get_threads below. Ignored
+        # users are handled in filter_events_for_client (and by not passing them
+        # in here we should get a better cache hit rate).
+        thread_roots, next_batch = await self._main_store.get_threads(
+            room_id=room_id, limit=limit, from_token=from_token
+        )
+
+        events = await self._main_store.get_events_as_list(thread_roots)
+
+        if include == ThreadsListInclude.participated:
+            # Pre-seed thread participation with whether the requester sent the event.
+            participated = {event.event_id: event.sender == user_id for event in events}
+            # For events the requester did not send, check the database for whether
+            # the requester sent a threaded reply.
+            participated.update(
+                await self._main_store.get_threads_participated(
+                    [eid for eid, p in participated.items() if not p],
+                    user_id,
+                )
+            )
+
+            # Limit the returned threads to those the user has participated in.
+            events = [event for event in events if participated[event.event_id]]
+
+        events = await filter_events_for_client(
+            self._storage_controllers,
+            user_id,
+            events,
+            is_peeking=(member_event_id is None),
+        )
+
+        aggregations = await self.get_bundled_aggregations(
+            events, requester.user.to_string()
+        )
+
+        now = self._clock.time_msec()
+        serialized_events = self._event_serializer.serialize_events(
+            events, now, bundle_aggregations=aggregations
+        )
+
+        return_value: JsonDict = {"chunk": serialized_events}
+
+        if next_batch:
+            return_value["next_batch"] = str(next_batch)
+
+        return return_value
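The participation filter in get_threads follows a pre-seed-then-batch pattern: answer cheaply for thread roots the requester sent themselves, and only query storage for the remaining event IDs. A self-contained sketch of the same idea, using stand-in types and a fake lookup rather than Synapse's storage API:

from dataclasses import dataclass
from typing import Dict, List

@dataclass
class Event:
    """Stand-in for a Matrix event; only the fields the sketch needs."""
    event_id: str
    sender: str

def threads_user_replied_to(event_ids: List[str], user_id: str) -> Dict[str, bool]:
    """Stand-in for the database lookup (get_threads_participated in the diff)."""
    replied = {"$root2"}  # pretend the user replied in this one thread
    return {eid: eid in replied for eid in event_ids}

def filter_participated(events: List[Event], user_id: str) -> List[Event]:
    # Pre-seed: the user trivially participated in threads they started.
    participated = {e.event_id: e.sender == user_id for e in events}
    # Only the unresolved IDs need the more expensive lookup.
    participated.update(
        threads_user_replied_to(
            [eid for eid, p in participated.items() if not p], user_id
        )
    )
    return [e for e in events if participated[e.event_id]]

events = [Event("$root1", "@alice:example.org"), Event("$root2", "@bob:example.org")]
print([e.event_id for e in filter_participated(events, "@alice:example.org")])
# -> ['$root1', '$root2']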