Mirror of https://git.anonymousland.org/anonymousland/synapse.git (synced 2025-08-21 17:48:07 -04:00)

Merge remote-tracking branch 'upstream/release-v1.39'

Commit 9754df5623: 260 changed files with 3981 additions and 2390 deletions
@@ -402,9 +402,9 @@ class JoinRoomAliasServlet(ResolveRoomIdMixin, RestServlet):
         # Get the room ID from the identifier.
         try:
-            remote_room_hosts = [
+            remote_room_hosts: Optional[List[str]] = [
                 x.decode("ascii") for x in request.args[b"server_name"]
-            ]  # type: Optional[List[str]]
+            ]
         except Exception:
             remote_room_hosts = None

         room_id, remote_room_hosts = await self.resolve_room_id(
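
Note: most hunks in this merge follow one mechanical pattern. Synapse's minimum supported Python is now well past 3.5, so comment-style type annotations are being rewritten as inline variable annotations (PEP 526, Python 3.6+). A minimal standalone sketch of the pattern, with illustrative names:

    from typing import List, Optional

    # Before: comment-style annotation (needed on Python 3.5)
    hosts = []  # type: Optional[List[str]]

    # After: inline annotation (PEP 526); identical meaning to mypy
    hosts: Optional[List[str]] = []
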
@@ -462,6 +462,7 @@ class MakeRoomAdminRestServlet(ResolveRoomIdMixin, RestServlet):
         super().__init__(hs)
         self.hs = hs
         self.auth = hs.get_auth()
+        self.store = hs.get_datastore()
         self.event_creation_handler = hs.get_event_creation_handler()
         self.state_handler = hs.get_state_handler()
         self.is_mine_id = hs.is_mine_id
@@ -500,7 +501,13 @@ class MakeRoomAdminRestServlet(ResolveRoomIdMixin, RestServlet):
         admin_user_id = None

         for admin_user in reversed(admin_users):
-            if room_state.get((EventTypes.Member, admin_user)):
+            (
+                current_membership_type,
+                _,
+            ) = await self.store.get_local_current_membership_for_user_in_room(
+                admin_user, room_id
+            )
+            if current_membership_type == "join":
                 admin_user_id = admin_user
                 break

@@ -652,9 +659,7 @@ class RoomEventContextServlet(RestServlet):
         filter_str = parse_string(request, "filter", encoding="utf-8")
         if filter_str:
             filter_json = urlparse.unquote(filter_str)
-            event_filter = Filter(
-                json_decoder.decode(filter_json)
-            )  # type: Optional[Filter]
+            event_filter: Optional[Filter] = Filter(json_decoder.decode(filter_json))
         else:
             event_filter = None

@@ -357,7 +357,7 @@ class UserRegisterServlet(RestServlet):
     def __init__(self, hs: "HomeServer"):
         self.auth_handler = hs.get_auth_handler()
         self.reactor = hs.get_reactor()
-        self.nonces = {}  # type: Dict[str, int]
+        self.nonces: Dict[str, int] = {}
         self.hs = hs

     def _clear_old_nonces(self):
@@ -560,16 +560,24 @@ class AccountValidityRenewServlet(RestServlet):
     async def on_POST(self, request: SynapseRequest) -> Tuple[int, JsonDict]:
         await assert_requester_is_admin(self.auth, request)

-        body = parse_json_object_from_request(request)
+        if self.account_activity_handler.on_legacy_admin_request_callback:
+            expiration_ts = await (
+                self.account_activity_handler.on_legacy_admin_request_callback(request)
+            )
+        else:
+            body = parse_json_object_from_request(request)

-        if "user_id" not in body:
-            raise SynapseError(400, "Missing property 'user_id' in the request body")
+            if "user_id" not in body:
+                raise SynapseError(
+                    400,
+                    "Missing property 'user_id' in the request body",
+                )

-        expiration_ts = await self.account_activity_handler.renew_account_for_user(
-            body["user_id"],
-            body.get("expiration_ts"),
-            not body.get("enable_renewal_emails", True),
-        )
+            expiration_ts = await self.account_activity_handler.renew_account_for_user(
+                body["user_id"],
+                body.get("expiration_ts"),
+                not body.get("enable_renewal_emails", True),
+            )

         res = {"expiration_ts": expiration_ts}
         return 200, res
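
Note: the new branch consults an account-validity module callback before falling back to the legacy request-body path. A hedged sketch of a module wiring up such a callback; the registration method name and signature here are assumptions inferred from the `on_legacy_admin_request_callback` attribute used above:

    class ExampleAccountValidityModule:
        def __init__(self, config, api):
            # Assumed registration API; mirrors the handler attribute above.
            api.register_account_validity_callbacks(
                on_legacy_admin_request=self.on_legacy_admin_request,
            )

        async def on_legacy_admin_request(self, request) -> int:
            # Return the new expiration timestamp in milliseconds for the
            # user targeted by this admin renewal request.
            return 0
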
@@ -44,19 +44,14 @@ if TYPE_CHECKING:
 logger = logging.getLogger(__name__)


-LoginResponse = TypedDict(
-    "LoginResponse",
-    {
-        "user_id": str,
-        "access_token": str,
-        "home_server": str,
-        "expires_in_ms": Optional[int],
-        "refresh_token": Optional[str],
-        "device_id": str,
-        "well_known": Optional[Dict[str, Any]],
-    },
-    total=False,
-)
+class LoginResponse(TypedDict, total=False):
+    user_id: str
+    access_token: str
+    home_server: str
+    expires_in_ms: Optional[int]
+    refresh_token: Optional[str]
+    device_id: str
+    well_known: Optional[Dict[str, Any]]


 class LoginRestServlet(RestServlet):
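
Note: the functional and class-based TypedDict forms are equivalent; the class form reads like a normal class body, with total=False moved into the class header so every key stays optional. A self-contained sketch (TypedDict lives in typing on Python 3.8+, in typing_extensions before that):

    from typing import Optional
    from typing_extensions import TypedDict

    class LoginResponse(TypedDict, total=False):
        user_id: str
        access_token: str
        expires_in_ms: Optional[int]

    # total=False: any subset of the keys type-checks
    resp: LoginResponse = {"user_id": "@alice:example.com"}
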
@@ -121,7 +116,7 @@ class LoginRestServlet(RestServlet):
             flows.append({"type": LoginRestServlet.CAS_TYPE})

         if self.cas_enabled or self.saml2_enabled or self.oidc_enabled:
-            sso_flow = {
+            sso_flow: JsonDict = {
                 "type": LoginRestServlet.SSO_TYPE,
                 "identity_providers": [
                     _get_auth_flow_dict_for_idp(
@@ -129,7 +124,7 @@ class LoginRestServlet(RestServlet):
                     )
                     for idp in self._sso_handler.get_identity_providers().values()
                 ],
-            }  # type: JsonDict
+            }

             if self._msc2858_enabled:
                 # backwards-compatibility support for clients which don't
@@ -150,9 +145,7 @@ class LoginRestServlet(RestServlet):
             # login flow types returned.
             flows.append({"type": LoginRestServlet.TOKEN_TYPE})

-        flows.extend(
-            ({"type": t} for t in self.auth_handler.get_supported_login_types())
-        )
+        flows.extend({"type": t} for t in self.auth_handler.get_supported_login_types())

         flows.append({"type": LoginRestServlet.APPSERVICE_TYPE})
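
Note: list.extend() accepts any iterable, so a generator expression passed as the sole argument needs no extra wrapping parentheses; the collapsed one-liner above is behaviorally identical. A quick demonstration:

    flows = [{"type": "m.login.token"}]
    supported = ["m.login.password", "m.login.sso"]
    # A bare generator expression as the only argument is valid syntax.
    flows.extend({"type": t} for t in supported)
    assert len(flows) == 3
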
@@ -447,7 +440,7 @@ def _get_auth_flow_dict_for_idp(
         use_unstable_brands: whether we should use brand identifiers suitable
             for the unstable API
     """
-    e = {"id": idp.idp_id, "name": idp.idp_name}  # type: JsonDict
+    e: JsonDict = {"id": idp.idp_id, "name": idp.idp_name}
     if idp.idp_icon:
         e["icon"] = idp.idp_icon
     if idp.idp_brand:
@@ -561,7 +554,7 @@ class SsoRedirectServlet(RestServlet):
             finish_request(request)
             return

-        args = request.args  # type: Dict[bytes, List[bytes]]  # type: ignore
+        args: Dict[bytes, List[bytes]] = request.args  # type: ignore
         client_redirect_url = parse_bytes_from_args(args, "redirectUrl", required=True)
         sso_url = await self._sso_handler.handle_redirect_request(
             request,
@@ -29,6 +29,7 @@ from synapse.api.errors import (
     SynapseError,
 )
 from synapse.api.filtering import Filter
+from synapse.appservice import ApplicationService
 from synapse.events.utils import format_event_for_client_v2
 from synapse.http.servlet import (
     RestServlet,
@@ -47,11 +48,13 @@ from synapse.storage.state import StateFilter
 from synapse.streams.config import PaginationConfig
 from synapse.types import (
     JsonDict,
+    Requester,
     RoomAlias,
     RoomID,
     StreamToken,
     ThirdPartyInstanceID,
     UserID,
+    create_requester,
 )
 from synapse.util import json_decoder
 from synapse.util.stringutils import parse_and_validate_server_name, random_string
@@ -312,7 +315,7 @@ class RoomBatchSendEventRestServlet(TransactionRestServlet):
         self.room_member_handler = hs.get_room_member_handler()
         self.auth = hs.get_auth()

-    async def inherit_depth_from_prev_ids(self, prev_event_ids) -> int:
+    async def _inherit_depth_from_prev_ids(self, prev_event_ids) -> int:
         (
             most_recent_prev_event_id,
             most_recent_prev_event_depth,
@@ -352,6 +355,54 @@ class RoomBatchSendEventRestServlet(TransactionRestServlet):

         return depth

+    def _create_insertion_event_dict(
+        self, sender: str, room_id: str, origin_server_ts: int
+    ):
+        """Creates an event dict for an "insertion" event with the proper fields
+        and a random chunk ID.
+
+        Args:
+            sender: The event author MXID
+            room_id: The room ID that the event belongs to
+            origin_server_ts: Timestamp when the event was sent
+
+        Returns:
+            Tuple of event ID and stream ordering position
+        """
+
+        next_chunk_id = random_string(8)
+        insertion_event = {
+            "type": EventTypes.MSC2716_INSERTION,
+            "sender": sender,
+            "room_id": room_id,
+            "content": {
+                EventContentFields.MSC2716_NEXT_CHUNK_ID: next_chunk_id,
+                EventContentFields.MSC2716_HISTORICAL: True,
+            },
+            "origin_server_ts": origin_server_ts,
+        }
+
+        return insertion_event
+
+    async def _create_requester_for_user_id_from_app_service(
+        self, user_id: str, app_service: ApplicationService
+    ) -> Requester:
+        """Creates a new requester for the given user_id
+        and validates that the app service is allowed to control
+        the given user.
+
+        Args:
+            user_id: The author MXID that the app service is controlling
+            app_service: The app service that controls the user
+
+        Returns:
+            Requester object
+        """
+
+        await self.auth.validate_appservice_can_control_user_id(app_service, user_id)
+
+        return create_requester(user_id, app_service=app_service)
+
     async def on_POST(self, request, room_id):
         requester = await self.auth.get_user_by_req(request, allow_guest=False)

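
Note: the new _create_insertion_event_dict helper centralizes construction of MSC2716 "insertion" events. A sketch of a call and the rough shape of the returned dict, using only names from the hunk above (the concrete values are illustrative):

    # Hypothetical call site:
    insertion_event = self._create_insertion_event_dict(
        sender="@historian:example.com",
        room_id="!abc:example.com",
        origin_server_ts=1626000000000,
    )
    # insertion_event is now roughly:
    # {
    #     "type": EventTypes.MSC2716_INSERTION,
    #     "sender": "@historian:example.com",
    #     "room_id": "!abc:example.com",
    #     "content": {
    #         EventContentFields.MSC2716_NEXT_CHUNK_ID: "<8-char random string>",
    #         EventContentFields.MSC2716_HISTORICAL: True,
    #     },
    #     "origin_server_ts": 1626000000000,
    # }
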
@@ -417,7 +468,9 @@ class RoomBatchSendEventRestServlet(TransactionRestServlet):
             if event_dict["type"] == EventTypes.Member:
                 membership = event_dict["content"].get("membership", None)
                 event_id, _ = await self.room_member_handler.update_membership(
-                    requester,
+                    await self._create_requester_for_user_id_from_app_service(
+                        state_event["sender"], requester.app_service
+                    ),
                     target=UserID.from_string(event_dict["state_key"]),
                     room_id=room_id,
                     action=membership,
@@ -437,7 +490,9 @@ class RoomBatchSendEventRestServlet(TransactionRestServlet):
                     event,
                     _,
                 ) = await self.event_creation_handler.create_and_send_nonmember_event(
-                    requester,
+                    await self._create_requester_for_user_id_from_app_service(
+                        state_event["sender"], requester.app_service
+                    ),
                     event_dict,
                     outlier=True,
                     prev_event_ids=[fake_prev_event_id],
@@ -452,37 +507,73 @@ class RoomBatchSendEventRestServlet(TransactionRestServlet):

         events_to_create = body["events"]

-        # If provided, connect the chunk to the last insertion point
-        # The chunk ID passed in comes from the chunk_id in the
-        # "insertion" event from the previous chunk.
-        if chunk_id_from_query:
-            last_event_in_chunk = events_to_create[-1]
-            last_event_in_chunk["content"][
-                EventContentFields.MSC2716_CHUNK_ID
-            ] = chunk_id_from_query
+        prev_event_ids = prev_events_from_query
+        inherited_depth = await self._inherit_depth_from_prev_ids(
+            prev_events_from_query
+        )

-        # Add an "insertion" event to the start of each chunk (next to the oldest
-        # event in the chunk) so the next chunk can be connected to this one.
-        next_chunk_id = random_string(64)
-        insertion_event = {
-            "type": EventTypes.MSC2716_INSERTION,
-            "sender": requester.user.to_string(),
-            "content": {
-                EventContentFields.MSC2716_NEXT_CHUNK_ID: next_chunk_id,
-                EventContentFields.MSC2716_HISTORICAL: True,
-            },
-            # Since the insertion event is put at the start of the chunk,
-            # where the oldest event is, copy the origin_server_ts from
-            # the first event we're inserting
-            "origin_server_ts": events_to_create[0]["origin_server_ts"],
-        }
+        # Figure out which chunk to connect to. If they passed in
+        # chunk_id_from_query let's use it. The chunk ID passed in comes
+        # from the chunk_id in the "insertion" event from the previous chunk.
+        last_event_in_chunk = events_to_create[-1]
+        chunk_id_to_connect_to = chunk_id_from_query
+        base_insertion_event = None
+        if chunk_id_from_query:
+            # TODO: Verify the chunk_id_from_query corresponds to an insertion event
+            pass
+        # Otherwise, create an insertion event to act as a starting point.
+        #
+        # We don't always have an insertion event to start hanging more history
+        # off of (ideally there would be one in the main DAG, but that's not the
+        # case if we're wanting to add history to e.g. existing rooms without
+        # an insertion event), in which case we just create a new insertion event
+        # that can then get pointed to by a "marker" event later.
+        else:
+            base_insertion_event_dict = self._create_insertion_event_dict(
+                sender=requester.user.to_string(),
+                room_id=room_id,
+                origin_server_ts=last_event_in_chunk["origin_server_ts"],
+            )
+            base_insertion_event_dict["prev_events"] = prev_event_ids.copy()
+
+            (
+                base_insertion_event,
+                _,
+            ) = await self.event_creation_handler.create_and_send_nonmember_event(
+                await self._create_requester_for_user_id_from_app_service(
+                    base_insertion_event_dict["sender"],
+                    requester.app_service,
+                ),
+                base_insertion_event_dict,
+                prev_event_ids=base_insertion_event_dict.get("prev_events"),
+                auth_event_ids=auth_event_ids,
+                historical=True,
+                depth=inherited_depth,
+            )
+
+            chunk_id_to_connect_to = base_insertion_event["content"][
+                EventContentFields.MSC2716_NEXT_CHUNK_ID
+            ]
+
+        # Connect this current chunk to the insertion event from the previous chunk
+        last_event_in_chunk["content"][
+            EventContentFields.MSC2716_CHUNK_ID
+        ] = chunk_id_to_connect_to
+
+        # Add an "insertion" event to the start of each chunk (next to the oldest-in-time
+        # event in the chunk) so the next chunk can be connected to this one.
+        insertion_event = self._create_insertion_event_dict(
+            sender=requester.user.to_string(),
+            room_id=room_id,
+            # Since the insertion event is put at the start of the chunk,
+            # where the oldest-in-time event is, copy the origin_server_ts from
+            # the first event we're inserting
+            origin_server_ts=events_to_create[0]["origin_server_ts"],
+        )
         # Prepend the insertion event to the start of the chunk
         events_to_create = [insertion_event] + events_to_create

-        inherited_depth = await self.inherit_depth_from_prev_ids(prev_events_from_query)
-
         event_ids = []
-        prev_event_ids = prev_events_from_query
         events_to_persist = []
         for ev in events_to_create:
             assert_params_in_dict(ev, ["type", "origin_server_ts", "content", "sender"])
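
Note: the net effect of the rewritten block is that each chunk of historical events gets an insertion event prepended (advertising a fresh next-chunk ID) and its last, oldest-in-time event stamped with the chunk ID it connects to, either the one from the query string or one minted by the new base insertion event. A minimal standalone simulation of that linking rule, not Synapse code:

    def link_chunk(events, chunk_id_to_connect_to, next_chunk_id):
        """Stamp the oldest event with the chunk ID to connect to, and
        prepend an insertion event advertising the next chunk ID."""
        insertion = {"type": "insertion", "next_chunk_id": next_chunk_id}
        events[-1]["chunk_id"] = chunk_id_to_connect_to
        return [insertion] + events

    chunk = link_chunk([{"body": "old message"}], "chunk-A", "chunk-B")
    assert chunk[0]["next_chunk_id"] == "chunk-B"  # the next chunk hooks here
    assert chunk[-1]["chunk_id"] == "chunk-A"      # hooked under the previous one
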
@@ -501,7 +592,9 @@ class RoomBatchSendEventRestServlet(TransactionRestServlet):
             }

             event, context = await self.event_creation_handler.create_event(
-                requester,
+                await self._create_requester_for_user_id_from_app_service(
+                    ev["sender"], requester.app_service
+                ),
                 event_dict,
                 prev_event_ids=event_dict.get("prev_events"),
                 auth_event_ids=auth_event_ids,
@@ -531,15 +624,23 @@ class RoomBatchSendEventRestServlet(TransactionRestServlet):
         # where topological_ordering is just depth.
         for (event, context) in reversed(events_to_persist):
             ev = await self.event_creation_handler.handle_new_client_event(
-                requester=requester,
+                await self._create_requester_for_user_id_from_app_service(
+                    event["sender"], requester.app_service
+                ),
                 event=event,
                 context=context,
             )

+        # Add the base_insertion_event to the bottom of the list we return
+        if base_insertion_event is not None:
+            event_ids.append(base_insertion_event.event_id)
+
         return 200, {
             "state_events": auth_event_ids,
             "events": event_ids,
-            "next_chunk_id": next_chunk_id,
+            "next_chunk_id": insertion_event["content"][
+                EventContentFields.MSC2716_NEXT_CHUNK_ID
+            ],
         }

     def on_GET(self, request, room_id):
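
Note: callers chain batches by feeding each response's next_chunk_id into the chunk_id query parameter of the following request, oldest history last. A hedged client-side sketch; the unstable endpoint path and parameter names follow MSC2716 as implemented at the time, but treat them as assumptions:

    import requests

    def backfill(base_url, token, room_id, prev_event, chunks):
        chunk_id = None
        for chunk in chunks:  # newest-to-oldest historical chunks
            params = {"prev_event": prev_event}
            if chunk_id is not None:
                params["chunk_id"] = chunk_id
            resp = requests.post(
                f"{base_url}/_matrix/client/unstable/org.matrix.msc2716"
                f"/rooms/{room_id}/batch_send",
                params=params,
                headers={"Authorization": f"Bearer {token}"},
                json={"events": chunk["events"],
                      "state_events_at_start": chunk["state"]},
            ).json()
            chunk_id = resp["next_chunk_id"]  # link the next, older chunk here
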
@@ -685,7 +786,7 @@ class PublicRoomListRestServlet(TransactionRestServlet):
         server = parse_string(request, "server", default=None)
         content = parse_json_object_from_request(request)

-        limit = int(content.get("limit", 100))  # type: Optional[int]
+        limit: Optional[int] = int(content.get("limit", 100))
         since_token = content.get("since", None)
         search_filter = content.get("filter", None)

@@ -831,9 +932,7 @@ class RoomMessageListRestServlet(RestServlet):
         filter_str = parse_string(request, "filter", encoding="utf-8")
         if filter_str:
             filter_json = urlparse.unquote(filter_str)
-            event_filter = Filter(
-                json_decoder.decode(filter_json)
-            )  # type: Optional[Filter]
+            event_filter: Optional[Filter] = Filter(json_decoder.decode(filter_json))
             if (
                 event_filter
                 and event_filter.filter_json.get("event_format", "client")
@@ -946,9 +1045,7 @@ class RoomEventContextServlet(RestServlet):
         filter_str = parse_string(request, "filter", encoding="utf-8")
         if filter_str:
             filter_json = urlparse.unquote(filter_str)
-            event_filter = Filter(
-                json_decoder.decode(filter_json)
-            )  # type: Optional[Filter]
+            event_filter: Optional[Filter] = Filter(json_decoder.decode(filter_json))
         else:
             event_filter = None

@@ -14,7 +14,7 @@

 import logging

-from synapse.api.errors import AuthError, SynapseError
+from synapse.api.errors import SynapseError
 from synapse.http.server import respond_with_html
 from synapse.http.servlet import RestServlet

@@ -92,11 +92,6 @@ class AccountValiditySendMailServlet(RestServlet):
         )

     async def on_POST(self, request):
-        if not self.account_validity_renew_by_email_enabled:
-            raise AuthError(
-                403, "Account renewal via email is disabled on this server."
-            )
-
         requester = await self.auth.get_user_by_req(request, allow_expired=True)
         user_id = requester.user.to_string()
         await self.account_activity_handler.send_renewal_email_to_user(user_id)
@@ -59,7 +59,7 @@ class SendToDeviceRestServlet(servlet.RestServlet):
             requester, message_type, content["messages"]
         )

-        response = (200, {})  # type: Tuple[int, dict]
+        response: Tuple[int, dict] = (200, {})
         return response

@@ -117,7 +117,7 @@ class ConsentResource(DirectServeHtmlResource):
         has_consented = False
         public_version = username == ""
         if not public_version:
-            args = request.args  # type: Dict[bytes, List[bytes]]
+            args: Dict[bytes, List[bytes]] = request.args
             userhmac_bytes = parse_bytes_from_args(args, "h", required=True)

             self._check_hash(username, userhmac_bytes)
@@ -154,7 +154,7 @@ class ConsentResource(DirectServeHtmlResource):
         """
         version = parse_string(request, "v", required=True)
         username = parse_string(request, "u", required=True)
-        args = request.args  # type: Dict[bytes, List[bytes]]
+        args: Dict[bytes, List[bytes]] = request.args
         userhmac = parse_bytes_from_args(args, "h", required=True)

         self._check_hash(username, userhmac)
@@ -97,7 +97,7 @@ class RemoteKey(DirectServeJsonResource):
     async def _async_render_GET(self, request):
         if len(request.postpath) == 1:
             (server,) = request.postpath
-            query = {server.decode("ascii"): {}}  # type: dict
+            query: dict = {server.decode("ascii"): {}}
         elif len(request.postpath) == 2:
             server, key_id = request.postpath
             minimum_valid_until_ts = parse_integer(request, "minimum_valid_until_ts")
@@ -141,7 +141,7 @@ class RemoteKey(DirectServeJsonResource):
         time_now_ms = self.clock.time_msec()

         # Note that the value is unused.
-        cache_misses = {}  # type: Dict[str, Dict[str, int]]
+        cache_misses: Dict[str, Dict[str, int]] = {}
         for (server_name, key_id, _), results in cached.items():
             results = [(result["ts_added_ms"], result) for result in results]

@@ -17,7 +17,7 @@ import PIL.Image
 # check for JPEG support.
 try:
     PIL.Image._getdecoder("rgb", "jpeg", None)
-except IOError as e:
+except OSError as e:
     if str(e).startswith("decoder jpeg not available"):
         raise Exception(
             "FATAL: jpeg codec not supported. Install pillow correctly! "
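
Note: since Python 3.3 (PEP 3151), IOError is just an alias of OSError, so this hunk and the next change spelling, not behavior. A quick check:

    assert IOError is OSError  # true on any Python >= 3.3
    try:
        open("/nonexistent/path")
    except OSError:
        # also catches anything historically raised as IOError
        pass
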
@@ -32,7 +32,7 @@ except Exception:
 # check for PNG support.
 try:
     PIL.Image._getdecoder("rgb", "zip", None)
-except IOError as e:
+except OSError as e:
     if str(e).startswith("decoder zip not available"):
         raise Exception(
             "FATAL: zip codec not supported. Install pillow correctly! "
@@ -49,7 +49,7 @@ TEXT_CONTENT_TYPES = [
 def parse_media_id(request: Request) -> Tuple[str, str, Optional[str]]:
     try:
         # The type on postpath seems incorrect in Twisted 21.2.0.
-        postpath = request.postpath  # type: List[bytes]  # type: ignore
+        postpath: List[bytes] = request.postpath  # type: ignore
         assert postpath

         # This allows users to append e.g. /test.png to the URL. Useful for
@@ -78,16 +78,16 @@ class MediaRepository:

         Thumbnailer.set_limits(self.max_image_pixels)

-        self.primary_base_path = hs.config.media_store_path  # type: str
-        self.filepaths = MediaFilePaths(self.primary_base_path)  # type: MediaFilePaths
+        self.primary_base_path: str = hs.config.media_store_path
+        self.filepaths: MediaFilePaths = MediaFilePaths(self.primary_base_path)

         self.dynamic_thumbnails = hs.config.dynamic_thumbnails
         self.thumbnail_requirements = hs.config.thumbnail_requirements

         self.remote_media_linearizer = Linearizer(name="media_remote")

-        self.recently_accessed_remotes = set()  # type: Set[Tuple[str, str]]
-        self.recently_accessed_locals = set()  # type: Set[str]
+        self.recently_accessed_remotes: Set[Tuple[str, str]] = set()
+        self.recently_accessed_locals: Set[str] = set()

         self.federation_domain_whitelist = hs.config.federation_domain_whitelist

@@ -711,7 +711,7 @@ class MediaRepository:

         # We deduplicate the thumbnail sizes by ignoring the cropped versions if
         # they have the same dimensions of a scaled one.
-        thumbnails = {}  # type: Dict[Tuple[int, int, str], str]
+        thumbnails: Dict[Tuple[int, int, str], str] = {}
         for r_width, r_height, r_method, r_type in requirements:
             if r_method == "crop":
                 thumbnails.setdefault((r_width, r_height, r_type), r_method)
@@ -191,7 +191,7 @@ class MediaStorage:

         for provider in self.storage_providers:
             for path in paths:
-                res = await provider.fetch(path, file_info)  # type: Any
+                res: Any = await provider.fetch(path, file_info)
                 if res:
                     logger.debug("Streaming %s from %s", path, provider)
                     return res
@@ -233,7 +233,7 @@ class MediaStorage:
             os.makedirs(dirname)

         for provider in self.storage_providers:
-            res = await provider.fetch(path, file_info)  # type: Any
+            res: Any = await provider.fetch(path, file_info)
             if res:
                 with res:
                     consumer = BackgroundFileConsumer(
@@ -158,12 +158,12 @@ class PreviewUrlResource(DirectServeJsonResource):

         # memory cache mapping urls to an ObservableDeferred returning
         # JSON-encoded OG metadata
-        self._cache = ExpiringCache(
+        self._cache: ExpiringCache[str, ObservableDeferred] = ExpiringCache(
             cache_name="url_previews",
             clock=self.clock,
             # don't spider URLs more often than once an hour
             expiry_ms=ONE_HOUR,
-        )  # type: ExpiringCache[str, ObservableDeferred]
+        )

         if self._worker_run_media_background_jobs:
             self._cleaner_loop = self.clock.looping_call(
@@ -449,7 +449,7 @@ class PreviewUrlResource(DirectServeJsonResource):
         file_info = FileInfo(server_name=None, file_id=file_id, url_cache=True)

         # If this URL can be accessed via oEmbed, use that instead.
-        url_to_download = url  # type: Optional[str]
+        url_to_download: Optional[str] = url
         oembed_url = self._get_oembed_url(url)
         if oembed_url:
             # The result might be a new URL to download, or it might be HTML content.
@@ -777,7 +777,7 @@ def _calc_og(tree: "etree.Element", media_uri: str) -> Dict[str, Optional[str]]:
     # "og:video:height" : "720",
     # "og:video:secure_url": "https://www.youtube.com/v/LXDBoHyjmtw?version=3",

-    og = {}  # type: Dict[str, Optional[str]]
+    og: Dict[str, Optional[str]] = {}
     for tag in tree.xpath("//*/meta[starts-with(@property, 'og:')]"):
         if "content" in tag.attrib:
             # if we've got more than 50 tags, someone is taking the piss
@@ -61,11 +61,11 @@ class UploadResource(DirectServeJsonResource):
                 errcode=Codes.TOO_LARGE,
             )

-        args = request.args  # type: Dict[bytes, List[bytes]]  # type: ignore
+        args: Dict[bytes, List[bytes]] = request.args  # type: ignore
         upload_name_bytes = parse_bytes_from_args(args, "filename")
         if upload_name_bytes:
             try:
-                upload_name = upload_name_bytes.decode("utf8")  # type: Optional[str]
+                upload_name: Optional[str] = upload_name_bytes.decode("utf8")
             except UnicodeDecodeError:
                 raise SynapseError(
                     msg="Invalid UTF-8 filename parameter: %r" % (upload_name), code=400
@@ -89,7 +89,7 @@ class UploadResource(DirectServeJsonResource):
         # TODO(markjh): parse content-dispostion

         try:
-            content = request.content  # type: IO  # type: ignore
+            content: IO = request.content  # type: ignore
             content_uri = await self.media_repo.create_content(
                 media_type, upload_name, content, content_length, requester.user
            )
@@ -118,9 +118,9 @@ class AccountDetailsResource(DirectServeHtmlResource):
             use_display_name = parse_boolean(request, "use_display_name", default=False)

             try:
-                emails_to_use = [
+                emails_to_use: List[str] = [
                     val.decode("utf-8") for val in request.args.get(b"use_email", [])
-                ]  # type: List[str]
+                ]
             except ValueError:
                 raise SynapseError(400, "Query parameter use_email must be utf-8")
         except SynapseError as e: