Mirror of https://github.com/matrix-org/pantalaimon.git
Merge remote-tracking branch 'origin/master' into hs/sync-options
Commit 5a7a8a0f5a
CHANGELOG.md (24 changed lines)
@@ -4,6 +4,30 @@ All notable changes to this project will be documented in this file.
The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).

## 0.10.0 2021-05-14

### Added

- [[#98]] Add the ability to remove old room keys
- [[#95]] Encrypt thumbnails uploaded by a client

### Fixed

- [[#96]] Split out the media cache loading logic to avoid returning the
  whole LRU cache when it shouldn't

[#98]: https://github.com/matrix-org/pantalaimon/pull/98
[#96]: https://github.com/matrix-org/pantalaimon/pull/96
[#95]: https://github.com/matrix-org/pantalaimon/pull/95

## 0.9.3 2021-05-14

### Changed

- [[#73f68c7]] Bump the allowed nio version

[73f68c7]: https://github.com/matrix-org/pantalaimon/commit/73f68c76fb05037bd7fe71688ce39eb1f526a385

## 0.9.2 2021-03-10

### Changed
@@ -12,3 +12,4 @@ Proxy = http://localhost:8080
SSL = False
IgnoreVerification = False
UseKeyring = True
DropOldKeys = False
@@ -51,6 +51,11 @@ This option configures if a proxy instance should use the OS keyring to store
its own access tokens. The access tokens are required for the daemon to resume
operation. If this is set to "No", access tokens are stored in the pantalaimon
database in plaintext. Defaults to "Yes".
.It Cm DropOldKeys
This option configures if a proxy instance should only keep the latest version
of a room key from a certain user around. This effectively means that only newly
incoming messages will be decryptable, the proxy will be unable to decrypt the
room history. Defaults to "No".
.It Cm SearchRequests
This option configures if the proxy should make additional HTTP requests to the
server when clients use the search API endpoint. Some data that is required to
@@ -62,6 +62,13 @@ The following keys are optional in the proxy instance sections:
> operation. If this is set to "No", access tokens are stored in the pantalaimon
> database in plaintext. Defaults to "Yes".

**DropOldKeys**

> This option configures if a proxy instance should only keep the latest version
> of a room key from a certain user around. This effectively means that only newly
> incoming messages will be decryptable, the proxy will be unable to decrypt the
> room history. Defaults to "No".

Aditional to the homeserver section a special section with the name
**Default**
can be used to configure the following values for all homeservers:
@@ -150,4 +157,4 @@ pantalaimon(8)
was written by
Damir Jelić <[poljar@termina.org.uk](mailto:poljar@termina.org.uk)>.

Linux 5.1.3-arch2-1-ARCH - May 8, 2019
Linux 5.11.16-arch1-1 - May 8, 2019
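For illustration, a minimal pantalaimon.conf sketch that enables the new option documented above; the section name, homeserver URL, and port are placeholders, and only DropOldKeys is related to this change:

    [Default]
    LogLevel = Debug

    [local-matrix]
    Homeserver = https://localhost:8448
    ListenAddress = localhost
    ListenPort = 8009
    DropOldKeys = True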
@@ -41,6 +41,7 @@ class PanConfigParser(configparser.ConfigParser):
"DebugEncryption": "False",
"SyncOnStartup": "False",
"StopSyncingTimeout": "600"
"DropOldKeys": "False",
},
converters={
"address": parse_address,

@@ -129,6 +130,8 @@ class ServerConfig:
client has requested a /sync, before stopping a sync.
store_forgetful (bool): Enable or disable discarding of previous sessions
from the store.
drop_old_keys (bool): Should Pantalaimon only keep the most recent
decryption key around.
"""

name = attr.ib(type=str)

@@ -148,6 +151,8 @@ class ServerConfig:
sync_on_startup = attr.ib(type=bool, default=False)
sync_stop_after = attr.ib(type=int, default=600)
store_forgetful = attr.ib(type=bool, default=True)
drop_old_keys = attr.ib(type=bool, default=False)


@attr.s
class PanConfig:

@@ -242,6 +247,7 @@ class PanConfig:
f"already defined before."
)
listen_set.add(listen_tuple)
drop_old_keys = section.getboolean("DropOldKeys")

server_conf = ServerConfig(
section_name,

@@ -259,6 +265,7 @@ class PanConfig:
sync_on_startup,
sync_stop_after,
store_forgetful,
drop_old_keys,
)

self.servers[section_name] = server_conf
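As a side note on how the new default behaves, a small self-contained configparser sketch (the section names are made up); it mirrors the "False" default and the section.getboolean("DropOldKeys") call above:

    import configparser

    # "DropOldKeys" falls back to the defaults when a homeserver section
    # does not set it explicitly.
    parser = configparser.ConfigParser(defaults={"DropOldKeys": "False"})
    parser.read_string(
        "[with-option]\n"
        "DropOldKeys = True\n"
        "\n"
        "[without-option]\n"
        "Homeserver = https://example.org\n"
    )

    print(parser["with-option"].getboolean("DropOldKeys"))     # True
    print(parser["without-option"].getboolean("DropOldKeys"))  # False (default)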
@@ -85,6 +85,7 @@ CORS_HEADERS = {

class NotDecryptedAvailableError(Exception):
"""Exception that signals that no decrypted upload is available"""

pass


@@ -120,7 +121,7 @@ class ProxyDaemon:
self.hostname = self.homeserver.hostname
self.store = PanStore(self.data_dir)
accounts = self.store.load_users(self.name)
self.media_info = self.store.load_media(self.name)
self.media_info = self.store.load_media_cache(self.name)
self.upload_info = self.store.load_upload(self.name)

for user_id, device_id in accounts:

@@ -867,8 +868,9 @@ class ProxyDaemon:
if not upload_info or not media_info:
raise NotDecryptedAvailableError

response, decrypted_file = await self._load_decrypted_file(media_info.mxc_server, media_info.mxc_path,
upload_info.filename)
response, decrypted_file = await self._load_decrypted_file(
media_info.mxc_server, media_info.mxc_path, upload_info.filename
)

if response is None and decrypted_file is None:
raise NotDecryptedAvailableError

@@ -926,10 +928,17 @@ class ProxyDaemon:
# The room isn't encrypted just forward the message.
if not encrypt:
content_msgtype = content.get("msgtype")
if content_msgtype in ["m.image", "m.video", "m.audio", "m.file"] or msgtype == "m.room.avatar":
if (
content_msgtype in ["m.image", "m.video", "m.audio", "m.file"]
or msgtype == "m.room.avatar"
):
try:
content = await self._map_decrypted_uri("url", content, request, client)
return await self.forward_to_web(request, data=json.dumps(content), token=client.access_token)
content = await self._map_decrypted_uri(
"url", content, request, client
)
return await self.forward_to_web(
request, data=json.dumps(content), token=client.access_token
)
except ClientConnectionError as e:
return web.Response(status=500, text=str(e))
except (KeyError, NotDecryptedAvailableError):

@@ -942,8 +951,13 @@ class ProxyDaemon:
async def _send(ignore_unverified=False):
try:
content_msgtype = content.get("msgtype")
if content_msgtype in ["m.image", "m.video", "m.audio", "m.file"] or msgtype == "m.room.avatar":
upload_info, media_info = self._get_upload_and_media_info("url", content)
if (
content_msgtype in ["m.image", "m.video", "m.audio", "m.file"]
or msgtype == "m.room.avatar"
):
upload_info, media_info = self._get_upload_and_media_info(
"url", content
)
if not upload_info or not media_info:
response = await client.room_send(
room_id, msgtype, content, txnid, ignore_unverified

@@ -956,10 +970,21 @@ class ProxyDaemon:
body=await response.transport_response.read(),
)

media_content = media_info.to_content(content, upload_info.mimetype)
media_info.to_content(content, upload_info.mimetype)
if content["info"].get("thumbnail_url", False):
(
thumb_upload_info,
thumb_media_info,
) = self._get_upload_and_media_info(
"thumbnail_url", content["info"]
)
if thumb_upload_info and thumb_media_info:
thumb_media_info.to_thumbnail(
content, thumb_upload_info.mimetype
)

response = await client.room_send(
room_id, msgtype, media_content, txnid, ignore_unverified
room_id, msgtype, content, txnid, ignore_unverified
)
else:
response = await client.room_send(

@@ -1161,14 +1186,22 @@ class ProxyDaemon:
body=await response.transport_response.read(),
)

self.store.save_upload(self.name, response.content_uri, file_name, content_type)
self.store.save_upload(
self.name, response.content_uri, file_name, content_type
)

mxc = urlparse(response.content_uri)
mxc_server = mxc.netloc.strip("/")
mxc_path = mxc.path.strip("/")

logger.info(f"Adding media info for {mxc_server}/{mxc_path} to the store")
media_info = MediaInfo(mxc_server, mxc_path, maybe_keys["key"], maybe_keys["iv"], maybe_keys["hashes"])
media_info = MediaInfo(
mxc_server,
mxc_path,
maybe_keys["key"],
maybe_keys["iv"],
maybe_keys["hashes"],
)
self.store.save_media(self.name, media_info)

return web.Response(
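As a quick aside, the mxc URI handling above boils down to standard urllib parsing; a tiny sketch with an illustrative URI:

    from urllib.parse import urlparse

    # mxc://<server-name>/<media-id>
    mxc = urlparse("mxc://example.org/someMediaId")
    mxc_server = mxc.netloc.strip("/")  # "example.org"
    mxc_path = mxc.path.strip("/")      # "someMediaId"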
@@ -1242,8 +1275,12 @@ class ProxyDaemon:
return self._not_json

try:
content = await self._map_decrypted_uri("avatar_url", content, request, client)
return await self.forward_to_web(request, data=json.dumps(content), token=client.access_token)
content = await self._map_decrypted_uri(
"avatar_url", content, request, client
)
return await self.forward_to_web(
request, data=json.dumps(content), token=client.access_token
)
except ClientConnectionError as e:
return web.Response(status=500, text=str(e))
except (KeyError, NotDecryptedAvailableError):

@@ -1255,7 +1292,9 @@ class ProxyDaemon:
file_name = request.match_info.get("file_name")

try:
response, decrypted_file = await self._load_decrypted_file(server_name, media_id, file_name)
response, decrypted_file = await self._load_decrypted_file(
server_name, media_id, file_name
)

if response is None and decrypted_file is None:
return await self.forward_to_web(request)
@@ -29,6 +29,7 @@ from logbook import StderrHandler
from pantalaimon.config import PanConfig, PanConfigError, parse_log_level
from pantalaimon.daemon import ProxyDaemon
from pantalaimon.log import logger
from pantalaimon.store import KeyDroppingSqliteStore
from pantalaimon.thread_messages import DaemonResponse
from pantalaimon.ui import UI_ENABLED

@@ -47,6 +48,8 @@ def create_dirs(data_dir, conf_dir):

async def init(data_dir, server_conf, send_queue, recv_queue):
"""Initialize the proxy and the http server."""
store_class = KeyDroppingSqliteStore if server_conf.drop_old_keys else None

proxy = ProxyDaemon(
server_conf.name,
server_conf.homeserver,

@@ -56,6 +59,7 @@ async def init(data_dir, server_conf, send_queue, recv_queue):
recv_queue=recv_queue.async_q if recv_queue else None,
proxy=server_conf.proxy.geturl() if server_conf.proxy else None,
ssl=None if server_conf.ssl is True else False,
client_store_class=store_class,
)

# 100 MB max POST size

@@ -101,7 +105,6 @@ async def init(data_dir, server_conf, send_queue, recv_queue):
r"/_matrix/client/r0/profile/{userId}/avatar_url",
proxy.profile,
),

]
)
app.router.add_route("*", "/" + "{proxyPath:.*}", proxy.router)

@@ -259,7 +262,7 @@ async def daemon(context, log_level, debug_encryption, config, data_path):
"connect to pantalaimon."
)
)
@click.version_option(version="0.9.2", prog_name="pantalaimon")
@click.version_option(version="0.10.0", prog_name="pantalaimon")
@click.option(
"--log-level",
type=click.Choice(["error", "warning", "info", "debug"]),
@@ -690,7 +690,7 @@ class PanCtl:
"the pantalaimon daemon."
)
)
@click.version_option(version="0.9.2", prog_name="panctl")
@click.version_option(version="0.10.0", prog_name="panctl")
def main():
loop = asyncio.get_event_loop()
glib_loop = GLib.MainLoop()
@@ -18,10 +18,12 @@ from collections import defaultdict
from typing import Any, Dict, List, Optional, Tuple

import attr
from nio.crypto import TrustState
from nio.crypto import TrustState, GroupSessionStore
from nio.store import (
Accounts,
MegolmInboundSessions,
DeviceKeys,
SqliteStore,
DeviceTrustState,
use_database,
use_database_atomic,

@@ -32,7 +34,6 @@ from nio.store import (
from peewee import SQL, DoesNotExist, ForeignKeyField, Model, SqliteDatabase, TextField
from cachetools import LRUCache


MAX_LOADED_MEDIA = 10000
MAX_LOADED_UPLOAD = 10000

@@ -53,15 +54,23 @@ class MediaInfo:

def to_content(self, content: Dict, mime_type: str) -> Dict[Any, Any]:
content["file"] = {
"v": "v2",
"key": self.key,
"iv": self.iv,
"hashes": self.hashes,
"url": content["url"],
"mimetype": mime_type,
"v": "v2",
"key": self.key,
"iv": self.iv,
"hashes": self.hashes,
"url": content["url"],
"mimetype": mime_type,
}

return content

def to_thumbnail(self, content: Dict, mime_type: str) -> Dict[Any, Any]:
content["info"]["thumbnail_file"] = {
"v": "v2",
"key": self.key,
"iv": self.iv,
"hashes": self.hashes,
"url": content["info"]["thumbnail_url"],
"mimetype": mime_type,
}


@attr.s
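For reference, a rough sketch of the event content these helpers produce for an encrypted m.image upload; every value below is a placeholder and unrelated fields are omitted:

    content = {
        "msgtype": "m.image",
        "body": "example.png",
        "file": {  # added by MediaInfo.to_content
            "v": "v2",
            "key": "<JWK dict>",
            "iv": "<base64 iv>",
            "hashes": {"sha256": "<base64 hash>"},
            "url": "mxc://example.org/originalMediaId",
            "mimetype": "image/png",
        },
        "info": {
            "thumbnail_file": {  # added by MediaInfo.to_thumbnail
                "v": "v2",
                "key": "<JWK dict>",
                "iv": "<base64 iv>",
                "hashes": {"sha256": "<base64 hash>"},
                "url": "mxc://example.org/thumbnailMediaId",
                "mimetype": "image/png",
            },
        },
    }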
@@ -248,32 +257,34 @@ class PanStore:
hashes=media.hashes,
).on_conflict_ignore().execute()

@use_database
def load_media_cache(self, server):
server, _ = Servers.get_or_create(name=server)
media_cache = LRUCache(maxsize=MAX_LOADED_MEDIA)

for i, m in enumerate(server.media):
if i > MAX_LOADED_MEDIA:
break

media = MediaInfo(m.mxc_server, m.mxc_path, m.key, m.iv, m.hashes)
media_cache[(m.mxc_server, m.mxc_path)] = media

return media_cache

@use_database
def load_media(self, server, mxc_server=None, mxc_path=None):
server, _ = Servers.get_or_create(name=server)

if not mxc_path:
media_cache = LRUCache(maxsize=MAX_LOADED_MEDIA)
m = PanMediaInfo.get_or_none(
PanMediaInfo.server == server,
PanMediaInfo.mxc_server == mxc_server,
PanMediaInfo.mxc_path == mxc_path,
)

for i, m in enumerate(server.media):
if i > MAX_LOADED_MEDIA:
break
if not m:
return None

media = MediaInfo(m.mxc_server, m.mxc_path, m.key, m.iv, m.hashes)
media_cache[(m.mxc_server, m.mxc_path)] = media

return media_cache
else:
m = PanMediaInfo.get_or_none(
PanMediaInfo.server == server,
PanMediaInfo.mxc_server == mxc_server,
PanMediaInfo.mxc_path == mxc_path,
)

if not m:
return None

return MediaInfo(m.mxc_server, m.mxc_path, m.key, m.iv, m.hashes)
return MediaInfo(m.mxc_server, m.mxc_path, m.key, m.iv, m.hashes)

@use_database_atomic
def replace_fetcher_task(self, server, pan_user, old_task, new_task):
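A hedged usage sketch of the split introduced by [#96]: load_media_cache keeps the old bulk behaviour, while load_media now answers a single lookup. The data directory, server name, and mxc values below are placeholders:

    from pantalaimon.store import PanStore

    store = PanStore("/path/to/pantalaimon/data")

    # Bulk load: an LRUCache keyed by (mxc_server, mxc_path).
    media_cache = store.load_media_cache("example-homeserver")

    # Targeted load: a single MediaInfo, or None if nothing is stored.
    media_info = store.load_media("example-homeserver", "example.org", "someMediaId")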
@@ -462,4 +473,47 @@ class PanSqliteStore(SqliteStore):
(MegolmInboundSessions.sender_key == session.sender_key) |
(MegolmInboundSessions.room_id == session.room_id)
).execute()
super().save_inbound_group_session(session)
super().save_inbound_group_session(session)

class KeyDroppingSqliteStore(SqliteStore):
@use_database
def save_inbound_group_session(self, session):
"""Save the provided Megolm inbound group session to the database.

Args:
session (InboundGroupSession): The session to save.
"""
account = self._get_account()
assert account

MegolmInboundSessions.delete().where(
MegolmInboundSessions.sender_key == session.sender_key,
MegolmInboundSessions.account == account,
MegolmInboundSessions.room_id == session.room_id,
).execute()

super().save_inbound_group_session(session)

@use_database
def load_inbound_group_sessions(self):
store = super().load_inbound_group_sessions()

return KeyDroppingGroupSessionStore.from_group_session_store(store)


class KeyDroppingGroupSessionStore(GroupSessionStore):
def from_group_session_store(store):
new_store = KeyDroppingGroupSessionStore()
new_store._entries = store._entries

return new_store

def add(self, session) -> bool:
room_id = session.room_id
sender_key = session.sender_key
if session in self._entries[room_id][sender_key].values():
return False

self._entries[room_id][sender_key].clear()
self._entries[room_id][sender_key][session.id] = session
return True
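To illustrate the key-dropping behaviour in isolation, a small sketch that uses a stand-in object instead of a real nio InboundGroupSession; it provides only the attributes the overridden add() touches and assumes GroupSessionStore keeps its sessions in the nested _entries mapping used above:

    from pantalaimon.store import KeyDroppingGroupSessionStore


    class FakeSession:
        def __init__(self, session_id, room_id, sender_key):
            self.id = session_id
            self.room_id = room_id
            self.sender_key = sender_key


    store = KeyDroppingGroupSessionStore()
    store.add(FakeSession("older-key", "!room:example.org", "SENDERKEY"))
    store.add(FakeSession("newer-key", "!room:example.org", "SENDERKEY"))

    # Only the most recently added session for this room/sender survives.
    print(list(store._entries["!room:example.org"]["SENDERKEY"]))  # ['newer-key']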
setup.py (4 changed lines)
@@ -7,7 +7,7 @@ with open("README.md", encoding="utf-8") as f:

setup(
name="pantalaimon",
version="0.9.2",
version="0.10.0",
url="https://github.com/matrix-org/pantalaimon",
author="The Matrix.org Team",
author_email="poljar@termina.org.uk",

@@ -29,7 +29,7 @@ setup(
"cachetools >= 3.0.0",
"prompt_toolkit > 2, < 4",
"typing;python_version<'3.5'",
"matrix-nio[e2e] >= 0.14, < 0.18"
"matrix-nio[e2e] >= 0.14, < 0.19"
],
extras_require={
"ui": [
@@ -153,7 +153,7 @@ class TestClass(object):

def test_media_storage(self, panstore):
server_name = "test"
media_cache = panstore.load_media(server_name)
media_cache = panstore.load_media_cache(server_name)
assert not media_cache

event = self.encrypted_media_event

@@ -171,7 +171,7 @@ class TestClass(object):

panstore.save_media(server_name, media)

media_cache = panstore.load_media(server_name)
media_cache = panstore.load_media_cache(server_name)

assert (mxc_server, mxc_path) in media_cache
media_info = media_cache[(mxc_server, mxc_path)]