Mirror of https://github.com/matrix-org/pantalaimon.git (synced 2025-07-20 13:38:46 -04:00)
Compare commits
No commits in common. "master" and "0.10.3" have entirely different histories.
24 changed files with 189 additions and 435 deletions
.github/workflows/ci.yml (vendored): 45 changed lines

@@ -1,45 +0,0 @@
-name: Build Status
-
-on: [push, pull_request]
-
-jobs:
-  build:
-
-    runs-on: ubuntu-latest
-    strategy:
-      matrix:
-        python-version: ['3.8', '3.9', '3.10']
-
-    steps:
-    - uses: actions/checkout@v2
-    - uses: actions/setup-python@v2
-      with:
-        python-version: ${{ matrix.python-version }}
-    - name: Install Tox and any other packages
-      run: |
-        wget https://gitlab.matrix.org/matrix-org/olm/-/archive/master/olm-master.tar.bz2
-        tar -xvf olm-master.tar.bz2
-        pushd olm-master && make && sudo make PREFIX="/usr" install && popd
-        rm -r olm-master
-        pip install tox
-    - name: Run Tox
-      run: tox -e py
-
-  coverage:
-    runs-on: ubuntu-latest
-
-    steps:
-    - uses: actions/checkout@v2
-    - name: Setup Python
-      uses: actions/setup-python@v2
-      with:
-        python-version: "3.10"
-    - name: Install Tox and any other packages
-      run: |
-        wget https://gitlab.matrix.org/matrix-org/olm/-/archive/master/olm-master.tar.bz2
-        tar -xvf olm-master.tar.bz2
-        pushd olm-master && make && sudo make PREFIX="/usr" install && popd
-        rm -r olm-master
-        pip install tox
-    - name: Run Tox
-      run: tox -e coverage
.github/workflows/docker.yml (vendored): 53 changed lines

@@ -1,53 +0,0 @@
-name: Create and publish a docker image
-
-on:
-  workflow_dispatch:
-  push:
-    branches:
-      - master
-    tags:
-      - '\d+.\d+.\d+'
-  pull_request:
-    branches:
-      - master
-
-env:
-  REGISTRY: ghcr.io
-  IMAGE_NAME: ${{ github.repository }}
-
-jobs:
-  build:
-    runs-on: ubuntu-latest
-
-    permissions:
-      contents: read
-      packages: write
-
-    steps:
-      - name: Checkout repository
-        uses: actions/checkout@v4
-
-      - name: Log in to container registry
-        uses: docker/login-action@v3
-        with:
-          registry: ${{ env.REGISTRY }}
-          username: ${{ github.actor }}
-          password: ${{ secrets.GITHUB_TOKEN }}
-
-      - name: Extract metadata
-        id: meta
-        uses: docker/metadata-action@v5
-        with:
-          images: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}
-          tags: |
-            type=ref,event=branch
-            type=ref,event=tag,prefix=v
-            type=sha
-
-      - name: Build and push docker image
-        uses: docker/build-push-action@v6
-        with:
-          context: .
-          push: ${{ github.event_name != 'pull_request' }}
-          tags: ${{ steps.meta.outputs.tags }}
-          labels: ${{ steps.meta.outputs.labels }}
CHANGELOG.md: 26 changed lines

@@ -4,32 +4,6 @@ All notable changes to this project will be documented in this file.
 The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
 and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).

-## 0.10.5 2022-09-28
-
-### Added
-
-- [[#137]] Proxy the v3 endpoints as well
-
-### Fixed
-
-- [[#130]] Make sure the token variable is declared
-
-[#137]: https://github.com/matrix-org/pantalaimon/pull/137
-[#130]: https://github.com/matrix-org/pantalaimon/pull/130
-
-## 0.10.4 2022-02-04
-
-### Fixed
-
-- [[#122]] Fix the GLib import for panctl on some distributions
-- [[#120]] Don't use strip to filter Bearer from the auth header
-- [[#118]] Don't use the raw path if we need to sanitize filters, fixing room
-  history fetching for Fractal
-
-[#122]: https://github.com/matrix-org/pantalaimon/pull/122
-[#120]: https://github.com/matrix-org/pantalaimon/pull/120
-[#118]: https://github.com/matrix-org/pantalaimon/pull/118
-
 ## 0.10.3 2021-09-02

 ### Fixed
README.md: 56 changed lines

@@ -25,19 +25,11 @@ Installing pantalaimon works like usually with python packages:

     python setup.py install

-or you can use `pip` and install it with:
-```
-pip install .[ui]
-```
-
-It is recommended that you create a virtual environment first or install dependencies
-via your package manager. They are usually found with `python-<package-name>`.
-
 Pantalaimon can also be found on pypi:

     pip install pantalaimon

-Pantalaimon contains a dbus based UI that can be used to control the daemon.
+Pantalaimon contains a dbus based UI that can be used to controll the daemon.
 The dbus based UI is completely optional and needs to be installed with the
 daemon:


@@ -85,10 +77,6 @@ docker build -t pantalaimon .
 # volume below is for where Pantalaimon should dump some data.
 docker run -it --rm -v /path/to/pantalaimon/dir:/data -p 8008:8008 pantalaimon
 ```
-The Docker image in the above example can alternatively be built straight from any branch or tag without the need to clone the repo, just by using this syntax:
-```bash
-docker build -t pantalaimon github.com/matrix-org/pantalaimon#master
-```

 An example `pantalaimon.conf` for Docker is:
 ```conf

@@ -108,7 +96,7 @@ IgnoreVerification = True
 Usage
 =====

-While pantalaimon is a daemon, it is meant to be run as the same user as the app it is proxying for. It won't
+While pantalaimon is a daemon, it is meant to be run as your own user. It won't
 verify devices for you automatically, unless configured to do so, and requires
 user interaction to verify, ignore or blacklist devices. A more complete
 description of Pantalaimon can be found in the [man page](docs/man/pantalaimon.8.md).

@@ -119,7 +107,7 @@ specifies one or more homeservers for pantalaimon to connect to.
 A minimal pantalaimon configuration looks like this:
 ```dosini
 [local-matrix]
-Homeserver = https://localhost:443
+Homeserver = https://localhost:8448
 ListenAddress = localhost
 ListenPort = 8009
 ```

@@ -148,41 +136,3 @@ To control the daemon an interactive utility is provided in the form of
 `panctl` can be used to verify, blacklist or ignore devices, import or export
 session keys, or to introspect devices of users that we share encrypted rooms
 with.
-
-### Setup
-This is all coming from an excellent comment that you can find [here](https://github.com/matrix-org/pantalaimon/issues/154#issuecomment-1951591191).
-
-
-
-1) Ensure you have an OS keyring installed. In my case I installed `gnome-keyring`. You may also want a GUI like `seahorse` to inspect the keyring. (pantalaimon will work without a keyring but your client will have to log in with the password every time `pantalaimon` is restarted, instead of being able to reuse the access token from the previous successful login.)
-
-2) In case you have prior attempts, clean the slate by deleting the `~/.local/share/pantalaimon` directory.
-
-3) Start `pantalaimon`.
-
-4) Connect a client to the `ListenAddress:ListenPort` you specified in `pantalaimon.conf`, eg to `127.0.0.1:8009`, using the same username and password you would've used to login to your homeserver directly.
-
-5) The login should succeed, but at this point all encrypted messages will fail to decrypt. This is fine.
-
-6) Start another client that you were already using for your encrypted chats previously. In my case this was `app.element.io`, so the rest of the steps here assume that.
-
-7) Run `panctl`. At the prompt, run `start-verification <user ID> <user ID> <Element's device ID>`. `<user ID>` here is the full user ID like `@arnavion:arnavion.dev`. If you only have the one Element session, `panctl` will show you the device ID as an autocomplete hint so you don't have to look it up. If you do need to look it up, go to Element -> profile icon -> All Settings -> Sessions, expand the "Current session" item, and the "Session ID" is the device ID.
-
-8) In Element you will see a popup "Incoming Verification Request". Click "Continue". It will change to a popup containing some emojis, and `panctl` will print the same emojis. Click the "They match" button. It will now change to a popup like "Waiting for other client to confirm..."
-
-9) In `panctl`, run `confirm-verification <user ID> <user ID> <Element's device ID>`, ie the same command as before but with `confirm-verification` instead of `start-verification`.
-
-10) At this point, if you look at all your sessions in Element (profile icon -> All Settings -> Sessions), you should see "pantalaimon" in the "Other sessions" list as a "Verified" session.
-
-11) Export the E2E room keys that Element was using via profile icon -> Security & Privacy -> Export E2E room keys. Pick any password and then save the file to some path.
-
-12) Back in `panctl`, run `import-keys <user ID> <path of file> <password you used to encrypt the file>`. After a few seconds, in the output of `pantalaimon`, you should see a log like `INFO: pantalaimon: Successfully imported keys for <user ID> from <path of file>`.
-
-13) Close and restart the client you had used in step 5, ie the one you want to connect to `pantalaimon`. Now, finally, you should be able to see the encrypted chats be decrypted.
-
-14) Delete the E2E room keys backup file from step 12. You don't need it any more.
-
-
-15) If in step 11 you had other unverified sessions from pantalaimon from your prior attempts, you can sign out of them too.
-
-You will probably have to repeat steps 11-14 any time you start a new encrypted chat in Element.
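The setup walkthrough removed above condenses to a short `panctl` session. The sketch below only restates the commands named in those steps; the user ID, device ID, export file path, and passphrase are placeholders to substitute with your own values.

```
# Inside the panctl prompt (step 7 onwards); <user ID> is the full Matrix ID,
# e.g. @arnavion:arnavion.dev, and <device ID> is the Element session being verified.
start-verification <user ID> <user ID> <device ID>
# Confirm the emoji match in Element, then finish the verification:
confirm-verification <user ID> <user ID> <device ID>
# After exporting the E2E room keys from Element to a file (steps 11-12):
import-keys <user ID> <path of file> <passphrase>
```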
@@ -51,7 +51,7 @@ The message will be sent away after all devices are marked as ignored.
 In contrast to the
 .Cm send-anyways
 command this command cancels the sending of a message to an encrypted room with
-unverified devices and gives the user the opportunity to verify or blacklist
+unverified devices and gives the user the oportunity to verify or blacklist
 devices as they see fit.
 .It Cm import-keys Ar pan-user Ar file Ar passphrase
 Import end-to-end encryption keys from the given file for the given pan-user.

@@ -74,7 +74,7 @@ are as follows:
 > In contrast to the
 > **send-anyways**
 > command this command cancels the sending of a message to an encrypted room with
-> unverified devices and gives the user the opportunity to verify or blacklist
+> unverified devices and gives the user the oportunity to verify or blacklist
 > devices as they see fit.

 **import-keys** *pan-user* *file* *passphrase*

@@ -86,7 +86,7 @@ The amount of time to wait between room message history requests to the
 Homeserver in ms. Defaults to 3000.
 .El
 .Pp
-Additional to the homeserver section a special section with the name
+Aditional to the homeserver section a special section with the name
 .Cm Default
 can be used to configure the following values for all homeservers:
 .Cm ListenAddress ,

@@ -69,7 +69,7 @@ The following keys are optional in the proxy instance sections:
 > incoming messages will be decryptable, the proxy will be unable to decrypt the
 > room history. Defaults to "No".

-Additional to the homeserver section a special section with the name
+Aditional to the homeserver section a special section with the name
 **Default**
 can be used to configure the following values for all homeservers:
 **ListenAddress**,
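For orientation, the `Default` section mentioned in the two man-page excerpts above uses the same dosini format as the README configuration. This is only a sketch built from keys that appear elsewhere in this diff (`ListenAddress`, `ListenPort`); adjust the values for your setup.

```dosini
; Sketch only: settings in [Default] apply to every configured homeserver.
[Default]
ListenAddress = localhost
ListenPort = 8009
```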
@@ -24,7 +24,7 @@ behalf of the client.
 is supposed to run as your own user and listen to connections on a
 non-privileged port. A client needs to log in using the standard Matrix HTTP
 calls to register itself to the daemon, such a registered user is called a pan
-user and will have its own sync loop to keep up with the server. Multiple matrix
+user and will have it's own sync loop to keep up with the server. Multiple matrix
 clients can connect and use the same pan user.

 If user interaction is required
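To make the registration flow described above concrete, here is a hypothetical sketch of the login call a client sends through the proxy, assuming the proxy listens on localhost:8009 as in the minimal configuration earlier; the JSON body mirrors the one used in the test fixtures later in this diff.

```
curl -X POST http://localhost:8009/_matrix/client/r0/login \
  -H 'Content-Type: application/json' \
  -d '{"type": "m.login.password", "user": "example", "password": "wordpass"}'
```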
@@ -16,6 +16,7 @@ import asyncio
 import os
 from collections import defaultdict
 from pprint import pformat
+from typing import Any, Dict, Optional
 from urllib.parse import urlparse

 from aiohttp.client_exceptions import ClientConnectionError

@@ -134,7 +135,7 @@ class InvalidLimit(Exception):
 class SqliteQStore(SqliteStore):
     def _create_database(self):
         return SqliteQueueDatabase(
-            self.database_path, pragmas=(("foreign_keys", 1), ("secure_delete", 1))
+            self.database_path, pragmas=(("foregign_keys", 1), ("secure_delete", 1))
         )

     def close(self):

@@ -553,7 +554,6 @@ class PanClient(AsyncClient):
                 full_state=True,
                 since=next_batch,
                 loop_sleep_time=loop_sleep_time,
-                set_presence="offline",
             )
         )
         self.task = task

@@ -708,6 +708,7 @@ class PanClient(AsyncClient):
         for share in self.get_active_key_requests(
             message.user_id, message.device_id
         ):
+
             continued = True

             if not self.continue_key_share(share):

@@ -734,7 +735,7 @@ class PanClient(AsyncClient):
             pass

         response = (
-            f"Successfully continued the key requests from "
+            f"Succesfully continued the key requests from "
             f"{message.user_id} via {message.device_id}"
         )
         ret = "m.ok"

@@ -759,7 +760,7 @@ class PanClient(AsyncClient):

         if cancelled:
             response = (
-                f"Successfully cancelled key requests from "
+                f"Succesfully cancelled key requests from "
                 f"{message.user_id} via {message.device_id}"
             )
             ret = "m.ok"

@@ -809,9 +810,8 @@ class PanClient(AsyncClient):

         if not isinstance(event, MegolmEvent):
             logger.warn(
-                "Encrypted event is not a megolm event:" "\n{}".format(
-                    pformat(event_dict)
-                )
+                "Encrypted event is not a megolm event:"
+                "\n{}".format(pformat(event_dict))
             )
             return False

@@ -835,9 +835,9 @@ class PanClient(AsyncClient):
             decrypted_event.source["content"]["url"] = decrypted_event.url

             if decrypted_event.thumbnail_url:
-                decrypted_event.source["content"]["info"]["thumbnail_url"] = (
-                    decrypted_event.thumbnail_url
-                )
+                decrypted_event.source["content"]["info"][
+                    "thumbnail_url"
+                ] = decrypted_event.thumbnail_url

             event_dict.update(decrypted_event.source)
             event_dict["decrypted"] = True
@@ -31,7 +31,7 @@ class PanConfigParser(configparser.ConfigParser):
                 "IgnoreVerification": "False",
                 "ListenAddress": "localhost",
                 "ListenPort": "8009",
-                "LogLevel": "warning",
+                "LogLevel": "warnig",
                 "Notifications": "on",
                 "UseKeyring": "yes",
                 "SearchRequests": "off",

@@ -113,7 +113,7 @@ class ServerConfig:
            E2E encrypted messages.
        keyring (bool): Enable or disable the OS keyring for the storage of
            access tokens.
-       search_requests (bool): Enable or disable additional Homeserver requests
+       search_requests (bool): Enable or disable aditional Homeserver requests
            for the search API endpoint.
        index_encrypted_only (bool): Enable or disable message indexing fro
            non-encrypted rooms.

@@ -186,6 +186,7 @@ class PanConfig:

         try:
             for section_name, section in config.items():
+
                 if section_name == "Default":
                     continue

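The parser defaults quoted above map one-to-one onto per-homeserver configuration keys. As a hedged illustration only, a homeserver section overriding a few of them could look like the sketch below; the key names come from the hunk above and the README, while the values are illustrative.

```dosini
[local-matrix]
Homeserver = https://localhost:8448
ListenAddress = localhost
ListenPort = 8009
LogLevel = info
UseKeyring = yes
IgnoreVerification = False
SearchRequests = off
Notifications = on
```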
@@ -125,8 +125,6 @@ class ProxyDaemon:
         self.upload_info = self.store.load_upload(self.name)

         for user_id, device_id in accounts:
-            token = None
-
             if self.conf.keyring:
                 try:
                     token = keyring.get_password(

@@ -227,8 +225,7 @@ class ProxyDaemon:

         if ret:
             msg = (
-                f"Device {device.id} of user "
-                f"{device.user_id} successfully verified."
+                f"Device {device.id} of user " f"{device.user_id} succesfully verified."
             )
             await client.send_update_device(device)
         else:

@@ -243,7 +240,7 @@ class ProxyDaemon:
         if ret:
             msg = (
                 f"Device {device.id} of user "
-                f"{device.user_id} successfully unverified."
+                f"{device.user_id} succesfully unverified."
             )
             await client.send_update_device(device)
         else:

@@ -258,7 +255,7 @@ class ProxyDaemon:
         if ret:
             msg = (
                 f"Device {device.id} of user "
-                f"{device.user_id} successfully blacklisted."
+                f"{device.user_id} succesfully blacklisted."
             )
             await client.send_update_device(device)
         else:

@@ -275,7 +272,7 @@ class ProxyDaemon:
         if ret:
             msg = (
                 f"Device {device.id} of user "
-                f"{device.user_id} successfully unblacklisted."
+                f"{device.user_id} succesfully unblacklisted."
             )
             await client.send_update_device(device)
         else:

@@ -310,6 +307,7 @@ class ProxyDaemon:
                 DeviceUnblacklistMessage,
             ),
         ):
+
             device = client.device_store[message.user_id].get(message.device_id, None)

             if not device:

@@ -358,7 +356,7 @@ class ProxyDaemon:

         else:
             info_msg = (
-                f"Successfully exported keys for {client.user_id} " f"to {path}"
+                f"Succesfully exported keys for {client.user_id} " f"to {path}"
             )
             logger.info(info_msg)
             await self.send_response(

@@ -381,7 +379,7 @@ class ProxyDaemon:
             )
         else:
             info_msg = (
-                f"Successfully imported keys for {client.user_id} " f"from {path}"
+                f"Succesfully imported keys for {client.user_id} " f"from {path}"
             )
             logger.info(info_msg)
             await self.send_response(

@@ -420,9 +418,7 @@ class ProxyDaemon:
         access_token = request.query.get("access_token", "")

         if not access_token:
-            access_token = request.headers.get("Authorization", "").replace(
-                "Bearer ", "", 1
-            )
+            access_token = request.headers.get("Authorization", "").strip("Bearer ")

         return access_token

@@ -464,7 +460,6 @@ class ProxyDaemon:
         data=None,  # type: bytes
         session=None,  # type: aiohttp.ClientSession
         token=None,  # type: str
-        use_raw_path=True,  # type: bool
     ):
         # type: (...) -> aiohttp.ClientResponse
         """Forward the given request to our configured homeserver.

@@ -479,10 +474,6 @@ class ProxyDaemon:
                 should be used to forward the request.
             token (str, optional): The access token that should be used for the
                 request.
-            use_raw_path (str, optional): Should the raw path be used from the
-                request or should we use the path and re-encode it. Some may need
-                their filters to be sanitized, this requires the parsed version of
-                the path, otherwise we leave the path as is.
         """
         if not session:
             if not self.default_session:

@@ -491,7 +482,7 @@ class ProxyDaemon:

         assert session

-        path = request.raw_path if use_raw_path else urllib.parse.quote(request.path)
+        path = request.raw_path
         method = request.method

         headers = CIMultiDict(request.headers)

@@ -616,9 +607,7 @@ class ProxyDaemon:
             await pan_client.close()
             return

-        logger.info(
-            f"Successfully started new background sync client for " f"{user_id}"
-        )
+        logger.info(f"Succesfully started new background sync client for " f"{user_id}")

        await self.send_ui_message(
            UpdateUsersMessage(self.name, user_id, pan_client.device_id)

@@ -684,7 +673,7 @@ class ProxyDaemon:

         if user_id and access_token:
             logger.info(
-                f"User: {user} successfully logged in, starting "
+                f"User: {user} succesfully logged in, starting "
                 f"a background sync client."
             )
             await self.start_pan_client(

@@ -735,7 +724,7 @@ class ProxyDaemon:
             return decryption_method(body, ignore_failures=False)
         except EncryptionError:
             logger.info("Error decrypting sync, waiting for next pan " "sync")
-            (await client.synced.wait(),)
+            await client.synced.wait(),
             logger.info("Pan synced, retrying decryption.")

             try:

@@ -772,7 +761,7 @@ class ProxyDaemon:

         try:
             response = await self.forward_request(
-                request, params=query, token=client.access_token, use_raw_path=False
+                request, params=query, token=client.access_token
             )
         except ClientConnectionError as e:
             return web.Response(status=500, text=str(e))

@@ -795,27 +784,6 @@ class ProxyDaemon:
             body=await response.read(),
         )

-    async def createRoom(self, request):
-        try:
-            content = await request.json()
-        except (JSONDecodeError, ContentTypeError):
-            return self._not_json
-
-        invite = content.get("invite", ())
-        if invite:
-            access_token = self.get_access_token(request)
-
-            if not access_token:
-                return self._missing_token
-
-            client = await self._find_client(access_token)
-            if not client:
-                return self._unknown_token
-
-            client.users_for_key_query.update(invite)
-
-        return await self.forward_to_web(request)
-
     async def messages(self, request):
         access_token = self.get_access_token(request)

@@ -841,9 +809,7 @@ class ProxyDaemon:
             query["filter"] = request_filter

         try:
-            response = await self.forward_request(
-                request, params=query, use_raw_path=False
-            )
+            response = await self.forward_request(request, params=query)
         except ClientConnectionError as e:
             return web.Response(status=500, text=str(e))

@@ -940,7 +906,7 @@ class ProxyDaemon:
         return web.json_response(
             {
                 "errcode": "M_FORBIDDEN",
-                "error": "You do not have permission to send the event.",
+                "error": "You do not have permission to send the event."
             },
             headers=CORS_HEADERS,
             status=403,

@@ -982,11 +948,7 @@ class ProxyDaemon:
         ):
             try:
                 content["url"] = await self._decrypt_uri(content["url"], client)
-                if (
-                    "info" in content
-                    and "thumbnail_url" in content["info"]
-                    and content["info"]["thumbnail_url"] is not None
-                ):
+                if "info" in content and "thumbnail_url" in content["info"]:
                     content["info"]["thumbnail_url"] = await self._decrypt_uri(
                         content["info"]["thumbnail_url"], client
                     )

@@ -1056,7 +1018,7 @@ class ProxyDaemon:
         except SendRetryError as e:
             return web.Response(status=503, text=str(e))

-        # Acquire a semaphore here so we only send out one
+        # Aquire a semaphore here so we only send out one
         # UnverifiedDevicesSignal
         sem = client.send_semaphores[room_id]

@@ -1296,9 +1258,7 @@ class ProxyDaemon:
         client = next(iter(self.pan_clients.values()))

         try:
-            response = await client.download(
-                server_name=server_name, media_id=media_id, filename=file_name
-            )
+            response = await client.download(server_name, media_id, file_name)
         except ClientConnectionError as e:
             raise e

@@ -23,6 +23,7 @@ if False:
     import json
     import os
     from functools import partial
+    from typing import Any, Dict, List, Optional, Tuple

     import attr
     import tantivy

@@ -229,6 +230,7 @@ if False:
             )

             for message in query:
+
                 event = message.event

                 event_dict = {

@@ -499,5 +501,6 @@ if False:

         return search_result

+
 else:
     INDEXING_ENABLED = False

@@ -15,6 +15,7 @@
 import asyncio
 import os
 import signal
+from typing import Optional

 import click
 import janus

@@ -22,7 +23,7 @@ import keyring
 import logbook
 import nio
 from aiohttp import web
-from platformdirs import user_config_dir, user_data_dir
+from appdirs import user_config_dir, user_data_dir
 from logbook import StderrHandler

 from pantalaimon.config import PanConfig, PanConfigError, parse_log_level

@@ -67,47 +68,28 @@ async def init(data_dir, server_conf, send_queue, recv_queue):
     app.add_routes(
         [
             web.post("/_matrix/client/r0/login", proxy.login),
-            web.post("/_matrix/client/v3/login", proxy.login),
             web.get("/_matrix/client/r0/sync", proxy.sync),
-            web.get("/_matrix/client/v3/sync", proxy.sync),
-            web.post("/_matrix/client/r0/createRoom", proxy.createRoom),
-            web.post("/_matrix/client/v3/createRoom", proxy.createRoom),
             web.get("/_matrix/client/r0/rooms/{room_id}/messages", proxy.messages),
-            web.get("/_matrix/client/v3/rooms/{room_id}/messages", proxy.messages),
             web.put(
                 r"/_matrix/client/r0/rooms/{room_id}/send/{event_type}/{txnid}",
                 proxy.send_message,
             ),
-            web.put(
-                r"/_matrix/client/v3/rooms/{room_id}/send/{event_type}/{txnid}",
-                proxy.send_message,
-            ),
             web.post(
                 r"/_matrix/client/r0/rooms/{room_id}/send/{event_type}",
                 proxy.send_message,
             ),
             web.post("/_matrix/client/r0/user/{user_id}/filter", proxy.filter),
-            web.post("/_matrix/client/v3/user/{user_id}/filter", proxy.filter),
             web.post("/.well-known/matrix/client", proxy.well_known),
             web.get("/.well-known/matrix/client", proxy.well_known),
             web.post("/_matrix/client/r0/search", proxy.search),
-            web.post("/_matrix/client/v3/search", proxy.search),
             web.options("/_matrix/client/r0/search", proxy.search_opts),
-            web.options("/_matrix/client/v3/search", proxy.search_opts),
             web.get(
                 "/_matrix/media/v1/download/{server_name}/{media_id}", proxy.download
             ),
-            web.get(
-                "/_matrix/media/v3/download/{server_name}/{media_id}", proxy.download
-            ),
             web.get(
                 "/_matrix/media/v1/download/{server_name}/{media_id}/{file_name}",
                 proxy.download,
             ),
-            web.get(
-                "/_matrix/media/v3/download/{server_name}/{media_id}/{file_name}",
-                proxy.download,
-            ),
             web.get(
                 "/_matrix/media/r0/download/{server_name}/{media_id}", proxy.download
             ),

@@ -119,18 +101,10 @@ async def init(data_dir, server_conf, send_queue, recv_queue):
                 r"/_matrix/media/r0/upload",
                 proxy.upload,
             ),
-            web.post(
-                r"/_matrix/media/v3/upload",
-                proxy.upload,
-            ),
             web.put(
                 r"/_matrix/client/r0/profile/{userId}/avatar_url",
                 proxy.profile,
             ),
-            web.put(
-                r"/_matrix/client/v3/profile/{userId}/avatar_url",
-                proxy.profile,
-            ),
         ]
     )
     app.router.add_route("*", "/" + "{proxyPath:.*}", proxy.router)

@@ -288,7 +262,7 @@ async def daemon(context, log_level, debug_encryption, config, data_path):
         "connect to pantalaimon."
     )
 )
-@click.version_option(version="0.10.5", prog_name="pantalaimon")
+@click.version_option(version="0.10.2", prog_name="pantalaimon")
 @click.option(
     "--log-level",
     type=click.Choice(["error", "warning", "info", "debug"]),
@@ -20,16 +20,10 @@ import sys
 from collections import defaultdict
 from itertools import zip_longest
 from typing import List
-from shlex import split

 import attr
 import click
-
-try:
-    from gi.repository import GLib
-except ModuleNotFoundError:
-    from pgi.repository import GLib
-
+from gi.repository import GLib
 from prompt_toolkit import __version__ as ptk_version
 from prompt_toolkit import HTML, PromptSession, print_formatted_text
 from prompt_toolkit.completion import Completer, Completion, PathCompleter

@@ -465,7 +459,7 @@ class PanCtl:
     def sas_done(self, pan_user, user_id, device_id, _):
         print(
             f"Device {device_id} of user {user_id}"
-            f" successfully verified for pan user {pan_user}."
+            f" succesfully verified for pan user {pan_user}."
         )

     def show_sas_invite(self, pan_user, user_id, device_id, _):

@@ -590,7 +584,7 @@ class PanCtl:
             parser = PanctlParser(self.commands)

             try:
-                args = parser.parse_args(split(result, posix=False))
+                args = parser.parse_args(result.split())
             except ParseError:
                 continue

@@ -696,9 +690,9 @@ class PanCtl:
     "the pantalaimon daemon."
     )
 )
-@click.version_option(version="0.10.5", prog_name="panctl")
+@click.version_option(version="0.10.3", prog_name="panctl")
 def main():
-    loop = asyncio.new_event_loop()
+    loop = asyncio.get_event_loop()
     glib_loop = GLib.MainLoop()

     try:
@@ -15,7 +15,7 @@
 import json
 import os
 from collections import defaultdict
-from typing import Any, Dict
+from typing import Any, Dict, List, Optional, Tuple

 import attr
 from nio.crypto import TrustState, GroupSessionStore

@@ -431,6 +431,7 @@ class PanStore:
         device_store = defaultdict(dict)

         for d in account.device_keys:
+
             if d.deleted:
                 continue

@@ -470,14 +470,14 @@ if UI_ENABLED:
             self.bus.publish("org.pantalaimon1", self.control_if, self.device_if)

         def unverified_notification(self, message):
-            notification = notify2.Notification(
+            notificaton = notify2.Notification(
                 "Unverified devices.",
                 message=(
                     f"There are unverified devices in the room "
                     f"{message.room_display_name}."
                 ),
             )
-            notification.set_category("im")
+            notificaton.set_category("im")

             def send_cb(notification, action_key, user_data):
                 message = user_data

@@ -488,20 +488,20 @@ if UI_ENABLED:
                 self.control_if.CancelSending(message.pan_user, message.room_id)

             if "actions" in notify2.get_server_caps():
-                notification.add_action("send", "Send anyways", send_cb, message)
-                notification.add_action("cancel", "Cancel sending", cancel_cb, message)
+                notificaton.add_action("send", "Send anyways", send_cb, message)
+                notificaton.add_action("cancel", "Cancel sending", cancel_cb, message)

-            notification.show()
+            notificaton.show()

         def sas_invite_notification(self, message):
-            notification = notify2.Notification(
+            notificaton = notify2.Notification(
                 "Key verification invite",
                 message=(
                     f"{message.user_id} via {message.device_id} has started "
                     f"a key verification process."
                 ),
             )
-            notification.set_category("im")
+            notificaton.set_category("im")

             def accept_cb(notification, action_key, user_data):
                 message = user_data

@@ -516,17 +516,17 @@ if UI_ENABLED:
                 )

             if "actions" in notify2.get_server_caps():
-                notification.add_action("accept", "Accept", accept_cb, message)
-                notification.add_action("cancel", "Cancel", cancel_cb, message)
+                notificaton.add_action("accept", "Accept", accept_cb, message)
+                notificaton.add_action("cancel", "Cancel", cancel_cb, message)

-            notification.show()
+            notificaton.show()

         def sas_show_notification(self, message):
             emojis = [x[0] for x in message.emoji]

             emoji_str = " ".join(emojis)

-            notification = notify2.Notification(
+            notificaton = notify2.Notification(
                 "Short authentication string",
                 message=(
                     f"Short authentication string for the key verification of"

@@ -534,7 +534,7 @@ if UI_ENABLED:
                     f"{emoji_str}"
                 ),
             )
-            notification.set_category("im")
+            notificaton.set_category("im")

             def confirm_cb(notification, action_key, user_data):
                 message = user_data

@@ -549,21 +549,21 @@ if UI_ENABLED:
                 )

             if "actions" in notify2.get_server_caps():
-                notification.add_action("confirm", "Confirm", confirm_cb, message)
-                notification.add_action("cancel", "Cancel", cancel_cb, message)
+                notificaton.add_action("confirm", "Confirm", confirm_cb, message)
+                notificaton.add_action("cancel", "Cancel", cancel_cb, message)

-            notification.show()
+            notificaton.show()

         def sas_done_notification(self, message):
-            notification = notify2.Notification(
+            notificaton = notify2.Notification(
                 "Device successfully verified.",
                 message=(
                     f"Device {message.device_id} of user {message.user_id} "
                     f"successfully verified."
                 ),
             )
-            notification.set_category("im")
-            notification.show()
+            notificaton.set_category("im")
+            notificaton.show()

         def message_callback(self):
             try:
setup.py: 37 changed lines

@@ -1,34 +1,18 @@
 # -*- coding: utf-8 -*-

 from setuptools import find_packages, setup
-import os

 with open("README.md", encoding="utf-8") as f:
     long_description = f.read()

-
-def get_manpages():
-    """
-    This function goes and gets all the man pages so they can be installed when
-    the package is installed.
-    """
-    man_pages = []
-    for root, _, files in os.walk("docs/man"):
-        for file in files:
-            if file.endswith((".1", ".5", ".8")):
-                man_section = file.split(".")[-1]
-                dest_dir = os.path.join("share", "man", f"man{man_section}")
-                man_pages.append((dest_dir, [os.path.join(root, file)]))
-    return man_pages
-
-
 setup(
     name="pantalaimon",
-    version="0.10.5",
+    version="0.10.3",
     url="https://github.com/matrix-org/pantalaimon",
     author="The Matrix.org Team",
     author_email="poljar@termina.org.uk",
-    description=("A Matrix proxy daemon that adds E2E encryption capabilities."),
+    description=("A Matrix proxy daemon that adds E2E encryption "
+                 "capabilities."),
     long_description=long_description,
     long_description_content_type="text/markdown",
     license="Apache License, Version 2.0",

@@ -36,7 +20,7 @@ setup(
     install_requires=[
         "attrs >= 19.3.0",
         "aiohttp >= 3.6, < 4.0",
-        "platformdirs >= 4.3.6",
+        "appdirs >= 1.4.4",
         "click >= 7.1.2",
         "keyring >= 21.2.1",
         "logbook >= 1.5.3",

@@ -45,22 +29,19 @@ setup(
         "cachetools >= 3.0.0",
         "prompt_toolkit > 2, < 4",
         "typing;python_version<'3.5'",
-        "matrix-nio[e2e] >= 0.24, < 0.25.2",
+        "matrix-nio[e2e] >= 0.18, < 0.19"
     ],
     extras_require={
         "ui": [
             "dbus-python >= 1.2, < 1.3",
-            "PyGObject >= 3.46, < 3.50",
+            "PyGObject >= 3.36, < 3.39",
             "pydbus >= 0.6, < 0.7",
             "notify2 >= 0.3, < 0.4",
         ]
     },
     entry_points={
-        "console_scripts": [
-            "pantalaimon=pantalaimon.main:main",
-            "panctl=pantalaimon.panctl:main",
-        ],
+        "console_scripts": ["pantalaimon=pantalaimon.main:main",
+                            "panctl=pantalaimon.panctl:main"],
     },
-    zip_safe=False,
-    data_files=get_manpages(),
+    zip_safe=False
 )
@@ -1,9 +1,8 @@
-pytest==8.3.5
-pytest-flake8==1.2.2
-pytest-isort==4.0.0
-pytest-cov==5.0.0
-faker<=37.1.0
-aiohttp<=3.11.16
-pytest-aiohttp<=1.1.0
-pytest-asyncio<=0.26.0
-aioresponses<=0.7.8
+pytest
+pytest-flake8
+pytest-isort
+pytest-cov
+faker
+aiohttp
+pytest-aiohttp
+aioresponses
@@ -34,9 +34,11 @@ class Provider(BaseProvider):
     def client(self):
         return ClientInfo(faker.mx_id(), faker.access_token())

+
     def avatar_url(self):
         return "mxc://{}/{}#auto".format(
-            faker.hostname(), "".join(choices(ascii_letters) for i in range(24))
+            faker.hostname(),
+            "".join(choices(ascii_letters) for i in range(24))
         )

     def olm_key_pair(self):

@@ -54,6 +56,7 @@ class Provider(BaseProvider):
     )


+
 faker.add_provider(Provider)


@@ -77,7 +80,13 @@ def tempdir():
 @pytest.fixture
 def panstore(tempdir):
     for _ in range(10):
-        store = SqliteStore(faker.mx_id(), faker.device_id(), tempdir, "", "pan.db")
+        store = SqliteStore(
+            faker.mx_id(),
+            faker.device_id(),
+            tempdir,
+            "",
+            "pan.db"
+        )
         account = OlmAccount()
         store.save_account(account)

@@ -121,23 +130,21 @@ async def pan_proxy_server(tempdir, aiohttp_server):
         recv_queue=ui_queue.async_q,
         proxy=None,
         ssl=False,
-        client_store_class=SqliteStore,
+        client_store_class=SqliteStore
     )

-    app.add_routes(
-        [
+    app.add_routes([
         web.post("/_matrix/client/r0/login", proxy.login),
         web.get("/_matrix/client/r0/sync", proxy.sync),
         web.get("/_matrix/client/r0/rooms/{room_id}/messages", proxy.messages),
         web.put(
             r"/_matrix/client/r0/rooms/{room_id}/send/{event_type}/{txnid}",
-            proxy.send_message,
+            proxy.send_message
         ),
         web.post("/_matrix/client/r0/user/{user_id}/filter", proxy.filter),
         web.post("/_matrix/client/r0/search", proxy.search),
         web.options("/_matrix/client/r0/search", proxy.search_opts),
-        ]
-    )
+    ])

     server = await aiohttp_server(app)

@@ -154,7 +161,7 @@ async def running_proxy(pan_proxy_server, aioresponse, aiohttp_client):
         "access_token": "abc123",
         "device_id": "GHTYAJCE",
         "home_server": "example.org",
-        "user_id": "@example:example.org",
+        "user_id": "@example:example.org"
     }

     aioclient = await aiohttp_client(server)

@@ -163,7 +170,7 @@ async def running_proxy(pan_proxy_server, aioresponse, aiohttp_client):
         "https://example.org/_matrix/client/r0/login",
         status=200,
         payload=login_response,
-        repeat=True,
+        repeat=True
     )

     await aioclient.post(

@@ -172,7 +179,7 @@ async def running_proxy(pan_proxy_server, aioresponse, aiohttp_client):
             "type": "m.login.password",
             "user": "example",
             "password": "wordpass",
-        },
+        }
     )

     yield server, aioclient, proxy, queues
@@ -25,10 +25,10 @@ ALICE_ID = "@alice:example.org"
 
 
 @pytest.fixture
-async def client(tmpdir):
+async def client(tmpdir, loop):
     store = PanStore(tmpdir)
     queue = janus.Queue()
-    conf = ServerConfig("example", "https://example.org")
+    conf = ServerConfig("example", "https://exapmle.org")
     conf.history_fetch_delay = 0.1
 
     store.save_server_user("example", "@example:example.org")
@@ -371,7 +371,7 @@ class TestClass(object):
 
         await client.loop_stop()
 
-    async def test_history_fetching_tasks(self, client, aioresponse):
+    async def test_history_fetching_tasks(self, client, aioresponse, loop):
         if not INDEXING_ENABLED:
             pytest.skip("Indexing needs to be enabled to test this")
 
@@ -380,9 +380,7 @@ class TestClass(object):
         )
 
         aioresponse.get(
-            sync_url,
-            status=200,
-            payload=self.initial_sync_response,
+            sync_url, status=200, payload=self.initial_sync_response,
         )
 
         aioresponse.get(sync_url, status=200, payload=self.empty_sync, repeat=True)
@@ -423,7 +421,7 @@ class TestClass(object):
         tasks = client.pan_store.load_fetcher_tasks(client.server_name, client.user_id)
         assert len(tasks) == 1
 
-        # Check that the task is our prev_batch from the sync response
+        # Check that the task is our prev_batch from the sync resposne
         assert tasks[0].room_id == TEST_ROOM_ID
         assert tasks[0].token == "t392-516_47314_0_7_1_1_1_11444_1"
 
@@ -433,7 +431,7 @@ class TestClass(object):
         tasks = client.pan_store.load_fetcher_tasks(client.server_name, client.user_id)
         assert len(tasks) == 1
 
-        # Check that the task is our end token from the messages response
+        # Check that the task is our end token from the messages resposne
         assert tasks[0].room_id == TEST_ROOM_ID
         assert tasks[0].token == "t47409-4357353_219380_26003_2265"
 
@@ -447,7 +445,7 @@ class TestClass(object):
 
         await client.loop_stop()
 
-    async def test_history_fetching_resume(self, client, aioresponse):
+    async def test_history_fetching_resume(self, client, aioresponse, loop):
         if not INDEXING_ENABLED:
             pytest.skip("Indexing needs to be enabled to test this")
 
@@ -456,9 +454,7 @@ class TestClass(object):
         )
 
         aioresponse.get(
-            sync_url,
-            status=200,
-            payload=self.initial_sync_response,
+            sync_url, status=200, payload=self.initial_sync_response,
         )
 
         aioresponse.get(sync_url, status=200, payload=self.empty_sync, repeat=True)
@@ -523,7 +519,7 @@ class TestClass(object):
         )
         assert len(tasks) == 1
 
-        # Check that the task is our end token from the messages response
+        # Check that the task is our end token from the messages resposne
         assert tasks[0].room_id == TEST_ROOM_ID
         assert tasks[0].token == "t47409-4357353_219380_26003_2265"
 
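Across these hunks the master side drops the explicit `loop` argument from fixtures and tests, presumably because newer pytest-asyncio/pytest-aiohttp releases manage the event loop themselves instead of exposing it as a fixture. A minimal sketch of that newer style, assuming pytest-asyncio >= 0.17 (fixture and test names are illustrative, not taken from the diff):

import pytest
import pytest_asyncio


@pytest_asyncio.fixture
async def client():
    # The plugin drives async fixtures on its own event loop; no `loop` fixture needed.
    yield "fake-client"


@pytest.mark.asyncio
async def test_client_fixture(client):
    assert client == "fake-client"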
@@ -1,7 +1,9 @@
+import asyncio
 import json
 import re
 from collections import defaultdict
 
+from aiohttp import web
 from nio.crypto import OlmDevice
 
 from conftest import faker
@@ -25,7 +27,7 @@ class TestClass(object):
             "access_token": "abc123",
             "device_id": "GHTYAJCE",
             "home_server": "example.org",
-            "user_id": "@example:example.org",
+            "user_id": "@example:example.org"
         }
 
     @property
@@ -34,7 +36,12 @@ class TestClass(object):
 
     @property
     def keys_upload_response(self):
-        return {"one_time_key_counts": {"curve25519": 10, "signed_curve25519": 20}}
+        return {
+            "one_time_key_counts": {
+                "curve25519": 10,
+                "signed_curve25519": 20
+            }
+        }
 
     @property
     def example_devices(self):
@@ -45,7 +52,10 @@ class TestClass(object):
             devices[device.user_id][device.id] = device
 
         bob_device = OlmDevice(
-            BOB_ID, BOB_DEVICE, {"ed25519": BOB_ONETIME, "curve25519": BOB_CURVE}
+            BOB_ID,
+            BOB_DEVICE,
+            {"ed25519": BOB_ONETIME,
+             "curve25519": BOB_CURVE}
         )
 
         devices[BOB_ID][BOB_DEVICE] = bob_device
@@ -61,7 +71,7 @@ class TestClass(object):
             "https://example.org/_matrix/client/r0/login",
             status=200,
             payload=self.login_response,
-            repeat=True,
+            repeat=True
         )
 
         assert not daemon.pan_clients
@@ -72,7 +82,7 @@ class TestClass(object):
                 "type": "m.login.password",
                 "user": "example",
                 "password": "wordpass",
-            },
+            }
         )
 
         assert resp.status == 200
@@ -95,11 +105,11 @@ class TestClass(object):
             "https://example.org/_matrix/client/r0/login",
             status=200,
             payload=self.login_response,
-            repeat=True,
+            repeat=True
         )
 
         sync_url = re.compile(
-            r"^https://example\.org/_matrix/client/r0/sync\?access_token=.*"
+            r'^https://example\.org/_matrix/client/r0/sync\?access_token=.*'
         )
 
         aioresponse.get(
@@ -114,16 +124,14 @@ class TestClass(object):
                 "type": "m.login.password",
                 "user": "example",
                 "password": "wordpass",
-            },
+            }
         )
 
         # Check that the pan client started to sync after logging in.
         pan_client = list(daemon.pan_clients.values())[0]
         assert len(pan_client.rooms) == 1
 
-    async def test_pan_client_keys_upload(
-        self, pan_proxy_server, aiohttp_client, aioresponse
-    ):
+    async def test_pan_client_keys_upload(self, pan_proxy_server, aiohttp_client, aioresponse):
         server, daemon, _ = pan_proxy_server
 
         client = await aiohttp_client(server)
@@ -132,11 +140,11 @@ class TestClass(object):
             "https://example.org/_matrix/client/r0/login",
             status=200,
             payload=self.login_response,
-            repeat=True,
+            repeat=True
         )
 
         sync_url = re.compile(
-            r"^https://example\.org/_matrix/client/r0/sync\?access_token=.*"
+            r'^https://example\.org/_matrix/client/r0/sync\?access_token=.*'
        )
 
         aioresponse.get(
@@ -161,7 +169,7 @@ class TestClass(object):
                 "type": "m.login.password",
                 "user": "example",
                 "password": "wordpass",
-            },
+            }
         )
 
         pan_client = list(daemon.pan_clients.values())[0]
 
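Most of the hunks in this file change formatting rather than behaviour: the master side uses black's conventions (double quotes, trailing commas, one argument per line for long calls), while 0.10.3 keeps the older hand-wrapped style. A small illustrative sketch of the two equivalent spellings (the `mock` object is hypothetical and not part of the diff):

# 0.10.3 style: single quotes, no trailing comma, hand-wrapped call.
def old_style(mock):
    mock.post('https://example.org/_matrix/client/r0/login', status=200, repeat=True)


# master style (black): double quotes, one argument per line, magic trailing comma.
def new_style(mock):
    mock.post(
        "https://example.org/_matrix/client/r0/login",
        status=200,
        repeat=True,
    )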
@@ -1,10 +1,12 @@
 import asyncio
+import pdb
 import pprint
 import pytest
 
 from nio import RoomMessage, RoomEncryptedMedia
 
 from urllib.parse import urlparse
+from conftest import faker
 from pantalaimon.index import INDEXING_ENABLED
 from pantalaimon.store import FetchTask, MediaInfo, UploadInfo
 
@@ -25,7 +27,7 @@ class TestClass(object):
                 "type": "m.room.message",
                 "unsigned": {"age": 43289803095},
                 "user_id": "@example2:localhost",
-                "age": 43289803095,
+                "age": 43289803095
             }
         )
 
@@ -41,14 +43,13 @@ class TestClass(object):
                 "type": "m.room.message",
                 "unsigned": {"age": 43289803095},
                 "user_id": "@example2:localhost",
-                "age": 43289803095,
+                "age": 43289803095
             }
         )
 
     @property
     def encrypted_media_event(self):
-        return RoomEncryptedMedia.from_dict(
-            {
+        return RoomEncryptedMedia.from_dict({
                 "room_id": "!testroom:localhost",
                 "event_id": "$15163622445EBvZK:localhost",
                 "origin_server_ts": 1516362244030,
@@ -64,21 +65,21 @@ class TestClass(object):
                             "ext": True,
                             "k": "yx0QvkgYlasdWEsdalkejaHBzCkKEBAp3tB7dGtWgrs",
                             "key_ops": ["encrypt", "decrypt"],
-                            "kty": "oct",
+                            "kty": "oct"
                         },
                         "iv": "0pglXX7fspIBBBBAEERLFd",
                         "hashes": {
                             "sha256": "eXRDFvh+aXsQRj8a+5ZVVWUQ9Y6u9DYiz4tq1NvbLu8"
                         },
                         "url": "mxc://localhost/maDtasSiPFjROFMnlwxIhhyW",
-                        "mimetype": "image/jpeg",
+                        "mimetype": "image/jpeg"
-                    },
-                },
             }
-        )
+            }
+            })
 
     def test_account_loading(self, panstore):
         accounts = panstore.load_all_users()
+        # pdb.set_trace()
         assert len(accounts) == 10
 
     def test_token_saving(self, panstore, access_token):
@@ -129,8 +130,7 @@ class TestClass(object):
         if not INDEXING_ENABLED:
             pytest.skip("Indexing needs to be enabled to test this")
 
-        from pantalaimon.index import IndexStore
+        from pantalaimon.index import Index, IndexStore
-
         loop = asyncio.get_event_loop()
 
         store = IndexStore("example", tempdir)
@@ -148,10 +148,8 @@ class TestClass(object):
         assert len(result["results"]) == 1
         assert result["count"] == 1
         assert result["results"][0]["result"] == self.test_event.source
-        assert (
-            result["results"][0]["context"]["events_after"][0]
-            == self.another_event.source
-        )
+        assert (result["results"][0]["context"]["events_after"][0]
+                == self.another_event.source)
 
     def test_media_storage(self, panstore):
         server_name = "test"
 
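The index test above obtains a loop with `asyncio.get_event_loop()` and drives async store operations from a synchronous test. Since newer Python versions deprecate `get_event_loop()` when no loop is running, here is a sketch of the same pattern with an explicitly created loop (the coroutine is a placeholder, not pantalaimon's API):

import asyncio


async def write_to_index(event):
    # Placeholder for an async store operation (e.g. committing an event to the index).
    await asyncio.sleep(0)
    return {"indexed": event}


def test_sync_driver():
    loop = asyncio.new_event_loop()
    try:
        result = loop.run_until_complete(write_to_index({"event_id": "$abc"}))
    finally:
        loop.close()
    assert result["indexed"]["event_id"] == "$abc"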
14 tox.ini
@@ -1,14 +1,21 @@
+# content of: tox.ini , put in same dir as setup.py
 [tox]
-envlist = coverage
+envlist = py38,py39,coverage
 
 [testenv]
+basepython =
+    py38: python3.8
+    py39: python3.9
+    py3: python3.9
+
 deps = -rtest-requirements.txt
 install_command = pip install {opts} {packages}
-
-passenv = TOXENV,CI
+passenv = TOXENV CI TRAVIS TRAVIS_*
 commands = pytest
+usedevelop = True
 
 [testenv:coverage]
+basepython = python3.9
 commands =
     pytest --cov=pantalaimon --cov-report term-missing
     coverage xml
@@ -18,6 +25,5 @@ deps =
     -rtest-requirements.txt
     coverage
     codecov>=1.4.0
-    pytest-asyncio
 setenv =
     COVERAGE_FILE=.coverage