Merge branch 'develop' of github.com:matrix-org/synapse into erikj/unfederatable

commit d59acb8c5b

.gitignore (vendored): 3 changes
@@ -43,3 +43,6 @@ build/
 localhost-800*/
 static/client/register/register_config.js
 .tox
+env/
+*.config

CHANGES.rst: 11 changes

@@ -1,3 +1,14 @@
+Changes in synapse v0.10.0 (2015-09-03)
+=======================================
+
+No change from release candidate.
+
+Changes in synapse v0.10.0-rc6 (2015-09-02)
+===========================================
+
+* Remove some of the old database upgrade scripts.
+* Fix database port script to work with newly created sqlite databases.
+
 Changes in synapse v0.10.0-rc5 (2015-08-27)
 ===========================================

@@ -94,6 +94,7 @@ Synapse is the reference python/twisted Matrix homeserver implementation.
 System requirements:
 - POSIX-compliant system (tested on Linux & OS X)
 - Python 2.7
+- At least 512 MB RAM.

 Synapse is written in python but some of the libraries it uses are written in
 C. So before we can install synapse itself we need a working C compiler and the

@@ -120,6 +121,7 @@ To install the synapse homeserver run::

     virtualenv -p python2.7 ~/.synapse
     source ~/.synapse/bin/activate
+    pip install --upgrade setuptools
     pip install --process-dependency-links https://github.com/matrix-org/synapse/tarball/master

 This installs synapse, along with the libraries it uses, into a virtual

@@ -284,6 +286,11 @@ may need to manually upgrade it::

     sudo pip install --upgrade pip

+Installing may fail with ``mock requires setuptools>=17.1. Aborting installation``.
+You can fix this by upgrading setuptools::
+
+    pip install --upgrade setuptools
+
 If pip crashes mid-installation for whatever reason (e.g. lost terminal), pip may
 refuse to run until you remove the temporary installation directory it
 created. To reset the installation::

@@ -56,10 +56,9 @@ if __name__ == '__main__':

     js = json.load(args.json)

-
     auth = Auth(Mock())
     check_auth(
         auth,
         [FrozenEvent(d) for d in js["auth_chain"]],
-        [FrozenEvent(d) for d in js["pdus"]],
+        [FrozenEvent(d) for d in js.get("pdus", [])],
     )
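
Aside (not part of the diff): a minimal sketch of the kind of JSON document the script above appears to consume. The field names come from the hunk; the event bodies and the file name are made-up placeholders, not complete Matrix events.

    # Hypothetical input for the check_auth script: "auth_chain" is required,
    # "pdus" is now optional thanks to js.get("pdus", []).
    import json

    example = {
        "auth_chain": [
            {"type": "m.room.create", "event_id": "$create:example.com"},
        ],
        # "pdus": [...]  # may be omitted entirely
    }

    with open("events.json", "w") as f:
        json.dump(example, f)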

@@ -29,7 +29,7 @@ import traceback
 import yaml


-logger = logging.getLogger("port_from_sqlite_to_postgres")
+logger = logging.getLogger("synapse_port_db")


 BOOLEAN_COLUMNS = {

@@ -412,6 +412,7 @@ class Porter(object):
             self._convert_rows("sent_transactions", headers, rows)

             inserted_rows = len(rows)
+            if inserted_rows:
                 max_inserted_rowid = max(r[0] for r in rows)

                 def insert(txn):

@@ -420,6 +421,8 @@ class Porter(object):
                     )

                 yield self.postgres_store.execute(insert)
+            else:
+                max_inserted_rowid = 0

             def get_start_id(txn):
                 txn.execute(

setup.py: 2 changes

@@ -81,7 +81,7 @@ setup(
     packages=find_packages(exclude=["tests", "tests.*"]),
     description="Reference Synapse Home Server",
     install_requires=dependencies['requirements'](include_conditional=True).keys(),
-    dependency_links=dependencies["DEPENDENCY_LINKS"],
+    dependency_links=dependencies["DEPENDENCY_LINKS"].values(),
     include_package_data=True,
     zip_safe=False,
     long_description=long_description,
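
Aside (not part of the diff): the .values() call implies DEPENDENCY_LINKS is a name-to-URL mapping rather than a plain list, while setuptools expects dependency_links to be an iterable of URLs. A minimal sketch of that assumption, with a made-up entry:

    # Assumed shape of the DEPENDENCY_LINKS mapping exposed by the dependencies
    # module that setup.py loads; the entry below is illustrative only.
    DEPENDENCY_LINKS = {
        "example-sdk": "https://example.com/example-sdk/tarball/v1.0#egg=example_sdk-1.0",
    }

    # setup(...) wants the URLs themselves, hence the switch to .values().
    dependency_links = list(DEPENDENCY_LINKS.values())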

@@ -16,4 +16,4 @@
 """ This is a reference implementation of a Matrix home server.
 """

-__version__ = "0.10.0-rc5"
+__version__ = "0.10.0"

@@ -20,7 +20,7 @@ from twisted.internet import defer
 from synapse.api.constants import EventTypes, Membership, JoinRules
 from synapse.api.errors import AuthError, Codes, SynapseError
 from synapse.util.logutils import log_function
-from synapse.types import RoomID, UserID
+from synapse.types import RoomID, UserID, EventID

 import logging

@@ -108,7 +108,7 @@ class Auth(object):
                 self._check_power_levels(event, auth_events)

             if event.type == EventTypes.Redaction:
-                self._check_redaction(event, auth_events)
+                self.check_redaction(event, auth_events)

             logger.debug("Allowing! %s", event)
         except AuthError as e:

@@ -572,12 +572,31 @@ class Auth(object):

         return True

-    def _check_redaction(self, event, auth_events):
+    def check_redaction(self, event, auth_events):
+        """Check whether the event sender is allowed to redact the target event.
+
+        Returns:
+            True if the sender is allowed to redact the target event if the
+            target event was created by them.
+            False if the sender is allowed to redact the target event with no
+            further checks.
+
+        Raises:
+            AuthError if the event sender is definitely not allowed to redact
+            the target event.
+        """
         user_level = self._get_user_power_level(event.user_id, auth_events)

         redact_level = self._get_named_level(auth_events, "redact", 50)

-        if user_level < redact_level:
+        if user_level > redact_level:
+            return False
+
+        redacter_domain = EventID.from_string(event.event_id).domain
+        redactee_domain = EventID.from_string(event.redacts).domain
+        if redacter_domain == redactee_domain:
+            return True
+
         raise AuthError(
             403,
             "You don't have permission to redact events"
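
Aside (not part of the diff): a sketch of how a caller is expected to combine the three documented outcomes of check_redaction. The get_event_by_id argument is a simplified stand-in for event lookup, not Synapse's storage API; the real enforcement added in this commit is in the handler hunk further down.

    from synapse.api.errors import AuthError

    def enforce_redaction(auth, event, auth_events, get_event_by_id):
        # True: the sender may only redact events they created themselves.
        if auth.check_redaction(event, auth_events):
            original = get_event_by_id(event.redacts)
            if event.user_id != original.user_id:
                raise AuthError(403, "You don't have permission to redact events")
        # False: the sender's power level already permits the redaction.
        # An AuthError raised inside check_redaction simply propagates.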

@@ -341,7 +341,7 @@ def get_version_string():
             )
         ).encode("ascii")
     except Exception as e:
-        logger.warn("Failed to check for git repository: %s", e)
+        logger.info("Failed to check for git repository: %s", e)

     return ("Synapse/%s" % (synapse.__version__,)).encode("ascii")

@@ -182,7 +182,7 @@ class Config(object):
                     ) % (entry_path, )
                     continue

-                files.add(config_path)
+                files.append(entry_path)

             config_files.extend(sorted(files))
         else:

@@ -162,7 +162,9 @@ class Keyring(object):
         def remove_deferreds(res, server_name, group_id):
             server_to_gids[server_name].discard(group_id)
             if not server_to_gids[server_name]:
-                server_to_deferred.pop(server_name).callback(None)
+                d = server_to_deferred.pop(server_name, None)
+                if d:
+                    d.callback(None)
             return res

         for g_id, deferred in deferreds.items():

@@ -200,8 +202,15 @@ class Keyring(object):
             else:
                 break

-        for server_name, deferred in server_to_deferred:
-            self.key_downloads[server_name] = ObservableDeferred(deferred)
+        for server_name, deferred in server_to_deferred.items():
+            d = ObservableDeferred(deferred)
+            self.key_downloads[server_name] = d
+
+            def rm(r, server_name):
+                self.key_downloads.pop(server_name, None)
+                return r
+
+            d.addBoth(rm, server_name)

     def get_server_verify_keys(self, group_id_to_group, group_id_to_deferred):
         """Takes a dict of KeyGroups and tries to find at least one key for

@@ -220,9 +229,8 @@ class Keyring(object):
         merged_results = {}

         missing_keys = {
-            group.server_name: key_id
+            group.server_name: set(group.key_ids)
             for group in group_id_to_group.values()
-            for key_id in group.key_ids
         }

         for fn in key_fetch_fns:
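
Aside (not part of the diff): the missing_keys change above reads like a fix for a dict-comprehension pitfall, where repeated keys overwrite each other so only one key id per server survives. A small illustration; KeyGroup here is a simplified stand-in for the real structure.

    from collections import namedtuple

    KeyGroup = namedtuple("KeyGroup", ["server_name", "key_ids"])
    groups = {1: KeyGroup("a.example", ["ed25519:k1", "ed25519:k2"])}

    # Old form: later key ids overwrite earlier ones for the same server.
    old = {g.server_name: key_id for g in groups.values() for key_id in g.key_ids}
    # New form: keep every key id per server.
    new = {g.server_name: set(g.key_ids) for g in groups.values()}

    print(old)  # {'a.example': 'ed25519:k2'} -- k1 is silently dropped
    print(new)  # {'a.example': {'ed25519:k1', 'ed25519:k2'}} (set order may vary)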

@@ -279,16 +287,15 @@ class Keyring(object):
     def get_keys_from_store(self, server_name_and_key_ids):
         res = yield defer.gatherResults(
             [
-                self.store.get_server_verify_keys(server_name, key_ids)
+                self.store.get_server_verify_keys(
+                    server_name, key_ids
+                ).addCallback(lambda ks, server: (server, ks), server_name)
                 for server_name, key_ids in server_name_and_key_ids
             ],
             consumeErrors=True,
         ).addErrback(unwrapFirstError)

-        defer.returnValue(dict(zip(
-            [server_name for server_name, _ in server_name_and_key_ids],
-            res
-        )))
+        defer.returnValue(dict(res))

     @defer.inlineCallbacks
     def get_keys_from_perspectives(self, server_name_and_key_ids):

@@ -15,7 +15,7 @@

 from twisted.internet import defer

-from synapse.api.errors import LimitExceededError, SynapseError
+from synapse.api.errors import LimitExceededError, SynapseError, AuthError
 from synapse.crypto.event_signing import add_hashes_and_signatures
 from synapse.api.constants import Membership, EventTypes
 from synapse.types import UserID, RoomAlias

@@ -146,6 +146,21 @@ class BaseHandler(object):
                 returned_invite.signatures
             )

+        if event.type == EventTypes.Redaction:
+            if self.auth.check_redaction(event, auth_events=context.current_state):
+                original_event = yield self.store.get_event(
+                    event.redacts,
+                    check_redacted=False,
+                    get_prev_content=False,
+                    allow_rejected=False,
+                    allow_none=False
+                )
+                if event.user_id != original_event.user_id:
+                    raise AuthError(
+                        403,
+                        "You don't have permission to redact events"
+                    )
+
         destinations = set(extra_destinations)
         for k, s in context.current_state.items():
             try:

@@ -17,7 +17,7 @@
 from __future__ import absolute_import

 import logging
-from resource import getrusage, getpagesize, RUSAGE_SELF
+from resource import getrusage, RUSAGE_SELF
 import functools
 import os
 import stat

@@ -100,7 +100,6 @@ def render_all():
 # process resource usage

 rusage = None
-PAGE_SIZE = getpagesize()


 def update_resource_metrics():

@@ -113,8 +112,8 @@ resource_metrics = get_metrics_for("process.resource")
 resource_metrics.register_callback("utime", lambda: rusage.ru_utime * 1000)
 resource_metrics.register_callback("stime", lambda: rusage.ru_stime * 1000)

-# pages
-resource_metrics.register_callback("maxrss", lambda: rusage.ru_maxrss * PAGE_SIZE)
+# kilobytes
+resource_metrics.register_callback("maxrss", lambda: rusage.ru_maxrss * 1024)

 TYPES = {
     stat.S_IFSOCK: "SOCK",
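
Aside (not part of the diff): the maxrss change above relies on the fact that on Linux getrusage() reports ru_maxrss in kilobytes rather than pages. A minimal sketch of the unit handling (macOS reports bytes instead; that case is ignored here):

    from resource import getrusage, RUSAGE_SELF

    # On Linux, ru_maxrss is in kilobytes, so multiply by 1024 to get bytes.
    max_rss_bytes = getrusage(RUSAGE_SELF).ru_maxrss * 1024
    print("peak RSS: %d bytes" % max_rss_bytes)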

@@ -131,6 +130,10 @@ def _process_fds():
     counts = {(k,): 0 for k in TYPES.values()}
     counts[("other",)] = 0

+    # Not every OS will have a /proc/self/fd directory
+    if not os.path.exists("/proc/self/fd"):
+        return counts
+
     for fd in os.listdir("/proc/self/fd"):
         try:
             s = os.stat("/proc/self/fd/%s" % (fd))

@@ -96,6 +96,7 @@ class ThreepidRestServlet(RestServlet):
         self.hs = hs
         self.identity_handler = hs.get_handlers().identity_handler
         self.auth = hs.get_auth()
+        self.auth_handler = hs.get_handlers().auth_handler

     @defer.inlineCallbacks
     def on_GET(self, request):

synapse/storage/schema/delta/23/drop_state_index.sql: 16 changes (new file)

@@ -0,0 +1,16 @@
+/* Copyright 2015 OpenMarket Ltd
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+DROP INDEX IF EXISTS state_groups_state_tuple;