Merge branch 'develop' into markjh/pushrule_stream

Mark Haines 2016-03-04 14:44:34 +00:00
commit ec7460b4f2
41 changed files with 641 additions and 148 deletions


@@ -21,5 +21,6 @@ recursive-include synapse/static *.html
 recursive-include synapse/static *.js
 exclude jenkins.sh
+exclude jenkins*.sh
 prune demo/etc


@@ -565,4 +565,21 @@ sphinxcontrib-napoleon::
Building internal API documentation::

    python setup.py build_sphinx

Halp!! Synapse eats all my RAM!
===============================
Synapse's architecture is quite RAM hungry currently - we deliberately
cache a lot of recent room data and metadata in RAM in order to speed up
common requests. We'll improve this in future, but for now the easiest
way to reduce the RAM usage (at the risk of slowing things down) is to
set the almost-undocumented ``SYNAPSE_CACHE_FACTOR`` environment
variable. Roughly speaking, a SYNAPSE_CACHE_FACTOR of 1.0 will max out
at around 3-4GB of resident memory - this is what we currently run the
matrix.org homeserver on. The default setting is currently 0.1, which
gives a footprint of around 700MB. You can dial it down further to 0.02
if desired, which targets roughly 512MB. Conversely you can dial it up
if you need performance for lots of users and have a box with a lot of RAM.
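
Under the hood the factor is simply a multiplier applied to each cache's
configured size. A minimal Python sketch of the mechanism (mirroring the
synapse/util/caches changes later in this commit; the helper name here is
illustrative, not Synapse API)::

    import os

    # Synapse reads the factor once at import time; it defaults to 0.1.
    CACHE_SIZE_FACTOR = float(os.environ.get("SYNAPSE_CACHE_FACTOR", 0.1))

    def effective_max_entries(max_entries=1000):
        # A cache declared with max_entries=1000 holds 100 entries at the
        # default factor, or the full 1000 at SYNAPSE_CACHE_FACTOR=1.0.
        return int(max_entries * CACHE_SIZE_FACTOR)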

jenkins-flake8.sh (new executable file, 22 lines)

@ -0,0 +1,22 @@
#!/bin/bash
set -eux
: ${WORKSPACE:="$(pwd)"}
export PYTHONDONTWRITEBYTECODE=yep
export SYNAPSE_CACHE_FACTOR=1
# Output test results as junit xml
export TRIAL_FLAGS="--reporter=subunit"
export TOXSUFFIX="| subunit-1to2 | subunit2junitxml --no-passthrough --output-to=results.xml"
# Write coverage reports to a separate file for each process
export COVERAGE_OPTS="-p"
export DUMP_COVERAGE_COMMAND="coverage help"
# Output flake8 violations to violations.flake8.log
export PEP8SUFFIX="--output-file=violations.flake8.log"
rm .coverage* || echo "No coverage files to remove"
tox -e packaging -e pep8

jenkins-postgres.sh (new executable file, 92 lines)

@ -0,0 +1,92 @@
#!/bin/bash
set -eux
: ${WORKSPACE:="$(pwd)"}
export PYTHONDONTWRITEBYTECODE=yep
export SYNAPSE_CACHE_FACTOR=1
# Output test results as junit xml
export TRIAL_FLAGS="--reporter=subunit"
export TOXSUFFIX="| subunit-1to2 | subunit2junitxml --no-passthrough --output-to=results.xml"
# Write coverage reports to a separate file for each process
export COVERAGE_OPTS="-p"
export DUMP_COVERAGE_COMMAND="coverage help"
# Output flake8 violations to violations.flake8.log
# Don't exit with non-0 status code on Jenkins,
# so that the build steps continue and a later step can decide whether to
# UNSTABLE or FAILURE this build.
export PEP8SUFFIX="--output-file=violations.flake8.log || echo flake8 finished with status code \$?"
rm .coverage* || echo "No coverage files to remove"
tox --notest
: ${GIT_BRANCH:="origin/$(git rev-parse --abbrev-ref HEAD)"}
TOX_BIN=$WORKSPACE/.tox/py27/bin
if [[ ! -e .sytest-base ]]; then
git clone https://github.com/matrix-org/sytest.git .sytest-base --mirror
else
(cd .sytest-base; git fetch -p)
fi
rm -rf sytest
git clone .sytest-base sytest --shared
cd sytest
git checkout "${GIT_BRANCH}" || (echo >&2 "No ref ${GIT_BRANCH} found, falling back to develop" ; git checkout develop)
: ${PERL5LIB:=$WORKSPACE/perl5/lib/perl5}
: ${PERL_MB_OPT:=--install_base=$WORKSPACE/perl5}
: ${PERL_MM_OPT:=INSTALL_BASE=$WORKSPACE/perl5}
export PERL5LIB PERL_MB_OPT PERL_MM_OPT
./install-deps.pl
: ${PORT_BASE:=8000}
if [[ -z "$POSTGRES_DB_1" ]]; then
echo >&2 "Variable POSTGRES_DB_1 not set"
exit 1
fi
if [[ -z "$POSTGRES_DB_2" ]]; then
echo >&2 "Variable POSTGRES_DB_2 not set"
exit 1
fi
mkdir -p "localhost-$(($PORT_BASE + 1))"
mkdir -p "localhost-$(($PORT_BASE + 2))"
cat > localhost-$(($PORT_BASE + 1))/database.yaml << EOF
name: psycopg2
args:
database: $POSTGRES_DB_1
EOF
cat > localhost-$(($PORT_BASE + 2))/database.yaml << EOF
name: psycopg2
args:
database: $POSTGRES_DB_2
EOF
# Run if both postgresql databases exist
echo >&2 "Running sytest with PostgreSQL";
$TOX_BIN/pip install psycopg2
./run-tests.pl --coverage -O tap --synapse-directory $WORKSPACE \
--python $TOX_BIN/python --all --port-base $PORT_BASE > results-postgresql.tap
cd ..
cp sytest/.coverage.* .
# Combine the coverage reports
echo "Combining:" .coverage.*
$TOX_BIN/python -m coverage combine
# Output coverage to coverage.xml
$TOX_BIN/coverage xml -o coverage.xml

jenkins-sqlite.sh (new executable file, 63 lines)

@ -0,0 +1,63 @@
#!/bin/bash
set -eux
: ${WORKSPACE:="$(pwd)"}
export PYTHONDONTWRITEBYTECODE=yep
export SYNAPSE_CACHE_FACTOR=1
# Output test results as junit xml
export TRIAL_FLAGS="--reporter=subunit"
export TOXSUFFIX="| subunit-1to2 | subunit2junitxml --no-passthrough --output-to=results.xml"
# Write coverage reports to a separate file for each process
export COVERAGE_OPTS="-p"
export DUMP_COVERAGE_COMMAND="coverage help"
# Output flake8 violations to violations.flake8.log
# Don't exit with non-0 status code on Jenkins,
# so that the build steps continue and a later step can decide whether to
# UNSTABLE or FAILURE this build.
export PEP8SUFFIX="--output-file=violations.flake8.log || echo flake8 finished with status code \$?"
rm .coverage* || echo "No coverage files to remove"
tox --notest
: ${GIT_BRANCH:="origin/$(git rev-parse --abbrev-ref HEAD)"}
TOX_BIN=$WORKSPACE/.tox/py27/bin
if [[ ! -e .sytest-base ]]; then
git clone https://github.com/matrix-org/sytest.git .sytest-base --mirror
else
(cd .sytest-base; git fetch -p)
fi
rm -rf sytest
git clone .sytest-base sytest --shared
cd sytest
git checkout "${GIT_BRANCH}" || (echo >&2 "No ref ${GIT_BRANCH} found, falling back to develop" ; git checkout develop)
: ${PERL5LIB:=$WORKSPACE/perl5/lib/perl5}
: ${PERL_MB_OPT:=--install_base=$WORKSPACE/perl5}
: ${PERL_MM_OPT:=INSTALL_BASE=$WORKSPACE/perl5}
export PERL5LIB PERL_MB_OPT PERL_MM_OPT
./install-deps.pl
: ${PORT_BASE:=8500}
echo >&2 "Running sytest with SQLite3";
./run-tests.pl --coverage -O tap --synapse-directory $WORKSPACE \
--python $TOX_BIN/python --all --port-base $PORT_BASE > results-sqlite3.tap
cd ..
cp sytest/.coverage.* .
# Combine the coverage reports
echo "Combining:" .coverage.*
$TOX_BIN/python -m coverage combine
# Output coverage to coverage.xml
$TOX_BIN/coverage xml -o coverage.xml

jenkins-unittests.sh (new executable file, 25 lines)

@ -0,0 +1,25 @@
#!/bin/bash
set -eux
: ${WORKSPACE:="$(pwd)"}
export PYTHONDONTWRITEBYTECODE=yep
export SYNAPSE_CACHE_FACTOR=1
# Output test results as junit xml
export TRIAL_FLAGS="--reporter=subunit"
export TOXSUFFIX="| subunit-1to2 | subunit2junitxml --no-passthrough --output-to=results.xml"
# Write coverage reports to a separate file for each process
export COVERAGE_OPTS="-p"
export DUMP_COVERAGE_COMMAND="coverage help"
# Output flake8 violations to violations.flake8.log
# Don't exit with non-0 status code on Jenkins,
# so that the build steps continue and a later step can decide whether to
# UNSTABLE or FAILURE this build.
export PEP8SUFFIX="--output-file=violations.flake8.log || echo flake8 finished with status code \$?"
rm .coverage* || echo "No coverage files to remove"
tox -e py27


@@ -1,8 +1,11 @@
-#!/bin/bash -eu
+#!/bin/bash
+set -eux
 : ${WORKSPACE:="$(pwd)"}
 export PYTHONDONTWRITEBYTECODE=yep
+export SYNAPSE_CACHE_FACTOR=1
 # Output test results as junit xml
 export TRIAL_FLAGS="--reporter=subunit"


@@ -309,8 +309,8 @@ class Porter(object):
             **self.postgres_config["args"]
         )
-        sqlite_engine = create_engine("sqlite3")
-        postgres_engine = create_engine("psycopg2")
+        sqlite_engine = create_engine(FakeConfig(sqlite_config))
+        postgres_engine = create_engine(FakeConfig(postgres_config))
         self.sqlite_store = Store(sqlite_db_pool, sqlite_engine)
         self.postgres_store = Store(postgres_db_pool, postgres_engine)
@@ -792,3 +792,8 @@ if __name__ == "__main__":
     if end_error_exec_info:
         exc_type, exc_value, exc_traceback = end_error_exec_info
         traceback.print_exception(exc_type, exc_value, exc_traceback)
+
+
+class FakeConfig:
+    def __init__(self, database_config):
+        self.database_config = database_config
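
For context: create_engine() (changed in synapse/storage/engines below) now expects a
config object carrying a database_config attribute rather than a bare engine name, and
FakeConfig is the minimal shim the port script uses to satisfy that. A small
self-contained sketch of the shape involved, with a hypothetical database dict:

class FakeConfig:
    # Minimal stand-in for HomeServerConfig: create_engine() only reads
    # the database_config attribute from it.
    def __init__(self, database_config):
        self.database_config = database_config

def engine_name(config):
    # The first thing the new create_engine() does with its argument.
    return config.database_config["name"]

# Hypothetical dict, shaped like the port script's sqlite_config.
print(engine_name(FakeConfig({"name": "sqlite3", "args": {"database": "homeserver.db"}})))
# -> sqlite3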


@@ -386,7 +386,7 @@ def setup(config_options):
     tls_server_context_factory = context_factory.ServerContextFactory(config)
-    database_engine = create_engine(config.database_config["name"])
+    database_engine = create_engine(config)
     config.database_config["args"]["cp_openfun"] = database_engine.on_new_connection
     hs = SynapseHomeServer(

synapse/config/api.py (new file, 40 lines)

@ -0,0 +1,40 @@
# Copyright 2015, 2016 OpenMarket Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from ._base import Config

from synapse.api.constants import EventTypes


class ApiConfig(Config):

    def read_config(self, config):
        self.room_invite_state_types = config.get("room_invite_state_types", [
            EventTypes.JoinRules,
            EventTypes.CanonicalAlias,
            EventTypes.RoomAvatar,
            EventTypes.Name,
        ])

    def default_config(cls, **kwargs):
        return """\
        ## API Configuration ##

        # A list of event types that will be included in the room_invite_state
        room_invite_state_types:
            - "{JoinRules}"
            - "{CanonicalAlias}"
            - "{RoomAvatar}"
            - "{Name}"
        """.format(**vars(EventTypes))


@@ -23,6 +23,7 @@ from .captcha import CaptchaConfig
 from .voip import VoipConfig
 from .registration import RegistrationConfig
 from .metrics import MetricsConfig
+from .api import ApiConfig
 from .appservice import AppServiceConfig
 from .key import KeyConfig
 from .saml2 import SAML2Config
@@ -32,7 +33,7 @@ from .password import PasswordConfig
 class HomeServerConfig(TlsConfig, ServerConfig, DatabaseConfig, LoggingConfig,
                        RatelimitConfig, ContentRepositoryConfig, CaptchaConfig,
-                       VoipConfig, RegistrationConfig, MetricsConfig,
+                       VoipConfig, RegistrationConfig, MetricsConfig, ApiConfig,
                        AppServiceConfig, KeyConfig, SAML2Config, CasConfig,
                        PasswordConfig,):
     pass


@@ -97,4 +97,7 @@ class ContentRepositoryConfig(Config):
         - width: 640
           height: 480
           method: scale
+        - width: 800
+          height: 600
+          method: scale
         """ % locals()


@ -160,10 +160,10 @@ class BaseHandler(object):
) )
defer.returnValue(res.get(user_id, [])) defer.returnValue(res.get(user_id, []))
def ratelimit(self, user_id): def ratelimit(self, requester):
time_now = self.clock.time() time_now = self.clock.time()
allowed, time_allowed = self.ratelimiter.send_message( allowed, time_allowed = self.ratelimiter.send_message(
user_id, time_now, requester.user.to_string(), time_now,
msg_rate_hz=self.hs.config.rc_messages_per_second, msg_rate_hz=self.hs.config.rc_messages_per_second,
burst_count=self.hs.config.rc_message_burst_count, burst_count=self.hs.config.rc_message_burst_count,
) )
@ -199,8 +199,7 @@ class BaseHandler(object):
# events in the room, because we don't know enough about the graph # events in the room, because we don't know enough about the graph
# fragment we received to treat it like a graph, so the above returned # fragment we received to treat it like a graph, so the above returned
# no relevant events. It may have returned some events (if we have # no relevant events. It may have returned some events (if we have
# joined and left the room), but not useful ones, like the invite. So we # joined and left the room), but not useful ones, like the invite.
# forcibly set our context to the invite we received over federation.
if ( if (
not self.is_host_in_room(context.current_state) and not self.is_host_in_room(context.current_state) and
builder.type == EventTypes.Member builder.type == EventTypes.Member
@ -208,7 +207,27 @@ class BaseHandler(object):
prev_member_event = yield self.store.get_room_member( prev_member_event = yield self.store.get_room_member(
builder.sender, builder.room_id builder.sender, builder.room_id
) )
if prev_member_event:
# The prev_member_event may already be in context.current_state,
# despite us not being present in the room; in particular, if
# inviting user, and all other local users, have already left.
#
# In that case, we have all the information we need, and we don't
# want to drop "context" - not least because we may need to handle
# the invite locally, which will require us to have the whole
# context (not just prev_member_event) to auth it.
#
context_event_ids = (
e.event_id for e in context.current_state.values()
)
if (
prev_member_event and
prev_member_event.event_id not in context_event_ids
):
# The prev_member_event is missing from context, so it must
# have arrived over federation and is an outlier. We forcibly
# set our context to the invite we received over federation
builder.prev_events = ( builder.prev_events = (
prev_member_event.event_id, prev_member_event.event_id,
prev_member_event.prev_events prev_member_event.prev_events
@ -263,11 +282,18 @@ class BaseHandler(object):
return False return False
@defer.inlineCallbacks @defer.inlineCallbacks
def handle_new_client_event(self, event, context, ratelimit=True, extra_users=[]): def handle_new_client_event(
self,
requester,
event,
context,
ratelimit=True,
extra_users=[]
):
# We now need to go and hit out to wherever we need to hit out to. # We now need to go and hit out to wherever we need to hit out to.
if ratelimit: if ratelimit:
self.ratelimit(event.sender) self.ratelimit(requester)
self.auth.check(event, auth_events=context.current_state) self.auth.check(event, auth_events=context.current_state)
@ -307,12 +333,8 @@ class BaseHandler(object):
"sender": e.sender, "sender": e.sender,
} }
for k, e in context.current_state.items() for k, e in context.current_state.items()
if e.type in ( if e.type in self.hs.config.room_invite_state_types
EventTypes.JoinRules, or is_inviter_member_event(e)
EventTypes.CanonicalAlias,
EventTypes.RoomAvatar,
EventTypes.Name,
) or is_inviter_member_event(e)
] ]
invitee = UserID.from_string(event.state_key) invitee = UserID.from_string(event.state_key)
@ -348,6 +370,12 @@ class BaseHandler(object):
"You don't have permission to redact events" "You don't have permission to redact events"
) )
if event.type == EventTypes.Create and context.current_state:
raise AuthError(
403,
"Changing the room create event is forbidden",
)
action_generator = ActionGenerator(self.hs) action_generator = ActionGenerator(self.hs)
yield action_generator.handle_push_actions_for_event( yield action_generator.handle_push_actions_for_event(
event, context, self event, context, self


@@ -477,4 +477,4 @@ class AuthHandler(BaseHandler):
         Returns:
             Whether self.hash(password) == stored_hash (bool).
         """
-        return bcrypt.checkpw(password, stored_hash)
+        return bcrypt.hashpw(password, stored_hash) == stored_hash
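
The replacement works because a bcrypt hash string embeds its own salt and cost
parameters, so passing the stored hash as the "salt" argument makes hashpw() redo the
computation with the same parameters; this is essentially what checkpw() does
internally, and the hashpw() form presumably keeps compatibility with bcrypt modules
that lack checkpw(). A minimal sketch, assuming the Python bcrypt package is installed:

import bcrypt

stored_hash = bcrypt.hashpw(b"s3kr1t", bcrypt.gensalt())

def validate(password, stored_hash):
    # Re-hash with the salt/cost embedded in stored_hash and compare.
    return bcrypt.hashpw(password, stored_hash) == stored_hash

print(validate(b"s3kr1t", stored_hash))  # True
print(validate(b"wrong", stored_hash))   # False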


@ -17,9 +17,9 @@
from twisted.internet import defer from twisted.internet import defer
from ._base import BaseHandler from ._base import BaseHandler
from synapse.api.errors import SynapseError, Codes, CodeMessageException from synapse.api.errors import SynapseError, Codes, CodeMessageException, AuthError
from synapse.api.constants import EventTypes from synapse.api.constants import EventTypes
from synapse.types import RoomAlias from synapse.types import RoomAlias, UserID
import logging import logging
import string import string
@ -38,7 +38,7 @@ class DirectoryHandler(BaseHandler):
) )
@defer.inlineCallbacks @defer.inlineCallbacks
def _create_association(self, room_alias, room_id, servers=None): def _create_association(self, room_alias, room_id, servers=None, creator=None):
# general association creation for both human users and app services # general association creation for both human users and app services
for wchar in string.whitespace: for wchar in string.whitespace:
@ -60,7 +60,8 @@ class DirectoryHandler(BaseHandler):
yield self.store.create_room_alias_association( yield self.store.create_room_alias_association(
room_alias, room_alias,
room_id, room_id,
servers servers,
creator=creator,
) )
@defer.inlineCallbacks @defer.inlineCallbacks
@ -77,7 +78,7 @@ class DirectoryHandler(BaseHandler):
400, "This alias is reserved by an application service.", 400, "This alias is reserved by an application service.",
errcode=Codes.EXCLUSIVE errcode=Codes.EXCLUSIVE
) )
yield self._create_association(room_alias, room_id, servers) yield self._create_association(room_alias, room_id, servers, creator=user_id)
@defer.inlineCallbacks @defer.inlineCallbacks
def create_appservice_association(self, service, room_alias, room_id, def create_appservice_association(self, service, room_alias, room_id,
@ -95,7 +96,11 @@ class DirectoryHandler(BaseHandler):
def delete_association(self, user_id, room_alias): def delete_association(self, user_id, room_alias):
# association deletion for human users # association deletion for human users
# TODO Check if server admin can_delete = yield self._user_can_delete_alias(room_alias, user_id)
if not can_delete:
raise AuthError(
403, "You don't have permission to delete the alias.",
)
can_delete = yield self.can_modify_alias( can_delete = yield self.can_modify_alias(
room_alias, room_alias,
@ -212,17 +217,21 @@ class DirectoryHandler(BaseHandler):
) )
@defer.inlineCallbacks @defer.inlineCallbacks
def send_room_alias_update_event(self, user_id, room_id): def send_room_alias_update_event(self, requester, user_id, room_id):
aliases = yield self.store.get_aliases_for_room(room_id) aliases = yield self.store.get_aliases_for_room(room_id)
msg_handler = self.hs.get_handlers().message_handler msg_handler = self.hs.get_handlers().message_handler
yield msg_handler.create_and_send_nonmember_event({ yield msg_handler.create_and_send_nonmember_event(
"type": EventTypes.Aliases, requester,
"state_key": self.hs.hostname, {
"room_id": room_id, "type": EventTypes.Aliases,
"sender": user_id, "state_key": self.hs.hostname,
"content": {"aliases": aliases}, "room_id": room_id,
}, ratelimit=False) "sender": user_id,
"content": {"aliases": aliases},
},
ratelimit=False
)
@defer.inlineCallbacks @defer.inlineCallbacks
def get_association_from_room_alias(self, room_alias): def get_association_from_room_alias(self, room_alias):
@ -257,3 +266,13 @@ class DirectoryHandler(BaseHandler):
return return
# either no interested services, or no service with an exclusive lock # either no interested services, or no service with an exclusive lock
defer.returnValue(True) defer.returnValue(True)
@defer.inlineCallbacks
def _user_can_delete_alias(self, alias, user_id):
creator = yield self.store.get_room_alias_creator(alias.to_string())
if creator and creator == user_id:
defer.returnValue(True)
is_admin = yield self.auth.is_server_admin(UserID.from_string(user_id))
defer.returnValue(is_admin)
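
The new check lets either the alias creator or a server admin delete an alias (the
admin-only check previously done in the REST servlet is removed further down). A
condensed sketch of the decision, using hypothetical synchronous stand-ins for the
store and auth lookups:

def user_can_delete_alias(user_id, alias_creator, is_server_admin):
    # alias_creator stands in for get_room_alias_creator(),
    # is_server_admin for auth.is_server_admin()
    if alias_creator is not None and alias_creator == user_id:
        return True
    return is_server_admin

print(user_can_delete_alias("@alice:hs", "@alice:hs", False))  # True: creator
print(user_can_delete_alias("@bob:hs", "@alice:hs", True))     # True: admin
print(user_can_delete_alias("@bob:hs", "@alice:hs", False))    # False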


@ -1657,7 +1657,7 @@ class FederationHandler(BaseHandler):
self.auth.check(event, context.current_state) self.auth.check(event, context.current_state)
yield self._check_signature(event, auth_events=context.current_state) yield self._check_signature(event, auth_events=context.current_state)
member_handler = self.hs.get_handlers().room_member_handler member_handler = self.hs.get_handlers().room_member_handler
yield member_handler.send_membership_event(event, context, from_client=False) yield member_handler.send_membership_event(None, event, context)
else: else:
destinations = set(x.split(":", 1)[-1] for x in (sender_user_id, room_id)) destinations = set(x.split(":", 1)[-1] for x in (sender_user_id, room_id))
yield self.replication_layer.forward_third_party_invite( yield self.replication_layer.forward_third_party_invite(
@ -1686,7 +1686,7 @@ class FederationHandler(BaseHandler):
# TODO: Make sure the signatures actually are correct. # TODO: Make sure the signatures actually are correct.
event.signatures.update(returned_invite.signatures) event.signatures.update(returned_invite.signatures)
member_handler = self.hs.get_handlers().room_member_handler member_handler = self.hs.get_handlers().room_member_handler
yield member_handler.send_membership_event(event, context, from_client=False) yield member_handler.send_membership_event(None, event, context)
@defer.inlineCallbacks @defer.inlineCallbacks
def add_display_name_to_third_party_invite(self, event_dict, event, context): def add_display_name_to_third_party_invite(self, event_dict, event, context):


@ -215,7 +215,7 @@ class MessageHandler(BaseHandler):
defer.returnValue((event, context)) defer.returnValue((event, context))
@defer.inlineCallbacks @defer.inlineCallbacks
def send_nonmember_event(self, event, context, ratelimit=True): def send_nonmember_event(self, requester, event, context, ratelimit=True):
""" """
Persists and notifies local clients and federation of an event. Persists and notifies local clients and federation of an event.
@ -241,6 +241,7 @@ class MessageHandler(BaseHandler):
defer.returnValue(prev_state) defer.returnValue(prev_state)
yield self.handle_new_client_event( yield self.handle_new_client_event(
requester=requester,
event=event, event=event,
context=context, context=context,
ratelimit=ratelimit, ratelimit=ratelimit,
@ -268,9 +269,9 @@ class MessageHandler(BaseHandler):
@defer.inlineCallbacks @defer.inlineCallbacks
def create_and_send_nonmember_event( def create_and_send_nonmember_event(
self, self,
requester,
event_dict, event_dict,
ratelimit=True, ratelimit=True,
token_id=None,
txn_id=None txn_id=None
): ):
""" """
@ -280,10 +281,11 @@ class MessageHandler(BaseHandler):
""" """
event, context = yield self.create_event( event, context = yield self.create_event(
event_dict, event_dict,
token_id=token_id, token_id=requester.access_token_id,
txn_id=txn_id txn_id=txn_id
) )
yield self.send_nonmember_event( yield self.send_nonmember_event(
requester,
event, event,
context, context,
ratelimit=ratelimit, ratelimit=ratelimit,


@ -130,6 +130,10 @@ class PresenceHandler(BaseHandler):
for state in active_presence for state in active_presence
} }
metrics.register_callback(
"user_to_current_state_size", lambda: len(self.user_to_current_state)
)
now = self.clock.time_msec() now = self.clock.time_msec()
for state in active_presence: for state in active_presence:
self.wheel_timer.insert( self.wheel_timer.insert(


@ -89,13 +89,13 @@ class ProfileHandler(BaseHandler):
defer.returnValue(result["displayname"]) defer.returnValue(result["displayname"])
@defer.inlineCallbacks @defer.inlineCallbacks
def set_displayname(self, target_user, auth_user, new_displayname): def set_displayname(self, target_user, requester, new_displayname):
"""target_user is the user whose displayname is to be changed; """target_user is the user whose displayname is to be changed;
auth_user is the user attempting to make this change.""" auth_user is the user attempting to make this change."""
if not self.hs.is_mine(target_user): if not self.hs.is_mine(target_user):
raise SynapseError(400, "User is not hosted on this Home Server") raise SynapseError(400, "User is not hosted on this Home Server")
if target_user != auth_user: if target_user != requester.user:
raise AuthError(400, "Cannot set another user's displayname") raise AuthError(400, "Cannot set another user's displayname")
if new_displayname == '': if new_displayname == '':
@ -109,7 +109,7 @@ class ProfileHandler(BaseHandler):
"displayname": new_displayname, "displayname": new_displayname,
}) })
yield self._update_join_states(target_user) yield self._update_join_states(requester)
@defer.inlineCallbacks @defer.inlineCallbacks
def get_avatar_url(self, target_user): def get_avatar_url(self, target_user):
@ -139,13 +139,13 @@ class ProfileHandler(BaseHandler):
defer.returnValue(result["avatar_url"]) defer.returnValue(result["avatar_url"])
@defer.inlineCallbacks @defer.inlineCallbacks
def set_avatar_url(self, target_user, auth_user, new_avatar_url): def set_avatar_url(self, target_user, requester, new_avatar_url):
"""target_user is the user whose avatar_url is to be changed; """target_user is the user whose avatar_url is to be changed;
auth_user is the user attempting to make this change.""" auth_user is the user attempting to make this change."""
if not self.hs.is_mine(target_user): if not self.hs.is_mine(target_user):
raise SynapseError(400, "User is not hosted on this Home Server") raise SynapseError(400, "User is not hosted on this Home Server")
if target_user != auth_user: if target_user != requester.user:
raise AuthError(400, "Cannot set another user's avatar_url") raise AuthError(400, "Cannot set another user's avatar_url")
yield self.store.set_profile_avatar_url( yield self.store.set_profile_avatar_url(
@ -156,7 +156,7 @@ class ProfileHandler(BaseHandler):
"avatar_url": new_avatar_url, "avatar_url": new_avatar_url,
}) })
yield self._update_join_states(target_user) yield self._update_join_states(requester)
@defer.inlineCallbacks @defer.inlineCallbacks
def collect_presencelike_data(self, user, state): def collect_presencelike_data(self, user, state):
@ -199,11 +199,12 @@ class ProfileHandler(BaseHandler):
defer.returnValue(response) defer.returnValue(response)
@defer.inlineCallbacks @defer.inlineCallbacks
def _update_join_states(self, user): def _update_join_states(self, requester):
user = requester.user
if not self.hs.is_mine(user): if not self.hs.is_mine(user):
return return
self.ratelimit(user.to_string()) self.ratelimit(requester)
joins = yield self.store.get_rooms_for_user( joins = yield self.store.get_rooms_for_user(
user.to_string(), user.to_string(),


@ -36,8 +36,6 @@ class ReceiptsHandler(BaseHandler):
) )
self.clock = self.hs.get_clock() self.clock = self.hs.get_clock()
self._receipt_cache = None
@defer.inlineCallbacks @defer.inlineCallbacks
def received_client_receipt(self, room_id, receipt_type, user_id, def received_client_receipt(self, room_id, receipt_type, user_id,
event_id): event_id):


@ -157,6 +157,7 @@ class RegistrationHandler(BaseHandler):
) )
except SynapseError: except SynapseError:
# if user id is taken, just generate another # if user id is taken, just generate another
user = None
user_id = None user_id = None
token = None token = None
attempts += 1 attempts += 1


@ -18,7 +18,7 @@ from twisted.internet import defer
from ._base import BaseHandler from ._base import BaseHandler
from synapse.types import UserID, RoomAlias, RoomID, RoomStreamToken from synapse.types import UserID, RoomAlias, RoomID, RoomStreamToken, Requester
from synapse.api.constants import ( from synapse.api.constants import (
EventTypes, Membership, JoinRules, RoomCreationPreset, EventTypes, Membership, JoinRules, RoomCreationPreset,
) )
@ -90,7 +90,7 @@ class RoomCreationHandler(BaseHandler):
""" """
user_id = requester.user.to_string() user_id = requester.user.to_string()
self.ratelimit(user_id) self.ratelimit(requester)
if "room_alias_name" in config: if "room_alias_name" in config:
for wchar in string.whitespace: for wchar in string.whitespace:
@ -185,26 +185,32 @@ class RoomCreationHandler(BaseHandler):
if "name" in config: if "name" in config:
name = config["name"] name = config["name"]
yield msg_handler.create_and_send_nonmember_event({ yield msg_handler.create_and_send_nonmember_event(
"type": EventTypes.Name, requester,
"room_id": room_id, {
"sender": user_id, "type": EventTypes.Name,
"state_key": "", "room_id": room_id,
"content": {"name": name}, "sender": user_id,
}, ratelimit=False) "state_key": "",
"content": {"name": name},
},
ratelimit=False)
if "topic" in config: if "topic" in config:
topic = config["topic"] topic = config["topic"]
yield msg_handler.create_and_send_nonmember_event({ yield msg_handler.create_and_send_nonmember_event(
"type": EventTypes.Topic, requester,
"room_id": room_id, {
"sender": user_id, "type": EventTypes.Topic,
"state_key": "", "room_id": room_id,
"content": {"topic": topic}, "sender": user_id,
}, ratelimit=False) "state_key": "",
"content": {"topic": topic},
},
ratelimit=False)
for invitee in invite_list: for invitee in invite_list:
room_member_handler.update_membership( yield room_member_handler.update_membership(
requester, requester,
UserID.from_string(invitee), UserID.from_string(invitee),
room_id, room_id,
@ -231,7 +237,7 @@ class RoomCreationHandler(BaseHandler):
if room_alias: if room_alias:
result["room_alias"] = room_alias.to_string() result["room_alias"] = room_alias.to_string()
yield directory_handler.send_room_alias_update_event( yield directory_handler.send_room_alias_update_event(
user_id, room_id requester, user_id, room_id
) )
defer.returnValue(result) defer.returnValue(result)
@ -263,7 +269,11 @@ class RoomCreationHandler(BaseHandler):
@defer.inlineCallbacks @defer.inlineCallbacks
def send(etype, content, **kwargs): def send(etype, content, **kwargs):
event = create(etype, content, **kwargs) event = create(etype, content, **kwargs)
yield msg_handler.create_and_send_nonmember_event(event, ratelimit=False) yield msg_handler.create_and_send_nonmember_event(
creator,
event,
ratelimit=False
)
config = RoomCreationHandler.PRESETS_DICT[preset_config] config = RoomCreationHandler.PRESETS_DICT[preset_config]
@ -454,12 +464,11 @@ class RoomMemberHandler(BaseHandler):
member_handler = self.hs.get_handlers().room_member_handler member_handler = self.hs.get_handlers().room_member_handler
yield member_handler.send_membership_event( yield member_handler.send_membership_event(
requester,
event, event,
context, context,
is_guest=requester.is_guest,
ratelimit=ratelimit, ratelimit=ratelimit,
remote_room_hosts=remote_room_hosts, remote_room_hosts=remote_room_hosts,
from_client=True,
) )
if action == "forget": if action == "forget":
@ -468,17 +477,19 @@ class RoomMemberHandler(BaseHandler):
@defer.inlineCallbacks @defer.inlineCallbacks
def send_membership_event( def send_membership_event(
self, self,
requester,
event, event,
context, context,
is_guest=False,
remote_room_hosts=None, remote_room_hosts=None,
ratelimit=True, ratelimit=True,
from_client=True,
): ):
""" """
Change the membership status of a user in a room. Change the membership status of a user in a room.
Args: Args:
requester (Requester): The local user who requested the membership
event. If None, certain checks, like whether this homeserver can
act as the sender, will be skipped.
event (SynapseEvent): The membership event. event (SynapseEvent): The membership event.
context: The context of the event. context: The context of the event.
is_guest (bool): Whether the sender is a guest. is_guest (bool): Whether the sender is a guest.
@ -486,19 +497,23 @@ class RoomMemberHandler(BaseHandler):
the room, and could be danced with in order to join this the room, and could be danced with in order to join this
homeserver for the first time. homeserver for the first time.
ratelimit (bool): Whether to rate limit this request. ratelimit (bool): Whether to rate limit this request.
from_client (bool): Whether this request is the result of a local
client request (rather than over federation). If so, we will
perform extra checks, like that this homeserver can act as this
client.
Raises: Raises:
SynapseError if there was a problem changing the membership. SynapseError if there was a problem changing the membership.
""" """
remote_room_hosts = remote_room_hosts or []
target_user = UserID.from_string(event.state_key) target_user = UserID.from_string(event.state_key)
room_id = event.room_id room_id = event.room_id
if from_client: if requester is not None:
sender = UserID.from_string(event.sender) sender = UserID.from_string(event.sender)
assert sender == requester.user, (
"Sender (%s) must be same as requester (%s)" %
(sender, requester.user)
)
assert self.hs.is_mine(sender), "Sender must be our own: %s" % (sender,) assert self.hs.is_mine(sender), "Sender must be our own: %s" % (sender,)
else:
requester = Requester(target_user, None, False)
message_handler = self.hs.get_handlers().message_handler message_handler = self.hs.get_handlers().message_handler
prev_event = message_handler.deduplicate_state_event(event, context) prev_event = message_handler.deduplicate_state_event(event, context)
@ -508,7 +523,7 @@ class RoomMemberHandler(BaseHandler):
action = "send" action = "send"
if event.membership == Membership.JOIN: if event.membership == Membership.JOIN:
if is_guest and not self._can_guest_join(context.current_state): if requester.is_guest and not self._can_guest_join(context.current_state):
# This should be an auth check, but guests are a local concept, # This should be an auth check, but guests are a local concept,
# so don't really fit into the general auth process. # so don't really fit into the general auth process.
raise AuthError(403, "Guest access not allowed") raise AuthError(403, "Guest access not allowed")
@ -521,8 +536,24 @@ class RoomMemberHandler(BaseHandler):
action = "remote_join" action = "remote_join"
elif event.membership == Membership.LEAVE: elif event.membership == Membership.LEAVE:
is_host_in_room = self.is_host_in_room(context.current_state) is_host_in_room = self.is_host_in_room(context.current_state)
if not is_host_in_room: if not is_host_in_room:
action = "remote_reject" # perhaps we've been invited
inviter = self.get_inviter(target_user.to_string(), context.current_state)
if not inviter:
raise SynapseError(404, "Not a known room")
if self.hs.is_mine(inviter):
# the inviter was on our server, but has now left. Carry on
# with the normal rejection codepath.
#
# This is a bit of a hack, because the room might still be
# active on other servers.
pass
else:
# send the rejection to the inviter's HS.
remote_room_hosts = remote_room_hosts + [inviter.domain]
action = "remote_reject"
federation_handler = self.hs.get_handlers().federation_handler federation_handler = self.hs.get_handlers().federation_handler
@ -541,16 +572,14 @@ class RoomMemberHandler(BaseHandler):
event.content, event.content,
) )
elif action == "remote_reject": elif action == "remote_reject":
inviter = self.get_inviter(target_user.to_string(), context.current_state)
if not inviter:
raise SynapseError(404, "No known servers")
yield federation_handler.do_remotely_reject_invite( yield federation_handler.do_remotely_reject_invite(
[inviter.domain], remote_room_hosts,
room_id, room_id,
event.user_id event.user_id
) )
else: else:
yield self.handle_new_client_event( yield self.handle_new_client_event(
requester,
event, event,
context, context,
extra_users=[target_user], extra_users=[target_user],
@ -669,12 +698,12 @@ class RoomMemberHandler(BaseHandler):
) )
else: else:
yield self._make_and_store_3pid_invite( yield self._make_and_store_3pid_invite(
requester,
id_server, id_server,
medium, medium,
address, address,
room_id, room_id,
inviter, inviter,
requester.access_token_id,
txn_id=txn_id txn_id=txn_id
) )
@ -732,12 +761,12 @@ class RoomMemberHandler(BaseHandler):
@defer.inlineCallbacks @defer.inlineCallbacks
def _make_and_store_3pid_invite( def _make_and_store_3pid_invite(
self, self,
requester,
id_server, id_server,
medium, medium,
address, address,
room_id, room_id,
user, user,
token_id,
txn_id txn_id
): ):
room_state = yield self.hs.get_state_handler().get_current_state(room_id) room_state = yield self.hs.get_state_handler().get_current_state(room_id)
@ -787,6 +816,7 @@ class RoomMemberHandler(BaseHandler):
msg_handler = self.hs.get_handlers().message_handler msg_handler = self.hs.get_handlers().message_handler
yield msg_handler.create_and_send_nonmember_event( yield msg_handler.create_and_send_nonmember_event(
requester,
{ {
"type": EventTypes.ThirdPartyInvite, "type": EventTypes.ThirdPartyInvite,
"content": { "content": {
@ -801,7 +831,6 @@ class RoomMemberHandler(BaseHandler):
"sender": user.to_string(), "sender": user.to_string(),
"state_key": token, "state_key": token,
}, },
token_id=token_id,
txn_id=txn_id, txn_id=txn_id,
) )


@ -75,7 +75,11 @@ class ClientDirectoryServer(ClientV1RestServlet):
yield dir_handler.create_association( yield dir_handler.create_association(
user_id, room_alias, room_id, servers user_id, room_alias, room_id, servers
) )
yield dir_handler.send_room_alias_update_event(user_id, room_id) yield dir_handler.send_room_alias_update_event(
requester,
user_id,
room_id
)
except SynapseError as e: except SynapseError as e:
raise e raise e
except: except:
@ -118,9 +122,6 @@ class ClientDirectoryServer(ClientV1RestServlet):
requester = yield self.auth.get_user_by_req(request) requester = yield self.auth.get_user_by_req(request)
user = requester.user user = requester.user
is_admin = yield self.auth.is_server_admin(user)
if not is_admin:
raise AuthError(403, "You need to be a server admin")
room_alias = RoomAlias.from_string(room_alias) room_alias = RoomAlias.from_string(room_alias)


@ -51,7 +51,7 @@ class ProfileDisplaynameRestServlet(ClientV1RestServlet):
defer.returnValue((400, "Unable to parse name")) defer.returnValue((400, "Unable to parse name"))
yield self.handlers.profile_handler.set_displayname( yield self.handlers.profile_handler.set_displayname(
user, requester.user, new_name) user, requester, new_name)
defer.returnValue((200, {})) defer.returnValue((200, {}))
@ -88,7 +88,7 @@ class ProfileAvatarURLRestServlet(ClientV1RestServlet):
defer.returnValue((400, "Unable to parse name")) defer.returnValue((400, "Unable to parse name"))
yield self.handlers.profile_handler.set_avatar_url( yield self.handlers.profile_handler.set_avatar_url(
user, requester.user, new_name) user, requester, new_name)
defer.returnValue((200, {})) defer.returnValue((200, {}))


@ -158,12 +158,12 @@ class RoomStateEventRestServlet(ClientV1RestServlet):
if event_type == EventTypes.Member: if event_type == EventTypes.Member:
yield self.handlers.room_member_handler.send_membership_event( yield self.handlers.room_member_handler.send_membership_event(
requester,
event, event,
context, context,
is_guest=requester.is_guest,
) )
else: else:
yield msg_handler.send_nonmember_event(event, context) yield msg_handler.send_nonmember_event(requester, event, context)
defer.returnValue((200, {"event_id": event.event_id})) defer.returnValue((200, {"event_id": event.event_id}))
@ -183,13 +183,13 @@ class RoomSendEventRestServlet(ClientV1RestServlet):
msg_handler = self.handlers.message_handler msg_handler = self.handlers.message_handler
event = yield msg_handler.create_and_send_nonmember_event( event = yield msg_handler.create_and_send_nonmember_event(
requester,
{ {
"type": event_type, "type": event_type,
"content": content, "content": content,
"room_id": room_id, "room_id": room_id,
"sender": requester.user.to_string(), "sender": requester.user.to_string(),
}, },
token_id=requester.access_token_id,
txn_id=txn_id, txn_id=txn_id,
) )
@ -504,6 +504,7 @@ class RoomRedactEventRestServlet(ClientV1RestServlet):
msg_handler = self.handlers.message_handler msg_handler = self.handlers.message_handler
event = yield msg_handler.create_and_send_nonmember_event( event = yield msg_handler.create_and_send_nonmember_event(
requester,
{ {
"type": EventTypes.Redaction, "type": EventTypes.Redaction,
"content": content, "content": content,
@ -511,7 +512,6 @@ class RoomRedactEventRestServlet(ClientV1RestServlet):
"sender": requester.user.to_string(), "sender": requester.user.to_string(),
"redacts": event_id, "redacts": event_id,
}, },
token_id=requester.access_token_id,
txn_id=txn_id, txn_id=txn_id,
) )


@ -34,8 +34,8 @@ class ApplicationServiceStore(SQLBaseStore):
def __init__(self, hs): def __init__(self, hs):
super(ApplicationServiceStore, self).__init__(hs) super(ApplicationServiceStore, self).__init__(hs)
self.hostname = hs.hostname self.hostname = hs.hostname
self.services_cache = [] self.services_cache = ApplicationServiceStore.load_appservices(
self._populate_appservice_cache( hs.hostname,
hs.config.app_service_config_files hs.config.app_service_config_files
) )
@ -144,21 +144,23 @@ class ApplicationServiceStore(SQLBaseStore):
return rooms_for_user_matching_user_id return rooms_for_user_matching_user_id
def _load_appservice(self, as_info): @classmethod
def _load_appservice(cls, hostname, as_info, config_filename):
required_string_fields = [ required_string_fields = [
# TODO: Add id here when it's stable to release "id", "url", "as_token", "hs_token", "sender_localpart"
"url", "as_token", "hs_token", "sender_localpart"
] ]
for field in required_string_fields: for field in required_string_fields:
if not isinstance(as_info.get(field), basestring): if not isinstance(as_info.get(field), basestring):
raise KeyError("Required string field: '%s'", field) raise KeyError("Required string field: '%s' (%s)" % (
field, config_filename,
))
localpart = as_info["sender_localpart"] localpart = as_info["sender_localpart"]
if urllib.quote(localpart) != localpart: if urllib.quote(localpart) != localpart:
raise ValueError( raise ValueError(
"sender_localpart needs characters which are not URL encoded." "sender_localpart needs characters which are not URL encoded."
) )
user = UserID(localpart, self.hostname) user = UserID(localpart, hostname)
user_id = user.to_string() user_id = user.to_string()
# namespace checks # namespace checks
@ -188,25 +190,30 @@ class ApplicationServiceStore(SQLBaseStore):
namespaces=as_info["namespaces"], namespaces=as_info["namespaces"],
hs_token=as_info["hs_token"], hs_token=as_info["hs_token"],
sender=user_id, sender=user_id,
id=as_info["id"] if "id" in as_info else as_info["as_token"], id=as_info["id"],
) )
def _populate_appservice_cache(self, config_files): @classmethod
"""Populates a cache of Application Services from the config files.""" def load_appservices(cls, hostname, config_files):
"""Returns a list of Application Services from the config files."""
if not isinstance(config_files, list): if not isinstance(config_files, list):
logger.warning( logger.warning(
"Expected %s to be a list of AS config files.", config_files "Expected %s to be a list of AS config files.", config_files
) )
return return []
# Dicts of value -> filename # Dicts of value -> filename
seen_as_tokens = {} seen_as_tokens = {}
seen_ids = {} seen_ids = {}
appservices = []
for config_file in config_files: for config_file in config_files:
try: try:
with open(config_file, 'r') as f: with open(config_file, 'r') as f:
appservice = self._load_appservice(yaml.load(f)) appservice = ApplicationServiceStore._load_appservice(
hostname, yaml.load(f), config_file
)
if appservice.id in seen_ids: if appservice.id in seen_ids:
raise ConfigError( raise ConfigError(
"Cannot reuse ID across application services: " "Cannot reuse ID across application services: "
@ -226,11 +233,12 @@ class ApplicationServiceStore(SQLBaseStore):
) )
seen_as_tokens[appservice.token] = config_file seen_as_tokens[appservice.token] = config_file
logger.info("Loaded application service: %s", appservice) logger.info("Loaded application service: %s", appservice)
self.services_cache.append(appservice) appservices.append(appservice)
except Exception as e: except Exception as e:
logger.error("Failed to load appservice from '%s'", config_file) logger.error("Failed to load appservice from '%s'", config_file)
logger.exception(e) logger.exception(e)
raise raise
return appservices
class ApplicationServiceTransactionStore(SQLBaseStore): class ApplicationServiceTransactionStore(SQLBaseStore):


@ -70,13 +70,14 @@ class DirectoryStore(SQLBaseStore):
) )
@defer.inlineCallbacks @defer.inlineCallbacks
def create_room_alias_association(self, room_alias, room_id, servers): def create_room_alias_association(self, room_alias, room_id, servers, creator=None):
""" Creates an associatin between a room alias and room_id/servers """ Creates an associatin between a room alias and room_id/servers
Args: Args:
room_alias (RoomAlias) room_alias (RoomAlias)
room_id (str) room_id (str)
servers (list) servers (list)
creator (str): Optional user_id of creator.
Returns: Returns:
Deferred Deferred
@ -87,6 +88,7 @@ class DirectoryStore(SQLBaseStore):
{ {
"room_alias": room_alias.to_string(), "room_alias": room_alias.to_string(),
"room_id": room_id, "room_id": room_id,
"creator": creator,
}, },
desc="create_room_alias_association", desc="create_room_alias_association",
) )
@ -107,6 +109,17 @@ class DirectoryStore(SQLBaseStore):
) )
self.get_aliases_for_room.invalidate((room_id,)) self.get_aliases_for_room.invalidate((room_id,))
def get_room_alias_creator(self, room_alias):
return self._simple_select_one_onecol(
table="room_aliases",
keyvalues={
"room_alias": room_alias,
},
retcol="creator",
desc="get_room_alias_creator",
allow_none=True
)
@defer.inlineCallbacks @defer.inlineCallbacks
def delete_room_alias(self, room_alias): def delete_room_alias(self, room_alias):
room_id = yield self.runInteraction( room_id = yield self.runInteraction(


@@ -26,12 +26,13 @@ SUPPORTED_MODULE = {
 }

-def create_engine(name):
+def create_engine(config):
+    name = config.database_config["name"]
     engine_class = SUPPORTED_MODULE.get(name, None)
     if engine_class:
         module = importlib.import_module(name)
-        return engine_class(module)
+        return engine_class(module, config=config)
     raise RuntimeError(
         "Unsupported database engine '%s'" % (name,)


@ -21,9 +21,10 @@ from ._base import IncorrectDatabaseSetup
class PostgresEngine(object): class PostgresEngine(object):
single_threaded = False single_threaded = False
def __init__(self, database_module): def __init__(self, database_module, config):
self.module = database_module self.module = database_module
self.module.extensions.register_type(self.module.extensions.UNICODE) self.module.extensions.register_type(self.module.extensions.UNICODE)
self.config = config
def check_database(self, txn): def check_database(self, txn):
txn.execute("SHOW SERVER_ENCODING") txn.execute("SHOW SERVER_ENCODING")
@ -44,7 +45,7 @@ class PostgresEngine(object):
) )
def prepare_database(self, db_conn): def prepare_database(self, db_conn):
prepare_database(db_conn, self) prepare_database(db_conn, self, config=self.config)
def is_deadlock(self, error): def is_deadlock(self, error):
if isinstance(error, self.module.DatabaseError): if isinstance(error, self.module.DatabaseError):


@ -23,8 +23,9 @@ import struct
class Sqlite3Engine(object): class Sqlite3Engine(object):
single_threaded = True single_threaded = True
def __init__(self, database_module): def __init__(self, database_module, config):
self.module = database_module self.module = database_module
self.config = config
def check_database(self, txn): def check_database(self, txn):
pass pass
@ -38,7 +39,7 @@ class Sqlite3Engine(object):
def prepare_database(self, db_conn): def prepare_database(self, db_conn):
prepare_sqlite3_database(db_conn) prepare_sqlite3_database(db_conn)
prepare_database(db_conn, self) prepare_database(db_conn, self, config=self.config)
def is_deadlock(self, error): def is_deadlock(self, error):
return False return False


@ -50,7 +50,7 @@ class UpgradeDatabaseException(PrepareDatabaseException):
pass pass
def prepare_database(db_conn, database_engine): def prepare_database(db_conn, database_engine, config):
"""Prepares a database for usage. Will either create all necessary tables """Prepares a database for usage. Will either create all necessary tables
or upgrade from an older schema version. or upgrade from an older schema version.
""" """
@ -61,10 +61,10 @@ def prepare_database(db_conn, database_engine):
if version_info: if version_info:
user_version, delta_files, upgraded = version_info user_version, delta_files, upgraded = version_info
_upgrade_existing_database( _upgrade_existing_database(
cur, user_version, delta_files, upgraded, database_engine cur, user_version, delta_files, upgraded, database_engine, config
) )
else: else:
_setup_new_database(cur, database_engine) _setup_new_database(cur, database_engine, config)
# cur.execute("PRAGMA user_version = %d" % (SCHEMA_VERSION,)) # cur.execute("PRAGMA user_version = %d" % (SCHEMA_VERSION,))
@ -75,7 +75,7 @@ def prepare_database(db_conn, database_engine):
raise raise
def _setup_new_database(cur, database_engine): def _setup_new_database(cur, database_engine, config):
"""Sets up the database by finding a base set of "full schemas" and then """Sets up the database by finding a base set of "full schemas" and then
applying any necessary deltas. applying any necessary deltas.
@ -148,11 +148,12 @@ def _setup_new_database(cur, database_engine):
applied_delta_files=[], applied_delta_files=[],
upgraded=False, upgraded=False,
database_engine=database_engine, database_engine=database_engine,
config=config,
) )
def _upgrade_existing_database(cur, current_version, applied_delta_files, def _upgrade_existing_database(cur, current_version, applied_delta_files,
upgraded, database_engine): upgraded, database_engine, config):
"""Upgrades an existing database. """Upgrades an existing database.
Delta files can either be SQL stored in *.sql files, or python modules Delta files can either be SQL stored in *.sql files, or python modules
@ -245,7 +246,7 @@ def _upgrade_existing_database(cur, current_version, applied_delta_files,
module_name, absolute_path, python_file module_name, absolute_path, python_file
) )
logger.debug("Running script %s", relative_path) logger.debug("Running script %s", relative_path)
module.run_upgrade(cur, database_engine) module.run_upgrade(cur, database_engine, config=config)
elif ext == ".pyc": elif ext == ".pyc":
# Sometimes .pyc files turn up anyway even though we've # Sometimes .pyc files turn up anyway even though we've
# disabled their generation; e.g. from distribution package # disabled their generation; e.g. from distribution package

View File

@ -0,0 +1,16 @@
/* Copyright 2016 OpenMarket Ltd
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
ALTER TABLE room_aliases ADD COLUMN creator TEXT;
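
This new column is what the get_room_alias_creator() store method (added above) reads
back. A toy in-memory illustration of the lookup it performs via
_simple_select_one_onecol(); the alias, room and user values here are hypothetical:

import sqlite3

conn = sqlite3.connect(":memory:")
conn.execute("CREATE TABLE room_aliases (room_alias TEXT, room_id TEXT)")
conn.execute("ALTER TABLE room_aliases ADD COLUMN creator TEXT")
conn.execute(
    "INSERT INTO room_aliases (room_alias, room_id, creator) VALUES (?, ?, ?)",
    ("#example:hs", "!abc:hs", "@alice:hs"),
)
row = conn.execute(
    "SELECT creator FROM room_aliases WHERE room_alias = ?", ("#example:hs",)
).fetchone()
print(row[0])  # @alice:hs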


@ -0,0 +1,59 @@
# Copyright 2016 OpenMarket Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import logging

from synapse.storage.appservice import ApplicationServiceStore


logger = logging.getLogger(__name__)


def run_upgrade(cur, database_engine, config, *args, **kwargs):
    # NULL indicates user was not registered by an appservice.
    cur.execute("ALTER TABLE users ADD COLUMN appservice_id TEXT")

    cur.execute("SELECT name FROM users")
    rows = cur.fetchall()

    config_files = []
    try:
        config_files = config.app_service_config_files
    except AttributeError:
        logger.warning("Could not get app_service_config_files from config")
        pass

    appservices = ApplicationServiceStore.load_appservices(
        config.server_name, config_files
    )

    owned = {}

    for row in rows:
        user_id = row[0]
        for appservice in appservices:
            if appservice.is_exclusive_user(user_id):
                if user_id in owned.keys():
                    logger.error(
                        "user_id %s was owned by more than one application"
                        " service (IDs %s and %s); assigning arbitrarily to %s" %
                        (user_id, owned[user_id], appservice.id, owned[user_id])
                    )
                owned[user_id] = appservice.id

    for user_id, as_id in owned.items():
        cur.execute(
            database_engine.convert_param_style(
                "UPDATE users SET appservice_id = ? WHERE name = ?"
            ),
            (as_id, user_id)
        )
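
For context, is_exclusive_user() matches a user_id against the appservice's
exclusively-claimed user namespaces. A toy illustration of the ownership loop above,
using a hypothetical stand-in for the appservice object (the real ones are loaded from
the registration YAML via load_appservices()):

import re

class FakeAppservice:
    # hypothetical stand-in for a registered application service
    def __init__(self, as_id, user_regex):
        self.id = as_id
        self._pattern = re.compile(user_regex)

    def is_exclusive_user(self, user_id):
        return bool(self._pattern.match(user_id))

appservices = [FakeAppservice("irc_bridge", r"@irc_.*")]
owned = {}
for user_id in ["@irc_alice:example.com", "@bob:example.com"]:
    for appservice in appservices:
        if appservice.is_exclusive_user(user_id):
            owned[user_id] = appservice.id

print(owned)  # {'@irc_alice:example.com': 'irc_bridge'}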


@@ -28,6 +28,7 @@ from twisted.internet import defer
 from collections import OrderedDict

+import os
 import functools
 import inspect
 import threading
@@ -38,6 +39,9 @@ logger = logging.getLogger(__name__)
 _CacheSentinel = object()

+CACHE_SIZE_FACTOR = float(os.environ.get("SYNAPSE_CACHE_FACTOR", 0.1))
+

 class Cache(object):
     def __init__(self, name, max_entries=1000, keylen=1, lru=True, tree=False):
@@ -140,6 +144,8 @@
     """
     def __init__(self, orig, max_entries=1000, num_args=1, lru=True, tree=False,
                  inlineCallbacks=False):
+        max_entries = int(max_entries * CACHE_SIZE_FACTOR)
+
         self.orig = orig

         if inlineCallbacks:


@@ -18,11 +18,15 @@ from synapse.util.caches import cache_counter, caches_by_name
 from blist import sorteddict
 import logging
+import os

 logger = logging.getLogger(__name__)

+CACHE_SIZE_FACTOR = float(os.environ.get("SYNAPSE_CACHE_FACTOR", 0.1))
+

 class StreamChangeCache(object):
     """Keeps track of the stream positions of the latest change in a set of entities.
@@ -33,7 +37,7 @@ class StreamChangeCache(object):
     old then the cache will simply return all given entities.
     """
     def __init__(self, name, current_stream_pos, max_size=10000, prefilled_cache={}):
-        self._max_size = max_size
+        self._max_size = int(max_size * CACHE_SIZE_FACTOR)
         self._entity_to_key = {}
         self._cache = sorteddict()
         self._earliest_known_stream_pos = current_stream_pos


@@ -23,7 +23,7 @@ from synapse.api.errors import AuthError
 from synapse.handlers.profile import ProfileHandler
 from synapse.types import UserID
-from tests.utils import setup_test_homeserver
+from tests.utils import setup_test_homeserver, requester_for_user
 class ProfileHandlers(object):
@@ -84,7 +84,11 @@ class ProfileTestCase(unittest.TestCase):
     @defer.inlineCallbacks
     def test_set_my_name(self):
-        yield self.handler.set_displayname(self.frank, self.frank, "Frank Jr.")
+        yield self.handler.set_displayname(
+            self.frank,
+            requester_for_user(self.frank),
+            "Frank Jr."
+        )
         self.assertEquals(
             (yield self.store.get_profile_displayname(self.frank.localpart)),
@@ -93,7 +97,11 @@ class ProfileTestCase(unittest.TestCase):
     @defer.inlineCallbacks
     def test_set_my_name_noauth(self):
-        d = self.handler.set_displayname(self.frank, self.bob, "Frank Jr.")
+        d = self.handler.set_displayname(
+            self.frank,
+            requester_for_user(self.bob),
+            "Frank Jr."
+        )
         yield self.assertFailure(d, AuthError)
@@ -136,7 +144,7 @@ class ProfileTestCase(unittest.TestCase):
     @defer.inlineCallbacks
     def test_set_my_avatar(self):
         yield self.handler.set_avatar_url(
-            self.frank, self.frank, "http://my.server/pic.gif"
+            self.frank, requester_for_user(self.frank), "http://my.server/pic.gif"
         )
         self.assertEquals(

View File

@@ -18,7 +18,7 @@ from synapse.types import Requester, UserID
 from twisted.internet import defer
 from tests import unittest
-from tests.utils import setup_test_homeserver
+from tests.utils import setup_test_homeserver, requester_for_user
 from mock import Mock, NonCallableMock
 import json
 import contextlib
@@ -135,12 +135,15 @@ class ReplicationResourceCase(unittest.TestCase):
     @defer.inlineCallbacks
     def send_text_message(self, room_id, message):
         handler = self.hs.get_handlers().message_handler
-        event = yield handler.create_and_send_nonmember_event({
-            "type": "m.room.message",
-            "content": {"body": "message", "msgtype": "m.text"},
-            "room_id": room_id,
-            "sender": self.user.to_string(),
-        })
+        event = yield handler.create_and_send_nonmember_event(
+            requester_for_user(self.user),
+            {
+                "type": "m.room.message",
+                "content": {"body": "message", "msgtype": "m.text"},
+                "room_id": room_id,
+                "sender": self.user.to_string(),
+            }
+        )
         defer.returnValue(event.event_id)
     @defer.inlineCallbacks

View File

@@ -86,7 +86,7 @@ class ProfileTestCase(unittest.TestCase):
         self.assertEquals(200, code)
         self.assertEquals(mocked_set.call_args[0][0].localpart, "1234ABCD")
-        self.assertEquals(mocked_set.call_args[0][1].localpart, "1234ABCD")
+        self.assertEquals(mocked_set.call_args[0][1].user.localpart, "1234ABCD")
         self.assertEquals(mocked_set.call_args[0][2], "Frank Jr.")
     @defer.inlineCallbacks
@@ -155,5 +155,5 @@ class ProfileTestCase(unittest.TestCase):
         self.assertEquals(200, code)
         self.assertEquals(mocked_set.call_args[0][0].localpart, "1234ABCD")
-        self.assertEquals(mocked_set.call_args[0][1].localpart, "1234ABCD")
+        self.assertEquals(mocked_set.call_args[0][1].user.localpart, "1234ABCD")
         self.assertEquals(mocked_set.call_args[0][2], "http://my.server/pic.gif")

View File

@@ -35,7 +35,8 @@ class ApplicationServiceStoreTestCase(unittest.TestCase):
     def setUp(self):
         self.as_yaml_files = []
         config = Mock(
-            app_service_config_files=self.as_yaml_files
+            app_service_config_files=self.as_yaml_files,
+            event_cache_size=1,
         )
         hs = yield setup_test_homeserver(config=config)
@@ -109,7 +110,8 @@ class ApplicationServiceTransactionStoreTestCase(unittest.TestCase):
         self.as_yaml_files = []
         config = Mock(
-            app_service_config_files=self.as_yaml_files
+            app_service_config_files=self.as_yaml_files,
+            event_cache_size=1,
         )
         hs = yield setup_test_homeserver(config=config)
         self.db_pool = hs.get_db_pool()
@@ -438,7 +440,7 @@ class ApplicationServiceStoreConfigTestCase(unittest.TestCase):
         f1 = self._write_config(suffix="1")
         f2 = self._write_config(suffix="2")
-        config = Mock(app_service_config_files=[f1, f2])
+        config = Mock(app_service_config_files=[f1, f2], event_cache_size=1)
         hs = yield setup_test_homeserver(config=config, datastore=Mock())
         ApplicationServiceStore(hs)
@@ -448,7 +450,7 @@ class ApplicationServiceStoreConfigTestCase(unittest.TestCase):
         f1 = self._write_config(id="id", suffix="1")
         f2 = self._write_config(id="id", suffix="2")
-        config = Mock(app_service_config_files=[f1, f2])
+        config = Mock(app_service_config_files=[f1, f2], event_cache_size=1)
         hs = yield setup_test_homeserver(config=config, datastore=Mock())
         with self.assertRaises(ConfigError) as cm:
@@ -464,7 +466,7 @@ class ApplicationServiceStoreConfigTestCase(unittest.TestCase):
         f1 = self._write_config(as_token="as_token", suffix="1")
         f2 = self._write_config(as_token="as_token", suffix="2")
-        config = Mock(app_service_config_files=[f1, f2])
+        config = Mock(app_service_config_files=[f1, f2], event_cache_size=1)
         hs = yield setup_test_homeserver(config=config, datastore=Mock())
         with self.assertRaises(ConfigError) as cm:

View File

@@ -48,11 +48,12 @@ class SQLBaseStoreTestCase(unittest.TestCase):
         config = Mock()
         config.event_cache_size = 1
+        config.database_config = {"name": "sqlite3"}
         hs = HomeServer(
             "test",
             db_pool=self.db_pool,
             config=config,
-            database_engine=create_engine("sqlite3"),
+            database_engine=create_engine(config),
         )
         self.datastore = SQLBaseStore(hs)
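
This hunk switches ``create_engine`` from taking a bare engine name to taking the whole config object, which is why the test now also has to set ``config.database_config``. A rough sketch of that style of config-driven dispatch, assuming the engine is chosen from ``database_config["name"]`` (the class names and the function name below are illustrative stand-ins, not the real ``synapse.storage.engines`` API)::

    class Sqlite3Engine(object):
        """Illustrative stand-in for the real SQLite engine class."""


    class PostgresEngine(object):
        """Illustrative stand-in for the real PostgreSQL engine class."""


    def create_engine_from_config(config):
        # Pick the engine from the database_config dict on the config object,
        # rather than from a hard-coded "sqlite3" string as the tests did before.
        name = config.database_config["name"]
        if name == "sqlite3":
            return Sqlite3Engine()
        if name == "psycopg2":
            return PostgresEngine()
        raise RuntimeError("Unsupported database engine: %r" % (name,))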

View File

@@ -20,6 +20,7 @@ from synapse.storage.prepare_database import prepare_database
 from synapse.storage.engines import create_engine
 from synapse.server import HomeServer
 from synapse.federation.transport import server
+from synapse.types import Requester
 from synapse.util.ratelimitutils import FederationRateLimiter
 from synapse.util.logcontext import LoggingContext
@@ -50,6 +51,9 @@ def setup_test_homeserver(name="test", datastore=None, config=None, **kargs):
     config.macaroon_secret_key = "not even a little secret"
     config.server_name = "server.under.test"
     config.trusted_third_party_id_servers = []
+    config.room_invite_state_types = []
+    config.database_config = {"name": "sqlite3"}
     if "clock" not in kargs:
         kargs["clock"] = MockClock()
@@ -60,7 +64,7 @@ def setup_test_homeserver(name="test", datastore=None, config=None, **kargs):
     hs = HomeServer(
         name, db_pool=db_pool, config=config,
         version_string="Synapse/tests",
-        database_engine=create_engine("sqlite3"),
+        database_engine=create_engine(config),
         get_db_conn=db_pool.get_db_conn,
         **kargs
     )
@@ -69,7 +73,7 @@ def setup_test_homeserver(name="test", datastore=None, config=None, **kargs):
     hs = HomeServer(
         name, db_pool=None, datastore=datastore, config=config,
         version_string="Synapse/tests",
-        database_engine=create_engine("sqlite3"),
+        database_engine=create_engine(config),
         **kargs
     )
@@ -278,18 +282,24 @@ class SQLiteMemoryDbPool(ConnectionPool, object):
             cp_max=1,
         )
+        self.config = Mock()
+        self.config.database_config = {"name": "sqlite3"}
     def prepare(self):
-        engine = create_engine("sqlite3")
+        engine = self.create_engine()
         return self.runWithConnection(
-            lambda conn: prepare_database(conn, engine)
+            lambda conn: prepare_database(conn, engine, self.config)
         )
     def get_db_conn(self):
         conn = self.connect()
-        engine = create_engine("sqlite3")
-        prepare_database(conn, engine)
+        engine = self.create_engine()
+        prepare_database(conn, engine, self.config)
         return conn
+    def create_engine(self):
+        return create_engine(self.config)
 class MemoryDataStore(object):
@@ -502,3 +512,7 @@ class DeferredMockCallable(object):
                 "call(%s)" % _format_call(c[0], c[1]) for c in calls
             ])
         )
+
+
+def requester_for_user(user):
+    return Requester(user, None, False)
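
The ``requester_for_user`` helper added at the end wraps a plain user in a ``Requester`` so the updated handler signatures (``set_displayname``, ``set_avatar_url``, ``create_and_send_nonmember_event``) can be called from the tests above. A self-contained sketch of how the helper behaves, using a namedtuple stand-in for ``synapse.types.Requester`` (the field names are inferred from the positional ``Requester(user, None, False)`` call and may not match the real class exactly)::

    from collections import namedtuple

    # Stand-in for synapse.types.Requester; the diff constructs it positionally
    # as Requester(user, token_id, is_guest).
    Requester = namedtuple("Requester", ["user", "token_id", "is_guest"])


    def requester_for_user(user):
        # No access token, not a guest, matching the helper in tests/utils.py.
        return Requester(user, None, False)


    class FakeUser(object):
        def __init__(self, localpart):
            self.localpart = localpart


    requester = requester_for_user(FakeUser("1234ABCD"))
    # The REST profile tests above now unwrap the Requester to reach the user:
    assert requester.user.localpart == "1234ABCD"
    assert requester.is_guest is False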