Merge remote-tracking branch 'origin/develop' into store_event_actions

commit c061b47c57

CHANGES.rst (45 lines changed)
@@ -1,3 +1,48 @@
+Changes in synapse v0.12.0-rc2 (2015-12-14)
+===========================================
+
+* Add caches for whether rooms have been forgotten by a user (PR #434)
+* Remove instructions to use ``--process-dependency-link`` since all of the
+  dependencies of synapse are on PyPI (PR #436)
+* Parallelise the processing of ``/sync`` requests (PR #437)
+* Fix race updating presence in ``/events`` (PR #444)
+* Fix bug back-populating search results (PR #441)
+* Fix bug calculating state in ``/sync`` requests (PR #442)
+
+Changes in synapse v0.12.0-rc1 (2015-12-10)
+===========================================
+
+* Host the client APIs released as r0 by
+  https://matrix.org/docs/spec/r0.0.0/client_server.html
+  on paths prefixed by ``/_matrix/client/r0``. (PR #430, PR #415, PR #400)
+* Updates the client APIs to match r0 of the matrix specification.
+
+  * All APIs return events in the new event format, old APIs also include
+    the fields needed to parse the event using the old format for
+    compatibility. (PR #402)
+  * Search results are now given as a JSON array rather than
+    a JSON object (PR #405)
+  * Miscellaneous changes to search (PR #403, PR #406, PR #412)
+  * Filter JSON objects may now be passed as query parameters to ``/sync``
+    (PR #431)
+  * Fix implementation of ``/admin/whois`` (PR #418)
+  * Only include the rooms that user has left in ``/sync`` if the client
+    requests them in the filter (PR #423)
+  * Don't push for ``m.room.message`` by default (PR #411)
+  * Add API for setting per account user data (PR #392)
+  * Allow users to forget rooms (PR #385)
+
+* Performance improvements and monitoring:
+
+  * Add per-request counters for CPU time spent on the main python thread.
+    (PR #421, PR #420)
+  * Add per-request counters for time spent in the database (PR #429)
+
+* Make state updates in the C+S API idempotent (PR #416)
+* Only fire ``user_joined_room`` if the user has actually joined. (PR #410)
+* Reuse a single http client, rather than creating new ones (PR #413)
+
+* Fixed a bug upgrading from older versions of synapse on postgresql (PR #417)
+
 Changes in synapse v0.11.1 (2015-11-20)
 =======================================
README.rst
@@ -130,7 +130,7 @@ To install the synapse homeserver run::
     virtualenv -p python2.7 ~/.synapse
     source ~/.synapse/bin/activate
     pip install --upgrade setuptools
-    pip install --process-dependency-links https://github.com/matrix-org/synapse/tarball/master
+    pip install https://github.com/matrix-org/synapse/tarball/master
 
 This installs synapse, along with the libraries it uses, into a virtual
 environment under ``~/.synapse``. Feel free to pick a different directory
@@ -235,8 +235,7 @@ pip may be outdated (6.0.7-1 and needs to be upgraded to 6.0.8-1 )::
 You also may need to explicitly specify python 2.7 again during the install
 request::
 
-    pip2.7 install --process-dependency-links \
-        https://github.com/matrix-org/synapse/tarball/master
+    pip2.7 install https://github.com/matrix-org/synapse/tarball/master
 
 If you encounter an error with lib bcrypt causing an Wrong ELF Class:
 ELFCLASS32 (x64 Systems), you may need to reinstall py-bcrypt to correctly
@@ -295,8 +294,7 @@ Troubleshooting
 Troubleshooting Installation
 ----------------------------
 
-Synapse requires pip 1.7 or later, so if your OS provides too old a version and
-you get errors about ``error: no such option: --process-dependency-links`` you
+Synapse requires pip 1.7 or later, so if your OS provides too old a version you
 may need to manually upgrade it::
 
     sudo pip install --upgrade pip
jenkins.sh (21 lines changed)
@@ -5,9 +5,10 @@ export PYTHONDONTWRITEBYTECODE=yep
 # Output test results as junit xml
 export TRIAL_FLAGS="--reporter=subunit"
 export TOXSUFFIX="| subunit-1to2 | subunit2junitxml --no-passthrough --output-to=results.xml"
 
-# Output coverage to coverage.xml
-export DUMP_COVERAGE_COMMAND="coverage xml -o coverage.xml"
+# Write coverage reports to a separate file for each process
+# Include branch coverage
+export COVERAGE_OPTS="-p"
+export DUMP_COVERAGE_COMMAND="coverage help"
 
 # Output flake8 violations to violations.flake8.log
 # Don't exit with non-0 status code on Jenkins,
@@ -15,6 +16,8 @@ export DUMP_COVERAGE_COMMAND="coverage xml -o coverage.xml"
 # UNSTABLE or FAILURE this build.
 export PEP8SUFFIX="--output-file=violations.flake8.log || echo flake8 finished with status code \$?"
 
+rm .coverage.* || echo "No files to remove"
+
 tox
 
 : ${GIT_BRANCH:="origin/$(git rev-parse --abbrev-ref HEAD)"}
@@ -45,7 +48,7 @@ export PERL5LIB PERL_MB_OPT PERL_MM_OPT
 : ${PORT_BASE:=8000}
 
 echo >&2 "Running sytest with SQLite3";
-./run-tests.pl -O tap --synapse-directory .. --all --port-base $PORT_BASE > results-sqlite3.tap
+./run-tests.pl --coverage -O tap --synapse-directory .. --all --port-base $PORT_BASE > results-sqlite3.tap
 
 RUN_POSTGRES=""
 
@@ -64,7 +67,15 @@ done
 if test $RUN_POSTGRES = ":$(($PORT_BASE + 1)):$(($PORT_BASE + 2))"; then
     echo >&2 "Running sytest with PostgreSQL";
     pip install psycopg2
-    ./run-tests.pl -O tap --synapse-directory .. --all --port-base $PORT_BASE > results-postgresql.tap
+    ./run-tests.pl --coverage -O tap --synapse-directory .. --all --port-base $PORT_BASE > results-postgresql.tap
 else
     echo >&2 "Skipping running sytest with PostgreSQL, $RUN_POSTGRES"
 fi
 
+cd ..
+cp sytest/.coverage.* .
+
+# Combine the coverage reports
+python -m coverage combine
+# Output coverage to coverage.xml
+coverage xml -o coverage.xml
scripts/gen_password (new file, 1 line)
@@ -0,0 +1 @@
+perl -MCrypt::Random -MCrypt::Eksblowfish::Bcrypt -e 'print Crypt::Eksblowfish::Bcrypt::bcrypt("secret", "\$2\$12\$" . Crypt::Eksblowfish::Bcrypt::en_base64(Crypt::Random::makerandom_octet(Length=>16)))."\n"'
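Note: the one-liner prints a bcrypt hash of the hard-coded password ``secret``; edit that string to hash anything else. A rough Python equivalent, assuming the third-party ``bcrypt`` package is available (the exact salt prefix it emits differs from the perl module's)::

    import bcrypt

    # cost factor 12, matching the $12$ in the perl version
    print(bcrypt.hashpw(b"secret", bcrypt.gensalt(rounds=12)))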
synapse/__init__.py
@@ -16,4 +16,4 @@
 """ This is a reference implementation of a Matrix home server.
 """
 
-__version__ = "0.11.1"
+__version__ = "0.12.0-rc2"
synapse/api/auth.py
@@ -778,7 +778,7 @@ class Auth(object):
         if "third_party_invite" in event.content:
             key = (
                 EventTypes.ThirdPartyInvite,
-                event.content["third_party_invite"]["token"]
+                event.content["third_party_invite"]["signed"]["token"]
             )
             third_party_invite = current_state.get(key)
             if third_party_invite:
synapse/crypto/keyring.py
@@ -230,7 +230,9 @@ class Keyring(object):
 
         missing_keys = {}
         for group in group_id_to_group.values():
-            missing_keys.setdefault(group.server_name, set()).union(group.key_ids)
+            missing_keys.setdefault(group.server_name, set()).update(
+                group.key_ids
+            )
 
         for fn in key_fetch_fns:
             results = yield fn(missing_keys.items())
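Note: the bug fixed above is easy to miss: ``set.union`` returns a new set and leaves the receiver untouched, so the key ids were computed and then silently discarded; ``set.update`` mutates in place. A minimal illustration (Python 2, as synapse was at the time)::

    s = set()
    s.union(["key1"])    # returns set(['key1']), but s is unchanged
    print(s)             # set([])
    s.update(["key1"])   # mutates s in place
    print(s)             # set(['key1'])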
synapse/handlers/events.py
@@ -69,7 +69,12 @@ class EventStreamHandler(BaseHandler):
             A deferred that completes once their presence has been updated.
         """
         if user not in self._streams_per_user:
-            self._streams_per_user[user] = 0
+            # Make sure we set the streams per user to 1 here rather than
+            # setting it to zero and incrementing the value below.
+            # Otherwise this may race with stopped_stream causing the
+            # user to be erased from the map before we have a chance
+            # to increment it.
+            self._streams_per_user[user] = 1
             if user in self._stop_timer_per_user:
                 try:
                     self.clock.cancel_call_later(
@@ -79,8 +84,8 @@ class EventStreamHandler(BaseHandler):
                     logger.exception("Failed to cancel event timer")
                 else:
                     yield started_user_eventstream(self.distributor, user)
-
-        self._streams_per_user[user] += 1
+        else:
+            self._streams_per_user[user] += 1
 
     def stopped_stream(self, user):
         """If there are no streams for a user this starts a timer that will
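Note: the comment added in the first hunk describes the window being closed; a sketch of the bad interleaving under the old code (illustrative, not an actual trace)::

    # started_stream:                      # stopped_stream, concurrently:
    self._streams_per_user[user] = 0
                                           # sees a count of 0 and erases the
                                           # user from the map
    self._streams_per_user[user] += 1      # KeyError, or a lost count

Initialising straight to 1 means there is no moment at which an active stream is recorded with a count of 0.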
synapse/handlers/federation.py
@@ -604,7 +604,7 @@ class FederationHandler(BaseHandler):
         handled_events = set()
 
         try:
-            new_event = self._sign_event(event)
+            event = self._sign_event(event)
             # Try the host we successfully got a response to /make_join/
             # request first.
             try:
@@ -612,7 +612,7 @@ class FederationHandler(BaseHandler):
                 target_hosts.insert(0, origin)
             except ValueError:
                 pass
-            ret = yield self.replication_layer.send_join(target_hosts, new_event)
+            ret = yield self.replication_layer.send_join(target_hosts, event)
 
             origin = ret["origin"]
             state = ret["state"]
@@ -621,12 +621,12 @@ class FederationHandler(BaseHandler):
 
             handled_events.update([s.event_id for s in state])
             handled_events.update([a.event_id for a in auth_chain])
-            handled_events.add(new_event.event_id)
+            handled_events.add(event.event_id)
 
             logger.debug("do_invite_join auth_chain: %s", auth_chain)
             logger.debug("do_invite_join state: %s", state)
 
-            logger.debug("do_invite_join event: %s", new_event)
+            logger.debug("do_invite_join event: %s", event)
 
             try:
                 yield self.store.store_room(
@@ -644,14 +644,14 @@ class FederationHandler(BaseHandler):
 
             with PreserveLoggingContext():
                 d = self.notifier.on_new_room_event(
-                    new_event, event_stream_id, max_stream_id,
+                    event, event_stream_id, max_stream_id,
                     extra_users=[joinee]
                 )
 
             def log_failure(f):
                 logger.warn(
                     "Failed to notify about %s: %s",
-                    new_event.event_id, f.value
+                    event.event_id, f.value
                 )
 
             d.addErrback(log_failure)
@@ -1658,11 +1658,22 @@ class FederationHandler(BaseHandler):
         sender = invite["sender"]
         room_id = invite["room_id"]
 
+        if "signed" not in invite or "token" not in invite["signed"]:
+            logger.info(
+                "Discarding received notification of third party invite "
+                "without signed: %s" % (invite,)
+            )
+            return
+
+        third_party_invite = {
+            "signed": invite["signed"],
+        }
+
         event_dict = {
             "type": EventTypes.Member,
             "content": {
                 "membership": Membership.INVITE,
-                "third_party_invite": invite,
+                "third_party_invite": third_party_invite,
             },
             "room_id": room_id,
             "sender": sender,
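Note: only the ``signed`` block is copied into the membership event now, rather than the whole invite, and invites without one are dropped early. For reference, the r0 event format expects roughly this shape (values hypothetical)::

    "third_party_invite": {
        "signed": {
            "mxid": "@alice:example.com",
            "token": "abc123",
            "signatures": {...}
        }
    }

The ``signed`` object is the part the identity server signs, so it is the only part other homeservers can verify; guarding on its presence avoids building an event that would fail auth later.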
@@ -1673,6 +1684,11 @@ class FederationHandler(BaseHandler):
         builder = self.event_builder_factory.new(event_dict)
         EventValidator().validate_new(builder)
         event, context = yield self._create_new_client_event(builder=builder)
 
+        event, context = yield self.add_display_name_to_third_party_invite(
+            event_dict, event, context
+        )
+
         self.auth.check(event, context.current_state)
         yield self._validate_keyserver(event, auth_events=context.current_state)
         member_handler = self.hs.get_handlers().room_member_handler
@@ -1694,6 +1710,10 @@ class FederationHandler(BaseHandler):
             builder=builder,
         )
 
+        event, context = yield self.add_display_name_to_third_party_invite(
+            event_dict, event, context
+        )
+
         self.auth.check(event, auth_events=context.current_state)
         yield self._validate_keyserver(event, auth_events=context.current_state)
 
@@ -1703,6 +1723,27 @@ class FederationHandler(BaseHandler):
         member_handler = self.hs.get_handlers().room_member_handler
         yield member_handler.change_membership(event, context)
 
+    @defer.inlineCallbacks
+    def add_display_name_to_third_party_invite(self, event_dict, event, context):
+        key = (
+            EventTypes.ThirdPartyInvite,
+            event.content["third_party_invite"]["signed"]["token"]
+        )
+        original_invite = context.current_state.get(key)
+        if not original_invite:
+            logger.info(
+                "Could not find invite event for third_party_invite - "
+                "discarding: %s" % (event_dict,)
+            )
+            return
+
+        display_name = original_invite.content["display_name"]
+        event_dict["content"]["third_party_invite"]["display_name"] = display_name
+        builder = self.event_builder_factory.new(event_dict)
+        EventValidator().validate_new(builder)
+        event, context = yield self._create_new_client_event(builder=builder)
+        defer.returnValue((event, context))
+
     @defer.inlineCallbacks
     def _validate_keyserver(self, event, auth_events):
         token = event.content["third_party_invite"]["signed"]["token"]
synapse/handlers/register.py
@@ -42,7 +42,7 @@ class RegistrationHandler(BaseHandler):
 
         self.distributor = hs.get_distributor()
         self.distributor.declare("registered_user")
-        self.captch_client = CaptchaServerHttpClient(hs)
+        self.captcha_client = CaptchaServerHttpClient(hs)
 
     @defer.inlineCallbacks
     def check_username(self, localpart):
@@ -132,25 +132,9 @@ class RegistrationHandler(BaseHandler):
             raise RegistrationError(
                 500, "Cannot generate user ID.")
 
-        # create a default avatar for the user
-        # XXX: ideally clients would explicitly specify one, but given they don't
-        # and we want consistent and pretty identicons for random users, we'll
-        # do it here.
-        try:
-            auth_user = UserID.from_string(user_id)
-            media_repository = self.hs.get_resource_for_media_repository()
-            identicon_resource = media_repository.getChildWithDefault("identicon", None)
-            upload_resource = media_repository.getChildWithDefault("upload", None)
-            identicon_bytes = identicon_resource.generate_identicon(user_id, 320, 320)
-            content_uri = yield upload_resource.create_content(
-                "image/png", None, identicon_bytes, len(identicon_bytes), auth_user
-            )
-            profile_handler = self.hs.get_handlers().profile_handler
-            profile_handler.set_avatar_url(
-                auth_user, auth_user, ("%s#auto" % (content_uri,))
-            )
-        except NotImplementedError:
-            pass  # make tests pass without messing around creating default avatars
+        # We used to generate default identicons here, but nowadays
+        # we want clients to generate their own as part of their branding
+        # rather than there being consistent matrix-wide ones, so we don't.
 
         defer.returnValue((user_id, token))
 
|
@ -704,13 +704,48 @@ class RoomMemberHandler(BaseHandler):
|
|||||||
token_id,
|
token_id,
|
||||||
txn_id
|
txn_id
|
||||||
):
|
):
|
||||||
|
room_state = yield self.hs.get_state_handler().get_current_state(room_id)
|
||||||
|
|
||||||
|
inviter_display_name = ""
|
||||||
|
inviter_avatar_url = ""
|
||||||
|
member_event = room_state.get((EventTypes.Member, user.to_string()))
|
||||||
|
if member_event:
|
||||||
|
inviter_display_name = member_event.content.get("displayname", "")
|
||||||
|
inviter_avatar_url = member_event.content.get("avatar_url", "")
|
||||||
|
|
||||||
|
canonical_room_alias = ""
|
||||||
|
canonical_alias_event = room_state.get((EventTypes.CanonicalAlias, ""))
|
||||||
|
if canonical_alias_event:
|
||||||
|
canonical_room_alias = canonical_alias_event.content.get("alias", "")
|
||||||
|
|
||||||
|
room_name = ""
|
||||||
|
room_name_event = room_state.get((EventTypes.Name, ""))
|
||||||
|
if room_name_event:
|
||||||
|
room_name = room_name_event.content.get("name", "")
|
||||||
|
|
||||||
|
room_join_rules = ""
|
||||||
|
join_rules_event = room_state.get((EventTypes.JoinRules, ""))
|
||||||
|
if join_rules_event:
|
||||||
|
room_join_rules = join_rules_event.content.get("join_rule", "")
|
||||||
|
|
||||||
|
room_avatar_url = ""
|
||||||
|
room_avatar_event = room_state.get((EventTypes.RoomAvatar, ""))
|
||||||
|
if room_avatar_event:
|
||||||
|
room_avatar_url = room_avatar_event.content.get("url", "")
|
||||||
|
|
||||||
token, public_key, key_validity_url, display_name = (
|
token, public_key, key_validity_url, display_name = (
|
||||||
yield self._ask_id_server_for_third_party_invite(
|
yield self._ask_id_server_for_third_party_invite(
|
||||||
id_server,
|
id_server=id_server,
|
||||||
medium,
|
medium=medium,
|
||||||
address,
|
address=address,
|
||||||
room_id,
|
room_id=room_id,
|
||||||
user.to_string()
|
inviter_user_id=user.to_string(),
|
||||||
|
room_alias=canonical_room_alias,
|
||||||
|
room_avatar_url=room_avatar_url,
|
||||||
|
room_join_rules=room_join_rules,
|
||||||
|
room_name=room_name,
|
||||||
|
inviter_display_name=inviter_display_name,
|
||||||
|
inviter_avatar_url=inviter_avatar_url
|
||||||
)
|
)
|
||||||
)
|
)
|
||||||
msg_handler = self.hs.get_handlers().message_handler
|
msg_handler = self.hs.get_handlers().message_handler
|
||||||
@@ -732,7 +767,19 @@ class RoomMemberHandler(BaseHandler):
 
     @defer.inlineCallbacks
     def _ask_id_server_for_third_party_invite(
-        self, id_server, medium, address, room_id, sender):
+        self,
+        id_server,
+        medium,
+        address,
+        room_id,
+        inviter_user_id,
+        room_alias,
+        room_avatar_url,
+        room_join_rules,
+        room_name,
+        inviter_display_name,
+        inviter_avatar_url
+    ):
         is_url = "%s%s/_matrix/identity/api/v1/store-invite" % (
             id_server_scheme, id_server,
         )
@@ -742,7 +789,13 @@ class RoomMemberHandler(BaseHandler):
                 "medium": medium,
                 "address": address,
                 "room_id": room_id,
-                "sender": sender,
+                "room_alias": room_alias,
+                "room_avatar_url": room_avatar_url,
+                "room_join_rules": room_join_rules,
+                "room_name": room_name,
+                "sender": inviter_user_id,
+                "sender_display_name": inviter_display_name,
+                "sender_avatar_url": inviter_avatar_url,
             }
         )
         # TODO: Check for success
@@ -755,7 +808,7 @@ class RoomMemberHandler(BaseHandler):
         defer.returnValue((token, public_key, key_validity_url, display_name))
 
     def forget(self, user, room_id):
-        self.store.forget(user.to_string(), room_id)
+        return self.store.forget(user.to_string(), room_id)
 
 
 class RoomListHandler(BaseHandler):
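Note: the ``return`` matters because ``self.store.forget`` is asynchronous; without it the deferred was dropped, errors vanished, and callers could not wait for the write. The REST hunk further down relies on this, switching to::

    yield self.handlers.room_member_handler.forget(user, room_id)

so the 200 response is only sent once the database interaction has completed.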
synapse/handlers/search.py
@@ -152,11 +152,15 @@ class SearchHandler(BaseHandler):
 
         highlights = set()
 
+        count = None
+
         if order_by == "rank":
             search_result = yield self.store.search_msgs(
                 room_ids, search_term, keys
             )
 
+            count = search_result["count"]
+
             if search_result["highlights"]:
                 highlights.update(search_result["highlights"])
@@ -207,6 +211,8 @@ class SearchHandler(BaseHandler):
                 if search_result["highlights"]:
                     highlights.update(search_result["highlights"])
 
+                count = search_result["count"]
+
                 results = search_result["results"]
 
                 results_map = {r["event"].event_id: r for r in results}
@@ -359,7 +365,7 @@ class SearchHandler(BaseHandler):
 
         rooms_cat_res = {
             "results": results,
-            "count": len(results),
+            "count": count,
             "highlights": list(highlights),
         }
 
synapse/handlers/sync.py
@@ -17,6 +17,7 @@ from ._base import BaseHandler
 
 from synapse.streams.config import PaginationConfig
 from synapse.api.constants import Membership, EventTypes
+from synapse.util import unwrapFirstError
 
 from twisted.internet import defer
 
@@ -224,9 +225,10 @@ class SyncHandler(BaseHandler):
         joined = []
         invited = []
         archived = []
+        deferreds = []
         for event in room_list:
             if event.membership == Membership.JOIN:
-                room_sync = yield self.full_state_sync_for_joined_room(
+                room_sync_deferred = self.full_state_sync_for_joined_room(
                     room_id=event.room_id,
                     sync_config=sync_config,
                     now_token=now_token,
@@ -235,7 +237,8 @@ class SyncHandler(BaseHandler):
                     tags_by_room=tags_by_room,
                     account_data_by_room=account_data_by_room,
                 )
-                joined.append(room_sync)
+                room_sync_deferred.addCallback(joined.append)
+                deferreds.append(room_sync_deferred)
             elif event.membership == Membership.INVITE:
                 invite = yield self.store.get_event(event.event_id)
                 invited.append(InvitedSyncResult(
|
|||||||
leave_token = now_token.copy_and_replace(
|
leave_token = now_token.copy_and_replace(
|
||||||
"room_key", "s%d" % (event.stream_ordering,)
|
"room_key", "s%d" % (event.stream_ordering,)
|
||||||
)
|
)
|
||||||
room_sync = yield self.full_state_sync_for_archived_room(
|
room_sync_deferred = self.full_state_sync_for_archived_room(
|
||||||
sync_config=sync_config,
|
sync_config=sync_config,
|
||||||
room_id=event.room_id,
|
room_id=event.room_id,
|
||||||
leave_event_id=event.event_id,
|
leave_event_id=event.event_id,
|
||||||
@@ -255,7 +258,12 @@ class SyncHandler(BaseHandler):
                     tags_by_room=tags_by_room,
                     account_data_by_room=account_data_by_room,
                 )
-                archived.append(room_sync)
+                room_sync_deferred.addCallback(archived.append)
+                deferreds.append(room_sync_deferred)
+
+        yield defer.gatherResults(
+            deferreds, consumeErrors=True
+        ).addErrback(unwrapFirstError)
 
         defer.returnValue(SyncResult(
             presence=presence,
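Note: this is the ``/sync`` parallelisation from PR #437 in the changelog: rather than ``yield``-ing each room's sync serially, the per-room deferreds are started eagerly and gathered at the end. The general shape of the pattern (a sketch; ``work``, ``items`` and ``results`` are illustrative names)::

    deferreds = []
    for item in items:
        d = work(item)                  # starts without blocking the loop
        d.addCallback(results.append)   # collect each result as it arrives
        deferreds.append(d)

    yield defer.gatherResults(
        deferreds, consumeErrors=True
    ).addErrback(unwrapFirstError)

``consumeErrors=True`` plus synapse's ``unwrapFirstError`` re-raises the first underlying exception instead of a wrapped ``FirstError``.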
synapse/http/server.py
@@ -15,7 +15,7 @@
 
 
 from synapse.api.errors import (
-    cs_exception, SynapseError, CodeMessageException, UnrecognizedRequestError
+    cs_exception, SynapseError, CodeMessageException, UnrecognizedRequestError, Codes
 )
 from synapse.util.logcontext import LoggingContext, PreserveLoggingContext
 import synapse.metrics
@@ -127,7 +127,10 @@ def request_handler(request_handler):
             respond_with_json(
                 request,
                 500,
-                {"error": "Internal server error"},
+                {
+                    "error": "Internal server error",
+                    "errcode": Codes.UNKNOWN,
+                },
                 send_cors=True
             )
     return wrapped_request_handler
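Note: ``Codes.UNKNOWN`` is the standard ``M_UNKNOWN`` matrix error code, so an unhandled exception now produces a body clients can dispatch on::

    {
        "error": "Internal server error",
        "errcode": "M_UNKNOWN"
    }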
synapse/rest/client/v1/room.py
@@ -490,7 +490,7 @@ class RoomMembershipRestServlet(ClientV1RestServlet):
         )
 
         if membership_action == "forget":
-            self.handlers.room_member_handler.forget(user, room_id)
+            yield self.handlers.room_member_handler.forget(user, room_id)
 
         defer.returnValue((200, {}))
 
synapse/rest/client/v2_alpha/sync.py
@@ -104,7 +104,6 @@ class SyncRestServlet(RestServlet):
         )
 
         if filter_id and filter_id.startswith('{'):
-            logging.error("MJH %r", filter_id)
             try:
                 filter_object = json.loads(filter_id)
             except:
@@ -352,20 +351,36 @@ class SyncRestServlet(RestServlet):
                 continue
 
             prev_event_id = timeline_event.unsigned.get("replaces_state", None)
-            logger.debug("Replacing %s with %s in state dict",
-                         timeline_event.event_id, prev_event_id)
 
-            if prev_event_id is None:
+            prev_content = timeline_event.unsigned.get('prev_content')
+            prev_sender = timeline_event.unsigned.get('prev_sender')
+            # Empirically it seems possible for the event to have a
+            # "replaces_state" key but not a prev_content or prev_sender
+            # markjh conjectures that it could be due to the server not
+            # having a copy of that event.
+            # If this is the case then we ignore the previous event. This will
+            # cause the displayname calculations on the client to be incorrect
+            if prev_event_id is None or not prev_content or not prev_sender:
+                logger.debug(
+                    "Removing %r from the state dict, as it is missing"
+                    " prev_content (prev_event_id=%r)",
+                    timeline_event.event_id, prev_event_id
+                )
                 del result[event_key]
             else:
+                logger.debug(
+                    "Replacing %r with %r in state dict",
+                    timeline_event.event_id, prev_event_id
+                )
                 result[event_key] = FrozenEvent({
                     "type": timeline_event.type,
                     "state_key": timeline_event.state_key,
-                    "content": timeline_event.unsigned['prev_content'],
-                    "sender": timeline_event.unsigned['prev_sender'],
+                    "content": prev_content,
+                    "sender": prev_sender,
                     "event_id": prev_event_id,
                     "room_id": timeline_event.room_id,
                 })
 
             logger.debug("New value: %r", result.get(event_key))
 
         return result
synapse/storage/registration.py
@@ -258,10 +258,10 @@ class RegistrationStore(SQLBaseStore):
     @defer.inlineCallbacks
     def user_add_threepid(self, user_id, medium, address, validated_at, added_at):
         yield self._simple_upsert("user_threepids", {
-            "user_id": user_id,
             "medium": medium,
             "address": address,
         }, {
+            "user_id": user_id,
             "validated_at": validated_at,
             "added_at": added_at,
         })
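Note: in ``_simple_upsert(table, keyvalues, values)`` the first dict identifies the row and the second carries the columns to set. Moving ``user_id`` out of the key makes a threepid unique on ``(medium, address)`` alone, so re-validating an address that previously belonged to another account re-points the existing row instead of inserting a duplicate. Schematically (argument roles only)::

    self._simple_upsert(
        "user_threepids",
        {"medium": medium, "address": address},              # row identity
        {"user_id": user_id, "validated_at": validated_at,   # columns to set
         "added_at": added_at},
    )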
synapse/storage/roommember.py
@@ -18,7 +18,7 @@ from twisted.internet import defer
 from collections import namedtuple
 
 from ._base import SQLBaseStore
-from synapse.util.caches.descriptors import cached
+from synapse.util.caches.descriptors import cached, cachedInlineCallbacks
 
 from synapse.api.constants import Membership
 from synapse.types import UserID
@@ -121,7 +121,7 @@ class RoomMemberStore(SQLBaseStore):
         return self.get_rooms_for_user_where_membership_is(
             user_id, [Membership.INVITE]
         ).addCallback(lambda invites: self._get_events([
-            invites.event_id for invite in invites
+            invite.event_id for invite in invites
         ]))
 
     def get_leave_and_ban_events_for_user(self, user_id):
@@ -270,6 +270,7 @@ class RoomMemberStore(SQLBaseStore):
 
         defer.returnValue(ret)
 
+    @defer.inlineCallbacks
     def forget(self, user_id, room_id):
         """Indicate that user_id wishes to discard history for room_id."""
         def f(txn):
@@ -284,9 +285,11 @@ class RoomMemberStore(SQLBaseStore):
                 " room_id = ?"
             )
             txn.execute(sql, (user_id, room_id))
-        self.runInteraction("forget_membership", f)
+        yield self.runInteraction("forget_membership", f)
+        self.was_forgotten_at.invalidate_all()
+        self.did_forget.invalidate((user_id, room_id))
 
-    @defer.inlineCallbacks
+    @cachedInlineCallbacks(num_args=2)
     def did_forget(self, user_id, room_id):
         """Returns whether user_id has elected to discard history for room_id.
 
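Note: ``cachedInlineCallbacks`` memoises the deferred result per argument tuple, so any write that changes the answer must invalidate explicitly — hence the two invalidation calls added to ``forget`` above. The pattern, schematically::

    @cachedInlineCallbacks(num_args=2)
    def did_forget(self, user_id, room_id):
        ...                # hits the database only on a cache miss

    def forget(self, user_id, room_id):
        ...                # perform the write
        self.did_forget.invalidate((user_id, room_id))
        self.was_forgotten_at.invalidate_all()

These are the caches referred to by "Add caches for whether rooms have been forgotten by a user (PR #434)" in the changelog.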
@@ -310,7 +313,7 @@ class RoomMemberStore(SQLBaseStore):
         count = yield self.runInteraction("did_forget_membership", f)
         defer.returnValue(count == 0)
 
-    @defer.inlineCallbacks
+    @cachedInlineCallbacks(num_args=3)
     def was_forgotten_at(self, user_id, room_id, event_id):
         """Returns whether user_id has elected to discard history for room_id at event_id.
 
synapse/storage/search.py
@@ -85,6 +85,11 @@ class SearchStore(BackgroundUpdateStore):
                 # skip over it.
                 continue
 
+            if not isinstance(value, basestring):
+                # If the event body, name or topic isn't a string
+                # then skip over it
+                continue
+
             event_search_rows.append((event_id, room_id, key, value))
 
         if isinstance(self.database_engine, PostgresEngine):
@@ -143,7 +148,7 @@ class SearchStore(BackgroundUpdateStore):
 
         search_query = search_query = _parse_query(self.database_engine, search_term)
 
-        args = [search_query]
+        args = []
 
         # Make sure we don't explode because the person is in too many rooms.
         # We filter the results below regardless.
@@ -162,18 +167,36 @@ class SearchStore(BackgroundUpdateStore):
                 "(%s)" % (" OR ".join(local_clauses),)
             )
 
+        count_args = args
+        count_clauses = clauses
+
         if isinstance(self.database_engine, PostgresEngine):
             sql = (
-                "SELECT ts_rank_cd(vector, query) AS rank, room_id, event_id"
-                " FROM to_tsquery('english', ?) as query, event_search"
-                " WHERE vector @@ query"
+                "SELECT ts_rank_cd(vector, to_tsquery('english', ?)) AS rank,"
+                " room_id, event_id"
+                " FROM event_search"
+                " WHERE vector @@ to_tsquery('english', ?)"
             )
+            args = [search_query, search_query] + args
+
+            count_sql = (
+                "SELECT room_id, count(*) as count FROM event_search"
+                " WHERE vector @@ to_tsquery('english', ?)"
+            )
+            count_args = [search_query] + count_args
         elif isinstance(self.database_engine, Sqlite3Engine):
             sql = (
                 "SELECT rank(matchinfo(event_search)) as rank, room_id, event_id"
                 " FROM event_search"
                 " WHERE value MATCH ?"
             )
+            args = [search_query] + args
+
+            count_sql = (
+                "SELECT room_id, count(*) as count FROM event_search"
+                " WHERE value MATCH ?"
+            )
+            count_args = [search_term] + count_args
         else:
             # This should be unreachable.
             raise Exception("Unrecognized database engine")
@@ -181,6 +204,9 @@ class SearchStore(BackgroundUpdateStore):
         for clause in clauses:
             sql += " AND " + clause
 
+        for clause in count_clauses:
+            count_sql += " AND " + clause
+
         # We add an arbitrary limit here to ensure we don't try to pull the
         # entire table from the database.
         sql += " ORDER BY rank DESC LIMIT 500"
@@ -202,6 +228,14 @@ class SearchStore(BackgroundUpdateStore):
         if isinstance(self.database_engine, PostgresEngine):
             highlights = yield self._find_highlights_in_postgres(search_query, events)
 
+        count_sql += " GROUP BY room_id"
+
+        count_results = yield self._execute(
+            "search_rooms_count", self.cursor_to_dict, count_sql, *count_args
+        )
+
+        count = sum(row["count"] for row in count_results if row["room_id"] in room_ids)
+
         defer.returnValue({
             "results": [
                 {
@@ -212,6 +246,7 @@ class SearchStore(BackgroundUpdateStore):
                 if r["event_id"] in event_map
             ],
             "highlights": highlights,
+            "count": count,
         })
 
     @defer.inlineCallbacks
@@ -232,7 +267,7 @@ class SearchStore(BackgroundUpdateStore):
 
         search_query = search_query = _parse_query(self.database_engine, search_term)
 
-        args = [search_query]
+        args = []
 
         # Make sure we don't explode because the person is in too many rooms.
         # We filter the results below regardless.
@@ -251,6 +286,11 @@ class SearchStore(BackgroundUpdateStore):
                 "(%s)" % (" OR ".join(local_clauses),)
             )
 
+        # take copies of the current args and clauses lists, before adding
+        # pagination clauses to main query.
+        count_args = list(args)
+        count_clauses = list(clauses)
+
         if pagination_token:
             try:
                 origin_server_ts, stream = pagination_token.split(",")
@@ -267,12 +307,19 @@ class SearchStore(BackgroundUpdateStore):
 
         if isinstance(self.database_engine, PostgresEngine):
             sql = (
-                "SELECT ts_rank_cd(vector, query) as rank,"
+                "SELECT ts_rank_cd(vector, to_tsquery('english', ?)) as rank,"
                 " origin_server_ts, stream_ordering, room_id, event_id"
-                " FROM to_tsquery('english', ?) as query, event_search"
+                " FROM event_search"
                 " NATURAL JOIN events"
-                " WHERE vector @@ query AND "
+                " WHERE vector @@ to_tsquery('english', ?) AND "
             )
+            args = [search_query, search_query] + args
+
+            count_sql = (
+                "SELECT room_id, count(*) as count FROM event_search"
+                " WHERE vector @@ to_tsquery('english', ?) AND "
+            )
+            count_args = [search_query] + count_args
         elif isinstance(self.database_engine, Sqlite3Engine):
             # We use CROSS JOIN here to ensure we use the right indexes.
             # https://sqlite.org/optoverview.html#crossjoin
@@ -292,11 +339,19 @@ class SearchStore(BackgroundUpdateStore):
                 " CROSS JOIN events USING (event_id)"
                 " WHERE "
             )
+            args = [search_query] + args
+
+            count_sql = (
+                "SELECT room_id, count(*) as count FROM event_search"
+                " WHERE value MATCH ? AND "
+            )
+            count_args = [search_term] + count_args
         else:
             # This should be unreachable.
             raise Exception("Unrecognized database engine")
 
         sql += " AND ".join(clauses)
+        count_sql += " AND ".join(count_clauses)
 
         # We add an arbitrary limit here to ensure we don't try to pull the
         # entire table from the database.
@@ -321,6 +376,14 @@ class SearchStore(BackgroundUpdateStore):
         if isinstance(self.database_engine, PostgresEngine):
             highlights = yield self._find_highlights_in_postgres(search_query, events)
 
+        count_sql += " GROUP BY room_id"
+
+        count_results = yield self._execute(
+            "search_rooms_count", self.cursor_to_dict, count_sql, *count_args
+        )
+
+        count = sum(row["count"] for row in count_results if row["room_id"] in room_ids)
+
         defer.returnValue({
             "results": [
                 {
@@ -334,6 +397,7 @@ class SearchStore(BackgroundUpdateStore):
                 if r["event_id"] in event_map
             ],
             "highlights": highlights,
+            "count": count,
         })
 
     def _find_highlights_in_postgres(self, search_query, events):
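Note: the recurring change across these search hunks implements the ``count`` field consumed by the handler above. The main query is capped (``LIMIT 500``), so ``len(results)`` under-reports once there are more matches than the cap; instead an uncapped ``count(*) ... GROUP BY room_id`` query runs alongside, and the room filter is applied when summing in Python::

    count = sum(
        row["count"]
        for row in count_results
        if row["room_id"] in room_ids   # only rooms the searcher may see
    )

Writing ``to_tsquery('english', ?)`` in both SELECT and WHERE (instead of cross-joining the parsed query as a relation) is why ``search_query`` is now bound twice in ``args`` on PostgreSQL.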
tox.ini (3 lines changed)
@@ -11,7 +11,8 @@ deps =
 setenv =
     PYTHONDONTWRITEBYTECODE = no_byte_code
 commands =
-    /bin/bash -c "coverage run --source=synapse {envbindir}/trial {env:TRIAL_FLAGS:} {posargs:tests} {env:TOXSUFFIX:}"
+    /bin/bash -c "coverage run {env:COVERAGE_OPTS:} --source={toxinidir}/synapse \
+        {envbindir}/trial {env:TRIAL_FLAGS:} {posargs:tests} {env:TOXSUFFIX:}"
     {env:DUMP_COVERAGE_COMMAND:coverage report -m}
 
 [testenv:packaging]