Mirror of https://git.anonymousland.org/anonymousland/synapse-product.git (synced 2024-10-01 08:25:44 -04:00)
fix merge conflicts
Commit e66fbcbb02
CHANGES.rst (12 changed lines)
@@ -3,7 +3,8 @@ Changes in synapse v0.27.0-rc2 (2018-03-19)

 Bugs:

 * Fix bug introduced in v0.27.0-rc1 that causes much increased memory usage in state cache (PR #3005)
+* Fix bug where an invalid event caused server to stop functioning correctly,
+  due to parsing and serializing bugs in ujson library. (pulled in from 0.26.1)

 Changes in synapse v0.27.0-rc1 (2018-03-14)
 ===========================================
@@ -54,6 +55,15 @@ Bug fixes:

 * Fix slow event search, switch back from GIST to GIN indexes (PR #2769, #2848)

+
+Changes in synapse v0.26.1 (2018-03-15)
+=======================================
+
+Bug fixes:
+
+* Fix bug where an invalid event caused server to stop functioning correctly,
+  due to parsing and serializing bugs in ujson library.
+

 Changes in synapse v0.26.0 (2018-01-05)
 =======================================
@@ -30,8 +30,12 @@ use github's pull request workflow to review the contribution, and either ask
 you to make any refinements needed or merge it and make them ourselves. The
 changes will then land on master when we next do a release.

-We use Jenkins for continuous integration (http://matrix.org/jenkins), and
-typically all pull requests get automatically tested Jenkins: if your change breaks the build, Jenkins will yell about it in #matrix-dev:matrix.org so please lurk there and keep an eye open.
+We use `Jenkins <http://matrix.org/jenkins>`_ and
+`Travis <https://travis-ci.org/matrix-org/synapse>`_ for continuous
+integration. All pull requests to synapse get automatically tested by Travis;
+the Jenkins builds require an administrator to start them. If your change
+breaks the build, this will be shown in github, so please keep an eye on the
+pull request for feedback.

 Code style
 ~~~~~~~~~~
README.rst (15 changed lines)

@@ -354,6 +354,10 @@ https://matrix.org/docs/projects/try-matrix-now.html (or build your own with one
 Fedora
 ------

+Synapse is in the Fedora repositories as ``matrix-synapse``::
+
+    sudo dnf install matrix-synapse
+
 Oleg Girko provides Fedora RPMs at
 https://obs.infoserver.lv/project/monitor/matrix-synapse
@@ -890,6 +894,17 @@ This should end with a 'PASSED' result::

    PASSED (successes=143)

+Running the Integration Tests
+=============================
+
+Synapse is accompanied by `SyTest <https://github.com/matrix-org/sytest>`_,
+a Matrix homeserver integration testing suite, which uses HTTP requests to
+access the API as a Matrix client would. It is able to run Synapse directly from
+the source tree, so installation of the server is not required.
+
+Testing with SyTest is recommended for verifying that changes related to the
+Client-Server API are functioning correctly. See the `installation instructions
+<https://github.com/matrix-org/sytest#installing>`_ for details.
+
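As an aside, the kind of client-style HTTP request such an integration test issues can be sketched against a locally running homeserver; the URL, the default port 8008, and the use of the requests library are assumptions, not taken from the README:

# Hits an unauthenticated Client-Server API endpoint the way an integration
# test (or any Matrix client) would, and prints the supported spec versions.
import requests

resp = requests.get("http://localhost:8008/_matrix/client/versions")
resp.raise_for_status()
print(resp.json()["versions"])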
 Building Internal API Documentation
 ===================================
@@ -17,7 +17,7 @@ from synapse.storage.presence import UserPresenceState
from synapse.types import UserID, RoomID
from twisted.internet import defer

-import ujson as json
+import simplejson as json
import jsonschema
from jsonschema import FormatChecker
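Because simplejson exposes the same dumps/loads entry points as ujson, importing it under the old alias keeps every call site unchanged. A tiny illustration of the pattern (the event dict is made up, not taken from the diff):

# Hypothetical example: the module is bound to the old name "json", so
# existing json.dumps / json.loads calls keep working unchanged.
import simplejson as json

event = {"type": "m.room.message", "content": {"body": "hi"}}
wire = json.dumps(event)
assert json.loads(wire) == event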
@@ -77,7 +77,9 @@ class RegistrationConfig(Config):

 # Set the number of bcrypt rounds used to generate password hash.
 # Larger numbers increase the work factor needed to generate the hash.
-# The default number of rounds is 12.
+# The default number is 12 (which equates to 2^12 rounds).
+# N.B. that increasing this will exponentially increase the time required
+# to register or login - e.g. 24 => 2^24 rounds which will take >20 mins.
 bcrypt_rounds: 12

 # Allows users to register as guests without a password/email/etc, and
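The work-factor behaviour described in that comment can be demonstrated outside Synapse; a minimal sketch using the bcrypt package (the password and round counts are illustrative, and timings vary by machine):

# Each extra bcrypt round doubles the work: the default of 12 means 2^12
# iterations, so raising the setting slows registration/login exponentially.
import time
import bcrypt

password = b"correct horse battery staple"

for rounds in (4, 12):
    start = time.monotonic()
    hashed = bcrypt.hashpw(password, bcrypt.gensalt(rounds=rounds))
    print("rounds=%d took %.3fs" % (rounds, time.monotonic() - start))

# Verification reuses the cost factor stored inside the hash itself.
assert bcrypt.checkpw(password, hashed)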
@@ -13,7 +13,7 @@
# See the License for the specific language governing permissions and
# limitations under the License.

-import ujson as json
+import simplejson as json
import logging

from canonicaljson import encode_canonical_json
@@ -38,7 +38,7 @@ from canonicaljson import encode_canonical_json

import logging
import random
-import ujson
+import simplejson

logger = logging.getLogger(__name__)
@@ -678,8 +678,8 @@ class EventCreationHandler(object):

        # Ensure that we can round trip before trying to persist in db
        try:
-           dump = ujson.dumps(unfreeze(event.content))
-           ujson.loads(dump)
+           dump = simplejson.dumps(unfreeze(event.content))
+           simplejson.loads(dump)
        except Exception:
            logger.exception("Failed to encode content: %r", event.content)
            raise
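The round-trip guard can be sketched on its own; a hypothetical helper (not part of Synapse, and it skips the frozen-dict unfreeze step) that mirrors the check:

# Serialise, then parse the result back, so content that cannot survive
# JSON encoding is rejected before anything is written to the database.
import simplejson

def assert_json_round_trips(content):
    dump = simplejson.dumps(content)
    simplejson.loads(dump)
    return dump

assert_json_round_trips({"msgtype": "m.text", "body": "hello"})  # fine
try:
    assert_json_round_trips({"bad": {1, 2, 3}})  # sets are not JSON-serialisable
except TypeError as exc:
    print("rejected:", exc)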
@@ -37,7 +37,7 @@ from twisted.web.util import redirectTo

import collections
import logging
import urllib
-import ujson
+import simplejson

logger = logging.getLogger(__name__)
@@ -461,8 +461,7 @@ def respond_with_json(request, code, json_object, send_cors=False,
    if canonical_json or synapse.events.USE_FROZEN_DICTS:
        json_bytes = encode_canonical_json(json_object)
    else:
-       # ujson doesn't like frozen_dicts.
-       json_bytes = ujson.dumps(json_object, ensure_ascii=False)
+       json_bytes = simplejson.dumps(json_object)

    return respond_with_json_bytes(
        request, code, json_bytes,
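The two branches produce different encodings; a small illustration using the canonicaljson and simplejson packages (the example object is made up):

# Canonical JSON yields a deterministic byte string (sorted keys, minimal
# separators), which is what signing and frozen-dict handling need; plain
# simplejson.dumps is the cheaper path for ordinary responses.
import simplejson
from canonicaljson import encode_canonical_json

obj = {"b": 1, "a": {"y": 2, "x": 1}}
print(encode_canonical_json(obj))  # b'{"a":{"x":1,"y":2},"b":1}'
print(simplejson.dumps(obj))       # '{"b": 1, "a": {"y": 2, "x": 1}}'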
@@ -19,7 +19,7 @@ allowed to be sent by which side.
"""

import logging
-import ujson as json
+import simplejson as json


logger = logging.getLogger(__name__)
@@ -30,7 +30,7 @@ from synapse.http.servlet import (

import logging
import urllib
-import ujson as json
+import simplejson as json

logger = logging.getLogger(__name__)
@@ -33,7 +33,7 @@ from ._base import set_timeline_upper_limit

import itertools
import logging

-import ujson as json
+import simplejson as json

logger = logging.getLogger(__name__)
@@ -23,7 +23,7 @@ import re
import shutil
import sys
import traceback
-import ujson as json
+import simplejson as json
import urlparse

from twisted.web.server import NOT_DONE_YET
@@ -23,7 +23,7 @@ from synapse.util.caches.stream_change_cache import StreamChangeCache
from synapse.util.caches.descriptors import cached, cachedList, cachedInlineCallbacks

import abc
-import ujson as json
+import simplejson as json
import logging

logger = logging.getLogger(__name__)
@@ -19,7 +19,7 @@ from . import engines

from twisted.internet import defer

-import ujson as json
+import simplejson as json
import logging

logger = logging.getLogger(__name__)
@@ -14,7 +14,7 @@
# limitations under the License.

import logging
-import ujson
+import simplejson

from twisted.internet import defer
@@ -85,7 +85,7 @@ class DeviceInboxStore(BackgroundUpdateStore):
            )
            rows = []
            for destination, edu in remote_messages_by_destination.items():
-               edu_json = ujson.dumps(edu)
+               edu_json = simplejson.dumps(edu)
                rows.append((destination, stream_id, now_ms, edu_json))
            txn.executemany(sql, rows)
@@ -177,7 +177,7 @@ class DeviceInboxStore(BackgroundUpdateStore):
                " WHERE user_id = ?"
            )
            txn.execute(sql, (user_id,))
-           message_json = ujson.dumps(messages_by_device["*"])
+           message_json = simplejson.dumps(messages_by_device["*"])
            for row in txn:
                # Add the message for all devices for this user on this
                # server.
@@ -199,7 +199,7 @@ class DeviceInboxStore(BackgroundUpdateStore):
                # Only insert into the local inbox if the device exists on
                # this server
                device = row[0]
-               message_json = ujson.dumps(messages_by_device[device])
+               message_json = simplejson.dumps(messages_by_device[device])
                messages_json_for_user[device] = message_json

            if messages_json_for_user:
@@ -253,7 +253,7 @@ class DeviceInboxStore(BackgroundUpdateStore):
            messages = []
            for row in txn:
                stream_pos = row[0]
-               messages.append(ujson.loads(row[1]))
+               messages.append(simplejson.loads(row[1]))
            if len(messages) < limit:
                stream_pos = current_stream_id
            return (messages, stream_pos)
@@ -389,7 +389,7 @@ class DeviceInboxStore(BackgroundUpdateStore):
            messages = []
            for row in txn:
                stream_pos = row[0]
-               messages.append(ujson.loads(row[1]))
+               messages.append(simplejson.loads(row[1]))
            if len(messages) < limit:
                stream_pos = current_stream_id
            return (messages, stream_pos)
@@ -13,7 +13,7 @@
# See the License for the specific language governing permissions and
# limitations under the License.
import logging
-import ujson as json
+import simplejson as json

from twisted.internet import defer
@@ -17,7 +17,7 @@ from twisted.internet import defer
from synapse.util.caches.descriptors import cached

from canonicaljson import encode_canonical_json
-import ujson as json
+import simplejson as json

from ._base import SQLBaseStore
@@ -22,7 +22,7 @@ from synapse.types import RoomStreamToken
from .stream import lower_bound

import logging
-import ujson as json
+import simplejson as json

logger = logging.getLogger(__name__)
@@ -38,7 +38,7 @@ from functools import wraps
import synapse.metrics

import logging
-import ujson as json
+import simplejson as json

# these are only included to make the type annotations work
from synapse.events import EventBase  # noqa: F401
@@ -56,7 +56,6 @@ event_counter = metrics.register_counter(

def encode_json(json_object):
    if USE_FROZEN_DICTS:
-       # ujson doesn't like frozen_dicts
        return encode_canonical_json(json_object)
    else:
        return json.dumps(json_object, ensure_ascii=False)
@@ -28,7 +28,7 @@ from synapse.api.errors import SynapseError
from collections import namedtuple

import logging
-import ujson as json
+import simplejson as json

# these are only included to make the type annotations work
from synapse.events import EventBase  # noqa: F401
@@ -23,7 +23,7 @@ from twisted.internet import defer

import abc
import logging
-import ujson as json
+import simplejson as json


logger = logging.getLogger(__name__)
@@ -22,7 +22,7 @@ from synapse.util.caches.descriptors import cached, cachedInlineCallbacks

import collections
import logging
-import ujson as json
+import simplejson as json
import re

logger = logging.getLogger(__name__)
@@ -28,7 +28,7 @@ from synapse.api.constants import Membership, EventTypes
from synapse.types import get_domain_from_id

import logging
-import ujson as json
+import simplejson as json

logger = logging.getLogger(__name__)
@@ -17,7 +17,7 @@ import logging
from synapse.storage.prepare_database import get_statements
from synapse.storage.engines import PostgresEngine, Sqlite3Engine

-import ujson
+import simplejson

logger = logging.getLogger(__name__)
@@ -66,7 +66,7 @@ def run_create(cur, database_engine, *args, **kwargs):
        "max_stream_id_exclusive": max_stream_id + 1,
        "rows_inserted": 0,
    }
-   progress_json = ujson.dumps(progress)
+   progress_json = simplejson.dumps(progress)

    sql = (
        "INSERT into background_updates (update_name, progress_json)"
@@ -16,7 +16,7 @@ import logging

from synapse.storage.prepare_database import get_statements

-import ujson
+import simplejson

logger = logging.getLogger(__name__)
@@ -45,7 +45,7 @@ def run_create(cur, database_engine, *args, **kwargs):
        "max_stream_id_exclusive": max_stream_id + 1,
        "rows_inserted": 0,
    }
-   progress_json = ujson.dumps(progress)
+   progress_json = simplejson.dumps(progress)

    sql = (
        "INSERT into background_updates (update_name, progress_json)"
@@ -16,7 +16,7 @@ from synapse.storage.engines import PostgresEngine
from synapse.storage.prepare_database import get_statements

import logging
-import ujson
+import simplejson

logger = logging.getLogger(__name__)
@@ -49,7 +49,7 @@ def run_create(cur, database_engine, *args, **kwargs):
        "rows_inserted": 0,
        "have_added_indexes": False,
    }
-   progress_json = ujson.dumps(progress)
+   progress_json = simplejson.dumps(progress)

    sql = (
        "INSERT into background_updates (update_name, progress_json)"
@@ -15,7 +15,7 @@
from synapse.storage.prepare_database import get_statements

import logging
-import ujson
+import simplejson

logger = logging.getLogger(__name__)
@@ -44,7 +44,7 @@ def run_create(cur, database_engine, *args, **kwargs):
        "max_stream_id_exclusive": max_stream_id + 1,
        "rows_inserted": 0,
    }
-   progress_json = ujson.dumps(progress)
+   progress_json = simplejson.dumps(progress)

    sql = (
        "INSERT into background_updates (update_name, progress_json)"
@@ -16,7 +16,7 @@
from collections import namedtuple
import logging
import re
-import ujson as json
+import simplejson as json

from twisted.internet import defer
@@ -19,7 +19,7 @@ from synapse.storage.account_data import AccountDataWorkerStore
from synapse.util.caches.descriptors import cached
from twisted.internet import defer

-import ujson as json
+import simplejson as json
import logging

logger = logging.getLogger(__name__)
@@ -23,7 +23,7 @@ from canonicaljson import encode_canonical_json
from collections import namedtuple

import logging
-import ujson as json
+import simplejson as json

logger = logging.getLogger(__name__)
@@ -132,9 +132,13 @@ class DictionaryCache(object):
            self._update_or_insert(key, value, known_absent)

    def _update_or_insert(self, key, value, known_absent):
-       entry = self.cache.setdefault(key, DictionaryEntry(False, set(), {}))
+       # We pop and reinsert as we need to tell the cache the size may have
+       # changed
+
+       entry = self.cache.pop(key, DictionaryEntry(False, set(), {}))
        entry.value.update(value)
        entry.known_absent.update(known_absent)
+       self.cache[key] = entry

    def _insert(self, key, value, known_absent):
        self.cache[key] = DictionaryEntry(True, known_absent, value)
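The pop-and-reinsert comment is about size accounting: if the entry were fetched with setdefault and mutated in place, a size-tracking cache would never notice that it grew. A simplified, hypothetical cache (not Synapse's LruCache) illustrating the point:

# Size is only recomputed inside __setitem__, so a value mutated in place
# after setdefault() would leave total_size stale; pop + reinsert forces
# the bookkeeping to run with the entry's new size.
class SizeTrackingCache:
    def __init__(self):
        self._data = {}
        self.total_size = 0

    def __setitem__(self, key, value):
        old = self._data.get(key)
        if old is not None:
            self.total_size -= len(old)
        self._data[key] = value
        self.total_size += len(value)

    def pop(self, key, default):
        value = self._data.pop(key, None)
        if value is None:
            return default
        self.total_size -= len(value)
        return value

cache = SizeTrackingCache()
cache["room"] = {"a": 1}

entry = cache.pop("room", {})      # remove the old size from the total
entry.update({"b": 2, "c": 3})
cache["room"] = entry              # reinsert so the new size is counted
print(cache.total_size)            # 3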
@@ -154,11 +154,18 @@ class LruCache(object):
        def cache_set(key, value, callbacks=[]):
            node = cache.get(key, None)
            if node is not None:
-               if value != node.value:
+               # We sometimes store large objects, e.g. dicts, which cause
+               # the inequality check to take a long time. So let's only do
+               # the check if we have some callbacks to call.
+               if node.callbacks and value != node.value:
                    for cb in node.callbacks:
                        cb()
                    node.callbacks.clear()

+               # We don't bother to protect this by value != node.value as
+               # generally size_callback will be cheap compared with equality
+               # checks. (For example, taking the size of two dicts is quicker
+               # than comparing them for equality.)
                if size_callback:
                    cached_cache_len[0] -= size_callback(node.value)
                    cached_cache_len[0] += size_callback(value)
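The reasoning in those comments can be shown in isolation; a rough standalone sketch (not the real LruCache, and size_callback here is simply len):

# The potentially slow value comparison only runs when callbacks exist to
# fire, while the size bookkeeping always runs because sizing a value is
# cheap compared with comparing two large dicts for equality.
class Node:
    def __init__(self, value):
        self.value = value
        self.callbacks = set()

def cache_set(cache, cache_len, key, value, size_callback=len):
    node = cache.get(key)
    if node is not None:
        if node.callbacks and value != node.value:
            for cb in node.callbacks:
                cb()
            node.callbacks.clear()
        cache_len[0] -= size_callback(node.value)
        cache_len[0] += size_callback(value)
        node.value = value
    else:
        cache[key] = Node(value)
        cache_len[0] += size_callback(value)

cache, cache_len = {}, [0]
cache_set(cache, cache_len, "state", {"a": 1, "b": 2})
cache_set(cache, cache_len, "state", {"a": 1, "b": 2, "c": 3})
print(cache_len[0])  # 3 - the tracked size follows the latest value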