Merge branch 'hotfixes-v0.26.1' of github.com:matrix-org/synapse

Erik Johnston 2018-03-16 00:13:51 +00:00
commit f609acc109
27 changed files with 46 additions and 39 deletions

View File

@@ -1,3 +1,12 @@
+Changes in synapse v0.26.1 (2018-03-15)
+=======================================
+
+Bug fixes:
+
+* Fix bug where an invalid event caused server to stop functioning correctly,
+  due to parsing and serializing bugs in ujson library.
+
+
 Changes in synapse v0.26.0 (2018-01-05)
 =======================================
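The file-by-file changes below follow directly from that fix: every module that imported ujson now imports simplejson, usually under the same json alias, so the existing dumps/loads call sites are untouched. A minimal sketch of the pattern (illustrative snippet, not a file from the commit):

    # Drop-in swap: only the import line changes, existing call sites keep
    # using the json alias.
    import simplejson as json

    payload = {"msgtype": "m.text", "body": "hello"}
    wire = json.dumps(payload)          # same API surface the code used with ujson
    assert json.loads(wire) == payload  # and the same parse behaviour for plain dicts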

View File

@@ -16,4 +16,4 @@
 """ This is a reference implementation of a Matrix home server.
 """
-__version__ = "0.26.0"
+__version__ = "0.26.1"

View File

@@ -17,7 +17,7 @@ from synapse.storage.presence import UserPresenceState
 from synapse.types import UserID, RoomID
 from twisted.internet import defer
-import ujson as json
+import simplejson as json
 import jsonschema
 from jsonschema import FormatChecker

View File

@@ -13,7 +13,7 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
-import ujson as json
+import simplejson as json
 import logging
 from canonicaljson import encode_canonical_json

View File

@@ -35,7 +35,7 @@ from canonicaljson import encode_canonical_json
 import logging
 import random
-import ujson
+import simplejson

 logger = logging.getLogger(__name__)

@@ -561,8 +561,8 @@ class MessageHandler(BaseHandler):
         # Ensure that we can round trip before trying to persist in db
         try:
-            dump = ujson.dumps(unfreeze(event.content))
-            ujson.loads(dump)
+            dump = simplejson.dumps(unfreeze(event.content))
+            simplejson.loads(dump)
         except Exception:
             logger.exception("Failed to encode content: %r", event.content)
             raise
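The second hunk keeps the defensive round trip that motivated the release: event content is serialised and parsed once before anything is persisted, so content the JSON library cannot handle fails loudly at submission time instead of corrupting stored events later. A minimal sketch of that guard as a standalone helper (the helper name is illustrative; in the handler the content is first passed through synapse's unfreeze()):

    import simplejson


    def assert_content_round_trips(content):
        # Serialise and immediately parse the result; any exception propagates
        # to the caller, which rejects the event rather than storing it.
        dump = simplejson.dumps(content)
        simplejson.loads(dump)
        return dump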

View File

@@ -36,7 +36,7 @@ from twisted.web.util import redirectTo
 import collections
 import logging
 import urllib
-import ujson
+import simplejson

 logger = logging.getLogger(__name__)

@@ -370,8 +370,7 @@ def respond_with_json(request, code, json_object, send_cors=False,
     if canonical_json or synapse.events.USE_FROZEN_DICTS:
         json_bytes = encode_canonical_json(json_object)
     else:
-        # ujson doesn't like frozen_dicts.
-        json_bytes = ujson.dumps(json_object, ensure_ascii=False)
+        json_bytes = simplejson.dumps(json_object)

     return respond_with_json_bytes(
         request, code, json_bytes,

View File

@@ -19,7 +19,7 @@ allowed to be sent by which side.
 """
 import logging
-import ujson as json
+import simplejson as json

 logger = logging.getLogger(__name__)

View File

@@ -29,7 +29,7 @@ from synapse.http.servlet import (
 import logging
 import urllib
-import ujson as json
+import simplejson as json

 logger = logging.getLogger(__name__)

View File

@@ -33,7 +33,7 @@ from ._base import set_timeline_upper_limit
 import itertools
 import logging
-import ujson as json
+import simplejson as json

 logger = logging.getLogger(__name__)

View File

@@ -35,7 +35,7 @@ import os
 import re
 import fnmatch
 import cgi
-import ujson as json
+import simplejson as json
 import urlparse
 import itertools
 import datetime

View File

@@ -18,7 +18,7 @@ from twisted.internet import defer
 from synapse.util.caches.descriptors import cached, cachedList, cachedInlineCallbacks
-import ujson as json
+import simplejson as json
 import logging

 logger = logging.getLogger(__name__)

View File

@@ -19,7 +19,7 @@ from . import engines
 from twisted.internet import defer
-import ujson as json
+import simplejson as json
 import logging

 logger = logging.getLogger(__name__)

View File

@@ -14,7 +14,7 @@
 # limitations under the License.
 import logging
-import ujson
+import simplejson
 from twisted.internet import defer

@@ -85,7 +85,7 @@ class DeviceInboxStore(BackgroundUpdateStore):
         )
         rows = []
         for destination, edu in remote_messages_by_destination.items():
-            edu_json = ujson.dumps(edu)
+            edu_json = simplejson.dumps(edu)
             rows.append((destination, stream_id, now_ms, edu_json))
         txn.executemany(sql, rows)

@@ -177,7 +177,7 @@ class DeviceInboxStore(BackgroundUpdateStore):
             " WHERE user_id = ?"
         )
         txn.execute(sql, (user_id,))
-        message_json = ujson.dumps(messages_by_device["*"])
+        message_json = simplejson.dumps(messages_by_device["*"])
         for row in txn:
             # Add the message for all devices for this user on this
             # server.

@@ -199,7 +199,7 @@ class DeviceInboxStore(BackgroundUpdateStore):
                 # Only insert into the local inbox if the device exists on
                 # this server
                 device = row[0]
-                message_json = ujson.dumps(messages_by_device[device])
+                message_json = simplejson.dumps(messages_by_device[device])
                 messages_json_for_user[device] = message_json

             if messages_json_for_user:

@@ -253,7 +253,7 @@ class DeviceInboxStore(BackgroundUpdateStore):
             messages = []
             for row in txn:
                 stream_pos = row[0]
-                messages.append(ujson.loads(row[1]))
+                messages.append(simplejson.loads(row[1]))
             if len(messages) < limit:
                 stream_pos = current_stream_id
             return (messages, stream_pos)

@@ -389,7 +389,7 @@ class DeviceInboxStore(BackgroundUpdateStore):
             messages = []
             for row in txn:
                 stream_pos = row[0]
-                messages.append(ujson.loads(row[1]))
+                messages.append(simplejson.loads(row[1]))
             if len(messages) < limit:
                 stream_pos = current_stream_id
             return (messages, stream_pos)
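In this store the swap covers both directions: outgoing per-device messages are encoded with simplejson before being written in bulk, and rows read back are decoded with simplejson.loads. A small sketch of the write side (function and variable names are illustrative, not from the commit):

    import simplejson


    def build_device_inbox_rows(destination_edus, stream_id, now_ms):
        # One row per destination; the EDU payload is stored as a JSON string
        # so it can be replayed to the remote server later.
        rows = []
        for destination, edu in destination_edus.items():
            rows.append((destination, stream_id, now_ms, simplejson.dumps(edu)))
        return rows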

View File

@@ -13,7 +13,7 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 import logging
-import ujson as json
+import simplejson as json
 from twisted.internet import defer

View File

@@ -17,7 +17,7 @@ from twisted.internet import defer
 from synapse.util.caches.descriptors import cached
 from canonicaljson import encode_canonical_json
-import ujson as json
+import simplejson as json

 from ._base import SQLBaseStore

View File

@@ -21,7 +21,7 @@ from synapse.types import RoomStreamToken
 from .stream import lower_bound
 import logging
-import ujson as json
+import simplejson as json

 logger = logging.getLogger(__name__)

View File

@@ -38,7 +38,7 @@ from functools import wraps
 import synapse.metrics
 import logging
-import ujson as json
+import simplejson as json

 # these are only included to make the type annotations work
 from synapse.events import EventBase  # noqa: F401

@@ -56,7 +56,6 @@ event_counter = metrics.register_counter(
 def encode_json(json_object):
     if USE_FROZEN_DICTS:
-        # ujson doesn't like frozen_dicts
         return encode_canonical_json(json_object)
     else:
         return json.dumps(json_object, ensure_ascii=False)
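With the ujson comment dropped, encode_json reads as a plain two-way choice: frozen dicts go through canonicaljson, which copes with them, and everything else is serialised directly with simplejson. A self-contained sketch of that choice, with USE_FROZEN_DICTS shown as a module flag purely for illustration:

    import simplejson as json
    from canonicaljson import encode_canonical_json

    USE_FROZEN_DICTS = False  # illustrative stand-in for synapse.events.USE_FROZEN_DICTS


    def encode_json(json_object):
        # Frozen dicts need the canonical encoder; plain objects take the direct path.
        if USE_FROZEN_DICTS:
            return encode_canonical_json(json_object)
        return json.dumps(json_object, ensure_ascii=False)


    print(encode_json({"body": u"héllo"}))  # non-ASCII passes through unescaped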

View File

@@ -20,7 +20,7 @@ from synapse.util.caches.stream_change_cache import StreamChangeCache
 from twisted.internet import defer
 import logging
-import ujson as json
+import simplejson as json

 logger = logging.getLogger(__name__)

View File

@@ -23,7 +23,7 @@ from .engines import PostgresEngine, Sqlite3Engine
 import collections
 import logging
-import ujson as json
+import simplejson as json
 import re

 logger = logging.getLogger(__name__)

View File

@@ -27,7 +27,7 @@ from synapse.api.constants import Membership, EventTypes
 from synapse.types import get_domain_from_id
 import logging
-import ujson as json
+import simplejson as json

 logger = logging.getLogger(__name__)

View File

@@ -17,7 +17,7 @@ import logging
 from synapse.storage.prepare_database import get_statements
 from synapse.storage.engines import PostgresEngine, Sqlite3Engine
-import ujson
+import simplejson

 logger = logging.getLogger(__name__)

@@ -66,7 +66,7 @@ def run_create(cur, database_engine, *args, **kwargs):
         "max_stream_id_exclusive": max_stream_id + 1,
         "rows_inserted": 0,
     }
-    progress_json = ujson.dumps(progress)
+    progress_json = simplejson.dumps(progress)

     sql = (
         "INSERT into background_updates (update_name, progress_json)"

View File

@@ -16,7 +16,7 @@ import logging
 from synapse.storage.prepare_database import get_statements
-import ujson
+import simplejson

 logger = logging.getLogger(__name__)

View File

@@ -16,7 +16,7 @@ from synapse.storage.engines import PostgresEngine
 from synapse.storage.prepare_database import get_statements
 import logging
-import ujson
+import simplejson

 logger = logging.getLogger(__name__)

@@ -49,7 +49,7 @@ def run_create(cur, database_engine, *args, **kwargs):
         "rows_inserted": 0,
         "have_added_indexes": False,
     }
-    progress_json = ujson.dumps(progress)
+    progress_json = simplejson.dumps(progress)

     sql = (
         "INSERT into background_updates (update_name, progress_json)"

View File

@@ -15,7 +15,7 @@
 from synapse.storage.prepare_database import get_statements
 import logging
-import ujson
+import simplejson

 logger = logging.getLogger(__name__)

@@ -44,7 +44,7 @@ def run_create(cur, database_engine, *args, **kwargs):
         "max_stream_id_exclusive": max_stream_id + 1,
         "rows_inserted": 0,
     }
-    progress_json = ujson.dumps(progress)
+    progress_json = simplejson.dumps(progress)

     sql = (
         "INSERT into background_updates (update_name, progress_json)"

View File

@@ -21,7 +21,7 @@ from synapse.storage.engines import PostgresEngine, Sqlite3Engine
 import logging
 import re
-import ujson as json
+import simplejson as json

 logger = logging.getLogger(__name__)

View File

@@ -17,7 +17,7 @@ from ._base import SQLBaseStore
 from synapse.util.caches.descriptors import cached
 from twisted.internet import defer
-import ujson as json
+import simplejson as json
 import logging

 logger = logging.getLogger(__name__)

View File

@@ -23,7 +23,7 @@ from canonicaljson import encode_canonical_json
 from collections import namedtuple
 import logging
-import ujson as json
+import simplejson as json

 logger = logging.getLogger(__name__)