Use static JSONEncoders

Using json.dumps with custom options requires creating a new JSONEncoder on
each call. It's more efficient to create one upfront and reuse it.
Richard van der Hoff 2018-03-29 22:57:28 +01:00
parent 88cc9cc69e
commit 05630758f2
4 changed files with 34 additions and 20 deletions
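
A minimal sketch of the point above (illustrative only, not part of the commit; the _json_encoder name simply mirrors the one introduced in the diff below): simplejson.dumps() only reuses its cached module-level encoder when called with all-default options, so passing any custom option makes it construct a fresh JSONEncoder on every call, whereas a module-level encoder is built once and its encode() method reused.

    import simplejson

    # dumps() with non-default options cannot reuse simplejson's cached
    # module-level encoder, so a new JSONEncoder is constructed per call:
    simplejson.dumps({"a": 1}, namedtuple_as_object=False)

    # Building the encoder once up front avoids that per-call construction:
    _json_encoder = simplejson.JSONEncoder(namedtuple_as_object=False)
    _json_encoder.encode({"a": 1})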

View File

@@ -27,7 +27,7 @@ from synapse.types import (
 from synapse.util.async import run_on_reactor, ReadWriteLock, Limiter
 from synapse.util.logcontext import preserve_fn, run_in_background
 from synapse.util.metrics import measure_func
-from synapse.util.frozenutils import unfreeze
+from synapse.util.frozenutils import frozendict_json_encoder
 from synapse.util.stringutils import random_string
 from synapse.visibility import filter_events_for_client
 from synapse.replication.http.send_event import send_event_to_master
@@ -678,7 +678,7 @@ class EventCreationHandler(object):
         # Ensure that we can round trip before trying to persist in db
         try:
-            dump = simplejson.dumps(unfreeze(event.content))
+            dump = frozendict_json_encoder.encode(event.content)
             simplejson.loads(dump)
         except Exception:
             logger.exception("Failed to encode content: %r", event.content)

View File

@@ -24,6 +24,8 @@ import simplejson

 logger = logging.getLogger(__name__)

+_json_encoder = simplejson.JSONEncoder(namedtuple_as_object=False)
+

 class Command(object):
     """The base command class.
@@ -107,7 +109,7 @@ class RdataCommand(Command):
         return " ".join((
             self.stream_name,
             str(self.token) if self.token is not None else "batch",
-            simplejson.dumps(self.row, namedtuple_as_object=False),
+            _json_encoder.encode(self.row),
         ))
@@ -302,7 +304,7 @@ class InvalidateCacheCommand(Command):
     def to_line(self):
         return " ".join((
-            self.cache_func, simplejson.dumps(self.keys, namedtuple_as_object=False)
+            self.cache_func, _json_encoder.encode(self.keys),
         ))
@@ -334,7 +336,7 @@ class UserIpCommand(Command):
         )

     def to_line(self):
-        return self.user_id + " " + simplejson.dumps((
+        return self.user_id + " " + _json_encoder.encode((
             self.access_token, self.ip, self.user_agent, self.device_id,
             self.last_seen,
         ))

View File

@@ -14,15 +14,19 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.

-from synapse.storage.events_worker import EventsWorkerStore
+from collections import OrderedDict, deque, namedtuple
+from functools import wraps
+import logging

+import simplejson as json
 from twisted.internet import defer

+from synapse.events import USE_FROZEN_DICTS
+from synapse.storage.events_worker import EventsWorkerStore
 from synapse.util.async import ObservableDeferred
+from synapse.util.frozenutils import frozendict_json_encoder
 from synapse.util.logcontext import (
-    PreserveLoggingContext, make_deferred_yieldable
+    PreserveLoggingContext, make_deferred_yieldable,
 )
 from synapse.util.logutils import log_function
 from synapse.util.metrics import Measure
@@ -30,16 +34,8 @@ from synapse.api.constants import EventTypes
 from synapse.api.errors import SynapseError
 from synapse.util.caches.descriptors import cached, cachedInlineCallbacks
 from synapse.types import get_domain_from_id

-from canonicaljson import encode_canonical_json
-from collections import deque, namedtuple, OrderedDict
-from functools import wraps

 import synapse.metrics

-import logging
-import simplejson as json

 # these are only included to make the type annotations work
 from synapse.events import EventBase  # noqa: F401
 from synapse.events.snapshot import EventContext  # noqa: F401
@@ -71,10 +67,7 @@ state_delta_reuse_delta_counter = metrics.register_counter(

 def encode_json(json_object):
-    if USE_FROZEN_DICTS:
-        return encode_canonical_json(json_object)
-    else:
-        return json.dumps(json_object, ensure_ascii=False)
+    return frozendict_json_encoder.encode(json_object)


 class _EventPeristenceQueue(object):

View File

@@ -14,6 +14,7 @@
 # limitations under the License.

 from frozendict import frozendict
+import simplejson as json


 def freeze(o):
@@ -49,3 +50,21 @@ def unfreeze(o):
         pass

     return o
+
+
+def _handle_frozendict(obj):
+    """Helper for EventEncoder. Makes frozendicts serializable by returning
+    the underlying dict
+    """
+    if type(obj) is frozendict:
+        # fishing the protected dict out of the object is a bit nasty,
+        # but we don't really want the overhead of copying the dict.
+        return obj._dict
+    raise TypeError('Object of type %s is not JSON serializable' %
+                    obj.__class__.__name__)
+
+
+# A JSONEncoder which is capable of encoding frozendicts without barfing
+frozendict_json_encoder = json.JSONEncoder(
+    default=_handle_frozendict,
+)
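
For illustration only (not part of the commit; the sample event content is made up), the encoder defined above can serialise content containing a frozendict, which a plain JSONEncoder would refuse with a TypeError, by handing back the underlying dict through the default hook:

    from frozendict import frozendict

    from synapse.util.frozenutils import frozendict_json_encoder

    content = frozendict({"msgtype": "m.text"})
    # _handle_frozendict returns the wrapped dict, so encoding succeeds
    print(frozendict_json_encoder.encode(content))  # {"msgtype": "m.text"}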