Use the JSON encoder without whitespace in more places. (#8124)

This commit is contained in:
Patrick Cloke 2020-08-20 10:32:33 -04:00 committed by GitHub
parent 5eac0b7e76
commit dbc630a628
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
9 changed files with 21 additions and 27 deletions

1
changelog.d/8124.misc Normal file
View File

@@ -0,0 +1 @@
Reduce the amount of whitespace in JSON stored and sent in responses.

View File

@@ -16,8 +16,6 @@
import logging import logging
from typing import Any, Dict from typing import Any, Dict
from canonicaljson import json
from synapse.api.errors import SynapseError from synapse.api.errors import SynapseError
from synapse.logging.context import run_in_background from synapse.logging.context import run_in_background
from synapse.logging.opentracing import ( from synapse.logging.opentracing import (
@@ -27,6 +25,7 @@ from synapse.logging.opentracing import (
start_active_span, start_active_span,
) )
from synapse.types import UserID, get_domain_from_id from synapse.types import UserID, get_domain_from_id
from synapse.util import json_encoder
from synapse.util.stringutils import random_string from synapse.util.stringutils import random_string
logger = logging.getLogger(__name__) logger = logging.getLogger(__name__)
@@ -174,7 +173,7 @@ class DeviceMessageHandler(object):
"sender": sender_user_id, "sender": sender_user_id,
"type": message_type, "type": message_type,
"message_id": message_id, "message_id": message_id,
"org.matrix.opentracing_context": json.dumps(context), "org.matrix.opentracing_context": json_encoder.encode(context),
} }
log_kv({"local_messages": local_messages}) log_kv({"local_messages": local_messages})

View File

@@ -172,12 +172,11 @@ from functools import wraps
from typing import TYPE_CHECKING, Dict, Optional, Type from typing import TYPE_CHECKING, Dict, Optional, Type
import attr import attr
from canonicaljson import json
from twisted.internet import defer from twisted.internet import defer
from synapse.config import ConfigError from synapse.config import ConfigError
from synapse.util import json_decoder from synapse.util import json_decoder, json_encoder
if TYPE_CHECKING: if TYPE_CHECKING:
from synapse.http.site import SynapseRequest from synapse.http.site import SynapseRequest
@@ -693,7 +692,7 @@ def active_span_context_as_string():
opentracing.tracer.inject( opentracing.tracer.inject(
opentracing.tracer.active_span, opentracing.Format.TEXT_MAP, carrier opentracing.tracer.active_span, opentracing.Format.TEXT_MAP, carrier
) )
return json.dumps(carrier) return json_encoder.encode(carrier)
@only_if_tracing @only_if_tracing

View File

@@ -13,12 +13,12 @@
# See the License for the specific language governing permissions and # See the License for the specific language governing permissions and
# limitations under the License. # limitations under the License.
import json
import logging import logging
from twisted.web.resource import Resource from twisted.web.resource import Resource
from synapse.http.server import set_cors_headers from synapse.http.server import set_cors_headers
from synapse.util import json_encoder
logger = logging.getLogger(__name__) logger = logging.getLogger(__name__)
@@ -67,4 +67,4 @@ class WellKnownResource(Resource):
logger.debug("returning: %s", r) logger.debug("returning: %s", r)
request.setHeader(b"Content-Type", b"application/json") request.setHeader(b"Content-Type", b"application/json")
return json.dumps(r).encode("utf-8") return json_encoder.encode(r).encode("utf-8")

View File

@@ -16,9 +16,8 @@
import logging import logging
from typing import Optional from typing import Optional
from canonicaljson import json
from synapse.metrics.background_process_metrics import run_as_background_process from synapse.metrics.background_process_metrics import run_as_background_process
from synapse.util import json_encoder
from . import engines from . import engines
@@ -457,7 +456,7 @@ class BackgroundUpdater(object):
progress(dict): The progress of the update. progress(dict): The progress of the update.
""" """
progress_json = json.dumps(progress) progress_json = json_encoder.encode(progress)
self.db_pool.simple_update_one_txn( self.db_pool.simple_update_one_txn(
txn, txn,

View File

@@ -16,13 +16,12 @@
import logging import logging
import re import re
from canonicaljson import json
from synapse.appservice import AppServiceTransaction from synapse.appservice import AppServiceTransaction
from synapse.config.appservice import load_appservices from synapse.config.appservice import load_appservices
from synapse.storage._base import SQLBaseStore, db_to_json from synapse.storage._base import SQLBaseStore, db_to_json
from synapse.storage.database import DatabasePool from synapse.storage.database import DatabasePool
from synapse.storage.databases.main.events_worker import EventsWorkerStore from synapse.storage.databases.main.events_worker import EventsWorkerStore
from synapse.util import json_encoder
logger = logging.getLogger(__name__) logger = logging.getLogger(__name__)
@@ -204,7 +203,7 @@ class ApplicationServiceTransactionWorkerStore(
new_txn_id = max(highest_txn_id, last_txn_id) + 1 new_txn_id = max(highest_txn_id, last_txn_id) + 1
# Insert new txn into txn table # Insert new txn into txn table
event_ids = json.dumps([e.event_id for e in events]) event_ids = json_encoder.encode([e.event_id for e in events])
txn.execute( txn.execute(
"INSERT INTO application_services_txns(as_id, txn_id, event_ids) " "INSERT INTO application_services_txns(as_id, txn_id, event_ids) "
"VALUES(?,?,?)", "VALUES(?,?,?)",

View File

@@ -21,8 +21,6 @@ from abc import abstractmethod
from enum import Enum from enum import Enum
from typing import Any, Dict, List, Optional, Tuple from typing import Any, Dict, List, Optional, Tuple
from canonicaljson import json
from synapse.api.constants import EventTypes from synapse.api.constants import EventTypes
from synapse.api.errors import StoreError from synapse.api.errors import StoreError
from synapse.api.room_versions import RoomVersion, RoomVersions from synapse.api.room_versions import RoomVersion, RoomVersions
@@ -30,6 +28,7 @@ from synapse.storage._base import SQLBaseStore, db_to_json
from synapse.storage.database import DatabasePool, LoggingTransaction from synapse.storage.database import DatabasePool, LoggingTransaction
from synapse.storage.databases.main.search import SearchStore from synapse.storage.databases.main.search import SearchStore
from synapse.types import ThirdPartyInstanceID from synapse.types import ThirdPartyInstanceID
from synapse.util import json_encoder
from synapse.util.caches.descriptors import cached from synapse.util.caches.descriptors import cached
logger = logging.getLogger(__name__) logger = logging.getLogger(__name__)
@@ -1310,7 +1309,7 @@ class RoomStore(RoomBackgroundUpdateStore, RoomWorkerStore, SearchStore):
"event_id": event_id, "event_id": event_id,
"user_id": user_id, "user_id": user_id,
"reason": reason, "reason": reason,
"content": json.dumps(content), "content": json_encoder.encode(content),
}, },
desc="add_event_report", desc="add_event_report",
) )

View File

@@ -17,11 +17,10 @@
import logging import logging
from typing import Dict, List, Tuple from typing import Dict, List, Tuple
from canonicaljson import json
from synapse.storage._base import db_to_json from synapse.storage._base import db_to_json
from synapse.storage.databases.main.account_data import AccountDataWorkerStore from synapse.storage.databases.main.account_data import AccountDataWorkerStore
from synapse.types import JsonDict from synapse.types import JsonDict
from synapse.util import json_encoder
from synapse.util.caches.descriptors import cached from synapse.util.caches.descriptors import cached
logger = logging.getLogger(__name__) logger = logging.getLogger(__name__)
@@ -98,7 +97,7 @@ class TagsWorkerStore(AccountDataWorkerStore):
txn.execute(sql, (user_id, room_id)) txn.execute(sql, (user_id, room_id))
tags = [] tags = []
for tag, content in txn: for tag, content in txn:
tags.append(json.dumps(tag) + ":" + content) tags.append(json_encoder.encode(tag) + ":" + content)
tag_json = "{" + ",".join(tags) + "}" tag_json = "{" + ",".join(tags) + "}"
results.append((stream_id, (user_id, room_id, tag_json))) results.append((stream_id, (user_id, room_id, tag_json)))
@@ -200,7 +199,7 @@ class TagsStore(TagsWorkerStore):
Returns: Returns:
The next account data ID. The next account data ID.
""" """
content_json = json.dumps(content) content_json = json_encoder.encode(content)
def add_tag_txn(txn, next_id): def add_tag_txn(txn, next_id):
self.db_pool.simple_upsert_txn( self.db_pool.simple_upsert_txn(

View File

@@ -15,13 +15,12 @@
from typing import Any, Dict, Optional, Union from typing import Any, Dict, Optional, Union
import attr import attr
from canonicaljson import json
from synapse.api.errors import StoreError from synapse.api.errors import StoreError
from synapse.storage._base import SQLBaseStore, db_to_json from synapse.storage._base import SQLBaseStore, db_to_json
from synapse.storage.database import LoggingTransaction from synapse.storage.database import LoggingTransaction
from synapse.types import JsonDict from synapse.types import JsonDict
from synapse.util import stringutils as stringutils from synapse.util import json_encoder, stringutils
@attr.s @attr.s
@@ -73,7 +72,7 @@ class UIAuthWorkerStore(SQLBaseStore):
StoreError if a unique session ID cannot be generated. StoreError if a unique session ID cannot be generated.
""" """
# The clientdict gets stored as JSON. # The clientdict gets stored as JSON.
clientdict_json = json.dumps(clientdict) clientdict_json = json_encoder.encode(clientdict)
# autogen a session ID and try to create it. We may clash, so just # autogen a session ID and try to create it. We may clash, so just
# try a few times till one goes through, giving up eventually. # try a few times till one goes through, giving up eventually.
@@ -144,7 +143,7 @@ class UIAuthWorkerStore(SQLBaseStore):
await self.db_pool.simple_upsert( await self.db_pool.simple_upsert(
table="ui_auth_sessions_credentials", table="ui_auth_sessions_credentials",
keyvalues={"session_id": session_id, "stage_type": stage_type}, keyvalues={"session_id": session_id, "stage_type": stage_type},
values={"result": json.dumps(result)}, values={"result": json_encoder.encode(result)},
desc="mark_ui_auth_stage_complete", desc="mark_ui_auth_stage_complete",
) )
except self.db_pool.engine.module.IntegrityError: except self.db_pool.engine.module.IntegrityError:
@@ -185,7 +184,7 @@ class UIAuthWorkerStore(SQLBaseStore):
The dictionary from the client root level, not the 'auth' key. The dictionary from the client root level, not the 'auth' key.
""" """
# The clientdict gets stored as JSON. # The clientdict gets stored as JSON.
clientdict_json = json.dumps(clientdict) clientdict_json = json_encoder.encode(clientdict)
await self.db_pool.simple_update_one( await self.db_pool.simple_update_one(
table="ui_auth_sessions", table="ui_auth_sessions",
@@ -234,7 +233,7 @@ class UIAuthWorkerStore(SQLBaseStore):
txn, txn,
table="ui_auth_sessions", table="ui_auth_sessions",
keyvalues={"session_id": session_id}, keyvalues={"session_id": session_id},
updatevalues={"serverdict": json.dumps(serverdict)}, updatevalues={"serverdict": json_encoder.encode(serverdict)},
) )
async def get_ui_auth_session_data( async def get_ui_auth_session_data(