This repository has been archived by the owner on Apr 26, 2024. It is now read-only.

Use the JSON encoder without whitespace in more places. #8124

Merged
1 commit merged on Aug 20, 2020
1 change: 1 addition & 0 deletions changelog.d/8124.misc
@@ -0,0 +1 @@
+Reduce the amount of whitespace in JSON stored and sent in responses.
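
For context, the json_encoder used throughout this diff is Synapse's shared compact encoder. A minimal sketch of what it is assumed to look like (the real object lives in synapse/util/__init__.py and its exact options may differ), compared against the default json.dumps output:

import json

# Sketch of what synapse.util.json_encoder is assumed to be: a shared encoder
# with no space after ',' or ':', so encode() emits compact JSON.
# allow_nan=False mirrors canonicaljson's stricter behaviour (assumption).
json_encoder = json.JSONEncoder(separators=(",", ":"), allow_nan=False)

data = {"sender": "@alice:example.org", "type": "m.room.message"}
print(json.dumps(data))           # {"sender": "@alice:example.org", "type": "m.room.message"}
print(json_encoder.encode(data))  # {"sender":"@alice:example.org","type":"m.room.message"}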
5 changes: 2 additions & 3 deletions synapse/handlers/devicemessage.py
@@ -16,8 +16,6 @@
 import logging
 from typing import Any, Dict
 
-from canonicaljson import json
-
 from synapse.api.errors import SynapseError
 from synapse.logging.context import run_in_background
 from synapse.logging.opentracing import (
@@ -27,6 +25,7 @@
     start_active_span,
 )
 from synapse.types import UserID, get_domain_from_id
+from synapse.util import json_encoder
 from synapse.util.stringutils import random_string
 
 logger = logging.getLogger(__name__)
@@ -174,7 +173,7 @@ async def send_device_message(self, sender_user_id, message_type, messages):
                     "sender": sender_user_id,
                     "type": message_type,
                     "message_id": message_id,
-                    "org.matrix.opentracing_context": json.dumps(context),
+                    "org.matrix.opentracing_context": json_encoder.encode(context),
                 }
 
         log_kv({"local_messages": local_messages})
5 changes: 2 additions & 3 deletions synapse/logging/opentracing.py
@@ -172,12 +172,11 @@ def set_fates(clotho, lachesis, atropos, father="Zues", mother="Themis"):
 from typing import TYPE_CHECKING, Dict, Optional, Type
 
 import attr
-from canonicaljson import json
 
 from twisted.internet import defer
 
 from synapse.config import ConfigError
-from synapse.util import json_decoder
+from synapse.util import json_decoder, json_encoder
 
 if TYPE_CHECKING:
     from synapse.http.site import SynapseRequest
@@ -693,7 +692,7 @@ def active_span_context_as_string():
         opentracing.tracer.inject(
             opentracing.tracer.active_span, opentracing.Format.TEXT_MAP, carrier
         )
-    return json.dumps(carrier)
+    return json_encoder.encode(carrier)
 
 
 @only_if_tracing
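
active_span_context_as_string(), changed above, injects the active span into a TEXT_MAP carrier dict and now returns it as compact JSON. For illustration only, a sketch of the decoding counterpart (Synapse keeps a similar helper in the same module; the name and signature here are assumptions):

import opentracing

from synapse.util import json_decoder


def span_context_from_string(carrier_str):
    # Parse the compact JSON produced by json_encoder.encode(carrier) ...
    carrier = json_decoder.decode(carrier_str)
    # ... and let the tracer rebuild the span context from the TEXT_MAP carrier.
    return opentracing.tracer.extract(opentracing.Format.TEXT_MAP, carrier)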
4 changes: 2 additions & 2 deletions synapse/rest/well_known.py
@@ -13,12 +13,12 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-import json
 import logging
 
 from twisted.web.resource import Resource
 
 from synapse.http.server import set_cors_headers
+from synapse.util import json_encoder
 
 logger = logging.getLogger(__name__)
 
@@ -67,4 +67,4 @@ def render_GET(self, request):
 
         logger.debug("returning: %s", r)
         request.setHeader(b"Content-Type", b"application/json")
-        return json.dumps(r).encode("utf-8")
+        return json_encoder.encode(r).encode("utf-8")
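
Worth noting for the well-known change: JSONEncoder.encode() returns a str, so the trailing .encode("utf-8") is still needed to hand Twisted a bytes body; only the whitespace inside the JSON changes. A small sketch with made-up data (json_encoder again stands in for synapse.util.json_encoder as above):

import json

json_encoder = json.JSONEncoder(separators=(",", ":"), allow_nan=False)

payload = {"m.homeserver": {"base_url": "https://matrix.example.com"}}  # illustrative only
body = json_encoder.encode(payload).encode("utf-8")  # str -> UTF-8 bytes for the response
print(body)  # b'{"m.homeserver":{"base_url":"https://matrix.example.com"}}'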
5 changes: 2 additions & 3 deletions synapse/storage/background_updates.py
@@ -16,9 +16,8 @@
 import logging
 from typing import Optional
 
-from canonicaljson import json
-
 from synapse.metrics.background_process_metrics import run_as_background_process
+from synapse.util import json_encoder
 
 from . import engines
 
@@ -457,7 +456,7 @@ def _background_update_progress_txn(self, txn, update_name, progress):
             progress(dict): The progress of the update.
         """
 
-        progress_json = json.dumps(progress)
+        progress_json = json_encoder.encode(progress)
 
         self.db_pool.simple_update_one_txn(
             txn,
5 changes: 2 additions & 3 deletions synapse/storage/databases/main/appservice.py
@@ -16,13 +16,12 @@
 import logging
 import re
 
-from canonicaljson import json
-
 from synapse.appservice import AppServiceTransaction
 from synapse.config.appservice import load_appservices
 from synapse.storage._base import SQLBaseStore, db_to_json
 from synapse.storage.database import DatabasePool
 from synapse.storage.databases.main.events_worker import EventsWorkerStore
+from synapse.util import json_encoder
 
 logger = logging.getLogger(__name__)
 
@@ -204,7 +203,7 @@ def _create_appservice_txn(txn):
             new_txn_id = max(highest_txn_id, last_txn_id) + 1
 
             # Insert new txn into txn table
-            event_ids = json.dumps([e.event_id for e in events])
+            event_ids = json_encoder.encode([e.event_id for e in events])
             txn.execute(
                 "INSERT INTO application_services_txns(as_id, txn_id, event_ids) "
                 "VALUES(?,?,?)",
5 changes: 2 additions & 3 deletions synapse/storage/databases/main/room.py
@@ -21,15 +21,14 @@
 from enum import Enum
 from typing import Any, Dict, List, Optional, Tuple
 
-from canonicaljson import json
-
 from synapse.api.constants import EventTypes
 from synapse.api.errors import StoreError
 from synapse.api.room_versions import RoomVersion, RoomVersions
 from synapse.storage._base import SQLBaseStore, db_to_json
 from synapse.storage.database import DatabasePool, LoggingTransaction
 from synapse.storage.databases.main.search import SearchStore
 from synapse.types import ThirdPartyInstanceID
+from synapse.util import json_encoder
 from synapse.util.caches.descriptors import cached
 
 logger = logging.getLogger(__name__)
@@ -1310,7 +1309,7 @@ def add_event_report(
                 "event_id": event_id,
                 "user_id": user_id,
                 "reason": reason,
-                "content": json.dumps(content),
+                "content": json_encoder.encode(content),
             },
             desc="add_event_report",
         )
7 changes: 3 additions & 4 deletions synapse/storage/databases/main/tags.py
@@ -17,11 +17,10 @@
 import logging
 from typing import Dict, List, Tuple
 
-from canonicaljson import json
-
 from synapse.storage._base import db_to_json
 from synapse.storage.databases.main.account_data import AccountDataWorkerStore
 from synapse.types import JsonDict
+from synapse.util import json_encoder
 from synapse.util.caches.descriptors import cached
 
 logger = logging.getLogger(__name__)
@@ -98,7 +97,7 @@ def get_tag_content(txn, tag_ids):
                 txn.execute(sql, (user_id, room_id))
                 tags = []
                 for tag, content in txn:
-                    tags.append(json.dumps(tag) + ":" + content)
+                    tags.append(json_encoder.encode(tag) + ":" + content)
                 tag_json = "{" + ",".join(tags) + "}"
                 results.append((stream_id, (user_id, room_id, tag_json)))
 
@@ -200,7 +199,7 @@ async def add_tag_to_room(
         Returns:
            The next account data ID.
        """
-        content_json = json.dumps(content)
+        content_json = json_encoder.encode(content)
 
         def add_tag_txn(txn, next_id):
             self.db_pool.simple_upsert_txn(
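
The get_tag_content hunk above is the one spot where JSON is assembled by hand: each content column already stores a serialized JSON document, so only the tag name goes through the encoder and the object is stitched together by string concatenation. A rough sketch of that assembly with invented rows (json_encoder stands in for synapse.util.json_encoder):

import json

json_encoder = json.JSONEncoder(separators=(",", ":"), allow_nan=False)

# Invented rows as they might come back from the room_tags table: the content
# column already holds JSON, so it is concatenated into the output verbatim.
rows = [("m.favourite", '{"order":0.5}'), ("u.work", "{}")]

tags = [json_encoder.encode(tag) + ":" + content for tag, content in rows]
tag_json = "{" + ",".join(tags) + "}"
print(tag_json)  # {"m.favourite":{"order":0.5},"u.work":{}}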
11 changes: 5 additions & 6 deletions synapse/storage/databases/main/ui_auth.py
@@ -15,12 +15,11 @@
 from typing import Any, Dict, Optional, Union
 
 import attr
-from canonicaljson import json
 
 from synapse.api.errors import StoreError
 from synapse.storage._base import SQLBaseStore, db_to_json
 from synapse.types import JsonDict
-from synapse.util import stringutils as stringutils
+from synapse.util import json_encoder, stringutils
 
 
 @attr.s
@@ -72,7 +71,7 @@ async def create_ui_auth_session(
             StoreError if a unique session ID cannot be generated.
         """
         # The clientdict gets stored as JSON.
-        clientdict_json = json.dumps(clientdict)
+        clientdict_json = json_encoder.encode(clientdict)
 
         # autogen a session ID and try to create it. We may clash, so just
         # try a few times till one goes through, giving up eventually.
@@ -143,7 +142,7 @@ async def mark_ui_auth_stage_complete(
             await self.db_pool.simple_upsert(
                 table="ui_auth_sessions_credentials",
                 keyvalues={"session_id": session_id, "stage_type": stage_type},
-                values={"result": json.dumps(result)},
+                values={"result": json_encoder.encode(result)},
                 desc="mark_ui_auth_stage_complete",
             )
         except self.db_pool.engine.module.IntegrityError:
@@ -184,7 +183,7 @@ async def set_ui_auth_clientdict(
                 The dictionary from the client root level, not the 'auth' key.
         """
         # The clientdict gets stored as JSON.
-        clientdict_json = json.dumps(clientdict)
+        clientdict_json = json_encoder.encode(clientdict)
 
         await self.db_pool.simple_update_one(
             table="ui_auth_sessions",
@@ -231,7 +230,7 @@ def _set_ui_auth_session_data_txn(self, txn, session_id: str, key: str, value: A
             txn,
             table="ui_auth_sessions",
             keyvalues={"session_id": session_id},
-            updatevalues={"serverdict": json.dumps(serverdict)},
+            updatevalues={"serverdict": json_encoder.encode(serverdict)},
         )
 
     async def get_ui_auth_session_data(