Reduce unnecessary whitespace in JSON. (#7372)
parent 2ffd6783c7
commit 4dd27e6d11
15 changed files with 56 additions and 53 deletions

changelog.d/7372.misc (new file)
@@ -0,0 +1 @@
+Reduce the amount of whitespace in JSON stored and sent in responses. Contributed by David Vo.
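
For context on what the change buys: Python's json.dumps pads its output with a space after every "," and ":" by default, and passing separators=(",", ":") drops that padding. A minimal sketch using only the stdlib json module (the payload is illustrative, not taken from the commit):

    import json

    payload = {"event_id": "$abc", "depth": 12, "content": {"body": "hi"}}

    default = json.dumps(payload)
    compact = json.dumps(payload, separators=(",", ":"))

    print(default)  # {"event_id": "$abc", "depth": 12, "content": {"body": "hi"}}
    print(compact)  # {"event_id":"$abc","depth":12,"content":{"body":"hi"}}

    # One byte saved per comma and colon, which adds up across stored
    # events and every JSON response the homeserver sends.
    assert len(compact) < len(default)

The hunks below apply exactly that: a shared compact encoder is added to synapse.util, and call sites switch from json.dumps to it.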
@@ -25,7 +25,7 @@ from io import BytesIO
 from typing import Any, Callable, Dict, Tuple, Union

 import jinja2

-from canonicaljson import encode_canonical_json, encode_pretty_printed_json, json
+from canonicaljson import encode_canonical_json, encode_pretty_printed_json

 from twisted.internet import defer
 from twisted.python import failure
@@ -46,6 +46,7 @@ from synapse.api.errors import (
 from synapse.http.site import SynapseRequest
 from synapse.logging.context import preserve_fn
 from synapse.logging.opentracing import trace_servlet
+from synapse.util import json_encoder
 from synapse.util.caches import intern_dict

 logger = logging.getLogger(__name__)
@@ -538,7 +539,7 @@ def respond_with_json(
         # canonicaljson already encodes to bytes
         json_bytes = encode_canonical_json(json_object)
     else:
-        json_bytes = json.dumps(json_object).encode("utf-8")
+        json_bytes = json_encoder.encode(json_object).encode("utf-8")

     return respond_with_json_bytes(request, code, json_bytes, send_cors=send_cors)
@@ -18,11 +18,12 @@ The VALID_SERVER_COMMANDS and VALID_CLIENT_COMMANDS define which commands are
 allowed to be sent by which side.
 """
 import abc
-import json
 import logging
 from typing import Tuple, Type

-_json_encoder = json.JSONEncoder()
+from canonicaljson import json
+
+from synapse.util import json_encoder as _json_encoder

 logger = logging.getLogger(__name__)
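
The hunk above splits the two directions: parsing keeps a json import (canonicaljson's json is simplejson under the hood), while serialisation goes through the shared compact encoder, aliased to the existing _json_encoder name so call sites stay untouched. A rough round-trip sketch with the stdlib json standing in for both imports (field names are illustrative):

    import json

    # Stand-in for `from synapse.util import json_encoder as _json_encoder`.
    _json_encoder = json.JSONEncoder(separators=(",", ":"))

    # Outbound replication commands are serialised compactly...
    line = _json_encoder.encode({"stream_name": "events", "token": 5})

    # ...and json.loads is whitespace-agnostic, so the receiving side
    # parses old and new encodings identically.
    assert json.loads(line) == {"stream_name": "events", "token": 5}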
@@ -27,7 +27,6 @@ from typing import Dict, Optional
 from urllib import parse as urlparse

 import attr
-from canonicaljson import json

 from twisted.internet import defer
 from twisted.internet.error import DNSLookupError
@@ -43,6 +42,7 @@ from synapse.http.servlet import parse_integer, parse_string
 from synapse.logging.context import make_deferred_yieldable, run_in_background
 from synapse.metrics.background_process_metrics import run_as_background_process
 from synapse.rest.media.v1._base import get_filename_from_headers
+from synapse.util import json_encoder
 from synapse.util.async_helpers import ObservableDeferred
 from synapse.util.caches.expiringcache import ExpiringCache
 from synapse.util.stringutils import random_string
@@ -355,7 +355,7 @@ class PreviewUrlResource(DirectServeJsonResource):

         logger.debug("Calculated OG for %s as %s", url, og)

-        jsonog = json.dumps(og)
+        jsonog = json_encoder.encode(og)

         # store OG in history-aware DB cache
         await self.store.store_url_cache(
@@ -18,13 +18,12 @@ import abc
 import logging
 from typing import List, Tuple

-from canonicaljson import json
-
 from twisted.internet import defer

 from synapse.storage._base import SQLBaseStore, db_to_json
 from synapse.storage.database import DatabasePool
 from synapse.storage.util.id_generators import StreamIdGenerator
+from synapse.util import json_encoder
 from synapse.util.caches.descriptors import cached, cachedInlineCallbacks
 from synapse.util.caches.stream_change_cache import StreamChangeCache
@@ -327,7 +326,7 @@ class AccountDataStore(AccountDataWorkerStore):
         Returns:
             A deferred that completes once the account_data has been added.
         """
-        content_json = json.dumps(content)
+        content_json = json_encoder.encode(content)

         with self._account_data_id_gen.get_next() as next_id:
             # no need to lock here as room_account_data has a unique constraint
@@ -373,7 +372,7 @@ class AccountDataStore(AccountDataWorkerStore):
         Returns:
             A deferred that completes once the account_data has been added.
         """
-        content_json = json.dumps(content)
+        content_json = json_encoder.encode(content)

         with self._account_data_id_gen.get_next() as next_id:
             # no need to lock here as account_data has a unique constraint on
@@ -16,13 +16,12 @@
 import logging
 from typing import List, Tuple

-from canonicaljson import json
-
 from twisted.internet import defer

 from synapse.logging.opentracing import log_kv, set_tag, trace
 from synapse.storage._base import SQLBaseStore, db_to_json, make_in_list_sql_clause
 from synapse.storage.database import DatabasePool
+from synapse.util import json_encoder
 from synapse.util.caches.expiringcache import ExpiringCache

 logger = logging.getLogger(__name__)
@@ -354,7 +353,7 @@ class DeviceInboxStore(DeviceInboxWorkerStore, DeviceInboxBackgroundUpdateStore)
             )
             rows = []
             for destination, edu in remote_messages_by_destination.items():
-                edu_json = json.dumps(edu)
+                edu_json = json_encoder.encode(edu)
                 rows.append((destination, stream_id, now_ms, edu_json))
             txn.executemany(sql, rows)
@@ -432,7 +431,7 @@ class DeviceInboxStore(DeviceInboxWorkerStore, DeviceInboxBackgroundUpdateStore)
                 # Handle wildcard device_ids.
                 sql = "SELECT device_id FROM devices WHERE user_id = ?"
                 txn.execute(sql, (user_id,))
-                message_json = json.dumps(messages_by_device["*"])
+                message_json = json_encoder.encode(messages_by_device["*"])
                 for row in txn:
                     # Add the message for all devices for this user on this
                     # server.
@@ -454,7 +453,7 @@ class DeviceInboxStore(DeviceInboxWorkerStore, DeviceInboxBackgroundUpdateStore)
                     # Only insert into the local inbox if the device exists on
                     # this server
                     device = row[0]
-                    message_json = json.dumps(messages_by_device[device])
+                    message_json = json_encoder.encode(messages_by_device[device])
                     messages_json_for_user[device] = message_json

             if messages_json_for_user:
@@ -17,8 +17,6 @@
 import logging
 from typing import List, Optional, Set, Tuple

-from canonicaljson import json
-
 from twisted.internet import defer

 from synapse.api.errors import Codes, StoreError
@@ -36,6 +34,7 @@ from synapse.storage.database import (
     make_tuple_comparison_clause,
 )
 from synapse.types import Collection, get_verify_key_from_cross_signing_key
+from synapse.util import json_encoder
 from synapse.util.caches.descriptors import (
     Cache,
     cached,
@@ -397,7 +396,7 @@ class DeviceWorkerStore(SQLBaseStore):
             values={
                 "stream_id": stream_id,
                 "from_user_id": from_user_id,
-                "user_ids": json.dumps(user_ids),
+                "user_ids": json_encoder.encode(user_ids),
             },
         )
@@ -1032,7 +1031,7 @@ class DeviceStore(DeviceWorkerStore, DeviceBackgroundUpdateStore):
             txn,
             table="device_lists_remote_cache",
             keyvalues={"user_id": user_id, "device_id": device_id},
-            values={"content": json.dumps(content)},
+            values={"content": json_encoder.encode(content)},
             # we don't need to lock, because we assume we are the only thread
             # updating this user's devices.
             lock=False,
@@ -1088,7 +1087,7 @@ class DeviceStore(DeviceWorkerStore, DeviceBackgroundUpdateStore):
                 {
                     "user_id": user_id,
                     "device_id": content["device_id"],
-                    "content": json.dumps(content),
+                    "content": json_encoder.encode(content),
                 }
                 for content in devices
             ],
@@ -1209,7 +1208,7 @@ class DeviceStore(DeviceWorkerStore, DeviceBackgroundUpdateStore):
                     "device_id": device_id,
                     "sent": False,
                     "ts": now,
-                    "opentracing_context": json.dumps(context)
+                    "opentracing_context": json_encoder.encode(context)
                     if whitelisted_homeserver(destination)
                     else "{}",
                 }
@@ -14,13 +14,12 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.

-from canonicaljson import json
-
 from twisted.internet import defer

 from synapse.api.errors import StoreError
 from synapse.logging.opentracing import log_kv, trace
 from synapse.storage._base import SQLBaseStore, db_to_json
+from synapse.util import json_encoder


 class EndToEndRoomKeyStore(SQLBaseStore):
@@ -50,7 +49,7 @@ class EndToEndRoomKeyStore(SQLBaseStore):
                 "first_message_index": room_key["first_message_index"],
                 "forwarded_count": room_key["forwarded_count"],
                 "is_verified": room_key["is_verified"],
-                "session_data": json.dumps(room_key["session_data"]),
+                "session_data": json_encoder.encode(room_key["session_data"]),
             },
             desc="update_e2e_room_key",
         )
@@ -77,7 +76,7 @@ class EndToEndRoomKeyStore(SQLBaseStore):
                     "first_message_index": room_key["first_message_index"],
                     "forwarded_count": room_key["forwarded_count"],
                     "is_verified": room_key["is_verified"],
-                    "session_data": json.dumps(room_key["session_data"]),
+                    "session_data": json_encoder.encode(room_key["session_data"]),
                 }
             )
         log_kv(
@@ -360,7 +359,7 @@ class EndToEndRoomKeyStore(SQLBaseStore):
                 "user_id": user_id,
                 "version": new_version,
                 "algorithm": info["algorithm"],
-                "auth_data": json.dumps(info["auth_data"]),
+                "auth_data": json_encoder.encode(info["auth_data"]),
             },
         )
@@ -387,7 +386,7 @@ class EndToEndRoomKeyStore(SQLBaseStore):
             updatevalues = {}

             if info is not None and "auth_data" in info:
-                updatevalues["auth_data"] = json.dumps(info["auth_data"])
+                updatevalues["auth_data"] = json_encoder.encode(info["auth_data"])
             if version_etag is not None:
                 updatevalues["etag"] = version_etag
@@ -16,7 +16,7 @@
 # limitations under the License.
 from typing import Dict, List, Tuple

-from canonicaljson import encode_canonical_json, json
+from canonicaljson import encode_canonical_json

 from twisted.enterprise.adbapi import Connection
 from twisted.internet import defer
@@ -24,6 +24,7 @@ from twisted.internet import defer
 from synapse.logging.opentracing import log_kv, set_tag, trace
 from synapse.storage._base import SQLBaseStore, db_to_json
 from synapse.storage.database import make_in_list_sql_clause
+from synapse.util import json_encoder
 from synapse.util.caches.descriptors import cached, cachedList
 from synapse.util.iterutils import batch_iter
@@ -700,7 +701,7 @@ class EndToEndKeyStore(EndToEndKeyWorkerStore, SQLBaseStore):
             values={
                 "user_id": user_id,
                 "keytype": key_type,
-                "keydata": json.dumps(key),
+                "keydata": json_encoder.encode(key),
                 "stream_id": stream_id,
             },
         )
@@ -17,11 +17,10 @@
 import logging
 from typing import List

-from canonicaljson import json
-
 from synapse.metrics.background_process_metrics import run_as_background_process
 from synapse.storage._base import LoggingTransaction, SQLBaseStore, db_to_json
 from synapse.storage.database import DatabasePool
+from synapse.util import json_encoder
 from synapse.util.caches.descriptors import cachedInlineCallbacks

 logger = logging.getLogger(__name__)
@@ -50,7 +49,7 @@ def _serialize_action(actions, is_highlight):
     else:
         if actions == DEFAULT_NOTIF_ACTION:
             return ""
-    return json.dumps(actions)
+    return json_encoder.encode(actions)


 def _deserialize_action(actions, is_highlight):
@@ -16,12 +16,11 @@

 from typing import List, Tuple

-from canonicaljson import json
-
 from twisted.internet import defer

 from synapse.api.errors import SynapseError
 from synapse.storage._base import SQLBaseStore, db_to_json
+from synapse.util import json_encoder

 # The category ID for the "default" category. We don't store as null in the
 # database to avoid the fun of null != null
@@ -752,7 +751,7 @@ class GroupServerStore(GroupServerWorkerStore):
         if profile is None:
             insertion_values["profile"] = "{}"
         else:
-            update_values["profile"] = json.dumps(profile)
+            update_values["profile"] = json_encoder.encode(profile)

         if is_public is None:
             insertion_values["is_public"] = True
@@ -783,7 +782,7 @@ class GroupServerStore(GroupServerWorkerStore):
         if profile is None:
             insertion_values["profile"] = "{}"
         else:
-            update_values["profile"] = json.dumps(profile)
+            update_values["profile"] = json_encoder.encode(profile)

         if is_public is None:
             insertion_values["is_public"] = True
@@ -1007,7 +1006,7 @@ class GroupServerStore(GroupServerWorkerStore):
                 "group_id": group_id,
                 "user_id": user_id,
                 "valid_until_ms": remote_attestation["valid_until_ms"],
-                "attestation_json": json.dumps(remote_attestation),
+                "attestation_json": json_encoder.encode(remote_attestation),
             },
         )
@@ -1131,7 +1130,7 @@ class GroupServerStore(GroupServerWorkerStore):
                 "is_admin": is_admin,
                 "membership": membership,
                 "is_publicised": is_publicised,
-                "content": json.dumps(content),
+                "content": json_encoder.encode(content),
             },
         )
@@ -1143,7 +1142,7 @@ class GroupServerStore(GroupServerWorkerStore):
                 "group_id": group_id,
                 "user_id": user_id,
                 "type": "membership",
-                "content": json.dumps(
+                "content": json_encoder.encode(
                     {"membership": membership, "content": content}
                 ),
             },
@@ -1171,7 +1170,7 @@ class GroupServerStore(GroupServerWorkerStore):
                 "group_id": group_id,
                 "user_id": user_id,
                 "valid_until_ms": remote_attestation["valid_until_ms"],
-                "attestation_json": json.dumps(remote_attestation),
+                "attestation_json": json_encoder.encode(remote_attestation),
             },
         )
     else:
@@ -1240,7 +1239,7 @@ class GroupServerStore(GroupServerWorkerStore):
            keyvalues={"group_id": group_id, "user_id": user_id},
            updatevalues={
                "valid_until_ms": attestation["valid_until_ms"],
-               "attestation_json": json.dumps(attestation),
+               "attestation_json": json_encoder.encode(attestation),
            },
            desc="update_remote_attestion",
        )
@@ -18,8 +18,6 @@ import abc
 import logging
 from typing import List, Tuple, Union

-from canonicaljson import json
-
 from twisted.internet import defer

 from synapse.push.baserules import list_with_base_rules
@@ -33,6 +31,7 @@ from synapse.storage.databases.main.receipts import ReceiptsWorkerStore
 from synapse.storage.databases.main.roommember import RoomMemberWorkerStore
 from synapse.storage.push_rule import InconsistentRuleException, RuleNotFoundException
 from synapse.storage.util.id_generators import ChainedIdGenerator
+from synapse.util import json_encoder
 from synapse.util.caches.descriptors import cachedInlineCallbacks, cachedList
 from synapse.util.caches.stream_change_cache import StreamChangeCache
@@ -411,8 +410,8 @@ class PushRuleStore(PushRulesWorkerStore):
         before=None,
         after=None,
     ):
-        conditions_json = json.dumps(conditions)
-        actions_json = json.dumps(actions)
+        conditions_json = json_encoder.encode(conditions)
+        actions_json = json_encoder.encode(actions)
         with self._push_rules_stream_id_gen.get_next() as ids:
             stream_id, event_stream_ordering = ids
             if before or after:
@@ -681,7 +680,7 @@ class PushRuleStore(PushRulesWorkerStore):

     @defer.inlineCallbacks
     def set_push_rule_actions(self, user_id, rule_id, actions, is_default_rule):
-        actions_json = json.dumps(actions)
+        actions_json = json_encoder.encode(actions)

         def set_push_rule_actions_txn(txn, stream_id, event_stream_ordering):
             if is_default_rule:
@@ -18,13 +18,12 @@ import abc
 import logging
 from typing import List, Tuple

-from canonicaljson import json
-
 from twisted.internet import defer

 from synapse.storage._base import SQLBaseStore, db_to_json, make_in_list_sql_clause
 from synapse.storage.database import DatabasePool
 from synapse.storage.util.id_generators import StreamIdGenerator
+from synapse.util import json_encoder
 from synapse.util.async_helpers import ObservableDeferred
 from synapse.util.caches.descriptors import cached, cachedInlineCallbacks, cachedList
 from synapse.util.caches.stream_change_cache import StreamChangeCache
@@ -459,7 +458,7 @@ class ReceiptsStore(ReceiptsWorkerStore):
             values={
                 "stream_id": stream_id,
                 "event_id": event_id,
-                "data": json.dumps(data),
+                "data": json_encoder.encode(data),
             },
             # receipts_linearized has a unique constraint on
             # (user_id, room_id, receipt_type), so no need to lock
@@ -585,7 +584,7 @@ class ReceiptsStore(ReceiptsWorkerStore):
                 "room_id": room_id,
                 "receipt_type": receipt_type,
                 "user_id": user_id,
-                "event_ids": json.dumps(event_ids),
-                "data": json.dumps(data),
+                "event_ids": json_encoder.encode(event_ids),
+                "data": json_encoder.encode(data),
             },
         )
@@ -17,6 +17,7 @@ import logging
 import re

 import attr
+from canonicaljson import json

 from twisted.internet import defer, task
@@ -24,6 +25,9 @@ from synapse.logging import context

 logger = logging.getLogger(__name__)

+# Create a custom encoder to reduce the whitespace produced by JSON encoding.
+json_encoder = json.JSONEncoder(separators=(",", ":"))
+

 def unwrapFirstError(failure):
     # defer.gatherResults and DeferredLists wrap failures.
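
With the two hunks above, any module can import the shared encoder. A small usage sketch, assuming a synapse checkout at this commit is on the import path (the dict is illustrative):

    from synapse.util import json_encoder

    # encode() returns a str with no padding after "," or ":",
    # unlike the json.dumps default.
    print(json_encoder.encode({"user_ids": ["@a:hs", "@b:hs"]}))
    # -> {"user_ids":["@a:hs","@b:hs"]}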
@@ -63,5 +63,8 @@ def _handle_frozendict(obj):
     )


-# A JSONEncoder which is capable of encoding frozendicts without barfing
-frozendict_json_encoder = json.JSONEncoder(default=_handle_frozendict)
+# A JSONEncoder which is capable of encoding frozendicts without barfing.
+# Additionally reduce the whitespace produced by JSON encoding.
+frozendict_json_encoder = json.JSONEncoder(
+    default=_handle_frozendict, separators=(",", ":"),
+)
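
The same separators tweak is folded into the frozendict-aware encoder, where default= and separators= combine in one JSONEncoder. In the sketch below, Frozen and the helper are hypothetical stand-ins for frozendict and synapse's _handle_frozendict, so it runs without either dependency:

    import json

    class Frozen:
        # Hypothetical stand-in for frozendict: an immutable-ish mapping wrapper.
        def __init__(self, d):
            self._d = dict(d)

    def _handle_frozendict(obj):
        # Called by JSONEncoder only for types it cannot serialise natively;
        # unwrap the wrapper into a plain dict.
        if isinstance(obj, Frozen):
            return obj._d
        raise TypeError(
            "Object of type %s is not JSON serializable" % type(obj).__name__
        )

    frozendict_json_encoder = json.JSONEncoder(
        default=_handle_frozendict, separators=(",", ":"),
    )

    print(frozendict_json_encoder.encode({"content": Frozen({"a": 1})}))
    # -> {"content":{"a":1}}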