Merge pull request #8678 from matrix-org/rav/fix_frozen_events
Fix serialisation errors when using third-party event rules.
commit c97da1e45d
7 changed files with 32 additions and 40 deletions
changelog.d/8678.bugfix (new file)

@@ -0,0 +1 @@
+Fix `Object of type frozendict is not JSON serializable` exceptions when using third-party event rules.
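For context (not part of the diff): the exception named in the changelog comes from `json.JSONEncoder`, which does not know how to serialise the `frozendict` type Synapse uses for frozen event contents; the encoder's `default=` hook is the standard way to teach it. A minimal sketch of the idea, not Synapse's code, assuming the classic Mapping-based `frozendict` package (not a dict subclass):

import json

from frozendict import frozendict

content = frozendict({"msgtype": "m.text", "body": "hello"})

# The stock encoder rejects unknown types:
#   TypeError: Object of type frozendict is not JSON serializable
try:
    json.dumps(content)
except TypeError as exc:
    print(exc)


def handle_frozendict(obj):
    """default= hook: called for any object the encoder cannot serialise itself."""
    if isinstance(obj, frozendict):
        return dict(obj)  # the PR avoids this copy by reading the internal dict
    raise TypeError(
        "Object of type %s is not JSON serializable" % obj.__class__.__name__
    )


encoder = json.JSONEncoder(
    allow_nan=False, separators=(",", ":"), default=handle_frozendict
)
print(encoder.encode(content))  # {"msgtype":"m.text","body":"hello"}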
@@ -50,9 +50,8 @@ from synapse.replication.http.send_event import ReplicationSendEventRestServlet
 from synapse.storage.databases.main.events_worker import EventRedactBehaviour
 from synapse.storage.state import StateFilter
 from synapse.types import Requester, RoomAlias, StreamToken, UserID, create_requester
-from synapse.util import json_decoder
+from synapse.util import json_decoder, json_encoder
 from synapse.util.async_helpers import Linearizer
-from synapse.util.frozenutils import frozendict_json_encoder
 from synapse.util.metrics import measure_func
 from synapse.visibility import filter_events_for_client

@@ -928,7 +927,7 @@ class EventCreationHandler:

         # Ensure that we can round trip before trying to persist in db
         try:
-            dump = frozendict_json_encoder.encode(event.content)
+            dump = json_encoder.encode(event.content)
             json_decoder.decode(dump)
         except Exception:
             logger.exception("Failed to encode content: %r", event.content)
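As an aside (not part of the diff): the round-trip check above works because synapse.util's encoder and decoder are deliberately strict. For example, `allow_nan=False` makes content containing `NaN` fail at encode time instead of being persisted as invalid JSON. A rough, self-contained illustration with made-up event content:

import json

strict_encoder = json.JSONEncoder(allow_nan=False, separators=(",", ":"))

bad_content = {"msgtype": "m.text", "body": "hello", "score": float("nan")}
try:
    # Encode and decode once before "persisting", mirroring the check above.
    json.loads(strict_encoder.encode(bad_content))
except ValueError as exc:
    # allow_nan=False raises here rather than emitting NaN, which is not
    # valid JSON and would only blow up later, far from the cause.
    print("rejected before persisting:", exc)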
@@ -35,8 +35,6 @@ from twisted.web.server import NOT_DONE_YET, Request
 from twisted.web.static import File, NoRangeStaticProducer
 from twisted.web.util import redirectTo

-import synapse.events
-import synapse.metrics
 from synapse.api.errors import (
     CodeMessageException,
     Codes,

@@ -620,7 +618,7 @@ def respond_with_json(
     if pretty_print:
         encoder = iterencode_pretty_printed_json
     else:
-        if canonical_json or synapse.events.USE_FROZEN_DICTS:
+        if canonical_json:
            encoder = iterencode_canonical_json
        else:
            encoder = _encode_json_bytes
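A side note, not stated in the diff: the `USE_FROZEN_DICTS` special case presumably existed because the canonical-JSON encoder could cope with frozendicts while the plain one could not; with frozendict handling now built into the shared encoder, only the caller's `canonical_json` flag matters. Matrix's canonical JSON is, roughly, compact JSON with sorted keys (the real canonicaljson library does more, such as UTF-8 handling and rejecting out-of-range values). A loose approximation for illustration only:

import json

event = {"type": "m.room.message", "content": {"body": "hi", "msgtype": "m.text"}}

# Compact encoding: minimal whitespace, keys in insertion order.
compact = json.dumps(event, separators=(",", ":"))

# Canonical-style encoding: additionally sort keys so the same object always
# serialises to the same bytes (useful for signing and hashing).
canonical_ish = json.dumps(
    event, separators=(",", ":"), sort_keys=True, ensure_ascii=False
)

print(compact)
print(canonical_ish)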
@@ -22,7 +22,7 @@ from synapse.storage._base import SQLBaseStore
 from synapse.storage.database import DatabasePool
 from synapse.storage.databases.main.cache import CacheInvalidationWorkerStore
 from synapse.storage.databases.main.events_worker import EventsWorkerStore
-from synapse.util.frozenutils import frozendict_json_encoder
+from synapse.util import json_encoder

 if TYPE_CHECKING:
     from synapse.server import HomeServer

@@ -104,7 +104,7 @@ class CensorEventsStore(EventsWorkerStore, CacheInvalidationWorkerStore, SQLBase
             and original_event.internal_metadata.is_redacted()
         ):
             # Redaction was allowed
-            pruned_json = frozendict_json_encoder.encode(
+            pruned_json = json_encoder.encode(
                 prune_event_dict(
                     original_event.room_version, original_event.get_dict()
                 )
             )

@@ -170,7 +170,7 @@ class CensorEventsStore(EventsWorkerStore, CacheInvalidationWorkerStore, SQLBase
            return

        # Prune the event's dict then convert it to JSON.
-       pruned_json = frozendict_json_encoder.encode(
+       pruned_json = json_encoder.encode(
            prune_event_dict(event.room_version, event.get_dict())
        )
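For readers unfamiliar with redaction censoring (background, not part of the change): `prune_event_dict` strips an event down to the fields the Matrix spec says survive redaction, and the pruned dict is then re-encoded and written over the original JSON. A very rough, hypothetical stand-in for the pruning step:

# Hypothetical sketch only: the real prune_event_dict is room-version aware and
# also keeps selected keys inside "content" depending on the event type.
KEPT_TOP_LEVEL_KEYS = {
    "event_id", "type", "room_id", "sender", "state_key", "content",
    "hashes", "signatures", "depth", "prev_events", "auth_events",
    "origin", "origin_server_ts", "membership",
}


def prune_event_dict_sketch(event_json: dict) -> dict:
    """Keep only whitelisted top-level keys and empty out the content."""
    pruned = {k: v for k, v in event_json.items() if k in KEPT_TOP_LEVEL_KEYS}
    pruned["content"] = {}  # the real code keeps a few type-specific content keys
    return pruned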
@@ -34,7 +34,7 @@ from synapse.storage.database import DatabasePool, LoggingTransaction
 from synapse.storage.databases.main.search import SearchEntry
 from synapse.storage.util.id_generators import MultiWriterIdGenerator
 from synapse.types import StateMap, get_domain_from_id
-from synapse.util.frozenutils import frozendict_json_encoder
+from synapse.util import json_encoder
 from synapse.util.iterutils import batch_iter

 if TYPE_CHECKING:

@@ -769,9 +769,7 @@ class PersistEventsStore:
                logger.exception("")
                raise

-            metadata_json = frozendict_json_encoder.encode(
-                event.internal_metadata.get_dict()
-            )
+            metadata_json = json_encoder.encode(event.internal_metadata.get_dict())

            sql = "UPDATE event_json SET internal_metadata = ? WHERE event_id = ?"
            txn.execute(sql, (metadata_json, event.event_id))

@@ -826,10 +824,10 @@ class PersistEventsStore:
                {
                    "event_id": event.event_id,
                    "room_id": event.room_id,
-                    "internal_metadata": frozendict_json_encoder.encode(
+                    "internal_metadata": json_encoder.encode(
                        event.internal_metadata.get_dict()
                    ),
-                    "json": frozendict_json_encoder.encode(event_dict(event)),
+                    "json": json_encoder.encode(event_dict(event)),
                    "format_version": event.format_version,
                }
                for event, _ in events_and_contexts
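Incidentally (background, not part of the change): the re-encoded metadata is written back with a parameterised `UPDATE`, so the JSON string travels as an ordinary bound value. A self-contained sketch of that pattern using the standard-library sqlite3 module, with a made-up table and values:

import json
import sqlite3

conn = sqlite3.connect(":memory:")
conn.execute(
    "CREATE TABLE event_json (event_id TEXT PRIMARY KEY, internal_metadata TEXT)"
)
conn.execute("INSERT INTO event_json VALUES (?, ?)", ("$event1", "{}"))

# Encode the (plain-dict) metadata and bind it as a parameter, as the diff does.
metadata_json = json.dumps({"outlier": False}, separators=(",", ":"))
conn.execute(
    "UPDATE event_json SET internal_metadata = ? WHERE event_id = ?",
    (metadata_json, "$event1"),
)

print(conn.execute("SELECT internal_metadata FROM event_json").fetchone()[0])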
@@ -18,6 +18,7 @@ import logging
 import re

 import attr
+from frozendict import frozendict

 from twisted.internet import defer, task


@@ -31,9 +32,26 @@ def _reject_invalid_json(val):
     raise ValueError("Invalid JSON value: '%s'" % val)


-# Create a custom encoder to reduce the whitespace produced by JSON encoding and
-# ensure that valid JSON is produced.
-json_encoder = json.JSONEncoder(allow_nan=False, separators=(",", ":"))
+def _handle_frozendict(obj):
+    """Helper for json_encoder. Makes frozendicts serializable by returning
+    the underlying dict
+    """
+    if type(obj) is frozendict:
+        # fishing the protected dict out of the object is a bit nasty,
+        # but we don't really want the overhead of copying the dict.
+        return obj._dict
+    raise TypeError(
+        "Object of type %s is not JSON serializable" % obj.__class__.__name__
+    )
+
+
+# A custom JSON encoder which:
+# * handles frozendicts
+# * produces valid JSON (no NaNs etc)
+# * reduces redundant whitespace
+json_encoder = json.JSONEncoder(
+    allow_nan=False, separators=(",", ":"), default=_handle_frozendict
+)

 # Create a custom decoder to reject Python extensions to JSON.
 json_decoder = json.JSONDecoder(parse_constant=_reject_invalid_json)
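Worth noting (an observation, not from the diff): `json.JSONEncoder` consults `default=` recursively, for any value it cannot serialise natively, so frozendicts nested inside lists or plain dicts are converted too. A small usage sketch of an encoder configured the same way (copying instead of touching frozendict internals):

import json

from frozendict import frozendict


def handle_frozendict(obj):
    # Mirrors the diff's helper, but uses a copy rather than obj._dict.
    if isinstance(obj, frozendict):
        return dict(obj)
    raise TypeError(
        "Object of type %s is not JSON serializable" % obj.__class__.__name__
    )


encoder = json.JSONEncoder(
    allow_nan=False, separators=(",", ":"), default=handle_frozendict
)

# default= is applied to nested values as well, so deeply frozen structures
# serialise cleanly.
nested = {"unsigned": frozendict({"age": 1234}), "refs": [frozendict({"a": 1})]}
print(encoder.encode(nested))  # {"unsigned":{"age":1234},"refs":[{"a":1}]}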
@@ -13,8 +13,6 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.

-import json
-
 from frozendict import frozendict


@@ -49,23 +47,3 @@ def unfreeze(o):
        pass

    return o
-
-
-def _handle_frozendict(obj):
-    """Helper for EventEncoder. Makes frozendicts serializable by returning
-    the underlying dict
-    """
-    if type(obj) is frozendict:
-        # fishing the protected dict out of the object is a bit nasty,
-        # but we don't really want the overhead of copying the dict.
-        return obj._dict
-    raise TypeError(
-        "Object of type %s is not JSON serializable" % obj.__class__.__name__
-    )
-
-
-# A JSONEncoder which is capable of encoding frozendicts without barfing.
-# Additionally reduce the whitespace produced by JSON encoding.
-frozendict_json_encoder = json.JSONEncoder(
-    allow_nan=False, separators=(",", ":"), default=_handle_frozendict,
-)