
Merge branch 'release-v1.70' into develop

Author: Olivier Wilkinson (reivilibre), 2022-10-25 15:39:35 +01:00
Commit: 85fcbba595
8 changed files with 307 additions and 55 deletions

.ci/scripts/auditwheel_wrapper.py (new executable file, 132 lines added)

@@ -0,0 +1,132 @@
#!/usr/bin/env python
# Copyright 2022 The Matrix.org Foundation C.I.C.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

# Wraps `auditwheel repair`: first check whether we're repairing a potentially
# abi3-compatible wheel and, if so, rename the wheel before repairing it.

import argparse
import os
import subprocess
from typing import Optional
from zipfile import ZipFile
from packaging.tags import Tag
from packaging.utils import parse_wheel_filename
from packaging.version import Version


def check_is_abi3_compatible(wheel_file: str) -> None:
"""Check the contents of the built wheel for any `.so` files that are *not*
abi3 compatible.
"""
with ZipFile(wheel_file, "r") as wheel:
for file in wheel.namelist():
if not file.endswith(".so"):
continue
if not file.endswith(".abi3.so"):
raise Exception(f"Found non-abi3 lib: {file}")


def cpython(wheel_file: str, name: str, version: Version, tag: Tag) -> str:
    """Replaces the cpython wheel file with an ABI3-compatible wheel."""
if tag.abi == "abi3":
# Nothing to do.
return wheel_file
check_is_abi3_compatible(wheel_file)
abi3_tag = Tag(tag.interpreter, "abi3", tag.platform)
dirname = os.path.dirname(wheel_file)
new_wheel_file = os.path.join(
dirname,
f"{name}-{version}-{abi3_tag}.whl",
)
os.rename(wheel_file, new_wheel_file)
print("Renamed wheel to", new_wheel_file)
return new_wheel_file


def main(wheel_file: str, dest_dir: str, archs: Optional[str]) -> None:
"""Entry point"""
# Parse the wheel file name into its parts. Note that `parse_wheel_filename`
# normalizes the package name (i.e. it converts matrix_synapse ->
# matrix-synapse), which is not what we want.
_, version, build, tags = parse_wheel_filename(os.path.basename(wheel_file))
name = os.path.basename(wheel_file).split("-")[0]
if len(tags) != 1:
        # We expect a wheel file with only a single tag
raise Exception(f"Unexpectedly found multiple tags: {tags}")
tag = next(iter(tags))
if build:
# We don't use build tags in Synapse
raise Exception(f"Unexpected build tag: {build}")
# If the wheel is for cpython then convert it into an abi3 wheel.
if tag.interpreter.startswith("cp"):
wheel_file = cpython(wheel_file, name, version, tag)
# Finally, repair the wheel.
if archs is not None:
        # If we are given archs then we are on macOS and need to use
        # `delocate-listdeps` and `delocate-wheel` instead of `auditwheel`.
subprocess.run(["delocate-listdeps", wheel_file], check=True)
subprocess.run(
["delocate-wheel", "--require-archs", archs, "-w", dest_dir, wheel_file],
check=True,
)
else:
subprocess.run(["auditwheel", "repair", "-w", dest_dir, wheel_file], check=True)


if __name__ == "__main__":
parser = argparse.ArgumentParser(description="Tag wheel as abi3 and repair it.")
parser.add_argument(
"--wheel-dir",
"-w",
metavar="WHEEL_DIR",
help="Directory to store delocated wheels",
required=True,
)
parser.add_argument(
"--require-archs",
metavar="archs",
default=None,
)
parser.add_argument(
"wheel_file",
metavar="WHEEL_FILE",
)
args = parser.parse_args()
wheel_file = args.wheel_file
wheel_dir = args.wheel_dir
archs = args.require_archs
main(wheel_file, wheel_dir, archs)
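To make the renaming step above concrete, here is a small standalone sketch (not part of the commit; the wheel filename is made up) that performs the same `packaging` calls as `cpython()`:

from packaging.tags import Tag
from packaging.utils import parse_wheel_filename

wheel = "matrix_synapse-1.70.0rc2-cp37-cp37m-manylinux_2_17_x86_64.whl"

# `parse_wheel_filename` normalises the distribution name (underscores become
# hyphens), which is why the wrapper re-reads the name from the filename itself.
_, version, build, tags = parse_wheel_filename(wheel)
name = wheel.split("-")[0]
(tag,) = tags  # a single-tag wheel is expected

# Re-tag the wheel as abi3, keeping the interpreter and platform parts.
abi3_tag = Tag(tag.interpreter, "abi3", tag.platform)
print(f"{name}-{version}-{abi3_tag}.whl")
# -> matrix_synapse-1.70.0rc2-cp37-abi3-manylinux_2_17_x86_64.whl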


@@ -1,3 +1,21 @@
Synapse 1.70.0rc2 (2022-10-25)
==============================

Bugfixes
--------

- Fix a bug introduced in Synapse 1.70.0rc1 where the information returned from the `/threads` API could be stale when threaded events are redacted. ([\#14248](https://github.com/matrix-org/synapse/issues/14248))
- Fix a bug introduced in Synapse 1.70.0rc1 leading to broken outbound federation when using Python 3.7. ([\#14280](https://github.com/matrix-org/synapse/issues/14280))
- Fix a bug introduced in Synapse 1.70.0rc1 where edits to non-message events were aggregated by the homeserver. ([\#14283](https://github.com/matrix-org/synapse/issues/14283))

Internal Changes
----------------

- Build ABI3 wheels for CPython. ([\#14253](https://github.com/matrix-org/synapse/issues/14253))
- For the aarch64 architecture, only build wheels for CPython manylinux. ([\#14259](https://github.com/matrix-org/synapse/issues/14259))


Synapse 1.70.0rc1 (2022-10-19)
==============================
@@ -13,7 +31,7 @@ Features
- The `/relations` endpoint can now be used on workers. ([\#14028](https://github.com/matrix-org/synapse/issues/14028))
- Advertise support for Matrix 1.3 and 1.4 on `/_matrix/client/versions`. ([\#14032](https://github.com/matrix-org/synapse/issues/14032), [\#14184](https://github.com/matrix-org/synapse/issues/14184))
- Improve validation of request bodies for the [Device Management](https://spec.matrix.org/v1.4/client-server-api/#device-management) and [MSC2697 Device Dehydration](https://github.com/matrix-org/matrix-spec-proposals/pull/2697) client-server API endpoints. ([\#14054](https://github.com/matrix-org/synapse/issues/14054))
- Experimental support for [MSC3874](https://github.com/matrix-org/matrix-spec-proposals/pull/3874). ([\#14148](https://github.com/matrix-org/synapse/issues/14148))
- Experimental support for [MSC3874](https://github.com/matrix-org/matrix-spec-proposals/pull/3874): Filtering threads from the `/messages` endpoint. ([\#14148](https://github.com/matrix-org/synapse/issues/14148))
- Improve the validation of the following PUT endpoints: [`/directory/room/{roomAlias}`](https://spec.matrix.org/v1.4/client-server-api/#put_matrixclientv3directoryroomroomalias), [`/directory/list/room/{roomId}`](https://spec.matrix.org/v1.4/client-server-api/#put_matrixclientv3directorylistroomroomid) and [`/directory/list/appservice/{networkId}/{roomId}`](https://spec.matrix.org/v1.4/application-service-api/#put_matrixclientv3directorylistappservicenetworkidroomid). ([\#14179](https://github.com/matrix-org/synapse/issues/14179))
- Build and publish binary wheels for `aarch64` platforms. ([\#14212](https://github.com/matrix-org/synapse/issues/14212))
@@ -21,7 +39,7 @@ Features
Bugfixes
--------
- Prevent device names from appearing in device list updates when `allow_device_name_lookup_over_federation` is `false`. ([\#10015](https://github.com/matrix-org/synapse/issues/10015))
- Prevent device names from appearing in device list updates in some situations when `allow_device_name_lookup_over_federation` is `false`. (This is not comprehensive: see [\#13114](https://github.com/matrix-org/synapse/issues/13114).) ([\#10015](https://github.com/matrix-org/synapse/issues/10015))
- Fix a long-standing bug where redactions were not being sent over federation if we did not have the original event. ([\#13813](https://github.com/matrix-org/synapse/issues/13813))
- Fix a long-standing bug where edits of non-`m.room.message` events would not be correctly bundled or have their new content applied. ([\#14034](https://github.com/matrix-org/synapse/issues/14034))
- Fix a bug introduced in Synapse 1.53.0 when querying `/publicRooms` with both a `room_type` filter and a `third_party_instance_id`. ([\#14053](https://github.com/matrix-org/synapse/issues/14053))

debian/changelog (vendored, 6 lines added)

@@ -1,3 +1,9 @@
matrix-synapse-py3 (1.70.0~rc2) stable; urgency=medium

  * New Synapse release 1.70.0rc2.

 -- Synapse Packaging team <packages@matrix.org>  Tue, 25 Oct 2022 10:59:47 +0100

matrix-synapse-py3 (1.70.0~rc1) stable; urgency=medium

  * New Synapse release 1.70.0rc1.


@@ -57,7 +57,7 @@ manifest-path = "rust/Cargo.toml"

[tool.poetry]
name = "matrix-synapse"
-version = "1.70.0rc1"
+version = "1.70.0rc2"
description = "Homeserver for the Matrix decentralised comms protocol"
authors = ["Matrix.org Team and Contributors <packages@matrix.org>"]
license = "Apache-2.0"
@@ -318,7 +318,7 @@ build-backend = "poetry.core.masonry.api"

[tool.cibuildwheel]
# Skip unsupported platforms (by us or by Rust).
-skip = "cp36* *-musllinux_i686"
+skip = "cp36* *-musllinux_i686 pp*aarch64 *-musllinux_aarch64"

# We need a rust compiler
before-all = "curl https://sh.rustup.rs -sSf | sh -s -- --default-toolchain stable -y --profile minimal"
@@ -330,3 +330,12 @@ environment= { PATH = "$PATH:$HOME/.cargo/bin" }
before-build = "rm -rf {project}/build"
build-frontend = "build"
test-command = "python -c 'from synapse.synapse_rust import sum_as_string; print(sum_as_string(1, 2))'"

[tool.cibuildwheel.linux]
# Wrap the repair command to correctly rename the built cpython wheels as ABI3.
repair-wheel-command = "./.ci/scripts/auditwheel_wrapper.py -w {dest_dir} {wheel}"

[tool.cibuildwheel.macos]
# Wrap the repair command to correctly rename the built cpython wheels as ABI3.
repair-wheel-command = "./.ci/scripts/auditwheel_wrapper.py --require-archs {delocate_archs} -w {dest_dir} {wheel}"


@@ -536,8 +536,7 @@ class FederationSender(AbstractFederationSender):
if event_entries:
now = self.clock.time_msec()
last_id = next(reversed(event_ids))
ts = event_to_received_ts[last_id]
ts = max(t for t in event_to_received_ts.values() if t)
assert ts is not None
synapse.metrics.event_processing_lag.labels(
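The hunk above drops the `last_id = next(reversed(event_ids))` lookup in favour of taking the largest received timestamp. A plausible reading (the hunk is truncated here) is that `event_ids` was a dict view, and `reversed()` only supports dicts and dict views from Python 3.8, which matches the rc2 changelog entry about broken outbound federation on Python 3.7. A toy sketch of the new computation, with made-up data:

from typing import Dict, Optional

# Map of event ID to the time the event was received, in milliseconds; entries
# without a timestamp are skipped by the generator expression below.
event_to_received_ts: Dict[str, Optional[int]] = {
    "$event1": 1_666_000_000_000,
    "$event2": None,
    "$event3": 1_666_000_000_500,
}

ts = max(t for t in event_to_received_ts.values() if t)
assert ts == 1_666_000_000_500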


@@ -2028,25 +2028,37 @@ class PersistEventsStore:
redacted_event_id: The event that was redacted.
"""
# Fetch the current relation of the event being redacted.
redacted_relates_to = self.db_pool.simple_select_one_onecol_txn(
# Fetch the relation of the event being redacted.
row = self.db_pool.simple_select_one_txn(
txn,
table="event_relations",
keyvalues={"event_id": redacted_event_id},
retcol="relates_to_id",
retcols=("relates_to_id", "relation_type"),
allow_none=True,
)
# Nothing to do if no relation is found.
if row is None:
return
redacted_relates_to = row["relates_to_id"]
rel_type = row["relation_type"]
self.db_pool.simple_delete_txn(
txn, table="event_relations", keyvalues={"event_id": redacted_event_id}
)
# Any relation information for the related event must be cleared.
if redacted_relates_to is not None:
self.store._invalidate_cache_and_stream(
txn, self.store.get_relations_for_event, (redacted_relates_to,)
)
self.store._invalidate_cache_and_stream(
txn, self.store.get_relations_for_event, (redacted_relates_to,)
)
if rel_type == RelationTypes.ANNOTATION:
self.store._invalidate_cache_and_stream(
txn, self.store.get_aggregation_groups_for_event, (redacted_relates_to,)
)
if rel_type == RelationTypes.REPLACE:
self.store._invalidate_cache_and_stream(
txn, self.store.get_applicable_edit, (redacted_relates_to,)
)
if rel_type == RelationTypes.THREAD:
self.store._invalidate_cache_and_stream(
txn, self.store.get_thread_summary, (redacted_relates_to,)
)
@@ -2057,9 +2069,38 @@ class PersistEventsStore:
txn, self.store.get_threads, (room_id,)
)
self.db_pool.simple_delete_txn(
txn, table="event_relations", keyvalues={"event_id": redacted_event_id}
)
# Find the new latest event in the thread.
sql = """
SELECT event_id, topological_ordering, stream_ordering
FROM event_relations
INNER JOIN events USING (event_id)
WHERE relates_to_id = ? AND relation_type = ?
ORDER BY topological_ordering DESC, stream_ordering DESC
LIMIT 1
"""
txn.execute(sql, (redacted_relates_to, RelationTypes.THREAD))
# If a latest event is found, update the threads table, this might
# be the same current latest event (if an earlier event in the thread
# was redacted).
latest_event_row = txn.fetchone()
if latest_event_row:
self.db_pool.simple_upsert_txn(
txn,
table="threads",
keyvalues={"room_id": room_id, "thread_id": redacted_relates_to},
values={
"latest_event_id": latest_event_row[0],
"topological_ordering": latest_event_row[1],
"stream_ordering": latest_event_row[2],
},
)
# Otherwise, delete the thread: it no longer exists.
else:
self.db_pool.simple_delete_one_txn(
txn, table="threads", keyvalues={"thread_id": redacted_relates_to}
)
def _store_room_topic_txn(self, txn: LoggingTransaction, event: EventBase) -> None:
if isinstance(event.content.get("topic"), str):
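To summarise the thread bookkeeping added in the hunk above: the redacted event's row is removed from `event_relations`, the remaining events in the thread are ranked by topological then stream ordering, the newest one is upserted into `threads` as the latest event, and the thread row is deleted outright when nothing is left. A toy, self-contained illustration of that query (sqlite3 with a heavily simplified schema, not Synapse's real tables):

import sqlite3

db = sqlite3.connect(":memory:")
db.executescript(
    """
    CREATE TABLE events (event_id TEXT PRIMARY KEY, topological_ordering INT, stream_ordering INT);
    CREATE TABLE event_relations (event_id TEXT PRIMARY KEY, relates_to_id TEXT, relation_type TEXT);
    INSERT INTO events VALUES ('$reply1', 1, 1), ('$reply2', 2, 2);
    INSERT INTO event_relations VALUES
        ('$reply1', '$root', 'm.thread'),
        ('$reply2', '$root', 'm.thread');
    """
)

# Pretend '$reply2' (currently the latest event in the thread) was just redacted.
db.execute("DELETE FROM event_relations WHERE event_id = '$reply2'")

# Find the new latest event in the thread, as in the hunk above.
row = db.execute(
    """
    SELECT event_id, topological_ordering, stream_ordering
    FROM event_relations
    INNER JOIN events USING (event_id)
    WHERE relates_to_id = ? AND relation_type = ?
    ORDER BY topological_ordering DESC, stream_ordering DESC
    LIMIT 1
    """,
    ("$root", "m.thread"),
).fetchone()

# '$reply1' becomes the thread's latest event; had no row been left, the
# thread row itself would be deleted instead.
assert row == ("$reply1", 1, 1)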


@@ -484,11 +484,12 @@ class RelationsWorkerStore(SQLBaseStore):
the event will map to None.
"""
# We only allow edits for events that have the same sender and event type.
# We can't assert these things during regular event auth so we have to do
# the checks post hoc.
# We only allow edits for `m.room.message` events that have the same sender
# and event type. We can't assert these things during regular event auth so
# we have to do the checks post hoc.
# Fetches latest edit that has the same type and sender as the original.
# Fetches latest edit that has the same type and sender as the
# original, and is an `m.room.message`.
if isinstance(self.database_engine, PostgresEngine):
# The `DISTINCT ON` clause will pick the *first* row it encounters,
# so ordering by origin server ts + event ID desc will ensure we get
@@ -504,6 +505,7 @@ class RelationsWorkerStore(SQLBaseStore):
WHERE
%s
AND relation_type = ?
AND edit.type = 'm.room.message'
ORDER by original.event_id DESC, edit.origin_server_ts DESC, edit.event_id DESC
"""
else:
@@ -522,6 +524,7 @@ class RelationsWorkerStore(SQLBaseStore):
WHERE
%s
AND relation_type = ?
AND edit.type = 'm.room.message'
ORDER by edit.origin_server_ts, edit.event_id
"""


@@ -1523,6 +1523,26 @@ class RelationRedactionTestCase(BaseRelationsTestCase):
)
self.assertEqual(200, channel.code, channel.json_body)
def _get_threads(self) -> List[Tuple[str, str]]:
"""Request the threads in the room and returns a list of thread ID and latest event ID."""
# Request the threads in the room.
channel = self.make_request(
"GET",
f"/_matrix/client/v1/rooms/{self.room}/threads",
access_token=self.user_token,
)
self.assertEquals(200, channel.code, channel.json_body)
threads = channel.json_body["chunk"]
return [
(
t["event_id"],
t["unsigned"]["m.relations"][RelationTypes.THREAD]["latest_event"][
"event_id"
],
)
for t in threads
]
def test_redact_relation_annotation(self) -> None:
"""
Test that annotations of an event are properly handled after the
@@ -1567,58 +1587,82 @@ class RelationRedactionTestCase(BaseRelationsTestCase):
The redacted event should not be included in bundled aggregations or
the response to relations.
"""
channel = self._send_relation(
RelationTypes.THREAD,
EventTypes.Message,
content={"body": "reply 1", "msgtype": "m.text"},
)
unredacted_event_id = channel.json_body["event_id"]
# Create a thread with a few events in it.
thread_replies = []
for i in range(3):
channel = self._send_relation(
RelationTypes.THREAD,
EventTypes.Message,
content={"body": f"reply {i}", "msgtype": "m.text"},
)
thread_replies.append(channel.json_body["event_id"])
# Note that the *last* event in the thread is redacted, as that gets
# included in the bundled aggregation.
channel = self._send_relation(
RelationTypes.THREAD,
EventTypes.Message,
content={"body": "reply 2", "msgtype": "m.text"},
)
to_redact_event_id = channel.json_body["event_id"]
# Both relations exist.
event_ids = self._get_related_events()
##################################################
# Check the test data is configured as expected. #
##################################################
self.assertEquals(self._get_related_events(), list(reversed(thread_replies)))
relations = self._get_bundled_aggregations()
self.assertEquals(event_ids, [to_redact_event_id, unredacted_event_id])
self.assertDictContainsSubset(
{
"count": 2,
"current_user_participated": True,
},
{"count": 3, "current_user_participated": True},
relations[RelationTypes.THREAD],
)
# And the latest event returned is the event that will be redacted.
# The latest event is the last sent event.
self.assertEqual(
relations[RelationTypes.THREAD]["latest_event"]["event_id"],
to_redact_event_id,
thread_replies[-1],
)
# Redact one of the reactions.
self._redact(to_redact_event_id)
# There should be one thread, the latest event is the event that will be redacted.
self.assertEqual(self._get_threads(), [(self.parent_id, thread_replies[-1])])
# The unredacted relation should still exist.
event_ids = self._get_related_events()
##########################
# Redact the last event. #
##########################
self._redact(thread_replies.pop())
# The thread should still exist, but the latest event should be updated.
self.assertEquals(self._get_related_events(), list(reversed(thread_replies)))
relations = self._get_bundled_aggregations()
self.assertEquals(event_ids, [unredacted_event_id])
self.assertDictContainsSubset(
{
"count": 1,
"current_user_participated": True,
},
{"count": 2, "current_user_participated": True},
relations[RelationTypes.THREAD],
)
# And the latest event is now the unredacted event.
# And the latest event is the last unredacted event.
self.assertEqual(
relations[RelationTypes.THREAD]["latest_event"]["event_id"],
unredacted_event_id,
thread_replies[-1],
)
self.assertEqual(self._get_threads(), [(self.parent_id, thread_replies[-1])])
###########################################
# Redact the *first* event in the thread. #
###########################################
self._redact(thread_replies.pop(0))
# Nothing should have changed (except the thread count).
self.assertEquals(self._get_related_events(), thread_replies)
relations = self._get_bundled_aggregations()
self.assertDictContainsSubset(
{"count": 1, "current_user_participated": True},
relations[RelationTypes.THREAD],
)
# And the latest event is the last unredacted event.
self.assertEqual(
relations[RelationTypes.THREAD]["latest_event"]["event_id"],
thread_replies[-1],
)
self.assertEqual(self._get_threads(), [(self.parent_id, thread_replies[-1])])
####################################
# Redact the last remaining event. #
####################################
self._redact(thread_replies.pop(0))
self.assertEquals(thread_replies, [])
# The event should no longer be considered a thread.
self.assertEquals(self._get_related_events(), [])
self.assertEquals(self._get_bundled_aggregations(), {})
self.assertEqual(self._get_threads(), [])
def test_redact_parent_edit(self) -> None:
"""Test that edits of an event are redacted when the original event