Mirror of https://gitlab.com/famedly/conduit.git, synced 2024-12-26 20:34:26 +01:00.

Merge branch 'update-rust' into 'next'

Update Rust toolchain. See merge request famedly/conduit!572

Commit 825ceac1c3: 18 changed files with 67 additions and 65 deletions.

@@ -106,7 +106,7 @@ oci-image:aarch64-unknown-linux-musl:
 debian:x86_64-unknown-linux-gnu:
   stage: artifacts
   # See also `rust-toolchain.toml`
-  image: rust:1.70.0
+  image: rust:1.75.0
   script:
     - apt-get update && apt-get install -y --no-install-recommends libclang-dev
     - cargo install cargo-deb

Cargo.toml (16 changed lines)

@@ -1,3 +1,14 @@
+# Keep alphabetically sorted
+[workspace.lints.rust]
+explicit_outlives_requirements = "warn"
+unused_qualifications = "warn"
+
+# Keep alphabetically sorted
+[workspace.lints.clippy]
+cloned_instead_of_copied = "warn"
+dbg_macro = "warn"
+str_to_string = "warn"
+
 [package]
 name = "conduit"
 description = "A Matrix homeserver written in Rust"

@@ -10,10 +21,13 @@ version = "0.7.0-alpha"
 edition = "2021"
 
 # See also `rust-toolchain.toml`
-rust-version = "1.70.0"
+rust-version = "1.75.0"
 
 # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
 
+[lints]
+workspace = true
+
 [dependencies]
 # Web framework
 axum = { version = "0.6.18", default-features = false, features = ["form", "headers", "http1", "http2", "json", "matched-path"], optional = true }
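
The crate-level #![warn(...)] attribute blocks removed from the library and binary roots later in this diff are replaced by these workspace-level lint tables, which member crates opt into through [lints] workspace = true. A minimal sketch, not project code, of the kind of pattern each listed clippy lint flags:

// Sketch only (hypothetical values): patterns the new workspace clippy lints
// warn about, with their preferred replacements.
fn main() {
    let name = "conduit";

    // clippy::str_to_string: `to_string()` on a `&str` obscures that this is
    // just an allocation; `to_owned()` is preferred.
    let _owned = name.to_owned(); // instead of `name.to_string()`

    // clippy::cloned_instead_of_copied: for `Copy` items, `copied()` states
    // the cost more clearly than `cloned()`.
    let nums = [1u8, 2, 3];
    let _sum: u8 = nums.iter().copied().sum(); // instead of `.cloned()`

    // clippy::dbg_macro: `dbg!` is for local debugging and should not land in
    // committed code.
    // dbg!(name); // would now trigger a warning
}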

flake.nix (15 changed lines)

@@ -39,7 +39,7 @@
           file = ./rust-toolchain.toml;
 
           # See also `rust-toolchain.toml`
-          sha256 = "sha256-gdYqng0y9iHYzYPAdkC/ka3DRny3La/S5G8ASj0Ayyc=";
+          sha256 = "sha256-SXRtAuO4IqNOQq+nLbrsDFbVk+3aVA8NNpSZsKlVH/8=";
         };
 
         builder = pkgs:

@@ -70,7 +70,7 @@
             stdenv.hostPlatform.isStatic
             ["-C" "relocation-model=static"]
           ++ lib.optionals
-            (stdenv.buildPlatform.config != pkgs.stdenv.hostPlatform.config)
+            (stdenv.buildPlatform.config != stdenv.hostPlatform.config)
             ["-l" "c"]
           ++ lib.optionals
             # This check has to match the one [here][0]. We only need to set

@@ -79,10 +79,13 @@
             #
             # [0]: https://github.com/NixOS/nixpkgs/blob/612f97239e2cc474c13c9dafa0df378058c5ad8d/pkgs/build-support/rust/lib/default.nix#L36-L39
             (
-              pkgs.stdenv.hostPlatform.isAarch64
-              && pkgs.stdenv.hostPlatform.isStatic
-              && !pkgs.stdenv.isDarwin
-              && !pkgs.stdenv.cc.bintools.isLLVM
+              # Nixpkgs doesn't check for x86_64 here but we do, because I
+              # observed a failure building statically for x86_64 without
+              # including it here. Linkers are weird.
+              (stdenv.hostPlatform.isAarch64 || stdenv.hostPlatform.isx86_64)
+              && stdenv.hostPlatform.isStatic
+              && !stdenv.isDarwin
+              && !stdenv.cc.bintools.isLLVM
             )
             [
               "-l"

@@ -10,7 +10,7 @@
 # If you're having trouble making the relevant changes, bug a maintainer.
 
 [toolchain]
-channel = "1.70.0"
+channel = "1.75.0"
 components = [
     # For rust-analyzer
     "rust-src",

@@ -51,7 +51,7 @@ pub async fn create_content_route(
         .await?;
 
     Ok(create_content::v3::Response {
-        content_uri: mxc.try_into().expect("Invalid mxc:// URI"),
+        content_uri: mxc.into(),
         blurhash: None,
     })
 }
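
The switch from mxc.try_into().expect("Invalid mxc:// URI") to mxc.into() suggests the value now converts infallibly into the response's URI type, so the panic path disappears. A hypothetical sketch of the same shape (types invented for illustration, not the ruma API):

// Hypothetical types for illustration only.
struct Mxc(String);
struct ContentUri(String);

impl From<Mxc> for ContentUri {
    fn from(mxc: Mxc) -> Self {
        ContentUri(mxc.0)
    }
}

fn main() {
    let mxc = Mxc("mxc://example.org/abc123".to_owned());

    // Before: a fallible conversion with a panic message.
    // let uri: ContentUri = mxc.try_into().expect("Invalid mxc:// URI");

    // After: `From` is available, so the conversion cannot fail.
    let uri: ContentUri = mxc.into();
    assert_eq!(uri.0, "mxc://example.org/abc123");
}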

@@ -116,7 +116,7 @@ impl KvTree for PersyTree {
         match iter {
             Ok(iter) => Box::new(iter.filter_map(|(k, v)| {
                 v.into_iter()
-                    .map(|val| ((*k).to_owned().into(), (*val).to_owned().into()))
+                    .map(|val| ((*k).to_owned(), (*val).to_owned()))
                     .next()
             })),
             Err(e) => {

@@ -142,7 +142,7 @@ impl KvTree for PersyTree {
             Ok(iter) => {
                 let map = iter.filter_map(|(k, v)| {
                     v.into_iter()
-                        .map(|val| ((*k).to_owned().into(), (*val).to_owned().into()))
+                        .map(|val| ((*k).to_owned(), (*val).to_owned()))
                         .next()
                 });
                 if backwards {

@@ -179,7 +179,7 @@ impl KvTree for PersyTree {
                 iter.take_while(move |(k, _)| (*k).starts_with(&owned_prefix))
                     .filter_map(|(k, v)| {
                         v.into_iter()
-                            .map(|val| ((*k).to_owned().into(), (*val).to_owned().into()))
+                            .map(|val| ((*k).to_owned(), (*val).to_owned()))
                            .next()
                     }),
             )
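
All three PersyTree hunks drop an .into() call on values that (*k).to_owned() and (*val).to_owned() already produce in the target type, the kind of no-op conversion clippy::useless_conversion reports. A minimal sketch, not project code:

// Sketch only: `.into()` to the type a value already has is a no-op that
// `clippy::useless_conversion` flags.
fn main() {
    let key: &[u8] = b"some-key";

    // Before: let owned: Vec<u8> = key.to_owned().into();
    // After: `to_owned()` already yields `Vec<u8>`.
    let owned: Vec<u8> = key.to_owned();

    assert_eq!(owned.len(), 8);
}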

@@ -33,7 +33,7 @@ impl Iterator for PreparedStatementIterator<'_> {
 struct NonAliasingBox<T>(*mut T);
 impl<T> Drop for NonAliasingBox<T> {
     fn drop(&mut self) {
-        unsafe { Box::from_raw(self.0) };
+        drop(unsafe { Box::from_raw(self.0) });
     }
 }
 
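
Box::from_raw is #[must_use] in std, so silently discarding the rebuilt box draws a warning on newer toolchains; wrapping it in drop(...) states the intent explicitly. A minimal sketch, not project code, of the same RAII-over-raw-pointer pattern:

// Sketch only: an RAII wrapper around a raw pointer that reclaims ownership
// in `Drop` and frees it by explicitly dropping the rebuilt `Box`.
struct RawOwner<T>(*mut T);

impl<T> RawOwner<T> {
    fn new(value: T) -> Self {
        RawOwner(Box::into_raw(Box::new(value)))
    }
}

impl<T> Drop for RawOwner<T> {
    fn drop(&mut self) {
        // SAFETY: `self.0` came from `Box::into_raw` in `new` and is
        // reclaimed here exactly once.
        drop(unsafe { Box::from_raw(self.0) });
    }
}

fn main() {
    let owner = RawOwner::new(String::from("freed on drop"));
    drop(owner); // the inner String is deallocated here
}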

@@ -123,13 +123,12 @@ impl service::account_data::Data for KeyValueDatabase {
             .take_while(move |(k, _)| k.starts_with(&prefix))
             .map(|(k, v)| {
                 Ok::<_, Error>((
-                    RoomAccountDataEventType::try_from(
+                    RoomAccountDataEventType::from(
                         utils::string_from_bytes(k.rsplit(|&b| b == 0xff).next().ok_or_else(
                             || Error::bad_database("RoomUserData ID in db is invalid."),
                         )?)
                         .map_err(|_| Error::bad_database("RoomUserData ID in db is invalid."))?,
-                    )
-                    .map_err(|_| Error::bad_database("RoomUserData ID in db is invalid."))?,
+                    ),
                     serde_json::from_slice::<Raw<AnyEphemeralRoomEvent>>(&v).map_err(|_| {
                         Error::bad_database("Database contains invalid account data.")
                     })?,
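
RoomAccountDataEventType::try_from becoming ::from (and the matching StateEventType change further down) implies these event-type conversions are now infallible, presumably because unknown strings land in a catch-all variant, which removes one error-mapping layer. A hypothetical sketch of that pattern (enum invented for illustration, not the ruma API):

// Hypothetical event-type enum with a catch-all variant; conversion from a
// string can no longer fail, so `From` replaces `TryFrom`.
#[derive(Debug, PartialEq)]
enum EventType {
    Tag,
    FullyRead,
    Other(String),
}

impl From<String> for EventType {
    fn from(s: String) -> Self {
        match s.as_str() {
            "m.tag" => EventType::Tag,
            "m.fully_read" => EventType::FullyRead,
            _ => EventType::Other(s),
        }
    }
}

fn main() {
    // No error branch left to map: every string has a valid mapping.
    assert_eq!(EventType::from("m.tag".to_owned()), EventType::Tag);
    assert_eq!(
        EventType::from("org.example.custom".to_owned()),
        EventType::Other("org.example.custom".to_owned())
    );
}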

@@ -256,8 +256,8 @@ lasttimelinecount_cache: {lasttimelinecount_cache}\n"
             ..
         } = new_keys;
 
-        keys.verify_keys.extend(verify_keys.into_iter());
-        keys.old_verify_keys.extend(old_verify_keys.into_iter());
+        keys.verify_keys.extend(verify_keys);
+        keys.old_verify_keys.extend(old_verify_keys);
 
         self.server_signingkeys.insert(
            origin.as_bytes(),
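
Extend::extend already takes any IntoIterator, so passing the maps directly is equivalent and the explicit .into_iter() calls only add noise (clippy::useless_conversion reports this form as well). A minimal sketch, not project code:

// Sketch only: `Extend::extend` accepts any `IntoIterator`, so the explicit
// `.into_iter()` adds nothing.
use std::collections::BTreeMap;

fn main() {
    let mut keys: BTreeMap<String, u64> = BTreeMap::new();
    let new_keys = BTreeMap::from([("ed25519:a".to_owned(), 1), ("ed25519:b".to_owned(), 2)]);

    // Before: keys.extend(new_keys.into_iter());
    // After: the map is consumed directly.
    keys.extend(new_keys);

    assert_eq!(keys.len(), 2);
}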

@@ -157,10 +157,9 @@ impl service::rooms::short::Data for KeyValueDatabase {
             .ok_or_else(|| Error::bad_database("Invalid statekey in shortstatekey_statekey."))?;
 
         let event_type =
-            StateEventType::try_from(utils::string_from_bytes(eventtype_bytes).map_err(|_| {
+            StateEventType::from(utils::string_from_bytes(eventtype_bytes).map_err(|_| {
                 Error::bad_database("Event type in shortstatekey_statekey is invalid unicode.")
-            })?)
-            .map_err(|_| Error::bad_database("Event type in shortstatekey_statekey is invalid."))?;
+            })?);
 
         let state_key = utils::string_from_bytes(statekey_bytes).map_err(|_| {
             Error::bad_database("Statekey in shortstatekey_statekey is invalid unicode.")

@@ -146,10 +146,9 @@ impl service::users::Data for KeyValueDatabase {
         self.userid_avatarurl
             .get(user_id.as_bytes())?
             .map(|bytes| {
-                let s = utils::string_from_bytes(&bytes)
-                    .map_err(|_| Error::bad_database("Avatar URL in db is invalid."))?;
-                s.try_into()
+                utils::string_from_bytes(&bytes)
+                    .map_err(|_| Error::bad_database("Avatar URL in db is invalid."))
+                    .map(Into::into)
             })
             .transpose()
     }
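
The avatar-URL getter now keeps the whole body as combinators: the Option is mapped through a fallible parse and .transpose() turns the resulting Option<Result<_, _>> into Result<Option<_>, _>. A minimal sketch, not project code, with plain standard-library types standing in for the database helpers:

// Sketch only: map an optional byte value through a fallible parse, then use
// `transpose` to get `Result<Option<_>, _>`.
fn avatar_url(raw: Option<&[u8]>) -> Result<Option<String>, String> {
    raw.map(|bytes| {
        String::from_utf8(bytes.to_vec())
            .map_err(|_| "Avatar URL in db is invalid.".to_owned())
            // `Into::into` is a no-op here; it stands in for the final
            // string-to-URI conversion in the real code.
            .map(Into::into)
    })
    .transpose()
}

fn main() {
    assert_eq!(avatar_url(None), Ok(None));
    assert_eq!(
        avatar_url(Some(&b"mxc://example.org/abc"[..])),
        Ok(Some("mxc://example.org/abc".to_owned()))
    );
    assert!(avatar_url(Some(&[0xff, 0xfe][..])).is_err());
}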

@@ -1,12 +1,3 @@
-#![warn(
-    rust_2018_idioms,
-    unused_qualifications,
-    clippy::cloned_instead_of_copied,
-    clippy::str_to_string
-)]
-#![allow(clippy::suspicious_else_formatting)]
-#![deny(clippy::dbg_macro)]
-
 pub mod api;
 mod config;
 mod database;

src/main.rs (10 changed lines)

@@ -1,13 +1,3 @@
-#![warn(
-    rust_2018_idioms,
-    unused_qualifications,
-    clippy::cloned_instead_of_copied,
-    clippy::str_to_string,
-    clippy::future_not_send
-)]
-#![allow(clippy::suspicious_else_formatting)]
-#![deny(clippy::dbg_macro)]
-
 use std::{future::Future, io, net::SocketAddr, sync::atomic, time::Duration};
 
 use axum::{

@@ -128,7 +128,7 @@ impl Resolve for Resolver {
             .expect("lock should not be poisoned")
             .get(name.as_str())
             .and_then(|(override_name, port)| {
-                override_name.get(0).map(|first_name| {
+                override_name.first().map(|first_name| {
                     let x: Box<dyn Iterator<Item = SocketAddr> + Send> =
                         Box::new(iter::once(SocketAddr::new(*first_name, *port)));
                     let x: Resolving = Box::pin(future::ready(Ok(x)));
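
override_name.get(0) becomes override_name.first(), the replacement clippy::get_first suggests for reading the leading element of a slice. A minimal sketch, not project code:

// Sketch only: `clippy::get_first` prefers `first()` over `get(0)`.
fn main() {
    let addrs = vec!["10.0.0.1", "10.0.0.2"];

    // Before: let first = addrs.get(0);
    let first = addrs.first();

    assert_eq!(first, Some(&"10.0.0.1"));
    assert_eq!(Vec::<&str>::new().first(), None);
}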

@@ -385,7 +385,7 @@ impl PartialEq for PduEvent {
 }
 impl PartialOrd for PduEvent {
     fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
-        self.event_id.partial_cmp(&other.event_id)
+        Some(self.cmp(other))
     }
 }
 impl Ord for PduEvent {
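
Since PduEvent also implements Ord (visible in the context line), the canonical PartialOrd is to delegate with Some(self.cmp(other)); defining it independently risks the two orderings drifting apart, which newer clippy flags as a non-canonical PartialOrd implementation. A minimal sketch, not project code:

// Sketch only: when `Ord` exists, `PartialOrd` should delegate to it so the
// two orderings can never disagree.
use std::cmp::Ordering;

#[derive(PartialEq, Eq)]
struct Event {
    event_id: String,
}

impl Ord for Event {
    fn cmp(&self, other: &Self) -> Ordering {
        self.event_id.cmp(&other.event_id)
    }
}

impl PartialOrd for Event {
    fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
        // Before: self.event_id.partial_cmp(&other.event_id)
        Some(self.cmp(other))
    }
}

fn main() {
    let a = Event { event_id: "$a".to_owned() };
    let b = Event { event_id: "$b".to_owned() };
    assert!(a < b);
}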

@@ -90,18 +90,6 @@ impl Ord for PduCount {
         }
     }
 }
-#[cfg(test)]
-mod tests {
-    use super::*;
-
-    #[test]
-    fn comparisons() {
-        assert!(PduCount::Normal(1) < PduCount::Normal(2));
-        assert!(PduCount::Backfilled(2) < PduCount::Backfilled(1));
-        assert!(PduCount::Normal(1) > PduCount::Backfilled(1));
-        assert!(PduCount::Backfilled(1) < PduCount::Normal(1));
-    }
-}
 
 pub struct Service {
     pub db: &'static dyn Data,

@@ -1208,3 +1196,16 @@ impl Service {
         Ok(())
     }
 }
+
+#[cfg(test)]
+mod tests {
+    use super::*;
+
+    #[test]
+    fn comparisons() {
+        assert!(PduCount::Normal(1) < PduCount::Normal(2));
+        assert!(PduCount::Backfilled(2) < PduCount::Backfilled(1));
+        assert!(PduCount::Normal(1) > PduCount::Backfilled(1));
+        assert!(PduCount::Backfilled(1) < PduCount::Normal(1));
+    }
+}

@@ -131,7 +131,7 @@ impl Service {
         for (key, outgoing_kind, event) in self.db.active_requests().filter_map(|r| r.ok()) {
             let entry = initial_transactions
                 .entry(outgoing_kind.clone())
-                .or_insert_with(Vec::new);
+                .or_default();
 
             if entry.len() > 30 {
                 warn!(
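
Because the entry's value type implements Default, or_default() expresses the same thing as or_insert_with(Vec::new) more directly, which is the form newer clippy suggests. A minimal sketch, not project code:

// Sketch only: for map entries whose value type implements `Default`,
// `or_default()` replaces `or_insert_with(Vec::new)`.
use std::collections::HashMap;

fn main() {
    let mut transactions: HashMap<String, Vec<u32>> = HashMap::new();

    // Before: .or_insert_with(Vec::new)
    let entry = transactions.entry("server.example".to_owned()).or_default();
    entry.push(1);
    entry.push(2);

    assert_eq!(transactions["server.example"], vec![1, 2]);
}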

@@ -138,12 +138,18 @@ impl Service {
             cached.lists.insert(list_id.clone(), list.clone());
         }
 
-        cached
-            .subscriptions
-            .extend(request.room_subscriptions.clone().into_iter());
-        request
-            .room_subscriptions
-            .extend(cached.subscriptions.clone().into_iter());
+        cached.subscriptions.extend(
+            request
+                .room_subscriptions
+                .iter()
+                .map(|(k, v)| (k.clone(), v.clone())),
+        );
+        request.room_subscriptions.extend(
+            cached
+                .subscriptions
+                .iter()
+                .map(|(k, v)| (k.clone(), v.clone())),
+        );
 
         request.extensions.e2ee.enabled = request
             .extensions
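
Instead of cloning an entire map and then consuming the clone, the new code iterates the source by reference and clones per entry while extending, so no intermediate map is built. A minimal sketch, not project code:

// Sketch only: extend one map with clones of another's entries without first
// cloning the whole source map.
use std::collections::BTreeMap;

fn main() {
    let mut cached: BTreeMap<String, u64> = BTreeMap::from([("!a:example.org".to_owned(), 1)]);
    let request: BTreeMap<String, u64> = BTreeMap::from([("!b:example.org".to_owned(), 2)]);

    // Before: cached.extend(request.clone().into_iter());
    // After: iterate by reference, clone per entry; `request` stays usable.
    cached.extend(request.iter().map(|(k, v)| (k.clone(), *v)));

    assert_eq!(cached.len(), 2);
    assert_eq!(request.len(), 1);
}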