Mirror of https://gitlab.com/famedly/conduit.git (synced 2024-11-10 19:01:05 +01:00)

cargo fmt

commit a4637e2ba1 (parent 33a2b2b772)
119 changed files with 2787 additions and 1761 deletions

@@ -1,4 +1,4 @@
-use crate::{utils, Error, Result, services};
+use crate::{services, utils, Error, Result};
 use bytes::BytesMut;
 use ruma::api::{IncomingResponse, MatrixVersion, OutgoingRequest, SendAccessToken};
 use std::{fmt::Debug, mem, time::Duration};
@@ -45,7 +45,11 @@ where
     *reqwest_request.timeout_mut() = Some(Duration::from_secs(30));
 
     let url = reqwest_request.url().clone();
-    let mut response = services().globals.default_client().execute(reqwest_request).await?;
+    let mut response = services()
+        .globals
+        .default_client()
+        .execute(reqwest_request)
+        .await?;
 
     // reqwest::Response -> http::Response conversion
     let status = response.status();

@@ -1,9 +1,7 @@
 use std::sync::Arc;
 
 use super::{DEVICE_ID_LENGTH, SESSION_ID_LENGTH, TOKEN_LENGTH};
-use crate::{
-    utils, Error, Result, Ruma, services, api::client_server,
-};
+use crate::{api::client_server, services, utils, Error, Result, Ruma};
 use ruma::{
     api::client::{
         account::{
@@ -43,16 +41,18 @@ pub async fn get_register_available_route(
     body: Ruma<get_username_availability::v3::IncomingRequest>,
 ) -> Result<get_username_availability::v3::Response> {
     // Validate user id
-    let user_id =
-        UserId::parse_with_server_name(body.username.to_lowercase(), services().globals.server_name())
-            .ok()
-            .filter(|user_id| {
-                !user_id.is_historical() && user_id.server_name() == services().globals.server_name()
-            })
-            .ok_or(Error::BadRequest(
-                ErrorKind::InvalidUsername,
-                "Username is invalid.",
-            ))?;
+    let user_id = UserId::parse_with_server_name(
+        body.username.to_lowercase(),
+        services().globals.server_name(),
+    )
+    .ok()
+    .filter(|user_id| {
+        !user_id.is_historical() && user_id.server_name() == services().globals.server_name()
+    })
+    .ok_or(Error::BadRequest(
+        ErrorKind::InvalidUsername,
+        "Username is invalid.",
+    ))?;
 
     // Check if username is creative enough
     if services().users.exists(&user_id)? {
@@ -95,17 +95,19 @@ pub async fn register_route(
 
     let user_id = match (&body.username, is_guest) {
         (Some(username), false) => {
-            let proposed_user_id =
-                UserId::parse_with_server_name(username.to_lowercase(), services().globals.server_name())
-                    .ok()
-                    .filter(|user_id| {
-                        !user_id.is_historical()
-                            && user_id.server_name() == services().globals.server_name()
-                    })
-                    .ok_or(Error::BadRequest(
-                        ErrorKind::InvalidUsername,
-                        "Username is invalid.",
-                    ))?;
+            let proposed_user_id = UserId::parse_with_server_name(
+                username.to_lowercase(),
+                services().globals.server_name(),
+            )
+            .ok()
+            .filter(|user_id| {
+                !user_id.is_historical()
+                    && user_id.server_name() == services().globals.server_name()
+            })
+            .ok_or(Error::BadRequest(
+                ErrorKind::InvalidUsername,
+                "Username is invalid.",
+            ))?;
             if services().users.exists(&proposed_user_id)? {
                 return Err(Error::BadRequest(
                     ErrorKind::UserInUse,
@@ -176,7 +178,8 @@ pub async fn register_route(
 
     // Default to pretty displayname
     let displayname = format!("{} ⚡️", user_id.localpart());
-    services().users
+    services()
+        .users
         .set_displayname(&user_id, Some(displayname.clone()))?;
 
     // Initial account data
@@ -188,7 +191,8 @@ pub async fn register_route(
             content: ruma::events::push_rules::PushRulesEventContent {
                 global: push::Ruleset::server_default(&user_id),
            },
-        }).expect("to json always works"),
+        })
+        .expect("to json always works"),
     )?;
 
     // Inhibit login does not work for guests
@@ -220,7 +224,8 @@ pub async fn register_route(
     )?;
 
     info!("New user {} registered on this server.", user_id);
-    services().admin
+    services()
+        .admin
         .send_message(RoomMessageEventContent::notice_plain(format!(
             "New user {} registered on this server.",
             user_id
@@ -229,7 +234,10 @@ pub async fn register_route(
     // If this is the first real user, grant them admin privileges
     // Note: the server user, @conduit:servername, is generated first
     if services().users.count()? == 2 {
-        services().admin.make_user_admin(&user_id, displayname).await?;
+        services()
+            .admin
+            .make_user_admin(&user_id, displayname)
+            .await?;
 
         warn!("Granting {} admin privileges as the first user", user_id);
     }
@@ -272,26 +280,26 @@ pub async fn change_password_route(
     };
 
     if let Some(auth) = &body.auth {
-        let (worked, uiaainfo) = services().uiaa.try_auth(
-            sender_user,
-            sender_device,
-            auth,
-            &uiaainfo,
-        )?;
+        let (worked, uiaainfo) =
+            services()
+                .uiaa
+                .try_auth(sender_user, sender_device, auth, &uiaainfo)?;
         if !worked {
             return Err(Error::Uiaa(uiaainfo));
         }
         // Success!
     } else if let Some(json) = body.json_body {
         uiaainfo.session = Some(utils::random_string(SESSION_ID_LENGTH));
-        services().uiaa
+        services()
+            .uiaa
             .create(sender_user, sender_device, &uiaainfo, &json)?;
         return Err(Error::Uiaa(uiaainfo));
     } else {
         return Err(Error::BadRequest(ErrorKind::NotJson, "Not json."));
     }
 
-    services().users
+    services()
+        .users
         .set_password(sender_user, Some(&body.new_password))?;
 
     if body.logout_devices {
@@ -307,7 +315,8 @@ pub async fn change_password_route(
     }
 
     info!("User {} changed their password.", sender_user);
-    services().admin
+    services()
+        .admin
         .send_message(RoomMessageEventContent::notice_plain(format!(
             "User {} changed their password.",
             sender_user
@@ -321,9 +330,7 @@ pub async fn change_password_route(
 /// Get user_id of the sender user.
 ///
 /// Note: Also works for Application Services
-pub async fn whoami_route(
-    body: Ruma<whoami::v3::Request>,
-) -> Result<whoami::v3::Response> {
+pub async fn whoami_route(body: Ruma<whoami::v3::Request>) -> Result<whoami::v3::Response> {
     let sender_user = body.sender_user.as_ref().expect("user is authenticated");
     let device_id = body.sender_device.as_ref().cloned();
 
@@ -361,19 +368,18 @@ pub async fn deactivate_route(
     };
 
     if let Some(auth) = &body.auth {
-        let (worked, uiaainfo) = services().uiaa.try_auth(
-            sender_user,
-            sender_device,
-            auth,
-            &uiaainfo,
-        )?;
+        let (worked, uiaainfo) =
+            services()
+                .uiaa
+                .try_auth(sender_user, sender_device, auth, &uiaainfo)?;
         if !worked {
             return Err(Error::Uiaa(uiaainfo));
         }
         // Success!
     } else if let Some(json) = body.json_body {
         uiaainfo.session = Some(utils::random_string(SESSION_ID_LENGTH));
-        services().uiaa
+        services()
+            .uiaa
             .create(sender_user, sender_device, &uiaainfo, &json)?;
        return Err(Error::Uiaa(uiaainfo));
     } else {
@@ -387,7 +393,8 @@ pub async fn deactivate_route(
     services().users.deactivate_account(sender_user)?;
 
     info!("User {} deactivated their account.", sender_user);
-    services().admin
+    services()
+        .admin
         .send_message(RoomMessageEventContent::notice_plain(format!(
             "User {} deactivated their account.",
             sender_user

@@ -1,4 +1,4 @@
-use crate::{Error, Result, Ruma, services};
+use crate::{services, Error, Result, Ruma};
 use regex::Regex;
 use ruma::{
     api::{
@@ -25,11 +25,18 @@ pub async fn create_alias_route(
         ));
     }
 
-    if services().rooms.alias.resolve_local_alias(&body.room_alias)?.is_some() {
+    if services()
+        .rooms
+        .alias
+        .resolve_local_alias(&body.room_alias)?
+        .is_some()
+    {
         return Err(Error::Conflict("Alias already exists."));
     }
 
-    services().rooms.alias
+    services()
+        .rooms
+        .alias
         .set_alias(&body.room_alias, &body.room_id)?;
 
     Ok(create_alias::v3::Response::new())
@@ -69,9 +76,7 @@ pub async fn get_alias_route(
     get_alias_helper(&body.room_alias).await
 }
 
-pub(crate) async fn get_alias_helper(
-    room_alias: &RoomAliasId,
-) -> Result<get_alias::v3::Response> {
+pub(crate) async fn get_alias_helper(room_alias: &RoomAliasId) -> Result<get_alias::v3::Response> {
     if room_alias.server_name() != services().globals.server_name() {
         let response = services()
             .sending
@@ -115,9 +120,15 @@ pub(crate) async fn get_alias_helper(
                 .await
                 .is_ok()
             {
-                room_id = Some(services().rooms.alias.resolve_local_alias(room_alias)?.ok_or_else(|| {
-                    Error::bad_config("Appservice lied to us. Room does not exist.")
-                })?);
+                room_id = Some(
+                    services()
+                        .rooms
+                        .alias
+                        .resolve_local_alias(room_alias)?
+                        .ok_or_else(|| {
+                            Error::bad_config("Appservice lied to us. Room does not exist.")
+                        })?,
+                );
                 break;
             }
         }

@@ -1,4 +1,4 @@
-use crate::{Error, Result, Ruma, services};
+use crate::{services, Error, Result, Ruma};
 use ruma::api::client::{
     backup::{
         add_backup_keys, add_backup_keys_for_room, add_backup_keys_for_session,
@@ -31,7 +31,8 @@ pub async fn update_backup_version_route(
     body: Ruma<update_backup_version::v3::IncomingRequest>,
 ) -> Result<update_backup_version::v3::Response> {
     let sender_user = body.sender_user.as_ref().expect("user is authenticated");
-    services().key_backups
+    services()
+        .key_backups
         .update_backup(sender_user, &body.version, &body.algorithm)?;
 
     Ok(update_backup_version::v3::Response {})
@@ -45,13 +46,13 @@ pub async fn get_latest_backup_info_route(
 ) -> Result<get_latest_backup_info::v3::Response> {
     let sender_user = body.sender_user.as_ref().expect("user is authenticated");
 
-    let (version, algorithm) =
-        services().key_backups
-            .get_latest_backup(sender_user)?
-            .ok_or(Error::BadRequest(
-                ErrorKind::NotFound,
-                "Key backup does not exist.",
-            ))?;
+    let (version, algorithm) = services()
+        .key_backups
+        .get_latest_backup(sender_user)?
+        .ok_or(Error::BadRequest(
+            ErrorKind::NotFound,
+            "Key backup does not exist.",
+        ))?;
 
     Ok(get_latest_backup_info::v3::Response {
         algorithm,
@@ -78,8 +79,13 @@ pub async fn get_backup_info_route(
 
     Ok(get_backup_info::v3::Response {
         algorithm,
-        count: (services().key_backups.count_keys(sender_user, &body.version)? as u32).into(),
-        etag: services().key_backups.get_etag(sender_user, &body.version)?,
+        count: (services()
+            .key_backups
+            .count_keys(sender_user, &body.version)? as u32)
+            .into(),
+        etag: services()
+            .key_backups
+            .get_etag(sender_user, &body.version)?,
         version: body.version.to_owned(),
     })
 }
@@ -94,7 +100,9 @@ pub async fn delete_backup_version_route(
 ) -> Result<delete_backup_version::v3::Response> {
     let sender_user = body.sender_user.as_ref().expect("user is authenticated");
 
-    services().key_backups.delete_backup(sender_user, &body.version)?;
+    services()
+        .key_backups
+        .delete_backup(sender_user, &body.version)?;
 
     Ok(delete_backup_version::v3::Response {})
 }
@@ -136,8 +144,13 @@ pub async fn add_backup_keys_route(
     }
 
     Ok(add_backup_keys::v3::Response {
-        count: (services().key_backups.count_keys(sender_user, &body.version)? as u32).into(),
-        etag: services().key_backups.get_etag(sender_user, &body.version)?,
+        count: (services()
+            .key_backups
+            .count_keys(sender_user, &body.version)? as u32)
+            .into(),
+        etag: services()
+            .key_backups
+            .get_etag(sender_user, &body.version)?,
     })
 }
 
@@ -176,8 +189,13 @@ pub async fn add_backup_keys_for_room_route(
     }
 
     Ok(add_backup_keys_for_room::v3::Response {
-        count: (services().key_backups.count_keys(sender_user, &body.version)? as u32).into(),
-        etag: services().key_backups.get_etag(sender_user, &body.version)?,
+        count: (services()
+            .key_backups
+            .count_keys(sender_user, &body.version)? as u32)
+            .into(),
+        etag: services()
+            .key_backups
+            .get_etag(sender_user, &body.version)?,
     })
 }
 
@@ -214,8 +232,13 @@ pub async fn add_backup_keys_for_session_route(
     )?;
 
     Ok(add_backup_keys_for_session::v3::Response {
-        count: (services().key_backups.count_keys(sender_user, &body.version)? as u32).into(),
-        etag: services().key_backups.get_etag(sender_user, &body.version)?,
+        count: (services()
+            .key_backups
+            .count_keys(sender_user, &body.version)? as u32)
+            .into(),
+        etag: services()
+            .key_backups
+            .get_etag(sender_user, &body.version)?,
     })
 }
 
@@ -274,11 +297,18 @@ pub async fn delete_backup_keys_route(
 ) -> Result<delete_backup_keys::v3::Response> {
     let sender_user = body.sender_user.as_ref().expect("user is authenticated");
 
-    services().key_backups.delete_all_keys(sender_user, &body.version)?;
+    services()
+        .key_backups
+        .delete_all_keys(sender_user, &body.version)?;
 
     Ok(delete_backup_keys::v3::Response {
-        count: (services().key_backups.count_keys(sender_user, &body.version)? as u32).into(),
-        etag: services().key_backups.get_etag(sender_user, &body.version)?,
+        count: (services()
+            .key_backups
+            .count_keys(sender_user, &body.version)? as u32)
+            .into(),
+        etag: services()
+            .key_backups
+            .get_etag(sender_user, &body.version)?,
     })
 }
 
@@ -290,12 +320,18 @@ pub async fn delete_backup_keys_for_room_route(
 ) -> Result<delete_backup_keys_for_room::v3::Response> {
     let sender_user = body.sender_user.as_ref().expect("user is authenticated");
 
-    services().key_backups
+    services()
+        .key_backups
         .delete_room_keys(sender_user, &body.version, &body.room_id)?;
 
     Ok(delete_backup_keys_for_room::v3::Response {
-        count: (services().key_backups.count_keys(sender_user, &body.version)? as u32).into(),
-        etag: services().key_backups.get_etag(sender_user, &body.version)?,
+        count: (services()
+            .key_backups
+            .count_keys(sender_user, &body.version)? as u32)
+            .into(),
+        etag: services()
+            .key_backups
+            .get_etag(sender_user, &body.version)?,
     })
 }
 
@@ -307,11 +343,20 @@ pub async fn delete_backup_keys_for_session_route(
 ) -> Result<delete_backup_keys_for_session::v3::Response> {
     let sender_user = body.sender_user.as_ref().expect("user is authenticated");
 
-    services().key_backups
-        .delete_room_key(sender_user, &body.version, &body.room_id, &body.session_id)?;
+    services().key_backups.delete_room_key(
+        sender_user,
+        &body.version,
+        &body.room_id,
+        &body.session_id,
+    )?;
 
     Ok(delete_backup_keys_for_session::v3::Response {
-        count: (services().key_backups.count_keys(sender_user, &body.version)? as u32).into(),
-        etag: services().key_backups.get_etag(sender_user, &body.version)?,
+        count: (services()
+            .key_backups
+            .count_keys(sender_user, &body.version)? as u32)
+            .into(),
+        etag: services()
+            .key_backups
+            .get_etag(sender_user, &body.version)?,
     })
 }

@@ -1,4 +1,4 @@
-use crate::{Result, Ruma, services};
+use crate::{services, Result, Ruma};
 use ruma::api::client::discovery::get_capabilities::{
     self, Capabilities, RoomVersionStability, RoomVersionsCapability,
 };

@@ -1,4 +1,4 @@
-use crate::{Error, Result, Ruma, services};
+use crate::{services, Error, Result, Ruma};
 use ruma::{
     api::client::{
         config::{

@@ -1,4 +1,4 @@
-use crate::{Error, Result, Ruma, services};
+use crate::{services, Error, Result, Ruma};
 use ruma::{
     api::client::{context::get_context, error::ErrorKind, filter::LazyLoadOptions},
     events::StateEventType,
@@ -49,7 +49,11 @@ pub async fn get_context_route(
 
     let room_id = base_event.room_id.clone();
 
-    if !services().rooms.state_cache.is_joined(sender_user, &room_id)? {
+    if !services()
+        .rooms
+        .state_cache
+        .is_joined(sender_user, &room_id)?
+    {
         return Err(Error::BadRequest(
             ErrorKind::Forbidden,
             "You don't have permission to view this room.",
@@ -141,7 +145,11 @@ pub async fn get_context_route(
             .expect("All rooms have state"),
     };
 
-    let state_ids = services().rooms.state_accessor.state_full_ids(shortstatehash).await?;
+    let state_ids = services()
+        .rooms
+        .state_accessor
+        .state_full_ids(shortstatehash)
+        .await?;
 
     let end_token = events_after
         .last()
@@ -156,7 +164,10 @@ pub async fn get_context_route(
     let mut state = Vec::new();
 
     for (shortstatekey, id) in state_ids {
-        let (event_type, state_key) = services().rooms.short.get_statekey_from_short(shortstatekey)?;
+        let (event_type, state_key) = services()
+            .rooms
+            .short
+            .get_statekey_from_short(shortstatekey)?;
 
         if event_type != StateEventType::RoomMember {
             let pdu = match services().rooms.timeline.get_pdu(&id)? {

@@ -1,4 +1,4 @@
-use crate::{utils, Error, Result, Ruma, services};
+use crate::{services, utils, Error, Result, Ruma};
 use ruma::api::client::{
     device::{self, delete_device, delete_devices, get_device, get_devices, update_device},
     error::ErrorKind,
@@ -55,7 +55,8 @@ pub async fn update_device_route(
 
     device.display_name = body.display_name.clone();
 
-    services().users
+    services()
+        .users
         .update_device_metadata(sender_user, &body.device_id, &device)?;
 
     Ok(update_device::v3::Response {})
@@ -88,26 +89,27 @@ pub async fn delete_device_route(
     };
 
     if let Some(auth) = &body.auth {
-        let (worked, uiaainfo) = services().uiaa.try_auth(
-            sender_user,
-            sender_device,
-            auth,
-            &uiaainfo,
-        )?;
+        let (worked, uiaainfo) =
+            services()
+                .uiaa
+                .try_auth(sender_user, sender_device, auth, &uiaainfo)?;
         if !worked {
             return Err(Error::Uiaa(uiaainfo));
         }
         // Success!
     } else if let Some(json) = body.json_body {
         uiaainfo.session = Some(utils::random_string(SESSION_ID_LENGTH));
-        services().uiaa
+        services()
+            .uiaa
             .create(sender_user, sender_device, &uiaainfo, &json)?;
         return Err(Error::Uiaa(uiaainfo));
     } else {
         return Err(Error::BadRequest(ErrorKind::NotJson, "Not json."));
     }
 
-    services().users.remove_device(sender_user, &body.device_id)?;
+    services()
+        .users
+        .remove_device(sender_user, &body.device_id)?;
 
     Ok(delete_device::v3::Response {})
 }
@@ -141,19 +143,18 @@ pub async fn delete_devices_route(
     };
 
     if let Some(auth) = &body.auth {
-        let (worked, uiaainfo) = services().uiaa.try_auth(
-            sender_user,
-            sender_device,
-            auth,
-            &uiaainfo,
-        )?;
+        let (worked, uiaainfo) =
+            services()
+                .uiaa
+                .try_auth(sender_user, sender_device, auth, &uiaainfo)?;
         if !worked {
             return Err(Error::Uiaa(uiaainfo));
         }
         // Success!
     } else if let Some(json) = body.json_body {
         uiaainfo.session = Some(utils::random_string(SESSION_ID_LENGTH));
-        services().uiaa
+        services()
+            .uiaa
             .create(sender_user, sender_device, &uiaainfo, &json)?;
         return Err(Error::Uiaa(uiaainfo));
     } else {

@@ -1,4 +1,4 @@
-use crate::{Error, Result, Ruma, services};
+use crate::{services, Error, Result, Ruma};
 use ruma::{
     api::{
         client::{
@@ -123,7 +123,8 @@ pub(crate) async fn get_public_rooms_filtered_helper(
     filter: &IncomingFilter,
     _network: &IncomingRoomNetwork,
 ) -> Result<get_public_rooms_filtered::v3::Response> {
-    if let Some(other_server) = server.filter(|server| *server != services().globals.server_name().as_str())
+    if let Some(other_server) =
+        server.filter(|server| *server != services().globals.server_name().as_str())
     {
         let response = services()
             .sending

@@ -1,4 +1,4 @@
-use crate::{Error, Result, Ruma, services};
+use crate::{services, Error, Result, Ruma};
 use ruma::api::client::{
     error::ErrorKind,
     filter::{create_filter, get_filter},

@ -1,5 +1,5 @@
|
|||
use super::SESSION_ID_LENGTH;
|
||||
use crate::{utils, Error, Result, Ruma, services};
|
||||
use crate::{services, utils, Error, Result, Ruma};
|
||||
use futures_util::{stream::FuturesUnordered, StreamExt};
|
||||
use ruma::{
|
||||
api::{
|
||||
|
@ -32,7 +32,8 @@ pub async fn upload_keys_route(
|
|||
let sender_device = body.sender_device.as_ref().expect("user is authenticated");
|
||||
|
||||
for (key_key, key_value) in &body.one_time_keys {
|
||||
services().users
|
||||
services()
|
||||
.users
|
||||
.add_one_time_key(sender_user, sender_device, key_key, key_value)?;
|
||||
}
|
||||
|
||||
|
@ -44,16 +45,16 @@ pub async fn upload_keys_route(
|
|||
.get_device_keys(sender_user, sender_device)?
|
||||
.is_none()
|
||||
{
|
||||
services().users.add_device_keys(
|
||||
sender_user,
|
||||
sender_device,
|
||||
device_keys,
|
||||
)?;
|
||||
services()
|
||||
.users
|
||||
.add_device_keys(sender_user, sender_device, device_keys)?;
|
||||
}
|
||||
}
|
||||
|
||||
Ok(upload_keys::v3::Response {
|
||||
one_time_key_counts: services().users.count_one_time_keys(sender_user, sender_device)?,
|
||||
one_time_key_counts: services()
|
||||
.users
|
||||
.count_one_time_keys(sender_user, sender_device)?,
|
||||
})
|
||||
}
|
||||
|
||||
|
@ -69,12 +70,8 @@ pub async fn get_keys_route(
|
|||
) -> Result<get_keys::v3::Response> {
|
||||
let sender_user = body.sender_user.as_ref().expect("user is authenticated");
|
||||
|
||||
let response = get_keys_helper(
|
||||
Some(sender_user),
|
||||
&body.device_keys,
|
||||
|u| u == sender_user,
|
||||
)
|
||||
.await?;
|
||||
let response =
|
||||
get_keys_helper(Some(sender_user), &body.device_keys, |u| u == sender_user).await?;
|
||||
|
||||
Ok(response)
|
||||
}
|
||||
|
@ -113,19 +110,18 @@ pub async fn upload_signing_keys_route(
|
|||
};
|
||||
|
||||
if let Some(auth) = &body.auth {
|
||||
let (worked, uiaainfo) = services().uiaa.try_auth(
|
||||
sender_user,
|
||||
sender_device,
|
||||
auth,
|
||||
&uiaainfo,
|
||||
)?;
|
||||
let (worked, uiaainfo) =
|
||||
services()
|
||||
.uiaa
|
||||
.try_auth(sender_user, sender_device, auth, &uiaainfo)?;
|
||||
if !worked {
|
||||
return Err(Error::Uiaa(uiaainfo));
|
||||
}
|
||||
// Success!
|
||||
} else if let Some(json) = body.json_body {
|
||||
uiaainfo.session = Some(utils::random_string(SESSION_ID_LENGTH));
|
||||
services().uiaa
|
||||
services()
|
||||
.uiaa
|
||||
.create(sender_user, sender_device, &uiaainfo, &json)?;
|
||||
return Err(Error::Uiaa(uiaainfo));
|
||||
} else {
|
||||
|
@ -187,12 +183,9 @@ pub async fn upload_signatures_route(
|
|||
))?
|
||||
.to_owned(),
|
||||
);
|
||||
services().users.sign_key(
|
||||
user_id,
|
||||
key_id,
|
||||
signature,
|
||||
sender_user,
|
||||
)?;
|
||||
services()
|
||||
.users
|
||||
.sign_key(user_id, key_id, signature, sender_user)?;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -215,7 +208,8 @@ pub async fn get_key_changes_route(
|
|||
let mut device_list_updates = HashSet::new();
|
||||
|
||||
device_list_updates.extend(
|
||||
services().users
|
||||
services()
|
||||
.users
|
||||
.keys_changed(
|
||||
sender_user.as_str(),
|
||||
body.from
|
||||
|
@ -230,9 +224,15 @@ pub async fn get_key_changes_route(
|
|||
.filter_map(|r| r.ok()),
|
||||
);
|
||||
|
||||
for room_id in services().rooms.state_cache.rooms_joined(sender_user).filter_map(|r| r.ok()) {
|
||||
for room_id in services()
|
||||
.rooms
|
||||
.state_cache
|
||||
.rooms_joined(sender_user)
|
||||
.filter_map(|r| r.ok())
|
||||
{
|
||||
device_list_updates.extend(
|
||||
services().users
|
||||
services()
|
||||
.users
|
||||
.keys_changed(
|
||||
&room_id.to_string(),
|
||||
body.from.parse().map_err(|_| {
|
||||
|
@ -296,12 +296,13 @@ pub(crate) async fn get_keys_helper<F: Fn(&UserId) -> bool>(
|
|||
for device_id in device_ids {
|
||||
let mut container = BTreeMap::new();
|
||||
if let Some(mut keys) = services().users.get_device_keys(user_id, device_id)? {
|
||||
let metadata = services().users.get_device_metadata(user_id, device_id)?.ok_or(
|
||||
Error::BadRequest(
|
||||
let metadata = services()
|
||||
.users
|
||||
.get_device_metadata(user_id, device_id)?
|
||||
.ok_or(Error::BadRequest(
|
||||
ErrorKind::InvalidParam,
|
||||
"Tried to get keys for nonexistent device.",
|
||||
),
|
||||
)?;
|
||||
))?;
|
||||
|
||||
add_unsigned_device_display_name(&mut keys, metadata)
|
||||
.map_err(|_| Error::bad_database("invalid device keys in database"))?;
|
||||
|
@ -311,7 +312,10 @@ pub(crate) async fn get_keys_helper<F: Fn(&UserId) -> bool>(
|
|||
}
|
||||
}
|
||||
|
||||
if let Some(master_key) = services().users.get_master_key(user_id, &allowed_signatures)? {
|
||||
if let Some(master_key) = services()
|
||||
.users
|
||||
.get_master_key(user_id, &allowed_signatures)?
|
||||
{
|
||||
master_keys.insert(user_id.to_owned(), master_key);
|
||||
}
|
||||
if let Some(self_signing_key) = services()
|
||||
|
@ -338,7 +342,8 @@ pub(crate) async fn get_keys_helper<F: Fn(&UserId) -> bool>(
|
|||
}
|
||||
(
|
||||
server,
|
||||
services().sending
|
||||
services()
|
||||
.sending
|
||||
.send_federation_request(
|
||||
server,
|
||||
federation::keys::get_keys::v1::Request {
|
||||
|
@ -408,7 +413,8 @@ pub(crate) async fn claim_keys_helper(
|
|||
let mut container = BTreeMap::new();
|
||||
for (device_id, key_algorithm) in map {
|
||||
if let Some(one_time_keys) =
|
||||
services().users
|
||||
services()
|
||||
.users
|
||||
.take_one_time_key(user_id, device_id, key_algorithm)?
|
||||
{
|
||||
let mut c = BTreeMap::new();
|
||||
|
|
|
@ -1,6 +1,4 @@
|
|||
use crate::{
|
||||
utils, Error, Result, Ruma, services, service::media::FileMeta,
|
||||
};
|
||||
use crate::{service::media::FileMeta, services, utils, Error, Result, Ruma};
|
||||
use ruma::api::client::{
|
||||
error::ErrorKind,
|
||||
media::{
|
||||
|
@ -37,11 +35,11 @@ pub async fn create_content_route(
|
|||
utils::random_string(MXC_LENGTH)
|
||||
);
|
||||
|
||||
services().media
|
||||
services()
|
||||
.media
|
||||
.create(
|
||||
mxc.clone(),
|
||||
body
|
||||
.filename
|
||||
body.filename
|
||||
.as_ref()
|
||||
.map(|filename| "inline; filename=".to_owned() + filename)
|
||||
.as_deref(),
|
||||
|
@ -73,7 +71,8 @@ pub async fn get_remote_content(
|
|||
)
|
||||
.await?;
|
||||
|
||||
services().media
|
||||
services()
|
||||
.media
|
||||
.create(
|
||||
mxc.to_string(),
|
||||
content_response.content_disposition.as_deref(),
|
||||
|
@ -192,7 +191,8 @@ pub async fn get_content_thumbnail_route(
|
|||
)
|
||||
.await?;
|
||||
|
||||
services().media
|
||||
services()
|
||||
.media
|
||||
.upload_thumbnail(
|
||||
mxc,
|
||||
None,
|
||||
|
|
|
@ -30,7 +30,11 @@ use std::{
|
|||
};
|
||||
use tracing::{debug, error, warn};
|
||||
|
||||
use crate::{Result, services, PduEvent, service::pdu::{gen_event_id_canonical_json, PduBuilder}, Error, api::{server_server, client_server}, utils, Ruma};
|
||||
use crate::{
|
||||
api::{client_server, server_server},
|
||||
service::pdu::{gen_event_id_canonical_json, PduBuilder},
|
||||
services, utils, Error, PduEvent, Result, Ruma,
|
||||
};
|
||||
|
||||
use super::get_alias_helper;
|
||||
|
||||
|
@ -47,8 +51,9 @@ pub async fn join_room_by_id_route(
|
|||
|
||||
let mut servers = Vec::new(); // There is no body.server_name for /roomId/join
|
||||
servers.extend(
|
||||
services().rooms
|
||||
.state_cache
|
||||
services()
|
||||
.rooms
|
||||
.state_cache
|
||||
.invite_state(sender_user, &body.room_id)?
|
||||
.unwrap_or_default()
|
||||
.iter()
|
||||
|
@ -88,8 +93,9 @@ pub async fn join_room_by_id_or_alias_route(
|
|||
Ok(room_id) => {
|
||||
let mut servers = body.server_name.clone();
|
||||
servers.extend(
|
||||
services().rooms
|
||||
.state_cache
|
||||
services()
|
||||
.rooms
|
||||
.state_cache
|
||||
.invite_state(sender_user, &room_id)?
|
||||
.unwrap_or_default()
|
||||
.iter()
|
||||
|
@ -163,8 +169,9 @@ pub async fn kick_user_route(
|
|||
let sender_user = body.sender_user.as_ref().expect("user is authenticated");
|
||||
|
||||
let mut event: RoomMemberEventContent = serde_json::from_str(
|
||||
services().rooms
|
||||
.state_accessor
|
||||
services()
|
||||
.rooms
|
||||
.state_accessor
|
||||
.room_state_get(
|
||||
&body.room_id,
|
||||
&StateEventType::RoomMember,
|
||||
|
@ -183,7 +190,8 @@ pub async fn kick_user_route(
|
|||
// TODO: reason
|
||||
|
||||
let mutex_state = Arc::clone(
|
||||
services().globals
|
||||
services()
|
||||
.globals
|
||||
.roomid_mutex_state
|
||||
.write()
|
||||
.unwrap()
|
||||
|
@ -250,7 +258,8 @@ pub async fn ban_user_route(
|
|||
)?;
|
||||
|
||||
let mutex_state = Arc::clone(
|
||||
services().globals
|
||||
services()
|
||||
.globals
|
||||
.roomid_mutex_state
|
||||
.write()
|
||||
.unwrap()
|
||||
|
@ -286,8 +295,9 @@ pub async fn unban_user_route(
|
|||
let sender_user = body.sender_user.as_ref().expect("user is authenticated");
|
||||
|
||||
let mut event: RoomMemberEventContent = serde_json::from_str(
|
||||
services().rooms
|
||||
.state_accessor
|
||||
services()
|
||||
.rooms
|
||||
.state_accessor
|
||||
.room_state_get(
|
||||
&body.room_id,
|
||||
&StateEventType::RoomMember,
|
||||
|
@ -305,7 +315,8 @@ pub async fn unban_user_route(
|
|||
event.membership = MembershipState::Leave;
|
||||
|
||||
let mutex_state = Arc::clone(
|
||||
services().globals
|
||||
services()
|
||||
.globals
|
||||
.roomid_mutex_state
|
||||
.write()
|
||||
.unwrap()
|
||||
|
@ -345,7 +356,10 @@ pub async fn forget_room_route(
|
|||
) -> Result<forget_room::v3::Response> {
|
||||
let sender_user = body.sender_user.as_ref().expect("user is authenticated");
|
||||
|
||||
services().rooms.state_cache.forget(&body.room_id, sender_user)?;
|
||||
services()
|
||||
.rooms
|
||||
.state_cache
|
||||
.forget(&body.room_id, sender_user)?;
|
||||
|
||||
Ok(forget_room::v3::Response::new())
|
||||
}
|
||||
|
@ -379,7 +393,11 @@ pub async fn get_member_events_route(
|
|||
let sender_user = body.sender_user.as_ref().expect("user is authenticated");
|
||||
|
||||
// TODO: check history visibility?
|
||||
if !services().rooms.state_cache.is_joined(sender_user, &body.room_id)? {
|
||||
if !services()
|
||||
.rooms
|
||||
.state_cache
|
||||
.is_joined(sender_user, &body.room_id)?
|
||||
{
|
||||
return Err(Error::BadRequest(
|
||||
ErrorKind::Forbidden,
|
||||
"You don't have permission to view this room.",
|
||||
|
@ -410,7 +428,11 @@ pub async fn joined_members_route(
|
|||
) -> Result<joined_members::v3::Response> {
|
||||
let sender_user = body.sender_user.as_ref().expect("user is authenticated");
|
||||
|
||||
if !services().rooms.state_cache.is_joined(sender_user, &body.room_id)? {
|
||||
if !services()
|
||||
.rooms
|
||||
.state_cache
|
||||
.is_joined(sender_user, &body.room_id)?
|
||||
{
|
||||
return Err(Error::BadRequest(
|
||||
ErrorKind::Forbidden,
|
||||
"You aren't a member of the room.",
|
||||
|
@ -418,7 +440,12 @@ pub async fn joined_members_route(
|
|||
}
|
||||
|
||||
let mut joined = BTreeMap::new();
|
||||
for user_id in services().rooms.state_cache.room_members(&body.room_id).filter_map(|r| r.ok()) {
|
||||
for user_id in services()
|
||||
.rooms
|
||||
.state_cache
|
||||
.room_members(&body.room_id)
|
||||
.filter_map(|r| r.ok())
|
||||
{
|
||||
let display_name = services().users.displayname(&user_id)?;
|
||||
let avatar_url = services().users.avatar_url(&user_id)?;
|
||||
|
||||
|
@ -443,7 +470,8 @@ async fn join_room_by_id_helper(
|
|||
let sender_user = sender_user.expect("user is authenticated");
|
||||
|
||||
let mutex_state = Arc::clone(
|
||||
services().globals
|
||||
services()
|
||||
.globals
|
||||
.roomid_mutex_state
|
||||
.write()
|
||||
.unwrap()
|
||||
|
@ -481,7 +509,14 @@ async fn join_room_by_id_helper(
|
|||
let (make_join_response, remote_server) = make_join_response_and_server?;
|
||||
|
||||
let room_version = match make_join_response.room_version {
|
||||
Some(room_version) if services().globals.supported_room_versions().contains(&room_version) => room_version,
|
||||
Some(room_version)
|
||||
if services()
|
||||
.globals
|
||||
.supported_room_versions()
|
||||
.contains(&room_version) =>
|
||||
{
|
||||
room_version
|
||||
}
|
||||
_ => return Err(Error::BadServerResponse("Room version is not supported")),
|
||||
};
|
||||
|
||||
|
@ -568,12 +603,11 @@ async fn join_room_by_id_helper(
|
|||
let mut state = HashMap::new();
|
||||
let pub_key_map = RwLock::new(BTreeMap::new());
|
||||
|
||||
services().rooms.event_handler.fetch_join_signing_keys(
|
||||
&send_join_response,
|
||||
&room_version,
|
||||
&pub_key_map,
|
||||
)
|
||||
.await?;
|
||||
services()
|
||||
.rooms
|
||||
.event_handler
|
||||
.fetch_join_signing_keys(&send_join_response, &room_version, &pub_key_map)
|
||||
.await?;
|
||||
|
||||
for result in send_join_response
|
||||
.room_state
|
||||
|
@ -591,12 +625,15 @@ async fn join_room_by_id_helper(
|
|||
Error::BadServerResponse("Invalid PDU in send_join response.")
|
||||
})?;
|
||||
|
||||
services().rooms.outlier.add_pdu_outlier(&event_id, &value)?;
|
||||
services()
|
||||
.rooms
|
||||
.outlier
|
||||
.add_pdu_outlier(&event_id, &value)?;
|
||||
if let Some(state_key) = &pdu.state_key {
|
||||
let shortstatekey = services().rooms.short.get_or_create_shortstatekey(
|
||||
&pdu.kind.to_string().into(),
|
||||
state_key,
|
||||
)?;
|
||||
let shortstatekey = services()
|
||||
.rooms
|
||||
.short
|
||||
.get_or_create_shortstatekey(&pdu.kind.to_string().into(), state_key)?;
|
||||
state.insert(shortstatekey, pdu.event_id.clone());
|
||||
}
|
||||
}
|
||||
|
@ -632,7 +669,10 @@ async fn join_room_by_id_helper(
|
|||
Err(_) => continue,
|
||||
};
|
||||
|
||||
services().rooms.outlier.add_pdu_outlier(&event_id, &value)?;
|
||||
services()
|
||||
.rooms
|
||||
.outlier
|
||||
.add_pdu_outlier(&event_id, &value)?;
|
||||
}
|
||||
|
||||
let shortstatehash = services().rooms.state.set_event_state(
|
||||
|
@ -640,7 +680,12 @@ async fn join_room_by_id_helper(
|
|||
room_id,
|
||||
state
|
||||
.into_iter()
|
||||
.map(|(k, id)| services().rooms.state_compressor.compress_state_event(k, &id))
|
||||
.map(|(k, id)| {
|
||||
services()
|
||||
.rooms
|
||||
.state_compressor
|
||||
.compress_state_event(k, &id)
|
||||
})
|
||||
.collect::<Result<_>>()?,
|
||||
)?;
|
||||
|
||||
|
@ -650,12 +695,15 @@ async fn join_room_by_id_helper(
|
|||
&parsed_pdu,
|
||||
join_event,
|
||||
vec![(*parsed_pdu.event_id).to_owned()],
|
||||
&state_lock
|
||||
&state_lock,
|
||||
)?;
|
||||
|
||||
// We set the room state after inserting the pdu, so that we never have a moment in time
|
||||
// where events in the current room state do not exist
|
||||
services().rooms.state.set_room_state(room_id, shortstatehash, &state_lock)?;
|
||||
services()
|
||||
.rooms
|
||||
.state
|
||||
.set_room_state(room_id, shortstatehash, &state_lock)?;
|
||||
|
||||
let statehashid = services().rooms.state.append_to_state(&parsed_pdu)?;
|
||||
} else {
|
||||
|
@ -705,7 +753,13 @@ fn validate_and_add_event_id(
|
|||
))
|
||||
.expect("ruma's reference hashes are valid event ids");
|
||||
|
||||
let back_off = |id| match services().globals.bad_event_ratelimiter.write().unwrap().entry(id) {
|
||||
let back_off = |id| match services()
|
||||
.globals
|
||||
.bad_event_ratelimiter
|
||||
.write()
|
||||
.unwrap()
|
||||
.entry(id)
|
||||
{
|
||||
Entry::Vacant(e) => {
|
||||
e.insert((Instant::now(), 1));
|
||||
}
|
||||
|
@ -760,7 +814,8 @@ pub(crate) async fn invite_helper<'a>(
|
|||
if user_id.server_name() != services().globals.server_name() {
|
||||
let (pdu_json, invite_room_state) = {
|
||||
let mutex_state = Arc::clone(
|
||||
services().globals
|
||||
services()
|
||||
.globals
|
||||
.roomid_mutex_state
|
||||
.write()
|
||||
.unwrap()
|
||||
|
@ -781,13 +836,18 @@ pub(crate) async fn invite_helper<'a>(
|
|||
})
|
||||
.expect("member event is valid value");
|
||||
|
||||
let (pdu, pdu_json) = services().rooms.timeline.create_hash_and_sign_event(PduBuilder {
|
||||
event_type: RoomEventType::RoomMember,
|
||||
content,
|
||||
unsigned: None,
|
||||
state_key: Some(user_id.to_string()),
|
||||
redacts: None,
|
||||
}, sender_user, room_id, &state_lock)?;
|
||||
let (pdu, pdu_json) = services().rooms.timeline.create_hash_and_sign_event(
|
||||
PduBuilder {
|
||||
event_type: RoomEventType::RoomMember,
|
||||
content,
|
||||
unsigned: None,
|
||||
state_key: Some(user_id.to_string()),
|
||||
redacts: None,
|
||||
},
|
||||
sender_user,
|
||||
room_id,
|
||||
&state_lock,
|
||||
)?;
|
||||
|
||||
let invite_room_state = services().rooms.state.calculate_invite_state(&pdu)?;
|
||||
|
||||
|
@ -799,8 +859,11 @@ pub(crate) async fn invite_helper<'a>(
|
|||
// Generate event id
|
||||
let expected_event_id = format!(
|
||||
"${}",
|
||||
ruma::signatures::reference_hash(&pdu_json, &services().rooms.state.get_room_version(&room_id)?)
|
||||
.expect("ruma can calculate reference hashes")
|
||||
ruma::signatures::reference_hash(
|
||||
&pdu_json,
|
||||
&services().rooms.state.get_room_version(&room_id)?
|
||||
)
|
||||
.expect("ruma can calculate reference hashes")
|
||||
);
|
||||
let expected_event_id = <&EventId>::try_from(expected_event_id.as_str())
|
||||
.expect("ruma's reference hashes are valid event ids");
|
||||
|
@ -822,8 +885,7 @@ pub(crate) async fn invite_helper<'a>(
|
|||
let pub_key_map = RwLock::new(BTreeMap::new());
|
||||
|
||||
// We do not add the event_id field to the pdu here because of signature and hashes checks
|
||||
let (event_id, value) = match gen_event_id_canonical_json(&response.event)
|
||||
{
|
||||
let (event_id, value) = match gen_event_id_canonical_json(&response.event) {
|
||||
Ok(t) => t,
|
||||
Err(_) => {
|
||||
// Event could not be converted to canonical json
|
||||
|
@ -847,22 +909,20 @@ pub(crate) async fn invite_helper<'a>(
|
|||
)
|
||||
.map_err(|_| Error::BadRequest(ErrorKind::InvalidParam, "Origin field is invalid."))?;
|
||||
|
||||
let pdu_id: Vec<u8> = services().rooms.event_handler.handle_incoming_pdu(
|
||||
&origin,
|
||||
&event_id,
|
||||
room_id,
|
||||
value,
|
||||
true,
|
||||
&pub_key_map,
|
||||
)
|
||||
.await?
|
||||
.ok_or(Error::BadRequest(
|
||||
ErrorKind::InvalidParam,
|
||||
"Could not accept incoming PDU as timeline event.",
|
||||
))?;
|
||||
let pdu_id: Vec<u8> = services()
|
||||
.rooms
|
||||
.event_handler
|
||||
.handle_incoming_pdu(&origin, &event_id, room_id, value, true, &pub_key_map)
|
||||
.await?
|
||||
.ok_or(Error::BadRequest(
|
||||
ErrorKind::InvalidParam,
|
||||
"Could not accept incoming PDU as timeline event.",
|
||||
))?;
|
||||
|
||||
// Bind to variable because of lifetimes
|
||||
let servers = services().rooms.state_cache
|
||||
let servers = services()
|
||||
.rooms
|
||||
.state_cache
|
||||
.room_servers(room_id)
|
||||
.filter_map(|r| r.ok())
|
||||
.filter(|server| &**server != services().globals.server_name());
|
||||
|
@ -872,7 +932,11 @@ pub(crate) async fn invite_helper<'a>(
|
|||
return Ok(());
|
||||
}
|
||||
|
||||
if !services().rooms.state_cache.is_joined(sender_user, &room_id)? {
|
||||
if !services()
|
||||
.rooms
|
||||
.state_cache
|
||||
.is_joined(sender_user, &room_id)?
|
||||
{
|
||||
return Err(Error::BadRequest(
|
||||
ErrorKind::Forbidden,
|
||||
"You don't have permission to view this room.",
|
||||
|
@ -880,7 +944,8 @@ pub(crate) async fn invite_helper<'a>(
|
|||
}
|
||||
|
||||
let mutex_state = Arc::clone(
|
||||
services().globals
|
||||
services()
|
||||
.globals
|
||||
.roomid_mutex_state
|
||||
.write()
|
||||
.unwrap()
|
||||
|
@ -923,7 +988,13 @@ pub async fn leave_all_rooms(user_id: &UserId) -> Result<()> {
|
|||
.rooms
|
||||
.state_cache
|
||||
.rooms_joined(user_id)
|
||||
.chain(services().rooms.state_cache.rooms_invited(user_id).map(|t| t.map(|(r, _)| r)))
|
||||
.chain(
|
||||
services()
|
||||
.rooms
|
||||
.state_cache
|
||||
.rooms_invited(user_id)
|
||||
.map(|t| t.map(|(r, _)| r)),
|
||||
)
|
||||
.collect::<Vec<_>>();
|
||||
|
||||
for room_id in all_rooms {
|
||||
|
@ -938,20 +1009,24 @@ pub async fn leave_all_rooms(user_id: &UserId) -> Result<()> {
|
|||
Ok(())
|
||||
}
|
||||
|
||||
pub async fn leave_room(
|
||||
user_id: &UserId,
|
||||
room_id: &RoomId,
|
||||
) -> Result<()> {
|
||||
pub async fn leave_room(user_id: &UserId, room_id: &RoomId) -> Result<()> {
|
||||
// Ask a remote server if we don't have this room
|
||||
if !services().rooms.metadata.exists(room_id)? && room_id.server_name() != services().globals.server_name() {
|
||||
if !services().rooms.metadata.exists(room_id)?
|
||||
&& room_id.server_name() != services().globals.server_name()
|
||||
{
|
||||
if let Err(e) = remote_leave_room(user_id, room_id).await {
|
||||
warn!("Failed to leave room {} remotely: {}", user_id, e);
|
||||
// Don't tell the client about this error
|
||||
}
|
||||
|
||||
let last_state = services().rooms.state_cache
|
||||
let last_state = services()
|
||||
.rooms
|
||||
.state_cache
|
||||
.invite_state(user_id, room_id)?
|
||||
.map_or_else(|| services().rooms.state_cache.left_state(user_id, room_id), |s| Ok(Some(s)))?;
|
||||
.map_or_else(
|
||||
|| services().rooms.state_cache.left_state(user_id, room_id),
|
||||
|s| Ok(Some(s)),
|
||||
)?;
|
||||
|
||||
// We always drop the invite, we can't rely on other servers
|
||||
services().rooms.state_cache.update_membership(
|
||||
|
@ -964,7 +1039,8 @@ pub async fn leave_room(
|
|||
)?;
|
||||
} else {
|
||||
let mutex_state = Arc::clone(
|
||||
services().globals
|
||||
services()
|
||||
.globals
|
||||
.roomid_mutex_state
|
||||
.write()
|
||||
.unwrap()
|
||||
|
@ -974,7 +1050,10 @@ pub async fn leave_room(
|
|||
let state_lock = mutex_state.lock().await;
|
||||
|
||||
let mut event: RoomMemberEventContent = serde_json::from_str(
|
||||
services().rooms.state_accessor.room_state_get(room_id, &StateEventType::RoomMember, user_id.as_str())?
|
||||
services()
|
||||
.rooms
|
||||
.state_accessor
|
||||
.room_state_get(room_id, &StateEventType::RoomMember, user_id.as_str())?
|
||||
.ok_or(Error::BadRequest(
|
||||
ErrorKind::BadState,
|
||||
"Cannot leave a room you are not a member of.",
|
||||
|
@ -1003,10 +1082,7 @@ pub async fn leave_room(
|
|||
Ok(())
|
||||
}
|
||||
|
||||
async fn remote_leave_room(
|
||||
user_id: &UserId,
|
||||
room_id: &RoomId,
|
||||
) -> Result<()> {
|
||||
async fn remote_leave_room(user_id: &UserId, room_id: &RoomId) -> Result<()> {
|
||||
let mut make_leave_response_and_server = Err(Error::BadServerResponse(
|
||||
"No server available to assist in leaving.",
|
||||
));
|
||||
|
@ -1048,14 +1124,21 @@ async fn remote_leave_room(
|
|||
let (make_leave_response, remote_server) = make_leave_response_and_server?;
|
||||
|
||||
let room_version_id = match make_leave_response.room_version {
|
||||
Some(version) if services().globals.supported_room_versions().contains(&version) => version,
|
||||
Some(version)
|
||||
if services()
|
||||
.globals
|
||||
.supported_room_versions()
|
||||
.contains(&version) =>
|
||||
{
|
||||
version
|
||||
}
|
||||
_ => return Err(Error::BadServerResponse("Room version is not supported")),
|
||||
};
|
||||
|
||||
let mut leave_event_stub =
|
||||
serde_json::from_str::<CanonicalJsonObject>(make_leave_response.event.get()).map_err(
|
||||
|_| Error::BadServerResponse("Invalid make_leave event json received from server."),
|
||||
)?;
|
||||
let mut leave_event_stub = serde_json::from_str::<CanonicalJsonObject>(
|
||||
make_leave_response.event.get(),
|
||||
)
|
||||
.map_err(|_| Error::BadServerResponse("Invalid make_leave event json received from server."))?;
|
||||
|
||||
// TODO: Is origin needed?
|
||||
leave_event_stub.insert(
|
||||
|
@ -1099,7 +1182,8 @@ async fn remote_leave_room(
|
|||
// It has enough fields to be called a proper event now
|
||||
let leave_event = leave_event_stub;
|
||||
|
||||
services().sending
|
||||
services()
|
||||
.sending
|
||||
.send_federation_request(
|
||||
&remote_server,
|
||||
federation::membership::create_leave_event::v2::Request {
|
||||
|
|
|
@ -1,4 +1,4 @@
|
|||
use crate::{utils, Error, Result, Ruma, services, service::pdu::PduBuilder};
|
||||
use crate::{service::pdu::PduBuilder, services, utils, Error, Result, Ruma};
|
||||
use ruma::{
|
||||
api::client::{
|
||||
error::ErrorKind,
|
||||
|
@ -25,7 +25,8 @@ pub async fn send_message_event_route(
|
|||
let sender_device = body.sender_device.as_deref();
|
||||
|
||||
let mutex_state = Arc::clone(
|
||||
services().globals
|
||||
services()
|
||||
.globals
|
||||
.roomid_mutex_state
|
||||
.write()
|
||||
.unwrap()
|
||||
|
@ -46,7 +47,8 @@ pub async fn send_message_event_route(
|
|||
|
||||
// Check if this is a new transaction id
|
||||
if let Some(response) =
|
||||
services().transaction_ids
|
||||
services()
|
||||
.transaction_ids
|
||||
.existing_txnid(sender_user, sender_device, &body.txn_id)?
|
||||
{
|
||||
// The client might have sent a txnid of the /sendToDevice endpoint
|
||||
|
@ -108,7 +110,11 @@ pub async fn get_message_events_route(
|
|||
let sender_user = body.sender_user.as_ref().expect("user is authenticated");
|
||||
let sender_device = body.sender_device.as_ref().expect("user is authenticated");
|
||||
|
||||
if !services().rooms.state_cache.is_joined(sender_user, &body.room_id)? {
|
||||
if !services()
|
||||
.rooms
|
||||
.state_cache
|
||||
.is_joined(sender_user, &body.room_id)?
|
||||
{
|
||||
return Err(Error::BadRequest(
|
||||
ErrorKind::Forbidden,
|
||||
"You don't have permission to view this room.",
|
||||
|
@ -128,8 +134,12 @@ pub async fn get_message_events_route(
|
|||
|
||||
let to = body.to.as_ref().map(|t| t.parse());
|
||||
|
||||
services().rooms
|
||||
.lazy_loading.lazy_load_confirm_delivery(sender_user, sender_device, &body.room_id, from)?;
|
||||
services().rooms.lazy_loading.lazy_load_confirm_delivery(
|
||||
sender_user,
|
||||
sender_device,
|
||||
&body.room_id,
|
||||
from,
|
||||
)?;
|
||||
|
||||
// Use limit or else 10
|
||||
let limit = body.limit.try_into().map_or(10_usize, |l: u32| l as usize);
|
||||
|
@ -149,8 +159,10 @@ pub async fn get_message_events_route(
|
|||
.take(limit)
|
||||
.filter_map(|r| r.ok()) // Filter out buggy events
|
||||
.filter_map(|(pdu_id, pdu)| {
|
||||
services().rooms
|
||||
.timeline.pdu_count(&pdu_id)
|
||||
services()
|
||||
.rooms
|
||||
.timeline
|
||||
.pdu_count(&pdu_id)
|
||||
.map(|pdu_count| (pdu_count, pdu))
|
||||
.ok()
|
||||
})
|
||||
|
@ -187,7 +199,8 @@ pub async fn get_message_events_route(
|
|||
.take(limit)
|
||||
.filter_map(|r| r.ok()) // Filter out buggy events
|
||||
.filter_map(|(pdu_id, pdu)| {
|
||||
services().rooms
|
||||
services()
|
||||
.rooms
|
||||
.timeline
|
||||
.pdu_count(&pdu_id)
|
||||
.map(|pdu_count| (pdu_count, pdu))
|
||||
|
@ -222,10 +235,11 @@ pub async fn get_message_events_route(
|
|||
|
||||
resp.state = Vec::new();
|
||||
for ll_id in &lazy_loaded {
|
||||
if let Some(member_event) =
|
||||
services().rooms.state_accessor
|
||||
.room_state_get(&body.room_id, &StateEventType::RoomMember, ll_id.as_str())?
|
||||
{
|
||||
if let Some(member_event) = services().rooms.state_accessor.room_state_get(
|
||||
&body.room_id,
|
||||
&StateEventType::RoomMember,
|
||||
ll_id.as_str(),
|
||||
)? {
|
||||
resp.state.push(member_event.to_state_event());
|
||||
}
|
||||
}
|
||||
|
|
|
@@ -1,4 +1,4 @@
-use crate::{utils, Result, Ruma, services};
+use crate::{services, utils, Result, Ruma};
 use ruma::api::client::presence::{get_presence, set_presence};
 use std::time::Duration;
 
@@ -51,7 +51,8 @@ pub async fn get_presence_route(
 
     for room_id in services()
         .rooms
-        .user.get_shared_rooms(vec![sender_user.clone(), body.user_id.clone()])?
+        .user
+        .get_shared_rooms(vec![sender_user.clone(), body.user_id.clone()])?
     {
         let room_id = room_id?;
 
@ -1,4 +1,4 @@
|
|||
use crate::{utils, Error, Result, Ruma, services, service::pdu::PduBuilder};
|
||||
use crate::{service::pdu::PduBuilder, services, utils, Error, Result, Ruma};
|
||||
use ruma::{
|
||||
api::{
|
||||
client::{
|
||||
|
@ -24,7 +24,8 @@ pub async fn set_displayname_route(
|
|||
) -> Result<set_display_name::v3::Response> {
|
||||
let sender_user = body.sender_user.as_ref().expect("user is authenticated");
|
||||
|
||||
services().users
|
||||
services()
|
||||
.users
|
||||
.set_displayname(sender_user, body.displayname.clone())?;
|
||||
|
||||
// Send a new membership event and presence update into all joined rooms
|
||||
|
@ -40,8 +41,9 @@ pub async fn set_displayname_route(
|
|||
content: to_raw_value(&RoomMemberEventContent {
|
||||
displayname: body.displayname.clone(),
|
||||
..serde_json::from_str(
|
||||
services().rooms
|
||||
.state_accessor
|
||||
services()
|
||||
.rooms
|
||||
.state_accessor
|
||||
.room_state_get(
|
||||
&room_id,
|
||||
&StateEventType::RoomMember,
|
||||
|
@ -71,7 +73,8 @@ pub async fn set_displayname_route(
|
|||
|
||||
for (pdu_builder, room_id) in all_rooms_joined {
|
||||
let mutex_state = Arc::clone(
|
||||
services().globals
|
||||
services()
|
||||
.globals
|
||||
.roomid_mutex_state
|
||||
.write()
|
||||
.unwrap()
|
||||
|
@ -80,10 +83,12 @@ pub async fn set_displayname_route(
|
|||
);
|
||||
let state_lock = mutex_state.lock().await;
|
||||
|
||||
let _ = services()
|
||||
.rooms
|
||||
.timeline
|
||||
.build_and_append_pdu(pdu_builder, sender_user, &room_id, &state_lock);
|
||||
let _ = services().rooms.timeline.build_and_append_pdu(
|
||||
pdu_builder,
|
||||
sender_user,
|
||||
&room_id,
|
||||
&state_lock,
|
||||
);
|
||||
|
||||
// Presence update
|
||||
services().rooms.edus.presence.update_presence(
|
||||
|
@ -150,10 +155,13 @@ pub async fn set_avatar_url_route(
|
|||
) -> Result<set_avatar_url::v3::Response> {
|
||||
let sender_user = body.sender_user.as_ref().expect("user is authenticated");
|
||||
|
||||
services().users
|
||||
services()
|
||||
.users
|
||||
.set_avatar_url(sender_user, body.avatar_url.clone())?;
|
||||
|
||||
services().users.set_blurhash(sender_user, body.blurhash.clone())?;
|
||||
services()
|
||||
.users
|
||||
.set_blurhash(sender_user, body.blurhash.clone())?;
|
||||
|
||||
// Send a new membership event and presence update into all joined rooms
|
||||
let all_joined_rooms: Vec<_> = services()
|
||||
|
@ -168,8 +176,9 @@ pub async fn set_avatar_url_route(
|
|||
content: to_raw_value(&RoomMemberEventContent {
|
||||
avatar_url: body.avatar_url.clone(),
|
||||
..serde_json::from_str(
|
||||
services().rooms
|
||||
.state_accessor
|
||||
services()
|
||||
.rooms
|
||||
.state_accessor
|
||||
.room_state_get(
|
||||
&room_id,
|
||||
&StateEventType::RoomMember,
|
||||
|
@ -199,7 +208,8 @@ pub async fn set_avatar_url_route(
|
|||
|
||||
for (pdu_builder, room_id) in all_joined_rooms {
|
||||
let mutex_state = Arc::clone(
|
||||
services().globals
|
||||
services()
|
||||
.globals
|
||||
.roomid_mutex_state
|
||||
.write()
|
||||
.unwrap()
|
||||
|
@ -208,10 +218,12 @@ pub async fn set_avatar_url_route(
|
|||
);
|
||||
let state_lock = mutex_state.lock().await;
|
||||
|
||||
let _ = services()
|
||||
.rooms
|
||||
.timeline
|
||||
.build_and_append_pdu(pdu_builder, sender_user, &room_id, &state_lock);
|
||||
let _ = services().rooms.timeline.build_and_append_pdu(
|
||||
pdu_builder,
|
||||
sender_user,
|
||||
&room_id,
|
||||
&state_lock,
|
||||
);
|
||||
|
||||
// Presence update
|
||||
services().rooms.edus.presence.update_presence(
|
||||
|
|
|
@@ -1,4 +1,4 @@
-use crate::{Error, Result, Ruma, services};
+use crate::{services, Error, Result, Ruma};
 use ruma::{
     api::client::{
         error::ErrorKind,
@ -1,4 +1,4 @@
|
|||
use crate::{Error, Result, Ruma, services};
|
||||
use crate::{services, Error, Result, Ruma};
|
||||
use ruma::{
|
||||
api::client::{error::ErrorKind, read_marker::set_read_marker, receipt::create_receipt},
|
||||
events::RoomAccountDataEventType,
|
||||
|
@ -34,12 +34,18 @@ pub async fn set_read_marker_route(
|
|||
services().rooms.edus.read_receipt.private_read_set(
|
||||
&body.room_id,
|
||||
sender_user,
|
||||
services().rooms.timeline.get_pdu_count(event)?.ok_or(Error::BadRequest(
|
||||
ErrorKind::InvalidParam,
|
||||
"Event does not exist.",
|
||||
))?,
|
||||
services()
|
||||
.rooms
|
||||
.timeline
|
||||
.get_pdu_count(event)?
|
||||
.ok_or(Error::BadRequest(
|
||||
ErrorKind::InvalidParam,
|
||||
"Event does not exist.",
|
||||
))?,
|
||||
)?;
|
||||
services().rooms.user
|
||||
services()
|
||||
.rooms
|
||||
.user
|
||||
.reset_notification_counts(sender_user, &body.room_id)?;
|
||||
|
||||
let mut user_receipts = BTreeMap::new();
|
||||
|
@ -80,7 +86,8 @@ pub async fn create_receipt_route(
|
|||
services().rooms.edus.read_receipt.private_read_set(
|
||||
&body.room_id,
|
||||
sender_user,
|
||||
services().rooms
|
||||
services()
|
||||
.rooms
|
||||
.timeline
|
||||
.get_pdu_count(&body.event_id)?
|
||||
.ok_or(Error::BadRequest(
|
||||
|
@ -88,7 +95,9 @@ pub async fn create_receipt_route(
|
|||
"Event does not exist.",
|
||||
))?,
|
||||
)?;
|
||||
services().rooms.user
|
||||
services()
|
||||
.rooms
|
||||
.user
|
||||
.reset_notification_counts(sender_user, &body.room_id)?;
|
||||
|
||||
let mut user_receipts = BTreeMap::new();
|
||||
|
|
|
@@ -1,6 +1,6 @@
 use std::sync::Arc;
 
-use crate::{Result, Ruma, services, service::pdu::PduBuilder};
+use crate::{service::pdu::PduBuilder, services, Result, Ruma};
 use ruma::{
     api::client::redact::redact_event,
     events::{room::redaction::RoomRedactionEventContent, RoomEventType},
@@ -20,7 +20,8 @@ pub async fn redact_event_route(
     let body = body.body;
 
     let mutex_state = Arc::clone(
-        services().globals
+        services()
+            .globals
             .roomid_mutex_state
             .write()
            .unwrap()
@@ -1,4 +1,4 @@
-use crate::{utils::HtmlEscape, Error, Result, Ruma, services};
+use crate::{services, utils::HtmlEscape, Error, Result, Ruma};
 use ruma::{
     api::client::{error::ErrorKind, room::report_content},
     events::room::message,
@ -1,5 +1,5 @@
|
|||
use crate::{
|
||||
Error, Result, Ruma, service::pdu::PduBuilder, services, api::client_server::invite_helper,
|
||||
api::client_server::invite_helper, service::pdu::PduBuilder, services, Error, Result, Ruma,
|
||||
};
|
||||
use ruma::{
|
||||
api::client::{
|
||||
|
@ -57,7 +57,8 @@ pub async fn create_room_route(
|
|||
services().rooms.short.get_or_create_shortroomid(&room_id)?;
|
||||
|
||||
let mutex_state = Arc::clone(
|
||||
services().globals
|
||||
services()
|
||||
.globals
|
||||
.roomid_mutex_state
|
||||
.write()
|
||||
.unwrap()
|
||||
|
@ -81,13 +82,19 @@ pub async fn create_room_route(
|
|||
.as_ref()
|
||||
.map_or(Ok(None), |localpart| {
|
||||
// TODO: Check for invalid characters and maximum length
|
||||
let alias =
|
||||
RoomAliasId::parse(format!("#{}:{}", localpart, services().globals.server_name()))
|
||||
.map_err(|_| {
|
||||
Error::BadRequest(ErrorKind::InvalidParam, "Invalid alias.")
|
||||
})?;
|
||||
let alias = RoomAliasId::parse(format!(
|
||||
"#{}:{}",
|
||||
localpart,
|
||||
services().globals.server_name()
|
||||
))
|
||||
.map_err(|_| Error::BadRequest(ErrorKind::InvalidParam, "Invalid alias."))?;
|
||||
|
||||
if services().rooms.alias.resolve_local_alias(&alias)?.is_some() {
|
||||
if services()
|
||||
.rooms
|
||||
.alias
|
||||
.resolve_local_alias(&alias)?
|
||||
.is_some()
|
||||
{
|
||||
Err(Error::BadRequest(
|
||||
ErrorKind::RoomInUse,
|
||||
"Room alias already exists.",
|
||||
|
@@ -99,7 +106,11 @@ pub async fn create_room_route(
|
|||
|
||||
let room_version = match body.room_version.clone() {
|
||||
Some(room_version) => {
|
||||
if services().globals.supported_room_versions().contains(&room_version) {
|
||||
if services()
|
||||
.globals
|
||||
.supported_room_versions()
|
||||
.contains(&room_version)
|
||||
{
|
||||
room_version
|
||||
} else {
|
||||
return Err(Error::BadRequest(
|
||||
|
@@ -338,13 +349,18 @@ pub async fn create_room_route(
|
|||
pdu_builder.state_key.get_or_insert_with(|| "".to_owned());
|
||||
|
||||
// Silently skip encryption events if they are not allowed
|
||||
if pdu_builder.event_type == RoomEventType::RoomEncryption && !services().globals.allow_encryption()
|
||||
if pdu_builder.event_type == RoomEventType::RoomEncryption
|
||||
&& !services().globals.allow_encryption()
|
||||
{
|
||||
continue;
|
||||
}
|
||||
|
||||
services().rooms
|
||||
.timeline.build_and_append_pdu(pdu_builder, sender_user, &room_id, &state_lock)?;
|
||||
services().rooms.timeline.build_and_append_pdu(
|
||||
pdu_builder,
|
||||
sender_user,
|
||||
&room_id,
|
||||
&state_lock,
|
||||
)?;
|
||||
}
|
||||
|
||||
// 7. Events implied by name and topic
|
||||
|
@@ -412,7 +428,11 @@ pub async fn get_room_event_route(
|
|||
) -> Result<get_room_event::v3::Response> {
|
||||
let sender_user = body.sender_user.as_ref().expect("user is authenticated");
|
||||
|
||||
if !services().rooms.state_cache.is_joined(sender_user, &body.room_id)? {
|
||||
if !services()
|
||||
.rooms
|
||||
.state_cache
|
||||
.is_joined(sender_user, &body.room_id)?
|
||||
{
|
||||
return Err(Error::BadRequest(
|
||||
ErrorKind::Forbidden,
|
||||
"You don't have permission to view this room.",
|
||||
|
@@ -439,7 +459,11 @@ pub async fn get_room_aliases_route(
|
|||
) -> Result<aliases::v3::Response> {
|
||||
let sender_user = body.sender_user.as_ref().expect("user is authenticated");
|
||||
|
||||
if !services().rooms.state_cache.is_joined(sender_user, &body.room_id)? {
|
||||
if !services()
|
||||
.rooms
|
||||
.state_cache
|
||||
.is_joined(sender_user, &body.room_id)?
|
||||
{
|
||||
return Err(Error::BadRequest(
|
||||
ErrorKind::Forbidden,
|
||||
"You don't have permission to view this room.",
|
||||
|
@@ -449,7 +473,8 @@ pub async fn get_room_aliases_route(
|
|||
Ok(aliases::v3::Response {
|
||||
aliases: services()
|
||||
.rooms
|
||||
.alias.local_aliases_for_room(&body.room_id)
|
||||
.alias
|
||||
.local_aliases_for_room(&body.room_id)
|
||||
.filter_map(|a| a.ok())
|
||||
.collect(),
|
||||
})
|
||||
|
@@ -470,7 +495,11 @@ pub async fn upgrade_room_route(
|
|||
) -> Result<upgrade_room::v3::Response> {
|
||||
let sender_user = body.sender_user.as_ref().expect("user is authenticated");
|
||||
|
||||
if !services().globals.supported_room_versions().contains(&body.new_version) {
|
||||
if !services()
|
||||
.globals
|
||||
.supported_room_versions()
|
||||
.contains(&body.new_version)
|
||||
{
|
||||
return Err(Error::BadRequest(
|
||||
ErrorKind::UnsupportedRoomVersion,
|
||||
"This server does not support that room version.",
|
||||
|
@@ -479,11 +508,14 @@ pub async fn upgrade_room_route(
|
|||
|
||||
// Create a replacement room
|
||||
let replacement_room = RoomId::new(services().globals.server_name());
|
||||
services().rooms
|
||||
.short.get_or_create_shortroomid(&replacement_room)?;
|
||||
services()
|
||||
.rooms
|
||||
.short
|
||||
.get_or_create_shortroomid(&replacement_room)?;
|
||||
|
||||
let mutex_state = Arc::clone(
|
||||
services().globals
|
||||
services()
|
||||
.globals
|
||||
.roomid_mutex_state
|
||||
.write()
|
||||
.unwrap()
|
||||
|
@@ -514,7 +546,8 @@ pub async fn upgrade_room_route(
|
|||
// Change lock to replacement room
|
||||
drop(state_lock);
|
||||
let mutex_state = Arc::clone(
|
||||
services().globals
|
||||
services()
|
||||
.globals
|
||||
.roomid_mutex_state
|
||||
.write()
|
||||
.unwrap()
|
||||
|
@@ -525,7 +558,8 @@ pub async fn upgrade_room_route(
|
|||
|
||||
// Get the old room creation event
|
||||
let mut create_event_content = serde_json::from_str::<CanonicalJsonObject>(
|
||||
services().rooms
|
||||
services()
|
||||
.rooms
|
||||
.state_accessor
|
||||
.room_state_get(&body.room_id, &StateEventType::RoomCreate, "")?
|
||||
.ok_or_else(|| Error::bad_database("Found room without m.room.create event."))?
|
||||
|
@@ -627,10 +661,15 @@ pub async fn upgrade_room_route(
|
|||
|
||||
// Replicate transferable state events to the new room
|
||||
for event_type in transferable_state_events {
|
||||
let event_content = match services().rooms.state_accessor.room_state_get(&body.room_id, &event_type, "")? {
|
||||
Some(v) => v.content.clone(),
|
||||
None => continue, // Skipping missing events.
|
||||
};
|
||||
let event_content =
|
||||
match services()
|
||||
.rooms
|
||||
.state_accessor
|
||||
.room_state_get(&body.room_id, &event_type, "")?
|
||||
{
|
||||
Some(v) => v.content.clone(),
|
||||
None => continue, // Skipping missing events.
|
||||
};
|
||||
|
||||
services().rooms.timeline.build_and_append_pdu(
|
||||
PduBuilder {
|
||||
|
@@ -647,14 +686,22 @@ pub async fn upgrade_room_route(
|
|||
}
|
||||
|
||||
// Moves any local aliases to the new room
|
||||
for alias in services().rooms.alias.local_aliases_for_room(&body.room_id).filter_map(|r| r.ok()) {
|
||||
services().rooms
|
||||
.alias.set_alias(&alias, &replacement_room)?;
|
||||
for alias in services()
|
||||
.rooms
|
||||
.alias
|
||||
.local_aliases_for_room(&body.room_id)
|
||||
.filter_map(|r| r.ok())
|
||||
{
|
||||
services()
|
||||
.rooms
|
||||
.alias
|
||||
.set_alias(&alias, &replacement_room)?;
|
||||
}
|
||||
|
||||
// Get the old room power levels
|
||||
let mut power_levels_event_content: RoomPowerLevelsEventContent = serde_json::from_str(
|
||||
services().rooms
|
||||
services()
|
||||
.rooms
|
||||
.state_accessor
|
||||
.room_state_get(&body.room_id, &StateEventType::RoomPowerLevels, "")?
|
||||
.ok_or_else(|| Error::bad_database("Found room without m.room.create event."))?
|
||||
|
@@ -688,4 +735,3 @@ pub async fn upgrade_room_route(
|
|||
// Return the replacement room id
|
||||
Ok(upgrade_room::v3::Response { replacement_room })
|
||||
}
|
||||
|
||||
|
|
|
@@ -1,4 +1,4 @@
|
|||
use crate::{Error, Result, Ruma, services};
|
||||
use crate::{services, Error, Result, Ruma};
|
||||
use ruma::api::client::{
|
||||
error::ErrorKind,
|
||||
search::search_events::{
|
||||
|
@@ -23,7 +23,8 @@ pub async fn search_events_route(
|
|||
let filter = &search_criteria.filter;
|
||||
|
||||
let room_ids = filter.rooms.clone().unwrap_or_else(|| {
|
||||
services().rooms
|
||||
services()
|
||||
.rooms
|
||||
.state_cache
|
||||
.rooms_joined(sender_user)
|
||||
.filter_map(|r| r.ok())
|
||||
|
@@ -35,7 +36,11 @@ pub async fn search_events_route(
|
|||
let mut searches = Vec::new();
|
||||
|
||||
for room_id in room_ids {
|
||||
if !services().rooms.state_cache.is_joined(sender_user, &room_id)? {
|
||||
if !services()
|
||||
.rooms
|
||||
.state_cache
|
||||
.is_joined(sender_user, &room_id)?
|
||||
{
|
||||
return Err(Error::BadRequest(
|
||||
ErrorKind::Forbidden,
|
||||
"You don't have permission to view this room.",
|
||||
|
|
|
@@ -1,5 +1,5 @@
|
|||
use super::{DEVICE_ID_LENGTH, TOKEN_LENGTH};
|
||||
use crate::{utils, Error, Result, Ruma, services};
|
||||
use crate::{services, utils, Error, Result, Ruma};
|
||||
use ruma::{
|
||||
api::client::{
|
||||
error::ErrorKind,
|
||||
|
@@ -40,9 +40,7 @@ pub async fn get_login_types_route(
|
|||
///
|
||||
/// Note: You can use [`GET /_matrix/client/r0/login`](fn.get_supported_versions_route.html) to see
|
||||
/// supported login types.
|
||||
pub async fn login_route(
|
||||
body: Ruma<login::v3::IncomingRequest>,
|
||||
) -> Result<login::v3::Response> {
|
||||
pub async fn login_route(body: Ruma<login::v3::IncomingRequest>) -> Result<login::v3::Response> {
|
||||
// Validate login method
|
||||
// TODO: Other login methods
|
||||
let user_id = match &body.login_info {
|
||||
|
@@ -55,15 +53,18 @@ pub async fn login_route(
|
|||
} else {
|
||||
return Err(Error::BadRequest(ErrorKind::Forbidden, "Bad login type."));
|
||||
};
|
||||
let user_id =
|
||||
UserId::parse_with_server_name(username.to_owned(), services().globals.server_name())
|
||||
.map_err(|_| {
|
||||
Error::BadRequest(ErrorKind::InvalidUsername, "Username is invalid.")
|
||||
})?;
|
||||
let hash = services().users.password_hash(&user_id)?.ok_or(Error::BadRequest(
|
||||
ErrorKind::Forbidden,
|
||||
"Wrong username or password.",
|
||||
))?;
|
||||
let user_id = UserId::parse_with_server_name(
|
||||
username.to_owned(),
|
||||
services().globals.server_name(),
|
||||
)
|
||||
.map_err(|_| Error::BadRequest(ErrorKind::InvalidUsername, "Username is invalid."))?;
|
||||
let hash = services()
|
||||
.users
|
||||
.password_hash(&user_id)?
|
||||
.ok_or(Error::BadRequest(
|
||||
ErrorKind::Forbidden,
|
||||
"Wrong username or password.",
|
||||
))?;
|
||||
|
||||
if hash.is_empty() {
|
||||
return Err(Error::BadRequest(
|
||||
|
@@ -121,7 +122,8 @@ pub async fn login_route(
|
|||
|
||||
// Determine if device_id was provided and exists in the db for this user
|
||||
let device_exists = body.device_id.as_ref().map_or(false, |device_id| {
|
||||
services().users
|
||||
services()
|
||||
.users
|
||||
.all_device_ids(&user_id)
|
||||
.any(|x| x.as_ref().map_or(false, |v| v == device_id))
|
||||
});
|
||||
|
@@ -156,9 +158,7 @@ pub async fn login_route(
|
|||
/// - Deletes device metadata (device id, device display name, last seen ip, last seen ts)
|
||||
/// - Forgets to-device events
|
||||
/// - Triggers device list updates
|
||||
pub async fn logout_route(
|
||||
body: Ruma<logout::v3::Request>,
|
||||
) -> Result<logout::v3::Response> {
|
||||
pub async fn logout_route(body: Ruma<logout::v3::Request>) -> Result<logout::v3::Response> {
|
||||
let sender_user = body.sender_user.as_ref().expect("user is authenticated");
|
||||
let sender_device = body.sender_device.as_ref().expect("user is authenticated");
|
||||
|
||||
|
|
|
@@ -1,8 +1,6 @@
|
|||
use std::sync::Arc;
|
||||
|
||||
use crate::{
|
||||
Error, Result, Ruma, RumaResponse, services, service::pdu::PduBuilder,
|
||||
};
|
||||
use crate::{service::pdu::PduBuilder, services, Error, Result, Ruma, RumaResponse};
|
||||
use ruma::{
|
||||
api::client::{
|
||||
error::ErrorKind,
|
||||
|
@@ -90,10 +88,14 @@ pub async fn get_state_events_route(
|
|||
#[allow(clippy::blocks_in_if_conditions)]
|
||||
// Users not in the room should not be able to access the state unless history_visibility is
|
||||
// WorldReadable
|
||||
if !services().rooms.state_cache.is_joined(sender_user, &body.room_id)?
|
||||
if !services()
|
||||
.rooms
|
||||
.state_cache
|
||||
.is_joined(sender_user, &body.room_id)?
|
||||
&& !matches!(
|
||||
services().rooms
|
||||
.state_accessor
|
||||
services()
|
||||
.rooms
|
||||
.state_accessor
|
||||
.room_state_get(&body.room_id, &StateEventType::RoomHistoryVisibility, "")?
|
||||
.map(|event| {
|
||||
serde_json::from_str(event.content.get())
|
||||
|
@@ -138,10 +140,15 @@ pub async fn get_state_events_for_key_route(
|
|||
#[allow(clippy::blocks_in_if_conditions)]
|
||||
// Users not in the room should not be able to access the state unless history_visibility is
|
||||
// WorldReadable
|
||||
if !services().rooms.state_cache.is_joined(sender_user, &body.room_id)?
|
||||
if !services()
|
||||
.rooms
|
||||
.state_cache
|
||||
.is_joined(sender_user, &body.room_id)?
|
||||
&& !matches!(
|
||||
services().rooms
|
||||
.state_accessor.room_state_get(&body.room_id, &StateEventType::RoomHistoryVisibility, "")?
|
||||
services()
|
||||
.rooms
|
||||
.state_accessor
|
||||
.room_state_get(&body.room_id, &StateEventType::RoomHistoryVisibility, "")?
|
||||
.map(|event| {
|
||||
serde_json::from_str(event.content.get())
|
||||
.map(|e: RoomHistoryVisibilityEventContent| e.history_visibility)
|
||||
|
@@ -162,7 +169,8 @@ pub async fn get_state_events_for_key_route(
|
|||
|
||||
let event = services()
|
||||
.rooms
|
||||
.state_accessor.room_state_get(&body.room_id, &body.event_type, &body.state_key)?
|
||||
.state_accessor
|
||||
.room_state_get(&body.room_id, &body.event_type, &body.state_key)?
|
||||
.ok_or(Error::BadRequest(
|
||||
ErrorKind::NotFound,
|
||||
"State event not found.",
|
||||
|
@@ -187,10 +195,15 @@ pub async fn get_state_events_for_empty_key_route(
|
|||
#[allow(clippy::blocks_in_if_conditions)]
|
||||
// Users not in the room should not be able to access the state unless history_visibility is
|
||||
// WorldReadable
|
||||
if !services().rooms.state_cache.is_joined(sender_user, &body.room_id)?
|
||||
if !services()
|
||||
.rooms
|
||||
.state_cache
|
||||
.is_joined(sender_user, &body.room_id)?
|
||||
&& !matches!(
|
||||
services().rooms
|
||||
.state_accessor.room_state_get(&body.room_id, &StateEventType::RoomHistoryVisibility, "")?
|
||||
services()
|
||||
.rooms
|
||||
.state_accessor
|
||||
.room_state_get(&body.room_id, &StateEventType::RoomHistoryVisibility, "")?
|
||||
.map(|event| {
|
||||
serde_json::from_str(event.content.get())
|
||||
.map(|e: RoomHistoryVisibilityEventContent| e.history_visibility)
|
||||
|
@@ -211,7 +224,8 @@ pub async fn get_state_events_for_empty_key_route(
|
|||
|
||||
let event = services()
|
||||
.rooms
|
||||
.state_accessor.room_state_get(&body.room_id, &body.event_type, "")?
|
||||
.state_accessor
|
||||
.room_state_get(&body.room_id, &body.event_type, "")?
|
||||
.ok_or(Error::BadRequest(
|
||||
ErrorKind::NotFound,
|
||||
"State event not found.",
|
||||
|
@@ -248,7 +262,8 @@ async fn send_state_event_for_key_helper(
|
|||
if alias.server_name() != services().globals.server_name()
|
||||
|| services()
|
||||
.rooms
|
||||
.alias.resolve_local_alias(&alias)?
|
||||
.alias
|
||||
.resolve_local_alias(&alias)?
|
||||
.filter(|room| room == room_id) // Make sure it's the right room
|
||||
.is_none()
|
||||
{
|
||||
|
@@ -262,7 +277,8 @@ async fn send_state_event_for_key_helper(
|
|||
}
|
||||
|
||||
let mutex_state = Arc::clone(
|
||||
services().globals
|
||||
services()
|
||||
.globals
|
||||
.roomid_mutex_state
|
||||
.write()
|
||||
.unwrap()
|
||||
|
|
|
@@ -1,4 +1,4 @@
|
|||
use crate::{Error, Result, Ruma, RumaResponse, services};
|
||||
use crate::{services, Error, Result, Ruma, RumaResponse};
|
||||
use ruma::{
|
||||
api::client::{
|
||||
filter::{IncomingFilterDefinition, LazyLoadOptions},
|
||||
|
@@ -129,12 +129,7 @@ async fn sync_helper_wrapper(
|
|||
) {
|
||||
let since = body.since.clone();
|
||||
|
||||
let r = sync_helper(
|
||||
sender_user.clone(),
|
||||
sender_device.clone(),
|
||||
body,
|
||||
)
|
||||
.await;
|
||||
let r = sync_helper(sender_user.clone(), sender_device.clone(), body).await;
|
||||
|
||||
if let Ok((_, caching_allowed)) = r {
|
||||
if !caching_allowed {
|
||||
|
@@ -211,12 +206,17 @@ async fn sync_helper(
|
|||
|
||||
// Look for device list updates of this account
|
||||
device_list_updates.extend(
|
||||
services().users
|
||||
services()
|
||||
.users
|
||||
.keys_changed(&sender_user.to_string(), since, None)
|
||||
.filter_map(|r| r.ok()),
|
||||
);
|
||||
|
||||
let all_joined_rooms = services().rooms.state_cache.rooms_joined(&sender_user).collect::<Vec<_>>();
|
||||
let all_joined_rooms = services()
|
||||
.rooms
|
||||
.state_cache
|
||||
.rooms_joined(&sender_user)
|
||||
.collect::<Vec<_>>();
|
||||
for room_id in all_joined_rooms {
|
||||
let room_id = room_id?;
|
||||
|
||||
|
@@ -224,7 +224,8 @@ async fn sync_helper(
|
|||
// Get and drop the lock to wait for remaining operations to finish
|
||||
// This will make sure the we have all events until next_batch
|
||||
let mutex_insert = Arc::clone(
|
||||
services().globals
|
||||
services()
|
||||
.globals
|
||||
.roomid_mutex_insert
|
||||
.write()
|
||||
.unwrap()
|
||||
|
@@ -237,7 +238,12 @@ async fn sync_helper(
|
|||
|
||||
let timeline_pdus;
|
||||
let limited;
|
||||
if services().rooms.timeline.last_timeline_count(&sender_user, &room_id)? > since {
|
||||
if services()
|
||||
.rooms
|
||||
.timeline
|
||||
.last_timeline_count(&sender_user, &room_id)?
|
||||
> since
|
||||
{
|
||||
let mut non_timeline_pdus = services()
|
||||
.rooms
|
||||
.timeline
|
||||
|
@@ -250,7 +256,8 @@ async fn sync_helper(
|
|||
r.ok()
|
||||
})
|
||||
.take_while(|(pduid, _)| {
|
||||
services().rooms
|
||||
services()
|
||||
.rooms
|
||||
.timeline
|
||||
.pdu_count(pduid)
|
||||
.map_or(false, |count| count > since)
|
||||
|
@@ -286,24 +293,40 @@ async fn sync_helper(
|
|||
timeline_users.insert(event.sender.as_str().to_owned());
|
||||
}
|
||||
|
||||
services().rooms.lazy_loading
|
||||
.lazy_load_confirm_delivery(&sender_user, &sender_device, &room_id, since)?;
|
||||
services().rooms.lazy_loading.lazy_load_confirm_delivery(
|
||||
&sender_user,
|
||||
&sender_device,
|
||||
&room_id,
|
||||
since,
|
||||
)?;
|
||||
|
||||
// Database queries:
|
||||
|
||||
let current_shortstatehash = if let Some(s) = services().rooms.state.get_room_shortstatehash(&room_id)? {
|
||||
s
|
||||
} else {
|
||||
error!("Room {} has no state", room_id);
|
||||
continue;
|
||||
};
|
||||
let current_shortstatehash =
|
||||
if let Some(s) = services().rooms.state.get_room_shortstatehash(&room_id)? {
|
||||
s
|
||||
} else {
|
||||
error!("Room {} has no state", room_id);
|
||||
continue;
|
||||
};
|
||||
|
||||
let since_shortstatehash = services().rooms.user.get_token_shortstatehash(&room_id, since)?;
|
||||
let since_shortstatehash = services()
|
||||
.rooms
|
||||
.user
|
||||
.get_token_shortstatehash(&room_id, since)?;
|
||||
|
||||
// Calculates joined_member_count, invited_member_count and heroes
|
||||
let calculate_counts = || {
|
||||
let joined_member_count = services().rooms.state_cache.room_joined_count(&room_id)?.unwrap_or(0);
|
||||
let invited_member_count = services().rooms.state_cache.room_invited_count(&room_id)?.unwrap_or(0);
|
||||
let joined_member_count = services()
|
||||
.rooms
|
||||
.state_cache
|
||||
.room_joined_count(&room_id)?
|
||||
.unwrap_or(0);
|
||||
let invited_member_count = services()
|
||||
.rooms
|
||||
.state_cache
|
||||
.room_invited_count(&room_id)?
|
||||
.unwrap_or(0);
|
||||
|
||||
// Recalculate heroes (first 5 members)
|
||||
let mut heroes = Vec::new();
|
||||
|
@@ -314,7 +337,8 @@ async fn sync_helper(
|
|||
|
||||
for hero in services()
|
||||
.rooms
|
||||
.timeline.all_pdus(&sender_user, &room_id)?
|
||||
.timeline
|
||||
.all_pdus(&sender_user, &room_id)?
|
||||
.filter_map(|pdu| pdu.ok()) // Ignore all broken pdus
|
||||
.filter(|(_, pdu)| pdu.kind == RoomEventType::RoomMember)
|
||||
.map(|(_, pdu)| {
|
||||
|
@@ -333,7 +357,10 @@ async fn sync_helper(
|
|||
content.membership,
|
||||
MembershipState::Join | MembershipState::Invite
|
||||
) && (services().rooms.state_cache.is_joined(&user_id, &room_id)?
|
||||
|| services().rooms.state_cache.is_invited(&user_id, &room_id)?)
|
||||
|| services()
|
||||
.rooms
|
||||
.state_cache
|
||||
.is_invited(&user_id, &room_id)?)
|
||||
{
|
||||
Ok::<_, Error>(Some(state_key.clone()))
|
||||
} else {
|
||||
|
@@ -374,14 +401,21 @@ async fn sync_helper(
|
|||
|
||||
let (joined_member_count, invited_member_count, heroes) = calculate_counts()?;
|
||||
|
||||
let current_state_ids = services().rooms.state_accessor.state_full_ids(current_shortstatehash).await?;
|
||||
let current_state_ids = services()
|
||||
.rooms
|
||||
.state_accessor
|
||||
.state_full_ids(current_shortstatehash)
|
||||
.await?;
|
||||
|
||||
let mut state_events = Vec::new();
|
||||
let mut lazy_loaded = HashSet::new();
|
||||
|
||||
let mut i = 0;
|
||||
for (shortstatekey, id) in current_state_ids {
|
||||
let (event_type, state_key) = services().rooms.short.get_statekey_from_short(shortstatekey)?;
|
||||
let (event_type, state_key) = services()
|
||||
.rooms
|
||||
.short
|
||||
.get_statekey_from_short(shortstatekey)?;
|
||||
|
||||
if event_type != StateEventType::RoomMember {
|
||||
let pdu = match services().rooms.timeline.get_pdu(&id)? {
|
||||
|
@@ -423,8 +457,11 @@ async fn sync_helper(
|
|||
}
|
||||
|
||||
// Reset lazy loading because this is an initial sync
|
||||
services().rooms.lazy_loading
|
||||
.lazy_load_reset(&sender_user, &sender_device, &room_id)?;
|
||||
services().rooms.lazy_loading.lazy_load_reset(
|
||||
&sender_user,
|
||||
&sender_device,
|
||||
&room_id,
|
||||
)?;
|
||||
|
||||
// The state_events above should contain all timeline_users, let's mark them as lazy
|
||||
// loaded.
|
||||
|
@@ -471,8 +508,16 @@ async fn sync_helper(
|
|||
let mut lazy_loaded = HashSet::new();
|
||||
|
||||
if since_shortstatehash != current_shortstatehash {
|
||||
let current_state_ids = services().rooms.state_accessor.state_full_ids(current_shortstatehash).await?;
|
||||
let since_state_ids = services().rooms.state_accessor.state_full_ids(since_shortstatehash).await?;
|
||||
let current_state_ids = services()
|
||||
.rooms
|
||||
.state_accessor
|
||||
.state_full_ids(current_shortstatehash)
|
||||
.await?;
|
||||
let since_state_ids = services()
|
||||
.rooms
|
||||
.state_accessor
|
||||
.state_full_ids(since_shortstatehash)
|
||||
.await?;
|
||||
|
||||
for (key, id) in current_state_ids {
|
||||
if body.full_state || since_state_ids.get(&key) != Some(&id) {
|
||||
|
@@ -537,13 +582,15 @@ async fn sync_helper(
|
|||
|
||||
let encrypted_room = services()
|
||||
.rooms
|
||||
.state_accessor.state_get(current_shortstatehash, &StateEventType::RoomEncryption, "")?
|
||||
.state_accessor
|
||||
.state_get(current_shortstatehash, &StateEventType::RoomEncryption, "")?
|
||||
.is_some();
|
||||
|
||||
let since_encryption =
|
||||
services().rooms
|
||||
.state_accessor
|
||||
.state_get(since_shortstatehash, &StateEventType::RoomEncryption, "")?;
|
||||
let since_encryption = services().rooms.state_accessor.state_get(
|
||||
since_shortstatehash,
|
||||
&StateEventType::RoomEncryption,
|
||||
"",
|
||||
)?;
|
||||
|
||||
// Calculations:
|
||||
let new_encrypted_room = encrypted_room && since_encryption.is_none();
|
||||
|
@@ -592,8 +639,9 @@ async fn sync_helper(
|
|||
if joined_since_last_sync && encrypted_room || new_encrypted_room {
|
||||
// If the user is in a new encrypted room, give them all joined users
|
||||
device_list_updates.extend(
|
||||
services().rooms
|
||||
.state_cache
|
||||
services()
|
||||
.rooms
|
||||
.state_cache
|
||||
.room_members(&room_id)
|
||||
.flatten()
|
||||
.filter(|user_id| {
|
||||
|
@@ -602,8 +650,7 @@ async fn sync_helper(
|
|||
})
|
||||
.filter(|user_id| {
|
||||
// Only send keys if the sender doesn't share an encrypted room with the target already
|
||||
!share_encrypted_room(&sender_user, user_id, &room_id)
|
||||
.unwrap_or(false)
|
||||
!share_encrypted_room(&sender_user, user_id, &room_id).unwrap_or(false)
|
||||
}),
|
||||
);
|
||||
}
|
||||
|
@@ -625,15 +672,17 @@ async fn sync_helper(
|
|||
|
||||
// Look for device list updates in this room
|
||||
device_list_updates.extend(
|
||||
services().users
|
||||
services()
|
||||
.users
|
||||
.keys_changed(&room_id.to_string(), since, None)
|
||||
.filter_map(|r| r.ok()),
|
||||
);
|
||||
|
||||
let notification_count = if send_notification_counts {
|
||||
Some(
|
||||
services().rooms
|
||||
.user
|
||||
services()
|
||||
.rooms
|
||||
.user
|
||||
.notification_count(&sender_user, &room_id)?
|
||||
.try_into()
|
||||
.expect("notification count can't go that high"),
|
||||
|
@@ -644,8 +693,9 @@ async fn sync_helper(
|
|||
|
||||
let highlight_count = if send_notification_counts {
|
||||
Some(
|
||||
services().rooms
|
||||
.user
|
||||
services()
|
||||
.rooms
|
||||
.user
|
||||
.highlight_count(&sender_user, &room_id)?
|
||||
.try_into()
|
||||
.expect("highlight count can't go that high"),
|
||||
|
@@ -657,7 +707,9 @@ async fn sync_helper(
|
|||
let prev_batch = timeline_pdus
|
||||
.first()
|
||||
.map_or(Ok::<_, Error>(None), |(pdu_id, _)| {
|
||||
Ok(Some(services().rooms.timeline.pdu_count(pdu_id)?.to_string()))
|
||||
Ok(Some(
|
||||
services().rooms.timeline.pdu_count(pdu_id)?.to_string(),
|
||||
))
|
||||
})?;
|
||||
|
||||
let room_events: Vec<_> = timeline_pdus
|
||||
|
@@ -685,8 +737,11 @@ async fn sync_helper(
|
|||
}
|
||||
|
||||
// Save the state after this sync so we can send the correct state diff next sync
|
||||
services().rooms.user
|
||||
.associate_token_shortstatehash(&room_id, next_batch, current_shortstatehash)?;
|
||||
services().rooms.user.associate_token_shortstatehash(
|
||||
&room_id,
|
||||
next_batch,
|
||||
current_shortstatehash,
|
||||
)?;
|
||||
|
||||
let joined_room = JoinedRoom {
|
||||
account_data: RoomAccountData {
|
||||
|
@@ -729,11 +784,11 @@ async fn sync_helper(
|
|||
}
|
||||
|
||||
// Take presence updates from this room
|
||||
for (user_id, presence) in
|
||||
services().rooms
|
||||
.edus
|
||||
.presence
|
||||
.presence_since(&room_id, since)?
|
||||
for (user_id, presence) in services()
|
||||
.rooms
|
||||
.edus
|
||||
.presence
|
||||
.presence_since(&room_id, since)?
|
||||
{
|
||||
match presence_updates.entry(user_id) {
|
||||
Entry::Vacant(v) => {
|
||||
|
@@ -765,14 +820,19 @@ async fn sync_helper(
|
|||
}
|
||||
|
||||
let mut left_rooms = BTreeMap::new();
|
||||
let all_left_rooms: Vec<_> = services().rooms.state_cache.rooms_left(&sender_user).collect();
|
||||
let all_left_rooms: Vec<_> = services()
|
||||
.rooms
|
||||
.state_cache
|
||||
.rooms_left(&sender_user)
|
||||
.collect();
|
||||
for result in all_left_rooms {
|
||||
let (room_id, left_state_events) = result?;
|
||||
|
||||
{
|
||||
// Get and drop the lock to wait for remaining operations to finish
|
||||
let mutex_insert = Arc::clone(
|
||||
services().globals
|
||||
services()
|
||||
.globals
|
||||
.roomid_mutex_insert
|
||||
.write()
|
||||
.unwrap()
|
||||
|
@@ -783,7 +843,10 @@ async fn sync_helper(
|
|||
drop(insert_lock);
|
||||
}
|
||||
|
||||
let left_count = services().rooms.state_cache.get_left_count(&room_id, &sender_user)?;
|
||||
let left_count = services()
|
||||
.rooms
|
||||
.state_cache
|
||||
.get_left_count(&room_id, &sender_user)?;
|
||||
|
||||
// Left before last sync
|
||||
if Some(since) >= left_count {
|
||||
|
@@ -807,14 +870,19 @@ async fn sync_helper(
|
|||
}
|
||||
|
||||
let mut invited_rooms = BTreeMap::new();
|
||||
let all_invited_rooms: Vec<_> = services().rooms.state_cache.rooms_invited(&sender_user).collect();
|
||||
let all_invited_rooms: Vec<_> = services()
|
||||
.rooms
|
||||
.state_cache
|
||||
.rooms_invited(&sender_user)
|
||||
.collect();
|
||||
for result in all_invited_rooms {
|
||||
let (room_id, invite_state_events) = result?;
|
||||
|
||||
{
|
||||
// Get and drop the lock to wait for remaining operations to finish
|
||||
let mutex_insert = Arc::clone(
|
||||
services().globals
|
||||
services()
|
||||
.globals
|
||||
.roomid_mutex_insert
|
||||
.write()
|
||||
.unwrap()
|
||||
|
@@ -825,7 +893,10 @@ async fn sync_helper(
|
|||
drop(insert_lock);
|
||||
}
|
||||
|
||||
let invite_count = services().rooms.state_cache.get_invite_count(&room_id, &sender_user)?;
|
||||
let invite_count = services()
|
||||
.rooms
|
||||
.state_cache
|
||||
.get_invite_count(&room_id, &sender_user)?;
|
||||
|
||||
// Invited before last sync
|
||||
if Some(since) >= invite_count {
|
||||
|
@@ -850,8 +921,10 @@ async fn sync_helper(
|
|||
.filter_map(|r| r.ok())
|
||||
.filter_map(|other_room_id| {
|
||||
Some(
|
||||
services().rooms
|
||||
.state_accessor.room_state_get(&other_room_id, &StateEventType::RoomEncryption, "")
|
||||
services()
|
||||
.rooms
|
||||
.state_accessor
|
||||
.room_state_get(&other_room_id, &StateEventType::RoomEncryption, "")
|
||||
.ok()?
|
||||
.is_some(),
|
||||
)
|
||||
|
@@ -865,7 +938,8 @@ async fn sync_helper(
|
|||
}
|
||||
|
||||
// Remove all to-device events the device received *last time*
|
||||
services().users
|
||||
services()
|
||||
.users
|
||||
.remove_to_device_events(&sender_user, &sender_device, since)?;
|
||||
|
||||
let response = sync_events::v3::Response {
|
||||
|
@@ -898,7 +972,9 @@ async fn sync_helper(
|
|||
changed: device_list_updates.into_iter().collect(),
|
||||
left: device_list_left.into_iter().collect(),
|
||||
},
|
||||
device_one_time_keys_count: services().users.count_one_time_keys(&sender_user, &sender_device)?,
|
||||
device_one_time_keys_count: services()
|
||||
.users
|
||||
.count_one_time_keys(&sender_user, &sender_device)?,
|
||||
to_device: ToDevice {
|
||||
events: services()
|
||||
.users
|
||||
|
@@ -942,8 +1018,9 @@ fn share_encrypted_room(
|
|||
.filter(|room_id| room_id != ignore_room)
|
||||
.filter_map(|other_room_id| {
|
||||
Some(
|
||||
services().rooms
|
||||
.state_accessor
|
||||
services()
|
||||
.rooms
|
||||
.state_accessor
|
||||
.room_state_get(&other_room_id, &StateEventType::RoomEncryption, "")
|
||||
.ok()?
|
||||
.is_some(),
|
||||
|
|
|
@@ -1,4 +1,4 @@
|
|||
use crate::{Result, Ruma, services, Error};
|
||||
use crate::{services, Error, Result, Ruma};
|
||||
use ruma::{
|
||||
api::client::tag::{create_tag, delete_tag, get_tags},
|
||||
events::{
|
||||
|
@@ -18,21 +18,24 @@ pub async fn update_tag_route(
|
|||
) -> Result<create_tag::v3::Response> {
|
||||
let sender_user = body.sender_user.as_ref().expect("user is authenticated");
|
||||
|
||||
let event = services()
|
||||
.account_data
|
||||
.get(
|
||||
Some(&body.room_id),
|
||||
sender_user,
|
||||
RoomAccountDataEventType::Tag,
|
||||
)?;
|
||||
let event = services().account_data.get(
|
||||
Some(&body.room_id),
|
||||
sender_user,
|
||||
RoomAccountDataEventType::Tag,
|
||||
)?;
|
||||
|
||||
let mut tags_event = event.map(|e| serde_json::from_str(e.get())
|
||||
.map_err(|_| Error::bad_database("Invalid account data event in db.")))
|
||||
.unwrap_or_else(|| Ok(TagEvent {
|
||||
content: TagEventContent {
|
||||
tags: BTreeMap::new(),
|
||||
},
|
||||
}))?;
|
||||
let mut tags_event = event
|
||||
.map(|e| {
|
||||
serde_json::from_str(e.get())
|
||||
.map_err(|_| Error::bad_database("Invalid account data event in db."))
|
||||
})
|
||||
.unwrap_or_else(|| {
|
||||
Ok(TagEvent {
|
||||
content: TagEventContent {
|
||||
tags: BTreeMap::new(),
|
||||
},
|
||||
})
|
||||
})?;
|
||||
|
||||
tags_event
|
||||
.content
|
||||
|
@@ -59,21 +62,24 @@ pub async fn delete_tag_route(
|
|||
) -> Result<delete_tag::v3::Response> {
|
||||
let sender_user = body.sender_user.as_ref().expect("user is authenticated");
|
||||
|
||||
let mut event = services()
|
||||
.account_data
|
||||
.get(
|
||||
Some(&body.room_id),
|
||||
sender_user,
|
||||
RoomAccountDataEventType::Tag,
|
||||
)?;
|
||||
let mut event = services().account_data.get(
|
||||
Some(&body.room_id),
|
||||
sender_user,
|
||||
RoomAccountDataEventType::Tag,
|
||||
)?;
|
||||
|
||||
let mut tags_event = event.map(|e| serde_json::from_str(e.get())
|
||||
.map_err(|_| Error::bad_database("Invalid account data event in db.")))
|
||||
.unwrap_or_else(|| Ok(TagEvent {
|
||||
content: TagEventContent {
|
||||
tags: BTreeMap::new(),
|
||||
},
|
||||
}))?;
|
||||
let mut tags_event = event
|
||||
.map(|e| {
|
||||
serde_json::from_str(e.get())
|
||||
.map_err(|_| Error::bad_database("Invalid account data event in db."))
|
||||
})
|
||||
.unwrap_or_else(|| {
|
||||
Ok(TagEvent {
|
||||
content: TagEventContent {
|
||||
tags: BTreeMap::new(),
|
||||
},
|
||||
})
|
||||
})?;
|
||||
|
||||
tags_event.content.tags.remove(&body.tag.clone().into());
|
||||
|
||||
|
@@ -97,21 +103,24 @@ pub async fn get_tags_route(
|
|||
) -> Result<get_tags::v3::Response> {
|
||||
let sender_user = body.sender_user.as_ref().expect("user is authenticated");
|
||||
|
||||
let mut event = services()
|
||||
.account_data
|
||||
.get(
|
||||
Some(&body.room_id),
|
||||
sender_user,
|
||||
RoomAccountDataEventType::Tag,
|
||||
)?;
|
||||
let mut event = services().account_data.get(
|
||||
Some(&body.room_id),
|
||||
sender_user,
|
||||
RoomAccountDataEventType::Tag,
|
||||
)?;
|
||||
|
||||
let mut tags_event = event.map(|e| serde_json::from_str(e.get())
|
||||
.map_err(|_| Error::bad_database("Invalid account data event in db.")))
|
||||
.unwrap_or_else(|| Ok(TagEvent {
|
||||
content: TagEventContent {
|
||||
tags: BTreeMap::new(),
|
||||
},
|
||||
}))?;
|
||||
let mut tags_event = event
|
||||
.map(|e| {
|
||||
serde_json::from_str(e.get())
|
||||
.map_err(|_| Error::bad_database("Invalid account data event in db."))
|
||||
})
|
||||
.unwrap_or_else(|| {
|
||||
Ok(TagEvent {
|
||||
content: TagEventContent {
|
||||
tags: BTreeMap::new(),
|
||||
},
|
||||
})
|
||||
})?;
|
||||
|
||||
Ok(get_tags::v3::Response {
|
||||
tags: tags_event.content.tags,
|
||||
|
|
|
@@ -1,7 +1,7 @@
|
|||
use ruma::events::ToDeviceEventType;
|
||||
use std::collections::BTreeMap;
|
||||
|
||||
use crate::{Error, Result, Ruma, services};
|
||||
use crate::{services, Error, Result, Ruma};
|
||||
use ruma::{
|
||||
api::{
|
||||
client::{error::ErrorKind, to_device::send_event_to_device},
|
||||
|
@@ -54,15 +54,17 @@ pub async fn send_event_to_device_route(
|
|||
}
|
||||
|
||||
match target_device_id_maybe {
|
||||
DeviceIdOrAllDevices::DeviceId(target_device_id) => services().users.add_to_device_event(
|
||||
sender_user,
|
||||
target_user_id,
|
||||
&target_device_id,
|
||||
&body.event_type,
|
||||
event.deserialize_as().map_err(|_| {
|
||||
Error::BadRequest(ErrorKind::InvalidParam, "Event is invalid")
|
||||
})?,
|
||||
)?,
|
||||
DeviceIdOrAllDevices::DeviceId(target_device_id) => {
|
||||
services().users.add_to_device_event(
|
||||
sender_user,
|
||||
target_user_id,
|
||||
&target_device_id,
|
||||
&body.event_type,
|
||||
event.deserialize_as().map_err(|_| {
|
||||
Error::BadRequest(ErrorKind::InvalidParam, "Event is invalid")
|
||||
})?,
|
||||
)?
|
||||
}
|
||||
|
||||
DeviceIdOrAllDevices::AllDevices => {
|
||||
for target_device_id in services().users.all_device_ids(target_user_id) {
|
||||
|
@@ -82,7 +84,8 @@ pub async fn send_event_to_device_route(
|
|||
}
|
||||
|
||||
// Save transaction id with empty data
|
||||
services().transaction_ids
|
||||
services()
|
||||
.transaction_ids
|
||||
.add_txnid(sender_user, sender_device, &body.txn_id, &[])?;
|
||||
|
||||
Ok(send_event_to_device::v3::Response {})
|
||||
|
|
|
@@ -1,4 +1,4 @@
|
|||
use crate::{utils, Error, Result, Ruma, services};
|
||||
use crate::{services, utils, Error, Result, Ruma};
|
||||
use ruma::api::client::{error::ErrorKind, typing::create_typing_event};
|
||||
|
||||
/// # `PUT /_matrix/client/r0/rooms/{roomId}/typing/{userId}`
|
||||
|
@@ -11,7 +11,11 @@ pub async fn create_typing_event_route(
|
|||
|
||||
let sender_user = body.sender_user.as_ref().expect("user is authenticated");
|
||||
|
||||
if !services().rooms.state_cache.is_joined(sender_user, &body.room_id)? {
|
||||
if !services()
|
||||
.rooms
|
||||
.state_cache
|
||||
.is_joined(sender_user, &body.room_id)?
|
||||
{
|
||||
return Err(Error::BadRequest(
|
||||
ErrorKind::Forbidden,
|
||||
"You are not in this room.",
|
||||
|
@@ -25,8 +29,10 @@ pub async fn create_typing_event_route(
|
|||
duration.as_millis() as u64 + utils::millis_since_unix_epoch(),
|
||||
)?;
|
||||
} else {
|
||||
services().rooms
|
||||
.edus.typing
|
||||
services()
|
||||
.rooms
|
||||
.edus
|
||||
.typing
|
||||
.typing_remove(sender_user, &body.room_id)?;
|
||||
}
|
||||
|
||||
|
|
|
@@ -1,4 +1,4 @@
|
|||
use crate::{Result, Ruma, services};
|
||||
use crate::{services, Result, Ruma};
|
||||
use ruma::{
|
||||
api::client::user_directory::search_users,
|
||||
events::{
|
||||
|
@@ -48,22 +48,25 @@ pub async fn search_users_route(
|
|||
return None;
|
||||
}
|
||||
|
||||
let user_is_in_public_rooms =
|
||||
services().rooms
|
||||
.state_cache.rooms_joined(&user_id)
|
||||
.filter_map(|r| r.ok())
|
||||
.any(|room| {
|
||||
services().rooms
|
||||
.state_accessor.room_state_get(&room, &StateEventType::RoomJoinRules, "")
|
||||
.map_or(false, |event| {
|
||||
event.map_or(false, |event| {
|
||||
serde_json::from_str(event.content.get())
|
||||
.map_or(false, |r: RoomJoinRulesEventContent| {
|
||||
r.join_rule == JoinRule::Public
|
||||
})
|
||||
})
|
||||
let user_is_in_public_rooms = services()
|
||||
.rooms
|
||||
.state_cache
|
||||
.rooms_joined(&user_id)
|
||||
.filter_map(|r| r.ok())
|
||||
.any(|room| {
|
||||
services()
|
||||
.rooms
|
||||
.state_accessor
|
||||
.room_state_get(&room, &StateEventType::RoomJoinRules, "")
|
||||
.map_or(false, |event| {
|
||||
event.map_or(false, |event| {
|
||||
serde_json::from_str(event.content.get())
|
||||
.map_or(false, |r: RoomJoinRulesEventContent| {
|
||||
r.join_rule == JoinRule::Public
|
||||
})
|
||||
})
|
||||
});
|
||||
})
|
||||
});
|
||||
|
||||
if user_is_in_public_rooms {
|
||||
return Some(user);
|
||||
|
@@ -71,7 +74,8 @@ pub async fn search_users_route(
|
|||
|
||||
let user_is_in_shared_rooms = services()
|
||||
.rooms
|
||||
.user.get_shared_rooms(vec![sender_user.clone(), user_id.clone()])
|
||||
.user
|
||||
.get_shared_rooms(vec![sender_user.clone(), user_id.clone()])
|
||||
.ok()?
|
||||
.next()
|
||||
.is_some();
|
||||
|
|
|
@@ -1,4 +1,4 @@
|
|||
use crate::{Result, Ruma, services};
|
||||
use crate::{services, Result, Ruma};
|
||||
use hmac::{Hmac, Mac, NewMac};
|
||||
use ruma::{api::client::voip::get_turn_server_info, SecondsSinceUnixEpoch};
|
||||
use sha1::Sha1;
|
||||
|
|
|
@@ -1,4 +1,4 @@
|
|||
pub mod client_server;
|
||||
pub mod server_server;
|
||||
pub mod appservice_server;
|
||||
pub mod client_server;
|
||||
pub mod ruma_wrapper;
|
||||
pub mod server_server;
|
||||
|
|
|
@@ -24,7 +24,7 @@ use serde::Deserialize;
|
|||
use tracing::{debug, error, warn};
|
||||
|
||||
use super::{Ruma, RumaResponse};
|
||||
use crate::{Error, Result, api::server_server, services};
|
||||
use crate::{api::server_server, services, Error, Result};
|
||||
|
||||
#[async_trait]
|
||||
impl<T, B> FromRequest<B> for Ruma<T>
|
||||
|
@@ -197,11 +197,11 @@ where
|
|||
request_map.insert("content".to_owned(), json_body.clone());
|
||||
};
|
||||
|
||||
let keys_result = services().rooms.event_handler.fetch_signing_keys(
|
||||
&x_matrix.origin,
|
||||
vec![x_matrix.key.to_owned()],
|
||||
)
|
||||
.await;
|
||||
let keys_result = services()
|
||||
.rooms
|
||||
.event_handler
|
||||
.fetch_signing_keys(&x_matrix.origin, vec![x_matrix.key.to_owned()])
|
||||
.await;
|
||||
|
||||
let keys = match keys_result {
|
||||
Ok(b) => b,
|
||||
|
|
|
@@ -1,6 +1,7 @@
|
|||
use crate::{
|
||||
api::client_server::{self, claim_keys_helper, get_keys_helper},
|
||||
utils, Error, PduEvent, Result, Ruma, services, service::pdu::{gen_event_id_canonical_json, PduBuilder},
|
||||
service::pdu::{gen_event_id_canonical_json, PduBuilder},
|
||||
services, utils, Error, PduEvent, Result, Ruma,
|
||||
};
|
||||
use axum::{response::IntoResponse, Json};
|
||||
use futures_util::{stream::FuturesUnordered, StreamExt};
|
||||
|
@@ -138,7 +139,8 @@ where
|
|||
|
||||
let mut write_destination_to_cache = false;
|
||||
|
||||
let cached_result = services().globals
|
||||
let cached_result = services()
|
||||
.globals
|
||||
.actual_destination_cache
|
||||
.read()
|
||||
.unwrap()
|
||||
|
@@ -191,7 +193,10 @@ where
|
|||
.to_string()
|
||||
.into(),
|
||||
);
|
||||
request_map.insert("origin".to_owned(), services().globals.server_name().as_str().into());
|
||||
request_map.insert(
|
||||
"origin".to_owned(),
|
||||
services().globals.server_name().as_str().into(),
|
||||
);
|
||||
request_map.insert("destination".to_owned(), destination.as_str().into());
|
||||
|
||||
let mut request_json =
|
||||
|
@@ -238,7 +243,11 @@ where
|
|||
|
||||
let url = reqwest_request.url().clone();
|
||||
|
||||
let response = services().globals.federation_client().execute(reqwest_request).await;
|
||||
let response = services()
|
||||
.globals
|
||||
.federation_client()
|
||||
.execute(reqwest_request)
|
||||
.await;
|
||||
|
||||
match response {
|
||||
Ok(mut response) => {
|
||||
|
@@ -278,10 +287,15 @@ where
|
|||
if status == 200 {
|
||||
let response = T::IncomingResponse::try_from_http_response(http_response);
|
||||
if response.is_ok() && write_destination_to_cache {
|
||||
services().globals.actual_destination_cache.write().unwrap().insert(
|
||||
Box::<ServerName>::from(destination),
|
||||
(actual_destination, host),
|
||||
);
|
||||
services()
|
||||
.globals
|
||||
.actual_destination_cache
|
||||
.write()
|
||||
.unwrap()
|
||||
.insert(
|
||||
Box::<ServerName>::from(destination),
|
||||
(actual_destination, host),
|
||||
);
|
||||
}
|
||||
|
||||
response.map_err(|e| {
|
||||
|
@@ -329,9 +343,7 @@ fn add_port_to_hostname(destination_str: &str) -> FedDest {
|
|||
/// Returns: actual_destination, host header
|
||||
/// Implemented according to the specification at https://matrix.org/docs/spec/server_server/r0.1.4#resolving-server-names
|
||||
/// Numbers in comments below refer to bullet points in linked section of specification
|
||||
async fn find_actual_destination(
|
||||
destination: &'_ ServerName,
|
||||
) -> (FedDest, FedDest) {
|
||||
async fn find_actual_destination(destination: &'_ ServerName) -> (FedDest, FedDest) {
|
||||
let destination_str = destination.as_str().to_owned();
|
||||
let mut hostname = destination_str.clone();
|
||||
let actual_destination = match get_ip_with_port(&destination_str) {
|
||||
|
@@ -364,18 +376,24 @@ async fn find_actual_destination(
|
|||
// 3.3: SRV lookup successful
|
||||
let force_port = hostname_override.port();
|
||||
|
||||
if let Ok(override_ip) = services().globals
|
||||
if let Ok(override_ip) = services()
|
||||
.globals
|
||||
.dns_resolver()
|
||||
.lookup_ip(hostname_override.hostname())
|
||||
.await
|
||||
{
|
||||
services().globals.tls_name_override.write().unwrap().insert(
|
||||
delegated_hostname.clone(),
|
||||
(
|
||||
override_ip.iter().collect(),
|
||||
force_port.unwrap_or(8448),
|
||||
),
|
||||
);
|
||||
services()
|
||||
.globals
|
||||
.tls_name_override
|
||||
.write()
|
||||
.unwrap()
|
||||
.insert(
|
||||
delegated_hostname.clone(),
|
||||
(
|
||||
override_ip.iter().collect(),
|
||||
force_port.unwrap_or(8448),
|
||||
),
|
||||
);
|
||||
} else {
|
||||
warn!("Using SRV record, but could not resolve to IP");
|
||||
}
|
||||
|
@@ -400,15 +418,24 @@ async fn find_actual_destination(
|
|||
Some(hostname_override) => {
|
||||
let force_port = hostname_override.port();
|
||||
|
||||
if let Ok(override_ip) = services().globals
|
||||
if let Ok(override_ip) = services()
|
||||
.globals
|
||||
.dns_resolver()
|
||||
.lookup_ip(hostname_override.hostname())
|
||||
.await
|
||||
{
|
||||
services().globals.tls_name_override.write().unwrap().insert(
|
||||
hostname.clone(),
|
||||
(override_ip.iter().collect(), force_port.unwrap_or(8448)),
|
||||
);
|
||||
services()
|
||||
.globals
|
||||
.tls_name_override
|
||||
.write()
|
||||
.unwrap()
|
||||
.insert(
|
||||
hostname.clone(),
|
||||
(
|
||||
override_ip.iter().collect(),
|
||||
force_port.unwrap_or(8448),
|
||||
),
|
||||
);
|
||||
} else {
|
||||
warn!("Using SRV record, but could not resolve to IP");
|
||||
}
|
||||
|
@@ -443,10 +470,9 @@ async fn find_actual_destination(
|
|||
(actual_destination, hostname)
|
||||
}
|
||||
|
||||
async fn query_srv_record(
|
||||
hostname: &'_ str,
|
||||
) -> Option<FedDest> {
|
||||
if let Ok(Some(host_port)) = services().globals
|
||||
async fn query_srv_record(hostname: &'_ str) -> Option<FedDest> {
|
||||
if let Ok(Some(host_port)) = services()
|
||||
.globals
|
||||
.dns_resolver()
|
||||
.srv_lookup(format!("_matrix._tcp.{}", hostname))
|
||||
.await
|
||||
|
@@ -465,11 +491,10 @@ async fn query_srv_record(
|
|||
}
|
||||
}
|
||||
|
||||
async fn request_well_known(
|
||||
destination: &str,
|
||||
) -> Option<String> {
|
||||
async fn request_well_known(destination: &str) -> Option<String> {
|
||||
let body: serde_json::Value = serde_json::from_str(
|
||||
&services().globals
|
||||
&services()
|
||||
.globals
|
||||
.default_client()
|
||||
.get(&format!(
|
||||
"https://{}/.well-known/matrix/server",
|
||||
|
@@ -664,15 +689,22 @@ pub async fn send_transaction_message_route(
|
|||
Some(id) => id,
|
||||
None => {
|
||||
// Event is invalid
|
||||
resolved_map.insert(event_id, Err(Error::bad_database("Event needs a valid RoomId.")));
|
||||
resolved_map.insert(
|
||||
event_id,
|
||||
Err(Error::bad_database("Event needs a valid RoomId.")),
|
||||
);
|
||||
continue;
|
||||
}
|
||||
};
|
||||
|
||||
services().rooms.event_handler.acl_check(&sender_servername, &room_id)?;
|
||||
services()
|
||||
.rooms
|
||||
.event_handler
|
||||
.acl_check(&sender_servername, &room_id)?;
|
||||
|
||||
let mutex = Arc::clone(
|
||||
services().globals
|
||||
services()
|
||||
.globals
|
||||
.roomid_mutex_federation
|
||||
.write()
|
||||
.unwrap()
|
||||
|
@@ -683,16 +715,19 @@ pub async fn send_transaction_message_route(
|
|||
let start_time = Instant::now();
|
||||
resolved_map.insert(
|
||||
event_id.clone(),
|
||||
services().rooms.event_handler.handle_incoming_pdu(
|
||||
&sender_servername,
|
||||
&event_id,
|
||||
&room_id,
|
||||
value,
|
||||
true,
|
||||
&pub_key_map,
|
||||
)
|
||||
.await
|
||||
.map(|_| ()),
|
||||
services()
|
||||
.rooms
|
||||
.event_handler
|
||||
.handle_incoming_pdu(
|
||||
&sender_servername,
|
||||
&event_id,
|
||||
&room_id,
|
||||
value,
|
||||
true,
|
||||
&pub_key_map,
|
||||
)
|
||||
.await
|
||||
.map(|_| ()),
|
||||
);
|
||||
drop(mutex_lock);
|
||||
|
||||
|
@@ -727,7 +762,13 @@ pub async fn send_transaction_message_route(
|
|||
.event_ids
|
||||
.iter()
|
||||
.filter_map(|id| {
|
||||
services().rooms.timeline.get_pdu_count(id).ok().flatten().map(|r| (id, r))
|
||||
services()
|
||||
.rooms
|
||||
.timeline
|
||||
.get_pdu_count(id)
|
||||
.ok()
|
||||
.flatten()
|
||||
.map(|r| (id, r))
|
||||
})
|
||||
.max_by_key(|(_, count)| *count)
|
||||
{
|
||||
|
@@ -744,11 +785,11 @@ pub async fn send_transaction_message_route(
|
|||
content: ReceiptEventContent(receipt_content),
|
||||
room_id: room_id.clone(),
|
||||
};
|
||||
services().rooms.edus.read_receipt.readreceipt_update(
|
||||
&user_id,
|
||||
&room_id,
|
||||
event,
|
||||
)?;
|
||||
services()
|
||||
.rooms
|
||||
.edus
|
||||
.read_receipt
|
||||
.readreceipt_update(&user_id, &room_id, event)?;
|
||||
} else {
|
||||
// TODO fetch missing events
|
||||
info!("No known event ids in read receipt: {:?}", user_updates);
|
||||
|
@@ -757,7 +798,11 @@ pub async fn send_transaction_message_route(
|
|||
}
|
||||
}
|
||||
Edu::Typing(typing) => {
|
||||
if services().rooms.state_cache.is_joined(&typing.user_id, &typing.room_id)? {
|
||||
if services()
|
||||
.rooms
|
||||
.state_cache
|
||||
.is_joined(&typing.user_id, &typing.room_id)?
|
||||
{
|
||||
if typing.typing {
|
||||
services().rooms.edus.typing.typing_add(
|
||||
&typing.user_id,
|
||||
|
@@ -765,16 +810,16 @@ pub async fn send_transaction_message_route(
|
|||
3000 + utils::millis_since_unix_epoch(),
|
||||
)?;
|
||||
} else {
|
||||
services().rooms.edus.typing.typing_remove(
|
||||
&typing.user_id,
|
||||
&typing.room_id,
|
||||
)?;
|
||||
services()
|
||||
.rooms
|
||||
.edus
|
||||
.typing
|
||||
.typing_remove(&typing.user_id, &typing.room_id)?;
|
||||
}
|
||||
}
|
||||
}
|
||||
Edu::DeviceListUpdate(DeviceListUpdateContent { user_id, .. }) => {
|
||||
services().users
|
||||
.mark_device_key_update(&user_id)?;
|
||||
services().users.mark_device_key_update(&user_id)?;
|
||||
}
|
||||
Edu::DirectToDevice(DirectDeviceContent {
|
||||
sender,
|
||||
|
@@ -810,7 +855,9 @@ pub async fn send_transaction_message_route(
|
|||
}
|
||||
|
||||
DeviceIdOrAllDevices::AllDevices => {
|
||||
for target_device_id in services().users.all_device_ids(target_user_id) {
|
||||
for target_device_id in
|
||||
services().users.all_device_ids(target_user_id)
|
||||
{
|
||||
services().users.add_to_device_event(
|
||||
&sender,
|
||||
target_user_id,
|
||||
|
@@ -830,7 +877,8 @@ pub async fn send_transaction_message_route(
|
|||
}
|
||||
|
||||
// Save transaction id with empty data
|
||||
services().transaction_ids
|
||||
services()
|
||||
.transaction_ids
|
||||
.add_txnid(&sender, None, &message_id, &[])?;
|
||||
}
|
||||
Edu::SigningKeyUpdate(SigningKeyUpdateContent {
|
||||
|
@@ -854,7 +902,12 @@ pub async fn send_transaction_message_route(
|
|||
}
|
||||
}
|
||||
|
||||
Ok(send_transaction_message::v1::Response { pdus: resolved_map.into_iter().map(|(e, r)| (e, r.map_err(|e| e.to_string()))).collect() })
|
||||
Ok(send_transaction_message::v1::Response {
|
||||
pdus: resolved_map
|
||||
.into_iter()
|
||||
.map(|(e, r)| (e, r.map_err(|e| e.to_string())))
|
||||
.collect(),
|
||||
})
|
||||
}
|
||||
|
||||
/// # `GET /_matrix/federation/v1/event/{eventId}`
|
||||
|
@@ -875,7 +928,8 @@ pub async fn get_event_route(
|
|||
.expect("server is authenticated");
|
||||
|
||||
let event = services()
|
||||
.rooms.timeline
|
||||
.rooms
|
||||
.timeline
|
||||
.get_pdu_json(&body.event_id)?
|
||||
.ok_or(Error::BadRequest(ErrorKind::NotFound, "Event not found."))?;
|
||||
|
||||
|
@@ -887,7 +941,11 @@ pub async fn get_event_route(
|
|||
let room_id = <&RoomId>::try_from(room_id_str)
|
||||
.map_err(|_| Error::bad_database("Invalid room id field in event in database"))?;
|
||||
|
||||
if !services().rooms.state_cache.server_in_room(sender_servername, room_id)? {
|
||||
if !services()
|
||||
.rooms
|
||||
.state_cache
|
||||
.server_in_room(sender_servername, room_id)?
|
||||
{
|
||||
return Err(Error::BadRequest(
|
||||
ErrorKind::Forbidden,
|
||||
"Server is not in room",
|
||||
|
@@ -916,14 +974,21 @@ pub async fn get_missing_events_route(
|
|||
.as_ref()
|
||||
.expect("server is authenticated");
|
||||
|
||||
if !services().rooms.state_cache.server_in_room(sender_servername, &body.room_id)? {
|
||||
if !services()
|
||||
.rooms
|
||||
.state_cache
|
||||
.server_in_room(sender_servername, &body.room_id)?
|
||||
{
|
||||
return Err(Error::BadRequest(
|
||||
ErrorKind::Forbidden,
|
||||
"Server is not in room",
|
||||
));
|
||||
}
|
||||
|
||||
services().rooms.event_handler.acl_check(&sender_servername, &body.room_id)?;
|
||||
services()
|
||||
.rooms
|
||||
.event_handler
|
||||
.acl_check(&sender_servername, &body.room_id)?;
|
||||
|
||||
let mut queued_events = body.latest_events.clone();
|
||||
let mut events = Vec::new();
|
||||
|
@@ -988,17 +1053,25 @@ pub async fn get_event_authorization_route(
|
|||
.as_ref()
|
||||
.expect("server is authenticated");
|
||||
|
||||
if !services().rooms.state_cache.server_in_room(sender_servername, &body.room_id)? {
|
||||
if !services()
|
||||
.rooms
|
||||
.state_cache
|
||||
.server_in_room(sender_servername, &body.room_id)?
|
||||
{
|
||||
return Err(Error::BadRequest(
|
||||
ErrorKind::Forbidden,
|
||||
"Server is not in room.",
|
||||
));
|
||||
}
|
||||
|
||||
services().rooms.event_handler.acl_check(&sender_servername, &body.room_id)?;
|
||||
services()
|
||||
.rooms
|
||||
.event_handler
|
||||
.acl_check(&sender_servername, &body.room_id)?;
|
||||
|
||||
let event = services()
|
||||
.rooms.timeline
|
||||
.rooms
|
||||
.timeline
|
||||
.get_pdu_json(&body.event_id)?
|
||||
.ok_or(Error::BadRequest(ErrorKind::NotFound, "Event not found."))?;
|
||||
|
||||
|
@@ -1010,7 +1083,11 @@ pub async fn get_event_authorization_route(
|
|||
let room_id = <&RoomId>::try_from(room_id_str)
|
||||
.map_err(|_| Error::bad_database("Invalid room id field in event in database"))?;
|
||||
|
||||
let auth_chain_ids = services().rooms.auth_chain.get_auth_chain(room_id, vec![Arc::from(&*body.event_id)]).await?;
|
||||
let auth_chain_ids = services()
|
||||
.rooms
|
||||
.auth_chain
|
||||
.get_auth_chain(room_id, vec![Arc::from(&*body.event_id)])
|
||||
.await?;
|
||||
|
||||
Ok(get_event_authorization::v1::Response {
|
||||
auth_chain: auth_chain_ids
|
||||
|
@@ -1035,17 +1112,25 @@ pub async fn get_room_state_route(
|
|||
.as_ref()
|
||||
.expect("server is authenticated");
|
||||
|
||||
if !services().rooms.state_cache.server_in_room(sender_servername, &body.room_id)? {
|
||||
if !services()
|
||||
.rooms
|
||||
.state_cache
|
||||
.server_in_room(sender_servername, &body.room_id)?
|
||||
{
|
||||
return Err(Error::BadRequest(
|
||||
ErrorKind::Forbidden,
|
||||
"Server is not in room.",
|
||||
));
|
||||
}
|
||||
|
||||
services().rooms.event_handler.acl_check(&sender_servername, &body.room_id)?;
|
||||
services()
|
||||
.rooms
|
||||
.event_handler
|
||||
.acl_check(&sender_servername, &body.room_id)?;
|
||||
|
||||
let shortstatehash = services()
|
||||
.rooms.state_accessor
|
||||
.rooms
|
||||
.state_accessor
|
||||
.pdu_shortstatehash(&body.event_id)?
|
||||
.ok_or(Error::BadRequest(
|
||||
ErrorKind::NotFound,
|
||||
|
@@ -1053,26 +1138,39 @@ pub async fn get_room_state_route(
|
|||
))?;
|
||||
|
||||
let pdus = services()
|
||||
.rooms.state_accessor
|
||||
.rooms
|
||||
.state_accessor
|
||||
.state_full_ids(shortstatehash)
|
||||
.await?
|
||||
.into_iter()
|
||||
.map(|(_, id)| {
|
||||
PduEvent::convert_to_outgoing_federation_event(
|
||||
services().rooms.timeline.get_pdu_json(&id).unwrap().unwrap(),
|
||||
services()
|
||||
.rooms
|
||||
.timeline
|
||||
.get_pdu_json(&id)
|
||||
.unwrap()
|
||||
.unwrap(),
|
||||
)
|
||||
})
|
||||
.collect();
|
||||
|
||||
let auth_chain_ids =
|
||||
services().rooms.auth_chain.get_auth_chain(&body.room_id, vec![Arc::from(&*body.event_id)]).await?;
|
||||
let auth_chain_ids = services()
|
||||
.rooms
|
||||
.auth_chain
|
||||
.get_auth_chain(&body.room_id, vec![Arc::from(&*body.event_id)])
|
||||
.await?;
|
||||
|
||||
Ok(get_room_state::v1::Response {
|
||||
auth_chain: auth_chain_ids
|
||||
.map(|id| {
|
||||
services().rooms.timeline.get_pdu_json(&id).map(|maybe_json| {
|
||||
PduEvent::convert_to_outgoing_federation_event(maybe_json.unwrap())
|
||||
})
|
||||
services()
|
||||
.rooms
|
||||
.timeline
|
||||
.get_pdu_json(&id)
|
||||
.map(|maybe_json| {
|
||||
PduEvent::convert_to_outgoing_federation_event(maybe_json.unwrap())
|
||||
})
|
||||
})
|
||||
.filter_map(|r| r.ok())
|
||||
.collect(),
|
||||
|
@@ -1095,17 +1193,25 @@ pub async fn get_room_state_ids_route(
|
|||
.as_ref()
|
||||
.expect("server is authenticated");
|
||||
|
||||
if !services().rooms.state_cache.server_in_room(sender_servername, &body.room_id)? {
|
||||
if !services()
|
||||
.rooms
|
||||
.state_cache
|
||||
.server_in_room(sender_servername, &body.room_id)?
|
||||
{
|
||||
return Err(Error::BadRequest(
|
||||
ErrorKind::Forbidden,
|
||||
"Server is not in room.",
|
||||
));
|
||||
}
|
||||
|
||||
services().rooms.event_handler.acl_check(&sender_servername, &body.room_id)?;
|
||||
services()
|
||||
.rooms
|
||||
.event_handler
|
||||
.acl_check(&sender_servername, &body.room_id)?;
|
||||
|
||||
let shortstatehash = services()
|
||||
.rooms.state_accessor
|
||||
.rooms
|
||||
.state_accessor
|
||||
.pdu_shortstatehash(&body.event_id)?
|
||||
.ok_or(Error::BadRequest(
|
||||
ErrorKind::NotFound,
|
||||
|
@@ -1113,15 +1219,19 @@ pub async fn get_room_state_ids_route(
|
|||
))?;
|
||||
|
||||
let pdu_ids = services()
|
||||
.rooms.state_accessor
|
||||
.rooms
|
||||
.state_accessor
|
||||
.state_full_ids(shortstatehash)
|
||||
.await?
|
||||
.into_iter()
|
||||
.map(|(_, id)| (*id).to_owned())
|
||||
.collect();
|
||||
|
||||
let auth_chain_ids =
|
||||
services().rooms.auth_chain.get_auth_chain(&body.room_id, vec![Arc::from(&*body.event_id)]).await?;
|
||||
let auth_chain_ids = services()
|
||||
.rooms
|
||||
.auth_chain
|
||||
.get_auth_chain(&body.room_id, vec![Arc::from(&*body.event_id)])
|
||||
.await?;
|
||||
|
||||
Ok(get_room_state_ids::v1::Response {
|
||||
auth_chain_ids: auth_chain_ids.map(|id| (*id).to_owned()).collect(),
|
||||
|
@@ -1151,10 +1261,14 @@ pub async fn create_join_event_template_route(
|
|||
.as_ref()
|
||||
.expect("server is authenticated");
|
||||
|
||||
services().rooms.event_handler.acl_check(&sender_servername, &body.room_id)?;
|
||||
services()
|
||||
.rooms
|
||||
.event_handler
|
||||
.acl_check(&sender_servername, &body.room_id)?;
|
||||
|
||||
let mutex_state = Arc::clone(
|
||||
services().globals
|
||||
services()
|
||||
.globals
|
||||
.roomid_mutex_state
|
||||
.write()
|
||||
.unwrap()
|
||||
|
@@ -1164,9 +1278,11 @@ pub async fn create_join_event_template_route(
|
|||
let state_lock = mutex_state.lock().await;
|
||||
|
||||
// TODO: Conduit does not implement restricted join rules yet, we always reject
|
||||
let join_rules_event =
|
||||
services().rooms.state_accessor
|
||||
.room_state_get(&body.room_id, &StateEventType::RoomJoinRules, "")?;
|
||||
let join_rules_event = services().rooms.state_accessor.room_state_get(
|
||||
&body.room_id,
|
||||
&StateEventType::RoomJoinRules,
|
||||
"",
|
||||
)?;
|
||||
|
||||
let join_rules_event_content: Option<RoomJoinRulesEventContent> = join_rules_event
|
||||
.as_ref()
|
||||
|
@ -1212,13 +1328,18 @@ pub async fn create_join_event_template_route(
|
|||
})
|
||||
.expect("member event is valid value");
|
||||
|
||||
let (pdu, pdu_json) = services().rooms.timeline.create_hash_and_sign_event(PduBuilder {
|
||||
event_type: RoomEventType::RoomMember,
|
||||
content,
|
||||
unsigned: None,
|
||||
state_key: Some(body.user_id.to_string()),
|
||||
redacts: None,
|
||||
}, &body.user_id, &body.room_id, &state_lock)?;
|
||||
let (pdu, pdu_json) = services().rooms.timeline.create_hash_and_sign_event(
|
||||
PduBuilder {
|
||||
event_type: RoomEventType::RoomMember,
|
||||
content,
|
||||
unsigned: None,
|
||||
state_key: Some(body.user_id.to_string()),
|
||||
redacts: None,
|
||||
},
|
||||
&body.user_id,
|
||||
&body.room_id,
|
||||
&state_lock,
|
||||
)?;
|
||||
|
||||
drop(state_lock);
|
||||
|
||||
|

@ -1244,12 +1365,17 @@ async fn create_join_event(
));
}

services().rooms.event_handler.acl_check(&sender_servername, room_id)?;
services()
.rooms
.event_handler
.acl_check(&sender_servername, room_id)?;

// TODO: Conduit does not implement restricted join rules yet, we always reject
let join_rules_event = services()
.rooms.state_accessor
.room_state_get(room_id, &StateEventType::RoomJoinRules, "")?;
let join_rules_event = services().rooms.state_accessor.room_state_get(
room_id,
&StateEventType::RoomJoinRules,
"",
)?;

let join_rules_event_content: Option<RoomJoinRulesEventContent> = join_rules_event
.as_ref()

@ -1275,7 +1401,8 @@ async fn create_join_event(

// We need to return the state prior to joining, let's keep a reference to that here
let shortstatehash = services()
.rooms.state
.rooms
.state
.get_room_shortstatehash(room_id)?
.ok_or(Error::BadRequest(
ErrorKind::NotFound,

@ -1307,7 +1434,8 @@ async fn create_join_event(
.map_err(|_| Error::BadRequest(ErrorKind::InvalidParam, "Origin field is invalid."))?;

let mutex = Arc::clone(
services().globals
services()
.globals
.roomid_mutex_federation
.write()
.unwrap()

@ -1315,7 +1443,10 @@ async fn create_join_event(
.or_default(),
);
let mutex_lock = mutex.lock().await;
let pdu_id: Vec<u8> = services().rooms.event_handler.handle_incoming_pdu(&origin, &event_id, room_id, value, true, &pub_key_map)
let pdu_id: Vec<u8> = services()
.rooms
.event_handler
.handle_incoming_pdu(&origin, &event_id, room_id, value, true, &pub_key_map)
.await?
.ok_or(Error::BadRequest(
ErrorKind::InvalidParam,

@ -1323,12 +1454,19 @@ async fn create_join_event(
))?;
drop(mutex_lock);

let state_ids = services().rooms.state_accessor.state_full_ids(shortstatehash).await?;
let auth_chain_ids = services().rooms.auth_chain.get_auth_chain(
room_id,
state_ids.iter().map(|(_, id)| id.clone()).collect(),
)
.await?;
let state_ids = services()
.rooms
.state_accessor
.state_full_ids(shortstatehash)
.await?;
let auth_chain_ids = services()
.rooms
.auth_chain
.get_auth_chain(
room_id,
state_ids.iter().map(|(_, id)| id.clone()).collect(),
)
.await?;

let servers = services()
.rooms

@ -1399,9 +1537,16 @@ pub async fn create_invite_route(
.as_ref()
.expect("server is authenticated");

services().rooms.event_handler.acl_check(&sender_servername, &body.room_id)?;
services()
.rooms
.event_handler
.acl_check(&sender_servername, &body.room_id)?;

if !services().globals.supported_room_versions().contains(&body.room_version) {
if !services()
.globals
.supported_room_versions()
.contains(&body.room_version)
{
return Err(Error::BadRequest(
ErrorKind::IncompatibleRoomVersion {
room_version: body.room_version.clone(),

@ -1549,7 +1694,8 @@ pub async fn get_room_information_route(

let room_id = services()
.rooms
.alias.resolve_local_alias(&body.room_alias)?
.alias
.resolve_local_alias(&body.room_alias)?
.ok_or(Error::BadRequest(
ErrorKind::NotFound,
"Room alias not found.",

@ -1576,7 +1722,9 @@ pub async fn get_profile_information_route(
let mut blurhash = None;

match &body.field {
Some(ProfileField::DisplayName) => displayname = services().users.displayname(&body.user_id)?,
Some(ProfileField::DisplayName) => {
displayname = services().users.displayname(&body.user_id)?
}
Some(ProfileField::AvatarUrl) => {
avatar_url = services().users.avatar_url(&body.user_id)?;
blurhash = services().users.blurhash(&body.user_id)?

@ -1600,18 +1748,14 @@ pub async fn get_profile_information_route(
/// # `POST /_matrix/federation/v1/user/keys/query`
///
/// Gets devices and identity keys for the given users.
pub async fn get_keys_route(
body: Ruma<get_keys::v1::Request>,
) -> Result<get_keys::v1::Response> {
pub async fn get_keys_route(body: Ruma<get_keys::v1::Request>) -> Result<get_keys::v1::Response> {
if !services().globals.allow_federation() {
return Err(Error::bad_config("Federation is disabled."));
}

let result = get_keys_helper(
None,
&body.device_keys,
|u| Some(u.server_name()) == body.sender_servername.as_deref(),
)
let result = get_keys_helper(None, &body.device_keys, |u| {
Some(u.server_name()) == body.sender_servername.as_deref()
})
.await?;

Ok(get_keys::v1::Response {

@ -1,4 +1,4 @@
use super::{super::Config, watchers::Watchers, KvTree, KeyValueDatabaseEngine};
use super::{super::Config, watchers::Watchers, KeyValueDatabaseEngine, KvTree};
use crate::{utils, Result};
use std::{
future::Future,

@ -1,9 +1,15 @@
use std::collections::HashMap;

use ruma::{UserId, DeviceId, signatures::CanonicalJsonValue, api::client::{uiaa::UiaaInfo, error::ErrorKind}, events::{RoomAccountDataEventType, AnyEphemeralRoomEvent}, serde::Raw, RoomId};
use serde::{Serialize, de::DeserializeOwned};
use ruma::{
api::client::{error::ErrorKind, uiaa::UiaaInfo},
events::{AnyEphemeralRoomEvent, RoomAccountDataEventType},
serde::Raw,
signatures::CanonicalJsonValue,
DeviceId, RoomId, UserId,
};
use serde::{de::DeserializeOwned, Serialize};

use crate::{Result, database::KeyValueDatabase, service, Error, utils, services};
use crate::{database::KeyValueDatabase, service, services, utils, Error, Result};

impl service::account_data::Data for KeyValueDatabase {
/// Places one event in the account data of the user and removes the previous entry.

@ -55,10 +55,13 @@ impl service::appservice::Data for KeyValueDatabase {
}

fn iter_ids<'a>(&'a self) -> Result<Box<dyn Iterator<Item = Result<String>> + 'a>> {
Ok(Box::new(self.id_appserviceregistrations.iter().map(|(id, _)| {
utils::string_from_bytes(&id)
.map_err(|_| Error::bad_database("Invalid id bytes in id_appserviceregistrations."))
})))
Ok(Box::new(self.id_appserviceregistrations.iter().map(
|(id, _)| {
utils::string_from_bytes(&id).map_err(|_| {
Error::bad_database("Invalid id bytes in id_appserviceregistrations.")
})
},
)))
}

fn all(&self) -> Result<Vec<(String, serde_yaml::Value)>> {

@ -2,9 +2,13 @@ use std::collections::BTreeMap;

use async_trait::async_trait;
use futures_util::{stream::FuturesUnordered, StreamExt};
use ruma::{signatures::Ed25519KeyPair, UserId, DeviceId, ServerName, api::federation::discovery::{ServerSigningKeys, VerifyKey}, ServerSigningKeyId, MilliSecondsSinceUnixEpoch};
use ruma::{
api::federation::discovery::{ServerSigningKeys, VerifyKey},
signatures::Ed25519KeyPair,
DeviceId, MilliSecondsSinceUnixEpoch, ServerName, ServerSigningKeyId, UserId,
};

use crate::{Result, service, database::KeyValueDatabase, Error, utils, services};
use crate::{database::KeyValueDatabase, service, services, utils, Error, Result};

pub const COUNTER: &[u8] = b"c";

@ -35,28 +39,24 @@ impl service::globals::Data for KeyValueDatabase {

// Return when *any* user changed his key
// TODO: only send for user they share a room with
futures.push(
self.todeviceid_events
.watch_prefix(&userdeviceid_prefix),
);
futures.push(self.todeviceid_events.watch_prefix(&userdeviceid_prefix));

futures.push(self.userroomid_joined.watch_prefix(&userid_prefix));
futures.push(
self.userroomid_invitestate
.watch_prefix(&userid_prefix),
);
futures.push(self.userroomid_invitestate.watch_prefix(&userid_prefix));
futures.push(self.userroomid_leftstate.watch_prefix(&userid_prefix));
futures.push(
self.userroomid_notificationcount
.watch_prefix(&userid_prefix),
);
futures.push(
self.userroomid_highlightcount
.watch_prefix(&userid_prefix),
);
futures.push(self.userroomid_highlightcount.watch_prefix(&userid_prefix));

// Events for rooms we are in
for room_id in services().rooms.state_cache.rooms_joined(user_id).filter_map(|r| r.ok()) {
for room_id in services()
.rooms
.state_cache
.rooms_joined(user_id)
.filter_map(|r| r.ok())
{
let short_roomid = services()
.rooms
.short

@ -75,15 +75,9 @@ impl service::globals::Data for KeyValueDatabase {
futures.push(self.pduid_pdu.watch_prefix(&short_roomid));

// EDUs
futures.push(
self.roomid_lasttypingupdate
.watch_prefix(&roomid_bytes),
);
futures.push(self.roomid_lasttypingupdate.watch_prefix(&roomid_bytes));

futures.push(
self.readreceiptid_readreceipt
.watch_prefix(&roomid_prefix),
);
futures.push(self.readreceiptid_readreceipt.watch_prefix(&roomid_prefix));

// Key changes
futures.push(self.keychangeid_userid.watch_prefix(&roomid_prefix));

@ -110,10 +104,7 @@ impl service::globals::Data for KeyValueDatabase {
futures.push(self.keychangeid_userid.watch_prefix(&userid_prefix));

// One time keys
futures.push(
self.userid_lastonetimekeyupdate
.watch_prefix(&userid_bytes),
);
futures.push(self.userid_lastonetimekeyupdate.watch_prefix(&userid_bytes));

futures.push(Box::pin(services().globals.rotate.watch()));

@ -238,10 +229,7 @@ impl service::globals::Data for KeyValueDatabase {
}

fn bump_database_version(&self, new_version: u64) -> Result<()> {
self.global
.insert(b"version", &new_version.to_be_bytes())?;
self.global.insert(b"version", &new_version.to_be_bytes())?;
Ok(())
}

}

@ -1,8 +1,15 @@
use std::collections::BTreeMap;

use ruma::{UserId, serde::Raw, api::client::{backup::{BackupAlgorithm, KeyBackupData, RoomKeyBackup}, error::ErrorKind}, RoomId};
use ruma::{
api::client::{
backup::{BackupAlgorithm, KeyBackupData, RoomKeyBackup},
error::ErrorKind,
},
serde::Raw,
RoomId, UserId,
};

use crate::{Result, service, database::KeyValueDatabase, services, Error, utils};
use crate::{database::KeyValueDatabase, service, services, utils, Error, Result};

impl service::key_backups::Data for KeyValueDatabase {
fn create_backup(

@ -118,11 +125,7 @@ impl service::key_backups::Data for KeyValueDatabase {
.transpose()
}

fn get_backup(
&self,
user_id: &UserId,
version: &str,
) -> Result<Option<Raw<BackupAlgorithm>>> {
fn get_backup(&self, user_id: &UserId, version: &str) -> Result<Option<Raw<BackupAlgorithm>>> {
let mut key = user_id.as_bytes().to_vec();
key.push(0xff);
key.extend_from_slice(version.as_bytes());

@ -322,12 +325,7 @@ impl service::key_backups::Data for KeyValueDatabase {
Ok(())
}

fn delete_room_keys(
&self,
user_id: &UserId,
version: &str,
room_id: &RoomId,
) -> Result<()> {
fn delete_room_keys(&self, user_id: &UserId, version: &str, room_id: &RoomId) -> Result<()> {
let mut key = user_id.as_bytes().to_vec();
key.push(0xff);
key.extend_from_slice(version.as_bytes());

@ -1,9 +1,16 @@
use ruma::api::client::error::ErrorKind;

use crate::{database::KeyValueDatabase, service, Error, utils, Result};
use crate::{database::KeyValueDatabase, service, utils, Error, Result};

impl service::media::Data for KeyValueDatabase {
fn create_file_metadata(&self, mxc: String, width: u32, height: u32, content_disposition: Option<&str>, content_type: Option<&str>) -> Result<Vec<u8>> {
fn create_file_metadata(
&self,
mxc: String,
width: u32,
height: u32,
content_disposition: Option<&str>,
content_type: Option<&str>,
) -> Result<Vec<u8>> {
let mut key = mxc.as_bytes().to_vec();
key.push(0xff);
key.extend_from_slice(&width.to_be_bytes());

@ -28,14 +35,23 @@ impl service::media::Data for KeyValueDatabase {
Ok(key)
}

fn search_file_metadata(&self, mxc: String, width: u32, height: u32) -> Result<(Option<String>, Option<String>, Vec<u8>)> {
fn search_file_metadata(
&self,
mxc: String,
width: u32,
height: u32,
) -> Result<(Option<String>, Option<String>, Vec<u8>)> {
let mut prefix = mxc.as_bytes().to_vec();
prefix.push(0xff);
prefix.extend_from_slice(&0_u32.to_be_bytes()); // Width = 0 if it's not a thumbnail
prefix.extend_from_slice(&0_u32.to_be_bytes()); // Height = 0 if it's not a thumbnail
prefix.push(0xff);

let (key, _) = self.mediaid_file.scan_prefix(prefix).next().ok_or(Error::BadRequest(ErrorKind::NotFound, "Media not found"))?;
let (key, _) = self
.mediaid_file
.scan_prefix(prefix)
.next()
.ok_or(Error::BadRequest(ErrorKind::NotFound, "Media not found"))?;

let mut parts = key.rsplit(|&b| b == 0xff);

@ -57,9 +73,7 @@ impl service::media::Data for KeyValueDatabase {
} else {
Some(
utils::string_from_bytes(content_disposition_bytes).map_err(|_| {
Error::bad_database(
"Content Disposition in mediaid_file is invalid unicode.",
)
Error::bad_database("Content Disposition in mediaid_file is invalid unicode.")
})?,
)
};

@ -1,6 +1,9 @@
use ruma::{UserId, api::client::push::{set_pusher, get_pushers}};
use ruma::{
api::client::push::{get_pushers, set_pusher},
UserId,
};

use crate::{service, database::KeyValueDatabase, Error, Result};
use crate::{database::KeyValueDatabase, service, Error, Result};

impl service::pusher::Data for KeyValueDatabase {
fn set_pusher(&self, sender: &UserId, pusher: set_pusher::v3::Pusher) -> Result<()> {

@ -48,10 +51,7 @@ impl service::pusher::Data for KeyValueDatabase {
.collect()
}

fn get_pusher_senderkeys<'a>(
&'a self,
sender: &UserId,
) -> Box<dyn Iterator<Item = Vec<u8>>> {
fn get_pusher_senderkeys<'a>(&'a self, sender: &UserId) -> Box<dyn Iterator<Item = Vec<u8>>> {
let mut prefix = sender.as_bytes().to_vec();
prefix.push(0xff);

@ -1,13 +1,9 @@
use ruma::{RoomId, RoomAliasId, api::client::error::ErrorKind};
use ruma::{api::client::error::ErrorKind, RoomAliasId, RoomId};

use crate::{service, database::KeyValueDatabase, utils, Error, services, Result};
use crate::{database::KeyValueDatabase, service, services, utils, Error, Result};

impl service::rooms::alias::Data for KeyValueDatabase {
fn set_alias(
&self,
alias: &RoomAliasId,
room_id: &RoomId
) -> Result<()> {
fn set_alias(&self, alias: &RoomAliasId, room_id: &RoomId) -> Result<()> {
self.alias_roomid
.insert(alias.alias().as_bytes(), room_id.as_bytes())?;
let mut aliasid = room_id.as_bytes().to_vec();

@ -17,10 +13,7 @@ impl service::rooms::alias::Data for KeyValueDatabase {
Ok(())
}

fn remove_alias(
&self,
alias: &RoomAliasId,
) -> Result<()> {
fn remove_alias(&self, alias: &RoomAliasId) -> Result<()> {
if let Some(room_id) = self.alias_roomid.get(alias.alias().as_bytes())? {
let mut prefix = room_id.to_vec();
prefix.push(0xff);

@ -38,10 +31,7 @@ impl service::rooms::alias::Data for KeyValueDatabase {
Ok(())
}

fn resolve_local_alias(
&self,
alias: &RoomAliasId
) -> Result<Option<Box<RoomId>>> {
fn resolve_local_alias(&self, alias: &RoomAliasId) -> Result<Option<Box<RoomId>>> {
self.alias_roomid
.get(alias.alias().as_bytes())?
.map(|bytes| {

@ -1,6 +1,6 @@
use std::{collections::HashSet, mem::size_of, sync::Arc};

use crate::{service, database::KeyValueDatabase, Result, utils};
use crate::{database::KeyValueDatabase, service, utils, Result};

impl service::rooms::auth_chain::Data for KeyValueDatabase {
fn get_cached_eventid_authchain(&self, key: &[u64]) -> Result<Option<Arc<HashSet<u64>>>> {

@ -12,14 +12,13 @@ impl service::rooms::auth_chain::Data for KeyValueDatabase {
// We only save auth chains for single events in the db
if key.len() == 1 {
// Check DB cache
let chain = self.shorteventid_authchain
let chain = self
.shorteventid_authchain
.get(&key[0].to_be_bytes())?
.map(|chain| {
chain
.chunks_exact(size_of::<u64>())
.map(|chunk| {
utils::u64_from_bytes(chunk).expect("byte length is correct")
})
.map(|chunk| utils::u64_from_bytes(chunk).expect("byte length is correct"))
.collect()
});

@ -37,7 +36,6 @@ impl service::rooms::auth_chain::Data for KeyValueDatabase {
}

Ok(None)

}

fn cache_auth_chain(&self, key: Vec<u64>, auth_chain: Arc<HashSet<u64>>) -> Result<()> {

@ -53,7 +51,10 @@ impl service::rooms::auth_chain::Data for KeyValueDatabase {
}

// Cache in RAM
self.auth_chain_cache.lock().unwrap().insert(key, auth_chain);
self.auth_chain_cache
.lock()
.unwrap()
.insert(key, auth_chain);

Ok(())
}

@ -1,6 +1,6 @@
use ruma::RoomId;

use crate::{service, database::KeyValueDatabase, utils, Error, Result};
use crate::{database::KeyValueDatabase, service, utils, Error, Result};

impl service::rooms::directory::Data for KeyValueDatabase {
fn set_public(&self, room_id: &RoomId) -> Result<()> {

@ -1,7 +1,7 @@
mod presence;
mod typing;
mod read_receipt;
mod typing;

use crate::{service, database::KeyValueDatabase};
use crate::{database::KeyValueDatabase, service};

impl service::rooms::edus::Data for KeyValueDatabase {}

@ -1,8 +1,8 @@
use std::collections::HashMap;

use ruma::{UserId, RoomId, events::presence::PresenceEvent, presence::PresenceState, UInt};
use ruma::{events::presence::PresenceEvent, presence::PresenceState, RoomId, UInt, UserId};

use crate::{service, database::KeyValueDatabase, utils, Error, services, Result};
use crate::{database::KeyValueDatabase, service, services, utils, Error, Result};

impl service::rooms::edus::presence::Data for KeyValueDatabase {
fn update_presence(

@ -1,8 +1,10 @@
use std::mem;

use ruma::{UserId, RoomId, events::receipt::ReceiptEvent, serde::Raw, signatures::CanonicalJsonObject};
use ruma::{
events::receipt::ReceiptEvent, serde::Raw, signatures::CanonicalJsonObject, RoomId, UserId,
};

use crate::{database::KeyValueDatabase, service, utils, Error, services, Result};
use crate::{database::KeyValueDatabase, service, services, utils, Error, Result};

impl service::rooms::edus::read_receipt::Data for KeyValueDatabase {
fn readreceipt_update(

@ -50,13 +52,15 @@ impl service::rooms::edus::read_receipt::Data for KeyValueDatabase {
&'a self,
room_id: &RoomId,
since: u64,
) -> Box<dyn Iterator<
Item=Result<(
Box<UserId>,
u64,
Raw<ruma::events::AnySyncEphemeralRoomEvent>,
)>,
>> {
) -> Box<
dyn Iterator<
Item = Result<(
Box<UserId>,
u64,
Raw<ruma::events::AnySyncEphemeralRoomEvent>,
)>,
>,
> {
let mut prefix = room_id.as_bytes().to_vec();
prefix.push(0xff);
let prefix2 = prefix.clone();

@ -64,42 +68,44 @@ impl service::rooms::edus::read_receipt::Data for KeyValueDatabase {
let mut first_possible_edu = prefix.clone();
first_possible_edu.extend_from_slice(&(since + 1).to_be_bytes()); // +1 so we don't send the event at since

Box::new(self.readreceiptid_readreceipt
.iter_from(&first_possible_edu, false)
.take_while(move |(k, _)| k.starts_with(&prefix2))
.map(move |(k, v)| {
let count =
utils::u64_from_bytes(&k[prefix.len()..prefix.len() + mem::size_of::<u64>()])
.map_err(|_| Error::bad_database("Invalid readreceiptid count in db."))?;
let user_id = UserId::parse(
utils::string_from_bytes(&k[prefix.len() + mem::size_of::<u64>() + 1..])
.map_err(|_| {
Error::bad_database("Invalid readreceiptid userid bytes in db.")
})?,
)
Box::new(
self.readreceiptid_readreceipt
.iter_from(&first_possible_edu, false)
.take_while(move |(k, _)| k.starts_with(&prefix2))
.map(move |(k, v)| {
let count = utils::u64_from_bytes(
&k[prefix.len()..prefix.len() + mem::size_of::<u64>()],
)
.map_err(|_| Error::bad_database("Invalid readreceiptid count in db."))?;
let user_id = UserId::parse(
utils::string_from_bytes(&k[prefix.len() + mem::size_of::<u64>() + 1..])
.map_err(|_| {
Error::bad_database("Invalid readreceiptid userid bytes in db.")
})?,
)
.map_err(|_| Error::bad_database("Invalid readreceiptid userid in db."))?;

let mut json = serde_json::from_slice::<CanonicalJsonObject>(&v).map_err(|_| {
Error::bad_database("Read receipt in roomlatestid_roomlatest is invalid json.")
})?;
json.remove("room_id");
let mut json =
serde_json::from_slice::<CanonicalJsonObject>(&v).map_err(|_| {
Error::bad_database(
"Read receipt in roomlatestid_roomlatest is invalid json.",
)
})?;
json.remove("room_id");

Ok((
user_id,
count,
Raw::from_json(
serde_json::value::to_raw_value(&json).expect("json is valid raw value"),
),
))
}))
Ok((
user_id,
count,
Raw::from_json(
serde_json::value::to_raw_value(&json)
.expect("json is valid raw value"),
),
))
}),
)
}

fn private_read_set(
&self,
room_id: &RoomId,
user_id: &UserId,
count: u64,
) -> Result<()> {
fn private_read_set(&self, room_id: &RoomId, user_id: &UserId, count: u64) -> Result<()> {
let mut key = room_id.as_bytes().to_vec();
key.push(0xff);
key.extend_from_slice(user_id.as_bytes());

@ -1,16 +1,11 @@
use std::collections::HashSet;

use ruma::{UserId, RoomId};
use ruma::{RoomId, UserId};

use crate::{database::KeyValueDatabase, service, utils, Error, services, Result};
use crate::{database::KeyValueDatabase, service, services, utils, Error, Result};

impl service::rooms::edus::typing::Data for KeyValueDatabase {
fn typing_add(
&self,
user_id: &UserId,
room_id: &RoomId,
timeout: u64,
) -> Result<()> {
fn typing_add(&self, user_id: &UserId, room_id: &RoomId, timeout: u64) -> Result<()> {
let mut prefix = room_id.as_bytes().to_vec();
prefix.push(0xff);

@ -30,11 +25,7 @@ impl service::rooms::edus::typing::Data for KeyValueDatabase {
Ok(())
}

fn typing_remove(
&self,
user_id: &UserId,
room_id: &RoomId,
) -> Result<()> {
fn typing_remove(&self, user_id: &UserId, room_id: &RoomId) -> Result<()> {
let mut prefix = room_id.as_bytes().to_vec();
prefix.push(0xff);

@ -53,17 +44,16 @@ impl service::rooms::edus::typing::Data for KeyValueDatabase {
}

if found_outdated {
self.roomid_lasttypingupdate
.insert(room_id.as_bytes(), &services().globals.next_count()?.to_be_bytes())?;
self.roomid_lasttypingupdate.insert(
room_id.as_bytes(),
&services().globals.next_count()?.to_be_bytes(),
)?;
}

Ok(())
}

fn last_typing_update(
&self,
room_id: &RoomId,
) -> Result<u64> {
fn last_typing_update(&self, room_id: &RoomId) -> Result<u64> {
Ok(self
.roomid_lasttypingupdate
.get(room_id.as_bytes())?

@ -76,10 +66,7 @@ impl service::rooms::edus::typing::Data for KeyValueDatabase {
.unwrap_or(0))
}

fn typings_all(
&self,
room_id: &RoomId,
) -> Result<HashSet<Box<UserId>>> {
fn typings_all(&self, room_id: &RoomId) -> Result<HashSet<Box<UserId>>> {
let mut prefix = room_id.as_bytes().to_vec();
prefix.push(0xff);

@ -89,7 +76,7 @@ impl service::rooms::edus::typing::Data for KeyValueDatabase {
let user_id = UserId::parse(utils::string_from_bytes(&user_id).map_err(|_| {
Error::bad_database("User ID in typingid_userid is invalid unicode.")
})?)
.map_err(|_| Error::bad_database("User ID in typingid_userid is invalid."))?;
.map_err(|_| Error::bad_database("User ID in typingid_userid is invalid."))?;

user_ids.insert(user_id);
}

@ -1,6 +1,6 @@
use ruma::{UserId, DeviceId, RoomId};
use ruma::{DeviceId, RoomId, UserId};

use crate::{service, database::KeyValueDatabase, Result};
use crate::{database::KeyValueDatabase, service, Result};

impl service::rooms::lazy_loading::Data for KeyValueDatabase {
fn lazy_load_was_sent_before(

@ -1,6 +1,6 @@
use ruma::RoomId;

use crate::{service, database::KeyValueDatabase, Result, services};
use crate::{database::KeyValueDatabase, service, services, Result};

impl service::rooms::metadata::Data for KeyValueDatabase {
fn exists(&self, room_id: &RoomId) -> Result<bool> {

@ -1,6 +1,6 @@
use ruma::{EventId, signatures::CanonicalJsonObject};
use ruma::{signatures::CanonicalJsonObject, EventId};

use crate::{service, database::KeyValueDatabase, PduEvent, Error, Result};
use crate::{database::KeyValueDatabase, service, Error, PduEvent, Result};

impl service::rooms::outlier::Data for KeyValueDatabase {
fn get_outlier_pdu_json(&self, event_id: &EventId) -> Result<Option<CanonicalJsonObject>> {

@ -1,8 +1,8 @@
use std::sync::Arc;

use ruma::{RoomId, EventId};
use ruma::{EventId, RoomId};

use crate::{service, database::KeyValueDatabase, Result};
use crate::{database::KeyValueDatabase, service, Result};

impl service::rooms::pdu_metadata::Data for KeyValueDatabase {
fn mark_as_referenced(&self, room_id: &RoomId, event_ids: &[Arc<EventId>]) -> Result<()> {

@ -2,7 +2,7 @@ use std::mem::size_of;

use ruma::RoomId;

use crate::{service, database::KeyValueDatabase, utils, Result, services};
use crate::{database::KeyValueDatabase, service, services, utils, Result};

impl service::rooms::search::Data for KeyValueDatabase {
fn index_pdu<'a>(&self, shortroomid: u64, pdu_id: &[u8], message_body: String) -> Result<()> {

@ -27,7 +27,9 @@ impl service::rooms::search::Data for KeyValueDatabase {
room_id: &RoomId,
search_string: &str,
) -> Result<Option<(Box<dyn Iterator<Item = Vec<u8>>>, Vec<String>)>> {
let prefix = services().rooms.short
let prefix = services()
.rooms
.short
.get_shortroomid(room_id)?
.expect("room exists")
.to_be_bytes()

@ -63,10 +65,10 @@ impl service::rooms::search::Data for KeyValueDatabase {
};

let mapped = common_elements.map(move |id| {
let mut pduid = prefix_clone.clone();
pduid.extend_from_slice(&id);
pduid
});
let mut pduid = prefix_clone.clone();
pduid.extend_from_slice(&id);
pduid
});

Ok(Some((Box::new(mapped), words)))
}

@ -1,14 +1,11 @@
use std::sync::Arc;

use ruma::{EventId, events::StateEventType, RoomId};
use ruma::{events::StateEventType, EventId, RoomId};

use crate::{Result, database::KeyValueDatabase, service, utils, Error, services};
use crate::{database::KeyValueDatabase, service, services, utils, Error, Result};

impl service::rooms::short::Data for KeyValueDatabase {
fn get_or_create_shorteventid(
&self,
event_id: &EventId,
) -> Result<u64> {
fn get_or_create_shorteventid(&self, event_id: &EventId) -> Result<u64> {
if let Some(short) = self.eventidshort_cache.lock().unwrap().get_mut(event_id) {
return Ok(*short);
}

@ -180,10 +177,7 @@ impl service::rooms::short::Data for KeyValueDatabase {
}

/// Returns (shortstatehash, already_existed)
fn get_or_create_shortstatehash(
&self,
state_hash: &[u8],
) -> Result<(u64, bool)> {
fn get_or_create_shortstatehash(&self, state_hash: &[u8]) -> Result<(u64, bool)> {
Ok(match self.statehash_shortstatehash.get(state_hash)? {
Some(shortstatehash) => (
utils::u64_from_bytes(&shortstatehash)

@ -209,10 +203,7 @@ impl service::rooms::short::Data for KeyValueDatabase {
.transpose()
}

fn get_or_create_shortroomid(
&self,
room_id: &RoomId,
) -> Result<u64> {
fn get_or_create_shortroomid(&self, room_id: &RoomId) -> Result<u64> {
Ok(match self.roomid_shortroomid.get(room_id.as_bytes())? {
Some(short) => utils::u64_from_bytes(&short)
.map_err(|_| Error::bad_database("Invalid shortroomid in db."))?,

@ -1,10 +1,10 @@
use ruma::{RoomId, EventId};
use tokio::sync::MutexGuard;
use std::sync::Arc;
use ruma::{EventId, RoomId};
use std::collections::HashSet;
use std::fmt::Debug;
use std::sync::Arc;
use tokio::sync::MutexGuard;

use crate::{service, database::KeyValueDatabase, utils, Error, Result};
use crate::{database::KeyValueDatabase, service, utils, Error, Result};

impl service::rooms::state::Data for KeyValueDatabase {
fn get_room_shortstatehash(&self, room_id: &RoomId) -> Result<Option<u64>> {

@ -17,9 +17,12 @@ impl service::rooms::state::Data for KeyValueDatabase {
})
}

fn set_room_state(&self, room_id: &RoomId, new_shortstatehash: u64,
fn set_room_state(
&self,
room_id: &RoomId,
new_shortstatehash: u64,
_mutex_lock: &MutexGuard<'_, ()>, // Take mutex guard to make sure users get the room state mutex
) -> Result<()> {
) -> Result<()> {
self.roomid_shortstatehash
.insert(room_id.as_bytes(), &new_shortstatehash.to_be_bytes())?;
Ok(())

@ -1,13 +1,18 @@
use std::{collections::{BTreeMap, HashMap}, sync::Arc};
use std::{
collections::{BTreeMap, HashMap},
sync::Arc,
};

use crate::{database::KeyValueDatabase, service, PduEvent, Error, utils, Result, services};
use crate::{database::KeyValueDatabase, service, services, utils, Error, PduEvent, Result};
use async_trait::async_trait;
use ruma::{EventId, events::StateEventType, RoomId};
use ruma::{events::StateEventType, EventId, RoomId};

#[async_trait]
impl service::rooms::state_accessor::Data for KeyValueDatabase {
async fn state_full_ids(&self, shortstatehash: u64) -> Result<BTreeMap<u64, Arc<EventId>>> {
let full_state = services().rooms.state_compressor
let full_state = services()
.rooms
.state_compressor
.load_shortstatehash_info(shortstatehash)?
.pop()
.expect("there is always one layer")

@ -15,7 +20,10 @@ impl service::rooms::state_accessor::Data for KeyValueDatabase {
let mut result = BTreeMap::new();
let mut i = 0;
for compressed in full_state.into_iter() {
let parsed = services().rooms.state_compressor.parse_compressed_state_event(compressed)?;
let parsed = services()
.rooms
.state_compressor
.parse_compressed_state_event(compressed)?;
result.insert(parsed.0, parsed.1);

i += 1;

@ -30,7 +38,9 @@ impl service::rooms::state_accessor::Data for KeyValueDatabase {
&self,
shortstatehash: u64,
) -> Result<HashMap<(StateEventType, String), Arc<PduEvent>>> {
let full_state = services().rooms.state_compressor
let full_state = services()
.rooms
.state_compressor
.load_shortstatehash_info(shortstatehash)?
.pop()
.expect("there is always one layer")

@ -39,7 +49,10 @@ impl service::rooms::state_accessor::Data for KeyValueDatabase {
let mut result = HashMap::new();
let mut i = 0;
for compressed in full_state {
let (_, eventid) = services().rooms.state_compressor.parse_compressed_state_event(compressed)?;
let (_, eventid) = services()
.rooms
.state_compressor
.parse_compressed_state_event(compressed)?;
if let Some(pdu) = services().rooms.timeline.get_pdu(&eventid)? {
result.insert(
(

@ -69,11 +82,17 @@ impl service::rooms::state_accessor::Data for KeyValueDatabase {
event_type: &StateEventType,
state_key: &str,
) -> Result<Option<Arc<EventId>>> {
let shortstatekey = match services().rooms.short.get_shortstatekey(event_type, state_key)? {
let shortstatekey = match services()
.rooms
.short
.get_shortstatekey(event_type, state_key)?
{
Some(s) => s,
None => return Ok(None),
};
let full_state = services().rooms.state_compressor
let full_state = services()
.rooms
.state_compressor
.load_shortstatehash_info(shortstatehash)?
.pop()
.expect("there is always one layer")

@ -82,7 +101,10 @@ impl service::rooms::state_accessor::Data for KeyValueDatabase {
.into_iter()
.find(|bytes| bytes.starts_with(&shortstatekey.to_be_bytes()))
.and_then(|compressed| {
services().rooms.state_compressor.parse_compressed_state_event(compressed)
services()
.rooms
.state_compressor
.parse_compressed_state_event(compressed)
.ok()
.map(|(_, id)| id)
}))

@ -96,7 +118,9 @@ impl service::rooms::state_accessor::Data for KeyValueDatabase {
state_key: &str,
) -> Result<Option<Arc<PduEvent>>> {
self.state_get_id(shortstatehash, event_type, state_key)?
.map_or(Ok(None), |event_id| services().rooms.timeline.get_pdu(&event_id))
.map_or(Ok(None), |event_id| {
services().rooms.timeline.get_pdu(&event_id)
})
}

/// Returns the state hash for this pdu.

@ -122,7 +146,9 @@ impl service::rooms::state_accessor::Data for KeyValueDatabase {
&self,
room_id: &RoomId,
) -> Result<HashMap<(StateEventType, String), Arc<PduEvent>>> {
if let Some(current_shortstatehash) = services().rooms.state.get_room_shortstatehash(room_id)? {
if let Some(current_shortstatehash) =
services().rooms.state.get_room_shortstatehash(room_id)?
{
self.state_full(current_shortstatehash).await
} else {
Ok(HashMap::new())

@ -136,7 +162,9 @@ impl service::rooms::state_accessor::Data for KeyValueDatabase {
event_type: &StateEventType,
state_key: &str,
) -> Result<Option<Arc<EventId>>> {
if let Some(current_shortstatehash) = services().rooms.state.get_room_shortstatehash(room_id)? {
if let Some(current_shortstatehash) =
services().rooms.state.get_room_shortstatehash(room_id)?
{
self.state_get_id(current_shortstatehash, event_type, state_key)
} else {
Ok(None)

@ -150,7 +178,9 @@ impl service::rooms::state_accessor::Data for KeyValueDatabase {
event_type: &StateEventType,
state_key: &str,
) -> Result<Option<Arc<PduEvent>>> {
if let Some(current_shortstatehash) = services().rooms.state.get_room_shortstatehash(room_id)? {
if let Some(current_shortstatehash) =
services().rooms.state.get_room_shortstatehash(room_id)?
{
self.state_get(current_shortstatehash, event_type, state_key)
} else {
Ok(None)

@ -1,9 +1,13 @@
use std::{collections::HashSet, sync::Arc};

use regex::Regex;
use ruma::{UserId, RoomId, events::{AnyStrippedStateEvent, AnySyncStateEvent}, serde::Raw, ServerName};
use ruma::{
events::{AnyStrippedStateEvent, AnySyncStateEvent},
serde::Raw,
RoomId, ServerName, UserId,
};

use crate::{service, database::KeyValueDatabase, services, Result, Error, utils};
use crate::{database::KeyValueDatabase, service, services, utils, Error, Result};

impl service::rooms::state_cache::Data for KeyValueDatabase {
fn mark_as_once_joined(&self, user_id: &UserId, room_id: &RoomId) -> Result<()> {

@ -31,8 +35,13 @@ impl service::rooms::state_cache::Data for KeyValueDatabase {

Ok(())
}

fn mark_as_invited(&self, user_id: &UserId, room_id: &RoomId, last_state: Option<Vec<Raw<AnyStrippedStateEvent>>>) -> Result<()> {

fn mark_as_invited(
&self,
user_id: &UserId,
room_id: &RoomId,
last_state: Option<Vec<Raw<AnyStrippedStateEvent>>>,
) -> Result<()> {
let mut roomuser_id = room_id.as_bytes().to_vec();
roomuser_id.push(0xff);
roomuser_id.extend_from_slice(user_id.as_bytes());

@ -46,8 +55,10 @@ impl service::rooms::state_cache::Data for KeyValueDatabase {
&serde_json::to_vec(&last_state.unwrap_or_default())
.expect("state to bytes always works"),
)?;
self.roomuserid_invitecount
.insert(&roomuser_id, &services().globals.next_count()?.to_be_bytes())?;
self.roomuserid_invitecount.insert(
&roomuser_id,
&services().globals.next_count()?.to_be_bytes(),
)?;
self.userroomid_joined.remove(&userroom_id)?;
self.roomuserid_joined.remove(&roomuser_id)?;
self.userroomid_leftstate.remove(&userroom_id)?;

@ -69,8 +80,10 @@ impl service::rooms::state_cache::Data for KeyValueDatabase {
&userroom_id,
&serde_json::to_vec(&Vec::<Raw<AnySyncStateEvent>>::new()).unwrap(),
)?; // TODO
self.roomuserid_leftcount
.insert(&roomuser_id, &services().globals.next_count()?.to_be_bytes())?;
self.roomuserid_leftcount.insert(
&roomuser_id,
&services().globals.next_count()?.to_be_bytes(),
)?;
self.userroomid_joined.remove(&userroom_id)?;
self.roomuserid_joined.remove(&roomuser_id)?;
self.userroomid_invitestate.remove(&userroom_id)?;

@ -324,21 +337,25 @@ impl service::rooms::state_cache::Data for KeyValueDatabase {
let mut prefix = room_id.as_bytes().to_vec();
prefix.push(0xff);

Box::new(self.roomuseroncejoinedids
.scan_prefix(prefix)
.map(|(key, _)| {
UserId::parse(
utils::string_from_bytes(
key.rsplit(|&b| b == 0xff)
.next()
.expect("rsplit always returns an element"),
Box::new(
self.roomuseroncejoinedids
.scan_prefix(prefix)
.map(|(key, _)| {
UserId::parse(
utils::string_from_bytes(
key.rsplit(|&b| b == 0xff)
.next()
.expect("rsplit always returns an element"),
)
.map_err(|_| {
Error::bad_database(
"User ID in room_useroncejoined is invalid unicode.",
)
})?,
)
.map_err(|_| {
Error::bad_database("User ID in room_useroncejoined is invalid unicode.")
})?,
)
.map_err(|_| Error::bad_database("User ID in room_useroncejoined is invalid."))
}))
.map_err(|_| Error::bad_database("User ID in room_useroncejoined is invalid."))
}),
)
}

/// Returns an iterator over all invited members of a room.

@ -350,21 +367,23 @@ impl service::rooms::state_cache::Data for KeyValueDatabase {
let mut prefix = room_id.as_bytes().to_vec();
prefix.push(0xff);

Box::new(self.roomuserid_invitecount
.scan_prefix(prefix)
.map(|(key, _)| {
UserId::parse(
utils::string_from_bytes(
key.rsplit(|&b| b == 0xff)
.next()
.expect("rsplit always returns an element"),
Box::new(
self.roomuserid_invitecount
.scan_prefix(prefix)
.map(|(key, _)| {
UserId::parse(
utils::string_from_bytes(
key.rsplit(|&b| b == 0xff)
.next()
.expect("rsplit always returns an element"),
)
.map_err(|_| {
Error::bad_database("User ID in roomuserid_invited is invalid unicode.")
})?,
)
.map_err(|_| {
Error::bad_database("User ID in roomuserid_invited is invalid unicode.")
})?,
)
.map_err(|_| Error::bad_database("User ID in roomuserid_invited is invalid."))
}))
.map_err(|_| Error::bad_database("User ID in roomuserid_invited is invalid."))
}),
)
}

#[tracing::instrument(skip(self))]

@ -403,21 +422,23 @@ impl service::rooms::state_cache::Data for KeyValueDatabase {
&'a self,
user_id: &UserId,
) -> Box<dyn Iterator<Item = Result<Box<RoomId>>> + 'a> {
Box::new(self.userroomid_joined
.scan_prefix(user_id.as_bytes().to_vec())
.map(|(key, _)| {
RoomId::parse(
utils::string_from_bytes(
key.rsplit(|&b| b == 0xff)
.next()
.expect("rsplit always returns an element"),
Box::new(
self.userroomid_joined
.scan_prefix(user_id.as_bytes().to_vec())
.map(|(key, _)| {
RoomId::parse(
utils::string_from_bytes(
key.rsplit(|&b| b == 0xff)
.next()
.expect("rsplit always returns an element"),
)
.map_err(|_| {
Error::bad_database("Room ID in userroomid_joined is invalid unicode.")
})?,
)
.map_err(|_| {
Error::bad_database("Room ID in userroomid_joined is invalid unicode.")
})?,
)
.map_err(|_| Error::bad_database("Room ID in userroomid_joined is invalid."))
}))
.map_err(|_| Error::bad_database("Room ID in userroomid_joined is invalid."))
}),
)
}

/// Returns an iterator over all rooms a user was invited to.

@ -429,26 +450,31 @@ impl service::rooms::state_cache::Data for KeyValueDatabase {
let mut prefix = user_id.as_bytes().to_vec();
prefix.push(0xff);

Box::new(self.userroomid_invitestate
.scan_prefix(prefix)
.map(|(key, state)| {
let room_id = RoomId::parse(
utils::string_from_bytes(
key.rsplit(|&b| b == 0xff)
.next()
.expect("rsplit always returns an element"),
Box::new(
self.userroomid_invitestate
.scan_prefix(prefix)
.map(|(key, state)| {
let room_id = RoomId::parse(
utils::string_from_bytes(
key.rsplit(|&b| b == 0xff)
.next()
.expect("rsplit always returns an element"),
)
.map_err(|_| {
Error::bad_database("Room ID in userroomid_invited is invalid unicode.")
})?,
)
.map_err(|_| {
Error::bad_database("Room ID in userroomid_invited is invalid unicode.")
})?,
)
.map_err(|_| Error::bad_database("Room ID in userroomid_invited is invalid."))?;
Error::bad_database("Room ID in userroomid_invited is invalid.")
})?;

let state = serde_json::from_slice(&state)
.map_err(|_| Error::bad_database("Invalid state in userroomid_invitestate."))?;
let state = serde_json::from_slice(&state).map_err(|_| {
Error::bad_database("Invalid state in userroomid_invitestate.")
})?;

Ok((room_id, state))
}))
Ok((room_id, state))
}),
)
}

#[tracing::instrument(skip(self))]

@ -502,26 +528,31 @@ impl service::rooms::state_cache::Data for KeyValueDatabase {
let mut prefix = user_id.as_bytes().to_vec();
prefix.push(0xff);

Box::new(self.userroomid_leftstate
.scan_prefix(prefix)
.map(|(key, state)| {
let room_id = RoomId::parse(
utils::string_from_bytes(
key.rsplit(|&b| b == 0xff)
.next()
.expect("rsplit always returns an element"),
Box::new(
self.userroomid_leftstate
.scan_prefix(prefix)
.map(|(key, state)| {
let room_id = RoomId::parse(
utils::string_from_bytes(
key.rsplit(|&b| b == 0xff)
.next()
.expect("rsplit always returns an element"),
)
.map_err(|_| {
Error::bad_database("Room ID in userroomid_invited is invalid unicode.")
})?,
)
.map_err(|_| {
Error::bad_database("Room ID in userroomid_invited is invalid unicode.")
})?,
)
.map_err(|_| Error::bad_database("Room ID in userroomid_invited is invalid."))?;
Error::bad_database("Room ID in userroomid_invited is invalid.")
})?;

let state = serde_json::from_slice(&state)
.map_err(|_| Error::bad_database("Invalid state in userroomid_leftstate."))?;
let state = serde_json::from_slice(&state).map_err(|_| {
Error::bad_database("Invalid state in userroomid_leftstate.")
})?;

Ok((room_id, state))
}))
Ok((room_id, state))
}),
)
}

#[tracing::instrument(skip(self))]

@ -1,6 +1,10 @@
use std::{collections::HashSet, mem::size_of};

use crate::{service::{self, rooms::state_compressor::data::StateDiff}, database::KeyValueDatabase, Error, utils, Result};
use crate::{
database::KeyValueDatabase,
service::{self, rooms::state_compressor::data::StateDiff},
utils, Error, Result,
};

impl service::rooms::state_compressor::Data for KeyValueDatabase {
fn get_statediff(&self, shortstatehash: u64) -> Result<StateDiff> {

@ -10,11 +14,7 @@ impl service::rooms::state_compressor::Data for KeyValueDatabase {
.ok_or_else(|| Error::bad_database("State hash does not exist"))?;
let parent =
utils::u64_from_bytes(&value[0..size_of::<u64>()]).expect("bytes have right length");
let parent = if parent != 0 {
Some(parent)
} else {
None
};
let parent = if parent != 0 { Some(parent) } else { None };

let mut add_mode = true;
let mut added = HashSet::new();

@ -35,7 +35,11 @@ impl service::rooms::state_compressor::Data for KeyValueDatabase {
i += 2 * size_of::<u64>();
}

Ok(StateDiff { parent, added, removed })
Ok(StateDiff {
parent,
added,
removed,
})
}

fn save_statediff(&self, shortstatehash: u64, diff: StateDiff) -> Result<()> {

@ -1,13 +1,17 @@
use std::{collections::hash_map, mem::size_of, sync::Arc};

use ruma::{UserId, RoomId, api::client::error::ErrorKind, EventId, signatures::CanonicalJsonObject};
use ruma::{
api::client::error::ErrorKind, signatures::CanonicalJsonObject, EventId, RoomId, UserId,
};
use tracing::error;

use crate::{service, database::KeyValueDatabase, utils, Error, PduEvent, Result, services};
use crate::{database::KeyValueDatabase, service, services, utils, Error, PduEvent, Result};

impl service::rooms::timeline::Data for KeyValueDatabase {
fn first_pdu_in_room(&self, room_id: &RoomId) -> Result<Option<Arc<PduEvent>>> {
let prefix = services().rooms.short
let prefix = services()
.rooms
.short
.get_shortroomid(room_id)?
.expect("room exists")
.to_be_bytes()

@ -82,10 +86,7 @@ impl service::rooms::timeline::Data for KeyValueDatabase {
}

/// Returns the json of a pdu.
fn get_non_outlier_pdu_json(
&self,
event_id: &EventId,
) -> Result<Option<CanonicalJsonObject>> {
fn get_non_outlier_pdu_json(&self, event_id: &EventId) -> Result<Option<CanonicalJsonObject>> {
self.eventid_pduid
.get(event_id.as_bytes())?
.map(|pduid| {

@ -187,10 +188,17 @@ impl service::rooms::timeline::Data for KeyValueDatabase {
.map_err(|_| Error::bad_database("PDU has invalid count bytes."))
}

fn append_pdu(&self, pdu_id: &[u8], pdu: &PduEvent, json: &CanonicalJsonObject, count: u64) -> Result<()> {
fn append_pdu(
&self,
pdu_id: &[u8],
pdu: &PduEvent,
json: &CanonicalJsonObject,
count: u64,
) -> Result<()> {
self.pduid_pdu.insert(
pdu_id,
&serde_json::to_vec(json).expect("CanonicalJsonObject is always a valid"))?;
&serde_json::to_vec(json).expect("CanonicalJsonObject is always a valid"),
)?;

self.lasttimelinecount_cache
.lock()

@ -209,7 +217,8 @@ impl service::rooms::timeline::Data for KeyValueDatabase {
if self.pduid_pdu.get(pdu_id)?.is_some() {
self.pduid_pdu.insert(
pdu_id,
&serde_json::to_vec(pdu).expect("CanonicalJsonObject is always a valid"))?;
&serde_json::to_vec(pdu).expect("CanonicalJsonObject is always a valid"),
)?;
Ok(())
} else {
Err(Error::BadRequest(

@ -227,7 +236,9 @@ impl service::rooms::timeline::Data for KeyValueDatabase {
room_id: &RoomId,
since: u64,
) -> Result<Box<dyn Iterator<Item = Result<(Vec<u8>, PduEvent)>>>> {
let prefix = services().rooms.short
let prefix = services()
.rooms
.short
.get_shortroomid(room_id)?
.expect("room exists")
.to_be_bytes()

@ -239,18 +250,19 @@ impl service::rooms::timeline::Data for KeyValueDatabase {

let user_id = user_id.to_owned();

Ok(Box::new(self
.pduid_pdu
.iter_from(&first_pdu_id, false)
.take_while(move |(k, _)| k.starts_with(&prefix))
.map(move |(pdu_id, v)| {
let mut pdu = serde_json::from_slice::<PduEvent>(&v)
.map_err(|_| Error::bad_database("PDU in db is invalid."))?;
if pdu.sender != user_id {
pdu.remove_transaction_id()?;
}
Ok((pdu_id, pdu))
})))
Ok(Box::new(
self.pduid_pdu
.iter_from(&first_pdu_id, false)
.take_while(move |(k, _)| k.starts_with(&prefix))
.map(move |(pdu_id, v)| {
let mut pdu = serde_json::from_slice::<PduEvent>(&v)
.map_err(|_| Error::bad_database("PDU in db is invalid."))?;
if pdu.sender != user_id {
pdu.remove_transaction_id()?;
}
Ok((pdu_id, pdu))
}),
))
}

/// Returns an iterator over all events and their tokens in a room that happened before the

@ -262,7 +274,9 @@ impl service::rooms::timeline::Data for KeyValueDatabase {
until: u64,
) -> Result<Box<dyn Iterator<Item = Result<(Vec<u8>, PduEvent)>>>> {
// Create the first part of the full pdu id
let prefix = services().rooms.short
let prefix = services()
.rooms
.short
.get_shortroomid(room_id)?
.expect("room exists")
.to_be_bytes()

@ -275,18 +289,19 @@ impl service::rooms::timeline::Data for KeyValueDatabase {

let user_id = user_id.to_owned();

Ok(Box::new(self
.pduid_pdu
.iter_from(current, true)
.take_while(move |(k, _)| k.starts_with(&prefix))
.map(move |(pdu_id, v)| {
let mut pdu = serde_json::from_slice::<PduEvent>(&v)
.map_err(|_| Error::bad_database("PDU in db is invalid."))?;
if pdu.sender != user_id {
pdu.remove_transaction_id()?;
}
Ok((pdu_id, pdu))
})))
Ok(Box::new(
self.pduid_pdu
.iter_from(current, true)
.take_while(move |(k, _)| k.starts_with(&prefix))
.map(move |(pdu_id, v)| {
let mut pdu = serde_json::from_slice::<PduEvent>(&v)
.map_err(|_| Error::bad_database("PDU in db is invalid."))?;
if pdu.sender != user_id {
pdu.remove_transaction_id()?;
}
Ok((pdu_id, pdu))
}),
))
}

fn pdus_after<'a>(

@ -296,7 +311,9 @@ impl service::rooms::timeline::Data for KeyValueDatabase {
from: u64,
) -> Result<Box<dyn Iterator<Item = Result<(Vec<u8>, PduEvent)>>>> {
// Create the first part of the full pdu id
let prefix = services().rooms.short
let prefix = services()
.rooms
.short
.get_shortroomid(room_id)?
.expect("room exists")
.to_be_bytes()

@ -309,21 +326,27 @@ impl service::rooms::timeline::Data for KeyValueDatabase {

let user_id = user_id.to_owned();

Ok(Box::new(self
.pduid_pdu
.iter_from(current, false)
.take_while(move |(k, _)| k.starts_with(&prefix))
.map(move |(pdu_id, v)| {
let mut pdu = serde_json::from_slice::<PduEvent>(&v)
.map_err(|_| Error::bad_database("PDU in db is invalid."))?;
if pdu.sender != user_id {
pdu.remove_transaction_id()?;
}
Ok((pdu_id, pdu))
})))
Ok(Box::new(
self.pduid_pdu
.iter_from(current, false)
.take_while(move |(k, _)| k.starts_with(&prefix))
.map(move |(pdu_id, v)| {
let mut pdu = serde_json::from_slice::<PduEvent>(&v)
.map_err(|_| Error::bad_database("PDU in db is invalid."))?;
if pdu.sender != user_id {
pdu.remove_transaction_id()?;
}
Ok((pdu_id, pdu))
}),
))
}

fn increment_notification_counts(&self, room_id: &RoomId, notifies: Vec<Box<UserId>>, highlights: Vec<Box<UserId>>) -> Result<()> {
fn increment_notification_counts(
&self,
room_id: &RoomId,
notifies: Vec<Box<UserId>>,
highlights: Vec<Box<UserId>>,
) -> Result<()> {
let notifies_batch = Vec::new();
let highlights_batch = Vec::new();
for user in notifies {
@ -1,6 +1,6 @@
|
|||
use ruma::{UserId, RoomId};
|
||||
use ruma::{RoomId, UserId};
|
||||
|
||||
use crate::{service, database::KeyValueDatabase, utils, Error, Result, services};
|
||||
use crate::{database::KeyValueDatabase, service, services, utils, Error, Result};
|
||||
|
||||
impl service::rooms::user::Data for KeyValueDatabase {
|
||||
fn reset_notification_counts(&self, user_id: &UserId, room_id: &RoomId) -> Result<()> {
|
||||
|
@ -50,7 +50,11 @@ impl service::rooms::user::Data for KeyValueDatabase {
|
|||
token: u64,
|
||||
shortstatehash: u64,
|
||||
) -> Result<()> {
|
||||
let shortroomid = services().rooms.short.get_shortroomid(room_id)?.expect("room exists");
|
||||
let shortroomid = services()
|
||||
.rooms
|
||||
.short
|
||||
.get_shortroomid(room_id)?
|
||||
.expect("room exists");
|
||||
|
||||
let mut key = shortroomid.to_be_bytes().to_vec();
|
||||
key.extend_from_slice(&token.to_be_bytes());
|
||||
|
@ -60,7 +64,11 @@ impl service::rooms::user::Data for KeyValueDatabase {
|
|||
}
|
||||
|
||||
fn get_token_shortstatehash(&self, room_id: &RoomId, token: u64) -> Result<Option<u64>> {
|
||||
let shortroomid = services().rooms.short.get_shortroomid(room_id)?.expect("room exists");
|
||||
let shortroomid = services()
|
||||
.rooms
|
||||
.short
|
||||
.get_shortroomid(room_id)?
|
||||
.expect("room exists");
|
||||
|
||||
let mut key = shortroomid.to_be_bytes().to_vec();
|
||||
key.extend_from_slice(&token.to_be_bytes());
|
||||
|
@ -102,13 +110,15 @@ impl service::rooms::user::Data for KeyValueDatabase {
|
|||
});
|
||||
|
||||
// We use the default compare function because keys are sorted correctly (not reversed)
|
||||
Ok(Box::new(Box::new(utils::common_elements(iterators, Ord::cmp)
|
||||
.expect("users is not empty")
|
||||
.map(|bytes| {
|
||||
RoomId::parse(utils::string_from_bytes(&*bytes).map_err(|_| {
|
||||
Error::bad_database("Invalid RoomId bytes in userroomid_joined")
|
||||
})?)
|
||||
.map_err(|_| Error::bad_database("Invalid RoomId in userroomid_joined."))
|
||||
}))))
|
||||
Ok(Box::new(Box::new(
|
||||
utils::common_elements(iterators, Ord::cmp)
|
||||
.expect("users is not empty")
|
||||
.map(|bytes| {
|
||||
RoomId::parse(utils::string_from_bytes(&*bytes).map_err(|_| {
|
||||
Error::bad_database("Invalid RoomId bytes in userroomid_joined")
|
||||
})?)
|
||||
.map_err(|_| Error::bad_database("Invalid RoomId in userroomid_joined."))
|
||||
}),
|
||||
)))
|
||||
}
|
||||
}
|
||||
|
|
|
@ -1,6 +1,6 @@
|
|||
use ruma::{UserId, DeviceId, TransactionId};
|
||||
use ruma::{DeviceId, TransactionId, UserId};
|
||||
|
||||
use crate::{service, database::KeyValueDatabase, Result};
|
||||
use crate::{database::KeyValueDatabase, service, Result};
|
||||
|
||||
impl service::transaction_ids::Data for KeyValueDatabase {
|
||||
fn add_txnid(
|
||||
|
|
|
@ -1,4 +1,8 @@
|
|||
use ruma::{UserId, DeviceId, signatures::CanonicalJsonValue, api::client::{uiaa::UiaaInfo, error::ErrorKind}};
|
||||
use ruma::{
|
||||
api::client::{error::ErrorKind, uiaa::UiaaInfo},
|
||||
signatures::CanonicalJsonValue,
|
||||
DeviceId, UserId,
|
||||
};
|
||||
|
||||
use crate::{database::KeyValueDatabase, service, Error, Result};
|
||||
|
||||
|
|
|
@ -1,9 +1,20 @@
|
|||
use std::{mem::size_of, collections::BTreeMap};
|
||||
use std::{collections::BTreeMap, mem::size_of};
|
||||
|
||||
use ruma::{api::client::{filter::IncomingFilterDefinition, error::ErrorKind, device::Device}, UserId, RoomAliasId, MxcUri, DeviceId, MilliSecondsSinceUnixEpoch, DeviceKeyId, encryption::{OneTimeKey, CrossSigningKey, DeviceKeys}, serde::Raw, events::{AnyToDeviceEvent, StateEventType}, DeviceKeyAlgorithm, UInt};
|
||||
use ruma::{
|
||||
api::client::{device::Device, error::ErrorKind, filter::IncomingFilterDefinition},
|
||||
encryption::{CrossSigningKey, DeviceKeys, OneTimeKey},
|
||||
events::{AnyToDeviceEvent, StateEventType},
|
||||
serde::Raw,
|
||||
DeviceId, DeviceKeyAlgorithm, DeviceKeyId, MilliSecondsSinceUnixEpoch, MxcUri, RoomAliasId,
|
||||
UInt, UserId,
|
||||
};
|
||||
use tracing::warn;
|
||||
|
||||
use crate::{service::{self, users::clean_signatures}, database::KeyValueDatabase, Error, utils, services, Result};
|
||||
use crate::{
|
||||
database::KeyValueDatabase,
|
||||
service::{self, users::clean_signatures},
|
||||
services, utils, Error, Result,
|
||||
};
|
||||
|
||||
impl service::users::Data for KeyValueDatabase {
|
||||
/// Check if a user has an account on this homeserver.
|
||||
|
@ -274,18 +285,21 @@ impl service::users::Data for KeyValueDatabase {
|
|||
let mut prefix = user_id.as_bytes().to_vec();
|
||||
prefix.push(0xff);
|
||||
// All devices have metadata
|
||||
Box::new(self.userdeviceid_metadata
|
||||
.scan_prefix(prefix)
|
||||
.map(|(bytes, _)| {
|
||||
Ok(utils::string_from_bytes(
|
||||
bytes
|
||||
.rsplit(|&b| b == 0xff)
|
||||
.next()
|
||||
.ok_or_else(|| Error::bad_database("UserDevice ID in db is invalid."))?,
|
||||
)
|
||||
.map_err(|_| Error::bad_database("Device ID in userdeviceid_metadata is invalid."))?
|
||||
.into())
|
||||
}))
|
||||
Box::new(
|
||||
self.userdeviceid_metadata
|
||||
.scan_prefix(prefix)
|
||||
.map(|(bytes, _)| {
|
||||
Ok(utils::string_from_bytes(
|
||||
bytes.rsplit(|&b| b == 0xff).next().ok_or_else(|| {
|
||||
Error::bad_database("UserDevice ID in db is invalid.")
|
||||
})?,
|
||||
)
|
||||
.map_err(|_| {
|
||||
Error::bad_database("Device ID in userdeviceid_metadata is invalid.")
|
||||
})?
|
||||
.into())
|
||||
}),
|
||||
)
|
||||
}
|
||||
|
||||
/// Replaces the access token of one device.
|
||||
|
@ -341,8 +355,10 @@ impl service::users::Data for KeyValueDatabase {
|
|||
&serde_json::to_vec(&one_time_key_value).expect("OneTimeKey::to_vec always works"),
|
||||
)?;
|
||||
|
||||
self.userid_lastonetimekeyupdate
|
||||
.insert(user_id.as_bytes(), &services().globals.next_count()?.to_be_bytes())?;
|
||||
self.userid_lastonetimekeyupdate.insert(
|
||||
user_id.as_bytes(),
|
||||
&services().globals.next_count()?.to_be_bytes(),
|
||||
)?;
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
@ -372,8 +388,10 @@ impl service::users::Data for KeyValueDatabase {
|
|||
prefix.extend_from_slice(key_algorithm.as_ref().as_bytes());
|
||||
prefix.push(b':');
|
||||
|
||||
self.userid_lastonetimekeyupdate
|
||||
.insert(user_id.as_bytes(), &services().globals.next_count()?.to_be_bytes())?;
|
||||
self.userid_lastonetimekeyupdate.insert(
|
||||
user_id.as_bytes(),
|
||||
&services().globals.next_count()?.to_be_bytes(),
|
||||
)?;
|
||||
|
||||
self.onetimekeyid_onetimekeys
|
||||
.scan_prefix(prefix)
|
||||
|
@ -617,38 +635,47 @@ impl service::users::Data for KeyValueDatabase {
|
|||
|
||||
let to = to.unwrap_or(u64::MAX);
|
||||
|
||||
Box::new(self.keychangeid_userid
|
||||
.iter_from(&start, false)
|
||||
.take_while(move |(k, _)| {
|
||||
k.starts_with(&prefix)
|
||||
&& if let Some(current) = k.splitn(2, |&b| b == 0xff).nth(1) {
|
||||
if let Ok(c) = utils::u64_from_bytes(current) {
|
||||
c <= to
|
||||
Box::new(
|
||||
self.keychangeid_userid
|
||||
.iter_from(&start, false)
|
||||
.take_while(move |(k, _)| {
|
||||
k.starts_with(&prefix)
|
||||
&& if let Some(current) = k.splitn(2, |&b| b == 0xff).nth(1) {
|
||||
if let Ok(c) = utils::u64_from_bytes(current) {
|
||||
c <= to
|
||||
} else {
|
||||
warn!("BadDatabase: Could not parse keychangeid_userid bytes");
|
||||
false
|
||||
}
|
||||
} else {
|
||||
warn!("BadDatabase: Could not parse keychangeid_userid bytes");
|
||||
warn!("BadDatabase: Could not parse keychangeid_userid");
|
||||
false
|
||||
}
|
||||
} else {
|
||||
warn!("BadDatabase: Could not parse keychangeid_userid");
|
||||
false
|
||||
}
|
||||
})
|
||||
.map(|(_, bytes)| {
|
||||
UserId::parse(utils::string_from_bytes(&bytes).map_err(|_| {
|
||||
Error::bad_database("User ID in devicekeychangeid_userid is invalid unicode.")
|
||||
})?)
|
||||
.map_err(|_| Error::bad_database("User ID in devicekeychangeid_userid is invalid."))
|
||||
}))
|
||||
})
|
||||
.map(|(_, bytes)| {
|
||||
UserId::parse(utils::string_from_bytes(&bytes).map_err(|_| {
|
||||
Error::bad_database(
|
||||
"User ID in devicekeychangeid_userid is invalid unicode.",
|
||||
)
|
||||
})?)
|
||||
.map_err(|_| {
|
||||
Error::bad_database("User ID in devicekeychangeid_userid is invalid.")
|
||||
})
|
||||
}),
|
||||
)
|
||||
}
|
||||
|
||||
fn mark_device_key_update(
|
||||
&self,
|
||||
user_id: &UserId,
|
||||
) -> Result<()> {
|
||||
fn mark_device_key_update(&self, user_id: &UserId) -> Result<()> {
|
||||
let count = services().globals.next_count()?.to_be_bytes();
|
||||
for room_id in services().rooms.state_cache.rooms_joined(user_id).filter_map(|r| r.ok()) {
|
||||
for room_id in services()
|
||||
.rooms
|
||||
.state_cache
|
||||
.rooms_joined(user_id)
|
||||
.filter_map(|r| r.ok())
|
||||
{
|
||||
// Don't send key updates to unencrypted rooms
|
||||
if services().rooms
|
||||
if services()
|
||||
.rooms
|
||||
.state_accessor
|
||||
.room_state_get(&room_id, &StateEventType::RoomEncryption, "")?
|
||||
.is_none()
|
||||
|
@ -883,20 +910,19 @@ impl service::users::Data for KeyValueDatabase {
|
|||
let mut key = user_id.as_bytes().to_vec();
|
||||
key.push(0xff);
|
||||
|
||||
Box::new(self.userdeviceid_metadata
|
||||
.scan_prefix(key)
|
||||
.map(|(_, bytes)| {
|
||||
serde_json::from_slice::<Device>(&bytes)
|
||||
.map_err(|_| Error::bad_database("Device in userdeviceid_metadata is invalid."))
|
||||
}))
|
||||
Box::new(
|
||||
self.userdeviceid_metadata
|
||||
.scan_prefix(key)
|
||||
.map(|(_, bytes)| {
|
||||
serde_json::from_slice::<Device>(&bytes).map_err(|_| {
|
||||
Error::bad_database("Device in userdeviceid_metadata is invalid.")
|
||||
})
|
||||
}),
|
||||
)
|
||||
}
|
||||
|
||||
/// Creates a new sync filter. Returns the filter id.
|
||||
fn create_filter(
|
||||
&self,
|
||||
user_id: &UserId,
|
||||
filter: &IncomingFilterDefinition,
|
||||
) -> Result<String> {
|
||||
fn create_filter(&self, user_id: &UserId, filter: &IncomingFilterDefinition) -> Result<String> {
|
||||
let filter_id = utils::random_string(4);
|
||||
|
||||
let mut key = user_id.as_bytes().to_vec();
|
||||
|
|
|
@ -1,8 +1,16 @@
|
|||
pub mod abstraction;
|
||||
pub mod key_value;
|
||||
|
||||
use crate::{utils, Config, Error, Result, service::{users, globals, uiaa, rooms::{self, state_compressor::CompressedStateEvent}, account_data, media, key_backups, transaction_ids, sending, appservice, pusher}, services, PduEvent, Services, SERVICES};
|
||||
use crate::{
|
||||
service::{
|
||||
account_data, appservice, globals, key_backups, media, pusher,
|
||||
rooms::{self, state_compressor::CompressedStateEvent},
|
||||
sending, transaction_ids, uiaa, users,
|
||||
},
|
||||
services, utils, Config, Error, PduEvent, Result, Services, SERVICES,
|
||||
};
|
||||
use abstraction::KeyValueDatabaseEngine;
|
||||
use abstraction::KvTree;
|
||||
use directories::ProjectDirs;
|
||||
use futures_util::{stream::FuturesUnordered, StreamExt};
|
||||
use lru_cache::LruCache;
|
||||
|
@ -12,7 +20,8 @@ use ruma::{
|
|||
GlobalAccountDataEvent, GlobalAccountDataEventType, StateEventType,
|
||||
},
|
||||
push::Ruleset,
|
||||
DeviceId, EventId, RoomId, UserId, signatures::CanonicalJsonValue,
|
||||
signatures::CanonicalJsonValue,
|
||||
DeviceId, EventId, RoomId, UserId,
|
||||
};
|
||||
use std::{
|
||||
collections::{BTreeMap, HashMap, HashSet},
|
||||
|
@ -25,7 +34,6 @@ use std::{
|
|||
};
|
||||
use tokio::sync::{mpsc, OwnedRwLockReadGuard, RwLock as TokioRwLock, Semaphore};
|
||||
use tracing::{debug, error, info, warn};
|
||||
use abstraction::KvTree;
|
||||
|
||||
pub struct KeyValueDatabase {
|
||||
_db: Arc<dyn KeyValueDatabaseEngine>,
|
||||
|
@ -65,9 +73,9 @@ pub struct KeyValueDatabase {
|
|||
pub(super) readreceiptid_readreceipt: Arc<dyn KvTree>, // ReadReceiptId = RoomId + Count + UserId
|
||||
pub(super) roomuserid_privateread: Arc<dyn KvTree>, // RoomUserId = Room + User, PrivateRead = Count
|
||||
pub(super) roomuserid_lastprivatereadupdate: Arc<dyn KvTree>, // LastPrivateReadUpdate = Count
|
||||
pub(super) typingid_userid: Arc<dyn KvTree>, // TypingId = RoomId + TimeoutTime + Count
|
||||
pub(super) typingid_userid: Arc<dyn KvTree>, // TypingId = RoomId + TimeoutTime + Count
|
||||
pub(super) roomid_lasttypingupdate: Arc<dyn KvTree>, // LastRoomTypingUpdate = Count
|
||||
pub(super) presenceid_presence: Arc<dyn KvTree>, // PresenceId = RoomId + Count + UserId
|
||||
pub(super) presenceid_presence: Arc<dyn KvTree>, // PresenceId = RoomId + Count + UserId
|
||||
pub(super) userid_lastpresenceupdate: Arc<dyn KvTree>, // LastPresenceUpdate = Count
|
||||
|
||||
//pub rooms: rooms::Rooms,
|
||||
|
@ -279,127 +287,126 @@ impl KeyValueDatabase {
|
|||
|
||||
let db = Arc::new(Self {
|
||||
_db: builder.clone(),
|
||||
userid_password: builder.open_tree("userid_password")?,
|
||||
userid_displayname: builder.open_tree("userid_displayname")?,
|
||||
userid_avatarurl: builder.open_tree("userid_avatarurl")?,
|
||||
userid_blurhash: builder.open_tree("userid_blurhash")?,
|
||||
userdeviceid_token: builder.open_tree("userdeviceid_token")?,
|
||||
userdeviceid_metadata: builder.open_tree("userdeviceid_metadata")?,
|
||||
userid_devicelistversion: builder.open_tree("userid_devicelistversion")?,
|
||||
token_userdeviceid: builder.open_tree("token_userdeviceid")?,
|
||||
onetimekeyid_onetimekeys: builder.open_tree("onetimekeyid_onetimekeys")?,
|
||||
userid_lastonetimekeyupdate: builder.open_tree("userid_lastonetimekeyupdate")?,
|
||||
keychangeid_userid: builder.open_tree("keychangeid_userid")?,
|
||||
keyid_key: builder.open_tree("keyid_key")?,
|
||||
userid_masterkeyid: builder.open_tree("userid_masterkeyid")?,
|
||||
userid_selfsigningkeyid: builder.open_tree("userid_selfsigningkeyid")?,
|
||||
userid_usersigningkeyid: builder.open_tree("userid_usersigningkeyid")?,
|
||||
userfilterid_filter: builder.open_tree("userfilterid_filter")?,
|
||||
todeviceid_events: builder.open_tree("todeviceid_events")?,
|
||||
userid_password: builder.open_tree("userid_password")?,
|
||||
userid_displayname: builder.open_tree("userid_displayname")?,
|
||||
userid_avatarurl: builder.open_tree("userid_avatarurl")?,
|
||||
userid_blurhash: builder.open_tree("userid_blurhash")?,
|
||||
userdeviceid_token: builder.open_tree("userdeviceid_token")?,
|
||||
userdeviceid_metadata: builder.open_tree("userdeviceid_metadata")?,
|
||||
userid_devicelistversion: builder.open_tree("userid_devicelistversion")?,
|
||||
token_userdeviceid: builder.open_tree("token_userdeviceid")?,
|
||||
onetimekeyid_onetimekeys: builder.open_tree("onetimekeyid_onetimekeys")?,
|
||||
userid_lastonetimekeyupdate: builder.open_tree("userid_lastonetimekeyupdate")?,
|
||||
keychangeid_userid: builder.open_tree("keychangeid_userid")?,
|
||||
keyid_key: builder.open_tree("keyid_key")?,
|
||||
userid_masterkeyid: builder.open_tree("userid_masterkeyid")?,
|
||||
userid_selfsigningkeyid: builder.open_tree("userid_selfsigningkeyid")?,
|
||||
userid_usersigningkeyid: builder.open_tree("userid_usersigningkeyid")?,
|
||||
userfilterid_filter: builder.open_tree("userfilterid_filter")?,
|
||||
todeviceid_events: builder.open_tree("todeviceid_events")?,
|
||||
|
||||
userdevicesessionid_uiaainfo: builder.open_tree("userdevicesessionid_uiaainfo")?,
|
||||
userdevicesessionid_uiaarequest: RwLock::new(BTreeMap::new()),
|
||||
readreceiptid_readreceipt: builder.open_tree("readreceiptid_readreceipt")?,
|
||||
roomuserid_privateread: builder.open_tree("roomuserid_privateread")?, // "Private" read receipt
|
||||
roomuserid_lastprivatereadupdate: builder
|
||||
.open_tree("roomuserid_lastprivatereadupdate")?,
|
||||
typingid_userid: builder.open_tree("typingid_userid")?,
|
||||
roomid_lasttypingupdate: builder.open_tree("roomid_lasttypingupdate")?,
|
||||
presenceid_presence: builder.open_tree("presenceid_presence")?,
|
||||
userid_lastpresenceupdate: builder.open_tree("userid_lastpresenceupdate")?,
|
||||
pduid_pdu: builder.open_tree("pduid_pdu")?,
|
||||
eventid_pduid: builder.open_tree("eventid_pduid")?,
|
||||
roomid_pduleaves: builder.open_tree("roomid_pduleaves")?,
|
||||
userdevicesessionid_uiaainfo: builder.open_tree("userdevicesessionid_uiaainfo")?,
|
||||
userdevicesessionid_uiaarequest: RwLock::new(BTreeMap::new()),
|
||||
readreceiptid_readreceipt: builder.open_tree("readreceiptid_readreceipt")?,
|
||||
roomuserid_privateread: builder.open_tree("roomuserid_privateread")?, // "Private" read receipt
|
||||
roomuserid_lastprivatereadupdate: builder
|
||||
.open_tree("roomuserid_lastprivatereadupdate")?,
|
||||
typingid_userid: builder.open_tree("typingid_userid")?,
|
||||
roomid_lasttypingupdate: builder.open_tree("roomid_lasttypingupdate")?,
|
||||
presenceid_presence: builder.open_tree("presenceid_presence")?,
|
||||
userid_lastpresenceupdate: builder.open_tree("userid_lastpresenceupdate")?,
|
||||
pduid_pdu: builder.open_tree("pduid_pdu")?,
|
||||
eventid_pduid: builder.open_tree("eventid_pduid")?,
|
||||
roomid_pduleaves: builder.open_tree("roomid_pduleaves")?,
|
||||
|
||||
alias_roomid: builder.open_tree("alias_roomid")?,
|
||||
aliasid_alias: builder.open_tree("aliasid_alias")?,
|
||||
publicroomids: builder.open_tree("publicroomids")?,
|
||||
alias_roomid: builder.open_tree("alias_roomid")?,
|
||||
aliasid_alias: builder.open_tree("aliasid_alias")?,
|
||||
publicroomids: builder.open_tree("publicroomids")?,
|
||||
|
||||
tokenids: builder.open_tree("tokenids")?,
|
||||
tokenids: builder.open_tree("tokenids")?,
|
||||
|
||||
roomserverids: builder.open_tree("roomserverids")?,
|
||||
serverroomids: builder.open_tree("serverroomids")?,
|
||||
userroomid_joined: builder.open_tree("userroomid_joined")?,
|
||||
roomuserid_joined: builder.open_tree("roomuserid_joined")?,
|
||||
roomid_joinedcount: builder.open_tree("roomid_joinedcount")?,
|
||||
roomid_invitedcount: builder.open_tree("roomid_invitedcount")?,
|
||||
roomuseroncejoinedids: builder.open_tree("roomuseroncejoinedids")?,
|
||||
userroomid_invitestate: builder.open_tree("userroomid_invitestate")?,
|
||||
roomuserid_invitecount: builder.open_tree("roomuserid_invitecount")?,
|
||||
userroomid_leftstate: builder.open_tree("userroomid_leftstate")?,
|
||||
roomuserid_leftcount: builder.open_tree("roomuserid_leftcount")?,
|
||||
roomserverids: builder.open_tree("roomserverids")?,
|
||||
serverroomids: builder.open_tree("serverroomids")?,
|
||||
userroomid_joined: builder.open_tree("userroomid_joined")?,
|
||||
roomuserid_joined: builder.open_tree("roomuserid_joined")?,
|
||||
roomid_joinedcount: builder.open_tree("roomid_joinedcount")?,
|
||||
roomid_invitedcount: builder.open_tree("roomid_invitedcount")?,
|
||||
roomuseroncejoinedids: builder.open_tree("roomuseroncejoinedids")?,
|
||||
userroomid_invitestate: builder.open_tree("userroomid_invitestate")?,
|
||||
roomuserid_invitecount: builder.open_tree("roomuserid_invitecount")?,
|
||||
userroomid_leftstate: builder.open_tree("userroomid_leftstate")?,
|
||||
roomuserid_leftcount: builder.open_tree("roomuserid_leftcount")?,
|
||||
|
||||
disabledroomids: builder.open_tree("disabledroomids")?,
|
||||
disabledroomids: builder.open_tree("disabledroomids")?,
|
||||
|
||||
lazyloadedids: builder.open_tree("lazyloadedids")?,
|
||||
lazyloadedids: builder.open_tree("lazyloadedids")?,
|
||||
|
||||
userroomid_notificationcount: builder.open_tree("userroomid_notificationcount")?,
|
||||
userroomid_highlightcount: builder.open_tree("userroomid_highlightcount")?,
|
||||
userroomid_notificationcount: builder.open_tree("userroomid_notificationcount")?,
|
||||
userroomid_highlightcount: builder.open_tree("userroomid_highlightcount")?,
|
||||
|
||||
statekey_shortstatekey: builder.open_tree("statekey_shortstatekey")?,
|
||||
shortstatekey_statekey: builder.open_tree("shortstatekey_statekey")?,
|
||||
statekey_shortstatekey: builder.open_tree("statekey_shortstatekey")?,
|
||||
shortstatekey_statekey: builder.open_tree("shortstatekey_statekey")?,
|
||||
|
||||
shorteventid_authchain: builder.open_tree("shorteventid_authchain")?,
|
||||
shorteventid_authchain: builder.open_tree("shorteventid_authchain")?,
|
||||
|
||||
roomid_shortroomid: builder.open_tree("roomid_shortroomid")?,
|
||||
roomid_shortroomid: builder.open_tree("roomid_shortroomid")?,
|
||||
|
||||
shortstatehash_statediff: builder.open_tree("shortstatehash_statediff")?,
|
||||
eventid_shorteventid: builder.open_tree("eventid_shorteventid")?,
|
||||
shorteventid_eventid: builder.open_tree("shorteventid_eventid")?,
|
||||
shorteventid_shortstatehash: builder.open_tree("shorteventid_shortstatehash")?,
|
||||
roomid_shortstatehash: builder.open_tree("roomid_shortstatehash")?,
|
||||
roomsynctoken_shortstatehash: builder.open_tree("roomsynctoken_shortstatehash")?,
|
||||
statehash_shortstatehash: builder.open_tree("statehash_shortstatehash")?,
|
||||
shortstatehash_statediff: builder.open_tree("shortstatehash_statediff")?,
|
||||
eventid_shorteventid: builder.open_tree("eventid_shorteventid")?,
|
||||
shorteventid_eventid: builder.open_tree("shorteventid_eventid")?,
|
||||
shorteventid_shortstatehash: builder.open_tree("shorteventid_shortstatehash")?,
|
||||
roomid_shortstatehash: builder.open_tree("roomid_shortstatehash")?,
|
||||
roomsynctoken_shortstatehash: builder.open_tree("roomsynctoken_shortstatehash")?,
|
||||
statehash_shortstatehash: builder.open_tree("statehash_shortstatehash")?,
|
||||
|
||||
eventid_outlierpdu: builder.open_tree("eventid_outlierpdu")?,
|
||||
softfailedeventids: builder.open_tree("softfailedeventids")?,
|
||||
eventid_outlierpdu: builder.open_tree("eventid_outlierpdu")?,
|
||||
softfailedeventids: builder.open_tree("softfailedeventids")?,
|
||||
|
||||
referencedevents: builder.open_tree("referencedevents")?,
|
||||
roomuserdataid_accountdata: builder.open_tree("roomuserdataid_accountdata")?,
|
||||
roomusertype_roomuserdataid: builder.open_tree("roomusertype_roomuserdataid")?,
|
||||
mediaid_file: builder.open_tree("mediaid_file")?,
|
||||
backupid_algorithm: builder.open_tree("backupid_algorithm")?,
|
||||
backupid_etag: builder.open_tree("backupid_etag")?,
|
||||
backupkeyid_backup: builder.open_tree("backupkeyid_backup")?,
|
||||
userdevicetxnid_response: builder.open_tree("userdevicetxnid_response")?,
|
||||
servername_educount: builder.open_tree("servername_educount")?,
|
||||
servernameevent_data: builder.open_tree("servernameevent_data")?,
|
||||
servercurrentevent_data: builder.open_tree("servercurrentevent_data")?,
|
||||
id_appserviceregistrations: builder.open_tree("id_appserviceregistrations")?,
|
||||
senderkey_pusher: builder.open_tree("senderkey_pusher")?,
|
||||
global: builder.open_tree("global")?,
|
||||
server_signingkeys: builder.open_tree("server_signingkeys")?,
|
||||
|
||||
cached_registrations: Arc::new(RwLock::new(HashMap::new())),
|
||||
pdu_cache: Mutex::new(LruCache::new(
|
||||
config
|
||||
.pdu_cache_capacity
|
||||
.try_into()
|
||||
.expect("pdu cache capacity fits into usize"),
|
||||
)),
|
||||
auth_chain_cache: Mutex::new(LruCache::new(
|
||||
(100_000.0 * config.conduit_cache_capacity_modifier) as usize,
|
||||
)),
|
||||
shorteventid_cache: Mutex::new(LruCache::new(
|
||||
(100_000.0 * config.conduit_cache_capacity_modifier) as usize,
|
||||
)),
|
||||
eventidshort_cache: Mutex::new(LruCache::new(
|
||||
(100_000.0 * config.conduit_cache_capacity_modifier) as usize,
|
||||
)),
|
||||
shortstatekey_cache: Mutex::new(LruCache::new(
|
||||
(100_000.0 * config.conduit_cache_capacity_modifier) as usize,
|
||||
)),
|
||||
statekeyshort_cache: Mutex::new(LruCache::new(
|
||||
(100_000.0 * config.conduit_cache_capacity_modifier) as usize,
|
||||
)),
|
||||
our_real_users_cache: RwLock::new(HashMap::new()),
|
||||
appservice_in_room_cache: RwLock::new(HashMap::new()),
|
||||
lazy_load_waiting: Mutex::new(HashMap::new()),
|
||||
stateinfo_cache: Mutex::new(LruCache::new(
|
||||
(100.0 * config.conduit_cache_capacity_modifier) as usize,
|
||||
)),
|
||||
lasttimelinecount_cache: Mutex::new(HashMap::new()),
|
||||
referencedevents: builder.open_tree("referencedevents")?,
|
||||
roomuserdataid_accountdata: builder.open_tree("roomuserdataid_accountdata")?,
|
||||
roomusertype_roomuserdataid: builder.open_tree("roomusertype_roomuserdataid")?,
|
||||
mediaid_file: builder.open_tree("mediaid_file")?,
|
||||
backupid_algorithm: builder.open_tree("backupid_algorithm")?,
|
||||
backupid_etag: builder.open_tree("backupid_etag")?,
|
||||
backupkeyid_backup: builder.open_tree("backupkeyid_backup")?,
|
||||
userdevicetxnid_response: builder.open_tree("userdevicetxnid_response")?,
|
||||
servername_educount: builder.open_tree("servername_educount")?,
|
||||
servernameevent_data: builder.open_tree("servernameevent_data")?,
|
||||
servercurrentevent_data: builder.open_tree("servercurrentevent_data")?,
|
||||
id_appserviceregistrations: builder.open_tree("id_appserviceregistrations")?,
|
||||
senderkey_pusher: builder.open_tree("senderkey_pusher")?,
|
||||
global: builder.open_tree("global")?,
|
||||
server_signingkeys: builder.open_tree("server_signingkeys")?,
|
||||
|
||||
cached_registrations: Arc::new(RwLock::new(HashMap::new())),
|
||||
pdu_cache: Mutex::new(LruCache::new(
|
||||
config
|
||||
.pdu_cache_capacity
|
||||
.try_into()
|
||||
.expect("pdu cache capacity fits into usize"),
|
||||
)),
|
||||
auth_chain_cache: Mutex::new(LruCache::new(
|
||||
(100_000.0 * config.conduit_cache_capacity_modifier) as usize,
|
||||
)),
|
||||
shorteventid_cache: Mutex::new(LruCache::new(
|
||||
(100_000.0 * config.conduit_cache_capacity_modifier) as usize,
|
||||
)),
|
||||
eventidshort_cache: Mutex::new(LruCache::new(
|
||||
(100_000.0 * config.conduit_cache_capacity_modifier) as usize,
|
||||
)),
|
||||
shortstatekey_cache: Mutex::new(LruCache::new(
|
||||
(100_000.0 * config.conduit_cache_capacity_modifier) as usize,
|
||||
)),
|
||||
statekeyshort_cache: Mutex::new(LruCache::new(
|
||||
(100_000.0 * config.conduit_cache_capacity_modifier) as usize,
|
||||
)),
|
||||
our_real_users_cache: RwLock::new(HashMap::new()),
|
||||
appservice_in_room_cache: RwLock::new(HashMap::new()),
|
||||
lazy_load_waiting: Mutex::new(HashMap::new()),
|
||||
stateinfo_cache: Mutex::new(LruCache::new(
|
||||
(100.0 * config.conduit_cache_capacity_modifier) as usize,
|
||||
)),
|
||||
lasttimelinecount_cache: Mutex::new(HashMap::new()),
|
||||
});
|
||||
|
||||
let services_raw = Box::new(Services::build(Arc::clone(&db), config)?);
|
||||
|
@ -407,7 +414,6 @@ impl KeyValueDatabase {
|
|||
// This is the first and only time we initialize the SERVICE static
|
||||
*SERVICES.write().unwrap() = Some(Box::leak(services_raw));
|
||||
|
||||
|
||||
// Matrix resource ownership is based on the server name; changing it
|
||||
// requires recreating the database from scratch.
|
||||
if services().users.count()? > 0 {
|
||||
|
@ -570,7 +576,10 @@ impl KeyValueDatabase {
|
|||
let states_parents = last_roomsstatehash.map_or_else(
|
||||
|| Ok(Vec::new()),
|
||||
|&last_roomsstatehash| {
|
||||
services().rooms.state_compressor.load_shortstatehash_info(dbg!(last_roomsstatehash))
|
||||
services()
|
||||
.rooms
|
||||
.state_compressor
|
||||
.load_shortstatehash_info(dbg!(last_roomsstatehash))
|
||||
},
|
||||
)?;
|
||||
|
||||
|
@ -643,14 +652,15 @@ impl KeyValueDatabase {
|
|||
current_state = HashSet::new();
|
||||
current_sstatehash = Some(sstatehash);
|
||||
|
||||
let event_id = db
|
||||
.shorteventid_eventid
|
||||
.get(&seventid)
|
||||
.unwrap()
|
||||
.unwrap();
|
||||
let event_id = db.shorteventid_eventid.get(&seventid).unwrap().unwrap();
|
||||
let string = utils::string_from_bytes(&event_id).unwrap();
|
||||
let event_id = <&EventId>::try_from(string.as_str()).unwrap();
|
||||
let pdu = services().rooms.timeline.get_pdu(event_id).unwrap().unwrap();
|
||||
let pdu = services()
|
||||
.rooms
|
||||
.timeline
|
||||
.get_pdu(event_id)
|
||||
.unwrap()
|
||||
.unwrap();
|
||||
|
||||
if Some(&pdu.room_id) != current_room.as_ref() {
|
||||
current_room = Some(pdu.room_id.clone());
|
||||
|
@ -764,8 +774,7 @@ impl KeyValueDatabase {
|
|||
.peekable();
|
||||
|
||||
while iter.peek().is_some() {
|
||||
db.tokenids
|
||||
.insert_batch(&mut iter.by_ref().take(1000))?;
|
||||
db.tokenids.insert_batch(&mut iter.by_ref().take(1000))?;
|
||||
println!("smaller batch done");
|
||||
}
|
||||
|
||||
|
@ -803,8 +812,7 @@ impl KeyValueDatabase {
|
|||
|
||||
// Force E2EE device list updates so we can send them over federation
|
||||
for user_id in services().users.iter().filter_map(|r| r.ok()) {
|
||||
services().users
|
||||
.mark_device_key_update(&user_id)?;
|
||||
services().users.mark_device_key_update(&user_id)?;
|
||||
}
|
||||
|
||||
services().globals.bump_database_version(10)?;
|
||||
|
@ -825,7 +833,8 @@ impl KeyValueDatabase {
|
|||
|
||||
info!(
|
||||
"Loaded {} database with version {}",
|
||||
services().globals.config.database_backend, latest_database_version
|
||||
services().globals.config.database_backend,
|
||||
latest_database_version
|
||||
);
|
||||
} else {
|
||||
services()
|
||||
|
@ -837,7 +846,8 @@ impl KeyValueDatabase {
|
|||
|
||||
warn!(
|
||||
"Created new {} database with version {}",
|
||||
services().globals.config.database_backend, latest_database_version
|
||||
services().globals.config.database_backend,
|
||||
latest_database_version
|
||||
);
|
||||
}
|
||||
|
||||
|
@ -862,9 +872,7 @@ impl KeyValueDatabase {
|
|||
}
|
||||
};
|
||||
|
||||
services()
|
||||
.sending
|
||||
.start_handler(sending_receiver);
|
||||
services().sending.start_handler(sending_receiver);
|
||||
|
||||
Self::start_cleanup_task().await;
|
||||
|
||||
|
@ -898,7 +906,8 @@ impl KeyValueDatabase {
|
|||
|
||||
use std::time::{Duration, Instant};
|
||||
|
||||
let timer_interval = Duration::from_secs(services().globals.config.cleanup_second_interval as u64);
|
||||
let timer_interval =
|
||||
Duration::from_secs(services().globals.config.cleanup_second_interval as u64);
|
||||
|
||||
tokio::spawn(async move {
|
||||
let mut i = interval(timer_interval);
|
||||
|
@ -937,8 +946,10 @@ fn set_emergency_access() -> Result<bool> {
|
|||
let conduit_user = UserId::parse_with_server_name("conduit", services().globals.server_name())
|
||||
.expect("@conduit:server_name is a valid UserId");
|
||||
|
||||
services().users
|
||||
.set_password(&conduit_user, services().globals.emergency_password().as_deref())?;
|
||||
services().users.set_password(
|
||||
&conduit_user,
|
||||
services().globals.emergency_password().as_deref(),
|
||||
)?;
|
||||
|
||||
let (ruleset, res) = match services().globals.emergency_password() {
|
||||
Some(_) => (Ruleset::server_default(&conduit_user), Ok(true)),
|
||||
|
@ -951,7 +962,8 @@ fn set_emergency_access() -> Result<bool> {
|
|||
GlobalAccountDataEventType::PushRules.to_string().into(),
|
||||
&serde_json::to_value(&GlobalAccountDataEvent {
|
||||
content: PushRulesEventContent { global: ruleset },
|
||||
}).expect("to json value always works"),
|
||||
})
|
||||
.expect("to json value always works"),
|
||||
)?;
|
||||
|
||||
res
|
||||
|
|
19
src/lib.rs
|
@ -7,22 +7,27 @@
|
|||
#![allow(clippy::suspicious_else_formatting)]
|
||||
#![deny(clippy::dbg_macro)]
|
||||
|
||||
pub mod api;
|
||||
mod config;
|
||||
mod database;
|
||||
mod service;
|
||||
pub mod api;
|
||||
mod utils;
|
||||
|
||||
use std::{cell::Cell, sync::{RwLock, Arc}};
|
||||
use std::{
|
||||
cell::Cell,
|
||||
sync::{Arc, RwLock},
|
||||
};
|
||||
|
||||
pub use config::Config;
|
||||
pub use utils::error::{Error, Result};
|
||||
pub use service::{Services, pdu::PduEvent};
|
||||
pub use api::ruma_wrapper::{Ruma, RumaResponse};
|
||||
pub use config::Config;
|
||||
pub use service::{pdu::PduEvent, Services};
|
||||
pub use utils::error::{Error, Result};
|
||||
|
||||
pub static SERVICES: RwLock<Option<&'static Services>> = RwLock::new(None);
|
||||
|
||||
pub fn services<'a>() -> &'static Services {
|
||||
&SERVICES.read().unwrap().expect("SERVICES should be initialized when this is called")
|
||||
&SERVICES
|
||||
.read()
|
||||
.unwrap()
|
||||
.expect("SERVICES should be initialized when this is called")
|
||||
}
|
||||
|
||||
|
|
|
@ -1,7 +1,11 @@
|
|||
use std::collections::HashMap;
|
||||
|
||||
use ruma::{UserId, RoomId, events::{RoomAccountDataEventType, AnyEphemeralRoomEvent}, serde::Raw};
|
||||
use crate::Result;
|
||||
use ruma::{
|
||||
events::{AnyEphemeralRoomEvent, RoomAccountDataEventType},
|
||||
serde::Raw,
|
||||
RoomId, UserId,
|
||||
};
|
||||
|
||||
pub trait Data: Send + Sync {
|
||||
/// Places one event in the account data of the user and removes the previous entry.
|
||||
|
|
|
@ -3,9 +3,7 @@ mod data;
|
|||
pub use data::Data;
|
||||
|
||||
use ruma::{
|
||||
api::client::{
|
||||
error::ErrorKind,
|
||||
},
|
||||
api::client::error::ErrorKind,
|
||||
events::{AnyEphemeralRoomEvent, RoomAccountDataEventType},
|
||||
serde::Raw,
|
||||
signatures::CanonicalJsonValue,
|
||||
|
|
|
@ -28,7 +28,15 @@ use ruma::{
|
|||
use serde_json::value::to_raw_value;
|
||||
use tokio::sync::{mpsc, MutexGuard, RwLock, RwLockReadGuard};
|
||||
|
||||
use crate::{Result, services, Error, api::{server_server, client_server::{AUTO_GEN_PASSWORD_LENGTH, leave_all_rooms}}, PduEvent, utils::{HtmlEscape, self}};
|
||||
use crate::{
|
||||
api::{
|
||||
client_server::{leave_all_rooms, AUTO_GEN_PASSWORD_LENGTH},
|
||||
server_server,
|
||||
},
|
||||
services,
|
||||
utils::{self, HtmlEscape},
|
||||
Error, PduEvent, Result,
|
||||
};
|
||||
|
||||
use super::pdu::PduBuilder;
|
||||
|
||||
|
@ -153,7 +161,6 @@ enum AdminCommand {
|
|||
EnableRoom { room_id: Box<RoomId> },
|
||||
}
|
||||
|
||||
|
||||
#[derive(Debug)]
|
||||
pub enum AdminRoomEvent {
|
||||
ProcessMessage(String),
|
||||
|
@ -166,16 +173,14 @@ pub struct Service {
|
|||
}
|
||||
|
||||
impl Service {
|
||||
pub fn start_handler(
|
||||
&self,
|
||||
mut receiver: mpsc::UnboundedReceiver<AdminRoomEvent>,
|
||||
) {
|
||||
pub fn start_handler(&self, mut receiver: mpsc::UnboundedReceiver<AdminRoomEvent>) {
|
||||
tokio::spawn(async move {
|
||||
// TODO: Use futures when we have long admin commands
|
||||
//let mut futures = FuturesUnordered::new();
|
||||
|
||||
let conduit_user = UserId::parse(format!("@conduit:{}", services().globals.server_name()))
|
||||
.expect("@conduit:server_name is valid");
|
||||
let conduit_user =
|
||||
UserId::parse(format!("@conduit:{}", services().globals.server_name()))
|
||||
.expect("@conduit:server_name is valid");
|
||||
|
||||
let conduit_room = services()
|
||||
.rooms
|
||||
|
@ -193,7 +198,8 @@ impl Service {
|
|||
mutex_lock: &MutexGuard<'_, ()>| {
|
||||
services()
|
||||
.rooms
|
||||
.timeline.build_and_append_pdu(
|
||||
.timeline
|
||||
.build_and_append_pdu(
|
||||
PduBuilder {
|
||||
event_type: RoomEventType::RoomMessage,
|
||||
content: to_raw_value(&message)
|
||||
|
@ -316,9 +322,11 @@ impl Service {
|
|||
) -> Result<RoomMessageEventContent> {
|
||||
let reply_message_content = match command {
|
||||
AdminCommand::RegisterAppservice => {
|
||||
if body.len() > 2 && body[0].trim() == "```" && body.last().unwrap().trim() == "```" {
|
||||
if body.len() > 2 && body[0].trim() == "```" && body.last().unwrap().trim() == "```"
|
||||
{
|
||||
let appservice_config = body[1..body.len() - 1].join("\n");
|
||||
let parsed_config = serde_yaml::from_str::<serde_yaml::Value>(&appservice_config);
|
||||
let parsed_config =
|
||||
serde_yaml::from_str::<serde_yaml::Value>(&appservice_config);
|
||||
match parsed_config {
|
||||
Ok(yaml) => match services().appservice.register_appservice(yaml) {
|
||||
Ok(id) => RoomMessageEventContent::text_plain(format!(
|
||||
|
@ -343,7 +351,10 @@ impl Service {
|
|||
}
|
||||
AdminCommand::UnregisterAppservice {
|
||||
appservice_identifier,
|
||||
} => match services().appservice.unregister_appservice(&appservice_identifier) {
|
||||
} => match services()
|
||||
.appservice
|
||||
.unregister_appservice(&appservice_identifier)
|
||||
{
|
||||
Ok(()) => RoomMessageEventContent::text_plain("Appservice unregistered."),
|
||||
Err(e) => RoomMessageEventContent::text_plain(format!(
|
||||
"Failed to unregister appservice: {}",
|
||||
|
@ -351,7 +362,11 @@ impl Service {
|
|||
)),
|
||||
},
|
||||
AdminCommand::ListAppservices => {
|
||||
if let Ok(appservices) = services().appservice.iter_ids().map(|ids| ids.collect::<Vec<_>>()) {
|
||||
if let Ok(appservices) = services()
|
||||
.appservice
|
||||
.iter_ids()
|
||||
.map(|ids| ids.collect::<Vec<_>>())
|
||||
{
|
||||
let count = appservices.len();
|
||||
let output = format!(
|
||||
"Appservices ({}): {}",
|
||||
|
@ -399,7 +414,11 @@ impl Service {
|
|||
Err(e) => RoomMessageEventContent::text_plain(e.to_string()),
|
||||
},
|
||||
AdminCommand::IncomingFederation => {
|
||||
let map = services().globals.roomid_federationhandletime.read().unwrap();
|
||||
let map = services()
|
||||
.globals
|
||||
.roomid_federationhandletime
|
||||
.read()
|
||||
.unwrap();
|
||||
let mut msg: String = format!("Handling {} incoming pdus:\n", map.len());
|
||||
|
||||
for (r, (e, i)) in map.iter() {
|
||||
|
@ -426,7 +445,10 @@ impl Service {
|
|||
Error::bad_database("Invalid room id field in event in database")
|
||||
})?;
|
||||
let start = Instant::now();
|
||||
let count = services().rooms.auth_chain.get_auth_chain(room_id, vec![event_id])
|
||||
let count = services()
|
||||
.rooms
|
||||
.auth_chain
|
||||
.get_auth_chain(room_id, vec![event_id])
|
||||
.await?
|
||||
.count();
|
||||
let elapsed = start.elapsed();
|
||||
|
@ -439,7 +461,8 @@ impl Service {
|
|||
}
|
||||
}
|
||||
AdminCommand::ParsePdu => {
|
||||
if body.len() > 2 && body[0].trim() == "```" && body.last().unwrap().trim() == "```" {
|
||||
if body.len() > 2 && body[0].trim() == "```" && body.last().unwrap().trim() == "```"
|
||||
{
|
||||
let string = body[1..body.len() - 1].join("\n");
|
||||
match serde_json::from_str(&string) {
|
||||
Ok(value) => {
|
||||
|
@ -477,15 +500,18 @@ impl Service {
|
|||
}
|
||||
AdminCommand::GetPdu { event_id } => {
|
||||
let mut outlier = false;
|
||||
let mut pdu_json = services().rooms.timeline.get_non_outlier_pdu_json(&event_id)?;
|
||||
let mut pdu_json = services()
|
||||
.rooms
|
||||
.timeline
|
||||
.get_non_outlier_pdu_json(&event_id)?;
|
||||
if pdu_json.is_none() {
|
||||
outlier = true;
|
||||
pdu_json = services().rooms.timeline.get_pdu_json(&event_id)?;
|
||||
}
|
||||
match pdu_json {
|
||||
Some(json) => {
|
||||
let json_text =
|
||||
serde_json::to_string_pretty(&json).expect("canonical json is valid json");
|
||||
let json_text = serde_json::to_string_pretty(&json)
|
||||
.expect("canonical json is valid json");
|
||||
RoomMessageEventContent::text_html(
|
||||
format!(
|
||||
"{}\n```json\n{}\n```",
|
||||
|
@ -539,8 +565,11 @@ impl Service {
|
|||
if !services().users.exists(&user_id)?
|
||||
|| services().users.is_deactivated(&user_id)?
|
||||
|| user_id
|
||||
== UserId::parse_with_server_name("conduit", services().globals.server_name())
|
||||
.expect("conduit user exists")
|
||||
== UserId::parse_with_server_name(
|
||||
"conduit",
|
||||
services().globals.server_name(),
|
||||
)
|
||||
.expect("conduit user exists")
|
||||
{
|
||||
return Ok(RoomMessageEventContent::text_plain(
|
||||
"The specified user does not exist or is deactivated!",
|
||||
|
@ -549,7 +578,10 @@ impl Service {
|
|||
|
||||
let new_password = utils::random_string(AUTO_GEN_PASSWORD_LENGTH);
|
||||
|
||||
match services().users.set_password(&user_id, Some(new_password.as_str())) {
|
||||
match services()
|
||||
.users
|
||||
.set_password(&user_id, Some(new_password.as_str()))
|
||||
{
|
||||
Ok(()) => RoomMessageEventContent::text_plain(format!(
|
||||
"Successfully reset the password for user {}: {}",
|
||||
user_id, new_password
|
||||
|
@ -590,7 +622,8 @@ impl Service {
|
|||
|
||||
// Default to pretty displayname
|
||||
let displayname = format!("{} ⚡️", user_id.localpart());
|
||||
services().users
|
||||
services()
|
||||
.users
|
||||
.set_displayname(&user_id, Some(displayname.clone()))?;
|
||||
|
||||
// Initial account data
|
||||
|
@ -604,7 +637,8 @@ impl Service {
|
|||
content: ruma::events::push_rules::PushRulesEventContent {
|
||||
global: ruma::push::Ruleset::server_default(&user_id),
|
||||
},
|
||||
}).expect("to json value always works"),
|
||||
})
|
||||
.expect("to json value always works"),
|
||||
)?;
|
||||
|
||||
// we dont add a device since we're not the user, just the creator
|
||||
|
@ -651,7 +685,8 @@ impl Service {
|
|||
}
|
||||
}
|
||||
AdminCommand::DeactivateAll { leave_rooms, force } => {
|
||||
if body.len() > 2 && body[0].trim() == "```" && body.last().unwrap().trim() == "```" {
|
||||
if body.len() > 2 && body[0].trim() == "```" && body.last().unwrap().trim() == "```"
|
||||
{
|
||||
let usernames = body.clone().drain(1..body.len() - 1).collect::<Vec<_>>();
|
||||
|
||||
let mut user_ids: Vec<&UserId> = Vec::new();
|
||||
|
@ -672,17 +707,15 @@ impl Service {
|
|||
let mut admins = Vec::new();
|
||||
|
||||
if !force {
|
||||
user_ids.retain(|&user_id| {
|
||||
match services().users.is_admin(user_id) {
|
||||
Ok(is_admin) => match is_admin {
|
||||
true => {
|
||||
admins.push(user_id.localpart());
|
||||
false
|
||||
}
|
||||
false => true,
|
||||
},
|
||||
Err(_) => false,
|
||||
}
|
||||
user_ids.retain(|&user_id| match services().users.is_admin(user_id) {
|
||||
Ok(is_admin) => match is_admin {
|
||||
true => {
|
||||
admins.push(user_id.localpart());
|
||||
false
|
||||
}
|
||||
false => true,
|
||||
},
|
||||
Err(_) => false,
|
||||
})
|
||||
}
|
||||
|
||||
|
@ -783,8 +816,8 @@ impl Service {
|
|||
} else {
|
||||
// Wrap the usage line in a code block, and add a yaml block example
|
||||
// This makes the usage of e.g. `register-appservice` more accurate
|
||||
let re =
|
||||
Regex::new("(?m)^USAGE:\n (.*?)\n\n").expect("Regex compilation should not fail");
|
||||
let re = Regex::new("(?m)^USAGE:\n (.*?)\n\n")
|
||||
.expect("Regex compilation should not fail");
|
||||
re.replace_all(&text, "USAGE:\n<pre>$1[nobr]\n[commandbodyblock]</pre>")
|
||||
.replace("[commandbodyblock]", &command_body)
|
||||
};
|
||||
|
@ -808,7 +841,8 @@ impl Service {
|
|||
services().rooms.short.get_or_create_shortroomid(&room_id)?;
|
||||
|
||||
let mutex_state = Arc::clone(
|
||||
services().globals
|
||||
services()
|
||||
.globals
|
||||
.roomid_mutex_state
|
||||
.write()
|
||||
.unwrap()
|
||||
|
@ -818,8 +852,9 @@ impl Service {
|
|||
let state_lock = mutex_state.lock().await;
|
||||
|
||||
// Create a user for the server
|
||||
let conduit_user = UserId::parse_with_server_name("conduit", services().globals.server_name())
|
||||
.expect("@conduit:server_name is valid");
|
||||
let conduit_user =
|
||||
UserId::parse_with_server_name("conduit", services().globals.server_name())
|
||||
.expect("@conduit:server_name is valid");
|
||||
|
||||
services().users.create(&conduit_user, None)?;
|
||||
|
||||
|
@ -1002,9 +1037,10 @@ impl Service {
|
|||
user_id: &UserId,
|
||||
displayname: String,
|
||||
) -> Result<()> {
|
||||
let admin_room_alias: Box<RoomAliasId> = format!("#admins:{}", services().globals.server_name())
|
||||
.try_into()
|
||||
.expect("#admins:server_name is a valid alias name");
|
||||
let admin_room_alias: Box<RoomAliasId> =
|
||||
format!("#admins:{}", services().globals.server_name())
|
||||
.try_into()
|
||||
.expect("#admins:server_name is a valid alias name");
|
||||
let room_id = services()
|
||||
.rooms
|
||||
.alias
|
||||
|
@ -1012,7 +1048,8 @@ impl Service {
|
|||
.expect("Admin room must exist");
|
||||
|
||||
let mutex_state = Arc::clone(
|
||||
services().globals
|
||||
services()
|
||||
.globals
|
||||
.roomid_mutex_state
|
||||
.write()
|
||||
.unwrap()
|
||||
|
@ -1022,8 +1059,9 @@ impl Service {
|
|||
let state_lock = mutex_state.lock().await;
|
||||
|
||||
// Use the server user to grant the new admin's power level
|
||||
let conduit_user = UserId::parse_with_server_name("conduit", services().globals.server_name())
|
||||
.expect("@conduit:server_name is valid");
|
||||
let conduit_user =
|
||||
UserId::parse_with_server_name("conduit", services().globals.server_name())
|
||||
.expect("@conduit:server_name is valid");
|
||||
|
||||
// Invite and join the real user
|
||||
services().rooms.timeline.build_and_append_pdu(
|
||||
|
|
|
@ -1,7 +1,11 @@
|
|||
use std::collections::BTreeMap;
|
||||
|
||||
use async_trait::async_trait;
|
||||
use ruma::{signatures::Ed25519KeyPair, DeviceId, UserId, ServerName, api::federation::discovery::{ServerSigningKeys, VerifyKey}, ServerSigningKeyId};
|
||||
use ruma::{
|
||||
api::federation::discovery::{ServerSigningKeys, VerifyKey},
|
||||
signatures::Ed25519KeyPair,
|
||||
DeviceId, ServerName, ServerSigningKeyId, UserId,
|
||||
};
|
||||
|
||||
use crate::Result;
|
||||
|
||||
|
|
|
@ -4,7 +4,7 @@ pub use data::Data;
|
|||
use crate::api::server_server::FedDest;
|
||||
use crate::service::*;
|
||||
|
||||
use crate::{Config, utils, Error, Result};
|
||||
use crate::{utils, Config, Error, Result};
|
||||
use ruma::{
|
||||
api::{
|
||||
client::sync::sync_events,
|
||||
|
@ -89,12 +89,8 @@ impl Default for RotationHandler {
|
|||
}
|
||||
}
|
||||
|
||||
|
||||
impl Service {
|
||||
pub fn load(
|
||||
db: Arc<dyn Data>,
|
||||
config: Config,
|
||||
) -> Result<Self> {
|
||||
pub fn load(db: Arc<dyn Data>, config: Config) -> Result<Self> {
|
||||
let keypair = db.load_keypair();
|
||||
|
||||
let keypair = match keypair {
|
||||
|
|
|
@ -1,7 +1,11 @@
|
|||
use std::collections::BTreeMap;
|
||||
|
||||
use ruma::{api::client::backup::{BackupAlgorithm, RoomKeyBackup, KeyBackupData}, serde::Raw, UserId, RoomId};
|
||||
use crate::Result;
|
||||
use ruma::{
|
||||
api::client::backup::{BackupAlgorithm, KeyBackupData, RoomKeyBackup},
|
||||
serde::Raw,
|
||||
RoomId, UserId,
|
||||
};
|
||||
|
||||
pub trait Data: Send + Sync {
|
||||
fn create_backup(
|
||||
|
@ -21,16 +25,10 @@ pub trait Data: Send + Sync {
|
|||
|
||||
fn get_latest_backup_version(&self, user_id: &UserId) -> Result<Option<String>>;
|
||||
|
||||
fn get_latest_backup(
|
||||
&self,
|
||||
user_id: &UserId,
|
||||
) -> Result<Option<(String, Raw<BackupAlgorithm>)>>;
|
||||
fn get_latest_backup(&self, user_id: &UserId)
|
||||
-> Result<Option<(String, Raw<BackupAlgorithm>)>>;
|
||||
|
||||
fn get_backup(
|
||||
&self,
|
||||
user_id: &UserId,
|
||||
version: &str,
|
||||
) -> Result<Option<Raw<BackupAlgorithm>>>;
|
||||
fn get_backup(&self, user_id: &UserId, version: &str) -> Result<Option<Raw<BackupAlgorithm>>>;
|
||||
|
||||
fn add_key(
|
||||
&self,
|
||||
|
@ -68,12 +66,7 @@ pub trait Data: Send + Sync {
|
|||
|
||||
fn delete_all_keys(&self, user_id: &UserId, version: &str) -> Result<()>;
|
||||
|
||||
fn delete_room_keys(
|
||||
&self,
|
||||
user_id: &UserId,
|
||||
version: &str,
|
||||
room_id: &RoomId,
|
||||
) -> Result<()>;
|
||||
fn delete_room_keys(&self, user_id: &UserId, version: &str, room_id: &RoomId) -> Result<()>;
|
||||
|
||||
fn delete_room_key(
|
||||
&self,
|
||||
|
|
|
@ -1,7 +1,7 @@
|
|||
mod data;
|
||||
pub use data::Data;
|
||||
|
||||
use crate::{utils, Error, Result, services};
|
||||
use crate::{services, utils, Error, Result};
|
||||
use ruma::{
|
||||
api::client::{
|
||||
backup::{BackupAlgorithm, KeyBackupData, RoomKeyBackup},
|
||||
|
@ -65,7 +65,8 @@ impl Service {
|
|||
session_id: &str,
|
||||
key_data: &Raw<KeyBackupData>,
|
||||
) -> Result<()> {
|
||||
self.db.add_key(user_id, version, room_id, session_id, key_data)
|
||||
self.db
|
||||
.add_key(user_id, version, room_id, session_id, key_data)
|
||||
}
|
||||
|
||||
pub fn count_keys(&self, user_id: &UserId, version: &str) -> Result<usize> {
|
||||
|
@ -123,6 +124,7 @@ impl Service {
|
|||
room_id: &RoomId,
|
||||
session_id: &str,
|
||||
) -> Result<()> {
|
||||
self.db.delete_room_key(user_id, version, room_id, session_id)
|
||||
self.db
|
||||
.delete_room_key(user_id, version, room_id, session_id)
|
||||
}
|
||||
}
|
||||
|
|
|
@ -1,8 +1,20 @@
|
|||
use crate::Result;
|
||||
|
||||
pub trait Data: Send + Sync {
|
||||
fn create_file_metadata(&self, mxc: String, width: u32, height: u32, content_disposition: Option<&str>, content_type: Option<&str>) -> Result<Vec<u8>>;
|
||||
fn create_file_metadata(
|
||||
&self,
|
||||
mxc: String,
|
||||
width: u32,
|
||||
height: u32,
|
||||
content_disposition: Option<&str>,
|
||||
content_type: Option<&str>,
|
||||
) -> Result<Vec<u8>>;
|
||||
|
||||
/// Returns content_disposition, content_type and the metadata key.
|
||||
fn search_file_metadata(&self, mxc: String, width: u32, height: u32) -> Result<(Option<String>, Option<String>, Vec<u8>)>;
|
||||
fn search_file_metadata(
|
||||
&self,
|
||||
mxc: String,
|
||||
width: u32,
|
||||
height: u32,
|
||||
) -> Result<(Option<String>, Option<String>, Vec<u8>)>;
|
||||
}
|
||||
|
|
|
@ -1,8 +1,8 @@
|
|||
mod data;
|
||||
pub use data::Data;
|
||||
|
||||
use crate::{services, utils, Error, Result};
|
||||
use image::{imageops::FilterType, GenericImageView};
|
||||
use crate::{utils, Error, Result, services};
|
||||
use std::{mem, sync::Arc};
|
||||
use tokio::{
|
||||
fs::File,
|
||||
|
@ -29,7 +29,9 @@ impl Service {
|
|||
file: &[u8],
|
||||
) -> Result<()> {
|
||||
// Width, Height = 0 if it's not a thumbnail
|
||||
let key = self.db.create_file_metadata(mxc, 0, 0, content_disposition, content_type)?;
|
||||
let key = self
|
||||
.db
|
||||
.create_file_metadata(mxc, 0, 0, content_disposition, content_type)?;
|
||||
|
||||
let path = services().globals.get_media_file(&key);
|
||||
let mut f = File::create(path).await?;
|
||||
|
@ -48,7 +50,9 @@ impl Service {
|
|||
height: u32,
|
||||
file: &[u8],
|
||||
) -> Result<()> {
|
||||
let key = self.db.create_file_metadata(mxc, width, height, content_disposition, content_type)?;
|
||||
let key =
|
||||
self.db
|
||||
.create_file_metadata(mxc, width, height, content_disposition, content_type)?;
|
||||
|
||||
let path = services().globals.get_media_file(&key);
|
||||
let mut f = File::create(path).await?;
|
||||
|
@ -59,12 +63,13 @@ impl Service {
|
|||
|
||||
/// Downloads a file.
|
||||
pub async fn get(&self, mxc: String) -> Result<Option<FileMeta>> {
|
||||
if let Ok((content_disposition, content_type, key)) = self.db.search_file_metadata(mxc, 0, 0) {
|
||||
if let Ok((content_disposition, content_type, key)) =
|
||||
self.db.search_file_metadata(mxc, 0, 0)
|
||||
{
|
||||
let path = services().globals.get_media_file(&key);
|
||||
let mut file = Vec::new();
|
||||
File::open(path).await?.read_to_end(&mut file).await?;
|
||||
|
||||
|
||||
Ok(Some(FileMeta {
|
||||
content_disposition,
|
||||
content_type,
|
||||
|
@ -108,7 +113,9 @@ impl Service {
|
|||
.thumbnail_properties(width, height)
|
||||
.unwrap_or((0, 0, false)); // 0, 0 because that's the original file
|
||||
|
||||
if let Ok((content_disposition, content_type, key)) = self.db.search_file_metadata(mxc.clone(), width, height) {
|
||||
if let Ok((content_disposition, content_type, key)) =
|
||||
self.db.search_file_metadata(mxc.clone(), width, height)
|
||||
{
|
||||
// Using saved thumbnail
|
||||
let path = services().globals.get_media_file(&key);
|
||||
let mut file = Vec::new();
|
||||
|
@ -119,7 +126,9 @@ impl Service {
|
|||
content_type,
|
||||
file: file.to_vec(),
|
||||
}))
|
||||
} else if let Ok((content_disposition, content_type, key)) = self.db.search_file_metadata(mxc.clone(), 0, 0) {
|
||||
} else if let Ok((content_disposition, content_type, key)) =
|
||||
self.db.search_file_metadata(mxc.clone(), 0, 0)
|
||||
{
|
||||
// Generate a thumbnail
|
||||
let path = services().globals.get_media_file(&key);
|
||||
let mut file = Vec::new();
|
||||
|
@ -180,7 +189,13 @@ impl Service {
|
|||
thumbnail.write_to(&mut thumbnail_bytes, image::ImageOutputFormat::Png)?;
|
||||
|
||||
// Save thumbnail in database so we don't have to generate it again next time
|
||||
let thumbnail_key = self.db.create_file_metadata(mxc, width, height, content_disposition.as_deref(), content_type.as_deref())?;
|
||||
let thumbnail_key = self.db.create_file_metadata(
|
||||
mxc,
|
||||
width,
|
||||
height,
|
||||
content_disposition.as_deref(),
|
||||
content_type.as_deref(),
|
||||
)?;
|
||||
|
||||
let path = services().globals.get_media_file(&thumbnail_key);
|
||||
let mut f = File::create(path).await?;
|
||||
|
|
|
@ -5,7 +5,7 @@ use std::{
|
|||
|
||||
use lru_cache::LruCache;
|
||||
|
||||
use crate::{Result, Config};
|
||||
use crate::{Config, Result};
|
||||
|
||||
pub mod account_data;
|
||||
pub mod admin;
|
||||
|
@ -49,7 +49,8 @@ impl Services {
|
|||
+ key_backups::Data
|
||||
+ media::Data,
|
||||
>(
|
||||
db: Arc<D>, config: Config
|
||||
db: Arc<D>,
|
||||
config: Config,
|
||||
) -> Result<Self> {
|
||||
Ok(Self {
|
||||
appservice: appservice::Service { db: db.clone() },
|
||||
|
@ -76,30 +77,26 @@ impl Services {
|
|||
state: rooms::state::Service { db: db.clone() },
|
||||
state_accessor: rooms::state_accessor::Service { db: db.clone() },
|
||||
state_cache: rooms::state_cache::Service { db: db.clone() },
|
||||
state_compressor: rooms::state_compressor::Service { db: db.clone(), stateinfo_cache: Mutex::new(LruCache::new((100.0 * config.conduit_cache_capacity_modifier) as usize,)) },
|
||||
timeline: rooms::timeline::Service { db: db.clone(), lasttimelinecount_cache: Mutex::new(HashMap::new()) },
|
||||
state_compressor: rooms::state_compressor::Service {
|
||||
db: db.clone(),
|
||||
stateinfo_cache: Mutex::new(LruCache::new(
|
||||
(100.0 * config.conduit_cache_capacity_modifier) as usize,
|
||||
)),
|
||||
},
|
||||
timeline: rooms::timeline::Service {
|
||||
db: db.clone(),
|
||||
lasttimelinecount_cache: Mutex::new(HashMap::new()),
|
||||
},
|
||||
user: rooms::user::Service { db: db.clone() },
|
||||
},
|
||||
transaction_ids: transaction_ids::Service {
|
||||
db: db.clone()
|
||||
},
|
||||
uiaa: uiaa::Service {
|
||||
db: db.clone()
|
||||
},
|
||||
users: users::Service {
|
||||
db: db.clone()
|
||||
},
|
||||
account_data: account_data::Service {
|
||||
db: db.clone()
|
||||
},
|
||||
transaction_ids: transaction_ids::Service { db: db.clone() },
|
||||
uiaa: uiaa::Service { db: db.clone() },
|
||||
users: users::Service { db: db.clone() },
|
||||
account_data: account_data::Service { db: db.clone() },
|
||||
admin: admin::Service { sender: todo!() },
|
||||
globals: globals::Service::load(db.clone(), config)?,
|
||||
key_backups: key_backups::Service {
|
||||
db: db.clone()
|
||||
},
|
||||
media: media::Service {
|
||||
db: db.clone()
|
||||
},
|
||||
key_backups: key_backups::Service { db: db.clone() },
|
||||
media: media::Service { db: db.clone() },
|
||||
sending: sending::Service {
|
||||
maximum_requests: todo!(),
|
||||
sender: todo!(),
|
||||
|
|
|
@ -1,4 +1,4 @@
|
|||
use crate::{Error, services};
|
||||
use crate::{services, Error};
|
||||
use ruma::{
|
||||
events::{
|
||||
room::member::RoomMemberEventContent, AnyEphemeralRoomEvent, AnyRoomEvent, AnyStateEvent,
|
||||
|
|
|
@ -1,5 +1,8 @@
|
|||
use ruma::{UserId, api::client::push::{set_pusher, get_pushers}};
|
||||
use crate::Result;
|
||||
use ruma::{
|
||||
api::client::push::{get_pushers, set_pusher},
|
||||
UserId,
|
||||
};
|
||||
|
||||
pub trait Data: Send + Sync {
|
||||
fn set_pusher(&self, sender: &UserId, pusher: set_pusher::v3::Pusher) -> Result<()>;
|
||||
|
@ -8,8 +11,5 @@ pub trait Data: Send + Sync {
|
|||
|
||||
fn get_pushers(&self, sender: &UserId) -> Result<Vec<get_pushers::v3::Pusher>>;
|
||||
|
||||
fn get_pusher_senderkeys<'a>(
|
||||
&'a self,
|
||||
sender: &UserId,
|
||||
) -> Box<dyn Iterator<Item = Vec<u8>>>;
|
||||
fn get_pusher_senderkeys<'a>(&'a self, sender: &UserId) -> Box<dyn Iterator<Item = Vec<u8>>>;
|
||||
}
|
||||
|
|
|
@@ -79,7 +79,11 @@ impl Service {
//*reqwest_request.timeout_mut() = Some(Duration::from_secs(5));

let url = reqwest_request.url().clone();
let response = services().globals.default_client().execute(reqwest_request).await;
let response = services()
.globals
.default_client()
.execute(reqwest_request)
.await;

match response {
Ok(mut response) => {

@@ -196,7 +200,8 @@ impl Service {
let ctx = PushConditionRoomCtx {
room_id: room_id.to_owned(),
member_count: 10_u32.into(), // TODO: get member count efficiently
user_display_name: services().users
user_display_name: services()
.users
.displayname(user)?
.unwrap_or_else(|| user.localpart().to_owned()),
users_power_levels: power_levels.users.clone(),

@@ -276,10 +281,10 @@ impl Service {
let user_name = services().users.displayname(&event.sender)?;
notifi.sender_display_name = user_name.as_deref();

let room_name = if let Some(room_name_pdu) =
services().rooms
let room_name = if let Some(room_name_pdu) = services()
.rooms
.state_accessor
.room_state_get(&event.room_id, &StateEventType::RoomName, "")?
.room_state_get(&event.room_id, &StateEventType::RoomName, "")?
{
serde_json::from_str::<RoomNameEventContent>(room_name_pdu.content.get())
.map_err(|_| Error::bad_database("Invalid room name event in database."))?

@@ -290,11 +295,8 @@ impl Service {

notifi.room_name = room_name.as_deref();

self.send_request(
url,
send_event_notification::v1::Request::new(notifi),
)
.await?;
self.send_request(url, send_event_notification::v1::Request::new(notifi))
.await?;
}

// TODO: email
@@ -1,25 +1,15 @@
use ruma::{RoomId, RoomAliasId};
use crate::Result;
use ruma::{RoomAliasId, RoomId};

pub trait Data: Send + Sync {
/// Creates or updates the alias to the given room id.
fn set_alias(
&self,
alias: &RoomAliasId,
room_id: &RoomId
) -> Result<()>;
fn set_alias(&self, alias: &RoomAliasId, room_id: &RoomId) -> Result<()>;

/// Forgets about an alias. Returns an error if the alias did not exist.
fn remove_alias(
&self,
alias: &RoomAliasId,
) -> Result<()>;
fn remove_alias(&self, alias: &RoomAliasId) -> Result<()>;

/// Looks up the roomid for the given alias.
fn resolve_local_alias(
&self,
alias: &RoomAliasId,
) -> Result<Option<Box<RoomId>>>;
fn resolve_local_alias(&self, alias: &RoomAliasId) -> Result<Option<Box<RoomId>>>;

/// Returns all local aliases that point to the given room
fn local_aliases_for_room(
@@ -3,8 +3,8 @@ use std::sync::Arc;

pub use data::Data;

use ruma::{RoomAliasId, RoomId};
use crate::Result;
use ruma::{RoomAliasId, RoomId};

pub struct Service {
db: Arc<dyn Data>,

@@ -12,19 +12,12 @@ pub struct Service {

impl Service {
#[tracing::instrument(skip(self))]
pub fn set_alias(
&self,
alias: &RoomAliasId,
room_id: &RoomId,
) -> Result<()> {
pub fn set_alias(&self, alias: &RoomAliasId, room_id: &RoomId) -> Result<()> {
self.db.set_alias(alias, room_id)
}

#[tracing::instrument(skip(self))]
pub fn remove_alias(
&self,
alias: &RoomAliasId,
) -> Result<()> {
pub fn remove_alias(&self, alias: &RoomAliasId) -> Result<()> {
self.db.remove_alias(alias)
}
@@ -1,7 +1,11 @@
use std::{collections::HashSet, sync::Arc};
use crate::Result;
use std::{collections::HashSet, sync::Arc};

pub trait Data: Send + Sync {
fn get_cached_eventid_authchain(&self, shorteventid: &[u64]) -> Result<Option<Arc<HashSet<u64>>>>;
fn cache_auth_chain(&self, shorteventid: Vec<u64>, auth_chain: Arc<HashSet<u64>>) -> Result<()>;
fn get_cached_eventid_authchain(
&self,
shorteventid: &[u64],
) -> Result<Option<Arc<HashSet<u64>>>>;
fn cache_auth_chain(&self, shorteventid: Vec<u64>, auth_chain: Arc<HashSet<u64>>)
-> Result<()>;
}
@@ -1,11 +1,14 @@
mod data;
use std::{sync::Arc, collections::{HashSet, BTreeSet}};
use std::{
collections::{BTreeSet, HashSet},
sync::Arc,
};

pub use data::Data;
use ruma::{RoomId, EventId, api::client::error::ErrorKind};
use ruma::{api::client::error::ErrorKind, EventId, RoomId};
use tracing::log::warn;

use crate::{Result, services, Error};
use crate::{services, Error, Result};

pub struct Service {
db: Arc<dyn Data>,

@@ -56,7 +59,11 @@ impl Service {
}

let chunk_key: Vec<u64> = chunk.iter().map(|(short, _)| short).copied().collect();
if let Some(cached) = services().rooms.auth_chain.get_cached_eventid_authchain(&chunk_key)? {
if let Some(cached) = services()
.rooms
.auth_chain
.get_cached_eventid_authchain(&chunk_key)?
{
hits += 1;
full_auth_chain.extend(cached.iter().copied());
continue;
@@ -68,13 +75,18 @@ impl Service {
let mut misses2 = 0;
let mut i = 0;
for (sevent_id, event_id) in chunk {
if let Some(cached) = services().rooms.auth_chain.get_cached_eventid_authchain(&[sevent_id])? {
if let Some(cached) = services()
.rooms
.auth_chain
.get_cached_eventid_authchain(&[sevent_id])?
{
hits2 += 1;
chunk_cache.extend(cached.iter().copied());
} else {
misses2 += 1;
let auth_chain = Arc::new(self.get_auth_chain_inner(room_id, &event_id)?);
services().rooms
services()
.rooms
.auth_chain
.cache_auth_chain(vec![sevent_id], Arc::clone(&auth_chain))?;
println!(

@@ -97,8 +109,10 @@ impl Service {
misses2
);
let chunk_cache = Arc::new(chunk_cache);
services().rooms
.auth_chain.cache_auth_chain(chunk_key, Arc::clone(&chunk_cache))?;
services()
.rooms
.auth_chain
.cache_auth_chain(chunk_key, Arc::clone(&chunk_cache))?;
full_auth_chain.extend(chunk_cache.iter());
}
@@ -115,11 +129,7 @@ impl Service {
}

#[tracing::instrument(skip(self, event_id))]
fn get_auth_chain_inner(
&self,
room_id: &RoomId,
event_id: &EventId,
) -> Result<HashSet<u64>> {
fn get_auth_chain_inner(&self, room_id: &RoomId, event_id: &EventId) -> Result<HashSet<u64>> {
let mut todo = vec![Arc::from(event_id)];
let mut found = HashSet::new();

@@ -131,7 +141,8 @@ impl Service {
}
for auth_event in &pdu.auth_events {
let sauthevent = services()
.rooms.short
.rooms
.short
.get_or_create_shorteventid(auth_event)?;

if !found.contains(&sauthevent) {
@@ -1,5 +1,5 @@
use ruma::RoomId;
use crate::Result;
use ruma::RoomId;

pub trait Data: Send + Sync {
/// Adds the room to the public room directory

@@ -1,7 +1,7 @@
use std::collections::HashMap;

use ruma::{UserId, RoomId, events::presence::PresenceEvent};
use crate::Result;
use ruma::{events::presence::PresenceEvent, RoomId, UserId};

pub trait Data: Send + Sync {
/// Adds a presence event which will be saved until a new event replaces it.

@@ -2,7 +2,7 @@ mod data;
use std::{collections::HashMap, sync::Arc};

pub use data::Data;
use ruma::{RoomId, UserId, events::presence::PresenceEvent};
use ruma::{events::presence::PresenceEvent, RoomId, UserId};

use crate::Result;
@@ -1,5 +1,5 @@
use ruma::{RoomId, events::receipt::ReceiptEvent, UserId, serde::Raw};
use crate::Result;
use ruma::{events::receipt::ReceiptEvent, serde::Raw, RoomId, UserId};

pub trait Data: Send + Sync {
/// Replaces the previous read receipt.

@@ -15,13 +15,15 @@ pub trait Data: Send + Sync {
&self,
room_id: &RoomId,
since: u64,
) -> Box<dyn Iterator<
Item = Result<(
Box<UserId>,
u64,
Raw<ruma::events::AnySyncEphemeralRoomEvent>,
)>,
>>;
) -> Box<
dyn Iterator<
Item = Result<(
Box<UserId>,
u64,
Raw<ruma::events::AnySyncEphemeralRoomEvent>,
)>,
>,
>;

/// Sets a private read marker at `count`.
fn private_read_set(&self, room_id: &RoomId, user_id: &UserId, count: u64) -> Result<()>;
@@ -3,8 +3,8 @@ use std::sync::Arc;

pub use data::Data;

use ruma::{RoomId, UserId, events::receipt::ReceiptEvent, serde::Raw};
use crate::Result;
use ruma::{events::receipt::ReceiptEvent, serde::Raw, RoomId, UserId};

pub struct Service {
db: Arc<dyn Data>,

@@ -1,6 +1,6 @@
use std::collections::HashSet;
use crate::Result;
use ruma::{UserId, RoomId};
use ruma::{RoomId, UserId};
use std::collections::HashSet;

pub trait Data: Send + Sync {
/// Sets a user as typing until the timeout timestamp is reached or roomtyping_remove is

@@ -2,7 +2,7 @@ mod data;
use std::sync::Arc;

pub use data::Data;
use ruma::{UserId, RoomId, events::SyncEphemeralRoomEvent};
use ruma::{events::SyncEphemeralRoomEvent, RoomId, UserId};

use crate::Result;
@ -1,22 +1,33 @@
|
|||
/// An async function that can recursively call itself.
|
||||
type AsyncRecursiveType<'a, T> = Pin<Box<dyn Future<Output = T> + 'a + Send>>;
|
||||
|
||||
use ruma::{RoomVersionId, signatures::CanonicalJsonObject, api::federation::discovery::{get_server_keys, get_remote_server_keys}};
|
||||
use tokio::sync::Semaphore;
|
||||
use ruma::{
|
||||
api::federation::discovery::{get_remote_server_keys, get_server_keys},
|
||||
signatures::CanonicalJsonObject,
|
||||
RoomVersionId,
|
||||
};
|
||||
use std::{
|
||||
collections::{btree_map, hash_map, BTreeMap, HashMap, HashSet},
|
||||
pin::Pin,
|
||||
sync::{Arc, RwLock, RwLockWriteGuard},
|
||||
time::{Duration, Instant, SystemTime},
|
||||
};
|
||||
use tokio::sync::Semaphore;
|
||||
|
||||
use futures_util::{Future, stream::FuturesUnordered, StreamExt};
|
||||
use futures_util::{stream::FuturesUnordered, Future, StreamExt};
|
||||
use ruma::{
|
||||
api::{
|
||||
client::error::ErrorKind,
|
||||
federation::{event::{get_event, get_room_state_ids}, membership::create_join_event, discovery::get_remote_server_keys_batch::{v2::QueryCriteria, self}},
|
||||
federation::{
|
||||
discovery::get_remote_server_keys_batch::{self, v2::QueryCriteria},
|
||||
event::{get_event, get_room_state_ids},
|
||||
membership::create_join_event,
|
||||
},
|
||||
},
|
||||
events::{
|
||||
room::{create::RoomCreateEventContent, server_acl::RoomServerAclEventContent},
|
||||
StateEventType,
|
||||
},
|
||||
events::{room::{create::RoomCreateEventContent, server_acl::RoomServerAclEventContent}, StateEventType},
|
||||
int,
|
||||
serde::Base64,
|
||||
signatures::CanonicalJsonValue,
|
||||
|
@ -24,9 +35,9 @@ use ruma::{
|
|||
uint, EventId, MilliSecondsSinceUnixEpoch, RoomId, ServerName, ServerSigningKeyId,
|
||||
};
|
||||
use serde_json::value::{to_raw_value, RawValue as RawJsonValue};
|
||||
use tracing::{error, info, trace, warn, debug};
|
||||
use tracing::{debug, error, info, trace, warn};
|
||||
|
||||
use crate::{service::*, services, Result, Error, PduEvent};
|
||||
use crate::{service::*, services, Error, PduEvent, Result};
|
||||
|
||||
pub struct Service;
|
||||
|
||||
|
@ -72,10 +83,7 @@ impl Service {
|
|||
));
|
||||
}
|
||||
|
||||
if services()
|
||||
.rooms
|
||||
.metadata
|
||||
.is_disabled(room_id)? {
|
||||
if services().rooms.metadata.is_disabled(room_id)? {
|
||||
return Err(Error::BadRequest(
|
||||
ErrorKind::Forbidden,
|
||||
"Federation of this room is currently disabled on this server.",
|
||||
|
@ -94,7 +102,8 @@ impl Service {
|
|||
.ok_or_else(|| Error::bad_database("Failed to find create event in db."))?;
|
||||
|
||||
let first_pdu_in_room = services()
|
||||
.rooms.timeline
|
||||
.rooms
|
||||
.timeline
|
||||
.first_pdu_in_room(room_id)?
|
||||
.ok_or_else(|| Error::bad_database("Failed to find first pdu in db."))?;
|
||||
|
||||
|
@ -113,21 +122,20 @@ impl Service {
|
|||
}
|
||||
|
||||
// 9. Fetch any missing prev events doing all checks listed here starting at 1. These are timeline events
|
||||
let (sorted_prev_events, mut eventid_info) = self.fetch_unknown_prev_events(
|
||||
origin,
|
||||
&create_event,
|
||||
room_id,
|
||||
pub_key_map,
|
||||
incoming_pdu.prev_events.clone(),
|
||||
).await?;
|
||||
let (sorted_prev_events, mut eventid_info) = self
|
||||
.fetch_unknown_prev_events(
|
||||
origin,
|
||||
&create_event,
|
||||
room_id,
|
||||
pub_key_map,
|
||||
incoming_pdu.prev_events.clone(),
|
||||
)
|
||||
.await?;
|
||||
|
||||
let mut errors = 0;
|
||||
for prev_id in dbg!(sorted_prev_events) {
|
||||
// Check for disabled again because it might have changed
|
||||
if services()
|
||||
.rooms
|
||||
.metadata
|
||||
.is_disabled(room_id)? {
|
||||
if services().rooms.metadata.is_disabled(room_id)? {
|
||||
return Err(Error::BadRequest(
|
||||
ErrorKind::Forbidden,
|
||||
"Federation of this room is currently disabled on this server.",
|
||||
|
@ -224,15 +232,18 @@ impl Service {
|
|||
.write()
|
||||
.unwrap()
|
||||
.insert(room_id.to_owned(), (event_id.to_owned(), start_time));
|
||||
let r = services().rooms.event_handler.upgrade_outlier_to_timeline_pdu(
|
||||
incoming_pdu,
|
||||
val,
|
||||
&create_event,
|
||||
origin,
|
||||
room_id,
|
||||
pub_key_map,
|
||||
)
|
||||
.await;
|
||||
let r = services()
|
||||
.rooms
|
||||
.event_handler
|
||||
.upgrade_outlier_to_timeline_pdu(
|
||||
incoming_pdu,
|
||||
val,
|
||||
&create_event,
|
||||
origin,
|
||||
room_id,
|
||||
pub_key_map,
|
||||
)
|
||||
.await;
|
||||
services()
|
||||
.globals
|
||||
.roomid_federationhandletime
|
||||
|
@ -252,8 +263,7 @@ impl Service {
|
|||
room_id: &'a RoomId,
|
||||
value: BTreeMap<String, CanonicalJsonValue>,
|
||||
pub_key_map: &'a RwLock<BTreeMap<String, BTreeMap<String, Base64>>>,
|
||||
) -> AsyncRecursiveType<'a, Result<(Arc<PduEvent>, BTreeMap<String, CanonicalJsonValue>)>>
|
||||
{
|
||||
) -> AsyncRecursiveType<'a, Result<(Arc<PduEvent>, BTreeMap<String, CanonicalJsonValue>)>> {
|
||||
Box::pin(async move {
|
||||
// TODO: For RoomVersion6 we must check that Raw<..> is canonical do we anywhere?: https://matrix.org/docs/spec/rooms/v6#canonical-json
|
||||
|
||||
|
@ -282,14 +292,22 @@ impl Service {
|
|||
Err(e) => {
|
||||
// Drop
|
||||
warn!("Dropping bad event {}: {}", event_id, e);
|
||||
return Err(Error::BadRequest(ErrorKind::InvalidParam, "Signature verification failed"));
|
||||
return Err(Error::BadRequest(
|
||||
ErrorKind::InvalidParam,
|
||||
"Signature verification failed",
|
||||
));
|
||||
}
|
||||
Ok(ruma::signatures::Verified::Signatures) => {
|
||||
// Redact
|
||||
warn!("Calculated hash does not match: {}", event_id);
|
||||
match ruma::signatures::redact(&value, room_version_id) {
|
||||
Ok(obj) => obj,
|
||||
Err(_) => return Err(Error::BadRequest(ErrorKind::InvalidParam, "Redaction failed")),
|
||||
Err(_) => {
|
||||
return Err(Error::BadRequest(
|
||||
ErrorKind::InvalidParam,
|
||||
"Redaction failed",
|
||||
))
|
||||
}
|
||||
}
|
||||
}
|
||||
Ok(ruma::signatures::Verified::All) => value,
|
||||
|
@ -376,7 +394,8 @@ impl Service {
|
|||
&incoming_pdu,
|
||||
None::<PduEvent>, // TODO: third party invite
|
||||
|k, s| auth_events.get(&(k.to_string().into(), s.to_owned())),
|
||||
).map_err(|_e| Error::BadRequest(ErrorKind::InvalidParam, "Auth check failed"))?
|
||||
)
|
||||
.map_err(|_e| Error::BadRequest(ErrorKind::InvalidParam, "Auth check failed"))?
|
||||
{
|
||||
return Err(Error::BadRequest(
|
||||
ErrorKind::InvalidParam,
|
||||
|
@ -415,9 +434,13 @@ impl Service {
|
|||
|
||||
if services()
|
||||
.rooms
|
||||
.pdu_metadata.is_event_soft_failed(&incoming_pdu.event_id)?
|
||||
.pdu_metadata
|
||||
.is_event_soft_failed(&incoming_pdu.event_id)?
|
||||
{
|
||||
return Err(Error::BadRequest(ErrorKind::InvalidParam, "Event has been soft failed"));
|
||||
return Err(Error::BadRequest(
|
||||
ErrorKind::InvalidParam,
|
||||
"Event has been soft failed",
|
||||
));
|
||||
}
|
||||
|
||||
info!("Upgrading {} to timeline pdu", incoming_pdu.event_id);
|
||||
|
@ -448,7 +471,13 @@ impl Service {
|
|||
.pdu_shortstatehash(prev_event)?;
|
||||
|
||||
let state = if let Some(shortstatehash) = prev_event_sstatehash {
|
||||
Some(services().rooms.state_accessor.state_full_ids(shortstatehash).await)
|
||||
Some(
|
||||
services()
|
||||
.rooms
|
||||
.state_accessor
|
||||
.state_full_ids(shortstatehash)
|
||||
.await,
|
||||
)
|
||||
} else {
|
||||
None
|
||||
};
|
||||
|
@ -466,10 +495,10 @@ impl Service {
|
|||
})?;
|
||||
|
||||
if let Some(state_key) = &prev_pdu.state_key {
|
||||
let shortstatekey = services()
|
||||
.rooms
|
||||
.short
|
||||
.get_or_create_shortstatekey(&prev_pdu.kind.to_string().into(), state_key)?;
|
||||
let shortstatekey = services().rooms.short.get_or_create_shortstatekey(
|
||||
&prev_pdu.kind.to_string().into(),
|
||||
state_key,
|
||||
)?;
|
||||
|
||||
state.insert(shortstatekey, Arc::from(prev_event));
|
||||
// Now it's the state after the pdu
|
||||
|
@ -483,21 +512,25 @@ impl Service {
|
|||
|
||||
let mut okay = true;
|
||||
for prev_eventid in &incoming_pdu.prev_events {
|
||||
let prev_event = if let Ok(Some(pdu)) = services().rooms.timeline.get_pdu(prev_eventid) {
|
||||
pdu
|
||||
} else {
|
||||
okay = false;
|
||||
break;
|
||||
};
|
||||
|
||||
let sstatehash =
|
||||
if let Ok(Some(s)) = services().rooms.state_accessor.pdu_shortstatehash(prev_eventid) {
|
||||
s
|
||||
let prev_event =
|
||||
if let Ok(Some(pdu)) = services().rooms.timeline.get_pdu(prev_eventid) {
|
||||
pdu
|
||||
} else {
|
||||
okay = false;
|
||||
break;
|
||||
};
|
||||
|
||||
let sstatehash = if let Ok(Some(s)) = services()
|
||||
.rooms
|
||||
.state_accessor
|
||||
.pdu_shortstatehash(prev_eventid)
|
||||
{
|
||||
s
|
||||
} else {
|
||||
okay = false;
|
||||
break;
|
||||
};
|
||||
|
||||
extremity_sstatehashes.insert(sstatehash, prev_event);
|
||||
}
|
||||
|
||||
|
@ -513,13 +546,10 @@ impl Service {
|
|||
.await?;
|
||||
|
||||
if let Some(state_key) = &prev_event.state_key {
|
||||
let shortstatekey = services()
|
||||
.rooms
|
||||
.short
|
||||
.get_or_create_shortstatekey(
|
||||
&prev_event.kind.to_string().into(),
|
||||
state_key,
|
||||
)?;
|
||||
let shortstatekey = services().rooms.short.get_or_create_shortstatekey(
|
||||
&prev_event.kind.to_string().into(),
|
||||
state_key,
|
||||
)?;
|
||||
leaf_state.insert(shortstatekey, Arc::from(&*prev_event.event_id));
|
||||
// Now it's the state after the pdu
|
||||
}
|
||||
|
@ -528,7 +558,8 @@ impl Service {
|
|||
let mut starting_events = Vec::with_capacity(leaf_state.len());
|
||||
|
||||
for (k, id) in leaf_state {
|
||||
if let Ok((ty, st_key)) = services().rooms.short.get_statekey_from_short(k) {
|
||||
if let Ok((ty, st_key)) = services().rooms.short.get_statekey_from_short(k)
|
||||
{
|
||||
// FIXME: Undo .to_string().into() when StateMap
|
||||
// is updated to use StateEventType
|
||||
state.insert((ty.to_string().into(), st_key), id.clone());
|
||||
|
@ -567,10 +598,8 @@ impl Service {
|
|||
new_state
|
||||
.into_iter()
|
||||
.map(|((event_type, state_key), event_id)| {
|
||||
let shortstatekey = services()
|
||||
.rooms
|
||||
.short
|
||||
.get_or_create_shortstatekey(
|
||||
let shortstatekey =
|
||||
services().rooms.short.get_or_create_shortstatekey(
|
||||
&event_type.to_string().into(),
|
||||
&state_key,
|
||||
)?;
|
||||
|
@ -618,15 +647,14 @@ impl Service {
|
|||
|
||||
let mut state: BTreeMap<_, Arc<EventId>> = BTreeMap::new();
|
||||
for (pdu, _) in state_vec {
|
||||
let state_key = pdu
|
||||
.state_key
|
||||
.clone()
|
||||
.ok_or_else(|| Error::bad_database("Found non-state pdu in state events."))?;
|
||||
let state_key = pdu.state_key.clone().ok_or_else(|| {
|
||||
Error::bad_database("Found non-state pdu in state events.")
|
||||
})?;
|
||||
|
||||
let shortstatekey = services()
|
||||
.rooms
|
||||
.short
|
||||
.get_or_create_shortstatekey(&pdu.kind.to_string().into(), &state_key)?;
|
||||
let shortstatekey = services().rooms.short.get_or_create_shortstatekey(
|
||||
&pdu.kind.to_string().into(),
|
||||
&state_key,
|
||||
)?;
|
||||
|
||||
match state.entry(shortstatekey) {
|
||||
btree_map::Entry::Vacant(v) => {
|
||||
|
@ -648,7 +676,9 @@ impl Service {
|
|||
if state.get(&create_shortstatekey).map(|id| id.as_ref())
|
||||
!= Some(&create_event.event_id)
|
||||
{
|
||||
return Err(Error::bad_database("Incoming event refers to wrong create event."));
|
||||
return Err(Error::bad_database(
|
||||
"Incoming event refers to wrong create event.",
|
||||
));
|
||||
}
|
||||
|
||||
state_at_incoming_event = Some(state);
|
||||
|
@ -683,7 +713,9 @@ impl Service {
|
|||
.map_err(|_e| Error::BadRequest(ErrorKind::InvalidParam, "Auth check failed."))?;
|
||||
|
||||
if !check_result {
|
||||
return Err(Error::bad_database("Event has failed auth check with state at the event."));
|
||||
return Err(Error::bad_database(
|
||||
"Event has failed auth check with state at the event.",
|
||||
));
|
||||
}
|
||||
info!("Auth check succeeded");
|
||||
|
||||
|
@ -703,10 +735,7 @@ impl Service {
|
|||
// Now we calculate the set of extremities this room has after the incoming event has been
|
||||
// applied. We start with the previous extremities (aka leaves)
|
||||
info!("Calculating extremities");
|
||||
let mut extremities = services()
|
||||
.rooms
|
||||
.state
|
||||
.get_forward_extremities(room_id)?;
|
||||
let mut extremities = services().rooms.state.get_forward_extremities(room_id)?;
|
||||
|
||||
// Remove any forward extremities that are referenced by this incoming event's prev_events
|
||||
for prev_event in &incoming_pdu.prev_events {
|
||||
|
@ -716,8 +745,15 @@ impl Service {
|
|||
}
|
||||
|
||||
// Only keep those extremities were not referenced yet
|
||||
extremities
|
||||
.retain(|id| !matches!(services().rooms.pdu_metadata.is_event_referenced(room_id, id), Ok(true)));
|
||||
extremities.retain(|id| {
|
||||
!matches!(
|
||||
services()
|
||||
.rooms
|
||||
.pdu_metadata
|
||||
.is_event_referenced(room_id, id),
|
||||
Ok(true)
|
||||
)
|
||||
});
|
||||
|
||||
info!("Compressing state at event");
|
||||
let state_ids_compressed = state_at_incoming_event
|
||||
|
@ -733,23 +769,21 @@ impl Service {
|
|||
// 13. Check if the event passes auth based on the "current state" of the room, if not "soft fail" it
|
||||
info!("Starting soft fail auth check");
|
||||
|
||||
let auth_events = services()
|
||||
.rooms
|
||||
.state
|
||||
.get_auth_events(
|
||||
room_id,
|
||||
&incoming_pdu.kind,
|
||||
&incoming_pdu.sender,
|
||||
incoming_pdu.state_key.as_deref(),
|
||||
&incoming_pdu.content,
|
||||
)?;
|
||||
let auth_events = services().rooms.state.get_auth_events(
|
||||
room_id,
|
||||
&incoming_pdu.kind,
|
||||
&incoming_pdu.sender,
|
||||
incoming_pdu.state_key.as_deref(),
|
||||
&incoming_pdu.content,
|
||||
)?;
|
||||
|
||||
let soft_fail = !state_res::event_auth::auth_check(
|
||||
&room_version,
|
||||
&incoming_pdu,
|
||||
None::<PduEvent>,
|
||||
|k, s| auth_events.get(&(k.clone(), s.to_owned())),
|
||||
).map_err(|_e| Error::BadRequest(ErrorKind::InvalidParam, "Auth check failed."))?;
|
||||
)
|
||||
.map_err(|_e| Error::BadRequest(ErrorKind::InvalidParam, "Auth check failed."))?;
|
||||
|
||||
if soft_fail {
|
||||
services().rooms.timeline.append_incoming_pdu(
|
||||
|
@ -767,7 +801,10 @@ impl Service {
|
|||
.rooms
|
||||
.pdu_metadata
|
||||
.mark_event_soft_failed(&incoming_pdu.event_id)?;
|
||||
return Err(Error::BadRequest(ErrorKind::InvalidParam, "Event has been soft failed"));
|
||||
return Err(Error::BadRequest(
|
||||
ErrorKind::InvalidParam,
|
||||
"Event has been soft failed",
|
||||
));
|
||||
}
|
||||
|
||||
if incoming_pdu.state_key.is_some() {
|
||||
|
@ -789,15 +826,12 @@ impl Service {
|
|||
|
||||
info!("Loading extremities");
|
||||
for id in dbg!(&extremities) {
|
||||
match services()
|
||||
.rooms
|
||||
.timeline
|
||||
.get_pdu(id)?
|
||||
{
|
||||
match services().rooms.timeline.get_pdu(id)? {
|
||||
Some(leaf_pdu) => {
|
||||
extremity_sstatehashes.insert(
|
||||
services()
|
||||
.rooms.state_accessor
|
||||
.rooms
|
||||
.state_accessor
|
||||
.pdu_shortstatehash(&leaf_pdu.event_id)?
|
||||
.ok_or_else(|| {
|
||||
error!(
|
||||
|
@ -829,10 +863,10 @@ impl Service {
|
|||
// We also add state after incoming event to the fork states
|
||||
let mut state_after = state_at_incoming_event.clone();
|
||||
if let Some(state_key) = &incoming_pdu.state_key {
|
||||
let shortstatekey = services()
|
||||
.rooms
|
||||
.short
|
||||
.get_or_create_shortstatekey(&incoming_pdu.kind.to_string().into(), state_key)?;
|
||||
let shortstatekey = services().rooms.short.get_or_create_shortstatekey(
|
||||
&incoming_pdu.kind.to_string().into(),
|
||||
state_key,
|
||||
)?;
|
||||
|
||||
state_after.insert(shortstatekey, Arc::from(&*incoming_pdu.event_id));
|
||||
}
|
||||
|
@ -921,10 +955,10 @@ impl Service {
|
|||
state
|
||||
.into_iter()
|
||||
.map(|((event_type, state_key), event_id)| {
|
||||
let shortstatekey = services()
|
||||
.rooms
|
||||
.short
|
||||
.get_or_create_shortstatekey(&event_type.to_string().into(), &state_key)?;
|
||||
let shortstatekey = services().rooms.short.get_or_create_shortstatekey(
|
||||
&event_type.to_string().into(),
|
||||
&state_key,
|
||||
)?;
|
||||
services()
|
||||
.rooms
|
||||
.state_compressor
|
||||
|
@ -936,7 +970,10 @@ impl Service {
|
|||
// Set the new room state to the resolved state
|
||||
if update_state {
|
||||
info!("Forcing new room state");
|
||||
let sstatehash = services().rooms.state_compressor.save_state(room_id, new_room_state)?;
|
||||
let sstatehash = services()
|
||||
.rooms
|
||||
.state_compressor
|
||||
.save_state(room_id, new_room_state)?;
|
||||
services()
|
||||
.rooms
|
||||
.state
|
||||
|
@ -951,15 +988,14 @@ impl Service {
|
|||
// We use the `state_at_event` instead of `state_after` so we accurately
|
||||
// represent the state for this event.
|
||||
|
||||
let pdu_id = services().rooms.timeline
|
||||
.append_incoming_pdu(
|
||||
&incoming_pdu,
|
||||
val,
|
||||
extremities.iter().map(|e| (**e).to_owned()).collect(),
|
||||
state_ids_compressed,
|
||||
soft_fail,
|
||||
&state_lock,
|
||||
)?;
|
||||
let pdu_id = services().rooms.timeline.append_incoming_pdu(
|
||||
&incoming_pdu,
|
||||
val,
|
||||
extremities.iter().map(|e| (**e).to_owned()).collect(),
|
||||
state_ids_compressed,
|
||||
soft_fail,
|
||||
&state_lock,
|
||||
)?;
|
||||
|
||||
info!("Appended incoming pdu");
|
||||
|
||||
|
@ -1141,8 +1177,10 @@ impl Service {
|
|||
room_id: &RoomId,
|
||||
pub_key_map: &RwLock<BTreeMap<String, BTreeMap<String, Base64>>>,
|
||||
initial_set: Vec<Arc<EventId>>,
|
||||
) -> Result<(Vec<Arc<EventId>>, HashMap<Arc<EventId>,
|
||||
(Arc<PduEvent>, BTreeMap<String, CanonicalJsonValue>)>)> {
|
||||
) -> Result<(
|
||||
Vec<Arc<EventId>>,
|
||||
HashMap<Arc<EventId>, (Arc<PduEvent>, BTreeMap<String, CanonicalJsonValue>)>,
|
||||
)> {
|
||||
let mut graph: HashMap<Arc<EventId>, _> = HashMap::new();
|
||||
let mut eventid_info = HashMap::new();
|
||||
let mut todo_outlier_stack: Vec<Arc<EventId>> = initial_set;
|
||||
|
@ -1223,7 +1261,8 @@ impl Service {
|
|||
.map_or_else(|| uint!(0), |info| info.0.origin_server_ts),
|
||||
),
|
||||
))
|
||||
}).map_err(|_| Error::bad_database("Error sorting prev events"))?;
|
||||
})
|
||||
.map_err(|_| Error::bad_database("Error sorting prev events"))?;
|
||||
|
||||
Ok((sorted, eventid_info))
|
||||
}
|
||||
|
@ -1253,13 +1292,16 @@ impl Service {
|
|||
|
||||
let signature_ids = signature_object.keys().cloned().collect::<Vec<_>>();
|
||||
|
||||
let fetch_res = self.fetch_signing_keys(
|
||||
signature_server.as_str().try_into().map_err(|_| {
|
||||
Error::BadServerResponse("Invalid servername in signatures of server response pdu.")
|
||||
})?,
|
||||
signature_ids,
|
||||
)
|
||||
.await;
|
||||
let fetch_res = self
|
||||
.fetch_signing_keys(
|
||||
signature_server.as_str().try_into().map_err(|_| {
|
||||
Error::BadServerResponse(
|
||||
"Invalid servername in signatures of server response pdu.",
|
||||
)
|
||||
})?,
|
||||
signature_ids,
|
||||
)
|
||||
.await;
|
||||
|
||||
let keys = match fetch_res {
|
||||
Ok(keys) => keys,
|
||||
|
@ -1336,8 +1378,9 @@ impl Service {
|
|||
|
||||
let signature_ids = signature_object.keys().cloned().collect::<Vec<_>>();
|
||||
|
||||
let contains_all_ids =
|
||||
|keys: &BTreeMap<String, Base64>| signature_ids.iter().all(|id| keys.contains_key(id));
|
||||
let contains_all_ids = |keys: &BTreeMap<String, Base64>| {
|
||||
signature_ids.iter().all(|id| keys.contains_key(id))
|
||||
};
|
||||
|
||||
let origin = <&ServerName>::try_from(signature_server.as_str()).map_err(|_| {
|
||||
Error::BadServerResponse("Invalid servername in signatures of server response pdu.")
|
||||
|
@ -1373,8 +1416,10 @@ impl Service {
|
|||
room_version: &RoomVersionId,
|
||||
pub_key_map: &RwLock<BTreeMap<String, BTreeMap<String, Base64>>>,
|
||||
) -> Result<()> {
|
||||
let mut servers: BTreeMap<Box<ServerName>, BTreeMap<Box<ServerSigningKeyId>, QueryCriteria>> =
|
||||
BTreeMap::new();
|
||||
let mut servers: BTreeMap<
|
||||
Box<ServerName>,
|
||||
BTreeMap<Box<ServerSigningKeyId>, QueryCriteria>,
|
||||
> = BTreeMap::new();
|
||||
|
||||
{
|
||||
let mut pkm = pub_key_map
|
||||
|
@ -1440,11 +1485,9 @@ impl Service {
|
|||
.into_iter()
|
||||
.map(|(server, _)| async move {
|
||||
(
|
||||
services().sending
|
||||
.send_federation_request(
|
||||
&server,
|
||||
get_server_keys::v2::Request::new(),
|
||||
)
|
||||
services()
|
||||
.sending
|
||||
.send_federation_request(&server, get_server_keys::v2::Request::new())
|
||||
.await,
|
||||
server,
|
||||
)
|
||||
|
@ -1472,10 +1515,11 @@ impl Service {
|
|||
|
||||
/// Returns Ok if the acl allows the server
|
||||
pub fn acl_check(&self, server_name: &ServerName, room_id: &RoomId) -> Result<()> {
|
||||
let acl_event = match services()
|
||||
.rooms.state_accessor
|
||||
.room_state_get(room_id, &StateEventType::RoomServerAcl, "")?
|
||||
{
|
||||
let acl_event = match services().rooms.state_accessor.room_state_get(
|
||||
room_id,
|
||||
&StateEventType::RoomServerAcl,
|
||||
"",
|
||||
)? {
|
||||
Some(acl) => acl,
|
||||
None => return Ok(()),
|
||||
};
|
||||
|
@ -1587,7 +1631,9 @@ impl Service {
|
|||
.ok()
|
||||
.and_then(|resp| resp.server_key.deserialize().ok())
|
||||
{
|
||||
services().globals.add_signing_key(origin, server_key.clone())?;
|
||||
services()
|
||||
.globals
|
||||
.add_signing_key(origin, server_key.clone())?;
|
||||
|
||||
result.extend(
|
||||
server_key
|
||||
|
|
|
@@ -1,5 +1,5 @@
use ruma::{RoomId, DeviceId, UserId};
use crate::Result;
use ruma::{DeviceId, RoomId, UserId};

pub trait Data: Send + Sync {
fn lazy_load_was_sent_before(

@@ -15,7 +15,7 @@ pub trait Data: Send + Sync {
user_id: &UserId,
device_id: &DeviceId,
room_id: &RoomId,
confirmed_user_ids: &mut dyn Iterator<Item=&UserId>,
confirmed_user_ids: &mut dyn Iterator<Item = &UserId>,
) -> Result<()>;

fn lazy_load_reset(
@@ -1,15 +1,19 @@
mod data;
use std::{collections::{HashSet, HashMap}, sync::{Mutex, Arc}};
use std::{
collections::{HashMap, HashSet},
sync::{Arc, Mutex},
};

pub use data::Data;
use ruma::{DeviceId, UserId, RoomId};
use ruma::{DeviceId, RoomId, UserId};

use crate::Result;

pub struct Service {
db: Arc<dyn Data>,

lazy_load_waiting: Mutex<HashMap<(Box<UserId>, Box<DeviceId>, Box<RoomId>, u64), HashSet<Box<UserId>>>>,
lazy_load_waiting:
Mutex<HashMap<(Box<UserId>, Box<DeviceId>, Box<RoomId>, u64), HashSet<Box<UserId>>>>,
}

impl Service {

@@ -21,7 +25,8 @@ impl Service {
room_id: &RoomId,
ll_user: &UserId,
) -> Result<bool> {
self.db.lazy_load_was_sent_before(user_id, device_id, room_id, ll_user)
self.db
.lazy_load_was_sent_before(user_id, device_id, room_id, ll_user)
}

#[tracing::instrument(skip(self))]

@@ -58,7 +63,12 @@ impl Service {
room_id.to_owned(),
since,
)) {
self.db.lazy_load_confirm_delivery(user_id, device_id, room_id, &mut user_ids.iter().map(|&u| &*u))?;
self.db.lazy_load_confirm_delivery(
user_id,
device_id,
room_id,
&mut user_ids.iter().map(|&u| &*u),
)?;
} else {
// Ignore
}
@@ -1,5 +1,5 @@
use ruma::RoomId;
use crate::Result;
use ruma::RoomId;

pub trait Data: Send + Sync {
fn exists(&self, room_id: &RoomId) -> Result<bool>;

@@ -16,7 +16,25 @@ pub mod state_compressor;
pub mod timeline;
pub mod user;

pub trait Data: alias::Data + auth_chain::Data + directory::Data + edus::Data + lazy_loading::Data + metadata::Data + outlier::Data + pdu_metadata::Data + search::Data + short::Data + state::Data + state_accessor::Data + state_cache::Data + state_compressor::Data + timeline::Data + user::Data {}
pub trait Data:
alias::Data
+ auth_chain::Data
+ directory::Data
+ edus::Data
+ lazy_loading::Data
+ metadata::Data
+ outlier::Data
+ pdu_metadata::Data
+ search::Data
+ short::Data
+ state::Data
+ state_accessor::Data
+ state_cache::Data
+ state_compressor::Data
+ timeline::Data
+ user::Data
{
}

pub struct Service {
pub alias: alias::Service,
@@ -2,9 +2,9 @@ mod data;
use std::sync::Arc;

pub use data::Data;
use ruma::{EventId, signatures::CanonicalJsonObject};
use ruma::{signatures::CanonicalJsonObject, EventId};

use crate::{Result, PduEvent};
use crate::{PduEvent, Result};

pub struct Service {
db: Arc<dyn Data>,

@@ -1,7 +1,7 @@
use std::sync::Arc;

use ruma::{EventId, RoomId};
use crate::Result;
use ruma::{EventId, RoomId};

pub trait Data: Send + Sync {
fn mark_as_referenced(&self, room_id: &RoomId, event_ids: &[Arc<EventId>]) -> Result<()>;

@@ -2,7 +2,7 @@ mod data;
use std::sync::Arc;

pub use data::Data;
use ruma::{RoomId, EventId};
use ruma::{EventId, RoomId};

use crate::Result;
@@ -1,5 +1,5 @@
use ruma::RoomId;
use crate::Result;
use ruma::RoomId;

pub trait Data: Send + Sync {
fn index_pdu<'a>(&self, shortroomid: u64, pdu_id: &[u8], message_body: String) -> Result<()>;

@@ -12,7 +12,12 @@ pub struct Service {

impl Service {
#[tracing::instrument(skip(self))]
pub fn index_pdu<'a>(&self, shortroomid: u64, pdu_id: &[u8], message_body: String) -> Result<()> {
pub fn index_pdu<'a>(
&self,
shortroomid: u64,
pdu_id: &[u8],
message_body: String,
) -> Result<()> {
self.db.index_pdu(shortroomid, pdu_id, message_body)
}
@@ -1,13 +1,10 @@
use std::sync::Arc;

use ruma::{EventId, events::StateEventType, RoomId};
use crate::Result;
use ruma::{events::StateEventType, EventId, RoomId};

pub trait Data: Send + Sync {
fn get_or_create_shorteventid(
&self,
event_id: &EventId,
) -> Result<u64>;
fn get_or_create_shorteventid(&self, event_id: &EventId) -> Result<u64>;

fn get_shortstatekey(
&self,

@@ -26,15 +23,9 @@ pub trait Data: Send + Sync {
fn get_statekey_from_short(&self, shortstatekey: u64) -> Result<(StateEventType, String)>;

/// Returns (shortstatehash, already_existed)
fn get_or_create_shortstatehash(
&self,
state_hash: &[u8],
) -> Result<(u64, bool)>;
fn get_or_create_shortstatehash(&self, state_hash: &[u8]) -> Result<(u64, bool)>;

fn get_shortroomid(&self, room_id: &RoomId) -> Result<Option<u64>>;

fn get_or_create_shortroomid(
&self,
room_id: &RoomId,
) -> Result<u64>;
fn get_or_create_shortroomid(&self, room_id: &RoomId) -> Result<u64>;
}
@@ -2,19 +2,16 @@ mod data;
use std::sync::Arc;

pub use data::Data;
use ruma::{EventId, events::StateEventType, RoomId};
use ruma::{events::StateEventType, EventId, RoomId};

use crate::{Result, Error, utils, services};
use crate::{services, utils, Error, Result};

pub struct Service {
db: Arc<dyn Data>,
}

impl Service {
pub fn get_or_create_shorteventid(
&self,
event_id: &EventId,
) -> Result<u64> {
pub fn get_or_create_shorteventid(&self, event_id: &EventId) -> Result<u64> {
self.db.get_or_create_shorteventid(event_id)
}

@@ -43,10 +40,7 @@ impl Service {
}

/// Returns (shortstatehash, already_existed)
pub fn get_or_create_shortstatehash(
&self,
state_hash: &[u8],
) -> Result<(u64, bool)> {
pub fn get_or_create_shortstatehash(&self, state_hash: &[u8]) -> Result<(u64, bool)> {
self.db.get_or_create_shortstatehash(state_hash)
}

@@ -54,10 +48,7 @@ impl Service {
self.db.get_shortroomid(room_id)
}

pub fn get_or_create_shortroomid(
&self,
room_id: &RoomId,
) -> Result<u64> {
pub fn get_or_create_shortroomid(&self, room_id: &RoomId) -> Result<u64> {
self.db.get_or_create_shortroomid(room_id)
}
}
Some files were not shown because too many files have changed in this diff.