mirror of
https://gitlab.com/famedly/conduit.git
synced 2024-12-29 09:44:33 +01:00
Fix review issues; remove EventHashes from prev/auth_events in StateEvent
The latest state-res crate uses ruma's RoomV3 PDUs, which no longer carry (EventId, EventHashes) tuples the way previous versions did (that was left over from rebasing onto master). The Media DB now takes an optional content_type, matching the updated ruma structs.
This commit is contained in:
parent
acd144e934
commit
234b226468
5 changed files with 23 additions and 26 deletions
|
@ -39,7 +39,7 @@ pub async fn create_content_route(
|
|||
db.media.create(
|
||||
mxc.clone(),
|
||||
&body.filename.as_deref(),
|
||||
body.content_type.as_deref().unwrap_or("img"), // TODO this is now optional handle
|
||||
&body.content_type.as_deref(), // TODO this is now optional handle
|
||||
&body.file,
|
||||
)?;
|
||||
|
||||
|
@ -85,10 +85,7 @@ pub async fn get_content_route(
|
|||
db.media.create(
|
||||
mxc,
|
||||
&get_content_response.content_disposition.as_deref(),
|
||||
get_content_response // TODO this is now optional handle
|
||||
.content_type
|
||||
.as_deref()
|
||||
.unwrap_or("img"),
|
||||
&get_content_response.content_type.as_deref(),
|
||||
&get_content_response.file,
|
||||
)?;
|
||||
|
||||
|
@ -142,10 +139,7 @@ pub async fn get_content_thumbnail_route(
|
|||
db.media.upload_thumbnail(
|
||||
mxc,
|
||||
&None,
|
||||
get_thumbnail_response
|
||||
.content_type
|
||||
.as_deref()
|
||||
.unwrap_or("img"), // TODO now optional, deal with it somehow
|
||||
&get_thumbnail_response.content_type,
|
||||
body.width.try_into().expect("all UInts are valid u32s"),
|
||||
body.height.try_into().expect("all UInts are valid u32s"),
|
||||
&get_thumbnail_response.file,
|
||||
|
|
|
@ -20,7 +20,7 @@ impl Media {
|
|||
&self,
|
||||
mxc: String,
|
||||
filename: &Option<&str>,
|
||||
content_type: &str,
|
||||
content_type: &Option<&str>,
|
||||
file: &[u8],
|
||||
) -> Result<()> {
|
||||
let mut key = mxc.as_bytes().to_vec();
|
||||
|
@ -30,7 +30,12 @@ impl Media {
|
|||
key.push(0xff);
|
||||
key.extend_from_slice(filename.as_ref().map(|f| f.as_bytes()).unwrap_or_default());
|
||||
key.push(0xff);
|
||||
key.extend_from_slice(content_type.as_bytes());
|
||||
key.extend_from_slice(
|
||||
content_type
|
||||
.as_ref()
|
||||
.map(|c| c.as_bytes())
|
||||
.unwrap_or_default(),
|
||||
);
|
||||
|
||||
self.mediaid_file.insert(key, file)?;
|
||||
|
||||
|
@ -42,7 +47,7 @@ impl Media {
|
|||
&self,
|
||||
mxc: String,
|
||||
filename: &Option<String>,
|
||||
content_type: &str,
|
||||
content_type: &Option<String>,
|
||||
width: u32,
|
||||
height: u32,
|
||||
file: &[u8],
|
||||
|
@ -54,7 +59,12 @@ impl Media {
|
|||
key.push(0xff);
|
||||
key.extend_from_slice(filename.as_ref().map(|f| f.as_bytes()).unwrap_or_default());
|
||||
key.push(0xff);
|
||||
key.extend_from_slice(content_type.as_bytes());
|
||||
key.extend_from_slice(
|
||||
content_type
|
||||
.as_ref()
|
||||
.map(|c| c.as_bytes())
|
||||
.unwrap_or_default(),
|
||||
);
|
||||
|
||||
self.mediaid_file.insert(key, file)?;
|
||||
|
||||
|
|
|
@ -647,6 +647,7 @@ impl Rooms {
|
|||
}
|
||||
|
||||
/// Creates a new persisted data unit and adds it to a room.
|
||||
#[allow(clippy::too_many_arguments)]
|
||||
pub fn build_and_append_pdu(
|
||||
&self,
|
||||
pdu_builder: PduBuilder,
|
||||
|
|
14
src/pdu.rs
14
src/pdu.rs
|
@ -225,7 +225,7 @@ impl PduEvent {
|
|||
impl From<&state_res::StateEvent> for PduEvent {
|
||||
fn from(pdu: &state_res::StateEvent) -> Self {
|
||||
Self {
|
||||
event_id: pdu.event_id().clone(),
|
||||
event_id: pdu.event_id(),
|
||||
room_id: pdu.room_id().unwrap().clone(),
|
||||
sender: pdu.sender().clone(),
|
||||
origin_server_ts: (pdu
|
||||
|
@ -260,17 +260,9 @@ impl PduEvent {
|
|||
"type": self.kind,
|
||||
"content": self.content,
|
||||
"state_key": self.state_key,
|
||||
"prev_events": self.prev_events
|
||||
.iter()
|
||||
// TODO How do we create one of these
|
||||
.map(|id| (id, EventHash { sha256: "hello".into() }))
|
||||
.collect::<Vec<_>>(),
|
||||
"prev_events": self.prev_events,
|
||||
"depth": self.depth,
|
||||
"auth_events": self.auth_events
|
||||
.iter()
|
||||
// TODO How do we create one of these
|
||||
.map(|id| (id, EventHash { sha256: "hello".into() }))
|
||||
.collect::<Vec<_>>(),
|
||||
"auth_events": self.auth_events,
|
||||
"redacts": self.redacts,
|
||||
"unsigned": self.unsigned,
|
||||
"hashes": self.hashes,
|
||||
|
|
|
@ -419,7 +419,7 @@ pub async fn send_transaction_message_route<'a>(
|
|||
}
|
||||
}
|
||||
|
||||
// TODO: For RoomVersion6 we must check that Raw<..> is canonical do we?
|
||||
// TODO: For RoomVersion6 we must check that Raw<..> is canonical do we anywhere?
|
||||
// SPEC:
|
||||
// Servers MUST strictly enforce the JSON format specified in the appendices.
|
||||
// This translates to a 400 M_BAD_JSON error on most endpoints, or discarding of
|
||||
|
@ -554,7 +554,7 @@ pub async fn send_transaction_message_route<'a>(
|
|||
// TODO we may not want the auth events chained in here for resolution?
|
||||
their_current_state
|
||||
.iter()
|
||||
.map(|(_id, v)| ((v.kind(), v.state_key()), v.event_id().clone()))
|
||||
.map(|(_id, v)| ((v.kind(), v.state_key()), v.event_id()))
|
||||
.collect::<BTreeMap<_, _>>(),
|
||||
],
|
||||
Some(
|
||||
|
|
Loading…
Reference in a new issue