Mirror of https://github.com/dani-garcia/vaultwarden

Change API and structs to camelCase (#4386)

* Change API inputs/outputs and structs to camelCase

* Fix fields and password history

* Use convert_json_key_lcase_first (sketched below, after the commit metadata)

* Make sends lowercase

* Update admin and templates

* Update org revoke

* Fix sends expecting size to be a string on mobile

* Convert two-factor providers to string
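For orientation, here is a minimal before/after sketch of the struct-level change, modeled on the PartialCipherData hunk further down: PascalCase fields guarded by `#[allow(non_snake_case)]` become snake_case fields mapped via `#[serde(rename_all = "camelCase")]`, and fields that collide with Rust keywords (such as `type` on CipherData) become raw identifiers. The `*Old` name and the `main()` harness exist only so the snippet compiles on its own; they are not part of the commit.

```rust
use serde::Deserialize;
use serde_json::json;

// Old shape (left-hand side of the diff): field names mirror the
// PascalCase JSON keys of the legacy API, so the naming lint is silenced.
#[derive(Debug, Deserialize)]
#[allow(non_snake_case)]
struct PartialCipherDataOld {
    FolderId: Option<String>,
    Favorite: bool,
}

// New shape (right-hand side of the diff): idiomatic snake_case fields,
// with serde renaming them to the camelCase keys current clients send.
#[derive(Debug, Deserialize)]
#[serde(rename_all = "camelCase")]
struct PartialCipherData {
    folder_id: Option<String>,
    favorite: bool,
}

fn main() {
    // The payload as a current client would send it.
    let body = json!({ "folderId": "some-folder-uuid", "favorite": true });
    let parsed: PartialCipherData = serde_json::from_value(body).unwrap();
    println!("{parsed:?}");
}
```

The request guards change in the same spirit: handlers take a plain `Json<T>` instead of the custom `JsonUpcase<T>` wrapper, so bodies are read with `data.into_inner()` rather than `data.into_inner().data`, as the hunks below show.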
Daniel García, 2024-06-23 21:31:02 +02:00, committed by GitHub
parent 8f05a90b96
commit a2bf8def2a
GPG key ID: B5690EEEBB952194
37 changed files with 1950 additions and 2003 deletions
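The "Use convert_json_key_lcase_first" bullet presumably refers to a helper that rewrites JSON whose keys are still stored or built in the legacy PascalCase form so that responses go out camelCased. Its implementation is not part of this diff; the sketch below is only an assumption of what such a helper does — recursively lowercasing the first character of every object key — under a hypothetical name so it is not mistaken for the real function.

```rust
use serde_json::Value;

// Hypothetical stand-in for convert_json_key_lcase_first: lowercase the
// first character of every object key, recursing into nested objects and
// arrays, leaving values untouched.
fn lcase_first_keys(value: Value) -> Value {
    match value {
        Value::Object(map) => Value::Object(
            map.into_iter()
                .map(|(key, val)| {
                    let mut chars = key.chars();
                    let new_key = match chars.next() {
                        Some(first) => first.to_lowercase().collect::<String>() + chars.as_str(),
                        None => String::new(),
                    };
                    (new_key, lcase_first_keys(val))
                })
                .collect(),
        ),
        Value::Array(items) => Value::Array(items.into_iter().map(lcase_first_keys).collect()),
        other => other,
    }
}

fn main() {
    let stored = serde_json::json!({ "Username": "user@example.com", "Uris": [{ "Uri": "https://example.com" }] });
    // Prints {"username":"user@example.com","uris":[{"uri":"https://example.com"}]}
    println!("{}", lcase_first_keys(stored));
}
```

Keys that are produced directly in Rust (the admin JSON, sync responses, attachment responses) are simply renamed by hand in the hunks that follow.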


@ -265,8 +265,8 @@ fn admin_page_login() -> ApiResult<Html<String>> {
render_admin_login(None, None) render_admin_login(None, None)
} }
#[derive(Deserialize, Debug)] #[derive(Debug, Deserialize)]
#[allow(non_snake_case)] #[serde(rename_all = "camelCase")]
struct InviteData { struct InviteData {
email: String, email: String,
} }
@ -326,9 +326,9 @@ async fn get_users_json(_token: AdminToken, mut conn: DbConn) -> Json<Value> {
let mut users_json = Vec::with_capacity(users.len()); let mut users_json = Vec::with_capacity(users.len());
for u in users { for u in users {
let mut usr = u.to_json(&mut conn).await; let mut usr = u.to_json(&mut conn).await;
usr["UserEnabled"] = json!(u.enabled); usr["userEnabled"] = json!(u.enabled);
usr["CreatedAt"] = json!(format_naive_datetime_local(&u.created_at, DT_FMT)); usr["createdAt"] = json!(format_naive_datetime_local(&u.created_at, DT_FMT));
usr["LastActive"] = match u.last_active(&mut conn).await { usr["lastActive"] = match u.last_active(&mut conn).await {
Some(dt) => json!(format_naive_datetime_local(&dt, DT_FMT)), Some(dt) => json!(format_naive_datetime_local(&dt, DT_FMT)),
None => json!(None::<String>), None => json!(None::<String>),
}; };
@ -475,7 +475,7 @@ async fn resend_user_invite(uuid: &str, _token: AdminToken, mut conn: DbConn) ->
} }
} }
#[derive(Deserialize, Debug)] #[derive(Debug, Deserialize)]
struct UserOrgTypeData { struct UserOrgTypeData {
user_type: NumberOrString, user_type: NumberOrString,
user_uuid: String, user_uuid: String,

File diff suppressed because it is too large


@ -12,7 +12,7 @@ use serde_json::Value;
use crate::util::NumberOrString; use crate::util::NumberOrString;
use crate::{ use crate::{
api::{self, core::log_event, EmptyResult, JsonResult, JsonUpcase, Notify, PasswordOrOtpData, UpdateType}, api::{self, core::log_event, EmptyResult, JsonResult, Notify, PasswordOrOtpData, UpdateType},
auth::Headers, auth::Headers,
crypto, crypto,
db::{models::*, DbConn, DbPool}, db::{models::*, DbConn, DbPool},
@ -141,15 +141,15 @@ async fn sync(data: SyncData, headers: Headers, mut conn: DbConn) -> Json<Value>
}; };
Json(json!({ Json(json!({
"Profile": user_json, "profile": user_json,
"Folders": folders_json, "folders": folders_json,
"Collections": collections_json, "collections": collections_json,
"Policies": policies_json, "policies": policies_json,
"Ciphers": ciphers_json, "ciphers": ciphers_json,
"Domains": domains_json, "domains": domains_json,
"Sends": sends_json, "sends": sends_json,
"unofficialServer": true, "unofficialServer": true,
"Object": "sync" "object": "sync"
})) }))
} }
@ -167,9 +167,9 @@ async fn get_ciphers(headers: Headers, mut conn: DbConn) -> Json<Value> {
} }
Json(json!({ Json(json!({
"Data": ciphers_json, "data": ciphers_json,
"Object": "list", "object": "list",
"ContinuationToken": null "continuationToken": null
})) }))
} }
@ -198,17 +198,17 @@ async fn get_cipher_details(uuid: &str, headers: Headers, conn: DbConn) -> JsonR
get_cipher(uuid, headers, conn).await get_cipher(uuid, headers, conn).await
} }
#[derive(Deserialize, Debug)] #[derive(Debug, Deserialize)]
#[allow(non_snake_case)] #[serde(rename_all = "camelCase")]
pub struct CipherData { pub struct CipherData {
// Id is optional as it is included only in bulk share // Id is optional as it is included only in bulk share
pub Id: Option<String>, pub id: Option<String>,
// Folder id is not included in import // Folder id is not included in import
FolderId: Option<String>, folder_id: Option<String>,
// TODO: Some of these might appear all the time, no need for Option // TODO: Some of these might appear all the time, no need for Option
pub OrganizationId: Option<String>, pub organization_id: Option<String>,
Key: Option<String>, key: Option<String>,
/* /*
Login = 1, Login = 1,
@ -216,27 +216,27 @@ pub struct CipherData {
Card = 3, Card = 3,
Identity = 4 Identity = 4
*/ */
pub Type: i32, pub r#type: i32,
pub Name: String, pub name: String,
pub Notes: Option<String>, pub notes: Option<String>,
Fields: Option<Value>, fields: Option<Value>,
// Only one of these should exist, depending on type // Only one of these should exist, depending on type
Login: Option<Value>, login: Option<Value>,
SecureNote: Option<Value>, secure_note: Option<Value>,
Card: Option<Value>, card: Option<Value>,
Identity: Option<Value>, identity: Option<Value>,
Favorite: Option<bool>, favorite: Option<bool>,
Reprompt: Option<i32>, reprompt: Option<i32>,
PasswordHistory: Option<Value>, password_history: Option<Value>,
// These are used during key rotation // These are used during key rotation
// 'Attachments' is unused, contains map of {id: filename} // 'Attachments' is unused, contains map of {id: filename}
#[serde(rename = "Attachments")] #[allow(dead_code)]
_Attachments: Option<Value>, attachments: Option<Value>,
Attachments2: Option<HashMap<String, Attachments2Data>>, attachments2: Option<HashMap<String, Attachments2Data>>,
// The revision datetime (in ISO 8601 format) of the client's local copy // The revision datetime (in ISO 8601 format) of the client's local copy
// of the cipher. This is used to prevent a client from updating a cipher // of the cipher. This is used to prevent a client from updating a cipher
@ -244,31 +244,26 @@ pub struct CipherData {
// loss. It's not an error when no value is provided; this can happen // loss. It's not an error when no value is provided; this can happen
// when using older client versions, or if the operation doesn't involve // when using older client versions, or if the operation doesn't involve
// updating an existing cipher. // updating an existing cipher.
LastKnownRevisionDate: Option<String>, last_known_revision_date: Option<String>,
} }
#[derive(Deserialize, Debug)] #[derive(Debug, Deserialize)]
#[allow(non_snake_case)] #[serde(rename_all = "camelCase")]
pub struct PartialCipherData { pub struct PartialCipherData {
FolderId: Option<String>, folder_id: Option<String>,
Favorite: bool, favorite: bool,
} }
#[derive(Deserialize, Debug)] #[derive(Debug, Deserialize)]
#[allow(non_snake_case)] #[serde(rename_all = "camelCase")]
pub struct Attachments2Data { pub struct Attachments2Data {
FileName: String, file_name: String,
Key: String, key: String,
} }
/// Called when an org admin clones an org cipher. /// Called when an org admin clones an org cipher.
#[post("/ciphers/admin", data = "<data>")] #[post("/ciphers/admin", data = "<data>")]
async fn post_ciphers_admin( async fn post_ciphers_admin(data: Json<ShareCipherData>, headers: Headers, conn: DbConn, nt: Notify<'_>) -> JsonResult {
data: JsonUpcase<ShareCipherData>,
headers: Headers,
conn: DbConn,
nt: Notify<'_>,
) -> JsonResult {
post_ciphers_create(data, headers, conn, nt).await post_ciphers_create(data, headers, conn, nt).await
} }
@ -277,25 +272,25 @@ async fn post_ciphers_admin(
/// `organizationId` is null. /// `organizationId` is null.
#[post("/ciphers/create", data = "<data>")] #[post("/ciphers/create", data = "<data>")]
async fn post_ciphers_create( async fn post_ciphers_create(
data: JsonUpcase<ShareCipherData>, data: Json<ShareCipherData>,
headers: Headers, headers: Headers,
mut conn: DbConn, mut conn: DbConn,
nt: Notify<'_>, nt: Notify<'_>,
) -> JsonResult { ) -> JsonResult {
let mut data: ShareCipherData = data.into_inner().data; let mut data: ShareCipherData = data.into_inner();
// Check if there are one more more collections selected when this cipher is part of an organization. // Check if there are one more more collections selected when this cipher is part of an organization.
// err if this is not the case before creating an empty cipher. // err if this is not the case before creating an empty cipher.
if data.Cipher.OrganizationId.is_some() && data.CollectionIds.is_empty() { if data.cipher.organization_id.is_some() && data.collection_ids.is_empty() {
err!("You must select at least one collection."); err!("You must select at least one collection.");
} }
// This check is usually only needed in update_cipher_from_data(), but we // This check is usually only needed in update_cipher_from_data(), but we
// need it here as well to avoid creating an empty cipher in the call to // need it here as well to avoid creating an empty cipher in the call to
// cipher.save() below. // cipher.save() below.
enforce_personal_ownership_policy(Some(&data.Cipher), &headers, &mut conn).await?; enforce_personal_ownership_policy(Some(&data.cipher), &headers, &mut conn).await?;
let mut cipher = Cipher::new(data.Cipher.Type, data.Cipher.Name.clone()); let mut cipher = Cipher::new(data.cipher.r#type, data.cipher.name.clone());
cipher.user_uuid = Some(headers.user.uuid.clone()); cipher.user_uuid = Some(headers.user.uuid.clone());
cipher.save(&mut conn).await?; cipher.save(&mut conn).await?;
@ -305,23 +300,23 @@ async fn post_ciphers_create(
// the current time, so the stale data check will end up failing down the // the current time, so the stale data check will end up failing down the
// line. Since this function only creates new ciphers (whether by cloning // line. Since this function only creates new ciphers (whether by cloning
// or otherwise), we can just ignore this field entirely. // or otherwise), we can just ignore this field entirely.
data.Cipher.LastKnownRevisionDate = None; data.cipher.last_known_revision_date = None;
share_cipher_by_uuid(&cipher.uuid, data, &headers, &mut conn, &nt).await share_cipher_by_uuid(&cipher.uuid, data, &headers, &mut conn, &nt).await
} }
/// Called when creating a new user-owned cipher. /// Called when creating a new user-owned cipher.
#[post("/ciphers", data = "<data>")] #[post("/ciphers", data = "<data>")]
async fn post_ciphers(data: JsonUpcase<CipherData>, headers: Headers, mut conn: DbConn, nt: Notify<'_>) -> JsonResult { async fn post_ciphers(data: Json<CipherData>, headers: Headers, mut conn: DbConn, nt: Notify<'_>) -> JsonResult {
let mut data: CipherData = data.into_inner().data; let mut data: CipherData = data.into_inner();
// The web/browser clients set this field to null as expected, but the // The web/browser clients set this field to null as expected, but the
// mobile clients seem to set the invalid value `0001-01-01T00:00:00`, // mobile clients seem to set the invalid value `0001-01-01T00:00:00`,
// which results in a warning message being logged. This field isn't // which results in a warning message being logged. This field isn't
// needed when creating a new cipher, so just ignore it unconditionally. // needed when creating a new cipher, so just ignore it unconditionally.
data.LastKnownRevisionDate = None; data.last_known_revision_date = None;
let mut cipher = Cipher::new(data.Type, data.Name.clone()); let mut cipher = Cipher::new(data.r#type, data.name.clone());
update_cipher_from_data(&mut cipher, data, &headers, None, &mut conn, &nt, UpdateType::SyncCipherCreate).await?; update_cipher_from_data(&mut cipher, data, &headers, None, &mut conn, &nt, UpdateType::SyncCipherCreate).await?;
Ok(Json(cipher.to_json(&headers.host, &headers.user.uuid, None, CipherSyncType::User, &mut conn).await)) Ok(Json(cipher.to_json(&headers.host, &headers.user.uuid, None, CipherSyncType::User, &mut conn).await))
@ -339,7 +334,7 @@ async fn enforce_personal_ownership_policy(
headers: &Headers, headers: &Headers,
conn: &mut DbConn, conn: &mut DbConn,
) -> EmptyResult { ) -> EmptyResult {
if data.is_none() || data.unwrap().OrganizationId.is_none() { if data.is_none() || data.unwrap().organization_id.is_none() {
let user_uuid = &headers.user.uuid; let user_uuid = &headers.user.uuid;
let policy_type = OrgPolicyType::PersonalOwnership; let policy_type = OrgPolicyType::PersonalOwnership;
if OrgPolicy::is_applicable_to_user(user_uuid, policy_type, None, conn).await { if OrgPolicy::is_applicable_to_user(user_uuid, policy_type, None, conn).await {
@ -363,7 +358,7 @@ pub async fn update_cipher_from_data(
// Check that the client isn't updating an existing cipher with stale data. // Check that the client isn't updating an existing cipher with stale data.
// And only perform this check when not importing ciphers, else the date/time check will fail. // And only perform this check when not importing ciphers, else the date/time check will fail.
if ut != UpdateType::None { if ut != UpdateType::None {
if let Some(dt) = data.LastKnownRevisionDate { if let Some(dt) = data.last_known_revision_date {
match NaiveDateTime::parse_from_str(&dt, "%+") { match NaiveDateTime::parse_from_str(&dt, "%+") {
// ISO 8601 format // ISO 8601 format
Err(err) => warn!("Error parsing LastKnownRevisionDate '{}': {}", dt, err), Err(err) => warn!("Error parsing LastKnownRevisionDate '{}': {}", dt, err),
@ -375,20 +370,20 @@ pub async fn update_cipher_from_data(
} }
} }
if cipher.organization_uuid.is_some() && cipher.organization_uuid != data.OrganizationId { if cipher.organization_uuid.is_some() && cipher.organization_uuid != data.organization_id {
err!("Organization mismatch. Please resync the client before updating the cipher") err!("Organization mismatch. Please resync the client before updating the cipher")
} }
if let Some(note) = &data.Notes { if let Some(note) = &data.notes {
if note.len() > 10_000 { if note.len() > 10_000 {
err!("The field Notes exceeds the maximum encrypted value length of 10000 characters.") err!("The field Notes exceeds the maximum encrypted value length of 10000 characters.")
} }
} }
// Check if this cipher is being transferred from a personal to an organization vault // Check if this cipher is being transferred from a personal to an organization vault
let transfer_cipher = cipher.organization_uuid.is_none() && data.OrganizationId.is_some(); let transfer_cipher = cipher.organization_uuid.is_none() && data.organization_id.is_some();
if let Some(org_id) = data.OrganizationId { if let Some(org_id) = data.organization_id {
match UserOrganization::find_by_user_and_org(&headers.user.uuid, &org_id, conn).await { match UserOrganization::find_by_user_and_org(&headers.user.uuid, &org_id, conn).await {
None => err!("You don't have permission to add item to organization"), None => err!("You don't have permission to add item to organization"),
Some(org_user) => { Some(org_user) => {
@ -412,7 +407,7 @@ pub async fn update_cipher_from_data(
cipher.user_uuid = Some(headers.user.uuid.clone()); cipher.user_uuid = Some(headers.user.uuid.clone());
} }
if let Some(ref folder_id) = data.FolderId { if let Some(ref folder_id) = data.folder_id {
match Folder::find_by_uuid(folder_id, conn).await { match Folder::find_by_uuid(folder_id, conn).await {
Some(folder) => { Some(folder) => {
if folder.user_uuid != headers.user.uuid { if folder.user_uuid != headers.user.uuid {
@ -424,7 +419,7 @@ pub async fn update_cipher_from_data(
} }
// Modify attachments name and keys when rotating // Modify attachments name and keys when rotating
if let Some(attachments) = data.Attachments2 { if let Some(attachments) = data.attachments2 {
for (id, attachment) in attachments { for (id, attachment) in attachments {
let mut saved_att = match Attachment::find_by_id(&id, conn).await { let mut saved_att = match Attachment::find_by_id(&id, conn).await {
Some(att) => att, Some(att) => att,
@ -445,8 +440,8 @@ pub async fn update_cipher_from_data(
break; break;
} }
saved_att.akey = Some(attachment.Key); saved_att.akey = Some(attachment.key);
saved_att.file_name = attachment.FileName; saved_att.file_name = attachment.file_name;
saved_att.save(conn).await?; saved_att.save(conn).await?;
} }
@ -460,44 +455,44 @@ pub async fn update_cipher_from_data(
fn _clean_cipher_data(mut json_data: Value) -> Value { fn _clean_cipher_data(mut json_data: Value) -> Value {
if json_data.is_array() { if json_data.is_array() {
json_data.as_array_mut().unwrap().iter_mut().for_each(|ref mut f| { json_data.as_array_mut().unwrap().iter_mut().for_each(|ref mut f| {
f.as_object_mut().unwrap().remove("Response"); f.as_object_mut().unwrap().remove("response");
}); });
}; };
json_data json_data
} }
let type_data_opt = match data.Type { let type_data_opt = match data.r#type {
1 => data.Login, 1 => data.login,
2 => data.SecureNote, 2 => data.secure_note,
3 => data.Card, 3 => data.card,
4 => data.Identity, 4 => data.identity,
_ => err!("Invalid type"), _ => err!("Invalid type"),
}; };
let type_data = match type_data_opt { let type_data = match type_data_opt {
Some(mut data) => { Some(mut data) => {
// Remove the 'Response' key from the base object. // Remove the 'Response' key from the base object.
data.as_object_mut().unwrap().remove("Response"); data.as_object_mut().unwrap().remove("response");
// Remove the 'Response' key from every Uri. // Remove the 'Response' key from every Uri.
if data["Uris"].is_array() { if data["uris"].is_array() {
data["Uris"] = _clean_cipher_data(data["Uris"].clone()); data["uris"] = _clean_cipher_data(data["uris"].clone());
} }
data data
} }
None => err!("Data missing"), None => err!("Data missing"),
}; };
cipher.key = data.Key; cipher.key = data.key;
cipher.name = data.Name; cipher.name = data.name;
cipher.notes = data.Notes; cipher.notes = data.notes;
cipher.fields = data.Fields.map(|f| _clean_cipher_data(f).to_string()); cipher.fields = data.fields.map(|f| _clean_cipher_data(f).to_string());
cipher.data = type_data.to_string(); cipher.data = type_data.to_string();
cipher.password_history = data.PasswordHistory.map(|f| f.to_string()); cipher.password_history = data.password_history.map(|f| f.to_string());
cipher.reprompt = data.Reprompt; cipher.reprompt = data.reprompt;
cipher.save(conn).await?; cipher.save(conn).await?;
cipher.move_to_folder(data.FolderId, &headers.user.uuid, conn).await?; cipher.move_to_folder(data.folder_id, &headers.user.uuid, conn).await?;
cipher.set_favorite(data.Favorite, &headers.user.uuid, conn).await?; cipher.set_favorite(data.favorite, &headers.user.uuid, conn).await?;
if ut != UpdateType::None { if ut != UpdateType::None {
// Only log events for organizational ciphers // Only log events for organizational ciphers
@ -533,43 +528,43 @@ pub async fn update_cipher_from_data(
} }
#[derive(Deserialize)] #[derive(Deserialize)]
#[allow(non_snake_case)] #[serde(rename_all = "camelCase")]
struct ImportData { struct ImportData {
Ciphers: Vec<CipherData>, ciphers: Vec<CipherData>,
Folders: Vec<FolderData>, folders: Vec<FolderData>,
FolderRelationships: Vec<RelationsData>, folder_relationships: Vec<RelationsData>,
} }
#[derive(Deserialize)] #[derive(Deserialize)]
#[allow(non_snake_case)] #[serde(rename_all = "camelCase")]
struct RelationsData { struct RelationsData {
// Cipher id // Cipher id
Key: usize, key: usize,
// Folder id // Folder id
Value: usize, value: usize,
} }
#[post("/ciphers/import", data = "<data>")] #[post("/ciphers/import", data = "<data>")]
async fn post_ciphers_import( async fn post_ciphers_import(
data: JsonUpcase<ImportData>, data: Json<ImportData>,
headers: Headers, headers: Headers,
mut conn: DbConn, mut conn: DbConn,
nt: Notify<'_>, nt: Notify<'_>,
) -> EmptyResult { ) -> EmptyResult {
enforce_personal_ownership_policy(None, &headers, &mut conn).await?; enforce_personal_ownership_policy(None, &headers, &mut conn).await?;
let data: ImportData = data.into_inner().data; let data: ImportData = data.into_inner();
// Validate the import before continuing // Validate the import before continuing
// Bitwarden does not process the import if there is one item invalid. // Bitwarden does not process the import if there is one item invalid.
// Since we check for the size of the encrypted note length, we need to do that here to pre-validate it. // Since we check for the size of the encrypted note length, we need to do that here to pre-validate it.
// TODO: See if we can optimize the whole cipher adding/importing and prevent duplicate code and checks. // TODO: See if we can optimize the whole cipher adding/importing and prevent duplicate code and checks.
Cipher::validate_notes(&data.Ciphers)?; Cipher::validate_notes(&data.ciphers)?;
// Read and create the folders // Read and create the folders
let mut folders: Vec<_> = Vec::new(); let mut folders: Vec<_> = Vec::new();
for folder in data.Folders.into_iter() { for folder in data.folders.into_iter() {
let mut new_folder = Folder::new(headers.user.uuid.clone(), folder.Name); let mut new_folder = Folder::new(headers.user.uuid.clone(), folder.name);
new_folder.save(&mut conn).await?; new_folder.save(&mut conn).await?;
folders.push(new_folder); folders.push(new_folder);
@ -578,16 +573,16 @@ async fn post_ciphers_import(
// Read the relations between folders and ciphers // Read the relations between folders and ciphers
let mut relations_map = HashMap::new(); let mut relations_map = HashMap::new();
for relation in data.FolderRelationships { for relation in data.folder_relationships {
relations_map.insert(relation.Key, relation.Value); relations_map.insert(relation.key, relation.value);
} }
// Read and create the ciphers // Read and create the ciphers
for (index, mut cipher_data) in data.Ciphers.into_iter().enumerate() { for (index, mut cipher_data) in data.ciphers.into_iter().enumerate() {
let folder_uuid = relations_map.get(&index).map(|i| folders[*i].uuid.clone()); let folder_uuid = relations_map.get(&index).map(|i| folders[*i].uuid.clone());
cipher_data.FolderId = folder_uuid; cipher_data.folder_id = folder_uuid;
let mut cipher = Cipher::new(cipher_data.Type, cipher_data.Name.clone()); let mut cipher = Cipher::new(cipher_data.r#type, cipher_data.name.clone());
update_cipher_from_data(&mut cipher, cipher_data, &headers, None, &mut conn, &nt, UpdateType::None).await?; update_cipher_from_data(&mut cipher, cipher_data, &headers, None, &mut conn, &nt, UpdateType::None).await?;
} }
@ -602,7 +597,7 @@ async fn post_ciphers_import(
#[put("/ciphers/<uuid>/admin", data = "<data>")] #[put("/ciphers/<uuid>/admin", data = "<data>")]
async fn put_cipher_admin( async fn put_cipher_admin(
uuid: &str, uuid: &str,
data: JsonUpcase<CipherData>, data: Json<CipherData>,
headers: Headers, headers: Headers,
conn: DbConn, conn: DbConn,
nt: Notify<'_>, nt: Notify<'_>,
@ -613,7 +608,7 @@ async fn put_cipher_admin(
#[post("/ciphers/<uuid>/admin", data = "<data>")] #[post("/ciphers/<uuid>/admin", data = "<data>")]
async fn post_cipher_admin( async fn post_cipher_admin(
uuid: &str, uuid: &str,
data: JsonUpcase<CipherData>, data: Json<CipherData>,
headers: Headers, headers: Headers,
conn: DbConn, conn: DbConn,
nt: Notify<'_>, nt: Notify<'_>,
@ -622,25 +617,19 @@ async fn post_cipher_admin(
} }
#[post("/ciphers/<uuid>", data = "<data>")] #[post("/ciphers/<uuid>", data = "<data>")]
async fn post_cipher( async fn post_cipher(uuid: &str, data: Json<CipherData>, headers: Headers, conn: DbConn, nt: Notify<'_>) -> JsonResult {
uuid: &str,
data: JsonUpcase<CipherData>,
headers: Headers,
conn: DbConn,
nt: Notify<'_>,
) -> JsonResult {
put_cipher(uuid, data, headers, conn, nt).await put_cipher(uuid, data, headers, conn, nt).await
} }
#[put("/ciphers/<uuid>", data = "<data>")] #[put("/ciphers/<uuid>", data = "<data>")]
async fn put_cipher( async fn put_cipher(
uuid: &str, uuid: &str,
data: JsonUpcase<CipherData>, data: Json<CipherData>,
headers: Headers, headers: Headers,
mut conn: DbConn, mut conn: DbConn,
nt: Notify<'_>, nt: Notify<'_>,
) -> JsonResult { ) -> JsonResult {
let data: CipherData = data.into_inner().data; let data: CipherData = data.into_inner();
let mut cipher = match Cipher::find_by_uuid(uuid, &mut conn).await { let mut cipher = match Cipher::find_by_uuid(uuid, &mut conn).await {
Some(cipher) => cipher, Some(cipher) => cipher,
@ -662,12 +651,7 @@ async fn put_cipher(
} }
#[post("/ciphers/<uuid>/partial", data = "<data>")] #[post("/ciphers/<uuid>/partial", data = "<data>")]
async fn post_cipher_partial( async fn post_cipher_partial(uuid: &str, data: Json<PartialCipherData>, headers: Headers, conn: DbConn) -> JsonResult {
uuid: &str,
data: JsonUpcase<PartialCipherData>,
headers: Headers,
conn: DbConn,
) -> JsonResult {
put_cipher_partial(uuid, data, headers, conn).await put_cipher_partial(uuid, data, headers, conn).await
} }
@ -675,18 +659,18 @@ async fn post_cipher_partial(
#[put("/ciphers/<uuid>/partial", data = "<data>")] #[put("/ciphers/<uuid>/partial", data = "<data>")]
async fn put_cipher_partial( async fn put_cipher_partial(
uuid: &str, uuid: &str,
data: JsonUpcase<PartialCipherData>, data: Json<PartialCipherData>,
headers: Headers, headers: Headers,
mut conn: DbConn, mut conn: DbConn,
) -> JsonResult { ) -> JsonResult {
let data: PartialCipherData = data.into_inner().data; let data: PartialCipherData = data.into_inner();
let cipher = match Cipher::find_by_uuid(uuid, &mut conn).await { let cipher = match Cipher::find_by_uuid(uuid, &mut conn).await {
Some(cipher) => cipher, Some(cipher) => cipher,
None => err!("Cipher doesn't exist"), None => err!("Cipher doesn't exist"),
}; };
if let Some(ref folder_id) = data.FolderId { if let Some(ref folder_id) = data.folder_id {
match Folder::find_by_uuid(folder_id, &mut conn).await { match Folder::find_by_uuid(folder_id, &mut conn).await {
Some(folder) => { Some(folder) => {
if folder.user_uuid != headers.user.uuid { if folder.user_uuid != headers.user.uuid {
@ -698,23 +682,23 @@ async fn put_cipher_partial(
} }
// Move cipher // Move cipher
cipher.move_to_folder(data.FolderId.clone(), &headers.user.uuid, &mut conn).await?; cipher.move_to_folder(data.folder_id.clone(), &headers.user.uuid, &mut conn).await?;
// Update favorite // Update favorite
cipher.set_favorite(Some(data.Favorite), &headers.user.uuid, &mut conn).await?; cipher.set_favorite(Some(data.favorite), &headers.user.uuid, &mut conn).await?;
Ok(Json(cipher.to_json(&headers.host, &headers.user.uuid, None, CipherSyncType::User, &mut conn).await)) Ok(Json(cipher.to_json(&headers.host, &headers.user.uuid, None, CipherSyncType::User, &mut conn).await))
} }
#[derive(Deserialize)] #[derive(Deserialize)]
#[allow(non_snake_case)] #[serde(rename_all = "camelCase")]
struct CollectionsAdminData { struct CollectionsAdminData {
CollectionIds: Vec<String>, collection_ids: Vec<String>,
} }
#[put("/ciphers/<uuid>/collections", data = "<data>")] #[put("/ciphers/<uuid>/collections", data = "<data>")]
async fn put_collections_update( async fn put_collections_update(
uuid: &str, uuid: &str,
data: JsonUpcase<CollectionsAdminData>, data: Json<CollectionsAdminData>,
headers: Headers, headers: Headers,
conn: DbConn, conn: DbConn,
nt: Notify<'_>, nt: Notify<'_>,
@ -725,7 +709,7 @@ async fn put_collections_update(
#[post("/ciphers/<uuid>/collections", data = "<data>")] #[post("/ciphers/<uuid>/collections", data = "<data>")]
async fn post_collections_update( async fn post_collections_update(
uuid: &str, uuid: &str,
data: JsonUpcase<CollectionsAdminData>, data: Json<CollectionsAdminData>,
headers: Headers, headers: Headers,
conn: DbConn, conn: DbConn,
nt: Notify<'_>, nt: Notify<'_>,
@ -736,7 +720,7 @@ async fn post_collections_update(
#[put("/ciphers/<uuid>/collections-admin", data = "<data>")] #[put("/ciphers/<uuid>/collections-admin", data = "<data>")]
async fn put_collections_admin( async fn put_collections_admin(
uuid: &str, uuid: &str,
data: JsonUpcase<CollectionsAdminData>, data: Json<CollectionsAdminData>,
headers: Headers, headers: Headers,
conn: DbConn, conn: DbConn,
nt: Notify<'_>, nt: Notify<'_>,
@ -747,12 +731,12 @@ async fn put_collections_admin(
#[post("/ciphers/<uuid>/collections-admin", data = "<data>")] #[post("/ciphers/<uuid>/collections-admin", data = "<data>")]
async fn post_collections_admin( async fn post_collections_admin(
uuid: &str, uuid: &str,
data: JsonUpcase<CollectionsAdminData>, data: Json<CollectionsAdminData>,
headers: Headers, headers: Headers,
mut conn: DbConn, mut conn: DbConn,
nt: Notify<'_>, nt: Notify<'_>,
) -> EmptyResult { ) -> EmptyResult {
let data: CollectionsAdminData = data.into_inner().data; let data: CollectionsAdminData = data.into_inner();
let cipher = match Cipher::find_by_uuid(uuid, &mut conn).await { let cipher = match Cipher::find_by_uuid(uuid, &mut conn).await {
Some(cipher) => cipher, Some(cipher) => cipher,
@ -763,7 +747,7 @@ async fn post_collections_admin(
err!("Cipher is not write accessible") err!("Cipher is not write accessible")
} }
let posted_collections: HashSet<String> = data.CollectionIds.iter().cloned().collect(); let posted_collections: HashSet<String> = data.collection_ids.iter().cloned().collect();
let current_collections: HashSet<String> = let current_collections: HashSet<String> =
cipher.get_collections(headers.user.uuid.clone(), &mut conn).await.iter().cloned().collect(); cipher.get_collections(headers.user.uuid.clone(), &mut conn).await.iter().cloned().collect();
@ -811,21 +795,21 @@ async fn post_collections_admin(
} }
#[derive(Deserialize)] #[derive(Deserialize)]
#[allow(non_snake_case)] #[serde(rename_all = "camelCase")]
struct ShareCipherData { struct ShareCipherData {
Cipher: CipherData, cipher: CipherData,
CollectionIds: Vec<String>, collection_ids: Vec<String>,
} }
#[post("/ciphers/<uuid>/share", data = "<data>")] #[post("/ciphers/<uuid>/share", data = "<data>")]
async fn post_cipher_share( async fn post_cipher_share(
uuid: &str, uuid: &str,
data: JsonUpcase<ShareCipherData>, data: Json<ShareCipherData>,
headers: Headers, headers: Headers,
mut conn: DbConn, mut conn: DbConn,
nt: Notify<'_>, nt: Notify<'_>,
) -> JsonResult { ) -> JsonResult {
let data: ShareCipherData = data.into_inner().data; let data: ShareCipherData = data.into_inner();
share_cipher_by_uuid(uuid, data, &headers, &mut conn, &nt).await share_cipher_by_uuid(uuid, data, &headers, &mut conn, &nt).await
} }
@ -833,53 +817,53 @@ async fn post_cipher_share(
#[put("/ciphers/<uuid>/share", data = "<data>")] #[put("/ciphers/<uuid>/share", data = "<data>")]
async fn put_cipher_share( async fn put_cipher_share(
uuid: &str, uuid: &str,
data: JsonUpcase<ShareCipherData>, data: Json<ShareCipherData>,
headers: Headers, headers: Headers,
mut conn: DbConn, mut conn: DbConn,
nt: Notify<'_>, nt: Notify<'_>,
) -> JsonResult { ) -> JsonResult {
let data: ShareCipherData = data.into_inner().data; let data: ShareCipherData = data.into_inner();
share_cipher_by_uuid(uuid, data, &headers, &mut conn, &nt).await share_cipher_by_uuid(uuid, data, &headers, &mut conn, &nt).await
} }
#[derive(Deserialize)] #[derive(Deserialize)]
#[allow(non_snake_case)] #[serde(rename_all = "camelCase")]
struct ShareSelectedCipherData { struct ShareSelectedCipherData {
Ciphers: Vec<CipherData>, ciphers: Vec<CipherData>,
CollectionIds: Vec<String>, collection_ids: Vec<String>,
} }
#[put("/ciphers/share", data = "<data>")] #[put("/ciphers/share", data = "<data>")]
async fn put_cipher_share_selected( async fn put_cipher_share_selected(
data: JsonUpcase<ShareSelectedCipherData>, data: Json<ShareSelectedCipherData>,
headers: Headers, headers: Headers,
mut conn: DbConn, mut conn: DbConn,
nt: Notify<'_>, nt: Notify<'_>,
) -> EmptyResult { ) -> EmptyResult {
let mut data: ShareSelectedCipherData = data.into_inner().data; let mut data: ShareSelectedCipherData = data.into_inner();
if data.Ciphers.is_empty() { if data.ciphers.is_empty() {
err!("You must select at least one cipher.") err!("You must select at least one cipher.")
} }
if data.CollectionIds.is_empty() { if data.collection_ids.is_empty() {
err!("You must select at least one collection.") err!("You must select at least one collection.")
} }
for cipher in data.Ciphers.iter() { for cipher in data.ciphers.iter() {
if cipher.Id.is_none() { if cipher.id.is_none() {
err!("Request missing ids field") err!("Request missing ids field")
} }
} }
while let Some(cipher) = data.Ciphers.pop() { while let Some(cipher) = data.ciphers.pop() {
let mut shared_cipher_data = ShareCipherData { let mut shared_cipher_data = ShareCipherData {
Cipher: cipher, cipher,
CollectionIds: data.CollectionIds.clone(), collection_ids: data.collection_ids.clone(),
}; };
match shared_cipher_data.Cipher.Id.take() { match shared_cipher_data.cipher.id.take() {
Some(id) => share_cipher_by_uuid(&id, shared_cipher_data, &headers, &mut conn, &nt).await?, Some(id) => share_cipher_by_uuid(&id, shared_cipher_data, &headers, &mut conn, &nt).await?,
None => err!("Request missing ids field"), None => err!("Request missing ids field"),
}; };
@ -908,8 +892,8 @@ async fn share_cipher_by_uuid(
let mut shared_to_collections = vec![]; let mut shared_to_collections = vec![];
if let Some(organization_uuid) = &data.Cipher.OrganizationId { if let Some(organization_uuid) = &data.cipher.organization_id {
for uuid in &data.CollectionIds { for uuid in &data.collection_ids {
match Collection::find_by_uuid_and_org(uuid, organization_uuid, conn).await { match Collection::find_by_uuid_and_org(uuid, organization_uuid, conn).await {
None => err!("Invalid collection ID provided"), None => err!("Invalid collection ID provided"),
Some(collection) => { Some(collection) => {
@ -925,13 +909,13 @@ async fn share_cipher_by_uuid(
}; };
// When LastKnownRevisionDate is None, it is a new cipher, so send CipherCreate. // When LastKnownRevisionDate is None, it is a new cipher, so send CipherCreate.
let ut = if data.Cipher.LastKnownRevisionDate.is_some() { let ut = if data.cipher.last_known_revision_date.is_some() {
UpdateType::SyncCipherUpdate UpdateType::SyncCipherUpdate
} else { } else {
UpdateType::SyncCipherCreate UpdateType::SyncCipherCreate
}; };
update_cipher_from_data(&mut cipher, data.Cipher, headers, Some(shared_to_collections), conn, nt, ut).await?; update_cipher_from_data(&mut cipher, data.cipher, headers, Some(shared_to_collections), conn, nt, ut).await?;
Ok(Json(cipher.to_json(&headers.host, &headers.user.uuid, None, CipherSyncType::User, conn).await)) Ok(Json(cipher.to_json(&headers.host, &headers.user.uuid, None, CipherSyncType::User, conn).await))
} }
@ -961,12 +945,12 @@ async fn get_attachment(uuid: &str, attachment_id: &str, headers: Headers, mut c
} }
#[derive(Deserialize)] #[derive(Deserialize)]
#[allow(non_snake_case)] #[serde(rename_all = "camelCase")]
struct AttachmentRequestData { struct AttachmentRequestData {
Key: String, key: String,
FileName: String, file_name: String,
FileSize: NumberOrString, file_size: NumberOrString,
AdminRequest: Option<bool>, // true when attaching from an org vault view admin_request: Option<bool>, // true when attaching from an org vault view
} }
enum FileUploadType { enum FileUploadType {
@ -981,7 +965,7 @@ enum FileUploadType {
#[post("/ciphers/<uuid>/attachment/v2", data = "<data>")] #[post("/ciphers/<uuid>/attachment/v2", data = "<data>")]
async fn post_attachment_v2( async fn post_attachment_v2(
uuid: &str, uuid: &str,
data: JsonUpcase<AttachmentRequestData>, data: Json<AttachmentRequestData>,
headers: Headers, headers: Headers,
mut conn: DbConn, mut conn: DbConn,
) -> JsonResult { ) -> JsonResult {
@ -994,28 +978,28 @@ async fn post_attachment_v2(
err!("Cipher is not write accessible") err!("Cipher is not write accessible")
} }
let data: AttachmentRequestData = data.into_inner().data; let data: AttachmentRequestData = data.into_inner();
let file_size = data.FileSize.into_i64()?; let file_size = data.file_size.into_i64()?;
if file_size < 0 { if file_size < 0 {
err!("Attachment size can't be negative") err!("Attachment size can't be negative")
} }
let attachment_id = crypto::generate_attachment_id(); let attachment_id = crypto::generate_attachment_id();
let attachment = let attachment =
Attachment::new(attachment_id.clone(), cipher.uuid.clone(), data.FileName, file_size, Some(data.Key)); Attachment::new(attachment_id.clone(), cipher.uuid.clone(), data.file_name, file_size, Some(data.key));
attachment.save(&mut conn).await.expect("Error saving attachment"); attachment.save(&mut conn).await.expect("Error saving attachment");
let url = format!("/ciphers/{}/attachment/{}", cipher.uuid, attachment_id); let url = format!("/ciphers/{}/attachment/{}", cipher.uuid, attachment_id);
let response_key = match data.AdminRequest { let response_key = match data.admin_request {
Some(b) if b => "CipherMiniResponse", Some(b) if b => "cipherMiniResponse",
_ => "CipherResponse", _ => "cipherResponse",
}; };
Ok(Json(json!({ // AttachmentUploadDataResponseModel Ok(Json(json!({ // AttachmentUploadDataResponseModel
"Object": "attachment-fileUpload", "object": "attachment-fileUpload",
"AttachmentId": attachment_id, "attachmentId": attachment_id,
"Url": url, "url": url,
"FileUploadType": FileUploadType::Direct as i32, "fileUploadType": FileUploadType::Direct as i32,
response_key: cipher.to_json(&headers.host, &headers.user.uuid, None, CipherSyncType::User, &mut conn).await, response_key: cipher.to_json(&headers.host, &headers.user.uuid, None, CipherSyncType::User, &mut conn).await,
}))) })))
} }
@ -1350,38 +1334,23 @@ async fn delete_cipher_admin(uuid: &str, headers: Headers, mut conn: DbConn, nt:
} }
#[delete("/ciphers", data = "<data>")] #[delete("/ciphers", data = "<data>")]
async fn delete_cipher_selected( async fn delete_cipher_selected(data: Json<Value>, headers: Headers, conn: DbConn, nt: Notify<'_>) -> EmptyResult {
data: JsonUpcase<Value>,
headers: Headers,
conn: DbConn,
nt: Notify<'_>,
) -> EmptyResult {
_delete_multiple_ciphers(data, headers, conn, false, nt).await // permanent delete _delete_multiple_ciphers(data, headers, conn, false, nt).await // permanent delete
} }
#[post("/ciphers/delete", data = "<data>")] #[post("/ciphers/delete", data = "<data>")]
async fn delete_cipher_selected_post( async fn delete_cipher_selected_post(data: Json<Value>, headers: Headers, conn: DbConn, nt: Notify<'_>) -> EmptyResult {
data: JsonUpcase<Value>,
headers: Headers,
conn: DbConn,
nt: Notify<'_>,
) -> EmptyResult {
_delete_multiple_ciphers(data, headers, conn, false, nt).await // permanent delete _delete_multiple_ciphers(data, headers, conn, false, nt).await // permanent delete
} }
#[put("/ciphers/delete", data = "<data>")] #[put("/ciphers/delete", data = "<data>")]
async fn delete_cipher_selected_put( async fn delete_cipher_selected_put(data: Json<Value>, headers: Headers, conn: DbConn, nt: Notify<'_>) -> EmptyResult {
data: JsonUpcase<Value>,
headers: Headers,
conn: DbConn,
nt: Notify<'_>,
) -> EmptyResult {
_delete_multiple_ciphers(data, headers, conn, true, nt).await // soft delete _delete_multiple_ciphers(data, headers, conn, true, nt).await // soft delete
} }
#[delete("/ciphers/admin", data = "<data>")] #[delete("/ciphers/admin", data = "<data>")]
async fn delete_cipher_selected_admin( async fn delete_cipher_selected_admin(
data: JsonUpcase<Value>, data: Json<Value>,
headers: Headers, headers: Headers,
conn: DbConn, conn: DbConn,
nt: Notify<'_>, nt: Notify<'_>,
@ -1391,7 +1360,7 @@ async fn delete_cipher_selected_admin(
#[post("/ciphers/delete-admin", data = "<data>")] #[post("/ciphers/delete-admin", data = "<data>")]
async fn delete_cipher_selected_post_admin( async fn delete_cipher_selected_post_admin(
data: JsonUpcase<Value>, data: Json<Value>,
headers: Headers, headers: Headers,
conn: DbConn, conn: DbConn,
nt: Notify<'_>, nt: Notify<'_>,
@ -1401,7 +1370,7 @@ async fn delete_cipher_selected_post_admin(
#[put("/ciphers/delete-admin", data = "<data>")] #[put("/ciphers/delete-admin", data = "<data>")]
async fn delete_cipher_selected_put_admin( async fn delete_cipher_selected_put_admin(
data: JsonUpcase<Value>, data: Json<Value>,
headers: Headers, headers: Headers,
conn: DbConn, conn: DbConn,
nt: Notify<'_>, nt: Notify<'_>,
@ -1420,33 +1389,28 @@ async fn restore_cipher_put_admin(uuid: &str, headers: Headers, mut conn: DbConn
} }
#[put("/ciphers/restore", data = "<data>")] #[put("/ciphers/restore", data = "<data>")]
async fn restore_cipher_selected( async fn restore_cipher_selected(data: Json<Value>, headers: Headers, mut conn: DbConn, nt: Notify<'_>) -> JsonResult {
data: JsonUpcase<Value>,
headers: Headers,
mut conn: DbConn,
nt: Notify<'_>,
) -> JsonResult {
_restore_multiple_ciphers(data, &headers, &mut conn, &nt).await _restore_multiple_ciphers(data, &headers, &mut conn, &nt).await
} }
#[derive(Deserialize)] #[derive(Deserialize)]
#[allow(non_snake_case)] #[serde(rename_all = "camelCase")]
struct MoveCipherData { struct MoveCipherData {
FolderId: Option<String>, folder_id: Option<String>,
Ids: Vec<String>, ids: Vec<String>,
} }
#[post("/ciphers/move", data = "<data>")] #[post("/ciphers/move", data = "<data>")]
async fn move_cipher_selected( async fn move_cipher_selected(
data: JsonUpcase<MoveCipherData>, data: Json<MoveCipherData>,
headers: Headers, headers: Headers,
mut conn: DbConn, mut conn: DbConn,
nt: Notify<'_>, nt: Notify<'_>,
) -> EmptyResult { ) -> EmptyResult {
let data = data.into_inner().data; let data = data.into_inner();
let user_uuid = headers.user.uuid; let user_uuid = headers.user.uuid;
if let Some(ref folder_id) = data.FolderId { if let Some(ref folder_id) = data.folder_id {
match Folder::find_by_uuid(folder_id, &mut conn).await { match Folder::find_by_uuid(folder_id, &mut conn).await {
Some(folder) => { Some(folder) => {
if folder.user_uuid != user_uuid { if folder.user_uuid != user_uuid {
@ -1457,7 +1421,7 @@ async fn move_cipher_selected(
} }
} }
for uuid in data.Ids { for uuid in data.ids {
let cipher = match Cipher::find_by_uuid(&uuid, &mut conn).await { let cipher = match Cipher::find_by_uuid(&uuid, &mut conn).await {
Some(cipher) => cipher, Some(cipher) => cipher,
None => err!("Cipher doesn't exist"), None => err!("Cipher doesn't exist"),
@ -1468,7 +1432,7 @@ async fn move_cipher_selected(
} }
// Move cipher // Move cipher
cipher.move_to_folder(data.FolderId.clone(), &user_uuid, &mut conn).await?; cipher.move_to_folder(data.folder_id.clone(), &user_uuid, &mut conn).await?;
nt.send_cipher_update( nt.send_cipher_update(
UpdateType::SyncCipherUpdate, UpdateType::SyncCipherUpdate,
@ -1486,7 +1450,7 @@ async fn move_cipher_selected(
#[put("/ciphers/move", data = "<data>")] #[put("/ciphers/move", data = "<data>")]
async fn move_cipher_selected_put( async fn move_cipher_selected_put(
data: JsonUpcase<MoveCipherData>, data: Json<MoveCipherData>,
headers: Headers, headers: Headers,
conn: DbConn, conn: DbConn,
nt: Notify<'_>, nt: Notify<'_>,
@ -1503,12 +1467,12 @@ struct OrganizationId {
#[post("/ciphers/purge?<organization..>", data = "<data>")] #[post("/ciphers/purge?<organization..>", data = "<data>")]
async fn delete_all( async fn delete_all(
organization: Option<OrganizationId>, organization: Option<OrganizationId>,
data: JsonUpcase<PasswordOrOtpData>, data: Json<PasswordOrOtpData>,
headers: Headers, headers: Headers,
mut conn: DbConn, mut conn: DbConn,
nt: Notify<'_>, nt: Notify<'_>,
) -> EmptyResult { ) -> EmptyResult {
let data: PasswordOrOtpData = data.into_inner().data; let data: PasswordOrOtpData = data.into_inner();
let mut user = headers.user; let mut user = headers.user;
data.validate(&user, true, &mut conn).await?; data.validate(&user, true, &mut conn).await?;
@ -1616,13 +1580,13 @@ async fn _delete_cipher_by_uuid(
} }
async fn _delete_multiple_ciphers( async fn _delete_multiple_ciphers(
data: JsonUpcase<Value>, data: Json<Value>,
headers: Headers, headers: Headers,
mut conn: DbConn, mut conn: DbConn,
soft_delete: bool, soft_delete: bool,
nt: Notify<'_>, nt: Notify<'_>,
) -> EmptyResult { ) -> EmptyResult {
let data: Value = data.into_inner().data; let data: Value = data.into_inner();
let uuids = match data.get("Ids") { let uuids = match data.get("Ids") {
Some(ids) => match ids.as_array() { Some(ids) => match ids.as_array() {
@ -1681,12 +1645,12 @@ async fn _restore_cipher_by_uuid(uuid: &str, headers: &Headers, conn: &mut DbCon
} }
async fn _restore_multiple_ciphers( async fn _restore_multiple_ciphers(
data: JsonUpcase<Value>, data: Json<Value>,
headers: &Headers, headers: &Headers,
conn: &mut DbConn, conn: &mut DbConn,
nt: &Notify<'_>, nt: &Notify<'_>,
) -> JsonResult { ) -> JsonResult {
let data: Value = data.into_inner().data; let data: Value = data.into_inner();
let uuids = match data.get("Ids") { let uuids = match data.get("Ids") {
Some(ids) => match ids.as_array() { Some(ids) => match ids.as_array() {
@ -1705,9 +1669,9 @@ async fn _restore_multiple_ciphers(
} }
Ok(Json(json!({ Ok(Json(json!({
"Data": ciphers, "data": ciphers,
"Object": "list", "object": "list",
"ContinuationToken": null "continuationToken": null
}))) })))
} }


@ -5,7 +5,7 @@ use serde_json::Value;
use crate::{ use crate::{
api::{ api::{
core::{CipherSyncData, CipherSyncType}, core::{CipherSyncData, CipherSyncType},
EmptyResult, JsonResult, JsonUpcase, EmptyResult, JsonResult,
}, },
auth::{decode_emergency_access_invite, Headers}, auth::{decode_emergency_access_invite, Headers},
db::{models::*, DbConn, DbPool}, db::{models::*, DbConn, DbPool},
@ -43,19 +43,19 @@ pub fn routes() -> Vec<Route> {
async fn get_contacts(headers: Headers, mut conn: DbConn) -> Json<Value> { async fn get_contacts(headers: Headers, mut conn: DbConn) -> Json<Value> {
if !CONFIG.emergency_access_allowed() { if !CONFIG.emergency_access_allowed() {
return Json(json!({ return Json(json!({
"Data": [{ "data": [{
"Id": "", "id": "",
"Status": 2, "status": 2,
"Type": 0, "type": 0,
"WaitTimeDays": 0, "waitTimeDays": 0,
"GranteeId": "", "granteeId": "",
"Email": "", "email": "",
"Name": "NOTE: Emergency Access is disabled!", "name": "NOTE: Emergency Access is disabled!",
"Object": "emergencyAccessGranteeDetails", "object": "emergencyAccessGranteeDetails",
}], }],
"Object": "list", "object": "list",
"ContinuationToken": null "continuationToken": null
})); }));
} }
let emergency_access_list = EmergencyAccess::find_all_by_grantor_uuid(&headers.user.uuid, &mut conn).await; let emergency_access_list = EmergencyAccess::find_all_by_grantor_uuid(&headers.user.uuid, &mut conn).await;
@ -67,9 +67,9 @@ async fn get_contacts(headers: Headers, mut conn: DbConn) -> Json<Value> {
} }
Json(json!({ Json(json!({
"Data": emergency_access_list_json, "data": emergency_access_list_json,
"Object": "list", "object": "list",
"ContinuationToken": null "continuationToken": null
})) }))
} }
@ -86,9 +86,9 @@ async fn get_grantees(headers: Headers, mut conn: DbConn) -> Json<Value> {
} }
Json(json!({ Json(json!({
"Data": emergency_access_list_json, "data": emergency_access_list_json,
"Object": "list", "object": "list",
"ContinuationToken": null "continuationToken": null
})) }))
} }
@ -109,42 +109,38 @@ async fn get_emergency_access(emer_id: &str, mut conn: DbConn) -> JsonResult {
// region put/post // region put/post
#[derive(Deserialize)] #[derive(Deserialize)]
#[allow(non_snake_case)] #[serde(rename_all = "camelCase")]
struct EmergencyAccessUpdateData { struct EmergencyAccessUpdateData {
Type: NumberOrString, r#type: NumberOrString,
WaitTimeDays: i32, wait_time_days: i32,
KeyEncrypted: Option<String>, key_encrypted: Option<String>,
} }
#[put("/emergency-access/<emer_id>", data = "<data>")] #[put("/emergency-access/<emer_id>", data = "<data>")]
async fn put_emergency_access(emer_id: &str, data: JsonUpcase<EmergencyAccessUpdateData>, conn: DbConn) -> JsonResult { async fn put_emergency_access(emer_id: &str, data: Json<EmergencyAccessUpdateData>, conn: DbConn) -> JsonResult {
post_emergency_access(emer_id, data, conn).await post_emergency_access(emer_id, data, conn).await
} }
#[post("/emergency-access/<emer_id>", data = "<data>")] #[post("/emergency-access/<emer_id>", data = "<data>")]
async fn post_emergency_access( async fn post_emergency_access(emer_id: &str, data: Json<EmergencyAccessUpdateData>, mut conn: DbConn) -> JsonResult {
emer_id: &str,
data: JsonUpcase<EmergencyAccessUpdateData>,
mut conn: DbConn,
) -> JsonResult {
check_emergency_access_enabled()?; check_emergency_access_enabled()?;
let data: EmergencyAccessUpdateData = data.into_inner().data; let data: EmergencyAccessUpdateData = data.into_inner();
let mut emergency_access = match EmergencyAccess::find_by_uuid(emer_id, &mut conn).await { let mut emergency_access = match EmergencyAccess::find_by_uuid(emer_id, &mut conn).await {
Some(emergency_access) => emergency_access, Some(emergency_access) => emergency_access,
None => err!("Emergency access not valid."), None => err!("Emergency access not valid."),
}; };
let new_type = match EmergencyAccessType::from_str(&data.Type.into_string()) { let new_type = match EmergencyAccessType::from_str(&data.r#type.into_string()) {
Some(new_type) => new_type as i32, Some(new_type) => new_type as i32,
None => err!("Invalid emergency access type."), None => err!("Invalid emergency access type."),
}; };
emergency_access.atype = new_type; emergency_access.atype = new_type;
emergency_access.wait_time_days = data.WaitTimeDays; emergency_access.wait_time_days = data.wait_time_days;
if data.KeyEncrypted.is_some() { if data.key_encrypted.is_some() {
emergency_access.key_encrypted = data.KeyEncrypted; emergency_access.key_encrypted = data.key_encrypted;
} }
emergency_access.save(&mut conn).await?; emergency_access.save(&mut conn).await?;
@ -184,24 +180,24 @@ async fn post_delete_emergency_access(emer_id: &str, headers: Headers, conn: DbC
// region invite // region invite
#[derive(Deserialize)] #[derive(Deserialize)]
#[allow(non_snake_case)] #[serde(rename_all = "camelCase")]
struct EmergencyAccessInviteData { struct EmergencyAccessInviteData {
Email: String, email: String,
Type: NumberOrString, r#type: NumberOrString,
WaitTimeDays: i32, wait_time_days: i32,
} }
#[post("/emergency-access/invite", data = "<data>")] #[post("/emergency-access/invite", data = "<data>")]
async fn send_invite(data: JsonUpcase<EmergencyAccessInviteData>, headers: Headers, mut conn: DbConn) -> EmptyResult { async fn send_invite(data: Json<EmergencyAccessInviteData>, headers: Headers, mut conn: DbConn) -> EmptyResult {
check_emergency_access_enabled()?; check_emergency_access_enabled()?;
let data: EmergencyAccessInviteData = data.into_inner().data; let data: EmergencyAccessInviteData = data.into_inner();
let email = data.Email.to_lowercase(); let email = data.email.to_lowercase();
let wait_time_days = data.WaitTimeDays; let wait_time_days = data.wait_time_days;
let emergency_access_status = EmergencyAccessStatus::Invited as i32; let emergency_access_status = EmergencyAccessStatus::Invited as i32;
let new_type = match EmergencyAccessType::from_str(&data.Type.into_string()) { let new_type = match EmergencyAccessType::from_str(&data.r#type.into_string()) {
Some(new_type) => new_type as i32, Some(new_type) => new_type as i32,
None => err!("Invalid emergency access type."), None => err!("Invalid emergency access type."),
}; };
@ -319,17 +315,17 @@ async fn resend_invite(emer_id: &str, headers: Headers, mut conn: DbConn) -> Emp
} }
#[derive(Deserialize)] #[derive(Deserialize)]
#[allow(non_snake_case)] #[serde(rename_all = "camelCase")]
struct AcceptData { struct AcceptData {
Token: String, token: String,
} }
#[post("/emergency-access/<emer_id>/accept", data = "<data>")] #[post("/emergency-access/<emer_id>/accept", data = "<data>")]
async fn accept_invite(emer_id: &str, data: JsonUpcase<AcceptData>, headers: Headers, mut conn: DbConn) -> EmptyResult { async fn accept_invite(emer_id: &str, data: Json<AcceptData>, headers: Headers, mut conn: DbConn) -> EmptyResult {
check_emergency_access_enabled()?; check_emergency_access_enabled()?;
let data: AcceptData = data.into_inner().data; let data: AcceptData = data.into_inner();
let token = &data.Token; let token = &data.token;
let claims = decode_emergency_access_invite(token)?; let claims = decode_emergency_access_invite(token)?;
// This can happen if the user who received the invite used a different email to signup. // This can happen if the user who received the invite used a different email to signup.
@ -374,23 +370,23 @@ async fn accept_invite(emer_id: &str, data: JsonUpcase<AcceptData>, headers: Hea
} }
#[derive(Deserialize)] #[derive(Deserialize)]
#[allow(non_snake_case)] #[serde(rename_all = "camelCase")]
struct ConfirmData { struct ConfirmData {
Key: String, key: String,
} }
#[post("/emergency-access/<emer_id>/confirm", data = "<data>")] #[post("/emergency-access/<emer_id>/confirm", data = "<data>")]
async fn confirm_emergency_access( async fn confirm_emergency_access(
emer_id: &str, emer_id: &str,
data: JsonUpcase<ConfirmData>, data: Json<ConfirmData>,
headers: Headers, headers: Headers,
mut conn: DbConn, mut conn: DbConn,
) -> JsonResult { ) -> JsonResult {
check_emergency_access_enabled()?; check_emergency_access_enabled()?;
let confirming_user = headers.user; let confirming_user = headers.user;
let data: ConfirmData = data.into_inner().data; let data: ConfirmData = data.into_inner();
let key = data.Key; let key = data.key;
let mut emergency_access = match EmergencyAccess::find_by_uuid(emer_id, &mut conn).await { let mut emergency_access = match EmergencyAccess::find_by_uuid(emer_id, &mut conn).await {
Some(emer) => emer, Some(emer) => emer,
@ -585,9 +581,9 @@ async fn view_emergency_access(emer_id: &str, headers: Headers, mut conn: DbConn
} }
Ok(Json(json!({ Ok(Json(json!({
"Ciphers": ciphers_json, "ciphers": ciphers_json,
"KeyEncrypted": &emergency_access.key_encrypted, "keyEncrypted": &emergency_access.key_encrypted,
"Object": "emergencyAccessView", "object": "emergencyAccessView",
}))) })))
} }
@ -611,35 +607,35 @@ async fn takeover_emergency_access(emer_id: &str, headers: Headers, mut conn: Db
}; };
let result = json!({ let result = json!({
"Kdf": grantor_user.client_kdf_type, "kdf": grantor_user.client_kdf_type,
"KdfIterations": grantor_user.client_kdf_iter, "kdfIterations": grantor_user.client_kdf_iter,
"KdfMemory": grantor_user.client_kdf_memory, "kdfMemory": grantor_user.client_kdf_memory,
"KdfParallelism": grantor_user.client_kdf_parallelism, "kdfParallelism": grantor_user.client_kdf_parallelism,
"KeyEncrypted": &emergency_access.key_encrypted, "keyEncrypted": &emergency_access.key_encrypted,
"Object": "emergencyAccessTakeover", "object": "emergencyAccessTakeover",
}); });
Ok(Json(result)) Ok(Json(result))
} }
#[derive(Deserialize)] #[derive(Deserialize)]
#[allow(non_snake_case)] #[serde(rename_all = "camelCase")]
struct EmergencyAccessPasswordData { struct EmergencyAccessPasswordData {
NewMasterPasswordHash: String, new_master_password_hash: String,
Key: String, key: String,
} }
#[post("/emergency-access/<emer_id>/password", data = "<data>")] #[post("/emergency-access/<emer_id>/password", data = "<data>")]
async fn password_emergency_access( async fn password_emergency_access(
emer_id: &str, emer_id: &str,
data: JsonUpcase<EmergencyAccessPasswordData>, data: Json<EmergencyAccessPasswordData>,
headers: Headers, headers: Headers,
mut conn: DbConn, mut conn: DbConn,
) -> EmptyResult { ) -> EmptyResult {
check_emergency_access_enabled()?; check_emergency_access_enabled()?;
let data: EmergencyAccessPasswordData = data.into_inner().data; let data: EmergencyAccessPasswordData = data.into_inner();
let new_master_password_hash = &data.NewMasterPasswordHash; let new_master_password_hash = &data.new_master_password_hash;
//let key = &data.Key; //let key = &data.Key;
let requesting_user = headers.user; let requesting_user = headers.user;
@ -658,7 +654,7 @@ async fn password_emergency_access(
}; };
// change grantor_user password // change grantor_user password
grantor_user.set_password(new_master_password_hash, Some(data.Key), true, None); grantor_user.set_password(new_master_password_hash, Some(data.key), true, None);
grantor_user.save(&mut conn).await?; grantor_user.save(&mut conn).await?;
// Disable TwoFactor providers since they will otherwise block logins // Disable TwoFactor providers since they will otherwise block logins
@ -696,9 +692,9 @@ async fn policies_emergency_access(emer_id: &str, headers: Headers, mut conn: Db
let policies_json: Vec<Value> = policies.await.iter().map(OrgPolicy::to_json).collect(); let policies_json: Vec<Value> = policies.await.iter().map(OrgPolicy::to_json).collect();
Ok(Json(json!({ Ok(Json(json!({
"Data": policies_json, "data": policies_json,
"Object": "list", "object": "list",
"ContinuationToken": null "continuationToken": null
}))) })))
} }


@ -5,7 +5,7 @@ use rocket::{form::FromForm, serde::json::Json, Route};
use serde_json::Value; use serde_json::Value;
use crate::{ use crate::{
api::{EmptyResult, JsonResult, JsonUpcaseVec}, api::{EmptyResult, JsonResult},
auth::{AdminHeaders, Headers}, auth::{AdminHeaders, Headers},
db::{ db::{
models::{Cipher, Event, UserOrganization}, models::{Cipher, Event, UserOrganization},
@ -22,7 +22,6 @@ pub fn routes() -> Vec<Route> {
} }
#[derive(FromForm)] #[derive(FromForm)]
#[allow(non_snake_case)]
struct EventRange { struct EventRange {
start: String, start: String,
end: String, end: String,
@ -53,9 +52,9 @@ async fn get_org_events(org_id: &str, data: EventRange, _headers: AdminHeaders,
}; };
Ok(Json(json!({ Ok(Json(json!({
"Data": events_json, "data": events_json,
"Object": "list", "object": "list",
"ContinuationToken": get_continuation_token(&events_json), "continuationToken": get_continuation_token(&events_json),
}))) })))
} }
@ -85,9 +84,9 @@ async fn get_cipher_events(cipher_id: &str, data: EventRange, headers: Headers,
}; };
Ok(Json(json!({ Ok(Json(json!({
"Data": events_json, "data": events_json,
"Object": "list", "object": "list",
"ContinuationToken": get_continuation_token(&events_json), "continuationToken": get_continuation_token(&events_json),
}))) })))
} }
@ -119,9 +118,9 @@ async fn get_user_events(
}; };
Ok(Json(json!({ Ok(Json(json!({
"Data": events_json, "data": events_json,
"Object": "list", "object": "list",
"ContinuationToken": get_continuation_token(&events_json), "continuationToken": get_continuation_token(&events_json),
}))) })))
} }
@ -145,33 +144,33 @@ pub fn main_routes() -> Vec<Route> {
routes![post_events_collect,] routes![post_events_collect,]
} }
#[derive(Deserialize, Debug)] #[derive(Debug, Deserialize)]
#[allow(non_snake_case)] #[serde(rename_all = "camelCase")]
struct EventCollection { struct EventCollection {
// Mandatory // Mandatory
Type: i32, r#type: i32,
Date: String, date: String,
// Optional // Optional
CipherId: Option<String>, cipher_id: Option<String>,
OrganizationId: Option<String>, organization_id: Option<String>,
} }
// Upstream: // Upstream:
// https://github.com/bitwarden/server/blob/8a22c0479e987e756ce7412c48a732f9002f0a2d/src/Events/Controllers/CollectController.cs // https://github.com/bitwarden/server/blob/8a22c0479e987e756ce7412c48a732f9002f0a2d/src/Events/Controllers/CollectController.cs
// https://github.com/bitwarden/server/blob/8a22c0479e987e756ce7412c48a732f9002f0a2d/src/Core/Services/Implementations/EventService.cs // https://github.com/bitwarden/server/blob/8a22c0479e987e756ce7412c48a732f9002f0a2d/src/Core/Services/Implementations/EventService.cs
#[post("/collect", format = "application/json", data = "<data>")] #[post("/collect", format = "application/json", data = "<data>")]
async fn post_events_collect(data: JsonUpcaseVec<EventCollection>, headers: Headers, mut conn: DbConn) -> EmptyResult { async fn post_events_collect(data: Json<Vec<EventCollection>>, headers: Headers, mut conn: DbConn) -> EmptyResult {
if !CONFIG.org_events_enabled() { if !CONFIG.org_events_enabled() {
return Ok(()); return Ok(());
} }
for event in data.iter().map(|d| &d.data) { for event in data.iter() {
let event_date = parse_date(&event.Date); let event_date = parse_date(&event.date);
match event.Type { match event.r#type {
1000..=1099 => { 1000..=1099 => {
_log_user_event( _log_user_event(
event.Type, event.r#type,
&headers.user.uuid, &headers.user.uuid,
headers.device.atype, headers.device.atype,
Some(event_date), Some(event_date),
@ -181,9 +180,9 @@ async fn post_events_collect(data: JsonUpcaseVec<EventCollection>, headers: Head
.await; .await;
} }
1600..=1699 => { 1600..=1699 => {
if let Some(org_uuid) = &event.OrganizationId { if let Some(org_uuid) = &event.organization_id {
_log_event( _log_event(
event.Type, event.r#type,
org_uuid, org_uuid,
org_uuid, org_uuid,
&headers.user.uuid, &headers.user.uuid,
@ -196,11 +195,11 @@ async fn post_events_collect(data: JsonUpcaseVec<EventCollection>, headers: Head
} }
} }
_ => { _ => {
if let Some(cipher_uuid) = &event.CipherId { if let Some(cipher_uuid) = &event.cipher_id {
if let Some(cipher) = Cipher::find_by_uuid(cipher_uuid, &mut conn).await { if let Some(cipher) = Cipher::find_by_uuid(cipher_uuid, &mut conn).await {
if let Some(org_uuid) = cipher.organization_uuid { if let Some(org_uuid) = cipher.organization_uuid {
_log_event( _log_event(
event.Type, event.r#type,
cipher_uuid, cipher_uuid,
&org_uuid, &org_uuid,
&headers.user.uuid, &headers.user.uuid,
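
The EventCollection change above also swaps JsonUpcaseVec for a plain Json<Vec<...>>; the reserved keyword `type` becomes the raw identifier r#type, which #[serde(rename_all = "camelCase")] still maps to the JSON key "type". A hedged sketch with an invented event payload:

use serde::Deserialize;

#[derive(Debug, Deserialize)]
#[serde(rename_all = "camelCase")]
struct EventCollection {
    r#type: i32,
    date: String,
    cipher_id: Option<String>,
    organization_id: Option<String>,
}

fn main() {
    let body = r#"[{"type":1000,"date":"2024-06-23T00:00:00Z","cipherId":null,"organizationId":null}]"#;
    let events: Vec<EventCollection> = serde_json::from_str(body).unwrap();
    assert_eq!(events[0].r#type, 1000);
    assert!(events[0].cipher_id.is_none());
}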

View file

@ -2,7 +2,7 @@ use rocket::serde::json::Json;
use serde_json::Value; use serde_json::Value;
use crate::{ use crate::{
api::{EmptyResult, JsonResult, JsonUpcase, Notify, UpdateType}, api::{EmptyResult, JsonResult, Notify, UpdateType},
auth::Headers, auth::Headers,
db::{models::*, DbConn}, db::{models::*, DbConn},
}; };
@ -17,9 +17,9 @@ async fn get_folders(headers: Headers, mut conn: DbConn) -> Json<Value> {
let folders_json: Vec<Value> = folders.iter().map(Folder::to_json).collect(); let folders_json: Vec<Value> = folders.iter().map(Folder::to_json).collect();
Json(json!({ Json(json!({
"Data": folders_json, "data": folders_json,
"Object": "list", "object": "list",
"ContinuationToken": null, "continuationToken": null,
})) }))
} }
@ -38,16 +38,16 @@ async fn get_folder(uuid: &str, headers: Headers, mut conn: DbConn) -> JsonResul
} }
#[derive(Deserialize)] #[derive(Deserialize)]
#[allow(non_snake_case)] #[serde(rename_all = "camelCase")]
pub struct FolderData { pub struct FolderData {
pub Name: String, pub name: String,
} }
#[post("/folders", data = "<data>")] #[post("/folders", data = "<data>")]
async fn post_folders(data: JsonUpcase<FolderData>, headers: Headers, mut conn: DbConn, nt: Notify<'_>) -> JsonResult { async fn post_folders(data: Json<FolderData>, headers: Headers, mut conn: DbConn, nt: Notify<'_>) -> JsonResult {
let data: FolderData = data.into_inner().data; let data: FolderData = data.into_inner();
let mut folder = Folder::new(headers.user.uuid, data.Name); let mut folder = Folder::new(headers.user.uuid, data.name);
folder.save(&mut conn).await?; folder.save(&mut conn).await?;
nt.send_folder_update(UpdateType::SyncFolderCreate, &folder, &headers.device.uuid, &mut conn).await; nt.send_folder_update(UpdateType::SyncFolderCreate, &folder, &headers.device.uuid, &mut conn).await;
@ -56,25 +56,19 @@ async fn post_folders(data: JsonUpcase<FolderData>, headers: Headers, mut conn:
} }
#[post("/folders/<uuid>", data = "<data>")] #[post("/folders/<uuid>", data = "<data>")]
async fn post_folder( async fn post_folder(uuid: &str, data: Json<FolderData>, headers: Headers, conn: DbConn, nt: Notify<'_>) -> JsonResult {
uuid: &str,
data: JsonUpcase<FolderData>,
headers: Headers,
conn: DbConn,
nt: Notify<'_>,
) -> JsonResult {
put_folder(uuid, data, headers, conn, nt).await put_folder(uuid, data, headers, conn, nt).await
} }
#[put("/folders/<uuid>", data = "<data>")] #[put("/folders/<uuid>", data = "<data>")]
async fn put_folder( async fn put_folder(
uuid: &str, uuid: &str,
data: JsonUpcase<FolderData>, data: Json<FolderData>,
headers: Headers, headers: Headers,
mut conn: DbConn, mut conn: DbConn,
nt: Notify<'_>, nt: Notify<'_>,
) -> JsonResult { ) -> JsonResult {
let data: FolderData = data.into_inner().data; let data: FolderData = data.into_inner();
let mut folder = match Folder::find_by_uuid(uuid, &mut conn).await { let mut folder = match Folder::find_by_uuid(uuid, &mut conn).await {
Some(folder) => folder, Some(folder) => folder,
@ -85,7 +79,7 @@ async fn put_folder(
err!("Folder belongs to another user") err!("Folder belongs to another user")
} }
folder.name = data.Name; folder.name = data.name;
folder.save(&mut conn).await?; folder.save(&mut conn).await?;
nt.send_folder_update(UpdateType::SyncFolderUpdate, &folder, &headers.device.uuid, &mut conn).await; nt.send_folder_update(UpdateType::SyncFolderUpdate, &folder, &headers.device.uuid, &mut conn).await;
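
Note that the rename is not backward compatible for request bodies: with rename_all = "camelCase" a client that still sends "Name" fails, because the unknown key is ignored and the required "name" field is then missing. A small sketch illustrating this:

use serde::Deserialize;

#[derive(Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct FolderData {
    pub name: String,
}

fn main() {
    let ok: FolderData = serde_json::from_str(r#"{"name":"Work"}"#).unwrap();
    assert_eq!(ok.name, "Work");
    // The old PascalCase key is no longer accepted for this struct.
    assert!(serde_json::from_str::<FolderData>(r#"{"Name":"Work"}"#).is_err());
}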

View file

@ -49,19 +49,19 @@ pub fn events_routes() -> Vec<Route> {
use rocket::{serde::json::Json, serde::json::Value, Catcher, Route}; use rocket::{serde::json::Json, serde::json::Value, Catcher, Route};
use crate::{ use crate::{
api::{JsonResult, JsonUpcase, Notify, UpdateType}, api::{JsonResult, Notify, UpdateType},
auth::Headers, auth::Headers,
db::DbConn, db::DbConn,
error::Error, error::Error,
util::{get_reqwest_client, parse_experimental_client_feature_flags}, util::{get_reqwest_client, parse_experimental_client_feature_flags},
}; };
#[derive(Serialize, Deserialize, Debug)] #[derive(Debug, Serialize, Deserialize)]
#[allow(non_snake_case)] #[serde(rename_all = "camelCase")]
struct GlobalDomain { struct GlobalDomain {
Type: i32, r#type: i32,
Domains: Vec<String>, domains: Vec<String>,
Excluded: bool, excluded: bool,
} }
const GLOBAL_DOMAINS: &str = include_str!("../../static/global_domains.json"); const GLOBAL_DOMAINS: &str = include_str!("../../static/global_domains.json");
@ -81,38 +81,38 @@ fn _get_eq_domains(headers: Headers, no_excluded: bool) -> Json<Value> {
let mut globals: Vec<GlobalDomain> = from_str(GLOBAL_DOMAINS).unwrap(); let mut globals: Vec<GlobalDomain> = from_str(GLOBAL_DOMAINS).unwrap();
for global in &mut globals { for global in &mut globals {
global.Excluded = excluded_globals.contains(&global.Type); global.excluded = excluded_globals.contains(&global.r#type);
} }
if no_excluded { if no_excluded {
globals.retain(|g| !g.Excluded); globals.retain(|g| !g.excluded);
} }
Json(json!({ Json(json!({
"EquivalentDomains": equivalent_domains, "equivalentDomains": equivalent_domains,
"GlobalEquivalentDomains": globals, "globalEquivalentDomains": globals,
"Object": "domains", "object": "domains",
})) }))
} }
#[derive(Deserialize, Debug)] #[derive(Debug, Deserialize)]
#[allow(non_snake_case)] #[serde(rename_all = "camelCase")]
struct EquivDomainData { struct EquivDomainData {
ExcludedGlobalEquivalentDomains: Option<Vec<i32>>, excluded_global_equivalent_domains: Option<Vec<i32>>,
EquivalentDomains: Option<Vec<Vec<String>>>, equivalent_domains: Option<Vec<Vec<String>>>,
} }
#[post("/settings/domains", data = "<data>")] #[post("/settings/domains", data = "<data>")]
async fn post_eq_domains( async fn post_eq_domains(
data: JsonUpcase<EquivDomainData>, data: Json<EquivDomainData>,
headers: Headers, headers: Headers,
mut conn: DbConn, mut conn: DbConn,
nt: Notify<'_>, nt: Notify<'_>,
) -> JsonResult { ) -> JsonResult {
let data: EquivDomainData = data.into_inner().data; let data: EquivDomainData = data.into_inner();
let excluded_globals = data.ExcludedGlobalEquivalentDomains.unwrap_or_default(); let excluded_globals = data.excluded_global_equivalent_domains.unwrap_or_default();
let equivalent_domains = data.EquivalentDomains.unwrap_or_default(); let equivalent_domains = data.equivalent_domains.unwrap_or_default();
let mut user = headers.user; let mut user = headers.user;
use serde_json::to_string; use serde_json::to_string;
@ -128,12 +128,7 @@ async fn post_eq_domains(
} }
#[put("/settings/domains", data = "<data>")] #[put("/settings/domains", data = "<data>")]
async fn put_eq_domains( async fn put_eq_domains(data: Json<EquivDomainData>, headers: Headers, conn: DbConn, nt: Notify<'_>) -> JsonResult {
data: JsonUpcase<EquivDomainData>,
headers: Headers,
conn: DbConn,
nt: Notify<'_>,
) -> JsonResult {
post_eq_domains(data, headers, conn, nt).await post_eq_domains(data, headers, conn, nt).await
} }
@ -157,15 +152,15 @@ async fn hibp_breach(username: &str) -> JsonResult {
Ok(Json(value)) Ok(Json(value))
} else { } else {
Ok(Json(json!([{ Ok(Json(json!([{
"Name": "HaveIBeenPwned", "name": "HaveIBeenPwned",
"Title": "Manual HIBP Check", "title": "Manual HIBP Check",
"Domain": "haveibeenpwned.com", "domain": "haveibeenpwned.com",
"BreachDate": "2019-08-18T00:00:00Z", "breachDate": "2019-08-18T00:00:00Z",
"AddedDate": "2019-08-18T00:00:00Z", "addedDate": "2019-08-18T00:00:00Z",
"Description": format!("Go to: <a href=\"https://haveibeenpwned.com/account/{username}\" target=\"_blank\" rel=\"noreferrer\">https://haveibeenpwned.com/account/{username}</a> for a manual check.<br/><br/>HaveIBeenPwned API key not set!<br/>Go to <a href=\"https://haveibeenpwned.com/API/Key\" target=\"_blank\" rel=\"noreferrer\">https://haveibeenpwned.com/API/Key</a> to purchase an API key from HaveIBeenPwned.<br/><br/>"), "description": format!("Go to: <a href=\"https://haveibeenpwned.com/account/{username}\" target=\"_blank\" rel=\"noreferrer\">https://haveibeenpwned.com/account/{username}</a> for a manual check.<br/><br/>HaveIBeenPwned API key not set!<br/>Go to <a href=\"https://haveibeenpwned.com/API/Key\" target=\"_blank\" rel=\"noreferrer\">https://haveibeenpwned.com/API/Key</a> to purchase an API key from HaveIBeenPwned.<br/><br/>"),
"LogoPath": "vw_static/hibp.png", "logoPath": "vw_static/hibp.png",
"PwnCount": 0, "pwnCount": 0,
"DataClasses": [ "dataClasses": [
"Error - No API key set!" "Error - No API key set!"
] ]
}]))) }])))
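
GlobalDomain derives Serialize as well, so the rename also changes the response shape: the globalEquivalentDomains entries are emitted with lowercase keys. A quick sketch (the domain value is illustrative):

use serde::{Deserialize, Serialize};

#[derive(Debug, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
struct GlobalDomain {
    r#type: i32,
    domains: Vec<String>,
    excluded: bool,
}

fn main() {
    let g = GlobalDomain { r#type: 1, domains: vec!["example.com".into()], excluded: false };
    assert_eq!(
        serde_json::to_string(&g).unwrap(),
        r#"{"type":1,"domains":["example.com"],"excluded":false}"#
    );
}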

File diff suppressed because it is too large

View file

@ -1,13 +1,14 @@
use chrono::Utc; use chrono::Utc;
use rocket::{ use rocket::{
request::{self, FromRequest, Outcome}, request::{self, FromRequest, Outcome},
serde::json::Json,
Request, Route, Request, Route,
}; };
use std::collections::HashSet; use std::collections::HashSet;
use crate::{ use crate::{
api::{EmptyResult, JsonUpcase}, api::EmptyResult,
auth, auth,
db::{models::*, DbConn}, db::{models::*, DbConn},
mail, CONFIG, mail, CONFIG,
@ -18,43 +19,43 @@ pub fn routes() -> Vec<Route> {
} }
#[derive(Deserialize)] #[derive(Deserialize)]
#[allow(non_snake_case)] #[serde(rename_all = "camelCase")]
struct OrgImportGroupData { struct OrgImportGroupData {
Name: String, name: String,
ExternalId: String, external_id: String,
MemberExternalIds: Vec<String>, member_external_ids: Vec<String>,
} }
#[derive(Deserialize)] #[derive(Deserialize)]
#[allow(non_snake_case)] #[serde(rename_all = "camelCase")]
struct OrgImportUserData { struct OrgImportUserData {
Email: String, email: String,
ExternalId: String, external_id: String,
Deleted: bool, deleted: bool,
} }
#[derive(Deserialize)] #[derive(Deserialize)]
#[allow(non_snake_case)] #[serde(rename_all = "camelCase")]
struct OrgImportData { struct OrgImportData {
Groups: Vec<OrgImportGroupData>, groups: Vec<OrgImportGroupData>,
Members: Vec<OrgImportUserData>, members: Vec<OrgImportUserData>,
OverwriteExisting: bool, overwrite_existing: bool,
// LargeImport: bool, // For now this will not be used, upstream uses this to prevent syncs of more than 2000 users or groups without the flag set. // largeImport: bool, // For now this will not be used, upstream uses this to prevent syncs of more than 2000 users or groups without the flag set.
} }
#[post("/public/organization/import", data = "<data>")] #[post("/public/organization/import", data = "<data>")]
async fn ldap_import(data: JsonUpcase<OrgImportData>, token: PublicToken, mut conn: DbConn) -> EmptyResult { async fn ldap_import(data: Json<OrgImportData>, token: PublicToken, mut conn: DbConn) -> EmptyResult {
// Most of the logic for this function can be found here // Most of the logic for this function can be found here
// https://github.com/bitwarden/server/blob/fd892b2ff4547648a276734fb2b14a8abae2c6f5/src/Core/Services/Implementations/OrganizationService.cs#L1797 // https://github.com/bitwarden/server/blob/fd892b2ff4547648a276734fb2b14a8abae2c6f5/src/Core/Services/Implementations/OrganizationService.cs#L1797
let org_id = token.0; let org_id = token.0;
let data = data.into_inner().data; let data = data.into_inner();
for user_data in &data.Members { for user_data in &data.members {
if user_data.Deleted { if user_data.deleted {
// If user is marked for deletion and it exists, revoke it // If user is marked for deletion and it exists, revoke it
if let Some(mut user_org) = if let Some(mut user_org) =
UserOrganization::find_by_email_and_org(&user_data.Email, &org_id, &mut conn).await UserOrganization::find_by_email_and_org(&user_data.email, &org_id, &mut conn).await
{ {
// Only revoke a user if it is not the last confirmed owner // Only revoke a user if it is not the last confirmed owner
let revoked = if user_org.atype == UserOrgType::Owner let revoked = if user_org.atype == UserOrgType::Owner
@ -72,27 +73,27 @@ async fn ldap_import(data: JsonUpcase<OrgImportData>, token: PublicToken, mut co
user_org.revoke() user_org.revoke()
}; };
let ext_modified = user_org.set_external_id(Some(user_data.ExternalId.clone())); let ext_modified = user_org.set_external_id(Some(user_data.external_id.clone()));
if revoked || ext_modified { if revoked || ext_modified {
user_org.save(&mut conn).await?; user_org.save(&mut conn).await?;
} }
} }
// If user is part of the organization, restore it // If user is part of the organization, restore it
} else if let Some(mut user_org) = } else if let Some(mut user_org) =
UserOrganization::find_by_email_and_org(&user_data.Email, &org_id, &mut conn).await UserOrganization::find_by_email_and_org(&user_data.email, &org_id, &mut conn).await
{ {
let restored = user_org.restore(); let restored = user_org.restore();
let ext_modified = user_org.set_external_id(Some(user_data.ExternalId.clone())); let ext_modified = user_org.set_external_id(Some(user_data.external_id.clone()));
if restored || ext_modified { if restored || ext_modified {
user_org.save(&mut conn).await?; user_org.save(&mut conn).await?;
} }
} else { } else {
// If user is not part of the organization // If user is not part of the organization
let user = match User::find_by_mail(&user_data.Email, &mut conn).await { let user = match User::find_by_mail(&user_data.email, &mut conn).await {
Some(user) => user, // exists in vaultwarden Some(user) => user, // exists in vaultwarden
None => { None => {
// User does not exist yet // User does not exist yet
let mut new_user = User::new(user_data.Email.clone()); let mut new_user = User::new(user_data.email.clone());
new_user.save(&mut conn).await?; new_user.save(&mut conn).await?;
if !CONFIG.mail_enabled() { if !CONFIG.mail_enabled() {
@ -109,7 +110,7 @@ async fn ldap_import(data: JsonUpcase<OrgImportData>, token: PublicToken, mut co
}; };
let mut new_org_user = UserOrganization::new(user.uuid.clone(), org_id.clone()); let mut new_org_user = UserOrganization::new(user.uuid.clone(), org_id.clone());
new_org_user.set_external_id(Some(user_data.ExternalId.clone())); new_org_user.set_external_id(Some(user_data.external_id.clone()));
new_org_user.access_all = false; new_org_user.access_all = false;
new_org_user.atype = UserOrgType::User as i32; new_org_user.atype = UserOrgType::User as i32;
new_org_user.status = user_org_status; new_org_user.status = user_org_status;
@ -123,7 +124,7 @@ async fn ldap_import(data: JsonUpcase<OrgImportData>, token: PublicToken, mut co
}; };
mail::send_invite( mail::send_invite(
&user_data.Email, &user_data.email,
&user.uuid, &user.uuid,
Some(org_id.clone()), Some(org_id.clone()),
Some(new_org_user.uuid), Some(new_org_user.uuid),
@ -136,13 +137,17 @@ async fn ldap_import(data: JsonUpcase<OrgImportData>, token: PublicToken, mut co
} }
if CONFIG.org_groups_enabled() { if CONFIG.org_groups_enabled() {
for group_data in &data.Groups { for group_data in &data.groups {
let group_uuid = match Group::find_by_external_id_and_org(&group_data.ExternalId, &org_id, &mut conn).await let group_uuid = match Group::find_by_external_id_and_org(&group_data.external_id, &org_id, &mut conn).await
{ {
Some(group) => group.uuid, Some(group) => group.uuid,
None => { None => {
let mut group = let mut group = Group::new(
Group::new(org_id.clone(), group_data.Name.clone(), false, Some(group_data.ExternalId.clone())); org_id.clone(),
group_data.name.clone(),
false,
Some(group_data.external_id.clone()),
);
group.save(&mut conn).await?; group.save(&mut conn).await?;
group.uuid group.uuid
} }
@ -150,7 +155,7 @@ async fn ldap_import(data: JsonUpcase<OrgImportData>, token: PublicToken, mut co
GroupUser::delete_all_by_group(&group_uuid, &mut conn).await?; GroupUser::delete_all_by_group(&group_uuid, &mut conn).await?;
for ext_id in &group_data.MemberExternalIds { for ext_id in &group_data.member_external_ids {
if let Some(user_org) = UserOrganization::find_by_external_id_and_org(ext_id, &org_id, &mut conn).await if let Some(user_org) = UserOrganization::find_by_external_id_and_org(ext_id, &org_id, &mut conn).await
{ {
let mut group_user = GroupUser::new(group_uuid.clone(), user_org.uuid.clone()); let mut group_user = GroupUser::new(group_uuid.clone(), user_org.uuid.clone());
@ -163,9 +168,9 @@ async fn ldap_import(data: JsonUpcase<OrgImportData>, token: PublicToken, mut co
} }
// If this flag is enabled, any user that isn't provided in the Users list will be removed (by default they will be kept unless they have Deleted == true) // If this flag is enabled, any user that isn't provided in the Users list will be removed (by default they will be kept unless they have Deleted == true)
if data.OverwriteExisting { if data.overwrite_existing {
// Generate a HashSet to quickly verify if a member is listed or not. // Generate a HashSet to quickly verify if a member is listed or not.
let sync_members: HashSet<String> = data.Members.into_iter().map(|m| m.ExternalId).collect(); let sync_members: HashSet<String> = data.members.into_iter().map(|m| m.external_id).collect();
for user_org in UserOrganization::find_by_org(&org_id, &mut conn).await { for user_org in UserOrganization::find_by_org(&org_id, &mut conn).await {
if let Some(ref user_external_id) = user_org.external_id { if let Some(ref user_external_id) = user_org.external_id {
if !sync_members.contains(user_external_id) { if !sync_members.contains(user_external_id) {
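
The directory-connector import payload follows the same convention, including nested structs. A trimmed sketch (the real OrgImportData also carries a groups list; values here are invented):

use serde::Deserialize;

#[derive(Deserialize)]
#[serde(rename_all = "camelCase")]
struct OrgImportUserData {
    email: String,
    external_id: String,
    deleted: bool,
}

#[derive(Deserialize)]
#[serde(rename_all = "camelCase")]
struct OrgImportData {
    members: Vec<OrgImportUserData>,
    overwrite_existing: bool,
}

fn main() {
    let body = r#"{
        "members": [{"email":"a@example.com","externalId":"ext-1","deleted":false}],
        "overwriteExisting": true
    }"#;
    let data: OrgImportData = serde_json::from_str(body).unwrap();
    assert_eq!(data.members[0].external_id, "ext-1");
    assert!(data.overwrite_existing);
}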

View file

@ -9,7 +9,7 @@ use rocket::serde::json::Json;
use serde_json::Value; use serde_json::Value;
use crate::{ use crate::{
api::{ApiResult, EmptyResult, JsonResult, JsonUpcase, Notify, UpdateType}, api::{ApiResult, EmptyResult, JsonResult, Notify, UpdateType},
auth::{ClientIp, Headers, Host}, auth::{ClientIp, Headers, Host},
db::{models::*, DbConn, DbPool}, db::{models::*, DbConn, DbPool},
util::{NumberOrString, SafeString}, util::{NumberOrString, SafeString},
@ -48,26 +48,26 @@ pub async fn purge_sends(pool: DbPool) {
} }
#[derive(Deserialize)] #[derive(Deserialize)]
#[allow(non_snake_case)] #[serde(rename_all = "camelCase")]
pub struct SendData { pub struct SendData {
Type: i32, r#type: i32,
Key: String, key: String,
Password: Option<String>, password: Option<String>,
MaxAccessCount: Option<NumberOrString>, max_access_count: Option<NumberOrString>,
ExpirationDate: Option<DateTime<Utc>>, expiration_date: Option<DateTime<Utc>>,
DeletionDate: DateTime<Utc>, deletion_date: DateTime<Utc>,
Disabled: bool, disabled: bool,
HideEmail: Option<bool>, hide_email: Option<bool>,
// Data field // Data field
Name: String, name: String,
Notes: Option<String>, notes: Option<String>,
Text: Option<Value>, text: Option<Value>,
File: Option<Value>, file: Option<Value>,
FileLength: Option<NumberOrString>, file_length: Option<NumberOrString>,
// Used for key rotations // Used for key rotations
pub Id: Option<String>, pub id: Option<String>,
} }
/// Enforces the `Disable Send` policy. A non-owner/admin user belonging to /// Enforces the `Disable Send` policy. A non-owner/admin user belonging to
@ -96,7 +96,7 @@ async fn enforce_disable_send_policy(headers: &Headers, conn: &mut DbConn) -> Em
/// Ref: https://bitwarden.com/help/article/policies/#send-options /// Ref: https://bitwarden.com/help/article/policies/#send-options
async fn enforce_disable_hide_email_policy(data: &SendData, headers: &Headers, conn: &mut DbConn) -> EmptyResult { async fn enforce_disable_hide_email_policy(data: &SendData, headers: &Headers, conn: &mut DbConn) -> EmptyResult {
let user_uuid = &headers.user.uuid; let user_uuid = &headers.user.uuid;
let hide_email = data.HideEmail.unwrap_or(false); let hide_email = data.hide_email.unwrap_or(false);
if hide_email && OrgPolicy::is_hide_email_disabled(user_uuid, conn).await { if hide_email && OrgPolicy::is_hide_email_disabled(user_uuid, conn).await {
err!( err!(
"Due to an Enterprise Policy, you are not allowed to hide your email address \ "Due to an Enterprise Policy, you are not allowed to hide your email address \
@ -107,40 +107,40 @@ async fn enforce_disable_hide_email_policy(data: &SendData, headers: &Headers, c
} }
fn create_send(data: SendData, user_uuid: String) -> ApiResult<Send> { fn create_send(data: SendData, user_uuid: String) -> ApiResult<Send> {
let data_val = if data.Type == SendType::Text as i32 { let data_val = if data.r#type == SendType::Text as i32 {
data.Text data.text
} else if data.Type == SendType::File as i32 { } else if data.r#type == SendType::File as i32 {
data.File data.file
} else { } else {
err!("Invalid Send type") err!("Invalid Send type")
}; };
let data_str = if let Some(mut d) = data_val { let data_str = if let Some(mut d) = data_val {
d.as_object_mut().and_then(|o| o.remove("Response")); d.as_object_mut().and_then(|o| o.remove("response"));
serde_json::to_string(&d)? serde_json::to_string(&d)?
} else { } else {
err!("Send data not provided"); err!("Send data not provided");
}; };
if data.DeletionDate > Utc::now() + TimeDelta::try_days(31).unwrap() { if data.deletion_date > Utc::now() + TimeDelta::try_days(31).unwrap() {
err!( err!(
"You cannot have a Send with a deletion date that far into the future. Adjust the Deletion Date to a value less than 31 days from now and try again." "You cannot have a Send with a deletion date that far into the future. Adjust the Deletion Date to a value less than 31 days from now and try again."
); );
} }
let mut send = Send::new(data.Type, data.Name, data_str, data.Key, data.DeletionDate.naive_utc()); let mut send = Send::new(data.r#type, data.name, data_str, data.key, data.deletion_date.naive_utc());
send.user_uuid = Some(user_uuid); send.user_uuid = Some(user_uuid);
send.notes = data.Notes; send.notes = data.notes;
send.max_access_count = match data.MaxAccessCount { send.max_access_count = match data.max_access_count {
Some(m) => Some(m.into_i32()?), Some(m) => Some(m.into_i32()?),
_ => None, _ => None,
}; };
send.expiration_date = data.ExpirationDate.map(|d| d.naive_utc()); send.expiration_date = data.expiration_date.map(|d| d.naive_utc());
send.disabled = data.Disabled; send.disabled = data.disabled;
send.hide_email = data.HideEmail; send.hide_email = data.hide_email;
send.atype = data.Type; send.atype = data.r#type;
send.set_password(data.Password.as_deref()); send.set_password(data.password.as_deref());
Ok(send) Ok(send)
} }
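
Fields such as maxAccessCount and fileLength stay typed as NumberOrString because some clients (notably mobile) send numeric values as JSON strings. The crate's helper is not shown in this diff; a hypothetical stand-in that behaves the same way could look like:

use serde::Deserialize;

#[derive(Deserialize)]
#[serde(untagged)]
enum NumberOrString {
    Number(i64),
    String(String),
}

impl NumberOrString {
    fn into_i64(self) -> Result<i64, std::num::ParseIntError> {
        match self {
            NumberOrString::Number(n) => Ok(n),
            NumberOrString::String(s) => s.parse(),
        }
    }
}

fn main() {
    // Both a JSON number and a JSON string parse to the same value.
    let a: NumberOrString = serde_json::from_str("42").unwrap();
    let b: NumberOrString = serde_json::from_str("\"42\"").unwrap();
    assert_eq!(a.into_i64().unwrap(), 42);
    assert_eq!(b.into_i64().unwrap(), 42);
}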
@ -151,9 +151,9 @@ async fn get_sends(headers: Headers, mut conn: DbConn) -> Json<Value> {
let sends_json: Vec<Value> = sends.await.iter().map(|s| s.to_json()).collect(); let sends_json: Vec<Value> = sends.await.iter().map(|s| s.to_json()).collect();
Json(json!({ Json(json!({
"Data": sends_json, "data": sends_json,
"Object": "list", "object": "list",
"ContinuationToken": null "continuationToken": null
})) }))
} }
@ -172,13 +172,13 @@ async fn get_send(uuid: &str, headers: Headers, mut conn: DbConn) -> JsonResult
} }
#[post("/sends", data = "<data>")] #[post("/sends", data = "<data>")]
async fn post_send(data: JsonUpcase<SendData>, headers: Headers, mut conn: DbConn, nt: Notify<'_>) -> JsonResult { async fn post_send(data: Json<SendData>, headers: Headers, mut conn: DbConn, nt: Notify<'_>) -> JsonResult {
enforce_disable_send_policy(&headers, &mut conn).await?; enforce_disable_send_policy(&headers, &mut conn).await?;
let data: SendData = data.into_inner().data; let data: SendData = data.into_inner();
enforce_disable_hide_email_policy(&data, &headers, &mut conn).await?; enforce_disable_hide_email_policy(&data, &headers, &mut conn).await?;
if data.Type == SendType::File as i32 { if data.r#type == SendType::File as i32 {
err!("File sends should use /api/sends/file") err!("File sends should use /api/sends/file")
} }
@ -198,7 +198,7 @@ async fn post_send(data: JsonUpcase<SendData>, headers: Headers, mut conn: DbCon
#[derive(FromForm)] #[derive(FromForm)]
struct UploadData<'f> { struct UploadData<'f> {
model: Json<crate::util::UpCase<SendData>>, model: Json<SendData>,
data: TempFile<'f>, data: TempFile<'f>,
} }
@ -218,7 +218,7 @@ async fn post_send_file(data: Form<UploadData<'_>>, headers: Headers, mut conn:
model, model,
mut data, mut data,
} = data.into_inner(); } = data.into_inner();
let model = model.into_inner().data; let model = model.into_inner();
let Some(size) = data.len().to_i64() else { let Some(size) = data.len().to_i64() else {
err!("Invalid send size"); err!("Invalid send size");
@ -266,9 +266,9 @@ async fn post_send_file(data: Form<UploadData<'_>>, headers: Headers, mut conn:
let mut data_value: Value = serde_json::from_str(&send.data)?; let mut data_value: Value = serde_json::from_str(&send.data)?;
if let Some(o) = data_value.as_object_mut() { if let Some(o) = data_value.as_object_mut() {
o.insert(String::from("Id"), Value::String(file_id)); o.insert(String::from("id"), Value::String(file_id));
o.insert(String::from("Size"), Value::Number(size.into())); o.insert(String::from("size"), Value::Number(size.into()));
o.insert(String::from("SizeName"), Value::String(crate::util::get_display_size(size))); o.insert(String::from("sizeName"), Value::String(crate::util::get_display_size(size)));
} }
send.data = serde_json::to_string(&data_value)?; send.data = serde_json::to_string(&data_value)?;
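
The send's data column stores a JSON blob, so the file metadata keys written into it ("id", "size", "sizeName") had to be lowercased by hand rather than via serde. A minimal sketch of that Value manipulation (file name, id and size are placeholders):

use serde_json::{json, Value};

fn main() {
    let mut data_value: Value = json!({ "fileName": "report.pdf" });
    if let Some(o) = data_value.as_object_mut() {
        o.insert(String::from("id"), Value::String("file-uuid".into()));
        o.insert(String::from("size"), Value::Number(1024.into()));
        o.insert(String::from("sizeName"), Value::String("1.00 KB".into()));
    }
    // The blob is re-serialized and stored back on the Send row.
    println!("{}", serde_json::to_string(&data_value).unwrap());
}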
@ -288,18 +288,18 @@ async fn post_send_file(data: Form<UploadData<'_>>, headers: Headers, mut conn:
// Upstream: https://github.com/bitwarden/server/blob/d0c793c95181dfb1b447eb450f85ba0bfd7ef643/src/Api/Controllers/SendsController.cs#L190 // Upstream: https://github.com/bitwarden/server/blob/d0c793c95181dfb1b447eb450f85ba0bfd7ef643/src/Api/Controllers/SendsController.cs#L190
#[post("/sends/file/v2", data = "<data>")] #[post("/sends/file/v2", data = "<data>")]
async fn post_send_file_v2(data: JsonUpcase<SendData>, headers: Headers, mut conn: DbConn) -> JsonResult { async fn post_send_file_v2(data: Json<SendData>, headers: Headers, mut conn: DbConn) -> JsonResult {
enforce_disable_send_policy(&headers, &mut conn).await?; enforce_disable_send_policy(&headers, &mut conn).await?;
let data = data.into_inner().data; let data = data.into_inner();
if data.Type != SendType::File as i32 { if data.r#type != SendType::File as i32 {
err!("Send content is not a file"); err!("Send content is not a file");
} }
enforce_disable_hide_email_policy(&data, &headers, &mut conn).await?; enforce_disable_hide_email_policy(&data, &headers, &mut conn).await?;
let file_length = match &data.FileLength { let file_length = match &data.file_length {
Some(m) => m.into_i64()?, Some(m) => m.into_i64()?,
_ => err!("Invalid send length"), _ => err!("Invalid send length"),
}; };
@ -334,9 +334,9 @@ async fn post_send_file_v2(data: JsonUpcase<SendData>, headers: Headers, mut con
let mut data_value: Value = serde_json::from_str(&send.data)?; let mut data_value: Value = serde_json::from_str(&send.data)?;
if let Some(o) = data_value.as_object_mut() { if let Some(o) = data_value.as_object_mut() {
o.insert(String::from("Id"), Value::String(file_id.clone())); o.insert(String::from("id"), Value::String(file_id.clone()));
o.insert(String::from("Size"), Value::Number(file_length.into())); o.insert(String::from("size"), Value::Number(file_length.into()));
o.insert(String::from("SizeName"), Value::String(crate::util::get_display_size(file_length))); o.insert(String::from("sizeName"), Value::String(crate::util::get_display_size(file_length)));
} }
send.data = serde_json::to_string(&data_value)?; send.data = serde_json::to_string(&data_value)?;
send.save(&mut conn).await?; send.save(&mut conn).await?;
@ -395,15 +395,15 @@ async fn post_send_file_v2_data(
} }
#[derive(Deserialize)] #[derive(Deserialize)]
#[allow(non_snake_case)] #[serde(rename_all = "camelCase")]
pub struct SendAccessData { pub struct SendAccessData {
pub Password: Option<String>, pub password: Option<String>,
} }
#[post("/sends/access/<access_id>", data = "<data>")] #[post("/sends/access/<access_id>", data = "<data>")]
async fn post_access( async fn post_access(
access_id: &str, access_id: &str,
data: JsonUpcase<SendAccessData>, data: Json<SendAccessData>,
mut conn: DbConn, mut conn: DbConn,
ip: ClientIp, ip: ClientIp,
nt: Notify<'_>, nt: Notify<'_>,
@ -434,7 +434,7 @@ async fn post_access(
} }
if send.password_hash.is_some() { if send.password_hash.is_some() {
match data.into_inner().data.Password { match data.into_inner().password {
Some(ref p) if send.check_password(p) => { /* Nothing to do here */ } Some(ref p) if send.check_password(p) => { /* Nothing to do here */ }
Some(_) => err!("Invalid password", format!("IP: {}.", ip.ip)), Some(_) => err!("Invalid password", format!("IP: {}.", ip.ip)),
None => err_code!("Password not provided", format!("IP: {}.", ip.ip), 401), None => err_code!("Password not provided", format!("IP: {}.", ip.ip), 401),
@ -464,7 +464,7 @@ async fn post_access(
async fn post_access_file( async fn post_access_file(
send_id: &str, send_id: &str,
file_id: &str, file_id: &str,
data: JsonUpcase<SendAccessData>, data: Json<SendAccessData>,
host: Host, host: Host,
mut conn: DbConn, mut conn: DbConn,
nt: Notify<'_>, nt: Notify<'_>,
@ -495,7 +495,7 @@ async fn post_access_file(
} }
if send.password_hash.is_some() { if send.password_hash.is_some() {
match data.into_inner().data.Password { match data.into_inner().password {
Some(ref p) if send.check_password(p) => { /* Nothing to do here */ } Some(ref p) if send.check_password(p) => { /* Nothing to do here */ }
Some(_) => err!("Invalid password."), Some(_) => err!("Invalid password."),
None => err_code!("Password not provided", 401), None => err_code!("Password not provided", 401),
@ -518,9 +518,9 @@ async fn post_access_file(
let token_claims = crate::auth::generate_send_claims(send_id, file_id); let token_claims = crate::auth::generate_send_claims(send_id, file_id);
let token = crate::auth::encode_jwt(&token_claims); let token = crate::auth::encode_jwt(&token_claims);
Ok(Json(json!({ Ok(Json(json!({
"Object": "send-fileDownload", "object": "send-fileDownload",
"Id": file_id, "id": file_id,
"Url": format!("{}/api/sends/{}/{}?t={}", &host.host, send_id, file_id, token) "url": format!("{}/api/sends/{}/{}?t={}", &host.host, send_id, file_id, token)
}))) })))
} }
@ -535,16 +535,10 @@ async fn download_send(send_id: SafeString, file_id: SafeString, t: &str) -> Opt
} }
#[put("/sends/<id>", data = "<data>")] #[put("/sends/<id>", data = "<data>")]
async fn put_send( async fn put_send(id: &str, data: Json<SendData>, headers: Headers, mut conn: DbConn, nt: Notify<'_>) -> JsonResult {
id: &str,
data: JsonUpcase<SendData>,
headers: Headers,
mut conn: DbConn,
nt: Notify<'_>,
) -> JsonResult {
enforce_disable_send_policy(&headers, &mut conn).await?; enforce_disable_send_policy(&headers, &mut conn).await?;
let data: SendData = data.into_inner().data; let data: SendData = data.into_inner();
enforce_disable_hide_email_policy(&data, &headers, &mut conn).await?; enforce_disable_hide_email_policy(&data, &headers, &mut conn).await?;
let mut send = match Send::find_by_uuid(id, &mut conn).await { let mut send = match Send::find_by_uuid(id, &mut conn).await {
@ -569,11 +563,11 @@ pub async fn update_send_from_data(
err!("Send is not owned by user") err!("Send is not owned by user")
} }
if send.atype != data.Type { if send.atype != data.r#type {
err!("Sends can't change type") err!("Sends can't change type")
} }
if data.DeletionDate > Utc::now() + TimeDelta::try_days(31).unwrap() { if data.deletion_date > Utc::now() + TimeDelta::try_days(31).unwrap() {
err!( err!(
"You cannot have a Send with a deletion date that far into the future. Adjust the Deletion Date to a value less than 31 days from now and try again." "You cannot have a Send with a deletion date that far into the future. Adjust the Deletion Date to a value less than 31 days from now and try again."
); );
@ -581,9 +575,9 @@ pub async fn update_send_from_data(
// When updating a file Send, we receive nulls in the File field, as it's immutable, // When updating a file Send, we receive nulls in the File field, as it's immutable,
// so we only need to update the data field in the Text case // so we only need to update the data field in the Text case
if data.Type == SendType::Text as i32 { if data.r#type == SendType::Text as i32 {
let data_str = if let Some(mut d) = data.Text { let data_str = if let Some(mut d) = data.text {
d.as_object_mut().and_then(|d| d.remove("Response")); d.as_object_mut().and_then(|d| d.remove("response"));
serde_json::to_string(&d)? serde_json::to_string(&d)?
} else { } else {
err!("Send data not provided"); err!("Send data not provided");
@ -591,20 +585,20 @@ pub async fn update_send_from_data(
send.data = data_str; send.data = data_str;
} }
send.name = data.Name; send.name = data.name;
send.akey = data.Key; send.akey = data.key;
send.deletion_date = data.DeletionDate.naive_utc(); send.deletion_date = data.deletion_date.naive_utc();
send.notes = data.Notes; send.notes = data.notes;
send.max_access_count = match data.MaxAccessCount { send.max_access_count = match data.max_access_count {
Some(m) => Some(m.into_i32()?), Some(m) => Some(m.into_i32()?),
_ => None, _ => None,
}; };
send.expiration_date = data.ExpirationDate.map(|d| d.naive_utc()); send.expiration_date = data.expiration_date.map(|d| d.naive_utc());
send.hide_email = data.HideEmail; send.hide_email = data.hide_email;
send.disabled = data.Disabled; send.disabled = data.disabled;
// Only change the value if it's present // Only change the value if it's present
if let Some(password) = data.Password { if let Some(password) = data.password {
send.set_password(Some(&password)); send.set_password(Some(&password));
} }

View file

@ -3,10 +3,7 @@ use rocket::serde::json::Json;
use rocket::Route; use rocket::Route;
use crate::{ use crate::{
api::{ api::{core::log_user_event, core::two_factor::_generate_recover_code, EmptyResult, JsonResult, PasswordOrOtpData},
core::log_user_event, core::two_factor::_generate_recover_code, EmptyResult, JsonResult, JsonUpcase,
PasswordOrOtpData,
},
auth::{ClientIp, Headers}, auth::{ClientIp, Headers},
crypto, crypto,
db::{ db::{
@ -23,8 +20,8 @@ pub fn routes() -> Vec<Route> {
} }
#[post("/two-factor/get-authenticator", data = "<data>")] #[post("/two-factor/get-authenticator", data = "<data>")]
async fn generate_authenticator(data: JsonUpcase<PasswordOrOtpData>, headers: Headers, mut conn: DbConn) -> JsonResult { async fn generate_authenticator(data: Json<PasswordOrOtpData>, headers: Headers, mut conn: DbConn) -> JsonResult {
let data: PasswordOrOtpData = data.into_inner().data; let data: PasswordOrOtpData = data.into_inner();
let user = headers.user; let user = headers.user;
data.validate(&user, false, &mut conn).await?; data.validate(&user, false, &mut conn).await?;
@ -38,36 +35,32 @@ async fn generate_authenticator(data: JsonUpcase<PasswordOrOtpData>, headers: He
}; };
Ok(Json(json!({ Ok(Json(json!({
"Enabled": enabled, "enabled": enabled,
"Key": key, "key": key,
"Object": "twoFactorAuthenticator" "object": "twoFactorAuthenticator"
}))) })))
} }
#[derive(Deserialize, Debug)] #[derive(Debug, Deserialize)]
#[allow(non_snake_case)] #[serde(rename_all = "camelCase")]
struct EnableAuthenticatorData { struct EnableAuthenticatorData {
Key: String, key: String,
Token: NumberOrString, token: NumberOrString,
MasterPasswordHash: Option<String>, master_password_hash: Option<String>,
Otp: Option<String>, otp: Option<String>,
} }
#[post("/two-factor/authenticator", data = "<data>")] #[post("/two-factor/authenticator", data = "<data>")]
async fn activate_authenticator( async fn activate_authenticator(data: Json<EnableAuthenticatorData>, headers: Headers, mut conn: DbConn) -> JsonResult {
data: JsonUpcase<EnableAuthenticatorData>, let data: EnableAuthenticatorData = data.into_inner();
headers: Headers, let key = data.key;
mut conn: DbConn, let token = data.token.into_string();
) -> JsonResult {
let data: EnableAuthenticatorData = data.into_inner().data;
let key = data.Key;
let token = data.Token.into_string();
let mut user = headers.user; let mut user = headers.user;
PasswordOrOtpData { PasswordOrOtpData {
MasterPasswordHash: data.MasterPasswordHash, master_password_hash: data.master_password_hash,
Otp: data.Otp, otp: data.otp,
} }
.validate(&user, true, &mut conn) .validate(&user, true, &mut conn)
.await?; .await?;
@ -90,18 +83,14 @@ async fn activate_authenticator(
log_user_event(EventType::UserUpdated2fa as i32, &user.uuid, headers.device.atype, &headers.ip.ip, &mut conn).await; log_user_event(EventType::UserUpdated2fa as i32, &user.uuid, headers.device.atype, &headers.ip.ip, &mut conn).await;
Ok(Json(json!({ Ok(Json(json!({
"Enabled": true, "enabled": true,
"Key": key, "key": key,
"Object": "twoFactorAuthenticator" "object": "twoFactorAuthenticator"
}))) })))
} }
#[put("/two-factor/authenticator", data = "<data>")] #[put("/two-factor/authenticator", data = "<data>")]
async fn activate_authenticator_put( async fn activate_authenticator_put(data: Json<EnableAuthenticatorData>, headers: Headers, conn: DbConn) -> JsonResult {
data: JsonUpcase<EnableAuthenticatorData>,
headers: Headers,
conn: DbConn,
) -> JsonResult {
activate_authenticator(data, headers, conn).await activate_authenticator(data, headers, conn).await
} }

View file

@ -5,7 +5,7 @@ use rocket::Route;
use crate::{ use crate::{
api::{ api::{
core::log_user_event, core::two_factor::_generate_recover_code, ApiResult, EmptyResult, JsonResult, JsonUpcase, core::log_user_event, core::two_factor::_generate_recover_code, ApiResult, EmptyResult, JsonResult,
PasswordOrOtpData, PasswordOrOtpData,
}, },
auth::Headers, auth::Headers,
@ -92,8 +92,8 @@ impl DuoStatus {
const DISABLED_MESSAGE_DEFAULT: &str = "<To use the global Duo keys, please leave these fields untouched>"; const DISABLED_MESSAGE_DEFAULT: &str = "<To use the global Duo keys, please leave these fields untouched>";
#[post("/two-factor/get-duo", data = "<data>")] #[post("/two-factor/get-duo", data = "<data>")]
async fn get_duo(data: JsonUpcase<PasswordOrOtpData>, headers: Headers, mut conn: DbConn) -> JsonResult { async fn get_duo(data: Json<PasswordOrOtpData>, headers: Headers, mut conn: DbConn) -> JsonResult {
let data: PasswordOrOtpData = data.into_inner().data; let data: PasswordOrOtpData = data.into_inner();
let user = headers.user; let user = headers.user;
data.validate(&user, false, &mut conn).await?; data.validate(&user, false, &mut conn).await?;
@ -109,16 +109,16 @@ async fn get_duo(data: JsonUpcase<PasswordOrOtpData>, headers: Headers, mut conn
let json = if let Some(data) = data { let json = if let Some(data) = data {
json!({ json!({
"Enabled": enabled, "enabled": enabled,
"Host": data.host, "host": data.host,
"SecretKey": data.sk, "secretKey": data.sk,
"IntegrationKey": data.ik, "integrationKey": data.ik,
"Object": "twoFactorDuo" "object": "twoFactorDuo"
}) })
} else { } else {
json!({ json!({
"Enabled": enabled, "enabled": enabled,
"Object": "twoFactorDuo" "object": "twoFactorDuo"
}) })
}; };
@ -126,21 +126,21 @@ async fn get_duo(data: JsonUpcase<PasswordOrOtpData>, headers: Headers, mut conn
} }
#[derive(Deserialize)] #[derive(Deserialize)]
#[allow(non_snake_case, dead_code)] #[serde(rename_all = "camelCase")]
struct EnableDuoData { struct EnableDuoData {
Host: String, host: String,
SecretKey: String, secret_key: String,
IntegrationKey: String, integration_key: String,
MasterPasswordHash: Option<String>, master_password_hash: Option<String>,
Otp: Option<String>, otp: Option<String>,
} }
impl From<EnableDuoData> for DuoData { impl From<EnableDuoData> for DuoData {
fn from(d: EnableDuoData) -> Self { fn from(d: EnableDuoData) -> Self {
Self { Self {
host: d.Host, host: d.host,
ik: d.IntegrationKey, ik: d.integration_key,
sk: d.SecretKey, sk: d.secret_key,
} }
} }
} }
@ -151,17 +151,17 @@ fn check_duo_fields_custom(data: &EnableDuoData) -> bool {
st.is_empty() || s == DISABLED_MESSAGE_DEFAULT st.is_empty() || s == DISABLED_MESSAGE_DEFAULT
} }
!empty_or_default(&data.Host) && !empty_or_default(&data.SecretKey) && !empty_or_default(&data.IntegrationKey) !empty_or_default(&data.host) && !empty_or_default(&data.secret_key) && !empty_or_default(&data.integration_key)
} }
#[post("/two-factor/duo", data = "<data>")] #[post("/two-factor/duo", data = "<data>")]
async fn activate_duo(data: JsonUpcase<EnableDuoData>, headers: Headers, mut conn: DbConn) -> JsonResult { async fn activate_duo(data: Json<EnableDuoData>, headers: Headers, mut conn: DbConn) -> JsonResult {
let data: EnableDuoData = data.into_inner().data; let data: EnableDuoData = data.into_inner();
let mut user = headers.user; let mut user = headers.user;
PasswordOrOtpData { PasswordOrOtpData {
MasterPasswordHash: data.MasterPasswordHash.clone(), master_password_hash: data.master_password_hash.clone(),
Otp: data.Otp.clone(), otp: data.otp.clone(),
} }
.validate(&user, true, &mut conn) .validate(&user, true, &mut conn)
.await?; .await?;
@ -184,16 +184,16 @@ async fn activate_duo(data: JsonUpcase<EnableDuoData>, headers: Headers, mut con
log_user_event(EventType::UserUpdated2fa as i32, &user.uuid, headers.device.atype, &headers.ip.ip, &mut conn).await; log_user_event(EventType::UserUpdated2fa as i32, &user.uuid, headers.device.atype, &headers.ip.ip, &mut conn).await;
Ok(Json(json!({ Ok(Json(json!({
"Enabled": true, "enabled": true,
"Host": data.host, "host": data.host,
"SecretKey": data.sk, "secretKey": data.sk,
"IntegrationKey": data.ik, "integrationKey": data.ik,
"Object": "twoFactorDuo" "object": "twoFactorDuo"
}))) })))
} }
#[put("/two-factor/duo", data = "<data>")] #[put("/two-factor/duo", data = "<data>")]
async fn activate_duo_put(data: JsonUpcase<EnableDuoData>, headers: Headers, conn: DbConn) -> JsonResult { async fn activate_duo_put(data: Json<EnableDuoData>, headers: Headers, conn: DbConn) -> JsonResult {
activate_duo(data, headers, conn).await activate_duo(data, headers, conn).await
} }
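
EnableDuoData keeps the incoming camelCase field names (host, secretKey, integrationKey) while DuoData stores the shorter ik/sk names, so the From impl does the mapping. A condensed sketch of that conversion with placeholder values:

struct DuoData {
    host: String,
    ik: String,
    sk: String,
}

struct EnableDuoData {
    host: String,
    secret_key: String,
    integration_key: String,
}

impl From<EnableDuoData> for DuoData {
    fn from(d: EnableDuoData) -> Self {
        Self { host: d.host, ik: d.integration_key, sk: d.secret_key }
    }
}

fn main() {
    let data = EnableDuoData {
        host: "api-xxxxxxxx.duosecurity.com".into(),
        secret_key: "sk-value".into(),
        integration_key: "ik-value".into(),
    };
    let duo: DuoData = data.into();
    assert_eq!(duo.host, "api-xxxxxxxx.duosecurity.com");
    assert_eq!(duo.ik, "ik-value");
    assert_eq!(duo.sk, "sk-value");
}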

View file

@ -5,7 +5,7 @@ use rocket::Route;
use crate::{ use crate::{
api::{ api::{
core::{log_user_event, two_factor::_generate_recover_code}, core::{log_user_event, two_factor::_generate_recover_code},
EmptyResult, JsonResult, JsonUpcase, PasswordOrOtpData, EmptyResult, JsonResult, PasswordOrOtpData,
}, },
auth::Headers, auth::Headers,
crypto, crypto,
@ -22,28 +22,28 @@ pub fn routes() -> Vec<Route> {
} }
#[derive(Deserialize)] #[derive(Deserialize)]
#[allow(non_snake_case)] #[serde(rename_all = "camelCase")]
struct SendEmailLoginData { struct SendEmailLoginData {
Email: String, email: String,
MasterPasswordHash: String, master_password_hash: String,
} }
/// User is trying to login and wants to use email 2FA. /// User is trying to login and wants to use email 2FA.
/// Does not require Bearer token /// Does not require Bearer token
#[post("/two-factor/send-email-login", data = "<data>")] // JsonResult #[post("/two-factor/send-email-login", data = "<data>")] // JsonResult
async fn send_email_login(data: JsonUpcase<SendEmailLoginData>, mut conn: DbConn) -> EmptyResult { async fn send_email_login(data: Json<SendEmailLoginData>, mut conn: DbConn) -> EmptyResult {
let data: SendEmailLoginData = data.into_inner().data; let data: SendEmailLoginData = data.into_inner();
use crate::db::models::User; use crate::db::models::User;
// Get the user // Get the user
let user = match User::find_by_mail(&data.Email, &mut conn).await { let user = match User::find_by_mail(&data.email, &mut conn).await {
Some(user) => user, Some(user) => user,
None => err!("Username or password is incorrect. Try again."), None => err!("Username or password is incorrect. Try again."),
}; };
// Check password // Check password
if !user.check_valid_password(&data.MasterPasswordHash) { if !user.check_valid_password(&data.master_password_hash) {
err!("Username or password is incorrect. Try again.") err!("Username or password is incorrect. Try again.")
} }
@ -76,8 +76,8 @@ pub async fn send_token(user_uuid: &str, conn: &mut DbConn) -> EmptyResult {
/// When user clicks on Manage email 2FA show the user the related information /// When user clicks on Manage email 2FA show the user the related information
#[post("/two-factor/get-email", data = "<data>")] #[post("/two-factor/get-email", data = "<data>")]
async fn get_email(data: JsonUpcase<PasswordOrOtpData>, headers: Headers, mut conn: DbConn) -> JsonResult { async fn get_email(data: Json<PasswordOrOtpData>, headers: Headers, mut conn: DbConn) -> JsonResult {
let data: PasswordOrOtpData = data.into_inner().data; let data: PasswordOrOtpData = data.into_inner();
let user = headers.user; let user = headers.user;
data.validate(&user, false, &mut conn).await?; data.validate(&user, false, &mut conn).await?;
@ -92,30 +92,30 @@ async fn get_email(data: JsonUpcase<PasswordOrOtpData>, headers: Headers, mut co
}; };
Ok(Json(json!({ Ok(Json(json!({
"Email": mfa_email, "email": mfa_email,
"Enabled": enabled, "enabled": enabled,
"Object": "twoFactorEmail" "object": "twoFactorEmail"
}))) })))
} }
#[derive(Deserialize)] #[derive(Deserialize)]
#[allow(non_snake_case)] #[serde(rename_all = "camelCase")]
struct SendEmailData { struct SendEmailData {
/// Email where 2FA codes will be sent to, can be different than user email account. /// Email where 2FA codes will be sent to, can be different than user email account.
Email: String, email: String,
MasterPasswordHash: Option<String>, master_password_hash: Option<String>,
Otp: Option<String>, otp: Option<String>,
} }
/// Send a verification email to the specified email address to check whether it exists/belongs to user. /// Send a verification email to the specified email address to check whether it exists/belongs to user.
#[post("/two-factor/send-email", data = "<data>")] #[post("/two-factor/send-email", data = "<data>")]
async fn send_email(data: JsonUpcase<SendEmailData>, headers: Headers, mut conn: DbConn) -> EmptyResult { async fn send_email(data: Json<SendEmailData>, headers: Headers, mut conn: DbConn) -> EmptyResult {
let data: SendEmailData = data.into_inner().data; let data: SendEmailData = data.into_inner();
let user = headers.user; let user = headers.user;
PasswordOrOtpData { PasswordOrOtpData {
MasterPasswordHash: data.MasterPasswordHash, master_password_hash: data.master_password_hash,
Otp: data.Otp, otp: data.otp,
} }
.validate(&user, false, &mut conn) .validate(&user, false, &mut conn)
.await?; .await?;
@ -131,7 +131,7 @@ async fn send_email(data: JsonUpcase<SendEmailData>, headers: Headers, mut conn:
} }
let generated_token = crypto::generate_email_token(CONFIG.email_token_size()); let generated_token = crypto::generate_email_token(CONFIG.email_token_size());
let twofactor_data = EmailTokenData::new(data.Email, generated_token); let twofactor_data = EmailTokenData::new(data.email, generated_token);
// Uses EmailVerificationChallenge as type to show that it's not verified yet. // Uses EmailVerificationChallenge as type to show that it's not verified yet.
let twofactor = TwoFactor::new(user.uuid, TwoFactorType::EmailVerificationChallenge, twofactor_data.to_json()); let twofactor = TwoFactor::new(user.uuid, TwoFactorType::EmailVerificationChallenge, twofactor_data.to_json());
@ -143,24 +143,24 @@ async fn send_email(data: JsonUpcase<SendEmailData>, headers: Headers, mut conn:
} }
#[derive(Deserialize, Serialize)] #[derive(Deserialize, Serialize)]
#[allow(non_snake_case)] #[serde(rename_all = "camelCase")]
struct EmailData { struct EmailData {
Email: String, email: String,
Token: String, token: String,
MasterPasswordHash: Option<String>, master_password_hash: Option<String>,
Otp: Option<String>, otp: Option<String>,
} }
/// Verify email belongs to user and can be used for 2FA email codes. /// Verify email belongs to user and can be used for 2FA email codes.
#[put("/two-factor/email", data = "<data>")] #[put("/two-factor/email", data = "<data>")]
async fn email(data: JsonUpcase<EmailData>, headers: Headers, mut conn: DbConn) -> JsonResult { async fn email(data: Json<EmailData>, headers: Headers, mut conn: DbConn) -> JsonResult {
let data: EmailData = data.into_inner().data; let data: EmailData = data.into_inner();
let mut user = headers.user; let mut user = headers.user;
// This is the last step in the verification process, delete the otp directly afterwards // This is the last step in the verification process, delete the otp directly afterwards
PasswordOrOtpData { PasswordOrOtpData {
MasterPasswordHash: data.MasterPasswordHash, master_password_hash: data.master_password_hash,
Otp: data.Otp, otp: data.otp,
} }
.validate(&user, true, &mut conn) .validate(&user, true, &mut conn)
.await?; .await?;
@ -176,7 +176,7 @@ async fn email(data: JsonUpcase<EmailData>, headers: Headers, mut conn: DbConn)
_ => err!("No token available"), _ => err!("No token available"),
}; };
if !crypto::ct_eq(issued_token, data.Token) { if !crypto::ct_eq(issued_token, data.token) {
err!("Token is invalid") err!("Token is invalid")
} }
@ -190,9 +190,9 @@ async fn email(data: JsonUpcase<EmailData>, headers: Headers, mut conn: DbConn)
log_user_event(EventType::UserUpdated2fa as i32, &user.uuid, headers.device.atype, &headers.ip.ip, &mut conn).await; log_user_event(EventType::UserUpdated2fa as i32, &user.uuid, headers.device.atype, &headers.ip.ip, &mut conn).await;
Ok(Json(json!({ Ok(Json(json!({
"Email": email_data.email, "email": email_data.email,
"Enabled": "true", "enabled": "true",
"Object": "twoFactorEmail" "object": "twoFactorEmail"
}))) })))
} }
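
The token check above goes through crypto::ct_eq to avoid a timing side channel when comparing the stored and submitted codes. The crate's implementation is not part of this diff; a hedged, minimal version of the idea looks like:

fn ct_eq(a: &str, b: &str) -> bool {
    if a.len() != b.len() {
        return false;
    }
    // Accumulate differences over every byte instead of returning early.
    a.bytes().zip(b.bytes()).fold(0u8, |acc, (x, y)| acc | (x ^ y)) == 0
}

fn main() {
    assert!(ct_eq("123456", "123456"));
    assert!(!ct_eq("123456", "654321"));
}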

View file

@ -7,7 +7,7 @@ use serde_json::Value;
use crate::{ use crate::{
api::{ api::{
core::{log_event, log_user_event}, core::{log_event, log_user_event},
EmptyResult, JsonResult, JsonUpcase, PasswordOrOtpData, EmptyResult, JsonResult, PasswordOrOtpData,
}, },
auth::{ClientHeaders, Headers}, auth::{ClientHeaders, Headers},
crypto, crypto,
@ -50,52 +50,52 @@ async fn get_twofactor(headers: Headers, mut conn: DbConn) -> Json<Value> {
let twofactors_json: Vec<Value> = twofactors.iter().map(TwoFactor::to_json_provider).collect(); let twofactors_json: Vec<Value> = twofactors.iter().map(TwoFactor::to_json_provider).collect();
Json(json!({ Json(json!({
"Data": twofactors_json, "data": twofactors_json,
"Object": "list", "object": "list",
"ContinuationToken": null, "continuationToken": null,
})) }))
} }
#[post("/two-factor/get-recover", data = "<data>")] #[post("/two-factor/get-recover", data = "<data>")]
async fn get_recover(data: JsonUpcase<PasswordOrOtpData>, headers: Headers, mut conn: DbConn) -> JsonResult { async fn get_recover(data: Json<PasswordOrOtpData>, headers: Headers, mut conn: DbConn) -> JsonResult {
let data: PasswordOrOtpData = data.into_inner().data; let data: PasswordOrOtpData = data.into_inner();
let user = headers.user; let user = headers.user;
data.validate(&user, true, &mut conn).await?; data.validate(&user, true, &mut conn).await?;
Ok(Json(json!({ Ok(Json(json!({
"Code": user.totp_recover, "code": user.totp_recover,
"Object": "twoFactorRecover" "object": "twoFactorRecover"
}))) })))
} }
#[derive(Deserialize)] #[derive(Deserialize)]
#[allow(non_snake_case)] #[serde(rename_all = "camelCase")]
struct RecoverTwoFactor { struct RecoverTwoFactor {
MasterPasswordHash: String, master_password_hash: String,
Email: String, email: String,
RecoveryCode: String, recovery_code: String,
} }
#[post("/two-factor/recover", data = "<data>")] #[post("/two-factor/recover", data = "<data>")]
async fn recover(data: JsonUpcase<RecoverTwoFactor>, client_headers: ClientHeaders, mut conn: DbConn) -> JsonResult { async fn recover(data: Json<RecoverTwoFactor>, client_headers: ClientHeaders, mut conn: DbConn) -> JsonResult {
let data: RecoverTwoFactor = data.into_inner().data; let data: RecoverTwoFactor = data.into_inner();
use crate::db::models::User; use crate::db::models::User;
// Get the user // Get the user
let mut user = match User::find_by_mail(&data.Email, &mut conn).await { let mut user = match User::find_by_mail(&data.email, &mut conn).await {
Some(user) => user, Some(user) => user,
None => err!("Username or password is incorrect. Try again."), None => err!("Username or password is incorrect. Try again."),
}; };
// Check password // Check password
if !user.check_valid_password(&data.MasterPasswordHash) { if !user.check_valid_password(&data.master_password_hash) {
err!("Username or password is incorrect. Try again.") err!("Username or password is incorrect. Try again.")
} }
// Check if recovery code is correct // Check if recovery code is correct
if !user.check_valid_recovery_code(&data.RecoveryCode) { if !user.check_valid_recovery_code(&data.recovery_code) {
err!("Recovery code is incorrect. Try again.") err!("Recovery code is incorrect. Try again.")
} }
@ -127,27 +127,27 @@ async fn _generate_recover_code(user: &mut User, conn: &mut DbConn) {
} }
#[derive(Deserialize)] #[derive(Deserialize)]
#[allow(non_snake_case)] #[serde(rename_all = "camelCase")]
struct DisableTwoFactorData { struct DisableTwoFactorData {
MasterPasswordHash: Option<String>, master_password_hash: Option<String>,
Otp: Option<String>, otp: Option<String>,
Type: NumberOrString, r#type: NumberOrString,
} }
#[post("/two-factor/disable", data = "<data>")] #[post("/two-factor/disable", data = "<data>")]
async fn disable_twofactor(data: JsonUpcase<DisableTwoFactorData>, headers: Headers, mut conn: DbConn) -> JsonResult { async fn disable_twofactor(data: Json<DisableTwoFactorData>, headers: Headers, mut conn: DbConn) -> JsonResult {
let data: DisableTwoFactorData = data.into_inner().data; let data: DisableTwoFactorData = data.into_inner();
let user = headers.user; let user = headers.user;
// Delete directly after a valid token has been provided // Delete directly after a valid token has been provided
PasswordOrOtpData { PasswordOrOtpData {
MasterPasswordHash: data.MasterPasswordHash, master_password_hash: data.master_password_hash,
Otp: data.Otp, otp: data.otp,
} }
.validate(&user, true, &mut conn) .validate(&user, true, &mut conn)
.await?; .await?;
let type_ = data.Type.into_i32()?; let type_ = data.r#type.into_i32()?;
if let Some(twofactor) = TwoFactor::find_by_user_and_type(&user.uuid, type_, &mut conn).await { if let Some(twofactor) = TwoFactor::find_by_user_and_type(&user.uuid, type_, &mut conn).await {
twofactor.delete(&mut conn).await?; twofactor.delete(&mut conn).await?;
@ -160,14 +160,14 @@ async fn disable_twofactor(data: JsonUpcase<DisableTwoFactorData>, headers: Head
} }
Ok(Json(json!({ Ok(Json(json!({
"Enabled": false, "enabled": false,
"Type": type_, "type": type_,
"Object": "twoFactorProvider" "object": "twoFactorProvider"
}))) })))
} }
#[put("/two-factor/disable", data = "<data>")] #[put("/two-factor/disable", data = "<data>")]
async fn disable_twofactor_put(data: JsonUpcase<DisableTwoFactorData>, headers: Headers, conn: DbConn) -> JsonResult { async fn disable_twofactor_put(data: Json<DisableTwoFactorData>, headers: Headers, conn: DbConn) -> JsonResult {
disable_twofactor(data, headers, conn).await disable_twofactor(data, headers, conn).await
} }

View file

@ -1,8 +1,8 @@
use chrono::{DateTime, TimeDelta, Utc}; use chrono::{DateTime, TimeDelta, Utc};
use rocket::Route; use rocket::{serde::json::Json, Route};
use crate::{ use crate::{
api::{EmptyResult, JsonUpcase}, api::EmptyResult,
auth::Headers, auth::Headers,
crypto, crypto,
db::{ db::{
@ -18,7 +18,7 @@ pub fn routes() -> Vec<Route> {
} }
/// Data stored in the TwoFactor table in the db /// Data stored in the TwoFactor table in the db
#[derive(Serialize, Deserialize, Debug)] #[derive(Debug, Serialize, Deserialize)]
pub struct ProtectedActionData { pub struct ProtectedActionData {
/// Token issued to validate the protected action /// Token issued to validate the protected action
pub token: String, pub token: String,
@ -82,23 +82,24 @@ async fn request_otp(headers: Headers, mut conn: DbConn) -> EmptyResult {
} }
#[derive(Deserialize, Serialize, Debug)] #[derive(Deserialize, Serialize, Debug)]
#[allow(non_snake_case)] #[serde(rename_all = "camelCase")]
struct ProtectedActionVerify { struct ProtectedActionVerify {
OTP: String, #[serde(rename = "OTP", alias = "otp")]
otp: String,
} }
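Note: the pattern in the struct above — deriving `rename_all = "camelCase"` while keeping a field-level `rename`/`alias` — is what lets the server accept both the new camelCase payloads and the legacy spellings during the transition. A minimal, self-contained sketch; the struct and payloads here are illustrative, not taken from the codebase:

use serde::Deserialize;

#[derive(Debug, Deserialize)]
#[serde(rename_all = "camelCase")]
struct ExampleVerify {
    // Deserialized from `masterPasswordHash` because of `rename_all`.
    master_password_hash: Option<String>,
    // `rename` keeps `OTP` as the primary name; `alias` also accepts `otp`.
    #[serde(rename = "OTP", alias = "otp")]
    otp: String,
}

fn main() {
    let new_style = r#"{ "masterPasswordHash": null, "otp": "123456" }"#;
    let old_style = r#"{ "masterPasswordHash": null, "OTP": "123456" }"#;
    // Both payloads deserialize into the same struct.
    assert!(serde_json::from_str::<ExampleVerify>(new_style).is_ok());
    assert!(serde_json::from_str::<ExampleVerify>(old_style).is_ok());
}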
#[post("/accounts/verify-otp", data = "<data>")] #[post("/accounts/verify-otp", data = "<data>")]
async fn verify_otp(data: JsonUpcase<ProtectedActionVerify>, headers: Headers, mut conn: DbConn) -> EmptyResult { async fn verify_otp(data: Json<ProtectedActionVerify>, headers: Headers, mut conn: DbConn) -> EmptyResult {
if !CONFIG.mail_enabled() { if !CONFIG.mail_enabled() {
err!("Email is disabled for this server. Either enable email or login using your master password instead of login via device."); err!("Email is disabled for this server. Either enable email or login using your master password instead of login via device.");
} }
let user = headers.user; let user = headers.user;
let data: ProtectedActionVerify = data.into_inner().data; let data: ProtectedActionVerify = data.into_inner();
// Delete the token after one validation attempt // Delete the token after one validation attempt
// This endpoint only gets called for the vault export, and doesn't need a second attempt // This endpoint only gets called for the vault export, and doesn't need a second attempt
validate_protected_action_otp(&data.OTP, &user.uuid, true, &mut conn).await validate_protected_action_otp(&data.otp, &user.uuid, true, &mut conn).await
} }
pub async fn validate_protected_action_otp( pub async fn validate_protected_action_otp(

View file

@ -7,7 +7,7 @@ use webauthn_rs::{base64_data::Base64UrlSafeData, proto::*, AuthenticationState,
use crate::{ use crate::{
api::{ api::{
core::{log_user_event, two_factor::_generate_recover_code}, core::{log_user_event, two_factor::_generate_recover_code},
EmptyResult, JsonResult, JsonUpcase, PasswordOrOtpData, EmptyResult, JsonResult, PasswordOrOtpData,
}, },
auth::Headers, auth::Headers,
db::{ db::{
@ -96,20 +96,20 @@ pub struct WebauthnRegistration {
impl WebauthnRegistration { impl WebauthnRegistration {
fn to_json(&self) -> Value { fn to_json(&self) -> Value {
json!({ json!({
"Id": self.id, "id": self.id,
"Name": self.name, "name": self.name,
"migrated": self.migrated, "migrated": self.migrated,
}) })
} }
} }
#[post("/two-factor/get-webauthn", data = "<data>")] #[post("/two-factor/get-webauthn", data = "<data>")]
async fn get_webauthn(data: JsonUpcase<PasswordOrOtpData>, headers: Headers, mut conn: DbConn) -> JsonResult { async fn get_webauthn(data: Json<PasswordOrOtpData>, headers: Headers, mut conn: DbConn) -> JsonResult {
if !CONFIG.domain_set() { if !CONFIG.domain_set() {
err!("`DOMAIN` environment variable is not set. Webauthn disabled") err!("`DOMAIN` environment variable is not set. Webauthn disabled")
} }
let data: PasswordOrOtpData = data.into_inner().data; let data: PasswordOrOtpData = data.into_inner();
let user = headers.user; let user = headers.user;
data.validate(&user, false, &mut conn).await?; data.validate(&user, false, &mut conn).await?;
@ -118,19 +118,15 @@ async fn get_webauthn(data: JsonUpcase<PasswordOrOtpData>, headers: Headers, mut
let registrations_json: Vec<Value> = registrations.iter().map(WebauthnRegistration::to_json).collect(); let registrations_json: Vec<Value> = registrations.iter().map(WebauthnRegistration::to_json).collect();
Ok(Json(json!({ Ok(Json(json!({
"Enabled": enabled, "enabled": enabled,
"Keys": registrations_json, "keys": registrations_json,
"Object": "twoFactorWebAuthn" "object": "twoFactorWebAuthn"
}))) })))
} }
#[post("/two-factor/get-webauthn-challenge", data = "<data>")] #[post("/two-factor/get-webauthn-challenge", data = "<data>")]
async fn generate_webauthn_challenge( async fn generate_webauthn_challenge(data: Json<PasswordOrOtpData>, headers: Headers, mut conn: DbConn) -> JsonResult {
data: JsonUpcase<PasswordOrOtpData>, let data: PasswordOrOtpData = data.into_inner();
headers: Headers,
mut conn: DbConn,
) -> JsonResult {
let data: PasswordOrOtpData = data.into_inner().data;
let user = headers.user; let user = headers.user;
data.validate(&user, false, &mut conn).await?; data.validate(&user, false, &mut conn).await?;
@ -161,102 +157,94 @@ async fn generate_webauthn_challenge(
} }
#[derive(Debug, Deserialize)] #[derive(Debug, Deserialize)]
#[allow(non_snake_case)] #[serde(rename_all = "camelCase")]
struct EnableWebauthnData { struct EnableWebauthnData {
Id: NumberOrString, // 1..5 id: NumberOrString, // 1..5
Name: String, name: String,
DeviceResponse: RegisterPublicKeyCredentialCopy, device_response: RegisterPublicKeyCredentialCopy,
MasterPasswordHash: Option<String>, master_password_hash: Option<String>,
Otp: Option<String>, otp: Option<String>,
} }
// This is copied from RegisterPublicKeyCredential to change the Response objects casing
#[derive(Debug, Deserialize)] #[derive(Debug, Deserialize)]
#[allow(non_snake_case)] #[serde(rename_all = "camelCase")]
struct RegisterPublicKeyCredentialCopy { struct RegisterPublicKeyCredentialCopy {
pub Id: String, pub id: String,
pub RawId: Base64UrlSafeData, pub raw_id: Base64UrlSafeData,
pub Response: AuthenticatorAttestationResponseRawCopy, pub response: AuthenticatorAttestationResponseRawCopy,
pub Type: String, pub r#type: String,
} }
// This is copied from AuthenticatorAttestationResponseRaw to change clientDataJSON to clientDataJson // This is copied from AuthenticatorAttestationResponseRaw to change clientDataJSON to clientDataJson
#[derive(Debug, Deserialize)] #[derive(Debug, Deserialize)]
#[allow(non_snake_case)] #[serde(rename_all = "camelCase")]
pub struct AuthenticatorAttestationResponseRawCopy { pub struct AuthenticatorAttestationResponseRawCopy {
pub AttestationObject: Base64UrlSafeData, #[serde(rename = "AttestationObject", alias = "attestationObject")]
pub ClientDataJson: Base64UrlSafeData, pub attestation_object: Base64UrlSafeData,
#[serde(rename = "clientDataJson", alias = "clientDataJSON")]
pub client_data_json: Base64UrlSafeData,
} }
impl From<RegisterPublicKeyCredentialCopy> for RegisterPublicKeyCredential { impl From<RegisterPublicKeyCredentialCopy> for RegisterPublicKeyCredential {
fn from(r: RegisterPublicKeyCredentialCopy) -> Self { fn from(r: RegisterPublicKeyCredentialCopy) -> Self {
Self { Self {
id: r.Id, id: r.id,
raw_id: r.RawId, raw_id: r.raw_id,
response: AuthenticatorAttestationResponseRaw { response: AuthenticatorAttestationResponseRaw {
attestation_object: r.Response.AttestationObject, attestation_object: r.response.attestation_object,
client_data_json: r.Response.ClientDataJson, client_data_json: r.response.client_data_json,
}, },
type_: r.Type, type_: r.r#type,
} }
} }
} }
// This is copied from PublicKeyCredential to change the Response objects casing
#[derive(Debug, Deserialize)] #[derive(Debug, Deserialize)]
#[allow(non_snake_case)] #[serde(rename_all = "camelCase")]
pub struct PublicKeyCredentialCopy { pub struct PublicKeyCredentialCopy {
pub Id: String, pub id: String,
pub RawId: Base64UrlSafeData, pub raw_id: Base64UrlSafeData,
pub Response: AuthenticatorAssertionResponseRawCopy, pub response: AuthenticatorAssertionResponseRawCopy,
pub Extensions: Option<AuthenticationExtensionsClientOutputsCopy>, pub extensions: Option<AuthenticationExtensionsClientOutputs>,
pub Type: String, pub r#type: String,
} }
// This is copied from AuthenticatorAssertionResponseRaw to change clientDataJSON to clientDataJson // This is copied from AuthenticatorAssertionResponseRaw to change clientDataJSON to clientDataJson
#[derive(Debug, Deserialize)] #[derive(Debug, Deserialize)]
#[allow(non_snake_case)] #[serde(rename_all = "camelCase")]
pub struct AuthenticatorAssertionResponseRawCopy { pub struct AuthenticatorAssertionResponseRawCopy {
pub AuthenticatorData: Base64UrlSafeData, pub authenticator_data: Base64UrlSafeData,
pub ClientDataJson: Base64UrlSafeData, #[serde(rename = "clientDataJson", alias = "clientDataJSON")]
pub Signature: Base64UrlSafeData, pub client_data_json: Base64UrlSafeData,
pub UserHandle: Option<Base64UrlSafeData>, pub signature: Base64UrlSafeData,
} pub user_handle: Option<Base64UrlSafeData>,
#[derive(Debug, Deserialize)]
#[allow(non_snake_case)]
pub struct AuthenticationExtensionsClientOutputsCopy {
#[serde(default)]
pub Appid: bool,
} }
impl From<PublicKeyCredentialCopy> for PublicKeyCredential { impl From<PublicKeyCredentialCopy> for PublicKeyCredential {
fn from(r: PublicKeyCredentialCopy) -> Self { fn from(r: PublicKeyCredentialCopy) -> Self {
Self { Self {
id: r.Id, id: r.id,
raw_id: r.RawId, raw_id: r.raw_id,
response: AuthenticatorAssertionResponseRaw { response: AuthenticatorAssertionResponseRaw {
authenticator_data: r.Response.AuthenticatorData, authenticator_data: r.response.authenticator_data,
client_data_json: r.Response.ClientDataJson, client_data_json: r.response.client_data_json,
signature: r.Response.Signature, signature: r.response.signature,
user_handle: r.Response.UserHandle, user_handle: r.response.user_handle,
}, },
extensions: r.Extensions.map(|e| AuthenticationExtensionsClientOutputs { extensions: r.extensions,
appid: e.Appid, type_: r.r#type,
}),
type_: r.Type,
} }
} }
} }
#[post("/two-factor/webauthn", data = "<data>")] #[post("/two-factor/webauthn", data = "<data>")]
async fn activate_webauthn(data: JsonUpcase<EnableWebauthnData>, headers: Headers, mut conn: DbConn) -> JsonResult { async fn activate_webauthn(data: Json<EnableWebauthnData>, headers: Headers, mut conn: DbConn) -> JsonResult {
let data: EnableWebauthnData = data.into_inner().data; let data: EnableWebauthnData = data.into_inner();
let mut user = headers.user; let mut user = headers.user;
PasswordOrOtpData { PasswordOrOtpData {
MasterPasswordHash: data.MasterPasswordHash, master_password_hash: data.master_password_hash,
Otp: data.Otp, otp: data.otp,
} }
.validate(&user, true, &mut conn) .validate(&user, true, &mut conn)
.await?; .await?;
@ -274,13 +262,13 @@ async fn activate_webauthn(data: JsonUpcase<EnableWebauthnData>, headers: Header
// Verify the credentials with the saved state // Verify the credentials with the saved state
let (credential, _data) = let (credential, _data) =
WebauthnConfig::load().register_credential(&data.DeviceResponse.into(), &state, |_| Ok(false))?; WebauthnConfig::load().register_credential(&data.device_response.into(), &state, |_| Ok(false))?;
let mut registrations: Vec<_> = get_webauthn_registrations(&user.uuid, &mut conn).await?.1; let mut registrations: Vec<_> = get_webauthn_registrations(&user.uuid, &mut conn).await?.1;
// TODO: Check for repeated IDs // TODO: Check for repeated IDs
registrations.push(WebauthnRegistration { registrations.push(WebauthnRegistration {
id: data.Id.into_i32()?, id: data.id.into_i32()?,
name: data.Name, name: data.name,
migrated: false, migrated: false,
credential, credential,
@ -296,28 +284,28 @@ async fn activate_webauthn(data: JsonUpcase<EnableWebauthnData>, headers: Header
let keys_json: Vec<Value> = registrations.iter().map(WebauthnRegistration::to_json).collect(); let keys_json: Vec<Value> = registrations.iter().map(WebauthnRegistration::to_json).collect();
Ok(Json(json!({ Ok(Json(json!({
"Enabled": true, "enabled": true,
"Keys": keys_json, "keys": keys_json,
"Object": "twoFactorU2f" "object": "twoFactorU2f"
}))) })))
} }
#[put("/two-factor/webauthn", data = "<data>")] #[put("/two-factor/webauthn", data = "<data>")]
async fn activate_webauthn_put(data: JsonUpcase<EnableWebauthnData>, headers: Headers, conn: DbConn) -> JsonResult { async fn activate_webauthn_put(data: Json<EnableWebauthnData>, headers: Headers, conn: DbConn) -> JsonResult {
activate_webauthn(data, headers, conn).await activate_webauthn(data, headers, conn).await
} }
#[derive(Deserialize, Debug)] #[derive(Debug, Deserialize)]
#[allow(non_snake_case)] #[serde(rename_all = "camelCase")]
struct DeleteU2FData { struct DeleteU2FData {
Id: NumberOrString, id: NumberOrString,
MasterPasswordHash: String, master_password_hash: String,
} }
#[delete("/two-factor/webauthn", data = "<data>")] #[delete("/two-factor/webauthn", data = "<data>")]
async fn delete_webauthn(data: JsonUpcase<DeleteU2FData>, headers: Headers, mut conn: DbConn) -> JsonResult { async fn delete_webauthn(data: Json<DeleteU2FData>, headers: Headers, mut conn: DbConn) -> JsonResult {
let id = data.data.Id.into_i32()?; let id = data.id.into_i32()?;
if !headers.user.check_valid_password(&data.data.MasterPasswordHash) { if !headers.user.check_valid_password(&data.master_password_hash) {
err!("Invalid password"); err!("Invalid password");
} }
@ -358,9 +346,9 @@ async fn delete_webauthn(data: JsonUpcase<DeleteU2FData>, headers: Headers, mut
let keys_json: Vec<Value> = data.iter().map(WebauthnRegistration::to_json).collect(); let keys_json: Vec<Value> = data.iter().map(WebauthnRegistration::to_json).collect();
Ok(Json(json!({ Ok(Json(json!({
"Enabled": true, "enabled": true,
"Keys": keys_json, "keys": keys_json,
"Object": "twoFactorU2f" "object": "twoFactorU2f"
}))) })))
} }
@ -413,8 +401,8 @@ pub async fn validate_webauthn_login(user_uuid: &str, response: &str, conn: &mut
), ),
}; };
let rsp: crate::util::UpCase<PublicKeyCredentialCopy> = serde_json::from_str(response)?; let rsp: PublicKeyCredentialCopy = serde_json::from_str(response)?;
let rsp: PublicKeyCredential = rsp.data.into(); let rsp: PublicKeyCredential = rsp.into();
let mut registrations = get_webauthn_registrations(user_uuid, conn).await?.1; let mut registrations = get_webauthn_registrations(user_uuid, conn).await?.1;

View file

@ -6,7 +6,7 @@ use yubico::{config::Config, verify_async};
use crate::{ use crate::{
api::{ api::{
core::{log_user_event, two_factor::_generate_recover_code}, core::{log_user_event, two_factor::_generate_recover_code},
EmptyResult, JsonResult, JsonUpcase, PasswordOrOtpData, EmptyResult, JsonResult, PasswordOrOtpData,
}, },
auth::Headers, auth::Headers,
db::{ db::{
@ -21,28 +21,30 @@ pub fn routes() -> Vec<Route> {
routes![generate_yubikey, activate_yubikey, activate_yubikey_put,] routes![generate_yubikey, activate_yubikey, activate_yubikey_put,]
} }
#[derive(Deserialize, Debug)] #[derive(Debug, Deserialize)]
#[allow(non_snake_case)] #[serde(rename_all = "camelCase")]
struct EnableYubikeyData { struct EnableYubikeyData {
Key1: Option<String>, key1: Option<String>,
Key2: Option<String>, key2: Option<String>,
Key3: Option<String>, key3: Option<String>,
Key4: Option<String>, key4: Option<String>,
Key5: Option<String>, key5: Option<String>,
Nfc: bool, nfc: bool,
MasterPasswordHash: Option<String>, master_password_hash: Option<String>,
Otp: Option<String>, otp: Option<String>,
} }
#[derive(Deserialize, Serialize, Debug)] #[derive(Deserialize, Serialize, Debug)]
#[allow(non_snake_case)] #[serde(rename_all = "camelCase")]
pub struct YubikeyMetadata { pub struct YubikeyMetadata {
Keys: Vec<String>, #[serde(rename = "keys", alias = "Keys")]
pub Nfc: bool, keys: Vec<String>,
#[serde(rename = "nfc", alias = "Nfc")]
pub nfc: bool,
} }
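The `rename`/`alias` pair on `YubikeyMetadata` matters because this struct is also serialized into the stored two-factor `data` field: records written before this change carry the PascalCase keys, so `alias` keeps them readable while `rename` makes new writes camelCase. A hedged round-trip sketch, independent of the real database layer:

use serde::{Deserialize, Serialize};

#[derive(Debug, Serialize, Deserialize)]
struct YubikeyMetadataSketch {
    #[serde(rename = "keys", alias = "Keys")]
    keys: Vec<String>,
    #[serde(rename = "nfc", alias = "Nfc")]
    nfc: bool,
}

fn main() -> Result<(), serde_json::Error> {
    // A legacy record written before this change still parses via the aliases...
    let legacy = r#"{ "Keys": ["ccccccfirst1"], "Nfc": true }"#;
    let meta: YubikeyMetadataSketch = serde_json::from_str(legacy)?;
    // ...and is re-serialized with the new camelCase keys.
    assert_eq!(serde_json::to_string(&meta)?, r#"{"keys":["ccccccfirst1"],"nfc":true}"#);
    Ok(())
}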
fn parse_yubikeys(data: &EnableYubikeyData) -> Vec<String> { fn parse_yubikeys(data: &EnableYubikeyData) -> Vec<String> {
let data_keys = [&data.Key1, &data.Key2, &data.Key3, &data.Key4, &data.Key5]; let data_keys = [&data.key1, &data.key2, &data.key3, &data.key4, &data.key5];
data_keys.iter().filter_map(|e| e.as_ref().cloned()).collect() data_keys.iter().filter_map(|e| e.as_ref().cloned()).collect()
} }
@ -81,11 +83,11 @@ async fn verify_yubikey_otp(otp: String) -> EmptyResult {
} }
#[post("/two-factor/get-yubikey", data = "<data>")] #[post("/two-factor/get-yubikey", data = "<data>")]
async fn generate_yubikey(data: JsonUpcase<PasswordOrOtpData>, headers: Headers, mut conn: DbConn) -> JsonResult { async fn generate_yubikey(data: Json<PasswordOrOtpData>, headers: Headers, mut conn: DbConn) -> JsonResult {
// Make sure the credentials are set // Make sure the credentials are set
get_yubico_credentials()?; get_yubico_credentials()?;
let data: PasswordOrOtpData = data.into_inner().data; let data: PasswordOrOtpData = data.into_inner();
let user = headers.user; let user = headers.user;
data.validate(&user, false, &mut conn).await?; data.validate(&user, false, &mut conn).await?;
@ -98,29 +100,29 @@ async fn generate_yubikey(data: JsonUpcase<PasswordOrOtpData>, headers: Headers,
if let Some(r) = r { if let Some(r) = r {
let yubikey_metadata: YubikeyMetadata = serde_json::from_str(&r.data)?; let yubikey_metadata: YubikeyMetadata = serde_json::from_str(&r.data)?;
let mut result = jsonify_yubikeys(yubikey_metadata.Keys); let mut result = jsonify_yubikeys(yubikey_metadata.keys);
result["Enabled"] = Value::Bool(true); result["enabled"] = Value::Bool(true);
result["Nfc"] = Value::Bool(yubikey_metadata.Nfc); result["nfc"] = Value::Bool(yubikey_metadata.nfc);
result["Object"] = Value::String("twoFactorU2f".to_owned()); result["object"] = Value::String("twoFactorU2f".to_owned());
Ok(Json(result)) Ok(Json(result))
} else { } else {
Ok(Json(json!({ Ok(Json(json!({
"Enabled": false, "enabled": false,
"Object": "twoFactorU2f", "object": "twoFactorU2f",
}))) })))
} }
} }
#[post("/two-factor/yubikey", data = "<data>")] #[post("/two-factor/yubikey", data = "<data>")]
async fn activate_yubikey(data: JsonUpcase<EnableYubikeyData>, headers: Headers, mut conn: DbConn) -> JsonResult { async fn activate_yubikey(data: Json<EnableYubikeyData>, headers: Headers, mut conn: DbConn) -> JsonResult {
let data: EnableYubikeyData = data.into_inner().data; let data: EnableYubikeyData = data.into_inner();
let mut user = headers.user; let mut user = headers.user;
PasswordOrOtpData { PasswordOrOtpData {
MasterPasswordHash: data.MasterPasswordHash.clone(), master_password_hash: data.master_password_hash.clone(),
Otp: data.Otp.clone(), otp: data.otp.clone(),
} }
.validate(&user, true, &mut conn) .validate(&user, true, &mut conn)
.await?; .await?;
@ -136,8 +138,8 @@ async fn activate_yubikey(data: JsonUpcase<EnableYubikeyData>, headers: Headers,
if yubikeys.is_empty() { if yubikeys.is_empty() {
return Ok(Json(json!({ return Ok(Json(json!({
"Enabled": false, "enabled": false,
"Object": "twoFactorU2f", "object": "twoFactorU2f",
}))); })));
} }
@ -154,8 +156,8 @@ async fn activate_yubikey(data: JsonUpcase<EnableYubikeyData>, headers: Headers,
let yubikey_ids: Vec<String> = yubikeys.into_iter().map(|x| (x[..12]).to_owned()).collect(); let yubikey_ids: Vec<String> = yubikeys.into_iter().map(|x| (x[..12]).to_owned()).collect();
let yubikey_metadata = YubikeyMetadata { let yubikey_metadata = YubikeyMetadata {
Keys: yubikey_ids, keys: yubikey_ids,
Nfc: data.Nfc, nfc: data.nfc,
}; };
yubikey_data.data = serde_json::to_string(&yubikey_metadata).unwrap(); yubikey_data.data = serde_json::to_string(&yubikey_metadata).unwrap();
@ -165,17 +167,17 @@ async fn activate_yubikey(data: JsonUpcase<EnableYubikeyData>, headers: Headers,
log_user_event(EventType::UserUpdated2fa as i32, &user.uuid, headers.device.atype, &headers.ip.ip, &mut conn).await; log_user_event(EventType::UserUpdated2fa as i32, &user.uuid, headers.device.atype, &headers.ip.ip, &mut conn).await;
let mut result = jsonify_yubikeys(yubikey_metadata.Keys); let mut result = jsonify_yubikeys(yubikey_metadata.keys);
result["Enabled"] = Value::Bool(true); result["enabled"] = Value::Bool(true);
result["Nfc"] = Value::Bool(yubikey_metadata.Nfc); result["nfc"] = Value::Bool(yubikey_metadata.nfc);
result["Object"] = Value::String("twoFactorU2f".to_owned()); result["object"] = Value::String("twoFactorU2f".to_owned());
Ok(Json(result)) Ok(Json(result))
} }
#[put("/two-factor/yubikey", data = "<data>")] #[put("/two-factor/yubikey", data = "<data>")]
async fn activate_yubikey_put(data: JsonUpcase<EnableYubikeyData>, headers: Headers, conn: DbConn) -> JsonResult { async fn activate_yubikey_put(data: Json<EnableYubikeyData>, headers: Headers, conn: DbConn) -> JsonResult {
activate_yubikey(data, headers, conn).await activate_yubikey(data, headers, conn).await
} }
@ -187,7 +189,7 @@ pub async fn validate_yubikey_login(response: &str, twofactor_data: &str) -> Emp
let yubikey_metadata: YubikeyMetadata = serde_json::from_str(twofactor_data).expect("Can't parse Yubikey Metadata"); let yubikey_metadata: YubikeyMetadata = serde_json::from_str(twofactor_data).expect("Can't parse Yubikey Metadata");
let response_id = &response[..12]; let response_id = &response[..12];
if !yubikey_metadata.Keys.contains(&response_id.to_owned()) { if !yubikey_metadata.keys.contains(&response_id.to_owned()) {
err!("Given Yubikey is not registered"); err!("Given Yubikey is not registered");
} }

View file

@ -15,7 +15,7 @@ use crate::{
two_factor::{authenticator, duo, email, enforce_2fa_policy, webauthn, yubikey}, two_factor::{authenticator, duo, email, enforce_2fa_policy, webauthn, yubikey},
}, },
push::register_push_device, push::register_push_device,
ApiResult, EmptyResult, JsonResult, JsonUpcase, ApiResult, EmptyResult, JsonResult,
}, },
auth::{generate_organization_api_key_login_claims, ClientHeaders, ClientIp}, auth::{generate_organization_api_key_login_claims, ClientHeaders, ClientIp},
db::{models::*, DbConn}, db::{models::*, DbConn},
@ -564,8 +564,11 @@ async fn _json_err_twofactor(providers: &[i32], user_uuid: &str, conn: &mut DbCo
let mut result = json!({ let mut result = json!({
"error" : "invalid_grant", "error" : "invalid_grant",
"error_description" : "Two factor required.", "error_description" : "Two factor required.",
"TwoFactorProviders" : providers, "TwoFactorProviders" : providers.iter().map(ToString::to_string).collect::<Vec<String>>(),
"TwoFactorProviders2" : {} // { "0" : null } "TwoFactorProviders2" : {}, // { "0" : null }
"MasterPasswordPolicy": {
"Object": "masterPasswordPolicy"
}
}); });
for provider in providers { for provider in providers {
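For context on the hunk above: the numeric provider types are now stringified before being placed in `TwoFactorProviders` (presumably because newer clients parse the list as strings), and a stub `MasterPasswordPolicy` object is included alongside. A tiny standalone illustration with made-up values:

fn main() {
    let providers: Vec<i32> = vec![0, 3];
    // [0, 3] becomes ["0", "3"], matching what the handler now emits.
    let as_strings: Vec<String> = providers.iter().map(ToString::to_string).collect();
    assert_eq!(serde_json::json!(as_strings), serde_json::json!(["0", "3"]));
}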
@ -602,7 +605,7 @@ async fn _json_err_twofactor(providers: &[i32], user_uuid: &str, conn: &mut DbCo
let yubikey_metadata: yubikey::YubikeyMetadata = serde_json::from_str(&twofactor.data)?; let yubikey_metadata: yubikey::YubikeyMetadata = serde_json::from_str(&twofactor.data)?;
result["TwoFactorProviders2"][provider.to_string()] = json!({ result["TwoFactorProviders2"][provider.to_string()] = json!({
"Nfc": yubikey_metadata.Nfc, "Nfc": yubikey_metadata.nfc,
}) })
} }
@ -631,19 +634,18 @@ async fn _json_err_twofactor(providers: &[i32], user_uuid: &str, conn: &mut DbCo
} }
#[post("/accounts/prelogin", data = "<data>")] #[post("/accounts/prelogin", data = "<data>")]
async fn prelogin(data: JsonUpcase<PreloginData>, conn: DbConn) -> Json<Value> { async fn prelogin(data: Json<PreloginData>, conn: DbConn) -> Json<Value> {
_prelogin(data, conn).await _prelogin(data, conn).await
} }
#[post("/accounts/register", data = "<data>")] #[post("/accounts/register", data = "<data>")]
async fn identity_register(data: JsonUpcase<RegisterData>, conn: DbConn) -> JsonResult { async fn identity_register(data: Json<RegisterData>, conn: DbConn) -> JsonResult {
_register(data, conn).await _register(data, conn).await
} }
// https://github.com/bitwarden/jslib/blob/master/common/src/models/request/tokenRequest.ts // https://github.com/bitwarden/jslib/blob/master/common/src/models/request/tokenRequest.ts
// https://github.com/bitwarden/mobile/blob/master/src/Core/Models/Request/TokenRequest.cs // https://github.com/bitwarden/mobile/blob/master/src/Core/Models/Request/TokenRequest.cs
#[derive(Debug, Clone, Default, FromForm)] #[derive(Debug, Clone, Default, FromForm)]
#[allow(non_snake_case)]
struct ConnectData { struct ConnectData {
#[field(name = uncased("grant_type"))] #[field(name = uncased("grant_type"))]
#[field(name = uncased("granttype"))] #[field(name = uncased("granttype"))]

View file

@ -33,23 +33,18 @@ pub use crate::api::{
web::static_files, web::static_files,
}; };
use crate::db::{models::User, DbConn}; use crate::db::{models::User, DbConn};
use crate::util;
// Type aliases for API methods results // Type aliases for API methods results
type ApiResult<T> = Result<T, crate::error::Error>; type ApiResult<T> = Result<T, crate::error::Error>;
pub type JsonResult = ApiResult<Json<Value>>; pub type JsonResult = ApiResult<Json<Value>>;
pub type EmptyResult = ApiResult<()>; pub type EmptyResult = ApiResult<()>;
type JsonUpcase<T> = Json<util::UpCase<T>>;
type JsonUpcaseVec<T> = Json<Vec<util::UpCase<T>>>;
type JsonVec<T> = Json<Vec<T>>;
// Common structs representing JSON data received // Common structs representing JSON data received
#[derive(Deserialize)] #[derive(Deserialize)]
#[allow(non_snake_case)] #[serde(rename_all = "camelCase")]
struct PasswordOrOtpData { struct PasswordOrOtpData {
MasterPasswordHash: Option<String>, master_password_hash: Option<String>,
Otp: Option<String>, otp: Option<String>,
} }
impl PasswordOrOtpData { impl PasswordOrOtpData {
@ -59,7 +54,7 @@ impl PasswordOrOtpData {
pub async fn validate(&self, user: &User, delete_if_valid: bool, conn: &mut DbConn) -> EmptyResult { pub async fn validate(&self, user: &User, delete_if_valid: bool, conn: &mut DbConn) -> EmptyResult {
use crate::api::core::two_factor::protected_actions::validate_protected_action_otp; use crate::api::core::two_factor::protected_actions::validate_protected_action_otp;
match (self.MasterPasswordHash.as_deref(), self.Otp.as_deref()) { match (self.master_password_hash.as_deref(), self.otp.as_deref()) {
(Some(pw_hash), None) => { (Some(pw_hash), None) => {
if !user.check_valid_password(pw_hash) { if !user.check_valid_password(pw_hash) {
err!("Invalid password"); err!("Invalid password");

View file

@ -42,13 +42,13 @@ impl Attachment {
pub fn to_json(&self, host: &str) -> Value { pub fn to_json(&self, host: &str) -> Value {
json!({ json!({
"Id": self.id, "id": self.id,
"Url": self.get_url(host), "url": self.get_url(host),
"FileName": self.file_name, "fileName": self.file_name,
"Size": self.file_size.to_string(), "size": self.file_size.to_string(),
"SizeName": crate::util::get_display_size(self.file_size), "sizeName": crate::util::get_display_size(self.file_size),
"Key": self.akey, "key": self.akey,
"Object": "attachment" "object": "attachment"
}) })
} }
} }

View file

@ -1,3 +1,4 @@
use crate::util::LowerCase;
use crate::CONFIG; use crate::CONFIG;
use chrono::{NaiveDateTime, TimeDelta, Utc}; use chrono::{NaiveDateTime, TimeDelta, Utc};
use serde_json::Value; use serde_json::Value;
@ -81,7 +82,7 @@ impl Cipher {
pub fn validate_notes(cipher_data: &[CipherData]) -> EmptyResult { pub fn validate_notes(cipher_data: &[CipherData]) -> EmptyResult {
let mut validation_errors = serde_json::Map::new(); let mut validation_errors = serde_json::Map::new();
for (index, cipher) in cipher_data.iter().enumerate() { for (index, cipher) in cipher_data.iter().enumerate() {
if let Some(note) = &cipher.Notes { if let Some(note) = &cipher.notes {
if note.len() > 10_000 { if note.len() > 10_000 {
validation_errors.insert( validation_errors.insert(
format!("Ciphers[{index}].Notes"), format!("Ciphers[{index}].Notes"),
@ -135,10 +136,6 @@ impl Cipher {
} }
} }
let fields_json = self.fields.as_ref().and_then(|s| serde_json::from_str(s).ok()).unwrap_or(Value::Null);
let password_history_json =
self.password_history.as_ref().and_then(|s| serde_json::from_str(s).ok()).unwrap_or(Value::Null);
// We don't need these values at all for Organizational syncs // We don't need these values at all for Organizational syncs
// Skip any other database calls if this is the case and just return false. // Skip any other database calls if this is the case and just return false.
let (read_only, hide_passwords) = if sync_type == CipherSyncType::User { let (read_only, hide_passwords) = if sync_type == CipherSyncType::User {
@ -153,20 +150,42 @@ impl Cipher {
(false, false) (false, false)
}; };
let fields_json: Vec<_> = self
.fields
.as_ref()
.and_then(|s| {
serde_json::from_str::<Vec<LowerCase<Value>>>(s)
.inspect_err(|e| warn!("Error parsing fields {:?}", e))
.ok()
})
.map(|d| d.into_iter().map(|d| d.data).collect())
.unwrap_or_default();
let password_history_json: Vec<_> = self
.password_history
.as_ref()
.and_then(|s| {
serde_json::from_str::<Vec<LowerCase<Value>>>(s)
.inspect_err(|e| warn!("Error parsing password history {:?}", e))
.ok()
})
.map(|d| d.into_iter().map(|d| d.data).collect())
.unwrap_or_default();
// Get the type_data or a default to an empty json object '{}'. // Get the type_data or a default to an empty json object '{}'.
// If not passing an empty object, mobile clients will crash. // If not passing an empty object, mobile clients will crash.
let mut type_data_json: Value = let mut type_data_json = serde_json::from_str::<LowerCase<Value>>(&self.data)
serde_json::from_str(&self.data).unwrap_or_else(|_| Value::Object(serde_json::Map::new())); .map(|d| d.data)
.unwrap_or_else(|_| Value::Object(serde_json::Map::new()));
// NOTE: This was marked as *Backwards Compatibility Code*, but as of January 2021 this is still being used by upstream // NOTE: This was marked as *Backwards Compatibility Code*, but as of January 2021 this is still being used by upstream
// Set the first element of the Uris array as Uri, this is needed by several (mobile) clients. // Set the first element of the Uris array as Uri, this is needed by several (mobile) clients.
if self.atype == 1 { if self.atype == 1 {
if type_data_json["Uris"].is_array() { if type_data_json["uris"].is_array() {
let uri = type_data_json["Uris"][0]["Uri"].clone(); let uri = type_data_json["uris"][0]["uri"].clone();
type_data_json["Uri"] = uri; type_data_json["uri"] = uri;
} else { } else {
// Upstream always has a Uri key/value // Upstream always has a Uri key/value
type_data_json["Uri"] = Value::Null; type_data_json["uri"] = Value::Null;
} }
} }
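The cipher's stored `data`, `fields`, and `password_history` blobs were written by earlier versions with PascalCase keys, so the hunk above funnels them through a `LowerCase<Value>` wrapper before re-emitting camelCase JSON. That wrapper's implementation is not part of this diff; the following is only a plausible sketch of such a helper (the real `util::LowerCase` may differ):

use serde::{de::DeserializeOwned, Deserialize, Deserializer};
use serde_json::Value;

// Illustrative only: deserialize to a JSON value, lowercase the first character
// of every object key (recursively), then deserialize the normalized value.
pub struct LowerCase<T> {
    pub data: T,
}

impl<'de, T: DeserializeOwned> Deserialize<'de> for LowerCase<T> {
    fn deserialize<D: Deserializer<'de>>(deserializer: D) -> Result<Self, D::Error> {
        let mut value = Value::deserialize(deserializer)?;
        lcase_first_keys(&mut value);
        let data = T::deserialize(value).map_err(serde::de::Error::custom)?;
        Ok(LowerCase { data })
    }
}

fn lcase_first_keys(value: &mut Value) {
    match value {
        Value::Object(map) => {
            let keys: Vec<String> = map.keys().cloned().collect();
            for key in keys {
                if let Some(mut inner) = map.remove(&key) {
                    lcase_first_keys(&mut inner);
                    let mut chars = key.chars();
                    let lowered = match chars.next() {
                        Some(first) => first.to_lowercase().chain(chars).collect::<String>(),
                        None => String::new(),
                    };
                    // Colliding keys simply overwrite; acceptable for a sketch.
                    map.insert(lowered, inner);
                }
            }
        }
        Value::Array(items) => items.iter_mut().for_each(lcase_first_keys),
        _ => {}
    }
}

// Example: `{"Uris":[{"Uri":"https://example.com"}]}` parsed through
// `LowerCase<Value>` comes out with `uris`/`uri` keys, which is what the
// backwards-compatibility block above relies on.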
@ -175,10 +194,10 @@ impl Cipher {
// NOTE: This was marked as *Backwards Compatibility Code*, but as of January 2021 this is still being used by upstream // NOTE: This was marked as *Backwards Compatibility Code*, but as of January 2021 this is still being used by upstream
// data_json should always contain the following keys with every atype // data_json should always contain the following keys with every atype
data_json["Fields"] = fields_json.clone(); data_json["fields"] = Value::Array(fields_json.clone());
data_json["Name"] = json!(self.name); data_json["name"] = json!(self.name);
data_json["Notes"] = json!(self.notes); data_json["notes"] = json!(self.notes);
data_json["PasswordHistory"] = password_history_json.clone(); data_json["passwordHistory"] = Value::Array(password_history_json.clone());
let collection_ids = if let Some(cipher_sync_data) = cipher_sync_data { let collection_ids = if let Some(cipher_sync_data) = cipher_sync_data {
if let Some(cipher_collections) = cipher_sync_data.cipher_collections.get(&self.uuid) { if let Some(cipher_collections) = cipher_sync_data.cipher_collections.get(&self.uuid) {
@ -198,48 +217,48 @@ impl Cipher {
// //
// Ref: https://github.com/bitwarden/server/blob/master/src/Core/Models/Api/Response/CipherResponseModel.cs // Ref: https://github.com/bitwarden/server/blob/master/src/Core/Models/Api/Response/CipherResponseModel.cs
let mut json_object = json!({ let mut json_object = json!({
"Object": "cipherDetails", "object": "cipherDetails",
"Id": self.uuid, "id": self.uuid,
"Type": self.atype, "type": self.atype,
"CreationDate": format_date(&self.created_at), "creationDate": format_date(&self.created_at),
"RevisionDate": format_date(&self.updated_at), "revisionDate": format_date(&self.updated_at),
"DeletedDate": self.deleted_at.map_or(Value::Null, |d| Value::String(format_date(&d))), "deletedDate": self.deleted_at.map_or(Value::Null, |d| Value::String(format_date(&d))),
"Reprompt": self.reprompt.unwrap_or(RepromptType::None as i32), "reprompt": self.reprompt.unwrap_or(RepromptType::None as i32),
"OrganizationId": self.organization_uuid, "organizationId": self.organization_uuid,
"Key": self.key, "key": self.key,
"Attachments": attachments_json, "attachments": attachments_json,
// We have UseTotp set to true by default within the Organization model. // We have UseTotp set to true by default within the Organization model.
// This variable together with UsersGetPremium is used to show or hide the TOTP counter. // This variable together with UsersGetPremium is used to show or hide the TOTP counter.
"OrganizationUseTotp": true, "organizationUseTotp": true,
// This field is specific to the cipherDetails type. // This field is specific to the cipherDetails type.
"CollectionIds": collection_ids, "collectionIds": collection_ids,
"Name": self.name, "name": self.name,
"Notes": self.notes, "notes": self.notes,
"Fields": fields_json, "fields": fields_json,
"Data": data_json, "data": data_json,
"PasswordHistory": password_history_json, "passwordHistory": password_history_json,
// All Cipher types are included by default as null, but only the matching one will be populated // All Cipher types are included by default as null, but only the matching one will be populated
"Login": null, "login": null,
"SecureNote": null, "secureNote": null,
"Card": null, "card": null,
"Identity": null, "identity": null,
}); });
// These values are only needed for user/default syncs // These values are only needed for user/default syncs
// Not during an organizational sync like `get_org_details` // Not during an organizational sync like `get_org_details`
// Skip adding these fields in that case // Skip adding these fields in that case
if sync_type == CipherSyncType::User { if sync_type == CipherSyncType::User {
json_object["FolderId"] = json!(if let Some(cipher_sync_data) = cipher_sync_data { json_object["folderId"] = json!(if let Some(cipher_sync_data) = cipher_sync_data {
cipher_sync_data.cipher_folders.get(&self.uuid).map(|c| c.to_string()) cipher_sync_data.cipher_folders.get(&self.uuid).map(|c| c.to_string())
} else { } else {
self.get_folder_uuid(user_uuid, conn).await self.get_folder_uuid(user_uuid, conn).await
}); });
json_object["Favorite"] = json!(if let Some(cipher_sync_data) = cipher_sync_data { json_object["favorite"] = json!(if let Some(cipher_sync_data) = cipher_sync_data {
cipher_sync_data.cipher_favorites.contains(&self.uuid) cipher_sync_data.cipher_favorites.contains(&self.uuid)
} else { } else {
self.is_favorite(user_uuid, conn).await self.is_favorite(user_uuid, conn).await
@ -247,15 +266,15 @@ impl Cipher {
// These values are true by default, but can be false if the // These values are true by default, but can be false if the
// cipher belongs to a collection or group where the org owner has enabled // cipher belongs to a collection or group where the org owner has enabled
// the "Read Only" or "Hide Passwords" restrictions for the user. // the "Read Only" or "Hide Passwords" restrictions for the user.
json_object["Edit"] = json!(!read_only); json_object["edit"] = json!(!read_only);
json_object["ViewPassword"] = json!(!hide_passwords); json_object["viewPassword"] = json!(!hide_passwords);
} }
let key = match self.atype { let key = match self.atype {
1 => "Login", 1 => "login",
2 => "SecureNote", 2 => "secureNote",
3 => "Card", 3 => "card",
4 => "Identity", 4 => "identity",
_ => panic!("Wrong type"), _ => panic!("Wrong type"),
}; };

View file

@ -49,11 +49,11 @@ impl Collection {
pub fn to_json(&self) -> Value { pub fn to_json(&self) -> Value {
json!({ json!({
"ExternalId": self.external_id, "externalId": self.external_id,
"Id": self.uuid, "id": self.uuid,
"OrganizationId": self.org_uuid, "organizationId": self.org_uuid,
"Name": self.name, "name": self.name,
"Object": "collection", "object": "collection",
}) })
} }
@ -97,9 +97,9 @@ impl Collection {
}; };
let mut json_object = self.to_json(); let mut json_object = self.to_json();
json_object["Object"] = json!("collectionDetails"); json_object["object"] = json!("collectionDetails");
json_object["ReadOnly"] = json!(read_only); json_object["readOnly"] = json!(read_only);
json_object["HidePasswords"] = json!(hide_passwords); json_object["hidePasswords"] = json!(hide_passwords);
json_object json_object
} }

View file

@ -58,11 +58,11 @@ impl EmergencyAccess {
pub fn to_json(&self) -> Value { pub fn to_json(&self) -> Value {
json!({ json!({
"Id": self.uuid, "id": self.uuid,
"Status": self.status, "status": self.status,
"Type": self.atype, "type": self.atype,
"WaitTimeDays": self.wait_time_days, "waitTimeDays": self.wait_time_days,
"Object": "emergencyAccess", "object": "emergencyAccess",
}) })
} }
@ -70,14 +70,14 @@ impl EmergencyAccess {
let grantor_user = User::find_by_uuid(&self.grantor_uuid, conn).await.expect("Grantor user not found."); let grantor_user = User::find_by_uuid(&self.grantor_uuid, conn).await.expect("Grantor user not found.");
json!({ json!({
"Id": self.uuid, "id": self.uuid,
"Status": self.status, "status": self.status,
"Type": self.atype, "type": self.atype,
"WaitTimeDays": self.wait_time_days, "waitTimeDays": self.wait_time_days,
"GrantorId": grantor_user.uuid, "grantorId": grantor_user.uuid,
"Email": grantor_user.email, "email": grantor_user.email,
"Name": grantor_user.name, "name": grantor_user.name,
"Object": "emergencyAccessGrantorDetails", "object": "emergencyAccessGrantorDetails",
}) })
} }
@ -98,14 +98,14 @@ impl EmergencyAccess {
}; };
Some(json!({ Some(json!({
"Id": self.uuid, "id": self.uuid,
"Status": self.status, "status": self.status,
"Type": self.atype, "type": self.atype,
"WaitTimeDays": self.wait_time_days, "waitTimeDays": self.wait_time_days,
"GranteeId": grantee_user.uuid, "granteeId": grantee_user.uuid,
"Email": grantee_user.email, "email": grantee_user.email,
"Name": grantee_user.name, "name": grantee_user.name,
"Object": "emergencyAccessGranteeDetails", "object": "emergencyAccessGranteeDetails",
})) }))
} }
} }

View file

@ -43,10 +43,10 @@ impl Folder {
use crate::util::format_date; use crate::util::format_date;
json!({ json!({
"Id": self.uuid, "id": self.uuid,
"RevisionDate": format_date(&self.updated_at), "revisionDate": format_date(&self.updated_at),
"Name": self.name, "name": self.name,
"Object": "folder", "object": "folder",
}) })
} }
} }

View file

@ -58,14 +58,14 @@ impl Group {
use crate::util::format_date; use crate::util::format_date;
json!({ json!({
"Id": self.uuid, "id": self.uuid,
"OrganizationId": self.organizations_uuid, "organizationId": self.organizations_uuid,
"Name": self.name, "name": self.name,
"AccessAll": self.access_all, "accessAll": self.access_all,
"ExternalId": self.external_id, "externalId": self.external_id,
"CreationDate": format_date(&self.creation_date), "creationDate": format_date(&self.creation_date),
"RevisionDate": format_date(&self.revision_date), "revisionDate": format_date(&self.revision_date),
"Object": "group" "object": "group"
}) })
} }
@ -75,21 +75,21 @@ impl Group {
.iter() .iter()
.map(|entry| { .map(|entry| {
json!({ json!({
"Id": entry.collections_uuid, "id": entry.collections_uuid,
"ReadOnly": entry.read_only, "readOnly": entry.read_only,
"HidePasswords": entry.hide_passwords "hidePasswords": entry.hide_passwords
}) })
}) })
.collect(); .collect();
json!({ json!({
"Id": self.uuid, "id": self.uuid,
"OrganizationId": self.organizations_uuid, "organizationId": self.organizations_uuid,
"Name": self.name, "name": self.name,
"AccessAll": self.access_all, "accessAll": self.access_all,
"ExternalId": self.external_id, "externalId": self.external_id,
"Collections": collections_groups, "collections": collections_groups,
"Object": "groupDetails" "object": "groupDetails"
}) })
} }

View file

@ -4,7 +4,6 @@ use serde_json::Value;
use crate::api::EmptyResult; use crate::api::EmptyResult;
use crate::db::DbConn; use crate::db::DbConn;
use crate::error::MapResult; use crate::error::MapResult;
use crate::util::UpCase;
use super::{TwoFactor, UserOrgStatus, UserOrgType, UserOrganization}; use super::{TwoFactor, UserOrgStatus, UserOrgType, UserOrganization};
@ -39,16 +38,18 @@ pub enum OrgPolicyType {
// https://github.com/bitwarden/server/blob/5cbdee137921a19b1f722920f0fa3cd45af2ef0f/src/Core/Models/Data/Organizations/Policies/SendOptionsPolicyData.cs // https://github.com/bitwarden/server/blob/5cbdee137921a19b1f722920f0fa3cd45af2ef0f/src/Core/Models/Data/Organizations/Policies/SendOptionsPolicyData.cs
#[derive(Deserialize)] #[derive(Deserialize)]
#[allow(non_snake_case)] #[serde(rename_all = "camelCase")]
pub struct SendOptionsPolicyData { pub struct SendOptionsPolicyData {
pub DisableHideEmail: bool, #[serde(rename = "disableHideEmail", alias = "DisableHideEmail")]
pub disable_hide_email: bool,
} }
// https://github.com/bitwarden/server/blob/5cbdee137921a19b1f722920f0fa3cd45af2ef0f/src/Core/Models/Data/Organizations/Policies/ResetPasswordDataModel.cs // https://github.com/bitwarden/server/blob/5cbdee137921a19b1f722920f0fa3cd45af2ef0f/src/Core/Models/Data/Organizations/Policies/ResetPasswordDataModel.cs
#[derive(Deserialize)] #[derive(Deserialize)]
#[allow(non_snake_case)] #[serde(rename_all = "camelCase")]
pub struct ResetPasswordDataModel { pub struct ResetPasswordDataModel {
pub AutoEnrollEnabled: bool, #[serde(rename = "autoEnrollEnabled", alias = "AutoEnrollEnabled")]
pub auto_enroll_enabled: bool,
} }
pub type OrgPolicyResult = Result<(), OrgPolicyErr>; pub type OrgPolicyResult = Result<(), OrgPolicyErr>;
@ -78,12 +79,12 @@ impl OrgPolicy {
pub fn to_json(&self) -> Value { pub fn to_json(&self) -> Value {
let data_json: Value = serde_json::from_str(&self.data).unwrap_or(Value::Null); let data_json: Value = serde_json::from_str(&self.data).unwrap_or(Value::Null);
json!({ json!({
"Id": self.uuid, "id": self.uuid,
"OrganizationId": self.org_uuid, "organizationId": self.org_uuid,
"Type": self.atype, "type": self.atype,
"Data": data_json, "data": data_json,
"Enabled": self.enabled, "enabled": self.enabled,
"Object": "policy", "object": "policy",
}) })
} }
} }
@ -307,9 +308,9 @@ impl OrgPolicy {
pub async fn org_is_reset_password_auto_enroll(org_uuid: &str, conn: &mut DbConn) -> bool { pub async fn org_is_reset_password_auto_enroll(org_uuid: &str, conn: &mut DbConn) -> bool {
match OrgPolicy::find_by_org_and_type(org_uuid, OrgPolicyType::ResetPassword, conn).await { match OrgPolicy::find_by_org_and_type(org_uuid, OrgPolicyType::ResetPassword, conn).await {
Some(policy) => match serde_json::from_str::<UpCase<ResetPasswordDataModel>>(&policy.data) { Some(policy) => match serde_json::from_str::<ResetPasswordDataModel>(&policy.data) {
Ok(opts) => { Ok(opts) => {
return policy.enabled && opts.data.AutoEnrollEnabled; return policy.enabled && opts.auto_enroll_enabled;
} }
_ => error!("Failed to deserialize ResetPasswordDataModel: {}", policy.data), _ => error!("Failed to deserialize ResetPasswordDataModel: {}", policy.data),
}, },
@ -327,9 +328,9 @@ impl OrgPolicy {
{ {
if let Some(user) = UserOrganization::find_by_user_and_org(user_uuid, &policy.org_uuid, conn).await { if let Some(user) = UserOrganization::find_by_user_and_org(user_uuid, &policy.org_uuid, conn).await {
if user.atype < UserOrgType::Admin { if user.atype < UserOrgType::Admin {
match serde_json::from_str::<UpCase<SendOptionsPolicyData>>(&policy.data) { match serde_json::from_str::<SendOptionsPolicyData>(&policy.data) {
Ok(opts) => { Ok(opts) => {
if opts.data.DisableHideEmail { if opts.disable_hide_email {
return true; return true;
} }
} }

View file

@ -153,39 +153,39 @@ impl Organization {
// https://github.com/bitwarden/server/blob/13d1e74d6960cf0d042620b72d85bf583a4236f7/src/Api/Models/Response/Organizations/OrganizationResponseModel.cs // https://github.com/bitwarden/server/blob/13d1e74d6960cf0d042620b72d85bf583a4236f7/src/Api/Models/Response/Organizations/OrganizationResponseModel.cs
pub fn to_json(&self) -> Value { pub fn to_json(&self) -> Value {
json!({ json!({
"Id": self.uuid, "id": self.uuid,
"Identifier": null, // not supported by us "identifier": null, // not supported by us
"Name": self.name, "name": self.name,
"Seats": 10, // The value doesn't matter, we don't check server-side "seats": 10, // The value doesn't matter, we don't check server-side
// "MaxAutoscaleSeats": null, // The value doesn't matter, we don't check server-side // "maxAutoscaleSeats": null, // The value doesn't matter, we don't check server-side
"MaxCollections": 10, // The value doesn't matter, we don't check server-side "maxCollections": 10, // The value doesn't matter, we don't check server-side
"MaxStorageGb": 10, // The value doesn't matter, we don't check server-side "maxStorageGb": 10, // The value doesn't matter, we don't check server-side
"Use2fa": true, "use2fa": true,
"UseDirectory": false, // Is supported, but this value isn't checked anywhere (yet) "useDirectory": false, // Is supported, but this value isn't checked anywhere (yet)
"UseEvents": CONFIG.org_events_enabled(), "useEvents": CONFIG.org_events_enabled(),
"UseGroups": CONFIG.org_groups_enabled(), "useGroups": CONFIG.org_groups_enabled(),
"UseTotp": true, "useTotp": true,
"UsePolicies": true, "usePolicies": true,
// "UseScim": false, // Not supported (Not AGPLv3 Licensed) // "useScim": false, // Not supported (Not AGPLv3 Licensed)
"UseSso": false, // Not supported "useSso": false, // Not supported
// "UseKeyConnector": false, // Not supported // "useKeyConnector": false, // Not supported
"SelfHost": true, "selfHost": true,
"UseApi": true, "useApi": true,
"HasPublicAndPrivateKeys": self.private_key.is_some() && self.public_key.is_some(), "hasPublicAndPrivateKeys": self.private_key.is_some() && self.public_key.is_some(),
"UseResetPassword": CONFIG.mail_enabled(), "useResetPassword": CONFIG.mail_enabled(),
"BusinessName": null, "businessName": null,
"BusinessAddress1": null, "businessAddress1": null,
"BusinessAddress2": null, "businessAddress2": null,
"BusinessAddress3": null, "businessAddress3": null,
"BusinessCountry": null, "businessCountry": null,
"BusinessTaxNumber": null, "businessTaxNumber": null,
"BillingEmail": self.billing_email, "billingEmail": self.billing_email,
"Plan": "TeamsAnnually", "plan": "TeamsAnnually",
"PlanType": 5, // TeamsAnnually plan "planType": 5, // TeamsAnnually plan
"UsersGetPremium": true, "usersGetPremium": true,
"Object": "organization", "object": "organization",
}) })
} }
} }
@ -366,43 +366,60 @@ impl UserOrganization {
// https://github.com/bitwarden/server/blob/13d1e74d6960cf0d042620b72d85bf583a4236f7/src/Api/Models/Response/ProfileOrganizationResponseModel.cs // https://github.com/bitwarden/server/blob/13d1e74d6960cf0d042620b72d85bf583a4236f7/src/Api/Models/Response/ProfileOrganizationResponseModel.cs
json!({ json!({
"Id": self.org_uuid, "id": self.org_uuid,
"Identifier": null, // Not supported "identifier": null, // Not supported
"Name": org.name, "name": org.name,
"Seats": 10, // The value doesn't matter, we don't check server-side "seats": 10, // The value doesn't matter, we don't check server-side
"MaxCollections": 10, // The value doesn't matter, we don't check server-side "maxCollections": 10, // The value doesn't matter, we don't check server-side
"UsersGetPremium": true, "usersGetPremium": true,
"Use2fa": true, "use2fa": true,
"UseDirectory": false, // Is supported, but this value isn't checked anywhere (yet) "useDirectory": false, // Is supported, but this value isn't checked anywhere (yet)
"UseEvents": CONFIG.org_events_enabled(), "useEvents": CONFIG.org_events_enabled(),
"UseGroups": CONFIG.org_groups_enabled(), "useGroups": CONFIG.org_groups_enabled(),
"UseTotp": true, "useTotp": true,
// "UseScim": false, // Not supported (Not AGPLv3 Licensed) "useScim": false, // Not supported (Not AGPLv3 Licensed)
"UsePolicies": true, "usePolicies": true,
"UseApi": true, "useApi": true,
"SelfHost": true, "selfHost": true,
"HasPublicAndPrivateKeys": org.private_key.is_some() && org.public_key.is_some(), "hasPublicAndPrivateKeys": org.private_key.is_some() && org.public_key.is_some(),
"ResetPasswordEnrolled": self.reset_password_key.is_some(), "resetPasswordEnrolled": self.reset_password_key.is_some(),
"UseResetPassword": CONFIG.mail_enabled(), "useResetPassword": CONFIG.mail_enabled(),
"SsoBound": false, // Not supported "ssoBound": false, // Not supported
"UseSso": false, // Not supported "useSso": false, // Not supported
"ProviderId": null, "useKeyConnector": false,
"ProviderName": null, "useSecretsManager": false,
// "KeyConnectorEnabled": false, "usePasswordManager": true,
// "KeyConnectorUrl": null, "useCustomPermissions": false,
"useActivateAutofillPolicy": false,
"providerId": null,
"providerName": null,
"providerType": null,
"familySponsorshipFriendlyName": null,
"familySponsorshipAvailable": false,
"planProductType": 0,
"keyConnectorEnabled": false,
"keyConnectorUrl": null,
"familySponsorshipLastSyncDate": null,
"familySponsorshipValidUntil": null,
"familySponsorshipToDelete": null,
"accessSecretsManager": false,
"limitCollectionCreationDeletion": true,
"allowAdminAccessToAllCollectionItems": true,
"flexibleCollections": true,
"permissions": permissions, "permissions": permissions,
"MaxStorageGb": 10, // The value doesn't matter, we don't check server-side "maxStorageGb": 10, // The value doesn't matter, we don't check server-side
// These are per user // These are per user
"UserId": self.user_uuid, "userId": self.user_uuid,
"Key": self.akey, "key": self.akey,
"Status": self.status, "status": self.status,
"Type": self.atype, "type": self.atype,
"Enabled": true, "enabled": true,
"Object": "profileOrganization", "object": "profileOrganization",
}) })
} }
@ -438,9 +455,9 @@ impl UserOrganization {
.iter() .iter()
.map(|cu| { .map(|cu| {
json!({ json!({
"Id": cu.collection_uuid, "id": cu.collection_uuid,
"ReadOnly": cu.read_only, "readOnly": cu.read_only,
"HidePasswords": cu.hide_passwords, "hidePasswords": cu.hide_passwords,
}) })
}) })
.collect() .collect()
@ -449,29 +466,29 @@ impl UserOrganization {
}; };
json!({ json!({
"Id": self.uuid, "id": self.uuid,
"UserId": self.user_uuid, "userId": self.user_uuid,
"Name": user.name, "name": user.name,
"Email": user.email, "email": user.email,
"ExternalId": self.external_id, "externalId": self.external_id,
"Groups": groups, "groups": groups,
"Collections": collections, "collections": collections,
"Status": status, "status": status,
"Type": self.atype, "type": self.atype,
"AccessAll": self.access_all, "accessAll": self.access_all,
"TwoFactorEnabled": twofactor_enabled, "twoFactorEnabled": twofactor_enabled,
"ResetPasswordEnrolled": self.reset_password_key.is_some(), "resetPasswordEnrolled": self.reset_password_key.is_some(),
"Object": "organizationUserUserDetails", "object": "organizationUserUserDetails",
}) })
} }
pub fn to_json_user_access_restrictions(&self, col_user: &CollectionUser) -> Value { pub fn to_json_user_access_restrictions(&self, col_user: &CollectionUser) -> Value {
json!({ json!({
"Id": self.uuid, "id": self.uuid,
"ReadOnly": col_user.read_only, "readOnly": col_user.read_only,
"HidePasswords": col_user.hide_passwords, "hidePasswords": col_user.hide_passwords,
}) })
} }
@ -485,9 +502,9 @@ impl UserOrganization {
.iter() .iter()
.map(|c| { .map(|c| {
json!({ json!({
"Id": c.collection_uuid, "id": c.collection_uuid,
"ReadOnly": c.read_only, "readOnly": c.read_only,
"HidePasswords": c.hide_passwords, "hidePasswords": c.hide_passwords,
}) })
}) })
.collect() .collect()
@ -502,15 +519,15 @@ impl UserOrganization {
}; };
json!({ json!({
"Id": self.uuid, "id": self.uuid,
"UserId": self.user_uuid, "userId": self.user_uuid,
"Status": status, "status": status,
"Type": self.atype, "type": self.atype,
"AccessAll": self.access_all, "accessAll": self.access_all,
"Collections": coll_uuids, "collections": coll_uuids,
"Object": "organizationUserDetails", "object": "organizationUserDetails",
}) })
} }
pub async fn save(&self, conn: &mut DbConn) -> EmptyResult { pub async fn save(&self, conn: &mut DbConn) -> EmptyResult {

View file

@ -1,6 +1,8 @@
use chrono::{NaiveDateTime, Utc}; use chrono::{NaiveDateTime, Utc};
use serde_json::Value; use serde_json::Value;
use crate::util::LowerCase;
use super::User; use super::User;
db_object! { db_object! {
@ -122,48 +124,58 @@ impl Send {
use data_encoding::BASE64URL_NOPAD; use data_encoding::BASE64URL_NOPAD;
use uuid::Uuid; use uuid::Uuid;
let data: Value = serde_json::from_str(&self.data).unwrap_or_default(); let mut data = serde_json::from_str::<LowerCase<Value>>(&self.data).map(|d| d.data).unwrap_or_default();
// Mobile clients expect size to be a string instead of a number
if let Some(size) = data.get("size").and_then(|v| v.as_i64()) {
data["size"] = Value::String(size.to_string());
}
json!({ json!({
"Id": self.uuid, "id": self.uuid,
"AccessId": BASE64URL_NOPAD.encode(Uuid::parse_str(&self.uuid).unwrap_or_default().as_bytes()), "accessId": BASE64URL_NOPAD.encode(Uuid::parse_str(&self.uuid).unwrap_or_default().as_bytes()),
"Type": self.atype, "type": self.atype,
"Name": self.name, "name": self.name,
"Notes": self.notes, "notes": self.notes,
"Text": if self.atype == SendType::Text as i32 { Some(&data) } else { None }, "text": if self.atype == SendType::Text as i32 { Some(&data) } else { None },
"File": if self.atype == SendType::File as i32 { Some(&data) } else { None }, "file": if self.atype == SendType::File as i32 { Some(&data) } else { None },
"Key": self.akey, "key": self.akey,
"MaxAccessCount": self.max_access_count, "maxAccessCount": self.max_access_count,
"AccessCount": self.access_count, "accessCount": self.access_count,
"Password": self.password_hash.as_deref().map(|h| BASE64URL_NOPAD.encode(h)), "password": self.password_hash.as_deref().map(|h| BASE64URL_NOPAD.encode(h)),
"Disabled": self.disabled, "disabled": self.disabled,
"HideEmail": self.hide_email, "hideEmail": self.hide_email,
"RevisionDate": format_date(&self.revision_date), "revisionDate": format_date(&self.revision_date),
"ExpirationDate": self.expiration_date.as_ref().map(format_date), "expirationDate": self.expiration_date.as_ref().map(format_date),
"DeletionDate": format_date(&self.deletion_date), "deletionDate": format_date(&self.deletion_date),
"Object": "send", "object": "send",
}) })
} }
pub async fn to_json_access(&self, conn: &mut DbConn) -> Value { pub async fn to_json_access(&self, conn: &mut DbConn) -> Value {
use crate::util::format_date; use crate::util::format_date;
let data: Value = serde_json::from_str(&self.data).unwrap_or_default(); let mut data = serde_json::from_str::<LowerCase<Value>>(&self.data).map(|d| d.data).unwrap_or_default();
// Mobile clients expect size to be a string instead of a number
if let Some(size) = data.get("size").and_then(|v| v.as_i64()) {
data["size"] = Value::String(size.to_string());
}
json!({ json!({
"Id": self.uuid, "id": self.uuid,
"Type": self.atype, "type": self.atype,
"Name": self.name, "name": self.name,
"Text": if self.atype == SendType::Text as i32 { Some(&data) } else { None }, "text": if self.atype == SendType::Text as i32 { Some(&data) } else { None },
"File": if self.atype == SendType::File as i32 { Some(&data) } else { None }, "file": if self.atype == SendType::File as i32 { Some(&data) } else { None },
"ExpirationDate": self.expiration_date.as_ref().map(format_date), "expirationDate": self.expiration_date.as_ref().map(format_date),
"CreatorIdentifier": self.creator_identifier(conn).await, "creatorIdentifier": self.creator_identifier(conn).await,
"Object": "send-access", "object": "send-access",
}) })
} }
} }
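A minimal sketch of the size fix-up above, isolated from the Send model: the stored data is parsed into a serde_json::Value and a numeric size is rewritten as a string, since mobile clients expect it that way. The LowerCase<Value> wrapper from util.rs is left out here, and the field names are only examples.

use serde_json::{json, Value};

// Rewrites a numeric "size" entry as a string, mirroring the patch applied in
// Send::to_json and Send::to_json_access above.
fn stringify_size(mut data: Value) -> Value {
    if let Some(size) = data.get("size").and_then(|v| v.as_i64()) {
        data["size"] = Value::String(size.to_string());
    }
    data
}

fn main() {
    let data = json!({ "fileName": "secret.txt", "size": 1024 });
    let patched = stringify_size(data);
    assert_eq!(patched["size"], json!("1024"));
    assert_eq!(patched["fileName"], json!("secret.txt"));
}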
@ -290,25 +302,18 @@ impl Send {
pub async fn size_by_user(user_uuid: &str, conn: &mut DbConn) -> Option<i64> { pub async fn size_by_user(user_uuid: &str, conn: &mut DbConn) -> Option<i64> {
let sends = Self::find_by_user(user_uuid, conn).await; let sends = Self::find_by_user(user_uuid, conn).await;
#[allow(non_snake_case)] #[derive(serde::Deserialize)]
#[derive(serde::Deserialize, Default)]
struct FileData { struct FileData {
Size: Option<NumberOrString>, #[serde(rename = "size", alias = "Size")]
size: Option<NumberOrString>, size: NumberOrString,
} }
let mut total: i64 = 0; let mut total: i64 = 0;
for send in sends { for send in sends {
if send.atype == SendType::File as i32 { if send.atype == SendType::File as i32 {
let data: FileData = serde_json::from_str(&send.data).unwrap_or_default(); if let Ok(size) =
serde_json::from_str::<FileData>(&send.data).map_err(Into::into).and_then(|d| d.size.into_i64())
let size = match (data.size, data.Size) { {
(Some(s), _) => s.into_i64(),
(_, Some(s)) => s.into_i64(),
(None, None) => continue,
};
if let Ok(size) = size {
total = total.checked_add(size)?; total = total.checked_add(size)?;
}; };
} }
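The size_by_user change above leans on serde's alias attribute so rows written before this commit (with the PascalCase "Size" key) still parse. A self-contained sketch of that pattern, with the NumberOrString helper simplified to a plain i64:

use serde::Deserialize;

#[derive(Deserialize)]
struct FileData {
    // Accept the new lowercase key as well as the legacy PascalCase one.
    #[serde(rename = "size", alias = "Size")]
    size: i64,
}

fn main() {
    let legacy: FileData = serde_json::from_str(r#"{"Size": 2048}"#).unwrap();
    let current: FileData = serde_json::from_str(r#"{"size": 4096}"#).unwrap();
    assert_eq!(legacy.size, 2048);
    assert_eq!(current.size, 4096);
}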

View file

@ -54,17 +54,17 @@ impl TwoFactor {
pub fn to_json(&self) -> Value { pub fn to_json(&self) -> Value {
json!({ json!({
"Enabled": self.enabled, "enabled": self.enabled,
"Key": "", // This key and value vary "key": "", // This key and value vary
"Object": "twoFactorAuthenticator" // This value varies "Oobject": "twoFactorAuthenticator" // This value varies
}) })
} }
pub fn to_json_provider(&self) -> Value { pub fn to_json_provider(&self) -> Value {
json!({ json!({
"Enabled": self.enabled, "enabled": self.enabled,
"Type": self.atype, "type": self.atype,
"Object": "twoFactorProvider" "object": "twoFactorProvider"
}) })
} }
} }

View file

@ -240,26 +240,26 @@ impl User {
}; };
json!({ json!({
"_Status": status as i32, "_status": status as i32,
"Id": self.uuid, "id": self.uuid,
"Name": self.name, "name": self.name,
"Email": self.email, "email": self.email,
"EmailVerified": !CONFIG.mail_enabled() || self.verified_at.is_some(), "emailVerified": !CONFIG.mail_enabled() || self.verified_at.is_some(),
"Premium": true, "premium": true,
"PremiumFromOrganization": false, "premiumFromOrganization": false,
"MasterPasswordHint": self.password_hint, "masterPasswordHint": self.password_hint,
"Culture": "en-US", "culture": "en-US",
"TwoFactorEnabled": twofactor_enabled, "twoFactorEnabled": twofactor_enabled,
"Key": self.akey, "key": self.akey,
"PrivateKey": self.private_key, "privateKey": self.private_key,
"SecurityStamp": self.security_stamp, "securityStamp": self.security_stamp,
"Organizations": orgs_json, "organizations": orgs_json,
"Providers": [], "providers": [],
"ProviderOrganizations": [], "providerOrganizations": [],
"ForcePasswordReset": false, "forcePasswordReset": false,
"AvatarColor": self.avatar_color, "avatarColor": self.avatar_color,
"UsesKeyConnector": false, "usesKeyConnector": false,
"Object": "profile", "object": "profile",
}) })
} }

View file

@ -179,18 +179,18 @@ fn _serialize(e: &impl serde::Serialize, _msg: &str) -> String {
fn _api_error(_: &impl std::any::Any, msg: &str) -> String { fn _api_error(_: &impl std::any::Any, msg: &str) -> String {
let json = json!({ let json = json!({
"Message": msg, "message": msg,
"error": "", "error": "",
"error_description": "", "error_description": "",
"ValidationErrors": {"": [ msg ]}, "validationErrors": {"": [ msg ]},
"ErrorModel": { "errorModel": {
"Message": msg, "message": msg,
"Object": "error" "object": "error"
}, },
"ExceptionMessage": null, "exceptionMessage": null,
"ExceptionStackTrace": null, "exceptionStackTrace": null,
"InnerExceptionMessage": null, "innerExceptionMessage": null,
"Object": "error" "object": "error"
}); });
_serialize(&json, "") _serialize(&json, "")
} }

View file

@ -3,7 +3,7 @@
// The more key/value pairs there are the more recursion occurs. // The more key/value pairs there are the more recursion occurs.
// We want to keep this as low as possible, but not higher then 128. // We want to keep this as low as possible, but not higher then 128.
// If you go above 128 it will cause rust-analyzer to fail, // If you go above 128 it will cause rust-analyzer to fail,
#![recursion_limit = "90"] #![recursion_limit = "200"]
// When enabled use MiMalloc as malloc instead of the default malloc // When enabled use MiMalloc as malloc instead of the default malloc
#[cfg(feature = "enable_mimalloc")] #[cfg(feature = "enable_mimalloc")]
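The recursion_limit bump is presumably needed because, as the comment above notes, macro expansion recurses once per key/value pair, and the larger json! maps push it past the old limit of 90. A trivial sketch of where the attribute lives (the crate root) — the values below are placeholders:

#![recursion_limit = "200"]

use serde_json::json;

fn main() {
    // A single json! invocation with many entries expands recursively per entry,
    // which is what drives the required recursion_limit up.
    let profile = json!({
        "id": "uuid", "name": "example", "email": "user@example.com",
        "premium": true, "culture": "en-US", "object": "profile",
    });
    println!("{profile}");
}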

File diff suppressed because it is too large

View file

@ -17,12 +17,12 @@
{{#each page_data}} {{#each page_data}}
<tr> <tr>
<td> <td>
<svg width="48" height="48" class="float-start me-2 rounded" data-jdenticon-value="{{Id}}"> <svg width="48" height="48" class="float-start me-2 rounded" data-jdenticon-value="{{id}}">
<div class="float-start"> <div class="float-start">
<strong>{{Name}}</strong> <strong>{{name}}</strong>
<span class="me-2">({{BillingEmail}})</span> <span class="me-2">({{billingEmail}})</span>
<span class="d-block"> <span class="d-block">
<span class="badge bg-success font-monospace">{{Id}}</span> <span class="badge bg-success font-monospace">{{id}}</span>
</span> </span>
</div> </div>
</td> </td>
@ -44,7 +44,7 @@
<span class="d-block"><strong>Events:</strong> {{event_count}}</span> <span class="d-block"><strong>Events:</strong> {{event_count}}</span>
</td> </td>
<td class="text-end px-0 small"> <td class="text-end px-0 small">
<button type="button" class="btn btn-sm btn-link p-0 border-0 float-right" vw-delete-organization data-vw-org-uuid="{{jsesc Id no_quote}}" data-vw-org-name="{{jsesc Name no_quote}}" data-vw-billing-email="{{jsesc BillingEmail no_quote}}">Delete Organization</button><br> <button type="button" class="btn btn-sm btn-link p-0 border-0 float-right" vw-delete-organization data-vw-org-uuid="{{jsesc id no_quote}}" data-vw-org-name="{{jsesc name no_quote}}" data-vw-billing-email="{{jsesc billingEmail no_quote}}">Delete Organization</button><br>
</td> </td>
</tr> </tr>
{{/each}} {{/each}}
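Handlebars looks up keys verbatim, so once the admin JSON switches to camelCase the templates above (and the users template below) have to follow. A small sketch of that coupling, assuming the handlebars crate and made-up organization data:

use handlebars::Handlebars;
use serde_json::json;

fn main() {
    let hb = Handlebars::new();
    // Keys must match the (now camelCase) JSON handed to the template.
    let org = json!({
        "id": "11111111-2222-3333-4444-555555555555",
        "name": "Example Org",
        "billingEmail": "billing@example.com",
    });
    let rendered = hb
        .render_template("{{name}} ({{billingEmail}}) [{{id}}]", &org)
        .unwrap();
    assert_eq!(
        rendered,
        "Example Org (billing@example.com) [11111111-2222-3333-4444-555555555555]"
    );
}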

View file

@ -18,21 +18,21 @@
{{#each page_data}} {{#each page_data}}
<tr> <tr>
<td> <td>
<svg width="48" height="48" class="float-start me-2 rounded" data-jdenticon-value="{{Email}}"> <svg width="48" height="48" class="float-start me-2 rounded" data-jdenticon-value="{{email}}">
<div class="float-start"> <div class="float-start">
<strong>{{Name}}</strong> <strong>{{name}}</strong>
<span class="d-block">{{Email}}</span> <span class="d-block">{{email}}</span>
<span class="d-block"> <span class="d-block">
{{#unless user_enabled}} {{#unless user_enabled}}
<span class="badge bg-danger me-2" title="User is disabled">Disabled</span> <span class="badge bg-danger me-2" title="User is disabled">Disabled</span>
{{/unless}} {{/unless}}
{{#if TwoFactorEnabled}} {{#if twoFactorEnabled}}
<span class="badge bg-success me-2" title="2FA is enabled">2FA</span> <span class="badge bg-success me-2" title="2FA is enabled">2FA</span>
{{/if}} {{/if}}
{{#case _Status 1}} {{#case _status 1}}
<span class="badge bg-warning text-dark me-2" title="User is invited">Invited</span> <span class="badge bg-warning text-dark me-2" title="User is invited">Invited</span>
{{/case}} {{/case}}
{{#if EmailVerified}} {{#if emailVerified}}
<span class="badge bg-success me-2" title="Email has been verified">Verified</span> <span class="badge bg-success me-2" title="Email has been verified">Verified</span>
{{/if}} {{/if}}
</span> </span>
@ -54,15 +54,15 @@
{{/if}} {{/if}}
</td> </td>
<td> <td>
<div class="overflow-auto vw-org-cell" data-vw-user-email="{{jsesc Email no_quote}}" data-vw-user-uuid="{{jsesc Id no_quote}}"> <div class="overflow-auto vw-org-cell" data-vw-user-email="{{jsesc email no_quote}}" data-vw-user-uuid="{{jsesc id no_quote}}">
{{#each Organizations}} {{#each organizations}}
<button class="badge" data-bs-toggle="modal" data-bs-target="#userOrgTypeDialog" data-vw-org-type="{{Type}}" data-vw-org-uuid="{{jsesc Id no_quote}}" data-vw-org-name="{{jsesc Name no_quote}}">{{Name}}</button> <button class="badge" data-bs-toggle="modal" data-bs-target="#userOrgTypeDialog" data-vw-org-type="{{type}}" data-vw-org-uuid="{{jsesc id no_quote}}" data-vw-org-name="{{jsesc name no_quote}}">{{name}}</button>
{{/each}} {{/each}}
</div> </div>
</td> </td>
<td class="text-end px-0 small"> <td class="text-end px-0 small">
<span data-vw-user-uuid="{{jsesc Id no_quote}}" data-vw-user-email="{{jsesc Email no_quote}}"> <span data-vw-user-uuid="{{jsesc id no_quote}}" data-vw-user-email="{{jsesc email no_quote}}">
{{#if TwoFactorEnabled}} {{#if twoFactorEnabled}}
<button type="button" class="btn btn-sm btn-link p-0 border-0 float-right" vw-remove2fa>Remove all 2FA</button><br> <button type="button" class="btn btn-sm btn-link p-0 border-0 float-right" vw-remove2fa>Remove all 2FA</button><br>
{{/if}} {{/if}}
<button type="button" class="btn btn-sm btn-link p-0 border-0 float-right" vw-deauth-user>Deauthorize sessions</button><br> <button type="button" class="btn btn-sm btn-link p-0 border-0 float-right" vw-deauth-user>Deauthorize sessions</button><br>
@ -72,7 +72,7 @@
{{else}} {{else}}
<button type="button" class="btn btn-sm btn-link p-0 border-0 float-right" vw-enable-user>Enable User</button><br> <button type="button" class="btn btn-sm btn-link p-0 border-0 float-right" vw-enable-user>Enable User</button><br>
{{/if}} {{/if}}
{{#case _Status 1}} {{#case _status 1}}
<button type="button" class="btn btn-sm btn-link p-0 border-0 float-right" vw-resend-user-invite>Resend invite</button><br> <button type="button" class="btn btn-sm btn-link p-0 border-0 float-right" vw-resend-user-invite>Resend invite</button><br>
{{/case}} {{/case}}
</span> </span>

View file

@ -526,25 +526,33 @@ use serde_json::Value;
pub type JsonMap = serde_json::Map<String, Value>; pub type JsonMap = serde_json::Map<String, Value>;
#[derive(Serialize, Deserialize)] #[derive(Serialize, Deserialize)]
pub struct UpCase<T: DeserializeOwned> { pub struct LowerCase<T: DeserializeOwned> {
#[serde(deserialize_with = "upcase_deserialize")] #[serde(deserialize_with = "lowercase_deserialize")]
#[serde(flatten)] #[serde(flatten)]
pub data: T, pub data: T,
} }
impl Default for LowerCase<Value> {
fn default() -> Self {
Self {
data: Value::Null,
}
}
}
// https://github.com/serde-rs/serde/issues/586 // https://github.com/serde-rs/serde/issues/586
pub fn upcase_deserialize<'de, T, D>(deserializer: D) -> Result<T, D::Error> pub fn lowercase_deserialize<'de, T, D>(deserializer: D) -> Result<T, D::Error>
where where
T: DeserializeOwned, T: DeserializeOwned,
D: Deserializer<'de>, D: Deserializer<'de>,
{ {
let d = deserializer.deserialize_any(UpCaseVisitor)?; let d = deserializer.deserialize_any(LowerCaseVisitor)?;
T::deserialize(d).map_err(de::Error::custom) T::deserialize(d).map_err(de::Error::custom)
} }
struct UpCaseVisitor; struct LowerCaseVisitor;
impl<'de> Visitor<'de> for UpCaseVisitor { impl<'de> Visitor<'de> for LowerCaseVisitor {
type Value = Value; type Value = Value;
fn expecting(&self, formatter: &mut fmt::Formatter<'_>) -> fmt::Result { fn expecting(&self, formatter: &mut fmt::Formatter<'_>) -> fmt::Result {
@ -558,7 +566,7 @@ impl<'de> Visitor<'de> for UpCaseVisitor {
let mut result_map = JsonMap::new(); let mut result_map = JsonMap::new();
while let Some((key, value)) = map.next_entry()? { while let Some((key, value)) = map.next_entry()? {
result_map.insert(upcase_first(key), upcase_value(value)); result_map.insert(_process_key(key), convert_json_key_lcase_first(value));
} }
Ok(Value::Object(result_map)) Ok(Value::Object(result_map))
@ -571,45 +579,23 @@ impl<'de> Visitor<'de> for UpCaseVisitor {
let mut result_seq = Vec::<Value>::new(); let mut result_seq = Vec::<Value>::new();
while let Some(value) = seq.next_element()? { while let Some(value) = seq.next_element()? {
result_seq.push(upcase_value(value)); result_seq.push(convert_json_key_lcase_first(value));
} }
Ok(Value::Array(result_seq)) Ok(Value::Array(result_seq))
} }
} }
fn upcase_value(value: Value) -> Value {
if let Value::Object(map) = value {
let mut new_value = Value::Object(serde_json::Map::new());
for (key, val) in map.into_iter() {
let processed_key = _process_key(&key);
new_value[processed_key] = upcase_value(val);
}
new_value
} else if let Value::Array(array) = value {
// Initialize array with null values
let mut new_value = Value::Array(vec![Value::Null; array.len()]);
for (index, val) in array.into_iter().enumerate() {
new_value[index] = upcase_value(val);
}
new_value
} else {
value
}
}
// Inner function to handle a special case for the 'ssn' key. // Inner function to handle a special case for the 'ssn' key.
// This key is part of the Identity Cipher (Social Security Number) // This key is part of the Identity Cipher (Social Security Number)
fn _process_key(key: &str) -> String { fn _process_key(key: &str) -> String {
match key.to_lowercase().as_ref() { match key.to_lowercase().as_ref() {
"ssn" => "SSN".into(), "ssn" => "ssn".into(),
_ => self::upcase_first(key), _ => self::lcase_first(key),
} }
} }
#[derive(Deserialize, Debug, Clone)] #[derive(Clone, Debug, Deserialize)]
#[serde(untagged)] #[serde(untagged)]
pub enum NumberOrString { pub enum NumberOrString {
Number(i64), Number(i64),
@ -726,25 +712,25 @@ pub fn convert_json_key_lcase_first(src_json: Value) -> Value {
Value::Object(obj) => { Value::Object(obj) => {
let mut json_map = JsonMap::new(); let mut json_map = JsonMap::new();
for (key, value) in obj.iter() { for (key, value) in obj.into_iter() {
match (key, value) { match (key, value) {
(key, Value::Object(elm)) => { (key, Value::Object(elm)) => {
let inner_value = convert_json_key_lcase_first(Value::Object(elm.clone())); let inner_value = convert_json_key_lcase_first(Value::Object(elm));
json_map.insert(lcase_first(key), inner_value); json_map.insert(_process_key(&key), inner_value);
} }
(key, Value::Array(elm)) => { (key, Value::Array(elm)) => {
let mut inner_array: Vec<Value> = Vec::with_capacity(elm.len()); let mut inner_array: Vec<Value> = Vec::with_capacity(elm.len());
for inner_obj in elm { for inner_obj in elm {
inner_array.push(convert_json_key_lcase_first(inner_obj.clone())); inner_array.push(convert_json_key_lcase_first(inner_obj));
} }
json_map.insert(lcase_first(key), Value::Array(inner_array)); json_map.insert(_process_key(&key), Value::Array(inner_array));
} }
(key, value) => { (key, value) => {
json_map.insert(lcase_first(key), value.clone()); json_map.insert(_process_key(&key), value);
} }
} }
} }
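Taken together, the util.rs changes let data stored with the old PascalCase keys keep deserializing into camelCase-expecting code. A self-contained, simplified sketch of that idea (the real implementation uses the streaming LowerCaseVisitor above and special-cases the "ssn" key; the struct and field names here are illustrative):

use serde::Deserialize;
use serde_json::{json, Map, Value};

// Lowercase only the first character of a key, like util.rs's lcase_first.
fn lcase_first(s: &str) -> String {
    let mut chars = s.chars();
    match chars.next() {
        Some(first) => first.to_lowercase().collect::<String>() + chars.as_str(),
        None => String::new(),
    }
}

// Recursively apply lcase_first to every object key, like convert_json_key_lcase_first.
fn lcase_keys(value: Value) -> Value {
    match value {
        Value::Object(map) => Value::Object(
            map.into_iter()
                .map(|(k, v)| (lcase_first(&k), lcase_keys(v)))
                .collect::<Map<String, Value>>(),
        ),
        Value::Array(arr) => Value::Array(arr.into_iter().map(lcase_keys).collect()),
        other => other,
    }
}

#[derive(Debug, Deserialize, PartialEq)]
#[serde(rename_all = "camelCase")]
struct SendData {
    file_name: String,
    size: i64,
}

fn main() {
    // Rows written before this commit used PascalCase keys; new ones use camelCase.
    let legacy = r#"{"FileName":"secret.txt","Size":1024}"#;
    let current = r#"{"fileName":"secret.txt","size":1024}"#;

    let from_legacy: SendData =
        serde_json::from_value(lcase_keys(serde_json::from_str(legacy).unwrap())).unwrap();
    let from_current: SendData = serde_json::from_str(current).unwrap();
    assert_eq!(from_legacy, from_current);

    // The same normalisation applied to a nested Value:
    assert_eq!(
        lcase_keys(json!({ "Text": { "Hidden": false } })),
        json!({ "text": { "hidden": false } })
    );
}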

View file

@ -71,9 +71,9 @@ with urllib.request.urlopen(DOMAIN_LISTS_URL) as response:
global_domains = [] global_domains = []
for name, domain_list in domain_lists.items(): for name, domain_list in domain_lists.items():
entry = OrderedDict() entry = OrderedDict()
entry["Type"] = enums[name] entry["type"] = enums[name]
entry["Domains"] = domain_list entry["domains"] = domain_list
entry["Excluded"] = False entry["excluded"] = False
global_domains.append(entry) global_domains.append(entry)
# Write out the global domains JSON file. # Write out the global domains JSON file.