Mirror of https://github.com/dani-garcia/vaultwarden, synced 2024-12-14 17:43:46 +01:00

Merge pull request #1584 from BlackDex/admin-interface

Some admin interface updates.

This commit is contained in cf6ad3cb15.
10 changed files with 50 additions and 56 deletions
@@ -215,9 +215,6 @@ RUN apk add --no-cache \
     openssl \
     curl \
     dumb-init \
-    {% if "sqlite" in features %}
-    sqlite \
-    {% endif %}
     {% if "mysql" in features %}
     mariadb-connector-c \
     {% endif %}
@@ -232,7 +229,6 @@ RUN apt-get update && apt-get install -y \
     ca-certificates \
     curl \
     dumb-init \
-    sqlite3 \
     libmariadb-dev-compat \
     libpq5 \
     && rm -rf /var/lib/apt/lists/*
@@ -86,7 +86,6 @@ RUN apt-get update && apt-get install -y \
     ca-certificates \
     curl \
     dumb-init \
-    sqlite3 \
     libmariadb-dev-compat \
     libpq5 \
     && rm -rf /var/lib/apt/lists/*
@@ -82,7 +82,6 @@ RUN apk add --no-cache \
     openssl \
     curl \
     dumb-init \
-    sqlite \
     postgresql-libs \
     ca-certificates
 
@@ -129,7 +129,6 @@ RUN apt-get update && apt-get install -y \
     ca-certificates \
     curl \
     dumb-init \
-    sqlite3 \
     libmariadb-dev-compat \
     libpq5 \
     && rm -rf /var/lib/apt/lists/*
@@ -129,7 +129,6 @@ RUN apt-get update && apt-get install -y \
     ca-certificates \
     curl \
     dumb-init \
-    sqlite3 \
     libmariadb-dev-compat \
     libpq5 \
     && rm -rf /var/lib/apt/lists/*
@@ -129,7 +129,6 @@ RUN apt-get update && apt-get install -y \
     ca-certificates \
     curl \
     dumb-init \
-    sqlite3 \
     libmariadb-dev-compat \
     libpq5 \
     && rm -rf /var/lib/apt/lists/*
@@ -86,7 +86,6 @@ RUN apk add --no-cache \
     openssl \
     curl \
     dumb-init \
-    sqlite \
     ca-certificates
 
 RUN mkdir /data
@@ -1,7 +1,7 @@
 use once_cell::sync::Lazy;
 use serde::de::DeserializeOwned;
 use serde_json::Value;
-use std::{env, process::Command, time::Duration};
+use std::{env, time::Duration};
 
 use reqwest::{blocking::Client, header::USER_AGENT};
 use rocket::{
@@ -68,7 +68,6 @@ static CAN_BACKUP: Lazy<bool> = Lazy::new(|| {
     DbConnType::from_url(&CONFIG.database_url())
         .map(|t| t == DbConnType::sqlite)
         .unwrap_or(false)
-        && Command::new("sqlite3").arg("-version").status().is_ok()
 });
 
 #[get("/")]
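
The hunk above drops the runtime probe for an external sqlite3 binary, so backup availability now depends only on the configured database URL. A minimal standalone sketch of that idea, with a hypothetical ConnType enum and URL check standing in for the project's DbConnType::from_url:

// Hypothetical stand-in for the project's DbConnType: only the URL decides
// whether a backup is possible; no external binary is probed anymore.
#[derive(Debug, PartialEq)]
enum ConnType {
    Sqlite,
    Mysql,
    Postgresql,
}

fn conn_type(url: &str) -> ConnType {
    if url.starts_with("mysql:") {
        ConnType::Mysql
    } else if url.starts_with("postgres") {
        ConnType::Postgresql
    } else {
        // Anything else is treated as a SQLite file path in this sketch.
        ConnType::Sqlite
    }
}

fn can_backup(url: &str) -> bool {
    conn_type(url) == ConnType::Sqlite
}

fn main() {
    assert!(can_backup("data/db.sqlite3"));
    assert!(!can_backup("mysql://user:pw@localhost/vaultwarden"));
    println!("backup availability checks behave as expected");
}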
@@ -502,9 +501,17 @@ fn diagnostics(_token: AdminToken, ip_header: IpHeader, conn: DbConn) -> ApiResu
     use std::net::ToSocketAddrs;
 
     // Get current running versions
-    let vault_version_path = format!("{}/{}", CONFIG.web_vault_folder(), "version.json");
-    let vault_version_str = read_file_string(&vault_version_path)?;
-    let web_vault_version: WebVaultVersion = serde_json::from_str(&vault_version_str)?;
+    let web_vault_version: WebVaultVersion = match read_file_string(&format!("{}/{}", CONFIG.web_vault_folder(), "bwrs-version.json")) {
+        Ok(s) => serde_json::from_str(&s)?,
+        _ => {
+            match read_file_string(&format!("{}/{}", CONFIG.web_vault_folder(), "version.json")) {
+                Ok(s) => serde_json::from_str(&s)?,
+                _ => {
+                    WebVaultVersion{version: String::from("Version file missing")}
+                },
+            }
+        },
+    };
 
     // Execute some environment checks
     let running_within_docker = is_running_in_docker();
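
The new lookup above tries bwrs-version.json first, falls back to version.json, and finally to a placeholder string. A self-contained sketch of the same fallback logic, assuming only the serde_json crate (std::fs stands in here for the project's read_file_string helper):

use std::fs;

// Try each candidate version file in order and return its "version" field;
// fall back to a placeholder when nothing can be read or parsed.
fn read_web_vault_version(folder: &str) -> String {
    for name in ["bwrs-version.json", "version.json"] {
        let path = format!("{}/{}", folder, name);
        if let Ok(contents) = fs::read_to_string(&path) {
            if let Ok(json) = serde_json::from_str::<serde_json::Value>(&contents) {
                if let Some(version) = json.get("version").and_then(|v| v.as_str()) {
                    return version.to_string();
                }
            }
        }
    }
    String::from("Version file missing")
}

fn main() {
    println!("web vault version: {}", read_web_vault_version("web-vault"));
}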
@@ -557,9 +564,10 @@ fn diagnostics(_token: AdminToken, ip_header: IpHeader, conn: DbConn) -> ApiResu
 
     let diagnostics_json = json!({
         "dns_resolved": dns_resolved,
-        "web_vault_version": web_vault_version.version,
         "latest_release": latest_release,
         "latest_commit": latest_commit,
+        "web_vault_enabled": &CONFIG.web_vault_enabled(),
+        "web_vault_version": web_vault_version.version,
         "latest_web_build": latest_web_build,
         "running_within_docker": running_within_docker,
         "has_http_access": has_http_access,
@@ -571,6 +579,7 @@ fn diagnostics(_token: AdminToken, ip_header: IpHeader, conn: DbConn) -> ApiResu
         "db_type": *DB_TYPE,
         "db_version": get_sql_server_version(&conn),
         "admin_url": format!("{}/diagnostics", admin_url(Referer(None))),
+        "server_time_local": Local::now().format("%Y-%m-%d %H:%M:%S %Z").to_string(),
         "server_time": Utc::now().format("%Y-%m-%d %H:%M:%S UTC").to_string(), // Run the date/time check as the last item to minimize the difference
     });
 
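
The added server_time_local field reports the host's local time alongside the existing UTC timestamp. A quick, illustrative snippet with the chrono crate, reusing the same format strings as the hunk above:

use chrono::{Local, Utc};

fn main() {
    // Local time, including the zone/offset via %Z, as used for "server_time_local".
    println!("server_time_local: {}", Local::now().format("%Y-%m-%d %H:%M:%S %Z"));
    // UTC time with a fixed "UTC" suffix, as used for "server_time".
    println!("server_time:       {} UTC", Utc::now().format("%Y-%m-%d %H:%M:%S"));
}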
@@ -596,11 +605,11 @@ fn delete_config(_token: AdminToken) -> EmptyResult {
 }
 
 #[post("/config/backup_db")]
-fn backup_db(_token: AdminToken) -> EmptyResult {
+fn backup_db(_token: AdminToken, conn: DbConn) -> EmptyResult {
     if *CAN_BACKUP {
-        backup_database()
+        backup_database(&conn)
     } else {
-        err!("Can't back up current DB (either it's not SQLite or the 'sqlite' binary is not present)");
+        err!("Can't back up current DB (Only SQLite supports this feature)");
     }
 }
 
@@ -1,5 +1,3 @@
-use std::process::Command;
-
 use chrono::prelude::*;
 use diesel::r2d2::{ConnectionManager, Pool, PooledConnection};
 use rocket::{
@@ -144,6 +142,7 @@ macro_rules! db_run {
     // Different code for each db
     ( @raw $conn:ident: $( $($db:ident),+ $body:block )+ ) => {
         #[allow(unused)] use diesel::prelude::*;
+        #[allow(unused_variables)]
         match $conn {
             $($(
                 #[cfg($db)]
@@ -221,21 +220,21 @@ macro_rules! db_object {
 // Reexport the models, needs to be after the macros are defined so it can access them
 pub mod models;
 
-/// Creates a back-up of the database using sqlite3
-pub fn backup_database() -> Result<(), Error> {
-    use std::path::Path;
-    let db_url = CONFIG.database_url();
-    let db_path = Path::new(&db_url).parent().unwrap();
-
-    let now: DateTime<Utc> = Utc::now();
-    let file_date = now.format("%Y%m%d").to_string();
-    let backup_command: String = format!("{}{}{}", ".backup 'db_", file_date, ".sqlite3'");
-
-    Command::new("sqlite3")
-        .current_dir(db_path)
-        .args(&["db.sqlite3", &backup_command])
-        .output()
-        .expect("Can't open database, sqlite3 is not available, make sure it's installed and available on the PATH");
+/// Creates a back-up of the sqlite database
+/// MySQL/MariaDB and PostgreSQL are not supported.
+pub fn backup_database(conn: &DbConn) -> Result<(), Error> {
+    db_run! {@raw conn:
+        postgresql, mysql {
+            err!("PostgreSQL and MySQL/MariaDB do not support this backup feature");
+        }
+        sqlite {
+            use std::path::Path;
+            let db_url = CONFIG.database_url();
+            let db_path = Path::new(&db_url).parent().unwrap().to_string_lossy();
+            let file_date = Utc::now().format("%Y%m%d_%H%M%S").to_string();
+            diesel::sql_query(format!("VACUUM INTO '{}/db_{}.sqlite3'", db_path, file_date)).execute(conn)?;
+        }
+    }
 
     Ok(())
 }
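
The rewritten backup_database relies on SQLite's VACUUM INTO statement instead of shelling out to the sqlite3 CLI, which is why the Dockerfile hunks earlier no longer install the sqlite/sqlite3 packages. A standalone sketch of the same technique using the rusqlite crate (the project itself issues the statement through Diesel, as shown above); the paths here are illustrative:

use chrono::Utc;
use rusqlite::Connection;

// VACUUM INTO (SQLite >= 3.27) writes a consistent copy of the open database
// into a new file, without needing an external sqlite3 binary.
fn backup_sqlite(db_file: &str, backup_dir: &str) -> rusqlite::Result<()> {
    let conn = Connection::open(db_file)?;
    let stamp = Utc::now().format("%Y%m%d_%H%M%S");
    conn.execute_batch(&format!("VACUUM INTO '{}/db_{}.sqlite3'", backup_dir, stamp))?;
    Ok(())
}

fn main() -> rusqlite::Result<()> {
    backup_sqlite("data/db.sqlite3", "data")
}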
@@ -243,29 +242,14 @@
 
 /// Get the SQL Server version
 pub fn get_sql_server_version(conn: &DbConn) -> String {
-    use diesel::sql_types::Text;
-    #[derive(QueryableByName)]
-    struct SqlVersion {
-        #[sql_type = "Text"]
-        version: String,
-    }
-
     db_run! {@raw conn:
         postgresql, mysql {
-            match diesel::sql_query("SELECT version() AS version;").get_result::<SqlVersion>(conn).ok() {
-                Some(v) => {
-                    v.version
-                },
-                _ => "Unknown".to_string()
-            }
+            no_arg_sql_function!(version, diesel::sql_types::Text);
+            diesel::select(version).get_result::<String>(conn).unwrap_or_else(|_| "Unknown".to_string())
         }
         sqlite {
-            match diesel::sql_query("SELECT sqlite_version() AS version;").get_result::<SqlVersion>(conn).ok() {
-                Some(v) => {
-                    v.version
-                },
-                _ => "Unknown".to_string()
-            }
+            no_arg_sql_function!(sqlite_version, diesel::sql_types::Text);
+            diesel::select(sqlite_version).get_result::<String>(conn).unwrap_or_else(|_| "Unknown".to_string())
         }
     }
 }
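
The new implementation selects the server's version function directly (version() on PostgreSQL/MySQL, sqlite_version() on SQLite) rather than mapping a raw query onto a helper struct. A small, hypothetical check of the SQLite branch using the rusqlite crate:

use rusqlite::Connection;

fn main() -> rusqlite::Result<()> {
    let conn = Connection::open_in_memory()?;
    // SELECT sqlite_version() returns the library version string, which is what
    // the diesel::select(sqlite_version) call in the hunk above resolves to.
    let version: String = conn.query_row("SELECT sqlite_version()", [], |row| row.get(0))?;
    println!("sqlite version: {}", version);
    Ok(())
}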
@@ -20,6 +20,7 @@
     <dd class="col-sm-7">
         <span id="server-latest">{{diagnostics.latest_release}}<span id="server-latest-commit" class="d-none">-{{diagnostics.latest_commit}}</span></span>
     </dd>
+    {{#if diagnostics.web_vault_enabled}}
     <dt class="col-sm-5">Web Installed
         <span class="badge badge-success d-none" id="web-success" title="Latest version is installed.">Ok</span>
         <span class="badge badge-warning d-none" id="web-warning" title="There seems to be an update available.">Update</span>
@@ -35,6 +36,13 @@
         <span id="web-latest">{{diagnostics.latest_web_build}}</span>
     </dd>
     {{/unless}}
+    {{/if}}
+    {{#unless diagnostics.web_vault_enabled}}
+    <dt class="col-sm-5">Web Installed</dt>
+    <dd class="col-sm-7">
+        <span id="web-installed">Web Vault is disabled</span>
+    </dd>
+    {{/unless}}
     <dt class="col-sm-5">Database</dt>
     <dd class="col-sm-7">
         <span><b>{{diagnostics.db_type}}:</b> {{diagnostics.db_version}}</span>
@@ -118,7 +126,10 @@
     <dd class="col-sm-7">
         <span id="dns-resolved">{{diagnostics.dns_resolved}}</span>
     </dd>
+    <dt class="col-sm-5">Date & Time (Local)</dt>
+    <dd class="col-sm-7">
+        <span><b>Server:</b> {{diagnostics.server_time_local}}</span>
+    </dd>
     <dt class="col-sm-5">Date & Time (UTC)
         <span class="badge badge-success d-none" id="time-success" title="Time offsets seem to be correct.">Ok</span>
         <span class="badge badge-danger d-none" id="time-warning" title="Time offsets are too mouch at drift.">Error</span>