2019-10-24 23:21:04 +02:00
|
|
|
#![feature(proc_macro_hygiene, vec_remove_item, try_trait, ip)]
|
2019-02-03 00:22:18 +01:00
|
|
|
#![recursion_limit = "256"]
|
2018-10-10 20:40:39 +02:00
|
|
|
|
2019-09-12 22:12:22 +02:00
|
|
|
extern crate openssl;
|
2018-12-30 23:34:31 +01:00
|
|
|
#[macro_use]
|
|
|
|
extern crate rocket;
|
|
|
|
#[macro_use]
|
|
|
|
extern crate serde_derive;
|
|
|
|
#[macro_use]
|
|
|
|
extern crate serde_json;
|
|
|
|
#[macro_use]
|
|
|
|
extern crate log;
|
|
|
|
#[macro_use]
|
|
|
|
extern crate diesel;
|
|
|
|
#[macro_use]
|
|
|
|
extern crate diesel_migrations;
|
|
|
|
#[macro_use]
|
|
|
|
extern crate derive_more;
|
|
|
|
#[macro_use]
|
|
|
|
extern crate num_derive;
|
|
|
|
|
2020-03-16 17:53:22 +01:00
|
|
|
extern crate backtrace;
|
|
|
|
|
2018-12-30 23:34:31 +01:00
|
|
|
use std::{
|
2019-12-06 22:19:07 +01:00
|
|
|
fs::create_dir_all,
|
2018-12-30 23:34:31 +01:00
|
|
|
path::Path,
|
|
|
|
process::{exit, Command},
|
2019-12-06 22:19:07 +01:00
|
|
|
str::FromStr,
|
2020-03-16 17:53:22 +01:00
|
|
|
panic, thread, fmt // For panic logging
|
2018-12-30 23:34:31 +01:00
|
|
|
};
|
2018-02-10 01:00:55 +01:00
|
|
|
|
2018-12-30 23:34:31 +01:00
|
|
|
#[macro_use]
|
|
|
|
mod error;
|
2018-02-10 01:00:55 +01:00
|
|
|
mod api;
|
|
|
|
mod auth;
|
2019-01-25 18:23:51 +01:00
|
|
|
mod config;
|
2018-12-30 23:34:31 +01:00
|
|
|
mod crypto;
|
|
|
|
mod db;
|
2018-08-15 08:32:19 +02:00
|
|
|
mod mail;
|
2018-12-30 23:34:31 +01:00
|
|
|
mod util;
|
2018-02-10 01:00:55 +01:00
|
|
|
|
2019-01-25 18:23:51 +01:00
|
|
|
pub use config::CONFIG;
|
2019-02-14 02:03:37 +01:00
|
|
|
pub use error::{Error, MapResult};
|
2019-01-25 18:23:51 +01:00
|
|
|
|
2020-03-02 20:57:06 +01:00
|
|
|
use structopt::StructOpt;
|
|
|
|
|
2020-03-16 17:53:22 +01:00
|
|
|
// Used for catching panics and log them to file instead of stderr
|
|
|
|
use backtrace::Backtrace;
|
|
|
|
struct Shim(Backtrace);
|
|
|
|
|
|
|
|
impl fmt::Debug for Shim {
|
|
|
|
fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
|
|
|
|
write!(fmt, "\n{:?}", self.0)
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2020-03-02 20:57:06 +01:00
|
|
|
/// Command-line options, parsed via structopt in `parse_args()`.
#[derive(Debug, StructOpt)]
#[structopt(name = "bitwarden_rs", about = "A Bitwarden API server written in Rust")]
struct Opt {
    /// Prints the app version
    // `short, long` derives the flag names from the field (`-v` / `--version`).
    #[structopt(short, long)]
    version: bool,
}
|
|
|
|
|
2018-02-10 01:00:55 +01:00
|
|
|
fn main() {
|
2020-03-02 20:57:06 +01:00
|
|
|
parse_args();
|
2019-02-20 20:59:37 +01:00
|
|
|
launch_info();
|
|
|
|
|
2019-12-06 22:19:07 +01:00
|
|
|
use log::LevelFilter as LF;
|
|
|
|
let level = LF::from_str(&CONFIG.log_level()).expect("Valid log level");
|
|
|
|
init_logging(level).ok();
|
|
|
|
|
|
|
|
let extra_debug = match level {
|
|
|
|
LF::Trace | LF::Debug => true,
|
|
|
|
_ => false,
|
|
|
|
};
|
2018-12-06 20:35:25 +01:00
|
|
|
|
2018-05-12 22:55:18 +02:00
|
|
|
check_db();
|
|
|
|
check_rsa_keys();
|
2018-09-13 20:59:51 +02:00
|
|
|
check_web_vault();
|
2018-08-30 17:43:46 +02:00
|
|
|
migrations::run_migrations();
|
2018-02-10 01:00:55 +01:00
|
|
|
|
2019-11-06 20:21:47 +01:00
|
|
|
create_icon_cache_folder();
|
|
|
|
|
2019-12-06 22:19:07 +01:00
|
|
|
launch_rocket(extra_debug);
|
2018-02-10 01:00:55 +01:00
|
|
|
}
|
|
|
|
|
2020-03-02 20:57:06 +01:00
|
|
|
fn parse_args() {
|
|
|
|
let opt = Opt::from_args();
|
|
|
|
if opt.version {
|
|
|
|
if let Some(version) = option_env!("GIT_VERSION") {
|
|
|
|
println!("bitwarden_rs {}", version);
|
|
|
|
} else {
|
|
|
|
println!("bitwarden_rs (Version info from Git not present)");
|
|
|
|
}
|
|
|
|
exit(0);
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2019-02-20 20:59:37 +01:00
|
|
|
/// Prints the startup banner, including the build version (when available)
/// and a notice pointing bug reports at this project rather than upstream.
fn launch_info() {
    println!("/--------------------------------------------------------------------\\");
    println!("| Starting Bitwarden_RS |");

    // GIT_VERSION is injected at build time; center it inside the banner.
    if let Some(version) = option_env!("GIT_VERSION") {
        println!("|{:^68}|", format!("Version {}", version));
    }

    // Remaining static banner lines.
    for line in &[
        "|--------------------------------------------------------------------|",
        "| This is an *unofficial* Bitwarden implementation, DO NOT use the |",
        "| official channels to report bugs/features, regardless of client. |",
        "| Report URL: https://github.com/dani-garcia/bitwarden_rs/issues/new |",
    ] {
        println!("{}", line);
    }
    println!("\\--------------------------------------------------------------------/\n");
}
|
|
|
|
|
2019-12-06 22:19:07 +01:00
|
|
|
/// Builds and installs the global `fern` logger at the given level, then
/// installs a panic hook that writes panics (with backtrace) to the log
/// instead of stderr.
///
/// Errors if the log file cannot be opened or if a global logger was
/// already set (`fern::InitError`).
fn init_logging(level: log::LevelFilter) -> Result<(), fern::InitError> {
    let mut logger = fern::Dispatch::new()
        .level(level)
        // Hide unknown certificate errors if using self-signed
        .level_for("rustls::session", log::LevelFilter::Off)
        // Hide failed to close stream messages
        .level_for("hyper::server", log::LevelFilter::Warn)
        // Silence rocket logs
        .level_for("_", log::LevelFilter::Off)
        .level_for("launch", log::LevelFilter::Off)
        .level_for("launch_", log::LevelFilter::Off)
        .level_for("rocket::rocket", log::LevelFilter::Off)
        .level_for("rocket::fairing", log::LevelFilter::Off)
        .chain(std::io::stdout());

    // Extended logging adds a timestamp, target and level prefix to each
    // message; otherwise messages are emitted verbatim.
    if CONFIG.extended_logging() {
        logger = logger.format(|out, message, record| {
            out.finish(format_args!(
                "{}[{}][{}] {}",
                chrono::Local::now().format("[%Y-%m-%d %H:%M:%S]"),
                record.target(),
                record.level(),
                message
            ))
        });
    } else {
        logger = logger.format(|out, message, _| out.finish(format_args!("{}", message)));
    }

    // Optionally mirror all output to a log file configured by the user.
    if let Some(log_file) = CONFIG.log_file() {
        logger = logger.chain(fern::log_file(log_file)?);
    }

    // Syslog is only available on non-Windows targets.
    #[cfg(not(windows))]
    {
        if cfg!(feature = "enable_syslog") || CONFIG.use_syslog() {
            logger = chain_syslog(logger);
        }
    }

    // Install as the global logger; fails if one is already registered.
    logger.apply()?;

    // Catch panics and log them instead of default output to StdErr
    panic::set_hook(Box::new(|info| {
        // Capture the backtrace immediately, before any further unwinding.
        let backtrace = Backtrace::new();

        let thread = thread::current();
        let thread = thread.name().unwrap_or("unnamed");

        // Panic payloads are either a &'static str (panic!("literal")) or a
        // String (panic!("{}", x)); anything else is reported as "Box<Any>".
        let msg = match info.payload().downcast_ref::<&'static str>() {
            Some(s) => *s,
            None => match info.payload().downcast_ref::<String>() {
                Some(s) => &**s,
                None => "Box<Any>",
            },
        };

        // Include file:line when the panic location is known.
        match info.location() {
            Some(location) => {
                error!(
                    target: "panic", "thread '{}' panicked at '{}': {}:{}{:?}",
                    thread,
                    msg,
                    location.file(),
                    location.line(),
                    Shim(backtrace)
                );
            }
            None => {
                error!(
                    target: "panic",
                    "thread '{}' panicked at '{}'{:?}",
                    thread,
                    msg,
                    Shim(backtrace)
                )
            }
        }
    }));

    Ok(())
}
|
|
|
|
|
2019-03-29 20:27:20 +01:00
|
|
|
#[cfg(not(windows))]
|
2018-12-06 20:35:25 +01:00
|
|
|
fn chain_syslog(logger: fern::Dispatch) -> fern::Dispatch {
|
|
|
|
let syslog_fmt = syslog::Formatter3164 {
|
|
|
|
facility: syslog::Facility::LOG_USER,
|
|
|
|
hostname: None,
|
|
|
|
process: "bitwarden_rs".into(),
|
|
|
|
pid: 0,
|
|
|
|
};
|
|
|
|
|
|
|
|
match syslog::unix(syslog_fmt) {
|
|
|
|
Ok(sl) => logger.chain(sl),
|
|
|
|
Err(e) => {
|
|
|
|
error!("Unable to connect to syslog: {:?}", e);
|
|
|
|
logger
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2018-05-12 22:55:18 +02:00
|
|
|
fn check_db() {
|
2019-05-27 22:58:52 +02:00
|
|
|
if cfg!(feature = "sqlite") {
|
2019-06-02 13:35:01 +02:00
|
|
|
let url = CONFIG.database_url();
|
2019-05-27 22:58:52 +02:00
|
|
|
let path = Path::new(&url);
|
|
|
|
|
|
|
|
if let Some(parent) = path.parent() {
|
2019-11-06 20:21:47 +01:00
|
|
|
if create_dir_all(parent).is_err() {
|
2019-05-27 22:58:52 +02:00
|
|
|
error!("Error creating database directory");
|
|
|
|
exit(1);
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
// Turn on WAL in SQLite
|
|
|
|
if CONFIG.enable_db_wal() {
|
|
|
|
use diesel::RunQueryDsl;
|
2020-01-04 23:37:29 +01:00
|
|
|
let connection = db::get_connection().expect("Can't connect to DB");
|
2019-05-27 22:58:52 +02:00
|
|
|
diesel::sql_query("PRAGMA journal_mode=wal")
|
|
|
|
.execute(&connection)
|
|
|
|
.expect("Failed to turn on WAL");
|
|
|
|
}
|
|
|
|
}
|
2019-05-28 07:48:17 +02:00
|
|
|
db::get_connection().expect("Can't connect to DB");
|
2018-05-12 22:55:18 +02:00
|
|
|
}
|
|
|
|
|
2019-11-06 20:21:47 +01:00
|
|
|
fn create_icon_cache_folder() {
|
|
|
|
// Try to create the icon cache folder, and generate an error if it could not.
|
|
|
|
create_dir_all(&CONFIG.icon_cache_folder()).expect("Error creating icon cache directory");
|
|
|
|
}
|
|
|
|
|
2018-02-17 01:13:02 +01:00
|
|
|
fn check_rsa_keys() {
|
|
|
|
// If the RSA keys don't exist, try to create them
|
2019-01-25 18:23:51 +01:00
|
|
|
if !util::file_exists(&CONFIG.private_rsa_key()) || !util::file_exists(&CONFIG.public_rsa_key()) {
|
2018-12-06 20:35:25 +01:00
|
|
|
info!("JWT keys don't exist, checking if OpenSSL is available...");
|
2018-02-17 01:13:02 +01:00
|
|
|
|
2019-02-20 20:59:37 +01:00
|
|
|
Command::new("openssl").arg("version").status().unwrap_or_else(|_| {
|
2020-03-02 20:57:06 +01:00
|
|
|
info!(
|
|
|
|
"Can't create keys because OpenSSL is not available, make sure it's installed and available on the PATH"
|
|
|
|
);
|
2018-02-17 01:13:02 +01:00
|
|
|
exit(1);
|
|
|
|
});
|
|
|
|
|
2018-12-06 20:35:25 +01:00
|
|
|
info!("OpenSSL detected, creating keys...");
|
2018-02-17 01:13:02 +01:00
|
|
|
|
2019-02-20 20:59:37 +01:00
|
|
|
let key = CONFIG.rsa_key_filename();
|
|
|
|
|
|
|
|
let pem = format!("{}.pem", key);
|
|
|
|
let priv_der = format!("{}.der", key);
|
|
|
|
let pub_der = format!("{}.pub.der", key);
|
|
|
|
|
2018-12-30 23:34:31 +01:00
|
|
|
let mut success = Command::new("openssl")
|
2019-02-20 20:59:37 +01:00
|
|
|
.args(&["genrsa", "-out", &pem])
|
|
|
|
.status()
|
2018-12-30 23:34:31 +01:00
|
|
|
.expect("Failed to create private pem file")
|
|
|
|
.success();
|
|
|
|
|
|
|
|
success &= Command::new("openssl")
|
2019-02-20 20:59:37 +01:00
|
|
|
.args(&["rsa", "-in", &pem, "-outform", "DER", "-out", &priv_der])
|
|
|
|
.status()
|
2018-12-30 23:34:31 +01:00
|
|
|
.expect("Failed to create private der file")
|
|
|
|
.success();
|
|
|
|
|
|
|
|
success &= Command::new("openssl")
|
2019-02-20 20:59:37 +01:00
|
|
|
.args(&["rsa", "-in", &priv_der, "-inform", "DER"])
|
|
|
|
.args(&["-RSAPublicKey_out", "-outform", "DER", "-out", &pub_der])
|
|
|
|
.status()
|
2018-12-30 23:34:31 +01:00
|
|
|
.expect("Failed to create public der file")
|
|
|
|
.success();
|
2018-02-17 01:13:02 +01:00
|
|
|
|
|
|
|
if success {
|
2018-12-06 20:35:25 +01:00
|
|
|
info!("Keys created correctly.");
|
2018-02-17 01:13:02 +01:00
|
|
|
} else {
|
2018-12-06 20:35:25 +01:00
|
|
|
error!("Error creating keys, exiting...");
|
2018-02-17 01:13:02 +01:00
|
|
|
exit(1);
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2018-04-24 22:38:23 +02:00
|
|
|
fn check_web_vault() {
|
2019-01-25 18:23:51 +01:00
|
|
|
if !CONFIG.web_vault_enabled() {
|
2018-06-12 21:09:42 +02:00
|
|
|
return;
|
|
|
|
}
|
|
|
|
|
2019-01-25 18:23:51 +01:00
|
|
|
let index_path = Path::new(&CONFIG.web_vault_folder()).join("index.html");
|
2018-04-24 22:38:23 +02:00
|
|
|
|
|
|
|
if !index_path.exists() {
|
2019-12-29 15:29:46 +01:00
|
|
|
error!("Web vault is not found. To install it, please follow the steps in: ");
|
|
|
|
error!("https://github.com/dani-garcia/bitwarden_rs/wiki/Building-binary#install-the-web-vault");
|
|
|
|
error!("You can also set the environment variable 'WEB_VAULT_ENABLED=false' to disable it");
|
2018-04-24 22:38:23 +02:00
|
|
|
exit(1);
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2019-02-20 20:59:37 +01:00
|
|
|
// Embed the migrations from the migrations folder into the application
|
|
|
|
// This way, the program automatically migrates the database to the latest version
|
|
|
|
// https://docs.rs/diesel_migrations/*/diesel_migrations/macro.embed_migrations.html
|
|
|
|
#[allow(unused_imports)]
|
|
|
|
mod migrations {
|
2019-06-02 13:35:01 +02:00
|
|
|
|
2019-05-26 23:02:41 +02:00
|
|
|
#[cfg(feature = "sqlite")]
|
|
|
|
embed_migrations!("migrations/sqlite");
|
|
|
|
#[cfg(feature = "mysql")]
|
|
|
|
embed_migrations!("migrations/mysql");
|
2019-09-12 22:12:22 +02:00
|
|
|
#[cfg(feature = "postgresql")]
|
|
|
|
embed_migrations!("migrations/postgresql");
|
2019-02-12 22:47:00 +01:00
|
|
|
|
2019-02-20 20:59:37 +01:00
|
|
|
pub fn run_migrations() {
|
|
|
|
// Make sure the database is up to date (create if it doesn't exist, or run the migrations)
|
2019-02-25 01:08:38 +01:00
|
|
|
let connection = crate::db::get_connection().expect("Can't connect to DB");
|
2019-02-12 22:47:00 +01:00
|
|
|
|
2019-02-20 20:59:37 +01:00
|
|
|
use std::io::stdout;
|
2020-03-18 18:11:11 +01:00
|
|
|
|
|
|
|
// Disable Foreign Key Checks during migration
|
|
|
|
use diesel::RunQueryDsl;
|
|
|
|
#[cfg(feature = "postgres")]
|
|
|
|
diesel::sql_query("SET CONSTRAINTS ALL DEFERRED").execute(&connection).expect("Failed to disable Foreign Key Checks during migrations");
|
|
|
|
#[cfg(feature = "mysql")]
|
|
|
|
diesel::sql_query("SET FOREIGN_KEY_CHECKS = 0").execute(&connection).expect("Failed to disable Foreign Key Checks during migrations");
|
|
|
|
#[cfg(feature = "sqlite")]
|
|
|
|
diesel::sql_query("PRAGMA defer_foreign_keys = ON").execute(&connection).expect("Failed to disable Foreign Key Checks during migrations");
|
|
|
|
|
2019-02-20 20:59:37 +01:00
|
|
|
embedded_migrations::run_with_output(&connection, &mut stdout()).expect("Can't run migrations");
|
2019-02-12 22:47:00 +01:00
|
|
|
}
|
2019-02-20 20:59:37 +01:00
|
|
|
}
|
|
|
|
|
2019-12-06 22:19:07 +01:00
|
|
|
fn launch_rocket(extra_debug: bool) {
|
2020-02-19 06:27:00 +01:00
|
|
|
// Create Rocket object, this stores current log level and sets its own
|
2019-02-20 20:59:37 +01:00
|
|
|
let rocket = rocket::ignite();
|
2019-02-12 22:47:00 +01:00
|
|
|
|
2020-02-19 06:27:00 +01:00
|
|
|
let basepath = &CONFIG.domain_path();
|
|
|
|
|
|
|
|
// If adding more paths here, consider also adding them to
|
2019-12-06 22:19:07 +01:00
|
|
|
// crate::utils::LOGGED_ROUTES to make sure they appear in the log
|
2019-02-20 20:59:37 +01:00
|
|
|
let rocket = rocket
|
2020-02-19 06:27:00 +01:00
|
|
|
.mount(&[basepath, "/"].concat(), api::web_routes())
|
|
|
|
.mount(&[basepath, "/api"].concat(), api::core_routes())
|
|
|
|
.mount(&[basepath, "/admin"].concat(), api::admin_routes())
|
|
|
|
.mount(&[basepath, "/identity"].concat(), api::identity_routes())
|
|
|
|
.mount(&[basepath, "/icons"].concat(), api::icons_routes())
|
|
|
|
.mount(&[basepath, "/notifications"].concat(), api::notifications_routes())
|
2019-02-20 20:59:37 +01:00
|
|
|
.manage(db::init_pool())
|
|
|
|
.manage(api::start_notification_server())
|
2019-09-01 13:00:12 +02:00
|
|
|
.attach(util::AppHeaders())
|
2019-12-06 22:19:07 +01:00
|
|
|
.attach(util::CORS())
|
|
|
|
.attach(util::BetterLogging(extra_debug));
|
2019-02-20 20:59:37 +01:00
|
|
|
|
|
|
|
// Launch and print error if there is one
|
|
|
|
// The launch will restore the original logging level
|
|
|
|
error!("Launch error {:#?}", rocket.launch());
|
2019-01-11 14:18:13 +01:00
|
|
|
}
|