mirror of
https://github.com/dani-garcia/vaultwarden
synced 2024-12-13 17:22:58 +01:00
Merge pull request #621 from swedishborgie/postgresql
Adds support for PostgreSQL, which addresses #87 and is mentioned in #246.

Commit 3a90364b32
18 changed files with 724 additions and 12 deletions
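In short: the database backend is now chosen at compile time through a new postgresql Cargo feature alongside the existing sqlite and mysql features (build.rs rejects enabling more than one). Under #[cfg(feature = "postgresql")] the models replace Diesel's replace_into upserts with insert_into ... on_conflict ... do_update/do_nothing, a dedicated schema and migration set are added under src/db/schemas/postgresql and migrations/postgresql, and two new amd64 Dockerfiles build the server with cargo build --features postgresql --release against libpq.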
12  Cargo.lock (generated)

@@ -116,6 +116,7 @@ dependencies = [
  "num-derive 0.2.5 (registry+https://github.com/rust-lang/crates.io-index)",
  "num-traits 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)",
  "oath 0.10.2 (registry+https://github.com/rust-lang/crates.io-index)",
+ "openssl 0.10.24 (registry+https://github.com/rust-lang/crates.io-index)",
  "percent-encoding 2.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
  "quoted_printable 0.4.1 (registry+https://github.com/rust-lang/crates.io-index)",
  "regex 1.3.1 (registry+https://github.com/rust-lang/crates.io-index)",
@@ -419,11 +420,13 @@ name = "diesel"
 version = "1.4.2"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 dependencies = [
+ "bitflags 1.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
  "byteorder 1.3.2 (registry+https://github.com/rust-lang/crates.io-index)",
  "chrono 0.4.9 (registry+https://github.com/rust-lang/crates.io-index)",
  "diesel_derives 1.4.0 (registry+https://github.com/rust-lang/crates.io-index)",
  "libsqlite3-sys 0.12.0 (registry+https://github.com/rust-lang/crates.io-index)",
  "mysqlclient-sys 0.2.4 (registry+https://github.com/rust-lang/crates.io-index)",
+ "pq-sys 0.4.6 (registry+https://github.com/rust-lang/crates.io-index)",
  "r2d2 0.8.5 (registry+https://github.com/rust-lang/crates.io-index)",
  "url 1.7.2 (registry+https://github.com/rust-lang/crates.io-index)",
 ]
@@ -1649,6 +1652,14 @@ name = "ppv-lite86"
 version = "0.2.5"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 
+[[package]]
+name = "pq-sys"
+version = "0.4.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "vcpkg 0.2.7 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
 [[package]]
 name = "precomputed-hash"
 version = "0.1.1"
@@ -3095,6 +3106,7 @@ dependencies = [
 "checksum phf_shared 0.7.24 (registry+https://github.com/rust-lang/crates.io-index)" = "234f71a15de2288bcb7e3b6515828d22af7ec8598ee6d24c3b526fa0a80b67a0"
 "checksum pkg-config 0.3.15 (registry+https://github.com/rust-lang/crates.io-index)" = "a7c1d2cfa5a714db3b5f24f0915e74fcdf91d09d496ba61329705dda7774d2af"
 "checksum ppv-lite86 0.2.5 (registry+https://github.com/rust-lang/crates.io-index)" = "e3cbf9f658cdb5000fcf6f362b8ea2ba154b9f146a61c7a20d647034c6b6561b"
+"checksum pq-sys 0.4.6 (registry+https://github.com/rust-lang/crates.io-index)" = "6ac25eee5a0582f45a67e837e350d784e7003bd29a5f460796772061ca49ffda"
 "checksum precomputed-hash 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "925383efa346730478fb4838dbe9137d2a47675ad789c546d150a6e1dd4ab31c"
 "checksum proc-macro-hack 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)" = "463bf29e7f11344e58c9e01f171470ab15c925c6822ad75028cc1c0e1d1eb63b"
 "checksum proc-macro-hack-impl 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)" = "38c47dcb1594802de8c02f3b899e2018c78291168a22c281be21ea0fb4796842"
Cargo.toml

@@ -14,6 +14,7 @@ build = "build.rs"
 # Empty to keep compatibility, prefer to set USE_SYSLOG=true
 enable_syslog = []
 mysql = ["diesel/mysql", "diesel_migrations/mysql"]
+postgresql = ["diesel/postgres", "diesel_migrations/postgres", "openssl"]
 sqlite = ["diesel/sqlite", "diesel_migrations/sqlite", "libsqlite3-sys"]
 
 [target."cfg(not(windows))".dependencies]
@@ -105,6 +106,9 @@ handlebars = "2.0.2"
 soup = "0.4.1"
 regex = "1.3.1"
 
+# Required for SSL support for PostgreSQL
+openssl = { version = "0.10.24", optional = true }
+
 # URL encoding library
 percent-encoding = "2.1.0"
 
8  build.rs

@@ -2,9 +2,13 @@ use std::process::Command;
 
 fn main() {
     #[cfg(all(feature = "sqlite", feature = "mysql"))]
-    compile_error!("Can't enable both backends");
+    compile_error!("Can't enable both sqlite and mysql at the same time");
+    #[cfg(all(feature = "sqlite", feature = "postgresql"))]
+    compile_error!("Can't enable both sqlite and postgresql at the same time");
+    #[cfg(all(feature = "mysql", feature = "postgresql"))]
+    compile_error!("Can't enable both mysql and postgresql at the same time");
 
-    #[cfg(not(any(feature = "sqlite", feature = "mysql")))]
+    #[cfg(not(any(feature = "sqlite", feature = "mysql", feature = "postgresql")))]
     compile_error!("You need to enable one DB backend. To build with previous defaults do: cargo build --features sqlite");
 
     read_git_info().ok();
103  docker/amd64/postgresql/Dockerfile (new file)

@@ -0,0 +1,103 @@
+# Using multistage build:
+# 	https://docs.docker.com/develop/develop-images/multistage-build/
+# 	https://whitfin.io/speeding-up-rust-docker-builds/
+####################### VAULT BUILD IMAGE #######################
+FROM alpine:3.10 as vault
+
+ENV VAULT_VERSION "v2.12.0"
+
+ENV URL "https://github.com/dani-garcia/bw_web_builds/releases/download/$VAULT_VERSION/bw_web_$VAULT_VERSION.tar.gz"
+
+RUN apk add --no-cache --upgrade \
+    curl \
+    tar
+
+RUN mkdir /web-vault
+WORKDIR /web-vault
+
+SHELL ["/bin/ash", "-eo", "pipefail", "-c"]
+
+RUN curl -L $URL | tar xz
+RUN ls
+
+########################## BUILD IMAGE ##########################
+# We need to use the Rust build image, because
+# we need the Rust compiler and Cargo tooling
+FROM rust:1.36 as build
+
+# set postgresql backend
+ARG DB=postgresql
+
+# Using bundled SQLite, no need to install it
+# RUN apt-get update && apt-get install -y\
+#    --no-install-recommends \
+#    sqlite3\
+#    && rm -rf /var/lib/apt/lists/*
+
+# Install the PostgreSQL client library (libpq)
+RUN apt-get update && apt-get install -y \
+    --no-install-recommends \
+    libpq-dev \
+    && rm -rf /var/lib/apt/lists/*
+
+# Creates a dummy project used to grab dependencies
+RUN USER=root cargo new --bin app
+WORKDIR /app
+
+# Copies over *only* your manifests and build files
+COPY ./Cargo.* ./
+COPY ./rust-toolchain ./rust-toolchain
+COPY ./build.rs ./build.rs
+
+# Builds your dependencies and removes the
+# dummy project, except the target folder
+# This folder contains the compiled dependencies
+RUN cargo build --features ${DB} --release
+RUN find . -not -path "./target*" -delete
+
+# Copies the complete project
+# To avoid copying unneeded files, use .dockerignore
+COPY . .
+
+# Make sure that we actually build the project
+RUN touch src/main.rs
+
+# Builds again, this time it'll just be
+# your actual source files being built
+RUN cargo build --features ${DB} --release
+
+######################## RUNTIME IMAGE ########################
+# Create a new stage with a minimal image
+# because we already have a binary built
+FROM debian:stretch-slim
+
+ENV ROCKET_ENV "staging"
+ENV ROCKET_PORT=80
+ENV ROCKET_WORKERS=10
+
+# Install needed libraries
+RUN apt-get update && apt-get install -y \
+    --no-install-recommends \
+    openssl \
+    ca-certificates \
+    curl \
+    libpq5 \
+    && rm -rf /var/lib/apt/lists/*
+
+RUN mkdir /data
+VOLUME /data
+EXPOSE 80
+EXPOSE 3012
+
+# Copies the files from the context (Rocket.toml file and web-vault)
+# and the binary from the "build" stage to the current stage
+COPY Rocket.toml .
+COPY --from=vault /web-vault ./web-vault
+COPY --from=build app/target/release/bitwarden_rs .
+
+COPY docker/healthcheck.sh ./healthcheck.sh
+
+HEALTHCHECK --interval=30s --timeout=3s CMD sh healthcheck.sh || exit 1
+
+# Configures the startup!
+CMD ["./bitwarden_rs"]
85  docker/amd64/postgresql/Dockerfile.alpine (new file)

@@ -0,0 +1,85 @@
+# Using multistage build:
+# 	https://docs.docker.com/develop/develop-images/multistage-build/
+# 	https://whitfin.io/speeding-up-rust-docker-builds/
+####################### VAULT BUILD IMAGE #######################
+FROM alpine:3.10 as vault
+
+ENV VAULT_VERSION "v2.12.0"
+
+ENV URL "https://github.com/dani-garcia/bw_web_builds/releases/download/$VAULT_VERSION/bw_web_$VAULT_VERSION.tar.gz"
+
+RUN apk add --no-cache --upgrade \
+    curl \
+    tar
+
+RUN mkdir /web-vault
+WORKDIR /web-vault
+
+SHELL ["/bin/ash", "-eo", "pipefail", "-c"]
+
+RUN curl -L $URL | tar xz
+RUN ls
+
+########################## BUILD IMAGE ##########################
+# Musl build image for statically compiled binary
+FROM clux/muslrust:nightly-2019-07-08 as build
+
+# set postgresql backend
+ARG DB=postgresql
+
+ENV USER "root"
+
+# Install needed libraries
+RUN apt-get update && apt-get install -y \
+    --no-install-recommends \
+    libpq-dev \
+    && rm -rf /var/lib/apt/lists/*
+
+WORKDIR /app
+
+# Copies the complete project
+# To avoid copying unneeded files, use .dockerignore
+COPY . .
+
+RUN rustup target add x86_64-unknown-linux-musl
+
+# Make sure that we actually build the project
+RUN touch src/main.rs
+
+# Build
+RUN cargo build --features ${DB} --release
+
+######################## RUNTIME IMAGE ########################
+# Create a new stage with a minimal image
+# because we already have a binary built
+FROM alpine:3.10
+
+ENV ROCKET_ENV "staging"
+ENV ROCKET_PORT=80
+ENV ROCKET_WORKERS=10
+ENV SSL_CERT_DIR=/etc/ssl/certs
+
+# Install needed libraries
+RUN apk add --no-cache \
+    openssl \
+    postgresql-libs \
+    curl \
+    ca-certificates
+
+RUN mkdir /data
+VOLUME /data
+EXPOSE 80
+EXPOSE 3012
+
+# Copies the files from the context (Rocket.toml file and web-vault)
+# and the binary from the "build" stage to the current stage
+COPY Rocket.toml .
+COPY --from=vault /web-vault ./web-vault
+COPY --from=build /app/target/x86_64-unknown-linux-musl/release/bitwarden_rs .
+
+COPY docker/healthcheck.sh ./healthcheck.sh
+
+HEALTHCHECK --interval=30s --timeout=3s CMD sh healthcheck.sh || exit 1
+
+# Configures the startup!
+CMD ["./bitwarden_rs"]
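Both Dockerfiles expect the repository root as the build context, since COPY . . and docker/healthcheck.sh are resolved from there; a manual build would look something like docker build -t bitwarden_rs:postgresql -f docker/amd64/postgresql/Dockerfile . (the image tag here is only an assumed example, not something defined in this diff).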
13  migrations/postgresql/2019-09-12-100000_create_tables/down.sql (new file)

@@ -0,0 +1,13 @@
+DROP TABLE devices;
+DROP TABLE attachments;
+DROP TABLE users_collections;
+DROP TABLE users_organizations;
+DROP TABLE folders_ciphers;
+DROP TABLE ciphers_collections;
+DROP TABLE twofactor;
+DROP TABLE invitations;
+DROP TABLE collections;
+DROP TABLE folders;
+DROP TABLE ciphers;
+DROP TABLE users;
+DROP TABLE organizations;
121  migrations/postgresql/2019-09-12-100000_create_tables/up.sql (new file)

@@ -0,0 +1,121 @@
+CREATE TABLE users (
+    uuid CHAR(36) NOT NULL PRIMARY KEY,
+    created_at TIMESTAMP NOT NULL,
+    updated_at TIMESTAMP NOT NULL,
+    email VARCHAR(255) NOT NULL UNIQUE,
+    name TEXT NOT NULL,
+    password_hash BYTEA NOT NULL,
+    salt BYTEA NOT NULL,
+    password_iterations INTEGER NOT NULL,
+    password_hint TEXT,
+    akey TEXT NOT NULL,
+    private_key TEXT,
+    public_key TEXT,
+    totp_secret TEXT,
+    totp_recover TEXT,
+    security_stamp TEXT NOT NULL,
+    equivalent_domains TEXT NOT NULL,
+    excluded_globals TEXT NOT NULL,
+    client_kdf_type INTEGER NOT NULL DEFAULT 0,
+    client_kdf_iter INTEGER NOT NULL DEFAULT 100000
+);
+
+CREATE TABLE devices (
+    uuid CHAR(36) NOT NULL PRIMARY KEY,
+    created_at TIMESTAMP NOT NULL,
+    updated_at TIMESTAMP NOT NULL,
+    user_uuid CHAR(36) NOT NULL REFERENCES users (uuid),
+    name TEXT NOT NULL,
+    atype INTEGER NOT NULL,
+    push_token TEXT,
+    refresh_token TEXT NOT NULL,
+    twofactor_remember TEXT
+);
+
+CREATE TABLE organizations (
+    uuid VARCHAR(40) NOT NULL PRIMARY KEY,
+    name TEXT NOT NULL,
+    billing_email TEXT NOT NULL
+);
+
+CREATE TABLE ciphers (
+    uuid CHAR(36) NOT NULL PRIMARY KEY,
+    created_at TIMESTAMP NOT NULL,
+    updated_at TIMESTAMP NOT NULL,
+    user_uuid CHAR(36) REFERENCES users (uuid),
+    organization_uuid CHAR(36) REFERENCES organizations (uuid),
+    atype INTEGER NOT NULL,
+    name TEXT NOT NULL,
+    notes TEXT,
+    fields TEXT,
+    data TEXT NOT NULL,
+    favorite BOOLEAN NOT NULL,
+    password_history TEXT
+);
+
+CREATE TABLE attachments (
+    id CHAR(36) NOT NULL PRIMARY KEY,
+    cipher_uuid CHAR(36) NOT NULL REFERENCES ciphers (uuid),
+    file_name TEXT NOT NULL,
+    file_size INTEGER NOT NULL,
+    akey TEXT
+);
+
+CREATE TABLE folders (
+    uuid CHAR(36) NOT NULL PRIMARY KEY,
+    created_at TIMESTAMP NOT NULL,
+    updated_at TIMESTAMP NOT NULL,
+    user_uuid CHAR(36) NOT NULL REFERENCES users (uuid),
+    name TEXT NOT NULL
+);
+
+CREATE TABLE collections (
+    uuid VARCHAR(40) NOT NULL PRIMARY KEY,
+    org_uuid VARCHAR(40) NOT NULL REFERENCES organizations (uuid),
+    name TEXT NOT NULL
+);
+
+CREATE TABLE users_collections (
+    user_uuid CHAR(36) NOT NULL REFERENCES users (uuid),
+    collection_uuid CHAR(36) NOT NULL REFERENCES collections (uuid),
+    read_only BOOLEAN NOT NULL DEFAULT false,
+    PRIMARY KEY (user_uuid, collection_uuid)
+);
+
+CREATE TABLE users_organizations (
+    uuid CHAR(36) NOT NULL PRIMARY KEY,
+    user_uuid CHAR(36) NOT NULL REFERENCES users (uuid),
+    org_uuid CHAR(36) NOT NULL REFERENCES organizations (uuid),
+
+    access_all BOOLEAN NOT NULL,
+    akey TEXT NOT NULL,
+    status INTEGER NOT NULL,
+    atype INTEGER NOT NULL,
+
+    UNIQUE (user_uuid, org_uuid)
+);
+
+CREATE TABLE folders_ciphers (
+    cipher_uuid CHAR(36) NOT NULL REFERENCES ciphers (uuid),
+    folder_uuid CHAR(36) NOT NULL REFERENCES folders (uuid),
+    PRIMARY KEY (cipher_uuid, folder_uuid)
+);
+
+CREATE TABLE ciphers_collections (
+    cipher_uuid CHAR(36) NOT NULL REFERENCES ciphers (uuid),
+    collection_uuid CHAR(36) NOT NULL REFERENCES collections (uuid),
+    PRIMARY KEY (cipher_uuid, collection_uuid)
+);
+
+CREATE TABLE twofactor (
+    uuid CHAR(36) NOT NULL PRIMARY KEY,
+    user_uuid CHAR(36) NOT NULL REFERENCES users (uuid),
+    atype INTEGER NOT NULL,
+    enabled BOOLEAN NOT NULL,
+    data TEXT NOT NULL,
+    UNIQUE (user_uuid, atype)
+);
+
+CREATE TABLE invitations (
+    email VARCHAR(255) NOT NULL PRIMARY KEY
+);
src/db.rs

@@ -19,6 +19,8 @@ use crate::CONFIG;
 type Connection = diesel::sqlite::SqliteConnection;
 #[cfg(feature = "mysql")]
 type Connection = diesel::mysql::MysqlConnection;
+#[cfg(feature = "postgresql")]
+type Connection = diesel::pg::PgConnection;
 
 /// An alias to the type for a pool of Diesel connections.
 type Pool = r2d2::Pool<ConnectionManager<Connection>>;
@@ -33,6 +35,9 @@ pub mod schema;
 #[cfg(feature = "mysql")]
 #[path = "schemas/mysql/schema.rs"]
 pub mod schema;
+#[cfg(feature = "postgresql")]
+#[path = "schemas/postgresql/schema.rs"]
+pub mod schema;
 
 /// Initializes a database pool.
 pub fn init_pool() -> Pool {
src/db/models/attachment.rs

@@ -3,7 +3,7 @@ use serde_json::Value;
 use super::Cipher;
 use crate::CONFIG;
 
-#[derive(Debug, Identifiable, Queryable, Insertable, Associations)]
+#[derive(Debug, Identifiable, Queryable, Insertable, Associations, AsChangeset)]
 #[table_name = "attachments"]
 #[belongs_to(Cipher, foreign_key = "cipher_uuid")]
 #[primary_key(id)]
@@ -59,8 +59,20 @@ use crate::error::MapResult;
 
 /// Database methods
 impl Attachment {
+    #[cfg(feature = "postgresql")]
     pub fn save(&self, conn: &DbConn) -> EmptyResult {
-        diesel::replace_into(attachments::table)
+        return diesel::insert_into(attachments::table)
+            .values(self)
+            .on_conflict(attachments::id)
+            .do_update()
+            .set(self)
+            .execute(&**conn)
+            .map_res("Error saving attachment")
+    }
+
+    #[cfg(not(feature = "postgresql"))]
+    pub fn save(&self, conn: &DbConn) -> EmptyResult {
+        return diesel::replace_into(attachments::table)
            .values(self)
            .execute(&**conn)
            .map_res("Error saving attachment")
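This pair of cfg-gated save methods is the pattern repeated in every model below: SQLite and MySQL keep Diesel's replace_into (PostgreSQL has no REPLACE INTO), while the postgresql build uses insert_into ... on_conflict ... do_update, or do_nothing for pure link tables. A minimal, self-contained sketch of that upsert, shown against a hypothetical items table rather than a table from this diff, and without vaultwarden's DbConn wrapper or map_res error mapping:

// Sketch only: Diesel 1.4-style PostgreSQL upsert on a hypothetical `items` table.
#[macro_use]
extern crate diesel;

use diesel::pg::PgConnection;
use diesel::prelude::*;

table! {
    items (id) {
        id -> Text,
        name -> Text,
    }
}

#[derive(Insertable, AsChangeset)]
#[table_name = "items"]
struct Item {
    id: String,
    name: String,
}

// Insert the row, or update the existing row that has the same primary key.
fn upsert(conn: &PgConnection, item: &Item) -> diesel::QueryResult<usize> {
    diesel::insert_into(items::table)
        .values(item)
        .on_conflict(items::id)
        .do_update()
        .set(item)
        .execute(conn)
}

fn main() {
    // The connection URL is an assumed example value.
    let conn = PgConnection::establish("postgres://user:password@localhost/bitwarden")
        .expect("Error connecting to database");

    let item = Item { id: "abc".into(), name: "example".into() };
    upsert(&conn, &item).expect("upsert failed");
}

The AsChangeset derive added to each model in this PR is what makes the .set(self) / .set(&*self) calls in the new postgresql save() methods compile.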
src/db/models/cipher.rs

@@ -5,7 +5,7 @@ use super::{
     Attachment, CollectionCipher, FolderCipher, Organization, User, UserOrgStatus, UserOrgType, UserOrganization,
 };
 
-#[derive(Debug, Identifiable, Queryable, Insertable, Associations)]
+#[derive(Debug, Identifiable, Queryable, Insertable, Associations, AsChangeset)]
 #[table_name = "ciphers"]
 #[belongs_to(User, foreign_key = "user_uuid")]
 #[belongs_to(Organization, foreign_key = "organization_uuid")]
@@ -148,6 +148,21 @@ impl Cipher {
         user_uuids
     }
 
+    #[cfg(feature = "postgresql")]
+    pub fn save(&mut self, conn: &DbConn) -> EmptyResult {
+        self.update_users_revision(conn);
+        self.updated_at = Utc::now().naive_utc();
+
+        diesel::insert_into(ciphers::table)
+            .values(&*self)
+            .on_conflict(ciphers::uuid)
+            .do_update()
+            .set(&*self)
+            .execute(&**conn)
+            .map_res("Error saving cipher")
+    }
+
+    #[cfg(not(feature = "postgresql"))]
     pub fn save(&mut self, conn: &DbConn) -> EmptyResult {
         self.update_users_revision(conn);
         self.updated_at = Utc::now().naive_utc();
src/db/models/collection.rs

@@ -2,7 +2,7 @@ use serde_json::Value;
 
 use super::{Organization, UserOrgStatus, UserOrgType, UserOrganization};
 
-#[derive(Debug, Identifiable, Queryable, Insertable, Associations)]
+#[derive(Debug, Identifiable, Queryable, Insertable, Associations, AsChangeset)]
 #[table_name = "collections"]
 #[belongs_to(Organization, foreign_key = "org_uuid")]
 #[primary_key(uuid)]
@@ -43,6 +43,20 @@ use crate::error::MapResult;
 
 /// Database methods
 impl Collection {
+    #[cfg(feature = "postgresql")]
+    pub fn save(&self, conn: &DbConn) -> EmptyResult {
+        self.update_users_revision(conn);
+
+        diesel::insert_into(collections::table)
+            .values(self)
+            .on_conflict(collections::uuid)
+            .do_update()
+            .set(self)
+            .execute(&**conn)
+            .map_res("Error saving collection")
+    }
+
+    #[cfg(not(feature = "postgresql"))]
     pub fn save(&self, conn: &DbConn) -> EmptyResult {
         self.update_users_revision(conn);
 
@@ -200,6 +214,24 @@ impl CollectionUser {
             .expect("Error loading users_collections")
     }
 
+    #[cfg(feature = "postgresql")]
+    pub fn save(user_uuid: &str, collection_uuid: &str, read_only: bool, conn: &DbConn) -> EmptyResult {
+        User::update_uuid_revision(&user_uuid, conn);
+
+        diesel::insert_into(users_collections::table)
+            .values((
+                users_collections::user_uuid.eq(user_uuid),
+                users_collections::collection_uuid.eq(collection_uuid),
+                users_collections::read_only.eq(read_only),
+            ))
+            .on_conflict((users_collections::user_uuid, users_collections::collection_uuid))
+            .do_update()
+            .set(users_collections::read_only.eq(read_only))
+            .execute(&**conn)
+            .map_res("Error adding user to collection")
+    }
+
+    #[cfg(not(feature = "postgresql"))]
     pub fn save(user_uuid: &str, collection_uuid: &str, read_only: bool, conn: &DbConn) -> EmptyResult {
         User::update_uuid_revision(&user_uuid, conn);
 
@@ -277,6 +309,21 @@ pub struct CollectionCipher {
 
 /// Database methods
 impl CollectionCipher {
+    #[cfg(feature = "postgresql")]
+    pub fn save(cipher_uuid: &str, collection_uuid: &str, conn: &DbConn) -> EmptyResult {
+        Self::update_users_revision(&collection_uuid, conn);
+        diesel::insert_into(ciphers_collections::table)
+            .values((
+                ciphers_collections::cipher_uuid.eq(cipher_uuid),
+                ciphers_collections::collection_uuid.eq(collection_uuid),
+            ))
+            .on_conflict((ciphers_collections::cipher_uuid, ciphers_collections::collection_uuid))
+            .do_nothing()
+            .execute(&**conn)
+            .map_res("Error adding cipher to collection")
+    }
+
+    #[cfg(not(feature = "postgresql"))]
     pub fn save(cipher_uuid: &str, collection_uuid: &str, conn: &DbConn) -> EmptyResult {
         Self::update_users_revision(&collection_uuid, conn);
         diesel::replace_into(ciphers_collections::table)
src/db/models/device.rs

@@ -2,7 +2,7 @@ use chrono::{NaiveDateTime, Utc};
 
 use super::User;
 
-#[derive(Debug, Identifiable, Queryable, Insertable, Associations)]
+#[derive(Debug, Identifiable, Queryable, Insertable, Associations, AsChangeset)]
 #[table_name = "devices"]
 #[belongs_to(User, foreign_key = "user_uuid")]
 #[primary_key(uuid)]
@@ -114,6 +114,18 @@ use crate::error::MapResult;
 
 /// Database methods
 impl Device {
+    #[cfg(feature = "postgresql")]
+    pub fn save(&mut self, conn: &DbConn) -> EmptyResult {
+        self.updated_at = Utc::now().naive_utc();
+
+        crate::util::retry(
+            || diesel::insert_into(devices::table).values(&*self).on_conflict(devices::uuid).do_update().set(&*self).execute(&**conn),
+            10,
+        )
+        .map_res("Error saving device")
+    }
+
+    #[cfg(not(feature = "postgresql"))]
     pub fn save(&mut self, conn: &DbConn) -> EmptyResult {
         self.updated_at = Utc::now().naive_utc();
 
src/db/models/folder.rs

@@ -3,7 +3,7 @@ use serde_json::Value;
 
 use super::{Cipher, User};
 
-#[derive(Debug, Identifiable, Queryable, Insertable, Associations)]
+#[derive(Debug, Identifiable, Queryable, Insertable, Associations, AsChangeset)]
 #[table_name = "folders"]
 #[belongs_to(User, foreign_key = "user_uuid")]
 #[primary_key(uuid)]
@@ -71,6 +71,21 @@ use crate::error::MapResult;
 
 /// Database methods
 impl Folder {
+    #[cfg(feature = "postgresql")]
+    pub fn save(&mut self, conn: &DbConn) -> EmptyResult {
+        User::update_uuid_revision(&self.user_uuid, conn);
+        self.updated_at = Utc::now().naive_utc();
+
+        diesel::insert_into(folders::table)
+            .values(&*self)
+            .on_conflict(folders::uuid)
+            .do_update()
+            .set(&*self)
+            .execute(&**conn)
+            .map_res("Error saving folder")
+    }
+
+    #[cfg(not(feature = "postgresql"))]
     pub fn save(&mut self, conn: &DbConn) -> EmptyResult {
         User::update_uuid_revision(&self.user_uuid, conn);
         self.updated_at = Utc::now().naive_utc();
@@ -113,6 +128,17 @@ impl Folder {
 }
 
 impl FolderCipher {
+    #[cfg(feature = "postgresql")]
+    pub fn save(&self, conn: &DbConn) -> EmptyResult {
+        diesel::insert_into(folders_ciphers::table)
+            .values(&*self)
+            .on_conflict((folders_ciphers::cipher_uuid, folders_ciphers::folder_uuid))
+            .do_nothing()
+            .execute(&**conn)
+            .map_res("Error adding cipher to folder")
+    }
+
+    #[cfg(not(feature = "postgresql"))]
     pub fn save(&self, conn: &DbConn) -> EmptyResult {
         diesel::replace_into(folders_ciphers::table)
             .values(&*self)
src/db/models/organization.rs

@@ -3,7 +3,7 @@ use std::cmp::Ordering;
 
 use super::{CollectionUser, User};
 
-#[derive(Debug, Identifiable, Queryable, Insertable)]
+#[derive(Debug, Identifiable, Queryable, Insertable, AsChangeset)]
 #[table_name = "organizations"]
 #[primary_key(uuid)]
 pub struct Organization {
@@ -12,7 +12,7 @@ pub struct Organization {
     pub billing_email: String,
 }
 
-#[derive(Debug, Identifiable, Queryable, Insertable)]
+#[derive(Debug, Identifiable, Queryable, Insertable, AsChangeset)]
 #[table_name = "users_organizations"]
 #[primary_key(uuid)]
 pub struct UserOrganization {
@@ -213,6 +213,24 @@ use crate::error::MapResult;
 
 /// Database methods
 impl Organization {
+    #[cfg(feature = "postgresql")]
+    pub fn save(&self, conn: &DbConn) -> EmptyResult {
+        UserOrganization::find_by_org(&self.uuid, conn)
+            .iter()
+            .for_each(|user_org| {
+                User::update_uuid_revision(&user_org.user_uuid, conn);
+            });
+
+        diesel::insert_into(organizations::table)
+            .values(self)
+            .on_conflict(organizations::uuid)
+            .do_update()
+            .set(self)
+            .execute(&**conn)
+            .map_res("Error saving organization")
+    }
+
+    #[cfg(not(feature = "postgresql"))]
     pub fn save(&self, conn: &DbConn) -> EmptyResult {
         UserOrganization::find_by_org(&self.uuid, conn)
             .iter()
@@ -323,6 +341,20 @@ impl UserOrganization {
         })
     }
 
+    #[cfg(feature = "postgresql")]
+    pub fn save(&self, conn: &DbConn) -> EmptyResult {
+        User::update_uuid_revision(&self.user_uuid, conn);
+
+        diesel::insert_into(users_organizations::table)
+            .values(self)
+            .on_conflict(users_organizations::uuid)
+            .do_update()
+            .set(self)
+            .execute(&**conn)
+            .map_res("Error adding user to organization")
+    }
+
+    #[cfg(not(feature = "postgresql"))]
     pub fn save(&self, conn: &DbConn) -> EmptyResult {
         User::update_uuid_revision(&self.user_uuid, conn);
 
src/db/models/two_factor.rs

@@ -9,7 +9,7 @@ use crate::error::MapResult;
 
 use super::User;
 
-#[derive(Debug, Identifiable, Queryable, Insertable, Associations)]
+#[derive(Debug, Identifiable, Queryable, Insertable, Associations, AsChangeset)]
 #[table_name = "twofactor"]
 #[belongs_to(User, foreign_key = "user_uuid")]
 #[primary_key(uuid)]
@@ -69,6 +69,18 @@ impl TwoFactor {
 
 /// Database methods
 impl TwoFactor {
+    #[cfg(feature = "postgresql")]
+    pub fn save(&self, conn: &DbConn) -> EmptyResult {
+        diesel::insert_into(twofactor::table)
+            .values(self)
+            .on_conflict(twofactor::uuid)
+            .do_update()
+            .set(self)
+            .execute(&**conn)
+            .map_res("Error saving twofactor")
+    }
+
+    #[cfg(not(feature = "postgresql"))]
     pub fn save(&self, conn: &DbConn) -> EmptyResult {
         diesel::replace_into(twofactor::table)
             .values(self)
src/db/models/user.rs

@@ -4,7 +4,7 @@ use serde_json::Value;
 use crate::crypto;
 use crate::CONFIG;
 
-#[derive(Debug, Identifiable, Queryable, Insertable)]
+#[derive(Debug, Identifiable, Queryable, Insertable, AsChangeset)]
 #[table_name = "users"]
 #[primary_key(uuid)]
 pub struct User {
@@ -148,6 +148,24 @@ impl User {
         })
     }
 
+    #[cfg(feature = "postgresql")]
+    pub fn save(&mut self, conn: &DbConn) -> EmptyResult {
+        if self.email.trim().is_empty() {
+            err!("User email can't be empty")
+        }
+
+        self.updated_at = Utc::now().naive_utc();
+
+        diesel::insert_into(users::table) // Insert or update
+            .values(&*self)
+            .on_conflict(users::uuid)
+            .do_update()
+            .set(&*self)
+            .execute(&**conn)
+            .map_res("Error saving user")
+    }
+
+    #[cfg(not(feature = "postgresql"))]
     pub fn save(&mut self, conn: &DbConn) -> EmptyResult {
         if self.email.trim().is_empty() {
             err!("User email can't be empty")
@@ -250,6 +268,21 @@ impl Invitation {
         Self { email }
     }
 
+    #[cfg(feature = "postgresql")]
+    pub fn save(&self, conn: &DbConn) -> EmptyResult {
+        if self.email.trim().is_empty() {
+            err!("Invitation email can't be empty")
+        }
+
+        diesel::insert_into(invitations::table)
+            .values(self)
+            .on_conflict(invitations::email)
+            .do_nothing()
+            .execute(&**conn)
+            .map_res("Error saving invitation")
+    }
+
+    #[cfg(not(feature = "postgresql"))]
     pub fn save(&self, conn: &DbConn) -> EmptyResult {
         if self.email.trim().is_empty() {
             err!("Invitation email can't be empty")
172  src/db/schemas/postgresql/schema.rs (new file)

@@ -0,0 +1,172 @@
+table! {
+    attachments (id) {
+        id -> Text,
+        cipher_uuid -> Text,
+        file_name -> Text,
+        file_size -> Integer,
+        akey -> Nullable<Text>,
+    }
+}
+
+table! {
+    ciphers (uuid) {
+        uuid -> Text,
+        created_at -> Timestamp,
+        updated_at -> Timestamp,
+        user_uuid -> Nullable<Text>,
+        organization_uuid -> Nullable<Text>,
+        atype -> Integer,
+        name -> Text,
+        notes -> Nullable<Text>,
+        fields -> Nullable<Text>,
+        data -> Text,
+        favorite -> Bool,
+        password_history -> Nullable<Text>,
+    }
+}
+
+table! {
+    ciphers_collections (cipher_uuid, collection_uuid) {
+        cipher_uuid -> Text,
+        collection_uuid -> Text,
+    }
+}
+
+table! {
+    collections (uuid) {
+        uuid -> Text,
+        org_uuid -> Text,
+        name -> Text,
+    }
+}
+
+table! {
+    devices (uuid) {
+        uuid -> Text,
+        created_at -> Timestamp,
+        updated_at -> Timestamp,
+        user_uuid -> Text,
+        name -> Text,
+        atype -> Integer,
+        push_token -> Nullable<Text>,
+        refresh_token -> Text,
+        twofactor_remember -> Nullable<Text>,
+    }
+}
+
+table! {
+    folders (uuid) {
+        uuid -> Text,
+        created_at -> Timestamp,
+        updated_at -> Timestamp,
+        user_uuid -> Text,
+        name -> Text,
+    }
+}
+
+table! {
+    folders_ciphers (cipher_uuid, folder_uuid) {
+        cipher_uuid -> Text,
+        folder_uuid -> Text,
+    }
+}
+
+table! {
+    invitations (email) {
+        email -> Text,
+    }
+}
+
+table! {
+    organizations (uuid) {
+        uuid -> Text,
+        name -> Text,
+        billing_email -> Text,
+    }
+}
+
+table! {
+    twofactor (uuid) {
+        uuid -> Text,
+        user_uuid -> Text,
+        atype -> Integer,
+        enabled -> Bool,
+        data -> Text,
+    }
+}
+
+table! {
+    users (uuid) {
+        uuid -> Text,
+        created_at -> Timestamp,
+        updated_at -> Timestamp,
+        email -> Text,
+        name -> Text,
+        password_hash -> Binary,
+        salt -> Binary,
+        password_iterations -> Integer,
+        password_hint -> Nullable<Text>,
+        akey -> Text,
+        private_key -> Nullable<Text>,
+        public_key -> Nullable<Text>,
+        totp_secret -> Nullable<Text>,
+        totp_recover -> Nullable<Text>,
+        security_stamp -> Text,
+        equivalent_domains -> Text,
+        excluded_globals -> Text,
+        client_kdf_type -> Integer,
+        client_kdf_iter -> Integer,
+    }
+}
+
+table! {
+    users_collections (user_uuid, collection_uuid) {
+        user_uuid -> Text,
+        collection_uuid -> Text,
+        read_only -> Bool,
+    }
+}
+
+table! {
+    users_organizations (uuid) {
+        uuid -> Text,
+        user_uuid -> Text,
+        org_uuid -> Text,
+        access_all -> Bool,
+        akey -> Text,
+        status -> Integer,
+        atype -> Integer,
+    }
+}
+
+joinable!(attachments -> ciphers (cipher_uuid));
+joinable!(ciphers -> organizations (organization_uuid));
+joinable!(ciphers -> users (user_uuid));
+joinable!(ciphers_collections -> ciphers (cipher_uuid));
+joinable!(ciphers_collections -> collections (collection_uuid));
+joinable!(collections -> organizations (org_uuid));
+joinable!(devices -> users (user_uuid));
+joinable!(folders -> users (user_uuid));
+joinable!(folders_ciphers -> ciphers (cipher_uuid));
+joinable!(folders_ciphers -> folders (folder_uuid));
+joinable!(twofactor -> users (user_uuid));
+joinable!(users_collections -> collections (collection_uuid));
+joinable!(users_collections -> users (user_uuid));
+joinable!(users_organizations -> organizations (org_uuid));
+joinable!(users_organizations -> users (user_uuid));
+
+allow_tables_to_appear_in_same_query!(
+    attachments,
+    ciphers,
+    ciphers_collections,
+    collections,
+    devices,
+    folders,
+    folders_ciphers,
+    invitations,
+    organizations,
+    twofactor,
+    users,
+    users_collections,
+    users_organizations,
+);
src/main.rs

@@ -1,6 +1,8 @@
 #![feature(proc_macro_hygiene, decl_macro, vec_remove_item, try_trait)]
 #![recursion_limit = "256"]
 
+#[cfg(feature = "openssl")]
+extern crate openssl;
 #[macro_use]
 extern crate rocket;
 #[macro_use]
@@ -215,6 +217,8 @@ mod migrations {
     embed_migrations!("migrations/sqlite");
     #[cfg(feature = "mysql")]
     embed_migrations!("migrations/mysql");
+    #[cfg(feature = "postgresql")]
+    embed_migrations!("migrations/postgresql");
 
     pub fn run_migrations() {
         // Make sure the database is up to date (create if it doesn't exist, or run the migrations)
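The body of run_migrations is outside this hunk; for context, with diesel_migrations 1.x each embed_migrations! call above generates an embedded_migrations module whose run function applies any pending migrations to a connection, so the startup path looks roughly like the sketch below (the connection URL is an assumed example, and the macro needs the migrations/postgresql directory to exist at build time):

// Sketch only: running the embedded PostgreSQL migrations at startup.
#[macro_use]
extern crate diesel_migrations;

use diesel::pg::PgConnection;
use diesel::prelude::*;

// Reads the SQL files under migrations/postgresql at compile time and
// generates a module named `embedded_migrations`.
embed_migrations!("migrations/postgresql");

fn main() {
    // The connection URL is an assumed example value.
    let connection = PgConnection::establish("postgres://user:password@localhost/bitwarden")
        .expect("Can't connect to database");

    // Applies any migrations that have not been run yet, in order.
    embedded_migrations::run(&connection).expect("Error running migrations");
}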