Merge branch 'master' of github.com:dani-garcia/bitwarden_rs into 2fa_enforcement
# Conflicts: # src/db/models/org_policy.rs # src/db/models/organization.rs
This commit is contained in:
commit
2421d49d9a
|
@ -89,7 +89,7 @@ jobs:
|
||||||
with:
|
with:
|
||||||
profile: minimal
|
profile: minimal
|
||||||
target: ${{ matrix.target-triple }}
|
target: ${{ matrix.target-triple }}
|
||||||
components: clippy
|
components: clippy, rustfmt
|
||||||
# End Uses the rust-toolchain file to determine version
|
# End Uses the rust-toolchain file to determine version
|
||||||
|
|
||||||
|
|
||||||
|
@ -111,6 +111,15 @@ jobs:
|
||||||
# End Run cargo clippy
|
# End Run cargo clippy
|
||||||
|
|
||||||
|
|
||||||
|
# Run cargo fmt
|
||||||
|
- name: '`cargo fmt`'
|
||||||
|
uses: actions-rs/cargo@v1
|
||||||
|
with:
|
||||||
|
command: fmt
|
||||||
|
args: --all -- --check
|
||||||
|
# End Run cargo fmt
|
||||||
|
|
||||||
|
|
||||||
# Build the binary
|
# Build the binary
|
||||||
- name: '`cargo build --release --features ${{ matrix.features }} --target ${{ matrix.target-triple }}`'
|
- name: '`cargo build --release --features ${{ matrix.features }} --target ${{ matrix.target-triple }}`'
|
||||||
uses: actions-rs/cargo@v1
|
uses: actions-rs/cargo@v1
|
||||||
|
|
|
@ -471,9 +471,9 @@ version = "0.3.0"
|
||||||
source = "git+https://github.com/SergioBenitez/Devise.git?rev=e58b3ac9a#e58b3ac9afc3b6ff10a8aaf02a3e768a8f530089"
|
source = "git+https://github.com/SergioBenitez/Devise.git?rev=e58b3ac9a#e58b3ac9afc3b6ff10a8aaf02a3e768a8f530089"
|
||||||
dependencies = [
|
dependencies = [
|
||||||
"bitflags",
|
"bitflags",
|
||||||
"proc-macro2 1.0.24",
|
"proc-macro2 1.0.26",
|
||||||
"quote 1.0.9",
|
"quote 1.0.9",
|
||||||
"syn 1.0.65",
|
"syn 1.0.69",
|
||||||
]
|
]
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
|
@ -499,9 +499,9 @@ version = "1.4.1"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "45f5098f628d02a7a0f68ddba586fb61e80edec3bdc1be3b921f4ceec60858d3"
|
checksum = "45f5098f628d02a7a0f68ddba586fb61e80edec3bdc1be3b921f4ceec60858d3"
|
||||||
dependencies = [
|
dependencies = [
|
||||||
"proc-macro2 1.0.24",
|
"proc-macro2 1.0.26",
|
||||||
"quote 1.0.9",
|
"quote 1.0.9",
|
||||||
"syn 1.0.65",
|
"syn 1.0.69",
|
||||||
]
|
]
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
|
@ -563,6 +563,12 @@ version = "0.15.0"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "77c90badedccf4105eca100756a0b1289e191f6fcbdadd3cee1d2f614f97da8f"
|
checksum = "77c90badedccf4105eca100756a0b1289e191f6fcbdadd3cee1d2f614f97da8f"
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "either"
|
||||||
|
version = "1.6.1"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "e78d4f1cc4ae33bbfc157ed5d5a5ef3bc29227303d595861deb238fcec4e9457"
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "encoding_rs"
|
name = "encoding_rs"
|
||||||
version = "0.8.28"
|
version = "0.8.28"
|
||||||
|
@ -680,9 +686,9 @@ dependencies = [
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "futures"
|
name = "futures"
|
||||||
version = "0.3.13"
|
version = "0.3.14"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "7f55667319111d593ba876406af7c409c0ebb44dc4be6132a783ccf163ea14c1"
|
checksum = "a9d5813545e459ad3ca1bff9915e9ad7f1a47dc6a91b627ce321d5863b7dd253"
|
||||||
dependencies = [
|
dependencies = [
|
||||||
"futures-channel",
|
"futures-channel",
|
||||||
"futures-core",
|
"futures-core",
|
||||||
|
@ -695,9 +701,9 @@ dependencies = [
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "futures-channel"
|
name = "futures-channel"
|
||||||
version = "0.3.13"
|
version = "0.3.14"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "8c2dd2df839b57db9ab69c2c9d8f3e8c81984781937fe2807dc6dcf3b2ad2939"
|
checksum = "ce79c6a52a299137a6013061e0cf0e688fce5d7f1bc60125f520912fdb29ec25"
|
||||||
dependencies = [
|
dependencies = [
|
||||||
"futures-core",
|
"futures-core",
|
||||||
"futures-sink",
|
"futures-sink",
|
||||||
|
@ -705,15 +711,15 @@ dependencies = [
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "futures-core"
|
name = "futures-core"
|
||||||
version = "0.3.13"
|
version = "0.3.14"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "15496a72fabf0e62bdc3df11a59a3787429221dd0710ba8ef163d6f7a9112c94"
|
checksum = "098cd1c6dda6ca01650f1a37a794245eb73181d0d4d4e955e2f3c37db7af1815"
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "futures-executor"
|
name = "futures-executor"
|
||||||
version = "0.3.13"
|
version = "0.3.14"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "891a4b7b96d84d5940084b2a37632dd65deeae662c114ceaa2c879629c9c0ad1"
|
checksum = "10f6cb7042eda00f0049b1d2080aa4b93442997ee507eb3828e8bd7577f94c9d"
|
||||||
dependencies = [
|
dependencies = [
|
||||||
"futures-core",
|
"futures-core",
|
||||||
"futures-task",
|
"futures-task",
|
||||||
|
@ -722,39 +728,39 @@ dependencies = [
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "futures-io"
|
name = "futures-io"
|
||||||
version = "0.3.13"
|
version = "0.3.14"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "d71c2c65c57704c32f5241c1223167c2c3294fd34ac020c807ddbe6db287ba59"
|
checksum = "365a1a1fb30ea1c03a830fdb2158f5236833ac81fa0ad12fe35b29cddc35cb04"
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "futures-macro"
|
name = "futures-macro"
|
||||||
version = "0.3.13"
|
version = "0.3.14"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "ea405816a5139fb39af82c2beb921d52143f556038378d6db21183a5c37fbfb7"
|
checksum = "668c6733a182cd7deb4f1de7ba3bf2120823835b3bcfbeacf7d2c4a773c1bb8b"
|
||||||
dependencies = [
|
dependencies = [
|
||||||
"proc-macro-hack",
|
"proc-macro-hack",
|
||||||
"proc-macro2 1.0.24",
|
"proc-macro2 1.0.26",
|
||||||
"quote 1.0.9",
|
"quote 1.0.9",
|
||||||
"syn 1.0.65",
|
"syn 1.0.69",
|
||||||
]
|
]
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "futures-sink"
|
name = "futures-sink"
|
||||||
version = "0.3.13"
|
version = "0.3.14"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "85754d98985841b7d4f5e8e6fbfa4a4ac847916893ec511a2917ccd8525b8bb3"
|
checksum = "5c5629433c555de3d82861a7a4e3794a4c40040390907cfbfd7143a92a426c23"
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "futures-task"
|
name = "futures-task"
|
||||||
version = "0.3.13"
|
version = "0.3.14"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "fa189ef211c15ee602667a6fcfe1c1fd9e07d42250d2156382820fba33c9df80"
|
checksum = "ba7aa51095076f3ba6d9a1f702f74bd05ec65f555d70d2033d55ba8d69f581bc"
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "futures-util"
|
name = "futures-util"
|
||||||
version = "0.3.13"
|
version = "0.3.14"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "1812c7ab8aedf8d6f2701a43e1243acdbcc2b36ab26e2ad421eb99ac963d96d1"
|
checksum = "3c144ad54d60f23927f0a6b6d816e4271278b64f005ad65e4e35291d2de9c025"
|
||||||
dependencies = [
|
dependencies = [
|
||||||
"futures-channel",
|
"futures-channel",
|
||||||
"futures-core",
|
"futures-core",
|
||||||
|
@ -923,16 +929,16 @@ dependencies = [
|
||||||
"log 0.4.14",
|
"log 0.4.14",
|
||||||
"mac",
|
"mac",
|
||||||
"markup5ever",
|
"markup5ever",
|
||||||
"proc-macro2 1.0.24",
|
"proc-macro2 1.0.26",
|
||||||
"quote 1.0.9",
|
"quote 1.0.9",
|
||||||
"syn 1.0.65",
|
"syn 1.0.69",
|
||||||
]
|
]
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "http"
|
name = "http"
|
||||||
version = "0.2.3"
|
version = "0.2.4"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "7245cd7449cc792608c3c8a9eaf69bd4eabbabf802713748fd739c98b82f0747"
|
checksum = "527e8c9ac747e28542699a951517aa9a6945af506cd1f2e1b53a576c17b6cc11"
|
||||||
dependencies = [
|
dependencies = [
|
||||||
"bytes 1.0.1",
|
"bytes 1.0.1",
|
||||||
"fnv",
|
"fnv",
|
||||||
|
@ -952,9 +958,9 @@ dependencies = [
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "httparse"
|
name = "httparse"
|
||||||
version = "1.3.5"
|
version = "1.3.6"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "615caabe2c3160b313d52ccc905335f4ed5f10881dd63dc5699d47e90be85691"
|
checksum = "bc35c995b9d93ec174cf9a27d425c7892722101e14993cd227fdb51d70cf9589"
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "httpdate"
|
name = "httpdate"
|
||||||
|
@ -1121,9 +1127,9 @@ dependencies = [
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "js-sys"
|
name = "js-sys"
|
||||||
version = "0.3.49"
|
version = "0.3.50"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "dc15e39392125075f60c95ba416f5381ff6c3a948ff02ab12464715adf56c821"
|
checksum = "2d99f9e3e84b8f67f846ef5b4cbbc3b1c29f6c759fcbce6f01aa0e73d932a24c"
|
||||||
dependencies = [
|
dependencies = [
|
||||||
"wasm-bindgen",
|
"wasm-bindgen",
|
||||||
]
|
]
|
||||||
|
@ -1194,9 +1200,9 @@ dependencies = [
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "libc"
|
name = "libc"
|
||||||
version = "0.2.91"
|
version = "0.2.93"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "8916b1f6ca17130ec6568feccee27c156ad12037880833a3b842a823236502e7"
|
checksum = "9385f66bf6105b241aa65a61cb923ef20efc665cb9f9bb50ac2f0c4b7f378d41"
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "libsqlite3-sys"
|
name = "libsqlite3-sys"
|
||||||
|
@ -1211,9 +1217,9 @@ dependencies = [
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "lock_api"
|
name = "lock_api"
|
||||||
version = "0.4.2"
|
version = "0.4.3"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "dd96ffd135b2fd7b973ac026d28085defbe8983df057ced3eb4f2130b0831312"
|
checksum = "5a3c91c24eae6777794bb1997ad98bbb87daf92890acab859f7eaa4320333176"
|
||||||
dependencies = [
|
dependencies = [
|
||||||
"scopeguard",
|
"scopeguard",
|
||||||
]
|
]
|
||||||
|
@ -1317,9 +1323,9 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "9753f12909fd8d923f75ae5c3258cae1ed3c8ec052e1b38c93c21a6d157f789c"
|
checksum = "9753f12909fd8d923f75ae5c3258cae1ed3c8ec052e1b38c93c21a6d157f789c"
|
||||||
dependencies = [
|
dependencies = [
|
||||||
"migrations_internals",
|
"migrations_internals",
|
||||||
"proc-macro2 1.0.24",
|
"proc-macro2 1.0.26",
|
||||||
"quote 1.0.9",
|
"quote 1.0.9",
|
||||||
"syn 1.0.65",
|
"syn 1.0.69",
|
||||||
]
|
]
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
|
@ -1544,9 +1550,9 @@ version = "0.3.3"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "876a53fff98e03a936a674b29568b0e605f06b29372c2489ff4de23f1949743d"
|
checksum = "876a53fff98e03a936a674b29568b0e605f06b29372c2489ff4de23f1949743d"
|
||||||
dependencies = [
|
dependencies = [
|
||||||
"proc-macro2 1.0.24",
|
"proc-macro2 1.0.26",
|
||||||
"quote 1.0.9",
|
"quote 1.0.9",
|
||||||
"syn 1.0.65",
|
"syn 1.0.69",
|
||||||
]
|
]
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
|
@ -1819,9 +1825,9 @@ checksum = "99b8db626e31e5b81787b9783425769681b347011cc59471e33ea46d2ea0cf55"
|
||||||
dependencies = [
|
dependencies = [
|
||||||
"pest",
|
"pest",
|
||||||
"pest_meta",
|
"pest_meta",
|
||||||
"proc-macro2 1.0.24",
|
"proc-macro2 1.0.26",
|
||||||
"quote 1.0.9",
|
"quote 1.0.9",
|
||||||
"syn 1.0.65",
|
"syn 1.0.69",
|
||||||
]
|
]
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
|
@ -1894,9 +1900,9 @@ version = "1.0.6"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "a490329918e856ed1b083f244e3bfe2d8c4f336407e4ea9e1a9f479ff09049e5"
|
checksum = "a490329918e856ed1b083f244e3bfe2d8c4f336407e4ea9e1a9f479ff09049e5"
|
||||||
dependencies = [
|
dependencies = [
|
||||||
"proc-macro2 1.0.24",
|
"proc-macro2 1.0.26",
|
||||||
"quote 1.0.9",
|
"quote 1.0.9",
|
||||||
"syn 1.0.65",
|
"syn 1.0.69",
|
||||||
]
|
]
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
|
@ -1961,9 +1967,9 @@ dependencies = [
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "proc-macro2"
|
name = "proc-macro2"
|
||||||
version = "1.0.24"
|
version = "1.0.26"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "1e0704ee1a7e00d7bb417d0770ea303c1bccbabf0ef1667dae92b5967f5f8a71"
|
checksum = "a152013215dca273577e18d2bf00fa862b89b24169fb78c4c95aeb07992c9cec"
|
||||||
dependencies = [
|
dependencies = [
|
||||||
"unicode-xid 0.2.1",
|
"unicode-xid 0.2.1",
|
||||||
]
|
]
|
||||||
|
@ -1995,14 +2001,14 @@ version = "1.0.9"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "c3d0b9745dc2debf507c8422de05d7226cc1f0644216dfdfead988f9b1ab32a7"
|
checksum = "c3d0b9745dc2debf507c8422de05d7226cc1f0644216dfdfead988f9b1ab32a7"
|
||||||
dependencies = [
|
dependencies = [
|
||||||
"proc-macro2 1.0.24",
|
"proc-macro2 1.0.26",
|
||||||
]
|
]
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "quoted_printable"
|
name = "quoted_printable"
|
||||||
version = "0.4.2"
|
version = "0.4.3"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "47b080c5db639b292ac79cbd34be0cfc5d36694768d8341109634d90b86930e2"
|
checksum = "1238256b09923649ec89b08104c4dfe9f6cb2fea734a5db5384e44916d59e9c5"
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "r2d2"
|
name = "r2d2"
|
||||||
|
@ -2151,9 +2157,9 @@ dependencies = [
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "redox_syscall"
|
name = "redox_syscall"
|
||||||
version = "0.2.5"
|
version = "0.2.6"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "94341e4e44e24f6b591b59e47a8a027df12e008d73fd5672dbea9cc22f4507d9"
|
checksum = "8270314b5ccceb518e7e578952f0b72b88222d02e8f77f5ecf7abbb673539041"
|
||||||
dependencies = [
|
dependencies = [
|
||||||
"bitflags",
|
"bitflags",
|
||||||
]
|
]
|
||||||
|
@ -2186,9 +2192,9 @@ dependencies = [
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "reqwest"
|
name = "reqwest"
|
||||||
version = "0.11.2"
|
version = "0.11.3"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "bf12057f289428dbf5c591c74bf10392e4a8003f993405a902f20117019022d4"
|
checksum = "2296f2fac53979e8ccbc4a1136b25dcefd37be9ed7e4a1f6b05a6029c84ff124"
|
||||||
dependencies = [
|
dependencies = [
|
||||||
"async-compression",
|
"async-compression",
|
||||||
"base64 0.13.0",
|
"base64 0.13.0",
|
||||||
|
@ -2213,6 +2219,7 @@ dependencies = [
|
||||||
"serde_urlencoded",
|
"serde_urlencoded",
|
||||||
"tokio",
|
"tokio",
|
||||||
"tokio-native-tls",
|
"tokio-native-tls",
|
||||||
|
"tokio-socks",
|
||||||
"tokio-util",
|
"tokio-util",
|
||||||
"url 2.2.1",
|
"url 2.2.1",
|
||||||
"wasm-bindgen",
|
"wasm-bindgen",
|
||||||
|
@ -2401,9 +2408,9 @@ checksum = "d29ab0c6d3fc0ee92fe66e2d99f700eab17a8d57d1c1d3b748380fb20baa78cd"
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "sct"
|
name = "sct"
|
||||||
version = "0.6.0"
|
version = "0.6.1"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "e3042af939fca8c3453b7af0f1c66e533a15a86169e39de2657310ade8f98d3c"
|
checksum = "b362b83898e0e69f38515b82ee15aa80636befe47c3b6d3d89a911e78fc228ce"
|
||||||
dependencies = [
|
dependencies = [
|
||||||
"ring",
|
"ring",
|
||||||
"untrusted",
|
"untrusted",
|
||||||
|
@ -2462,9 +2469,9 @@ version = "1.0.125"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "b093b7a2bb58203b5da3056c05b4ec1fed827dcfdb37347a8841695263b3d06d"
|
checksum = "b093b7a2bb58203b5da3056c05b4ec1fed827dcfdb37347a8841695263b3d06d"
|
||||||
dependencies = [
|
dependencies = [
|
||||||
"proc-macro2 1.0.24",
|
"proc-macro2 1.0.26",
|
||||||
"quote 1.0.9",
|
"quote 1.0.9",
|
||||||
"syn 1.0.65",
|
"syn 1.0.69",
|
||||||
]
|
]
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
|
@ -2642,11 +2649,11 @@ version = "0.5.3"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "c87a60a40fccc84bef0652345bbbbbe20a605bf5d0ce81719fc476f5c03b50ef"
|
checksum = "c87a60a40fccc84bef0652345bbbbbe20a605bf5d0ce81719fc476f5c03b50ef"
|
||||||
dependencies = [
|
dependencies = [
|
||||||
"proc-macro2 1.0.24",
|
"proc-macro2 1.0.26",
|
||||||
"quote 1.0.9",
|
"quote 1.0.9",
|
||||||
"serde",
|
"serde",
|
||||||
"serde_derive",
|
"serde_derive",
|
||||||
"syn 1.0.65",
|
"syn 1.0.69",
|
||||||
]
|
]
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
|
@ -2656,13 +2663,13 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "58fa5ff6ad0d98d1ffa8cb115892b6e69d67799f6763e162a1c9db421dc22e11"
|
checksum = "58fa5ff6ad0d98d1ffa8cb115892b6e69d67799f6763e162a1c9db421dc22e11"
|
||||||
dependencies = [
|
dependencies = [
|
||||||
"base-x",
|
"base-x",
|
||||||
"proc-macro2 1.0.24",
|
"proc-macro2 1.0.26",
|
||||||
"quote 1.0.9",
|
"quote 1.0.9",
|
||||||
"serde",
|
"serde",
|
||||||
"serde_derive",
|
"serde_derive",
|
||||||
"serde_json",
|
"serde_json",
|
||||||
"sha1",
|
"sha1",
|
||||||
"syn 1.0.65",
|
"syn 1.0.69",
|
||||||
]
|
]
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
|
@ -2692,7 +2699,7 @@ checksum = "f24c8e5e19d22a726626f1a5e16fe15b132dcf21d10177fa5a45ce7962996b97"
|
||||||
dependencies = [
|
dependencies = [
|
||||||
"phf_generator",
|
"phf_generator",
|
||||||
"phf_shared",
|
"phf_shared",
|
||||||
"proc-macro2 1.0.24",
|
"proc-macro2 1.0.26",
|
||||||
"quote 1.0.9",
|
"quote 1.0.9",
|
||||||
]
|
]
|
||||||
|
|
||||||
|
@ -2715,11 +2722,11 @@ dependencies = [
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "syn"
|
name = "syn"
|
||||||
version = "1.0.65"
|
version = "1.0.69"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "f3a1d708c221c5a612956ef9f75b37e454e88d1f7b899fbd3a18d4252012d663"
|
checksum = "48fe99c6bd8b1cc636890bcc071842de909d902c81ac7dab53ba33c421ab8ffb"
|
||||||
dependencies = [
|
dependencies = [
|
||||||
"proc-macro2 1.0.24",
|
"proc-macro2 1.0.26",
|
||||||
"quote 1.0.9",
|
"quote 1.0.9",
|
||||||
"unicode-xid 0.2.1",
|
"unicode-xid 0.2.1",
|
||||||
]
|
]
|
||||||
|
@ -2767,6 +2774,26 @@ dependencies = [
|
||||||
"utf-8",
|
"utf-8",
|
||||||
]
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "thiserror"
|
||||||
|
version = "1.0.24"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "e0f4a65597094d4483ddaed134f409b2cb7c1beccf25201a9f73c719254fa98e"
|
||||||
|
dependencies = [
|
||||||
|
"thiserror-impl",
|
||||||
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "thiserror-impl"
|
||||||
|
version = "1.0.24"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "7765189610d8241a44529806d6fd1f2e0a08734313a35d5b3a556f92b381f3c0"
|
||||||
|
dependencies = [
|
||||||
|
"proc-macro2 1.0.26",
|
||||||
|
"quote 1.0.9",
|
||||||
|
"syn 1.0.69",
|
||||||
|
]
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "threadpool"
|
name = "threadpool"
|
||||||
version = "1.8.1"
|
version = "1.8.1"
|
||||||
|
@ -2819,17 +2846,17 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "e5c3be1edfad6027c69f5491cf4cb310d1a71ecd6af742788c6ff8bced86b8fa"
|
checksum = "e5c3be1edfad6027c69f5491cf4cb310d1a71ecd6af742788c6ff8bced86b8fa"
|
||||||
dependencies = [
|
dependencies = [
|
||||||
"proc-macro-hack",
|
"proc-macro-hack",
|
||||||
"proc-macro2 1.0.24",
|
"proc-macro2 1.0.26",
|
||||||
"quote 1.0.9",
|
"quote 1.0.9",
|
||||||
"standback",
|
"standback",
|
||||||
"syn 1.0.65",
|
"syn 1.0.69",
|
||||||
]
|
]
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "tinyvec"
|
name = "tinyvec"
|
||||||
version = "1.1.1"
|
version = "1.2.0"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "317cca572a0e89c3ce0ca1f1bdc9369547fe318a683418e42ac8f59d14701023"
|
checksum = "5b5220f05bb7de7f3f53c7c065e1199b3172696fe2db9f9c4d8ad9b4ee74c342"
|
||||||
dependencies = [
|
dependencies = [
|
||||||
"tinyvec_macros",
|
"tinyvec_macros",
|
||||||
]
|
]
|
||||||
|
@ -2842,9 +2869,9 @@ checksum = "cda74da7e1a664f795bb1f8a87ec406fb89a02522cf6e50620d016add6dbbf5c"
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "tokio"
|
name = "tokio"
|
||||||
version = "1.4.0"
|
version = "1.5.0"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "134af885d758d645f0f0505c9a8b3f9bf8a348fd822e112ab5248138348f1722"
|
checksum = "83f0c8e7c0addab50b663055baf787d0af7f413a46e6e7fb9559a4e4db7137a5"
|
||||||
dependencies = [
|
dependencies = [
|
||||||
"autocfg",
|
"autocfg",
|
||||||
"bytes 1.0.1",
|
"bytes 1.0.1",
|
||||||
|
@ -2866,10 +2893,22 @@ dependencies = [
|
||||||
]
|
]
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "tokio-util"
|
name = "tokio-socks"
|
||||||
version = "0.6.5"
|
version = "0.5.1"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "5143d049e85af7fbc36f5454d990e62c2df705b3589f123b71f441b6b59f443f"
|
checksum = "51165dfa029d2a65969413a6cc96f354b86b464498702f174a4efa13608fd8c0"
|
||||||
|
dependencies = [
|
||||||
|
"either",
|
||||||
|
"futures-util",
|
||||||
|
"thiserror",
|
||||||
|
"tokio",
|
||||||
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "tokio-util"
|
||||||
|
version = "0.6.6"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "940a12c99365c31ea8dd9ba04ec1be183ffe4920102bb7122c2f515437601e8e"
|
||||||
dependencies = [
|
dependencies = [
|
||||||
"bytes 1.0.1",
|
"bytes 1.0.1",
|
||||||
"futures-core",
|
"futures-core",
|
||||||
|
@ -2913,9 +2952,9 @@ version = "0.1.15"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "c42e6fa53307c8a17e4ccd4dc81cf5ec38db9209f59b222210375b54ee40d1e2"
|
checksum = "c42e6fa53307c8a17e4ccd4dc81cf5ec38db9209f59b222210375b54ee40d1e2"
|
||||||
dependencies = [
|
dependencies = [
|
||||||
"proc-macro2 1.0.24",
|
"proc-macro2 1.0.26",
|
||||||
"quote 1.0.9",
|
"quote 1.0.9",
|
||||||
"syn 1.0.65",
|
"syn 1.0.69",
|
||||||
]
|
]
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
|
@ -3003,9 +3042,9 @@ dependencies = [
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "unicode-bidi"
|
name = "unicode-bidi"
|
||||||
version = "0.3.4"
|
version = "0.3.5"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "49f2bd0c6468a8230e1db229cff8029217cf623c767ea5d60bfbd42729ea54d5"
|
checksum = "eeb8be209bb1c96b7c177c7420d26e04eccacb0eeae6b980e35fcb74678107e0"
|
||||||
dependencies = [
|
dependencies = [
|
||||||
"matches",
|
"matches",
|
||||||
]
|
]
|
||||||
|
@ -3128,9 +3167,9 @@ checksum = "1a143597ca7c7793eff794def352d41792a93c481eb1042423ff7ff72ba2c31f"
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "wasm-bindgen"
|
name = "wasm-bindgen"
|
||||||
version = "0.2.72"
|
version = "0.2.73"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "8fe8f61dba8e5d645a4d8132dc7a0a66861ed5e1045d2c0ed940fab33bac0fbe"
|
checksum = "83240549659d187488f91f33c0f8547cbfef0b2088bc470c116d1d260ef623d9"
|
||||||
dependencies = [
|
dependencies = [
|
||||||
"cfg-if 1.0.0",
|
"cfg-if 1.0.0",
|
||||||
"serde",
|
"serde",
|
||||||
|
@ -3140,24 +3179,24 @@ dependencies = [
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "wasm-bindgen-backend"
|
name = "wasm-bindgen-backend"
|
||||||
version = "0.2.72"
|
version = "0.2.73"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "046ceba58ff062da072c7cb4ba5b22a37f00a302483f7e2a6cdc18fedbdc1fd3"
|
checksum = "ae70622411ca953215ca6d06d3ebeb1e915f0f6613e3b495122878d7ebec7dae"
|
||||||
dependencies = [
|
dependencies = [
|
||||||
"bumpalo",
|
"bumpalo",
|
||||||
"lazy_static",
|
"lazy_static",
|
||||||
"log 0.4.14",
|
"log 0.4.14",
|
||||||
"proc-macro2 1.0.24",
|
"proc-macro2 1.0.26",
|
||||||
"quote 1.0.9",
|
"quote 1.0.9",
|
||||||
"syn 1.0.65",
|
"syn 1.0.69",
|
||||||
"wasm-bindgen-shared",
|
"wasm-bindgen-shared",
|
||||||
]
|
]
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "wasm-bindgen-futures"
|
name = "wasm-bindgen-futures"
|
||||||
version = "0.4.22"
|
version = "0.4.23"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "73157efb9af26fb564bb59a009afd1c7c334a44db171d280690d0c3faaec3468"
|
checksum = "81b8b767af23de6ac18bf2168b690bed2902743ddf0fb39252e36f9e2bfc63ea"
|
||||||
dependencies = [
|
dependencies = [
|
||||||
"cfg-if 1.0.0",
|
"cfg-if 1.0.0",
|
||||||
"js-sys",
|
"js-sys",
|
||||||
|
@ -3167,9 +3206,9 @@ dependencies = [
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "wasm-bindgen-macro"
|
name = "wasm-bindgen-macro"
|
||||||
version = "0.2.72"
|
version = "0.2.73"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "0ef9aa01d36cda046f797c57959ff5f3c615c9cc63997a8d545831ec7976819b"
|
checksum = "3e734d91443f177bfdb41969de821e15c516931c3c3db3d318fa1b68975d0f6f"
|
||||||
dependencies = [
|
dependencies = [
|
||||||
"quote 1.0.9",
|
"quote 1.0.9",
|
||||||
"wasm-bindgen-macro-support",
|
"wasm-bindgen-macro-support",
|
||||||
|
@ -3177,28 +3216,28 @@ dependencies = [
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "wasm-bindgen-macro-support"
|
name = "wasm-bindgen-macro-support"
|
||||||
version = "0.2.72"
|
version = "0.2.73"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "96eb45c1b2ee33545a813a92dbb53856418bf7eb54ab34f7f7ff1448a5b3735d"
|
checksum = "d53739ff08c8a68b0fdbcd54c372b8ab800b1449ab3c9d706503bc7dd1621b2c"
|
||||||
dependencies = [
|
dependencies = [
|
||||||
"proc-macro2 1.0.24",
|
"proc-macro2 1.0.26",
|
||||||
"quote 1.0.9",
|
"quote 1.0.9",
|
||||||
"syn 1.0.65",
|
"syn 1.0.69",
|
||||||
"wasm-bindgen-backend",
|
"wasm-bindgen-backend",
|
||||||
"wasm-bindgen-shared",
|
"wasm-bindgen-shared",
|
||||||
]
|
]
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "wasm-bindgen-shared"
|
name = "wasm-bindgen-shared"
|
||||||
version = "0.2.72"
|
version = "0.2.73"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "b7148f4696fb4960a346eaa60bbfb42a1ac4ebba21f750f75fc1375b098d5ffa"
|
checksum = "d9a543ae66aa233d14bb765ed9af4a33e81b8b58d1584cf1b47ff8cd0b9e4489"
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "web-sys"
|
name = "web-sys"
|
||||||
version = "0.3.49"
|
version = "0.3.50"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "59fe19d70f5dacc03f6e46777213facae5ac3801575d56ca6cbd4c93dcd12310"
|
checksum = "a905d57e488fec8861446d3393670fb50d27a262344013181c2cdf9fff5481be"
|
||||||
dependencies = [
|
dependencies = [
|
||||||
"js-sys",
|
"js-sys",
|
||||||
"wasm-bindgen",
|
"wasm-bindgen",
|
||||||
|
|
|
@ -32,7 +32,7 @@ rocket = { version = "0.5.0-dev", features = ["tls"], default-features = false }
|
||||||
rocket_contrib = "0.5.0-dev"
|
rocket_contrib = "0.5.0-dev"
|
||||||
|
|
||||||
# HTTP client
|
# HTTP client
|
||||||
reqwest = { version = "0.11.2", features = ["blocking", "json", "gzip", "brotli"] }
|
reqwest = { version = "0.11.3", features = ["blocking", "json", "gzip", "brotli", "socks"] }
|
||||||
|
|
||||||
# multipart/form-data support
|
# multipart/form-data support
|
||||||
multipart = { version = "0.17.1", features = ["server"], default-features = false }
|
multipart = { version = "0.17.1", features = ["server"], default-features = false }
|
||||||
|
@ -102,7 +102,7 @@ num-traits = "0.2.14"
|
||||||
num-derive = "0.3.3"
|
num-derive = "0.3.3"
|
||||||
|
|
||||||
# Email libraries
|
# Email libraries
|
||||||
tracing = { version = "0.1", features = ["log"] } # Needed to have lettre trace logging used when SMTP_DEBUG is enabled.
|
tracing = { version = "0.1.25", features = ["log"] } # Needed to have lettre trace logging used when SMTP_DEBUG is enabled.
|
||||||
lettre = { version = "0.10.0-beta.3", features = ["smtp-transport", "builder", "serde", "native-tls", "hostname", "tracing"], default-features = false }
|
lettre = { version = "0.10.0-beta.3", features = ["smtp-transport", "builder", "serde", "native-tls", "hostname", "tracing"], default-features = false }
|
||||||
newline-converter = "0.2.0"
|
newline-converter = "0.2.0"
|
||||||
|
|
||||||
|
|
|
@ -60,10 +60,10 @@ Thanks for your contribution to the project!
|
||||||
<table>
|
<table>
|
||||||
<tr>
|
<tr>
|
||||||
<td align="center">
|
<td align="center">
|
||||||
<a href="https://github.com/netDpay">
|
<a href="https://github.com/netdadaltd">
|
||||||
<img src="https://avatars.githubusercontent.com/u/77323954?s=75&v=4" width="75px;" alt="netDpay"/>
|
<img src="https://avatars.githubusercontent.com/u/77323954?s=75&v=4" width="75px;" alt="netdadaltd"/>
|
||||||
<br />
|
<br />
|
||||||
<sub><b>netDpay</b></sub>
|
<sub><b>netDada Ltd.</b></sub>
|
||||||
</a>
|
</a>
|
||||||
</td>
|
</td>
|
||||||
</tr>
|
</tr>
|
||||||
|
|
6
build.rs
6
build.rs
|
@ -1,5 +1,5 @@
|
||||||
use std::process::Command;
|
|
||||||
use std::env;
|
use std::env;
|
||||||
|
use std::process::Command;
|
||||||
|
|
||||||
fn main() {
|
fn main() {
|
||||||
// This allow using #[cfg(sqlite)] instead of #[cfg(feature = "sqlite")], which helps when trying to add them through macros
|
// This allow using #[cfg(sqlite)] instead of #[cfg(feature = "sqlite")], which helps when trying to add them through macros
|
||||||
|
@ -11,7 +11,9 @@ fn main() {
|
||||||
println!("cargo:rustc-cfg=postgresql");
|
println!("cargo:rustc-cfg=postgresql");
|
||||||
|
|
||||||
#[cfg(not(any(feature = "sqlite", feature = "mysql", feature = "postgresql")))]
|
#[cfg(not(any(feature = "sqlite", feature = "mysql", feature = "postgresql")))]
|
||||||
compile_error!("You need to enable one DB backend. To build with previous defaults do: cargo build --features sqlite");
|
compile_error!(
|
||||||
|
"You need to enable one DB backend. To build with previous defaults do: cargo build --features sqlite"
|
||||||
|
);
|
||||||
|
|
||||||
if let Ok(version) = env::var("BWRS_VERSION") {
|
if let Ok(version) = env::var("BWRS_VERSION") {
|
||||||
println!("cargo:rustc-env=BWRS_VERSION={}", version);
|
println!("cargo:rustc-env=BWRS_VERSION={}", version);
|
||||||
|
|
|
@ -1,10 +1,10 @@
|
||||||
# This file was generated using a Jinja2 template.
|
# This file was generated using a Jinja2 template.
|
||||||
# Please make your changes in `Dockerfile.j2` and then `make` the individual Dockerfiles.
|
# Please make your changes in `Dockerfile.j2` and then `make` the individual Dockerfiles.
|
||||||
|
|
||||||
{% set build_stage_base_image = "rust:1.50" %}
|
{% set build_stage_base_image = "rust:1.51" %}
|
||||||
{% if "alpine" in target_file %}
|
{% if "alpine" in target_file %}
|
||||||
{% if "amd64" in target_file %}
|
{% if "amd64" in target_file %}
|
||||||
{% set build_stage_base_image = "clux/muslrust:nightly-2021-02-22" %}
|
{% set build_stage_base_image = "clux/muslrust:nightly-2021-04-14" %}
|
||||||
{% set runtime_stage_base_image = "alpine:3.13" %}
|
{% set runtime_stage_base_image = "alpine:3.13" %}
|
||||||
{% set package_arch_target = "x86_64-unknown-linux-musl" %}
|
{% set package_arch_target = "x86_64-unknown-linux-musl" %}
|
||||||
{% elif "armv7" in target_file %}
|
{% elif "armv7" in target_file %}
|
||||||
|
|
|
@ -1 +1 @@
|
||||||
nightly-2021-02-22
|
nightly-2021-04-14
|
|
@ -1,2 +1,7 @@
|
||||||
version = "Two"
|
version = "Two"
|
||||||
|
edition = "2018"
|
||||||
max_width = 120
|
max_width = 120
|
||||||
|
newline_style = "Unix"
|
||||||
|
use_small_heuristics = "Off"
|
||||||
|
struct_lit_single_line = false
|
||||||
|
overflow_delimited_expr = true
|
||||||
|
|
|
@ -3,7 +3,6 @@ use serde::de::DeserializeOwned;
|
||||||
use serde_json::Value;
|
use serde_json::Value;
|
||||||
use std::{env, time::Duration};
|
use std::{env, time::Duration};
|
||||||
|
|
||||||
use reqwest::{blocking::Client, header::USER_AGENT};
|
|
||||||
use rocket::{
|
use rocket::{
|
||||||
http::{Cookie, Cookies, SameSite},
|
http::{Cookie, Cookies, SameSite},
|
||||||
request::{self, FlashMessage, Form, FromRequest, Outcome, Request},
|
request::{self, FlashMessage, Form, FromRequest, Outcome, Request},
|
||||||
|
@ -19,7 +18,7 @@ use crate::{
|
||||||
db::{backup_database, get_sql_server_version, models::*, DbConn, DbConnType},
|
db::{backup_database, get_sql_server_version, models::*, DbConn, DbConnType},
|
||||||
error::{Error, MapResult},
|
error::{Error, MapResult},
|
||||||
mail,
|
mail,
|
||||||
util::{format_naive_datetime_local, get_display_size, is_running_in_docker},
|
util::{format_naive_datetime_local, get_display_size, get_reqwest_client, is_running_in_docker},
|
||||||
CONFIG,
|
CONFIG,
|
||||||
};
|
};
|
||||||
|
|
||||||
|
@ -64,11 +63,8 @@ static DB_TYPE: Lazy<&str> = Lazy::new(|| {
|
||||||
.unwrap_or("Unknown")
|
.unwrap_or("Unknown")
|
||||||
});
|
});
|
||||||
|
|
||||||
static CAN_BACKUP: Lazy<bool> = Lazy::new(|| {
|
static CAN_BACKUP: Lazy<bool> =
|
||||||
DbConnType::from_url(&CONFIG.database_url())
|
Lazy::new(|| DbConnType::from_url(&CONFIG.database_url()).map(|t| t == DbConnType::sqlite).unwrap_or(false));
|
||||||
.map(|t| t == DbConnType::sqlite)
|
|
||||||
.unwrap_or(false)
|
|
||||||
});
|
|
||||||
|
|
||||||
#[get("/")]
|
#[get("/")]
|
||||||
fn admin_disabled() -> &'static str {
|
fn admin_disabled() -> &'static str {
|
||||||
|
@ -141,7 +137,12 @@ fn admin_url(referer: Referer) -> String {
|
||||||
fn admin_login(flash: Option<FlashMessage>) -> ApiResult<Html<String>> {
|
fn admin_login(flash: Option<FlashMessage>) -> ApiResult<Html<String>> {
|
||||||
// If there is an error, show it
|
// If there is an error, show it
|
||||||
let msg = flash.map(|msg| format!("{}: {}", msg.name(), msg.msg()));
|
let msg = flash.map(|msg| format!("{}: {}", msg.name(), msg.msg()));
|
||||||
let json = json!({"page_content": "admin/login", "version": VERSION, "error": msg, "urlpath": CONFIG.domain_path()});
|
let json = json!({
|
||||||
|
"page_content": "admin/login",
|
||||||
|
"version": VERSION,
|
||||||
|
"error": msg,
|
||||||
|
"urlpath": CONFIG.domain_path()
|
||||||
|
});
|
||||||
|
|
||||||
// Return the page
|
// Return the page
|
||||||
let text = CONFIG.render_template(BASE_TEMPLATE, &json)?;
|
let text = CONFIG.render_template(BASE_TEMPLATE, &json)?;
|
||||||
|
@ -165,10 +166,7 @@ fn post_admin_login(
|
||||||
// If the token is invalid, redirect to login page
|
// If the token is invalid, redirect to login page
|
||||||
if !_validate_token(&data.token) {
|
if !_validate_token(&data.token) {
|
||||||
error!("Invalid admin token. IP: {}", ip.ip);
|
error!("Invalid admin token. IP: {}", ip.ip);
|
||||||
Err(Flash::error(
|
Err(Flash::error(Redirect::to(admin_url(referer)), "Invalid admin token, please try again."))
|
||||||
Redirect::to(admin_url(referer)),
|
|
||||||
"Invalid admin token, please try again.",
|
|
||||||
))
|
|
||||||
} else {
|
} else {
|
||||||
// If the token received is valid, generate JWT and save it as a cookie
|
// If the token received is valid, generate JWT and save it as a cookie
|
||||||
let claims = generate_admin_claims();
|
let claims = generate_admin_claims();
|
||||||
|
@ -328,7 +326,8 @@ fn get_users_json(_token: AdminToken, conn: DbConn) -> Json<Value> {
|
||||||
fn users_overview(_token: AdminToken, conn: DbConn) -> ApiResult<Html<String>> {
|
fn users_overview(_token: AdminToken, conn: DbConn) -> ApiResult<Html<String>> {
|
||||||
let users = User::get_all(&conn);
|
let users = User::get_all(&conn);
|
||||||
let dt_fmt = "%Y-%m-%d %H:%M:%S %Z";
|
let dt_fmt = "%Y-%m-%d %H:%M:%S %Z";
|
||||||
let users_json: Vec<Value> = users.iter()
|
let users_json: Vec<Value> = users
|
||||||
|
.iter()
|
||||||
.map(|u| {
|
.map(|u| {
|
||||||
let mut usr = u.to_json(&conn);
|
let mut usr = u.to_json(&conn);
|
||||||
usr["cipher_count"] = json!(Cipher::count_owned_by_user(&u.uuid, &conn));
|
usr["cipher_count"] = json!(Cipher::count_owned_by_user(&u.uuid, &conn));
|
||||||
|
@ -338,7 +337,7 @@ fn users_overview(_token: AdminToken, conn: DbConn) -> ApiResult<Html<String>> {
|
||||||
usr["created_at"] = json!(format_naive_datetime_local(&u.created_at, dt_fmt));
|
usr["created_at"] = json!(format_naive_datetime_local(&u.created_at, dt_fmt));
|
||||||
usr["last_active"] = match u.last_active(&conn) {
|
usr["last_active"] = match u.last_active(&conn) {
|
||||||
Some(dt) => json!(format_naive_datetime_local(&dt, dt_fmt)),
|
Some(dt) => json!(format_naive_datetime_local(&dt, dt_fmt)),
|
||||||
None => json!("Never")
|
None => json!("Never"),
|
||||||
};
|
};
|
||||||
usr
|
usr
|
||||||
})
|
})
|
||||||
|
@ -423,7 +422,6 @@ fn update_user_org_type(data: Json<UserOrgTypeData>, _token: AdminToken, conn: D
|
||||||
user_to_edit.save(&conn)
|
user_to_edit.save(&conn)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
#[post("/users/update_revision")]
|
#[post("/users/update_revision")]
|
||||||
fn update_revision_users(_token: AdminToken, conn: DbConn) -> EmptyResult {
|
fn update_revision_users(_token: AdminToken, conn: DbConn) -> EmptyResult {
|
||||||
User::update_all_revisions(&conn)
|
User::update_all_revisions(&conn)
|
||||||
|
@ -432,7 +430,8 @@ fn update_revision_users(_token: AdminToken, conn: DbConn) -> EmptyResult {
|
||||||
#[get("/organizations/overview")]
|
#[get("/organizations/overview")]
|
||||||
fn organizations_overview(_token: AdminToken, conn: DbConn) -> ApiResult<Html<String>> {
|
fn organizations_overview(_token: AdminToken, conn: DbConn) -> ApiResult<Html<String>> {
|
||||||
let organizations = Organization::get_all(&conn);
|
let organizations = Organization::get_all(&conn);
|
||||||
let organizations_json: Vec<Value> = organizations.iter()
|
let organizations_json: Vec<Value> = organizations
|
||||||
|
.iter()
|
||||||
.map(|o| {
|
.map(|o| {
|
||||||
let mut org = o.to_json();
|
let mut org = o.to_json();
|
||||||
org["user_count"] = json!(UserOrganization::count_by_org(&o.uuid, &conn));
|
org["user_count"] = json!(UserOrganization::count_by_org(&o.uuid, &conn));
|
||||||
|
@ -469,26 +468,15 @@ struct GitCommit {
|
||||||
}
|
}
|
||||||
|
|
||||||
fn get_github_api<T: DeserializeOwned>(url: &str) -> Result<T, Error> {
|
fn get_github_api<T: DeserializeOwned>(url: &str) -> Result<T, Error> {
|
||||||
let github_api = Client::builder().build()?;
|
let github_api = get_reqwest_client();
|
||||||
|
|
||||||
Ok(github_api
|
Ok(github_api.get(url).timeout(Duration::from_secs(10)).send()?.error_for_status()?.json::<T>()?)
|
||||||
.get(url)
|
|
||||||
.timeout(Duration::from_secs(10))
|
|
||||||
.header(USER_AGENT, "Bitwarden_RS")
|
|
||||||
.send()?
|
|
||||||
.error_for_status()?
|
|
||||||
.json::<T>()?)
|
|
||||||
}
|
}
|
||||||
|
|
||||||
fn has_http_access() -> bool {
|
fn has_http_access() -> bool {
|
||||||
let http_access = Client::builder().build().unwrap();
|
let http_access = get_reqwest_client();
|
||||||
|
|
||||||
match http_access
|
match http_access.head("https://github.com/dani-garcia/bitwarden_rs").timeout(Duration::from_secs(10)).send() {
|
||||||
.head("https://github.com/dani-garcia/bitwarden_rs")
|
|
||||||
.timeout(Duration::from_secs(10))
|
|
||||||
.header(USER_AGENT, "Bitwarden_RS")
|
|
||||||
.send()
|
|
||||||
{
|
|
||||||
Ok(r) => r.status().is_success(),
|
Ok(r) => r.status().is_success(),
|
||||||
_ => false,
|
_ => false,
|
||||||
}
|
}
|
||||||
|
@ -501,17 +489,16 @@ fn diagnostics(_token: AdminToken, ip_header: IpHeader, conn: DbConn) -> ApiResu
|
||||||
use std::net::ToSocketAddrs;
|
use std::net::ToSocketAddrs;
|
||||||
|
|
||||||
// Get current running versions
|
// Get current running versions
|
||||||
let web_vault_version: WebVaultVersion = match read_file_string(&format!("{}/{}", CONFIG.web_vault_folder(), "bwrs-version.json")) {
|
let web_vault_version: WebVaultVersion =
|
||||||
Ok(s) => serde_json::from_str(&s)?,
|
match read_file_string(&format!("{}/{}", CONFIG.web_vault_folder(), "bwrs-version.json")) {
|
||||||
_ => {
|
Ok(s) => serde_json::from_str(&s)?,
|
||||||
match read_file_string(&format!("{}/{}", CONFIG.web_vault_folder(), "version.json")) {
|
_ => match read_file_string(&format!("{}/{}", CONFIG.web_vault_folder(), "version.json")) {
|
||||||
Ok(s) => serde_json::from_str(&s)?,
|
Ok(s) => serde_json::from_str(&s)?,
|
||||||
_ => {
|
_ => WebVaultVersion {
|
||||||
WebVaultVersion{version: String::from("Version file missing")}
|
version: String::from("Version file missing"),
|
||||||
},
|
},
|
||||||
}
|
},
|
||||||
},
|
};
|
||||||
};
|
|
||||||
|
|
||||||
// Execute some environment checks
|
// Execute some environment checks
|
||||||
let running_within_docker = is_running_in_docker();
|
let running_within_docker = is_running_in_docker();
|
||||||
|
@ -531,7 +518,8 @@ fn diagnostics(_token: AdminToken, ip_header: IpHeader, conn: DbConn) -> ApiResu
|
||||||
// TODO: Maybe we need to cache this using a LazyStatic or something. Github only allows 60 requests per hour, and we use 3 here already.
|
// TODO: Maybe we need to cache this using a LazyStatic or something. Github only allows 60 requests per hour, and we use 3 here already.
|
||||||
let (latest_release, latest_commit, latest_web_build) = if has_http_access {
|
let (latest_release, latest_commit, latest_web_build) = if has_http_access {
|
||||||
(
|
(
|
||||||
match get_github_api::<GitRelease>("https://api.github.com/repos/dani-garcia/bitwarden_rs/releases/latest") {
|
match get_github_api::<GitRelease>("https://api.github.com/repos/dani-garcia/bitwarden_rs/releases/latest")
|
||||||
|
{
|
||||||
Ok(r) => r.tag_name,
|
Ok(r) => r.tag_name,
|
||||||
_ => "-".to_string(),
|
_ => "-".to_string(),
|
||||||
},
|
},
|
||||||
|
@ -547,7 +535,9 @@ fn diagnostics(_token: AdminToken, ip_header: IpHeader, conn: DbConn) -> ApiResu
|
||||||
if running_within_docker {
|
if running_within_docker {
|
||||||
"-".to_string()
|
"-".to_string()
|
||||||
} else {
|
} else {
|
||||||
match get_github_api::<GitRelease>("https://api.github.com/repos/dani-garcia/bw_web_builds/releases/latest") {
|
match get_github_api::<GitRelease>(
|
||||||
|
"https://api.github.com/repos/dani-garcia/bw_web_builds/releases/latest",
|
||||||
|
) {
|
||||||
Ok(r) => r.tag_name.trim_start_matches('v').to_string(),
|
Ok(r) => r.tag_name.trim_start_matches('v').to_string(),
|
||||||
_ => "-".to_string(),
|
_ => "-".to_string(),
|
||||||
}
|
}
|
||||||
|
@ -559,7 +549,7 @@ fn diagnostics(_token: AdminToken, ip_header: IpHeader, conn: DbConn) -> ApiResu
|
||||||
|
|
||||||
let ip_header_name = match &ip_header.0 {
|
let ip_header_name = match &ip_header.0 {
|
||||||
Some(h) => h,
|
Some(h) => h,
|
||||||
_ => ""
|
_ => "",
|
||||||
};
|
};
|
||||||
|
|
||||||
let diagnostics_json = json!({
|
let diagnostics_json = json!({
|
||||||
|
|
|
@ -320,15 +320,7 @@ fn post_rotatekey(data: JsonUpcase<KeyData>, headers: Headers, conn: DbConn, nt:
|
||||||
err!("The cipher is not owned by the user")
|
err!("The cipher is not owned by the user")
|
||||||
}
|
}
|
||||||
|
|
||||||
update_cipher_from_data(
|
update_cipher_from_data(&mut saved_cipher, cipher_data, &headers, false, &conn, &nt, UpdateType::CipherUpdate)?
|
||||||
&mut saved_cipher,
|
|
||||||
cipher_data,
|
|
||||||
&headers,
|
|
||||||
false,
|
|
||||||
&conn,
|
|
||||||
&nt,
|
|
||||||
UpdateType::CipherUpdate,
|
|
||||||
)?
|
|
||||||
}
|
}
|
||||||
|
|
||||||
// Update user data
|
// Update user data
|
||||||
|
|
|
@ -100,24 +100,18 @@ fn sync(data: Form<SyncData>, headers: Headers, conn: DbConn) -> Json<Value> {
|
||||||
let folders_json: Vec<Value> = folders.iter().map(Folder::to_json).collect();
|
let folders_json: Vec<Value> = folders.iter().map(Folder::to_json).collect();
|
||||||
|
|
||||||
let collections = Collection::find_by_user_uuid(&headers.user.uuid, &conn);
|
let collections = Collection::find_by_user_uuid(&headers.user.uuid, &conn);
|
||||||
let collections_json: Vec<Value> = collections.iter()
|
let collections_json: Vec<Value> =
|
||||||
.map(|c| c.to_json_details(&headers.user.uuid, &conn))
|
collections.iter().map(|c| c.to_json_details(&headers.user.uuid, &conn)).collect();
|
||||||
.collect();
|
|
||||||
|
|
||||||
let policies = OrgPolicy::find_by_user(&headers.user.uuid, &conn);
|
let policies = OrgPolicy::find_by_user(&headers.user.uuid, &conn);
|
||||||
let policies_json: Vec<Value> = policies.iter().map(OrgPolicy::to_json).collect();
|
let policies_json: Vec<Value> = policies.iter().map(OrgPolicy::to_json).collect();
|
||||||
|
|
||||||
let ciphers = Cipher::find_by_user_visible(&headers.user.uuid, &conn);
|
let ciphers = Cipher::find_by_user_visible(&headers.user.uuid, &conn);
|
||||||
let ciphers_json: Vec<Value> = ciphers
|
let ciphers_json: Vec<Value> =
|
||||||
.iter()
|
ciphers.iter().map(|c| c.to_json(&headers.host, &headers.user.uuid, &conn)).collect();
|
||||||
.map(|c| c.to_json(&headers.host, &headers.user.uuid, &conn))
|
|
||||||
.collect();
|
|
||||||
|
|
||||||
let sends = Send::find_by_user(&headers.user.uuid, &conn);
|
let sends = Send::find_by_user(&headers.user.uuid, &conn);
|
||||||
let sends_json: Vec<Value> = sends
|
let sends_json: Vec<Value> = sends.iter().map(|s| s.to_json()).collect();
|
||||||
.iter()
|
|
||||||
.map(|s| s.to_json())
|
|
||||||
.collect();
|
|
||||||
|
|
||||||
let domains_json = if data.exclude_domains {
|
let domains_json = if data.exclude_domains {
|
||||||
Value::Null
|
Value::Null
|
||||||
|
@ -142,10 +136,8 @@ fn sync(data: Form<SyncData>, headers: Headers, conn: DbConn) -> Json<Value> {
|
||||||
fn get_ciphers(headers: Headers, conn: DbConn) -> Json<Value> {
|
fn get_ciphers(headers: Headers, conn: DbConn) -> Json<Value> {
|
||||||
let ciphers = Cipher::find_by_user_visible(&headers.user.uuid, &conn);
|
let ciphers = Cipher::find_by_user_visible(&headers.user.uuid, &conn);
|
||||||
|
|
||||||
let ciphers_json: Vec<Value> = ciphers
|
let ciphers_json: Vec<Value> =
|
||||||
.iter()
|
ciphers.iter().map(|c| c.to_json(&headers.host, &headers.user.uuid, &conn)).collect();
|
||||||
.map(|c| c.to_json(&headers.host, &headers.user.uuid, &conn))
|
|
||||||
.collect();
|
|
||||||
|
|
||||||
Json(json!({
|
Json(json!({
|
||||||
"Data": ciphers_json,
|
"Data": ciphers_json,
|
||||||
|
@ -246,7 +238,7 @@ fn post_ciphers_create(data: JsonUpcase<ShareCipherData>, headers: Headers, conn
|
||||||
|
|
||||||
// Check if there are one more more collections selected when this cipher is part of an organization.
|
// Check if there are one more more collections selected when this cipher is part of an organization.
|
||||||
// err if this is not the case before creating an empty cipher.
|
// err if this is not the case before creating an empty cipher.
|
||||||
if data.Cipher.OrganizationId.is_some() && data.CollectionIds.is_empty() {
|
if data.Cipher.OrganizationId.is_some() && data.CollectionIds.is_empty() {
|
||||||
err!("You must select at least one collection.");
|
err!("You must select at least one collection.");
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -288,17 +280,12 @@ fn post_ciphers(data: JsonUpcase<CipherData>, headers: Headers, conn: DbConn, nt
|
||||||
/// allowed to delete or share such ciphers to an org, however.
|
/// allowed to delete or share such ciphers to an org, however.
|
||||||
///
|
///
|
||||||
/// Ref: https://bitwarden.com/help/article/policies/#personal-ownership
|
/// Ref: https://bitwarden.com/help/article/policies/#personal-ownership
|
||||||
fn enforce_personal_ownership_policy(
|
fn enforce_personal_ownership_policy(data: &CipherData, headers: &Headers, conn: &DbConn) -> EmptyResult {
|
||||||
data: &CipherData,
|
|
||||||
headers: &Headers,
|
|
||||||
conn: &DbConn
|
|
||||||
) -> EmptyResult {
|
|
||||||
if data.OrganizationId.is_none() {
|
if data.OrganizationId.is_none() {
|
||||||
let user_uuid = &headers.user.uuid;
|
let user_uuid = &headers.user.uuid;
|
||||||
let policy_type = OrgPolicyType::PersonalOwnership;
|
let policy_type = OrgPolicyType::PersonalOwnership;
|
||||||
if OrgPolicy::is_applicable_to_user(user_uuid, policy_type, conn) {
|
if OrgPolicy::is_applicable_to_user(user_uuid, policy_type, conn) {
|
||||||
err!("Due to an Enterprise Policy, you are restricted from \
|
err!("Due to an Enterprise Policy, you are restricted from saving items to your personal vault.")
|
||||||
saving items to your personal vault.")
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
Ok(())
|
Ok(())
|
||||||
|
@ -317,11 +304,12 @@ pub fn update_cipher_from_data(
|
||||||
|
|
||||||
// Check that the client isn't updating an existing cipher with stale data.
|
// Check that the client isn't updating an existing cipher with stale data.
|
||||||
if let Some(dt) = data.LastKnownRevisionDate {
|
if let Some(dt) = data.LastKnownRevisionDate {
|
||||||
match NaiveDateTime::parse_from_str(&dt, "%+") { // ISO 8601 format
|
match NaiveDateTime::parse_from_str(&dt, "%+") {
|
||||||
Err(err) =>
|
// ISO 8601 format
|
||||||
warn!("Error parsing LastKnownRevisionDate '{}': {}", dt, err),
|
Err(err) => warn!("Error parsing LastKnownRevisionDate '{}': {}", dt, err),
|
||||||
Ok(dt) if cipher.updated_at.signed_duration_since(dt).num_seconds() > 1 =>
|
Ok(dt) if cipher.updated_at.signed_duration_since(dt).num_seconds() > 1 => {
|
||||||
err!("The client copy of this cipher is out of date. Resync the client and try again."),
|
err!("The client copy of this cipher is out of date. Resync the client and try again.")
|
||||||
|
}
|
||||||
Ok(_) => (),
|
Ok(_) => (),
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -394,12 +382,9 @@ pub fn update_cipher_from_data(
|
||||||
// But, we at least know we do not need to store and return this specific key.
|
// But, we at least know we do not need to store and return this specific key.
|
||||||
fn _clean_cipher_data(mut json_data: Value) -> Value {
|
fn _clean_cipher_data(mut json_data: Value) -> Value {
|
||||||
if json_data.is_array() {
|
if json_data.is_array() {
|
||||||
json_data.as_array_mut()
|
json_data.as_array_mut().unwrap().iter_mut().for_each(|ref mut f| {
|
||||||
.unwrap()
|
f.as_object_mut().unwrap().remove("Response");
|
||||||
.iter_mut()
|
});
|
||||||
.for_each(|ref mut f| {
|
|
||||||
f.as_object_mut().unwrap().remove("Response");
|
|
||||||
});
|
|
||||||
};
|
};
|
||||||
json_data
|
json_data
|
||||||
}
|
}
|
||||||
@@ -421,13 +406,13 @@ pub fn update_cipher_from_data(
                data["Uris"] = _clean_cipher_data(data["Uris"].clone());
            }
            data
-        },
+        }
        None => err!("Data missing"),
    };

    cipher.name = data.Name;
    cipher.notes = data.Notes;
-    cipher.fields = data.Fields.map(|f| _clean_cipher_data(f).to_string() );
+    cipher.fields = data.Fields.map(|f| _clean_cipher_data(f).to_string());
    cipher.data = type_data.to_string();
    cipher.password_history = data.PasswordHistory.map(|f| f.to_string());

@@ -602,11 +587,8 @@ fn post_collections_admin(
    }

    let posted_collections: HashSet<String> = data.CollectionIds.iter().cloned().collect();
-    let current_collections: HashSet<String> = cipher
-        .get_collections(&headers.user.uuid, &conn)
-        .iter()
-        .cloned()
-        .collect();
+    let current_collections: HashSet<String> =
+        cipher.get_collections(&headers.user.uuid, &conn).iter().cloned().collect();

    for collection in posted_collections.symmetric_difference(&current_collections) {
        match Collection::find_by_uuid(&collection, &conn) {

@@ -842,24 +824,25 @@ fn post_attachment(
            let file_name = HEXLOWER.encode(&crypto::get_random(vec![0; 10]));
            let path = base_path.join(&file_name);

-            let size = match field.data.save().memory_threshold(0).size_limit(size_limit).with_path(path.clone()) {
+            let size =
+                match field.data.save().memory_threshold(0).size_limit(size_limit).with_path(path.clone()) {
                    SaveResult::Full(SavedData::File(_, size)) => size as i32,
                    SaveResult::Full(other) => {
                        std::fs::remove_file(path).ok();
                        error = Some(format!("Attachment is not a file: {:?}", other));
                        return;
                    }
                    SaveResult::Partial(_, reason) => {
                        std::fs::remove_file(path).ok();
                        error = Some(format!("Attachment size limit exceeded with this file: {:?}", reason));
                        return;
                    }
                    SaveResult::Error(e) => {
                        std::fs::remove_file(path).ok();
                        error = Some(format!("Error: {:?}", e));
                        return;
                    }
                };

            let mut attachment = Attachment::new(file_name, cipher.uuid.clone(), name, size);
            attachment.akey = attachment_key.clone();
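The post_attachment hunk only re-wraps a long match expression, but the pattern it formats is the interesting part: every non-success arm deletes the partially written file before bailing out. A minimal standalone sketch of that cleanup-on-error shape — the enum below merely mirrors the shape of multipart's SaveResult, it is not the real type:

use std::fs;
use std::path::Path;

// Stand-in for multipart's SaveResult; the real type carries more information.
enum SaveOutcome {
    File(i64),       // saved to disk, size in bytes
    Partial(String), // stopped early, e.g. size limit hit
    Error(String),   // I/O failure
}

fn finish_upload(outcome: SaveOutcome, path: &Path) -> Result<i64, String> {
    match outcome {
        SaveOutcome::File(size) => Ok(size),
        // In every failure case, remove whatever was written so no orphan file is left behind.
        SaveOutcome::Partial(reason) => {
            fs::remove_file(path).ok();
            Err(format!("Attachment size limit exceeded: {}", reason))
        }
        SaveOutcome::Error(e) => {
            fs::remove_file(path).ok();
            Err(format!("Error: {}", e))
        }
    }
}

fn main() {
    let tmp = Path::new("upload.tmp");
    assert_eq!(finish_upload(SaveOutcome::File(1024), tmp), Ok(1024));
    assert!(finish_upload(SaveOutcome::Partial("10 MiB limit".into()), tmp).is_err());
}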
@@ -994,12 +977,22 @@ fn delete_cipher_selected_admin(data: JsonUpcase<Value>, headers: Headers, conn:
}

#[post("/ciphers/delete-admin", data = "<data>")]
-fn delete_cipher_selected_post_admin(data: JsonUpcase<Value>, headers: Headers, conn: DbConn, nt: Notify) -> EmptyResult {
+fn delete_cipher_selected_post_admin(
+    data: JsonUpcase<Value>,
+    headers: Headers,
+    conn: DbConn,
+    nt: Notify,
+) -> EmptyResult {
    delete_cipher_selected_post(data, headers, conn, nt)
}

#[put("/ciphers/delete-admin", data = "<data>")]
-fn delete_cipher_selected_put_admin(data: JsonUpcase<Value>, headers: Headers, conn: DbConn, nt: Notify) -> EmptyResult {
+fn delete_cipher_selected_put_admin(
+    data: JsonUpcase<Value>,
+    headers: Headers,
+    conn: DbConn,
+    nt: Notify,
+) -> EmptyResult {
    delete_cipher_selected_put(data, headers, conn, nt)
}

@@ -1150,7 +1143,13 @@ fn _delete_cipher_by_uuid(uuid: &str, headers: &Headers, conn: &DbConn, soft_del
    Ok(())
}

-fn _delete_multiple_ciphers(data: JsonUpcase<Value>, headers: Headers, conn: DbConn, soft_delete: bool, nt: Notify) -> EmptyResult {
+fn _delete_multiple_ciphers(
+    data: JsonUpcase<Value>,
+    headers: Headers,
+    conn: DbConn,
+    soft_delete: bool,
+    nt: Notify,
+) -> EmptyResult {
    let data: Value = data.into_inner().data;

    let uuids = match data.get("Ids") {

@@ -1202,7 +1201,7 @@ fn _restore_multiple_ciphers(data: JsonUpcase<Value>, headers: &Headers, conn: &
    for uuid in uuids {
        match _restore_cipher_by_uuid(uuid, headers, conn, nt) {
            Ok(json) => ciphers.push(json.into_inner()),
-            err => return err
+            err => return err,
        }
    }

@@ -8,15 +8,7 @@ use crate::{
};

pub fn routes() -> Vec<rocket::Route> {
-    routes![
-        get_folders,
-        get_folder,
-        post_folders,
-        post_folder,
-        put_folder,
-        delete_folder_post,
-        delete_folder,
-    ]
+    routes![get_folders, get_folder, post_folders, post_folder, put_folder, delete_folder_post, delete_folder,]
}

#[get("/folders")]

@@ -2,21 +2,15 @@ mod accounts;
mod ciphers;
mod folders;
mod organizations;
-pub mod two_factor;
mod sends;
+pub mod two_factor;

pub use ciphers::purge_trashed_ciphers;
pub use sends::purge_sends;

pub fn routes() -> Vec<Route> {
-    let mut mod_routes = routes![
-        clear_device_token,
-        put_device_token,
-        get_eq_domains,
-        post_eq_domains,
-        put_eq_domains,
-        hibp_breach,
-    ];
+    let mut mod_routes =
+        routes![clear_device_token, put_device_token, get_eq_domains, post_eq_domains, put_eq_domains, hibp_breach,];

    let mut routes = Vec::new();
    routes.append(&mut accounts::routes());

@@ -33,9 +27,9 @@ pub fn routes() -> Vec<Route> {
//
// Move this somewhere else
//
+use rocket::response::Response;
use rocket::Route;
use rocket_contrib::json::Json;
-use rocket::response::Response;
use serde_json::Value;

use crate::{

@@ -43,6 +37,7 @@ use crate::{
    auth::Headers,
    db::DbConn,
    error::Error,
+    util::get_reqwest_client,
};

#[put("/devices/identifier/<uuid>/clear-token")]

@@ -147,22 +142,15 @@ fn put_eq_domains(data: JsonUpcase<EquivDomainData>, headers: Headers, conn: DbC

#[get("/hibp/breach?<username>")]
fn hibp_breach(username: String) -> JsonResult {
-    let user_agent = "Bitwarden_RS";
    let url = format!(
        "https://haveibeenpwned.com/api/v3/breachedaccount/{}?truncateResponse=false&includeUnverified=false",
        username
    );

-    use reqwest::{blocking::Client, header::USER_AGENT};
-
    if let Some(api_key) = crate::CONFIG.hibp_api_key() {
-        let hibp_client = Client::builder().build()?;
-
-        let res = hibp_client
-            .get(&url)
-            .header(USER_AGENT, user_agent)
-            .header("hibp-api-key", api_key)
-            .send()?;
+        let hibp_client = get_reqwest_client();
+
+        let res = hibp_client.get(&url).header("hibp-api-key", api_key).send()?;

        // If we get a 404, return a 404, it means no breached accounts
        if res.status() == 404 {
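The hibp_breach hunk above (and the Duo and icon hunks further down) stop building a fresh reqwest::blocking::Client per call and instead use shared get_reqwest_client()/get_reqwest_client_builder() helpers imported from crate::util. The helpers themselves are not shown in this diff; a plausible minimal shape — the user-agent value and defaults below are assumptions, not the real implementation — would be:

use reqwest::blocking::{Client, ClientBuilder};
use reqwest::header;

// Hypothetical sketch of the shared helper this diff starts calling;
// the real implementation lives in crate::util and may differ in defaults.
pub fn get_reqwest_client_builder() -> ClientBuilder {
    let mut headers = header::HeaderMap::new();
    headers.insert(header::USER_AGENT, header::HeaderValue::from_static("Bitwarden_RS"));
    Client::builder().default_headers(headers)
}

pub fn get_reqwest_client() -> Client {
    get_reqwest_client_builder().build().expect("Failed to build the request client")
}

Centralizing the builder this way means every outbound request carries the same user agent and connection settings instead of each call site configuring its own client.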
@@ -5,7 +5,7 @@ use serde_json::Value;

use crate::{
    api::{EmptyResult, JsonResult, JsonUpcase, JsonUpcaseVec, Notify, NumberOrString, PasswordData, UpdateType},
-    auth::{decode_invite, AdminHeaders, Headers, OwnerHeaders, ManagerHeaders, ManagerHeadersLoose},
+    auth::{decode_invite, AdminHeaders, Headers, ManagerHeaders, ManagerHeadersLoose, OwnerHeaders},
    db::{models::*, DbConn},
    mail, CONFIG,
};

@@ -333,7 +333,12 @@ fn post_organization_collection_delete_user(
}

#[delete("/organizations/<org_id>/collections/<col_id>")]
-fn delete_organization_collection(org_id: String, col_id: String, _headers: ManagerHeaders, conn: DbConn) -> EmptyResult {
+fn delete_organization_collection(
+    org_id: String,
+    col_id: String,
+    _headers: ManagerHeaders,
+    conn: DbConn,
+) -> EmptyResult {
    match Collection::find_by_uuid(&col_id, &conn) {
        None => err!("Collection not found"),
        Some(collection) => {

@@ -426,9 +431,7 @@ fn put_collection_users(
                continue;
            }

-            CollectionUser::save(&user.user_uuid, &coll_id,
-                d.ReadOnly, d.HidePasswords,
-                &conn)?;
+            CollectionUser::save(&user.user_uuid, &coll_id, d.ReadOnly, d.HidePasswords, &conn)?;
        }

    Ok(())

@@ -443,10 +446,8 @@ struct OrgIdData {
#[get("/ciphers/organization-details?<data..>")]
fn get_org_details(data: Form<OrgIdData>, headers: Headers, conn: DbConn) -> Json<Value> {
    let ciphers = Cipher::find_by_org(&data.organization_id, &conn);
-    let ciphers_json: Vec<Value> = ciphers
-        .iter()
-        .map(|c| c.to_json(&headers.host, &headers.user.uuid, &conn))
-        .collect();
+    let ciphers_json: Vec<Value> =
+        ciphers.iter().map(|c| c.to_json(&headers.host, &headers.user.uuid, &conn)).collect();

    Json(json!({
      "Data": ciphers_json,

@@ -544,9 +545,7 @@ fn send_invite(org_id: String, data: JsonUpcase<InviteData>, headers: AdminHeade
                    match Collection::find_by_uuid_and_org(&col.Id, &org_id, &conn) {
                        None => err!("Collection not found in Organization"),
                        Some(collection) => {
-                            CollectionUser::save(&user.uuid, &collection.uuid,
-                                col.ReadOnly, col.HidePasswords,
-                                &conn)?;
+                            CollectionUser::save(&user.uuid, &collection.uuid, col.ReadOnly, col.HidePasswords, &conn)?;
                        }
                    }
                }

@@ -814,9 +813,13 @@ fn edit_user(
                match Collection::find_by_uuid_and_org(&col.Id, &org_id, &conn) {
                    None => err!("Collection not found in Organization"),
                    Some(collection) => {
-                        CollectionUser::save(&user_to_edit.user_uuid, &collection.uuid,
-                            col.ReadOnly, col.HidePasswords,
-                            &conn)?;
+                        CollectionUser::save(
+                            &user_to_edit.user_uuid,
+                            &collection.uuid,
+                            col.ReadOnly,
+                            col.HidePasswords,
+                            &conn,
+                        )?;
                    }
                }
            }

@@ -912,16 +915,8 @@ fn post_org_import(
        .into_iter()
        .map(|cipher_data| {
            let mut cipher = Cipher::new(cipher_data.Type, cipher_data.Name.clone());
-            update_cipher_from_data(
-                &mut cipher,
-                cipher_data,
-                &headers,
-                false,
-                &conn,
-                &nt,
-                UpdateType::CipherCreate,
-            )
-            .ok();
+            update_cipher_from_data(&mut cipher, cipher_data, &headers, false, &conn, &nt, UpdateType::CipherCreate)
+                .ok();
            cipher
        })
        .collect();

@@ -1002,7 +997,13 @@ struct PolicyData {
}

#[put("/organizations/<org_id>/policies/<pol_type>", data = "<data>")]
-fn put_policy(org_id: String, pol_type: i32, data: Json<PolicyData>, _headers: AdminHeaders, conn: DbConn) -> JsonResult {
+fn put_policy(
+    org_id: String,
+    pol_type: i32,
+    data: Json<PolicyData>,
+    _headers: AdminHeaders,
+    conn: DbConn,
+) -> JsonResult {
    let data: PolicyData = data.into_inner();

    let pol_type_enum = match OrgPolicyType::from_i32(pol_type) {

@@ -1160,8 +1161,7 @@ fn import(org_id: String, data: JsonUpcase<OrgImportData>, headers: Headers, con

        // If user is not part of the organization, but it exists
        } else if UserOrganization::find_by_email_and_org(&user_data.Email, &org_id, &conn).is_none() {
-            if let Some (user) = User::find_by_mail(&user_data.Email, &conn) {
+            if let Some(user) = User::find_by_mail(&user_data.Email, &conn) {

                let user_org_status = if CONFIG.mail_enabled() {
                    UserOrgStatus::Invited as i32
                } else {

@@ -1197,7 +1197,7 @@ fn import(org_id: String, data: JsonUpcase<OrgImportData>, headers: Headers, con
    // If this flag is enabled, any user that isn't provided in the Users list will be removed (by default they will be kept unless they have Deleted == true)
    if data.OverwriteExisting {
        for user_org in UserOrganization::find_by_org_and_type(&org_id, UserOrgType::User as i32, &conn) {
-            if let Some (user_email) = User::find_by_uuid(&user_org.user_uuid, &conn).map(|u| u.email) {
+            if let Some(user_email) = User::find_by_uuid(&user_org.user_uuid, &conn).map(|u| u.email) {
                if !data.Users.iter().any(|u| u.Email == user_email) {
                    user_org.delete(&conn)?;
                }

@@ -16,15 +16,7 @@ use crate::{
const SEND_INACCESSIBLE_MSG: &str = "Send does not exist or is no longer available";

pub fn routes() -> Vec<rocket::Route> {
-    routes![
-        post_send,
-        post_send_file,
-        post_access,
-        post_access_file,
-        put_send,
-        delete_send,
-        put_remove_password
-    ]
+    routes![post_send, post_send_file, post_access, post_access_file, put_send, delete_send, put_remove_password]
}

pub fn purge_sends(pool: DbPool) {

@@ -171,13 +163,7 @@ fn post_send_file(data: Data, content_type: &ContentType, headers: Headers, conn
        None => err!("No model entry present"),
    };

-    let size = match data_entry
-        .data
-        .save()
-        .memory_threshold(0)
-        .size_limit(size_limit)
-        .with_path(&file_path)
-    {
+    let size = match data_entry.data.save().memory_threshold(0).size_limit(size_limit).with_path(&file_path) {
        SaveResult::Full(SavedData::File(_, size)) => size as i32,
        SaveResult::Full(other) => {
            std::fs::remove_file(&file_path).ok();

@@ -198,10 +184,7 @@ fn post_send_file(data: Data, content_type: &ContentType, headers: Headers, conn
    if let Some(o) = data_value.as_object_mut() {
        o.insert(String::from("Id"), Value::String(file_id));
        o.insert(String::from("Size"), Value::Number(size.into()));
-        o.insert(
-            String::from("SizeName"),
-            Value::String(crate::util::get_display_size(size)),
-        );
+        o.insert(String::from("SizeName"), Value::String(crate::util::get_display_size(size)));
    }
    send.data = serde_json::to_string(&data_value)?;

@@ -17,11 +17,7 @@ use crate::{
pub use crate::config::CONFIG;

pub fn routes() -> Vec<Route> {
-    routes![
-        generate_authenticator,
-        activate_authenticator,
-        activate_authenticator_put,
-    ]
+    routes![generate_authenticator, activate_authenticator, activate_authenticator_put,]
}

#[post("/two-factor/get-authenticator", data = "<data>")]

@@ -141,7 +137,7 @@ pub fn validate_totp_code(user_uuid: &str, totp_code: u64, secret: &str, ip: &Cl
    // The amount of steps back and forward in time
    // Also check if we need to disable time drifted TOTP codes.
    // If that is the case, we set the steps to 0 so only the current TOTP is valid.
-    let steps: i64 = if CONFIG.authenticator_disable_time_drift() { 0 } else { 1 };
+    let steps = !CONFIG.authenticator_disable_time_drift() as i64;

    for step in -steps..=steps {
        let time_step = current_timestamp / 30i64 + step;
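The steps change just above collapses the if/else into a cast from bool to i64: unary `!` binds tighter than `as`, so the expression yields 1 when drifted codes are allowed and 0 when they are disabled. A minimal standalone sketch showing the two forms agree (the function name is illustrative, not from the codebase):

fn steps_from_flag(disable_time_drift: bool) -> i64 {
    // `!disable_time_drift as i64` parses as `(!disable_time_drift) as i64`.
    !disable_time_drift as i64
}

fn main() {
    for flag in [false, true] {
        // The pre-change formulation, for comparison.
        let old: i64 = if flag { 0 } else { 1 };
        assert_eq!(steps_from_flag(flag), old);
    }
    println!("both forms agree");
}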
@@ -163,22 +159,11 @@ pub fn validate_totp_code(user_uuid: &str, totp_code: u64, secret: &str, ip: &Cl
            twofactor.save(&conn)?;
            return Ok(());
        } else if generated == totp_code && time_step <= twofactor.last_used as i64 {
-            warn!(
-                "This or a TOTP code within {} steps back and forward has already been used!",
-                steps
-            );
-            err!(format!(
-                "Invalid TOTP code! Server time: {} IP: {}",
-                current_time.format("%F %T UTC"),
-                ip.ip
-            ));
+            warn!("This or a TOTP code within {} steps back and forward has already been used!", steps);
+            err!(format!("Invalid TOTP code! Server time: {} IP: {}", current_time.format("%F %T UTC"), ip.ip));
        }
    }

    // Else no valide code received, deny access
-    err!(format!(
-        "Invalid TOTP code! Server time: {} IP: {}",
-        current_time.format("%F %T UTC"),
-        ip.ip
-    ));
+    err!(format!("Invalid TOTP code! Server time: {} IP: {}", current_time.format("%F %T UTC"), ip.ip));
}

@@ -12,6 +12,7 @@ use crate::{
        DbConn,
    },
    error::MapResult,
+    util::get_reqwest_client,
    CONFIG,
};

@@ -59,7 +60,11 @@ impl DuoData {
        ik.replace_range(digits.., replaced);
        sk.replace_range(digits.., replaced);

-        Self { host, ik, sk }
+        Self {
+            host,
+            ik,
+            sk,
+        }
    }
}

@@ -185,9 +190,7 @@ fn activate_duo_put(data: JsonUpcase<EnableDuoData>, headers: Headers, conn: DbC
}

fn duo_api_request(method: &str, path: &str, params: &str, data: &DuoData) -> EmptyResult {
-    const AGENT: &str = "bitwarden_rs:Duo/1.0 (Rust)";
+    use reqwest::{header, Method};

-    use reqwest::{blocking::Client, header::*, Method};
    use std::str::FromStr;

    // https://duo.com/docs/authapi#api-details

@@ -199,11 +202,13 @@ fn duo_api_request(method: &str, path: &str, params: &str, data: &DuoData) -> Em

    let m = Method::from_str(method).unwrap_or_default();

-    Client::new()
+    let client = get_reqwest_client();
+
+    client
        .request(m, &url)
        .basic_auth(username, Some(password))
-        .header(USER_AGENT, AGENT)
-        .header(DATE, date)
+        .header(header::USER_AGENT, "bitwarden_rs:Duo/1.0 (Rust)")
+        .header(header::DATE, date)
        .send()?
        .error_for_status()?;

@@ -125,11 +125,7 @@ fn send_email(data: JsonUpcase<SendEmailData>, headers: Headers, conn: DbConn) -
    let twofactor_data = EmailTokenData::new(data.Email, generated_token);

    // Uses EmailVerificationChallenge as type to show that it's not verified yet.
-    let twofactor = TwoFactor::new(
-        user.uuid,
-        TwoFactorType::EmailVerificationChallenge,
-        twofactor_data.to_json(),
-    );
+    let twofactor = TwoFactor::new(user.uuid, TwoFactorType::EmailVerificationChallenge, twofactor_data.to_json());
    twofactor.save(&conn)?;

    mail::send_token(&twofactor_data.email, &twofactor_data.last_token.map_res("Token is empty")?)?;

@@ -186,7 +182,8 @@ fn email(data: JsonUpcase<EmailData>, headers: Headers, conn: DbConn) -> JsonRes
/// Validate the email code when used as TwoFactor token mechanism
pub fn validate_email_code_str(user_uuid: &str, token: &str, data: &str, conn: &DbConn) -> EmptyResult {
    let mut email_data = EmailTokenData::from_json(&data)?;
-    let mut twofactor = TwoFactor::find_by_user_and_type(&user_uuid, TwoFactorType::Email as i32, &conn).map_res("Two factor not found")?;
+    let mut twofactor = TwoFactor::find_by_user_and_type(&user_uuid, TwoFactorType::Email as i32, &conn)
+        .map_res("Two factor not found")?;
    let issued_token = match &email_data.last_token {
        Some(t) => t,
        _ => err!("No token available"),

@@ -21,13 +21,7 @@ pub mod u2f;
pub mod yubikey;

pub fn routes() -> Vec<Route> {
-    let mut routes = routes![
-        get_twofactor,
-        get_recover,
-        recover,
-        disable_twofactor,
-        disable_twofactor_put,
-    ];
+    let mut routes = routes![get_twofactor, get_recover, recover, disable_twofactor, disable_twofactor_put,];

    routes.append(&mut authenticator::routes());
    routes.append(&mut duo::routes());

@@ -28,13 +28,7 @@ static APP_ID: Lazy<String> = Lazy::new(|| format!("{}/app-id.json", &CONFIG.dom
static U2F: Lazy<U2f> = Lazy::new(|| U2f::new(APP_ID.clone()));

pub fn routes() -> Vec<Route> {
-    routes![
-        generate_u2f,
-        generate_u2f_challenge,
-        activate_u2f,
-        activate_u2f_put,
-        delete_u2f,
-    ]
+    routes![generate_u2f, generate_u2f_challenge, activate_u2f, activate_u2f_put, delete_u2f,]
}

#[post("/two-factor/get-u2f", data = "<data>")]

@@ -161,10 +155,7 @@ fn activate_u2f(data: JsonUpcase<EnableU2FData>, headers: Headers, conn: DbConn)

    let response: RegisterResponseCopy = serde_json::from_str(&data.DeviceResponse)?;

-    let error_code = response
-        .error_code
-        .clone()
-        .map_or("0".into(), NumberOrString::into_string);
+    let error_code = response.error_code.clone().map_or("0".into(), NumberOrString::into_string);

    if error_code != "0" {
        err!("Error registering U2F token")

@@ -300,20 +291,13 @@ fn _old_parse_registrations(registations: &str) -> Vec<Registration> {

    let regs: Vec<Value> = serde_json::from_str(registations).expect("Can't parse Registration data");

-    regs.into_iter()
-        .map(|r| serde_json::from_value(r).unwrap())
-        .map(|Helper(r)| r)
-        .collect()
+    regs.into_iter().map(|r| serde_json::from_value(r).unwrap()).map(|Helper(r)| r).collect()
}

pub fn generate_u2f_login(user_uuid: &str, conn: &DbConn) -> ApiResult<U2fSignRequest> {
    let challenge = _create_u2f_challenge(user_uuid, TwoFactorType::U2fLoginChallenge, conn);

-    let registrations: Vec<_> = get_u2f_registrations(user_uuid, conn)?
-        .1
-        .into_iter()
-        .map(|r| r.reg)
-        .collect();
+    let registrations: Vec<_> = get_u2f_registrations(user_uuid, conn)?.1.into_iter().map(|r| r.reg).collect();

    if registrations.is_empty() {
        err!("No U2F devices registered")

@@ -12,7 +12,11 @@ use regex::Regex;
use reqwest::{blocking::Client, blocking::Response, header, Url};
use rocket::{http::ContentType, http::Cookie, response::Content, Route};

-use crate::{error::Error, util::Cached, CONFIG};
+use crate::{
+    error::Error,
+    util::{get_reqwest_client_builder, Cached},
+    CONFIG,
+};

pub fn routes() -> Vec<Route> {
    routes![icon]

@@ -25,14 +29,19 @@ static CLIENT: Lazy<Client> = Lazy::new(|| {
    default_headers.insert(header::ACCEPT_LANGUAGE, header::HeaderValue::from_static("en-US,en;q=0.8"));
    default_headers.insert(header::CACHE_CONTROL, header::HeaderValue::from_static("no-cache"));
    default_headers.insert(header::PRAGMA, header::HeaderValue::from_static("no-cache"));
-    default_headers.insert(header::ACCEPT, header::HeaderValue::from_static("text/html,application/xhtml+xml,application/xml; q=0.9,image/webp,image/apng,*/*;q=0.8"));
+    default_headers.insert(
+        header::ACCEPT,
+        header::HeaderValue::from_static(
+            "text/html,application/xhtml+xml,application/xml; q=0.9,image/webp,image/apng,*/*;q=0.8",
+        ),
+    );

    // Reuse the client between requests
-    Client::builder()
+    get_reqwest_client_builder()
        .timeout(Duration::from_secs(CONFIG.icon_download_timeout()))
        .default_headers(default_headers)
        .build()
-        .unwrap()
+        .expect("Failed to build icon client")
});

// Build Regex only once since this takes a lot of time.
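The icon fetcher above keeps a single lazily-initialised blocking client for all requests. A trimmed-down sketch of that pattern — the timeout value and header set here are illustrative only, and the Lazy type is assumed to come from once_cell as in the surrounding code:

use std::time::Duration;

use once_cell::sync::Lazy;
use reqwest::{blocking::Client, header};

// One shared client, built on first use and reused for every icon download.
static CLIENT: Lazy<Client> = Lazy::new(|| {
    let mut default_headers = header::HeaderMap::new();
    default_headers.insert(header::ACCEPT_LANGUAGE, header::HeaderValue::from_static("en-US,en;q=0.8"));

    Client::builder()
        .timeout(Duration::from_secs(10)) // illustrative; the real value comes from CONFIG
        .default_headers(default_headers)
        .build()
        .expect("Failed to build icon client")
});

fn main() {
    // Connection pooling happens inside the shared client, so repeated calls stay cheap.
    match CLIENT.get("https://example.com/favicon.ico").send() {
        Ok(res) => println!("status: {}", res.status()),
        Err(e) => println!("request failed: {}", e),
    }
}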
@@ -49,13 +58,16 @@ fn icon(domain: String) -> Cached<Content<Vec<u8>>> {

    if !is_valid_domain(&domain) {
        warn!("Invalid domain: {}", domain);
-        return Cached::ttl(Content(ContentType::new("image", "png"), FALLBACK_ICON.to_vec()), CONFIG.icon_cache_negttl());
+        return Cached::ttl(
+            Content(ContentType::new("image", "png"), FALLBACK_ICON.to_vec()),
+            CONFIG.icon_cache_negttl(),
+        );
    }

    match get_icon(&domain) {
        Some((icon, icon_type)) => {
            Cached::ttl(Content(ContentType::new("image", icon_type), icon), CONFIG.icon_cache_ttl())
-        },
+        }
        _ => Cached::ttl(Content(ContentType::new("image", "png"), FALLBACK_ICON.to_vec()), CONFIG.icon_cache_negttl()),
    }
}

@@ -77,7 +89,10 @@ fn is_valid_domain(domain: &str) -> bool {
        || domain.starts_with('-')
        || domain.ends_with('-')
    {
-        debug!("Domain validation error: '{}' is either empty, contains '..', starts with an '.', starts or ends with a '-'", domain);
+        debug!(
+            "Domain validation error: '{}' is either empty, contains '..', starts with an '.', starts or ends with a '-'",
+            domain
+        );
        return false;
    } else if domain.len() > 255 {
        debug!("Domain validation error: '{}' exceeds 255 characters", domain);

@@ -255,7 +270,7 @@ fn get_icon(domain: &str) -> Option<(Vec<u8>, String)> {
    }

    if let Some(icon) = get_cached_icon(&path) {
        let icon_type = match get_icon_type(&icon) {
            Some(x) => x,
            _ => "x-icon",
        };

@@ -338,12 +353,20 @@ struct Icon {

impl Icon {
    const fn new(priority: u8, href: String) -> Self {
-        Self { href, priority }
+        Self {
+            href,
+            priority,
+        }
    }
}

fn get_favicons_node(node: &std::rc::Rc<markup5ever_rcdom::Node>, icons: &mut Vec<Icon>, url: &Url) {
-    if let markup5ever_rcdom::NodeData::Element { name, attrs, .. } = &node.data {
+    if let markup5ever_rcdom::NodeData::Element {
+        name,
+        attrs,
+        ..
+    } = &node.data
+    {
        if name.local.as_ref() == "link" {
            let mut has_rel = false;
            let mut href = None;

@@ -354,7 +377,8 @@ fn get_favicons_node(node: &std::rc::Rc<markup5ever_rcdom::Node>, icons: &mut Ve
            let attr_name = attr.name.local.as_ref();
            let attr_value = attr.value.as_ref();

-            if attr_name == "rel" && ICON_REL_REGEX.is_match(attr_value) && !ICON_REL_BLACKLIST.is_match(attr_value) {
+            if attr_name == "rel" && ICON_REL_REGEX.is_match(attr_value) && !ICON_REL_BLACKLIST.is_match(attr_value)
+            {
                has_rel = true;
            } else if attr_name == "href" {
                href = Some(attr_value);

@@ -683,6 +707,6 @@ fn get_icon_type(bytes: &[u8]) -> Option<&'static str> {
        [82, 73, 70, 70, ..] => Some("webp"),
        [255, 216, 255, ..] => Some("jpeg"),
        [66, 77, ..] => Some("bmp"),
-        _ => None
+        _ => None,
    }
}

@@ -88,34 +88,28 @@ fn _password_login(data: ConnectData, conn: DbConn, ip: &ClientIp) -> JsonResult
    let username = data.username.as_ref().unwrap();
    let user = match User::find_by_mail(username, &conn) {
        Some(user) => user,
-        None => err!(
-            "Username or password is incorrect. Try again",
-            format!("IP: {}. Username: {}.", ip.ip, username)
-        ),
+        None => err!("Username or password is incorrect. Try again", format!("IP: {}. Username: {}.", ip.ip, username)),
    };

    // Check password
    let password = data.password.as_ref().unwrap();
    if !user.check_valid_password(password) {
-        err!(
-            "Username or password is incorrect. Try again",
-            format!("IP: {}. Username: {}.", ip.ip, username)
-        )
+        err!("Username or password is incorrect. Try again", format!("IP: {}. Username: {}.", ip.ip, username))
    }

    // Check if the user is disabled
    if !user.enabled {
-        err!(
-            "This user has been disabled",
-            format!("IP: {}. Username: {}.", ip.ip, username)
-        )
+        err!("This user has been disabled", format!("IP: {}. Username: {}.", ip.ip, username))
    }

    let now = Local::now();

    if user.verified_at.is_none() && CONFIG.mail_enabled() && CONFIG.signups_verify() {
        let now = now.naive_utc();
-        if user.last_verifying_at.is_none() || now.signed_duration_since(user.last_verifying_at.unwrap()).num_seconds() > CONFIG.signups_verify_resend_time() as i64 {
+        if user.last_verifying_at.is_none()
+            || now.signed_duration_since(user.last_verifying_at.unwrap()).num_seconds()
+                > CONFIG.signups_verify_resend_time() as i64
+        {
            let resend_limit = CONFIG.signups_verify_resend_limit() as i32;
            if resend_limit == 0 || user.login_verify_count < resend_limit {
                // We want to send another email verification if we require signups to verify

@@ -135,10 +129,7 @@ fn _password_login(data: ConnectData, conn: DbConn, ip: &ClientIp) -> JsonResult
        }

        // We still want the login to fail until they actually verified the email address
-        err!(
-            "Please verify your email before trying again.",
-            format!("IP: {}. Username: {}.", ip.ip, username)
-        )
+        err!("Please verify your email before trying again.", format!("IP: {}. Username: {}.", ip.ip, username))
    }

    let (mut device, new_device) = get_device(&data, &conn, &user);

@@ -236,9 +227,7 @@ fn twofactor_auth(
        None => err_json!(_json_err_twofactor(&twofactor_ids, user_uuid, conn)?, "2FA token not provided"),
    };

-    let selected_twofactor = twofactors
-        .into_iter()
-        .find(|tf| tf.atype == selected_id && tf.enabled);
+    let selected_twofactor = twofactors.into_iter().find(|tf| tf.atype == selected_id && tf.enabled);

    use crate::api::core::two_factor as _tf;
    use crate::crypto::ct_eq;

@@ -247,18 +236,26 @@ fn twofactor_auth(
    let mut remember = data.two_factor_remember.unwrap_or(0);

    match TwoFactorType::from_i32(selected_id) {
-        Some(TwoFactorType::Authenticator) => _tf::authenticator::validate_totp_code_str(user_uuid, twofactor_code, &selected_data?, ip, conn)?,
+        Some(TwoFactorType::Authenticator) => {
+            _tf::authenticator::validate_totp_code_str(user_uuid, twofactor_code, &selected_data?, ip, conn)?
+        }
        Some(TwoFactorType::U2f) => _tf::u2f::validate_u2f_login(user_uuid, twofactor_code, conn)?,
        Some(TwoFactorType::YubiKey) => _tf::yubikey::validate_yubikey_login(twofactor_code, &selected_data?)?,
-        Some(TwoFactorType::Duo) => _tf::duo::validate_duo_login(data.username.as_ref().unwrap(), twofactor_code, conn)?,
-        Some(TwoFactorType::Email) => _tf::email::validate_email_code_str(user_uuid, twofactor_code, &selected_data?, conn)?,
+        Some(TwoFactorType::Duo) => {
+            _tf::duo::validate_duo_login(data.username.as_ref().unwrap(), twofactor_code, conn)?
+        }
+        Some(TwoFactorType::Email) => {
+            _tf::email::validate_email_code_str(user_uuid, twofactor_code, &selected_data?, conn)?
+        }

        Some(TwoFactorType::Remember) => {
            match device.twofactor_remember {
                Some(ref code) if !CONFIG.disable_2fa_remember() && ct_eq(code, twofactor_code) => {
                    remember = 1; // Make sure we also return the token here, otherwise it will only remember the first time
                }
-                _ => err_json!(_json_err_twofactor(&twofactor_ids, user_uuid, conn)?, "2FA Remember token not provided"),
+                _ => {
+                    err_json!(_json_err_twofactor(&twofactor_ids, user_uuid, conn)?, "2FA Remember token not provided")
+                }
            }
        }
        _ => err!("Invalid two factor provider"),

@@ -55,9 +55,9 @@ impl NumberOrString {
        use std::num::ParseIntError as PIE;
        match self {
            NumberOrString::Number(n) => Ok(n),
-            NumberOrString::String(s) => s
-                .parse()
-                .map_err(|e: PIE| crate::Error::new("Can't convert to number", e.to_string())),
+            NumberOrString::String(s) => {
+                s.parse().map_err(|e: PIE| crate::Error::new("Can't convert to number", e.to_string()))
+            }
        }
    }
}
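The NumberOrString hunk above only re-braces the match arm; the conversion itself is the usual parse-then-map_err shape. A tiny self-contained illustration — the error type here is a plain String rather than the crate's Error type:

fn to_i32(s: &str) -> Result<i32, String> {
    // str::parse infers the target type from the return type; map_err converts
    // std::num::ParseIntError into the caller's error representation.
    s.parse().map_err(|e: std::num::ParseIntError| format!("Can't convert to number: {}", e))
}

fn main() {
    assert_eq!(to_i32("42"), Ok(42));
    assert!(to_i32("forty-two").is_err());
}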
@@ -4,12 +4,7 @@ use rocket::Route;
use rocket_contrib::json::Json;
use serde_json::Value as JsonValue;

-use crate::{
-    api::EmptyResult,
-    auth::Headers,
-    db::DbConn,
-    Error, CONFIG,
-};
+use crate::{api::EmptyResult, auth::Headers, db::DbConn, Error, CONFIG};

pub fn routes() -> Vec<Route> {
    routes![negotiate, websockets_err]

@@ -19,12 +14,16 @@ static SHOW_WEBSOCKETS_MSG: AtomicBool = AtomicBool::new(true);

#[get("/hub")]
fn websockets_err() -> EmptyResult {
-    if CONFIG.websocket_enabled() && SHOW_WEBSOCKETS_MSG.compare_exchange(true, false, Ordering::Relaxed, Ordering::Relaxed).is_ok() {
-        err!("
+    if CONFIG.websocket_enabled()
+        && SHOW_WEBSOCKETS_MSG.compare_exchange(true, false, Ordering::Relaxed, Ordering::Relaxed).is_ok()
+    {
+        err!(
+            "
###########################################################
'/notifications/hub' should be proxied to the websocket server or notifications won't work.
Go to the Wiki for more info, or disable WebSockets setting WEBSOCKET_ENABLED=false.
-###########################################################################################\n")
+###########################################################################################\n"
+        )
    } else {
        Err(Error::empty())
    }

@@ -204,9 +203,7 @@ impl Handler for WsHandler {
        let handler_insert = self.out.clone();
        let handler_update = self.out.clone();

-        self.users
-            .map
-            .upsert(user_uuid, || vec![handler_insert], |ref mut v| v.push(handler_update));
+        self.users.map.upsert(user_uuid, || vec![handler_insert], |ref mut v| v.push(handler_update));

        // Schedule a ping to keep the connection alive
        self.out.timeout(PING_MS, PING)

@@ -216,7 +213,11 @@ impl Handler for WsHandler {
        if let Message::Text(text) = msg.clone() {
            let json = &text[..text.len() - 1]; // Remove last char

-            if let Ok(InitialMessage { protocol, version }) = from_str::<InitialMessage>(json) {
+            if let Ok(InitialMessage {
+                protocol,
+                version,
+            }) = from_str::<InitialMessage>(json)
+            {
                if &protocol == "messagepack" && version == 1 {
                    return self.out.send(&INITIAL_RESPONSE[..]); // Respond to initial message
                }

@@ -295,10 +296,7 @@ impl WebSocketUsers {
    // NOTE: The last modified date needs to be updated before calling these methods
    pub fn send_user_update(&self, ut: UpdateType, user: &User) {
        let data = create_update(
-            vec![
-                ("UserId".into(), user.uuid.clone().into()),
-                ("Date".into(), serialize_date(user.updated_at)),
-            ],
+            vec![("UserId".into(), user.uuid.clone().into()), ("Date".into(), serialize_date(user.updated_at))],
            ut,
        );

@@ -83,11 +83,15 @@ fn static_files(filename: String) -> Result<Content<&'static [u8]>, Error> {
        "hibp.png" => Ok(Content(ContentType::PNG, include_bytes!("../static/images/hibp.png"))),

        "bootstrap.css" => Ok(Content(ContentType::CSS, include_bytes!("../static/scripts/bootstrap.css"))),
-        "bootstrap-native.js" => Ok(Content(ContentType::JavaScript, include_bytes!("../static/scripts/bootstrap-native.js"))),
+        "bootstrap-native.js" => {
+            Ok(Content(ContentType::JavaScript, include_bytes!("../static/scripts/bootstrap-native.js")))
+        }
        "identicon.js" => Ok(Content(ContentType::JavaScript, include_bytes!("../static/scripts/identicon.js"))),
        "datatables.js" => Ok(Content(ContentType::JavaScript, include_bytes!("../static/scripts/datatables.js"))),
        "datatables.css" => Ok(Content(ContentType::CSS, include_bytes!("../static/scripts/datatables.css"))),
-        "jquery-3.5.1.slim.js" => Ok(Content(ContentType::JavaScript, include_bytes!("../static/scripts/jquery-3.5.1.slim.js"))),
+        "jquery-3.5.1.slim.js" => {
+            Ok(Content(ContentType::JavaScript, include_bytes!("../static/scripts/jquery-3.5.1.slim.js")))
+        }
        _ => err!(format!("Static file not found: {}", filename)),
    }
}

33 src/auth.rs

@@ -223,10 +223,9 @@ use crate::db::{
};

pub struct Host {
-    pub host: String
+    pub host: String,
}

impl<'a, 'r> FromRequest<'a, 'r> for Host {
    type Error = &'static str;

@@ -261,7 +260,9 @@ impl<'a, 'r> FromRequest<'a, 'r> for Host {
            format!("{}://{}", protocol, host)
        };

-        Outcome::Success(Host { host })
+        Outcome::Success(Host {
+            host,
+        })
    }
}

@@ -317,10 +318,8 @@ impl<'a, 'r> FromRequest<'a, 'r> for Headers {
        };

        if user.security_stamp != claims.sstamp {
-            if let Some(stamp_exception) = user
-                .stamp_exception
-                .as_deref()
-                .and_then(|s| serde_json::from_str::<UserStampException>(s).ok())
+            if let Some(stamp_exception) =
+                user.stamp_exception.as_deref().and_then(|s| serde_json::from_str::<UserStampException>(s).ok())
            {
                let current_route = match request.route().and_then(|r| r.name) {
                    Some(name) => name,

@@ -338,7 +337,11 @@ impl<'a, 'r> FromRequest<'a, 'r> for Headers {
            }
        }

-        Outcome::Success(Headers { host, device, user })
+        Outcome::Success(Headers {
+            host,
+            device,
+            user,
+        })
    }
}

@@ -506,7 +509,11 @@ impl<'a, 'r> FromRequest<'a, 'r> for ManagerHeaders {
        };

        if !headers.org_user.has_full_access() {
-            match CollectionUser::find_by_collection_and_user(&col_id, &headers.org_user.user_uuid, &conn) {
+            match CollectionUser::find_by_collection_and_user(
+                &col_id,
+                &headers.org_user.user_uuid,
+                &conn,
+            ) {
                Some(_) => (),
                None => err_handler!("The current user isn't a manager for this collection"),
            }

@@ -636,10 +643,10 @@ impl<'a, 'r> FromRequest<'a, 'r> for ClientIp {
            None
        };

-        let ip = ip
-            .or_else(|| req.remote().map(|r| r.ip()))
-            .unwrap_or_else(|| "0.0.0.0".parse().unwrap());
+        let ip = ip.or_else(|| req.remote().map(|r| r.ip())).unwrap_or_else(|| "0.0.0.0".parse().unwrap());

-        Outcome::Success(ClientIp { ip })
+        Outcome::Success(ClientIp {
+            ip,
+        })
    }
}
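The ClientIp guard above resolves the address with a pair of Option combinators: take the header-derived address if present, else the socket peer address, else fall back to 0.0.0.0. A small standalone sketch of that chain — the inputs here are stand-ins, not Rocket's request API:

use std::net::IpAddr;

fn resolve_ip(forwarded: Option<IpAddr>, peer: Option<IpAddr>) -> IpAddr {
    // Prefer the forwarded address, then the transport peer, then a neutral fallback.
    forwarded.or_else(|| peer).unwrap_or_else(|| "0.0.0.0".parse().unwrap())
}

fn main() {
    let peer: Option<IpAddr> = "203.0.113.7".parse().ok();
    assert_eq!(resolve_ip(None, peer), "203.0.113.7".parse::<IpAddr>().unwrap());
    assert_eq!(resolve_ip(None, None), "0.0.0.0".parse::<IpAddr>().unwrap());
}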
@ -527,10 +527,7 @@ fn validate_config(cfg: &ConfigItems) -> Result<(), Error> {
|
||||||
|
|
||||||
let limit = 256;
|
let limit = 256;
|
||||||
if cfg.database_max_conns < 1 || cfg.database_max_conns > limit {
|
if cfg.database_max_conns < 1 || cfg.database_max_conns > limit {
|
||||||
err!(format!(
|
err!(format!("`DATABASE_MAX_CONNS` contains an invalid value. Ensure it is between 1 and {}.", limit,));
|
||||||
"`DATABASE_MAX_CONNS` contains an invalid value. Ensure it is between 1 and {}.",
|
|
||||||
limit,
|
|
||||||
));
|
|
||||||
}
|
}
|
||||||
|
|
||||||
let dom = cfg.domain.to_lowercase();
|
let dom = cfg.domain.to_lowercase();
|
||||||
|
@ -872,9 +869,7 @@ fn case_helper<'reg, 'rc>(
|
||||||
rc: &mut RenderContext<'reg, 'rc>,
|
rc: &mut RenderContext<'reg, 'rc>,
|
||||||
out: &mut dyn Output,
|
out: &mut dyn Output,
|
||||||
) -> HelperResult {
|
) -> HelperResult {
|
||||||
let param = h
|
let param = h.param(0).ok_or_else(|| RenderError::new("Param not found for helper \"case\""))?;
|
||||||
.param(0)
|
|
||||||
.ok_or_else(|| RenderError::new("Param not found for helper \"case\""))?;
|
|
||||||
let value = param.value().clone();
|
let value = param.value().clone();
|
||||||
|
|
||||||
if h.params().iter().skip(1).any(|x| x.value() == &value) {
|
if h.params().iter().skip(1).any(|x| x.value() == &value) {
|
||||||
|
@ -891,21 +886,15 @@ fn js_escape_helper<'reg, 'rc>(
|
||||||
_rc: &mut RenderContext<'reg, 'rc>,
|
_rc: &mut RenderContext<'reg, 'rc>,
|
||||||
out: &mut dyn Output,
|
out: &mut dyn Output,
|
||||||
) -> HelperResult {
|
) -> HelperResult {
|
||||||
let param = h
|
let param = h.param(0).ok_or_else(|| RenderError::new("Param not found for helper \"js_escape\""))?;
|
||||||
.param(0)
|
|
||||||
.ok_or_else(|| RenderError::new("Param not found for helper \"js_escape\""))?;
|
|
||||||
|
|
||||||
let no_quote = h
|
let no_quote = h.param(1).is_some();
|
||||||
.param(1)
|
|
||||||
.is_some();
|
|
||||||
|
|
||||||
let value = param
|
let value =
|
||||||
.value()
|
param.value().as_str().ok_or_else(|| RenderError::new("Param for helper \"js_escape\" is not a String"))?;
|
||||||
.as_str()
|
|
||||||
.ok_or_else(|| RenderError::new("Param for helper \"js_escape\" is not a String"))?;
|
|
||||||
|
|
||||||
let mut escaped_value = value.replace('\\', "").replace('\'', "\\x22").replace('\"', "\\x27");
|
let mut escaped_value = value.replace('\\', "").replace('\'', "\\x22").replace('\"', "\\x27");
|
||||||
if ! no_quote {
|
if !no_quote {
|
||||||
escaped_value = format!(""{}"", escaped_value);
|
escaped_value = format!(""{}"", escaped_value);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
|
@ -47,9 +47,7 @@ pub fn get_random_64() -> Vec<u8> {
|
||||||
pub fn get_random(mut array: Vec<u8>) -> Vec<u8> {
|
pub fn get_random(mut array: Vec<u8>) -> Vec<u8> {
|
||||||
use ring::rand::{SecureRandom, SystemRandom};
|
use ring::rand::{SecureRandom, SystemRandom};
|
||||||
|
|
||||||
SystemRandom::new()
|
SystemRandom::new().fill(&mut array).expect("Error generating random values");
|
||||||
.fill(&mut array)
|
|
||||||
.expect("Error generating random values");
|
|
||||||
|
|
||||||
array
|
array
|
||||||
}
|
}
|
||||||
|
|
|
@ -23,7 +23,6 @@ pub mod __mysql_schema;
|
||||||
#[path = "schemas/postgresql/schema.rs"]
|
src/db/mod.rs

 #[path = "schemas/postgresql/schema.rs"]
 pub mod __postgresql_schema;

 // This is used to generate the main DbConn and DbPool enums, which contain one variant for each database supported
 macro_rules! generate_connections {
     ( $( $name:ident: $ty:ty ),+ ) => {

@@ -108,7 +107,6 @@ impl DbConnType {
     }
 }

-
 #[macro_export]
 macro_rules! db_run {
     // Same for all dbs

@@ -154,7 +152,6 @@ macro_rules! db_run {
     };
 }

-
 pub trait FromDb {
     type Output;
     #[allow(clippy::wrong_self_convention)]

@@ -239,7 +236,6 @@ pub fn backup_database(conn: &DbConn) -> Result<(), Error> {
     Ok(())
 }

-
 /// Get the SQL Server version
 pub fn get_sql_server_version(conn: &DbConn) -> String {
     db_run! {@raw conn:

@@ -292,8 +288,7 @@ mod sqlite_migrations {

         use diesel::{Connection, RunQueryDsl};
         // Make sure the database is up to date (create if it doesn't exist, or run the migrations)
-        let connection =
-            diesel::sqlite::SqliteConnection::establish(&crate::CONFIG.database_url())?;
+        let connection = diesel::sqlite::SqliteConnection::establish(&crate::CONFIG.database_url())?;
         // Disable Foreign Key Checks during migration

         // Scoped to a connection.

@@ -303,9 +298,7 @@ mod sqlite_migrations {

         // Turn on WAL in SQLite
         if crate::CONFIG.enable_db_wal() {
-            diesel::sql_query("PRAGMA journal_mode=wal")
-                .execute(&connection)
-                .expect("Failed to turn on WAL");
+            diesel::sql_query("PRAGMA journal_mode=wal").execute(&connection).expect("Failed to turn on WAL");
         }

         embedded_migrations::run_with_output(&connection, &mut std::io::stdout())?;

@@ -321,8 +314,7 @@ mod mysql_migrations {
     pub fn run_migrations() -> Result<(), super::Error> {
         use diesel::{Connection, RunQueryDsl};
         // Make sure the database is up to date (create if it doesn't exist, or run the migrations)
-        let connection =
-            diesel::mysql::MysqlConnection::establish(&crate::CONFIG.database_url())?;
+        let connection = diesel::mysql::MysqlConnection::establish(&crate::CONFIG.database_url())?;
         // Disable Foreign Key Checks during migration

         // Scoped to a connection/session.

@@ -343,8 +335,7 @@ mod postgresql_migrations {
     pub fn run_migrations() -> Result<(), super::Error> {
         use diesel::{Connection, RunQueryDsl};
         // Make sure the database is up to date (create if it doesn't exist, or run the migrations)
-        let connection =
-            diesel::pg::PgConnection::establish(&crate::CONFIG.database_url())?;
+        let connection = diesel::pg::PgConnection::establish(&crate::CONFIG.database_url())?;
         // Disable Foreign Key Checks during migration

         // FIXME: Per https://www.postgresql.org/docs/12/sql-set-constraints.html,
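Note (illustrative, not part of the diff): the comment above says generate_connections! produces the main DbConn/DbPool enums with one variant per supported database, and db_run! then dispatches a block of code to the matching backend. A minimal, self-contained sketch of that dispatch idea, using hand-written stand-ins and hypothetical names rather than the real macro expansion:

    // Hand-written stand-in for the kind of enum generate_connections! expands to.
    // The String payloads stand in for pooled diesel connections.
    enum DbConn {
        Sqlite(String),
        Mysql(String),
        Postgresql(String),
    }

    // db_run! lets callers write one block per backend family; conceptually it
    // expands to a match like this so each arm can use backend-specific types.
    fn describe(conn: &DbConn) -> &'static str {
        match conn {
            DbConn::Sqlite(_) | DbConn::Mysql(_) => "sqlite/mysql code path",
            DbConn::Postgresql(_) => "postgresql code path",
        }
    }

    fn main() {
        let conn = DbConn::Sqlite("file:demo.db".to_string());
        println!("{}", describe(&conn));
    }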
src/db/models/attachment.rs

@@ -59,7 +59,6 @@ use crate::error::MapResult;

 /// Database methods
 impl Attachment {
-
     pub fn save(&self, conn: &DbConn) -> EmptyResult {
         db_run! { conn:
             sqlite, mysql {
src/db/models/cipher.rs

@@ -4,14 +4,7 @@ use serde_json::Value;
 use crate::CONFIG;

 use super::{
-    Attachment,
-    CollectionCipher,
-    Favorite,
-    FolderCipher,
-    Organization,
-    User,
-    UserOrgStatus,
-    UserOrgType,
+    Attachment, CollectionCipher, Favorite, FolderCipher, Organization, User, UserOrgStatus, UserOrgType,
     UserOrganization,
 };

@@ -93,16 +86,16 @@ impl Cipher {
         };

         let fields_json = self.fields.as_ref().and_then(|s| serde_json::from_str(s).ok()).unwrap_or(Value::Null);
-        let password_history_json = self.password_history.as_ref().and_then(|s| serde_json::from_str(s).ok()).unwrap_or(Value::Null);
+        let password_history_json =
+            self.password_history.as_ref().and_then(|s| serde_json::from_str(s).ok()).unwrap_or(Value::Null);

-        let (read_only, hide_passwords) =
-            match self.get_access_restrictions(&user_uuid, conn) {
-                Some((ro, hp)) => (ro, hp),
-                None => {
-                    error!("Cipher ownership assertion failure");
-                    (true, true)
-                },
-            };
+        let (read_only, hide_passwords) = match self.get_access_restrictions(&user_uuid, conn) {
+            Some((ro, hp)) => (ro, hp),
+            None => {
+                error!("Cipher ownership assertion failure");
+                (true, true)
+            }
+        };

         // Get the type_data or a default to an empty json object '{}'.
         // If not passing an empty object, mobile clients will crash.

@@ -197,12 +190,10 @@ impl Cipher {
             None => {
                 // Belongs to Organization, need to update affected users
                 if let Some(ref org_uuid) = self.organization_uuid {
-                    UserOrganization::find_by_cipher_and_org(&self.uuid, &org_uuid, conn)
-                        .iter()
-                        .for_each(|user_org| {
-                            User::update_uuid_revision(&user_org.user_uuid, conn);
-                            user_uuids.push(user_org.user_uuid.clone())
-                        });
+                    UserOrganization::find_by_cipher_and_org(&self.uuid, &org_uuid, conn).iter().for_each(|user_org| {
+                        User::update_uuid_revision(&user_org.user_uuid, conn);
+                        user_uuids.push(user_org.user_uuid.clone())
+                    });
                 }
             }
         };
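Note (illustrative, not part of the diff): the fields/password_history lines above use a parse-or-Null pattern, where an optional JSON string column is parsed and falls back to Value::Null when missing or invalid. A small self-contained sketch of that pattern, assuming only the serde_json crate; the helper name is made up:

    use serde_json::Value;

    // Optional raw JSON column -> parsed Value, or Value::Null when absent/invalid.
    fn json_or_null(raw: Option<&str>) -> Value {
        raw.and_then(|s| serde_json::from_str(s).ok()).unwrap_or(Value::Null)
    }

    fn main() {
        assert_eq!(json_or_null(None), Value::Null);
        assert_eq!(json_or_null(Some("not json")), Value::Null);
        assert_eq!(json_or_null(Some(r#"[{"Name":"field1"}]"#))[0]["Name"], "field1");
    }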
src/db/models/collection.rs

@@ -1,6 +1,6 @@
 use serde_json::Value;

-use super::{Organization, UserOrgStatus, UserOrgType, UserOrganization, User, Cipher};
+use super::{Cipher, Organization, User, UserOrgStatus, UserOrgType, UserOrganization};

 db_object! {
     #[derive(Identifiable, Queryable, Insertable, Associations, AsChangeset)]

@@ -127,11 +127,9 @@ impl Collection {
     }

     pub fn update_users_revision(&self, conn: &DbConn) {
-        UserOrganization::find_by_collection_and_org(&self.uuid, &self.org_uuid, conn)
-            .iter()
-            .for_each(|user_org| {
-                User::update_uuid_revision(&user_org.user_uuid, conn);
-            });
+        UserOrganization::find_by_collection_and_org(&self.uuid, &self.org_uuid, conn).iter().for_each(|user_org| {
+            User::update_uuid_revision(&user_org.user_uuid, conn);
+        });
     }

     pub fn find_by_uuid(uuid: &str, conn: &DbConn) -> Option<Self> {

@@ -170,10 +168,7 @@ impl Collection {
     }

     pub fn find_by_organization_and_user_uuid(org_uuid: &str, user_uuid: &str, conn: &DbConn) -> Vec<Self> {
-        Self::find_by_user_uuid(user_uuid, conn)
-            .into_iter()
-            .filter(|c| c.org_uuid == org_uuid)
-            .collect()
+        Self::find_by_user_uuid(user_uuid, conn).into_iter().filter(|c| c.org_uuid == org_uuid).collect()
     }

     pub fn find_by_organization(org_uuid: &str, conn: &DbConn) -> Vec<Self> {

@@ -284,7 +279,13 @@ impl CollectionUser {
         }}
     }

-    pub fn save(user_uuid: &str, collection_uuid: &str, read_only: bool, hide_passwords: bool, conn: &DbConn) -> EmptyResult {
+    pub fn save(
+        user_uuid: &str,
+        collection_uuid: &str,
+        read_only: bool,
+        hide_passwords: bool,
+        conn: &DbConn,
+    ) -> EmptyResult {
         User::update_uuid_revision(&user_uuid, conn);

         db_run! { conn:

@@ -374,11 +375,9 @@ impl CollectionUser {
     }

     pub fn delete_all_by_collection(collection_uuid: &str, conn: &DbConn) -> EmptyResult {
-        CollectionUser::find_by_collection(&collection_uuid, conn)
-            .iter()
-            .for_each(|collection| {
-                User::update_uuid_revision(&collection.user_uuid, conn);
-            });
+        CollectionUser::find_by_collection(&collection_uuid, conn).iter().for_each(|collection| {
+            User::update_uuid_revision(&collection.user_uuid, conn);
+        });

         db_run! { conn: {
             diesel::delete(users_collections::table.filter(users_collections::collection_uuid.eq(collection_uuid)))
src/db/models/favorite.rs

@@ -20,7 +20,7 @@ use crate::error::MapResult;
 impl Favorite {
     // Returns whether the specified cipher is a favorite of the specified user.
     pub fn is_favorite(cipher_uuid: &str, user_uuid: &str, conn: &DbConn) -> bool {
-        db_run!{ conn: {
+        db_run! { conn: {
             let query = favorites::table
                 .filter(favorites::cipher_uuid.eq(cipher_uuid))
                 .filter(favorites::user_uuid.eq(user_uuid))

@@ -36,19 +36,19 @@ impl Favorite {
         match (old, new) {
             (false, true) => {
                 User::update_uuid_revision(user_uuid, &conn);
-                db_run!{ conn: {
+                db_run! { conn: {
                     diesel::insert_into(favorites::table)
                         .values((
                             favorites::user_uuid.eq(user_uuid),
                             favorites::cipher_uuid.eq(cipher_uuid),
                         ))
                         .execute(conn)
                         .map_res("Error adding favorite")
                 }}
             }
             (true, false) => {
                 User::update_uuid_revision(user_uuid, &conn);
-                db_run!{ conn: {
+                db_run! { conn: {
                     diesel::delete(
                         favorites::table
                             .filter(favorites::user_uuid.eq(user_uuid))

@@ -59,7 +59,7 @@ impl Favorite {
                 }}
             }
             // Otherwise, the favorite status is already what it should be.
-            _ => Ok(())
+            _ => Ok(()),
         }
     }

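Note (illustrative, not part of the diff): the set_favorite match above only touches the favorites table when the old and new states differ. A tiny standalone sketch of that decision table with stand-in types and no database:

    #[derive(Debug, PartialEq)]
    enum Action {
        Insert,
        Delete,
        Nothing,
    }

    fn favorite_action(old: bool, new: bool) -> Action {
        match (old, new) {
            (false, true) => Action::Insert,
            (true, false) => Action::Delete,
            // Otherwise, the favorite status is already what it should be.
            _ => Action::Nothing,
        }
    }

    fn main() {
        assert_eq!(favorite_action(false, true), Action::Insert);
        assert_eq!(favorite_action(true, false), Action::Delete);
        assert_eq!(favorite_action(true, true), Action::Nothing);
    }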
src/db/models/folder.rs

@@ -109,7 +109,6 @@ impl Folder {
         User::update_uuid_revision(&self.user_uuid, conn);
         FolderCipher::delete_all_by_folder(&self.uuid, &conn)?;

-
         db_run! { conn: {
             diesel::delete(folders::table.filter(folders::uuid.eq(&self.uuid)))
                 .execute(conn)
src/db/models/mod.rs

@@ -6,9 +6,9 @@ mod favorite;
 mod folder;
 mod org_policy;
 mod organization;
+mod send;
 mod two_factor;
 mod user;
-mod send;

 pub use self::attachment::Attachment;
 pub use self::cipher::Cipher;

@@ -18,6 +18,6 @@ pub use self::favorite::Favorite;
 pub use self::folder::{Folder, FolderCipher};
 pub use self::org_policy::{OrgPolicy, OrgPolicyType};
 pub use self::organization::{Organization, UserOrgStatus, UserOrgType, UserOrganization};
+pub use self::send::{Send, SendType};
 pub use self::two_factor::{TwoFactor, TwoFactorType};
 pub use self::user::{Invitation, User, UserStampException};
-pub use self::send::{Send, SendType};
src/db/models/org_policy.rs

@@ -4,7 +4,7 @@ use crate::api::EmptyResult;
 use crate::db::DbConn;
 use crate::error::MapResult;

-use super::{Organization, UserOrganization, UserOrgStatus, UserOrgType};
+use super::{Organization, UserOrgStatus, UserOrgType, UserOrganization};

 db_object! {
     #[derive(Identifiable, Queryable, Insertable, Associations, AsChangeset)]

@@ -20,9 +20,7 @@ db_object! {
     }
 }

-#[derive(Copy, Clone)]
-#[derive(num_derive::FromPrimitive)]
-#[derive(PartialEq)]
+#[derive(Copy, Clone, PartialEq, num_derive::FromPrimitive)]
 pub enum OrgPolicyType {
     TwoFactorAuthentication = 0,
     MasterPassword = 1,

@@ -176,7 +174,8 @@ impl OrgPolicy {
     /// and the user is not an owner or admin of that org. This is only useful for checking
     /// applicability of policy types that have these particular semantics.
     pub fn is_applicable_to_user(user_uuid: &str, policy_type: OrgPolicyType, conn: &DbConn) -> bool {
-        for policy in OrgPolicy::find_by_user(user_uuid, conn) { // Returns confirmed users only.
+        // Returns confirmed users only.
+        for policy in OrgPolicy::find_by_user(user_uuid, conn) {
             if policy.enabled && policy.has_type(policy_type) {
                 let org_uuid = &policy.org_uuid;
                 if let Some(user) = UserOrganization::find_by_user_and_org(user_uuid, org_uuid, conn) {
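Note (illustrative, not part of the diff): the doc comment above defines applicability as "the policy is enabled for one of the user's confirmed orgs and the user is not an owner or admin there". A self-contained sketch of that rule with stand-in types; the real check goes through OrgPolicy::find_by_user and UserOrganization:

    enum Role {
        Owner,
        Admin,
        User,
    }

    struct Membership {
        role: Role,
        policy_enabled: bool,
    }

    // A policy applies if it is enabled in any org where the user is neither owner nor admin.
    fn policy_applies(memberships: &[Membership]) -> bool {
        memberships.iter().any(|m| m.policy_enabled && !matches!(m.role, Role::Owner | Role::Admin))
    }

    fn main() {
        let member = [Membership { role: Role::User, policy_enabled: true }];
        let admin = [Membership { role: Role::Admin, policy_enabled: true }];
        assert!(policy_applies(&member));
        assert!(!policy_applies(&admin));
    }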
src/db/models/organization.rs

@@ -1,8 +1,8 @@
+use num_traits::FromPrimitive;
 use serde_json::Value;
 use std::cmp::Ordering;
-use num_traits::FromPrimitive;

-use super::{CollectionUser, User, OrgPolicy, OrgPolicyType};
+use super::{CollectionUser, OrgPolicy, OrgPolicyType, User};

 db_object! {
     #[derive(Identifiable, Queryable, Insertable, AsChangeset)]

@@ -35,8 +35,7 @@ pub enum UserOrgStatus {
     Confirmed = 2,
 }

-#[derive(Copy, Clone, PartialEq, Eq)]
-#[derive(num_derive::FromPrimitive)]
+#[derive(Copy, Clone, PartialEq, Eq, num_derive::FromPrimitive)]
 pub enum UserOrgType {
     Owner = 0,
     Admin = 1,

@@ -190,11 +189,9 @@ use crate::error::MapResult;
 /// Database methods
 impl Organization {
     pub fn save(&self, conn: &DbConn) -> EmptyResult {
-        UserOrganization::find_by_org(&self.uuid, conn)
-            .iter()
-            .for_each(|user_org| {
-                User::update_uuid_revision(&user_org.user_uuid, conn);
-            });
+        UserOrganization::find_by_org(&self.uuid, conn).iter().for_each(|user_org| {
+            User::update_uuid_revision(&user_org.user_uuid, conn);
+        });

         db_run! { conn:
             sqlite, mysql {

@@ -236,7 +233,6 @@ impl Organization {
         UserOrganization::delete_all_by_organization(&self.uuid, &conn)?;
         OrgPolicy::delete_all_by_organization(&self.uuid, &conn)?;

-
         db_run! { conn: {
             diesel::delete(organizations::table.filter(organizations::uuid.eq(self.uuid)))
                 .execute(conn)

@@ -347,11 +343,13 @@ impl UserOrganization {
             let collections = CollectionUser::find_by_organization_and_user_uuid(&self.org_uuid, &self.user_uuid, conn);
             collections
                 .iter()
-                .map(|c| json!({
-                    "Id": c.collection_uuid,
-                    "ReadOnly": c.read_only,
-                    "HidePasswords": c.hide_passwords,
-                }))
+                .map(|c| {
+                    json!({
+                        "Id": c.collection_uuid,
+                        "ReadOnly": c.read_only,
+                        "HidePasswords": c.hide_passwords,
+                    })
+                })
                 .collect()
         };

@@ -446,8 +444,7 @@ impl UserOrganization {
     }

     pub fn has_full_access(&self) -> bool {
-        (self.access_all || self.atype >= UserOrgType::Admin) &&
-            self.has_status(UserOrgStatus::Confirmed)
+        (self.access_all || self.atype >= UserOrgType::Admin) && self.has_status(UserOrgStatus::Confirmed)
     }

     pub fn find_by_uuid(uuid: &str, conn: &DbConn) -> Option<Self> {
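Note (illustrative, not part of the diff): has_full_access above combines three conditions: access_all, an admin-or-higher role (UserOrgType defines its own ordering, hence the std::cmp::Ordering import earlier, so that Owner ranks above Admin despite the smaller discriminant), and confirmed membership. A standalone sketch with a hypothetical Role enum whose derived ordering mimics that ranking:

    // Derived ordering follows declaration order: User < Admin < Owner.
    #[derive(PartialEq, PartialOrd)]
    enum Role {
        User,
        Admin,
        Owner,
    }

    fn has_full_access(access_all: bool, role: Role, confirmed: bool) -> bool {
        (access_all || role >= Role::Admin) && confirmed
    }

    fn main() {
        assert!(has_full_access(false, Role::Owner, true));
        assert!(has_full_access(true, Role::User, true));
        // Unconfirmed members never get full access, whatever their role.
        assert!(!has_full_access(true, Role::Owner, false));
    }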
src/db/models/user.rs

@@ -63,8 +63,8 @@ enum UserStatus {

 #[derive(Serialize, Deserialize)]
 pub struct UserStampException {
     pub route: String,
-    pub security_stamp: String
+    pub security_stamp: String,
 }

 /// Local methods

@@ -162,7 +162,7 @@ impl User {
     pub fn set_stamp_exception(&mut self, route_exception: &str) {
         let stamp_exception = UserStampException {
             route: route_exception.to_string(),
-            security_stamp: self.security_stamp.to_string()
+            security_stamp: self.security_stamp.to_string(),
         };
         self.stamp_exception = Some(serde_json::to_string(&stamp_exception).unwrap_or_default());
     }

@@ -341,14 +341,16 @@ impl User {
     pub fn last_active(&self, conn: &DbConn) -> Option<NaiveDateTime> {
         match Device::find_latest_active_by_user(&self.uuid, conn) {
             Some(device) => Some(device.updated_at),
-            None => None
+            None => None,
         }
     }
 }

 impl Invitation {
     pub const fn new(email: String) -> Self {
-        Self { email }
+        Self {
+            email,
+        }
     }

     pub fn save(&self, conn: &DbConn) -> EmptyResult {
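Note (illustrative, not part of the diff): set_stamp_exception above stores a UserStampException on the user row as a JSON string via serde_json::to_string. A minimal round-trip sketch assuming serde with the derive feature; the struct name and field values here are made up:

    use serde::{Deserialize, Serialize};

    #[derive(Serialize, Deserialize)]
    struct StampException {
        route: String,
        security_stamp: String,
    }

    fn main() {
        let exc = StampException {
            route: "example/route".to_string(),
            security_stamp: "stamp-uuid".to_string(),
        };
        // Stored as a plain string column, like stamp_exception above.
        let stored = serde_json::to_string(&exc).unwrap_or_default();
        let parsed: StampException = serde_json::from_str(&stored).expect("valid JSON");
        assert_eq!(parsed.route, exc.route);
    }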
10 src/error.rs

@@ -33,10 +33,10 @@ macro_rules! make_error {
     };
 }

+use diesel::r2d2::PoolError as R2d2Err;
 use diesel::result::Error as DieselErr;
 use diesel::ConnectionError as DieselConErr;
 use diesel_migrations::RunMigrationsError as DieselMigErr;
-use diesel::r2d2::PoolError as R2d2Err;
 use handlebars::RenderError as HbErr;
 use jsonwebtoken::errors::Error as JwtErr;
 use regex::Error as RegexErr;

@@ -191,18 +191,14 @@ use rocket::response::{self, Responder, Response};
 impl<'r> Responder<'r> for Error {
     fn respond_to(self, _: &Request) -> response::Result<'r> {
         match self.error {
             ErrorKind::EmptyError(_) => {} // Don't print the error in this situation
             ErrorKind::SimpleError(_) => {} // Don't print the error in this situation
             _ => error!(target: "error", "{:#?}", self),
         };

         let code = Status::from_code(self.error_code).unwrap_or(Status::BadRequest);

-        Response::build()
-            .status(code)
-            .header(ContentType::JSON)
-            .sized_body(Cursor::new(format!("{}", self)))
-            .ok()
+        Response::build().status(code).header(ContentType::JSON).sized_body(Cursor::new(format!("{}", self))).ok()
     }
 }

24 src/mail.rs

@@ -1,4 +1,4 @@
-use std::{str::FromStr};
+use std::str::FromStr;

 use chrono::{DateTime, Local};
 use percent_encoding::{percent_encode, NON_ALPHANUMERIC};

@@ -62,11 +62,13 @@ fn mailer() -> SmtpTransport {
         let mut selected_mechanisms = vec![];
         for wanted_mechanism in mechanism.split(',') {
             for m in &allowed_mechanisms {
-                if m.to_string().to_lowercase() == wanted_mechanism.trim_matches(|c| c == '"' || c == '\'' || c == ' ').to_lowercase() {
+                if m.to_string().to_lowercase()
+                    == wanted_mechanism.trim_matches(|c| c == '"' || c == '\'' || c == ' ').to_lowercase()
+                {
                     selected_mechanisms.push(*m);
                 }
             }
-        };
+        }

         if !selected_mechanisms.is_empty() {
             smtp_client.authentication(selected_mechanisms)

@@ -332,31 +334,23 @@ fn send_email(address: &str, subject: &str, body_html: String, body_text: String

     let smtp_from = &CONFIG.smtp_from();
     let email = Message::builder()
-        .message_id(Some(format!("<{}@{}>", crate::util::get_uuid(), smtp_from.split('@').collect::<Vec<&str>>()[1] )))
+        .message_id(Some(format!("<{}@{}>", crate::util::get_uuid(), smtp_from.split('@').collect::<Vec<&str>>()[1])))
         .to(Mailbox::new(None, Address::from_str(&address)?))
-        .from(Mailbox::new(
-            Some(CONFIG.smtp_from_name()),
-            Address::from_str(smtp_from)?,
-        ))
+        .from(Mailbox::new(Some(CONFIG.smtp_from_name()), Address::from_str(smtp_from)?))
         .subject(subject)
-        .multipart(
-            MultiPart::alternative()
-                .singlepart(text)
-                .singlepart(html)
-        )?;
+        .multipart(MultiPart::alternative().singlepart(text).singlepart(html))?;

     match mailer().send(&email) {
         Ok(_) => Ok(()),
         // Match some common errors and make them more user friendly
         Err(e) => {
-
             if e.is_client() {
                 err!(format!("SMTP Client error: {}", e));
             } else if e.is_transient() {
                 err!(format!("SMTP 4xx error: {:?}", e));
             } else if e.is_permanent() {
                 err!(format!("SMTP 5xx error: {:?}", e));
             } else if e.is_timeout() {
                 err!(format!("SMTP timeout error: {:?}", e));
             } else {
                 Err(e.into())
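Note (illustrative, not part of the diff): the mechanism-selection loop above compares each configured SMTP auth mechanism name against the allowed ones, case-insensitively and with surrounding quotes and spaces trimmed. A standalone sketch of just that string matching, with plain &str standing in for lettre's Mechanism values:

    fn select<'a>(configured: &str, allowed: &[&'a str]) -> Vec<&'a str> {
        let mut selected = vec![];
        for wanted in configured.split(',') {
            for m in allowed {
                // Trim quotes and spaces, then compare case-insensitively.
                if m.to_lowercase() == wanted.trim_matches(|c| c == '"' || c == '\'' || c == ' ').to_lowercase() {
                    selected.push(*m);
                }
            }
        }
        selected
    }

    fn main() {
        let picked = select(" 'Plain', \"Login\" ", &["Plain", "Login", "Xoauth2"]);
        assert_eq!(picked, vec!["Plain", "Login"]);
    }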
62 src/main.rs

@@ -16,7 +16,7 @@ extern crate diesel;
 #[macro_use]
 extern crate diesel_migrations;

-use job_scheduler::{JobScheduler, Job};
+use job_scheduler::{Job, JobScheduler};
 use std::{
     fs::create_dir_all,
     panic,

@@ -127,7 +127,9 @@ fn init_logging(level: log::LevelFilter) -> Result<(), fern::InitError> {
     // Enable smtp debug logging only specifically for smtp when need.
     // This can contain sensitive information we do not want in the default debug/trace logging.
     if CONFIG.smtp_debug() {
-        println!("[WARNING] SMTP Debugging is enabled (SMTP_DEBUG=true). Sensitive information could be disclosed via logs!");
+        println!(
+            "[WARNING] SMTP Debugging is enabled (SMTP_DEBUG=true). Sensitive information could be disclosed via logs!"
+        );
         println!("[WARNING] Only enable SMTP_DEBUG during troubleshooting!\n");
         logger = logger.level_for("lettre::transport::smtp", log::LevelFilter::Debug)
     } else {

@@ -298,7 +300,10 @@ fn check_web_vault() {
     let index_path = Path::new(&CONFIG.web_vault_folder()).join("index.html");

     if !index_path.exists() {
-        error!("Web vault is not found at '{}'. To install it, please follow the steps in: ", CONFIG.web_vault_folder());
+        error!(
+            "Web vault is not found at '{}'. To install it, please follow the steps in: ",
+            CONFIG.web_vault_folder()
+        );
         error!("https://github.com/dani-garcia/bitwarden_rs/wiki/Building-binary#install-the-web-vault");
         error!("You can also set the environment variable 'WEB_VAULT_ENABLED=false' to disable it");
         exit(1);

@@ -344,31 +349,34 @@ fn schedule_jobs(pool: db::DbPool) {
         info!("Job scheduler disabled.");
         return;
     }
-    thread::Builder::new().name("job-scheduler".to_string()).spawn(move || {
-        let mut sched = JobScheduler::new();
+    thread::Builder::new()
+        .name("job-scheduler".to_string())
+        .spawn(move || {
+            let mut sched = JobScheduler::new();

             // Purge sends that are past their deletion date.
             if !CONFIG.send_purge_schedule().is_empty() {
                 sched.add(Job::new(CONFIG.send_purge_schedule().parse().unwrap(), || {
                     api::purge_sends(pool.clone());
                 }));
             }

             // Purge trashed items that are old enough to be auto-deleted.
             if !CONFIG.trash_purge_schedule().is_empty() {
                 sched.add(Job::new(CONFIG.trash_purge_schedule().parse().unwrap(), || {
                     api::purge_trashed_ciphers(pool.clone());
                 }));
             }

             // Periodically check for jobs to run. We probably won't need any
             // jobs that run more often than once a minute, so a default poll
             // interval of 30 seconds should be sufficient. Users who want to
             // schedule jobs to run more frequently for some reason can reduce
             // the poll interval accordingly.
             loop {
                 sched.tick();
                 thread::sleep(Duration::from_millis(CONFIG.job_poll_interval_ms()));
             }
-    }).expect("Error spawning job scheduler thread");
+        })
+        .expect("Error spawning job scheduler thread");
 }
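Note (illustrative, not part of the diff): schedule_jobs above builds a JobScheduler, registers cron-style jobs from the configured schedules, and ticks it in a loop on a dedicated thread. A minimal sketch of the same shape, assuming the job_scheduler crate this file already uses; the cron expression (run at second 0 of every minute) and the poll interval are illustrative:

    use job_scheduler::{Job, JobScheduler};
    use std::{thread, time::Duration};

    fn main() {
        let mut sched = JobScheduler::new();
        sched.add(Job::new("0 * * * * *".parse().unwrap(), || {
            println!("a purge job would run here");
        }));

        // main.rs loops forever; a few ticks are enough to show the shape.
        for _ in 0..3 {
            sched.tick();
            thread::sleep(Duration::from_millis(500));
        }
    }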
src/static/templates/admin/settings.hbs

@@ -116,7 +116,11 @@
                     data-target="#g_database">Backup Database</button></div>
                 <div id="g_database" class="card-body collapse" data-parent="#config-form">
                     <div class="small mb-3">
-                        NOTE: A local installation of sqlite3 is required for this section to work.
+                        WARNING: This function only creates a backup copy of the SQLite database.
+                        This does not include any configuration or file attachment data that may
+                        also be needed to fully restore a bitwarden_rs instance. For details on
+                        how to perform complete backups, refer to the wiki page on
+                        <a href="https://github.com/dani-garcia/bitwarden_rs/wiki/Backing-up-your-vault">backups</a>.
                     </div>
                     <button type="button" class="btn btn-primary" onclick="backupDatabase();">Backup Database</button>
                 </div>
47 src/util.rs

@@ -28,7 +28,10 @@ impl Fairing for AppHeaders {
         res.set_raw_header("X-Frame-Options", "SAMEORIGIN");
         res.set_raw_header("X-Content-Type-Options", "nosniff");
         res.set_raw_header("X-XSS-Protection", "1; mode=block");
-        let csp = format!("frame-ancestors 'self' chrome-extension://nngceckbapebfimnlniiiahkandclblb moz-extension://* {};", CONFIG.allowed_iframe_ancestors());
+        let csp = format!(
+            "frame-ancestors 'self' chrome-extension://nngceckbapebfimnlniiiahkandclblb moz-extension://* {};",
+            CONFIG.allowed_iframe_ancestors()
+        );
         res.set_raw_header("Content-Security-Policy", csp);

         // Disable cache unless otherwise specified

@@ -124,14 +127,8 @@ impl<'r, R: Responder<'r>> Responder<'r> for Cached<R> {

 // Log all the routes from the main paths list, and the attachments endpoint
 // Effectively ignores, any static file route, and the alive endpoint
-const LOGGED_ROUTES: [&str; 6] = [
-    "/api",
-    "/admin",
-    "/identity",
-    "/icons",
-    "/notifications/hub/negotiate",
-    "/attachments",
-];
+const LOGGED_ROUTES: [&str; 6] =
+    ["/api", "/admin", "/identity", "/icons", "/notifications/hub/negotiate", "/attachments"];

 // Boolean is extra debug, when true, we ignore the whitelist above and also print the mounts
 pub struct BetterLogging(pub bool);

@@ -158,7 +155,11 @@ impl Fairing for BetterLogging {
         }

         let config = rocket.config();
-        let scheme = if config.tls_enabled() { "https" } else { "http" };
+        let scheme = if config.tls_enabled() {
+            "https"
+        } else {
+            "http"
+        };
         let addr = format!("{}://{}:{}", &scheme, &config.address, &config.port);
         info!(target: "start", "Rocket has launched from {}", addr);
     }

@@ -293,8 +294,7 @@ where

 use std::env;

-pub fn get_env_str_value(key: &str) -> Option<String>
-{
+pub fn get_env_str_value(key: &str) -> Option<String> {
     let key_file = format!("{}_FILE", key);
     let value_from_env = env::var(key);
     let value_file = env::var(&key_file);

@@ -304,9 +304,9 @@ pub fn get_env_str_value(key: &str) -> Option<String>
         (Ok(v_env), Err(_)) => Some(v_env),
         (Err(_), Ok(v_file)) => match fs::read_to_string(v_file) {
             Ok(content) => Some(content.trim().to_string()),
-            Err(e) => panic!("Failed to load {}: {:?}", key, e)
+            Err(e) => panic!("Failed to load {}: {:?}", key, e),
        },
-        _ => None
+        _ => None,
     }
 }

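Note (illustrative, not part of the diff): get_env_str_value above lets any setting come either from KEY directly or from a file named by KEY_FILE (handy for Docker secrets), trimming the file contents. A self-contained sketch of that lookup order; the helper and variable names are made up:

    use std::{env, fs};

    // Prefer KEY; otherwise read the file named by KEY_FILE; refuse both at once.
    fn env_or_file(key: &str) -> Option<String> {
        let key_file = format!("{}_FILE", key);
        match (env::var(key), env::var(&key_file)) {
            (Ok(_), Ok(_)) => panic!("{} and {} are mutually exclusive", key, key_file),
            (Ok(v), Err(_)) => Some(v),
            (Err(_), Ok(path)) => fs::read_to_string(path).ok().map(|s| s.trim().to_string()),
            _ => None,
        }
    }

    fn main() {
        env::set_var("DEMO_TOKEN", "secret");
        assert_eq!(env_or_file("DEMO_TOKEN").as_deref(), Some("secret"));
        assert_eq!(env_or_file("DEMO_MISSING"), None);
    }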
@@ -478,7 +478,6 @@ pub fn retry<F, T, E>(func: F, max_tries: u32) -> Result<T, E>
 where
     F: Fn() -> Result<T, E>,
 {
-    use std::{thread::sleep, time::Duration};
     let mut tries = 0;

     loop {

@@ -497,12 +496,13 @@ where
     }
 }

+use std::{thread::sleep, time::Duration};

 pub fn retry_db<F, T, E>(func: F, max_tries: u32) -> Result<T, E>
 where
     F: Fn() -> Result<T, E>,
     E: std::error::Error,
 {
-    use std::{thread::sleep, time::Duration};
     let mut tries = 0;

     loop {

@@ -522,3 +522,18 @@ where
         }
     }
 }
+
+use reqwest::{
+    blocking::{Client, ClientBuilder},
+    header,
+};
+
+pub fn get_reqwest_client() -> Client {
+    get_reqwest_client_builder().build().expect("Failed to build client")
+}
+
+pub fn get_reqwest_client_builder() -> ClientBuilder {
+    let mut headers = header::HeaderMap::new();
+    headers.insert(header::USER_AGENT, header::HeaderValue::from_static("Bitwarden_RS"));
+    Client::builder().default_headers(headers).timeout(Duration::from_secs(10))
+}
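Note (illustrative, not part of the diff): the get_reqwest_client_builder added above centralizes a default User-Agent and a 10-second timeout for all outgoing HTTP requests. A hedged usage sketch, assuming reqwest with the blocking feature as in the diff; the URL is a placeholder:

    use reqwest::{blocking::Client, header};
    use std::time::Duration;

    fn build_client() -> Client {
        let mut headers = header::HeaderMap::new();
        headers.insert(header::USER_AGENT, header::HeaderValue::from_static("Bitwarden_RS"));
        Client::builder()
            .default_headers(headers)
            .timeout(Duration::from_secs(10))
            .build()
            .expect("Failed to build client")
    }

    fn main() {
        let client = build_client();
        // Every request through this client carries the default header and timeout above.
        match client.get("https://example.com/alive").send() {
            Ok(resp) => println!("status: {}", resp.status()),
            Err(e) => eprintln!("request failed: {}", e),
        }
    }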