massive error overhaul
* fully stop using the ancient `failure` crate in favor of our own error type
* set an `ErrorKind` on everything
parent 6a5b751bd6
commit 64ca096ff3
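
Before the per-file diffs, a minimal sketch of the call-site migration this commit performs. The `lookup_user` helper and its arguments are invented for illustration only; the `bail!`/`err!` macros, `msg(...)`/`source(...)` clauses, and `ResultExt::err_kind` are the ones defined in base/error.rs below, and `base` is the `moonfire-base` package as renamed in the main crate's Cargo.toml.

use base::{bail, err, Error, ErrorKind, ResultExt as _};

// Hypothetical call site. Before this commit it would have looked like:
//     bail_t!(NotFound, "user {} not found", id);
//     let e = format_err_t!(InvalidArgument, "bad session id: {e}");
// After, with the new macros:
fn lookup_user(id: i32, state_dir: &std::path::Path) -> Result<(), Error> {
    if id < 0 {
        bail!(InvalidArgument, msg("id {id} is negative"));
    }
    // ResultExt::err_kind still annotates a foreign error with an explicit kind.
    std::fs::metadata(state_dir).err_kind(ErrorKind::Unavailable)?;
    Err(err!(NotFound, msg("user {id} not found")))
}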
Cargo.lock
@@ -2,15 +2,6 @@
 # It is not intended for manual editing.
 version = 3
 
-[[package]]
-name = "addr2line"
-version = "0.19.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "a76fd60b23679b7d19bd066031410fb7e458ccc5e958eb5c325888ce4baedc97"
-dependencies = [
- "gimli",
-]
-
 [[package]]
 name = "adler"
 version = "1.0.2"
@@ -77,21 +68,6 @@ version = "1.1.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "d468802bab17cbc0cc575e9b053f41e72aa36bfa6b7f55e3529ffa43161b97fa"
 
-[[package]]
-name = "backtrace"
-version = "0.3.67"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "233d376d6d185f2a3093e58f283f60f880315b6c60075b01f36b3b85154564ca"
-dependencies = [
- "addr2line",
- "cc",
- "cfg-if",
- "libc",
- "miniz_oxide",
- "object",
- "rustc-demangle",
-]
-
 [[package]]
 name = "base64"
 version = "0.13.1"
@@ -234,6 +210,11 @@ dependencies = [
  "inout",
 ]
 
+[[package]]
+name = "coded"
+version = "0.2.0-pre"
+source = "git+https://github.com/scottlamb/coded?rev=2c97994974a73243d5dd12134831814f42cdb0e8#2c97994974a73243d5dd12134831814f42cdb0e8"
+
 [[package]]
 name = "constant_time_eq"
 version = "0.2.4"
@@ -478,28 +459,6 @@ dependencies = [
  "libc",
 ]
 
-[[package]]
-name = "failure"
-version = "0.1.8"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "d32e9bd16cc02eae7db7ef620b392808b89f6a5e16bb3497d159c6b92a0f4f86"
-dependencies = [
- "backtrace",
- "failure_derive",
-]
-
-[[package]]
-name = "failure_derive"
-version = "0.1.8"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "aa4da3c766cd7a0db8242e326e9e4e081edd567072893ed320008189715366a4"
-dependencies = [
- "proc-macro2",
- "quote",
- "syn 1.0.107",
- "synstructure",
-]
-
 [[package]]
 name = "fallible-iterator"
 version = "0.2.0"
@@ -523,9 +482,9 @@ dependencies = [
 
 [[package]]
 name = "flate2"
-version = "1.0.25"
+version = "1.0.26"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "a8a2db397cb1c8772f31494cb8917e48cd1e64f0fa7efac59fbd741a0a8ce841"
+checksum = "3b9429470923de8e8cbd4d2dc513535400b4b3fef0319fb5c4e1f520a7bef743"
 dependencies = [
  "crc32fast",
  "miniz_oxide",
@@ -662,12 +621,6 @@ dependencies = [
  "wasi 0.11.0+wasi-snapshot-preview1",
 ]
 
-[[package]]
-name = "gimli"
-version = "0.27.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "dec7af912d60cdbd3677c1af9352ebae6fb8394d165568a2234df0fa00f87793"
-
 [[package]]
 name = "h2"
 version = "0.3.15"
@@ -1060,9 +1013,9 @@ checksum = "68354c5c6bd36d73ff3feceb05efa59b6acb7626617f4962be322a825e61f79a"
 
 [[package]]
 name = "miniz_oxide"
-version = "0.6.2"
+version = "0.7.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "b275950c28b37e794e8c55d88aeb5e139d0ce23fdbbeda68f8d7174abdf9e8fa"
+checksum = "e7810e0be55b428ada41041c41f32c9f1a42817901b4ccf45fa3d4b6561e74c7"
 dependencies = [
  "adler",
 ]
@@ -1084,10 +1037,12 @@ name = "moonfire-base"
 version = "0.0.1"
 dependencies = [
  "chrono",
- "failure",
+ "coded",
  "futures",
  "libc",
+ "nix",
  "nom",
+ "rusqlite",
  "serde",
  "serde_json",
  "slab",
@@ -1107,7 +1062,6 @@ dependencies = [
  "byteorder",
  "cstr",
  "diff",
- "failure",
  "fnv",
  "futures",
  "h264-reader",
@@ -1147,7 +1101,6 @@ dependencies = [
  "bytes",
  "chrono",
  "cursive",
- "failure",
  "fnv",
  "futures",
  "h264-reader",
@@ -1360,15 +1313,6 @@ dependencies = [
  "libc",
 ]
 
-[[package]]
-name = "object"
-version = "0.30.1"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "8d864c91689fdc196779b98dba0aceac6118594c2df6ee5d943eb6a8df4d107a"
-dependencies = [
- "memchr",
-]
-
 [[package]]
 name = "odds"
 version = "0.4.0"
@@ -1764,12 +1708,6 @@ dependencies = [
  "smallvec",
 ]
 
-[[package]]
-name = "rustc-demangle"
-version = "0.1.21"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "7ef03e0a2b150c7a90d01faf6254c9c48a41e95fb2a8c2ac1c6f0d2b9aefc342"
-
 [[package]]
 name = "rustix"
 version = "0.38.2"
@@ -2001,18 +1939,6 @@ version = "0.1.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "20518fe4a4c9acf048008599e464deb21beeae3d3578418951a189c235a7a9a8"
 
-[[package]]
-name = "synstructure"
-version = "0.12.6"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "f36bdaa60a83aca3921b5259d5400cbf5e90fc51931376a9bd4a0eb79aa7210f"
-dependencies = [
- "proc-macro2",
- "quote",
- "syn 1.0.107",
- "unicode-xid",
-]
-
 [[package]]
 name = "tempfile"
 version = "3.3.0"
@@ -2394,12 +2320,6 @@ version = "0.1.10"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "c0edd1e5b14653f783770bce4a4dabb4a5108a5370a5f5d8cfe8710c361f6c8b"
 
-[[package]]
-name = "unicode-xid"
-version = "0.2.4"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "f962df74c8c05a667b5ee8bcf162993134c104e96440b663c8daa176dc772d8c"
-
 [[package]]
 name = "untrusted"
 version = "0.7.1"
Cargo.toml
@@ -21,6 +21,11 @@ bundled = ["rusqlite/bundled"]
 [workspace]
 members = ["base", "db"]
 
+[workspace.dependencies]
+nix = "0.26.1"
+tracing = { version = "0.1", features = ["log"] }
+rusqlite = "0.28.0"
+
 [dependencies]
 base = { package = "moonfire-base", path = "base" }
 base64 = "0.13.0"
@@ -31,7 +36,6 @@ byteorder = "1.0"
 chrono = "0.4.23"
 cursive = "0.20.0"
 db = { package = "moonfire-db", path = "db" }
-failure = "0.1.1"
 futures = "0.3"
 fnv = "1.0"
 h264-reader = "0.6.0"
@@ -42,14 +46,14 @@ itertools = "0.10.0"
 libc = "0.2"
 log = { version = "0.4" }
 memchr = "2.0.2"
-nix = "0.26.1"
+nix = { workspace = true }
 nom = "7.0.0"
 password-hash = "0.4.2"
 protobuf = "3.0"
 reffers = "0.7.0"
 retina = "0.4.0"
 ring = "0.16.2"
-rusqlite = "0.28.0"
+rusqlite = { workspace = true }
 serde = { version = "1.0", features = ["derive"] }
 serde_json = "1.0"
 smallvec = { version = "1.7", features = ["union"] }
@@ -59,7 +63,7 @@ tokio = { version = "1.24", features = ["macros", "rt-multi-thread", "signal", "
 tokio-stream = "0.1.5"
 tokio-tungstenite = "0.18.0"
 toml = "0.5"
-tracing = { version = "0.1", features = ["log"] }
+tracing = { workspace = true }
 tracing-subscriber = { version = "0.3.16", features = ["env-filter", "json"] }
 tracing-core = "0.1.30"
 tracing-futures = { version = "0.2.5", features = ["futures-03", "std-future"] }
base/Cargo.toml
@@ -15,15 +15,17 @@ path = "lib.rs"
 
 [dependencies]
 chrono = "0.4.23"
-failure = "0.1.1"
+coded = { git = "https://github.com/scottlamb/coded", rev = "2c97994974a73243d5dd12134831814f42cdb0e8" }
 futures = "0.3"
 libc = "0.2"
+nix = { workspace = true }
 nom = "7.0.0"
+rusqlite = { workspace = true }
 serde = { version = "1.0", features = ["derive"] }
 serde_json = "1.0"
 slab = "0.4"
 time = "0.1"
-tracing = "0.1.37"
+tracing = { workspace = true }
 tracing-core = "0.1.30"
 tracing-log = "0.1.3"
 tracing-subscriber = { version = "0.3.16", features = ["env-filter", "json"] }
base/clock.rs
@@ -4,7 +4,6 @@
 
 //! Clock interface and implementations for testability.
 
-use failure::Error;
 use std::mem;
 use std::sync::Mutex;
 use std::sync::{mpsc, Arc};
@@ -13,6 +12,7 @@ use std::time::Duration as StdDuration;
 use time::{Duration, Timespec};
 use tracing::warn;
 
+use crate::error::Error;
 use crate::shutdown::ShutdownError;
 
 /// Abstract interface to the system clocks. This is for testability.
@@ -54,7 +54,7 @@ where
         shutdown_rx.check()?;
         let sleep_time = Duration::seconds(1);
         warn!(
-            err = crate::error::prettify_failure(&e),
+            exception = %e.chain(),
             "sleeping for 1 s after error"
         );
         clocks.sleep(sleep_time);
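
The `warn!` change above swaps the old `prettify_failure` helper for the new `Error::chain()` accessor. A small sketch of that logging pattern in isolation; `do_work` and its error are made up for illustration, and the `tracing_subscriber::fmt::init()` setup is an assumption about how a caller initializes logging:

use moonfire_base::{err, Error};
use tracing::warn;

// Hypothetical fallible operation, used only to produce an error to log.
fn do_work() -> Result<(), Error> {
    Err(err!(Unavailable, msg("camera connection dropped")))
}

fn main() {
    tracing_subscriber::fmt::init();
    if let Err(e) = do_work() {
        // `%e.chain()` records the error plus its full chain of causes,
        // replacing the old `crate::error::prettify_failure(&e)` call.
        warn!(exception = %e.chain(), "sleeping for 1 s after error");
    }
}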
base/error.rs
@@ -2,131 +2,317 @@
 // Copyright (C) 2018 The Moonfire NVR Authors; see AUTHORS and LICENSE.txt.
 // SPDX-License-Identifier: GPL-v3.0-or-later WITH GPL-3.0-linking-exception.
 
-use failure::{Backtrace, Context, Fail};
-use std::fmt::{self, Write};
-
-/// Returns a pretty-and-informative version of `e`.
-pub fn prettify_failure(e: &failure::Error) -> String {
-    let mut msg = e.to_string();
-    for cause in e.iter_causes() {
-        write!(&mut msg, "\ncaused by: {cause}").unwrap();
-    }
-    if e.backtrace().is_empty() {
-        write!(
-            &mut msg,
-            "\n\n(set environment variable RUST_BACKTRACE=1 to see backtraces)"
-        )
-        .unwrap();
-    } else {
-        write!(&mut msg, "\n\nBacktrace:\n{}", e.backtrace()).unwrap();
-    }
-    msg
-}
-
-#[derive(Debug)]
-pub struct Error {
-    inner: Context<ErrorKind>,
-}
-
-impl Error {
-    pub fn wrap<E: Into<failure::Error>>(kind: ErrorKind, e: E) -> Self {
-        Self {
-            inner: e.into().context(kind),
-        }
-    }
-
-    pub fn kind(&self) -> ErrorKind {
-        *self.inner.get_context()
-    }
-
-    pub fn compat(self) -> failure::Compat<Context<ErrorKind>> {
-        self.inner.compat()
-    }
-
-    pub fn map<F>(self, op: F) -> Self
-    where
-        F: FnOnce(ErrorKind) -> ErrorKind,
-    {
-        Self {
-            inner: self.inner.map(op),
-        }
-    }
-}
-
-impl Fail for Error {
-    fn cause(&self) -> Option<&dyn Fail> {
-        self.inner.cause()
-    }
-
-    fn backtrace(&self) -> Option<&Backtrace> {
-        self.inner.backtrace()
-    }
-}
-
-impl From<ErrorKind> for Error {
-    fn from(kind: ErrorKind) -> Error {
-        Error {
-            inner: Context::new(kind),
-        }
-    }
-}
-
-impl From<Context<ErrorKind>> for Error {
-    fn from(inner: Context<ErrorKind>) -> Error {
-        Error { inner }
-    }
-}
-
-/*impl From<failure::Error> for Error {
-    fn from(e: failure::Error) -> Error {
-        Error { inner: e.context(ErrorKind::Unknown) }
-    }
-}
-
-impl<E: std::error::Error + Send + Sync + 'static> From<E> for Error {
-    fn from(e: E) -> Error {
-        let f = e as Fail;
-        Error { inner: f.context(ErrorKind::Unknown) }
-    }
-}*/
-
-impl fmt::Display for Error {
-    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
-        match self.inner.cause() {
-            None => fmt::Display::fmt(&self.kind(), f),
-            Some(c) => write!(f, "{}: {}", self.kind(), c),
-        }
-    }
-}
-
-/// Error kind.
-///
-/// These codes are taken from
-/// [grpc::StatusCode](https://github.com/grpc/grpc/blob/0e00c430827e81d61e1e7164ef04ca21ccbfaa77/include/grpcpp/impl/codegen/status_code_enum.h),
-/// which is a nice general-purpose classification of errors. See that link for descriptions of
-/// each error.
-#[derive(Copy, Clone, Eq, PartialEq, Debug, Fail)]
-#[non_exhaustive]
-#[rustfmt::skip]
-pub enum ErrorKind {
-    #[fail(display = "Cancelled")] Cancelled,
-    #[fail(display = "Unknown")] Unknown,
-    #[fail(display = "Invalid argument")] InvalidArgument,
-    #[fail(display = "Deadline exceeded")] DeadlineExceeded,
-    #[fail(display = "Not found")] NotFound,
-    #[fail(display = "Already exists")] AlreadyExists,
-    #[fail(display = "Permission denied")] PermissionDenied,
-    #[fail(display = "Unauthenticated")] Unauthenticated,
-    #[fail(display = "Resource exhausted")] ResourceExhausted,
-    #[fail(display = "Failed precondition")] FailedPrecondition,
-    #[fail(display = "Aborted")] Aborted,
-    #[fail(display = "Out of range")] OutOfRange,
-    #[fail(display = "Unimplemented")] Unimplemented,
-    #[fail(display = "Internal")] Internal,
-    #[fail(display = "Unavailable")] Unavailable,
-    #[fail(display = "Data loss")] DataLoss,
-}
+use std::backtrace::Backtrace;
+use std::error::Error as StdError;
+use std::fmt::{Debug, Display};
+//use std::num::NonZeroU16;
+
+pub use coded::ErrorKind;
+
+/// Like [`coded::ToErrKind`] but with more third-party implementations.
+///
+/// It's not possible to implement those here on that trait because of the orphan rule.
+pub trait ToErrKind {
+    fn err_kind(&self) -> ErrorKind;
+}
+
+impl ToErrKind for Error {
+    #[inline]
+    fn err_kind(&self) -> ErrorKind {
+        self.0.kind
+    }
+}
+
+impl ToErrKind for std::io::Error {
+    #[inline]
+    fn err_kind(&self) -> ErrorKind {
+        self.kind().into()
+    }
+}
+
+impl ToErrKind for rusqlite::ErrorCode {
+    fn err_kind(&self) -> ErrorKind {
+        use rusqlite::ErrorCode;
+        // https://www.sqlite.org/rescode.html
+        match self {
+            ErrorCode::InternalMalfunction => ErrorKind::Internal,
+            ErrorCode::PermissionDenied => ErrorKind::PermissionDenied,
+            ErrorCode::OperationAborted => ErrorKind::Aborted,
+
+            // Conflict with another database connection in a process which is accessing
+            // the database, apparently without using Moonfire NVR's scheme of acquiring
+            // a lock on the db directory.
+            // https://www.sqlite.org/wal.html#sometimes_queries_return_sqlite_busy_in_wal_mode
+            ErrorCode::DatabaseBusy => ErrorKind::Unavailable,
+
+            // Conflict within the same database connection. Shouldn't happen for Moonfire.
+            ErrorCode::DatabaseLocked => ErrorKind::Internal,
+            ErrorCode::OutOfMemory => ErrorKind::ResourceExhausted,
+            ErrorCode::ReadOnly => ErrorKind::FailedPrecondition,
+            ErrorCode::OperationInterrupted => ErrorKind::Aborted,
+            ErrorCode::SystemIoFailure => ErrorKind::Unavailable,
+            ErrorCode::DatabaseCorrupt => ErrorKind::DataLoss,
+            ErrorCode::NotFound => ErrorKind::NotFound,
+            ErrorCode::DiskFull => ErrorKind::ResourceExhausted,
+            ErrorCode::CannotOpen => ErrorKind::Unavailable,
+
+            // Similar to DatabaseBusy, in that it implies a conflict with another conn.
+            ErrorCode::FileLockingProtocolFailed => ErrorKind::Unavailable,
+
+            // Likewise: Moonfire NVR should never change the schema
+            // mid-statement, so the most plausible explanation for
+            // SchemaChange is another process.
+            ErrorCode::SchemaChanged => ErrorKind::Unavailable,
+
+            ErrorCode::TooBig => ErrorKind::ResourceExhausted,
+            ErrorCode::ConstraintViolation => ErrorKind::Internal,
+            ErrorCode::TypeMismatch => ErrorKind::Internal,
+            ErrorCode::ApiMisuse => ErrorKind::Internal,
+            ErrorCode::NoLargeFileSupport => ErrorKind::ResourceExhausted,
+            ErrorCode::AuthorizationForStatementDenied => ErrorKind::Internal,
+            ErrorCode::ParameterOutOfRange => ErrorKind::Internal,
+            ErrorCode::NotADatabase => ErrorKind::FailedPrecondition,
+            _ => ErrorKind::Unknown,
+        }
+    }
+}
+
+impl ToErrKind for rusqlite::Error {
+    #[inline]
+    fn err_kind(&self) -> ErrorKind {
+        match self {
+            rusqlite::Error::SqliteFailure(e, _) => e.code.err_kind(),
+            _ => ErrorKind::Unknown,
+        }
+    }
+}
+
+impl ToErrKind for rusqlite::types::FromSqlError {
+    fn err_kind(&self) -> ErrorKind {
+        match self {
+            rusqlite::types::FromSqlError::InvalidType => ErrorKind::FailedPrecondition,
+            rusqlite::types::FromSqlError::OutOfRange(_) => ErrorKind::OutOfRange,
+            rusqlite::types::FromSqlError::InvalidBlobSize { .. } => ErrorKind::OutOfRange,
+            /* rusqlite::types::FromSqlError::Other(_) | */ _ => ErrorKind::Unknown,
+        }
+    }
+}
+
+impl ToErrKind for nix::Error {
+    fn err_kind(&self) -> ErrorKind {
+        use nix::Error;
+        match self {
+            Error::EACCES | Error::EPERM => ErrorKind::PermissionDenied,
+            Error::EDQUOT => ErrorKind::ResourceExhausted,
+            Error::EBUSY
+            | Error::EEXIST
+            | Error::ENOTDIR
+            | Error::EROFS
+            | Error::EFBIG
+            | Error::EOVERFLOW
+            | Error::ENXIO
+            | Error::ETXTBSY => ErrorKind::FailedPrecondition,
+            Error::EINVAL | Error::ENAMETOOLONG => ErrorKind::InvalidArgument,
+            Error::ELOOP => ErrorKind::FailedPrecondition,
+            Error::EMLINK | Error::ENOMEM | Error::ENOSPC | Error::EMFILE | Error::ENFILE => {
+                ErrorKind::ResourceExhausted
+            }
+            Error::EBADF | Error::EFAULT => ErrorKind::InvalidArgument,
+            Error::EINTR | Error::EAGAIN => ErrorKind::Aborted,
+            Error::ENOENT | Error::ENODEV => ErrorKind::NotFound,
+            Error::EOPNOTSUPP => ErrorKind::Unimplemented,
+            _ => ErrorKind::Unknown,
+        }
+    }
+}
+
+pub struct Error(Box<ErrorInner>);
+
+struct ErrorInner {
+    kind: ErrorKind,
+    msg: Option<String>,
+    //http_status: Option<NonZeroU16>,
+    backtrace: Option<Backtrace>,
+    source: Option<Box<dyn StdError + Sync + Send>>,
+}
+
+pub struct ErrorBuilder(Box<ErrorInner>);
+
+impl Default for ErrorBuilder {
+    #[inline]
+    fn default() -> Self {
+        Self(Box::new(ErrorInner {
+            kind: ErrorKind::Unknown,
+            msg: None,
+            // http_status: None,
+            backtrace: None,
+            source: None,
+        }))
+    }
+}
+
+impl From<ErrorKind> for ErrorBuilder {
+    #[inline]
+    fn from(value: ErrorKind) -> Self {
+        Self::default().kind(value)
+    }
+}
+
+impl ErrorBuilder {
+    #[inline]
+    pub fn kind(mut self, kind: ErrorKind) -> Self {
+        self.0.kind = kind;
+        self
+    }
+
+    #[inline]
+    pub fn map<F: Fn(ErrorKind) -> ErrorKind>(mut self, f: F) -> Self {
+        self.0.kind = f(self.0.kind);
+        self
+    }
+
+    #[inline]
+    pub fn msg(mut self, msg: String) -> Self {
+        self.0.msg = Some(msg);
+        self
+    }
+
+    #[inline]
+    pub fn source<S: Into<Box<dyn StdError + Send + Sync + 'static>>>(mut self, source: S) -> Self {
+        self.0.source = Some(source.into());
+        self
+    }
+
+    #[inline]
+    pub fn build(self) -> Error {
+        Error(self.0)
+    }
+}
+
+macro_rules! cvt {
+    ($t:ty) => {
+        impl From<$t> for ErrorBuilder {
+            #[inline]
+            fn from(t: $t) -> Self {
+                Self::default().kind(ToErrKind::err_kind(&t)).source(t)
+            }
+        }
+        impl From<$t> for Error {
+            #[inline(always)]
+            fn from(t: $t) -> Self {
+                Self($crate::ErrorBuilder::from(t).0)
+            }
+        }
+    };
+}
+cvt!(rusqlite::Error);
+cvt!(rusqlite::types::FromSqlError);
+cvt!(std::io::Error);
+cvt!(nix::Error);
+
+impl From<Error> for ErrorBuilder {
+    #[inline]
+    fn from(value: Error) -> Self {
+        Self::default()
+            .kind(ToErrKind::err_kind(&value))
+            .source(value)
+    }
+}
+
+/// Captures a backtrace if enabled for the given error kind.
+// TODO: make this more configurable at runtime.
+fn maybe_backtrace(kind: ErrorKind) -> Option<Backtrace> {
+    if matches!(kind, ErrorKind::Internal | ErrorKind::Unknown) {
+        Some(Backtrace::capture())
+    } else {
+        None
+    }
+}
+
+impl Error {
+    #[inline]
+    pub fn wrap<E: StdError + Sync + Send + 'static>(kind: ErrorKind, e: E) -> Self {
+        Self(Box::new(ErrorInner {
+            kind,
+            msg: None,
+            // http_status: None,
+            backtrace: maybe_backtrace(kind),
+            source: Some(Box::new(e)),
+        }))
+    }
+
+    #[inline]
+    pub fn map<F: FnOnce(ErrorKind) -> ErrorKind>(mut self, f: F) -> Self {
+        self.0.kind = f(self.0.kind);
+        self
+    }
+
+    #[inline]
+    pub fn kind(&self) -> ErrorKind {
+        self.0.kind
+    }
+
+    #[inline]
+    pub fn msg(&self) -> Option<&str> {
+        self.0.msg.as_deref()
+    }
+
+    /// Returns a borrowed value which can display not only this error but also
+    /// the full chain of causes and (where applicable) the stack trace.
+    ///
+    /// The exact format may change. Currently, it displays the stack trace for
+    /// the current error but not any of the sources.
+    #[inline]
+    pub fn chain(&self) -> impl Display + '_ {
+        ErrorChain(self)
+    }
+}
+
+/// Formats this error alone (*not* its full chain).
+impl Display for Error {
+    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+        match self.0.msg {
+            None => std::fmt::Display::fmt(self.0.kind.grpc_name(), f)?,
+            Some(ref msg) => write!(f, "{}: {}", self.0.kind.grpc_name(), msg)?,
+        }
+        if let Some(ref bt) = self.0.backtrace {
+            // TODO: only with "alternate"/# modifier?
+            // Shorten this, maybe by switching to `backtrace` + using
+            // `backtrace_ext::short_frames_strict` or similar.
+            write!(f, "\nBacktrace:\n{}", bt)?;
+        }
+        Ok(())
+    }
+}
+
+impl Debug for Error {
+    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+        std::fmt::Display::fmt(&ErrorChain(self), f)
+    }
+}
+
+/// Value returned by [`Error::chain`].
+struct ErrorChain<'a>(&'a Error);
+
+impl Display for ErrorChain<'_> {
+    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+        Display::fmt(self.0, f)?;
+        let mut source = self.0.source();
+        while let Some(n) = source {
+            write!(f, "\ncaused by: {}", n)?;
+            source = n.source()
+        }
+        Ok(())
+    }
+}
+
+impl StdError for Error {
+    fn source(&self) -> Option<&(dyn std::error::Error + 'static)> {
+        // https://users.rust-lang.org/t/question-about-error-source-s-static-return-type/34515/8
+        self.0.source.as_ref().map(|e| e.as_ref() as &_)
+    }
+}
 
 /// Extension methods for `Result`.
 pub trait ResultExt<T, E> {
     /// Annotates an error with the given kind.
@@ -143,70 +329,130 @@ pub trait ResultExt<T, E> {
 
 impl<T, E> ResultExt<T, E> for Result<T, E>
 where
-    E: Into<failure::Error>,
+    E: StdError + Sync + Send + 'static,
 {
     fn err_kind(self, k: ErrorKind) -> Result<T, Error> {
-        self.map_err(|e| e.into().context(k).into())
+        self.map_err(|e| ErrorBuilder::default().kind(k).source(e).build())
     }
 }
 
-/// Like `failure::bail!`, but the first argument specifies a type as an `ErrorKind`.
+/// Wrapper around `err!` which returns the error.
 ///
 /// Example with positional arguments:
 /// ```
-/// use moonfire_base::bail_t;
+/// use moonfire_base::bail;
 /// let e = || -> Result<(), moonfire_base::Error> {
-///     bail_t!(Unauthenticated, "unknown user: {}", "slamb");
+///     bail!(Unauthenticated, msg("unknown user: {}", "slamb"));
 /// }().unwrap_err();
 /// assert_eq!(e.kind(), moonfire_base::ErrorKind::Unauthenticated);
-/// assert_eq!(e.to_string(), "Unauthenticated: unknown user: slamb");
+/// assert_eq!(e.to_string(), "UNAUTHENTICATED: unknown user: slamb");
/// ```
 ///
 /// Example with named arguments:
 /// ```
-/// use moonfire_base::bail_t;
+/// use moonfire_base::bail;
 /// let e = || -> Result<(), moonfire_base::Error> {
 ///     let user = "slamb";
-///     bail_t!(Unauthenticated, "unknown user: {user}");
+///     bail!(Unauthenticated, msg("unknown user: {user}"));
 /// }().unwrap_err();
 /// assert_eq!(e.kind(), moonfire_base::ErrorKind::Unauthenticated);
-/// assert_eq!(e.to_string(), "Unauthenticated: unknown user: slamb");
+/// assert_eq!(e.to_string(), "UNAUTHENTICATED: unknown user: slamb");
 /// ```
 #[macro_export]
-macro_rules! bail_t {
-    ($t:ident, $fmt:expr) => {
-        return Err($crate::Error::from(failure::err_msg(format!($fmt)).context($crate::ErrorKind::$t)).into());
-    };
-    ($t:ident, $fmt:expr, $($arg:tt)+) => {
-        return Err($crate::Error::from(failure::err_msg(format!($fmt, $($arg)+)).context($crate::ErrorKind::$t)).into());
+macro_rules! bail {
+    ($($arg:tt)+) => {
+        return Err($crate::err!($($arg)+).into());
     };
 }
 
-/// Like `failure::format_err!`, but the first argument specifies a type as an `ErrorKind`.
-///
-/// Example with positional arguments:
-/// ```
-/// use moonfire_base::format_err_t;
-/// let e = format_err_t!(Unauthenticated, "unknown user: {}", "slamb");
-/// assert_eq!(e.kind(), moonfire_base::ErrorKind::Unauthenticated);
-/// assert_eq!(e.to_string(), "Unauthenticated: unknown user: slamb");
-/// ```
-///
-/// Example with named arguments:
-/// ```
-/// use moonfire_base::format_err_t;
-/// let user = "slamb";
-/// let e = format_err_t!(Unauthenticated, "unknown user: {user}");
-/// assert_eq!(e.kind(), moonfire_base::ErrorKind::Unauthenticated);
-/// assert_eq!(e.to_string(), "Unauthenticated: unknown user: slamb");
-/// ```
+/// Constructs an [`Error`], tersely.
+///
+/// This is a shorthand way to use [`ErrorBuilder`].
+///
+/// The first argument is an `Into<ErrorBuilder>`, such as the following:
+///
+/// *   an [`ErrorKind`] enum variant name like `Unauthenticated`.
+///     There's an implicit `use ::coded::ErrorKind::*` to allow the bare
+///     variant names just within this restrictive scope where you're unlikely
+///     to have conflicts with other identifiers.
+/// *   an [`std::io::Error`] as a source, which sets the new `Error`'s
+///     `ErrorKind` based on the `std::io::Error`.
+/// *   an `Error` as a source, which similarly copies the `ErrorKind`.
+/// *   an existing `ErrorBuilder`, which does not create a new source link.
+///
+/// Following arguments may be of these forms:
+///
+/// *   `msg(...)`, which expands to `.msg(format!(...))`. See [`ErrorBuilder::msg`].
+/// *   `source(...)`, which simply expands to `.source($src)`. See [`ErrorBuilder::source`].
+///
+/// ## Examples
+///
+/// Simplest:
+///
+/// ```rust
+/// # use coded::err;
+/// let e = err!(InvalidArgument);
+/// let e = err!(InvalidArgument,); // trailing commas are allowed
+/// assert_eq!(e.kind(), coded::ErrorKind::InvalidArgument);
+/// ```
+///
+/// Constructing with a fixed error variant name:
+///
+/// ```rust
+/// # use {coded::err, std::error::Error, std::num::ParseIntError};
+/// let input = "a12";
+/// let src = i32::from_str_radix(input, 10).unwrap_err();
+///
+/// let e = err!(InvalidArgument, source(src.clone()), msg("bad argument {:?}", input));
+/// // The line above is equivalent to:
+/// let e2 = ::coded::ErrorBuilder::from(::coded::ErrorKind::InvalidArgument)
+///     .source(src.clone())
+///     .msg(format!("bad argument {:?}", input))
+///     .build();
+///
+/// assert_eq!(e.kind(), coded::ErrorKind::InvalidArgument);
+/// assert_eq!(e.source().unwrap().downcast_ref::<ParseIntError>().unwrap(), &src);
+/// ```
+///
+/// Constructing from an `std::io::Error`:
+///
+/// ```rust
+/// # use coded::err;
+/// let e = std::io::Error::new(std::io::ErrorKind::NotFound, "file not found");
+/// let e = err!(e, msg("path {} not found", "foo"));
+/// assert_eq!(e.kind(), coded::ErrorKind::NotFound);
+/// ```
 #[macro_export]
-macro_rules! format_err_t {
-    ($t:ident, $fmt:expr) => {
-        Into::<$crate::Error>::into(failure::err_msg(format!($fmt)).context($crate::ErrorKind::$t))
-    };
-    ($t:ident, $fmt:expr, $($arg:tt)+) => {
-        Into::<$crate::Error>::into(failure::err_msg(format!($fmt, $($arg)+))
-            .context($crate::ErrorKind::$t))
+macro_rules! err {
+    // This uses the "incremental TT munchers", "internal rules", and "push-down accumulation"
+    // patterns explained in the excellent "The Little Book of Rust Macros":
+    // <https://veykril.github.io/tlborm/decl-macros/patterns/push-down-acc.html>.
+
+    (@accum $body:tt $(,)?) => {
+        $body.build()
+    };
+
+    (@accum ($($body:tt)*), source($src:expr) $($tail:tt)*) => {
+        $crate::err!(@accum ($($body)*.source($src)) $($tail)*)
+    };
+
+    // msg(...) uses the `format!` form even when there's only the format string.
+    // This can catch errors (e.g. https://github.com/dtolnay/anyhow/issues/55)
+    // and will allow supporting implicit named parameters:
+    // https://rust-lang.github.io/rfcs/2795-format-args-implicit-identifiers.html
+    (@accum ($($body:tt)*), msg($format:expr) $($tail:tt)*) => {
+        $crate::err!(@accum ($($body)*.msg(format!($format))) $($tail)*)
+    };
+    (@accum ($($body:tt)*), msg($format:expr, $($args:tt)*) $($tail:tt)*) => {
+        $crate::err!(@accum ($($body)*.msg(format!($format, $($args)*))) $($tail)*)
+    };
+
+    ($builder:expr $(, $($tail:tt)*)? ) => {
+        $crate::err!(@accum ({
+            use $crate::ErrorKind::*;
+            $crate::ErrorBuilder::from($builder)
+        })
+        , $($($tail)*)*
+        )
     };
 }
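
To make the new surface concrete, here is a small sketch of producing and propagating these errors from elsewhere in the tree. The `read_state` helper is invented for illustration; the macros, `ErrorBuilder`, and the `From` conversions generated by `cvt!` are the ones defined in base/error.rs above.

use moonfire_base::{bail, err, Error};

// Hypothetical helper: read a small state file, classifying failures.
fn read_state(path: &std::path::Path) -> Result<String, Error> {
    if path.as_os_str().is_empty() {
        bail!(InvalidArgument, msg("empty path"));
    }
    // `cvt!(std::io::Error)` provides `From<std::io::Error> for Error`, so `?`
    // converts automatically, with the kind derived from the `io::ErrorKind`.
    let contents = std::fs::read_to_string(path)?;
    if contents.is_empty() {
        // Attach an explicit kind and message.
        return Err(err!(
            DataLoss,
            msg("state file {} is unexpectedly empty", path.display())
        ));
    }
    Ok(contents)
}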
base/lib.rs
@@ -3,10 +3,10 @@
 // SPDX-License-Identifier: GPL-v3.0-or-later WITH GPL-3.0-linking-exception.
 
 pub mod clock;
-mod error;
+pub mod error;
 pub mod shutdown;
 pub mod strutil;
 pub mod time;
 pub mod tracing_setup;
 
-pub use crate::error::{prettify_failure, Error, ErrorKind, ResultExt};
+pub use crate::error::{Error, ErrorBuilder, ErrorKind, ResultExt};
base/time.rs
@@ -4,7 +4,7 @@
 
 //! Time and durations for Moonfire NVR's internal format.
 
-use failure::{bail, format_err, Error};
+use crate::{bail, err, Error};
 use nom::branch::alt;
 use nom::bytes::complete::{tag, take_while_m_n};
 use nom::combinator::{map, map_res, opt};
@@ -106,13 +106,16 @@ impl Time {
             opt(parse_zone),
         ))(input)
         .map_err(|e| match e {
-            nom::Err::Incomplete(_) => format_err!("incomplete"),
+            nom::Err::Incomplete(_) => err!(InvalidArgument, msg("incomplete")),
             nom::Err::Error(e) | nom::Err::Failure(e) => {
-                format_err!("{}", nom::error::convert_error(input, e))
+                err!(InvalidArgument, source(nom::error::convert_error(input, e)))
             }
         })?;
         if !remaining.is_empty() {
-            bail!("unexpected suffix {:?} following time string", remaining);
+            bail!(
+                InvalidArgument,
+                msg("unexpected suffix {remaining:?} following time string")
+            );
         }
         let (tm_hour, tm_min, tm_sec, subsec) = opt_time.unwrap_or((0, 0, 0, 0));
         let mut tm = time::Tm {
@@ -129,11 +132,11 @@ impl Time {
             tm_nsec: 0,
         };
         if tm.tm_mon == 0 {
-            bail!("time {:?} has month 0", input);
+            bail!(InvalidArgument, msg("time {input:?} has month 0"));
         }
         tm.tm_mon -= 1;
         if tm.tm_year < 1900 {
-            bail!("time {:?} has year before 1900", input);
+            bail!(InvalidArgument, msg("time {input:?} has year before 1900"));
         }
         tm.tm_year -= 1900;
db/Cargo.toml
@@ -21,7 +21,6 @@ blake3 = "1.0.0"
 byteorder = "1.0"
 cstr = "0.2.5"
 diff = "0.1.12"
-failure = "0.1.1"
 fnv = "1.0"
 futures = "0.3"
 h264-reader = "0.6.0"
db/auth.rs
@@ -6,7 +6,7 @@
 
 use crate::json::UserConfig;
 use crate::schema::Permissions;
-use base::{bail_t, format_err_t, strutil, Error, ErrorKind, ResultExt as _};
+use base::{bail, err, strutil, Error, ErrorKind, ResultExt as _};
 use fnv::FnvHashMap;
 use protobuf::Message;
 use ring::rand::{SecureRandom, SystemRandom};
@@ -96,11 +96,10 @@ impl User {
             _ => return Ok(false),
         };
         let hash = PasswordHash::new(hash).map_err(|e| {
-            format_err_t!(
+            err!(
                 DataLoss,
-                "bad stored password hash for user {:?}: {}",
-                self.username,
-                e,
+                msg("bad stored password hash for user {:?}", self.username),
+                source(e),
             )
         })?;
         match scrypt::Scrypt.verify_password(password.as_bytes(), &hash) {
@@ -110,11 +109,10 @@ impl User {
                 self.password_failure_count += 1;
                 Ok(false)
             }
-            Err(e) => Err(format_err_t!(
+            Err(e) => Err(err!(
                 Internal,
-                "unable to verify password for user {:?}: {}",
-                self.username,
-                e
+                msg("unable to verify password for user {:?}", self.username),
+                source(e),
             )),
         }
     }
@@ -234,7 +232,7 @@ impl FromStr for SessionFlag {
             "secure" => Ok(Self::Secure),
             "same-site" => Ok(Self::SameSite),
             "same-site-strict" => Ok(Self::SameSiteStrict),
-            _ => bail_t!(InvalidArgument, "No such session flag {:?}", s),
+            _ => bail!(InvalidArgument, msg("No such session flag {s:?}")),
         }
     }
 }
@@ -285,9 +283,9 @@ impl RawSessionId {
     pub fn decode_base64(input: &[u8]) -> Result<Self, Error> {
         let mut s = RawSessionId([0u8; 48]);
         let l = ::base64::decode_config_slice(input, ::base64::STANDARD_NO_PAD, &mut s.0[..])
-            .map_err(|e| format_err_t!(InvalidArgument, "bad session id: {e}"))?;
+            .map_err(|e| err!(InvalidArgument, msg("bad session id"), source(e)))?;
         if l != 48 {
-            bail_t!(InvalidArgument, "session id must be 48 bytes");
+            bail!(InvalidArgument, msg("session id must be 48 bytes"));
         }
         Ok(s)
     }
@@ -334,9 +332,9 @@ impl SessionHash {
     pub fn decode_base64(input: &[u8]) -> Result<Self, Error> {
         let mut h = SessionHash([0u8; 24]);
         let l = ::base64::decode_config_slice(input, ::base64::STANDARD_NO_PAD, &mut h.0[..])
-            .map_err(|e| format_err_t!(InvalidArgument, "invalid session hash: {e}"))?;
+            .map_err(|e| err!(InvalidArgument, msg("invalid session hash"), source(e)))?;
         if l != 24 {
-            bail_t!(InvalidArgument, "session hash must be 24 bytes");
+            bail!(InvalidArgument, msg("session hash must be 24 bytes"));
         }
         Ok(h)
     }
@@ -361,9 +359,10 @@ impl rusqlite::types::FromSql for Seed {
     fn column_result(value: rusqlite::types::ValueRef) -> rusqlite::types::FromSqlResult<Self> {
         let b = value.as_blob()?;
         if b.len() != 32 {
-            return Err(rusqlite::types::FromSqlError::Other(Box::new(
-                format_err_t!(Internal, "expected a 32-byte seed").compat(),
-            )));
+            return Err(rusqlite::types::FromSqlError::Other(Box::new(err!(
+                Internal,
+                msg("expected a 32-byte seed")
+            ))));
         }
         let mut s = Seed::default();
         s.0.copy_from_slice(b);
@@ -395,8 +394,7 @@ impl State {
             sessions: FnvHashMap::default(),
             rand: ring::rand::SystemRandom::new(),
         };
-        let mut stmt = conn
-            .prepare(
+        let mut stmt = conn.prepare(
             r#"
             select
               id,
@@ -409,30 +407,24 @@ impl State {
             from
               user
             "#,
-        )
-        .err_kind(ErrorKind::Unknown)?;
-        let mut rows = stmt.query(params![]).err_kind(ErrorKind::Unknown)?;
-        while let Some(row) = rows.next().err_kind(ErrorKind::Unknown)? {
-            let id = row.get(0).err_kind(ErrorKind::Unknown)?;
-            let name: String = row.get(1).err_kind(ErrorKind::Unknown)?;
+        )?;
+        let mut rows = stmt.query(params![])?;
+        while let Some(row) = rows.next()? {
+            let id = row.get(0)?;
+            let name: String = row.get(1)?;
             let mut permissions = Permissions::new();
             permissions
-                .merge_from_bytes(
-                    row.get_ref(6)
-                        .err_kind(ErrorKind::Unknown)?
-                        .as_blob()
-                        .err_kind(ErrorKind::Unknown)?,
-                )
-                .err_kind(ErrorKind::Unknown)?;
+                .merge_from_bytes(row.get_ref(6)?.as_blob()?)
+                .err_kind(ErrorKind::DataLoss)?;
             state.users_by_id.insert(
                 id,
                 User {
                     id,
                     username: name.clone(),
-                    config: row.get(2).err_kind(ErrorKind::Unknown)?,
-                    password_hash: row.get(3).err_kind(ErrorKind::Unknown)?,
-                    password_id: row.get(4).err_kind(ErrorKind::Unknown)?,
-                    password_failure_count: row.get(5).err_kind(ErrorKind::Unknown)?,
+                    config: row.get(2)?,
+                    password_hash: row.get(3)?,
+                    password_id: row.get(4)?,
+                    password_failure_count: row.get(5)?,
                     dirty: false,
                     permissions,
                 },
@@ -464,8 +456,7 @@ impl State {
         id: i32,
         change: UserChange,
     ) -> Result<&User, base::Error> {
-        let mut stmt = conn
-            .prepare_cached(
+        let mut stmt = conn.prepare_cached(
             r#"
             update user
             set
@@ -478,8 +469,7 @@ impl State {
             where
               id = :id
             "#,
-        )
-        .err_kind(ErrorKind::Unknown)?;
+        )?;
         let e = self.users_by_id.entry(id);
         let e = match e {
             ::std::collections::btree_map::Entry::Vacant(_) => panic!("missing uid {id}!"),
@@ -505,8 +495,7 @@ impl State {
                 ":config": &change.config,
                 ":id": &id,
                 ":permissions": &permissions,
-            })
-            .err_kind(ErrorKind::Unknown)?;
+            })?;
         }
         let u = e.into_mut();
         if u.username != change.username {
@@ -525,14 +514,12 @@ impl State {
     }
 
     fn add_user(&mut self, conn: &Connection, change: UserChange) -> Result<&User, base::Error> {
-        let mut stmt = conn
-            .prepare_cached(
+        let mut stmt = conn.prepare_cached(
             r#"
             insert into user (username, password_hash, config, permissions)
             values (:username, :password_hash, :config, :permissions)
             "#,
-        )
-        .err_kind(ErrorKind::Unknown)?;
+        )?;
         let password_hash = change.set_password_hash.unwrap_or(None);
         let permissions = change
             .permissions
@@ -543,8 +530,7 @@ impl State {
             ":password_hash": &password_hash,
             ":config": &change.config,
             ":permissions": &permissions,
-        })
-        .err_kind(ErrorKind::Unknown)?;
+        })?;
         let id = conn.last_insert_rowid() as i32;
         self.users_by_name.insert(change.username.clone(), id);
         let e = self.users_by_id.entry(id);
@@ -565,22 +551,15 @@ impl State {
     }
 
     pub fn delete_user(&mut self, conn: &mut Connection, id: i32) -> Result<(), base::Error> {
-        let tx = conn.transaction().err_kind(ErrorKind::Unknown)?;
-        tx.execute("delete from user_session where user_id = ?", params![id])
-            .err_kind(ErrorKind::Unknown)?;
+        let tx = conn.transaction()?;
+        tx.execute("delete from user_session where user_id = ?", params![id])?;
         {
-            let mut user_stmt = tx
-                .prepare_cached("delete from user where id = ?")
-                .err_kind(ErrorKind::Unknown)?;
-            if user_stmt
-                .execute(params![id])
-                .err_kind(ErrorKind::Unknown)?
-                != 1
-            {
-                bail_t!(NotFound, "user {} not found", id);
+            let mut user_stmt = tx.prepare_cached("delete from user where id = ?")?;
+            if user_stmt.execute(params![id])? != 1 {
+                bail!(NotFound, msg("user {id} not found"));
             }
         }
-        tx.commit().err_kind(ErrorKind::Unknown)?;
+        tx.commit()?;
         let name = self.users_by_id.remove(&id).unwrap().username;
         self.users_by_name
             .remove(&name)
@@ -609,16 +588,16 @@ impl State {
         let id = self
             .users_by_name
             .get(username)
-            .ok_or_else(|| format_err_t!(Unauthenticated, "no such user {username:?}"))?;
+            .ok_or_else(|| err!(Unauthenticated, msg("no such user {username:?}")))?;
         let u = self
             .users_by_id
             .get_mut(id)
             .expect("users_by_name implies users_by_id");
         if u.config.disabled {
-            bail_t!(Unauthenticated, "user {username:?} is disabled");
+            bail!(Unauthenticated, msg("user {username:?} is disabled"));
         }
         if !u.check_password(Some(&password))? {
-            bail_t!(Unauthenticated, "incorrect password");
+            bail!(Unauthenticated, msg("incorrect password"));
         }
         let password_id = u.password_id;
         State::make_session_int(
@@ -647,9 +626,9 @@ impl State {
         let u = self
             .users_by_id
             .get_mut(&uid)
-            .ok_or_else(|| format_err_t!(NotFound, "no such uid {:?}", uid))?;
+            .ok_or_else(|| err!(NotFound, msg("no such uid {uid:?}")))?;
         if u.config.disabled {
-            bail_t!(FailedPrecondition, "user is disabled");
+            bail!(FailedPrecondition, msg("user is disabled"));
         }
         State::make_session_int(
             &self.rand,
@@ -681,8 +660,7 @@ impl State {
         let mut seed = [0u8; 32];
         rand.fill(&mut seed).unwrap();
         let hash = session_id.hash();
-        let mut stmt = conn
-            .prepare_cached(
+        let mut stmt = conn.prepare_cached(
             r#"
             insert into user_session (session_id_hash, user_id, seed, flags, domain,
                                       creation_password_id, creation_time_sec,
@@ -693,8 +671,7 @@ impl State {
                     :creation_user_agent, :creation_peer_addr,
                     :permissions)
             "#,
-        )
-        .err_kind(ErrorKind::Unknown)?;
+        )?;
         let addr = creation.addr_buf();
         let addr: Option<&[u8]> = addr.as_ref().map(|a| a.as_ref());
         let permissions_blob = permissions
@@ -711,8 +688,7 @@ impl State {
             ":creation_user_agent": &creation.user_agent,
             ":creation_peer_addr": &addr,
             ":permissions": &permissions_blob,
-        })
-        .err_kind(ErrorKind::Unknown)?;
+        })?;
         let e = match sessions.entry(hash) {
             ::std::collections::hash_map::Entry::Occupied(_) => panic!("duplicate session hash!"),
             ::std::collections::hash_map::Entry::Vacant(e) => e,
@@ -749,17 +725,20 @@ impl State {
             }
         };
         let u = match self.users_by_id.get(&s.user_id) {
-            None => bail_t!(Internal, "session references nonexistent user!"),
+            None => bail!(Internal, msg("session references nonexistent user!")),
             Some(u) => u,
         };
         if let Some(r) = s.revocation_reason {
-            bail_t!(Unauthenticated, "session is no longer valid (reason={})", r);
+            bail!(
+                Unauthenticated,
+                msg("session is no longer valid (reason={r})")
+            );
         }
         s.last_use = req;
         s.use_count += 1;
         s.dirty = true;
         if u.config.disabled {
-            bail_t!(Unauthenticated, "user {:?} is disabled", &u.username);
+            bail!(Unauthenticated, msg("user {:?} is disabled", &u.username));
        }
         Ok((s, u))
     }
@@ -777,8 +756,7 @@ impl State {
             ::std::collections::hash_map::Entry::Vacant(e) => e.insert(lookup_session(conn, hash)?),
         };
         if s.revocation_reason.is_none() {
-            let mut stmt = conn
-                .prepare(
+            let mut stmt = conn.prepare(
                 r#"
                 update user_session
                 set
@@ -790,8 +768,7 @@ impl State {
                 where
                   session_id_hash = ?
                 "#,
-            )
-            .err_kind(ErrorKind::Unknown)?;
+            )?;
             let addr = req.addr_buf();
             let addr: Option<&[u8]> = addr.as_ref().map(|a| a.as_ref());
             stmt.execute(params![
@@ -801,8 +778,7 @@ impl State {
                 reason as i32,
                 detail,
                 &hash.0[..],
-            ])
-            .err_kind(ErrorKind::Unknown)?;
+            ])?;
             s.revocation = req;
             s.revocation_reason = Some(reason as i32);
         }
@@ -814,8 +790,7 @@ impl State {
     /// The caller is expected to call `post_flush` afterward if the transaction is
     /// successfully committed.
     pub fn flush(&self, tx: &Transaction) -> Result<(), Error> {
-        let mut u_stmt = tx
-            .prepare(
+        let mut u_stmt = tx.prepare(
             r#"
             update user
             set
@@ -824,10 +799,8 @@ impl State {
             where
              id = :id
             "#,
-        )
-        .err_kind(ErrorKind::Unknown)?;
-        let mut s_stmt = tx
-            .prepare(
+        )?;
+        let mut s_stmt = tx.prepare(
             r#"
             update user_session
             set
@@ -838,8 +811,7 @@ impl State {
             where
              session_id_hash = :hash
            "#,
-        )
-        .err_kind(ErrorKind::Unknown)?;
+        )?;
         for (&id, u) in &self.users_by_id {
             if !u.dirty {
                 continue;
@@ -848,13 +820,11 @@ impl State {
                 "flushing user with hash: {}",
                 u.password_hash.as_ref().unwrap()
             );
-            u_stmt
-                .execute(named_params! {
+            u_stmt.execute(named_params! {
                 ":password_failure_count": &u.password_failure_count,
                 ":password_hash": &u.password_hash,
                 ":id": &id,
-            })
-            .err_kind(ErrorKind::Unknown)?;
+            })?;
         }
         for (sh, s) in &self.sessions {
             if !s.dirty {
@@ -862,15 +832,13 @@ impl State {
             }
             let addr = s.last_use.addr_buf();
             let addr: Option<&[u8]> = addr.as_ref().map(|a| a.as_ref());
-            let cnt = s_stmt
-                .execute(named_params! {
+            let cnt = s_stmt.execute(named_params! {
                 ":last_use_time_sec": &s.last_use.when_sec,
                 ":last_use_user_agent": &s.last_use.user_agent,
                 ":last_use_peer_addr": &addr,
                 ":use_count": &s.use_count,
                 ":hash": &sh.0[..],
-            })
-            .err_kind(ErrorKind::Unknown)?;
+            })?;
             debug_assert_eq!(cnt, 1);
|
debug_assert_eq!(cnt, 1);
|
||||||
}
|
}
|
||||||
Ok(())
|
Ok(())
|
||||||
|
@ -890,8 +858,7 @@ impl State {
|
||||||
}
|
}
|
||||||
|
|
||||||
fn lookup_session(conn: &Connection, hash: &SessionHash) -> Result<Session, base::Error> {
|
fn lookup_session(conn: &Connection, hash: &SessionHash) -> Result<Session, base::Error> {
|
||||||
let mut stmt = conn
|
let mut stmt = conn.prepare_cached(
|
||||||
.prepare_cached(
|
|
||||||
r#"
|
r#"
|
||||||
select
|
select
|
||||||
user_id,
|
user_id,
|
||||||
|
@ -918,52 +885,43 @@ fn lookup_session(conn: &Connection, hash: &SessionHash) -> Result<Session, base
|
||||||
where
|
where
|
||||||
session_id_hash = ?
|
session_id_hash = ?
|
||||||
"#,
|
"#,
|
||||||
)
|
)?;
|
||||||
.err_kind(ErrorKind::Unknown)?;
|
let mut rows = stmt.query(params![&hash.0[..]])?;
|
||||||
let mut rows = stmt
|
|
||||||
.query(params![&hash.0[..]])
|
|
||||||
.err_kind(ErrorKind::Unknown)?;
|
|
||||||
let row = rows
|
let row = rows
|
||||||
.next()
|
.next()?
|
||||||
.err_kind(ErrorKind::Unknown)?
|
.ok_or_else(|| err!(NotFound, msg("no such session")))?;
|
||||||
.ok_or_else(|| format_err_t!(NotFound, "no such session"))?;
|
let creation_addr: FromSqlIpAddr = row.get(8)?;
|
||||||
let creation_addr: FromSqlIpAddr = row.get(8).err_kind(ErrorKind::Unknown)?;
|
let revocation_addr: FromSqlIpAddr = row.get(11)?;
|
||||||
let revocation_addr: FromSqlIpAddr = row.get(11).err_kind(ErrorKind::Unknown)?;
|
let last_use_addr: FromSqlIpAddr = row.get(16)?;
|
||||||
let last_use_addr: FromSqlIpAddr = row.get(16).err_kind(ErrorKind::Unknown)?;
|
|
||||||
let mut permissions = Permissions::new();
|
let mut permissions = Permissions::new();
|
||||||
permissions
|
permissions
|
||||||
.merge_from_bytes(
|
.merge_from_bytes(row.get_ref(18)?.as_blob()?)
|
||||||
row.get_ref(18)
|
.err_kind(ErrorKind::DataLoss)?;
|
||||||
.err_kind(ErrorKind::Unknown)?
|
|
||||||
.as_blob()
|
|
||||||
.err_kind(ErrorKind::Unknown)?,
|
|
||||||
)
|
|
||||||
.err_kind(ErrorKind::Internal)?;
|
|
||||||
Ok(Session {
|
Ok(Session {
|
||||||
user_id: row.get(0).err_kind(ErrorKind::Unknown)?,
|
user_id: row.get(0)?,
|
||||||
seed: row.get(1).err_kind(ErrorKind::Unknown)?,
|
seed: row.get(1)?,
|
||||||
flags: row.get(2).err_kind(ErrorKind::Unknown)?,
|
flags: row.get(2)?,
|
||||||
domain: row.get(3).err_kind(ErrorKind::Unknown)?,
|
domain: row.get(3)?,
|
||||||
description: row.get(4).err_kind(ErrorKind::Unknown)?,
|
description: row.get(4)?,
|
||||||
creation_password_id: row.get(5).err_kind(ErrorKind::Unknown)?,
|
creation_password_id: row.get(5)?,
|
||||||
creation: Request {
|
creation: Request {
|
||||||
when_sec: row.get(6).err_kind(ErrorKind::Unknown)?,
|
when_sec: row.get(6)?,
|
||||||
user_agent: row.get(7).err_kind(ErrorKind::Unknown)?,
|
user_agent: row.get(7)?,
|
||||||
addr: creation_addr.0,
|
addr: creation_addr.0,
|
||||||
},
|
},
|
||||||
revocation: Request {
|
revocation: Request {
|
||||||
when_sec: row.get(9).err_kind(ErrorKind::Unknown)?,
|
when_sec: row.get(9)?,
|
||||||
user_agent: row.get(10).err_kind(ErrorKind::Unknown)?,
|
user_agent: row.get(10)?,
|
||||||
addr: revocation_addr.0,
|
addr: revocation_addr.0,
|
||||||
},
|
},
|
||||||
revocation_reason: row.get(12).err_kind(ErrorKind::Unknown)?,
|
revocation_reason: row.get(12)?,
|
||||||
revocation_reason_detail: row.get(13).err_kind(ErrorKind::Unknown)?,
|
revocation_reason_detail: row.get(13)?,
|
||||||
last_use: Request {
|
last_use: Request {
|
||||||
when_sec: row.get(14).err_kind(ErrorKind::Unknown)?,
|
when_sec: row.get(14)?,
|
||||||
user_agent: row.get(15).err_kind(ErrorKind::Unknown)?,
|
user_agent: row.get(15)?,
|
||||||
addr: last_use_addr.0,
|
addr: last_use_addr.0,
|
||||||
},
|
},
|
||||||
use_count: row.get(17).err_kind(ErrorKind::Unknown)?,
|
use_count: row.get(17)?,
|
||||||
dirty: false,
|
dirty: false,
|
||||||
permissions,
|
permissions,
|
||||||
})
|
})
|
||||||
|
@ -1014,7 +972,8 @@ mod tests {
|
||||||
0,
|
0,
|
||||||
)
|
)
|
||||||
.unwrap_err();
|
.unwrap_err();
|
||||||
assert_eq!(format!("{e}"), "Unauthenticated: incorrect password");
|
assert_eq!(e.kind(), ErrorKind::Unauthenticated);
|
||||||
|
assert_eq!(e.msg().unwrap(), "incorrect password");
|
||||||
c.set_password("hunter2".to_owned());
|
c.set_password("hunter2".to_owned());
|
||||||
state.apply(&conn, c).unwrap();
|
state.apply(&conn, c).unwrap();
|
||||||
let e = state
|
let e = state
|
||||||
|
@ -1027,7 +986,8 @@ mod tests {
|
||||||
0,
|
0,
|
||||||
)
|
)
|
||||||
.unwrap_err();
|
.unwrap_err();
|
||||||
assert_eq!(format!("{e}"), "Unauthenticated: incorrect password");
|
assert_eq!(e.kind(), ErrorKind::Unauthenticated);
|
||||||
|
assert_eq!(e.msg().unwrap(), "incorrect password");
|
||||||
let sid = {
|
let sid = {
|
||||||
let (sid, s) = state
|
let (sid, s) = state
|
||||||
.login_by_password(
|
.login_by_password(
|
||||||
|
@ -1061,10 +1021,7 @@ mod tests {
|
||||||
let e = state
|
let e = state
|
||||||
.authenticate_session(&conn, req.clone(), &sid.hash())
|
.authenticate_session(&conn, req.clone(), &sid.hash())
|
||||||
.unwrap_err();
|
.unwrap_err();
|
||||||
assert_eq!(
|
assert_eq!(e.msg().unwrap(), "session is no longer valid (reason=1)");
|
||||||
format!("{e}"),
|
|
||||||
"Unauthenticated: session is no longer valid (reason=1)"
|
|
||||||
);
|
|
||||||
|
|
||||||
// Everything should persist across reload.
|
// Everything should persist across reload.
|
||||||
drop(state);
|
drop(state);
|
||||||
|
@ -1072,10 +1029,7 @@ mod tests {
|
||||||
let e = state
|
let e = state
|
||||||
.authenticate_session(&conn, req, &sid.hash())
|
.authenticate_session(&conn, req, &sid.hash())
|
||||||
.unwrap_err();
|
.unwrap_err();
|
||||||
assert_eq!(
|
assert_eq!(e.msg().unwrap(), "session is no longer valid (reason=1)");
|
||||||
format!("{e}"),
|
|
||||||
"Unauthenticated: session is no longer valid (reason=1)"
|
|
||||||
);
|
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Tests that flush works, including updating dirty sessions.
|
/// Tests that flush works, including updating dirty sessions.
|
||||||
|
@ -1173,10 +1127,8 @@ mod tests {
|
||||||
let e = state
|
let e = state
|
||||||
.authenticate_session(&conn, req, &sid.hash())
|
.authenticate_session(&conn, req, &sid.hash())
|
||||||
.unwrap_err();
|
.unwrap_err();
|
||||||
assert_eq!(
|
assert_eq!(e.kind(), ErrorKind::Unauthenticated);
|
||||||
format!("{e}"),
|
assert_eq!(e.msg().unwrap(), "session is no longer valid (reason=1)");
|
||||||
"Unauthenticated: session is no longer valid (reason=1)"
|
|
||||||
);
|
|
||||||
}
|
}
|
||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
|
@ -1229,16 +1181,15 @@ mod tests {
|
||||||
0,
|
0,
|
||||||
)
|
)
|
||||||
.unwrap_err();
|
.unwrap_err();
|
||||||
assert_eq!(e.to_string(), "Unauthenticated: user \"slamb\" is disabled");
|
assert_eq!(e.kind(), ErrorKind::Unauthenticated);
|
||||||
|
assert_eq!(e.msg().unwrap(), "user \"slamb\" is disabled");
|
||||||
|
|
||||||
// Authenticating existing sessions shouldn't work either.
|
// Authenticating existing sessions shouldn't work either.
|
||||||
let e = state
|
let e = state
|
||||||
.authenticate_session(&conn, req.clone(), &sid.hash())
|
.authenticate_session(&conn, req.clone(), &sid.hash())
|
||||||
.unwrap_err();
|
.unwrap_err();
|
||||||
assert_eq!(
|
assert_eq!(e.kind(), ErrorKind::Unauthenticated);
|
||||||
format!("{e}"),
|
assert_eq!(e.msg().unwrap(), "user \"slamb\" is disabled");
|
||||||
"Unauthenticated: user \"slamb\" is disabled"
|
|
||||||
);
|
|
||||||
|
|
||||||
// The user should still be disabled after reload.
|
// The user should still be disabled after reload.
|
||||||
drop(state);
|
drop(state);
|
||||||
|
@ -1246,10 +1197,8 @@ mod tests {
|
||||||
let e = state
|
let e = state
|
||||||
.authenticate_session(&conn, req, &sid.hash())
|
.authenticate_session(&conn, req, &sid.hash())
|
||||||
.unwrap_err();
|
.unwrap_err();
|
||||||
assert_eq!(
|
assert_eq!(e.kind(), ErrorKind::Unauthenticated);
|
||||||
format!("{e}"),
|
assert_eq!(e.msg().unwrap(), "user \"slamb\" is disabled");
|
||||||
"Unauthenticated: user \"slamb\" is disabled"
|
|
||||||
);
|
|
||||||
}
|
}
|
||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
|
@ -1309,7 +1258,8 @@ mod tests {
|
||||||
let e = state
|
let e = state
|
||||||
.authenticate_session(&conn, req.clone(), &sid.hash())
|
.authenticate_session(&conn, req.clone(), &sid.hash())
|
||||||
.unwrap_err();
|
.unwrap_err();
|
||||||
assert_eq!(format!("{e}"), "Unauthenticated: no such session");
|
assert_eq!(e.kind(), ErrorKind::Unauthenticated);
|
||||||
|
assert_eq!(e.msg().unwrap(), "no such session");
|
||||||
|
|
||||||
// The user should still be deleted after reload.
|
// The user should still be deleted after reload.
|
||||||
drop(state);
|
drop(state);
|
||||||
|
@ -1318,7 +1268,8 @@ mod tests {
|
||||||
let e = state
|
let e = state
|
||||||
.authenticate_session(&conn, req, &sid.hash())
|
.authenticate_session(&conn, req, &sid.hash())
|
||||||
.unwrap_err();
|
.unwrap_err();
|
||||||
assert_eq!(format!("{e}"), "Unauthenticated: no such session");
|
assert_eq!(e.kind(), ErrorKind::Unauthenticated);
|
||||||
|
assert_eq!(e.msg().unwrap(), "no such session");
|
||||||
}
|
}
|
||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
|
|
|
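The auth tests above now assert on the error's structured kind and message instead of string-matching its Display output. As a rough illustration only (not the project's actual `base::Error`, whose names and layout may differ), an error type with that surface could look like the following sketch:

// Illustrative sketch: a pared-down error type with the surface the tests
// rely on (`kind()` and `msg()`). All names here are hypothetical.
use std::fmt;

#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum ErrorKind {
    Unauthenticated,
    NotFound,
    Internal,
    Unknown,
}

#[derive(Debug)]
pub struct Error {
    kind: ErrorKind,
    msg: Option<String>,
}

impl Error {
    pub fn kind(&self) -> ErrorKind {
        self.kind
    }
    pub fn msg(&self) -> Option<&str> {
        self.msg.as_deref()
    }
}

impl fmt::Display for Error {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        // Display still combines both parts, as the old assertions expected.
        match &self.msg {
            Some(m) => write!(f, "{:?}: {}", self.kind, m),
            None => write!(f, "{:?}", self.kind),
        }
    }
}

fn check(password_ok: bool) -> Result<(), Error> {
    if !password_ok {
        return Err(Error {
            kind: ErrorKind::Unauthenticated,
            msg: Some("incorrect password".to_owned()),
        });
    }
    Ok(())
}

fn main() {
    let e = check(false).unwrap_err();
    // Tests can assert on the structured kind and message separately
    // instead of parsing the formatted string.
    assert_eq!(e.kind(), ErrorKind::Unauthenticated);
    assert_eq!(e.msg().unwrap(), "incorrect password");
    println!("{e}");
}

Keeping `kind()` and `msg()` separate lets tests pin down the classification without being coupled to the exact Display format.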
@@ -11,7 +11,7 @@ use crate::json::SampleFileDirConfig;
 use crate::raw;
 use crate::recording;
 use crate::schema;
-use failure::Error;
+use base::{err, Error};
 use fnv::{FnvHashMap, FnvHashSet};
 use nix::fcntl::AtFlags;
 use rusqlite::params;
@@ -104,7 +104,7 @@ pub fn run(conn: &mut rusqlite::Connection, opts: &Options) -> Result<i32, Error

 // Open the directory (checking its metadata) and hold it open (for the lock).
 let dir = dir::SampleFileDir::open(&config.path, &meta)
-.map_err(|e| e.context(format!("unable to open dir {}", config.path.display())))?;
+.map_err(|e| err!(e, msg("unable to open dir {}", config.path.display())))?;
 let mut streams = read_dir(&dir, opts)?;
 let mut rows = garbage_stmt.query(params![dir_id])?;
 while let Some(row) = rows.next()? {
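The `err!(e, msg(...))` call above replaces `failure`'s `e.context(...)`: attach a human-readable message while keeping the underlying error reachable through the source chain. A std-only sketch of that shape (hypothetical types, not the real macro):

use std::{error::Error as StdError, fmt, io};

#[derive(Debug)]
struct ContextError {
    msg: String,
    source: io::Error,
}

impl fmt::Display for ContextError {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        write!(f, "{}", self.msg)
    }
}

impl StdError for ContextError {
    fn source(&self) -> Option<&(dyn StdError + 'static)> {
        Some(&self.source)
    }
}

fn open_dir(path: &str) -> Result<(), ContextError> {
    // Wrap the io::Error with context instead of discarding it.
    std::fs::read_dir(path).map_err(|e| ContextError {
        msg: format!("unable to open dir {path}"),
        source: e,
    })?;
    Ok(())
}

fn main() {
    if let Err(e) = open_dir("/definitely/not/here") {
        // The context message prints first; the original io::Error stays
        // reachable through source().
        println!("{e}: {}", e.source().unwrap());
    }
}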
@@ -6,7 +6,7 @@
 //! This is used as part of the `moonfire-nvr check` database integrity checking
 //! and for tests of `moonfire-nvr upgrade`.

-use failure::Error;
+use base::Error;
 use rusqlite::params;
 use std::fmt::Write;

@@ -168,7 +168,8 @@ pub fn get_diffs(
 let tables1 = get_tables(c1)?;
 let tables2 = get_tables(c2)?;
 if let Some(diff) = diff_slices(n1, &tables1[..], n2, &tables2[..]) {
-write!(&mut diffs, "table list mismatch, {n1} vs {n2}:\n{diff}")?;
+write!(&mut diffs, "table list mismatch, {n1} vs {n2}:\n{diff}")
+.expect("write to String shouldn't fail");
 }

 // Compare columns and indices for each table.
@@ -176,7 +177,8 @@ pub fn get_diffs(
 let columns1 = get_table_columns(c1, t)?;
 let columns2 = get_table_columns(c2, t)?;
 if let Some(diff) = diff_slices(n1, &columns1[..], n2, &columns2[..]) {
-write!(&mut diffs, "table {t:?} column, {n1} vs {n2}:\n{diff}")?;
+write!(&mut diffs, "table {t:?} column, {n1} vs {n2}:\n{diff}")
+.expect("write to String shouldn't fail");
 }

 let mut indices1 = get_indices(c1, t)?;
@@ -184,7 +186,8 @@ pub fn get_diffs(
 indices1.sort_by(|a, b| a.name.cmp(&b.name));
 indices2.sort_by(|a, b| a.name.cmp(&b.name));
 if let Some(diff) = diff_slices(n1, &indices1[..], n2, &indices2[..]) {
-write!(&mut diffs, "table {t:?} indices, {n1} vs {n2}:\n{diff}")?;
+write!(&mut diffs, "table {t:?} indices, {n1} vs {n2}:\n{diff}")
+.expect("write to String shouldn't fail");
 }

 for i in &indices1 {
@@ -194,7 +197,8 @@ pub fn get_diffs(
 write!(
 &mut diffs,
 "table {t:?} index {i:?} columns {n1} vs {n2}:\n{diff}"
-)?;
+)
+.expect("write to String shouldn't fail");
 }
 }
 }
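The `write!(...).expect(...)` change above reflects that formatting into a `String` cannot fail, so the `fmt::Error` no longer needs to convert into the function's error type. A minimal illustration:

use std::fmt::Write;

fn main() {
    let mut diffs = String::new();
    let (n1, n2) = ("db1", "db2");
    // Writing into a String is infallible, so discarding the fmt::Error with
    // expect() is safe here.
    write!(&mut diffs, "table list mismatch, {n1} vs {n2}:\n")
        .expect("write to String shouldn't fail");
    println!("{diffs}");
}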
@@ -5,7 +5,7 @@
 //! In-memory indexes by calendar day.

 use base::time::{Duration, Time, TIME_UNITS_PER_SEC};
-use failure::Error;
+use base::{err, Error};
 use smallvec::SmallVec;
 use std::cmp;
 use std::collections::BTreeMap;
@@ -22,7 +22,12 @@ pub struct Key(pub(crate) [u8; 10]);
 impl Key {
 fn new(tm: time::Tm) -> Result<Self, Error> {
 let mut s = Key([0u8; 10]);
-write!(&mut s.0[..], "{}", tm.strftime("%Y-%m-%d")?)?;
+write!(
+&mut s.0[..],
+"{}",
+tm.strftime("%Y-%m-%d")
+.map_err(|e| err!(Internal, source(e)))?
+)?;
 Ok(s)
 }

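`err!(Internal, source(e))` above classifies a third-party failure (`strftime`) under an `ErrorKind` while preserving the original error as the source. A simplified stand-in showing the same conversion shape (types are illustrative, not the project's):

#[derive(Debug)]
enum ErrorKind {
    Internal,
}

#[derive(Debug)]
struct AppError {
    kind: ErrorKind,
    source: Option<Box<dyn std::error::Error + Send + Sync>>,
}

fn parse_number(s: &str) -> Result<u32, AppError> {
    // Classify the external error under a kind and keep it as the source.
    s.parse::<u32>().map_err(|e| AppError {
        kind: ErrorKind::Internal,
        source: Some(Box::new(e)),
    })
}

fn main() {
    let err = parse_number("not a number").unwrap_err();
    println!("{:?} (source: {:?})", err.kind, err.source);
}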
server/db/db.rs (236 lines changed)
@@ -34,11 +34,10 @@ use crate::raw;
 use crate::recording;
 use crate::schema;
 use crate::signal;
-use base::bail_t;
 use base::clock::{self, Clocks};
-use base::format_err_t;
 use base::strutil::encode_size;
-use failure::{bail, format_err, Error, ResultExt};
+use base::{bail, err, Error};
+// use failure::{bail, err, Error, ResultExt};
 use fnv::{FnvHashMap, FnvHashSet};
 use hashlink::LinkedHashMap;
 use itertools::Itertools;
@@ -47,7 +46,6 @@ use smallvec::SmallVec;
 use std::cell::RefCell;
 use std::cmp;
 use std::collections::{BTreeMap, VecDeque};
-use std::convert::TryInto;
 use std::fmt::Write as _;
 use std::mem;
 use std::ops::Range;
@@ -344,7 +342,10 @@ impl SampleFileDir {
 .dir
 .as_ref()
 .ok_or_else(|| {
-format_err_t!(FailedPrecondition, "sample file dir {} is closed", self.id)
+err!(
+FailedPrecondition,
+msg("sample file dir {} is closed", self.id)
+)
 })?
 .clone())
 }
@@ -696,10 +697,13 @@ impl StreamStateChanger {
 s.sample_file_dir_id == sc.sample_file_dir_id,
 ) {
 bail!(
+FailedPrecondition,
+msg(
 "can't change sample_file_dir_id {:?}->{:?} for non-empty stream {}",
 d,
 sc.sample_file_dir_id,
-sid
+sid,
+),
 );
 }
 }
@@ -711,7 +715,7 @@ impl StreamStateChanger {
 "#,
 )?;
 if stmt.execute(params![sid])? != 1 {
-bail!("missing stream {}", sid);
+bail!(Internal, msg("missing stream {sid}"));
 }
 streams.push((sid, None));
 } else {
@@ -731,7 +735,7 @@ impl StreamStateChanger {
 ":id": sid,
 })?;
 if rows != 1 {
-bail!("missing stream {}", sid);
+bail!(Internal, msg("missing stream {sid}"));
 }
 sids[i] = Some(sid);
 streams.push((
@@ -872,7 +876,7 @@ impl LockedDatabase {
 mut r: RecordingToInsert,
 ) -> Result<(CompositeId, Arc<Mutex<RecordingToInsert>>), Error> {
 let stream = match self.streams_by_id.get_mut(&stream_id) {
-None => bail!("no such stream {}", stream_id),
+None => bail!(FailedPrecondition, msg("no such stream {stream_id}")),
 Some(s) => s,
 };
 let id = CompositeId::new(
@@ -900,20 +904,26 @@ impl LockedDatabase {
 /// This must be the next unsynced recording.
 pub(crate) fn mark_synced(&mut self, id: CompositeId) -> Result<(), Error> {
 let stream = match self.streams_by_id.get_mut(&id.stream()) {
-None => bail!("no stream for recording {}", id),
+None => bail!(FailedPrecondition, msg("no stream for recording {id}")),
 Some(s) => s,
 };
 let next_unsynced = stream.cum_recordings + (stream.synced_recordings as i32);
 if id.recording() != next_unsynced {
 bail!(
+FailedPrecondition,
+msg(
 "can't sync {} when next unsynced recording is {} (next unflushed is {})",
 id,
 next_unsynced,
-stream.cum_recordings
+stream.cum_recordings,
+),
 );
 }
 if stream.synced_recordings == stream.uncommitted.len() {
-bail!("can't sync un-added recording {}", id);
+bail!(
+FailedPrecondition,
+msg("can't sync un-added recording {id}")
+);
 }
 let l = stream.uncommitted[stream.synced_recordings].lock().unwrap();
 let bytes = i64::from(l.sample_file_bytes);
@@ -929,7 +939,7 @@ impl LockedDatabase {
 ids: &mut Vec<CompositeId>,
 ) -> Result<(), Error> {
 let dir = match self.sample_file_dirs_by_id.get_mut(&dir_id) {
-None => bail!("no such dir {}", dir_id),
+None => bail!(FailedPrecondition, msg("no such dir {dir_id}")),
 Some(d) => d,
 };
 dir.garbage_unlinked.reserve(ids.len());
@@ -941,7 +951,10 @@ impl LockedDatabase {
 false
 });
 if !ids.is_empty() {
-bail!("delete_garbage with non-garbage ids {:?}", &ids[..]);
+bail!(
+FailedPrecondition,
+msg("delete_garbage with non-garbage ids {:?}", &ids[..])
+);
 }
 Ok(())
 }
@@ -955,7 +968,7 @@ impl LockedDatabase {
 cb: Box<dyn FnMut(LiveSegment) -> bool + Send>,
 ) -> Result<(), Error> {
 let s = match self.streams_by_id.get_mut(&stream_id) {
-None => bail!("no such stream {}", stream_id),
+None => bail!(NotFound, msg("no such stream {stream_id}")),
 Some(s) => s,
 };
 s.on_live_segment.push(cb);
@@ -975,7 +988,7 @@ impl LockedDatabase {

 pub(crate) fn send_live_segment(&mut self, stream: i32, l: LiveSegment) -> Result<(), Error> {
 let s = match self.streams_by_id.get_mut(&stream) {
-None => bail!("no such stream {}", stream),
+None => bail!(Internal, msg("no such stream {stream}")),
 Some(s) => s,
 };

@@ -992,7 +1005,7 @@ impl LockedDatabase {
 let span = tracing::info_span!("flush", flush_count = self.flush_count, reason);
 let _enter = span.enter();
 let o = match self.open.as_ref() {
-None => bail!("database is read-only"),
+None => bail!(Internal, msg("database is read-only")),
 Some(o) => o,
 };
 let tx = self.conn.transaction()?;
@@ -1029,7 +1042,7 @@ impl LockedDatabase {
 if let Some(l) = s.to_delete.last() {
 new_ranges.entry(stream_id).or_insert(None);
 let dir = match s.sample_file_dir_id {
-None => bail!("stream {} has no directory!", stream_id),
+None => bail!(Internal, msg("stream {stream_id} has no directory!")),
 Some(d) => d,
 };

@@ -1042,12 +1055,15 @@ impl LockedDatabase {
 let n = raw::delete_recordings(&tx, dir, start..end)?;
 if n != s.to_delete.len() {
 bail!(
+Internal,
+msg(
 "Found {} rows in {} .. {}, expected {}: {:?}",
 n,
 start,
 end,
 s.to_delete.len(),
-&s.to_delete
+&s.to_delete,
+),
 );
 }
 }
@@ -1069,7 +1085,7 @@ impl LockedDatabase {
 o.id,
 ])?;
 if rows != 1 {
-bail!("unable to find current open {}", o.id);
+bail!(Internal, msg("unable to find current open {}", o.id));
 }
 }
 self.auth.flush(&tx)?;
@@ -1209,7 +1225,7 @@ impl LockedDatabase {
 let dir = self
 .sample_file_dirs_by_id
 .get_mut(&id)
-.ok_or_else(|| format_err!("no such dir {}", id))?;
+.ok_or_else(|| err!(NotFound, msg("no such dir {id}")))?;
 if dir.dir.is_some() {
 continue;
 }
@@ -1220,7 +1236,7 @@ impl LockedDatabase {
 open.uuid.extend_from_slice(&o.uuid.as_bytes()[..]);
 }
 let d = dir::SampleFileDir::open(&dir.path, &expected_meta)
-.map_err(|e| e.context(format!("Failed to open dir {}", dir.path.display())))?;
+.map_err(|e| err!(e, msg("Failed to open dir {}", dir.path.display())))?;
 if self.open.is_none() {
 // read-only mode; it's already fully opened.
 dir.dir = Some(d);
@@ -1244,7 +1260,7 @@ impl LockedDatabase {
 )?;
 for &id in in_progress.keys() {
 if stmt.execute(params![o.id, id])? != 1 {
-bail!("unable to update dir {}", id);
+bail!(Internal, msg("unable to update dir {id}"));
 }
 }
 }
@@ -1290,7 +1306,7 @@ impl LockedDatabase {
 f: &mut dyn FnMut(ListRecordingsRow) -> Result<(), base::Error>,
 ) -> Result<(), base::Error> {
 let s = match self.streams_by_id.get(&stream_id) {
-None => bail_t!(NotFound, "no such stream {}", stream_id),
+None => bail!(NotFound, msg("no such stream {stream_id}")),
 Some(s) => s,
 };
 raw::list_recordings_by_time(&self.conn, stream_id, desired_time.clone(), f)?;
@@ -1323,7 +1339,7 @@ impl LockedDatabase {
 f: &mut dyn FnMut(ListRecordingsRow) -> Result<(), base::Error>,
 ) -> Result<(), base::Error> {
 let s = match self.streams_by_id.get(&stream_id) {
-None => bail_t!(NotFound, "no such stream {}", stream_id),
+None => bail!(NotFound, msg("no such stream {stream_id}")),
 Some(s) => s,
 };
 if desired_ids.start < s.cum_recordings {
@@ -1400,25 +1416,29 @@ impl LockedDatabase {
 } else {
 // append.
 if a.time.end != row.start {
-bail_t!(
+bail!(
 Internal,
+msg(
 "stream {} recording {} ends at {} but {} starts at {}",
 stream_id,
 a.ids.end - 1,
 a.time.end,
 row.id,
-row.start
+row.start,
+),
 );
 }
 if a.open_id != row.open_id {
-bail_t!(
+bail!(
 Internal,
+msg(
 "stream {} recording {} has open id {} but {} has {}",
 stream_id,
 a.ids.end - 1,
 a.open_id,
 row.id,
-row.open_id
+row.open_id,
+),
 );
 }
 a.time.end.0 += row.wall_duration_90k as i64;
@@ -1457,15 +1477,18 @@ impl LockedDatabase {
 let s = self
 .streams_by_id
 .get(&id.stream())
-.ok_or_else(|| format_err!("no stream for {}", id))?;
+.ok_or_else(|| err!(Internal, msg("no stream for {}", id)))?;
 if s.cum_recordings <= id.recording() {
 let i = id.recording() - s.cum_recordings;
 if i as usize >= s.uncommitted.len() {
 bail!(
+Internal,
+msg(
 "no such recording {}; latest committed is {}, latest is {}",
 id,
 s.cum_recordings,
-s.cum_recordings + s.uncommitted.len() as i32
+s.cum_recordings + s.uncommitted.len() as i32,
+),
 );
 }
 let l = s.uncommitted[i as usize].lock().unwrap();
@@ -1499,7 +1522,7 @@ impl LockedDatabase {
 }
 return result;
 }
-Err(format_err!("no such recording {}", id))
+Err(err!(Internal, msg("no such recording {id}")))
 }
 }
 }
@@ -1512,7 +1535,7 @@ impl LockedDatabase {
 f: &mut dyn FnMut(&ListOldestRecordingsRow) -> bool,
 ) -> Result<(), Error> {
 let s = match self.streams_by_id.get_mut(&stream_id) {
-None => bail!("no stream {}", stream_id),
+None => bail!(Internal, msg("no stream {stream_id}")),
 Some(s) => s,
 };
 let end = match s.to_delete.last() {
@@ -1552,15 +1575,18 @@ impl LockedDatabase {
 while let Some(row) = rows.next()? {
 let id = row.get(0)?;
 let data: Vec<u8> = row.get(6)?;
+let get_and_cvt = |i: usize| {
+let raw = row.get::<_, i32>(i)?;
+u16::try_from(raw).map_err(|e| err!(OutOfRange, source(e)))
+};
 self.video_sample_entries_by_id.insert(
 id,
 Arc::new(VideoSampleEntry {
 id,
-width: row.get::<_, i32>(1)?.try_into()?,
-height: row.get::<_, i32>(2)?.try_into()?,
-pasp_h_spacing: row.get::<_, i32>(3)?.try_into()?,
-pasp_v_spacing: row.get::<_, i32>(4)?.try_into()?,
+width: get_and_cvt(1)?,
+height: get_and_cvt(2)?,
+pasp_h_spacing: get_and_cvt(3)?,
+pasp_v_spacing: get_and_cvt(4)?,
 data,
 rfc6381_codec: row.get(5)?,
 }),
@@ -1599,7 +1625,7 @@ impl LockedDatabase {
 let last_complete_open = match (open_id, open_uuid) {
 (Some(id), Some(uuid)) => Some(Open { id, uuid: uuid.0 }),
 (None, None) => None,
-_ => bail!("open table missing id {}", id),
+_ => bail!(Internal, msg("open table missing id {id}")),
 };
 self.sample_file_dirs_by_id.insert(
 id,
@@ -1680,12 +1706,12 @@ impl LockedDatabase {
 let id = row.get(0)?;
 let type_: String = row.get(1)?;
 let type_ = StreamType::parse(&type_)
-.ok_or_else(|| format_err!("no such stream type {}", type_))?;
+.ok_or_else(|| err!(DataLoss, msg("no such stream type {type_}")))?;
 let camera_id = row.get(2)?;
 let c = self
 .cameras_by_id
 .get_mut(&camera_id)
-.ok_or_else(|| format_err!("missing camera {} for stream {}", camera_id, id))?;
+.ok_or_else(|| err!(DataLoss, msg("missing camera {camera_id} for stream {id}")))?;
 self.streams_by_id.insert(
 id,
 Stream {
@@ -1735,10 +1761,8 @@ impl LockedDatabase {
 || v.pasp_v_spacing != entry.pasp_v_spacing
 {
 bail!(
-"video_sample_entry id {}: existing entry {:?}, new {:?}",
-id,
-v,
-&entry
+Internal,
+msg("video_sample_entry id {id}: existing entry {v:?}, new {entry:?}"),
 );
 }
 return Ok(id);
@@ -1754,7 +1778,7 @@ impl LockedDatabase {
 ":rfc6381_codec": &entry.rfc6381_codec,
 ":data": &entry.data,
 })
-.map_err(|e| Error::from(e).context(format!("Unable to insert {:#?}", &entry)))?;
+.map_err(|e| err!(e, msg("Unable to insert {entry:#?}")))?;

 let id = self.conn.last_insert_rowid() as i32;
 self.video_sample_entries_by_id.insert(
@@ -1780,7 +1804,7 @@ impl LockedDatabase {
 let o = self
 .open
 .as_ref()
-.ok_or_else(|| format_err!("database is read-only"))?;
+.ok_or_else(|| err!(FailedPrecondition, msg("database is read-only")))?;

 // Populate meta.
 {
@@ -1816,7 +1840,7 @@ impl LockedDatabase {
 garbage_needs_unlink: FnvHashSet::default(),
 garbage_unlinked: Vec::new(),
 }),
-Entry::Occupied(_) => bail!("duplicate sample file dir id {}", id),
+Entry::Occupied(_) => bail!(Internal, msg("duplicate sample file dir id {id}")),
 };
 meta.last_complete_open = meta.in_progress_open.take().into();
 d.dir.as_ref().unwrap().write_meta(&meta)?;
@@ -1826,17 +1850,23 @@ impl LockedDatabase {
 pub fn delete_sample_file_dir(&mut self, dir_id: i32) -> Result<(), Error> {
 for (&id, s) in self.streams_by_id.iter() {
 if s.sample_file_dir_id == Some(dir_id) {
-bail!("can't delete dir referenced by stream {}", id);
+bail!(
+FailedPrecondition,
+msg("can't delete dir referenced by stream {id}")
+);
 }
 }
 let mut d = match self.sample_file_dirs_by_id.entry(dir_id) {
 ::std::collections::btree_map::Entry::Occupied(e) => e,
-_ => bail!("no such dir {} to remove", dir_id),
+_ => bail!(NotFound, msg("no such dir {dir_id} to remove")),
 };
 if !d.get().garbage_needs_unlink.is_empty() || !d.get().garbage_unlinked.is_empty() {
 bail!(
+FailedPrecondition,
+msg(
 "must collect garbage before deleting directory {}",
-d.get().path.display()
+d.get().path.display(),
+),
 );
 }
 let dir = match d.get_mut().dir.take() {
@@ -1847,17 +1877,19 @@ impl LockedDatabase {
 // a writer::Syncer also has a reference.
 d.get_mut().dir = Some(arc); // put it back.
 bail!(
-"can't delete directory {} with active syncer (refcnt {}",
-dir_id,
-c
+FailedPrecondition,
+msg("can't delete directory {dir_id} with active syncer (refcnt {c})"),
 );
 }
 },
 };
 if !dir.is_empty()? {
 bail!(
-"Can't delete sample file directory {} which still has files",
-&d.get().path.display()
+FailedPrecondition,
+msg(
+"can't delete sample file directory {} which still has files",
+&d.get().path.display(),
+),
 );
 }
 let mut meta = d.get().expected_meta(&self.uuid);
@@ -1868,7 +1900,7 @@ impl LockedDatabase {
 .execute("delete from sample_file_dir where id = ?", params![dir_id])?
 != 1
 {
-bail!("missing database row for dir {}", dir_id);
+bail!(Internal, msg("missing database row for dir {dir_id}"));
 }
 d.remove_entry();
 Ok(())
@@ -1919,10 +1951,9 @@ impl LockedDatabase {
 /// TODO: consider renaming this to `update_camera` and creating a bulk
 /// `apply_camera_changes`.
 pub fn null_camera_change(&mut self, camera_id: i32) -> Result<CameraChange, Error> {
-let camera = self
-.cameras_by_id
-.get(&camera_id)
-.ok_or_else(|| format_err!("no such camera {}", camera_id))?;
+let Some(camera) = self.cameras_by_id.get(&camera_id) else {
+bail!(Internal, msg("no such camera {camera_id}"));
+};
 let mut change = CameraChange {
 short_name: camera.short_name.clone(),
 config: camera.config.clone(),
@@ -1947,10 +1978,9 @@ impl LockedDatabase {
 pub fn update_camera(&mut self, camera_id: i32, mut camera: CameraChange) -> Result<(), Error> {
 let tx = self.conn.transaction()?;
 let streams;
-let c = self
-.cameras_by_id
-.get_mut(&camera_id)
-.ok_or_else(|| format_err!("no such camera {}", camera_id))?;
+let Some(c) = self.cameras_by_id.get_mut(&camera_id) else {
+bail!(Internal, msg("no such camera {camera_id}"));
+};
 {
 streams =
 StreamStateChanger::new(&tx, camera_id, Some(c), &self.streams_by_id, &mut camera)?;
@@ -1969,7 +1999,7 @@ impl LockedDatabase {
 ":config": &camera.config,
 })?;
 if rows != 1 {
-bail!("Camera {} missing from database", camera_id);
+bail!(Internal, msg("camera {camera_id} missing from database"));
 }
 }
 tx.commit()?;
@@ -1982,11 +2012,9 @@ impl LockedDatabase {
 /// Deletes a camera and its streams. The camera must have no recordings.
 pub fn delete_camera(&mut self, id: i32) -> Result<(), Error> {
 // TODO: also verify there are no uncommitted recordings.
-let uuid = self
-.cameras_by_id
-.get(&id)
-.map(|c| c.uuid)
-.ok_or_else(|| format_err!("No such camera {} to remove", id))?;
+let Some(uuid) = self.cameras_by_id.get(&id).map(|c| c.uuid) else {
+bail!(NotFound, msg("no such camera {id}"));
+};
 let mut streams_to_delete = Vec::new();
 let tx = self.conn.transaction()?;
 {
@@ -1996,18 +2024,21 @@ impl LockedDatabase {
 continue;
 };
 if stream.range.is_some() {
-bail!("Can't remove camera {}; has recordings.", id);
+bail!(
+FailedPrecondition,
+msg("can't remove camera {id}; has recordings")
+);
 }
 let rows = stream_stmt.execute(named_params! {":id": stream_id})?;
 if rows != 1 {
-bail!("Stream {} missing from database", id);
+bail!(Internal, msg("stream {id} missing from database"));
 }
 streams_to_delete.push(*stream_id);
 }
 let mut cam_stmt = tx.prepare_cached(r"delete from camera where id = :id")?;
 let rows = cam_stmt.execute(named_params! {":id": id})?;
 if rows != 1 {
-bail!("Camera {} missing from database", id);
+bail!(Internal, msg("camera {id} missing from database"));
 }
 }
 tx.commit()?;
@@ -2035,10 +2066,9 @@ impl LockedDatabase {
 "#,
 )?;
 for c in changes {
-let stream = self
-.streams_by_id
-.get(&c.stream_id)
-.ok_or_else(|| format_err!("no such stream id {}", c.stream_id))?;
+let Some(stream) = self.streams_by_id.get(&c.stream_id) else {
+bail!(Internal, msg("no such stream {}", c.stream_id));
+};
 let mut new_config = stream.config.clone();
 new_config.mode = (if c.new_record { "record" } else { "" }).into();
 new_config.retain_bytes = c.new_limit;
@@ -2179,8 +2209,11 @@ pub(crate) fn check_sqlite_version() -> Result<(), Error> {
 // https://www.sqlite.org/withoutrowid.html
 if rusqlite::version_number() < 3008002 {
 bail!(
+FailedPrecondition,
+msg(
 "SQLite version {} is too old; need at least 3.8.2",
 rusqlite::version()
+),
 );
 }
 Ok(())
@@ -2194,7 +2227,7 @@ pub fn init(conn: &mut rusqlite::Connection) -> Result<(), Error> {
 set_integrity_pragmas(conn)?;
 let tx = conn.transaction()?;
 tx.execute_batch(include_str!("schema.sql"))
-.context("unable to create database schema")?;
+.map_err(|e| err!(e, msg("unable to create database schema")))?;
 {
 let uuid = ::uuid::Uuid::new_v4();
 let uuid_bytes = &uuid.as_bytes()[..];
@@ -2228,7 +2261,9 @@ pub fn get_schema_version(conn: &rusqlite::Connection) -> Result<Option<i32>, Er
 fn get_boot_uuid() -> Result<Option<Uuid>, Error> {
 if cfg!(target_os = "linux") {
 let boot_id = std::fs::read_to_string("/proc/sys/kernel/random/boot_id")?;
-Ok(Some(Uuid::parse_str(boot_id.trim_end())?))
+Ok(Some(Uuid::parse_str(boot_id.trim_end()).map_err(|e| {
+err!(Internal, msg("boot_id is not a valid uuid"), source(e))
+})?))
 } else {
 Ok(None) // don't complain about lack of platform support; just return None.
 }
@@ -2236,30 +2271,33 @@ fn get_boot_uuid() -> Result<Option<Uuid>, Error> {

 /// Checks that the schema version in the given database is as expected.
 pub(crate) fn check_schema_version(conn: &rusqlite::Connection) -> Result<(), Error> {
-let ver = get_schema_version(conn)?.ok_or_else(|| {
-format_err!(
+let Some(ver) = get_schema_version(conn)? else {
+bail!(
+FailedPrecondition,
+msg(
 "no such table: version.\n\n\
 If you have created an empty database by hand, delete it and use `nvr init` \
 instead, as noted in the installation instructions: \
 <https://github.com/scottlamb/moonfire-nvr/blob/master/guide/install.md>\n\n\
 If you are starting from a database that predates schema versioning, see \
-<https://github.com/scottlamb/moonfire-nvr/blob/master/guide/schema.md>."
+<https://github.com/scottlamb/moonfire-nvr/blob/master/guide/schema.md>."),
 )
-})?;
+};
 match ver.cmp(&EXPECTED_VERSION) {
 std::cmp::Ordering::Less => bail!(
-"Database schema version {} is too old (expected {}); \
-see upgrade instructions in \
-<https://github.com/scottlamb/moonfire-nvr/blob/master/guide/upgrade.md>.",
-ver,
-EXPECTED_VERSION
+FailedPrecondition,
+msg(
+"database schema version {ver} is too old (expected {EXPECTED_VERSION}); \
+see upgrade instructions in guide/upgrade.md"
+),
 ),
 std::cmp::Ordering::Equal => Ok(()),
 std::cmp::Ordering::Greater => bail!(
-"Database schema version {} is too new (expected {}); \
-must use a newer binary to match.",
-ver,
-EXPECTED_VERSION
+FailedPrecondition,
+msg(
+"database schema version {ver} is too new (expected {EXPECTED_VERSION}); \
+must use a newer binary to match"
+),
 ),
 }
 }
@@ -2560,7 +2598,7 @@ mod tests {
 )
 .err()
 .unwrap();
-assert!(e.to_string().starts_with("no such table"), "{}", e);
+assert!(e.msg().unwrap().starts_with("no such table"), "{}", e);
 }

 #[test]
@@ -2571,8 +2609,9 @@ mod tests {
 .unwrap();
 let e = Database::new(clock::RealClocks {}, c, false).err().unwrap();
 assert!(
-e.to_string()
-.starts_with("Database schema version 6 is too old (expected 7)"),
+e.msg()
+.unwrap()
+.starts_with("database schema version 6 is too old (expected 7)"),
 "got: {e:?}"
 );
 }
@@ -2585,8 +2624,9 @@ mod tests {
 .unwrap();
 let e = Database::new(clock::RealClocks {}, c, false).err().unwrap();
 assert!(
-e.to_string()
-.starts_with("Database schema version 8 is too new (expected 7)"),
+e.msg()
+.unwrap()
+.starts_with("database schema version 8 is too new (expected 7)"),
 "got: {e:?}"
 );
 }
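Several lookups in the db.rs changes above switch from `ok_or_else(|| format_err!(...))?` chains to `let ... else` with an early bail. A minimal sketch of that pattern with a stand-in error type (names are illustrative, not the project's):

use std::collections::BTreeMap;

#[derive(Debug)]
struct Error(String);

fn camera_name(cameras: &BTreeMap<i32, String>, id: i32) -> Result<&str, Error> {
    // Early return when the map has no entry, instead of an ok_or_else chain.
    let Some(name) = cameras.get(&id) else {
        return Err(Error(format!("no such camera {id}")));
    };
    Ok(name.as_str())
}

fn main() {
    let mut cameras = BTreeMap::new();
    cameras.insert(1, "driveway".to_owned());
    assert_eq!(camera_name(&cameras, 1).unwrap(), "driveway");
    assert!(camera_name(&cameras, 2).is_err());
}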
@@ -12,8 +12,8 @@ mod reader;
 use crate::coding;
 use crate::db::CompositeId;
 use crate::schema;
+use base::{bail, err, Error};
 use cstr::cstr;
-use failure::{bail, format_err, Error, Fail};
 use nix::sys::statvfs::Statvfs;
 use nix::{
 fcntl::{FlockArg, OFlag},
@@ -145,20 +145,23 @@ pub(crate) fn read_meta(dir: &Fd) -> Result<schema::DirMeta, Error> {
 let mut data = Vec::new();
 f.read_to_end(&mut data)?;
 let (len, pos) = coding::decode_varint32(&data, 0)
-.map_err(|_| format_err!("Unable to decode varint length in meta file"))?;
+.map_err(|_| err!(DataLoss, msg("Unable to decode varint length in meta file")))?;
 if data.len() != FIXED_DIR_META_LEN || len as usize + pos > FIXED_DIR_META_LEN {
 bail!(
+DataLoss,
+msg(
 "Expected a {}-byte file with a varint length of a DirMeta message; got \
 a {}-byte file with length {}",
 FIXED_DIR_META_LEN,
 data.len(),
-len
+len,
+),
 );
 }
 let data = &data[pos..pos + len as usize];
 let mut s = protobuf::CodedInputStream::from_bytes(data);
 meta.merge_from(&mut s)
-.map_err(|e| e.context("Unable to parse metadata proto"))?;
+.map_err(|e| err!(DataLoss, msg("Unable to parse metadata proto"), source(e)))?;
 Ok(meta)
 }

@@ -169,9 +172,12 @@ pub(crate) fn write_meta(dirfd: RawFd, meta: &schema::DirMeta) -> Result<(), Err
 .expect("proto3->vec is infallible");
 if data.len() > FIXED_DIR_META_LEN {
 bail!(
-"Length-delimited DirMeta message requires {} bytes, over limit of {}",
+Internal,
+msg(
+"length-delimited DirMeta message requires {} bytes, over limit of {}",
 data.len(),
-FIXED_DIR_META_LEN
+FIXED_DIR_META_LEN,
+),
 );
 }
 data.resize(FIXED_DIR_META_LEN, 0); // pad to required length.
@@ -181,28 +187,31 @@ pub(crate) fn write_meta(dirfd: RawFd, meta: &schema::DirMeta) -> Result<(), Err
 OFlag::O_CREAT | OFlag::O_WRONLY,
 Mode::S_IRUSR | Mode::S_IWUSR,
 )
-.map_err(|e| e.context("Unable to open meta file"))?;
+.map_err(|e| err!(e, msg("unable to open meta file")))?;
 let stat = f
 .metadata()
-.map_err(|e| e.context("Unable to stat meta file"))?;
+.map_err(|e| err!(e, msg("unable to stat meta file")))?;
 if stat.len() == 0 {
 // Need to sync not only the data but also the file metadata and dirent.
 f.write_all(&data)
-.map_err(|e| e.context("Unable to write to meta file"))?;
+.map_err(|e| err!(e, msg("unable to write to meta file")))?;
 f.sync_all()
-.map_err(|e| e.context("Unable to sync meta file"))?;
-nix::unistd::fsync(dirfd).map_err(|e| e.context("Unable to sync dir"))?;
+.map_err(|e| err!(e, msg("unable to sync meta file")))?;
+nix::unistd::fsync(dirfd).map_err(|e| err!(e, msg("unable to sync dir")))?;
 } else if stat.len() == FIXED_DIR_META_LEN as u64 {
 // Just syncing the data will suffice; existing metadata and dirent are fine.
 f.write_all(&data)
-.map_err(|e| e.context("Unable to write to meta file"))?;
+.map_err(|e| err!(e, msg("unable to write to meta file")))?;
 f.sync_data()
-.map_err(|e| e.context("Unable to sync meta file"))?;
+.map_err(|e| err!(e, msg("unable to sync meta file")))?;
 } else {
 bail!(
-"Existing meta file is {}-byte; expected {}",
+DataLoss,
+msg(
+"existing meta file is {}-byte; expected {}",
 stat.len(),
-FIXED_DIR_META_LEN
+FIXED_DIR_META_LEN,
+),
 );
 }
 Ok(())
@@ -221,14 +230,15 @@ impl SampleFileDir {
 } else {
 FlockArg::LockSharedNonblock
 })
-.map_err(|e| e.context(format!("unable to lock dir {}", path.display())))?;
-let dir_meta = read_meta(&s.fd).map_err(|e| e.context("unable to read meta file"))?;
+.map_err(|e| err!(e, msg("unable to lock dir {}", path.display())))?;
+let dir_meta = read_meta(&s.fd).map_err(|e| err!(e, msg("unable to read meta file")))?;
 if let Err(e) = SampleFileDir::check_consistent(expected_meta, &dir_meta) {
 bail!(
-"metadata mismatch: {}.\nexpected:\n{:#?}\n\nactual:\n{:#?}",
-e,
-expected_meta,
-&dir_meta
+Internal,
+msg(
+"metadata mismatch: {e}.\nexpected:\n{expected_meta:#?}\n\nactual:\n\
+{dir_meta:#?}",
+),
 );
 }
 if expected_meta.in_progress_open.is_some() {
@@ -275,22 +285,28 @@ impl SampleFileDir {
 ) -> Result<Arc<SampleFileDir>, Error> {
 let s = SampleFileDir::open_self(path, true)?;
 s.fd.lock(FlockArg::LockExclusiveNonblock)
-.map_err(|e| e.context(format!("unable to lock dir {}", path.display())))?;
+.map_err(|e| err!(e, msg("unable to lock dir {}", path.display())))?;
 let old_meta = read_meta(&s.fd)?;

 // Verify metadata. We only care that it hasn't been completely opened.
 // Partial opening by this or another database is fine; we won't overwrite anything.
 if old_meta.last_complete_open.is_some() {
 bail!(
-"Can't create dir at path {}: is already in use:\n{:?}",
+FailedPrecondition,
+msg(
+"can't create dir at path {}: is already in use:\n{:?}",
 path.display(),
-old_meta
+old_meta,
+),
 );
 }
}
|
||||||
if !s.is_empty()? {
|
if !s.is_empty()? {
|
||||||
bail!(
|
bail!(
|
||||||
"Can't create dir at path {} with existing files",
|
FailedPrecondition,
|
||||||
path.display()
|
msg(
|
||||||
|
"can't create dir at path {} with existing files",
|
||||||
|
path.display(),
|
||||||
|
),
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
s.write_meta(db_meta)?;
|
s.write_meta(db_meta)?;
|
||||||
|
|
|
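The pattern applied across these hunks (and the rest of the commit) is the same: `failure`'s `format_err!`/`.context(...)` calls become the project's own `err!`/`bail!` macros, which take an `ErrorKind` plus optional `msg(...)` and `source(...)` arguments. A minimal sketch of that call-site shape, using hypothetical stand-in macros rather than the real `base`/`coded` implementations:

    // Hypothetical stand-ins that only mirror the call-site shape used in this
    // commit; the real macros live in the project's `base` crate and also accept
    // a `source(e)` argument to attach an underlying cause.
    #[derive(Debug)]
    #[allow(dead_code)]
    enum ErrorKind {
        DataLoss,
        Internal,
    }

    #[derive(Debug)]
    #[allow(dead_code)]
    struct Error {
        kind: ErrorKind,
        msg: Option<String>,
    }

    macro_rules! err {
        ($kind:ident, msg($($m:tt)*)) => {
            Error { kind: ErrorKind::$kind, msg: Some(format!($($m)*)) }
        };
    }

    macro_rules! bail {
        ($kind:ident, msg($($m:tt)*)) => {
            return Err(err!($kind, msg($($m)*)))
        };
    }

    // Old style: bail!("bad varint at offset {}", pos) with no kind attached.
    // New style: an explicit ErrorKind plus the message wrapped in msg(...).
    fn read_len(data: &[u8]) -> Result<usize, Error> {
        if data.is_empty() {
            bail!(DataLoss, msg("bad varint at offset {}", 0));
        }
        Ok(data[0] as usize)
    }

    fn main() {
        assert!(read_len(&[]).is_err());
        assert_eq!(read_len(&[3]).unwrap(), 3);
    }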
@@ -31,9 +31,9 @@ use std::{
 task::{Context, Poll},
 };

- use base::bail_t;
+ use base::bail;
 use base::clock::{RealClocks, TimerGuard};
- use base::{format_err_t, Error, ErrorKind, ResultExt};
+ use base::{err, Error, ErrorKind, ResultExt};
 use nix::{fcntl::OFlag, sys::stat::Mode};

 use crate::CompositeId;
@@ -116,9 +116,9 @@ impl FileStream {
 match Pin::new(&mut rx).poll(cx) {
 Poll::Ready(Err(_)) => {
 self.state = FileStreamState::Invalid;
- Poll::Ready(Some(Err(format_err_t!(
+ Poll::Ready(Some(Err(err!(
 Internal,
- "reader thread panicked; see logs"
+ msg("reader thread panicked; see logs")
 ))))
 }
 Poll::Ready(Ok(Err(e))) => {
@@ -319,12 +319,11 @@ impl ReaderInt {
 let map_len = usize::try_from(
 range.end - range.start + u64::try_from(unaligned).expect("usize fits in u64"),
 )
- .map_err(|_| {
+ .map_err(|e| {
- format_err_t!(
+ err!(
 OutOfRange,
- "file {}'s range {:?} len exceeds usize::MAX",
+ msg("file {composite_id}'s range {range:?} len exceeds usize::MAX"),
- composite_id,
+ source(e),
- range
 )
 })?;
 let map_len = std::num::NonZeroUsize::new(map_len).expect("range is non-empty");
@@ -337,12 +336,14 @@ impl ReaderInt {
 // with a SIGBUS or reading bad data at the end of the last page later.
 let metadata = file.metadata().err_kind(ErrorKind::Unknown)?;
 if metadata.len() < u64::try_from(offset).unwrap() + u64::try_from(map_len.get()).unwrap() {
- bail_t!(
+ bail!(
- Internal,
+ OutOfRange,
+ msg(
 "file {}, range {:?}, len {}",
 composite_id,
 range,
 metadata.len()
+ ),
 );
 }
 let map_ptr = unsafe {
@@ -356,17 +357,13 @@ impl ReaderInt {
 )
 }
 .map_err(|e| {
- format_err_t!(
+ err!(
- Internal,
+ e,
- "mmap failed for {} off={} len={}: {}",
+ msg("mmap failed for {composite_id} off={offset} len={map_len}")
- composite_id,
- offset,
- map_len,
- e
 )
 })?;

- if let Err(e) = unsafe {
+ if let Err(err) = unsafe {
 nix::sys::mman::madvise(
 map_ptr,
 map_len.get(),
@@ -375,11 +372,11 @@ impl ReaderInt {
 } {
 // This shouldn't happen but is "just" a performance problem.
 tracing::warn!(
- "madvise(MADV_SEQUENTIAL) failed for {} off={} len={}: {}",
+ %err,
- composite_id,
+ %composite_id,
 offset,
 map_len,
- e
+ "madvise(MADV_SEQUENTIAL) failed",
 );
 }

@@ -7,8 +7,7 @@
 use crate::db::{self, CompositeId, SqlUuid};
 use crate::json::GlobalConfig;
 use crate::recording;
- use base::{ErrorKind, ResultExt as _};
+ use base::{bail, err, Error, ErrorKind, ResultExt as _};
- use failure::{bail, Error, ResultExt as _};
 use fnv::FnvHashSet;
 use rusqlite::{named_params, params};
 use std::ops::Range;
@@ -190,8 +189,7 @@ pub(crate) fn insert_recording(
 id: CompositeId,
 r: &db::RecordingToInsert,
 ) -> Result<(), Error> {
- let mut stmt = tx
+ let mut stmt = tx.prepare_cached(
- .prepare_cached(
 r#"
 insert into recording (composite_id, stream_id, open_id, run_offset, flags,
 sample_file_bytes, start_time_90k, prev_media_duration_90k,
@@ -204,8 +202,7 @@ pub(crate) fn insert_recording(
 :video_samples, :video_sync_samples, :video_sample_entry_id,
 :end_reason)
 "#,
- )
+ )?;
- .with_context(|e| format!("can't prepare recording insert: {e}"))?;
 stmt.execute(named_params! {
 ":composite_id": id.0,
 ":stream_id": i64::from(id.stream()),
@@ -223,18 +220,21 @@ pub(crate) fn insert_recording(
 ":video_sample_entry_id": r.video_sample_entry_id,
 ":end_reason": r.end_reason.as_deref(),
 })
- .with_context(|e| format!("unable to insert recording for recording {id} {r:#?}: {e}"))?;
+ .map_err(|e| {
+ err!(
+ e,
+ msg("unable to insert recording for recording {id} {r:#?}")
+ )
+ })?;

- let mut stmt = tx
+ let mut stmt = tx.prepare_cached(
- .prepare_cached(
 r#"
 insert into recording_integrity (composite_id, local_time_delta_90k,
 sample_file_blake3)
 values (:composite_id, :local_time_delta_90k,
 :sample_file_blake3)
 "#,
- )
+ )?;
- .with_context(|e| format!("can't prepare recording_integrity insert: {e}"))?;
 let blake3 = r.sample_file_blake3.as_ref().map(|b| &b[..]);
 let delta = match r.run_offset {
 0 => None,
@@ -245,21 +245,19 @@ pub(crate) fn insert_recording(
 ":local_time_delta_90k": delta,
 ":sample_file_blake3": blake3,
 })
- .with_context(|e| format!("unable to insert recording_integrity for {r:#?}: {e}"))?;
+ .map_err(|e| err!(e, msg("unable to insert recording_integrity for {r:#?}")))?;

- let mut stmt = tx
+ let mut stmt = tx.prepare_cached(
- .prepare_cached(
 r#"
 insert into recording_playback (composite_id, video_index)
 values (:composite_id, :video_index)
 "#,
- )
+ )?;
- .with_context(|e| format!("can't prepare recording_playback insert: {e}"))?;
 stmt.execute(named_params! {
 ":composite_id": id.0,
 ":video_index": &r.video_index,
 })
- .with_context(|e| format!("unable to insert recording_playback for {r:#?}: {e}"))?;
+ .map_err(|e| err!(e, msg("unable to insert recording_playback for {r:#?}")))?;

 Ok(())
 }
@@ -322,26 +320,31 @@ pub(crate) fn delete_recordings(
 let n_playback = del_playback.execute(p)?;
 if n_playback != n {
 bail!(
+ Internal,
+ msg(
 "inserted {} garbage rows but deleted {} recording_playback rows!",
 n,
 n_playback
+ ),
 );
 }
 let n_integrity = del_integrity.execute(p)?;
 if n_integrity > n {
 // fewer is okay; recording_integrity is optional.
 bail!(
+ Internal,
+ msg(
 "inserted {} garbage rows but deleted {} recording_integrity rows!",
 n,
 n_integrity
+ ),
 );
 }
 let n_main = del_main.execute(p)?;
 if n_main != n {
 bail!(
- "inserted {} garbage rows but deleted {} recording rows!",
+ Internal,
- n,
+ msg("inserted {n} garbage rows but deleted {n_main} recording rows!"),
- n_main
 );
 }
 Ok(n)
@@ -408,9 +411,8 @@ pub(crate) fn get_range(
 let max_end = match maxes_opt {
 Some(Range { start: _, end: e }) => e,
 None => bail!(
- "missing max for stream {} which had min {}",
+ Internal,
- stream_id,
+ msg("missing max for stream {stream_id} which had min {min_start}"),
- min_start
 ),
 };
 Ok(Some(min_start..max_end))
@@ -6,7 +6,7 @@

 use crate::coding::{append_varint32, decode_varint32, unzigzag32, zigzag32};
 use crate::db;
- use failure::{bail, Error};
+ use base::{bail, Error};
 use std::convert::TryFrom;
 use std::ops::Range;
 use tracing::trace;
@@ -79,25 +79,31 @@ impl SampleIndexIterator {
 }
 let (raw1, i1) = match decode_varint32(data, i) {
 Ok(tuple) => tuple,
- Err(()) => bail!("bad varint 1 at offset {}", i),
+ Err(()) => bail!(DataLoss, msg("bad varint 1 at offset {i}")),
 };
 let (raw2, i2) = match decode_varint32(data, i1) {
 Ok(tuple) => tuple,
- Err(()) => bail!("bad varint 2 at offset {}", i1),
+ Err(()) => bail!(DataLoss, msg("bad varint 2 at offset {i1}")),
 };
 let duration_90k_delta = unzigzag32(raw1 >> 1);
 self.duration_90k += duration_90k_delta;
 if self.duration_90k < 0 {
 bail!(
+ DataLoss,
+ msg(
 "negative duration {} after applying delta {}",
 self.duration_90k,
- duration_90k_delta
+ duration_90k_delta,
+ ),
 );
 }
 if self.duration_90k == 0 && data.len() > i2 {
 bail!(
+ DataLoss,
+ msg(
 "zero duration only allowed at end; have {} bytes left",
 data.len() - i2
+ ),
 );
 }
 let (prev_bytes_key, prev_bytes_nonkey) = match self.is_key() {
@@ -115,11 +121,14 @@ impl SampleIndexIterator {
 }
 if self.bytes <= 0 {
 bail!(
+ DataLoss,
+ msg(
 "non-positive bytes {} after applying delta {} to key={} frame at ts {}",
 self.bytes,
 bytes_delta,
 self.is_key(),
- self.start_90k
+ self.start_90k,
+ ),
 );
 }
 Ok(true)
@@ -228,10 +237,13 @@ impl Segment {
 || desired_media_range_90k.end > recording.media_duration_90k
 {
 bail!(
+ OutOfRange,
+ msg(
 "desired media range [{}, {}) invalid for recording of length {}",
 desired_media_range_90k.start,
 desired_media_range_90k.end,
- recording.media_duration_90k
+ recording.media_duration_90k,
+ ),
 );
 }

@@ -257,10 +269,10 @@ impl Segment {
 let data = &playback.video_index;
 let mut it = SampleIndexIterator::default();
 if !it.next(data)? {
- bail!("no index");
+ bail!(Internal, msg("no index"));
 }
 if !it.is_key() {
- bail!("not key frame");
+ bail!(Internal, msg("not key frame"));
 }

 // Stop when hitting a frame with this start time.
@@ -336,10 +348,13 @@ impl Segment {
 None => {
 let mut it = SampleIndexIterator::default();
 if !it.next(data)? {
- bail!("recording {} has no frames", self.id);
+ bail!(Internal, msg("recording {} has no frames", self.id));
 }
 if !it.is_key() {
- bail!("recording {} doesn't start with key frame", self.id);
+ bail!(
+ Internal,
+ msg("recording {} doesn't start with key frame", self.id)
+ );
 }
 it
 }
@@ -350,19 +365,25 @@ impl Segment {
 for i in 0..self.frames {
 if !have_frame {
 bail!(
+ Internal,
+ msg(
 "recording {}: expected {} frames, found only {}",
 self.id,
 self.frames,
- i + 1
+ i + 1,
+ ),
 );
 }
 if it.is_key() {
 key_frame += 1;
 if key_frame > self.key_frames {
 bail!(
+ Internal,
+ msg(
 "recording {}: more than expected {} key frames",
 self.id,
- self.key_frames
+ self.key_frames,
+ ),
 );
 }
 }
@@ -381,10 +402,13 @@ impl Segment {
 }
 if key_frame < self.key_frames {
 bail!(
+ Internal,
+ msg(
 "recording {}: expected {} key frames, found only {}",
 self.id,
 self.key_frames,
- key_frame
+ key_frame,
+ ),
 );
 }
 Ok(())
@@ -499,7 +523,7 @@ mod tests {
 ];
 for test in &tests {
 let mut it = SampleIndexIterator::default();
- assert_eq!(it.next(test.encoded).unwrap_err().to_string(), test.err);
+ assert_eq!(it.next(test.encoded).unwrap_err().msg().unwrap(), test.err);
 }
 }

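One knock-on effect, visible in the test hunk just above: assertions that used to compare `err.to_string()` now go through the error's message accessor. A small sketch of that shape, with a hypothetical error type standing in for the real one (which, per the hunk, exposes `msg()` returning an `Option`):

    // Hypothetical stand-in error type; only the msg() accessor shape matters here.
    struct Error {
        msg: Option<String>,
    }

    impl Error {
        fn msg(&self) -> Option<&str> {
            self.msg.as_deref()
        }
    }

    fn main() {
        let err = Error { msg: Some("bad varint 1 at offset 0".to_owned()) };
        // Old: assert_eq!(err.to_string(), expected) compared the full rendering.
        // New: compare just the message, independent of kind and source chain.
        assert_eq!(err.msg().unwrap(), "bad varint 1 at offset 0");
    }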
|
@ -8,8 +8,7 @@
|
||||||
use crate::json::{SignalConfig, SignalTypeConfig};
|
use crate::json::{SignalConfig, SignalTypeConfig};
|
||||||
use crate::{coding, days};
|
use crate::{coding, days};
|
||||||
use crate::{recording, SqlUuid};
|
use crate::{recording, SqlUuid};
|
||||||
use base::bail_t;
|
use base::{bail, err, Error};
|
||||||
use failure::{bail, format_err, Error};
|
|
||||||
use fnv::FnvHashMap;
|
use fnv::FnvHashMap;
|
||||||
use rusqlite::{params, Connection, Transaction};
|
use rusqlite::{params, Connection, Transaction};
|
||||||
use std::collections::btree_map::Entry;
|
use std::collections::btree_map::Entry;
|
||||||
|
@ -149,19 +148,29 @@ impl<'a> PointDataIterator<'a> {
|
||||||
return Ok(None);
|
return Ok(None);
|
||||||
}
|
}
|
||||||
let (signal_delta, p) = coding::decode_varint32(self.data, self.cur_pos).map_err(|()| {
|
let (signal_delta, p) = coding::decode_varint32(self.data, self.cur_pos).map_err(|()| {
|
||||||
format_err!(
|
err!(
|
||||||
|
DataLoss,
|
||||||
|
msg(
|
||||||
"varint32 decode failure; data={:?} pos={}",
|
"varint32 decode failure; data={:?} pos={}",
|
||||||
self.data,
|
self.data,
|
||||||
self.cur_pos
|
self.cur_pos
|
||||||
|
),
|
||||||
|
)
|
||||||
|
})?;
|
||||||
|
let (state, p) = coding::decode_varint32(self.data, p).map_err(|()| {
|
||||||
|
err!(
|
||||||
|
DataLoss,
|
||||||
|
msg("varint32 decode failure; data={:?} pos={}", self.data, p)
|
||||||
)
|
)
|
||||||
})?;
|
})?;
|
||||||
let (state, p) = coding::decode_varint32(self.data, p)
|
|
||||||
.map_err(|()| format_err!("varint32 decode failure; data={:?} pos={}", self.data, p))?;
|
|
||||||
let signal = self.cur_signal.checked_add(signal_delta).ok_or_else(|| {
|
let signal = self.cur_signal.checked_add(signal_delta).ok_or_else(|| {
|
||||||
format_err!("signal overflow: {} + {}", self.cur_signal, signal_delta)
|
err!(
|
||||||
|
OutOfRange,
|
||||||
|
msg("signal overflow: {} + {}", self.cur_signal, signal_delta)
|
||||||
|
)
|
||||||
})?;
|
})?;
|
||||||
if state > u16::max_value() as u32 {
|
if state > u16::max_value() as u32 {
|
||||||
bail!("state overflow: {}", state);
|
bail!(OutOfRange, msg("state overflow: {state}"));
|
||||||
}
|
}
|
||||||
self.cur_pos = p;
|
self.cur_pos = p;
|
||||||
self.cur_signal = signal + 1;
|
self.cur_signal = signal + 1;
|
||||||
|
@ -335,15 +344,21 @@ impl State {
|
||||||
/// Helper for `update_signals` to do validation.
|
/// Helper for `update_signals` to do validation.
|
||||||
fn update_signals_validate(&self, signals: &[u32], states: &[u16]) -> Result<(), base::Error> {
|
fn update_signals_validate(&self, signals: &[u32], states: &[u16]) -> Result<(), base::Error> {
|
||||||
if signals.len() != states.len() {
|
if signals.len() != states.len() {
|
||||||
bail_t!(InvalidArgument, "signals and states must have same length");
|
bail!(
|
||||||
|
InvalidArgument,
|
||||||
|
msg("signals and states must have same length")
|
||||||
|
);
|
||||||
}
|
}
|
||||||
let mut next_allowed = 0u32;
|
let mut next_allowed = 0u32;
|
||||||
for (&signal, &state) in signals.iter().zip(states) {
|
for (&signal, &state) in signals.iter().zip(states) {
|
||||||
if signal < next_allowed {
|
if signal < next_allowed {
|
||||||
bail_t!(InvalidArgument, "signals must be monotonically increasing");
|
bail!(
|
||||||
|
InvalidArgument,
|
||||||
|
msg("signals must be monotonically increasing")
|
||||||
|
);
|
||||||
}
|
}
|
||||||
match self.signals_by_id.get(&signal) {
|
match self.signals_by_id.get(&signal) {
|
||||||
None => bail_t!(InvalidArgument, "unknown signal {}", signal),
|
None => bail!(InvalidArgument, msg("unknown signal {signal}")),
|
||||||
Some(s) => {
|
Some(s) => {
|
||||||
let states = self
|
let states = self
|
||||||
.types_by_uuid
|
.types_by_uuid
|
||||||
|
@ -351,11 +366,9 @@ impl State {
|
||||||
.map(|t| t.valid_states)
|
.map(|t| t.valid_states)
|
||||||
.unwrap_or(0);
|
.unwrap_or(0);
|
||||||
if state >= 16 || (states & (1 << state)) == 0 {
|
if state >= 16 || (states & (1 << state)) == 0 {
|
||||||
bail_t!(
|
bail!(
|
||||||
FailedPrecondition,
|
FailedPrecondition,
|
||||||
"signal {} specifies unknown state {}",
|
msg("signal {signal} specifies unknown state {state}"),
|
||||||
signal,
|
|
||||||
state
|
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -659,7 +672,8 @@ impl State {
|
||||||
let mut rows = stmt.query(params![])?;
|
let mut rows = stmt.query(params![])?;
|
||||||
while let Some(row) = rows.next()? {
|
while let Some(row) = rows.next()? {
|
||||||
let id: i32 = row.get(0)?;
|
let id: i32 = row.get(0)?;
|
||||||
let id = u32::try_from(id)?;
|
let id = u32::try_from(id)
|
||||||
|
.map_err(|e| err!(Internal, msg("signal id out of range"), source(e)))?;
|
||||||
let uuid: SqlUuid = row.get(1)?;
|
let uuid: SqlUuid = row.get(1)?;
|
||||||
let type_: SqlUuid = row.get(2)?;
|
let type_: SqlUuid = row.get(2)?;
|
||||||
let config: SignalConfig = row.get(3)?;
|
let config: SignalConfig = row.get(3)?;
|
||||||
|
@ -698,9 +712,12 @@ impl State {
|
||||||
for &value in type_.config.values.keys() {
|
for &value in type_.config.values.keys() {
|
||||||
if value == 0 || value >= 16 {
|
if value == 0 || value >= 16 {
|
||||||
bail!(
|
bail!(
|
||||||
|
OutOfRange,
|
||||||
|
msg(
|
||||||
"signal type {} value {} out of accepted range [0, 16)",
|
"signal type {} value {} out of accepted range [0, 16)",
|
||||||
uuid.0,
|
uuid.0,
|
||||||
value
|
value,
|
||||||
|
),
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
type_.valid_states |= 1 << value;
|
type_.valid_states |= 1 << value;
|
||||||
|
@ -741,9 +758,9 @@ impl State {
|
||||||
let e = sig_last_state.entry(signal);
|
let e = sig_last_state.entry(signal);
|
||||||
if let Entry::Occupied(ref e) = e {
|
if let Entry::Occupied(ref e) = e {
|
||||||
let (prev_time, prev_state) = *e.get();
|
let (prev_time, prev_state) = *e.get();
|
||||||
let s = signals_by_id.get_mut(&signal).ok_or_else(|| {
|
let Some(s) = signals_by_id.get_mut(&signal) else {
|
||||||
format_err!("time {} references invalid signal {}", time_90k, signal)
|
bail!(DataLoss, msg("time {time_90k} references invalid signal {signal}"));
|
||||||
})?;
|
};
|
||||||
s.days.adjust(prev_time..time_90k, 0, prev_state);
|
s.days.adjust(prev_time..time_90k, 0, prev_state);
|
||||||
}
|
}
|
||||||
if state == 0 {
|
if state == 0 {
|
||||||
|
@ -760,8 +777,8 @@ impl State {
|
||||||
}
|
}
|
||||||
if !cur.is_empty() {
|
if !cur.is_empty() {
|
||||||
bail!(
|
bail!(
|
||||||
"far future state should be unknown for all signals; is: {:?}",
|
Internal,
|
||||||
cur
|
msg("far future state should be unknown for all signals; is: {cur:?}")
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
Ok(())
|
Ok(())
|
||||||
|
|
|
@ -6,8 +6,8 @@
|
||||||
//!
|
//!
|
||||||
//! See `guide/schema.md` for more information.
|
//! See `guide/schema.md` for more information.
|
||||||
|
|
||||||
use crate::db;
|
use crate::db::{self, EXPECTED_VERSION};
|
||||||
use failure::{bail, Error};
|
use base::{bail, Error};
|
||||||
use nix::NixPath;
|
use nix::NixPath;
|
||||||
use rusqlite::params;
|
use rusqlite::params;
|
||||||
use std::ffi::CStr;
|
use std::ffi::CStr;
|
||||||
|
@ -60,14 +60,16 @@ fn upgrade(args: &Args, target_ver: i32, conn: &mut rusqlite::Connection) -> Res
|
||||||
{
|
{
|
||||||
assert_eq!(upgraders.len(), db::EXPECTED_VERSION as usize);
|
assert_eq!(upgraders.len(), db::EXPECTED_VERSION as usize);
|
||||||
let old_ver = conn.query_row("select max(id) from version", params![], |row| row.get(0))?;
|
let old_ver = conn.query_row("select max(id) from version", params![], |row| row.get(0))?;
|
||||||
if old_ver > db::EXPECTED_VERSION {
|
if old_ver > EXPECTED_VERSION {
|
||||||
bail!(
|
bail!(
|
||||||
"Database is at version {}, later than expected {}",
|
FailedPrecondition,
|
||||||
old_ver,
|
msg("database is at version {old_ver}, later than expected {EXPECTED_VERSION}"),
|
||||||
db::EXPECTED_VERSION
|
|
||||||
);
|
);
|
||||||
} else if old_ver < 0 {
|
} else if old_ver < 0 {
|
||||||
bail!("Database is at negative version {}!", old_ver);
|
bail!(
|
||||||
|
FailedPrecondition,
|
||||||
|
msg("Database is at negative version {old_ver}!")
|
||||||
|
);
|
||||||
}
|
}
|
||||||
info!(
|
info!(
|
||||||
"Upgrading database from version {} to version {}...",
|
"Upgrading database from version {} to version {}...",
|
||||||
|
@ -95,7 +97,7 @@ pub fn run(args: &Args, conn: &mut rusqlite::Connection) -> Result<(), Error> {
|
||||||
db::check_sqlite_version()?;
|
db::check_sqlite_version()?;
|
||||||
db::set_integrity_pragmas(conn)?;
|
db::set_integrity_pragmas(conn)?;
|
||||||
set_journal_mode(conn, args.preset_journal)?;
|
set_journal_mode(conn, args.preset_journal)?;
|
||||||
upgrade(args, db::EXPECTED_VERSION, conn)?;
|
upgrade(args, EXPECTED_VERSION, conn)?;
|
||||||
|
|
||||||
// As in "moonfire-nvr init": try for page_size=16384 and wal for the reasons explained there.
|
// As in "moonfire-nvr init": try for page_size=16384 and wal for the reasons explained there.
|
||||||
//
|
//
|
||||||
|
@ -154,7 +156,7 @@ mod tests {
|
||||||
use super::*;
|
use super::*;
|
||||||
use crate::compare;
|
use crate::compare;
|
||||||
use crate::testutil;
|
use crate::testutil;
|
||||||
use failure::ResultExt;
|
use base::err;
|
||||||
use fnv::FnvHashMap;
|
use fnv::FnvHashMap;
|
||||||
|
|
||||||
const BAD_ANAMORPHIC_VIDEO_SAMPLE_ENTRY: &[u8] = b"\x00\x00\x00\x84\x61\x76\x63\x31\x00\x00\
|
const BAD_ANAMORPHIC_VIDEO_SAMPLE_ENTRY: &[u8] = b"\x00\x00\x00\x84\x61\x76\x63\x31\x00\x00\
|
||||||
|
@ -209,7 +211,7 @@ mod tests {
|
||||||
let tmpdir = tempfile::Builder::new()
|
let tmpdir = tempfile::Builder::new()
|
||||||
.prefix("moonfire-nvr-test")
|
.prefix("moonfire-nvr-test")
|
||||||
.tempdir()?;
|
.tempdir()?;
|
||||||
//let path = tmpdir.path().to_str().ok_or_else(|| format_err!("invalid UTF-8"))?.to_owned();
|
//let path = tmpdir.path().to_str().ok_or_else(|| err!("invalid UTF-8"))?.to_owned();
|
||||||
let mut upgraded = new_conn()?;
|
let mut upgraded = new_conn()?;
|
||||||
upgraded.execute_batch(include_str!("v0.sql"))?;
|
upgraded.execute_batch(include_str!("v0.sql"))?;
|
||||||
upgraded.execute_batch(
|
upgraded.execute_batch(
|
||||||
|
@ -291,7 +293,7 @@ mod tests {
|
||||||
*ver,
|
*ver,
|
||||||
&mut upgraded,
|
&mut upgraded,
|
||||||
)
|
)
|
||||||
.context(format!("upgrading to version {ver}"))?;
|
.map_err(|e| err!(e, msg("upgrade to version {ver} failed")))?;
|
||||||
if let Some(f) = fresh_sql {
|
if let Some(f) = fresh_sql {
|
||||||
compare(&upgraded, *ver, f)?;
|
compare(&upgraded, *ver, f)?;
|
||||||
}
|
}
|
||||||
|
|
|
@ -5,7 +5,7 @@
|
||||||
/// Upgrades a version 0 schema to a version 1 schema.
|
/// Upgrades a version 0 schema to a version 1 schema.
|
||||||
use crate::db;
|
use crate::db;
|
||||||
use crate::recording;
|
use crate::recording;
|
||||||
use failure::Error;
|
use base::Error;
|
||||||
use rusqlite::{named_params, params};
|
use rusqlite::{named_params, params};
|
||||||
use std::collections::HashMap;
|
use std::collections::HashMap;
|
||||||
use tracing::warn;
|
use tracing::warn;
|
||||||
|
|
|
@@ -5,7 +5,7 @@
 /// Upgrades a version 1 schema to a version 2 schema.
 use crate::dir;
 use crate::schema::DirMeta;
- use failure::{bail, format_err, Error};
+ use base::{bail, Error};
 use nix::fcntl::{FlockArg, OFlag};
 use nix::sys::stat::Mode;
 use rusqlite::{named_params, params};
@@ -13,9 +13,12 @@ use std::os::unix::io::AsRawFd;
 use uuid::Uuid;

 pub fn run(args: &super::Args, tx: &rusqlite::Transaction) -> Result<(), Error> {
- let sample_file_path = args.sample_file_dir.ok_or_else(|| {
+ let Some(sample_file_path) = args.sample_file_dir else {
- format_err!("--sample-file-dir required when upgrading from schema version 1 to 2.")
+ bail!(
- })?;
+ InvalidArgument,
+ msg("--sample-file-dir required when upgrading from schema version 1 to 2."),
+ );
+ };

 let mut d = nix::dir::Dir::open(
 sample_file_path,
@@ -101,12 +104,12 @@ pub fn run(args: &super::Args, tx: &rusqlite::Transaction) -> Result<(), Error>
 }
 dir::write_meta(d.as_raw_fd(), &meta)?;

- let sample_file_path = sample_file_path.to_str().ok_or_else(|| {
+ let Some(sample_file_path) = sample_file_path.to_str() else {
- format_err!(
+ bail!(
- "sample file dir {} is not a valid string",
+ InvalidArgument,
- sample_file_path.display()
+ msg("sample file dir {} is not a valid string", sample_file_path.display()),
- )
+ );
- })?;
+ };
 tx.execute(
 r#"
 insert into sample_file_dir (path, uuid, last_complete_open_id)
@@ -317,15 +320,24 @@ fn verify_dir_contents(
 };
 let s = match f.to_str() {
 Ok(s) => s,
- Err(_) => bail!("unexpected file {:?} in {:?}", f, sample_file_path),
+ Err(_) => bail!(
+ FailedPrecondition,
+ msg("unexpected file {f:?} in {sample_file_path:?}")
+ ),
 };
 let uuid = match Uuid::parse_str(s) {
 Ok(u) => u,
- Err(_) => bail!("unexpected file {:?} in {:?}", f, sample_file_path),
+ Err(_) => bail!(
+ FailedPrecondition,
+ msg("unexpected file {f:?} in {sample_file_path:?}")
+ ),
 };
 if s != uuid.as_hyphenated().to_string() {
 // non-canonical form.
- bail!("unexpected file {:?} in {:?}", f, sample_file_path);
+ bail!(
+ FailedPrecondition,
+ msg("unexpected file {f:?} in {sample_file_path:?}")
+ );
 }
 files.insert(uuid);
 }
@@ -338,9 +350,12 @@ fn verify_dir_contents(
 let uuid: crate::db::SqlUuid = row.get(0)?;
 if !files.remove(&uuid.0) {
 bail!(
+ FailedPrecondition,
+ msg(
 "{} is missing from dir {}!",
 uuid.0,
 sample_file_path.display()
+ ),
 );
 }
 }
@@ -367,10 +382,13 @@ fn verify_dir_contents(

 if !files.is_empty() {
 bail!(
+ FailedPrecondition,
+ msg(
 "{} unexpected sample file uuids in dir {}: {:?}!",
 files.len(),
 sample_file_path.display(),
- files
+ files,
+ ),
 );
 }
 Ok(())
@@ -413,7 +431,7 @@ fn fix_video_sample_entry(tx: &rusqlite::Transaction) -> Result<(), Error> {
 fn rfc6381_codec_from_sample_entry(sample_entry: &[u8]) -> Result<String, Error> {
 if sample_entry.len() < 99 || &sample_entry[4..8] != b"avc1" || &sample_entry[90..94] != b"avcC"
 {
- bail!("not a valid AVCSampleEntry");
+ bail!(InvalidArgument, msg("not a valid AVCSampleEntry"));
 }
 let profile_idc = sample_entry[103];
 let constraint_flags_byte = sample_entry[104];
@@ -8,9 +8,8 @@
 use crate::db::{self, SqlUuid};
 use crate::dir;
 use crate::schema;
- use failure::Error;
+ use base::Error;
 use rusqlite::params;
- use std::convert::TryFrom;
 use std::os::unix::io::AsRawFd;
 use std::path::PathBuf;
 use std::sync::Arc;
@@ -50,7 +49,7 @@ fn open_sample_file_dir(tx: &rusqlite::Transaction) -> Result<Arc<dir::SampleFil
 open.id = o_id as u32;
 open.uuid.extend_from_slice(&o_uuid.0.as_bytes()[..]);
 }
- let p = PathBuf::try_from(p)?;
+ let p = PathBuf::from(p);
 dir::SampleFileDir::open(&p, &meta)
 }

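Several of the upgrade hunks (here and in `lower_retention` further down) also replace `ok_or_else(|| format_err!(...))?` with a `let ... else` block that ends in `bail!`. A minimal sketch of that shape, again with a hypothetical `bail!` stand-in returning a plain `String` error:

    // Hypothetical stand-in: the real bail! builds the project's Error type;
    // a String is enough to show the control flow.
    macro_rules! bail {
        ($kind:ident, msg($($m:tt)*)) => {
            return Err(format!("{}: {}", stringify!($kind), format!($($m)*)))
        };
    }

    fn sample_file_dir(arg: Option<&str>) -> Result<&str, String> {
        // Old: arg.ok_or_else(|| format_err!("--sample-file-dir required ..."))?
        // New: let-else makes the early return explicit and lets bail! attach a kind.
        let Some(dir) = arg else {
            bail!(InvalidArgument, msg("--sample-file-dir required"));
        };
        Ok(dir)
    }

    fn main() {
        assert!(sample_file_dir(None).is_err());
        assert_eq!(sample_file_dir(Some("/media/nvr")).unwrap(), "/media/nvr");
    }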
@@ -3,7 +3,7 @@
 // SPDX-License-Identifier: GPL-v3.0-or-later WITH GPL-3.0-linking-exception.

 /// Upgrades a version 3 schema to a version 4 schema.
- use failure::Error;
+ use base::Error;

 pub fn run(_args: &super::Args, tx: &rusqlite::Transaction) -> Result<(), Error> {
 // These create statements match the schema.sql when version 4 was the latest.
@ -8,8 +8,8 @@
|
||||||
/// Otherwise, verify they are consistent with the database then upgrade them.
|
/// Otherwise, verify they are consistent with the database then upgrade them.
|
||||||
use crate::db::SqlUuid;
|
use crate::db::SqlUuid;
|
||||||
use crate::{dir, schema};
|
use crate::{dir, schema};
|
||||||
|
use base::{bail, err, Error};
|
||||||
use cstr::cstr;
|
use cstr::cstr;
|
||||||
use failure::{bail, Error, Fail};
|
|
||||||
use nix::fcntl::{FlockArg, OFlag};
|
use nix::fcntl::{FlockArg, OFlag};
|
||||||
use nix::sys::stat::Mode;
|
use nix::sys::stat::Mode;
|
||||||
use protobuf::Message;
|
use protobuf::Message;
|
||||||
|
@ -34,15 +34,17 @@ fn maybe_upgrade_meta(dir: &dir::Fd, db_meta: &schema::DirMeta) -> Result<bool,
|
||||||
|
|
||||||
let mut s = protobuf::CodedInputStream::from_bytes(&data);
|
let mut s = protobuf::CodedInputStream::from_bytes(&data);
|
||||||
let mut dir_meta = schema::DirMeta::new();
|
let mut dir_meta = schema::DirMeta::new();
|
||||||
dir_meta
|
dir_meta.merge_from(&mut s).map_err(|e| {
|
||||||
.merge_from(&mut s)
|
err!(
|
||||||
.map_err(|e| e.context("Unable to parse metadata proto: {}"))?;
|
FailedPrecondition,
|
||||||
|
msg("unable to parse metadata proto"),
|
||||||
|
source(e)
|
||||||
|
)
|
||||||
|
})?;
|
||||||
if let Err(e) = dir::SampleFileDir::check_consistent(db_meta, &dir_meta) {
|
if let Err(e) = dir::SampleFileDir::check_consistent(db_meta, &dir_meta) {
|
||||||
bail!(
|
bail!(
|
||||||
"Inconsistent db_meta={:?} dir_meta={:?}: {}",
|
FailedPrecondition,
|
||||||
&db_meta,
|
msg("inconsistent db_meta={db_meta:?} dir_meta={dir_meta:?}: {e}"),
|
||||||
&dir_meta,
|
|
||||||
e
|
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
let mut f = crate::fs::openat(
|
let mut f = crate::fs::openat(
|
||||||
|
@ -56,9 +58,12 @@ fn maybe_upgrade_meta(dir: &dir::Fd, db_meta: &schema::DirMeta) -> Result<bool,
|
||||||
.expect("proto3->vec is infallible");
|
.expect("proto3->vec is infallible");
|
||||||
if data.len() > FIXED_DIR_META_LEN {
|
if data.len() > FIXED_DIR_META_LEN {
|
||||||
bail!(
|
bail!(
|
||||||
"Length-delimited DirMeta message requires {} bytes, over limit of {}",
|
Internal,
|
||||||
|
msg(
|
||||||
|
"length-delimited DirMeta message requires {} bytes, over limit of {}",
|
||||||
data.len(),
|
data.len(),
|
||||||
FIXED_DIR_META_LEN
|
FIXED_DIR_META_LEN,
|
||||||
|
),
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
data.resize(FIXED_DIR_META_LEN, 0); // pad to required length.
|
data.resize(FIXED_DIR_META_LEN, 0); // pad to required length.
|
||||||
|
@ -144,12 +149,12 @@ pub fn run(_args: &super::Args, tx: &rusqlite::Transaction) -> Result<(), Error>
|
||||||
o.uuid.extend_from_slice(&uuid.0.as_bytes()[..]);
|
o.uuid.extend_from_slice(&uuid.0.as_bytes()[..]);
|
||||||
}
|
}
|
||||||
(None, None) => {}
|
(None, None) => {}
|
||||||
_ => bail!("open table missing id"),
|
_ => bail!(Internal, msg("open table missing id")),
|
||||||
}
|
}
|
||||||
|
|
||||||
let dir = dir::Fd::open(path, false)?;
|
let dir = dir::Fd::open(path, false)?;
|
||||||
dir.lock(FlockArg::LockExclusiveNonblock)
|
dir.lock(FlockArg::LockExclusiveNonblock)
|
||||||
.map_err(|e| e.context(format!("unable to lock dir {path}")))?;
|
.map_err(|e| err!(e, msg("unable to lock dir {path}")))?;
|
||||||
|
|
||||||
let mut need_sync = maybe_upgrade_meta(&dir, &db_meta)?;
|
let mut need_sync = maybe_upgrade_meta(&dir, &db_meta)?;
|
||||||
if maybe_cleanup_garbage_uuids(&dir)? {
|
if maybe_cleanup_garbage_uuids(&dir)? {
|
||||||
|
|
|
@@ -2,9 +2,9 @@
 // Copyright (C) 2020 The Moonfire NVR Authors; see AUTHORS and LICENSE.txt.
 // SPDX-License-Identifier: GPL-v3.0-or-later WITH GPL-3.0-linking-exception.

+ use base::{bail, err, Error};
 /// Upgrades a version 4 schema to a version 5 schema.
 use byteorder::{BigEndian, ByteOrder, WriteBytesExt};
- use failure::{bail, format_err, Error, ResultExt};
 use h264_reader::avcc::AvcDecoderConfigurationRecord;
 use rusqlite::{named_params, params};
 use std::convert::{TryFrom, TryInto};
@@ -29,22 +29,31 @@ fn default_pixel_aspect_ratio(width: u16, height: u16) -> (u16, u16) {

 fn parse(data: &[u8]) -> Result<AvcDecoderConfigurationRecord, Error> {
 if data.len() < 94 || &data[4..8] != b"avc1" || &data[90..94] != b"avcC" {
- bail!("data of len {} doesn't have an avcC", data.len());
+ bail!(
+ DataLoss,
+ msg("data of len {} doesn't have an avcC", data.len())
+ );
 }
 let avcc_len = BigEndian::read_u32(&data[86..90]);
 if avcc_len < 8 {
 // length and type.
- bail!("invalid avcc len {}", avcc_len);
+ bail!(DataLoss, msg("invalid avcc len {avcc_len}"));
 }
- let end_pos = 86 + usize::try_from(avcc_len)?;
+ let end_pos = usize::try_from(avcc_len)
- if end_pos != data.len() {
+ .ok()
+ .and_then(|l| l.checked_add(86));
+ if end_pos != Some(data.len()) {
 bail!(
- "expected avcC to be end of extradata; there are {} more bytes.",
+ DataLoss,
- data.len() - end_pos
+ msg(
+ "avcC end pos {:?} and total data len {} should match",
+ end_pos,
+ data.len(),
+ ),
 );
 }
- AvcDecoderConfigurationRecord::try_from(&data[94..end_pos])
+ AvcDecoderConfigurationRecord::try_from(&data[94..])
- .map_err(|e| format_err!("Bad AvcDecoderConfigurationRecord: {:?}", e))
+ .map_err(|e| err!(DataLoss, msg("Bad AvcDecoderConfigurationRecord: {:?}", e)))
 }

 pub fn run(_args: &super::Args, tx: &rusqlite::Transaction) -> Result<(), Error> {
@@ -100,24 +109,37 @@ pub fn run(_args: &super::Args, tx: &rusqlite::Transaction) -> Result<(), Error>
 let mut rows = stmt.query(params![])?;
 while let Some(row) = rows.next()? {
 let id: i32 = row.get(0)?;
- let width: u16 = row.get::<_, i32>(1)?.try_into()?;
+ let width: u16 = row
- let height: u16 = row.get::<_, i32>(2)?.try_into()?;
+ .get::<_, i32>(1)?
- let rfc6381_codec: &str = row.get_ref(3)?.as_str()?;
+ .try_into()
+ .map_err(|_| err!(OutOfRange))?;
+ let height: u16 = row
+ .get::<_, i32>(2)?
+ .try_into()
+ .map_err(|_| err!(OutOfRange))?;
+ let rfc6381_codec: &str = row
+ .get_ref(3)?
+ .as_str()
+ .map_err(|_| err!(InvalidArgument))?;
 let mut data: Vec<u8> = row.get(4)?;
 let avcc = parse(&data)?;
 if avcc.num_of_sequence_parameter_sets() != 1 {
- bail!("Multiple SPSs!");
+ bail!(Unimplemented, msg("multiple SPSs!"));
 }
 let ctx = avcc.create_context().map_err(|e| {
- format_err!(
+ err!(
- "Can't load SPS+PPS for video_sample_entry_id {}: {:?}",
+ Unknown,
- id,
+ msg("can't load SPS+PPS for video_sample_entry_id {id}: {e:?}"),
- e
 )
 })?;
 let sps = ctx
 .sps_by_id(h264_reader::nal::pps::ParamSetId::from_u32(0).unwrap())
- .ok_or_else(|| format_err!("No SPS 0 for video_sample_entry_id {}", id))?;
+ .ok_or_else(|| {
+ err!(
+ Unimplemented,
+ msg("no SPS 0 for video_sample_entry_id {id}")
+ )
+ })?;
 let pasp = sps
 .vui_parameters
 .as_ref()
@@ -129,7 +151,10 @@ pub fn run(_args: &super::Args, tx: &rusqlite::Transaction) -> Result<(), Error>
 data.write_u32::<BigEndian>(pasp.0.into())?;
 data.write_u32::<BigEndian>(pasp.1.into())?;
 let len = data.len();
- BigEndian::write_u32(&mut data[0..4], u32::try_from(len)?);
+ BigEndian::write_u32(
+ &mut data[0..4],
+ u32::try_from(len).map_err(|_| err!(OutOfRange))?,
+ );
 }

 insert.execute(named_params! {
@@ -268,7 +293,7 @@ pub fn run(_args: &super::Args, tx: &rusqlite::Transaction) -> Result<(), Error>
 ":video_sync_samples": video_sync_samples,
 ":video_sample_entry_id": video_sample_entry_id,
 })
- .with_context(|_| format!("Unable to insert composite_id {composite_id}"))?;
+ .map_err(|e| err!(e, msg("unable to insert composite_id {composite_id}")))?;
 cum_duration_90k += i64::from(wall_duration_90k);
 cum_runs += if run_offset == 0 { 1 } else { 0 };
 }
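The `parse` hunk above swaps `86 + usize::try_from(avcc_len)?` for a fully checked computation, so a bogus length can neither overflow nor panic before the comparison. The same idea as a standalone sketch (the function name is illustrative, not from the codebase):

    use std::convert::TryFrom;

    // Checked end-position computation: a length that doesn't fit in usize or
    // overflows the addition collapses to None instead of panicking, and a
    // mismatched total length is rejected the same way.
    fn avcc_end_pos(avcc_len: u32, total_len: usize) -> Option<usize> {
        let end = usize::try_from(avcc_len).ok().and_then(|l| l.checked_add(86))?;
        (end == total_len).then_some(end)
    }

    fn main() {
        assert_eq!(avcc_end_pos(8, 94), Some(94));
        assert_eq!(avcc_end_pos(8, 100), None); // trailing bytes after the avcC box
    }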
@@ -3,7 +3,7 @@
 // SPDX-License-Identifier: GPL-v3.0-or-later WITH GPL-3.0-linking-exception

 /// Upgrades a version 6 schema to a version 7 schema.
- use failure::{format_err, Error, ResultExt};
+ use base::{err, Error};
 use fnv::FnvHashMap;
 use rusqlite::{named_params, params};
 use std::{convert::TryFrom, path::PathBuf};
@@ -28,7 +28,13 @@ fn copy_meta(tx: &rusqlite::Transaction) -> Result<(), Error> {
 let config = GlobalConfig {
 max_signal_changes: max_signal_changes
 .map(|s| {
- u32::try_from(s).map_err(|_| format_err!("max_signal_changes out of range"))
+ u32::try_from(s).map_err(|e| {
+ err!(
+ OutOfRange,
+ msg("max_signal_changes out of range"),
+ source(e)
+ )
+ })
 })
 .transpose()?,
 ..Default::default()
@@ -57,7 +63,7 @@ fn copy_sample_file_dir(tx: &rusqlite::Transaction) -> Result<(), Error> {
 let path: String = row.get(2)?;
 let uuid: SqlUuid = row.get(1)?;
 let config = SampleFileDirConfig {
- path: PathBuf::try_from(path)?,
+ path: PathBuf::from(path),
 ..Default::default()
 };
 let last_complete_open_id: Option<i64> = row.get(3)?;
@@ -107,7 +113,10 @@ fn copy_users(tx: &rusqlite::Transaction) -> Result<(), Error> {
 let permissions: Vec<u8> = row.get(7)?;
 let config = UserConfig {
 disabled: (flags & 1) != 0,
- unix_uid: unix_uid.map(u64::try_from).transpose()?,
+ unix_uid: unix_uid
+ .map(u64::try_from)
+ .transpose()
+ .map_err(|_| err!(OutOfRange, msg("bad unix_uid")))?,
 ..Default::default()
 };
 insert.execute(named_params! {
@@ -134,7 +143,8 @@ fn copy_signal_types(tx: &rusqlite::Transaction) -> Result<(), Error> {
 let type_ = types_
 .entry(type_uuid.0)
 .or_insert_with(SignalTypeConfig::default);
- let value = u8::try_from(value).map_err(|_| format_err!("bad signal type value"))?;
+ let value =
+ u8::try_from(value).map_err(|_| err!(OutOfRange, msg("bad signal type value")))?;
 let value_config = type_.values.entry(value).or_insert_with(Default::default);
 if let Some(n) = name {
 value_config.name = n;
@@ -163,7 +173,8 @@ fn copy_signals(tx: &rusqlite::Transaction) -> Result<(), Error> {
 let mut rows = stmt.query(params![])?;
 while let Some(row) = rows.next()? {
 let id: i32 = row.get(0)?;
- let id = u32::try_from(id)?;
+ let id =
+ u32::try_from(id).map_err(|e| err!(OutOfRange, msg("bad signal id"), source(e)))?;
 let source_uuid: SqlUuid = row.get(1)?;
 let type_uuid: SqlUuid = row.get(2)?;
 let short_name: String = row.get(3)?;
@@ -187,7 +198,8 @@ fn copy_signals(tx: &rusqlite::Transaction) -> Result<(), Error> {
 let mut rows = stmt.query(params![])?;
 while let Some(row) = rows.next()? {
 let signal_id: i32 = row.get(0)?;
- let signal_id = u32::try_from(signal_id)?;
+ let signal_id = u32::try_from(signal_id)
+ .map_err(|e| err!(OutOfRange, msg("bad signal_id"), source(e)))?;
 let camera_id: i32 = row.get(1)?;
 let type_: i32 = row.get(2)?;
 let signal = signals.get_mut(&signal_id).unwrap();
@@ -261,7 +273,13 @@ fn copy_cameras(tx: &rusqlite::Transaction) -> Result<(), Error> {
 .filter(|h| !h.is_empty())
 .map(|h| Url::parse(&format!("http://{h}/")))
 .transpose()
- .with_context(|_| "bad onvif_host")?,
+ .map_err(|e| {
+ err!(
+ InvalidArgument,
+ msg("bad onvif_host for camera id {id}"),
+ source(e)
+ )
+ })?,
 username: username.take().unwrap_or_default(),
 password: password.take().unwrap_or_default(),
 ..Default::default()
@@ -324,7 +342,13 @@ fn copy_streams(tx: &rusqlite::Transaction) -> Result<(), Error> {
 ""
 })
 .to_owned(),
- url: Some(Url::parse(&rtsp_url)?),
+ url: Some(Url::parse(&rtsp_url).map_err(|e| {
+ err!(
+ InvalidArgument,
+ msg("bad rtsp_url for stream id {id}"),
+ source(e)
+ )
+ })?),
 retain_bytes,
 flush_if_sec,
 ..Default::default()
@ -9,7 +9,7 @@ use crate::dir;
|
||||||
use crate::recording::{self, MAX_RECORDING_WALL_DURATION};
|
use crate::recording::{self, MAX_RECORDING_WALL_DURATION};
|
||||||
use base::clock::{self, Clocks};
|
use base::clock::{self, Clocks};
|
||||||
use base::shutdown::ShutdownError;
|
use base::shutdown::ShutdownError;
|
||||||
use failure::{bail, format_err, Error};
|
use base::{bail, err, Error};
|
||||||
use fnv::FnvHashMap;
|
use fnv::FnvHashMap;
|
||||||
use std::cmp::{self, Ordering};
|
use std::cmp::{self, Ordering};
|
||||||
use std::convert::TryFrom;
|
use std::convert::TryFrom;
|
||||||
|
@ -218,10 +218,9 @@ pub fn lower_retention(
|
||||||
for l in limits {
|
for l in limits {
|
||||||
let (fs_bytes_before, extra);
|
let (fs_bytes_before, extra);
|
||||||
{
|
{
|
||||||
let stream = db
|
let Some(stream) = db.streams_by_id().get(&l.stream_id) else {
|
||||||
.streams_by_id()
|
bail!(NotFound, msg("no such stream {}", l.stream_id));
|
||||||
.get(&l.stream_id)
|
};
|
||||||
.ok_or_else(|| format_err!("no such stream {}", l.stream_id))?;
|
|
||||||
fs_bytes_before =
|
fs_bytes_before =
|
||||||
stream.fs_bytes + stream.fs_bytes_to_add - stream.fs_bytes_to_delete;
|
stream.fs_bytes + stream.fs_bytes_to_add - stream.fs_bytes_to_delete;
|
||||||
extra = stream.config.retain_bytes - l.limit;
|
extra = stream.config.retain_bytes - l.limit;
|
||||||
@@ -245,7 +244,7 @@ fn delete_recordings(
 ) -> Result<(), Error> {
     let fs_bytes_needed = {
         let stream = match db.streams_by_id().get(&stream_id) {
-            None => bail!("no stream {}", stream_id),
+            None => bail!(NotFound, msg("no stream {stream_id}")),
             Some(s) => s,
         };
         stream.fs_bytes + stream.fs_bytes_to_add - stream.fs_bytes_to_delete + extra_bytes_needed

@@ -326,7 +325,7 @@ impl<C: Clocks + Clone> Syncer<C, Arc<dir::SampleFileDir>> {
         let d = l
             .sample_file_dirs_by_id()
             .get(&dir_id)
-            .ok_or_else(|| format_err!("no dir {}", dir_id))?;
+            .ok_or_else(|| err!(NotFound, msg("no dir {dir_id}")))?;
         let dir = d.get()?;

         // Abandon files.

@@ -345,17 +344,20 @@ impl<C: Clocks + Clone> Syncer<C, Arc<dir::SampleFileDir>> {
         let to_abandon = list_files_to_abandon(&dir, streams_to_next)?;
         let mut undeletable = 0;
         for &id in &to_abandon {
-            if let Err(e) = dir.unlink_file(id) {
-                if e == nix::Error::ENOENT {
-                    warn!("dir: abandoned recording {} already deleted!", id);
+            if let Err(err) = dir.unlink_file(id) {
+                if err == nix::Error::ENOENT {
+                    warn!(%id, "dir: abandoned recording already deleted");
                 } else {
-                    warn!("dir: Unable to unlink abandoned recording {}: {}", id, e);
+                    warn!(%err, %id, "dir: unable to unlink abandoned recording");
                     undeletable += 1;
                 }
             }
         }
         if undeletable > 0 {
-            bail!("Unable to delete {} abandoned recordings.", undeletable);
+            bail!(
+                Unknown,
+                msg("unable to delete {undeletable} abandoned recordings; see logs")
+            );
         }

         Ok((

@@ -410,8 +412,8 @@ impl<C: Clocks + Clone> Syncer<C, Arc<dir::SampleFileDir>> {
         }
         if errors > 0 {
             bail!(
-                "Unable to unlink {} files (see earlier warning messages for details)",
-                errors
+                Unknown,
+                msg("unable to unlink {errors} files (see earlier warning messages for details)"),
             );
         }
         self.dir.sync()?;

@@ -718,7 +720,7 @@ impl<'a, C: Clocks + Clone, D: DirWriter> Writer<'a, C, D> {
             WriterState::Unopened => None,
             WriterState::Open(ref o) => {
                 if o.video_sample_entry_id != video_sample_entry_id {
-                    bail!("inconsistent video_sample_entry_id");
+                    bail!(Internal, msg("inconsistent video_sample_entry_id"));
                 }
                 return Ok(());
             }

@@ -738,7 +740,8 @@ impl<'a, C: Clocks + Clone, D: DirWriter> Writer<'a, C, D> {
         )?;
         let f = clock::retry(&self.db.clocks(), shutdown_rx, &mut || {
             self.dir.create_file(id)
-        })?;
+        })
+        .map_err(|e| err!(Cancelled, source(e)))?;

         self.state = WriterState::Open(InnerWriter {
             f,

@@ -757,7 +760,7 @@ impl<'a, C: Clocks + Clone, D: DirWriter> Writer<'a, C, D> {
         Ok(match self.state {
             WriterState::Unopened => false,
             WriterState::Closed(_) => true,
-            WriterState::Open(_) => bail!("open!"),
+            WriterState::Open(_) => bail!(Internal, msg("open!")),
         })
     }

@@ -786,9 +789,12 @@ impl<'a, C: Clocks + Clone, D: DirWriter> Writer<'a, C, D> {
         if duration <= 0 {
             w.unindexed_sample = Some(unindexed); // restore invariant.
             bail!(
+                InvalidArgument,
+                msg(
                     "pts not monotonically increasing; got {} then {}",
                     unindexed.pts_90k,
-                pts_90k
+                    pts_90k,
+                ),
             );
         }
         let duration = match i32::try_from(duration) {

@@ -796,9 +802,12 @@ impl<'a, C: Clocks + Clone, D: DirWriter> Writer<'a, C, D> {
             Err(_) => {
                 w.unindexed_sample = Some(unindexed); // restore invariant.
                 bail!(
+                    InvalidArgument,
+                    msg(
                         "excessive pts jump from {} to {}",
                         unindexed.pts_90k,
-                    pts_90k
+                        pts_90k,
+                    ),
                 )
             }
         };

@@ -822,10 +831,10 @@ impl<'a, C: Clocks + Clone, D: DirWriter> Writer<'a, C, D> {
             Err(e) => {
                 // close() will do nothing because unindexed_sample will be None.
                 tracing::warn!(
-                    "Abandoning incompletely written recording {} on shutdown",
+                    "abandoning incompletely written recording {} on shutdown",
                     w.id
                 );
-                return Err(e.into());
+                bail!(Cancelled, source(e));
             }
         };
         remaining = &remaining[written..];

@@ -894,9 +903,8 @@ impl<F: FileWriter> InnerWriter<F> {
             + i32::try_from(clamp(local_start.0 - start.0, -limit, limit)).unwrap();
         if wall_duration_90k > i32::try_from(MAX_RECORDING_WALL_DURATION).unwrap() {
             bail!(
-                "Duration {} exceeds maximum {}",
-                wall_duration_90k,
-                MAX_RECORDING_WALL_DURATION
+                OutOfRange,
+                msg("Duration {wall_duration_90k} exceeds maximum {MAX_RECORDING_WALL_DURATION}"),
             );
         }
         l.wall_duration_90k = wall_duration_90k;

@@ -926,14 +934,29 @@ impl<F: FileWriter> InnerWriter<F> {
         reason: Option<String>,
     ) -> Result<PreviousWriter, Error> {
         let unindexed = self.unindexed_sample.take().ok_or_else(|| {
-            format_err!(
-                "Unable to add recording {} to database due to aborted write",
-                self.id
+            err!(
+                FailedPrecondition,
+                msg(
+                    "unable to add recording {} to database due to aborted write",
+                    self.id,
+                ),
             )
         })?;
         let (last_sample_duration, flags) = match next_pts {
             None => (0, db::RecordingFlags::TrailingZero as i32),
-            Some(p) => (i32::try_from(p - unindexed.pts_90k)?, 0),
+            Some(p) => (
+                i32::try_from(p - unindexed.pts_90k).map_err(|_| {
+                    err!(
+                        OutOfRange,
+                        msg(
+                            "pts {} following {} creates invalid duration",
+                            p,
+                            unindexed.pts_90k
+                        )
+                    )
+                })?,
+                0,
+            ),
         };
         let blake3 = self.hasher.finalize();
         let (run_offset, end);
@@ -27,7 +27,7 @@ pub type BoxedError = Box<dyn StdError + Send + Sync>;
 pub type BodyStream = Box<dyn Stream<Item = Result<Chunk, BoxedError>> + Send>;

 pub fn wrap_error(e: Error) -> BoxedError {
-    Box::new(e.compat())
+    Box::new(e)
 }

 impl From<ARefss<'static, [u8]>> for Chunk {

@@ -4,9 +4,9 @@

 //! Subcommand to check the database and sample file dir for errors.

+use base::Error;
 use bpaf::Bpaf;
 use db::check;
-use failure::Error;
 use std::path::PathBuf;

 /// Checks database integrity (like fsck).
@@ -4,11 +4,11 @@

 use crate::stream::{self, Opener};
 use base::strutil::{decode_size, encode_size};
+use base::{bail, err, Error};
 use cursive::traits::{Finder, Nameable, Resizable, Scrollable};
 use cursive::views::{self, Dialog, ViewRef};
 use cursive::Cursive;
 use db::writer;
-use failure::{bail, format_err, Error, ResultExt};
 use itertools::Itertools;
 use std::collections::BTreeMap;
 use std::str::FromStr;

@@ -123,21 +123,31 @@ fn parse_url(
     if raw.is_empty() {
         return Ok(None);
     }
-    let url = url::Url::parse(raw)
-        .with_context(|_| format!("can't parse {} {:?} as URL", field_name, &raw))?;
+    let url = url::Url::parse(raw).map_err(|_| {
+        err!(
+            InvalidArgument,
+            msg("can't parse {field_name} {raw:?} as URL")
+        )
+    })?;
     if !allowed_schemes.iter().any(|scheme| *scheme == url.scheme()) {
         bail!(
-            "Unexpected scheme in {} {:?}; should be one of: {}",
+            InvalidArgument,
+            msg(
+                "unexpected scheme in {} {:?}; should be one of: {}",
                 field_name,
                 url.as_str(),
-            allowed_schemes.iter().join(", ")
+                allowed_schemes.iter().join(", "),
+            ),
         );
     }
     if !url.username().is_empty() || url.password().is_some() {
         bail!(
-            "Unexpected credentials in {} {:?}; use the username and password fields instead",
+            InvalidArgument,
+            msg(
+                "unexpected credentials in {} {:?}; use the username and password fields instead",
                 field_name,
-            url.as_str()
+                url.as_str(),
+            ),
         );
     }
     Ok(Some(url))

@@ -166,8 +176,8 @@ fn press_edit(siv: &mut Cursive, db: &Arc<db::Database>, id: Option<i32>) {
         let type_ = db::StreamType::from_index(i).unwrap();
         if stream.record && (stream.url.is_empty() || stream.sample_file_dir_id.is_none()) {
             bail!(
-                "Can't record {} stream without RTSP URL and sample file directory",
-                type_.as_str()
+                InvalidArgument,
+                msg("can't record {type_} stream without RTSP URL and sample file directory"),
             );
         }
         let stream_change = &mut change.streams[i];

@@ -184,9 +194,9 @@ fn press_edit(siv: &mut Cursive, db: &Arc<db::Database>, id: Option<i32>) {
             0
         } else {
             stream.flush_if_sec.parse().map_err(|_| {
-                format_err!(
-                    "flush_if_sec for {} must be a non-negative integer",
-                    type_.as_str()
+                err!(
+                    InvalidArgument,
+                    msg("flush_if_sec for {type_} must be a non-negative integer"),
                 )
             })?
         };
@@ -3,12 +3,12 @@
 // SPDX-License-Identifier: GPL-v3.0-or-later WITH GPL-3.0-linking-exception.

 use base::strutil::{decode_size, encode_size};
+use base::Error;
 use cursive::traits::{Nameable, Resizable};
 use cursive::view::Scrollable;
 use cursive::Cursive;
 use cursive::{views, With};
 use db::writer;
-use failure::Error;
 use std::cell::RefCell;
 use std::collections::BTreeMap;
 use std::path::Path;

@@ -8,10 +8,10 @@
 //! configuration will likely be almost entirely done through a web-based UI.

 use base::clock;
+use base::Error;
 use bpaf::Bpaf;
 use cursive::views;
 use cursive::Cursive;
-use failure::Error;
 use std::path::PathBuf;
 use std::sync::Arc;

@@ -2,8 +2,8 @@
 // Copyright (C) 2020 The Moonfire NVR Authors; see AUTHORS and LICENSE.txt.
 // SPDX-License-Identifier: GPL-v3.0-or-later WITH GPL-3.0-linking-exception.

+use base::Error;
 use bpaf::Bpaf;
-use failure::Error;
 use std::path::PathBuf;
 use tracing::info;

@@ -5,9 +5,9 @@
 //! Subcommand to login a user (without requiring a password).

 use base::clock::{self, Clocks};
+use base::{bail, err, Error};
 use bpaf::Bpaf;
 use db::auth::SessionFlag;
-use failure::{format_err, Error};
 use std::io::Write as _;
 use std::os::unix::fs::OpenOptionsExt as _;
 use std::path::PathBuf;
@@ -69,9 +69,9 @@ pub fn run(args: Args) -> Result<i32, Error> {
     let (_db_dir, conn) = super::open_conn(&args.db_dir, super::OpenMode::ReadWrite)?;
     let db = std::sync::Arc::new(db::Database::new(clocks, conn, true).unwrap());
     let mut l = db.lock();
-    let u = l
-        .get_user(&args.username)
-        .ok_or_else(|| format_err!("no such user {:?}", &args.username))?;
+    let Some(u) = l.get_user(&args.username) else {
+        bail!(NotFound, msg("no such user {:?}", &args.username));
+    };
     let permissions = args
         .permissions
         .map(db::Permissions::from)

@@ -101,13 +101,13 @@ pub fn run(args: Args) -> Result<i32, Error> {
         let d = args
             .domain
             .as_ref()
-            .ok_or_else(|| format_err!("--curl-cookie-jar requires --domain"))?;
+            .ok_or_else(|| err!(InvalidArgument, msg("--curl-cookie-jar requires --domain")))?;
         let mut f = std::fs::OpenOptions::new()
             .write(true)
             .create_new(true)
             .mode(0o600)
             .open(p)
-            .map_err(|e| format_err!("Unable to open {}: {}", p.display(), e))?;
+            .map_err(|e| err!(e, msg("unable to open {}", p.display())))?;
         write!(
             &mut f,
             "# Netscape HTTP Cookie File\n\

@@ -2,8 +2,8 @@
 // Copyright (C) 2016 The Moonfire NVR Authors; see AUTHORS and LICENSE.txt.
 // SPDX-License-Identifier: GPL-v3.0-or-later WITH GPL-3.0-linking-exception.

+use base::{err, Error};
 use db::dir;
-use failure::{Error, Fail};
 use nix::fcntl::FlockArg;
 use std::path::Path;
 use tracing::info;
@@ -28,16 +28,19 @@ enum OpenMode {
 /// The returned `dir::Fd` holds the lock and should be kept open as long as the `Connection` is.
 fn open_dir(db_dir: &Path, mode: OpenMode) -> Result<dir::Fd, Error> {
     let dir = dir::Fd::open(db_dir, mode == OpenMode::Create).map_err(|e| {
-        e.context(if mode == OpenMode::Create {
-            format!("unable to create db dir {}", db_dir.display())
+        if mode == OpenMode::Create {
+            err!(e, msg("unable to create db dir {}", db_dir.display()))
         } else if e == nix::Error::ENOENT {
-            format!(
+            err!(
+                NotFound,
+                msg(
                     "db dir {} not found; try running moonfire-nvr init",
-                db_dir.display()
+                    db_dir.display(),
+                ),
             )
         } else {
-            format!("unable to open db dir {}", db_dir.display())
-        })
+            err!(e, msg("unable to open db dir {}", db_dir.display()))
+        }
     })?;
     let ro = mode == OpenMode::ReadOnly;
     dir.lock(if ro {

@@ -46,11 +49,14 @@ fn open_dir(db_dir: &Path, mode: OpenMode) -> Result<dir::Fd, Error> {
         FlockArg::LockExclusiveNonblock
     })
     .map_err(|e| {
-        e.context(format!(
+        err!(
+            e,
+            msg(
                 "unable to get {} lock on db dir {} ",
                 if ro { "shared" } else { "exclusive" },
-            db_dir.display()
-        ))
+                db_dir.display(),
+            ),
+        )
     })?;
     Ok(dir)
 }
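In the `open_dir` hunks above, the first argument to `err!` is sometimes an existing error value rather than an `ErrorKind` name, apparently letting the new error inherit a kind from the wrapped error while keeping it as the source. A sketch assuming that behavior of the project's `base` crate; the function name and message are illustrative only:

    use base::{err, Error};
    use std::path::Path;

    // Sketch only: assumes `err!` accepts either an ErrorKind or an existing error
    // (here a std::io::Error) as its first argument, as the hunks above suggest.
    fn read_config_bytes(path: &Path) -> Result<Vec<u8>, Error> {
        std::fs::read(path)
            // Keep the io::Error as the source and attach a contextual message;
            // the kind comes from the wrapped error rather than being named explicitly.
            .map_err(|e| err!(e, msg("unable to read config file {}", path.display())))
    }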
@@ -6,9 +6,10 @@ use crate::streamer;
 use crate::web;
 use crate::web::accept::Listener;
 use base::clock;
+use base::err;
+use base::{bail, Error};
 use bpaf::Bpaf;
 use db::{dir, writer};
-use failure::{bail, Error, ResultExt};
 use fnv::FnvHashMap;
 use hyper::service::{make_service_fn, service_fn};
 use retina::client::SessionGroup;

@@ -76,7 +77,10 @@ fn resolve_zone() -> Result<String, Error> {
         }

         if p != LOCALTIME_PATH {
-            bail!("Unable to resolve env TZ={} to a timezone.", &tz);
+            bail!(
+                FailedPrecondition,
+                msg("unable to resolve env TZ={tz} to a timezone")
+            );
         }
     }

@@ -86,21 +90,23 @@ fn resolve_zone() -> Result<String, Error> {
         Ok(localtime_dest) => {
             let localtime_dest = match localtime_dest.to_str() {
                 Some(d) => d,
-                None => bail!("{} symlink destination is invalid UTF-8", LOCALTIME_PATH),
+                None => bail!(
+                    FailedPrecondition,
+                    msg("{LOCALTIME_PATH} symlink destination is invalid UTF-8")
+                ),
             };
             if let Some(p) = zoneinfo_name(localtime_dest) {
                 return Ok(p.to_owned());
             }
             bail!(
-                "Unable to resolve {} symlink destination {} to a timezone.",
-                LOCALTIME_PATH,
-                &localtime_dest
+                FailedPrecondition,
+                msg("unable to resolve {LOCALTIME_PATH} symlink destination {localtime_dest} to a timezone"),
             );
         }
         Err(e) => {
             use ::std::io::ErrorKind;
             if e.kind() != ErrorKind::NotFound && e.kind() != ErrorKind::InvalidInput {
-                bail!("Unable to read {} symlink: {}", LOCALTIME_PATH, e);
+                bail!(e, msg("unable to read {LOCALTIME_PATH} symlink"));
             }
         }
     };

@@ -110,10 +116,8 @@ fn resolve_zone() -> Result<String, Error> {
         Ok(z) => Ok(z.trim().to_owned()),
         Err(e) => {
             bail!(
-                "Unable to resolve timezone from TZ env, {}, or {}. Last error: {}",
-                LOCALTIME_PATH,
-                TIMEZONE_PATH,
-                e
+                e,
+                msg("unable to resolve timezone from TZ env, {LOCALTIME_PATH}, or {TIMEZONE_PATH}"),
             );
         }
     }

@@ -127,15 +131,18 @@ struct Syncer {

 fn read_config(path: &Path) -> Result<ConfigFile, Error> {
     let config = std::fs::read(path)?;
-    let config = toml::from_slice(&config)?;
+    let config = toml::from_slice(&config).map_err(|e| err!(InvalidArgument, source(e)))?;
     Ok(config)
 }

 pub fn run(args: Args) -> Result<i32, Error> {
-    let config = read_config(&args.config).with_context(|_| {
-        format!(
-            "Unable to load config file {}. See documentation in ref/config.md.",
-            &args.config.display()
+    let config = read_config(&args.config).map_err(|e| {
+        err!(
+            e,
+            msg(
+                "unable to load config file {}; see documentation in ref/config.md",
+                &args.config.display(),
+            ),
         )
     })?;

@@ -180,8 +187,8 @@ async fn async_run(read_only: bool, config: &ConfigFile) -> Result<i32, Error> {
     }

     tokio::select! {
-        _ = int.recv() => bail!("immediate shutdown due to second signal (SIGINT)"),
-        _ = term.recv() => bail!("immediate shutdown due to second singal (SIGTERM)"),
+        _ = int.recv() => bail!(Cancelled, msg("immediate shutdown due to second signal (SIGINT)")),
+        _ = term.recv() => bail!(Cancelled, msg("immediate shutdown due to second singal (SIGTERM)")),
         result = &mut inner => result,
     }
 }

@@ -213,17 +220,16 @@ fn make_listener(addr: &config::AddressConfig) -> Result<Listener, Error> {
         config::AddressConfig::Ipv6(a) => (*a).into(),
         config::AddressConfig::Unix(p) => {
             prepare_unix_socket(p);
-            return Ok(Listener::Unix(
-                tokio::net::UnixListener::bind(p)
-                    .with_context(|_| format!("unable bind Unix socket {}", p.display()))?,
-            ));
+            return Ok(Listener::Unix(tokio::net::UnixListener::bind(p).map_err(
+                |e| err!(e, msg("unable bind Unix socket {}", p.display())),
+            )?));
         }
     };

     // Go through std::net::TcpListener to avoid needing async. That's there for DNS resolution,
     // but it's unnecessary when starting from a SocketAddr.
     let listener = std::net::TcpListener::bind(sa)
-        .with_context(|_| format!("unable to bind TCP socket {}", &sa))?;
+        .map_err(|e| err!(e, msg("unable to bind TCP socket {sa}")))?;
     listener.set_nonblocking(true)?;
     Ok(Listener::Tcp(tokio::net::TcpListener::from_std(listener)?))
 }

@@ -419,13 +425,16 @@ async fn inner(
             }
         }
     })
-    .await?;
+    .await
+    .map_err(|e| err!(Unknown, source(e)))?;

     db.lock().clear_watches();

     info!("Waiting for HTTP requests to finish.");
     for h in web_handles {
-        h.await??;
+        h.await
+            .map_err(|e| err!(Unknown, source(e)))?
+            .map_err(|e| err!(Unknown, source(e)))?;
     }

     info!("Waiting for TEARDOWN requests to complete.");
@@ -5,8 +5,8 @@
 //! Subcommand to run a SQLite shell.

 use super::OpenMode;
+use base::Error;
 use bpaf::Bpaf;
-use failure::Error;
 use std::ffi::OsString;
 use std::os::unix::process::CommandExt;
 use std::path::PathBuf;

@@ -2,8 +2,8 @@
 // Copyright (C) 2020 The Moonfire NVR Authors; see AUTHORS and LICENSE.txt.
 // SPDX-License-Identifier: GPL-v3.0-or-later WITH GPL-3.0-linking-exception.

+use base::Error;
 use bpaf::Bpaf;
-use failure::Error;

 /// Translates between integer and human-readable timestamps.
 #[derive(Bpaf, Debug)]

@@ -2,11 +2,11 @@
 // Copyright (C) 2020 The Moonfire NVR Authors; see AUTHORS and LICENSE.txt.
 // SPDX-License-Identifier: GPL-v3.0-or-later WITH GPL-3.0-linking-exception.

-use bpaf::Bpaf;
 /// Upgrades the database schema.
 ///
 /// See `guide/schema.md` for more information.
-use failure::Error;
+use base::Error;
+use bpaf::Bpaf;

 /// Upgrades to the latest database schema.
 #[derive(Bpaf, Debug)]

@@ -18,9 +18,9 @@
 //! through ffmpeg's own generated `.mp4` file. Extracting just this part of their `.mp4` files
 //! would be more trouble than it's worth.

+use base::{bail, err, Error};
 use byteorder::{BigEndian, ByteOrder, WriteBytesExt};
 use db::VideoSampleEntryToInsert;
-use failure::{bail, format_err, Error};
 use std::convert::TryFrom;

 // For certain common sub stream anamorphic resolutions, add a pixel aspect ratio box.
@@ -62,34 +62,31 @@ fn default_pixel_aspect_ratio(width: u16, height: u16) -> (u16, u16) {

 /// Parses the `AvcDecoderConfigurationRecord` in the "extra data".
 pub fn parse_extra_data(extradata: &[u8]) -> Result<VideoSampleEntryToInsert, Error> {
-    let avcc = h264_reader::avcc::AvcDecoderConfigurationRecord::try_from(extradata)
-        .map_err(|e| format_err!("Bad AvcDecoderConfigurationRecord: {:?}", e))?;
+    let avcc =
+        h264_reader::avcc::AvcDecoderConfigurationRecord::try_from(extradata).map_err(|e| {
+            err!(
+                InvalidArgument,
+                msg("bad AvcDecoderConfigurationRecord: {:?}", e)
+            )
+        })?;
     if avcc.num_of_sequence_parameter_sets() != 1 {
-        bail!("Multiple SPSs!");
+        bail!(Unimplemented, msg("multiple SPSs!"));
     }
     let ctx = avcc
         .create_context()
-        .map_err(|e| format_err!("Can't load SPS+PPS: {:?}", e))?;
+        .map_err(|e| err!(Unknown, msg("can't load SPS+PPS: {:?}", e)))?;
     let sps = ctx
         .sps_by_id(h264_reader::nal::pps::ParamSetId::from_u32(0).unwrap())
-        .ok_or_else(|| format_err!("No SPS 0"))?;
-    let pixel_dimensions = sps
-        .pixel_dimensions()
-        .map_err(|e| format_err!("SPS has invalid pixel dimensions: {:?}", e))?;
-    let width = u16::try_from(pixel_dimensions.0).map_err(|_| {
-        format_err!(
-            "bad dimensions {}x{}",
-            pixel_dimensions.0,
-            pixel_dimensions.1
-        )
-    })?;
-    let height = u16::try_from(pixel_dimensions.1).map_err(|_| {
-        format_err!(
-            "bad dimensions {}x{}",
-            pixel_dimensions.0,
-            pixel_dimensions.1
+        .ok_or_else(|| err!(Unimplemented, msg("no SPS 0")))?;
+    let pixel_dimensions = sps.pixel_dimensions().map_err(|e| {
+        err!(
+            InvalidArgument,
+            msg("SPS has invalid pixel dimensions: {:?}", e)
         )
     })?;
+    let (Ok(width), Ok(height)) = (u16::try_from(pixel_dimensions.0), u16::try_from(pixel_dimensions.1)) else {
+        bail!(InvalidArgument, msg("bad dimensions {}x{}", pixel_dimensions.0, pixel_dimensions.1));
+    };

     let mut sample_entry = Vec::with_capacity(256);

@@ -130,7 +127,7 @@ pub fn parse_extra_data(extradata: &[u8]) -> Result<VideoSampleEntryToInsert, Error> {
     let cur_pos = sample_entry.len();
     BigEndian::write_u32(
         &mut sample_entry[avcc_len_pos..avcc_len_pos + 4],
-        u32::try_from(cur_pos - avcc_len_pos)?,
+        u32::try_from(cur_pos - avcc_len_pos).map_err(|_| err!(OutOfRange))?,
     );

     // PixelAspectRatioBox, ISO/IEC 14496-12 section 12.1.4.2.

@@ -150,7 +147,7 @@ pub fn parse_extra_data(extradata: &[u8]) -> Result<VideoSampleEntryToInsert, Error> {
     let cur_pos = sample_entry.len();
     BigEndian::write_u32(
         &mut sample_entry[avc1_len_pos..avc1_len_pos + 4],
-        u32::try_from(cur_pos - avc1_len_pos)?,
+        u32::try_from(cur_pos - avc1_len_pos).map_err(|_| err!(OutOfRange))?,
     );

     let profile_idc = sample_entry[103];
@@ -5,8 +5,8 @@
 //! JSON/TOML-compatible serde types for use in the web API and `moonfire-nvr.toml`.

 use base::time::{Duration, Time};
+use base::{err, Error};
 use db::auth::SessionHash;
-use failure::{format_err, Error};
 use serde::ser::{Error as _, SerializeMap, SerializeSeq, Serializer};
 use serde::{Deserialize, Deserializer, Serialize};
 use std::ops::Not;

@@ -230,7 +230,7 @@ impl<'a> Stream<'a> {
         let s = db
             .streams_by_id()
             .get(&id)
-            .ok_or_else(|| format_err!("missing stream {}", id))?;
+            .ok_or_else(|| err!(Internal, msg("missing stream {id}")))?;
         Ok(Some(Stream {
             id: s.id,
             retain_bytes: s.config.retain_bytes,

@@ -4,6 +4,7 @@

 #![cfg_attr(all(feature = "nightly", test), feature(test))]

+use base::Error;
 use bpaf::{Bpaf, Parser};
 use std::ffi::OsStr;
 use std::path::{Path, PathBuf};

@@ -37,7 +38,7 @@ enum Args {
 }

 impl Args {
-    fn run(self) -> Result<i32, failure::Error> {
+    fn run(self) -> Result<i32, Error> {
         match self {
             Args::Check(a) => cmds::check::run(a),
             Args::Config(a) => cmds::config::run(a),

@@ -93,11 +94,11 @@ fn main() {

     match args.run() {
         Err(e) => {
-            error!("Exiting due to error: {}", base::prettify_failure(&e));
+            error!("exiting due to error: {}", e.chain());
             ::std::process::exit(1);
         }
         Ok(rv) => {
-            debug!("Exiting with status {}", rv);
+            debug!("exiting with status {}", rv);
             std::process::exit(rv)
         }
     }
@@ -56,7 +56,7 @@

 use crate::body::{wrap_error, BoxedError, Chunk};
 use crate::slices::{self, Slices};
-use base::{bail_t, format_err_t, Error, ErrorKind, ResultExt};
+use base::{bail, err, Error, ErrorKind, ResultExt};
 use byteorder::{BigEndian, ByteOrder, WriteBytesExt};
 use bytes::BytesMut;
 use db::dir;

@@ -410,14 +410,14 @@ impl Segment {
             *index = db
                 .lock()
                 .with_recording_playback(self.s.id, &mut |playback| self.build_index(playback))
-                .map_err(|e| {
-                    error!("Unable to build index for segment: {:?}", e);
+                .map_err(|err| {
+                    error!(%err, recording_id = %self.s.id, "unable to build index for segment");
                 });
         });
         let index: &'a _ = unsafe { &*self.index.get() };
         match *index {
             Ok(ref b) => Ok(f(&b[..], self.lens())),
-            Err(()) => bail_t!(Unknown, "Unable to build index; see previous error."),
+            Err(()) => bail!(Unknown, msg("unable to build index; see logs")),
         }
     }

@@ -439,7 +439,7 @@ impl Segment {
         &buf[lens.stts + lens.stsz..]
     }

-    fn build_index(&self, playback: &db::RecordingPlayback) -> Result<Box<[u8]>, failure::Error> {
+    fn build_index(&self, playback: &db::RecordingPlayback) -> Result<Box<[u8]>, Error> {
         let s = &self.s;
         let lens = self.lens();
         let len = lens.stts + lens.stsz + lens.stss;

@@ -511,7 +511,7 @@ impl Segment {
         playback: &db::RecordingPlayback,
         initial_pos: u64,
         len: usize,
-    ) -> Result<Vec<u8>, failure::Error> {
+    ) -> Result<Vec<u8>, Error> {
         let mut v = Vec::with_capacity(len);

         struct RunInfo {

@@ -623,12 +623,14 @@ impl Segment {
             );
         }
         if len != v.len() {
-            bail_t!(
+            bail!(
                 Internal,
+                msg(
                     "truns on {:?} expected len {} got len {}",
                     self,
                     len,
-                v.len()
+                    v.len(),
+                ),
             );
         }
         Ok(v)

@@ -698,12 +700,9 @@ enum SliceType {
 impl Slice {
     fn new(end: u64, t: SliceType, p: usize) -> Result<Self, Error> {
         if end >= (1 << 40) || p >= (1 << 20) {
-            bail_t!(
-                InvalidArgument,
-                "end={} p={} too large for {:?} Slice",
-                end,
-                p,
-                t
+            bail!(
+                OutOfRange,
+                msg("end={} p={} too large for {:?} Slice", end, p, t,),
             );
         }

@@ -730,7 +729,7 @@ impl Slice {
             .try_map(|mp4| {
                 let i = mp4.segments[p].get_index(&mp4.db, f)?;
                 if u64::try_from(i.len()).unwrap() != len {
-                    bail_t!(Internal, "expected len {} got {}", len, i.len());
+                    bail!(Internal, msg("expected len {} got {}", len, i.len()));
                 }
                 Ok::<_, Error>(&i[r])
             })?

@@ -760,7 +759,7 @@ impl Slice {
             .try_map(|mp4| {
                 let data = &mp4.video_sample_entries[self.p()].data;
                 if u64::try_from(data.len()).unwrap() != len {
-                    bail_t!(Internal, "expected len {} got len {}", len, data.len());
+                    bail!(Internal, msg("expected len {} got len {}", len, data.len()));
                 }
                 Ok::<_, Error>(&data[r.start as usize..r.end as usize])
             })?

@@ -787,11 +786,9 @@ impl slices::Slice for Slice {
             SliceType::Static => {
                 let s = STATIC_BYTESTRINGS[p];
                 if u64::try_from(s.len()).unwrap() != len {
-                    Err(format_err_t!(
+                    Err(err!(
                         Internal,
-                        "expected len {} got len {}",
-                        len,
-                        s.len()
+                        msg("expected len {} got len {}", len, s.len())
                     ))
                 } else {
                     let part = &s[range.start as usize..range.end as usize];
@@ -817,12 +814,14 @@ impl slices::Slice for Slice {
         Box::new(stream::once(futures::future::ready(
             res.map_err(wrap_error).and_then(move |c| {
                 if c.remaining() != (range.end - range.start) as usize {
-                    return Err(wrap_error(format_err_t!(
+                    return Err(wrap_error(err!(
                         Internal,
-                        "Error producing {:?}: range {:?} produced incorrect len {}.",
+                        msg(
+                            "{:?} range {:?} produced incorrect len {}",
                             self,
                             range,
                             c.remaining()
+                        )
                     )));
                 }
                 Ok(c)

@@ -904,9 +903,9 @@ impl FileBuilder {
             // There's no support today for timestamp truns or for timestamps without edit lists.
             // The latter would invalidate the code's assumption that desired timespan == actual
             // timespan in the timestamp track.
-            bail_t!(
+            bail!(
                 InvalidArgument,
-                "timestamp subtitles aren't supported on media segments"
+                msg("timestamp subtitles aren't supported on media segments")
             );
         }
         self.include_timestamp_subtitle_track = b;

@@ -934,11 +933,13 @@ impl FileBuilder {
     ) -> Result<(), Error> {
         if let Some(prev) = self.segments.last() {
             if prev.s.have_trailing_zero() {
-                bail_t!(
+                bail!(
                     InvalidArgument,
+                    msg(
                         "unable to append recording {} after recording {} with trailing zero",
                         row.id,
-                    prev.s.id
+                        prev.s.id,
+                    ),
                 );
             }
         } else {

@@ -1071,10 +1072,12 @@ impl FileBuilder {
         // If the segment is > 4 GiB, the 32-bit trun data offsets are untrustworthy.
         // We'd need multiple moof+mdat sequences to support large media segments properly.
         if self.body.slices.len() > u32::max_value() as u64 {
-            bail_t!(
-                InvalidArgument,
+            bail!(
+                OutOfRange,
+                msg(
                     "media segment has length {}, greater than allowed 4 GiB",
-                self.body.slices.len()
+                    self.body.slices.len(),
+                ),
             );
         }

@@ -1355,7 +1358,7 @@ impl FileBuilder {
                 None => Some((e.width, e.height)),
                 Some((w, h)) => Some((cmp::max(w, e.width), cmp::max(h, e.height))),
             })
-            .ok_or_else(|| format_err_t!(InvalidArgument, "no video_sample_entries"))?;
+            .ok_or_else(|| err!(InvalidArgument, msg("no video_sample_entries")))?;
         self.body.append_u32((width as u32) << 16);
         self.body.append_u32((height as u32) << 16);
         })

@@ -1396,7 +1399,10 @@ impl FileBuilder {
             let skip = md.start - actual_start_90k;
             let keep = md.end - md.start;
             if skip < 0 || keep < 0 {
-                bail_t!(Internal, "skip={} keep={} on segment {:#?}", skip, keep, s);
+                bail!(
+                    Internal,
+                    msg("skip={} keep={} on segment {:#?}", skip, keep, s)
+                );
             }
             cur_media_time += skip as u64;
             if unflushed.segment_duration + unflushed.media_time == cur_media_time {
@@ -1817,9 +1823,10 @@ impl FileInner {
         let sr = s.s.sample_file_range();
         let f = match self.dirs_by_stream_id.get(&s.s.id.stream()) {
             None => {
-                return Box::new(stream::iter(std::iter::once(Err(wrap_error(
-                    format_err_t!(NotFound, "{}: stream not found", s.s.id),
-                )))))
+                return Box::new(stream::iter(std::iter::once(Err(wrap_error(err!(
+                    NotFound,
+                    msg("{}: stream not found", s.s.id)
+                ))))))
             }
             Some(d) => d.open_file(s.s.id, (r.start + sr.start)..(r.end + sr.start)),
         };

@@ -1865,10 +1872,12 @@ impl File {
     pub async fn append_into_vec(self, v: &mut Vec<u8>) -> Result<(), Error> {
         use http_serve::Entity;
         v.reserve(usize::try_from(self.len()).map_err(|_| {
-            format_err_t!(
+            err!(
                 InvalidArgument,
+                msg(
                     "{}-byte mp4 is too big to send over WebSockets!",
                     self.len()
+                ),
             )
         })?);
         let mut b = std::pin::Pin::from(self.get_range(0..self.len()));

@@ -1876,9 +1885,7 @@ impl File {
             use futures::stream::StreamExt;
             match b.next().await {
                 Some(r) => {
-                    let mut chunk = r
-                        .map_err(failure::Error::from_boxed_compat)
-                        .err_kind(ErrorKind::Unknown)?;
+                    let mut chunk = r.map_err(|e| err!(Unknown, source(e)))?;
                     while chunk.has_remaining() {
                         let c = chunk.chunk();
                         v.extend_from_slice(c);

@@ -2312,7 +2319,7 @@ mod tests {
         loop {
             let pkt = match input.next() {
                 Ok(p) => p,
-                Err(e) if e.to_string().contains("End of file") => {
+                Err(e) if e.kind() == ErrorKind::OutOfRange => {
                     break;
                 }
                 Err(e) => {

@@ -2419,14 +2426,14 @@ mod tests {
         for i in 0.. {
             let orig_pkt = match orig.next() {
                 Ok(p) => Some(p),
-                Err(e) if e.to_string() == "End of file" => None,
+                Err(e) if e.msg().unwrap() == "end of file" => None,
                 Err(e) => {
                     panic!("unexpected input error: {}", e);
                 }
             };
             let new_pkt = match new.next() {
                 Ok(p) => Some(p),
-                Err(e) if e.to_string() == "End of file" => {
+                Err(e) if e.msg().unwrap() == "end of file" => {
                     break;
                 }
                 Err(e) => {

@@ -2634,7 +2641,8 @@ mod tests {
         let e = make_mp4_from_encoders(Type::Normal, &db, vec![], 0..0, true)
             .err()
             .unwrap();
-        assert_eq!(e.to_string(), "Invalid argument: no video_sample_entries");
+        assert_eq!(e.kind(), ErrorKind::InvalidArgument);
+        assert_eq!(e.msg().unwrap(), "no video_sample_entries");
     }

     #[tokio::test]
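The test hunk above swaps string matching on `Display` output for assertions on the structured kind and message. A hedged sketch of that testing pattern, assuming `base::Error` exposes `kind()` and `msg()` as the hunk shows; the function under test here is hypothetical:

    use base::{err, Error, ErrorKind};

    // Hypothetical function standing in for make_mp4_from_encoders above.
    fn demand_entries(n: usize) -> Result<(), Error> {
        if n == 0 {
            return Err(err!(InvalidArgument, msg("no video_sample_entries")));
        }
        Ok(())
    }

    #[test]
    fn rejects_empty_input() {
        let e = demand_entries(0).unwrap_err();
        // Match on the structured kind and message rather than the Display string,
        // so the assertion survives changes to error formatting.
        assert_eq!(e.kind(), ErrorKind::InvalidArgument);
        assert_eq!(e.msg().unwrap(), "no video_sample_entries");
    }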
@@ -9,8 +9,7 @@ use std::ops::Range;
 use std::pin::Pin;

 use crate::body::{wrap_error, BoxedError};
-use base::format_err_t;
-use failure::{bail, Error};
+use base::{bail, err, Error};
 use futures::{stream, stream::StreamExt, Stream};
 use tracing_futures::Instrument;

@@ -102,11 +101,14 @@ where
     pub fn append(&mut self, slice: S) -> Result<(), Error> {
         if slice.end() <= self.len {
             bail!(
+                Internal,
+                msg(
                     "end {} <= len {} while adding slice {:?} to slices:\n{:?}",
                     slice.end(),
                     self.len,
                     slice,
                     self
+                ),
             );
         }
         self.len = slice.end();

@@ -133,14 +135,10 @@ where
     ) -> Box<dyn Stream<Item = Result<S::Chunk, BoxedError>> + Sync + Send> {
         #[allow(clippy::suspicious_operation_groupings)]
         if range.start > range.end || range.end > self.len {
-            return Box::new(stream::once(futures::future::err(wrap_error(
-                format_err_t!(
+            return Box::new(stream::once(futures::future::err(wrap_error(err!(
                 Internal,
-                "Bad range {:?} for slice of length {}",
-                range,
-                self.len
-                ),
-            ))));
+                msg("bad range {:?} for slice of length {}", range, self.len),
+            )))));
         }

         // Binary search for the first slice of the range to write, determining its index and
@@ -3,9 +3,8 @@
 // SPDX-License-Identifier: GPL-v3.0-or-later WITH GPL-3.0-linking-exception.

 use crate::h264;
+use base::{bail, err, Error};
 use bytes::Bytes;
-use failure::format_err;
-use failure::{bail, Error};
 use futures::StreamExt;
 use retina::client::Demuxed;
 use retina::codec::CodecItem;

@@ -74,7 +73,8 @@ impl Opener for RealOpener {
             .in_current_span(),
             ),
         )
-        .expect("RetinaStream::play task panicked, see earlier error")??;
+        .expect("RetinaStream::play task panicked, see earlier error")
+        .map_err(|e| err!(Unknown, source(e)))??;
         Ok(Box::new(RetinaStream {
             inner: Some(inner),
             rt_handle,

@@ -121,22 +121,30 @@ impl RetinaStreamInner {
         url: Url,
         options: Options,
     ) -> Result<(Box<Self>, retina::codec::VideoFrame), Error> {
-        let mut session = retina::client::Session::describe(url, options.session).await?;
+        let mut session = retina::client::Session::describe(url, options.session)
+            .await
+            .map_err(|e| err!(Unknown, source(e)))?;
         tracing::debug!("connected to {:?}, tool {:?}", &label, session.tool());
         let video_i = session
             .streams()
             .iter()
             .position(|s| s.media() == "video" && s.encoding_name() == "h264")
-            .ok_or_else(|| format_err!("couldn't find H.264 video stream"))?;
-        session.setup(video_i, options.setup).await?;
-        let session = session.play(retina::client::PlayOptions::default()).await?;
-        let mut session = session.demuxed()?;
+            .ok_or_else(|| err!(FailedPrecondition, msg("couldn't find H.264 video stream")))?;
+        session
+            .setup(video_i, options.setup)
+            .await
+            .map_err(|e| err!(Unknown, source(e)))?;
+        let session = session
+            .play(retina::client::PlayOptions::default())
+            .await
+            .map_err(|e| err!(Unknown, source(e)))?;
+        let mut session = session.demuxed().map_err(|e| err!(Unknown, source(e)))?;

         // First frame.
         let first_frame = loop {
             match Pin::new(&mut session).next().await {
-                None => bail!("stream closed before first frame"),
-                Some(Err(e)) => return Err(e.into()),
+                None => bail!(Unavailable, msg("stream closed before first frame")),
+                Some(Err(e)) => bail!(Unknown, msg("unable to get first frame"), source(e)),
                 Some(Ok(CodecItem::VideoFrame(v))) => {
                     if v.is_random_access_point() {
                         break v;
@ -148,7 +156,7 @@ impl RetinaStreamInner {
|
||||||
let video_params = match session.streams()[video_i].parameters() {
|
let video_params = match session.streams()[video_i].parameters() {
|
||||||
Some(retina::codec::ParametersRef::Video(v)) => v.clone(),
|
Some(retina::codec::ParametersRef::Video(v)) => v.clone(),
|
||||||
Some(_) => unreachable!(),
|
Some(_) => unreachable!(),
|
||||||
None => bail!("couldn't find H.264 parameters"),
|
None => bail!(Unknown, msg("couldn't find H.264 parameters")),
|
||||||
};
|
};
|
||||||
let video_sample_entry = h264::parse_extra_data(video_params.extra_data())?;
|
let video_sample_entry = h264::parse_extra_data(video_params.extra_data())?;
|
||||||
let self_ = Box::new(Self {
|
let self_ = Box::new(Self {
|
||||||
|
@ -171,8 +179,13 @@ impl RetinaStreamInner {
|
||||||
Error,
|
Error,
|
||||||
> {
|
> {
|
||||||
loop {
|
loop {
|
||||||
match Pin::new(&mut self.session).next().await.transpose()? {
|
match Pin::new(&mut self.session)
|
||||||
None => bail!("end of stream"),
|
.next()
|
||||||
|
.await
|
||||||
|
.transpose()
|
||||||
|
.map_err(|e| err!(Unknown, source(e)))?
|
||||||
|
{
|
||||||
|
None => bail!(Unavailable, msg("end of stream")),
|
||||||
Some(CodecItem::VideoFrame(v)) => {
|
Some(CodecItem::VideoFrame(v)) => {
|
||||||
if v.loss() > 0 {
|
if v.loss() > 0 {
|
||||||
tracing::warn!(
|
tracing::warn!(
|
||||||
|
@ -223,7 +236,13 @@ impl Stream for RetinaStream {
|
||||||
),
|
),
|
||||||
)
|
)
|
||||||
.expect("fetch_next_frame task panicked, see earlier error")
|
.expect("fetch_next_frame task panicked, see earlier error")
|
||||||
.map_err(|_| format_err!("timeout getting next frame"))??;
|
.map_err(|e| {
|
||||||
|
err!(
|
||||||
|
DeadlineExceeded,
|
||||||
|
msg("timeout getting next frame"),
|
||||||
|
source(e)
|
||||||
|
)
|
||||||
|
})??;
|
||||||
let mut new_video_sample_entry = false;
|
let mut new_video_sample_entry = false;
|
||||||
if let Some(p) = new_parameters {
|
if let Some(p) = new_parameters {
|
||||||
let video_sample_entry = h264::parse_extra_data(p.extra_data())?;
|
let video_sample_entry = h264::parse_extra_data(p.extra_data())?;
|
||||||
|
@ -239,7 +258,7 @@ impl Stream for RetinaStream {
|
||||||
}
|
}
|
||||||
};
|
};
|
||||||
self.inner = Some(inner);
|
self.inner = Some(inner);
|
||||||
Ok::<_, failure::Error>((frame, new_video_sample_entry))
|
Ok::<_, Error>((frame, new_video_sample_entry))
|
||||||
})?;
|
})?;
|
||||||
Ok(VideoFrame {
|
Ok(VideoFrame {
|
||||||
pts: frame.timestamp().elapsed(),
|
pts: frame.timestamp().elapsed(),
|
||||||
|
@ -269,16 +288,24 @@ pub mod testutil {
|
||||||
pub fn open(path: &str) -> Result<Self, Error> {
|
pub fn open(path: &str) -> Result<Self, Error> {
|
||||||
let f = std::fs::read(path)?;
|
let f = std::fs::read(path)?;
|
||||||
let len = f.len();
|
let len = f.len();
|
||||||
let reader = mp4::Mp4Reader::read_header(Cursor::new(f), u64::try_from(len)?)?;
|
let reader = mp4::Mp4Reader::read_header(
|
||||||
|
Cursor::new(f),
|
||||||
|
u64::try_from(len).expect("len should be in u64 range"),
|
||||||
|
)
|
||||||
|
.map_err(|e| err!(Unknown, source(e)))?;
|
||||||
let h264_track = match reader
|
let h264_track = match reader
|
||||||
.tracks()
|
.tracks()
|
||||||
.values()
|
.values()
|
||||||
.find(|t| matches!(t.media_type(), Ok(mp4::MediaType::H264)))
|
.find(|t| matches!(t.media_type(), Ok(mp4::MediaType::H264)))
|
||||||
{
|
{
|
||||||
None => bail!("expected a H.264 track"),
|
None => bail!(InvalidArgument, msg("expected a H.264 track")),
|
||||||
Some(t) => t,
|
Some(t) => t,
|
||||||
};
|
};
|
||||||
let video_sample_entry = h264::parse_extra_data(&h264_track.extra_data()?[..])?;
|
let video_sample_entry = h264::parse_extra_data(
|
||||||
|
&h264_track
|
||||||
|
.extra_data()
|
||||||
|
.map_err(|e| err!(Unknown, source(e)))?[..],
|
||||||
|
)?;
|
||||||
let h264_track_id = h264_track.track_id();
|
let h264_track_id = h264_track.track_id();
|
||||||
let stream = Mp4Stream {
|
let stream = Mp4Stream {
|
||||||
reader,
|
reader,
|
||||||
|
@ -312,8 +339,9 @@ pub mod testutil {
|
||||||
fn next(&mut self) -> Result<VideoFrame, Error> {
|
fn next(&mut self) -> Result<VideoFrame, Error> {
|
||||||
let sample = self
|
let sample = self
|
||||||
.reader
|
.reader
|
||||||
.read_sample(self.h264_track_id, self.next_sample_id)?
|
.read_sample(self.h264_track_id, self.next_sample_id)
|
||||||
.ok_or_else(|| format_err!("End of file"))?;
|
.map_err(|e| err!(Unknown, source(e)))?
|
||||||
|
.ok_or_else(|| err!(OutOfRange, msg("end of file")))?;
|
||||||
self.next_sample_id += 1;
|
self.next_sample_id += 1;
|
||||||
Ok(VideoFrame {
|
Ok(VideoFrame {
|
||||||
pts: sample.start_time as i64,
|
pts: sample.start_time as i64,
|
||||||
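The changes in this file all follow the same pattern: call sites that previously produced a bare `failure::Error` (via `format_err!`/`bail!` or a `?` conversion) now attach an explicit `ErrorKind` and, where there is an underlying cause, keep it reachable through `source(...)`. A minimal sketch of that pattern, using the `err!`/`bail!` macros from the project's `base` crate as they appear in this diff; the function and failure cases below are invented for illustration only:

```rust
// Illustration only: a hypothetical fallible helper written in the new style.
// `err!`, `bail!`, `msg(...)`, and `source(...)` are the base crate's forms as
// used throughout this commit.
use base::{bail, err, Error};

fn parse_header(input: Option<&[u8]>) -> Result<usize, Error> {
    // Old: .ok_or_else(|| format_err!("no input"))?  -- message only, no kind.
    // New: an explicit ErrorKind plus a human-readable message.
    let buf = input.ok_or_else(|| err!(FailedPrecondition, msg("no input")))?;

    if buf.is_empty() {
        // bail! is the early-return form.
        bail!(OutOfRange, msg("end of stream"));
    }

    // Wrapping a lower-level error: keep the cause in the chain with source(...).
    let s = std::str::from_utf8(buf)
        .map_err(|e| err!(Unknown, msg("header is not UTF-8"), source(e)))?;
    Ok(s.len())
}
```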
@@ -4,8 +4,8 @@
 
 use crate::stream;
 use base::clock::{Clocks, TimerGuard};
+use base::{bail, err, Error};
 use db::{dir, recording, writer, Camera, Database, Stream};
-use failure::{bail, format_err, Error};
 use std::result::Result;
 use std::str::FromStr;
 use std::sync::Arc;
@@ -68,9 +68,12 @@ where
 .config
 .url
 .as_ref()
-.ok_or_else(|| format_err!("Stream has no RTSP URL"))?;
+.ok_or_else(|| err!(InvalidArgument, msg("stream has no RTSP URL")))?;
 if !url.username().is_empty() || url.password().is_some() {
-    bail!("RTSP URL shouldn't include credentials");
+    bail!(
+        InvalidArgument,
+        msg("RTSP URL shouldn't include credentials")
+    );
 }
 let stream_transport = if s.config.rtsp_transport.is_empty() {
     None
@@ -119,7 +122,7 @@ where
 if let Err(err) = self.run_once() {
     let sleep_time = time::Duration::seconds(1);
     warn!(
-        err = base::prettify_failure(&err),
+        err = %err.chain(),
        "sleeping for 1 s after error"
     );
     self.db.clocks().sleep(sleep_time);
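The logging change above is the other half of the migration: `base::prettify_failure(&err)` formatted a `failure::Error` and its causes into a string, whereas the new code hands `err.chain()` to `tracing` through the `%` (Display) capture. A small hedged sketch of that pattern, assuming an `Error` built with `err!` and a `chain()` accessor as shown in this diff; the I/O error below is invented:

```rust
// Illustration only: log an error together with its source chain, the way the
// streamer's retry loop above does.
use base::{err, Error};

fn log_and_retry(e: &Error) {
    // `%` captures via Display; chain() renders the error plus its sources.
    tracing::warn!(err = %e.chain(), "sleeping for 1 s after error");
}

fn example() {
    let cause = std::io::Error::new(std::io::ErrorKind::ConnectionReset, "connection reset");
    let e = err!(Unknown, msg("RTSP connection failed"), source(cause));
    log_and_retry(&e);
}
```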
@@ -150,7 +153,7 @@ where
 }
 }
 .in_current_span(),
-)?;
+).map_err(|e| err!(Unknown, source(e)))?;
 waited = true;
 } else {
 if waited {
@@ -221,7 +224,7 @@ where
 None
 } else if frame.new_video_sample_entry {
     if !frame.is_key {
-        bail!("parameter change on non-key frame");
+        bail!(Unavailable, msg("parameter change on non-key frame"));
     }
     trace!("close on parameter change");
     video_sample_entry_id = {
@@ -286,8 +289,8 @@ where
 mod tests {
 use crate::stream::{self, Stream};
 use base::clock::{self, Clocks};
+use base::{bail, Error};
 use db::{recording, testutil, CompositeId};
-use failure::{bail, Error};
 use std::cmp;
 use std::convert::TryFrom;
 use std::sync::Arc;
@@ -334,7 +337,7 @@ mod tests {
 
 fn next(&mut self) -> Result<stream::VideoFrame, Error> {
     if self.pkts_left == 0 {
-        bail!("end of stream");
+        bail!(OutOfRange, msg("end of stream"));
     }
     self.pkts_left -= 1;
 
@@ -394,7 +397,7 @@ mod tests {
 None => {
     trace!("MockOpener shutting down");
     self.shutdown_tx.lock().unwrap().take();
-    bail!("done")
+    bail!(Cancelled, msg("done"))
 }
 }
 }
@@ -6,7 +6,7 @@
 
 use std::sync::Arc;
 
-use base::{bail_t, format_err_t, Error};
+use base::{bail, err, Error};
 use futures::{future::Either, SinkExt, StreamExt};
 use http::header;
 use tokio_tungstenite::{tungstenite, WebSocketStream};
@@ -26,7 +26,7 @@ impl Service {
 ) -> Result<(), Error> {
 let caller = caller?;
 if !caller.permissions.view_video {
-    bail_t!(PermissionDenied, "view_video required");
+    bail!(PermissionDenied, msg("view_video required"));
 }
 
 let stream_id;
@@ -36,18 +36,18 @@ impl Service {
 let mut db = self.db.lock();
 open_id = match db.open {
     None => {
-        bail_t!(
+        bail!(
             FailedPrecondition,
-            "database is read-only; there are no live streams"
+            msg("database is read-only; there are no live streams"),
         );
     }
     Some(o) => o.id,
 };
 let camera = db
     .get_camera(uuid)
-    .ok_or_else(|| format_err_t!(NotFound, "no such camera {uuid}"))?;
+    .ok_or_else(|| err!(NotFound, msg("no such camera {uuid}")))?;
 stream_id = camera.streams[stream_type.index()]
-    .ok_or_else(|| format_err_t!(NotFound, "no such stream {uuid}/{stream_type}"))?;
+    .ok_or_else(|| err!(NotFound, msg("no such stream {uuid}/{stream_type}")))?;
 db.watch_live(
     stream_id,
     Box::new(move |l| sub_tx.unbounded_send(l).is_ok()),
@@ -116,7 +116,7 @@ impl Service {
 Ok(())
 })?;
 }
-let row = row.ok_or_else(|| format_err_t!(Internal, "unable to find {:?}", live))?;
+let row = row.ok_or_else(|| err!(Internal, msg("unable to find {live:?}")))?;
 use http_serve::Entity;
 let mp4 = builder.build(self.db.clone(), self.dirs_by_stream_id.clone())?;
 let mut hdrs = header::HeaderMap::new();
@@ -17,10 +17,10 @@ use self::path::Path;
 use crate::body::Body;
 use crate::json;
 use crate::mp4;
-use base::format_err_t;
+use base::err;
 use base::Error;
 use base::ResultExt;
-use base::{bail_t, clock::Clocks, ErrorKind};
+use base::{bail, clock::Clocks, ErrorKind};
 use core::borrow::Borrow;
 use core::str::FromStr;
 use db::dir::SampleFileDir;
@@ -134,24 +134,27 @@ async fn extract_json_body(req: &mut Request<hyper::Body>) -> Result<Bytes, base
 _ => false,
 };
 if !correct_mime_type {
-    bail_t!(InvalidArgument, "expected application/json request body");
+    bail!(
+        InvalidArgument,
+        msg("expected application/json request body")
+    );
 }
 let b = ::std::mem::replace(req.body_mut(), hyper::Body::empty());
 hyper::body::to_bytes(b)
     .await
-    .map_err(|e| format_err_t!(Unavailable, "unable to read request body: {}", e))
+    .map_err(|e| err!(Unavailable, msg("unable to read request body"), source(e)))
 }
 
 fn parse_json_body<'a, T: serde::Deserialize<'a>>(body: &'a [u8]) -> Result<T, base::Error> {
 serde_json::from_slice(body)
-    .map_err(|e| format_err_t!(InvalidArgument, "bad request body: {e}"))
+    .map_err(|e| err!(InvalidArgument, msg("bad request body"), source(e)))
 }
 
 fn require_csrf_if_session(caller: &Caller, csrf: Option<&str>) -> Result<(), base::Error> {
 match (csrf, caller.user.as_ref().and_then(|u| u.session.as_ref())) {
-    (None, Some(_)) => bail_t!(Unauthenticated, "csrf must be supplied"),
+    (None, Some(_)) => bail!(Unauthenticated, msg("csrf must be supplied")),
     (Some(csrf), Some(session)) if !csrf_matches(csrf, session.csrf) => {
-        bail_t!(Unauthenticated, "incorrect csrf");
+        bail!(Unauthenticated, msg("incorrect csrf"));
     }
     (_, _) => Ok(()),
 }
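With these helpers, request-body failures now carry both a kind (`InvalidArgument` or `Unavailable`) and the underlying hyper or serde error as their `source`, instead of flattening everything into one formatted string. A hedged sketch of how a handler might call `parse_json_body` after this change; the request type and handler here are hypothetical and not part of the commit:

```rust
// Illustration only: deserializing a hypothetical request body with the reworked
// parse_json_body. On failure the caller gets an InvalidArgument error with the
// serde_json error preserved as its source.
#[derive(serde::Deserialize)]
struct ExampleRequest {
    csrf: String,
    limit: Option<u32>,
}

fn handle_example(body: &[u8]) -> Result<(), base::Error> {
    let req: ExampleRequest = parse_json_body(body)?;
    tracing::debug!(limit = ?req.limit, "parsed example request");
    // CSRF enforcement for session callers would go through require_csrf_if_session().
    let _ = req.csrf;
    Ok(())
}
```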
@@ -292,7 +295,7 @@ impl Service {
 Path::StreamLiveMp4Segments(..) => {
     unreachable!("StreamLiveMp4Segments should have already been handled")
 }
-Path::NotFound => return Err(format_err_t!(NotFound, "path not understood")),
+Path::NotFound => return Err(err!(NotFound, msg("path not understood"))),
 Path::Login => (
     CacheControl::PrivateDynamic,
     self.login(req, authreq).await?,
@@ -422,7 +425,7 @@ impl Service {
 }
 
 if camera_configs && !caller.permissions.read_camera_configs {
-    bail_t!(PermissionDenied, "read_camera_configs required");
+    bail!(PermissionDenied, msg("read_camera_configs required"));
 }
 
 let db = self.db.lock();
@@ -444,7 +447,7 @@ impl Service {
 let db = self.db.lock();
 let camera = db
     .get_camera(uuid)
-    .ok_or_else(|| format_err_t!(NotFound, "no such camera {uuid}"))?;
+    .ok_or_else(|| err!(NotFound, msg("no such camera {uuid}")))?;
 serve_json(
     req,
     &json::Camera::wrap(camera, &db, true, false).err_kind(ErrorKind::Internal)?,
@@ -466,17 +469,17 @@ impl Service {
 match key {
 "startTime90k" => {
     time.start = recording::Time::parse(value).map_err(|_| {
-        format_err_t!(InvalidArgument, "unparseable startTime90k")
+        err!(InvalidArgument, msg("unparseable startTime90k"))
     })?
 }
 "endTime90k" => {
-    time.end = recording::Time::parse(value).map_err(|_| {
-        format_err_t!(InvalidArgument, "unparseable endTime90k")
-    })?
+    time.end = recording::Time::parse(value)
+        .map_err(|_| err!(InvalidArgument, msg("unparseable endTime90k")))?
 }
 "split90k" => {
-    split = recording::Duration(i64::from_str(value).map_err(|_| {
-        format_err_t!(InvalidArgument, "unparseable split90k")
+    split =
+        recording::Duration(i64::from_str(value).map_err(|_| {
+            err!(InvalidArgument, msg("unparseable split90k"))
     })?)
 }
 _ => {}
@@ -491,10 +494,10 @@ impl Service {
 video_sample_entries: (&db, Vec::new()),
 };
 let Some(camera) = db.get_camera(uuid) else {
-    bail_t!(NotFound, "no such camera {uuid}");
+    bail!(NotFound, msg("no such camera {uuid}"));
 };
 let Some(stream_id) = camera.streams[type_.index()] else {
-    bail_t!(NotFound, "no such stream {uuid}/{type_}");
+    bail!(NotFound, msg("no such stream {uuid}/{type_}"));
 };
 db.list_aggregated_recordings(stream_id, r, split, &mut |row| {
     let end = row.ids.end - 1; // in api, ids are inclusive.
@@ -532,7 +535,7 @@ impl Service {
 let mut builder = mp4::FileBuilder::new(mp4::Type::InitSegment);
 let db = self.db.lock();
 let Some(ent) = db.video_sample_entries_by_id().get(&id) else {
-    bail_t!(NotFound, "no such init segment");
+    bail!(NotFound, msg("no such init segment"));
 };
 builder.append_video_sample_entry(ent.clone());
 let mp4 = builder
@@ -672,7 +675,7 @@ impl Service {
 });
 }
 
-bail_t!(Unauthenticated, "unauthenticated");
+bail!(Unauthenticated);
 }
 }
 
@@ -4,7 +4,7 @@
 
 //! Session management: `/api/login` and `/api/logout`.
 
-use base::{bail_t, ErrorKind, ResultExt};
+use base::{bail, ErrorKind, ResultExt};
 use db::auth;
 use http::{header, HeaderValue, Method, Request, Response, StatusCode};
 use memchr::memchr;
@@ -32,7 +32,7 @@ impl Service {
 let r = extract_json_body(&mut req).await?;
 let r: json::LoginRequest = parse_json_body(&r)?;
 let Some(host) = req.headers().get(header::HOST) else {
-    bail_t!(InvalidArgument, "missing Host header");
+    bail!(InvalidArgument, msg("missing Host header"));
 };
 let host = host.as_bytes();
 let domain = match memchr(b':', host) {
@@ -94,17 +94,17 @@ impl Service {
 match l.authenticate_session(authreq.clone(), &hash) {
 Ok((s, _)) => {
     if !csrf_matches(r.csrf, s.csrf()) {
-        bail_t!(InvalidArgument, "logout with incorret csrf token");
+        bail!(InvalidArgument, msg("logout with incorrect csrf token"));
     }
     info!("revoking session");
     l.revoke_session(auth::RevocationReason::LoggedOut, None, authreq, &hash)
         .err_kind(ErrorKind::Internal)?;
 }
-Err(e) => {
+Err(err) => {
     // TODO: distinguish "no such session", "session is no longer valid", and
     // "user ... is disabled" (which are all client error / bad state) from database
     // errors.
-    warn!("logout failed: {}", e);
+    warn!(err = %err.chain(), "logout failed");
 }
 }
 
@@ -4,7 +4,7 @@
 
 //! `/api/signals` handling.
 
-use base::{bail_t, clock::Clocks, format_err_t};
+use base::{bail, clock::Clocks, err};
 use db::recording;
 use http::{Method, Request, StatusCode};
 use url::form_urlencoded;
@@ -36,7 +36,7 @@ impl Service {
 
 async fn post_signals(&self, mut req: Request<hyper::Body>, caller: Caller) -> ResponseResult {
 if !caller.permissions.update_signals {
-    bail_t!(PermissionDenied, "update_signals required");
+    bail!(PermissionDenied, msg("update_signals required"));
 }
 let r = extract_json_body(&mut req).await?;
 let r: json::PostSignalsRequest = parse_json_body(&r)?;
@@ -62,13 +62,12 @@ impl Service {
 let (key, value) = (key.borrow(), value.borrow());
 match key {
 "startTime90k" => {
-    time.start = recording::Time::parse(value).map_err(|_| {
-        format_err_t!(InvalidArgument, "unparseable startTime90k")
-    })?
+    time.start = recording::Time::parse(value)
+        .map_err(|_| err!(InvalidArgument, msg("unparseable startTime90k")))?
 }
 "endTime90k" => {
     time.end = recording::Time::parse(value)
-        .map_err(|_| format_err_t!(InvalidArgument, "unparseable endTime90k"))?
+        .map_err(|_| err!(InvalidArgument, msg("unparseable endTime90k")))?
 }
 _ => {}
 }
@@ -4,7 +4,7 @@
 
 //! Static file serving.
 
-use base::{bail_t, format_err_t, Error, ErrorKind, ResultExt};
+use base::{bail, err, Error, ErrorKind, ResultExt};
 use http::{header, HeaderValue, Request};
 
 use super::{ResponseResult, Service};
@@ -13,15 +13,15 @@ impl Service {
 /// Serves a static file if possible.
 pub(super) async fn static_file(&self, req: Request<hyper::Body>) -> ResponseResult {
 let Some(dir) = self.ui_dir.clone() else {
-    bail_t!(NotFound, "ui dir not configured or missing; no static files available.")
+    bail!(NotFound, msg("ui dir not configured or missing; no static files available"))
 };
 let Some(static_req) = StaticFileRequest::parse(req.uri().path()) else {
-    bail_t!(NotFound, "static file not found");
+    bail!(NotFound, msg("static file not found"));
 };
 let f = dir.get(static_req.path, req.headers());
 let node = f.await.map_err(|e| {
     if e.kind() == std::io::ErrorKind::NotFound {
-        format_err_t!(NotFound, "no such static file")
+        err!(NotFound, msg("no such static file"))
     } else {
         Error::wrap(ErrorKind::Internal, e)
     }
@@ -4,7 +4,7 @@
 
 //! User management: `/api/users/*`.
 
-use base::{bail_t, format_err_t};
+use base::{bail, err};
 use http::{Method, Request, StatusCode};
 
 use crate::json::{self, PutUsersResponse, UserSubset, UserWithId};
@@ -28,7 +28,7 @@ impl Service {
 
 async fn get_users(&self, req: Request<hyper::Body>, caller: Caller) -> ResponseResult {
 if !caller.permissions.admin_users {
-    bail_t!(Unauthenticated, "must have admin_users permission");
+    bail!(Unauthenticated, msg("must have admin_users permission"));
 }
 let l = self.db.lock();
 let users = l
@@ -44,7 +44,7 @@ impl Service {
 
 async fn post_users(&self, mut req: Request<hyper::Body>, caller: Caller) -> ResponseResult {
 if !caller.permissions.admin_users {
-    bail_t!(Unauthenticated, "must have admin_users permission");
+    bail!(Unauthenticated, msg("must have admin_users permission"));
 }
 let r = extract_json_body(&mut req).await?;
 let mut r: json::PutUsers = parse_json_body(&r)?;
@@ -53,7 +53,7 @@ impl Service {
 .user
 .username
 .take()
-.ok_or_else(|| format_err_t!(InvalidArgument, "username must be specified"))?;
+.ok_or_else(|| err!(InvalidArgument, msg("username must be specified")))?;
 let mut change = db::UserChange::add_user(username.to_owned());
 if let Some(Some(pwd)) = r.user.password.take() {
     change.set_password(pwd.to_owned());
@@ -65,7 +65,7 @@ impl Service {
 change.permissions = permissions.into();
 }
 if r.user != Default::default() {
-    bail_t!(Unimplemented, "unsupported user fields: {:#?}", r);
+    bail!(Unimplemented, msg("unsupported user fields: {r:#?}"));
 }
 let mut l = self.db.lock();
 let user = l.apply_user_change(change)?;
@@ -95,7 +95,7 @@ impl Service {
 let user = db
     .users_by_id()
     .get(&id)
-    .ok_or_else(|| format_err_t!(NotFound, "can't find requested user"))?;
+    .ok_or_else(|| err!(NotFound, msg("can't find requested user")))?;
 serve_json(&req, &UserSubset::from(user))
 }
 
@@ -106,7 +106,7 @@ impl Service {
 id: i32,
 ) -> ResponseResult {
 if !caller.permissions.admin_users {
-    bail_t!(Unauthenticated, "must have admin_users permission");
+    bail!(Unauthenticated, msg("must have admin_users permission"));
 }
 let r = extract_json_body(&mut req).await?;
 let r: json::DeleteUser = parse_json_body(&r)?;
@@ -128,45 +128,44 @@ impl Service {
 let mut db = self.db.lock();
 let user = db
     .get_user_by_id_mut(id)
-    .ok_or_else(|| format_err_t!(NotFound, "can't find requested user"))?;
+    .ok_or_else(|| err!(NotFound, msg("can't find requested user")))?;
 if r.update.as_ref().and_then(|u| u.password).is_some()
     && r.precondition.as_ref().and_then(|p| p.password).is_none()
     && !caller.permissions.admin_users
 {
-    bail_t!(
+    bail!(
         Unauthenticated,
-        "to change password, must supply previous password or have admin_users permission"
+        msg("to change password, must supply previous password or have admin_users permission")
     );
 }
 require_csrf_if_session(&caller, r.csrf)?;
 if let Some(mut precondition) = r.precondition {
     if matches!(precondition.disabled.take(), Some(d) if d != user.config.disabled) {
-        bail_t!(FailedPrecondition, "disabled mismatch");
+        bail!(FailedPrecondition, msg("disabled mismatch"));
     }
     if matches!(precondition.username.take(), Some(n) if n != user.username) {
-        bail_t!(FailedPrecondition, "username mismatch");
+        bail!(FailedPrecondition, msg("username mismatch"));
     }
     if matches!(precondition.preferences.take(), Some(ref p) if p != &user.config.preferences)
     {
-        bail_t!(FailedPrecondition, "preferences mismatch");
+        bail!(FailedPrecondition, msg("preferences mismatch"));
     }
     if let Some(p) = precondition.password.take() {
         if !user.check_password(p)? {
-            bail_t!(FailedPrecondition, "password mismatch"); // or Unauthenticated?
+            bail!(FailedPrecondition, msg("password mismatch")); // or Unauthenticated?
        }
     }
     if let Some(p) = precondition.permissions.take() {
         if user.permissions != db::Permissions::from(p) {
-            bail_t!(FailedPrecondition, "permissions mismatch");
+            bail!(FailedPrecondition, msg("permissions mismatch"));
         }
     }
 
     // Safety valve in case something is added to UserSubset and forgotten here.
     if precondition != Default::default() {
-        bail_t!(
+        bail!(
             Unimplemented,
-            "preconditions not supported: {:#?}",
-            &precondition
+            msg("preconditions not supported: {precondition:#?}"),
         );
     }
 }
@@ -185,7 +184,7 @@ impl Service {
 
 // Requires admin_users if there's anything else.
 if update != Default::default() && !caller.permissions.admin_users {
-    bail_t!(Unauthenticated, "must have admin_users permission");
+    bail!(Unauthenticated, msg("must have admin_users permission"));
 }
 if let Some(d) = update.disabled.take() {
     change.config.disabled = d;
@@ -199,7 +198,7 @@ impl Service {
 
 // Safety valve in case something is added to UserSubset and forgotten here.
 if update != Default::default() {
-    bail_t!(Unimplemented, "updates not supported: {:#?}", &update);
+    bail!(Unimplemented, msg("updates not supported: {update:#?}"));
 }
 
 // Then apply all together.
@@ -211,9 +210,9 @@ impl Service {
 
 fn require_same_or_admin(caller: &Caller, id: i32) -> Result<(), base::Error> {
 if caller.user.as_ref().map(|u| u.id) != Some(id) && !caller.permissions.admin_users {
-    bail_t!(
+    bail!(
         Unauthenticated,
-        "must be authenticated as supplied user or have admin_users permission"
+        msg("must be authenticated as supplied user or have admin_users permission"),
     );
 }
 Ok(())
@@ -4,7 +4,7 @@
 
 //! `/view.mp4` and `/view.m4s` handling.
 
-use base::{bail_t, format_err_t};
+use base::{bail, err};
 use db::recording::{self, rescale};
 use http::{Request, StatusCode};
 use nom::bytes::complete::{tag, take_while1};
@@ -36,17 +36,17 @@ impl Service {
 debug: bool,
 ) -> ResponseResult {
 if !caller.permissions.view_video {
-    bail_t!(PermissionDenied, "view_video required");
+    bail!(PermissionDenied, msg("view_video required"));
 }
 let (stream_id, camera_name);
 {
     let db = self.db.lock();
     let camera = db
         .get_camera(uuid)
-        .ok_or_else(|| format_err_t!(NotFound, "no such camera {uuid}"))?;
+        .ok_or_else(|| err!(NotFound, msg("no such camera {uuid}")))?;
     camera_name = camera.short_name.clone();
     stream_id = camera.streams[stream_type.index()]
-        .ok_or_else(|| format_err_t!(NotFound, "no such stream {uuid}/{stream_type}"))?;
+        .ok_or_else(|| err!(NotFound, msg("no such stream {uuid}/{stream_type}")))?;
 };
 let mut start_time_for_filename = None;
 let mut builder = mp4::FileBuilder::new(mp4_type);
@@ -56,7 +56,7 @@ impl Service {
 match key {
 "s" => {
     let s = Segments::from_str(value).map_err(|()| {
-        format_err_t!(InvalidArgument, "invalid s parameter: {value}")
+        err!(InvalidArgument, msg("invalid s parameter: {value}"))
     })?;
     trace!("stream_view_mp4: appending s={:?}", s);
     let mut est_segments = usize::try_from(s.ids.end - s.ids.start).unwrap();
@@ -81,12 +81,14 @@ impl Service {
 
 if let Some(o) = s.open_id {
     if r.open_id != o {
-        bail_t!(
+        bail!(
             NotFound,
+            msg(
                 "recording {} has open id {}, requested {}",
                 r.id,
                 r.open_id,
-                o
+                o,
+            ),
         );
     }
 }
@@ -94,14 +96,15 @@ impl Service {
 // Check for missing recordings.
 match prev {
     None if recording_id == s.ids.start => {}
-    None => bail_t!(
+    None => bail!(
         NotFound,
-        "no such recording {}/{}",
-        stream_id,
-        s.ids.start
+        msg("no such recording {}/{}", stream_id, s.ids.start),
     ),
     Some(id) if r.id.recording() != id + 1 => {
-        bail_t!(NotFound, "no such recording {}/{}", stream_id, id + 1);
+        bail!(
+            NotFound,
+            msg("no such recording {}/{}", stream_id, id + 1)
+        );
     }
     _ => {}
 };
@@ -144,35 +147,30 @@ impl Service {
 // Check for missing recordings.
 match prev {
     Some(id) if s.ids.end != id + 1 => {
-        bail_t!(
+        bail!(
             NotFound,
-            "no such recording {}/{}",
-            stream_id,
-            s.ids.end - 1
+            msg("no such recording {}/{}", stream_id, s.ids.end - 1),
         );
     }
     None => {
-        bail_t!(
+        bail!(
             NotFound,
-            "no such recording {}/{}",
-            stream_id,
-            s.ids.start
+            msg("no such recording {}/{}", stream_id, s.ids.start),
         );
     }
     _ => {}
 };
 if let Some(end) = s.end_time {
     if end > cur_off {
-        bail_t!(
+        bail!(
             InvalidArgument,
-            "end time {} is beyond specified recordings",
-            end
+            msg("end time {end} is beyond specified recordings"),
         );
     }
 }
 }
 "ts" => builder.include_timestamp_subtitle_track(value == "true")?,
-_ => bail_t!(InvalidArgument, "parameter {key} not understood"),
+_ => bail!(InvalidArgument, msg("parameter {key} not understood")),
 }
 }
 }
@@ -8,7 +8,7 @@
 use std::pin::Pin;
 
 use crate::body::Body;
-use base::{bail_t, format_err_t};
+use base::{bail, err};
 use futures::{Future, SinkExt};
 use http::{header, Request, Response};
 use tokio_tungstenite::{tungstenite, WebSocketStream};
@@ -37,7 +37,7 @@ where
 // Otherwise, upgrade and handle the rest in a separate task.
 let response =
     tungstenite::handshake::server::create_response_with_body(&req, hyper::Body::empty)
-        .map_err(|e| format_err_t!(InvalidArgument, "{}", e.to_string()))?;
+        .map_err(|e| err!(InvalidArgument, source(e)))?;
 let (parts, _) = response.into_parts();
 let span = tracing::info_span!("websocket");
 tokio::spawn(
@@ -84,11 +84,11 @@ fn check_origin(headers: &header::HeaderMap) -> Result<(), base::Error> {
 Some(o) => o,
 };
 let Some(host_hdr) = headers.get(header::HOST) else {
-    bail_t!(InvalidArgument, "missing Host header");
+    bail!(InvalidArgument, msg("missing Host header"));
 };
 let host_str = host_hdr
     .to_str()
-    .map_err(|_| format_err_t!(InvalidArgument, "bad Host header"))?;
+    .map_err(|_| err!(InvalidArgument, msg("bad Host header")))?;
 
 // Currently this ignores the port number. This is easiest and I think matches the browser's
 // rules for when it sends a cookie, so it probably doesn't cause great security problems.
@@ -100,16 +100,16 @@ fn check_origin(headers: &header::HeaderMap) -> Result<(), base::Error> {
 .to_str()
 .ok()
 .and_then(|o| url::Url::parse(o).ok())
-.ok_or_else(|| format_err_t!(InvalidArgument, "bad Origin header"))?;
+.ok_or_else(|| err!(InvalidArgument, msg("bad Origin header")))?;
 let origin_host = origin_url
     .host_str()
-    .ok_or_else(|| format_err_t!(InvalidArgument, "bad Origin header"))?;
+    .ok_or_else(|| err!(InvalidArgument, msg("bad Origin header")))?;
 if host != origin_host {
-    bail_t!(
+    bail!(
         PermissionDenied,
-        "cross-origin request forbidden (request host {:?}, origin {:?})",
-        host_hdr,
-        origin_hdr
+        msg(
+            "cross-origin request forbidden (request host {host_hdr:?}, origin {origin_hdr:?})"
+        ),
     );
 }
 Ok(())