massive error overhaul

* fully stop using ancient `failure` crate in favor of own error type
* set an `ErrorKind` on everything
This commit is contained in:
Scott Lamb 2023-07-09 22:04:17 -07:00
parent 6a5b751bd6
commit 64ca096ff3
54 changed files with 1493 additions and 1108 deletions

104
server/Cargo.lock generated
View File

@ -2,15 +2,6 @@
# It is not intended for manual editing.
version = 3
[[package]]
name = "addr2line"
version = "0.19.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a76fd60b23679b7d19bd066031410fb7e458ccc5e958eb5c325888ce4baedc97"
dependencies = [
"gimli",
]
[[package]]
name = "adler"
version = "1.0.2"
@ -77,21 +68,6 @@ version = "1.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d468802bab17cbc0cc575e9b053f41e72aa36bfa6b7f55e3529ffa43161b97fa"
[[package]]
name = "backtrace"
version = "0.3.67"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "233d376d6d185f2a3093e58f283f60f880315b6c60075b01f36b3b85154564ca"
dependencies = [
"addr2line",
"cc",
"cfg-if",
"libc",
"miniz_oxide",
"object",
"rustc-demangle",
]
[[package]]
name = "base64"
version = "0.13.1"
@ -234,6 +210,11 @@ dependencies = [
"inout",
]
[[package]]
name = "coded"
version = "0.2.0-pre"
source = "git+https://github.com/scottlamb/coded?rev=2c97994974a73243d5dd12134831814f42cdb0e8#2c97994974a73243d5dd12134831814f42cdb0e8"
[[package]]
name = "constant_time_eq"
version = "0.2.4"
@ -478,28 +459,6 @@ dependencies = [
"libc",
]
[[package]]
name = "failure"
version = "0.1.8"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d32e9bd16cc02eae7db7ef620b392808b89f6a5e16bb3497d159c6b92a0f4f86"
dependencies = [
"backtrace",
"failure_derive",
]
[[package]]
name = "failure_derive"
version = "0.1.8"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "aa4da3c766cd7a0db8242e326e9e4e081edd567072893ed320008189715366a4"
dependencies = [
"proc-macro2",
"quote",
"syn 1.0.107",
"synstructure",
]
[[package]]
name = "fallible-iterator"
version = "0.2.0"
@ -523,9 +482,9 @@ dependencies = [
[[package]]
name = "flate2"
version = "1.0.25"
version = "1.0.26"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a8a2db397cb1c8772f31494cb8917e48cd1e64f0fa7efac59fbd741a0a8ce841"
checksum = "3b9429470923de8e8cbd4d2dc513535400b4b3fef0319fb5c4e1f520a7bef743"
dependencies = [
"crc32fast",
"miniz_oxide",
@ -662,12 +621,6 @@ dependencies = [
"wasi 0.11.0+wasi-snapshot-preview1",
]
[[package]]
name = "gimli"
version = "0.27.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "dec7af912d60cdbd3677c1af9352ebae6fb8394d165568a2234df0fa00f87793"
[[package]]
name = "h2"
version = "0.3.15"
@ -1060,9 +1013,9 @@ checksum = "68354c5c6bd36d73ff3feceb05efa59b6acb7626617f4962be322a825e61f79a"
[[package]]
name = "miniz_oxide"
version = "0.6.2"
version = "0.7.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b275950c28b37e794e8c55d88aeb5e139d0ce23fdbbeda68f8d7174abdf9e8fa"
checksum = "e7810e0be55b428ada41041c41f32c9f1a42817901b4ccf45fa3d4b6561e74c7"
dependencies = [
"adler",
]
@ -1084,10 +1037,12 @@ name = "moonfire-base"
version = "0.0.1"
dependencies = [
"chrono",
"failure",
"coded",
"futures",
"libc",
"nix",
"nom",
"rusqlite",
"serde",
"serde_json",
"slab",
@ -1107,7 +1062,6 @@ dependencies = [
"byteorder",
"cstr",
"diff",
"failure",
"fnv",
"futures",
"h264-reader",
@ -1147,7 +1101,6 @@ dependencies = [
"bytes",
"chrono",
"cursive",
"failure",
"fnv",
"futures",
"h264-reader",
@ -1360,15 +1313,6 @@ dependencies = [
"libc",
]
[[package]]
name = "object"
version = "0.30.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8d864c91689fdc196779b98dba0aceac6118594c2df6ee5d943eb6a8df4d107a"
dependencies = [
"memchr",
]
[[package]]
name = "odds"
version = "0.4.0"
@ -1764,12 +1708,6 @@ dependencies = [
"smallvec",
]
[[package]]
name = "rustc-demangle"
version = "0.1.21"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7ef03e0a2b150c7a90d01faf6254c9c48a41e95fb2a8c2ac1c6f0d2b9aefc342"
[[package]]
name = "rustix"
version = "0.38.2"
@ -2001,18 +1939,6 @@ version = "0.1.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "20518fe4a4c9acf048008599e464deb21beeae3d3578418951a189c235a7a9a8"
[[package]]
name = "synstructure"
version = "0.12.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f36bdaa60a83aca3921b5259d5400cbf5e90fc51931376a9bd4a0eb79aa7210f"
dependencies = [
"proc-macro2",
"quote",
"syn 1.0.107",
"unicode-xid",
]
[[package]]
name = "tempfile"
version = "3.3.0"
@ -2394,12 +2320,6 @@ version = "0.1.10"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c0edd1e5b14653f783770bce4a4dabb4a5108a5370a5f5d8cfe8710c361f6c8b"
[[package]]
name = "unicode-xid"
version = "0.2.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f962df74c8c05a667b5ee8bcf162993134c104e96440b663c8daa176dc772d8c"
[[package]]
name = "untrusted"
version = "0.7.1"

View File

@ -21,6 +21,11 @@ bundled = ["rusqlite/bundled"]
[workspace]
members = ["base", "db"]
[workspace.dependencies]
nix = "0.26.1"
tracing = { version = "0.1", features = ["log"] }
rusqlite = "0.28.0"
[dependencies]
base = { package = "moonfire-base", path = "base" }
base64 = "0.13.0"
@ -31,7 +36,6 @@ byteorder = "1.0"
chrono = "0.4.23"
cursive = "0.20.0"
db = { package = "moonfire-db", path = "db" }
failure = "0.1.1"
futures = "0.3"
fnv = "1.0"
h264-reader = "0.6.0"
@ -42,14 +46,14 @@ itertools = "0.10.0"
libc = "0.2"
log = { version = "0.4" }
memchr = "2.0.2"
nix = "0.26.1"
nix = { workspace = true}
nom = "7.0.0"
password-hash = "0.4.2"
protobuf = "3.0"
reffers = "0.7.0"
retina = "0.4.0"
ring = "0.16.2"
rusqlite = "0.28.0"
rusqlite = { workspace = true }
serde = { version = "1.0", features = ["derive"] }
serde_json = "1.0"
smallvec = { version = "1.7", features = ["union"] }
@ -59,7 +63,7 @@ tokio = { version = "1.24", features = ["macros", "rt-multi-thread", "signal", "
tokio-stream = "0.1.5"
tokio-tungstenite = "0.18.0"
toml = "0.5"
tracing = { version = "0.1", features = ["log"] }
tracing = { workspace = true }
tracing-subscriber = { version = "0.3.16", features = ["env-filter", "json"] }
tracing-core = "0.1.30"
tracing-futures = { version = "0.2.5", features = ["futures-03", "std-future"] }

View File

@ -15,15 +15,17 @@ path = "lib.rs"
[dependencies]
chrono = "0.4.23"
failure = "0.1.1"
coded = { git = "https://github.com/scottlamb/coded", rev = "2c97994974a73243d5dd12134831814f42cdb0e8"}
futures = "0.3"
libc = "0.2"
nix = { workspace = true }
nom = "7.0.0"
rusqlite = { workspace = true }
serde = { version = "1.0", features = ["derive"] }
serde_json = "1.0"
slab = "0.4"
time = "0.1"
tracing = "0.1.37"
tracing = { workspace = true }
tracing-core = "0.1.30"
tracing-log = "0.1.3"
tracing-subscriber = { version = "0.3.16", features = ["env-filter", "json"] }

View File

@ -4,7 +4,6 @@
//! Clock interface and implementations for testability.
use failure::Error;
use std::mem;
use std::sync::Mutex;
use std::sync::{mpsc, Arc};
@ -13,6 +12,7 @@ use std::time::Duration as StdDuration;
use time::{Duration, Timespec};
use tracing::warn;
use crate::error::Error;
use crate::shutdown::ShutdownError;
/// Abstract interface to the system clocks. This is for testability.
@ -54,7 +54,7 @@ where
shutdown_rx.check()?;
let sleep_time = Duration::seconds(1);
warn!(
err = crate::error::prettify_failure(&e),
exception = %e.chain(),
"sleeping for 1 s after error"
);
clocks.sleep(sleep_time);

View File

@ -2,129 +2,315 @@
// Copyright (C) 2018 The Moonfire NVR Authors; see AUTHORS and LICENSE.txt.
// SPDX-License-Identifier: GPL-v3.0-or-later WITH GPL-3.0-linking-exception.
use failure::{Backtrace, Context, Fail};
use std::fmt::{self, Write};
use std::backtrace::Backtrace;
use std::error::Error as StdError;
use std::fmt::{Debug, Display};
//use std::num::NonZeroU16;
/// Returns a pretty-and-informative version of `e`.
pub fn prettify_failure(e: &failure::Error) -> String {
let mut msg = e.to_string();
for cause in e.iter_causes() {
write!(&mut msg, "\ncaused by: {cause}").unwrap();
}
if e.backtrace().is_empty() {
write!(
&mut msg,
"\n\n(set environment variable RUST_BACKTRACE=1 to see backtraces)"
)
.unwrap();
} else {
write!(&mut msg, "\n\nBacktrace:\n{}", e.backtrace()).unwrap();
}
msg
pub use coded::ErrorKind;
/// Like [`coded::ToErrKind`] but with more third-party implementations.
///
/// It's not possible to implement those here on that trait because of the orphan rule.
pub trait ToErrKind {
    /// Returns the [`ErrorKind`] that best classifies this error.
    fn err_kind(&self) -> ErrorKind;
}
#[derive(Debug)]
pub struct Error {
inner: Context<ErrorKind>,
impl ToErrKind for Error {
#[inline]
fn err_kind(&self) -> ErrorKind {
self.0.kind
}
}
impl ToErrKind for std::io::Error {
#[inline]
fn err_kind(&self) -> ErrorKind {
self.kind().into()
}
}
impl ToErrKind for rusqlite::ErrorCode {
fn err_kind(&self) -> ErrorKind {
use rusqlite::ErrorCode;
// https://www.sqlite.org/rescode.html
match self {
ErrorCode::InternalMalfunction => ErrorKind::Internal,
ErrorCode::PermissionDenied => ErrorKind::PermissionDenied,
ErrorCode::OperationAborted => ErrorKind::Aborted,
// Conflict with another database connection in a process which is accessing
// the database, apparently without using Moonfire NVR's scheme of acquiring
// a lock on the db directory.
// https://www.sqlite.org/wal.html#sometimes_queries_return_sqlite_busy_in_wal_mode
ErrorCode::DatabaseBusy => ErrorKind::Unavailable,
// Conflict within the same database connection. Shouldn't happen for Moonfire.
ErrorCode::DatabaseLocked => ErrorKind::Internal,
ErrorCode::OutOfMemory => ErrorKind::ResourceExhausted,
ErrorCode::ReadOnly => ErrorKind::FailedPrecondition,
ErrorCode::OperationInterrupted => ErrorKind::Aborted,
ErrorCode::SystemIoFailure => ErrorKind::Unavailable,
ErrorCode::DatabaseCorrupt => ErrorKind::DataLoss,
ErrorCode::NotFound => ErrorKind::NotFound,
ErrorCode::DiskFull => ErrorKind::ResourceExhausted,
ErrorCode::CannotOpen => ErrorKind::Unavailable,
// Similar to DatabaseBusy in this implies a conflict with another conn.
ErrorCode::FileLockingProtocolFailed => ErrorKind::Unavailable,
// Likewise: Moonfire NVR should never change the schema
// mid-statement, so the most plausible explanation for
// SchemaChange is another process.
ErrorCode::SchemaChanged => ErrorKind::Unavailable,
ErrorCode::TooBig => ErrorKind::ResourceExhausted,
ErrorCode::ConstraintViolation => ErrorKind::Internal,
ErrorCode::TypeMismatch => ErrorKind::Internal,
ErrorCode::ApiMisuse => ErrorKind::Internal,
ErrorCode::NoLargeFileSupport => ErrorKind::ResourceExhausted,
ErrorCode::AuthorizationForStatementDenied => ErrorKind::Internal,
ErrorCode::ParameterOutOfRange => ErrorKind::Internal,
ErrorCode::NotADatabase => ErrorKind::FailedPrecondition,
_ => ErrorKind::Unknown,
}
}
}
impl ToErrKind for rusqlite::Error {
#[inline]
fn err_kind(&self) -> ErrorKind {
match self {
rusqlite::Error::SqliteFailure(e, _) => e.code.err_kind(),
_ => ErrorKind::Unknown,
}
}
}
impl ToErrKind for rusqlite::types::FromSqlError {
    fn err_kind(&self) -> ErrorKind {
        use rusqlite::types::FromSqlError;
        match self {
            FromSqlError::InvalidType => ErrorKind::FailedPrecondition,
            FromSqlError::OutOfRange(_) | FromSqlError::InvalidBlobSize { .. } => {
                ErrorKind::OutOfRange
            }
            // `FromSqlError::Other` and any future variants.
            _ => ErrorKind::Unknown,
        }
    }
}
impl ToErrKind for nix::Error {
fn err_kind(&self) -> ErrorKind {
use nix::Error;
match self {
Error::EACCES | Error::EPERM => ErrorKind::PermissionDenied,
Error::EDQUOT => ErrorKind::ResourceExhausted,
Error::EBUSY
| Error::EEXIST
| Error::ENOTDIR
| Error::EROFS
| Error::EFBIG
| Error::EOVERFLOW
| Error::ENXIO
| Error::ETXTBSY => ErrorKind::FailedPrecondition,
Error::EINVAL | Error::ENAMETOOLONG => ErrorKind::InvalidArgument,
Error::ELOOP => ErrorKind::FailedPrecondition,
Error::EMLINK | Error::ENOMEM | Error::ENOSPC | Error::EMFILE | Error::ENFILE => {
ErrorKind::ResourceExhausted
}
Error::EBADF | Error::EFAULT => ErrorKind::InvalidArgument,
Error::EINTR | Error::EAGAIN => ErrorKind::Aborted,
Error::ENOENT | Error::ENODEV => ErrorKind::NotFound,
Error::EOPNOTSUPP => ErrorKind::Unimplemented,
_ => ErrorKind::Unknown,
}
}
}
/// Main error type; pairs a [`coded::ErrorKind`] with optional context.
/// Boxed so that `Result<T, Error>` stays a single pointer wide.
pub struct Error(Box<ErrorInner>);

struct ErrorInner {
    // Broad classification of the error.
    kind: ErrorKind,
    // Optional human-readable message, shown after the kind's name.
    msg: Option<String>,
    //http_status: Option<NonZeroU16>,
    // Captured only for kinds where it's likely useful; see `maybe_backtrace`.
    backtrace: Option<Backtrace>,
    // Underlying cause, if any, exposed via `std::error::Error::source`.
    source: Option<Box<dyn StdError + Sync + Send>>,
}
pub struct ErrorBuilder(Box<ErrorInner>);
impl Default for ErrorBuilder {
#[inline]
fn default() -> Self {
Self(Box::new(ErrorInner {
kind: ErrorKind::Unknown,
msg: None,
// http_status: None,
backtrace: None,
source: None,
}))
}
}
impl From<ErrorKind> for ErrorBuilder {
#[inline]
fn from(value: ErrorKind) -> Self {
Self::default().kind(value)
}
}
impl ErrorBuilder {
    /// Sets the [`ErrorKind`], replacing any previously set kind.
    #[inline]
    pub fn kind(mut self, kind: ErrorKind) -> Self {
        self.0.kind = kind;
        self
    }

    /// Transforms the current kind through `f`.
    ///
    /// The bound is `FnOnce` (rather than the needlessly strict `Fn`) for
    /// consistency with `Error::map`; the closure is called exactly once.
    /// Relaxing the bound is backward-compatible: every `Fn` is an `FnOnce`.
    #[inline]
    pub fn map<F: FnOnce(ErrorKind) -> ErrorKind>(mut self, f: F) -> Self {
        self.0.kind = f(self.0.kind);
        self
    }

    /// Sets the human-readable message, replacing any previous message.
    #[inline]
    pub fn msg(mut self, msg: String) -> Self {
        self.0.msg = Some(msg);
        self
    }

    /// Sets the source error, replacing any previous source.
    #[inline]
    pub fn source<S: Into<Box<dyn StdError + Send + Sync + 'static>>>(mut self, source: S) -> Self {
        self.0.source = Some(source.into());
        self
    }

    /// Finalizes the builder into an [`Error`] without further allocation.
    #[inline]
    pub fn build(self) -> Error {
        Error(self.0)
    }
}
/// Generates `From<$t>` conversions into both [`ErrorBuilder`] and [`Error`],
/// choosing the [`ErrorKind`] via the type's [`ToErrKind`] impl and keeping
/// the original error as the `source`.
macro_rules! cvt {
    ($t:ty) => {
        impl From<$t> for ErrorBuilder {
            #[inline]
            fn from(t: $t) -> Self {
                Self::default().kind(ToErrKind::err_kind(&t)).source(t)
            }
        }
        impl From<$t> for Error {
            #[inline(always)]
            fn from(t: $t) -> Self {
                Self($crate::ErrorBuilder::from(t).0)
            }
        }
    };
}
cvt!(rusqlite::Error);
cvt!(rusqlite::types::FromSqlError);
cvt!(std::io::Error);
cvt!(nix::Error);
impl From<Error> for ErrorBuilder {
#[inline]
fn from(value: Error) -> Self {
Self::default()
.kind(ToErrKind::err_kind(&value))
.source(value)
}
}
/// Captures a backtrace if enabled for the given error kind.
///
/// Only `Internal` and `Unknown` — the kinds suggesting a bug rather than an
/// environmental problem — pay the capture cost.
// TODO: make this more configurable at runtime.
fn maybe_backtrace(kind: ErrorKind) -> Option<Backtrace> {
    match kind {
        ErrorKind::Internal | ErrorKind::Unknown => Some(Backtrace::capture()),
        _ => None,
    }
}
impl Error {
pub fn wrap<E: Into<failure::Error>>(kind: ErrorKind, e: E) -> Self {
Self {
inner: e.into().context(kind),
}
#[inline]
pub fn wrap<E: StdError + Sync + Send + 'static>(kind: ErrorKind, e: E) -> Self {
Self(Box::new(ErrorInner {
kind,
msg: None,
// http_status: None,
backtrace: maybe_backtrace(kind),
source: Some(Box::new(e)),
}))
}
#[inline]
pub fn map<F: FnOnce(ErrorKind) -> ErrorKind>(mut self, f: F) -> Self {
self.0.kind = f(self.0.kind);
self
}
#[inline]
pub fn kind(&self) -> ErrorKind {
*self.inner.get_context()
self.0.kind
}
pub fn compat(self) -> failure::Compat<Context<ErrorKind>> {
self.inner.compat()
#[inline]
pub fn msg(&self) -> Option<&str> {
self.0.msg.as_deref()
}
pub fn map<F>(self, op: F) -> Self
where
F: FnOnce(ErrorKind) -> ErrorKind,
{
Self {
inner: self.inner.map(op),
/// Returns a borrowed value which can display not only this error but also
/// the full chain of causes and (where applicable) the stack trace.
///
/// The exact format may change. Currently, it displays the stack trace for
/// the current error but not any of the sources.
#[inline]
pub fn chain(&self) -> impl Display + '_ {
ErrorChain(self)
}
}
/// Formats this error alone (*not* its full chain).
impl Display for Error {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match self.0.msg {
None => std::fmt::Display::fmt(self.0.kind.grpc_name(), f)?,
Some(ref msg) => write!(f, "{}: {}", self.0.kind.grpc_name(), msg)?,
}
}
}
impl Fail for Error {
fn cause(&self) -> Option<&dyn Fail> {
self.inner.cause()
}
fn backtrace(&self) -> Option<&Backtrace> {
self.inner.backtrace()
}
}
impl From<ErrorKind> for Error {
fn from(kind: ErrorKind) -> Error {
Error {
inner: Context::new(kind),
if let Some(ref bt) = self.0.backtrace {
// TODO: only with "alternate"/# modifier?
// Shorten this, maybe by switching to `backtrace` + using
// `backtrace_ext::short_frames_strict` or similar.
write!(f, "\nBacktrace:\n{}", bt)?;
}
Ok(())
}
}
impl From<Context<ErrorKind>> for Error {
fn from(inner: Context<ErrorKind>) -> Error {
Error { inner }
impl Debug for Error {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
std::fmt::Display::fmt(&ErrorChain(self), f)
}
}
/*impl From<failure::Error> for Error {
fn from(e: failure::Error) -> Error {
Error { inner: e.context(ErrorKind::Unknown) }
}
}
/// Value returned by [`Error::chain`].
///
/// Displays the error itself, then each transitive `source`, one per line.
struct ErrorChain<'a>(&'a Error);
impl<E: std::error::Error + Send + Sync + 'static> From<E> for Error {
fn from(e: E) -> Error {
let f = e as Fail;
Error { inner: f.context(ErrorKind::Unknown) }
}
}*/
impl fmt::Display for Error {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
match self.inner.cause() {
None => fmt::Display::fmt(&self.kind(), f),
Some(c) => write!(f, "{}: {}", self.kind(), c),
impl Display for ErrorChain<'_> {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        // The error itself first, then walk the `source()` links.
        Display::fmt(self.0, f)?;
        let mut cur = self.0.source();
        while let Some(cause) = cur {
            write!(f, "\ncaused by: {cause}")?;
            cur = cause.source();
        }
        Ok(())
    }
}
/// Error kind.
///
/// These codes are taken from
/// [grpc::StatusCode](https://github.com/grpc/grpc/blob/0e00c430827e81d61e1e7164ef04ca21ccbfaa77/include/grpcpp/impl/codegen/status_code_enum.h),
/// which is a nice general-purpose classification of errors. See that link for descriptions of
/// each error.
#[derive(Copy, Clone, Eq, PartialEq, Debug, Fail)]
#[non_exhaustive]
#[rustfmt::skip]
pub enum ErrorKind {
#[fail(display = "Cancelled")] Cancelled,
#[fail(display = "Unknown")] Unknown,
#[fail(display = "Invalid argument")] InvalidArgument,
#[fail(display = "Deadline exceeded")] DeadlineExceeded,
#[fail(display = "Not found")] NotFound,
#[fail(display = "Already exists")] AlreadyExists,
#[fail(display = "Permission denied")] PermissionDenied,
#[fail(display = "Unauthenticated")] Unauthenticated,
#[fail(display = "Resource exhausted")] ResourceExhausted,
#[fail(display = "Failed precondition")] FailedPrecondition,
#[fail(display = "Aborted")] Aborted,
#[fail(display = "Out of range")] OutOfRange,
#[fail(display = "Unimplemented")] Unimplemented,
#[fail(display = "Internal")] Internal,
#[fail(display = "Unavailable")] Unavailable,
#[fail(display = "Data loss")] DataLoss,
impl StdError for Error {
fn source(&self) -> Option<&(dyn std::error::Error + 'static)> {
// https://users.rust-lang.org/t/question-about-error-source-s-static-return-type/34515/8
self.0.source.as_ref().map(|e| e.as_ref() as &_)
}
}
/// Extension methods for `Result`.
@ -143,70 +329,130 @@ pub trait ResultExt<T, E> {
impl<T, E> ResultExt<T, E> for Result<T, E>
where
E: Into<failure::Error>,
E: StdError + Sync + Send + 'static,
{
fn err_kind(self, k: ErrorKind) -> Result<T, Error> {
self.map_err(|e| e.into().context(k).into())
self.map_err(|e| ErrorBuilder::default().kind(k).source(e).build())
}
}
/// Like `failure::bail!`, but the first argument specifies a type as an `ErrorKind`.
/// Wrapper around `err!` which returns the error.
///
/// Example with positional arguments:
/// ```
/// use moonfire_base::bail_t;
/// use moonfire_base::bail;
/// let e = || -> Result<(), moonfire_base::Error> {
/// bail_t!(Unauthenticated, "unknown user: {}", "slamb");
/// bail!(Unauthenticated, msg("unknown user: {}", "slamb"));
/// }().unwrap_err();
/// assert_eq!(e.kind(), moonfire_base::ErrorKind::Unauthenticated);
/// assert_eq!(e.to_string(), "Unauthenticated: unknown user: slamb");
/// assert_eq!(e.to_string(), "UNAUTHENTICATED: unknown user: slamb");
/// ```
///
/// Example with named arguments:
/// ```
/// use moonfire_base::bail_t;
/// use moonfire_base::bail;
/// let e = || -> Result<(), moonfire_base::Error> {
/// let user = "slamb";
/// bail_t!(Unauthenticated, "unknown user: {user}");
/// bail!(Unauthenticated, msg("unknown user: {user}"));
/// }().unwrap_err();
/// assert_eq!(e.kind(), moonfire_base::ErrorKind::Unauthenticated);
/// assert_eq!(e.to_string(), "Unauthenticated: unknown user: slamb");
/// assert_eq!(e.to_string(), "UNAUTHENTICATED: unknown user: slamb");
/// ```
#[macro_export]
macro_rules! bail_t {
($t:ident, $fmt:expr) => {
return Err($crate::Error::from(failure::err_msg(format!($fmt)).context($crate::ErrorKind::$t)).into());
};
($t:ident, $fmt:expr, $($arg:tt)+) => {
return Err($crate::Error::from(failure::err_msg(format!($fmt, $($arg)+)).context($crate::ErrorKind::$t)).into());
macro_rules! bail {
    // Builds an `Error` with `err!` and early-returns it from the enclosing
    // function. The `into()` lets this work in any function returning
    // `Result<_, E>` where `E: From<Error>`.
    ($($arg:tt)+) => {
        return Err($crate::err!($($arg)+).into());
    };
}
/// Like `failure::format_err!`, but the first argument specifies a type as an `ErrorKind`.
/// Constructs an [`Error`], tersely.
///
/// Example with positional arguments:
/// ```
/// use moonfire_base::format_err_t;
/// let e = format_err_t!(Unauthenticated, "unknown user: {}", "slamb");
/// assert_eq!(e.kind(), moonfire_base::ErrorKind::Unauthenticated);
/// assert_eq!(e.to_string(), "Unauthenticated: unknown user: slamb");
/// This is a shorthand way to use [`ErrorBuilder`].
///
/// The first argument is an `Into<ErrorBuilder>`, such as the following:
///
/// * an [`ErrorKind`] enum variant name like `Unauthenticated`.
/// There's an implicit `use ::coded::ErrorKind::*` to allow the bare
/// variant names just within this restrictive scope where you're unlikely
/// to have conflicts with other identifiers.
/// * an [`std::io::Error`] as a source, which sets the new `Error`'s
/// `ErrorKind` based on the `std::io::Error`.
/// * an `Error` as a source, which similarly copies the `ErrorKind`.
/// * an existing `ErrorBuilder`, which does not create a new source link.
///
/// Following arguments may be of these forms:
///
/// * `msg(...)`, which expands to `.msg(format!(...))`. See [`ErrorBuilder::msg`].
/// * `source(...)`, which simply expands to `.source($src)`. See [`ErrorBuilder::source`].
///
/// ## Examples
///
/// Simplest:
///
/// ```rust
/// # use coded::err;
/// let e = err!(InvalidArgument);
/// let e = err!(InvalidArgument,); // trailing commas are allowed
/// assert_eq!(e.kind(), coded::ErrorKind::InvalidArgument);
/// ```
///
/// Example with named arguments:
/// Constructing with a fixed error variant name:
///
/// ```rust
/// # use {coded::err, std::error::Error, std::num::ParseIntError};
/// let input = "a12";
/// let src = i32::from_str_radix(input, 10).unwrap_err();
///
/// let e = err!(InvalidArgument, source(src.clone()), msg("bad argument {:?}", input));
/// // The line above is equivalent to:
/// let e2 = ::coded::ErrorBuilder::from(::coded::ErrorKind::InvalidArgument)
/// .source(src.clone())
/// .msg(format!("bad argument {:?}", input))
/// .build();
///
/// assert_eq!(e.kind(), coded::ErrorKind::InvalidArgument);
/// assert_eq!(e.source().unwrap().downcast_ref::<ParseIntError>().unwrap(), &src);
/// ```
/// use moonfire_base::format_err_t;
/// let user = "slamb";
/// let e = format_err_t!(Unauthenticated, "unknown user: {user}");
/// assert_eq!(e.kind(), moonfire_base::ErrorKind::Unauthenticated);
/// assert_eq!(e.to_string(), "Unauthenticated: unknown user: slamb");
///
/// Constructing from an `std::io::Error`:
///
/// ```rust
/// # use coded::err;
/// let e = std::io::Error::new(std::io::ErrorKind::NotFound, "file not found");
/// let e = err!(e, msg("path {} not found", "foo"));
/// assert_eq!(e.kind(), coded::ErrorKind::NotFound);
/// ```
#[macro_export]
macro_rules! format_err_t {
($t:ident, $fmt:expr) => {
Into::<$crate::Error>::into(failure::err_msg(format!($fmt)).context($crate::ErrorKind::$t))
macro_rules! err {
// This uses the "incremental TT munchers", "internal rules", and "push-down accumulation"
// patterns explained in the excellent "The Little Book of Rust Macros":
// <https://veykril.github.io/tlborm/decl-macros/patterns/push-down-acc.html>.
(@accum $body:tt $(,)?) => {
$body.build()
};
($t:ident, $fmt:expr, $($arg:tt)+) => {
Into::<$crate::Error>::into(failure::err_msg(format!($fmt, $($arg)+))
.context($crate::ErrorKind::$t))
(@accum ($($body:tt)*), source($src:expr) $($tail:tt)*) => {
$crate::err!(@accum ($($body)*.source($src)) $($tail)*)
};
// msg(...) uses the `format!` form even when there's only the format string.
// This can catch errors (e.g. https://github.com/dtolnay/anyhow/issues/55)
// and will allow supporting implicit named parameters:
// https://rust-lang.github.io/rfcs/2795-format-args-implicit-identifiers.html
(@accum ($($body:tt)*), msg($format:expr) $($tail:tt)*) => {
$crate::err!(@accum ($($body)*.msg(format!($format))) $($tail)*)
};
(@accum ($($body:tt)*), msg($format:expr, $($args:tt)*) $($tail:tt)*) => {
$crate::err!(@accum ($($body)*.msg(format!($format, $($args)*))) $($tail)*)
};
($builder:expr $(, $($tail:tt)*)? ) => {
$crate::err!(@accum ({
use $crate::ErrorKind::*;
$crate::ErrorBuilder::from($builder)
})
, $($($tail)*)*
)
};
}

View File

@ -3,10 +3,10 @@
// SPDX-License-Identifier: GPL-v3.0-or-later WITH GPL-3.0-linking-exception.
pub mod clock;
mod error;
pub mod error;
pub mod shutdown;
pub mod strutil;
pub mod time;
pub mod tracing_setup;
pub use crate::error::{prettify_failure, Error, ErrorKind, ResultExt};
pub use crate::error::{Error, ErrorBuilder, ErrorKind, ResultExt};

View File

@ -4,7 +4,7 @@
//! Time and durations for Moonfire NVR's internal format.
use failure::{bail, format_err, Error};
use crate::{bail, err, Error};
use nom::branch::alt;
use nom::bytes::complete::{tag, take_while_m_n};
use nom::combinator::{map, map_res, opt};
@ -106,13 +106,16 @@ impl Time {
opt(parse_zone),
))(input)
.map_err(|e| match e {
nom::Err::Incomplete(_) => format_err!("incomplete"),
nom::Err::Incomplete(_) => err!(InvalidArgument, msg("incomplete")),
nom::Err::Error(e) | nom::Err::Failure(e) => {
format_err!("{}", nom::error::convert_error(input, e))
err!(InvalidArgument, source(nom::error::convert_error(input, e)))
}
})?;
if !remaining.is_empty() {
bail!("unexpected suffix {:?} following time string", remaining);
bail!(
InvalidArgument,
msg("unexpected suffix {remaining:?} following time string")
);
}
let (tm_hour, tm_min, tm_sec, subsec) = opt_time.unwrap_or((0, 0, 0, 0));
let mut tm = time::Tm {
@ -129,11 +132,11 @@ impl Time {
tm_nsec: 0,
};
if tm.tm_mon == 0 {
bail!("time {:?} has month 0", input);
bail!(InvalidArgument, msg("time {input:?} has month 0"));
}
tm.tm_mon -= 1;
if tm.tm_year < 1900 {
bail!("time {:?} has year before 1900", input);
bail!(InvalidArgument, msg("time {input:?} has year before 1900"));
}
tm.tm_year -= 1900;

View File

@ -21,7 +21,6 @@ blake3 = "1.0.0"
byteorder = "1.0"
cstr = "0.2.5"
diff = "0.1.12"
failure = "0.1.1"
fnv = "1.0"
futures = "0.3"
h264-reader = "0.6.0"

View File

@ -6,7 +6,7 @@
use crate::json::UserConfig;
use crate::schema::Permissions;
use base::{bail_t, format_err_t, strutil, Error, ErrorKind, ResultExt as _};
use base::{bail, err, strutil, Error, ErrorKind, ResultExt as _};
use fnv::FnvHashMap;
use protobuf::Message;
use ring::rand::{SecureRandom, SystemRandom};
@ -96,11 +96,10 @@ impl User {
_ => return Ok(false),
};
let hash = PasswordHash::new(hash).map_err(|e| {
format_err_t!(
err!(
DataLoss,
"bad stored password hash for user {:?}: {}",
self.username,
e,
msg("bad stored password hash for user {:?}", self.username),
source(e),
)
})?;
match scrypt::Scrypt.verify_password(password.as_bytes(), &hash) {
@ -110,11 +109,10 @@ impl User {
self.password_failure_count += 1;
Ok(false)
}
Err(e) => Err(format_err_t!(
Err(e) => Err(err!(
Internal,
"unable to verify password for user {:?}: {}",
self.username,
e
msg("unable to verify password for user {:?}", self.username),
source(e),
)),
}
}
@ -234,7 +232,7 @@ impl FromStr for SessionFlag {
"secure" => Ok(Self::Secure),
"same-site" => Ok(Self::SameSite),
"same-site-strict" => Ok(Self::SameSiteStrict),
_ => bail_t!(InvalidArgument, "No such session flag {:?}", s),
_ => bail!(InvalidArgument, msg("No such session flag {s:?}")),
}
}
}
@ -285,9 +283,9 @@ impl RawSessionId {
pub fn decode_base64(input: &[u8]) -> Result<Self, Error> {
let mut s = RawSessionId([0u8; 48]);
let l = ::base64::decode_config_slice(input, ::base64::STANDARD_NO_PAD, &mut s.0[..])
.map_err(|e| format_err_t!(InvalidArgument, "bad session id: {e}"))?;
.map_err(|e| err!(InvalidArgument, msg("bad session id"), source(e)))?;
if l != 48 {
bail_t!(InvalidArgument, "session id must be 48 bytes");
bail!(InvalidArgument, msg("session id must be 48 bytes"));
}
Ok(s)
}
@ -334,9 +332,9 @@ impl SessionHash {
pub fn decode_base64(input: &[u8]) -> Result<Self, Error> {
let mut h = SessionHash([0u8; 24]);
let l = ::base64::decode_config_slice(input, ::base64::STANDARD_NO_PAD, &mut h.0[..])
.map_err(|e| format_err_t!(InvalidArgument, "invalid session hash: {e}"))?;
.map_err(|e| err!(InvalidArgument, msg("invalid session hash"), source(e)))?;
if l != 24 {
bail_t!(InvalidArgument, "session hash must be 24 bytes");
bail!(InvalidArgument, msg("session hash must be 24 bytes"));
}
Ok(h)
}
@ -361,9 +359,10 @@ impl rusqlite::types::FromSql for Seed {
fn column_result(value: rusqlite::types::ValueRef) -> rusqlite::types::FromSqlResult<Self> {
let b = value.as_blob()?;
if b.len() != 32 {
return Err(rusqlite::types::FromSqlError::Other(Box::new(
format_err_t!(Internal, "expected a 32-byte seed").compat(),
)));
return Err(rusqlite::types::FromSqlError::Other(Box::new(err!(
Internal,
msg("expected a 32-byte seed")
))));
}
let mut s = Seed::default();
s.0.copy_from_slice(b);
@ -395,9 +394,8 @@ impl State {
sessions: FnvHashMap::default(),
rand: ring::rand::SystemRandom::new(),
};
let mut stmt = conn
.prepare(
r#"
let mut stmt = conn.prepare(
r#"
select
id,
username,
@ -409,30 +407,24 @@ impl State {
from
user
"#,
)
.err_kind(ErrorKind::Unknown)?;
let mut rows = stmt.query(params![]).err_kind(ErrorKind::Unknown)?;
while let Some(row) = rows.next().err_kind(ErrorKind::Unknown)? {
let id = row.get(0).err_kind(ErrorKind::Unknown)?;
let name: String = row.get(1).err_kind(ErrorKind::Unknown)?;
)?;
let mut rows = stmt.query(params![])?;
while let Some(row) = rows.next()? {
let id = row.get(0)?;
let name: String = row.get(1)?;
let mut permissions = Permissions::new();
permissions
.merge_from_bytes(
row.get_ref(6)
.err_kind(ErrorKind::Unknown)?
.as_blob()
.err_kind(ErrorKind::Unknown)?,
)
.err_kind(ErrorKind::Unknown)?;
.merge_from_bytes(row.get_ref(6)?.as_blob()?)
.err_kind(ErrorKind::DataLoss)?;
state.users_by_id.insert(
id,
User {
id,
username: name.clone(),
config: row.get(2).err_kind(ErrorKind::Unknown)?,
password_hash: row.get(3).err_kind(ErrorKind::Unknown)?,
password_id: row.get(4).err_kind(ErrorKind::Unknown)?,
password_failure_count: row.get(5).err_kind(ErrorKind::Unknown)?,
config: row.get(2)?,
password_hash: row.get(3)?,
password_id: row.get(4)?,
password_failure_count: row.get(5)?,
dirty: false,
permissions,
},
@ -464,9 +456,8 @@ impl State {
id: i32,
change: UserChange,
) -> Result<&User, base::Error> {
let mut stmt = conn
.prepare_cached(
r#"
let mut stmt = conn.prepare_cached(
r#"
update user
set
username = :username,
@ -478,8 +469,7 @@ impl State {
where
id = :id
"#,
)
.err_kind(ErrorKind::Unknown)?;
)?;
let e = self.users_by_id.entry(id);
let e = match e {
::std::collections::btree_map::Entry::Vacant(_) => panic!("missing uid {id}!"),
@ -505,8 +495,7 @@ impl State {
":config": &change.config,
":id": &id,
":permissions": &permissions,
})
.err_kind(ErrorKind::Unknown)?;
})?;
}
let u = e.into_mut();
if u.username != change.username {
@ -525,14 +514,12 @@ impl State {
}
fn add_user(&mut self, conn: &Connection, change: UserChange) -> Result<&User, base::Error> {
let mut stmt = conn
.prepare_cached(
r#"
let mut stmt = conn.prepare_cached(
r#"
insert into user (username, password_hash, config, permissions)
values (:username, :password_hash, :config, :permissions)
"#,
)
.err_kind(ErrorKind::Unknown)?;
)?;
let password_hash = change.set_password_hash.unwrap_or(None);
let permissions = change
.permissions
@ -543,8 +530,7 @@ impl State {
":password_hash": &password_hash,
":config": &change.config,
":permissions": &permissions,
})
.err_kind(ErrorKind::Unknown)?;
})?;
let id = conn.last_insert_rowid() as i32;
self.users_by_name.insert(change.username.clone(), id);
let e = self.users_by_id.entry(id);
@ -565,22 +551,15 @@ impl State {
}
pub fn delete_user(&mut self, conn: &mut Connection, id: i32) -> Result<(), base::Error> {
let tx = conn.transaction().err_kind(ErrorKind::Unknown)?;
tx.execute("delete from user_session where user_id = ?", params![id])
.err_kind(ErrorKind::Unknown)?;
let tx = conn.transaction()?;
tx.execute("delete from user_session where user_id = ?", params![id])?;
{
let mut user_stmt = tx
.prepare_cached("delete from user where id = ?")
.err_kind(ErrorKind::Unknown)?;
if user_stmt
.execute(params![id])
.err_kind(ErrorKind::Unknown)?
!= 1
{
bail_t!(NotFound, "user {} not found", id);
let mut user_stmt = tx.prepare_cached("delete from user where id = ?")?;
if user_stmt.execute(params![id])? != 1 {
bail!(NotFound, msg("user {id} not found"));
}
}
tx.commit().err_kind(ErrorKind::Unknown)?;
tx.commit()?;
let name = self.users_by_id.remove(&id).unwrap().username;
self.users_by_name
.remove(&name)
@ -609,16 +588,16 @@ impl State {
let id = self
.users_by_name
.get(username)
.ok_or_else(|| format_err_t!(Unauthenticated, "no such user {username:?}"))?;
.ok_or_else(|| err!(Unauthenticated, msg("no such user {username:?}")))?;
let u = self
.users_by_id
.get_mut(id)
.expect("users_by_name implies users_by_id");
if u.config.disabled {
bail_t!(Unauthenticated, "user {username:?} is disabled");
bail!(Unauthenticated, msg("user {username:?} is disabled"));
}
if !u.check_password(Some(&password))? {
bail_t!(Unauthenticated, "incorrect password");
bail!(Unauthenticated, msg("incorrect password"));
}
let password_id = u.password_id;
State::make_session_int(
@ -647,9 +626,9 @@ impl State {
let u = self
.users_by_id
.get_mut(&uid)
.ok_or_else(|| format_err_t!(NotFound, "no such uid {:?}", uid))?;
.ok_or_else(|| err!(NotFound, msg("no such uid {uid:?}")))?;
if u.config.disabled {
bail_t!(FailedPrecondition, "user is disabled");
bail!(FailedPrecondition, msg("user is disabled"));
}
State::make_session_int(
&self.rand,
@ -681,9 +660,8 @@ impl State {
let mut seed = [0u8; 32];
rand.fill(&mut seed).unwrap();
let hash = session_id.hash();
let mut stmt = conn
.prepare_cached(
r#"
let mut stmt = conn.prepare_cached(
r#"
insert into user_session (session_id_hash, user_id, seed, flags, domain,
creation_password_id, creation_time_sec,
creation_user_agent, creation_peer_addr,
@ -693,8 +671,7 @@ impl State {
:creation_user_agent, :creation_peer_addr,
:permissions)
"#,
)
.err_kind(ErrorKind::Unknown)?;
)?;
let addr = creation.addr_buf();
let addr: Option<&[u8]> = addr.as_ref().map(|a| a.as_ref());
let permissions_blob = permissions
@ -711,8 +688,7 @@ impl State {
":creation_user_agent": &creation.user_agent,
":creation_peer_addr": &addr,
":permissions": &permissions_blob,
})
.err_kind(ErrorKind::Unknown)?;
})?;
let e = match sessions.entry(hash) {
::std::collections::hash_map::Entry::Occupied(_) => panic!("duplicate session hash!"),
::std::collections::hash_map::Entry::Vacant(e) => e,
@ -749,17 +725,20 @@ impl State {
}
};
let u = match self.users_by_id.get(&s.user_id) {
None => bail_t!(Internal, "session references nonexistent user!"),
None => bail!(Internal, msg("session references nonexistent user!")),
Some(u) => u,
};
if let Some(r) = s.revocation_reason {
bail_t!(Unauthenticated, "session is no longer valid (reason={})", r);
bail!(
Unauthenticated,
msg("session is no longer valid (reason={r})")
);
}
s.last_use = req;
s.use_count += 1;
s.dirty = true;
if u.config.disabled {
bail_t!(Unauthenticated, "user {:?} is disabled", &u.username);
bail!(Unauthenticated, msg("user {:?} is disabled", &u.username));
}
Ok((s, u))
}
@ -777,9 +756,8 @@ impl State {
::std::collections::hash_map::Entry::Vacant(e) => e.insert(lookup_session(conn, hash)?),
};
if s.revocation_reason.is_none() {
let mut stmt = conn
.prepare(
r#"
let mut stmt = conn.prepare(
r#"
update user_session
set
revocation_time_sec = ?,
@ -790,8 +768,7 @@ impl State {
where
session_id_hash = ?
"#,
)
.err_kind(ErrorKind::Unknown)?;
)?;
let addr = req.addr_buf();
let addr: Option<&[u8]> = addr.as_ref().map(|a| a.as_ref());
stmt.execute(params![
@ -801,8 +778,7 @@ impl State {
reason as i32,
detail,
&hash.0[..],
])
.err_kind(ErrorKind::Unknown)?;
])?;
s.revocation = req;
s.revocation_reason = Some(reason as i32);
}
@ -814,9 +790,8 @@ impl State {
/// The caller is expected to call `post_flush` afterward if the transaction is
/// successfully committed.
pub fn flush(&self, tx: &Transaction) -> Result<(), Error> {
let mut u_stmt = tx
.prepare(
r#"
let mut u_stmt = tx.prepare(
r#"
update user
set
password_failure_count = :password_failure_count,
@ -824,11 +799,9 @@ impl State {
where
id = :id
"#,
)
.err_kind(ErrorKind::Unknown)?;
let mut s_stmt = tx
.prepare(
r#"
)?;
let mut s_stmt = tx.prepare(
r#"
update user_session
set
last_use_time_sec = :last_use_time_sec,
@ -838,8 +811,7 @@ impl State {
where
session_id_hash = :hash
"#,
)
.err_kind(ErrorKind::Unknown)?;
)?;
for (&id, u) in &self.users_by_id {
if !u.dirty {
continue;
@ -848,13 +820,11 @@ impl State {
"flushing user with hash: {}",
u.password_hash.as_ref().unwrap()
);
u_stmt
.execute(named_params! {
":password_failure_count": &u.password_failure_count,
":password_hash": &u.password_hash,
":id": &id,
})
.err_kind(ErrorKind::Unknown)?;
u_stmt.execute(named_params! {
":password_failure_count": &u.password_failure_count,
":password_hash": &u.password_hash,
":id": &id,
})?;
}
for (sh, s) in &self.sessions {
if !s.dirty {
@ -862,15 +832,13 @@ impl State {
}
let addr = s.last_use.addr_buf();
let addr: Option<&[u8]> = addr.as_ref().map(|a| a.as_ref());
let cnt = s_stmt
.execute(named_params! {
":last_use_time_sec": &s.last_use.when_sec,
":last_use_user_agent": &s.last_use.user_agent,
":last_use_peer_addr": &addr,
":use_count": &s.use_count,
":hash": &sh.0[..],
})
.err_kind(ErrorKind::Unknown)?;
let cnt = s_stmt.execute(named_params! {
":last_use_time_sec": &s.last_use.when_sec,
":last_use_user_agent": &s.last_use.user_agent,
":last_use_peer_addr": &addr,
":use_count": &s.use_count,
":hash": &sh.0[..],
})?;
debug_assert_eq!(cnt, 1);
}
Ok(())
@ -890,9 +858,8 @@ impl State {
}
fn lookup_session(conn: &Connection, hash: &SessionHash) -> Result<Session, base::Error> {
let mut stmt = conn
.prepare_cached(
r#"
let mut stmt = conn.prepare_cached(
r#"
select
user_id,
seed,
@ -918,52 +885,43 @@ fn lookup_session(conn: &Connection, hash: &SessionHash) -> Result<Session, base
where
session_id_hash = ?
"#,
)
.err_kind(ErrorKind::Unknown)?;
let mut rows = stmt
.query(params![&hash.0[..]])
.err_kind(ErrorKind::Unknown)?;
)?;
let mut rows = stmt.query(params![&hash.0[..]])?;
let row = rows
.next()
.err_kind(ErrorKind::Unknown)?
.ok_or_else(|| format_err_t!(NotFound, "no such session"))?;
let creation_addr: FromSqlIpAddr = row.get(8).err_kind(ErrorKind::Unknown)?;
let revocation_addr: FromSqlIpAddr = row.get(11).err_kind(ErrorKind::Unknown)?;
let last_use_addr: FromSqlIpAddr = row.get(16).err_kind(ErrorKind::Unknown)?;
.next()?
.ok_or_else(|| err!(NotFound, msg("no such session")))?;
let creation_addr: FromSqlIpAddr = row.get(8)?;
let revocation_addr: FromSqlIpAddr = row.get(11)?;
let last_use_addr: FromSqlIpAddr = row.get(16)?;
let mut permissions = Permissions::new();
permissions
.merge_from_bytes(
row.get_ref(18)
.err_kind(ErrorKind::Unknown)?
.as_blob()
.err_kind(ErrorKind::Unknown)?,
)
.err_kind(ErrorKind::Internal)?;
.merge_from_bytes(row.get_ref(18)?.as_blob()?)
.err_kind(ErrorKind::DataLoss)?;
Ok(Session {
user_id: row.get(0).err_kind(ErrorKind::Unknown)?,
seed: row.get(1).err_kind(ErrorKind::Unknown)?,
flags: row.get(2).err_kind(ErrorKind::Unknown)?,
domain: row.get(3).err_kind(ErrorKind::Unknown)?,
description: row.get(4).err_kind(ErrorKind::Unknown)?,
creation_password_id: row.get(5).err_kind(ErrorKind::Unknown)?,
user_id: row.get(0)?,
seed: row.get(1)?,
flags: row.get(2)?,
domain: row.get(3)?,
description: row.get(4)?,
creation_password_id: row.get(5)?,
creation: Request {
when_sec: row.get(6).err_kind(ErrorKind::Unknown)?,
user_agent: row.get(7).err_kind(ErrorKind::Unknown)?,
when_sec: row.get(6)?,
user_agent: row.get(7)?,
addr: creation_addr.0,
},
revocation: Request {
when_sec: row.get(9).err_kind(ErrorKind::Unknown)?,
user_agent: row.get(10).err_kind(ErrorKind::Unknown)?,
when_sec: row.get(9)?,
user_agent: row.get(10)?,
addr: revocation_addr.0,
},
revocation_reason: row.get(12).err_kind(ErrorKind::Unknown)?,
revocation_reason_detail: row.get(13).err_kind(ErrorKind::Unknown)?,
revocation_reason: row.get(12)?,
revocation_reason_detail: row.get(13)?,
last_use: Request {
when_sec: row.get(14).err_kind(ErrorKind::Unknown)?,
user_agent: row.get(15).err_kind(ErrorKind::Unknown)?,
when_sec: row.get(14)?,
user_agent: row.get(15)?,
addr: last_use_addr.0,
},
use_count: row.get(17).err_kind(ErrorKind::Unknown)?,
use_count: row.get(17)?,
dirty: false,
permissions,
})
@ -1014,7 +972,8 @@ mod tests {
0,
)
.unwrap_err();
assert_eq!(format!("{e}"), "Unauthenticated: incorrect password");
assert_eq!(e.kind(), ErrorKind::Unauthenticated);
assert_eq!(e.msg().unwrap(), "incorrect password");
c.set_password("hunter2".to_owned());
state.apply(&conn, c).unwrap();
let e = state
@ -1027,7 +986,8 @@ mod tests {
0,
)
.unwrap_err();
assert_eq!(format!("{e}"), "Unauthenticated: incorrect password");
assert_eq!(e.kind(), ErrorKind::Unauthenticated);
assert_eq!(e.msg().unwrap(), "incorrect password");
let sid = {
let (sid, s) = state
.login_by_password(
@ -1061,10 +1021,7 @@ mod tests {
let e = state
.authenticate_session(&conn, req.clone(), &sid.hash())
.unwrap_err();
assert_eq!(
format!("{e}"),
"Unauthenticated: session is no longer valid (reason=1)"
);
assert_eq!(e.msg().unwrap(), "session is no longer valid (reason=1)");
// Everything should persist across reload.
drop(state);
@ -1072,10 +1029,7 @@ mod tests {
let e = state
.authenticate_session(&conn, req, &sid.hash())
.unwrap_err();
assert_eq!(
format!("{e}"),
"Unauthenticated: session is no longer valid (reason=1)"
);
assert_eq!(e.msg().unwrap(), "session is no longer valid (reason=1)");
}
/// Tests that flush works, including updating dirty sessions.
@ -1173,10 +1127,8 @@ mod tests {
let e = state
.authenticate_session(&conn, req, &sid.hash())
.unwrap_err();
assert_eq!(
format!("{e}"),
"Unauthenticated: session is no longer valid (reason=1)"
);
assert_eq!(e.kind(), ErrorKind::Unauthenticated);
assert_eq!(e.msg().unwrap(), "session is no longer valid (reason=1)");
}
#[test]
@ -1229,16 +1181,15 @@ mod tests {
0,
)
.unwrap_err();
assert_eq!(e.to_string(), "Unauthenticated: user \"slamb\" is disabled");
assert_eq!(e.kind(), ErrorKind::Unauthenticated);
assert_eq!(e.msg().unwrap(), "user \"slamb\" is disabled");
// Authenticating existing sessions shouldn't work either.
let e = state
.authenticate_session(&conn, req.clone(), &sid.hash())
.unwrap_err();
assert_eq!(
format!("{e}"),
"Unauthenticated: user \"slamb\" is disabled"
);
assert_eq!(e.kind(), ErrorKind::Unauthenticated);
assert_eq!(e.msg().unwrap(), "user \"slamb\" is disabled");
// The user should still be disabled after reload.
drop(state);
@ -1246,10 +1197,8 @@ mod tests {
let e = state
.authenticate_session(&conn, req, &sid.hash())
.unwrap_err();
assert_eq!(
format!("{e}"),
"Unauthenticated: user \"slamb\" is disabled"
);
assert_eq!(e.kind(), ErrorKind::Unauthenticated);
assert_eq!(e.msg().unwrap(), "user \"slamb\" is disabled");
}
#[test]
@ -1309,7 +1258,8 @@ mod tests {
let e = state
.authenticate_session(&conn, req.clone(), &sid.hash())
.unwrap_err();
assert_eq!(format!("{e}"), "Unauthenticated: no such session");
assert_eq!(e.kind(), ErrorKind::Unauthenticated);
assert_eq!(e.msg().unwrap(), "no such session");
// The user should still be deleted after reload.
drop(state);
@ -1318,7 +1268,8 @@ mod tests {
let e = state
.authenticate_session(&conn, req, &sid.hash())
.unwrap_err();
assert_eq!(format!("{e}"), "Unauthenticated: no such session");
assert_eq!(e.kind(), ErrorKind::Unauthenticated);
assert_eq!(e.msg().unwrap(), "no such session");
}
#[test]

View File

@ -11,7 +11,7 @@ use crate::json::SampleFileDirConfig;
use crate::raw;
use crate::recording;
use crate::schema;
use failure::Error;
use base::{err, Error};
use fnv::{FnvHashMap, FnvHashSet};
use nix::fcntl::AtFlags;
use rusqlite::params;
@ -104,7 +104,7 @@ pub fn run(conn: &mut rusqlite::Connection, opts: &Options) -> Result<i32, Error
// Open the directory (checking its metadata) and hold it open (for the lock).
let dir = dir::SampleFileDir::open(&config.path, &meta)
.map_err(|e| e.context(format!("unable to open dir {}", config.path.display())))?;
.map_err(|e| err!(e, msg("unable to open dir {}", config.path.display())))?;
let mut streams = read_dir(&dir, opts)?;
let mut rows = garbage_stmt.query(params![dir_id])?;
while let Some(row) = rows.next()? {

View File

@ -6,7 +6,7 @@
//! This is used as part of the `moonfire-nvr check` database integrity checking
//! and for tests of `moonfire-nvr upgrade`.
use failure::Error;
use base::Error;
use rusqlite::params;
use std::fmt::Write;
@ -168,7 +168,8 @@ pub fn get_diffs(
let tables1 = get_tables(c1)?;
let tables2 = get_tables(c2)?;
if let Some(diff) = diff_slices(n1, &tables1[..], n2, &tables2[..]) {
write!(&mut diffs, "table list mismatch, {n1} vs {n2}:\n{diff}")?;
write!(&mut diffs, "table list mismatch, {n1} vs {n2}:\n{diff}")
.expect("write to String shouldn't fail");
}
// Compare columns and indices for each table.
@ -176,7 +177,8 @@ pub fn get_diffs(
let columns1 = get_table_columns(c1, t)?;
let columns2 = get_table_columns(c2, t)?;
if let Some(diff) = diff_slices(n1, &columns1[..], n2, &columns2[..]) {
write!(&mut diffs, "table {t:?} column, {n1} vs {n2}:\n{diff}")?;
write!(&mut diffs, "table {t:?} column, {n1} vs {n2}:\n{diff}")
.expect("write to String shouldn't fail");
}
let mut indices1 = get_indices(c1, t)?;
@ -184,7 +186,8 @@ pub fn get_diffs(
indices1.sort_by(|a, b| a.name.cmp(&b.name));
indices2.sort_by(|a, b| a.name.cmp(&b.name));
if let Some(diff) = diff_slices(n1, &indices1[..], n2, &indices2[..]) {
write!(&mut diffs, "table {t:?} indices, {n1} vs {n2}:\n{diff}")?;
write!(&mut diffs, "table {t:?} indices, {n1} vs {n2}:\n{diff}")
.expect("write to String shouldn't fail");
}
for i in &indices1 {
@ -194,7 +197,8 @@ pub fn get_diffs(
write!(
&mut diffs,
"table {t:?} index {i:?} columns {n1} vs {n2}:\n{diff}"
)?;
)
.expect("write to String shouldn't fail");
}
}
}

View File

@ -5,7 +5,7 @@
//! In-memory indexes by calendar day.
use base::time::{Duration, Time, TIME_UNITS_PER_SEC};
use failure::Error;
use base::{err, Error};
use smallvec::SmallVec;
use std::cmp;
use std::collections::BTreeMap;
@ -22,7 +22,12 @@ pub struct Key(pub(crate) [u8; 10]);
impl Key {
fn new(tm: time::Tm) -> Result<Self, Error> {
let mut s = Key([0u8; 10]);
write!(&mut s.0[..], "{}", tm.strftime("%Y-%m-%d")?)?;
write!(
&mut s.0[..],
"{}",
tm.strftime("%Y-%m-%d")
.map_err(|e| err!(Internal, source(e)))?
)?;
Ok(s)
}

View File

@ -34,11 +34,10 @@ use crate::raw;
use crate::recording;
use crate::schema;
use crate::signal;
use base::bail_t;
use base::clock::{self, Clocks};
use base::format_err_t;
use base::strutil::encode_size;
use failure::{bail, format_err, Error, ResultExt};
use base::{bail, err, Error};
// use failure::{bail, err, Error, ResultExt};
use fnv::{FnvHashMap, FnvHashSet};
use hashlink::LinkedHashMap;
use itertools::Itertools;
@ -47,7 +46,6 @@ use smallvec::SmallVec;
use std::cell::RefCell;
use std::cmp;
use std::collections::{BTreeMap, VecDeque};
use std::convert::TryInto;
use std::fmt::Write as _;
use std::mem;
use std::ops::Range;
@ -344,7 +342,10 @@ impl SampleFileDir {
.dir
.as_ref()
.ok_or_else(|| {
format_err_t!(FailedPrecondition, "sample file dir {} is closed", self.id)
err!(
FailedPrecondition,
msg("sample file dir {} is closed", self.id)
)
})?
.clone())
}
@ -696,10 +697,13 @@ impl StreamStateChanger {
s.sample_file_dir_id == sc.sample_file_dir_id,
) {
bail!(
"can't change sample_file_dir_id {:?}->{:?} for non-empty stream {}",
d,
sc.sample_file_dir_id,
sid
FailedPrecondition,
msg(
"can't change sample_file_dir_id {:?}->{:?} for non-empty stream {}",
d,
sc.sample_file_dir_id,
sid,
),
);
}
}
@ -711,7 +715,7 @@ impl StreamStateChanger {
"#,
)?;
if stmt.execute(params![sid])? != 1 {
bail!("missing stream {}", sid);
bail!(Internal, msg("missing stream {sid}"));
}
streams.push((sid, None));
} else {
@ -731,7 +735,7 @@ impl StreamStateChanger {
":id": sid,
})?;
if rows != 1 {
bail!("missing stream {}", sid);
bail!(Internal, msg("missing stream {sid}"));
}
sids[i] = Some(sid);
streams.push((
@ -872,7 +876,7 @@ impl LockedDatabase {
mut r: RecordingToInsert,
) -> Result<(CompositeId, Arc<Mutex<RecordingToInsert>>), Error> {
let stream = match self.streams_by_id.get_mut(&stream_id) {
None => bail!("no such stream {}", stream_id),
None => bail!(FailedPrecondition, msg("no such stream {stream_id}")),
Some(s) => s,
};
let id = CompositeId::new(
@ -900,20 +904,26 @@ impl LockedDatabase {
/// This must be the next unsynced recording.
pub(crate) fn mark_synced(&mut self, id: CompositeId) -> Result<(), Error> {
let stream = match self.streams_by_id.get_mut(&id.stream()) {
None => bail!("no stream for recording {}", id),
None => bail!(FailedPrecondition, msg("no stream for recording {id}")),
Some(s) => s,
};
let next_unsynced = stream.cum_recordings + (stream.synced_recordings as i32);
if id.recording() != next_unsynced {
bail!(
"can't sync {} when next unsynced recording is {} (next unflushed is {})",
id,
next_unsynced,
stream.cum_recordings
FailedPrecondition,
msg(
"can't sync {} when next unsynced recording is {} (next unflushed is {})",
id,
next_unsynced,
stream.cum_recordings,
),
);
}
if stream.synced_recordings == stream.uncommitted.len() {
bail!("can't sync un-added recording {}", id);
bail!(
FailedPrecondition,
msg("can't sync un-added recording {id}")
);
}
let l = stream.uncommitted[stream.synced_recordings].lock().unwrap();
let bytes = i64::from(l.sample_file_bytes);
@ -929,7 +939,7 @@ impl LockedDatabase {
ids: &mut Vec<CompositeId>,
) -> Result<(), Error> {
let dir = match self.sample_file_dirs_by_id.get_mut(&dir_id) {
None => bail!("no such dir {}", dir_id),
None => bail!(FailedPrecondition, msg("no such dir {dir_id}")),
Some(d) => d,
};
dir.garbage_unlinked.reserve(ids.len());
@ -941,7 +951,10 @@ impl LockedDatabase {
false
});
if !ids.is_empty() {
bail!("delete_garbage with non-garbage ids {:?}", &ids[..]);
bail!(
FailedPrecondition,
msg("delete_garbage with non-garbage ids {:?}", &ids[..])
);
}
Ok(())
}
@ -955,7 +968,7 @@ impl LockedDatabase {
cb: Box<dyn FnMut(LiveSegment) -> bool + Send>,
) -> Result<(), Error> {
let s = match self.streams_by_id.get_mut(&stream_id) {
None => bail!("no such stream {}", stream_id),
None => bail!(NotFound, msg("no such stream {stream_id}")),
Some(s) => s,
};
s.on_live_segment.push(cb);
@ -975,7 +988,7 @@ impl LockedDatabase {
pub(crate) fn send_live_segment(&mut self, stream: i32, l: LiveSegment) -> Result<(), Error> {
let s = match self.streams_by_id.get_mut(&stream) {
None => bail!("no such stream {}", stream),
None => bail!(Internal, msg("no such stream {stream}")),
Some(s) => s,
};
@ -992,7 +1005,7 @@ impl LockedDatabase {
let span = tracing::info_span!("flush", flush_count = self.flush_count, reason);
let _enter = span.enter();
let o = match self.open.as_ref() {
None => bail!("database is read-only"),
None => bail!(Internal, msg("database is read-only")),
Some(o) => o,
};
let tx = self.conn.transaction()?;
@ -1029,7 +1042,7 @@ impl LockedDatabase {
if let Some(l) = s.to_delete.last() {
new_ranges.entry(stream_id).or_insert(None);
let dir = match s.sample_file_dir_id {
None => bail!("stream {} has no directory!", stream_id),
None => bail!(Internal, msg("stream {stream_id} has no directory!")),
Some(d) => d,
};
@ -1042,12 +1055,15 @@ impl LockedDatabase {
let n = raw::delete_recordings(&tx, dir, start..end)?;
if n != s.to_delete.len() {
bail!(
"Found {} rows in {} .. {}, expected {}: {:?}",
n,
start,
end,
s.to_delete.len(),
&s.to_delete
Internal,
msg(
"Found {} rows in {} .. {}, expected {}: {:?}",
n,
start,
end,
s.to_delete.len(),
&s.to_delete,
),
);
}
}
@ -1069,7 +1085,7 @@ impl LockedDatabase {
o.id,
])?;
if rows != 1 {
bail!("unable to find current open {}", o.id);
bail!(Internal, msg("unable to find current open {}", o.id));
}
}
self.auth.flush(&tx)?;
@ -1209,7 +1225,7 @@ impl LockedDatabase {
let dir = self
.sample_file_dirs_by_id
.get_mut(&id)
.ok_or_else(|| format_err!("no such dir {}", id))?;
.ok_or_else(|| err!(NotFound, msg("no such dir {id}")))?;
if dir.dir.is_some() {
continue;
}
@ -1220,7 +1236,7 @@ impl LockedDatabase {
open.uuid.extend_from_slice(&o.uuid.as_bytes()[..]);
}
let d = dir::SampleFileDir::open(&dir.path, &expected_meta)
.map_err(|e| e.context(format!("Failed to open dir {}", dir.path.display())))?;
.map_err(|e| err!(e, msg("Failed to open dir {}", dir.path.display())))?;
if self.open.is_none() {
// read-only mode; it's already fully opened.
dir.dir = Some(d);
@ -1244,7 +1260,7 @@ impl LockedDatabase {
)?;
for &id in in_progress.keys() {
if stmt.execute(params![o.id, id])? != 1 {
bail!("unable to update dir {}", id);
bail!(Internal, msg("unable to update dir {id}"));
}
}
}
@ -1290,7 +1306,7 @@ impl LockedDatabase {
f: &mut dyn FnMut(ListRecordingsRow) -> Result<(), base::Error>,
) -> Result<(), base::Error> {
let s = match self.streams_by_id.get(&stream_id) {
None => bail_t!(NotFound, "no such stream {}", stream_id),
None => bail!(NotFound, msg("no such stream {stream_id}")),
Some(s) => s,
};
raw::list_recordings_by_time(&self.conn, stream_id, desired_time.clone(), f)?;
@ -1323,7 +1339,7 @@ impl LockedDatabase {
f: &mut dyn FnMut(ListRecordingsRow) -> Result<(), base::Error>,
) -> Result<(), base::Error> {
let s = match self.streams_by_id.get(&stream_id) {
None => bail_t!(NotFound, "no such stream {}", stream_id),
None => bail!(NotFound, msg("no such stream {stream_id}")),
Some(s) => s,
};
if desired_ids.start < s.cum_recordings {
@ -1400,25 +1416,29 @@ impl LockedDatabase {
} else {
// append.
if a.time.end != row.start {
bail_t!(
bail!(
Internal,
"stream {} recording {} ends at {} but {} starts at {}",
stream_id,
a.ids.end - 1,
a.time.end,
row.id,
row.start
msg(
"stream {} recording {} ends at {} but {} starts at {}",
stream_id,
a.ids.end - 1,
a.time.end,
row.id,
row.start,
),
);
}
if a.open_id != row.open_id {
bail_t!(
bail!(
Internal,
"stream {} recording {} has open id {} but {} has {}",
stream_id,
a.ids.end - 1,
a.open_id,
row.id,
row.open_id
msg(
"stream {} recording {} has open id {} but {} has {}",
stream_id,
a.ids.end - 1,
a.open_id,
row.id,
row.open_id,
),
);
}
a.time.end.0 += row.wall_duration_90k as i64;
@ -1457,15 +1477,18 @@ impl LockedDatabase {
let s = self
.streams_by_id
.get(&id.stream())
.ok_or_else(|| format_err!("no stream for {}", id))?;
.ok_or_else(|| err!(Internal, msg("no stream for {}", id)))?;
if s.cum_recordings <= id.recording() {
let i = id.recording() - s.cum_recordings;
if i as usize >= s.uncommitted.len() {
bail!(
"no such recording {}; latest committed is {}, latest is {}",
id,
s.cum_recordings,
s.cum_recordings + s.uncommitted.len() as i32
Internal,
msg(
"no such recording {}; latest committed is {}, latest is {}",
id,
s.cum_recordings,
s.cum_recordings + s.uncommitted.len() as i32,
),
);
}
let l = s.uncommitted[i as usize].lock().unwrap();
@ -1499,7 +1522,7 @@ impl LockedDatabase {
}
return result;
}
Err(format_err!("no such recording {}", id))
Err(err!(Internal, msg("no such recording {id}")))
}
}
}
@ -1512,7 +1535,7 @@ impl LockedDatabase {
f: &mut dyn FnMut(&ListOldestRecordingsRow) -> bool,
) -> Result<(), Error> {
let s = match self.streams_by_id.get_mut(&stream_id) {
None => bail!("no stream {}", stream_id),
None => bail!(Internal, msg("no stream {stream_id}")),
Some(s) => s,
};
let end = match s.to_delete.last() {
@ -1552,15 +1575,18 @@ impl LockedDatabase {
while let Some(row) = rows.next()? {
let id = row.get(0)?;
let data: Vec<u8> = row.get(6)?;
let get_and_cvt = |i: usize| {
let raw = row.get::<_, i32>(i)?;
u16::try_from(raw).map_err(|e| err!(OutOfRange, source(e)))
};
self.video_sample_entries_by_id.insert(
id,
Arc::new(VideoSampleEntry {
id,
width: row.get::<_, i32>(1)?.try_into()?,
height: row.get::<_, i32>(2)?.try_into()?,
pasp_h_spacing: row.get::<_, i32>(3)?.try_into()?,
pasp_v_spacing: row.get::<_, i32>(4)?.try_into()?,
width: get_and_cvt(1)?,
height: get_and_cvt(2)?,
pasp_h_spacing: get_and_cvt(3)?,
pasp_v_spacing: get_and_cvt(4)?,
data,
rfc6381_codec: row.get(5)?,
}),
@ -1599,7 +1625,7 @@ impl LockedDatabase {
let last_complete_open = match (open_id, open_uuid) {
(Some(id), Some(uuid)) => Some(Open { id, uuid: uuid.0 }),
(None, None) => None,
_ => bail!("open table missing id {}", id),
_ => bail!(Internal, msg("open table missing id {id}")),
};
self.sample_file_dirs_by_id.insert(
id,
@ -1680,12 +1706,12 @@ impl LockedDatabase {
let id = row.get(0)?;
let type_: String = row.get(1)?;
let type_ = StreamType::parse(&type_)
.ok_or_else(|| format_err!("no such stream type {}", type_))?;
.ok_or_else(|| err!(DataLoss, msg("no such stream type {type_}")))?;
let camera_id = row.get(2)?;
let c = self
.cameras_by_id
.get_mut(&camera_id)
.ok_or_else(|| format_err!("missing camera {} for stream {}", camera_id, id))?;
.ok_or_else(|| err!(DataLoss, msg("missing camera {camera_id} for stream {id}")))?;
self.streams_by_id.insert(
id,
Stream {
@ -1735,10 +1761,8 @@ impl LockedDatabase {
|| v.pasp_v_spacing != entry.pasp_v_spacing
{
bail!(
"video_sample_entry id {}: existing entry {:?}, new {:?}",
id,
v,
&entry
Internal,
msg("video_sample_entry id {id}: existing entry {v:?}, new {entry:?}"),
);
}
return Ok(id);
@ -1754,7 +1778,7 @@ impl LockedDatabase {
":rfc6381_codec": &entry.rfc6381_codec,
":data": &entry.data,
})
.map_err(|e| Error::from(e).context(format!("Unable to insert {:#?}", &entry)))?;
.map_err(|e| err!(e, msg("Unable to insert {entry:#?}")))?;
let id = self.conn.last_insert_rowid() as i32;
self.video_sample_entries_by_id.insert(
@ -1780,7 +1804,7 @@ impl LockedDatabase {
let o = self
.open
.as_ref()
.ok_or_else(|| format_err!("database is read-only"))?;
.ok_or_else(|| err!(FailedPrecondition, msg("database is read-only")))?;
// Populate meta.
{
@ -1816,7 +1840,7 @@ impl LockedDatabase {
garbage_needs_unlink: FnvHashSet::default(),
garbage_unlinked: Vec::new(),
}),
Entry::Occupied(_) => bail!("duplicate sample file dir id {}", id),
Entry::Occupied(_) => bail!(Internal, msg("duplicate sample file dir id {id}")),
};
meta.last_complete_open = meta.in_progress_open.take().into();
d.dir.as_ref().unwrap().write_meta(&meta)?;
@ -1826,17 +1850,23 @@ impl LockedDatabase {
pub fn delete_sample_file_dir(&mut self, dir_id: i32) -> Result<(), Error> {
for (&id, s) in self.streams_by_id.iter() {
if s.sample_file_dir_id == Some(dir_id) {
bail!("can't delete dir referenced by stream {}", id);
bail!(
FailedPrecondition,
msg("can't delete dir referenced by stream {id}")
);
}
}
let mut d = match self.sample_file_dirs_by_id.entry(dir_id) {
::std::collections::btree_map::Entry::Occupied(e) => e,
_ => bail!("no such dir {} to remove", dir_id),
_ => bail!(NotFound, msg("no such dir {dir_id} to remove")),
};
if !d.get().garbage_needs_unlink.is_empty() || !d.get().garbage_unlinked.is_empty() {
bail!(
"must collect garbage before deleting directory {}",
d.get().path.display()
FailedPrecondition,
msg(
"must collect garbage before deleting directory {}",
d.get().path.display(),
),
);
}
let dir = match d.get_mut().dir.take() {
@ -1847,17 +1877,19 @@ impl LockedDatabase {
// a writer::Syncer also has a reference.
d.get_mut().dir = Some(arc); // put it back.
bail!(
"can't delete directory {} with active syncer (refcnt {}",
dir_id,
c
FailedPrecondition,
msg("can't delete directory {dir_id} with active syncer (refcnt {c})"),
);
}
},
};
if !dir.is_empty()? {
bail!(
"Can't delete sample file directory {} which still has files",
&d.get().path.display()
FailedPrecondition,
msg(
"can't delete sample file directory {} which still has files",
&d.get().path.display(),
),
);
}
let mut meta = d.get().expected_meta(&self.uuid);
@ -1868,7 +1900,7 @@ impl LockedDatabase {
.execute("delete from sample_file_dir where id = ?", params![dir_id])?
!= 1
{
bail!("missing database row for dir {}", dir_id);
bail!(Internal, msg("missing database row for dir {dir_id}"));
}
d.remove_entry();
Ok(())
@ -1919,10 +1951,9 @@ impl LockedDatabase {
/// TODO: consider renaming this to `update_camera` and creating a bulk
/// `apply_camera_changes`.
pub fn null_camera_change(&mut self, camera_id: i32) -> Result<CameraChange, Error> {
let camera = self
.cameras_by_id
.get(&camera_id)
.ok_or_else(|| format_err!("no such camera {}", camera_id))?;
let Some(camera) = self.cameras_by_id.get(&camera_id) else {
bail!(Internal, msg("no such camera {camera_id}"));
};
let mut change = CameraChange {
short_name: camera.short_name.clone(),
config: camera.config.clone(),
@ -1947,10 +1978,9 @@ impl LockedDatabase {
pub fn update_camera(&mut self, camera_id: i32, mut camera: CameraChange) -> Result<(), Error> {
let tx = self.conn.transaction()?;
let streams;
let c = self
.cameras_by_id
.get_mut(&camera_id)
.ok_or_else(|| format_err!("no such camera {}", camera_id))?;
let Some(c) = self.cameras_by_id.get_mut(&camera_id) else {
bail!(Internal, msg("no such camera {camera_id}"));
};
{
streams =
StreamStateChanger::new(&tx, camera_id, Some(c), &self.streams_by_id, &mut camera)?;
@ -1969,7 +1999,7 @@ impl LockedDatabase {
":config": &camera.config,
})?;
if rows != 1 {
bail!("Camera {} missing from database", camera_id);
bail!(Internal, msg("camera {camera_id} missing from database"));
}
}
tx.commit()?;
@ -1982,11 +2012,9 @@ impl LockedDatabase {
/// Deletes a camera and its streams. The camera must have no recordings.
pub fn delete_camera(&mut self, id: i32) -> Result<(), Error> {
// TODO: also verify there are no uncommitted recordings.
let uuid = self
.cameras_by_id
.get(&id)
.map(|c| c.uuid)
.ok_or_else(|| format_err!("No such camera {} to remove", id))?;
let Some(uuid) = self.cameras_by_id.get(&id).map(|c| c.uuid) else {
bail!(NotFound, msg("no such camera {id}"));
};
let mut streams_to_delete = Vec::new();
let tx = self.conn.transaction()?;
{
@ -1996,18 +2024,21 @@ impl LockedDatabase {
continue;
};
if stream.range.is_some() {
bail!("Can't remove camera {}; has recordings.", id);
bail!(
FailedPrecondition,
msg("can't remove camera {id}; has recordings")
);
}
let rows = stream_stmt.execute(named_params! {":id": stream_id})?;
if rows != 1 {
bail!("Stream {} missing from database", id);
bail!(Internal, msg("stream {id} missing from database"));
}
streams_to_delete.push(*stream_id);
}
let mut cam_stmt = tx.prepare_cached(r"delete from camera where id = :id")?;
let rows = cam_stmt.execute(named_params! {":id": id})?;
if rows != 1 {
bail!("Camera {} missing from database", id);
bail!(Internal, msg("camera {id} missing from database"));
}
}
tx.commit()?;
@ -2035,10 +2066,9 @@ impl LockedDatabase {
"#,
)?;
for c in changes {
let stream = self
.streams_by_id
.get(&c.stream_id)
.ok_or_else(|| format_err!("no such stream id {}", c.stream_id))?;
let Some(stream) = self.streams_by_id.get(&c.stream_id) else {
bail!(Internal, msg("no such stream {}", c.stream_id));
};
let mut new_config = stream.config.clone();
new_config.mode = (if c.new_record { "record" } else { "" }).into();
new_config.retain_bytes = c.new_limit;
@ -2179,8 +2209,11 @@ pub(crate) fn check_sqlite_version() -> Result<(), Error> {
// https://www.sqlite.org/withoutrowid.html
if rusqlite::version_number() < 3008002 {
bail!(
"SQLite version {} is too old; need at least 3.8.2",
rusqlite::version()
FailedPrecondition,
msg(
"SQLite version {} is too old; need at least 3.8.2",
rusqlite::version()
),
);
}
Ok(())
@ -2194,7 +2227,7 @@ pub fn init(conn: &mut rusqlite::Connection) -> Result<(), Error> {
set_integrity_pragmas(conn)?;
let tx = conn.transaction()?;
tx.execute_batch(include_str!("schema.sql"))
.context("unable to create database schema")?;
.map_err(|e| err!(e, msg("unable to create database schema")))?;
{
let uuid = ::uuid::Uuid::new_v4();
let uuid_bytes = &uuid.as_bytes()[..];
@ -2228,7 +2261,9 @@ pub fn get_schema_version(conn: &rusqlite::Connection) -> Result<Option<i32>, Er
fn get_boot_uuid() -> Result<Option<Uuid>, Error> {
if cfg!(target_os = "linux") {
let boot_id = std::fs::read_to_string("/proc/sys/kernel/random/boot_id")?;
Ok(Some(Uuid::parse_str(boot_id.trim_end())?))
Ok(Some(Uuid::parse_str(boot_id.trim_end()).map_err(|e| {
err!(Internal, msg("boot_id is not a valid uuid"), source(e))
})?))
} else {
Ok(None) // don't complain about lack of platform support; just return None.
}
@ -2236,30 +2271,33 @@ fn get_boot_uuid() -> Result<Option<Uuid>, Error> {
/// Checks that the schema version in the given database is as expected.
pub(crate) fn check_schema_version(conn: &rusqlite::Connection) -> Result<(), Error> {
let ver = get_schema_version(conn)?.ok_or_else(|| {
format_err!(
"no such table: version.\n\n\
If you have created an empty database by hand, delete it and use `nvr init` \
instead, as noted in the installation instructions: \
<https://github.com/scottlamb/moonfire-nvr/blob/master/guide/install.md>\n\n\
If you are starting from a database that predates schema versioning, see \
<https://github.com/scottlamb/moonfire-nvr/blob/master/guide/schema.md>."
let Some(ver) = get_schema_version(conn)? else {
bail!(
FailedPrecondition,
msg(
"no such table: version.\n\n\
If you have created an empty database by hand, delete it and use `nvr init` \
instead, as noted in the installation instructions: \
<https://github.com/scottlamb/moonfire-nvr/blob/master/guide/install.md>\n\n\
If you are starting from a database that predates schema versioning, see \
<https://github.com/scottlamb/moonfire-nvr/blob/master/guide/schema.md>."),
)
})?;
};
match ver.cmp(&EXPECTED_VERSION) {
std::cmp::Ordering::Less => bail!(
"Database schema version {} is too old (expected {}); \
see upgrade instructions in \
<https://github.com/scottlamb/moonfire-nvr/blob/master/guide/upgrade.md>.",
ver,
EXPECTED_VERSION
FailedPrecondition,
msg(
"database schema version {ver} is too old (expected {EXPECTED_VERSION}); \
see upgrade instructions in guide/upgrade.md"
),
),
std::cmp::Ordering::Equal => Ok(()),
std::cmp::Ordering::Greater => bail!(
"Database schema version {} is too new (expected {}); \
must use a newer binary to match.",
ver,
EXPECTED_VERSION
FailedPrecondition,
msg(
"database schema version {ver} is too new (expected {EXPECTED_VERSION}); \
must use a newer binary to match"
),
),
}
}
@ -2560,7 +2598,7 @@ mod tests {
)
.err()
.unwrap();
assert!(e.to_string().starts_with("no such table"), "{}", e);
assert!(e.msg().unwrap().starts_with("no such table"), "{}", e);
}
#[test]
@ -2571,8 +2609,9 @@ mod tests {
.unwrap();
let e = Database::new(clock::RealClocks {}, c, false).err().unwrap();
assert!(
e.to_string()
.starts_with("Database schema version 6 is too old (expected 7)"),
e.msg()
.unwrap()
.starts_with("database schema version 6 is too old (expected 7)"),
"got: {e:?}"
);
}
@ -2585,8 +2624,9 @@ mod tests {
.unwrap();
let e = Database::new(clock::RealClocks {}, c, false).err().unwrap();
assert!(
e.to_string()
.starts_with("Database schema version 8 is too new (expected 7)"),
e.msg()
.unwrap()
.starts_with("database schema version 8 is too new (expected 7)"),
"got: {e:?}"
);
}

View File

@ -12,8 +12,8 @@ mod reader;
use crate::coding;
use crate::db::CompositeId;
use crate::schema;
use base::{bail, err, Error};
use cstr::cstr;
use failure::{bail, format_err, Error, Fail};
use nix::sys::statvfs::Statvfs;
use nix::{
fcntl::{FlockArg, OFlag},
@ -145,20 +145,23 @@ pub(crate) fn read_meta(dir: &Fd) -> Result<schema::DirMeta, Error> {
let mut data = Vec::new();
f.read_to_end(&mut data)?;
let (len, pos) = coding::decode_varint32(&data, 0)
.map_err(|_| format_err!("Unable to decode varint length in meta file"))?;
.map_err(|_| err!(DataLoss, msg("Unable to decode varint length in meta file")))?;
if data.len() != FIXED_DIR_META_LEN || len as usize + pos > FIXED_DIR_META_LEN {
bail!(
"Expected a {}-byte file with a varint length of a DirMeta message; got \
a {}-byte file with length {}",
FIXED_DIR_META_LEN,
data.len(),
len
DataLoss,
msg(
"Expected a {}-byte file with a varint length of a DirMeta message; got \
a {}-byte file with length {}",
FIXED_DIR_META_LEN,
data.len(),
len,
),
);
}
let data = &data[pos..pos + len as usize];
let mut s = protobuf::CodedInputStream::from_bytes(data);
meta.merge_from(&mut s)
.map_err(|e| e.context("Unable to parse metadata proto"))?;
.map_err(|e| err!(DataLoss, msg("Unable to parse metadata proto"), source(e)))?;
Ok(meta)
}
@ -169,9 +172,12 @@ pub(crate) fn write_meta(dirfd: RawFd, meta: &schema::DirMeta) -> Result<(), Err
.expect("proto3->vec is infallible");
if data.len() > FIXED_DIR_META_LEN {
bail!(
"Length-delimited DirMeta message requires {} bytes, over limit of {}",
data.len(),
FIXED_DIR_META_LEN
Internal,
msg(
"length-delimited DirMeta message requires {} bytes, over limit of {}",
data.len(),
FIXED_DIR_META_LEN,
),
);
}
data.resize(FIXED_DIR_META_LEN, 0); // pad to required length.
@ -181,28 +187,31 @@ pub(crate) fn write_meta(dirfd: RawFd, meta: &schema::DirMeta) -> Result<(), Err
OFlag::O_CREAT | OFlag::O_WRONLY,
Mode::S_IRUSR | Mode::S_IWUSR,
)
.map_err(|e| e.context("Unable to open meta file"))?;
.map_err(|e| err!(e, msg("unable to open meta file")))?;
let stat = f
.metadata()
.map_err(|e| e.context("Unable to stat meta file"))?;
.map_err(|e| err!(e, msg("unable to stat meta file")))?;
if stat.len() == 0 {
// Need to sync not only the data but also the file metadata and dirent.
f.write_all(&data)
.map_err(|e| e.context("Unable to write to meta file"))?;
.map_err(|e| err!(e, msg("unable to write to meta file")))?;
f.sync_all()
.map_err(|e| e.context("Unable to sync meta file"))?;
nix::unistd::fsync(dirfd).map_err(|e| e.context("Unable to sync dir"))?;
.map_err(|e| err!(e, msg("unable to sync meta file")))?;
nix::unistd::fsync(dirfd).map_err(|e| err!(e, msg("unable to sync dir")))?;
} else if stat.len() == FIXED_DIR_META_LEN as u64 {
// Just syncing the data will suffice; existing metadata and dirent are fine.
f.write_all(&data)
.map_err(|e| e.context("Unable to write to meta file"))?;
.map_err(|e| err!(e, msg("unable to write to meta file")))?;
f.sync_data()
.map_err(|e| e.context("Unable to sync meta file"))?;
.map_err(|e| err!(e, msg("unable to sync meta file")))?;
} else {
bail!(
"Existing meta file is {}-byte; expected {}",
stat.len(),
FIXED_DIR_META_LEN
DataLoss,
msg(
"existing meta file is {}-byte; expected {}",
stat.len(),
FIXED_DIR_META_LEN,
),
);
}
Ok(())
@ -221,14 +230,15 @@ impl SampleFileDir {
} else {
FlockArg::LockSharedNonblock
})
.map_err(|e| e.context(format!("unable to lock dir {}", path.display())))?;
let dir_meta = read_meta(&s.fd).map_err(|e| e.context("unable to read meta file"))?;
.map_err(|e| err!(e, msg("unable to lock dir {}", path.display())))?;
let dir_meta = read_meta(&s.fd).map_err(|e| err!(e, msg("unable to read meta file")))?;
if let Err(e) = SampleFileDir::check_consistent(expected_meta, &dir_meta) {
bail!(
"metadata mismatch: {}.\nexpected:\n{:#?}\n\nactual:\n{:#?}",
e,
expected_meta,
&dir_meta
Internal,
msg(
"metadata mismatch: {e}.\nexpected:\n{expected_meta:#?}\n\nactual:\n\
{dir_meta:#?}",
),
);
}
if expected_meta.in_progress_open.is_some() {
@ -275,22 +285,28 @@ impl SampleFileDir {
) -> Result<Arc<SampleFileDir>, Error> {
let s = SampleFileDir::open_self(path, true)?;
s.fd.lock(FlockArg::LockExclusiveNonblock)
.map_err(|e| e.context(format!("unable to lock dir {}", path.display())))?;
.map_err(|e| err!(e, msg("unable to lock dir {}", path.display())))?;
let old_meta = read_meta(&s.fd)?;
// Verify metadata. We only care that it hasn't been completely opened.
// Partial opening by this or another database is fine; we won't overwrite anything.
if old_meta.last_complete_open.is_some() {
bail!(
"Can't create dir at path {}: is already in use:\n{:?}",
path.display(),
old_meta
FailedPrecondition,
msg(
"can't create dir at path {}: is already in use:\n{:?}",
path.display(),
old_meta,
),
);
}
if !s.is_empty()? {
bail!(
"Can't create dir at path {} with existing files",
path.display()
FailedPrecondition,
msg(
"can't create dir at path {} with existing files",
path.display(),
),
);
}
s.write_meta(db_meta)?;

View File

@ -31,9 +31,9 @@ use std::{
task::{Context, Poll},
};
use base::bail_t;
use base::bail;
use base::clock::{RealClocks, TimerGuard};
use base::{format_err_t, Error, ErrorKind, ResultExt};
use base::{err, Error, ErrorKind, ResultExt};
use nix::{fcntl::OFlag, sys::stat::Mode};
use crate::CompositeId;
@ -116,9 +116,9 @@ impl FileStream {
match Pin::new(&mut rx).poll(cx) {
Poll::Ready(Err(_)) => {
self.state = FileStreamState::Invalid;
Poll::Ready(Some(Err(format_err_t!(
Poll::Ready(Some(Err(err!(
Internal,
"reader thread panicked; see logs"
msg("reader thread panicked; see logs")
))))
}
Poll::Ready(Ok(Err(e))) => {
@ -319,12 +319,11 @@ impl ReaderInt {
let map_len = usize::try_from(
range.end - range.start + u64::try_from(unaligned).expect("usize fits in u64"),
)
.map_err(|_| {
format_err_t!(
.map_err(|e| {
err!(
OutOfRange,
"file {}'s range {:?} len exceeds usize::MAX",
composite_id,
range
msg("file {composite_id}'s range {range:?} len exceeds usize::MAX"),
source(e),
)
})?;
let map_len = std::num::NonZeroUsize::new(map_len).expect("range is non-empty");
@ -337,12 +336,14 @@ impl ReaderInt {
// with a SIGBUS or reading bad data at the end of the last page later.
let metadata = file.metadata().err_kind(ErrorKind::Unknown)?;
if metadata.len() < u64::try_from(offset).unwrap() + u64::try_from(map_len.get()).unwrap() {
bail_t!(
Internal,
"file {}, range {:?}, len {}",
composite_id,
range,
metadata.len()
bail!(
OutOfRange,
msg(
"file {}, range {:?}, len {}",
composite_id,
range,
metadata.len()
),
);
}
let map_ptr = unsafe {
@ -356,17 +357,13 @@ impl ReaderInt {
)
}
.map_err(|e| {
format_err_t!(
Internal,
"mmap failed for {} off={} len={}: {}",
composite_id,
offset,
map_len,
e
err!(
e,
msg("mmap failed for {composite_id} off={offset} len={map_len}")
)
})?;
if let Err(e) = unsafe {
if let Err(err) = unsafe {
nix::sys::mman::madvise(
map_ptr,
map_len.get(),
@ -375,11 +372,11 @@ impl ReaderInt {
} {
// This shouldn't happen but is "just" a performance problem.
tracing::warn!(
"madvise(MADV_SEQUENTIAL) failed for {} off={} len={}: {}",
composite_id,
%err,
%composite_id,
offset,
map_len,
e
"madvise(MADV_SEQUENTIAL) failed",
);
}

View File

@ -7,8 +7,7 @@
use crate::db::{self, CompositeId, SqlUuid};
use crate::json::GlobalConfig;
use crate::recording;
use base::{ErrorKind, ResultExt as _};
use failure::{bail, Error, ResultExt as _};
use base::{bail, err, Error, ErrorKind, ResultExt as _};
use fnv::FnvHashSet;
use rusqlite::{named_params, params};
use std::ops::Range;
@ -190,9 +189,8 @@ pub(crate) fn insert_recording(
id: CompositeId,
r: &db::RecordingToInsert,
) -> Result<(), Error> {
let mut stmt = tx
.prepare_cached(
r#"
let mut stmt = tx.prepare_cached(
r#"
insert into recording (composite_id, stream_id, open_id, run_offset, flags,
sample_file_bytes, start_time_90k, prev_media_duration_90k,
prev_runs, wall_duration_90k, media_duration_delta_90k,
@ -204,8 +202,7 @@ pub(crate) fn insert_recording(
:video_samples, :video_sync_samples, :video_sample_entry_id,
:end_reason)
"#,
)
.with_context(|e| format!("can't prepare recording insert: {e}"))?;
)?;
stmt.execute(named_params! {
":composite_id": id.0,
":stream_id": i64::from(id.stream()),
@ -223,18 +220,21 @@ pub(crate) fn insert_recording(
":video_sample_entry_id": r.video_sample_entry_id,
":end_reason": r.end_reason.as_deref(),
})
.with_context(|e| format!("unable to insert recording for recording {id} {r:#?}: {e}"))?;
.map_err(|e| {
err!(
e,
msg("unable to insert recording for recording {id} {r:#?}")
)
})?;
let mut stmt = tx
.prepare_cached(
r#"
let mut stmt = tx.prepare_cached(
r#"
insert into recording_integrity (composite_id, local_time_delta_90k,
sample_file_blake3)
values (:composite_id, :local_time_delta_90k,
:sample_file_blake3)
"#,
)
.with_context(|e| format!("can't prepare recording_integrity insert: {e}"))?;
)?;
let blake3 = r.sample_file_blake3.as_ref().map(|b| &b[..]);
let delta = match r.run_offset {
0 => None,
@ -245,21 +245,19 @@ pub(crate) fn insert_recording(
":local_time_delta_90k": delta,
":sample_file_blake3": blake3,
})
.with_context(|e| format!("unable to insert recording_integrity for {r:#?}: {e}"))?;
.map_err(|e| err!(e, msg("unable to insert recording_integrity for {r:#?}")))?;
let mut stmt = tx
.prepare_cached(
r#"
let mut stmt = tx.prepare_cached(
r#"
insert into recording_playback (composite_id, video_index)
values (:composite_id, :video_index)
"#,
)
.with_context(|e| format!("can't prepare recording_playback insert: {e}"))?;
)?;
stmt.execute(named_params! {
":composite_id": id.0,
":video_index": &r.video_index,
})
.with_context(|e| format!("unable to insert recording_playback for {r:#?}: {e}"))?;
.map_err(|e| err!(e, msg("unable to insert recording_playback for {r:#?}")))?;
Ok(())
}
@ -322,26 +320,31 @@ pub(crate) fn delete_recordings(
let n_playback = del_playback.execute(p)?;
if n_playback != n {
bail!(
"inserted {} garbage rows but deleted {} recording_playback rows!",
n,
n_playback
Internal,
msg(
"inserted {} garbage rows but deleted {} recording_playback rows!",
n,
n_playback
),
);
}
let n_integrity = del_integrity.execute(p)?;
if n_integrity > n {
// fewer is okay; recording_integrity is optional.
bail!(
"inserted {} garbage rows but deleted {} recording_integrity rows!",
n,
n_integrity
Internal,
msg(
"inserted {} garbage rows but deleted {} recording_integrity rows!",
n,
n_integrity
),
);
}
let n_main = del_main.execute(p)?;
if n_main != n {
bail!(
"inserted {} garbage rows but deleted {} recording rows!",
n,
n_main
Internal,
msg("inserted {n} garbage rows but deleted {n_main} recording rows!"),
);
}
Ok(n)
@ -408,9 +411,8 @@ pub(crate) fn get_range(
let max_end = match maxes_opt {
Some(Range { start: _, end: e }) => e,
None => bail!(
"missing max for stream {} which had min {}",
stream_id,
min_start
Internal,
msg("missing max for stream {stream_id} which had min {min_start}"),
),
};
Ok(Some(min_start..max_end))

View File

@ -6,7 +6,7 @@
use crate::coding::{append_varint32, decode_varint32, unzigzag32, zigzag32};
use crate::db;
use failure::{bail, Error};
use base::{bail, Error};
use std::convert::TryFrom;
use std::ops::Range;
use tracing::trace;
@ -79,25 +79,31 @@ impl SampleIndexIterator {
}
let (raw1, i1) = match decode_varint32(data, i) {
Ok(tuple) => tuple,
Err(()) => bail!("bad varint 1 at offset {}", i),
Err(()) => bail!(DataLoss, msg("bad varint 1 at offset {i}")),
};
let (raw2, i2) = match decode_varint32(data, i1) {
Ok(tuple) => tuple,
Err(()) => bail!("bad varint 2 at offset {}", i1),
Err(()) => bail!(DataLoss, msg("bad varint 2 at offset {i1}")),
};
let duration_90k_delta = unzigzag32(raw1 >> 1);
self.duration_90k += duration_90k_delta;
if self.duration_90k < 0 {
bail!(
"negative duration {} after applying delta {}",
self.duration_90k,
duration_90k_delta
DataLoss,
msg(
"negative duration {} after applying delta {}",
self.duration_90k,
duration_90k_delta,
),
);
}
if self.duration_90k == 0 && data.len() > i2 {
bail!(
"zero duration only allowed at end; have {} bytes left",
data.len() - i2
DataLoss,
msg(
"zero duration only allowed at end; have {} bytes left",
data.len() - i2
),
);
}
let (prev_bytes_key, prev_bytes_nonkey) = match self.is_key() {
@ -115,11 +121,14 @@ impl SampleIndexIterator {
}
if self.bytes <= 0 {
bail!(
"non-positive bytes {} after applying delta {} to key={} frame at ts {}",
self.bytes,
bytes_delta,
self.is_key(),
self.start_90k
DataLoss,
msg(
"non-positive bytes {} after applying delta {} to key={} frame at ts {}",
self.bytes,
bytes_delta,
self.is_key(),
self.start_90k,
),
);
}
Ok(true)
@ -228,10 +237,13 @@ impl Segment {
|| desired_media_range_90k.end > recording.media_duration_90k
{
bail!(
"desired media range [{}, {}) invalid for recording of length {}",
desired_media_range_90k.start,
desired_media_range_90k.end,
recording.media_duration_90k
OutOfRange,
msg(
"desired media range [{}, {}) invalid for recording of length {}",
desired_media_range_90k.start,
desired_media_range_90k.end,
recording.media_duration_90k,
),
);
}
@ -257,10 +269,10 @@ impl Segment {
let data = &playback.video_index;
let mut it = SampleIndexIterator::default();
if !it.next(data)? {
bail!("no index");
bail!(Internal, msg("no index"));
}
if !it.is_key() {
bail!("not key frame");
bail!(Internal, msg("not key frame"));
}
// Stop when hitting a frame with this start time.
@ -336,10 +348,13 @@ impl Segment {
None => {
let mut it = SampleIndexIterator::default();
if !it.next(data)? {
bail!("recording {} has no frames", self.id);
bail!(Internal, msg("recording {} has no frames", self.id));
}
if !it.is_key() {
bail!("recording {} doesn't start with key frame", self.id);
bail!(
Internal,
msg("recording {} doesn't start with key frame", self.id)
);
}
it
}
@ -350,19 +365,25 @@ impl Segment {
for i in 0..self.frames {
if !have_frame {
bail!(
"recording {}: expected {} frames, found only {}",
self.id,
self.frames,
i + 1
Internal,
msg(
"recording {}: expected {} frames, found only {}",
self.id,
self.frames,
i + 1,
),
);
}
if it.is_key() {
key_frame += 1;
if key_frame > self.key_frames {
bail!(
"recording {}: more than expected {} key frames",
self.id,
self.key_frames
Internal,
msg(
"recording {}: more than expected {} key frames",
self.id,
self.key_frames,
),
);
}
}
@ -381,10 +402,13 @@ impl Segment {
}
if key_frame < self.key_frames {
bail!(
"recording {}: expected {} key frames, found only {}",
self.id,
self.key_frames,
key_frame
Internal,
msg(
"recording {}: expected {} key frames, found only {}",
self.id,
self.key_frames,
key_frame,
),
);
}
Ok(())
@ -499,7 +523,7 @@ mod tests {
];
for test in &tests {
let mut it = SampleIndexIterator::default();
assert_eq!(it.next(test.encoded).unwrap_err().to_string(), test.err);
assert_eq!(it.next(test.encoded).unwrap_err().msg().unwrap(), test.err);
}
}

View File

@ -8,8 +8,7 @@
use crate::json::{SignalConfig, SignalTypeConfig};
use crate::{coding, days};
use crate::{recording, SqlUuid};
use base::bail_t;
use failure::{bail, format_err, Error};
use base::{bail, err, Error};
use fnv::FnvHashMap;
use rusqlite::{params, Connection, Transaction};
use std::collections::btree_map::Entry;
@ -149,19 +148,29 @@ impl<'a> PointDataIterator<'a> {
return Ok(None);
}
let (signal_delta, p) = coding::decode_varint32(self.data, self.cur_pos).map_err(|()| {
format_err!(
"varint32 decode failure; data={:?} pos={}",
self.data,
self.cur_pos
err!(
DataLoss,
msg(
"varint32 decode failure; data={:?} pos={}",
self.data,
self.cur_pos
),
)
})?;
let (state, p) = coding::decode_varint32(self.data, p).map_err(|()| {
err!(
DataLoss,
msg("varint32 decode failure; data={:?} pos={}", self.data, p)
)
})?;
let (state, p) = coding::decode_varint32(self.data, p)
.map_err(|()| format_err!("varint32 decode failure; data={:?} pos={}", self.data, p))?;
let signal = self.cur_signal.checked_add(signal_delta).ok_or_else(|| {
format_err!("signal overflow: {} + {}", self.cur_signal, signal_delta)
err!(
OutOfRange,
msg("signal overflow: {} + {}", self.cur_signal, signal_delta)
)
})?;
if state > u16::max_value() as u32 {
bail!("state overflow: {}", state);
bail!(OutOfRange, msg("state overflow: {state}"));
}
self.cur_pos = p;
self.cur_signal = signal + 1;
@ -335,15 +344,21 @@ impl State {
/// Helper for `update_signals` to do validation.
fn update_signals_validate(&self, signals: &[u32], states: &[u16]) -> Result<(), base::Error> {
if signals.len() != states.len() {
bail_t!(InvalidArgument, "signals and states must have same length");
bail!(
InvalidArgument,
msg("signals and states must have same length")
);
}
let mut next_allowed = 0u32;
for (&signal, &state) in signals.iter().zip(states) {
if signal < next_allowed {
bail_t!(InvalidArgument, "signals must be monotonically increasing");
bail!(
InvalidArgument,
msg("signals must be monotonically increasing")
);
}
match self.signals_by_id.get(&signal) {
None => bail_t!(InvalidArgument, "unknown signal {}", signal),
None => bail!(InvalidArgument, msg("unknown signal {signal}")),
Some(s) => {
let states = self
.types_by_uuid
@ -351,11 +366,9 @@ impl State {
.map(|t| t.valid_states)
.unwrap_or(0);
if state >= 16 || (states & (1 << state)) == 0 {
bail_t!(
bail!(
FailedPrecondition,
"signal {} specifies unknown state {}",
signal,
state
msg("signal {signal} specifies unknown state {state}"),
);
}
}
@ -659,7 +672,8 @@ impl State {
let mut rows = stmt.query(params![])?;
while let Some(row) = rows.next()? {
let id: i32 = row.get(0)?;
let id = u32::try_from(id)?;
let id = u32::try_from(id)
.map_err(|e| err!(Internal, msg("signal id out of range"), source(e)))?;
let uuid: SqlUuid = row.get(1)?;
let type_: SqlUuid = row.get(2)?;
let config: SignalConfig = row.get(3)?;
@ -698,9 +712,12 @@ impl State {
for &value in type_.config.values.keys() {
if value == 0 || value >= 16 {
bail!(
"signal type {} value {} out of accepted range [0, 16)",
uuid.0,
value
OutOfRange,
msg(
"signal type {} value {} out of accepted range [0, 16)",
uuid.0,
value,
),
);
}
type_.valid_states |= 1 << value;
@ -741,9 +758,9 @@ impl State {
let e = sig_last_state.entry(signal);
if let Entry::Occupied(ref e) = e {
let (prev_time, prev_state) = *e.get();
let s = signals_by_id.get_mut(&signal).ok_or_else(|| {
format_err!("time {} references invalid signal {}", time_90k, signal)
})?;
let Some(s) = signals_by_id.get_mut(&signal) else {
bail!(DataLoss, msg("time {time_90k} references invalid signal {signal}"));
};
s.days.adjust(prev_time..time_90k, 0, prev_state);
}
if state == 0 {
@ -760,8 +777,8 @@ impl State {
}
if !cur.is_empty() {
bail!(
"far future state should be unknown for all signals; is: {:?}",
cur
Internal,
msg("far future state should be unknown for all signals; is: {cur:?}")
);
}
Ok(())

View File

@ -6,8 +6,8 @@
//!
//! See `guide/schema.md` for more information.
use crate::db;
use failure::{bail, Error};
use crate::db::{self, EXPECTED_VERSION};
use base::{bail, Error};
use nix::NixPath;
use rusqlite::params;
use std::ffi::CStr;
@ -60,14 +60,16 @@ fn upgrade(args: &Args, target_ver: i32, conn: &mut rusqlite::Connection) -> Res
{
assert_eq!(upgraders.len(), db::EXPECTED_VERSION as usize);
let old_ver = conn.query_row("select max(id) from version", params![], |row| row.get(0))?;
if old_ver > db::EXPECTED_VERSION {
if old_ver > EXPECTED_VERSION {
bail!(
"Database is at version {}, later than expected {}",
old_ver,
db::EXPECTED_VERSION
FailedPrecondition,
msg("database is at version {old_ver}, later than expected {EXPECTED_VERSION}"),
);
} else if old_ver < 0 {
bail!("Database is at negative version {}!", old_ver);
bail!(
FailedPrecondition,
msg("Database is at negative version {old_ver}!")
);
}
info!(
"Upgrading database from version {} to version {}...",
@ -95,7 +97,7 @@ pub fn run(args: &Args, conn: &mut rusqlite::Connection) -> Result<(), Error> {
db::check_sqlite_version()?;
db::set_integrity_pragmas(conn)?;
set_journal_mode(conn, args.preset_journal)?;
upgrade(args, db::EXPECTED_VERSION, conn)?;
upgrade(args, EXPECTED_VERSION, conn)?;
// As in "moonfire-nvr init": try for page_size=16384 and wal for the reasons explained there.
//
@ -154,7 +156,7 @@ mod tests {
use super::*;
use crate::compare;
use crate::testutil;
use failure::ResultExt;
use base::err;
use fnv::FnvHashMap;
const BAD_ANAMORPHIC_VIDEO_SAMPLE_ENTRY: &[u8] = b"\x00\x00\x00\x84\x61\x76\x63\x31\x00\x00\
@ -209,7 +211,7 @@ mod tests {
let tmpdir = tempfile::Builder::new()
.prefix("moonfire-nvr-test")
.tempdir()?;
//let path = tmpdir.path().to_str().ok_or_else(|| format_err!("invalid UTF-8"))?.to_owned();
//let path = tmpdir.path().to_str().ok_or_else(|| err!("invalid UTF-8"))?.to_owned();
let mut upgraded = new_conn()?;
upgraded.execute_batch(include_str!("v0.sql"))?;
upgraded.execute_batch(
@ -291,7 +293,7 @@ mod tests {
*ver,
&mut upgraded,
)
.context(format!("upgrading to version {ver}"))?;
.map_err(|e| err!(e, msg("upgrade to version {ver} failed")))?;
if let Some(f) = fresh_sql {
compare(&upgraded, *ver, f)?;
}

View File

@ -5,7 +5,7 @@
/// Upgrades a version 0 schema to a version 1 schema.
use crate::db;
use crate::recording;
use failure::Error;
use base::Error;
use rusqlite::{named_params, params};
use std::collections::HashMap;
use tracing::warn;

View File

@ -5,7 +5,7 @@
/// Upgrades a version 1 schema to a version 2 schema.
use crate::dir;
use crate::schema::DirMeta;
use failure::{bail, format_err, Error};
use base::{bail, Error};
use nix::fcntl::{FlockArg, OFlag};
use nix::sys::stat::Mode;
use rusqlite::{named_params, params};
@ -13,9 +13,12 @@ use std::os::unix::io::AsRawFd;
use uuid::Uuid;
pub fn run(args: &super::Args, tx: &rusqlite::Transaction) -> Result<(), Error> {
let sample_file_path = args.sample_file_dir.ok_or_else(|| {
format_err!("--sample-file-dir required when upgrading from schema version 1 to 2.")
})?;
let Some(sample_file_path) = args.sample_file_dir else {
bail!(
InvalidArgument,
msg("--sample-file-dir required when upgrading from schema version 1 to 2."),
);
};
let mut d = nix::dir::Dir::open(
sample_file_path,
@ -101,12 +104,12 @@ pub fn run(args: &super::Args, tx: &rusqlite::Transaction) -> Result<(), Error>
}
dir::write_meta(d.as_raw_fd(), &meta)?;
let sample_file_path = sample_file_path.to_str().ok_or_else(|| {
format_err!(
"sample file dir {} is not a valid string",
sample_file_path.display()
)
})?;
let Some(sample_file_path) = sample_file_path.to_str() else {
bail!(
InvalidArgument,
msg("sample file dir {} is not a valid string", sample_file_path.display()),
);
};
tx.execute(
r#"
insert into sample_file_dir (path, uuid, last_complete_open_id)
@ -317,15 +320,24 @@ fn verify_dir_contents(
};
let s = match f.to_str() {
Ok(s) => s,
Err(_) => bail!("unexpected file {:?} in {:?}", f, sample_file_path),
Err(_) => bail!(
FailedPrecondition,
msg("unexpected file {f:?} in {sample_file_path:?}")
),
};
let uuid = match Uuid::parse_str(s) {
Ok(u) => u,
Err(_) => bail!("unexpected file {:?} in {:?}", f, sample_file_path),
Err(_) => bail!(
FailedPrecondition,
msg("unexpected file {f:?} in {sample_file_path:?}")
),
};
if s != uuid.as_hyphenated().to_string() {
// non-canonical form.
bail!("unexpected file {:?} in {:?}", f, sample_file_path);
bail!(
FailedPrecondition,
msg("unexpected file {f:?} in {sample_file_path:?}")
);
}
files.insert(uuid);
}
@ -338,9 +350,12 @@ fn verify_dir_contents(
let uuid: crate::db::SqlUuid = row.get(0)?;
if !files.remove(&uuid.0) {
bail!(
"{} is missing from dir {}!",
uuid.0,
sample_file_path.display()
FailedPrecondition,
msg(
"{} is missing from dir {}!",
uuid.0,
sample_file_path.display()
),
);
}
}
@ -367,10 +382,13 @@ fn verify_dir_contents(
if !files.is_empty() {
bail!(
"{} unexpected sample file uuids in dir {}: {:?}!",
files.len(),
sample_file_path.display(),
files
FailedPrecondition,
msg(
"{} unexpected sample file uuids in dir {}: {:?}!",
files.len(),
sample_file_path.display(),
files,
),
);
}
Ok(())
@ -413,7 +431,7 @@ fn fix_video_sample_entry(tx: &rusqlite::Transaction) -> Result<(), Error> {
fn rfc6381_codec_from_sample_entry(sample_entry: &[u8]) -> Result<String, Error> {
if sample_entry.len() < 99 || &sample_entry[4..8] != b"avc1" || &sample_entry[90..94] != b"avcC"
{
bail!("not a valid AVCSampleEntry");
bail!(InvalidArgument, msg("not a valid AVCSampleEntry"));
}
let profile_idc = sample_entry[103];
let constraint_flags_byte = sample_entry[104];

View File

@ -8,9 +8,8 @@
use crate::db::{self, SqlUuid};
use crate::dir;
use crate::schema;
use failure::Error;
use base::Error;
use rusqlite::params;
use std::convert::TryFrom;
use std::os::unix::io::AsRawFd;
use std::path::PathBuf;
use std::sync::Arc;
@ -50,7 +49,7 @@ fn open_sample_file_dir(tx: &rusqlite::Transaction) -> Result<Arc<dir::SampleFil
open.id = o_id as u32;
open.uuid.extend_from_slice(&o_uuid.0.as_bytes()[..]);
}
let p = PathBuf::try_from(p)?;
let p = PathBuf::from(p);
dir::SampleFileDir::open(&p, &meta)
}

View File

@ -3,7 +3,7 @@
// SPDX-License-Identifier: GPL-v3.0-or-later WITH GPL-3.0-linking-exception.
/// Upgrades a version 3 schema to a version 4 schema.
use failure::Error;
use base::Error;
pub fn run(_args: &super::Args, tx: &rusqlite::Transaction) -> Result<(), Error> {
// These create statements match the schema.sql when version 4 was the latest.

View File

@ -8,8 +8,8 @@
/// Otherwise, verify they are consistent with the database then upgrade them.
use crate::db::SqlUuid;
use crate::{dir, schema};
use base::{bail, err, Error};
use cstr::cstr;
use failure::{bail, Error, Fail};
use nix::fcntl::{FlockArg, OFlag};
use nix::sys::stat::Mode;
use protobuf::Message;
@ -34,15 +34,17 @@ fn maybe_upgrade_meta(dir: &dir::Fd, db_meta: &schema::DirMeta) -> Result<bool,
let mut s = protobuf::CodedInputStream::from_bytes(&data);
let mut dir_meta = schema::DirMeta::new();
dir_meta
.merge_from(&mut s)
.map_err(|e| e.context("Unable to parse metadata proto: {}"))?;
dir_meta.merge_from(&mut s).map_err(|e| {
err!(
FailedPrecondition,
msg("unable to parse metadata proto"),
source(e)
)
})?;
if let Err(e) = dir::SampleFileDir::check_consistent(db_meta, &dir_meta) {
bail!(
"Inconsistent db_meta={:?} dir_meta={:?}: {}",
&db_meta,
&dir_meta,
e
FailedPrecondition,
msg("inconsistent db_meta={db_meta:?} dir_meta={dir_meta:?}: {e}"),
);
}
let mut f = crate::fs::openat(
@ -56,9 +58,12 @@ fn maybe_upgrade_meta(dir: &dir::Fd, db_meta: &schema::DirMeta) -> Result<bool,
.expect("proto3->vec is infallible");
if data.len() > FIXED_DIR_META_LEN {
bail!(
"Length-delimited DirMeta message requires {} bytes, over limit of {}",
data.len(),
FIXED_DIR_META_LEN
Internal,
msg(
"length-delimited DirMeta message requires {} bytes, over limit of {}",
data.len(),
FIXED_DIR_META_LEN,
),
);
}
data.resize(FIXED_DIR_META_LEN, 0); // pad to required length.
@ -144,12 +149,12 @@ pub fn run(_args: &super::Args, tx: &rusqlite::Transaction) -> Result<(), Error>
o.uuid.extend_from_slice(&uuid.0.as_bytes()[..]);
}
(None, None) => {}
_ => bail!("open table missing id"),
_ => bail!(Internal, msg("open table missing id")),
}
let dir = dir::Fd::open(path, false)?;
dir.lock(FlockArg::LockExclusiveNonblock)
.map_err(|e| e.context(format!("unable to lock dir {path}")))?;
.map_err(|e| err!(e, msg("unable to lock dir {path}")))?;
let mut need_sync = maybe_upgrade_meta(&dir, &db_meta)?;
if maybe_cleanup_garbage_uuids(&dir)? {

View File

@ -2,9 +2,9 @@
// Copyright (C) 2020 The Moonfire NVR Authors; see AUTHORS and LICENSE.txt.
// SPDX-License-Identifier: GPL-v3.0-or-later WITH GPL-3.0-linking-exception.
use base::{bail, err, Error};
/// Upgrades a version 4 schema to a version 5 schema.
use byteorder::{BigEndian, ByteOrder, WriteBytesExt};
use failure::{bail, format_err, Error, ResultExt};
use h264_reader::avcc::AvcDecoderConfigurationRecord;
use rusqlite::{named_params, params};
use std::convert::{TryFrom, TryInto};
@ -29,22 +29,31 @@ fn default_pixel_aspect_ratio(width: u16, height: u16) -> (u16, u16) {
fn parse(data: &[u8]) -> Result<AvcDecoderConfigurationRecord, Error> {
if data.len() < 94 || &data[4..8] != b"avc1" || &data[90..94] != b"avcC" {
bail!("data of len {} doesn't have an avcC", data.len());
bail!(
DataLoss,
msg("data of len {} doesn't have an avcC", data.len())
);
}
let avcc_len = BigEndian::read_u32(&data[86..90]);
if avcc_len < 8 {
// length and type.
bail!("invalid avcc len {}", avcc_len);
bail!(DataLoss, msg("invalid avcc len {avcc_len}"));
}
let end_pos = 86 + usize::try_from(avcc_len)?;
if end_pos != data.len() {
let end_pos = usize::try_from(avcc_len)
.ok()
.and_then(|l| l.checked_add(86));
if end_pos != Some(data.len()) {
bail!(
"expected avcC to be end of extradata; there are {} more bytes.",
data.len() - end_pos
DataLoss,
msg(
"avcC end pos {:?} and total data len {} should match",
end_pos,
data.len(),
),
);
}
AvcDecoderConfigurationRecord::try_from(&data[94..end_pos])
.map_err(|e| format_err!("Bad AvcDecoderConfigurationRecord: {:?}", e))
AvcDecoderConfigurationRecord::try_from(&data[94..])
.map_err(|e| err!(DataLoss, msg("Bad AvcDecoderConfigurationRecord: {:?}", e)))
}
pub fn run(_args: &super::Args, tx: &rusqlite::Transaction) -> Result<(), Error> {
@ -100,24 +109,37 @@ pub fn run(_args: &super::Args, tx: &rusqlite::Transaction) -> Result<(), Error>
let mut rows = stmt.query(params![])?;
while let Some(row) = rows.next()? {
let id: i32 = row.get(0)?;
let width: u16 = row.get::<_, i32>(1)?.try_into()?;
let height: u16 = row.get::<_, i32>(2)?.try_into()?;
let rfc6381_codec: &str = row.get_ref(3)?.as_str()?;
let width: u16 = row
.get::<_, i32>(1)?
.try_into()
.map_err(|_| err!(OutOfRange))?;
let height: u16 = row
.get::<_, i32>(2)?
.try_into()
.map_err(|_| err!(OutOfRange))?;
let rfc6381_codec: &str = row
.get_ref(3)?
.as_str()
.map_err(|_| err!(InvalidArgument))?;
let mut data: Vec<u8> = row.get(4)?;
let avcc = parse(&data)?;
if avcc.num_of_sequence_parameter_sets() != 1 {
bail!("Multiple SPSs!");
bail!(Unimplemented, msg("multiple SPSs!"));
}
let ctx = avcc.create_context().map_err(|e| {
format_err!(
"Can't load SPS+PPS for video_sample_entry_id {}: {:?}",
id,
e
err!(
Unknown,
msg("can't load SPS+PPS for video_sample_entry_id {id}: {e:?}"),
)
})?;
let sps = ctx
.sps_by_id(h264_reader::nal::pps::ParamSetId::from_u32(0).unwrap())
.ok_or_else(|| format_err!("No SPS 0 for video_sample_entry_id {}", id))?;
.ok_or_else(|| {
err!(
Unimplemented,
msg("no SPS 0 for video_sample_entry_id {id}")
)
})?;
let pasp = sps
.vui_parameters
.as_ref()
@ -129,7 +151,10 @@ pub fn run(_args: &super::Args, tx: &rusqlite::Transaction) -> Result<(), Error>
data.write_u32::<BigEndian>(pasp.0.into())?;
data.write_u32::<BigEndian>(pasp.1.into())?;
let len = data.len();
BigEndian::write_u32(&mut data[0..4], u32::try_from(len)?);
BigEndian::write_u32(
&mut data[0..4],
u32::try_from(len).map_err(|_| err!(OutOfRange))?,
);
}
insert.execute(named_params! {
@ -268,7 +293,7 @@ pub fn run(_args: &super::Args, tx: &rusqlite::Transaction) -> Result<(), Error>
":video_sync_samples": video_sync_samples,
":video_sample_entry_id": video_sample_entry_id,
})
.with_context(|_| format!("Unable to insert composite_id {composite_id}"))?;
.map_err(|e| err!(e, msg("unable to insert composite_id {composite_id}")))?;
cum_duration_90k += i64::from(wall_duration_90k);
cum_runs += if run_offset == 0 { 1 } else { 0 };
}

View File

@ -3,7 +3,7 @@
// SPDX-License-Identifier: GPL-v3.0-or-later WITH GPL-3.0-linking-exception
/// Upgrades a version 6 schema to a version 7 schema.
use failure::{format_err, Error, ResultExt};
use base::{err, Error};
use fnv::FnvHashMap;
use rusqlite::{named_params, params};
use std::{convert::TryFrom, path::PathBuf};
@ -28,7 +28,13 @@ fn copy_meta(tx: &rusqlite::Transaction) -> Result<(), Error> {
let config = GlobalConfig {
max_signal_changes: max_signal_changes
.map(|s| {
u32::try_from(s).map_err(|_| format_err!("max_signal_changes out of range"))
u32::try_from(s).map_err(|e| {
err!(
OutOfRange,
msg("max_signal_changes out of range"),
source(e)
)
})
})
.transpose()?,
..Default::default()
@ -57,7 +63,7 @@ fn copy_sample_file_dir(tx: &rusqlite::Transaction) -> Result<(), Error> {
let path: String = row.get(2)?;
let uuid: SqlUuid = row.get(1)?;
let config = SampleFileDirConfig {
path: PathBuf::try_from(path)?,
path: PathBuf::from(path),
..Default::default()
};
let last_complete_open_id: Option<i64> = row.get(3)?;
@ -107,7 +113,10 @@ fn copy_users(tx: &rusqlite::Transaction) -> Result<(), Error> {
let permissions: Vec<u8> = row.get(7)?;
let config = UserConfig {
disabled: (flags & 1) != 0,
unix_uid: unix_uid.map(u64::try_from).transpose()?,
unix_uid: unix_uid
.map(u64::try_from)
.transpose()
.map_err(|_| err!(OutOfRange, msg("bad unix_uid")))?,
..Default::default()
};
insert.execute(named_params! {
@ -134,7 +143,8 @@ fn copy_signal_types(tx: &rusqlite::Transaction) -> Result<(), Error> {
let type_ = types_
.entry(type_uuid.0)
.or_insert_with(SignalTypeConfig::default);
let value = u8::try_from(value).map_err(|_| format_err!("bad signal type value"))?;
let value =
u8::try_from(value).map_err(|_| err!(OutOfRange, msg("bad signal type value")))?;
let value_config = type_.values.entry(value).or_insert_with(Default::default);
if let Some(n) = name {
value_config.name = n;
@ -163,7 +173,8 @@ fn copy_signals(tx: &rusqlite::Transaction) -> Result<(), Error> {
let mut rows = stmt.query(params![])?;
while let Some(row) = rows.next()? {
let id: i32 = row.get(0)?;
let id = u32::try_from(id)?;
let id =
u32::try_from(id).map_err(|e| err!(OutOfRange, msg("bad signal id"), source(e)))?;
let source_uuid: SqlUuid = row.get(1)?;
let type_uuid: SqlUuid = row.get(2)?;
let short_name: String = row.get(3)?;
@ -187,7 +198,8 @@ fn copy_signals(tx: &rusqlite::Transaction) -> Result<(), Error> {
let mut rows = stmt.query(params![])?;
while let Some(row) = rows.next()? {
let signal_id: i32 = row.get(0)?;
let signal_id = u32::try_from(signal_id)?;
let signal_id = u32::try_from(signal_id)
.map_err(|e| err!(OutOfRange, msg("bad signal_id"), source(e)))?;
let camera_id: i32 = row.get(1)?;
let type_: i32 = row.get(2)?;
let signal = signals.get_mut(&signal_id).unwrap();
@ -261,7 +273,13 @@ fn copy_cameras(tx: &rusqlite::Transaction) -> Result<(), Error> {
.filter(|h| !h.is_empty())
.map(|h| Url::parse(&format!("http://{h}/")))
.transpose()
.with_context(|_| "bad onvif_host")?,
.map_err(|e| {
err!(
InvalidArgument,
msg("bad onvif_host for camera id {id}"),
source(e)
)
})?,
username: username.take().unwrap_or_default(),
password: password.take().unwrap_or_default(),
..Default::default()
@ -324,7 +342,13 @@ fn copy_streams(tx: &rusqlite::Transaction) -> Result<(), Error> {
""
})
.to_owned(),
url: Some(Url::parse(&rtsp_url)?),
url: Some(Url::parse(&rtsp_url).map_err(|e| {
err!(
InvalidArgument,
msg("bad rtsp_url for stream id {id}"),
source(e)
)
})?),
retain_bytes,
flush_if_sec,
..Default::default()

View File

@ -9,7 +9,7 @@ use crate::dir;
use crate::recording::{self, MAX_RECORDING_WALL_DURATION};
use base::clock::{self, Clocks};
use base::shutdown::ShutdownError;
use failure::{bail, format_err, Error};
use base::{bail, err, Error};
use fnv::FnvHashMap;
use std::cmp::{self, Ordering};
use std::convert::TryFrom;
@ -218,10 +218,9 @@ pub fn lower_retention(
for l in limits {
let (fs_bytes_before, extra);
{
let stream = db
.streams_by_id()
.get(&l.stream_id)
.ok_or_else(|| format_err!("no such stream {}", l.stream_id))?;
let Some(stream) = db.streams_by_id().get(&l.stream_id) else {
bail!(NotFound, msg("no such stream {}", l.stream_id));
};
fs_bytes_before =
stream.fs_bytes + stream.fs_bytes_to_add - stream.fs_bytes_to_delete;
extra = stream.config.retain_bytes - l.limit;
@ -245,7 +244,7 @@ fn delete_recordings(
) -> Result<(), Error> {
let fs_bytes_needed = {
let stream = match db.streams_by_id().get(&stream_id) {
None => bail!("no stream {}", stream_id),
None => bail!(NotFound, msg("no stream {stream_id}")),
Some(s) => s,
};
stream.fs_bytes + stream.fs_bytes_to_add - stream.fs_bytes_to_delete + extra_bytes_needed
@ -326,7 +325,7 @@ impl<C: Clocks + Clone> Syncer<C, Arc<dir::SampleFileDir>> {
let d = l
.sample_file_dirs_by_id()
.get(&dir_id)
.ok_or_else(|| format_err!("no dir {}", dir_id))?;
.ok_or_else(|| err!(NotFound, msg("no dir {dir_id}")))?;
let dir = d.get()?;
// Abandon files.
@ -345,17 +344,20 @@ impl<C: Clocks + Clone> Syncer<C, Arc<dir::SampleFileDir>> {
let to_abandon = list_files_to_abandon(&dir, streams_to_next)?;
let mut undeletable = 0;
for &id in &to_abandon {
if let Err(e) = dir.unlink_file(id) {
if e == nix::Error::ENOENT {
warn!("dir: abandoned recording {} already deleted!", id);
if let Err(err) = dir.unlink_file(id) {
if err == nix::Error::ENOENT {
warn!(%id, "dir: abandoned recording already deleted");
} else {
warn!("dir: Unable to unlink abandoned recording {}: {}", id, e);
warn!(%err, %id, "dir: unable to unlink abandoned recording");
undeletable += 1;
}
}
}
if undeletable > 0 {
bail!("Unable to delete {} abandoned recordings.", undeletable);
bail!(
Unknown,
msg("unable to delete {undeletable} abandoned recordings; see logs")
);
}
Ok((
@ -410,8 +412,8 @@ impl<C: Clocks + Clone> Syncer<C, Arc<dir::SampleFileDir>> {
}
if errors > 0 {
bail!(
"Unable to unlink {} files (see earlier warning messages for details)",
errors
Unknown,
msg("unable to unlink {errors} files (see earlier warning messages for details)"),
);
}
self.dir.sync()?;
@ -718,7 +720,7 @@ impl<'a, C: Clocks + Clone, D: DirWriter> Writer<'a, C, D> {
WriterState::Unopened => None,
WriterState::Open(ref o) => {
if o.video_sample_entry_id != video_sample_entry_id {
bail!("inconsistent video_sample_entry_id");
bail!(Internal, msg("inconsistent video_sample_entry_id"));
}
return Ok(());
}
@ -738,7 +740,8 @@ impl<'a, C: Clocks + Clone, D: DirWriter> Writer<'a, C, D> {
)?;
let f = clock::retry(&self.db.clocks(), shutdown_rx, &mut || {
self.dir.create_file(id)
})?;
})
.map_err(|e| err!(Cancelled, source(e)))?;
self.state = WriterState::Open(InnerWriter {
f,
@ -757,7 +760,7 @@ impl<'a, C: Clocks + Clone, D: DirWriter> Writer<'a, C, D> {
Ok(match self.state {
WriterState::Unopened => false,
WriterState::Closed(_) => true,
WriterState::Open(_) => bail!("open!"),
WriterState::Open(_) => bail!(Internal, msg("open!")),
})
}
@ -786,9 +789,12 @@ impl<'a, C: Clocks + Clone, D: DirWriter> Writer<'a, C, D> {
if duration <= 0 {
w.unindexed_sample = Some(unindexed); // restore invariant.
bail!(
"pts not monotonically increasing; got {} then {}",
unindexed.pts_90k,
pts_90k
InvalidArgument,
msg(
"pts not monotonically increasing; got {} then {}",
unindexed.pts_90k,
pts_90k,
),
);
}
let duration = match i32::try_from(duration) {
@ -796,9 +802,12 @@ impl<'a, C: Clocks + Clone, D: DirWriter> Writer<'a, C, D> {
Err(_) => {
w.unindexed_sample = Some(unindexed); // restore invariant.
bail!(
"excessive pts jump from {} to {}",
unindexed.pts_90k,
pts_90k
InvalidArgument,
msg(
"excessive pts jump from {} to {}",
unindexed.pts_90k,
pts_90k,
),
)
}
};
@ -822,10 +831,10 @@ impl<'a, C: Clocks + Clone, D: DirWriter> Writer<'a, C, D> {
Err(e) => {
// close() will do nothing because unindexed_sample will be None.
tracing::warn!(
"Abandoning incompletely written recording {} on shutdown",
"abandoning incompletely written recording {} on shutdown",
w.id
);
return Err(e.into());
bail!(Cancelled, source(e));
}
};
remaining = &remaining[written..];
@ -894,9 +903,8 @@ impl<F: FileWriter> InnerWriter<F> {
+ i32::try_from(clamp(local_start.0 - start.0, -limit, limit)).unwrap();
if wall_duration_90k > i32::try_from(MAX_RECORDING_WALL_DURATION).unwrap() {
bail!(
"Duration {} exceeds maximum {}",
wall_duration_90k,
MAX_RECORDING_WALL_DURATION
OutOfRange,
msg("Duration {wall_duration_90k} exceeds maximum {MAX_RECORDING_WALL_DURATION}"),
);
}
l.wall_duration_90k = wall_duration_90k;
@ -926,14 +934,29 @@ impl<F: FileWriter> InnerWriter<F> {
reason: Option<String>,
) -> Result<PreviousWriter, Error> {
let unindexed = self.unindexed_sample.take().ok_or_else(|| {
format_err!(
"Unable to add recording {} to database due to aborted write",
self.id
err!(
FailedPrecondition,
msg(
"unable to add recording {} to database due to aborted write",
self.id,
),
)
})?;
let (last_sample_duration, flags) = match next_pts {
None => (0, db::RecordingFlags::TrailingZero as i32),
Some(p) => (i32::try_from(p - unindexed.pts_90k)?, 0),
Some(p) => (
i32::try_from(p - unindexed.pts_90k).map_err(|_| {
err!(
OutOfRange,
msg(
"pts {} following {} creates invalid duration",
p,
unindexed.pts_90k
)
)
})?,
0,
),
};
let blake3 = self.hasher.finalize();
let (run_offset, end);

View File

@ -27,7 +27,7 @@ pub type BoxedError = Box<dyn StdError + Send + Sync>;
pub type BodyStream = Box<dyn Stream<Item = Result<Chunk, BoxedError>> + Send>;
pub fn wrap_error(e: Error) -> BoxedError {
Box::new(e.compat())
Box::new(e)
}
impl From<ARefss<'static, [u8]>> for Chunk {

View File

@ -4,9 +4,9 @@
//! Subcommand to check the database and sample file dir for errors.
use base::Error;
use bpaf::Bpaf;
use db::check;
use failure::Error;
use std::path::PathBuf;
/// Checks database integrity (like fsck).

View File

@ -4,11 +4,11 @@
use crate::stream::{self, Opener};
use base::strutil::{decode_size, encode_size};
use base::{bail, err, Error};
use cursive::traits::{Finder, Nameable, Resizable, Scrollable};
use cursive::views::{self, Dialog, ViewRef};
use cursive::Cursive;
use db::writer;
use failure::{bail, format_err, Error, ResultExt};
use itertools::Itertools;
use std::collections::BTreeMap;
use std::str::FromStr;
@ -123,21 +123,31 @@ fn parse_url(
if raw.is_empty() {
return Ok(None);
}
let url = url::Url::parse(raw)
.with_context(|_| format!("can't parse {} {:?} as URL", field_name, &raw))?;
let url = url::Url::parse(raw).map_err(|_| {
err!(
InvalidArgument,
msg("can't parse {field_name} {raw:?} as URL")
)
})?;
if !allowed_schemes.iter().any(|scheme| *scheme == url.scheme()) {
bail!(
"Unexpected scheme in {} {:?}; should be one of: {}",
field_name,
url.as_str(),
allowed_schemes.iter().join(", ")
InvalidArgument,
msg(
"unexpected scheme in {} {:?}; should be one of: {}",
field_name,
url.as_str(),
allowed_schemes.iter().join(", "),
),
);
}
if !url.username().is_empty() || url.password().is_some() {
bail!(
"Unexpected credentials in {} {:?}; use the username and password fields instead",
field_name,
url.as_str()
InvalidArgument,
msg(
"unexpected credentials in {} {:?}; use the username and password fields instead",
field_name,
url.as_str(),
),
);
}
Ok(Some(url))
@ -166,8 +176,8 @@ fn press_edit(siv: &mut Cursive, db: &Arc<db::Database>, id: Option<i32>) {
let type_ = db::StreamType::from_index(i).unwrap();
if stream.record && (stream.url.is_empty() || stream.sample_file_dir_id.is_none()) {
bail!(
"Can't record {} stream without RTSP URL and sample file directory",
type_.as_str()
InvalidArgument,
msg("can't record {type_} stream without RTSP URL and sample file directory"),
);
}
let stream_change = &mut change.streams[i];
@ -184,9 +194,9 @@ fn press_edit(siv: &mut Cursive, db: &Arc<db::Database>, id: Option<i32>) {
0
} else {
stream.flush_if_sec.parse().map_err(|_| {
format_err!(
"flush_if_sec for {} must be a non-negative integer",
type_.as_str()
err!(
InvalidArgument,
msg("flush_if_sec for {type_} must be a non-negative integer"),
)
})?
};

View File

@ -3,12 +3,12 @@
// SPDX-License-Identifier: GPL-v3.0-or-later WITH GPL-3.0-linking-exception.
use base::strutil::{decode_size, encode_size};
use base::Error;
use cursive::traits::{Nameable, Resizable};
use cursive::view::Scrollable;
use cursive::Cursive;
use cursive::{views, With};
use db::writer;
use failure::Error;
use std::cell::RefCell;
use std::collections::BTreeMap;
use std::path::Path;

View File

@ -8,10 +8,10 @@
//! configuration will likely be almost entirely done through a web-based UI.
use base::clock;
use base::Error;
use bpaf::Bpaf;
use cursive::views;
use cursive::Cursive;
use failure::Error;
use std::path::PathBuf;
use std::sync::Arc;

View File

@ -2,8 +2,8 @@
// Copyright (C) 2020 The Moonfire NVR Authors; see AUTHORS and LICENSE.txt.
// SPDX-License-Identifier: GPL-v3.0-or-later WITH GPL-3.0-linking-exception.
use base::Error;
use bpaf::Bpaf;
use failure::Error;
use std::path::PathBuf;
use tracing::info;

View File

@ -5,9 +5,9 @@
//! Subcommand to login a user (without requiring a password).
use base::clock::{self, Clocks};
use base::{bail, err, Error};
use bpaf::Bpaf;
use db::auth::SessionFlag;
use failure::{format_err, Error};
use std::io::Write as _;
use std::os::unix::fs::OpenOptionsExt as _;
use std::path::PathBuf;
@ -69,9 +69,9 @@ pub fn run(args: Args) -> Result<i32, Error> {
let (_db_dir, conn) = super::open_conn(&args.db_dir, super::OpenMode::ReadWrite)?;
let db = std::sync::Arc::new(db::Database::new(clocks, conn, true).unwrap());
let mut l = db.lock();
let u = l
.get_user(&args.username)
.ok_or_else(|| format_err!("no such user {:?}", &args.username))?;
let Some(u) = l.get_user(&args.username) else {
bail!(NotFound, msg("no such user {:?}", &args.username));
};
let permissions = args
.permissions
.map(db::Permissions::from)
@ -101,13 +101,13 @@ pub fn run(args: Args) -> Result<i32, Error> {
let d = args
.domain
.as_ref()
.ok_or_else(|| format_err!("--curl-cookie-jar requires --domain"))?;
.ok_or_else(|| err!(InvalidArgument, msg("--curl-cookie-jar requires --domain")))?;
let mut f = std::fs::OpenOptions::new()
.write(true)
.create_new(true)
.mode(0o600)
.open(p)
.map_err(|e| format_err!("Unable to open {}: {}", p.display(), e))?;
.map_err(|e| err!(e, msg("unable to open {}", p.display())))?;
write!(
&mut f,
"# Netscape HTTP Cookie File\n\

View File

@ -2,8 +2,8 @@
// Copyright (C) 2016 The Moonfire NVR Authors; see AUTHORS and LICENSE.txt.
// SPDX-License-Identifier: GPL-v3.0-or-later WITH GPL-3.0-linking-exception.
use base::{err, Error};
use db::dir;
use failure::{Error, Fail};
use nix::fcntl::FlockArg;
use std::path::Path;
use tracing::info;
@ -28,16 +28,19 @@ enum OpenMode {
/// The returned `dir::Fd` holds the lock and should be kept open as long as the `Connection` is.
fn open_dir(db_dir: &Path, mode: OpenMode) -> Result<dir::Fd, Error> {
let dir = dir::Fd::open(db_dir, mode == OpenMode::Create).map_err(|e| {
e.context(if mode == OpenMode::Create {
format!("unable to create db dir {}", db_dir.display())
if mode == OpenMode::Create {
err!(e, msg("unable to create db dir {}", db_dir.display()))
} else if e == nix::Error::ENOENT {
format!(
"db dir {} not found; try running moonfire-nvr init",
db_dir.display()
err!(
NotFound,
msg(
"db dir {} not found; try running moonfire-nvr init",
db_dir.display(),
),
)
} else {
format!("unable to open db dir {}", db_dir.display())
})
err!(e, msg("unable to open db dir {}", db_dir.display()))
}
})?;
let ro = mode == OpenMode::ReadOnly;
dir.lock(if ro {
@ -46,11 +49,14 @@ fn open_dir(db_dir: &Path, mode: OpenMode) -> Result<dir::Fd, Error> {
FlockArg::LockExclusiveNonblock
})
.map_err(|e| {
e.context(format!(
"unable to get {} lock on db dir {} ",
if ro { "shared" } else { "exclusive" },
db_dir.display()
))
err!(
e,
msg(
"unable to get {} lock on db dir {} ",
if ro { "shared" } else { "exclusive" },
db_dir.display(),
),
)
})?;
Ok(dir)
}

View File

@ -6,9 +6,10 @@ use crate::streamer;
use crate::web;
use crate::web::accept::Listener;
use base::clock;
use base::err;
use base::{bail, Error};
use bpaf::Bpaf;
use db::{dir, writer};
use failure::{bail, Error, ResultExt};
use fnv::FnvHashMap;
use hyper::service::{make_service_fn, service_fn};
use retina::client::SessionGroup;
@ -76,7 +77,10 @@ fn resolve_zone() -> Result<String, Error> {
}
if p != LOCALTIME_PATH {
bail!("Unable to resolve env TZ={} to a timezone.", &tz);
bail!(
FailedPrecondition,
msg("unable to resolve env TZ={tz} to a timezone")
);
}
}
@ -86,21 +90,23 @@ fn resolve_zone() -> Result<String, Error> {
Ok(localtime_dest) => {
let localtime_dest = match localtime_dest.to_str() {
Some(d) => d,
None => bail!("{} symlink destination is invalid UTF-8", LOCALTIME_PATH),
None => bail!(
FailedPrecondition,
msg("{LOCALTIME_PATH} symlink destination is invalid UTF-8")
),
};
if let Some(p) = zoneinfo_name(localtime_dest) {
return Ok(p.to_owned());
}
bail!(
"Unable to resolve {} symlink destination {} to a timezone.",
LOCALTIME_PATH,
&localtime_dest
FailedPrecondition,
msg("unable to resolve {LOCALTIME_PATH} symlink destination {localtime_dest} to a timezone"),
);
}
Err(e) => {
use ::std::io::ErrorKind;
if e.kind() != ErrorKind::NotFound && e.kind() != ErrorKind::InvalidInput {
bail!("Unable to read {} symlink: {}", LOCALTIME_PATH, e);
bail!(e, msg("unable to read {LOCALTIME_PATH} symlink"));
}
}
};
@ -110,10 +116,8 @@ fn resolve_zone() -> Result<String, Error> {
Ok(z) => Ok(z.trim().to_owned()),
Err(e) => {
bail!(
"Unable to resolve timezone from TZ env, {}, or {}. Last error: {}",
LOCALTIME_PATH,
TIMEZONE_PATH,
e
e,
msg("unable to resolve timezone from TZ env, {LOCALTIME_PATH}, or {TIMEZONE_PATH}"),
);
}
}
@ -127,15 +131,18 @@ struct Syncer {
fn read_config(path: &Path) -> Result<ConfigFile, Error> {
let config = std::fs::read(path)?;
let config = toml::from_slice(&config)?;
let config = toml::from_slice(&config).map_err(|e| err!(InvalidArgument, source(e)))?;
Ok(config)
}
pub fn run(args: Args) -> Result<i32, Error> {
let config = read_config(&args.config).with_context(|_| {
format!(
"Unable to load config file {}. See documentation in ref/config.md.",
&args.config.display()
let config = read_config(&args.config).map_err(|e| {
err!(
e,
msg(
"unable to load config file {}; see documentation in ref/config.md",
&args.config.display(),
),
)
})?;
@ -180,8 +187,8 @@ async fn async_run(read_only: bool, config: &ConfigFile) -> Result<i32, Error> {
}
tokio::select! {
_ = int.recv() => bail!("immediate shutdown due to second signal (SIGINT)"),
_ = term.recv() => bail!("immediate shutdown due to second singal (SIGTERM)"),
_ = int.recv() => bail!(Cancelled, msg("immediate shutdown due to second signal (SIGINT)")),
_ = term.recv() => bail!(Cancelled, msg("immediate shutdown due to second singal (SIGTERM)")),
result = &mut inner => result,
}
}
@ -213,17 +220,16 @@ fn make_listener(addr: &config::AddressConfig) -> Result<Listener, Error> {
config::AddressConfig::Ipv6(a) => (*a).into(),
config::AddressConfig::Unix(p) => {
prepare_unix_socket(p);
return Ok(Listener::Unix(
tokio::net::UnixListener::bind(p)
.with_context(|_| format!("unable bind Unix socket {}", p.display()))?,
));
return Ok(Listener::Unix(tokio::net::UnixListener::bind(p).map_err(
|e| err!(e, msg("unable bind Unix socket {}", p.display())),
)?));
}
};
// Go through std::net::TcpListener to avoid needing async. That's there for DNS resolution,
// but it's unnecessary when starting from a SocketAddr.
let listener = std::net::TcpListener::bind(sa)
.with_context(|_| format!("unable to bind TCP socket {}", &sa))?;
.map_err(|e| err!(e, msg("unable to bind TCP socket {sa}")))?;
listener.set_nonblocking(true)?;
Ok(Listener::Tcp(tokio::net::TcpListener::from_std(listener)?))
}
@ -419,13 +425,16 @@ async fn inner(
}
}
})
.await?;
.await
.map_err(|e| err!(Unknown, source(e)))?;
db.lock().clear_watches();
info!("Waiting for HTTP requests to finish.");
for h in web_handles {
h.await??;
h.await
.map_err(|e| err!(Unknown, source(e)))?
.map_err(|e| err!(Unknown, source(e)))?;
}
info!("Waiting for TEARDOWN requests to complete.");

View File

@ -5,8 +5,8 @@
//! Subcommand to run a SQLite shell.
use super::OpenMode;
use base::Error;
use bpaf::Bpaf;
use failure::Error;
use std::ffi::OsString;
use std::os::unix::process::CommandExt;
use std::path::PathBuf;

View File

@ -2,8 +2,8 @@
// Copyright (C) 2020 The Moonfire NVR Authors; see AUTHORS and LICENSE.txt.
// SPDX-License-Identifier: GPL-v3.0-or-later WITH GPL-3.0-linking-exception.
use base::Error;
use bpaf::Bpaf;
use failure::Error;
/// Translates between integer and human-readable timestamps.
#[derive(Bpaf, Debug)]

View File

@ -2,11 +2,11 @@
// Copyright (C) 2020 The Moonfire NVR Authors; see AUTHORS and LICENSE.txt.
// SPDX-License-Identifier: GPL-v3.0-or-later WITH GPL-3.0-linking-exception.
use bpaf::Bpaf;
/// Upgrades the database schema.
///
/// See `guide/schema.md` for more information.
use failure::Error;
use base::Error;
use bpaf::Bpaf;
/// Upgrades to the latest database schema.
#[derive(Bpaf, Debug)]

View File

@ -18,9 +18,9 @@
//! through ffmpeg's own generated `.mp4` file. Extracting just this part of their `.mp4` files
//! would be more trouble than it's worth.
use base::{bail, err, Error};
use byteorder::{BigEndian, ByteOrder, WriteBytesExt};
use db::VideoSampleEntryToInsert;
use failure::{bail, format_err, Error};
use std::convert::TryFrom;
// For certain common sub stream anamorphic resolutions, add a pixel aspect ratio box.
@ -62,34 +62,31 @@ fn default_pixel_aspect_ratio(width: u16, height: u16) -> (u16, u16) {
/// Parses the `AvcDecoderConfigurationRecord` in the "extra data".
pub fn parse_extra_data(extradata: &[u8]) -> Result<VideoSampleEntryToInsert, Error> {
let avcc = h264_reader::avcc::AvcDecoderConfigurationRecord::try_from(extradata)
.map_err(|e| format_err!("Bad AvcDecoderConfigurationRecord: {:?}", e))?;
let avcc =
h264_reader::avcc::AvcDecoderConfigurationRecord::try_from(extradata).map_err(|e| {
err!(
InvalidArgument,
msg("bad AvcDecoderConfigurationRecord: {:?}", e)
)
})?;
if avcc.num_of_sequence_parameter_sets() != 1 {
bail!("Multiple SPSs!");
bail!(Unimplemented, msg("multiple SPSs!"));
}
let ctx = avcc
.create_context()
.map_err(|e| format_err!("Can't load SPS+PPS: {:?}", e))?;
.map_err(|e| err!(Unknown, msg("can't load SPS+PPS: {:?}", e)))?;
let sps = ctx
.sps_by_id(h264_reader::nal::pps::ParamSetId::from_u32(0).unwrap())
.ok_or_else(|| format_err!("No SPS 0"))?;
let pixel_dimensions = sps
.pixel_dimensions()
.map_err(|e| format_err!("SPS has invalid pixel dimensions: {:?}", e))?;
let width = u16::try_from(pixel_dimensions.0).map_err(|_| {
format_err!(
"bad dimensions {}x{}",
pixel_dimensions.0,
pixel_dimensions.1
)
})?;
let height = u16::try_from(pixel_dimensions.1).map_err(|_| {
format_err!(
"bad dimensions {}x{}",
pixel_dimensions.0,
pixel_dimensions.1
.ok_or_else(|| err!(Unimplemented, msg("no SPS 0")))?;
let pixel_dimensions = sps.pixel_dimensions().map_err(|e| {
err!(
InvalidArgument,
msg("SPS has invalid pixel dimensions: {:?}", e)
)
})?;
let (Ok(width), Ok(height)) = (u16::try_from(pixel_dimensions.0), u16::try_from(pixel_dimensions.1)) else {
bail!(InvalidArgument, msg("bad dimensions {}x{}", pixel_dimensions.0, pixel_dimensions.1));
};
let mut sample_entry = Vec::with_capacity(256);
@ -130,7 +127,7 @@ pub fn parse_extra_data(extradata: &[u8]) -> Result<VideoSampleEntryToInsert, Er
let cur_pos = sample_entry.len();
BigEndian::write_u32(
&mut sample_entry[avcc_len_pos..avcc_len_pos + 4],
u32::try_from(cur_pos - avcc_len_pos)?,
u32::try_from(cur_pos - avcc_len_pos).map_err(|_| err!(OutOfRange))?,
);
// PixelAspectRatioBox, ISO/IEC 14496-12 section 12.1.4.2.
@ -150,7 +147,7 @@ pub fn parse_extra_data(extradata: &[u8]) -> Result<VideoSampleEntryToInsert, Er
let cur_pos = sample_entry.len();
BigEndian::write_u32(
&mut sample_entry[avc1_len_pos..avc1_len_pos + 4],
u32::try_from(cur_pos - avc1_len_pos)?,
u32::try_from(cur_pos - avc1_len_pos).map_err(|_| err!(OutOfRange))?,
);
let profile_idc = sample_entry[103];

View File

@ -5,8 +5,8 @@
//! JSON/TOML-compatible serde types for use in the web API and `moonfire-nvr.toml`.
use base::time::{Duration, Time};
use base::{err, Error};
use db::auth::SessionHash;
use failure::{format_err, Error};
use serde::ser::{Error as _, SerializeMap, SerializeSeq, Serializer};
use serde::{Deserialize, Deserializer, Serialize};
use std::ops::Not;
@ -230,7 +230,7 @@ impl<'a> Stream<'a> {
let s = db
.streams_by_id()
.get(&id)
.ok_or_else(|| format_err!("missing stream {}", id))?;
.ok_or_else(|| err!(Internal, msg("missing stream {id}")))?;
Ok(Some(Stream {
id: s.id,
retain_bytes: s.config.retain_bytes,

View File

@ -4,6 +4,7 @@
#![cfg_attr(all(feature = "nightly", test), feature(test))]
use base::Error;
use bpaf::{Bpaf, Parser};
use std::ffi::OsStr;
use std::path::{Path, PathBuf};
@ -37,7 +38,7 @@ enum Args {
}
impl Args {
fn run(self) -> Result<i32, failure::Error> {
fn run(self) -> Result<i32, Error> {
match self {
Args::Check(a) => cmds::check::run(a),
Args::Config(a) => cmds::config::run(a),
@ -93,11 +94,11 @@ fn main() {
match args.run() {
Err(e) => {
error!("Exiting due to error: {}", base::prettify_failure(&e));
error!("exiting due to error: {}", e.chain());
::std::process::exit(1);
}
Ok(rv) => {
debug!("Exiting with status {}", rv);
debug!("exiting with status {}", rv);
std::process::exit(rv)
}
}

View File

@ -56,7 +56,7 @@
use crate::body::{wrap_error, BoxedError, Chunk};
use crate::slices::{self, Slices};
use base::{bail_t, format_err_t, Error, ErrorKind, ResultExt};
use base::{bail, err, Error, ErrorKind, ResultExt};
use byteorder::{BigEndian, ByteOrder, WriteBytesExt};
use bytes::BytesMut;
use db::dir;
@ -410,14 +410,14 @@ impl Segment {
*index = db
.lock()
.with_recording_playback(self.s.id, &mut |playback| self.build_index(playback))
.map_err(|e| {
error!("Unable to build index for segment: {:?}", e);
.map_err(|err| {
error!(%err, recording_id = %self.s.id, "unable to build index for segment");
});
});
let index: &'a _ = unsafe { &*self.index.get() };
match *index {
Ok(ref b) => Ok(f(&b[..], self.lens())),
Err(()) => bail_t!(Unknown, "Unable to build index; see previous error."),
Err(()) => bail!(Unknown, msg("unable to build index; see logs")),
}
}
@ -439,7 +439,7 @@ impl Segment {
&buf[lens.stts + lens.stsz..]
}
fn build_index(&self, playback: &db::RecordingPlayback) -> Result<Box<[u8]>, failure::Error> {
fn build_index(&self, playback: &db::RecordingPlayback) -> Result<Box<[u8]>, Error> {
let s = &self.s;
let lens = self.lens();
let len = lens.stts + lens.stsz + lens.stss;
@ -511,7 +511,7 @@ impl Segment {
playback: &db::RecordingPlayback,
initial_pos: u64,
len: usize,
) -> Result<Vec<u8>, failure::Error> {
) -> Result<Vec<u8>, Error> {
let mut v = Vec::with_capacity(len);
struct RunInfo {
@ -623,12 +623,14 @@ impl Segment {
);
}
if len != v.len() {
bail_t!(
bail!(
Internal,
"truns on {:?} expected len {} got len {}",
self,
len,
v.len()
msg(
"truns on {:?} expected len {} got len {}",
self,
len,
v.len(),
),
);
}
Ok(v)
@ -698,12 +700,9 @@ enum SliceType {
impl Slice {
fn new(end: u64, t: SliceType, p: usize) -> Result<Self, Error> {
if end >= (1 << 40) || p >= (1 << 20) {
bail_t!(
InvalidArgument,
"end={} p={} too large for {:?} Slice",
end,
p,
t
bail!(
OutOfRange,
msg("end={} p={} too large for {:?} Slice", end, p, t,),
);
}
@ -730,7 +729,7 @@ impl Slice {
.try_map(|mp4| {
let i = mp4.segments[p].get_index(&mp4.db, f)?;
if u64::try_from(i.len()).unwrap() != len {
bail_t!(Internal, "expected len {} got {}", len, i.len());
bail!(Internal, msg("expected len {} got {}", len, i.len()));
}
Ok::<_, Error>(&i[r])
})?
@ -760,7 +759,7 @@ impl Slice {
.try_map(|mp4| {
let data = &mp4.video_sample_entries[self.p()].data;
if u64::try_from(data.len()).unwrap() != len {
bail_t!(Internal, "expected len {} got len {}", len, data.len());
bail!(Internal, msg("expected len {} got len {}", len, data.len()));
}
Ok::<_, Error>(&data[r.start as usize..r.end as usize])
})?
@ -787,11 +786,9 @@ impl slices::Slice for Slice {
SliceType::Static => {
let s = STATIC_BYTESTRINGS[p];
if u64::try_from(s.len()).unwrap() != len {
Err(format_err_t!(
Err(err!(
Internal,
"expected len {} got len {}",
len,
s.len()
msg("expected len {} got len {}", len, s.len())
))
} else {
let part = &s[range.start as usize..range.end as usize];
@ -817,12 +814,14 @@ impl slices::Slice for Slice {
Box::new(stream::once(futures::future::ready(
res.map_err(wrap_error).and_then(move |c| {
if c.remaining() != (range.end - range.start) as usize {
return Err(wrap_error(format_err_t!(
return Err(wrap_error(err!(
Internal,
"Error producing {:?}: range {:?} produced incorrect len {}.",
self,
range,
c.remaining()
msg(
"{:?} range {:?} produced incorrect len {}",
self,
range,
c.remaining()
)
)));
}
Ok(c)
@ -904,9 +903,9 @@ impl FileBuilder {
// There's no support today for timestamp truns or for timestamps without edit lists.
// The latter would invalidate the code's assumption that desired timespan == actual
// timespan in the timestamp track.
bail_t!(
bail!(
InvalidArgument,
"timestamp subtitles aren't supported on media segments"
msg("timestamp subtitles aren't supported on media segments")
);
}
self.include_timestamp_subtitle_track = b;
@ -934,11 +933,13 @@ impl FileBuilder {
) -> Result<(), Error> {
if let Some(prev) = self.segments.last() {
if prev.s.have_trailing_zero() {
bail_t!(
bail!(
InvalidArgument,
"unable to append recording {} after recording {} with trailing zero",
row.id,
prev.s.id
msg(
"unable to append recording {} after recording {} with trailing zero",
row.id,
prev.s.id,
),
);
}
} else {
@ -1071,10 +1072,12 @@ impl FileBuilder {
// If the segment is > 4 GiB, the 32-bit trun data offsets are untrustworthy.
// We'd need multiple moof+mdat sequences to support large media segments properly.
if self.body.slices.len() > u32::max_value() as u64 {
bail_t!(
InvalidArgument,
"media segment has length {}, greater than allowed 4 GiB",
self.body.slices.len()
bail!(
OutOfRange,
msg(
"media segment has length {}, greater than allowed 4 GiB",
self.body.slices.len(),
),
);
}
@ -1355,7 +1358,7 @@ impl FileBuilder {
None => Some((e.width, e.height)),
Some((w, h)) => Some((cmp::max(w, e.width), cmp::max(h, e.height))),
})
.ok_or_else(|| format_err_t!(InvalidArgument, "no video_sample_entries"))?;
.ok_or_else(|| err!(InvalidArgument, msg("no video_sample_entries")))?;
self.body.append_u32((width as u32) << 16);
self.body.append_u32((height as u32) << 16);
})
@ -1396,7 +1399,10 @@ impl FileBuilder {
let skip = md.start - actual_start_90k;
let keep = md.end - md.start;
if skip < 0 || keep < 0 {
bail_t!(Internal, "skip={} keep={} on segment {:#?}", skip, keep, s);
bail!(
Internal,
msg("skip={} keep={} on segment {:#?}", skip, keep, s)
);
}
cur_media_time += skip as u64;
if unflushed.segment_duration + unflushed.media_time == cur_media_time {
@ -1817,9 +1823,10 @@ impl FileInner {
let sr = s.s.sample_file_range();
let f = match self.dirs_by_stream_id.get(&s.s.id.stream()) {
None => {
return Box::new(stream::iter(std::iter::once(Err(wrap_error(
format_err_t!(NotFound, "{}: stream not found", s.s.id),
)))))
return Box::new(stream::iter(std::iter::once(Err(wrap_error(err!(
NotFound,
msg("{}: stream not found", s.s.id)
))))))
}
Some(d) => d.open_file(s.s.id, (r.start + sr.start)..(r.end + sr.start)),
};
@ -1865,10 +1872,12 @@ impl File {
pub async fn append_into_vec(self, v: &mut Vec<u8>) -> Result<(), Error> {
use http_serve::Entity;
v.reserve(usize::try_from(self.len()).map_err(|_| {
format_err_t!(
err!(
InvalidArgument,
"{}-byte mp4 is too big to send over WebSockets!",
self.len()
msg(
"{}-byte mp4 is too big to send over WebSockets!",
self.len()
),
)
})?);
let mut b = std::pin::Pin::from(self.get_range(0..self.len()));
@ -1876,9 +1885,7 @@ impl File {
use futures::stream::StreamExt;
match b.next().await {
Some(r) => {
let mut chunk = r
.map_err(failure::Error::from_boxed_compat)
.err_kind(ErrorKind::Unknown)?;
let mut chunk = r.map_err(|e| err!(Unknown, source(e)))?;
while chunk.has_remaining() {
let c = chunk.chunk();
v.extend_from_slice(c);
@ -2312,7 +2319,7 @@ mod tests {
loop {
let pkt = match input.next() {
Ok(p) => p,
Err(e) if e.to_string().contains("End of file") => {
Err(e) if e.kind() == ErrorKind::OutOfRange => {
break;
}
Err(e) => {
@ -2419,14 +2426,14 @@ mod tests {
for i in 0.. {
let orig_pkt = match orig.next() {
Ok(p) => Some(p),
Err(e) if e.to_string() == "End of file" => None,
Err(e) if e.msg().unwrap() == "end of file" => None,
Err(e) => {
panic!("unexpected input error: {}", e);
}
};
let new_pkt = match new.next() {
Ok(p) => Some(p),
Err(e) if e.to_string() == "End of file" => {
Err(e) if e.msg().unwrap() == "end of file" => {
break;
}
Err(e) => {
@ -2634,7 +2641,8 @@ mod tests {
let e = make_mp4_from_encoders(Type::Normal, &db, vec![], 0..0, true)
.err()
.unwrap();
assert_eq!(e.to_string(), "Invalid argument: no video_sample_entries");
assert_eq!(e.kind(), ErrorKind::InvalidArgument);
assert_eq!(e.msg().unwrap(), "no video_sample_entries");
}
#[tokio::test]

View File

@ -9,8 +9,7 @@ use std::ops::Range;
use std::pin::Pin;
use crate::body::{wrap_error, BoxedError};
use base::format_err_t;
use failure::{bail, Error};
use base::{bail, err, Error};
use futures::{stream, stream::StreamExt, Stream};
use tracing_futures::Instrument;
@ -102,11 +101,14 @@ where
pub fn append(&mut self, slice: S) -> Result<(), Error> {
if slice.end() <= self.len {
bail!(
"end {} <= len {} while adding slice {:?} to slices:\n{:?}",
slice.end(),
self.len,
slice,
self
Internal,
msg(
"end {} <= len {} while adding slice {:?} to slices:\n{:?}",
slice.end(),
self.len,
slice,
self
),
);
}
self.len = slice.end();
@ -133,14 +135,10 @@ where
) -> Box<dyn Stream<Item = Result<S::Chunk, BoxedError>> + Sync + Send> {
#[allow(clippy::suspicious_operation_groupings)]
if range.start > range.end || range.end > self.len {
return Box::new(stream::once(futures::future::err(wrap_error(
format_err_t!(
Internal,
"Bad range {:?} for slice of length {}",
range,
self.len
),
))));
return Box::new(stream::once(futures::future::err(wrap_error(err!(
Internal,
msg("bad range {:?} for slice of length {}", range, self.len),
)))));
}
// Binary search for the first slice of the range to write, determining its index and

View File

@ -3,9 +3,8 @@
// SPDX-License-Identifier: GPL-v3.0-or-later WITH GPL-3.0-linking-exception.
use crate::h264;
use base::{bail, err, Error};
use bytes::Bytes;
use failure::format_err;
use failure::{bail, Error};
use futures::StreamExt;
use retina::client::Demuxed;
use retina::codec::CodecItem;
@ -74,7 +73,8 @@ impl Opener for RealOpener {
.in_current_span(),
),
)
.expect("RetinaStream::play task panicked, see earlier error")??;
.expect("RetinaStream::play task panicked, see earlier error")
.map_err(|e| err!(Unknown, source(e)))??;
Ok(Box::new(RetinaStream {
inner: Some(inner),
rt_handle,
@ -121,22 +121,30 @@ impl RetinaStreamInner {
url: Url,
options: Options,
) -> Result<(Box<Self>, retina::codec::VideoFrame), Error> {
let mut session = retina::client::Session::describe(url, options.session).await?;
let mut session = retina::client::Session::describe(url, options.session)
.await
.map_err(|e| err!(Unknown, source(e)))?;
tracing::debug!("connected to {:?}, tool {:?}", &label, session.tool());
let video_i = session
.streams()
.iter()
.position(|s| s.media() == "video" && s.encoding_name() == "h264")
.ok_or_else(|| format_err!("couldn't find H.264 video stream"))?;
session.setup(video_i, options.setup).await?;
let session = session.play(retina::client::PlayOptions::default()).await?;
let mut session = session.demuxed()?;
.ok_or_else(|| err!(FailedPrecondition, msg("couldn't find H.264 video stream")))?;
session
.setup(video_i, options.setup)
.await
.map_err(|e| err!(Unknown, source(e)))?;
let session = session
.play(retina::client::PlayOptions::default())
.await
.map_err(|e| err!(Unknown, source(e)))?;
let mut session = session.demuxed().map_err(|e| err!(Unknown, source(e)))?;
// First frame.
let first_frame = loop {
match Pin::new(&mut session).next().await {
None => bail!("stream closed before first frame"),
Some(Err(e)) => return Err(e.into()),
None => bail!(Unavailable, msg("stream closed before first frame")),
Some(Err(e)) => bail!(Unknown, msg("unable to get first frame"), source(e)),
Some(Ok(CodecItem::VideoFrame(v))) => {
if v.is_random_access_point() {
break v;
@ -148,7 +156,7 @@ impl RetinaStreamInner {
let video_params = match session.streams()[video_i].parameters() {
Some(retina::codec::ParametersRef::Video(v)) => v.clone(),
Some(_) => unreachable!(),
None => bail!("couldn't find H.264 parameters"),
None => bail!(Unknown, msg("couldn't find H.264 parameters")),
};
let video_sample_entry = h264::parse_extra_data(video_params.extra_data())?;
let self_ = Box::new(Self {
@ -171,8 +179,13 @@ impl RetinaStreamInner {
Error,
> {
loop {
match Pin::new(&mut self.session).next().await.transpose()? {
None => bail!("end of stream"),
match Pin::new(&mut self.session)
.next()
.await
.transpose()
.map_err(|e| err!(Unknown, source(e)))?
{
None => bail!(Unavailable, msg("end of stream")),
Some(CodecItem::VideoFrame(v)) => {
if v.loss() > 0 {
tracing::warn!(
@ -223,7 +236,13 @@ impl Stream for RetinaStream {
),
)
.expect("fetch_next_frame task panicked, see earlier error")
.map_err(|_| format_err!("timeout getting next frame"))??;
.map_err(|e| {
err!(
DeadlineExceeded,
msg("timeout getting next frame"),
source(e)
)
})??;
let mut new_video_sample_entry = false;
if let Some(p) = new_parameters {
let video_sample_entry = h264::parse_extra_data(p.extra_data())?;
@ -239,7 +258,7 @@ impl Stream for RetinaStream {
}
};
self.inner = Some(inner);
Ok::<_, failure::Error>((frame, new_video_sample_entry))
Ok::<_, Error>((frame, new_video_sample_entry))
})?;
Ok(VideoFrame {
pts: frame.timestamp().elapsed(),
@ -269,16 +288,24 @@ pub mod testutil {
pub fn open(path: &str) -> Result<Self, Error> {
let f = std::fs::read(path)?;
let len = f.len();
let reader = mp4::Mp4Reader::read_header(Cursor::new(f), u64::try_from(len)?)?;
let reader = mp4::Mp4Reader::read_header(
Cursor::new(f),
u64::try_from(len).expect("len should be in u64 range"),
)
.map_err(|e| err!(Unknown, source(e)))?;
let h264_track = match reader
.tracks()
.values()
.find(|t| matches!(t.media_type(), Ok(mp4::MediaType::H264)))
{
None => bail!("expected a H.264 track"),
None => bail!(InvalidArgument, msg("expected a H.264 track")),
Some(t) => t,
};
let video_sample_entry = h264::parse_extra_data(&h264_track.extra_data()?[..])?;
let video_sample_entry = h264::parse_extra_data(
&h264_track
.extra_data()
.map_err(|e| err!(Unknown, source(e)))?[..],
)?;
let h264_track_id = h264_track.track_id();
let stream = Mp4Stream {
reader,
@ -312,8 +339,9 @@ pub mod testutil {
fn next(&mut self) -> Result<VideoFrame, Error> {
let sample = self
.reader
.read_sample(self.h264_track_id, self.next_sample_id)?
.ok_or_else(|| format_err!("End of file"))?;
.read_sample(self.h264_track_id, self.next_sample_id)
.map_err(|e| err!(Unknown, source(e)))?
.ok_or_else(|| err!(OutOfRange, msg("end of file")))?;
self.next_sample_id += 1;
Ok(VideoFrame {
pts: sample.start_time as i64,

View File

@ -4,8 +4,8 @@
use crate::stream;
use base::clock::{Clocks, TimerGuard};
use base::{bail, err, Error};
use db::{dir, recording, writer, Camera, Database, Stream};
use failure::{bail, format_err, Error};
use std::result::Result;
use std::str::FromStr;
use std::sync::Arc;
@ -68,9 +68,12 @@ where
.config
.url
.as_ref()
.ok_or_else(|| format_err!("Stream has no RTSP URL"))?;
.ok_or_else(|| err!(InvalidArgument, msg("stream has no RTSP URL")))?;
if !url.username().is_empty() || url.password().is_some() {
bail!("RTSP URL shouldn't include credentials");
bail!(
InvalidArgument,
msg("RTSP URL shouldn't include credentials")
);
}
let stream_transport = if s.config.rtsp_transport.is_empty() {
None
@ -119,7 +122,7 @@ where
if let Err(err) = self.run_once() {
let sleep_time = time::Duration::seconds(1);
warn!(
err = base::prettify_failure(&err),
err = %err.chain(),
"sleeping for 1 s after error"
);
self.db.clocks().sleep(sleep_time);
@ -150,7 +153,7 @@ where
}
}
.in_current_span(),
)?;
).map_err(|e| err!(Unknown, source(e)))?;
waited = true;
} else {
if waited {
@ -221,7 +224,7 @@ where
None
} else if frame.new_video_sample_entry {
if !frame.is_key {
bail!("parameter change on non-key frame");
bail!(Unavailable, msg("parameter change on non-key frame"));
}
trace!("close on parameter change");
video_sample_entry_id = {
@ -286,8 +289,8 @@ where
mod tests {
use crate::stream::{self, Stream};
use base::clock::{self, Clocks};
use base::{bail, Error};
use db::{recording, testutil, CompositeId};
use failure::{bail, Error};
use std::cmp;
use std::convert::TryFrom;
use std::sync::Arc;
@ -334,7 +337,7 @@ mod tests {
fn next(&mut self) -> Result<stream::VideoFrame, Error> {
if self.pkts_left == 0 {
bail!("end of stream");
bail!(OutOfRange, msg("end of stream"));
}
self.pkts_left -= 1;
@ -394,7 +397,7 @@ mod tests {
None => {
trace!("MockOpener shutting down");
self.shutdown_tx.lock().unwrap().take();
bail!("done")
bail!(Cancelled, msg("done"))
}
}
}

View File

@ -6,7 +6,7 @@
use std::sync::Arc;
use base::{bail_t, format_err_t, Error};
use base::{bail, err, Error};
use futures::{future::Either, SinkExt, StreamExt};
use http::header;
use tokio_tungstenite::{tungstenite, WebSocketStream};
@ -26,7 +26,7 @@ impl Service {
) -> Result<(), Error> {
let caller = caller?;
if !caller.permissions.view_video {
bail_t!(PermissionDenied, "view_video required");
bail!(PermissionDenied, msg("view_video required"));
}
let stream_id;
@ -36,18 +36,18 @@ impl Service {
let mut db = self.db.lock();
open_id = match db.open {
None => {
bail_t!(
bail!(
FailedPrecondition,
"database is read-only; there are no live streams"
msg("database is read-only; there are no live streams"),
);
}
Some(o) => o.id,
};
let camera = db
.get_camera(uuid)
.ok_or_else(|| format_err_t!(NotFound, "no such camera {uuid}"))?;
.ok_or_else(|| err!(NotFound, msg("no such camera {uuid}")))?;
stream_id = camera.streams[stream_type.index()]
.ok_or_else(|| format_err_t!(NotFound, "no such stream {uuid}/{stream_type}"))?;
.ok_or_else(|| err!(NotFound, msg("no such stream {uuid}/{stream_type}")))?;
db.watch_live(
stream_id,
Box::new(move |l| sub_tx.unbounded_send(l).is_ok()),
@ -116,7 +116,7 @@ impl Service {
Ok(())
})?;
}
let row = row.ok_or_else(|| format_err_t!(Internal, "unable to find {:?}", live))?;
let row = row.ok_or_else(|| err!(Internal, msg("unable to find {live:?}")))?;
use http_serve::Entity;
let mp4 = builder.build(self.db.clone(), self.dirs_by_stream_id.clone())?;
let mut hdrs = header::HeaderMap::new();

View File

@ -17,10 +17,10 @@ use self::path::Path;
use crate::body::Body;
use crate::json;
use crate::mp4;
use base::format_err_t;
use base::err;
use base::Error;
use base::ResultExt;
use base::{bail_t, clock::Clocks, ErrorKind};
use base::{bail, clock::Clocks, ErrorKind};
use core::borrow::Borrow;
use core::str::FromStr;
use db::dir::SampleFileDir;
@ -134,24 +134,27 @@ async fn extract_json_body(req: &mut Request<hyper::Body>) -> Result<Bytes, base
_ => false,
};
if !correct_mime_type {
bail_t!(InvalidArgument, "expected application/json request body");
bail!(
InvalidArgument,
msg("expected application/json request body")
);
}
let b = ::std::mem::replace(req.body_mut(), hyper::Body::empty());
hyper::body::to_bytes(b)
.await
.map_err(|e| format_err_t!(Unavailable, "unable to read request body: {}", e))
.map_err(|e| err!(Unavailable, msg("unable to read request body"), source(e)))
}
fn parse_json_body<'a, T: serde::Deserialize<'a>>(body: &'a [u8]) -> Result<T, base::Error> {
serde_json::from_slice(body)
.map_err(|e| format_err_t!(InvalidArgument, "bad request body: {e}"))
.map_err(|e| err!(InvalidArgument, msg("bad request body"), source(e)))
}
fn require_csrf_if_session(caller: &Caller, csrf: Option<&str>) -> Result<(), base::Error> {
match (csrf, caller.user.as_ref().and_then(|u| u.session.as_ref())) {
(None, Some(_)) => bail_t!(Unauthenticated, "csrf must be supplied"),
(None, Some(_)) => bail!(Unauthenticated, msg("csrf must be supplied")),
(Some(csrf), Some(session)) if !csrf_matches(csrf, session.csrf) => {
bail_t!(Unauthenticated, "incorrect csrf");
bail!(Unauthenticated, msg("incorrect csrf"));
}
(_, _) => Ok(()),
}
@ -292,7 +295,7 @@ impl Service {
Path::StreamLiveMp4Segments(..) => {
unreachable!("StreamLiveMp4Segments should have already been handled")
}
Path::NotFound => return Err(format_err_t!(NotFound, "path not understood")),
Path::NotFound => return Err(err!(NotFound, msg("path not understood"))),
Path::Login => (
CacheControl::PrivateDynamic,
self.login(req, authreq).await?,
@ -422,7 +425,7 @@ impl Service {
}
if camera_configs && !caller.permissions.read_camera_configs {
bail_t!(PermissionDenied, "read_camera_configs required");
bail!(PermissionDenied, msg("read_camera_configs required"));
}
let db = self.db.lock();
@ -444,7 +447,7 @@ impl Service {
let db = self.db.lock();
let camera = db
.get_camera(uuid)
.ok_or_else(|| format_err_t!(NotFound, "no such camera {uuid}"))?;
.ok_or_else(|| err!(NotFound, msg("no such camera {uuid}")))?;
serve_json(
req,
&json::Camera::wrap(camera, &db, true, false).err_kind(ErrorKind::Internal)?,
@ -466,18 +469,18 @@ impl Service {
match key {
"startTime90k" => {
time.start = recording::Time::parse(value).map_err(|_| {
format_err_t!(InvalidArgument, "unparseable startTime90k")
err!(InvalidArgument, msg("unparseable startTime90k"))
})?
}
"endTime90k" => {
time.end = recording::Time::parse(value).map_err(|_| {
format_err_t!(InvalidArgument, "unparseable endTime90k")
})?
time.end = recording::Time::parse(value)
.map_err(|_| err!(InvalidArgument, msg("unparseable endTime90k")))?
}
"split90k" => {
split = recording::Duration(i64::from_str(value).map_err(|_| {
format_err_t!(InvalidArgument, "unparseable split90k")
})?)
split =
recording::Duration(i64::from_str(value).map_err(|_| {
err!(InvalidArgument, msg("unparseable split90k"))
})?)
}
_ => {}
}
@ -491,10 +494,10 @@ impl Service {
video_sample_entries: (&db, Vec::new()),
};
let Some(camera) = db.get_camera(uuid) else {
bail_t!(NotFound, "no such camera {uuid}");
bail!(NotFound, msg("no such camera {uuid}"));
};
let Some(stream_id) = camera.streams[type_.index()] else {
bail_t!(NotFound, "no such stream {uuid}/{type_}");
bail!(NotFound, msg("no such stream {uuid}/{type_}"));
};
db.list_aggregated_recordings(stream_id, r, split, &mut |row| {
let end = row.ids.end - 1; // in api, ids are inclusive.
@ -532,7 +535,7 @@ impl Service {
let mut builder = mp4::FileBuilder::new(mp4::Type::InitSegment);
let db = self.db.lock();
let Some(ent) = db.video_sample_entries_by_id().get(&id) else {
bail_t!(NotFound, "no such init segment");
bail!(NotFound, msg("no such init segment"));
};
builder.append_video_sample_entry(ent.clone());
let mp4 = builder
@ -672,7 +675,7 @@ impl Service {
});
}
bail_t!(Unauthenticated, "unauthenticated");
bail!(Unauthenticated);
}
}

View File

@ -4,7 +4,7 @@
//! Session management: `/api/login` and `/api/logout`.
use base::{bail_t, ErrorKind, ResultExt};
use base::{bail, ErrorKind, ResultExt};
use db::auth;
use http::{header, HeaderValue, Method, Request, Response, StatusCode};
use memchr::memchr;
@ -32,7 +32,7 @@ impl Service {
let r = extract_json_body(&mut req).await?;
let r: json::LoginRequest = parse_json_body(&r)?;
let Some(host) = req.headers().get(header::HOST) else {
bail_t!(InvalidArgument, "missing Host header");
bail!(InvalidArgument, msg("missing Host header"));
};
let host = host.as_bytes();
let domain = match memchr(b':', host) {
@ -94,17 +94,17 @@ impl Service {
match l.authenticate_session(authreq.clone(), &hash) {
Ok((s, _)) => {
if !csrf_matches(r.csrf, s.csrf()) {
bail_t!(InvalidArgument, "logout with incorret csrf token");
bail!(InvalidArgument, msg("logout with incorrect csrf token"));
}
info!("revoking session");
l.revoke_session(auth::RevocationReason::LoggedOut, None, authreq, &hash)
.err_kind(ErrorKind::Internal)?;
}
Err(e) => {
Err(err) => {
// TODO: distinguish "no such session", "session is no longer valid", and
// "user ... is disabled" (which are all client error / bad state) from database
// errors.
warn!("logout failed: {}", e);
warn!(err = %err.chain(), "logout failed");
}
}

View File

@ -4,7 +4,7 @@
//! `/api/signals` handling.
use base::{bail_t, clock::Clocks, format_err_t};
use base::{bail, clock::Clocks, err};
use db::recording;
use http::{Method, Request, StatusCode};
use url::form_urlencoded;
@ -36,7 +36,7 @@ impl Service {
async fn post_signals(&self, mut req: Request<hyper::Body>, caller: Caller) -> ResponseResult {
if !caller.permissions.update_signals {
bail_t!(PermissionDenied, "update_signals required");
bail!(PermissionDenied, msg("update_signals required"));
}
let r = extract_json_body(&mut req).await?;
let r: json::PostSignalsRequest = parse_json_body(&r)?;
@ -62,13 +62,12 @@ impl Service {
let (key, value) = (key.borrow(), value.borrow());
match key {
"startTime90k" => {
time.start = recording::Time::parse(value).map_err(|_| {
format_err_t!(InvalidArgument, "unparseable startTime90k")
})?
time.start = recording::Time::parse(value)
.map_err(|_| err!(InvalidArgument, msg("unparseable startTime90k")))?
}
"endTime90k" => {
time.end = recording::Time::parse(value)
.map_err(|_| format_err_t!(InvalidArgument, "unparseable endTime90k"))?
.map_err(|_| err!(InvalidArgument, msg("unparseable endTime90k")))?
}
_ => {}
}

View File

@ -4,7 +4,7 @@
//! Static file serving.
use base::{bail_t, format_err_t, Error, ErrorKind, ResultExt};
use base::{bail, err, Error, ErrorKind, ResultExt};
use http::{header, HeaderValue, Request};
use super::{ResponseResult, Service};
@ -13,15 +13,15 @@ impl Service {
/// Serves a static file if possible.
pub(super) async fn static_file(&self, req: Request<hyper::Body>) -> ResponseResult {
let Some(dir) = self.ui_dir.clone() else {
bail_t!(NotFound, "ui dir not configured or missing; no static files available.")
bail!(NotFound, msg("ui dir not configured or missing; no static files available"))
};
let Some(static_req) = StaticFileRequest::parse(req.uri().path()) else {
bail_t!(NotFound, "static file not found");
bail!(NotFound, msg("static file not found"));
};
let f = dir.get(static_req.path, req.headers());
let node = f.await.map_err(|e| {
if e.kind() == std::io::ErrorKind::NotFound {
format_err_t!(NotFound, "no such static file")
err!(NotFound, msg("no such static file"))
} else {
Error::wrap(ErrorKind::Internal, e)
}

View File

@ -4,7 +4,7 @@
//! User management: `/api/users/*`.
use base::{bail_t, format_err_t};
use base::{bail, err};
use http::{Method, Request, StatusCode};
use crate::json::{self, PutUsersResponse, UserSubset, UserWithId};
@ -28,7 +28,7 @@ impl Service {
async fn get_users(&self, req: Request<hyper::Body>, caller: Caller) -> ResponseResult {
if !caller.permissions.admin_users {
bail_t!(Unauthenticated, "must have admin_users permission");
bail!(Unauthenticated, msg("must have admin_users permission"));
}
let l = self.db.lock();
let users = l
@ -44,7 +44,7 @@ impl Service {
async fn post_users(&self, mut req: Request<hyper::Body>, caller: Caller) -> ResponseResult {
if !caller.permissions.admin_users {
bail_t!(Unauthenticated, "must have admin_users permission");
bail!(Unauthenticated, msg("must have admin_users permission"));
}
let r = extract_json_body(&mut req).await?;
let mut r: json::PutUsers = parse_json_body(&r)?;
@ -53,7 +53,7 @@ impl Service {
.user
.username
.take()
.ok_or_else(|| format_err_t!(InvalidArgument, "username must be specified"))?;
.ok_or_else(|| err!(InvalidArgument, msg("username must be specified")))?;
let mut change = db::UserChange::add_user(username.to_owned());
if let Some(Some(pwd)) = r.user.password.take() {
change.set_password(pwd.to_owned());
@ -65,7 +65,7 @@ impl Service {
change.permissions = permissions.into();
}
if r.user != Default::default() {
bail_t!(Unimplemented, "unsupported user fields: {:#?}", r);
bail!(Unimplemented, msg("unsupported user fields: {r:#?}"));
}
let mut l = self.db.lock();
let user = l.apply_user_change(change)?;
@ -95,7 +95,7 @@ impl Service {
let user = db
.users_by_id()
.get(&id)
.ok_or_else(|| format_err_t!(NotFound, "can't find requested user"))?;
.ok_or_else(|| err!(NotFound, msg("can't find requested user")))?;
serve_json(&req, &UserSubset::from(user))
}
@ -106,7 +106,7 @@ impl Service {
id: i32,
) -> ResponseResult {
if !caller.permissions.admin_users {
bail_t!(Unauthenticated, "must have admin_users permission");
bail!(Unauthenticated, msg("must have admin_users permission"));
}
let r = extract_json_body(&mut req).await?;
let r: json::DeleteUser = parse_json_body(&r)?;
@ -128,45 +128,44 @@ impl Service {
let mut db = self.db.lock();
let user = db
.get_user_by_id_mut(id)
.ok_or_else(|| format_err_t!(NotFound, "can't find requested user"))?;
.ok_or_else(|| err!(NotFound, msg("can't find requested user")))?;
if r.update.as_ref().and_then(|u| u.password).is_some()
&& r.precondition.as_ref().and_then(|p| p.password).is_none()
&& !caller.permissions.admin_users
{
bail_t!(
bail!(
Unauthenticated,
"to change password, must supply previous password or have admin_users permission"
msg("to change password, must supply previous password or have admin_users permission")
);
}
require_csrf_if_session(&caller, r.csrf)?;
if let Some(mut precondition) = r.precondition {
if matches!(precondition.disabled.take(), Some(d) if d != user.config.disabled) {
bail_t!(FailedPrecondition, "disabled mismatch");
bail!(FailedPrecondition, msg("disabled mismatch"));
}
if matches!(precondition.username.take(), Some(n) if n != user.username) {
bail_t!(FailedPrecondition, "username mismatch");
bail!(FailedPrecondition, msg("username mismatch"));
}
if matches!(precondition.preferences.take(), Some(ref p) if p != &user.config.preferences)
{
bail_t!(FailedPrecondition, "preferences mismatch");
bail!(FailedPrecondition, msg("preferences mismatch"));
}
if let Some(p) = precondition.password.take() {
if !user.check_password(p)? {
bail_t!(FailedPrecondition, "password mismatch"); // or Unauthenticated?
bail!(FailedPrecondition, msg("password mismatch")); // or Unauthenticated?
}
}
if let Some(p) = precondition.permissions.take() {
if user.permissions != db::Permissions::from(p) {
bail_t!(FailedPrecondition, "permissions mismatch");
bail!(FailedPrecondition, msg("permissions mismatch"));
}
}
// Safety valve in case something is added to UserSubset and forgotten here.
if precondition != Default::default() {
bail_t!(
bail!(
Unimplemented,
"preconditions not supported: {:#?}",
&precondition
msg("preconditions not supported: {precondition:#?}"),
);
}
}
@ -185,7 +184,7 @@ impl Service {
// Requires admin_users if there's anything else.
if update != Default::default() && !caller.permissions.admin_users {
bail_t!(Unauthenticated, "must have admin_users permission");
bail!(Unauthenticated, msg("must have admin_users permission"));
}
if let Some(d) = update.disabled.take() {
change.config.disabled = d;
@ -199,7 +198,7 @@ impl Service {
// Safety valve in case something is added to UserSubset and forgotten here.
if update != Default::default() {
bail_t!(Unimplemented, "updates not supported: {:#?}", &update);
bail!(Unimplemented, msg("updates not supported: {update:#?}"));
}
// Then apply all together.
@ -211,9 +210,9 @@ impl Service {
fn require_same_or_admin(caller: &Caller, id: i32) -> Result<(), base::Error> {
if caller.user.as_ref().map(|u| u.id) != Some(id) && !caller.permissions.admin_users {
bail_t!(
bail!(
Unauthenticated,
"must be authenticated as supplied user or have admin_users permission"
msg("must be authenticated as supplied user or have admin_users permission"),
);
}
Ok(())

View File

@ -4,7 +4,7 @@
//! `/view.mp4` and `/view.m4s` handling.
use base::{bail_t, format_err_t};
use base::{bail, err};
use db::recording::{self, rescale};
use http::{Request, StatusCode};
use nom::bytes::complete::{tag, take_while1};
@ -36,17 +36,17 @@ impl Service {
debug: bool,
) -> ResponseResult {
if !caller.permissions.view_video {
bail_t!(PermissionDenied, "view_video required");
bail!(PermissionDenied, msg("view_video required"));
}
let (stream_id, camera_name);
{
let db = self.db.lock();
let camera = db
.get_camera(uuid)
.ok_or_else(|| format_err_t!(NotFound, "no such camera {uuid}"))?;
.ok_or_else(|| err!(NotFound, msg("no such camera {uuid}")))?;
camera_name = camera.short_name.clone();
stream_id = camera.streams[stream_type.index()]
.ok_or_else(|| format_err_t!(NotFound, "no such stream {uuid}/{stream_type}"))?;
.ok_or_else(|| err!(NotFound, msg("no such stream {uuid}/{stream_type}")))?;
};
let mut start_time_for_filename = None;
let mut builder = mp4::FileBuilder::new(mp4_type);
@ -56,7 +56,7 @@ impl Service {
match key {
"s" => {
let s = Segments::from_str(value).map_err(|()| {
format_err_t!(InvalidArgument, "invalid s parameter: {value}")
err!(InvalidArgument, msg("invalid s parameter: {value}"))
})?;
trace!("stream_view_mp4: appending s={:?}", s);
let mut est_segments = usize::try_from(s.ids.end - s.ids.start).unwrap();
@ -81,12 +81,14 @@ impl Service {
if let Some(o) = s.open_id {
if r.open_id != o {
bail_t!(
bail!(
NotFound,
"recording {} has open id {}, requested {}",
r.id,
r.open_id,
o
msg(
"recording {} has open id {}, requested {}",
r.id,
r.open_id,
o,
),
);
}
}
@ -94,14 +96,15 @@ impl Service {
// Check for missing recordings.
match prev {
None if recording_id == s.ids.start => {}
None => bail_t!(
None => bail!(
NotFound,
"no such recording {}/{}",
stream_id,
s.ids.start
msg("no such recording {}/{}", stream_id, s.ids.start),
),
Some(id) if r.id.recording() != id + 1 => {
bail_t!(NotFound, "no such recording {}/{}", stream_id, id + 1);
bail!(
NotFound,
msg("no such recording {}/{}", stream_id, id + 1)
);
}
_ => {}
};
@ -144,35 +147,30 @@ impl Service {
// Check for missing recordings.
match prev {
Some(id) if s.ids.end != id + 1 => {
bail_t!(
bail!(
NotFound,
"no such recording {}/{}",
stream_id,
s.ids.end - 1
msg("no such recording {}/{}", stream_id, s.ids.end - 1),
);
}
None => {
bail_t!(
bail!(
NotFound,
"no such recording {}/{}",
stream_id,
s.ids.start
msg("no such recording {}/{}", stream_id, s.ids.start),
);
}
_ => {}
};
if let Some(end) = s.end_time {
if end > cur_off {
bail_t!(
bail!(
InvalidArgument,
"end time {} is beyond specified recordings",
end
msg("end time {end} is beyond specified recordings"),
);
}
}
}
"ts" => builder.include_timestamp_subtitle_track(value == "true")?,
_ => bail_t!(InvalidArgument, "parameter {key} not understood"),
_ => bail!(InvalidArgument, msg("parameter {key} not understood")),
}
}
}

View File

@ -8,7 +8,7 @@
use std::pin::Pin;
use crate::body::Body;
use base::{bail_t, format_err_t};
use base::{bail, err};
use futures::{Future, SinkExt};
use http::{header, Request, Response};
use tokio_tungstenite::{tungstenite, WebSocketStream};
@ -37,7 +37,7 @@ where
// Otherwise, upgrade and handle the rest in a separate task.
let response =
tungstenite::handshake::server::create_response_with_body(&req, hyper::Body::empty)
.map_err(|e| format_err_t!(InvalidArgument, "{}", e.to_string()))?;
.map_err(|e| err!(InvalidArgument, source(e)))?;
let (parts, _) = response.into_parts();
let span = tracing::info_span!("websocket");
tokio::spawn(
@ -84,11 +84,11 @@ fn check_origin(headers: &header::HeaderMap) -> Result<(), base::Error> {
Some(o) => o,
};
let Some(host_hdr) = headers.get(header::HOST) else {
bail_t!(InvalidArgument, "missing Host header");
bail!(InvalidArgument, msg("missing Host header"));
};
let host_str = host_hdr
.to_str()
.map_err(|_| format_err_t!(InvalidArgument, "bad Host header"))?;
.map_err(|_| err!(InvalidArgument, msg("bad Host header")))?;
// Currently this ignores the port number. This is easiest and I think matches the browser's
// rules for when it sends a cookie, so it probably doesn't cause great security problems.
@ -100,16 +100,16 @@ fn check_origin(headers: &header::HeaderMap) -> Result<(), base::Error> {
.to_str()
.ok()
.and_then(|o| url::Url::parse(o).ok())
.ok_or_else(|| format_err_t!(InvalidArgument, "bad Origin header"))?;
.ok_or_else(|| err!(InvalidArgument, msg("bad Origin header")))?;
let origin_host = origin_url
.host_str()
.ok_or_else(|| format_err_t!(InvalidArgument, "bad Origin header"))?;
.ok_or_else(|| err!(InvalidArgument, msg("bad Origin header")))?;
if host != origin_host {
bail_t!(
bail!(
PermissionDenied,
"cross-origin request forbidden (request host {:?}, origin {:?})",
host_hdr,
origin_hdr
msg(
"cross-origin request forbidden (request host {host_hdr:?}, origin {origin_hdr:?})"
),
);
}
Ok(())