replace homegrown Error with failure crate

This reduces boilerplate, making it a bit easier for me to split the db stuff
out into its own crate.
This commit is contained in:
Scott Lamb 2018-02-20 22:46:14 -08:00
parent 253f3de399
commit d84e754b2a
26 changed files with 247 additions and 387 deletions

61
Cargo.lock generated
View File

@ -11,6 +11,27 @@ dependencies = [
"memchr 2.0.1 (registry+https://github.com/rust-lang/crates.io-index)", "memchr 2.0.1 (registry+https://github.com/rust-lang/crates.io-index)",
] ]
[[package]]
name = "backtrace"
version = "0.3.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"backtrace-sys 0.1.16 (registry+https://github.com/rust-lang/crates.io-index)",
"cfg-if 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)",
"libc 0.2.36 (registry+https://github.com/rust-lang/crates.io-index)",
"rustc-demangle 0.1.6 (registry+https://github.com/rust-lang/crates.io-index)",
"winapi 0.3.4 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "backtrace-sys"
version = "0.1.16"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"cc 1.0.4 (registry+https://github.com/rust-lang/crates.io-index)",
"libc 0.2.36 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]] [[package]]
name = "base64" name = "base64"
version = "0.9.0" version = "0.9.0"
@ -125,6 +146,25 @@ name = "dtoa"
version = "0.4.2" version = "0.4.2"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
[[package]]
name = "failure"
version = "0.1.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"backtrace 0.3.5 (registry+https://github.com/rust-lang/crates.io-index)",
"failure_derive 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "failure_derive"
version = "0.1.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"quote 0.3.15 (registry+https://github.com/rust-lang/crates.io-index)",
"syn 0.11.11 (registry+https://github.com/rust-lang/crates.io-index)",
"synstructure 0.6.1 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]] [[package]]
name = "flate2" name = "flate2"
version = "1.0.1" version = "1.0.1"
@ -466,6 +506,7 @@ dependencies = [
"byteorder 1.2.1 (registry+https://github.com/rust-lang/crates.io-index)", "byteorder 1.2.1 (registry+https://github.com/rust-lang/crates.io-index)",
"cursive 0.7.5 (registry+https://github.com/rust-lang/crates.io-index)", "cursive 0.7.5 (registry+https://github.com/rust-lang/crates.io-index)",
"docopt 0.8.3 (registry+https://github.com/rust-lang/crates.io-index)", "docopt 0.8.3 (registry+https://github.com/rust-lang/crates.io-index)",
"failure 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)",
"fnv 1.0.6 (registry+https://github.com/rust-lang/crates.io-index)", "fnv 1.0.6 (registry+https://github.com/rust-lang/crates.io-index)",
"futures 0.1.18 (registry+https://github.com/rust-lang/crates.io-index)", "futures 0.1.18 (registry+https://github.com/rust-lang/crates.io-index)",
"futures-cpupool 0.1.8 (registry+https://github.com/rust-lang/crates.io-index)", "futures-cpupool 0.1.8 (registry+https://github.com/rust-lang/crates.io-index)",
@ -834,6 +875,11 @@ dependencies = [
"time 0.1.39 (registry+https://github.com/rust-lang/crates.io-index)", "time 0.1.39 (registry+https://github.com/rust-lang/crates.io-index)",
] ]
[[package]]
name = "rustc-demangle"
version = "0.1.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
[[package]] [[package]]
name = "rustc-serialize" name = "rustc-serialize"
version = "0.3.24" version = "0.3.24"
@ -977,6 +1023,15 @@ dependencies = [
"unicode-xid 0.0.4 (registry+https://github.com/rust-lang/crates.io-index)", "unicode-xid 0.0.4 (registry+https://github.com/rust-lang/crates.io-index)",
] ]
[[package]]
name = "synstructure"
version = "0.6.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"quote 0.3.15 (registry+https://github.com/rust-lang/crates.io-index)",
"syn 0.11.11 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]] [[package]]
name = "take" name = "take"
version = "0.1.0" version = "0.1.0"
@ -1225,6 +1280,8 @@ dependencies = [
[metadata] [metadata]
"checksum adler32 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)" = "6cbd0b9af8587c72beadc9f72d35b9fbb070982c9e6203e46e93f10df25f8f45" "checksum adler32 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)" = "6cbd0b9af8587c72beadc9f72d35b9fbb070982c9e6203e46e93f10df25f8f45"
"checksum aho-corasick 0.6.4 (registry+https://github.com/rust-lang/crates.io-index)" = "d6531d44de723825aa81398a6415283229725a00fa30713812ab9323faa82fc4" "checksum aho-corasick 0.6.4 (registry+https://github.com/rust-lang/crates.io-index)" = "d6531d44de723825aa81398a6415283229725a00fa30713812ab9323faa82fc4"
"checksum backtrace 0.3.5 (registry+https://github.com/rust-lang/crates.io-index)" = "ebbbf59b1c43eefa8c3ede390fcc36820b4999f7914104015be25025e0d62af2"
"checksum backtrace-sys 0.1.16 (registry+https://github.com/rust-lang/crates.io-index)" = "44585761d6161b0f57afc49482ab6bd067e4edef48c12a152c237eb0203f7661"
"checksum base64 0.9.0 (registry+https://github.com/rust-lang/crates.io-index)" = "229d032f1a99302697f10b27167ae6d03d49d032e6a8e2550e8d3fc13356d2b4" "checksum base64 0.9.0 (registry+https://github.com/rust-lang/crates.io-index)" = "229d032f1a99302697f10b27167ae6d03d49d032e6a8e2550e8d3fc13356d2b4"
"checksum bitflags 0.9.1 (registry+https://github.com/rust-lang/crates.io-index)" = "4efd02e230a02e18f92fc2735f44597385ed02ad8f831e7c1c1156ee5e1ab3a5" "checksum bitflags 0.9.1 (registry+https://github.com/rust-lang/crates.io-index)" = "4efd02e230a02e18f92fc2735f44597385ed02ad8f831e7c1c1156ee5e1ab3a5"
"checksum bitflags 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)" = "b3c30d3802dfb7281680d6285f2ccdaa8c2d8fee41f93805dba5c4cf50dc23cf" "checksum bitflags 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)" = "b3c30d3802dfb7281680d6285f2ccdaa8c2d8fee41f93805dba5c4cf50dc23cf"
@ -1240,6 +1297,8 @@ dependencies = [
"checksum cursive 0.7.5 (registry+https://github.com/rust-lang/crates.io-index)" = "82b96a092541def4e42095b3201a5b4111971c551e579c091b3f121a620fe12e" "checksum cursive 0.7.5 (registry+https://github.com/rust-lang/crates.io-index)" = "82b96a092541def4e42095b3201a5b4111971c551e579c091b3f121a620fe12e"
"checksum docopt 0.8.3 (registry+https://github.com/rust-lang/crates.io-index)" = "d8acd393692c503b168471874953a2531df0e9ab77d0b6bbc582395743300a4a" "checksum docopt 0.8.3 (registry+https://github.com/rust-lang/crates.io-index)" = "d8acd393692c503b168471874953a2531df0e9ab77d0b6bbc582395743300a4a"
"checksum dtoa 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)" = "09c3753c3db574d215cba4ea76018483895d7bff25a31b49ba45db21c48e50ab" "checksum dtoa 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)" = "09c3753c3db574d215cba4ea76018483895d7bff25a31b49ba45db21c48e50ab"
"checksum failure 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "934799b6c1de475a012a02dab0ace1ace43789ee4b99bcfbf1a2e3e8ced5de82"
"checksum failure_derive 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "c7cdda555bb90c9bb67a3b670a0f42de8e73f5981524123ad8578aafec8ddb8b"
"checksum flate2 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)" = "9fac2277e84e5e858483756647a9d0aa8d9a2b7cba517fd84325a0aaa69a0909" "checksum flate2 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)" = "9fac2277e84e5e858483756647a9d0aa8d9a2b7cba517fd84325a0aaa69a0909"
"checksum fnv 1.0.6 (registry+https://github.com/rust-lang/crates.io-index)" = "2fad85553e09a6f881f739c29f0b00b0f01357c743266d478b68951ce23285f3" "checksum fnv 1.0.6 (registry+https://github.com/rust-lang/crates.io-index)" = "2fad85553e09a6f881f739c29f0b00b0f01357c743266d478b68951ce23285f3"
"checksum foreign-types 0.3.2 (registry+https://github.com/rust-lang/crates.io-index)" = "f6f339eb8adc052cd2ca78910fda869aefa38d22d5cb648e6485e4d3fc06f3b1" "checksum foreign-types 0.3.2 (registry+https://github.com/rust-lang/crates.io-index)" = "f6f339eb8adc052cd2ca78910fda869aefa38d22d5cb648e6485e4d3fc06f3b1"
@ -1315,6 +1374,7 @@ dependencies = [
"checksum remove_dir_all 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)" = "b5d2f806b0fcdabd98acd380dc8daef485e22bcb7cddc811d1337967f2528cf5" "checksum remove_dir_all 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)" = "b5d2f806b0fcdabd98acd380dc8daef485e22bcb7cddc811d1337967f2528cf5"
"checksum reqwest 0.8.4 (registry+https://github.com/rust-lang/crates.io-index)" = "449c45f593ce9af9417c91e22f274fb8cea013bcf3d37ec1b5fb534b623bc708" "checksum reqwest 0.8.4 (registry+https://github.com/rust-lang/crates.io-index)" = "449c45f593ce9af9417c91e22f274fb8cea013bcf3d37ec1b5fb534b623bc708"
"checksum rusqlite 0.13.0 (registry+https://github.com/rust-lang/crates.io-index)" = "d9409d78a5a9646685688266e1833df8f08b71ffcae1b5db6c1bfb5970d8a80f" "checksum rusqlite 0.13.0 (registry+https://github.com/rust-lang/crates.io-index)" = "d9409d78a5a9646685688266e1833df8f08b71ffcae1b5db6c1bfb5970d8a80f"
"checksum rustc-demangle 0.1.6 (registry+https://github.com/rust-lang/crates.io-index)" = "f312457f8a4fa31d3581a6f423a70d6c33a10b95291985df55f1ff670ec10ce8"
"checksum rustc-serialize 0.3.24 (registry+https://github.com/rust-lang/crates.io-index)" = "dcf128d1287d2ea9d80910b5f1120d0b8eede3fbf1abe91c40d39ea7d51e6fda" "checksum rustc-serialize 0.3.24 (registry+https://github.com/rust-lang/crates.io-index)" = "dcf128d1287d2ea9d80910b5f1120d0b8eede3fbf1abe91c40d39ea7d51e6fda"
"checksum safemem 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "e27a8b19b835f7aea908818e871f5cc3a5a186550c30773be987e155e8163d8f" "checksum safemem 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "e27a8b19b835f7aea908818e871f5cc3a5a186550c30773be987e155e8163d8f"
"checksum schannel 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)" = "acece75e0f987c48863a6c792ec8b7d6c4177d4a027f8ccc72f849794f437016" "checksum schannel 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)" = "acece75e0f987c48863a6c792ec8b7d6c4177d4a027f8ccc72f849794f437016"
@ -1335,6 +1395,7 @@ dependencies = [
"checksum strsim 0.6.0 (registry+https://github.com/rust-lang/crates.io-index)" = "b4d15c810519a91cf877e7e36e63fe068815c678181439f2f29e2562147c3694" "checksum strsim 0.6.0 (registry+https://github.com/rust-lang/crates.io-index)" = "b4d15c810519a91cf877e7e36e63fe068815c678181439f2f29e2562147c3694"
"checksum syn 0.11.11 (registry+https://github.com/rust-lang/crates.io-index)" = "d3b891b9015c88c576343b9b3e41c2c11a51c219ef067b264bd9c8aa9b441dad" "checksum syn 0.11.11 (registry+https://github.com/rust-lang/crates.io-index)" = "d3b891b9015c88c576343b9b3e41c2c11a51c219ef067b264bd9c8aa9b441dad"
"checksum synom 0.11.3 (registry+https://github.com/rust-lang/crates.io-index)" = "a393066ed9010ebaed60b9eafa373d4b1baac186dd7e008555b0f702b51945b6" "checksum synom 0.11.3 (registry+https://github.com/rust-lang/crates.io-index)" = "a393066ed9010ebaed60b9eafa373d4b1baac186dd7e008555b0f702b51945b6"
"checksum synstructure 0.6.1 (registry+https://github.com/rust-lang/crates.io-index)" = "3a761d12e6d8dcb4dcf952a7a89b475e3a9d69e4a69307e01a470977642914bd"
"checksum take 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "b157868d8ac1f56b64604539990685fa7611d8fa9e5476cf0c02cf34d32917c5" "checksum take 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "b157868d8ac1f56b64604539990685fa7611d8fa9e5476cf0c02cf34d32917c5"
"checksum tempdir 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)" = "f73eebdb68c14bcb24aef74ea96079830e7fa7b31a6106e42ea7ee887c1e134e" "checksum tempdir 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)" = "f73eebdb68c14bcb24aef74ea96079830e7fa7b31a6106e42ea7ee887c1e134e"
"checksum thread_local 0.3.5 (registry+https://github.com/rust-lang/crates.io-index)" = "279ef31c19ededf577bfd12dfae728040a21f635b06a24cd670ff510edd38963" "checksum thread_local 0.3.5 (registry+https://github.com/rust-lang/crates.io-index)" = "279ef31c19ededf577bfd12dfae728040a21f635b06a24cd670ff510edd38963"

View File

@ -16,6 +16,7 @@ bundled = ["rusqlite/bundled"]
[dependencies] [dependencies]
byteorder = "1.0" byteorder = "1.0"
docopt = "0.8" docopt = "0.8"
failure = "0.1.1"
futures = "0.1" futures = "0.1"
futures-cpupool = "0.1" futures-cpupool = "0.1"
fnv = "1.0" fnv = "1.0"

View File

@ -31,7 +31,7 @@
//! Subcommand to check the database and sample file dir for errors. //! Subcommand to check the database and sample file dir for errors.
use db; use db;
use error::Error; use failure::Error;
use recording; use recording;
use std::fs; use std::fs;
use uuid::Uuid; use uuid::Uuid;
@ -153,9 +153,7 @@ pub fn run() -> Result<(), Error> {
error!("composite id {} has recording_playback row but no recording row", id2); error!("composite id {} has recording_playback row but no recording row", id2);
continue; continue;
}, },
(None, None) => { (None, None) => bail!("outer join returned fully empty row"),
return Err(Error::new("outer join returned fully empty row".to_owned()));
},
}; };
let row_summary = RecordingSummary{ let row_summary = RecordingSummary{
flags: row.get_checked(1)?, flags: row.get_checked(1)?,

View File

@ -35,7 +35,7 @@ use self::cursive::traits::{Boxable, Identifiable, Finder};
use self::cursive::views; use self::cursive::views;
use db; use db;
use dir; use dir;
use error::Error; use failure::Error;
use std::collections::BTreeMap; use std::collections::BTreeMap;
use std::sync::Arc; use std::sync::Arc;
use stream::{self, Opener, Stream}; use stream::{self, Opener, Stream};

View File

@ -35,7 +35,7 @@ use self::cursive::traits::{Boxable, Identifiable};
use self::cursive::views; use self::cursive::views;
use db; use db;
use dir; use dir;
use error::Error; use failure::Error;
use std::cell::RefCell; use std::cell::RefCell;
use std::collections::BTreeMap; use std::collections::BTreeMap;
use std::rc::Rc; use std::rc::Rc;

View File

@ -38,7 +38,7 @@ extern crate cursive;
use self::cursive::Cursive; use self::cursive::Cursive;
use self::cursive::views; use self::cursive::views;
use db; use db;
use error::Error; use failure::Error;
use regex::Regex; use regex::Regex;
use std::sync::Arc; use std::sync::Arc;
use std::fmt::Write; use std::fmt::Write;

View File

@ -29,7 +29,7 @@
// along with this program. If not, see <http://www.gnu.org/licenses/>. // along with this program. If not, see <http://www.gnu.org/licenses/>.
use db; use db;
use error::Error; use failure::Error;
static USAGE: &'static str = r#" static USAGE: &'static str = r#"
Initializes a database. Initializes a database.

View File

@ -30,7 +30,7 @@
use dir; use dir;
use docopt; use docopt;
use error::Error; use failure::{Error, Fail};
use libc; use libc;
use rusqlite; use rusqlite;
use std::path::Path; use std::path::Path;
@ -78,10 +78,8 @@ fn open_conn(db_dir: &str, mode: OpenMode) -> Result<(dir::Fd, rusqlite::Connect
let dir = dir::Fd::open(None, db_dir, mode == OpenMode::Create)?; let dir = dir::Fd::open(None, db_dir, mode == OpenMode::Create)?;
let ro = mode == OpenMode::ReadOnly; let ro = mode == OpenMode::ReadOnly;
dir.lock(if ro { libc::LOCK_SH } else { libc::LOCK_EX } | libc::LOCK_NB) dir.lock(if ro { libc::LOCK_SH } else { libc::LOCK_EX } | libc::LOCK_NB)
.map_err(|e| Error{description: format!("db dir {:?} already in use; can't get {} lock", .map_err(|e| e.context(format!("db dir {:?} already in use; can't get {} lock",
db_dir, db_dir, if ro { "shared" } else { "exclusive" })))?;
if ro { "shared" } else { "exclusive" }),
cause: Some(Box::new(e))})?;
let conn = rusqlite::Connection::open_with_flags( let conn = rusqlite::Connection::open_with_flags(
Path::new(&db_dir).join("db"), Path::new(&db_dir).join("db"),
match mode { match mode {

View File

@ -31,7 +31,7 @@
use clock; use clock;
use db; use db;
use dir; use dir;
use error::Error; use failure::Error;
use fnv::FnvHashMap; use fnv::FnvHashMap;
use futures::{Future, Stream}; use futures::{Future, Stream};
use std::sync::Arc; use std::sync::Arc;

View File

@ -28,7 +28,7 @@
// You should have received a copy of the GNU General Public License // You should have received a copy of the GNU General Public License
// along with this program. If not, see <http://www.gnu.org/licenses/>. // along with this program. If not, see <http://www.gnu.org/licenses/>.
use error::Error; use failure::Error;
use recording; use recording;
const USAGE: &'static str = r#" const USAGE: &'static str = r#"

View File

@ -33,7 +33,7 @@
/// See `guide/schema.md` for more information. /// See `guide/schema.md` for more information.
use db; use db;
use error::Error; use failure::Error;
use rusqlite; use rusqlite;
mod v0_to_v1; mod v0_to_v1;
@ -101,10 +101,10 @@ pub fn run() -> Result<(), Error> {
let old_ver = let old_ver =
conn.query_row("select max(id) from version", &[], |row| row.get_checked(0))??; conn.query_row("select max(id) from version", &[], |row| row.get_checked(0))??;
if old_ver > db::EXPECTED_VERSION { if old_ver > db::EXPECTED_VERSION {
return Err(Error::new(format!("Database is at version {}, later than expected {}", bail!("Database is at version {}, later than expected {}",
old_ver, db::EXPECTED_VERSION)))?; old_ver, db::EXPECTED_VERSION);
} else if old_ver < 0 { } else if old_ver < 0 {
return Err(Error::new(format!("Database is at negative version {}!", old_ver))); bail!("Database is at negative version {}!", old_ver);
} }
info!("Upgrading database from version {} to version {}...", old_ver, db::EXPECTED_VERSION); info!("Upgrading database from version {} to version {}...", old_ver, db::EXPECTED_VERSION);
set_journal_mode(&conn, &args.flag_preset_journal).unwrap(); set_journal_mode(&conn, &args.flag_preset_journal).unwrap();

View File

@ -31,7 +31,7 @@
/// Upgrades a version 0 schema to a version 1 schema. /// Upgrades a version 0 schema to a version 1 schema.
use db; use db;
use error::Error; use failure::Error;
use recording; use recording;
use rusqlite; use rusqlite;
use std::collections::HashMap; use std::collections::HashMap;

View File

@ -30,7 +30,7 @@
/// Upgrades a version 1 schema to a version 2 schema. /// Upgrades a version 1 schema to a version 2 schema.
use error::Error; use failure::Error;
use std::fs; use std::fs;
use rusqlite; use rusqlite;
use schema::DirMeta; use schema::DirMeta;
@ -45,8 +45,8 @@ pub fn new<'a>(args: &'a super::Args) -> Result<Box<super::Upgrader + 'a>, Error
let sample_file_path = let sample_file_path =
args.flag_sample_file_dir args.flag_sample_file_dir
.as_ref() .as_ref()
.ok_or_else(|| Error::new("--sample-file-dir required when upgrading from \ .ok_or_else(|| format_err!("--sample-file-dir required when upgrading from \
schema version 1 to 2.".to_owned()))?; schema version 1 to 2."))?;
Ok(Box::new(U { sample_file_path, dir_meta: None })) Ok(Box::new(U { sample_file_path, dir_meta: None }))
} }
@ -81,8 +81,7 @@ impl<'a> U<'a> {
let row = row?; let row = row?;
let uuid: ::db::FromSqlUuid = row.get_checked(0)?; let uuid: ::db::FromSqlUuid = row.get_checked(0)?;
if !files.contains(&uuid.0) { if !files.contains(&uuid.0) {
return Err(Error::new(format!("{} is missing from dir {}!", bail!("{} is missing from dir {}!", uuid.0, self.sample_file_path);
uuid.0, self.sample_file_path)));
} }
} }
Ok(()) Ok(())
@ -318,7 +317,7 @@ fn fix_video_sample_entry(tx: &rusqlite::Transaction) -> Result<(), Error> {
fn rfc6381_codec_from_sample_entry(sample_entry: &[u8]) -> Result<String, Error> { fn rfc6381_codec_from_sample_entry(sample_entry: &[u8]) -> Result<String, Error> {
if sample_entry.len() < 99 || &sample_entry[4..8] != b"avc1" || if sample_entry.len() < 99 || &sample_entry[4..8] != b"avc1" ||
&sample_entry[90..94] != b"avcC" { &sample_entry[90..94] != b"avcC" {
return Err(Error::new("not a valid AVCSampleEntry".to_owned())); bail!("not a valid AVCSampleEntry");
} }
let profile_idc = sample_entry[103]; let profile_idc = sample_entry[103];
let constraint_flags_byte = sample_entry[104]; let constraint_flags_byte = sample_entry[104];

View File

@ -32,7 +32,7 @@
use db::{self, FromSqlUuid}; use db::{self, FromSqlUuid};
use dir; use dir;
use error::Error; use failure::Error;
use libc; use libc;
use std::io::{self, Write}; use std::io::{self, Write};
use std::mem; use std::mem;

143
src/db.rs
View File

@ -52,7 +52,7 @@
//! SSD write cycles. //! SSD write cycles.
use dir; use dir;
use error::{Error, ResultExt}; use failure::Error;
use fnv::{self, FnvHashMap}; use fnv::{self, FnvHashMap};
use lru_cache::LruCache; use lru_cache::LruCache;
use openssl::hash; use openssl::hash;
@ -338,7 +338,7 @@ impl SampleFileDir {
pub fn get(&self) -> Result<Arc<dir::SampleFileDir>, Error> { pub fn get(&self) -> Result<Arc<dir::SampleFileDir>, Error> {
Ok(self.dir Ok(self.dir
.as_ref() .as_ref()
.ok_or_else(|| Error::new(format!("sample file dir {} is closed", self.id)))? .ok_or_else(|| format_err!("sample file dir {} is closed", self.id))?
.clone()) .clone())
} }
} }
@ -391,6 +391,12 @@ impl StreamType {
} }
} }
impl ::std::fmt::Display for StreamType {
fn fmt(&self, f: &mut ::std::fmt::Formatter) -> Result<(), ::std::fmt::Error> {
f.write_str(self.as_str())
}
}
pub const ALL_STREAM_TYPES: [StreamType; 2] = [StreamType::MAIN, StreamType::SUB]; pub const ALL_STREAM_TYPES: [StreamType; 2] = [StreamType::MAIN, StreamType::SUB];
#[derive(Clone, Debug)] #[derive(Clone, Debug)]
@ -661,19 +667,19 @@ impl<'a> Transaction<'a> {
for row in rows { for row in rows {
let changes = del1.execute_named(&[(":composite_id", &row.id.0)])?; let changes = del1.execute_named(&[(":composite_id", &row.id.0)])?;
if changes != 1 { if changes != 1 {
return Err(Error::new(format!("no such recording {}", row.id))); bail!("no such recording {}", row.id);
} }
let changes = del2.execute_named(&[(":composite_id", &row.id.0)])?; let changes = del2.execute_named(&[(":composite_id", &row.id.0)])?;
if changes != 1 { if changes != 1 {
return Err(Error::new(format!("no such recording_playback {}", row.id))); bail!("no such recording_playback {}", row.id);
} }
let sid = row.id.stream(); let sid = row.id.stream();
let did = self.state let did = self.state
.streams_by_id .streams_by_id
.get(&sid) .get(&sid)
.ok_or_else(|| Error::new(format!("no such stream {}", sid)))? .ok_or_else(|| format_err!("no such stream {}", sid))?
.sample_file_dir_id .sample_file_dir_id
.ok_or_else(|| Error::new(format!("stream {} has no dir", sid)))?; .ok_or_else(|| format_err!("stream {} has no dir", sid))?;
insert.execute_named(&[ insert.execute_named(&[
(":sample_file_dir_id", &did), (":sample_file_dir_id", &did),
(":composite_id", &row.id.0)], (":composite_id", &row.id.0)],
@ -695,7 +701,7 @@ impl<'a> Transaction<'a> {
for &id in ids { for &id in ids {
let changes = stmt.execute_named(&[(":composite_id", &id.0)])?; let changes = stmt.execute_named(&[(":composite_id", &id.0)])?;
if changes != 1 { if changes != 1 {
return Err(Error::new(format!("no garbage row for {}", id))); bail!("no garbage row for {}", id);
} }
} }
Ok(()) Ok(())
@ -706,13 +712,12 @@ impl<'a> Transaction<'a> {
self.check_must_rollback()?; self.check_must_rollback()?;
if r.time.end < r.time.start { if r.time.end < r.time.start {
return Err(Error::new(format!("end time {} must be >= start time {}", bail!("end time {} must be >= start time {}", r.time.end, r.time.start);
r.time.end, r.time.start)));
} }
// Check that the recording id is acceptable and do the insertion. // Check that the recording id is acceptable and do the insertion.
let stream = match self.state.streams_by_id.get(&r.id.stream()) { let stream = match self.state.streams_by_id.get(&r.id.stream()) {
None => return Err(Error::new(format!("no such stream id {}", r.id.stream()))), None => bail!("no such stream id {}", r.id.stream()),
Some(s) => s, Some(s) => s,
}; };
self.must_rollback = true; self.must_rollback = true;
@ -720,8 +725,7 @@ impl<'a> Transaction<'a> {
{ {
let next = m.new_next_recording_id.unwrap_or(stream.next_recording_id); let next = m.new_next_recording_id.unwrap_or(stream.next_recording_id);
if r.id.recording() < next { if r.id.recording() < next {
return Err(Error::new(format!("recording {} out of order; next id is {}!", bail!("recording {} out of order; next id is {}!", r.id, next);
r.id, next)));
} }
let mut stmt = self.tx.prepare_cached(INSERT_RECORDING_SQL)?; let mut stmt = self.tx.prepare_cached(INSERT_RECORDING_SQL)?;
stmt.execute_named(&[ stmt.execute_named(&[
@ -764,8 +768,7 @@ impl<'a> Transaction<'a> {
pub fn update_retention(&mut self, stream_id: i32, new_record: bool, new_limit: i64) pub fn update_retention(&mut self, stream_id: i32, new_record: bool, new_limit: i64)
-> Result<(), Error> { -> Result<(), Error> {
if new_limit < 0 { if new_limit < 0 {
return Err(Error::new(format!("can't set limit for stream {} to {}; must be >= 0", bail!("can't set limit for stream {} to {}; must be >= 0", stream_id, new_limit);
stream_id, new_limit)));
} }
self.check_must_rollback()?; self.check_must_rollback()?;
let mut stmt = self.tx.prepare_cached(r#" let mut stmt = self.tx.prepare_cached(r#"
@ -782,7 +785,7 @@ impl<'a> Transaction<'a> {
(":id", &stream_id), (":id", &stream_id),
])?; ])?;
if changes != 1 { if changes != 1 {
return Err(Error::new(format!("no such stream {}", stream_id))); bail!("no such stream {}", stream_id);
} }
let m = Transaction::get_mods_by_stream(&mut self.mods_by_stream, stream_id); let m = Transaction::get_mods_by_stream(&mut self.mods_by_stream, stream_id);
m.new_record = Some(new_record); m.new_record = Some(new_record);
@ -820,7 +823,7 @@ impl<'a> Transaction<'a> {
/// Raises an error if `must_rollback` is true. To be used on commit and in modifications. /// Raises an error if `must_rollback` is true. To be used on commit and in modifications.
fn check_must_rollback(&self) -> Result<(), Error> { fn check_must_rollback(&self) -> Result<(), Error> {
if self.must_rollback { if self.must_rollback {
return Err(Error::new("failing due to previous error".to_owned())); bail!("failing due to previous error");
} }
Ok(()) Ok(())
} }
@ -867,10 +870,7 @@ impl<'a> Transaction<'a> {
} }
let max_end = match maxes_opt { let max_end = match maxes_opt {
Some(Range{end: e, ..}) => e, Some(Range{end: e, ..}) => e,
None => { None => bail!("missing max for stream {} which had min {}", stream_id, min_start),
return Err(Error::new(format!("missing max for stream {} which had min {}",
stream_id, min_start)));
}
}; };
m.range = Some(min_start .. max_end); m.range = Some(min_start .. max_end);
} }
@ -902,9 +902,8 @@ impl StreamStateChanger {
have_data = true; have_data = true;
if let (Some(d), false) = (s.sample_file_dir_id, if let (Some(d), false) = (s.sample_file_dir_id,
s.sample_file_dir_id == sc.sample_file_dir_id) { s.sample_file_dir_id == sc.sample_file_dir_id) {
return Err(Error::new(format!("can't change sample_file_dir_id \ bail!("can't change sample_file_dir_id {:?}->{:?} for non-empty stream {}",
{:?}->{:?} for non-empty stream {}", d, sc.sample_file_dir_id, sid);
d, sc.sample_file_dir_id, sid)));
} }
} }
if !have_data && sc.rtsp_path.is_empty() && sc.sample_file_dir_id.is_none() && if !have_data && sc.rtsp_path.is_empty() && sc.sample_file_dir_id.is_none() &&
@ -914,7 +913,7 @@ impl StreamStateChanger {
delete from stream where id = ? delete from stream where id = ?
"#)?; "#)?;
if stmt.execute(&[&sid])? != 1 { if stmt.execute(&[&sid])? != 1 {
return Err(Error::new(format!("missing stream {}", sid))); bail!("missing stream {}", sid);
} }
streams.push((sid, None)); streams.push((sid, None));
} else { } else {
@ -934,7 +933,7 @@ impl StreamStateChanger {
(":id", &sid), (":id", &sid),
])?; ])?;
if rows != 1 { if rows != 1 {
return Err(Error::new(format!("missing stream {}", sid))); bail!("missing stream {}", sid);
} }
sids[i] = Some(sid); sids[i] = Some(sid);
let s = (*s).clone(); let s = (*s).clone();
@ -1037,7 +1036,7 @@ impl LockedDatabase {
let dir = self.state let dir = self.state
.sample_file_dirs_by_id .sample_file_dirs_by_id
.get_mut(&id) .get_mut(&id)
.ok_or_else(|| Error::new(format!("no such dir {}", id)))?; .ok_or_else(|| format_err!("no such dir {}", id))?;
if dir.dir.is_some() { continue } if dir.dir.is_some() { continue }
let mut meta = schema::DirMeta::default(); let mut meta = schema::DirMeta::default();
meta.db_uuid.extend_from_slice(&self.state.uuid.as_bytes()[..]); meta.db_uuid.extend_from_slice(&self.state.uuid.as_bytes()[..]);
@ -1072,7 +1071,7 @@ impl LockedDatabase {
"#)?; "#)?;
for &id in in_progress.keys() { for &id in in_progress.keys() {
if stmt.execute(&[&o.id, &id])? != 1 { if stmt.execute(&[&o.id, &id])? != 1 {
return Err(Error::new(format!("unable to update dir {}", id))); bail!("unable to update dir {}", id);
} }
} }
} }
@ -1150,10 +1149,8 @@ impl LockedDatabase {
let vse_id = row.get_checked(8)?; let vse_id = row.get_checked(8)?;
let video_sample_entry = match self.state.video_sample_entries.get(&vse_id) { let video_sample_entry = match self.state.video_sample_entries.get(&vse_id) {
Some(v) => v, Some(v) => v,
None => { None => bail!("recording {} references nonexistent video_sample_entry {}",
return Err(Error::new(format!( id, vse_id),
"recording {} references nonexistent video_sample_entry {}", id, vse_id)));
},
}; };
let out = ListRecordingsRow { let out = ListRecordingsRow {
id, id,
@ -1210,9 +1207,8 @@ impl LockedDatabase {
} }
let need_insert = if let Some(ref mut a) = aggs.get_mut(&run_start_id) { let need_insert = if let Some(ref mut a) = aggs.get_mut(&run_start_id) {
if a.time.end != row.start { if a.time.end != row.start {
return Err(Error::new(format!( bail!("stream {} recording {} ends at {}; {} starts at {}; expected same",
"stream {} recording {} ends at {}; {} starts at {}; expected same", stream_id, a.ids.end - 1, a.time.end, row.id, row.start);
stream_id, a.ids.end - 1, a.time.end, row.id, row.start)));
} }
a.time.end.0 += row.duration_90k as i64; a.time.end.0 += row.duration_90k as i64;
a.ids.end = recording_id + 1; a.ids.end = recording_id + 1;
@ -1264,7 +1260,7 @@ impl LockedDatabase {
cache.insert(id.0, video_index.0); cache.insert(id.0, video_index.0);
return result; return result;
} }
Err(Error::new(format!("no such recording {}", id))) Err(format_err!("no such recording {}", id))
} }
/// Lists all garbage ids. /// Lists all garbage ids.
@ -1327,9 +1323,7 @@ impl LockedDatabase {
let mut sha1 = [0u8; 20]; let mut sha1 = [0u8; 20];
let sha1_vec: Vec<u8> = row.get_checked(1)?; let sha1_vec: Vec<u8> = row.get_checked(1)?;
if sha1_vec.len() != 20 { if sha1_vec.len() != 20 {
return Err(Error::new(format!( bail!("video sample entry id {} has sha1 {} of wrong length", id, sha1_vec.len());
"video sample entry id {} has sha1 {} of wrong length",
id, sha1_vec.len())));
} }
sha1.copy_from_slice(&sha1_vec); sha1.copy_from_slice(&sha1_vec);
let data: Vec<u8> = row.get_checked(5)?; let data: Vec<u8> = row.get_checked(5)?;
@ -1372,7 +1366,7 @@ impl LockedDatabase {
let last_complete_open = match (open_id, open_uuid) { let last_complete_open = match (open_id, open_uuid) {
(Some(id), Some(uuid)) => Some(Open { id, uuid: uuid.0, }), (Some(id), Some(uuid)) => Some(Open { id, uuid: uuid.0, }),
(None, None) => None, (None, None) => None,
_ => return Err(Error::new(format!("open table missing id {}", id))), _ => bail!("open table missing id {}", id),
}; };
self.state.sample_file_dirs_by_id.insert(id, SampleFileDir { self.state.sample_file_dirs_by_id.insert(id, SampleFileDir {
id, id,
@ -1446,12 +1440,13 @@ impl LockedDatabase {
let id = row.get_checked(0)?; let id = row.get_checked(0)?;
let type_: String = row.get_checked(1)?; let type_: String = row.get_checked(1)?;
let type_ = StreamType::parse(&type_).ok_or_else( let type_ = StreamType::parse(&type_).ok_or_else(
|| Error::new(format!("no such stream type {}", type_)))?; || format_err!("no such stream type {}", type_))?;
let camera_id = row.get_checked(2)?; let camera_id = row.get_checked(2)?;
let c = self.state let c = self.state
.cameras_by_id .cameras_by_id
.get_mut(&camera_id) .get_mut(&camera_id)
.ok_or_else(|| Error::new("missing camera".to_owned()))?; .ok_or_else(|| format_err!("missing camera {} for stream {}",
camera_id, id))?;
self.state.streams_by_id.insert(id, Stream { self.state.streams_by_id.insert(id, Stream {
id, id,
type_, type_,
@ -1487,8 +1482,8 @@ impl LockedDatabase {
// The width and height should match given that they're also specified within data // The width and height should match given that they're also specified within data
// and thus included in the just-compared hash. // and thus included in the just-compared hash.
if v.width != width || v.height != height { if v.width != width || v.height != height {
return Err(Error::new(format!("database entry for {:?} is {}x{}, not {}x{}", bail!("database entry for {:?} is {}x{}, not {}x{}",
&sha1[..], v.width, v.height, width, height))); &sha1[..], v.width, v.height, width, height);
} }
return Ok(id); return Ok(id);
} }
@ -1523,7 +1518,7 @@ impl LockedDatabase {
let o = self.state let o = self.state
.open .open
.as_ref() .as_ref()
.ok_or_else(|| Error::new("database is read-only".to_owned()))?; .ok_or_else(|| format_err!("database is read-only"))?;
// Populate meta. // Populate meta.
{ {
@ -1551,7 +1546,7 @@ impl LockedDatabase {
dir: Some(dir), dir: Some(dir),
last_complete_open: None, last_complete_open: None,
}), }),
Entry::Occupied(_) => Err(Error::new(format!("duplicate sample file dir id {}", id)))?, Entry::Occupied(_) => Err(format_err!("duplicate sample file dir id {}", id))?,
}; };
d.last_complete_open = Some(*o); d.last_complete_open = Some(*o);
mem::swap(&mut meta.last_complete_open, &mut meta.in_progress_open); mem::swap(&mut meta.last_complete_open, &mut meta.in_progress_open);
@ -1562,13 +1557,13 @@ impl LockedDatabase {
pub fn delete_sample_file_dir(&mut self, dir_id: i32) -> Result<(), Error> { pub fn delete_sample_file_dir(&mut self, dir_id: i32) -> Result<(), Error> {
for (&id, s) in self.state.streams_by_id.iter() { for (&id, s) in self.state.streams_by_id.iter() {
if s.sample_file_dir_id == Some(dir_id) { if s.sample_file_dir_id == Some(dir_id) {
return Err(Error::new(format!("can't delete dir referenced by stream {}", id))); bail!("can't delete dir referenced by stream {}", id);
} }
} }
// TODO: remove/update metadata stored in the directory? at present this will have to // TODO: remove/update metadata stored in the directory? at present this will have to
// be manually deleted before the dir can be reused. // be manually deleted before the dir can be reused.
if self.conn.execute("delete from sample_file_dir where id = ?", &[&dir_id])? != 1 { if self.conn.execute("delete from sample_file_dir where id = ?", &[&dir_id])? != 1 {
return Err(Error::new(format!("no such dir {} to remove", dir_id))); bail!("no such dir {} to remove", dir_id);
} }
self.state.sample_file_dirs_by_id.remove(&dir_id).expect("sample file dir should exist!"); self.state.sample_file_dirs_by_id.remove(&dir_id).expect("sample file dir should exist!");
Ok(()) Ok(())
@ -1622,7 +1617,7 @@ impl LockedDatabase {
let c = self.state let c = self.state
.cameras_by_id .cameras_by_id
.get_mut(&camera_id) .get_mut(&camera_id)
.ok_or_else(|| Error::new(format!("no such camera {}", camera_id)))?; .ok_or_else(|| format_err!("no such camera {}", camera_id))?;
{ {
streams = StreamStateChanger::new(&tx, camera_id, Some(c), &self.state.streams_by_id, streams = StreamStateChanger::new(&tx, camera_id, Some(c), &self.state.streams_by_id,
&mut camera)?; &mut camera)?;
@ -1645,7 +1640,7 @@ impl LockedDatabase {
(":password", &camera.password), (":password", &camera.password),
])?; ])?;
if rows != 1 { if rows != 1 {
return Err(Error::new(format!("Camera {} missing from database", camera_id))); bail!("Camera {} missing from database", camera_id);
} }
} }
tx.commit()?; tx.commit()?;
@ -1662,7 +1657,7 @@ impl LockedDatabase {
pub fn delete_camera(&mut self, id: i32) -> Result<(), Error> { pub fn delete_camera(&mut self, id: i32) -> Result<(), Error> {
let uuid = self.state.cameras_by_id.get(&id) let uuid = self.state.cameras_by_id.get(&id)
.map(|c| c.uuid) .map(|c| c.uuid)
.ok_or_else(|| Error::new(format!("No such camera {} to remove", id)))?; .ok_or_else(|| format_err!("No such camera {} to remove", id))?;
let mut streams_to_delete = Vec::new(); let mut streams_to_delete = Vec::new();
let tx = self.conn.transaction()?; let tx = self.conn.transaction()?;
{ {
@ -1670,18 +1665,18 @@ impl LockedDatabase {
for (stream_id, stream) in &self.state.streams_by_id { for (stream_id, stream) in &self.state.streams_by_id {
if stream.camera_id != id { continue }; if stream.camera_id != id { continue };
if stream.range.is_some() { if stream.range.is_some() {
return Err(Error::new(format!("Can't remove camera {}; has recordings.", id))); bail!("Can't remove camera {}; has recordings.", id);
} }
let rows = stream_stmt.execute_named(&[(":id", stream_id)])?; let rows = stream_stmt.execute_named(&[(":id", stream_id)])?;
if rows != 1 { if rows != 1 {
return Err(Error::new(format!("Stream {} missing from database", id))); bail!("Stream {} missing from database", id);
} }
streams_to_delete.push(*stream_id); streams_to_delete.push(*stream_id);
} }
let mut cam_stmt = tx.prepare_cached(r"delete from camera where id = :id")?; let mut cam_stmt = tx.prepare_cached(r"delete from camera where id = :id")?;
let rows = cam_stmt.execute_named(&[(":id", &id)])?; let rows = cam_stmt.execute_named(&[(":id", &id)])?;
if rows != 1 { if rows != 1 {
return Err(Error::new(format!("Camera {} missing from database", id))); bail!("Camera {} missing from database", id);
} }
} }
tx.commit()?; tx.commit()?;
@ -1719,24 +1714,21 @@ impl Database {
pub fn new(conn: rusqlite::Connection, read_write: bool) -> Result<Database, Error> { pub fn new(conn: rusqlite::Connection, read_write: bool) -> Result<Database, Error> {
conn.execute("pragma foreign_keys = on", &[])?; conn.execute("pragma foreign_keys = on", &[])?;
{ {
let ver = get_schema_version(&conn)?.ok_or_else(|| Error::new( let ver = get_schema_version(&conn)?.ok_or_else(|| format_err!(
"no such table: version. \ "no such table: version. \
\ \
If you are starting from an \ If you are starting from an \
empty database, see README.md to complete the \ empty database, see README.md to complete the \
installation. If you are starting from a database \ installation. If you are starting from a database \
that predates schema versioning, see guide/schema.md." that predates schema versioning, see guide/schema.md."))?;
.to_owned()))?;
if ver < EXPECTED_VERSION { if ver < EXPECTED_VERSION {
return Err(Error::new(format!( bail!("Database schema version {} is too old (expected {}); \
"Database schema version {} is too old (expected {}); \ see upgrade instructions in guide/upgrade.md.",
see upgrade instructions in guide/upgrade.md.", ver, EXPECTED_VERSION);
ver, EXPECTED_VERSION)));
} else if ver > EXPECTED_VERSION { } else if ver > EXPECTED_VERSION {
return Err(Error::new(format!( bail!("Database schema version {} is too new (expected {}); \
"Database schema version {} is too new (expected {}); \ must use a newer binary to match.", ver,
must use a newer binary to match.", ver, EXPECTED_VERSION);
EXPECTED_VERSION)));
} }
} }
@ -1794,15 +1786,14 @@ impl Database {
})); }));
{ {
let l = &mut *db.lock(); let l = &mut *db.lock();
l.init_video_sample_entries().annotate_err("init_video_sample_entries")?; l.init_video_sample_entries()?;
l.init_sample_file_dirs().annotate_err("init_sample_file_dirs")?; l.init_sample_file_dirs()?;
l.init_cameras().annotate_err("init_cameras")?; l.init_cameras()?;
l.init_streams().annotate_err("init_streams")?; l.init_streams()?;
for (&stream_id, ref mut stream) in &mut l.state.streams_by_id { for (&stream_id, ref mut stream) in &mut l.state.streams_by_id {
// TODO: we could use one thread per stream if we had multiple db conns. // TODO: we could use one thread per stream if we had multiple db conns.
let camera = l.state.cameras_by_id.get(&stream.camera_id).unwrap(); let camera = l.state.cameras_by_id.get(&stream.camera_id).unwrap();
init_recordings(&mut l.conn, stream_id, camera, stream) init_recordings(&mut l.conn, stream_id, camera, stream)?;
.annotate_err("init_recordings")?;
} }
} }
Ok(db) Ok(db)
@ -1842,7 +1833,6 @@ mod tests {
use recording::{self, TIME_UNITS_PER_SEC}; use recording::{self, TIME_UNITS_PER_SEC};
use rusqlite::Connection; use rusqlite::Connection;
use std::collections::BTreeMap; use std::collections::BTreeMap;
use std::error::Error as E;
use testutil; use testutil;
use super::*; use super::*;
use super::adjust_days; // non-public. use super::adjust_days; // non-public.
@ -2003,7 +1993,7 @@ mod tests {
fn test_no_meta_or_version() { fn test_no_meta_or_version() {
testutil::init(); testutil::init();
let e = Database::new(Connection::open_in_memory().unwrap(), false).unwrap_err(); let e = Database::new(Connection::open_in_memory().unwrap(), false).unwrap_err();
assert!(e.description().starts_with("no such table"), "{}", e); assert!(e.to_string().starts_with("no such table"), "{}", e);
} }
#[test] #[test]
@ -2012,9 +2002,8 @@ mod tests {
let c = setup_conn(); let c = setup_conn();
c.execute_batch("delete from version; insert into version values (2, 0, '');").unwrap(); c.execute_batch("delete from version; insert into version values (2, 0, '');").unwrap();
let e = Database::new(c, false).unwrap_err(); let e = Database::new(c, false).unwrap_err();
assert!(e.description().starts_with( assert!(e.to_string().starts_with(
"Database schema version 2 is too old (expected 3)"), "got: {:?}", "Database schema version 2 is too old (expected 3)"), "got: {:?}", e);
e.description());
} }
#[test] #[test]
@ -2023,8 +2012,8 @@ mod tests {
let c = setup_conn(); let c = setup_conn();
c.execute_batch("delete from version; insert into version values (4, 0, '');").unwrap(); c.execute_batch("delete from version; insert into version values (4, 0, '');").unwrap();
let e = Database::new(c, false).unwrap_err(); let e = Database::new(c, false).unwrap_err();
assert!(e.description().starts_with( assert!(e.to_string().starts_with(
"Database schema version 4 is too new (expected 3)"), "got: {:?}", e.description()); "Database schema version 4 is too new (expected 3)"), "got: {:?}", e);
} }
/// Basic test of running some queries on a fresh database. /// Basic test of running some queries on a fresh database.

View File

@ -33,7 +33,7 @@
//! This includes opening files for serving, rotating away old files, and saving new files. //! This includes opening files for serving, rotating away old files, and saving new files.
use db::{self, CompositeId}; use db::{self, CompositeId};
use error::Error; use failure::{Error, Fail};
use fnv::FnvHashMap; use fnv::FnvHashMap;
use libc::{self, c_char}; use libc::{self, c_char};
use protobuf::{self, Message}; use protobuf::{self, Message};
@ -151,8 +151,7 @@ impl SampleFileDir {
s.fd.lock(if read_write { libc::LOCK_EX } else { libc::LOCK_SH } | libc::LOCK_NB)?; s.fd.lock(if read_write { libc::LOCK_EX } else { libc::LOCK_SH } | libc::LOCK_NB)?;
let dir_meta = s.read_meta()?; let dir_meta = s.read_meta()?;
if !SampleFileDir::consistent(db_meta, &dir_meta) { if !SampleFileDir::consistent(db_meta, &dir_meta) {
return Err(Error::new(format!("metadata mismatch.\ndb: {:#?}\ndir: {:#?}", bail!("metadata mismatch.\ndb: {:#?}\ndir: {:#?}", db_meta, &dir_meta);
db_meta, &dir_meta)));
} }
if db_meta.in_progress_open.is_some() { if db_meta.in_progress_open.is_some() {
s.write_meta(db_meta)?; s.write_meta(db_meta)?;
@ -188,8 +187,7 @@ impl SampleFileDir {
// Partial opening by this or another database is fine; we won't overwrite anything. // Partial opening by this or another database is fine; we won't overwrite anything.
// TODO: consider one exception: if the version 2 upgrade fails at the post_tx step. // TODO: consider one exception: if the version 2 upgrade fails at the post_tx step.
if old_meta.last_complete_open.is_some() { if old_meta.last_complete_open.is_some() {
return Err(Error::new(format!("Can't create dir at path {}: is already in use:\n{:?}", bail!("Can't create dir at path {}: is already in use:\n{:?}", path, old_meta);
path, old_meta)));
} }
s.write_meta(db_meta)?; s.write_meta(db_meta)?;
@ -198,7 +196,7 @@ impl SampleFileDir {
fn open_self(path: &str, create: bool) -> Result<Arc<SampleFileDir>, Error> { fn open_self(path: &str, create: bool) -> Result<Arc<SampleFileDir>, Error> {
let fd = Fd::open(None, path, create) let fd = Fd::open(None, path, create)
.map_err(|e| Error::new(format!("unable to open sample file dir {}: {}", path, e)))?; .map_err(|e| format_err!("unable to open sample file dir {}: {}", path, e))?;
Ok(Arc::new(SampleFileDir { Ok(Arc::new(SampleFileDir {
fd, fd,
mutable: Mutex::new(SharedMutableState{ mutable: Mutex::new(SharedMutableState{
@ -229,10 +227,7 @@ impl SampleFileDir {
let mut data = Vec::new(); let mut data = Vec::new();
f.read_to_end(&mut data)?; f.read_to_end(&mut data)?;
let mut s = protobuf::CodedInputStream::from_bytes(&data); let mut s = protobuf::CodedInputStream::from_bytes(&data);
meta.merge_from(&mut s).map_err(|e| Error { meta.merge_from(&mut s).map_err(|e| e.context("Unable to parse metadata proto: {}"))?;
description: format!("Unable to parse proto: {:?}", e),
cause: Some(Box::new(e)),
})?;
Ok(meta) Ok(meta)
} }
@ -245,10 +240,7 @@ impl SampleFileDir {
let mut f = unsafe { self.fd.openat(tmp_path.as_ptr(), let mut f = unsafe { self.fd.openat(tmp_path.as_ptr(),
libc::O_CREAT | libc::O_TRUNC | libc::O_WRONLY, libc::O_CREAT | libc::O_TRUNC | libc::O_WRONLY,
0o600)? }; 0o600)? };
meta.write_to_writer(&mut f).map_err(|e| Error { meta.write_to_writer(&mut f)?;
description: format!("Unable to write metadata proto: {:?}", e),
cause: Some(Box::new(e)),
})?;
f.sync_all()?; f.sync_all()?;
unsafe { renameat(&self.fd, tmp_path.as_ptr(), &self.fd, final_path.as_ptr())? }; unsafe { renameat(&self.fd, tmp_path.as_ptr(), &self.fd, final_path.as_ptr())? };
self.sync()?; self.sync()?;
@ -411,7 +403,7 @@ pub fn lower_retention(db: Arc<db::Database>, dir_id: i32, limits: &[NewLimit])
for l in limits { for l in limits {
let before = to_delete.len(); let before = to_delete.len();
let stream = db.streams_by_id().get(&l.stream_id) let stream = db.streams_by_id().get(&l.stream_id)
.ok_or_else(|| Error::new(format!("no such stream {}", l.stream_id)))?; .ok_or_else(|| format_err!("no such stream {}", l.stream_id))?;
if l.limit >= stream.sample_file_bytes { continue } if l.limit >= stream.sample_file_bytes { continue }
get_rows_to_delete(db, l.stream_id, stream, stream.retain_bytes - l.limit, get_rows_to_delete(db, l.stream_id, stream, stream.retain_bytes - l.limit,
&mut to_delete)?; &mut to_delete)?;
@ -440,8 +432,7 @@ fn get_rows_to_delete(db: &db::LockedDatabase, stream_id: i32,
bytes_needed > bytes_to_delete // continue as long as more deletions are needed. bytes_needed > bytes_to_delete // continue as long as more deletions are needed.
})?; })?;
if bytes_needed > bytes_to_delete { if bytes_needed > bytes_to_delete {
return Err(Error::new(format!("{}: couldn't find enough files to delete: {} left.", bail!("{}: couldn't find enough files to delete: {} left.", stream.id, bytes_needed);
stream.id, bytes_needed)));
} }
info!("{}: deleting {} bytes in {} recordings ({} bytes needed)", info!("{}: deleting {} bytes in {} recordings ({} bytes needed)",
stream.id, bytes_to_delete, n, bytes_needed); stream.id, bytes_to_delete, n, bytes_needed);
@ -473,7 +464,7 @@ impl Syncer {
-> Result<(Self, String), Error> { -> Result<(Self, String), Error> {
let d = l.sample_file_dirs_by_id() let d = l.sample_file_dirs_by_id()
.get(&dir_id) .get(&dir_id)
.ok_or_else(|| Error::new(format!("no dir {}", dir_id)))?; .ok_or_else(|| format_err!("no dir {}", dir_id))?;
let dir = d.get()?; let dir = d.get()?;
let to_unlink = l.list_garbage(dir_id)?; let to_unlink = l.list_garbage(dir_id)?;
@ -561,8 +552,7 @@ impl Syncer {
} }
self.try_unlink(); self.try_unlink();
if !self.to_unlink.is_empty() { if !self.to_unlink.is_empty() {
return Err(Error::new(format!("failed to unlink {} sample files", bail!("failed to unlink {} sample files", self.to_unlink.len());
self.to_unlink.len())));
} }
self.dir.sync()?; self.dir.sync()?;
{ {
@ -597,7 +587,7 @@ impl Syncer {
-> Result<(), Error> { -> Result<(), Error> {
self.try_unlink(); self.try_unlink();
if !self.to_unlink.is_empty() { if !self.to_unlink.is_empty() {
return Err(Error::new(format!("failed to unlink {} files.", self.to_unlink.len()))); bail!("failed to unlink {} files.", self.to_unlink.len());
} }
// XXX: if these calls fail, any other writes are likely to fail as well. // XXX: if these calls fail, any other writes are likely to fail as well.
@ -610,7 +600,7 @@ impl Syncer {
let stream_id = recording.id.stream(); let stream_id = recording.id.stream();
let stream = let stream =
db.streams_by_id().get(&stream_id) db.streams_by_id().get(&stream_id)
.ok_or_else(|| Error::new(format!("no such stream {}", stream_id)))?; .ok_or_else(|| format_err!("no such stream {}", stream_id))?;
get_rows_to_delete(&db, stream_id, stream, get_rows_to_delete(&db, stream_id, stream,
recording.sample_file_bytes as i64, &mut to_delete)?; recording.sample_file_bytes as i64, &mut to_delete)?;
} }
@ -784,8 +774,8 @@ impl<'a> Writer<'a> {
if let Some(unflushed) = w.unflushed_sample.take() { if let Some(unflushed) = w.unflushed_sample.take() {
let duration = (pts_90k - unflushed.pts_90k) as i32; let duration = (pts_90k - unflushed.pts_90k) as i32;
if duration <= 0 { if duration <= 0 {
return Err(Error::new(format!("pts not monotonically increasing; got {} then {}", bail!("pts not monotonically increasing; got {} then {}",
unflushed.pts_90k, pts_90k))); unflushed.pts_90k, pts_90k);
} }
let duration = w.adjuster.adjust(duration); let duration = w.adjuster.adjust(duration);
w.index.add_sample(duration, unflushed.len, unflushed.is_key); w.index.add_sample(duration, unflushed.len, unflushed.is_key);
@ -835,10 +825,11 @@ impl<'a> InnerWriter<'a> {
fn close(mut self, next_pts: Option<i64>) -> Result<PreviousWriter, Error> { fn close(mut self, next_pts: Option<i64>) -> Result<PreviousWriter, Error> {
if self.corrupt { if self.corrupt {
self.syncer_channel.async_abandon_recording(self.id); self.syncer_channel.async_abandon_recording(self.id);
return Err(Error::new(format!("recording {} is corrupt", self.id))); bail!("recording {} is corrupt", self.id);
} }
let unflushed = let unflushed =
self.unflushed_sample.take().ok_or_else(|| Error::new("no packets!".to_owned()))?; self.unflushed_sample.take()
.ok_or_else(|| format_err!("recording {} has no packets", self.id))?;
let duration = self.adjuster.adjust(match next_pts { let duration = self.adjuster.adjust(match next_pts {
None => 0, None => 0,
Some(p) => (p - unflushed.pts_90k) as i32, Some(p) => (p - unflushed.pts_90k) as i32,

View File

@ -1,148 +0,0 @@
// This file is part of Moonfire NVR, a security camera digital video recorder.
// Copyright (C) 2016 Scott Lamb <slamb@slamb.org>
//
// This program is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
//
// In addition, as a special exception, the copyright holders give
// permission to link the code of portions of this program with the
// OpenSSL library under certain conditions as described in each
// individual source file, and distribute linked combinations including
// the two.
//
// You must obey the GNU General Public License in all respects for all
// of the code used other than OpenSSL. If you modify file(s) with this
// exception, you may extend this exception to your version of the
// file(s), but you are not obligated to do so. If you do not wish to do
// so, delete this exception statement from your version. If you delete
// this exception statement from all source files in the program, then
// also delete it here.
//
// This program is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
//
// You should have received a copy of the GNU General Public License
// along with this program. If not, see <http://www.gnu.org/licenses/>.
extern crate rusqlite;
extern crate time;
extern crate uuid;
use core::ops::Deref;
use core::num;
use openssl::error::ErrorStack;
use moonfire_ffmpeg;
use serde_json;
use std::boxed::Box;
use std::convert::From;
use std::error;
use std::error::Error as E;
use std::fmt;
use std::io;
use std::result;
use std::string::String;
/// Homegrown error type: a human-readable description plus an optional
/// boxed underlying cause. (This commit replaces it with the `failure` crate.)
#[derive(Debug)]
pub struct Error {
/// Human-readable description of what went wrong.
pub description: String,
/// The underlying error that triggered this one, if any.
pub cause: Option<Box<error::Error + Send + Sync>>,
}
impl Error {
pub fn new(description: String) -> Self {
Error{description: description, cause: None }
}
}
/// Extension trait for converting arbitrary error results into this module's
/// `Error` type while prepending a static prefix to the description.
pub trait ResultExt<T> {
/// Returns a new `Result` like this one except that errors are of type `Error` and annotated
/// with the given prefix.
fn annotate_err(self, prefix: &'static str) -> Result<T>;
}
impl<T, E> ResultExt<T> for result::Result<T, E> where E: 'static + error::Error + Send + Sync {
fn annotate_err(self, prefix: &'static str) -> Result<T> {
self.map_err(|e| Error{
description: format!("{}: {}", prefix, e.description()),
cause: Some(Box::new(e)),
})
}
}
impl error::Error for Error {
/// Returns the stored description string.
fn description(&self) -> &str { &self.description }
/// Returns the stored cause, if any, as a plain `&error::Error` trait object
/// (the `Send + Sync` bounds are dropped by the coercion in the `Some` arm).
fn cause(&self) -> Option<&error::Error> {
match self.cause {
Some(ref b) => Some(b.deref()),
None => None
}
}
}
impl fmt::Display for Error {
fn fmt(&self, f: &mut fmt::Formatter) -> result::Result<(), fmt::Error> {
write!(f, "Error: {}\ncause: {:?}", self.description, self.cause)
}
}
// TODO(slamb): isn't there a "<? implements error::Error>" or some such?
// Boilerplate `From` conversions for foreign error types. Most copy the source
// error's description and keep the original error as the cause.
impl From<rusqlite::Error> for Error {
fn from(err: rusqlite::Error) -> Self {
Error{description: String::from(err.description()), cause: Some(Box::new(err))}
}
}
impl From<fmt::Error> for Error {
fn from(err: fmt::Error) -> Self {
Error{description: String::from(err.description()), cause: Some(Box::new(err))}
}
}
impl From<io::Error> for Error {
fn from(err: io::Error) -> Self {
Error{description: String::from(err.description()), cause: Some(Box::new(err))}
}
}
impl From<time::ParseError> for Error {
fn from(err: time::ParseError) -> Self {
Error{description: String::from(err.description()), cause: Some(Box::new(err))}
}
}
impl From<num::ParseIntError> for Error {
fn from(err: num::ParseIntError) -> Self {
Error{description: err.description().to_owned(), cause: Some(Box::new(err))}
}
}
// Includes both the static description() and the Display form, which for
// serde_json carries extra detail.
impl From<serde_json::Error> for Error {
fn from(err: serde_json::Error) -> Self {
Error{description: format!("{} ({})", err.description(), err), cause: Some(Box::new(err))}
}
}
impl From<moonfire_ffmpeg::Error> for Error {
fn from(err: moonfire_ffmpeg::Error) -> Self {
Error{description: format!("ffmpeg: {}", err), cause: Some(Box::new(err))}
}
}
// Discards the original error and stores a fixed description with no cause.
impl From<uuid::ParseError> for Error {
fn from(_: uuid::ParseError) -> Self {
Error{description: String::from("UUID parse error"), cause: None}
}
}
// Discards the openssl error stack and stores a fixed description with no cause.
impl From<ErrorStack> for Error {
fn from(_: ErrorStack) -> Self {
Error{description: String::from("openssl error"), cause: None}
}
}
// Convenience alias: a `Result` whose error type is this module's `Error`.
pub type Result<T> = result::Result<T, Error>;

View File

@ -41,7 +41,7 @@
//! would be more trouble than it's worth. //! would be more trouble than it's worth.
use byteorder::{BigEndian, WriteBytesExt}; use byteorder::{BigEndian, WriteBytesExt};
use error::{Error, Result}; use failure::Error;
use regex::bytes::Regex; use regex::bytes::Regex;
// See ISO/IEC 14496-10 table 7-1 - NAL unit type codes, syntax element categories, and NAL unit // See ISO/IEC 14496-10 table 7-1 - NAL unit type codes, syntax element categories, and NAL unit
@ -59,8 +59,8 @@ const NAL_UNIT_TYPE_MASK: u8 = 0x1F; // bottom 5 bits of first byte of unit.
/// ///
/// TODO: detect invalid byte streams. For example, several 0x00s not followed by a 0x01, a stream /// TODO: detect invalid byte streams. For example, several 0x00s not followed by a 0x01, a stream
/// stream not starting with 0x00 0x00 0x00 0x01, or an empty NAL unit. /// stream not starting with 0x00 0x00 0x00 0x01, or an empty NAL unit.
fn decode_h264_annex_b<'a, F>(data: &'a [u8], mut f: F) -> Result<()> fn decode_h264_annex_b<'a, F>(data: &'a [u8], mut f: F) -> Result<(), Error>
where F: FnMut(&'a [u8]) -> Result<()> { where F: FnMut(&'a [u8]) -> Result<(), Error> {
lazy_static! { lazy_static! {
static ref START_CODE: Regex = Regex::new(r"(\x00{2,}\x01)").unwrap(); static ref START_CODE: Regex = Regex::new(r"(\x00{2,}\x01)").unwrap();
} }
@ -73,21 +73,21 @@ where F: FnMut(&'a [u8]) -> Result<()> {
} }
/// Parses Annex B extra data, returning a tuple holding the `sps` and `pps` substrings. /// Parses Annex B extra data, returning a tuple holding the `sps` and `pps` substrings.
fn parse_annex_b_extra_data(data: &[u8]) -> Result<(&[u8], &[u8])> { fn parse_annex_b_extra_data(data: &[u8]) -> Result<(&[u8], &[u8]), Error> {
let mut sps = None; let mut sps = None;
let mut pps = None; let mut pps = None;
decode_h264_annex_b(data, |unit| { decode_h264_annex_b(data, |unit| {
let nal_type = (unit[0] as u8) & NAL_UNIT_TYPE_MASK; let nal_type = (unit[0] as u8) & NAL_UNIT_TYPE_MASK;
match nal_type { match nal_type {
NAL_UNIT_SEQ_PARAMETER_SET => { sps = Some(unit); }, NAL_UNIT_SEQ_PARAMETER_SET => sps = Some(unit),
NAL_UNIT_PIC_PARAMETER_SET => { pps = Some(unit); }, NAL_UNIT_PIC_PARAMETER_SET => pps = Some(unit),
_ => { return Err(Error::new(format!("Expected SPS and PPS; got type {}", nal_type))); } _ => bail!("Expected SPS and PPS; got type {}", nal_type),
}; };
Ok(()) Ok(())
})?; })?;
match (sps, pps) { match (sps, pps) {
(Some(s), Some(p)) => Ok((s, p)), (Some(s), Some(p)) => Ok((s, p)),
_ => Err(Error::new("SPS and PPS must be specified".to_owned())), _ => bail!("SPS and PPS must be specified"),
} }
} }
@ -107,7 +107,7 @@ pub struct ExtraData {
impl ExtraData { impl ExtraData {
/// Parses "extradata" from ffmpeg. This data may be in either Annex B format or AVC format. /// Parses "extradata" from ffmpeg. This data may be in either Annex B format or AVC format.
pub fn parse(extradata: &[u8], width: u16, height: u16) -> Result<ExtraData> { pub fn parse(extradata: &[u8], width: u16, height: u16) -> Result<ExtraData, Error> {
let mut sps_and_pps = None; let mut sps_and_pps = None;
let need_transform; let need_transform;
let avcc_len = if extradata.starts_with(b"\x00\x00\x00\x01") || let avcc_len = if extradata.starts_with(b"\x00\x00\x00\x01") ||
@ -198,11 +198,9 @@ impl ExtraData {
sample_entry.extend_from_slice(pps); sample_entry.extend_from_slice(pps);
if sample_entry.len() - avcc_len_pos != avcc_len { if sample_entry.len() - avcc_len_pos != avcc_len {
return Err(Error::new(format!("internal error: anticipated AVCConfigurationBox \ bail!("internal error: anticipated AVCConfigurationBox \
length {}, but was actually {}; sps length \ length {}, but was actually {}; sps length {}, pps length {}",
{}, pps length {}", avcc_len, sample_entry.len() - avcc_len_pos, sps.len(), pps.len());
avcc_len, sample_entry.len() - avcc_len_pos,
sps.len(), pps.len())));
} }
sample_entry.len() - before sample_entry.len() - before
} else { } else {
@ -211,10 +209,9 @@ impl ExtraData {
}; };
if sample_entry.len() - avc1_len_pos != avc1_len { if sample_entry.len() - avc1_len_pos != avc1_len {
return Err(Error::new(format!("internal error: anticipated AVCSampleEntry length \ bail!("internal error: anticipated AVCSampleEntry length \
{}, but was actually {}; AVCDecoderConfiguration \ {}, but was actually {}; AVCDecoderConfiguration length {}",
length {}", avc1_len, sample_entry.len() - avc1_len_pos, avc1_len, sample_entry.len() - avc1_len_pos, avc_decoder_config_len);
avc_decoder_config_len)));
} }
let profile_idc = sample_entry[103]; let profile_idc = sample_entry[103];
let constraint_flags = sample_entry[104]; let constraint_flags = sample_entry[104];
@ -233,7 +230,7 @@ impl ExtraData {
/// Transforms sample data from Annex B format to AVC format. Should be called on samples iff /// Transforms sample data from Annex B format to AVC format. Should be called on samples iff
/// `ExtraData::need_transform` is true. Uses an out parameter `avc_sample` rather than a return /// `ExtraData::need_transform` is true. Uses an out parameter `avc_sample` rather than a return
/// so that memory allocations can be reused from sample to sample. /// so that memory allocations can be reused from sample to sample.
pub fn transform_sample_data(annexb_sample: &[u8], avc_sample: &mut Vec<u8>) -> Result<()> { pub fn transform_sample_data(annexb_sample: &[u8], avc_sample: &mut Vec<u8>) -> Result<(), Error> {
// See AVCParameterSamples, ISO/IEC 14496-15 section 5.3.2. // See AVCParameterSamples, ISO/IEC 14496-15 section 5.3.2.
avc_sample.clear(); avc_sample.clear();

View File

@ -29,7 +29,7 @@
// along with this program. If not, see <http://www.gnu.org/licenses/>. // along with this program. If not, see <http://www.gnu.org/licenses/>.
use db; use db;
use error::Error; use failure::Error;
use serde::ser::{SerializeMap, SerializeSeq, Serializer}; use serde::ser::{SerializeMap, SerializeSeq, Serializer};
use std::collections::BTreeMap; use std::collections::BTreeMap;
use uuid::Uuid; use uuid::Uuid;
@ -104,7 +104,7 @@ impl<'a> Stream<'a> {
Some(id) => id, Some(id) => id,
None => return Ok(None), None => return Ok(None),
}; };
let s = db.streams_by_id().get(&id).ok_or_else(|| Error::new(format!("missing stream {}", id)))?; let s = db.streams_by_id().get(&id).ok_or_else(|| format_err!("missing stream {}", id))?;
Ok(Some(Stream { Ok(Some(Stream {
retain_bytes: s.retain_bytes, retain_bytes: s.retain_bytes,
min_start_time_90k: s.range.as_ref().map(|r| r.start.0), min_start_time_90k: s.range.as_ref().map(|r| r.start.0),

View File

@ -35,6 +35,7 @@ extern crate core;
extern crate docopt; extern crate docopt;
extern crate futures; extern crate futures;
extern crate futures_cpupool; extern crate futures_cpupool;
#[macro_use] extern crate failure;
extern crate fnv; extern crate fnv;
extern crate http_serve; extern crate http_serve;
extern crate hyper; extern crate hyper;
@ -67,7 +68,6 @@ mod coding;
mod cmds; mod cmds;
mod db; mod db;
mod dir; mod dir;
mod error;
mod h264; mod h264;
mod json; mod json;
mod mp4; mod mp4;

View File

@ -81,7 +81,7 @@ extern crate time;
use byteorder::{BigEndian, ByteOrder, WriteBytesExt}; use byteorder::{BigEndian, ByteOrder, WriteBytesExt};
use db; use db;
use dir; use dir;
use error::Error; use failure::Error;
use futures::stream; use futures::stream;
use http_serve; use http_serve;
use hyper::header; use hyper::header;
@ -387,9 +387,7 @@ impl Segment {
let index: &'a _ = unsafe { &*self.index.get() }; let index: &'a _ = unsafe { &*self.index.get() };
match *index { match *index {
Ok(ref b) => return Ok(f(&b[..], self.lens())), Ok(ref b) => return Ok(f(&b[..], self.lens())),
Err(()) => { Err(()) => bail!("Unable to build index; see previous error."),
return Err(Error::new("Unable to build index; see previous error.".to_owned()))
},
} }
} }
@ -598,7 +596,7 @@ enum SliceType {
impl Slice { impl Slice {
fn new(end: u64, t: SliceType, p: usize) -> Result<Self, Error> { fn new(end: u64, t: SliceType, p: usize) -> Result<Self, Error> {
if end >= (1<<40) || p >= (1<<20) { if end >= (1<<40) || p >= (1<<20) {
return Err(Error::new(format!("end={} p={} too large for Slice", end, p))); bail!("end={} p={} too large for Slice", end, p);
} }
Ok(Slice(end | ((t as u64) << 40) | ((p as u64) << 44))) Ok(Slice(end | ((t as u64) << 40) | ((p as u64) << 44)))
@ -628,8 +626,7 @@ impl Slice {
} }
let truns = let truns =
mp4.0.db.lock() mp4.0.db.lock()
.with_recording_playback(s.s.id, |playback| s.truns(playback, pos, len)) .with_recording_playback(s.s.id, |playback| s.truns(playback, pos, len))?;
.map_err(|e| { Error::new(format!("Unable to build index for segment: {:?}", e)) })?;
let truns = ARefs::new(truns); let truns = ARefs::new(truns);
Ok(truns.map(|t| &t[r.start as usize .. r.end as usize])) Ok(truns.map(|t| &t[r.start as usize .. r.end as usize]))
} }
@ -758,9 +755,8 @@ impl FileBuilder {
rel_range_90k: Range<i32>) -> Result<(), Error> { rel_range_90k: Range<i32>) -> Result<(), Error> {
if let Some(prev) = self.segments.last() { if let Some(prev) = self.segments.last() {
if prev.s.have_trailing_zero() { if prev.s.have_trailing_zero() {
return Err(Error::new(format!( bail!("unable to append recording {} after recording {} with trailing zero",
"unable to append recording {} after recording {} with trailing zero", row.id, prev.s.id);
row.id, prev.s.id)));
} }
} }
let s = Segment::new(db, &row, rel_range_90k, self.next_frame_num)?; let s = Segment::new(db, &row, rel_range_90k, self.next_frame_num)?;
@ -836,9 +832,8 @@ impl FileBuilder {
// If the segment is > 4 GiB, the 32-bit trun data offsets are untrustworthy. // If the segment is > 4 GiB, the 32-bit trun data offsets are untrustworthy.
// We'd need multiple moof+mdat sequences to support large media segments properly. // We'd need multiple moof+mdat sequences to support large media segments properly.
if self.body.slices.len() > u32::max_value() as u64 { if self.body.slices.len() > u32::max_value() as u64 {
return Err(Error::new(format!( bail!("media segment has length {}, greater than allowed 4 GiB",
"media segment has length {}, greater than allowed 4 GiB", self.body.slices.len());
self.body.slices.len())));
} }
p p
@ -1086,7 +1081,7 @@ impl FileBuilder {
let skip = s.s.desired_range_90k.start - actual_start_90k; let skip = s.s.desired_range_90k.start - actual_start_90k;
let keep = s.s.desired_range_90k.end - s.s.desired_range_90k.start; let keep = s.s.desired_range_90k.end - s.s.desired_range_90k.start;
if skip < 0 || keep < 0 { if skip < 0 || keep < 0 {
return Err(Error::new(format!("skip={} keep={} on segment {:#?}", skip, keep, s))); bail!("skip={} keep={} on segment {:#?}", skip, keep, s);
} }
cur_media_time += skip as u64; cur_media_time += skip as u64;
if unflushed.segment_duration + unflushed.media_time == cur_media_time { if unflushed.segment_duration + unflushed.media_time == cur_media_time {
@ -1451,7 +1446,7 @@ impl FileInner {
let s = &self.segments[i]; let s = &self.segments[i];
let f = self.dirs_by_stream_id let f = self.dirs_by_stream_id
.get(&s.s.id.stream()) .get(&s.s.id.stream())
.ok_or_else(|| Error::new(format!("{}: stream not found", s.s.id)))? .ok_or_else(|| format_err!("{}: stream not found", s.s.id))?
.open_sample_file(s.s.id)?; .open_sample_file(s.s.id)?;
let start = s.s.sample_file_range().start + r.start; let start = s.s.sample_file_range().start + r.start;
let mmap = Box::new(unsafe { let mmap = Box::new(unsafe {

View File

@ -31,12 +31,11 @@
use coding::{append_varint32, decode_varint32, unzigzag32, zigzag32}; use coding::{append_varint32, decode_varint32, unzigzag32, zigzag32};
use core::str::FromStr; use core::str::FromStr;
use db; use db;
use error::Error; use failure::Error;
use regex::Regex; use regex::Regex;
use std::ops; use std::ops;
use std::fmt; use std::fmt;
use std::ops::Range; use std::ops::Range;
use std::string::String;
use time; use time;
pub const TIME_UNITS_PER_SEC: i64 = 90000; pub const TIME_UNITS_PER_SEC: i64 = 90000;
@ -77,7 +76,7 @@ impl Time {
} }
// If that failed, parse as a time string or bust. // If that failed, parse as a time string or bust.
let c = RE.captures(s).ok_or_else(|| Error::new(format!("unparseable time {:?}", s)))?; let c = RE.captures(s).ok_or_else(|| format_err!("unparseable time {:?}", s))?;
let mut tm = time::Tm{ let mut tm = time::Tm{
tm_sec: i32::from_str(c.get(6).unwrap().as_str()).unwrap(), tm_sec: i32::from_str(c.get(6).unwrap().as_str()).unwrap(),
tm_min: i32::from_str(c.get(5).unwrap().as_str()).unwrap(), tm_min: i32::from_str(c.get(5).unwrap().as_str()).unwrap(),
@ -92,11 +91,11 @@ impl Time {
tm_nsec: 0, tm_nsec: 0,
}; };
if tm.tm_mon == 0 { if tm.tm_mon == 0 {
return Err(Error::new(format!("time {:?} has month 0", s))); bail!("time {:?} has month 0", s);
} }
tm.tm_mon -= 1; tm.tm_mon -= 1;
if tm.tm_year < 1900 { if tm.tm_year < 1900 {
return Err(Error::new(format!("time {:?} has year before 1900", s))); bail!("time {:?} has year before 1900", s);
} }
tm.tm_year -= 1900; tm.tm_year -= 1900;
@ -250,25 +249,20 @@ impl SampleIndexIterator {
} }
let (raw1, i1) = match decode_varint32(data, i) { let (raw1, i1) = match decode_varint32(data, i) {
Ok(tuple) => tuple, Ok(tuple) => tuple,
Err(()) => return Err(Error::new(format!("bad varint 1 at offset {}", i))), Err(()) => bail!("bad varint 1 at offset {}", i),
}; };
let (raw2, i2) = match decode_varint32(data, i1) { let (raw2, i2) = match decode_varint32(data, i1) {
Ok(tuple) => tuple, Ok(tuple) => tuple,
Err(()) => return Err(Error::new(format!("bad varint 2 at offset {}", i1))), Err(()) => bail!("bad varint 2 at offset {}", i1),
}; };
let duration_90k_delta = unzigzag32(raw1 >> 1); let duration_90k_delta = unzigzag32(raw1 >> 1);
self.duration_90k += duration_90k_delta; self.duration_90k += duration_90k_delta;
if self.duration_90k < 0 { if self.duration_90k < 0 {
return Err(Error{ bail!("negative duration {} after applying delta {}",
description: format!("negative duration {} after applying delta {}", self.duration_90k, duration_90k_delta);
self.duration_90k, duration_90k_delta),
cause: None});
} }
if self.duration_90k == 0 && data.len() > i2 { if self.duration_90k == 0 && data.len() > i2 {
return Err(Error{ bail!("zero duration only allowed at end; have {} bytes left", data.len() - i2);
description: format!("zero duration only allowed at end; have {} bytes left",
data.len() - i2),
cause: None});
} }
let (prev_bytes_key, prev_bytes_nonkey) = match self.is_key() { let (prev_bytes_key, prev_bytes_nonkey) = match self.is_key() {
true => (self.bytes, self.bytes_other), true => (self.bytes, self.bytes_other),
@ -284,11 +278,8 @@ impl SampleIndexIterator {
self.bytes_other = prev_bytes_key; self.bytes_other = prev_bytes_key;
} }
if self.bytes <= 0 { if self.bytes <= 0 {
return Err(Error{ bail!("non-positive bytes {} after applying delta {} to key={} frame at ts {}",
description: format!("non-positive bytes {} after applying delta {} to key={} \ self.bytes, bytes_delta, self.is_key(), self.start_90k);
frame at ts {}", self.bytes, bytes_delta, self.is_key(),
self.start_90k),
cause: None});
} }
Ok(true) Ok(true)
} }
@ -395,10 +386,9 @@ impl Segment {
if self_.desired_range_90k.start > self_.desired_range_90k.end || if self_.desired_range_90k.start > self_.desired_range_90k.end ||
self_.desired_range_90k.end > recording.duration_90k { self_.desired_range_90k.end > recording.duration_90k {
return Err(Error::new(format!( bail!("desired range [{}, {}) invalid for recording of length {}",
"desired range [{}, {}) invalid for recording of length {}", self_.desired_range_90k.start, self_.desired_range_90k.end,
self_.desired_range_90k.start, self_.desired_range_90k.end, recording.duration_90k);
recording.duration_90k)));
} }
if self_.desired_range_90k.start == 0 && if self_.desired_range_90k.start == 0 &&
@ -416,12 +406,10 @@ impl Segment {
let data = &(&playback).video_index; let data = &(&playback).video_index;
let mut it = SampleIndexIterator::new(); let mut it = SampleIndexIterator::new();
if !it.next(data)? { if !it.next(data)? {
return Err(Error{description: String::from("no index"), bail!("no index");
cause: None});
} }
if !it.is_key() { if !it.is_key() {
return Err(Error{description: String::from("not key frame"), bail!("not key frame");
cause: None});
} }
// Stop when hitting a frame with this start time. // Stop when hitting a frame with this start time.
@ -487,26 +475,23 @@ impl Segment {
}; };
if it.uninitialized() { if it.uninitialized() {
if !it.next(data)? { if !it.next(data)? {
return Err(Error::new(format!("recording {}: no frames", self.id))); bail!("recording {}: no frames", self.id);
} }
if !it.is_key() { if !it.is_key() {
return Err(Error::new(format!("recording {}: doesn't start with key frame", bail!("recording {}: doesn't start with key frame", self.id);
self.id)));
} }
} }
let mut have_frame = true; let mut have_frame = true;
let mut key_frame = 0; let mut key_frame = 0;
for i in 0 .. self.frames { for i in 0 .. self.frames {
if !have_frame { if !have_frame {
return Err(Error::new(format!("recording {}: expected {} frames, found only {}", bail!("recording {}: expected {} frames, found only {}", self.id, self.frames, i+1);
self.id, self.frames, i+1)));
} }
if it.is_key() { if it.is_key() {
key_frame += 1; key_frame += 1;
if key_frame > self.key_frames { if key_frame > self.key_frames {
return Err(Error::new(format!( bail!("recording {}: more than expected {} key frames",
"recording {}: more than expected {} key frames", self.id, self.key_frames);
self.id, self.key_frames)));
} }
} }
@ -517,8 +502,8 @@ impl Segment {
have_frame = try!(it.next(data)); have_frame = try!(it.next(data));
} }
if key_frame < self.key_frames { if key_frame < self.key_frames {
return Err(Error::new(format!("recording {}: expected {} key frames, found only {}", bail!("recording {}: expected {} key frames, found only {}",
self.id, self.key_frames, key_frame))); self.id, self.key_frames, key_frame);
} }
Ok(()) Ok(())
} }
@ -644,7 +629,7 @@ mod tests {
]; ];
for test in &tests { for test in &tests {
let mut it = SampleIndexIterator::new(); let mut it = SampleIndexIterator::new();
assert_eq!(it.next(test.encoded).unwrap_err().description, test.err); assert_eq!(it.next(test.encoded).unwrap_err().to_string(), test.err);
} }
} }

View File

@ -30,10 +30,10 @@
//! Tools for implementing a `http_serve::Entity` body composed from many "slices". //! Tools for implementing a `http_serve::Entity` body composed from many "slices".
use error::Error; use failure::Error;
use reffers::ARefs;
use futures::stream; use futures::stream;
use futures::Stream; use futures::Stream;
use reffers::ARefs;
use std::fmt; use std::fmt;
use std::ops::Range; use std::ops::Range;
@ -96,9 +96,8 @@ impl<S> Slices<S> where S: Slice {
/// Appends the given slice, which must have end > the Slice's current len. /// Appends the given slice, which must have end > the Slice's current len.
pub fn append(&mut self, slice: S) -> Result<(), Error> { pub fn append(&mut self, slice: S) -> Result<(), Error> {
if slice.end() <= self.len { if slice.end() <= self.len {
return Err(Error::new( bail!("end {} <= len {} while adding slice {:?} to slices:\n{:?}",
format!("end {} <= len {} while adding slice {:?} to slices:\n{:?}", slice.end(), self.len, slice, self);
slice.end(), self.len, slice, self)));
} }
self.len = slice.end(); self.len = slice.end();
self.slices.push(slice); self.slices.push(slice);

View File

@ -28,7 +28,7 @@
// You should have received a copy of the GNU General Public License // You should have received a copy of the GNU General Public License
// along with this program. If not, see <http://www.gnu.org/licenses/>. // along with this program. If not, see <http://www.gnu.org/licenses/>.
use error::Error; use failure::Error;
use h264; use h264;
use moonfire_ffmpeg; use moonfire_ffmpeg;
use std::os::raw::c_char; use std::os::raw::c_char;
@ -129,7 +129,7 @@ impl Opener<FfmpegStream> for Ffmpeg {
} }
let video_i = match video_i { let video_i = match video_i {
Some(i) => i, Some(i) => i,
None => { return Err(Error::new("no video stream".to_owned())) }, None => bail!("no video stream"),
}; };
let mut stream = FfmpegStream{ let mut stream = FfmpegStream{
@ -156,13 +156,12 @@ impl Stream for FfmpegStream {
let video = self.input.streams().get(self.video_i); let video = self.input.streams().get(self.video_i);
let tb = video.time_base(); let tb = video.time_base();
if tb.num != 1 || tb.den != 90000 { if tb.num != 1 || tb.den != 90000 {
return Err(Error::new(format!("video stream has timebase {}/{}; expected 1/90000", bail!("video stream has timebase {}/{}; expected 1/90000", tb.num, tb.den);
tb.num, tb.den)));
} }
let codec = video.codec(); let codec = video.codec();
let codec_id = codec.codec_id(); let codec_id = codec.codec_id();
if !codec_id.is_h264() { if !codec_id.is_h264() {
return Err(Error::new(format!("stream's video codec {:?} is not h264", codec_id))); bail!("stream's video codec {:?} is not h264", codec_id);
} }
h264::ExtraData::parse(codec.extradata(), codec.width() as u16, codec.height() as u16) h264::ExtraData::parse(codec.extradata(), codec.width() as u16, codec.height() as u16)
} }

View File

@ -31,7 +31,7 @@
use clock::{Clocks, TimerGuard}; use clock::{Clocks, TimerGuard};
use db::{Camera, Database, Stream}; use db::{Camera, Database, Stream};
use dir; use dir;
use error::Error; use failure::Error;
use h264; use h264;
use recording; use recording;
use std::result::Result; use std::result::Result;
@ -134,7 +134,7 @@ impl<'a, C, S> Streamer<'a, C, S> where C: 'a + Clocks, S: 'a + stream::Stream {
let _t = TimerGuard::new(self.clocks, || "getting next packet"); let _t = TimerGuard::new(self.clocks, || "getting next packet");
stream.get_next()? stream.get_next()?
}; };
let pts = pkt.pts().ok_or_else(|| Error::new("packet with no pts".to_owned()))?; let pts = pkt.pts().ok_or_else(|| format_err!("packet with no pts"))?;
if !seen_key_frame && !pkt.is_key() { if !seen_key_frame && !pkt.is_key() {
continue; continue;
} else if !seen_key_frame { } else if !seen_key_frame {
@ -177,7 +177,7 @@ impl<'a, C, S> Streamer<'a, C, S> where C: 'a + Clocks, S: 'a + stream::Stream {
}; };
let orig_data = match pkt.data() { let orig_data = match pkt.data() {
Some(d) => d, Some(d) => d,
None => return Err(Error::new("packet has no data".to_owned())), None => bail!("packet has no data"),
}; };
let transformed_data = if extra_data.need_transform { let transformed_data = if extra_data.need_transform {
h264::transform_sample_data(orig_data, &mut transformed)?; h264::transform_sample_data(orig_data, &mut transformed)?;
@ -202,7 +202,7 @@ impl<'a, C, S> Streamer<'a, C, S> where C: 'a + Clocks, S: 'a + stream::Stream {
mod tests { mod tests {
use clock::{self, Clocks}; use clock::{self, Clocks};
use db::{self, CompositeId}; use db::{self, CompositeId};
use error::Error; use failure::Error;
use h264; use h264;
use moonfire_ffmpeg; use moonfire_ffmpeg;
use recording; use recording;
@ -301,7 +301,7 @@ mod tests {
None => { None => {
trace!("MockOpener shutting down"); trace!("MockOpener shutting down");
self.shutdown.store(true, Ordering::SeqCst); self.shutdown.store(true, Ordering::SeqCst);
Err(Error::new("done".to_owned())) bail!("done")
}, },
} }
} }

View File

@ -34,7 +34,7 @@ use core::borrow::Borrow;
use core::str::FromStr; use core::str::FromStr;
use db; use db;
use dir::SampleFileDir; use dir::SampleFileDir;
use error::Error; use failure::Error;
use fnv::FnvHashMap; use fnv::FnvHashMap;
use futures::{future, stream}; use futures::{future, stream};
use futures_cpupool; use futures_cpupool;
@ -227,7 +227,7 @@ impl ServiceInner {
if let Some(mut w) = http_serve::streaming_body(&req, &mut resp).build() { if let Some(mut w) = http_serve::streaming_body(&req, &mut resp).build() {
let db = self.db.lock(); let db = self.db.lock();
let camera = db.get_camera(uuid) let camera = db.get_camera(uuid)
.ok_or_else(|| Error::new("no such camera".to_owned()))?; .ok_or_else(|| format_err!("no such camera {}", uuid))?;
serde_json::to_writer(&mut w, &json::Camera::wrap(camera, &db, true)?)? serde_json::to_writer(&mut w, &json::Camera::wrap(camera, &db, true)?)?
}; };
Ok(resp) Ok(resp)
@ -255,9 +255,9 @@ impl ServiceInner {
{ {
let db = self.db.lock(); let db = self.db.lock();
let camera = db.get_camera(uuid) let camera = db.get_camera(uuid)
.ok_or_else(|| Error::new("no such camera".to_owned()))?; .ok_or_else(|| format_err!("no such camera {}", uuid))?;
let stream_id = camera.streams[type_.index()] let stream_id = camera.streams[type_.index()]
.ok_or_else(|| Error::new("no such stream".to_owned()))?; .ok_or_else(|| format_err!("no such stream {}/{}", uuid, type_))?;
db.list_aggregated_recordings(stream_id, r, split, |row| { db.list_aggregated_recordings(stream_id, r, split, |row| {
let end = row.ids.end - 1; // in api, ids are inclusive. let end = row.ids.end - 1; // in api, ids are inclusive.
out.recordings.push(json::Recording { out.recordings.push(json::Recording {
@ -299,8 +299,9 @@ impl ServiceInner {
let stream_id = { let stream_id = {
let db = self.db.lock(); let db = self.db.lock();
let camera = db.get_camera(uuid) let camera = db.get_camera(uuid)
.ok_or_else(|| Error::new("no such camera".to_owned()))?; .ok_or_else(|| format_err!("no such camera {}", uuid))?;
camera.streams[stream_type_.index()].ok_or_else(|| Error::new("no such stream".to_owned()))? camera.streams[stream_type_.index()]
.ok_or_else(|| format_err!("no such stream {}/{}", uuid, stream_type_))?
}; };
let mut builder = mp4::FileBuilder::new(mp4_type_); let mut builder = mp4::FileBuilder::new(mp4_type_);
if let Some(q) = req.uri().query() { if let Some(q) = req.uri().query() {
@ -309,7 +310,7 @@ impl ServiceInner {
match key { match key {
"s" => { "s" => {
let s = Segments::parse(value).map_err( let s = Segments::parse(value).map_err(
|_| Error::new(format!("invalid s parameter: {}", value)))?; |_| format_err!("invalid s parameter: {}", value))?;
debug!("stream_view_mp4: appending s={:?}", s); debug!("stream_view_mp4: appending s={:?}", s);
let mut est_segments = (s.ids.end - s.ids.start) as usize; let mut est_segments = (s.ids.end - s.ids.start) as usize;
if let Some(end) = s.end_time { if let Some(end) = s.end_time {
@ -333,11 +334,9 @@ impl ServiceInner {
// Check for missing recordings. // Check for missing recordings.
match prev { match prev {
None if recording_id == s.ids.start => {}, None if recording_id == s.ids.start => {},
None => return Err(Error::new(format!("no such recording {}/{}", None => bail!("no such recording {}/{}", stream_id, s.ids.start),
stream_id, s.ids.start))),
Some(id) if r.id.recording() != id + 1 => { Some(id) if r.id.recording() != id + 1 => {
return Err(Error::new(format!("no such recording {}/{}", bail!("no such recording {}/{}", stream_id, id + 1);
stream_id, id + 1)));
}, },
_ => {}, _ => {},
}; };
@ -363,24 +362,21 @@ impl ServiceInner {
// Check for missing recordings. // Check for missing recordings.
match prev { match prev {
Some(id) if s.ids.end != id + 1 => { Some(id) if s.ids.end != id + 1 => {
return Err(Error::new(format!("no such recording {}/{}", bail!("no such recording {}/{}", stream_id, s.ids.end - 1);
stream_id, s.ids.end - 1)));
}, },
None => { None => {
return Err(Error::new(format!("no such recording {}/{}", bail!("no such recording {}/{}", stream_id, s.ids.start);
stream_id, s.ids.start)));
}, },
_ => {}, _ => {},
}; };
if let Some(end) = s.end_time { if let Some(end) = s.end_time {
if end > cur_off { if end > cur_off {
return Err(Error::new( bail!("end time {} is beyond specified recordings", end);
format!("end time {} is beyond specified recordings", end)));
} }
} }
}, },
"ts" => builder.include_timestamp_subtitle_track(value == "true"), "ts" => builder.include_timestamp_subtitle_track(value == "true"),
_ => return Err(Error::new(format!("parameter {} not understood", key))), _ => bail!("parameter {} not understood", key),
} }
}; };
} }