Mirror of https://github.com/scottlamb/moonfire-nvr.git (synced 2025-12-08 08:42:41 -05:00)
tests and fixes for Writer and Syncer
* separate these out into a new file, writer.rs, as dir.rs was getting unwieldy.
* extract traits for the parts of SampleFileDir and std::fs::File they needed; set up mock implementations (see the first sketch below).
* move clock.rs to a new base crate to be accessible from the db crate.
* add tests that exercise all the retry paths.
* bugfix: account for the new recording's bytes when calculating how much to delete.
* bugfix: when retrying an unlink failure in collect_garbage, we shouldn't warn about all the recordings no longer existing. Do this by retrying each step rather than the whole procedure again.
* avoid double-panic scenarios, which I hit while tweaking the mocks. These are quite annoying to debug, as Rust doesn't print information about either panic; I ended up using lldb to get a backtrace. Better to be cautious about what we're doing when already panicking (see the second sketch below).
* give more context on raw::insert_recording errors, which I hit as well while tweaking the new tests.
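The commit message describes the trait extraction only in prose. As a rough sketch of that pattern (the trait and type names below are illustrative, not necessarily the ones writer.rs actually defines), narrowing the filesystem dependency to a small trait lets tests substitute a mock that can inject failures for exercising the retry paths:

    use std::io;
    use std::sync::Mutex;

    /// The parts of a sample file directory the writer needs (illustrative only).
    trait DirWriter {
        type File: FileWriter;

        /// Opens a new sample file for the given recording id.
        fn create_file(&self, id: u64) -> Result<Self::File, io::Error>;
    }

    /// The parts of std::fs::File the writer needs (illustrative only).
    trait FileWriter {
        fn write(&mut self, buf: &[u8]) -> Result<usize, io::Error>;
        fn sync_all(&self) -> Result<(), io::Error>;
    }

    /// A test double: records which files were created and can be told to fail
    /// the next create, so a retry path can be exercised without real I/O.
    #[derive(Default)]
    struct MockDir {
        created: Mutex<Vec<u64>>,
        fail_next_create: Mutex<bool>,
    }

    #[derive(Default)]
    struct MockFile {
        written: Vec<u8>,
    }

    impl FileWriter for MockFile {
        fn write(&mut self, buf: &[u8]) -> Result<usize, io::Error> {
            self.written.extend_from_slice(buf);
            Ok(buf.len())
        }
        fn sync_all(&self) -> Result<(), io::Error> { Ok(()) }
    }

    impl DirWriter for MockDir {
        type File = MockFile;

        fn create_file(&self, id: u64) -> Result<MockFile, io::Error> {
            let mut fail = self.fail_next_create.lock().unwrap();
            if *fail {
                *fail = false;  // fail only once so a single retry succeeds.
                return Err(io::Error::new(io::ErrorKind::Other, "injected failure"));
            }
            self.created.lock().unwrap().push(id);
            Ok(MockFile::default())
        }
    }

Code that writes sample files can then be generic over such a trait, with a std::fs-backed implementation in production and the mock in the new tests.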
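For the double-panic point, a common Rust idiom (sketched here under the assumption that a Drop impl performs consistency checks; this is not necessarily the exact guard the commit adds) is to check std::thread::panicking() before doing anything in drop that could itself panic:

    use std::thread;

    struct Writer {
        finished: bool,
    }

    impl Drop for Writer {
        fn drop(&mut self) {
            if thread::panicking() {
                // Already unwinding from an earlier panic; panicking again here
                // would abort the process without printing either message.
                return;
            }
            assert!(self.finished, "Writer dropped without being finished");
        }
    }

With the guard, the original panic's message and backtrace survive instead of being replaced by an opaque abort that needs a debugger to diagnose.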
@@ -33,7 +33,7 @@ extern crate cursive;
 use self::cursive::Cursive;
 use self::cursive::traits::{Boxable, Identifiable, Finder};
 use self::cursive::views;
-use db::{self, dir};
+use db::{self, writer};
 use failure::Error;
 use std::collections::BTreeMap;
 use std::str::FromStr;
@@ -188,7 +188,7 @@ fn confirm_deletion(siv: &mut Cursive, db: &Arc<db::Database>, id: i32, to_delet
             None => continue,
         };
         let l = zero_limits.entry(dir_id).or_insert_with(|| Vec::with_capacity(2));
-        l.push(dir::NewLimit {
+        l.push(writer::NewLimit {
             stream_id,
             limit: 0,
         });
@@ -209,12 +209,12 @@ fn confirm_deletion(siv: &mut Cursive, db: &Arc<db::Database>, id: i32, to_delet
     }
 }
 
-fn lower_retention(db: &Arc<db::Database>, zero_limits: BTreeMap<i32, Vec<dir::NewLimit>>)
+fn lower_retention(db: &Arc<db::Database>, zero_limits: BTreeMap<i32, Vec<writer::NewLimit>>)
     -> Result<(), Error> {
     let dirs_to_open: Vec<_> = zero_limits.keys().map(|id| *id).collect();
     db.lock().open_sample_file_dirs(&dirs_to_open[..])?;
     for (&dir_id, l) in &zero_limits {
-        dir::lower_retention(db.clone(), dir_id, &l)?;
+        writer::lower_retention(db.clone(), dir_id, &l)?;
     }
     Ok(())
 }

@@ -33,7 +33,7 @@ extern crate cursive;
 use self::cursive::Cursive;
 use self::cursive::traits::{Boxable, Identifiable};
 use self::cursive::views;
-use db::{self, dir};
+use db::{self, writer};
 use failure::Error;
 use std::cell::RefCell;
 use std::collections::BTreeMap;
@@ -142,7 +142,7 @@ fn actually_delete(model: &RefCell<Model>, siv: &mut Cursive) {
     let model = &*model.borrow();
     let new_limits: Vec<_> =
         model.streams.iter()
-                     .map(|(&id, s)| dir::NewLimit {stream_id: id, limit: s.retain.unwrap()})
+                     .map(|(&id, s)| writer::NewLimit {stream_id: id, limit: s.retain.unwrap()})
                      .collect();
     siv.pop_layer();  // deletion confirmation
     siv.pop_layer();  // retention dialog
@@ -150,7 +150,7 @@ fn actually_delete(model: &RefCell<Model>, siv: &mut Cursive) {
         let mut l = model.db.lock();
         l.open_sample_file_dirs(&[model.dir_id]).unwrap();  // TODO: don't unwrap.
     }
-    if let Err(e) = dir::lower_retention(model.db.clone(), model.dir_id, &new_limits[..]) {
+    if let Err(e) = writer::lower_retention(model.db.clone(), model.dir_id, &new_limits[..]) {
         siv.add_layer(views::Dialog::text(format!("Unable to delete excess video: {}", e))
                       .title("Error")
                       .dismiss_button("Abort"));