Merge branch 'master' into new-schema

This commit is contained in:
Scott Lamb 2020-11-22 20:40:16 -08:00
commit 8512199d85
13 changed files with 118 additions and 64 deletions

View File

@ -23,7 +23,7 @@ matrix:
script:
- ci/script-rust.sh
- language: rust
rust: 1.40.0
rust: 1.42.0
script:
- ci/script-rust.sh
- language: node_js

49
Cargo.lock generated
View File

@ -15,6 +15,12 @@ dependencies = [
"const-random",
]
[[package]]
name = "ahash"
version = "0.4.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f6789e291be47ace86a60303502173d84af8327e3627ecf334356ee0f87a164c"
[[package]]
name = "ansi_term"
version = "0.9.0"
@ -414,7 +420,7 @@ version = "0.14.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "341b03eec276c30c6cdc640d8bd8c08eac9605064c3f9c4838f958dac06973bb"
dependencies = [
"ahash",
"ahash 0.2.18",
"cfg-if",
"chrono",
"crossbeam-channel",
@ -824,6 +830,24 @@ dependencies = [
"memchr",
]
[[package]]
name = "hashbrown"
version = "0.9.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d7afe4a420e3fe79967a00898cc1f4db7c8a49a9333a29f8a4bd76a253d5cd04"
dependencies = [
"ahash 0.4.6",
]
[[package]]
name = "hashlink"
version = "0.6.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d99cf782f0dc4372d26846bec3de7804ceb5df083c2d4462c0b8d2330e894fa8"
dependencies = [
"hashbrown",
]
[[package]]
name = "heck"
version = "0.3.1"
@ -1068,9 +1092,9 @@ dependencies = [
[[package]]
name = "libsqlite3-sys"
version = "0.17.3"
version = "0.20.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "56d90181c2904c287e5390186be820e5ef311a3c62edebb7d6ca3d6a48ce041d"
checksum = "64d31059f22935e6c31830db5249ba2b7ecd54fd73a9909286f0a67aa55c2fbd"
dependencies = [
"cc",
"pkg-config",
@ -1101,15 +1125,6 @@ dependencies = [
"cfg-if",
]
[[package]]
name = "lru-cache"
version = "0.1.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "31e24f1ad8321ca0e8a1e0ac13f23cb668e6f5466c2c57319f6a5cf1cc8e3b1c"
dependencies = [
"linked-hash-map",
]
[[package]]
name = "maplit"
version = "1.0.2"
@ -1235,12 +1250,12 @@ dependencies = [
"failure",
"fnv",
"h264-reader",
"hashlink",
"itertools",
"lazy_static",
"libc",
"libpasta",
"log",
"lru-cache",
"moonfire-base",
"mylog",
"nix",
@ -1973,17 +1988,17 @@ dependencies = [
[[package]]
name = "rusqlite"
version = "0.22.0"
version = "0.24.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "57edf4c4cea4d7e0fab069acb5da9e8e8e5403c78abc81b1f37d83af02148ea5"
checksum = "7e3d4791ab5517217f51216a84a688b53c1ebf7988736469c538d02f46ddba68"
dependencies = [
"bitflags",
"fallible-iterator",
"fallible-streaming-iterator",
"hashlink",
"libsqlite3-sys",
"lru-cache",
"memchr",
"time 0.1.43",
"smallvec",
]
[[package]]

View File

@ -49,7 +49,7 @@ parking_lot = { version = "0.10", features = [] }
protobuf = { git = "https://github.com/stepancheg/rust-protobuf" }
reffers = "0.6.0"
ring = "0.14.6"
rusqlite = "0.22.0"
rusqlite = "0.24.1"
serde = { version = "1.0", features = ["derive"] }
serde_json = "1.0"
smallvec = "1.0"

View File

@ -20,11 +20,11 @@ cstr = "0.1.7"
failure = "0.1.1"
fnv = "1.0"
h264-reader = { git = "https://github.com/dholroyd/h264-reader" }
hashlink = "0.6.0"
lazy_static = "1.0"
libc = "0.2"
libpasta = "0.1.0-rc2"
log = "0.4"
lru-cache = "0.1"
mylog = { git = "https://github.com/scottlamb/mylog" }
nix = "0.17.0"
odds = { version = "0.4.0", features = ["std-vec"] }
@ -32,7 +32,7 @@ parking_lot = { version = "0.10", features = [] }
prettydiff = "0.3.1"
protobuf = { git = "https://github.com/stepancheg/rust-protobuf" }
ring = "0.14.6"
rusqlite = "0.22.0"
rusqlite = "0.24.1"
smallvec = "1.0"
tempdir = "0.3"
time = "0.1"

View File

@ -62,9 +62,9 @@ use crate::schema;
use crate::signal;
use failure::{Error, bail, format_err};
use fnv::{FnvHashMap, FnvHashSet};
use hashlink::LinkedHashMap;
use itertools::Itertools;
use log::{error, info, trace};
use lru_cache::LruCache;
use parking_lot::{Mutex,MutexGuard};
use protobuf::prelude::MessageField;
use rusqlite::{named_params, params};
@ -87,6 +87,11 @@ use uuid::Uuid;
/// Expected schema version. See `guide/schema.md` for more information.
pub const EXPECTED_VERSION: i32 = 6;
/// Length of the video index cache.
/// The actual data structure is one bigger than this because we insert before we remove.
/// Make it one less than a power of two so that the data structure's size is efficient.
const VIDEO_INDEX_CACHE_LEN: usize = 1023;
const GET_RECORDING_PLAYBACK_SQL: &'static str = r#"
select
video_index
@ -333,9 +338,9 @@ pub struct StreamDayValue {
/// prunes days with 0 recordings.
pub recordings: i64,
/// The total duration recorded on this day. This can be 0; because frames' durations are taken
/// from the time of the next frame, a recording that ends unexpectedly after a single frame
/// will have 0 duration of that frame and thus the whole recording.
/// The total wall duration recorded on this day. This can be 0; because frames' durations are
/// taken from the time of the next frame, a recording that ends unexpectedly after a single
/// frame will have 0 duration of that frame and thus the whole recording.
pub duration: recording::Duration,
}
@ -484,8 +489,9 @@ pub struct Stream {
/// due to gaps and overlap.
pub duration: recording::Duration,
/// Mapping of calendar day (in the server's time zone) to a summary of recordings on that day.
pub days: BTreeMap<StreamDayKey, StreamDayValue>,
/// Mapping of calendar day (in the server's time zone) to a summary of committed recordings on
/// that day.
pub committed_days: BTreeMap<StreamDayKey, StreamDayValue>,
pub record: bool,
/// The `cum_recordings` currently committed to the database.
@ -634,7 +640,18 @@ impl Stream {
self.duration += r.end - r.start;
self.sample_file_bytes += sample_file_bytes as i64;
self.fs_bytes += round_up(i64::from(sample_file_bytes));
adjust_days(r, 1, &mut self.days);
adjust_days(r, 1, &mut self.committed_days);
}
/// Returns a days map including unflushed recordings.
pub fn days(&self) -> BTreeMap<StreamDayKey, StreamDayValue> {
let mut days = self.committed_days.clone();
for u in &self.uncommitted {
let l = u.lock();
adjust_days(l.start .. l.start + recording::Duration(i64::from(l.wall_duration_90k)),
1, &mut days);
}
days
}
}
@ -686,7 +703,7 @@ pub struct LockedDatabase {
streams_by_id: BTreeMap<i32, Stream>,
cameras_by_uuid: BTreeMap<Uuid, i32>, // values are ids.
video_sample_entries_by_id: BTreeMap<i32, Arc<VideoSampleEntry>>,
video_index_cache: RefCell<LruCache<i64, Box<[u8]>, fnv::FnvBuildHasher>>,
video_index_cache: RefCell<LinkedHashMap<i64, Box<[u8]>, fnv::FnvBuildHasher>>,
on_flush: Vec<Box<dyn Fn() + Send>>,
}
@ -837,7 +854,7 @@ impl StreamStateChanger {
bytes_to_add: 0,
fs_bytes_to_add: 0,
duration: recording::Duration(0),
days: BTreeMap::new(),
committed_days: BTreeMap::new(),
record: sc.record,
cum_recordings: 0,
cum_media_duration: recording::Duration(0),
@ -1105,7 +1122,7 @@ impl LockedDatabase {
dir.garbage_needs_unlink.insert(row.id);
let d = recording::Duration(i64::from(row.wall_duration_90k));
s.duration -= d;
adjust_days(row.start .. row.start + d, -1, &mut s.days);
adjust_days(row.start .. row.start + d, -1, &mut s.committed_days);
}
// Process add_recordings.
@ -1405,20 +1422,30 @@ impl LockedDatabase {
// Committed path.
let mut cache = self.video_index_cache.borrow_mut();
if let Some(video_index) = cache.get_mut(&id.0) {
trace!("cache hit for recording {}", id);
return f(&RecordingPlayback { video_index });
use hashlink::linked_hash_map::RawEntryMut;
match cache.raw_entry_mut().from_key(&id.0) {
RawEntryMut::Occupied(mut occupied) => {
trace!("cache hit for recording {}", id);
occupied.to_back();
let video_index = occupied.get();
return f(&RecordingPlayback { video_index });
},
RawEntryMut::Vacant(vacant) => {
trace!("cache miss for recording {}", id);
let mut stmt = self.conn.prepare_cached(GET_RECORDING_PLAYBACK_SQL)?;
let mut rows = stmt.query_named(named_params!{":composite_id": id.0})?;
if let Some(row) = rows.next()? {
let video_index: VideoIndex = row.get(0)?;
let result = f(&RecordingPlayback { video_index: &video_index.0[..] });
vacant.insert(id.0, video_index.0);
if cache.len() > VIDEO_INDEX_CACHE_LEN {
cache.pop_front();
}
return result;
}
Err(format_err!("no such recording {}", id))
},
}
trace!("cache miss for recording {}", id);
let mut stmt = self.conn.prepare_cached(GET_RECORDING_PLAYBACK_SQL)?;
let mut rows = stmt.query_named(named_params!{":composite_id": id.0})?;
if let Some(row) = rows.next()? {
let video_index: VideoIndex = row.get(0)?;
let result = f(&RecordingPlayback { video_index: &video_index.0[..] });
cache.insert(id.0, video_index.0);
return result;
}
Err(format_err!("no such recording {}", id))
}
/// Queues for deletion the oldest recordings that aren't already queued.
@ -1606,7 +1633,7 @@ impl LockedDatabase {
bytes_to_add: 0,
fs_bytes_to_add: 0,
duration: recording::Duration(0),
days: BTreeMap::new(),
committed_days: BTreeMap::new(),
cum_recordings: row.get(7)?,
cum_media_duration: recording::Duration(row.get(8)?),
cum_runs: row.get(9)?,
@ -2085,7 +2112,8 @@ impl<C: Clocks + Clone> Database<C> {
cameras_by_uuid: BTreeMap::new(),
streams_by_id: BTreeMap::new(),
video_sample_entries_by_id: BTreeMap::new(),
video_index_cache: RefCell::new(LruCache::with_hasher(1024, Default::default())),
video_index_cache: RefCell::new(LinkedHashMap::with_capacity_and_hasher(
VIDEO_INDEX_CACHE_LEN + 1, Default::default())),
on_flush: Vec::new(),
})),
clocks,

View File

@ -93,8 +93,11 @@ The `application/json` response will have a dict as follows:
filesystem block allocated to each file.
* `days`: (only included if request parameter `days` is true)
dictionary representing calendar days (in the server's time zone)
with non-zero total duration of recordings for that day. The keys
are of the form `YYYY-mm-dd`; the values are objects with the
with non-zero total duration of recordings for that day. Currently
this includes uncommitted and growing recordings. This is likely
to change in a future release for
[#40](https://github.com/scottlamb/moonfire-nvr/issues/40). The
keys are of the form `YYYY-mm-dd`; the values are objects with the
following attributes:
* `totalDuration90k` is the total duration recorded during that
day. If a recording spans a day boundary, some portion of it

View File

@ -48,7 +48,7 @@ $ sudo apt-get install \
tzdata
```
Next, you need Rust 1.40+ and Cargo. The easiest way to install them is by
Next, you need Rust 1.42+ and Cargo. The easiest way to install them is by
following the instructions at [rustup.rs](https://www.rustup.rs/).
Finally, building the UI requires [yarn](https://yarnpkg.com/en/).

View File

@ -85,13 +85,13 @@ RequiresMountsFor=/media/nvr
## Completing configuration through the UI
Once setup is complete, it is time to add sample file directory and camera
configurations to the database.
You can configure the system's database through a text-based user interface:
Once your system is set up, it's time to initialize an empty database,
and add the cameras and sample directories to moonfire. You can do this
by using the `moonfire-nvr` binary's text-based configuration tool.
```
$ sudo -u moonfire-nvr moonfire-nvr config 2>debug-log
$ sudo -u moonfire-nvr moonfire-nvr init # Initialize empty db
$ sudo -u moonfire-nvr moonfire-nvr config 2>debug-log # Configure cameras and storage
```
In the user interface,

View File

@ -40,7 +40,7 @@ fi
NODE_MIN_VERSION="10"
YARN_MIN_VERSION="1.0"
CARGO_MIN_VERSION="0.2"
RUSTC_MIN_VERSION="1.40"
RUSTC_MIN_VERSION="1.42"
normalizeDirPath()
{

View File

@ -53,11 +53,17 @@ enum OpenMode {
/// Locks the directory without opening the database.
/// The returned `dir::Fd` holds the lock and should be kept open as long as the `Connection` is.
fn open_dir(db_dir: &Path, mode: OpenMode) -> Result<dir::Fd, Error> {
let dir = dir::Fd::open(db_dir, mode == OpenMode::Create)?;
let dir = dir::Fd::open(db_dir, mode == OpenMode::Create)
.map_err(|e| e.context(if e == nix::Error::Sys(nix::errno::Errno::ENOENT) {
format!("db dir {} not found; try running moonfire-nvr init",
db_dir.display())
} else {
format!("unable to open db dir {}: {}", db_dir.display(), &e)
}))?;
let ro = mode == OpenMode::ReadOnly;
dir.lock(if ro { FlockArg::LockSharedNonblock } else { FlockArg::LockExclusiveNonblock })
.map_err(|e| e.context(format!("db dir {:?} already in use; can't get {} lock",
db_dir, if ro { "shared" } else { "exclusive" })))?;
.map_err(|e| e.context(format!("db dir {} already in use; can't get {} lock",
db_dir.display(), if ro { "shared" } else { "exclusive" })))?;
Ok(dir)
}

View File

@ -54,7 +54,7 @@ pub struct Args {
db_dir: PathBuf,
/// Directory holding user interface files (.html, .js, etc).
#[structopt(default_value = "/usr/local/lib/moonfire-nvr/ui", value_name="path",
#[structopt(long, default_value = "/usr/local/lib/moonfire-nvr/ui", value_name="path",
parse(from_os_str))]
ui_dir: std::path::PathBuf,

View File

@ -110,7 +110,7 @@ pub struct Stream<'a> {
#[serde(skip_serializing_if = "Option::is_none")]
#[serde(serialize_with = "Stream::serialize_days")]
pub days: Option<&'a BTreeMap<db::StreamDayKey, db::StreamDayValue>>,
pub days: Option<BTreeMap<db::StreamDayKey, db::StreamDayValue>>,
#[serde(skip_serializing_if = "Option::is_none")]
pub config: Option<StreamConfig<'a>>,
@ -219,7 +219,7 @@ impl<'a> Camera<'a> {
})
}
fn serialize_streams<S>(streams: &[Option<Stream<'a>>; 2], serializer: S) -> Result<S::Ok, S::Error>
fn serialize_streams<S>(streams: &[Option<Stream>; 2], serializer: S) -> Result<S::Ok, S::Error>
where S: Serializer {
let mut map = serializer.serialize_map(Some(streams.len()))?;
for (i, s) in streams.iter().enumerate() {
@ -247,7 +247,7 @@ impl<'a> Stream<'a> {
total_duration_90k: s.duration.0,
total_sample_file_bytes: s.sample_file_bytes,
fs_bytes: s.fs_bytes,
days: if include_days { Some(&s.days) } else { None },
days: if include_days { Some(s.days()) } else { None },
config: match include_config {
false => None,
true => Some(StreamConfig {
@ -257,10 +257,10 @@ impl<'a> Stream<'a> {
}))
}
fn serialize_days<S>(days: &Option<&BTreeMap<db::StreamDayKey, db::StreamDayValue>>,
fn serialize_days<S>(days: &Option<BTreeMap<db::StreamDayKey, db::StreamDayValue>>,
serializer: S) -> Result<S::Ok, S::Error>
where S: Serializer {
let days = match *days {
let days = match days.as_ref() {
Some(d) => d,
None => return serializer.serialize_none(),
};

View File

@ -1,6 +1,8 @@
<!DOCTYPE html>
<head>
<title>Moonfire NVR</title>
<meta name="mobile-web-app-capable" content="yes">
<meta name="apple-mobile-web-app-capable" content="yes">
<meta name="viewport" content="width=device-width, initial-scale=1">
</head>