schema version 2: support sub streams

This allows each camera to have a main and a sub stream. Previously there was
a field in the schema for the sub stream's url, but it didn't do anything. Now
you can configure individual retention for main and sub streams. They show up
grouped in the UI.

No support for upgrading from schema version 1 yet.
This commit is contained in:
Scott Lamb 2018-01-23 11:05:07 -08:00
parent 0d69f4f49b
commit dc402bdc01
17 changed files with 936 additions and 599 deletions

View File

@ -44,29 +44,32 @@ The `application/json` response will have a dict as follows:
* `uuid`: in text format
* `shortName`: a short name (typically one or two words)
* `description`: a longer description (typically a phrase or paragraph)
* `streams`: a dict of stream type ("main" or "sub") to a dictionary
describing the stream:
* `retainBytes`: the configured total number of bytes of completed
recordings to retain.
* `minStartTime90k`: the start time of the earliest recording for this
* `minStartTime90k`: the start time of the earliest recording for
this camera, in 90kHz units since 1970-01-01 00:00:00 UTC.
* `maxEndTime90k`: the end time of the latest recording for this
camera, in 90kHz units since 1970-01-01 00:00:00 UTC.
* `maxEndTime90k`: the end time of the latest recording for this camera,
in 90kHz units since 1970-01-01 00:00:00 UTC.
* `totalDuration90k`: the total duration recorded, in 90 kHz units.
This is no greater than `maxEndTime90k - minStartTime90k`; it will be
less if there are gaps in the recorded data.
* `totalSampleFileBytes`: the total number of bytes of sample data (the
`mdat` portion of a `.mp4` file).
* `days`: object representing calendar days (in the server's time zone)
with non-zero total duration of recordings for that day. The keys are
of the form `YYYY-mm-dd`; the values are objects with the following
attributes:
* `totalDuration90k` is the total duration recorded during that day.
If a recording spans a day boundary, some portion of it is accounted to
each day.
* `startTime90k` is the start of that calendar day in the server's time
zone.
* `endTime90k` is the end of that calendar day in the server's time zone.
It is usually 24 hours after the start time. It might be 23 hours or 25
hours during spring forward or fall back, respectively.
This is no greater than `maxEndTime90k - minStartTime90k`; it will
be less if there are gaps in the recorded data.
* `totalSampleFileBytes`: the total number of bytes of sample data
(the `mdat` portion of a `.mp4` file).
* `days`: object representing calendar days (in the server's time
zone) with non-zero total duration of recordings for that day. The
keys are of the form `YYYY-mm-dd`; the values are objects with the
following attributes:
* `totalDuration90k` is the total duration recorded during that
day. If a recording spans a day boundary, some portion of it
is accounted to each day.
* `startTime90k` is the start of that calendar day in the
server's time zone.
* `endTime90k` is the end of that calendar day in the server's
time zone. It is usually 24 hours after the start time. It
might be 23 hours or 25 hours during spring forward or fall
back, respectively.
Example response:
@ -78,6 +81,8 @@ Example response:
"uuid": "fd20f7a2-9d69-4cb3-94ed-d51a20c3edfe",
"shortName": "driveway",
"description": "Hikvision DS-2CD2032 overlooking the driveway from east",
"streams": {
"main": {
"retainBytes": 536870912000,
"minStartTime90k": 130888729442361,
"maxEndTime90k": 130985466591817,
@ -94,7 +99,9 @@ Example response:
"startTime90k": 131595516000000,
"totalDuration90k": 20946022
}
},
}
}
}
},
...
],
@ -109,6 +116,9 @@ Example response:
```json
{
"description": "",
"streams": {
"main": {
"days": {
"2016-05-01": {
"endTime90k": 131595516000000,
@ -121,17 +131,18 @@ Example response:
"totalDuration90k": 20946022
}
},
"description": "",
"maxEndTime90k": 131598273666690,
"minStartTime90k": 131590386129355,
"retainBytes": 104857600,
"shortName": "driveway",
"totalDuration90k": 73563631,
"totalSampleFileBytes": 98901406
}
},
"shortName": "driveway"
}
```
### `/api/cameras/<uuid>/recordings`
### `/api/cameras/<uuid>/<stream>/recordings`
A GET returns information about recordings, in descending order.
@ -175,7 +186,7 @@ Each recording object has the following properties:
Example request URI (with added whitespace between parameters):
```
/api/cameras/fd20f7a2-9d69-4cb3-94ed-d51a20c3edfe/recordings
/api/cameras/fd20f7a2-9d69-4cb3-94ed-d51a20c3edfe/main/recordings
?startTime90k=130888729442361
&endTime90k=130985466591817
```
@ -204,7 +215,7 @@ Example response:
}
```
### `/api/cameras/<uuid>/view.mp4`
### `/api/cameras/<uuid>/<stream>/view.mp4`
A GET returns a `.mp4` file, with an etag and support for range requests. The
MIME type will be `video/mp4`, with a `codecs` parameter as specified in [RFC
@ -230,27 +241,27 @@ Expected query parameters:
Example request URI to retrieve all of recording id 1 from the given camera:
```
/api/cameras/fd20f7a2-9d69-4cb3-94ed-d51a20c3edfe/view.mp4?s=1
/api/cameras/fd20f7a2-9d69-4cb3-94ed-d51a20c3edfe/main/view.mp4?s=1
```
Example request URI to retrieve all of recording ids 1–5 from the given camera,
with timestamp subtitles:
```
/api/cameras/fd20f7a2-9d69-4cb3-94ed-d51a20c3edfe/view.mp4?s=1-5&ts=true
/api/cameras/fd20f7a2-9d69-4cb3-94ed-d51a20c3edfe/main/view.mp4?s=1-5&ts=true
```
Example request URI to retrieve recording id 1, skipping its first 26
90,000ths of a second:
```
/api/cameras/fd20f7a2-9d69-4cb3-94ed-d51a20c3edfe/view.mp4?s=1.26
/api/cameras/fd20f7a2-9d69-4cb3-94ed-d51a20c3edfe/main/view.mp4?s=1.26
```
TODO: error behavior on missing segment. It should be a 404, likely with an
`application/json` body describing what portion if any (still) exists.
### `/api/cameras/<uuid>/view.m4s`
### `/api/cameras/<uuid>/<stream>/view.m4s`
A GET returns a `.mp4` suitable for use as a [HTML5 Media Source Extensions
media segment][media-segment]. The MIME type will be `video/mp4`, with a

View File

@ -4,7 +4,7 @@
"url": "https://github.com/scottlamb/moonfire-nvr/issues"
},
"scripts": {
"build": "webpack && ln ui-src/index.html ui-dist/"
"build": "webpack && ln -f ui-src/index.html ui-dist/"
},
"dependencies": {
"jquery": "^3.2.1",

View File

@ -57,8 +57,7 @@ fn get_change(siv: &mut Cursive) -> db::CameraChange {
host: h,
username: u,
password: p,
main_rtsp_path: m,
sub_rtsp_path: s,
rtsp_paths: [m, s],
}
}
@ -145,8 +144,20 @@ fn confirm_deletion(siv: &mut Cursive, db: &Arc<db::Database>, dir: &Arc<dir::Sa
let typed = siv.find_id::<views::EditView>("confirm").unwrap().get_content();
if decode_size(typed.as_str()).ok() == Some(to_delete) {
siv.pop_layer(); // deletion confirmation dialog
if let Err(e) = dir::lower_retention(dir.clone(),
&[dir::NewLimit{camera_id: id, limit: 0}]) {
let mut zero_limits = Vec::new();
{
let l = db.lock();
for (&stream_id, stream) in l.streams_by_id() {
if stream.camera_id == id {
zero_limits.push(dir::NewLimit {
stream_id,
limit: 0,
});
}
}
}
if let Err(e) = dir::lower_retention(dir.clone(), &zero_limits) {
siv.add_layer(views::Dialog::text(format!("Unable to delete recordings: {}", e))
.title("Error")
.dismiss_button("Abort"));
@ -162,7 +173,6 @@ fn confirm_deletion(siv: &mut Cursive, db: &Arc<db::Database>, dir: &Arc<dir::Sa
fn actually_delete(siv: &mut Cursive, db: &Arc<db::Database>, dir: &Arc<dir::SampleFileDir>,
id: i32) {
info!("actually_delete call");
siv.pop_layer(); // get rid of the add/edit camera dialog.
let result = {
let mut l = db.lock();
@ -198,14 +208,14 @@ fn edit_camera_dialog(db: &Arc<db::Database>, dir: &Arc<dir::SampleFileDir>, siv
.child(views::DummyView)
.child(views::Button::new("Test", |siv| {
let c = get_change(siv);
press_test(siv, &c, "main", &c.main_rtsp_path)
press_test(siv, &c, "main", &c.rtsp_paths[0])
})))
.child("sub_rtsp_path", views::LinearLayout::horizontal()
.child(views::EditView::new().with_id("sub_rtsp_path").full_width())
.child(views::DummyView)
.child(views::Button::new("Test", |siv| {
let c = get_change(siv);
press_test(siv, &c, "sub", &c.sub_rtsp_path)
press_test(siv, &c, "sub", &c.rtsp_paths[1])
})))
.min_height(8);
let layout = views::LinearLayout::vertical()
@ -214,22 +224,41 @@ fn edit_camera_dialog(db: &Arc<db::Database>, dir: &Arc<dir::SampleFileDir>, siv
.child(views::TextArea::new().with_id("description").min_height(3))
.full_width();
let mut dialog = views::Dialog::around(layout);
let dialog = if let Some(id) = *item {
let dialog = if let Some(camera_id) = *item {
let l = db.lock();
let camera = l.cameras_by_id().get(&id).expect("missing camera");
let camera = l.cameras_by_id().get(&camera_id).expect("missing camera");
dialog.find_id("uuid", |v: &mut views::TextView| v.set_content(camera.uuid.to_string()))
.expect("missing TextView");
let bytes = camera.sample_file_bytes;
let mut main_rtsp_path = "";
let mut sub_rtsp_path = "";
let mut bytes = 0;
for (_, s) in l.streams_by_id() {
if s.camera_id != camera_id { continue; }
bytes += s.sample_file_bytes;
match s.type_ {
db::StreamType::MAIN => main_rtsp_path = &s.rtsp_path,
db::StreamType::SUB => sub_rtsp_path = &s.rtsp_path,
};
}
let name = camera.short_name.clone();
for &(view_id, content) in &[("short_name", &camera.short_name),
("host", &camera.host),
("username", &camera.username),
("password", &camera.password),
("main_rtsp_path", &camera.main_rtsp_path),
("sub_rtsp_path", &camera.sub_rtsp_path)] {
for &(view_id, content) in &[("short_name", &*camera.short_name),
("host", &*camera.host),
("username", &*camera.username),
("password", &*camera.password),
("main_rtsp_path", main_rtsp_path),
("sub_rtsp_path", sub_rtsp_path)] {
dialog.find_id(view_id, |v: &mut views::EditView| v.set_content(content.to_string()))
.expect("missing EditView");
}
for s in l.streams_by_id().values() {
if s.camera_id != camera_id { continue };
let id = match s.type_ {
db::StreamType::MAIN => "main_rtsp_path",
db::StreamType::SUB => "sub_rtsp_path",
};
dialog.find_id(id, |v: &mut views::EditView| v.set_content(s.rtsp_path.to_string()))
.expect("missing EditView");
}
dialog.find_id("description",
|v: &mut views::TextArea| v.set_content(camera.description.to_string()))
.expect("missing TextArea");
@ -237,12 +266,12 @@ fn edit_camera_dialog(db: &Arc<db::Database>, dir: &Arc<dir::SampleFileDir>, siv
.button("Edit", {
let db = db.clone();
let dir = dir.clone();
move |s| press_edit(s, &db, &dir, Some(id))
move |s| press_edit(s, &db, &dir, Some(camera_id))
})
.button("Delete", {
let db = db.clone();
let dir = dir.clone();
move |s| press_delete(s, &db, &dir, id, name.clone(), bytes)
move |s| press_delete(s, &db, &dir, camera_id, name.clone(), bytes)
})
} else {
dialog.title("Add camera")

View File

@ -144,7 +144,8 @@ pub fn run() -> Result<(), Error> {
move |siv, item| item(&db, &dir, siv)
})
.item("Edit cameras".to_string(), cameras::add_dialog)
.item("Edit retention".to_string(), retention::add_dialog))
.item("Edit retention".to_string(), retention::add_dialog)
)
.button("Quit", |siv| siv.quit())
.title("Main menu"));

View File

@ -42,7 +42,7 @@ use std::rc::Rc;
use std::sync::Arc;
use super::{decode_size, encode_size};
struct Camera {
struct Stream {
label: String,
used: i64,
retain: Option<i64>, // None if unparseable
@ -55,15 +55,15 @@ struct Model {
total_used: i64,
total_retain: i64,
errors: isize,
cameras: BTreeMap<i32, Camera>,
streams: BTreeMap<i32, Stream>,
}
/// Updates the limits in the database. Doesn't delete excess data (if any).
fn update_limits_inner(model: &Model) -> Result<(), Error> {
let mut db = model.db.lock();
let mut tx = db.tx()?;
for (&id, camera) in &model.cameras {
tx.update_retention(id, camera.retain.unwrap())?;
for (&id, stream) in &model.streams {
tx.update_retention(id, stream.retain.unwrap())?;
}
tx.commit()
}
@ -77,12 +77,12 @@ fn update_limits(model: &Model, siv: &mut Cursive) {
}
fn edit_limit(model: &RefCell<Model>, siv: &mut Cursive, id: i32, content: &str) {
info!("on_edit called for id {}", id);
debug!("on_edit called for id {}", id);
let mut model = model.borrow_mut();
let model: &mut Model = &mut *model;
let camera = model.cameras.get_mut(&id).unwrap();
let stream = model.streams.get_mut(&id).unwrap();
let new_value = decode_size(content).ok();
let delta = new_value.unwrap_or(0) - camera.retain.unwrap_or(0);
let delta = new_value.unwrap_or(0) - stream.retain.unwrap_or(0);
let old_errors = model.errors;
if delta != 0 {
let prev_over = model.total_retain > model.fs_capacity;
@ -91,7 +91,6 @@ fn edit_limit(model: &RefCell<Model>, siv: &mut Cursive, id: i32, content: &str)
.unwrap()
.set_content(encode_size(model.total_retain));
let now_over = model.total_retain > model.fs_capacity;
info!("now_over: {}", now_over);
if now_over != prev_over {
model.errors += if now_over { 1 } else { -1 };
siv.find_id::<views::TextView>("total_ok")
@ -99,13 +98,13 @@ fn edit_limit(model: &RefCell<Model>, siv: &mut Cursive, id: i32, content: &str)
.set_content(if now_over { "*" } else { " " });
}
}
if new_value.is_none() != camera.retain.is_none() {
if new_value.is_none() != stream.retain.is_none() {
model.errors += if new_value.is_none() { 1 } else { -1 };
siv.find_id::<views::TextView>(&format!("{}_ok", id))
.unwrap()
.set_content(if new_value.is_none() { "*" } else { " " });
}
camera.retain = new_value;
stream.retain = new_value;
info!("model.errors = {}", model.errors);
if (model.errors == 0) != (old_errors == 0) {
info!("toggling change state: errors={}", model.errors);
@ -119,7 +118,7 @@ fn confirm_deletion(model: &RefCell<Model>, siv: &mut Cursive, to_delete: i64) {
let typed = siv.find_id::<views::EditView>("confirm")
.unwrap()
.get_content();
info!("confirm, typed: {} vs expected: {}", typed.as_str(), to_delete);
debug!("confirm, typed: {} vs expected: {}", typed.as_str(), to_delete);
if decode_size(typed.as_str()).ok() == Some(to_delete) {
actually_delete(model, siv);
} else {
@ -132,8 +131,8 @@ fn confirm_deletion(model: &RefCell<Model>, siv: &mut Cursive, to_delete: i64) {
fn actually_delete(model: &RefCell<Model>, siv: &mut Cursive) {
let model = &*model.borrow();
let new_limits: Vec<_> =
model.cameras.iter()
.map(|(&id, c)| dir::NewLimit{camera_id: id, limit: c.retain.unwrap()})
model.streams.iter()
.map(|(&id, s)| dir::NewLimit {stream_id: id, limit: s.retain.unwrap()})
.collect();
siv.pop_layer(); // deletion confirmation
siv.pop_layer(); // retention dialog
@ -150,11 +149,11 @@ fn press_change(model: &Rc<RefCell<Model>>, siv: &mut Cursive) {
if model.borrow().errors > 0 {
return;
}
let to_delete = model.borrow().cameras.values().map(
|c| ::std::cmp::max(c.used - c.retain.unwrap(), 0)).sum();
info!("change press, to_delete={}", to_delete);
let to_delete = model.borrow().streams.values().map(
|s| ::std::cmp::max(s.used - s.retain.unwrap(), 0)).sum();
debug!("change press, to_delete={}", to_delete);
if to_delete > 0 {
let prompt = format!("Some cameras' usage exceeds new limit. Please confirm the amount \
let prompt = format!("Some streams' usage exceeds new limit. Please confirm the amount \
of data to delete by typing it back:\n\n{}", encode_size(to_delete));
let dialog = views::Dialog::around(
views::LinearLayout::vertical()
@ -179,19 +178,20 @@ fn press_change(model: &Rc<RefCell<Model>>, siv: &mut Cursive) {
pub fn add_dialog(db: &Arc<db::Database>, dir: &Arc<dir::SampleFileDir>, siv: &mut Cursive) {
let model = {
let mut cameras = BTreeMap::new();
let mut streams = BTreeMap::new();
let mut total_used = 0;
let mut total_retain = 0;
{
let db = db.lock();
for (&id, camera) in db.cameras_by_id() {
cameras.insert(id, Camera{
label: format!("{}: {}", id, camera.short_name),
used: camera.sample_file_bytes,
retain: Some(camera.retain_bytes),
for (&id, s) in db.streams_by_id() {
let c = db.cameras_by_id().get(&s.camera_id).expect("stream without camera");
streams.insert(id, Stream {
label: format!("{}: {}: {}", id, c.short_name, s.type_.as_str()),
used: s.sample_file_bytes,
retain: Some(s.retain_bytes),
});
total_used += camera.sample_file_bytes;
total_retain += camera.retain_bytes;
total_used += s.sample_file_bytes;
total_retain += s.retain_bytes;
}
}
let stat = dir.statfs().unwrap();
@ -199,27 +199,27 @@ pub fn add_dialog(db: &Arc<db::Database>, dir: &Arc<dir::SampleFileDir>, siv: &m
Rc::new(RefCell::new(Model{
dir: dir.clone(),
db: db.clone(),
fs_capacity: fs_capacity,
total_used: total_used,
total_retain: total_retain,
fs_capacity,
total_used,
total_retain,
errors: (total_retain > fs_capacity) as isize,
cameras: cameras,
streams,
}))
};
let mut list = views::ListView::new();
list.add_child(
"camera",
"stream",
views::LinearLayout::horizontal()
.child(views::TextView::new("usage").fixed_width(25))
.child(views::TextView::new("limit").fixed_width(25)));
for (&id, camera) in &model.borrow().cameras {
for (&id, stream) in &model.borrow().streams {
list.add_child(
&camera.label,
&stream.label,
views::LinearLayout::horizontal()
.child(views::TextView::new(encode_size(camera.used)).fixed_width(25))
.child(views::TextView::new(encode_size(stream.used)).fixed_width(25))
.child(views::EditView::new()
.content(encode_size(camera.retain.unwrap()))
.content(encode_size(stream.retain.unwrap()))
.on_edit({
let model = model.clone();
move |siv, content, _pos| edit_limit(&model, siv, id, content)

View File

@ -98,18 +98,21 @@ pub fn run() -> Result<(), Error> {
&args.flag_db_dir,
if args.flag_read_only { super::OpenMode::ReadOnly } else { super::OpenMode::ReadWrite })?;
let db = Arc::new(db::Database::new(conn).unwrap());
// TODO: multiple sample file dirs.
let dir = dir::SampleFileDir::new(&args.flag_sample_file_dir, db.clone()).unwrap();
info!("Database is loaded.");
let s = web::Service::new(db.clone(), dir.clone(), Some(&args.flag_ui_dir), resolve_zone())?;
// Start a streamer for each camera.
// Start a streamer for each stream.
// TODO: enabled only.
let shutdown_streamers = Arc::new(AtomicBool::new(false));
let mut streamers = Vec::new();
let syncer = if !args.flag_read_only {
let (syncer_channel, syncer_join) = dir::start_syncer(dir.clone()).unwrap();
let l = db.lock();
let cameras = l.cameras_by_id().len();
let streams = l.streams_by_id().len();
let env = streamer::Environment{
db: &db,
dir: &dir,
@ -117,12 +120,13 @@ pub fn run() -> Result<(), Error> {
opener: &*stream::FFMPEG,
shutdown: &shutdown_streamers,
};
for (i, (id, camera)) in l.cameras_by_id().iter().enumerate() {
let rotate_offset_sec = streamer::ROTATE_INTERVAL_SEC * i as i64 / cameras as i64;
for (i, (id, stream)) in l.streams_by_id().iter().enumerate() {
let camera = l.cameras_by_id().get(&stream.camera_id).unwrap();
let rotate_offset_sec = streamer::ROTATE_INTERVAL_SEC * i as i64 / streams as i64;
let mut streamer = streamer::Streamer::new(&env, syncer_channel.clone(), *id, camera,
rotate_offset_sec,
stream, rotate_offset_sec,
streamer::ROTATE_INTERVAL_SEC);
let name = format!("stream-{}", streamer.short_name());
let name = format!("s-{}", streamer.short_name());
streamers.push(thread::Builder::new().name(name).spawn(move|| {
streamer.run();
}).expect("can't create thread"));

642
src/db.rs

File diff suppressed because it is too large Load Diff

View File

@ -254,7 +254,7 @@ pub fn start_syncer(dir: Arc<SampleFileDir>)
}
pub struct NewLimit {
pub camera_id: i32,
pub stream_id: i32,
pub limit: i64,
}
@ -272,30 +272,30 @@ pub fn lower_retention(dir: Arc<SampleFileDir>, limits: &[NewLimit]) -> Result<(
let mut to_delete = Vec::new();
for l in limits {
let before = to_delete.len();
let camera = db.cameras_by_id().get(&l.camera_id)
.ok_or_else(|| Error::new(format!("no such camera {}", l.camera_id)))?;
if l.limit >= camera.sample_file_bytes { continue }
get_rows_to_delete(db, l.camera_id, camera, camera.retain_bytes - l.limit,
let stream = db.streams_by_id().get(&l.stream_id)
.ok_or_else(|| Error::new(format!("no such stream {}", l.stream_id)))?;
if l.limit >= stream.sample_file_bytes { continue }
get_rows_to_delete(db, l.stream_id, stream, stream.retain_bytes - l.limit,
&mut to_delete)?;
info!("camera {}, {}->{}, deleting {} rows", camera.short_name,
camera.sample_file_bytes, l.limit, to_delete.len() - before);
info!("stream {}, {}->{}, deleting {} rows", stream.id,
stream.sample_file_bytes, l.limit, to_delete.len() - before);
}
Ok(to_delete)
})
}
/// Gets rows to delete to bring a camera's disk usage within bounds.
fn get_rows_to_delete(db: &db::LockedDatabase, camera_id: i32,
camera: &db::Camera, extra_bytes_needed: i64,
/// Gets rows to delete to bring a stream's disk usage within bounds.
fn get_rows_to_delete(db: &db::LockedDatabase, stream_id: i32,
stream: &db::Stream, extra_bytes_needed: i64,
to_delete: &mut Vec<db::ListOldestSampleFilesRow>) -> Result<(), Error> {
let bytes_needed = camera.sample_file_bytes + extra_bytes_needed - camera.retain_bytes;
let bytes_needed = stream.sample_file_bytes + extra_bytes_needed - stream.retain_bytes;
let mut bytes_to_delete = 0;
if bytes_needed <= 0 {
debug!("{}: have remaining quota of {}", camera.short_name, -bytes_needed);
debug!("{}: have remaining quota of {}", stream.id, -bytes_needed);
return Ok(());
}
let mut n = 0;
db.list_oldest_sample_files(camera_id, |row| {
db.list_oldest_sample_files(stream_id, |row| {
bytes_to_delete += row.sample_file_bytes as i64;
to_delete.push(row);
n += 1;
@ -303,10 +303,10 @@ fn get_rows_to_delete(db: &db::LockedDatabase, camera_id: i32,
})?;
if bytes_needed > bytes_to_delete {
return Err(Error::new(format!("{}: couldn't find enough files to delete: {} left.",
camera.short_name, bytes_needed)));
stream.id, bytes_needed)));
}
info!("{}: deleting {} bytes in {} recordings ({} bytes needed)",
camera.short_name, bytes_to_delete, n, bytes_needed);
stream.id, bytes_to_delete, n, bytes_needed);
Ok(())
}
@ -343,12 +343,12 @@ impl Syncer {
}
}
/// Rotates files for all cameras and deletes stale reserved uuids from previous runs.
/// Rotates files for all streams and deletes stale reserved uuids from previous runs.
fn initial_rotation(&mut self) -> Result<(), Error> {
self.do_rotation(|db| {
let mut to_delete = Vec::new();
for (camera_id, camera) in db.cameras_by_id() {
get_rows_to_delete(&db, *camera_id, camera, 0, &mut to_delete)?;
for (stream_id, stream) in db.streams_by_id() {
get_rows_to_delete(&db, *stream_id, stream, 0, &mut to_delete)?;
}
Ok(to_delete)
})
@ -389,7 +389,7 @@ impl Syncer {
fn save(&mut self, recording: db::RecordingToInsert, f: fs::File) {
if let Err(e) = self.save_helper(&recording, f) {
error!("camera {}: will discard recording {} due to error while saving: {}",
recording.camera_id, recording.sample_file_uuid, e);
recording.stream_id, recording.sample_file_uuid, e);
self.to_unlink.push(recording.sample_file_uuid);
return;
}
@ -416,10 +416,10 @@ impl Syncer {
let mut db = self.dir.db.lock();
let mut new_next_uuid = l.next_uuid;
{
let camera =
db.cameras_by_id().get(&recording.camera_id)
.ok_or_else(|| Error::new(format!("no such camera {}", recording.camera_id)))?;
get_rows_to_delete(&db, recording.camera_id, camera,
let stream =
db.streams_by_id().get(&recording.stream_id)
.ok_or_else(|| Error::new(format!("no such stream {}", recording.stream_id)))?;
get_rows_to_delete(&db, recording.stream_id, stream,
recording.sample_file_bytes as i64, &mut to_delete)?;
}
let mut tx = db.tx()?;
@ -490,7 +490,7 @@ struct InnerWriter<'a> {
adjuster: ClockAdjuster,
camera_id: i32,
stream_id: i32,
video_sample_entry_id: i32,
run_offset: i32,
@ -567,20 +567,20 @@ pub struct PreviousWriter {
impl<'a> Writer<'a> {
/// Opens the writer; for use by `SampleFileDir` (which should supply `f`).
fn open(f: fs::File, uuid: Uuid, prev: Option<PreviousWriter>, camera_id: i32,
fn open(f: fs::File, uuid: Uuid, prev: Option<PreviousWriter>, stream_id: i32,
video_sample_entry_id: i32, syncer_channel: &'a SyncerChannel) -> Result<Self, Error> {
Ok(Writer(Some(InnerWriter{
syncer_channel: syncer_channel,
f: f,
syncer_channel,
f,
index: recording::SampleIndexEncoder::new(),
uuid: uuid,
uuid,
corrupt: false,
hasher: hash::Hasher::new(hash::MessageDigest::sha1())?,
prev_end: prev.map(|p| p.end_time),
local_start: recording::Time(i64::max_value()),
adjuster: ClockAdjuster::new(prev.map(|p| p.local_time_delta.0)),
camera_id: camera_id,
video_sample_entry_id: video_sample_entry_id,
stream_id,
video_sample_entry_id,
run_offset: prev.map(|p| p.run_offset + 1).unwrap_or(0),
unflushed_sample: None,
})))
@ -663,7 +663,7 @@ impl<'a> InnerWriter<'a> {
else { 0 };
let local_start_delta = self.local_start - start;
let recording = db::RecordingToInsert{
camera_id: self.camera_id,
stream_id: self.stream_id,
sample_file_bytes: self.index.sample_file_bytes,
time: start .. end,
local_time_delta: local_start_delta,

View File

@ -29,6 +29,7 @@
// along with this program. If not, see <http://www.gnu.org/licenses/>.
use db;
use error::Error;
use serde::ser::{SerializeMap, SerializeSeq, Serializer};
use std::collections::BTreeMap;
use uuid::Uuid;
@ -41,17 +42,25 @@ pub struct TopLevel<'a> {
// Use a custom serializer which presents the map's values as a sequence and includes the
// "days" attribute or not, according to the bool in the tuple.
#[serde(serialize_with = "TopLevel::serialize_cameras")]
pub cameras: (&'a BTreeMap<i32, db::Camera>, bool),
pub cameras: (&'a db::LockedDatabase, bool),
}
/// JSON serialization wrapper for a single camera when processing `/cameras/` and
/// `/cameras/<uuid>/`. See `design/api.md` for details.
/// JSON serialization wrapper for a single camera when processing `/api/` and
/// `/api/cameras/<uuid>/`. See `design/api.md` for details.
#[derive(Debug, Serialize)]
#[serde(rename_all="camelCase")]
pub struct Camera<'a> {
pub uuid: Uuid,
pub short_name: &'a str,
pub description: &'a str,
#[serde(serialize_with = "Camera::serialize_streams")]
pub streams: [Option<Stream<'a>>; 2],
}
#[derive(Debug, Serialize)]
#[serde(rename_all="camelCase")]
pub struct Stream<'a> {
pub retain_bytes: i64,
pub min_start_time_90k: Option<i64>,
pub max_end_time_90k: Option<i64>,
@ -59,26 +68,54 @@ pub struct Camera<'a> {
pub total_sample_file_bytes: i64,
#[serde(skip_serializing_if = "Option::is_none")]
#[serde(serialize_with = "Camera::serialize_days")]
pub days: Option<&'a BTreeMap<db::CameraDayKey, db::CameraDayValue>>,
#[serde(serialize_with = "Stream::serialize_days")]
pub days: Option<&'a BTreeMap<db::StreamDayKey, db::StreamDayValue>>,
}
impl<'a> Camera<'a> {
pub fn new(c: &'a db::Camera, include_days: bool) -> Self {
Camera{
pub fn wrap(c: &'a db::Camera, db: &'a db::LockedDatabase, include_days: bool) -> Result<Self, Error> {
Ok(Camera {
uuid: c.uuid,
short_name: &c.short_name,
description: &c.description,
retain_bytes: c.retain_bytes,
min_start_time_90k: c.range.as_ref().map(|r| r.start.0),
max_end_time_90k: c.range.as_ref().map(|r| r.end.0),
total_duration_90k: c.duration.0,
total_sample_file_bytes: c.sample_file_bytes,
days: if include_days { Some(&c.days) } else { None },
streams: [
Stream::wrap(db, c.streams[0], include_days)?,
Stream::wrap(db, c.streams[1], include_days)?,
],
})
}
fn serialize_streams<S>(streams: &[Option<Stream<'a>>; 2], serializer: S) -> Result<S::Ok, S::Error>
where S: Serializer {
let mut map = serializer.serialize_map(Some(streams.len()))?;
for (i, s) in streams.iter().enumerate() {
if let &Some(ref s) = s {
map.serialize_key(db::StreamType::from_index(i).expect("invalid stream type index").as_str())?;
map.serialize_value(s)?;
}
}
map.end()
}
}
fn serialize_days<S>(days: &Option<&BTreeMap<db::CameraDayKey, db::CameraDayValue>>,
impl<'a> Stream<'a> {
fn wrap(db: &'a db::LockedDatabase, id: Option<i32>, include_days: bool) -> Result<Option<Self>, Error> {
let id = match id {
Some(id) => id,
None => return Ok(None),
};
let s = db.streams_by_id().get(&id).ok_or_else(|| Error::new(format!("missing stream {}", id)))?;
Ok(Some(Stream {
retain_bytes: s.retain_bytes,
min_start_time_90k: s.range.as_ref().map(|r| r.start.0),
max_end_time_90k: s.range.as_ref().map(|r| r.end.0),
total_duration_90k: s.duration.0,
total_sample_file_bytes: s.sample_file_bytes,
days: if include_days { Some(&s.days) } else { None },
}))
}
fn serialize_days<S>(days: &Option<&BTreeMap<db::StreamDayKey, db::StreamDayValue>>,
serializer: S) -> Result<S::Ok, S::Error>
where S: Serializer {
let days = match *days {
@ -89,7 +126,7 @@ impl<'a> Camera<'a> {
for (k, v) in days {
map.serialize_key(k.as_ref())?;
let bounds = k.bounds();
map.serialize_value(&CameraDayValue{
map.serialize_value(&StreamDayValue{
start_time_90k: bounds.start.0,
end_time_90k: bounds.end.0,
total_duration_90k: v.duration.0,
@ -101,7 +138,7 @@ impl<'a> Camera<'a> {
#[derive(Debug, Serialize)]
#[serde(rename_all="camelCase")]
struct CameraDayValue {
struct StreamDayValue {
pub start_time_90k: i64,
pub end_time_90k: i64,
pub total_duration_90k: i64,
@ -109,12 +146,14 @@ struct CameraDayValue {
impl<'a> TopLevel<'a> {
/// Serializes cameras as a list (rather than a map), optionally including the `days` field.
fn serialize_cameras<S>(cameras: &(&BTreeMap<i32, db::Camera>, bool),
fn serialize_cameras<S>(cameras: &(&db::LockedDatabase, bool),
serializer: S) -> Result<S::Ok, S::Error>
where S: Serializer {
let mut seq = serializer.serialize_seq(Some(cameras.0.len()))?;
for c in cameras.0.values() {
seq.serialize_element(&Camera::new(c, cameras.1))?;
let (db, include_days) = *cameras;
let cs = db.cameras_by_id();
let mut seq = serializer.serialize_seq(Some(cs.len()))?;
for (_, c) in cs {
seq.serialize_element(&Camera::wrap(c, db, include_days).unwrap())?; // TODO: no unwrap.
}
seq.end()
}

View File

@ -381,7 +381,7 @@ impl Segment {
self.index_once.call_once(|| {
let index = unsafe { &mut *self.index.get() };
*index = db.lock()
.with_recording_playback(self.s.camera_id, self.s.recording_id,
.with_recording_playback(self.s.stream_id, self.s.recording_id,
|playback| self.build_index(playback))
.map_err(|e| { error!("Unable to build index for segment: {:?}", e); });
});
@ -629,7 +629,7 @@ impl Slice {
}
let truns =
mp4.0.db.lock()
.with_recording_playback(s.s.camera_id, s.s.recording_id,
.with_recording_playback(s.s.stream_id, s.s.recording_id,
|playback| s.truns(playback, pos, len))
.map_err(|e| { Error::new(format!("Unable to build index for segment: {:?}", e)) })?;
let truns = ARefs::new(truns);
@ -762,7 +762,7 @@ impl FileBuilder {
if prev.s.have_trailing_zero() {
return Err(Error::new(format!(
"unable to append recording {}/{} after recording {}/{} with trailing zero",
row.camera_id, row.id, prev.s.camera_id, prev.s.recording_id)));
row.stream_id, row.id, prev.s.stream_id, prev.s.recording_id)));
}
}
let s = Segment::new(db, &row, rel_range_90k, self.next_frame_num)?;
@ -811,7 +811,7 @@ impl FileBuilder {
// Update the etag to reflect this segment.
let mut data = [0_u8; 24];
let mut cursor = io::Cursor::new(&mut data[..]);
cursor.write_i32::<BigEndian>(s.s.camera_id)?;
cursor.write_i32::<BigEndian>(s.s.stream_id)?;
cursor.write_i32::<BigEndian>(s.s.recording_id)?;
cursor.write_i64::<BigEndian>(s.s.start.0)?;
cursor.write_i32::<BigEndian>(d.start)?;
@ -1452,7 +1452,7 @@ impl FileInner {
fn get_video_sample_data(&self, i: usize, r: Range<u64>) -> Result<Chunk, Error> {
let s = &self.segments[i];
let uuid = {
self.db.lock().with_recording_playback(s.s.camera_id, s.s.recording_id,
self.db.lock().with_recording_playback(s.s.stream_id, s.s.recording_id,
|p| Ok(p.sample_file_uuid))?
};
let f = self.dir.open_sample_file(uuid)?;
@ -1541,7 +1541,7 @@ mod tests {
use strutil;
use super::*;
use stream::{self, Opener, Stream};
use testutil::{self, TestDb, TEST_CAMERA_ID};
use testutil::{self, TestDb, TEST_STREAM_ID};
fn fill_slice<E: http_serve::Entity>(slice: &mut [u8], e: &E, start: u64) {
let mut p = 0;
@ -1765,7 +1765,7 @@ mod tests {
extra_data.width, extra_data.height, extra_data.sample_entry,
extra_data.rfc6381_codec).unwrap();
let mut output = db.dir.create_writer(&db.syncer_channel, None,
TEST_CAMERA_ID, video_sample_entry_id).unwrap();
TEST_STREAM_ID, video_sample_entry_id).unwrap();
// end_pts is the pts of the end of the most recent frame (start + duration).
// It's needed because dir::Writer calculates a packet's duration from its pts and the
@ -1799,7 +1799,7 @@ mod tests {
let all_time = recording::Time(i64::min_value()) .. recording::Time(i64::max_value());
{
let db = db.lock();
db.list_recordings_by_time(TEST_CAMERA_ID, all_time, |r| {
db.list_recordings_by_time(TEST_STREAM_ID, all_time, |r| {
let d = r.duration_90k;
assert!(skip_90k + shorten_90k < d);
builder.append(&*db, r, skip_90k .. d - shorten_90k).unwrap();
@ -2259,7 +2259,7 @@ mod bench {
let segment = {
let all_time = recording::Time(i64::min_value()) .. recording::Time(i64::max_value());
let mut row = None;
db.list_recordings_by_time(testutil::TEST_CAMERA_ID, all_time, |r| {
db.list_recordings_by_time(testutil::TEST_STREAM_ID, all_time, |r| {
row = Some(r);
Ok(())
}).unwrap();
@ -2267,7 +2267,7 @@ mod bench {
let rel_range_90k = 0 .. row.duration_90k;
super::Segment::new(&db, &row, rel_range_90k, 1).unwrap()
};
db.with_recording_playback(segment.s.camera_id, segment.s.recording_id, |playback| {
db.with_recording_playback(segment.s.stream_id, segment.s.recording_id, |playback| {
let v = segment.build_index(playback).unwrap(); // warm.
b.bytes = v.len() as u64; // define the benchmark performance in terms of output bytes.
b.iter(|| segment.build_index(playback).unwrap());

View File

@ -354,7 +354,7 @@ impl SampleIndexEncoder {
/// Used by the `Mp4FileBuilder` class to splice together recordings into a single virtual .mp4.
#[derive(Debug)]
pub struct Segment {
pub camera_id: i32,
pub stream_id: i32,
pub recording_id: i32,
pub start: Time,
@ -382,7 +382,7 @@ impl Segment {
recording: &db::ListRecordingsRow,
desired_range_90k: Range<i32>) -> Result<Segment, Error> {
let mut self_ = Segment {
camera_id: recording.camera_id,
stream_id: recording.stream_id,
recording_id: recording.id,
start: recording.start,
begin: None,
@ -413,7 +413,7 @@ impl Segment {
// Slow path. Need to iterate through the index.
trace!("recording::Segment::new slow path, desired_range_90k={:?}, recording={:#?}",
self_.desired_range_90k, recording);
db.with_recording_playback(self_.camera_id, self_.recording_id, |playback| {
db.with_recording_playback(self_.stream_id, self_.recording_id, |playback| {
let mut begin = Box::new(SampleIndexIterator::new());
let data = &(&playback).video_index;
let mut it = SampleIndexIterator::new();
@ -481,7 +481,7 @@ impl Segment {
pub fn foreach<F>(&self, playback: &db::RecordingPlayback, mut f: F) -> Result<(), Error>
where F: FnMut(&SampleIndexIterator) -> Result<(), Error> {
trace!("foreach on recording {}/{}: {} frames, actual_start_90k: {}",
self.camera_id, self.recording_id, self.frames, self.actual_start_90k());
self.stream_id, self.recording_id, self.frames, self.actual_start_90k());
let data = &(&playback).video_index;
let mut it = match self.begin {
Some(ref b) => **b,
@ -490,11 +490,11 @@ impl Segment {
if it.uninitialized() {
if !it.next(data)? {
return Err(Error::new(format!("recording {}/{}: no frames",
self.camera_id, self.recording_id)));
self.stream_id, self.recording_id)));
}
if !it.is_key() {
return Err(Error::new(format!("recording {}/{}: doesn't start with key frame",
self.camera_id, self.recording_id)));
self.stream_id, self.recording_id)));
}
}
let mut have_frame = true;
@ -502,7 +502,7 @@ impl Segment {
for i in 0 .. self.frames {
if !have_frame {
return Err(Error::new(format!("recording {}/{}: expected {} frames, found only {}",
self.camera_id, self.recording_id, self.frames,
self.stream_id, self.recording_id, self.frames,
i+1)));
}
if it.is_key() {
@ -510,7 +510,7 @@ impl Segment {
if key_frame > self.key_frames {
return Err(Error::new(format!(
"recording {}/{}: more than expected {} key frames",
self.camera_id, self.recording_id, self.key_frames)));
self.stream_id, self.recording_id, self.key_frames)));
}
}
@ -522,7 +522,7 @@ impl Segment {
}
if key_frame < self.key_frames {
return Err(Error::new(format!("recording {}/{}: expected {} key frames, found only {}",
self.camera_id, self.recording_id, self.key_frames,
self.stream_id, self.recording_id, self.key_frames,
key_frame)));
}
Ok(())
@ -656,7 +656,7 @@ mod tests {
fn get_frames<F, T>(db: &db::Database, segment: &Segment, f: F) -> Vec<T>
where F: Fn(&SampleIndexIterator) -> T {
let mut v = Vec::new();
db.lock().with_recording_playback(segment.camera_id, segment.recording_id, |playback| {
db.lock().with_recording_playback(segment.stream_id, segment.recording_id, |playback| {
segment.foreach(playback, |it| { v.push(f(it)); Ok(()) })
}).unwrap();
v

View File

@ -63,31 +63,34 @@ create table camera (
username text,
-- The password to use when accessing the camera.
password text,
password text
);
-- The path (starting with "/") to use in rtsp:// URLs to reference this
-- camera's "main" (full-quality) video stream.
main_rtsp_path text,
create table stream (
id integer primary key,
camera_id integer not null references camera (id),
type text not null check (type in ('main', 'sub')),
-- The path (starting with "/") to use in rtsp:// URLs to reference this
-- camera's "sub" (low-bandwidth) video stream.
sub_rtsp_path text,
-- The path (starting with "/") to use in rtsp:// URLs to for this stream.
rtsp_path text not null,
-- The number of bytes of video to retain, excluding the currently-recording
-- file. Older files will be deleted as necessary to stay within this limit.
retain_bytes integer not null check (retain_bytes >= 0),
-- The low 32 bits of the next recording id to assign for this camera.
-- The low 32 bits of the next recording id to assign for this stream.
-- Typically this is the maximum current recording + 1, but it does
-- not decrease if that recording is deleted.
next_recording_id integer not null check (next_recording_id >= 0)
next_recording_id integer not null check (next_recording_id >= 0),
unique (camera_id, type)
);
-- Each row represents a single completed recorded segment of video.
-- Recordings are typically ~60 seconds; never more than 5 minutes.
create table recording (
-- The high 32 bits of composite_id are taken from the camera's id, which
-- improves locality. The low 32 bits are taken from the camera's
-- The high 32 bits of composite_id are taken from the stream's id, which
-- improves locality. The low 32 bits are taken from the stream's
-- next_recording_id (which should be post-incremented in the same
-- transaction). It'd be simpler to use a "without rowid" table and separate
-- fields to make up the primary key, but
@ -98,7 +101,7 @@ create table recording (
-- This field is redundant with id above, but used to enforce the reference
-- constraint and to structure the recording_start_time index.
camera_id integer not null references camera (id),
stream_id integer not null references stream (id),
-- The offset of this recording within a run. 0 means this was the first
-- recording made from a RTSP session. The start of the run has id
@ -135,12 +138,12 @@ create table recording (
video_sync_samples integer not null check (video_sync_samples > 0),
video_sample_entry_id integer references video_sample_entry (id),
check (composite_id >> 32 = camera_id)
check (composite_id >> 32 = stream_id)
);
create index recording_cover on recording (
-- Typical queries use "where camera_id = ? order by start_time_90k".
camera_id,
-- Typical queries use "where stream_id = ? order by start_time_90k".
stream_id,
start_time_90k,
-- These fields are not used for ordering; they cover most queries so
@ -202,4 +205,4 @@ create table video_sample_entry (
);
insert into version (id, unix_time, notes)
values (1, cast(strftime('%s', 'now') as int), 'db creation');
values (2, cast(strftime('%s', 'now') as int), 'db creation');

View File

@ -29,7 +29,7 @@
// along with this program. If not, see <http://www.gnu.org/licenses/>.
use clock::{Clocks, TimerGuard};
use db::{Camera, Database};
use db::{Camera, Database, Stream};
use dir;
use error::Error;
use h264;
@ -62,7 +62,7 @@ pub struct Streamer<'a, C, S> where C: 'a + Clocks, S: 'a + stream::Stream {
syncer_channel: dir::SyncerChannel,
clocks: &'a C,
opener: &'a stream::Opener<S>,
camera_id: i32,
stream_id: i32,
short_name: String,
url: String,
redacted_url: String,
@ -77,7 +77,7 @@ struct WriterState<'a> {
impl<'a, C, S> Streamer<'a, C, S> where C: 'a + Clocks, S: 'a + stream::Stream {
pub fn new<'b>(env: &Environment<'a, 'b, C, S>, syncer_channel: dir::SyncerChannel,
camera_id: i32, c: &Camera, rotate_offset_sec: i64,
stream_id: i32, c: &Camera, s: &Stream, rotate_offset_sec: i64,
rotate_interval_sec: i64) -> Self {
Streamer {
shutdown: env.shutdown.clone(),
@ -88,10 +88,10 @@ impl<'a, C, S> Streamer<'a, C, S> where C: 'a + Clocks, S: 'a + stream::Stream {
syncer_channel: syncer_channel,
clocks: env.clocks,
opener: env.opener,
camera_id: camera_id,
short_name: c.short_name.to_owned(),
url: format!("rtsp://{}:{}@{}{}", c.username, c.password, c.host, c.main_rtsp_path),
redacted_url: format!("rtsp://{}:redacted@{}{}", c.username, c.host, c.main_rtsp_path),
stream_id: stream_id,
short_name: format!("{}-{}", c.short_name, s.type_.as_str()),
url: format!("rtsp://{}:{}@{}{}", c.username, c.password, c.host, s.rtsp_path),
redacted_url: format!("rtsp://{}:redacted@{}{}", c.username, c.host, s.rtsp_path),
}
}
@ -167,7 +167,7 @@ impl<'a, C, S> Streamer<'a, C, S> where C: 'a + Clocks, S: 'a + stream::Stream {
let r = r + if prev.is_none() { self.rotate_interval_sec } else { 0 };
let _t = TimerGuard::new(self.clocks, || "creating writer");
let w = self.dir.create_writer(&self.syncer_channel, prev, self.camera_id,
let w = self.dir.create_writer(&self.syncer_channel, prev, self.stream_id,
video_sample_entry_id)?;
WriterState{
writer: w,
@ -358,8 +358,9 @@ mod tests {
{
let l = db.db.lock();
let camera = l.cameras_by_id().get(&testutil::TEST_CAMERA_ID).unwrap();
stream = super::Streamer::new(&env, db.syncer_channel.clone(), testutil::TEST_CAMERA_ID,
camera, 0, 3);
let s = l.streams_by_id().get(&testutil::TEST_STREAM_ID).unwrap();
stream = super::Streamer::new(&env, db.syncer_channel.clone(), testutil::TEST_STREAM_ID,
camera, s, 0, 3);
}
stream.run();
assert!(opener.streams.lock().unwrap().is_empty());
@ -370,7 +371,7 @@ mod tests {
// 3-second boundaries (such as 2016-04-26 00:00:03), rotation happens somewhat later:
// * the first rotation is always skipped
// * the second rotation is deferred until a key frame.
assert_eq!(get_frames(&db, testutil::TEST_CAMERA_ID, 1), &[
assert_eq!(get_frames(&db, testutil::TEST_STREAM_ID, 1), &[
Frame{start_90k: 0, duration_90k: 90379, is_key: true},
Frame{start_90k: 90379, duration_90k: 89884, is_key: false},
Frame{start_90k: 180263, duration_90k: 89749, is_key: false},
@ -380,12 +381,12 @@ mod tests {
Frame{start_90k: 540015, duration_90k: 90021, is_key: false}, // pts_time 6.0001...
Frame{start_90k: 630036, duration_90k: 89958, is_key: false},
]);
assert_eq!(get_frames(&db, testutil::TEST_CAMERA_ID, 2), &[
assert_eq!(get_frames(&db, testutil::TEST_STREAM_ID, 2), &[
Frame{start_90k: 0, duration_90k: 90011, is_key: true},
Frame{start_90k: 90011, duration_90k: 0, is_key: false},
]);
let mut recordings = Vec::new();
db.list_recordings_by_id(testutil::TEST_CAMERA_ID, 1..3, |r| {
db.list_recordings_by_id(testutil::TEST_STREAM_ID, 1..3, |r| {
recordings.push(r);
Ok(())
}).unwrap();

View File

@ -45,6 +45,7 @@ static INIT: sync::Once = sync::ONCE_INIT;
/// id of the camera created by `TestDb::new` below.
pub const TEST_CAMERA_ID: i32 = 1;
pub const TEST_STREAM_ID: i32 = 1;
/// Performs global initialization for tests.
/// * set up logging. (Note the output can be confusing unless `RUST_TEST_THREADS=1` is set in
@ -89,12 +90,14 @@ impl TestDb {
host: "test-camera".to_owned(),
username: "foo".to_owned(),
password: "bar".to_owned(),
main_rtsp_path: "/main".to_owned(),
sub_rtsp_path: "/sub".to_owned(),
rtsp_paths: [
"/main".to_owned(),
"/sub".to_owned(),
],
}).unwrap());
test_camera_uuid = l.cameras_by_id().get(&TEST_CAMERA_ID).unwrap().uuid;
let mut tx = l.tx().unwrap();
tx.update_retention(TEST_CAMERA_ID, 1048576).unwrap();
tx.update_retention(TEST_STREAM_ID, 1048576).unwrap();
tx.commit().unwrap();
}
let path = tmpdir.path().to_str().unwrap().to_owned();
@ -121,7 +124,7 @@ impl TestDb {
tx.bypass_reservation_for_testing = true;
const START_TIME: recording::Time = recording::Time(1430006400i64 * TIME_UNITS_PER_SEC);
row_id = tx.insert_recording(&db::RecordingToInsert{
camera_id: TEST_CAMERA_ID,
stream_id: TEST_STREAM_ID,
sample_file_bytes: encoder.sample_file_bytes,
time: START_TIME ..
START_TIME + recording::Duration(encoder.total_duration_90k as i64),
@ -138,7 +141,7 @@ impl TestDb {
tx.commit().unwrap();
}
let mut row = None;
db.list_recordings_by_id(TEST_CAMERA_ID, row_id .. row_id + 1,
db.list_recordings_by_id(TEST_STREAM_ID, row_id .. row_id + 1,
|r| { row = Some(r); Ok(()) }).unwrap();
row.unwrap()
}
@ -155,7 +158,7 @@ pub fn add_dummy_recordings_to_db(db: &db::Database, num: usize) {
const START_TIME: recording::Time = recording::Time(1430006400i64 * TIME_UNITS_PER_SEC);
const DURATION: recording::Duration = recording::Duration(5399985);
let mut recording = db::RecordingToInsert{
camera_id: TEST_CAMERA_ID,
stream_id: TEST_STREAM_ID,
sample_file_bytes: 30104460,
flags: 0,
time: START_TIME .. (START_TIME + DURATION),

View File

@ -69,9 +69,9 @@ enum Path {
TopLevel, // "/api/"
InitSegment([u8; 20]), // "/api/init/<sha1>.mp4"
Camera(Uuid), // "/api/cameras/<uuid>/"
CameraRecordings(Uuid), // "/api/cameras/<uuid>/recordings"
CameraViewMp4(Uuid), // "/api/cameras/<uuid>/view.mp4"
CameraViewMp4Segment(Uuid), // "/api/cameras/<uuid>/view.m4s"
StreamRecordings(Uuid, db::StreamType), // "/api/cameras/<uuid>/<type>/recordings"
StreamViewMp4(Uuid, db::StreamType), // "/api/cameras/<uuid>/<type>/view.mp4"
StreamViewMp4Segment(Uuid, db::StreamType), // "/api/cameras/<uuid>/<type>/view.m4s"
Static, // "<other path>"
NotFound,
}
@ -101,18 +101,33 @@ fn decode_path(path: &str) -> Path {
None => { return Path::NotFound; },
Some(s) => s,
};
let (uuid, path) = path.split_at(slash);
let uuid = &path[0 .. slash];
let path = &path[slash+1 .. ];
// TODO(slamb): require uuid to be in canonical format.
let uuid = match Uuid::parse_str(uuid) {
Ok(u) => u,
Err(_) => { return Path::NotFound },
};
if path.is_empty() {
return Path::Camera(uuid);
}
let slash = match path.find('/') {
None => { return Path::NotFound; },
Some(s) => s,
};
let (type_, path) = path.split_at(slash);
let type_ = match db::StreamType::parse(type_) {
None => { return Path::NotFound; },
Some(t) => t,
};
match path {
"/" => Path::Camera(uuid),
"/recordings" => Path::CameraRecordings(uuid),
"/view.mp4" => Path::CameraViewMp4(uuid),
"/view.m4s" => Path::CameraViewMp4Segment(uuid),
"/recordings" => Path::StreamRecordings(uuid, type_),
"/view.mp4" => Path::StreamViewMp4(uuid, type_),
"/view.m4s" => Path::StreamViewMp4Segment(uuid, type_),
_ => Path::NotFound,
}
}
@ -200,7 +215,7 @@ impl ServiceInner {
let db = self.db.lock();
serde_json::to_writer(&mut w, &json::TopLevel {
time_zone_name: &self.time_zone_name,
cameras: (db.cameras_by_id(), days),
cameras: (&db, days),
})?;
}
Ok(resp)
@ -212,12 +227,12 @@ impl ServiceInner {
let db = self.db.lock();
let camera = db.get_camera(uuid)
.ok_or_else(|| Error::new("no such camera".to_owned()))?;
serde_json::to_writer(&mut w, &json::Camera::new(camera, true))?
serde_json::to_writer(&mut w, &json::Camera::wrap(camera, &db, true)?)?
};
Ok(resp)
}
fn camera_recordings(&self, req: &Request, uuid: Uuid)
fn stream_recordings(&self, req: &Request, uuid: Uuid, type_: db::StreamType)
-> Result<Response<slices::Body>, Error> {
let (r, split) = {
let mut time = recording::Time(i64::min_value()) .. recording::Time(i64::max_value());
@ -240,7 +255,9 @@ impl ServiceInner {
let db = self.db.lock();
let camera = db.get_camera(uuid)
.ok_or_else(|| Error::new("no such camera".to_owned()))?;
db.list_aggregated_recordings(camera.id, r, split, |row| {
let stream_id = camera.streams[type_.index()]
.ok_or_else(|| Error::new("no such stream".to_owned()))?;
db.list_aggregated_recordings(stream_id, r, split, |row| {
let end = row.ids.end - 1; // in api, ids are inclusive.
out.recordings.push(json::Recording {
start_id: row.ids.start,
@ -276,23 +293,23 @@ impl ServiceInner {
self.not_found()
}
fn camera_view_mp4(&self, uuid: Uuid, type_: mp4::Type, query: Option<&str>, req: &Request)
-> Result<Response<slices::Body>, Error> {
let camera_id = {
fn stream_view_mp4(&self, req: &Request, uuid: Uuid, stream_type_: db::StreamType,
mp4_type_: mp4::Type) -> Result<Response<slices::Body>, Error> {
let stream_id = {
let db = self.db.lock();
let camera = db.get_camera(uuid)
.ok_or_else(|| Error::new("no such camera".to_owned()))?;
camera.id
camera.streams[stream_type_.index()].ok_or_else(|| Error::new("no such stream".to_owned()))?
};
let mut builder = mp4::FileBuilder::new(type_);
if let Some(q) = query {
let mut builder = mp4::FileBuilder::new(mp4_type_);
if let Some(q) = req.uri().query() {
for (key, value) in form_urlencoded::parse(q.as_bytes()) {
let (key, value) = (key.borrow(), value.borrow());
match key {
"s" => {
let s = Segments::parse(value).map_err(
|_| Error::new(format!("invalid s parameter: {}", value)))?;
debug!("camera_view_mp4: appending s={:?}", s);
debug!("stream_view_mp4: appending s={:?}", s);
let mut est_segments = (s.ids.end - s.ids.start) as usize;
if let Some(end) = s.end_time {
// There should be roughly ceil((end - start) /
@ -309,15 +326,15 @@ impl ServiceInner {
let db = self.db.lock();
let mut prev = None;
let mut cur_off = 0;
db.list_recordings_by_id(camera_id, s.ids.clone(), |r| {
db.list_recordings_by_id(stream_id, s.ids.clone(), |r| {
// Check for missing recordings.
match prev {
None if r.id == s.ids.start => {},
None => return Err(Error::new(format!("no such recording {}/{}",
camera_id, s.ids.start))),
stream_id, s.ids.start))),
Some(id) if r.id != id + 1 => {
return Err(Error::new(format!("no such recording {}/{}",
camera_id, id + 1)));
stream_id, id + 1)));
},
_ => {},
};
@ -330,11 +347,11 @@ impl ServiceInner {
let start = cmp::max(0, s.start_time - cur_off);
let end = cmp::min(d, end_time - cur_off);
let times = start as i32 .. end as i32;
debug!("...appending recording {}/{} with times {:?} (out of dur {})",
r.camera_id, r.id, times, d);
debug!("...appending recording {}/{} with times {:?} \
(out of dur {})", r.stream_id, r.id, times, d);
builder.append(&db, r, start as i32 .. end as i32)?;
} else {
debug!("...skipping recording {}/{} dur {}", r.camera_id, r.id, d);
debug!("...skipping recording {}/{} dur {}", r.stream_id, r.id, d);
}
cur_off += d;
Ok(())
@ -344,11 +361,11 @@ impl ServiceInner {
match prev {
Some(id) if s.ids.end != id + 1 => {
return Err(Error::new(format!("no such recording {}/{}",
camera_id, s.ids.end - 1)));
stream_id, s.ids.end - 1)));
},
None => {
return Err(Error::new(format!("no such recording {}/{}",
camera_id, s.ids.start)));
stream_id, s.ids.start)));
},
_ => {},
};
@ -451,12 +468,12 @@ impl server::Service for Service {
Path::InitSegment(sha1) => self.0.init_segment(sha1, &req),
Path::TopLevel => self.0.top_level(&req),
Path::Camera(uuid) => self.0.camera(&req, uuid),
Path::CameraRecordings(uuid) => self.0.camera_recordings(&req, uuid),
Path::CameraViewMp4(uuid) => {
self.0.camera_view_mp4(uuid, mp4::Type::Normal, req.uri().query(), &req)
Path::StreamRecordings(uuid, type_) => self.0.stream_recordings(&req, uuid, type_),
Path::StreamViewMp4(uuid, type_) => {
self.0.stream_view_mp4(&req, uuid, type_, mp4::Type::Normal)
},
Path::CameraViewMp4Segment(uuid) => {
self.0.camera_view_mp4(uuid, mp4::Type::MediaSegment, req.uri().query(), &req)
Path::StreamViewMp4Segment(uuid, type_) => {
self.0.stream_view_mp4(&req, uuid, type_, mp4::Type::MediaSegment)
},
Path::NotFound => self.0.not_found(),
Path::Static => self.0.static_file(&req),
@ -539,10 +556,10 @@ mod bench {
}
#[bench]
fn serve_camera_recordings(b: &mut Bencher) {
fn serve_stream_recordings(b: &mut Bencher) {
testutil::init();
let server = &*SERVER;
let url = reqwest::Url::parse(&format!("{}/api/cameras/{}/recordings", server.base_url,
let url = reqwest::Url::parse(&format!("{}/api/cameras/{}/main/recordings", server.base_url,
server.test_camera_uuid)).unwrap();
let mut buf = Vec::new();
let client = reqwest::Client::new();

View File

@ -40,8 +40,10 @@
<body>
<div id="nav">
<form action="#">
<fieldset id="cameras">
<fieldset>
<legend>Cameras</legend>
<table id="cameras">
</table>
</fieldset>
<fieldset id="datetime">
<legend>Datetime range</legend>

View File

@ -21,6 +21,8 @@ import moment from 'moment-timezone';
const apiUrl = '/api/';
const allStreamTypes = ['main', 'sub'];
// IANA timezone name.
let zone = null;
@ -35,7 +37,8 @@ let selectedRange = {
singleDateStr: null, // if startDateStr===endDateStr, that value, otherwise null
};
// Cameras is a dictionary as retrieved from apiUrl + some extra props:
// Cameras is a dictionary as retrieved from apiUrl + some extra props within
// the streams dicts:
// * "enabled" is a boolean indicating if the camera should be displayed and
// if it should be used to constrain the datepickers.
// * "recordingsUrl" is null or the currently fetched/fetching .../recordings url.
@ -62,8 +65,8 @@ function formatTime(ts90k) {
return m.format('YYYY-MM-DDTHH:mm:ss:' + String(100000 + frac).substr(1) + 'Z');
}
function onSelectVideo(camera, range, recording) {
let url = apiUrl + 'cameras/' + camera.uuid + '/view.mp4?s=' + recording.startId;
function onSelectVideo(camera, streamType, range, recording) {
let url = apiUrl + 'cameras/' + camera.uuid + '/' + streamType + '/view.mp4?s=' + recording.startId;
if (recording.endId !== undefined) {
url += '-' + recording.endId;
}
@ -100,30 +103,31 @@ function onSelectVideo(camera, range, recording) {
formattedEnd = formattedEnd.substr(timePos);
}
dialog.dialog({
title: camera.shortName + ", " + formattedStart + " to " + formattedEnd,
title: camera.shortName + " " + streamType + ", " + formattedStart + " to " + formattedEnd,
width: recording.videoSampleEntryWidth / 4,
close: function() { dialog.remove(); },
});
video.attr("src", url);
}
function formatRecordings(camera) {
let tbody = $("#tab-" + camera.uuid);
function formatRecordings(camera, streamType) {
let tbody = $("#tab-" + camera.uuid + "-" + streamType);
$(".loading", tbody).hide();
$(".r", tbody).remove();
const frameRateFmt = new Intl.NumberFormat([], {maximumFractionDigits: 0});
const sizeFmt = new Intl.NumberFormat([], {maximumFractionDigits: 1});
const trim = $("#trim").prop("checked");
for (let recording of camera.recordingsData.recordings) {
const stream = camera.streams[streamType];
for (const recording of stream.recordingsData.recordings) {
const duration = (recording.endTime90k - recording.startTime90k) / 90000;
let row = $('<tr class="r"/>');
const startTime90k = trim && recording.startTime90k < camera.recordingsRange.startTime90k
? camera.recordingsRange.startTime90k : recording.startTime90k;
const endTime90k = trim && recording.endTime90k > camera.recordingsRange.endTime90k
? camera.recordingsRange.endTime90k : recording.endTime90k;
const startTime90k = trim && recording.startTime90k < stream.recordingsRange.startTime90k
? stream.recordingsRange.startTime90k : recording.startTime90k;
const endTime90k = trim && recording.endTime90k > stream.recordingsRange.endTime90k
? stream.recordingsRange.endTime90k : recording.endTime90k;
let formattedStart = formatTime(startTime90k);
let formattedEnd = formatTime(endTime90k);
const singleDateStr = camera.recordingsRange.singleDateStr;
const singleDateStr = stream.recordingsRange.singleDateStr;
if (singleDateStr !== null && formattedStart.startsWith(singleDateStr)) {
formattedStart = formattedStart.substr(11);
}
@ -137,7 +141,7 @@ function formatRecordings(camera) {
$("<td/>").text(frameRateFmt.format(recording.videoSamples / duration)),
$("<td/>").text(sizeFmt.format(recording.sampleFileBytes / 1048576) + " MB"),
$("<td/>").text(sizeFmt.format(recording.sampleFileBytes / duration * .000008) + " Mbps"));
row.on("click", function() { onSelectVideo(camera, camera.recordingsRange, recording); });
row.on("click", function() { onSelectVideo(camera, streamType, stream.recordingsRange, recording); });
tbody.append(row);
}
};
@ -156,47 +160,54 @@ function fetch() {
formatTime(selectedRange.endTime90k));
let split = $("#split").val();
for (let camera of cameras) {
let url = apiUrl + 'cameras/' + camera.uuid + '/recordings?startTime90k=' +
for (const streamType in camera.streams) {
let stream = camera.streams[streamType];
let url = apiUrl + 'cameras/' + camera.uuid + '/' + streamType + '/recordings?startTime90k=' +
selectedRange.startTime90k + '&endTime90k=' + selectedRange.endTime90k;
if (split !== '') {
url += '&split90k=' + split;
}
if (url === camera.recordingsUrl) {
if (url === stream.recordingsUrl) {
continue; // nothing to do.
}
if (camera.recordingsReq !== null) {
camera.recordingsReq.abort();
console.log('url: ', url);
if (stream.recordingsReq !== null) {
stream.recordingsReq.abort();
}
let tbody = $("#tab-" + camera.uuid);
let tbody = $("#tab-" + camera.uuid + "-" + streamType);
$(".r", tbody).remove();
$(".loading", tbody).show();
let r = req(url);
camera.recordingsUrl = url;
camera.recordingsRange = selectedRange;
camera.recordingsReq = r;
r.always(function() { camera.recordingsReq = null; });
stream.recordingsUrl = url;
stream.recordingsRange = selectedRange;
stream.recordingsReq = r;
r.always(function() { stream.recordingsReq = null; });
r.then(function(data, status, req) {
// Sort recordings in descending order.
data.recordings.sort(function(a, b) { return b.startId - a.startId; });
camera.recordingsData = data;
formatRecordings(camera);
stream.recordingsData = data;
formatRecordings(camera, streamType);
}).catch(function(data, status, err) {
console.log(url, ' load failed: ', status, ': ', err);
});
}
}
}
// Run initially and when changing camera filter.
function setupCalendar() {
let merged = {};
for (const camera of cameras) {
if (!camera.enabled) {
for (const streamType in camera.streams) {
const stream = camera.streams[streamType];
if (!stream.enabled) {
continue;
}
for (const dateStr in camera.days) {
for (const dateStr in stream.days) {
merged[dateStr] = true;
}
}
}
let minDateStr = '9999-99-99';
let maxDateStr = '0000-00-00';
for (const dateStr in merged) {
@ -258,14 +269,16 @@ function setupCalendar() {
}
};
function onCameraChange(event, camera) {
camera.enabled = event.target.checked;
if (camera.enabled) {
$("#tab-" + camera.uuid).show();
function onStreamChange(event, camera, streamType) {
let stream = camera.streams[streamType];
stream.enabled = event.target.checked;
let id = "#tab-" + camera.uuid + "-" + streamType;
if (stream.enabled) {
$(id).show();
} else {
$("#tab-" + camera.uuid).hide();
$(id).hide();
}
console.log('Camera ', camera.shortName, camera.enabled ? 'enabled' : 'disabled');
console.log(camera.shortName + "/" + streamType, stream.enabled ? 'enabled' : 'disabled');
setupCalendar();
}
@ -324,30 +337,53 @@ function onTimeChange(e, isEnd) {
}
function onReceivedCameras(data) {
let fieldset = $("#cameras");
let camtable = $("#cameras");
if (data.cameras.length === 0) {
return;
}
// Add a header row.
let hdr = $('<tr/>').append($('<th/>'));
for (const streamType of allStreamTypes) {
hdr.append($('<th/>').text(streamType));
}
camtable.append(hdr);
var reqs = [];
let videos = $("#videos");
for (let camera of data.cameras) {
const id = "cam-" + camera.uuid;
let checkBox = $('<input type="checkbox" checked>').attr("name", id).attr("id", id);
checkBox.change(function(event) { onCameraChange(event, camera); });
fieldset.append(checkBox,
$("<label/>").attr("for", id).text(camera.shortName),
$("<br/>"));
let tab = $("<tbody>").attr("id", "tab-" + camera.uuid);
let row = $('<tr/>').append($('<td>').text(camera.shortName));
let anyCheckedForCam = false;
for (const streamType of allStreamTypes) {
let stream = camera.streams[streamType];
if (stream === undefined) {
row.append('<td/>');
continue;
}
const id = "cam-" + camera.uuid + "-" + streamType;
let checkBox = $('<input type="checkbox">').attr("name", id).attr("id", id);
checkBox.change(function(event) { onStreamChange(event, camera, streamType); });
row.append($("<td/>").append(checkBox));
let tab = $("<tbody>").attr("id", "tab-" + camera.uuid + "-" + streamType);
tab.append(
$('<tr class="name">').append($('<th colspan=6/>').text(camera.shortName)),
$('<tr class="name">').append($('<th colspan=6/>').text(camera.shortName + " " + streamType)),
$('<tr class="hdr"><th>start</th><th>end</th><th>resolution</th><th>fps</th><th>size</th><th>bitrate</th></tr>'),
$('<tr class="loading"><td colspan=6>loading...</td></tr>'));
videos.append(tab);
camera.enabled = true;
camera.recordingsUrl = null;
camera.recordingsRange = null;
camera.recordingsData = null;
camera.recordingsReq = null;
stream.recordingsUrl = null;
stream.recordingsRange = null;
stream.recordingsData = null;
stream.recordingsReq = null;
stream.enabled = false;
if (!anyCheckedForCam) {
checkBox.attr("checked", "checked");
anyCheckedForCam = true;
stream.enabled = true;
} else {
tab.hide();
}
}
camtable.append(row);
}
$("#end-date-same").change(function(e) { setupCalendar(); });
$("#end-date-other").change(function(e) { setupCalendar(); });
@ -365,8 +401,11 @@ function onReceivedCameras(data) {
// reformat the tables.
let newTrim = e.target.checked;
for (camera of cameras) {
if (camera.recordingsData !== null) {
formatRecordings(camera);
for (streamType in camera.streams) {
const stream = camera.streams[streamType];
if (stream.recordingsData !== null) {
formatRecordings(camera, streamType);
}
}
}
});