start splitting wall and media duration for #34

This splits the schema and playback path. The recording path still
adjusts the frame durations and always says the wall and media durations
are the same. I expect to change that in a following commit. I wouldn't
be surprised if that shakes out some bugs in this portion.
Scott Lamb 2020-08-04 21:44:01 -07:00
parent 476bd86b12
commit cb97ccdfeb
12 changed files with 437 additions and 241 deletions
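The core of the change, sketched in isolation (hypothetical code, not from the diff): the `recording` table keeps `wall_duration_90k` plus a small `media_duration_delta_90k`; writers store the delta and readers reconstruct the media duration by addition, mirroring what `insert_recording` and `list_recordings_inner` do below.

```rust
// Sketch only: the relationship between the new columns
// (wall_duration_90k, media_duration_delta_90k) and the in-memory
// durations carried by RecordingToInsert / ListRecordingsRow.
struct Durations {
    wall_90k: i32,
    media_90k: i32,
}

impl Durations {
    // What insert_recording binds to :media_duration_delta_90k.
    fn delta_90k(&self) -> i32 {
        self.media_90k - self.wall_90k
    }

    // What list_recordings_inner computes when reading a row back.
    fn from_row(wall_90k: i32, delta_90k: i32) -> Durations {
        Durations { wall_90k, media_90k: wall_90k + delta_90k }
    }
}

fn main() {
    // Per the message above, the recording path currently always writes
    // identical durations, so the stored delta is 0 for now.
    let d = Durations { wall_90k: 5_400_000, media_90k: 5_400_000 };
    assert_eq!(d.delta_90k(), 0);
    let roundtrip = Durations::from_row(5_400_000, d.delta_90k());
    assert_eq!(roundtrip.media_90k, d.media_90k);
}
```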

---

@@ -106,7 +106,7 @@ const INSERT_VIDEO_SAMPLE_ENTRY_SQL: &'static str = r#"
 const UPDATE_STREAM_COUNTERS_SQL: &'static str = r#"
     update stream
     set cum_recordings = :cum_recordings,
-        cum_duration_90k = :cum_duration_90k,
+        cum_media_duration_90k = :cum_media_duration_90k,
         cum_runs = :cum_runs
     where id = :stream_id
 "#;
@@ -178,7 +178,8 @@ pub struct ListRecordingsRow {
     pub id: CompositeId,

     /// This is a recording::Duration, but a single recording's duration fits into an i32.
-    pub duration_90k: i32,
+    pub wall_duration_90k: i32,
+    pub media_duration_90k: i32,
     pub video_samples: i32,
     pub video_sync_samples: i32,
     pub sample_file_bytes: i32,
@@ -189,7 +190,7 @@ pub struct ListRecordingsRow {
     /// This is populated by `list_recordings_by_id` but not `list_recordings_by_time`.
     /// (It's not included in the `recording_cover` index, so adding it to
     /// `list_recordings_by_time` would be inefficient.)
-    pub prev_duration_and_runs: Option<(recording::Duration, i32)>,
+    pub prev_media_duration_and_runs: Option<(recording::Duration, i32)>,
 }

 /// A row used in `list_aggregated_recordings`.
@@ -213,7 +214,7 @@ impl ListAggregatedRecordingsRow { fn from(row: ListRecordingsRow) -> Self {
         let uncommitted = (row.flags & RecordingFlags::Uncommitted as i32) != 0;
         let growing = (row.flags & RecordingFlags::Growing as i32) != 0;
         ListAggregatedRecordingsRow {
-            time: row.start .. recording::Time(row.start.0 + row.duration_90k as i64),
+            time: row.start .. recording::Time(row.start.0 + row.wall_duration_90k as i64),
             ids: recording_id .. recording_id+1,
             video_samples: row.video_samples as i64,
             video_sync_samples: row.video_sync_samples as i64,
@@ -252,12 +253,13 @@ pub struct RecordingToInsert {
     pub start: recording::Time,

     /// Filled in by `add_recording`.
-    pub prev_duration: recording::Duration,
+    pub prev_media_duration: recording::Duration,

     /// Filled in by `add_recording`.
     pub prev_runs: i32,

-    pub duration_90k: i32,  // a recording::Duration, but guaranteed to fit in i32.
+    pub wall_duration_90k: i32,  // a recording::Duration, but guaranteed to fit in i32.
+    pub media_duration_90k: i32,
     pub local_time_delta: recording::Duration,
     pub video_samples: i32,
     pub video_sync_samples: i32,
@@ -272,14 +274,15 @@ impl RecordingToInsert {
             start: self.start,
             video_sample_entry_id: self.video_sample_entry_id,
             id,
-            duration_90k: self.duration_90k,
+            wall_duration_90k: self.wall_duration_90k,
+            media_duration_90k: self.media_duration_90k,
             video_samples: self.video_samples,
             video_sync_samples: self.video_sync_samples,
             sample_file_bytes: self.sample_file_bytes,
             run_offset: self.run_offset,
             open_id,
             flags: self.flags | RecordingFlags::Uncommitted as i32,
-            prev_duration_and_runs: Some((self.prev_duration, self.prev_runs)),
+            prev_media_duration_and_runs: Some((self.prev_media_duration, self.prev_runs)),
         }
     }
 }
@@ -290,7 +293,7 @@ impl RecordingToInsert {
 pub(crate) struct ListOldestRecordingsRow {
     pub id: CompositeId,
     pub start: recording::Time,
-    pub duration: i32,
+    pub wall_duration_90k: i32,
     pub sample_file_bytes: i32,
 }
@@ -488,8 +491,8 @@ pub struct Stream {
     /// The `cum_recordings` currently committed to the database.
     pub(crate) cum_recordings: i32,

-    /// The `cum_duration_90k` currently committed to the database.
-    cum_duration: recording::Duration,
+    /// The `cum_media_duration_90k` currently committed to the database.
+    cum_media_duration: recording::Duration,

     /// The `cum_runs` currently committed to the database.
     cum_runs: i32,
@@ -640,7 +643,7 @@ fn init_recordings(conn: &mut rusqlite::Connection, stream_id: i32, camera: &Cam
     let mut stmt = conn.prepare(r#"
         select
           recording.start_time_90k,
-          recording.duration_90k,
+          recording.wall_duration_90k,
           recording.sample_file_bytes
         from
           recording
@@ -782,7 +785,7 @@ impl StreamStateChanger {
                 let mut stmt = tx.prepare_cached(r#"
                     insert into stream (camera_id, sample_file_dir_id, type, rtsp_url, record,
                                         retain_bytes, flush_if_sec, cum_recordings,
-                                        cum_duration_90k, cum_runs)
+                                        cum_media_duration_90k, cum_runs)
                                 values (:camera_id, :sample_file_dir_id, :type, :rtsp_url, :record,
                                         0, :flush_if_sec, 0,
                                         0, 0)
@@ -834,7 +837,7 @@ impl StreamStateChanger {
                     days: BTreeMap::new(),
                     record: sc.record,
                     cum_recordings: 0,
-                    cum_duration: recording::Duration(0),
+                    cum_media_duration: recording::Duration(0),
                     cum_runs: 0,
                     uncommitted: VecDeque::new(),
                     synced_recordings: 0,
@@ -883,7 +886,7 @@ impl LockedDatabase {
     /// A call to `add_recording` is also a promise that previous recordings (even if not yet
     /// synced and committed) won't change.
     ///
-    /// This fills the `prev_duration` and `prev_runs` fields.
+    /// This fills the `prev_media_duration` and `prev_runs` fields.
     pub(crate) fn add_recording(&mut self, stream_id: i32, mut r: RecordingToInsert)
                                 -> Result<(CompositeId, Arc<Mutex<RecordingToInsert>>), Error> {
         let stream = match self.streams_by_id.get_mut(&stream_id) {
@@ -895,11 +898,12 @@ impl LockedDatabase {
         match stream.uncommitted.back() {
             Some(s) => {
                 let l = s.lock();
-                r.prev_duration = l.prev_duration + recording::Duration(l.duration_90k.into());
+                r.prev_media_duration =
+                    l.prev_media_duration + recording::Duration(l.wall_duration_90k.into());
                 r.prev_runs = l.prev_runs + if l.run_offset == 0 { 1 } else { 0 };
             },
             None => {
-                r.prev_duration = stream.cum_duration;
+                r.prev_media_duration = stream.cum_media_duration;
                 r.prev_runs = stream.cum_runs;
             },
         };
@@ -1006,7 +1010,7 @@ impl LockedDatabase {
                 let l = s.uncommitted[i].lock();
                 raw::insert_recording(
                     &tx, o, CompositeId::new(stream_id, s.cum_recordings + i as i32), &l)?;
-                new_duration += i64::from(l.duration_90k);
+                new_duration += i64::from(l.wall_duration_90k);
                 new_runs += if l.run_offset == 0 { 1 } else { 0 };
             }
             if s.synced_recordings > 0 {
@@ -1014,7 +1018,7 @@ impl LockedDatabase {
                 stmt.execute_named(named_params!{
                     ":stream_id": stream_id,
                     ":cum_recordings": s.cum_recordings + s.synced_recordings as i32,
-                    ":cum_duration_90k": s.cum_duration.0 + new_duration,
+                    ":cum_media_duration_90k": s.cum_media_duration.0 + new_duration,
                     ":cum_runs": s.cum_runs + new_runs,
                 })?;
             }
@@ -1096,7 +1100,7 @@ impl LockedDatabase {
             for row in s.to_delete.drain(..) {
                 log.deleted.push(row.id);
                 dir.garbage_needs_unlink.insert(row.id);
-                let d = recording::Duration(row.duration as i64);
+                let d = recording::Duration(i64::from(row.wall_duration_90k));
                 s.duration -= d;
                 adjust_days(row.start .. row.start + d, -1, &mut s.days);
             }
@@ -1111,10 +1115,11 @@ impl LockedDatabase {
                 log.added.push(CompositeId::new(stream_id, s.cum_recordings));
                 let l = u.lock();
                 s.cum_recordings += 1;
-                let dur = recording::Duration(l.duration_90k.into());
-                s.cum_duration += dur;
+                let wall_dur = recording::Duration(l.wall_duration_90k.into());
+                let media_dur = recording::Duration(l.media_duration_90k.into());
+                s.cum_media_duration += media_dur;
                 s.cum_runs += if l.run_offset == 0 { 1 } else { 0 };
-                let end = l.start + dur;
+                let end = l.start + wall_dur;
                 s.add_recording(l.start .. end, l.sample_file_bytes);
             }
             s.synced_recordings = 0;
@@ -1258,7 +1263,7 @@ impl LockedDatabase {
             let row = {
                 let l = u.lock();
                 if l.video_samples > 0 {
-                    let end = l.start + recording::Duration(l.duration_90k as i64);
+                    let end = l.start + recording::Duration(l.wall_duration_90k as i64);
                     if l.start > desired_time.end || end < desired_time.start {
                         continue;  // there's no overlap with the requested range.
                     }
@@ -1337,7 +1342,7 @@ impl LockedDatabase {
                 Entry::Occupied(mut e) => {
                     let a = e.get_mut();
                     let new_dur = a.time.end - a.time.start +
-                                  recording::Duration(row.duration_90k as i64);
+                                  recording::Duration(row.wall_duration_90k as i64);
                     let needs_flush =
                         a.ids.end != recording_id ||
                         row.video_sample_entry_id != a.video_sample_entry_id ||
@@ -1354,7 +1359,7 @@ impl LockedDatabase {
                         bail!("stream {} recording {} has open id {} but {} has {}",
                               stream_id, a.ids.end - 1, a.open_id, row.id, row.open_id);
                     }
-                    a.time.end.0 += row.duration_90k as i64;
+                    a.time.end.0 += row.wall_duration_90k as i64;
                     a.ids.end = recording_id + 1;
                     a.video_samples += row.video_samples as i64;
                     a.video_sync_samples += row.video_sync_samples as i64;
@@ -1562,7 +1567,7 @@ impl LockedDatabase {
               retain_bytes,
               flush_if_sec,
               cum_recordings,
-              cum_duration_90k,
+              cum_media_duration_90k,
               cum_runs,
               record
             from
@@ -1600,7 +1605,7 @@ impl LockedDatabase {
                 duration: recording::Duration(0),
                 days: BTreeMap::new(),
                 cum_recordings: row.get(7)?,
-                cum_duration: recording::Duration(row.get(8)?),
+                cum_media_duration: recording::Duration(row.get(8)?),
                 cum_runs: row.get(9)?,
                 record: row.get(10)?,
                 uncommitted: VecDeque::new(),
@@ -2209,7 +2214,7 @@ mod tests {
         {
             let db = db.lock();
             let stream = db.streams_by_id().get(&stream_id).unwrap();
-            let dur = recording::Duration(r.duration_90k as i64);
+            let dur = recording::Duration(r.wall_duration_90k as i64);
             assert_eq!(Some(r.start .. r.start + dur), stream.range);
             assert_eq!(r.sample_file_bytes as i64, stream.sample_file_bytes);
             assert_eq!(dur, stream.duration);
@@ -2227,7 +2232,7 @@ mod tests {
             rows += 1;
             recording_id = Some(row.id);
             assert_eq!(r.start, row.start);
-            assert_eq!(r.duration_90k, row.duration_90k);
+            assert_eq!(r.wall_duration_90k, row.wall_duration_90k);
             assert_eq!(r.video_samples, row.video_samples);
             assert_eq!(r.video_sync_samples, row.video_sync_samples);
             assert_eq!(r.sample_file_bytes, row.sample_file_bytes);
@@ -2243,7 +2248,7 @@ mod tests {
             rows += 1;
             assert_eq!(recording_id, Some(row.id));
             assert_eq!(r.start, row.start);
-            assert_eq!(r.duration_90k, row.duration);
+            assert_eq!(r.wall_duration_90k, row.wall_duration_90k);
             assert_eq!(r.sample_file_bytes, row.sample_file_bytes);
             true
         }).unwrap();
@@ -2442,9 +2447,10 @@ mod tests {
             run_offset: 0,
             flags: 0,
             start,
-            prev_duration: recording::Duration(0),
+            prev_media_duration: recording::Duration(0),
             prev_runs: 0,
-            duration_90k: TIME_UNITS_PER_SEC as i32,
+            wall_duration_90k: TIME_UNITS_PER_SEC.try_into().unwrap(),
+            media_duration_90k: TIME_UNITS_PER_SEC.try_into().unwrap(),
             local_time_delta: recording::Duration(0),
             video_samples: 1,
             video_sync_samples: 1,
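A hypothetical illustration (not from the commit) of the bookkeeping `add_recording` maintains above: each recording's `prev_media_duration` snapshots the running total of all earlier recordings' durations on the stream, which this commit still derives from `wall_duration_90k` per the message.

```rust
// Sketch of the running total that add_recording fills into each new
// recording's prev_media_duration field.
fn main() {
    let durations_90k = [5_400_000i64, 5_400_000, 2_700_000];
    let mut prev_media_duration_90k = 0i64;
    for (i, d) in durations_90k.iter().enumerate() {
        // Each recording records the total before itself.
        println!("recording {}: prev_media_duration_90k={}", i + 1, prev_media_duration_90k);
        prev_media_duration_90k += d;
    }
    assert_eq!(prev_media_duration_90k, 13_500_000);
}
```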

---

@@ -1,5 +1,5 @@
 // This file is part of Moonfire NVR, a security camera network video recorder.
-// Copyright (C) 2018 The Moonfire NVR Authors
+// Copyright (C) 2018-2020 The Moonfire NVR Authors
 //
 // This program is free software: you can redistribute it and/or modify
 // it under the terms of the GNU General Public License as published by
@@ -45,7 +45,8 @@ const LIST_RECORDINGS_BY_TIME_SQL: &'static str = r#"
       recording.run_offset,
       recording.flags,
       recording.start_time_90k,
-      recording.duration_90k,
+      recording.wall_duration_90k,
+      recording.media_duration_delta_90k,
       recording.sample_file_bytes,
       recording.video_samples,
       recording.video_sync_samples,
@@ -57,7 +58,7 @@ const LIST_RECORDINGS_BY_TIME_SQL: &'static str = r#"
       stream_id = :stream_id and
       recording.start_time_90k > :start_time_90k - 27000000 and
       recording.start_time_90k < :end_time_90k and
-      recording.start_time_90k + recording.duration_90k > :start_time_90k
+      recording.start_time_90k + recording.wall_duration_90k > :start_time_90k
     order by
       recording.start_time_90k
 "#;
@@ -68,13 +69,14 @@ const LIST_RECORDINGS_BY_ID_SQL: &'static str = r#"
       recording.run_offset,
       recording.flags,
       recording.start_time_90k,
-      recording.duration_90k,
+      recording.wall_duration_90k,
+      recording.media_duration_delta_90k,
       recording.sample_file_bytes,
       recording.video_samples,
       recording.video_sync_samples,
       recording.video_sample_entry_id,
       recording.open_id,
-      recording.prev_duration_90k,
+      recording.prev_media_duration_90k,
       recording.prev_runs
     from
       recording
@@ -98,7 +100,7 @@ const STREAM_MIN_START_SQL: &'static str = r#"
 const STREAM_MAX_START_SQL: &'static str = r#"
     select
       start_time_90k,
-      duration_90k
+      wall_duration_90k
     from
       recording
     where
@@ -110,7 +112,7 @@ const LIST_OLDEST_RECORDINGS_SQL: &'static str = r#"
     select
       composite_id,
       start_time_90k,
-      duration_90k,
+      wall_duration_90k,
       sample_file_bytes
     from
       recording
@@ -151,20 +153,23 @@ fn list_recordings_inner(mut rows: rusqlite::Rows, include_prev: bool,
                          f: &mut dyn FnMut(db::ListRecordingsRow) -> Result<(), Error>)
                          -> Result<(), Error> {
     while let Some(row) = rows.next()? {
+        let wall_duration_90k = row.get(4)?;
+        let media_duration_delta_90k: i32 = row.get(5)?;
         f(db::ListRecordingsRow {
             id: CompositeId(row.get(0)?),
             run_offset: row.get(1)?,
             flags: row.get(2)?,
             start: recording::Time(row.get(3)?),
-            duration_90k: row.get(4)?,
-            sample_file_bytes: row.get(5)?,
-            video_samples: row.get(6)?,
-            video_sync_samples: row.get(7)?,
-            video_sample_entry_id: row.get(8)?,
-            open_id: row.get(9)?,
-            prev_duration_and_runs: match include_prev {
+            wall_duration_90k,
+            media_duration_90k: wall_duration_90k + media_duration_delta_90k,
+            sample_file_bytes: row.get(6)?,
+            video_samples: row.get(7)?,
+            video_sync_samples: row.get(8)?,
+            video_sample_entry_id: row.get(9)?,
+            open_id: row.get(10)?,
+            prev_media_duration_and_runs: match include_prev {
                 false => None,
-                true => Some((recording::Duration(row.get(10)?), row.get(11)?)),
+                true => Some((recording::Duration(row.get(11)?), row.get(12)?)),
             },
         })?;
     }
@@ -183,13 +188,13 @@ pub(crate) fn insert_recording(tx: &rusqlite::Transaction, o: &db::Open, id: Com
                                r: &db::RecordingToInsert) -> Result<(), Error> {
     let mut stmt = tx.prepare_cached(r#"
         insert into recording (composite_id, stream_id, open_id, run_offset, flags,
-                               sample_file_bytes, start_time_90k, prev_duration_90k,
-                               prev_runs, duration_90k, video_samples, video_sync_samples,
-                               video_sample_entry_id)
+                               sample_file_bytes, start_time_90k, prev_media_duration_90k,
+                               prev_runs, wall_duration_90k, media_duration_delta_90k,
+                               video_samples, video_sync_samples, video_sample_entry_id)
                        values (:composite_id, :stream_id, :open_id, :run_offset, :flags,
-                               :sample_file_bytes, :start_time_90k, :prev_duration_90k,
-                               :prev_runs, :duration_90k, :video_samples, :video_sync_samples,
-                               :video_sample_entry_id)
+                               :sample_file_bytes, :start_time_90k, :prev_media_duration_90k,
+                               :prev_runs, :wall_duration_90k, :media_duration_delta_90k,
+                               :video_samples, :video_sync_samples, :video_sample_entry_id)
     "#).with_context(|e| format!("can't prepare recording insert: {}", e))?;
     stmt.execute_named(named_params!{
         ":composite_id": id.0,
@@ -199,8 +204,9 @@ pub(crate) fn insert_recording(tx: &rusqlite::Transaction, o: &db::Open, id: Com
         ":flags": r.flags,
         ":sample_file_bytes": r.sample_file_bytes,
         ":start_time_90k": r.start.0,
-        ":duration_90k": r.duration_90k,
-        ":prev_duration_90k": r.prev_duration.0,
+        ":wall_duration_90k": r.wall_duration_90k,
+        ":media_duration_delta_90k": r.media_duration_90k - r.wall_duration_90k,
+        ":prev_media_duration_90k": r.prev_media_duration.0,
         ":prev_runs": r.prev_runs,
         ":video_samples": r.video_samples,
         ":video_sync_samples": r.video_sync_samples,
@@ -351,7 +357,7 @@ pub(crate) fn get_range(conn: &rusqlite::Connection, stream_id: i32)
             None => row_start .. row_end,
             Some(Range{start: s, end: e}) => s .. ::std::cmp::max(e, row_end),
         };
-        if row_start.0 <= maxes.start.0 - recording::MAX_RECORDING_DURATION {
+        if row_start.0 <= maxes.start.0 - recording::MAX_RECORDING_WALL_DURATION {
             break;
         }
         maxes_opt = Some(maxes);
@@ -390,7 +396,7 @@ pub(crate) fn list_oldest_recordings(conn: &rusqlite::Connection, start: Composi
         let should_continue = f(db::ListOldestRecordingsRow {
             id: CompositeId(row.get(0)?),
             start: recording::Time(row.get(1)?),
-            duration: row.get(2)?,
+            wall_duration_90k: row.get(2)?,
             sample_file_bytes: row.get(3)?,
         });
         if !should_continue {

---

@@ -1,5 +1,5 @@
 // This file is part of Moonfire NVR, a security camera network video recorder.
-// Copyright (C) 2016 The Moonfire NVR Authors
+// Copyright (C) 2016-2020 The Moonfire NVR Authors
 //
 // This program is free software: you can redistribute it and/or modify
 // it under the terms of the GNU General Public License as published by
@@ -32,16 +32,39 @@ use crate::coding::{append_varint32, decode_varint32, unzigzag32, zigzag32};
 use crate::db;
 use failure::{Error, bail};
 use log::trace;
+use std::convert::TryFrom;
 use std::ops::Range;

 pub use base::time::TIME_UNITS_PER_SEC;

-pub const DESIRED_RECORDING_DURATION: i64 = 60 * TIME_UNITS_PER_SEC;
-pub const MAX_RECORDING_DURATION: i64 = 5 * 60 * TIME_UNITS_PER_SEC;
+pub const DESIRED_RECORDING_WALL_DURATION: i64 = 60 * TIME_UNITS_PER_SEC;
+pub const MAX_RECORDING_WALL_DURATION: i64 = 5 * 60 * TIME_UNITS_PER_SEC;

 pub use base::time::Time;
 pub use base::time::Duration;

+/// Converts from a wall time offset into a recording to a media time offset.
+pub fn wall_to_media(wall_off_90k: i32, wall_duration_90k: i32, media_duration_90k: i32) -> i32 {
+    debug_assert!(wall_off_90k <= wall_duration_90k,
+                  "wall_off_90k={} wall_duration_90k={} media_duration_90k={}",
+                  wall_off_90k, wall_duration_90k, media_duration_90k);
+    if wall_duration_90k == 0 {
+        return 0;
+    }
+    // The intermediate values here may overflow i32, so use an i64 instead. The max wall
+    // time is recording::MAX_RECORDING_WALL_DURATION; the max media duration should be
+    // roughly the same (design limit of 500 ppm correction). The final result should fit
+    // within i32.
+    i32::try_from(i64::from(wall_off_90k) *
+                  i64::from(media_duration_90k) /
+                  i64::from(wall_duration_90k))
+        .map_err(|_| format!("wall_to_media overflow: {} * {} / {} > i32::max_value()",
+                             wall_off_90k, media_duration_90k,
+                             wall_duration_90k))
+        .unwrap()
+}
+
 /// An iterator through a sample index.
 /// Initially invalid; call `next()` before each read.
 #[derive(Clone, Copy, Debug)]
@@ -145,11 +168,12 @@ impl SampleIndexEncoder {
                       r: &mut db::RecordingToInsert) -> Result<(), Error> {
         let duration_delta = duration_90k - self.prev_duration_90k;
         self.prev_duration_90k = duration_90k;
-        let new_duration_90k = r.duration_90k + duration_90k;
-        if new_duration_90k as i64 > MAX_RECORDING_DURATION {
-            bail!("Duration {} exceeds maximum {}", new_duration_90k, MAX_RECORDING_DURATION);
+        let new_duration_90k = r.wall_duration_90k + duration_90k;
+        if i64::from(new_duration_90k) > MAX_RECORDING_WALL_DURATION {
+            bail!("Duration {} exceeds maximum {}", new_duration_90k, MAX_RECORDING_WALL_DURATION);
         }
-        r.duration_90k += duration_90k;
+        r.wall_duration_90k += duration_90k;
+        r.media_duration_90k += duration_90k;
         r.sample_file_bytes += bytes;
         r.video_samples += 1;
         let bytes_delta = bytes - if is_key {
@@ -169,19 +193,19 @@ impl SampleIndexEncoder {
 }

 /// A segment represents a view of some or all of a single recording, starting from a key frame.
-/// Used by the `Mp4FileBuilder` class to splice together recordings into a single virtual .mp4.
+/// This struct is not specific to a container format; for `.mp4`s, it's wrapped in a
+/// `mp4::Segment`. Other container/transport formats could be supported in a similar manner.
 #[derive(Debug)]
 pub struct Segment {
     pub id: db::CompositeId,
     pub open_id: u32,
-    pub start: Time,

     /// An iterator positioned at the beginning of the segment, or `None`. Most segments are
     /// positioned at the beginning of the recording, so this is an optional box to shrink a long
     /// list of segments. `None` is equivalent to `SampleIndexIterator::new()`.
     begin: Option<Box<SampleIndexIterator>>,

     pub file_end: i32,
-    pub desired_range_90k: Range<i32>,
     pub frames: u16,
     pub key_frames: u16,
     video_sample_entry_id_and_trailing_zero: i32,
@@ -190,22 +214,20 @@ pub struct Segment {
 impl Segment {
     /// Creates a segment.
     ///
-    /// `desired_range_90k` represents the desired range of the segment relative to the start of
-    /// the recording. The actual range will start at the first key frame at or before the
-    /// desired start time. (The caller is responsible for creating an edit list to skip the
-    /// undesired portion.) It will end at the first frame after the desired range (unless the
-    /// desired range extends beyond the recording). (Likewise, the caller is responsible for
-    /// trimming the final frame's duration if desired.)
+    /// `desired_media_range_90k` represents the desired range of the segment relative to the start
+    /// of the recording, in media time units. The actual range will start at the first key frame
+    /// at or before the desired start time. (The caller is responsible for creating an edit list
+    /// to skip the undesired portion.) It will end at the first frame after the desired range
+    /// (unless the desired range extends beyond the recording). (Likewise, the caller is
+    /// responsible for trimming the final frame's duration if desired.)
     pub fn new(db: &db::LockedDatabase,
                recording: &db::ListRecordingsRow,
-               desired_range_90k: Range<i32>) -> Result<Segment, Error> {
+               desired_media_range_90k: Range<i32>) -> Result<Segment, Error> {
         let mut self_ = Segment {
             id: recording.id,
             open_id: recording.open_id,
-            start: recording.start,
             begin: None,
             file_end: recording.sample_file_bytes,
-            desired_range_90k: desired_range_90k,
             frames: recording.video_samples as u16,
             key_frames: recording.video_sync_samples as u16,
             video_sample_entry_id_and_trailing_zero:
@@ -213,23 +235,23 @@ impl Segment {
                 ((((recording.flags & db::RecordingFlags::TrailingZero as i32) != 0) as i32) << 31),
         };

-        if self_.desired_range_90k.start > self_.desired_range_90k.end ||
-           self_.desired_range_90k.end > recording.duration_90k {
-            bail!("desired range [{}, {}) invalid for recording of length {}",
-                  self_.desired_range_90k.start, self_.desired_range_90k.end,
-                  recording.duration_90k);
+        if desired_media_range_90k.start > desired_media_range_90k.end ||
+           desired_media_range_90k.end > recording.media_duration_90k {
+            bail!("desired media range [{}, {}) invalid for recording of length {}",
+                  desired_media_range_90k.start, desired_media_range_90k.end,
+                  recording.media_duration_90k);
         }

-        if self_.desired_range_90k.start == 0 &&
-           self_.desired_range_90k.end == recording.duration_90k {
+        if desired_media_range_90k.start == 0 &&
+           desired_media_range_90k.end == recording.media_duration_90k {
             // Fast path. Existing entry is fine.
             trace!("recording::Segment::new fast path, recording={:#?}", recording);
             return Ok(self_)
         }

         // Slow path. Need to iterate through the index.
-        trace!("recording::Segment::new slow path, desired_range_90k={:?}, recording={:#?}",
-               self_.desired_range_90k, recording);
+        trace!("recording::Segment::new slow path, desired_media_range_90k={:?}, recording={:#?}",
+               desired_media_range_90k, recording);
         db.with_recording_playback(self_.id, &mut |playback| {
             let mut begin = Box::new(SampleIndexIterator::new());
             let data = &(&playback).video_index;
@@ -245,15 +267,15 @@ impl Segment {
             // Going until the end of the recording is special-cased because there can be a trailing
             // frame of zero duration. It's unclear exactly how this should be handled, but let's
             // include it for consistency with the fast path. It'd be bizarre to have it included or
-            // not based on desired_range_90k.start.
-            let end_90k = if self_.desired_range_90k.end == recording.duration_90k {
+            // not based on desired_media_range_90k.start.
+            let end_90k = if desired_media_range_90k.end == recording.media_duration_90k {
                 i32::max_value()
             } else {
-                self_.desired_range_90k.end
+                desired_media_range_90k.end
             };

             loop {
-                if it.start_90k <= self_.desired_range_90k.start && it.is_key() {
+                if it.start_90k <= desired_media_range_90k.start && it.is_key() {
                     // new start candidate.
                     *begin = it;
                     self_.frames = 0;
@@ -289,7 +311,8 @@ impl Segment {
         self.begin.as_ref().map(|b| b.pos as u64).unwrap_or(0) .. self.file_end as u64
     }

-    /// Returns the actual start time as described in `new`.
+    /// Returns the actual media start time. As described in `new`, this can be less than the
+    /// desired media start time if there is no key frame at the right position.
     pub fn actual_start_90k(&self) -> i32 { self.begin.as_ref().map(|b| b.start_90k).unwrap_or(0) }

     /// Iterates through each frame in the segment.
@@ -363,7 +386,7 @@ mod tests {
         e.add_sample(10, 12, false, &mut r).unwrap();
         e.add_sample(10, 1050, true, &mut r).unwrap();
         assert_eq!(r.video_index, b"\x29\xd0\x0f\x02\x14\x08\x0a\x02\x05\x01\x64");
-        assert_eq!(10 + 9 + 11 + 10 + 10, r.duration_90k);
+        assert_eq!(10 + 9 + 11 + 10 + 10, r.media_duration_90k);
         assert_eq!(5, r.video_samples);
         assert_eq!(2, r.video_sync_samples);
     }
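The new `wall_to_media` helper above scales a wall-clock offset by the ratio of media duration to wall duration. A quick sanity check (hypothetical numbers at the 500 ppm design limit its comment mentions):

```rust
// Assumes the wall_to_media definition from the diff above is in scope.
fn main() {
    let wall_duration_90k = 60 * 90_000;                 // a 60-second recording
    let media_duration_90k = wall_duration_90k + 2_700;  // camera clock 500 ppm fast
    // A point 30 seconds in (wall time) lands proportionally later in media time.
    assert_eq!(wall_to_media(30 * 90_000, wall_duration_90k, media_duration_90k),
               2_701_350);  // 30.015 seconds in 90 kHz units
}
```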

---

@@ -149,8 +149,8 @@ create table stream (
   -- deleted ones. This is used for assigning the next recording id.
   cum_recordings integer not null check (cum_recordings >= 0),

-  -- The total duration of all recordings ever created on this stream.
-  cum_duration_90k integer not null check (cum_duration_90k >= 0),
+  -- The total media duration of all recordings ever created on this stream.
+  cum_media_duration_90k integer not null check (cum_media_duration_90k >= 0),

   -- The total number of runs (recordings with run_offset = 0) ever created
   -- on this stream.
@@ -207,14 +207,19 @@ create table recording (
   -- The total duration of all previous recordings on this stream. This is
   -- returned in API requests and may be helpful for timestamps in a HTML
   -- MediaSourceExtensions SourceBuffer.
-  prev_duration_90k integer not null check (prev_duration_90k >= 0),
+  prev_media_duration_90k integer not null
+      check (prev_media_duration_90k >= 0),

   -- The total number of previous runs (rows in which run_offset = 0).
   prev_runs integer not null check (prev_runs >= 0),

-  -- The duration of the recording, in 90 kHz units.
-  duration_90k integer not null
-      check (duration_90k >= 0 and duration_90k < 5*60*90000),
+  -- The wall-time duration of the recording, in 90 kHz units. This is the
+  -- "corrected" duration.
+  wall_duration_90k integer not null
+      check (wall_duration_90k >= 0 and wall_duration_90k < 5*60*90000),
+
+  -- TODO: comment.
+  media_duration_delta_90k integer not null,

   video_samples integer not null check (video_samples > 0),
   video_sync_samples integer not null check (video_sync_samples > 0),
@@ -232,7 +237,8 @@ create index recording_cover on recording (
   -- that only database verification and actual viewing of recordings need
   -- to consult the underlying row.
   open_id,
-  duration_90k,
+  wall_duration_90k,
+  media_duration_delta_90k,
   video_samples,
   video_sync_samples,
   video_sample_entry_id,

---

@@ -165,7 +165,7 @@ pub fn run(_args: &super::Args, tx: &rusqlite::Transaction) -> Result<(), Error>
             retain_bytes integer not null check (retain_bytes >= 0),
             flush_if_sec integer not null,
             cum_recordings integer not null check (cum_recordings >= 0),
-            cum_duration_90k integer not null check (cum_duration_90k >= 0),
+            cum_media_duration_90k integer not null check (cum_media_duration_90k >= 0),
             cum_runs integer not null check (cum_runs >= 0),
             unique (camera_id, type)
         );
@@ -195,10 +195,11 @@ pub fn run(_args: &super::Args, tx: &rusqlite::Transaction) -> Result<(), Error>
             flags integer not null,
             sample_file_bytes integer not null check (sample_file_bytes > 0),
             start_time_90k integer not null check (start_time_90k > 0),
-            prev_duration_90k integer not null check (prev_duration_90k >= 0),
+            prev_media_duration_90k integer not null check (prev_media_duration_90k >= 0),
             prev_runs integer not null check (prev_runs >= 0),
-            duration_90k integer not null
-                check (duration_90k >= 0 and duration_90k < 5*60*90000),
+            wall_duration_90k integer not null
+                check (wall_duration_90k >= 0 and wall_duration_90k < 5*60*90000),
+            media_duration_delta_90k integer not null,
             video_samples integer not null check (video_samples > 0),
             video_sync_samples integer not null check (video_sync_samples > 0),
             video_sample_entry_id integer references video_sample_entry (id),
@@ -230,13 +231,13 @@ pub fn run(_args: &super::Args, tx: &rusqlite::Transaction) -> Result<(), Error>
     "#)?;
     let mut insert = tx.prepare(r#"
         insert into recording (composite_id, open_id, stream_id, run_offset, flags,
-                               sample_file_bytes, start_time_90k, prev_duration_90k, prev_runs,
-                               duration_90k, video_samples, video_sync_samples,
-                               video_sample_entry_id)
+                               sample_file_bytes, start_time_90k, prev_media_duration_90k,
+                               prev_runs, wall_duration_90k, media_duration_delta_90k,
+                               video_samples, video_sync_samples, video_sample_entry_id)
                        values (:composite_id, :open_id, :stream_id, :run_offset, :flags,
-                               :sample_file_bytes, :start_time_90k, :prev_duration_90k, :prev_runs,
-                               :duration_90k, :video_samples, :video_sync_samples,
-                               :video_sample_entry_id)
+                               :sample_file_bytes, :start_time_90k, :prev_media_duration_90k,
+                               :prev_runs, :wall_duration_90k, 0, :video_samples,
+                               :video_sync_samples, :video_sample_entry_id)
     "#)?;
     let mut rows = stmt.query(params![])?;
     while let Some(row) = rows.next()? {
@@ -247,7 +248,7 @@ pub fn run(_args: &super::Args, tx: &rusqlite::Transaction) -> Result<(), Error>
         let flags: i32 = row.get(4)?;
         let sample_file_bytes: i32 = row.get(5)?;
         let start_time_90k: i64 = row.get(6)?;
-        let duration_90k: i32 = row.get(7)?;
+        let wall_duration_90k: i32 = row.get(7)?;
         let video_samples: i32 = row.get(8)?;
         let video_sync_samples: i32 = row.get(9)?;
         let video_sample_entry_id: i32 = row.get(10)?;
@@ -264,14 +265,14 @@ pub fn run(_args: &super::Args, tx: &rusqlite::Transaction) -> Result<(), Error>
             ":flags": flags,
             ":sample_file_bytes": sample_file_bytes,
             ":start_time_90k": start_time_90k,
-            ":prev_duration_90k": cum_duration_90k,
+            ":prev_media_duration_90k": cum_duration_90k,
             ":prev_runs": cum_runs,
-            ":duration_90k": duration_90k,
+            ":wall_duration_90k": wall_duration_90k,
             ":video_samples": video_samples,
             ":video_sync_samples": video_sync_samples,
             ":video_sample_entry_id": video_sample_entry_id,
         }).with_context(|_| format!("Unable to insert composite_id {}", composite_id))?;
-        cum_duration_90k += i64::from(duration_90k);
+        cum_duration_90k += i64::from(wall_duration_90k);
         cum_runs += if run_offset == 0 { 1 } else { 0 };
     }
     tx.execute_batch(r#"
@@ -280,7 +281,8 @@ pub fn run(_args: &super::Args, tx: &rusqlite::Transaction) -> Result<(), Error>
             stream_id,
             start_time_90k,
             open_id,
-            duration_90k,
+            wall_duration_90k,
+            media_duration_delta_90k,
             video_samples,
             video_sync_samples,
             video_sample_entry_id,

---

@@ -781,12 +781,12 @@ impl<F: FileWriter> InnerWriter<F> {
                   pkt_local_time: recording::Time) -> Result<i32, Error> {
         let mut l = self.r.lock();
         self.e.add_sample(duration_90k, bytes, is_key, &mut l)?;
-        let new = pkt_local_time - recording::Duration(i64::from(l.duration_90k));
+        let new = pkt_local_time - recording::Duration(i64::from(l.media_duration_90k));
         self.local_start = cmp::min(self.local_start, new);
         if l.run_offset == 0 {  // start time isn't anchored to previous recording's end; adjust.
             l.start = self.local_start;
         }
-        Ok(l.duration_90k)
+        Ok(l.media_duration_90k)
     }

     fn close<C: Clocks + Clone>(mut self, channel: &SyncerChannel<F>, next_pts: Option<i64>,
@@ -813,7 +813,7 @@ impl<F: FileWriter> InnerWriter<F> {
             local_time_delta = self.local_start - l.start;
             l.local_time_delta = local_time_delta;
             l.sample_file_blake3 = Some(blake3.as_bytes().clone());
-            total_duration = recording::Duration(i64::from(l.duration_90k));
+            total_duration = recording::Duration(i64::from(l.wall_duration_90k));
             run_offset = l.run_offset;
             end = l.start + total_duration;
         }

---

@@ -13,14 +13,10 @@ In the future, this is likely to be expanded:
   (at least for bootstrapping web authentication)
 * mobile interface

-## Terminology
-
-*signal:* a timeseries with an enum value. Signals might represent a camera's
-motion detection or day/night status. They could also represent an external
-input such as a burglar alarm system's zone status.
-
 ## Detailed design

+*Note:* italicized terms in this document are defined in the [glossary](glossary.md).
+
 All requests for JSON data should be sent with the header
 `Accept: application/json` (exactly).
@@ -112,7 +108,7 @@ The `application/json` response will have a dict as follows:
   * `config`: (only included if request parameter `cameraConfigs` is
     true) a dictionary describing the configuration of the stream:
     * `rtsp_url`
-* `signals`: a list of all signals known to the server. Each is a dictionary
+* `signals`: a list of all *signals* known to the server. Each is a dictionary
   with the following properties:
   * `id`: an integer identifier.
   * `shortName`: a unique, human-readable description of the signal
@@ -254,13 +250,12 @@ Example response:
 ### `GET /api/cameras/<uuid>/<stream>/recordings`

-Returns information about recordings.
-
-Valid request parameters:
+Returns information about *recordings*. Valid request parameters:

 * `startTime90k` and `endTime90k` limit the data returned to only
-  recordings which overlap with the given half-open interval. Either or both
-  may be absent; they default to the beginning and end of time, respectively.
+  recordings with wall times overlapping with the given half-open interval.
+  Either or both may be absent; they default to the beginning and end of time,
+  respectively.
 * `split90k` causes long runs of recordings to be split at the next
   convenient boundary after the given duration.
 * TODO(slamb): `continue` to support paging. (If data is too large, the
@@ -291,12 +286,12 @@ arbitrary order. Each recording object has the following properties:
   an increasing "open id". This field is the open id as of when these
   recordings were written. This can be used to disambiguate ids referring to
   uncommitted recordings.
-* `startTime90k`: the start time of the given recording. Note this may be
-  less than the requested `startTime90k` if this recording was ongoing
-  at the requested time.
-* `endTime90k`: the end time of the given recording. Note this may be
-  greater than the requested `endTime90k` if this recording was ongoing at
-  the requested time.
+* `startTime90k`: the start time of the given recording, in the wall time
+  scale. Note this may be less than the requested `startTime90k` if this
+  recording was ongoing at the requested time.
+* `endTime90k`: the end time of the given recording, in the wall time scale.
+  Note this may be greater than the requested `endTime90k` if this recording
+  was ongoing at the requested time.
 * `videoSampleEntryId`: a reference to an entry in the `videoSampleEntries`
   map.mp4` URL.
 * `videoSamples`: the number of samples (aka frames) of video in this
@@ -362,18 +357,19 @@ Expected query parameters:
 * `s` (one or more): a string of the form
   `START_ID[-END_ID][@OPEN_ID][.[REL_START_TIME]-[REL_END_TIME]]`. This
-  specifies recording segments to include. The produced `.mp4` file will be a
+  specifies *segments* to include. The produced `.mp4` file will be a
   concatenation of the segments indicated by all `s` parameters. The ids to
-  retrieve are as returned by the `/recordings` URL. The open id is optional
-  and will be enforced if present; it's recommended for disambiguation when
-  the requested range includes uncommitted recordings. The optional start and
-  end times are in 90k units and relative to the start of the first specified
-  id. These can be used to clip the returned segments. Note they can be used
-  to skip over some ids entirely; this is allowed so that the caller doesn't
-  need to know the start time of each interior id. If there is no key frame
-  at the desired relative start time, frames back to the last key frame will
-  be included in the returned data, and an edit list will instruct the
-  viewer to skip to the desired start time.
+  retrieve are as returned by the `/recordings` URL. The *open id* (see
+  [glossary](glossary.md)) is optional and will be enforced if present; it's
+  recommended for disambiguation when the requested range includes uncommitted
+  recordings. The optional start and end times are in 90k units of wall time
+  and relative to the start of the first specified id. These can be used to
+  clip the returned segments. Note they can be used to skip over some ids
+  entirely; this is allowed so that the caller doesn't need to know the start
+  time of each interior id. If there is no key frame at the desired relative
+  start time, frames back to the last key frame will be included in the
+  returned data, and an edit list will instruct the viewer to skip to the
+  desired start time.
 * `ts` (optional): should be set to `true` to request a subtitle track be
   added with human-readable recording timestamps.
@@ -397,6 +393,11 @@ Example request URI to retrieve recording id 1, skipping its first 26
 /api/cameras/fd20f7a2-9d69-4cb3-94ed-d51a20c3edfe/main/view.mp4?s=1.26
 ```

+Note carefully the distinction between *wall duration* and *media duration*.
+It's normal for `/view.mp4` to return a media presentation with a length
+slightly different from the *wall duration* of the backing recording or
+portion that was requested.
+
 TODO: error behavior on missing segment. It should be a 404, likely with an
 `application/json` body describing what portion if any (still) exists.
@@ -415,20 +416,20 @@ trim undesired leading portions.
 This response will include the following additional headers:

-* `X-Prev-Duration`: the total duration (in 90 kHz units) of all recordings
-  before the first requested recording in the `s` parameter. Browser-based
-  callers may use this to place this at the correct position in the source
-  buffer via `SourceBuffer.timestampOffset`.
+* `X-Prev-Media-Duration`: the total *media duration* (in 90 kHz units) of all
+  *recordings* before the first requested recording in the `s` parameter.
+  Browser-based callers may use this to place this at the correct position in
+  the source buffer via `SourceBuffer.timestampOffset`.
 * `X-Runs`: the cumulative number of "runs" of recordings. If this recording
   starts a new run, it is included in the count. Browser-based callers may
   use this to force gaps in the source buffer timeline by adjusting the
   timestamp offset if desired.
-* `X-Leading-Duration`: if present, the total duration (in 90 kHz units) of
-  additional leading video included before the caller's first requested
-  timestamp. This happens when the caller's requested timestamp does not
-  fall exactly on a key frame. Media segments can't include edit lists, so
-  unlike with the `/api/.../view.mp4` endpoint the caller is responsible for
-  trimming this portion. Browser-based callers may use
+* `X-Leading-Media-Duration`: if present, the total duration (in 90 kHz
+  units) of additional leading video included before the caller's first
+  requested timestamp. This happens when the caller's requested timestamp
+  does not fall exactly on a key frame. Media segments can't include edit
+  lists, so unlike with the `/api/.../view.mp4` endpoint the caller is
+  responsible for trimming this portion. Browser-based callers may use
   `SourceBuffer.appendWindowStart`.

 Expected query parameters:
@@ -448,8 +449,12 @@ this fundamental reason Moonfire NVR makes no effort to make multiple-segment
 * There's currently no way to generate an initialization segment for more
   than one video sample entry, so a `.m4s` that uses more than one video
   sample entry can't be used.
-* The `X-Prev-Duration` and `X-Leading-Duration` headers only describe the
-  first segment.
+* The `X-Prev-Media-Duration` and `X-Leading-Duration` headers only describe
+  the first segment.
+
+Timestamp tracks (see the `ts` parameter to `.mp4` URIs) aren't supported
+today. Most likely browser clients will implement timestamp subtitles via
+WebVTT API calls anyway.

 ### `GET /api/cameras/<uuid>/<stream>/view.m4s.txt`
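To make the header arithmetic above concrete, here's a hypothetical client-side calculation (not part of the API spec): `SourceBuffer.timestampOffset` takes seconds, while `X-Prev-Media-Duration` is in 90 kHz units.

```rust
// Hypothetical helper: convert an X-Prev-Media-Duration header value
// (90 kHz units) into the seconds value SourceBuffer.timestampOffset expects.
fn timestamp_offset_sec(prev_media_duration_90k: i64) -> f64 {
    prev_media_duration_90k as f64 / 90_000.0
}

fn main() {
    // e.g. 150 seconds of prior media on the stream:
    assert_eq!(timestamp_offset_sec(13_500_000), 150.0);
}
```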

---

design/glossary.md (new file, 66 lines)

@ -0,0 +1,66 @@
# Moonfire NVR Glossary
*media duration:* the total duration of the actual samples in a recording. These
durations are based on the camera's clock. Camera clocks can be quite
inaccurate, so this may not match the *wall duration*. See [time.md](time.md)
for details.
*open id:* a sequence number representing a time the database was opened in
write mode. One reason for using open ids is to disambiguate unflushed
recordings. Recordings' ids are assigned immediately, without any kind of
database transaction or reservation. Thus if a recording is never flushed
successfully, a following *open* may assign the same id to a new recording.
The open id disambiguates this and should be used whenever referring to a
recording that may be unflushed.
*recording:* the video from a (typically 1-minute) portion of an RTSP session.
RTSP sessions are divided into recordings as a detail of the
storage schema. See [schema.md](schema.md) for details. This concept is exposed
to the frontend code through the API; see [api.md](api.md). It's not exposed in
the user interface; videos are reconstructed from segments automatically.
*run:* all the recordings from a single RTSP session. These are all from the
same *stream* and could be reassembled into a single video with no gaps. If the
camera is lost and re-established, one run ends and another starts.
*sample:* data associated with a single timestamp within a recording, e.g. a video
frame or a set of
*sample file:* a file on disk that holds all the samples from a single recording.
*sample file directory:* a directory in the local filesystem that holds all
sample files for one or more streams. Typically there is one directory per disk.
*segment:* part or all of a recording. An API request might ask for a video of
recordings 14 starting 80 seconds in. If each recording is exactly 60 seconds,
this would correspond to three segments: recording 2 from 20 seconds in to
the end, all of recording 3, and all of recording 4. See [api.md](api.md).
*session:* a set of authenticated Moonfire NVR requests defined by the use of a
given credential (`s` cookie). Each user may have many credentials and thus
many sessions. Note that in Moonfire NVR's the term "session" by itself has
nothing to do with RTSP sessions; those more closely match a *run*.
*signal:* a timeseries with an enum value. Signals might represent a camera's
motion detection or day/night status. They could also represent an external
input such as a burglar alarm system's zone status. See [api.md](api.md).
Note signals are still under development and not yet exposed in Moonfire NVR's
UI. See [#28](https://github.com/scottlamb/moonfire-nvr/issues/28) for more
information.
*stream:* the "main" or "sub" stream from a given camera. Moonfire NVR expects
cameras to support configuring and simultaneously viewing two streams encoded from
the same underlying video and audio source. The difference between the two is
that the "main" stream's video is typically higher quality in terms of frame
rate, resolution, and bitrate. Likewise it may have higher quality audio.
A stream corresponds to an ONVIF "media profile". Each stream has a distinct
RTSP URL that yields a different RTSP "presentation".
*track:* a timed sequence of related samples (video, audio, or subtitles) within a
*stream*. This is consistent with the definition in ISO/IEC 14496-12 section
3.1.19. Note that RTSP RFC 2326 uses the word "stream" in the same way
Moonfire NVR uses the word "track".
*wall duration:* the total duration of a recording for the purpose of matching
with the NVR's wall clock time. This may not match the same recording's media
duration. See [time.md](time.md) for details and the conversion sketch below.
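A minimal sketch of the wall-to-media conversion implied by these definitions, assuming linear scaling with round-down, as this commit's call sites and comments suggest; the real `db::recording::wall_to_media` may handle edge cases differently:

```rust
/// Maps a time offset within a recording from wall units to media units.
/// Sketch only: linear scaling, rounding down; the real implementation in
/// db::recording may differ.
fn wall_to_media(rel_wall_90k: i32, wall_duration_90k: i32, media_duration_90k: i32) -> i32 {
    if wall_duration_90k == 0 {
        return 0; // assumption: treat an empty recording as zero media time
    }
    (i64::from(rel_wall_90k) * i64::from(media_duration_90k)
        / i64::from(wall_duration_90k)) as i32
}

fn main() {
    // A 60 s recording (5_400_000 wall units) whose camera clock ran 100 ppm
    // fast has 5_400_540 media units; 30 s of wall time lands 270 units later.
    assert_eq!(wall_to_media(2_700_000, 5_400_000, 5_400_540), 2_700_270);
}
```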
View File
@ -1,6 +1,10 @@
# Moonfire NVR Time Handling # Moonfire NVR Time Handling
Status: **current** Status: **in flux**. The approach below works well for video, but audio frames'
durations can't be adjusted as easily. As part of implementing audio support,
the implementation is changing to instead decouple "wall time" and "media time",
as described in
[this comment](https://github.com/scottlamb/moonfire-nvr/issues/34#issuecomment-651548468).
> A man with a watch knows what time it is. A man with two watches is never > A man with a watch knows what time it is. A man with two watches is never
> sure. > sure.
View File
@ -258,5 +258,8 @@ Version 6 adds over version 5:
before it on that stream. This is useful for MediaSourceExtension-based before it on that stream. This is useful for MediaSourceExtension-based
web browser UIs when setting timestamps of video segments in the web browser UIs when setting timestamps of video segments in the
SourceBuffer. SourceBuffer.
* decoupled "wall time" and "media time" of recordings, as a step toward
implementing audio support without giving up clock frequency adjustment. See
[this comment](https://github.com/scottlamb/moonfire-nvr/issues/34#issuecomment-651548468).
On upgrading to this version, sessions will be revoked. On upgrading to this version, sessions will be revoked.
View File
@ -81,7 +81,7 @@ use bytes::{Buf, BytesMut};
use byteorder::{BigEndian, ByteOrder, WriteBytesExt}; use byteorder::{BigEndian, ByteOrder, WriteBytesExt};
use crate::body::{Chunk, BoxedError, wrap_error}; use crate::body::{Chunk, BoxedError, wrap_error};
use db::dir; use db::dir;
use db::recording::{self, TIME_UNITS_PER_SEC}; use db::recording::{self, TIME_UNITS_PER_SEC, wall_to_media};
use futures::Stream; use futures::Stream;
use futures::stream; use futures::stream;
use http; use http;
@ -338,8 +338,23 @@ struct SegmentLengths {
/// A wrapper around `recording::Segment` that keeps some additional `.mp4`-specific state. /// A wrapper around `recording::Segment` that keeps some additional `.mp4`-specific state.
struct Segment { struct Segment {
/// The underlying segment (a portion of a recording).
s: recording::Segment, s: recording::Segment,
/// The absolute timestamp of the recording's start time.
recording_start: recording::Time,
recording_wall_duration_90k: i32,
recording_media_duration_90k: i32,
/// The _desired_, _relative_, _wall_ time range covered by this recording.
/// * _desired_: as noted in `recording::Segment`, the _actual_ time range may be somewhat
/// more if there's no key frame at the desired start.
/// * _relative_: relative to `recording_start` rather than absolute timestamps.
/// * _wall_ time: the media time units are in terms of the cameras' clocks. Wall time units
/// differ slightly.
rel_wall_range_90k: Range<i32>,
/// If generated, the `.mp4`-format sample indexes, accessed only through `get_index`: /// If generated, the `.mp4`-format sample indexes, accessed only through `get_index`:
/// 1. stts: `slice[.. stsz_start]` /// 1. stts: `slice[.. stsz_start]`
/// 2. stsz: `slice[stsz_start .. stss_start]` /// 2. stsz: `slice[stsz_start .. stss_start]`
@ -367,10 +382,18 @@ impl fmt::Debug for Segment {
unsafe impl Sync for Segment {} unsafe impl Sync for Segment {}
impl Segment { impl Segment {
fn new(db: &db::LockedDatabase, row: &db::ListRecordingsRow, rel_range_90k: Range<i32>, fn new(db: &db::LockedDatabase, row: &db::ListRecordingsRow, rel_wall_range_90k: Range<i32>,
first_frame_num: u32) -> Result<Self, Error> { first_frame_num: u32) -> Result<Self, Error> {
Ok(Segment{ let rel_media_range_90k =
s: recording::Segment::new(db, row, rel_range_90k).err_kind(ErrorKind::Unknown)?, wall_to_media(rel_wall_range_90k.start, row.wall_duration_90k, row.media_duration_90k)
..
wall_to_media(rel_wall_range_90k.end, row.wall_duration_90k, row.media_duration_90k);
Ok(Segment {
s: recording::Segment::new(db, row, rel_media_range_90k).err_kind(ErrorKind::Unknown)?,
recording_start: row.start,
recording_wall_duration_90k: row.wall_duration_90k,
recording_media_duration_90k: row.media_duration_90k,
rel_wall_range_90k,
index: UnsafeCell::new(Err(())), index: UnsafeCell::new(Err(())),
index_once: Once::new(), index_once: Once::new(),
first_frame_num, first_frame_num,
@ -378,6 +401,11 @@ impl Segment {
}) })
} }
fn media(&self, rel_wall_90k: i32) -> i32 {
db::recording::wall_to_media(rel_wall_90k, self.recording_wall_duration_90k,
self.recording_media_duration_90k)
}
fn get_index<'a, F>(&'a self, db: &db::Database, f: F) -> Result<&'a [u8], Error> fn get_index<'a, F>(&'a self, db: &db::Database, f: F) -> Result<&'a [u8], Error>
where F: FnOnce(&[u8], SegmentLengths) -> &[u8] { where F: FnOnce(&[u8], SegmentLengths) -> &[u8] {
self.index_once.call_once(|| { self.index_once.call_once(|| {
@ -439,8 +467,8 @@ impl Segment {
// Doing this after the fact is more efficient than having a condition on every // Doing this after the fact is more efficient than having a condition on every
// iteration. // iteration.
if let Some((last_start, dur)) = last_start_and_dur { if let Some((last_start, dur)) = last_start_and_dur {
BigEndian::write_u32(&mut stts[8*frame-4 ..], let min = cmp::min(self.media(self.rel_wall_range_90k.end) - last_start, dur);
cmp::min(s.desired_range_90k.end - last_start, dur) as u32); BigEndian::write_u32(&mut stts[8*frame-4 ..], u32::try_from(min).unwrap());
} }
} }
@ -531,7 +559,7 @@ impl Segment {
// Doing this after the fact is more efficient than having a condition on every // Doing this after the fact is more efficient than having a condition on every
// iteration. // iteration.
BigEndian::write_u32(&mut v[p-8 .. p-4], BigEndian::write_u32(&mut v[p-8 .. p-4],
cmp::min(self.s.desired_range_90k.end - r.last_start, cmp::min(self.media(self.rel_wall_range_90k.end) - r.last_start,
r.last_dur) as u32); r.last_dur) as u32);
} }
@ -545,12 +573,14 @@ pub struct FileBuilder {
segments: Vec<Segment>, segments: Vec<Segment>,
video_sample_entries: SmallVec<[Arc<db::VideoSampleEntry>; 1]>, video_sample_entries: SmallVec<[Arc<db::VideoSampleEntry>; 1]>,
next_frame_num: u32, next_frame_num: u32,
duration_90k: u64,
/// The total media time, after applying edit lists (if applicable) to skip unwanted portions.
media_duration_90k: u64,
num_subtitle_samples: u32, num_subtitle_samples: u32,
subtitle_co64_pos: Option<usize>, subtitle_co64_pos: Option<usize>,
body: BodyState, body: BodyState,
type_: Type, type_: Type,
prev_duration_and_cur_runs: Option<(recording::Duration, i32)>, prev_media_duration_and_cur_runs: Option<(recording::Duration, i32)>,
include_timestamp_subtitle_track: bool, include_timestamp_subtitle_track: bool,
content_disposition: Option<HeaderValue>, content_disposition: Option<HeaderValue>,
} }
@ -726,7 +756,7 @@ impl FileBuilder {
segments: Vec::new(), segments: Vec::new(),
video_sample_entries: SmallVec::new(), video_sample_entries: SmallVec::new(),
next_frame_num: 1, next_frame_num: 1,
duration_90k: 0, media_duration_90k: 0,
num_subtitle_samples: 0, num_subtitle_samples: 0,
subtitle_co64_pos: None, subtitle_co64_pos: None,
body: BodyState{ body: BodyState{
@ -737,14 +767,21 @@ impl FileBuilder {
type_: type_, type_: type_,
include_timestamp_subtitle_track: false, include_timestamp_subtitle_track: false,
content_disposition: None, content_disposition: None,
prev_duration_and_cur_runs: None, prev_media_duration_and_cur_runs: None,
} }
} }
/// Sets if the generated `.mp4` should include a subtitle track with second-level timestamps. /// Sets if the generated `.mp4` should include a subtitle track with second-level timestamps.
/// Default is false. /// Default is false.
pub fn include_timestamp_subtitle_track(&mut self, b: bool) { pub fn include_timestamp_subtitle_track(&mut self, b: bool) -> Result<(), Error> {
if b && self.type_ == Type::MediaSegment {
// There's no support today for timestamp truns or for timestamps without edit lists.
// The latter would invalidate the code's assumption that desired timespan == actual
// timespan in the timestamp track.
bail_t!(InvalidArgument, "timestamp subtitles aren't supported on media segments");
}
self.include_timestamp_subtitle_track = b; self.include_timestamp_subtitle_track = b;
Ok(())
} }
/// Reserves space for the given number of additional segments. /// Reserves space for the given number of additional segments.
@ -757,8 +794,10 @@ impl FileBuilder {
} }
/// Appends a segment for (a subset of) the given recording. /// Appends a segment for (a subset of) the given recording.
/// `rel_wall_range_90k` is the wall time range within the recording.
/// E.g. `0 .. row.wall_duration_90k` means the full recording.
pub fn append(&mut self, db: &db::LockedDatabase, row: db::ListRecordingsRow, pub fn append(&mut self, db: &db::LockedDatabase, row: db::ListRecordingsRow,
rel_range_90k: Range<i32>) -> Result<(), Error> { rel_wall_range_90k: Range<i32>) -> Result<(), Error> {
if let Some(prev) = self.segments.last() { if let Some(prev) = self.segments.last() {
if prev.s.have_trailing_zero() { if prev.s.have_trailing_zero() {
bail_t!(InvalidArgument, bail_t!(InvalidArgument,
@ -768,10 +807,10 @@ impl FileBuilder {
} else { } else {
// Include the current run in this count here, as we're not propagating the // Include the current run in this count here, as we're not propagating the
// run_offset_id further. // run_offset_id further.
self.prev_duration_and_cur_runs = row.prev_duration_and_runs self.prev_media_duration_and_cur_runs = row.prev_media_duration_and_runs
.map(|(d, r)| (d, r + if row.open_id == 0 { 1 } else { 0 })); .map(|(d, r)| (d, r + if row.run_offset == 0 { 1 } else { 0 }));
} }
let s = Segment::new(db, &row, rel_range_90k, self.next_frame_num)?; let s = Segment::new(db, &row, rel_wall_range_90k, self.next_frame_num)?;
self.next_frame_num += s.s.frames as u32; self.next_frame_num += s.s.frames as u32;
self.segments.push(s); self.segments.push(s);
@ -809,20 +848,29 @@ impl FileBuilder {
Type::MediaSegment => { etag.update(b":media:"); }, Type::MediaSegment => { etag.update(b":media:"); },
}; };
for s in &mut self.segments { for s in &mut self.segments {
let d = &s.s.desired_range_90k; let wd = &s.rel_wall_range_90k;
self.duration_90k += (d.end - d.start) as u64; let md = s.media(wd.start) .. s.media(wd.end);
let end = s.s.start + recording::Duration(d.end as i64);
// Add the media time for this segment. If edit lists are supported (not media
// segments), this shouldn't include the portion they skip.
let start = match self.type_ {
Type::MediaSegment => s.s.actual_start_90k(),
_ => md.start,
};
self.media_duration_90k += u64::try_from(md.end - start).unwrap();
let wall =
s.recording_start + recording::Duration(i64::from(s.rel_wall_range_90k.start)) ..
s.recording_start + recording::Duration(i64::from(s.rel_wall_range_90k.end));
max_end = match max_end { max_end = match max_end {
None => Some(end), None => Some(wall.end),
Some(v) => Some(cmp::max(v, end)), Some(v) => Some(cmp::max(v, wall.end)),
}; };
if self.include_timestamp_subtitle_track { if self.include_timestamp_subtitle_track {
// Calculate the number of subtitle samples: starting to ending time (rounding up). // Calculate the number of subtitle samples: starting to ending time (rounding up).
let start_sec = (s.s.start + recording::Duration(d.start as i64)).unix_seconds(); let start_sec = wall.start.unix_seconds();
let end_sec = (s.s.start + let end_sec =
recording::Duration(d.end as i64 + TIME_UNITS_PER_SEC - 1)) (wall.end + recording::Duration(TIME_UNITS_PER_SEC - 1)).unix_seconds();
.unix_seconds();
s.num_subtitle_samples = (end_sec - start_sec) as u16; s.num_subtitle_samples = (end_sec - start_sec) as u16;
self.num_subtitle_samples += s.num_subtitle_samples as u32; self.num_subtitle_samples += s.num_subtitle_samples as u32;
} }
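As a self-contained illustration of the subtitle count computed above: a segment needs one timestamp subtitle per calendar second its wall range touches, rounding the end up. The helper name here is hypothetical, and the sketch assumes nonnegative wall offsets:

```rust
// Illustrative sketch mirroring the start/end rounding in the loop above.
const TIME_UNITS_PER_SEC: i64 = 90_000;

fn num_subtitle_samples(wall_start_90k: i64, wall_end_90k: i64) -> u16 {
    let start_sec = wall_start_90k / TIME_UNITS_PER_SEC;
    let end_sec = (wall_end_90k + TIME_UNITS_PER_SEC - 1) / TIME_UNITS_PER_SEC;
    (end_sec - start_sec) as u16
}

fn main() {
    // A segment from 0.5 s to 2.25 s touches calendar seconds 0, 1, and 2.
    assert_eq!(num_subtitle_samples(45_000, 202_500), 3);
}
```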
@ -831,10 +879,10 @@ impl FileBuilder {
let mut data = [0_u8; 28]; let mut data = [0_u8; 28];
let mut cursor = io::Cursor::new(&mut data[..]); let mut cursor = io::Cursor::new(&mut data[..]);
cursor.write_i64::<BigEndian>(s.s.id.0).err_kind(ErrorKind::Internal)?; cursor.write_i64::<BigEndian>(s.s.id.0).err_kind(ErrorKind::Internal)?;
cursor.write_i64::<BigEndian>(s.s.start.0).err_kind(ErrorKind::Internal)?; cursor.write_i64::<BigEndian>(s.recording_start.0).err_kind(ErrorKind::Internal)?;
cursor.write_u32::<BigEndian>(s.s.open_id).err_kind(ErrorKind::Internal)?; cursor.write_u32::<BigEndian>(s.s.open_id).err_kind(ErrorKind::Internal)?;
cursor.write_i32::<BigEndian>(d.start).err_kind(ErrorKind::Internal)?; cursor.write_i32::<BigEndian>(wd.start).err_kind(ErrorKind::Internal)?;
cursor.write_i32::<BigEndian>(d.end).err_kind(ErrorKind::Internal)?; cursor.write_i32::<BigEndian>(wd.end).err_kind(ErrorKind::Internal)?;
etag.update(cursor.into_inner()); etag.update(cursor.into_inner());
} }
let max_end = match max_end { let max_end = match max_end {
@ -906,7 +954,7 @@ impl FileBuilder {
etag: HeaderValue::try_from(format!("\"{}\"", etag.to_hex().as_str())) etag: HeaderValue::try_from(format!("\"{}\"", etag.to_hex().as_str()))
.expect("hex string should be valid UTF-8"), .expect("hex string should be valid UTF-8"),
content_disposition: self.content_disposition, content_disposition: self.content_disposition,
prev_duration_and_cur_runs: self.prev_duration_and_cur_runs, prev_media_duration_and_cur_runs: self.prev_media_duration_and_cur_runs,
type_: self.type_, type_: self.type_,
}))) })))
} }
@ -1033,7 +1081,7 @@ impl FileBuilder {
self.body.append_u64(creation_ts as u64); self.body.append_u64(creation_ts as u64);
self.body.append_u64(creation_ts as u64); self.body.append_u64(creation_ts as u64);
self.body.append_u32(TIME_UNITS_PER_SEC as u32); self.body.append_u32(TIME_UNITS_PER_SEC as u32);
let d = self.duration_90k; let d = self.media_duration_90k;
self.body.append_u64(d); self.body.append_u64(d);
self.body.append_static(StaticBytestring::MvhdJunk)?; self.body.append_static(StaticBytestring::MvhdJunk)?;
let next_track_id = if self.include_timestamp_subtitle_track { 3 } else { 2 }; let next_track_id = if self.include_timestamp_subtitle_track { 3 } else { 2 };
@ -1069,7 +1117,7 @@ impl FileBuilder {
self.body.append_u32(creation_ts); self.body.append_u32(creation_ts);
self.body.append_u32(1); // track_id self.body.append_u32(1); // track_id
self.body.append_u32(0); // reserved self.body.append_u32(0); // reserved
self.body.append_u32(self.duration_90k as u32); self.body.append_u32(self.media_duration_90k as u32);
self.body.append_static(StaticBytestring::TkhdJunk)?; self.body.append_static(StaticBytestring::TkhdJunk)?;
let (width, height) = self.video_sample_entries.iter().fold(None, |m, e| { let (width, height) = self.video_sample_entries.iter().fold(None, |m, e| {
@ -1092,7 +1140,7 @@ impl FileBuilder {
self.body.append_u64(creation_ts as u64); self.body.append_u64(creation_ts as u64);
self.body.append_u32(2); // track_id self.body.append_u32(2); // track_id
self.body.append_u32(0); // reserved self.body.append_u32(0); // reserved
self.body.append_u64(self.duration_90k); self.body.append_u64(self.media_duration_90k);
self.body.append_static(StaticBytestring::TkhdJunk)?; self.body.append_static(StaticBytestring::TkhdJunk)?;
self.body.append_u32(0); // width, unused. self.body.append_u32(0); // width, unused.
self.body.append_u32(0); // height, unused. self.body.append_u32(0); // height, unused.
@ -1114,8 +1162,9 @@ impl FileBuilder {
// key frame. This relationship should hold true: // key frame. This relationship should hold true:
// actual start <= desired start <= desired end // actual start <= desired start <= desired end
let actual_start_90k = s.s.actual_start_90k(); let actual_start_90k = s.s.actual_start_90k();
let skip = s.s.desired_range_90k.start - actual_start_90k; let md = s.media(s.rel_wall_range_90k.start) .. s.media(s.rel_wall_range_90k.end);
let keep = s.s.desired_range_90k.end - s.s.desired_range_90k.start; let skip = md.start - actual_start_90k;
let keep = md.end - md.start;
if skip < 0 || keep < 0 { if skip < 0 || keep < 0 {
bail_t!(Internal, "skip={} keep={} on segment {:#?}", skip, keep, s); bail_t!(Internal, "skip={} keep={} on segment {:#?}", skip, keep, s);
} }
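A worked example of the skip/keep arithmetic above, with illustrative values; `skip` becomes the portion the edit list hides and `keep` the portion presented:

```rust
// Illustrative values only; all quantities are in 90 kHz media time units.
fn main() {
    let actual_start_90k = 8 * 90_000;      // sample data begins at a key frame 8 s in
    let md = 10 * 90_000 .. 70 * 90_000;    // desired media range: 10 s to 70 s
    let skip = md.start - actual_start_90k; // 180_000 units: edit list hides 2 s
    let keep = md.end - md.start;           // 5_400_000 units: 60 s are presented
    assert!(skip >= 0 && keep >= 0);        // actual start <= desired start <= desired end
    println!("elst entry: skip={} keep={}", skip, keep);
}
```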
@ -1186,7 +1235,7 @@ impl FileBuilder {
self.body.append_u64(creation_ts as u64); self.body.append_u64(creation_ts as u64);
self.body.append_u64(creation_ts as u64); self.body.append_u64(creation_ts as u64);
self.body.append_u32(TIME_UNITS_PER_SEC as u32); self.body.append_u32(TIME_UNITS_PER_SEC as u32);
self.body.append_u64(self.duration_90k); self.body.append_u64(self.media_duration_90k);
self.body.append_u32(0x55c40000); // language=und + pre_defined self.body.append_u32(0x55c40000); // language=und + pre_defined
}) })
} }
@ -1244,7 +1293,7 @@ impl FileBuilder {
}) })
} }
/// Appends a `TimeToSampleBox` (ISO/IEC 14496-12 section 8.6.1) suitable for video. /// Appends an `stts` / `TimeToSampleBox` (ISO/IEC 14496-12 section 8.6.1) for video.
fn append_video_stts(&mut self) -> Result<(), Error> { fn append_video_stts(&mut self) -> Result<(), Error> {
write_length!(self, { write_length!(self, {
self.body.buf.extend_from_slice(b"stts\x00\x00\x00\x00"); self.body.buf.extend_from_slice(b"stts\x00\x00\x00\x00");
@ -1264,7 +1313,7 @@ impl FileBuilder {
}) })
} }
/// Appends a `TimeToSampleBox` (ISO/IEC 14496-12 section 8.6.1) suitable for subtitles. /// Appends an `stts` / `TimeToSampleBox` (ISO/IEC 14496-12 section 8.6.1) for subtitles.
fn append_subtitle_stts(&mut self) -> Result<(), Error> { fn append_subtitle_stts(&mut self) -> Result<(), Error> {
write_length!(self, { write_length!(self, {
self.body.buf.extend_from_slice(b"stts\x00\x00\x00\x00"); self.body.buf.extend_from_slice(b"stts\x00\x00\x00\x00");
@ -1274,35 +1323,49 @@ impl FileBuilder {
let mut entry_count = 0; let mut entry_count = 0;
for s in &self.segments { for s in &self.segments {
let r = &s.s.desired_range_90k; // Note desired media range = actual media range for the subtitle track.
let start = s.s.start + recording::Duration(r.start as i64); // We still need to consider media time vs wall time.
let end = s.s.start + recording::Duration(r.end as i64); let wr = &s.rel_wall_range_90k;
let start = s.recording_start + recording::Duration(i64::from(wr.start));
let end = s.recording_start + recording::Duration(i64::from(wr.end));
let start_next_sec = recording::Time( let start_next_sec = recording::Time(
start.0 + TIME_UNITS_PER_SEC - (start.0 % TIME_UNITS_PER_SEC)); start.0 + TIME_UNITS_PER_SEC - (start.0 % TIME_UNITS_PER_SEC));
if end <= start_next_sec {
// Segment doesn't last past the next second.
entry_count += 1;
self.body.append_u32(1); // count
self.body.append_u32((end - start).0 as u32); // duration
} else {
// The first subtitle just lasts until the next second.
entry_count += 1;
self.body.append_u32(1); // count
self.body.append_u32((start_next_sec - start).0 as u32); // duration
// Then there are zero or more "interior" subtitles, one second each. let mr = s.media(wr.start) .. s.media(wr.end);
if end <= start_next_sec {
// Segment doesn't last past the next second. Just write one entry.
entry_count += 1;
self.body.append_u32(1);
self.body.append_u32(u32::try_from(mr.end - mr.start).unwrap());
} else {
// The first subtitle lasts until the next second.
let mut media_pos =
s.media(i32::try_from((start_next_sec - start).0).unwrap());
entry_count += 1;
self.body.append_u32(1);
self.body.append_u32(u32::try_from(media_pos - mr.start).unwrap());
// Then there are zero or more "interior" subtitles, one second each. That's
// one second converted from wall to media duration. wall_to_media rounds down,
// and these errors accumulate, so the final subtitle can be too early by as
// much as (MAX_RECORDING_WALL_DURATION/TIME_UNITS_PER_SEC) time units, or
// roughly 3 ms. We could avoid that by writing a separate entry for each
// second but it's not worth bloating the moov over 3 ms.
let end_prev_sec = recording::Time(end.0 - (end.0 % TIME_UNITS_PER_SEC)); let end_prev_sec = recording::Time(end.0 - (end.0 % TIME_UNITS_PER_SEC));
if start_next_sec < end_prev_sec { if start_next_sec < end_prev_sec {
entry_count += 1; let onesec_media_dur =
s.media(i32::try_from(TIME_UNITS_PER_SEC).unwrap());
let interior = (end_prev_sec - start_next_sec).0 / TIME_UNITS_PER_SEC; let interior = (end_prev_sec - start_next_sec).0 / TIME_UNITS_PER_SEC;
entry_count += 1;
self.body.append_u32(interior as u32); // count self.body.append_u32(interior as u32); // count
self.body.append_u32(TIME_UNITS_PER_SEC as u32); // duration self.body.append_u32(u32::try_from(onesec_media_dur).unwrap());
media_pos += onesec_media_dur * i32::try_from(interior).unwrap();
} }
// Then there's a final subtitle for the remaining fraction of a second. // Then there's a final subtitle for the remaining fraction of a second.
entry_count += 1; entry_count += 1;
self.body.append_u32(1); // count self.body.append_u32(1);
self.body.append_u32((end - end_prev_sec).0 as u32); // duration self.body.append_u32(u32::try_from(mr.end - media_pos).unwrap());
} }
} }
BigEndian::write_u32(&mut self.body.buf[entry_count_pos .. entry_count_pos + 4], BigEndian::write_u32(&mut self.body.buf[entry_count_pos .. entry_count_pos + 4],
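To make the round-down drift described in that comment concrete, a standalone sketch; the values are illustrative and the helper is a simplified stand-in for the real `wall_to_media`:

```rust
// Each interior one-second subtitle gets a floor-rounded media duration, so
// later subtitle boundaries drift early; the final entry absorbs the remainder.
fn wall_to_media(rel_wall_90k: i64, wall_dur_90k: i64, media_dur_90k: i64) -> i64 {
    rel_wall_90k * media_dur_90k / wall_dur_90k // rounds down, like the real helper
}

fn main() {
    // A 5-minute recording whose camera clock ran ~33 ppm fast.
    let (wall_dur, media_dur) = (300 * 90_000_i64, 300 * 90_000_i64 + 891);
    let onesec = wall_to_media(90_000, wall_dur, media_dur); // 90_002, not 90_002.97
    let drift = media_dur - onesec * 300; // 291 units, ~3.2 ms: "roughly 3 ms"
    println!("one-second media duration={} total drift={}", onesec, drift);
}
```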
@ -1456,7 +1519,7 @@ struct FileInner {
last_modified: SystemTime, last_modified: SystemTime,
etag: HeaderValue, etag: HeaderValue,
content_disposition: Option<HeaderValue>, content_disposition: Option<HeaderValue>,
prev_duration_and_cur_runs: Option<(recording::Duration, i32)>, prev_media_duration_and_cur_runs: Option<(recording::Duration, i32)>,
type_: Type, type_: Type,
} }
@ -1500,11 +1563,15 @@ impl FileInner {
fn get_subtitle_sample_data(&self, i: usize, r: Range<u64>, l: u64) -> Result<Chunk, Error> { fn get_subtitle_sample_data(&self, i: usize, r: Range<u64>, l: u64) -> Result<Chunk, Error> {
let s = &self.segments[i]; let s = &self.segments[i];
let d = &s.s.desired_range_90k; let d = &s.rel_wall_range_90k;
let start_sec = (s.s.start + recording::Duration(d.start as i64)).unix_seconds(); let start_sec =
let end_sec = (s.s.start + recording::Duration(d.end as i64 + TIME_UNITS_PER_SEC - 1)) (s.recording_start + recording::Duration(i64::from(d.start))).unix_seconds();
let end_sec =
(s.recording_start + recording::Duration(i64::from(d.end) + TIME_UNITS_PER_SEC - 1))
.unix_seconds(); .unix_seconds();
let mut v = Vec::with_capacity(l as usize); let l = usize::try_from(l).unwrap();
let mut v = Vec::with_capacity(l);
// TODO(slamb): is this right?!? might have an off-by-one here.
for ts in start_sec .. end_sec { for ts in start_sec .. end_sec {
v.write_u16::<BigEndian>(SUBTITLE_LENGTH as u16).expect("Vec write shouldn't fail"); v.write_u16::<BigEndian>(SUBTITLE_LENGTH as u16).expect("Vec write shouldn't fail");
let tm = time::at(time::Timespec{sec: ts, nsec: 0}); let tm = time::at(time::Timespec{sec: ts, nsec: 0});
@ -1512,6 +1579,7 @@ impl FileInner {
write!(v, "{}", tm.strftime(SUBTITLE_TEMPLATE).err_kind(ErrorKind::Internal)?) write!(v, "{}", tm.strftime(SUBTITLE_TEMPLATE).err_kind(ErrorKind::Internal)?)
.expect("Vec write shouldn't fail"); .expect("Vec write shouldn't fail");
} }
assert_eq!(l, v.len());
Ok(ARefss::new(v).map(|v| &v[r.start as usize .. r.end as usize]).into()) Ok(ARefss::new(v).map(|v| &v[r.start as usize .. r.end as usize]).into())
} }
} }
@ -1565,19 +1633,19 @@ impl http_serve::Entity for File {
hdrs.insert(http::header::CONTENT_DISPOSITION, cd.clone()); hdrs.insert(http::header::CONTENT_DISPOSITION, cd.clone());
} }
if self.0.type_ == Type::MediaSegment { if self.0.type_ == Type::MediaSegment {
if let Some((d, r)) = self.0.prev_duration_and_cur_runs { if let Some((d, r)) = self.0.prev_media_duration_and_cur_runs {
hdrs.insert( hdrs.insert(
"X-Prev-Duration", "X-Prev-Media-Duration",
HeaderValue::try_from(d.0.to_string()).expect("ints are valid headers")); HeaderValue::try_from(d.0.to_string()).expect("ints are valid headers"));
hdrs.insert( hdrs.insert(
"X-Runs", "X-Runs",
HeaderValue::try_from(r.to_string()).expect("ints are valid headers")); HeaderValue::try_from(r.to_string()).expect("ints are valid headers"));
} }
if let Some(s) = self.0.segments.first() { if let Some(s) = self.0.segments.first() {
let skip = s.s.desired_range_90k.start - s.s.actual_start_90k(); let skip = s.media(s.rel_wall_range_90k.start) - s.s.actual_start_90k();
if skip > 0 { if skip > 0 {
hdrs.insert( hdrs.insert(
"X-Leading-Duration", "X-Leading-Media-Duration",
HeaderValue::try_from(skip.to_string()).expect("ints are valid headers")); HeaderValue::try_from(skip.to_string()).expect("ints are valid headers"));
} }
} }
@ -1886,12 +1954,12 @@ mod tests {
pub fn create_mp4_from_db(tdb: &TestDb<RealClocks>, pub fn create_mp4_from_db(tdb: &TestDb<RealClocks>,
skip_90k: i32, shorten_90k: i32, include_subtitles: bool) -> File { skip_90k: i32, shorten_90k: i32, include_subtitles: bool) -> File {
let mut builder = FileBuilder::new(Type::Normal); let mut builder = FileBuilder::new(Type::Normal);
builder.include_timestamp_subtitle_track(include_subtitles); builder.include_timestamp_subtitle_track(include_subtitles).unwrap();
let all_time = recording::Time(i64::min_value()) .. recording::Time(i64::max_value()); let all_time = recording::Time(i64::min_value()) .. recording::Time(i64::max_value());
{ {
let db = tdb.db.lock(); let db = tdb.db.lock();
db.list_recordings_by_time(TEST_STREAM_ID, all_time, &mut |r| { db.list_recordings_by_time(TEST_STREAM_ID, all_time, &mut |r| {
let d = r.duration_90k; let d = r.media_duration_90k;
assert!(skip_90k + shorten_90k < d, "skip_90k={} shorten_90k={} r={:?}", assert!(skip_90k + shorten_90k < d, "skip_90k={} shorten_90k={} r={:?}",
skip_90k, shorten_90k, r); skip_90k, shorten_90k, r);
builder.append(&*db, r, skip_90k .. d - shorten_90k).unwrap(); builder.append(&*db, r, skip_90k .. d - shorten_90k).unwrap();
@ -1967,9 +2035,12 @@ mod tests {
let row = db.insert_recording_from_encoder(r); let row = db.insert_recording_from_encoder(r);
let d_start = if desired_range_90k.start < duration_so_far { 0 } let d_start = if desired_range_90k.start < duration_so_far { 0 }
else { desired_range_90k.start - duration_so_far }; else { desired_range_90k.start - duration_so_far };
let d_end = if desired_range_90k.end > duration_so_far + row.duration_90k let d_end = if desired_range_90k.end > duration_so_far + row.media_duration_90k {
{ row.duration_90k } else { desired_range_90k.end - duration_so_far }; row.media_duration_90k
duration_so_far += row.duration_90k; } else {
desired_range_90k.end - duration_so_far
};
duration_so_far += row.media_duration_90k;
builder.append(&db.db.lock(), row, d_start .. d_end).unwrap(); builder.append(&db.db.lock(), row, d_start .. d_end).unwrap();
} }
builder.build(db.db.clone(), db.dirs_by_stream_id.clone()) builder.build(db.db.clone(), db.dirs_by_stream_id.clone())
View File
@ -1,5 +1,5 @@
// This file is part of Moonfire NVR, a security camera network video recorder. // This file is part of Moonfire NVR, a security camera network video recorder.
// Copyright (C) 2016 The Moonfire NVR Authors // Copyright (C) 2016-2020 The Moonfire NVR Authors
// //
// This program is free software: you can redistribute it and/or modify // This program is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by // it under the terms of the GNU General Public License as published by
@ -52,6 +52,7 @@ use nom::IResult;
use nom::bytes::complete::{take_while1, tag}; use nom::bytes::complete::{take_while1, tag};
use nom::combinator::{all_consuming, map, map_res, opt}; use nom::combinator::{all_consuming, map, map_res, opt};
use nom::sequence::{preceded, tuple}; use nom::sequence::{preceded, tuple};
use std::convert::TryFrom;
use std::cmp; use std::cmp;
use std::net::IpAddr; use std::net::IpAddr;
use std::ops::Range; use std::ops::Range;
@ -460,13 +461,13 @@ impl Service {
let mut hdrs = header::HeaderMap::new(); let mut hdrs = header::HeaderMap::new();
mp4.add_headers(&mut hdrs); mp4.add_headers(&mut hdrs);
let mime_type = hdrs.get(header::CONTENT_TYPE).unwrap(); let mime_type = hdrs.get(header::CONTENT_TYPE).unwrap();
let (prev_duration, prev_runs) = row.prev_duration_and_runs.unwrap(); let (prev_media_duration, prev_runs) = row.prev_media_duration_and_runs.unwrap();
let hdr = format!( let hdr = format!(
"Content-Type: {}\r\n\ "Content-Type: {}\r\n\
X-Recording-Start: {}\r\n\ X-Recording-Start: {}\r\n\
X-Recording-Id: {}.{}\r\n\ X-Recording-Id: {}.{}\r\n\
X-Time-Range: {}-{}\r\n\ X-Time-Range: {}-{}\r\n\
X-Prev-Duration: {}\r\n\ X-Prev-Media-Duration: {}\r\n\
X-Runs: {}\r\n\ X-Runs: {}\r\n\
X-Video-Sample-Entry-Id: {}\r\n\r\n", X-Video-Sample-Entry-Id: {}\r\n\r\n",
mime_type.to_str().unwrap(), mime_type.to_str().unwrap(),
@ -475,7 +476,7 @@ impl Service {
live.recording, live.recording,
live.off_90k.start, live.off_90k.start,
live.off_90k.end, live.off_90k.end,
prev_duration.0, prev_media_duration.0,
prev_runs + if row.run_offset == 0 { 1 } else { 0 }, prev_runs + if row.run_offset == 0 { 1 } else { 0 },
&row.video_sample_entry_id); &row.video_sample_entry_id);
let mut v = hdr.into_bytes(); let mut v = hdr.into_bytes();
@ -696,7 +697,7 @@ impl Service {
|()| plain_response(StatusCode::BAD_REQUEST, |()| plain_response(StatusCode::BAD_REQUEST,
format!("invalid s parameter: {}", value)))?; format!("invalid s parameter: {}", value)))?;
debug!("stream_view_mp4: appending s={:?}", s); debug!("stream_view_mp4: appending s={:?}", s);
let mut est_segments = (s.ids.end - s.ids.start) as usize; let mut est_segments = usize::try_from(s.ids.end - s.ids.start).unwrap();
if let Some(end) = s.end_time { if let Some(end) = s.end_time {
// There should be roughly ceil((end - start) / // There should be roughly ceil((end - start) /
// desired_recording_duration) recordings in the desired timespan if // desired_recording_duration) recordings in the desired timespan if
@ -704,13 +705,13 @@ impl Service {
// the requested timespan with the rotate offset and another because // the requested timespan with the rotate offset and another because
// rotation only happens at key frames. // rotation only happens at key frames.
let ceil_durations = (end - s.start_time + let ceil_durations = (end - s.start_time +
recording::DESIRED_RECORDING_DURATION - 1) / recording::DESIRED_RECORDING_WALL_DURATION - 1) /
recording::DESIRED_RECORDING_DURATION; recording::DESIRED_RECORDING_WALL_DURATION;
est_segments = cmp::min(est_segments, (ceil_durations + 2) as usize); est_segments = cmp::min(est_segments, (ceil_durations + 2) as usize);
} }
builder.reserve(est_segments); builder.reserve(est_segments);
let db = self.db.lock(); let db = self.db.lock();
let mut prev = None; let mut prev = None; // previous recording id
let mut cur_off = 0; let mut cur_off = 0;
db.list_recordings_by_id(stream_id, s.ids.clone(), &mut |r| { db.list_recordings_by_id(stream_id, s.ids.clone(), &mut |r| {
let recording_id = r.id.recording(); let recording_id = r.id.recording();
@ -734,19 +735,21 @@ impl Service {
prev = Some(recording_id); prev = Some(recording_id);
// Add a segment for the relevant part of the recording, if any. // Add a segment for the relevant part of the recording, if any.
// Note all calculations here are in wall times / wall durations.
let end_time = s.end_time.unwrap_or(i64::max_value()); let end_time = s.end_time.unwrap_or(i64::max_value());
let d = r.duration_90k as i64; let d = i64::from(r.wall_duration_90k);
if s.start_time <= cur_off + d && cur_off < end_time { if s.start_time <= cur_off + d && cur_off < end_time {
let start = cmp::max(0, s.start_time - cur_off); let start = cmp::max(0, s.start_time - cur_off);
let end = cmp::min(d, end_time - cur_off); let end = cmp::min(d, end_time - cur_off);
let times = start as i32 .. end as i32; let times = i32::try_from(start).unwrap() ..
i32::try_from(end).unwrap();
debug!("...appending recording {} with times {:?} \ debug!("...appending recording {} with times {:?} \
(out of dur {})", r.id, times, d); (out of dur {})", r.id, times, d);
if start_time_for_filename.is_none() { if start_time_for_filename.is_none() {
start_time_for_filename = start_time_for_filename =
Some(r.start + recording::Duration(start)); Some(r.start + recording::Duration(start));
} }
builder.append(&db, r, start as i32 .. end as i32)?; builder.append(&db, r, times)?;
} else { } else {
debug!("...skipping recording {} dur {}", r.id, d); debug!("...skipping recording {} dur {}", r.id, d);
} }
@ -775,7 +778,8 @@ impl Service {
} }
} }
}, },
"ts" => builder.include_timestamp_subtitle_track(value == "true"), "ts" => builder.include_timestamp_subtitle_track(value == "true")
.map_err(from_base_error)?,
_ => return Err(bad_req(format!("parameter {} not understood", key))), _ => return Err(bad_req(format!("parameter {} not understood", key))),
} }
}; };