start splitting wall and media duration for #34

This splits the schema and playback path. The recording path still
adjusts the frame durations and always says the wall and media durations
are the same. I expect to change that in a following commit. I wouldn't
be surprised if that shakes out some bugs in this portion.
This commit is contained in:
Scott Lamb
2020-08-04 21:44:01 -07:00
parent 476bd86b12
commit cb97ccdfeb
12 changed files with 437 additions and 241 deletions

View File

@@ -106,7 +106,7 @@ const INSERT_VIDEO_SAMPLE_ENTRY_SQL: &'static str = r#"
const UPDATE_STREAM_COUNTERS_SQL: &'static str = r#"
update stream
set cum_recordings = :cum_recordings,
cum_duration_90k = :cum_duration_90k,
cum_media_duration_90k = :cum_media_duration_90k,
cum_runs = :cum_runs
where id = :stream_id
"#;
@@ -178,7 +178,8 @@ pub struct ListRecordingsRow {
pub id: CompositeId,
/// This is a recording::Duration, but a single recording's duration fits into an i32.
pub duration_90k: i32,
pub wall_duration_90k: i32,
pub media_duration_90k: i32,
pub video_samples: i32,
pub video_sync_samples: i32,
pub sample_file_bytes: i32,
@@ -189,7 +190,7 @@ pub struct ListRecordingsRow {
/// This is populated by `list_recordings_by_id` but not `list_recordings_by_time`.
/// (It's not included in the `recording_cover` index, so adding it to
/// `list_recordings_by_time` would be inefficient.)
pub prev_duration_and_runs: Option<(recording::Duration, i32)>,
pub prev_media_duration_and_runs: Option<(recording::Duration, i32)>,
}
/// A row used in `list_aggregated_recordings`.
@@ -213,7 +214,7 @@ impl ListAggregatedRecordingsRow { fn from(row: ListRecordingsRow) -> Self {
let uncommitted = (row.flags & RecordingFlags::Uncommitted as i32) != 0;
let growing = (row.flags & RecordingFlags::Growing as i32) != 0;
ListAggregatedRecordingsRow {
time: row.start .. recording::Time(row.start.0 + row.duration_90k as i64),
time: row.start .. recording::Time(row.start.0 + row.wall_duration_90k as i64),
ids: recording_id .. recording_id+1,
video_samples: row.video_samples as i64,
video_sync_samples: row.video_sync_samples as i64,
@@ -252,12 +253,13 @@ pub struct RecordingToInsert {
pub start: recording::Time,
/// Filled in by `add_recording`.
pub prev_duration: recording::Duration,
pub prev_media_duration: recording::Duration,
/// Filled in by `add_recording`.
pub prev_runs: i32,
pub duration_90k: i32, // a recording::Duration, but guaranteed to fit in i32.
pub wall_duration_90k: i32, // a recording::Duration, but guaranteed to fit in i32.
pub media_duration_90k: i32,
pub local_time_delta: recording::Duration,
pub video_samples: i32,
pub video_sync_samples: i32,
@@ -272,14 +274,15 @@ impl RecordingToInsert {
start: self.start,
video_sample_entry_id: self.video_sample_entry_id,
id,
duration_90k: self.duration_90k,
wall_duration_90k: self.wall_duration_90k,
media_duration_90k: self.media_duration_90k,
video_samples: self.video_samples,
video_sync_samples: self.video_sync_samples,
sample_file_bytes: self.sample_file_bytes,
run_offset: self.run_offset,
open_id,
flags: self.flags | RecordingFlags::Uncommitted as i32,
prev_duration_and_runs: Some((self.prev_duration, self.prev_runs)),
prev_media_duration_and_runs: Some((self.prev_media_duration, self.prev_runs)),
}
}
}
@@ -290,7 +293,7 @@ impl RecordingToInsert {
pub(crate) struct ListOldestRecordingsRow {
pub id: CompositeId,
pub start: recording::Time,
pub duration: i32,
pub wall_duration_90k: i32,
pub sample_file_bytes: i32,
}
@@ -488,8 +491,8 @@ pub struct Stream {
/// The `cum_recordings` currently committed to the database.
pub(crate) cum_recordings: i32,
/// The `cum_duration_90k` currently committed to the database.
cum_duration: recording::Duration,
/// The `cum_media_duration_90k` currently committed to the database.
cum_media_duration: recording::Duration,
/// The `cum_runs` currently committed to the database.
cum_runs: i32,
@@ -640,7 +643,7 @@ fn init_recordings(conn: &mut rusqlite::Connection, stream_id: i32, camera: &Cam
let mut stmt = conn.prepare(r#"
select
recording.start_time_90k,
recording.duration_90k,
recording.wall_duration_90k,
recording.sample_file_bytes
from
recording
@@ -782,7 +785,7 @@ impl StreamStateChanger {
let mut stmt = tx.prepare_cached(r#"
insert into stream (camera_id, sample_file_dir_id, type, rtsp_url, record,
retain_bytes, flush_if_sec, cum_recordings,
cum_duration_90k, cum_runs)
cum_media_duration_90k, cum_runs)
values (:camera_id, :sample_file_dir_id, :type, :rtsp_url, :record,
0, :flush_if_sec, 0,
0, 0)
@@ -834,7 +837,7 @@ impl StreamStateChanger {
days: BTreeMap::new(),
record: sc.record,
cum_recordings: 0,
cum_duration: recording::Duration(0),
cum_media_duration: recording::Duration(0),
cum_runs: 0,
uncommitted: VecDeque::new(),
synced_recordings: 0,
@@ -883,7 +886,7 @@ impl LockedDatabase {
/// A call to `add_recording` is also a promise that previous recordings (even if not yet
/// synced and committed) won't change.
///
/// This fills the `prev_duration` and `prev_runs` fields.
/// This fills the `prev_media_duration` and `prev_runs` fields.
pub(crate) fn add_recording(&mut self, stream_id: i32, mut r: RecordingToInsert)
-> Result<(CompositeId, Arc<Mutex<RecordingToInsert>>), Error> {
let stream = match self.streams_by_id.get_mut(&stream_id) {
@@ -895,11 +898,12 @@ impl LockedDatabase {
match stream.uncommitted.back() {
Some(s) => {
let l = s.lock();
r.prev_duration = l.prev_duration + recording::Duration(l.duration_90k.into());
r.prev_media_duration =
l.prev_media_duration + recording::Duration(l.wall_duration_90k.into());
r.prev_runs = l.prev_runs + if l.run_offset == 0 { 1 } else { 0 };
},
None => {
r.prev_duration = stream.cum_duration;
r.prev_media_duration = stream.cum_media_duration;
r.prev_runs = stream.cum_runs;
},
};
@@ -1006,7 +1010,7 @@ impl LockedDatabase {
let l = s.uncommitted[i].lock();
raw::insert_recording(
&tx, o, CompositeId::new(stream_id, s.cum_recordings + i as i32), &l)?;
new_duration += i64::from(l.duration_90k);
new_duration += i64::from(l.wall_duration_90k);
new_runs += if l.run_offset == 0 { 1 } else { 0 };
}
if s.synced_recordings > 0 {
@@ -1014,7 +1018,7 @@ impl LockedDatabase {
stmt.execute_named(named_params!{
":stream_id": stream_id,
":cum_recordings": s.cum_recordings + s.synced_recordings as i32,
":cum_duration_90k": s.cum_duration.0 + new_duration,
":cum_media_duration_90k": s.cum_media_duration.0 + new_duration,
":cum_runs": s.cum_runs + new_runs,
})?;
}
@@ -1096,7 +1100,7 @@ impl LockedDatabase {
for row in s.to_delete.drain(..) {
log.deleted.push(row.id);
dir.garbage_needs_unlink.insert(row.id);
let d = recording::Duration(row.duration as i64);
let d = recording::Duration(i64::from(row.wall_duration_90k));
s.duration -= d;
adjust_days(row.start .. row.start + d, -1, &mut s.days);
}
@@ -1111,10 +1115,11 @@ impl LockedDatabase {
log.added.push(CompositeId::new(stream_id, s.cum_recordings));
let l = u.lock();
s.cum_recordings += 1;
let dur = recording::Duration(l.duration_90k.into());
s.cum_duration += dur;
let wall_dur = recording::Duration(l.wall_duration_90k.into());
let media_dur = recording::Duration(l.media_duration_90k.into());
s.cum_media_duration += media_dur;
s.cum_runs += if l.run_offset == 0 { 1 } else { 0 };
let end = l.start + dur;
let end = l.start + wall_dur;
s.add_recording(l.start .. end, l.sample_file_bytes);
}
s.synced_recordings = 0;
@@ -1258,7 +1263,7 @@ impl LockedDatabase {
let row = {
let l = u.lock();
if l.video_samples > 0 {
let end = l.start + recording::Duration(l.duration_90k as i64);
let end = l.start + recording::Duration(l.wall_duration_90k as i64);
if l.start > desired_time.end || end < desired_time.start {
continue; // there's no overlap with the requested range.
}
@@ -1337,7 +1342,7 @@ impl LockedDatabase {
Entry::Occupied(mut e) => {
let a = e.get_mut();
let new_dur = a.time.end - a.time.start +
recording::Duration(row.duration_90k as i64);
recording::Duration(row.wall_duration_90k as i64);
let needs_flush =
a.ids.end != recording_id ||
row.video_sample_entry_id != a.video_sample_entry_id ||
@@ -1354,7 +1359,7 @@ impl LockedDatabase {
bail!("stream {} recording {} has open id {} but {} has {}",
stream_id, a.ids.end - 1, a.open_id, row.id, row.open_id);
}
a.time.end.0 += row.duration_90k as i64;
a.time.end.0 += row.wall_duration_90k as i64;
a.ids.end = recording_id + 1;
a.video_samples += row.video_samples as i64;
a.video_sync_samples += row.video_sync_samples as i64;
@@ -1562,7 +1567,7 @@ impl LockedDatabase {
retain_bytes,
flush_if_sec,
cum_recordings,
cum_duration_90k,
cum_media_duration_90k,
cum_runs,
record
from
@@ -1600,7 +1605,7 @@ impl LockedDatabase {
duration: recording::Duration(0),
days: BTreeMap::new(),
cum_recordings: row.get(7)?,
cum_duration: recording::Duration(row.get(8)?),
cum_media_duration: recording::Duration(row.get(8)?),
cum_runs: row.get(9)?,
record: row.get(10)?,
uncommitted: VecDeque::new(),
@@ -2209,7 +2214,7 @@ mod tests {
{
let db = db.lock();
let stream = db.streams_by_id().get(&stream_id).unwrap();
let dur = recording::Duration(r.duration_90k as i64);
let dur = recording::Duration(r.wall_duration_90k as i64);
assert_eq!(Some(r.start .. r.start + dur), stream.range);
assert_eq!(r.sample_file_bytes as i64, stream.sample_file_bytes);
assert_eq!(dur, stream.duration);
@@ -2227,7 +2232,7 @@ mod tests {
rows += 1;
recording_id = Some(row.id);
assert_eq!(r.start, row.start);
assert_eq!(r.duration_90k, row.duration_90k);
assert_eq!(r.wall_duration_90k, row.wall_duration_90k);
assert_eq!(r.video_samples, row.video_samples);
assert_eq!(r.video_sync_samples, row.video_sync_samples);
assert_eq!(r.sample_file_bytes, row.sample_file_bytes);
@@ -2243,7 +2248,7 @@ mod tests {
rows += 1;
assert_eq!(recording_id, Some(row.id));
assert_eq!(r.start, row.start);
assert_eq!(r.duration_90k, row.duration);
assert_eq!(r.wall_duration_90k, row.wall_duration_90k);
assert_eq!(r.sample_file_bytes, row.sample_file_bytes);
true
}).unwrap();
@@ -2442,9 +2447,10 @@ mod tests {
run_offset: 0,
flags: 0,
start,
prev_duration: recording::Duration(0),
prev_media_duration: recording::Duration(0),
prev_runs: 0,
duration_90k: TIME_UNITS_PER_SEC as i32,
wall_duration_90k: TIME_UNITS_PER_SEC.try_into().unwrap(),
media_duration_90k: TIME_UNITS_PER_SEC.try_into().unwrap(),
local_time_delta: recording::Duration(0),
video_samples: 1,
video_sync_samples: 1,

View File

@@ -1,5 +1,5 @@
// This file is part of Moonfire NVR, a security camera network video recorder.
// Copyright (C) 2018 The Moonfire NVR Authors
// Copyright (C) 2018-2020 The Moonfire NVR Authors
//
// This program is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
@@ -45,7 +45,8 @@ const LIST_RECORDINGS_BY_TIME_SQL: &'static str = r#"
recording.run_offset,
recording.flags,
recording.start_time_90k,
recording.duration_90k,
recording.wall_duration_90k,
recording.media_duration_delta_90k,
recording.sample_file_bytes,
recording.video_samples,
recording.video_sync_samples,
@@ -57,7 +58,7 @@ const LIST_RECORDINGS_BY_TIME_SQL: &'static str = r#"
stream_id = :stream_id and
recording.start_time_90k > :start_time_90k - 27000000 and
recording.start_time_90k < :end_time_90k and
recording.start_time_90k + recording.duration_90k > :start_time_90k
recording.start_time_90k + recording.wall_duration_90k > :start_time_90k
order by
recording.start_time_90k
"#;
@@ -68,13 +69,14 @@ const LIST_RECORDINGS_BY_ID_SQL: &'static str = r#"
recording.run_offset,
recording.flags,
recording.start_time_90k,
recording.duration_90k,
recording.wall_duration_90k,
recording.media_duration_delta_90k,
recording.sample_file_bytes,
recording.video_samples,
recording.video_sync_samples,
recording.video_sample_entry_id,
recording.open_id,
recording.prev_duration_90k,
recording.prev_media_duration_90k,
recording.prev_runs
from
recording
@@ -98,7 +100,7 @@ const STREAM_MIN_START_SQL: &'static str = r#"
const STREAM_MAX_START_SQL: &'static str = r#"
select
start_time_90k,
duration_90k
wall_duration_90k
from
recording
where
@@ -110,7 +112,7 @@ const LIST_OLDEST_RECORDINGS_SQL: &'static str = r#"
select
composite_id,
start_time_90k,
duration_90k,
wall_duration_90k,
sample_file_bytes
from
recording
@@ -151,20 +153,23 @@ fn list_recordings_inner(mut rows: rusqlite::Rows, include_prev: bool,
f: &mut dyn FnMut(db::ListRecordingsRow) -> Result<(), Error>)
-> Result<(), Error> {
while let Some(row) = rows.next()? {
let wall_duration_90k = row.get(4)?;
let media_duration_delta_90k: i32 = row.get(5)?;
f(db::ListRecordingsRow {
id: CompositeId(row.get(0)?),
run_offset: row.get(1)?,
flags: row.get(2)?,
start: recording::Time(row.get(3)?),
duration_90k: row.get(4)?,
sample_file_bytes: row.get(5)?,
video_samples: row.get(6)?,
video_sync_samples: row.get(7)?,
video_sample_entry_id: row.get(8)?,
open_id: row.get(9)?,
prev_duration_and_runs: match include_prev {
wall_duration_90k,
media_duration_90k: wall_duration_90k + media_duration_delta_90k,
sample_file_bytes: row.get(6)?,
video_samples: row.get(7)?,
video_sync_samples: row.get(8)?,
video_sample_entry_id: row.get(9)?,
open_id: row.get(10)?,
prev_media_duration_and_runs: match include_prev {
false => None,
true => Some((recording::Duration(row.get(10)?), row.get(11)?)),
true => Some((recording::Duration(row.get(11)?), row.get(12)?)),
},
})?;
}
@@ -183,13 +188,13 @@ pub(crate) fn insert_recording(tx: &rusqlite::Transaction, o: &db::Open, id: Com
r: &db::RecordingToInsert) -> Result<(), Error> {
let mut stmt = tx.prepare_cached(r#"
insert into recording (composite_id, stream_id, open_id, run_offset, flags,
sample_file_bytes, start_time_90k, prev_duration_90k,
prev_runs, duration_90k, video_samples, video_sync_samples,
video_sample_entry_id)
sample_file_bytes, start_time_90k, prev_media_duration_90k,
prev_runs, wall_duration_90k, media_duration_delta_90k,
video_samples, video_sync_samples, video_sample_entry_id)
values (:composite_id, :stream_id, :open_id, :run_offset, :flags,
:sample_file_bytes, :start_time_90k, :prev_duration_90k,
:prev_runs, :duration_90k, :video_samples, :video_sync_samples,
:video_sample_entry_id)
:sample_file_bytes, :start_time_90k, :prev_media_duration_90k,
:prev_runs, :wall_duration_90k, :media_duration_delta_90k,
:video_samples, :video_sync_samples, :video_sample_entry_id)
"#).with_context(|e| format!("can't prepare recording insert: {}", e))?;
stmt.execute_named(named_params!{
":composite_id": id.0,
@@ -199,8 +204,9 @@ pub(crate) fn insert_recording(tx: &rusqlite::Transaction, o: &db::Open, id: Com
":flags": r.flags,
":sample_file_bytes": r.sample_file_bytes,
":start_time_90k": r.start.0,
":duration_90k": r.duration_90k,
":prev_duration_90k": r.prev_duration.0,
":wall_duration_90k": r.wall_duration_90k,
":media_duration_delta_90k": r.media_duration_90k - r.wall_duration_90k,
":prev_media_duration_90k": r.prev_media_duration.0,
":prev_runs": r.prev_runs,
":video_samples": r.video_samples,
":video_sync_samples": r.video_sync_samples,
@@ -351,7 +357,7 @@ pub(crate) fn get_range(conn: &rusqlite::Connection, stream_id: i32)
None => row_start .. row_end,
Some(Range{start: s, end: e}) => s .. ::std::cmp::max(e, row_end),
};
if row_start.0 <= maxes.start.0 - recording::MAX_RECORDING_DURATION {
if row_start.0 <= maxes.start.0 - recording::MAX_RECORDING_WALL_DURATION {
break;
}
maxes_opt = Some(maxes);
@@ -390,7 +396,7 @@ pub(crate) fn list_oldest_recordings(conn: &rusqlite::Connection, start: Composi
let should_continue = f(db::ListOldestRecordingsRow {
id: CompositeId(row.get(0)?),
start: recording::Time(row.get(1)?),
duration: row.get(2)?,
wall_duration_90k: row.get(2)?,
sample_file_bytes: row.get(3)?,
});
if !should_continue {

View File

@@ -1,5 +1,5 @@
// This file is part of Moonfire NVR, a security camera network video recorder.
// Copyright (C) 2016 The Moonfire NVR Authors
// Copyright (C) 2016-2020 The Moonfire NVR Authors
//
// This program is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
@@ -32,16 +32,39 @@ use crate::coding::{append_varint32, decode_varint32, unzigzag32, zigzag32};
use crate::db;
use failure::{Error, bail};
use log::trace;
use std::convert::TryFrom;
use std::ops::Range;
pub use base::time::TIME_UNITS_PER_SEC;
pub const DESIRED_RECORDING_DURATION: i64 = 60 * TIME_UNITS_PER_SEC;
pub const MAX_RECORDING_DURATION: i64 = 5 * 60 * TIME_UNITS_PER_SEC;
pub const DESIRED_RECORDING_WALL_DURATION: i64 = 60 * TIME_UNITS_PER_SEC;
pub const MAX_RECORDING_WALL_DURATION: i64 = 5 * 60 * TIME_UNITS_PER_SEC;
pub use base::time::Time;
pub use base::time::Duration;
/// Converts from a wall time offset into a recording to a media time offset.
///
/// Scales proportionally: `wall_off_90k * media_duration_90k / wall_duration_90k`,
/// truncating toward zero. Returns 0 for a zero-length recording.
///
/// # Panics
///
/// Panics (in practice, never) if the scaled result doesn't fit in an `i32`; the max wall
/// duration is `MAX_RECORDING_WALL_DURATION`, and the media duration should be roughly the
/// same (design limit of 500 ppm correction), so the result always fits.
pub fn wall_to_media(wall_off_90k: i32, wall_duration_90k: i32, media_duration_90k: i32) -> i32 {
    debug_assert!(wall_off_90k <= wall_duration_90k,
                  "wall_off_90k={} wall_duration_90k={} media_duration_90k={}",
                  wall_off_90k, wall_duration_90k, media_duration_90k);
    if wall_duration_90k == 0 {
        return 0;
    }
    // The intermediate product may overflow i32, so compute in i64. The final result should
    // fit back within i32 (see Panics above); fail loudly with a descriptive message if not.
    i32::try_from(i64::from(wall_off_90k) *
                  i64::from(media_duration_90k) /
                  i64::from(wall_duration_90k))
        .unwrap_or_else(|_| panic!("wall_to_media overflow: {} * {} / {} > i32::max_value()",
                                   wall_off_90k, media_duration_90k, wall_duration_90k))
}
/// An iterator through a sample index.
/// Initially invalid; call `next()` before each read.
#[derive(Clone, Copy, Debug)]
@@ -145,11 +168,12 @@ impl SampleIndexEncoder {
r: &mut db::RecordingToInsert) -> Result<(), Error> {
let duration_delta = duration_90k - self.prev_duration_90k;
self.prev_duration_90k = duration_90k;
let new_duration_90k = r.duration_90k + duration_90k;
if new_duration_90k as i64 > MAX_RECORDING_DURATION {
bail!("Duration {} exceeds maximum {}", new_duration_90k, MAX_RECORDING_DURATION);
let new_duration_90k = r.wall_duration_90k + duration_90k;
if i64::from(new_duration_90k) > MAX_RECORDING_WALL_DURATION {
bail!("Duration {} exceeds maximum {}", new_duration_90k, MAX_RECORDING_WALL_DURATION);
}
r.duration_90k += duration_90k;
r.wall_duration_90k += duration_90k;
r.media_duration_90k += duration_90k;
r.sample_file_bytes += bytes;
r.video_samples += 1;
let bytes_delta = bytes - if is_key {
@@ -169,19 +193,19 @@ impl SampleIndexEncoder {
}
/// A segment represents a view of some or all of a single recording, starting from a key frame.
/// Used by the `Mp4FileBuilder` class to splice together recordings into a single virtual .mp4.
/// This struct is not specific to a container format; for `.mp4`s, it's wrapped in a
/// `mp4::Segment`. Other container/transport formats could be supported in a similar manner.
#[derive(Debug)]
pub struct Segment {
pub id: db::CompositeId,
pub open_id: u32,
pub start: Time,
/// An iterator positioned at the beginning of the segment, or `None`. Most segments are
positioned at the beginning of the recording, so this is an optional box to shrink a long
list of segments. `None` is equivalent to `SampleIndexIterator::new()`.
begin: Option<Box<SampleIndexIterator>>,
pub file_end: i32,
pub desired_range_90k: Range<i32>,
pub frames: u16,
pub key_frames: u16,
video_sample_entry_id_and_trailing_zero: i32,
@@ -190,22 +214,20 @@ pub struct Segment {
impl Segment {
/// Creates a segment.
///
/// `desired_range_90k` represents the desired range of the segment relative to the start of
/// the recording. The actual range will start at the first key frame at or before the
/// desired start time. (The caller is responsible for creating an edit list to skip the
/// undesired portion.) It will end at the first frame after the desired range (unless the
/// desired range extends beyond the recording). (Likewise, the caller is responsible for
/// trimming the final frame's duration if desired.)
/// `desired_media_range_90k` represents the desired range of the segment relative to the start
/// of the recording, in media time units. The actual range will start at the first key frame
/// at or before the desired start time. (The caller is responsible for creating an edit list
/// to skip the undesired portion.) It will end at the first frame after the desired range
/// (unless the desired range extends beyond the recording). (Likewise, the caller is
/// responsible for trimming the final frame's duration if desired.)
pub fn new(db: &db::LockedDatabase,
recording: &db::ListRecordingsRow,
desired_range_90k: Range<i32>) -> Result<Segment, Error> {
desired_media_range_90k: Range<i32>) -> Result<Segment, Error> {
let mut self_ = Segment {
id: recording.id,
open_id: recording.open_id,
start: recording.start,
begin: None,
file_end: recording.sample_file_bytes,
desired_range_90k: desired_range_90k,
frames: recording.video_samples as u16,
key_frames: recording.video_sync_samples as u16,
video_sample_entry_id_and_trailing_zero:
@@ -213,23 +235,23 @@ impl Segment {
((((recording.flags & db::RecordingFlags::TrailingZero as i32) != 0) as i32) << 31),
};
if self_.desired_range_90k.start > self_.desired_range_90k.end ||
self_.desired_range_90k.end > recording.duration_90k {
bail!("desired range [{}, {}) invalid for recording of length {}",
self_.desired_range_90k.start, self_.desired_range_90k.end,
recording.duration_90k);
if desired_media_range_90k.start > desired_media_range_90k.end ||
desired_media_range_90k.end > recording.media_duration_90k {
bail!("desired media range [{}, {}) invalid for recording of length {}",
desired_media_range_90k.start, desired_media_range_90k.end,
recording.media_duration_90k);
}
if self_.desired_range_90k.start == 0 &&
self_.desired_range_90k.end == recording.duration_90k {
if desired_media_range_90k.start == 0 &&
desired_media_range_90k.end == recording.media_duration_90k {
// Fast path. Existing entry is fine.
trace!("recording::Segment::new fast path, recording={:#?}", recording);
return Ok(self_)
}
// Slow path. Need to iterate through the index.
trace!("recording::Segment::new slow path, desired_range_90k={:?}, recording={:#?}",
self_.desired_range_90k, recording);
trace!("recording::Segment::new slow path, desired_media_range_90k={:?}, recording={:#?}",
desired_media_range_90k, recording);
db.with_recording_playback(self_.id, &mut |playback| {
let mut begin = Box::new(SampleIndexIterator::new());
let data = &(&playback).video_index;
@@ -245,15 +267,15 @@ impl Segment {
// Going until the end of the recording is special-cased because there can be a trailing
// frame of zero duration. It's unclear exactly how this should be handled, but let's
// include it for consistency with the fast path. It'd be bizarre to have it included or
// not based on desired_range_90k.start.
let end_90k = if self_.desired_range_90k.end == recording.duration_90k {
// not based on desired_media_range_90k.start.
let end_90k = if desired_media_range_90k.end == recording.media_duration_90k {
i32::max_value()
} else {
self_.desired_range_90k.end
desired_media_range_90k.end
};
loop {
if it.start_90k <= self_.desired_range_90k.start && it.is_key() {
if it.start_90k <= desired_media_range_90k.start && it.is_key() {
// new start candidate.
*begin = it;
self_.frames = 0;
@@ -289,7 +311,8 @@ impl Segment {
self.begin.as_ref().map(|b| b.pos as u64).unwrap_or(0) .. self.file_end as u64
}
/// Returns the actual start time as described in `new`.
/// Returns the actual media start time. As described in `new`, this can be less than the
/// desired media start time if there is no key frame at the right position.
pub fn actual_start_90k(&self) -> i32 { self.begin.as_ref().map(|b| b.start_90k).unwrap_or(0) }
/// Iterates through each frame in the segment.
@@ -363,7 +386,7 @@ mod tests {
e.add_sample(10, 12, false, &mut r).unwrap();
e.add_sample(10, 1050, true, &mut r).unwrap();
assert_eq!(r.video_index, b"\x29\xd0\x0f\x02\x14\x08\x0a\x02\x05\x01\x64");
assert_eq!(10 + 9 + 11 + 10 + 10, r.duration_90k);
assert_eq!(10 + 9 + 11 + 10 + 10, r.media_duration_90k);
assert_eq!(5, r.video_samples);
assert_eq!(2, r.video_sync_samples);
}

View File

@@ -149,8 +149,8 @@ create table stream (
-- deleted ones. This is used for assigning the next recording id.
cum_recordings integer not null check (cum_recordings >= 0),
-- The total duration of all recordings ever created on this stream.
cum_duration_90k integer not null check (cum_duration_90k >= 0),
-- The total media duration of all recordings ever created on this stream.
cum_media_duration_90k integer not null check (cum_media_duration_90k >= 0),
-- The total number of runs (recordings with run_offset = 0) ever created
-- on this stream.
@@ -207,14 +207,19 @@ create table recording (
-- The total media duration of all previous recordings on this stream. This is
-- returned in API requests and may be helpful for timestamps in a HTML
-- MediaSourceExtensions SourceBuffer.
prev_duration_90k integer not null check (prev_duration_90k >= 0),
prev_media_duration_90k integer not null
check (prev_media_duration_90k >= 0),
-- The total number of previous runs (rows in which run_offset = 0).
prev_runs integer not null check (prev_runs >= 0),
-- The duration of the recording, in 90 kHz units.
duration_90k integer not null
check (duration_90k >= 0 and duration_90k < 5*60*90000),
-- The wall-time duration of the recording, in 90 kHz units. This is the
-- "corrected" duration.
wall_duration_90k integer not null
check (wall_duration_90k >= 0 and wall_duration_90k < 5*60*90000),
-- The difference between the recording's media duration and its wall
-- duration, in 90 kHz units:
-- media_duration_90k = wall_duration_90k + media_duration_delta_90k.
media_duration_delta_90k integer not null,
video_samples integer not null check (video_samples > 0),
video_sync_samples integer not null check (video_sync_samples > 0),
@@ -232,7 +237,8 @@ create index recording_cover on recording (
-- that only database verification and actual viewing of recordings need
-- to consult the underlying row.
open_id,
duration_90k,
wall_duration_90k,
media_duration_delta_90k,
video_samples,
video_sync_samples,
video_sample_entry_id,

View File

@@ -165,7 +165,7 @@ pub fn run(_args: &super::Args, tx: &rusqlite::Transaction) -> Result<(), Error>
retain_bytes integer not null check (retain_bytes >= 0),
flush_if_sec integer not null,
cum_recordings integer not null check (cum_recordings >= 0),
cum_duration_90k integer not null check (cum_duration_90k >= 0),
cum_media_duration_90k integer not null check (cum_media_duration_90k >= 0),
cum_runs integer not null check (cum_runs >= 0),
unique (camera_id, type)
);
@@ -195,10 +195,11 @@ pub fn run(_args: &super::Args, tx: &rusqlite::Transaction) -> Result<(), Error>
flags integer not null,
sample_file_bytes integer not null check (sample_file_bytes > 0),
start_time_90k integer not null check (start_time_90k > 0),
prev_duration_90k integer not null check (prev_duration_90k >= 0),
prev_media_duration_90k integer not null check (prev_media_duration_90k >= 0),
prev_runs integer not null check (prev_runs >= 0),
duration_90k integer not null
check (duration_90k >= 0 and duration_90k < 5*60*90000),
wall_duration_90k integer not null
check (wall_duration_90k >= 0 and wall_duration_90k < 5*60*90000),
media_duration_delta_90k integer not null,
video_samples integer not null check (video_samples > 0),
video_sync_samples integer not null check (video_sync_samples > 0),
video_sample_entry_id integer references video_sample_entry (id),
@@ -230,13 +231,13 @@ pub fn run(_args: &super::Args, tx: &rusqlite::Transaction) -> Result<(), Error>
"#)?;
let mut insert = tx.prepare(r#"
insert into recording (composite_id, open_id, stream_id, run_offset, flags,
sample_file_bytes, start_time_90k, prev_duration_90k, prev_runs,
duration_90k, video_samples, video_sync_samples,
video_sample_entry_id)
sample_file_bytes, start_time_90k, prev_media_duration_90k,
prev_runs, wall_duration_90k, media_duration_delta_90k,
video_samples, video_sync_samples, video_sample_entry_id)
values (:composite_id, :open_id, :stream_id, :run_offset, :flags,
:sample_file_bytes, :start_time_90k, :prev_duration_90k, :prev_runs,
:duration_90k, :video_samples, :video_sync_samples,
:video_sample_entry_id)
:sample_file_bytes, :start_time_90k, :prev_media_duration_90k,
:prev_runs, :wall_duration_90k, 0, :video_samples,
:video_sync_samples, :video_sample_entry_id)
"#)?;
let mut rows = stmt.query(params![])?;
while let Some(row) = rows.next()? {
@@ -247,7 +248,7 @@ pub fn run(_args: &super::Args, tx: &rusqlite::Transaction) -> Result<(), Error>
let flags: i32 = row.get(4)?;
let sample_file_bytes: i32 = row.get(5)?;
let start_time_90k: i64 = row.get(6)?;
let duration_90k: i32 = row.get(7)?;
let wall_duration_90k: i32 = row.get(7)?;
let video_samples: i32 = row.get(8)?;
let video_sync_samples: i32 = row.get(9)?;
let video_sample_entry_id: i32 = row.get(10)?;
@@ -264,14 +265,14 @@ pub fn run(_args: &super::Args, tx: &rusqlite::Transaction) -> Result<(), Error>
":flags": flags,
":sample_file_bytes": sample_file_bytes,
":start_time_90k": start_time_90k,
":prev_duration_90k": cum_duration_90k,
":prev_media_duration_90k": cum_duration_90k,
":prev_runs": cum_runs,
":duration_90k": duration_90k,
":wall_duration_90k": wall_duration_90k,
":video_samples": video_samples,
":video_sync_samples": video_sync_samples,
":video_sample_entry_id": video_sample_entry_id,
}).with_context(|_| format!("Unable to insert composite_id {}", composite_id))?;
cum_duration_90k += i64::from(duration_90k);
cum_duration_90k += i64::from(wall_duration_90k);
cum_runs += if run_offset == 0 { 1 } else { 0 };
}
tx.execute_batch(r#"
@@ -280,7 +281,8 @@ pub fn run(_args: &super::Args, tx: &rusqlite::Transaction) -> Result<(), Error>
stream_id,
start_time_90k,
open_id,
duration_90k,
wall_duration_90k,
media_duration_delta_90k,
video_samples,
video_sync_samples,
video_sample_entry_id,

View File

@@ -781,12 +781,12 @@ impl<F: FileWriter> InnerWriter<F> {
pkt_local_time: recording::Time) -> Result<i32, Error> {
let mut l = self.r.lock();
self.e.add_sample(duration_90k, bytes, is_key, &mut l)?;
let new = pkt_local_time - recording::Duration(i64::from(l.duration_90k));
let new = pkt_local_time - recording::Duration(i64::from(l.media_duration_90k));
self.local_start = cmp::min(self.local_start, new);
if l.run_offset == 0 { // start time isn't anchored to previous recording's end; adjust.
l.start = self.local_start;
}
Ok(l.duration_90k)
Ok(l.media_duration_90k)
}
fn close<C: Clocks + Clone>(mut self, channel: &SyncerChannel<F>, next_pts: Option<i64>,
@@ -813,7 +813,7 @@ impl<F: FileWriter> InnerWriter<F> {
local_time_delta = self.local_start - l.start;
l.local_time_delta = local_time_delta;
l.sample_file_blake3 = Some(blake3.as_bytes().clone());
total_duration = recording::Duration(i64::from(l.duration_90k));
total_duration = recording::Duration(i64::from(l.wall_duration_90k));
run_offset = l.run_offset;
end = l.start + total_duration;
}