// This file is part of Moonfire NVR, a security camera network video recorder.
// Copyright (C) 2016-2020 The Moonfire NVR Authors
//
// This program is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
//
// In addition, as a special exception, the copyright holders give
// permission to link the code of portions of this program with the
// OpenSSL library under certain conditions as described in each
// individual source file, and distribute linked combinations including
// the two.
//
// You must obey the GNU General Public License in all respects for all
// of the code used other than OpenSSL. If you modify file(s) with this
// exception, you may extend this exception to your version of the
// file(s), but you are not obligated to do so. If you do not wish to do
// so, delete this exception statement from your version. If you delete
// this exception statement from all source files in the program, then
// also delete it here.
//
// This program is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
//
// You should have received a copy of the GNU General Public License
// along with this program. If not, see <http://www.gnu.org/licenses/>.

use crate::coding::{append_varint32, decode_varint32, unzigzag32, zigzag32};
use crate::db;
use failure::{Error, bail};
use log::trace;
use std::convert::TryFrom;
use std::ops::Range;

pub use base::time::TIME_UNITS_PER_SEC;

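/// The desired wall duration of a single recording: one minute, in 90 kHz time units.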
pub const DESIRED_RECORDING_WALL_DURATION: i64 = 60 * TIME_UNITS_PER_SEC;
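/// The maximum wall duration of a single recording: five minutes, in 90 kHz time units.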
pub const MAX_RECORDING_WALL_DURATION: i64 = 5 * 60 * TIME_UNITS_PER_SEC;
pub use base::time::Time;
pub use base::time::Duration;

/// Converts from a wall time offset into a recording to a media time offset.
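///
/// A small illustrative example (hypothetical numbers): with a media clock running 500 ppm
/// fast, a one-second wall duration of 90_000 units corresponds to a media duration of
/// 90_045 units, so the halfway point maps as follows:
///
/// ```ignore
/// assert_eq!(wall_to_media(45_000, 90_000, 90_045), 45_022); // 45_000 * 90_045 / 90_000
/// ```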
pub fn wall_to_media(wall_off_90k: i32, wall_duration_90k: i32, media_duration_90k: i32) -> i32 {
    debug_assert!(wall_off_90k <= wall_duration_90k,
                  "wall_off_90k={} wall_duration_90k={} media_duration_90k={}",
                  wall_off_90k, wall_duration_90k, media_duration_90k);
    if wall_duration_90k == 0 {
        return 0;
    }

    // The intermediate values here may overflow i32, so use an i64 instead. The max wall
    // time is recording::MAX_RECORDING_WALL_DURATION; the max media duration should be
    // roughly the same (design limit of 500 ppm correction). The final result should fit
    // within i32.
    i32::try_from(i64::from(wall_off_90k) *
                  i64::from(media_duration_90k) /
                  i64::from(wall_duration_90k))
        .map_err(|_| format!("wall_to_media overflow: {} * {} / {} > i32::max_value()",
                             wall_off_90k, media_duration_90k,
                             wall_duration_90k))
        .unwrap()
}

/// An iterator through a sample index.
/// Initially invalid; call `next()` before each read.
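///
/// A minimal usage sketch (`index` stands for an encoded sample index, as built by
/// `SampleIndexEncoder`):
///
/// ```ignore
/// let mut it = SampleIndexIterator::new();
/// while it.next(index)? {
///     // it.pos, it.bytes, it.start_90k, and it.duration_90k describe one sample;
///     // it.is_key() reports whether it is a key (sync) frame.
/// }
/// ```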
#[derive(Clone, Copy, Debug)]
pub struct SampleIndexIterator {
    /// The index byte position of the next sample to read (low 31 bits) and if the current
    /// sample is a key frame (high bit).
    i_and_is_key: u32,

    /// The starting data byte position of this sample within the segment.
    pub pos: i32,

    /// The starting time of this sample within the segment (in 90 kHz units).
    pub start_90k: i32,

    /// The duration of this sample (in 90 kHz units).
    pub duration_90k: i32,

    /// The byte length of this frame.
    pub bytes: i32,

    /// The byte length of the last frame of the "other" type: if this one is key, the last
    /// non-key; if this one is non-key, the last key.
    bytes_other: i32,
}

impl SampleIndexIterator {
    pub fn new() -> SampleIndexIterator {
        SampleIndexIterator{i_and_is_key: 0,
                            pos: 0,
                            start_90k: 0,
                            duration_90k: 0,
                            bytes: 0,
                            bytes_other: 0}
    }

    pub fn next(&mut self, data: &[u8]) -> Result<bool, Error> {
        self.pos += self.bytes;
        self.start_90k += self.duration_90k;
        let i = (self.i_and_is_key & 0x7FFF_FFFF) as usize;
        if i == data.len() {
            return Ok(false)
        }
        let (raw1, i1) = match decode_varint32(data, i) {
            Ok(tuple) => tuple,
            Err(()) => bail!("bad varint 1 at offset {}", i),
        };
        let (raw2, i2) = match decode_varint32(data, i1) {
            Ok(tuple) => tuple,
            Err(()) => bail!("bad varint 2 at offset {}", i1),
        };
        let duration_90k_delta = unzigzag32(raw1 >> 1);
        self.duration_90k += duration_90k_delta;
        if self.duration_90k < 0 {
            bail!("negative duration {} after applying delta {}",
                  self.duration_90k, duration_90k_delta);
        }
        if self.duration_90k == 0 && data.len() > i2 {
            bail!("zero duration only allowed at end; have {} bytes left", data.len() - i2);
        }
        let (prev_bytes_key, prev_bytes_nonkey) = match self.is_key() {
            true => (self.bytes, self.bytes_other),
            false => (self.bytes_other, self.bytes),
        };
        self.i_and_is_key = (i2 as u32) | (((raw1 & 1) as u32) << 31);
        let bytes_delta = unzigzag32(raw2);
        if self.is_key() {
            self.bytes = prev_bytes_key + bytes_delta;
            self.bytes_other = prev_bytes_nonkey;
        } else {
            self.bytes = prev_bytes_nonkey + bytes_delta;
            self.bytes_other = prev_bytes_key;
        }
        if self.bytes <= 0 {
            bail!("non-positive bytes {} after applying delta {} to key={} frame at ts {}",
                  self.bytes, bytes_delta, self.is_key(), self.start_90k);
        }
        Ok(true)
    }

    pub fn uninitialized(&self) -> bool { self.i_and_is_key == 0 }
    pub fn is_key(&self) -> bool { (self.i_and_is_key & 0x8000_0000) != 0 }
}

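/// Encodes a sample index.
///
/// A rough sketch of the format (see `design/schema.md` for the authoritative description):
/// each call to `add_sample` appends two varints to the recording's `video_index`,
/// `(zigzag32(duration_delta) << 1) | is_key` and `zigzag32(bytes_delta)`, where the duration
/// delta is relative to the previous sample and the bytes delta is relative to the previous
/// sample of the same key/non-key kind.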
#[derive(Debug)]
pub struct SampleIndexEncoder {
    prev_duration_90k: i32,
    prev_bytes_key: i32,
    prev_bytes_nonkey: i32,
}

impl SampleIndexEncoder {
    pub fn new() -> Self {
        SampleIndexEncoder {
            prev_duration_90k: 0,
            prev_bytes_key: 0,
            prev_bytes_nonkey: 0,
        }
    }

    pub fn add_sample(&mut self, duration_90k: i32, bytes: i32, is_key: bool,
                      r: &mut db::RecordingToInsert) {
        let duration_delta = duration_90k - self.prev_duration_90k;
        self.prev_duration_90k = duration_90k;
        r.media_duration_90k += duration_90k;
        r.sample_file_bytes += bytes;
        r.video_samples += 1;
        let bytes_delta = bytes - if is_key {
            let prev = self.prev_bytes_key;
            r.video_sync_samples += 1;
            self.prev_bytes_key = bytes;
            prev
        } else {
            let prev = self.prev_bytes_nonkey;
            self.prev_bytes_nonkey = bytes;
            prev
        };
        append_varint32((zigzag32(duration_delta) << 1) | (is_key as u32), &mut r.video_index);
        append_varint32(zigzag32(bytes_delta), &mut r.video_index);
    }
}

/// A segment represents a view of some or all of a single recording, starting from a key frame.
/// This struct is not specific to a container format; for `.mp4`s, it's wrapped in a
/// `mp4::Segment`. Other container/transport formats could be supported in a similar manner.
#[derive(Debug)]
pub struct Segment {
    pub id: db::CompositeId,
    pub open_id: u32,

    /// An iterator positioned at the beginning of the segment, or `None`. Most segments are
    /// positioned at the beginning of the recording, so this is an optional box to shrink a long
    /// list of segments. `None` is equivalent to `SampleIndexIterator::new()`.
    begin: Option<Box<SampleIndexIterator>>,
    pub file_end: i32,

    pub frames: u16,
    pub key_frames: u16,
    video_sample_entry_id_and_trailing_zero: i32,
}

impl Segment {
    /// Creates a segment.
    ///
    /// `desired_media_range_90k` represents the desired range of the segment relative to the start
    /// of the recording, in media time units. The actual range will start at the first key frame
    /// at or before the desired start time. (The caller is responsible for creating an edit list
    /// to skip the undesired portion.) It will end at the first frame after the desired range
    /// (unless the desired range extends beyond the recording). (Likewise, the caller is
    /// responsible for trimming the final frame's duration if desired.)
    pub fn new(db: &db::LockedDatabase,
               recording: &db::ListRecordingsRow,
               desired_media_range_90k: Range<i32>) -> Result<Segment, Error> {
        let mut self_ = Segment {
            id: recording.id,
            open_id: recording.open_id,
            begin: None,
            file_end: recording.sample_file_bytes,
            frames: recording.video_samples as u16,
            key_frames: recording.video_sync_samples as u16,
            video_sample_entry_id_and_trailing_zero:
                recording.video_sample_entry_id |
                ((((recording.flags & db::RecordingFlags::TrailingZero as i32) != 0) as i32) << 31),
        };

        if desired_media_range_90k.start > desired_media_range_90k.end ||
           desired_media_range_90k.end > recording.media_duration_90k {
            bail!("desired media range [{}, {}) invalid for recording of length {}",
                  desired_media_range_90k.start, desired_media_range_90k.end,
                  recording.media_duration_90k);
        }

        if desired_media_range_90k.start == 0 &&
           desired_media_range_90k.end == recording.media_duration_90k {
            // Fast path. Existing entry is fine.
            trace!("recording::Segment::new fast path, recording={:#?}", recording);
            return Ok(self_)
        }

        // Slow path. Need to iterate through the index.
        trace!("recording::Segment::new slow path, desired_media_range_90k={:?}, recording={:#?}",
               desired_media_range_90k, recording);
        db.with_recording_playback(self_.id, &mut |playback| {
            let mut begin = Box::new(SampleIndexIterator::new());
            let data = &(&playback).video_index;
            let mut it = SampleIndexIterator::new();
            if !it.next(data)? {
                bail!("no index");
            }
            if !it.is_key() {
                bail!("not key frame");
            }

            // Stop when hitting a frame with this start time.
            // Going until the end of the recording is special-cased because there can be a trailing
            // frame of zero duration. It's unclear exactly how this should be handled, but let's
            // include it for consistency with the fast path. It'd be bizarre to have it included or
            // not based on desired_media_range_90k.start.
            let end_90k = if desired_media_range_90k.end == recording.media_duration_90k {
                i32::max_value()
            } else {
                desired_media_range_90k.end
            };

            loop {
                if it.start_90k <= desired_media_range_90k.start && it.is_key() {
                    // new start candidate.
                    *begin = it;
                    self_.frames = 0;
                    self_.key_frames = 0;
                }
                if it.start_90k >= end_90k && self_.frames > 0 {
                    break;
                }
                self_.frames += 1;
                self_.key_frames += it.is_key() as u16;
                if !it.next(data)? {
                    break;
                }
            }
            self_.begin = Some(begin);
            self_.file_end = it.pos;
            self_.video_sample_entry_id_and_trailing_zero =
                recording.video_sample_entry_id |
                (((it.duration_90k == 0) as i32) << 31);
            Ok(())
        })?;
        Ok(self_)
    }

    pub fn video_sample_entry_id(&self) -> i32 {
        self.video_sample_entry_id_and_trailing_zero & 0x7FFFFFFF
    }

    pub fn have_trailing_zero(&self) -> bool { self.video_sample_entry_id_and_trailing_zero < 0 }

    /// Returns the byte range within the sample file of data associated with this segment.
    pub fn sample_file_range(&self) -> Range<u64> {
        self.begin.as_ref().map(|b| b.pos as u64).unwrap_or(0) .. self.file_end as u64
    }

    /// Returns the actual media start time. As described in `new`, this can be less than the
    /// desired media start time if there is no key frame at the right position.
    pub fn actual_start_90k(&self) -> i32 { self.begin.as_ref().map(|b| b.start_90k).unwrap_or(0) }

    /// Iterates through each frame in the segment.
    ///
    /// The caller supplies the recording's `RecordingPlayback` (which holds the video index),
    /// typically via `db::LockedDatabase::with_recording_playback`.
    pub fn foreach<F>(&self, playback: &db::RecordingPlayback, mut f: F) -> Result<(), Error>
    where F: FnMut(&SampleIndexIterator) -> Result<(), Error> {
        trace!("foreach on recording {}: {} frames, actual_start_90k: {}",
               self.id, self.frames, self.actual_start_90k());
        let data = &(&playback).video_index;
        let mut it = match self.begin {
            Some(ref b) => **b,
            None => SampleIndexIterator::new(),
        };
        if it.uninitialized() {
            if !it.next(data)? {
                bail!("recording {}: no frames", self.id);
            }
            if !it.is_key() {
                bail!("recording {}: doesn't start with key frame", self.id);
            }
        }
        let mut have_frame = true;
        let mut key_frame = 0;

        for i in 0 .. self.frames {
            if !have_frame {
                bail!("recording {}: expected {} frames, found only {}", self.id, self.frames, i);
            }
            if it.is_key() {
                key_frame += 1;
                if key_frame > self.key_frames {
                    bail!("recording {}: more than expected {} key frames",
                          self.id, self.key_frames);
                }
            }

            // Note: this inner loop avoids ? for performance. Don't change these lines without
            // reading https://github.com/rust-lang/rust/issues/37939 and running
            // mp4::bench::build_index.
            if let Err(e) = f(&it) {
                return Err(e);
            }
            have_frame = match it.next(data) {
                Err(e) => return Err(e),
                Ok(hf) => hf,
            };
        }
        if key_frame < self.key_frames {
            bail!("recording {}: expected {} key frames, found only {}",
                  self.id, self.key_frames, key_frame);
        }
        Ok(())
    }
}

#[cfg(test)]
mod tests {
    use base::clock::RealClocks;
    use super::*;
    use crate::testutil::{self, TestDb};

    /// Tests encoding the example from design/schema.md.
    #[test]
    fn test_encode_example() {
        testutil::init();
        let mut r = db::RecordingToInsert::default();
        let mut e = SampleIndexEncoder::new();
        e.add_sample(10, 1000, true, &mut r);
        e.add_sample(9, 10, false, &mut r);
        e.add_sample(11, 15, false, &mut r);
        e.add_sample(10, 12, false, &mut r);
        e.add_sample(10, 1050, true, &mut r);
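        // Spot-check of the first two encoded values (key sample, duration 10, 1000 bytes):
        // (zigzag32(10) << 1) | 1 = 0x29, and zigzag32(1000) = 2000 encodes as the varint
        // bytes 0xd0 0x0f, matching the start of the expected index below.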
        assert_eq!(r.video_index, b"\x29\xd0\x0f\x02\x14\x08\x0a\x02\x05\x01\x64");
        assert_eq!(10 + 9 + 11 + 10 + 10, r.media_duration_90k);
        assert_eq!(5, r.video_samples);
        assert_eq!(2, r.video_sync_samples);
    }

    /// Tests a round trip from `SampleIndexEncoder` to `SampleIndexIterator`.
    #[test]
    fn test_round_trip() {
        testutil::init();
        #[derive(Debug, PartialEq, Eq)]
        struct Sample {
            duration_90k: i32,
            bytes: i32,
            is_key: bool,
        }
        let samples = [
            Sample{duration_90k: 10, bytes: 30000, is_key: true},
            Sample{duration_90k: 9, bytes: 1000, is_key: false},
            Sample{duration_90k: 11, bytes: 1100, is_key: false},
            Sample{duration_90k: 18, bytes: 31000, is_key: true},
            Sample{duration_90k: 0, bytes: 1000, is_key: false},
        ];
        let mut r = db::RecordingToInsert::default();
        let mut e = SampleIndexEncoder::new();
        for sample in &samples {
            e.add_sample(sample.duration_90k, sample.bytes, sample.is_key, &mut r);
        }
        let mut it = SampleIndexIterator::new();
        for sample in &samples {
            assert!(it.next(&r.video_index).unwrap());
            assert_eq!(sample,
                       &Sample{duration_90k: it.duration_90k,
                               bytes: it.bytes,
                               is_key: it.is_key()});
        }
        assert!(!it.next(&r.video_index).unwrap());
    }

    /// Tests that `SampleIndexIterator` spots several classes of errors.
    /// TODO: test and fix overflow cases.
    #[test]
    fn test_iterator_errors() {
        testutil::init();
        struct Test {
            encoded: &'static [u8],
            err: &'static str,
        }
        let tests = [
            Test{encoded: b"\x80", err: "bad varint 1 at offset 0"},
            Test{encoded: b"\x00\x80", err: "bad varint 2 at offset 1"},
            Test{encoded: b"\x00\x02\x00\x00",
                 err: "zero duration only allowed at end; have 2 bytes left"},
            Test{encoded: b"\x02\x02",
                 err: "negative duration -1 after applying delta -1"},
            Test{encoded: b"\x04\x00",
                 err: "non-positive bytes 0 after applying delta 0 to key=false frame at ts 0"},
        ];
        for test in &tests {
            let mut it = SampleIndexIterator::new();
            assert_eq!(it.next(test.encoded).unwrap_err().to_string(), test.err);
        }
    }

    fn get_frames<F, T>(db: &db::Database, segment: &Segment, f: F) -> Vec<T>
    where F: Fn(&SampleIndexIterator) -> T {
        let mut v = Vec::new();
        db.lock().with_recording_playback(segment.id, &mut |playback| {
            segment.foreach(playback, |it| { v.push(f(it)); Ok(()) })
        }).unwrap();
        v
    }

    /// Tests that a `Segment` correctly can clip at the beginning and end.
    /// This is a simpler case; all sync samples means we can start on any frame.
    #[test]
    fn test_segment_clipping_with_all_sync() {
        testutil::init();
        let mut r = db::RecordingToInsert::default();
        let mut encoder = SampleIndexEncoder::new();
        for i in 1..6 {
            let duration_90k = 2 * i;
            let bytes = 3 * i;
            encoder.add_sample(duration_90k, bytes, true, &mut r);
        }
        let db = TestDb::new(RealClocks {});
        let row = db.insert_recording_from_encoder(r);
        // Time range [2, 2 + 4 + 6 + 8) means the 2nd, 3rd, 4th samples should be
        // included.
        let segment = Segment::new(&db.db.lock(), &row, 2 .. 2+4+6+8).unwrap();
        assert_eq!(&get_frames(&db.db, &segment, |it| it.duration_90k), &[4, 6, 8]);
    }

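    /// An extra illustrative check: verifies the byte range and actual media start time
    /// reported for the same clipped all-sync segment as above.
    #[test]
    fn test_segment_range_and_actual_start() {
        testutil::init();
        let mut r = db::RecordingToInsert::default();
        let mut encoder = SampleIndexEncoder::new();
        for i in 1..6 {
            encoder.add_sample(2 * i, 3 * i, true, &mut r);
        }
        let db = TestDb::new(RealClocks {});
        let row = db.insert_recording_from_encoder(r);
        // Frames start at 90 kHz offsets 0, 2, 6, 12, 20 and byte offsets 0, 3, 9, 18, 30.
        // The desired range [2, 20) starts exactly on the second key frame, so the segment
        // should cover bytes [3, 30) and report an actual start of 2.
        let segment = Segment::new(&db.db.lock(), &row, 2 .. 2+4+6+8).unwrap();
        assert_eq!(segment.actual_start_90k(), 2);
        assert_eq!(segment.sample_file_range(), 3 .. 30);
    }
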
    /// Half sync frames means starting from the last sync frame <= desired point.
    #[test]
    fn test_segment_clipping_with_half_sync() {
        testutil::init();
        let mut r = db::RecordingToInsert::default();
        let mut encoder = SampleIndexEncoder::new();
        for i in 1..6 {
            let duration_90k = 2 * i;
            let bytes = 3 * i;
            encoder.add_sample(duration_90k, bytes, (i % 2) == 1, &mut r);
        }
        let db = TestDb::new(RealClocks {});
        let row = db.insert_recording_from_encoder(r);
        // Time range [2 + 4 + 6, 2 + 4 + 6 + 8) means the 4th sample should be included.
        // The 3rd also gets pulled in because it is a sync frame and the 4th is not.
        let segment = Segment::new(&db.db.lock(), &row, 2+4+6 .. 2+4+6+8).unwrap();
        assert_eq!(&get_frames(&db.db, &segment, |it| it.duration_90k), &[6, 8]);
    }

    #[test]
    fn test_segment_clipping_with_trailing_zero() {
        testutil::init();
        let mut r = db::RecordingToInsert::default();
        let mut encoder = SampleIndexEncoder::new();
        encoder.add_sample(1, 1, true, &mut r);
        encoder.add_sample(1, 2, true, &mut r);
        encoder.add_sample(0, 3, true, &mut r);
        let db = TestDb::new(RealClocks {});
        let row = db.insert_recording_from_encoder(r);
        let segment = Segment::new(&db.db.lock(), &row, 1 .. 2).unwrap();
        assert_eq!(&get_frames(&db.db, &segment, |it| it.bytes), &[2, 3]);
    }

    /// Even if the desired duration is 0, there should still be a frame.
    #[test]
    fn test_segment_zero_desired_duration() {
        testutil::init();
        let mut r = db::RecordingToInsert::default();
        let mut encoder = SampleIndexEncoder::new();
        encoder.add_sample(1, 1, true, &mut r);
        let db = TestDb::new(RealClocks {});
        let row = db.insert_recording_from_encoder(r);
        let segment = Segment::new(&db.db.lock(), &row, 0 .. 0).unwrap();
        assert_eq!(&get_frames(&db.db, &segment, |it| it.bytes), &[1]);
    }

    /// Test a `Segment` which uses the whole recording.
    /// This takes a fast path which skips scanning the index in `new()`.
    #[test]
    fn test_segment_fast_path() {
        testutil::init();
        let mut r = db::RecordingToInsert::default();
        let mut encoder = SampleIndexEncoder::new();
        for i in 1..6 {
            let duration_90k = 2 * i;
            let bytes = 3 * i;
            encoder.add_sample(duration_90k, bytes, (i % 2) == 1, &mut r);
        }
        let db = TestDb::new(RealClocks {});
        let row = db.insert_recording_from_encoder(r);
        let segment = Segment::new(&db.db.lock(), &row, 0 .. 2+4+6+8+10).unwrap();
        assert_eq!(&get_frames(&db.db, &segment, |it| it.duration_90k), &[2, 4, 6, 8, 10]);
    }

    #[test]
    fn test_segment_fast_path_with_trailing_zero() {
        testutil::init();
        let mut r = db::RecordingToInsert::default();
        let mut encoder = SampleIndexEncoder::new();
        encoder.add_sample(1, 1, true, &mut r);
        encoder.add_sample(1, 2, true, &mut r);
        encoder.add_sample(0, 3, true, &mut r);
        let db = TestDb::new(RealClocks {});
        let row = db.insert_recording_from_encoder(r);
        let segment = Segment::new(&db.db.lock(), &row, 0 .. 2).unwrap();
        assert_eq!(&get_frames(&db.db, &segment, |it| it.bytes), &[1, 2, 3]);
    }

    // TODO: test segment error cases involving mismatch between row frames/key_frames and index.
}

#[cfg(all(test, feature="nightly"))]
mod bench {
    extern crate test;

    use super::*;

    /// Benchmarks the decoder, which is performance-critical for .mp4 serving.
    #[bench]
    fn bench_decoder(b: &mut test::Bencher) {
        let data = include_bytes!("testdata/video_sample_index.bin");
        b.bytes = data.len() as u64;
        b.iter(|| {
            let mut it = SampleIndexIterator::new();
            while it.next(data).unwrap() {}
            assert_eq!(30104460, it.pos);
            assert_eq!(5399985, it.start_90k);
        });
    }
}