pass prev duration and runs through API layer

Builds on f3ddbfe, for #32 and #59.
This commit is contained in:
Scott Lamb 2020-06-09 22:06:03 -07:00
parent f3ddbfe22a
commit 6f9612738c
5 changed files with 88 additions and 25 deletions

View File

@ -156,7 +156,7 @@ pub struct VideoSampleEntryToInsert {
}
/// A row used in `list_recordings_by_time` and `list_recordings_by_id`.
#[derive(Debug)]
#[derive(Copy, Clone, Debug)]
pub struct ListRecordingsRow {
pub start: recording::Time,
pub video_sample_entry_id: i32,
@ -171,6 +171,11 @@ pub struct ListRecordingsRow {
pub run_offset: i32,
pub open_id: u32,
pub flags: i32,
/// This is populated by `list_recordings_by_id` but not `list_recordings_by_time`.
/// (It's not included in the `recording_cover` index, so adding it to
/// `list_recordings_by_time` would be inefficient.)
pub prev_duration_and_runs: Option<(recording::Duration, i32)>,
}
/// A row used in `list_aggregated_recordings`.
@ -261,6 +266,7 @@ impl RecordingToInsert {
run_offset: self.run_offset,
open_id,
flags: self.flags | RecordingFlags::Uncommitted as i32,
prev_duration_and_runs: Some((self.prev_duration, self.prev_runs)),
}
}
}
@ -1078,7 +1084,6 @@ impl LockedDatabase {
s.cum_duration += dur;
s.cum_runs += if l.run_offset == 0 { 1 } else { 0 };
let end = l.start + dur;
info!("range={:?}", l.start .. end);
s.add_recording(l.start .. end, l.sample_file_bytes);
}
s.synced_recordings = 0;

View File

@ -73,7 +73,9 @@ const LIST_RECORDINGS_BY_ID_SQL: &'static str = r#"
recording.video_samples,
recording.video_sync_samples,
recording.video_sample_entry_id,
recording.open_id
recording.open_id,
recording.prev_duration_90k,
recording.prev_runs
from
recording
where
@ -130,7 +132,7 @@ pub(crate) fn list_recordings_by_time(
":start_time_90k": desired_time.start.0,
":end_time_90k": desired_time.end.0,
})?;
list_recordings_inner(rows, f)
list_recordings_inner(rows, false, f)
}
/// Lists the specified recordings in ascending order by id.
@ -142,10 +144,10 @@ pub(crate) fn list_recordings_by_id(
":start": CompositeId::new(stream_id, desired_ids.start).0,
":end": CompositeId::new(stream_id, desired_ids.end).0,
})?;
list_recordings_inner(rows, f)
list_recordings_inner(rows, true, f)
}
fn list_recordings_inner(mut rows: rusqlite::Rows,
fn list_recordings_inner(mut rows: rusqlite::Rows, include_prev: bool,
f: &mut dyn FnMut(db::ListRecordingsRow) -> Result<(), Error>)
-> Result<(), Error> {
while let Some(row) = rows.next()? {
@ -160,6 +162,10 @@ fn list_recordings_inner(mut rows: rusqlite::Rows,
video_sync_samples: row.get(7)?,
video_sample_entry_id: row.get(8)?,
open_id: row.get(9)?,
prev_duration_and_runs: match include_prev {
false => None,
true => Some((recording::Duration(row.get(10)?), row.get(11)?)),
},
})?;
}
Ok(())

View File

@ -401,27 +401,47 @@ same URL minus the `.txt` suffix.
Returns a `.mp4` suitable for use as a [HTML5 Media Source Extensions
media segment][media-segment]. The MIME type will be `video/mp4`, with a
`codecs` parameter as specified in [RFC 6381][rfc-6381].
`codecs` parameter as specified in [RFC 6381][rfc-6381]. Note that these
can't include edit lists, so (unlike `/view.mp4`) the caller must manually
trim undesired leading portions.
This response will include the following additional headers:
* `X-Prev-Duration`: the total duration (in 90 kHz units) of all recordings
before the first requested recording in the `s` parameter. Browser-based
callers may use this to place the media segment at the correct position in
the source buffer via `SourceBuffer.timestampOffset`.
* `X-Runs`: the cumulative number of "runs" of recordings. If this recording
starts a new run, it is included in the count. Browser-based callers may
use this to force gaps in the source buffer timeline by adjusting the
timestamp offset if desired.
* `X-Leading-Duration`: if present, the total duration (in 90 kHz units) of
additional leading video included before the caller's first requested
timestamp. This happens when the caller's requested timestamp does not
fall exactly on a key frame. Media segments can't include edit lists, so
unlike with the `/api/.../view.mp4` endpoint the caller is responsible for
trimming this portion. Browser-based callers may use
`SourceBuffer.appendWindowStart`.
Expected query parameters:
* `s` (one or more): as with the `.mp4` URL, except that media segments
can't contain edit lists so none will be generated. TODO: maybe add a
`Leading-Time:` header to indicate how many leading 90,000ths of a second
are present, so that the caller can trim it in some other way.
* `s` (one or more): as with the `.mp4` URL.
It's recommended that each `.m4s` retrieval be for at most one Moonfire NVR
recording segment for several reasons:
recording segment. The fundamental reason is that the Media Source Extension
API appears structured for adding a complete segment at a time. Large media
segments thus impose significant latency on seeking. Because of this,
Moonfire NVR also makes no effort to make multiple-segment `.m4s` requests
practical:
* The Media Source Extension API appears structured for adding a complete
segment at a time. Large media segments thus impose significant latency on
seeking.
* There is currently a hard limit of 4 GiB of data because the `.m4s` uses a
single `moof` followed by a single `mdat`; the former references the
latter with 32-bit offsets.
* There's currently no way to generate an initialization segment for more
than one video sample entry, so a `.m4s` that uses more than one video
sample entry can't be used.
* The `X-Prev-Duration` and `X-Leading-Duration` headers only describe the
first segment.
### `GET /api/cameras/<uuid>/<stream>/view.m4s.txt`
@ -445,6 +465,8 @@ be included:
of a second) of the start of the recording. Note that if the recording
is "unanchored" (as described in `GET /api/.../recordings`), the
recording's start time may change before it is completed.
* `X-Prev-Duration`: as in `/.../view.m4s`.
* `X-Runs`: as in `/.../view.m4s`.
* `X-Time-Range`: the relative start and end times of these frames within
the recording, in the same format as `REL_START_TIME` and `REL_END_TIME`
above.

View File

@ -550,6 +550,7 @@ pub struct FileBuilder {
subtitle_co64_pos: Option<usize>,
body: BodyState,
type_: Type,
prev_duration_and_cur_runs: Option<(recording::Duration, i32)>,
include_timestamp_subtitle_track: bool,
content_disposition: Option<HeaderValue>,
}
@ -736,6 +737,7 @@ impl FileBuilder {
type_: type_,
include_timestamp_subtitle_track: false,
content_disposition: None,
prev_duration_and_cur_runs: None,
}
}
@ -763,6 +765,11 @@ impl FileBuilder {
"unable to append recording {} after recording {} with trailing zero",
row.id, prev.s.id);
}
} else {
// Include the current run in this count here, as we're not propagating the
// run_offset further. A recording starts a new run iff run_offset == 0.
self.prev_duration_and_cur_runs = row.prev_duration_and_runs
.map(|(d, r)| (d, r + if row.run_offset == 0 { 1 } else { 0 }));
}
let s = Segment::new(db, &row, rel_range_90k, self.next_frame_num)?;
@ -899,6 +906,8 @@ impl FileBuilder {
etag: HeaderValue::try_from(format!("\"{}\"", etag.to_hex().as_str()))
.expect("hex string should be valid UTF-8"),
content_disposition: self.content_disposition,
prev_duration_and_cur_runs: self.prev_duration_and_cur_runs,
type_: self.type_,
})))
}
@ -1447,6 +1456,8 @@ struct FileInner {
last_modified: SystemTime,
etag: HeaderValue,
content_disposition: Option<HeaderValue>,
prev_duration_and_cur_runs: Option<(recording::Duration, i32)>,
type_: Type,
}
impl FileInner {
@ -1553,6 +1564,24 @@ impl http_serve::Entity for File {
if let Some(cd) = self.0.content_disposition.as_ref() {
hdrs.insert(http::header::CONTENT_DISPOSITION, cd.clone());
}
if self.0.type_ == Type::MediaSegment {
if let Some((d, r)) = self.0.prev_duration_and_cur_runs {
hdrs.insert(
"X-Prev-Duration",
HeaderValue::try_from(d.0.to_string()).expect("ints are valid headers"));
hdrs.insert(
"X-Runs",
HeaderValue::try_from(r.to_string()).expect("ints are valid headers"));
}
if let Some(s) = self.0.segments.first() {
let skip = s.s.desired_range_90k.start - s.s.actual_start_90k();
if skip > 0 {
hdrs.insert(
"X-Leading-Duration",
HeaderValue::try_from(skip.to_string()).expect("ints are valid headers"));
}
}
}
}
fn last_modified(&self) -> Option<SystemTime> { Some(self.0.last_modified) }
fn etag(&self) -> Option<HeaderValue> { Some(self.0.etag.clone()) }

View File

@ -440,15 +440,13 @@ impl Service {
ws: &mut tokio_tungstenite::WebSocketStream<hyper::upgrade::Upgraded>,
live: db::LiveSegment) -> Result<(), Error> {
let mut builder = mp4::FileBuilder::new(mp4::Type::MediaSegment);
let mut vse_id = None;
let mut start = None;
let mut row = None;
{
let db = self.db.lock();
let mut rows = 0;
db.list_recordings_by_id(stream_id, live.recording .. live.recording+1, &mut |r| {
rows += 1;
vse_id = Some(r.video_sample_entry_id);
start = Some(r.start);
row = Some(r);
builder.append(&db, r, live.off_90k.clone())?;
Ok(())
})?;
@ -456,29 +454,32 @@ impl Service {
bail_t!(Internal, "unable to find {:?}", live);
}
}
let vse_id = vse_id.unwrap();
let start = start.unwrap();
let row = row.unwrap();
use http_serve::Entity;
let mp4 = builder.build(self.db.clone(), self.dirs_by_stream_id.clone())?;
let mut hdrs = header::HeaderMap::new();
mp4.add_headers(&mut hdrs);
let mime_type = hdrs.get(header::CONTENT_TYPE).unwrap();
let (prev_duration, prev_runs) = row.prev_duration_and_runs.unwrap();
let hdr = format!(
"Content-Type: {}\r\n\
X-Recording-Start: {}\r\n\
X-Recording-Id: {}.{}\r\n\
X-Time-Range: {}-{}\r\n\
X-Prev-Duration: {}\r\n\
X-Runs: {}\r\n\
X-Video-Sample-Entry-Id: {}\r\n\r\n",
mime_type.to_str().unwrap(),
start.0,
row.start.0,
open_id,
live.recording,
live.off_90k.start,
live.off_90k.end,
&vse_id);
let mut v = /*Pin::from(*/hdr.into_bytes()/*)*/;
prev_duration.0,
prev_runs + if row.run_offset == 0 { 1 } else { 0 },
&row.video_sample_entry_id);
let mut v = hdr.into_bytes();
mp4.append_into_vec(&mut v).await?;
//let v = Pin::into_inner();
ws.send(tungstenite::Message::Binary(v)).await?;
Ok(())
}