reorganize /recordings JSON response

I want to start returning the pixel aspect ratio of each video sample
entry. It's silly to duplicate it for each returned recording, so
let's instead return a videoSampleEntryId per recording and put the
full information about each VSE in the response only once.
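
Roughly, the reshaping looks like this (a sketch only, in the UI's
JavaScript; the field names follow the API document change below, and
the values are illustrative):

```
// Before: videoSampleEntrySha1/Width/Height were repeated on every recording.
// After: each recording carries only a key into a shared videoSampleEntries map.
const response = {
  recordings: [
    {startId: 1, videoSampleEntryId: '1' /* ...other per-recording fields... */},
  ],
  videoSampleEntries: {
    '1': {sha1: '81710c9c51a02cc95439caa8dd3bc12b77ffe767', width: 1280, height: 720},
  },
  continue: '<opaque blob>',
};
```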

This change doesn't actually handle pixel aspect ratio server-side yet.
Most likely that will require a new schema version, to store it as a
new column in the database. Codec-specific logic in the database layer
is awkward and I'd like to avoid it. I did a similar schema change to
add the rfc6381_codec.

I also adjusted ui-src/lib/models/Recording.js in a few ways:

* fixed a couple of mismatches between its field names and the keys defined
  in the API. Consistency aids understanding.
* dropped all the getters in favor of just setting the fields (with
  type annotations) as described here:
  https://google.github.io/styleguide/jsguide.html#features-classes-fields
* where the wire format uses undefined (to save space), translated it to
  a more natural null or false.
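
The second and third points, condensed (the class names here are
hypothetical; the actual change is in the Recording.js diff below):

```
// Before: keep the raw JSON and expose it through getters.
class RecordingWithGetters {
  constructor(json) {
    this.json_ = json;
  }
  get endId() {
    return this.json_.endId;  // undefined when absent on the wire
  }
}

// After: set annotated fields once in the constructor, translating
// absent wire values to null or false.
class RecordingWithFields {
  constructor(json) {
    /** @const {?number} */
    this.endId = json.endId !== undefined ? json.endId : null;
    /** @const {!boolean} */
    this.growing = json.growing || false;
  }
}
```
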
Scott Lamb 2020-03-13 21:20:51 -07:00
parent 317a620e6e
commit 3968bfe912
8 changed files with 141 additions and 136 deletions

View File

@@ -246,7 +246,7 @@ Example response:
### `GET /api/cameras/<uuid>/<stream>/recordings`
Returns information about recordings, in descending order.
Returns information about recordings.
Valid request parameters:
@@ -259,8 +259,8 @@ Valid request parameters:
server should return a `continue` key which is expected to be returned on
following requests.)
In the property `recordings`, returns a list of recordings in arbitrary order.
Each recording object has the following properties:
Returns a JSON object. Under the key `recordings` is an array of recordings in
arbitrary order. Each recording object has the following properties:
* `startId`. The id of this recording, which can be used with `/view.mp4`
to retrieve its content.
@@ -289,13 +289,24 @@ Each recording object has the following properties:
* `endTime90k`: the end time of the given recording. Note this may be
greater than the requested `endTime90k` if this recording was ongoing at
the requested time.
* `sampleFileBytes`
* `videoSampleEntrySha1`
* `videoSampleEntryWidth`
* `videoSampleEntryHeight`
* `videoSampleEntryId`: a reference to an entry in the `videoSampleEntries`
map. These ids are strings so that they can serve as JSON object keys.
* `videoSamples`: the number of samples (aka frames) of video in this
recording.
Under the property `videoSampleEntries`, an object mapping ids to objects with
the following properties:
* `sha1`: a SHA-1 hash of the ISO/IEC 14496-12 section 8.5.2
`VisualSampleEntry` bytes. The actual bytes can be retrieved, wrapped into
an initialization segment `.mp4`, at the URL `/api/init/<sha1>.mp4`.
* `width`: the stored width in pixels.
* `height`: the stored height in pixels.
* `pixelHSpacing`: the relative width of a pixel, as in an ISO/IEC 14496-12
  section 12.1.4.3 `PixelAspectRatioBox`. If absent, assumed to be 1.
* `pixelVSpacing`: the relative height of a pixel, as in an ISO/IEC 14496-12
  section 12.1.4.3 `PixelAspectRatioBox`. If absent, assumed to be 1.
Example request URI (with added whitespace between parameters):
```
@@ -314,9 +325,7 @@ Example response:
"startTime90k": 130985461191810,
"endTime90k": 130985466591817,
"sampleFileBytes": 8405564,
"videoSampleEntrySha1": "81710c9c51a02cc95439caa8dd3bc12b77ffe767",
"videoSampleEntryWidth": 1280,
"videoSampleEntryHeight": 720,
"videoSampleEntryId": "1",
},
{
"endTime90k": 130985461191810,
@@ -324,7 +333,13 @@ Example response:
},
...
],
"continue": "<opaque blob>",
"videoSampleEntries": {
"1": {
"sha1": "81710c9c51a02cc95439caa8dd3bc12b77ffe767",
"width": 1280,
"height": 720
}
},
}
```
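
The new pixelHSpacing/pixelVSpacing properties determine the display
aspect ratio of the stored frames. A sketch of that computation (the
helper below is illustrative only, not part of this commit or the API):

```
// Display aspect ratio implied by a video sample entry, following
// ISO/IEC 14496-12 PixelAspectRatioBox semantics: pixels are
// pixelHSpacing wide by pixelVSpacing tall, relatively.
function displayAspectRatio(vse) {
  const h = vse.pixelHSpacing !== undefined ? vse.pixelHSpacing : 1;
  const v = vse.pixelVSpacing !== undefined ? vse.pixelVSpacing : 1;
  return (vse.width * h) / (vse.height * v);
}

// Square pixels: 1280x720 displays at 16:9.
displayAspectRatio({width: 1280, height: 720});  // => 1.777…

// Anamorphic 704x480 with pixelHSpacing=40, pixelVSpacing=33 also displays at 16:9.
displayAspectRatio({width: 704, height: 480, pixelHSpacing: 40, pixelVSpacing: 33});  // => 1.777…
```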

View File

@@ -372,9 +372,31 @@ impl<'a> TopLevel<'a> {
}
}
#[derive(Debug, Serialize)]
pub struct ListRecordings {
#[derive(Serialize)]
#[serde(rename_all="camelCase")]
pub struct ListRecordings<'a> {
pub recordings: Vec<Recording>,
// There are likely very few video sample entries for a given stream in a given day, so
// representing with an unordered Vec (and having O(n) insert-if-absent) is probably better
// than dealing with a HashSet's code bloat.
#[serde(serialize_with = "ListRecordings::serialize_video_sample_entries")]
pub video_sample_entries: (&'a db::LockedDatabase, Vec<i32>),
}
impl<'a> ListRecordings<'a> {
fn serialize_video_sample_entries<S>(video_sample_entries: &(&db::LockedDatabase, Vec<i32>),
serializer: S) -> Result<S::Ok, S::Error>
where S: Serializer {
let (db, ref v) = *video_sample_entries;
let mut map = serializer.serialize_map(Some(v.len()))?;
for id in v {
map.serialize_entry(
id,
&VideoSampleEntry::from(&db.video_sample_entries_by_id().get(id).unwrap()))?;
}
map.end()
}
}
#[derive(Debug, Serialize)]
@@ -384,7 +406,7 @@ pub struct Recording {
pub end_time_90k: i64,
pub sample_file_bytes: i64,
pub video_samples: i64,
pub video_sample_entry_sha1: String,
pub video_sample_entry_id: String,
pub start_id: i32,
pub open_id: u32,
@@ -393,9 +415,25 @@ pub struct Recording {
#[serde(skip_serializing_if = "Option::is_none")]
pub end_id: Option<i32>,
pub video_sample_entry_width: u16,
pub video_sample_entry_height: u16,
#[serde(skip_serializing_if = "Not::not")]
pub growing: bool,
}
#[derive(Debug, Serialize)]
#[serde(rename_all="camelCase")]
pub struct VideoSampleEntry {
pub sha1: String,
pub width: u16,
pub height: u16,
}
impl VideoSampleEntry {
fn from(e: &db::VideoSampleEntry) -> Self {
Self {
sha1: base::strutil::hex(&e.sha1),
width: e.width,
height: e.height,
}
}
}

View File

@@ -344,35 +344,36 @@ impl ServiceInner {
}
(time, split)
};
let mut out = json::ListRecordings{recordings: Vec::new()};
{
let db = self.db.lock();
let camera = db.get_camera(uuid)
.ok_or_else(|| plain_response(StatusCode::NOT_FOUND,
format!("no such camera {}", uuid)))?;
let stream_id = camera.streams[type_.index()]
.ok_or_else(|| plain_response(StatusCode::NOT_FOUND,
format!("no such stream {}/{}", uuid, type_)))?;
db.list_aggregated_recordings(stream_id, r, split, &mut |row| {
let end = row.ids.end - 1; // in api, ids are inclusive.
let vse = db.video_sample_entries_by_id().get(&row.video_sample_entry_id).unwrap();
out.recordings.push(json::Recording {
start_id: row.ids.start,
end_id: if end == row.ids.start { None } else { Some(end) },
start_time_90k: row.time.start.0,
end_time_90k: row.time.end.0,
sample_file_bytes: row.sample_file_bytes,
open_id: row.open_id,
first_uncommitted: row.first_uncommitted,
video_samples: row.video_samples,
video_sample_entry_width: vse.width,
video_sample_entry_height: vse.height,
video_sample_entry_sha1: strutil::hex(&vse.sha1),
growing: row.growing,
});
Ok(())
}).map_err(internal_server_err)?;
}
let db = self.db.lock();
let mut out = json::ListRecordings {
recordings: Vec::new(),
video_sample_entries: (&db, Vec::new()),
};
let camera = db.get_camera(uuid)
.ok_or_else(|| plain_response(StatusCode::NOT_FOUND,
format!("no such camera {}", uuid)))?;
let stream_id = camera.streams[type_.index()]
.ok_or_else(|| plain_response(StatusCode::NOT_FOUND,
format!("no such stream {}/{}", uuid, type_)))?;
db.list_aggregated_recordings(stream_id, r, split, &mut |row| {
let end = row.ids.end - 1; // in api, ids are inclusive.
out.recordings.push(json::Recording {
start_id: row.ids.start,
end_id: if end == row.ids.start { None } else { Some(end) },
start_time_90k: row.time.start.0,
end_time_90k: row.time.end.0,
sample_file_bytes: row.sample_file_bytes,
open_id: row.open_id,
first_uncommitted: row.first_uncommitted,
video_samples: row.video_samples,
video_sample_entry_id: row.video_sample_entry_id.to_string(),
growing: row.growing,
});
if !out.video_sample_entries.1.contains(&row.video_sample_entry_id) {
out.video_sample_entries.1.push(row.video_sample_entry_id);
}
Ok(())
}).map_err(internal_server_err)?;
serve_json(req, &out)
}

View File

@@ -198,7 +198,7 @@ function fetch(selectedRange, videoLength) {
'Fetched results for "%s-%s" > updating recordings',
streamView.camera.shortName, streamView.streamType
);
streamView.recordingsJSON = data.recordings;
streamView.recordingsJSON = data;
})
.catch(function(data, status, err) {
console.error(url, ' load failed: ', status, ': ', err);

View File

@@ -111,10 +111,10 @@ export default class MoonfireAPI {
videoPlayUrl(cameraUUID, streamType, recording, trimmedRange,
timestampTrack = true) {
let sParam = recording.startId;
if (recording.endId !== undefined) {
if (recording.endId !== null) {
sParam += '-' + recording.endId;
}
if (recording.firstUncommitted !== undefined) {
if (recording.firstUncommitted !== null) {
sParam += '@' + recording.openId; // disambiguate.
}
let rel = '';
@@ -124,7 +124,7 @@ export default class MoonfireAPI {
rel += '-';
if (recording.endTime90k > trimmedRange.endTime90k) {
rel += trimmedRange.endTime90k - recording.startTime90k;
} else if (recording.growing !== undefined) {
} else if (recording.growing) {
// View just the portion described by recording.
rel += recording.endTime90k - recording.startTime90k;
}

View File

@@ -39,51 +39,46 @@ export default class Recording {
/**
* Accept JSON data to be encapsulated
*
* @param {object} recordingJson JSON for a recording
* @param {object} recordingJson JSON for a recording
* @param {object} videoSampleEntryJson JSON for a video sample entry
*/
constructor(recordingJson) {
this.json_ = recordingJson;
}
constructor(recordingJson, videoSampleEntryJson) {
/** @const {!number} */
this.startId = recordingJson.startId;
/** @return {Number} */
get startId() {
return this.json_.startId;
}
/** @const {?number} */
this.endId = recordingJson.endId !== undefined ? recordingJson.endId : null;
/** @return {Number} */
get endId() {
return this.json_.endId;
}
/** @const {!number} */
this.openId = recordingJson.openId;
/** @return {Number} */
get openId() {
return this.json_.openId;
}
/** @const {?number} */
this.firstUncommitted = recordingJson.firstUncommitted !== undefined
? recordingJson.firstUncommitted : null;
/** @return {Number} or undefined */
get firstUncommitted() {
return this.json_.firstUncommitted;
}
/** @const {!boolean} */
this.growing = recordingJson.growing || false;
/** @return {Boolean} or undefined */
get growing() {
return this.json_.growing;
}
/** @const {!number} */
this.startTime90k = recordingJson.startTime90k;
/**
* Return start time of recording in 90k units.
* @return {Number} Time in units of 90k parts of a second
*/
get startTime90k() {
return this.json_.startTime90k;
}
/** @const {!number} */
this.endTime90k = recordingJson.endTime90k;
/**
* Return end time of recording in 90k units.
* @return {Number} Time in units of 90k parts of a second
*/
get endTime90k() {
return this.json_.endTime90k;
/** @const {!number} */
this.sampleFileBytes = recordingJson.sampleFileBytes;
/** @const {!number} */
this.videoSamples = recordingJson.videoSamples;
/** @const {!string} */
this.videoSampleEntrySha1 = videoSampleEntryJson.sha1;
/** @const {!number} */
this.videoSampleEntryWidth = videoSampleEntryJson.width;
/** @const {!number} */
this.videoSampleEntryHeight = videoSampleEntryJson.height;
}
/**
@@ -91,7 +86,7 @@ export default class Recording {
* @return {Number} Time in units of 90k parts of a second
*/
get duration90k() {
return this.json_.endTime90k - this.json_.startTime90k;
return this.endTime90k - this.startTime90k;
}
/**
@@ -112,49 +107,4 @@ export default class Recording {
get duration() {
return this.duration90k / 90000;
}
/**
* Get the number of bytes used by sample storage.
*
* @return {Number} Total bytes used
*/
get sampleFileBytes() {
return this.json_.sampleFileBytes;
}
/**
* Get the number of video samples (frames) for the recording.
*
* @return {Number} Total bytes used
*/
get frameCount() {
return this.json_.videoSamples;
}
/**
* Get the hash for the video samples.
*
* @return {String} Hash
*/
get videoSampleEntryHash() {
return this.json_.videoSampleEntrySha1;
}
/**
* Get the width of the frame(s) of the video samples.
*
* @return {Number} Width in pixels
*/
get videoSampleEntryWidth() {
return this.json_.videoSampleEntryWidth;
}
/**
* Get the height of the frame(s) of the video samples.
*
* @return {Number} Height in pixels
*/
get videoSampleEntryHeight() {
return this.json_.videoSampleEntryHeight;
}
}

View File

@@ -91,7 +91,7 @@ export default class RecordingFormatter {
recording.videoSampleEntryWidth +
'x' +
recording.videoSampleEntryHeight,
frameRate: frameRateFmt.format(recording.frameCount / duration),
frameRate: frameRateFmt.format(recording.videoSamples / duration),
size: sizeFmt.format(recording.sampleFileBytes / 1048576) + ' MB',
rate:
sizeFmt.format(recording.sampleFileBytes / duration * 0.000008) +

View File

@@ -256,13 +256,14 @@ export default class RecordingsView {
*
* The data is expected to be an array with recording objects.
*
* @param {String} recordingsJSON JSON data (array)
* @param {object} recordingsJSON JSON data (object)
*/
set recordingsJSON(recordingsJSON) {
this.showLoading = false;
// Store as model objects
this._recordings = recordingsJSON.map(function(r) {
return new Recording(r);
this._recordings = recordingsJSON.recordings.map(function(r) {
const vse = recordingsJSON.videoSampleEntries[r.videoSampleEntryId];
return new Recording(r, vse);
});
const tbody = this._element;