trim 16 bytes from each recording::Segment

This reduces the working set by another 960 bytes for a typical one-hour recording, improving cache efficiency a bit more.

8 bytes from SampleIndexIterator:
   * reduce the three "bytes" fields to two. Doing so as "bytes_key" vs
     "bytes_nonkey" slowed it down a bit, perhaps because the "bytes" is
     needed right away and requires a branch. But "bytes" vs "bytes_other"
     seems fine. Looks like it can do this with cmovs in parallel with other
     stuff.
   * stuff "is_key" into the "i" field.

8 bytes from recording::Segment itself:
   * make "frames" and "key_frame" u16s
   * stuff "trailing_zero" into "video_sample_entry_id"
This commit is contained in:
Scott Lamb
2017-02-27 21:14:06 -08:00
parent 15609ddb8e
commit ce363162f4
4 changed files with 114 additions and 86 deletions

View File

@@ -400,7 +400,7 @@ impl Segment {
BigEndian::write_u32(&mut stts[8*frame .. 8*frame+4], 1);
BigEndian::write_u32(&mut stts[8*frame+4 .. 8*frame+8], it.duration_90k as u32);
BigEndian::write_u32(&mut stsz[4*frame .. 4*frame+4], it.bytes as u32);
-                if it.is_key {
+                if it.is_key() {
BigEndian::write_u32(&mut stss[4*key_frame .. 4*key_frame+4],
self.first_frame_num + (frame as u32));
key_frame += 1;
@@ -589,7 +589,7 @@ impl FileBuilder {
pub fn append(&mut self, db: &db::LockedDatabase, row: db::ListRecordingsRow,
rel_range_90k: Range<i32>) -> Result<(), Error> {
if let Some(prev) = self.segments.last() {
-            if prev.s.have_trailing_zero {
+            if prev.s.have_trailing_zero() {
return Err(Error::new(format!(
"unable to append recording {}/{} after recording {}/{} with trailing zero",
row.camera_id, row.id, prev.s.camera_id, prev.s.recording_id)));
@@ -1005,7 +1005,7 @@ impl FileBuilder {
// Write sample_description_index.
let i = self.video_sample_entries.iter().position(
-                        |e| e.id == s.s.video_sample_entry_id).unwrap();
+                        |e| e.id == s.s.video_sample_entry_id()).unwrap();
self.body.append_u32((i + 1) as u32);
}
})