trim the recording playback cache a bit

It had an Arc which, in hindsight, isn't necessary; the actual video index
generation is fast anyway. This saves a couple of pointers per cache entry and
the overhead of chasing them. LruCache itself also has some extra pointers on
it, but that's something to address another day.
Scott Lamb 2017-02-28 23:28:25 -08:00
parent 045ee95820
commit 618709734a
4 changed files with 132 additions and 99 deletions
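The gist of the layout change: each cache entry is now a single boxed byte slice whose first 16 bytes are the sample file UUID and whose remainder is the video index, rather than an Arc-wrapped struct with its own separately boxed index. A minimal sketch of that packing (standalone; `pack`/`unpack` are illustrative names, not functions from this commit, and the `Uuid::from_bytes(&[u8])` form matches the uuid crate version used in the diff below):

    use uuid::Uuid;

    /// Packs a uuid and a video index blob into one allocation:
    /// bytes [0..16) hold the uuid, bytes [16..) hold the index.
    fn pack(uuid: Uuid, video_index: &[u8]) -> Box<[u8]> {
        let mut v = Vec::with_capacity(16 + video_index.len());
        v.extend_from_slice(uuid.as_bytes());
        v.extend_from_slice(video_index);
        v.into_boxed_slice()
    }

    /// Borrows the packed bytes back as their two logical fields.
    fn unpack(data: &[u8]) -> (Uuid, &[u8]) {
        (Uuid::from_bytes(&data[..16]).unwrap(), &data[16..])
    }

Lookups hand out the borrowed view only while the database lock is held, which is why the accessor becomes closure-based in the diff below.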

@@ -200,6 +200,21 @@ impl rusqlite::types::FromSql for FromSqlUuid {
     }
 }
 
+/// A box with space for the uuid (initially uninitialized) and the video index.
+/// The caller must fill the uuid bytes.
+struct PlaybackData(Box<[u8]>);
+
+impl rusqlite::types::FromSql for PlaybackData {
+    fn column_result(value: rusqlite::types::ValueRef) -> rusqlite::types::FromSqlResult<Self> {
+        let blob = value.as_blob()?;
+        let len = 16 + blob.len();
+        let mut v = Vec::with_capacity(len);
+        unsafe { v.set_len(len) };
+        v[16..].copy_from_slice(blob);
+        Ok(PlaybackData(v.into_boxed_slice()))
+    }
+}
+
 /// A concrete box derived from a ISO/IEC 14496-12 section 8.5.2 VisualSampleEntry box. Describes
 /// the codec, width, height, etc.
 #[derive(Debug)]
@@ -243,11 +258,20 @@ pub struct ListAggregatedRecordingsRow {
     pub run_start_id: i32,
 }
 
-/// Select fields from the `recordings_playback` table. Retrieve with `get_recording_playback`.
+/// Select fields from the `recordings_playback` table. Retrieve with `with_recording_playback`.
 #[derive(Debug)]
-pub struct RecordingPlayback {
+pub struct RecordingPlayback<'a> {
     pub sample_file_uuid: Uuid,
-    pub video_index: Box<[u8]>,
+    pub video_index: &'a [u8],
+}
+
+impl<'a> RecordingPlayback<'a> {
+    fn new(data: &'a [u8]) -> Self {
+        RecordingPlayback {
+            sample_file_uuid: Uuid::from_bytes(&data[..16]).unwrap(),
+            video_index: &data[16..],
+        }
+    }
 }
 
 /// Bitmask in the `flags` field in the `recordings` table; see `schema.sql`.
@@ -497,7 +521,7 @@ struct State {
     cameras_by_uuid: BTreeMap<Uuid, i32>,
     video_sample_entries: BTreeMap<i32, Arc<VideoSampleEntry>>,
     list_recordings_by_time_sql: String,
-    playback_cache: RefCell<LruCache<i64, Arc<RecordingPlayback>, fnv::FnvBuildHasher>>,
+    playback_cache: RefCell<LruCache<i64, Box<[u8]>, fnv::FnvBuildHasher>>,
 }
 
 /// A high-level transaction. This manages the SQLite transaction and the matching modification to
@@ -949,15 +973,17 @@ impl LockedDatabase {
         Ok(())
     }
 
-    /// Gets a single `recording_playback` row.
+    /// Calls `f` with a single `recording_playback` row.
+    /// Note the lock is held for the duration of `f`.
     /// This uses a LRU cache to reduce the number of retrievals from the database.
-    pub fn get_recording_playback(&self, camera_id: i32, recording_id: i32)
-        -> Result<Arc<RecordingPlayback>, Error> {
+    pub fn with_recording_playback<F, R>(&self, camera_id: i32, recording_id: i32, f: F)
+        -> Result<R, Error>
+    where F: FnOnce(&RecordingPlayback) -> Result<R, Error> {
         let composite_id = composite_id(camera_id, recording_id);
         let mut cache = self.state.playback_cache.borrow_mut();
         if let Some(r) = cache.get_mut(&composite_id) {
             trace!("cache hit for recording {}/{}", camera_id, recording_id);
-            return Ok(r.clone());
+            return f(&RecordingPlayback::new(r));
         }
         trace!("cache miss for recording {}/{}", camera_id, recording_id);
         let mut stmt = self.conn.prepare_cached(GET_RECORDING_PLAYBACK_SQL)?;
@@ -965,12 +991,14 @@ impl LockedDatabase {
         if let Some(row) = rows.next() {
             let row = row?;
             let uuid: FromSqlUuid = row.get_checked(0)?;
-            let r = Arc::new(RecordingPlayback {
-                sample_file_uuid: uuid.0,
-                video_index: row.get_checked::<_, Vec<u8>>(1)?.into_boxed_slice(),
-            });
-            cache.insert(composite_id, r.clone());
-            return Ok(r);
+            let data = {
+                let mut data: PlaybackData = row.get_checked(1)?;
+                data.0[0..16].copy_from_slice(uuid.0.as_bytes());
+                data.0
+            };
+            let result = f(&RecordingPlayback::new(&data));
+            cache.insert(composite_id, data);
+            return result;
         }
         Err(Error::new(format!("no such recording {}/{}", camera_id, recording_id)))
     }
@@ -1467,7 +1495,7 @@ mod tests {
         assert_eq!(1, rows);
 
         // TODO: list_aggregated_recordings.
-        // TODO: get_recording_playback.
+        // TODO: with_recording_playback.
     }
 
     fn assert_unsorted_eq<T>(mut a: Vec<T>, mut b: Vec<T>)
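For callers of the database API, this turns the old Arc-returning getter into a closure-based accessor: the lock stays held while the closure runs, so the borrowed `video_index` remains valid without handing out a reference-counted copy. A rough caller-side sketch, mirroring the `write_video_sample_data` change further down (the function name here is illustrative, not from the commit):

    fn sample_file_uuid_for(db: &db::Database, camera_id: i32, recording_id: i32)
        -> Result<Uuid, Error> {
        // Before this commit: a reference-counted copy escaped the lock.
        //     let playback = db.lock().get_recording_playback(camera_id, recording_id)?;
        //     Ok(playback.sample_file_uuid)

        // After: borrow the cached entry inside a closure while the lock is held;
        // `playback.video_index` is only valid for the duration of the closure.
        db.lock().with_recording_playback(camera_id, recording_id,
                                          |playback| Ok(playback.sample_file_uuid))
    }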

@@ -352,11 +352,10 @@ impl Segment {
         -> Result<(), Error>
     where F: FnOnce(&[u8], SegmentLengths) -> &[u8] {
         let index = self.index.borrow_with(|| {
-            self.build_index(db)
-                .map_err(|e| {
-                    error!("Unable to build index for segment: {:?}", e);
-                    ()
-                })
+            db.lock()
+              .with_recording_playback(self.s.camera_id, self.s.recording_id,
+                                       |playback| self.build_index(playback))
+              .map_err(|e| { error!("Unable to build index for segment: {:?}", e); })
         });
         let index = match *index {
             Ok(ref b) => &b[..],
@ -380,7 +379,7 @@ impl Segment {
fn stsz(buf: &[u8], lens: SegmentLengths) -> &[u8] { &buf[lens.stts .. lens.stts + lens.stsz] } fn stsz(buf: &[u8], lens: SegmentLengths) -> &[u8] { &buf[lens.stts .. lens.stts + lens.stsz] }
fn stss(buf: &[u8], lens: SegmentLengths) -> &[u8] { &buf[lens.stts + lens.stsz ..] } fn stss(buf: &[u8], lens: SegmentLengths) -> &[u8] { &buf[lens.stts + lens.stsz ..] }
fn build_index(&self, db: &db::Database) -> Result<Box<[u8]>, Error> { fn build_index(&self, playback: &db::RecordingPlayback) -> Result<Box<[u8]>, Error> {
let s = &self.s; let s = &self.s;
let lens = self.lens(); let lens = self.lens();
let len = lens.stts + lens.stsz + lens.stss; let len = lens.stts + lens.stsz + lens.stss;
@@ -395,7 +394,7 @@ impl Segment {
         let mut frame = 0;
         let mut key_frame = 0;
         let mut last_start_and_dur = None;
-        s.foreach(db, |it| {
+        s.foreach(playback, |it| {
             last_start_and_dur = Some((it.start_90k, it.duration_90k));
             BigEndian::write_u32(&mut stts[8*frame .. 8*frame+4], 1);
             BigEndian::write_u32(&mut stts[8*frame+4 .. 8*frame+8], it.duration_90k as u32);
@@ -1152,8 +1151,11 @@ impl File {
     fn write_video_sample_data(&self, i: usize, r: Range<u64>, out: &mut io::Write)
         -> Result<(), Error> {
         let s = &self.segments[i];
-        let rec = self.db.lock().get_recording_playback(s.s.camera_id, s.s.recording_id)?;
-        let f = self.dir.open_sample_file(rec.sample_file_uuid)?;
+        let uuid = {
+            self.db.lock().with_recording_playback(s.s.camera_id, s.s.recording_id,
+                                                   |p| Ok(p.sample_file_uuid))?
+        };
+        let f = self.dir.open_sample_file(uuid)?;
         mmapfile::MmapFileSlice::new(f, s.s.sample_file_range()).write_to(r, out)
     }
 
@ -1786,8 +1788,8 @@ mod bench {
let db = TestDb::new(); let db = TestDb::new();
testutil::add_dummy_recordings_to_db(&db.db, 1); testutil::add_dummy_recordings_to_db(&db.db, 1);
let segment = {
let db = db.db.lock(); let db = db.db.lock();
let segment = {
let all_time = recording::Time(i64::min_value()) .. recording::Time(i64::max_value()); let all_time = recording::Time(i64::min_value()) .. recording::Time(i64::max_value());
let mut row = None; let mut row = None;
db.list_recordings_by_time(testutil::TEST_CAMERA_ID, all_time, |r| { db.list_recordings_by_time(testutil::TEST_CAMERA_ID, all_time, |r| {
@@ -1798,9 +1800,12 @@ mod bench {
             let rel_range_90k = 0 .. row.duration_90k;
             super::Segment::new(&db, &row, rel_range_90k, 1).unwrap()
         };
-        let v = segment.build_index(&db.db).unwrap(); // warm.
-        b.bytes = v.len() as u64; // define the benchmark performance in terms of output bytes.
-        b.iter(|| segment.build_index(&db.db).unwrap());
+        db.with_recording_playback(segment.s.camera_id, segment.s.recording_id, |playback| {
+            let v = segment.build_index(playback).unwrap(); // warm.
+            b.bytes = v.len() as u64; // define the benchmark performance in terms of output bytes.
+            b.iter(|| segment.build_index(playback).unwrap());
+            Ok(())
+        }).unwrap();
     }
 
     /// Benchmarks serving the generated part of a `.mp4` file (up to the first byte from disk).

@@ -406,7 +406,7 @@ impl Segment {
         }
 
         // Slow path. Need to iterate through the index.
-        let playback = db.get_recording_playback(self_.camera_id, self_.recording_id)?;
+        db.with_recording_playback(self_.camera_id, self_.recording_id, |playback| {
         let data = &(&playback).video_index;
         let mut it = SampleIndexIterator::new();
         if !it.next(data)? {
@@ -451,6 +451,7 @@ impl Segment {
             recording.video_sample_entry.id |
             (((it.duration_90k == 0) as i32) << 31);
         Ok(self_)
+        })
     }
 
     pub fn video_sample_entry_id(&self) -> i32 {
@@ -467,11 +468,10 @@ impl Segment {
     /// Iterates through each frame in the segment.
     /// Must be called without the database lock held; retrieves video index from the cache.
-    pub fn foreach<F>(&self, db: &db::Database, mut f: F) -> Result<(), Error>
+    pub fn foreach<F>(&self, playback: &db::RecordingPlayback, mut f: F) -> Result<(), Error>
     where F: FnMut(&SampleIndexIterator) -> Result<(), Error> {
         trace!("foreach on recording {}/{}: {} frames, actual_time_90k: {:?}",
                self.camera_id, self.recording_id, self.frames, self.actual_time_90k());
-        let playback = db.lock().get_recording_playback(self.camera_id, self.recording_id)?;
         let data = &(&playback).video_index;
         let mut it = self.begin;
         if it.uninitialized() {
@@ -634,6 +634,15 @@ mod tests {
         }
     }
 
+    fn get_frames<F, T>(db: &db::Database, segment: &Segment, f: F) -> Vec<T>
+    where F: Fn(&SampleIndexIterator) -> T {
+        let mut v = Vec::new();
+        db.lock().with_recording_playback(segment.camera_id, segment.recording_id, |playback| {
+            segment.foreach(playback, |it| { v.push(f(it)); Ok(()) })
+        }).unwrap();
+        v
+    }
+
     /// Tests that a `Segment` correctly can clip at the beginning and end.
     /// This is a simpler case; all sync samples means we can start on any frame.
     #[test]
@@ -649,9 +658,7 @@ mod tests {
         // Time range [2, 2 + 4 + 6 + 8) means the 2nd, 3rd, 4th samples should be
         // included.
         let segment = Segment::new(&db.db.lock(), &row, 2 .. 2+4+6+8).unwrap();
-        let mut v = Vec::new();
-        segment.foreach(&db.db, |it| { v.push(it.duration_90k); Ok(()) }).unwrap();
-        assert_eq!(&v, &[4, 6, 8]);
+        assert_eq!(&get_frames(&db.db, &segment, |it| it.duration_90k), &[4, 6, 8]);
     }
 
     /// Half sync frames means starting from the last sync frame <= desired point.
@@ -668,9 +675,7 @@ mod tests {
         // Time range [2 + 4 + 6, 2 + 4 + 6 + 8) means the 4th sample should be included.
         // The 3rd also gets pulled in because it is a sync frame and the 4th is not.
         let segment = Segment::new(&db.db.lock(), &row, 2+4+6 .. 2+4+6+8).unwrap();
-        let mut v = Vec::new();
-        segment.foreach(&db.db, |it| { v.push(it.duration_90k); Ok(()) }).unwrap();
-        assert_eq!(&v, &[6, 8]);
+        assert_eq!(&get_frames(&db.db, &segment, |it| it.duration_90k), &[6, 8]);
     }
 
     #[test]
@@ -682,9 +687,7 @@ mod tests {
         let db = TestDb::new();
         let row = db.create_recording_from_encoder(encoder);
         let segment = Segment::new(&db.db.lock(), &row, 1 .. 2).unwrap();
-        let mut v = Vec::new();
-        segment.foreach(&db.db, |it| { v.push(it.bytes); Ok(()) }).unwrap();
-        assert_eq!(&v, &[2, 3]);
+        assert_eq!(&get_frames(&db.db, &segment, |it| it.bytes), &[2, 3]);
     }
 
     /// Test a `Segment` which uses the whole recording.
@@ -700,9 +703,7 @@ mod tests {
         let db = TestDb::new();
         let row = db.create_recording_from_encoder(encoder);
         let segment = Segment::new(&db.db.lock(), &row, 0 .. 2+4+6+8+10).unwrap();
-        let mut v = Vec::new();
-        segment.foreach(&db.db, |it| { v.push(it.duration_90k); Ok(()) }).unwrap();
-        assert_eq!(&v, &[2, 4, 6, 8, 10]);
+        assert_eq!(&get_frames(&db.db, &segment, |it| it.duration_90k), &[2, 4, 6, 8, 10]);
     }
 
     #[test]
@@ -714,9 +715,7 @@ mod tests {
         let db = TestDb::new();
         let row = db.create_recording_from_encoder(encoder);
         let segment = Segment::new(&db.db.lock(), &row, 0 .. 2).unwrap();
-        let mut v = Vec::new();
-        segment.foreach(&db.db, |it| { v.push(it.bytes); Ok(()) }).unwrap();
-        assert_eq!(&v, &[1, 2, 3]);
+        assert_eq!(&get_frames(&db.db, &segment, |it| it.bytes), &[1, 2, 3]);
    }
 
    // TODO: test segment error cases involving mismatch between row frames/key_frames and index.

@@ -306,7 +306,7 @@ mod tests {
     }
 
     fn get_frames(db: &db::LockedDatabase, camera_id: i32, recording_id: i32) -> Vec<Frame> {
-        let rec = db.get_recording_playback(camera_id, recording_id).unwrap();
+        db.with_recording_playback(camera_id, recording_id, |rec| {
         let mut it = recording::SampleIndexIterator::new();
         let mut frames = Vec::new();
         while it.next(&rec.video_index).unwrap() {
@@ -316,7 +316,8 @@ mod tests {
                 is_key: it.is_key(),
             });
         }
-        frames
+        Ok(frames)
+        }).unwrap()
     }
 
     #[test]