trim the recording playback cache a bit

The cache held an Arc which in hindsight isn't necessary; the actual video
index generation is fast anyway. Dropping it saves a couple of pointers per
cache entry and the overhead of chasing them. LruCache itself also carries
some extra pointers, but that's something to address another day.
Scott Lamb 2017-02-28 23:28:25 -08:00
parent 045ee95820
commit 618709734a
4 changed files with 132 additions and 99 deletions
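
To make the "couple of pointers per cache entry" concrete, here is a minimal sketch, not part of the commit; `OwnedPlayback` is an illustrative stand-in for the old `Arc`-based entry:

use std::mem::size_of;
use std::sync::Arc;

// Illustrative stand-in for the old cache entry (the real one was db::RecordingPlayback).
#[allow(dead_code)]
struct OwnedPlayback {
    sample_file_uuid: [u8; 16],
    video_index: Box<[u8]>,
}

fn main() {
    // Old: the map stored an Arc, a thin pointer to a refcounted heap block
    // that in turn held a Box pointing at the separately allocated index.
    // Reading the index meant chasing two pointers across two allocations.
    println!("Arc entry:       {} bytes in the map", size_of::<Arc<OwnedPlayback>>()); // 8 on 64-bit
    // New: the map stores the uuid-prefixed index bytes as a boxed slice,
    // one fat pointer to one allocation, with no reference counts at all.
    println!("Box<[u8]> entry: {} bytes in the map", size_of::<Box<[u8]>>()); // 16 on 64-bit
}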

View File

@@ -200,6 +200,21 @@ impl rusqlite::types::FromSql for FromSqlUuid {
     }
 }
 
+/// A box with space for the uuid (initially uninitialized) and the video index.
+/// The caller must fill the uuid bytes.
+struct PlaybackData(Box<[u8]>);
+
+impl rusqlite::types::FromSql for PlaybackData {
+    fn column_result(value: rusqlite::types::ValueRef) -> rusqlite::types::FromSqlResult<Self> {
+        let blob = value.as_blob()?;
+        let len = 16 + blob.len();
+        let mut v = Vec::with_capacity(len);
+        unsafe { v.set_len(len) };
+        v[16..].copy_from_slice(blob);
+        Ok(PlaybackData(v.into_boxed_slice()))
+    }
+}
+
 /// A concrete box derived from a ISO/IEC 14496-12 section 8.5.2 VisualSampleEntry box. Describes
 /// the codec, width, height, etc.
 #[derive(Debug)]
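
The `FromSql` impl above reserves a 16-byte prefix so the uuid (which arrives from a separate column) can be copied in front of the index blob, giving one contiguous allocation per cache entry. Below is a safe, illustration-only sketch of that packed layout; `pack` is a hypothetical helper, not part of the commit:

// Builds [16 uuid bytes][video index blob] without the unsafe set_len trick;
// the real impl defers the uuid copy because it reads the columns separately.
fn pack(uuid: &[u8; 16], video_index: &[u8]) -> Box<[u8]> {
    let mut v = Vec::with_capacity(16 + video_index.len());
    v.extend_from_slice(uuid);         // bytes 0..16: sample file uuid
    v.extend_from_slice(video_index);  // bytes 16..: video index
    v.into_boxed_slice()
}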
@@ -243,11 +258,20 @@ pub struct ListAggregatedRecordingsRow {
     pub run_start_id: i32,
 }
 
-/// Select fields from the `recordings_playback` table. Retrieve with `get_recording_playback`.
+/// Select fields from the `recordings_playback` table. Retrieve with `with_recording_playback`.
 #[derive(Debug)]
-pub struct RecordingPlayback {
+pub struct RecordingPlayback<'a> {
     pub sample_file_uuid: Uuid,
-    pub video_index: Box<[u8]>,
+    pub video_index: &'a [u8],
 }
+
+impl<'a> RecordingPlayback<'a> {
+    fn new(data: &'a [u8]) -> Self {
+        RecordingPlayback {
+            sample_file_uuid: Uuid::from_bytes(&data[..16]).unwrap(),
+            video_index: &data[16..],
+        }
+    }
+}
 
 /// Bitmask in the `flags` field in the `recordings` table; see `schema.sql`.
@@ -497,7 +521,7 @@ struct State {
     cameras_by_uuid: BTreeMap<Uuid, i32>,
     video_sample_entries: BTreeMap<i32, Arc<VideoSampleEntry>>,
     list_recordings_by_time_sql: String,
-    playback_cache: RefCell<LruCache<i64, Arc<RecordingPlayback>, fnv::FnvBuildHasher>>,
+    playback_cache: RefCell<LruCache<i64, Box<[u8]>, fnv::FnvBuildHasher>>,
 }
 
 /// A high-level transaction. This manages the SQLite transaction and the matching modification to
@@ -949,15 +973,17 @@ impl LockedDatabase {
         Ok(())
     }
 
-    /// Gets a single `recording_playback` row.
+    /// Calls `f` with a single `recording_playback` row.
+    /// Note the lock is held for the duration of `f`.
    /// This uses a LRU cache to reduce the number of retrievals from the database.
-    pub fn get_recording_playback(&self, camera_id: i32, recording_id: i32)
-        -> Result<Arc<RecordingPlayback>, Error> {
+    pub fn with_recording_playback<F, R>(&self, camera_id: i32, recording_id: i32, f: F)
+        -> Result<R, Error>
+    where F: FnOnce(&RecordingPlayback) -> Result<R, Error> {
         let composite_id = composite_id(camera_id, recording_id);
         let mut cache = self.state.playback_cache.borrow_mut();
         if let Some(r) = cache.get_mut(&composite_id) {
             trace!("cache hit for recording {}/{}", camera_id, recording_id);
-            return Ok(r.clone());
+            return f(&RecordingPlayback::new(r));
         }
         trace!("cache miss for recording {}/{}", camera_id, recording_id);
         let mut stmt = self.conn.prepare_cached(GET_RECORDING_PLAYBACK_SQL)?;
@@ -965,12 +991,14 @@ impl LockedDatabase {
         if let Some(row) = rows.next() {
             let row = row?;
             let uuid: FromSqlUuid = row.get_checked(0)?;
-            let r = Arc::new(RecordingPlayback {
-                sample_file_uuid: uuid.0,
-                video_index: row.get_checked::<_, Vec<u8>>(1)?.into_boxed_slice(),
-            });
-            cache.insert(composite_id, r.clone());
-            return Ok(r);
+            let data = {
+                let mut data: PlaybackData = row.get_checked(1)?;
+                data.0[0..16].copy_from_slice(uuid.0.as_bytes());
+                data.0
+            };
+            let result = f(&RecordingPlayback::new(&data));
+            cache.insert(composite_id, data);
+            return result;
         }
         Err(Error::new(format!("no such recording {}/{}", camera_id, recording_id)))
     }
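
A usage sketch of the new callback API, mirroring the callers later in this commit. `sample_uuid` is a hypothetical helper, not part of the change; the point is that the closure copies out what it needs, since the `&RecordingPlayback` it receives borrows from the cache and is only valid (and the lock only held) while the closure runs:

use uuid::Uuid;

// Hypothetical caller; LockedDatabase, RecordingPlayback, and Error are the
// types defined in this file.
fn sample_uuid(db: &LockedDatabase, camera_id: i32, recording_id: i32)
               -> Result<Uuid, Error> {
    db.with_recording_playback(camera_id, recording_id, |p| Ok(p.sample_file_uuid))
}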
@@ -1467,7 +1495,7 @@ mod tests {
         assert_eq!(1, rows);
 
         // TODO: list_aggregated_recordings.
-        // TODO: get_recording_playback.
+        // TODO: with_recording_playback.
     }
 
     fn assert_unsorted_eq<T>(mut a: Vec<T>, mut b: Vec<T>)

View File

@@ -352,11 +352,10 @@
                       -> Result<(), Error>
     where F: FnOnce(&[u8], SegmentLengths) -> &[u8] {
         let index = self.index.borrow_with(|| {
-            self.build_index(db)
-                .map_err(|e| {
-                    error!("Unable to build index for segment: {:?}", e);
-                    ()
-                })
+            db.lock()
+                .with_recording_playback(self.s.camera_id, self.s.recording_id,
+                                         |playback| self.build_index(playback))
+                .map_err(|e| { error!("Unable to build index for segment: {:?}", e); })
         });
         let index = match *index {
             Ok(ref b) => &b[..],
@@ -380,7 +379,7 @@
     fn stsz(buf: &[u8], lens: SegmentLengths) -> &[u8] { &buf[lens.stts .. lens.stts + lens.stsz] }
     fn stss(buf: &[u8], lens: SegmentLengths) -> &[u8] { &buf[lens.stts + lens.stsz ..] }
 
-    fn build_index(&self, db: &db::Database) -> Result<Box<[u8]>, Error> {
+    fn build_index(&self, playback: &db::RecordingPlayback) -> Result<Box<[u8]>, Error> {
         let s = &self.s;
         let lens = self.lens();
         let len = lens.stts + lens.stsz + lens.stss;
@@ -395,7 +394,7 @@
         let mut frame = 0;
         let mut key_frame = 0;
         let mut last_start_and_dur = None;
-        s.foreach(db, |it| {
+        s.foreach(playback, |it| {
             last_start_and_dur = Some((it.start_90k, it.duration_90k));
             BigEndian::write_u32(&mut stts[8*frame .. 8*frame+4], 1);
             BigEndian::write_u32(&mut stts[8*frame+4 .. 8*frame+8], it.duration_90k as u32);
@@ -1152,8 +1151,11 @@
     fn write_video_sample_data(&self, i: usize, r: Range<u64>, out: &mut io::Write)
                                -> Result<(), Error> {
         let s = &self.segments[i];
-        let rec = self.db.lock().get_recording_playback(s.s.camera_id, s.s.recording_id)?;
-        let f = self.dir.open_sample_file(rec.sample_file_uuid)?;
+        let uuid = {
+            self.db.lock().with_recording_playback(s.s.camera_id, s.s.recording_id,
+                                                   |p| Ok(p.sample_file_uuid))?
+        };
+        let f = self.dir.open_sample_file(uuid)?;
         mmapfile::MmapFileSlice::new(f, s.s.sample_file_range()).write_to(r, out)
     }
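
One detail worth noting in the hunk above: only the `Copy` uuid escapes the closure, so the database lock is already released by the time the sample file is opened and mmapped. Below is a self-contained sketch of that lock-scoping pattern, with hypothetical names throughout:

use std::sync::Mutex;

// Hypothetical stand-ins; these illustrate the lock-scoping pattern only.
struct Row { uuid: [u8; 16] }
struct Db { rows: Mutex<Vec<Row>> }

impl Db {
    fn with_row<F, R>(&self, i: usize, f: F) -> Result<R, String>
    where F: FnOnce(&Row) -> Result<R, String> {
        let rows = self.rows.lock().map_err(|_| "poisoned".to_string())?;
        let row = rows.get(i).ok_or("no such row")?;
        f(row)  // the lock guard lives exactly as long as this call
    }
}

fn open_by_uuid(db: &Db) -> Result<(), String> {
    // Copy the 16-byte uuid out of the closure...
    let uuid = db.with_row(0, |r| Ok(r.uuid))?;
    // ...so the mutex is already released before any slow I/O starts here.
    let _ = uuid;
    Ok(())
}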
@@ -1786,8 +1788,8 @@
         let db = TestDb::new();
         testutil::add_dummy_recordings_to_db(&db.db, 1);
+        let db = db.db.lock();
         let segment = {
-            let db = db.db.lock();
             let all_time = recording::Time(i64::min_value()) .. recording::Time(i64::max_value());
             let mut row = None;
             db.list_recordings_by_time(testutil::TEST_CAMERA_ID, all_time, |r| {
@@ -1798,9 +1800,12 @@
             let rel_range_90k = 0 .. row.duration_90k;
             super::Segment::new(&db, &row, rel_range_90k, 1).unwrap()
         };
-        let v = segment.build_index(&db.db).unwrap();  // warm.
-        b.bytes = v.len() as u64;  // define the benchmark performance in terms of output bytes.
-        b.iter(|| segment.build_index(&db.db).unwrap());
+        db.with_recording_playback(segment.s.camera_id, segment.s.recording_id, |playback| {
+            let v = segment.build_index(playback).unwrap();  // warm.
+            b.bytes = v.len() as u64;  // define the benchmark performance in terms of output bytes.
+            b.iter(|| segment.build_index(playback).unwrap());
+            Ok(())
+        }).unwrap();
     }
 
     /// Benchmarks serving the generated part of a `.mp4` file (up to the first byte from disk).

View File

@@ -406,51 +406,52 @@
         }
 
         // Slow path. Need to iterate through the index.
-        let playback = db.get_recording_playback(self_.camera_id, self_.recording_id)?;
-        let data = &(&playback).video_index;
-        let mut it = SampleIndexIterator::new();
-        if !it.next(data)? {
-            return Err(Error{description: String::from("no index"),
-                             cause: None});
-        }
-        if !it.is_key() {
-            return Err(Error{description: String::from("not key frame"),
-                             cause: None});
-        }
-
-        // Stop when hitting a frame with this start time.
-        // Going until the end of the recording is special-cased because there can be a trailing
-        // frame of zero duration. It's unclear exactly how this should be handled, but let's
-        // include it for consistency with the fast path. It'd be bizarre to have it included or
-        // not based on desired_range_90k.start.
-        let end_90k = if self_.desired_range_90k.end == self_.actual_end_90k {
-            i32::max_value()
-        } else {
-            self_.desired_range_90k.end
-        };
-
-        loop {
-            if it.start_90k <= self_.desired_range_90k.start && it.is_key() {
-                // new start candidate.
-                self_.begin = it;
-                self_.frames = 0;
-                self_.key_frames = 0;
-            }
-            if it.start_90k >= end_90k {
-                break;
-            }
-            self_.frames += 1;
-            self_.key_frames += it.is_key() as u16;
-            if !it.next(data)? {
-                break;
-            }
-        }
-        self_.file_end = it.pos;
-        self_.actual_end_90k = it.start_90k;
-        self_.video_sample_entry_id_and_trailing_zero =
-            recording.video_sample_entry.id |
-            (((it.duration_90k == 0) as i32) << 31);
-        Ok(self_)
+        db.with_recording_playback(self_.camera_id, self_.recording_id, |playback| {
+            let data = &(&playback).video_index;
+            let mut it = SampleIndexIterator::new();
+            if !it.next(data)? {
+                return Err(Error{description: String::from("no index"),
+                                 cause: None});
+            }
+            if !it.is_key() {
+                return Err(Error{description: String::from("not key frame"),
+                                 cause: None});
+            }
+
+            // Stop when hitting a frame with this start time.
+            // Going until the end of the recording is special-cased because there can be a trailing
+            // frame of zero duration. It's unclear exactly how this should be handled, but let's
+            // include it for consistency with the fast path. It'd be bizarre to have it included or
+            // not based on desired_range_90k.start.
+            let end_90k = if self_.desired_range_90k.end == self_.actual_end_90k {
+                i32::max_value()
+            } else {
+                self_.desired_range_90k.end
+            };
+
+            loop {
+                if it.start_90k <= self_.desired_range_90k.start && it.is_key() {
+                    // new start candidate.
+                    self_.begin = it;
+                    self_.frames = 0;
+                    self_.key_frames = 0;
+                }
+                if it.start_90k >= end_90k {
+                    break;
+                }
+                self_.frames += 1;
+                self_.key_frames += it.is_key() as u16;
+                if !it.next(data)? {
+                    break;
+                }
+            }
+            self_.file_end = it.pos;
+            self_.actual_end_90k = it.start_90k;
+            self_.video_sample_entry_id_and_trailing_zero =
+                recording.video_sample_entry.id |
+                (((it.duration_90k == 0) as i32) << 31);
+            Ok(self_)
+        })
     }
 
     pub fn video_sample_entry_id(&self) -> i32 {
@@ -467,11 +468,10 @@
 
     /// Iterates through each frame in the segment.
     /// Must be called without the database lock held; retrieves video index from the cache.
-    pub fn foreach<F>(&self, db: &db::Database, mut f: F) -> Result<(), Error>
+    pub fn foreach<F>(&self, playback: &db::RecordingPlayback, mut f: F) -> Result<(), Error>
     where F: FnMut(&SampleIndexIterator) -> Result<(), Error> {
         trace!("foreach on recording {}/{}: {} frames, actual_time_90k: {:?}",
                self.camera_id, self.recording_id, self.frames, self.actual_time_90k());
-        let playback = db.lock().get_recording_playback(self.camera_id, self.recording_id)?;
         let data = &(&playback).video_index;
         let mut it = self.begin;
         if it.uninitialized() {
@@ -634,6 +634,15 @@
         }
     }
 
+    fn get_frames<F, T>(db: &db::Database, segment: &Segment, f: F) -> Vec<T>
+    where F: Fn(&SampleIndexIterator) -> T {
+        let mut v = Vec::new();
+        db.lock().with_recording_playback(segment.camera_id, segment.recording_id, |playback| {
+            segment.foreach(playback, |it| { v.push(f(it)); Ok(()) })
+        }).unwrap();
+        v
+    }
+
     /// Tests that a `Segment` correctly can clip at the beginning and end.
     /// This is a simpler case; all sync samples means we can start on any frame.
     #[test]
@@ -649,9 +658,7 @@
         // Time range [2, 2 + 4 + 6 + 8) means the 2nd, 3rd, 4th samples should be
         // included.
         let segment = Segment::new(&db.db.lock(), &row, 2 .. 2+4+6+8).unwrap();
-        let mut v = Vec::new();
-        segment.foreach(&db.db, |it| { v.push(it.duration_90k); Ok(()) }).unwrap();
-        assert_eq!(&v, &[4, 6, 8]);
+        assert_eq!(&get_frames(&db.db, &segment, |it| it.duration_90k), &[4, 6, 8]);
     }
 
     /// Half sync frames means starting from the last sync frame <= desired point.
@@ -668,9 +675,7 @@
         // Time range [2 + 4 + 6, 2 + 4 + 6 + 8) means the 4th sample should be included.
         // The 3rd also gets pulled in because it is a sync frame and the 4th is not.
         let segment = Segment::new(&db.db.lock(), &row, 2+4+6 .. 2+4+6+8).unwrap();
-        let mut v = Vec::new();
-        segment.foreach(&db.db, |it| { v.push(it.duration_90k); Ok(()) }).unwrap();
-        assert_eq!(&v, &[6, 8]);
+        assert_eq!(&get_frames(&db.db, &segment, |it| it.duration_90k), &[6, 8]);
     }
 
     #[test]
@@ -682,9 +687,7 @@
         let db = TestDb::new();
         let row = db.create_recording_from_encoder(encoder);
         let segment = Segment::new(&db.db.lock(), &row, 1 .. 2).unwrap();
-        let mut v = Vec::new();
-        segment.foreach(&db.db, |it| { v.push(it.bytes); Ok(()) }).unwrap();
-        assert_eq!(&v, &[2, 3]);
+        assert_eq!(&get_frames(&db.db, &segment, |it| it.bytes), &[2, 3]);
     }
 
     /// Test a `Segment` which uses the whole recording.
@@ -700,9 +703,7 @@
         let db = TestDb::new();
         let row = db.create_recording_from_encoder(encoder);
         let segment = Segment::new(&db.db.lock(), &row, 0 .. 2+4+6+8+10).unwrap();
-        let mut v = Vec::new();
-        segment.foreach(&db.db, |it| { v.push(it.duration_90k); Ok(()) }).unwrap();
-        assert_eq!(&v, &[2, 4, 6, 8, 10]);
+        assert_eq!(&get_frames(&db.db, &segment, |it| it.duration_90k), &[2, 4, 6, 8, 10]);
     }
 
     #[test]
@@ -714,9 +715,7 @@
         let db = TestDb::new();
         let row = db.create_recording_from_encoder(encoder);
         let segment = Segment::new(&db.db.lock(), &row, 0 .. 2).unwrap();
-        let mut v = Vec::new();
-        segment.foreach(&db.db, |it| { v.push(it.bytes); Ok(()) }).unwrap();
-        assert_eq!(&v, &[1, 2, 3]);
+        assert_eq!(&get_frames(&db.db, &segment, |it| it.bytes), &[1, 2, 3]);
     }
 
     // TODO: test segment error cases involving mismatch between row frames/key_frames and index.

View File

@@ -306,17 +306,18 @@
     }
 
     fn get_frames(db: &db::LockedDatabase, camera_id: i32, recording_id: i32) -> Vec<Frame> {
-        let rec = db.get_recording_playback(camera_id, recording_id).unwrap();
-        let mut it = recording::SampleIndexIterator::new();
-        let mut frames = Vec::new();
-        while it.next(&rec.video_index).unwrap() {
-            frames.push(Frame{
-                start_90k: it.start_90k,
-                duration_90k: it.duration_90k,
-                is_key: it.is_key(),
-            });
-        }
-        frames
+        db.with_recording_playback(camera_id, recording_id, |rec| {
+            let mut it = recording::SampleIndexIterator::new();
+            let mut frames = Vec::new();
+            while it.next(&rec.video_index).unwrap() {
+                frames.push(Frame{
+                    start_90k: it.start_90k,
+                    duration_90k: it.duration_90k,
+                    is_key: it.is_key(),
+                });
+            }
+            Ok(frames)
+        }).unwrap()
     }
 
     #[test]