new recording_integrity table

A couple of rarely-used fields move here, and I expect I'll add more.
Redo the check command to just put everything in RAM for simplicity.
This commit is contained in:
Scott Lamb 2018-03-09 07:31:48 -08:00
parent 03809eee8e
commit f81d699c8c
5 changed files with 256 additions and 154 deletions

View File

@ -49,17 +49,19 @@ pub fn run(conn: &rusqlite::Connection, opts: &Options) -> Result<(), Error> {
let db_uuid = raw::get_db_uuid(&conn)?; let db_uuid = raw::get_db_uuid(&conn)?;
// Scan directories. // Scan directories.
let mut files_by_dir = FnvHashMap::default(); let mut streams_by_dir: FnvHashMap<i32, Dir> = FnvHashMap::default();
{ {
let mut stmt = conn.prepare(r#" let mut dir_stmt = conn.prepare(r#"
select d.id, d.path, d.uuid, d.last_complete_open_id, o.uuid select d.id, d.path, d.uuid, d.last_complete_open_id, o.uuid
from sample_file_dir d left join open o on (d.last_complete_open_id = o.id) from sample_file_dir d left join open o on (d.last_complete_open_id = o.id)
"#)?; "#)?;
let mut rows = stmt.query(&[])?; let mut garbage_stmt = conn.prepare_cached(
"select composite_id from garbage where sample_file_dir_id = ?")?;
let mut rows = dir_stmt.query(&[])?;
while let Some(row) = rows.next() { while let Some(row) = rows.next() {
let row = row?; let row = row?;
let mut meta = schema::DirMeta::default(); let mut meta = schema::DirMeta::default();
let dir_id = row.get_checked(0)?; let dir_id: i32 = row.get_checked(0)?;
let dir_path: String = row.get_checked(1)?; let dir_path: String = row.get_checked(1)?;
let dir_uuid: FromSqlUuid = row.get_checked(2)?; let dir_uuid: FromSqlUuid = row.get_checked(2)?;
let open_id = row.get_checked(3)?; let open_id = row.get_checked(3)?;
@ -74,32 +76,47 @@ pub fn run(conn: &rusqlite::Connection, opts: &Options) -> Result<(), Error> {
// Open the directory (checking its metadata) and hold it open (for the lock). // Open the directory (checking its metadata) and hold it open (for the lock).
let _dir = dir::SampleFileDir::open(&dir_path, &meta)?; let _dir = dir::SampleFileDir::open(&dir_path, &meta)?;
let files = read_dir(&dir_path, opts)?; let mut streams = read_dir(&dir_path, opts)?;
files_by_dir.insert(dir_id, files); let mut rows = garbage_stmt.query(&[&dir_id])?;
while let Some(row) = rows.next() {
let row = row?;
let id = CompositeId(row.get_checked(0)?);
let s = streams.entry(id.stream()).or_insert_with(Stream::default);
s.entry(id.recording()).or_insert_with(Recording::default).garbage_row = true;
}
streams_by_dir.insert(dir_id, streams);
} }
} }
// Scan streams. // Scan known streams.
{ {
let mut stmt = conn.prepare(r#" let mut stmt = conn.prepare(r#"
select id, sample_file_dir_id from stream select id, sample_file_dir_id from stream where sample_file_dir_id is not null
"#)?; "#)?;
let mut rows = stmt.query(&[])?; let mut rows = stmt.query(&[])?;
while let Some(row) = rows.next() { while let Some(row) = rows.next() {
let row = row?; let row = row?;
let stream_id = row.get_checked(0)?; let stream_id = row.get_checked(0)?;
let dir_id = row.get_checked(1)?; let dir_id = row.get_checked(1)?;
let mut empty = FnvHashMap::default(); let stream = match streams_by_dir.get_mut(&dir_id) {
let files = match dir_id { None => Stream::default(),
None => &mut empty, Some(d) => d.remove(&stream_id).unwrap_or_else(Stream::default),
Some(id) => files_by_dir.get_mut(&id).unwrap(),
}; };
compare_stream(conn, stream_id, opts, files)?; compare_stream(conn, stream_id, opts, stream)?;
} }
} }
for (&dir_id, files) in &mut files_by_dir { // Expect the rest to have only garbage.
compare_dir(conn, dir_id, files)?; for (&dir_id, streams) in &streams_by_dir {
for (&stream_id, stream) in streams {
for (&recording_id, r) in stream {
let id = CompositeId::new(stream_id, recording_id);
if r.recording_row.is_some() || r.playback_row.is_some() ||
r.integrity_row || !r.garbage_row {
error!("dir {} recording {} for unknown stream: {:#?}", dir_id, id, r);
}
}
}
} }
Ok(()) Ok(())
@ -114,6 +131,28 @@ struct RecordingSummary {
flags: i32, flags: i32,
} }
#[derive(Debug, Default)]
struct Recording {
/// Present iff there is a file. When `opts.compare_lens` is true, the length; otherwise 0.
file: Option<u64>,
/// Iff a `recording` row is present, a `RecordingSummary` from those fields.
recording_row: Option<RecordingSummary>,
/// Iff a `recording_playback` row is present, a `RecordingSummary` computed from the index.
/// This should match the recording row.
playback_row: Option<RecordingSummary>,
/// True iff a `recording_integrity` row is present.
integrity_row: bool,
/// True iff a `garbage` row is present.
garbage_row: bool,
}
type Stream = FnvHashMap<i32, Recording>;
type Dir = FnvHashMap<i32, Stream>;
fn summarize_index(video_index: &[u8]) -> Result<RecordingSummary, Error> { fn summarize_index(video_index: &[u8]) -> Result<RecordingSummary, Error> {
let mut it = recording::SampleIndexIterator::new(); let mut it = recording::SampleIndexIterator::new();
let mut duration = 0; let mut duration = 0;
@ -127,10 +166,10 @@ fn summarize_index(video_index: &[u8]) -> Result<RecordingSummary, Error> {
video_sync_samples += it.is_key() as i32; video_sync_samples += it.is_key() as i32;
} }
Ok(RecordingSummary { Ok(RecordingSummary {
bytes: bytes, bytes,
video_samples: video_samples, video_samples,
video_sync_samples: video_sync_samples, video_sync_samples,
duration: duration, duration,
flags: if it.duration_90k == 0 { db::RecordingFlags::TrailingZero as i32 } else { 0 }, flags: if it.duration_90k == 0 { db::RecordingFlags::TrailingZero as i32 } else { 0 },
}) })
} }
@ -138,14 +177,13 @@ fn summarize_index(video_index: &[u8]) -> Result<RecordingSummary, Error> {
/// Reads through the given sample file directory. /// Reads through the given sample file directory.
/// Logs unexpected files and creates a hash map of the files found there. /// Logs unexpected files and creates a hash map of the files found there.
/// If `opts.compare_lens` is set, the values are lengths; otherwise they're insignificant. /// If `opts.compare_lens` is set, the values are lengths; otherwise they're insignificant.
fn read_dir(path: &str, opts: &Options) -> Result<FnvHashMap<CompositeId, u64>, Error> { fn read_dir(path: &str, opts: &Options) -> Result<Dir, Error> {
let mut files = FnvHashMap::default(); let mut dir = Dir::default();
for e in fs::read_dir(path)? { for e in fs::read_dir(path)? {
let e = e?; let e = e?;
let f = e.file_name(); let f = e.file_name();
let f = f.as_bytes(); let f = f.as_bytes();
match f { match f {
//"." | ".." => continue,
b"meta" | b"meta-tmp" => continue, b"meta" | b"meta-tmp" => continue,
_ => {}, _ => {},
}; };
@ -157,106 +195,126 @@ fn read_dir(path: &str, opts: &Options) -> Result<FnvHashMap<CompositeId, u64>,
} }
}; };
let len = if opts.compare_lens { e.metadata()?.len() } else { 0 }; let len = if opts.compare_lens { e.metadata()?.len() } else { 0 };
files.insert(id, len); let stream = dir.entry(id.stream()).or_insert_with(Stream::default);
stream.entry(id.recording()).or_insert_with(Recording::default).file = Some(len);
} }
Ok(files) Ok(dir)
} }
/// Looks through the stream for errors. /// Looks through a known stream for errors.
/// Removes found recordings from the given file map.
fn compare_stream(conn: &rusqlite::Connection, stream_id: i32, opts: &Options, fn compare_stream(conn: &rusqlite::Connection, stream_id: i32, opts: &Options,
files: &mut FnvHashMap<CompositeId, u64>) mut stream: Stream) -> Result<(), Error> {
-> Result<(), Error> { let start = CompositeId::new(stream_id, 0);
// This statement should be a full outer join over the recording and recording_playback tables. let end = CompositeId::new(stream_id, i32::max_value());
// SQLite3 doesn't support that, though, so emulate it with a couple left joins and a union.
const FIELDS: &'static str = r#" // recording row.
recording.composite_id, {
recording.flags, let mut stmt = conn.prepare_cached(r#"
recording.sample_file_bytes, select
recording.duration_90k, composite_id,
recording.video_samples, flags,
recording.video_sync_samples, sample_file_bytes,
recording_playback.composite_id, duration_90k,
recording_playback.video_index video_samples,
"#; video_sync_samples
let mut stmt = conn.prepare_cached(&format!(r#" from
select {} recording
from recording left join recording_playback on where
(recording.composite_id = recording_playback.composite_id) composite_id between ? and ?
where :start <= recording.composite_id and recording.composite_id < :end "#)?;
union all let mut rows = stmt.query(&[&start.0, &end.0])?;
select {} while let Some(row) = rows.next() {
from recording_playback left join recording on let row = row?;
(recording_playback.composite_id = recording.composite_id) let id = CompositeId(row.get_checked(0)?);
where recording.composite_id is null and let s = RecordingSummary {
:start <= recording_playback.composite_id and recording_playback.composite_id < :end flags: row.get_checked(1)?,
"#, FIELDS, FIELDS))?; bytes: row.get_checked::<_, i64>(2)? as u64,
let mut rows = stmt.query_named(&[ duration: row.get_checked(3)?,
(":start", &CompositeId::new(stream_id, 0).0), video_samples: row.get_checked(4)?,
(":end", &CompositeId::new(stream_id + 1, 0).0), video_sync_samples: row.get_checked(5)?,
])?; };
while let Some(row) = rows.next() { stream.entry(id.recording())
let row = row?; .or_insert_with(Recording::default)
let id = row.get_checked::<_, Option<i64>>(0)?.map(|id| CompositeId(id)); .recording_row = Some(s);
let playback_id = row.get_checked::<_, Option<i64>>(6)?.map(|id| CompositeId(id));
let id = match (id, playback_id) {
(Some(id1), Some(_)) => id1,
(Some(id1), None) => {
error!("id {} has recording row but no recording_playback row", id1);
continue;
},
(None, Some(id2)) => {
error!("id {} has recording_playback row but no recording row", id2);
continue;
},
(None, None) => bail!("outer join returned fully empty row"),
};
let row_summary = RecordingSummary {
flags: row.get_checked(1)?,
bytes: row.get_checked::<_, i64>(2)? as u64,
duration: row.get_checked(3)?,
video_samples: row.get_checked(4)?,
video_sync_samples: row.get_checked(5)?,
};
let video_index: Vec<u8> = row.get_checked(7)?;
let index_summary = match summarize_index(&video_index) {
Ok(s) => s,
Err(e) => {
error!("id {} has bad video_index: {}", id, e);
continue;
},
};
if row_summary != index_summary {
error!("id {} row summary {:#?} inconsistent with index {:#?}",
id, row_summary, index_summary);
} }
let len = match files.remove(&id) { }
Some(l) => l,
// recording_playback row.
{
let mut stmt = conn.prepare_cached(r#"
select
composite_id,
video_index
from
recording_playback
where
composite_id between ? and ?
"#)?;
let mut rows = stmt.query(&[&start.0, &end.0])?;
while let Some(row) = rows.next() {
let row = row?;
let id = CompositeId(row.get_checked(0)?);
let video_index: Vec<u8> = row.get_checked(1)?;
let s = match summarize_index(&video_index) {
Ok(s) => s,
Err(e) => {
error!("id {} has bad video_index: {}", id, e);
continue;
},
};
stream.entry(id.recording())
.or_insert_with(Recording::default)
.playback_row = Some(s);
}
}
// recording_integrity row.
{
let mut stmt = conn.prepare_cached(r#"
select
composite_id
from
recording_integrity
where
composite_id between ? and ?
"#)?;
let mut rows = stmt.query(&[&start.0, &end.0])?;
while let Some(row) = rows.next() {
let row = row?;
let id = CompositeId(row.get_checked(0)?);
stream.entry(id.recording())
.or_insert_with(Recording::default)
.integrity_row = true;
}
}
for (&id, recording) in &stream {
let id = CompositeId::new(stream_id, id);
let r = match recording.recording_row {
Some(ref r) => r,
None => { None => {
error!("id {} missing", id); if !recording.garbage_row || recording.playback_row.is_some() ||
recording.integrity_row {
error!("Missing recording row for {}: {:#?}", id, recording);
}
continue; continue;
} },
}; };
if opts.compare_lens && row_summary.bytes != len { match recording.playback_row {
error!("id {} declares length {}, but its sample file has length {}", Some(ref p) => {
id, row_summary.bytes, len); if r != p {
error!("Recording {} summary doesn't match video_index: {:#?}", id, recording);
}
},
None => error!("Recording {} missing playback row: {:#?}", id, recording),
}
match recording.file {
Some(len) => if opts.compare_lens && r.bytes != len {
error!("Recording {} length mismatch: {:#?}", id, recording);
},
None => error!("Recording {} missing file: {:#?}", id, recording),
} }
} }
Ok(())
}
fn compare_dir(conn: &rusqlite::Connection, dir_id: i32,
files: &mut FnvHashMap<CompositeId, u64>) -> Result<(), Error> {
let mut stmt = conn.prepare_cached(
"select composite_id from garbage where sample_file_dir_id = ?")?;
let mut rows = stmt.query(&[&dir_id])?;
while let Some(row) = rows.next() {
let row = row?;
files.remove(&CompositeId(row.get_checked(0)?));
}
for (k, _) in files {
error!("dir {}: Unexpected file {}", dir_id, k);
}
Ok(()) Ok(())
} }

View File

@ -83,22 +83,6 @@ const LIST_RECORDINGS_BY_ID_SQL: &'static str = r#"
recording.composite_id recording.composite_id
"#; "#;
const INSERT_RECORDING_SQL: &'static str = r#"
insert into recording (composite_id, stream_id, open_id, run_offset, flags,
sample_file_bytes, start_time_90k, duration_90k,
local_time_delta_90k, video_samples, video_sync_samples,
video_sample_entry_id)
values (:composite_id, :stream_id, :open_id, :run_offset, :flags,
:sample_file_bytes, :start_time_90k, :duration_90k,
:local_time_delta_90k, :video_samples, :video_sync_samples,
:video_sample_entry_id)
"#;
const INSERT_RECORDING_PLAYBACK_SQL: &'static str = r#"
insert into recording_playback (composite_id, sample_file_sha1, video_index)
values (:composite_id, :sample_file_sha1, :video_index)
"#;
const STREAM_MIN_START_SQL: &'static str = r#" const STREAM_MIN_START_SQL: &'static str = r#"
select select
start_time_90k start_time_90k
@ -191,8 +175,15 @@ pub(crate) fn get_db_uuid(conn: &rusqlite::Connection) -> Result<Uuid, Error> {
/// Inserts the specified recording (for use from `try_flush` only). /// Inserts the specified recording (for use from `try_flush` only).
pub(crate) fn insert_recording(tx: &rusqlite::Transaction, o: &db::Open, id: CompositeId, pub(crate) fn insert_recording(tx: &rusqlite::Transaction, o: &db::Open, id: CompositeId,
r: &db::RecordingToInsert) -> Result<(), Error> { r: &db::RecordingToInsert) -> Result<(), Error> {
let mut stmt = tx.prepare_cached(INSERT_RECORDING_SQL) let mut stmt = tx.prepare_cached(r#"
.with_context(|e| format!("can't prepare recording insert: {}", e))?; insert into recording (composite_id, stream_id, open_id, run_offset, flags,
sample_file_bytes, start_time_90k, duration_90k,
video_samples, video_sync_samples, video_sample_entry_id)
values (:composite_id, :stream_id, :open_id, :run_offset, :flags,
:sample_file_bytes, :start_time_90k, :duration_90k,
:video_samples, :video_sync_samples,
:video_sample_entry_id)
"#).with_context(|e| format!("can't prepare recording insert: {}", e))?;
stmt.execute_named(&[ stmt.execute_named(&[
(":composite_id", &id.0), (":composite_id", &id.0),
(":stream_id", &(id.stream() as i64)), (":stream_id", &(id.stream() as i64)),
@ -202,20 +193,35 @@ pub(crate) fn insert_recording(tx: &rusqlite::Transaction, o: &db::Open, id: Com
(":sample_file_bytes", &r.sample_file_bytes), (":sample_file_bytes", &r.sample_file_bytes),
(":start_time_90k", &r.start.0), (":start_time_90k", &r.start.0),
(":duration_90k", &r.duration_90k), (":duration_90k", &r.duration_90k),
(":local_time_delta_90k", &r.local_time_delta.0),
(":video_samples", &r.video_samples), (":video_samples", &r.video_samples),
(":video_sync_samples", &r.video_sync_samples), (":video_sync_samples", &r.video_sync_samples),
(":video_sample_entry_id", &r.video_sample_entry_id), (":video_sample_entry_id", &r.video_sample_entry_id),
]).with_context(|e| format!("unable to insert recording for {:#?}: {}", r, e))?; ]).with_context(|e| format!("unable to insert recording for {:#?}: {}", r, e))?;
let mut stmt = tx.prepare_cached(INSERT_RECORDING_PLAYBACK_SQL) let mut stmt = tx.prepare_cached(r#"
.with_context(|e| format!("can't prepare recording_playback insert: {}", e))?; insert into recording_integrity (composite_id, local_time_delta_90k, sample_file_sha1)
values (:composite_id, :local_time_delta_90k, :sample_file_sha1)
"#).with_context(|e| format!("can't prepare recording_integrity insert: {}", e))?;
let sha1 = &r.sample_file_sha1[..]; let sha1 = &r.sample_file_sha1[..];
let delta = match r.run_offset {
0 => None,
_ => Some(r.local_time_delta.0),
};
stmt.execute_named(&[ stmt.execute_named(&[
(":composite_id", &id.0), (":composite_id", &id.0),
(":local_time_delta_90k", &delta),
(":sample_file_sha1", &sha1), (":sample_file_sha1", &sha1),
]).with_context(|e| format!("unable to insert recording_integrity for {:#?}: {}", r, e))?;
let mut stmt = tx.prepare_cached(r#"
insert into recording_playback (composite_id, video_index)
values (:composite_id, :video_index)
"#).with_context(|e| format!("can't prepare recording_playback insert: {}", e))?;
stmt.execute_named(&[
(":composite_id", &id.0),
(":video_index", &r.video_index), (":video_index", &r.video_index),
]).with_context(|e| format!("unable to insert recording_playback for {:#?}: {}", r, e))?; ]).with_context(|e| format!("unable to insert recording_playback for {:#?}: {}", r, e))?;
Ok(()) Ok(())
} }
@ -244,6 +250,12 @@ pub(crate) fn delete_recordings(tx: &rusqlite::Transaction, sample_file_dir_id:
composite_id < :end composite_id < :end
"#)?; "#)?;
let mut del2 = tx.prepare_cached(r#" let mut del2 = tx.prepare_cached(r#"
delete from recording_integrity
where
:start <= composite_id and
composite_id < :end
"#)?;
let mut del3 = tx.prepare_cached(r#"
delete from recording delete from recording
where where
:start <= composite_id and :start <= composite_id and
@ -260,11 +272,15 @@ pub(crate) fn delete_recordings(tx: &rusqlite::Transaction, sample_file_dir_id:
]; ];
let n1 = del1.execute_named(p)?; let n1 = del1.execute_named(p)?;
if n1 != n { if n1 != n {
bail!("inserted {} rows but deleted {} recording rows!", n, n1); bail!("inserted {} garbage rows but deleted {} recording_playback rows!", n, n1);
} }
let n2 = del2.execute_named(p)?; let n2 = del2.execute_named(p)?;
if n2 != n { if n2 > n { // fewer is okay; recording_integrity is optional.
bail!("deleted {} recording rows but {} recording_playback rows!", n, n2); bail!("inserted {} garbage rows but deleted {} recording_integrity rows!", n, n2);
}
let n3 = del3.execute_named(p)?;
if n3 != n {
bail!("deleted {} recording rows but {} recording_playback rows!", n3, n);
} }
Ok(n) Ok(n)
} }

View File

@ -168,13 +168,6 @@ create table recording (
duration_90k integer not null duration_90k integer not null
check (duration_90k >= 0 and duration_90k < 5*60*90000), check (duration_90k >= 0 and duration_90k < 5*60*90000),
-- The number of 90 kHz units the local system time is ahead of the
-- recording; negative numbers indicate the local system time is behind
-- the recording. Large absolute values would indicate that the local time
-- has jumped during recording or that the local time and camera time
-- frequencies do not match.
local_time_delta_90k integer not null,
video_samples integer not null check (video_samples > 0), video_samples integer not null check (video_samples > 0),
video_sync_samples integer not null check (video_sync_samples > 0), video_sync_samples integer not null check (video_sync_samples > 0),
video_sample_entry_id integer references video_sample_entry (id), video_sample_entry_id integer references video_sample_entry (id),
@ -200,20 +193,43 @@ create index recording_cover on recording (
flags flags
); );
-- Large fields for a recording which are not needed when simply listing all -- Fields which are only needed to check/correct database integrity problems
-- of the recordings in a given range. In particular, when serving a byte -- (such as incorrect timestamps).
-- range within a .mp4 file, the recording_playback row is needed for the create table recording_integrity (
-- recording(s) corresponding to that particular byte range, needed, but the -- See description on recording table.
-- recording rows suffice for all other recordings in the .mp4. composite_id integer primary key references recording (composite_id),
-- The number of 90 kHz units the local system's monotonic clock has
-- advanced more than the stated duration of recordings in a run since the
-- first recording ended. Negative numbers indicate the local system time is
-- behind the recording.
--
-- The first recording of a run (that is, one with run_offset=0) has null
-- local_time_delta_90k because errors are assumed to
-- be the result of initial buffering rather than frequency mismatch.
--
-- This value should be near 0 even on long runs in which the camera's clock
-- and local system's clock frequency differ because each recording's delta
-- is used to correct the durations of the next (up to 500 ppm error).
local_time_delta_90k integer,
-- The sha1 hash of the contents of the sample file.
sample_file_sha1 blob check (length(sample_file_sha1) <= 20)
);
-- Large fields for a recording which are needed only for playback.
-- In particular, when serving a byte range within a .mp4 file, the
-- recording_playback row is needed for the recording(s) corresponding to that
-- particular byte range, but the recording rows suffice for all other
-- recordings in the .mp4.
create table recording_playback ( create table recording_playback (
-- See description on recording table. -- See description on recording table.
composite_id integer primary key references recording (composite_id), composite_id integer primary key references recording (composite_id),
-- The sha1 hash of the contents of the sample file.
sample_file_sha1 blob not null check (length(sample_file_sha1) = 20),
-- See design/schema.md#video_index for a description of this field. -- See design/schema.md#video_index for a description of this field.
video_index blob not null check (length(video_index) > 0) video_index blob not null check (length(video_index) > 0)
-- audio_index could be added here in the future.
); );
-- Files which are to be deleted (may or may not still exist). -- Files which are to be deleted (may or may not still exist).

View File

@ -151,6 +151,12 @@ pub fn run(args: &super::Args, tx: &rusqlite::Transaction) -> Result<(), Error>
flags flags
); );
create table recording_integrity (
composite_id integer primary key references recording (composite_id),
local_time_delta_90k integer,
sample_file_sha1 blob check (length(sample_file_sha1) <= 20)
);
create table video_sample_entry ( create table video_sample_entry (
id integer primary key, id integer primary key,
sha1 blob unique not null check (length(sha1) = 20), sha1 blob unique not null check (length(sha1) = 20),
@ -225,6 +231,14 @@ pub fn run(args: &super::Args, tx: &rusqlite::Transaction) -> Result<(), Error>
r.video_sample_entry_id r.video_sample_entry_id
from from
old_recording r cross join open o; old_recording r cross join open o;
insert into recording_integrity
select
r.composite_id,
case when r.run_offset > 0 then local_time_delta_90k else null end,
p.sample_file_sha1
from
old_recording r join recording_playback p on (r.composite_id = p.composite_id);
"#)?; "#)?;
fix_video_sample_entry(tx)?; fix_video_sample_entry(tx)?;

View File

@ -105,13 +105,11 @@ pub fn run(_args: &super::Args, tx: &rusqlite::Transaction) -> Result<(), Error>
alter table recording_playback rename to old_recording_playback; alter table recording_playback rename to old_recording_playback;
create table recording_playback ( create table recording_playback (
composite_id integer primary key references recording (composite_id), composite_id integer primary key references recording (composite_id),
sample_file_sha1 blob not null check (length(sample_file_sha1) = 20),
video_index blob not null check (length(video_index) > 0) video_index blob not null check (length(video_index) > 0)
); );
insert into recording_playback insert into recording_playback
select select
composite_id, composite_id,
sample_file_sha1,
video_index video_index
from from
old_recording_playback; old_recording_playback;