use Blake3 instead of SHA-1 or Blake2b

Benefits:

* Blake3 is faster. This is most noticeable for the hashing of the
  sample file data.
* we no longer need OpenSSL, which helps with shrinking the binary size
  (#70). sha1 basically forced OpenSSL usage; ring deliberately doesn't
  support this old algorithm, and the pure-Rust sha1 crate is painfully
  slow. OpenSSL might still be a better choice than ring/rustls for TLS
  but it's nice to have the option.

For the video sample entries, I decided we don't need to hash at all. I
think the id number is sufficiently stable, and it's okay---perhaps even
desirable---if an existing init segment changes for fixes like e5b83c2.
This commit is contained in:
Scott Lamb 2020-03-20 20:52:30 -07:00
parent e5b83c21e1
commit 00991733f2
18 changed files with 248 additions and 187 deletions

21
Cargo.lock generated
View File

@ -185,6 +185,21 @@ dependencies = [
"constant_time_eq",
]
[[package]]
name = "blake3"
version = "0.2.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c58fbca3e5be3b7a3c24a5ec6976a6a464490528e8fee92896fe4ee2a40b10fb"
dependencies = [
"arrayref",
"arrayvec 0.5.1",
"cc",
"cfg-if",
"constant_time_eq",
"crypto-mac",
"digest",
]
[[package]]
name = "block-buffer"
version = "0.7.3"
@ -1253,7 +1268,7 @@ name = "moonfire-db"
version = "0.0.1"
dependencies = [
"base64 0.11.0",
"blake2-rfc",
"blake3",
"byteorder",
"cstr",
"failure",
@ -1269,12 +1284,12 @@ dependencies = [
"mylog",
"nix",
"odds",
"openssl",
"parking_lot",
"prettydiff",
"protobuf",
"protobuf-codegen-pure",
"regex",
"ring",
"rusqlite",
"smallvec 1.1.0",
"tempdir",
@ -1298,6 +1313,7 @@ name = "moonfire-nvr"
version = "0.1.0"
dependencies = [
"base64 0.11.0",
"blake3",
"byteorder",
"bytes",
"cstr",
@ -1320,7 +1336,6 @@ dependencies = [
"moonfire-ffmpeg",
"mylog",
"nix",
"openssl",
"parking_lot",
"protobuf",
"reffers",

View File

@ -20,6 +20,7 @@ members = ["base", "db", "ffmpeg"]
[dependencies]
base = { package = "moonfire-base", path = "base" }
base64 = "0.11.0"
blake3 = "0.2.2"
bytes = "0.5.3"
byteorder = "1.0"
cstr = "0.1.7"
@ -41,7 +42,6 @@ memchr = "2.0.2"
memmap = "0.7"
mylog = { git = "https://github.com/scottlamb/mylog" }
nix = "0.16.1"
openssl = "0.10"
parking_lot = { version = "0.9", features = [] }
protobuf = { git = "https://github.com/stepancheg/rust-protobuf" }
reffers = "0.6.0"

View File

@ -14,7 +14,7 @@ path = "lib.rs"
[dependencies]
base = { package = "moonfire-base", path = "../base" }
base64 = "0.11.0"
blake2-rfc = "0.2.18"
blake3 = "0.2.2"
byteorder = "1.0"
cstr = "0.1.7"
failure = "0.1.1"
@ -28,11 +28,11 @@ lru-cache = "0.1"
mylog = { git = "https://github.com/scottlamb/mylog" }
nix = "0.16.1"
odds = { version = "0.3.1", features = ["std-vec"] }
openssl = "0.10"
parking_lot = { version = "0.9", features = [] }
prettydiff = "0.3.1"
protobuf = { git = "https://github.com/stepancheg/rust-protobuf" }
regex = "1.0"
ring = "0.14.6"
rusqlite = "0.21.0"
smallvec = "1.0"
tempdir = "0.3"

View File

@ -30,7 +30,6 @@
use log::info;
use base::strutil;
use blake2_rfc::blake2b::blake2b;
use crate::schema::Permissions;
use failure::{Error, bail, format_err};
use fnv::FnvHashMap;
@ -38,6 +37,7 @@ use lazy_static::lazy_static;
use libpasta;
use parking_lot::Mutex;
use protobuf::Message;
use ring::rand::{SecureRandom, SystemRandom};
use rusqlite::{Connection, Transaction, params};
use std::collections::BTreeMap;
use std::fmt;
@ -204,6 +204,7 @@ pub enum SessionFlags {
/// Reason a session was revoked, stored in the `revocation_reason` column of
/// `user_session`.
///
/// NOTE(review): the schema comments describe "logout link clicked" as value 0
/// and the hashing-algorithm change as value 1, while the discriminants here
/// are 1 and 2, and the 5->6 upgrade SQL writes `revocation_reason = 1` for
/// the Blake2b->Blake3 change — confirm which numbering the database actually
/// uses.
#[derive(Copy, Clone)]
pub enum RevocationReason {
/// The session's logout link was clicked (i.e. from within the session itself).
LoggedOut = 1,
/// Obsoleted by a change in hashing algorithm (e.g. the schema 5->6 upgrade).
AlgorithmChange = 2,
}
#[derive(Debug, Default)]
@ -230,9 +231,9 @@ pub struct Session {
impl Session {
pub fn csrf(&self) -> SessionHash {
let r = blake2b(24, b"csrf", &self.seed.0[..]);
let r = blake3::keyed_hash(&self.seed.0, b"csrf");
let mut h = SessionHash([0u8; 24]);
h.0.copy_from_slice(r.as_bytes());
h.0.copy_from_slice(&r.as_bytes()[0..24]);
h
}
}
@ -253,9 +254,9 @@ impl RawSessionId {
}
pub fn hash(&self) -> SessionHash {
let r = blake2b(24, &[], &self.0[..]);
let r = blake3::hash(&self.0[..]);
let mut h = SessionHash([0u8; 24]);
h.0.copy_from_slice(r.as_bytes());
h.0.copy_from_slice(&r.as_bytes()[0..24]);
h
}
}
@ -276,8 +277,8 @@ impl fmt::Debug for RawSessionId {
/// A 24-byte truncated Blake3 hash of data associated with the session.
/// This is currently used in two ways:
/// * the csrf token is a blake2b drived from the session's seed. This is put into the `sc`
/// cookie.
/// * the csrf token is a truncated blake3 derived from the session's seed. This is put into the
/// `sc` cookie.
/// * the 48-byte session id is hashed to be used as a database key.
#[derive(Copy, Clone, Default, PartialEq, Eq, Hash)]
pub struct SessionHash(pub [u8; 24]);
@ -333,6 +334,8 @@ pub(crate) struct State {
/// evict the oldest when its size exceeds a threshold. Or just evict everything on every flush
/// (and accept more frequent database accesses).
sessions: FnvHashMap<SessionHash, Session>,
rand: SystemRandom,
}
impl State {
@ -341,6 +344,7 @@ impl State {
users_by_id: BTreeMap::new(),
users_by_name: BTreeMap::new(),
sessions: FnvHashMap::default(),
rand: ring::rand::SystemRandom::new(),
};
let mut stmt = conn.prepare(r#"
select
@ -525,7 +529,7 @@ impl State {
u.dirty = true;
}
let password_id = u.password_id;
State::make_session_int(conn, req, u, domain, Some(password_id), session_flags,
State::make_session_int(&self.rand, conn, req, u, domain, Some(password_id), session_flags,
&mut self.sessions, u.permissions.clone())
}
@ -537,19 +541,20 @@ impl State {
if u.disabled() {
bail!("user is disabled");
}
State::make_session_int(conn, creation, u, domain, None, flags, &mut self.sessions,
permissions)
State::make_session_int(&self.rand, conn, creation, u, domain, None, flags,
&mut self.sessions, permissions)
}
fn make_session_int<'s>(conn: &Connection, creation: Request, user: &mut User,
domain: Option<Vec<u8>>, creation_password_id: Option<i32>, flags: i32,
fn make_session_int<'s>(rand: &SystemRandom, conn: &Connection, creation: Request,
user: &mut User, domain: Option<Vec<u8>>,
creation_password_id: Option<i32>, flags: i32,
sessions: &'s mut FnvHashMap<SessionHash, Session>,
permissions: Permissions)
-> Result<(RawSessionId, &'s Session), Error> {
let mut session_id = RawSessionId::new();
::openssl::rand::rand_bytes(&mut session_id.0).unwrap();
rand.fill(&mut session_id.0).unwrap();
let mut seed = [0u8; 32];
::openssl::rand::rand_bytes(&mut seed).unwrap();
rand.fill(&mut seed).unwrap();
let hash = session_id.hash();
let mut stmt = conn.prepare_cached(r#"
insert into user_session (session_id_hash, user_id, seed, flags, domain,

View File

@ -97,9 +97,9 @@ const GET_RECORDING_PLAYBACK_SQL: &'static str = r#"
"#;
const INSERT_VIDEO_SAMPLE_ENTRY_SQL: &'static str = r#"
insert into video_sample_entry (sha1, width, height, pasp_h_spacing, pasp_v_spacing,
insert into video_sample_entry (width, height, pasp_h_spacing, pasp_v_spacing,
rfc6381_codec, data)
values (:sha1, :width, :height, :pasp_h_spacing, :pasp_v_spacing,
values (:width, :height, :pasp_h_spacing, :pasp_v_spacing,
:rfc6381_codec, :data)
"#;
@ -129,7 +129,6 @@ impl rusqlite::types::FromSql for VideoIndex {
#[derive(Debug)]
pub struct VideoSampleEntry {
pub id: i32,
pub sha1: [u8; 20],
// Fields matching VideoSampleEntryToInsert below.
@ -234,7 +233,7 @@ pub struct RecordingToInsert {
pub video_sync_samples: i32,
pub video_sample_entry_id: i32,
pub video_index: Vec<u8>,
pub sample_file_sha1: [u8; 20],
pub sample_file_blake3: Option<[u8; 32]>,
}
impl RecordingToInsert {
@ -1358,7 +1357,6 @@ impl LockedDatabase {
let mut stmt = self.conn.prepare(r#"
select
id,
sha1,
width,
height,
pasp_h_spacing,
@ -1371,23 +1369,16 @@ impl LockedDatabase {
let mut rows = stmt.query(params![])?;
while let Some(row) = rows.next()? {
let id = row.get(0)?;
let mut sha1 = [0u8; 20];
let sha1_vec: Vec<u8> = row.get(1)?;
if sha1_vec.len() != 20 {
bail!("video sample entry id {} has sha1 {} of wrong length", id, sha1_vec.len());
}
sha1.copy_from_slice(&sha1_vec);
let data: Vec<u8> = row.get(7)?;
let data: Vec<u8> = row.get(6)?;
self.video_sample_entries_by_id.insert(id, Arc::new(VideoSampleEntry {
id,
width: row.get::<_, i32>(2)?.try_into()?,
height: row.get::<_, i32>(3)?.try_into()?,
pasp_h_spacing: row.get::<_, i32>(4)?.try_into()?,
pasp_v_spacing: row.get::<_, i32>(5)?.try_into()?,
sha1,
width: row.get::<_, i32>(1)?.try_into()?,
height: row.get::<_, i32>(2)?.try_into()?,
pasp_h_spacing: row.get::<_, i32>(3)?.try_into()?,
pasp_v_spacing: row.get::<_, i32>(4)?.try_into()?,
data,
rfc6381_codec: row.get(6)?,
rfc6381_codec: row.get(5)?,
}));
}
info!("Loaded {} video sample entries",
@ -1532,19 +1523,16 @@ impl LockedDatabase {
/// On success, returns the id of a new or existing row.
pub fn insert_video_sample_entry(&mut self, entry: VideoSampleEntryToInsert)
-> Result<i32, Error> {
let sha1 = crate::sha1(&entry.data)?;
// Check if it already exists.
// There shouldn't be too many entries, so it's fine to enumerate everything.
for (&id, v) in &self.video_sample_entries_by_id {
if v.sha1 == sha1 {
// A hash collision (different data with the same hash) is unlikely.
// The other fields are derived from data, so differences there indicate a bug.
if v.data == entry.data {
// The other fields are derived from data, so differences indicate a bug.
if v.width != entry.width || v.height != entry.height ||
v.pasp_h_spacing != entry.pasp_h_spacing ||
v.pasp_v_spacing != entry.pasp_v_spacing {
bail!("video_sample_entry SHA-1 {} mismatch: existing entry {:?}, new {:?}",
base::strutil::hex(&sha1[..]), v, &entry);
bail!("video_sample_entry id {}: existing entry {:?}, new {:?}", id, v, &entry);
}
return Ok(id);
}
@ -1552,7 +1540,6 @@ impl LockedDatabase {
let mut stmt = self.conn.prepare_cached(INSERT_VIDEO_SAMPLE_ENTRY_SQL)?;
stmt.execute_named(named_params!{
":sha1": &sha1[..],
":width": i32::from(entry.width),
":height": i32::from(entry.height),
":pasp_h_spacing": i32::from(entry.pasp_h_spacing),
@ -1568,7 +1555,6 @@ impl LockedDatabase {
height: entry.height,
pasp_h_spacing: entry.pasp_h_spacing,
pasp_v_spacing: entry.pasp_v_spacing,
sha1,
data: entry.data,
rfc6381_codec: entry.rfc6381_codec,
}));
@ -2356,7 +2342,7 @@ mod tests {
video_sync_samples: 1,
video_sample_entry_id: vse_id,
video_index: [0u8; 100].to_vec(),
sample_file_sha1: [0u8; 20],
sample_file_blake3: None,
};
let id = {
let mut db = db.lock();

View File

@ -51,12 +51,3 @@ pub mod testutil;
pub use crate::db::*;
pub use crate::schema::Permissions;
pub use crate::signal::Signal;
use openssl::hash;
/// Computes the SHA-1 digest of `input` as a fixed 20-byte array.
///
/// Delegates to OpenSSL's one-shot `hash::hash` with the SHA-1 message
/// digest; the only error source is the underlying OpenSSL call.
fn sha1(input: &[u8]) -> Result<[u8; 20], failure::Error> {
let sha1 = hash::hash(hash::MessageDigest::sha1(), &input)?;
// Copy from OpenSSL's owned digest buffer into a plain fixed-size array.
let mut sha1_bytes = [0u8; 20];
sha1_bytes.copy_from_slice(&sha1);
Ok(sha1_bytes)
}

View File

@ -200,10 +200,10 @@ pub(crate) fn insert_recording(tx: &rusqlite::Transaction, o: &db::Open, id: Com
id, r, e))?;
let mut stmt = tx.prepare_cached(r#"
insert into recording_integrity (composite_id, local_time_delta_90k, sample_file_sha1)
values (:composite_id, :local_time_delta_90k, :sample_file_sha1)
insert into recording_integrity (composite_id, local_time_delta_90k, sample_file_blake3)
values (:composite_id, :local_time_delta_90k, :sample_file_blake3)
"#).with_context(|e| format!("can't prepare recording_integrity insert: {}", e))?;
let sha1 = &r.sample_file_sha1[..];
let blake3 = r.sample_file_blake3.as_ref().map(|b| &b[..]);
let delta = match r.run_offset {
0 => None,
_ => Some(r.local_time_delta.0),
@ -211,7 +211,7 @@ pub(crate) fn insert_recording(tx: &rusqlite::Transaction, o: &db::Open, id: Com
stmt.execute_named(named_params!{
":composite_id": id.0,
":local_time_delta_90k": delta,
":sample_file_sha1": sha1,
":sample_file_blake3": blake3,
}).with_context(|e| format!("unable to insert recording_integrity for {:#?}: {}", r, e))?;
let mut stmt = tx.prepare_cached(r#"

View File

@ -258,8 +258,9 @@ create table recording_integrity (
-- TODO: fill this in!
wall_time_delta_90k integer,
-- The sha1 hash of the contents of the sample file.
sample_file_sha1 blob check (length(sample_file_sha1) <= 20)
-- The (possibly truncated) raw blake3 hash of the contents of the sample
-- file.
sample_file_blake3 blob check (length(sample_file_blake3) <= 32)
);
-- Large fields for a recording which are needed ony for playback.
@ -299,11 +300,8 @@ create table garbage (
create table video_sample_entry (
id integer primary key,
-- A SHA-1 hash of |bytes|.
sha1 blob unique not null check (length(sha1) = 20),
-- The width and height in pixels; must match values within
-- |sample_entry_bytes|.
-- `sample_entry_bytes`.
width integer not null check (width > 0),
height integer not null check (height > 0),
@ -354,10 +352,10 @@ create table user (
-- elsewhere), which holds the session id and an encrypted sequence number for
-- replay protection.
create table user_session (
-- The session id is a 48-byte blob. This is the unencoded, unsalted Blake2b-192
-- (24 bytes) of the unencoded session id. Much like `password_hash`, a
-- hash is used here so that a leaked database backup can't be trivially used
-- to steal credentials.
-- The session id is a 48-byte blob. This is the unsalted Blake3 of the
-- unencoded session id, truncated to 24 bytes. Much like `password_hash`, a
-- hash is used here so that a leaked database backup can't be trivially used
-- to steal credentials.
session_id_hash blob primary key not null,
user_id integer references user (id) not null,
@ -395,6 +393,7 @@ create table user_session (
-- A value indicating the reason for revocation, with optional additional
-- text detail. Enumeration values:
-- 0: logout link clicked (i.e. from within the session itself)
-- 1: obsoleted by a change in hashing algorithm (eg schema 5->6 upgrade)
--
-- This might be extended for a variety of other reasons:
-- x: user revoked (while authenticated in another way)

View File

@ -220,19 +220,16 @@ mod tests {
"#)?;
upgraded.execute(r#"
insert into video_sample_entry (id, sha1, width, height, data)
values (1, ?, 1920, 1080, ?);
"#, params![&crate::sha1(testutil::TEST_VIDEO_SAMPLE_ENTRY_DATA).unwrap()[..],
testutil::TEST_VIDEO_SAMPLE_ENTRY_DATA])?;
values (1, X'0000000000000000000000000000000000000000', 1920, 1080, ?);
"#, params![testutil::TEST_VIDEO_SAMPLE_ENTRY_DATA])?;
upgraded.execute(r#"
insert into video_sample_entry (id, sha1, width, height, data)
values (2, ?, 320, 240, ?);
"#, params![&crate::sha1(BAD_ANAMORPHIC_VIDEO_SAMPLE_ENTRY).unwrap()[..],
BAD_ANAMORPHIC_VIDEO_SAMPLE_ENTRY])?;
values (2, X'0000000000000000000000000000000000000001', 320, 240, ?);
"#, params![BAD_ANAMORPHIC_VIDEO_SAMPLE_ENTRY])?;
upgraded.execute(r#"
insert into video_sample_entry (id, sha1, width, height, data)
values (3, ?, 704, 480, ?);
"#, params![&crate::sha1(GOOD_ANAMORPHIC_VIDEO_SAMPLE_ENTRY).unwrap()[..],
GOOD_ANAMORPHIC_VIDEO_SAMPLE_ENTRY])?;
values (3, X'0000000000000000000000000000000000000002', 704, 480, ?);
"#, params![GOOD_ANAMORPHIC_VIDEO_SAMPLE_ENTRY])?;
upgraded.execute_batch(r#"
insert into recording (id, camera_id, sample_file_bytes, start_time_90k, duration_90k,
local_time_delta_90k, video_samples, video_sync_samples,

View File

@ -74,33 +74,42 @@ fn parse(data: &[u8]) -> Result<AvcDecoderConfigurationRecord, Error> {
pub fn run(_args: &super::Args, tx: &rusqlite::Transaction) -> Result<(), Error> {
// These create statements match the schema.sql when version 5 was the latest.
tx.execute_batch(r#"
alter table video_sample_entry add column pasp_h_spacing integer not null default 1 check (pasp_h_spacing > 0);
alter table video_sample_entry add column pasp_v_spacing integer not null default 1 check (pasp_v_spacing > 0);
alter table video_sample_entry rename to old_video_sample_entry;
create table video_sample_entry (
id integer primary key,
width integer not null check (width > 0),
height integer not null check (height > 0),
rfc6381_codec text not null,
data blob not null check (length(data) > 86),
pasp_h_spacing integer not null default 1 check (pasp_h_spacing > 0),
pasp_v_spacing integer not null default 1 check (pasp_v_spacing > 0)
);
"#)?;
let mut update = tx.prepare(r#"
update video_sample_entry
set data = :data,
sha1 = :sha1,
pasp_h_spacing = :pasp_h_spacing,
pasp_v_spacing = :pasp_v_spacing
where id = :id
let mut insert = tx.prepare(r#"
insert into video_sample_entry (id, width, height, rfc6381_codec, data,
pasp_h_spacing, pasp_v_spacing)
values (:id, :width, :height, :rfc6381_codec, :data,
:pasp_h_spacing, :pasp_v_spacing)
"#)?;
let mut stmt = tx.prepare(r#"
select
id,
width,
height,
rfc6381_codec,
data
from
video_sample_entry
old_video_sample_entry
"#)?;
let mut rows = stmt.query(params![])?;
while let Some(row) = rows.next()? {
let id: i32 = row.get(0)?;
let width: u16 = row.get::<_, i32>(1)?.try_into()?;
let height: u16 = row.get::<_, i32>(2)?.try_into()?;
let mut data: Vec<u8> = row.get(3)?;
let rfc6381_codec: &str = row.get_raw_checked(3)?.as_str()?;
let mut data: Vec<u8> = row.get(4)?;
let avcc = parse(&data)?;
if avcc.num_of_sequence_parameter_sets() != 1 {
bail!("Multiple SPSs!");
@ -121,13 +130,85 @@ pub fn run(_args: &super::Args, tx: &rusqlite::Transaction) -> Result<(), Error>
BigEndian::write_u32(&mut data[0..4], u32::try_from(len)?);
}
update.execute_named(named_params!{
insert.execute_named(named_params!{
":id": id,
":width": width,
":height": height,
":rfc6381_codec": rfc6381_codec,
":data": &data,
":sha1": &crate::sha1(&data)?[..],
":pasp_h_spacing": pasp.0,
":pasp_v_spacing": pasp.1,
})?;
}
tx.execute_batch(r#"
alter table recording rename to old_recording;
create table recording (
composite_id integer primary key,
open_id integer not null,
stream_id integer not null references stream (id),
run_offset integer not null,
flags integer not null,
sample_file_bytes integer not null check (sample_file_bytes > 0),
start_time_90k integer not null check (start_time_90k > 0),
duration_90k integer not null
check (duration_90k >= 0 and duration_90k < 5*60*90000),
video_samples integer not null check (video_samples > 0),
video_sync_samples integer not null check (video_sync_samples > 0),
video_sample_entry_id integer references video_sample_entry (id),
check (composite_id >> 32 = stream_id)
);
insert into recording select * from old_recording;
drop index recording_cover;
create index recording_cover on recording (
stream_id,
start_time_90k,
open_id,
duration_90k,
video_samples,
video_sync_samples,
video_sample_entry_id,
sample_file_bytes,
run_offset,
flags
);
alter table recording_integrity rename to old_recording_integrity;
create table recording_integrity (
composite_id integer primary key references recording (composite_id),
local_time_delta_90k integer,
local_time_since_open_90k integer,
wall_time_delta_90k integer,
sample_file_blake3 blob check (length(sample_file_blake3) <= 32)
);
insert into recording_integrity
select
composite_id,
local_time_delta_90k,
local_time_since_open_90k,
wall_time_delta_90k,
null
from
old_recording_integrity;
alter table recording_playback rename to old_recording_playback;
create table recording_playback (
composite_id integer primary key references recording (composite_id),
video_index blob not null check (length(video_index) > 0)
);
insert into recording_playback select * from old_recording_playback;
drop table old_recording_playback;
drop table old_recording_integrity;
drop table old_recording;
drop table old_video_sample_entry;
update user_session
set
revocation_reason = 1,
revocation_reason_detail = 'Blake2b->Blake3 upgrade'
where
revocation_reason is null;
"#)?;
Ok(())
}

View File

@ -40,7 +40,6 @@ use failure::{Error, bail, format_err};
use fnv::FnvHashMap;
use parking_lot::Mutex;
use log::{debug, trace, warn};
use openssl::hash;
use std::cmp::Ordering;
use std::cmp;
use std::io;
@ -563,7 +562,7 @@ struct InnerWriter<F: FileWriter> {
/// segments have been sent out. Initially 0.
completed_live_segment_off_90k: i32,
hasher: hash::Hasher,
hasher: blake3::Hasher,
/// The start time of this segment, based solely on examining the local clock after frames in
/// this segment were received. Frames can suffer from various kinds of delay (initial
@ -688,7 +687,7 @@ impl<'a, C: Clocks + Clone, D: DirWriter> Writer<'a, C, D> {
e: recording::SampleIndexEncoder::new(),
id,
completed_live_segment_off_90k: 0,
hasher: hash::Hasher::new(hash::MessageDigest::sha1())?,
hasher: blake3::Hasher::new(),
local_start: recording::Time(i64::max_value()),
adjuster: ClockAdjuster::new(prev.map(|p| p.local_time_delta.0)),
unflushed_sample: None,
@ -757,7 +756,7 @@ impl<'a, C: Clocks + Clone, D: DirWriter> Writer<'a, C, D> {
len: pkt.len() as i32,
is_key,
});
w.hasher.update(pkt).unwrap();
w.hasher.update(pkt);
Ok(())
}
@ -797,8 +796,7 @@ impl<F: FileWriter> InnerWriter<F> {
None => (self.adjuster.adjust(0), db::RecordingFlags::TrailingZero as i32),
Some(p) => (self.adjuster.adjust((p - unflushed.pts_90k) as i32), 0),
};
let mut sha1_bytes = [0u8; 20];
sha1_bytes.copy_from_slice(&self.hasher.finish().unwrap()[..]);
let blake3 = self.hasher.finalize();
let (local_time_delta, run_offset, end);
let d = self.add_sample(last_sample_duration, unflushed.len, unflushed.is_key,
unflushed.local_time)?;
@ -814,7 +812,7 @@ impl<F: FileWriter> InnerWriter<F> {
l.flags = flags;
local_time_delta = self.local_start - l.start;
l.local_time_delta = local_time_delta;
l.sample_file_sha1 = sha1_bytes;
l.sample_file_blake3 = Some(blake3.as_bytes().clone());
total_duration = recording::Duration(l.duration_90k as i64);
run_offset = l.run_offset;
end = l.start + total_duration;

View File

@ -290,16 +290,13 @@ arbitrary order. Each recording object has the following properties:
greater than the requested `endTime90k` if this recording was ongoing at
the requested time.
* `videoSampleEntryId`: a reference to an entry in the `videoSampleEntries`
map. These ids are strings so that they can serve as JSON object keys.
map.
* `videoSamples`: the number of samples (aka frames) of video in this
recording.
Under the property `videoSampleEntries`, an object mapping ids to objects with
the following properties:
* `sha1`: a SHA-1 hash of the ISO/IEC 14496-12 section 8.5.2
`VisualSampleEntry` bytes. The actual bytes can be retrieved, wrapped into
an initialization segment `.mp4`, at the URL `/api/init/<sha1>.mp4`.
* `width`: the stored width in pixels.
* `height`: the stored height in pixels.
* `pixelHSpacing`: the relative width of a pixel, as in a ISO/IEC 14496-12
@ -307,6 +304,9 @@ the following properties:
* `pixelVSpacing`: the relative height of a pixel, as in a ISO/IEC 14496-12
section 12.1.4.3 `PixelAspectRatioBox`. If absent, assumed to be 1.
The full initialization segment data for a given video sample entry can be
retrieved at the URL `/api/init/<id>.mp4`.
Example request URI (with added whitespace between parameters):
```
@ -335,7 +335,6 @@ Example response:
],
"videoSampleEntries": {
"1": {
"sha1": "81710c9c51a02cc95439caa8dd3bc12b77ffe767",
"width": 1280,
"height": 720
}
@ -468,7 +467,7 @@ Content-Type: video/mp4; codecs="avc1.640028"
X-Recording-Id: 42.5680
X-Recording-Start: 130985461191810
X-Time-Range: 5220058-5400061
X-Video-Sample-Entry-Sha1: 25fad1b92c344dadc0473a783dff957b0d7d56bb
X-Video-Sample-Entry-Id: 4
binary mp4 data
```
@ -478,7 +477,7 @@ Content-Type: video/mp4; codecs="avc1.640028"
X-Recording-Id: 42.5681
X-Recording-Start: 130985461191822
X-Time-Range: 0-180002
X-Video-Sample-Entry-Sha1: 25fad1b92c344dadc0473a783dff957b0d7d56bb
X-Video-Sample-Entry-Id: 4
binary mp4 data
```
@ -488,7 +487,7 @@ Content-Type: video/mp4; codecs="avc1.640028"
X-Recording-Id: 42.5681
X-Recording-Start: 130985461191822
X-Time-Range: 180002-360004
X-Video-Sample-Entry-Sha1: 25fad1b92c344dadc0473a783dff957b0d7d56bb
X-Video-Sample-Entry-Id: 4
binary mp4 data
```
@ -507,13 +506,13 @@ active HTTP/1.1 connections: six in Chrome's case. The WebSocket limit is much
higher (256), allowing browser-side Javascript to stream all active camera
streams simultaneously as well as making other simultaneous HTTP requests.
### `GET /api/init/<sha1>.mp4`
### `GET /api/init/<id>.mp4`
Returns a `.mp4` suitable for use as a [HTML5 Media Source Extensions
initialization segment][init-segment]. The MIME type will be `video/mp4`, with
a `codecs` parameter as specified in [RFC 6381][rfc-6381].
### `GET /api/init/<sha1>.mp4.txt`
### `GET /api/init/<id>.mp4.txt`
Returns a `text/plain` debugging string for the `.mp4` generated by the
same URL minus the `.txt` suffix.

View File

@ -522,7 +522,7 @@ The snippet below is a illustrative excerpt of the SQLite schema; see
camera_id integer references camera (id) not null,
sample_file_uuid blob unique not null,
sample_file_sha1 blob,
sample_file_blake3 blob,
sample_file_size integer,
-- The starting time and duration of the recording, in 90 kHz units since
@ -540,11 +540,10 @@ The snippet below is a illustrative excerpt of the SQLite schema; see
-- A concrete box derived from a ISO/IEC 14496-12 section 8.5.2
-- VisualSampleEntry box. Describes the codec, width, height, etc.
create table visual_sample_entry (
-- A SHA-1 hash of |bytes|.
sha1 blob primary key,
id integer primary key,
-- The width and height in pixels; must match values within
|sample_entry_bytes|.
-- `sample_entry_bytes`.
width integer,
height integer,

View File

@ -250,6 +250,10 @@ Version 6 adds over version 5:
* metadata about the pixel aspect ratio to properly support
[anamorphic](https://en.wikipedia.org/wiki/Anamorphic_widescreen) "sub"
streams.
* hashes in Blake3 rather than older SHA-1 (for file integrity checksums)
or Blake2b (for sessions).
On upgrading to this version, sessions will be wiped.
Before it is finalized, it likely will also add a schema for [object
detection](https://en.wikipedia.org/wiki/Object_detection).

View File

@ -423,7 +423,6 @@ pub struct Recording {
#[derive(Debug, Serialize)]
#[serde(rename_all="camelCase")]
pub struct VideoSampleEntry {
pub sha1: String,
pub width: u16,
pub height: u16,
pub pasp_h_spacing: u16,
@ -433,7 +432,6 @@ pub struct VideoSampleEntry {
impl VideoSampleEntry {
fn from(e: &db::VideoSampleEntry) -> Self {
Self {
sha1: base::strutil::hex(&e.sha1),
width: e.width,
height: e.height,
pasp_h_spacing: e.pasp_h_spacing,

View File

@ -76,7 +76,7 @@
//! * mdat (media data container)
//! ```
use base::{strutil, Error, ErrorKind, ResultExt, bail_t, format_err_t};
use base::{Error, ErrorKind, ResultExt, bail_t, format_err_t};
use bytes::{Buf, BytesMut};
use byteorder::{BigEndian, ByteOrder, WriteBytesExt};
use crate::body::{Chunk, BoxedError, wrap_error};
@ -89,7 +89,6 @@ use http::header::HeaderValue;
use http_serve;
use log::{debug, error, trace, warn};
use memmap;
use openssl::hash;
use parking_lot::Once;
use reffers::ARefss;
use crate::slices::{self, Slices};
@ -788,20 +787,19 @@ impl FileBuilder {
dirs_by_stream_id: Arc<::fnv::FnvHashMap<i32, Arc<dir::SampleFileDir>>>)
-> Result<File, Error> {
let mut max_end = None;
let mut etag = hash::Hasher::new(hash::MessageDigest::sha1())
.err_kind(ErrorKind::Internal)?;
etag.update(&FORMAT_VERSION[..]).err_kind(ErrorKind::Internal)?;
let mut etag = blake3::Hasher::new();
etag.update(&FORMAT_VERSION[..]);
if self.include_timestamp_subtitle_track {
etag.update(b":ts:").err_kind(ErrorKind::Internal)?;
etag.update(b":ts:");
}
if let Some(cd) = self.content_disposition.as_ref() {
etag.update(b":cd:").err_kind(ErrorKind::Internal)?;
etag.update(cd.as_bytes()).err_kind(ErrorKind::Internal)?;
etag.update(b":cd:");
etag.update(cd.as_bytes());
}
match self.type_ {
Type::Normal => {},
Type::InitSegment => etag.update(b":init:").err_kind(ErrorKind::Internal)?,
Type::MediaSegment => etag.update(b":media:").err_kind(ErrorKind::Internal)?,
Type::InitSegment => { etag.update(b":init:"); },
Type::MediaSegment => { etag.update(b":media:"); },
};
for s in &mut self.segments {
let d = &s.s.desired_range_90k;
@ -830,7 +828,7 @@ impl FileBuilder {
cursor.write_u32::<BigEndian>(s.s.open_id).err_kind(ErrorKind::Internal)?;
cursor.write_i32::<BigEndian>(d.start).err_kind(ErrorKind::Internal)?;
cursor.write_i32::<BigEndian>(d.end).err_kind(ErrorKind::Internal)?;
etag.update(cursor.into_inner()).err_kind(ErrorKind::Internal)?;
etag.update(cursor.into_inner());
}
let max_end = match max_end {
None => 0,
@ -888,7 +886,7 @@ impl FileBuilder {
debug!("slices: {:?}", self.body.slices);
let last_modified = ::std::time::UNIX_EPOCH +
::std::time::Duration::from_secs(max_end as u64);
let etag = etag.finish().err_kind(ErrorKind::Internal)?;
let etag = etag.finalize();
Ok(File(Arc::new(FileInner {
db,
dirs_by_stream_id,
@ -898,7 +896,7 @@ impl FileBuilder {
video_sample_entries: self.video_sample_entries,
initial_sample_byte_pos,
last_modified,
etag: HeaderValue::try_from(format!("\"{}\"", &strutil::hex(&etag)))
etag: HeaderValue::try_from(format!("\"{}\"", etag.to_hex().as_str()))
.expect("hex string should be valid UTF-8"),
content_disposition: self.content_disposition,
})))
@ -1588,7 +1586,7 @@ impl fmt::Debug for File {
/// to verify the output is byte-for-byte as expected.
#[cfg(test)]
mod tests {
use base::{clock::RealClocks, strutil};
use base::clock::RealClocks;
use bytes::Buf;
use byteorder::{BigEndian, ByteOrder};
use crate::stream::{self, Opener, Stream};
@ -1597,7 +1595,6 @@ mod tests {
use db::writer;
use futures::stream::TryStreamExt;
use log::info;
use openssl::hash;
use http_serve::{self, Entity};
use std::fs;
use std::ops::Range;
@ -1620,19 +1617,18 @@ mod tests {
.unwrap();
}
/// Returns the SHA-1 digest of the given `Entity`.
async fn digest<E: http_serve::Entity>(e: &E) -> hash::DigestBytes
/// Returns the Blake3 digest of the given `Entity`.
async fn digest<E: http_serve::Entity>(e: &E) -> blake3::Hash
where E::Error : ::std::fmt::Debug {
Pin::from(e.get_range(0 .. e.len()))
.try_fold(hash::Hasher::new(hash::MessageDigest::sha1()).unwrap(), |mut sha1, chunk| {
.try_fold(blake3::Hasher::new(), |mut hasher, chunk| {
let c: &[u8] = chunk.bytes();
sha1.update(c).unwrap();
futures::future::ok::<_, E::Error>(sha1)
hasher.update(c);
futures::future::ok::<_, E::Error>(hasher)
})
.await
.unwrap()
.finish()
.unwrap()
.finalize()
}
/// Information used within `BoxCursor` to describe a box on the stack.
@ -2193,9 +2189,11 @@ mod tests {
// Test the metadata. This is brittle, which is the point. Any time the digest comparison
// here fails, it can be updated, but the etag must change as well! Otherwise clients may
// combine ranges from the new format with ranges from the old format.
let sha1 = digest(&mp4).await;
assert_eq!("2ea2cb354503b9d50d028af00bddcd23d6651f28", strutil::hex(&sha1[..]));
const EXPECTED_ETAG: &'static str = "\"7b55d0bd4370712bf1a7549f6383ca51b1eb97e9\"";
let hash = digest(&mp4).await;
assert_eq!("e95f2d261cdebac5b9983abeea59e8eb053dc4efac866722544c665d9de7c49d",
hash.to_hex().as_str());
const EXPECTED_ETAG: &'static str =
"\"16d80691792326c314990b15f1f0387e1dd12119614fea3ecaeca88325f6000b\"";
assert_eq!(Some(HeaderValue::from_str(EXPECTED_ETAG).unwrap()), mp4.etag());
drop(db.syncer_channel);
db.db.lock().clear_on_flush();
@ -2214,9 +2212,11 @@ mod tests {
// Test the metadata. This is brittle, which is the point. Any time the digest comparison
// here fails, it can be updated, but the etag must change as well! Otherwise clients may
// combine ranges from the new format with ranges from the old format.
let sha1 = digest(&mp4).await;
assert_eq!("ec79a2d2362b3ae9dec18762c78c8c60932b4ff0", strutil::hex(&sha1[..]));
const EXPECTED_ETAG: &'static str = "\"f17085373bbee7d2ffc99046575a1ef28f8134e0\"";
let hash = digest(&mp4).await;
assert_eq!("77e09be8ee5ca353ca56f9a80bb7420680713c80a0831d236fac45a96aa3b3d4",
hash.to_hex().as_str());
const EXPECTED_ETAG: &'static str =
"\"932883a0d7c5e464c9f1b1a62d36db670631eee7c1eefc74deb331c1f623affb\"";
assert_eq!(Some(HeaderValue::from_str(EXPECTED_ETAG).unwrap()), mp4.etag());
drop(db.syncer_channel);
db.db.lock().clear_on_flush();
@ -2235,9 +2235,11 @@ mod tests {
// Test the metadata. This is brittle, which is the point. Any time the digest comparison
// here fails, it can be updated, but the etag must change as well! Otherwise clients may
// combine ranges from the new format with ranges from the old format.
let sha1 = digest(&mp4).await;
assert_eq!("26e5989211456a0de493e146e2cda7a89a3b485e", strutil::hex(&sha1[..]));
const EXPECTED_ETAG: &'static str = "\"c48b2819f74b090d89c27fa615ab34e445a4b322\"";
let hash = digest(&mp4).await;
assert_eq!("f9807cfc6b96a399f3a5ad62d090f55a18543a9eeb1f48d59f86564ffd9b1e84",
hash.to_hex().as_str());
const EXPECTED_ETAG: &'static str =
"\"53e9e33e28bafb6af8cee2f8b71d7751874a83a3aa7782396878b3caeacec526\"";
assert_eq!(Some(HeaderValue::from_str(EXPECTED_ETAG).unwrap()), mp4.etag());
drop(db.syncer_channel);
db.db.lock().clear_on_flush();
@ -2256,9 +2258,11 @@ mod tests {
// Test the metadata. This is brittle, which is the point. Any time the digest comparison
// here fails, it can be updated, but the etag must change as well! Otherwise clients may
// combine ranges from the new format with ranges from the old format.
let sha1 = digest(&mp4).await;
assert_eq!("d182fb5c9402ec863527b22526e152dccba82c4a", strutil::hex(&sha1[..]));
const EXPECTED_ETAG: &'static str = "\"48da7c8f9c15c318ef91ae00148356b3247b671f\"";
let hash = digest(&mp4).await;
assert_eq!("5211104e1fdfe3bbc0d7d7d479933940305ff7f23201e97308db23a022ee6339",
hash.to_hex().as_str());
const EXPECTED_ETAG: &'static str =
"\"f77e81297b5ca9d1c1dcf0d0f8eebbdea8d41b4c8af1917f9d3fe84de6e68a5b\"";
assert_eq!(Some(HeaderValue::from_str(EXPECTED_ETAG).unwrap()), mp4.etag());
drop(db.syncer_channel);
db.db.lock().clear_on_flush();

View File

@ -29,7 +29,7 @@
// along with this program. If not, see <http://www.gnu.org/licenses/>.
use base::clock::Clocks;
use base::{ErrorKind, bail_t, strutil};
use base::{ErrorKind, bail_t};
use bytes::Bytes;
use crate::body::{Body, BoxedError};
use crate::json;
@ -79,7 +79,7 @@ type BoxedFuture = Box<dyn Future<Output = Result<Response<Body>, BoxedError>> +
enum Path {
TopLevel, // "/api/"
Request, // "/api/request"
InitSegment([u8; 20], bool), // "/api/init/<sha1>.mp4{.txt}"
InitSegment(i32, bool), // "/api/init/<id>.mp4{.txt}"
Camera(Uuid), // "/api/cameras/<uuid>/"
Signals, // "/api/signals"
StreamRecordings(Uuid, db::StreamType), // "/api/cameras/<uuid>/<type>/recordings"
@ -114,11 +114,13 @@ impl Path {
} else {
(false, path)
};
if path.len() != 50 || !path.ends_with(".mp4") {
if !path.ends_with(".mp4") {
return Path::NotFound;
}
if let Ok(sha1) = strutil::dehex(&path.as_bytes()[6..46]) {
return Path::InitSegment(sha1, debug);
let id_start = "/init/".len();
let id_end = path.len() - ".mp4".len();
if let Ok(id) = i32::from_str(&path[id_start .. id_end]) {
return Path::InitSegment(id, debug);
}
return Path::NotFound;
}
@ -377,23 +379,20 @@ impl ServiceInner {
serve_json(req, &out)
}
fn init_segment(&self, sha1: [u8; 20], debug: bool, req: &Request<::hyper::Body>)
fn init_segment(&self, id: i32, debug: bool, req: &Request<::hyper::Body>)
-> ResponseResult {
let mut builder = mp4::FileBuilder::new(mp4::Type::InitSegment);
let db = self.db.lock();
for ent in db.video_sample_entries_by_id().values() {
if ent.sha1 == sha1 {
builder.append_video_sample_entry(ent.clone());
let mp4 = builder.build(self.db.clone(), self.dirs_by_stream_id.clone())
.map_err(from_base_error)?;
if debug {
return Ok(plain_response(StatusCode::OK, format!("{:#?}", mp4)));
} else {
return Ok(http_serve::serve(mp4, req));
}
}
let ent = db.video_sample_entries_by_id().get(&id)
.ok_or_else(|| not_found("no such init segment"))?;
builder.append_video_sample_entry(ent.clone());
let mp4 = builder.build(self.db.clone(), self.dirs_by_stream_id.clone())
.map_err(from_base_error)?;
if debug {
Ok(plain_response(StatusCode::OK, format!("{:#?}", mp4)))
} else {
Ok(http_serve::serve(mp4, req))
}
Err(not_found("no such init segment"))
}
fn stream_view_mp4(&self, req: &Request<::hyper::Body>, caller: Caller, uuid: Uuid,
@ -959,9 +958,7 @@ impl Service {
let mut rows = 0;
db.list_recordings_by_id(stream_id, live.recording .. live.recording+1, &mut |r| {
rows += 1;
let vse = db.video_sample_entries_by_id().get(&r.video_sample_entry_id)
.unwrap();
vse_id = Some(strutil::hex(&vse.sha1));
vse_id = Some(r.video_sample_entry_id);
start = Some(r.start);
builder.append(&db, r, live.off_90k.clone())?;
Ok(())
@ -982,14 +979,14 @@ impl Service {
X-Recording-Start: {}\r\n\
X-Recording-Id: {}.{}\r\n\
X-Time-Range: {}-{}\r\n\
X-Video-Sample-Entry-Sha1: {}\r\n\r\n",
X-Video-Sample-Entry-Id: {}\r\n\r\n",
mime_type.to_str().unwrap(),
start.0,
open_id,
live.recording,
live.off_90k.start,
live.off_90k.end,
&vse_id);
vse_id);
let mut v = /*Pin::from(*/hdr.into_bytes()/*)*/;
mp4.append_into_vec(&mut v).await?;
//let v = Pin::into_inner();
@ -1181,18 +1178,9 @@ mod tests {
let cam_uuid = Uuid::parse_str("35144640-ff1e-4619-b0d5-4c74c185741c").unwrap();
assert_eq!(Path::decode("/foo"), Path::Static);
assert_eq!(Path::decode("/api/"), Path::TopLevel);
assert_eq!(Path::decode("/api/init/07cec464126825088ea86a07eddd6a00afa71559.mp4"),
Path::InitSegment([0x07, 0xce, 0xc4, 0x64, 0x12, 0x68, 0x25, 0x08, 0x8e, 0xa8,
0x6a, 0x07, 0xed, 0xdd, 0x6a, 0x00, 0xaf, 0xa7, 0x15, 0x59],
false));
assert_eq!(Path::decode("/api/init/07cec464126825088ea86a07eddd6a00afa71559.mp4.txt"),
Path::InitSegment([0x07, 0xce, 0xc4, 0x64, 0x12, 0x68, 0x25, 0x08, 0x8e, 0xa8,
0x6a, 0x07, 0xed, 0xdd, 0x6a, 0x00, 0xaf, 0xa7, 0x15, 0x59],
true));
assert_eq!(Path::decode("/api/init/000000000000000000000000000000000000000x.mp4"),
Path::NotFound); // non-hexadigit
assert_eq!(Path::decode("/api/init/000000000000000000000000000000000000000.mp4"),
Path::NotFound); // too short
assert_eq!(Path::decode("/api/init/42.mp4"), Path::InitSegment(42, false));
assert_eq!(Path::decode("/api/init/42.mp4.txt"), Path::InitSegment(42, true));
assert_eq!(Path::decode("/api/init/x.mp4"), Path::NotFound); // non-digit
assert_eq!(Path::decode("/api/cameras/35144640-ff1e-4619-b0d5-4c74c185741c/"),
Path::Camera(cam_uuid));
assert_eq!(Path::decode("/api/cameras/asdf/"), Path::NotFound);

View File

@ -71,9 +71,6 @@ export default class Recording {
/** @const {!number} */
this.videoSamples = recordingJson.videoSamples;
/** @const {!string} */
this.videoSampleEntrySha1 = videoSampleEntryJson.sha1;
/** @const {!number} */
this.videoSampleEntryWidth = videoSampleEntryJson.width;