rustfmt everything

I want to make the project more accessible by not expecting folks to
match my idiosyncratic style. Now almost[1] everything is written
in the "standard" style, and CI enforces this.

[1] "Almost": I used #[rustfmt::skip] in a few sections where I felt
that aligning things in columns significantly improved readability.
This commit is contained in:
Scott Lamb 2021-02-16 22:15:54 -08:00
parent 64f8d38e01
commit 97678f42e4
47 changed files with 6541 additions and 3453 deletions

View File

@ -17,6 +17,8 @@ jobs:
include: include:
- rust: nightly - rust: nightly
extra_args: "--features nightly --benches" extra_args: "--features nightly --benches"
- rust: stable
extra_components: rustfmt
runs-on: ubuntu-20.04 runs-on: ubuntu-20.04
steps: steps:
- name: Checkout - name: Checkout
@ -37,8 +39,12 @@ jobs:
profile: minimal profile: minimal
toolchain: ${{ matrix.rust }} toolchain: ${{ matrix.rust }}
override: true override: true
components: ${{ matrix.extra_components }}
- name: Test - name: Test
run: cd server && cargo test ${{ matrix.extra_args }} --all run: cd server && cargo test ${{ matrix.extra_args }} --all
- name: Check formatting
if: matrix.rust == 'stable'
run: cd server && cargo fmt --all -- --check
js: js:
name: Build and lint Javascript frontend name: Build and lint Javascript frontend
runs-on: ubuntu-20.04 runs-on: ubuntu-20.04

View File

@ -35,13 +35,13 @@ use libc;
use log::warn; use log::warn;
use parking_lot::Mutex; use parking_lot::Mutex;
use std::mem; use std::mem;
use std::sync::{Arc, mpsc}; use std::sync::{mpsc, Arc};
use std::thread; use std::thread;
use std::time::Duration as StdDuration; use std::time::Duration as StdDuration;
use time::{Duration, Timespec}; use time::{Duration, Timespec};
/// Abstract interface to the system clocks. This is for testability. /// Abstract interface to the system clocks. This is for testability.
pub trait Clocks : Send + Sync + 'static { pub trait Clocks: Send + Sync + 'static {
/// Gets the current time from `CLOCK_REALTIME`. /// Gets the current time from `CLOCK_REALTIME`.
fn realtime(&self) -> Timespec; fn realtime(&self) -> Timespec;
@ -52,19 +52,29 @@ pub trait Clocks : Send + Sync + 'static {
fn sleep(&self, how_long: Duration); fn sleep(&self, how_long: Duration);
/// Calls `rcv.recv_timeout` or substitutes a test implementation. /// Calls `rcv.recv_timeout` or substitutes a test implementation.
fn recv_timeout<T>(&self, rcv: &mpsc::Receiver<T>, fn recv_timeout<T>(
timeout: StdDuration) -> Result<T, mpsc::RecvTimeoutError>; &self,
rcv: &mpsc::Receiver<T>,
timeout: StdDuration,
) -> Result<T, mpsc::RecvTimeoutError>;
} }
pub fn retry_forever<C, T, E>(clocks: &C, f: &mut dyn FnMut() -> Result<T, E>) -> T pub fn retry_forever<C, T, E>(clocks: &C, f: &mut dyn FnMut() -> Result<T, E>) -> T
where C: Clocks, E: Into<Error> { where
C: Clocks,
E: Into<Error>,
{
loop { loop {
let e = match f() { let e = match f() {
Ok(t) => return t, Ok(t) => return t,
Err(e) => e.into(), Err(e) => e.into(),
}; };
let sleep_time = Duration::seconds(1); let sleep_time = Duration::seconds(1);
warn!("sleeping for {:?} after error: {}", sleep_time, crate::error::prettify_failure(&e)); warn!(
"sleeping for {:?} after error: {}",
sleep_time,
crate::error::prettify_failure(&e)
);
clocks.sleep(sleep_time); clocks.sleep(sleep_time);
} }
} }
@ -84,8 +94,12 @@ impl RealClocks {
} }
impl Clocks for RealClocks { impl Clocks for RealClocks {
fn realtime(&self) -> Timespec { self.get(libc::CLOCK_REALTIME) } fn realtime(&self) -> Timespec {
fn monotonic(&self) -> Timespec { self.get(libc::CLOCK_MONOTONIC) } self.get(libc::CLOCK_REALTIME)
}
fn monotonic(&self) -> Timespec {
self.get(libc::CLOCK_MONOTONIC)
}
fn sleep(&self, how_long: Duration) { fn sleep(&self, how_long: Duration) {
match how_long.to_std() { match how_long.to_std() {
@ -94,8 +108,11 @@ impl Clocks for RealClocks {
}; };
} }
fn recv_timeout<T>(&self, rcv: &mpsc::Receiver<T>, fn recv_timeout<T>(
timeout: StdDuration) -> Result<T, mpsc::RecvTimeoutError> { &self,
rcv: &mpsc::Receiver<T>,
timeout: StdDuration,
) -> Result<T, mpsc::RecvTimeoutError> {
rcv.recv_timeout(timeout) rcv.recv_timeout(timeout)
} }
} }
@ -119,7 +136,11 @@ impl<'a, C: Clocks + ?Sized, S: AsRef<str>, F: FnOnce() -> S + 'a> TimerGuard<'a
} }
impl<'a, C, S, F> Drop for TimerGuard<'a, C, S, F> impl<'a, C, S, F> Drop for TimerGuard<'a, C, S, F>
where C: Clocks + ?Sized, S: AsRef<str>, F: FnOnce() -> S + 'a { where
C: Clocks + ?Sized,
S: AsRef<str>,
F: FnOnce() -> S + 'a,
{
fn drop(&mut self) { fn drop(&mut self) {
let elapsed = self.clocks.monotonic() - self.start; let elapsed = self.clocks.monotonic() - self.start;
if elapsed.num_seconds() >= 1 { if elapsed.num_seconds() >= 1 {
@ -148,8 +169,12 @@ impl SimulatedClocks {
} }
impl Clocks for SimulatedClocks { impl Clocks for SimulatedClocks {
fn realtime(&self) -> Timespec { self.0.boot + *self.0.uptime.lock() } fn realtime(&self) -> Timespec {
fn monotonic(&self) -> Timespec { Timespec::new(0, 0) + *self.0.uptime.lock() } self.0.boot + *self.0.uptime.lock()
}
fn monotonic(&self) -> Timespec {
Timespec::new(0, 0) + *self.0.uptime.lock()
}
/// Advances the clock by the specified amount without actually sleeping. /// Advances the clock by the specified amount without actually sleeping.
fn sleep(&self, how_long: Duration) { fn sleep(&self, how_long: Duration) {
@ -158,8 +183,11 @@ impl Clocks for SimulatedClocks {
} }
/// Advances the clock by the specified amount if data is not immediately available. /// Advances the clock by the specified amount if data is not immediately available.
fn recv_timeout<T>(&self, rcv: &mpsc::Receiver<T>, fn recv_timeout<T>(
timeout: StdDuration) -> Result<T, mpsc::RecvTimeoutError> { &self,
rcv: &mpsc::Receiver<T>,
timeout: StdDuration,
) -> Result<T, mpsc::RecvTimeoutError> {
let r = rcv.recv_timeout(StdDuration::new(0, 0)); let r = rcv.recv_timeout(StdDuration::new(0, 0));
if let Err(_) = r { if let Err(_) = r {
self.sleep(Duration::from_std(timeout).unwrap()); self.sleep(Duration::from_std(timeout).unwrap());

View File

@ -38,8 +38,11 @@ pub fn prettify_failure(e: &failure::Error) -> String {
write!(&mut msg, "\ncaused by: {}", cause).unwrap(); write!(&mut msg, "\ncaused by: {}", cause).unwrap();
} }
if e.backtrace().is_empty() { if e.backtrace().is_empty() {
write!(&mut msg, "\n\n(set environment variable RUST_BACKTRACE=1 to see backtraces)") write!(
.unwrap(); &mut msg,
"\n\n(set environment variable RUST_BACKTRACE=1 to see backtraces)"
)
.unwrap();
} else { } else {
write!(&mut msg, "\n\nBacktrace:\n{}", e.backtrace()).unwrap(); write!(&mut msg, "\n\nBacktrace:\n{}", e.backtrace()).unwrap();
} }
@ -73,7 +76,9 @@ impl Fail for Error {
impl From<ErrorKind> for Error { impl From<ErrorKind> for Error {
fn from(kind: ErrorKind) -> Error { fn from(kind: ErrorKind) -> Error {
Error { inner: Context::new(kind) } Error {
inner: Context::new(kind),
}
} }
} }
@ -112,6 +117,8 @@ impl fmt::Display for Error {
/// which is a nice general-purpose classification of errors. See that link for descriptions of /// which is a nice general-purpose classification of errors. See that link for descriptions of
/// each error. /// each error.
#[derive(Copy, Clone, Eq, PartialEq, Debug, Fail)] #[derive(Copy, Clone, Eq, PartialEq, Debug, Fail)]
#[non_exhaustive]
#[rustfmt::skip]
pub enum ErrorKind { pub enum ErrorKind {
#[fail(display = "Cancelled")] Cancelled, #[fail(display = "Cancelled")] Cancelled,
#[fail(display = "Unknown")] Unknown, #[fail(display = "Unknown")] Unknown,
@ -129,7 +136,6 @@ pub enum ErrorKind {
#[fail(display = "Internal")] Internal, #[fail(display = "Internal")] Internal,
#[fail(display = "Unavailable")] Unavailable, #[fail(display = "Unavailable")] Unavailable,
#[fail(display = "Data loss")] DataLoss, #[fail(display = "Data loss")] DataLoss,
#[doc(hidden)] #[fail(display = "__Nonexhaustive")] __Nonexhaustive,
} }
/// Extension methods for `Result`. /// Extension methods for `Result`.
@ -146,7 +152,10 @@ pub trait ResultExt<T, E> {
fn err_kind(self, k: ErrorKind) -> Result<T, Error>; fn err_kind(self, k: ErrorKind) -> Result<T, Error>;
} }
impl<T, E> ResultExt<T, E> for Result<T, E> where E: Into<failure::Error> { impl<T, E> ResultExt<T, E> for Result<T, E>
where
E: Into<failure::Error>,
{
fn err_kind(self, k: ErrorKind) -> Result<T, Error> { fn err_kind(self, k: ErrorKind) -> Result<T, Error> {
self.map_err(|e| e.into().context(k).into()) self.map_err(|e| e.into().context(k).into())
} }

View File

@ -29,8 +29,8 @@
// along with this program. If not, see <http://www.gnu.org/licenses/>. // along with this program. If not, see <http://www.gnu.org/licenses/>.
pub mod clock; pub mod clock;
pub mod time;
mod error; mod error;
pub mod strutil; pub mod strutil;
pub mod time;
pub use crate::error::{Error, ErrorKind, ResultExt, prettify_failure}; pub use crate::error::{prettify_failure, Error, ErrorKind, ResultExt};

View File

@ -28,12 +28,12 @@
// You should have received a copy of the GNU General Public License // You should have received a copy of the GNU General Public License
// along with this program. If not, see <http://www.gnu.org/licenses/>. // along with this program. If not, see <http://www.gnu.org/licenses/>.
use nom::IResult;
use nom::branch::alt; use nom::branch::alt;
use nom::bytes::complete::{tag, take_while1}; use nom::bytes::complete::{tag, take_while1};
use nom::character::complete::space0; use nom::character::complete::space0;
use nom::combinator::{map, map_res, opt}; use nom::combinator::{map, map_res, opt};
use nom::sequence::{delimited, tuple}; use nom::sequence::{delimited, tuple};
use nom::IResult;
use std::fmt::Write as _; use std::fmt::Write as _;
static MULTIPLIERS: [(char, u64); 4] = [ static MULTIPLIERS: [(char, u64); 4] = [
@ -48,7 +48,7 @@ static MULTIPLIERS: [(char, u64); 4] = [
pub fn encode_size(mut raw: i64) -> String { pub fn encode_size(mut raw: i64) -> String {
let mut encoded = String::new(); let mut encoded = String::new();
for &(c, n) in &MULTIPLIERS { for &(c, n) in &MULTIPLIERS {
if raw >= 1i64<<n { if raw >= 1i64 << n {
write!(&mut encoded, "{}{} ", raw >> n, c).unwrap(); write!(&mut encoded, "{}{} ", raw >> n, c).unwrap();
raw &= (1i64 << n) - 1; raw &= (1i64 << n) - 1;
} }
@ -56,7 +56,7 @@ pub fn encode_size(mut raw: i64) -> String {
if raw > 0 || encoded.len() == 0 { if raw > 0 || encoded.len() == 0 {
write!(&mut encoded, "{}", raw).unwrap(); write!(&mut encoded, "{}", raw).unwrap();
} else { } else {
encoded.pop(); // remove trailing space. encoded.pop(); // remove trailing space.
} }
encoded encoded
} }
@ -64,24 +64,24 @@ pub fn encode_size(mut raw: i64) -> String {
fn decode_sizepart(input: &str) -> IResult<&str, i64> { fn decode_sizepart(input: &str) -> IResult<&str, i64> {
map( map(
tuple(( tuple((
map_res(take_while1(|c: char| c.is_ascii_digit()), map_res(take_while1(|c: char| c.is_ascii_digit()), |input: &str| {
|input: &str| i64::from_str_radix(input, 10)), i64::from_str_radix(input, 10)
}),
opt(alt(( opt(alt((
nom::combinator::value(1<<40, tag("T")), nom::combinator::value(1 << 40, tag("T")),
nom::combinator::value(1<<30, tag("G")), nom::combinator::value(1 << 30, tag("G")),
nom::combinator::value(1<<20, tag("M")), nom::combinator::value(1 << 20, tag("M")),
nom::combinator::value(1<<10, tag("K")) nom::combinator::value(1 << 10, tag("K")),
))) ))),
)), )),
|(n, opt_unit)| n * opt_unit.unwrap_or(1) |(n, opt_unit)| n * opt_unit.unwrap_or(1),
)(input) )(input)
} }
fn decode_size_internal(input: &str) -> IResult<&str, i64> { fn decode_size_internal(input: &str) -> IResult<&str, i64> {
nom::multi::fold_many1( nom::multi::fold_many1(delimited(space0, decode_sizepart, space0), 0, |sum, i| {
delimited(space0, decode_sizepart, space0), sum + i
0, })(input)
|sum, i| sum + i)(input)
} }
/// Decodes a human-readable size as output by encode_size. /// Decodes a human-readable size as output by encode_size.
@ -95,12 +95,15 @@ pub fn decode_size(encoded: &str) -> Result<i64, ()> {
/// Returns a hex-encoded version of the input. /// Returns a hex-encoded version of the input.
pub fn hex(raw: &[u8]) -> String { pub fn hex(raw: &[u8]) -> String {
const HEX_CHARS: [u8; 16] = [b'0', b'1', b'2', b'3', b'4', b'5', b'6', b'7', #[rustfmt::skip]
b'8', b'9', b'a', b'b', b'c', b'd', b'e', b'f']; const HEX_CHARS: [u8; 16] = [
b'0', b'1', b'2', b'3', b'4', b'5', b'6', b'7',
b'8', b'9', b'a', b'b', b'c', b'd', b'e', b'f',
];
let mut hex = Vec::with_capacity(2 * raw.len()); let mut hex = Vec::with_capacity(2 * raw.len());
for b in raw { for b in raw {
hex.push(HEX_CHARS[((b & 0xf0) >> 4) as usize]); hex.push(HEX_CHARS[((b & 0xf0) >> 4) as usize]);
hex.push(HEX_CHARS[( b & 0x0f ) as usize]); hex.push(HEX_CHARS[(b & 0x0f) as usize]);
} }
unsafe { String::from_utf8_unchecked(hex) } unsafe { String::from_utf8_unchecked(hex) }
} }
@ -108,8 +111,8 @@ pub fn hex(raw: &[u8]) -> String {
/// Returns [0, 16) or error. /// Returns [0, 16) or error.
fn dehex_byte(hex_byte: u8) -> Result<u8, ()> { fn dehex_byte(hex_byte: u8) -> Result<u8, ()> {
match hex_byte { match hex_byte {
b'0' ..= b'9' => Ok(hex_byte - b'0'), b'0'..=b'9' => Ok(hex_byte - b'0'),
b'a' ..= b'f' => Ok(hex_byte - b'a' + 10), b'a'..=b'f' => Ok(hex_byte - b'a' + 10),
_ => Err(()), _ => Err(()),
} }
} }
@ -122,7 +125,7 @@ pub fn dehex(hexed: &[u8]) -> Result<[u8; 20], ()> {
} }
let mut out = [0; 20]; let mut out = [0; 20];
for i in 0..20 { for i in 0..20 {
out[i] = (dehex_byte(hexed[i<<1])? << 4) + dehex_byte(hexed[(i<<1) + 1])?; out[i] = (dehex_byte(hexed[i << 1])? << 4) + dehex_byte(hexed[(i << 1) + 1])?;
} }
Ok(out) Ok(out)
} }

View File

@ -30,13 +30,13 @@
//! Time and durations for Moonfire NVR's internal format. //! Time and durations for Moonfire NVR's internal format.
use failure::{Error, bail, format_err}; use failure::{bail, format_err, Error};
use nom::branch::alt; use nom::branch::alt;
use nom::bytes::complete::{tag, take_while_m_n}; use nom::bytes::complete::{tag, take_while_m_n};
use nom::combinator::{map, map_res, opt}; use nom::combinator::{map, map_res, opt};
use nom::sequence::{preceded, tuple}; use nom::sequence::{preceded, tuple};
use std::ops;
use std::fmt; use std::fmt;
use std::ops;
use std::str::FromStr; use std::str::FromStr;
use time; use time;
@ -50,8 +50,10 @@ pub struct Time(pub i64);
/// Returns a parser for a `len`-digit non-negative number which fits into an i32. /// Returns a parser for a `len`-digit non-negative number which fits into an i32.
fn fixed_len_num<'a>(len: usize) -> impl FnMut(&'a str) -> IResult<&'a str, i32> { fn fixed_len_num<'a>(len: usize) -> impl FnMut(&'a str) -> IResult<&'a str, i32> {
map_res(take_while_m_n(len, len, |c: char| c.is_ascii_digit()), map_res(
|input: &str| i32::from_str_radix(input, 10)) take_while_m_n(len, len, |c: char| c.is_ascii_digit()),
|input: &str| i32::from_str_radix(input, 10),
)
} }
/// Parses `YYYY-mm-dd` into pieces. /// Parses `YYYY-mm-dd` into pieces.
@ -59,7 +61,7 @@ fn parse_datepart(input: &str) -> IResult<&str, (i32, i32, i32)> {
tuple(( tuple((
fixed_len_num(4), fixed_len_num(4),
preceded(tag("-"), fixed_len_num(2)), preceded(tag("-"), fixed_len_num(2)),
preceded(tag("-"), fixed_len_num(2)) preceded(tag("-"), fixed_len_num(2)),
))(input) ))(input)
} }
@ -67,9 +69,9 @@ fn parse_datepart(input: &str) -> IResult<&str, (i32, i32, i32)> {
fn parse_timepart(input: &str) -> IResult<&str, (i32, i32, i32, i32)> { fn parse_timepart(input: &str) -> IResult<&str, (i32, i32, i32, i32)> {
let (input, (hr, _, min)) = tuple((fixed_len_num(2), tag(":"), fixed_len_num(2)))(input)?; let (input, (hr, _, min)) = tuple((fixed_len_num(2), tag(":"), fixed_len_num(2)))(input)?;
let (input, stuff) = opt(tuple(( let (input, stuff) = opt(tuple((
preceded(tag(":"), fixed_len_num(2)), preceded(tag(":"), fixed_len_num(2)),
opt(preceded(tag(":"), fixed_len_num(5))) opt(preceded(tag(":"), fixed_len_num(5))),
)))(input)?; )))(input)?;
let (sec, opt_subsec) = stuff.unwrap_or((0, None)); let (sec, opt_subsec) = stuff.unwrap_or((0, None));
Ok((input, (hr, min, sec, opt_subsec.unwrap_or(0)))) Ok((input, (hr, min, sec, opt_subsec.unwrap_or(0))))
} }
@ -77,18 +79,23 @@ fn parse_timepart(input: &str) -> IResult<&str, (i32, i32, i32, i32)> {
/// Parses `Z` (UTC) or `{+,-,}HH:MM` into a time zone offset in seconds. /// Parses `Z` (UTC) or `{+,-,}HH:MM` into a time zone offset in seconds.
fn parse_zone(input: &str) -> IResult<&str, i32> { fn parse_zone(input: &str) -> IResult<&str, i32> {
alt(( alt((
nom::combinator::value(0, tag("Z")), nom::combinator::value(0, tag("Z")),
map( map(
tuple(( tuple((
opt(nom::character::complete::one_of(&b"+-"[..])), opt(nom::character::complete::one_of(&b"+-"[..])),
fixed_len_num(2), fixed_len_num(2),
tag(":"), tag(":"),
fixed_len_num(2) fixed_len_num(2),
)), )),
|(sign, hr, _, min)| { |(sign, hr, _, min)| {
let off = hr * 3600 + min * 60; let off = hr * 3600 + min * 60;
if sign == Some('-') { off } else { -off } if sign == Some('-') {
}) off
} else {
-off
}
},
),
))(input) ))(input)
} }
@ -97,8 +104,12 @@ impl Time {
Time(tm.sec * TIME_UNITS_PER_SEC + tm.nsec as i64 * TIME_UNITS_PER_SEC / 1_000_000_000) Time(tm.sec * TIME_UNITS_PER_SEC + tm.nsec as i64 * TIME_UNITS_PER_SEC / 1_000_000_000)
} }
pub const fn min_value() -> Self { Time(i64::min_value()) } pub const fn min_value() -> Self {
pub const fn max_value() -> Self { Time(i64::max_value()) } Time(i64::min_value())
}
pub const fn max_value() -> Self {
Time(i64::max_value())
}
/// Parses a time as either 90,000ths of a second since epoch or a RFC 3339-like string. /// Parses a time as either 90,000ths of a second since epoch or a RFC 3339-like string.
/// ///
@ -112,20 +123,21 @@ impl Time {
// First try parsing as 90,000ths of a second since epoch. // First try parsing as 90,000ths of a second since epoch.
match i64::from_str(input) { match i64::from_str(input) {
Ok(i) => return Ok(Time(i)), Ok(i) => return Ok(Time(i)),
Err(_) => {}, Err(_) => {}
} }
// If that failed, parse as a time string or bust. // If that failed, parse as a time string or bust.
let (remaining, ((tm_year, tm_mon, tm_mday), opt_time, opt_zone)) = let (remaining, ((tm_year, tm_mon, tm_mday), opt_time, opt_zone)) = tuple((
tuple((parse_datepart, parse_datepart,
opt(preceded(tag("T"), parse_timepart)), opt(preceded(tag("T"), parse_timepart)),
opt(parse_zone)))(input) opt(parse_zone),
.map_err(|e| match e { ))(input)
nom::Err::Incomplete(_) => format_err!("incomplete"), .map_err(|e| match e {
nom::Err::Error(e) | nom::Err::Failure(e) => { nom::Err::Incomplete(_) => format_err!("incomplete"),
format_err!("{}", nom::error::convert_error(input, e)) nom::Err::Error(e) | nom::Err::Failure(e) => {
} format_err!("{}", nom::error::convert_error(input, e))
})?; }
})?;
if remaining != "" { if remaining != "" {
bail!("unexpected suffix {:?} following time string", remaining); bail!("unexpected suffix {:?} following time string", remaining);
} }
@ -166,32 +178,44 @@ impl Time {
} }
/// Convert to unix seconds by floor method (rounding down). /// Convert to unix seconds by floor method (rounding down).
pub fn unix_seconds(&self) -> i64 { self.0 / TIME_UNITS_PER_SEC } pub fn unix_seconds(&self) -> i64 {
self.0 / TIME_UNITS_PER_SEC
}
} }
impl std::str::FromStr for Time { impl std::str::FromStr for Time {
type Err = Error; type Err = Error;
fn from_str(s: &str) -> Result<Self, Self::Err> { Self::parse(s) } fn from_str(s: &str) -> Result<Self, Self::Err> {
Self::parse(s)
}
} }
impl ops::Sub for Time { impl ops::Sub for Time {
type Output = Duration; type Output = Duration;
fn sub(self, rhs: Time) -> Duration { Duration(self.0 - rhs.0) } fn sub(self, rhs: Time) -> Duration {
Duration(self.0 - rhs.0)
}
} }
impl ops::AddAssign<Duration> for Time { impl ops::AddAssign<Duration> for Time {
fn add_assign(&mut self, rhs: Duration) { self.0 += rhs.0 } fn add_assign(&mut self, rhs: Duration) {
self.0 += rhs.0
}
} }
impl ops::Add<Duration> for Time { impl ops::Add<Duration> for Time {
type Output = Time; type Output = Time;
fn add(self, rhs: Duration) -> Time { Time(self.0 + rhs.0) } fn add(self, rhs: Duration) -> Time {
Time(self.0 + rhs.0)
}
} }
impl ops::Sub<Duration> for Time { impl ops::Sub<Duration> for Time {
type Output = Time; type Output = Time;
fn sub(self, rhs: Duration) -> Time { Time(self.0 - rhs.0) } fn sub(self, rhs: Duration) -> Time {
Time(self.0 - rhs.0)
}
} }
impl fmt::Debug for Time { impl fmt::Debug for Time {
@ -203,11 +227,20 @@ impl fmt::Debug for Time {
impl fmt::Display for Time { impl fmt::Display for Time {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
let tm = time::at(time::Timespec{sec: self.0 / TIME_UNITS_PER_SEC, nsec: 0}); let tm = time::at(time::Timespec {
sec: self.0 / TIME_UNITS_PER_SEC,
nsec: 0,
});
let zone_minutes = tm.tm_utcoff.abs() / 60; let zone_minutes = tm.tm_utcoff.abs() / 60;
write!(f, "{}:{:05}{}{:02}:{:02}", tm.strftime("%FT%T").or_else(|_| Err(fmt::Error))?, write!(
self.0 % TIME_UNITS_PER_SEC, f,
if tm.tm_utcoff > 0 { '+' } else { '-' }, zone_minutes / 60, zone_minutes % 60) "{}:{:05}{}{:02}:{:02}",
tm.strftime("%FT%T").or_else(|_| Err(fmt::Error))?,
self.0 % TIME_UNITS_PER_SEC,
if tm.tm_utcoff > 0 { '+' } else { '-' },
zone_minutes / 60,
zone_minutes % 60
)
} }
} }
@ -242,18 +275,33 @@ impl fmt::Display for Duration {
false false
}; };
if hours > 0 { if hours > 0 {
write!(f, "{}{} hour{}", if have_written { " " } else { "" }, write!(
hours, if hours == 1 { "" } else { "s" })?; f,
"{}{} hour{}",
if have_written { " " } else { "" },
hours,
if hours == 1 { "" } else { "s" }
)?;
have_written = true; have_written = true;
} }
if minutes > 0 { if minutes > 0 {
write!(f, "{}{} minute{}", if have_written { " " } else { "" }, write!(
minutes, if minutes == 1 { "" } else { "s" })?; f,
"{}{} minute{}",
if have_written { " " } else { "" },
minutes,
if minutes == 1 { "" } else { "s" }
)?;
have_written = true; have_written = true;
} }
if seconds > 0 || !have_written { if seconds > 0 || !have_written {
write!(f, "{}{} second{}", if have_written { " " } else { "" }, write!(
seconds, if seconds == 1 { "" } else { "s" })?; f,
"{}{} second{}",
if have_written { " " } else { "" },
seconds,
if seconds == 1 { "" } else { "s" }
)?;
} }
Ok(()) Ok(())
} }
@ -261,15 +309,21 @@ impl fmt::Display for Duration {
impl ops::Add for Duration { impl ops::Add for Duration {
type Output = Duration; type Output = Duration;
fn add(self, rhs: Duration) -> Duration { Duration(self.0 + rhs.0) } fn add(self, rhs: Duration) -> Duration {
Duration(self.0 + rhs.0)
}
} }
impl ops::AddAssign for Duration { impl ops::AddAssign for Duration {
fn add_assign(&mut self, rhs: Duration) { self.0 += rhs.0 } fn add_assign(&mut self, rhs: Duration) {
self.0 += rhs.0
}
} }
impl ops::SubAssign for Duration { impl ops::SubAssign for Duration {
fn sub_assign(&mut self, rhs: Duration) { self.0 -= rhs.0 } fn sub_assign(&mut self, rhs: Duration) {
self.0 -= rhs.0
}
} }
#[cfg(test)] #[cfg(test)]
@ -280,17 +334,18 @@ mod tests {
fn test_parse_time() { fn test_parse_time() {
std::env::set_var("TZ", "America/Los_Angeles"); std::env::set_var("TZ", "America/Los_Angeles");
time::tzset(); time::tzset();
#[rustfmt::skip]
let tests = &[ let tests = &[
("2006-01-02T15:04:05-07:00", 102261550050000), ("2006-01-02T15:04:05-07:00", 102261550050000),
("2006-01-02T15:04:05:00001-07:00", 102261550050001), ("2006-01-02T15:04:05:00001-07:00", 102261550050001),
("2006-01-02T15:04:05-08:00", 102261874050000), ("2006-01-02T15:04:05-08:00", 102261874050000),
("2006-01-02T15:04:05", 102261874050000), // implied -08:00 ("2006-01-02T15:04:05", 102261874050000), // implied -08:00
("2006-01-02T15:04", 102261873600000), // implied -08:00 ("2006-01-02T15:04", 102261873600000), // implied -08:00
("2006-01-02T15:04:05:00001", 102261874050001), // implied -08:00 ("2006-01-02T15:04:05:00001", 102261874050001), // implied -08:00
("2006-01-02T15:04:05-00:00", 102259282050000), ("2006-01-02T15:04:05-00:00", 102259282050000),
("2006-01-02T15:04:05Z", 102259282050000), ("2006-01-02T15:04:05Z", 102259282050000),
("2006-01-02-08:00", 102256992000000), // implied -08:00 ("2006-01-02-08:00", 102256992000000), // implied -08:00
("2006-01-02", 102256992000000), // implied -08:00 ("2006-01-02", 102256992000000), // implied -08:00
("2006-01-02Z", 102254400000000), ("2006-01-02Z", 102254400000000),
("102261550050000", 102261550050000), ("102261550050000", 102261550050000),
]; ];
@ -303,7 +358,10 @@ mod tests {
fn test_format_time() { fn test_format_time() {
std::env::set_var("TZ", "America/Los_Angeles"); std::env::set_var("TZ", "America/Los_Angeles");
time::tzset(); time::tzset();
assert_eq!("2006-01-02T15:04:05:00000-08:00", format!("{}", Time(102261874050000))); assert_eq!(
"2006-01-02T15:04:05:00000-08:00",
format!("{}", Time(102261874050000))
);
} }
#[test] #[test]

View File

@ -28,17 +28,17 @@
// You should have received a copy of the GNU General Public License // You should have received a copy of the GNU General Public License
// along with this program. If not, see <http://www.gnu.org/licenses/>. // along with this program. If not, see <http://www.gnu.org/licenses/>.
use log::info;
use base::strutil;
use crate::schema::Permissions; use crate::schema::Permissions;
use failure::{Error, bail, format_err}; use base::strutil;
use failure::{bail, format_err, Error};
use fnv::FnvHashMap; use fnv::FnvHashMap;
use lazy_static::lazy_static; use lazy_static::lazy_static;
use libpasta; use libpasta;
use log::info;
use parking_lot::Mutex; use parking_lot::Mutex;
use protobuf::Message; use protobuf::Message;
use ring::rand::{SecureRandom, SystemRandom}; use ring::rand::{SecureRandom, SystemRandom};
use rusqlite::{Connection, Transaction, params}; use rusqlite::{params, Connection, Transaction};
use std::collections::BTreeMap; use std::collections::BTreeMap;
use std::fmt; use std::fmt;
use std::net::IpAddr; use std::net::IpAddr;
@ -54,8 +54,9 @@ lazy_static! {
/// See also <https://github.com/libpasta/libpasta/issues/9>. /// See also <https://github.com/libpasta/libpasta/issues/9>.
/// Call via `testutil::init()`. /// Call via `testutil::init()`.
pub(crate) fn set_test_config() { pub(crate) fn set_test_config() {
*PASTA_CONFIG.lock() = *PASTA_CONFIG.lock() = Arc::new(libpasta::Config::with_primitive(
Arc::new(libpasta::Config::with_primitive(libpasta::primitives::Bcrypt::new(2))); libpasta::primitives::Bcrypt::new(2),
));
} }
enum UserFlag { enum UserFlag {
@ -91,8 +92,12 @@ impl User {
} }
} }
pub fn has_password(&self) -> bool { self.password_hash.is_some() } pub fn has_password(&self) -> bool {
fn disabled(&self) -> bool { (self.flags & UserFlag::Disabled as i32) != 0 } self.password_hash.is_some()
}
fn disabled(&self) -> bool {
(self.flags & UserFlag::Disabled as i32) != 0
}
} }
/// A change to a user. /// A change to a user.
@ -175,20 +180,18 @@ impl rusqlite::types::FromSql for FromSqlIpAddr {
use rusqlite::types::ValueRef; use rusqlite::types::ValueRef;
match value { match value {
ValueRef::Null => Ok(FromSqlIpAddr(None)), ValueRef::Null => Ok(FromSqlIpAddr(None)),
ValueRef::Blob(ref b) => { ValueRef::Blob(ref b) => match b.len() {
match b.len() { 4 => {
4 => { let mut buf = [0u8; 4];
let mut buf = [0u8; 4]; buf.copy_from_slice(b);
buf.copy_from_slice(b); Ok(FromSqlIpAddr(Some(buf.into())))
Ok(FromSqlIpAddr(Some(buf.into())))
},
16 => {
let mut buf = [0u8; 16];
buf.copy_from_slice(b);
Ok(FromSqlIpAddr(Some(buf.into())))
},
_ => Err(rusqlite::types::FromSqlError::InvalidType),
} }
16 => {
let mut buf = [0u8; 16];
buf.copy_from_slice(b);
Ok(FromSqlIpAddr(Some(buf.into())))
}
_ => Err(rusqlite::types::FromSqlError::InvalidType),
}, },
_ => Err(rusqlite::types::FromSqlError::InvalidType), _ => Err(rusqlite::types::FromSqlError::InvalidType),
} }
@ -227,7 +230,7 @@ pub enum RevocationReason {
#[derive(Debug, Default)] #[derive(Debug, Default)]
pub struct Session { pub struct Session {
user_id: i32, user_id: i32,
flags: i32, // bitmask of SessionFlag enum values flags: i32, // bitmask of SessionFlag enum values
domain: Option<Vec<u8>>, domain: Option<Vec<u8>>,
description: Option<String>, description: Option<String>,
seed: Seed, seed: Seed,
@ -236,7 +239,7 @@ pub struct Session {
creation: Request, creation: Request,
revocation: Request, revocation: Request,
revocation_reason: Option<i32>, // see RevocationReason enum revocation_reason: Option<i32>, // see RevocationReason enum
revocation_reason_detail: Option<String>, revocation_reason_detail: Option<String>,
pub permissions: Permissions, pub permissions: Permissions,
@ -259,7 +262,9 @@ impl Session {
pub struct RawSessionId([u8; 48]); pub struct RawSessionId([u8; 48]);
impl RawSessionId { impl RawSessionId {
pub fn new() -> Self { RawSessionId([0u8; 48]) } pub fn new() -> Self {
RawSessionId([0u8; 48])
}
pub fn decode_base64(input: &[u8]) -> Result<Self, Error> { pub fn decode_base64(input: &[u8]) -> Result<Self, Error> {
let mut s = RawSessionId::new(); let mut s = RawSessionId::new();
@ -279,11 +284,15 @@ impl RawSessionId {
} }
impl AsRef<[u8]> for RawSessionId { impl AsRef<[u8]> for RawSessionId {
fn as_ref(&self) -> &[u8] { &self.0[..] } fn as_ref(&self) -> &[u8] {
&self.0[..]
}
} }
impl AsMut<[u8]> for RawSessionId { impl AsMut<[u8]> for RawSessionId {
fn as_mut(&mut self) -> &mut [u8] { &mut self.0[..] } fn as_mut(&mut self) -> &mut [u8] {
&mut self.0[..]
}
} }
impl fmt::Debug for RawSessionId { impl fmt::Debug for RawSessionId {
@ -319,7 +328,11 @@ impl fmt::Debug for SessionHash {
fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> {
let mut buf = [0; 32]; let mut buf = [0; 32];
self.encode_base64(&mut buf); self.encode_base64(&mut buf);
write!(f, "SessionHash(\"{}\")", ::std::str::from_utf8(&buf[..]).expect("base64 is UTF-8")) write!(
f,
"SessionHash(\"{}\")",
::std::str::from_utf8(&buf[..]).expect("base64 is UTF-8")
)
} }
} }
@ -330,8 +343,9 @@ impl rusqlite::types::FromSql for Seed {
fn column_result(value: rusqlite::types::ValueRef) -> rusqlite::types::FromSqlResult<Self> { fn column_result(value: rusqlite::types::ValueRef) -> rusqlite::types::FromSqlResult<Self> {
let b = value.as_blob()?; let b = value.as_blob()?;
if b.len() != 32 { if b.len() != 32 {
return Err(rusqlite::types::FromSqlError::Other( return Err(rusqlite::types::FromSqlError::Other(Box::new(
Box::new(format_err!("expected a 32-byte seed").compat()))); format_err!("expected a 32-byte seed").compat(),
)));
} }
let mut s = Seed::default(); let mut s = Seed::default();
s.0.copy_from_slice(b); s.0.copy_from_slice(b);
@ -363,7 +377,8 @@ impl State {
sessions: FnvHashMap::default(), sessions: FnvHashMap::default(),
rand: ring::rand::SystemRandom::new(), rand: ring::rand::SystemRandom::new(),
}; };
let mut stmt = conn.prepare(r#" let mut stmt = conn.prepare(
r#"
select select
id, id,
username, username,
@ -375,24 +390,28 @@ impl State {
permissions permissions
from from
user user
"#)?; "#,
)?;
let mut rows = stmt.query(params![])?; let mut rows = stmt.query(params![])?;
while let Some(row) = rows.next()? { while let Some(row) = rows.next()? {
let id = row.get(0)?; let id = row.get(0)?;
let name: String = row.get(1)?; let name: String = row.get(1)?;
let mut permissions = Permissions::new(); let mut permissions = Permissions::new();
permissions.merge_from_bytes(row.get_raw_checked(7)?.as_blob()?)?; permissions.merge_from_bytes(row.get_raw_checked(7)?.as_blob()?)?;
state.users_by_id.insert(id, User { state.users_by_id.insert(
id, id,
username: name.clone(), User {
flags: row.get(2)?, id,
password_hash: row.get(3)?, username: name.clone(),
password_id: row.get(4)?, flags: row.get(2)?,
password_failure_count: row.get(5)?, password_hash: row.get(3)?,
unix_uid: row.get(6)?, password_id: row.get(4)?,
dirty: false, password_failure_count: row.get(5)?,
permissions, unix_uid: row.get(6)?,
}); dirty: false,
permissions,
},
);
state.users_by_name.insert(name, id); state.users_by_name.insert(name, id);
} }
Ok(state) Ok(state)
@ -406,11 +425,18 @@ impl State {
} }
} }
pub fn users_by_id(&self) -> &BTreeMap<i32, User> { &self.users_by_id } pub fn users_by_id(&self) -> &BTreeMap<i32, User> {
&self.users_by_id
}
fn update_user(&mut self, conn: &Connection, id: i32, change: UserChange) fn update_user(
-> Result<&User, Error> { &mut self,
let mut stmt = conn.prepare_cached(r#" conn: &Connection,
id: i32,
change: UserChange,
) -> Result<&User, Error> {
let mut stmt = conn.prepare_cached(
r#"
update user update user
set set
username = :username, username = :username,
@ -422,7 +448,8 @@ impl State {
permissions = :permissions permissions = :permissions
where where
id = :id id = :id
"#)?; "#,
)?;
let e = self.users_by_id.entry(id); let e = self.users_by_id.entry(id);
let e = match e { let e = match e {
::std::collections::btree_map::Entry::Vacant(_) => panic!("missing uid {}!", id), ::std::collections::btree_map::Entry::Vacant(_) => panic!("missing uid {}!", id),
@ -433,10 +460,13 @@ impl State {
None => { None => {
let u = e.get(); let u = e.get();
(&u.password_hash, u.password_id, u.password_failure_count) (&u.password_hash, u.password_id, u.password_failure_count)
}, }
Some(h) => (h, e.get().password_id + 1, 0), Some(h) => (h, e.get().password_id + 1, 0),
}; };
let permissions = change.permissions.write_to_bytes().expect("proto3->vec is infallible"); let permissions = change
.permissions
.write_to_bytes()
.expect("proto3->vec is infallible");
stmt.execute_named(&[ stmt.execute_named(&[
(":username", &&change.username[..]), (":username", &&change.username[..]),
(":password_hash", phash), (":password_hash", phash),
@ -462,12 +492,17 @@ impl State {
} }
fn add_user(&mut self, conn: &Connection, change: UserChange) -> Result<&User, Error> { fn add_user(&mut self, conn: &Connection, change: UserChange) -> Result<&User, Error> {
let mut stmt = conn.prepare_cached(r#" let mut stmt = conn.prepare_cached(
r#"
insert into user (username, password_hash, flags, unix_uid, permissions) insert into user (username, password_hash, flags, unix_uid, permissions)
values (:username, :password_hash, :flags, :unix_uid, :permissions) values (:username, :password_hash, :flags, :unix_uid, :permissions)
"#)?; "#,
)?;
let password_hash = change.set_password_hash.unwrap_or(None); let password_hash = change.set_password_hash.unwrap_or(None);
let permissions = change.permissions.write_to_bytes().expect("proto3->vec is infallible"); let permissions = change
.permissions
.write_to_bytes()
.expect("proto3->vec is infallible");
stmt.execute_named(&[ stmt.execute_named(&[
(":username", &&change.username[..]), (":username", &&change.username[..]),
(":password_hash", &password_hash), (":password_hash", &password_hash),
@ -512,17 +547,30 @@ impl State {
} }
pub fn get_user(&self, username: &str) -> Option<&User> { pub fn get_user(&self, username: &str) -> Option<&User> {
self.users_by_name self.users_by_name.get(username).map(|id| {
.get(username) self.users_by_id
.map(|id| self.users_by_id.get(id).expect("users_by_name implies users_by_id")) .get(id)
.expect("users_by_name implies users_by_id")
})
} }
pub fn login_by_password(&mut self, conn: &Connection, req: Request, username: &str, pub fn login_by_password(
password: String, domain: Option<Vec<u8>>, session_flags: i32) &mut self,
-> Result<(RawSessionId, &Session), Error> { conn: &Connection,
let id = self.users_by_name.get(username) req: Request,
username: &str,
password: String,
domain: Option<Vec<u8>>,
session_flags: i32,
) -> Result<(RawSessionId, &Session), Error> {
let id = self
.users_by_name
.get(username)
.ok_or_else(|| format_err!("no such user {:?}", username))?; .ok_or_else(|| format_err!("no such user {:?}", username))?;
let u = self.users_by_id.get_mut(id).expect("users_by_name implies users_by_id"); let u = self
.users_by_id
.get_mut(id)
.expect("users_by_name implies users_by_id");
if u.disabled() { if u.disabled() {
bail!("user {:?} is disabled", username); bail!("user {:?} is disabled", username);
} }
@ -537,7 +585,7 @@ impl State {
u.dirty = true; u.dirty = true;
u.password_failure_count += 1; u.password_failure_count += 1;
bail!("incorrect password for user {:?}", username); bail!("incorrect password for user {:?}", username);
}, }
libpasta::HashUpdate::Verified(new_pwd) => new_pwd, libpasta::HashUpdate::Verified(new_pwd) => new_pwd,
} }
}; };
@ -546,34 +594,67 @@ impl State {
u.dirty = true; u.dirty = true;
} }
let password_id = u.password_id; let password_id = u.password_id;
State::make_session_int(&self.rand, conn, req, u, domain, Some(password_id), session_flags, State::make_session_int(
&mut self.sessions, u.permissions.clone()) &self.rand,
conn,
req,
u,
domain,
Some(password_id),
session_flags,
&mut self.sessions,
u.permissions.clone(),
)
} }
/// Makes a session directly (no password required). /// Makes a session directly (no password required).
pub fn make_session<'s>(&'s mut self, conn: &Connection, creation: Request, uid: i32, pub fn make_session<'s>(
domain: Option<Vec<u8>>, flags: i32, permissions: Permissions) &'s mut self,
-> Result<(RawSessionId, &'s Session), Error> { conn: &Connection,
let u = self.users_by_id.get_mut(&uid).ok_or_else(|| format_err!("no such uid {:?}", uid))?; creation: Request,
uid: i32,
domain: Option<Vec<u8>>,
flags: i32,
permissions: Permissions,
) -> Result<(RawSessionId, &'s Session), Error> {
let u = self
.users_by_id
.get_mut(&uid)
.ok_or_else(|| format_err!("no such uid {:?}", uid))?;
if u.disabled() { if u.disabled() {
bail!("user is disabled"); bail!("user is disabled");
} }
State::make_session_int(&self.rand, conn, creation, u, domain, None, flags, State::make_session_int(
&mut self.sessions, permissions) &self.rand,
conn,
creation,
u,
domain,
None,
flags,
&mut self.sessions,
permissions,
)
} }
fn make_session_int<'s>(rand: &SystemRandom, conn: &Connection, creation: Request, fn make_session_int<'s>(
user: &mut User, domain: Option<Vec<u8>>, rand: &SystemRandom,
creation_password_id: Option<i32>, flags: i32, conn: &Connection,
sessions: &'s mut FnvHashMap<SessionHash, Session>, creation: Request,
permissions: Permissions) user: &mut User,
-> Result<(RawSessionId, &'s Session), Error> { domain: Option<Vec<u8>>,
creation_password_id: Option<i32>,
flags: i32,
sessions: &'s mut FnvHashMap<SessionHash, Session>,
permissions: Permissions,
) -> Result<(RawSessionId, &'s Session), Error> {
let mut session_id = RawSessionId::new(); let mut session_id = RawSessionId::new();
rand.fill(&mut session_id.0).unwrap(); rand.fill(&mut session_id.0).unwrap();
let mut seed = [0u8; 32]; let mut seed = [0u8; 32];
rand.fill(&mut seed).unwrap(); rand.fill(&mut seed).unwrap();
let hash = session_id.hash(); let hash = session_id.hash();
let mut stmt = conn.prepare_cached(r#" let mut stmt = conn.prepare_cached(
r#"
insert into user_session (session_id_hash, user_id, seed, flags, domain, insert into user_session (session_id_hash, user_id, seed, flags, domain,
creation_password_id, creation_time_sec, creation_password_id, creation_time_sec,
creation_user_agent, creation_peer_addr, creation_user_agent, creation_peer_addr,
@ -582,10 +663,13 @@ impl State {
:creation_password_id, :creation_time_sec, :creation_password_id, :creation_time_sec,
:creation_user_agent, :creation_peer_addr, :creation_user_agent, :creation_peer_addr,
:permissions) :permissions)
"#)?; "#,
)?;
let addr = creation.addr_buf(); let addr = creation.addr_buf();
let addr: Option<&[u8]> = addr.as_ref().map(|a| a.as_ref()); let addr: Option<&[u8]> = addr.as_ref().map(|a| a.as_ref());
let permissions_blob = permissions.write_to_bytes().expect("proto3->vec is infallible"); let permissions_blob = permissions
.write_to_bytes()
.expect("proto3->vec is infallible");
stmt.execute_named(&[ stmt.execute_named(&[
(":session_id_hash", &&hash.0[..]), (":session_id_hash", &&hash.0[..]),
(":user_id", &user.id), (":user_id", &user.id),
@ -615,8 +699,12 @@ impl State {
Ok((session_id, session)) Ok((session_id, session))
} }
pub fn authenticate_session(&mut self, conn: &Connection, req: Request, hash: &SessionHash) pub fn authenticate_session(
-> Result<(&Session, &User), Error> { &mut self,
conn: &Connection,
req: Request,
hash: &SessionHash,
) -> Result<(&Session, &User), Error> {
let s = match self.sessions.entry(*hash) { let s = match self.sessions.entry(*hash) {
::std::collections::hash_map::Entry::Occupied(e) => e.into_mut(), ::std::collections::hash_map::Entry::Occupied(e) => e.into_mut(),
::std::collections::hash_map::Entry::Vacant(e) => e.insert(lookup_session(conn, hash)?), ::std::collections::hash_map::Entry::Vacant(e) => e.insert(lookup_session(conn, hash)?),
@ -637,15 +725,21 @@ impl State {
Ok((s, u)) Ok((s, u))
} }
pub fn revoke_session(&mut self, conn: &Connection, reason: RevocationReason, pub fn revoke_session(
detail: Option<String>, req: Request, hash: &SessionHash) &mut self,
-> Result<(), Error> { conn: &Connection,
reason: RevocationReason,
detail: Option<String>,
req: Request,
hash: &SessionHash,
) -> Result<(), Error> {
let s = match self.sessions.entry(*hash) { let s = match self.sessions.entry(*hash) {
::std::collections::hash_map::Entry::Occupied(e) => e.into_mut(), ::std::collections::hash_map::Entry::Occupied(e) => e.into_mut(),
::std::collections::hash_map::Entry::Vacant(e) => e.insert(lookup_session(conn, hash)?), ::std::collections::hash_map::Entry::Vacant(e) => e.insert(lookup_session(conn, hash)?),
}; };
if s.revocation_reason.is_none() { if s.revocation_reason.is_none() {
let mut stmt = conn.prepare(r#" let mut stmt = conn.prepare(
r#"
update user_session update user_session
set set
revocation_time_sec = ?, revocation_time_sec = ?,
@ -655,7 +749,8 @@ impl State {
revocation_reason_detail = ? revocation_reason_detail = ?
where where
session_id_hash = ? session_id_hash = ?
"#)?; "#,
)?;
let addr = req.addr_buf(); let addr = req.addr_buf();
let addr: Option<&[u8]> = addr.as_ref().map(|a| a.as_ref()); let addr: Option<&[u8]> = addr.as_ref().map(|a| a.as_ref());
stmt.execute(params![ stmt.execute(params![
@ -677,15 +772,18 @@ impl State {
/// The caller is expected to call `post_flush` afterward if the transaction is /// The caller is expected to call `post_flush` afterward if the transaction is
/// successfully committed. /// successfully committed.
pub fn flush(&self, tx: &Transaction) -> Result<(), Error> { pub fn flush(&self, tx: &Transaction) -> Result<(), Error> {
let mut u_stmt = tx.prepare(r#" let mut u_stmt = tx.prepare(
r#"
update user update user
set set
password_failure_count = :password_failure_count, password_failure_count = :password_failure_count,
password_hash = :password_hash password_hash = :password_hash
where where
id = :id id = :id
"#)?; "#,
let mut s_stmt = tx.prepare(r#" )?;
let mut s_stmt = tx.prepare(
r#"
update user_session update user_session
set set
last_use_time_sec = :last_use_time_sec, last_use_time_sec = :last_use_time_sec,
@ -694,12 +792,16 @@ impl State {
use_count = :use_count use_count = :use_count
where where
session_id_hash = :hash session_id_hash = :hash
"#)?; "#,
)?;
for (&id, u) in &self.users_by_id { for (&id, u) in &self.users_by_id {
if !u.dirty { if !u.dirty {
continue; continue;
} }
info!("flushing user with hash: {}", u.password_hash.as_ref().unwrap()); info!(
"flushing user with hash: {}",
u.password_hash.as_ref().unwrap()
);
u_stmt.execute_named(&[ u_stmt.execute_named(&[
(":password_failure_count", &u.password_failure_count), (":password_failure_count", &u.password_failure_count),
(":password_hash", &u.password_hash), (":password_hash", &u.password_hash),
@ -736,7 +838,8 @@ impl State {
} }
fn lookup_session(conn: &Connection, hash: &SessionHash) -> Result<Session, Error> { fn lookup_session(conn: &Connection, hash: &SessionHash) -> Result<Session, Error> {
let mut stmt = conn.prepare_cached(r#" let mut stmt = conn.prepare_cached(
r#"
select select
user_id, user_id,
seed, seed,
@ -761,7 +864,8 @@ fn lookup_session(conn: &Connection, hash: &SessionHash) -> Result<Session, Erro
user_session user_session
where where
session_id_hash = ? session_id_hash = ?
"#)?; "#,
)?;
let mut rows = stmt.query(params![&hash.0[..]])?; let mut rows = stmt.query(params![&hash.0[..]])?;
let row = rows.next()?.ok_or_else(|| format_err!("no such session"))?; let row = rows.next()?.ok_or_else(|| format_err!("no such session"))?;
let creation_addr: FromSqlIpAddr = row.get(8)?; let creation_addr: FromSqlIpAddr = row.get(8)?;
@ -801,10 +905,10 @@ fn lookup_session(conn: &Connection, hash: &SessionHash) -> Result<Session, Erro
#[cfg(test)] #[cfg(test)]
mod tests { mod tests {
use crate::db;
use rusqlite::Connection;
use super::*; use super::*;
use crate::db;
use crate::testutil; use crate::testutil;
use rusqlite::Connection;
#[test] #[test]
fn open_empty_db() { fn open_empty_db() {
@ -823,43 +927,82 @@ mod tests {
let mut state = State::init(&conn).unwrap(); let mut state = State::init(&conn).unwrap();
let req = Request { let req = Request {
when_sec: Some(42), when_sec: Some(42),
addr: Some(::std::net::IpAddr::V4(::std::net::Ipv4Addr::new(127, 0, 0, 1))), addr: Some(::std::net::IpAddr::V4(::std::net::Ipv4Addr::new(
127, 0, 0, 1,
))),
user_agent: Some(b"some ua".to_vec()), user_agent: Some(b"some ua".to_vec()),
}; };
let (uid, mut c) = { let (uid, mut c) = {
let u = state.apply(&conn, UserChange::add_user("slamb".to_owned())).unwrap(); let u = state
.apply(&conn, UserChange::add_user("slamb".to_owned()))
.unwrap();
(u.id, u.change()) (u.id, u.change())
}; };
let e = state.login_by_password(&conn, req.clone(), "slamb", "hunter2".to_owned(), let e = state
Some(b"nvr.example.com".to_vec()), 0).unwrap_err(); .login_by_password(
&conn,
req.clone(),
"slamb",
"hunter2".to_owned(),
Some(b"nvr.example.com".to_vec()),
0,
)
.unwrap_err();
assert_eq!(format!("{}", e), "no password set for user \"slamb\""); assert_eq!(format!("{}", e), "no password set for user \"slamb\"");
c.set_password("hunter2".to_owned()); c.set_password("hunter2".to_owned());
state.apply(&conn, c).unwrap(); state.apply(&conn, c).unwrap();
let e = state.login_by_password(&conn, req.clone(), "slamb", let e = state
"hunter3".to_owned(), .login_by_password(
Some(b"nvr.example.com".to_vec()), 0).unwrap_err(); &conn,
req.clone(),
"slamb",
"hunter3".to_owned(),
Some(b"nvr.example.com".to_vec()),
0,
)
.unwrap_err();
assert_eq!(format!("{}", e), "incorrect password for user \"slamb\""); assert_eq!(format!("{}", e), "incorrect password for user \"slamb\"");
let sid = { let sid = {
let (sid, s) = state.login_by_password(&conn, req.clone(), "slamb", let (sid, s) = state
"hunter2".to_owned(), .login_by_password(
Some(b"nvr.example.com".to_vec()), 0).unwrap(); &conn,
req.clone(),
"slamb",
"hunter2".to_owned(),
Some(b"nvr.example.com".to_vec()),
0,
)
.unwrap();
assert_eq!(s.user_id, uid); assert_eq!(s.user_id, uid);
sid sid
}; };
{ {
let (_, u) = state.authenticate_session(&conn, req.clone(), &sid.hash()).unwrap(); let (_, u) = state
.authenticate_session(&conn, req.clone(), &sid.hash())
.unwrap();
assert_eq!(u.id, uid); assert_eq!(u.id, uid);
} }
state.revoke_session(&conn, RevocationReason::LoggedOut, None, req.clone(), state
&sid.hash()).unwrap(); .revoke_session(
let e = state.authenticate_session(&conn, req.clone(), &sid.hash()).unwrap_err(); &conn,
RevocationReason::LoggedOut,
None,
req.clone(),
&sid.hash(),
)
.unwrap();
let e = state
.authenticate_session(&conn, req.clone(), &sid.hash())
.unwrap_err();
assert_eq!(format!("{}", e), "session is no longer valid (reason=1)"); assert_eq!(format!("{}", e), "session is no longer valid (reason=1)");
// Everything should persist across reload. // Everything should persist across reload.
drop(state); drop(state);
let mut state = State::init(&conn).unwrap(); let mut state = State::init(&conn).unwrap();
let e = state.authenticate_session(&conn, req, &sid.hash()).unwrap_err(); let e = state
.authenticate_session(&conn, req, &sid.hash())
.unwrap_err();
assert_eq!(format!("{}", e), "session is no longer valid (reason=1)"); assert_eq!(format!("{}", e), "session is no longer valid (reason=1)");
} }
@ -871,7 +1014,9 @@ mod tests {
let mut state = State::init(&conn).unwrap(); let mut state = State::init(&conn).unwrap();
let req = Request { let req = Request {
when_sec: Some(42), when_sec: Some(42),
addr: Some(::std::net::IpAddr::V4(::std::net::Ipv4Addr::new(127, 0, 0, 1))), addr: Some(::std::net::IpAddr::V4(::std::net::Ipv4Addr::new(
127, 0, 0, 1,
))),
user_agent: Some(b"some ua".to_vec()), user_agent: Some(b"some ua".to_vec()),
}; };
{ {
@ -879,25 +1024,43 @@ mod tests {
c.set_password("hunter2".to_owned()); c.set_password("hunter2".to_owned());
state.apply(&conn, c).unwrap(); state.apply(&conn, c).unwrap();
}; };
let sid = state.login_by_password(&conn, req.clone(), "slamb", let sid = state
"hunter2".to_owned(), .login_by_password(
Some(b"nvr.example.com".to_vec()), 0).unwrap().0; &conn,
state.authenticate_session(&conn, req.clone(), &sid.hash()).unwrap(); req.clone(),
"slamb",
"hunter2".to_owned(),
Some(b"nvr.example.com".to_vec()),
0,
)
.unwrap()
.0;
state
.authenticate_session(&conn, req.clone(), &sid.hash())
.unwrap();
// Reload. // Reload.
drop(state); drop(state);
let mut state = State::init(&conn).unwrap(); let mut state = State::init(&conn).unwrap();
state.revoke_session(&conn, RevocationReason::LoggedOut, None, req.clone(), state
&sid.hash()).unwrap(); .revoke_session(
let e = state.authenticate_session(&conn, req, &sid.hash()).unwrap_err(); &conn,
RevocationReason::LoggedOut,
None,
req.clone(),
&sid.hash(),
)
.unwrap();
let e = state
.authenticate_session(&conn, req, &sid.hash())
.unwrap_err();
assert_eq!(format!("{}", e), "session is no longer valid (reason=1)"); assert_eq!(format!("{}", e), "session is no longer valid (reason=1)");
} }
#[test] #[test]
fn upgrade_hash() { fn upgrade_hash() {
// This hash is generated with cost=1 vs the cost=2 of PASTA_CONFIG. // This hash is generated with cost=1 vs the cost=2 of PASTA_CONFIG.
let insecure_hash = let insecure_hash = libpasta::Config::with_primitive(libpasta::primitives::Bcrypt::new(1))
libpasta::Config::with_primitive(libpasta::primitives::Bcrypt::new(1))
.hash_password("hunter2"); .hash_password("hunter2");
testutil::init(); testutil::init();
let mut conn = Connection::open_in_memory().unwrap(); let mut conn = Connection::open_in_memory().unwrap();
@ -915,11 +1078,21 @@ mod tests {
let req = Request { let req = Request {
when_sec: Some(42), when_sec: Some(42),
addr: Some(::std::net::IpAddr::V4(::std::net::Ipv4Addr::new(127, 0, 0, 1))), addr: Some(::std::net::IpAddr::V4(::std::net::Ipv4Addr::new(
127, 0, 0, 1,
))),
user_agent: Some(b"some ua".to_vec()), user_agent: Some(b"some ua".to_vec()),
}; };
state.login_by_password(&conn, req.clone(), "slamb", "hunter2".to_owned(), state
Some(b"nvr.example.com".to_vec()), 0).unwrap(); .login_by_password(
&conn,
req.clone(),
"slamb",
"hunter2".to_owned(),
Some(b"nvr.example.com".to_vec()),
0,
)
.unwrap();
let new_hash = { let new_hash = {
// Password should have been automatically upgraded. // Password should have been automatically upgraded.
let u = state.users_by_id().get(&uid).unwrap(); let u = state.users_by_id().get(&uid).unwrap();
@ -944,8 +1117,16 @@ mod tests {
} }
// Login should still work. // Login should still work.
state.login_by_password(&conn, req.clone(), "slamb", "hunter2".to_owned(), state
Some(b"nvr.example.com".to_vec()), 0).unwrap(); .login_by_password(
&conn,
req.clone(),
"slamb",
"hunter2".to_owned(),
Some(b"nvr.example.com".to_vec()),
0,
)
.unwrap();
} }
#[test] #[test]
@ -956,7 +1137,9 @@ mod tests {
let mut state = State::init(&conn).unwrap(); let mut state = State::init(&conn).unwrap();
let req = Request { let req = Request {
when_sec: Some(42), when_sec: Some(42),
addr: Some(::std::net::IpAddr::V4(::std::net::Ipv4Addr::new(127, 0, 0, 1))), addr: Some(::std::net::IpAddr::V4(::std::net::Ipv4Addr::new(
127, 0, 0, 1,
))),
user_agent: Some(b"some ua".to_vec()), user_agent: Some(b"some ua".to_vec()),
}; };
let uid = { let uid = {
@ -966,9 +1149,17 @@ mod tests {
}; };
// Get a session for later. // Get a session for later.
let sid = state.login_by_password(&conn, req.clone(), "slamb", let sid = state
"hunter2".to_owned(), .login_by_password(
Some(b"nvr.example.com".to_vec()), 0).unwrap().0; &conn,
req.clone(),
"slamb",
"hunter2".to_owned(),
Some(b"nvr.example.com".to_vec()),
0,
)
.unwrap()
.0;
// Disable the user. // Disable the user.
{ {
@ -978,19 +1169,30 @@ mod tests {
} }
// Fresh logins shouldn't work. // Fresh logins shouldn't work.
let e = state.login_by_password(&conn, req.clone(), "slamb", let e = state
"hunter2".to_owned(), .login_by_password(
Some(b"nvr.example.com".to_vec()), 0).unwrap_err(); &conn,
req.clone(),
"slamb",
"hunter2".to_owned(),
Some(b"nvr.example.com".to_vec()),
0,
)
.unwrap_err();
assert_eq!(format!("{}", e), "user \"slamb\" is disabled"); assert_eq!(format!("{}", e), "user \"slamb\" is disabled");
// Authenticating existing sessions shouldn't work either. // Authenticating existing sessions shouldn't work either.
let e = state.authenticate_session(&conn, req.clone(), &sid.hash()).unwrap_err(); let e = state
.authenticate_session(&conn, req.clone(), &sid.hash())
.unwrap_err();
assert_eq!(format!("{}", e), "user \"slamb\" is disabled"); assert_eq!(format!("{}", e), "user \"slamb\" is disabled");
// The user should still be disabled after reload. // The user should still be disabled after reload.
drop(state); drop(state);
let mut state = State::init(&conn).unwrap(); let mut state = State::init(&conn).unwrap();
let e = state.authenticate_session(&conn, req, &sid.hash()).unwrap_err(); let e = state
.authenticate_session(&conn, req, &sid.hash())
.unwrap_err();
assert_eq!(format!("{}", e), "user \"slamb\" is disabled"); assert_eq!(format!("{}", e), "user \"slamb\" is disabled");
} }
@ -1002,7 +1204,9 @@ mod tests {
let mut state = State::init(&conn).unwrap(); let mut state = State::init(&conn).unwrap();
let req = Request { let req = Request {
when_sec: Some(42), when_sec: Some(42),
addr: Some(::std::net::IpAddr::V4(::std::net::Ipv4Addr::new(127, 0, 0, 1))), addr: Some(::std::net::IpAddr::V4(::std::net::Ipv4Addr::new(
127, 0, 0, 1,
))),
user_agent: Some(b"some ua".to_vec()), user_agent: Some(b"some ua".to_vec()),
}; };
let uid = { let uid = {
@ -1012,20 +1216,31 @@ mod tests {
}; };
// Get a session for later. // Get a session for later.
let (sid, _) = state.login_by_password(&conn, req.clone(), "slamb", let (sid, _) = state
"hunter2".to_owned(), .login_by_password(
Some(b"nvr.example.com".to_vec()), 0).unwrap(); &conn,
req.clone(),
"slamb",
"hunter2".to_owned(),
Some(b"nvr.example.com".to_vec()),
0,
)
.unwrap();
state.delete_user(&mut conn, uid).unwrap(); state.delete_user(&mut conn, uid).unwrap();
assert!(state.users_by_id().get(&uid).is_none()); assert!(state.users_by_id().get(&uid).is_none());
let e = state.authenticate_session(&conn, req.clone(), &sid.hash()).unwrap_err(); let e = state
.authenticate_session(&conn, req.clone(), &sid.hash())
.unwrap_err();
assert_eq!(format!("{}", e), "no such session"); assert_eq!(format!("{}", e), "no such session");
// The user should still be deleted after reload. // The user should still be deleted after reload.
drop(state); drop(state);
let mut state = State::init(&conn).unwrap(); let mut state = State::init(&conn).unwrap();
assert!(state.users_by_id().get(&uid).is_none()); assert!(state.users_by_id().get(&uid).is_none());
let e = state.authenticate_session(&conn, req.clone(), &sid.hash()).unwrap_err(); let e = state
.authenticate_session(&conn, req.clone(), &sid.hash())
.unwrap_err();
assert_eq!(format!("{}", e), "no such session"); assert_eq!(format!("{}", e), "no such session");
} }

View File

@ -35,12 +35,12 @@ use crate::db::{self, CompositeId, FromSqlUuid};
use crate::dir; use crate::dir;
use crate::raw; use crate::raw;
use crate::recording; use crate::recording;
use crate::schema;
use failure::Error; use failure::Error;
use fnv::{FnvHashMap, FnvHashSet}; use fnv::{FnvHashMap, FnvHashSet};
use log::{info, error, warn}; use log::{error, info, warn};
use nix::fcntl::AtFlags; use nix::fcntl::AtFlags;
use rusqlite::params; use rusqlite::params;
use crate::schema;
use std::os::unix::io::AsRawFd; use std::os::unix::io::AsRawFd;
pub struct Options { pub struct Options {
@ -53,7 +53,7 @@ pub struct Options {
#[derive(Default)] #[derive(Default)]
pub struct Context { pub struct Context {
rows_to_delete: FnvHashSet<CompositeId>, rows_to_delete: FnvHashSet<CompositeId>,
files_to_trash: FnvHashSet<(i32, CompositeId)>, // (dir_id, composite_id) files_to_trash: FnvHashSet<(i32, CompositeId)>, // (dir_id, composite_id)
} }
pub fn run(conn: &mut rusqlite::Connection, opts: &Options) -> Result<i32, Error> { pub fn run(conn: &mut rusqlite::Connection, opts: &Options) -> Result<i32, Error> {
@ -65,7 +65,9 @@ pub fn run(conn: &mut rusqlite::Connection, opts: &Options) -> Result<i32, Error
let mut rows = stmt.query(params![])?; let mut rows = stmt.query(params![])?;
while let Some(row) = rows.next()? { while let Some(row) = rows.next()? {
let e: String = row.get(0)?; let e: String = row.get(0)?;
if e == "ok" { continue; } if e == "ok" {
continue;
}
error!("{}", e); error!("{}", e);
printed_error = true; printed_error = true;
} }
@ -101,12 +103,14 @@ pub fn run(conn: &mut rusqlite::Connection, opts: &Options) -> Result<i32, Error
// Scan directories. // Scan directories.
let mut dirs_by_id: FnvHashMap<i32, Dir> = FnvHashMap::default(); let mut dirs_by_id: FnvHashMap<i32, Dir> = FnvHashMap::default();
{ {
let mut dir_stmt = conn.prepare(r#" let mut dir_stmt = conn.prepare(
r#"
select d.id, d.path, d.uuid, d.last_complete_open_id, o.uuid select d.id, d.path, d.uuid, d.last_complete_open_id, o.uuid
from sample_file_dir d left join open o on (d.last_complete_open_id = o.id) from sample_file_dir d left join open o on (d.last_complete_open_id = o.id)
"#)?; "#,
let mut garbage_stmt = conn.prepare_cached( )?;
"select composite_id from garbage where sample_file_dir_id = ?")?; let mut garbage_stmt =
conn.prepare_cached("select composite_id from garbage where sample_file_dir_id = ?")?;
let mut rows = dir_stmt.query(params![])?; let mut rows = dir_stmt.query(params![])?;
while let Some(row) = rows.next()? { while let Some(row) = rows.next()? {
let mut meta = schema::DirMeta::default(); let mut meta = schema::DirMeta::default();
@ -131,8 +135,10 @@ pub fn run(conn: &mut rusqlite::Connection, opts: &Options) -> Result<i32, Error
while let Some(row) = rows.next()? { while let Some(row) = rows.next()? {
let id = CompositeId(row.get(0)?); let id = CompositeId(row.get(0)?);
let s = streams.entry(id.stream()).or_insert_with(Stream::default); let s = streams.entry(id.stream()).or_insert_with(Stream::default);
s.recordings.entry(id.recording()).or_insert_with(Recording::default).garbage_row = s.recordings
true; .entry(id.recording())
.or_insert_with(Recording::default)
.garbage_row = true;
} }
dirs_by_id.insert(dir_id, streams); dirs_by_id.insert(dir_id, streams);
} }
@ -141,7 +147,8 @@ pub fn run(conn: &mut rusqlite::Connection, opts: &Options) -> Result<i32, Error
// Scan known streams. // Scan known streams.
let mut ctx = Context::default(); let mut ctx = Context::default();
{ {
let mut stmt = conn.prepare(r#" let mut stmt = conn.prepare(
r#"
select select
id, id,
sample_file_dir_id, sample_file_dir_id,
@ -150,7 +157,8 @@ pub fn run(conn: &mut rusqlite::Connection, opts: &Options) -> Result<i32, Error
stream stream
where where
sample_file_dir_id is not null sample_file_dir_id is not null
"#)?; "#,
)?;
let mut rows = stmt.query(params![])?; let mut rows = stmt.query(params![])?;
while let Some(row) = rows.next()? { while let Some(row) = rows.next()? {
let stream_id = row.get(0)?; let stream_id = row.get(0)?;
@ -170,9 +178,15 @@ pub fn run(conn: &mut rusqlite::Connection, opts: &Options) -> Result<i32, Error
for (&stream_id, stream) in streams { for (&stream_id, stream) in streams {
for (&recording_id, r) in &stream.recordings { for (&recording_id, r) in &stream.recordings {
let id = CompositeId::new(stream_id, recording_id); let id = CompositeId::new(stream_id, recording_id);
if r.recording_row.is_some() || r.playback_row.is_some() || if r.recording_row.is_some()
r.integrity_row || !r.garbage_row { || r.playback_row.is_some()
error!("dir {} recording {} for unknown stream: {:#?}", dir_id, id, r); || r.integrity_row
|| !r.garbage_row
{
error!(
"dir {} recording {} for unknown stream: {:#?}",
dir_id, id, r
);
printed_error = true; printed_error = true;
} }
} }
@ -195,7 +209,8 @@ pub fn run(conn: &mut rusqlite::Connection, opts: &Options) -> Result<i32, Error
if !ctx.files_to_trash.is_empty() { if !ctx.files_to_trash.is_empty() {
info!("Trashing {} recording files", ctx.files_to_trash.len()); info!("Trashing {} recording files", ctx.files_to_trash.len());
let mut g = tx.prepare( let mut g = tx.prepare(
"insert or ignore into garbage (sample_file_dir_id, composite_id) values (?, ?)")?; "insert or ignore into garbage (sample_file_dir_id, composite_id) values (?, ?)",
)?;
for (dir_id, composite_id) in &ctx.files_to_trash { for (dir_id, composite_id) in &ctx.files_to_trash {
g.execute(params![dir_id, composite_id.0])?; g.execute(params![dir_id, composite_id.0])?;
} }
@ -259,7 +274,11 @@ fn summarize_index(video_index: &[u8]) -> Result<RecordingSummary, Error> {
video_samples, video_samples,
video_sync_samples, video_sync_samples,
media_duration, media_duration,
flags: if it.duration_90k == 0 { db::RecordingFlags::TrailingZero as i32 } else { 0 }, flags: if it.duration_90k == 0 {
db::RecordingFlags::TrailingZero as i32
} else {
0
},
}) })
} }
@ -275,35 +294,53 @@ fn read_dir(d: &dir::SampleFileDir, opts: &Options) -> Result<Dir, Error> {
let f = e.file_name(); let f = e.file_name();
match f.to_bytes() { match f.to_bytes() {
b"." | b".." | b"meta" => continue, b"." | b".." | b"meta" => continue,
_ => {}, _ => {}
}; };
let id = match dir::parse_id(f.to_bytes()) { let id = match dir::parse_id(f.to_bytes()) {
Ok(id) => id, Ok(id) => id,
Err(_) => { Err(_) => {
error!("sample file directory contains file {:?} which isn't an id", f); error!(
"sample file directory contains file {:?} which isn't an id",
f
);
continue; continue;
} }
}; };
let len = if opts.compare_lens { let len = if opts.compare_lens {
nix::sys::stat::fstatat(fd, f, AtFlags::empty())?.st_size as u64 nix::sys::stat::fstatat(fd, f, AtFlags::empty())?.st_size as u64
} else { 0 }; } else {
0
};
let stream = dir.entry(id.stream()).or_insert_with(Stream::default); let stream = dir.entry(id.stream()).or_insert_with(Stream::default);
stream.recordings.entry(id.recording()).or_insert_with(Recording::default).file = Some(len); stream
.recordings
.entry(id.recording())
.or_insert_with(Recording::default)
.file = Some(len);
} }
Ok(dir) Ok(dir)
} }
/// Looks through a known stream for errors. /// Looks through a known stream for errors.
fn compare_stream(conn: &rusqlite::Connection, dir_id: i32, stream_id: i32, opts: &Options, fn compare_stream(
mut stream: Stream, ctx: &mut Context) -> Result<bool, Error> { conn: &rusqlite::Connection,
dir_id: i32,
stream_id: i32,
opts: &Options,
mut stream: Stream,
ctx: &mut Context,
) -> Result<bool, Error> {
let start = CompositeId::new(stream_id, 0); let start = CompositeId::new(stream_id, 0);
let end = CompositeId::new(stream_id, i32::max_value()); let end = CompositeId::new(stream_id, i32::max_value());
let mut printed_error = false; let mut printed_error = false;
let cum_recordings = stream.cum_recordings.expect("cum_recordings must be set on known stream"); let cum_recordings = stream
.cum_recordings
.expect("cum_recordings must be set on known stream");
// recording row. // recording row.
{ {
let mut stmt = conn.prepare_cached(r#" let mut stmt = conn.prepare_cached(
r#"
select select
composite_id, composite_id,
flags, flags,
@ -315,7 +352,8 @@ fn compare_stream(conn: &rusqlite::Connection, dir_id: i32, stream_id: i32, opts
recording recording
where where
composite_id between ? and ? composite_id between ? and ?
"#)?; "#,
)?;
let mut rows = stmt.query(params![start.0, end.0])?; let mut rows = stmt.query(params![start.0, end.0])?;
while let Some(row) = rows.next()? { while let Some(row) = rows.next()? {
let id = CompositeId(row.get(0)?); let id = CompositeId(row.get(0)?);
@ -326,15 +364,18 @@ fn compare_stream(conn: &rusqlite::Connection, dir_id: i32, stream_id: i32, opts
video_samples: row.get(4)?, video_samples: row.get(4)?,
video_sync_samples: row.get(5)?, video_sync_samples: row.get(5)?,
}; };
stream.recordings.entry(id.recording()) stream
.or_insert_with(Recording::default) .recordings
.recording_row = Some(s); .entry(id.recording())
.or_insert_with(Recording::default)
.recording_row = Some(s);
} }
} }
// recording_playback row. // recording_playback row.
{ {
let mut stmt = conn.prepare_cached(r#" let mut stmt = conn.prepare_cached(
r#"
select select
composite_id, composite_id,
video_index video_index
@ -342,7 +383,8 @@ fn compare_stream(conn: &rusqlite::Connection, dir_id: i32, stream_id: i32, opts
recording_playback recording_playback
where where
composite_id between ? and ? composite_id between ? and ?
"#)?; "#,
)?;
let mut rows = stmt.query(params![start.0, end.0])?; let mut rows = stmt.query(params![start.0, end.0])?;
while let Some(row) = rows.next()? { while let Some(row) = rows.next()? {
let id = CompositeId(row.get(0)?); let id = CompositeId(row.get(0)?);
@ -357,30 +399,36 @@ fn compare_stream(conn: &rusqlite::Connection, dir_id: i32, stream_id: i32, opts
ctx.files_to_trash.insert((dir_id, id)); ctx.files_to_trash.insert((dir_id, id));
} }
continue; continue;
}, }
}; };
stream.recordings.entry(id.recording()) stream
.or_insert_with(Recording::default) .recordings
.playback_row = Some(s); .entry(id.recording())
.or_insert_with(Recording::default)
.playback_row = Some(s);
} }
} }
// recording_integrity row. // recording_integrity row.
{ {
let mut stmt = conn.prepare_cached(r#" let mut stmt = conn.prepare_cached(
r#"
select select
composite_id composite_id
from from
recording_integrity recording_integrity
where where
composite_id between ? and ? composite_id between ? and ?
"#)?; "#,
)?;
let mut rows = stmt.query(params![start.0, end.0])?; let mut rows = stmt.query(params![start.0, end.0])?;
while let Some(row) = rows.next()? { while let Some(row) = rows.next()? {
let id = CompositeId(row.get(0)?); let id = CompositeId(row.get(0)?);
stream.recordings.entry(id.recording()) stream
.or_insert_with(Recording::default) .recordings
.integrity_row = true; .entry(id.recording())
.or_insert_with(Recording::default)
.integrity_row = true;
} }
} }
@ -400,14 +448,15 @@ fn compare_stream(conn: &rusqlite::Connection, dir_id: i32, stream_id: i32, opts
continue; continue;
} }
r r
}, }
None => { None => {
if db_rows_expected { if db_rows_expected {
error!("Missing recording row for {}: {:#?}", id, recording); error!("Missing recording row for {}: {:#?}", id, recording);
if opts.trash_orphan_sample_files { if opts.trash_orphan_sample_files {
ctx.files_to_trash.insert((dir_id, id)); ctx.files_to_trash.insert((dir_id, id));
} }
if opts.delete_orphan_rows { // also delete playback/integrity rows, if any. if opts.delete_orphan_rows {
// also delete playback/integrity rows, if any.
ctx.rows_to_delete.insert(id); ctx.rows_to_delete.insert(id);
} }
printed_error = true; printed_error = true;
@ -419,38 +468,44 @@ fn compare_stream(conn: &rusqlite::Connection, dir_id: i32, stream_id: i32, opts
printed_error = true; printed_error = true;
} }
continue; continue;
}, }
}; };
match recording.playback_row { match recording.playback_row {
Some(ref p) => { Some(ref p) => {
if r != p { if r != p {
error!("Recording {} summary doesn't match video_index: {:#?}", id, recording); error!(
"Recording {} summary doesn't match video_index: {:#?}",
id, recording
);
printed_error = true; printed_error = true;
} }
}, }
None => { None => {
error!("Recording {} missing playback row: {:#?}", id, recording); error!("Recording {} missing playback row: {:#?}", id, recording);
printed_error = true; printed_error = true;
if opts.trash_orphan_sample_files { if opts.trash_orphan_sample_files {
ctx.files_to_trash.insert((dir_id, id)); ctx.files_to_trash.insert((dir_id, id));
} }
if opts.delete_orphan_rows { // also delete recording/integrity rows, if any. if opts.delete_orphan_rows {
// also delete recording/integrity rows, if any.
ctx.rows_to_delete.insert(id); ctx.rows_to_delete.insert(id);
} }
}, }
} }
match recording.file { match recording.file {
Some(len) => if opts.compare_lens && r.bytes != len { Some(len) => {
error!("Recording {} length mismatch: {:#?}", id, recording); if opts.compare_lens && r.bytes != len {
printed_error = true; error!("Recording {} length mismatch: {:#?}", id, recording);
}, printed_error = true;
}
}
None => { None => {
error!("Recording {} missing file: {:#?}", id, recording); error!("Recording {} missing file: {:#?}", id, recording);
if opts.delete_orphan_rows { if opts.delete_orphan_rows {
ctx.rows_to_delete.insert(id); ctx.rows_to_delete.insert(id);
} }
printed_error = true; printed_error = true;
}, }
} }
} }

View File

@ -34,28 +34,35 @@
/// encoding](https://developers.google.com/protocol-buffers/docs/encoding#types). Uses the low bit /// encoding](https://developers.google.com/protocol-buffers/docs/encoding#types). Uses the low bit
/// to indicate signedness (1 = negative, 0 = non-negative). /// to indicate signedness (1 = negative, 0 = non-negative).
#[inline(always)] #[inline(always)]
pub fn zigzag32(i: i32) -> u32 { ((i << 1) as u32) ^ ((i >> 31) as u32) } pub fn zigzag32(i: i32) -> u32 {
((i << 1) as u32) ^ ((i >> 31) as u32)
}
/// Zigzag-decodes to a signed integer. /// Zigzag-decodes to a signed integer.
/// See `zigzag`. /// See `zigzag`.
#[inline(always)] #[inline(always)]
pub fn unzigzag32(i: u32) -> i32 { ((i >> 1) as i32) ^ -((i & 1) as i32) } pub fn unzigzag32(i: u32) -> i32 {
((i >> 1) as i32) ^ -((i & 1) as i32)
}
#[inline(always)] #[inline(always)]
pub fn decode_varint32(data: &[u8], i: usize) -> Result<(u32, usize), ()> { pub fn decode_varint32(data: &[u8], i: usize) -> Result<(u32, usize), ()> {
// Unroll a few likely possibilities before going into the robust out-of-line loop. // Unroll a few likely possibilities before going into the robust out-of-line loop.
// This aids branch prediction. // This aids branch prediction.
if data.len() > i && (data[i] & 0x80) == 0 { if data.len() > i && (data[i] & 0x80) == 0 {
return Ok((data[i] as u32, i+1)) return Ok((data[i] as u32, i + 1));
} else if data.len() > i + 1 && (data[i+1] & 0x80) == 0 { } else if data.len() > i + 1 && (data[i + 1] & 0x80) == 0 {
return Ok((( (data[i] & 0x7f) as u32) | return Ok((
(( data[i+1] as u32) << 7), ((data[i] & 0x7f) as u32) | ((data[i + 1] as u32) << 7),
i+2)) i + 2,
} else if data.len() > i + 2 && (data[i+2] & 0x80) == 0 { ));
return Ok((( (data[i] & 0x7f) as u32) | } else if data.len() > i + 2 && (data[i + 2] & 0x80) == 0 {
(((data[i+1] & 0x7f) as u32) << 7) | return Ok((
(( data[i+2] as u32) << 14), ((data[i] & 0x7f) as u32)
i+3)) | (((data[i + 1] & 0x7f) as u32) << 7)
| ((data[i + 2] as u32) << 14),
i + 3,
));
} }
decode_varint32_slow(data, i) decode_varint32_slow(data, i)
} }
@ -67,11 +74,11 @@ fn decode_varint32_slow(data: &[u8], mut i: usize) -> Result<(u32, usize), ()> {
let mut shift = 0; let mut shift = 0;
loop { loop {
if i == l { if i == l {
return Err(()) return Err(());
} }
let b = data[i]; let b = data[i];
if shift == 28 && (b & 0xf0) != 0 { if shift == 28 && (b & 0xf0) != 0 {
return Err(()) return Err(());
} }
out |= ((b & 0x7f) as u32) << shift; out |= ((b & 0x7f) as u32) << shift;
shift += 7; shift += 7;
@ -87,27 +94,31 @@ pub fn append_varint32(i: u32, data: &mut Vec<u8>) {
if i < 1u32 << 7 { if i < 1u32 << 7 {
data.push(i as u8); data.push(i as u8);
} else if i < 1u32 << 14 { } else if i < 1u32 << 14 {
data.extend_from_slice(&[(( i & 0x7F) | 0x80) as u8, data.extend_from_slice(&[((i & 0x7F) | 0x80) as u8, (i >> 7) as u8]);
(i >> 7) as u8]);
} else if i < 1u32 << 21 { } else if i < 1u32 << 21 {
data.extend_from_slice(&[(( i & 0x7F) | 0x80) as u8, data.extend_from_slice(&[
(((i >> 7) & 0x7F) | 0x80) as u8, ((i & 0x7F) | 0x80) as u8,
(i >> 14) as u8]); (((i >> 7) & 0x7F) | 0x80) as u8,
(i >> 14) as u8,
]);
} else if i < 1u32 << 28 { } else if i < 1u32 << 28 {
data.extend_from_slice(&[(( i & 0x7F) | 0x80) as u8, data.extend_from_slice(&[
(((i >> 7) & 0x7F) | 0x80) as u8, ((i & 0x7F) | 0x80) as u8,
(((i >> 14) & 0x7F) | 0x80) as u8, (((i >> 7) & 0x7F) | 0x80) as u8,
(i >> 21) as u8]); (((i >> 14) & 0x7F) | 0x80) as u8,
(i >> 21) as u8,
]);
} else { } else {
data.extend_from_slice(&[(( i & 0x7F) | 0x80) as u8, data.extend_from_slice(&[
(((i >> 7) & 0x7F) | 0x80) as u8, ((i & 0x7F) | 0x80) as u8,
(((i >> 14) & 0x7F) | 0x80) as u8, (((i >> 7) & 0x7F) | 0x80) as u8,
(((i >> 21) & 0x7F) | 0x80) as u8, (((i >> 14) & 0x7F) | 0x80) as u8,
(i >> 28) as u8]); (((i >> 21) & 0x7F) | 0x80) as u8,
(i >> 28) as u8,
]);
} }
} }
#[cfg(test)] #[cfg(test)]
mod tests { mod tests {
use super::*; use super::*;
@ -119,12 +130,30 @@ mod tests {
encoded: u32, encoded: u32,
} }
let tests = [ let tests = [
Test{decoded: 0, encoded: 0}, Test {
Test{decoded: -1, encoded: 1}, decoded: 0,
Test{decoded: 1, encoded: 2}, encoded: 0,
Test{decoded: -2, encoded: 3}, },
Test{decoded: 2147483647, encoded: 4294967294}, Test {
Test{decoded: -2147483648, encoded: 4294967295}, decoded: -1,
encoded: 1,
},
Test {
decoded: 1,
encoded: 2,
},
Test {
decoded: -2,
encoded: 3,
},
Test {
decoded: 2147483647,
encoded: 4294967294,
},
Test {
decoded: -2147483648,
encoded: 4294967295,
},
]; ];
for test in &tests { for test in &tests {
assert_eq!(test.encoded, zigzag32(test.decoded)); assert_eq!(test.encoded, zigzag32(test.decoded));
@ -139,11 +168,26 @@ mod tests {
encoded: &'static [u8], encoded: &'static [u8],
} }
let tests = [ let tests = [
Test{decoded: 1, encoded: b"\x01"}, Test {
Test{decoded: 257, encoded: b"\x81\x02"}, decoded: 1,
Test{decoded: 49409, encoded: b"\x81\x82\x03"}, encoded: b"\x01",
Test{decoded: 8438017, encoded: b"\x81\x82\x83\x04"}, },
Test{decoded: 1350615297, encoded: b"\x81\x82\x83\x84\x05"}, Test {
decoded: 257,
encoded: b"\x81\x02",
},
Test {
decoded: 49409,
encoded: b"\x81\x82\x03",
},
Test {
decoded: 8438017,
encoded: b"\x81\x82\x83\x04",
},
Test {
decoded: 1350615297,
encoded: b"\x81\x82\x83\x84\x05",
},
]; ];
for test in &tests { for test in &tests {
// Test encoding to an empty buffer. // Test encoding to an empty buffer.
@ -161,13 +205,17 @@ mod tests {
assert_eq!(out, buf); assert_eq!(out, buf);
// Test decoding from the beginning of the string. // Test decoding from the beginning of the string.
assert_eq!((test.decoded, test.encoded.len()), assert_eq!(
decode_varint32(test.encoded, 0).unwrap()); (test.decoded, test.encoded.len()),
decode_varint32(test.encoded, 0).unwrap()
);
// ...and from the middle of a buffer. // ...and from the middle of a buffer.
buf.push(b'x'); buf.push(b'x');
assert_eq!((test.decoded, test.encoded.len() + 1), assert_eq!(
decode_varint32(&buf, 1).unwrap()); (test.decoded, test.encoded.len() + 1),
decode_varint32(&buf, 1).unwrap()
);
} }
} }
@ -180,7 +228,6 @@ mod tests {
b"\x80\x80", b"\x80\x80",
b"\x80\x80\x80", b"\x80\x80\x80",
b"\x80\x80\x80\x80", b"\x80\x80\x80\x80",
// int32 overflows // int32 overflows
b"\x80\x80\x80\x80\x80", b"\x80\x80\x80\x80\x80",
b"\x80\x80\x80\x80\x80\x00", b"\x80\x80\x80\x80\x80\x00",

View File

@ -79,7 +79,8 @@ impl std::fmt::Display for IndexColumn {
/// Returns a sorted vec of table names in the given connection. /// Returns a sorted vec of table names in the given connection.
fn get_tables(c: &rusqlite::Connection) -> Result<Vec<String>, rusqlite::Error> { fn get_tables(c: &rusqlite::Connection) -> Result<Vec<String>, rusqlite::Error> {
c.prepare(r#" c.prepare(
r#"
select select
name name
from from
@ -88,66 +89,86 @@ fn get_tables(c: &rusqlite::Connection) -> Result<Vec<String>, rusqlite::Error>
type = 'table' and type = 'table' and
name not like 'sqlite_%' name not like 'sqlite_%'
order by name order by name
"#)? "#,
.query_map(params![], |r| r.get(0))? )?
.collect() .query_map(params![], |r| r.get(0))?
.collect()
} }
/// Returns a vec of columns in the given table. /// Returns a vec of columns in the given table.
fn get_table_columns(c: &rusqlite::Connection, table: &str) fn get_table_columns(
-> Result<Vec<Column>, rusqlite::Error> { c: &rusqlite::Connection,
table: &str,
) -> Result<Vec<Column>, rusqlite::Error> {
// Note that placeholders aren't allowed for these pragmas. Just assume sane table names // Note that placeholders aren't allowed for these pragmas. Just assume sane table names
// (no escaping). "select * from pragma_..." syntax would be nicer but requires SQLite // (no escaping). "select * from pragma_..." syntax would be nicer but requires SQLite
// 3.16.0 (2017-01-02). Ubuntu 16.04 Xenial (still used on Travis CI) has an older SQLite. // 3.16.0 (2017-01-02). Ubuntu 16.04 Xenial (still used on Travis CI) has an older SQLite.
c.prepare(&format!("pragma table_info(\"{}\")", table))? c.prepare(&format!("pragma table_info(\"{}\")", table))?
.query_map(params![], |r| Ok(Column { .query_map(params![], |r| {
cid: r.get(0)?, Ok(Column {
name: r.get(1)?, cid: r.get(0)?,
type_: r.get(2)?, name: r.get(1)?,
notnull: r.get(3)?, type_: r.get(2)?,
dflt_value: r.get(4)?, notnull: r.get(3)?,
pk: r.get(5)?, dflt_value: r.get(4)?,
}))? pk: r.get(5)?,
.collect() })
})?
.collect()
} }
/// Returns a vec of indices associated with the given table. /// Returns a vec of indices associated with the given table.
fn get_indices(c: &rusqlite::Connection, table: &str) -> Result<Vec<Index>, rusqlite::Error> { fn get_indices(c: &rusqlite::Connection, table: &str) -> Result<Vec<Index>, rusqlite::Error> {
// See note at get_tables_columns about placeholders. // See note at get_tables_columns about placeholders.
c.prepare(&format!("pragma index_list(\"{}\")", table))? c.prepare(&format!("pragma index_list(\"{}\")", table))?
.query_map(params![], |r| Ok(Index { .query_map(params![], |r| {
seq: r.get(0)?, Ok(Index {
name: r.get(1)?, seq: r.get(0)?,
unique: r.get(2)?, name: r.get(1)?,
origin: r.get(3)?, unique: r.get(2)?,
partial: r.get(4)?, origin: r.get(3)?,
}))? partial: r.get(4)?,
.collect() })
})?
.collect()
} }
/// Returns a vec of all the columns in the given index. /// Returns a vec of all the columns in the given index.
fn get_index_columns(c: &rusqlite::Connection, index: &str) fn get_index_columns(
-> Result<Vec<IndexColumn>, rusqlite::Error> { c: &rusqlite::Connection,
index: &str,
) -> Result<Vec<IndexColumn>, rusqlite::Error> {
// See note at get_tables_columns about placeholders. // See note at get_tables_columns about placeholders.
c.prepare(&format!("pragma index_info(\"{}\")", index))? c.prepare(&format!("pragma index_info(\"{}\")", index))?
.query_map(params![], |r| Ok(IndexColumn { .query_map(params![], |r| {
seqno: r.get(0)?, Ok(IndexColumn {
cid: r.get(1)?, seqno: r.get(0)?,
name: r.get(2)?, cid: r.get(1)?,
}))? name: r.get(2)?,
.collect() })
})?
.collect()
} }
pub fn get_diffs(n1: &str, c1: &rusqlite::Connection, n2: &str, c2: &rusqlite::Connection) pub fn get_diffs(
-> Result<Option<String>, Error> { n1: &str,
c1: &rusqlite::Connection,
n2: &str,
c2: &rusqlite::Connection,
) -> Result<Option<String>, Error> {
let mut diffs = String::new(); let mut diffs = String::new();
// Compare table list. // Compare table list.
let tables1 = get_tables(c1)?; let tables1 = get_tables(c1)?;
let tables2 = get_tables(c2)?; let tables2 = get_tables(c2)?;
if tables1 != tables2 { if tables1 != tables2 {
write!(&mut diffs, "table list mismatch, {} vs {}:\n{}", write!(
n1, n2, diff_slice(&tables1, &tables2))?; &mut diffs,
"table list mismatch, {} vs {}:\n{}",
n1,
n2,
diff_slice(&tables1, &tables2)
)?;
} }
// Compare columns and indices for each table. // Compare columns and indices for each table.
@ -155,8 +176,14 @@ pub fn get_diffs(n1: &str, c1: &rusqlite::Connection, n2: &str, c2: &rusqlite::C
let columns1 = get_table_columns(c1, &t)?; let columns1 = get_table_columns(c1, &t)?;
let columns2 = get_table_columns(c2, &t)?; let columns2 = get_table_columns(c2, &t)?;
if columns1 != columns2 { if columns1 != columns2 {
write!(&mut diffs, "table {:?} column, {} vs {}:\n{}", write!(
t, n1, n2, diff_slice(&columns1, &columns2))?; &mut diffs,
"table {:?} column, {} vs {}:\n{}",
t,
n1,
n2,
diff_slice(&columns1, &columns2)
)?;
} }
let mut indices1 = get_indices(c1, &t)?; let mut indices1 = get_indices(c1, &t)?;
@ -164,16 +191,29 @@ pub fn get_diffs(n1: &str, c1: &rusqlite::Connection, n2: &str, c2: &rusqlite::C
indices1.sort_by(|a, b| a.name.cmp(&b.name)); indices1.sort_by(|a, b| a.name.cmp(&b.name));
indices2.sort_by(|a, b| a.name.cmp(&b.name)); indices2.sort_by(|a, b| a.name.cmp(&b.name));
if indices1 != indices2 { if indices1 != indices2 {
write!(&mut diffs, "table {:?} indices, {} vs {}:\n{}", write!(
t, n1, n2, diff_slice(&indices1, &indices2))?; &mut diffs,
"table {:?} indices, {} vs {}:\n{}",
t,
n1,
n2,
diff_slice(&indices1, &indices2)
)?;
} }
for i in &indices1 { for i in &indices1 {
let ic1 = get_index_columns(c1, &i.name)?; let ic1 = get_index_columns(c1, &i.name)?;
let ic2 = get_index_columns(c2, &i.name)?; let ic2 = get_index_columns(c2, &i.name)?;
if ic1 != ic2 { if ic1 != ic2 {
write!(&mut diffs, "table {:?} index {:?} columns {} vs {}:\n{}", write!(
t, i, n1, n2, diff_slice(&ic1, &ic2))?; &mut diffs,
"table {:?} index {:?} columns {} vs {}:\n{}",
t,
i,
n1,
n2,
diff_slice(&ic1, &ic2)
)?;
} }
} }
} }

File diff suppressed because it is too large Load Diff

View File

@ -36,11 +36,15 @@ use crate::coding;
use crate::db::CompositeId; use crate::db::CompositeId;
use crate::schema; use crate::schema;
use cstr::cstr; use cstr::cstr;
use failure::{Error, Fail, bail, format_err}; use failure::{bail, format_err, Error, Fail};
use log::warn; use log::warn;
use protobuf::Message;
use nix::{NixPath, fcntl::{FlockArg, OFlag}, sys::stat::Mode};
use nix::sys::statvfs::Statvfs; use nix::sys::statvfs::Statvfs;
use nix::{
fcntl::{FlockArg, OFlag},
sys::stat::Mode,
NixPath,
};
use protobuf::Message;
use std::ffi::CStr; use std::ffi::CStr;
use std::fs; use std::fs;
use std::io::{Read, Write}; use std::io::{Read, Write};
@ -76,11 +80,17 @@ impl CompositeIdPath {
} }
impl NixPath for CompositeIdPath { impl NixPath for CompositeIdPath {
fn is_empty(&self) -> bool { false } fn is_empty(&self) -> bool {
fn len(&self) -> usize { 16 } false
}
fn len(&self) -> usize {
16
}
fn with_nix_path<T, F>(&self, f: F) -> Result<T, nix::Error> fn with_nix_path<T, F>(&self, f: F) -> Result<T, nix::Error>
where F: FnOnce(&CStr) -> T { where
F: FnOnce(&CStr) -> T,
{
let p = CStr::from_bytes_with_nul(&self.0[..]).expect("no interior nuls"); let p = CStr::from_bytes_with_nul(&self.0[..]).expect("no interior nuls");
Ok(f(p)) Ok(f(p))
} }
@ -91,7 +101,9 @@ impl NixPath for CompositeIdPath {
pub struct Fd(std::os::unix::io::RawFd); pub struct Fd(std::os::unix::io::RawFd);
impl std::os::unix::io::AsRawFd for Fd { impl std::os::unix::io::AsRawFd for Fd {
fn as_raw_fd(&self) -> std::os::unix::io::RawFd { self.0 } fn as_raw_fd(&self) -> std::os::unix::io::RawFd {
self.0
}
} }
impl Drop for Fd { impl Drop for Fd {
@ -107,7 +119,7 @@ impl Fd {
pub fn open<P: ?Sized + NixPath>(path: &P, mkdir: bool) -> Result<Fd, nix::Error> { pub fn open<P: ?Sized + NixPath>(path: &P, mkdir: bool) -> Result<Fd, nix::Error> {
if mkdir { if mkdir {
match nix::unistd::mkdir(path, nix::sys::stat::Mode::S_IRWXU) { match nix::unistd::mkdir(path, nix::sys::stat::Mode::S_IRWXU) {
Ok(()) | Err(nix::Error::Sys(nix::errno::Errno::EEXIST)) => {}, Ok(()) | Err(nix::Error::Sys(nix::errno::Errno::EEXIST)) => {}
Err(e) => return Err(e), Err(e) => return Err(e),
} }
} }
@ -138,7 +150,7 @@ pub(crate) fn read_meta(dir: &Fd) -> Result<schema::DirMeta, Error> {
return Ok(meta); return Ok(meta);
} }
return Err(e.into()); return Err(e.into());
}, }
Ok(f) => f, Ok(f) => f,
}; };
let mut data = Vec::new(); let mut data = Vec::new();
@ -146,38 +158,63 @@ pub(crate) fn read_meta(dir: &Fd) -> Result<schema::DirMeta, Error> {
let (len, pos) = coding::decode_varint32(&data, 0) let (len, pos) = coding::decode_varint32(&data, 0)
.map_err(|_| format_err!("Unable to decode varint length in meta file"))?; .map_err(|_| format_err!("Unable to decode varint length in meta file"))?;
if data.len() != FIXED_DIR_META_LEN || len as usize + pos > FIXED_DIR_META_LEN { if data.len() != FIXED_DIR_META_LEN || len as usize + pos > FIXED_DIR_META_LEN {
bail!("Expected a {}-byte file with a varint length of a DirMeta message; got \ bail!(
a {}-byte file with length {}", FIXED_DIR_META_LEN, data.len(), len); "Expected a {}-byte file with a varint length of a DirMeta message; got \
a {}-byte file with length {}",
FIXED_DIR_META_LEN,
data.len(),
len
);
} }
let data = &data[pos..pos+len as usize]; let data = &data[pos..pos + len as usize];
let mut s = protobuf::CodedInputStream::from_bytes(&data); let mut s = protobuf::CodedInputStream::from_bytes(&data);
meta.merge_from(&mut s).map_err(|e| e.context("Unable to parse metadata proto"))?; meta.merge_from(&mut s)
.map_err(|e| e.context("Unable to parse metadata proto"))?;
Ok(meta) Ok(meta)
} }
/// Write `dir`'s metadata, clobbering existing data. /// Write `dir`'s metadata, clobbering existing data.
pub(crate) fn write_meta(dirfd: RawFd, meta: &schema::DirMeta) -> Result<(), Error> { pub(crate) fn write_meta(dirfd: RawFd, meta: &schema::DirMeta) -> Result<(), Error> {
let mut data = meta.write_length_delimited_to_bytes().expect("proto3->vec is infallible"); let mut data = meta
.write_length_delimited_to_bytes()
.expect("proto3->vec is infallible");
if data.len() > FIXED_DIR_META_LEN { if data.len() > FIXED_DIR_META_LEN {
bail!("Length-delimited DirMeta message requires {} bytes, over limit of {}", bail!(
data.len(), FIXED_DIR_META_LEN); "Length-delimited DirMeta message requires {} bytes, over limit of {}",
data.len(),
FIXED_DIR_META_LEN
);
} }
data.resize(FIXED_DIR_META_LEN, 0); // pad to required length. data.resize(FIXED_DIR_META_LEN, 0); // pad to required length.
let mut f = crate::fs::openat(dirfd, cstr!("meta"), OFlag::O_CREAT | OFlag::O_WRONLY, let mut f = crate::fs::openat(
Mode::S_IRUSR | Mode::S_IWUSR) dirfd,
.map_err(|e| e.context("Unable to open meta file"))?; cstr!("meta"),
let stat = f.metadata().map_err(|e| e.context("Unable to stat meta file"))?; OFlag::O_CREAT | OFlag::O_WRONLY,
Mode::S_IRUSR | Mode::S_IWUSR,
)
.map_err(|e| e.context("Unable to open meta file"))?;
let stat = f
.metadata()
.map_err(|e| e.context("Unable to stat meta file"))?;
if stat.len() == 0 { if stat.len() == 0 {
// Need to sync not only the data but also the file metadata and dirent. // Need to sync not only the data but also the file metadata and dirent.
f.write_all(&data).map_err(|e| e.context("Unable to write to meta file"))?; f.write_all(&data)
f.sync_all().map_err(|e| e.context("Unable to sync meta file"))?; .map_err(|e| e.context("Unable to write to meta file"))?;
f.sync_all()
.map_err(|e| e.context("Unable to sync meta file"))?;
nix::unistd::fsync(dirfd).map_err(|e| e.context("Unable to sync dir"))?; nix::unistd::fsync(dirfd).map_err(|e| e.context("Unable to sync dir"))?;
} else if stat.len() == FIXED_DIR_META_LEN as u64 { } else if stat.len() == FIXED_DIR_META_LEN as u64 {
// Just syncing the data will suffice; existing metadata and dirent are fine. // Just syncing the data will suffice; existing metadata and dirent are fine.
f.write_all(&data).map_err(|e| e.context("Unable to write to meta file"))?; f.write_all(&data)
f.sync_data().map_err(|e| e.context("Unable to sync meta file"))?; .map_err(|e| e.context("Unable to write to meta file"))?;
f.sync_data()
.map_err(|e| e.context("Unable to sync meta file"))?;
} else { } else {
bail!("Existing meta file is {}-byte; expected {}", stat.len(), FIXED_DIR_META_LEN); bail!(
"Existing meta file is {}-byte; expected {}",
stat.len(),
FIXED_DIR_META_LEN
);
} }
Ok(()) Ok(())
} }
@ -187,21 +224,26 @@ impl SampleFileDir {
/// ///
/// `db_meta.in_progress_open` should be filled if the directory should be opened in read/write /// `db_meta.in_progress_open` should be filled if the directory should be opened in read/write
/// mode; absent in read-only mode. /// mode; absent in read-only mode.
pub fn open(path: &str, db_meta: &schema::DirMeta) pub fn open(path: &str, db_meta: &schema::DirMeta) -> Result<Arc<SampleFileDir>, Error> {
-> Result<Arc<SampleFileDir>, Error> {
let read_write = db_meta.in_progress_open.is_some(); let read_write = db_meta.in_progress_open.is_some();
let s = SampleFileDir::open_self(path, false)?; let s = SampleFileDir::open_self(path, false)?;
s.fd.lock(if read_write { s.fd.lock(if read_write {
FlockArg::LockExclusiveNonblock FlockArg::LockExclusiveNonblock
} else { } else {
FlockArg::LockSharedNonblock FlockArg::LockSharedNonblock
}).map_err(|e| e.context(format!("unable to lock dir {}", path)))?; })
.map_err(|e| e.context(format!("unable to lock dir {}", path)))?;
let dir_meta = read_meta(&s.fd).map_err(|e| e.context("unable to read meta file"))?; let dir_meta = read_meta(&s.fd).map_err(|e| e.context("unable to read meta file"))?;
if !SampleFileDir::consistent(db_meta, &dir_meta) { if !SampleFileDir::consistent(db_meta, &dir_meta) {
let serialized = let serialized = db_meta
db_meta.write_length_delimited_to_bytes().expect("proto3->vec is infallible"); .write_length_delimited_to_bytes()
bail!("metadata mismatch.\ndb: {:#?}\ndir: {:#?}\nserialized db: {:#?}", .expect("proto3->vec is infallible");
db_meta, &dir_meta, &serialized); bail!(
"metadata mismatch.\ndb: {:#?}\ndir: {:#?}\nserialized db: {:#?}",
db_meta,
&dir_meta,
&serialized
);
} }
if db_meta.in_progress_open.is_some() { if db_meta.in_progress_open.is_some() {
s.write_meta(db_meta)?; s.write_meta(db_meta)?;
@ -212,12 +254,17 @@ impl SampleFileDir {
/// Returns true if the existing directory and database metadata are consistent; the directory /// Returns true if the existing directory and database metadata are consistent; the directory
/// is then openable. /// is then openable.
pub(crate) fn consistent(db_meta: &schema::DirMeta, dir_meta: &schema::DirMeta) -> bool { pub(crate) fn consistent(db_meta: &schema::DirMeta, dir_meta: &schema::DirMeta) -> bool {
if dir_meta.db_uuid != db_meta.db_uuid { return false; } if dir_meta.db_uuid != db_meta.db_uuid {
if dir_meta.dir_uuid != db_meta.dir_uuid { return false; } return false;
}
if dir_meta.dir_uuid != db_meta.dir_uuid {
return false;
}
if db_meta.last_complete_open.is_some() && if db_meta.last_complete_open.is_some()
(db_meta.last_complete_open != dir_meta.last_complete_open && && (db_meta.last_complete_open != dir_meta.last_complete_open
db_meta.last_complete_open != dir_meta.in_progress_open) { && db_meta.last_complete_open != dir_meta.in_progress_open)
{
return false; return false;
} }
@ -228,8 +275,10 @@ impl SampleFileDir {
true true
} }
pub(crate) fn create(path: &str, db_meta: &schema::DirMeta) pub(crate) fn create(
-> Result<Arc<SampleFileDir>, Error> { path: &str,
db_meta: &schema::DirMeta,
) -> Result<Arc<SampleFileDir>, Error> {
let s = SampleFileDir::open_self(path, true)?; let s = SampleFileDir::open_self(path, true)?;
s.fd.lock(FlockArg::LockExclusiveNonblock) s.fd.lock(FlockArg::LockExclusiveNonblock)
.map_err(|e| e.context(format!("unable to lock dir {}", path)))?; .map_err(|e| e.context(format!("unable to lock dir {}", path)))?;
@ -238,7 +287,11 @@ impl SampleFileDir {
// Verify metadata. We only care that it hasn't been completely opened. // Verify metadata. We only care that it hasn't been completely opened.
// Partial opening by this or another database is fine; we won't overwrite anything. // Partial opening by this or another database is fine; we won't overwrite anything.
if old_meta.last_complete_open.is_some() { if old_meta.last_complete_open.is_some() {
bail!("Can't create dir at path {}: is already in use:\n{:?}", path, old_meta); bail!(
"Can't create dir at path {}: is already in use:\n{:?}",
path,
old_meta
);
} }
if !s.is_empty()? { if !s.is_empty()? {
bail!("Can't create dir at path {} with existing files", path); bail!("Can't create dir at path {} with existing files", path);
@ -248,8 +301,12 @@ impl SampleFileDir {
} }
pub(crate) fn opendir(&self) -> Result<nix::dir::Dir, nix::Error> { pub(crate) fn opendir(&self) -> Result<nix::dir::Dir, nix::Error> {
nix::dir::Dir::openat(self.fd.as_raw_fd(), ".", OFlag::O_DIRECTORY | OFlag::O_RDONLY, nix::dir::Dir::openat(
Mode::empty()) self.fd.as_raw_fd(),
".",
OFlag::O_DIRECTORY | OFlag::O_RDONLY,
Mode::empty(),
)
} }
/// Determines if the directory is empty, aside form metadata. /// Determines if the directory is empty, aside form metadata.
@ -259,7 +316,7 @@ impl SampleFileDir {
let e = e?; let e = e?;
match e.file_name().to_bytes() { match e.file_name().to_bytes() {
b"." | b".." => continue, b"." | b".." => continue,
b"meta" => continue, // existing metadata is fine. b"meta" => continue, // existing metadata is fine.
_ => return Ok(false), _ => return Ok(false),
} }
} }
@ -268,9 +325,7 @@ impl SampleFileDir {
fn open_self(path: &str, create: bool) -> Result<Arc<SampleFileDir>, Error> { fn open_self(path: &str, create: bool) -> Result<Arc<SampleFileDir>, Error> {
let fd = Fd::open(path, create)?; let fd = Fd::open(path, create)?;
Ok(Arc::new(SampleFileDir { Ok(Arc::new(SampleFileDir { fd }))
fd,
}))
} }
/// Opens the given sample file for reading. /// Opens the given sample file for reading.
@ -281,15 +336,21 @@ impl SampleFileDir {
pub fn create_file(&self, composite_id: CompositeId) -> Result<fs::File, nix::Error> { pub fn create_file(&self, composite_id: CompositeId) -> Result<fs::File, nix::Error> {
let p = CompositeIdPath::from(composite_id); let p = CompositeIdPath::from(composite_id);
crate::fs::openat(self.fd.0, &p, OFlag::O_WRONLY | OFlag::O_EXCL | OFlag::O_CREAT, crate::fs::openat(
Mode::S_IRUSR | Mode::S_IWUSR) self.fd.0,
&p,
OFlag::O_WRONLY | OFlag::O_EXCL | OFlag::O_CREAT,
Mode::S_IRUSR | Mode::S_IWUSR,
)
} }
pub(crate) fn write_meta(&self, meta: &schema::DirMeta) -> Result<(), Error> { pub(crate) fn write_meta(&self, meta: &schema::DirMeta) -> Result<(), Error> {
write_meta(self.fd.0, meta) write_meta(self.fd.0, meta)
} }
pub fn statfs(&self) -> Result<Statvfs, nix::Error> { self.fd.statfs() } pub fn statfs(&self) -> Result<Statvfs, nix::Error> {
self.fd.statfs()
}
/// Unlinks the given sample file within this directory. /// Unlinks the given sample file within this directory.
pub(crate) fn unlink_file(&self, id: CompositeId) -> Result<(), nix::Error> { pub(crate) fn unlink_file(&self, id: CompositeId) -> Result<(), nix::Error> {
@ -312,11 +373,12 @@ pub(crate) fn parse_id(id: &[u8]) -> Result<CompositeId, ()> {
} }
let mut v: u64 = 0; let mut v: u64 = 0;
for i in 0..16 { for i in 0..16 {
v = (v << 4) | match id[i] { v = (v << 4)
b @ b'0'..=b'9' => b - b'0', | match id[i] {
b @ b'a'..=b'f' => b - b'a' + 10, b @ b'0'..=b'9' => b - b'0',
_ => return Err(()), b @ b'a'..=b'f' => b - b'a' + 10,
} as u64; _ => return Err(()),
} as u64;
} }
Ok(CompositeId(v as i64)) Ok(CompositeId(v as i64))
} }
@ -353,7 +415,14 @@ mod tests {
o.id = u32::max_value(); o.id = u32::max_value();
o.uuid.extend_from_slice(fake_uuid); o.uuid.extend_from_slice(fake_uuid);
} }
let data = meta.write_length_delimited_to_bytes().expect("proto3->vec is infallible"); let data = meta
assert!(data.len() <= FIXED_DIR_META_LEN, "{} vs {}", data.len(), FIXED_DIR_META_LEN); .write_length_delimited_to_bytes()
.expect("proto3->vec is infallible");
assert!(
data.len() <= FIXED_DIR_META_LEN,
"{} vs {}",
data.len(),
FIXED_DIR_META_LEN
);
} }
} }

View File

@ -28,13 +28,17 @@
// You should have received a copy of the GNU General Public License // You should have received a copy of the GNU General Public License
// along with this program. If not, see <http://www.gnu.org/licenses/>. // along with this program. If not, see <http://www.gnu.org/licenses/>.
use std::os::unix::io::{FromRawFd, RawFd};
use nix::NixPath;
use nix::fcntl::OFlag; use nix::fcntl::OFlag;
use nix::sys::stat::Mode; use nix::sys::stat::Mode;
use nix::NixPath;
use std::os::unix::io::{FromRawFd, RawFd};
pub fn openat<P: ?Sized + NixPath>(dirfd: RawFd, path: &P, oflag: OFlag, mode: Mode) pub fn openat<P: ?Sized + NixPath>(
-> Result<std::fs::File, nix::Error> { dirfd: RawFd,
path: &P,
oflag: OFlag,
mode: Mode,
) -> Result<std::fs::File, nix::Error> {
let fd = nix::fcntl::openat(dirfd, path, oflag, mode)?; let fd = nix::fcntl::openat(dirfd, path, oflag, mode)?;
Ok(unsafe { std::fs::File::from_raw_fd(fd) }) Ok(unsafe { std::fs::File::from_raw_fd(fd) })
} }

View File

@ -28,7 +28,7 @@
// You should have received a copy of the GNU General Public License // You should have received a copy of the GNU General Public License
// along with this program. If not, see <http://www.gnu.org/licenses/>. // along with this program. If not, see <http://www.gnu.org/licenses/>.
#![cfg_attr(all(feature="nightly", test), feature(test))] #![cfg_attr(all(feature = "nightly", test), feature(test))]
pub mod auth; pub mod auth;
pub mod check; pub mod check;

View File

@ -31,9 +31,9 @@
//! Raw database access: SQLite statements which do not touch any cached state. //! Raw database access: SQLite statements which do not touch any cached state.
use crate::db::{self, CompositeId, FromSqlUuid}; use crate::db::{self, CompositeId, FromSqlUuid};
use failure::{Error, ResultExt, bail};
use fnv::FnvHashSet;
use crate::recording; use crate::recording;
use failure::{bail, Error, ResultExt};
use fnv::FnvHashSet;
use rusqlite::{named_params, params}; use rusqlite::{named_params, params};
use std::ops::Range; use std::ops::Range;
use uuid::Uuid; use uuid::Uuid;
@ -126,10 +126,13 @@ const LIST_OLDEST_RECORDINGS_SQL: &'static str = r#"
/// Lists the specified recordings in ascending order by start time, passing them to a supplied /// Lists the specified recordings in ascending order by start time, passing them to a supplied
/// function. Given that the function is called with the database lock held, it should be quick. /// function. Given that the function is called with the database lock held, it should be quick.
pub(crate) fn list_recordings_by_time( pub(crate) fn list_recordings_by_time(
conn: &rusqlite::Connection, stream_id: i32, desired_time: Range<recording::Time>, conn: &rusqlite::Connection,
f: &mut dyn FnMut(db::ListRecordingsRow) -> Result<(), Error>) -> Result<(), Error> { stream_id: i32,
desired_time: Range<recording::Time>,
f: &mut dyn FnMut(db::ListRecordingsRow) -> Result<(), Error>,
) -> Result<(), Error> {
let mut stmt = conn.prepare_cached(LIST_RECORDINGS_BY_TIME_SQL)?; let mut stmt = conn.prepare_cached(LIST_RECORDINGS_BY_TIME_SQL)?;
let rows = stmt.query_named(named_params!{ let rows = stmt.query_named(named_params! {
":stream_id": stream_id, ":stream_id": stream_id,
":start_time_90k": desired_time.start.0, ":start_time_90k": desired_time.start.0,
":end_time_90k": desired_time.end.0, ":end_time_90k": desired_time.end.0,
@ -139,19 +142,24 @@ pub(crate) fn list_recordings_by_time(
/// Lists the specified recordings in ascending order by id. /// Lists the specified recordings in ascending order by id.
pub(crate) fn list_recordings_by_id( pub(crate) fn list_recordings_by_id(
conn: &rusqlite::Connection, stream_id: i32, desired_ids: Range<i32>, conn: &rusqlite::Connection,
f: &mut dyn FnMut(db::ListRecordingsRow) -> Result<(), Error>) -> Result<(), Error> { stream_id: i32,
desired_ids: Range<i32>,
f: &mut dyn FnMut(db::ListRecordingsRow) -> Result<(), Error>,
) -> Result<(), Error> {
let mut stmt = conn.prepare_cached(LIST_RECORDINGS_BY_ID_SQL)?; let mut stmt = conn.prepare_cached(LIST_RECORDINGS_BY_ID_SQL)?;
let rows = stmt.query_named(named_params!{ let rows = stmt.query_named(named_params! {
":start": CompositeId::new(stream_id, desired_ids.start).0, ":start": CompositeId::new(stream_id, desired_ids.start).0,
":end": CompositeId::new(stream_id, desired_ids.end).0, ":end": CompositeId::new(stream_id, desired_ids.end).0,
})?; })?;
list_recordings_inner(rows, true, f) list_recordings_inner(rows, true, f)
} }
fn list_recordings_inner(mut rows: rusqlite::Rows, include_prev: bool, fn list_recordings_inner(
f: &mut dyn FnMut(db::ListRecordingsRow) -> Result<(), Error>) mut rows: rusqlite::Rows,
-> Result<(), Error> { include_prev: bool,
f: &mut dyn FnMut(db::ListRecordingsRow) -> Result<(), Error>,
) -> Result<(), Error> {
while let Some(row) = rows.next()? { while let Some(row) = rows.next()? {
let wall_duration_90k = row.get(4)?; let wall_duration_90k = row.get(4)?;
let media_duration_delta_90k: i32 = row.get(5)?; let media_duration_delta_90k: i32 = row.get(5)?;
@ -177,17 +185,27 @@ fn list_recordings_inner(mut rows: rusqlite::Rows, include_prev: bool,
} }
pub(crate) fn get_db_uuid(conn: &rusqlite::Connection) -> Result<Uuid, Error> { pub(crate) fn get_db_uuid(conn: &rusqlite::Connection) -> Result<Uuid, Error> {
Ok(conn.query_row("select uuid from meta", params![], |row| -> rusqlite::Result<Uuid> { Ok(conn.query_row(
let uuid: FromSqlUuid = row.get(0)?; "select uuid from meta",
Ok(uuid.0) params![],
})?) |row| -> rusqlite::Result<Uuid> {
let uuid: FromSqlUuid = row.get(0)?;
Ok(uuid.0)
},
)?)
} }
/// Inserts the specified recording (for from `try_flush` only). /// Inserts the specified recording (for from `try_flush` only).
pub(crate) fn insert_recording(tx: &rusqlite::Transaction, o: &db::Open, id: CompositeId, pub(crate) fn insert_recording(
r: &db::RecordingToInsert) -> Result<(), Error> { tx: &rusqlite::Transaction,
let mut stmt = tx.prepare_cached(r#" o: &db::Open,
insert into recording (composite_id, stream_id, open_id, run_offset, flags, id: CompositeId,
r: &db::RecordingToInsert,
) -> Result<(), Error> {
let mut stmt = tx
.prepare_cached(
r#"
insert into recording (composite_id, stream_id, open_id, run_offset, flags,
sample_file_bytes, start_time_90k, prev_media_duration_90k, sample_file_bytes, start_time_90k, prev_media_duration_90k,
prev_runs, wall_duration_90k, media_duration_delta_90k, prev_runs, wall_duration_90k, media_duration_delta_90k,
video_samples, video_sync_samples, video_sample_entry_id) video_samples, video_sync_samples, video_sample_entry_id)
@ -195,8 +213,10 @@ pub(crate) fn insert_recording(tx: &rusqlite::Transaction, o: &db::Open, id: Com
:sample_file_bytes, :start_time_90k, :prev_media_duration_90k, :sample_file_bytes, :start_time_90k, :prev_media_duration_90k,
:prev_runs, :wall_duration_90k, :media_duration_delta_90k, :prev_runs, :wall_duration_90k, :media_duration_delta_90k,
:video_samples, :video_sync_samples, :video_sample_entry_id) :video_samples, :video_sync_samples, :video_sample_entry_id)
"#).with_context(|e| format!("can't prepare recording insert: {}", e))?; "#,
stmt.execute_named(named_params!{ )
.with_context(|e| format!("can't prepare recording insert: {}", e))?;
stmt.execute_named(named_params! {
":composite_id": id.0, ":composite_id": id.0,
":stream_id": i64::from(id.stream()), ":stream_id": i64::from(id.stream()),
":open_id": o.id, ":open_id": o.id,
@ -211,32 +231,49 @@ pub(crate) fn insert_recording(tx: &rusqlite::Transaction, o: &db::Open, id: Com
":video_samples": r.video_samples, ":video_samples": r.video_samples,
":video_sync_samples": r.video_sync_samples, ":video_sync_samples": r.video_sync_samples,
":video_sample_entry_id": r.video_sample_entry_id, ":video_sample_entry_id": r.video_sample_entry_id,
}).with_context(|e| format!("unable to insert recording for recording {} {:#?}: {}", })
id, r, e))?; .with_context(|e| {
format!(
"unable to insert recording for recording {} {:#?}: {}",
id, r, e
)
})?;
let mut stmt = tx.prepare_cached(r#" let mut stmt = tx
insert into recording_integrity (composite_id, local_time_delta_90k, sample_file_blake3) .prepare_cached(
values (:composite_id, :local_time_delta_90k, :sample_file_blake3) r#"
"#).with_context(|e| format!("can't prepare recording_integrity insert: {}", e))?; insert into recording_integrity (composite_id, local_time_delta_90k,
sample_file_blake3)
values (:composite_id, :local_time_delta_90k,
:sample_file_blake3)
"#,
)
.with_context(|e| format!("can't prepare recording_integrity insert: {}", e))?;
let blake3 = r.sample_file_blake3.as_ref().map(|b| &b[..]); let blake3 = r.sample_file_blake3.as_ref().map(|b| &b[..]);
let delta = match r.run_offset { let delta = match r.run_offset {
0 => None, 0 => None,
_ => Some(r.local_time_delta.0), _ => Some(r.local_time_delta.0),
}; };
stmt.execute_named(named_params!{ stmt.execute_named(named_params! {
":composite_id": id.0, ":composite_id": id.0,
":local_time_delta_90k": delta, ":local_time_delta_90k": delta,
":sample_file_blake3": blake3, ":sample_file_blake3": blake3,
}).with_context(|e| format!("unable to insert recording_integrity for {:#?}: {}", r, e))?; })
.with_context(|e| format!("unable to insert recording_integrity for {:#?}: {}", r, e))?;
let mut stmt = tx.prepare_cached(r#" let mut stmt = tx
insert into recording_playback (composite_id, video_index) .prepare_cached(
values (:composite_id, :video_index) r#"
"#).with_context(|e| format!("can't prepare recording_playback insert: {}", e))?; insert into recording_playback (composite_id, video_index)
stmt.execute_named(named_params!{ values (:composite_id, :video_index)
"#,
)
.with_context(|e| format!("can't prepare recording_playback insert: {}", e))?;
stmt.execute_named(named_params! {
":composite_id": id.0, ":composite_id": id.0,
":video_index": &r.video_index, ":video_index": &r.video_index,
}).with_context(|e| format!("unable to insert recording_playback for {:#?}: {}", r, e))?; })
.with_context(|e| format!("unable to insert recording_playback for {:#?}: {}", r, e))?;
Ok(()) Ok(())
} }
@ -245,10 +282,13 @@ pub(crate) fn insert_recording(tx: &rusqlite::Transaction, o: &db::Open, id: Com
/// table. `sample_file_dir_id` is assumed to be correct. /// table. `sample_file_dir_id` is assumed to be correct.
/// ///
/// Returns the number of recordings which were deleted. /// Returns the number of recordings which were deleted.
pub(crate) fn delete_recordings(tx: &rusqlite::Transaction, sample_file_dir_id: i32, pub(crate) fn delete_recordings(
ids: Range<CompositeId>) tx: &rusqlite::Transaction,
-> Result<usize, Error> { sample_file_dir_id: i32,
let mut insert = tx.prepare_cached(r#" ids: Range<CompositeId>,
) -> Result<usize, Error> {
let mut insert = tx.prepare_cached(
r#"
insert into garbage (sample_file_dir_id, composite_id) insert into garbage (sample_file_dir_id, composite_id)
select select
:sample_file_dir_id, :sample_file_dir_id,
@ -258,54 +298,78 @@ pub(crate) fn delete_recordings(tx: &rusqlite::Transaction, sample_file_dir_id:
where where
:start <= composite_id and :start <= composite_id and
composite_id < :end composite_id < :end
"#)?; "#,
let mut del_playback = tx.prepare_cached(r#" )?;
let mut del_playback = tx.prepare_cached(
r#"
delete from recording_playback delete from recording_playback
where where
:start <= composite_id and :start <= composite_id and
composite_id < :end composite_id < :end
"#)?; "#,
let mut del_integrity = tx.prepare_cached(r#" )?;
let mut del_integrity = tx.prepare_cached(
r#"
delete from recording_integrity delete from recording_integrity
where where
:start <= composite_id and :start <= composite_id and
composite_id < :end composite_id < :end
"#)?; "#,
let mut del_main = tx.prepare_cached(r#" )?;
let mut del_main = tx.prepare_cached(
r#"
delete from recording delete from recording
where where
:start <= composite_id and :start <= composite_id and
composite_id < :end composite_id < :end
"#)?; "#,
let n = insert.execute_named(named_params!{ )?;
let n = insert.execute_named(named_params! {
":sample_file_dir_id": sample_file_dir_id, ":sample_file_dir_id": sample_file_dir_id,
":start": ids.start.0, ":start": ids.start.0,
":end": ids.end.0, ":end": ids.end.0,
})?; })?;
let p = named_params!{ let p = named_params! {
":start": ids.start.0, ":start": ids.start.0,
":end": ids.end.0, ":end": ids.end.0,
}; };
let n_playback = del_playback.execute_named(p)?; let n_playback = del_playback.execute_named(p)?;
if n_playback != n { if n_playback != n {
bail!("inserted {} garbage rows but deleted {} recording_playback rows!", n, n_playback); bail!(
"inserted {} garbage rows but deleted {} recording_playback rows!",
n,
n_playback
);
} }
let n_integrity = del_integrity.execute_named(p)?; let n_integrity = del_integrity.execute_named(p)?;
if n_integrity > n { // fewer is okay; recording_integrity is optional. if n_integrity > n {
bail!("inserted {} garbage rows but deleted {} recording_integrity rows!", n, n_integrity); // fewer is okay; recording_integrity is optional.
bail!(
"inserted {} garbage rows but deleted {} recording_integrity rows!",
n,
n_integrity
);
} }
let n_main = del_main.execute_named(p)?; let n_main = del_main.execute_named(p)?;
if n_main != n { if n_main != n {
bail!("inserted {} garbage rows but deleted {} recording rows!", n, n_main); bail!(
"inserted {} garbage rows but deleted {} recording rows!",
n,
n_main
);
} }
Ok(n) Ok(n)
} }
/// Marks the given sample files as deleted. This shouldn't be called until the files have /// Marks the given sample files as deleted. This shouldn't be called until the files have
/// been `unlink()`ed and the parent directory `fsync()`ed. /// been `unlink()`ed and the parent directory `fsync()`ed.
pub(crate) fn mark_sample_files_deleted(tx: &rusqlite::Transaction, ids: &[CompositeId]) pub(crate) fn mark_sample_files_deleted(
-> Result<(), Error> { tx: &rusqlite::Transaction,
if ids.is_empty() { return Ok(()); } ids: &[CompositeId],
) -> Result<(), Error> {
if ids.is_empty() {
return Ok(());
}
let mut stmt = tx.prepare_cached("delete from garbage where composite_id = ?")?; let mut stmt = tx.prepare_cached("delete from garbage where composite_id = ?")?;
for &id in ids { for &id in ids {
let changes = stmt.execute(params![id.0])?; let changes = stmt.execute(params![id.0])?;
@ -323,11 +387,13 @@ pub(crate) fn mark_sample_files_deleted(tx: &rusqlite::Transaction, ids: &[Compo
} }
/// Gets the time range of recordings for the given stream. /// Gets the time range of recordings for the given stream.
pub(crate) fn get_range(conn: &rusqlite::Connection, stream_id: i32) pub(crate) fn get_range(
-> Result<Option<Range<recording::Time>>, Error> { conn: &rusqlite::Connection,
stream_id: i32,
) -> Result<Option<Range<recording::Time>>, Error> {
// The minimum is straightforward, taking advantage of the start_time_90k index. // The minimum is straightforward, taking advantage of the start_time_90k index.
let mut stmt = conn.prepare_cached(STREAM_MIN_START_SQL)?; let mut stmt = conn.prepare_cached(STREAM_MIN_START_SQL)?;
let mut rows = stmt.query_named(named_params!{":stream_id": stream_id})?; let mut rows = stmt.query_named(named_params! {":stream_id": stream_id})?;
let min_start = match rows.next()? { let min_start = match rows.next()? {
Some(row) => recording::Time(row.get(0)?), Some(row) => recording::Time(row.get(0)?),
None => return Ok(None), None => return Ok(None),
@ -338,15 +404,15 @@ pub(crate) fn get_range(conn: &rusqlite::Connection, stream_id: i32)
// last MAX_RECORDING_DURATION must be examined in order to take advantage of the // last MAX_RECORDING_DURATION must be examined in order to take advantage of the
// start_time_90k index. // start_time_90k index.
let mut stmt = conn.prepare_cached(STREAM_MAX_START_SQL)?; let mut stmt = conn.prepare_cached(STREAM_MAX_START_SQL)?;
let mut rows = stmt.query_named(named_params!{":stream_id": stream_id})?; let mut rows = stmt.query_named(named_params! {":stream_id": stream_id})?;
let mut maxes_opt = None; let mut maxes_opt = None;
while let Some(row) = rows.next()? { while let Some(row) = rows.next()? {
let row_start = recording::Time(row.get(0)?); let row_start = recording::Time(row.get(0)?);
let row_duration: i64 = row.get(1)?; let row_duration: i64 = row.get(1)?;
let row_end = recording::Time(row_start.0 + row_duration); let row_end = recording::Time(row_start.0 + row_duration);
let maxes = match maxes_opt { let maxes = match maxes_opt {
None => row_start .. row_end, None => row_start..row_end,
Some(Range{start: s, end: e}) => s .. ::std::cmp::max(e, row_end), Some(Range { start: s, end: e }) => s..::std::cmp::max(e, row_end),
}; };
if row_start.0 <= maxes.start.0 - recording::MAX_RECORDING_WALL_DURATION { if row_start.0 <= maxes.start.0 - recording::MAX_RECORDING_WALL_DURATION {
break; break;
@ -354,18 +420,24 @@ pub(crate) fn get_range(conn: &rusqlite::Connection, stream_id: i32)
maxes_opt = Some(maxes); maxes_opt = Some(maxes);
} }
let max_end = match maxes_opt { let max_end = match maxes_opt {
Some(Range{start: _, end: e}) => e, Some(Range { start: _, end: e }) => e,
None => bail!("missing max for stream {} which had min {}", stream_id, min_start), None => bail!(
"missing max for stream {} which had min {}",
stream_id,
min_start
),
}; };
Ok(Some(min_start .. max_end)) Ok(Some(min_start..max_end))
} }
/// Lists all garbage ids for the given sample file directory. /// Lists all garbage ids for the given sample file directory.
pub(crate) fn list_garbage(conn: &rusqlite::Connection, dir_id: i32) pub(crate) fn list_garbage(
-> Result<FnvHashSet<CompositeId>, Error> { conn: &rusqlite::Connection,
dir_id: i32,
) -> Result<FnvHashSet<CompositeId>, Error> {
let mut garbage = FnvHashSet::default(); let mut garbage = FnvHashSet::default();
let mut stmt = conn.prepare_cached( let mut stmt =
"select composite_id from garbage where sample_file_dir_id = ?")?; conn.prepare_cached("select composite_id from garbage where sample_file_dir_id = ?")?;
let mut rows = stmt.query(&[&dir_id])?; let mut rows = stmt.query(&[&dir_id])?;
while let Some(row) = rows.next()? { while let Some(row) = rows.next()? {
garbage.insert(CompositeId(row.get(0)?)); garbage.insert(CompositeId(row.get(0)?));
@ -375,11 +447,13 @@ pub(crate) fn list_garbage(conn: &rusqlite::Connection, dir_id: i32)
/// Lists the oldest recordings for a stream, starting with the given id. /// Lists the oldest recordings for a stream, starting with the given id.
/// `f` should return true as long as further rows are desired. /// `f` should return true as long as further rows are desired.
pub(crate) fn list_oldest_recordings(conn: &rusqlite::Connection, start: CompositeId, pub(crate) fn list_oldest_recordings(
f: &mut dyn FnMut(db::ListOldestRecordingsRow) -> bool) conn: &rusqlite::Connection,
-> Result<(), Error> { start: CompositeId,
f: &mut dyn FnMut(db::ListOldestRecordingsRow) -> bool,
) -> Result<(), Error> {
let mut stmt = conn.prepare_cached(LIST_OLDEST_RECORDINGS_SQL)?; let mut stmt = conn.prepare_cached(LIST_OLDEST_RECORDINGS_SQL)?;
let mut rows = stmt.query_named(named_params!{ let mut rows = stmt.query_named(named_params! {
":start": start.0, ":start": start.0,
":end": CompositeId::new(start.stream() + 1, 0).0, ":end": CompositeId::new(start.stream() + 1, 0).0,
})?; })?;

View File

@ -30,7 +30,7 @@
use crate::coding::{append_varint32, decode_varint32, unzigzag32, zigzag32}; use crate::coding::{append_varint32, decode_varint32, unzigzag32, zigzag32};
use crate::db; use crate::db;
use failure::{Error, bail}; use failure::{bail, Error};
use log::trace; use log::trace;
use std::convert::TryFrom; use std::convert::TryFrom;
use std::ops::Range; use std::ops::Range;
@ -40,14 +40,18 @@ pub use base::time::TIME_UNITS_PER_SEC;
pub const DESIRED_RECORDING_WALL_DURATION: i64 = 60 * TIME_UNITS_PER_SEC; pub const DESIRED_RECORDING_WALL_DURATION: i64 = 60 * TIME_UNITS_PER_SEC;
pub const MAX_RECORDING_WALL_DURATION: i64 = 5 * 60 * TIME_UNITS_PER_SEC; pub const MAX_RECORDING_WALL_DURATION: i64 = 5 * 60 * TIME_UNITS_PER_SEC;
pub use base::time::Time;
pub use base::time::Duration; pub use base::time::Duration;
pub use base::time::Time;
/// Converts from a wall time offset into a recording to a media time offset or vice versa. /// Converts from a wall time offset into a recording to a media time offset or vice versa.
pub fn rescale(from_off_90k: i32, from_duration_90k: i32, to_duration_90k: i32) -> i32 { pub fn rescale(from_off_90k: i32, from_duration_90k: i32, to_duration_90k: i32) -> i32 {
debug_assert!(from_off_90k <= from_duration_90k, debug_assert!(
"from_off_90k={} from_duration_90k={} to_duration_90k={}", from_off_90k <= from_duration_90k,
from_off_90k, from_duration_90k, to_duration_90k); "from_off_90k={} from_duration_90k={} to_duration_90k={}",
from_off_90k,
from_duration_90k,
to_duration_90k
);
if from_duration_90k == 0 { if from_duration_90k == 0 {
return 0; // avoid a divide by zero. return 0; // avoid a divide by zero.
} }
@ -56,12 +60,16 @@ pub fn rescale(from_off_90k: i32, from_duration_90k: i32, to_duration_90k: i32)
// time is recording::MAX_RECORDING_WALL_DURATION; the max media duration should be // time is recording::MAX_RECORDING_WALL_DURATION; the max media duration should be
// roughly the same (design limit of 500 ppm correction). The final result should fit // roughly the same (design limit of 500 ppm correction). The final result should fit
// within i32. // within i32.
i32::try_from(i64::from(from_off_90k) * i32::try_from(
i64::from(to_duration_90k) / i64::from(from_off_90k) * i64::from(to_duration_90k) / i64::from(from_duration_90k),
i64::from(from_duration_90k)) )
.map_err(|_| format!("rescale overflow: {} * {} / {} > i32::max_value()", .map_err(|_| {
from_off_90k, to_duration_90k, from_duration_90k)) format!(
.unwrap() "rescale overflow: {} * {} / {} > i32::max_value()",
from_off_90k, to_duration_90k, from_duration_90k
)
})
.unwrap()
} }
/// An iterator through a sample index. /// An iterator through a sample index.
@ -91,12 +99,14 @@ pub struct SampleIndexIterator {
impl SampleIndexIterator { impl SampleIndexIterator {
pub fn new() -> SampleIndexIterator { pub fn new() -> SampleIndexIterator {
SampleIndexIterator{i_and_is_key: 0, SampleIndexIterator {
pos: 0, i_and_is_key: 0,
start_90k: 0, pos: 0,
duration_90k: 0, start_90k: 0,
bytes: 0, duration_90k: 0,
bytes_other: 0} bytes: 0,
bytes_other: 0,
}
} }
pub fn next(&mut self, data: &[u8]) -> Result<bool, Error> { pub fn next(&mut self, data: &[u8]) -> Result<bool, Error> {
@ -104,7 +114,7 @@ impl SampleIndexIterator {
self.start_90k += self.duration_90k; self.start_90k += self.duration_90k;
let i = (self.i_and_is_key & 0x7FFF_FFFF) as usize; let i = (self.i_and_is_key & 0x7FFF_FFFF) as usize;
if i == data.len() { if i == data.len() {
return Ok(false) return Ok(false);
} }
let (raw1, i1) = match decode_varint32(data, i) { let (raw1, i1) = match decode_varint32(data, i) {
Ok(tuple) => tuple, Ok(tuple) => tuple,
@ -117,11 +127,17 @@ impl SampleIndexIterator {
let duration_90k_delta = unzigzag32(raw1 >> 1); let duration_90k_delta = unzigzag32(raw1 >> 1);
self.duration_90k += duration_90k_delta; self.duration_90k += duration_90k_delta;
if self.duration_90k < 0 { if self.duration_90k < 0 {
bail!("negative duration {} after applying delta {}", bail!(
self.duration_90k, duration_90k_delta); "negative duration {} after applying delta {}",
self.duration_90k,
duration_90k_delta
);
} }
if self.duration_90k == 0 && data.len() > i2 { if self.duration_90k == 0 && data.len() > i2 {
bail!("zero duration only allowed at end; have {} bytes left", data.len() - i2); bail!(
"zero duration only allowed at end; have {} bytes left",
data.len() - i2
);
} }
let (prev_bytes_key, prev_bytes_nonkey) = match self.is_key() { let (prev_bytes_key, prev_bytes_nonkey) = match self.is_key() {
true => (self.bytes, self.bytes_other), true => (self.bytes, self.bytes_other),
@ -137,14 +153,21 @@ impl SampleIndexIterator {
self.bytes_other = prev_bytes_key; self.bytes_other = prev_bytes_key;
} }
if self.bytes <= 0 { if self.bytes <= 0 {
bail!("non-positive bytes {} after applying delta {} to key={} frame at ts {}", bail!(
self.bytes, bytes_delta, self.is_key(), self.start_90k); "non-positive bytes {} after applying delta {} to key={} frame at ts {}",
self.bytes,
bytes_delta,
self.is_key(),
self.start_90k
);
} }
Ok(true) Ok(true)
} }
#[inline] #[inline]
pub fn is_key(&self) -> bool { (self.i_and_is_key & 0x8000_0000) != 0 } pub fn is_key(&self) -> bool {
(self.i_and_is_key & 0x8000_0000) != 0
}
} }
#[derive(Debug)] #[derive(Debug)]
@ -163,24 +186,33 @@ impl SampleIndexEncoder {
} }
} }
pub fn add_sample(&mut self, duration_90k: i32, bytes: i32, is_key: bool, pub fn add_sample(
r: &mut db::RecordingToInsert) { &mut self,
duration_90k: i32,
bytes: i32,
is_key: bool,
r: &mut db::RecordingToInsert,
) {
let duration_delta = duration_90k - self.prev_duration_90k; let duration_delta = duration_90k - self.prev_duration_90k;
self.prev_duration_90k = duration_90k; self.prev_duration_90k = duration_90k;
r.media_duration_90k += duration_90k; r.media_duration_90k += duration_90k;
r.sample_file_bytes += bytes; r.sample_file_bytes += bytes;
r.video_samples += 1; r.video_samples += 1;
let bytes_delta = bytes - if is_key { let bytes_delta = bytes
let prev = self.prev_bytes_key; - if is_key {
r.video_sync_samples += 1; let prev = self.prev_bytes_key;
self.prev_bytes_key = bytes; r.video_sync_samples += 1;
prev self.prev_bytes_key = bytes;
} else { prev
let prev = self.prev_bytes_nonkey; } else {
self.prev_bytes_nonkey = bytes; let prev = self.prev_bytes_nonkey;
prev self.prev_bytes_nonkey = bytes;
}; prev
append_varint32((zigzag32(duration_delta) << 1) | (is_key as u32), &mut r.video_index); };
append_varint32(
(zigzag32(duration_delta) << 1) | (is_key as u32),
&mut r.video_index,
);
append_varint32(zigzag32(bytes_delta), &mut r.video_index); append_varint32(zigzag32(bytes_delta), &mut r.video_index);
} }
} }
@ -218,10 +250,12 @@ impl Segment {
/// The actual range will end at the first frame after the desired range (unless the desired /// The actual range will end at the first frame after the desired range (unless the desired
/// range extends beyond the recording). Likewise, the caller is responsible for trimming the /// range extends beyond the recording). Likewise, the caller is responsible for trimming the
/// final frame's duration if desired. /// final frame's duration if desired.
pub fn new(db: &db::LockedDatabase, pub fn new(
recording: &db::ListRecordingsRow, db: &db::LockedDatabase,
desired_media_range_90k: Range<i32>, recording: &db::ListRecordingsRow,
start_at_key: bool) -> Result<Segment, Error> { desired_media_range_90k: Range<i32>,
start_at_key: bool,
) -> Result<Segment, Error> {
let mut self_ = Segment { let mut self_ = Segment {
id: recording.id, id: recording.id,
open_id: recording.open_id, open_id: recording.open_id,
@ -229,28 +263,39 @@ impl Segment {
file_end: recording.sample_file_bytes, file_end: recording.sample_file_bytes,
frames: recording.video_samples as u16, frames: recording.video_samples as u16,
key_frames: recording.video_sync_samples as u16, key_frames: recording.video_sync_samples as u16,
video_sample_entry_id_and_trailing_zero: video_sample_entry_id_and_trailing_zero: recording.video_sample_entry_id
recording.video_sample_entry_id | | ((((recording.flags & db::RecordingFlags::TrailingZero as i32) != 0) as i32)
((((recording.flags & db::RecordingFlags::TrailingZero as i32) != 0) as i32) << 31), << 31),
}; };
if desired_media_range_90k.start > desired_media_range_90k.end || if desired_media_range_90k.start > desired_media_range_90k.end
desired_media_range_90k.end > recording.media_duration_90k { || desired_media_range_90k.end > recording.media_duration_90k
bail!("desired media range [{}, {}) invalid for recording of length {}", {
desired_media_range_90k.start, desired_media_range_90k.end, bail!(
recording.media_duration_90k); "desired media range [{}, {}) invalid for recording of length {}",
desired_media_range_90k.start,
desired_media_range_90k.end,
recording.media_duration_90k
);
} }
if desired_media_range_90k.start == 0 && if desired_media_range_90k.start == 0
desired_media_range_90k.end == recording.media_duration_90k { && desired_media_range_90k.end == recording.media_duration_90k
{
// Fast path. Existing entry is fine. // Fast path. Existing entry is fine.
trace!("recording::Segment::new fast path, recording={:#?}", recording); trace!(
return Ok(self_) "recording::Segment::new fast path, recording={:#?}",
recording
);
return Ok(self_);
} }
// Slow path. Need to iterate through the index. // Slow path. Need to iterate through the index.
trace!("recording::Segment::new slow path, desired_media_range_90k={:?}, recording={:#?}", trace!(
desired_media_range_90k, recording); "recording::Segment::new slow path, desired_media_range_90k={:?}, recording={:#?}",
desired_media_range_90k,
recording
);
db.with_recording_playback(self_.id, &mut |playback| { db.with_recording_playback(self_.id, &mut |playback| {
let mut begin = Box::new(SampleIndexIterator::new()); let mut begin = Box::new(SampleIndexIterator::new());
let data = &(&playback).video_index; let data = &(&playback).video_index;
@ -274,8 +319,7 @@ impl Segment {
}; };
loop { loop {
if it.start_90k <= desired_media_range_90k.start && if it.start_90k <= desired_media_range_90k.start && (!start_at_key || it.is_key()) {
(!start_at_key || it.is_key()) {
// new start candidate. // new start candidate.
*begin = it; *begin = it;
self_.frames = 0; self_.frames = 0;
@ -293,8 +337,7 @@ impl Segment {
self_.begin = Some(begin); self_.begin = Some(begin);
self_.file_end = it.pos; self_.file_end = it.pos;
self_.video_sample_entry_id_and_trailing_zero = self_.video_sample_entry_id_and_trailing_zero =
recording.video_sample_entry_id | recording.video_sample_entry_id | (((it.duration_90k == 0) as i32) << 31);
(((it.duration_90k == 0) as i32) << 31);
Ok(()) Ok(())
})?; })?;
Ok(self_) Ok(self_)
@ -304,23 +347,33 @@ impl Segment {
self.video_sample_entry_id_and_trailing_zero & 0x7FFFFFFF self.video_sample_entry_id_and_trailing_zero & 0x7FFFFFFF
} }
pub fn have_trailing_zero(&self) -> bool { self.video_sample_entry_id_and_trailing_zero < 0 } pub fn have_trailing_zero(&self) -> bool {
self.video_sample_entry_id_and_trailing_zero < 0
}
/// Returns the byte range within the sample file of data associated with this segment. /// Returns the byte range within the sample file of data associated with this segment.
pub fn sample_file_range(&self) -> Range<u64> { pub fn sample_file_range(&self) -> Range<u64> {
self.begin.as_ref().map(|b| b.pos as u64).unwrap_or(0) .. self.file_end as u64 self.begin.as_ref().map(|b| b.pos as u64).unwrap_or(0)..self.file_end as u64
} }
/// Returns the actual media start time. As described in `new`, this can be less than the /// Returns the actual media start time. As described in `new`, this can be less than the
/// desired media start time if there is no key frame at the right position. /// desired media start time if there is no key frame at the right position.
pub fn actual_start_90k(&self) -> i32 { self.begin.as_ref().map(|b| b.start_90k).unwrap_or(0) } pub fn actual_start_90k(&self) -> i32 {
self.begin.as_ref().map(|b| b.start_90k).unwrap_or(0)
}
/// Iterates through each frame in the segment. /// Iterates through each frame in the segment.
/// Must be called without the database lock held; retrieves video index from the cache. /// Must be called without the database lock held; retrieves video index from the cache.
pub fn foreach<F>(&self, playback: &db::RecordingPlayback, mut f: F) -> Result<(), Error> pub fn foreach<F>(&self, playback: &db::RecordingPlayback, mut f: F) -> Result<(), Error>
where F: FnMut(&SampleIndexIterator) -> Result<(), Error> { where
trace!("foreach on recording {}: {} frames, actual_start_90k: {}", F: FnMut(&SampleIndexIterator) -> Result<(), Error>,
self.id, self.frames, self.actual_start_90k()); {
trace!(
"foreach on recording {}: {} frames, actual_start_90k: {}",
self.id,
self.frames,
self.actual_start_90k()
);
let data = &(&playback).video_index; let data = &(&playback).video_index;
let mut it = match self.begin { let mut it = match self.begin {
Some(ref b) => **b, Some(ref b) => **b,
@ -338,15 +391,23 @@ impl Segment {
let mut have_frame = true; let mut have_frame = true;
let mut key_frame = 0; let mut key_frame = 0;
for i in 0 .. self.frames { for i in 0..self.frames {
if !have_frame { if !have_frame {
bail!("recording {}: expected {} frames, found only {}", self.id, self.frames, i+1); bail!(
"recording {}: expected {} frames, found only {}",
self.id,
self.frames,
i + 1
);
} }
if it.is_key() { if it.is_key() {
key_frame += 1; key_frame += 1;
if key_frame > self.key_frames { if key_frame > self.key_frames {
bail!("recording {}: more than expected {} key frames", bail!(
self.id, self.key_frames); "recording {}: more than expected {} key frames",
self.id,
self.key_frames
);
} }
} }
@ -362,8 +423,12 @@ impl Segment {
}; };
} }
if key_frame < self.key_frames { if key_frame < self.key_frames {
bail!("recording {}: expected {} key frames, found only {}", bail!(
self.id, self.key_frames, key_frame); "recording {}: expected {} key frames, found only {}",
self.id,
self.key_frames,
key_frame
);
} }
Ok(()) Ok(())
} }
@ -382,9 +447,9 @@ impl Segment {
#[cfg(test)] #[cfg(test)]
mod tests { mod tests {
use base::clock::RealClocks;
use super::*; use super::*;
use crate::testutil::{self, TestDb}; use crate::testutil::{self, TestDb};
use base::clock::RealClocks;
/// Tests encoding the example from design/schema.md. /// Tests encoding the example from design/schema.md.
#[test] #[test]
@ -397,7 +462,10 @@ mod tests {
e.add_sample(11, 15, false, &mut r); e.add_sample(11, 15, false, &mut r);
e.add_sample(10, 12, false, &mut r); e.add_sample(10, 12, false, &mut r);
e.add_sample(10, 1050, true, &mut r); e.add_sample(10, 1050, true, &mut r);
assert_eq!(r.video_index, b"\x29\xd0\x0f\x02\x14\x08\x0a\x02\x05\x01\x64"); assert_eq!(
r.video_index,
b"\x29\xd0\x0f\x02\x14\x08\x0a\x02\x05\x01\x64"
);
assert_eq!(10 + 9 + 11 + 10 + 10, r.media_duration_90k); assert_eq!(10 + 9 + 11 + 10 + 10, r.media_duration_90k);
assert_eq!(5, r.video_samples); assert_eq!(5, r.video_samples);
assert_eq!(2, r.video_sync_samples); assert_eq!(2, r.video_sync_samples);
@ -413,12 +481,13 @@ mod tests {
bytes: i32, bytes: i32,
is_key: bool, is_key: bool,
} }
#[rustfmt::skip]
let samples = [ let samples = [
Sample{duration_90k: 10, bytes: 30000, is_key: true}, Sample { duration_90k: 10, bytes: 30000, is_key: true, },
Sample{duration_90k: 9, bytes: 1000, is_key: false}, Sample { duration_90k: 9, bytes: 1000, is_key: false, },
Sample{duration_90k: 11, bytes: 1100, is_key: false}, Sample { duration_90k: 11, bytes: 1100, is_key: false, },
Sample{duration_90k: 18, bytes: 31000, is_key: true}, Sample { duration_90k: 18, bytes: 31000, is_key: true, },
Sample{duration_90k: 0, bytes: 1000, is_key: false}, Sample { duration_90k: 0, bytes: 1000, is_key: false, },
]; ];
let mut r = db::RecordingToInsert::default(); let mut r = db::RecordingToInsert::default();
let mut e = SampleIndexEncoder::new(); let mut e = SampleIndexEncoder::new();
@ -428,10 +497,14 @@ mod tests {
let mut it = SampleIndexIterator::new(); let mut it = SampleIndexIterator::new();
for sample in &samples { for sample in &samples {
assert!(it.next(&r.video_index).unwrap()); assert!(it.next(&r.video_index).unwrap());
assert_eq!(sample, assert_eq!(
&Sample{duration_90k: it.duration_90k, sample,
bytes: it.bytes, &Sample {
is_key: it.is_key()}); duration_90k: it.duration_90k,
bytes: it.bytes,
is_key: it.is_key()
}
);
} }
assert!(!it.next(&r.video_index).unwrap()); assert!(!it.next(&r.video_index).unwrap());
} }
@ -446,14 +519,26 @@ mod tests {
err: &'static str, err: &'static str,
} }
let tests = [ let tests = [
Test{encoded: b"\x80", err: "bad varint 1 at offset 0"}, Test {
Test{encoded: b"\x00\x80", err: "bad varint 2 at offset 1"}, encoded: b"\x80",
Test{encoded: b"\x00\x02\x00\x00", err: "bad varint 1 at offset 0",
err: "zero duration only allowed at end; have 2 bytes left"}, },
Test{encoded: b"\x02\x02", Test {
err: "negative duration -1 after applying delta -1"}, encoded: b"\x00\x80",
Test{encoded: b"\x04\x00", err: "bad varint 2 at offset 1",
err: "non-positive bytes 0 after applying delta 0 to key=false frame at ts 0"}, },
Test {
encoded: b"\x00\x02\x00\x00",
err: "zero duration only allowed at end; have 2 bytes left",
},
Test {
encoded: b"\x02\x02",
err: "negative duration -1 after applying delta -1",
},
Test {
encoded: b"\x04\x00",
err: "non-positive bytes 0 after applying delta 0 to key=false frame at ts 0",
},
]; ];
for test in &tests { for test in &tests {
let mut it = SampleIndexIterator::new(); let mut it = SampleIndexIterator::new();
@ -462,11 +547,18 @@ mod tests {
} }
fn get_frames<F, T>(db: &db::Database, segment: &Segment, f: F) -> Vec<T> fn get_frames<F, T>(db: &db::Database, segment: &Segment, f: F) -> Vec<T>
where F: Fn(&SampleIndexIterator) -> T { where
F: Fn(&SampleIndexIterator) -> T,
{
let mut v = Vec::new(); let mut v = Vec::new();
db.lock().with_recording_playback(segment.id, &mut |playback| { db.lock()
segment.foreach(playback, |it| { v.push(f(it)); Ok(()) }) .with_recording_playback(segment.id, &mut |playback| {
}).unwrap(); segment.foreach(playback, |it| {
v.push(f(it));
Ok(())
})
})
.unwrap();
v v
} }
@ -486,8 +578,11 @@ mod tests {
let row = db.insert_recording_from_encoder(r); let row = db.insert_recording_from_encoder(r);
// Time range [2, 2 + 4 + 6 + 8) means the 2nd, 3rd, 4th samples should be // Time range [2, 2 + 4 + 6 + 8) means the 2nd, 3rd, 4th samples should be
// included. // included.
let segment = Segment::new(&db.db.lock(), &row, 2 .. 2+4+6+8, true).unwrap(); let segment = Segment::new(&db.db.lock(), &row, 2..2 + 4 + 6 + 8, true).unwrap();
assert_eq!(&get_frames(&db.db, &segment, |it| it.duration_90k), &[4, 6, 8]); assert_eq!(
&get_frames(&db.db, &segment, |it| it.duration_90k),
&[4, 6, 8]
);
} }
/// Half sync frames means starting from the last sync frame <= desired point. /// Half sync frames means starting from the last sync frame <= desired point.
@ -505,7 +600,7 @@ mod tests {
let row = db.insert_recording_from_encoder(r); let row = db.insert_recording_from_encoder(r);
// Time range [2 + 4 + 6, 2 + 4 + 6 + 8) means the 4th sample should be included. // Time range [2 + 4 + 6, 2 + 4 + 6 + 8) means the 4th sample should be included.
// The 3rd also gets pulled in because it is a sync frame and the 4th is not. // The 3rd also gets pulled in because it is a sync frame and the 4th is not.
let segment = Segment::new(&db.db.lock(), &row, 2+4+6 .. 2+4+6+8, true).unwrap(); let segment = Segment::new(&db.db.lock(), &row, 2 + 4 + 6..2 + 4 + 6 + 8, true).unwrap();
assert_eq!(&get_frames(&db.db, &segment, |it| it.duration_90k), &[6, 8]); assert_eq!(&get_frames(&db.db, &segment, |it| it.duration_90k), &[6, 8]);
} }
@ -519,7 +614,7 @@ mod tests {
encoder.add_sample(0, 3, true, &mut r); encoder.add_sample(0, 3, true, &mut r);
let db = TestDb::new(RealClocks {}); let db = TestDb::new(RealClocks {});
let row = db.insert_recording_from_encoder(r); let row = db.insert_recording_from_encoder(r);
let segment = Segment::new(&db.db.lock(), &row, 1 .. 2, true).unwrap(); let segment = Segment::new(&db.db.lock(), &row, 1..2, true).unwrap();
assert_eq!(&get_frames(&db.db, &segment, |it| it.bytes), &[2, 3]); assert_eq!(&get_frames(&db.db, &segment, |it| it.bytes), &[2, 3]);
} }
@ -532,7 +627,7 @@ mod tests {
encoder.add_sample(1, 1, true, &mut r); encoder.add_sample(1, 1, true, &mut r);
let db = TestDb::new(RealClocks {}); let db = TestDb::new(RealClocks {});
let row = db.insert_recording_from_encoder(r); let row = db.insert_recording_from_encoder(r);
let segment = Segment::new(&db.db.lock(), &row, 0 .. 0, true).unwrap(); let segment = Segment::new(&db.db.lock(), &row, 0..0, true).unwrap();
assert_eq!(&get_frames(&db.db, &segment, |it| it.bytes), &[1]); assert_eq!(&get_frames(&db.db, &segment, |it| it.bytes), &[1]);
} }
@ -550,8 +645,11 @@ mod tests {
} }
let db = TestDb::new(RealClocks {}); let db = TestDb::new(RealClocks {});
let row = db.insert_recording_from_encoder(r); let row = db.insert_recording_from_encoder(r);
let segment = Segment::new(&db.db.lock(), &row, 0 .. 2+4+6+8+10, true).unwrap(); let segment = Segment::new(&db.db.lock(), &row, 0..2 + 4 + 6 + 8 + 10, true).unwrap();
assert_eq!(&get_frames(&db.db, &segment, |it| it.duration_90k), &[2, 4, 6, 8, 10]); assert_eq!(
&get_frames(&db.db, &segment, |it| it.duration_90k),
&[2, 4, 6, 8, 10]
);
} }
#[test] #[test]
@ -564,14 +662,14 @@ mod tests {
encoder.add_sample(0, 3, true, &mut r); encoder.add_sample(0, 3, true, &mut r);
let db = TestDb::new(RealClocks {}); let db = TestDb::new(RealClocks {});
let row = db.insert_recording_from_encoder(r); let row = db.insert_recording_from_encoder(r);
let segment = Segment::new(&db.db.lock(), &row, 0 .. 2, true).unwrap(); let segment = Segment::new(&db.db.lock(), &row, 0..2, true).unwrap();
assert_eq!(&get_frames(&db.db, &segment, |it| it.bytes), &[1, 2, 3]); assert_eq!(&get_frames(&db.db, &segment, |it| it.bytes), &[1, 2, 3]);
} }
// TODO: test segment error cases involving mismatch between row frames/key_frames and index. // TODO: test segment error cases involving mismatch between row frames/key_frames and index.
} }
#[cfg(all(test, feature="nightly"))] #[cfg(all(test, feature = "nightly"))]
mod bench { mod bench {
extern crate test; extern crate test;

View File

@ -28,16 +28,16 @@
// You should have received a copy of the GNU General Public License // You should have received a copy of the GNU General Public License
// along with this program. If not, see <http://www.gnu.org/licenses/>. // along with this program. If not, see <http://www.gnu.org/licenses/>.
use base::bail_t;
use crate::coding; use crate::coding;
use crate::db::FromSqlUuid; use crate::db::FromSqlUuid;
use crate::recording; use crate::recording;
use failure::{Error, bail, format_err}; use base::bail_t;
use failure::{bail, format_err, Error};
use fnv::FnvHashMap; use fnv::FnvHashMap;
use log::debug; use log::debug;
use rusqlite::{Connection, Transaction, params}; use rusqlite::{params, Connection, Transaction};
use std::collections::{BTreeMap, BTreeSet};
use std::collections::btree_map::Entry; use std::collections::btree_map::Entry;
use std::collections::{BTreeMap, BTreeSet};
use std::ops::Range; use std::ops::Range;
use uuid::Uuid; use uuid::Uuid;
@ -132,7 +132,9 @@ impl Point {
/// `from` must be an iterator of `(signal, state)` with signal numbers in monotonically increasing /// `from` must be an iterator of `(signal, state)` with signal numbers in monotonically increasing
/// order. /// order.
fn append_serialized<'a, I>(from: I, to: &mut Vec<u8>) fn append_serialized<'a, I>(from: I, to: &mut Vec<u8>)
where I: IntoIterator<Item = (&'a u32, &'a u16)> { where
I: IntoIterator<Item = (&'a u32, &'a u16)>,
{
let mut next_allowed = 0; let mut next_allowed = 0;
for (&signal, &state) in from.into_iter() { for (&signal, &state) in from.into_iter() {
assert!(signal >= next_allowed); assert!(signal >= next_allowed);
@ -170,15 +172,18 @@ impl<'a> PointDataIterator<'a> {
if self.cur_pos == self.data.len() { if self.cur_pos == self.data.len() {
return Ok(None); return Ok(None);
} }
let (signal_delta, p) = coding::decode_varint32(self.data, self.cur_pos) let (signal_delta, p) = coding::decode_varint32(self.data, self.cur_pos).map_err(|()| {
.map_err(|()| format_err!("varint32 decode failure; data={:?} pos={}", format_err!(
self.data, self.cur_pos))?; "varint32 decode failure; data={:?} pos={}",
self.data,
self.cur_pos
)
})?;
let (state, p) = coding::decode_varint32(self.data, p) let (state, p) = coding::decode_varint32(self.data, p)
.map_err(|()| format_err!("varint32 decode failure; data={:?} pos={}", .map_err(|()| format_err!("varint32 decode failure; data={:?} pos={}", self.data, p))?;
self.data, p))?; let signal = self.cur_signal.checked_add(signal_delta).ok_or_else(|| {
let signal = self.cur_signal.checked_add(signal_delta) format_err!("signal overflow: {} + {}", self.cur_signal, signal_delta)
.ok_or_else(|| format_err!("signal overflow: {} + {}", })?;
self.cur_signal, signal_delta))?;
if state > u16::max_value() as u32 { if state > u16::max_value() as u32 {
bail!("state overflow: {}", state); bail!("state overflow: {}", state);
} }
@ -221,7 +226,9 @@ pub struct ListStateChangesRow {
impl State { impl State {
pub fn init(conn: &Connection) -> Result<Self, Error> { pub fn init(conn: &Connection) -> Result<Self, Error> {
let max_signal_changes: Option<i64> = let max_signal_changes: Option<i64> =
conn.query_row("select max_signal_changes from meta", params![], |row| row.get(0))?; conn.query_row("select max_signal_changes from meta", params![], |row| {
row.get(0)
})?;
let mut signals_by_id = State::init_signals(conn)?; let mut signals_by_id = State::init_signals(conn)?;
State::fill_signal_cameras(conn, &mut signals_by_id)?; State::fill_signal_cameras(conn, &mut signals_by_id)?;
Ok(State { Ok(State {
@ -234,8 +241,10 @@ impl State {
} }
pub fn list_changes_by_time( pub fn list_changes_by_time(
&self, desired_time: Range<recording::Time>, f: &mut dyn FnMut(&ListStateChangesRow)) { &self,
desired_time: Range<recording::Time>,
f: &mut dyn FnMut(&ListStateChangesRow),
) {
// First find the state immediately before. If it exists, include it. // First find the state immediately before. If it exists, include it.
if let Some((&when, p)) = self.points_by_time.range(..desired_time.start).next_back() { if let Some((&when, p)) = self.points_by_time.range(..desired_time.start).next_back() {
for (&signal, &state) in &p.after() { for (&signal, &state) in &p.after() {
@ -261,8 +270,11 @@ impl State {
} }
pub fn update_signals( pub fn update_signals(
&mut self, when: Range<recording::Time>, signals: &[u32], states: &[u16]) &mut self,
-> Result<(), base::Error> { when: Range<recording::Time>,
signals: &[u32],
states: &[u16],
) -> Result<(), base::Error> {
// Do input validation before any mutation. // Do input validation before any mutation.
self.update_signals_validate(signals, states)?; self.update_signals_validate(signals, states)?;
@ -294,11 +306,19 @@ impl State {
None => return, None => return,
Some(p) => p, Some(p) => p,
}; };
debug!("Performing signal GC: have {} points, want only {}, so removing {}", debug!(
self.points_by_time.len(), max, to_remove); "Performing signal GC: have {} points, want only {}, so removing {}",
self.points_by_time.len(),
max,
to_remove
);
let remove: smallvec::SmallVec<[recording::Time; 4]> = let remove: smallvec::SmallVec<[recording::Time; 4]> = self
self.points_by_time.keys().take(to_remove).map(|p| *p).collect(); .points_by_time
.keys()
.take(to_remove)
.map(|p| *p)
.collect();
for p in &remove { for p in &remove {
self.points_by_time.remove(p); self.points_by_time.remove(p);
@ -320,14 +340,20 @@ impl State {
None => bail_t!(InvalidArgument, "unknown signal {}", signal), None => bail_t!(InvalidArgument, "unknown signal {}", signal),
Some(ref s) => { Some(ref s) => {
let empty = Vec::new(); let empty = Vec::new();
let states = self.types_by_uuid.get(&s.type_) let states = self
.map(|t| &t.states) .types_by_uuid
.unwrap_or(&empty); .get(&s.type_)
.map(|t| &t.states)
.unwrap_or(&empty);
if state != 0 && states.binary_search_by_key(&state, |s| s.value).is_err() { if state != 0 && states.binary_search_by_key(&state, |s| s.value).is_err() {
bail_t!(FailedPrecondition, "signal {} specifies unknown state {}", bail_t!(
signal, state); FailedPrecondition,
"signal {} specifies unknown state {}",
signal,
state
);
} }
}, }
} }
next_allowed = signal + 1; next_allowed = signal + 1;
} }
@ -354,7 +380,10 @@ impl State {
// Any existing changes should still be applied. They win over reverting to prev. // Any existing changes should still be applied. They win over reverting to prev.
let mut it = p.changes(); let mut it = p.changes();
while let Some((signal, state)) = it.next().expect("in-mem changes is valid") { while let Some((signal, state)) = it.next().expect("in-mem changes is valid") {
changes.entry(signal).and_modify(|e| *e = state).or_insert(state); changes
.entry(signal)
.and_modify(|e| *e = state)
.or_insert(state);
} }
self.dirty_by_time.insert(t); self.dirty_by_time.insert(t);
p.swap(&mut Point::new(&prev, &serialize(&changes))); p.swap(&mut Point::new(&prev, &serialize(&changes)));
@ -374,20 +403,25 @@ impl State {
return; return;
} }
self.dirty_by_time.insert(end); self.dirty_by_time.insert(end);
self.points_by_time.insert(end, Point::new(&prev, &serialize(&changes))); self.points_by_time
.insert(end, Point::new(&prev, &serialize(&changes)));
} }
/// Helper for `update_signals_end`. Adjusts `prev` (the state prior to the end point) to /// Helper for `update_signals_end`. Adjusts `prev` (the state prior to the end point) to
/// reflect the desired update (in `signals` and `states`). Adjusts `changes` (changes to /// reflect the desired update (in `signals` and `states`). Adjusts `changes` (changes to
/// execute at the end point) to undo the change. /// execute at the end point) to undo the change.
fn update_signals_end_maps(signals: &[u32], states: &[u16], prev: &mut BTreeMap<u32, u16>, fn update_signals_end_maps(
changes: &mut BTreeMap<u32, u16>) { signals: &[u32],
states: &[u16],
prev: &mut BTreeMap<u32, u16>,
changes: &mut BTreeMap<u32, u16>,
) {
for (&signal, &state) in signals.iter().zip(states) { for (&signal, &state) in signals.iter().zip(states) {
match prev.entry(signal) { match prev.entry(signal) {
Entry::Vacant(e) => { Entry::Vacant(e) => {
changes.insert(signal, 0); changes.insert(signal, 0);
e.insert(state); e.insert(state);
}, }
Entry::Occupied(mut e) => { Entry::Occupied(mut e) => {
if state == 0 { if state == 0 {
changes.insert(signal, *e.get()); changes.insert(signal, *e.get());
@ -396,7 +430,7 @@ impl State {
changes.insert(signal, *e.get()); changes.insert(signal, *e.get());
*e.get_mut() = state; *e.get_mut() = state;
} }
}, }
} }
} }
} }
@ -421,13 +455,13 @@ impl State {
*e.get_mut() = state; *e.get_mut() = state;
} }
} }
}, }
Entry::Vacant(e) => { Entry::Vacant(e) => {
if signal != 0 { if signal != 0 {
dirty = true; dirty = true;
e.insert(state); e.insert(state);
} }
}, }
} }
} }
if dirty { if dirty {
@ -456,14 +490,19 @@ impl State {
} }
self.dirty_by_time.insert(start); self.dirty_by_time.insert(start);
self.points_by_time.insert(start, Point::new(&prev, &serialize(&changes))); self.points_by_time
.insert(start, Point::new(&prev, &serialize(&changes)));
} }
/// Helper for `update_signals` to apply all points in `(when.start, when.end)`. /// Helper for `update_signals` to apply all points in `(when.start, when.end)`.
fn update_signals_middle(&mut self, when: Range<recording::Time>, signals: &[u32], fn update_signals_middle(
states: &[u16]) { &mut self,
when: Range<recording::Time>,
signals: &[u32],
states: &[u16],
) {
let mut to_delete = Vec::new(); let mut to_delete = Vec::new();
let after_start = recording::Time(when.start.0+1); let after_start = recording::Time(when.start.0 + 1);
for (&t, ref mut p) in self.points_by_time.range_mut(after_start..when.end) { for (&t, ref mut p) in self.points_by_time.range_mut(after_start..when.end) {
let mut prev = p.prev().to_map().expect("in-mem prev is valid"); let mut prev = p.prev().to_map().expect("in-mem prev is valid");
@ -476,7 +515,7 @@ impl State {
} else if *e.get() != state { } else if *e.get() != state {
*e.get_mut() = state; *e.get_mut() = state;
} }
}, }
Entry::Vacant(e) => { Entry::Vacant(e) => {
if state != 0 { if state != 0 {
e.insert(state); e.insert(state);
@ -486,14 +525,16 @@ impl State {
} }
// Trim changes to omit any change to signals. // Trim changes to omit any change to signals.
let mut changes = Vec::with_capacity(3*signals.len()); let mut changes = Vec::with_capacity(3 * signals.len());
let mut it = p.changes(); let mut it = p.changes();
let mut next_allowed = 0; let mut next_allowed = 0;
let mut dirty = false; let mut dirty = false;
while let Some((signal, state)) = it.next().expect("in-memory changes is valid") { while let Some((signal, state)) = it.next().expect("in-memory changes is valid") {
if signals.binary_search(&signal).is_ok() { // discard. if signals.binary_search(&signal).is_ok() {
// discard.
dirty = true; dirty = true;
} else { // keep. } else {
// keep.
assert!(signal >= next_allowed); assert!(signal >= next_allowed);
coding::append_varint32(signal - next_allowed, &mut changes); coding::append_varint32(signal - next_allowed, &mut changes);
coding::append_varint32(state as u32, &mut changes); coding::append_varint32(state as u32, &mut changes);
@ -521,24 +562,25 @@ impl State {
/// The caller is expected to call `post_flush` afterward if the transaction is /// The caller is expected to call `post_flush` afterward if the transaction is
/// successfully committed. No mutations should happen between these calls. /// successfully committed. No mutations should happen between these calls.
pub fn flush(&mut self, tx: &Transaction) -> Result<(), Error> { pub fn flush(&mut self, tx: &Transaction) -> Result<(), Error> {
let mut i_stmt = tx.prepare(r#" let mut i_stmt = tx.prepare(
r#"
insert or replace into signal_change (time_90k, changes) values (?, ?) insert or replace into signal_change (time_90k, changes) values (?, ?)
"#)?; "#,
let mut d_stmt = tx.prepare(r#" )?;
let mut d_stmt = tx.prepare(
r#"
delete from signal_change where time_90k = ? delete from signal_change where time_90k = ?
"#)?; "#,
)?;
for &t in &self.dirty_by_time { for &t in &self.dirty_by_time {
match self.points_by_time.entry(t) { match self.points_by_time.entry(t) {
Entry::Occupied(ref e) => { Entry::Occupied(ref e) => {
let p = e.get(); let p = e.get();
i_stmt.execute(params![ i_stmt.execute(params![t.0, &p.data[p.changes_off..],])?;
t.0, }
&p.data[p.changes_off..],
])?;
},
Entry::Vacant(_) => { Entry::Vacant(_) => {
d_stmt.execute(params![t.0])?; d_stmt.execute(params![t.0])?;
}, }
} }
} }
Ok(()) Ok(())
@ -553,7 +595,8 @@ impl State {
fn init_signals(conn: &Connection) -> Result<BTreeMap<u32, Signal>, Error> { fn init_signals(conn: &Connection) -> Result<BTreeMap<u32, Signal>, Error> {
let mut signals = BTreeMap::new(); let mut signals = BTreeMap::new();
let mut stmt = conn.prepare(r#" let mut stmt = conn.prepare(
r#"
select select
id, id,
source_uuid, source_uuid,
@ -561,35 +604,41 @@ impl State {
short_name short_name
from from
signal signal
"#)?; "#,
)?;
let mut rows = stmt.query(params![])?; let mut rows = stmt.query(params![])?;
while let Some(row) = rows.next()? { while let Some(row) = rows.next()? {
let id = row.get(0)?; let id = row.get(0)?;
let source: FromSqlUuid = row.get(1)?; let source: FromSqlUuid = row.get(1)?;
let type_: FromSqlUuid = row.get(2)?; let type_: FromSqlUuid = row.get(2)?;
signals.insert(id, Signal { signals.insert(
id, id,
source: source.0, Signal {
type_: type_.0, id,
short_name: row.get(3)?, source: source.0,
cameras: Vec::new(), type_: type_.0,
}); short_name: row.get(3)?,
cameras: Vec::new(),
},
);
} }
Ok(signals) Ok(signals)
} }
fn init_points(conn: &Connection) -> Result<BTreeMap<recording::Time, Point>, Error> { fn init_points(conn: &Connection) -> Result<BTreeMap<recording::Time, Point>, Error> {
let mut stmt = conn.prepare(r#" let mut stmt = conn.prepare(
r#"
select select
time_90k, time_90k,
changes changes
from from
signal_change signal_change
order by time_90k order by time_90k
"#)?; "#,
)?;
let mut rows = stmt.query(params![])?; let mut rows = stmt.query(params![])?;
let mut points = BTreeMap::new(); let mut points = BTreeMap::new();
let mut cur = BTreeMap::new(); // latest signal -> state, where state != 0 let mut cur = BTreeMap::new(); // latest signal -> state, where state != 0
while let Some(row) = rows.next()? { while let Some(row) = rows.next()? {
let time_90k = recording::Time(row.get(0)?); let time_90k = recording::Time(row.get(0)?);
let changes = row.get_raw_checked(1)?.as_blob()?; let changes = row.get_raw_checked(1)?.as_blob()?;
@ -607,9 +656,12 @@ impl State {
} }
/// Fills the `cameras` field of the `Signal` structs within the supplied `signals`. /// Fills the `cameras` field of the `Signal` structs within the supplied `signals`.
fn fill_signal_cameras(conn: &Connection, signals: &mut BTreeMap<u32, Signal>) fn fill_signal_cameras(
-> Result<(), Error> { conn: &Connection,
let mut stmt = conn.prepare(r#" signals: &mut BTreeMap<u32, Signal>,
) -> Result<(), Error> {
let mut stmt = conn.prepare(
r#"
select select
signal_id, signal_id,
camera_id, camera_id,
@ -617,13 +669,14 @@ impl State {
from from
signal_camera signal_camera
order by signal_id, camera_id order by signal_id, camera_id
"#)?; "#,
)?;
let mut rows = stmt.query(params![])?; let mut rows = stmt.query(params![])?;
while let Some(row) = rows.next()? { while let Some(row) = rows.next()? {
let signal_id = row.get(0)?; let signal_id = row.get(0)?;
let s = signals.get_mut(&signal_id) let s = signals.get_mut(&signal_id).ok_or_else(|| {
.ok_or_else(|| format_err!("signal_camera row for unknown signal id {}", format_err!("signal_camera row for unknown signal id {}", signal_id)
signal_id))?; })?;
let type_ = row.get(2)?; let type_ = row.get(2)?;
s.cameras.push(SignalCamera { s.cameras.push(SignalCamera {
camera_id: row.get(1)?, camera_id: row.get(1)?,
@ -639,7 +692,8 @@ impl State {
fn init_types(conn: &Connection) -> Result<FnvHashMap<Uuid, Type>, Error> { fn init_types(conn: &Connection) -> Result<FnvHashMap<Uuid, Type>, Error> {
let mut types = FnvHashMap::default(); let mut types = FnvHashMap::default();
let mut stmt = conn.prepare(r#" let mut stmt = conn.prepare(
r#"
select select
type_uuid, type_uuid,
value, value,
@ -649,22 +703,31 @@ impl State {
from from
signal_type_enum signal_type_enum
order by type_uuid, value order by type_uuid, value
"#)?; "#,
)?;
let mut rows = stmt.query(params![])?; let mut rows = stmt.query(params![])?;
while let Some(row) = rows.next()? { while let Some(row) = rows.next()? {
let type_: FromSqlUuid = row.get(0)?; let type_: FromSqlUuid = row.get(0)?;
types.entry(type_.0).or_insert_with(Type::default).states.push(TypeState { types
value: row.get(1)?, .entry(type_.0)
name: row.get(2)?, .or_insert_with(Type::default)
motion: row.get(3)?, .states
color: row.get(4)?, .push(TypeState {
}); value: row.get(1)?,
name: row.get(2)?,
motion: row.get(3)?,
color: row.get(4)?,
});
} }
Ok(types) Ok(types)
} }
pub fn signals_by_id(&self) -> &BTreeMap<u32, Signal> { &self.signals_by_id } pub fn signals_by_id(&self) -> &BTreeMap<u32, Signal> {
pub fn types_by_uuid(&self) -> &FnvHashMap<Uuid, Type> { & self.types_by_uuid } &self.signals_by_id
}
pub fn types_by_uuid(&self) -> &FnvHashMap<Uuid, Type> {
&self.types_by_uuid
}
} }
/// Representation of a `signal` row. /// Representation of a `signal` row.
@ -698,9 +761,9 @@ pub struct Type {
#[cfg(test)] #[cfg(test)]
mod tests { mod tests {
use super::*;
use crate::{db, testutil}; use crate::{db, testutil};
use rusqlite::Connection; use rusqlite::Connection;
use super::*;
#[test] #[test]
fn test_point_data_it() { fn test_point_data_it() {
@ -719,8 +782,10 @@ mod tests {
let mut conn = Connection::open_in_memory().unwrap(); let mut conn = Connection::open_in_memory().unwrap();
db::init(&mut conn).unwrap(); db::init(&mut conn).unwrap();
let s = State::init(&conn).unwrap(); let s = State::init(&conn).unwrap();
s.list_changes_by_time(recording::Time::min_value() .. recording::Time::max_value(), s.list_changes_by_time(
&mut |_r| panic!("no changes expected")); recording::Time::min_value()..recording::Time::max_value(),
&mut |_r| panic!("no changes expected"),
);
} }
#[test] #[test]
@ -728,7 +793,8 @@ mod tests {
testutil::init(); testutil::init();
let mut conn = Connection::open_in_memory().unwrap(); let mut conn = Connection::open_in_memory().unwrap();
db::init(&mut conn).unwrap(); db::init(&mut conn).unwrap();
conn.execute_batch(r#" conn.execute_batch(
r#"
update meta set max_signal_changes = 2; update meta set max_signal_changes = 2;
insert into signal (id, source_uuid, type_uuid, short_name) insert into signal (id, source_uuid, type_uuid, short_name)
@ -740,12 +806,16 @@ mod tests {
insert into signal_type_enum (type_uuid, value, name, motion, color) insert into signal_type_enum (type_uuid, value, name, motion, color)
values (x'EE66270FD9C648198B339720D4CBCA6B', 1, 'still', 0, 'black'), values (x'EE66270FD9C648198B339720D4CBCA6B', 1, 'still', 0, 'black'),
(x'EE66270FD9C648198B339720D4CBCA6B', 2, 'moving', 1, 'red'); (x'EE66270FD9C648198B339720D4CBCA6B', 2, 'moving', 1, 'red');
"#).unwrap(); "#,
)
.unwrap();
let mut s = State::init(&conn).unwrap(); let mut s = State::init(&conn).unwrap();
s.list_changes_by_time(recording::Time::min_value() .. recording::Time::max_value(), s.list_changes_by_time(
&mut |_r| panic!("no changes expected")); recording::Time::min_value()..recording::Time::max_value(),
&mut |_r| panic!("no changes expected"),
);
const START: recording::Time = recording::Time(140067462600000); // 2019-04-26T11:59:00 const START: recording::Time = recording::Time(140067462600000); // 2019-04-26T11:59:00
const NOW: recording::Time = recording::Time(140067468000000); // 2019-04-26T12:00:00 const NOW: recording::Time = recording::Time(140067468000000); // 2019-04-26T12:00:00
s.update_signals(START..NOW, &[1, 2], &[2, 1]).unwrap(); s.update_signals(START..NOW, &[1, 2], &[2, 1]).unwrap();
let mut rows = Vec::new(); let mut rows = Vec::new();
@ -770,10 +840,12 @@ mod tests {
signal: 2, signal: 2,
state: 0, state: 0,
}, },
]; ];
s.list_changes_by_time(recording::Time::min_value() .. recording::Time::max_value(), s.list_changes_by_time(
&mut |r| rows.push(*r)); recording::Time::min_value()..recording::Time::max_value(),
&mut |r| rows.push(*r),
);
assert_eq!(&rows[..], EXPECTED); assert_eq!(&rows[..], EXPECTED);
{ {
@ -785,8 +857,10 @@ mod tests {
drop(s); drop(s);
let mut s = State::init(&conn).unwrap(); let mut s = State::init(&conn).unwrap();
rows.clear(); rows.clear();
s.list_changes_by_time(recording::Time::min_value() .. recording::Time::max_value(), s.list_changes_by_time(
&mut |r| rows.push(*r)); recording::Time::min_value()..recording::Time::max_value(),
&mut |r| rows.push(*r),
);
assert_eq!(&rows[..], EXPECTED); assert_eq!(&rows[..], EXPECTED);
// Go through it again. This time, hit the max number of signals, forcing START to be // Go through it again. This time, hit the max number of signals, forcing START to be
@ -815,9 +889,11 @@ mod tests {
signal: 2, signal: 2,
state: 0, state: 0,
}, },
]; ];
s.list_changes_by_time(recording::Time::min_value() .. recording::Time::max_value(), s.list_changes_by_time(
&mut |r| rows.push(*r)); recording::Time::min_value()..recording::Time::max_value(),
&mut |r| rows.push(*r),
);
assert_eq!(&rows[..], EXPECTED2); assert_eq!(&rows[..], EXPECTED2);
{ {
@ -828,8 +904,10 @@ mod tests {
drop(s); drop(s);
let s = State::init(&conn).unwrap(); let s = State::init(&conn).unwrap();
rows.clear(); rows.clear();
s.list_changes_by_time(recording::Time::min_value() .. recording::Time::max_value(), s.list_changes_by_time(
&mut |r| rows.push(*r)); recording::Time::min_value()..recording::Time::max_value(),
&mut |r| rows.push(*r),
);
assert_eq!(&rows[..], EXPECTED2); assert_eq!(&rows[..], EXPECTED2);
} }
} }

View File

@ -28,9 +28,10 @@
// You should have received a copy of the GNU General Public License // You should have received a copy of the GNU General Public License
// along with this program. If not, see <http://www.gnu.org/licenses/>. // along with this program. If not, see <http://www.gnu.org/licenses/>.
use base::clock::Clocks;
use crate::db; use crate::db;
use crate::dir; use crate::dir;
use crate::writer;
use base::clock::Clocks;
use fnv::FnvHashMap; use fnv::FnvHashMap;
use mylog; use mylog;
use rusqlite; use rusqlite;
@ -40,7 +41,6 @@ use std::thread;
use tempdir::TempDir; use tempdir::TempDir;
use time; use time;
use uuid::Uuid; use uuid::Uuid;
use crate::writer;
static INIT: parking_lot::Once = parking_lot::Once::new(); static INIT: parking_lot::Once = parking_lot::Once::new();
@ -101,29 +101,39 @@ impl<C: Clocks + Clone> TestDb<C> {
{ {
let mut l = db.lock(); let mut l = db.lock();
sample_file_dir_id = l.add_sample_file_dir(path.to_owned()).unwrap(); sample_file_dir_id = l.add_sample_file_dir(path.to_owned()).unwrap();
assert_eq!(TEST_CAMERA_ID, l.add_camera(db::CameraChange { assert_eq!(
short_name: "test camera".to_owned(), TEST_CAMERA_ID,
description: "".to_owned(), l.add_camera(db::CameraChange {
onvif_host: "test-camera".to_owned(), short_name: "test camera".to_owned(),
username: "foo".to_owned(), description: "".to_owned(),
password: "bar".to_owned(), onvif_host: "test-camera".to_owned(),
streams: [ username: "foo".to_owned(),
db::StreamChange { password: "bar".to_owned(),
sample_file_dir_id: Some(sample_file_dir_id), streams: [
rtsp_url: "rtsp://test-camera/main".to_owned(), db::StreamChange {
record: true, sample_file_dir_id: Some(sample_file_dir_id),
flush_if_sec, rtsp_url: "rtsp://test-camera/main".to_owned(),
}, record: true,
Default::default(), flush_if_sec,
], },
}).unwrap()); Default::default(),
],
})
.unwrap()
);
test_camera_uuid = l.cameras_by_id().get(&TEST_CAMERA_ID).unwrap().uuid; test_camera_uuid = l.cameras_by_id().get(&TEST_CAMERA_ID).unwrap().uuid;
l.update_retention(&[db::RetentionChange { l.update_retention(&[db::RetentionChange {
stream_id: TEST_STREAM_ID, stream_id: TEST_STREAM_ID,
new_record: true, new_record: true,
new_limit: 1048576, new_limit: 1048576,
}]).unwrap(); }])
dir = l.sample_file_dirs_by_id().get(&sample_file_dir_id).unwrap().get().unwrap(); .unwrap();
dir = l
.sample_file_dirs_by_id()
.get(&sample_file_dir_id)
.unwrap()
.get()
.unwrap();
} }
let mut dirs_by_stream_id = FnvHashMap::default(); let mut dirs_by_stream_id = FnvHashMap::default();
dirs_by_stream_id.insert(TEST_STREAM_ID, dir.clone()); dirs_by_stream_id.insert(TEST_STREAM_ID, dir.clone());
@ -142,48 +152,63 @@ impl<C: Clocks + Clone> TestDb<C> {
/// Creates a recording with a fresh `RecordingToInsert` row which has been touched only by /// Creates a recording with a fresh `RecordingToInsert` row which has been touched only by
/// a `SampleIndexEncoder`. Fills in a video sample entry id and such to make it valid. /// a `SampleIndexEncoder`. Fills in a video sample entry id and such to make it valid.
/// There will no backing sample file, so it won't be possible to generate a full `.mp4`. /// There will no backing sample file, so it won't be possible to generate a full `.mp4`.
pub fn insert_recording_from_encoder(&self, r: db::RecordingToInsert) pub fn insert_recording_from_encoder(&self, r: db::RecordingToInsert) -> db::ListRecordingsRow {
-> db::ListRecordingsRow {
use crate::recording::{self, TIME_UNITS_PER_SEC}; use crate::recording::{self, TIME_UNITS_PER_SEC};
let mut db = self.db.lock(); let mut db = self.db.lock();
let video_sample_entry_id = db.insert_video_sample_entry(db::VideoSampleEntryToInsert { let video_sample_entry_id = db
.insert_video_sample_entry(db::VideoSampleEntryToInsert {
width: 1920,
height: 1080,
pasp_h_spacing: 1,
pasp_v_spacing: 1,
data: [0u8; 100].to_vec(),
rfc6381_codec: "avc1.000000".to_owned(),
})
.unwrap();
let (id, _) = db
.add_recording(
TEST_STREAM_ID,
db::RecordingToInsert {
start: recording::Time(1430006400i64 * TIME_UNITS_PER_SEC),
video_sample_entry_id,
wall_duration_90k: r.media_duration_90k,
..r
},
)
.unwrap();
db.mark_synced(id).unwrap();
db.flush("create_recording_from_encoder").unwrap();
let mut row = None;
db.list_recordings_by_id(
TEST_STREAM_ID,
id.recording()..id.recording() + 1,
&mut |r| {
row = Some(r);
Ok(())
},
)
.unwrap();
row.unwrap()
}
}
// For benchmarking
#[cfg(feature = "nightly")]
pub fn add_dummy_recordings_to_db(db: &db::Database, num: usize) {
use crate::recording::{self, TIME_UNITS_PER_SEC};
let mut data = Vec::new();
data.extend_from_slice(include_bytes!("testdata/video_sample_index.bin"));
let mut db = db.lock();
let video_sample_entry_id = db
.insert_video_sample_entry(db::VideoSampleEntryToInsert {
width: 1920, width: 1920,
height: 1080, height: 1080,
pasp_h_spacing: 1, pasp_h_spacing: 1,
pasp_v_spacing: 1, pasp_v_spacing: 1,
data: [0u8; 100].to_vec(), data: [0u8; 100].to_vec(),
rfc6381_codec: "avc1.000000".to_owned(), rfc6381_codec: "avc1.000000".to_owned(),
}).unwrap(); })
let (id, _) = db.add_recording(TEST_STREAM_ID, db::RecordingToInsert { .unwrap();
start: recording::Time(1430006400i64 * TIME_UNITS_PER_SEC),
video_sample_entry_id,
wall_duration_90k: r.media_duration_90k,
..r
}).unwrap();
db.mark_synced(id).unwrap();
db.flush("create_recording_from_encoder").unwrap();
let mut row = None;
db.list_recordings_by_id(TEST_STREAM_ID, id.recording() .. id.recording()+1,
&mut |r| { row = Some(r); Ok(()) }).unwrap();
row.unwrap()
}
}
// For benchmarking
#[cfg(feature="nightly")]
pub fn add_dummy_recordings_to_db(db: &db::Database, num: usize) {
use crate::recording::{self, TIME_UNITS_PER_SEC};
let mut data = Vec::new();
data.extend_from_slice(include_bytes!("testdata/video_sample_index.bin"));
let mut db = db.lock();
let video_sample_entry_id = db.insert_video_sample_entry(db::VideoSampleEntryToInsert {
width: 1920,
height: 1080,
pasp_h_spacing: 1,
pasp_v_spacing: 1,
data: [0u8; 100].to_vec(),
rfc6381_codec: "avc1.000000".to_owned(),
}).unwrap();
let mut recording = db::RecordingToInsert { let mut recording = db::RecordingToInsert {
sample_file_bytes: 30104460, sample_file_bytes: 30104460,
start: recording::Time(1430006400i64 * TIME_UNITS_PER_SEC), start: recording::Time(1430006400i64 * TIME_UNITS_PER_SEC),

View File

@ -31,14 +31,13 @@
/// Upgrades the database schema. /// Upgrades the database schema.
/// ///
/// See `guide/schema.md` for more information. /// See `guide/schema.md` for more information.
use crate::db; use crate::db;
use failure::{Error, bail}; use failure::{bail, Error};
use log::info; use log::info;
use std::ffi::CStr;
use std::io::Write;
use nix::NixPath; use nix::NixPath;
use rusqlite::params; use rusqlite::params;
use std::ffi::CStr;
use std::io::Write;
use uuid::Uuid; use uuid::Uuid;
mod v0_to_v1; mod v0_to_v1;
@ -59,10 +58,16 @@ pub struct Args<'a> {
} }
fn set_journal_mode(conn: &rusqlite::Connection, requested: &str) -> Result<(), Error> { fn set_journal_mode(conn: &rusqlite::Connection, requested: &str) -> Result<(), Error> {
assert!(!requested.contains(';')); // quick check for accidental sql injection. assert!(!requested.contains(';')); // quick check for accidental sql injection.
let actual = conn.query_row(&format!("pragma journal_mode = {}", requested), params![], let actual = conn.query_row(
|row| row.get::<_, String>(0))?; &format!("pragma journal_mode = {}", requested),
info!("...database now in journal_mode {} (requested {}).", actual, requested); params![],
|row| row.get::<_, String>(0),
)?;
info!(
"...database now in journal_mode {} (requested {}).",
actual, requested
);
Ok(()) Ok(())
} }
@ -78,24 +83,31 @@ fn upgrade(args: &Args, target_ver: i32, conn: &mut rusqlite::Connection) -> Res
{ {
assert_eq!(upgraders.len(), db::EXPECTED_VERSION as usize); assert_eq!(upgraders.len(), db::EXPECTED_VERSION as usize);
let old_ver = let old_ver = conn.query_row("select max(id) from version", params![], |row| row.get(0))?;
conn.query_row("select max(id) from version", params![],
|row| row.get(0))?;
if old_ver > db::EXPECTED_VERSION { if old_ver > db::EXPECTED_VERSION {
bail!("Database is at version {}, later than expected {}", bail!(
old_ver, db::EXPECTED_VERSION); "Database is at version {}, later than expected {}",
old_ver,
db::EXPECTED_VERSION
);
} else if old_ver < 0 { } else if old_ver < 0 {
bail!("Database is at negative version {}!", old_ver); bail!("Database is at negative version {}!", old_ver);
} }
info!("Upgrading database from version {} to version {}...", old_ver, target_ver); info!(
for ver in old_ver .. target_ver { "Upgrading database from version {} to version {}...",
old_ver, target_ver
);
for ver in old_ver..target_ver {
info!("...from version {} to version {}", ver, ver + 1); info!("...from version {} to version {}", ver, ver + 1);
let tx = conn.transaction()?; let tx = conn.transaction()?;
upgraders[ver as usize](&args, &tx)?; upgraders[ver as usize](&args, &tx)?;
tx.execute(r#" tx.execute(
r#"
insert into version (id, unix_time, notes) insert into version (id, unix_time, notes)
values (?, cast(strftime('%s', 'now') as int32), ?) values (?, cast(strftime('%s', 'now') as int32), ?)
"#, params![ver + 1, UPGRADE_NOTES])?; "#,
params![ver + 1, UPGRADE_NOTES],
)?;
tx.commit()?; tx.commit()?;
} }
} }
@ -117,10 +129,12 @@ pub fn run(args: &Args, conn: &mut rusqlite::Connection) -> Result<(), Error> {
// non-WAL mode. https://www.sqlite.org/wal.html // non-WAL mode. https://www.sqlite.org/wal.html
if !args.no_vacuum { if !args.no_vacuum {
info!("...vacuuming database after upgrade."); info!("...vacuuming database after upgrade.");
conn.execute_batch(r#" conn.execute_batch(
r#"
pragma page_size = 16384; pragma page_size = 16384;
vacuum; vacuum;
"#)?; "#,
)?;
} }
set_journal_mode(&conn, "wal")?; set_journal_mode(&conn, "wal")?;
@ -142,11 +156,17 @@ impl UuidPath {
} }
impl NixPath for UuidPath { impl NixPath for UuidPath {
fn is_empty(&self) -> bool { false } fn is_empty(&self) -> bool {
fn len(&self) -> usize { 36 } false
}
fn len(&self) -> usize {
36
}
fn with_nix_path<T, F>(&self, f: F) -> Result<T, nix::Error> fn with_nix_path<T, F>(&self, f: F) -> Result<T, nix::Error>
where F: FnOnce(&CStr) -> T { where
F: FnOnce(&CStr) -> T,
{
let p = CStr::from_bytes_with_nul(&self.0[..]).expect("no interior nuls"); let p = CStr::from_bytes_with_nul(&self.0[..]).expect("no interior nuls");
Ok(f(p)) Ok(f(p))
} }
@ -154,14 +174,13 @@ impl NixPath for UuidPath {
#[cfg(test)] #[cfg(test)]
mod tests { mod tests {
use super::*;
use crate::compare; use crate::compare;
use crate::testutil; use crate::testutil;
use failure::ResultExt; use failure::ResultExt;
use fnv::FnvHashMap; use fnv::FnvHashMap;
use super::*;
const BAD_ANAMORPHIC_VIDEO_SAMPLE_ENTRY: &[u8] = const BAD_ANAMORPHIC_VIDEO_SAMPLE_ENTRY: &[u8] = b"\x00\x00\x00\x84\x61\x76\x63\x31\x00\x00\
b"\x00\x00\x00\x84\x61\x76\x63\x31\x00\x00\
\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\ \x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x01\x40\x00\xf0\x00\x48\x00\x00\x00\x48\ \x00\x00\x00\x00\x00\x00\x01\x40\x00\xf0\x00\x48\x00\x00\x00\x48\
\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\ \x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\
@ -209,29 +228,44 @@ mod tests {
//let path = tmpdir.path().to_str().ok_or_else(|| format_err!("invalid UTF-8"))?.to_owned(); //let path = tmpdir.path().to_str().ok_or_else(|| format_err!("invalid UTF-8"))?.to_owned();
let mut upgraded = new_conn()?; let mut upgraded = new_conn()?;
upgraded.execute_batch(include_str!("v0.sql"))?; upgraded.execute_batch(include_str!("v0.sql"))?;
upgraded.execute_batch(r#" upgraded.execute_batch(
r#"
insert into camera (id, uuid, short_name, description, host, username, password, insert into camera (id, uuid, short_name, description, host, username, password,
main_rtsp_path, sub_rtsp_path, retain_bytes) main_rtsp_path, sub_rtsp_path, retain_bytes)
values (1, zeroblob(16), 'test camera', 'desc', 'host', 'user', 'pass', values (1, zeroblob(16), 'test camera', 'desc', 'host', 'user', 'pass',
'main', 'sub', 42); 'main', 'sub', 42);
"#)?; "#,
upgraded.execute(r#" )?;
upgraded.execute(
r#"
insert into video_sample_entry (id, sha1, width, height, data) insert into video_sample_entry (id, sha1, width, height, data)
values (1, X'0000000000000000000000000000000000000000', 1920, 1080, ?); values (1, X'0000000000000000000000000000000000000000', 1920, 1080, ?);
"#, params![testutil::TEST_VIDEO_SAMPLE_ENTRY_DATA])?; "#,
upgraded.execute(r#" params![testutil::TEST_VIDEO_SAMPLE_ENTRY_DATA],
)?;
upgraded.execute(
r#"
insert into video_sample_entry (id, sha1, width, height, data) insert into video_sample_entry (id, sha1, width, height, data)
values (2, X'0000000000000000000000000000000000000001', 320, 240, ?); values (2, X'0000000000000000000000000000000000000001', 320, 240, ?);
"#, params![BAD_ANAMORPHIC_VIDEO_SAMPLE_ENTRY])?; "#,
upgraded.execute(r#" params![BAD_ANAMORPHIC_VIDEO_SAMPLE_ENTRY],
)?;
upgraded.execute(
r#"
insert into video_sample_entry (id, sha1, width, height, data) insert into video_sample_entry (id, sha1, width, height, data)
values (3, X'0000000000000000000000000000000000000002', 704, 480, ?); values (3, X'0000000000000000000000000000000000000002', 704, 480, ?);
"#, params![GOOD_ANAMORPHIC_VIDEO_SAMPLE_ENTRY])?; "#,
upgraded.execute(r#" params![GOOD_ANAMORPHIC_VIDEO_SAMPLE_ENTRY],
)?;
upgraded.execute(
r#"
insert into video_sample_entry (id, sha1, width, height, data) insert into video_sample_entry (id, sha1, width, height, data)
values (4, X'0000000000000000000000000000000000000003', 704, 480, ?); values (4, X'0000000000000000000000000000000000000003', 704, 480, ?);
"#, params![GOOD_ANAMORPHIC_VIDEO_SAMPLE_ENTRY])?; "#,
upgraded.execute_batch(r#" params![GOOD_ANAMORPHIC_VIDEO_SAMPLE_ENTRY],
)?;
upgraded.execute_batch(
r#"
insert into recording (id, camera_id, sample_file_bytes, start_time_90k, duration_90k, insert into recording (id, camera_id, sample_file_bytes, start_time_90k, duration_90k,
local_time_delta_90k, video_samples, video_sync_samples, local_time_delta_90k, video_samples, video_sync_samples,
video_sample_entry_id, sample_file_uuid, sample_file_sha1, video_sample_entry_id, sample_file_uuid, sample_file_sha1,
@ -244,7 +278,8 @@ mod tests {
X'C94D4D0B533746059CD40B29039E641E', zeroblob(20), X'00'); X'C94D4D0B533746059CD40B29039E641E', zeroblob(20), X'00');
insert into reserved_sample_files values (X'51EF700C933E4197AAE4EE8161E94221', 0), insert into reserved_sample_files values (X'51EF700C933E4197AAE4EE8161E94221', 0),
(X'E69D45E8CBA64DC1BA2ECB1585983A10', 1); (X'E69D45E8CBA64DC1BA2ECB1585983A10', 1);
"#)?; "#,
)?;
let rec1 = tmpdir.path().join("e69d45e8-cba6-4dc1-ba2e-cb1585983a10"); let rec1 = tmpdir.path().join("e69d45e8-cba6-4dc1-ba2e-cb1585983a10");
let rec2 = tmpdir.path().join("94de8484-ff87-4a52-95d4-88c8038a0312"); let rec2 = tmpdir.path().join("94de8484-ff87-4a52-95d4-88c8038a0312");
let rec3 = tmpdir.path().join("c94d4d0b-5337-4605-9cd4-0b29039e641e"); let rec3 = tmpdir.path().join("c94d4d0b-5337-4605-9cd4-0b29039e641e");
@ -254,17 +289,24 @@ mod tests {
std::fs::File::create(&rec3)?; std::fs::File::create(&rec3)?;
std::fs::File::create(&garbage)?; std::fs::File::create(&garbage)?;
for (ver, fresh_sql) in &[(1, Some(include_str!("v1.sql"))), for (ver, fresh_sql) in &[
(2, None), // transitional; don't compare schemas. (1, Some(include_str!("v1.sql"))),
(3, Some(include_str!("v3.sql"))), (2, None), // transitional; don't compare schemas.
(4, None), // transitional; don't compare schemas. (3, Some(include_str!("v3.sql"))),
(5, Some(include_str!("v5.sql"))), (4, None), // transitional; don't compare schemas.
(6, Some(include_str!("../schema.sql")))] { (5, Some(include_str!("v5.sql"))),
upgrade(&Args { (6, Some(include_str!("../schema.sql"))),
sample_file_dir: Some(&tmpdir.path()), ] {
preset_journal: "delete", upgrade(
no_vacuum: false, &Args {
}, *ver, &mut upgraded).context(format!("upgrading to version {}", ver))?; sample_file_dir: Some(&tmpdir.path()),
preset_journal: "delete",
no_vacuum: false,
},
*ver,
&mut upgraded,
)
.context(format!("upgrading to version {}", ver))?;
if let Some(f) = fresh_sql { if let Some(f) = fresh_sql {
compare(&upgraded, *ver, f)?; compare(&upgraded, *ver, f)?;
} }
@ -277,14 +319,16 @@ mod tests {
} }
if *ver == 6 { if *ver == 6 {
// Check that the pasp was set properly. // Check that the pasp was set properly.
let mut stmt = upgraded.prepare(r#" let mut stmt = upgraded.prepare(
r#"
select select
id, id,
pasp_h_spacing, pasp_h_spacing,
pasp_v_spacing pasp_v_spacing
from from
video_sample_entry video_sample_entry
"#)?; "#,
)?;
let mut rows = stmt.query(params![])?; let mut rows = stmt.query(params![])?;
let mut pasp_by_id = FnvHashMap::default(); let mut pasp_by_id = FnvHashMap::default();
while let Some(row) = rows.next()? { while let Some(row) = rows.next()? {

View File

@ -29,7 +29,6 @@
// along with this program. If not, see <http://www.gnu.org/licenses/>. // along with this program. If not, see <http://www.gnu.org/licenses/>.
/// Upgrades a version 0 schema to a version 1 schema. /// Upgrades a version 0 schema to a version 1 schema.
use crate::db; use crate::db;
use crate::recording; use crate::recording;
use failure::Error; use failure::Error;
@ -39,7 +38,8 @@ use std::collections::HashMap;
pub fn run(_args: &super::Args, tx: &rusqlite::Transaction) -> Result<(), Error> { pub fn run(_args: &super::Args, tx: &rusqlite::Transaction) -> Result<(), Error> {
// These create statements match the schema.sql when version 1 was the latest. // These create statements match the schema.sql when version 1 was the latest.
tx.execute_batch(r#" tx.execute_batch(
r#"
alter table camera rename to old_camera; alter table camera rename to old_camera;
create table camera ( create table camera (
id integer primary key, id integer primary key,
@ -103,13 +103,16 @@ pub fn run(_args: &super::Args, tx: &rusqlite::Transaction) -> Result<(), Error>
1 as next_recording_id 1 as next_recording_id
from from
old_camera; old_camera;
"#)?; "#,
)?;
let camera_state = fill_recording(tx)?; let camera_state = fill_recording(tx)?;
update_camera(tx, camera_state)?; update_camera(tx, camera_state)?;
tx.execute_batch(r#" tx.execute_batch(
r#"
drop table old_recording; drop table old_recording;
drop table old_camera; drop table old_camera;
"#)?; "#,
)?;
Ok(()) Ok(())
} }
@ -130,7 +133,8 @@ fn has_trailing_zero(video_index: &[u8]) -> Result<bool, Error> {
/// Fills the `recording` and `recording_playback` tables from `old_recording`, returning /// Fills the `recording` and `recording_playback` tables from `old_recording`, returning
/// the `camera_state` map for use by a following call to `fill_cameras`. /// the `camera_state` map for use by a following call to `fill_cameras`.
fn fill_recording(tx: &rusqlite::Transaction) -> Result<HashMap<i32, CameraState>, Error> { fn fill_recording(tx: &rusqlite::Transaction) -> Result<HashMap<i32, CameraState>, Error> {
let mut select = tx.prepare(r#" let mut select = tx.prepare(
r#"
select select
camera_id, camera_id,
sample_file_bytes, sample_file_bytes,
@ -146,27 +150,32 @@ fn fill_recording(tx: &rusqlite::Transaction) -> Result<HashMap<i32, CameraState
id id
from from
old_recording old_recording
"#)?; "#,
let mut insert1 = tx.prepare(r#" )?;
let mut insert1 = tx.prepare(
r#"
insert into recording values (:composite_id, :camera_id, :run_offset, :flags, insert into recording values (:composite_id, :camera_id, :run_offset, :flags,
:sample_file_bytes, :start_time_90k, :duration_90k, :sample_file_bytes, :start_time_90k, :duration_90k,
:local_time_delta_90k, :video_samples, :video_sync_samples, :local_time_delta_90k, :video_samples, :video_sync_samples,
:video_sample_entry_id) :video_sample_entry_id)
"#)?; "#,
let mut insert2 = tx.prepare(r#" )?;
let mut insert2 = tx.prepare(
r#"
insert into recording_playback values (:composite_id, :sample_file_uuid, :sample_file_sha1, insert into recording_playback values (:composite_id, :sample_file_uuid, :sample_file_sha1,
:video_index) :video_index)
"#)?; "#,
)?;
let mut rows = select.query(params![])?; let mut rows = select.query(params![])?;
let mut camera_state: HashMap<i32, CameraState> = HashMap::new(); let mut camera_state: HashMap<i32, CameraState> = HashMap::new();
while let Some(row) = rows.next()? { while let Some(row) = rows.next()? {
let camera_id: i32 = row.get(0)?; let camera_id: i32 = row.get(0)?;
let camera_state = camera_state.entry(camera_id).or_insert_with(|| { let camera_state = camera_state
CameraState{ .entry(camera_id)
.or_insert_with(|| CameraState {
current_run: None, current_run: None,
next_recording_id: 1, next_recording_id: 1,
} });
});
let composite_id = ((camera_id as i64) << 32) | (camera_state.next_recording_id as i64); let composite_id = ((camera_id as i64) << 32) | (camera_state.next_recording_id as i64);
camera_state.next_recording_id += 1; camera_state.next_recording_id += 1;
let sample_file_bytes: i32 = row.get(1)?; let sample_file_bytes: i32 = row.get(1)?;
@ -181,9 +190,15 @@ fn fill_recording(tx: &rusqlite::Transaction) -> Result<HashMap<i32, CameraState
let video_index: Vec<u8> = row.get(10)?; let video_index: Vec<u8> = row.get(10)?;
let old_id: i32 = row.get(11)?; let old_id: i32 = row.get(11)?;
let trailing_zero = has_trailing_zero(&video_index).unwrap_or_else(|e| { let trailing_zero = has_trailing_zero(&video_index).unwrap_or_else(|e| {
warn!("recording {}/{} (sample file {}, formerly recording {}) has corrupt \ warn!(
video_index: {}", "recording {}/{} (sample file {}, formerly recording {}) has corrupt \
camera_id, composite_id & 0xFFFF, sample_file_uuid.0, old_id, e); video_index: {}",
camera_id,
composite_id & 0xFFFF,
sample_file_uuid.0,
old_id,
e
);
false false
}); });
let run_id = match camera_state.current_run { let run_id = match camera_state.current_run {
@ -194,7 +209,14 @@ fn fill_recording(tx: &rusqlite::Transaction) -> Result<HashMap<i32, CameraState
(":composite_id", &composite_id), (":composite_id", &composite_id),
(":camera_id", &camera_id), (":camera_id", &camera_id),
(":run_offset", &(composite_id - run_id)), (":run_offset", &(composite_id - run_id)),
(":flags", &(if trailing_zero { db::RecordingFlags::TrailingZero as i32 } else { 0 })), (
":flags",
&(if trailing_zero {
db::RecordingFlags::TrailingZero as i32
} else {
0
}),
),
(":sample_file_bytes", &sample_file_bytes), (":sample_file_bytes", &sample_file_bytes),
(":start_time_90k", &start_time_90k), (":start_time_90k", &start_time_90k),
(":duration_90k", &duration_90k), (":duration_90k", &duration_90k),
@ -218,11 +240,15 @@ fn fill_recording(tx: &rusqlite::Transaction) -> Result<HashMap<i32, CameraState
Ok(camera_state) Ok(camera_state)
} }
fn update_camera(tx: &rusqlite::Transaction, camera_state: HashMap<i32, CameraState>) fn update_camera(
-> Result<(), Error> { tx: &rusqlite::Transaction,
let mut stmt = tx.prepare(r#" camera_state: HashMap<i32, CameraState>,
) -> Result<(), Error> {
let mut stmt = tx.prepare(
r#"
update camera set next_recording_id = :next_recording_id where id = :id update camera set next_recording_id = :next_recording_id where id = :id
"#)?; "#,
)?;
for (ref id, ref state) in &camera_state { for (ref id, ref state) in &camera_state {
stmt.execute_named(&[ stmt.execute_named(&[
(":id", &id), (":id", &id),

View File

@ -29,29 +29,31 @@
// along with this program. If not, see <http://www.gnu.org/licenses/>. // along with this program. If not, see <http://www.gnu.org/licenses/>.
/// Upgrades a version 1 schema to a version 2 schema. /// Upgrades a version 1 schema to a version 2 schema.
use crate::dir; use crate::dir;
use failure::{Error, bail, format_err}; use crate::schema::DirMeta;
use failure::{bail, format_err, Error};
use nix::fcntl::{FlockArg, OFlag}; use nix::fcntl::{FlockArg, OFlag};
use nix::sys::stat::Mode; use nix::sys::stat::Mode;
use rusqlite::params; use rusqlite::params;
use crate::schema::DirMeta;
use std::os::unix::io::AsRawFd; use std::os::unix::io::AsRawFd;
use uuid::Uuid; use uuid::Uuid;
pub fn run(args: &super::Args, tx: &rusqlite::Transaction) -> Result<(), Error> { pub fn run(args: &super::Args, tx: &rusqlite::Transaction) -> Result<(), Error> {
let sample_file_path = let sample_file_path = args.sample_file_dir.ok_or_else(|| {
args.sample_file_dir format_err!("--sample-file-dir required when upgrading from schema version 1 to 2.")
.ok_or_else(|| format_err!("--sample-file-dir required when upgrading from \ })?;
schema version 1 to 2."))?;
let mut d = nix::dir::Dir::open(sample_file_path, OFlag::O_DIRECTORY | OFlag::O_RDONLY, let mut d = nix::dir::Dir::open(
Mode::empty())?; sample_file_path,
OFlag::O_DIRECTORY | OFlag::O_RDONLY,
Mode::empty(),
)?;
nix::fcntl::flock(d.as_raw_fd(), FlockArg::LockExclusiveNonblock)?; nix::fcntl::flock(d.as_raw_fd(), FlockArg::LockExclusiveNonblock)?;
verify_dir_contents(sample_file_path, &mut d, tx)?; verify_dir_contents(sample_file_path, &mut d, tx)?;
// These create statements match the schema.sql when version 2 was the latest. // These create statements match the schema.sql when version 2 was the latest.
tx.execute_batch(r#" tx.execute_batch(
r#"
create table meta ( create table meta (
uuid blob not null check (length(uuid) = 16) uuid blob not null check (length(uuid) = 16)
); );
@ -99,13 +101,17 @@ pub fn run(args: &super::Args, tx: &rusqlite::Transaction) -> Result<(), Error>
use_count not null default 0 use_count not null default 0
) without rowid; ) without rowid;
create index user_session_uid on user_session (user_id); create index user_session_uid on user_session (user_id);
"#)?; "#,
)?;
let db_uuid = ::uuid::Uuid::new_v4(); let db_uuid = ::uuid::Uuid::new_v4();
let db_uuid_bytes = &db_uuid.as_bytes()[..]; let db_uuid_bytes = &db_uuid.as_bytes()[..];
tx.execute("insert into meta (uuid) values (?)", params![db_uuid_bytes])?; tx.execute("insert into meta (uuid) values (?)", params![db_uuid_bytes])?;
let open_uuid = ::uuid::Uuid::new_v4(); let open_uuid = ::uuid::Uuid::new_v4();
let open_uuid_bytes = &open_uuid.as_bytes()[..]; let open_uuid_bytes = &open_uuid.as_bytes()[..];
tx.execute("insert into open (uuid) values (?)", params![open_uuid_bytes])?; tx.execute(
"insert into open (uuid) values (?)",
params![open_uuid_bytes],
)?;
let open_id = tx.last_insert_rowid() as u32; let open_id = tx.last_insert_rowid() as u32;
let dir_uuid = ::uuid::Uuid::new_v4(); let dir_uuid = ::uuid::Uuid::new_v4();
let dir_uuid_bytes = &dir_uuid.as_bytes()[..]; let dir_uuid_bytes = &dir_uuid.as_bytes()[..];
@ -121,15 +127,22 @@ pub fn run(args: &super::Args, tx: &rusqlite::Transaction) -> Result<(), Error>
} }
dir::write_meta(d.as_raw_fd(), &meta)?; dir::write_meta(d.as_raw_fd(), &meta)?;
let sample_file_path = sample_file_path.to_str() let sample_file_path = sample_file_path.to_str().ok_or_else(|| {
.ok_or_else(|| format_err!("sample file dir {} is not a valid string", format_err!(
sample_file_path.display()))?; "sample file dir {} is not a valid string",
tx.execute(r#" sample_file_path.display()
)
})?;
tx.execute(
r#"
insert into sample_file_dir (path, uuid, last_complete_open_id) insert into sample_file_dir (path, uuid, last_complete_open_id)
values (?, ?, ?) values (?, ?, ?)
"#, params![sample_file_path, dir_uuid_bytes, open_id])?; "#,
params![sample_file_path, dir_uuid_bytes, open_id],
)?;
tx.execute_batch(r#" tx.execute_batch(
r#"
drop table reserved_sample_files; drop table reserved_sample_files;
alter table camera rename to old_camera; alter table camera rename to old_camera;
alter table recording rename to old_recording; alter table recording rename to old_recording;
@ -253,12 +266,14 @@ pub fn run(args: &super::Args, tx: &rusqlite::Transaction) -> Result<(), Error>
old_camera cross join sample_file_dir old_camera cross join sample_file_dir
where where
old_camera.sub_rtsp_path != ''; old_camera.sub_rtsp_path != '';
"#)?; "#,
)?;
// Add the new video_sample_entry rows, before inserting the recordings referencing them. // Add the new video_sample_entry rows, before inserting the recordings referencing them.
fix_video_sample_entry(tx)?; fix_video_sample_entry(tx)?;
tx.execute_batch(r#" tx.execute_batch(
r#"
insert into recording insert into recording
select select
r.composite_id, r.composite_id,
@ -282,7 +297,8 @@ pub fn run(args: &super::Args, tx: &rusqlite::Transaction) -> Result<(), Error>
p.sample_file_sha1 p.sample_file_sha1
from from
old_recording r join recording_playback p on (r.composite_id = p.composite_id); old_recording r join recording_playback p on (r.composite_id = p.composite_id);
"#)?; "#,
)?;
Ok(()) Ok(())
} }
@ -295,16 +311,23 @@ pub fn run(args: &super::Args, tx: &rusqlite::Transaction) -> Result<(), Error>
/// * optional: reserved sample file uuids. /// * optional: reserved sample file uuids.
/// * optional: meta and meta-tmp from half-completed update attempts. /// * optional: meta and meta-tmp from half-completed update attempts.
/// * forbidden: anything else. /// * forbidden: anything else.
fn verify_dir_contents(sample_file_path: &std::path::Path, dir: &mut nix::dir::Dir, fn verify_dir_contents(
tx: &rusqlite::Transaction) -> Result<(), Error> { sample_file_path: &std::path::Path,
dir: &mut nix::dir::Dir,
tx: &rusqlite::Transaction,
) -> Result<(), Error> {
// Build a hash of the uuids found in the directory. // Build a hash of the uuids found in the directory.
let n: i64 = tx.query_row(r#" let n: i64 = tx.query_row(
r#"
select select
a.c + b.c a.c + b.c
from from
(select count(*) as c from recording) a, (select count(*) as c from recording) a,
(select count(*) as c from reserved_sample_files) b; (select count(*) as c from reserved_sample_files) b;
"#, params![], |r| r.get(0))?; "#,
params![],
|r| r.get(0),
)?;
let mut files = ::fnv::FnvHashSet::with_capacity_and_hasher(n as usize, Default::default()); let mut files = ::fnv::FnvHashSet::with_capacity_and_hasher(n as usize, Default::default());
for e in dir.iter() { for e in dir.iter() {
let e = e?; let e = e?;
@ -315,8 +338,8 @@ fn verify_dir_contents(sample_file_path: &std::path::Path, dir: &mut nix::dir::D
// Ignore metadata files. These might from a half-finished update attempt. // Ignore metadata files. These might from a half-finished update attempt.
// If the directory is actually an in-use >v3 format, other contents won't match. // If the directory is actually an in-use >v3 format, other contents won't match.
continue; continue;
}, }
_ => {}, _ => {}
}; };
let s = match f.to_str() { let s = match f.to_str() {
Ok(s) => s, Ok(s) => s,
@ -326,7 +349,8 @@ fn verify_dir_contents(sample_file_path: &std::path::Path, dir: &mut nix::dir::D
Ok(u) => u, Ok(u) => u,
Err(_) => bail!("unexpected file {:?} in {:?}", f, sample_file_path), Err(_) => bail!("unexpected file {:?} in {:?}", f, sample_file_path),
}; };
if s != uuid.to_hyphenated_ref().to_string() { // non-canonical form. if s != uuid.to_hyphenated_ref().to_string() {
// non-canonical form.
bail!("unexpected file {:?} in {:?}", f, sample_file_path); bail!("unexpected file {:?} in {:?}", f, sample_file_path);
} }
files.insert(uuid); files.insert(uuid);
@ -339,7 +363,11 @@ fn verify_dir_contents(sample_file_path: &std::path::Path, dir: &mut nix::dir::D
while let Some(row) = rows.next()? { while let Some(row) = rows.next()? {
let uuid: crate::db::FromSqlUuid = row.get(0)?; let uuid: crate::db::FromSqlUuid = row.get(0)?;
if !files.remove(&uuid.0) { if !files.remove(&uuid.0) {
bail!("{} is missing from dir {}!", uuid.0, sample_file_path.display()); bail!(
"{} is missing from dir {}!",
uuid.0,
sample_file_path.display()
);
} }
} }
} }
@ -355,20 +383,28 @@ fn verify_dir_contents(sample_file_path: &std::path::Path, dir: &mut nix::dir::D
// a garbage file so if the upgrade transation fails this is still a valid and complete // a garbage file so if the upgrade transation fails this is still a valid and complete
// version 1 database. // version 1 database.
let p = super::UuidPath::from(uuid.0); let p = super::UuidPath::from(uuid.0);
nix::unistd::unlinkat(Some(dir.as_raw_fd()), &p, nix::unistd::unlinkat(
nix::unistd::UnlinkatFlags::NoRemoveDir)?; Some(dir.as_raw_fd()),
&p,
nix::unistd::UnlinkatFlags::NoRemoveDir,
)?;
} }
} }
if !files.is_empty() { if !files.is_empty() {
bail!("{} unexpected sample file uuids in dir {}: {:?}!", bail!(
files.len(), sample_file_path.display(), files); "{} unexpected sample file uuids in dir {}: {:?}!",
files.len(),
sample_file_path.display(),
files
);
} }
Ok(()) Ok(())
} }
fn fix_video_sample_entry(tx: &rusqlite::Transaction) -> Result<(), Error> { fn fix_video_sample_entry(tx: &rusqlite::Transaction) -> Result<(), Error> {
let mut select = tx.prepare(r#" let mut select = tx.prepare(
r#"
select select
id, id,
sha1, sha1,
@ -377,10 +413,13 @@ fn fix_video_sample_entry(tx: &rusqlite::Transaction) -> Result<(), Error> {
data data
from from
old_video_sample_entry old_video_sample_entry
"#)?; "#,
let mut insert = tx.prepare(r#" )?;
let mut insert = tx.prepare(
r#"
insert into video_sample_entry values (:id, :sha1, :width, :height, :rfc6381_codec, :data) insert into video_sample_entry values (:id, :sha1, :width, :height, :rfc6381_codec, :data)
"#)?; "#,
)?;
let mut rows = select.query(params![])?; let mut rows = select.query(params![])?;
while let Some(row) = rows.next()? { while let Some(row) = rows.next()? {
let data: Vec<u8> = row.get(4)?; let data: Vec<u8> = row.get(4)?;
@ -398,12 +437,15 @@ fn fix_video_sample_entry(tx: &rusqlite::Transaction) -> Result<(), Error> {
// This previously lived in h264.rs. As of version 1, H.264 is the only supported codec. // This previously lived in h264.rs. As of version 1, H.264 is the only supported codec.
fn rfc6381_codec_from_sample_entry(sample_entry: &[u8]) -> Result<String, Error> { fn rfc6381_codec_from_sample_entry(sample_entry: &[u8]) -> Result<String, Error> {
if sample_entry.len() < 99 || &sample_entry[4..8] != b"avc1" || if sample_entry.len() < 99 || &sample_entry[4..8] != b"avc1" || &sample_entry[90..94] != b"avcC"
&sample_entry[90..94] != b"avcC" { {
bail!("not a valid AVCSampleEntry"); bail!("not a valid AVCSampleEntry");
} }
let profile_idc = sample_entry[103]; let profile_idc = sample_entry[103];
let constraint_flags_byte = sample_entry[104]; let constraint_flags_byte = sample_entry[104];
let level_idc = sample_entry[105]; let level_idc = sample_entry[105];
Ok(format!("avc1.{:02x}{:02x}{:02x}", profile_idc, constraint_flags_byte, level_idc)) Ok(format!(
"avc1.{:02x}{:02x}{:02x}",
profile_idc, constraint_flags_byte, level_idc
))
} }

View File

@ -31,11 +31,10 @@
/// Upgrades a version 2 schema to a version 3 schema. /// Upgrades a version 2 schema to a version 3 schema.
/// Note that a version 2 schema is never actually used; so we know the upgrade from version 1 was /// Note that a version 2 schema is never actually used; so we know the upgrade from version 1 was
/// completed, and possibly an upgrade from 2 to 3 is half-finished. /// completed, and possibly an upgrade from 2 to 3 is half-finished.
use crate::db::{self, FromSqlUuid}; use crate::db::{self, FromSqlUuid};
use crate::dir; use crate::dir;
use failure::Error;
use crate::schema; use crate::schema;
use failure::Error;
use rusqlite::params; use rusqlite::params;
use std::os::unix::io::AsRawFd; use std::os::unix::io::AsRawFd;
use std::sync::Arc; use std::sync::Arc;
@ -47,20 +46,26 @@ use std::sync::Arc;
/// * it has a last completed open. /// * it has a last completed open.
fn open_sample_file_dir(tx: &rusqlite::Transaction) -> Result<Arc<dir::SampleFileDir>, Error> { fn open_sample_file_dir(tx: &rusqlite::Transaction) -> Result<Arc<dir::SampleFileDir>, Error> {
let (p, s_uuid, o_id, o_uuid, db_uuid): (String, FromSqlUuid, i32, FromSqlUuid, FromSqlUuid) = let (p, s_uuid, o_id, o_uuid, db_uuid): (String, FromSqlUuid, i32, FromSqlUuid, FromSqlUuid) =
tx.query_row(r#" tx.query_row(
select r#"
s.path, s.uuid, s.last_complete_open_id, o.uuid, m.uuid select
from s.path, s.uuid, s.last_complete_open_id, o.uuid, m.uuid
sample_file_dir s from
join open o on (s.last_complete_open_id = o.id) sample_file_dir s
cross join meta m join open o on (s.last_complete_open_id = o.id)
"#, params![], |row| { cross join meta m
Ok((row.get(0)?, "#,
row.get(1)?, params![],
row.get(2)?, |row| {
row.get(3)?, Ok((
row.get(4)?)) row.get(0)?,
})?; row.get(1)?,
row.get(2)?,
row.get(3)?,
row.get(4)?,
))
},
)?;
let mut meta = schema::DirMeta::default(); let mut meta = schema::DirMeta::default();
meta.db_uuid.extend_from_slice(&db_uuid.0.as_bytes()[..]); meta.db_uuid.extend_from_slice(&db_uuid.0.as_bytes()[..]);
meta.dir_uuid.extend_from_slice(&s_uuid.0.as_bytes()[..]); meta.dir_uuid.extend_from_slice(&s_uuid.0.as_bytes()[..]);
@ -74,30 +79,37 @@ fn open_sample_file_dir(tx: &rusqlite::Transaction) -> Result<Arc<dir::SampleFil
pub fn run(_args: &super::Args, tx: &rusqlite::Transaction) -> Result<(), Error> { pub fn run(_args: &super::Args, tx: &rusqlite::Transaction) -> Result<(), Error> {
let d = open_sample_file_dir(&tx)?; let d = open_sample_file_dir(&tx)?;
let mut stmt = tx.prepare(r#" let mut stmt = tx.prepare(
r#"
select select
composite_id, composite_id,
sample_file_uuid sample_file_uuid
from from
recording_playback recording_playback
"#)?; "#,
)?;
let mut rows = stmt.query(params![])?; let mut rows = stmt.query(params![])?;
while let Some(row) = rows.next()? { while let Some(row) = rows.next()? {
let id = db::CompositeId(row.get(0)?); let id = db::CompositeId(row.get(0)?);
let sample_file_uuid: FromSqlUuid = row.get(1)?; let sample_file_uuid: FromSqlUuid = row.get(1)?;
let from_path = super::UuidPath::from(sample_file_uuid.0); let from_path = super::UuidPath::from(sample_file_uuid.0);
let to_path = crate::dir::CompositeIdPath::from(id); let to_path = crate::dir::CompositeIdPath::from(id);
if let Err(e) = nix::fcntl::renameat(Some(d.fd.as_raw_fd()), &from_path, if let Err(e) = nix::fcntl::renameat(
Some(d.fd.as_raw_fd()), &to_path) { Some(d.fd.as_raw_fd()),
&from_path,
Some(d.fd.as_raw_fd()),
&to_path,
) {
if e == nix::Error::Sys(nix::errno::Errno::ENOENT) { if e == nix::Error::Sys(nix::errno::Errno::ENOENT) {
continue; // assume it was already moved. continue; // assume it was already moved.
} }
Err(e)?; Err(e)?;
} }
} }
// These create statements match the schema.sql when version 3 was the latest. // These create statements match the schema.sql when version 3 was the latest.
tx.execute_batch(r#" tx.execute_batch(
r#"
alter table recording_playback rename to old_recording_playback; alter table recording_playback rename to old_recording_playback;
create table recording_playback ( create table recording_playback (
composite_id integer primary key references recording (composite_id), composite_id integer primary key references recording (composite_id),
@ -113,6 +125,7 @@ pub fn run(_args: &super::Args, tx: &rusqlite::Transaction) -> Result<(), Error>
drop table old_recording; drop table old_recording;
drop table old_camera; drop table old_camera;
drop table old_video_sample_entry; drop table old_video_sample_entry;
"#)?; "#,
)?;
Ok(()) Ok(())
} }

View File

@ -29,12 +29,12 @@
// along with this program. If not, see <http://www.gnu.org/licenses/>. // along with this program. If not, see <http://www.gnu.org/licenses/>.
/// Upgrades a version 3 schema to a version 4 schema. /// Upgrades a version 3 schema to a version 4 schema.
use failure::Error; use failure::Error;
pub fn run(_args: &super::Args, tx: &rusqlite::Transaction) -> Result<(), Error> { pub fn run(_args: &super::Args, tx: &rusqlite::Transaction) -> Result<(), Error> {
// These create statements match the schema.sql when version 4 was the latest. // These create statements match the schema.sql when version 4 was the latest.
tx.execute_batch(r#" tx.execute_batch(
r#"
alter table meta add column max_signal_changes integer check (max_signal_changes >= 0); alter table meta add column max_signal_changes integer check (max_signal_changes >= 0);
create table signal ( create table signal (
@ -191,6 +191,7 @@ pub fn run(_args: &super::Args, tx: &rusqlite::Transaction) -> Result<(), Error>
-- This was supposed to be present in version 2, but the upgrade procedure used to miss it. -- This was supposed to be present in version 2, but the upgrade procedure used to miss it.
-- Catch up so we know a version 4 database is right. -- Catch up so we know a version 4 database is right.
create index if not exists user_session_uid on user_session (user_id); create index if not exists user_session_uid on user_session (user_id);
"#)?; "#,
)?;
Ok(()) Ok(())
} }

View File

@ -32,11 +32,10 @@
/// ///
/// This just handles the directory meta files. If they're already in the new format, great. /// This just handles the directory meta files. If they're already in the new format, great.
/// Otherwise, verify they are consistent with the database then upgrade them. /// Otherwise, verify they are consistent with the database then upgrade them.
use crate::db::FromSqlUuid; use crate::db::FromSqlUuid;
use crate::{dir, schema}; use crate::{dir, schema};
use cstr::cstr; use cstr::cstr;
use failure::{Error, Fail, bail}; use failure::{bail, Error, Fail};
use log::info; use log::info;
use nix::fcntl::{FlockArg, OFlag}; use nix::fcntl::{FlockArg, OFlag};
use nix::sys::stat::Mode; use nix::sys::stat::Mode;
@ -61,25 +60,42 @@ fn maybe_upgrade_meta(dir: &dir::Fd, db_meta: &schema::DirMeta) -> Result<bool,
let mut s = protobuf::CodedInputStream::from_bytes(&data); let mut s = protobuf::CodedInputStream::from_bytes(&data);
let mut dir_meta = schema::DirMeta::new(); let mut dir_meta = schema::DirMeta::new();
dir_meta.merge_from(&mut s) dir_meta
.merge_from(&mut s)
.map_err(|e| e.context("Unable to parse metadata proto: {}"))?; .map_err(|e| e.context("Unable to parse metadata proto: {}"))?;
if !dir::SampleFileDir::consistent(&db_meta, &dir_meta) { if !dir::SampleFileDir::consistent(&db_meta, &dir_meta) {
bail!("Inconsistent db_meta={:?} dir_meta={:?}", &db_meta, &dir_meta); bail!(
"Inconsistent db_meta={:?} dir_meta={:?}",
&db_meta,
&dir_meta
);
} }
let mut f = crate::fs::openat(dir.as_raw_fd(), tmp_path, let mut f = crate::fs::openat(
OFlag::O_CREAT | OFlag::O_TRUNC | OFlag::O_WRONLY, dir.as_raw_fd(),
Mode::S_IRUSR | Mode::S_IWUSR)?; tmp_path,
let mut data = OFlag::O_CREAT | OFlag::O_TRUNC | OFlag::O_WRONLY,
dir_meta.write_length_delimited_to_bytes().expect("proto3->vec is infallible"); Mode::S_IRUSR | Mode::S_IWUSR,
)?;
let mut data = dir_meta
.write_length_delimited_to_bytes()
.expect("proto3->vec is infallible");
if data.len() > FIXED_DIR_META_LEN { if data.len() > FIXED_DIR_META_LEN {
bail!("Length-delimited DirMeta message requires {} bytes, over limit of {}", bail!(
data.len(), FIXED_DIR_META_LEN); "Length-delimited DirMeta message requires {} bytes, over limit of {}",
data.len(),
FIXED_DIR_META_LEN
);
} }
data.resize(FIXED_DIR_META_LEN, 0); // pad to required length. data.resize(FIXED_DIR_META_LEN, 0); // pad to required length.
f.write_all(&data)?; f.write_all(&data)?;
f.sync_all()?; f.sync_all()?;
nix::fcntl::renameat(Some(dir.as_raw_fd()), tmp_path, Some(dir.as_raw_fd()), meta_path)?; nix::fcntl::renameat(
Some(dir.as_raw_fd()),
tmp_path,
Some(dir.as_raw_fd()),
meta_path,
)?;
Ok(true) Ok(true)
} }
@ -91,8 +107,12 @@ fn maybe_upgrade_meta(dir: &dir::Fd, db_meta: &schema::DirMeta) -> Result<bool,
/// Returns true if something was done (and thus a sync is needed). /// Returns true if something was done (and thus a sync is needed).
fn maybe_cleanup_garbage_uuids(dir: &dir::Fd) -> Result<bool, Error> { fn maybe_cleanup_garbage_uuids(dir: &dir::Fd) -> Result<bool, Error> {
let mut need_sync = false; let mut need_sync = false;
let mut dir2 = nix::dir::Dir::openat(dir.as_raw_fd(), ".", let mut dir2 = nix::dir::Dir::openat(
OFlag::O_DIRECTORY | OFlag::O_RDONLY, Mode::empty())?; dir.as_raw_fd(),
".",
OFlag::O_DIRECTORY | OFlag::O_RDONLY,
Mode::empty(),
)?;
for e in dir2.iter() { for e in dir2.iter() {
let e = e?; let e = e?;
let f = e.file_name(); let f = e.file_name();
@ -103,8 +123,11 @@ fn maybe_cleanup_garbage_uuids(dir: &dir::Fd) -> Result<bool, Error> {
}; };
if Uuid::parse_str(f_str).is_ok() { if Uuid::parse_str(f_str).is_ok() {
info!("removing leftover garbage file {}", f_str); info!("removing leftover garbage file {}", f_str);
nix::unistd::unlinkat(Some(dir.as_raw_fd()), f, nix::unistd::unlinkat(
nix::unistd::UnlinkatFlags::NoRemoveDir)?; Some(dir.as_raw_fd()),
f,
nix::unistd::UnlinkatFlags::NoRemoveDir,
)?;
need_sync = true; need_sync = true;
} }
} }
@ -115,7 +138,8 @@ fn maybe_cleanup_garbage_uuids(dir: &dir::Fd) -> Result<bool, Error> {
pub fn run(_args: &super::Args, tx: &rusqlite::Transaction) -> Result<(), Error> { pub fn run(_args: &super::Args, tx: &rusqlite::Transaction) -> Result<(), Error> {
let db_uuid: FromSqlUuid = let db_uuid: FromSqlUuid =
tx.query_row_and_then(r"select uuid from meta", params![], |row| row.get(0))?; tx.query_row_and_then(r"select uuid from meta", params![], |row| row.get(0))?;
let mut stmt = tx.prepare(r#" let mut stmt = tx.prepare(
r#"
select select
d.path, d.path,
d.uuid, d.uuid,
@ -124,7 +148,8 @@ pub fn run(_args: &super::Args, tx: &rusqlite::Transaction) -> Result<(), Error>
from from
sample_file_dir d sample_file_dir d
left join open o on (d.last_complete_open_id = o.id); left join open o on (d.last_complete_open_id = o.id);
"#)?; "#,
)?;
let mut rows = stmt.query(params![])?; let mut rows = stmt.query(params![])?;
while let Some(row) = rows.next()? { while let Some(row) = rows.next()? {
let path = row.get_raw_checked(0)?.as_str()?; let path = row.get_raw_checked(0)?.as_str()?;
@ -134,14 +159,16 @@ pub fn run(_args: &super::Args, tx: &rusqlite::Transaction) -> Result<(), Error>
let open_uuid: Option<FromSqlUuid> = row.get(3)?; let open_uuid: Option<FromSqlUuid> = row.get(3)?;
let mut db_meta = schema::DirMeta::new(); let mut db_meta = schema::DirMeta::new();
db_meta.db_uuid.extend_from_slice(&db_uuid.0.as_bytes()[..]); db_meta.db_uuid.extend_from_slice(&db_uuid.0.as_bytes()[..]);
db_meta.dir_uuid.extend_from_slice(&dir_uuid.0.as_bytes()[..]); db_meta
.dir_uuid
.extend_from_slice(&dir_uuid.0.as_bytes()[..]);
match (open_id, open_uuid) { match (open_id, open_uuid) {
(Some(id), Some(uuid)) => { (Some(id), Some(uuid)) => {
let mut o = db_meta.last_complete_open.set_default(); let mut o = db_meta.last_complete_open.set_default();
o.id = id; o.id = id;
o.uuid.extend_from_slice(&uuid.0.as_bytes()[..]); o.uuid.extend_from_slice(&uuid.0.as_bytes()[..]);
}, }
(None, None) => {}, (None, None) => {}
_ => bail!("open table missing id"), _ => bail!("open table missing id"),
} }

View File

@ -29,9 +29,8 @@
// along with this program. If not, see <http://www.gnu.org/licenses/>. // along with this program. If not, see <http://www.gnu.org/licenses/>.
/// Upgrades a version 4 schema to a version 5 schema. /// Upgrades a version 4 schema to a version 5 schema.
use byteorder::{BigEndian, ByteOrder, WriteBytesExt}; use byteorder::{BigEndian, ByteOrder, WriteBytesExt};
use failure::{Error, ResultExt, bail, format_err}; use failure::{bail, format_err, Error, ResultExt};
use h264_reader::avcc::AvcDecoderConfigurationRecord; use h264_reader::avcc::AvcDecoderConfigurationRecord;
use rusqlite::{named_params, params}; use rusqlite::{named_params, params};
use std::convert::{TryFrom, TryInto}; use std::convert::{TryFrom, TryInto};
@ -39,9 +38,9 @@ use std::convert::{TryFrom, TryInto};
// Copied from src/h264.rs. h264 stuff really doesn't belong in the db crate, but we do what we // Copied from src/h264.rs. h264 stuff really doesn't belong in the db crate, but we do what we
// must for schema upgrades. // must for schema upgrades.
const PIXEL_ASPECT_RATIOS: [((u16, u16), (u16, u16)); 4] = [ const PIXEL_ASPECT_RATIOS: [((u16, u16), (u16, u16)); 4] = [
((320, 240), ( 4, 3)), ((320, 240), (4, 3)),
((352, 240), (40, 33)), ((352, 240), (40, 33)),
((640, 480), ( 4, 3)), ((640, 480), (4, 3)),
((704, 480), (40, 33)), ((704, 480), (40, 33)),
]; ];
fn default_pixel_aspect_ratio(width: u16, height: u16) -> (u16, u16) { fn default_pixel_aspect_ratio(width: u16, height: u16) -> (u16, u16) {
@ -59,13 +58,16 @@ fn parse(data: &[u8]) -> Result<AvcDecoderConfigurationRecord, Error> {
bail!("data of len {} doesn't have an avcC", data.len()); bail!("data of len {} doesn't have an avcC", data.len());
} }
let avcc_len = BigEndian::read_u32(&data[86..90]); let avcc_len = BigEndian::read_u32(&data[86..90]);
if avcc_len < 8 { // length and type. if avcc_len < 8 {
// length and type.
bail!("invalid avcc len {}", avcc_len); bail!("invalid avcc len {}", avcc_len);
} }
let end_pos = 86 + usize::try_from(avcc_len)?; let end_pos = 86 + usize::try_from(avcc_len)?;
if end_pos != data.len() { if end_pos != data.len() {
bail!("expected avcC to be end of extradata; there are {} more bytes.", bail!(
data.len() - end_pos); "expected avcC to be end of extradata; there are {} more bytes.",
data.len() - end_pos
);
} }
AvcDecoderConfigurationRecord::try_from(&data[94..end_pos]) AvcDecoderConfigurationRecord::try_from(&data[94..end_pos])
.map_err(|e| format_err!("Bad AvcDecoderConfigurationRecord: {:?}", e)) .map_err(|e| format_err!("Bad AvcDecoderConfigurationRecord: {:?}", e))
@ -73,7 +75,8 @@ fn parse(data: &[u8]) -> Result<AvcDecoderConfigurationRecord, Error> {
pub fn run(_args: &super::Args, tx: &rusqlite::Transaction) -> Result<(), Error> { pub fn run(_args: &super::Args, tx: &rusqlite::Transaction) -> Result<(), Error> {
// These create statements match the schema.sql when version 5 was the latest. // These create statements match the schema.sql when version 5 was the latest.
tx.execute_batch(r#" tx.execute_batch(
r#"
alter table video_sample_entry rename to old_video_sample_entry; alter table video_sample_entry rename to old_video_sample_entry;
create table video_sample_entry ( create table video_sample_entry (
@ -85,19 +88,23 @@ pub fn run(_args: &super::Args, tx: &rusqlite::Transaction) -> Result<(), Error>
pasp_h_spacing integer not null default 1 check (pasp_h_spacing > 0), pasp_h_spacing integer not null default 1 check (pasp_h_spacing > 0),
pasp_v_spacing integer not null default 1 check (pasp_v_spacing > 0) pasp_v_spacing integer not null default 1 check (pasp_v_spacing > 0)
); );
"#)?; "#,
)?;
let mut insert = tx.prepare(r#" let mut insert = tx.prepare(
r#"
insert into video_sample_entry (id, width, height, rfc6381_codec, data, insert into video_sample_entry (id, width, height, rfc6381_codec, data,
pasp_h_spacing, pasp_v_spacing) pasp_h_spacing, pasp_v_spacing)
values (:id, :width, :height, :rfc6381_codec, :data, values (:id, :width, :height, :rfc6381_codec, :data,
:pasp_h_spacing, :pasp_v_spacing) :pasp_h_spacing, :pasp_v_spacing)
"#)?; "#,
)?;
// Only insert still-referenced video sample entries. I've had problems with // Only insert still-referenced video sample entries. I've had problems with
// no-longer-referenced ones (perhaps from some ancient, buggy version of Moonfire NVR) for // no-longer-referenced ones (perhaps from some ancient, buggy version of Moonfire NVR) for
// which avcc.create_context(()) fails. // which avcc.create_context(()) fails.
let mut stmt = tx.prepare(r#" let mut stmt = tx.prepare(
r#"
select select
id, id,
width, width,
@ -114,7 +121,8 @@ pub fn run(_args: &super::Args, tx: &rusqlite::Transaction) -> Result<(), Error>
recording r recording r
where where
r.video_sample_entry_id = v.id) r.video_sample_entry_id = v.id)
"#)?; "#,
)?;
let mut rows = stmt.query(params![])?; let mut rows = stmt.query(params![])?;
while let Some(row) = rows.next()? { while let Some(row) = rows.next()? {
let id: i32 = row.get(0)?; let id: i32 = row.get(0)?;
@ -126,24 +134,31 @@ pub fn run(_args: &super::Args, tx: &rusqlite::Transaction) -> Result<(), Error>
if avcc.num_of_sequence_parameter_sets() != 1 { if avcc.num_of_sequence_parameter_sets() != 1 {
bail!("Multiple SPSs!"); bail!("Multiple SPSs!");
} }
let ctx = avcc.create_context(()) let ctx = avcc.create_context(()).map_err(|e| {
.map_err(|e| format_err!("Can't load SPS+PPS for video_sample_entry_id {}: {:?}", format_err!(
id, e))?; "Can't load SPS+PPS for video_sample_entry_id {}: {:?}",
let sps = ctx.sps_by_id(h264_reader::nal::pps::ParamSetId::from_u32(0).unwrap()) id,
e
)
})?;
let sps = ctx
.sps_by_id(h264_reader::nal::pps::ParamSetId::from_u32(0).unwrap())
.ok_or_else(|| format_err!("No SPS 0 for video_sample_entry_id {}", id))?; .ok_or_else(|| format_err!("No SPS 0 for video_sample_entry_id {}", id))?;
let pasp = sps.vui_parameters.as_ref() let pasp = sps
.and_then(|v| v.aspect_ratio_info.as_ref()) .vui_parameters
.and_then(|a| a.clone().get()) .as_ref()
.unwrap_or_else(|| default_pixel_aspect_ratio(width, height)); .and_then(|v| v.aspect_ratio_info.as_ref())
.and_then(|a| a.clone().get())
.unwrap_or_else(|| default_pixel_aspect_ratio(width, height));
if pasp != (1, 1) { if pasp != (1, 1) {
data.extend_from_slice(b"\x00\x00\x00\x10pasp"); // length + box name data.extend_from_slice(b"\x00\x00\x00\x10pasp"); // length + box name
data.write_u32::<BigEndian>(pasp.0.into())?; data.write_u32::<BigEndian>(pasp.0.into())?;
data.write_u32::<BigEndian>(pasp.1.into())?; data.write_u32::<BigEndian>(pasp.1.into())?;
let len = data.len(); let len = data.len();
BigEndian::write_u32(&mut data[0..4], u32::try_from(len)?); BigEndian::write_u32(&mut data[0..4], u32::try_from(len)?);
} }
insert.execute_named(named_params!{ insert.execute_named(named_params! {
":id": id, ":id": id,
":width": width, ":width": width,
":height": height, ":height": height,
@ -153,7 +168,8 @@ pub fn run(_args: &super::Args, tx: &rusqlite::Transaction) -> Result<(), Error>
":pasp_v_spacing": pasp.1, ":pasp_v_spacing": pasp.1,
})?; })?;
} }
tx.execute_batch(r#" tx.execute_batch(
r#"
alter table stream rename to old_stream; alter table stream rename to old_stream;
create table stream ( create table stream (
id integer primary key, id integer primary key,
@ -205,14 +221,16 @@ pub fn run(_args: &super::Args, tx: &rusqlite::Transaction) -> Result<(), Error>
video_sample_entry_id integer references video_sample_entry (id), video_sample_entry_id integer references video_sample_entry (id),
check (composite_id >> 32 = stream_id) check (composite_id >> 32 = stream_id)
); );
"#)?; "#,
)?;
// SQLite added window functions in 3.25.0. macOS still ships SQLite 3.24.0 (no support). // SQLite added window functions in 3.25.0. macOS still ships SQLite 3.24.0 (no support).
// Compute cumulative columns by hand. // Compute cumulative columns by hand.
let mut cur_stream_id = None; let mut cur_stream_id = None;
let mut cum_duration_90k = 0; let mut cum_duration_90k = 0;
let mut cum_runs = 0; let mut cum_runs = 0;
let mut stmt = tx.prepare(r#" let mut stmt = tx.prepare(
r#"
select select
composite_id, composite_id,
open_id, open_id,
@ -228,8 +246,10 @@ pub fn run(_args: &super::Args, tx: &rusqlite::Transaction) -> Result<(), Error>
from from
old_recording old_recording
order by composite_id order by composite_id
"#)?; "#,
let mut insert = tx.prepare(r#" )?;
let mut insert = tx.prepare(
r#"
insert into recording (composite_id, open_id, stream_id, run_offset, flags, insert into recording (composite_id, open_id, stream_id, run_offset, flags,
sample_file_bytes, start_time_90k, prev_media_duration_90k, sample_file_bytes, start_time_90k, prev_media_duration_90k,
prev_runs, wall_duration_90k, media_duration_delta_90k, prev_runs, wall_duration_90k, media_duration_delta_90k,
@ -238,7 +258,8 @@ pub fn run(_args: &super::Args, tx: &rusqlite::Transaction) -> Result<(), Error>
:sample_file_bytes, :start_time_90k, :prev_media_duration_90k, :sample_file_bytes, :start_time_90k, :prev_media_duration_90k,
:prev_runs, :wall_duration_90k, 0, :video_samples, :prev_runs, :wall_duration_90k, 0, :video_samples,
:video_sync_samples, :video_sample_entry_id) :video_sync_samples, :video_sample_entry_id)
"#)?; "#,
)?;
let mut rows = stmt.query(params![])?; let mut rows = stmt.query(params![])?;
while let Some(row) = rows.next()? { while let Some(row) = rows.next()? {
let composite_id: i64 = row.get(0)?; let composite_id: i64 = row.get(0)?;
@ -257,25 +278,28 @@ pub fn run(_args: &super::Args, tx: &rusqlite::Transaction) -> Result<(), Error>
cum_runs = 0; cum_runs = 0;
cur_stream_id = Some(stream_id); cur_stream_id = Some(stream_id);
} }
insert.execute_named(named_params!{ insert
":composite_id": composite_id, .execute_named(named_params! {
":open_id": open_id, ":composite_id": composite_id,
":stream_id": stream_id, ":open_id": open_id,
":run_offset": run_offset, ":stream_id": stream_id,
":flags": flags, ":run_offset": run_offset,
":sample_file_bytes": sample_file_bytes, ":flags": flags,
":start_time_90k": start_time_90k, ":sample_file_bytes": sample_file_bytes,
":prev_media_duration_90k": cum_duration_90k, ":start_time_90k": start_time_90k,
":prev_runs": cum_runs, ":prev_media_duration_90k": cum_duration_90k,
":wall_duration_90k": wall_duration_90k, ":prev_runs": cum_runs,
":video_samples": video_samples, ":wall_duration_90k": wall_duration_90k,
":video_sync_samples": video_sync_samples, ":video_samples": video_samples,
":video_sample_entry_id": video_sample_entry_id, ":video_sync_samples": video_sync_samples,
}).with_context(|_| format!("Unable to insert composite_id {}", composite_id))?; ":video_sample_entry_id": video_sample_entry_id,
})
.with_context(|_| format!("Unable to insert composite_id {}", composite_id))?;
cum_duration_90k += i64::from(wall_duration_90k); cum_duration_90k += i64::from(wall_duration_90k);
cum_runs += if run_offset == 0 { 1 } else { 0 }; cum_runs += if run_offset == 0 { 1 } else { 0 };
} }
tx.execute_batch(r#" tx.execute_batch(
r#"
drop index recording_cover; drop index recording_cover;
create index recording_cover on recording ( create index recording_cover on recording (
stream_id, stream_id,
@ -328,6 +352,7 @@ pub fn run(_args: &super::Args, tx: &rusqlite::Transaction) -> Result<(), Error>
revocation_reason_detail = 'Blake2b->Blake3 upgrade' revocation_reason_detail = 'Blake2b->Blake3 upgrade'
where where
revocation_reason is null; revocation_reason is null;
"#)?; "#,
)?;
Ok(()) Ok(())
} }

File diff suppressed because it is too large Load Diff

View File

@ -50,7 +50,7 @@
//! interval to cut down on expense. //! interval to cut down on expense.
use cstr::cstr; use cstr::cstr;
use failure::{Error, format_err}; use failure::{format_err, Error};
use ffmpeg; use ffmpeg;
use log::info; use log::info;
use std::sync::Arc; use std::sync::Arc;
@ -163,17 +163,28 @@ impl ObjectDetector {
let model = moonfire_tflite::Model::from_static(MODEL) let model = moonfire_tflite::Model::from_static(MODEL)
.map_err(|()| format_err!("TensorFlow Lite model initialization failed"))?; .map_err(|()| format_err!("TensorFlow Lite model initialization failed"))?;
let devices = moonfire_tflite::edgetpu::Devices::list(); let devices = moonfire_tflite::edgetpu::Devices::list();
let device = devices.first().ok_or_else(|| format_err!("No Edge TPU device available"))?; let device = devices
info!("Using device {:?}/{:?} for object detection", device.type_(), device.path()); .first()
.ok_or_else(|| format_err!("No Edge TPU device available"))?;
info!(
"Using device {:?}/{:?} for object detection",
device.type_(),
device.path()
);
let mut builder = moonfire_tflite::Interpreter::builder(); let mut builder = moonfire_tflite::Interpreter::builder();
builder.add_owned_delegate(device.create_delegate() builder.add_owned_delegate(device.create_delegate().map_err(|()| {
.map_err(|()| format_err!("Unable to create delegate for {:?}/{:?}", format_err!(
device.type_(), device.path()))?); "Unable to create delegate for {:?}/{:?}",
let interpreter = builder.build(&model) device.type_(),
device.path()
)
})?);
let interpreter = builder
.build(&model)
.map_err(|()| format_err!("TensorFlow Lite initialization failed"))?; .map_err(|()| format_err!("TensorFlow Lite initialization failed"))?;
Ok(Arc::new(Self { Ok(Arc::new(Self {
interpreter: parking_lot::Mutex::new(interpreter), interpreter: parking_lot::Mutex::new(interpreter),
width: 300, // TODO width: 300, // TODO
height: 300, height: 300,
})) }))
} }
@ -194,17 +205,19 @@ fn copy(from: &ffmpeg::avutil::VideoFrame, to: &mut moonfire_tflite::Tensor) {
let mut from_i = 0; let mut from_i = 0;
let mut to_i = 0; let mut to_i = 0;
for _y in 0..h { for _y in 0..h {
to[to_i..to_i+3*w].copy_from_slice(&from.data[from_i..from_i+3*w]); to[to_i..to_i + 3 * w].copy_from_slice(&from.data[from_i..from_i + 3 * w]);
from_i += from.linesize; from_i += from.linesize;
to_i += 3*w; to_i += 3 * w;
} }
} }
const SCORE_THRESHOLD: f32 = 0.5; const SCORE_THRESHOLD: f32 = 0.5;
impl ObjectDetectorStream { impl ObjectDetectorStream {
pub fn new(par: ffmpeg::avcodec::InputCodecParameters<'_>, pub fn new(
detector: &ObjectDetector) -> Result<Self, Error> { par: ffmpeg::avcodec::InputCodecParameters<'_>,
detector: &ObjectDetector,
) -> Result<Self, Error> {
let mut dopt = ffmpeg::avutil::Dictionary::new(); let mut dopt = ffmpeg::avutil::Dictionary::new();
dopt.set(cstr!("refcounted_frames"), cstr!("0"))?; dopt.set(cstr!("refcounted_frames"), cstr!("0"))?;
let decoder = par.new_decoder(&mut dopt)?; let decoder = par.new_decoder(&mut dopt)?;
@ -223,15 +236,20 @@ impl ObjectDetectorStream {
}) })
} }
pub fn process_frame(&mut self, pkt: &ffmpeg::avcodec::Packet<'_>, pub fn process_frame(
detector: &ObjectDetector) -> Result<(), Error> { &mut self,
pkt: &ffmpeg::avcodec::Packet<'_>,
detector: &ObjectDetector,
) -> Result<(), Error> {
if !self.decoder.decode_video(pkt, &mut self.frame)? { if !self.decoder.decode_video(pkt, &mut self.frame)? {
return Ok(()); return Ok(());
} }
self.scaler.scale(&self.frame, &mut self.scaled); self.scaler.scale(&self.frame, &mut self.scaled);
let mut interpreter = detector.interpreter.lock(); let mut interpreter = detector.interpreter.lock();
copy(&self.scaled, &mut interpreter.inputs()[0]); copy(&self.scaled, &mut interpreter.inputs()[0]);
interpreter.invoke().map_err(|()| format_err!("TFLite interpreter invocation failed"))?; interpreter
.invoke()
.map_err(|()| format_err!("TFLite interpreter invocation failed"))?;
let outputs = interpreter.outputs(); let outputs = interpreter.outputs();
let classes = outputs[1].f32s(); let classes = outputs[1].f32s();
let scores = outputs[2].f32s(); let scores = outputs[2].f32s();

View File

@ -31,7 +31,7 @@
//! Tools for implementing a `http_serve::Entity` body composed from many "slices". //! Tools for implementing a `http_serve::Entity` body composed from many "slices".
use base::Error; use base::Error;
use futures::{Stream, stream}; use futures::{stream, Stream};
use reffers::ARefss; use reffers::ARefss;
use std::error::Error as StdError; use std::error::Error as StdError;
use std::pin::Pin; use std::pin::Pin;
@ -47,28 +47,42 @@ pub fn wrap_error(e: Error) -> BoxedError {
} }
impl From<ARefss<'static, [u8]>> for Chunk { impl From<ARefss<'static, [u8]>> for Chunk {
fn from(r: ARefss<'static, [u8]>) -> Self { Chunk(r) } fn from(r: ARefss<'static, [u8]>) -> Self {
Chunk(r)
}
} }
impl From<&'static [u8]> for Chunk { impl From<&'static [u8]> for Chunk {
fn from(r: &'static [u8]) -> Self { Chunk(ARefss::new(r)) } fn from(r: &'static [u8]) -> Self {
Chunk(ARefss::new(r))
}
} }
impl From<&'static str> for Chunk { impl From<&'static str> for Chunk {
fn from(r: &'static str) -> Self { Chunk(ARefss::new(r.as_bytes())) } fn from(r: &'static str) -> Self {
Chunk(ARefss::new(r.as_bytes()))
}
} }
impl From<String> for Chunk { impl From<String> for Chunk {
fn from(r: String) -> Self { Chunk(ARefss::new(r.into_bytes()).map(|v| &v[..])) } fn from(r: String) -> Self {
Chunk(ARefss::new(r.into_bytes()).map(|v| &v[..]))
}
} }
impl From<Vec<u8>> for Chunk { impl From<Vec<u8>> for Chunk {
fn from(r: Vec<u8>) -> Self { Chunk(ARefss::new(r).map(|v| &v[..])) } fn from(r: Vec<u8>) -> Self {
Chunk(ARefss::new(r).map(|v| &v[..]))
}
} }
impl hyper::body::Buf for Chunk { impl hyper::body::Buf for Chunk {
fn remaining(&self) -> usize { self.0.len() } fn remaining(&self) -> usize {
fn chunk(&self) -> &[u8] { &*self.0 } self.0.len()
}
fn chunk(&self) -> &[u8] {
&*self.0
}
fn advance(&mut self, cnt: usize) { fn advance(&mut self, cnt: usize) {
self.0 = ::std::mem::replace(&mut self.0, ARefss::new(&[][..])).map(|b| &b[cnt..]); self.0 = ::std::mem::replace(&mut self.0, ARefss::new(&[][..])).map(|b| &b[cnt..]);
} }
@ -83,32 +97,46 @@ impl hyper::body::HttpBody for Body {
type Data = Chunk; type Data = Chunk;
type Error = BoxedError; type Error = BoxedError;
fn poll_data(self: Pin<&mut Self>, cx: &mut std::task::Context) fn poll_data(
-> std::task::Poll<Option<Result<Self::Data, Self::Error>>> { self: Pin<&mut Self>,
// This is safe because the pin is not structural. cx: &mut std::task::Context,
// https://doc.rust-lang.org/std/pin/#pinning-is-not-structural-for-field ) -> std::task::Poll<Option<Result<Self::Data, Self::Error>>> {
// (The field _holds_ a pin, but isn't itself pinned.) // This is safe because the pin is not structural.
unsafe { self.get_unchecked_mut() }.0.get_mut().as_mut().poll_next(cx) // https://doc.rust-lang.org/std/pin/#pinning-is-not-structural-for-field
// (The field _holds_ a pin, but isn't itself pinned.)
unsafe { self.get_unchecked_mut() }
.0
.get_mut()
.as_mut()
.poll_next(cx)
} }
fn poll_trailers(self: Pin<&mut Self>, _cx: &mut std::task::Context) fn poll_trailers(
-> std::task::Poll<Result<Option<http::header::HeaderMap>, Self::Error>> { self: Pin<&mut Self>,
std::task::Poll::Ready(Ok(None)) _cx: &mut std::task::Context,
) -> std::task::Poll<Result<Option<http::header::HeaderMap>, Self::Error>> {
std::task::Poll::Ready(Ok(None))
} }
} }
impl From<BodyStream> for Body { impl From<BodyStream> for Body {
fn from(b: BodyStream) -> Self { Body(SyncWrapper::new(Pin::from(b))) } fn from(b: BodyStream) -> Self {
Body(SyncWrapper::new(Pin::from(b)))
}
} }
impl<C: Into<Chunk>> From<C> for Body { impl<C: Into<Chunk>> From<C> for Body {
fn from(c: C) -> Self { fn from(c: C) -> Self {
Body(SyncWrapper::new(Box::pin(stream::once(futures::future::ok(c.into()))))) Body(SyncWrapper::new(Box::pin(stream::once(
futures::future::ok(c.into()),
))))
} }
} }
impl From<Error> for Body { impl From<Error> for Body {
fn from(e: Error) -> Self { fn from(e: Error) -> Self {
Body(SyncWrapper::new(Box::pin(stream::once(futures::future::err(wrap_error(e)))))) Body(SyncWrapper::new(Box::pin(stream::once(
futures::future::err(wrap_error(e)),
))))
} }
} }

View File

@ -38,8 +38,12 @@ use structopt::StructOpt;
#[derive(StructOpt)] #[derive(StructOpt)]
pub struct Args { pub struct Args {
/// Directory holding the SQLite3 index database. /// Directory holding the SQLite3 index database.
#[structopt(long, default_value = "/var/lib/moonfire-nvr/db", value_name="path", #[structopt(
parse(from_os_str))] long,
default_value = "/var/lib/moonfire-nvr/db",
value_name = "path",
parse(from_os_str)
)]
db_dir: PathBuf, db_dir: PathBuf,
/// Compare sample file lengths on disk to the database. /// Compare sample file lengths on disk to the database.
@ -70,10 +74,13 @@ pub struct Args {
pub fn run(args: &Args) -> Result<i32, Error> { pub fn run(args: &Args) -> Result<i32, Error> {
let (_db_dir, mut conn) = super::open_conn(&args.db_dir, super::OpenMode::ReadWrite)?; let (_db_dir, mut conn) = super::open_conn(&args.db_dir, super::OpenMode::ReadWrite)?;
check::run(&mut conn, &check::Options { check::run(
compare_lens: args.compare_lens, &mut conn,
trash_orphan_sample_files: args.trash_orphan_sample_files, &check::Options {
delete_orphan_rows: args.delete_orphan_rows, compare_lens: args.compare_lens,
trash_corrupt_rows: args.trash_corrupt_rows, trash_orphan_sample_files: args.trash_orphan_sample_files,
}) delete_orphan_rows: args.delete_orphan_rows,
trash_corrupt_rows: args.trash_corrupt_rows,
},
)
} }

View File

@ -28,11 +28,11 @@
// You should have received a copy of the GNU General Public License // You should have received a copy of the GNU General Public License
// along with this program. If not, see <http://www.gnu.org/licenses/>. // along with this program. If not, see <http://www.gnu.org/licenses/>.
use base::strutil::{decode_size, encode_size};
use crate::stream::{self, Opener, Stream}; use crate::stream::{self, Opener, Stream};
use cursive::Cursive; use base::strutil::{decode_size, encode_size};
use cursive::traits::{Boxable, Identifiable, Finder}; use cursive::traits::{Boxable, Finder, Identifiable};
use cursive::views; use cursive::views;
use cursive::Cursive;
use db::writer; use db::writer;
use failure::Error; use failure::Error;
use std::collections::BTreeMap; use std::collections::BTreeMap;
@ -44,11 +44,35 @@ use url::Url;
fn get_change(siv: &mut Cursive) -> db::CameraChange { fn get_change(siv: &mut Cursive) -> db::CameraChange {
// Note: these find_name calls are separate statements, which seems to be important: // Note: these find_name calls are separate statements, which seems to be important:
// https://github.com/gyscos/Cursive/issues/144 // https://github.com/gyscos/Cursive/issues/144
let sn = siv.find_name::<views::EditView>("short_name").unwrap().get_content().as_str().into(); let sn = siv
let d = siv.find_name::<views::TextArea>("description").unwrap().get_content().into(); .find_name::<views::EditView>("short_name")
let h = siv.find_name::<views::EditView>("onvif_host").unwrap().get_content().as_str().into(); .unwrap()
let u = siv.find_name::<views::EditView>("username").unwrap().get_content().as_str().into(); .get_content()
let p = siv.find_name::<views::EditView>("password").unwrap().get_content().as_str().into(); .as_str()
.into();
let d = siv
.find_name::<views::TextArea>("description")
.unwrap()
.get_content()
.into();
let h = siv
.find_name::<views::EditView>("onvif_host")
.unwrap()
.get_content()
.as_str()
.into();
let u = siv
.find_name::<views::EditView>("username")
.unwrap()
.get_content()
.as_str()
.into();
let p = siv
.find_name::<views::EditView>("password")
.unwrap()
.get_content()
.as_str()
.into();
let mut c = db::CameraChange { let mut c = db::CameraChange {
short_name: sn, short_name: sn,
description: d, description: d,
@ -58,16 +82,28 @@ fn get_change(siv: &mut Cursive) -> db::CameraChange {
streams: Default::default(), streams: Default::default(),
}; };
for &t in &db::ALL_STREAM_TYPES { for &t in &db::ALL_STREAM_TYPES {
let u = siv.find_name::<views::EditView>(&format!("{}_rtsp_url", t.as_str())) let u = siv
.unwrap().get_content().as_str().into(); .find_name::<views::EditView>(&format!("{}_rtsp_url", t.as_str()))
let r = siv.find_name::<views::Checkbox>(&format!("{}_record", t.as_str())) .unwrap()
.unwrap().is_checked(); .get_content()
let f = i64::from_str(siv.find_name::<views::EditView>( .as_str()
&format!("{}_flush_if_sec", t.as_str())).unwrap().get_content().as_str()) .into();
.unwrap_or(0); let r = siv
let d = *siv.find_name::<views::SelectView<Option<i32>>>( .find_name::<views::Checkbox>(&format!("{}_record", t.as_str()))
&format!("{}_sample_file_dir", t.as_str())) .unwrap()
.unwrap().selection().unwrap(); .is_checked();
let f = i64::from_str(
siv.find_name::<views::EditView>(&format!("{}_flush_if_sec", t.as_str()))
.unwrap()
.get_content()
.as_str(),
)
.unwrap_or(0);
let d = *siv
.find_name::<views::SelectView<Option<i32>>>(&format!("{}_sample_file_dir", t.as_str()))
.unwrap()
.selection()
.unwrap();
c.streams[t.index()] = db::StreamChange { c.streams[t.index()] = db::StreamChange {
rtsp_url: u, rtsp_url: u,
sample_file_dir_id: d, sample_file_dir_id: d,
@ -90,11 +126,13 @@ fn press_edit(siv: &mut Cursive, db: &Arc<db::Database>, id: Option<i32>) {
} }
}; };
if let Err(e) = result { if let Err(e) = result {
siv.add_layer(views::Dialog::text(format!("Unable to add camera: {}", e)) siv.add_layer(
.title("Error") views::Dialog::text(format!("Unable to add camera: {}", e))
.dismiss_button("Abort")); .title("Error")
.dismiss_button("Abort"),
);
} else { } else {
siv.pop_layer(); // get rid of the add/edit camera dialog. siv.pop_layer(); // get rid of the add/edit camera dialog.
// Recreate the "Edit cameras" dialog from scratch; it's easier than adding the new entry. // Recreate the "Edit cameras" dialog from scratch; it's easier than adding the new entry.
siv.pop_layer(); siv.pop_layer();
@ -105,10 +143,13 @@ fn press_edit(siv: &mut Cursive, db: &Arc<db::Database>, id: Option<i32>) {
fn press_test_inner(url: &Url) -> Result<String, Error> { fn press_test_inner(url: &Url) -> Result<String, Error> {
let stream = stream::FFMPEG.open(stream::Source::Rtsp { let stream = stream::FFMPEG.open(stream::Source::Rtsp {
url: url.as_str(), url: url.as_str(),
redacted_url: url.as_str(), // don't need redaction in config UI. redacted_url: url.as_str(), // don't need redaction in config UI.
})?; })?;
let extra_data = stream.get_extra_data()?; let extra_data = stream.get_extra_data()?;
Ok(format!("{}x{} video stream", extra_data.entry.width, extra_data.entry.height)) Ok(format!(
"{}x{} video stream",
extra_data.entry.width, extra_data.entry.height
))
} }
fn press_test(siv: &mut Cursive, t: db::StreamType) { fn press_test(siv: &mut Cursive, t: db::StreamType) {
@ -116,22 +157,28 @@ fn press_test(siv: &mut Cursive, t: db::StreamType) {
let mut url = match Url::parse(&c.streams[t.index()].rtsp_url) { let mut url = match Url::parse(&c.streams[t.index()].rtsp_url) {
Ok(u) => u, Ok(u) => u,
Err(e) => { Err(e) => {
siv.add_layer(views::Dialog::text( siv.add_layer(
format!("Unparseable URL: {}", e)) views::Dialog::text(format!("Unparseable URL: {}", e))
.title("Stream test failed") .title("Stream test failed")
.dismiss_button("Back")); .dismiss_button("Back"),
);
return; return;
}, }
}; };
if !c.username.is_empty() { if !c.username.is_empty() {
let _ = url.set_username(&c.username); let _ = url.set_username(&c.username);
let _ = url.set_password(Some(&c.password)); let _ = url.set_password(Some(&c.password));
} }
siv.add_layer(views::Dialog::text(format!("Testing {} stream at {}. This may take a while \ siv.add_layer(
on timeout or if you have a long key frame interval", views::Dialog::text(format!(
t.as_str(), &url)) "Testing {} stream at {}. This may take a while \
.title("Testing")); on timeout or if you have a long key frame interval",
t.as_str(),
&url
))
.title("Testing"),
);
// Let siv have this thread for its event loop; do the work in a background thread. // Let siv have this thread for its event loop; do the work in a background thread.
// siv.cb_sink doesn't actually wake up the event loop. Tell siv to poll, as a workaround. // siv.cb_sink doesn't actually wake up the event loop. Tell siv to poll, as a workaround.
@ -147,51 +194,75 @@ fn press_test(siv: &mut Cursive, t: db::StreamType) {
Err(ref e) => { Err(ref e) => {
siv.add_layer( siv.add_layer(
views::Dialog::text(format!("{} stream at {}:\n\n{}", t.as_str(), &url, e)) views::Dialog::text(format!("{} stream at {}:\n\n{}", t.as_str(), &url, e))
.title("Stream test failed") .title("Stream test failed")
.dismiss_button("Back")); .dismiss_button("Back"),
);
return; return;
}, }
Ok(ref d) => d, Ok(ref d) => d,
}; };
siv.add_layer(views::Dialog::text( siv.add_layer(
format!("{} stream at {}:\n\n{}", t.as_str(), &url, description)) views::Dialog::text(format!(
.title("Stream test succeeded") "{} stream at {}:\n\n{}",
.dismiss_button("Back")); t.as_str(),
})).unwrap(); &url,
description
))
.title("Stream test succeeded")
.dismiss_button("Back"),
);
}))
.unwrap();
}); });
} }
fn press_delete(siv: &mut Cursive, db: &Arc<db::Database>, id: i32, name: String, to_delete: i64) { fn press_delete(siv: &mut Cursive, db: &Arc<db::Database>, id: i32, name: String, to_delete: i64) {
let dialog = if to_delete > 0 { let dialog = if to_delete > 0 {
let prompt = format!("Camera {} has recorded video. Please confirm the amount \ let prompt = format!(
of data to delete by typing it back:\n\n{}", name, "Camera {} has recorded video. Please confirm the amount \
encode_size(to_delete)); of data to delete by typing it back:\n\n{}",
name,
encode_size(to_delete)
);
views::Dialog::around( views::Dialog::around(
views::LinearLayout::vertical() views::LinearLayout::vertical()
.child(views::TextView::new(prompt)) .child(views::TextView::new(prompt))
.child(views::DummyView) .child(views::DummyView)
.child(views::EditView::new().on_submit({ .child(
let db = db.clone(); views::EditView::new()
move |siv, _| confirm_deletion(siv, &db, id, to_delete) .on_submit({
}).with_name("confirm"))) let db = db.clone();
move |siv, _| confirm_deletion(siv, &db, id, to_delete)
})
.with_name("confirm"),
),
)
.button("Delete", { .button("Delete", {
let db = db.clone(); let db = db.clone();
move |siv| confirm_deletion(siv, &db, id, to_delete) move |siv| confirm_deletion(siv, &db, id, to_delete)
}) })
} else { } else {
views::Dialog::text(format!("Delete camera {}? This camera has no recorded video.", name)) views::Dialog::text(format!(
"Delete camera {}? This camera has no recorded video.",
name
))
.button("Delete", { .button("Delete", {
let db = db.clone(); let db = db.clone();
move |s| actually_delete(s, &db, id) move |s| actually_delete(s, &db, id)
}) })
}.title("Delete camera").dismiss_button("Cancel"); }
.title("Delete camera")
.dismiss_button("Cancel");
siv.add_layer(dialog); siv.add_layer(dialog);
} }
fn confirm_deletion(siv: &mut Cursive, db: &Arc<db::Database>, id: i32, to_delete: i64) { fn confirm_deletion(siv: &mut Cursive, db: &Arc<db::Database>, id: i32, to_delete: i64) {
let typed = siv.find_name::<views::EditView>("confirm").unwrap().get_content(); let typed = siv
.find_name::<views::EditView>("confirm")
.unwrap()
.get_content();
if decode_size(typed.as_str()).ok() == Some(to_delete) { if decode_size(typed.as_str()).ok() == Some(to_delete) {
siv.pop_layer(); // deletion confirmation dialog siv.pop_layer(); // deletion confirmation dialog
let mut zero_limits = BTreeMap::new(); let mut zero_limits = BTreeMap::new();
{ {
@ -202,7 +273,9 @@ fn confirm_deletion(siv: &mut Cursive, db: &Arc<db::Database>, id: i32, to_delet
Some(d) => d, Some(d) => d,
None => continue, None => continue,
}; };
let l = zero_limits.entry(dir_id).or_insert_with(|| Vec::with_capacity(2)); let l = zero_limits
.entry(dir_id)
.or_insert_with(|| Vec::with_capacity(2));
l.push(writer::NewLimit { l.push(writer::NewLimit {
stream_id, stream_id,
limit: 0, limit: 0,
@ -211,21 +284,27 @@ fn confirm_deletion(siv: &mut Cursive, db: &Arc<db::Database>, id: i32, to_delet
} }
} }
if let Err(e) = lower_retention(db, zero_limits) { if let Err(e) = lower_retention(db, zero_limits) {
siv.add_layer(views::Dialog::text(format!("Unable to delete recordings: {}", e)) siv.add_layer(
.title("Error") views::Dialog::text(format!("Unable to delete recordings: {}", e))
.dismiss_button("Abort")); .title("Error")
.dismiss_button("Abort"),
);
return; return;
} }
actually_delete(siv, db, id); actually_delete(siv, db, id);
} else { } else {
siv.add_layer(views::Dialog::text("Please confirm amount.") siv.add_layer(
.title("Try again") views::Dialog::text("Please confirm amount.")
.dismiss_button("Back")); .title("Try again")
.dismiss_button("Back"),
);
} }
} }
fn lower_retention(db: &Arc<db::Database>, zero_limits: BTreeMap<i32, Vec<writer::NewLimit>>) fn lower_retention(
-> Result<(), Error> { db: &Arc<db::Database>,
zero_limits: BTreeMap<i32, Vec<writer::NewLimit>>,
) -> Result<(), Error> {
let dirs_to_open: Vec<_> = zero_limits.keys().map(|id| *id).collect(); let dirs_to_open: Vec<_> = zero_limits.keys().map(|id| *id).collect();
db.lock().open_sample_file_dirs(&dirs_to_open[..])?; db.lock().open_sample_file_dirs(&dirs_to_open[..])?;
for (&dir_id, l) in &zero_limits { for (&dir_id, l) in &zero_limits {
@ -235,15 +314,17 @@ fn lower_retention(db: &Arc<db::Database>, zero_limits: BTreeMap<i32, Vec<writer
} }
fn actually_delete(siv: &mut Cursive, db: &Arc<db::Database>, id: i32) { fn actually_delete(siv: &mut Cursive, db: &Arc<db::Database>, id: i32) {
siv.pop_layer(); // get rid of the add/edit camera dialog. siv.pop_layer(); // get rid of the add/edit camera dialog.
let result = { let result = {
let mut l = db.lock(); let mut l = db.lock();
l.delete_camera(id) l.delete_camera(id)
}; };
if let Err(e) = result { if let Err(e) = result {
siv.add_layer(views::Dialog::text(format!("Unable to delete camera: {}", e)) siv.add_layer(
.title("Error") views::Dialog::text(format!("Unable to delete camera: {}", e))
.dismiss_button("Abort")); .title("Error")
.dismiss_button("Abort"),
);
} else { } else {
// Recreate the "Edit cameras" dialog from scratch; it's easier than adding the new entry. // Recreate the "Edit cameras" dialog from scratch; it's easier than adding the new entry.
siv.pop_layer(); siv.pop_layer();
@ -255,10 +336,13 @@ fn actually_delete(siv: &mut Cursive, db: &Arc<db::Database>, id: i32) {
/// (The former if `item` is None; the latter otherwise.) /// (The former if `item` is None; the latter otherwise.)
fn edit_camera_dialog(db: &Arc<db::Database>, siv: &mut Cursive, item: &Option<i32>) { fn edit_camera_dialog(db: &Arc<db::Database>, siv: &mut Cursive, item: &Option<i32>) {
let camera_list = views::ListView::new() let camera_list = views::ListView::new()
.child("id", views::TextView::new(match *item { .child(
None => "<new>".to_string(), "id",
Some(id) => id.to_string(), views::TextView::new(match *item {
})) None => "<new>".to_string(),
Some(id) => id.to_string(),
}),
)
.child("uuid", views::TextView::new("<new>").with_name("uuid")) .child("uuid", views::TextView::new("<new>").with_name("uuid"))
.child("short name", views::EditView::new().with_name("short_name")) .child("short name", views::EditView::new().with_name("short_name"))
.child("onvif_host", views::EditView::new().with_name("onvif_host")) .child("onvif_host", views::EditView::new().with_name("onvif_host"))
@ -268,32 +352,54 @@ fn edit_camera_dialog(db: &Arc<db::Database>, siv: &mut Cursive, item: &Option<i
let mut layout = views::LinearLayout::vertical() let mut layout = views::LinearLayout::vertical()
.child(camera_list) .child(camera_list)
.child(views::TextView::new("description")) .child(views::TextView::new("description"))
.child(views::TextArea::new().with_name("description").min_height(3)); .child(
views::TextArea::new()
.with_name("description")
.min_height(3),
);
let dirs: Vec<_> = ::std::iter::once(("<none>".to_owned(), None)) let dirs: Vec<_> = ::std::iter::once(("<none>".to_owned(), None))
.chain(db.lock() .chain(
.sample_file_dirs_by_id() db.lock()
.iter() .sample_file_dirs_by_id()
.map(|(&id, d)| (d.path.as_str().to_owned(), Some(id)))) .iter()
.collect(); .map(|(&id, d)| (d.path.as_str().to_owned(), Some(id))),
)
.collect();
for &type_ in &db::ALL_STREAM_TYPES { for &type_ in &db::ALL_STREAM_TYPES {
let list = views::ListView::new() let list = views::ListView::new()
.child("rtsp url", views::LinearLayout::horizontal() .child(
.child(views::EditView::new() "rtsp url",
.with_name(format!("{}_rtsp_url", type_.as_str())) views::LinearLayout::horizontal()
.full_width()) .child(
.child(views::DummyView) views::EditView::new()
.child(views::Button::new("Test", move |siv| press_test(siv, type_)))) .with_name(format!("{}_rtsp_url", type_.as_str()))
.child("sample file dir", .full_width(),
views::SelectView::<Option<i32>>::new() )
.with_all(dirs.iter().map(|d| d.clone())) .child(views::DummyView)
.popup() .child(views::Button::new("Test", move |siv| {
.with_name(format!("{}_sample_file_dir", type_.as_str()))) press_test(siv, type_)
.child("record", views::Checkbox::new().with_name(format!("{}_record", type_.as_str()))) })),
.child("flush_if_sec", views::EditView::new() )
.with_name(format!("{}_flush_if_sec", type_.as_str()))) .child(
.child("usage/capacity", "sample file dir",
views::TextView::new("").with_name(format!("{}_usage_cap", type_.as_str()))) views::SelectView::<Option<i32>>::new()
.with_all(dirs.iter().map(|d| d.clone()))
.popup()
.with_name(format!("{}_sample_file_dir", type_.as_str())),
)
.child(
"record",
views::Checkbox::new().with_name(format!("{}_record", type_.as_str())),
)
.child(
"flush_if_sec",
views::EditView::new().with_name(format!("{}_flush_if_sec", type_.as_str())),
)
.child(
"usage/capacity",
views::TextView::new("").with_name(format!("{}_usage_cap", type_.as_str())),
)
.min_height(5); .min_height(5);
layout.add_child(views::DummyView); layout.add_child(views::DummyView);
layout.add_child(views::TextView::new(format!("{} stream", type_.as_str()))); layout.add_child(views::TextView::new(format!("{} stream", type_.as_str())));
@ -304,8 +410,11 @@ fn edit_camera_dialog(db: &Arc<db::Database>, siv: &mut Cursive, item: &Option<i
let dialog = if let Some(camera_id) = *item { let dialog = if let Some(camera_id) = *item {
let l = db.lock(); let l = db.lock();
let camera = l.cameras_by_id().get(&camera_id).expect("missing camera"); let camera = l.cameras_by_id().get(&camera_id).expect("missing camera");
dialog.call_on_name("uuid", |v: &mut views::TextView| v.set_content(camera.uuid.to_string())) dialog
.expect("missing TextView"); .call_on_name("uuid", |v: &mut views::TextView| {
v.set_content(camera.uuid.to_string())
})
.expect("missing TextView");
let mut bytes = 0; let mut bytes = 0;
for (i, sid) in camera.streams.iter().enumerate() { for (i, sid) in camera.streams.iter().enumerate() {
@ -326,70 +435,96 @@ fn edit_camera_dialog(db: &Arc<db::Database>, siv: &mut Cursive, item: &Option<i
let u = if s.retain_bytes == 0 { let u = if s.retain_bytes == 0 {
"0 / 0 (0.0%)".to_owned() "0 / 0 (0.0%)".to_owned()
} else { } else {
format!("{} / {} ({:.1}%)", s.fs_bytes, s.retain_bytes, format!(
100. * s.fs_bytes as f32 / s.retain_bytes as f32) "{} / {} ({:.1}%)",
s.fs_bytes,
s.retain_bytes,
100. * s.fs_bytes as f32 / s.retain_bytes as f32
)
}; };
dialog.call_on_name(&format!("{}_rtsp_url", t.as_str()), dialog.call_on_name(
|v: &mut views::EditView| v.set_content(s.rtsp_url.to_owned())); &format!("{}_rtsp_url", t.as_str()),
dialog.call_on_name(&format!("{}_usage_cap", t.as_str()), |v: &mut views::EditView| v.set_content(s.rtsp_url.to_owned()),
|v: &mut views::TextView| v.set_content(u)); );
dialog.call_on_name(&format!("{}_record", t.as_str()), dialog.call_on_name(
|v: &mut views::Checkbox| v.set_checked(s.record)); &format!("{}_usage_cap", t.as_str()),
|v: &mut views::TextView| v.set_content(u),
);
dialog.call_on_name(
&format!("{}_record", t.as_str()),
|v: &mut views::Checkbox| v.set_checked(s.record),
);
dialog.call_on_name( dialog.call_on_name(
&format!("{}_flush_if_sec", t.as_str()), &format!("{}_flush_if_sec", t.as_str()),
|v: &mut views::EditView| v.set_content(s.flush_if_sec.to_string())); |v: &mut views::EditView| v.set_content(s.flush_if_sec.to_string()),
);
} }
dialog.call_on_name( dialog.call_on_name(
&format!("{}_sample_file_dir", t.as_str()), &format!("{}_sample_file_dir", t.as_str()),
|v: &mut views::SelectView<Option<i32>>| v.set_selection(selected_dir)); |v: &mut views::SelectView<Option<i32>>| v.set_selection(selected_dir),
);
} }
let name = camera.short_name.clone(); let name = camera.short_name.clone();
for &(view_id, content) in &[("short_name", &*camera.short_name), for &(view_id, content) in &[
("onvif_host", &*camera.onvif_host), ("short_name", &*camera.short_name),
("username", &*camera.username), ("onvif_host", &*camera.onvif_host),
("password", &*camera.password)] { ("username", &*camera.username),
dialog.call_on_name(view_id, |v: &mut views::EditView| v.set_content(content.to_string())) ("password", &*camera.password),
.expect("missing EditView"); ] {
dialog
.call_on_name(view_id, |v: &mut views::EditView| {
v.set_content(content.to_string())
})
.expect("missing EditView");
} }
dialog.call_on_name("description", dialog
|v: &mut views::TextArea| v.set_content(camera.description.to_string())) .call_on_name("description", |v: &mut views::TextArea| {
.expect("missing TextArea"); v.set_content(camera.description.to_string())
dialog.title("Edit camera") })
.button("Edit", { .expect("missing TextArea");
let db = db.clone(); dialog
move |s| press_edit(s, &db, Some(camera_id)) .title("Edit camera")
}) .button("Edit", {
.button("Delete", { let db = db.clone();
let db = db.clone(); move |s| press_edit(s, &db, Some(camera_id))
move |s| press_delete(s, &db, camera_id, name.clone(), bytes) })
}) .button("Delete", {
let db = db.clone();
move |s| press_delete(s, &db, camera_id, name.clone(), bytes)
})
} else { } else {
for t in &db::ALL_STREAM_TYPES { for t in &db::ALL_STREAM_TYPES {
dialog.call_on_name(&format!("{}_usage_cap", t.as_str()), dialog.call_on_name(
|v: &mut views::TextView| v.set_content("<new>")); &format!("{}_usage_cap", t.as_str()),
|v: &mut views::TextView| v.set_content("<new>"),
);
} }
dialog.title("Add camera") dialog.title("Add camera").button("Add", {
.button("Add", { let db = db.clone();
let db = db.clone(); move |s| press_edit(s, &db, None)
move |s| press_edit(s, &db, None) })
})
}; };
siv.add_layer(dialog.dismiss_button("Cancel")); siv.add_layer(dialog.dismiss_button("Cancel"));
} }
pub fn top_dialog(db: &Arc<db::Database>, siv: &mut Cursive) { pub fn top_dialog(db: &Arc<db::Database>, siv: &mut Cursive) {
siv.add_layer(views::Dialog::around( siv.add_layer(
views::SelectView::new() views::Dialog::around(
.on_submit({ views::SelectView::new()
let db = db.clone(); .on_submit({
move |siv, item| edit_camera_dialog(&db, siv, item) let db = db.clone();
}) move |siv, item| edit_camera_dialog(&db, siv, item)
.item("<new camera>".to_string(), None) })
.with_all(db.lock() .item("<new camera>".to_string(), None)
.with_all(
db.lock()
.cameras_by_id() .cameras_by_id()
.iter() .iter()
.map(|(&id, camera)| (format!("{}: {}", id, camera.short_name), Some(id)))) .map(|(&id, camera)| (format!("{}: {}", id, camera.short_name), Some(id))),
.full_width()) )
.full_width(),
)
.dismiss_button("Done") .dismiss_button("Done")
.title("Edit cameras")); .title("Edit cameras"),
);
} }

View File

@ -29,9 +29,9 @@
// along with this program. If not, see <http://www.gnu.org/licenses/>. // along with this program. If not, see <http://www.gnu.org/licenses/>.
use base::strutil::{decode_size, encode_size}; use base::strutil::{decode_size, encode_size};
use cursive::Cursive;
use cursive::traits::{Boxable, Identifiable}; use cursive::traits::{Boxable, Identifiable};
use cursive::views; use cursive::views;
use cursive::Cursive;
use db::writer; use db::writer;
use failure::Error; use failure::Error;
use log::{debug, trace}; use log::{debug, trace};
@ -44,7 +44,7 @@ struct Stream {
label: String, label: String,
used: i64, used: i64,
record: bool, record: bool,
retain: Option<i64>, // None if unparseable retain: Option<i64>, // None if unparseable
} }
struct Model { struct Model {
@ -72,9 +72,11 @@ fn update_limits_inner(model: &Model) -> Result<(), Error> {
fn update_limits(model: &Model, siv: &mut Cursive) { fn update_limits(model: &Model, siv: &mut Cursive) {
if let Err(e) = update_limits_inner(model) { if let Err(e) = update_limits_inner(model) {
siv.add_layer(views::Dialog::text(format!("Unable to update limits: {}", e)) siv.add_layer(
.dismiss_button("Back") views::Dialog::text(format!("Unable to update limits: {}", e))
.title("Error")); .dismiss_button("Back")
.title("Error"),
);
} }
} }
@ -111,8 +113,8 @@ fn edit_limit(model: &RefCell<Model>, siv: &mut Cursive, id: i32, content: &str)
if (model.errors == 0) != (old_errors == 0) { if (model.errors == 0) != (old_errors == 0) {
trace!("toggling change state: errors={}", model.errors); trace!("toggling change state: errors={}", model.errors);
siv.find_name::<views::Button>("change") siv.find_name::<views::Button>("change")
.unwrap() .unwrap()
.set_enabled(model.errors == 0); .set_enabled(model.errors == 0);
} }
} }
@ -124,35 +126,48 @@ fn edit_record(model: &RefCell<Model>, id: i32, record: bool) {
} }
fn confirm_deletion(model: &RefCell<Model>, siv: &mut Cursive, to_delete: i64) { fn confirm_deletion(model: &RefCell<Model>, siv: &mut Cursive, to_delete: i64) {
let typed = siv.find_name::<views::EditView>("confirm") let typed = siv
.unwrap() .find_name::<views::EditView>("confirm")
.get_content(); .unwrap()
debug!("confirm, typed: {} vs expected: {}", typed.as_str(), to_delete); .get_content();
debug!(
"confirm, typed: {} vs expected: {}",
typed.as_str(),
to_delete
);
if decode_size(typed.as_str()).ok() == Some(to_delete) { if decode_size(typed.as_str()).ok() == Some(to_delete) {
actually_delete(model, siv); actually_delete(model, siv);
} else { } else {
siv.add_layer(views::Dialog::text("Please confirm amount.") siv.add_layer(
.title("Try again") views::Dialog::text("Please confirm amount.")
.dismiss_button("Back")); .title("Try again")
.dismiss_button("Back"),
);
} }
} }
fn actually_delete(model: &RefCell<Model>, siv: &mut Cursive) { fn actually_delete(model: &RefCell<Model>, siv: &mut Cursive) {
let model = &*model.borrow(); let model = &*model.borrow();
let new_limits: Vec<_> = let new_limits: Vec<_> = model
model.streams.iter() .streams
.map(|(&id, s)| writer::NewLimit {stream_id: id, limit: s.retain.unwrap()}) .iter()
.collect(); .map(|(&id, s)| writer::NewLimit {
siv.pop_layer(); // deletion confirmation stream_id: id,
siv.pop_layer(); // retention dialog limit: s.retain.unwrap(),
})
.collect();
siv.pop_layer(); // deletion confirmation
siv.pop_layer(); // retention dialog
{ {
let mut l = model.db.lock(); let mut l = model.db.lock();
l.open_sample_file_dirs(&[model.dir_id]).unwrap(); // TODO: don't unwrap. l.open_sample_file_dirs(&[model.dir_id]).unwrap(); // TODO: don't unwrap.
} }
if let Err(e) = writer::lower_retention(model.db.clone(), model.dir_id, &new_limits[..]) { if let Err(e) = writer::lower_retention(model.db.clone(), model.dir_id, &new_limits[..]) {
siv.add_layer(views::Dialog::text(format!("Unable to delete excess video: {}", e)) siv.add_layer(
.title("Error") views::Dialog::text(format!("Unable to delete excess video: {}", e))
.dismiss_button("Abort")); .title("Error")
.dismiss_button("Abort"),
);
} else { } else {
update_limits(model, siv); update_limits(model, siv);
} }
@ -162,26 +177,38 @@ fn press_change(model: &Rc<RefCell<Model>>, siv: &mut Cursive) {
if model.borrow().errors > 0 { if model.borrow().errors > 0 {
return; return;
} }
let to_delete = model.borrow().streams.values().map( let to_delete = model
|s| ::std::cmp::max(s.used - s.retain.unwrap(), 0)).sum(); .borrow()
.streams
.values()
.map(|s| ::std::cmp::max(s.used - s.retain.unwrap(), 0))
.sum();
debug!("change press, to_delete={}", to_delete); debug!("change press, to_delete={}", to_delete);
if to_delete > 0 { if to_delete > 0 {
let prompt = format!("Some streams' usage exceeds new limit. Please confirm the amount \ let prompt = format!(
of data to delete by typing it back:\n\n{}", encode_size(to_delete)); "Some streams' usage exceeds new limit. Please confirm the amount \
of data to delete by typing it back:\n\n{}",
encode_size(to_delete)
);
let dialog = views::Dialog::around( let dialog = views::Dialog::around(
views::LinearLayout::vertical() views::LinearLayout::vertical()
.child(views::TextView::new(prompt)) .child(views::TextView::new(prompt))
.child(views::DummyView) .child(views::DummyView)
.child(views::EditView::new().on_submit({ .child(
let model = model.clone(); views::EditView::new()
move |siv, _| confirm_deletion(&model, siv, to_delete) .on_submit({
}).with_name("confirm"))) let model = model.clone();
.button("Confirm", { move |siv, _| confirm_deletion(&model, siv, to_delete)
let model = model.clone(); })
move |siv| confirm_deletion(&model, siv, to_delete) .with_name("confirm"),
}) ),
.dismiss_button("Cancel") )
.title("Confirm deletion"); .button("Confirm", {
let model = model.clone();
move |siv| confirm_deletion(&model, siv, to_delete)
})
.dismiss_button("Cancel")
.title("Confirm deletion");
siv.add_layer(dialog); siv.add_layer(dialog);
} else { } else {
siv.pop_layer(); siv.pop_layer();
@ -190,23 +217,28 @@ fn press_change(model: &Rc<RefCell<Model>>, siv: &mut Cursive) {
} }
pub fn top_dialog(db: &Arc<db::Database>, siv: &mut Cursive) { pub fn top_dialog(db: &Arc<db::Database>, siv: &mut Cursive) {
siv.add_layer(views::Dialog::around( siv.add_layer(
views::SelectView::new() views::Dialog::around(
.on_submit({ views::SelectView::new()
let db = db.clone(); .on_submit({
move |siv, item| match *item { let db = db.clone();
Some(d) => edit_dir_dialog(&db, siv, d), move |siv, item| match *item {
None => add_dir_dialog(&db, siv), Some(d) => edit_dir_dialog(&db, siv, d),
} None => add_dir_dialog(&db, siv),
}) }
.item("<new sample file dir>".to_string(), None) })
.with_all(db.lock() .item("<new sample file dir>".to_string(), None)
.with_all(
db.lock()
.sample_file_dirs_by_id() .sample_file_dirs_by_id()
.iter() .iter()
.map(|(&id, d)| (d.path.to_string(), Some(id)))) .map(|(&id, d)| (d.path.to_string(), Some(id))),
.full_width()) )
.full_width(),
)
.dismiss_button("Done") .dismiss_button("Done")
.title("Edit sample file directories")); .title("Edit sample file directories"),
);
} }
fn add_dir_dialog(db: &Arc<db::Database>, siv: &mut Cursive) { fn add_dir_dialog(db: &Arc<db::Database>, siv: &mut Cursive) {
@ -214,29 +246,40 @@ fn add_dir_dialog(db: &Arc<db::Database>, siv: &mut Cursive) {
views::Dialog::around( views::Dialog::around(
views::LinearLayout::vertical() views::LinearLayout::vertical()
.child(views::TextView::new("path")) .child(views::TextView::new("path"))
.child(views::EditView::new() .child(
.on_submit({ views::EditView::new()
let db = db.clone(); .on_submit({
move |siv, path| add_dir(&db, siv, path) let db = db.clone();
}) move |siv, path| add_dir(&db, siv, path)
.with_name("path") })
.fixed_width(60))) .with_name("path")
.button("Add", { .fixed_width(60),
let db = db.clone(); ),
move |siv| { )
let path = siv.find_name::<views::EditView>("path").unwrap().get_content(); .button("Add", {
add_dir(&db, siv, &path) let db = db.clone();
} move |siv| {
}) let path = siv
.button("Cancel", |siv| { siv.pop_layer(); }) .find_name::<views::EditView>("path")
.title("Add sample file directory")); .unwrap()
.get_content();
add_dir(&db, siv, &path)
}
})
.button("Cancel", |siv| {
siv.pop_layer();
})
.title("Add sample file directory"),
);
} }
fn add_dir(db: &Arc<db::Database>, siv: &mut Cursive, path: &str) { fn add_dir(db: &Arc<db::Database>, siv: &mut Cursive, path: &str) {
if let Err(e) = db.lock().add_sample_file_dir(path.to_owned()) { if let Err(e) = db.lock().add_sample_file_dir(path.to_owned()) {
siv.add_layer(views::Dialog::text(format!("Unable to add path {}: {}", path, e)) siv.add_layer(
.dismiss_button("Back") views::Dialog::text(format!("Unable to add path {}: {}", path, e))
.title("Error")); .dismiss_button("Back")
.title("Error"),
);
return; return;
} }
siv.pop_layer(); siv.pop_layer();
@ -248,23 +291,25 @@ fn add_dir(db: &Arc<db::Database>, siv: &mut Cursive, path: &str) {
fn delete_dir_dialog(db: &Arc<db::Database>, siv: &mut Cursive, dir_id: i32) { fn delete_dir_dialog(db: &Arc<db::Database>, siv: &mut Cursive, dir_id: i32) {
siv.add_layer( siv.add_layer(
views::Dialog::around( views::Dialog::around(views::TextView::new("Empty (no associated streams)."))
views::TextView::new("Empty (no associated streams)."))
.button("Delete", { .button("Delete", {
let db = db.clone(); let db = db.clone();
move |siv| { move |siv| delete_dir(&db, siv, dir_id)
delete_dir(&db, siv, dir_id)
}
}) })
.button("Cancel", |siv| { siv.pop_layer(); }) .button("Cancel", |siv| {
.title("Delete sample file directory")); siv.pop_layer();
})
.title("Delete sample file directory"),
);
} }
fn delete_dir(db: &Arc<db::Database>, siv: &mut Cursive, dir_id: i32) { fn delete_dir(db: &Arc<db::Database>, siv: &mut Cursive, dir_id: i32) {
if let Err(e) = db.lock().delete_sample_file_dir(dir_id) { if let Err(e) = db.lock().delete_sample_file_dir(dir_id) {
siv.add_layer(views::Dialog::text(format!("Unable to delete dir id {}: {}", dir_id, e)) siv.add_layer(
.dismiss_button("Back") views::Dialog::text(format!("Unable to delete dir id {}: {}", dir_id, e))
.title("Error")); .dismiss_button("Back")
.title("Error"),
);
return; return;
} }
siv.pop_layer(); siv.pop_layer();
@ -284,23 +329,29 @@ fn edit_dir_dialog(db: &Arc<db::Database>, siv: &mut Cursive, dir_id: i32) {
{ {
let mut l = db.lock(); let mut l = db.lock();
for (&id, s) in l.streams_by_id() { for (&id, s) in l.streams_by_id() {
let c = l.cameras_by_id().get(&s.camera_id).expect("stream without camera"); let c = l
.cameras_by_id()
.get(&s.camera_id)
.expect("stream without camera");
if s.sample_file_dir_id != Some(dir_id) { if s.sample_file_dir_id != Some(dir_id) {
continue; continue;
} }
streams.insert(id, Stream { streams.insert(
label: format!("{}: {}: {}", id, c.short_name, s.type_.as_str()), id,
used: s.fs_bytes, Stream {
record: s.record, label: format!("{}: {}: {}", id, c.short_name, s.type_.as_str()),
retain: Some(s.retain_bytes), used: s.fs_bytes,
}); record: s.record,
retain: Some(s.retain_bytes),
},
);
total_used += s.fs_bytes; total_used += s.fs_bytes;
total_retain += s.retain_bytes; total_retain += s.retain_bytes;
} }
if streams.is_empty() { if streams.is_empty() {
return delete_dir_dialog(db, siv, dir_id); return delete_dir_dialog(db, siv, dir_id);
} }
l.open_sample_file_dirs(&[dir_id]).unwrap(); // TODO: don't unwrap. l.open_sample_file_dirs(&[dir_id]).unwrap(); // TODO: don't unwrap.
let dir = l.sample_file_dirs_by_id().get(&dir_id).unwrap(); let dir = l.sample_file_dirs_by_id().get(&dir_id).unwrap();
let stat = dir.get().unwrap().statfs().unwrap(); let stat = dir.get().unwrap().statfs().unwrap();
fs_capacity = stat.block_size() as i64 * stat.blocks_available() as i64 + total_used; fs_capacity = stat.block_size() as i64 * stat.blocks_available() as i64 + total_used;
@ -326,7 +377,8 @@ fn edit_dir_dialog(db: &Arc<db::Database>, siv: &mut Cursive, dir_id: i32) {
views::LinearLayout::horizontal() views::LinearLayout::horizontal()
.child(views::TextView::new("record").fixed_width(RECORD_WIDTH)) .child(views::TextView::new("record").fixed_width(RECORD_WIDTH))
.child(views::TextView::new("usage").fixed_width(BYTES_WIDTH)) .child(views::TextView::new("usage").fixed_width(BYTES_WIDTH))
.child(views::TextView::new("limit").fixed_width(BYTES_WIDTH))); .child(views::TextView::new("limit").fixed_width(BYTES_WIDTH)),
);
for (&id, stream) in &model.borrow().streams { for (&id, stream) in &model.borrow().streams {
let mut record_cb = views::Checkbox::new(); let mut record_cb = views::Checkbox::new();
record_cb.set_checked(stream.record); record_cb.set_checked(stream.record);
@ -339,50 +391,67 @@ fn edit_dir_dialog(db: &Arc<db::Database>, siv: &mut Cursive, dir_id: i32) {
views::LinearLayout::horizontal() views::LinearLayout::horizontal()
.child(record_cb.fixed_width(RECORD_WIDTH)) .child(record_cb.fixed_width(RECORD_WIDTH))
.child(views::TextView::new(encode_size(stream.used)).fixed_width(BYTES_WIDTH)) .child(views::TextView::new(encode_size(stream.used)).fixed_width(BYTES_WIDTH))
.child(views::EditView::new() .child(
.content(encode_size(stream.retain.unwrap())) views::EditView::new()
.on_edit({ .content(encode_size(stream.retain.unwrap()))
let model = model.clone(); .on_edit({
move |siv, content, _pos| edit_limit(&model, siv, id, content) let model = model.clone();
}) move |siv, content, _pos| edit_limit(&model, siv, id, content)
.on_submit({ })
let model = model.clone(); .on_submit({
move |siv, _| press_change(&model, siv) let model = model.clone();
}) move |siv, _| press_change(&model, siv)
.fixed_width(20)) })
.child(views::TextView::new("").with_name(format!("{}_ok", id)).fixed_width(1))); .fixed_width(20),
)
.child(
views::TextView::new("")
.with_name(format!("{}_ok", id))
.fixed_width(1),
),
);
} }
let over = model.borrow().total_retain > model.borrow().fs_capacity; let over = model.borrow().total_retain > model.borrow().fs_capacity;
list.add_child( list.add_child(
"total", "total",
views::LinearLayout::horizontal() views::LinearLayout::horizontal()
.child(views::DummyView{}.fixed_width(RECORD_WIDTH)) .child(views::DummyView {}.fixed_width(RECORD_WIDTH))
.child(views::TextView::new(encode_size(model.borrow().total_used)) .child(
.fixed_width(BYTES_WIDTH)) views::TextView::new(encode_size(model.borrow().total_used))
.child(views::TextView::new(encode_size(model.borrow().total_retain)) .fixed_width(BYTES_WIDTH),
.with_name("total_retain").fixed_width(BYTES_WIDTH)) )
.child(views::TextView::new(if over { "*" } else { " " }).with_name("total_ok"))); .child(
views::TextView::new(encode_size(model.borrow().total_retain))
.with_name("total_retain")
.fixed_width(BYTES_WIDTH),
)
.child(views::TextView::new(if over { "*" } else { " " }).with_name("total_ok")),
);
list.add_child( list.add_child(
"filesystem", "filesystem",
views::LinearLayout::horizontal() views::LinearLayout::horizontal()
.child(views::DummyView{}.fixed_width(3)) .child(views::DummyView {}.fixed_width(3))
.child(views::DummyView{}.fixed_width(20)) .child(views::DummyView {}.fixed_width(20))
.child(views::TextView::new(encode_size(model.borrow().fs_capacity)).fixed_width(25))); .child(views::TextView::new(encode_size(model.borrow().fs_capacity)).fixed_width(25)),
);
let mut change_button = views::Button::new("Change", { let mut change_button = views::Button::new("Change", {
let model = model.clone(); let model = model.clone();
move |siv| press_change(&model, siv) move |siv| press_change(&model, siv)
}); });
change_button.set_enabled(!over); change_button.set_enabled(!over);
let mut buttons = views::LinearLayout::horizontal() let mut buttons = views::LinearLayout::horizontal().child(views::DummyView.full_width());
.child(views::DummyView.full_width());
buttons.add_child(change_button.with_name("change")); buttons.add_child(change_button.with_name("change"));
buttons.add_child(views::DummyView); buttons.add_child(views::DummyView);
buttons.add_child(views::Button::new("Cancel", |siv| { siv.pop_layer(); })); buttons.add_child(views::Button::new("Cancel", |siv| {
siv.pop_layer();
}));
siv.add_layer( siv.add_layer(
views::Dialog::around( views::Dialog::around(
views::LinearLayout::vertical() views::LinearLayout::vertical()
.child(list) .child(list)
.child(views::DummyView) .child(views::DummyView)
.child(buttons)) .child(buttons),
.title(format!("Edit retention for {}", path))); )
.title(format!("Edit retention for {}", path)),
);
} }

View File

@ -34,8 +34,8 @@
//! configuration will likely be almost entirely done through a web-based UI. //! configuration will likely be almost entirely done through a web-based UI.
use base::clock; use base::clock;
use cursive::Cursive;
use cursive::views; use cursive::views;
use cursive::Cursive;
use db; use db;
use failure::Error; use failure::Error;
use std::path::PathBuf; use std::path::PathBuf;
@ -49,8 +49,12 @@ mod users;
#[derive(StructOpt)] #[derive(StructOpt)]
pub struct Args { pub struct Args {
/// Directory holding the SQLite3 index database. /// Directory holding the SQLite3 index database.
#[structopt(long, default_value = "/var/lib/moonfire-nvr/db", value_name="path", #[structopt(
parse(from_os_str))] long,
default_value = "/var/lib/moonfire-nvr/db",
value_name = "path",
parse(from_os_str)
)]
db_dir: PathBuf, db_dir: PathBuf,
} }
@ -62,18 +66,20 @@ pub fn run(args: &Args) -> Result<i32, Error> {
let mut siv = cursive::default(); let mut siv = cursive::default();
//siv.add_global_callback('q', |s| s.quit()); //siv.add_global_callback('q', |s| s.quit());
siv.add_layer(views::Dialog::around( siv.add_layer(
views::SelectView::<fn(&Arc<db::Database>, &mut Cursive)>::new() views::Dialog::around(
.on_submit({ views::SelectView::<fn(&Arc<db::Database>, &mut Cursive)>::new()
let db = db.clone(); .on_submit({
move |siv, item| item(&db, siv) let db = db.clone();
}) move |siv, item| item(&db, siv)
.item("Cameras and streams".to_string(), cameras::top_dialog) })
.item("Directories and retention".to_string(), dirs::top_dialog) .item("Cameras and streams".to_string(), cameras::top_dialog)
.item("Users".to_string(), users::top_dialog) .item("Directories and retention".to_string(), dirs::top_dialog)
) .item("Users".to_string(), users::top_dialog),
)
.button("Quit", |siv| siv.quit()) .button("Quit", |siv| siv.quit())
.title("Main menu")); .title("Main menu"),
);
siv.run(); siv.run();

View File

@ -28,33 +28,51 @@
// You should have received a copy of the GNU General Public License // You should have received a copy of the GNU General Public License
// along with this program. If not, see <http://www.gnu.org/licenses/>. // along with this program. If not, see <http://www.gnu.org/licenses/>.
use cursive::Cursive;
use cursive::traits::{Boxable, Identifiable}; use cursive::traits::{Boxable, Identifiable};
use cursive::views; use cursive::views;
use cursive::Cursive;
use log::info; use log::info;
use std::sync::Arc; use std::sync::Arc;
/// Builds a `UserChange` from an active `edit_user_dialog`. /// Builds a `UserChange` from an active `edit_user_dialog`.
fn get_change(siv: &mut Cursive, db: &db::LockedDatabase, id: Option<i32>, fn get_change(
pw: PasswordChange) -> db::UserChange { siv: &mut Cursive,
db: &db::LockedDatabase,
id: Option<i32>,
pw: PasswordChange,
) -> db::UserChange {
let mut change = match id { let mut change = match id {
Some(id) => db.users_by_id().get(&id).unwrap().change(), Some(id) => db.users_by_id().get(&id).unwrap().change(),
None => db::UserChange::add_user(String::new()), None => db::UserChange::add_user(String::new()),
}; };
change.username.clear(); change.username.clear();
change.username += siv.find_name::<views::EditView>("username").unwrap().get_content().as_str(); change.username += siv
.find_name::<views::EditView>("username")
.unwrap()
.get_content()
.as_str();
match pw { match pw {
PasswordChange::Leave => {}, PasswordChange::Leave => {}
PasswordChange::Set => { PasswordChange::Set => {
let pwd = siv.find_name::<views::EditView>("new_pw").unwrap().get_content(); let pwd = siv
.find_name::<views::EditView>("new_pw")
.unwrap()
.get_content();
change.set_password(pwd.as_str().into()); change.set_password(pwd.as_str().into());
}, }
PasswordChange::Clear => change.clear_password(), PasswordChange::Clear => change.clear_password(),
}; };
for (id, ref mut b) in &mut [ for (id, ref mut b) in &mut [
("perm_view_video", &mut change.permissions.view_video), ("perm_view_video", &mut change.permissions.view_video),
("perm_read_camera_configs", &mut change.permissions.read_camera_configs), (
("perm_update_signals", &mut change.permissions.update_signals)] { "perm_read_camera_configs",
&mut change.permissions.read_camera_configs,
),
(
"perm_update_signals",
&mut change.permissions.update_signals,
),
] {
**b = siv.find_name::<views::Checkbox>(id).unwrap().is_checked(); **b = siv.find_name::<views::Checkbox>(id).unwrap().is_checked();
info!("{}: {}", id, **b); info!("{}: {}", id, **b);
} }
@ -68,11 +86,13 @@ fn press_edit(siv: &mut Cursive, db: &Arc<db::Database>, id: Option<i32>, pw: Pa
l.apply_user_change(c).map(|_| ()) l.apply_user_change(c).map(|_| ())
}; };
if let Err(e) = result { if let Err(e) = result {
siv.add_layer(views::Dialog::text(format!("Unable to apply change: {}", e)) siv.add_layer(
.title("Error") views::Dialog::text(format!("Unable to apply change: {}", e))
.dismiss_button("Abort")); .title("Error")
.dismiss_button("Abort"),
);
} else { } else {
siv.pop_layer(); // get rid of the add/edit user dialog. siv.pop_layer(); // get rid of the add/edit user dialog.
// Recreate the "Edit users" dialog from scratch; it's easier than adding the new entry. // Recreate the "Edit users" dialog from scratch; it's easier than adding the new entry.
siv.pop_layer(); siv.pop_layer();
@ -81,24 +101,29 @@ fn press_edit(siv: &mut Cursive, db: &Arc<db::Database>, id: Option<i32>, pw: Pa
} }
fn press_delete(siv: &mut Cursive, db: &Arc<db::Database>, id: i32, name: String) { fn press_delete(siv: &mut Cursive, db: &Arc<db::Database>, id: i32, name: String) {
siv.add_layer(views::Dialog::text(format!("Delete user {}?", name)) siv.add_layer(
.button("Delete", { views::Dialog::text(format!("Delete user {}?", name))
let db = db.clone(); .button("Delete", {
move |s| actually_delete(s, &db, id) let db = db.clone();
}) move |s| actually_delete(s, &db, id)
.title("Delete user").dismiss_button("Cancel")); })
.title("Delete user")
.dismiss_button("Cancel"),
);
} }
fn actually_delete(siv: &mut Cursive, db: &Arc<db::Database>, id: i32) { fn actually_delete(siv: &mut Cursive, db: &Arc<db::Database>, id: i32) {
siv.pop_layer(); // get rid of the add/edit user dialog. siv.pop_layer(); // get rid of the add/edit user dialog.
let result = { let result = {
let mut l = db.lock(); let mut l = db.lock();
l.delete_user(id) l.delete_user(id)
}; };
if let Err(e) = result { if let Err(e) = result {
siv.add_layer(views::Dialog::text(format!("Unable to delete user: {}", e)) siv.add_layer(
.title("Error") views::Dialog::text(format!("Unable to delete user: {}", e))
.dismiss_button("Abort")); .title("Error")
.dismiss_button("Abort"),
);
} else { } else {
// Recreate the "Edit users" dialog from scratch; it's easier than adding the new entry. // Recreate the "Edit users" dialog from scratch; it's easier than adding the new entry.
siv.pop_layer(); siv.pop_layer();
@ -114,7 +139,9 @@ enum PasswordChange {
} }
fn select_set(siv: &mut Cursive) { fn select_set(siv: &mut Cursive) {
siv.find_name::<views::RadioButton<PasswordChange>>("pw_set").unwrap().select(); siv.find_name::<views::RadioButton<PasswordChange>>("pw_set")
.unwrap()
.select();
} }
/// Adds or updates a user. /// Adds or updates a user.
@ -128,13 +155,18 @@ fn edit_user_dialog(db: &Arc<db::Database>, siv: &mut Cursive, item: Option<i32>
username = u.map(|u| u.username.clone()).unwrap_or(String::new()); username = u.map(|u| u.username.clone()).unwrap_or(String::new());
id_str = item.map(|id| id.to_string()).unwrap_or("<new>".to_string()); id_str = item.map(|id| id.to_string()).unwrap_or("<new>".to_string());
has_password = u.map(|u| u.has_password()).unwrap_or(false); has_password = u.map(|u| u.has_password()).unwrap_or(false);
permissions = u.map(|u| u.permissions.clone()).unwrap_or(db::Permissions::default()); permissions = u
.map(|u| u.permissions.clone())
.unwrap_or(db::Permissions::default());
} }
let top_list = views::ListView::new() let top_list = views::ListView::new()
.child("id", views::TextView::new(id_str)) .child("id", views::TextView::new(id_str))
.child("username", views::EditView::new() .child(
.content(username.clone()) "username",
.with_name("username")); views::EditView::new()
.content(username.clone())
.with_name("username"),
);
let mut layout = views::LinearLayout::vertical() let mut layout = views::LinearLayout::vertical()
.child(top_list) .child(top_list)
.child(views::DummyView) .child(views::DummyView)
@ -143,32 +175,48 @@ fn edit_user_dialog(db: &Arc<db::Database>, siv: &mut Cursive, item: Option<i32>
if has_password { if has_password {
layout.add_child(pw_group.button(PasswordChange::Leave, "Leave set")); layout.add_child(pw_group.button(PasswordChange::Leave, "Leave set"));
layout.add_child(pw_group.button(PasswordChange::Clear, "Clear")); layout.add_child(pw_group.button(PasswordChange::Clear, "Clear"));
layout.add_child(views::LinearLayout::horizontal() layout.add_child(
.child(pw_group.button(PasswordChange::Set, "Set to:") views::LinearLayout::horizontal()
.with_name("pw_set")) .child(
.child(views::DummyView) pw_group
.child(views::EditView::new() .button(PasswordChange::Set, "Set to:")
.on_edit(|siv, _, _| select_set(siv)) .with_name("pw_set"),
.with_name("new_pw") )
.full_width())); .child(views::DummyView)
.child(
views::EditView::new()
.on_edit(|siv, _, _| select_set(siv))
.with_name("new_pw")
.full_width(),
),
);
} else { } else {
layout.add_child(pw_group.button(PasswordChange::Leave, "Leave unset")); layout.add_child(pw_group.button(PasswordChange::Leave, "Leave unset"));
layout.add_child(views::LinearLayout::horizontal() layout.add_child(
.child(pw_group.button(PasswordChange::Set, "Reset to:") views::LinearLayout::horizontal()
.with_name("pw_set")) .child(
.child(views::DummyView) pw_group
.child(views::EditView::new() .button(PasswordChange::Set, "Reset to:")
.on_edit(|siv, _, _| select_set(siv)) .with_name("pw_set"),
.with_name("new_pw") )
.full_width())); .child(views::DummyView)
.child(
views::EditView::new()
.on_edit(|siv, _, _| select_set(siv))
.with_name("new_pw")
.full_width(),
),
);
} }
layout.add_child(views::DummyView); layout.add_child(views::DummyView);
layout.add_child(views::TextView::new("permissions")); layout.add_child(views::TextView::new("permissions"));
let mut perms = views::ListView::new(); let mut perms = views::ListView::new();
for (name, b) in &[("view_video", permissions.view_video), for (name, b) in &[
("read_camera_configs", permissions.read_camera_configs), ("view_video", permissions.view_video),
("update_signals", permissions.update_signals)] { ("read_camera_configs", permissions.read_camera_configs),
("update_signals", permissions.update_signals),
] {
let mut checkbox = views::Checkbox::new(); let mut checkbox = views::Checkbox::new();
checkbox.set_checked(*b); checkbox.set_checked(*b);
perms.add_child(name, checkbox.with_name(format!("perm_{}", name))); perms.add_child(name, checkbox.with_name(format!("perm_{}", name)));
@ -177,38 +225,43 @@ fn edit_user_dialog(db: &Arc<db::Database>, siv: &mut Cursive, item: Option<i32>
let dialog = views::Dialog::around(layout); let dialog = views::Dialog::around(layout);
let dialog = if let Some(id) = item { let dialog = if let Some(id) = item {
dialog.title("Edit user") dialog
.button("Edit", { .title("Edit user")
let db = db.clone(); .button("Edit", {
move |s| press_edit(s, &db, item, *pw_group.selection()) let db = db.clone();
}) move |s| press_edit(s, &db, item, *pw_group.selection())
.button("Delete", { })
let db = db.clone(); .button("Delete", {
move |s| press_delete(s, &db, id, username.clone()) let db = db.clone();
}) move |s| press_delete(s, &db, id, username.clone())
})
} else { } else {
dialog.title("Add user") dialog.title("Add user").button("Add", {
.button("Add", { let db = db.clone();
let db = db.clone(); move |s| press_edit(s, &db, item, *pw_group.selection())
move |s| press_edit(s, &db, item, *pw_group.selection()) })
})
}; };
siv.add_layer(dialog.dismiss_button("Cancel")); siv.add_layer(dialog.dismiss_button("Cancel"));
} }
pub fn top_dialog(db: &Arc<db::Database>, siv: &mut Cursive) { pub fn top_dialog(db: &Arc<db::Database>, siv: &mut Cursive) {
siv.add_layer(views::Dialog::around( siv.add_layer(
views::SelectView::new() views::Dialog::around(
.on_submit({ views::SelectView::new()
let db = db.clone(); .on_submit({
move |siv, &item| edit_user_dialog(&db, siv, item) let db = db.clone();
}) move |siv, &item| edit_user_dialog(&db, siv, item)
.item("<new user>".to_string(), None) })
.with_all(db.lock() .item("<new user>".to_string(), None)
.with_all(
db.lock()
.users_by_id() .users_by_id()
.iter() .iter()
.map(|(&id, user)| (format!("{}: {}", id, user.username), Some(id)))) .map(|(&id, user)| (format!("{}: {}", id, user.username), Some(id))),
.full_width()) )
.full_width(),
)
.dismiss_button("Done") .dismiss_button("Done")
.title("Edit users")); .title("Edit users"),
);
} }

View File

@ -30,14 +30,18 @@
use failure::Error; use failure::Error;
use log::info; use log::info;
use structopt::StructOpt;
use std::path::PathBuf; use std::path::PathBuf;
use structopt::StructOpt;
#[derive(StructOpt)] #[derive(StructOpt)]
pub struct Args { pub struct Args {
/// Directory holding the SQLite3 index database. /// Directory holding the SQLite3 index database.
#[structopt(long, default_value = "/var/lib/moonfire-nvr/db", value_name="path", #[structopt(
parse(from_os_str))] long,
default_value = "/var/lib/moonfire-nvr/db",
value_name = "path",
parse(from_os_str)
)]
db_dir: PathBuf, db_dir: PathBuf,
} }
@ -55,12 +59,14 @@ pub fn run(args: &Args) -> Result<i32, Error> {
// page size (so reading large recording_playback rows doesn't require as many seeks). Changing // page size (so reading large recording_playback rows doesn't require as many seeks). Changing
// the page size requires doing a vacuum in non-WAL mode. This will be cheap on an empty // the page size requires doing a vacuum in non-WAL mode. This will be cheap on an empty
// database. https://www.sqlite.org/pragma.html#pragma_page_size // database. https://www.sqlite.org/pragma.html#pragma_page_size
conn.execute_batch(r#" conn.execute_batch(
r#"
pragma journal_mode = delete; pragma journal_mode = delete;
pragma page_size = 16384; pragma page_size = 16384;
vacuum; vacuum;
pragma journal_mode = wal; pragma journal_mode = wal;
"#)?; "#,
)?;
db::init(&mut conn)?; db::init(&mut conn)?;
info!("Database initialized."); info!("Database initialized.");
Ok(0) Ok(0)

View File

@ -32,17 +32,21 @@
use base::clock::{self, Clocks}; use base::clock::{self, Clocks};
use db::auth::SessionFlag; use db::auth::SessionFlag;
use failure::{Error, format_err}; use failure::{format_err, Error};
use std::os::unix::fs::OpenOptionsExt as _;
use std::io::Write as _; use std::io::Write as _;
use std::os::unix::fs::OpenOptionsExt as _;
use std::path::PathBuf; use std::path::PathBuf;
use structopt::StructOpt; use structopt::StructOpt;
#[derive(Debug, Default, StructOpt)] #[derive(Debug, Default, StructOpt)]
pub struct Args { pub struct Args {
/// Directory holding the SQLite3 index database. /// Directory holding the SQLite3 index database.
#[structopt(long, default_value = "/var/lib/moonfire-nvr/db", value_name="path", #[structopt(
parse(from_os_str))] long,
default_value = "/var/lib/moonfire-nvr/db",
value_name = "path",
parse(from_os_str)
)]
db_dir: PathBuf, db_dir: PathBuf,
/// Create a session with the given permissions. /// Create a session with the given permissions.
@ -59,12 +63,16 @@ pub struct Args {
/// Write the cookie to a new curl-compatible cookie-jar file. /// Write the cookie to a new curl-compatible cookie-jar file.
/// ///
/// ---domain must be specified. This file can be used later with curl's --cookie flag. /// ---domain must be specified. This file can be used later with curl's --cookie flag.
#[structopt(long, requires("domain"), value_name="path")] #[structopt(long, requires("domain"), value_name = "path")]
curl_cookie_jar: Option<PathBuf>, curl_cookie_jar: Option<PathBuf>,
/// Set the given db::auth::SessionFlags. /// Set the given db::auth::SessionFlags.
#[structopt(long, default_value="http-only,secure,same-site,same-site-strict", #[structopt(
value_name="flags", use_delimiter=true)] long,
default_value = "http-only,secure,same-site,same-site-strict",
value_name = "flags",
use_delimiter = true
)]
session_flags: Vec<SessionFlag>, session_flags: Vec<SessionFlag>,
/// Create the session for this username. /// Create the session for this username.
@ -76,7 +84,8 @@ pub fn run(args: &Args) -> Result<i32, Error> {
let (_db_dir, conn) = super::open_conn(&args.db_dir, super::OpenMode::ReadWrite)?; let (_db_dir, conn) = super::open_conn(&args.db_dir, super::OpenMode::ReadWrite)?;
let db = std::sync::Arc::new(db::Database::new(clocks.clone(), conn, true).unwrap()); let db = std::sync::Arc::new(db::Database::new(clocks.clone(), conn, true).unwrap());
let mut l = db.lock(); let mut l = db.lock();
let u = l.get_user(&args.username) let u = l
.get_user(&args.username)
.ok_or_else(|| format_err!("no such user {:?}", &args.username))?; .ok_or_else(|| format_err!("no such user {:?}", &args.username))?;
let permissions = args.permissions.as_ref().unwrap_or(&u.permissions).clone(); let permissions = args.permissions.as_ref().unwrap_or(&u.permissions).clone();
let creation = db::auth::Request { let creation = db::auth::Request {
@ -90,27 +99,36 @@ pub fn run(args: &Args) -> Result<i32, Error> {
} }
let uid = u.id; let uid = u.id;
drop(u); drop(u);
let (sid, _) = l.make_session(creation, uid, let (sid, _) = l.make_session(
args.domain.as_ref().map(|d| d.as_bytes().to_owned()), creation,
flags, permissions)?; uid,
args.domain.as_ref().map(|d| d.as_bytes().to_owned()),
flags,
permissions,
)?;
let mut encoded = [0u8; 64]; let mut encoded = [0u8; 64];
base64::encode_config_slice(&sid, base64::STANDARD_NO_PAD, &mut encoded); base64::encode_config_slice(&sid, base64::STANDARD_NO_PAD, &mut encoded);
let encoded = std::str::from_utf8(&encoded[..]).expect("base64 is valid UTF-8"); let encoded = std::str::from_utf8(&encoded[..]).expect("base64 is valid UTF-8");
if let Some(ref p) = args.curl_cookie_jar { if let Some(ref p) = args.curl_cookie_jar {
let d = args.domain.as_ref() let d = args
.ok_or_else(|| format_err!("--cookiejar requires --domain"))?; .domain
.as_ref()
.ok_or_else(|| format_err!("--cookiejar requires --domain"))?;
let mut f = std::fs::OpenOptions::new() let mut f = std::fs::OpenOptions::new()
.write(true) .write(true)
.create_new(true) .create_new(true)
.mode(0o600) .mode(0o600)
.open(p) .open(p)
.map_err(|e| format_err!("Unable to open {}: {}", p.display(), e))?; .map_err(|e| format_err!("Unable to open {}: {}", p.display(), e))?;
write!(&mut f, write!(
"# Netscape HTTP Cookie File\n\ &mut f,
# https://curl.haxx.se/docs/http-cookies.html\n\ "# Netscape HTTP Cookie File\n\
# This file was generated by moonfire-nvr login! Edit at your own risk.\n\n\ # https://curl.haxx.se/docs/http-cookies.html\n\
{}\n", curl_cookie(encoded, flags, d))?; # This file was generated by moonfire-nvr login! Edit at your own risk.\n\n\
{}\n",
curl_cookie(encoded, flags, d)
)?;
f.sync_all()?; f.sync_all()?;
println!("Wrote cookie to {}", p.display()); println!("Wrote cookie to {}", p.display());
} else { } else {
@ -120,15 +138,25 @@ pub fn run(args: &Args) -> Result<i32, Error> {
} }
fn curl_cookie(cookie: &str, flags: i32, domain: &str) -> String { fn curl_cookie(cookie: &str, flags: i32, domain: &str) -> String {
format!("{httponly}{domain}\t{tailmatch}\t{path}\t{secure}\t{expires}\t{name}\t{value}", format!(
httponly=if (flags & SessionFlag::HttpOnly as i32) != 0 { "#HttpOnly_" } else { "" }, "{httponly}{domain}\t{tailmatch}\t{path}\t{secure}\t{expires}\t{name}\t{value}",
domain=domain, httponly = if (flags & SessionFlag::HttpOnly as i32) != 0 {
tailmatch="FALSE", "#HttpOnly_"
path="/", } else {
secure=if (flags & SessionFlag::Secure as i32) != 0 { "TRUE" } else { "FALSE" }, ""
expires="9223372036854775807", // 64-bit CURL_OFF_T_MAX, never expires },
name="s", domain = domain,
value=cookie) tailmatch = "FALSE",
path = "/",
secure = if (flags & SessionFlag::Secure as i32) != 0 {
"TRUE"
} else {
"FALSE"
},
expires = "9223372036854775807", // 64-bit CURL_OFF_T_MAX, never expires
name = "s",
value = cookie
)
} }
#[cfg(test)] #[cfg(test)]
@ -137,9 +165,14 @@ mod tests {
#[test] #[test]
fn test_curl_cookie() { fn test_curl_cookie() {
assert_eq!(curl_cookie("o3mx3OntO7GzwwsD54OuyQ4IuipYrwPR2aiULPHSudAa+xIhwWjb+w1TnGRh8Z5Q", assert_eq!(
SessionFlag::HttpOnly as i32, "localhost"), curl_cookie(
"#HttpOnly_localhost\tFALSE\t/\tFALSE\t9223372036854775807\ts\t\ "o3mx3OntO7GzwwsD54OuyQ4IuipYrwPR2aiULPHSudAa+xIhwWjb+w1TnGRh8Z5Q",
o3mx3OntO7GzwwsD54OuyQ4IuipYrwPR2aiULPHSudAa+xIhwWjb+w1TnGRh8Z5Q"); SessionFlag::HttpOnly as i32,
"localhost"
),
"#HttpOnly_localhost\tFALSE\t/\tFALSE\t9223372036854775807\ts\t\
o3mx3OntO7GzwwsD54OuyQ4IuipYrwPR2aiULPHSudAa+xIhwWjb+w1TnGRh8Z5Q"
);
} }
} }

View File

@ -37,8 +37,8 @@ use std::path::Path;
pub mod check; pub mod check;
pub mod config; pub mod config;
pub mod login;
pub mod init; pub mod init;
pub mod login;
pub mod run; pub mod run;
pub mod sql; pub mod sql;
pub mod ts; pub mod ts;
@ -48,23 +48,35 @@ pub mod upgrade;
enum OpenMode { enum OpenMode {
ReadOnly, ReadOnly,
ReadWrite, ReadWrite,
Create Create,
} }
/// Locks the directory without opening the database. /// Locks the directory without opening the database.
/// The returned `dir::Fd` holds the lock and should be kept open as long as the `Connection` is. /// The returned `dir::Fd` holds the lock and should be kept open as long as the `Connection` is.
fn open_dir(db_dir: &Path, mode: OpenMode) -> Result<dir::Fd, Error> { fn open_dir(db_dir: &Path, mode: OpenMode) -> Result<dir::Fd, Error> {
let dir = dir::Fd::open(db_dir, mode == OpenMode::Create) let dir = dir::Fd::open(db_dir, mode == OpenMode::Create).map_err(|e| {
.map_err(|e| e.context(if e == nix::Error::Sys(nix::errno::Errno::ENOENT) { e.context(if e == nix::Error::Sys(nix::errno::Errno::ENOENT) {
format!("db dir {} not found; try running moonfire-nvr init", format!(
db_dir.display()) "db dir {} not found; try running moonfire-nvr init",
} else { db_dir.display()
format!("unable to open db dir {}", db_dir.display()) )
}))?; } else {
format!("unable to open db dir {}", db_dir.display())
})
})?;
let ro = mode == OpenMode::ReadOnly; let ro = mode == OpenMode::ReadOnly;
dir.lock(if ro { FlockArg::LockSharedNonblock } else { FlockArg::LockExclusiveNonblock }) dir.lock(if ro {
.map_err(|e| e.context(format!("unable to get {} lock on db dir {} ", FlockArg::LockSharedNonblock
if ro { "shared" } else { "exclusive" }, db_dir.display())))?; } else {
FlockArg::LockExclusiveNonblock
})
.map_err(|e| {
e.context(format!(
"unable to get {} lock on db dir {} ",
if ro { "shared" } else { "exclusive" },
db_dir.display()
))
})?;
Ok(dir) Ok(dir)
} }
@ -73,8 +85,12 @@ fn open_dir(db_dir: &Path, mode: OpenMode) -> Result<dir::Fd, Error> {
fn open_conn(db_dir: &Path, mode: OpenMode) -> Result<(dir::Fd, rusqlite::Connection), Error> { fn open_conn(db_dir: &Path, mode: OpenMode) -> Result<(dir::Fd, rusqlite::Connection), Error> {
let dir = open_dir(db_dir, mode)?; let dir = open_dir(db_dir, mode)?;
let db_path = db_dir.join("db"); let db_path = db_dir.join("db");
info!("Opening {} in {:?} mode with SQLite version {}", info!(
db_path.display(), mode, rusqlite::version()); "Opening {} in {:?} mode with SQLite version {}",
db_path.display(),
mode,
rusqlite::version()
);
let conn = rusqlite::Connection::open_with_flags( let conn = rusqlite::Connection::open_with_flags(
db_path, db_path,
match mode { match mode {
@ -86,6 +102,7 @@ fn open_conn(db_dir: &Path, mode: OpenMode) -> Result<(dir::Fd, rusqlite::Connec
} | } |
// rusqlite::Connection is not Sync, so there's no reason to tell SQLite3 to use the // rusqlite::Connection is not Sync, so there's no reason to tell SQLite3 to use the
// serialized threading mode. // serialized threading mode.
rusqlite::OpenFlags::SQLITE_OPEN_NO_MUTEX)?; rusqlite::OpenFlags::SQLITE_OPEN_NO_MUTEX,
)?;
Ok((dir, conn)) Ok((dir, conn))
} }

View File

@ -28,34 +28,42 @@
// You should have received a copy of the GNU General Public License // You should have received a copy of the GNU General Public License
// along with this program. If not, see <http://www.gnu.org/licenses/>. // along with this program. If not, see <http://www.gnu.org/licenses/>.
use base::clock;
use crate::stream; use crate::stream;
use crate::streamer; use crate::streamer;
use crate::web; use crate::web;
use base::clock;
use db::{dir, writer}; use db::{dir, writer};
use failure::{Error, bail}; use failure::{bail, Error};
use fnv::FnvHashMap; use fnv::FnvHashMap;
use futures::future::FutureExt; use futures::future::FutureExt;
use hyper::service::{make_service_fn, service_fn}; use hyper::service::{make_service_fn, service_fn};
use log::{info, warn}; use log::{info, warn};
use std::path::PathBuf; use std::path::PathBuf;
use std::sync::Arc;
use std::sync::atomic::{AtomicBool, Ordering}; use std::sync::atomic::{AtomicBool, Ordering};
use std::sync::Arc;
use std::thread; use std::thread;
use structopt::StructOpt; use structopt::StructOpt;
use tokio; use tokio;
use tokio::signal::unix::{SignalKind, signal}; use tokio::signal::unix::{signal, SignalKind};
#[derive(StructOpt)] #[derive(StructOpt)]
pub struct Args { pub struct Args {
/// Directory holding the SQLite3 index database. /// Directory holding the SQLite3 index database.
#[structopt(long, default_value = "/var/lib/moonfire-nvr/db", value_name="path", #[structopt(
parse(from_os_str))] long,
default_value = "/var/lib/moonfire-nvr/db",
value_name = "path",
parse(from_os_str)
)]
db_dir: PathBuf, db_dir: PathBuf,
/// Directory holding user interface files (.html, .js, etc). /// Directory holding user interface files (.html, .js, etc).
#[structopt(long, default_value = "/usr/local/lib/moonfire-nvr/ui", value_name="path", #[structopt(
parse(from_os_str))] long,
default_value = "/usr/local/lib/moonfire-nvr/ui",
value_name = "path",
parse(from_os_str)
)]
ui_dir: std::path::PathBuf, ui_dir: std::path::PathBuf,
/// Bind address for unencrypted HTTP server. /// Bind address for unencrypted HTTP server.
@ -98,7 +106,7 @@ const LOCALTIME_PATH: &'static str = "/etc/localtime";
const TIMEZONE_PATH: &'static str = "/etc/timezone"; const TIMEZONE_PATH: &'static str = "/etc/timezone";
const ZONEINFO_PATHS: [&'static str; 2] = [ const ZONEINFO_PATHS: [&'static str; 2] = [
"/usr/share/zoneinfo/", // Linux, macOS < High Sierra "/usr/share/zoneinfo/", // Linux, macOS < High Sierra
"/var/db/timezone/zoneinfo/" // macOS High Sierra "/var/db/timezone/zoneinfo/", // macOS High Sierra
]; ];
fn trim_zoneinfo(p: &str) -> &str { fn trim_zoneinfo(p: &str) -> &str {
@ -145,25 +153,32 @@ fn resolve_zone() -> Result<String, Error> {
}; };
let p = trim_zoneinfo(localtime_dest); let p = trim_zoneinfo(localtime_dest);
if p.starts_with('/') { if p.starts_with('/') {
bail!("Unable to resolve {} symlink destination {} to a timezone.", bail!(
LOCALTIME_PATH, &localtime_dest); "Unable to resolve {} symlink destination {} to a timezone.",
LOCALTIME_PATH,
&localtime_dest
);
} }
return Ok(p.to_owned()); return Ok(p.to_owned());
}, }
Err(e) => { Err(e) => {
use ::std::io::ErrorKind; use ::std::io::ErrorKind;
if e.kind() != ErrorKind::NotFound && e.kind() != ErrorKind::InvalidInput { if e.kind() != ErrorKind::NotFound && e.kind() != ErrorKind::InvalidInput {
bail!("Unable to read {} symlink: {}", LOCALTIME_PATH, e); bail!("Unable to read {} symlink: {}", LOCALTIME_PATH, e);
} }
}, }
}; };
// If `TIMEZONE_PATH` is a file, use its contents as the zone name. // If `TIMEZONE_PATH` is a file, use its contents as the zone name.
match ::std::fs::read_to_string(TIMEZONE_PATH) { match ::std::fs::read_to_string(TIMEZONE_PATH) {
Ok(z) => return Ok(z), Ok(z) => return Ok(z),
Err(e) => { Err(e) => {
bail!("Unable to resolve timezone from TZ env, {}, or {}. Last error: {}", bail!(
LOCALTIME_PATH, TIMEZONE_PATH, e); "Unable to resolve timezone from TZ env, {}, or {}. Last error: {}",
LOCALTIME_PATH,
TIMEZONE_PATH,
e
);
} }
} }
} }
@ -179,7 +194,12 @@ pub async fn run(args: &Args) -> Result<i32, Error> {
let clocks = clock::RealClocks {}; let clocks = clock::RealClocks {};
let (_db_dir, conn) = super::open_conn( let (_db_dir, conn) = super::open_conn(
&args.db_dir, &args.db_dir,
if args.read_only { super::OpenMode::ReadOnly } else { super::OpenMode::ReadWrite })?; if args.read_only {
super::OpenMode::ReadOnly
} else {
super::OpenMode::ReadWrite
},
)?;
let db = Arc::new(db::Database::new(clocks.clone(), conn, !args.read_only).unwrap()); let db = Arc::new(db::Database::new(clocks.clone(), conn, !args.read_only).unwrap());
info!("Database is loaded."); info!("Database is loaded.");
@ -190,8 +210,11 @@ pub async fn run(args: &Args) -> Result<i32, Error> {
{ {
let mut l = db.lock(); let mut l = db.lock();
let dirs_to_open: Vec<_> = let dirs_to_open: Vec<_> = l
l.streams_by_id().values().filter_map(|s| s.sample_file_dir_id).collect(); .streams_by_id()
.values()
.filter_map(|s| s.sample_file_dir_id)
.collect();
l.open_sample_file_dirs(&dirs_to_open)?; l.open_sample_file_dirs(&dirs_to_open)?;
} }
info!("Directories are opened."); info!("Directories are opened.");
@ -212,7 +235,9 @@ pub async fn run(args: &Args) -> Result<i32, Error> {
let syncers = if !args.read_only { let syncers = if !args.read_only {
let l = db.lock(); let l = db.lock();
let mut dirs = FnvHashMap::with_capacity_and_hasher( let mut dirs = FnvHashMap::with_capacity_and_hasher(
l.sample_file_dirs_by_id().len(), Default::default()); l.sample_file_dirs_by_id().len(),
Default::default(),
);
let streams = l.streams_by_id().len(); let streams = l.streams_by_id().len();
let env = streamer::Environment { let env = streamer::Environment {
db: &db, db: &db,
@ -236,11 +261,7 @@ pub async fn run(args: &Args) -> Result<i32, Error> {
let mut syncers = FnvHashMap::with_capacity_and_hasher(dirs.len(), Default::default()); let mut syncers = FnvHashMap::with_capacity_and_hasher(dirs.len(), Default::default());
for (id, dir) in dirs.drain() { for (id, dir) in dirs.drain() {
let (channel, join) = writer::start_syncer(db.clone(), id)?; let (channel, join) = writer::start_syncer(db.clone(), id)?;
syncers.insert(id, Syncer { syncers.insert(id, Syncer { dir, channel, join });
dir,
channel,
join,
});
} }
// Then start up streams. // Then start up streams.
@ -253,10 +274,14 @@ pub async fn run(args: &Args) -> Result<i32, Error> {
let sample_file_dir_id = match stream.sample_file_dir_id { let sample_file_dir_id = match stream.sample_file_dir_id {
Some(s) => s, Some(s) => s,
None => { None => {
warn!("Can't record stream {} ({}/{}) because it has no sample file dir", warn!(
id, camera.short_name, stream.type_.as_str()); "Can't record stream {} ({}/{}) because it has no sample file dir",
id,
camera.short_name,
stream.type_.as_str()
);
continue; continue;
}, }
}; };
let rotate_offset_sec = streamer::ROTATE_INTERVAL_SEC * i as i64 / streams as i64; let rotate_offset_sec = streamer::ROTATE_INTERVAL_SEC * i as i64 / streams as i64;
let syncer = syncers.get(&sample_file_dir_id).unwrap(); let syncer = syncers.get(&sample_file_dir_id).unwrap();
@ -264,20 +289,33 @@ pub async fn run(args: &Args) -> Result<i32, Error> {
db::StreamType::SUB => object_detector.as_ref().map(|a| Arc::clone(a)), db::StreamType::SUB => object_detector.as_ref().map(|a| Arc::clone(a)),
_ => None, _ => None,
}; };
let mut streamer = streamer::Streamer::new(&env, syncer.dir.clone(), let mut streamer = streamer::Streamer::new(
syncer.channel.clone(), *id, camera, stream, &env,
rotate_offset_sec, syncer.dir.clone(),
streamer::ROTATE_INTERVAL_SEC, syncer.channel.clone(),
object_detector)?; *id,
camera,
stream,
rotate_offset_sec,
streamer::ROTATE_INTERVAL_SEC,
object_detector,
)?;
info!("Starting streamer for {}", streamer.short_name()); info!("Starting streamer for {}", streamer.short_name());
let name = format!("s-{}", streamer.short_name()); let name = format!("s-{}", streamer.short_name());
streamers.push(thread::Builder::new().name(name).spawn(move|| { streamers.push(
streamer.run(); thread::Builder::new()
}).expect("can't create thread")); .name(name)
.spawn(move || {
streamer.run();
})
.expect("can't create thread"),
);
} }
drop(l); drop(l);
Some(syncers) Some(syncers)
} else { None }; } else {
None
};
// Start the web interface. // Start the web interface.
let make_svc = make_service_fn(move |_conn| { let make_svc = make_service_fn(move |_conn| {
@ -286,13 +324,13 @@ pub async fn run(args: &Args) -> Result<i32, Error> {
move |req| Arc::clone(&svc).serve(req) move |req| Arc::clone(&svc).serve(req)
})) }))
}); });
let server = ::hyper::Server::bind(&args.http_addr).tcp_nodelay(true).serve(make_svc); let server = ::hyper::Server::bind(&args.http_addr)
.tcp_nodelay(true)
.serve(make_svc);
let mut int = signal(SignalKind::interrupt())?; let mut int = signal(SignalKind::interrupt())?;
let mut term = signal(SignalKind::terminate())?; let mut term = signal(SignalKind::terminate())?;
let shutdown = futures::future::select( let shutdown = futures::future::select(Box::pin(int.recv()), Box::pin(term.recv()));
Box::pin(int.recv()),
Box::pin(term.recv()));
let (shutdown_tx, shutdown_rx) = futures::channel::oneshot::channel(); let (shutdown_tx, shutdown_rx) = futures::channel::oneshot::channel();
let server = server.with_graceful_shutdown(shutdown_rx.map(|_| ())); let server = server.with_graceful_shutdown(shutdown_rx.map(|_| ()));

View File

@ -30,19 +30,23 @@
//! Subcommand to run a SQLite shell. //! Subcommand to run a SQLite shell.
use super::OpenMode;
use failure::Error; use failure::Error;
use std::ffi::OsString; use std::ffi::OsString;
use std::os::unix::process::CommandExt; use std::os::unix::process::CommandExt;
use std::path::PathBuf; use std::path::PathBuf;
use std::process::Command; use std::process::Command;
use super::OpenMode;
use structopt::StructOpt; use structopt::StructOpt;
#[derive(StructOpt)] #[derive(StructOpt)]
pub struct Args { pub struct Args {
/// Directory holding the SQLite3 index database. /// Directory holding the SQLite3 index database.
#[structopt(long, default_value = "/var/lib/moonfire-nvr/db", value_name="path", #[structopt(
parse(from_os_str))] long,
default_value = "/var/lib/moonfire-nvr/db",
value_name = "path",
parse(from_os_str)
)]
db_dir: PathBuf, db_dir: PathBuf,
/// Opens the database in read-only mode and locks it only for shared access. /// Opens the database in read-only mode and locks it only for shared access.
@ -60,7 +64,11 @@ pub struct Args {
} }
pub fn run(args: &Args) -> Result<i32, Error> { pub fn run(args: &Args) -> Result<i32, Error> {
let mode = if args.read_only { OpenMode::ReadOnly } else { OpenMode::ReadWrite }; let mode = if args.read_only {
OpenMode::ReadOnly
} else {
OpenMode::ReadWrite
};
let _db_dir = super::open_dir(&args.db_dir, mode)?; let _db_dir = super::open_dir(&args.db_dir, mode)?;
let mut db = OsString::new(); let mut db = OsString::new();
db.push("file:"); db.push("file:");
@ -69,5 +77,9 @@ pub fn run(args: &Args) -> Result<i32, Error> {
if args.read_only { if args.read_only {
db.push("?mode=ro"); db.push("?mode=ro");
} }
Err(Command::new("sqlite3").arg(&db).args(&args.arg).exec().into()) Err(Command::new("sqlite3")
.arg(&db)
.args(&args.arg)
.exec()
.into())
} }

View File

@ -31,28 +31,35 @@
/// Upgrades the database schema. /// Upgrades the database schema.
/// ///
/// See `guide/schema.md` for more information. /// See `guide/schema.md` for more information.
use failure::Error; use failure::Error;
use structopt::StructOpt; use structopt::StructOpt;
#[derive(StructOpt)] #[derive(StructOpt)]
pub struct Args { pub struct Args {
#[structopt(long, #[structopt(
help = "Directory holding the SQLite3 index database.", long,
default_value = "/var/lib/moonfire-nvr/db", help = "Directory holding the SQLite3 index database.",
parse(from_os_str))] default_value = "/var/lib/moonfire-nvr/db",
parse(from_os_str)
)]
db_dir: std::path::PathBuf, db_dir: std::path::PathBuf,
#[structopt(help = "When upgrading from schema version 1 to 2, the sample file directory.", #[structopt(
long, parse(from_os_str))] help = "When upgrading from schema version 1 to 2, the sample file directory.",
long,
parse(from_os_str)
)]
sample_file_dir: Option<std::path::PathBuf>, sample_file_dir: Option<std::path::PathBuf>,
#[structopt(help = "Resets the SQLite journal_mode to the specified mode prior to the \ #[structopt(
upgrade. The default, delete, is recommended. off is very dangerous \ help = "Resets the SQLite journal_mode to the specified mode prior to \
but may be desirable in some circumstances. See guide/schema.md for \ the upgrade. The default, delete, is recommended. off is very \
more information. The journal mode will be reset to wal after the \ dangerous but may be desirable in some circumstances. See \
upgrade.", guide/schema.md for more information. The journal mode will be \
long, default_value = "delete")] reset to wal after the upgrade.",
long,
default_value = "delete"
)]
preset_journal: String, preset_journal: String,
#[structopt(help = "Skips the normal post-upgrade vacuum operation.", long)] #[structopt(help = "Skips the normal post-upgrade vacuum operation.", long)]
@ -62,10 +69,16 @@ pub struct Args {
pub fn run(args: &Args) -> Result<i32, Error> { pub fn run(args: &Args) -> Result<i32, Error> {
let (_db_dir, mut conn) = super::open_conn(&args.db_dir, super::OpenMode::ReadWrite)?; let (_db_dir, mut conn) = super::open_conn(&args.db_dir, super::OpenMode::ReadWrite)?;
db::upgrade::run(&db::upgrade::Args { db::upgrade::run(
sample_file_dir: args.sample_file_dir.as_ref().map(std::path::PathBuf::as_path), &db::upgrade::Args {
preset_journal: &args.preset_journal, sample_file_dir: args
no_vacuum: args.no_vacuum, .sample_file_dir
}, &mut conn)?; .as_ref()
.map(std::path::PathBuf::as_path),
preset_journal: &args.preset_journal,
no_vacuum: args.no_vacuum,
},
&mut conn,
)?;
Ok(0) Ok(0)
} }

View File

@ -41,7 +41,7 @@
//! would be more trouble than it's worth. //! would be more trouble than it's worth.
use byteorder::{BigEndian, ByteOrder, WriteBytesExt}; use byteorder::{BigEndian, ByteOrder, WriteBytesExt};
use failure::{Error, bail, format_err}; use failure::{bail, format_err, Error};
use std::convert::TryFrom; use std::convert::TryFrom;
// See ISO/IEC 14496-10 table 7-1 - NAL unit type codes, syntax element categories, and NAL unit // See ISO/IEC 14496-10 table 7-1 - NAL unit type codes, syntax element categories, and NAL unit
@ -49,13 +49,13 @@ use std::convert::TryFrom;
const NAL_UNIT_SEQ_PARAMETER_SET: u8 = 7; const NAL_UNIT_SEQ_PARAMETER_SET: u8 = 7;
const NAL_UNIT_PIC_PARAMETER_SET: u8 = 8; const NAL_UNIT_PIC_PARAMETER_SET: u8 = 8;
const NAL_UNIT_TYPE_MASK: u8 = 0x1F; // bottom 5 bits of first byte of unit. const NAL_UNIT_TYPE_MASK: u8 = 0x1F; // bottom 5 bits of first byte of unit.
// For certain common sub stream anamorphic resolutions, add a pixel aspect ratio box. // For certain common sub stream anamorphic resolutions, add a pixel aspect ratio box.
const PIXEL_ASPECT_RATIOS: [((u16, u16), (u16, u16)); 4] = [ const PIXEL_ASPECT_RATIOS: [((u16, u16), (u16, u16)); 4] = [
((320, 240), ( 4, 3)), ((320, 240), (4, 3)),
((352, 240), (40, 33)), ((352, 240), (40, 33)),
((640, 480), ( 4, 3)), ((640, 480), (4, 3)),
((704, 480), (40, 33)), ((704, 480), (40, 33)),
]; ];
@ -90,18 +90,22 @@ fn default_pixel_aspect_ratio(width: u16, height: u16) -> (u16, u16) {
/// TODO: detect invalid byte streams. For example, several 0x00s not followed by a 0x01, a stream /// TODO: detect invalid byte streams. For example, several 0x00s not followed by a 0x01, a stream
/// stream not starting with 0x00 0x00 0x00 0x01, or an empty NAL unit. /// stream not starting with 0x00 0x00 0x00 0x01, or an empty NAL unit.
fn decode_h264_annex_b<'a, F>(mut data: &'a [u8], mut f: F) -> Result<(), Error> fn decode_h264_annex_b<'a, F>(mut data: &'a [u8], mut f: F) -> Result<(), Error>
where F: FnMut(&'a [u8]) -> Result<(), Error> { where
F: FnMut(&'a [u8]) -> Result<(), Error>,
{
let start_code = &b"\x00\x00\x01"[..]; let start_code = &b"\x00\x00\x01"[..];
use nom::FindSubstring; use nom::FindSubstring;
'outer: while let Some(pos) = data.find_substring(start_code) { 'outer: while let Some(pos) = data.find_substring(start_code) {
let mut unit = &data[0..pos]; let mut unit = &data[0..pos];
data = &data[pos + start_code.len() ..]; data = &data[pos + start_code.len()..];
// Have zero or more bytes that end in a start code. Strip out any trailing 0x00s and // Have zero or more bytes that end in a start code. Strip out any trailing 0x00s and
// process the unit if there's anything left. // process the unit if there's anything left.
loop { loop {
match unit.last() { match unit.last() {
None => continue 'outer, None => continue 'outer,
Some(b) if *b == 0 => { unit = &unit[..unit.len()-1]; }, Some(b) if *b == 0 => {
unit = &unit[..unit.len() - 1];
}
Some(_) => break, Some(_) => break,
} }
} }
@ -139,8 +143,8 @@ fn parse_annex_b_extra_data(data: &[u8]) -> Result<(&[u8], &[u8]), Error> {
/// <https://github.com/dholroyd/h264-reader/issues/4>. /// <https://github.com/dholroyd/h264-reader/issues/4>.
fn decode(encoded: &[u8]) -> Vec<u8> { fn decode(encoded: &[u8]) -> Vec<u8> {
struct NalRead(Vec<u8>); struct NalRead(Vec<u8>);
use h264_reader::Context;
use h264_reader::nal::NalHandler; use h264_reader::nal::NalHandler;
use h264_reader::Context;
impl NalHandler for NalRead { impl NalHandler for NalRead {
type Ctx = (); type Ctx = ();
fn start(&mut self, _ctx: &mut Context<Self::Ctx>, _header: h264_reader::nal::NalHeader) {} fn start(&mut self, _ctx: &mut Context<Self::Ctx>, _header: h264_reader::nal::NalHeader) {}
@ -177,8 +181,7 @@ impl ExtraData {
let ctx; let ctx;
let sps_owner; let sps_owner;
let sps; // reference to either within ctx or to sps_owner. let sps; // reference to either within ctx or to sps_owner.
if extradata.starts_with(b"\x00\x00\x00\x01") || if extradata.starts_with(b"\x00\x00\x00\x01") || extradata.starts_with(b"\x00\x00\x01") {
extradata.starts_with(b"\x00\x00\x01") {
// ffmpeg supplied "extradata" in Annex B format. // ffmpeg supplied "extradata" in Annex B format.
let (s, p) = parse_annex_b_extra_data(extradata)?; let (s, p) = parse_annex_b_extra_data(extradata)?;
let rbsp = decode(&s[1..]); let rbsp = decode(&s[1..]);
@ -196,9 +199,11 @@ impl ExtraData {
if avcc.num_of_sequence_parameter_sets() != 1 { if avcc.num_of_sequence_parameter_sets() != 1 {
bail!("Multiple SPSs!"); bail!("Multiple SPSs!");
} }
ctx = avcc.create_context(()) ctx = avcc
.create_context(())
.map_err(|e| format_err!("Can't load SPS+PPS: {:?}", e))?; .map_err(|e| format_err!("Can't load SPS+PPS: {:?}", e))?;
sps = ctx.sps_by_id(h264_reader::nal::pps::ParamSetId::from_u32(0).unwrap()) sps = ctx
.sps_by_id(h264_reader::nal::pps::ParamSetId::from_u32(0).unwrap())
.ok_or_else(|| format_err!("No SPS 0"))?; .ok_or_else(|| format_err!("No SPS 0"))?;
}; };
@ -212,23 +217,23 @@ impl ExtraData {
sample_entry.extend_from_slice(b"\x00\x00\x00\x00avc1\x00\x00\x00\x00\x00\x00\x00\x01"); sample_entry.extend_from_slice(b"\x00\x00\x00\x00avc1\x00\x00\x00\x00\x00\x00\x00\x01");
// VisualSampleEntry, ISO/IEC 14496-12 section 12.1.3. // VisualSampleEntry, ISO/IEC 14496-12 section 12.1.3.
sample_entry.extend_from_slice(&[0; 16]); // pre-defined + reserved sample_entry.extend_from_slice(&[0; 16]); // pre-defined + reserved
sample_entry.write_u16::<BigEndian>(width)?; sample_entry.write_u16::<BigEndian>(width)?;
sample_entry.write_u16::<BigEndian>(height)?; sample_entry.write_u16::<BigEndian>(height)?;
sample_entry.extend_from_slice(&[ sample_entry.extend_from_slice(&[
0x00, 0x48, 0x00, 0x00, // horizresolution 0x00, 0x48, 0x00, 0x00, // horizresolution
0x00, 0x48, 0x00, 0x00, // vertresolution 0x00, 0x48, 0x00, 0x00, // vertresolution
0x00, 0x00, 0x00, 0x00, // reserved 0x00, 0x00, 0x00, 0x00, // reserved
0x00, 0x01, // frame count 0x00, 0x01, // frame count
0x00, 0x00, 0x00, 0x00, // compressorname 0x00, 0x00, 0x00, 0x00, // compressorname
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, //
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, //
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, //
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, //
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, //
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, //
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, //
0x00, 0x18, 0xff, 0xff, // depth + pre_defined 0x00, 0x18, 0xff, 0xff, // depth + pre_defined
]); ]);
// AVCSampleEntry, ISO/IEC 14496-15 section 5.3.4.1. // AVCSampleEntry, ISO/IEC 14496-15 section 5.3.4.1.
@ -245,10 +250,10 @@ impl ExtraData {
// "emulation_prevention_three_byte" in ISO/IEC 14496-10 section 7.4. // "emulation_prevention_three_byte" in ISO/IEC 14496-10 section 7.4.
// It looks like 00 is not a valid value of profile_idc, so this distinction // It looks like 00 is not a valid value of profile_idc, so this distinction
// shouldn't be relevant here. And ffmpeg seems to ignore it. // shouldn't be relevant here. And ffmpeg seems to ignore it.
sample_entry.push(1); // configurationVersion sample_entry.push(1); // configurationVersion
sample_entry.push(sps[1]); // profile_idc . AVCProfileIndication sample_entry.push(sps[1]); // profile_idc . AVCProfileIndication
sample_entry.push(sps[2]); // ...misc bits... . profile_compatibility sample_entry.push(sps[2]); // ...misc bits... . profile_compatibility
sample_entry.push(sps[3]); // level_idc . AVCLevelIndication sample_entry.push(sps[3]); // level_idc . AVCLevelIndication
// Hardcode lengthSizeMinusOne to 3, matching TransformSampleData's 4-byte // Hardcode lengthSizeMinusOne to 3, matching TransformSampleData's 4-byte
// lengths. // lengths.
@ -260,41 +265,48 @@ impl ExtraData {
sample_entry.push(0xe1); sample_entry.push(0xe1);
sample_entry.write_u16::<BigEndian>(u16::try_from(sps.len())?)?; sample_entry.write_u16::<BigEndian>(u16::try_from(sps.len())?)?;
sample_entry.extend_from_slice(sps); sample_entry.extend_from_slice(sps);
sample_entry.push(1); // # of PPSs. sample_entry.push(1); // # of PPSs.
sample_entry.write_u16::<BigEndian>(u16::try_from(pps.len())?)?; sample_entry.write_u16::<BigEndian>(u16::try_from(pps.len())?)?;
sample_entry.extend_from_slice(pps); sample_entry.extend_from_slice(pps);
} else { } else {
sample_entry.extend_from_slice(extradata); sample_entry.extend_from_slice(extradata);
}; };
// Fix up avc1 and avcC box lengths. // Fix up avc1 and avcC box lengths.
let cur_pos = sample_entry.len(); let cur_pos = sample_entry.len();
BigEndian::write_u32(&mut sample_entry[avcc_len_pos .. avcc_len_pos + 4], BigEndian::write_u32(
u32::try_from(cur_pos - avcc_len_pos)?); &mut sample_entry[avcc_len_pos..avcc_len_pos + 4],
u32::try_from(cur_pos - avcc_len_pos)?,
);
// PixelAspectRatioBox, ISO/IEC 14496-12 section 12.1.4.2. // PixelAspectRatioBox, ISO/IEC 14496-12 section 12.1.4.2.
// Write a PixelAspectRatioBox if necessary, as the sub streams can be be anamorphic. // Write a PixelAspectRatioBox if necessary, as the sub streams can be be anamorphic.
let pasp = sps.vui_parameters.as_ref() let pasp = sps
.and_then(|v| v.aspect_ratio_info.as_ref()) .vui_parameters
.and_then(|a| a.clone().get()) .as_ref()
.unwrap_or_else(|| default_pixel_aspect_ratio(width, height)); .and_then(|v| v.aspect_ratio_info.as_ref())
.and_then(|a| a.clone().get())
.unwrap_or_else(|| default_pixel_aspect_ratio(width, height));
if pasp != (1, 1) { if pasp != (1, 1) {
sample_entry.extend_from_slice(b"\x00\x00\x00\x10pasp"); // length + box name sample_entry.extend_from_slice(b"\x00\x00\x00\x10pasp"); // length + box name
sample_entry.write_u32::<BigEndian>(pasp.0.into())?; sample_entry.write_u32::<BigEndian>(pasp.0.into())?;
sample_entry.write_u32::<BigEndian>(pasp.1.into())?; sample_entry.write_u32::<BigEndian>(pasp.1.into())?;
} }
let cur_pos = sample_entry.len(); let cur_pos = sample_entry.len();
BigEndian::write_u32(&mut sample_entry[avc1_len_pos .. avc1_len_pos + 4], BigEndian::write_u32(
u32::try_from(cur_pos - avc1_len_pos)?); &mut sample_entry[avc1_len_pos..avc1_len_pos + 4],
u32::try_from(cur_pos - avc1_len_pos)?,
);
let profile_idc = sample_entry[103]; let profile_idc = sample_entry[103];
let constraint_flags = sample_entry[104]; let constraint_flags = sample_entry[104];
let level_idc = sample_entry[105]; let level_idc = sample_entry[105];
let rfc6381_codec = let rfc6381_codec = format!(
format!("avc1.{:02x}{:02x}{:02x}", profile_idc, constraint_flags, level_idc); "avc1.{:02x}{:02x}{:02x}",
profile_idc, constraint_flags, level_idc
);
Ok(ExtraData { Ok(ExtraData {
entry: db::VideoSampleEntryToInsert { entry: db::VideoSampleEntryToInsert {
data: sample_entry, data: sample_entry,
@ -321,7 +333,7 @@ pub fn transform_sample_data(annexb_sample: &[u8], avc_sample: &mut Vec<u8>) ->
avc_sample.reserve(annexb_sample.len() + 4); avc_sample.reserve(annexb_sample.len() + 4);
decode_h264_annex_b(annexb_sample, |unit| { decode_h264_annex_b(annexb_sample, |unit| {
// 4-byte length; this must match ParseExtraData's lengthSizeMinusOne == 3. // 4-byte length; this must match ParseExtraData's lengthSizeMinusOne == 3.
avc_sample.write_u32::<BigEndian>(unit.len() as u32)?; // length avc_sample.write_u32::<BigEndian>(unit.len() as u32)?; // length
avc_sample.extend_from_slice(unit); avc_sample.extend_from_slice(unit);
Ok(()) Ok(())
})?; })?;
@ -332,6 +344,7 @@ pub fn transform_sample_data(annexb_sample: &[u8], avc_sample: &mut Vec<u8>) ->
mod tests { mod tests {
use db::testutil; use db::testutil;
#[rustfmt::skip]
const ANNEX_B_TEST_INPUT: [u8; 35] = [ const ANNEX_B_TEST_INPUT: [u8; 35] = [
0x00, 0x00, 0x00, 0x01, 0x67, 0x4d, 0x00, 0x1f, 0x00, 0x00, 0x00, 0x01, 0x67, 0x4d, 0x00, 0x1f,
0x9a, 0x66, 0x02, 0x80, 0x2d, 0xff, 0x35, 0x01, 0x9a, 0x66, 0x02, 0x80, 0x2d, 0xff, 0x35, 0x01,
@ -340,6 +353,7 @@ mod tests {
0xee, 0x3c, 0x80, 0xee, 0x3c, 0x80,
]; ];
#[rustfmt::skip]
const AVC_DECODER_CONFIG_TEST_INPUT: [u8; 38] = [ const AVC_DECODER_CONFIG_TEST_INPUT: [u8; 38] = [
0x01, 0x4d, 0x00, 0x1f, 0xff, 0xe1, 0x00, 0x17, 0x01, 0x4d, 0x00, 0x1f, 0xff, 0xe1, 0x00, 0x17,
0x67, 0x4d, 0x00, 0x1f, 0x9a, 0x66, 0x02, 0x80, 0x67, 0x4d, 0x00, 0x1f, 0x9a, 0x66, 0x02, 0x80,
@ -348,6 +362,7 @@ mod tests {
0x00, 0x04, 0x68, 0xee, 0x3c, 0x80, 0x00, 0x04, 0x68, 0xee, 0x3c, 0x80,
]; ];
#[rustfmt::skip]
const TEST_OUTPUT: [u8; 132] = [ const TEST_OUTPUT: [u8; 132] = [
0x00, 0x00, 0x00, 0x84, 0x61, 0x76, 0x63, 0x31, 0x00, 0x00, 0x00, 0x84, 0x61, 0x76, 0x63, 0x31,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01,
@ -376,8 +391,9 @@ mod tests {
super::decode_h264_annex_b(data, |p| { super::decode_h264_annex_b(data, |p| {
pieces.push(p); pieces.push(p);
Ok(()) Ok(())
}).unwrap(); })
assert_eq!(&pieces, &[&data[4 .. 27], &data[31 ..]]); .unwrap();
assert_eq!(&pieces, &[&data[4..27], &data[31..]]);
} }
#[test] #[test]
@ -404,6 +420,7 @@ mod tests {
#[test] #[test]
fn test_transform_sample_data() { fn test_transform_sample_data() {
testutil::init(); testutil::init();
#[rustfmt::skip]
const INPUT: [u8; 64] = [ const INPUT: [u8; 64] = [
0x00, 0x00, 0x00, 0x01, 0x67, 0x4d, 0x00, 0x1f, 0x00, 0x00, 0x00, 0x01, 0x67, 0x4d, 0x00, 0x1f,
0x9a, 0x66, 0x02, 0x80, 0x2d, 0xff, 0x35, 0x01, 0x9a, 0x66, 0x02, 0x80, 0x2d, 0xff, 0x35, 0x01,
@ -420,6 +437,7 @@ mod tests {
0xff, 0x8c, 0xd6, 0x35, 0xff, 0x8c, 0xd6, 0x35,
// (truncated) // (truncated)
]; ];
#[rustfmt::skip]
const EXPECTED_OUTPUT: [u8; 64] = [ const EXPECTED_OUTPUT: [u8; 64] = [
0x00, 0x00, 0x00, 0x17, 0x67, 0x4d, 0x00, 0x1f, 0x00, 0x00, 0x00, 0x17, 0x67, 0x4d, 0x00, 0x1f,
0x9a, 0x66, 0x02, 0x80, 0x2d, 0xff, 0x35, 0x01, 0x9a, 0x66, 0x02, 0x80, 0x2d, 0xff, 0x35, 0x01,

View File

@ -29,15 +29,15 @@
// along with this program. If not, see <http://www.gnu.org/licenses/>. // along with this program. If not, see <http://www.gnu.org/licenses/>.
use db::auth::SessionHash; use db::auth::SessionHash;
use failure::{Error, format_err}; use failure::{format_err, Error};
use serde::{Deserialize, Serialize};
use serde::ser::{Error as _, SerializeMap, SerializeSeq, Serializer}; use serde::ser::{Error as _, SerializeMap, SerializeSeq, Serializer};
use serde::{Deserialize, Serialize};
use std::collections::BTreeMap; use std::collections::BTreeMap;
use std::ops::Not; use std::ops::Not;
use uuid::Uuid; use uuid::Uuid;
#[derive(Serialize)] #[derive(Serialize)]
#[serde(rename_all="camelCase")] #[serde(rename_all = "camelCase")]
pub struct TopLevel<'a> { pub struct TopLevel<'a> {
pub time_zone_name: &'a str, pub time_zone_name: &'a str,
@ -57,7 +57,7 @@ pub struct TopLevel<'a> {
} }
#[derive(Debug, Serialize)] #[derive(Debug, Serialize)]
#[serde(rename_all="camelCase")] #[serde(rename_all = "camelCase")]
pub struct Session { pub struct Session {
pub username: String, pub username: String,
@ -67,7 +67,9 @@ pub struct Session {
impl Session { impl Session {
fn serialize_csrf<S>(csrf: &SessionHash, serializer: S) -> Result<S::Ok, S::Error> fn serialize_csrf<S>(csrf: &SessionHash, serializer: S) -> Result<S::Ok, S::Error>
where S: Serializer { where
S: Serializer,
{
let mut tmp = [0u8; 32]; let mut tmp = [0u8; 32];
csrf.encode_base64(&mut tmp); csrf.encode_base64(&mut tmp);
serializer.serialize_str(::std::str::from_utf8(&tmp[..]).expect("base64 is UTF-8")) serializer.serialize_str(::std::str::from_utf8(&tmp[..]).expect("base64 is UTF-8"))
@ -77,7 +79,7 @@ impl Session {
/// JSON serialization wrapper for a single camera when processing `/api/` and /// JSON serialization wrapper for a single camera when processing `/api/` and
/// `/api/cameras/<uuid>/`. See `design/api.md` for details. /// `/api/cameras/<uuid>/`. See `design/api.md` for details.
#[derive(Debug, Serialize)] #[derive(Debug, Serialize)]
#[serde(rename_all="camelCase")] #[serde(rename_all = "camelCase")]
pub struct Camera<'a> { pub struct Camera<'a> {
pub uuid: Uuid, pub uuid: Uuid,
pub short_name: &'a str, pub short_name: &'a str,
@ -91,7 +93,7 @@ pub struct Camera<'a> {
} }
#[derive(Debug, Serialize)] #[derive(Debug, Serialize)]
#[serde(rename_all="camelCase")] #[serde(rename_all = "camelCase")]
pub struct CameraConfig<'a> { pub struct CameraConfig<'a> {
pub onvif_host: &'a str, pub onvif_host: &'a str,
pub username: &'a str, pub username: &'a str,
@ -99,7 +101,7 @@ pub struct CameraConfig<'a> {
} }
#[derive(Debug, Serialize)] #[derive(Debug, Serialize)]
#[serde(rename_all="camelCase")] #[serde(rename_all = "camelCase")]
pub struct Stream<'a> { pub struct Stream<'a> {
pub retain_bytes: i64, pub retain_bytes: i64,
pub min_start_time_90k: Option<i64>, pub min_start_time_90k: Option<i64>,
@ -117,13 +119,13 @@ pub struct Stream<'a> {
} }
#[derive(Debug, Serialize)] #[derive(Debug, Serialize)]
#[serde(rename_all="camelCase")] #[serde(rename_all = "camelCase")]
pub struct StreamConfig<'a> { pub struct StreamConfig<'a> {
pub rtsp_url: &'a str, pub rtsp_url: &'a str,
} }
#[derive(Serialize)] #[derive(Serialize)]
#[serde(rename_all="camelCase")] #[serde(rename_all = "camelCase")]
pub struct Signal<'a> { pub struct Signal<'a> {
pub id: u32, pub id: u32,
#[serde(serialize_with = "Signal::serialize_cameras")] #[serde(serialize_with = "Signal::serialize_cameras")]
@ -134,27 +136,27 @@ pub struct Signal<'a> {
} }
#[derive(Deserialize)] #[derive(Deserialize)]
#[serde(rename_all="camelCase")] #[serde(rename_all = "camelCase")]
pub enum PostSignalsEndBase { pub enum PostSignalsEndBase {
Epoch, Epoch,
Now, Now,
} }
#[derive(Deserialize)] #[derive(Deserialize)]
#[serde(rename_all="camelCase")] #[serde(rename_all = "camelCase")]
pub struct LoginRequest<'a> { pub struct LoginRequest<'a> {
pub username: &'a str, pub username: &'a str,
pub password: String, pub password: String,
} }
#[derive(Deserialize)] #[derive(Deserialize)]
#[serde(rename_all="camelCase")] #[serde(rename_all = "camelCase")]
pub struct LogoutRequest<'a> { pub struct LogoutRequest<'a> {
pub csrf: &'a str, pub csrf: &'a str,
} }
#[derive(Deserialize)] #[derive(Deserialize)]
#[serde(rename_all="camelCase")] #[serde(rename_all = "camelCase")]
pub struct PostSignalsRequest { pub struct PostSignalsRequest {
pub signal_ids: Vec<u32>, pub signal_ids: Vec<u32>,
pub states: Vec<u16>, pub states: Vec<u16>,
@ -164,13 +166,13 @@ pub struct PostSignalsRequest {
} }
#[derive(Serialize)] #[derive(Serialize)]
#[serde(rename_all="camelCase")] #[serde(rename_all = "camelCase")]
pub struct PostSignalsResponse { pub struct PostSignalsResponse {
pub time_90k: i64, pub time_90k: i64,
} }
#[derive(Default, Serialize)] #[derive(Default, Serialize)]
#[serde(rename_all="camelCase")] #[serde(rename_all = "camelCase")]
pub struct Signals { pub struct Signals {
pub times_90k: Vec<i64>, pub times_90k: Vec<i64>,
pub signal_ids: Vec<u32>, pub signal_ids: Vec<u32>,
@ -178,7 +180,7 @@ pub struct Signals {
} }
#[derive(Debug, Serialize)] #[derive(Debug, Serialize)]
#[serde(rename_all="camelCase")] #[serde(rename_all = "camelCase")]
pub struct SignalType<'a> { pub struct SignalType<'a> {
pub uuid: Uuid, pub uuid: Uuid,
@ -187,7 +189,7 @@ pub struct SignalType<'a> {
} }
#[derive(Debug, Serialize)] #[derive(Debug, Serialize)]
#[serde(rename_all="camelCase")] #[serde(rename_all = "camelCase")]
pub struct SignalTypeState<'a> { pub struct SignalTypeState<'a> {
value: u16, value: u16,
name: &'a str, name: &'a str,
@ -198,8 +200,12 @@ pub struct SignalTypeState<'a> {
} }
impl<'a> Camera<'a> { impl<'a> Camera<'a> {
pub fn wrap(c: &'a db::Camera, db: &'a db::LockedDatabase, include_days: bool, pub fn wrap(
include_config: bool) -> Result<Self, Error> { c: &'a db::Camera,
db: &'a db::LockedDatabase,
include_days: bool,
include_config: bool,
) -> Result<Self, Error> {
Ok(Camera { Ok(Camera {
uuid: c.uuid, uuid: c.uuid,
short_name: &c.short_name, short_name: &c.short_name,
@ -220,11 +226,17 @@ impl<'a> Camera<'a> {
} }
fn serialize_streams<S>(streams: &[Option<Stream>; 2], serializer: S) -> Result<S::Ok, S::Error> fn serialize_streams<S>(streams: &[Option<Stream>; 2], serializer: S) -> Result<S::Ok, S::Error>
where S: Serializer { where
S: Serializer,
{
let mut map = serializer.serialize_map(Some(streams.len()))?; let mut map = serializer.serialize_map(Some(streams.len()))?;
for (i, s) in streams.iter().enumerate() { for (i, s) in streams.iter().enumerate() {
if let &Some(ref s) = s { if let &Some(ref s) = s {
map.serialize_key(db::StreamType::from_index(i).expect("invalid stream type index").as_str())?; map.serialize_key(
db::StreamType::from_index(i)
.expect("invalid stream type index")
.as_str(),
)?;
map.serialize_value(s)?; map.serialize_value(s)?;
} }
} }
@ -233,13 +245,20 @@ impl<'a> Camera<'a> {
} }
impl<'a> Stream<'a> { impl<'a> Stream<'a> {
fn wrap(db: &'a db::LockedDatabase, id: Option<i32>, include_days: bool, include_config: bool) fn wrap(
-> Result<Option<Self>, Error> { db: &'a db::LockedDatabase,
id: Option<i32>,
include_days: bool,
include_config: bool,
) -> Result<Option<Self>, Error> {
let id = match id { let id = match id {
Some(id) => id, Some(id) => id,
None => return Ok(None), None => return Ok(None),
}; };
let s = db.streams_by_id().get(&id).ok_or_else(|| format_err!("missing stream {}", id))?; let s = db
.streams_by_id()
.get(&id)
.ok_or_else(|| format_err!("missing stream {}", id))?;
Ok(Some(Stream { Ok(Some(Stream {
retain_bytes: s.retain_bytes, retain_bytes: s.retain_bytes,
min_start_time_90k: s.range.as_ref().map(|r| r.start.0), min_start_time_90k: s.range.as_ref().map(|r| r.start.0),
@ -257,9 +276,13 @@ impl<'a> Stream<'a> {
})) }))
} }
fn serialize_days<S>(days: &Option<BTreeMap<db::StreamDayKey, db::StreamDayValue>>, fn serialize_days<S>(
serializer: S) -> Result<S::Ok, S::Error> days: &Option<BTreeMap<db::StreamDayKey, db::StreamDayValue>>,
where S: Serializer { serializer: S,
) -> Result<S::Ok, S::Error>
where
S: Serializer,
{
let days = match days.as_ref() { let days = match days.as_ref() {
Some(d) => d, Some(d) => d,
None => return serializer.serialize_none(), None => return serializer.serialize_none(),
@ -268,7 +291,7 @@ impl<'a> Stream<'a> {
for (k, v) in days { for (k, v) in days {
map.serialize_key(k.as_ref())?; map.serialize_key(k.as_ref())?;
let bounds = k.bounds(); let bounds = k.bounds();
map.serialize_value(&StreamDayValue{ map.serialize_value(&StreamDayValue {
start_time_90k: bounds.start.0, start_time_90k: bounds.start.0,
end_time_90k: bounds.end.0, end_time_90k: bounds.end.0,
total_duration_90k: v.duration.0, total_duration_90k: v.duration.0,
@ -289,16 +312,19 @@ impl<'a> Signal<'a> {
} }
} }
fn serialize_cameras<S>(cameras: &(&db::Signal, &db::LockedDatabase), fn serialize_cameras<S>(
serializer: S) -> Result<S::Ok, S::Error> cameras: &(&db::Signal, &db::LockedDatabase),
where S: Serializer { serializer: S,
) -> Result<S::Ok, S::Error>
where
S: Serializer,
{
let (s, db) = cameras; let (s, db) = cameras;
let mut map = serializer.serialize_map(Some(s.cameras.len()))?; let mut map = serializer.serialize_map(Some(s.cameras.len()))?;
for sc in &s.cameras { for sc in &s.cameras {
let c = db.cameras_by_id() let c = db.cameras_by_id().get(&sc.camera_id).ok_or_else(|| {
.get(&sc.camera_id) S::Error::custom(format!("signal has missing camera id {}", sc.camera_id))
.ok_or_else(|| S::Error::custom(format!("signal has missing camera id {}", })?;
sc.camera_id)))?;
map.serialize_key(&c.uuid)?; map.serialize_key(&c.uuid)?;
map.serialize_value(match sc.type_ { map.serialize_value(match sc.type_ {
db::signal::SignalCameraType::Direct => "direct", db::signal::SignalCameraType::Direct => "direct",
@ -317,9 +343,10 @@ impl<'a> SignalType<'a> {
} }
} }
fn serialize_states<S>(type_: &db::signal::Type, fn serialize_states<S>(type_: &db::signal::Type, serializer: S) -> Result<S::Ok, S::Error>
serializer: S) -> Result<S::Ok, S::Error> where
where S: Serializer { S: Serializer,
{
let mut seq = serializer.serialize_seq(Some(type_.states.len()))?; let mut seq = serializer.serialize_seq(Some(type_.states.len()))?;
for s in &type_.states { for s in &type_.states {
seq.serialize_element(&SignalTypeState::wrap(s))?; seq.serialize_element(&SignalTypeState::wrap(s))?;
@ -340,7 +367,7 @@ impl<'a> SignalTypeState<'a> {
} }
#[derive(Debug, Serialize)] #[derive(Debug, Serialize)]
#[serde(rename_all="camelCase")] #[serde(rename_all = "camelCase")]
struct StreamDayValue { struct StreamDayValue {
pub start_time_90k: i64, pub start_time_90k: i64,
pub end_time_90k: i64, pub end_time_90k: i64,
@ -350,24 +377,33 @@ struct StreamDayValue {
impl<'a> TopLevel<'a> { impl<'a> TopLevel<'a> {
/// Serializes cameras as a list (rather than a map), optionally including the `days` and /// Serializes cameras as a list (rather than a map), optionally including the `days` and
/// `cameras` fields. /// `cameras` fields.
fn serialize_cameras<S>(cameras: &(&db::LockedDatabase, bool, bool), fn serialize_cameras<S>(
serializer: S) -> Result<S::Ok, S::Error> cameras: &(&db::LockedDatabase, bool, bool),
where S: Serializer { serializer: S,
) -> Result<S::Ok, S::Error>
where
S: Serializer,
{
let (db, include_days, include_config) = *cameras; let (db, include_days, include_config) = *cameras;
let cs = db.cameras_by_id(); let cs = db.cameras_by_id();
let mut seq = serializer.serialize_seq(Some(cs.len()))?; let mut seq = serializer.serialize_seq(Some(cs.len()))?;
for (_, c) in cs { for (_, c) in cs {
seq.serialize_element( seq.serialize_element(
&Camera::wrap(c, db, include_days, include_config) &Camera::wrap(c, db, include_days, include_config)
.map_err(|e| S::Error::custom(e))?)?; .map_err(|e| S::Error::custom(e))?,
)?;
} }
seq.end() seq.end()
} }
/// Serializes signals as a list (rather than a map), optionally including the `days` field. /// Serializes signals as a list (rather than a map), optionally including the `days` field.
fn serialize_signals<S>(signals: &(&db::LockedDatabase, bool), fn serialize_signals<S>(
serializer: S) -> Result<S::Ok, S::Error> signals: &(&db::LockedDatabase, bool),
where S: Serializer { serializer: S,
) -> Result<S::Ok, S::Error>
where
S: Serializer,
{
let (db, include_days) = *signals; let (db, include_days) = *signals;
let ss = db.signals_by_id(); let ss = db.signals_by_id();
let mut seq = serializer.serialize_seq(Some(ss.len()))?; let mut seq = serializer.serialize_seq(Some(ss.len()))?;
@ -378,9 +414,10 @@ impl<'a> TopLevel<'a> {
} }
/// Serializes signals as a list (rather than a map), optionally including the `days` field. /// Serializes signals as a list (rather than a map), optionally including the `days` field.
fn serialize_signal_types<S>(db: &db::LockedDatabase, fn serialize_signal_types<S>(db: &db::LockedDatabase, serializer: S) -> Result<S::Ok, S::Error>
serializer: S) -> Result<S::Ok, S::Error> where
where S: Serializer { S: Serializer,
{
let ss = db.signal_types_by_uuid(); let ss = db.signal_types_by_uuid();
let mut seq = serializer.serialize_seq(Some(ss.len()))?; let mut seq = serializer.serialize_seq(Some(ss.len()))?;
for (u, t) in ss { for (u, t) in ss {
@ -391,7 +428,7 @@ impl<'a> TopLevel<'a> {
} }
#[derive(Serialize)] #[derive(Serialize)]
#[serde(rename_all="camelCase")] #[serde(rename_all = "camelCase")]
pub struct ListRecordings<'a> { pub struct ListRecordings<'a> {
pub recordings: Vec<Recording>, pub recordings: Vec<Recording>,
@ -403,22 +440,27 @@ pub struct ListRecordings<'a> {
} }
impl<'a> ListRecordings<'a> { impl<'a> ListRecordings<'a> {
fn serialize_video_sample_entries<S>(video_sample_entries: &(&db::LockedDatabase, Vec<i32>), fn serialize_video_sample_entries<S>(
serializer: S) -> Result<S::Ok, S::Error> video_sample_entries: &(&db::LockedDatabase, Vec<i32>),
where S: Serializer { serializer: S,
) -> Result<S::Ok, S::Error>
where
S: Serializer,
{
let (db, ref v) = *video_sample_entries; let (db, ref v) = *video_sample_entries;
let mut map = serializer.serialize_map(Some(v.len()))?; let mut map = serializer.serialize_map(Some(v.len()))?;
for id in v { for id in v {
map.serialize_entry( map.serialize_entry(
id, id,
&VideoSampleEntry::from(&db.video_sample_entries_by_id().get(id).unwrap()))?; &VideoSampleEntry::from(&db.video_sample_entries_by_id().get(id).unwrap()),
)?;
} }
map.end() map.end()
} }
} }
#[derive(Debug, Serialize)] #[derive(Debug, Serialize)]
#[serde(rename_all="camelCase")] #[serde(rename_all = "camelCase")]
pub struct Recording { pub struct Recording {
pub start_time_90k: i64, pub start_time_90k: i64,
pub end_time_90k: i64, pub end_time_90k: i64,
@ -439,7 +481,7 @@ pub struct Recording {
} }
#[derive(Debug, Serialize)] #[derive(Debug, Serialize)]
#[serde(rename_all="camelCase")] #[serde(rename_all = "camelCase")]
pub struct VideoSampleEntry { pub struct VideoSampleEntry {
pub width: u16, pub width: u16,
pub height: u16, pub height: u16,

View File

@ -28,7 +28,7 @@
// You should have received a copy of the GNU General Public License // You should have received a copy of the GNU General Public License
// along with this program. If not, see <http://www.gnu.org/licenses/>. // along with this program. If not, see <http://www.gnu.org/licenses/>.
#![cfg_attr(all(feature="nightly", test), feature(test))] #![cfg_attr(all(feature = "nightly", test), feature(test))]
use log::{debug, error}; use log::{debug, error};
use std::str::FromStr; use std::str::FromStr;
@ -40,7 +40,7 @@ mod analytics;
/// Stub implementation of analytics module when not compiled with TensorFlow Lite. /// Stub implementation of analytics module when not compiled with TensorFlow Lite.
#[cfg(not(feature = "analytics"))] #[cfg(not(feature = "analytics"))]
mod analytics { mod analytics {
use failure::{Error, bail}; use failure::{bail, Error};
pub struct ObjectDetector; pub struct ObjectDetector;
@ -53,13 +53,18 @@ mod analytics {
pub struct ObjectDetectorStream; pub struct ObjectDetectorStream;
impl ObjectDetectorStream { impl ObjectDetectorStream {
pub fn new(_par: ffmpeg::avcodec::InputCodecParameters<'_>, pub fn new(
_detector: &ObjectDetector) -> Result<Self, Error> { _par: ffmpeg::avcodec::InputCodecParameters<'_>,
_detector: &ObjectDetector,
) -> Result<Self, Error> {
unimplemented!(); unimplemented!();
} }
pub fn process_frame(&mut self, _pkt: &ffmpeg::avcodec::Packet<'_>, pub fn process_frame(
_detector: &ObjectDetector) -> Result<(), Error> { &mut self,
_pkt: &ffmpeg::avcodec::Packet<'_>,
_detector: &ObjectDetector,
) -> Result<(), Error> {
unimplemented!(); unimplemented!();
} }
} }
@ -76,7 +81,10 @@ mod streamer;
mod web; mod web;
#[derive(StructOpt)] #[derive(StructOpt)]
#[structopt(name="moonfire-nvr", about="security camera network video recorder")] #[structopt(
name = "moonfire-nvr",
about = "security camera network video recorder"
)]
enum Args { enum Args {
/// Checks database integrity (like fsck). /// Checks database integrity (like fsck).
Check(cmds::check::Args), Check(cmds::check::Args),
@ -128,10 +136,12 @@ impl Args {
fn main() { fn main() {
let args = Args::from_args(); let args = Args::from_args();
let mut h = mylog::Builder::new() let mut h = mylog::Builder::new()
.set_format(::std::env::var("MOONFIRE_FORMAT") .set_format(
.map_err(|_| ()) ::std::env::var("MOONFIRE_FORMAT")
.and_then(|s| mylog::Format::from_str(&s)) .map_err(|_| ())
.unwrap_or(mylog::Format::Google)) .and_then(|s| mylog::Format::from_str(&s))
.unwrap_or(mylog::Format::Google),
)
.set_spec(&::std::env::var("MOONFIRE_LOG").unwrap_or("info".to_owned())) .set_spec(&::std::env::var("MOONFIRE_LOG").unwrap_or("info".to_owned()))
.build(); .build();
h.clone().install().unwrap(); h.clone().install().unwrap();
@ -144,10 +154,10 @@ fn main() {
Err(e) => { Err(e) => {
error!("Exiting due to error: {}", base::prettify_failure(&e)); error!("Exiting due to error: {}", base::prettify_failure(&e));
::std::process::exit(1); ::std::process::exit(1);
}, }
Ok(rv) => { Ok(rv) => {
debug!("Exiting with status {}", rv); debug!("Exiting with status {}", rv);
std::process::exit(rv) std::process::exit(rv)
}, }
} }
} }

File diff suppressed because it is too large Load Diff

View File

@ -30,17 +30,17 @@
//! Tools for implementing a `http_serve::Entity` body composed from many "slices". //! Tools for implementing a `http_serve::Entity` body composed from many "slices".
use crate::body::{wrap_error, BoxedError};
use base::format_err_t; use base::format_err_t;
use crate::body::{BoxedError, wrap_error}; use failure::{bail, Error};
use failure::{Error, bail}; use futures::{stream, stream::StreamExt, Stream};
use futures::{Stream, stream, stream::StreamExt};
use std::fmt; use std::fmt;
use std::ops::Range; use std::ops::Range;
use std::pin::Pin; use std::pin::Pin;
/// Gets a byte range given a context argument. /// Gets a byte range given a context argument.
/// Each `Slice` instance belongs to a single `Slices`. /// Each `Slice` instance belongs to a single `Slices`.
pub trait Slice : fmt::Debug + Sized + Sync + 'static { pub trait Slice: fmt::Debug + Sized + Sync + 'static {
type Ctx: Send + Sync + Clone; type Ctx: Send + Sync + Clone;
type Chunk: Send + Sync; type Chunk: Send + Sync;
@ -52,15 +52,22 @@ pub trait Slice : fmt::Debug + Sized + Sync + 'static {
/// Gets the body bytes indicated by `r`, which is relative to this slice's start. /// Gets the body bytes indicated by `r`, which is relative to this slice's start.
/// The additional argument `ctx` is as supplied to the `Slices`. /// The additional argument `ctx` is as supplied to the `Slices`.
/// The additional argument `l` is the length of this slice, as determined by the `Slices`. /// The additional argument `l` is the length of this slice, as determined by the `Slices`.
fn get_range(&self, ctx: &Self::Ctx, r: Range<u64>, len: u64) fn get_range(
-> Box<dyn Stream<Item = Result<Self::Chunk, BoxedError>> + Sync + Send>; &self,
ctx: &Self::Ctx,
r: Range<u64>,
len: u64,
) -> Box<dyn Stream<Item = Result<Self::Chunk, BoxedError>> + Sync + Send>;
fn get_slices(ctx: &Self::Ctx) -> &Slices<Self>; fn get_slices(ctx: &Self::Ctx) -> &Slices<Self>;
} }
/// Helper to serve byte ranges from a body which is broken down into many "slices". /// Helper to serve byte ranges from a body which is broken down into many "slices".
/// This is used to implement `.mp4` serving in `mp4::File` from `mp4::Slice` enums. /// This is used to implement `.mp4` serving in `mp4::File` from `mp4::Slice` enums.
pub struct Slices<S> where S: Slice { pub struct Slices<S>
where
S: Slice,
{
/// The total byte length of the `Slices`. /// The total byte length of the `Slices`.
/// Equivalent to `self.slices.back().map(|s| s.end()).unwrap_or(0)`; kept for convenience and /// Equivalent to `self.slices.back().map(|s| s.end()).unwrap_or(0)`; kept for convenience and
/// to avoid a branch. /// to avoid a branch.
@ -70,22 +77,45 @@ pub struct Slices<S> where S: Slice {
slices: Vec<S>, slices: Vec<S>,
} }
impl<S> fmt::Debug for Slices<S> where S: Slice { impl<S> fmt::Debug for Slices<S>
where
S: Slice,
{
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "{} slices with overall length {}:", self.slices.len(), self.len)?; write!(
f,
"{} slices with overall length {}:",
self.slices.len(),
self.len
)?;
let mut start = 0; let mut start = 0;
for (i, s) in self.slices.iter().enumerate() { for (i, s) in self.slices.iter().enumerate() {
let end = s.end(); let end = s.end();
write!(f, "\ni {:7}: range [{:12}, {:12}) len {:12}: {:?}", write!(
i, start, end, end - start, s)?; f,
"\ni {:7}: range [{:12}, {:12}) len {:12}: {:?}",
i,
start,
end,
end - start,
s
)?;
start = end; start = end;
} }
Ok(()) Ok(())
} }
} }
impl<S> Slices<S> where S: Slice { impl<S> Slices<S>
pub fn new() -> Self { Slices{len: 0, slices: Vec::new()} } where
S: Slice,
{
pub fn new() -> Self {
Slices {
len: 0,
slices: Vec::new(),
}
}
/// Reserves space for at least `additional` more slices to be appended. /// Reserves space for at least `additional` more slices to be appended.
pub fn reserve(&mut self, additional: usize) { pub fn reserve(&mut self, additional: usize) {
@ -95,8 +125,13 @@ impl<S> Slices<S> where S: Slice {
/// Appends the given slice, which must have end > the Slices's current len. /// Appends the given slice, which must have end > the Slices's current len.
pub fn append(&mut self, slice: S) -> Result<(), Error> { pub fn append(&mut self, slice: S) -> Result<(), Error> {
if slice.end() <= self.len { if slice.end() <= self.len {
bail!("end {} <= len {} while adding slice {:?} to slices:\n{:?}", bail!(
slice.end(), self.len, slice, self); "end {} <= len {} while adding slice {:?} to slices:\n{:?}",
slice.end(),
self.len,
slice,
self
);
} }
self.len = slice.end(); self.len = slice.end();
self.slices.push(slice); self.slices.push(slice);
@ -104,59 +139,78 @@ impl<S> Slices<S> where S: Slice {
} }
/// Returns the total byte length of all slices. /// Returns the total byte length of all slices.
pub fn len(&self) -> u64 { self.len } pub fn len(&self) -> u64 {
self.len
}
/// Returns the number of slices. /// Returns the number of slices.
pub fn num(&self) -> usize { self.slices.len() } pub fn num(&self) -> usize {
self.slices.len()
}
/// Writes `range` to `out`. /// Writes `range` to `out`.
/// This interface mirrors `http_serve::Entity::write_to`, with the additional `ctx` argument. /// This interface mirrors `http_serve::Entity::write_to`, with the additional `ctx` argument.
pub fn get_range(&self, ctx: &S::Ctx, range: Range<u64>) pub fn get_range(
-> Box<dyn Stream<Item = Result<S::Chunk, BoxedError>> + Sync + Send> { &self,
ctx: &S::Ctx,
range: Range<u64>,
) -> Box<dyn Stream<Item = Result<S::Chunk, BoxedError>> + Sync + Send> {
if range.start > range.end || range.end > self.len { if range.start > range.end || range.end > self.len {
return Box::new(stream::once(futures::future::err(wrap_error(format_err_t!( return Box::new(stream::once(futures::future::err(wrap_error(
Internal, "Bad range {:?} for slice of length {}", range, self.len))))); format_err_t!(
Internal,
"Bad range {:?} for slice of length {}",
range,
self.len
),
))));
} }
// Binary search for the first slice of the range to write, determining its index and // Binary search for the first slice of the range to write, determining its index and
// (from the preceding slice) the start of its range. // (from the preceding slice) the start of its range.
let (i, slice_start) = match self.slices.binary_search_by_key(&range.start, |s| s.end()) { let (i, slice_start) = match self.slices.binary_search_by_key(&range.start, |s| s.end()) {
Ok(i) => (i+1, self.slices[i].end()), // desired start == slice i's end; first is i+1! Ok(i) => (i + 1, self.slices[i].end()), // desired start == slice i's end; first is i+1!
Err(i) if i == 0 => (0, 0), // desired start < slice 0's end; first is 0. Err(i) if i == 0 => (0, 0), // desired start < slice 0's end; first is 0.
Err(i) => (i, self.slices[i-1].end()), // desired start < slice i's end; first is i. Err(i) => (i, self.slices[i - 1].end()), // desired start < slice i's end; first is i.
}; };
// Iterate through and write each slice until the end. // Iterate through and write each slice until the end.
let start_pos = range.start - slice_start; let start_pos = range.start - slice_start;
let bodies = stream::unfold( let bodies = stream::unfold(
(ctx.clone(), i, start_pos, slice_start), move |(c, i, start_pos, slice_start)| { (ctx.clone(), i, start_pos, slice_start),
let (body, min_end); move |(c, i, start_pos, slice_start)| {
{ let (body, min_end);
let self_ = S::get_slices(&c); {
if i == self_.slices.len() { return futures::future::ready(None) } let self_ = S::get_slices(&c);
let s = &self_.slices[i]; if i == self_.slices.len() {
if range.end == slice_start + start_pos { return futures::future::ready(None) } return futures::future::ready(None);
let s_end = s.end(); }
min_end = ::std::cmp::min(range.end, s_end); let s = &self_.slices[i];
let l = s_end - slice_start; if range.end == slice_start + start_pos {
body = s.get_range(&c, start_pos .. min_end - slice_start, l); return futures::future::ready(None);
}; }
futures::future::ready(Some((Pin::from(body), (c, i+1, 0, min_end)))) let s_end = s.end();
}); min_end = ::std::cmp::min(range.end, s_end);
let l = s_end - slice_start;
body = s.get_range(&c, start_pos..min_end - slice_start, l);
};
futures::future::ready(Some((Pin::from(body), (c, i + 1, 0, min_end))))
},
);
Box::new(bodies.flatten()) Box::new(bodies.flatten())
} }
} }
#[cfg(test)] #[cfg(test)]
mod tests { mod tests {
use super::{Slice, Slices};
use crate::body::BoxedError; use crate::body::BoxedError;
use db::testutil; use db::testutil;
use futures::stream::{self, Stream, TryStreamExt}; use futures::stream::{self, Stream, TryStreamExt};
use lazy_static::lazy_static; use lazy_static::lazy_static;
use std::ops::Range; use std::ops::Range;
use std::pin::Pin; use std::pin::Pin;
use super::{Slice, Slices};
#[derive(Debug, Eq, PartialEq)] #[derive(Debug, Eq, PartialEq)]
pub struct FakeChunk { pub struct FakeChunk {
@ -174,30 +228,45 @@ mod tests {
type Ctx = &'static Slices<FakeSlice>; type Ctx = &'static Slices<FakeSlice>;
type Chunk = FakeChunk; type Chunk = FakeChunk;
fn end(&self) -> u64 { self.end } fn end(&self) -> u64 {
self.end
fn get_range(&self, _ctx: &&'static Slices<FakeSlice>, r: Range<u64>, _l: u64)
-> Box<dyn Stream<Item = Result<FakeChunk, BoxedError>> + Send + Sync> {
Box::new(stream::once(futures::future::ok(FakeChunk{slice: self.name, range: r})))
} }
fn get_slices(ctx: &&'static Slices<FakeSlice>) -> &'static Slices<Self> { *ctx } fn get_range(
&self,
_ctx: &&'static Slices<FakeSlice>,
r: Range<u64>,
_l: u64,
) -> Box<dyn Stream<Item = Result<FakeChunk, BoxedError>> + Send + Sync> {
Box::new(stream::once(futures::future::ok(FakeChunk {
slice: self.name,
range: r,
})))
}
fn get_slices(ctx: &&'static Slices<FakeSlice>) -> &'static Slices<Self> {
*ctx
}
} }
lazy_static! { lazy_static! {
#[rustfmt::skip]
static ref SLICES: Slices<FakeSlice> = { static ref SLICES: Slices<FakeSlice> = {
let mut s = Slices::new(); let mut s = Slices::new();
s.append(FakeSlice{end: 5, name: "a"}).unwrap(); s.append(FakeSlice { end: 5, name: "a" }).unwrap();
s.append(FakeSlice{end: 5+13, name: "b"}).unwrap(); s.append(FakeSlice { end: 5 + 13, name: "b" }).unwrap();
s.append(FakeSlice{end: 5+13+7, name: "c"}).unwrap(); s.append(FakeSlice { end: 5 + 13 + 7, name: "c" }).unwrap();
s.append(FakeSlice{end: 5+13+7+17, name: "d"}).unwrap(); s.append(FakeSlice { end: 5 + 13 + 7 + 17, name: "d" }).unwrap();
s.append(FakeSlice{end: 5+13+7+17+19, name: "e"}).unwrap(); s.append(FakeSlice { end: 5 + 13 + 7 + 17 + 19, name: "e" }).unwrap();
s s
}; };
} }
async fn get_range(r: Range<u64>) -> Vec<FakeChunk> { async fn get_range(r: Range<u64>) -> Vec<FakeChunk> {
Pin::from(SLICES.get_range(&&*SLICES, r)).try_collect().await.unwrap() Pin::from(SLICES.get_range(&&*SLICES, r))
.try_collect()
.await
.unwrap()
} }
#[test] #[test]
@ -210,48 +279,68 @@ mod tests {
pub async fn exact_slice() { pub async fn exact_slice() {
// Test writing exactly slice b. // Test writing exactly slice b.
testutil::init(); testutil::init();
let out = get_range(5 .. 18).await; let out = get_range(5..18).await;
assert_eq!(&[FakeChunk{slice: "b", range: 0 .. 13}], &out[..]); assert_eq!(
&[FakeChunk {
slice: "b",
range: 0..13
}],
&out[..]
);
} }
#[tokio::test] #[tokio::test]
pub async fn offset_first() { pub async fn offset_first() {
// Test writing part of slice a. // Test writing part of slice a.
testutil::init(); testutil::init();
let out = get_range(1 .. 3).await; let out = get_range(1..3).await;
assert_eq!(&[FakeChunk{slice: "a", range: 1 .. 3}], &out[..]); assert_eq!(
&[FakeChunk {
slice: "a",
range: 1..3
}],
&out[..]
);
} }
#[tokio::test] #[tokio::test]
pub async fn offset_mid() { pub async fn offset_mid() {
// Test writing part of slice b, all of slice c, and part of slice d. // Test writing part of slice b, all of slice c, and part of slice d.
testutil::init(); testutil::init();
let out = get_range(17 .. 26).await; let out = get_range(17..26).await;
assert_eq!(&[ #[rustfmt::skip]
FakeChunk{slice: "b", range: 12 .. 13}, assert_eq!(
FakeChunk{slice: "c", range: 0 .. 7}, &[
FakeChunk{slice: "d", range: 0 .. 1}, FakeChunk { slice: "b", range: 12..13 },
], &out[..]); FakeChunk { slice: "c", range: 0..7 },
FakeChunk { slice: "d", range: 0..1 },
],
&out[..]
);
} }
#[tokio::test] #[tokio::test]
pub async fn everything() { pub async fn everything() {
// Test writing the whole Slices. // Test writing the whole Slices.
testutil::init(); testutil::init();
let out = get_range(0 .. 61).await; let out = get_range(0..61).await;
assert_eq!(&[ #[rustfmt::skip]
FakeChunk{slice: "a", range: 0 .. 5}, assert_eq!(
FakeChunk{slice: "b", range: 0 .. 13}, &[
FakeChunk{slice: "c", range: 0 .. 7}, FakeChunk { slice: "a", range: 0..5 },
FakeChunk{slice: "d", range: 0 .. 17}, FakeChunk { slice: "b", range: 0..13 },
FakeChunk{slice: "e", range: 0 .. 19}, FakeChunk { slice: "c", range: 0..7 },
], &out[..]); FakeChunk { slice: "d", range: 0..17 },
FakeChunk { slice: "e", range: 0..19 },
],
&out[..]
);
} }
#[tokio::test] #[tokio::test]
pub async fn at_end() { pub async fn at_end() {
testutil::init(); testutil::init();
let out = get_range(61 .. 61).await; let out = get_range(61..61).await;
let empty: &[FakeChunk] = &[]; let empty: &[FakeChunk] = &[];
assert_eq!(empty, &out[..]); assert_eq!(empty, &out[..]);
} }

View File

@ -30,7 +30,7 @@
use crate::h264; use crate::h264;
use cstr::cstr; use cstr::cstr;
use failure::{Error, bail}; use failure::{bail, Error};
use ffmpeg; use ffmpeg;
use lazy_static::lazy_static; use lazy_static::lazy_static;
use log::{debug, warn}; use log::{debug, warn};
@ -50,13 +50,10 @@ pub enum Source<'a> {
File(&'a str), File(&'a str),
/// An RTSP stream, for production use. /// An RTSP stream, for production use.
Rtsp { Rtsp { url: &'a str, redacted_url: &'a str },
url: &'a str,
redacted_url: &'a str
},
} }
pub trait Opener<S : Stream> : Sync { pub trait Opener<S: Stream>: Sync {
fn open(&self, src: Source) -> Result<S, Error>; fn open(&self, src: Source) -> Result<S, Error>;
} }
@ -70,8 +67,10 @@ pub struct Ffmpeg {}
impl Ffmpeg { impl Ffmpeg {
fn new() -> Ffmpeg { fn new() -> Ffmpeg {
START.call_once(|| { ffmpeg::Ffmpeg::new(); }); START.call_once(|| {
Ffmpeg{} ffmpeg::Ffmpeg::new();
});
Ffmpeg {}
} }
} }
@ -84,39 +83,57 @@ impl Opener<FfmpegStream> for Ffmpeg {
let mut open_options = ffmpeg::avutil::Dictionary::new(); let mut open_options = ffmpeg::avutil::Dictionary::new();
// Work around https://github.com/scottlamb/moonfire-nvr/issues/10 // Work around https://github.com/scottlamb/moonfire-nvr/issues/10
open_options.set(cstr!("advanced_editlist"), cstr!("false")).unwrap(); open_options
.set(cstr!("advanced_editlist"), cstr!("false"))
.unwrap();
let url = format!("file:{}", filename); let url = format!("file:{}", filename);
let i = InputFormatContext::open(&CString::new(url.clone()).unwrap(), let i = InputFormatContext::open(
&mut open_options)?; &CString::new(url.clone()).unwrap(),
&mut open_options,
)?;
if !open_options.empty() { if !open_options.empty() {
warn!("While opening URL {}, some options were not understood: {}", warn!(
url, open_options); "While opening URL {}, some options were not understood: {}",
url, open_options
);
} }
i i
} }
Source::Rtsp{url, redacted_url} => { Source::Rtsp { url, redacted_url } => {
let mut open_options = ffmpeg::avutil::Dictionary::new(); let mut open_options = ffmpeg::avutil::Dictionary::new();
open_options.set(cstr!("rtsp_transport"), cstr!("tcp")).unwrap(); open_options
open_options.set(cstr!("user-agent"), cstr!("moonfire-nvr")).unwrap(); .set(cstr!("rtsp_transport"), cstr!("tcp"))
.unwrap();
open_options
.set(cstr!("user-agent"), cstr!("moonfire-nvr"))
.unwrap();
// 10-second socket timeout, in microseconds. // 10-second socket timeout, in microseconds.
open_options.set(cstr!("stimeout"), cstr!("10000000")).unwrap(); open_options
.set(cstr!("stimeout"), cstr!("10000000"))
.unwrap();
// Without this option, the first packet has an incorrect pts. // Without this option, the first packet has an incorrect pts.
// https://trac.ffmpeg.org/ticket/5018 // https://trac.ffmpeg.org/ticket/5018
open_options.set(cstr!("fflags"), cstr!("nobuffer")).unwrap(); open_options
.set(cstr!("fflags"), cstr!("nobuffer"))
.unwrap();
// Moonfire NVR currently only supports video, so receiving audio is wasteful. // Moonfire NVR currently only supports video, so receiving audio is wasteful.
// It also triggers <https://github.com/scottlamb/moonfire-nvr/issues/36>. // It also triggers <https://github.com/scottlamb/moonfire-nvr/issues/36>.
open_options.set(cstr!("allowed_media_types"), cstr!("video")).unwrap(); open_options
.set(cstr!("allowed_media_types"), cstr!("video"))
.unwrap();
let i = InputFormatContext::open(&CString::new(url).unwrap(), &mut open_options)?; let i = InputFormatContext::open(&CString::new(url).unwrap(), &mut open_options)?;
if !open_options.empty() { if !open_options.empty() {
warn!("While opening URL {}, some options were not understood: {}", warn!(
redacted_url, open_options); "While opening URL {}, some options were not understood: {}",
redacted_url, open_options
);
} }
i i
}, }
}; };
input.find_stream_info()?; input.find_stream_info()?;
@ -125,7 +142,7 @@ impl Opener<FfmpegStream> for Ffmpeg {
let mut video_i = None; let mut video_i = None;
{ {
let s = input.streams(); let s = input.streams();
for i in 0 .. s.len() { for i in 0..s.len() {
if s.get(i).codecpar().codec_type().is_video() { if s.get(i).codecpar().codec_type().is_video() {
debug!("Video stream index is {}", i); debug!("Video stream index is {}", i);
video_i = Some(i); video_i = Some(i);
@ -138,10 +155,7 @@ impl Opener<FfmpegStream> for Ffmpeg {
None => bail!("no video stream"), None => bail!("no video stream"),
}; };
Ok(FfmpegStream { Ok(FfmpegStream { input, video_i })
input,
video_i,
})
} }
} }
@ -159,7 +173,11 @@ impl Stream for FfmpegStream {
let video = self.input.streams().get(self.video_i); let video = self.input.streams().get(self.video_i);
let tb = video.time_base(); let tb = video.time_base();
if tb.num != 1 || tb.den != 90000 { if tb.num != 1 || tb.den != 90000 {
bail!("video stream has timebase {}/{}; expected 1/90000", tb.num, tb.den); bail!(
"video stream has timebase {}/{}; expected 1/90000",
tb.num,
tb.den
);
} }
let codec = video.codecpar(); let codec = video.codecpar();
let codec_id = codec.codec_id(); let codec_id = codec.codec_id();
@ -167,8 +185,11 @@ impl Stream for FfmpegStream {
bail!("stream's video codec {:?} is not h264", codec_id); bail!("stream's video codec {:?} is not h264", codec_id);
} }
let dims = codec.dims(); let dims = codec.dims();
h264::ExtraData::parse(codec.extradata(), u16::try_from(dims.width)?, h264::ExtraData::parse(
u16::try_from(dims.height)?) codec.extradata(),
u16::try_from(dims.width)?,
u16::try_from(dims.height)?,
)
} }
fn get_next<'i>(&'i mut self) -> Result<ffmpeg::avcodec::Packet<'i>, ffmpeg::Error> { fn get_next<'i>(&'i mut self) -> Result<ffmpeg::avcodec::Packet<'i>, ffmpeg::Error> {

View File

@ -28,11 +28,11 @@
// You should have received a copy of the GNU General Public License // You should have received a copy of the GNU General Public License
// along with this program. If not, see <http://www.gnu.org/licenses/>. // along with this program. If not, see <http://www.gnu.org/licenses/>.
use base::clock::{Clocks, TimerGuard};
use crate::h264; use crate::h264;
use crate::stream; use crate::stream;
use db::{Camera, Database, Stream, dir, recording, writer}; use base::clock::{Clocks, TimerGuard};
use failure::{Error, bail, format_err}; use db::{dir, recording, writer, Camera, Database, Stream};
use failure::{bail, format_err, Error};
use log::{debug, info, trace, warn}; use log::{debug, info, trace, warn};
use std::result::Result; use std::result::Result;
use std::sync::atomic::{AtomicBool, Ordering}; use std::sync::atomic::{AtomicBool, Ordering};
@ -43,13 +43,21 @@ use url::Url;
pub static ROTATE_INTERVAL_SEC: i64 = 60; pub static ROTATE_INTERVAL_SEC: i64 = 60;
/// Common state that can be used by multiple `Streamer` instances. /// Common state that can be used by multiple `Streamer` instances.
pub struct Environment<'a, 'b, C, S> where C: Clocks + Clone, S: 'a + stream::Stream { pub struct Environment<'a, 'b, C, S>
where
C: Clocks + Clone,
S: 'a + stream::Stream,
{
pub opener: &'a dyn stream::Opener<S>, pub opener: &'a dyn stream::Opener<S>,
pub db: &'b Arc<Database<C>>, pub db: &'b Arc<Database<C>>,
pub shutdown: &'b Arc<AtomicBool>, pub shutdown: &'b Arc<AtomicBool>,
} }
pub struct Streamer<'a, C, S> where C: Clocks + Clone, S: 'a + stream::Stream { pub struct Streamer<'a, C, S>
where
C: Clocks + Clone,
S: 'a + stream::Stream,
{
shutdown: Arc<AtomicBool>, shutdown: Arc<AtomicBool>,
// State below is only used by the thread in Run. // State below is only used by the thread in Run.
@ -66,17 +74,27 @@ pub struct Streamer<'a, C, S> where C: Clocks + Clone, S: 'a + stream::Stream {
detector: Option<Arc<crate::analytics::ObjectDetector>>, detector: Option<Arc<crate::analytics::ObjectDetector>>,
} }
impl<'a, C, S> Streamer<'a, C, S> where C: 'a + Clocks + Clone, S: 'a + stream::Stream { impl<'a, C, S> Streamer<'a, C, S>
pub fn new<'b>(env: &Environment<'a, 'b, C, S>, dir: Arc<dir::SampleFileDir>, where
syncer_channel: writer::SyncerChannel<::std::fs::File>, C: 'a + Clocks + Clone,
stream_id: i32, c: &Camera, s: &Stream, rotate_offset_sec: i64, S: 'a + stream::Stream,
rotate_interval_sec: i64, {
detector: Option<Arc<crate::analytics::ObjectDetector>>) pub fn new<'b>(
-> Result<Self, Error> { env: &Environment<'a, 'b, C, S>,
dir: Arc<dir::SampleFileDir>,
syncer_channel: writer::SyncerChannel<::std::fs::File>,
stream_id: i32,
c: &Camera,
s: &Stream,
rotate_offset_sec: i64,
rotate_interval_sec: i64,
detector: Option<Arc<crate::analytics::ObjectDetector>>,
) -> Result<Self, Error> {
let mut url = Url::parse(&s.rtsp_url)?; let mut url = Url::parse(&s.rtsp_url)?;
let mut redacted_url = url.clone(); let mut redacted_url = url.clone();
if !c.username.is_empty() { if !c.username.is_empty() {
url.set_username(&c.username).map_err(|_| format_err!("can't set username"))?; url.set_username(&c.username)
.map_err(|_| format_err!("can't set username"))?;
redacted_url.set_username(&c.username).unwrap(); redacted_url.set_username(&c.username).unwrap();
url.set_password(Some(&c.password)).unwrap(); url.set_password(Some(&c.password)).unwrap();
redacted_url.set_password(Some("redacted")).unwrap(); redacted_url.set_password(Some("redacted")).unwrap();
@ -97,14 +115,20 @@ impl<'a, C, S> Streamer<'a, C, S> where C: 'a + Clocks + Clone, S: 'a + stream::
}) })
} }
pub fn short_name(&self) -> &str { &self.short_name } pub fn short_name(&self) -> &str {
&self.short_name
}
pub fn run(&mut self) { pub fn run(&mut self) {
while !self.shutdown.load(Ordering::SeqCst) { while !self.shutdown.load(Ordering::SeqCst) {
if let Err(e) = self.run_once() { if let Err(e) = self.run_once() {
let sleep_time = time::Duration::seconds(1); let sleep_time = time::Duration::seconds(1);
warn!("{}: sleeping for {:?} after error: {}", warn!(
self.short_name, sleep_time, base::prettify_failure(&e)); "{}: sleeping for {:?} after error: {}",
self.short_name,
sleep_time,
base::prettify_failure(&e)
);
self.db.clocks().sleep(sleep_time); self.db.clocks().sleep(sleep_time);
} }
} }
@ -127,21 +151,31 @@ impl<'a, C, S> Streamer<'a, C, S> where C: 'a + Clocks + Clone, S: 'a + stream::
let mut detector_stream = match self.detector.as_ref() { let mut detector_stream = match self.detector.as_ref() {
None => None, None => None,
Some(od) => Some(crate::analytics::ObjectDetectorStream::new( Some(od) => Some(crate::analytics::ObjectDetectorStream::new(
stream.get_video_codecpar(), &od)?), stream.get_video_codecpar(),
&od,
)?),
}; };
let extra_data = stream.get_extra_data()?; let extra_data = stream.get_extra_data()?;
let video_sample_entry_id = { let video_sample_entry_id = {
let _t = TimerGuard::new(&clocks, || "inserting video sample entry"); let _t = TimerGuard::new(&clocks, || "inserting video sample entry");
self.db.lock().insert_video_sample_entry(extra_data.entry)? self.db.lock().insert_video_sample_entry(extra_data.entry)?
}; };
debug!("{}: video_sample_entry_id={}", self.short_name, video_sample_entry_id); debug!(
"{}: video_sample_entry_id={}",
self.short_name, video_sample_entry_id
);
let mut seen_key_frame = false; let mut seen_key_frame = false;
// Seconds since epoch at which to next rotate. // Seconds since epoch at which to next rotate.
let mut rotate: Option<i64> = None; let mut rotate: Option<i64> = None;
let mut transformed = Vec::new(); let mut transformed = Vec::new();
let mut w = writer::Writer::new(&self.dir, &self.db, &self.syncer_channel, self.stream_id, let mut w = writer::Writer::new(
video_sample_entry_id); &self.dir,
&self.db,
&self.syncer_channel,
self.stream_id,
video_sample_entry_id,
);
while !self.shutdown.load(Ordering::SeqCst) { while !self.shutdown.load(Ordering::SeqCst) {
let pkt = { let pkt = {
let _t = TimerGuard::new(&clocks, || "getting next packet"); let _t = TimerGuard::new(&clocks, || "getting next packet");
@ -168,22 +202,32 @@ impl<'a, C, S> Streamer<'a, C, S> where C: 'a + Clocks + Clone, S: 'a + stream::
} else { } else {
Some(r) Some(r)
} }
} else { None }; } else {
None
};
let r = match rotate { let r = match rotate {
Some(r) => r, Some(r) => r,
None => { None => {
let sec = frame_realtime.sec; let sec = frame_realtime.sec;
let r = sec - (sec % self.rotate_interval_sec) + self.rotate_offset_sec; let r = sec - (sec % self.rotate_interval_sec) + self.rotate_offset_sec;
let r = r + if r <= sec { self.rotate_interval_sec } else { 0 }; let r = r + if r <= sec {
self.rotate_interval_sec
} else {
0
};
// On the first recording, set rotate time to not the next rotate offset, but // On the first recording, set rotate time to not the next rotate offset, but
// the one after, so that it's longer than usual rather than shorter than // the one after, so that it's longer than usual rather than shorter than
// usual. This ensures there's plenty of frame times to use when calculating // usual. This ensures there's plenty of frame times to use when calculating
// the start time. // the start time.
let r = r + if w.previously_opened()? { 0 } else { self.rotate_interval_sec }; let r = r + if w.previously_opened()? {
0
} else {
self.rotate_interval_sec
};
let _t = TimerGuard::new(&clocks, || "creating writer"); let _t = TimerGuard::new(&clocks, || "creating writer");
r r
}, }
}; };
let orig_data = match pkt.data() { let orig_data = match pkt.data() {
Some(d) => d, Some(d) => d,
@ -195,8 +239,9 @@ impl<'a, C, S> Streamer<'a, C, S> where C: 'a + Clocks + Clone, S: 'a + stream::
} else { } else {
orig_data orig_data
}; };
let _t = TimerGuard::new(&clocks, let _t = TimerGuard::new(&clocks, || {
|| format!("writing {} bytes", transformed_data.len())); format!("writing {} bytes", transformed_data.len())
});
w.write(transformed_data, local_time, pts, pkt.is_key())?; w.write(transformed_data, local_time, pts, pkt.is_key())?;
rotate = Some(r); rotate = Some(r);
} }
@ -210,17 +255,17 @@ impl<'a, C, S> Streamer<'a, C, S> where C: 'a + Clocks + Clone, S: 'a + stream::
#[cfg(test)] #[cfg(test)]
mod tests { mod tests {
use base::clock::{self, Clocks};
use crate::h264; use crate::h264;
use crate::stream::{self, Opener, Stream}; use crate::stream::{self, Opener, Stream};
use db::{CompositeId, recording, testutil}; use base::clock::{self, Clocks};
use failure::{Error, bail}; use db::{recording, testutil, CompositeId};
use failure::{bail, Error};
use log::trace; use log::trace;
use parking_lot::Mutex; use parking_lot::Mutex;
use std::cmp; use std::cmp;
use std::convert::TryFrom; use std::convert::TryFrom;
use std::sync::Arc;
use std::sync::atomic::{AtomicBool, Ordering}; use std::sync::atomic::{AtomicBool, Ordering};
use std::sync::Arc;
use time; use time;
struct ProxyingStream<'a> { struct ProxyingStream<'a> {
@ -234,8 +279,11 @@ mod tests {
} }
impl<'a> ProxyingStream<'a> { impl<'a> ProxyingStream<'a> {
fn new(clocks: &'a clock::SimulatedClocks, buffered: time::Duration, fn new(
inner: stream::FfmpegStream) -> ProxyingStream { clocks: &'a clock::SimulatedClocks,
buffered: time::Duration,
inner: stream::FfmpegStream,
) -> ProxyingStream {
clocks.sleep(buffered); clocks.sleep(buffered);
ProxyingStream { ProxyingStream {
clocks: clocks, clocks: clocks,
@ -265,7 +313,8 @@ mod tests {
{ {
let goal = pkt.pts().unwrap() + pkt.duration() as i64; let goal = pkt.pts().unwrap() + pkt.duration() as i64;
let goal = time::Duration::nanoseconds( let goal = time::Duration::nanoseconds(
goal * 1_000_000_000 / recording::TIME_UNITS_PER_SEC); goal * 1_000_000_000 / recording::TIME_UNITS_PER_SEC,
);
let duration = goal - self.slept; let duration = goal - self.slept;
let buf_part = cmp::min(self.buffered, duration); let buf_part = cmp::min(self.buffered, duration);
self.buffered = self.buffered - buf_part; self.buffered = self.buffered - buf_part;
@ -293,7 +342,9 @@ mod tests {
self.inner.get_video_codecpar() self.inner.get_video_codecpar()
} }
fn get_extra_data(&self) -> Result<h264::ExtraData, Error> { self.inner.get_extra_data() } fn get_extra_data(&self) -> Result<h264::ExtraData, Error> {
self.inner.get_extra_data()
}
} }
struct MockOpener<'a> { struct MockOpener<'a> {
@ -305,7 +356,7 @@ mod tests {
impl<'a> stream::Opener<ProxyingStream<'a>> for MockOpener<'a> { impl<'a> stream::Opener<ProxyingStream<'a>> for MockOpener<'a> {
fn open(&self, src: stream::Source) -> Result<ProxyingStream<'a>, Error> { fn open(&self, src: stream::Source) -> Result<ProxyingStream<'a>, Error> {
match src { match src {
stream::Source::Rtsp{url, ..} => assert_eq!(url, &self.expected_url), stream::Source::Rtsp { url, .. } => assert_eq!(url, &self.expected_url),
stream::Source::File(_) => panic!("expected rtsp url"), stream::Source::File(_) => panic!("expected rtsp url"),
}; };
let mut l = self.streams.lock(); let mut l = self.streams.lock();
@ -313,12 +364,12 @@ mod tests {
Some(stream) => { Some(stream) => {
trace!("MockOpener returning next stream"); trace!("MockOpener returning next stream");
Ok(stream) Ok(stream)
}, }
None => { None => {
trace!("MockOpener shutting down"); trace!("MockOpener shutting down");
self.shutdown.store(true, Ordering::SeqCst); self.shutdown.store(true, Ordering::SeqCst);
bail!("done") bail!("done")
}, }
} }
} }
} }
@ -335,14 +386,15 @@ mod tests {
let mut it = recording::SampleIndexIterator::new(); let mut it = recording::SampleIndexIterator::new();
let mut frames = Vec::new(); let mut frames = Vec::new();
while it.next(&rec.video_index).unwrap() { while it.next(&rec.video_index).unwrap() {
frames.push(Frame{ frames.push(Frame {
start_90k: it.start_90k, start_90k: it.start_90k,
duration_90k: it.duration_90k, duration_90k: it.duration_90k,
is_key: it.is_key(), is_key: it.is_key(),
}); });
} }
Ok(frames) Ok(frames)
}).unwrap() })
.unwrap()
} }
#[test] #[test]
@ -350,14 +402,16 @@ mod tests {
testutil::init(); testutil::init();
// 2015-04-25 00:00:00 UTC // 2015-04-25 00:00:00 UTC
let clocks = clock::SimulatedClocks::new(time::Timespec::new(1429920000, 0)); let clocks = clock::SimulatedClocks::new(time::Timespec::new(1429920000, 0));
clocks.sleep(time::Duration::seconds(86400)); // to 2015-04-26 00:00:00 UTC clocks.sleep(time::Duration::seconds(86400)); // to 2015-04-26 00:00:00 UTC
let stream = stream::FFMPEG.open(stream::Source::File("src/testdata/clip.mp4")).unwrap(); let stream = stream::FFMPEG
.open(stream::Source::File("src/testdata/clip.mp4"))
.unwrap();
let mut stream = ProxyingStream::new(&clocks, time::Duration::seconds(2), stream); let mut stream = ProxyingStream::new(&clocks, time::Duration::seconds(2), stream);
stream.ts_offset = 123456; // starting pts of the input should be irrelevant stream.ts_offset = 123456; // starting pts of the input should be irrelevant
stream.ts_offset_pkts_left = u32::max_value(); stream.ts_offset_pkts_left = u32::max_value();
stream.pkts_left = u32::max_value(); stream.pkts_left = u32::max_value();
let opener = MockOpener{ let opener = MockOpener {
expected_url: "rtsp://foo:bar@test-camera/main".to_owned(), expected_url: "rtsp://foo:bar@test-camera/main".to_owned(),
streams: Mutex::new(vec![stream]), streams: Mutex::new(vec![stream]),
shutdown: Arc::new(AtomicBool::new(false)), shutdown: Arc::new(AtomicBool::new(false)),
@ -373,9 +427,23 @@ mod tests {
let l = db.db.lock(); let l = db.db.lock();
let camera = l.cameras_by_id().get(&testutil::TEST_CAMERA_ID).unwrap(); let camera = l.cameras_by_id().get(&testutil::TEST_CAMERA_ID).unwrap();
let s = l.streams_by_id().get(&testutil::TEST_STREAM_ID).unwrap(); let s = l.streams_by_id().get(&testutil::TEST_STREAM_ID).unwrap();
let dir = db.dirs_by_stream_id.get(&testutil::TEST_STREAM_ID).unwrap().clone(); let dir = db
stream = super::Streamer::new(&env, dir, db.syncer_channel.clone(), .dirs_by_stream_id
testutil::TEST_STREAM_ID, camera, s, 0, 3, None).unwrap(); .get(&testutil::TEST_STREAM_ID)
.unwrap()
.clone();
stream = super::Streamer::new(
&env,
dir,
db.syncer_channel.clone(),
testutil::TEST_STREAM_ID,
camera,
s,
0,
3,
None,
)
.unwrap();
} }
stream.run(); stream.run();
assert!(opener.streams.lock().is_empty()); assert!(opener.streams.lock().is_empty());
@ -386,25 +454,28 @@ mod tests {
// 3-second boundaries (such as 2016-04-26 00:00:03), rotation happens somewhat later: // 3-second boundaries (such as 2016-04-26 00:00:03), rotation happens somewhat later:
// * the first rotation is always skipped // * the first rotation is always skipped
// * the second rotation is deferred until a key frame. // * the second rotation is deferred until a key frame.
#[rustfmt::skip]
assert_eq!(get_frames(&db, CompositeId::new(testutil::TEST_STREAM_ID, 0)), &[ assert_eq!(get_frames(&db, CompositeId::new(testutil::TEST_STREAM_ID, 0)), &[
Frame{start_90k: 0, duration_90k: 90379, is_key: true}, Frame { start_90k: 0, duration_90k: 90379, is_key: true },
Frame{start_90k: 90379, duration_90k: 89884, is_key: false}, Frame { start_90k: 90379, duration_90k: 89884, is_key: false },
Frame{start_90k: 180263, duration_90k: 89749, is_key: false}, Frame { start_90k: 180263, duration_90k: 89749, is_key: false },
Frame{start_90k: 270012, duration_90k: 89981, is_key: false}, // pts_time 3.0001... Frame { start_90k: 270012, duration_90k: 89981, is_key: false }, // pts_time 3.0001...
Frame{start_90k: 359993, duration_90k: 90055, is_key: true}, Frame { start_90k: 359993, duration_90k: 90055, is_key: true },
Frame{start_90k: 450048, duration_90k: 89967, is_key: false}, Frame { start_90k: 450048, duration_90k: 89967, is_key: false },
Frame{start_90k: 540015, duration_90k: 90021, is_key: false}, // pts_time 6.0001... Frame { start_90k: 540015, duration_90k: 90021, is_key: false }, // pts_time 6.0001...
Frame{start_90k: 630036, duration_90k: 89958, is_key: false}, Frame { start_90k: 630036, duration_90k: 89958, is_key: false },
]); ]);
#[rustfmt::skip]
assert_eq!(get_frames(&db, CompositeId::new(testutil::TEST_STREAM_ID, 1)), &[ assert_eq!(get_frames(&db, CompositeId::new(testutil::TEST_STREAM_ID, 1)), &[
Frame{start_90k: 0, duration_90k: 90011, is_key: true}, Frame { start_90k: 0, duration_90k: 90011, is_key: true },
Frame{start_90k: 90011, duration_90k: 0, is_key: false}, Frame { start_90k: 90011, duration_90k: 0, is_key: false },
]); ]);
let mut recordings = Vec::new(); let mut recordings = Vec::new();
db.list_recordings_by_id(testutil::TEST_STREAM_ID, 0..2, &mut |r| { db.list_recordings_by_id(testutil::TEST_STREAM_ID, 0..2, &mut |r| {
recordings.push(r); recordings.push(r);
Ok(()) Ok(())
}).unwrap(); })
.unwrap();
assert_eq!(2, recordings.len()); assert_eq!(2, recordings.len());
assert_eq!(0, recordings[0].id.recording()); assert_eq!(0, recordings[0].id.recording());
assert_eq!(recording::Time(128700575999999), recordings[0].start); assert_eq!(recording::Time(128700575999999), recordings[0].start);

File diff suppressed because it is too large Load Diff