mirror of https://github.com/scottlamb/moonfire-nvr.git
address some no-op clippy warnings

commit 54bd068706 (parent 603f02b686)
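The hunks below are mechanical clippy-driven cleanups with no behavior change. As a rough standalone sketch of the recurring idioms (illustrative only, not code from this commit; the `Counter` type and `demo` function are hypothetical): derive `Default` instead of hand-writing a zero-initializing `new()`, call `is_empty()` rather than comparing `len()` with zero, iterate map values directly instead of discarding keys, and use `mem::take` in place of `mem::replace(_, Default::default())`.

use std::collections::BTreeMap;
use std::mem;

// Hypothetical example of the idioms applied throughout this commit.
#[derive(Default)] // replaces a manual `fn new()` that zeroed each field
struct Counter {
    hits: u64,
}

fn demo(by_name: &mut BTreeMap<String, Counter>, pending: &mut Vec<String>) {
    if pending.is_empty() {            // instead of `pending.len() == 0`
        return;
    }
    for c in by_name.values_mut() {    // instead of `for (_, c) in &mut *by_name`
        c.hits += 1;
    }
    let _drained = mem::take(pending); // instead of `mem::replace(pending, Vec::new())`
}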
@@ -5,7 +5,6 @@
 //! Clock interface and implementations for testability.
 
 use failure::Error;
-use libc;
 use log::warn;
 use parking_lot::Mutex;
 use std::mem;
@@ -136,7 +135,7 @@ struct SimulatedClocksInner
 impl SimulatedClocks {
 pub fn new(boot: Timespec) -> Self {
 SimulatedClocks(Arc::new(SimulatedClocksInner {
-boot: boot,
+boot,
 uptime: Mutex::new(Duration::seconds(0)),
 }))
 }
@@ -163,7 +162,7 @@ impl Clocks for SimulatedClocks {
 timeout: StdDuration,
 ) -> Result<T, mpsc::RecvTimeoutError> {
 let r = rcv.recv_timeout(StdDuration::new(0, 0));
-if let Err(_) = r {
+if r.is_err() {
 self.sleep(Duration::from_std(timeout).unwrap());
 }
 r
@@ -27,7 +27,7 @@ pub fn encode_size(mut raw: i64) -> String {
 raw &= (1i64 << n) - 1;
 }
 }
-if raw > 0 || encoded.len() == 0 {
+if raw > 0 || encoded.is_empty() {
 write!(&mut encoded, "{}", raw).unwrap();
 } else {
 encoded.pop(); // remove trailing space.
@@ -39,7 +39,7 @@ fn decode_sizepart(input: &str) -> IResult<&str, i64> {
 map(
 tuple((
 map_res(take_while1(|c: char| c.is_ascii_digit()), |input: &str| {
-i64::from_str_radix(input, 10)
+input.parse::<i64>()
 }),
 opt(alt((
 nom::combinator::value(1 << 40, tag("T")),
@@ -13,7 +13,6 @@ use serde::{Deserialize, Serialize};
 use std::fmt;
 use std::ops;
 use std::str::FromStr;
-use time;
 
 type IResult<'a, I, O> = nom::IResult<I, O, nom::error::VerboseError<&'a str>>;
 
@@ -27,7 +26,7 @@ pub struct Time(pub i64);
 fn fixed_len_num<'a>(len: usize) -> impl FnMut(&'a str) -> IResult<&'a str, i32> {
 map_res(
 take_while_m_n(len, len, |c: char| c.is_ascii_digit()),
-|input: &str| i32::from_str_radix(input, 10),
+|input: &str| input.parse::<i32>(),
 )
 }
 
@@ -96,9 +95,8 @@ impl Time {
 /// local time zone.
 pub fn parse(input: &str) -> Result<Self, Error> {
 // First try parsing as 90,000ths of a second since epoch.
-match i64::from_str(input) {
-Ok(i) => return Ok(Time(i)),
-Err(_) => {}
+if let Ok(i) = i64::from_str(input) {
+return Ok(Time(i));
 }
 
 // If that failed, parse as a time string or bust.
@@ -113,7 +111,7 @@ impl Time {
 format_err!("{}", nom::error::convert_error(input, e))
 }
 })?;
-if remaining != "" {
+if !remaining.is_empty() {
 bail!("unexpected suffix {:?} following time string", remaining);
 }
 let (tm_hour, tm_min, tm_sec, subsec) = opt_time.unwrap_or((0, 0, 0, 0));
@@ -210,7 +208,7 @@ impl fmt::Display for Time {
 write!(
 f,
 "{}:{:05}{}{:02}:{:02}",
-tm.strftime("%FT%T").or_else(|_| Err(fmt::Error))?,
+tm.strftime("%FT%T").map_err(|_| fmt::Error)?,
 self.0 % TIME_UNITS_PER_SEC,
 if tm.tm_utcoff > 0 { '+' } else { '-' },
 zone_minutes / 60,
@@ -9,7 +9,6 @@ use base::{bail_t, format_err_t, strutil, ErrorKind, ResultExt};
 use failure::{bail, format_err, Error};
 use fnv::FnvHashMap;
 use lazy_static::lazy_static;
-use libpasta;
 use log::info;
 use parking_lot::Mutex;
 use protobuf::Message;
@@ -239,12 +238,8 @@ impl Session {
 pub struct RawSessionId([u8; 48]);
 
 impl RawSessionId {
-pub fn new() -> Self {
-RawSessionId([0u8; 48])
-}
-
 pub fn decode_base64(input: &[u8]) -> Result<Self, Error> {
-let mut s = RawSessionId::new();
+let mut s = RawSessionId([0u8; 48]);
 let l = ::base64::decode_config_slice(input, ::base64::STANDARD_NO_PAD, &mut s.0[..])?;
 if l != 48 {
 bail!("session id must be 48 bytes");
@@ -625,7 +620,7 @@ impl State {
 sessions: &'s mut FnvHashMap<SessionHash, Session>,
 permissions: Permissions,
 ) -> Result<(RawSessionId, &'s Session), Error> {
-let mut session_id = RawSessionId::new();
+let mut session_id = RawSessionId([0u8; 48]);
 rand.fill(&mut session_id.0).unwrap();
 let mut seed = [0u8; 32];
 rand.fill(&mut seed).unwrap();
@@ -793,7 +788,7 @@ impl State {
 ":id": &id,
 })?;
 }
-for (_, s) in &self.sessions {
+for s in self.sessions.values() {
 if !s.dirty {
 continue;
 }
@@ -813,10 +808,10 @@ impl State {
 ///
 /// See notes there.
 pub fn post_flush(&mut self) {
-for (_, u) in &mut self.users_by_id {
+for u in self.users_by_id.values_mut() {
 u.dirty = false;
 }
-for (_, s) in &mut self.sessions {
+for s in self.sessions.values_mut() {
 s.dirty = false;
 }
 }
@@ -232,7 +232,7 @@ struct Stream {
 type Dir = FnvHashMap<i32, Stream>;
 
 fn summarize_index(video_index: &[u8]) -> Result<RecordingSummary, Error> {
-let mut it = recording::SampleIndexIterator::new();
+let mut it = recording::SampleIndexIterator::default();
 let mut media_duration = 0;
 let mut video_samples = 0;
 let mut video_sync_samples = 0;
@@ -152,17 +152,16 @@ pub struct SignalChange {
 new_state: u16,
 }
 
-#[derive(Clone, Debug, Eq, PartialEq)]
+#[derive(Clone, Debug, Default, Eq, PartialEq)]
 pub struct Map<V: Value>(pub(crate) BTreeMap<Key, V>);
 
 impl<V: Value> Map<V> {
-pub fn new() -> Self {
-Self(BTreeMap::new())
-}
-
 pub fn len(&self) -> usize {
 self.0.len()
 }
+pub fn is_empty(&self) -> bool {
+self.0.is_empty()
+}
 pub fn get(&self, k: &Key) -> Option<&V> {
 self.0.get(k)
 }
@@ -296,9 +295,9 @@ impl Map<SignalValue> {
 self.adjust_day(
 day,
 SignalChange {
+duration,
 old_state,
 new_state,
-duration,
 },
 );
 
@@ -338,7 +337,7 @@ mod tests {
 #[test]
 fn test_adjust_stream() {
 testutil::init();
-let mut m: Map<StreamValue> = Map::new();
+let mut m: Map<StreamValue> = Map::default();
 
 // Create a day.
 let test_time = Time(130647162600000i64); // 2015-12-31 23:59:00 (Pacific).
@@ -446,7 +445,7 @@ mod tests {
 #[test]
 fn test_adjust_signal() {
 testutil::init();
-let mut m: Map<SignalValue> = Map::new();
+let mut m: Map<SignalValue> = Map::default();
 
 let test_time = Time(130646844000000i64); // 2015-12-31 23:00:00 (Pacific).
 let hr = Duration(60 * 60 * TIME_UNITS_PER_SEC);
@@ -64,7 +64,7 @@ pub const EXPECTED_VERSION: i32 = 6;
 /// Make it one less than a power of two so that the data structure's size is efficient.
 const VIDEO_INDEX_CACHE_LEN: usize = 1023;
 
-const GET_RECORDING_PLAYBACK_SQL: &'static str = r#"
+const GET_RECORDING_PLAYBACK_SQL: &str = r#"
 select
 video_index
 from
@@ -73,14 +73,14 @@ const GET_RECORDING_PLAYBACK_SQL: &'static str = r#"
 composite_id = :composite_id
 "#;
 
-const INSERT_VIDEO_SAMPLE_ENTRY_SQL: &'static str = r#"
+const INSERT_VIDEO_SAMPLE_ENTRY_SQL: &str = r#"
 insert into video_sample_entry (width, height, pasp_h_spacing, pasp_v_spacing,
 rfc6381_codec, data)
 values (:width, :height, :pasp_h_spacing, :pasp_v_spacing,
 :rfc6381_codec, :data)
 "#;
 
-const UPDATE_STREAM_COUNTERS_SQL: &'static str = r#"
+const UPDATE_STREAM_COUNTERS_SQL: &str = r#"
 update stream
 set cum_recordings = :cum_recordings,
 cum_media_duration_90k = :cum_media_duration_90k,
@@ -231,6 +231,7 @@ pub struct RecordingPlayback<'a> {
 }
 
 /// Bitmask in the `flags` field in the `recordings` table; see `schema.sql`.
+#[repr(u32)]
 pub enum RecordingFlags {
 TrailingZero = 1,
 
@@ -356,37 +357,37 @@ pub struct Camera {
 
 #[derive(Copy, Clone, Debug, Eq, PartialEq)]
 pub enum StreamType {
-MAIN,
-SUB,
+Main,
+Sub,
 }
 
 impl StreamType {
 pub fn from_index(i: usize) -> Option<Self> {
 match i {
-0 => Some(StreamType::MAIN),
-1 => Some(StreamType::SUB),
+0 => Some(StreamType::Main),
+1 => Some(StreamType::Sub),
 _ => None,
 }
 }
 
 pub fn index(self) -> usize {
 match self {
-StreamType::MAIN => 0,
-StreamType::SUB => 1,
+StreamType::Main => 0,
+StreamType::Sub => 1,
 }
 }
 
 pub fn as_str(self) -> &'static str {
 match self {
-StreamType::MAIN => "main",
-StreamType::SUB => "sub",
+StreamType::Main => "main",
+StreamType::Sub => "sub",
 }
 }
 
 pub fn parse(type_: &str) -> Option<Self> {
 match type_ {
-"main" => Some(StreamType::MAIN),
-"sub" => Some(StreamType::SUB),
+"main" => Some(StreamType::Main),
+"sub" => Some(StreamType::Sub),
 _ => None,
 }
 }
@@ -398,7 +399,7 @@ impl ::std::fmt::Display for StreamType {
 }
 }
 
-pub const ALL_STREAM_TYPES: [StreamType; 2] = [StreamType::MAIN, StreamType::SUB];
+pub const ALL_STREAM_TYPES: [StreamType; 2] = [StreamType::Main, StreamType::Sub];
 
 pub struct Stream {
 pub id: i32,
@@ -708,7 +709,7 @@ impl StreamStateChanger {
 bail!("missing stream {}", sid);
 }
 sids[i] = Some(sid);
-let sc = mem::replace(*sc, StreamChange::default());
+let sc = mem::take(*sc);
 streams.push((sid, Some((camera_id, type_, sc))));
 }
 } else {
@@ -737,7 +738,7 @@ impl StreamStateChanger {
 })?;
 let id = tx.last_insert_rowid() as i32;
 sids[i] = Some(id);
-let sc = mem::replace(*sc, StreamChange::default());
+let sc = mem::take(*sc);
 streams.push((id, Some((camera_id, type_, sc))));
 }
 }
@@ -768,7 +769,7 @@ impl StreamStateChanger {
 bytes_to_add: 0,
 fs_bytes_to_add: 0,
 duration: recording::Duration(0),
-committed_days: days::Map::new(),
+committed_days: days::Map::default(),
 record: sc.record,
 cum_recordings: 0,
 cum_media_duration: recording::Duration(0),
@@ -929,7 +930,7 @@ impl LockedDatabase {
 /// longer connected). This doesn't work when the system is shutting down and nothing more is
 /// sent, though.
 pub fn clear_watches(&mut self) {
-for (_, s) in &mut self.streams_by_id {
+for s in self.streams_by_id.values_mut() {
 s.on_live_segment.clear();
 }
 }
@@ -1229,14 +1230,11 @@ impl LockedDatabase {
 
 /// Gets a given camera by uuid.
 pub fn get_camera(&self, uuid: Uuid) -> Option<&Camera> {
-match self.cameras_by_uuid.get(&uuid) {
-Some(id) => Some(
-self.cameras_by_id
-.get(id)
-.expect("uuid->id requires id->cam"),
-),
-None => None,
-}
+self.cameras_by_uuid.get(&uuid).map(|id| {
+self.cameras_by_id
+.get(id)
+.expect("uuid->id requires id->cam")
+})
 }
 
 /// Lists the specified recordings, passing them to a supplied function. Given that the
@@ -1439,7 +1437,7 @@ impl LockedDatabase {
 trace!("cache hit for recording {}", id);
 occupied.to_back();
 let video_index = occupied.get();
-return f(&RecordingPlayback { video_index });
+f(&RecordingPlayback { video_index })
 }
 RawEntryMut::Vacant(vacant) => {
 trace!("cache miss for recording {}", id);
@@ -1602,7 +1600,7 @@ impl LockedDatabase {
 self.cameras_by_id.insert(
 id,
 Camera {
-id: id,
+id,
 uuid: uuid.0,
 short_name: row.get(2)?,
 description: row.get(3)?,
@@ -1671,7 +1669,7 @@ impl LockedDatabase {
 bytes_to_add: 0,
 fs_bytes_to_add: 0,
 duration: recording::Duration(0),
-committed_days: days::Map::new(),
+committed_days: days::Map::default(),
 cum_recordings: row.get(7)?,
 cum_media_duration: recording::Duration(row.get(8)?),
 cum_runs: row.get(9)?,
@@ -1781,7 +1779,7 @@ impl LockedDatabase {
 garbage_needs_unlink: FnvHashSet::default(),
 garbage_unlinked: Vec::new(),
 }),
-Entry::Occupied(_) => Err(format_err!("duplicate sample file dir id {}", id))?,
+Entry::Occupied(_) => bail!("duplicate sample file dir id {}", id),
 };
 d.last_complete_open = Some(*o);
 mem::swap(&mut meta.last_complete_open, &mut meta.in_progress_open);
@@ -2155,22 +2153,21 @@ pub(crate) fn check_schema_version(conn: &rusqlite::Connection) -> Result<(), Er
 that predates schema versioning, see guide/schema.md."
 )
 })?;
-if ver < EXPECTED_VERSION {
-bail!(
+match ver.cmp(&EXPECTED_VERSION) {
+std::cmp::Ordering::Less => bail!(
 "Database schema version {} is too old (expected {}); \
 see upgrade instructions in guide/upgrade.md.",
 ver,
 EXPECTED_VERSION
-);
-} else if ver > EXPECTED_VERSION {
-bail!(
+),
+std::cmp::Ordering::Equal => Ok(()),
+std::cmp::Ordering::Greater => bail!(
 "Database schema version {} is too new (expected {}); \
 must use a newer binary to match.",
 ver,
 EXPECTED_VERSION
-);
+),
 }
-Ok(())
 }
 
 /// The recording database. Abstracts away SQLite queries. Also maintains in-memory state
@@ -355,9 +355,9 @@ pub(crate) fn parse_id(id: &[u8]) -> Result<CompositeId, ()> {
 return Err(());
 }
 let mut v: u64 = 0;
-for i in 0..16 {
+for b in id {
 v = (v << 4)
-| match id[i] {
+| match b {
 b @ b'0'..=b'9' => b - b'0',
 b @ b'a'..=b'f' => b - b'a' + 10,
 _ => return Err(()),
@@ -13,7 +13,7 @@ use std::ops::Range;
 use uuid::Uuid;
 
 // Note: the magic number "27000000" below is recording::MAX_RECORDING_DURATION.
-const LIST_RECORDINGS_BY_TIME_SQL: &'static str = r#"
+const LIST_RECORDINGS_BY_TIME_SQL: &str = r#"
 select
 recording.composite_id,
 recording.run_offset,
@@ -37,7 +37,7 @@ const LIST_RECORDINGS_BY_TIME_SQL: &'static str = r#"
 recording.start_time_90k
 "#;
 
-const LIST_RECORDINGS_BY_ID_SQL: &'static str = r#"
+const LIST_RECORDINGS_BY_ID_SQL: &str = r#"
 select
 recording.composite_id,
 recording.run_offset,
@@ -61,7 +61,7 @@ const LIST_RECORDINGS_BY_ID_SQL: &'static str = r#"
 recording.composite_id
 "#;
 
-const STREAM_MIN_START_SQL: &'static str = r#"
+const STREAM_MIN_START_SQL: &str = r#"
 select
 start_time_90k
 from
@@ -71,7 +71,7 @@ const STREAM_MIN_START_SQL: &'static str = r#"
 order by start_time_90k limit 1
 "#;
 
-const STREAM_MAX_START_SQL: &'static str = r#"
+const STREAM_MAX_START_SQL: &str = r#"
 select
 start_time_90k,
 wall_duration_90k
@@ -82,7 +82,7 @@ const STREAM_MAX_START_SQL: &'static str = r#"
 order by start_time_90k desc;
 "#;
 
-const LIST_OLDEST_RECORDINGS_SQL: &'static str = r#"
+const LIST_OLDEST_RECORDINGS_SQL: &str = r#"
 select
 composite_id,
 start_time_90k,
@@ -50,7 +50,7 @@ pub fn rescale(from_off_90k: i32, from_duration_90k: i32, to_duration_90k: i32)
 
 /// An iterator through a sample index (as described in `design/recording.md`).
 /// Initially invalid; call `next()` before each read.
-#[derive(Clone, Copy, Debug)]
+#[derive(Clone, Copy, Debug, Default)]
 pub struct SampleIndexIterator {
 /// The index byte position of the next sample to read (low 31 bits) and if the current
 /// same is a key frame (high bit).
@@ -74,17 +74,6 @@ pub struct SampleIndexIterator {
 }
 
 impl SampleIndexIterator {
-pub fn new() -> SampleIndexIterator {
-SampleIndexIterator {
-i_and_is_key: 0,
-pos: 0,
-start_90k: 0,
-duration_90k: 0,
-bytes: 0,
-bytes_other: 0,
-}
-}
-
 pub fn next(&mut self, data: &[u8]) -> Result<bool, Error> {
 self.pos += self.bytes;
 self.start_90k += self.duration_90k;
@@ -147,7 +136,7 @@ impl SampleIndexIterator {
 }
 
 /// An encoder for a sample index (as described in `design/recording.md`).
-#[derive(Debug)]
+#[derive(Debug, Default)]
 pub struct SampleIndexEncoder {
 prev_duration_90k: i32,
 prev_bytes_key: i32,
@@ -155,14 +144,6 @@ pub struct SampleIndexEncoder {
 }
 
 impl SampleIndexEncoder {
-pub fn new() -> Self {
-SampleIndexEncoder {
-prev_duration_90k: 0,
-prev_bytes_key: 0,
-prev_bytes_nonkey: 0,
-}
-}
-
 pub fn add_sample(
 &mut self,
 duration_90k: i32,
@@ -205,7 +186,7 @@ pub struct Segment {
 
 /// An iterator positioned at the beginning of the segment, or `None`. Most segments are
 /// positioned at the beginning of the recording, so this is an optional box to shrink a long
-/// of segments. `None` is equivalent to `SampleIndexIterator::new()`.
+/// of segments. `None` is equivalent to `SampleIndexIterator::default()`.
 begin: Option<Box<SampleIndexIterator>>,
 pub file_end: i32,
 
@@ -246,6 +227,7 @@ impl Segment {
 << 31),
 };
 
+#[allow(clippy::suspicious_operation_groupings)]
 if desired_media_range_90k.start > desired_media_range_90k.end
 || desired_media_range_90k.end > recording.media_duration_90k
 {
@@ -275,9 +257,9 @@ impl Segment {
 recording
 );
 db.with_recording_playback(self_.id, &mut |playback| {
-let mut begin = Box::new(SampleIndexIterator::new());
-let data = &(&playback).video_index;
-let mut it = SampleIndexIterator::new();
+let mut begin = Box::new(SampleIndexIterator::default());
+let data = &playback.video_index;
+let mut it = SampleIndexIterator::default();
 if !it.next(data)? {
 bail!("no index");
 }
@@ -352,11 +334,11 @@ impl Segment {
 self.frames,
 self.actual_start_90k()
 );
-let data = &(&playback).video_index;
+let data = &playback.video_index;
 let mut it = match self.begin {
 Some(ref b) => **b,
 None => {
-let mut it = SampleIndexIterator::new();
+let mut it = SampleIndexIterator::default();
 if !it.next(data)? {
 bail!("recording {} has no frames", self.id);
 }
@@ -434,7 +416,7 @@ mod tests {
 fn test_encode_example() {
 testutil::init();
 let mut r = db::RecordingToInsert::default();
-let mut e = SampleIndexEncoder::new();
+let mut e = SampleIndexEncoder::default();
 e.add_sample(10, 1000, true, &mut r);
 e.add_sample(9, 10, false, &mut r);
 e.add_sample(11, 15, false, &mut r);
@@ -468,11 +450,11 @@ mod tests {
 Sample { duration_90k: 0, bytes: 1000, is_key: false, },
 ];
 let mut r = db::RecordingToInsert::default();
-let mut e = SampleIndexEncoder::new();
+let mut e = SampleIndexEncoder::default();
 for sample in &samples {
 e.add_sample(sample.duration_90k, sample.bytes, sample.is_key, &mut r);
 }
-let mut it = SampleIndexIterator::new();
+let mut it = SampleIndexIterator::default();
 for sample in &samples {
 assert!(it.next(&r.video_index).unwrap());
 assert_eq!(
@@ -519,7 +501,7 @@ mod tests {
 },
 ];
 for test in &tests {
-let mut it = SampleIndexIterator::new();
+let mut it = SampleIndexIterator::default();
 assert_eq!(it.next(test.encoded).unwrap_err().to_string(), test.err);
 }
 }
@@ -546,7 +528,7 @@ mod tests {
 fn test_segment_clipping_with_all_sync() {
 testutil::init();
 let mut r = db::RecordingToInsert::default();
-let mut encoder = SampleIndexEncoder::new();
+let mut encoder = SampleIndexEncoder::default();
 for i in 1..6 {
 let duration_90k = 2 * i;
 let bytes = 3 * i;
@@ -568,7 +550,7 @@ mod tests {
 fn test_segment_clipping_with_half_sync() {
 testutil::init();
 let mut r = db::RecordingToInsert::default();
-let mut encoder = SampleIndexEncoder::new();
+let mut encoder = SampleIndexEncoder::default();
 for i in 1..6 {
 let duration_90k = 2 * i;
 let bytes = 3 * i;
@@ -586,7 +568,7 @@ mod tests {
 fn test_segment_clipping_with_trailing_zero() {
 testutil::init();
 let mut r = db::RecordingToInsert::default();
-let mut encoder = SampleIndexEncoder::new();
+let mut encoder = SampleIndexEncoder::default();
 encoder.add_sample(1, 1, true, &mut r);
 encoder.add_sample(1, 2, true, &mut r);
 encoder.add_sample(0, 3, true, &mut r);
@@ -601,7 +583,7 @@ mod tests {
 fn test_segment_zero_desired_duration() {
 testutil::init();
 let mut r = db::RecordingToInsert::default();
-let mut encoder = SampleIndexEncoder::new();
+let mut encoder = SampleIndexEncoder::default();
 encoder.add_sample(1, 1, true, &mut r);
 let db = TestDb::new(RealClocks {});
 let row = db.insert_recording_from_encoder(r);
@@ -615,7 +597,7 @@ mod tests {
 fn test_segment_fast_path() {
 testutil::init();
 let mut r = db::RecordingToInsert::default();
-let mut encoder = SampleIndexEncoder::new();
+let mut encoder = SampleIndexEncoder::default();
 for i in 1..6 {
 let duration_90k = 2 * i;
 let bytes = 3 * i;
@@ -634,7 +616,7 @@ mod tests {
 fn test_segment_fast_path_with_trailing_zero() {
 testutil::init();
 let mut r = db::RecordingToInsert::default();
-let mut encoder = SampleIndexEncoder::new();
+let mut encoder = SampleIndexEncoder::default();
 encoder.add_sample(1, 1, true, &mut r);
 encoder.add_sample(1, 2, true, &mut r);
 encoder.add_sample(0, 3, true, &mut r);
@@ -659,7 +641,7 @@ mod bench {
 let data = include_bytes!("testdata/video_sample_index.bin");
 b.bytes = data.len() as u64;
 b.iter(|| {
-let mut it = SampleIndexIterator::new();
+let mut it = SampleIndexIterator::default();
 while it.next(data).unwrap() {}
 assert_eq!(30104460, it.pos);
 assert_eq!(5399985, it.start_90k);
|
@@ -167,7 +167,7 @@ impl<'a> PointDataIterator<'a> {
 Ok(Some((signal, state as u16)))
 }
 
-fn to_map(mut self) -> Result<BTreeMap<u32, u16>, Error> {
+fn into_map(mut self) -> Result<BTreeMap<u32, u16>, Error> {
 let mut out = BTreeMap::new();
 while let Some((signal, state)) = self.next()? {
 out.insert(signal, state);
@@ -291,7 +291,7 @@ impl State {
 fn gc(&mut self) {
 let max = match self.max_signal_changes {
 None => return,
-Some(m) if m < 0 => 0 as usize,
+Some(m) if m < 0 => 0_usize,
 Some(m) if m > (isize::max_value() as i64) => return,
 Some(m) => m as usize,
 };
@@ -311,7 +311,7 @@ impl State {
 .points_by_time
 .keys()
 .take(to_remove)
-.map(|t| *t)
+.copied()
 .collect();
 
 for t in &remove {
@@ -406,7 +406,7 @@ impl State {
 if let Some((&t, ref mut p)) = self.points_by_time.range_mut(..=when.end).next_back() {
 if t == when.end {
 // Already have a point at end. Adjust it. prev starts unchanged...
-prev = p.prev().to_map().expect("in-mem prev is valid");
+prev = p.prev().into_map().expect("in-mem prev is valid");
 
 // ...and then prev and changes are altered to reflect the desired update.
 State::update_signals_end_maps(
@@ -505,8 +505,8 @@ impl State {
 if let Some((&t, ref mut p)) = self.points_by_time.range_mut(..=start).next_back() {
 if t == start {
 // Reuse existing point at start.
-prev = p.prev().to_map().expect("in-mem prev is valid");
-let mut changes = p.changes().to_map().expect("in-mem changes is valid");
+prev = p.prev().into_map().expect("in-mem prev is valid");
+let mut changes = p.changes().into_map().expect("in-mem changes is valid");
 let mut dirty = false;
 for (&signal, &state) in signals.iter().zip(states) {
 match changes.entry(signal) {
@@ -570,7 +570,7 @@ impl State {
 let after_start = recording::Time(when.start.0 + 1);
 let mut prev_t = when.start;
 for (&t, ref mut p) in self.points_by_time.range_mut(after_start..when.end) {
-let mut prev = p.prev().to_map().expect("in-mem prev is valid");
+let mut prev = p.prev().into_map().expect("in-mem prev is valid");
 
 // Update prev to reflect desired update; likewise each signal's days index.
 for (&signal, &state) in signals.iter().zip(states) {
@@ -691,7 +691,7 @@ impl State {
 type_: type_.0,
 short_name: row.get(3)?,
 cameras: Vec::new(),
-days: days::Map::new(),
+days: days::Map::default(),
 },
 );
 }
@@ -837,7 +837,7 @@ impl State {
 fn debug_assert_point_invariants(&self) {
 let mut expected_prev = BTreeMap::new();
 for (t, p) in self.points_by_time.iter() {
-let cur = p.prev().to_map().expect("in-mem prev is valid");
+let cur = p.prev().into_map().expect("in-mem prev is valid");
 assert_eq!(&expected_prev, &cur, "time {} prev mismatch", t);
 p.changes().update_map(&mut expected_prev);
 }
@@ -973,7 +973,7 @@ mod tests {
 &mut |_r| panic!("no changes expected"),
 );
 assert_eq!(&rows[..], EXPECTED);
-let mut expected_days = days::Map::new();
+let mut expected_days = days::Map::default();
 expected_days.0.insert(
 days::Key(*b"2019-04-26"),
 days::SignalValue {
|
@@ -10,13 +10,10 @@ use crate::dir;
 use crate::writer;
 use base::clock::Clocks;
 use fnv::FnvHashMap;
-use mylog;
-use rusqlite;
 use std::env;
 use std::sync::Arc;
 use std::thread;
 use tempfile::TempDir;
-use time;
 use uuid::Uuid;
 
 static INIT: parking_lot::Once = parking_lot::Once::new();
@@ -42,7 +39,7 @@ pub const TEST_VIDEO_SAMPLE_ENTRY_DATA: &[u8] =
 pub fn init() {
 INIT.call_once(|| {
 let h = mylog::Builder::new()
-.set_spec(&::std::env::var("MOONFIRE_LOG").unwrap_or("info".to_owned()))
+.set_spec(&::std::env::var("MOONFIRE_LOG").unwrap_or_else(|_| "info".to_owned()))
 .build();
 h.install().unwrap();
 env::set_var("TZ", "America/Los_Angeles");
@@ -80,7 +77,7 @@ impl<C: Clocks + Clone> TestDb<C> {
 let dir;
 {
 let mut l = db.lock();
-sample_file_dir_id = l.add_sample_file_dir(path.to_owned()).unwrap();
+sample_file_dir_id = l.add_sample_file_dir(path).unwrap();
 assert_eq!(
 TEST_CAMERA_ID,
 l.add_camera(db::CameraChange {
@@ -116,7 +113,7 @@ impl<C: Clocks + Clone> TestDb<C> {
 .unwrap();
 }
 let mut dirs_by_stream_id = FnvHashMap::default();
-dirs_by_stream_id.insert(TEST_STREAM_ID, dir.clone());
+dirs_by_stream_id.insert(TEST_STREAM_ID, dir);
 let (syncer_channel, syncer_join) =
 writer::start_syncer(db.clone(), sample_file_dir_id).unwrap();
 TestDb {
@@ -22,8 +22,7 @@ mod v3_to_v4;
 mod v4_to_v5;
 mod v5_to_v6;
 
-const UPGRADE_NOTES: &'static str =
-concat!("upgraded using moonfire-db ", env!("CARGO_PKG_VERSION"));
+const UPGRADE_NOTES: &str = concat!("upgraded using moonfire-db ", env!("CARGO_PKG_VERSION"));
 
 #[derive(Debug)]
 pub struct Args<'a> {
@@ -99,7 +99,7 @@ struct CameraState {
 }
 
 fn has_trailing_zero(video_index: &[u8]) -> Result<bool, Error> {
-let mut it = recording::SampleIndexIterator::new();
+let mut it = recording::SampleIndexIterator::default();
 while it.next(video_index)? {}
 Ok(it.duration_90k == 0)
 }
@@ -77,7 +77,7 @@ pub fn run(_args: &super::Args, tx: &rusqlite::Transaction) -> Result<(), Error>
 if e == nix::Error::Sys(nix::errno::Errno::ENOENT) {
 continue; // assume it was already moved.
 }
-Err(e)?;
+return Err(e.into());
 }
 }
 
@@ -358,7 +358,7 @@ impl<C: Clocks + Clone> Syncer<C, Arc<dir::SampleFileDir>> {
 /// Called from main thread.
 fn initial_rotation(&mut self) -> Result<(), Error> {
 self.do_rotation(|db| {
-let streams: Vec<i32> = db.streams_by_id().keys().map(|&id| id).collect();
+let streams: Vec<i32> = db.streams_by_id().keys().copied().collect();
 for &stream_id in &streams {
 delete_recordings(db, stream_id, 0)?;
 }
@@ -379,7 +379,7 @@ impl<C: Clocks + Clone> Syncer<C, Arc<dir::SampleFileDir>> {
 let mut garbage: Vec<_> = {
 let l = self.db.lock();
 let d = l.sample_file_dirs_by_id().get(&self.dir_id).unwrap();
-d.garbage_needs_unlink.iter().map(|id| *id).collect()
+d.garbage_needs_unlink.iter().copied().collect()
 };
 if !garbage.is_empty() {
 // Try to delete files; retain ones in `garbage` that don't exist.
@@ -422,7 +422,9 @@ impl<C: Clocks + Clone, D: DirWriter> Syncer<C, D> {
 let now = self.db.clocks().monotonic();
 
 // Calculate the timeout to use, mapping negative durations to 0.
-let timeout = (t - now).to_std().unwrap_or(StdDuration::new(0, 0));
+let timeout = (t - now)
+.to_std()
+.unwrap_or_else(|_| StdDuration::new(0, 0));
 match self.db.clocks().recv_timeout(&cmds, timeout) {
 Err(mpsc::RecvTimeoutError::Disconnected) => return false, // cmd senders gone.
 Err(mpsc::RecvTimeoutError::Timeout) => {
@@ -456,7 +458,7 @@ impl<C: Clocks + Clone, D: DirWriter> Syncer<C, D> {
 let mut garbage: Vec<_> = {
 let l = self.db.lock();
 let d = l.sample_file_dirs_by_id().get(&self.dir_id).unwrap();
-d.garbage_needs_unlink.iter().map(|id| *id).collect()
+d.garbage_needs_unlink.iter().copied().collect()
 };
 if garbage.is_empty() {
 return;
@@ -691,7 +693,7 @@ impl<'a, C: Clocks + Clone, D: DirWriter> Writer<'a, C, D> {
 self.state = WriterState::Open(InnerWriter {
 f,
 r,
-e: recording::SampleIndexEncoder::new(),
+e: recording::SampleIndexEncoder::default(),
 id,
 hasher: blake3::Hasher::new(),
 local_start: recording::Time(i64::max_value()),
@@ -878,7 +880,7 @@ impl<F: FileWriter> InnerWriter<F> {
 let mut l = self.r.lock();
 l.flags = flags;
 l.local_time_delta = self.local_start - l.start;
-l.sample_file_blake3 = Some(blake3.as_bytes().clone());
+l.sample_file_blake3 = Some(*blake3.as_bytes());
 wall_duration = recording::Duration(i64::from(l.wall_duration_90k));
 run_offset = l.run_offset;
 end = l.start + wall_duration;
@@ -40,13 +40,13 @@ impl From<&'static str> for Chunk {
 
 impl From<String> for Chunk {
 fn from(r: String) -> Self {
-Chunk(ARefss::new(r.into_bytes()).map(|v| &v[..]))
+Chunk(ARefss::new(r.into_bytes()))
 }
 }
 
 impl From<Vec<u8>> for Chunk {
 fn from(r: Vec<u8>) -> Self {
-Chunk(ARefss::new(r).map(|v| &v[..]))
+Chunk(ARefss::new(r))
 }
 }
 
@@ -279,7 +279,7 @@ fn lower_retention(
 db: &Arc<db::Database>,
 zero_limits: BTreeMap<i32, Vec<writer::NewLimit>>,
 ) -> Result<(), Error> {
-let dirs_to_open: Vec<_> = zero_limits.keys().map(|id| *id).collect();
+let dirs_to_open: Vec<_> = zero_limits.keys().copied().collect();
 db.lock().open_sample_file_dirs(&dirs_to_open[..])?;
 for (&dir_id, l) in &zero_limits {
 writer::lower_retention(db.clone(), dir_id, &l)?;
@@ -358,7 +358,7 @@ fn edit_camera_dialog(db: &Arc<db::Database>, siv: &mut Cursive, item: &Option<i
 .child(
 "sample file dir",
 views::SelectView::<Option<i32>>::new()
-.with_all(dirs.iter().map(|d| d.clone()))
+.with_all(dirs.iter().cloned())
 .popup()
 .with_name(format!("{}_sample_file_dir", type_.as_str())),
 )
@@ -408,10 +408,7 @@ fn edit_dir_dialog(db: &Arc<db::Database>, siv: &mut Cursive, dir_id: i32) {
 .child(views::DummyView {}.fixed_width(20))
 .child(views::TextView::new(encode_size(model.borrow().fs_capacity)).fixed_width(25)),
 );
-let mut change_button = views::Button::new("Change", {
-let model = model.clone();
-move |siv| press_change(&model, siv)
-});
+let mut change_button = views::Button::new("Change", move |siv| press_change(&model, siv));
 change_button.set_enabled(!over);
 let mut buttons = views::LinearLayout::horizontal().child(views::DummyView.full_width());
 buttons.add_child(change_button.with_name("change"));
@@ -10,7 +10,6 @@
 use base::clock;
 use cursive::views;
 use cursive::Cursive;
-use db;
 use failure::Error;
 use std::path::PathBuf;
 use std::sync::Arc;
@@ -43,10 +42,7 @@ pub fn run(args: &Args) -> Result<i32, Error> {
 siv.add_layer(
 views::Dialog::around(
 views::SelectView::<fn(&Arc<db::Database>, &mut Cursive)>::new()
-.on_submit({
-let db = db.clone();
-move |siv, item| item(&db, siv)
-})
+.on_submit(move |siv, item| item(&db, siv))
 .item("Cameras and streams".to_string(), cameras::top_dialog)
 .item("Directories and retention".to_string(), dirs::top_dialog)
 .item("Users".to_string(), users::top_dialog),
@@ -126,12 +126,12 @@ fn edit_user_dialog(db: &Arc<db::Database>, siv: &mut Cursive, item: Option<i32>
 {
 let l = db.lock();
 let u = item.map(|id| l.users_by_id().get(&id).unwrap());
-username = u.map(|u| u.username.clone()).unwrap_or(String::new());
-id_str = item.map(|id| id.to_string()).unwrap_or("<new>".to_string());
+username = u.map(|u| u.username.clone()).unwrap_or_default();
+id_str = item
+.map(|id| id.to_string())
+.unwrap_or_else(|| "<new>".to_string());
 has_password = u.map(|u| u.has_password()).unwrap_or(false);
-permissions = u
-.map(|u| u.permissions.clone())
-.unwrap_or(db::Permissions::default());
+permissions = u.map(|u| u.permissions.clone()).unwrap_or_default();
 }
 let top_list = views::ListView::new()
 .child("id", views::TextView::new(id_str))
@ -56,7 +56,7 @@ pub struct Args {
|
|||||||
pub fn run(args: &Args) -> Result<i32, Error> {
|
pub fn run(args: &Args) -> Result<i32, Error> {
|
||||||
let clocks = clock::RealClocks {};
|
let clocks = clock::RealClocks {};
|
||||||
let (_db_dir, conn) = super::open_conn(&args.db_dir, super::OpenMode::ReadWrite)?;
|
let (_db_dir, conn) = super::open_conn(&args.db_dir, super::OpenMode::ReadWrite)?;
|
||||||
let db = std::sync::Arc::new(db::Database::new(clocks.clone(), conn, true).unwrap());
|
let db = std::sync::Arc::new(db::Database::new(clocks, conn, true).unwrap());
|
||||||
let mut l = db.lock();
|
let mut l = db.lock();
|
||||||
let u = l
|
let u = l
|
||||||
.get_user(&args.username)
|
.get_user(&args.username)
|
||||||
@ -72,7 +72,6 @@ pub fn run(args: &Args) -> Result<i32, Error> {
|
|||||||
flags |= *f as i32;
|
flags |= *f as i32;
|
||||||
}
|
}
|
||||||
let uid = u.id;
|
let uid = u.id;
|
||||||
drop(u);
|
|
||||||
let (sid, _) = l.make_session(
|
let (sid, _) = l.make_session(
|
||||||
creation,
|
creation,
|
||||||
uid,
|
uid,
|
||||||
|
@ -6,7 +6,6 @@ use db::dir;
|
|||||||
use failure::{Error, Fail};
|
use failure::{Error, Fail};
|
||||||
use log::info;
|
use log::info;
|
||||||
use nix::fcntl::FlockArg;
|
use nix::fcntl::FlockArg;
|
||||||
use rusqlite;
|
|
||||||
use std::path::Path;
|
use std::path::Path;
|
||||||
|
|
||||||
pub mod check;
|
pub mod check;
|
||||||
|
@ -17,7 +17,6 @@ use std::sync::atomic::{AtomicBool, Ordering};
|
|||||||
use std::sync::Arc;
|
use std::sync::Arc;
|
||||||
use std::thread;
|
use std::thread;
|
||||||
use structopt::StructOpt;
|
use structopt::StructOpt;
|
||||||
use tokio;
|
|
||||||
use tokio::signal::unix::{signal, SignalKind};
|
use tokio::signal::unix::{signal, SignalKind};
|
||||||
|
|
||||||
#[derive(StructOpt)]
|
#[derive(StructOpt)]
|
||||||
@ -76,20 +75,20 @@ pub struct Args {
|
|||||||
|
|
||||||
// These are used in a hack to get the name of the current time zone (e.g. America/Los_Angeles).
|
// These are used in a hack to get the name of the current time zone (e.g. America/Los_Angeles).
|
||||||
// They seem to be correct for Linux and macOS at least.
|
// They seem to be correct for Linux and macOS at least.
|
||||||
const LOCALTIME_PATH: &'static str = "/etc/localtime";
|
const LOCALTIME_PATH: &str = "/etc/localtime";
|
||||||
const TIMEZONE_PATH: &'static str = "/etc/timezone";
|
const TIMEZONE_PATH: &str = "/etc/timezone";
|
||||||
const ZONEINFO_PATHS: [&'static str; 2] = [
|
const ZONEINFO_PATHS: [&str; 2] = [
|
||||||
"/usr/share/zoneinfo/", // Linux, macOS < High Sierra
|
"/usr/share/zoneinfo/", // Linux, macOS < High Sierra
|
||||||
"/var/db/timezone/zoneinfo/", // macOS High Sierra
|
"/var/db/timezone/zoneinfo/", // macOS High Sierra
|
||||||
];
|
];
|
||||||
|
|
||||||
fn trim_zoneinfo(p: &str) -> &str {
|
fn trim_zoneinfo(path: &str) -> &str {
|
||||||
for zp in &ZONEINFO_PATHS {
|
for zp in &ZONEINFO_PATHS {
|
||||||
if p.starts_with(zp) {
|
if let Some(p) = path.strip_prefix(zp) {
|
||||||
return &p[zp.len()..];
|
return p;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
return p;
|
path
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Attempt to resolve the timezone of the server.
|
/// Attempt to resolve the timezone of the server.
|
||||||
@@ -145,7 +144,7 @@ fn resolve_zone() -> Result<String, Error> {

 // If `TIMEZONE_PATH` is a file, use its contents as the zone name.
 match ::std::fs::read_to_string(TIMEZONE_PATH) {
-Ok(z) => return Ok(z),
+Ok(z) => Ok(z),
 Err(e) => {
 bail!(
 "Unable to resolve timezone from TZ env, {}, or {}. Last error: {}",
@@ -174,7 +173,7 @@ pub async fn run(args: &Args) -> Result<i32, Error> {
 super::OpenMode::ReadWrite
 },
 )?;
-let db = Arc::new(db::Database::new(clocks.clone(), conn, !args.read_only).unwrap());
+let db = Arc::new(db::Database::new(clocks, conn, !args.read_only).unwrap());
 info!("Database is loaded.");

 let object_detector = match args.object_detection {
@@ -260,7 +259,7 @@ pub async fn run(args: &Args) -> Result<i32, Error> {
 let rotate_offset_sec = streamer::ROTATE_INTERVAL_SEC * i as i64 / streams as i64;
 let syncer = syncers.get(&sample_file_dir_id).unwrap();
 let object_detector = match stream.type_ {
-db::StreamType::SUB => object_detector.clone(),
+db::StreamType::Sub => object_detector.clone(),
 _ => None,
 };
 let mut streamer = streamer::Streamer::new(
@@ -45,10 +45,7 @@ pub fn run(args: &Args) -> Result<i32, Error> {

 db::upgrade::run(
 &db::upgrade::Args {
-sample_file_dir: args
-.sample_file_dir
-.as_ref()
-.map(std::path::PathBuf::as_path),
+sample_file_dir: args.sample_file_dir.as_deref(),
 preset_journal: &args.preset_journal,
 no_vacuum: args.no_vacuum,
 },
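The `sample_file_dir` simplification works because `Option::as_deref` turns an `Option<PathBuf>` into an `Option<&Path>` in one call, which is what the longer `as_ref().map(PathBuf::as_path)` chain did. A small self-contained sketch (the path value is made up):

    use std::path::{Path, PathBuf};

    fn main() {
        let dir: Option<PathBuf> = Some(PathBuf::from("/example/sample-dir"));

        // Spelled out: borrow the option, then convert each PathBuf to &Path.
        let long: Option<&Path> = dir.as_ref().map(PathBuf::as_path);

        // Equivalent, shorter form used in the hunk above.
        let short: Option<&Path> = dir.as_deref();

        assert_eq!(long, short);
    }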
@@ -208,7 +208,7 @@ impl<'a> Camera<'a> {
 {
 let mut map = serializer.serialize_map(Some(streams.len()))?;
 for (i, s) in streams.iter().enumerate() {
-if let &Some(ref s) = s {
+if let Some(ref s) = *s {
 map.serialize_key(
 db::StreamType::from_index(i)
 .expect("invalid stream type index")
@@ -397,10 +397,9 @@ impl<'a> TopLevel<'a> {
 let (db, include_days, include_config) = *cameras;
 let cs = db.cameras_by_id();
 let mut seq = serializer.serialize_seq(Some(cs.len()))?;
-for (_, c) in cs {
+for c in cs.values() {
 seq.serialize_element(
-&Camera::wrap(c, db, include_days, include_config)
-.map_err(|e| S::Error::custom(e))?,
+&Camera::wrap(c, db, include_days, include_config).map_err(S::Error::custom)?,
 )?;
 }
 seq.end()
@@ -417,7 +416,7 @@ impl<'a> TopLevel<'a> {
 let (db, include_days) = *signals;
 let ss = db.signals_by_id();
 let mut seq = serializer.serialize_seq(Some(ss.len()))?;
-for (_, s) in ss {
+for s in ss.values() {
 seq.serialize_element(&Signal::wrap(s, db, include_days))?;
 }
 seq.end()
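The two `for (_, x) in map`-style loops above are rewritten per clippy's `for_kv_map` lint: when the key is ignored, iterating `.values()` states the intent directly. A tiny illustration with a stand-in map (the real `cameras_by_id()` / `signals_by_id()` return types live in the `db` crate and are not shown in this diff):

    use std::collections::BTreeMap;

    fn main() {
        let mut by_id: BTreeMap<i32, &str> = BTreeMap::new();
        by_id.insert(1, "driveway");
        by_id.insert(2, "garage");

        // Flagged: binds a key only to discard it.
        for (_, name) in &by_id {
            println!("{}", name);
        }

        // Preferred: iterate the values directly.
        for name in by_id.values() {
            println!("{}", name);
        }
    }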
@@ -160,7 +160,7 @@ fn main() {
 .and_then(|s| mylog::ColorMode::from_str(&s))
 .unwrap_or(mylog::ColorMode::Auto),
 )
-.set_spec(&::std::env::var("MOONFIRE_LOG").unwrap_or("info".to_owned()))
+.set_spec(&::std::env::var("MOONFIRE_LOG").unwrap_or_else(|_| "info".to_owned()))
 .build();
 h.clone().install().unwrap();
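The `MOONFIRE_LOG` line is the usual fix for clippy's `or_fun_call`: the argument to `unwrap_or` is evaluated even when the `Ok` value is present, so the `"info".to_owned()` allocation is moved into an `unwrap_or_else` closure and only runs on `Err`. A minimal sketch (`EXAMPLE_LOG` is a made-up variable name):

    fn main() {
        // Eager: the fallback String is built unconditionally.
        let eager = std::env::var("EXAMPLE_LOG").unwrap_or("info".to_owned());

        // Lazy: the fallback is only built when the variable is unset or invalid.
        let lazy = std::env::var("EXAMPLE_LOG").unwrap_or_else(|_| "info".to_owned());

        assert_eq!(eager, lazy);
    }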
@@ -63,12 +63,9 @@ use db::dir;
 use db::recording::{self, rescale, TIME_UNITS_PER_SEC};
 use futures::stream;
 use futures::Stream;
-use http;
 use http::header::HeaderValue;
-use http_serve;
 use hyper::body::Buf;
 use log::{debug, error, trace, warn};
-use memmap;
 use parking_lot::Once;
 use reffers::ARefss;
 use smallvec::SmallVec;
@@ -88,7 +85,7 @@ use std::time::SystemTime;
 const FORMAT_VERSION: [u8; 1] = [0x08];

 /// An `ftyp` (ISO/IEC 14496-12 section 4.3 `FileType`) box.
-const NORMAL_FTYP_BOX: &'static [u8] = &[
+const NORMAL_FTYP_BOX: &[u8] = &[
 0x00, 0x00, 0x00, 0x20, // length = 32, sizeof(NORMAL_FTYP_BOX)
 b'f', b't', b'y', b'p', // type
 b'i', b's', b'o', b'm', // major_brand

@@ -105,7 +102,7 @@ const NORMAL_FTYP_BOX: &'static [u8] = &[
 /// (8.8.7.1) cannot be set where a file is marked with [the avc1 brand]."
 /// Note that Safari insists there be a compatible brand set in this list. The
 /// major brand is not enough.
-const INIT_SEGMENT_FTYP_BOX: &'static [u8] = &[
+const INIT_SEGMENT_FTYP_BOX: &[u8] = &[
 0x00, 0x00, 0x00, 0x14, // length = 20, sizeof(INIT_SEGMENT_FTYP_BOX)
 b'f', b't', b'y', b'p', // type
 b'i', b's', b'o', b'5', // major_brand

@@ -114,7 +111,7 @@ const INIT_SEGMENT_FTYP_BOX: &'static [u8] = &[
 ];

 /// An `hdlr` (ISO/IEC 14496-12 section 8.4.3 `HandlerBox`) box suitable for video.
-const VIDEO_HDLR_BOX: &'static [u8] = &[
+const VIDEO_HDLR_BOX: &[u8] = &[
 0x00, 0x00, 0x00, 0x21, // length == sizeof(kHdlrBox)
 b'h', b'd', b'l', b'r', // type == hdlr, ISO/IEC 14496-12 section 8.4.3.
 0x00, 0x00, 0x00, 0x00, // version + flags

@@ -127,7 +124,7 @@ const VIDEO_HDLR_BOX: &'static [u8] = &[
 ];

 /// An `hdlr` (ISO/IEC 14496-12 section 8.4.3 `HandlerBox`) box suitable for subtitles.
-const SUBTITLE_HDLR_BOX: &'static [u8] = &[
+const SUBTITLE_HDLR_BOX: &[u8] = &[
 0x00, 0x00, 0x00, 0x21, // length == sizeof(kHdlrBox)
 b'h', b'd', b'l', b'r', // type == hdlr, ISO/IEC 14496-12 section 8.4.3.
 0x00, 0x00, 0x00, 0x00, // version + flags

@@ -141,7 +138,7 @@ const SUBTITLE_HDLR_BOX: &'static [u8] = &[

 /// Part of an `mvhd` (`MovieHeaderBox` version 0, ISO/IEC 14496-12 section 8.2.2), used from
 /// `append_mvhd`.
-const MVHD_JUNK: &'static [u8] = &[
+const MVHD_JUNK: &[u8] = &[
 0x00, 0x01, 0x00, 0x00, // rate
 0x01, 0x00, // volume
 0x00, 0x00, // reserved

@@ -166,7 +163,7 @@ const MVHD_JUNK: &'static [u8] = &[

 /// Part of a `tkhd` (`TrackHeaderBox` version 0, ISO/IEC 14496-12 section 8.3.2), used from
 /// `append_video_tkhd` and `append_subtitle_tkhd`.
-const TKHD_JUNK: &'static [u8] = &[
+const TKHD_JUNK: &[u8] = &[
 0x00, 0x00, 0x00, 0x00, // reserved
 0x00, 0x00, 0x00, 0x00, // reserved
 0x00, 0x00, 0x00, 0x00, // layer + alternate_group

@@ -184,7 +181,7 @@ const TKHD_JUNK: &'static [u8] = &[

 /// Part of a `minf` (`MediaInformationBox`, ISO/IEC 14496-12 section 8.4.4), used from
 /// `append_video_minf`.
-const VIDEO_MINF_JUNK: &'static [u8] = &[
+const VIDEO_MINF_JUNK: &[u8] = &[
 b'm', b'i', b'n', b'f', // type = minf, ISO/IEC 14496-12 section 8.4.4.
 // A vmhd box; the "graphicsmode" and "opcolor" values don't have any
 // meaningful use.

@@ -208,7 +205,7 @@ const VIDEO_MINF_JUNK: &'static [u8] = &[

 /// Part of a `minf` (`MediaInformationBox`, ISO/IEC 14496-12 section 8.4.4), used from
 /// `append_subtitle_minf`.
-const SUBTITLE_MINF_JUNK: &'static [u8] = &[
+const SUBTITLE_MINF_JUNK: &[u8] = &[
 b'm', b'i', b'n', b'f', // type = minf, ISO/IEC 14496-12 section 8.4.4.
 // A nmhd box.
 0x00, 0x00, 0x00, 0x0c, // length == sizeof(kNmhdBox)

@@ -230,7 +227,7 @@ const SUBTITLE_MINF_JUNK: &'static [u8] = &[
 /// Part of a `stbl` (`SampleTableBox`, ISO/IEC 14496 section 8.5.1) used from
 /// `append_subtitle_stbl`.
 #[rustfmt::skip]
-const SUBTITLE_STBL_JUNK: &'static [u8] = &[
+const SUBTITLE_STBL_JUNK: &[u8] = &[
 b's', b't', b'b', b'l', // type = stbl, ISO/IEC 14496-12 section 8.5.1.
 // A stsd box.
 0x00, 0x00, 0x00, 0x54, // length

@@ -270,7 +267,7 @@ const SUBTITLE_STBL_JUNK: &'static [u8] = &[

 /// Pointers to each static bytestrings.
 /// The order here must match the `StaticBytestring` enum.
-const STATIC_BYTESTRINGS: [&'static [u8]; 9] = [
+const STATIC_BYTESTRINGS: [&[u8]; 9] = [
 NORMAL_FTYP_BOX,
 INIT_SEGMENT_FTYP_BOX,
 VIDEO_HDLR_BOX,

@@ -301,7 +298,7 @@ enum StaticBytestring {
 /// The template fed into strtime for a timestamp subtitle. This must produce fixed-length output
 /// (see `SUBTITLE_LENGTH`) to allow quick calculation of the total size of the subtitles for
 /// a given time range.
-const SUBTITLE_TEMPLATE: &'static str = "%Y-%m-%d %H:%M:%S %z";
+const SUBTITLE_TEMPLATE: &str = "%Y-%m-%d %H:%M:%S %z";

 /// The length of the output of `SUBTITLE_TEMPLATE`.
 const SUBTITLE_LENGTH: usize = 25; // "2015-07-02 17:10:00 -0700".len();
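The long run of `&'static [u8]` / `&'static str` edits above all address the same lint, clippy's `redundant_static_lifetimes`: in `const` and `static` items the `'static` lifetime is implied, so spelling it out adds nothing. A two-line illustration (names made up):

    // Accepted, but flagged by clippy::redundant_static_lifetimes.
    const GREETING_EXPLICIT: &'static str = "hello";

    // Equivalent; a reference in a const already has the 'static lifetime.
    const GREETING: &str = "hello";

    fn main() {
        assert_eq!(GREETING_EXPLICIT, GREETING);
    }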
@@ -419,7 +416,7 @@ impl Segment {
 });
 let index: &'a _ = unsafe { &*self.index.get() };
 match *index {
-Ok(ref b) => return Ok(f(&b[..], self.lens())),
+Ok(ref b) => Ok(f(&b[..], self.lens())),
 Err(()) => bail_t!(Unknown, "Unable to build index; see previous error."),
 }
 }
@@ -562,6 +559,7 @@ impl Segment {

 if is_key {
 // first_sample_flags. See trex (8.8.3.1).
+#[allow(clippy::identity_op)]
 v.write_u32::<BigEndian>(
 // As defined by the Independent and Disposable Samples Box
 // (sdp, 8.6.4).
@@ -571,8 +569,7 @@ impl Segment {
 (2 << 20) | // sample_has_redundancy: no redundant coding
 // As defined by the sample padding bits (padb, 8.7.6).
 (0 << 17) | // no padding
-(0 << 16) | // sample_is_non_sync_sample=0
-0,
+(0 << 16), // sample_is_non_sync_sample=0
 )?; // TODO: sample_degradation_priority
 }
 RunInfo {
@@ -767,7 +764,7 @@ impl slices::Slice for Slice {
 type Chunk = Chunk;

 fn end(&self) -> u64 {
-return self.0 & 0xFF_FF_FF_FF_FF;
+self.0 & 0xFF_FF_FF_FF_FF
 }
 fn get_range(
 &self,
@@ -809,7 +806,7 @@ impl slices::Slice for Slice {
 SliceType::Truns => self.wrap_truns(f, range.clone(), len as usize),
 };
 Box::new(stream::once(futures::future::ready(
-res.map_err(|e| wrap_error(e)).and_then(move |c| {
+res.map_err(wrap_error).and_then(move |c| {
 if c.remaining() != (range.end - range.start) as usize {
 return Err(wrap_error(format_err_t!(
 Internal,
@@ -884,7 +881,7 @@ impl FileBuilder {
 buf: Vec::new(),
 unflushed_buf_pos: 0,
 },
-type_: type_,
+type_,
 include_timestamp_subtitle_track: false,
 content_disposition: None,
 prev_media_duration_and_cur_runs: None,
@@ -1822,7 +1819,7 @@ impl FileInner {
 Ok(ARefss::new(mmap).map(|m| m.deref()).into())
 }

-fn get_subtitle_sample_data(&self, i: usize, r: Range<u64>, l: u64) -> Result<Chunk, Error> {
+fn get_subtitle_sample_data(&self, i: usize, r: Range<u64>, len: u64) -> Result<Chunk, Error> {
 let s = &self.segments[i];
 let md = &s.rel_media_range_90k;
 let wd = s.wall(md.start)..s.wall(md.end);

@@ -1831,8 +1828,8 @@ impl FileInner {
 let end_sec = (s.recording_start
 + recording::Duration(i64::from(wd.end) + TIME_UNITS_PER_SEC - 1))
 .unix_seconds();
-let l = usize::try_from(l).unwrap();
-let mut v = Vec::with_capacity(l);
+let len = usize::try_from(len).unwrap();
+let mut v = Vec::with_capacity(len);
 // TODO(slamb): is this right?!? might have an off-by-one here.
 for ts in start_sec..end_sec {
 v.write_u16::<BigEndian>(SUBTITLE_LENGTH as u16)

@@ -1847,7 +1844,7 @@ impl FileInner {
 )
 .expect("Vec write shouldn't fail");
 }
-assert_eq!(l, v.len());
+assert_eq!(len, v.len());
 Ok(ARefss::new(v)
 .map(|v| &v[r.start as usize..r.end as usize])
 .into())
@@ -2471,7 +2468,7 @@ mod tests {
 testutil::init();
 let db = TestDb::new(RealClocks {});
 let mut r = db::RecordingToInsert::default();
-let mut encoder = recording::SampleIndexEncoder::new();
+let mut encoder = recording::SampleIndexEncoder::default();
 for i in 1..6 {
 let duration_90k = 2 * i;
 let bytes = 3 * i;

@@ -2531,7 +2528,7 @@ mod tests {
 testutil::init();
 let db = TestDb::new(RealClocks {});
 let mut r = db::RecordingToInsert::default();
-let mut encoder = recording::SampleIndexEncoder::new();
+let mut encoder = recording::SampleIndexEncoder::default();
 for i in 1..6 {
 let duration_90k = 2 * i;
 let bytes = 3 * i;

@@ -2617,13 +2614,13 @@ mod tests {
 let db = TestDb::new(RealClocks {});
 let mut encoders = Vec::new();
 let mut r = db::RecordingToInsert::default();
-let mut encoder = recording::SampleIndexEncoder::new();
+let mut encoder = recording::SampleIndexEncoder::default();
 encoder.add_sample(1, 1, true, &mut r);
 encoder.add_sample(2, 2, false, &mut r);
 encoder.add_sample(3, 3, true, &mut r);
 encoders.push(r);
 let mut r = db::RecordingToInsert::default();
-let mut encoder = recording::SampleIndexEncoder::new();
+let mut encoder = recording::SampleIndexEncoder::default();
 encoder.add_sample(4, 4, true, &mut r);
 encoder.add_sample(5, 5, false, &mut r);
 encoders.push(r);

@@ -2656,12 +2653,12 @@ mod tests {
 let db = TestDb::new(RealClocks {});
 let mut encoders = Vec::new();
 let mut r = db::RecordingToInsert::default();
-let mut encoder = recording::SampleIndexEncoder::new();
+let mut encoder = recording::SampleIndexEncoder::default();
 encoder.add_sample(2, 1, true, &mut r);
 encoder.add_sample(3, 2, false, &mut r);
 encoders.push(r);
 let mut r = db::RecordingToInsert::default();
-let mut encoder = recording::SampleIndexEncoder::new();
+let mut encoder = recording::SampleIndexEncoder::default();
 encoder.add_sample(0, 3, true, &mut r);
 encoders.push(r);

@@ -2708,7 +2705,7 @@ mod tests {
 testutil::init();
 let db = TestDb::new(RealClocks {});
 let mut r = db::RecordingToInsert::default();
-let mut encoder = recording::SampleIndexEncoder::new();
+let mut encoder = recording::SampleIndexEncoder::default();
 for i in 1..6 {
 let duration_90k = 2 * i;
 let bytes = 3 * i;

@@ -2762,7 +2759,7 @@ mod tests {
 testutil::init();
 let db = TestDb::new(RealClocks {});
 let mut r = db::RecordingToInsert::default();
-let mut encoder = recording::SampleIndexEncoder::new();
+let mut encoder = recording::SampleIndexEncoder::default();
 for i in 1..6 {
 let duration_90k = 2 * i;
 let bytes = 3 * i;
@@ -129,6 +129,7 @@ where
 ctx: &S::Ctx,
 range: Range<u64>,
 ) -> Box<dyn Stream<Item = Result<S::Chunk, BoxedError>> + Sync + Send> {
+#[allow(clippy::suspicious_operation_groupings)]
 if range.start > range.end || range.end > self.len {
 return Box::new(stream::once(futures::future::err(wrap_error(
 format_err_t!(
@@ -5,7 +5,6 @@
 use crate::h264;
 use cstr::cstr;
 use failure::{bail, Error};
-use ffmpeg;
 use lazy_static::lazy_static;
 use log::{debug, warn};
 use std::convert::TryFrom;
@@ -34,7 +33,7 @@ pub trait Opener<S: Stream>: Sync {
 pub trait Stream {
 fn get_video_codecpar(&self) -> ffmpeg::avcodec::InputCodecParameters<'_>;
 fn get_extra_data(&self) -> Result<h264::ExtraData, Error>;
-fn get_next<'p>(&'p mut self) -> Result<ffmpeg::avcodec::Packet<'p>, ffmpeg::Error>;
+fn get_next(&mut self) -> Result<ffmpeg::avcodec::Packet, ffmpeg::Error>;
 }

 pub struct Ffmpeg {}
@@ -166,7 +165,7 @@ impl Stream for FfmpegStream {
 )
 }

-fn get_next<'i>(&'i mut self) -> Result<ffmpeg::avcodec::Packet<'i>, ffmpeg::Error> {
+fn get_next(&mut self) -> Result<ffmpeg::avcodec::Packet, ffmpeg::Error> {
 loop {
 let p = self.input.read_frame()?;
 if p.stream_index() == self.video_i {
@@ -11,7 +11,6 @@ use log::{debug, info, trace, warn};
 use std::result::Result;
 use std::sync::atomic::{AtomicBool, Ordering};
 use std::sync::Arc;
-use time;
 use url::Url;

 pub static ROTATE_INTERVAL_SEC: i64 = 60;
@@ -359,7 +358,7 @@ mod tests {

 fn get_frames(db: &db::LockedDatabase, id: CompositeId) -> Vec<Frame> {
 db.with_recording_playback(id, &mut |rec| {
-let mut it = recording::SampleIndexIterator::new();
+let mut it = recording::SampleIndexIterator::default();
 let mut frames = Vec::new();
 while it.next(&rec.video_index).unwrap() {
 frames.push(Frame {
@@ -479,7 +479,7 @@ impl Service {
 std::time::Duration::new(30, 0),
 ));
 let mut combo = futures::stream::select(
-sub_rx.map(|s| Either::Left(s)),
+sub_rx.map(Either::Left),
 keepalive.map(|_| Either::Right(())),
 );
@@ -644,10 +644,10 @@ impl Service {
 req: Request<::hyper::Body>,
 ) -> Result<Response<Body>, std::convert::Infallible> {
 let p = Path::decode(req.uri().path());
-let always_allow_unauthenticated = match p {
-Path::NotFound | Path::Request | Path::Login | Path::Logout | Path::Static => true,
-_ => false,
-};
+let always_allow_unauthenticated = matches!(
+p,
+Path::NotFound | Path::Request | Path::Login | Path::Logout | Path::Static
+);
 debug!("request on: {}: {:?}", req.uri(), p);
 let caller = match self.authenticate(&req, always_allow_unauthenticated) {
 Ok(c) => c,
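The `always_allow_unauthenticated` rewrite uses the `matches!` macro, which clippy's `match_like_matches_macro` suggests for a `match` whose arms only yield `true` or `false`. A sketch with a stand-in enum (the real `Path` enum is defined elsewhere in this file and is not part of the hunk):

    #[allow(dead_code)]
    enum Page {
        NotFound,
        Login,
        Logout,
        Static,
        Other,
    }

    fn always_allow_unauthenticated(p: &Page) -> bool {
        // Expands to a match with `=> true` arms and a `_ => false` fallback.
        matches!(p, Page::NotFound | Page::Login | Page::Logout | Page::Static)
    }

    fn main() {
        assert!(always_allow_unauthenticated(&Page::Login));
        assert!(!always_allow_unauthenticated(&Page::Other));
    }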
@@ -673,10 +673,8 @@ impl Service {
 }
 }

-if camera_configs {
-if !caller.permissions.read_camera_configs {
-bail_t!(PermissionDenied, "read_camera_configs required");
-}
+if camera_configs && !caller.permissions.read_camera_configs {
+bail_t!(PermissionDenied, "read_camera_configs required");
 }

 let db = self.db.lock();
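The `camera_configs` hunk is the `collapsible_if` pattern: a nested `if` whose outer block contains nothing else can be folded into a single `&&` condition without changing behavior. Sketch with illustrative names (the real check uses `caller.permissions` and `bail_t!`, which are outside this diff):

    fn check(camera_configs: bool, may_read_configs: bool) -> Result<(), String> {
        // Previously:
        //   if camera_configs {
        //       if !may_read_configs { return Err(...); }
        //   }
        // Collapsed, same behavior:
        if camera_configs && !may_read_configs {
            return Err("read_camera_configs required".to_owned());
        }
        Ok(())
    }

    fn main() {
        assert!(check(true, false).is_err());
        assert!(check(true, true).is_ok());
        assert!(check(false, false).is_ok());
    }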
@@ -954,7 +952,7 @@ impl Service {
 sec: start.unix_seconds(),
 nsec: 0,
 });
-let stream_abbrev = if stream_type == db::StreamType::MAIN {
+let stream_abbrev = if stream_type == db::StreamType::Main {
 "main"
 } else {
 "sub"
@@ -1247,11 +1245,7 @@ impl Service {
 if let Some(sid) = extract_sid(req) {
 let authreq = self.authreq(req);

-match self
-.db
-.lock()
-.authenticate_session(authreq.clone(), &sid.hash())
-{
+match self.db.lock().authenticate_session(authreq, &sid.hash()) {
 Ok((s, u)) => {
 return Ok(Caller {
 permissions: s.permissions.clone(),
@@ -1319,7 +1313,7 @@ struct StaticFileRequest<'a> {

 impl<'a> StaticFileRequest<'a> {
 fn parse(path: &'a str) -> Option<Self> {
-if !path.starts_with("/") || path == "/index.html" {
+if !path.starts_with('/') || path == "/index.html" {
 return None;
 }
@@ -1492,11 +1486,11 @@ mod tests {
 assert_eq!(Path::decode("/api/cameras/asdf/"), Path::NotFound);
 assert_eq!(
 Path::decode("/api/cameras/35144640-ff1e-4619-b0d5-4c74c185741c/main/recordings"),
-Path::StreamRecordings(cam_uuid, db::StreamType::MAIN)
+Path::StreamRecordings(cam_uuid, db::StreamType::Main)
 );
 assert_eq!(
 Path::decode("/api/cameras/35144640-ff1e-4619-b0d5-4c74c185741c/sub/recordings"),
-Path::StreamRecordings(cam_uuid, db::StreamType::SUB)
+Path::StreamRecordings(cam_uuid, db::StreamType::Sub)
 );
 assert_eq!(
 Path::decode("/api/cameras/35144640-ff1e-4619-b0d5-4c74c185741c/junk/recordings"),
@@ -1504,23 +1498,23 @@ mod tests {
 );
 assert_eq!(
 Path::decode("/api/cameras/35144640-ff1e-4619-b0d5-4c74c185741c/main/view.mp4"),
-Path::StreamViewMp4(cam_uuid, db::StreamType::MAIN, false)
+Path::StreamViewMp4(cam_uuid, db::StreamType::Main, false)
 );
 assert_eq!(
 Path::decode("/api/cameras/35144640-ff1e-4619-b0d5-4c74c185741c/main/view.mp4.txt"),
-Path::StreamViewMp4(cam_uuid, db::StreamType::MAIN, true)
+Path::StreamViewMp4(cam_uuid, db::StreamType::Main, true)
 );
 assert_eq!(
 Path::decode("/api/cameras/35144640-ff1e-4619-b0d5-4c74c185741c/main/view.m4s"),
-Path::StreamViewMp4Segment(cam_uuid, db::StreamType::MAIN, false)
+Path::StreamViewMp4Segment(cam_uuid, db::StreamType::Main, false)
 );
 assert_eq!(
 Path::decode("/api/cameras/35144640-ff1e-4619-b0d5-4c74c185741c/main/view.m4s.txt"),
-Path::StreamViewMp4Segment(cam_uuid, db::StreamType::MAIN, true)
+Path::StreamViewMp4Segment(cam_uuid, db::StreamType::Main, true)
 );
 assert_eq!(
 Path::decode("/api/cameras/35144640-ff1e-4619-b0d5-4c74c185741c/main/live.m4s"),
-Path::StreamLiveMp4Segments(cam_uuid, db::StreamType::MAIN)
+Path::StreamLiveMp4Segments(cam_uuid, db::StreamType::Main)
 );
 assert_eq!(
 Path::decode("/api/cameras/35144640-ff1e-4619-b0d5-4c74c185741c/main/junk"),