2019-06-06 19:18:13 -04:00
|
|
|
// This file is part of Moonfire NVR, a security camera network video recorder.
|
2021-02-17 16:28:48 -05:00
|
|
|
// Copyright (C) 2019 The Moonfire NVR Authors; see AUTHORS and LICENSE.txt.
|
|
|
|
// SPDX-License-Identifier: GPL-v3.0-or-later WITH GPL-3.0-linking-exception.
|
2019-06-06 19:18:13 -04:00
|
|
|
|
|
|
|
use crate::db::FromSqlUuid;
|
|
|
|
use crate::recording;
|
2021-03-23 23:22:29 -04:00
|
|
|
use crate::{coding, days};
|
2021-02-17 01:15:54 -05:00
|
|
|
use base::bail_t;
|
|
|
|
use failure::{bail, format_err, Error};
|
2019-06-06 19:18:13 -04:00
|
|
|
use fnv::FnvHashMap;
|
2019-12-28 08:48:08 -05:00
|
|
|
use log::debug;
|
2021-02-17 01:15:54 -05:00
|
|
|
use rusqlite::{params, Connection, Transaction};
|
2019-06-14 00:55:15 -04:00
|
|
|
use std::collections::btree_map::Entry;
|
2021-02-17 01:15:54 -05:00
|
|
|
use std::collections::{BTreeMap, BTreeSet};
|
2019-06-06 19:18:13 -04:00
|
|
|
use std::ops::Range;
|
|
|
|
use uuid::Uuid;
|
|
|
|
|
|
|
|
/// All state associated with signals. This is the entry point to this module.
pub(crate) struct State {
    /// All signals known to the system, keyed by signal id.
    signals_by_id: BTreeMap<u32, Signal>,

    /// All types with known states. Note that currently there's no requirement an entry here
    /// exists for every `type_` specified in a `Signal`, and there's an implied `0` (unknown)
    /// state for every `Type`.
    types_by_uuid: FnvHashMap<Uuid, Type>,

    /// All points in time.
    ///
    /// Invariants, checked by `State::debug_assert_point_invariants`:
    /// * the first point must have an empty previous state (all signals at state 0).
    /// * each point's prev state matches the previous point's after state.
    /// * the last point must have an empty final state (all signals changed to state 0).
    points_by_time: BTreeMap<recording::Time, Point>,

    /// Times which need to be flushed to the database.
    ///
    /// These either have a matching `points_by_time` entry or represent a removal.
    dirty_by_time: BTreeSet<recording::Time>,

    /// Retention limit loaded from `meta.max_signal_changes`; when set, `gc` trims the
    /// oldest points so no more than this many remain (negative is treated as 0).
    max_signal_changes: Option<i64>,
}
|
|
|
|
|
2019-06-14 00:55:15 -04:00
|
|
|
/// Representation of all signals at a point in time.
///
/// Each point matches a `signal_change` table row (when flushed). However, the in-memory
/// representation keeps not only the changes as of that time but also the complete prior state.
#[derive(Default)]
struct Point {
    /// All data associated with the point.
    ///
    /// `data[0..changes_off]` represents previous state (immediately prior to this point).
    /// `data[changes_off..]` represents the changes at this point.
    ///
    /// This representation could be 8 bytes shorter on 64-bit platforms by using a u32 for the
    /// lengths, but this would require some unsafe code.
    ///
    /// The serialized form stored here must always be valid.
    data: Box<[u8]>,

    /// Byte offset within `data` at which the changes portion begins.
    changes_off: usize,
}
|
|
|
|
|
|
|
|
impl Point {
|
2019-06-14 00:55:15 -04:00
|
|
|
/// Creates a new point from `prev` and `changes`.
|
|
|
|
///
|
|
|
|
/// The caller is responsible for validation. In particular, `changes` must be a valid
|
|
|
|
/// serialized form.
|
|
|
|
fn new(prev: &BTreeMap<u32, u16>, changes: &[u8]) -> Self {
|
|
|
|
let mut data = Vec::with_capacity(3 * prev.len() + changes.len());
|
|
|
|
append_serialized(prev, &mut data);
|
2019-06-06 19:18:13 -04:00
|
|
|
let changes_off = data.len();
|
|
|
|
data.extend(changes);
|
|
|
|
Point {
|
2019-06-14 00:55:15 -04:00
|
|
|
data: data.into_boxed_slice(),
|
2019-06-06 19:18:13 -04:00
|
|
|
changes_off,
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2019-06-14 00:55:15 -04:00
|
|
|
fn swap(&mut self, other: &mut Point) {
|
|
|
|
std::mem::swap(&mut self.data, &mut other.data);
|
|
|
|
std::mem::swap(&mut self.changes_off, &mut other.changes_off);
|
|
|
|
}
|
|
|
|
|
|
|
|
/// Returns an iterator over state as of immediately before this point.
|
|
|
|
fn prev(&self) -> PointDataIterator {
|
2019-06-06 19:18:13 -04:00
|
|
|
PointDataIterator::new(&self.data[0..self.changes_off])
|
|
|
|
}
|
|
|
|
|
2019-06-14 00:55:15 -04:00
|
|
|
/// Returns an iterator over changes in this point.
|
2019-06-06 19:18:13 -04:00
|
|
|
fn changes(&self) -> PointDataIterator {
|
|
|
|
PointDataIterator::new(&self.data[self.changes_off..])
|
|
|
|
}
|
2019-06-14 00:55:15 -04:00
|
|
|
|
|
|
|
/// Returns a mapping of signals to states immediately after this point.
|
|
|
|
fn after(&self) -> BTreeMap<u32, u16> {
|
|
|
|
let mut after = BTreeMap::new();
|
|
|
|
let mut it = self.prev();
|
|
|
|
while let Some((signal, state)) = it.next().expect("in-mem prev is valid") {
|
|
|
|
after.insert(signal, state);
|
|
|
|
}
|
2021-03-23 23:22:29 -04:00
|
|
|
self.changes().update_map(&mut after);
|
2019-06-14 00:55:15 -04:00
|
|
|
after
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
/// Appends a serialized form of `from` into `to`.
|
|
|
|
///
|
|
|
|
/// `from` must be an iterator of `(signal, state)` with signal numbers in monotonically increasing
|
|
|
|
/// order.
|
|
|
|
fn append_serialized<'a, I>(from: I, to: &mut Vec<u8>)
|
2021-02-17 01:15:54 -05:00
|
|
|
where
|
|
|
|
I: IntoIterator<Item = (&'a u32, &'a u16)>,
|
|
|
|
{
|
2019-06-14 00:55:15 -04:00
|
|
|
let mut next_allowed = 0;
|
|
|
|
for (&signal, &state) in from.into_iter() {
|
|
|
|
assert!(signal >= next_allowed);
|
|
|
|
coding::append_varint32(signal - next_allowed, to);
|
|
|
|
coding::append_varint32(state as u32, to);
|
|
|
|
next_allowed = signal + 1;
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
fn serialize(from: &BTreeMap<u32, u16>) -> Vec<u8> {
|
|
|
|
let mut to = Vec::with_capacity(3 * from.len());
|
|
|
|
append_serialized(from, &mut to);
|
|
|
|
to
|
2019-06-06 19:18:13 -04:00
|
|
|
}
|
|
|
|
|
|
|
|
/// Iterator over the delta-varint-encoded `(signal, state)` pairs in a point's serialized data.
struct PointDataIterator<'a> {
    /// The serialized bytes being decoded.
    data: &'a [u8],
    /// Byte offset within `data` of the next pair to decode.
    cur_pos: usize,
    /// Minimum allowed signal number for the next pair; signal deltas are relative to this.
    cur_signal: u32,
}
|
|
|
|
|
|
|
|
impl<'a> PointDataIterator<'a> {
    /// Creates an iterator positioned at the start of `data`.
    fn new(data: &'a [u8]) -> Self {
        PointDataIterator {
            data,
            cur_pos: 0,
            cur_signal: 0,
        }
    }

    /// Returns an error, `None`, or `Some((signal, state))`.
    /// Note that errors should be impossible on in-memory data; this returns `Result` for
    /// validating blobs as they're read from the database.
    fn next(&mut self) -> Result<Option<(u32, u16)>, Error> {
        if self.cur_pos == self.data.len() {
            return Ok(None);
        }
        // Each pair is two varint32s: the delta from `cur_signal`, then the state.
        let (signal_delta, p) = coding::decode_varint32(self.data, self.cur_pos).map_err(|()| {
            format_err!(
                "varint32 decode failure; data={:?} pos={}",
                self.data,
                self.cur_pos
            )
        })?;
        let (state, p) = coding::decode_varint32(self.data, p)
            .map_err(|()| format_err!("varint32 decode failure; data={:?} pos={}", self.data, p))?;
        let signal = self.cur_signal.checked_add(signal_delta).ok_or_else(|| {
            format_err!("signal overflow: {} + {}", self.cur_signal, signal_delta)
        })?;
        // States are stored as varint32 but must fit in u16.
        if state > u16::max_value() as u32 {
            bail!("state overflow: {}", state);
        }
        self.cur_pos = p;
        // The next signal must be strictly greater than this one.
        self.cur_signal = signal + 1;
        Ok(Some((signal, state as u16)))
    }

    /// Decodes all remaining pairs into a map, failing on any decode error.
    fn to_map(mut self) -> Result<BTreeMap<u32, u16>, Error> {
        let mut out = BTreeMap::new();
        while let Some((signal, state)) = self.next()? {
            out.insert(signal, state);
        }
        Ok(out)
    }

    /// Applies all remaining pairs to `m`: state 0 removes the signal's entry
    /// (0 = unknown is implicit), any other state upserts it.
    fn update_map(mut self, m: &mut BTreeMap<u32, u16>) {
        while let Some((signal, state)) = self.next().expect("in-mem changes is valid") {
            if state == 0 {
                m.remove(&signal);
            } else {
                m.insert(signal, state);
            }
        }
    }
}
|
|
|
|
|
|
|
|
/// Representation of a `signal_camera` row.
/// `signal_id` is implied by the `Signal` which owns this struct.
#[derive(Debug)]
pub struct SignalCamera {
    /// The id of the associated camera (`signal_camera.camera_id`).
    pub camera_id: i32,
    /// The association type, decoded from the integer `type` column.
    pub type_: SignalCameraType,
}
|
|
|
|
|
|
|
|
/// Representation of the `type` field in a `signal_camera` row.
#[derive(Debug)]
pub enum SignalCameraType {
    // Discriminants match the integer values stored in the database column;
    // see the decoding in `State::fill_signal_cameras`.
    Direct = 0,
    Indirect = 1,
}
|
|
|
|
|
2019-06-14 00:55:15 -04:00
|
|
|
/// One row yielded by `State::list_changes_by_time`: a signal's state as of `when`.
#[derive(Copy, Clone, Debug, PartialEq, Eq)]
pub struct ListStateChangesRow {
    /// The time of the change (or of the snapshot point preceding the requested range).
    pub when: recording::Time,
    /// The signal id.
    pub signal: u32,
    /// The signal's state from this time forward.
    pub state: u16,
}
|
|
|
|
|
|
|
|
impl State {
|
|
|
|
/// Loads signal state from the database.
///
/// Reads `meta.max_signal_changes`, the `signal` and `signal_camera` tables, all
/// `signal_change` points, and the known `signal_type_enum` states; verifies the point
/// invariants before returning.
pub fn init(conn: &Connection) -> Result<Self, Error> {
    let max_signal_changes: Option<i64> =
        conn.query_row("select max_signal_changes from meta", params![], |row| {
            row.get(0)
        })?;
    let mut signals_by_id = State::init_signals(conn)?;
    State::fill_signal_cameras(conn, &mut signals_by_id)?;
    let mut points_by_time = BTreeMap::new();
    // Also fills each signal's days index as a side effect.
    State::fill_points(conn, &mut points_by_time, &mut signals_by_id)?;
    let s = State {
        max_signal_changes,
        signals_by_id,
        types_by_uuid: State::init_types(conn)?,
        points_by_time,
        // Freshly loaded state exactly matches the database; nothing to flush yet.
        dirty_by_time: BTreeSet::new(),
    };
    s.debug_assert_point_invariants();
    Ok(s)
}
|
|
|
|
|
|
|
|
pub fn list_changes_by_time(
|
2021-02-17 01:15:54 -05:00
|
|
|
&self,
|
|
|
|
desired_time: Range<recording::Time>,
|
|
|
|
f: &mut dyn FnMut(&ListStateChangesRow),
|
|
|
|
) {
|
2019-06-06 19:18:13 -04:00
|
|
|
// First find the state immediately before. If it exists, include it.
|
2019-06-14 00:55:15 -04:00
|
|
|
if let Some((&when, p)) = self.points_by_time.range(..desired_time.start).next_back() {
|
|
|
|
for (&signal, &state) in &p.after() {
|
2019-06-06 19:18:13 -04:00
|
|
|
f(&ListStateChangesRow {
|
2019-06-14 00:55:15 -04:00
|
|
|
when,
|
2019-06-06 19:18:13 -04:00
|
|
|
signal,
|
|
|
|
state,
|
|
|
|
});
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
// Then include changes up to (but not including) the end time.
|
2019-06-14 00:55:15 -04:00
|
|
|
for (&when, p) in self.points_by_time.range(desired_time.clone()) {
|
2019-06-06 19:18:13 -04:00
|
|
|
let mut it = p.changes();
|
2019-06-14 00:55:15 -04:00
|
|
|
while let Some((signal, state)) = it.next().expect("in-mem changes is valid") {
|
2019-06-06 19:18:13 -04:00
|
|
|
f(&ListStateChangesRow {
|
2019-06-14 00:55:15 -04:00
|
|
|
when,
|
2019-06-06 19:18:13 -04:00
|
|
|
signal,
|
|
|
|
state,
|
|
|
|
});
|
|
|
|
}
|
|
|
|
}
|
2019-06-14 00:55:15 -04:00
|
|
|
}
|
|
|
|
|
|
|
|
/// Sets `signals[i]` to `states[i]` for the duration of `when`.
///
/// `signals` must be monotonically increasing, known to this `State`, and each state
/// must be valid for its signal's type (or 0). Dirties affected points for the next
/// `flush` and updates each signal's days index; may trigger garbage collection.
pub fn update_signals(
    &mut self,
    when: Range<recording::Time>,
    signals: &[u32],
    states: &[u16],
) -> Result<(), base::Error> {
    // Do input validation before any mutation.
    self.update_signals_validate(signals, states)?;

    // Follow the std::ops::Range convention of considering a range empty if its start >= end.
    // Bailing early in the empty case isn't just an optimization; apply_observation_end would
    // be incorrect otherwise.
    if when.end <= when.start {
        return Ok(());
    }

    // Apply the end before the start so that the `prev` state can be examined.
    self.update_signals_end(when.clone(), signals, states);
    self.update_signals_start(when.start, signals, states);
    self.update_signals_middle(when, signals, states);
    self.debug_assert_point_invariants();

    self.gc();
    Ok(())
}
|
|
|
|
|
2019-12-28 08:48:08 -05:00
|
|
|
/// Performs garbage collection if the number of points exceeds `max_signal_changes`.
|
|
|
|
fn gc(&mut self) {
|
|
|
|
let max = match self.max_signal_changes {
|
|
|
|
None => return,
|
|
|
|
Some(m) if m < 0 => 0 as usize,
|
|
|
|
Some(m) if m > (isize::max_value() as i64) => return,
|
|
|
|
Some(m) => m as usize,
|
|
|
|
};
|
|
|
|
let to_remove = match self.points_by_time.len().checked_sub(max) {
|
|
|
|
None => return,
|
|
|
|
Some(p) => p,
|
|
|
|
};
|
2021-02-17 01:15:54 -05:00
|
|
|
debug!(
|
|
|
|
"Performing signal GC: have {} points, want only {}, so removing {}",
|
|
|
|
self.points_by_time.len(),
|
|
|
|
max,
|
|
|
|
to_remove
|
|
|
|
);
|
|
|
|
|
2021-03-23 23:22:29 -04:00
|
|
|
self.gc_days(to_remove);
|
2021-02-17 01:15:54 -05:00
|
|
|
let remove: smallvec::SmallVec<[recording::Time; 4]> = self
|
|
|
|
.points_by_time
|
|
|
|
.keys()
|
|
|
|
.take(to_remove)
|
2021-03-23 23:22:29 -04:00
|
|
|
.map(|t| *t)
|
2021-02-17 01:15:54 -05:00
|
|
|
.collect();
|
2019-12-28 08:48:08 -05:00
|
|
|
|
2021-03-23 23:22:29 -04:00
|
|
|
for t in &remove {
|
|
|
|
self.points_by_time.remove(t);
|
|
|
|
self.dirty_by_time.insert(*t);
|
|
|
|
}
|
|
|
|
|
|
|
|
// Update the first remaining point to keep state starting from it unchanged.
|
|
|
|
let (t, p) = match self.points_by_time.iter_mut().next() {
|
|
|
|
Some(e) => e,
|
|
|
|
None => return,
|
|
|
|
};
|
|
|
|
let combined = p.after();
|
|
|
|
p.changes_off = 0;
|
|
|
|
p.data = serialize(&combined).into_boxed_slice();
|
|
|
|
self.dirty_by_time.insert(*t);
|
|
|
|
self.debug_assert_point_invariants();
|
|
|
|
}
|
|
|
|
|
|
|
|
/// Adjusts each signal's days index to reflect garbage-collecting the first `to_remove` points.
fn gc_days(&mut self, to_remove: usize) {
    // Walk the doomed points plus the first survivor: only spans strictly before the
    // survivor are actually being removed.
    let mut it = self.points_by_time.iter().take(to_remove + 1);
    let (mut prev_time, mut prev_state) = match it.next() {
        None => return, // nothing to do.
        Some(p) => (*p.0, p.1.after()),
    };
    for (&new_time, point) in it {
        let mut changes = point.changes();
        while let Some((signal, state)) = changes.next().expect("in-mem points valid") {
            let s = self
                .signals_by_id
                .get_mut(&signal)
                .expect("in-mem point signals valid");
            let prev_state = prev_state.entry(signal).or_default();
            // Re-account the span [prev_time, new_time): the signal spent it in
            // *prev_state, not in `state`.
            s.days.adjust(prev_time..new_time, *prev_state, state);
            *prev_state = state;
        }
        prev_time = new_time;
    }
}
|
|
|
|
|
2019-06-14 00:55:15 -04:00
|
|
|
/// Helper for `update_signals` to do validation.
|
2019-06-14 19:11:12 -04:00
|
|
|
fn update_signals_validate(&self, signals: &[u32], states: &[u16]) -> Result<(), base::Error> {
|
2019-06-14 00:55:15 -04:00
|
|
|
if signals.len() != states.len() {
|
|
|
|
bail_t!(InvalidArgument, "signals and states must have same length");
|
|
|
|
}
|
|
|
|
let mut next_allowed = 0u32;
|
|
|
|
for (&signal, &state) in signals.iter().zip(states) {
|
|
|
|
if signal < next_allowed {
|
|
|
|
bail_t!(InvalidArgument, "signals must be monotonically increasing");
|
|
|
|
}
|
|
|
|
match self.signals_by_id.get(&signal) {
|
|
|
|
None => bail_t!(InvalidArgument, "unknown signal {}", signal),
|
|
|
|
Some(ref s) => {
|
|
|
|
let empty = Vec::new();
|
2021-02-17 01:15:54 -05:00
|
|
|
let states = self
|
|
|
|
.types_by_uuid
|
|
|
|
.get(&s.type_)
|
|
|
|
.map(|t| &t.states)
|
|
|
|
.unwrap_or(&empty);
|
2019-06-14 19:41:18 -04:00
|
|
|
if state != 0 && states.binary_search_by_key(&state, |s| s.value).is_err() {
|
2021-02-17 01:15:54 -05:00
|
|
|
bail_t!(
|
|
|
|
FailedPrecondition,
|
|
|
|
"signal {} specifies unknown state {}",
|
|
|
|
signal,
|
|
|
|
state
|
|
|
|
);
|
2019-06-14 00:55:15 -04:00
|
|
|
}
|
2021-02-17 01:15:54 -05:00
|
|
|
}
|
2019-06-14 00:55:15 -04:00
|
|
|
}
|
|
|
|
next_allowed = signal + 1;
|
|
|
|
}
|
|
|
|
Ok(())
|
|
|
|
}
|
|
|
|
|
|
|
|
/// Helper for `update_signals` to apply the end point.
///
/// Ensures that at `when.end` every updated signal reverts to whatever state it had
/// before this update, either by amending an existing point at `when.end` or by
/// creating a new one. Must run before `update_signals_start`/`update_signals_middle`
/// so the pre-update state is still observable.
fn update_signals_end(
    &mut self,
    when: Range<recording::Time>,
    signals: &[u32],
    states: &[u16],
) {
    let mut prev;
    let mut changes = BTreeMap::<u32, u16>::new();
    // Days-index span: from the last point inside `when` (or, lacking one,
    // `when.start`) up to `when.end`.
    let prev_t = self
        .points_by_time
        .range(when.clone())
        .next_back()
        .map(|e| *e.0)
        .unwrap_or(when.start);
    let days_range = prev_t..when.end;
    if let Some((&t, ref mut p)) = self.points_by_time.range_mut(..=when.end).next_back() {
        if t == when.end {
            // Already have a point at end. Adjust it. prev starts unchanged...
            prev = p.prev().to_map().expect("in-mem prev is valid");

            // ...and then prev and changes are altered to reflect the desired update.
            State::update_signals_end_maps(
                signals,
                states,
                days_range,
                &mut self.signals_by_id,
                &mut prev,
                &mut changes,
            );

            // If this doesn't alter the new state, don't dirty the database.
            if changes.is_empty() {
                return;
            }

            // Any existing changes should still be applied. They win over reverting to prev.
            let mut it = p.changes();
            while let Some((signal, state)) = it.next().expect("in-mem changes is valid") {
                changes
                    .entry(signal)
                    .and_modify(|e| *e = state)
                    .or_insert(state);
            }
            self.dirty_by_time.insert(t);
            p.swap(&mut Point::new(&prev, &serialize(&changes)));
            return;
        }

        // Don't have a point at end, but do have previous state.
        prev = p.after();
    } else {
        // No point at or before end. Start from scratch (all signals unknown).
        prev = BTreeMap::new();
    }

    // Create a new end point if necessary.
    State::update_signals_end_maps(
        signals,
        states,
        days_range,
        &mut self.signals_by_id,
        &mut prev,
        &mut changes,
    );
    if changes.is_empty() {
        return;
    }
    self.dirty_by_time.insert(when.end);
    self.points_by_time
        .insert(when.end, Point::new(&prev, &serialize(&changes)));
}
|
|
|
|
|
|
|
|
/// Helper for `update_signals_end`. Adjusts `prev` (the state prior to the end point) to
/// reflect the desired update (in `signals` and `states`). Adjusts `changes` (changes to
/// execute at the end point) to undo the change. Adjust each signal's days index for
/// the range from the penultimate point of the range (or lacking that, its start) to the end.
fn update_signals_end_maps(
    signals: &[u32],
    states: &[u16],
    days_range: Range<recording::Time>,
    signals_by_id: &mut BTreeMap<u32, Signal>,
    prev: &mut BTreeMap<u32, u16>,
    changes: &mut BTreeMap<u32, u16>,
) {
    for (&signal, &state) in signals.iter().zip(states) {
        let old_state;
        match prev.entry(signal) {
            Entry::Vacant(e) => {
                // Signal was previously unknown (implicit state 0); schedule a
                // revert back to 0 at the end point.
                old_state = 0;
                changes.insert(signal, 0);
                e.insert(state);
            }
            Entry::Occupied(mut e) => {
                old_state = *e.get();
                if state == 0 {
                    // Setting to unknown: drop from prev, revert to the old state at end.
                    changes.insert(signal, *e.get());
                    e.remove();
                } else if *e.get() != state {
                    changes.insert(signal, *e.get());
                    *e.get_mut() = state;
                }
                // If the state is unchanged, neither map needs touching.
            }
        }
        signals_by_id
            .get_mut(&signal)
            .expect("signal valid")
            .days
            .adjust(days_range.clone(), old_state, state);
    }
}
|
|
|
|
|
|
|
|
/// Helper for `update_signals` to apply the start point.
|
|
|
|
fn update_signals_start(&mut self, start: recording::Time, signals: &[u32], states: &[u16]) {
|
|
|
|
let prev;
|
|
|
|
if let Some((&t, ref mut p)) = self.points_by_time.range_mut(..=start).next_back() {
|
|
|
|
if t == start {
|
|
|
|
// Reuse existing point at start.
|
|
|
|
prev = p.prev().to_map().expect("in-mem prev is valid");
|
|
|
|
let mut changes = p.changes().to_map().expect("in-mem changes is valid");
|
|
|
|
let mut dirty = false;
|
|
|
|
for (&signal, &state) in signals.iter().zip(states) {
|
|
|
|
match changes.entry(signal) {
|
|
|
|
Entry::Occupied(mut e) => {
|
|
|
|
if *e.get() != state {
|
|
|
|
dirty = true;
|
|
|
|
if state == *prev.get(&signal).unwrap_or(&0) {
|
|
|
|
e.remove();
|
|
|
|
} else {
|
|
|
|
*e.get_mut() = state;
|
|
|
|
}
|
|
|
|
}
|
2021-02-17 01:15:54 -05:00
|
|
|
}
|
2019-06-14 00:55:15 -04:00
|
|
|
Entry::Vacant(e) => {
|
|
|
|
if signal != 0 {
|
|
|
|
dirty = true;
|
|
|
|
e.insert(state);
|
|
|
|
}
|
2021-02-17 01:15:54 -05:00
|
|
|
}
|
2019-06-14 00:55:15 -04:00
|
|
|
}
|
|
|
|
}
|
|
|
|
if dirty {
|
|
|
|
p.swap(&mut Point::new(&prev, &serialize(&changes)));
|
|
|
|
self.dirty_by_time.insert(start);
|
|
|
|
}
|
|
|
|
return;
|
|
|
|
}
|
|
|
|
|
|
|
|
// Create new point at start, using state from previous point.
|
|
|
|
prev = p.after();
|
|
|
|
} else {
|
|
|
|
// Create new point at start, from scratch.
|
|
|
|
prev = BTreeMap::new();
|
|
|
|
}
|
|
|
|
|
|
|
|
let mut changes = BTreeMap::new();
|
|
|
|
for (&signal, &state) in signals.iter().zip(states) {
|
|
|
|
if state != *prev.get(&signal).unwrap_or(&0) {
|
|
|
|
changes.insert(signal, state);
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
if changes.is_empty() {
|
|
|
|
return;
|
|
|
|
}
|
|
|
|
|
|
|
|
self.dirty_by_time.insert(start);
|
2021-02-17 01:15:54 -05:00
|
|
|
self.points_by_time
|
|
|
|
.insert(start, Point::new(&prev, &serialize(&changes)));
|
2019-06-14 00:55:15 -04:00
|
|
|
}
|
|
|
|
|
|
|
|
/// Helper for `update_signals` to apply all points in `(when.start, when.end)`.
/// This also updates each signal's days index for the points it finds.
fn update_signals_middle(
    &mut self,
    when: Range<recording::Time>,
    signals: &[u32],
    states: &[u16],
) {
    let mut to_delete = Vec::new();
    // Skip a point exactly at `when.start`; that one belongs to `update_signals_start`.
    let after_start = recording::Time(when.start.0 + 1);
    let mut prev_t = when.start;
    for (&t, ref mut p) in self.points_by_time.range_mut(after_start..when.end) {
        let mut prev = p.prev().to_map().expect("in-mem prev is valid");

        // Update prev to reflect desired update; likewise each signal's days index.
        for (&signal, &state) in signals.iter().zip(states) {
            let s = self.signals_by_id.get_mut(&signal).expect("valid signals");
            let prev_state;
            match prev.entry(signal) {
                Entry::Occupied(mut e) => {
                    prev_state = *e.get();
                    if state == 0 {
                        // State 0 is implicit; remove rather than store it.
                        e.remove_entry();
                    } else if *e.get() != state {
                        *e.get_mut() = state;
                    }
                }
                Entry::Vacant(e) => {
                    prev_state = 0;
                    if state != 0 {
                        e.insert(state);
                    }
                }
            }
            s.days.adjust(prev_t..t, prev_state, state);
            // NOTE(review): `prev_t` is advanced inside the per-signal loop, so the
            // second and subsequent signals see an empty `prev_t..t` range for this
            // point — confirm this is intended rather than advancing once per point.
            prev_t = t;
        }

        // Trim changes to omit any change to signals.
        let mut changes = Vec::with_capacity(3 * signals.len());
        let mut it = p.changes();
        let mut next_allowed = 0;
        let mut dirty = false;
        while let Some((signal, state)) = it.next().expect("in-memory changes is valid") {
            if signals.binary_search(&signal).is_ok() {
                // discard.
                dirty = true;
            } else {
                // keep, re-encoding with the same delta scheme as `append_serialized`.
                assert!(signal >= next_allowed);
                coding::append_varint32(signal - next_allowed, &mut changes);
                coding::append_varint32(state as u32, &mut changes);
                next_allowed = signal + 1;
            }
        }
        if changes.is_empty() {
            // Point no longer changes anything; schedule removal (can't mutate the
            // map while iterating it).
            to_delete.push(t);
        } else {
            p.swap(&mut Point::new(&prev, &changes));
        }
        if dirty {
            self.dirty_by_time.insert(t);
        }
    }

    // Delete any points with no more changes.
    for &t in &to_delete {
        self.points_by_time.remove(&t).expect("point exists");
    }
}
|
2019-06-06 19:18:13 -04:00
|
|
|
|
2019-06-14 00:55:15 -04:00
|
|
|
/// Flushes all pending database changes to the given transaction.
|
|
|
|
///
|
|
|
|
/// The caller is expected to call `post_flush` afterward if the transaction is
|
|
|
|
/// successfully committed. No mutations should happen between these calls.
|
|
|
|
pub fn flush(&mut self, tx: &Transaction) -> Result<(), Error> {
|
2021-02-17 01:15:54 -05:00
|
|
|
let mut i_stmt = tx.prepare(
|
|
|
|
r#"
|
2019-06-14 00:55:15 -04:00
|
|
|
insert or replace into signal_change (time_90k, changes) values (?, ?)
|
2021-02-17 01:15:54 -05:00
|
|
|
"#,
|
|
|
|
)?;
|
|
|
|
let mut d_stmt = tx.prepare(
|
|
|
|
r#"
|
2019-06-14 00:55:15 -04:00
|
|
|
delete from signal_change where time_90k = ?
|
2021-02-17 01:15:54 -05:00
|
|
|
"#,
|
|
|
|
)?;
|
2019-06-14 00:55:15 -04:00
|
|
|
for &t in &self.dirty_by_time {
|
|
|
|
match self.points_by_time.entry(t) {
|
|
|
|
Entry::Occupied(ref e) => {
|
|
|
|
let p = e.get();
|
2021-02-17 01:15:54 -05:00
|
|
|
i_stmt.execute(params![t.0, &p.data[p.changes_off..],])?;
|
|
|
|
}
|
2019-06-14 00:55:15 -04:00
|
|
|
Entry::Vacant(_) => {
|
2020-03-19 23:46:25 -04:00
|
|
|
d_stmt.execute(params![t.0])?;
|
2021-02-17 01:15:54 -05:00
|
|
|
}
|
2019-06-14 00:55:15 -04:00
|
|
|
}
|
|
|
|
}
|
2019-06-06 19:18:13 -04:00
|
|
|
Ok(())
|
|
|
|
}
|
|
|
|
|
2019-06-14 00:55:15 -04:00
|
|
|
/// Marks that the previous `flush` was completed successfully.
///
/// See notes there.
pub fn post_flush(&mut self) {
    // Everything previously dirty has been persisted.
    self.dirty_by_time.clear();
}
|
|
|
|
|
2019-06-06 19:18:13 -04:00
|
|
|
/// Loads the `signal` table into a map of `Signal`s keyed by id.
///
/// `cameras` and `days` start empty; they're filled later by `fill_signal_cameras`
/// and `fill_points` respectively.
fn init_signals(conn: &Connection) -> Result<BTreeMap<u32, Signal>, Error> {
    let mut signals = BTreeMap::new();
    let mut stmt = conn.prepare(
        r#"
        select
            id,
            source_uuid,
            type_uuid,
            short_name
        from
            signal
        "#,
    )?;
    let mut rows = stmt.query(params![])?;
    while let Some(row) = rows.next()? {
        let id = row.get(0)?;
        let source: FromSqlUuid = row.get(1)?;
        let type_: FromSqlUuid = row.get(2)?;
        signals.insert(
            id,
            Signal {
                id,
                source: source.0,
                type_: type_.0,
                short_name: row.get(3)?,
                cameras: Vec::new(),
                days: days::Map::new(),
            },
        );
    }
    Ok(signals)
}
|
|
|
|
|
2021-03-23 23:22:29 -04:00
|
|
|
/// Fills `points_by_time` from the database, also filling the `days`
/// index of each signal.
fn fill_points(
    conn: &Connection,
    points_by_time: &mut BTreeMap<recording::Time, Point>,
    signals_by_id: &mut BTreeMap<u32, Signal>,
) -> Result<(), Error> {
    let mut stmt = conn.prepare(
        r#"
        select
            time_90k,
            changes
        from
            signal_change
        order by time_90k
        "#,
    )?;
    let mut rows = stmt.query(params![])?;
    let mut cur = BTreeMap::new(); // latest signal -> state, where state != 0

    // signal -> (time of the change that set it, state); used to credit the days
    // index for the span until the next change to that signal.
    let mut sig_last_state = BTreeMap::new();
    while let Some(row) = rows.next()? {
        let time_90k = recording::Time(row.get(0)?);

        let changes = row.get_raw_checked(1)?.as_blob()?;
        // `cur` before this row is the "prev" state for the new point.
        let before = cur.clone();
        let mut it = PointDataIterator::new(changes);
        while let Some((signal, state)) = it.next()? {
            let e = sig_last_state.entry(signal);
            if let Entry::Occupied(ref e) = e {
                // This change ends the signal's previous state span.
                let (prev_time, prev_state) = *e.get();
                let s = signals_by_id.get_mut(&signal).ok_or_else(|| {
                    format_err!("time {} references invalid signal {}", time_90k, signal)
                })?;
                s.days.adjust(prev_time..time_90k, 0, prev_state);
            }
            if state == 0 {
                cur.remove(&signal);
                if let Entry::Occupied(e) = e {
                    e.remove_entry();
                }
            } else {
                cur.insert(signal, state);
                *e.or_default() = (time_90k, state);
            }
        }
        points_by_time.insert(time_90k, Point::new(&before, changes));
    }
    // Point invariant: the final point must revert everything to state 0.
    if !cur.is_empty() {
        bail!(
            "far future state should be unknown for all signals; is: {:?}",
            cur
        );
    }
    Ok(())
}
|
|
|
|
|
|
|
|
/// Fills the `cameras` field of the `Signal` structs within the supplied `signals`.
fn fill_signal_cameras(
    conn: &Connection,
    signals: &mut BTreeMap<u32, Signal>,
) -> Result<(), Error> {
    let mut stmt = conn.prepare(
        r#"
        select
            signal_id,
            camera_id,
            type
        from
            signal_camera
        order by signal_id, camera_id
        "#,
    )?;
    let mut rows = stmt.query(params![])?;
    while let Some(row) = rows.next()? {
        let signal_id = row.get(0)?;
        let s = signals.get_mut(&signal_id).ok_or_else(|| {
            format_err!("signal_camera row for unknown signal id {}", signal_id)
        })?;
        let type_ = row.get(2)?;
        s.cameras.push(SignalCamera {
            camera_id: row.get(1)?,
            // Decode the integer `type` column; see `SignalCameraType`.
            type_: match type_ {
                0 => SignalCameraType::Direct,
                1 => SignalCameraType::Indirect,
                _ => bail!("unknown signal_camera type {}", type_),
            },
        });
    }
    Ok(())
}
|
|
|
|
|
|
|
|
fn init_types(conn: &Connection) -> Result<FnvHashMap<Uuid, Type>, Error> {
|
|
|
|
let mut types = FnvHashMap::default();
|
2021-02-17 01:15:54 -05:00
|
|
|
let mut stmt = conn.prepare(
|
|
|
|
r#"
|
2019-06-06 19:18:13 -04:00
|
|
|
select
|
|
|
|
type_uuid,
|
|
|
|
value,
|
|
|
|
name,
|
|
|
|
motion,
|
|
|
|
color
|
|
|
|
from
|
|
|
|
signal_type_enum
|
|
|
|
order by type_uuid, value
|
2021-02-17 01:15:54 -05:00
|
|
|
"#,
|
|
|
|
)?;
|
2019-06-14 00:55:15 -04:00
|
|
|
let mut rows = stmt.query(params![])?;
|
2019-06-06 19:18:13 -04:00
|
|
|
while let Some(row) = rows.next()? {
|
|
|
|
let type_: FromSqlUuid = row.get(0)?;
|
2021-02-17 01:15:54 -05:00
|
|
|
types
|
|
|
|
.entry(type_.0)
|
|
|
|
.or_insert_with(Type::default)
|
|
|
|
.states
|
|
|
|
.push(TypeState {
|
|
|
|
value: row.get(1)?,
|
|
|
|
name: row.get(2)?,
|
|
|
|
motion: row.get(3)?,
|
|
|
|
color: row.get(4)?,
|
|
|
|
});
|
2019-06-06 19:18:13 -04:00
|
|
|
}
|
|
|
|
Ok(types)
|
|
|
|
}
|
|
|
|
|
2021-02-17 01:15:54 -05:00
|
|
|
    /// Returns all signals, keyed by id.
    pub fn signals_by_id(&self) -> &BTreeMap<u32, Signal> {
        &self.signals_by_id
    }
|
|
|
|
    /// Returns all known signal types with their states, keyed by type uuid.
    pub fn types_by_uuid(&self) -> &FnvHashMap<Uuid, Type> {
        &self.types_by_uuid
    }
|
2021-03-23 23:22:29 -04:00
|
|
|
|
|
|
|
    /// Checks invariants on `points_by_time`; the check is expensive, so this is
    /// a no-op in release builds.
    #[cfg(not(debug_assertions))]
    fn debug_assert_point_invariants(&self) {}
|
|
|
|
|
|
|
|
/// Checks invariants on `points_by_time` (expensive).
|
|
|
|
#[cfg(debug_assertions)]
|
|
|
|
fn debug_assert_point_invariants(&self) {
|
|
|
|
let mut expected_prev = BTreeMap::new();
|
|
|
|
for (t, p) in self.points_by_time.iter() {
|
|
|
|
let cur = p.prev().to_map().expect("in-mem prev is valid");
|
|
|
|
assert_eq!(&expected_prev, &cur, "time {} prev mismatch", t);
|
|
|
|
p.changes().update_map(&mut expected_prev);
|
|
|
|
}
|
|
|
|
assert_eq!(
|
|
|
|
expected_prev.len(),
|
|
|
|
0,
|
|
|
|
"last point final state should be empty"
|
|
|
|
);
|
|
|
|
}
|
2019-06-06 19:18:13 -04:00
|
|
|
}
|
|
|
|
|
|
|
|
/// Representation of a `signal` row.
#[derive(Debug)]
pub struct Signal {
    /// Database id, as in the `signal` table's `id` column.
    pub id: u32,
    /// Uuid of the signal's source, as in the `source_uuid` column.
    pub source: Uuid,
    /// Uuid of the signal's type; keys into `State::types_by_uuid`.
    pub type_: Uuid,
    /// Human-readable name, as in the `short_name` column.
    pub short_name: String,

    /// The cameras this signal is associated with. Sorted by camera id, which is unique.
    pub cameras: Vec<SignalCamera>,

    /// Time spent in each state, aggregated by day.
    pub days: days::Map<days::SignalValue>,
}
|
|
|
|
|
|
|
|
/// Representation of a `signal_type_enum` row.
/// `type_uuid` is implied by the `Type` which owns this struct.
#[derive(Debug)]
pub struct TypeState {
    /// The state's numeric value, unique within its type.
    pub value: u16,
    /// Human-readable name of the state (e.g. "still" or "moving").
    pub name: String,
    /// Whether this state represents motion.
    pub motion: bool,
    /// Color used when rendering this state (e.g. "black" or "red").
    pub color: String,
}
|
|
|
|
|
|
|
|
/// Representation of a signal type; currently this just gathers together the TypeStates.
///
/// Note there is also an implied state `0` (unknown) for every type, which need
/// not appear in `states`.
#[derive(Debug, Default)]
pub struct Type {
    /// The possible states associated with this type. They are sorted by value, which is unique.
    pub states: Vec<TypeState>,
}
|
|
|
|
|
|
|
|
#[cfg(test)]
mod tests {
    use super::*;
    use crate::{db, testutil};
    use rusqlite::Connection;
    use smallvec::smallvec;

    /// Decodes the example point data blob and checks each (signal, state) pair.
    #[test]
    fn test_point_data_it() {
        // Example taken from the .sql file.
        let data = b"\x01\x01\x01\x01\xc4\x01\x02";
        let mut it = super::PointDataIterator::new(data);
        assert_eq!(it.next().unwrap(), Some((1, 1)));
        assert_eq!(it.next().unwrap(), Some((3, 1)));
        assert_eq!(it.next().unwrap(), Some((200, 2)));
        assert_eq!(it.next().unwrap(), None);
    }

    /// A freshly-initialized database should report no signal state changes.
    #[test]
    fn test_empty_db() {
        testutil::init();
        let mut conn = Connection::open_in_memory().unwrap();
        db::init(&mut conn).unwrap();
        let s = State::init(&conn).unwrap();
        s.list_changes_by_time(
            recording::Time::min_value()..recording::Time::max_value(),
            &mut |_r| panic!("no changes expected"),
        );
    }

    /// Full cycle: seed two signals, update state in memory, flush to the
    /// database, reload, and verify the same changes and per-day totals survive.
    /// Also exercises `max_signal_changes` expiry of the oldest points.
    #[test]
    fn round_trip() {
        testutil::init();
        let mut conn = Connection::open_in_memory().unwrap();
        db::init(&mut conn).unwrap();
        // Seed two signals sharing one type, plus the type's two states.
        // max_signal_changes = 2 so a later update forces old points out.
        conn.execute_batch(
            r#"
            update meta set max_signal_changes = 2;

            insert into signal (id, source_uuid, type_uuid, short_name)
                        values (1, x'1B3889C0A59F400DA24C94EBEB19CC3A',
                                x'EE66270FD9C648198B339720D4CBCA6B', 'a'),
                               (2, x'A4A73D9A53424EBCB9F6366F1E5617FA',
                                x'EE66270FD9C648198B339720D4CBCA6B', 'b');

            insert into signal_type_enum (type_uuid, value, name, motion, color)
               values (x'EE66270FD9C648198B339720D4CBCA6B', 1, 'still', 0, 'black'),
                      (x'EE66270FD9C648198B339720D4CBCA6B', 2, 'moving', 1, 'red');
            "#,
        )
        .unwrap();
        let mut s = State::init(&conn).unwrap();
        // Nothing recorded yet.
        s.list_changes_by_time(
            recording::Time::min_value()..recording::Time::max_value(),
            &mut |_r| panic!("no changes expected"),
        );
        const START: recording::Time = recording::Time(140067462600000); // 2019-04-26T11:59:00
        const NOW: recording::Time = recording::Time(140067468000000); // 2019-04-26T12:00:00
        s.update_signals(START..NOW, &[1, 2], &[2, 1]).unwrap();
        let mut rows = Vec::new();

        // Each signal changes state at START and reverts to unknown (0) at NOW.
        const EXPECTED: &[ListStateChangesRow] = &[
            ListStateChangesRow {
                when: START,
                signal: 1,
                state: 2,
            },
            ListStateChangesRow {
                when: START,
                signal: 2,
                state: 1,
            },
            ListStateChangesRow {
                when: NOW,
                signal: 1,
                state: 0,
            },
            ListStateChangesRow {
                when: NOW,
                signal: 2,
                state: 0,
            },
        ];

        s.list_changes_by_time(
            recording::Time::min_value()..recording::Time::max_value(),
            &mut |r| rows.push(*r),
        );
        assert_eq!(&rows[..], EXPECTED);
        // Per-day aggregation: the whole interval accrues to the day 2019-04-26.
        // Signal 1 accrues it in the second slot, signal 2 in the first —
        // presumably slot index corresponds to (state - 1); confirm against
        // days::SignalValue if this test is extended.
        let mut expected_days = days::Map::new();
        expected_days.0.insert(
            days::Key(*b"2019-04-26"),
            days::SignalValue {
                states: smallvec![0, (NOW - START).0 as u64],
            },
        );
        assert_eq!(&s.signals_by_id.get(&1).unwrap().days, &expected_days);
        expected_days.0.clear();
        expected_days.0.insert(
            days::Key(*b"2019-04-26"),
            days::SignalValue {
                states: smallvec![(NOW - START).0 as u64],
            },
        );
        assert_eq!(&s.signals_by_id.get(&2).unwrap().days, &expected_days);

        // Flush to the database, reload, and expect the identical change list.
        {
            let tx = conn.transaction().unwrap();
            s.flush(&tx).unwrap();
            tx.commit().unwrap();
        }

        drop(s);
        let mut s = State::init(&conn).unwrap();
        rows.clear();
        s.list_changes_by_time(
            recording::Time::min_value()..recording::Time::max_value(),
            &mut |r| rows.push(*r),
        );
        assert_eq!(&rows[..], EXPECTED);

        // Go through it again. This time, hit the max number of signals, forcing START to be
        // dropped.
        const SOON: recording::Time = recording::Time(140067473400000); // 2019-04-26T12:01:00
        s.update_signals(NOW..SOON, &[1, 2], &[1, 2]).unwrap();
        rows.clear();
        const EXPECTED2: &[ListStateChangesRow] = &[
            ListStateChangesRow {
                when: NOW,
                signal: 1,
                state: 1,
            },
            ListStateChangesRow {
                when: NOW,
                signal: 2,
                state: 2,
            },
            ListStateChangesRow {
                when: SOON,
                signal: 1,
                state: 0,
            },
            ListStateChangesRow {
                when: SOON,
                signal: 2,
                state: 0,
            },
        ];
        s.list_changes_by_time(
            recording::Time::min_value()..recording::Time::max_value(),
            &mut |r| rows.push(*r),
        );
        assert_eq!(&rows[..], EXPECTED2);

        // Flush and reload once more; only the post-expiry changes remain.
        {
            let tx = conn.transaction().unwrap();
            s.flush(&tx).unwrap();
            tx.commit().unwrap();
        }
        drop(s);
        let s = State::init(&conn).unwrap();
        rows.clear();
        s.list_changes_by_time(
            recording::Time::min_value()..recording::Time::max_value(),
            &mut |r| rows.push(*r),
        );
        assert_eq!(&rows[..], EXPECTED2);
    }
}
|