address some no-op clippy warnings

Scott Lamb
2021-05-17 14:31:50 -07:00
parent 603f02b686
commit 54bd068706
32 changed files with 185 additions and 241 deletions

View File

@@ -9,7 +9,6 @@ use base::{bail_t, format_err_t, strutil, ErrorKind, ResultExt};
use failure::{bail, format_err, Error};
use fnv::FnvHashMap;
use lazy_static::lazy_static;
use libpasta;
use log::info;
use parking_lot::Mutex;
use protobuf::Message;
@@ -239,12 +238,8 @@ impl Session {
pub struct RawSessionId([u8; 48]);
impl RawSessionId {
pub fn new() -> Self {
RawSessionId([0u8; 48])
}
pub fn decode_base64(input: &[u8]) -> Result<Self, Error> {
let mut s = RawSessionId::new();
let mut s = RawSessionId([0u8; 48]);
let l = ::base64::decode_config_slice(input, ::base64::STANDARD_NO_PAD, &mut s.0[..])?;
if l != 48 {
bail!("session id must be 48 bytes");
@@ -625,7 +620,7 @@ impl State {
sessions: &'s mut FnvHashMap<SessionHash, Session>,
permissions: Permissions,
) -> Result<(RawSessionId, &'s Session), Error> {
let mut session_id = RawSessionId::new();
let mut session_id = RawSessionId([0u8; 48]);
rand.fill(&mut session_id.0).unwrap();
let mut seed = [0u8; 32];
rand.fill(&mut seed).unwrap();
@@ -793,7 +788,7 @@ impl State {
":id": &id,
})?;
}
for (_, s) in &self.sessions {
for s in self.sessions.values() {
if !s.dirty {
continue;
}
@@ -813,10 +808,10 @@ impl State {
///
/// See notes there.
pub fn post_flush(&mut self) {
for (_, u) in &mut self.users_by_id {
for u in self.users_by_id.values_mut() {
u.dirty = false;
}
for (_, s) in &mut self.sessions {
for s in self.sessions.values_mut() {
s.dirty = false;
}
}
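
Along with dropping the redundant `use libpasta;` import (clippy's `single_component_path_imports`), the `for (_, s)` → `.values()` / `.values_mut()` rewrites above address clippy's `for_kv_map` lint, which fires when a map loop destructures the key only to discard it. A minimal standalone sketch (plain `HashMap` with hypothetical contents, not the project's session types):

use std::collections::HashMap;

fn main() {
    let mut dirty: HashMap<i32, bool> = HashMap::new();
    dirty.insert(1, true);

    // Lints as clippy::for_kv_map: the key is bound only to be ignored.
    for (_, d) in &mut dirty {
        *d = false;
    }
    // Lint-free equivalent.
    for d in dirty.values_mut() {
        *d = false;
    }
    assert!(dirty.values().all(|d| !d));
}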

View File

@@ -232,7 +232,7 @@ struct Stream {
type Dir = FnvHashMap<i32, Stream>;
fn summarize_index(video_index: &[u8]) -> Result<RecordingSummary, Error> {
let mut it = recording::SampleIndexIterator::new();
let mut it = recording::SampleIndexIterator::default();
let mut media_duration = 0;
let mut video_samples = 0;
let mut video_sync_samples = 0;

View File

@@ -152,17 +152,16 @@ pub struct SignalChange {
new_state: u16,
}
#[derive(Clone, Debug, Eq, PartialEq)]
#[derive(Clone, Debug, Default, Eq, PartialEq)]
pub struct Map<V: Value>(pub(crate) BTreeMap<Key, V>);
impl<V: Value> Map<V> {
pub fn new() -> Self {
Self(BTreeMap::new())
}
pub fn len(&self) -> usize {
self.0.len()
}
pub fn is_empty(&self) -> bool {
self.0.is_empty()
}
pub fn get(&self, k: &Key) -> Option<&V> {
self.0.get(k)
}
@@ -296,9 +295,9 @@ impl Map<SignalValue> {
self.adjust_day(
day,
SignalChange {
duration,
old_state,
new_state,
duration,
},
);
@@ -338,7 +337,7 @@ mod tests {
#[test]
fn test_adjust_stream() {
testutil::init();
let mut m: Map<StreamValue> = Map::new();
let mut m: Map<StreamValue> = Map::default();
// Create a day.
let test_time = Time(130647162600000i64); // 2015-12-31 23:59:00 (Pacific).
@@ -446,7 +445,7 @@ mod tests {
#[test]
fn test_adjust_signal() {
testutil::init();
let mut m: Map<SignalValue> = Map::new();
let mut m: Map<SignalValue> = Map::default();
let test_time = Time(130646844000000i64); // 2015-12-31 23:00:00 (Pacific).
let hr = Duration(60 * 60 * TIME_UNITS_PER_SEC);
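
Deriving `Default` and deleting the hand-written argument-less `new()` is the usual fix for clippy's `new_without_default`; call sites switch from `Map::new()` to `Map::default()` as the tests above do. A compile-checkable sketch, with the key type simplified to `u64` and the `Value` bound dropped:

use std::collections::BTreeMap;

#[derive(Clone, Debug, Default, Eq, PartialEq)]
pub struct Map<V>(pub BTreeMap<u64, V>);

fn main() {
    // Replaces the removed `Map::new()` constructor.
    let m: Map<u32> = Map::default();
    assert!(m.0.is_empty());
}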

View File

@@ -64,7 +64,7 @@ pub const EXPECTED_VERSION: i32 = 6;
/// Make it one less than a power of two so that the data structure's size is efficient.
const VIDEO_INDEX_CACHE_LEN: usize = 1023;
const GET_RECORDING_PLAYBACK_SQL: &'static str = r#"
const GET_RECORDING_PLAYBACK_SQL: &str = r#"
select
video_index
from
@@ -73,14 +73,14 @@ const GET_RECORDING_PLAYBACK_SQL: &'static str = r#"
composite_id = :composite_id
"#;
const INSERT_VIDEO_SAMPLE_ENTRY_SQL: &'static str = r#"
const INSERT_VIDEO_SAMPLE_ENTRY_SQL: &str = r#"
insert into video_sample_entry (width, height, pasp_h_spacing, pasp_v_spacing,
rfc6381_codec, data)
values (:width, :height, :pasp_h_spacing, :pasp_v_spacing,
:rfc6381_codec, :data)
"#;
const UPDATE_STREAM_COUNTERS_SQL: &'static str = r#"
const UPDATE_STREAM_COUNTERS_SQL: &str = r#"
update stream
set cum_recordings = :cum_recordings,
cum_media_duration_90k = :cum_media_duration_90k,
@@ -231,6 +231,7 @@ pub struct RecordingPlayback<'a> {
}
/// Bitmask in the `flags` field in the `recordings` table; see `schema.sql`.
#[repr(u32)]
pub enum RecordingFlags {
TrailingZero = 1,
@@ -356,37 +357,37 @@ pub struct Camera {
#[derive(Copy, Clone, Debug, Eq, PartialEq)]
pub enum StreamType {
MAIN,
SUB,
Main,
Sub,
}
impl StreamType {
pub fn from_index(i: usize) -> Option<Self> {
match i {
0 => Some(StreamType::MAIN),
1 => Some(StreamType::SUB),
0 => Some(StreamType::Main),
1 => Some(StreamType::Sub),
_ => None,
}
}
pub fn index(self) -> usize {
match self {
StreamType::MAIN => 0,
StreamType::SUB => 1,
StreamType::Main => 0,
StreamType::Sub => 1,
}
}
pub fn as_str(self) -> &'static str {
match self {
StreamType::MAIN => "main",
StreamType::SUB => "sub",
StreamType::Main => "main",
StreamType::Sub => "sub",
}
}
pub fn parse(type_: &str) -> Option<Self> {
match type_ {
"main" => Some(StreamType::MAIN),
"sub" => Some(StreamType::SUB),
"main" => Some(StreamType::Main),
"sub" => Some(StreamType::Sub),
_ => None,
}
}
@@ -398,7 +399,7 @@ impl ::std::fmt::Display for StreamType {
}
}
pub const ALL_STREAM_TYPES: [StreamType; 2] = [StreamType::MAIN, StreamType::SUB];
pub const ALL_STREAM_TYPES: [StreamType; 2] = [StreamType::Main, StreamType::Sub];
pub struct Stream {
pub id: i32,
@@ -708,7 +709,7 @@ impl StreamStateChanger {
bail!("missing stream {}", sid);
}
sids[i] = Some(sid);
let sc = mem::replace(*sc, StreamChange::default());
let sc = mem::take(*sc);
streams.push((sid, Some((camera_id, type_, sc))));
}
} else {
@@ -737,7 +738,7 @@ impl StreamStateChanger {
})?;
let id = tx.last_insert_rowid() as i32;
sids[i] = Some(id);
let sc = mem::replace(*sc, StreamChange::default());
let sc = mem::take(*sc);
streams.push((id, Some((camera_id, type_, sc))));
}
}
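
The `mem::replace(*sc, StreamChange::default())` → `mem::take(*sc)` changes in these two hunks apply clippy's `mem_replace_with_default`: for any `Default` type the two calls are equivalent, and `take` states the intent directly. A tiny sketch with a `Vec` standing in for `StreamChange`:

fn main() {
    let mut pending: Vec<i32> = vec![1, 2, 3];
    // Same as `std::mem::replace(&mut pending, Vec::default())`.
    let taken = std::mem::take(&mut pending);
    assert_eq!(taken, vec![1, 2, 3]);
    assert!(pending.is_empty()); // a default value is left behind
}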
@@ -768,7 +769,7 @@ impl StreamStateChanger {
bytes_to_add: 0,
fs_bytes_to_add: 0,
duration: recording::Duration(0),
committed_days: days::Map::new(),
committed_days: days::Map::default(),
record: sc.record,
cum_recordings: 0,
cum_media_duration: recording::Duration(0),
@@ -929,7 +930,7 @@ impl LockedDatabase {
/// longer connected). This doesn't work when the system is shutting down and nothing more is
/// sent, though.
pub fn clear_watches(&mut self) {
for (_, s) in &mut self.streams_by_id {
for s in self.streams_by_id.values_mut() {
s.on_live_segment.clear();
}
}
@@ -1229,14 +1230,11 @@ impl LockedDatabase {
/// Gets a given camera by uuid.
pub fn get_camera(&self, uuid: Uuid) -> Option<&Camera> {
match self.cameras_by_uuid.get(&uuid) {
Some(id) => Some(
self.cameras_by_id
.get(id)
.expect("uuid->id requires id->cam"),
),
None => None,
}
self.cameras_by_uuid.get(&uuid).map(|id| {
self.cameras_by_id
.get(id)
.expect("uuid->id requires id->cam")
})
}
/// Lists the specified recordings, passing them to a supplied function. Given that the
@@ -1439,7 +1437,7 @@ impl LockedDatabase {
trace!("cache hit for recording {}", id);
occupied.to_back();
let video_index = occupied.get();
return f(&RecordingPlayback { video_index });
f(&RecordingPlayback { video_index })
}
RawEntryMut::Vacant(vacant) => {
trace!("cache miss for recording {}", id);
@@ -1602,7 +1600,7 @@ impl LockedDatabase {
self.cameras_by_id.insert(
id,
Camera {
id: id,
id,
uuid: uuid.0,
short_name: row.get(2)?,
description: row.get(3)?,
@@ -1671,7 +1669,7 @@ impl LockedDatabase {
bytes_to_add: 0,
fs_bytes_to_add: 0,
duration: recording::Duration(0),
committed_days: days::Map::new(),
committed_days: days::Map::default(),
cum_recordings: row.get(7)?,
cum_media_duration: recording::Duration(row.get(8)?),
cum_runs: row.get(9)?,
@@ -1781,7 +1779,7 @@ impl LockedDatabase {
garbage_needs_unlink: FnvHashSet::default(),
garbage_unlinked: Vec::new(),
}),
Entry::Occupied(_) => Err(format_err!("duplicate sample file dir id {}", id))?,
Entry::Occupied(_) => bail!("duplicate sample file dir id {}", id),
};
d.last_complete_open = Some(*o);
mem::swap(&mut meta.last_complete_open, &mut meta.in_progress_open);
@@ -2155,22 +2153,21 @@ pub(crate) fn check_schema_version(conn: &rusqlite::Connection) -> Result<(), Er
that predates schema versioning, see guide/schema.md."
)
})?;
if ver < EXPECTED_VERSION {
bail!(
match ver.cmp(&EXPECTED_VERSION) {
std::cmp::Ordering::Less => bail!(
"Database schema version {} is too old (expected {}); \
see upgrade instructions in guide/upgrade.md.",
ver,
EXPECTED_VERSION
);
} else if ver > EXPECTED_VERSION {
bail!(
),
std::cmp::Ordering::Equal => Ok(()),
std::cmp::Ordering::Greater => bail!(
"Database schema version {} is too new (expected {}); \
must use a newer binary to match.",
ver,
EXPECTED_VERSION
);
),
}
Ok(())
}
/// The recording database. Abstracts away SQLite queries. Also maintains in-memory state
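
The schema-version rewrite above is clippy's `comparison_chain` suggestion: an `if a < b` / `else if a > b` ladder becomes one exhaustive `match` on `Ord::cmp`, and the compiler then guarantees all three orderings are handled. A standalone sketch with string errors in place of the crate's error type:

use std::cmp::Ordering;

fn check_version(ver: i32, expected: i32) -> Result<(), String> {
    match ver.cmp(&expected) {
        Ordering::Less => Err(format!("schema {} too old (expected {})", ver, expected)),
        Ordering::Equal => Ok(()),
        Ordering::Greater => Err(format!("schema {} too new (expected {})", ver, expected)),
    }
}

fn main() {
    assert!(check_version(6, 6).is_ok());
    assert!(check_version(5, 6).is_err());
    assert!(check_version(7, 6).is_err());
}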

View File

@@ -355,9 +355,9 @@ pub(crate) fn parse_id(id: &[u8]) -> Result<CompositeId, ()> {
return Err(());
}
let mut v: u64 = 0;
for i in 0..16 {
for b in id {
v = (v << 4)
| match id[i] {
| match b {
b @ b'0'..=b'9' => b - b'0',
b @ b'a'..=b'f' => b - b'a' + 10,
_ => return Err(()),
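
This hunk is clippy's `needless_range_loop`: indexing with a `0..16` counter when the slice can be iterated directly. A self-contained version of the pattern (hypothetical `hex_value` wrapper, not the real `parse_id`):

fn hex_value(id: &[u8]) -> Result<u64, ()> {
    let mut v: u64 = 0;
    for b in id {
        v = (v << 4)
            | u64::from(match b {
                b @ b'0'..=b'9' => b - b'0',
                b @ b'a'..=b'f' => b - b'a' + 10,
                _ => return Err(()),
            });
    }
    Ok(v)
}

fn main() {
    assert_eq!(hex_value(b"ff"), Ok(255));
    assert_eq!(hex_value(b"zz"), Err(()));
}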

View File

@@ -13,7 +13,7 @@ use std::ops::Range;
use uuid::Uuid;
// Note: the magic number "27000000" below is recording::MAX_RECORDING_DURATION.
const LIST_RECORDINGS_BY_TIME_SQL: &'static str = r#"
const LIST_RECORDINGS_BY_TIME_SQL: &str = r#"
select
recording.composite_id,
recording.run_offset,
@@ -37,7 +37,7 @@ const LIST_RECORDINGS_BY_TIME_SQL: &'static str = r#"
recording.start_time_90k
"#;
const LIST_RECORDINGS_BY_ID_SQL: &'static str = r#"
const LIST_RECORDINGS_BY_ID_SQL: &str = r#"
select
recording.composite_id,
recording.run_offset,
@@ -61,7 +61,7 @@ const LIST_RECORDINGS_BY_ID_SQL: &'static str = r#"
recording.composite_id
"#;
const STREAM_MIN_START_SQL: &'static str = r#"
const STREAM_MIN_START_SQL: &str = r#"
select
start_time_90k
from
@@ -71,7 +71,7 @@ const STREAM_MIN_START_SQL: &'static str = r#"
order by start_time_90k limit 1
"#;
const STREAM_MAX_START_SQL: &'static str = r#"
const STREAM_MAX_START_SQL: &str = r#"
select
start_time_90k,
wall_duration_90k
@@ -82,7 +82,7 @@ const STREAM_MAX_START_SQL: &'static str = r#"
order by start_time_90k desc;
"#;
const LIST_OLDEST_RECORDINGS_SQL: &'static str = r#"
const LIST_OLDEST_RECORDINGS_SQL: &str = r#"
select
composite_id,
start_time_90k,
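
All five constants here hit clippy's `redundant_static_lifetimes`: in a `const` or `static` item, a reference's lifetime is `'static` by definition, so writing it out adds nothing. Both declarations below have the type `&'static str`:

const WITH: &'static str = "explicit"; // lints: redundant_static_lifetimes
const WITHOUT: &str = "implied"; // identical type, lint-free

fn main() {
    let _: &'static str = WITH;
    let _: &'static str = WITHOUT;
    println!("{} / {}", WITH, WITHOUT);
}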

View File

@@ -50,7 +50,7 @@ pub fn rescale(from_off_90k: i32, from_duration_90k: i32, to_duration_90k: i32)
/// An iterator through a sample index (as described in `design/recording.md`).
/// Initially invalid; call `next()` before each read.
#[derive(Clone, Copy, Debug)]
#[derive(Clone, Copy, Debug, Default)]
pub struct SampleIndexIterator {
/// The index byte position of the next sample to read (low 31 bits) and if the current
/// sample is a key frame (high bit).
@@ -74,17 +74,6 @@ pub struct SampleIndexIterator {
}
impl SampleIndexIterator {
pub fn new() -> SampleIndexIterator {
SampleIndexIterator {
i_and_is_key: 0,
pos: 0,
start_90k: 0,
duration_90k: 0,
bytes: 0,
bytes_other: 0,
}
}
pub fn next(&mut self, data: &[u8]) -> Result<bool, Error> {
self.pos += self.bytes;
self.start_90k += self.duration_90k;
@@ -147,7 +136,7 @@ impl SampleIndexIterator {
}
/// An encoder for a sample index (as described in `design/recording.md`).
#[derive(Debug)]
#[derive(Debug, Default)]
pub struct SampleIndexEncoder {
prev_duration_90k: i32,
prev_bytes_key: i32,
@@ -155,14 +144,6 @@ pub struct SampleIndexEncoder {
}
impl SampleIndexEncoder {
pub fn new() -> Self {
SampleIndexEncoder {
prev_duration_90k: 0,
prev_bytes_key: 0,
prev_bytes_nonkey: 0,
}
}
pub fn add_sample(
&mut self,
duration_90k: i32,
@@ -205,7 +186,7 @@ pub struct Segment {
/// An iterator positioned at the beginning of the segment, or `None`. Most segments are
/// positioned at the beginning of the recording, so this is an optional box to shrink a long list
/// of segments. `None` is equivalent to `SampleIndexIterator::new()`.
/// of segments. `None` is equivalent to `SampleIndexIterator::default()`.
begin: Option<Box<SampleIndexIterator>>,
pub file_end: i32,
@@ -246,6 +227,7 @@ impl Segment {
<< 31),
};
#[allow(clippy::suspicious_operation_groupings)]
if desired_media_range_90k.start > desired_media_range_90k.end
|| desired_media_range_90k.end > recording.media_duration_90k
{
@@ -275,9 +257,9 @@ impl Segment {
recording
);
db.with_recording_playback(self_.id, &mut |playback| {
let mut begin = Box::new(SampleIndexIterator::new());
let data = &(&playback).video_index;
let mut it = SampleIndexIterator::new();
let mut begin = Box::new(SampleIndexIterator::default());
let data = &playback.video_index;
let mut it = SampleIndexIterator::default();
if !it.next(data)? {
bail!("no index");
}
@@ -352,11 +334,11 @@ impl Segment {
self.frames,
self.actual_start_90k()
);
let data = &(&playback).video_index;
let data = &playback.video_index;
let mut it = match self.begin {
Some(ref b) => **b,
None => {
let mut it = SampleIndexIterator::new();
let mut it = SampleIndexIterator::default();
if !it.next(data)? {
bail!("recording {} has no frames", self.id);
}
@@ -434,7 +416,7 @@ mod tests {
fn test_encode_example() {
testutil::init();
let mut r = db::RecordingToInsert::default();
let mut e = SampleIndexEncoder::new();
let mut e = SampleIndexEncoder::default();
e.add_sample(10, 1000, true, &mut r);
e.add_sample(9, 10, false, &mut r);
e.add_sample(11, 15, false, &mut r);
@@ -468,11 +450,11 @@ mod tests {
Sample { duration_90k: 0, bytes: 1000, is_key: false, },
];
let mut r = db::RecordingToInsert::default();
let mut e = SampleIndexEncoder::new();
let mut e = SampleIndexEncoder::default();
for sample in &samples {
e.add_sample(sample.duration_90k, sample.bytes, sample.is_key, &mut r);
}
let mut it = SampleIndexIterator::new();
let mut it = SampleIndexIterator::default();
for sample in &samples {
assert!(it.next(&r.video_index).unwrap());
assert_eq!(
@@ -519,7 +501,7 @@ mod tests {
},
];
for test in &tests {
let mut it = SampleIndexIterator::new();
let mut it = SampleIndexIterator::default();
assert_eq!(it.next(test.encoded).unwrap_err().to_string(), test.err);
}
}
@@ -546,7 +528,7 @@ mod tests {
fn test_segment_clipping_with_all_sync() {
testutil::init();
let mut r = db::RecordingToInsert::default();
let mut encoder = SampleIndexEncoder::new();
let mut encoder = SampleIndexEncoder::default();
for i in 1..6 {
let duration_90k = 2 * i;
let bytes = 3 * i;
@@ -568,7 +550,7 @@ mod tests {
fn test_segment_clipping_with_half_sync() {
testutil::init();
let mut r = db::RecordingToInsert::default();
let mut encoder = SampleIndexEncoder::new();
let mut encoder = SampleIndexEncoder::default();
for i in 1..6 {
let duration_90k = 2 * i;
let bytes = 3 * i;
@@ -586,7 +568,7 @@ mod tests {
fn test_segment_clipping_with_trailing_zero() {
testutil::init();
let mut r = db::RecordingToInsert::default();
let mut encoder = SampleIndexEncoder::new();
let mut encoder = SampleIndexEncoder::default();
encoder.add_sample(1, 1, true, &mut r);
encoder.add_sample(1, 2, true, &mut r);
encoder.add_sample(0, 3, true, &mut r);
@@ -601,7 +583,7 @@ mod tests {
fn test_segment_zero_desired_duration() {
testutil::init();
let mut r = db::RecordingToInsert::default();
let mut encoder = SampleIndexEncoder::new();
let mut encoder = SampleIndexEncoder::default();
encoder.add_sample(1, 1, true, &mut r);
let db = TestDb::new(RealClocks {});
let row = db.insert_recording_from_encoder(r);
@@ -615,7 +597,7 @@ mod tests {
fn test_segment_fast_path() {
testutil::init();
let mut r = db::RecordingToInsert::default();
let mut encoder = SampleIndexEncoder::new();
let mut encoder = SampleIndexEncoder::default();
for i in 1..6 {
let duration_90k = 2 * i;
let bytes = 3 * i;
@@ -634,7 +616,7 @@ mod tests {
fn test_segment_fast_path_with_trailing_zero() {
testutil::init();
let mut r = db::RecordingToInsert::default();
let mut encoder = SampleIndexEncoder::new();
let mut encoder = SampleIndexEncoder::default();
encoder.add_sample(1, 1, true, &mut r);
encoder.add_sample(1, 2, true, &mut r);
encoder.add_sample(0, 3, true, &mut r);
@@ -659,7 +641,7 @@ mod bench {
let data = include_bytes!("testdata/video_sample_index.bin");
b.bytes = data.len() as u64;
b.iter(|| {
let mut it = SampleIndexIterator::new();
let mut it = SampleIndexIterator::default();
while it.next(data).unwrap() {}
assert_eq!(30104460, it.pos);
assert_eq!(5399985, it.start_90k);
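
The doc comment edited above explains why `Segment::begin` is an `Option<Box<SampleIndexIterator>>`. A sketch of the size argument behind that comment, with the iterator's field types assumed from the `new()` constructor this commit removes (sizes hold on a 64-bit target):

#[allow(dead_code)]
#[derive(Clone, Copy, Debug, Default)]
struct SampleIndexIterator {
    i_and_is_key: u32,
    pos: i32,
    start_90k: i32,
    duration_90k: i32,
    bytes: i32,
    bytes_other: i32,
}

fn main() {
    // Six four-byte fields stored inline.
    assert_eq!(std::mem::size_of::<SampleIndexIterator>(), 24);
    // `Box` is never null, so `Option<Box<_>>` needs no discriminant:
    // one word per `Segment` instead of the full iterator state.
    assert_eq!(std::mem::size_of::<Option<Box<SampleIndexIterator>>>(), 8);
}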

View File

@@ -167,7 +167,7 @@ impl<'a> PointDataIterator<'a> {
Ok(Some((signal, state as u16)))
}
fn to_map(mut self) -> Result<BTreeMap<u32, u16>, Error> {
fn into_map(mut self) -> Result<BTreeMap<u32, u16>, Error> {
let mut out = BTreeMap::new();
while let Some((signal, state)) = self.next()? {
out.insert(signal, state);
@@ -291,7 +291,7 @@ impl State {
fn gc(&mut self) {
let max = match self.max_signal_changes {
None => return,
Some(m) if m < 0 => 0 as usize,
Some(m) if m < 0 => 0_usize,
Some(m) if m > (isize::max_value() as i64) => return,
Some(m) => m as usize,
};
@@ -311,7 +311,7 @@ impl State {
.points_by_time
.keys()
.take(to_remove)
.map(|t| *t)
.copied()
.collect();
for t in &remove {
@@ -406,7 +406,7 @@ impl State {
if let Some((&t, ref mut p)) = self.points_by_time.range_mut(..=when.end).next_back() {
if t == when.end {
// Already have a point at end. Adjust it. prev starts unchanged...
prev = p.prev().to_map().expect("in-mem prev is valid");
prev = p.prev().into_map().expect("in-mem prev is valid");
// ...and then prev and changes are altered to reflect the desired update.
State::update_signals_end_maps(
@@ -505,8 +505,8 @@ impl State {
if let Some((&t, ref mut p)) = self.points_by_time.range_mut(..=start).next_back() {
if t == start {
// Reuse existing point at start.
prev = p.prev().to_map().expect("in-mem prev is valid");
let mut changes = p.changes().to_map().expect("in-mem changes is valid");
prev = p.prev().into_map().expect("in-mem prev is valid");
let mut changes = p.changes().into_map().expect("in-mem changes is valid");
let mut dirty = false;
for (&signal, &state) in signals.iter().zip(states) {
match changes.entry(signal) {
@@ -570,7 +570,7 @@ impl State {
let after_start = recording::Time(when.start.0 + 1);
let mut prev_t = when.start;
for (&t, ref mut p) in self.points_by_time.range_mut(after_start..when.end) {
let mut prev = p.prev().to_map().expect("in-mem prev is valid");
let mut prev = p.prev().into_map().expect("in-mem prev is valid");
// Update prev to reflect desired update; likewise each signal's days index.
for (&signal, &state) in signals.iter().zip(states) {
@@ -691,7 +691,7 @@ impl State {
type_: type_.0,
short_name: row.get(3)?,
cameras: Vec::new(),
days: days::Map::new(),
days: days::Map::default(),
},
);
}
@@ -837,7 +837,7 @@ impl State {
fn debug_assert_point_invariants(&self) {
let mut expected_prev = BTreeMap::new();
for (t, p) in self.points_by_time.iter() {
let cur = p.prev().to_map().expect("in-mem prev is valid");
let cur = p.prev().into_map().expect("in-mem prev is valid");
assert_eq!(&expected_prev, &cur, "time {} prev mismatch", t);
p.changes().update_map(&mut expected_prev);
}
@@ -973,7 +973,7 @@ mod tests {
&mut |_r| panic!("no changes expected"),
);
assert_eq!(&rows[..], EXPECTED);
let mut expected_days = days::Map::new();
let mut expected_days = days::Map::default();
expected_days.0.insert(
days::Key(*b"2019-04-26"),
days::SignalValue {
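
Besides `.map(|t| *t)` → `.copied()` and `0 as usize` → `0_usize`, the notable change in this file is the `to_map` → `into_map` rename, which follows clippy's `wrong_self_convention`: a `to_*` method should borrow, while one that consumes `self` gets the `into_*` prefix. A sketch with a hypothetical wrapper type:

struct PointData(Vec<(u32, u16)>);

impl PointData {
    // Borrows self, so `to_*` is the right prefix.
    fn to_pairs(&self) -> Vec<(u32, u16)> {
        self.0.clone()
    }
    // Consumes self, so clippy wants `into_*`.
    fn into_pairs(self) -> Vec<(u32, u16)> {
        self.0
    }
}

fn main() {
    let p = PointData(vec![(1, 2)]);
    let borrowed = p.to_pairs();
    let owned = p.into_pairs(); // `p` is moved here
    assert_eq!(borrowed, owned);
}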

View File

@@ -10,13 +10,10 @@ use crate::dir;
use crate::writer;
use base::clock::Clocks;
use fnv::FnvHashMap;
use mylog;
use rusqlite;
use std::env;
use std::sync::Arc;
use std::thread;
use tempfile::TempDir;
use time;
use uuid::Uuid;
static INIT: parking_lot::Once = parking_lot::Once::new();
@@ -42,7 +39,7 @@ pub const TEST_VIDEO_SAMPLE_ENTRY_DATA: &[u8] =
pub fn init() {
INIT.call_once(|| {
let h = mylog::Builder::new()
.set_spec(&::std::env::var("MOONFIRE_LOG").unwrap_or("info".to_owned()))
.set_spec(&::std::env::var("MOONFIRE_LOG").unwrap_or_else(|_| "info".to_owned()))
.build();
h.install().unwrap();
env::set_var("TZ", "America/Los_Angeles");
@@ -80,7 +77,7 @@ impl<C: Clocks + Clone> TestDb<C> {
let dir;
{
let mut l = db.lock();
sample_file_dir_id = l.add_sample_file_dir(path.to_owned()).unwrap();
sample_file_dir_id = l.add_sample_file_dir(path).unwrap();
assert_eq!(
TEST_CAMERA_ID,
l.add_camera(db::CameraChange {
@@ -116,7 +113,7 @@ impl<C: Clocks + Clone> TestDb<C> {
.unwrap();
}
let mut dirs_by_stream_id = FnvHashMap::default();
dirs_by_stream_id.insert(TEST_STREAM_ID, dir.clone());
dirs_by_stream_id.insert(TEST_STREAM_ID, dir);
let (syncer_channel, syncer_join) =
writer::start_syncer(db.clone(), sample_file_dir_id).unwrap();
TestDb {
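
The `unwrap_or` → `unwrap_or_else` change is clippy's `or_fun_call`: an argument passed to `unwrap_or` is evaluated eagerly, so the `"info".to_owned()` allocation happened even when `MOONFIRE_LOG` was set. A standalone illustration:

fn main() {
    // Allocates the fallback string unconditionally.
    let eager = std::env::var("MOONFIRE_LOG").unwrap_or("info".to_owned());
    // Runs the closure only if the variable is unset or invalid.
    let lazy = std::env::var("MOONFIRE_LOG").unwrap_or_else(|_| "info".to_owned());
    assert_eq!(eager, lazy);
}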

View File

@@ -22,8 +22,7 @@ mod v3_to_v4;
mod v4_to_v5;
mod v5_to_v6;
const UPGRADE_NOTES: &'static str =
concat!("upgraded using moonfire-db ", env!("CARGO_PKG_VERSION"));
const UPGRADE_NOTES: &str = concat!("upgraded using moonfire-db ", env!("CARGO_PKG_VERSION"));
#[derive(Debug)]
pub struct Args<'a> {

View File

@@ -99,7 +99,7 @@ struct CameraState {
}
fn has_trailing_zero(video_index: &[u8]) -> Result<bool, Error> {
let mut it = recording::SampleIndexIterator::new();
let mut it = recording::SampleIndexIterator::default();
while it.next(video_index)? {}
Ok(it.duration_90k == 0)
}

View File

@@ -77,7 +77,7 @@ pub fn run(_args: &super::Args, tx: &rusqlite::Transaction) -> Result<(), Error>
if e == nix::Error::Sys(nix::errno::Errno::ENOENT) {
continue; // assume it was already moved.
}
Err(e)?;
return Err(e.into());
}
}
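
`Err(e)?;` → `return Err(e.into())` is clippy's `try_err`: applying `?` to a value that is already an `Err` always returns, and saying so explicitly is clearer. A sketch with string errors rather than the crate's `failure`/`nix` types:

fn parse_both(a: &str, b: &str) -> Result<(i32, i32), String> {
    let x = match a.parse::<i32>() {
        Ok(v) => v,
        // was the shape clippy flags: `Err(e.to_string())?;`
        Err(e) => return Err(e.to_string()),
    };
    let y = b.parse::<i32>().map_err(|e| e.to_string())?;
    Ok((x, y))
}

fn main() {
    assert_eq!(parse_both("1", "2"), Ok((1, 2)));
    assert!(parse_both("x", "2").is_err());
}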

View File

@@ -358,7 +358,7 @@ impl<C: Clocks + Clone> Syncer<C, Arc<dir::SampleFileDir>> {
/// Called from main thread.
fn initial_rotation(&mut self) -> Result<(), Error> {
self.do_rotation(|db| {
let streams: Vec<i32> = db.streams_by_id().keys().map(|&id| id).collect();
let streams: Vec<i32> = db.streams_by_id().keys().copied().collect();
for &stream_id in &streams {
delete_recordings(db, stream_id, 0)?;
}
@@ -379,7 +379,7 @@ impl<C: Clocks + Clone> Syncer<C, Arc<dir::SampleFileDir>> {
let mut garbage: Vec<_> = {
let l = self.db.lock();
let d = l.sample_file_dirs_by_id().get(&self.dir_id).unwrap();
d.garbage_needs_unlink.iter().map(|id| *id).collect()
d.garbage_needs_unlink.iter().copied().collect()
};
if !garbage.is_empty() {
// Try to delete files; retain ones in `garbage` that don't exist.
@@ -422,7 +422,9 @@ impl<C: Clocks + Clone, D: DirWriter> Syncer<C, D> {
let now = self.db.clocks().monotonic();
// Calculate the timeout to use, mapping negative durations to 0.
let timeout = (t - now).to_std().unwrap_or(StdDuration::new(0, 0));
let timeout = (t - now)
.to_std()
.unwrap_or_else(|_| StdDuration::new(0, 0));
match self.db.clocks().recv_timeout(&cmds, timeout) {
Err(mpsc::RecvTimeoutError::Disconnected) => return false, // cmd senders gone.
Err(mpsc::RecvTimeoutError::Timeout) => {
@@ -456,7 +458,7 @@ impl<C: Clocks + Clone, D: DirWriter> Syncer<C, D> {
let mut garbage: Vec<_> = {
let l = self.db.lock();
let d = l.sample_file_dirs_by_id().get(&self.dir_id).unwrap();
d.garbage_needs_unlink.iter().map(|id| *id).collect()
d.garbage_needs_unlink.iter().copied().collect()
};
if garbage.is_empty() {
return;
@@ -691,7 +693,7 @@ impl<'a, C: Clocks + Clone, D: DirWriter> Writer<'a, C, D> {
self.state = WriterState::Open(InnerWriter {
f,
r,
e: recording::SampleIndexEncoder::new(),
e: recording::SampleIndexEncoder::default(),
id,
hasher: blake3::Hasher::new(),
local_start: recording::Time(i64::max_value()),
@@ -878,7 +880,7 @@ impl<F: FileWriter> InnerWriter<F> {
let mut l = self.r.lock();
l.flags = flags;
l.local_time_delta = self.local_start - l.start;
l.sample_file_blake3 = Some(blake3.as_bytes().clone());
l.sample_file_blake3 = Some(*blake3.as_bytes());
wall_duration = recording::Duration(i64::from(l.wall_duration_90k));
run_offset = l.run_offset;
end = l.start + wall_duration;
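
Alongside more `.copied()` and `unwrap_or_else` fixes, this file picks up clippy's `clone_on_copy`: `blake3.as_bytes()` yields `&[u8; 32]`, and since arrays of `Copy` elements are themselves `Copy`, `*blake3.as_bytes()` copies the digest without the `.clone()` that suggests something costlier. A sketch with a hypothetical digest value:

fn main() {
    let digest: [u8; 32] = [0xab; 32];
    let by_ref: &[u8; 32] = &digest;

    let cloned: [u8; 32] = by_ref.clone(); // lints: clone_on_copy
    let copied: [u8; 32] = *by_ref; // a plain dereference does the same
    assert_eq!(cloned, copied);
}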