cargo clippy --fix

This switches to inlining variable names into format args. Clippy
now suggests this syntax, and I like it. (A small before/after sketch
follows the file summary below.)
Scott Lamb 2023-01-29 15:01:19 -08:00
parent 159e426943
commit a9430464b6
31 changed files with 119 additions and 162 deletions
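
A minimal sketch of the syntax change, with illustrative variable names that are not taken from the diff: Clippy's uninlined_format_args lint suggests moving bare variable names into the format string itself (captured identifiers have been stable since Rust 1.58).

fn main() {
    let id = 42;
    let cause = "connection reset";

    // Before: the value is passed as a separate positional argument.
    let old = format!("missing uid {}!", id);
    // After: the identifier is inlined directly into the format string.
    let new = format!("missing uid {id}!");
    assert_eq!(old, new);

    // Format specs still work with inlined names, e.g. {x:?} or {x:02x}.
    println!("caused by: {cause}");
    println!("id={id:02x}"); // prints "id=2a"

    // Only bare identifiers can be inlined; expressions still need a placeholder.
    println!("next={}", id + 1);
}

Only bare identifiers qualify, which is why expression arguments in the diff, such as format!("r-{}", path.display()), keep their positional placeholders.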

View File

@ -9,7 +9,7 @@ use std::fmt::{self, Write};
pub fn prettify_failure(e: &failure::Error) -> String {
let mut msg = e.to_string();
for cause in e.iter_causes() {
write!(&mut msg, "\ncaused by: {}", cause).unwrap();
write!(&mut msg, "\ncaused by: {cause}").unwrap();
}
if e.backtrace().is_empty() {
write!(

View File

@ -28,7 +28,7 @@ pub fn encode_size(mut raw: i64) -> String {
}
}
if raw > 0 || encoded.is_empty() {
write!(&mut encoded, "{}", raw).unwrap();
write!(&mut encoded, "{raw}").unwrap();
} else {
encoded.pop(); // remove trailing space.
}

View File

@ -451,7 +451,7 @@ impl State {
.context(ErrorKind::Unknown)?;
let e = self.users_by_id.entry(id);
let e = match e {
::std::collections::btree_map::Entry::Vacant(_) => panic!("missing uid {}!", id),
::std::collections::btree_map::Entry::Vacant(_) => panic!("missing uid {id}!"),
::std::collections::btree_map::Entry::Occupied(e) => e,
};
{
@ -519,7 +519,7 @@ impl State {
let e = self.users_by_id.entry(id);
let e = match e {
::std::collections::btree_map::Entry::Vacant(e) => e,
::std::collections::btree_map::Entry::Occupied(_) => panic!("uid {} conflict!", id),
::std::collections::btree_map::Entry::Occupied(_) => panic!("uid {id} conflict!"),
};
Ok(e.insert(User {
id,
@ -963,7 +963,7 @@ mod tests {
0,
)
.unwrap_err();
assert_eq!(format!("{}", e), "Unauthenticated: incorrect password");
assert_eq!(format!("{e}"), "Unauthenticated: incorrect password");
c.set_password("hunter2".to_owned());
state.apply(&conn, c).unwrap();
let e = state
@ -976,7 +976,7 @@ mod tests {
0,
)
.unwrap_err();
assert_eq!(format!("{}", e), "Unauthenticated: incorrect password");
assert_eq!(format!("{e}"), "Unauthenticated: incorrect password");
let sid = {
let (sid, s) = state
.login_by_password(
@ -1011,7 +1011,7 @@ mod tests {
.authenticate_session(&conn, req.clone(), &sid.hash())
.unwrap_err();
assert_eq!(
format!("{}", e),
format!("{e}"),
"Unauthenticated: session is no longer valid (reason=1)"
);
@ -1022,7 +1022,7 @@ mod tests {
.authenticate_session(&conn, req, &sid.hash())
.unwrap_err();
assert_eq!(
format!("{}", e),
format!("{e}"),
"Unauthenticated: session is no longer valid (reason=1)"
);
}
@ -1076,7 +1076,7 @@ mod tests {
.authenticate_session(&conn, req, &sid.hash())
.unwrap_err();
assert_eq!(
format!("{}", e),
format!("{e}"),
"Unauthenticated: session is no longer valid (reason=1)"
);
}
@ -1131,14 +1131,14 @@ mod tests {
0,
)
.unwrap_err();
assert_eq!(format!("{}", e), "user \"slamb\" is disabled");
assert_eq!(format!("{e}"), "user \"slamb\" is disabled");
// Authenticating existing sessions shouldn't work either.
let e = state
.authenticate_session(&conn, req.clone(), &sid.hash())
.unwrap_err();
assert_eq!(
format!("{}", e),
format!("{e}"),
"Unauthenticated: user \"slamb\" is disabled"
);
@ -1149,7 +1149,7 @@ mod tests {
.authenticate_session(&conn, req, &sid.hash())
.unwrap_err();
assert_eq!(
format!("{}", e),
format!("{e}"),
"Unauthenticated: user \"slamb\" is disabled"
);
}
@ -1211,16 +1211,16 @@ mod tests {
let e = state
.authenticate_session(&conn, req.clone(), &sid.hash())
.unwrap_err();
assert_eq!(format!("{}", e), "Unauthenticated: no such session");
assert_eq!(format!("{e}"), "Unauthenticated: no such session");
// The user should still be deleted after reload.
drop(state);
let mut state = State::init(&conn).unwrap();
assert!(state.users_by_id().get(&uid).is_none());
let e = state
.authenticate_session(&conn, req.clone(), &sid.hash())
.authenticate_session(&conn, req, &sid.hash())
.unwrap_err();
assert_eq!(format!("{}", e), "Unauthenticated: no such session");
assert_eq!(format!("{e}"), "Unauthenticated: no such session");
}
#[test]

View File

@ -207,7 +207,7 @@ mod tests {
b"\x80\x80\x80\x80\x80\x00",
];
for (i, encoded) in tests.iter().enumerate() {
assert!(decode_varint32(encoded, 0).is_err(), "while on test {}", i);
assert!(decode_varint32(encoded, 0).is_err(), "while on test {i}");
}
}
}

View File

@ -22,7 +22,7 @@ struct Column {
impl std::fmt::Display for Column {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
write!(f, "{:?}", self)
write!(f, "{self:?}")
}
}
@ -37,7 +37,7 @@ struct Index {
impl std::fmt::Display for Index {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
write!(f, "{:?}", self)
write!(f, "{self:?}")
}
}
@ -50,7 +50,7 @@ struct IndexColumn {
impl std::fmt::Display for IndexColumn {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
write!(f, "{:?}", self)
write!(f, "{self:?}")
}
}
@ -61,18 +61,18 @@ fn diff_slices<T: std::fmt::Display + PartialEq>(
name2: &str,
slice2: &[T],
) -> Option<String> {
let mut diff = format!("--- {}\n+++ {}\n", name1, name2);
let mut diff = format!("--- {name1}\n+++ {name2}\n");
let mut changed = false;
for item in diff::slice(slice1, slice2) {
match item {
diff::Result::Left(i) => {
changed = true;
writeln!(&mut diff, "-{}", i)
writeln!(&mut diff, "-{i}")
}
diff::Result::Both(i, _) => writeln!(&mut diff, " {}", i),
diff::Result::Both(i, _) => writeln!(&mut diff, " {i}"),
diff::Result::Right(i) => {
changed = true;
writeln!(&mut diff, "+{}", i)
writeln!(&mut diff, "+{i}")
}
}
.unwrap();
@ -109,7 +109,7 @@ fn get_table_columns(
// Note that placeholders aren't allowed for these pragmas. Just assume sane table names
// (no escaping). "select * from pragma_..." syntax would be nicer but requires SQLite
// 3.16.0 (2017-01-02). Ubuntu 16.04 Xenial (still used on Travis CI) has an older SQLite.
c.prepare(&format!("pragma table_info(\"{}\")", table))?
c.prepare(&format!("pragma table_info(\"{table}\")"))?
.query_map(params![], |r| {
Ok(Column {
cid: r.get(0)?,
@ -126,7 +126,7 @@ fn get_table_columns(
/// Returns a vec of indices associated with the given table.
fn get_indices(c: &rusqlite::Connection, table: &str) -> Result<Vec<Index>, rusqlite::Error> {
// See note at get_tables_columns about placeholders.
c.prepare(&format!("pragma index_list(\"{}\")", table))?
c.prepare(&format!("pragma index_list(\"{table}\")"))?
.query_map(params![], |r| {
Ok(Index {
seq: r.get(0)?,
@ -145,7 +145,7 @@ fn get_index_columns(
index: &str,
) -> Result<Vec<IndexColumn>, rusqlite::Error> {
// See note at get_tables_columns about placeholders.
c.prepare(&format!("pragma index_info(\"{}\")", index))?
c.prepare(&format!("pragma index_info(\"{index}\")"))?
.query_map(params![], |r| {
Ok(IndexColumn {
seqno: r.get(0)?,
@ -168,11 +168,7 @@ pub fn get_diffs(
let tables1 = get_tables(c1)?;
let tables2 = get_tables(c2)?;
if let Some(diff) = diff_slices(n1, &tables1[..], n2, &tables2[..]) {
write!(
&mut diffs,
"table list mismatch, {} vs {}:\n{}",
n1, n2, diff
)?;
write!(&mut diffs, "table list mismatch, {n1} vs {n2}:\n{diff}")?;
}
// Compare columns and indices for each table.
@ -180,11 +176,7 @@ pub fn get_diffs(
let columns1 = get_table_columns(c1, t)?;
let columns2 = get_table_columns(c2, t)?;
if let Some(diff) = diff_slices(n1, &columns1[..], n2, &columns2[..]) {
write!(
&mut diffs,
"table {:?} column, {} vs {}:\n{}",
t, n1, n2, diff
)?;
write!(&mut diffs, "table {t:?} column, {n1} vs {n2}:\n{diff}")?;
}
let mut indices1 = get_indices(c1, t)?;
@ -192,11 +184,7 @@ pub fn get_diffs(
indices1.sort_by(|a, b| a.name.cmp(&b.name));
indices2.sort_by(|a, b| a.name.cmp(&b.name));
if let Some(diff) = diff_slices(n1, &indices1[..], n2, &indices2[..]) {
write!(
&mut diffs,
"table {:?} indices, {} vs {}:\n{}",
t, n1, n2, diff
)?;
write!(&mut diffs, "table {t:?} indices, {n1} vs {n2}:\n{diff}")?;
}
for i in &indices1 {
@ -205,8 +193,7 @@ pub fn get_diffs(
if let Some(diff) = diff_slices(n1, &ic1[..], n2, &ic2[..]) {
write!(
&mut diffs,
"table {:?} index {:?} columns {} vs {}:\n{}",
t, i, n1, n2, diff
"table {t:?} index {i:?} columns {n1} vs {n2}:\n{diff}"
)?;
}
}

View File

@ -106,7 +106,7 @@ impl Value for SignalValue {
let s = &mut self.states[c.new_state as usize - 1];
let n = s
.checked_add(u64::try_from(c.duration.0).unwrap())
.unwrap_or_else(|| panic!("add range violation: s={:?} c={:?}", s, c));
.unwrap_or_else(|| panic!("add range violation: s={s:?} c={c:?}"));
*s = n;
}
@ -115,14 +115,12 @@ impl Value for SignalValue {
let i = usize::try_from(c.old_state).unwrap() - 1;
assert!(
self.states.len() > i,
"no such old state: s={:?} c={:?}",
self,
c
"no such old state: s={self:?} c={c:?}"
);
let s = &mut self.states[c.old_state as usize - 1];
let n = s
.checked_sub(u64::try_from(c.duration.0).unwrap())
.unwrap_or_else(|| panic!("sub range violation: s={:?} c={:?}", s, c));
.unwrap_or_else(|| panic!("sub range violation: s={s:?} c={c:?}"));
*s = n;
}

View File

@ -2568,8 +2568,7 @@ mod tests {
assert!(
e.to_string()
.starts_with("Database schema version 6 is too old (expected 7)"),
"got: {:?}",
e
"got: {e:?}"
);
}
@ -2583,8 +2582,7 @@ mod tests {
assert!(
e.to_string()
.starts_with("Database schema version 8 is too new (expected 7)"),
"got: {:?}",
e
"got: {e:?}"
);
}
@ -2728,7 +2726,7 @@ mod tests {
rfc6381_codec: "avc1.4d0029".to_owned(),
})
.unwrap();
assert!(vse_id > 0, "vse_id = {}", vse_id);
assert!(vse_id > 0, "vse_id = {vse_id}");
// Inserting a recording should succeed and advance the next recording id.
let start = recording::Time(1430006400 * TIME_UNITS_PER_SEC);
@ -2818,7 +2816,7 @@ mod tests {
.unwrap()
.garbage_needs_unlink
.iter()
.map(|&id| id)
.copied()
.collect();
assert_eq!(&g, &[id]);
let g: Vec<_> = db
@ -2828,7 +2826,7 @@ mod tests {
.unwrap()
.garbage_unlinked
.iter()
.map(|&id| id)
.copied()
.collect();
assert_eq!(&g, &[]);
}

View File

@ -53,7 +53,7 @@ impl Reader {
.expect("PAGE_SIZE must be defined"),
)
.expect("PAGE_SIZE fits in usize");
assert_eq!(page_size.count_ones(), 1, "invalid page size {}", page_size);
assert_eq!(page_size.count_ones(), 1, "invalid page size {page_size}");
std::thread::Builder::new()
.name(format!("r-{}", path.display()))
.spawn(move || ReaderInt { dir, page_size }.run(rx))
@ -258,8 +258,7 @@ impl ReaderInt {
// avoid spending effort on expired commands
continue;
}
let _guard =
TimerGuard::new(&RealClocks {}, || format!("open {}", composite_id));
let _guard = TimerGuard::new(&RealClocks {}, || format!("open {composite_id}"));
let _ = tx.send(self.open(composite_id, range));
}
ReaderCommand::ReadNextChunk { file, tx } => {
@ -269,7 +268,7 @@ impl ReaderInt {
}
let composite_id = file.composite_id;
let _guard =
TimerGuard::new(&RealClocks {}, || format!("read from {}", composite_id));
TimerGuard::new(&RealClocks {}, || format!("read from {composite_id}"));
let _ = tx.send(Ok(self.chunk(file)));
}
ReaderCommand::CloseFile(_) => {}
@ -417,7 +416,7 @@ mod tests {
let fd = std::sync::Arc::new(super::super::Fd::open(tmpdir.path(), false).unwrap());
let reader = super::Reader::spawn(tmpdir.path(), fd);
std::fs::write(tmpdir.path().join("0123456789abcdef"), b"blah blah").unwrap();
let f = reader.open_file(crate::CompositeId(0x01234567_89abcdef), 1..8);
let f = reader.open_file(crate::CompositeId(0x0123_4567_89ab_cdef), 1..8);
assert_eq!(f.try_concat().await.unwrap(), b"lah bla");
}
}

View File

@ -205,7 +205,7 @@ pub(crate) fn insert_recording(
:end_reason)
"#,
)
.with_context(|e| format!("can't prepare recording insert: {}", e))?;
.with_context(|e| format!("can't prepare recording insert: {e}"))?;
stmt.execute(named_params! {
":composite_id": id.0,
":stream_id": i64::from(id.stream()),
@ -223,12 +223,7 @@ pub(crate) fn insert_recording(
":video_sample_entry_id": r.video_sample_entry_id,
":end_reason": r.end_reason.as_deref(),
})
.with_context(|e| {
format!(
"unable to insert recording for recording {} {:#?}: {}",
id, r, e
)
})?;
.with_context(|e| format!("unable to insert recording for recording {id} {r:#?}: {e}"))?;
let mut stmt = tx
.prepare_cached(
@ -239,7 +234,7 @@ pub(crate) fn insert_recording(
:sample_file_blake3)
"#,
)
.with_context(|e| format!("can't prepare recording_integrity insert: {}", e))?;
.with_context(|e| format!("can't prepare recording_integrity insert: {e}"))?;
let blake3 = r.sample_file_blake3.as_ref().map(|b| &b[..]);
let delta = match r.run_offset {
0 => None,
@ -250,7 +245,7 @@ pub(crate) fn insert_recording(
":local_time_delta_90k": delta,
":sample_file_blake3": blake3,
})
.with_context(|e| format!("unable to insert recording_integrity for {:#?}: {}", r, e))?;
.with_context(|e| format!("unable to insert recording_integrity for {r:#?}: {e}"))?;
let mut stmt = tx
.prepare_cached(
@ -259,12 +254,12 @@ pub(crate) fn insert_recording(
values (:composite_id, :video_index)
"#,
)
.with_context(|e| format!("can't prepare recording_playback insert: {}", e))?;
.with_context(|e| format!("can't prepare recording_playback insert: {e}"))?;
stmt.execute(named_params! {
":composite_id": id.0,
":video_index": &r.video_index,
})
.with_context(|e| format!("unable to insert recording_playback for {:#?}: {}", r, e))?;
.with_context(|e| format!("unable to insert recording_playback for {r:#?}: {e}"))?;
Ok(())
}
@ -371,7 +366,7 @@ pub(crate) fn mark_sample_files_deleted(
// Tempting to just consider logging error and moving on, but this represents a logic
// flaw, so complain loudly. The freshly deleted file might still be referenced in the
// recording table.
panic!("no garbage row for {}", id);
panic!("no garbage row for {id}");
}
}
Ok(())

View File

@ -23,10 +23,7 @@ pub use base::time::Time;
pub fn rescale(from_off_90k: i32, from_duration_90k: i32, to_duration_90k: i32) -> i32 {
debug_assert!(
from_off_90k <= from_duration_90k,
"from_off_90k={} from_duration_90k={} to_duration_90k={}",
from_off_90k,
from_duration_90k,
to_duration_90k
"from_off_90k={from_off_90k} from_duration_90k={from_duration_90k} to_duration_90k={to_duration_90k}"
);
if from_duration_90k == 0 {
return 0; // avoid a divide by zero.
@ -41,8 +38,7 @@ pub fn rescale(from_off_90k: i32, from_duration_90k: i32, to_duration_90k: i32)
)
.map_err(|_| {
format!(
"rescale overflow: {} * {} / {} > i32::max_value()",
from_off_90k, to_duration_90k, from_duration_90k
"rescale overflow: {from_off_90k} * {to_duration_90k} / {from_duration_90k} > i32::max_value()"
)
})
.unwrap()

View File

@ -783,7 +783,7 @@ impl State {
let mut expected_prev = BTreeMap::new();
for (t, p) in self.points_by_time.iter() {
let cur = p.prev().into_map().expect("in-mem prev is valid");
assert_eq!(&expected_prev, &cur, "time {} prev mismatch", t);
assert_eq!(&expected_prev, &cur, "time {t} prev mismatch");
p.changes().update_map(&mut expected_prev);
}
assert_eq!(

View File

@ -35,7 +35,7 @@ pub struct Args<'a> {
fn set_journal_mode(conn: &rusqlite::Connection, requested: &str) -> Result<(), Error> {
assert!(!requested.contains(';')); // quick check for accidental sql injection.
let actual = conn.query_row(
&format!("pragma journal_mode = {}", requested),
&format!("pragma journal_mode = {requested}"),
params![],
|row| row.get::<_, String>(0),
)?;
@ -191,12 +191,12 @@ mod tests {
let fresh = new_conn()?;
fresh.execute_batch(fresh_sql)?;
if let Some(diffs) = compare::get_diffs(
&format!("upgraded to version {}", ver),
&c,
&format!("fresh version {}", ver),
&format!("upgraded to version {ver}"),
c,
&format!("fresh version {ver}"),
&fresh,
)? {
panic!("Version {}: differences found:\n{}", ver, diffs);
panic!("Version {ver}: differences found:\n{diffs}");
}
Ok(())
}
@ -284,14 +284,14 @@ mod tests {
] {
upgrade(
&Args {
sample_file_dir: Some(&tmpdir.path()),
sample_file_dir: Some(tmpdir.path()),
preset_journal: "delete",
no_vacuum: false,
},
*ver,
&mut upgraded,
)
.context(format!("upgrading to version {}", ver))?;
.context(format!("upgrading to version {ver}"))?;
if let Some(f) = fresh_sql {
compare(&upgraded, *ver, f)?;
}

View File

@ -419,7 +419,6 @@ fn rfc6381_codec_from_sample_entry(sample_entry: &[u8]) -> Result<String, Error>
let constraint_flags_byte = sample_entry[104];
let level_idc = sample_entry[105];
Ok(format!(
"avc1.{:02x}{:02x}{:02x}",
profile_idc, constraint_flags_byte, level_idc
"avc1.{profile_idc:02x}{constraint_flags_byte:02x}{level_idc:02x}"
))
}

View File

@ -149,7 +149,7 @@ pub fn run(_args: &super::Args, tx: &rusqlite::Transaction) -> Result<(), Error>
let dir = dir::Fd::open(path, false)?;
dir.lock(FlockArg::LockExclusiveNonblock)
.map_err(|e| e.context(format!("unable to lock dir {}", path)))?;
.map_err(|e| e.context(format!("unable to lock dir {path}")))?;
let mut need_sync = maybe_upgrade_meta(&dir, &db_meta)?;
if maybe_cleanup_garbage_uuids(&dir)? {

View File

@ -268,7 +268,7 @@ pub fn run(_args: &super::Args, tx: &rusqlite::Transaction) -> Result<(), Error>
":video_sync_samples": video_sync_samples,
":video_sample_entry_id": video_sample_entry_id,
})
.with_context(|_| format!("Unable to insert composite_id {}", composite_id))?;
.with_context(|_| format!("Unable to insert composite_id {composite_id}"))?;
cum_duration_90k += i64::from(wall_duration_90k);
cum_runs += if run_offset == 0 { 1 } else { 0 };
}

View File

@ -259,7 +259,7 @@ fn copy_cameras(tx: &rusqlite::Transaction) -> Result<(), Error> {
// of using a SQL NULL, so convert empty to None here.
// https://github.com/scottlamb/moonfire-nvr/issues/182
.filter(|h| !h.is_empty())
.map(|h| Url::parse(&format!("http://{}/", h)))
.map(|h| Url::parse(&format!("http://{h}/")))
.transpose()
.with_context(|_| "bad onvif_host")?,
username: username.take().unwrap_or_default(),

View File

@ -1032,7 +1032,7 @@ mod tests {
assert_eq!(id, expected_id);
f(id)
}
_ => panic!("got create_file({}), expected something else", id),
_ => panic!("got create_file({id}), expected something else"),
}
}
fn sync(&self) -> Result<(), nix::Error> {
@ -1059,7 +1059,7 @@ mod tests {
assert_eq!(id, expected_id);
f(id)
}
_ => panic!("got unlink({}), expected something else", id),
_ => panic!("got unlink({id}), expected something else"),
}
}
}
@ -1114,7 +1114,7 @@ mod tests {
.expect("got write with no expectation")
{
MockFileAction::Write(f) => f(buf),
_ => panic!("got write({:?}), expected something else", buf),
_ => panic!("got write({buf:?}), expected something else"),
}
}
}

View File

@ -339,8 +339,7 @@ fn press_delete(siv: &mut Cursive, db: &Arc<db::Database>, id: i32, name: String
})
} else {
views::Dialog::text(format!(
"Delete camera {}? This camera has no recorded video.",
name
"Delete camera {name}? This camera has no recorded video."
))
.button("Delete", {
let db = db.clone();
@ -381,7 +380,7 @@ fn confirm_deletion(siv: &mut Cursive, db: &Arc<db::Database>, id: i32, to_delet
}
if let Err(e) = lower_retention(db, zero_limits) {
siv.add_layer(
views::Dialog::text(format!("Unable to delete recordings: {}", e))
views::Dialog::text(format!("Unable to delete recordings: {e}"))
.title("Error")
.dismiss_button("Abort"),
);
@ -417,7 +416,7 @@ fn actually_delete(siv: &mut Cursive, db: &Arc<db::Database>, id: i32) {
};
if let Err(e) = result {
siv.add_layer(
views::Dialog::text(format!("Unable to delete camera: {}", e))
views::Dialog::text(format!("Unable to delete camera: {e}"))
.title("Error")
.dismiss_button("Abort"),
);

View File

@ -48,7 +48,7 @@ fn update_limits_inner(model: &Model) -> Result<(), Error> {
fn update_limits(model: &Model, siv: &mut Cursive) {
if let Err(e) = update_limits_inner(model) {
siv.add_layer(
views::Dialog::text(format!("Unable to update limits: {}", e))
views::Dialog::text(format!("Unable to update limits: {e}"))
.dismiss_button("Back")
.title("Error"),
);
@ -79,7 +79,7 @@ fn edit_limit(model: &RefCell<Model>, siv: &mut Cursive, id: i32, content: &str)
}
if new_value.is_none() != stream.retain.is_none() {
model.errors += if new_value.is_none() { 1 } else { -1 };
siv.find_name::<views::TextView>(&format!("{}_ok", id))
siv.find_name::<views::TextView>(&format!("{id}_ok"))
.unwrap()
.set_content(if new_value.is_none() { "*" } else { " " });
}
@ -139,7 +139,7 @@ fn actually_delete(model: &RefCell<Model>, siv: &mut Cursive) {
}
if let Err(e) = writer::lower_retention(model.db.clone(), model.dir_id, &new_limits[..]) {
siv.add_layer(
views::Dialog::text(format!("Unable to delete excess video: {}", e))
views::Dialog::text(format!("Unable to delete excess video: {e}"))
.title("Error")
.dismiss_button("Abort"),
);
@ -281,7 +281,7 @@ fn delete_dir_dialog(db: &Arc<db::Database>, siv: &mut Cursive, dir_id: i32) {
fn delete_dir(db: &Arc<db::Database>, siv: &mut Cursive, dir_id: i32) {
if let Err(e) = db.lock().delete_sample_file_dir(dir_id) {
siv.add_layer(
views::Dialog::text(format!("Unable to delete dir id {}: {}", dir_id, e))
views::Dialog::text(format!("Unable to delete dir id {dir_id}: {e}"))
.dismiss_button("Back")
.title("Error"),
);
@ -381,7 +381,7 @@ fn edit_dir_dialog(db: &Arc<db::Database>, siv: &mut Cursive, dir_id: i32) {
)
.child(
views::TextView::new("")
.with_name(format!("{}_ok", id))
.with_name(format!("{id}_ok"))
.fixed_width(1),
),
);

View File

@ -61,7 +61,7 @@ fn press_edit(siv: &mut Cursive, db: &Arc<db::Database>, id: Option<i32>, pw: Pa
};
if let Err(e) = result {
siv.add_layer(
views::Dialog::text(format!("Unable to apply change: {}", e))
views::Dialog::text(format!("Unable to apply change: {e}"))
.title("Error")
.dismiss_button("Abort"),
);
@ -76,7 +76,7 @@ fn press_edit(siv: &mut Cursive, db: &Arc<db::Database>, id: Option<i32>, pw: Pa
fn press_delete(siv: &mut Cursive, db: &Arc<db::Database>, id: i32, name: String) {
siv.add_layer(
views::Dialog::text(format!("Delete user {}?", name))
views::Dialog::text(format!("Delete user {name}?"))
.button("Delete", {
let db = db.clone();
move |s| actually_delete(s, &db, id)
@ -94,7 +94,7 @@ fn actually_delete(siv: &mut Cursive, db: &Arc<db::Database>, id: i32) {
};
if let Err(e) = result {
siv.add_layer(
views::Dialog::text(format!("Unable to delete user: {}", e))
views::Dialog::text(format!("Unable to delete user: {e}"))
.title("Error")
.dismiss_button("Abort"),
);
@ -193,7 +193,7 @@ fn edit_user_dialog(db: &Arc<db::Database>, siv: &mut Cursive, item: Option<i32>
] {
let mut checkbox = views::Checkbox::new();
checkbox.set_checked(*b);
perms.add_child(name, checkbox.with_name(format!("perm_{}", name)));
perms.add_child(name, checkbox.with_name(format!("perm_{name}")));
}
layout.add_child(perms);

View File

@ -105,7 +105,7 @@ pub fn run(args: Args) -> Result<i32, Error> {
f.sync_all()?;
println!("Wrote cookie to {}", p.display());
} else {
println!("s={}", encoded);
println!("s={encoded}");
}
Ok(0)
}

View File

@ -157,10 +157,7 @@ pub fn parse_extra_data(extradata: &[u8]) -> Result<VideoSampleEntryToInsert, Er
let constraint_flags = sample_entry[104];
let level_idc = sample_entry[105];
let rfc6381_codec = format!(
"avc1.{:02x}{:02x}{:02x}",
profile_idc, constraint_flags, level_idc
);
let rfc6381_codec = format!("avc1.{profile_idc:02x}{constraint_flags:02x}{level_idc:02x}");
Ok(VideoSampleEntryToInsert {
data: sample_entry,
rfc6381_codec,

View File

@ -296,7 +296,7 @@ impl<'a> Signal<'a> {
let mut map = serializer.serialize_map(Some(s.config.camera_associations.len()))?;
for (camera_id, association) in &s.config.camera_associations {
let c = db.cameras_by_id().get(camera_id).ok_or_else(|| {
S::Error::custom(format!("signal has missing camera id {}", camera_id))
S::Error::custom(format!("signal has missing camera id {camera_id}"))
})?;
map.serialize_key(&c.uuid)?;
map.serialize_value(association.as_str())?;

View File

@ -81,14 +81,14 @@ impl Args {
fn panic_hook(p: &std::panic::PanicInfo) {
let mut msg;
if let Some(l) = p.location() {
msg = format!("panic at '{}'", l);
msg = format!("panic at '{l}'");
} else {
msg = "panic".to_owned();
}
if let Some(s) = p.payload().downcast_ref::<&str>() {
write!(&mut msg, ": {}", s).unwrap();
write!(&mut msg, ": {s}").unwrap();
} else if let Some(s) = p.payload().downcast_ref::<String>() {
write!(&mut msg, ": {}", s).unwrap();
write!(&mut msg, ": {s}").unwrap();
}
let b = failure::Backtrace::new();
if b.is_empty() {
@ -98,7 +98,7 @@ fn panic_hook(p: &std::panic::PanicInfo) {
)
.unwrap();
} else {
write!(&mut msg, "\n\nBacktrace:\n{}", b).unwrap();
write!(&mut msg, "\n\nBacktrace:\n{b}").unwrap();
}
error!("{}", msg);
}
@ -106,9 +106,8 @@ fn panic_hook(p: &std::panic::PanicInfo) {
fn main() {
if let Err(e) = nix::time::clock_gettime(nix::time::ClockId::CLOCK_MONOTONIC) {
eprintln!(
"clock_gettime failed: {}\n\n\
This indicates a broken environment. See the troubleshooting guide.",
e
"clock_gettime failed: {e}\n\n\
This indicates a broken environment. See the troubleshooting guide."
);
std::process::exit(1);
}

View File

@ -974,7 +974,7 @@ impl FileBuilder {
pub fn set_filename(&mut self, filename: &str) -> Result<(), Error> {
self.content_disposition = Some(
HeaderValue::try_from(format!("attachment; filename=\"{}\"", filename))
HeaderValue::try_from(format!("attachment; filename=\"{filename}\""))
.err_kind(ErrorKind::InvalidArgument)?,
);
Ok(())
@ -2073,7 +2073,7 @@ mod tests {
impl BoxCursor {
pub fn new(mp4: File) -> BoxCursor {
BoxCursor {
mp4: mp4,
mp4,
stack: Vec::new(),
}
}
@ -2107,7 +2107,7 @@ mod tests {
boxtype[..].copy_from_slice(boxtype_slice);
self.stack.push(Mp4Box {
interior: pos + hdr_len as u64..pos + len,
boxtype: boxtype,
boxtype,
});
trace!("positioned at {}", self.path());
true
@ -2272,7 +2272,7 @@ mod tests {
cursor.down().await;
assert!(cursor.find(b"stbl").await);
Track {
edts_cursor: edts_cursor,
edts_cursor,
stbl_cursor: cursor,
}
}
@ -2353,13 +2353,11 @@ mod tests {
let d = r.media_duration_90k;
assert!(
skip_90k + shorten_90k < d,
"skip_90k={} shorten_90k={} r={:?}",
skip_90k,
shorten_90k,
r
"{}",
"skip_90k={skip_90k} shorten_90k={shorten_90k} r={r:?}"
);
builder
.append(&*db, r, skip_90k..d - shorten_90k, true)
.append(&db, r, skip_90k..d - shorten_90k, true)
.unwrap();
Ok(())
})
@ -2442,11 +2440,10 @@ mod tests {
};
assert_eq!(
orig_pkt.pts, new_pkt.pts, /*+ pts_offset*/
"pkt {} pts",
i
"pkt {i} pts"
);
assert_eq!(orig_pkt.data, new_pkt.data, "pkt {} data", i);
assert_eq!(orig_pkt.is_key, new_pkt.is_key, "pkt {} key", i);
assert_eq!(orig_pkt.data, new_pkt.data, "pkt {i} data");
assert_eq!(orig_pkt.is_key, new_pkt.is_key, "pkt {i} key");
final_durations = Some((i64::from(orig_pkt.duration), i64::from(new_pkt.duration)));
}
@ -2457,11 +2454,8 @@ mod tests {
// See <https://github.com/scottlamb/moonfire-nvr/issues/10>.
assert!(
orig_dur - shorten + pts_offset == new_dur || orig_dur - shorten == new_dur,
"orig_dur={} new_dur={} shorten={} pts_offset={}",
orig_dur,
new_dur,
shorten,
pts_offset
"{}",
"orig_dur={orig_dur} new_dur={new_dur} shorten={shorten} pts_offset={pts_offset}"
);
}
}
@ -2845,7 +2839,7 @@ mod tests {
"64f23b856692702b13d1811cd02dc83395b3d501dead7fd16f175eb26b4d8eee",
hash.to_hex().as_str()
);
const EXPECTED_ETAG: &'static str =
const EXPECTED_ETAG: &str =
"\"791114c469130970608dd999b0ecf5861d077ec33fad2f0b040996e4aae4e30f\"";
assert_eq!(
Some(HeaderValue::from_str(EXPECTED_ETAG).unwrap()),
@ -2874,7 +2868,7 @@ mod tests {
"f9e4ed946187b2dd22ef049c4c1869d0f6c4f377ef08f8f53570850b61a06701",
hash.to_hex().as_str()
);
const EXPECTED_ETAG: &'static str =
const EXPECTED_ETAG: &str =
"\"85703b9abadd4292e119f2f7b0d6a16e99acf8b3ba98fcb6498e60ac5cb0b0b7\"";
assert_eq!(
Some(HeaderValue::from_str(EXPECTED_ETAG).unwrap()),
@ -2903,7 +2897,7 @@ mod tests {
"f913d46d0119a03291e85459455b9a75a84cc9a1a5e3b88ca7e93eb718d73190",
hash.to_hex().as_str()
);
const EXPECTED_ETAG: &'static str =
const EXPECTED_ETAG: &str =
"\"3d2031124fb995bf2fc4930e7affdcd51add396e062cfab97e1001224c5ee42c\"";
assert_eq!(
Some(HeaderValue::from_str(EXPECTED_ETAG).unwrap()),
@ -2933,7 +2927,7 @@ mod tests {
"64cc763fa2533118bc6bf0b01249f02524ae87e0c97815079447b235722c1e2d",
hash.to_hex().as_str()
);
const EXPECTED_ETAG: &'static str =
const EXPECTED_ETAG: &str =
"\"aa9bb2f63787a7d21227981135326c948db3e0b3dae5d0d39c77df69d0baf504\"";
assert_eq!(
Some(HeaderValue::from_str(EXPECTED_ETAG).unwrap()),
@ -2962,7 +2956,7 @@ mod tests {
"6886b36ae6df9ce538f6db7ebd6159e68c2936b9d43307f7728fe75e0b62cad2",
hash.to_hex().as_str()
);
const EXPECTED_ETAG: &'static str =
const EXPECTED_ETAG: &str =
"\"0a6accaa7b583c94209eba58b00b39a804a5c4a8c99043e58e72fed7acd8dfc6\"";
assert_eq!(
Some(HeaderValue::from_str(EXPECTED_ETAG).unwrap()),

View File

@ -220,7 +220,7 @@ mod tests {
}
fn get_slices(ctx: &&'static Slices<FakeSlice>) -> &'static Slices<Self> {
*ctx
ctx
}
}

View File

@ -293,7 +293,6 @@ mod tests {
use std::convert::TryFrom;
use std::sync::Arc;
use std::sync::Mutex;
use time;
struct ProxyingStream {
clocks: clock::SimulatedClocks,
@ -412,7 +411,7 @@ mod tests {
db.with_recording_playback(id, &mut |rec| {
let mut it = recording::SampleIndexIterator::default();
let mut frames = Vec::new();
while it.next(&rec.video_index).unwrap() {
while it.next(rec.video_index).unwrap() {
frames.push(Frame {
start_90k: it.start_90k,
duration_90k: it.duration_90k,
@ -443,7 +442,7 @@ mod tests {
streams: Mutex::new(vec![Box::new(stream)]),
shutdown_tx: Mutex::new(Some(shutdown_tx)),
};
let db = testutil::TestDb::new(clocks.clone());
let db = testutil::TestDb::new(clocks);
let env = super::Environment {
opener: &opener,
db: &db.db,

View File

@ -91,7 +91,7 @@ impl Service {
Some(o) => o.id,
};
let camera = db.get_camera(uuid).ok_or_else(|| {
plain_response(StatusCode::NOT_FOUND, format!("no such camera {}", uuid))
plain_response(StatusCode::NOT_FOUND, format!("no such camera {uuid}"))
})?;
stream_id = camera.streams[stream_type.index()].ok_or_else(|| {
format_err_t!(NotFound, "no such stream {}/{}", uuid, stream_type)

View File

@ -366,7 +366,7 @@ impl Service {
let db = self.db.lock();
let camera = db
.get_camera(uuid)
.ok_or_else(|| not_found(format!("no such camera {}", uuid)))?;
.ok_or_else(|| not_found(format!("no such camera {uuid}")))?;
serve_json(
req,
&json::Camera::wrap(camera, &db, true, false).map_err(internal_server_err)?,
@ -412,12 +412,12 @@ impl Service {
video_sample_entries: (&db, Vec::new()),
};
let camera = db.get_camera(uuid).ok_or_else(|| {
plain_response(StatusCode::NOT_FOUND, format!("no such camera {}", uuid))
plain_response(StatusCode::NOT_FOUND, format!("no such camera {uuid}"))
})?;
let stream_id = camera.streams[type_.index()].ok_or_else(|| {
plain_response(
StatusCode::NOT_FOUND,
format!("no such stream {}/{}", uuid, type_),
format!("no such stream {uuid}/{type_}"),
)
})?;
db.list_aggregated_recordings(stream_id, r, split, &mut |row| {
@ -464,7 +464,7 @@ impl Service {
.build(self.db.clone(), self.dirs_by_stream_id.clone())
.map_err(from_base_error)?;
if debug {
Ok(plain_response(StatusCode::OK, format!("{:#?}", mp4)))
Ok(plain_response(StatusCode::OK, format!("{mp4:#?}")))
} else {
Ok(http_serve::serve(mp4, req))
}

View File

@ -275,14 +275,11 @@ mod tests {
| (SessionFlag::SameSite as i32)
| (SessionFlag::SameSiteStrict as i32)
),
format!(
"s={}; HttpOnly; Secure; SameSite=Strict; Max-Age=2147483648; Path=/",
s64
)
format!("s={s64}; HttpOnly; Secure; SameSite=Strict; Max-Age=2147483648; Path=/")
);
assert_eq!(
encode_sid(s, SessionFlag::SameSite as i32),
format!("s={}; SameSite=Lax; Max-Age=2147483648; Path=/", s64)
format!("s={s64}; SameSite=Lax; Max-Age=2147483648; Path=/")
);
}

View File

@ -46,10 +46,10 @@ impl Service {
let db = self.db.lock();
let camera = db
.get_camera(uuid)
.ok_or_else(|| not_found(format!("no such camera {}", uuid)))?;
.ok_or_else(|| not_found(format!("no such camera {uuid}")))?;
camera_name = camera.short_name.clone();
stream_id = camera.streams[stream_type.index()]
.ok_or_else(|| not_found(format!("no such stream {}/{}", uuid, stream_type)))?;
.ok_or_else(|| not_found(format!("no such stream {uuid}/{stream_type}")))?;
};
let mut start_time_for_filename = None;
let mut builder = mp4::FileBuilder::new(mp4_type);
@ -61,7 +61,7 @@ impl Service {
let s = Segments::from_str(value).map_err(|()| {
plain_response(
StatusCode::BAD_REQUEST,
format!("invalid s parameter: {}", value),
format!("invalid s parameter: {value}"),
)
})?;
trace!("stream_view_mp4: appending s={:?}", s);
@ -177,7 +177,7 @@ impl Service {
"ts" => builder
.include_timestamp_subtitle_track(value == "true")
.map_err(from_base_error)?,
_ => return Err(bad_req(format!("parameter {} not understood", key))),
_ => return Err(bad_req(format!("parameter {key} not understood"))),
}
}
}
@ -210,7 +210,7 @@ impl Service {
.build(self.db.clone(), self.dirs_by_stream_id.clone())
.map_err(from_base_error)?;
if debug {
return Ok(plain_response(StatusCode::OK, format!("{:#?}", mp4)));
return Ok(plain_response(StatusCode::OK, format!("{mp4:#?}")));
}
Ok(http_serve::serve(mp4, req))
}