clean up the easy clippy errors

I'm still not running clippy on CI and probably should.
There are a few left that were a little more involved to address.
Scott Lamb 2022-09-28 09:29:16 -07:00
parent b03eceb21a
commit 0866b23991
23 changed files with 42 additions and 47 deletions


@@ -61,6 +61,7 @@ fn decode_size_internal(input: &str) -> IResult<&str, i64> {
 }
 /// Decodes a human-readable size as output by encode_size.
+#[allow(clippy::result_unit_err)]
 pub fn decode_size(encoded: &str) -> Result<i64, ()> {
     let (remaining, decoded) = decode_size_internal(encoded).map_err(|_e| ())?;
     if !remaining.is_empty() {
@@ -85,6 +86,7 @@ pub fn hex(raw: &[u8]) -> String {
 }
 /// Returns [0, 16) or error.
+#[allow(clippy::result_unit_err)]
 fn dehex_byte(hex_byte: u8) -> Result<u8, ()> {
     match hex_byte {
         b'0'..=b'9' => Ok(hex_byte - b'0'),
@@ -95,6 +97,7 @@ fn dehex_byte(hex_byte: u8) -> Result<u8, ()> {
 /// Returns a 20-byte raw form of the given hex string.
 /// (This is the size of a SHA1 hash, the only current use of this function.)
+#[allow(clippy::result_unit_err)]
 pub fn dehex(hexed: &[u8]) -> Result<[u8; 20], ()> {
     if hexed.len() != 40 {
         return Err(());

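Note: clippy::result_unit_err fires when a pub function returns Result<_, ()>, since Err(()) tells the caller nothing about what went wrong; the #[allow]s above keep the existing signatures rather than introducing a richer error type. A minimal standalone sketch of the pattern (hypothetical function, not repository code):

    // A unit error type triggers clippy::result_unit_err on public functions;
    // the attribute acknowledges the lint instead of changing the signature.
    #[allow(clippy::result_unit_err)]
    pub fn parse_bool(s: &str) -> Result<bool, ()> {
        match s {
            "true" => Ok(true),
            "false" => Ok(false),
            _ => Err(()),
        }
    }

    fn main() {
        assert_eq!(parse_bool("true"), Ok(true));
        assert!(parse_bool("maybe").is_err());
    }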

@@ -92,7 +92,7 @@ impl UserChange {
     pub fn set_password(&mut self, pwd: String) {
         let salt = SaltString::generate(&mut scrypt::password_hash::rand_core::OsRng);
-        let params = PARAMS.lock().clone();
+        let params = *PARAMS.lock();
         let hash = scrypt::Scrypt
             .hash_password_customized(pwd.as_bytes(), None, None, params, &salt)
             .unwrap();
@@ -142,7 +142,7 @@ impl rusqlite::types::FromSql for FromSqlIpAddr {
         use rusqlite::types::ValueRef;
         match value {
             ValueRef::Null => Ok(FromSqlIpAddr(None)),
-            ValueRef::Blob(ref b) => match b.len() {
+            ValueRef::Blob(b) => match b.len() {
                 4 => {
                     let mut buf = [0u8; 4];
                     buf.copy_from_slice(b);


@@ -73,7 +73,7 @@ pub fn run(conn: &mut rusqlite::Connection, opts: &Options) -> Result<i32, Error
         warn!("The following analysis may be incorrect or encounter errors due to schema differences.");
     }
-    let (db_uuid, _config) = raw::read_meta(&conn)?;
+    let (db_uuid, _config) = raw::read_meta(conn)?;
     // Scan directories.
     let mut dirs_by_id: FnvHashMap<i32, Dir> = FnvHashMap::default();
@@ -141,7 +141,7 @@ pub fn run(conn: &mut rusqlite::Connection, opts: &Options) -> Result<i32, Error
         let cum_recordings = row.get(2)?;
         let mut stream = match dirs_by_id.get_mut(&dir_id) {
             None => Stream::default(),
-            Some(d) => d.remove(&stream_id).unwrap_or_else(Stream::default),
+            Some(d) => d.remove(&stream_id).unwrap_or_default(),
         };
         stream.cum_recordings = Some(cum_recordings);
         printed_error |= compare_stream(conn, dir_id, stream_id, opts, stream, &mut ctx)?;


@@ -67,12 +67,12 @@ fn diff_slices<T: std::fmt::Display + PartialEq>(
         match item {
             diff::Result::Left(i) => {
                 changed = true;
-                write!(&mut diff, "-{}\n", i)
+                writeln!(&mut diff, "-{}", i)
             }
-            diff::Result::Both(i, _) => write!(&mut diff, " {}\n", i),
+            diff::Result::Both(i, _) => writeln!(&mut diff, " {}", i),
             diff::Result::Right(i) => {
                 changed = true;
-                write!(&mut diff, "+{}\n", i)
+                writeln!(&mut diff, "+{}", i)
             }
         }
         .unwrap();
@@ -177,8 +177,8 @@ pub fn get_diffs(
     // Compare columns and indices for each table.
     for t in &tables1 {
-        let columns1 = get_table_columns(c1, &t)?;
-        let columns2 = get_table_columns(c2, &t)?;
+        let columns1 = get_table_columns(c1, t)?;
+        let columns2 = get_table_columns(c2, t)?;
         if let Some(diff) = diff_slices(n1, &columns1[..], n2, &columns2[..]) {
             write!(
                 &mut diffs,
@@ -187,8 +187,8 @@ pub fn get_diffs(
             )?;
         }
-        let mut indices1 = get_indices(c1, &t)?;
-        let mut indices2 = get_indices(c2, &t)?;
+        let mut indices1 = get_indices(c1, t)?;
+        let mut indices2 = get_indices(c2, t)?;
         indices1.sort_by(|a, b| a.name.cmp(&b.name));
         indices2.sort_by(|a, b| a.name.cmp(&b.name));
         if let Some(diff) = diff_slices(n1, &indices1[..], n2, &indices2[..]) {

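Note: the write! → writeln! changes above address clippy::write_with_newline, which flags a literal trailing "\n" in a write! format string. A small standalone sketch (not repository code):

    use std::fmt::Write;

    fn main() {
        let mut out = String::new();
        // A trailing "\n" inside write! is what clippy::write_with_newline flags...
        write!(&mut out, "-{}\n", 1).unwrap();
        // ...while writeln! appends the newline itself and keeps clippy quiet.
        writeln!(&mut out, "+{}", 2).unwrap();
        assert_eq!(out, "-1\n+2\n");
    }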

@@ -2113,7 +2113,7 @@ impl LockedDatabase {
 ///
 /// These are `pub` so that the `moonfire-nvr sql` command can pass to the SQLite3 binary with
 /// `-cmd`.
-pub static INTEGRITY_PRAGMAS: [&'static str; 3] = [
+pub static INTEGRITY_PRAGMAS: [&str; 3] = [
     // Enforce foreign keys. This is on by default with --features=bundled (as rusqlite
     // compiles the SQLite3 amalgamation with -DSQLITE_DEFAULT_FOREIGN_KEYS=1). Ensure it's
     // always on. Note that our foreign keys are immediate rather than deferred, so we have to


@@ -156,7 +156,7 @@ pub(crate) fn read_meta(dir: &Fd) -> Result<schema::DirMeta, Error> {
         );
     }
     let data = &data[pos..pos + len as usize];
-    let mut s = protobuf::CodedInputStream::from_bytes(&data);
+    let mut s = protobuf::CodedInputStream::from_bytes(data);
     meta.merge_from(&mut s)
         .map_err(|e| e.context("Unable to parse metadata proto"))?;
     Ok(meta)


@@ -239,7 +239,7 @@ pub struct StreamConfig {
 }
 sql!(StreamConfig);
-pub const STREAM_MODE_RECORD: &'static str = "record";
+pub const STREAM_MODE_RECORD: &str = "record";
 impl StreamConfig {
     pub fn is_empty(&self) -> bool {


@@ -374,6 +374,7 @@ impl Segment {
             // Note: this inner loop avoids ? for performance. Don't change these lines without
             // reading https://github.com/rust-lang/rust/issues/37939 and running
             // mp4::bench::build_index.
+            #[allow(clippy::question_mark)]
             if let Err(e) = f(&it) {
                 return Err(e);
             }

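Note: without the new attribute, clippy::question_mark suggests collapsing the explicit early return into f(&it)?;, which the comment above deliberately avoids for performance. A rough standalone sketch of the shape being allowed (illustrative only, not repository code):

    fn run_all(f: impl Fn(u32) -> Result<(), String>) -> Result<(), String> {
        for it in 0..10 {
            // clippy::question_mark would rewrite this as `f(it)?;`; the attribute
            // keeps the hand-expanded early return without a warning.
            #[allow(clippy::question_mark)]
            if let Err(e) = f(it) {
                return Err(e);
            }
        }
        Ok(())
    }

    fn main() {
        assert!(run_all(|_| Ok(())).is_ok());
        assert!(run_all(|i| if i == 3 { Err("boom".to_string()) } else { Ok(()) }).is_err());
    }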

@@ -344,7 +344,7 @@ impl State {
         }
         match self.signals_by_id.get(&signal) {
             None => bail_t!(InvalidArgument, "unknown signal {}", signal),
-            Some(ref s) => {
+            Some(s) => {
                 let states = self
                     .types_by_uuid
                     .get(&s.type_)


@@ -76,7 +76,7 @@ fn upgrade(args: &Args, target_ver: i32, conn: &mut rusqlite::Connection) -> Res
     for ver in old_ver..target_ver {
         info!("...from version {} to version {}", ver, ver + 1);
         let tx = conn.transaction()?;
-        upgraders[ver as usize](&args, &tx)?;
+        upgraders[ver as usize](args, &tx)?;
         tx.execute(
             r#"
             insert into version (id, unix_time, notes)
@@ -94,7 +94,7 @@ fn upgrade(args: &Args, target_ver: i32, conn: &mut rusqlite::Connection) -> Res
 pub fn run(args: &Args, conn: &mut rusqlite::Connection) -> Result<(), Error> {
     db::check_sqlite_version()?;
     db::set_integrity_pragmas(conn)?;
-    set_journal_mode(&conn, args.preset_journal)?;
+    set_journal_mode(conn, args.preset_journal)?;
     upgrade(args, db::EXPECTED_VERSION, conn)?;
     // As in "moonfire-nvr init": try for page_size=16384 and wal for the reasons explained there.
@@ -114,7 +114,7 @@ pub fn run(args: &Args, conn: &mut rusqlite::Connection) -> Result<(), Error> {
         )?;
     }
-    set_journal_mode(&conn, "wal")?;
+    set_journal_mode(conn, "wal")?;
     info!("...done.");
     Ok(())

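Note: most of the dropped &s in this commit (here &args and &conn) look like clippy::needless_borrow fixes: the value is already a reference, so the extra borrow only adds a layer that deref coercion immediately peels back off. A tiny standalone example (not repository code):

    fn greet(name: &str) -> String {
        format!("hello, {}", name)
    }

    fn main() {
        let name: &str = "world";
        // `&name` is a `&&str` that deref coercion flattens; clippy's needless_borrow
        // lint points out that passing `name` directly is equivalent and quieter.
        let a = greet(&name);
        let b = greet(name);
        assert_eq!(a, b);
    }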

@@ -221,7 +221,7 @@ fn update_camera(
         update camera set next_recording_id = :next_recording_id where id = :id
         "#,
     )?;
-    for (ref id, ref state) in &camera_state {
+    for (ref id, state) in &camera_state {
         stmt.execute(named_params! {
             ":id": &id,
             ":next_recording_id": &state.next_recording_id,


@@ -97,7 +97,7 @@ pub fn run(args: &super::Args, tx: &rusqlite::Transaction) -> Result<(), Error>
         meta.dir_uuid.extend_from_slice(dir_uuid_bytes);
         let open = meta.last_complete_open.mut_or_insert_default();
         open.id = open_id;
-        open.uuid.extend_from_slice(&open_uuid_bytes);
+        open.uuid.extend_from_slice(open_uuid_bytes);
     }
     dir::write_meta(d.as_raw_fd(), &meta)?;


@@ -55,7 +55,7 @@ fn open_sample_file_dir(tx: &rusqlite::Transaction) -> Result<Arc<dir::SampleFil
 }
 pub fn run(_args: &super::Args, tx: &rusqlite::Transaction) -> Result<(), Error> {
-    let d = open_sample_file_dir(&tx)?;
+    let d = open_sample_file_dir(tx)?;
     let mut stmt = tx.prepare(
         r#"
         select


@@ -37,7 +37,7 @@ fn maybe_upgrade_meta(dir: &dir::Fd, db_meta: &schema::DirMeta) -> Result<bool,
     dir_meta
         .merge_from(&mut s)
         .map_err(|e| e.context("Unable to parse metadata proto: {}"))?;
-    if let Err(e) = dir::SampleFileDir::check_consistent(&db_meta, &dir_meta) {
+    if let Err(e) = dir::SampleFileDir::check_consistent(db_meta, &dir_meta) {
         bail!(
             "Inconsistent db_meta={:?} dir_meta={:?}: {}",
             &db_meta,


@@ -432,7 +432,7 @@ impl<C: Clocks + Clone, D: DirWriter> Syncer<C, D> {
             let timeout = (t - now)
                 .to_std()
                 .unwrap_or_else(|_| StdDuration::new(0, 0));
-            match self.db.clocks().recv_timeout(&cmds, timeout) {
+            match self.db.clocks().recv_timeout(cmds, timeout) {
                 Err(mpsc::RecvTimeoutError::Disconnected) => return false, // cmd senders gone.
                 Err(mpsc::RecvTimeoutError::Timeout) => {
                     self.flush();


@@ -118,12 +118,8 @@ fn parse_url(raw: &str, allowed_schemes: &'static [&'static str]) -> Result<Opti
     if raw.is_empty() {
         return Ok(None);
     }
-    let url = url::Url::parse(&raw).with_context(|_| format!("can't parse {:?} as URL", &raw))?;
-    if allowed_schemes
-        .iter()
-        .find(|scheme| **scheme == url.scheme())
-        .is_none()
-    {
+    let url = url::Url::parse(raw).with_context(|_| format!("can't parse {:?} as URL", &raw))?;
+    if !allowed_schemes.iter().any(|scheme| *scheme == url.scheme()) {
         bail!("Unexpected scheme in URL {}", &url);
     }
     if !url.username().is_empty() || url.password().is_some() {
@@ -391,7 +387,7 @@ fn lower_retention(
     let dirs_to_open: Vec<_> = zero_limits.keys().copied().collect();
     db.lock().open_sample_file_dirs(&dirs_to_open[..])?;
     for (&dir_id, l) in &zero_limits {
-        writer::lower_retention(db.clone(), dir_id, &l)?;
+        writer::lower_retention(db.clone(), dir_id, l)?;
     }
     Ok(())
 }
@@ -557,7 +553,7 @@ fn edit_camera_dialog(db: &Arc<db::Database>, siv: &mut Cursive, item: &Option<i
         .find_name::<views::Button>(&format!("{}_test", t.as_str()))
         .unwrap();
     edit_url(
-        &s.config.url.as_ref().map(Url::as_str).unwrap_or(""),
+        s.config.url.as_ref().map(Url::as_str).unwrap_or(""),
         test_button,
     );
     dialog.call_on_name(
@@ -596,7 +592,7 @@ fn edit_camera_dialog(db: &Arc<db::Database>, siv: &mut Cursive, item: &Option<i
             ("short_name", &*camera.short_name),
             (
                 "onvif_base_url",
-                &camera
+                camera
                     .config
                     .onvif_base_url
                     .as_ref()


@@ -205,8 +205,8 @@ fn prepare_unix_socket(p: &Path) {
 fn make_listener(addr: &config::AddressConfig) -> Result<Listener, Error> {
     let sa: SocketAddr = match addr {
-        config::AddressConfig::Ipv4(a) => a.clone().into(),
-        config::AddressConfig::Ipv6(a) => a.clone().into(),
+        config::AddressConfig::Ipv4(a) => (*a).into(),
+        config::AddressConfig::Ipv6(a) => (*a).into(),
         config::AddressConfig::Unix(p) => {
             prepare_unix_socket(p);
             return Ok(Listener::Unix(
@@ -390,7 +390,7 @@ async fn inner(
     let web_handles = web_handles?;
     info!("Ready to serve HTTP requests");
-    let _ = shutdown_rx.as_future().await;
+    shutdown_rx.as_future().await;
     info!("Shutting down streamers and syncers.");
     tokio::task::spawn_blocking({


@@ -52,12 +52,7 @@ pub fn run(args: Args) -> Result<i32, Error> {
         db.push("?mode=ro");
     }
     Err(Command::new("sqlite3")
-        .args(
-            db::db::INTEGRITY_PRAGMAS
-                .iter()
-                .map(|p| ["-cmd", p])
-                .flatten(),
-        )
+        .args(db::db::INTEGRITY_PRAGMAS.iter().flat_map(|p| ["-cmd", p]))
         .arg(&db)
         .args(&args.arg)
         .exec()


@@ -446,7 +446,7 @@ impl<'a> ListRecordings<'a> {
         for id in v {
             map.serialize_entry(
                 id,
-                &VideoSampleEntry::from(&db.video_sample_entries_by_id().get(id).unwrap()),
+                &VideoSampleEntry::from(db.video_sample_entries_by_id().get(id).unwrap()),
             )?;
         }
         map.end()


@@ -801,7 +801,7 @@ impl slices::Slice for Slice {
             SliceType::Stsz => self.wrap_index(f, range.clone(), len, &Segment::stsz),
             SliceType::Stss => self.wrap_index(f, range.clone(), len, &Segment::stss),
             SliceType::Co64 => f.0.get_co64(range.clone(), len),
-            SliceType::VideoSampleData => return f.0.get_video_sample_data(p, range.clone()),
+            SliceType::VideoSampleData => return f.0.get_video_sample_data(p, range),
             SliceType::SubtitleSampleData => f.0.get_subtitle_sample_data(p, range.clone(), len),
             SliceType::Truns => self.wrap_truns(f, range.clone(), len as usize),
         };


@@ -98,7 +98,7 @@ struct Caller {
 type ResponseResult = Result<Response<Body>, HttpError>;
 fn serve_json<T: serde::ser::Serialize>(req: &Request<hyper::Body>, out: &T) -> ResponseResult {
-    let (mut resp, writer) = http_serve::streaming_body(&req).build();
+    let (mut resp, writer) = http_serve::streaming_body(req).build();
     resp.headers_mut().insert(
         header::CONTENT_TYPE,
         HeaderValue::from_static("application/json"),


@@ -50,10 +50,10 @@ impl Path {
                Some(p) => p,
                None => return Path::NotFound,
            };
-           if let Ok(id) = i32::from_str(&path) {
+           if let Ok(id) = i32::from_str(path) {
                return Path::InitSegment(id, debug);
            }
-           return Path::NotFound;
+           Path::NotFound
        } else if let Some(path) = path.strip_prefix("cameras/") {
            let (uuid, path) = match path.split_once('/') {
                Some(pair) => pair,


@@ -53,7 +53,7 @@ impl Service {
             0
         };
         let (sid, _) = l
-            .login_by_password(authreq, &r.username, r.password, Some(domain), flags)
+            .login_by_password(authreq, r.username, r.password, Some(domain), flags)
             .map_err(|e| plain_response(StatusCode::UNAUTHORIZED, e.to_string()))?;
         let cookie = encode_sid(sid, flags);
         Ok(Response::builder()