update some Rust dependencies

I didn't go to quite the latest version of everything, in an effort to
minimize duplicates in the cargo tree.
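(To see where the tree still has duplicates, `cargo tree --duplicates` run from the `server/` directory lists every crate present at more than one version.)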
Scott Lamb 2024-01-06 10:43:20 -08:00
parent 2bcee02ea6
commit 86816e862a
25 changed files with 695 additions and 597 deletions

server/Cargo.lock: generated, 1089 changed lines (diff suppressed because it is too large)

@@ -25,8 +25,11 @@ members = ["base", "db"]
 [workspace.dependencies]
 base64 = "0.21.0"
 h264-reader = "0.7.0"
-nix = "0.26.1"
+itertools = "0.12.0"
+nix = "0.27.0"
 tracing = { version = "0.1", features = ["log"] }
+tracing-log = "0.2"
+ring = "0.17.0"
 rusqlite = "0.30.0"

 [dependencies]
@@ -40,22 +43,21 @@ chrono = "0.4.23"
 cursive = { version = "0.20.0", default-features = false, features = ["termion-backend"] }
 db = { package = "moonfire-db", path = "db" }
 futures = "0.3"
-fnv = "1.0"
 h264-reader = { workspace = true }
 http = "0.2.3"
 http-serve = { version = "0.3.1", features = ["dir"] }
 hyper = { version = "0.14.2", features = ["http1", "server", "stream", "tcp"] }
-itertools = "0.10.0"
+itertools = { workspace = true }
 libc = "0.2"
 log = { version = "0.4" }
 memchr = "2.0.2"
-nix = { workspace = true}
+nix = { workspace = true, features = ["time", "user"] }
 nom = "7.0.0"
-password-hash = "0.4.2"
+password-hash = "0.5.0"
 protobuf = "3.0"
 reffers = "0.7.0"
 retina = "0.4.0"
-ring = "0.16.2"
+ring = { workspace = true }
 rusqlite = { workspace = true }
 serde = { version = "1.0", features = ["derive"] }
 serde_json = "1.0"
@@ -65,12 +67,12 @@ time = "0.1"
 tokio = { version = "1.24", features = ["macros", "rt-multi-thread", "signal", "sync", "time"] }
 tokio-stream = "0.1.5"
 tokio-tungstenite = "0.20.0"
-toml = "0.5"
+toml = "0.6"
 tracing = { workspace = true }
 tracing-subscriber = { version = "0.3.16", features = ["env-filter", "json"] }
 tracing-core = "0.1.30"
 tracing-futures = { version = "0.2.5", features = ["futures-03", "std-future"] }
-tracing-log = "0.1.3"
+tracing-log = { workspace = true }
 ulid = "1.0.0"
 url = "2.1.1"
 uuid = { version = "1.1.2", features = ["serde", "std", "v4"] }
@@ -78,11 +80,11 @@ flate2 = "1.0.26"
 git-version = "0.3.5"

 [target.'cfg(target_os = "linux")'.dependencies]
-libsystemd = "0.6.0"
+libsystemd = "0.7.0"

 [build-dependencies]
+ahash = "0.8"
 blake3 = "1.0.0"
-fnv = "1.0"
 walkdir = "2.3.3"

 [dev-dependencies]


@@ -14,6 +14,7 @@ nightly = []
 path = "lib.rs"

 [dependencies]
+ahash = "0.8"
 chrono = "0.4.23"
 coded = { git = "https://github.com/scottlamb/coded", rev = "2c97994974a73243d5dd12134831814f42cdb0e8"}
 futures = "0.3"
@@ -27,5 +28,5 @@ slab = "0.4"
 time = "0.1"
 tracing = { workspace = true }
 tracing-core = "0.1.30"
-tracing-log = "0.1.3"
+tracing-log = { workspace = true }
 tracing-subscriber = { version = "0.3.16", features = ["env-filter", "json"] }


@@ -10,3 +10,7 @@ pub mod time;
 pub mod tracing_setup;

 pub use crate::error::{Error, ErrorBuilder, ErrorKind, ResultExt};
+pub use ahash::RandomState;
+
+pub type FastHashMap<K, V> = std::collections::HashMap<K, V, ahash::RandomState>;
+pub type FastHashSet<K> = std::collections::HashSet<K, ahash::RandomState>;
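A note on call sites (an illustrative sketch, not part of the diff): `HashMap::new()` is only defined for the std hasher, so maps keyed with `ahash::RandomState` are built via `Default::default()` or `with_capacity_and_hasher`, which is the pattern the rest of this commit follows:

    use base::{FastHashMap, FastHashSet};

    fn main() {
        // Construct via `default()`, since `new()` is unavailable for a
        // non-std hasher...
        let mut m: FastHashMap<i32, String> = FastHashMap::default();
        m.insert(1, "one".to_owned());

        // ...or pre-sized, passing the default `ahash::RandomState` hasher.
        let mut s: FastHashSet<i32> =
            FastHashSet::with_capacity_and_hasher(16, Default::default());
        s.insert(1);
    }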


@@ -56,7 +56,7 @@ impl FileEncoding {
 /// `favicons/blah.ico` rather than `../../ui/dist/favicons/blah.ico.gz`.
 ///
 /// The best representation is gzipped if available, uncompressed otherwise.
-type FileMap = fnv::FnvHashMap<String, File>;
+type FileMap = std::collections::HashMap<String, File, ahash::RandomState>;

 fn stringify_files(files: &FileMap) -> Result<String, std::fmt::Error> {
     let mut buf = String::new();


@@ -21,23 +21,21 @@ blake3 = "1.0.0"
 byteorder = "1.0"
 cstr = "0.2.5"
 diff = "0.1.12"
-fnv = "1.0"
 futures = "0.3"
 h264-reader = { workspace = true }
 hashlink = "0.8.1"
-itertools = "0.10.0"
+itertools = { workspace = true }
 libc = "0.2"
-nix = "0.26.1"
+nix = { workspace = true, features = ["dir", "feature", "fs", "mman"] }
 num-rational = { version = "0.4.0", default-features = false, features = ["std"] }
 odds = { version = "0.4.0", features = ["std-vec"] }
-pretty-hex = "0.3.0"
+pretty-hex = "0.4.0"
 protobuf = "3.0"
-ring = "0.16.2"
+ring = { workspace = true }
 rusqlite = { workspace = true }
-scrypt = "0.10.0"
+scrypt = "0.11.0"
 serde = { version = "1.0", features = ["derive"] }
 serde_json = "1.0"
-#similar = "2.1.0"
 smallvec = "1.0"
 tempfile = "3.2.0"
 time = "0.1"


@@ -6,9 +6,9 @@
 use crate::json::UserConfig;
 use crate::schema::Permissions;
+use base::FastHashMap;
 use base::{bail, err, strutil, Error, ErrorKind, ResultExt as _};
 use base64::{engine::general_purpose::STANDARD_NO_PAD, Engine as _};
-use fnv::FnvHashMap;
 use protobuf::Message;
 use ring::rand::{SecureRandom, SystemRandom};
 use rusqlite::{named_params, params, Connection, Transaction};
@@ -42,7 +42,8 @@ fn params() -> &'static Params {
 /// For testing only: use fast but insecure hashes.
 /// Call via `testutil::init()`.
 pub(crate) fn set_test_config() {
-    let test_params = scrypt::Params::new(8, 8, 1).expect("test params should be valid");
+    let test_params = scrypt::Params::new(8, 8, 1, scrypt::Params::RECOMMENDED_LEN)
+        .expect("test params should be valid");
     if let Err(existing_params) = PARAMS.set(Params {
         actual: test_params,
         is_test: true,
@@ -386,7 +387,7 @@ pub(crate) struct State {
     /// TODO: Add eviction of clean sessions. Keep a linked hash set of clean session hashes and
     /// evict the oldest when its size exceeds a threshold. Or just evict everything on every flush
     /// (and accept more frequent database accesses).
-    sessions: FnvHashMap<SessionHash, Session>,
+    sessions: FastHashMap<SessionHash, Session>,
     rand: SystemRandom,
 }
@@ -396,7 +397,7 @@ impl State {
         let mut state = State {
             users_by_id: BTreeMap::new(),
             users_by_name: BTreeMap::new(),
-            sessions: FnvHashMap::default(),
+            sessions: FastHashMap::default(),
             rand: ring::rand::SystemRandom::new(),
         };
         let mut stmt = conn.prepare(
@@ -657,7 +658,7 @@ impl State {
         domain: Option<Vec<u8>>,
         creation_password_id: Option<i32>,
         flags: i32,
-        sessions: &'s mut FnvHashMap<SessionHash, Session>,
+        sessions: &'s mut FastHashMap<SessionHash, Session>,
         permissions: Permissions,
     ) -> Result<(RawSessionId, &'s Session), base::Error> {
         let mut session_id = RawSessionId([0u8; 48]);
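Beyond the hash-map rename, the substantive change in this file is the scrypt 0.10 → 0.11 bump: `Params::new` now takes a fourth argument giving the desired output length, where 0.10 took only `(log_n, r, p)`. A minimal sketch, assuming the 0.11 API:

    // scrypt 0.11: Params::new(log_n, r, p, len). Params::RECOMMENDED_LEN
    // keeps the previous default output length.
    let test_params = scrypt::Params::new(8, 8, 1, scrypt::Params::RECOMMENDED_LEN)
        .expect("test params should be valid");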


@@ -12,7 +12,7 @@ use crate::raw;
 use crate::recording;
 use crate::schema;
 use base::{err, Error};
-use fnv::{FnvHashMap, FnvHashSet};
+use base::{FastHashMap, FastHashSet};
 use nix::fcntl::AtFlags;
 use rusqlite::params;
 use std::os::unix::io::AsRawFd;
@@ -27,8 +27,8 @@ pub struct Options {
 #[derive(Default)]
 pub struct Context {
-    rows_to_delete: FnvHashSet<CompositeId>,
-    files_to_trash: FnvHashSet<(i32, CompositeId)>, // (dir_id, composite_id)
+    rows_to_delete: FastHashSet<CompositeId>,
+    files_to_trash: FastHashSet<(i32, CompositeId)>, // (dir_id, composite_id)
 }

 pub fn run(conn: &mut rusqlite::Connection, opts: &Options) -> Result<i32, Error> {
@@ -79,7 +79,7 @@ pub fn run(conn: &mut rusqlite::Connection, opts: &Options) -> Result<i32, Error
     let (db_uuid, _config) = raw::read_meta(conn)?;

     // Scan directories.
-    let mut dirs_by_id: FnvHashMap<i32, Dir> = FnvHashMap::default();
+    let mut dirs_by_id: FastHashMap<i32, Dir> = FastHashMap::default();
     {
         let mut dir_stmt = conn.prepare(
             r#"
@@ -229,11 +229,11 @@ struct Recording {
 #[derive(Default)]
 struct Stream {
-    recordings: FnvHashMap<i32, Recording>,
+    recordings: FastHashMap<i32, Recording>,
     cum_recordings: Option<i32>,
 }

-type Dir = FnvHashMap<i32, Stream>;
+type Dir = FastHashMap<i32, Stream>;

 fn summarize_index(video_index: &[u8]) -> Result<RecordingSummary, Error> {
     let mut it = recording::SampleIndexIterator::default();


@@ -37,8 +37,7 @@ use crate::signal;
 use base::clock::{self, Clocks};
 use base::strutil::encode_size;
 use base::{bail, err, Error};
-// use failure::{bail, err, Error, ResultExt};
-use fnv::{FnvHashMap, FnvHashSet};
+use base::{FastHashMap, FastHashSet};
 use hashlink::LinkedHashMap;
 use itertools::Itertools;
 use rusqlite::{named_params, params};
@@ -325,7 +324,7 @@ pub struct SampleFileDir {
     /// ids which are in the `garbage` database table (rather than `recording`) as of last commit
     /// but may still exist on disk. These can't be safely removed from the database yet.
-    pub(crate) garbage_needs_unlink: FnvHashSet<CompositeId>,
+    pub(crate) garbage_needs_unlink: FastHashSet<CompositeId>,

     /// ids which are in the `garbage` database table and are guaranteed to no longer exist on
     /// disk (have been unlinked and the dir has been synced). These may be removed from the
@@ -620,7 +619,7 @@ pub struct LockedDatabase {
     streams_by_id: BTreeMap<i32, Stream>,
     cameras_by_uuid: BTreeMap<Uuid, i32>, // values are ids.
     video_sample_entries_by_id: BTreeMap<i32, Arc<VideoSampleEntry>>,
-    video_index_cache: RefCell<LinkedHashMap<i64, Box<[u8]>, fnv::FnvBuildHasher>>,
+    video_index_cache: RefCell<LinkedHashMap<i64, Box<[u8]>, base::RandomState>>,
     on_flush: Vec<Box<dyn Fn() + Send>>,
 }
@@ -1010,7 +1009,7 @@ impl LockedDatabase {
         };
         let tx = self.conn.transaction()?;
         let mut new_ranges =
-            FnvHashMap::with_capacity_and_hasher(self.streams_by_id.len(), Default::default());
+            FastHashMap::with_capacity_and_hasher(self.streams_by_id.len(), Default::default());
         {
             let mut stmt = tx.prepare_cached(UPDATE_STREAM_COUNTERS_SQL)?;
             for (&stream_id, s) in &self.streams_by_id {
@@ -1100,7 +1099,7 @@ impl LockedDatabase {
             added_bytes: i64,
             deleted_bytes: i64,
         }
-        let mut dir_logs: FnvHashMap<i32, DirLog> = FnvHashMap::default();
+        let mut dir_logs: FastHashMap<i32, DirLog> = FastHashMap::default();

         // Process delete_garbage.
         for (&id, dir) in &mut self.sample_file_dirs_by_id {
@@ -1214,7 +1213,7 @@ impl LockedDatabase {
     /// Currently this only happens at startup (or during configuration), so this isn't a problem
     /// in practice.
     pub fn open_sample_file_dirs(&mut self, ids: &[i32]) -> Result<(), Error> {
-        let mut in_progress = FnvHashMap::with_capacity_and_hasher(ids.len(), Default::default());
+        let mut in_progress = FastHashMap::with_capacity_and_hasher(ids.len(), Default::default());
         for &id in ids {
             let e = in_progress.entry(id);
             use ::std::collections::hash_map::Entry;
@@ -1837,7 +1836,7 @@ impl LockedDatabase {
                 uuid,
                 dir: Some(dir),
                 last_complete_open: Some(*o),
-                garbage_needs_unlink: FnvHashSet::default(),
+                garbage_needs_unlink: FastHashSet::default(),
                 garbage_unlinked: Vec::new(),
             }),
             Entry::Occupied(_) => bail!(Internal, msg("duplicate sample file dir id {id}")),
@@ -2161,7 +2160,7 @@ impl LockedDatabase {
     pub fn signals_by_id(&self) -> &BTreeMap<u32, signal::Signal> {
         self.signal.signals_by_id()
     }
-    pub fn signal_types_by_uuid(&self) -> &FnvHashMap<Uuid, signal::Type> {
+    pub fn signal_types_by_uuid(&self) -> &FastHashMap<Uuid, signal::Type> {
         self.signal.types_by_uuid()
     }
     pub fn list_changes_by_time(


@@ -25,6 +25,7 @@ use std::ffi::CStr;
 use std::fs;
 use std::io::{Read, Write};
 use std::ops::Range;
+use std::os::fd::{AsFd, BorrowedFd};
 use std::os::unix::io::{AsRawFd, RawFd};
 use std::path::Path;
 use std::sync::Arc;
@@ -87,9 +88,9 @@ impl NixPath for CompositeIdPath {
 #[derive(Debug)]
 pub struct Fd(std::os::unix::io::RawFd);

-impl std::os::unix::io::AsRawFd for Fd {
-    fn as_raw_fd(&self) -> std::os::unix::io::RawFd {
-        self.0
+impl AsFd for Fd {
+    fn as_fd(&self) -> std::os::unix::prelude::BorrowedFd<'_> {
+        unsafe { BorrowedFd::borrow_raw(self.0) }
     }
 }
@@ -316,7 +317,7 @@ impl SampleFileDir {
     pub(crate) fn opendir(&self) -> Result<nix::dir::Dir, nix::Error> {
         nix::dir::Dir::openat(
-            self.fd.as_raw_fd(),
+            self.fd.as_fd().as_raw_fd(),
             ".",
             OFlag::O_DIRECTORY | OFlag::O_RDONLY,
             Mode::empty(),
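This hunk is the heart of the nix 0.26 → 0.27 migration: nix now works in terms of std's I/O-safety types, taking `impl AsFd` rather than bare `RawFd`s, so the wrapper type implements `AsFd`. The same impl with the `borrow_raw` safety argument spelled out as comments (assuming, as the surrounding code suggests, that `Fd` owns its descriptor until drop):

    impl AsFd for Fd {
        fn as_fd(&self) -> BorrowedFd<'_> {
            // Safety: `borrow_raw` requires the descriptor to stay open, and
            // not be reused for another resource, for the borrow's lifetime.
            // `Fd` owns descriptor `self.0` and closes it only when dropped,
            // so a borrow tied to `&self` upholds that.
            unsafe { BorrowedFd::borrow_raw(self.0) }
        }
    }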


@@ -22,7 +22,6 @@
 use std::convert::TryFrom;
 use std::future::Future;
-use std::os::unix::prelude::AsRawFd;
 use std::path::Path;
 use std::{
     ops::Range,
@@ -352,7 +351,7 @@ impl ReaderInt {
                 map_len,
                 nix::sys::mman::ProtFlags::PROT_READ,
                 nix::sys::mman::MapFlags::MAP_SHARED,
-                file.as_raw_fd(),
+                Some(&file),
                 offset,
             )
         }
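Likewise, `nix::sys::mman::mmap` in 0.27 borrows the file (`Some(&file)`) rather than taking a raw descriptor, which is why the `AsRawFd` import above disappears. A standalone sketch under that assumed signature (`f: Option<&F>` with `F: AsFd`, returning `*mut c_void`):

    use std::num::NonZeroUsize;

    use nix::sys::mman::{mmap, MapFlags, ProtFlags};

    fn map_readonly(
        file: &std::fs::File,
        len: NonZeroUsize,
    ) -> nix::Result<*mut std::ffi::c_void> {
        // Safety: nix marks mmap unsafe because the returned pointer escapes
        // Rust's lifetime and aliasing checks; callers must follow mmap(2).
        unsafe {
            mmap(
                None, // let the kernel choose the address
                len,
                ProtFlags::PROT_READ,
                MapFlags::MAP_SHARED,
                Some(file),
                0, // offset
            )
        }
    }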


@@ -7,8 +7,8 @@
 use crate::db::{self, CompositeId, SqlUuid};
 use crate::json::GlobalConfig;
 use crate::recording;
+use base::FastHashSet;
 use base::{bail, err, Error, ErrorKind, ResultExt as _};
-use fnv::FnvHashSet;
 use rusqlite::{named_params, params};
 use std::ops::Range;
 use uuid::Uuid;
@@ -422,8 +422,8 @@ pub(crate) fn get_range(
 pub(crate) fn list_garbage(
     conn: &rusqlite::Connection,
     dir_id: i32,
-) -> Result<FnvHashSet<CompositeId>, Error> {
-    let mut garbage = FnvHashSet::default();
+) -> Result<FastHashSet<CompositeId>, Error> {
+    let mut garbage = FastHashSet::default();
     let mut stmt =
         conn.prepare_cached("select composite_id from garbage where sample_file_dir_id = ?")?;
     let mut rows = stmt.query([&dir_id])?;


@@ -8,8 +8,8 @@
 use crate::json::{SignalConfig, SignalTypeConfig};
 use crate::{coding, days};
 use crate::{recording, SqlUuid};
+use base::FastHashMap;
 use base::{bail, err, Error};
-use fnv::FnvHashMap;
 use rusqlite::{params, Connection, Transaction};
 use std::collections::btree_map::Entry;
 use std::collections::{BTreeMap, BTreeSet};
@@ -25,7 +25,7 @@ pub(crate) struct State {
     /// All types with known states. Note that currently there's no requirement an entry here
     /// exists for every `type_` specified in a `Signal`, and there's an implied `0` (unknown)
     /// state for every `Type`.
-    types_by_uuid: FnvHashMap<Uuid, Type>,
+    types_by_uuid: FastHashMap<Uuid, Type>,

     /// All points in time.
     /// Invariants, checked by `State::debug_assert_point_invariants`:
@@ -691,8 +691,8 @@ impl State {
         Ok(signals)
     }

-    fn init_types(conn: &Connection) -> Result<FnvHashMap<Uuid, Type>, Error> {
-        let mut types = FnvHashMap::default();
+    fn init_types(conn: &Connection) -> Result<FastHashMap<Uuid, Type>, Error> {
+        let mut types = FastHashMap::default();
         let mut stmt = conn.prepare(
             r#"
             select
@@ -790,7 +790,7 @@ impl State {
     pub fn signals_by_id(&self) -> &BTreeMap<u32, Signal> {
         &self.signals_by_id
     }
-    pub fn types_by_uuid(&self) -> &FnvHashMap<Uuid, Type> {
+    pub fn types_by_uuid(&self) -> &FastHashMap<Uuid, Type> {
         &self.types_by_uuid
     }


@@ -9,7 +9,7 @@ use crate::db;
 use crate::dir;
 use crate::writer;
 use base::clock::Clocks;
-use fnv::FnvHashMap;
+use base::FastHashMap;
 use std::env;
 use std::sync::Arc;
 use std::thread;
@@ -47,7 +47,7 @@ pub fn init() {
 pub struct TestDb<C: Clocks + Clone> {
     pub db: Arc<db::Database<C>>,
-    pub dirs_by_stream_id: Arc<FnvHashMap<i32, Arc<dir::SampleFileDir>>>,
+    pub dirs_by_stream_id: Arc<FastHashMap<i32, Arc<dir::SampleFileDir>>>,
     pub shutdown_tx: base::shutdown::Sender,
     pub shutdown_rx: base::shutdown::Receiver,
     pub syncer_channel: writer::SyncerChannel<::std::fs::File>,
@@ -116,7 +116,7 @@ impl<C: Clocks + Clone> TestDb<C> {
             .get()
             .unwrap();
     }
-    let mut dirs_by_stream_id = FnvHashMap::default();
+    let mut dirs_by_stream_id = FastHashMap::default();
     dirs_by_stream_id.insert(TEST_STREAM_ID, dir);
     let (shutdown_tx, shutdown_rx) = base::shutdown::channel();
     let (syncer_channel, syncer_join) =


@@ -166,7 +166,7 @@ mod tests {
     use crate::compare;
    use crate::testutil;
     use base::err;
-    use fnv::FnvHashMap;
+    use base::FastHashMap;

     const BAD_ANAMORPHIC_VIDEO_SAMPLE_ENTRY: &[u8] = b"\x00\x00\x00\x84\x61\x76\x63\x31\x00\x00\
         \x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
@@ -344,7 +344,7 @@ mod tests {
         "#,
         )?;
         let mut rows = stmt.query(params![])?;
-        let mut pasp_by_id = FnvHashMap::default();
+        let mut pasp_by_id = FastHashMap::default();
         while let Some(row) = rows.next()? {
             let id: i32 = row.get(0)?;
             let pasp_h_spacing: i32 = row.get(1)?;


@@ -308,7 +308,7 @@ fn verify_dir_contents(
         params![],
         |r| r.get(0),
     )?;
-    let mut files = ::fnv::FnvHashSet::with_capacity_and_hasher(n as usize, Default::default());
+    let mut files = ::base::FastHashSet::with_capacity_and_hasher(n as usize, Default::default());
     for e in dir.iter() {
         let e = e?;
         let f = e.file_name();


@@ -10,6 +10,7 @@ use crate::dir;
 use crate::schema;
 use base::Error;
 use rusqlite::params;
+use std::os::fd::AsFd as _;
 use std::os::unix::io::AsRawFd;
 use std::path::PathBuf;
 use std::sync::Arc;
@@ -71,9 +72,9 @@ pub fn run(_args: &super::Args, tx: &rusqlite::Transaction) -> Result<(), Error>
         let from_path = super::UuidPath::from(sample_file_uuid.0);
         let to_path = crate::dir::CompositeIdPath::from(id);
         if let Err(e) = nix::fcntl::renameat(
-            Some(d.fd.as_raw_fd()),
+            Some(d.fd.as_fd().as_raw_fd()),
             &from_path,
-            Some(d.fd.as_raw_fd()),
+            Some(d.fd.as_fd().as_raw_fd()),
             &to_path,
         ) {
             if e == nix::Error::ENOENT {


@@ -15,6 +15,7 @@ use nix::sys::stat::Mode;
 use protobuf::Message;
 use rusqlite::params;
 use std::io::{Read, Write};
+use std::os::fd::AsFd as _;
 use std::os::unix::io::AsRawFd;
 use tracing::info;
 use uuid::Uuid;
@@ -25,7 +26,12 @@ const FIXED_DIR_META_LEN: usize = 512;
 fn maybe_upgrade_meta(dir: &dir::Fd, db_meta: &schema::DirMeta) -> Result<bool, Error> {
     let tmp_path = cstr!("meta.tmp");
     let meta_path = cstr!("meta");
-    let mut f = crate::fs::openat(dir.as_raw_fd(), meta_path, OFlag::O_RDONLY, Mode::empty())?;
+    let mut f = crate::fs::openat(
+        dir.as_fd().as_raw_fd(),
+        meta_path,
+        OFlag::O_RDONLY,
+        Mode::empty(),
+    )?;
     let mut data = Vec::new();
     f.read_to_end(&mut data)?;
     if data.len() == FIXED_DIR_META_LEN {
@@ -49,7 +55,7 @@ fn maybe_upgrade_meta(dir: &dir::Fd, db_meta: &schema::DirMeta) -> Result<bool,
         );
     }
     let mut f = crate::fs::openat(
-        dir.as_raw_fd(),
+        dir.as_fd().as_raw_fd(),
         tmp_path,
         OFlag::O_CREAT | OFlag::O_TRUNC | OFlag::O_WRONLY,
         Mode::S_IRUSR | Mode::S_IWUSR,
@@ -72,9 +78,9 @@ fn maybe_upgrade_meta(dir: &dir::Fd, db_meta: &schema::DirMeta) -> Result<bool,
     f.sync_all()?;
     nix::fcntl::renameat(
-        Some(dir.as_raw_fd()),
+        Some(dir.as_fd().as_raw_fd()),
         tmp_path,
-        Some(dir.as_raw_fd()),
+        Some(dir.as_fd().as_raw_fd()),
         meta_path,
     )?;
     Ok(true)
@@ -89,7 +95,7 @@ fn maybe_upgrade_meta(dir: &dir::Fd, db_meta: &schema::DirMeta) -> Result<bool,
 fn maybe_cleanup_garbage_uuids(dir: &dir::Fd) -> Result<bool, Error> {
     let mut need_sync = false;
     let mut dir2 = nix::dir::Dir::openat(
-        dir.as_raw_fd(),
+        dir.as_fd().as_raw_fd(),
         ".",
         OFlag::O_DIRECTORY | OFlag::O_RDONLY,
         Mode::empty(),
@@ -105,7 +111,7 @@ fn maybe_cleanup_garbage_uuids(dir: &dir::Fd) -> Result<bool, Error> {
         if Uuid::parse_str(f_str).is_ok() {
             info!("removing leftover garbage file {}", f_str);
             nix::unistd::unlinkat(
-                Some(dir.as_raw_fd()),
+                Some(dir.as_fd().as_raw_fd()),
                 f,
                 nix::unistd::UnlinkatFlags::NoRemoveDir,
             )?;


@@ -2,9 +2,9 @@
 // Copyright (C) 2021 The Moonfire NVR Authors; see AUTHORS and LICENSE.txt.
 // SPDX-License-Identifier: GPL-v3.0-or-later WITH GPL-3.0-linking-exception

+use base::FastHashMap;
 /// Upgrades a version 6 schema to a version 7 schema.
 use base::{err, Error};
-use fnv::FnvHashMap;
 use rusqlite::{named_params, params};
 use std::{convert::TryFrom, path::PathBuf};
 use tracing::debug;
@@ -133,7 +133,7 @@ fn copy_users(tx: &rusqlite::Transaction) -> Result<(), Error> {
 }

 fn copy_signal_types(tx: &rusqlite::Transaction) -> Result<(), Error> {
-    let mut types_ = FnvHashMap::default();
+    let mut types_ = FastHashMap::default();
     let mut stmt = tx.prepare("select type_uuid, value, name from signal_type_enum")?;
     let mut rows = stmt.query(params![])?;
     while let Some(row) = rows.next()? {
@@ -164,7 +164,7 @@ struct Signal {
 }

 fn copy_signals(tx: &rusqlite::Transaction) -> Result<(), Error> {
-    let mut signals = FnvHashMap::default();
+    let mut signals = FastHashMap::default();

     // Read from signal table.
     {


@@ -9,8 +9,8 @@ use crate::dir;
 use crate::recording::{self, MAX_RECORDING_WALL_DURATION};
 use base::clock::{self, Clocks};
 use base::shutdown::ShutdownError;
+use base::FastHashMap;
 use base::{bail, err, Error};
-use fnv::FnvHashMap;
 use std::cmp::{self, Ordering};
 use std::convert::TryFrom;
 use std::io;
@@ -294,7 +294,7 @@ impl<F: FileWriter> SyncerChannel<F> {
 /// on opening.
 fn list_files_to_abandon(
     dir: &dir::SampleFileDir,
-    streams_to_next: FnvHashMap<i32, i32>,
+    streams_to_next: FastHashMap<i32, i32>,
 ) -> Result<Vec<CompositeId>, Error> {
     let mut v = Vec::new();
     let mut d = dir.opendir()?;
@@ -330,7 +330,7 @@ impl<C: Clocks + Clone> Syncer<C, Arc<dir::SampleFileDir>> {
         // Abandon files.
         // First, get a list of the streams in question.
-        let streams_to_next: FnvHashMap<_, _> = l
+        let streams_to_next: FastHashMap<_, _> = l
             .streams_by_id()
             .iter()
             .filter_map(|(&k, v)| {


@@ -4,14 +4,14 @@
 //! UI bundled (compiled/linked) into the executable for single-file deployment.

-use fnv::FnvHashMap;
+use base::FastHashMap;
 use http::{header, HeaderMap, HeaderValue};
 use std::io::Read;
 use std::sync::OnceLock;

 use crate::body::{BoxedError, Chunk};

-pub struct Ui(FnvHashMap<&'static str, FileSet>);
+pub struct Ui(FastHashMap<&'static str, FileSet>);

 /// A file as passed in from `build.rs`.
 struct BuildFile {


@@ -7,10 +7,10 @@ use crate::web;
 use crate::web::accept::Listener;
 use base::clock;
 use base::err;
+use base::FastHashMap;
 use base::{bail, Error};
 use bpaf::Bpaf;
 use db::{dir, writer};
-use fnv::FnvHashMap;
 use hyper::service::{make_service_fn, service_fn};
 use itertools::Itertools;
 use retina::client::SessionGroup;
@@ -134,7 +134,7 @@ struct Syncer {
 }

 #[cfg(target_os = "linux")]
-fn get_preopened_sockets() -> Result<FnvHashMap<String, Listener>, Error> {
+fn get_preopened_sockets() -> Result<FastHashMap<String, Listener>, Error> {
     use libsystemd::activation::IsType as _;
     use std::os::fd::{FromRawFd, IntoRawFd};
@@ -142,7 +142,7 @@ fn get_preopened_sockets() -> Result<FastHashMap<String, Listener>, Error> {
     // activation.
     if std::env::var_os("LISTEN_FDS").is_none() {
         info!("no LISTEN_FDs");
-        return Ok(FnvHashMap::default());
+        return Ok(FastHashMap::default());
     }

     let sockets = libsystemd::activation::receive_descriptors_with_names(false)
@@ -176,13 +176,14 @@ fn get_preopened_sockets() -> Result<FastHashMap<String, Listener>, Error> {
 }

 #[cfg(not(target_os = "linux"))]
-fn get_preopened_sockets() -> Result<FnvHashMap<String, Listener>, Error> {
-    Ok(FnvHashMap::default())
+fn get_preopened_sockets() -> Result<FastHashMap<String, Listener>, Error> {
+    Ok(FastHashMap::default())
 }

 fn read_config(path: &Path) -> Result<ConfigFile, Error> {
     let config = std::fs::read(path)?;
-    let config = toml::from_slice(&config).map_err(|e| err!(InvalidArgument, source(e)))?;
+    let config = std::str::from_utf8(&config).map_err(|e| err!(InvalidArgument, source(e)))?;
+    let config = toml::from_str(config).map_err(|e| err!(InvalidArgument, source(e)))?;
     Ok(config)
 }
@@ -267,7 +268,7 @@ fn prepare_unix_socket(p: &Path) {
 fn make_listener(
     addr: &config::AddressConfig,
-    #[cfg_attr(not(target_os = "linux"), allow(unused))] preopened: &mut FnvHashMap<
+    #[cfg_attr(not(target_os = "linux"), allow(unused))] preopened: &mut FastHashMap<
         String,
         Listener,
     >,
@@ -341,11 +342,11 @@ async fn inner(
     // Start a streamer for each stream.
     let mut streamers = Vec::new();
-    let mut session_groups_by_camera: FnvHashMap<i32, Arc<retina::client::SessionGroup>> =
-        FnvHashMap::default();
+    let mut session_groups_by_camera: FastHashMap<i32, Arc<retina::client::SessionGroup>> =
+        FastHashMap::default();
     let syncers = if !read_only {
         let l = db.lock();
-        let mut dirs = FnvHashMap::with_capacity_and_hasher(
+        let mut dirs = FastHashMap::with_capacity_and_hasher(
             l.sample_file_dirs_by_id().len(),
             Default::default(),
         );
@@ -377,7 +378,7 @@ async fn inner(
         // Then, with the lock dropped, create syncers.
         drop(l);
-        let mut syncers = FnvHashMap::with_capacity_and_hasher(dirs.len(), Default::default());
+        let mut syncers = FastHashMap::with_capacity_and_hasher(dirs.len(), Default::default());
         for (id, dir) in dirs.drain() {
             let (channel, join) = writer::start_syncer(db.clone(), shutdown_rx.clone(), id)?;
             syncers.insert(id, Syncer { dir, channel, join });
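The toml 0.5 → 0.6 bump also needed a code change here: 0.6 removed `from_slice` and parses only from `&str`, hence the explicit UTF-8 decode in `read_config`. The same pattern standalone (using `toml::Value` so the sketch is self-contained):

    fn read_toml(path: &std::path::Path) -> Result<toml::Value, Box<dyn std::error::Error>> {
        let raw = std::fs::read(path)?;
        // toml 0.6 has no from_slice; validate UTF-8 first, then parse.
        let text = std::str::from_utf8(&raw)?;
        Ok(toml::from_str(text)?)
    }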


@@ -985,7 +985,7 @@ impl FileBuilder {
     pub fn build(
         mut self,
         db: Arc<db::Database>,
-        dirs_by_stream_id: Arc<::fnv::FnvHashMap<i32, Arc<dir::SampleFileDir>>>,
+        dirs_by_stream_id: Arc<::base::FastHashMap<i32, Arc<dir::SampleFileDir>>>,
     ) -> Result<File, Error> {
         let mut max_end = None;
         let mut etag = blake3::Hasher::new();
@@ -1777,7 +1777,7 @@ impl BodyState {
 struct FileInner {
     db: Arc<db::Database>,
-    dirs_by_stream_id: Arc<::fnv::FnvHashMap<i32, Arc<dir::SampleFileDir>>>,
+    dirs_by_stream_id: Arc<::base::FastHashMap<i32, Arc<dir::SampleFileDir>>>,
     segments: Vec<Segment>,
     slices: Slices<Slice>,
     buf: Vec<u8>,


@@ -20,13 +20,13 @@ use crate::mp4;
 use crate::web::static_file::Ui;
 use base::err;
 use base::Error;
+use base::FastHashMap;
 use base::ResultExt;
 use base::{bail, clock::Clocks, ErrorKind};
 use core::borrow::Borrow;
 use core::str::FromStr;
 use db::dir::SampleFileDir;
 use db::{auth, recording};
-use fnv::FnvHashMap;
 use http::header::{self, HeaderValue};
 use http::{status::StatusCode, Request, Response};
 use hyper::body::Bytes;
@@ -172,7 +172,7 @@ pub struct Config<'a> {
 pub struct Service {
     db: Arc<db::Database>,
     ui: Ui,
-    dirs_by_stream_id: Arc<FnvHashMap<i32, Arc<SampleFileDir>>>,
+    dirs_by_stream_id: Arc<FastHashMap<i32, Arc<SampleFileDir>>>,
     time_zone_name: String,
     allow_unauthenticated_permissions: Option<db::Permissions>,
     trust_forward_hdrs: bool,
@@ -199,7 +199,7 @@ impl Service {
         let dirs_by_stream_id = {
             let l = config.db.lock();
             let mut d =
-                FnvHashMap::with_capacity_and_hasher(l.streams_by_id().len(), Default::default());
+                FastHashMap::with_capacity_and_hasher(l.streams_by_id().len(), Default::default());
             for (&id, s) in l.streams_by_id().iter() {
                 let dir_id = match s.sample_file_dir_id {
                     Some(d) => d,


@@ -144,8 +144,8 @@ fn encode_sid(sid: db::RawSessionId, flags: i32) -> String {
 #[cfg(test)]
 mod tests {
+    use base::FastHashMap;
     use db::testutil;
-    use fnv::FnvHashMap;
     use tracing::info;

     use crate::web::tests::Server;
@@ -163,7 +163,7 @@ mod tests {
         let resp = cli.post(&login_url).send().await.unwrap();
         assert_eq!(resp.status(), reqwest::StatusCode::BAD_REQUEST);
-        let mut p = FnvHashMap::default();
+        let mut p = FastHashMap::default();
         p.insert("username", "slamb");
         p.insert("password", "asdf");
         let resp = cli.post(&login_url).json(&p).send().await.unwrap();
@@ -190,7 +190,7 @@ mod tests {
         testutil::init();
         let s = Server::new(None);
         let cli = reqwest::Client::new();
-        let mut p = FnvHashMap::default();
+        let mut p = FastHashMap::default();
         p.insert("username", "slamb");
         p.insert("password", "hunter2");
         let resp = cli
@@ -239,7 +239,7 @@ mod tests {
             .get("csrf")
             .unwrap()
             .as_str();
-        let mut p = FnvHashMap::default();
+        let mut p = FastHashMap::default();
         p.insert("csrf", csrf);
         let resp = cli
             .post(&format!("{}/api/logout", &s.base_url))