clippy
parent dad664c244
commit 140f625a80
@@ -74,17 +74,13 @@ fn parse_zone(input: &str) -> IResult<&str, i32> {
 }
 
 impl Time {
+    pub const MIN: Self = Time(i64::MIN);
+    pub const MAX: Self = Time(i64::MAX);
+
     pub fn new(tm: time::Timespec) -> Self {
         Time(tm.sec * TIME_UNITS_PER_SEC + tm.nsec as i64 * TIME_UNITS_PER_SEC / 1_000_000_000)
     }
 
-    pub const fn min_value() -> Self {
-        Time(i64::min_value())
-    }
-    pub const fn max_value() -> Self {
-        Time(i64::max_value())
-    }
-
     /// Parses a time as either 90,000ths of a second since epoch or a RFC 3339-like string.
     ///
     /// The former is 90,000ths of a second since 1970-01-01T00:00:00 UTC, excluding leap seconds.
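Most hunks in this commit follow the same clippy-guided substitution: the older `min_value()` / `max_value()` calls give way to the `MIN` / `MAX` associated constants, both on the primitive integer types and on `Time` itself (recent clippy flags the old form via lints such as clippy::legacy_numeric_constants). A minimal standalone sketch of the resulting shape, assuming a simplified `Time` and a hypothetical `from_secs_nanos` constructor in place of the crate's `Time::new(time::Timespec)`:

    // Sketch only, not the crate's real code: a 90 kHz timestamp newtype with the
    // associated constants this commit introduces.
    const TIME_UNITS_PER_SEC: i64 = 90_000; // 90,000ths of a second, per the doc comment above

    #[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord)]
    pub struct Time(pub i64);

    impl Time {
        pub const MIN: Self = Time(i64::MIN);
        pub const MAX: Self = Time(i64::MAX);

        /// Hypothetical stand-in for Time::new: seconds + nanoseconds since the
        /// epoch, converted to 90,000ths of a second.
        pub fn from_secs_nanos(sec: i64, nsec: i64) -> Self {
            Time(sec * TIME_UNITS_PER_SEC + nsec * TIME_UNITS_PER_SEC / 1_000_000_000)
        }
    }

    fn main() {
        // The "query everything" range used repeatedly in the signal tests below.
        let all = Time::MIN..Time::MAX;
        assert!(all.contains(&Time::from_secs_nanos(1_556_305_140, 0)));
        println!("{all:?}");
    }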
@@ -309,7 +309,7 @@ fn compare_stream(
     ctx: &mut Context,
 ) -> Result<bool, Error> {
     let start = CompositeId::new(stream_id, 0);
-    let end = CompositeId::new(stream_id, i32::max_value());
+    let end = CompositeId::new(stream_id, i32::MAX);
     let mut printed_error = false;
     let cum_recordings = stream
         .cum_recordings
@@ -281,7 +281,7 @@ impl Segment {
         // include it for consistency with the fast path. It'd be bizarre to have it included or
         // not based on desired_media_range_90k.start.
         let end_90k = if desired_media_range_90k.end == recording.media_duration_90k {
-            i32::max_value()
+            i32::MAX
         } else {
             desired_media_range_90k.end
         };
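As the comments note, `i32::MAX` acts as an open-ended sentinel meaning "through whatever the recording actually contains". A sketch of just that decision, with illustrative names rather than the crate's API:

    // Illustrative only: pick the effective end of a media range, using i32::MAX
    // as the "no upper bound" sentinel when the caller asked for the whole recording.
    fn effective_end_90k(desired_end_90k: i32, media_duration_90k: i32) -> i32 {
        if desired_end_90k == media_duration_90k {
            i32::MAX // open-ended, for consistency with the fast path described above
        } else {
            desired_end_90k
        }
    }

    fn main() {
        assert_eq!(effective_end_90k(90_000, 90_000), i32::MAX);
        assert_eq!(effective_end_90k(45_000, 90_000), 45_000);
    }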
@@ -169,7 +169,7 @@ impl<'a> PointDataIterator<'a> {
                 msg("signal overflow: {} + {}", self.cur_signal, signal_delta)
             )
         })?;
-        if state > u16::max_value() as u32 {
+        if state > u32::from(u16::MAX) {
             bail!(OutOfRange, msg("state overflow: {state}"));
         }
         self.cur_pos = p;
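`u32::from(u16::MAX)` performs the same bounds check as `u16::max_value() as u32`, but spells the widening as an infallible conversion, which clippy prefers because `From` cannot silently truncate. A standalone sketch of that overflow guard, with error handling simplified to a plain `Result<_, String>` rather than the crate's `bail!`/`msg` macros:

    // Sketch: a value computed in u32 must fit in u16 before being narrowed back down.
    fn check_state(state: u32) -> Result<u16, String> {
        if state > u32::from(u16::MAX) {
            return Err(format!("state overflow: {state}"));
        }
        Ok(state as u16) // safe after the check above
    }

    fn main() {
        assert_eq!(check_state(65_535), Ok(65_535));
        assert!(check_state(65_536).is_err());
    }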
@@ -858,10 +858,9 @@ mod tests {
         let mut conn = Connection::open_in_memory().unwrap();
         db::init(&mut conn).unwrap();
         let s = State::init(&conn, &GlobalConfig::default()).unwrap();
-        s.list_changes_by_time(
-            recording::Time::min_value()..recording::Time::max_value(),
-            &mut |_r| panic!("no changes expected"),
-        );
+        s.list_changes_by_time(recording::Time::MIN..recording::Time::MAX, &mut |_r| {
+            panic!("no changes expected")
+        });
     }
 
     #[test]
@@ -912,10 +911,9 @@ mod tests {
             ..Default::default()
         };
         let mut s = State::init(&conn, &config).unwrap();
-        s.list_changes_by_time(
-            recording::Time::min_value()..recording::Time::max_value(),
-            &mut |_r| panic!("no changes expected"),
-        );
+        s.list_changes_by_time(recording::Time::MIN..recording::Time::MAX, &mut |_r| {
+            panic!("no changes expected")
+        });
         const START: recording::Time = recording::Time(140067462600000); // 2019-04-26T11:59:00
         const NOW: recording::Time = recording::Time(140067468000000); // 2019-04-26T12:00:00
         s.update_signals(START..NOW, &[1, 2], &[2, 1]).unwrap();
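The two constants above differ by 5,400,000 units, i.e. 60 s × 90,000 units/s, consistent with the one-minute gap between the commented timestamps. A quick check, assuming the 90 kHz unit from the `Time` doc comment:

    fn main() {
        const TIME_UNITS_PER_SEC: i64 = 90_000; // 90,000ths of a second
        // NOW - START should be exactly one minute.
        assert_eq!(140_067_468_000_000_i64 - 140_067_462_600_000, 60 * TIME_UNITS_PER_SEC);
    }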
@@ -944,14 +942,12 @@ mod tests {
             },
         ];
 
-        s.list_changes_by_time(
-            recording::Time::min_value()..recording::Time::max_value(),
-            &mut |r| rows.push(*r),
-        );
-        s.list_changes_by_time(
-            recording::Time::max_value()..recording::Time::min_value(),
-            &mut |_r| panic!("no changes expected"),
-        );
+        s.list_changes_by_time(recording::Time::MIN..recording::Time::MAX, &mut |r| {
+            rows.push(*r)
+        });
+        s.list_changes_by_time(recording::Time::MAX..recording::Time::MIN, &mut |_r| {
+            panic!("no changes expected")
+        });
         assert_eq!(&rows[..], EXPECTED);
         let mut expected_days = days::Map::default();
         expected_days.0.insert(
@@ -979,10 +975,9 @@ mod tests {
         drop(s);
         let mut s = State::init(&conn, &config).unwrap();
         rows.clear();
-        s.list_changes_by_time(
-            recording::Time::min_value()..recording::Time::max_value(),
-            &mut |r| rows.push(*r),
-        );
+        s.list_changes_by_time(recording::Time::MIN..recording::Time::MAX, &mut |r| {
+            rows.push(*r)
+        });
         assert_eq!(&rows[..], EXPECTED);
 
         // Go through it again. This time, hit the max number of signals, forcing START to be
@@ -1012,10 +1007,9 @@ mod tests {
                 state: 0,
             },
         ];
-        s.list_changes_by_time(
-            recording::Time::min_value()..recording::Time::max_value(),
-            &mut |r| rows.push(*r),
-        );
+        s.list_changes_by_time(recording::Time::MIN..recording::Time::MAX, &mut |r| {
+            rows.push(*r)
+        });
         assert_eq!(&rows[..], EXPECTED2);
 
         {
@@ -1026,10 +1020,9 @@ mod tests {
         drop(s);
         let s = State::init(&conn, &config).unwrap();
         rows.clear();
-        s.list_changes_by_time(
-            recording::Time::min_value()..recording::Time::max_value(),
-            &mut |r| rows.push(*r),
-        );
+        s.list_changes_by_time(recording::Time::MIN..recording::Time::MAX, &mut |r| {
+            rows.push(*r)
+        });
         assert_eq!(&rows[..], EXPECTED2);
     }
 }
@@ -730,9 +730,7 @@ impl<'a, C: Clocks + Clone, D: DirWriter> Writer<'a, C, D> {
             self.stream_id,
             db::RecordingToInsert {
                 run_offset: prev.map(|p| p.run_offset + 1).unwrap_or(0),
-                start: prev
-                    .map(|p| p.end)
-                    .unwrap_or(recording::Time(i64::max_value())),
+                start: prev.map(|p| p.end).unwrap_or(recording::Time::MAX),
                 video_sample_entry_id,
                 flags: db::RecordingFlags::Growing as i32,
                 ..Default::default()
@@ -749,7 +747,7 @@ impl<'a, C: Clocks + Clone, D: DirWriter> Writer<'a, C, D> {
             e: recording::SampleIndexEncoder::default(),
             id,
             hasher: blake3::Hasher::new(),
-            local_start: recording::Time(i64::max_value()),
+            local_start: recording::Time::MAX,
             unindexed_sample: None,
             video_sample_entry_id,
         });
@@ -87,7 +87,7 @@ fn tab_complete(
     menu::Tree::new().with(|tree| {
         for completion in completions {
             let edit_view = edit_view.clone();
-            tree.add_leaf(&completion.clone(), move |siv| {
+            tree.add_leaf(completion.clone(), move |siv| {
                 edit_view.borrow_mut().set_content(&completion)(siv)
            })
        }
@@ -951,7 +951,7 @@ impl FileBuilder {
             }
             let s = Segment::new(
                 db,
-                &row,
+                row,
                 rel_media_range_90k,
                 self.next_frame_num,
                 start_at_key,
@@ -1071,7 +1071,7 @@ impl FileBuilder {
 
         // If the segment is > 4 GiB, the 32-bit trun data offsets are untrustworthy.
         // We'd need multiple moof+mdat sequences to support large media segments properly.
-        if self.body.slices.len() > u32::max_value() as u64 {
+        if self.body.slices.len() > u64::from(u32::MAX) {
             bail!(
                 OutOfRange,
                 msg(
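This is the same lossless-widening idiom at u32/u64 scale: the guard rejects a body whose 32-bit `trun` data offsets would overflow, per the comment above. A minimal sketch, assuming the total body length is tracked as a `u64` and simplifying the error type:

    // Sketch: refuse to emit a single moof+mdat pair whose offsets would not fit
    // in the 32-bit data_offset field of a trun box.
    fn check_trun_offsets_fit(total_len: u64) -> Result<(), String> {
        if total_len > u64::from(u32::MAX) {
            return Err(format!(
                "media segment of {total_len} bytes would need multiple moof+mdat sequences"
            ));
        }
        Ok(())
    }

    fn main() {
        assert!(check_trun_offsets_fit(u64::from(u32::MAX)).is_ok());
        assert!(check_trun_offsets_fit(u64::from(u32::MAX) + 1).is_err());
    }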
@@ -429,8 +429,8 @@ impl Service {
         type_: db::StreamType,
     ) -> ResponseResult {
         let (r, split) = {
-            let mut time = recording::Time::min_value()..recording::Time::max_value();
-            let mut split = recording::Duration(i64::max_value());
+            let mut time = recording::Time::MIN..recording::Time::MAX;
+            let mut split = recording::Duration(i64::MAX);
             if let Some(q) = req.uri().query() {
                 for (key, value) in form_urlencoded::parse(q.as_bytes()) {
                     let (key, value) = (key.borrow(), value.borrow());
@@ -56,7 +56,7 @@ impl Service {
     }
 
     fn get_signals(&self, req: &Request<hyper::Body>) -> ResponseResult {
-        let mut time = recording::Time::min_value()..recording::Time::max_value();
+        let mut time = recording::Time::MIN..recording::Time::MAX;
         if let Some(q) = req.uri().query() {
             for (key, value) in form_urlencoded::parse(q.as_bytes()) {
                 let (key, value) = (key.borrow(), value.borrow());
@@ -117,7 +117,7 @@ impl Service {
 
         // Add a segment for the relevant part of the recording, if any.
         // Note all calculations here are in wall times / wall durations.
-        let end_time = s.end_time.unwrap_or(i64::max_value());
+        let end_time = s.end_time.unwrap_or(i64::MAX);
         let wd = i64::from(r.wall_duration_90k);
         if s.start_time <= cur_off + wd && cur_off < end_time {
             let start = cmp::max(0, s.start_time - cur_off);
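The surrounding logic (in wall-clock 90 kHz units, per the comment) treats a missing `end_time` as unbounded via `i64::MAX`, then applies an ordinary interval overlap test. A small sketch with illustrative names, not the service's real signature:

    // Sketch: does the requested wall-time window [start_time, end_time) overlap a
    // recording occupying [cur_off, cur_off + wall_duration)? A missing end_time
    // means "no upper limit".
    fn overlaps(start_time: i64, end_time: Option<i64>, cur_off: i64, wall_duration: i64) -> bool {
        let end_time = end_time.unwrap_or(i64::MAX);
        start_time <= cur_off + wall_duration && cur_off < end_time
    }

    fn main() {
        assert!(overlaps(0, None, 100, 50)); // unbounded window covers everything from 0 on
        assert!(!overlaps(200, Some(300), 100, 50)); // recording ends before the window starts
    }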