ui list view: tooltip to see why recording ended

Users are often puzzled by short recordings. Previously the only way
to find the reason was to examine Moonfire's logs. Showing the reason
directly in the UI is a much better experience: it appears right where
you're wondering about it, and it doesn't depend on logs that may have
already been rotated away.

Fixes #302
This commit is contained in:
Scott Lamb
2024-06-01 07:46:11 -07:00
parent adf73a2da1
commit 0422593ec6
11 changed files with 54 additions and 15 deletions

View File

@@ -179,7 +179,7 @@ impl std::fmt::Debug for VideoSampleEntryToInsert {
}
/// A row used in `list_recordings_by_time` and `list_recordings_by_id`.
#[derive(Copy, Clone, Debug)]
#[derive(Clone, Debug)]
pub struct ListRecordingsRow {
pub start: recording::Time,
pub video_sample_entry_id: i32,
@@ -200,6 +200,7 @@ pub struct ListRecordingsRow {
/// (It's not included in the `recording_cover` index, so adding it to
/// `list_recordings_by_time` would be inefficient.)
pub prev_media_duration_and_runs: Option<(recording::Duration, i32)>,
pub end_reason: Option<String>,
}
/// A row used in `list_aggregated_recordings`.
@@ -217,6 +218,7 @@ pub struct ListAggregatedRecordingsRow {
pub first_uncommitted: Option<i32>,
pub growing: bool,
pub has_trailing_zero: bool,
pub end_reason: Option<String>,
}
impl ListAggregatedRecordingsRow {
@@ -241,6 +243,7 @@ impl ListAggregatedRecordingsRow {
},
growing,
has_trailing_zero: (row.flags & RecordingFlags::TrailingZero as i32) != 0,
end_reason: row.end_reason,
}
}
}
@@ -301,6 +304,7 @@ impl RecordingToInsert {
open_id,
flags: self.flags | RecordingFlags::Uncommitted as i32,
prev_media_duration_and_runs: Some((self.prev_media_duration, self.prev_runs)),
end_reason: self.end_reason.clone(),
}
}
}
@@ -1376,7 +1380,7 @@ impl LockedDatabase {
stream_id: i32,
desired_time: Range<recording::Time>,
forced_split: recording::Duration,
f: &mut dyn FnMut(&ListAggregatedRecordingsRow) -> Result<(), base::Error>,
f: &mut dyn FnMut(ListAggregatedRecordingsRow) -> Result<(), base::Error>,
) -> Result<(), base::Error> {
// Iterate, maintaining a map from a recording_id to the aggregated row for the latest
// batch of recordings from the run starting at that id. Runs can be split into multiple
@@ -1410,8 +1414,7 @@ impl LockedDatabase {
|| new_dur >= forced_split;
if needs_flush {
// flush then start a new entry.
f(a)?;
*a = ListAggregatedRecordingsRow::from(row);
f(std::mem::replace(a, ListAggregatedRecordingsRow::from(row)))?;
} else {
// append.
if a.time.end != row.start {
@@ -1450,6 +1453,7 @@ impl LockedDatabase {
}
a.growing = growing;
a.has_trailing_zero = has_trailing_zero;
a.end_reason = row.end_reason;
}
}
Entry::Vacant(e) => {
@@ -1458,7 +1462,7 @@ impl LockedDatabase {
}
Ok(())
})?;
for a in aggs.values() {
for a in aggs.into_values() {
f(a)?;
}
Ok(())

View File

@@ -26,7 +26,8 @@ const LIST_RECORDINGS_BY_TIME_SQL: &str = r#"
recording.video_samples,
recording.video_sync_samples,
recording.video_sample_entry_id,
recording.open_id
recording.open_id,
recording.end_reason
from
recording
where
@@ -51,6 +52,7 @@ const LIST_RECORDINGS_BY_ID_SQL: &str = r#"
recording.video_sync_samples,
recording.video_sample_entry_id,
recording.open_id,
recording.end_reason,
recording.prev_media_duration_90k,
recording.prev_runs
from
@@ -158,11 +160,12 @@ fn list_recordings_inner(
video_sync_samples: row.get(8).err_kind(ErrorKind::Internal)?,
video_sample_entry_id: row.get(9).err_kind(ErrorKind::Internal)?,
open_id: row.get(10).err_kind(ErrorKind::Internal)?,
end_reason: row.get(11).err_kind(ErrorKind::Internal)?,
prev_media_duration_and_runs: match include_prev {
false => None,
true => Some((
recording::Duration(row.get(11).err_kind(ErrorKind::Internal)?),
row.get(12).err_kind(ErrorKind::Internal)?,
recording::Duration(row.get(12).err_kind(ErrorKind::Internal)?),
row.get(13).err_kind(ErrorKind::Internal)?,
)),
},
})?;

View File

@@ -483,6 +483,9 @@ pub struct Recording {
#[serde(skip_serializing_if = "Not::not")]
pub has_trailing_zero: bool,
#[serde(skip_serializing_if = "Option::is_none")]
pub end_reason: Option<String>,
}
#[derive(Debug, Serialize)]

View File

@@ -927,7 +927,7 @@ impl FileBuilder {
pub fn append(
&mut self,
db: &db::LockedDatabase,
row: db::ListRecordingsRow,
row: &db::ListRecordingsRow,
rel_media_range_90k: Range<i32>,
start_at_key: bool,
) -> Result<(), Error> {
@@ -2364,7 +2364,7 @@ mod tests {
"skip_90k={skip_90k} shorten_90k={shorten_90k} r={r:?}"
);
builder
.append(&db, r, skip_90k..d - shorten_90k, true)
.append(&db, &r, skip_90k..d - shorten_90k, true)
.unwrap();
Ok(())
})
@@ -2492,7 +2492,7 @@ mod tests {
};
duration_so_far += row.media_duration_90k;
builder
.append(&db.db.lock(), row, d_start..d_end, start_at_key)
.append(&db.db.lock(), &row, d_start..d_end, start_at_key)
.unwrap();
}
builder.build(db.db.clone(), db.dirs_by_stream_id.clone())

View File

@@ -111,8 +111,8 @@ impl Service {
let mut rows = 0;
db.list_recordings_by_id(stream_id, live.recording..live.recording + 1, &mut |r| {
rows += 1;
builder.append(&db, &r, live.media_off_90k.clone(), start_at_key)?;
row = Some(r);
builder.append(&db, r, live.media_off_90k.clone(), start_at_key)?;
Ok(())
})?;
}

View File

@@ -486,6 +486,7 @@ impl Service {
video_sample_entry_id: row.video_sample_entry_id,
growing: row.growing,
has_trailing_zero: row.has_trailing_zero,
end_reason: row.end_reason.clone(),
});
if !out
.video_sample_entries

View File

@@ -141,7 +141,7 @@ impl Service {
r.wall_duration_90k,
r.media_duration_90k,
);
builder.append(&db, r, mr, true)?;
builder.append(&db, &r, mr, true)?;
} else {
trace!("...skipping recording {} wall dur {}", r.id, wd);
}