moonfire-nvr (mirror of https://github.com/scottlamb/moonfire-nvr.git)
Commit d083797e42 (parent b27df92cac): Coalesce adjacent recordings for efficiency
@@ -141,13 +141,15 @@ object has the following properties:
 * `video_sample_entry_sha1`
 * `video_sample_entry_width`
 * `video_sample_entry_height`
+* `video_samples`: the number of samples (aka frames) of video in this
+  recording.
+* TODO: recording id(s)? interior split points for coalesced recordings?
 
-TODO(slamb): consider ways to reduce the data size; this is in theory quite
-compressible but I'm not sure how effective gzip will be without some tweaks.
-One simple approach would be to just combine some adjacent list entries if
-one's start matches the other's end exactly and the `video_sample_entry_*`
-parameters are the same. So you might get one entry that represents 2 hours of
-video instead of 120 entries representing a minute each.
+Recordings may be coalesced if they are adjacent and have the same
+`video_sample_entry_*` data. That is, if recording A spans times [t, u) and
+recording B spans times [u, v), they may be returned as a single recording
+AB spanning times [t, v). Arbitrarily many recordings may be coalesced in this
+fashion.
 
 Example request URI (with added whitespace between parameters):
 
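As an illustration of the coalescing rule above, here is a minimal standalone C++ sketch. It is not the project's code: the Recording struct and Coalesce() helper are hypothetical names, and the input is assumed to be sorted by ascending start time.

// Hypothetical illustration of the coalescing rule described in the API doc.
#include <cstdint>
#include <string>
#include <vector>

struct Recording {
  int64_t start_time_90k;    // start of [t, u), in 90 kHz time units
  int64_t end_time_90k;      // end of [t, u)
  int64_t video_samples;
  int64_t sample_file_bytes;
  std::string video_sample_entry_sha1;  // stands in for the video_sample_entry_* data
};

// Merge adjacent recordings: A spanning [t, u) and B spanning [u, v) become
// AB spanning [t, v) when B starts exactly where A ends and both share the
// same video sample entry. Assumes |in| is sorted by ascending start time.
std::vector<Recording> Coalesce(const std::vector<Recording> &in) {
  std::vector<Recording> out;
  for (const Recording &r : in) {
    if (!out.empty() &&
        out.back().end_time_90k == r.start_time_90k &&
        out.back().video_sample_entry_sha1 == r.video_sample_entry_sha1) {
      out.back().end_time_90k = r.end_time_90k;
      out.back().video_samples += r.video_samples;
      out.back().sample_file_bytes += r.sample_file_bytes;
    } else {
      out.push_back(r);
    }
  }
  return out;
}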
src/web.cc (67 lines changed)
@@ -193,6 +193,37 @@ void WebInterface::HandleJsonCameraList(evhttp_request *req) {
   ReplyWithJson(req, cameras);
 }
 
+bool WebInterface::ListAggregatedCameraRecordings(
+    Uuid camera_uuid, int64_t start_time_90k, int64_t end_time_90k,
+    int64_t forced_split_duration_90k,
+    const std::function<void(const ListCameraRecordingsRow &)> &fn,
+    std::string *error_message) {
+  ListCameraRecordingsRow aggregated;
+  auto handle_sql_row = [&](const ListCameraRecordingsRow &row) {
+    auto new_duration_90k = aggregated.end_time_90k - row.start_time_90k;
+    if (row.video_sample_entry_sha1 == aggregated.video_sample_entry_sha1 &&
+        row.end_time_90k == aggregated.start_time_90k &&
+        new_duration_90k < forced_split_duration_90k) {
+      // Append to current .mp4.
+      aggregated.start_time_90k = row.start_time_90k;
+      aggregated.video_samples += row.video_samples;
+      aggregated.sample_file_bytes += row.sample_file_bytes;
+    } else {
+      // Start a new .mp4.
+      if (aggregated.start_time_90k != -1) { fn(aggregated); }
+      aggregated = row;
+    }
+    return IterationControl::kContinue;
+  };
+  if (!env_->mdb->ListCameraRecordings(camera_uuid, start_time_90k,
+                                       end_time_90k, handle_sql_row,
+                                       error_message)) {
+    return false;
+  }
+  if (aggregated.start_time_90k != -1) { fn(aggregated); }
+  return true;
+}
+
 void WebInterface::HandleHtmlCameraDetail(evhttp_request *req,
                                           Uuid camera_uuid) {
   GetCameraRow camera_row;
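For orientation, a hypothetical call to the new helper might look like the fragment below. It is a sketch only, written as if from inside WebInterface; the row-collecting harness, the specific window and split durations, and the in-scope camera_uuid and start_90k are assumptions, not part of the commit.

// Hypothetical usage sketch: collect coalesced rows for a one-hour window
// starting at start_90k, forcing a split at most every ten minutes.
// All values are in 90 kHz time units.
std::vector<ListCameraRecordingsRow> rows;
std::string error_message;
const int64_t window_90k = 60 * 60 * kTimeUnitsPerSecond;  // one hour
const int64_t split_90k = 10 * 60 * kTimeUnitsPerSecond;   // ten minutes
if (!ListAggregatedCameraRecordings(
        camera_uuid, start_90k, start_90k + window_90k, split_90k,
        [&](const ListCameraRecordingsRow &row) { rows.push_back(row); },
        &error_message)) {
  // Handle the error, e.g. report error_message to the client.
}
// rows now holds one entry per coalesced .mp4 segment.

From the merge condition (row.end_time_90k == aggregated.start_time_90k, with aggregated.start_time_90k then moved back to row.start_time_90k), the underlying query appears to return rows in descending time order, so each new row is effectively prepended to the current aggregate.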
@@ -234,11 +265,7 @@ void WebInterface::HandleHtmlCameraDetail(evhttp_request *req,
   // aggregated .mp4 files of up to kForceSplitDuration90k each, provided
   // there is no gap or change in video parameters between recordings.
   static const int64_t kForceSplitDuration90k = 60 * 60 * kTimeUnitsPerSecond;
-  ListCameraRecordingsRow aggregated;
-  auto maybe_finish_html_row = [&]() {
-    if (aggregated.start_time_90k == -1) {
-      return;  // there is no row to finish.
-    }
+  auto finish_html_row = [&](const ListCameraRecordingsRow &aggregated) {
     auto seconds = static_cast<float>(aggregated.end_time_90k -
                                       aggregated.start_time_90k) /
                    kTimeUnitsPerSecond;
@@ -258,31 +285,14 @@ void WebInterface::HandleHtmlCameraDetail(evhttp_request *req,
         "bps")
         .c_str());
   };
-  auto handle_sql_row = [&](const ListCameraRecordingsRow &row) {
-    auto new_duration_90k = aggregated.end_time_90k - row.start_time_90k;
-    if (row.video_sample_entry_sha1 == aggregated.video_sample_entry_sha1 &&
-        row.end_time_90k == aggregated.start_time_90k &&
-        new_duration_90k < kForceSplitDuration90k) {
-      // Append to current .mp4.
-      aggregated.start_time_90k = row.start_time_90k;
-      aggregated.video_samples += row.video_samples;
-      aggregated.sample_file_bytes += row.sample_file_bytes;
-    } else {
-      // Start a new .mp4.
-      maybe_finish_html_row();
-      aggregated = row;
-    }
-    return IterationControl::kContinue;
-  };
   std::string error_message;
-  if (!env_->mdb->ListCameraRecordings(camera_uuid, start_time_90k,
-                                       end_time_90k, handle_sql_row,
-                                       &error_message)) {
+  if (!ListAggregatedCameraRecordings(camera_uuid, start_time_90k,
+                                      end_time_90k, kForceSplitDuration90k,
+                                      finish_html_row, &error_message)) {
     return evhttp_send_error(
         req, HTTP_INTERNAL,
         StrCat("sqlite query failed: ", EscapeHtml(error_message)).c_str());
   }
-  maybe_finish_html_row();
   buf.Add(
       "</table>\n"
       "</html>\n");
@@ -366,9 +376,10 @@ void WebInterface::HandleJsonCameraRecordings(evhttp_request *req,
     return IterationControl::kContinue;
   };
   std::string error_message;
-  if (!env_->mdb->ListCameraRecordings(camera_uuid, start_time_90k,
-                                       end_time_90k, handle_row,
-                                       &error_message)) {
+  const auto kForceSplitDuration90k = std::numeric_limits<int64_t>::max();
+  if (!ListAggregatedCameraRecordings(camera_uuid, start_time_90k,
+                                      end_time_90k, kForceSplitDuration90k,
+                                      handle_row, &error_message)) {
     return evhttp_send_error(
         req, HTTP_INTERNAL,
         StrCat("sqlite query failed: ", EscapeHtml(error_message)).c_str());
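The two callers above choose different forced-split policies. The sketch below summarizes them, assuming kTimeUnitsPerSecond is 90,000 (the 90 kHz time base implied by the *_90k suffixes); kHtmlForceSplit90k and kJsonForceSplit90k are illustrative names, not identifiers from the commit.

#include <cstdint>
#include <limits>

// Assumed value for illustration; the real constant is defined elsewhere.
static const int64_t kTimeUnitsPerSecond = 90000;

// HTML camera detail: force a new table row at least every hour of video,
// i.e. 60 * 60 * 90000 = 324,000,000 time units.
static const int64_t kHtmlForceSplit90k = 60 * 60 * kTimeUnitsPerSecond;

// JSON recordings list: never force a split; entries are broken only by a
// gap between recordings or a change in video_sample_entry_* parameters.
static const int64_t kJsonForceSplit90k = std::numeric_limits<int64_t>::max();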
@@ -72,6 +72,12 @@ class WebInterface {
   void HandleJsonCameraRecordings(evhttp_request *req, Uuid camera_uuid);
   void HandleMp4View(evhttp_request *req, Uuid camera_uuid);
 
+  bool ListAggregatedCameraRecordings(
+      Uuid camera_uuid, int64_t start_time_90k, int64_t end_time_90k,
+      int64_t forced_split_duration_90k,
+      const std::function<void (const ListCameraRecordingsRow &)> &fn,
+      std::string *error_message);
+
   // TODO: more nuanced error code for HTTP.
   std::shared_ptr<VirtualFile> BuildMp4(Uuid camera_uuid,
                                         int64_t start_time_90k,