Commit 3ed397bacd
When compiled with cargo build --features=analytics and enabled via moonfire-nvr run --object-detection, this runs object detection on every sub stream frame through an Edge TPU (a Coral USB accelerator) and logs the result.

This is a very small step toward a working system. It doesn't yet record the result in the database or send it out on the live stream, and it doesn't support running object detection at a lower frame rate than the sub streams arrive at. Addressing those problems requires some refactoring: currently moonfire_db::writer::Writer::Write is the only place that knows the duration of the frame it's about to flush, before the frame is added to the index or sent out on the live stream. I don't want to do the detection from there; I'd prefer the moonfire_nvr crate. So I either need to introduce an analytics callback or move a bunch of that logic to the other crate. Once that's done, I need to add database support (I have some experiments for that in moonfire-playground) and API support, then some kind of useful frontend.

Note that edgetpu.tflite is taken from the Apache 2.0-licensed https://github.com/google-coral/edgetpu (test_data/mobilenet_ssd_v2_coco_quant_postprocess_edgetpu.tflite). Including Apache 2.0 code in a GPLv3 project is fine, per https://www.apache.org/licenses/GPL-compatibility.html.
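As a rough sketch of the analytics-callback option mentioned above, the writer could hand each frame plus its just-computed duration to a trait supplied by the moonfire_nvr crate. All names and signatures below are invented for illustration; nothing like this exists in the commit yet.

// Hypothetical sketch only; not part of this commit. A callback the writer
// could invoke once it knows a frame's duration, so the detection logic can
// live in the moonfire_nvr crate rather than in moonfire_db.
pub trait FrameAnalytics: Send {
    /// Called with the encoded frame and its duration in 90 kHz units.
    /// An implementation may skip frames to detect at a reduced rate.
    fn frame(&mut self, data: &[u8], duration_90k: i32);
}

/// No-op implementation for when --object-detection is disabled.
pub struct NoopAnalytics;

impl FrameAnalytics for NoopAnalytics {
    fn frame(&mut self, _data: &[u8], _duration_90k: i32) {}
}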
185 lines
6.5 KiB
Rust
// This file is part of Moonfire NVR, a security camera network video recorder.
// Copyright (C) 2016 The Moonfire NVR Authors
//
// This program is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
//
// In addition, as a special exception, the copyright holders give
// permission to link the code of portions of this program with the
// OpenSSL library under certain conditions as described in each
// individual source file, and distribute linked combinations including
// the two.
//
// You must obey the GNU General Public License in all respects for all
// of the code used other than OpenSSL. If you modify file(s) with this
// exception, you may extend this exception to your version of the
// file(s), but you are not obligated to do so. If you do not wish to do
// so, delete this exception statement from your version. If you delete
// this exception statement from all source files in the program, then
// also delete it here.
//
// This program is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
//
// You should have received a copy of the GNU General Public License
// along with this program. If not, see <http://www.gnu.org/licenses/>.

use crate::h264;
use cstr::*;
use failure::{Error, bail};
use ffmpeg;
use lazy_static::lazy_static;
use log::{debug, info, warn};
use std::convert::TryFrom;
use std::ffi::CString;
use std::result::Result;

static START: parking_lot::Once = parking_lot::Once::new();

lazy_static! {
    pub static ref FFMPEG: Ffmpeg = Ffmpeg::new();
}

pub enum Source<'a> {
    /// A filename, for testing.
    #[cfg(test)]
    File(&'a str),

    /// An RTSP stream, for production use.
    Rtsp {
        url: &'a str,
        redacted_url: &'a str
    },
}

pub trait Opener<S : Stream> : Sync {
    fn open(&self, src: Source) -> Result<S, Error>;
}

pub trait Stream {
    fn get_video_codecpar(&self) -> ffmpeg::avcodec::InputCodecParameters<'_>;
    fn get_extra_data(&self) -> Result<h264::ExtraData, Error>;
    fn get_next<'p>(&'p mut self) -> Result<ffmpeg::avcodec::Packet<'p>, ffmpeg::Error>;
}

pub struct Ffmpeg {}

impl Ffmpeg {
    fn new() -> Ffmpeg {
        START.call_once(|| { ffmpeg::Ffmpeg::new(); });
        Ffmpeg{}
    }
}

impl Opener<FfmpegStream> for Ffmpeg {
    fn open(&self, src: Source) -> Result<FfmpegStream, Error> {
        use ffmpeg::avformat::InputFormatContext;
        let (mut input, discard_first) = match src {
            #[cfg(test)]
            Source::File(filename) => {
                let mut open_options = ffmpeg::avutil::Dictionary::new();

                // Work around https://github.com/scottlamb/moonfire-nvr/issues/10
                open_options.set(cstr!("advanced_editlist"), cstr!("false")).unwrap();
                let url = format!("file:{}", filename);
                let i = InputFormatContext::open(&CString::new(url.clone()).unwrap(),
                                                 &mut open_options)?;
                if !open_options.empty() {
                    warn!("While opening URL {}, some options were not understood: {}",
                          url, open_options);
                }
                (i, false)
            }
            Source::Rtsp{url, redacted_url} => {
                let mut open_options = ffmpeg::avutil::Dictionary::new();
                open_options.set(cstr!("rtsp_transport"), cstr!("tcp")).unwrap();
                open_options.set(cstr!("user-agent"), cstr!("moonfire-nvr")).unwrap();
                // 10-second socket timeout, in microseconds.
                open_options.set(cstr!("stimeout"), cstr!("10000000")).unwrap();

                // Moonfire NVR currently only supports video, so receiving audio is wasteful.
                // It also triggers <https://github.com/scottlamb/moonfire-nvr/issues/36>.
                open_options.set(cstr!("allowed_media_types"), cstr!("video")).unwrap();

                let i = InputFormatContext::open(&CString::new(url).unwrap(), &mut open_options)?;
                if !open_options.empty() {
                    warn!("While opening URL {}, some options were not understood: {}",
                          redacted_url, open_options);
                }
                (i, true)
            },
        };

        input.find_stream_info()?;

        // Find the video stream.
        let mut video_i = None;
        {
            let s = input.streams();
            for i in 0 .. s.len() {
                if s.get(i).codecpar().codec_type().is_video() {
                    debug!("Video stream index is {}", i);
                    video_i = Some(i);
                    break;
                }
            }
        }
        let video_i = match video_i {
            Some(i) => i,
            None => bail!("no video stream"),
        };

        let mut stream = FfmpegStream{
            input,
            video_i,
        };

        if discard_first {
            info!("Discarding the first packet to work around https://trac.ffmpeg.org/ticket/5018");
            stream.get_next()?;
        }

        Ok(stream)
    }
}

pub struct FfmpegStream {
    input: ffmpeg::avformat::InputFormatContext<'static>,
    video_i: usize,
}

impl Stream for FfmpegStream {
    fn get_video_codecpar(&self) -> ffmpeg::avcodec::InputCodecParameters {
        self.input.streams().get(self.video_i).codecpar()
    }

    fn get_extra_data(&self) -> Result<h264::ExtraData, Error> {
        let video = self.input.streams().get(self.video_i);
        let tb = video.time_base();
        if tb.num != 1 || tb.den != 90000 {
            bail!("video stream has timebase {}/{}; expected 1/90000", tb.num, tb.den);
        }
        let codec = video.codecpar();
        let codec_id = codec.codec_id();
        if !codec_id.is_h264() {
            bail!("stream's video codec {:?} is not h264", codec_id);
        }
        let dims = codec.dims();
        h264::ExtraData::parse(codec.extradata(), u16::try_from(dims.width)?,
                               u16::try_from(dims.height)?)
    }

    fn get_next<'i>(&'i mut self) -> Result<ffmpeg::avcodec::Packet<'i>, ffmpeg::Error> {
        loop {
            let p = self.input.read_frame()?;
            if p.stream_index() == self.video_i {
                return Ok(p);
            }
        }
    }
}
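For context, a hedged usage sketch of the Opener/Stream API defined in this file. The RTSP URL and credentials are placeholders, and the sketch assumes it runs in the same module so the Opener trait is in scope.

// Usage sketch only; URL and credentials are placeholders.
fn example() -> Result<(), failure::Error> {
    let mut stream = FFMPEG.open(Source::Rtsp {
        url: "rtsp://admin:secret@192.168.0.10/",
        redacted_url: "rtsp://admin:redacted@192.168.0.10/",
    })?;
    let _extra = stream.get_extra_data()?;  // H.264 parameters (SPS/PPS, dimensions)
    let _pkt = stream.get_next()?;          // next video packet from the stream
    Ok(())
}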