remove half-baked analytics module

This is (slightly) complicating the switch from ffmpeg to retina
as the RTSP client. And it's not really that close to what I want
to end up with for analytics:

*   I'd prefer the analytics happen in a separate process for
    several reasons
*   Feeding the entire frame to the object detector doesn't produce
    good results.
*   It doesn't do anything with the results yet anyway.
Scott Lamb 2021-06-06 21:14:42 -07:00
parent cf57073d6e
commit 7699696bd9
7 changed files with 0 additions and 327 deletions

server/Cargo.lock (generated)

@@ -1252,7 +1252,6 @@ dependencies = [
  "moonfire-base",
  "moonfire-db",
  "moonfire-ffmpeg",
- "moonfire-tflite",
  "mylog",
  "nix",
  "nom",
@@ -1278,16 +1277,6 @@ dependencies = [
  "uuid",
 ]
 
-[[package]]
-name = "moonfire-tflite"
-version = "0.0.1"
-source = "git+https://github.com/scottlamb/moonfire-tflite#b1d30c09045c02966249676fd716e917761a7de5"
-dependencies = [
- "cc",
- "libc",
- "log",
-]
-
 [[package]]
 name = "mp4ra-rust"
 version = "0.1.0"

server/Cargo.toml

@@ -15,8 +15,6 @@ nightly = ["db/nightly", "parking_lot/nightly", "smallvec/union"]
 # native libraries where possible.
 bundled = ["rusqlite/bundled"]
 
-analytics = ["moonfire-tflite", "ffmpeg/swscale"]
-
 [workspace]
 members = ["base", "db"]
 
@@ -41,7 +39,6 @@ lazy_static = "1.0"
 libc = "0.2"
 log = { version = "0.4" }
 memchr = "2.0.2"
-moonfire-tflite = { git = "https://github.com/scottlamb/moonfire-tflite", features = ["edgetpu"], optional = true }
 mylog = { git = "https://github.com/scottlamb/mylog" }
 nix = "0.20.0"
 nom = "6.0.0"

server/src/analytics.rs

@@ -1,246 +0,0 @@
-// This file is part of Moonfire NVR, a security camera network video recorder.
-// Copyright (C) 2020 The Moonfire NVR Authors; see AUTHORS and LICENSE.txt.
-// SPDX-License-Identifier: GPL-v3.0-or-later WITH GPL-3.0-linking-exception.
-
-//! Video analytics via TensorFlow Lite and an Edge TPU.
-//!
-//! Note this module is only compiled with `--features=analytics`. There's a stub implementation in
-//! `src/main.rs` which is used otherwise.
-//!
-//! Currently results are only logged (rather spammily, on each frame), not persisted to the
-//! database. This will change soon.
-//!
-//! Currently does object detection on every frame with a single hardcoded model: the 300x300
-//! MobileNet SSD v2 (COCO) from https://coral.ai/models/. Eventually analytics might include:
-//!
-//! * an object detection model retrained on surveillance images and/or larger input sizes
-//!   for increased accuracy.
-//! * multiple invocations per image to improve resolution with current model sizes
-//!   (either fixed, overlapping subsets of the image or zooming in on full-frame detections to
-//!   increase confidence).
-//! * support for other hardware setups (GPUs, other brands of NPUs).
-//! * a motion detection model.
-//! * H.264/H.265 decoding on every frame but performing object detection at a minimum pts
-//!   interval to cut down on expense.
-
-use cstr::cstr;
-use failure::{format_err, Error};
-use ffmpeg;
-use log::info;
-use std::sync::Arc;
-
-static MODEL: &[u8] = include_bytes!("edgetpu.tflite");
-
-//static MODEL_UUID: Uuid = Uuid::from_u128(0x02054a38_62cf_42ff_9ffa_04876a2970d0_u128);
-
-pub static MODEL_LABELS: [Option<&str>; 90] = [
-    Some("person"),
-    Some("bicycle"),
-    Some("car"),
-    Some("motorcycle"),
-    Some("airplane"),
-    Some("bus"),
-    Some("train"),
-    Some("truck"),
-    Some("boat"),
-    Some("traffic light"),
-    Some("fire hydrant"),
-    None,
-    Some("stop sign"),
-    Some("parking meter"),
-    Some("bench"),
-    Some("bird"),
-    Some("cat"),
-    Some("dog"),
-    Some("horse"),
-    Some("sheep"),
-    Some("cow"),
-    Some("elephant"),
-    Some("bear"),
-    Some("zebra"),
-    Some("giraffe"),
-    None,
-    Some("backpack"),
-    Some("umbrella"),
-    None,
-    None,
-    Some("handbag"),
-    Some("tie"),
-    Some("suitcase"),
-    Some("frisbee"),
-    Some("skis"),
-    Some("snowboard"),
-    Some("sports ball"),
-    Some("kite"),
-    Some("baseball bat"),
-    Some("baseball glove"),
-    Some("skateboard"),
-    Some("surfboard"),
-    Some("tennis racket"),
-    Some("bottle"),
-    None,
-    Some("wine glass"),
-    Some("cup"),
-    Some("fork"),
-    Some("knife"),
-    Some("spoon"),
-    Some("bowl"),
-    Some("banana"),
-    Some("apple"),
-    Some("sandwich"),
-    Some("orange"),
-    Some("broccoli"),
-    Some("carrot"),
-    Some("hot dog"),
-    Some("pizza"),
-    Some("donut"),
-    Some("cake"),
-    Some("chair"),
-    Some("couch"),
-    Some("potted plant"),
-    Some("bed"),
-    None,
-    Some("dining table"),
-    None,
-    None,
-    Some("toilet"),
-    None,
-    Some("tv"),
-    Some("laptop"),
-    Some("mouse"),
-    Some("remote"),
-    Some("keyboard"),
-    Some("cell phone"),
-    Some("microwave"),
-    Some("oven"),
-    Some("toaster"),
-    Some("sink"),
-    Some("refrigerator"),
-    None,
-    Some("book"),
-    Some("clock"),
-    Some("vase"),
-    Some("scissors"),
-    Some("teddy bear"),
-    Some("hair drier"),
-    Some("toothbrush"),
-];
-
-pub struct ObjectDetector {
-    interpreter: parking_lot::Mutex<moonfire_tflite::Interpreter<'static>>,
-    width: i32,
-    height: i32,
-}
-
-impl ObjectDetector {
-    pub fn new(/*db: &db::LockedDatabase*/) -> Result<Arc<Self>, Error> {
-        let model = moonfire_tflite::Model::from_static(MODEL)
-            .map_err(|()| format_err!("TensorFlow Lite model initialization failed"))?;
-        let devices = moonfire_tflite::edgetpu::Devices::list();
-        let device = devices
-            .first()
-            .ok_or_else(|| format_err!("No Edge TPU device available"))?;
-        info!(
-            "Using device {:?}/{:?} for object detection",
-            device.type_(),
-            device.path()
-        );
-        let mut builder = moonfire_tflite::Interpreter::builder();
-        builder.add_owned_delegate(device.create_delegate().map_err(|()| {
-            format_err!(
-                "Unable to create delegate for {:?}/{:?}",
-                device.type_(),
-                device.path()
-            )
-        })?);
-        let interpreter = builder
-            .build(&model)
-            .map_err(|()| format_err!("TensorFlow Lite initialization failed"))?;
-        Ok(Arc::new(Self {
-            interpreter: parking_lot::Mutex::new(interpreter),
-            width: 300, // TODO
-            height: 300,
-        }))
-    }
-}
-
-pub struct ObjectDetectorStream {
-    decoder: ffmpeg::avcodec::DecodeContext,
-    frame: ffmpeg::avutil::VideoFrame,
-    scaler: ffmpeg::swscale::Scaler,
-    scaled: ffmpeg::avutil::VideoFrame,
-}
-
-/// Copies from a RGB24 VideoFrame to a 1xHxWx3 Tensor.
-fn copy(from: &ffmpeg::avutil::VideoFrame, to: &mut moonfire_tflite::Tensor) {
-    let from = from.plane(0);
-    let to = to.bytes_mut();
-    let (w, h) = (from.width, from.height);
-    let mut from_i = 0;
-    let mut to_i = 0;
-    for _y in 0..h {
-        to[to_i..to_i + 3 * w].copy_from_slice(&from.data[from_i..from_i + 3 * w]);
-        from_i += from.linesize;
-        to_i += 3 * w;
-    }
-}
-
-const SCORE_THRESHOLD: f32 = 0.5;
-
-impl ObjectDetectorStream {
-    pub fn new(
-        par: ffmpeg::avcodec::InputCodecParameters<'_>,
-        detector: &ObjectDetector,
-    ) -> Result<Self, Error> {
-        let mut dopt = ffmpeg::avutil::Dictionary::new();
-        dopt.set(cstr!("refcounted_frames"), cstr!("0"))?;
-        let decoder = par.new_decoder(&mut dopt)?;
-        let scaled = ffmpeg::avutil::VideoFrame::owned(ffmpeg::avutil::ImageDimensions {
-            width: detector.width,
-            height: detector.height,
-            pix_fmt: ffmpeg::avutil::PixelFormat::rgb24(),
-        })?;
-        let frame = ffmpeg::avutil::VideoFrame::empty()?;
-        let scaler = ffmpeg::swscale::Scaler::new(par.dims(), scaled.dims())?;
-        Ok(Self {
-            decoder,
-            frame,
-            scaler,
-            scaled,
-        })
-    }
-
-    pub fn process_frame(
-        &mut self,
-        pkt: &ffmpeg::avcodec::Packet<'_>,
-        detector: &ObjectDetector,
-    ) -> Result<(), Error> {
-        if !self.decoder.decode_video(pkt, &mut self.frame)? {
-            return Ok(());
-        }
-        self.scaler.scale(&self.frame, &mut self.scaled);
-        let mut interpreter = detector.interpreter.lock();
-        copy(&self.scaled, &mut interpreter.inputs()[0]);
-        interpreter
-            .invoke()
-            .map_err(|()| format_err!("TFLite interpreter invocation failed"))?;
-        let outputs = interpreter.outputs();
-        let classes = outputs[1].f32s();
-        let scores = outputs[2].f32s();
-        for (i, &score) in scores.iter().enumerate() {
-            if score < SCORE_THRESHOLD {
-                continue;
-            }
-            let class = classes[i] as usize;
-            if class >= MODEL_LABELS.len() {
-                continue;
-            }
-            let label = match MODEL_LABELS[class] {
-                None => continue,
-                Some(l) => l,
-            };
-            info!("{}, score {}", label, score);
-        }
-        Ok(())
-    }
-}
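A note on the second bullet of the commit message: the doc comment above names "multiple invocations per image ... fixed, overlapping subsets of the image" as one remedy for whole-frame detection at 300x300 not producing good results. As a rough, hypothetical sketch only (this helper never existed in the deleted module; its name and parameters are invented for illustration), enumerating overlapping 300x300 crop origins could look like:

/// Illustrative only: returns the top-left corners of `tile`-sized square crops
/// covering a `width` x `height` frame, with adjacent crops overlapping by
/// `overlap` pixels so objects near crop borders are still seen whole by one crop.
fn tile_origins(width: u32, height: u32, tile: u32, overlap: u32) -> Vec<(u32, u32)> {
    assert!(tile > overlap, "crops must advance by at least one pixel");
    let step = tile - overlap;
    let mut origins = Vec::new();
    let mut y = 0;
    loop {
        let mut x = 0;
        loop {
            // Clamp so the final crop in each row/column stays inside the frame.
            origins.push((
                x.min(width.saturating_sub(tile)),
                y.min(height.saturating_sub(tile)),
            ));
            if x + tile >= width {
                break;
            }
            x += step;
        }
        if y + tile >= height {
            break;
        }
        y += step;
    }
    origins
}

fn main() {
    // E.g. a 1920x1080 sub stream with 300x300 crops overlapping by 60 pixels.
    for (x, y) in tile_origins(1920, 1080, 300, 60) {
        println!("crop at ({}, {})", x, y);
    }
}

Each 300x300 crop could then be fed through the same stride-aware copy shown above in place of the downscaled full frame; whether that is worth the extra interpreter invocations per frame is exactly the kind of question the commit defers.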

server/src/cmds/run.rs

@@ -65,12 +65,6 @@ pub struct Args {
     /// --http-addr=127.0.0.1:8080.
     #[structopt(long)]
     trust_forward_hdrs: bool,
-
-    /// Perform object detection on SUB streams.
-    ///
-    /// Note: requires compilation with --feature=analytics.
-    #[structopt(long)]
-    object_detection: bool,
 }
 
 // These are used in a hack to get the name of the current time zone (e.g. America/Los_Angeles).
@@ -176,11 +170,6 @@ pub async fn run(args: &Args) -> Result<i32, Error> {
     let db = Arc::new(db::Database::new(clocks, conn, !args.read_only).unwrap());
     info!("Database is loaded.");
 
-    let object_detector = match args.object_detection {
-        false => None,
-        true => Some(crate::analytics::ObjectDetector::new()?),
-    };
-
     {
         let mut l = db.lock();
         let dirs_to_open: Vec<_> = l
@@ -258,10 +247,6 @@ pub async fn run(args: &Args) -> Result<i32, Error> {
             };
             let rotate_offset_sec = streamer::ROTATE_INTERVAL_SEC * i as i64 / streams as i64;
            let syncer = syncers.get(&sample_file_dir_id).unwrap();
-            let object_detector = match stream.type_ {
-                db::StreamType::Sub => object_detector.clone(),
-                _ => None,
-            };
             let mut streamer = streamer::Streamer::new(
                 &env,
                 syncer.dir.clone(),
@@ -271,7 +256,6 @@ pub async fn run(args: &Args) -> Result<i32, Error> {
                 stream,
                 rotate_offset_sec,
                 streamer::ROTATE_INTERVAL_SEC,
-                object_detector,
             )?;
             info!("Starting streamer for {}", streamer.short_name());
             let name = format!("s-{}", streamer.short_name());

server/src/edgetpu.tflite
Binary file not shown.

server/src/main.rs

@@ -9,42 +9,6 @@ use std::fmt::Write;
 use std::str::FromStr;
 use structopt::StructOpt;
 
-#[cfg(feature = "analytics")]
-mod analytics;
-
-/// Stub implementation of analytics module when not compiled with TensorFlow Lite.
-#[cfg(not(feature = "analytics"))]
-mod analytics {
-    use failure::{bail, Error};
-
-    pub struct ObjectDetector;
-
-    impl ObjectDetector {
-        pub fn new() -> Result<std::sync::Arc<ObjectDetector>, Error> {
-            bail!("Recompile with --features=analytics for object detection.");
-        }
-    }
-
-    pub struct ObjectDetectorStream;
-
-    impl ObjectDetectorStream {
-        pub fn new(
-            _par: ffmpeg::avcodec::InputCodecParameters<'_>,
-            _detector: &ObjectDetector,
-        ) -> Result<Self, Error> {
-            unimplemented!();
-        }
-
-        pub fn process_frame(
-            &mut self,
-            _pkt: &ffmpeg::avcodec::Packet<'_>,
-            _detector: &ObjectDetector,
-        ) -> Result<(), Error> {
-            unimplemented!();
-        }
-    }
-}
-
 mod body;
 mod cmds;
 mod h264;

server/src/streamer.rs

@@ -46,7 +46,6 @@ where
     short_name: String,
     url: Url,
     redacted_url: Url,
-    detector: Option<Arc<crate::analytics::ObjectDetector>>,
 }
 
 impl<'a, C, S> Streamer<'a, C, S>
@@ -63,7 +62,6 @@ where
         s: &Stream,
         rotate_offset_sec: i64,
         rotate_interval_sec: i64,
-        detector: Option<Arc<crate::analytics::ObjectDetector>>,
     ) -> Result<Self, Error> {
         let mut url = Url::parse(&s.rtsp_url)?;
         let mut redacted_url = url.clone();
@@ -86,7 +84,6 @@ where
             short_name: format!("{}-{}", c.short_name, s.type_.as_str()),
             url,
             redacted_url,
-            detector,
         })
     }
 
@@ -122,14 +119,6 @@ where
             })?
         };
         let realtime_offset = self.db.clocks().realtime() - clocks.monotonic();
-        // TODO: verify width/height.
-        let mut detector_stream = match self.detector.as_ref() {
-            None => None,
-            Some(od) => Some(crate::analytics::ObjectDetectorStream::new(
-                stream.get_video_codecpar(),
-                &od,
-            )?),
-        };
         let extra_data = stream.get_extra_data()?;
         let video_sample_entry_id = {
             let _t = TimerGuard::new(&clocks, || "inserting video sample entry");
@@ -159,9 +148,6 @@ where
                 debug!("{}: have first key frame", self.short_name);
                 seen_key_frame = true;
             }
-            if let (Some(a_s), Some(a)) = (detector_stream.as_mut(), self.detector.as_ref()) {
-                a_s.process_frame(&pkt, &a)?;
-            }
             let frame_realtime = clocks.monotonic() + realtime_offset;
             let local_time = recording::Time::new(frame_realtime);
             rotate = if let Some(r) = rotate {
@@ -412,7 +398,6 @@ mod tests {
                 s,
                 0,
                 3,
-                None,
             )
            .unwrap();
        }