// This file is part of Moonfire NVR, a security camera network video recorder.
// Copyright (C) 2021 The Moonfire NVR Authors; see AUTHORS and LICENSE.txt.
// SPDX-License-Identifier: GPL-v3.0-or-later WITH GPL-3.0-linking-exception

import React, { SyntheticEvent } from "react";
import { Camera } from "../types";
import { Part, parsePart } from "./parser";
import * as api from "../api";
import Box from "@mui/material/Box";
import CircularProgress from "@mui/material/CircularProgress";
import Alert from "@mui/material/Alert";
import useResizeObserver from "@react-hook/resize-observer";
import { fillAspect } from "../aspect";

interface LiveCameraProps {
  camera: Camera | null;
  chooser: JSX.Element;
}

interface BufferStateClosed {
  state: "closed";
}

interface BufferStateOpen {
  state: "open";
  srcBuf: SourceBuffer;
  busy: boolean;
  mimeType: string;
  videoSampleEntryId: number;
}

interface BufferStateError {
  state: "error";
}

type BufferState = BufferStateClosed | BufferStateOpen | BufferStateError;

interface PlaybackStateNormal {
  state: "normal";
}

interface PlaybackStateWaiting {
  state: "waiting";
}

interface PlaybackStateError {
  state: "error";
  message: string;
}

type PlaybackState =
  | PlaybackStateNormal
  | PlaybackStateWaiting
  | PlaybackStateError;

/**
 * Drives a live camera.
 *
 * Implementation detail of LiveCamera which listens to various DOM events and
 * drives the WebSocket feed and the MediaSource and SourceBuffers.
 */
class LiveCameraDriver {
  constructor(
    camera: Camera,
    setPlaybackState: (state: PlaybackState) => void,
    setAspect: (aspect: [number, number]) => void,
    videoRef: React.RefObject<HTMLVideoElement>
  ) {
    this.camera = camera;
    this.setPlaybackState = setPlaybackState;
    this.setAspect = setAspect;
    this.videoRef = videoRef;
    this.src.addEventListener("sourceopen", this.onMediaSourceOpen);
  }

  onMediaSourceOpen = () => {
    this.startStream("sourceopen");
  };

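  /**
   * Opens the WebSocket stream of media parts for the camera's sub stream,
   * unless one is already open. `reason` is only used for logging.
   */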
  startStream = (reason: string) => {
    if (this.ws !== undefined) {
      return;
    }
    const subStream = this.camera.streams.sub;
    if (subStream === undefined || !subStream.record) {
      this.error(
        "Must have sub stream set to record; see " +
          "https://github.com/scottlamb/moonfire-nvr/issues/119 and " +
          "https://github.com/scottlamb/moonfire-nvr/issues/120"
      );
      return;
    }
    console.log(`${this.camera.shortName}: starting stream: ${reason}`);
    const loc = window.location;
    const proto = loc.protocol === "https:" ? "wss" : "ws";

    // TODO: switch between sub and main based on window size/bandwidth.
    const url = `${proto}://${loc.host}/api/cameras/${this.camera.uuid}/sub/live.m4s`;
    this.ws = new WebSocket(url);
    this.ws.addEventListener("close", this.onWsClose);
    this.ws.addEventListener("open", this.onWsOpen);
    this.ws.addEventListener("error", this.onWsError);
    this.ws.addEventListener("message", this.onWsMessage);
  };

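  /**
   * Enters the error state: stops the stream, marks the buffer unusable, and
   * surfaces `reason` to the UI via the playback state.
   */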
  error = (reason: string) => {
    console.error(`${this.camera.shortName}: aborting due to ${reason}`);
    this.stopStream(reason);
    this.buf = { state: "error" };
    this.setPlaybackState({ state: "error", message: reason });
  };

  onWsClose = (e: CloseEvent) => {
    this.error(`ws close: ${e.code} ${e.reason}`);
  };

  onWsOpen = (e: Event) => {
    console.debug(`${this.camera.shortName}: ws open`);
  };

  onWsError = (e: Event) => {
    console.error(`${this.camera.shortName}: ws error`);
  };

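  /**
   * Handles a WebSocket message containing one media part. Every part is
   * queued; the first one additionally creates the SourceBuffer and fetches
   * and appends the matching initialization segment. Queued parts are
   * appended as the SourceBuffer becomes idle.
   */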
  onWsMessage = async (e: MessageEvent<Blob>) => {
    let raw;
    try {
      raw = new Uint8Array(await e.data.arrayBuffer());
    } catch (e) {
      if (!(e instanceof DOMException)) {
        throw e;
      }
      this.error(`error reading part: ${e.message}`);
      return;
    }
    if (this.buf.state === "error") {
      console.log(`${this.camera.shortName}: onWsMessage while in state error`);
      return;
    }
    let result = parsePart(raw);
    if (result.status === "error") {
      this.error(`unparseable part: ${result.errorMessage}`);
      return;
    }
    const part = result.part;
    if (!MediaSource.isTypeSupported(part.mimeType)) {
      this.error(`unsupported mime type ${part.mimeType}`);
      return;
    }

    this.queue.push(part);
    this.queuedBytes += part.body.byteLength;
    if (this.buf.state === "closed") {
      const srcBuf = this.src.addSourceBuffer(part.mimeType);
      srcBuf.mode = "segments";
      srcBuf.addEventListener("updateend", this.bufUpdateEnd);
      srcBuf.addEventListener("error", this.bufEvent);
      srcBuf.addEventListener("abort", this.bufEvent);
      this.buf = {
        state: "open",
        srcBuf,
        busy: true,
        mimeType: part.mimeType,
        videoSampleEntryId: part.videoSampleEntryId,
      };
      let initSegmentResult = await api.init(part.videoSampleEntryId, {});
      switch (initSegmentResult.status) {
        case "error":
          this.error(`init segment fetch error: ${initSegmentResult.message}`);
          return;
        case "aborted":
          this.error(`init segment fetch aborted`);
          return;
        case "success":
          break;
      }
      this.setAspect(initSegmentResult.response.aspect);
      srcBuf.appendBuffer(initSegmentResult.response.body);
      return;
    } else if (this.buf.state === "open") {
      this.tryAppendPart(this.buf);
    }
  };

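  /**
   * Handles the SourceBuffer's `updateend` event: marks the buffer idle,
   * trims played-out media, then appends the next queued part, if any.
   */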
  bufUpdateEnd = () => {
    if (this.buf.state !== "open") {
      console.error(
        `${this.camera.shortName}: bufUpdateEnd in state ${this.buf.state}`
      );
      return;
    }
    if (!this.buf.busy) {
      this.error("bufUpdateEnd when not busy");
      return;
    }
    this.buf.busy = false;
    this.tryTrimBuffer();
    this.tryAppendPart(this.buf);
  };

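  /**
   * Appends the next queued part to `buf.srcBuf`, provided the buffer is idle
   * and a part is waiting.
   */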
  tryAppendPart = (buf: BufferStateOpen) => {
    if (buf.busy) {
      return;
    }

    const part = this.queue.shift();
    if (part === undefined) {
      return;
    }
    this.queuedBytes -= part.body.byteLength;

    if (
      part.mimeType !== buf.mimeType ||
      part.videoSampleEntryId !== buf.videoSampleEntryId
    ) {
      this.error("Switching MIME type or videoSampleEntryId unimplemented");
      return;
    }

    // Always put the new part at the end. SourceBuffer.mode "sequence" is
    // supposed to generate timestamps automatically, but on Chrome 89.0.4389.90
    // it doesn't appear to work as expected. So use SourceBuffer.mode
    // "segments" and use the existing end as the timestampOffset.
    const b = buf.srcBuf.buffered;
    buf.srcBuf.timestampOffset = b.length > 0 ? b.end(b.length - 1) : 0;

    try {
      buf.srcBuf.appendBuffer(part.body);
    } catch (e) {
      if (!(e instanceof DOMException)) {
        throw e;
      }
      // In particular, appendBuffer can throw QuotaExceededError.
      // <https://developers.google.com/web/updates/2017/10/quotaexceedederror>
      // tryTrimBuffer removes already-played stuff from the buffer to avoid
      // this, but in theory even one GOP could be more than the total buffer
      // size. At least report the error properly.
      this.error(`${e.name} while appending buffer`);
      return;
    }
    buf.busy = true;
  };

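  /**
   * Removes already-played media from the SourceBuffer (keeping a few seconds
   * of margin) so it doesn't grow without bound and hit the browser's quota.
   */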
  tryTrimBuffer = () => {
    if (
      this.buf.state !== "open" ||
      this.buf.busy ||
      this.buf.srcBuf.buffered.length === 0 ||
      this.videoRef.current === null
    ) {
      return;
    }
    const curTs = this.videoRef.current.currentTime;

    // TODO: call out key frames in the part headers. The "- 5" here is a guess
    // to avoid removing anything from the current GOP.
    const firstTs = this.buf.srcBuf.buffered.start(0);
    if (firstTs < curTs - 5) {
      console.log(`${this.camera.shortName}: trimming ${firstTs}-${curTs}`);
      this.buf.srcBuf.remove(firstTs, curTs - 5);
      this.buf.busy = true;
    }
  };

  bufEvent = (e: Event) => {
    this.error(`SourceBuffer ${e.type}`);
  };

  videoPlaying = (e: SyntheticEvent<HTMLVideoElement, Event>) => {
    if (this.buf.state !== "error") {
      this.setPlaybackState({ state: "normal" });
    }
  };

  videoWaiting = (e: SyntheticEvent<HTMLVideoElement, Event>) => {
    if (this.buf.state !== "error") {
      this.setPlaybackState({ state: "waiting" });
    }
  };

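  /**
   * Closes the WebSocket (if open) and detaches its event listeners. `reason`
   * is only used for logging.
   */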
  stopStream = (reason: string) => {
    if (this.ws === undefined) {
      return;
    }
    console.log(`${this.camera.shortName}: stopping stream: ${reason}`);
    const NORMAL_CLOSURE = 1000; // https://developer.mozilla.org/en-US/docs/Web/API/CloseEvent
    this.ws.close(NORMAL_CLOSURE);
    this.ws.removeEventListener("close", this.onWsClose);
    this.ws.removeEventListener("open", this.onWsOpen);
    this.ws.removeEventListener("error", this.onWsError);
    this.ws.removeEventListener("message", this.onWsMessage);
    this.ws = undefined;
  };

  camera: Camera;
  setPlaybackState: (state: PlaybackState) => void;
  setAspect: (aspect: [number, number]) => void;
  videoRef: React.RefObject<HTMLVideoElement>;

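  // Buffering state: parts received over the WebSocket wait in `queue` (with
  // `queuedBytes` tracking the backlog) until they can be appended to the
  // MediaSource's single SourceBuffer, tracked by `buf`.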
  src = new MediaSource();
  buf: BufferState = { state: "closed" };
  queue: Part[] = [];
  queuedBytes: number = 0;

  /// The object URL for the HTML video element, not the WebSocket URL.
  url = URL.createObjectURL(this.src);

  ws?: WebSocket;
}

/**
 * A live view of a camera.
 *
 * Note there's a significant setup cost to creating a LiveCamera, so the parent
 * should use React's <tt>key</tt> attribute to avoid unnecessarily mounting
 * and unmounting a camera.
 */
const LiveCamera = ({ camera, chooser }: LiveCameraProps) => {
  const [aspect, setAspect] = React.useState<[number, number]>([16, 9]);
  const videoRef = React.useRef<HTMLVideoElement>(null);
  const boxRef = React.useRef<HTMLElement>(null);
  const [playbackState, setPlaybackState] = React.useState<PlaybackState>({
    state: "normal",
  });

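  // Size the video element to the camera's aspect ratio whenever the
  // surrounding box is laid out or resized.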
  React.useLayoutEffect(() => {
    fillAspect(boxRef.current!.getBoundingClientRect(), videoRef, aspect);
  }, [boxRef, videoRef, aspect]);
  useResizeObserver(boxRef, (entry: ResizeObserverEntry) => {
    fillAspect(entry.contentRect, videoRef, aspect);
  });

  // Load the camera driver.
  const [driver, setDriver] = React.useState<LiveCameraDriver | null>(null);
  React.useEffect(() => {
    setPlaybackState({ state: "normal" });
    if (camera === null) {
      setDriver(null);
      return;
    }
    const d = new LiveCameraDriver(
      camera,
      setPlaybackState,
      setAspect,
      videoRef
    );
    setDriver(d);
    return () => {
      // Explicitly stop the stream on unmount. There don't seem to be any DOM
      // event handlers that run in this case. (In particular, the MediaSource's
      // sourceclose doesn't run.)
      d.stopStream("unmount or camera change");
    };
  }, [camera]);

  // Display circular progress after 100 ms of waiting.
  const [showProgress, setShowProgress] = React.useState(false);
  React.useEffect(() => {
    setShowProgress(false);
    if (playbackState.state !== "waiting") {
      return;
    }
    const timerId = setTimeout(() => setShowProgress(true), 100);
    return () => clearTimeout(timerId);
  }, [playbackState]);

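  // Only create a real <video> element once a driver exists, so its src can
  // be bound to the driver's MediaSource object URL.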
  const videoElement =
    driver === null ? (
      <video />
    ) : (
      <video
        ref={videoRef}
        muted
        autoPlay
        src={driver.url}
        onPause={() => driver.stopStream("pause")}
        onPlay={() => driver.startStream("play")}
        onPlaying={driver.videoPlaying}
        onTimeUpdate={driver.tryTrimBuffer}
        onWaiting={driver.videoWaiting}
      />
    );
  return (
    <Box
      ref={boxRef}
      sx={{
        width: "100%",
        height: "100%",
        position: "relative",
        display: "flex",
        alignItems: "center",
        justifyContent: "center",
        "& video": {
          width: "100%",
          height: "100%",

          // It'd be nice to use "contain" here so non-16x9 videos display
          // with letterboxing rather than by being stretched. Unfortunately
          // Firefox 87.0 doesn't honor the PixelAspectRatioBox of anamorphic
          // sub streams. For now, make anamorphic 16x9 sub streams display
          // correctly (at the expense of non-16x9 streams).
          // TODO: adjust width/height dynamically to handle the letterboxing
          // on non-16x9 streams.
          objectFit: "fill",
        },
        "& .controls": {
          position: "absolute",
          width: "100%",
          height: "100%",
          zIndex: 1,
        },
        "& .progress-overlay": {
          position: "absolute",
          display: "flex",
          alignItems: "center",
          justifyContent: "center",
          width: "100%",
          height: "100%",
        },
        "& .alert-overlay": {
          position: "absolute",
          display: "flex",
          width: "100%",
          height: "100%",
          alignItems: "flex-end",
          p: 1,
        },
      }}
    >
      <div className="controls">{chooser}</div>
      {showProgress && (
        <div className="progress-overlay">
          <CircularProgress />
        </div>
      )}
      {playbackState.state === "error" && (
        <div className="alert-overlay">
          <Alert severity="error">{playbackState.message}</Alert>
        </div>
      )}
      {videoElement}
    </Box>
  );
};

export default LiveCamera;