mirror of https://github.com/scottlamb/moonfire-nvr.git, synced 2024-12-24 22:25:55 -05:00
UI: improve aspect ratio handling
As written in the changelog: Live streams formerly worked around a Firefox pixel aspect ratio bug by forcing all videos to 16:9, which dramatically distorted 9:16 camera views. Playback didn't have the same workaround, so anamorphic videos looked correct on Chrome but slightly stretched on Firefox. Now both live streams and playback are fully correct on all browsers.
This commit is contained in:
parent 115b081dcd
commit 27395ecd4e
@@ -8,6 +8,13 @@ Each release is tagged in Git and on the Docker repository

## unreleased

* UI: improve video aspect ratio handling. Live streams formerly worked
  around a Firefox pixel aspect ratio bug by forcing all videos to 16:9, which
  dramatically distorted 9:16 camera views. Playback didn't have the same
  workaround, so anamorphic videos looked correct on Chrome but slightly
  stretched on Firefox. Now both live streams and playback are fully correct
  on all browsers.

## `v0.6.4` (2021-06-28)

* Default to a new pure-Rust RTSP library, `retina`. If you hit problems, you
@@ -333,6 +333,10 @@ the following properties:
  section 12.1.4.3 `PixelAspectRatioBox`. If absent, assumed to be 1.
* `pixelVSpacing`: the relative height of a pixel, as in an ISO/IEC 14496-12
  section 12.1.4.3 `PixelAspectRatioBox`. If absent, assumed to be 1.
* `aspectWidth`: the width component of the aspect ratio. (The aspect ratio
  can be computed from the dimensions and pixel spacing; it's included as a
  convenience.)
* `aspectHeight`: the height component of the aspect ratio.

The full initialization segment data for a given video sample entry can be
retrieved at the URL `/api/init/<id>.mp4`.
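
A minimal sketch of the relationship between these fields (not part of the commit; the helper name and the sample dimensions/pixel spacings are illustrative): `aspectWidth:aspectHeight` is `(width * pixelHSpacing) : (height * pixelVSpacing)` reduced to lowest terms, which matches what the server-side `VideoSampleEntry::aspect()` added in this commit computes with `num_rational`.

```typescript
// Sketch only: derive the aspect ratio a client could compute for itself from
// the width/height and pixel-spacing fields documented above. The sample
// values below (704x480 with 40:33 pixel spacing) are illustrative.
const gcd = (a: number, b: number): number => (b === 0 ? a : gcd(b, a % b));

function aspectOf(
  width: number,
  height: number,
  pixelHSpacing = 1, // an absent spacing is assumed to be 1, per the docs above
  pixelVSpacing = 1
): [number, number] {
  const w = width * pixelHSpacing;
  const h = height * pixelVSpacing;
  const g = gcd(w, h);
  return [w / g, h / g];
}

console.log(aspectOf(1920, 1080)); // square pixels -> [16, 9]
console.log(aspectOf(704, 480, 40, 33)); // anamorphic entry -> [16, 9]
```
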
@@ -584,6 +588,11 @@ Returns a `.mp4` suitable for use as a [HTML5 Media Source Extensions
initialization segment][init-segment]. The MIME type will be `video/mp4`, with
a `codecs` parameter as specified in [RFC 6381][rfc-6381].

An `X-Aspect` HTTP header will include the aspect ratio as width:height,
e.g. `16:9` (most cameras) or `9:16` (rotated 90 degrees).
This is redundant with the returned `.mp4` but is far easier to parse from
JavaScript.
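
For example, a client could read the header along these lines (a hedged sketch mirroring the UI's `init()` fetch change later in this commit; the function name and example URL are hypothetical):

```typescript
// Sketch: read the X-Aspect header from the init-segment response and parse it
// as [width, height]. Returns null on a missing or malformed header.
async function fetchAspect(url: string): Promise<[number, number] | null> {
  const resp = await fetch(url);
  const raw = resp.headers.get("X-Aspect"); // e.g. "16:9" or "9:16"
  const parts = raw?.split(":").map((x) => parseInt(x, 10));
  if (parts === undefined || parts.length !== 2 || parts.some(isNaN)) {
    return null;
  }
  return [parts[0], parts[1]];
}

// fetchAspect("/api/init/42.mp4").then((aspect) => console.log(aspect));
```
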

### `GET /api/init/<id>.mp4.txt`

Returns a `text/plain` debugging string for the `.mp4` generated by the
server/Cargo.lock (generated, 1 line changed)
@@ -1264,6 +1264,7 @@ dependencies = [
 "moonfire-base",
 "mylog",
 "nix 0.22.0",
 "num-rational 0.4.0",
 "odds",
 "parking_lot",
 "pretty-hex",
@@ -29,6 +29,7 @@ libpasta = "0.1.2"
log = "0.4"
mylog = { git = "https://github.com/scottlamb/mylog" }
nix = "0.22.0"
num-rational = { version = "0.4.0", default-features = false, features = ["std"] }
odds = { version = "0.4.0", features = ["std-vec"] }
parking_lot = { version = "0.11.1", features = [] }
pretty-hex = "0.2.1"
@@ -135,6 +135,16 @@ pub struct VideoSampleEntry {
    pub pasp_v_spacing: u16,
}

impl VideoSampleEntry {
    /// Returns the aspect ratio as a minimized ratio.
    pub fn aspect(&self) -> num_rational::Ratio<u32> {
        num_rational::Ratio::new(
            u32::from(self.width) * u32::from(self.pasp_h_spacing),
            u32::from(self.height) * u32::from(self.pasp_v_spacing),
        )
    }
}

#[derive(PartialEq, Eq)]
pub struct VideoSampleEntryToInsert {
    pub data: Vec<u8>,
@@ -496,15 +496,20 @@ pub struct VideoSampleEntry {
    pub height: u16,
    pub pasp_h_spacing: u16,
    pub pasp_v_spacing: u16,
    pub aspect_width: u32,
    pub aspect_height: u32,
}

impl VideoSampleEntry {
    fn from(e: &db::VideoSampleEntry) -> Self {
        let aspect = e.aspect();
        Self {
            width: e.width,
            height: e.height,
            pasp_h_spacing: e.pasp_h_spacing,
            pasp_v_spacing: e.pasp_v_spacing,
            aspect_width: *aspect.numer(),
            aspect_height: *aspect.denom(),
        }
    }
}
@@ -1928,6 +1928,15 @@ impl http_serve::Entity for File {
                );
            }
        }
        } else if self.0.type_ == Type::InitSegment {
            // FileBuilder::build() should have failed if there were no video_sample_entries.
            let ent = self.0.video_sample_entries.first().expect("no video_sample_entries");
            let aspect = ent.aspect();
            hdrs.insert("X-Aspect",
                HeaderValue::try_from(
                    format!("{}:{}", aspect.numer(), aspect.denom())
                ).expect("no invalid chars in X-Aspect format")
            );
        }
    }
    fn last_modified(&self) -> Option<SystemTime> {
@@ -2690,6 +2699,9 @@ mod tests {
        let mp4 = builder
            .build(db.db.clone(), db.dirs_by_stream_id.clone())
            .unwrap();
        let mut hdrs = http::header::HeaderMap::new();
        mp4.add_headers(&mut hdrs);
        assert_eq!(hdrs.get("X-Aspect").unwrap(), "16:9");
        traverse(mp4.clone()).await;
    }

@@ -17,7 +17,9 @@ interface Props {
  range90k: [number, number] | null;
  split90k?: number;
  trimStartAndEnd: boolean;
  setActiveRecording: (recording: [Stream, api.Recording] | null) => void;
  setActiveRecording: (
    recording: [Stream, api.Recording, api.VideoSampleEntry] | null
  ) => void;
  formatTime: (time90k: number) => string;
}
@@ -169,7 +171,7 @@ const VideoList = ({
        <Row
          key={r.startId}
          className="recording"
          onClick={() => setActiveRecording([stream, r])}
          onClick={() => setActiveRecording([stream, r, vse])}
          start={formatTime(start)}
          end={formatTime(end)}
          resolution={`${vse.width}x${vse.height}`}
@@ -18,6 +18,9 @@ import DisplaySelector, { DEFAULT_DURATION } from "./DisplaySelector";
import StreamMultiSelector from "./StreamMultiSelector";
import TimerangeSelector from "./TimerangeSelector";
import VideoList from "./VideoList";
import { useLayoutEffect } from "react";
import { fillAspect } from "../aspect";
import useResizeObserver from "@react-hook/resize-observer";

const useStyles = makeStyles((theme: Theme) => ({
  root: {
@@ -64,13 +67,33 @@ const useStyles = makeStyles((theme: Theme) => ({
    alignItems: "center",
    justifyContent: "center",
    "& video": {
      objectFit: "contain",
      maxWidth: "100%",
      maxHeight: "100%",
      objectFit: "fill",
    },
  },
}));

interface FullScreenVideoProps {
  src: string;
  aspect: [number, number];
}

/**
 * A video sized for the entire document window constrained to aspect ratio.
 * This is particularly helpful for Firefox (89), which doesn't honor the
 * pixel aspect ratio specified in .mp4 files. Thus we need to specify it
 * out-of-band.
 */
const FullScreenVideo = ({ src, aspect }: FullScreenVideoProps) => {
  const ref = React.useRef<HTMLVideoElement>(null);
  useLayoutEffect(() => {
    fillAspect(document.body.getBoundingClientRect(), ref, aspect);
  });
  useResizeObserver(document.body, (entry: ResizeObserverEntry) => {
    fillAspect(entry.contentRect, ref, aspect);
  });
  return <video ref={ref} controls preload="auto" autoPlay src={src} />;
};

interface Props {
  timeZoneName: string;
  cameras: Camera[];
@@ -98,7 +121,7 @@ const Main = ({ cameras, timeZoneName, showSelectors }: Props) => {
  const [timestampTrack, setTimestampTrack] = useState(false);

  const [activeRecording, setActiveRecording] = useState<
    [Stream, api.Recording] | null
    [Stream, api.Recording, api.VideoSampleEntry] | null
  >(null);
  const formatTime = useMemo(() => {
    return (time90k: number) => {
@@ -160,10 +183,7 @@ const Main = ({ cameras, timeZoneName, showSelectors }: Props) => {
      {videoLists.length > 0 && recordingsTable}
      {activeRecording != null && (
        <Modal open onClose={closeModal} className={classes.videoModal}>
          <video
            controls
            preload="auto"
            autoPlay
          <FullScreenVideo
            src={api.recordingUrl(
              activeRecording[0].camera.uuid,
              activeRecording[0].streamType,
@@ -171,6 +191,10 @@ const Main = ({ cameras, timeZoneName, showSelectors }: Props) => {
              timestampTrack,
              trimStartAndEnd ? range90k! : undefined
            )}
            aspect={[
              activeRecording[2].aspectWidth,
              activeRecording[2].aspectHeight,
            ]}
          />
        </Modal>
      )}
@@ -9,6 +9,8 @@ import * as api from "../api";
import Box from "@material-ui/core/Box";
import CircularProgress from "@material-ui/core/CircularProgress";
import Alert from "@material-ui/core/Alert";
import useResizeObserver from "@react-hook/resize-observer";
import { fillAspect } from "../aspect";

interface LiveCameraProps {
  camera: Camera | null;
@@ -60,10 +62,12 @@ class LiveCameraDriver {
  constructor(
    camera: Camera,
    setPlaybackState: (state: PlaybackState) => void,
    setAspect: (aspect: [number, number]) => void,
    videoRef: React.RefObject<HTMLVideoElement>
  ) {
    this.camera = camera;
    this.setPlaybackState = setPlaybackState;
    this.setAspect = setAspect;
    this.videoRef = videoRef;
    this.src.addEventListener("sourceopen", this.onMediaSourceOpen);
  }
@@ -151,7 +155,8 @@ class LiveCameraDriver {
        this.error(`init segment fetch status ${initSegmentResult.status}`);
        return;
      }
      srcBuf.appendBuffer(initSegmentResult.response);
      this.setAspect(initSegmentResult.response.aspect);
      srcBuf.appendBuffer(initSegmentResult.response.body);
      return;
    } else if (this.buf.state === "open") {
      this.tryAppendPart(this.buf);
@@ -267,6 +272,7 @@ class LiveCameraDriver {

  camera: Camera;
  setPlaybackState: (state: PlaybackState) => void;
  setAspect: (aspect: [number, number]) => void;
  videoRef: React.RefObject<HTMLVideoElement>;

  src = new MediaSource();
@@ -283,19 +289,26 @@ class LiveCameraDriver {
/**
 * A live view of a camera.
 *
 * The caller is currently expected to put this into a 16x9 block.
 *
 * Note there's a significant setup cost to creating a LiveCamera, so the parent
 * should use React's <tt>key</tt> attribute to avoid unnecessarily mounting
 * and unmounting a camera.
 *
 */
const LiveCamera = ({ camera, chooser }: LiveCameraProps) => {
  const [aspect, setAspect] = React.useState<[number, number]>([16, 9]);
  const videoRef = React.useRef<HTMLVideoElement>(null);
  const boxRef = React.useRef<HTMLElement>(null);
  const [playbackState, setPlaybackState] = React.useState<PlaybackState>({
    state: "normal",
  });

  React.useLayoutEffect(() => {
    fillAspect(boxRef.current!.getBoundingClientRect(), videoRef, aspect);
  }, [boxRef, videoRef, aspect]);
  useResizeObserver(boxRef, (entry: ResizeObserverEntry) => {
    fillAspect(entry.contentRect, videoRef, aspect);
  });

  // Load the camera driver.
  const [driver, setDriver] = React.useState<LiveCameraDriver | null>(null);
  React.useEffect(() => {
@@ -304,7 +317,12 @@ const LiveCamera = ({ camera, chooser }: LiveCameraProps) => {
      setDriver(null);
      return;
    }
    const d = new LiveCameraDriver(camera, setPlaybackState, videoRef);
    const d = new LiveCameraDriver(
      camera,
      setPlaybackState,
      setAspect,
      videoRef
    );
    setDriver(d);
    return () => {
      // Explicitly stop the stream on unmount. There don't seem to be any DOM
@@ -343,10 +361,14 @@ const LiveCamera = ({ camera, chooser }: LiveCameraProps) => {
  );
  return (
    <Box
      ref={boxRef}
      sx={{
        width: "100%",
        height: "100%",
        position: "relative",
        display: "flex",
        alignItems: "center",
        justifyContent: "center",
        "& video": {
          width: "100%",
          height: "100%",
@@ -4,10 +4,9 @@

import Select, { SelectChangeEvent } from "@material-ui/core/Select";
import MenuItem from "@material-ui/core/MenuItem";
import React, { useReducer, useState } from "react";
import React, { useReducer } from "react";
import { Camera } from "../types";
import { makeStyles } from "@material-ui/styles";
import useResizeObserver from "@react-hook/resize-observer";
import { Theme } from "@material-ui/core/styles";

export interface Layout {
@@ -31,20 +30,12 @@ const useStyles = makeStyles((theme: Theme) => ({
    marginTop: theme.spacing(2),
    overflow: "hidden",

    // TODO: this mid-level div can probably be removed.
    "& .mid": {
      position: "relative",
      aspectRatio: "16 / 9",
      display: "inline-block",
    },

    // Set the width based on the height.
    "& .mid.wider": {
      height: "100%",
    },

    // Set the height based on the width.
    "& .mid.taller": {
      width: "100%",
      height: "100%",
      position: "relative",
      display: "inline-block",
    },
  },
  inner: {
@@ -156,53 +147,15 @@ function selectedReducer(old: SelectedCameras, op: SelectOp): SelectedCameras {
 * Presents one or more camera views in one of several layouts.
 *
 * The parent should arrange for the multiview's outer div to be as large
 * as possible. Internally, multiview uses the largest possible aspect
 * ratio-constrained section of it. It uses a ResizeObserver to determine if
 * the outer div is wider or taller than 16x9, and then sets an appropriate CSS
 * class to constrain the width or height respectively. The goal is to have the
 * smoothest resizing by changing the DOM/CSS as little as possible.
 * as possible.
 */
const Multiview = (props: MultiviewProps) => {
  const [selected, updateSelected] = useReducer(
    selectedReducer,
    Array(MAX_CAMERAS).fill(null)
  );
  const [widerOrTaller, setWiderOrTaller] = useState("");
  const outerRef = React.useRef<HTMLDivElement>(null);
  const midRef = React.useRef<HTMLDivElement>(null);

  // Keep a constant 16x9 aspect ratio. Chrome 89.0.4389.90 supports the
  // "aspect-ratio" CSS property and seems to behave in a predictable way.
  // Intuition suggests using that is more performant than extra DOM
  // manipulations. Firefox 87.0 doesn't support aspect-ratio. Emulating it
  // with an <img> child doesn't work well either for using a (flex item)
  // ancestor's (calculated) height to compute
  // the <img>'s width and then the parent's width. There are some open bugs
  // that look related, eg:
  // https://bugzilla.mozilla.org/show_bug.cgi?id=1349738
  // https://bugzilla.mozilla.org/show_bug.cgi?id=1690423
  // so when there's no "aspect-ratio", just calculate everything here.
  const aspectRatioSupported = CSS.supports("aspect-ratio: 16 / 9");
  useResizeObserver(outerRef, (entry: ResizeObserverEntry) => {
    const w = entry.contentRect.width;
    const h = entry.contentRect.height;
    const hFromW = (w * 9) / 16;
    if (aspectRatioSupported) {
      setWiderOrTaller(hFromW > h ? "wider" : "taller");
      return;
    }
    const mid = midRef.current;
    if (mid === null) {
      return;
    }
    if (hFromW > h) {
      mid.style.width = `${(h * 16) / 9}px`;
      mid.style.height = `${h}px`;
    } else {
      mid.style.width = `${w}px`;
      mid.style.height = `${hFromW}px`;
    }
  });
  const classes = useStyles();
  const layout = LAYOUTS[props.layoutIndex];
  const monoviews = selected.slice(0, layout.cameras).map((e, i) => {
@@ -227,7 +180,7 @@ const Multiview = (props: MultiviewProps) => {
  });
  return (
    <div className={classes.root} ref={outerRef}>
      <div className={`mid ${widerOrTaller}`} ref={midRef}>
      <div className="mid">
        <div className={`${classes.inner} ${layout.className}`}>
          {monoviews}
        </div>
@@ -78,16 +78,29 @@ async function myfetch(
  };
}

export interface InitSegmentResponse {
  aspect: [number, number];
  body: ArrayBuffer;
}

/** Fetches an initialization segment. */
export async function init(
  videoSampleEntryId: number,
  init: RequestInit
): Promise<FetchResult<ArrayBuffer>> {
): Promise<FetchResult<InitSegmentResponse>> {
  const url = `/api/init/${videoSampleEntryId}.mp4`;
  const fetchRes = await myfetch(url, init);
  if (fetchRes.status !== "success") {
    return fetchRes;
  }
  const rawAspect = fetchRes.response.headers.get("X-Aspect");
  const aspect = rawAspect?.split(":").map((x) => parseInt(x, 10));
  if (aspect === undefined) {
    return {
      status: "error",
      message: `invalid/missing X-Aspect: ${rawAspect}`,
    };
  }
  let body;
  try {
    body = await fetchRes.response.arrayBuffer();
@@ -98,9 +111,10 @@ export async function init(
      message: `unable to read body: ${e.message}`,
    };
  }

  return {
    status: "success",
    response: body,
    response: { aspect: aspect as [number, number], body },
  };
}
@@ -256,6 +270,8 @@ export interface VideoSampleEntry {
  height: number;
  pixelHSpacing?: number;
  pixelVSpacing?: number;
  aspectWidth: number;
  aspectHeight: number;
}

export interface RecordingsRequest {
ui/src/aspect.ts (new file, 37 lines)
@@ -0,0 +1,37 @@
// This file is part of Moonfire NVR, a security camera network video recorder.
// Copyright (C) 2021 The Moonfire NVR Authors; see AUTHORS and LICENSE.txt.
// SPDX-License-Identifier: GPL-v3.0-or-later WITH GPL-3.0-linking-exception

/**
 * Sets CSS properties on <tt>innerRef</tt> to fill as much of <tt>rect</tt>
 * as possible while maintaining aspect ratio.
 *
 * While Chrome 89 supports the "aspect-ratio" CSS property and behaves in a
 * predictable way, Firefox 87 doesn't. Emulating it with an <img> child
 * doesn't work well either for using a (flex item) ancestor's (calculated)
 * height to compute the <img>'s width and then the parent's width. There are
 * open bugs that look related, eg:
 * https://bugzilla.mozilla.org/show_bug.cgi?id=1349738
 * https://bugzilla.mozilla.org/show_bug.cgi?id=1690423
 * so just do it all by hand. The caller should use a ResizeObserver.
 */
export function fillAspect(
  rect: DOMRectReadOnly,
  innerRef: React.RefObject<HTMLElement>,
  aspect: [number, number]
) {
  const w = rect.width;
  const h = rect.height;
  const hFromW = (w * aspect[1]) / aspect[0];
  const inner = innerRef.current;
  if (inner === null) {
    return;
  }
  if (hFromW > h) {
    inner.style.width = `${(h * aspect[0]) / aspect[1]}px`;
    inner.style.height = `${h}px`;
  } else {
    inner.style.width = `${w}px`;
    inner.style.height = `${hFromW}px`;
  }
}