diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
index 6444de0..7a482fc 100644
--- a/.github/workflows/ci.yml
+++ b/.github/workflows/ci.yml
@@ -15,7 +15,7 @@ jobs:
     name: Rust ${{ matrix.rust }}
     strategy:
       matrix:
-        rust: [ "stable", "1.70", "nightly" ]
+        rust: [ "stable", "1.79", "nightly" ]
         include:
           - rust: nightly
            extra_args: "--features nightly --benches"
diff --git a/CHANGELOG.md b/CHANGELOG.md
index 17b998d..f04e8b7 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -10,6 +10,9 @@ even on minor releases, e.g. `v0.7.5` -> `v0.7.6`.
 
 ## unreleased
 
+*   upgrade to Retina 0.4.9, adding support for recording MJPEG video. Note
+    browser playback is unlikely to work.
+*   bump minimum Rust version to 1.79.
 *   in UI's list view, add a tooltip on the end time which shows why the
     recording ended.
 
diff --git a/guide/build.md b/guide/build.md
index 5bc8647..d284f4c 100644
--- a/guide/build.md
+++ b/guide/build.md
@@ -68,7 +68,7 @@ following command:
 $ brew install node
 ```
 
-Next, you need Rust 1.65+ and Cargo. The easiest way to install them is by
+Next, you need Rust 1.79+ and Cargo. The easiest way to install them is by
 following the instructions at [rustup.rs](https://www.rustup.rs/). Avoid your
 Linux distribution's Rust packages, which tend to be too old. (At least on
 Debian-based systems; Arch and Gentoo might be okay.)
diff --git a/server/Cargo.lock b/server/Cargo.lock
index ef61977..817e524 100644
--- a/server/Cargo.lock
+++ b/server/Cargo.lock
@@ -1747,9 +1747,9 @@ dependencies = [
 
 [[package]]
 name = "retina"
-version = "0.4.8"
+version = "0.4.9"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "fdd73fbdea4177bdc50179d23a85d1db7c329bfbe06e064947a6b92d87332d81"
+checksum = "ef9828fb04b8b2bd763887cf4be07aa85aecaa7fce3ee3c7f57bf61e804e9e5c"
 dependencies = [
  "base64",
  "bitstream-io",
diff --git a/server/Cargo.toml b/server/Cargo.toml
index 3e0c748..5a2e5a9 100644
--- a/server/Cargo.toml
+++ b/server/Cargo.toml
@@ -5,7 +5,7 @@ authors = ["Scott Lamb "]
 edition = "2021"
 resolver = "2"
 license-file = "../LICENSE.txt"
-rust-version = "1.70"
+rust-version = "1.79"
 publish = false
 
 [features]
@@ -58,7 +58,7 @@ password-hash = "0.5.0"
 pretty-hex = { workspace = true }
 protobuf = "3.0"
 reffers = "0.7.0"
-retina = "0.4.0"
+retina = "0.4.9"
 ring = { workspace = true }
 rusqlite = { workspace = true }
 serde = { version = "1.0", features = ["derive"] }
@@ -117,4 +117,4 @@
 protobuf-codegen = { git = "https://github.com/scottlamb/rust-protobuf.git", rev = "a61e09785c957eb9a183d129b426710146bfde38" }
 protobuf-parse = { git = "https://github.com/scottlamb/rust-protobuf.git", rev = "a61e09785c957eb9a183d129b426710146bfde38" }
 # This version uses fallible-iterator v0.3 (same one rusqlite 0.30 uses) and hasn't been released yet.
-sdp-types = { git = "https://github.com/sdroege/sdp-types", rev = "e8d0a2c4b8b1fc1ddf1c60a01dc717a2f4e2d514" }
\ No newline at end of file
+sdp-types = { git = "https://github.com/sdroege/sdp-types", rev = "e8d0a2c4b8b1fc1ddf1c60a01dc717a2f4e2d514" }
diff --git a/server/db/Cargo.toml b/server/db/Cargo.toml
index 73767fa..8a087b4 100644
--- a/server/db/Cargo.toml
+++ b/server/db/Cargo.toml
@@ -5,7 +5,7 @@ authors = ["Scott Lamb "]
 readme = "../README.md"
 edition = "2021"
 license-file = "../../LICENSE.txt"
-rust-version = "1.70"
+rust-version = "1.79"
 publish = false
 
 [features]
diff --git a/server/src/h264.rs b/server/src/h264.rs
deleted file mode 100644
index 828799b..0000000
--- a/server/src/h264.rs
+++ /dev/null
@@ -1,383 +0,0 @@
-// This file is part of Moonfire NVR, a security camera network video recorder.
-// Copyright (C) 2021 The Moonfire NVR Authors; see AUTHORS and LICENSE.txt.
-// SPDX-License-Identifier: GPL-v3.0-or-later WITH GPL-3.0-linking-exception.
-
-//! H.264 decoding
-//!
-//! For the most part, Moonfire NVR does not try to understand the video codec. However, H.264 has
-//! two byte stream encodings: ISO/IEC 14496-10 Annex B, and ISO/IEC 14496-15 AVC access units.
-//! When streaming from RTSP, ffmpeg supplies the former. We need the latter to stick into `.mp4`
-//! files. This file manages the conversion, both for the ffmpeg "extra data" (which should become
-//! the ISO/IEC 14496-15 section 5.2.4.1 `AVCDecoderConfigurationRecord`) and the actual samples.
-//!
-//! See the [wiki page on standards and
-//! specifications](https://github.com/scottlamb/moonfire-nvr/wiki/Standards-and-specifications)
-//! for help finding a copy of the relevant standards. This code won't make much sense without them!
-//!
-//! ffmpeg of course has logic to do the same thing, but unfortunately it is not exposed except
-//! through ffmpeg's own generated `.mp4` file. Extracting just this part of their `.mp4` files
-//! would be more trouble than it's worth.
-
-use base::{bail, err, Error};
-use byteorder::{BigEndian, ByteOrder, WriteBytesExt};
-use db::VideoSampleEntryToInsert;
-use h264_reader::nal::Nal;
-use pretty_hex::PrettyHex as _;
-use std::convert::TryFrom;
-
-// For certain common sub stream anamorphic resolutions, add a pixel aspect ratio box.
-// Assume the camera is 16x9. These are just the standard wide mode; default_pixel_aspect_ratio
-// tries the transpose also.
-const PIXEL_ASPECT_RATIOS: [((u16, u16), (u16, u16)); 6] = [
-    ((320, 240), (4, 3)),
-    ((352, 240), (40, 33)),
-    ((640, 352), (44, 45)),
-    ((640, 480), (4, 3)),
-    ((704, 480), (40, 33)),
-    ((720, 480), (32, 27)),
-];
-
-/// Get the pixel aspect ratio to use if none is specified.
-///
-/// The Dahua IPC-HDW5231R-Z sets the aspect ratio in the H.264 SPS (correctly) for both square and
-/// non-square pixels. The Hikvision DS-2CD2032-I doesn't set it, even though the sub stream's
-/// pixels aren't square. So define a default based on the pixel dimensions to use if the camera
-/// doesn't tell us what to do.
-///
-/// Note that at least in the case of .mp4 muxing, we don't need to fix up the underlying SPS.
-/// PixelAspectRatioBox's definition says that it overrides the H.264-level declaration.
-fn default_pixel_aspect_ratio(width: u16, height: u16) -> (u16, u16) {
-    if width >= height {
-        PIXEL_ASPECT_RATIOS
-            .iter()
-            .find(|r| r.0 == (width, height))
-            .map(|r| r.1)
-            .unwrap_or((1, 1))
-    } else {
-        PIXEL_ASPECT_RATIOS
-            .iter()
-            .find(|r| r.0 == (height, width))
-            .map(|r| (r.1 .1, r.1 .0))
-            .unwrap_or((1, 1))
-    }
-}
-
-/// `h264_reader::rbsp::BitRead` impl that does not care about extra trailing data.
-///
-/// Some (Reolink) cameras appear to have a stray extra byte at the end. Follow the lead of most
-/// other RTSP implementations in tolerating this.
-#[derive(Debug)]
-struct TolerantBitReader<R> {
-    inner: R,
-}
-
-impl<R: h264_reader::rbsp::BitRead> h264_reader::rbsp::BitRead for TolerantBitReader<R> {
-    fn read_ue(&mut self, name: &'static str) -> Result<u32, h264_reader::rbsp::BitReaderError> {
-        self.inner.read_ue(name)
-    }
-
-    fn read_se(&mut self, name: &'static str) -> Result<i32, h264_reader::rbsp::BitReaderError> {
-        self.inner.read_se(name)
-    }
-
-    fn read_bool(&mut self, name: &'static str) -> Result<bool, h264_reader::rbsp::BitReaderError> {
-        self.inner.read_bool(name)
-    }
-
-    fn read_u8(
-        &mut self,
-        bit_count: u32,
-        name: &'static str,
-    ) -> Result<u8, h264_reader::rbsp::BitReaderError> {
-        self.inner.read_u8(bit_count, name)
-    }
-
-    fn read_u16(
-        &mut self,
-        bit_count: u32,
-        name: &'static str,
-    ) -> Result<u16, h264_reader::rbsp::BitReaderError> {
-        self.inner.read_u16(bit_count, name)
-    }
-
-    fn read_u32(
-        &mut self,
-        bit_count: u32,
-        name: &'static str,
-    ) -> Result<u32, h264_reader::rbsp::BitReaderError> {
-        self.inner.read_u32(bit_count, name)
-    }
-
-    fn read_i32(
-        &mut self,
-        bit_count: u32,
-        name: &'static str,
-    ) -> Result<i32, h264_reader::rbsp::BitReaderError> {
-        self.inner.read_i32(bit_count, name)
-    }
-
-    fn has_more_rbsp_data(
-        &mut self,
-        name: &'static str,
-    ) -> Result<bool, h264_reader::rbsp::BitReaderError> {
-        self.inner.has_more_rbsp_data(name)
-    }
-
-    fn finish_rbsp(self) -> Result<(), h264_reader::rbsp::BitReaderError> {
-        match self.inner.finish_rbsp() {
-            Ok(()) => Ok(()),
-            Err(h264_reader::rbsp::BitReaderError::RemainingData) => {
-                tracing::debug!("extra data at end of NAL unit");
-                Ok(())
-            }
-            Err(e) => Err(e),
-        }
-    }
-
-    fn finish_sei_payload(self) -> Result<(), h264_reader::rbsp::BitReaderError> {
-        self.inner.finish_sei_payload()
-    }
-}
-
-fn parse_extra_data_inner(extradata: &[u8]) -> Result<VideoSampleEntryToInsert, Error> {
-    let avcc =
-        h264_reader::avcc::AvcDecoderConfigurationRecord::try_from(extradata).map_err(|e| {
-            err!(
-                InvalidArgument,
-                msg("bad AvcDecoderConfigurationRecord: {:?}", e)
-            )
-        })?;
-    if avcc.num_of_sequence_parameter_sets() != 1 {
-        bail!(Unimplemented, msg("multiple SPSs!"));
-    }
-
-    // This logic is essentially copied from
-    // `h264_reader::avcc::AvcDecoderConfigurationRecord::create_context` but
-    // using our `TolerantBitReader` wrapper.
-    let mut ctx = h264_reader::Context::new();
-    for sps in avcc.sequence_parameter_sets() {
-        let sps = h264_reader::nal::RefNal::new(
-            sps.map_err(|e| err!(InvalidArgument, msg("bad SPS: {e:?}")))?,
-            &[],
-            true,
-        );
-        let sps = h264_reader::nal::sps::SeqParameterSet::from_bits(TolerantBitReader {
-            inner: sps.rbsp_bits(),
-        })
-        .map_err(|e| err!(InvalidArgument, msg("bad SPS: {e:?}")))?;
-        ctx.put_seq_param_set(sps);
-    }
-    for pps in avcc.picture_parameter_sets() {
-        let pps = h264_reader::nal::RefNal::new(
-            pps.map_err(|e| err!(InvalidArgument, msg("bad PPS: {e:?}")))?,
-            &[],
-            true,
-        );
-        let pps = h264_reader::nal::pps::PicParameterSet::from_bits(
-            &ctx,
-            TolerantBitReader {
-                inner: pps.rbsp_bits(),
-            },
-        )
-        .map_err(|e| err!(InvalidArgument, msg("bad PPS: {e:?}")))?;
-        ctx.put_pic_param_set(pps);
-    }
-
-    let sps = ctx
-        .sps_by_id(h264_reader::nal::pps::ParamSetId::from_u32(0).unwrap())
-        .ok_or_else(|| err!(Unimplemented, msg("no SPS 0")))?;
-    let pixel_dimensions = sps.pixel_dimensions().map_err(|e| {
-        err!(
-            InvalidArgument,
-            msg("SPS has invalid pixel dimensions: {:?}", e)
-        )
-    })?;
-    let (Ok(width), Ok(height)) = (
-        u16::try_from(pixel_dimensions.0),
-        u16::try_from(pixel_dimensions.1),
-    ) else {
-        bail!(
-            InvalidArgument,
-            msg(
-                "bad dimensions {}x{}",
-                pixel_dimensions.0,
-                pixel_dimensions.1
-            )
-        );
-    };
-
-    let mut sample_entry = Vec::with_capacity(256);
-
-    // This is a concatenation of the following boxes/classes.
-
-    // SampleEntry, ISO/IEC 14496-12 section 8.5.2.
-    let avc1_len_pos = sample_entry.len();
-    // length placeholder + type + reserved + data_reference_index = 1
-    sample_entry.extend_from_slice(b"\x00\x00\x00\x00avc1\x00\x00\x00\x00\x00\x00\x00\x01");
-
-    // VisualSampleEntry, ISO/IEC 14496-12 section 12.1.3.
-    sample_entry.extend_from_slice(&[0; 16]); // pre-defined + reserved
-    sample_entry.write_u16::<BigEndian>(width)?;
-    sample_entry.write_u16::<BigEndian>(height)?;
-    sample_entry.extend_from_slice(&[
-        0x00, 0x48, 0x00, 0x00, // horizresolution
-        0x00, 0x48, 0x00, 0x00, // vertresolution
-        0x00, 0x00, 0x00, 0x00, // reserved
-        0x00, 0x01, // frame count
-        0x00, 0x00, 0x00, 0x00, // compressorname
-        0x00, 0x00, 0x00, 0x00, //
-        0x00, 0x00, 0x00, 0x00, //
-        0x00, 0x00, 0x00, 0x00, //
-        0x00, 0x00, 0x00, 0x00, //
-        0x00, 0x00, 0x00, 0x00, //
-        0x00, 0x00, 0x00, 0x00, //
-        0x00, 0x00, 0x00, 0x00, //
-        0x00, 0x18, 0xff, 0xff, // depth + pre_defined
-    ]);
-
-    // AVCSampleEntry, ISO/IEC 14496-15 section 5.3.4.1.
-    // AVCConfigurationBox, ISO/IEC 14496-15 section 5.3.4.1.
-    let avcc_len_pos = sample_entry.len();
-    sample_entry.extend_from_slice(b"\x00\x00\x00\x00avcC");
-    sample_entry.extend_from_slice(extradata);
-
-    // Fix up avc1 and avcC box lengths.
-    let cur_pos = sample_entry.len();
-    BigEndian::write_u32(
-        &mut sample_entry[avcc_len_pos..avcc_len_pos + 4],
-        u32::try_from(cur_pos - avcc_len_pos).map_err(|_| err!(OutOfRange))?,
-    );
-
-    // PixelAspectRatioBox, ISO/IEC 14496-12 section 12.1.4.2.
-    // Write a PixelAspectRatioBox if necessary, as the sub streams can be be anamorphic.
-    let pasp = sps
-        .vui_parameters
-        .as_ref()
-        .and_then(|v| v.aspect_ratio_info.as_ref())
-        .and_then(|a| a.clone().get())
-        .unwrap_or_else(|| default_pixel_aspect_ratio(width, height));
-    if pasp != (1, 1) {
-        sample_entry.extend_from_slice(b"\x00\x00\x00\x10pasp"); // length + box name
-        sample_entry.write_u32::<BigEndian>(pasp.0.into())?;
-        sample_entry.write_u32::<BigEndian>(pasp.1.into())?;
-    }
-
-    let cur_pos = sample_entry.len();
-    BigEndian::write_u32(
-        &mut sample_entry[avc1_len_pos..avc1_len_pos + 4],
-        u32::try_from(cur_pos - avc1_len_pos).map_err(|_| err!(OutOfRange))?,
-    );
-
-    let profile_idc = sample_entry[103];
-    let constraint_flags = sample_entry[104];
-    let level_idc = sample_entry[105];
-
-    let rfc6381_codec = format!("avc1.{profile_idc:02x}{constraint_flags:02x}{level_idc:02x}");
-    Ok(VideoSampleEntryToInsert {
-        data: sample_entry,
-        rfc6381_codec,
-        width,
-        height,
-        pasp_h_spacing: pasp.0,
-        pasp_v_spacing: pasp.1,
-    })
-}
-
-/// Parses the `AvcDecoderConfigurationRecord` in the "extra data".
-pub fn parse_extra_data(extradata: &[u8]) -> Result<VideoSampleEntryToInsert, Error> {
-    parse_extra_data_inner(extradata).map_err(|e| {
-        err!(
-            e,
-            msg(
-                "can't parse AvcDecoderRecord {}",
-                extradata.hex_conf(pretty_hex::HexConfig {
-                    width: 0,
-                    group: 0,
-                    chunk: 0,
-                    ..Default::default()
-                })
-            )
-        )
-    })
-}
-
-#[cfg(test)]
-mod tests {
-    use db::testutil;
-
-    #[rustfmt::skip]
-    const AVC_DECODER_CONFIG_TEST_INPUT: [u8; 38] = [
-        0x01, 0x4d, 0x00, 0x1f, 0xff,
-
-        0xe1, 0x00, 0x17, // 1 SPS, length 0x17
-        0x67, 0x4d, 0x00, 0x1f, 0x9a, 0x66, 0x02, 0x80,
-        0x2d, 0xff, 0x35, 0x01, 0x01, 0x01, 0x40, 0x00,
-        0x00, 0xfa, 0x00, 0x00, 0x1d, 0x4c, 0x01,
-
-        0x01, 0x00, 0x04, // 1 PPS, length 0x04
-        0x68, 0xee, 0x3c, 0x80,
-    ];
-
-    #[rustfmt::skip]
-    const AVC_DECODER_CONFIG_TEST_INPUT_WITH_TRAILING_GARBAGE: [u8; 40] = [
-        0x01, 0x4d, 0x00, 0x1f, 0xff,
-
-        0xe1, 0x00, 0x18, // 1 SPS, length 0x18
-        0x67, 0x4d, 0x00, 0x1f, 0x9a, 0x66, 0x02, 0x80,
-        0x2d, 0xff, 0x35, 0x01, 0x01, 0x01, 0x40, 0x00,
-        0x00, 0xfa, 0x00, 0x00, 0x1d, 0x4c, 0x01, 0x01,
-
-        0x01, 0x00, 0x04, // 1 PPS, length 0x05
-        0x68, 0xee, 0x3c, 0x80, 0x80,
-    ];
-
-    #[rustfmt::skip]
-    const TEST_OUTPUT: [u8; 132] = [
-        0x00, 0x00, 0x00, 0x84, 0x61, 0x76, 0x63, 0x31,
-        0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01,
-        0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
-        0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
-        0x05, 0x00, 0x02, 0xd0, 0x00, 0x48, 0x00, 0x00,
-        0x00, 0x48, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
-        0x00, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
-        0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
-        0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
-        0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
-        0x00, 0x00, 0x00, 0x18, 0xff, 0xff, 0x00, 0x00,
-        0x00, 0x2e, 0x61, 0x76, 0x63, 0x43, 0x01, 0x4d,
-        0x00, 0x1f, 0xff, 0xe1, 0x00, 0x17, 0x67, 0x4d,
-        0x00, 0x1f, 0x9a, 0x66, 0x02, 0x80, 0x2d, 0xff,
-        0x35, 0x01, 0x01, 0x01, 0x40, 0x00, 0x00, 0xfa,
-        0x00, 0x00, 0x1d, 0x4c, 0x01, 0x01, 0x00, 0x04,
-        0x68, 0xee, 0x3c, 0x80,
-    ];
-
-    #[test]
-    fn test_sample_entry_from_avc_decoder_config() {
-        testutil::init();
-        let e = super::parse_extra_data(&AVC_DECODER_CONFIG_TEST_INPUT).unwrap();
-        assert_eq!(&e.data[..], &TEST_OUTPUT[..]);
-        assert_eq!(e.width, 1280);
-        assert_eq!(e.height, 720);
-        assert_eq!(e.rfc6381_codec, "avc1.4d001f");
-    }
-
-    #[test]
-    fn pixel_aspect_ratios() {
-        use super::default_pixel_aspect_ratio;
-        use num_rational::Ratio;
-        for &((w, h), _) in &super::PIXEL_ASPECT_RATIOS {
-            let (h_spacing, v_spacing) = default_pixel_aspect_ratio(w, h);
-            assert_eq!(Ratio::new(w * h_spacing, h * v_spacing), Ratio::new(16, 9));
-
-            // 90 or 270 degree rotation.
-            let (h_spacing, v_spacing) = default_pixel_aspect_ratio(h, w);
-            assert_eq!(Ratio::new(h * h_spacing, w * v_spacing), Ratio::new(9, 16));
-        }
-    }
-
-    #[test]
-    fn extra_sps_data() {
-        super::parse_extra_data(&AVC_DECODER_CONFIG_TEST_INPUT_WITH_TRAILING_GARBAGE).unwrap();
-    }
-}
diff --git a/server/src/main.rs b/server/src/main.rs
index b41d6cc..ce975ee 100644
--- a/server/src/main.rs
+++ b/server/src/main.rs
@@ -12,7 +12,6 @@ use tracing::{debug, error};
 
 mod body;
 mod cmds;
-mod h264;
 mod json;
 mod mp4;
 mod slices;
diff --git a/server/src/mp4.rs b/server/src/mp4.rs
index d4cd56a..28eaac7 100644
--- a/server/src/mp4.rs
+++ b/server/src/mp4.rs
@@ -2844,7 +2844,7 @@ mod tests {
         // combine ranges from the new format with ranges from the old format.
         let hash = digest(&mp4).await;
         assert_eq!(
-            "64f23b856692702b13d1811cd02dc83395b3d501dead7fd16f175eb26b4d8eee",
+            "123e2cf075125c81e80820bffa412d38729aff05c252c7ea2ab3384905903bb7",
             hash.to_hex().as_str()
         );
         const EXPECTED_ETAG: &str =
@@ -2873,7 +2873,7 @@ mod tests {
         // combine ranges from the new format with ranges from the old format.
         let hash = digest(&mp4).await;
         assert_eq!(
-            "f9e4ed946187b2dd22ef049c4c1869d0f6c4f377ef08f8f53570850b61a06701",
+            "1f85ec7ea7f061b7d8f696c337a3258abc2bf830e81ac23c1342131669d7bb14",
             hash.to_hex().as_str()
         );
         const EXPECTED_ETAG: &str =
@@ -2902,7 +2902,7 @@ mod tests {
         // combine ranges from the new format with ranges from the old format.
         let hash = digest(&mp4).await;
         assert_eq!(
-            "f913d46d0119a03291e85459455b9a75a84cc9a1a5e3b88ca7e93eb718d73190",
+            "1debe76fc6277546209454919550ff4c3a379560f481fa0ce78378cbf3c646f8",
             hash.to_hex().as_str()
         );
         const EXPECTED_ETAG: &str =
@@ -2932,7 +2932,7 @@ mod tests {
         // combine ranges from the new format with ranges from the old format.
         let hash = digest(&mp4).await;
         assert_eq!(
-            "64cc763fa2533118bc6bf0b01249f02524ae87e0c97815079447b235722c1e2d",
+            "9c0302294f8f34d14fc8069fea1a65c1593a4c01134c07ab994b7398004f2b63",
             hash.to_hex().as_str()
         );
         const EXPECTED_ETAG: &str =
@@ -2961,7 +2961,7 @@ mod tests {
         // combine ranges from the new format with ranges from the old format.
         let hash = digest(&mp4).await;
         assert_eq!(
-            "6886b36ae6df9ce538f6db7ebd6159e68c2936b9d43307f7728fe75e0b62cad2",
+            "e06b5627788828b73b98726dfb6466d32305df64af0acbe6164fc8ab296de473",
             hash.to_hex().as_str()
        );
         const EXPECTED_ETAG: &str =
diff --git a/server/src/stream.rs b/server/src/stream.rs
index 66b2abd..c36c701 100644
--- a/server/src/stream.rs
+++ b/server/src/stream.rs
@@ -2,7 +2,6 @@
 // Copyright (C) 2016 The Moonfire NVR Authors; see AUTHORS and LICENSE.txt.
 // SPDX-License-Identifier: GPL-v3.0-or-later WITH GPL-3.0-linking-exception.
 
-use crate::h264;
 use base::{bail, err, Error};
 use bytes::Bytes;
 use futures::StreamExt;
@@ -15,6 +14,43 @@ use url::Url;
 
 static RETINA_TIMEOUT: std::time::Duration = std::time::Duration::from_secs(30);
 
+// For certain common sub stream anamorphic resolutions, add a pixel aspect ratio box.
+// Assume the camera is 16x9. These are just the standard wide mode; default_pixel_aspect_ratio
+// tries the transpose also.
+const PIXEL_ASPECT_RATIOS: [((u16, u16), (u16, u16)); 6] = [
+    ((320, 240), (4, 3)),
+    ((352, 240), (40, 33)),
+    ((640, 352), (44, 45)),
+    ((640, 480), (4, 3)),
+    ((704, 480), (40, 33)),
+    ((720, 480), (32, 27)),
+];
+
+/// Gets the pixel aspect ratio to use if none is specified.
+///
+/// The Dahua IPC-HDW5231R-Z sets the aspect ratio in the H.264 SPS (correctly) for both square and
+/// non-square pixels. The Hikvision DS-2CD2032-I doesn't set it, even though the sub stream's
+/// pixels aren't square. So define a default based on the pixel dimensions to use if the camera
+/// doesn't tell us what to do.
+///
+/// Note that at least in the case of .mp4 muxing, we don't need to fix up the underlying SPS.
+/// PixelAspectRatioBox's definition says that it overrides the H.264-level declaration.
+fn default_pixel_aspect_ratio(width: u16, height: u16) -> (u16, u16) {
+    if width >= height {
+        PIXEL_ASPECT_RATIOS
+            .iter()
+            .find(|r| r.0 == (width, height))
+            .map(|r| r.1)
+            .unwrap_or((1, 1))
+    } else {
+        PIXEL_ASPECT_RATIOS
+            .iter()
+            .find(|r| r.0 == (height, width))
+            .map(|r| (r.1 .1, r.1 .0))
+            .unwrap_or((1, 1))
+    }
+}
+
 pub struct Options {
     pub session: retina::client::SessionOptions,
     pub setup: retina::client::SetupOptions,
@@ -115,6 +151,27 @@ struct RetinaStreamInner {
     video_sample_entry: db::VideoSampleEntryToInsert,
 }
 
+fn params_to_sample_entry(
+    params: &retina::codec::VideoParameters,
+) -> Result<db::VideoSampleEntryToInsert, Error> {
+    let (width, height) = params.pixel_dimensions();
+    let width = u16::try_from(width).map_err(|e| err!(Unknown, source(e)))?;
+    let height = u16::try_from(height).map_err(|e| err!(Unknown, source(e)))?;
+    let aspect = default_pixel_aspect_ratio(width, height);
+    Ok(db::VideoSampleEntryToInsert {
+        data: params
+            .mp4_sample_entry()
+            .with_aspect_ratio(aspect)
+            .build()
+            .map_err(|e| err!(Unknown, source(e)))?,
+        rfc6381_codec: "avc1.4d401e".to_string(),
+        width,
+        height,
+        pasp_h_spacing: aspect.0,
+        pasp_v_spacing: aspect.1,
+    })
+}
+
 impl RetinaStreamInner {
     /// Plays to first frame. No timeout; that's the caller's responsibility.
     async fn play(
@@ -129,8 +186,13 @@ impl RetinaStreamInner {
         let video_i = session
             .streams()
             .iter()
-            .position(|s| s.media() == "video" && s.encoding_name() == "h264")
-            .ok_or_else(|| err!(FailedPrecondition, msg("couldn't find H.264 video stream")))?;
+            .position(|s| s.media() == "video" && matches!(s.encoding_name(), "h264" | "jpeg"))
+            .ok_or_else(|| {
+                err!(
+                    FailedPrecondition,
+                    msg("couldn't find supported video stream")
+                )
+            })?;
         session
             .setup(video_i, options.setup)
             .await
@@ -157,9 +219,9 @@ impl RetinaStreamInner {
         let video_params = match session.streams()[video_i].parameters() {
             Some(retina::codec::ParametersRef::Video(v)) => v.clone(),
             Some(_) => unreachable!(),
-            None => bail!(Unknown, msg("couldn't find H.264 parameters")),
+            None => bail!(Unknown, msg("couldn't find video parameters")),
         };
-        let video_sample_entry = h264::parse_extra_data(video_params.extra_data())?;
+        let video_sample_entry = params_to_sample_entry(&video_params)?;
         let self_ = Box::new(Self {
             label,
             session,
@@ -246,7 +308,7 @@ impl Stream for RetinaStream {
         })??;
         let mut new_video_sample_entry = false;
         if let Some(p) = new_parameters {
-            let video_sample_entry = h264::parse_extra_data(p.extra_data())?;
+            let video_sample_entry = params_to_sample_entry(&p)?;
             if video_sample_entry != inner.video_sample_entry {
                 tracing::debug!(
                     "{}: parameter change:\nold: {:?}\nnew: {:?}",
@@ -274,6 +336,8 @@ impl Stream for RetinaStream {
 
 #[cfg(test)]
 pub mod testutil {
+    use mp4::mp4box::WriteBox as _;
+
     use super::*;
     use std::convert::TryFrom;
     use std::io::Cursor;
@@ -300,14 +364,35 @@ pub mod testutil {
             .values()
             .find(|t| matches!(t.media_type(), Ok(mp4::MediaType::H264)))
         {
-            None => bail!(InvalidArgument, msg("expected a H.264 track")),
+            None => bail!(
+                InvalidArgument,
+                msg(
+                    "expected a H.264 track, tracks were: {:#?}",
+                    reader.tracks()
+                )
+            ),
             Some(t) => t,
         };
-        let video_sample_entry = h264::parse_extra_data(
-            &h264_track
-                .extra_data()
-                .map_err(|e| err!(Unknown, source(e)))?[..],
-        )?;
+        let mut data = Vec::new();
+        h264_track
+            .trak
+            .mdia
+            .minf
+            .stbl
+            .stsd
+            .avc1
+            .as_ref()
+            .unwrap()
+            .write_box(&mut data)
+            .unwrap();
+        let video_sample_entry = db::VideoSampleEntryToInsert {
+            data,
+            rfc6381_codec: "avc1.4d401e".to_string(),
+            width: h264_track.width(),
+            height: h264_track.height(),
+            pasp_h_spacing: 1,
+            pasp_v_spacing: 1,
+        };
         let h264_track_id = h264_track.track_id();
         let stream = Mp4Stream {
             reader,
@@ -360,3 +445,23 @@ pub mod testutil {
         }
     }
 }
+
+#[cfg(test)]
+mod tests {
+    use db::testutil;
+
+    #[test]
+    fn pixel_aspect_ratios() {
+        testutil::init();
+        use super::default_pixel_aspect_ratio;
+        use num_rational::Ratio;
+        for &((w, h), _) in &super::PIXEL_ASPECT_RATIOS {
+            let (h_spacing, v_spacing) = default_pixel_aspect_ratio(w, h);
+            assert_eq!(Ratio::new(w * h_spacing, h * v_spacing), Ratio::new(16, 9));
+
+            // 90 or 270 degree rotation.
+            let (h_spacing, v_spacing) = default_pixel_aspect_ratio(h, w);
+            assert_eq!(Ratio::new(h * h_spacing, w * v_spacing), Ratio::new(9, 16));
+        }
+    }
+}