From 2eb413678b000572776b1a430a24379cc657cc71 Mon Sep 17 00:00:00 2001 From: Andreas Reich Date: Thu, 12 Sep 2024 19:17:23 +0200 Subject: [PATCH 01/21] media type cosmetics media type fixup --- .../re_types/src/components/media_type_ext.rs | 7 ++++++ crates/viewer/re_renderer/src/video/mod.rs | 7 +++--- .../re_space_view_spatial/src/video_cache.rs | 24 +++++++++++++------ .../src/visualizers/videos.rs | 2 +- rerun_cpp/src/rerun/components/media_type.hpp | 3 +++ .../src/rerun/components/media_type_ext.cpp | 5 +++- 6 files changed, 35 insertions(+), 13 deletions(-) diff --git a/crates/store/re_types/src/components/media_type_ext.rs b/crates/store/re_types/src/components/media_type_ext.rs index 4540f17bbb9b..d000b3b38f15 100644 --- a/crates/store/re_types/src/components/media_type_ext.rs +++ b/crates/store/re_types/src/components/media_type_ext.rs @@ -46,6 +46,9 @@ impl MediaType { /// pub const STL: &'static str = "model/stl"; + // ------------------------------------------------------- + /// Videos: + /// [MP4 video](https://en.wikipedia.org/wiki/MP4_file_format): `video/mp4`. /// /// @@ -107,6 +110,9 @@ impl MediaType { Self(Self::STL.into()) } + // ------------------------------------------------------- + // Video: + /// `video/mp4` #[inline] pub fn mp4() -> Self { @@ -240,6 +246,7 @@ fn test_media_type_extension() { assert_eq!(MediaType::glb().file_extension(), Some("glb")); assert_eq!(MediaType::gltf().file_extension(), Some("gltf")); assert_eq!(MediaType::jpeg().file_extension(), Some("jpg")); + assert_eq!(MediaType::mp4().file_extension(), Some("mp4")); assert_eq!(MediaType::markdown().file_extension(), Some("md")); assert_eq!(MediaType::plain_text().file_extension(), Some("txt")); assert_eq!(MediaType::png().file_extension(), Some("png")); diff --git a/crates/viewer/re_renderer/src/video/mod.rs b/crates/viewer/re_renderer/src/video/mod.rs index 7b981b48fe28..feb53e12b769 100644 --- a/crates/viewer/re_renderer/src/video/mod.rs +++ b/crates/viewer/re_renderer/src/video/mod.rs @@ -63,17 +63,16 @@ impl Video { /// - `video/mp4` pub fn load( render_context: &RenderContext, - media_type: Option<&str>, + media_type: &str, data: &[u8], ) -> Result { let data = match media_type { - Some("video/mp4") => re_video::load_mp4(data)?, - Some(media_type) => { + "video/mp4" => re_video::load_mp4(data)?, + media_type => { return Err(VideoError::Load(VideoLoadError::UnsupportedMediaType( media_type.to_owned(), ))) } - None => return Err(VideoError::Load(VideoLoadError::UnknownMediaType)), }; let decoder = decoder::VideoDecoder::new(render_context, data)?; diff --git a/crates/viewer/re_space_view_spatial/src/video_cache.rs b/crates/viewer/re_space_view_spatial/src/video_cache.rs index 8c6768dee6f4..38a92f87d2f3 100644 --- a/crates/viewer/re_space_view_spatial/src/video_cache.rs +++ b/crates/viewer/re_space_view_spatial/src/video_cache.rs @@ -33,7 +33,7 @@ impl VideoCache { name: &str, key: VideoCacheKey, video_data: &[u8], - media_type: Option<&str>, + media_type: Option, render_ctx: &RenderContext, ) -> Option>> { re_tracing::profile_function!(); @@ -41,14 +41,24 @@ impl VideoCache { let entry = self.0.entry(key).or_insert_with(|| { re_log::debug!("Loading video {name:?}…"); - let result = Video::load(render_ctx, media_type, video_data); - let video = match result { - Ok(video) => Some(Arc::new(Mutex::new(video))), - Err(err) => { - re_log::warn_once!("Failed to load video {name:?}: {err}"); - None + let media_type = media_type.or(MediaType::guess_from_data(video_data)); + + let video = if let 
Some(media_type) = media_type { + let result = Video::load(render_ctx, media_type.as_str(), video_data); + match result { + Ok(video) => Some(Arc::new(Mutex::new(video))), + Err(err) => { + re_log::warn_once!("Failed to load video {name:?}: {err}"); + None + } } + } else { + re_log::warn_once!( + "Failed to determine media type from data for video at {name:?}" + ); + None }; + Entry { used_this_frame: AtomicBool::new(false), video, diff --git a/crates/viewer/re_space_view_spatial/src/visualizers/videos.rs b/crates/viewer/re_space_view_spatial/src/visualizers/videos.rs index b89fcebeb5d5..0dc8284a625e 100644 --- a/crates/viewer/re_space_view_spatial/src/visualizers/videos.rs +++ b/crates/viewer/re_space_view_spatial/src/visualizers/videos.rs @@ -258,7 +258,7 @@ fn latest_at_query_video_from_datastore( media_type: media_type.clone(), }, &blob, - media_type.as_ref().map(|v| v.as_str()), + media_type, ctx.render_ctx?, ) }) diff --git a/rerun_cpp/src/rerun/components/media_type.hpp b/rerun_cpp/src/rerun/components/media_type.hpp index 80af7167ee33..9d8358e40e36 100644 --- a/rerun_cpp/src/rerun/components/media_type.hpp +++ b/rerun_cpp/src/rerun/components/media_type.hpp @@ -86,6 +86,9 @@ namespace rerun::components { return "model/stl"; } + // ------------------------------------------------------- + /// Videos: + /// [MP4 video](https://en.wikipedia.org/wiki/MP4_file_format): `video/mp4`. /// /// diff --git a/rerun_cpp/src/rerun/components/media_type_ext.cpp b/rerun_cpp/src/rerun/components/media_type_ext.cpp index 2c402b4986bc..bf41b335ad52 100644 --- a/rerun_cpp/src/rerun/components/media_type_ext.cpp +++ b/rerun_cpp/src/rerun/components/media_type_ext.cpp @@ -90,6 +90,9 @@ namespace rerun { return "model/stl"; } + // ------------------------------------------------------- + /// Videos: + /// [MP4 video](https://en.wikipedia.org/wiki/MP4_file_format): `video/mp4`. 
/// /// @@ -135,4 +138,4 @@ namespace rerun { return std::nullopt; } }; // namespace components -}; // namespace rerun +}; // namespace rerun From 831bc9212485e76dd881a27205d50ef132ca77fc Mon Sep 17 00:00:00 2001 From: Andreas Reich Date: Thu, 12 Sep 2024 20:06:57 +0200 Subject: [PATCH 02/21] expose read_frame_timestamps --- crates/store/re_data_loader/Cargo.toml | 2 +- .../re_data_loader/src/loader_archetype.rs | 144 +++++++----------- crates/store/re_types/Cargo.toml | 4 +- .../src/archetypes/asset_video_ext.rs | 51 +++++++ crates/store/re_video/src/lib.rs | 2 - 5 files changed, 108 insertions(+), 95 deletions(-) diff --git a/crates/store/re_data_loader/Cargo.toml b/crates/store/re_data_loader/Cargo.toml index 804ff9889f46..b048f70be539 100644 --- a/crates/store/re_data_loader/Cargo.toml +++ b/crates/store/re_data_loader/Cargo.toml @@ -31,7 +31,7 @@ re_log_types.workspace = true re_log.workspace = true re_smart_channel.workspace = true re_tracing.workspace = true -re_types = { workspace = true, features = ["image"] } +re_types = { workspace = true, features = ["image", "video"] } re_video.workspace = true ahash.workspace = true diff --git a/crates/store/re_data_loader/src/loader_archetype.rs b/crates/store/re_data_loader/src/loader_archetype.rs index e1433b7f54f3..b44e1cb893f3 100644 --- a/crates/store/re_data_loader/src/loader_archetype.rs +++ b/crates/store/re_data_loader/src/loader_archetype.rs @@ -1,12 +1,11 @@ use re_chunk::{Chunk, RowId}; use re_log_types::{EntityPath, TimeInt, TimePoint}; -use re_types::archetypes::VideoFrameReference; +use re_types::archetypes::{AssetVideo, VideoFrameReference}; +use re_types::datatypes::VideoTimeMode; use re_types::Archetype; use re_types::{components::MediaType, ComponentBatch}; -use arrow2::array::{ - ListArray as ArrowListArray, NullArray as ArrowNullArray, PrimitiveArray as ArrowPrimitiveArray, -}; +use arrow2::array::PrimitiveArray as ArrowPrimitiveArray; use arrow2::Either; use crate::{DataLoader, DataLoaderError, LoadedData}; @@ -220,100 +219,65 @@ fn load_video( let video_timeline = re_log_types::Timeline::new_temporal("video"); timepoint.insert(video_timeline, re_log_types::TimeInt::new_temporal(0)); - let media_type = MediaType::guess_from_path(filepath); + let video_asset = AssetVideo::new(contents); - // TODO(andreas): Video frame reference generation should be available as a utility from the SDK. + let video_frame_reference_chunk = match video_asset.read_frame_timestamps() { + Ok(video_timestamps) => { + // Time column. + let is_sorted = Some(true); - let video = if media_type.as_ref().map(|v| v.as_str()) == Some("video/mp4") { - match re_video::load_mp4(&contents) { - Ok(video) => Some(video), - Err(err) => { - re_log::warn!("Failed to load video asset {filepath:?}: {err}"); - None - } + let time_column_times = match video_timestamps + .first() + .map_or(VideoTimeMode::Nanoseconds, |t| t.time_mode) + { + // TODO(andreas): If we add other modes, how do we statically assert that we're dealing with time here? + VideoTimeMode::Nanoseconds => { + ArrowPrimitiveArray::from_values(video_timestamps.iter().map(|t| t.video_time)) + } + }; + let time_column = + re_chunk::TimeColumn::new(is_sorted, video_timeline, time_column_times); + + // VideoTimestamp component column. + let video_timestamp_batch = &video_timestamps as &dyn ComponentBatch; + let video_timestamp_list_array = video_timestamp_batch + .to_arrow_list_array() + .map_err(re_chunk::ChunkError::from)?; + + // Indicator column. 
+ let video_frame_reference_indicators = + ::Indicator::new_array(video_timestamps.len()); + let video_frame_reference_indicators_list_array = video_frame_reference_indicators + .to_arrow_list_array() + .map_err(re_chunk::ChunkError::from)?; + + Some(Chunk::from_auto_row_ids( + re_chunk::ChunkId::new(), + entity_path.clone(), + std::iter::once((video_timeline, time_column)).collect(), + [ + ( + VideoFrameReference::indicator().name(), + video_frame_reference_indicators_list_array, + ), + (video_timestamp_batch.name(), video_timestamp_list_array), + ] + .into_iter() + .collect(), + )?) } - } else { - re_log::warn!("Video asset {filepath:?} has an unsupported container format."); - None - }; - // Log video frame references on the `video` timeline. - let video_frame_reference_chunk = if let Some(video) = video { - let first_timestamp = video - .segments - .first() - .map_or(0, |segment| segment.timestamp.as_nanoseconds()); - - // Time column. - let is_sorted = Some(true); - let time_column_times = - ArrowPrimitiveArray::::from_values(video.segments.iter().flat_map(|segment| { - segment - .samples - .iter() - .map(|s| s.timestamp.as_nanoseconds() - first_timestamp) - })); - - let time_column = re_chunk::TimeColumn::new(is_sorted, video_timeline, time_column_times); - - // VideoTimestamp component column. - let video_timestamps = video - .segments - .iter() - .flat_map(|segment| { - segment.samples.iter().map(|s| { - // TODO(andreas): Use sample indices instead of timestamps once possible. - re_types::components::VideoTimestamp::from_nanoseconds( - s.timestamp.as_nanoseconds(), - ) - }) - }) - .collect::>(); - let video_timestamp_batch = &video_timestamps as &dyn ComponentBatch; - let video_timestamp_list_array = video_timestamp_batch - .to_arrow_list_array() - .map_err(re_chunk::ChunkError::from)?; - - // Indicator column. - let video_frame_reference_indicator_datatype = arrow2::datatypes::DataType::Null; - let video_frame_reference_indicator_list_array = ArrowListArray::::try_new( - ArrowListArray::::default_datatype( - video_frame_reference_indicator_datatype.clone(), - ), - video_timestamp_list_array.offsets().clone(), - Box::new(ArrowNullArray::new( - video_frame_reference_indicator_datatype, - video_timestamps.len(), - )), - None, - ) - .map_err(re_chunk::ChunkError::from)?; - - Some(Chunk::from_auto_row_ids( - re_chunk::ChunkId::new(), - entity_path.clone(), - std::iter::once((video_timeline, time_column)).collect(), - [ - ( - VideoFrameReference::indicator().name(), - video_frame_reference_indicator_list_array, - ), - (video_timestamp_batch.name(), video_timestamp_list_array), - ] - .into_iter() - .collect(), - )?) - } else { - None + Err(err) => { + re_log::warn_once!( + "Failed to read frame timestamps from video asset {filepath:?}: {err}" + ); + None + } }; // Put video asset into its own chunk since it can be fairly large. let video_asset_chunk = Chunk::builder(entity_path.clone()) - .with_archetype( - RowId::new(), - timepoint.clone(), - &re_types::archetypes::AssetVideo::from_file_contents(contents, media_type.clone()), - ) + .with_archetype(RowId::new(), timepoint.clone(), &video_asset) .with_component_batch(RowId::new(), timepoint.clone(), &ExperimentalFeature) .build()?; diff --git a/crates/store/re_types/Cargo.toml b/crates/store/re_types/Cargo.toml index b03ded33f1d0..022912fc5e96 100644 --- a/crates/store/re_types/Cargo.toml +++ b/crates/store/re_types/Cargo.toml @@ -25,7 +25,7 @@ features = ["all"] default = ["ecolor"] ## All features except `testing`. 
-all = ["ecolor", "egui_plot", "glam", "image", "mint", "serde"] +all = ["ecolor", "egui_plot", "glam", "image", "mint", "serde", "video"] ## Enable color conversions. ecolor = ["dep:ecolor"] @@ -39,7 +39,7 @@ glam = ["dep:glam"] ## Integration with the [`image`](https://crates.io/crates/image/) crate, plus JPEG support. image = ["dep:ecolor", "dep:image"] -## Conversion to/from our video format +## Inspecting video data. video = ["dep:re_video"] ## Enable (de)serialization using serde. diff --git a/crates/store/re_types/src/archetypes/asset_video_ext.rs b/crates/store/re_types/src/archetypes/asset_video_ext.rs index 2d7c3074eeda..d81d130a47bb 100644 --- a/crates/store/re_types/src/archetypes/asset_video_ext.rs +++ b/crates/store/re_types/src/archetypes/asset_video_ext.rs @@ -2,6 +2,20 @@ use crate::components::MediaType; use super::AssetVideo; +/// Errors that can occur when calling [`AssetVideo::extract_frame_timestamps`]. +#[cfg(feature = "video")] +#[derive(thiserror::Error, Debug)] +pub enum TimeStampExtractionError { + #[error("Failed to determine media type from data")] + FailedToDetermineMediaTypeFromData, + + #[error("Media type {0} is not supported.")] + UnsupportedMediaType(String), + + #[error(transparent)] + VideoLoadError(#[from] re_video::VideoLoadError), +} + impl AssetVideo { /// Creates a new [`AssetVideo`] from the file contents at `path`. /// @@ -38,4 +52,41 @@ impl AssetVideo { media_type, } } + + /// Determines the presentation timestamps of all frames inside the video. + /// + /// Returned timestamps are guranteed to be monotonically increasing. + #[cfg(feature = "video")] + pub fn read_frame_timestamps( + &self, + ) -> Result, TimeStampExtractionError> { + let media_type = if let Some(media_type) = self.media_type.as_ref() { + media_type.clone() + } else { + MediaType::guess_from_data(self.blob.as_slice()) + .ok_or(TimeStampExtractionError::FailedToDetermineMediaTypeFromData)? + }; + + let video = if media_type == MediaType::mp4() { + // TODO(andreas, jan): Should not copy all the contents just to determine the samples. + // -> should provide a mode that doesn't do that or (even better!) only store slices into a shared buffer. + re_video::load_mp4(self.blob.as_slice())? 
+ } else { + return Err(TimeStampExtractionError::UnsupportedMediaType( + media_type.to_string(), + )); + }; + + Ok(video + .segments + .iter() + .flat_map(|seg| { + seg.samples.iter().map(|sample| { + crate::components::VideoTimestamp::from_nanoseconds( + sample.timestamp.as_nanoseconds(), + ) + }) + }) + .collect()) + } } diff --git a/crates/store/re_video/src/lib.rs b/crates/store/re_video/src/lib.rs index f84a836aaaf5..02199427322e 100644 --- a/crates/store/re_video/src/lib.rs +++ b/crates/store/re_video/src/lib.rs @@ -114,7 +114,6 @@ pub enum VideoLoadError { InvalidConfigFormat, InvalidSamples, UnsupportedMediaType(String), - UnknownMediaType, UnsupportedCodec(String), } @@ -128,7 +127,6 @@ impl std::fmt::Display for VideoLoadError { Self::UnsupportedMediaType(type_) => { write!(f, "unsupported media type {type_:?}") } - Self::UnknownMediaType => write!(f, "unknown media type"), Self::UnsupportedCodec(codec) => write!(f, "unsupported codec {codec:?}"), } } From 68b3177bb77ded9915103f6b50112940db6d20f4 Mon Sep 17 00:00:00 2001 From: Andreas Reich Date: Fri, 13 Sep 2024 10:46:07 +0200 Subject: [PATCH 03/21] remove unused re_video dependency from data_loader --- Cargo.lock | 1 - crates/store/re_data_loader/Cargo.toml | 1 - 2 files changed, 2 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index b01de3d39c17..00186f1c5f59 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -4936,7 +4936,6 @@ dependencies = [ "re_smart_channel", "re_tracing", "re_types", - "re_video", "thiserror", "walkdir", ] diff --git a/crates/store/re_data_loader/Cargo.toml b/crates/store/re_data_loader/Cargo.toml index b048f70be539..b5f4afb54d96 100644 --- a/crates/store/re_data_loader/Cargo.toml +++ b/crates/store/re_data_loader/Cargo.toml @@ -32,7 +32,6 @@ re_log.workspace = true re_smart_channel.workspace = true re_tracing.workspace = true re_types = { workspace = true, features = ["image", "video"] } -re_video.workspace = true ahash.workspace = true anyhow.workspace = true From 0aac9fc38d75fbcf98ae482b102fcda45f7ee883 Mon Sep 17 00:00:00 2001 From: Andreas Reich Date: Fri, 13 Sep 2024 10:46:30 +0200 Subject: [PATCH 04/21] asset_video is now guaranteed to return nanoseconds --- .../store/re_data_loader/src/loader_archetype.rs | 14 +++----------- .../re_types/src/archetypes/asset_video_ext.rs | 4 ++-- 2 files changed, 5 insertions(+), 13 deletions(-) diff --git a/crates/store/re_data_loader/src/loader_archetype.rs b/crates/store/re_data_loader/src/loader_archetype.rs index b44e1cb893f3..44fad2df0a28 100644 --- a/crates/store/re_data_loader/src/loader_archetype.rs +++ b/crates/store/re_data_loader/src/loader_archetype.rs @@ -1,7 +1,6 @@ use re_chunk::{Chunk, RowId}; use re_log_types::{EntityPath, TimeInt, TimePoint}; use re_types::archetypes::{AssetVideo, VideoFrameReference}; -use re_types::datatypes::VideoTimeMode; use re_types::Archetype; use re_types::{components::MediaType, ComponentBatch}; @@ -221,20 +220,13 @@ fn load_video( let video_asset = AssetVideo::new(contents); - let video_frame_reference_chunk = match video_asset.read_frame_timestamps() { + let video_frame_reference_chunk = match video_asset.read_frame_timestamps_ns() { Ok(video_timestamps) => { // Time column. let is_sorted = Some(true); - let time_column_times = match video_timestamps - .first() - .map_or(VideoTimeMode::Nanoseconds, |t| t.time_mode) - { - // TODO(andreas): If we add other modes, how do we statically assert that we're dealing with time here? 
- VideoTimeMode::Nanoseconds => { - ArrowPrimitiveArray::from_values(video_timestamps.iter().map(|t| t.video_time)) - } - }; + let time_column_times = + ArrowPrimitiveArray::from_values(video_timestamps.iter().map(|t| t.video_time)); let time_column = re_chunk::TimeColumn::new(is_sorted, video_timeline, time_column_times); diff --git a/crates/store/re_types/src/archetypes/asset_video_ext.rs b/crates/store/re_types/src/archetypes/asset_video_ext.rs index d81d130a47bb..653c01c6916d 100644 --- a/crates/store/re_types/src/archetypes/asset_video_ext.rs +++ b/crates/store/re_types/src/archetypes/asset_video_ext.rs @@ -55,9 +55,9 @@ impl AssetVideo { /// Determines the presentation timestamps of all frames inside the video. /// - /// Returned timestamps are guranteed to be monotonically increasing. + /// Returned timestamps are in nanoseconds since start and are guaranteed to be monotonically increasing. #[cfg(feature = "video")] - pub fn read_frame_timestamps( + pub fn read_frame_timestamps_ns( &self, ) -> Result, TimeStampExtractionError> { let media_type = if let Some(media_type) = self.media_type.as_ref() { From 33aee55921cd94cf2b3e0e044745909a50287315 Mon Sep 17 00:00:00 2001 From: Andreas Reich Date: Fri, 13 Sep 2024 10:59:56 +0200 Subject: [PATCH 05/21] split read_frame_timestamp_ns, add rust example using it --- .../re_data_loader/src/loader_archetype.rs | 3 +- .../src/archetypes/asset_video_ext.rs | 38 +++++++++++------- .../all/archetypes/video_auto_frames.rs | 39 +++++++++++++++++++ 3 files changed, 65 insertions(+), 15 deletions(-) create mode 100644 docs/snippets/all/archetypes/video_auto_frames.rs diff --git a/crates/store/re_data_loader/src/loader_archetype.rs b/crates/store/re_data_loader/src/loader_archetype.rs index 44fad2df0a28..b7e1940ef0fe 100644 --- a/crates/store/re_data_loader/src/loader_archetype.rs +++ b/crates/store/re_data_loader/src/loader_archetype.rs @@ -222,9 +222,10 @@ fn load_video( let video_frame_reference_chunk = match video_asset.read_frame_timestamps_ns() { Ok(video_timestamps) => { + let video_timestamps = video_timestamps.collect::>(); + // Time column. let is_sorted = Some(true); - let time_column_times = ArrowPrimitiveArray::from_values(video_timestamps.iter().map(|t| t.video_time)); let time_column = diff --git a/crates/store/re_types/src/archetypes/asset_video_ext.rs b/crates/store/re_types/src/archetypes/asset_video_ext.rs index 653c01c6916d..112554c403ba 100644 --- a/crates/store/re_types/src/archetypes/asset_video_ext.rs +++ b/crates/store/re_types/src/archetypes/asset_video_ext.rs @@ -53,13 +53,15 @@ impl AssetVideo { } } - /// Determines the presentation timestamps of all frames inside the video. + /// Determines the presentation timestamps of all frames inside the video, returning raw time values. /// /// Returned timestamps are in nanoseconds since start and are guaranteed to be monotonically increasing. + /// + /// See also [`Self::read_frame_timestamps_ns`] for values wrapped in [`crate::components::VideoTimestamp`]. 
#[cfg(feature = "video")] - pub fn read_frame_timestamps_ns( + pub fn read_frame_timestamps_ns_raw( &self, - ) -> Result, TimeStampExtractionError> { + ) -> Result, TimeStampExtractionError> { let media_type = if let Some(media_type) = self.media_type.as_ref() { media_type.clone() } else { @@ -77,16 +79,24 @@ impl AssetVideo { )); }; - Ok(video - .segments - .iter() - .flat_map(|seg| { - seg.samples.iter().map(|sample| { - crate::components::VideoTimestamp::from_nanoseconds( - sample.timestamp.as_nanoseconds(), - ) - }) - }) - .collect()) + Ok(video.segments.into_iter().flat_map(|seg| { + seg.samples + .into_iter() + .map(|sample| sample.timestamp.as_nanoseconds()) + })) + } + + /// Determines the presentation timestamps of all frames inside the video. + /// + /// Returned timestamps are in nanoseconds since start and are guaranteed to be monotonically increasing. + /// + /// See also [`Self::read_frame_timestamps_ns_raw`] for values not wrapped in [`crate::components::VideoTimestamp`]. + #[cfg(feature = "video")] + pub fn read_frame_timestamps_ns( + &self, + ) -> Result, TimeStampExtractionError> + { + self.read_frame_timestamps_ns_raw() + .map(|timestamps| timestamps.map(crate::components::VideoTimestamp::from_nanoseconds)) } } diff --git a/docs/snippets/all/archetypes/video_auto_frames.rs b/docs/snippets/all/archetypes/video_auto_frames.rs new file mode 100644 index 000000000000..f0e5e4b7fa77 --- /dev/null +++ b/docs/snippets/all/archetypes/video_auto_frames.rs @@ -0,0 +1,39 @@ +//! Log a video asset using automatically determined frame references. +//! TODO(#7298): ⚠️ Video is currently only supported in the Rerun web viewer. + +use rerun::{external::anyhow, TimeColumn}; + +fn main() -> anyhow::Result<()> { + let args = _args; + let Some(path) = args.get(1) else { + // TODO(#7354): Only mp4 is supported for now. + anyhow::bail!("Usage: {} ", args[0]); + }; + + let rec = + rerun::RecordingStreamBuilder::new("rerun_example_asset_video_auto_frames").spawn()?; + + // Log video asset which is referred to by frame references. + rec.set_time_seconds("video_time", 0.0); // Make sure it's available on the timeline used for the frame references. + let video_asset = rerun::AssetVideo::from_file_path(path)?; + rec.log("video", &video_asset)?; + + // Send automatically determined video frame timestamps. + let video_timestamps = video_asset.read_frame_timestamps_ns()?.collect::>(); + let time_column = TimeColumn::new_nanos( + "video_time", + // Note timeline values don't have to be the same as the video timestamps. 
+ video_timestamps.iter().map(|ts| ts.video_time), + ); + let frame_reference_indicators = + ::Indicator::new_array( + video_timestamps.len(), + ); + rec.send_columns( + "video", + [time_column], + [&frame_reference_indicators as _, &video_timestamps as _], + )?; + + Ok(()) +} From dc8416157530a04b930ce2d7dc438c669b225012 Mon Sep 17 00:00:00 2001 From: Andreas Reich Date: Fri, 13 Sep 2024 11:54:55 +0200 Subject: [PATCH 06/21] rework how video loading and media type handling is done so it's easier to export functionality of re_video to other languages --- Cargo.lock | 1 + crates/store/README.md | 2 +- .../re_data_loader/src/loader_archetype.rs | 2 - .../src/archetypes/asset_video_ext.rs | 59 ++------------ crates/store/re_video/Cargo.toml | 1 + crates/store/re_video/src/lib.rs | 77 +++++++++++++------ crates/store/re_video/src/mp4.rs | 36 +++++++++ crates/viewer/re_renderer/src/video/mod.rs | 11 +-- .../re_space_view_spatial/src/video_cache.rs | 22 ++---- .../src/visualizers/videos.rs | 2 +- 10 files changed, 108 insertions(+), 105 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 00186f1c5f59..1fa7f86646db 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -5789,6 +5789,7 @@ version = "0.19.0-alpha.1+dev" dependencies = [ "mp4", "ordered-float", + "thiserror", ] [[package]] diff --git a/crates/store/README.md b/crates/store/README.md index b4841b792024..2c2c71c97606 100644 --- a/crates/store/README.md +++ b/crates/store/README.md @@ -1 +1 @@ -Creates related to storing, indexing, trasmitting, and handling data. +Creates related to storing, indexing, transmitting, and handling data. diff --git a/crates/store/re_data_loader/src/loader_archetype.rs b/crates/store/re_data_loader/src/loader_archetype.rs index b7e1940ef0fe..81f32fa81d80 100644 --- a/crates/store/re_data_loader/src/loader_archetype.rs +++ b/crates/store/re_data_loader/src/loader_archetype.rs @@ -222,8 +222,6 @@ fn load_video( let video_frame_reference_chunk = match video_asset.read_frame_timestamps_ns() { Ok(video_timestamps) => { - let video_timestamps = video_timestamps.collect::>(); - // Time column. let is_sorted = Some(true); let time_column_times = diff --git a/crates/store/re_types/src/archetypes/asset_video_ext.rs b/crates/store/re_types/src/archetypes/asset_video_ext.rs index 112554c403ba..9f29eb4d687a 100644 --- a/crates/store/re_types/src/archetypes/asset_video_ext.rs +++ b/crates/store/re_types/src/archetypes/asset_video_ext.rs @@ -2,20 +2,6 @@ use crate::components::MediaType; use super::AssetVideo; -/// Errors that can occur when calling [`AssetVideo::extract_frame_timestamps`]. -#[cfg(feature = "video")] -#[derive(thiserror::Error, Debug)] -pub enum TimeStampExtractionError { - #[error("Failed to determine media type from data")] - FailedToDetermineMediaTypeFromData, - - #[error("Media type {0} is not supported.")] - UnsupportedMediaType(String), - - #[error(transparent)] - VideoLoadError(#[from] re_video::VideoLoadError), -} - impl AssetVideo { /// Creates a new [`AssetVideo`] from the file contents at `path`. /// @@ -53,39 +39,6 @@ impl AssetVideo { } } - /// Determines the presentation timestamps of all frames inside the video, returning raw time values. - /// - /// Returned timestamps are in nanoseconds since start and are guaranteed to be monotonically increasing. - /// - /// See also [`Self::read_frame_timestamps_ns`] for values wrapped in [`crate::components::VideoTimestamp`]. 
- #[cfg(feature = "video")] - pub fn read_frame_timestamps_ns_raw( - &self, - ) -> Result, TimeStampExtractionError> { - let media_type = if let Some(media_type) = self.media_type.as_ref() { - media_type.clone() - } else { - MediaType::guess_from_data(self.blob.as_slice()) - .ok_or(TimeStampExtractionError::FailedToDetermineMediaTypeFromData)? - }; - - let video = if media_type == MediaType::mp4() { - // TODO(andreas, jan): Should not copy all the contents just to determine the samples. - // -> should provide a mode that doesn't do that or (even better!) only store slices into a shared buffer. - re_video::load_mp4(self.blob.as_slice())? - } else { - return Err(TimeStampExtractionError::UnsupportedMediaType( - media_type.to_string(), - )); - }; - - Ok(video.segments.into_iter().flat_map(|seg| { - seg.samples - .into_iter() - .map(|sample| sample.timestamp.as_nanoseconds()) - })) - } - /// Determines the presentation timestamps of all frames inside the video. /// /// Returned timestamps are in nanoseconds since start and are guaranteed to be monotonically increasing. @@ -94,9 +47,13 @@ impl AssetVideo { #[cfg(feature = "video")] pub fn read_frame_timestamps_ns( &self, - ) -> Result, TimeStampExtractionError> - { - self.read_frame_timestamps_ns_raw() - .map(|timestamps| timestamps.map(crate::components::VideoTimestamp::from_nanoseconds)) + ) -> Result, re_video::VideoLoadError> { + Ok(re_video::VideoData::load_from_bytes( + self.blob.as_slice(), + self.media_type.as_ref().map(|m| m.as_str()), + )? + .frame_timestamps_ns() + .map(crate::components::VideoTimestamp::from_nanoseconds) + .collect::>()) } } diff --git a/crates/store/re_video/Cargo.toml b/crates/store/re_video/Cargo.toml index 24b0072ab5f3..be2b34667708 100644 --- a/crates/store/re_video/Cargo.toml +++ b/crates/store/re_video/Cargo.toml @@ -28,3 +28,4 @@ features = ["all"] [dependencies] mp4.workspace = true ordered-float.workspace = true +thiserror.workspace = true diff --git a/crates/store/re_video/src/lib.rs b/crates/store/re_video/src/lib.rs index 02199427322e..0b172776d924 100644 --- a/crates/store/re_video/src/lib.rs +++ b/crates/store/re_video/src/lib.rs @@ -3,7 +3,6 @@ //! The entry point is [`load_mp4`], which produces an instance of [`VideoData`]. mod mp4; -pub use mp4::load_mp4; use ordered_float::OrderedFloat; /// Decoded video data. @@ -22,6 +21,43 @@ pub struct VideoData { pub data: Vec, } +impl VideoData { + /// Loads a video from the given data. + /// + /// TODO(andreas, jan): This should not copy the data, but instead store slices into a shared buffer. + /// at the very least the should be a way to extract only metadata. + pub fn load_from_bytes(data: &[u8], media_type: Option<&str>) -> Result { + // Media type guessing here should be identical to to `re_types::MediaType::guess_from_data`, + // but we don't want to depend on `re_types` here. + let media_type = if let Some(media_type) = media_type { + media_type.to_owned() + } else if mp4::is_mp4(data) { + "video/mp4".to_owned() + } else { + // Technically this means that we failed to determine the media type alltogether, + // but we don't want to call it `FailedToDetermineMediaType` since the rest of Rerun has + // access to `re_types::components::MediaType` which has a much wider range of media type detection. 
+ return Err(VideoLoadError::UnsupportedVideoType); + }; + + match media_type.as_str() { + "video/mp4" => mp4::load_mp4(data), + media_type => Err(VideoLoadError::UnsupportedMediaType(media_type.to_owned())), + } + } + + /// Determines the presentation timestamps of all frames inside a video, returning raw time values. + /// + /// Returned timestamps are in nanoseconds since start and are guaranteed to be monotonically increasing. + pub fn frame_timestamps_ns(&self) -> impl Iterator + '_ { + self.segments.iter().flat_map(|seg| { + seg.samples + .iter() + .map(|sample| sample.timestamp.as_nanoseconds()) + }) + } +} + /// A segment of a video. #[derive(Clone)] pub struct Segment { @@ -107,37 +143,28 @@ impl std::ops::Sub for TimeMs { } /// Errors that can occur when loading a video. -#[derive(Debug)] +#[derive(thiserror::Error, Debug)] pub enum VideoLoadError { - ParseMp4(::mp4::Error), + #[error("Failed to determine media type from data: {0}")] + ParseMp4(#[from] ::mp4::Error), + + #[error("Video file has no video tracks")] NoVideoTrack, + + #[error("Video file track config is invalid")] InvalidConfigFormat, + + #[error("Video file has invalid sample entries")] InvalidSamples, - UnsupportedMediaType(String), - UnsupportedCodec(String), -} -impl std::fmt::Display for VideoLoadError { - fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - match self { - Self::ParseMp4(err) => write!(f, "failed to parse video: {err}"), - Self::NoVideoTrack => write!(f, "video file has no video tracks"), - Self::InvalidConfigFormat => write!(f, "video file track config is invalid"), - Self::InvalidSamples => write!(f, "video file has invalid sample entries"), - Self::UnsupportedMediaType(type_) => { - write!(f, "unsupported media type {type_:?}") - } - Self::UnsupportedCodec(codec) => write!(f, "unsupported codec {codec:?}"), - } - } -} + #[error("Video file has unsupported media type {0}")] + UnsupportedMediaType(String), -impl std::error::Error for VideoLoadError {} + #[error("Video file has unsupported format")] + UnsupportedVideoType, -impl From<::mp4::Error> for VideoLoadError { - fn from(value: ::mp4::Error) -> Self { - Self::ParseMp4(value) - } + #[error("Video file has unsupported codec {0}")] + UnsupportedCodec(String), } impl std::fmt::Debug for VideoData { diff --git a/crates/store/re_video/src/mp4.rs b/crates/store/re_video/src/mp4.rs index b3806121765c..5d28efb91805 100644 --- a/crates/store/re_video/src/mp4.rs +++ b/crates/store/re_video/src/mp4.rs @@ -94,3 +94,39 @@ pub fn load_mp4(bytes: &[u8]) -> Result { segments, }) } + +/// Returns whether a buffer is MP4 video data. +/// +/// From `infer` crate. 
+pub fn is_mp4(buf: &[u8]) -> bool { + buf.len() > 11 + && (buf[4] == b'f' && buf[5] == b't' && buf[6] == b'y' && buf[7] == b'p') + && ((buf[8] == b'a' && buf[9] == b'v' && buf[10] == b'c' && buf[11] == b'1') + || (buf[8] == b'd' && buf[9] == b'a' && buf[10] == b's' && buf[11] == b'h') + || (buf[8] == b'i' && buf[9] == b's' && buf[10] == b'o' && buf[11] == b'2') + || (buf[8] == b'i' && buf[9] == b's' && buf[10] == b'o' && buf[11] == b'3') + || (buf[8] == b'i' && buf[9] == b's' && buf[10] == b'o' && buf[11] == b'4') + || (buf[8] == b'i' && buf[9] == b's' && buf[10] == b'o' && buf[11] == b'5') + || (buf[8] == b'i' && buf[9] == b's' && buf[10] == b'o' && buf[11] == b'6') + || (buf[8] == b'i' && buf[9] == b's' && buf[10] == b'o' && buf[11] == b'm') + || (buf[8] == b'm' && buf[9] == b'm' && buf[10] == b'p' && buf[11] == b'4') + || (buf[8] == b'm' && buf[9] == b'p' && buf[10] == b'4' && buf[11] == b'1') + || (buf[8] == b'm' && buf[9] == b'p' && buf[10] == b'4' && buf[11] == b'2') + || (buf[8] == b'm' && buf[9] == b'p' && buf[10] == b'4' && buf[11] == b'v') + || (buf[8] == b'm' && buf[9] == b'p' && buf[10] == b'7' && buf[11] == b'1') + || (buf[8] == b'M' && buf[9] == b'S' && buf[10] == b'N' && buf[11] == b'V') + || (buf[8] == b'N' && buf[9] == b'D' && buf[10] == b'A' && buf[11] == b'S') + || (buf[8] == b'N' && buf[9] == b'D' && buf[10] == b'S' && buf[11] == b'C') + || (buf[8] == b'N' && buf[9] == b'S' && buf[10] == b'D' && buf[11] == b'C') + || (buf[8] == b'N' && buf[9] == b'D' && buf[10] == b'S' && buf[11] == b'H') + || (buf[8] == b'N' && buf[9] == b'D' && buf[10] == b'S' && buf[11] == b'M') + || (buf[8] == b'N' && buf[9] == b'D' && buf[10] == b'S' && buf[11] == b'P') + || (buf[8] == b'N' && buf[9] == b'D' && buf[10] == b'S' && buf[11] == b'S') + || (buf[8] == b'N' && buf[9] == b'D' && buf[10] == b'X' && buf[11] == b'C') + || (buf[8] == b'N' && buf[9] == b'D' && buf[10] == b'X' && buf[11] == b'H') + || (buf[8] == b'N' && buf[9] == b'D' && buf[10] == b'X' && buf[11] == b'M') + || (buf[8] == b'N' && buf[9] == b'D' && buf[10] == b'X' && buf[11] == b'P') + || (buf[8] == b'N' && buf[9] == b'D' && buf[10] == b'X' && buf[11] == b'S') + || (buf[8] == b'F' && buf[9] == b'4' && buf[10] == b'V' && buf[11] == b' ') + || (buf[8] == b'F' && buf[9] == b'4' && buf[10] == b'P' && buf[11] == b' ')) +} diff --git a/crates/viewer/re_renderer/src/video/mod.rs b/crates/viewer/re_renderer/src/video/mod.rs index feb53e12b769..0c54d5efc2e9 100644 --- a/crates/viewer/re_renderer/src/video/mod.rs +++ b/crates/viewer/re_renderer/src/video/mod.rs @@ -63,17 +63,10 @@ impl Video { /// - `video/mp4` pub fn load( render_context: &RenderContext, - media_type: &str, data: &[u8], + media_type: Option<&str>, ) -> Result { - let data = match media_type { - "video/mp4" => re_video::load_mp4(data)?, - media_type => { - return Err(VideoError::Load(VideoLoadError::UnsupportedMediaType( - media_type.to_owned(), - ))) - } - }; + let data = re_video::VideoData::load_from_bytes(data, media_type)?; let decoder = decoder::VideoDecoder::new(render_context, data)?; Ok(Self { decoder }) diff --git a/crates/viewer/re_space_view_spatial/src/video_cache.rs b/crates/viewer/re_space_view_spatial/src/video_cache.rs index 38a92f87d2f3..4b197a7d5fa7 100644 --- a/crates/viewer/re_space_view_spatial/src/video_cache.rs +++ b/crates/viewer/re_space_view_spatial/src/video_cache.rs @@ -33,7 +33,7 @@ impl VideoCache { name: &str, key: VideoCacheKey, video_data: &[u8], - media_type: Option, + media_type: Option<&str>, render_ctx: &RenderContext, ) -> Option>> { 
re_tracing::profile_function!(); @@ -41,22 +41,12 @@ impl VideoCache { let entry = self.0.entry(key).or_insert_with(|| { re_log::debug!("Loading video {name:?}…"); - let media_type = media_type.or(MediaType::guess_from_data(video_data)); - - let video = if let Some(media_type) = media_type { - let result = Video::load(render_ctx, media_type.as_str(), video_data); - match result { - Ok(video) => Some(Arc::new(Mutex::new(video))), - Err(err) => { - re_log::warn_once!("Failed to load video {name:?}: {err}"); - None - } + let video = match Video::load(render_ctx, video_data, media_type) { + Ok(video) => Some(Arc::new(Mutex::new(video))), + Err(err) => { + re_log::warn_once!("Failed to load video {name:?}: {err}"); + None } - } else { - re_log::warn_once!( - "Failed to determine media type from data for video at {name:?}" - ); - None }; Entry { diff --git a/crates/viewer/re_space_view_spatial/src/visualizers/videos.rs b/crates/viewer/re_space_view_spatial/src/visualizers/videos.rs index 0dc8284a625e..340e99763c69 100644 --- a/crates/viewer/re_space_view_spatial/src/visualizers/videos.rs +++ b/crates/viewer/re_space_view_spatial/src/visualizers/videos.rs @@ -258,7 +258,7 @@ fn latest_at_query_video_from_datastore( media_type: media_type.clone(), }, &blob, - media_type, + media_type.as_ref().map(|m| m.as_str()), ctx.render_ctx?, ) }) From 2103fa0e06092846316080157de992738747ce4d Mon Sep 17 00:00:00 2001 From: Andreas Reich Date: Fri, 13 Sep 2024 14:46:11 +0200 Subject: [PATCH 07/21] Expose asset_video_read_frame_timestamps_ns to python --- Cargo.lock | 1 + .../src/archetypes/asset_video_ext.rs | 2 - .../all/archetypes/video_auto_frames.py | 27 +++++++++++++ .../all/archetypes/video_auto_frames.rs | 8 ++-- .../all/archetypes/video_manual_frames.py | 2 +- .../all/archetypes/video_manual_frames.rs | 2 +- docs/snippets/snippets.toml | 3 ++ rerun_py/Cargo.toml | 1 + .../rerun/archetypes/asset_video_ext.py | 20 ++++++++++ rerun_py/src/arrow.rs | 5 ++- rerun_py/src/lib.rs | 1 + rerun_py/src/python_bridge.rs | 9 +++-- rerun_py/src/video.rs | 39 +++++++++++++++++++ 13 files changed, 108 insertions(+), 12 deletions(-) create mode 100644 docs/snippets/all/archetypes/video_auto_frames.py create mode 100644 rerun_py/src/video.rs diff --git a/Cargo.lock b/Cargo.lock index 1fa7f86646db..48e16a576c9f 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -6169,6 +6169,7 @@ dependencies = [ "re_log_types", "re_memory", "re_sdk", + "re_video", "re_web_viewer_server", "re_ws_comms", "uuid", diff --git a/crates/store/re_types/src/archetypes/asset_video_ext.rs b/crates/store/re_types/src/archetypes/asset_video_ext.rs index 9f29eb4d687a..33a5ad990fbf 100644 --- a/crates/store/re_types/src/archetypes/asset_video_ext.rs +++ b/crates/store/re_types/src/archetypes/asset_video_ext.rs @@ -42,8 +42,6 @@ impl AssetVideo { /// Determines the presentation timestamps of all frames inside the video. /// /// Returned timestamps are in nanoseconds since start and are guaranteed to be monotonically increasing. - /// - /// See also [`Self::read_frame_timestamps_ns_raw`] for values not wrapped in [`crate::components::VideoTimestamp`]. 
#[cfg(feature = "video")] pub fn read_frame_timestamps_ns( &self, diff --git a/docs/snippets/all/archetypes/video_auto_frames.py b/docs/snippets/all/archetypes/video_auto_frames.py new file mode 100644 index 000000000000..05c030da11f3 --- /dev/null +++ b/docs/snippets/all/archetypes/video_auto_frames.py @@ -0,0 +1,27 @@ +"""Log a video asset using automatically determined frame references.""" +# TODO(#7298): ⚠️ Video is currently only supported in the Rerun web viewer. + +import sys + +import rerun as rr + +if len(sys.argv) < 2: + # TODO(#7354): Only mp4 is supported for now. + print(f"Usage: {sys.argv[0]} ") + sys.exit(1) + +rr.init("rerun_example_asset_video_auto_frames", spawn=True) + +# Log video asset which is referred to by frame references. +rr.set_time_seconds("video_time", 0) # Make sure it's available on the timeline used for the frame references. +video_asset = rr.AssetVideo(path=sys.argv[1]) +rr.log("video", video_asset) + +# Send automatically determined video frame timestamps. +video_timestamps_ns = video_asset.read_frame_timestamps_ns() +rr.send_columns( + "video", + # Note timeline values don't have to be the same as the video timestamps. + times=[rr.TimeNanosColumn("video_time", video_timestamps_ns)], + components=[rr.VideoFrameReference.indicator(), rr.components.VideoTimestamp.nanoseconds(video_timestamps_ns)], +) diff --git a/docs/snippets/all/archetypes/video_auto_frames.rs b/docs/snippets/all/archetypes/video_auto_frames.rs index f0e5e4b7fa77..c6f5fc4e9c29 100644 --- a/docs/snippets/all/archetypes/video_auto_frames.rs +++ b/docs/snippets/all/archetypes/video_auto_frames.rs @@ -19,20 +19,20 @@ fn main() -> anyhow::Result<()> { rec.log("video", &video_asset)?; // Send automatically determined video frame timestamps. - let video_timestamps = video_asset.read_frame_timestamps_ns()?.collect::>(); + let video_timestamps_ns = video_asset.read_frame_timestamps_ns()?.collect::>(); let time_column = TimeColumn::new_nanos( "video_time", // Note timeline values don't have to be the same as the video timestamps. - video_timestamps.iter().map(|ts| ts.video_time), + video_timestamps_ns.iter().map(|ts| ts.video_time), ); let frame_reference_indicators = ::Indicator::new_array( - video_timestamps.len(), + video_timestamps_ns.len(), ); rec.send_columns( "video", [time_column], - [&frame_reference_indicators as _, &video_timestamps as _], + [&frame_reference_indicators as _, &video_timestamps_ns as _], )?; Ok(()) diff --git a/docs/snippets/all/archetypes/video_manual_frames.py b/docs/snippets/all/archetypes/video_manual_frames.py index db69b510dd01..2cfe650c8bc9 100644 --- a/docs/snippets/all/archetypes/video_manual_frames.py +++ b/docs/snippets/all/archetypes/video_manual_frames.py @@ -19,7 +19,7 @@ # Send frame references for every 0.1 seconds over a total of 10 seconds. # Naturally, this will result in a choppy playback and only makes sense if the video is 10 seconds or longer. -# TODO(#7368): Point to example using `send_video_frames`. +# To get all frame times of a video use `rr.AssetVideo.read_frame_timestamps_ns`. # # Use `send_columns` to send all frame references in a single call. 
times = np.arange(0.0, 10.0, 0.1) diff --git a/docs/snippets/all/archetypes/video_manual_frames.rs b/docs/snippets/all/archetypes/video_manual_frames.rs index 24e7f25fe799..12d9fd41fdd9 100644 --- a/docs/snippets/all/archetypes/video_manual_frames.rs +++ b/docs/snippets/all/archetypes/video_manual_frames.rs @@ -19,7 +19,7 @@ fn main() -> anyhow::Result<()> { // Send frame references for every 0.1 seconds over a total of 10 seconds. // Naturally, this will result in a choppy playback and only makes sense if the video is 10 seconds or longer. - // TODO(#7368): Point to example using `send_video_frames`. + // To get all frame times of a video use `rerun::AssetVideo::read_frame_timestamps_ns`. // // Use `send_columns` to send all frame references in a single call. let times = (0..(10 * 10)).map(|t| t as f64 * 0.1).collect::>(); diff --git a/docs/snippets/snippets.toml b/docs/snippets/snippets.toml index 2be554f6a1bd..dc4ae3d66749 100644 --- a/docs/snippets/snippets.toml +++ b/docs/snippets/snippets.toml @@ -191,3 +191,6 @@ quick_start = [ # These examples don't have exactly the same implementation. "archetypes/video_manual_frames" = [ "$config_dir/../../tests/assets/video/Big_Buck_Bunny_1080_10s_av1.mp4", ] +"archetypes/video_auto_frames" = [ + "$config_dir/../../tests/assets/video/Big_Buck_Bunny_1080_10s_av1.mp4", +] diff --git a/rerun_py/Cargo.toml b/rerun_py/Cargo.toml index e43151c47f2a..81e276a8fd28 100644 --- a/rerun_py/Cargo.toml +++ b/rerun_py/Cargo.toml @@ -46,6 +46,7 @@ re_log = { workspace = true, features = ["setup"] } re_log_types.workspace = true re_memory.workspace = true re_sdk = { workspace = true, features = ["data_loaders"] } +re_video.workspace = true re_web_viewer_server = { workspace = true, optional = true } re_ws_comms = { workspace = true, optional = true } diff --git a/rerun_py/rerun_sdk/rerun/archetypes/asset_video_ext.py b/rerun_py/rerun_sdk/rerun/archetypes/asset_video_ext.py index 23cc56fb660a..bb927c515918 100644 --- a/rerun_py/rerun_sdk/rerun/archetypes/asset_video_ext.py +++ b/rerun_py/rerun_sdk/rerun/archetypes/asset_video_ext.py @@ -3,6 +3,10 @@ import pathlib from typing import Any +import numpy as np +import numpy.typing as npt +import rerun_bindings as bindings + from .. import datatypes from ..error_utils import catch_and_log_exceptions @@ -59,3 +63,19 @@ def __init__( return self.__attrs_clear__() + + def read_frame_timestamps_ns(self: Any) -> npt.NDArray[np.int64]: + """ + Determines the presentation timestamps of all frames inside the video. + + Throws a runtime exception if the video cannot be read. + """ + if self.blob is not None: + video_buffer = self.blob.as_arrow_array() + else: + raise RuntimeError("Asset video has no video buffer") + + if self.media_type is not None: + media_type = self.media_type.as_arrow_array().storage[0].as_py() + + return np.array(bindings.asset_video_read_frame_timestamps_ns(video_buffer, media_type), dtype=np.int64) diff --git a/rerun_py/src/arrow.rs b/rerun_py/src/arrow.rs index dde1ce3cbbdc..4adb8e710c12 100644 --- a/rerun_py/src/arrow.rs +++ b/rerun_py/src/arrow.rs @@ -24,7 +24,10 @@ use re_sdk::{ComponentName, EntityPath, Timeline}; /// Perform conversion between a pyarrow array to arrow2 types. /// /// `name` is the name of the Rerun component, and the name of the pyarrow `Field` (column name). 
-fn array_to_rust(arrow_array: &Bound<'_, PyAny>, name: &str) -> PyResult<(Box, Field)> { +pub fn array_to_rust( + arrow_array: &Bound<'_, PyAny>, + name: &str, +) -> PyResult<(Box, Field)> { let py_array: PyArrowType = arrow_array.extract()?; let arr1_array = make_array(py_array.0); diff --git a/rerun_py/src/lib.rs b/rerun_py/src/lib.rs index 73c8e4a368a5..cd625e1f89b4 100644 --- a/rerun_py/src/lib.rs +++ b/rerun_py/src/lib.rs @@ -15,3 +15,4 @@ static GLOBAL: AccountingAllocator = mod arrow; mod python_bridge; +mod video; diff --git a/rerun_py/src/python_bridge.rs b/rerun_py/src/python_bridge.rs index 15b472e01dce..ad194790371f 100644 --- a/rerun_py/src/python_bridge.rs +++ b/rerun_py/src/python_bridge.rs @@ -1,6 +1,6 @@ -#![allow(clippy::needless_pass_by_value)] // A lot of arguments to #[pufunction] need to be by value -#![allow(clippy::borrow_deref_ref)] // False positive due to #[pufunction] macro -#![allow(unsafe_op_in_unsafe_fn)] // False positive due to #[pufunction] macro +#![allow(clippy::needless_pass_by_value)] // A lot of arguments to #[pyfunction] need to be by value +#![allow(clippy::borrow_deref_ref)] // False positive due to #[pyfunction] macro +#![allow(unsafe_op_in_unsafe_fn)] // False positive due to #[pyfunction] macro use std::collections::HashMap; use std::io::IsTerminal as _; @@ -169,6 +169,9 @@ fn rerun_bindings(_py: Python<'_>, m: &Bound<'_, PyModule>) -> PyResult<()> { m.add_function(wrap_pyfunction!(escape_entity_path_part, m)?)?; m.add_function(wrap_pyfunction!(new_entity_path, m)?)?; + use crate::video::asset_video_read_frame_timestamps_ns; + m.add_function(wrap_pyfunction!(asset_video_read_frame_timestamps_ns, m)?)?; + Ok(()) } diff --git a/rerun_py/src/video.rs b/rerun_py/src/video.rs new file mode 100644 index 000000000000..c0b6a9ddd01a --- /dev/null +++ b/rerun_py/src/video.rs @@ -0,0 +1,39 @@ +#![allow(unsafe_op_in_unsafe_fn)] // False positive due to #[pyfunction] macro + +use pyo3::{exceptions::PyRuntimeError, pyfunction, Bound, PyAny, PyResult}; + +use crate::arrow::array_to_rust; + +/// Reads the timestamps of all frames in a video asset. +/// +/// Implementation note: +/// On the Python side we start out with a pyarrow array of bytes. Converting it to +/// Python `bytes` can be done with `to_pybytes` but this requires copying the data. +/// So instead, we pass the arrow array directly. +#[pyfunction] +pub fn asset_video_read_frame_timestamps_ns( + video_bytes_arrow_array: &Bound<'_, PyAny>, + media_type: Option<&str>, +) -> PyResult> { + let video_bytes_arrow_array = + array_to_rust(video_bytes_arrow_array, "rerun.components.Blob")?.0; + + let video_bytes_arrow_uint8_array = video_bytes_arrow_array + .as_any() + .downcast_ref::>() + .and_then(|arr| arr.values().as_any().downcast_ref::()) + .ok_or_else(|| { + PyRuntimeError::new_err(format!( + "Expected arrow array to be a list with a single uint8 array, instead it has the datatype {:?}", + video_bytes_arrow_array.data_type() + )) + })?; + + Ok(re_video::VideoData::load_from_bytes( + video_bytes_arrow_uint8_array.values().as_slice(), + media_type, + ) + .map_err(|err| PyRuntimeError::new_err(err.to_string()))? 
+ .frame_timestamps_ns() + .collect()) +} From 8b8560ef594d10337c7da97668fa043a22991865 Mon Sep 17 00:00:00 2001 From: Andreas Reich Date: Fri, 13 Sep 2024 16:14:40 +0200 Subject: [PATCH 08/21] C++ implementation of read_frame_timestamps_ns --- Cargo.lock | 1 + .../re_types/src/archetypes/asset_video.rs | 2 +- .../src/archetypes/video_frame_reference.rs | 2 +- crates/top/rerun_c/Cargo.toml | 1 + crates/top/rerun_c/src/lib.rs | 4 ++ crates/top/rerun_c/src/video.rs | 53 +++++++++++++++++++ .../all/archetypes/video_auto_frames.cpp | 51 ++++++++++++++++++ .../all/archetypes/video_manual_frames.cpp | 8 +-- .../src/rerun/archetypes/asset_video.hpp | 8 ++- .../src/rerun/archetypes/asset_video_ext.cpp | 42 +++++++++++++++ rerun_cpp/src/rerun/c/rerun.h | 28 +++++++++- rerun_cpp/src/rerun/error.hpp | 6 ++- rerun_cpp/src/rerun/result.hpp | 2 +- 13 files changed, 197 insertions(+), 11 deletions(-) create mode 100644 crates/top/rerun_c/src/video.rs create mode 100644 docs/snippets/all/archetypes/video_auto_frames.cpp diff --git a/Cargo.lock b/Cargo.lock index 48e16a576c9f..3cdc51c26fa5 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -6145,6 +6145,7 @@ dependencies = [ "re_arrow2", "re_log", "re_sdk", + "re_video", ] [[package]] diff --git a/crates/store/re_types/src/archetypes/asset_video.rs b/crates/store/re_types/src/archetypes/asset_video.rs index ba29d3d91e6f..027ca3b6ef8b 100644 --- a/crates/store/re_types/src/archetypes/asset_video.rs +++ b/crates/store/re_types/src/archetypes/asset_video.rs @@ -50,7 +50,7 @@ use ::re_types_core::{DeserializationError, DeserializationResult}; /// /// // Send frame references for every 0.1 seconds over a total of 10 seconds. /// // Naturally, this will result in a choppy playback and only makes sense if the video is 10 seconds or longer. -/// // TODO(#7368): Point to example using `send_video_frames`. +/// // To get all frame times of a video use `rerun::AssetVideo::read_frame_timestamps_ns`. /// // /// // Use `send_columns` to send all frame references in a single call. /// let times = (0..(10 * 10)).map(|t| t as f64 * 0.1).collect::>(); diff --git a/crates/store/re_types/src/archetypes/video_frame_reference.rs b/crates/store/re_types/src/archetypes/video_frame_reference.rs index aeca145292ea..e9d3ba6a6126 100644 --- a/crates/store/re_types/src/archetypes/video_frame_reference.rs +++ b/crates/store/re_types/src/archetypes/video_frame_reference.rs @@ -47,7 +47,7 @@ use ::re_types_core::{DeserializationError, DeserializationResult}; /// /// // Send frame references for every 0.1 seconds over a total of 10 seconds. /// // Naturally, this will result in a choppy playback and only makes sense if the video is 10 seconds or longer. -/// // TODO(#7368): Point to example using `send_video_frames`. +/// // To get all frame times of a video use `rerun::AssetVideo::read_frame_timestamps_ns`. /// // /// // Use `send_columns` to send all frame references in a single call. 
/// let times = (0..(10 * 10)).map(|t| t as f64 * 0.1).collect::>(); diff --git a/crates/top/rerun_c/Cargo.toml b/crates/top/rerun_c/Cargo.toml index d1c21f196cc9..c949fce3ec04 100644 --- a/crates/top/rerun_c/Cargo.toml +++ b/crates/top/rerun_c/Cargo.toml @@ -36,6 +36,7 @@ test = false [dependencies] re_log = { workspace = true, features = ["setup"] } re_sdk = { workspace = true, features = ["data_loaders"] } +re_video.workspace = true ahash.workspace = true arrow2.workspace = true diff --git a/crates/top/rerun_c/src/lib.rs b/crates/top/rerun_c/src/lib.rs index 3cfb74edcafd..d1f8785a6baf 100644 --- a/crates/top/rerun_c/src/lib.rs +++ b/crates/top/rerun_c/src/lib.rs @@ -10,6 +10,7 @@ mod component_type_registry; mod error; mod ptr; mod recording_streams; +mod video; use std::{ collections::BTreeMap, @@ -286,6 +287,9 @@ pub enum CErrorCode { ArrowFfiSchemaImportError, ArrowFfiArrayImportError, + _CategoryUtilities = 0x0001_0000, + VideoLoadError, + Unknown = 0xFFFF_FFFF, } diff --git a/crates/top/rerun_c/src/video.rs b/crates/top/rerun_c/src/video.rs new file mode 100644 index 000000000000..4bc91542c050 --- /dev/null +++ b/crates/top/rerun_c/src/video.rs @@ -0,0 +1,53 @@ +use core::num; + +use crate::{CError, CErrorCode, CStringView}; + +#[allow(unsafe_code)] +#[no_mangle] +pub extern "C" fn rr_video_asset_read_frame_timestamps_ns( + video_bytes: *const u8, + video_bytes_len: u64, + media_type: CStringView, + alloc_context: *mut std::ffi::c_void, + alloc_func: Option< + extern "C" fn(context: *mut std::ffi::c_void, num_timestamps: u32) -> *mut i64, + >, + error: *mut CError, +) -> *mut i64 { + if video_bytes.is_null() { + CError::unexpected_null("video_bytes").write_error(error); + return std::ptr::null_mut(); + } + let Some(alloc_func) = alloc_func else { + CError::unexpected_null("alloc_func").write_error(error); + return std::ptr::null_mut(); + }; + + let video_bytes = unsafe { std::slice::from_raw_parts(video_bytes, video_bytes_len as usize) }; + let media_type_str = media_type.as_str("media_type").ok(); + + let video = match re_video::VideoData::load_from_bytes(video_bytes, media_type_str) { + Ok(video) => video, + Err(err) => { + CError::new( + CErrorCode::VideoLoadError, + &format!("Failed to load video data: {err}"), + ) + .write_error(error); + return std::ptr::null_mut(); + } + }; + + // TODO(andreas): Producing this iterator isn't super expensive, but an ExactSizeIterator would be good to avoid + // the somewhat brittle size-oracle here! + // (note that since we create a slice from the allocation, this won't be able to go out of bound even if this value is too small) + let num_timestamps = video.segments.iter().map(|s| s.samples.len()).sum(); + let timestamps_ns_memory = alloc_func(alloc_context, num_timestamps as u32); + let timestamps_ns = + unsafe { std::slice::from_raw_parts_mut(timestamps_ns_memory, num_timestamps) }; + for (segment, timestamp_ns) in video.frame_timestamps_ns().zip(timestamps_ns.iter_mut()) { + *timestamp_ns = segment; + } + + timestamps_ns.as_mut_ptr() +} diff --git a/docs/snippets/all/archetypes/video_auto_frames.cpp b/docs/snippets/all/archetypes/video_auto_frames.cpp new file mode 100644 index 000000000000..2def780149df --- /dev/null +++ b/docs/snippets/all/archetypes/video_auto_frames.cpp @@ -0,0 +1,51 @@ +// Log a video asset using automatically determined frame references. +// TODO(#7298): ⚠️ Video is currently only supported in the Rerun web viewer. 
+ +#include + +#include + +using namespace std::chrono_literals; + +int main(int argc, char* argv[]) { + if (argc < 2) { + // TODO(#7354): Only mp4 is supported for now. + std::cerr << "Usage: " << argv[0] << " " << std::endl; + return 1; + } + + const auto path = argv[1]; + + const auto rec = rerun::RecordingStream("rerun_example_asset_video_manual_frames"); + rec.spawn().exit_on_failure(); + + // Log video asset which is referred to by frame references. + // Make sure it's available on the timeline used for the frame references. + rec.set_time_seconds("video_time", 0.0); + auto video_asset = rerun::AssetVideo::from_file(path).value_or_throw(); + rec.log("video", video_asset); + + // Send frame references for every 0.1 seconds over a total of 10 seconds. + // Naturally, this will result in a choppy playback and only makes sense if the video is 10 seconds or longer. + // TODO(#7368): Point to example using `send_video_frames`. + // + // Use `send_columns` to send all frame references in a single call. + std::vector times = + video_asset.read_frame_timestamps_ns().value_or_throw(); + std::vector video_timestamps(times.size()); + for (size_t i = 0; i < times.size(); i++) { + video_timestamps[i] = rerun::components::VideoTimestamp(times[i]); + } + auto video_frame_reference_indicators = + rerun::ComponentColumn::from_indicators( + static_cast(times.size()) + ); + rec.send_columns( + "video", + rerun::TimeColumn::from_times("video_time", rerun::borrow(times)), + { + video_frame_reference_indicators.value_or_throw(), + rerun::ComponentColumn::from_loggable(rerun::borrow(video_timestamps)).value_or_throw(), + } + ); +} diff --git a/docs/snippets/all/archetypes/video_manual_frames.cpp b/docs/snippets/all/archetypes/video_manual_frames.cpp index b24fd12eef85..4ecbe1e72c7e 100644 --- a/docs/snippets/all/archetypes/video_manual_frames.cpp +++ b/docs/snippets/all/archetypes/video_manual_frames.cpp @@ -1,5 +1,5 @@ -// Log a video asset using manually created frame references. -// TODO(#7298): ⚠️ Video is currently only supported in the Rerun web viewer. +// Log a video asset using manually created frame references. +// TODO(#7298): ⚠️ Video is currently only supported in the Rerun web viewer. #include @@ -26,11 +26,11 @@ int main(int argc, char* argv[]) { // Send frame references for every 0.1 seconds over a total of 10 seconds. // Naturally, this will result in a choppy playback and only makes sense if the video is 10 seconds or longer. - // TODO(#7368): Point to example using `send_video_frames`. + // To get all frame times of a video use `rerun::AssetVideo::read_frame_timestamps_ns`. // // Use `send_columns` to send all frame references in a single call. std::vector times(10 * 10); - std::vector video_timestamps(10 * 10); + std::vector video_timestamps(times.size()); for (size_t i = 0; i < times.size(); i++) { times[i] = 100ms * i; video_timestamps[i] = rerun::components::VideoTimestamp(times[i]); diff --git a/rerun_cpp/src/rerun/archetypes/asset_video.hpp b/rerun_cpp/src/rerun/archetypes/asset_video.hpp index bc25a1247fe7..2571c985379f 100644 --- a/rerun_cpp/src/rerun/archetypes/asset_video.hpp +++ b/rerun_cpp/src/rerun/archetypes/asset_video.hpp @@ -11,6 +11,7 @@ #include "../indicator_component.hpp" #include "../result.hpp" +#include #include #include #include @@ -57,7 +58,7 @@ namespace rerun::archetypes { /// /// // Send frame references for every 0.1 seconds over a total of 10 seconds. 
/// // Naturally, this will result in a choppy playback and only makes sense if the video is 10 seconds or longer. - /// // TODO(#7368): Point to example using `send_video_frames`. + /// // To get all frame times of a video use `rerun::AssetVideo::read_frame_timestamps`. /// // /// // Use `send_columns` to send all frame references in a single call. /// std::vector times(10 * 10); @@ -125,6 +126,11 @@ namespace rerun::archetypes { return asset; } + /// Determines the presentation timestamps of all frames inside the video. + /// + /// Returned timestamps are in nanoseconds since start and are guaranteed to be monotonically increasing. + Result> read_frame_timestamps_ns() const; + // END of extensions from asset_video_ext.cpp, start of generated code: public: diff --git a/rerun_cpp/src/rerun/archetypes/asset_video_ext.cpp b/rerun_cpp/src/rerun/archetypes/asset_video_ext.cpp index 5b44f36da89c..7225dcc8acc5 100644 --- a/rerun_cpp/src/rerun/archetypes/asset_video_ext.cpp +++ b/rerun_cpp/src/rerun/archetypes/asset_video_ext.cpp @@ -2,12 +2,15 @@ #include #include +#include "../c/rerun.h" +#include "../string_utils.hpp" #include "asset_video.hpp" // It's undefined behavior to pre-declare std types, see http://www.gotw.ca/gotw/034.htm // We want to use `std::filesystem::path`, so we have it include it in the header. // +#include #include // @@ -38,6 +41,11 @@ namespace rerun::archetypes { return asset; } + /// Determines the presentation timestamps of all frames inside the video. + /// + /// Returned timestamps are in nanoseconds since start and are guaranteed to be monotonically increasing. + Result> read_frame_timestamps_ns() const; + // #endif @@ -59,4 +67,38 @@ namespace rerun::archetypes { rerun::components::MediaType::guess_from_path(path) ); } + + static int64_t* alloc_timestamps(void* alloc_context, uint32_t num_timestamps) { + auto frame_timestamps_ptr = + static_cast*>(alloc_context); + frame_timestamps_ptr->resize(num_timestamps); + return reinterpret_cast(frame_timestamps_ptr->data()); + } + + Result> AssetVideo::read_frame_timestamps_ns() const { + static_assert(sizeof(int64_t) == sizeof(std::chrono::nanoseconds::rep)); + + rr_string media_type_c = detail::to_rr_string(std::nullopt); + if (media_type.has_value()) { + media_type_c = detail::to_rr_string(media_type.value().value.value); + } + + std::vector frame_timestamps; + + rr_error status = {}; + rr_video_asset_read_frame_timestamps_ns( + blob.data.data.begin(), + blob.data.data.size(), + media_type_c, + &frame_timestamps, + &alloc_timestamps, + &status + ); + if (status.code != RR_ERROR_CODE_OK) { + return Error(status); + } + + return frame_timestamps; + } + } // namespace rerun::archetypes diff --git a/rerun_cpp/src/rerun/c/rerun.h b/rerun_cpp/src/rerun/c/rerun.h index 6d473ceb3e94..f14f52f4cec9 100644 --- a/rerun_cpp/src/rerun/c/rerun.h +++ b/rerun_cpp/src/rerun/c/rerun.h @@ -291,7 +291,7 @@ enum { RR_ERROR_CODE_INVALID_COMPONENT_TYPE_HANDLE, // Recording stream errors - _RR_ERROR_CODE_CATEGORY_RECORDING_STREAM = 0x000000100, + _RR_ERROR_CODE_CATEGORY_RECORDING_STREAM = 0x00000100, RR_ERROR_CODE_RECORDING_STREAM_RUNTIME_FAILURE, RR_ERROR_CODE_RECORDING_STREAM_CREATION_FAILURE, RR_ERROR_CODE_RECORDING_STREAM_SAVE_FAILURE, @@ -300,10 +300,14 @@ enum { RR_ERROR_CODE_RECORDING_STREAM_CHUNK_VALIDATION_FAILURE, // Arrow data processing errors. 
- _RR_ERROR_CODE_CATEGORY_ARROW = 0x000001000, + _RR_ERROR_CODE_CATEGORY_ARROW = 0x00001000, RR_ERROR_CODE_ARROW_FFI_SCHEMA_IMPORT_ERROR, RR_ERROR_CODE_ARROW_FFI_ARRAY_IMPORT_ERROR, + // Utility errors. + _RR_ERROR_CODE_CATEGORY_UTILITIES = 0x00010000, + RR_ERROR_CODE_VIDEO_LOAD_ERROR, + // Generic errors. RR_ERROR_CODE_UNKNOWN, }; @@ -546,6 +550,26 @@ extern void rr_recording_stream_send_columns( rr_error* error ); +// ---------------------------------------------------------------------------- +// Other utilities + +/// Allocation method for `rr_video_asset_read_frame_timestamps_ns`. +typedef int64_t* (*rr_alloc_timestamps)(void* alloc_context, uint32_t num_timestamps); + +/// Determines the presentation timestamps of all frames inside the video. +/// +/// Returned timestamps are in nanoseconds since start and are guaranteed to be monotonically increasing. +/// +/// \param media_type +/// If not specified (null or empty string), the media type will be guessed from the data. +/// \param alloc_func +/// Function used to allocate memory for the returned timestamps. +/// Guaranteed to be called exactly once with the `alloc_context` pointer as argument. +extern int64_t* rr_video_asset_read_frame_timestamps_ns( + const uint8_t* video_bytes, uint64_t video_bytes_len, rr_string media_type, void* alloc_context, + rr_alloc_timestamps alloc_timestamps, rr_error* error +); + // ---------------------------------------------------------------------------- // Private functions diff --git a/rerun_cpp/src/rerun/error.hpp b/rerun_cpp/src/rerun/error.hpp index ce4838faa22a..979b49324910 100644 --- a/rerun_cpp/src/rerun/error.hpp +++ b/rerun_cpp/src/rerun/error.hpp @@ -57,8 +57,12 @@ namespace rerun { ArrowFfiSchemaImportError, ArrowFfiArrayImportError, + // Utility errors. + _CategoryUtilities = 0x0001'0000, + VideoLoadError, + // Errors relating to file IO. - _CategoryFileIO = 0x0001'0000, + _CategoryFileIO = 0x0010'0000, FileOpenFailure, // Errors directly translated from arrow::StatusCode. diff --git a/rerun_cpp/src/rerun/result.hpp b/rerun_cpp/src/rerun/result.hpp index d9fd1009fad3..0b51bac6b319 100644 --- a/rerun_cpp/src/rerun/result.hpp +++ b/rerun_cpp/src/rerun/result.hpp @@ -27,7 +27,7 @@ namespace rerun { /// Construct a result from an error, default constructing the value. Result(rerun::Error _error) : value(), error(std::move(_error)) {} - /// Construct a result from an arrow status, default constructing the value. + /// Construct a result from an arrow status, default constructing the value. Result(const arrow::Status& status) : value(), error(status) {} /// Construct a result from an arrow status, default constructing the value.
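
Both the new `rr_video_asset_read_frame_timestamps_ns` C entry point and the C++ `AssetVideo::read_frame_timestamps_ns()` wrapper above bottom out in `re_video::VideoData::load_from_bytes` plus its `frame_timestamps_ns()` iterator on the Rust side. As a quick orientation aid, here is a minimal sketch of using that primitive directly; it assumes `re_video` is available as a dependency, reads a hypothetical local `video.mp4`, and assumes `VideoLoadError` converts into a boxed error via `?`.

```rust
fn main() -> Result<(), Box<dyn std::error::Error>> {
    // Read the raw MP4 bytes from disk (hypothetical path).
    let bytes = std::fs::read("video.mp4")?;

    // Passing `None` for the media type would let re_video guess it from the data instead.
    let video = re_video::VideoData::load_from_bytes(&bytes, Some("video/mp4"))?;

    // Presentation timestamps in nanoseconds since start, monotonically increasing.
    let timestamps_ns: Vec<i64> = video.frame_timestamps_ns().collect();
    println!("{} frames, first at {:?} ns", timestamps_ns.len(), timestamps_ns.first());

    Ok(())
}
```

This is the same sequence `rerun_c` performs in `video.rs` before copying the timestamps into the buffer returned by the caller-provided allocation callback.
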
From 1c8e474a1938f9d9c52457ea3699cc5f70dfe17f Mon Sep 17 00:00:00 2001 From: Andreas Reich Date: Fri, 13 Sep 2024 16:32:17 +0200 Subject: [PATCH 09/21] Make Rust's read_frame_timestamps_ns not operate on components again since it's more consistent --- crates/store/re_data_loader/src/loader_archetype.rs | 10 +++++++--- .../store/re_types/src/archetypes/asset_video_ext.rs | 7 ++----- crates/top/rerun_c/src/video.rs | 2 -- docs/snippets/all/archetypes/video_auto_frames.rs | 11 ++++++++--- 4 files changed, 17 insertions(+), 13 deletions(-) diff --git a/crates/store/re_data_loader/src/loader_archetype.rs b/crates/store/re_data_loader/src/loader_archetype.rs index 81f32fa81d80..2d66e2f10166 100644 --- a/crates/store/re_data_loader/src/loader_archetype.rs +++ b/crates/store/re_data_loader/src/loader_archetype.rs @@ -1,6 +1,7 @@ use re_chunk::{Chunk, RowId}; use re_log_types::{EntityPath, TimeInt, TimePoint}; use re_types::archetypes::{AssetVideo, VideoFrameReference}; +use re_types::components::VideoTimestamp; use re_types::Archetype; use re_types::{components::MediaType, ComponentBatch}; @@ -221,15 +222,18 @@ fn load_video( let video_asset = AssetVideo::new(contents); let video_frame_reference_chunk = match video_asset.read_frame_timestamps_ns() { - Ok(video_timestamps) => { + Ok(frame_timestamps_ns) => { // Time column. let is_sorted = Some(true); - let time_column_times = - ArrowPrimitiveArray::from_values(video_timestamps.iter().map(|t| t.video_time)); + let time_column_times = ArrowPrimitiveArray::from_slice(&frame_timestamps_ns); let time_column = re_chunk::TimeColumn::new(is_sorted, video_timeline, time_column_times); // VideoTimestamp component column. + let video_timestamps = frame_timestamps_ns + .into_iter() + .map(VideoTimestamp::from_nanoseconds) + .collect::>(); let video_timestamp_batch = &video_timestamps as &dyn ComponentBatch; let video_timestamp_list_array = video_timestamp_batch .to_arrow_list_array() diff --git a/crates/store/re_types/src/archetypes/asset_video_ext.rs b/crates/store/re_types/src/archetypes/asset_video_ext.rs index 33a5ad990fbf..8eaf5175402c 100644 --- a/crates/store/re_types/src/archetypes/asset_video_ext.rs +++ b/crates/store/re_types/src/archetypes/asset_video_ext.rs @@ -43,15 +43,12 @@ impl AssetVideo { /// /// Returned timestamps are in nanoseconds since start and are guaranteed to be monotonically increasing. #[cfg(feature = "video")] - pub fn read_frame_timestamps_ns( - &self, - ) -> Result, re_video::VideoLoadError> { + pub fn read_frame_timestamps_ns(&self) -> Result, re_video::VideoLoadError> { Ok(re_video::VideoData::load_from_bytes( self.blob.as_slice(), self.media_type.as_ref().map(|m| m.as_str()), )? .frame_timestamps_ns() - .map(crate::components::VideoTimestamp::from_nanoseconds) - .collect::>()) + .collect()) } } diff --git a/crates/top/rerun_c/src/video.rs b/crates/top/rerun_c/src/video.rs index 4bc91542c050..c83a5e671872 100644 --- a/crates/top/rerun_c/src/video.rs +++ b/crates/top/rerun_c/src/video.rs @@ -1,5 +1,3 @@ -use core::num; - use crate::{CError, CErrorCode, CStringView}; #[allow(unsafe_code)] diff --git a/docs/snippets/all/archetypes/video_auto_frames.rs b/docs/snippets/all/archetypes/video_auto_frames.rs index c6f5fc4e9c29..8cab1c40db65 100644 --- a/docs/snippets/all/archetypes/video_auto_frames.rs +++ b/docs/snippets/all/archetypes/video_auto_frames.rs @@ -19,15 +19,20 @@ fn main() -> anyhow::Result<()> { rec.log("video", &video_asset)?; // Send automatically determined video frame timestamps. 
- let video_timestamps_ns = video_asset.read_frame_timestamps_ns()?.collect::>(); + let frame_timestamps_ns = video_asset.read_frame_timestamps_ns()?; + let video_timestamps_ns = frame_timestamps_ns + .iter() + .copied() + .map(rerun::components::VideoTimestamp::from_nanoseconds) + .collect::>(); let time_column = TimeColumn::new_nanos( "video_time", // Note timeline values don't have to be the same as the video timestamps. - video_timestamps_ns.iter().map(|ts| ts.video_time), + frame_timestamps_ns, ); let frame_reference_indicators = ::Indicator::new_array( - video_timestamps_ns.len(), + time_column.num_rows(), ); rec.send_columns( "video", From 764a39312cb812708f0a8dcfeafb052d7688a045 Mon Sep 17 00:00:00 2001 From: Andreas Reich Date: Fri, 13 Sep 2024 16:34:25 +0200 Subject: [PATCH 10/21] iterating on examples --- .../all/archetypes/video_auto_frames.cpp | 23 ++++++++++--------- .../all/archetypes/video_auto_frames.py | 6 ++--- .../src/rerun/archetypes/asset_video.hpp | 4 ++-- .../archetypes/video_frame_reference.hpp | 4 ++-- .../rerun_sdk/rerun/archetypes/asset_video.py | 2 +- .../rerun/archetypes/video_frame_reference.py | 2 +- 6 files changed, 21 insertions(+), 20 deletions(-) diff --git a/docs/snippets/all/archetypes/video_auto_frames.cpp b/docs/snippets/all/archetypes/video_auto_frames.cpp index 2def780149df..806a6fd79fcf 100644 --- a/docs/snippets/all/archetypes/video_auto_frames.cpp +++ b/docs/snippets/all/archetypes/video_auto_frames.cpp @@ -25,24 +25,25 @@ int main(int argc, char* argv[]) { auto video_asset = rerun::AssetVideo::from_file(path).value_or_throw(); rec.log("video", video_asset); - // Send frame references for every 0.1 seconds over a total of 10 seconds. - // Naturally, this will result in a choppy playback and only makes sense if the video is 10 seconds or longer. - // TODO(#7368): Point to example using `send_video_frames`. - // - // Use `send_columns` to send all frame references in a single call. - std::vector times = + // Send automatically determined video frame timestamps. + std::vector frame_timestamps_ns = video_asset.read_frame_timestamps_ns().value_or_throw(); - std::vector video_timestamps(times.size()); - for (size_t i = 0; i < times.size(); i++) { - video_timestamps[i] = rerun::components::VideoTimestamp(times[i]); + // Note timeline values don't have to be the same as the video timestamps. + auto time_column = + rerun::TimeColumn::from_times("video_time", rerun::borrow(frame_timestamps_ns)); + + std::vector video_timestamps(frame_timestamps_ns.size()); + for (size_t i = 0; i < frame_timestamps_ns.size(); i++) { + video_timestamps[i] = rerun::components::VideoTimestamp(frame_timestamps_ns[i]); } auto video_frame_reference_indicators = rerun::ComponentColumn::from_indicators( - static_cast(times.size()) + static_cast(video_timestamps.size()) ); + rec.send_columns( "video", - rerun::TimeColumn::from_times("video_time", rerun::borrow(times)), + time_column, { video_frame_reference_indicators.value_or_throw(), rerun::ComponentColumn::from_loggable(rerun::borrow(video_timestamps)).value_or_throw(), diff --git a/docs/snippets/all/archetypes/video_auto_frames.py b/docs/snippets/all/archetypes/video_auto_frames.py index 05c030da11f3..ad6694fb687b 100644 --- a/docs/snippets/all/archetypes/video_auto_frames.py +++ b/docs/snippets/all/archetypes/video_auto_frames.py @@ -18,10 +18,10 @@ rr.log("video", video_asset) # Send automatically determined video frame timestamps. 
-video_timestamps_ns = video_asset.read_frame_timestamps_ns() +frame_timestamps_ns = video_asset.read_frame_timestamps_ns() rr.send_columns( "video", # Note timeline values don't have to be the same as the video timestamps. - times=[rr.TimeNanosColumn("video_time", video_timestamps_ns)], - components=[rr.VideoFrameReference.indicator(), rr.components.VideoTimestamp.nanoseconds(video_timestamps_ns)], + times=[rr.TimeNanosColumn("video_time", frame_timestamps_ns)], + components=[rr.VideoFrameReference.indicator(), rr.components.VideoTimestamp.nanoseconds(frame_timestamps_ns)], ) diff --git a/rerun_cpp/src/rerun/archetypes/asset_video.hpp b/rerun_cpp/src/rerun/archetypes/asset_video.hpp index 2571c985379f..59f5e3f6fec4 100644 --- a/rerun_cpp/src/rerun/archetypes/asset_video.hpp +++ b/rerun_cpp/src/rerun/archetypes/asset_video.hpp @@ -58,11 +58,11 @@ namespace rerun::archetypes { /// /// // Send frame references for every 0.1 seconds over a total of 10 seconds. /// // Naturally, this will result in a choppy playback and only makes sense if the video is 10 seconds or longer. - /// // To get all frame times of a video use `rerun::AssetVideo::read_frame_timestamps`. + /// // To get all frame times of a video use `rerun::AssetVideo::read_frame_timestamps_ns`. /// // /// // Use `send_columns` to send all frame references in a single call. /// std::vector times(10 * 10); - /// std::vector video_timestamps(10 * 10); + /// std::vector video_timestamps(times.size()); /// for (size_t i = 0; i times(10 * 10); - /// std::vector video_timestamps(10 * 10); + /// std::vector video_timestamps(times.size()); /// for (size_t i = 0; i Date: Fri, 13 Sep 2024 17:49:35 +0200 Subject: [PATCH 11/21] use static logging of video asset in snippets --- docs/snippets/all/archetypes/video_auto_frames.cpp | 6 ++---- docs/snippets/all/archetypes/video_auto_frames.py | 3 +-- docs/snippets/all/archetypes/video_auto_frames.rs | 7 +++---- 3 files changed, 6 insertions(+), 10 deletions(-) diff --git a/docs/snippets/all/archetypes/video_auto_frames.cpp b/docs/snippets/all/archetypes/video_auto_frames.cpp index 806a6fd79fcf..26f5a694af87 100644 --- a/docs/snippets/all/archetypes/video_auto_frames.cpp +++ b/docs/snippets/all/archetypes/video_auto_frames.cpp @@ -16,14 +16,12 @@ int main(int argc, char* argv[]) { const auto path = argv[1]; - const auto rec = rerun::RecordingStream("rerun_example_asset_video_manual_frames"); + const auto rec = rerun::RecordingStream("rerun_example_asset_video_auto_frames"); rec.spawn().exit_on_failure(); // Log video asset which is referred to by frame references. - // Make sure it's available on the timeline used for the frame references. - rec.set_time_seconds("video_time", 0.0); auto video_asset = rerun::AssetVideo::from_file(path).value_or_throw(); - rec.log("video", video_asset); + rec.log_static("video", video_asset); // Send automatically determined video frame timestamps. std::vector frame_timestamps_ns = diff --git a/docs/snippets/all/archetypes/video_auto_frames.py b/docs/snippets/all/archetypes/video_auto_frames.py index ad6694fb687b..2e81fd8c42ba 100644 --- a/docs/snippets/all/archetypes/video_auto_frames.py +++ b/docs/snippets/all/archetypes/video_auto_frames.py @@ -13,9 +13,8 @@ rr.init("rerun_example_asset_video_auto_frames", spawn=True) # Log video asset which is referred to by frame references. -rr.set_time_seconds("video_time", 0) # Make sure it's available on the timeline used for the frame references. 
video_asset = rr.AssetVideo(path=sys.argv[1]) -rr.log("video", video_asset) +rr.log("video", video_asset, static=True) # Send automatically determined video frame timestamps. frame_timestamps_ns = video_asset.read_frame_timestamps_ns() diff --git a/docs/snippets/all/archetypes/video_auto_frames.rs b/docs/snippets/all/archetypes/video_auto_frames.rs index 8cab1c40db65..df52ccda0ef2 100644 --- a/docs/snippets/all/archetypes/video_auto_frames.rs +++ b/docs/snippets/all/archetypes/video_auto_frames.rs @@ -1,7 +1,7 @@ //! Log a video asset using automatically determined frame references. //! TODO(#7298): ⚠️ Video is currently only supported in the Rerun web viewer. -use rerun::{external::anyhow, TimeColumn}; +use rerun::external::anyhow; fn main() -> anyhow::Result<()> { let args = _args; @@ -14,9 +14,8 @@ fn main() -> anyhow::Result<()> { rerun::RecordingStreamBuilder::new("rerun_example_asset_video_auto_frames").spawn()?; // Log video asset which is referred to by frame references. - rec.set_time_seconds("video_time", 0.0); // Make sure it's available on the timeline used for the frame references. let video_asset = rerun::AssetVideo::from_file_path(path)?; - rec.log("video", &video_asset)?; + rec.log_static("video", &video_asset)?; // Send automatically determined video frame timestamps. let frame_timestamps_ns = video_asset.read_frame_timestamps_ns()?; @@ -25,7 +24,7 @@ fn main() -> anyhow::Result<()> { .copied() .map(rerun::components::VideoTimestamp::from_nanoseconds) .collect::>(); - let time_column = TimeColumn::new_nanos( + let time_column = rerun::TimeColumn::new_nanos( "video_time", // Note timeline values don't have to be the same as the video timestamps. frame_timestamps_ns, From 1aab2861bec8172b455ca8356f4e829a9e147047 Mon Sep 17 00:00:00 2001 From: Andreas Reich Date: Fri, 13 Sep 2024 17:50:37 +0200 Subject: [PATCH 12/21] overhaul manual frame snippet --- .../rerun/archetypes/asset_video.fbs | 5 +- .../archetypes/video_frame_reference.fbs | 5 +- .../re_types/src/archetypes/asset_video.rs | 84 ++++++++++++++----- .../src/archetypes/video_frame_reference.rs | 84 ++++++++++++++----- .../src/datatypes/entity_path_ext.rs | 52 ++++++++++++ .../store/re_types_core/src/datatypes/mod.rs | 1 + .../reference/types/archetypes/asset_video.md | 16 +++- .../types/archetypes/video_frame_reference.md | 16 +++- .../all/archetypes/video_manual_frames.cpp | 36 +++----- .../all/archetypes/video_manual_frames.py | 42 ++++++---- .../all/archetypes/video_manual_frames.rs | 36 ++++---- .../src/rerun/archetypes/asset_video.hpp | 74 +++++++++++----- .../archetypes/video_frame_reference.hpp | 74 +++++++++++----- .../src/rerun/components/entity_path.hpp | 7 ++ .../src/rerun/components/entity_path_ext.cpp | 14 ++++ .../rerun_sdk/rerun/archetypes/asset_video.py | 80 ++++++++++++++---- .../rerun/archetypes/video_frame_reference.py | 80 ++++++++++++++---- .../rerun/components/video_timestamp.py | 2 +- .../rerun/components/video_timestamp_ext.py | 41 ++++++++- 19 files changed, 568 insertions(+), 181 deletions(-) create mode 100644 crates/store/re_types_core/src/datatypes/entity_path_ext.rs create mode 100644 rerun_cpp/src/rerun/components/entity_path_ext.cpp diff --git a/crates/store/re_types/definitions/rerun/archetypes/asset_video.fbs b/crates/store/re_types/definitions/rerun/archetypes/asset_video.fbs index 7b61fc0f683a..82ceaa816e5e 100644 --- a/crates/store/re_types/definitions/rerun/archetypes/asset_video.fbs +++ b/crates/store/re_types/definitions/rerun/archetypes/asset_video.fbs @@ -8,8 +8,9 @@ namespace 
rerun.archetypes; /// /// In order to display a video, you need to log a [archetypes.VideoFrameReference] for each frame. /// -/// \example archetypes/video_manual_frames title="Video with explicit frames" image="https://static.rerun.io/video_manual_frames/320a44e1e06b8b3a3161ecbbeae3e04d1ccb9589/1200w.png" -// TODO(#7368): Example and reference to `send_video_frames` API. +/// \example archetypes/video_auto_frames title="Video with automatically determined frames" image="https://static.rerun.io/video_manual_frames/320a44e1e06b8b3a3161ecbbeae3e04d1ccb9589/1200w.png" +/// \example archetypes/video_manual_frames title="Demonstrates manual use of video frame references" image="https://static.rerun.io/video_manual_frames/320a44e1e06b8b3a3161ecbbeae3e04d1ccb9589/1200w.png" +// TODO(#7420): update screenshot for manual frames example table AssetVideo ( "attr.rerun.experimental" ) { diff --git a/crates/store/re_types/definitions/rerun/archetypes/video_frame_reference.fbs b/crates/store/re_types/definitions/rerun/archetypes/video_frame_reference.fbs index f503867b41bc..ecb348d37e28 100644 --- a/crates/store/re_types/definitions/rerun/archetypes/video_frame_reference.fbs +++ b/crates/store/re_types/definitions/rerun/archetypes/video_frame_reference.fbs @@ -5,8 +5,9 @@ namespace rerun.archetypes; /// Used to display individual video frames from a [archetypes.AssetVideo]. /// To show an entire video, a fideo frame reference for each frame of the video should be logged. /// -/// \example archetypes/video_manual_frames title="Video with explicit frames" image="https://static.rerun.io/video_manual_frames/320a44e1e06b8b3a3161ecbbeae3e04d1ccb9589/1200w.png" -// TODO(#7368): Example and reference to `send_video_frames` API. +/// \example archetypes/video_auto_frames title="Video with automatically determined frames" image="https://static.rerun.io/video_manual_frames/320a44e1e06b8b3a3161ecbbeae3e04d1ccb9589/1200w.png" +/// \example archetypes/video_manual_frames title="Demonstrates manual use of video frame references" image="https://static.rerun.io/video_manual_frames/320a44e1e06b8b3a3161ecbbeae3e04d1ccb9589/1200w.png" +// TODO(#7420): update screenshot for manual frames example table VideoFrameReference ( "attr.rerun.experimental" ){ diff --git a/crates/store/re_types/src/archetypes/asset_video.rs b/crates/store/re_types/src/archetypes/asset_video.rs index 027ca3b6ef8b..997c93f6d754 100644 --- a/crates/store/re_types/src/archetypes/asset_video.rs +++ b/crates/store/re_types/src/archetypes/asset_video.rs @@ -28,11 +28,11 @@ use ::re_types_core::{DeserializationError, DeserializationResult}; /// /// ⚠️ **This type is experimental and may be removed in future versions** /// -/// ## Example +/// ## Examples /// -/// ### Video with explicit frames +/// ### Video with automatically determined frames /// ```ignore -/// use rerun::{external::anyhow, TimeColumn}; +/// use rerun::external::anyhow; /// /// fn main() -> anyhow::Result<()> { /// let args = _args; @@ -42,29 +42,32 @@ use ::re_types_core::{DeserializationError, DeserializationResult}; /// }; /// /// let rec = -/// rerun::RecordingStreamBuilder::new("rerun_example_asset_video_manual_frames").spawn()?; +/// rerun::RecordingStreamBuilder::new("rerun_example_asset_video_auto_frames").spawn()?; /// /// // Log video asset which is referred to by frame references. -/// rec.set_time_seconds("video_time", 0.0); // Make sure it's available on the timeline used for the frame references. 
-/// rec.log("video", &rerun::AssetVideo::from_file_path(path)?)?; +/// let video_asset = rerun::AssetVideo::from_file_path(path)?; +/// rec.log_static("video", &video_asset)?; /// -/// // Send frame references for every 0.1 seconds over a total of 10 seconds. -/// // Naturally, this will result in a choppy playback and only makes sense if the video is 10 seconds or longer. -/// // To get all frame times of a video use `rerun::AssetVideo::read_frame_timestamps_ns`. -/// // -/// // Use `send_columns` to send all frame references in a single call. -/// let times = (0..(10 * 10)).map(|t| t as f64 * 0.1).collect::>(); -/// let time_column = TimeColumn::new_seconds("video_time", times.iter().copied()); -/// let frame_reference_indicators = -/// ::Indicator::new_array(times.len()); -/// let video_timestamps = times -/// .into_iter() -/// .map(rerun::components::VideoTimestamp::from_seconds) +/// // Send automatically determined video frame timestamps. +/// let frame_timestamps_ns = video_asset.read_frame_timestamps_ns()?; +/// let video_timestamps_ns = frame_timestamps_ns +/// .iter() +/// .copied() +/// .map(rerun::components::VideoTimestamp::from_nanoseconds) /// .collect::>(); +/// let time_column = rerun::TimeColumn::new_nanos( +/// "video_time", +/// // Note timeline values don't have to be the same as the video timestamps. +/// frame_timestamps_ns, +/// ); +/// let frame_reference_indicators = +/// ::Indicator::new_array( +/// time_column.num_rows(), +/// ); /// rec.send_columns( /// "video", /// [time_column], -/// [&frame_reference_indicators as _, &video_timestamps as _], +/// [&frame_reference_indicators as _, &video_timestamps_ns as _], /// )?; /// /// Ok(()) @@ -79,6 +82,49 @@ use ::re_types_core::{DeserializationError, DeserializationResult}; /// /// /// +/// +/// ### Demonstrates manual use of video frame references +/// ```ignore +/// use rerun::external::anyhow; +/// +/// fn main() -> anyhow::Result<()> { +/// let args = _args; +/// let Some(path) = args.get(1) else { +/// // TODO(#7354): Only mp4 is supported for now. +/// anyhow::bail!("Usage: {} ", args[0]); +/// }; +/// +/// let rec = +/// rerun::RecordingStreamBuilder::new("rerun_example_asset_video_manual_frames").spawn()?; +/// +/// // Log video asset which is referred to by frame references. +/// rec.log_static("video_asset", &rerun::AssetVideo::from_file_path(path)?)?; +/// +/// // Create two entites, showing the same video frozen at different times. +/// rec.log( +/// "frame_at_start", +/// &rerun::VideoFrameReference::new(rerun::components::VideoTimestamp::from_seconds(0.0)) +/// .with_video_reference("video_asset"), +/// ); +/// rec.log( +/// "frame_at_one_second", +/// &rerun::VideoFrameReference::new(rerun::components::VideoTimestamp::from_seconds(1.0)) +/// .with_video_reference("video_asset"), +/// ); +/// +/// // TODO(#5520): log blueprint once supported +/// Ok(()) +/// } +/// ``` +///
+/// +/// +/// +/// +/// +/// +/// +///
#[derive(Clone, Debug)] pub struct AssetVideo { /// The asset's bytes. diff --git a/crates/store/re_types/src/archetypes/video_frame_reference.rs b/crates/store/re_types/src/archetypes/video_frame_reference.rs index e9d3ba6a6126..41ebf3f46587 100644 --- a/crates/store/re_types/src/archetypes/video_frame_reference.rs +++ b/crates/store/re_types/src/archetypes/video_frame_reference.rs @@ -25,11 +25,11 @@ use ::re_types_core::{DeserializationError, DeserializationResult}; /// /// ⚠️ **This type is experimental and may be removed in future versions** /// -/// ## Example +/// ## Examples /// -/// ### Video with explicit frames +/// ### Video with automatically determined frames /// ```ignore -/// use rerun::{external::anyhow, TimeColumn}; +/// use rerun::external::anyhow; /// /// fn main() -> anyhow::Result<()> { /// let args = _args; @@ -39,29 +39,32 @@ use ::re_types_core::{DeserializationError, DeserializationResult}; /// }; /// /// let rec = -/// rerun::RecordingStreamBuilder::new("rerun_example_asset_video_manual_frames").spawn()?; +/// rerun::RecordingStreamBuilder::new("rerun_example_asset_video_auto_frames").spawn()?; /// /// // Log video asset which is referred to by frame references. -/// rec.set_time_seconds("video_time", 0.0); // Make sure it's available on the timeline used for the frame references. -/// rec.log("video", &rerun::AssetVideo::from_file_path(path)?)?; +/// let video_asset = rerun::AssetVideo::from_file_path(path)?; +/// rec.log_static("video", &video_asset)?; /// -/// // Send frame references for every 0.1 seconds over a total of 10 seconds. -/// // Naturally, this will result in a choppy playback and only makes sense if the video is 10 seconds or longer. -/// // To get all frame times of a video use `rerun::AssetVideo::read_frame_timestamps_ns`. -/// // -/// // Use `send_columns` to send all frame references in a single call. -/// let times = (0..(10 * 10)).map(|t| t as f64 * 0.1).collect::>(); -/// let time_column = TimeColumn::new_seconds("video_time", times.iter().copied()); -/// let frame_reference_indicators = -/// ::Indicator::new_array(times.len()); -/// let video_timestamps = times -/// .into_iter() -/// .map(rerun::components::VideoTimestamp::from_seconds) +/// // Send automatically determined video frame timestamps. +/// let frame_timestamps_ns = video_asset.read_frame_timestamps_ns()?; +/// let video_timestamps_ns = frame_timestamps_ns +/// .iter() +/// .copied() +/// .map(rerun::components::VideoTimestamp::from_nanoseconds) /// .collect::>(); +/// let time_column = rerun::TimeColumn::new_nanos( +/// "video_time", +/// // Note timeline values don't have to be the same as the video timestamps. +/// frame_timestamps_ns, +/// ); +/// let frame_reference_indicators = +/// ::Indicator::new_array( +/// time_column.num_rows(), +/// ); /// rec.send_columns( /// "video", /// [time_column], -/// [&frame_reference_indicators as _, &video_timestamps as _], +/// [&frame_reference_indicators as _, &video_timestamps_ns as _], /// )?; /// /// Ok(()) @@ -76,6 +79,49 @@ use ::re_types_core::{DeserializationError, DeserializationResult}; /// /// /// +/// +/// ### Demonstrates manual use of video frame references +/// ```ignore +/// use rerun::external::anyhow; +/// +/// fn main() -> anyhow::Result<()> { +/// let args = _args; +/// let Some(path) = args.get(1) else { +/// // TODO(#7354): Only mp4 is supported for now. 
+/// anyhow::bail!("Usage: {} ", args[0]); +/// }; +/// +/// let rec = +/// rerun::RecordingStreamBuilder::new("rerun_example_asset_video_manual_frames").spawn()?; +/// +/// // Log video asset which is referred to by frame references. +/// rec.log_static("video_asset", &rerun::AssetVideo::from_file_path(path)?)?; +/// +/// // Create two entites, showing the same video frozen at different times. +/// rec.log( +/// "frame_at_start", +/// &rerun::VideoFrameReference::new(rerun::components::VideoTimestamp::from_seconds(0.0)) +/// .with_video_reference("video_asset"), +/// ); +/// rec.log( +/// "frame_at_one_second", +/// &rerun::VideoFrameReference::new(rerun::components::VideoTimestamp::from_seconds(1.0)) +/// .with_video_reference("video_asset"), +/// ); +/// +/// // TODO(#5520): log blueprint once supported +/// Ok(()) +/// } +/// ``` +///
+/// +/// +/// +/// +/// +/// +/// +///
#[derive(Clone, Debug)] pub struct VideoFrameReference { /// References the closest video frame to this timestamp. diff --git a/crates/store/re_types_core/src/datatypes/entity_path_ext.rs b/crates/store/re_types_core/src/datatypes/entity_path_ext.rs new file mode 100644 index 000000000000..babef32b7818 --- /dev/null +++ b/crates/store/re_types_core/src/datatypes/entity_path_ext.rs @@ -0,0 +1,52 @@ +use super::EntityPath; + +impl EntityPath { + #[inline] + pub fn as_str(&self) -> &str { + self.0.as_str() + } +} + +impl From for EntityPath { + #[inline] + fn from(value: String) -> Self { + Self(value.into()) + } +} + +impl From<&str> for EntityPath { + #[inline] + fn from(value: &str) -> Self { + Self(value.into()) + } +} + +impl From for String { + #[inline] + fn from(value: EntityPath) -> Self { + value.as_str().to_owned() + } +} + +impl AsRef for EntityPath { + #[inline] + fn as_ref(&self) -> &str { + self.as_str() + } +} + +impl std::borrow::Borrow for EntityPath { + #[inline] + fn borrow(&self) -> &str { + self.as_str() + } +} + +impl std::ops::Deref for EntityPath { + type Target = str; + + #[inline] + fn deref(&self) -> &str { + self.as_str() + } +} diff --git a/crates/store/re_types_core/src/datatypes/mod.rs b/crates/store/re_types_core/src/datatypes/mod.rs index eea41d67b2f2..0ddbfbaa39e8 100644 --- a/crates/store/re_types_core/src/datatypes/mod.rs +++ b/crates/store/re_types_core/src/datatypes/mod.rs @@ -3,6 +3,7 @@ mod bool; mod bool_ext; mod entity_path; +mod entity_path_ext; mod float32; mod float32_ext; mod float64; diff --git a/docs/content/reference/types/archetypes/asset_video.md b/docs/content/reference/types/archetypes/asset_video.md index 4db7fff313df..ab885a1f30c0 100644 --- a/docs/content/reference/types/archetypes/asset_video.md +++ b/docs/content/reference/types/archetypes/asset_video.md @@ -25,9 +25,21 @@ In order to display a video, you need to log a [`archetypes.VideoFrameReference` * 🐍 [Python API docs for `AssetVideo`](https://ref.rerun.io/docs/python/stable/common/archetypes#rerun.archetypes.AssetVideo) * 🦀 [Rust API docs for `AssetVideo`](https://docs.rs/rerun/latest/rerun/archetypes/struct.AssetVideo.html) -## Example +## Examples -### Video with explicit frames +### Video with automatically determined frames + +snippet: archetypes/video_auto_frames + + + + + + + + + +### Demonstrates manual use of video frame references snippet: archetypes/video_manual_frames diff --git a/docs/content/reference/types/archetypes/video_frame_reference.md b/docs/content/reference/types/archetypes/video_frame_reference.md index 0fe64d88afef..05863b929207 100644 --- a/docs/content/reference/types/archetypes/video_frame_reference.md +++ b/docs/content/reference/types/archetypes/video_frame_reference.md @@ -22,9 +22,21 @@ To show an entire video, a fideo frame reference for each frame of the video sho * 🐍 [Python API docs for `VideoFrameReference`](https://ref.rerun.io/docs/python/stable/common/archetypes#rerun.archetypes.VideoFrameReference) * 🦀 [Rust API docs for `VideoFrameReference`](https://docs.rs/rerun/latest/rerun/archetypes/struct.VideoFrameReference.html) -## Example +## Examples -### Video with explicit frames +### Video with automatically determined frames + +snippet: archetypes/video_auto_frames + + + + + + + + + +### Demonstrates manual use of video frame references snippet: archetypes/video_manual_frames diff --git a/docs/snippets/all/archetypes/video_manual_frames.cpp b/docs/snippets/all/archetypes/video_manual_frames.cpp index 4ecbe1e72c7e..5c6dd3bb95a9 100644 --- 
a/docs/snippets/all/archetypes/video_manual_frames.cpp +++ b/docs/snippets/all/archetypes/video_manual_frames.cpp @@ -1,5 +1,6 @@ // Log a video asset using manually created frame references. // TODO(#7298): ⚠️ Video is currently only supported in the Rerun web viewer. +// TODO(#7420): This sample doesn't render yet. #include @@ -20,31 +21,14 @@ int main(int argc, char* argv[]) { rec.spawn().exit_on_failure(); // Log video asset which is referred to by frame references. - // Make sure it's available on the timeline used for the frame references. - rec.set_time_seconds("video_time", 0.0); - rec.log("video", rerun::AssetVideo::from_file(path).value_or_throw()); - - // Send frame references for every 0.1 seconds over a total of 10 seconds. - // Naturally, this will result in a choppy playback and only makes sense if the video is 10 seconds or longer. - // To get all frame times of a video use `rerun::AssetVideo::read_frame_timestamps_ns`. - // - // Use `send_columns` to send all frame references in a single call. - std::vector times(10 * 10); - std::vector video_timestamps(times.size()); - for (size_t i = 0; i < times.size(); i++) { - times[i] = 100ms * i; - video_timestamps[i] = rerun::components::VideoTimestamp(times[i]); - } - auto video_frame_reference_indicators = - rerun::ComponentColumn::from_indicators( - static_cast(times.size()) - ); - rec.send_columns( - "video", - rerun::TimeColumn::from_times("video_time", rerun::borrow(times)), - { - video_frame_reference_indicators.value_or_throw(), - rerun::ComponentColumn::from_loggable(rerun::borrow(video_timestamps)).value_or_throw(), - } + rec.log_static("video_asset", rerun::AssetVideo::from_file(path).value_or_throw()); + + // Create two entites, showing the same video frozen at different times. + rec.log("frame_at_start", rerun::VideoFrameReference(0.0s).with_video_reference("video_asset")); + rec.log( + "frame_at_one_second", + rerun::VideoFrameReference(1.0s).with_video_reference("video_asset") ); + + // TODO(#5520): log blueprint once supported } diff --git a/docs/snippets/all/archetypes/video_manual_frames.py b/docs/snippets/all/archetypes/video_manual_frames.py index 2cfe650c8bc9..3bf7dde322ed 100644 --- a/docs/snippets/all/archetypes/video_manual_frames.py +++ b/docs/snippets/all/archetypes/video_manual_frames.py @@ -1,10 +1,11 @@ -"""Log a video asset using manually created frame references.""" +"""Manual use of individual video frame references.""" # TODO(#7298): ⚠️ Video is currently only supported in the Rerun web viewer. +# TODO(#7420): This sample doesn't render yet. import sys -import numpy as np import rerun as rr +import rerun.blueprint as rrb if len(sys.argv) < 2: # TODO(#7354): Only mp4 is supported for now. @@ -14,17 +15,28 @@ rr.init("rerun_example_asset_video_manual_frames", spawn=True) # Log video asset which is referred to by frame references. -rr.set_time_seconds("video_time", 0) # Make sure it's available on the timeline used for the frame references. -rr.log("video", rr.AssetVideo(path=sys.argv[1])) - -# Send frame references for every 0.1 seconds over a total of 10 seconds. -# Naturally, this will result in a choppy playback and only makes sense if the video is 10 seconds or longer. -# To get all frame times of a video use `rr.AssetVideo.read_frame_timestamps_ns`. -# -# Use `send_columns` to send all frame references in a single call. 
-times = np.arange(0.0, 10.0, 0.1) -rr.send_columns( - "video", - times=[rr.TimeSecondsColumn("video_time", times)], - components=[rr.VideoFrameReference.indicator(), rr.components.VideoTimestamp.seconds(times)], +rr.log("video_asset", rr.AssetVideo(path=sys.argv[1]), static=True) + +# Create two entites, showing the same video frozen at different times. +rr.log( + "frame_at_start", + rr.VideoFrameReference( + timestamp=rr.components.VideoTimestamp(seconds=0.0), + video_reference="video_asset", + ), +) +rr.log( + "frame_at_one_second", + rr.VideoFrameReference( + timestamp=rr.components.VideoTimestamp(seconds=1.0), + video_reference="video_asset", + ), +) + +# Send blueprint that shows two 2D views next to each other. +rr.send_blueprint( + rrb.Horizontal(rrb.Spatial2DView(origin="frame_at_start"), rrb.Spatial2DView(origin="frame_at_one_second")) ) + + +# TODO: doesn't show video frames right now. diff --git a/docs/snippets/all/archetypes/video_manual_frames.rs b/docs/snippets/all/archetypes/video_manual_frames.rs index 12d9fd41fdd9..bce79f1e043e 100644 --- a/docs/snippets/all/archetypes/video_manual_frames.rs +++ b/docs/snippets/all/archetypes/video_manual_frames.rs @@ -1,7 +1,8 @@ //! Log a video asset using manually created frame references. //! TODO(#7298): ⚠️ Video is currently only supported in the Rerun web viewer. +//! TODO(#7420): This sample doesn't render yet. -use rerun::{external::anyhow, TimeColumn}; +use rerun::external::anyhow; fn main() -> anyhow::Result<()> { let args = _args; @@ -14,27 +15,20 @@ fn main() -> anyhow::Result<()> { rerun::RecordingStreamBuilder::new("rerun_example_asset_video_manual_frames").spawn()?; // Log video asset which is referred to by frame references. - rec.set_time_seconds("video_time", 0.0); // Make sure it's available on the timeline used for the frame references. - rec.log("video", &rerun::AssetVideo::from_file_path(path)?)?; + rec.log_static("video_asset", &rerun::AssetVideo::from_file_path(path)?)?; - // Send frame references for every 0.1 seconds over a total of 10 seconds. - // Naturally, this will result in a choppy playback and only makes sense if the video is 10 seconds or longer. - // To get all frame times of a video use `rerun::AssetVideo::read_frame_timestamps_ns`. - // - // Use `send_columns` to send all frame references in a single call. - let times = (0..(10 * 10)).map(|t| t as f64 * 0.1).collect::>(); - let time_column = TimeColumn::new_seconds("video_time", times.iter().copied()); - let frame_reference_indicators = - ::Indicator::new_array(times.len()); - let video_timestamps = times - .into_iter() - .map(rerun::components::VideoTimestamp::from_seconds) - .collect::>(); - rec.send_columns( - "video", - [time_column], - [&frame_reference_indicators as _, &video_timestamps as _], - )?; + // Create two entites, showing the same video frozen at different times. 
+ rec.log( + "frame_at_start", + &rerun::VideoFrameReference::new(rerun::components::VideoTimestamp::from_seconds(0.0)) + .with_video_reference("video_asset"), + ); + rec.log( + "frame_at_one_second", + &rerun::VideoFrameReference::new(rerun::components::VideoTimestamp::from_seconds(1.0)) + .with_video_reference("video_asset"), + ); + // TODO(#5520): log blueprint once supported Ok(()) } diff --git a/rerun_cpp/src/rerun/archetypes/asset_video.hpp b/rerun_cpp/src/rerun/archetypes/asset_video.hpp index 59f5e3f6fec4..d3173492d3cc 100644 --- a/rerun_cpp/src/rerun/archetypes/asset_video.hpp +++ b/rerun_cpp/src/rerun/archetypes/asset_video.hpp @@ -27,9 +27,9 @@ namespace rerun::archetypes { /// /// In order to display a video, you need to log a `archetypes::VideoFrameReference` for each frame. /// - /// ## Example + /// ## Examples /// - /// ### Video with explicit frames + /// ### Video with automatically determined frames /// ![image](https://static.rerun.io/video_manual_frames/320a44e1e06b8b3a3161ecbbeae3e04d1ccb9589/full.png) /// /// ```cpp @@ -48,32 +48,32 @@ namespace rerun::archetypes { /// /// const auto path = argv[1]; /// - /// const auto rec = rerun::RecordingStream("rerun_example_asset_video_manual_frames"); + /// const auto rec = rerun::RecordingStream("rerun_example_asset_video_auto_frames"); /// rec.spawn().exit_on_failure(); /// /// // Log video asset which is referred to by frame references. - /// // Make sure it's available on the timeline used for the frame references. - /// rec.set_time_seconds("video_time", 0.0); - /// rec.log("video", rerun::AssetVideo::from_file(path).value_or_throw()); - /// - /// // Send frame references for every 0.1 seconds over a total of 10 seconds. - /// // Naturally, this will result in a choppy playback and only makes sense if the video is 10 seconds or longer. - /// // To get all frame times of a video use `rerun::AssetVideo::read_frame_timestamps_ns`. - /// // - /// // Use `send_columns` to send all frame references in a single call. - /// std::vector times(10 * 10); - /// std::vector video_timestamps(times.size()); - /// for (size_t i = 0; i frame_timestamps_ns = + /// video_asset.read_frame_timestamps_ns().value_or_throw(); + /// // Note timeline values don't have to be the same as the video timestamps. + /// auto time_column = + /// rerun::TimeColumn::from_times("video_time", rerun::borrow(frame_timestamps_ns)); + /// + /// std::vector video_timestamps(frame_timestamps_ns.size()); + /// for (size_t i = 0; i ( - /// static_cast(times.size()) + /// static_cast(video_timestamps.size()) /// ); + /// /// rec.send_columns( /// "video", - /// rerun::TimeColumn::from_times("video_time", rerun::borrow(times)), + /// time_column, /// { /// video_frame_reference_indicators.value_or_throw(), /// rerun::ComponentColumn::from_loggable(rerun::borrow(video_timestamps)).value_or_throw(), @@ -82,6 +82,42 @@ namespace rerun::archetypes { /// } /// ``` /// + /// ### Demonstrates manual use of video frame references + /// ![image](https://static.rerun.io/video_manual_frames/320a44e1e06b8b3a3161ecbbeae3e04d1ccb9589/full.png) + /// + /// ```cpp + /// #include + /// + /// #include + /// + /// using namespace std::chrono_literals; + /// + /// int main(int argc, char* argv[]) { + /// if (argc <2) { + /// // TODO(#7354): Only mp4 is supported for now. 
+ /// std::cerr <<"Usage: " <" < times(10 * 10); - /// std::vector video_timestamps(times.size()); - /// for (size_t i = 0; i frame_timestamps_ns = + /// video_asset.read_frame_timestamps_ns().value_or_throw(); + /// // Note timeline values don't have to be the same as the video timestamps. + /// auto time_column = + /// rerun::TimeColumn::from_times("video_time", rerun::borrow(frame_timestamps_ns)); + /// + /// std::vector video_timestamps(frame_timestamps_ns.size()); + /// for (size_t i = 0; i ( - /// static_cast(times.size()) + /// static_cast(video_timestamps.size()) /// ); + /// /// rec.send_columns( /// "video", - /// rerun::TimeColumn::from_times("video_time", rerun::borrow(times)), + /// time_column, /// { /// video_frame_reference_indicators.value_or_throw(), /// rerun::ComponentColumn::from_loggable(rerun::borrow(video_timestamps)).value_or_throw(), @@ -77,6 +77,42 @@ namespace rerun::archetypes { /// } /// ``` /// + /// ### Demonstrates manual use of video frame references + /// ![image](https://static.rerun.io/video_manual_frames/320a44e1e06b8b3a3161ecbbeae3e04d1ccb9589/full.png) + /// + /// ```cpp + /// #include + /// + /// #include + /// + /// using namespace std::chrono_literals; + /// + /// int main(int argc, char* argv[]) { + /// if (argc <2) { + /// // TODO(#7354): Only mp4 is supported for now. + /// std::cerr <<"Usage: " <" < + EntityPath(std::string_view path_) : value(std::string(path_)) {} + + EntityPath(const char* path_) : value(std::string(path_)) {} + // + +} // namespace rerun::components +#endif diff --git a/rerun_py/rerun_sdk/rerun/archetypes/asset_video.py b/rerun_py/rerun_sdk/rerun/archetypes/asset_video.py index 98a94ac4ed72..f48fdf40e6b6 100644 --- a/rerun_py/rerun_sdk/rerun/archetypes/asset_video.py +++ b/rerun_py/rerun_sdk/rerun/archetypes/asset_video.py @@ -29,15 +29,14 @@ class AssetVideo(AssetVideoExt, Archetype): ⚠️ **This is an experimental API! It is not fully supported, and is likely to change significantly in future versions.** - Example - ------- - ### Video with explicit frames: + Examples + -------- + ### Video with automatically determined frames: ```python # TODO(#7298): ⚠️ Video is currently only supported in the Rerun web viewer. import sys - import numpy as np import rerun as rr if len(sys.argv) < 2: @@ -45,23 +44,74 @@ class AssetVideo(AssetVideoExt, Archetype): print(f"Usage: {sys.argv[0]} ") sys.exit(1) - rr.init("rerun_example_asset_video_manual_frames", spawn=True) + rr.init("rerun_example_asset_video_auto_frames", spawn=True) # Log video asset which is referred to by frame references. - rr.set_time_seconds("video_time", 0) # Make sure it's available on the timeline used for the frame references. - rr.log("video", rr.AssetVideo(path=sys.argv[1])) + video_asset = rr.AssetVideo(path=sys.argv[1]) + rr.log("video", video_asset, static=True) - # Send frame references for every 0.1 seconds over a total of 10 seconds. - # Naturally, this will result in a choppy playback and only makes sense if the video is 10 seconds or longer. - # To get all frame times of a video use `rr.AssetVideo.read_frame_timestamps_ns`. - # - # Use `send_columns` to send all frame references in a single call. - times = np.arange(0.0, 10.0, 0.1) + # Send automatically determined video frame timestamps. 
+ frame_timestamps_ns = video_asset.read_frame_timestamps_ns() rr.send_columns( "video", - times=[rr.TimeSecondsColumn("video_time", times)], - components=[rr.VideoFrameReference.indicator(), rr.components.VideoTimestamp.seconds(times)], + # Note timeline values don't have to be the same as the video timestamps. + times=[rr.TimeNanosColumn("video_time", frame_timestamps_ns)], + components=[rr.VideoFrameReference.indicator(), rr.components.VideoTimestamp.nanoseconds(frame_timestamps_ns)], + ) + ``` +
+ + + + + + + +
+ + ### Demonstrates manual use of video frame references: + ```python + # TODO(#7298): ⚠️ Video is currently only supported in the Rerun web viewer. + # TODO(#7420): This sample doesn't render yet. + + import sys + + import rerun as rr + import rerun.blueprint as rrb + + if len(sys.argv) < 2: + # TODO(#7354): Only mp4 is supported for now. + print(f"Usage: {sys.argv[0]} ") + sys.exit(1) + + rr.init("rerun_example_asset_video_manual_frames", spawn=True) + + # Log video asset which is referred to by frame references. + rr.log("video_asset", rr.AssetVideo(path=sys.argv[1]), static=True) + + # Create two entites, showing the same video frozen at different times. + rr.log( + "frame_at_start", + rr.VideoFrameReference( + timestamp=rr.components.VideoTimestamp(seconds=0.0), + video_reference="video_asset", + ), + ) + rr.log( + "frame_at_one_second", + rr.VideoFrameReference( + timestamp=rr.components.VideoTimestamp(seconds=1.0), + video_reference="video_asset", + ), ) + + # Send blueprint that shows two 2D views next to each other. + rr.send_blueprint( + rrb.Horizontal(rrb.Spatial2DView(origin="frame_at_start"), rrb.Spatial2DView(origin="frame_at_one_second")) + ) + + + # TODO: doesn't show video frames right now. ```
diff --git a/rerun_py/rerun_sdk/rerun/archetypes/video_frame_reference.py b/rerun_py/rerun_sdk/rerun/archetypes/video_frame_reference.py index 1407025e4a1b..6a697c05e869 100644 --- a/rerun_py/rerun_sdk/rerun/archetypes/video_frame_reference.py +++ b/rerun_py/rerun_sdk/rerun/archetypes/video_frame_reference.py @@ -28,15 +28,14 @@ class VideoFrameReference(Archetype): ⚠️ **This is an experimental API! It is not fully supported, and is likely to change significantly in future versions.** - Example - ------- - ### Video with explicit frames: + Examples + -------- + ### Video with automatically determined frames: ```python # TODO(#7298): ⚠️ Video is currently only supported in the Rerun web viewer. import sys - import numpy as np import rerun as rr if len(sys.argv) < 2: @@ -44,23 +43,74 @@ class VideoFrameReference(Archetype): print(f"Usage: {sys.argv[0]} ") sys.exit(1) - rr.init("rerun_example_asset_video_manual_frames", spawn=True) + rr.init("rerun_example_asset_video_auto_frames", spawn=True) # Log video asset which is referred to by frame references. - rr.set_time_seconds("video_time", 0) # Make sure it's available on the timeline used for the frame references. - rr.log("video", rr.AssetVideo(path=sys.argv[1])) + video_asset = rr.AssetVideo(path=sys.argv[1]) + rr.log("video", video_asset, static=True) - # Send frame references for every 0.1 seconds over a total of 10 seconds. - # Naturally, this will result in a choppy playback and only makes sense if the video is 10 seconds or longer. - # To get all frame times of a video use `rr.AssetVideo.read_frame_timestamps_ns`. - # - # Use `send_columns` to send all frame references in a single call. - times = np.arange(0.0, 10.0, 0.1) + # Send automatically determined video frame timestamps. + frame_timestamps_ns = video_asset.read_frame_timestamps_ns() rr.send_columns( "video", - times=[rr.TimeSecondsColumn("video_time", times)], - components=[rr.VideoFrameReference.indicator(), rr.components.VideoTimestamp.seconds(times)], + # Note timeline values don't have to be the same as the video timestamps. + times=[rr.TimeNanosColumn("video_time", frame_timestamps_ns)], + components=[rr.VideoFrameReference.indicator(), rr.components.VideoTimestamp.nanoseconds(frame_timestamps_ns)], + ) + ``` +
+ + + + + + + +
+ + ### Demonstrates manual use of video frame references: + ```python + # TODO(#7298): ⚠️ Video is currently only supported in the Rerun web viewer. + # TODO(#7420): This sample doesn't render yet. + + import sys + + import rerun as rr + import rerun.blueprint as rrb + + if len(sys.argv) < 2: + # TODO(#7354): Only mp4 is supported for now. + print(f"Usage: {sys.argv[0]} ") + sys.exit(1) + + rr.init("rerun_example_asset_video_manual_frames", spawn=True) + + # Log video asset which is referred to by frame references. + rr.log("video_asset", rr.AssetVideo(path=sys.argv[1]), static=True) + + # Create two entites, showing the same video frozen at different times. + rr.log( + "frame_at_start", + rr.VideoFrameReference( + timestamp=rr.components.VideoTimestamp(seconds=0.0), + video_reference="video_asset", + ), + ) + rr.log( + "frame_at_one_second", + rr.VideoFrameReference( + timestamp=rr.components.VideoTimestamp(seconds=1.0), + video_reference="video_asset", + ), ) + + # Send blueprint that shows two 2D views next to each other. + rr.send_blueprint( + rrb.Horizontal(rrb.Spatial2DView(origin="frame_at_start"), rrb.Spatial2DView(origin="frame_at_one_second")) + ) + + + # TODO: doesn't show video frames right now. ```
diff --git a/rerun_py/rerun_sdk/rerun/components/video_timestamp.py b/rerun_py/rerun_sdk/rerun/components/video_timestamp.py index 23b11eae4ddc..41fa33bcf3c0 100644 --- a/rerun_py/rerun_sdk/rerun/components/video_timestamp.py +++ b/rerun_py/rerun_sdk/rerun/components/video_timestamp.py @@ -23,7 +23,7 @@ class VideoTimestamp(VideoTimestampExt, datatypes.VideoTimestamp, ComponentMixin """ _BATCH_TYPE = None - # You can define your own __init__ function as a member of VideoTimestampExt in video_timestamp_ext.py + # __init__ can be found in video_timestamp_ext.py # Note: there are no fields here because VideoTimestamp delegates to datatypes.VideoTimestamp pass diff --git a/rerun_py/rerun_sdk/rerun/components/video_timestamp_ext.py b/rerun_py/rerun_sdk/rerun/components/video_timestamp_ext.py index 98403aa558cc..307ba0cb1122 100644 --- a/rerun_py/rerun_sdk/rerun/components/video_timestamp_ext.py +++ b/rerun_py/rerun_sdk/rerun/components/video_timestamp_ext.py @@ -1,18 +1,51 @@ from __future__ import annotations +from typing import Any, Union + import numpy as np import numpy.typing as npt +from rerun.error_utils import catch_and_log_exceptions + from .. import components, datatypes class VideoTimestampExt: """Extension for [VideoTimestamp][rerun.components.VideoTimestamp].""" - # Implementation note: - # We could add an init method that deals with seconds/milliseconds/nanoseconds etc. - # However, this would require _a lot_ of slow parameter validation on a per timestamp basis. - # When in actuallity, this data practically always comes in homogeneous batches. + def __init__( + self: Any, + *, + video_time: Union[int, None] = None, + time_mode: Union[datatypes.VideoTimeModeLike, None] = None, + seconds: Union[float, None] = None, + ): + """ + Create a new instance of the VideoTimestamp component. + + Parameters + ---------- + video_time: + Timestamp value, type defined by `time_mode`. + time_mode: + How to interpret `video_time`. + seconds: + The timestamp in seconds since the start of the video. + Mutually exclusive with `video_time` and `time_mode`. + + """ + + with catch_and_log_exceptions(context=self.__class__.__name__): + if seconds is not None: + if video_time is not None or time_mode is not None: + raise ValueError("Cannot specify both `seconds` and `video_time`/`time_mode`.") + video_time = int(seconds * 1e9) + time_mode = datatypes.VideoTimeMode.Nanoseconds + + self.__attrs_init__(video_time=video_time, time_mode=time_mode) + return + + self.__attrs_clear__() @staticmethod def seconds( From 13e930f8b8e74b1e4466438cbf5c7f17ed304b9c Mon Sep 17 00:00:00 2001 From: Andreas Reich Date: Mon, 16 Sep 2024 12:25:34 +0200 Subject: [PATCH 13/21] remove superfluous todo --- docs/content/reference/types/archetypes.md | 2 +- docs/content/reference/types/components.md | 2 +- docs/snippets/all/archetypes/video_manual_frames.py | 3 --- rerun_py/rerun_sdk/rerun/archetypes/asset_video.py | 3 --- rerun_py/rerun_sdk/rerun/archetypes/video_frame_reference.py | 3 --- 5 files changed, 2 insertions(+), 11 deletions(-) diff --git a/docs/content/reference/types/archetypes.md b/docs/content/reference/types/archetypes.md index be8c53d8ec70..8f927c594022 100644 --- a/docs/content/reference/types/archetypes.md +++ b/docs/content/reference/types/archetypes.md @@ -7,7 +7,7 @@ order: 1 Archetypes are bundles of components for which the Rerun viewer has first-class built-in support. 
When logged, each archetype also includes an _indicator component_ which captures the intent of the logging code and triggers the activation of the corresponding visualizers. See -[Entities and Compponents](../../concepts/entity-component.md) and +[Entities and Components](../../concepts/entity-component.md) and [Visualizers and Overrides](../../concepts/visualizers-and-overrides.md) for more information. This page lists all built-in archetypes. diff --git a/docs/content/reference/types/components.md b/docs/content/reference/types/components.md index 1139ef975b74..41fb4120dfe8 100644 --- a/docs/content/reference/types/components.md +++ b/docs/content/reference/types/components.md @@ -10,7 +10,7 @@ An entity can only ever contain a single array of any given component type. If you log the same component several times on an entity, the last value (or array of values) will overwrite the previous. For more information on the relationship between **archetypes** and **components**, check out the concept page -on [Entities and Components](../../concepts/entity-component.md). +on [Entities and Components](../../concepts/entity-component.md). * [`AggregationPolicy`](components/aggregation_policy.md): Policy for aggregation of multiple scalar plot values. diff --git a/docs/snippets/all/archetypes/video_manual_frames.py b/docs/snippets/all/archetypes/video_manual_frames.py index 3bf7dde322ed..f0f0683b2f88 100644 --- a/docs/snippets/all/archetypes/video_manual_frames.py +++ b/docs/snippets/all/archetypes/video_manual_frames.py @@ -37,6 +37,3 @@ rr.send_blueprint( rrb.Horizontal(rrb.Spatial2DView(origin="frame_at_start"), rrb.Spatial2DView(origin="frame_at_one_second")) ) - - - # TODO: doesn't show video frames right now. diff --git a/rerun_py/rerun_sdk/rerun/archetypes/asset_video.py b/rerun_py/rerun_sdk/rerun/archetypes/asset_video.py index f48fdf40e6b6..7131d21967f6 100644 --- a/rerun_py/rerun_sdk/rerun/archetypes/asset_video.py +++ b/rerun_py/rerun_sdk/rerun/archetypes/asset_video.py @@ -109,9 +109,6 @@ class AssetVideo(AssetVideoExt, Archetype): rr.send_blueprint( rrb.Horizontal(rrb.Spatial2DView(origin="frame_at_start"), rrb.Spatial2DView(origin="frame_at_one_second")) ) - - - # TODO: doesn't show video frames right now. ```
diff --git a/rerun_py/rerun_sdk/rerun/archetypes/video_frame_reference.py b/rerun_py/rerun_sdk/rerun/archetypes/video_frame_reference.py index 6a697c05e869..5ba08cb6fa1e 100644 --- a/rerun_py/rerun_sdk/rerun/archetypes/video_frame_reference.py +++ b/rerun_py/rerun_sdk/rerun/archetypes/video_frame_reference.py @@ -108,9 +108,6 @@ class VideoFrameReference(Archetype): rr.send_blueprint( rrb.Horizontal(rrb.Spatial2DView(origin="frame_at_start"), rrb.Spatial2DView(origin="frame_at_one_second")) ) - - - # TODO: doesn't show video frames right now. ```
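Assembled from the (truncated) docstrings cleaned up in this patch, the manual frame-reference pattern looks roughly like the sketch below in Python. The `timestamp=` and `video_reference=` keyword names are not visible in the diff context and are assumptions; the app id is likewise illustrative:

```python
import sys

import rerun as rr
import rerun.blueprint as rrb

rr.init("rerun_example_video_manual_frames", spawn=True)

# Log the video asset once, statically; frame references point back at it.
rr.log("video_asset", rr.AssetVideo(path=sys.argv[1]), static=True)

# Two entities, each freezing the same video at a different timestamp.
# NOTE: the exact VideoFrameReference keyword arguments are assumptions here.
rr.log(
    "frame_at_start",
    rr.VideoFrameReference(
        timestamp=rr.components.VideoTimestamp(seconds=0.0),
        video_reference="video_asset",
    ),
)
rr.log(
    "frame_at_one_second",
    rr.VideoFrameReference(
        timestamp=rr.components.VideoTimestamp(seconds=1.0),
        video_reference="video_asset",
    ),
)

# Show both frozen frames side by side.
rr.send_blueprint(
    rrb.Horizontal(rrb.Spatial2DView(origin="frame_at_start"), rrb.Spatial2DView(origin="frame_at_one_second"))
)
```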
From a0acc5622d1fe2c006331ef8b1d3bcf662bda807 Mon Sep 17 00:00:00 2001 From: Andreas Reich Date: Mon, 16 Sep 2024 12:30:18 +0200 Subject: [PATCH 14/21] more typos / lints --- crates/store/re_video/src/lib.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/crates/store/re_video/src/lib.rs b/crates/store/re_video/src/lib.rs index 0b172776d924..efd545fbd83e 100644 --- a/crates/store/re_video/src/lib.rs +++ b/crates/store/re_video/src/lib.rs @@ -27,7 +27,7 @@ impl VideoData { /// TODO(andreas, jan): This should not copy the data, but instead store slices into a shared buffer. /// at the very least the should be a way to extract only metadata. pub fn load_from_bytes(data: &[u8], media_type: Option<&str>) -> Result { - // Media type guessing here should be identical to to `re_types::MediaType::guess_from_data`, + // Media type guessing here should be identical to `re_types::MediaType::guess_from_data`, // but we don't want to depend on `re_types` here. let media_type = if let Some(media_type) = media_type { media_type.to_owned() From 1da4436eb79facf263a88f8030c79d4f2b908160 Mon Sep 17 00:00:00 2001 From: Andreas Reich Date: Mon, 16 Sep 2024 12:32:04 +0200 Subject: [PATCH 15/21] sample fixes --- docs/snippets/all/archetypes/video_manual_frames.rs | 4 ++-- rerun_cpp/src/rerun/components/media_type_ext.cpp | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/docs/snippets/all/archetypes/video_manual_frames.rs b/docs/snippets/all/archetypes/video_manual_frames.rs index bce79f1e043e..3762312a6403 100644 --- a/docs/snippets/all/archetypes/video_manual_frames.rs +++ b/docs/snippets/all/archetypes/video_manual_frames.rs @@ -22,12 +22,12 @@ fn main() -> anyhow::Result<()> { "frame_at_start", &rerun::VideoFrameReference::new(rerun::components::VideoTimestamp::from_seconds(0.0)) .with_video_reference("video_asset"), - ); + )?; rec.log( "frame_at_one_second", &rerun::VideoFrameReference::new(rerun::components::VideoTimestamp::from_seconds(1.0)) .with_video_reference("video_asset"), - ); + )?; // TODO(#5520): log blueprint once supported Ok(()) diff --git a/rerun_cpp/src/rerun/components/media_type_ext.cpp b/rerun_cpp/src/rerun/components/media_type_ext.cpp index bf41b335ad52..968c5b1ca7db 100644 --- a/rerun_cpp/src/rerun/components/media_type_ext.cpp +++ b/rerun_cpp/src/rerun/components/media_type_ext.cpp @@ -138,4 +138,4 @@ namespace rerun { return std::nullopt; } }; // namespace components -}; // namespace rerun +}; // namespace rerun From 463e58116aef786bd3b8a58c06ef4c39504469eb Mon Sep 17 00:00:00 2001 From: Andreas Reich Date: Mon, 16 Sep 2024 14:51:18 +0200 Subject: [PATCH 16/21] fix check_all_components_ui script --- tests/python/release_checklist/check_all_components_ui.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/python/release_checklist/check_all_components_ui.py b/tests/python/release_checklist/check_all_components_ui.py index a7daab5b2fbc..af638f024364 100644 --- a/tests/python/release_checklist/check_all_components_ui.py +++ b/tests/python/release_checklist/check_all_components_ui.py @@ -218,7 +218,7 @@ def alternatives(self) -> list[Any] | None: "TriangleIndicesBatch": TestCase(batch=[(0, 1, 2), (3, 4, 5), (6, 7, 8)]), "Vector2DBatch": TestCase(batch=[(0, 1), (2, 3), (4, 5)]), "Vector3DBatch": TestCase(batch=[(0, 3, 4), (1, 4, 5), (2, 5, 6)]), - "VideoTimestampBatch": TestCase(rr.components.VideoTimestamp(0, rr.datatypes.VideoTimeMode.Nanoseconds)), + "VideoTimestampBatch": TestCase(rr.components.VideoTimestamp(seconds=0.0)), 
"ViewCoordinatesBatch": TestCase(rr.components.ViewCoordinates.LBD), "VisualizerOverridesBatch": TestCase(disabled=True), # no Python-based serialization } From 5049e93a0e5884faa2a693ec2ffb61e5c477a434 Mon Sep 17 00:00:00 2001 From: Andreas Reich Date: Mon, 16 Sep 2024 16:28:25 +0200 Subject: [PATCH 17/21] fix typos --- crates/store/re_types/src/archetypes/asset_video.rs | 6 +++--- .../store/re_types/src/archetypes/video_frame_reference.rs | 6 +++--- crates/store/re_video/src/lib.rs | 2 +- docs/content/reference/types/archetypes.md | 2 +- docs/content/reference/types/components.md | 2 +- docs/snippets/all/archetypes/video_manual_frames.cpp | 2 +- docs/snippets/all/archetypes/video_manual_frames.py | 2 +- docs/snippets/all/archetypes/video_manual_frames.rs | 2 +- rerun_cpp/src/rerun/archetypes/asset_video.hpp | 2 +- rerun_cpp/src/rerun/archetypes/video_frame_reference.hpp | 2 +- rerun_py/rerun_sdk/rerun/archetypes/asset_video.py | 2 +- .../rerun_sdk/rerun/archetypes/video_frame_reference.py | 2 +- 12 files changed, 16 insertions(+), 16 deletions(-) diff --git a/crates/store/re_types/src/archetypes/asset_video.rs b/crates/store/re_types/src/archetypes/asset_video.rs index 997c93f6d754..3e91d5761c48 100644 --- a/crates/store/re_types/src/archetypes/asset_video.rs +++ b/crates/store/re_types/src/archetypes/asset_video.rs @@ -100,17 +100,17 @@ use ::re_types_core::{DeserializationError, DeserializationResult}; /// // Log video asset which is referred to by frame references. /// rec.log_static("video_asset", &rerun::AssetVideo::from_file_path(path)?)?; /// -/// // Create two entites, showing the same video frozen at different times. +/// // Create two entities, showing the same video frozen at different times. /// rec.log( /// "frame_at_start", /// &rerun::VideoFrameReference::new(rerun::components::VideoTimestamp::from_seconds(0.0)) /// .with_video_reference("video_asset"), -/// ); +/// )?; /// rec.log( /// "frame_at_one_second", /// &rerun::VideoFrameReference::new(rerun::components::VideoTimestamp::from_seconds(1.0)) /// .with_video_reference("video_asset"), -/// ); +/// )?; /// /// // TODO(#5520): log blueprint once supported /// Ok(()) diff --git a/crates/store/re_types/src/archetypes/video_frame_reference.rs b/crates/store/re_types/src/archetypes/video_frame_reference.rs index 41ebf3f46587..c876ce0a8d32 100644 --- a/crates/store/re_types/src/archetypes/video_frame_reference.rs +++ b/crates/store/re_types/src/archetypes/video_frame_reference.rs @@ -97,17 +97,17 @@ use ::re_types_core::{DeserializationError, DeserializationResult}; /// // Log video asset which is referred to by frame references. /// rec.log_static("video_asset", &rerun::AssetVideo::from_file_path(path)?)?; /// -/// // Create two entites, showing the same video frozen at different times. +/// // Create two entities, showing the same video frozen at different times. 
/// rec.log( /// "frame_at_start", /// &rerun::VideoFrameReference::new(rerun::components::VideoTimestamp::from_seconds(0.0)) /// .with_video_reference("video_asset"), -/// ); +/// )?; /// rec.log( /// "frame_at_one_second", /// &rerun::VideoFrameReference::new(rerun::components::VideoTimestamp::from_seconds(1.0)) /// .with_video_reference("video_asset"), -/// ); +/// )?; /// /// // TODO(#5520): log blueprint once supported /// Ok(()) diff --git a/crates/store/re_video/src/lib.rs b/crates/store/re_video/src/lib.rs index efd545fbd83e..d806c65c3225 100644 --- a/crates/store/re_video/src/lib.rs +++ b/crates/store/re_video/src/lib.rs @@ -34,7 +34,7 @@ impl VideoData { } else if mp4::is_mp4(data) { "video/mp4".to_owned() } else { - // Technically this means that we failed to determine the media type alltogether, + // Technically this means that we failed to determine the media type altogether, // but we don't want to call it `FailedToDetermineMediaType` since the rest of Rerun has // access to `re_types::components::MediaType` which has a much wider range of media type detection. return Err(VideoLoadError::UnsupportedVideoType); diff --git a/docs/content/reference/types/archetypes.md b/docs/content/reference/types/archetypes.md index 8f927c594022..be8c53d8ec70 100644 --- a/docs/content/reference/types/archetypes.md +++ b/docs/content/reference/types/archetypes.md @@ -7,7 +7,7 @@ order: 1 Archetypes are bundles of components for which the Rerun viewer has first-class built-in support. When logged, each archetype also includes an _indicator component_ which captures the intent of the logging code and triggers the activation of the corresponding visualizers. See -[entities and Compponents](../../concepts/entity-component.md) and +[Entities and Compponents](../../concepts/entity-component.md) and [Visualizers and Overrides](../../concepts/visualizers-and-overrides.md) for more information. This page lists all built-in archetypes. diff --git a/docs/content/reference/types/components.md b/docs/content/reference/types/components.md index 41fb4120dfe8..1139ef975b74 100644 --- a/docs/content/reference/types/components.md +++ b/docs/content/reference/types/components.md @@ -10,7 +10,7 @@ An entity can only ever contain a single array of any given component type. If you log the same component several times on an entity, the last value (or array of values) will overwrite the previous. For more information on the relationship between **archetypes** and **components**, check out the concept page -on [entities and Components](../../concepts/entity-component.md). +on [Entities and Components](../../concepts/entity-component.md). * [`AggregationPolicy`](components/aggregation_policy.md): Policy for aggregation of multiple scalar plot values. diff --git a/docs/snippets/all/archetypes/video_manual_frames.cpp b/docs/snippets/all/archetypes/video_manual_frames.cpp index 5c6dd3bb95a9..f4e02d3416a4 100644 --- a/docs/snippets/all/archetypes/video_manual_frames.cpp +++ b/docs/snippets/all/archetypes/video_manual_frames.cpp @@ -23,7 +23,7 @@ int main(int argc, char* argv[]) { // Log video asset which is referred to by frame references. rec.log_static("video_asset", rerun::AssetVideo::from_file(path).value_or_throw()); - // Create two entites, showing the same video frozen at different times. + // Create two entities, showing the same video frozen at different times. 
rec.log("frame_at_start", rerun::VideoFrameReference(0.0s).with_video_reference("video_asset")); rec.log( "frame_at_one_second", diff --git a/docs/snippets/all/archetypes/video_manual_frames.py b/docs/snippets/all/archetypes/video_manual_frames.py index f0f0683b2f88..0dec27a11347 100644 --- a/docs/snippets/all/archetypes/video_manual_frames.py +++ b/docs/snippets/all/archetypes/video_manual_frames.py @@ -17,7 +17,7 @@ # Log video asset which is referred to by frame references. rr.log("video_asset", rr.AssetVideo(path=sys.argv[1]), static=True) -# Create two entites, showing the same video frozen at different times. +# Create two entities, showing the same video frozen at different times. rr.log( "frame_at_start", rr.VideoFrameReference( diff --git a/docs/snippets/all/archetypes/video_manual_frames.rs b/docs/snippets/all/archetypes/video_manual_frames.rs index 3762312a6403..447c98e848a6 100644 --- a/docs/snippets/all/archetypes/video_manual_frames.rs +++ b/docs/snippets/all/archetypes/video_manual_frames.rs @@ -17,7 +17,7 @@ fn main() -> anyhow::Result<()> { // Log video asset which is referred to by frame references. rec.log_static("video_asset", &rerun::AssetVideo::from_file_path(path)?)?; - // Create two entites, showing the same video frozen at different times. + // Create two entities, showing the same video frozen at different times. rec.log( "frame_at_start", &rerun::VideoFrameReference::new(rerun::components::VideoTimestamp::from_seconds(0.0)) diff --git a/rerun_cpp/src/rerun/archetypes/asset_video.hpp b/rerun_cpp/src/rerun/archetypes/asset_video.hpp index d3173492d3cc..a39ef4208f49 100644 --- a/rerun_cpp/src/rerun/archetypes/asset_video.hpp +++ b/rerun_cpp/src/rerun/archetypes/asset_video.hpp @@ -107,7 +107,7 @@ namespace rerun::archetypes { /// // Log video asset which is referred to by frame references. /// rec.log_static("video_asset", rerun::AssetVideo::from_file(path).value_or_throw()); /// - /// // Create two entites, showing the same video frozen at different times. + /// // Create two entities, showing the same video frozen at different times. /// rec.log("frame_at_start", rerun::VideoFrameReference(0.0s).with_video_reference("video_asset")); /// rec.log( /// "frame_at_one_second", diff --git a/rerun_cpp/src/rerun/archetypes/video_frame_reference.hpp b/rerun_cpp/src/rerun/archetypes/video_frame_reference.hpp index ad8b68c9caeb..86e40596f1b2 100644 --- a/rerun_cpp/src/rerun/archetypes/video_frame_reference.hpp +++ b/rerun_cpp/src/rerun/archetypes/video_frame_reference.hpp @@ -102,7 +102,7 @@ namespace rerun::archetypes { /// // Log video asset which is referred to by frame references. /// rec.log_static("video_asset", rerun::AssetVideo::from_file(path).value_or_throw()); /// - /// // Create two entites, showing the same video frozen at different times. + /// // Create two entities, showing the same video frozen at different times. /// rec.log("frame_at_start", rerun::VideoFrameReference(0.0s).with_video_reference("video_asset")); /// rec.log( /// "frame_at_one_second", diff --git a/rerun_py/rerun_sdk/rerun/archetypes/asset_video.py b/rerun_py/rerun_sdk/rerun/archetypes/asset_video.py index 7131d21967f6..c35d9b4ed86e 100644 --- a/rerun_py/rerun_sdk/rerun/archetypes/asset_video.py +++ b/rerun_py/rerun_sdk/rerun/archetypes/asset_video.py @@ -89,7 +89,7 @@ class AssetVideo(AssetVideoExt, Archetype): # Log video asset which is referred to by frame references. 
rr.log("video_asset", rr.AssetVideo(path=sys.argv[1]), static=True) - # Create two entites, showing the same video frozen at different times. + # Create two entities, showing the same video frozen at different times. rr.log( "frame_at_start", rr.VideoFrameReference( diff --git a/rerun_py/rerun_sdk/rerun/archetypes/video_frame_reference.py b/rerun_py/rerun_sdk/rerun/archetypes/video_frame_reference.py index 5ba08cb6fa1e..1b72bf645919 100644 --- a/rerun_py/rerun_sdk/rerun/archetypes/video_frame_reference.py +++ b/rerun_py/rerun_sdk/rerun/archetypes/video_frame_reference.py @@ -88,7 +88,7 @@ class VideoFrameReference(Archetype): # Log video asset which is referred to by frame references. rr.log("video_asset", rr.AssetVideo(path=sys.argv[1]), static=True) - # Create two entites, showing the same video frozen at different times. + # Create two entities, showing the same video frozen at different times. rr.log( "frame_at_start", rr.VideoFrameReference( From 0350eda30bff6bd3052119d282723294ab636b03 Mon Sep 17 00:00:00 2001 From: Andreas Reich Date: Mon, 16 Sep 2024 16:35:43 +0200 Subject: [PATCH 18/21] fixup snippets.toml --- docs/snippets/snippets.toml | 11 ++++++++--- 1 file changed, 8 insertions(+), 3 deletions(-) diff --git a/docs/snippets/snippets.toml b/docs/snippets/snippets.toml index dc4ae3d66749..82ad3289c7b2 100644 --- a/docs/snippets/snippets.toml +++ b/docs/snippets/snippets.toml @@ -181,16 +181,21 @@ quick_start = [ # These examples don't have exactly the same implementation. "py", "rust", ] - +"archetypes/video_auto_frames" = [ # This mixes `log` and `send_columns`. Since `log` is suspect to delays by the batcher, this test gets flaky. + "cpp", + "py", + "rust", +] # `$config_dir` will be replaced with the absolute path of `docs/snippets`. # Note that the snippet comparison tool will automatically run `/tests/assets/download_test_assets.py` before running the snippets. [extra_args] "archetypes/asset3d_simple" = ["$config_dir/../../tests/assets/cube.glb"] "archetypes/asset3d_out_of_tree" = ["$config_dir/../../tests/assets/cube.glb"] -"archetypes/video_manual_frames" = [ +"archetypes/video_auto_frames" = [ "$config_dir/../../tests/assets/video/Big_Buck_Bunny_1080_10s_av1.mp4", ] -"archetypes/video_auto_frames" = [ +# TODO(#7420): This sample doesn't render yet. Once it does it would be nice to have a video that looks significantly different after 1s. 
+"archetypes/video_manual_frames" = [ "$config_dir/../../tests/assets/video/Big_Buck_Bunny_1080_10s_av1.mp4", ] From de223e61c128794a22bdf35ca928d5613ce52aef Mon Sep 17 00:00:00 2001 From: Andreas Reich Date: Tue, 17 Sep 2024 10:49:50 +0200 Subject: [PATCH 19/21] ensure sorting of timestamps --- Cargo.lock | 1 + crates/store/re_video/Cargo.toml | 1 + crates/store/re_video/src/lib.rs | 5 +++++ 3 files changed, 7 insertions(+) diff --git a/Cargo.lock b/Cargo.lock index 3cdc51c26fa5..de684947a684 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -5787,6 +5787,7 @@ dependencies = [ name = "re_video" version = "0.19.0-alpha.1+dev" dependencies = [ + "itertools 0.13.0", "mp4", "ordered-float", "thiserror", diff --git a/crates/store/re_video/Cargo.toml b/crates/store/re_video/Cargo.toml index be2b34667708..3f1d45f98717 100644 --- a/crates/store/re_video/Cargo.toml +++ b/crates/store/re_video/Cargo.toml @@ -26,6 +26,7 @@ features = ["all"] [features] [dependencies] +itertools.workspace = true mp4.workspace = true ordered-float.workspace = true thiserror.workspace = true diff --git a/crates/store/re_video/src/lib.rs b/crates/store/re_video/src/lib.rs index d806c65c3225..866fbc39ddb1 100644 --- a/crates/store/re_video/src/lib.rs +++ b/crates/store/re_video/src/lib.rs @@ -3,6 +3,8 @@ //! The entry point is [`load_mp4`], which produces an instance of [`VideoData`]. mod mp4; + +use itertools::Itertools; use ordered_float::OrderedFloat; /// Decoded video data. @@ -50,10 +52,13 @@ impl VideoData { /// /// Returned timestamps are in nanoseconds since start and are guaranteed to be monotonically increasing. pub fn frame_timestamps_ns(&self) -> impl Iterator + '_ { + // Segments are guaranteed to be sorted among each other, but within a segment, + // presentation timestamps may not be sorted since this is sorted by decode timestamps. self.segments.iter().flat_map(|seg| { seg.samples .iter() .map(|sample| sample.timestamp.as_nanoseconds()) + .sorted() }) } } From 22db1544f3bb0054beb1580c8090d68d89730e74 Mon Sep 17 00:00:00 2001 From: Andreas Reich Date: Tue, 17 Sep 2024 10:50:55 +0200 Subject: [PATCH 20/21] typo fix --- rerun_cpp/src/rerun/result.hpp | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/rerun_cpp/src/rerun/result.hpp b/rerun_cpp/src/rerun/result.hpp index 0b51bac6b319..d9fd1009fad3 100644 --- a/rerun_cpp/src/rerun/result.hpp +++ b/rerun_cpp/src/rerun/result.hpp @@ -27,7 +27,7 @@ namespace rerun { /// Construct a result from an error, default constructing the value. Result(rerun::Error _error) : value(), error(std::move(_error)) {} - /// Construct a result from an arrow setatus, default constructing the value. + /// Construct a result from an arrow status, default constructing the value. Result(const arrow::Status& status) : value(), error(status) {} /// Construct a result from an arrow status, default constructing the value. From bb152538c2c069843533f21f1dac73ebb29181da Mon Sep 17 00:00:00 2001 From: Andreas Reich Date: Tue, 17 Sep 2024 11:19:39 +0200 Subject: [PATCH 21/21] fix doc string --- crates/store/re_video/src/lib.rs | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/crates/store/re_video/src/lib.rs b/crates/store/re_video/src/lib.rs index 866fbc39ddb1..62204081dedc 100644 --- a/crates/store/re_video/src/lib.rs +++ b/crates/store/re_video/src/lib.rs @@ -1,6 +1,7 @@ //! Video decoding library. //! -//! The entry point is [`load_mp4`], which produces an instance of [`VideoData`]. +//! The entry point is [`VideoData::load_from_bytes`] +//! 
which produces an instance of [`VideoData`] from any supported video container. mod mp4;
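As a side note on the `frame_timestamps_ns` change in PATCH 19: segments are ordered relative to each other, but samples inside a segment are stored in decode order, so their presentation timestamps can be locally out of order. Sorting within each segment is therefore enough to make the flattened iterator monotonic. An illustrative Python sketch of that guarantee, using made-up timestamp values rather than real sample data:

```python
from itertools import chain

# Hypothetical per-segment presentation timestamps (ns), listed in decode order;
# B-frame reordering makes them non-monotonic inside a segment.
segments = [
    [0, 66_666_667, 33_333_333],
    [100_000_000, 166_666_667, 133_333_333],
]

# Mirrors the patched iterator: flatten the segments, sorting within each one.
timestamps = list(chain.from_iterable(sorted(seg) for seg in segments))

assert timestamps == sorted(timestamps)  # monotonically increasing overall
```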