diff --git a/crates/store/re_video/src/decode/av1.rs b/crates/store/re_video/src/decode/av1.rs
index c28215ae0f95..7d2f852adeb7 100644
--- a/crates/store/re_video/src/decode/av1.rs
+++ b/crates/store/re_video/src/decode/av1.rs
@@ -253,9 +253,10 @@ fn create_frame(debug_name: &str, picture: &dav1d::Picture) -> Result {
format,
},
info: FrameInfo {
+ is_sync: None, // TODO(emilk)
presentation_timestamp: Time(picture.timestamp().unwrap_or(0)),
duration: Time(picture.duration()),
- ..Default::default()
+ latest_decode_timestamp: None,
},
})
}
diff --git a/crates/store/re_video/src/decode/ffmpeg_h264/ffmpeg.rs b/crates/store/re_video/src/decode/ffmpeg_h264/ffmpeg.rs
index 9829e05df12d..cd8218234e42 100644
--- a/crates/store/re_video/src/decode/ffmpeg_h264/ffmpeg.rs
+++ b/crates/store/re_video/src/decode/ffmpeg_h264/ffmpeg.rs
@@ -93,6 +93,11 @@ impl From for crate::decode::Error {
/// ffmpeg does not tell us the timestamp/duration of a given frame, so we need to remember it.
#[derive(Clone)]
struct FfmpegFrameInfo {
+ /// The start of a new [`crate::demux::GroupOfPictures`]?
+ ///
+    /// This probably means this is a _keyframe_, and that an entire frame
+    /// can be decoded from only this one sample (though I'm not 100% sure).
+ is_sync: bool,
presentation_timestamp: Time,
duration: Time,
decode_timestamp: Time,
@@ -575,6 +580,7 @@ fn read_ffmpeg_output(
format: pixel_format.clone(),
},
info: FrameInfo {
+ is_sync: Some(frame_info.is_sync),
presentation_timestamp: frame_info.presentation_timestamp,
duration: frame_info.duration,
latest_decode_timestamp: Some(frame_info.decode_timestamp),
@@ -690,6 +696,7 @@ impl AsyncDecoder for FfmpegCliH264Decoder {
// We send the information about this chunk first.
// Chunks are defined to always yield a single frame.
let frame_info = FfmpegFrameInfo {
+ is_sync: chunk.is_sync,
presentation_timestamp: chunk.presentation_timestamp,
decode_timestamp: chunk.decode_timestamp,
duration: chunk.duration,
diff --git a/crates/store/re_video/src/decode/mod.rs b/crates/store/re_video/src/decode/mod.rs
index e4a46805a929..83f5055ec07b 100644
--- a/crates/store/re_video/src/decode/mod.rs
+++ b/crates/store/re_video/src/decode/mod.rs
@@ -215,6 +215,9 @@ pub fn new_decoder(
/// For details on how to interpret the data, see [`crate::Sample`].
pub struct Chunk {
/// The start of a new [`crate::demux::GroupOfPictures`]?
+ ///
+    /// This probably means this is a _keyframe_, and that an entire frame
+    /// can be decoded from only this one sample (though I'm not 100% sure).
pub is_sync: bool,
    pub data: Vec<u8>,
@@ -250,17 +253,18 @@ pub type FrameContent = webcodecs::WebVideoFrame;
/// Meta information about a decoded video frame, as reported by the decoder.
#[derive(Debug, Clone)]
pub struct FrameInfo {
- /// The presentation timestamp of the frame.
+ /// The start of a new [`crate::demux::GroupOfPictures`]?
///
- /// Decoders are required to report this.
- /// A timestamp of [`Time::MAX`] indicates that the frame is invalid or not yet available.
+    /// This probably means this is a _keyframe_, and that an entire frame
+    /// can be decoded from only this one sample (though I'm not 100% sure).
+ ///
+ /// None indicates that the information is not available.
+    pub is_sync: Option<bool>,
+
+ /// The presentation timestamp of the frame.
pub presentation_timestamp: Time,
/// How long the frame is valid.
- ///
- /// Decoders are required to report this.
- /// A duration of [`Time::MAX`] indicates that the frame is invalid or not yet available.
- // Implementation note: unlike with presentation timestamp we may be able fine with making this optional.
pub duration: Time,
/// The decode timestamp of the last chunk that was needed to decode this frame.
@@ -269,16 +273,6 @@ pub struct FrameInfo {
    pub latest_decode_timestamp: Option<Time>,
}
-impl Default for FrameInfo {
- fn default() -> Self {
- Self {
- presentation_timestamp: Time::MAX,
- duration: Time::MAX,
- latest_decode_timestamp: None,
- }
- }
-}
-
impl FrameInfo {
/// Presentation timestamp range in which this frame is valid.
    pub fn presentation_time_range(&self) -> std::ops::Range<Time> {
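
With the `Default` impl (and its `Time::MAX` sentinels) removed, every decoder now fills in all four `FrameInfo` fields explicitly, and `is_sync` is optional for backends that don't report keyframe information. A minimal, self-contained sketch of the new shape — `Time` here is a simplified stand-in newtype, not the real `re_video::Time`:

```rust
// Sketch only: stand-in types mirroring the new FrameInfo, not the real crate.
#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord)]
struct Time(i64);

impl std::ops::Add for Time {
    type Output = Self;
    fn add(self, rhs: Self) -> Self {
        Self(self.0 + rhs.0)
    }
}

#[derive(Debug, Clone)]
struct FrameInfo {
    /// `None` when the decoder backend doesn't report keyframe information.
    is_sync: Option<bool>,
    presentation_timestamp: Time,
    duration: Time,
    latest_decode_timestamp: Option<Time>,
}

impl FrameInfo {
    /// Presentation timestamp range in which this frame is valid.
    fn presentation_time_range(&self) -> std::ops::Range<Time> {
        self.presentation_timestamp..self.presentation_timestamp + self.duration
    }
}

fn main() {
    // With the `Default` impl gone, decoders spell out every field:
    let info = FrameInfo {
        is_sync: None, // e.g. the dav1d and WebCodecs paths above pass `None`
        presentation_timestamp: Time(0),
        duration: Time(1000),
        latest_decode_timestamp: None,
    };
    assert!(info.presentation_time_range().contains(&Time(500)));
    println!("{info:?}");
}
```
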
diff --git a/crates/store/re_video/src/decode/webcodecs.rs b/crates/store/re_video/src/decode/webcodecs.rs
index f90c8fc61158..b2c096e4b2bc 100644
--- a/crates/store/re_video/src/decode/webcodecs.rs
+++ b/crates/store/re_video/src/decode/webcodecs.rs
@@ -204,9 +204,10 @@ fn init_video_decoder(
on_output(Ok(Frame {
content: WebVideoFrame(frame),
info: FrameInfo {
+ is_sync: None,
presentation_timestamp,
duration,
- ..Default::default()
+ latest_decode_timestamp: None,
},
}));
}) as Box)
diff --git a/crates/store/re_video/src/demux/mod.rs b/crates/store/re_video/src/demux/mod.rs
index e603343a85f0..61af9286f857 100644
--- a/crates/store/re_video/src/demux/mod.rs
+++ b/crates/store/re_video/src/demux/mod.rs
@@ -457,7 +457,10 @@ impl GroupOfPictures {
/// > The decoding of each access unit results in one decoded picture.
#[derive(Debug, Clone)]
pub struct Sample {
- /// Is t his the start of a new [`GroupOfPictures`]?
+ /// Is this the start of a new [`GroupOfPictures`]?
+ ///
+    /// This probably means this is a _keyframe_, and that an entire frame
+    /// can be decoded from only this one sample (though I'm not 100% sure).
pub is_sync: bool,
/// Time at which this sample appears in the decoded bitstream, in time units.
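
For context, the reason `is_sync` matters: decoding can only start at a sync sample, so a seek first jumps back to the closest preceding one. A hedged, self-contained sketch with a stand-in `Sample` type — the real player instead looks up the enclosing `GroupOfPictures`, as in `player.rs` further down:

```rust
// Stand-in type for illustration; not the real re_video::Sample.
struct Sample {
    is_sync: bool,
}

/// Index of the sync sample to start decoding from, for a seek to `target_idx`.
fn seek_start(samples: &[Sample], target_idx: usize) -> Option<usize> {
    (0..=target_idx).rev().find(|&i| samples[i].is_sync)
}

fn main() {
    let samples = [
        Sample { is_sync: true },  // keyframe: first sample of a GOP
        Sample { is_sync: false },
        Sample { is_sync: false },
        Sample { is_sync: true },  // next GOP starts here
        Sample { is_sync: false },
    ];
    assert_eq!(seek_start(&samples, 2), Some(0));
    assert_eq!(seek_start(&samples, 4), Some(3));
}
```
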
diff --git a/crates/utils/re_log/src/setup.rs b/crates/utils/re_log/src/setup.rs
index 0949287ba15a..59b7e44e4a14 100644
--- a/crates/utils/re_log/src/setup.rs
+++ b/crates/utils/re_log/src/setup.rs
@@ -39,7 +39,8 @@ pub fn setup_logging() {
use std::io::Write as _;
writeln!(
buf,
- "{}:{} {}",
+ "{} {}:{} {}",
+ record.level(),
record.file().unwrap_or_default(),
record.line().unwrap_or_default(),
record.args()
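
The log format now leads with the level. For illustration only, a self-contained sketch of an `env_logger`-style formatter (assuming the `env_logger` and `log` crates) producing the same `LEVEL file:line message` shape; the real `re_log::setup_logging` does more than this:

```rust
// Illustration of the new log line shape, not the actual re_log setup.
fn main() {
    use std::io::Write as _;

    env_logger::Builder::new()
        .format(|buf, record| {
            writeln!(
                buf,
                "{} {}:{} {}",
                record.level(), // now printed first, e.g. `WARN`
                record.file().unwrap_or_default(),
                record.line().unwrap_or_default(),
                record.args()
            )
        })
        .filter_level(log::LevelFilter::Debug)
        .init();

    log::warn!("something looks off");
    // Prints something like: `WARN src/main.rs:21 something looks off`
}
```
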
diff --git a/crates/viewer/re_data_ui/src/video.rs b/crates/viewer/re_data_ui/src/video.rs
index 7a3447e771ea..1b4d83c317d8 100644
--- a/crates/viewer/re_data_ui/src/video.rs
+++ b/crates/viewer/re_data_ui/src/video.rs
@@ -166,14 +166,16 @@ pub fn show_decoded_frame_info(
}) => {
re_ui::list_item::list_item_scope(ui, "decoded_frame_ui", |ui| {
let default_open = false;
- ui.list_item_collapsible_noninteractive_label(
- "Current decoded frame",
- default_open,
- |ui| {
- frame_info_ui(ui, &frame_info, video.data());
- source_image_data_format_ui(ui, &source_pixel_format);
- },
- );
+ if let Some(frame_info) = frame_info {
+ ui.list_item_collapsible_noninteractive_label(
+ "Current decoded frame",
+ default_open,
+ |ui| {
+ frame_info_ui(ui, &frame_info, video.data());
+ source_image_data_format_ui(ui, &source_pixel_format);
+ },
+ );
+ }
});
let response = crate::image::texture_preview_ui(
@@ -226,14 +228,29 @@ fn samples_statistics_ui(ui: &mut egui::Ui, samples_statistics: &SamplesStatisti
}
fn frame_info_ui(ui: &mut egui::Ui, frame_info: &FrameInfo, video_data: &re_video::VideoData) {
- let time_range = frame_info.presentation_time_range();
+ let FrameInfo {
+ is_sync,
+ presentation_timestamp,
+ duration,
+ latest_decode_timestamp,
+ } = *frame_info;
+
+ if let Some(is_sync) = is_sync {
+ ui.list_item_flat_noninteractive(PropertyContent::new("Sync").value_bool(is_sync))
+ .on_hover_text(
+                "The start of a new GOP (Group of Pictures)?\n\
+ If true, it likely means the frame is a keyframe.",
+ );
+ }
+
+ let presentation_time_range = presentation_timestamp..presentation_timestamp + duration;
ui.list_item_flat_noninteractive(PropertyContent::new("Time range").value_text(format!(
"{} - {}",
- re_format::format_timestamp_seconds(time_range.start.into_secs_since_start(
+ re_format::format_timestamp_seconds(presentation_time_range.start.into_secs_since_start(
video_data.timescale,
video_data.samples_statistics.minimum_presentation_timestamp
)),
- re_format::format_timestamp_seconds(time_range.end.into_secs_since_start(
+ re_format::format_timestamp_seconds(presentation_time_range.end.into_secs_since_start(
video_data.timescale,
video_data.samples_statistics.minimum_presentation_timestamp
)),
@@ -255,7 +272,7 @@ fn frame_info_ui(ui: &mut egui::Ui, frame_info: &FrameInfo, video_data: &re_vide
}
}
- if let Some(dts) = frame_info.latest_decode_timestamp {
+ if let Some(dts) = latest_decode_timestamp {
ui.list_item_flat_noninteractive(
PropertyContent::new("DTS").value_fn(value_fn_for_time(dts, video_data)),
)
@@ -264,7 +281,7 @@ fn frame_info_ui(ui: &mut egui::Ui, frame_info: &FrameInfo, video_data: &re_vide
}
ui.list_item_flat_noninteractive(
- PropertyContent::new("PTS").value_fn(value_fn_for_time(frame_info.presentation_timestamp, video_data)),
+ PropertyContent::new("PTS").value_fn(value_fn_for_time(presentation_timestamp, video_data)),
)
.on_hover_text("Raw presentation timestamp prior to applying the timescale.\n\
This specifies the time at which the frame should be shown relative to the start of a video stream.");
@@ -277,11 +294,11 @@ fn frame_info_ui(ui: &mut egui::Ui, frame_info: &FrameInfo, video_data: &re_vide
.has_sample_highest_pts_so_far
.as_ref()
{
- if let Some(sample_idx) = video_data
- .latest_sample_index_at_presentation_timestamp(frame_info.presentation_timestamp)
+ if let Some(sample_idx) =
+ video_data.latest_sample_index_at_presentation_timestamp(presentation_timestamp)
{
ui.list_item_flat_noninteractive(
- PropertyContent::new("Highest PTS so far").value_text(has_sample_highest_pts_so_far[sample_idx].to_string())
+ PropertyContent::new("Highest PTS so far").value_bool(has_sample_highest_pts_so_far[sample_idx])
                ).on_hover_text("Whether the presentation timestamp (PTS) at this frame is the highest encountered so far. If false, a sample earlier in the list has a higher PTS.");
}
}
@@ -290,7 +307,7 @@ fn frame_info_ui(ui: &mut egui::Ui, frame_info: &FrameInfo, video_data: &re_vide
// Information about the current group of pictures this frame is part of.
// Lookup via decode timestamp is faster, but it may not always be available.
if let Some(gop_index) =
- video_data.gop_index_containing_presentation_timestamp(frame_info.presentation_timestamp)
+ video_data.gop_index_containing_presentation_timestamp(presentation_timestamp)
{
ui.list_item_flat_noninteractive(
PropertyContent::new("GOP index").value_text(gop_index.to_string()),
diff --git a/crates/viewer/re_renderer/src/video/chunk_decoder.rs b/crates/viewer/re_renderer/src/video/chunk_decoder.rs
index df191cfe7c05..f0a607c3e1b7 100644
--- a/crates/viewer/re_renderer/src/video/chunk_decoder.rs
+++ b/crates/viewer/re_renderer/src/video/chunk_decoder.rs
@@ -58,7 +58,7 @@ impl VideoChunkDecoder {
Err(err) => {
// Many of the errors we get from a decoder are recoverable.
// They may be very frequent, but it's still useful to see them in the debug log for troubleshooting.
- re_log::debug!("Error during decoding of {debug_name}: {err}");
+ re_log::debug_once!("Error during decoding of {debug_name}: {err}");
let err = VideoPlayerError::Decoding(err);
let mut output = decoder_output.lock();
@@ -80,7 +80,7 @@ impl VideoChunkDecoder {
}
/// Start decoding the given chunk.
- pub fn decode(&mut self, chunk: Chunk, _is_keyframe: bool) -> Result<(), VideoPlayerError> {
+ pub fn decode(&mut self, chunk: Chunk) -> Result<(), VideoPlayerError> {
self.decoder.submit_chunk(chunk)?;
Ok(())
}
@@ -119,9 +119,12 @@ impl VideoChunkDecoder {
let frame_time_range = frame.info.presentation_time_range();
- if frame_time_range.contains(&presentation_timestamp)
- && video_texture.frame_info.presentation_time_range() != frame_time_range
- {
+ let is_up_to_date = video_texture
+ .frame_info
+ .as_ref()
+ .is_some_and(|info| info.presentation_time_range() == frame_time_range);
+
+ if frame_time_range.contains(&presentation_timestamp) && !is_up_to_date {
#[cfg(target_arch = "wasm32")]
{
video_texture.source_pixel_format = copy_web_video_frame_to_texture(
@@ -139,7 +142,7 @@ impl VideoChunkDecoder {
)?;
}
- video_texture.frame_info = frame.info.clone();
+ video_texture.frame_info = Some(frame.info.clone());
}
Ok(())
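
With `frame_info` on the texture now optional, the "already up to date?" test goes through `Option::is_some_and`. A stand-alone illustration of that check with simplified stand-in types (the helper name `needs_upload` is made up for the example):

```rust
// Not the real FrameInfo/VideoTexture; just the shape of the new check.
#[derive(Clone, PartialEq)]
struct FrameInfo {
    range: std::ops::Range<i64>, // stands in for presentation_time_range()
}

fn needs_upload(texture_info: Option<&FrameInfo>, frame: &FrameInfo, pts: i64) -> bool {
    let is_up_to_date = texture_info.is_some_and(|info| info.range == frame.range);
    frame.range.contains(&pts) && !is_up_to_date
}

fn main() {
    let frame = FrameInfo { range: 0..33 };
    assert!(needs_upload(None, &frame, 10)); // nothing on the texture yet
    assert!(!needs_upload(Some(&frame), &frame, 10)); // already uploaded
    assert!(!needs_upload(None, &frame, 100)); // not the frame we want to show
}
```
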
diff --git a/crates/viewer/re_renderer/src/video/mod.rs b/crates/viewer/re_renderer/src/video/mod.rs
index 67b52d4d2453..15bad868326a 100644
--- a/crates/viewer/re_renderer/src/video/mod.rs
+++ b/crates/viewer/re_renderer/src/video/mod.rs
@@ -1,7 +1,7 @@
mod chunk_decoder;
mod player;
-use std::{collections::hash_map::Entry, ops::Range, sync::Arc};
+use std::{collections::hash_map::Entry, sync::Arc};
use ahash::HashMap;
use parking_lot::Mutex;
@@ -64,14 +64,7 @@ pub struct VideoFrameTexture {
pub source_pixel_format: SourceImageDataFormat,
/// Meta information about the decoded frame.
- pub frame_info: re_video::decode::FrameInfo,
-}
-
-impl VideoFrameTexture {
- pub fn presentation_time_range(&self) -> Range {
- self.frame_info.presentation_timestamp
- ..self.frame_info.presentation_timestamp + self.frame_info.duration
- }
+    pub frame_info: Option<re_video::decode::FrameInfo>,
}
/// Identifier for an independent video decoding stream.
diff --git a/crates/viewer/re_renderer/src/video/player.rs b/crates/viewer/re_renderer/src/video/player.rs
index 935d20003ce6..f449ed8903b6 100644
--- a/crates/viewer/re_renderer/src/video/player.rs
+++ b/crates/viewer/re_renderer/src/video/player.rs
@@ -41,7 +41,7 @@ impl TimedDecodingError {
/// A texture of a specific video frame.
pub struct VideoTexture {
pub texture: GpuTexture2D,
- pub frame_info: FrameInfo,
+    pub frame_info: Option<FrameInfo>,
pub source_pixel_format: SourceImageDataFormat,
}
@@ -101,7 +101,7 @@ impl VideoPlayer {
video_texture: VideoTexture {
texture,
- frame_info: FrameInfo::default(),
+ frame_info: None,
source_pixel_format: SourceImageDataFormat::WgpuCompatible(
wgpu::TextureFormat::Rgba8Unorm,
),
@@ -136,51 +136,52 @@ impl VideoPlayer {
.min(self.data.duration + self.data.samples_statistics.minimum_presentation_timestamp); // Don't seek past the end of the video.
let error_on_last_frame_at = self.last_error.is_some();
- let result = self.frame_at_internal(render_ctx, presentation_timestamp, video_data);
-
- match result {
- Ok(()) => {
- let is_active_frame = self
- .video_texture
- .frame_info
- .presentation_time_range()
- .contains(&presentation_timestamp);
-
- let is_pending = !is_active_frame;
- if is_pending && error_on_last_frame_at {
- // If we switched from error to pending, clear the texture.
- // This is important to avoid flickering, in particular when switching from
- // benign errors like DecodingError::NegativeTimestamp.
- // If we don't do this, we see the last valid texture which can look really weird.
- clear_texture(render_ctx, &self.video_texture.texture);
- self.video_texture.frame_info = FrameInfo::default();
- }
-
- let time_range = self.video_texture.frame_info.presentation_time_range();
- let show_spinner = if presentation_timestamp < time_range.start {
- // We're seeking backwards and somehow forgot to reset.
- true
- } else if presentation_timestamp < time_range.end {
- false // it is an active frame
+ self.frame_at_internal(render_ctx, presentation_timestamp, video_data)?;
+
+ let frame_info = self.video_texture.frame_info.clone();
+
+ if let Some(frame_info) = frame_info {
+ let time_range = frame_info.presentation_time_range();
+ let is_active_frame = time_range.contains(&presentation_timestamp);
+
+ let is_pending = !is_active_frame;
+
+ let show_spinner = if is_pending && error_on_last_frame_at {
+ // If we switched from error to pending, clear the texture.
+ // This is important to avoid flickering, in particular when switching from
+ // benign errors like DecodingError::NegativeTimestamp.
+ // If we don't do this, we see the last valid texture which can look really weird.
+ clear_texture(render_ctx, &self.video_texture.texture);
+ self.video_texture.frame_info = None;
+ true
+ } else if presentation_timestamp < time_range.start {
+ // We're seeking backwards and somehow forgot to reset.
+ true
+ } else if presentation_timestamp < time_range.end {
+ false // it is an active frame
+ } else {
+ let how_outdated = presentation_timestamp - time_range.end;
+ if how_outdated.duration(self.data.timescale) < DECODING_GRACE_DELAY {
+ false // Just outdated by a little bit - show no spinner
} else {
- let how_outdated = presentation_timestamp - time_range.end;
- if how_outdated.duration(self.data.timescale) < DECODING_GRACE_DELAY {
- false // Just outdated by a little bit - show no spinner
- } else {
- true // Very old frame - show spinner
- }
- };
-
- Ok(VideoFrameTexture {
- texture: self.video_texture.texture.clone(),
- is_pending,
- show_spinner,
- frame_info: self.video_texture.frame_info.clone(),
- source_pixel_format: self.video_texture.source_pixel_format,
- })
- }
-
- Err(err) => Err(err),
+ true // Very old frame - show spinner
+ }
+ };
+ Ok(VideoFrameTexture {
+ texture: self.video_texture.texture.clone(),
+ is_pending,
+ show_spinner,
+ frame_info: Some(frame_info),
+ source_pixel_format: self.video_texture.source_pixel_format,
+ })
+ } else {
+ Ok(VideoFrameTexture {
+ texture: self.video_texture.texture.clone(),
+ is_pending: true,
+ show_spinner: true,
+ frame_info: None,
+ source_pixel_format: self.video_texture.source_pixel_format,
+ })
}
}
@@ -324,10 +325,9 @@ impl VideoPlayer {
let samples = &self.data.samples[gop.decode_time_range()];
- for (i, sample) in samples.iter().enumerate() {
+ for sample in samples {
let chunk = sample.get(video_data).ok_or(VideoPlayerError::BadData)?;
- let is_keyframe = i == 0;
- self.chunk_decoder.decode(chunk, is_keyframe)?;
+ self.chunk_decoder.decode(chunk)?;
}
Ok(())
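
The spinner decision in `frame_at` above can be read as a pure function. A hedged restatement under simplified assumptions: plain `f64` seconds instead of `Time` plus timescale, a made-up `GRACE` constant standing in for `DECODING_GRACE_DELAY`, and no texture clearing (which the real code does when switching from an error to a pending state):

```rust
use std::ops::Range;
use std::time::Duration;

// Stand-in for DECODING_GRACE_DELAY; the real value may differ.
const GRACE: Duration = Duration::from_millis(400);

/// `frame` is the currently uploaded frame's presentation range, in seconds.
fn show_spinner(frame: Option<&Range<f64>>, pts: f64, switched_from_error: bool) -> bool {
    let Some(range) = frame else {
        return true; // nothing decoded yet
    };
    if !range.contains(&pts) && switched_from_error {
        true // the real code also clears the texture here
    } else if pts < range.start {
        true // seeking backwards and somehow forgot to reset
    } else if pts < range.end {
        false // it is an active frame
    } else {
        // Outdated frame: only show a spinner once we exceed the grace delay.
        (pts - range.end) >= GRACE.as_secs_f64()
    }
}

fn main() {
    let range = 1.0..1.033;
    assert!(!show_spinner(Some(&range), 1.010, false)); // active frame
    assert!(!show_spinner(Some(&range), 1.100, false)); // slightly outdated: no spinner yet
    assert!(show_spinner(Some(&range), 3.000, false)); // very old frame: spinner
    assert!(show_spinner(None, 1.0, false)); // nothing decoded yet: spinner
}
```
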
diff --git a/crates/viewer/re_ui/src/list_item/property_content.rs b/crates/viewer/re_ui/src/list_item/property_content.rs
index 7fa13373f227..ebe1ea4c5a03 100644
--- a/crates/viewer/re_ui/src/list_item/property_content.rs
+++ b/crates/viewer/re_ui/src/list_item/property_content.rs
@@ -152,9 +152,14 @@ impl<'a> PropertyContent<'a> {
/// Show a read-only boolean in the value column.
#[inline]
pub fn value_bool(self, mut b: bool) -> Self {
- self.value_fn(move |ui: &mut Ui, _| {
- ui.add_enabled_ui(false, |ui| ui.re_checkbox(&mut b, ""));
- })
+ if true {
+ self.value_text(b.to_string())
+ } else {
+            // The disabled checkbox is hard to read, which is why this branch is disabled.
+ self.value_fn(move |ui: &mut Ui, _| {
+ ui.add_enabled_ui(false, |ui| ui.re_checkbox(&mut b, ""));
+ })
+ }
}
/// Show an editable boolean in the value column.