diff --git a/crates/viewer/re_renderer/shader/conversions/yuv_converter.wgsl b/crates/viewer/re_renderer/shader/conversions/yuv_converter.wgsl index 3e312a7c06672..9f34c0bb2a096 100644 --- a/crates/viewer/re_renderer/shader/conversions/yuv_converter.wgsl +++ b/crates/viewer/re_renderer/shader/conversions/yuv_converter.wgsl @@ -14,11 +14,11 @@ var uniform_buffer: UniformBuffer; var input_texture: texture_2d<u32>; // see `enum YuvPixelLayout`. -const YUV_LAYOUT_Y_U_V_444 = 0u; -const YUV_LAYOUT_Y_U_V_422 = 1u; -const YUV_LAYOUT_Y_U_V_420 = 2u; -const YUV_LAYOUT_Y_UV_420 = 100u; -const YUV_LAYOUT_YUYV_422 = 200u; +const YUV_LAYOUT_Y_U_V444 = 0u; +const YUV_LAYOUT_Y_U_V422 = 1u; +const YUV_LAYOUT_Y_U_V420 = 2u; +const YUV_LAYOUT_Y_UV420 = 100u; +const YUV_LAYOUT_YUYV422 = 200u; const YUV_LAYOUT_Y_400 = 300u; // see `enum ColorPrimaries`. @@ -77,7 +77,7 @@ fn sample_yuv(yuv_layout: u32, texture: texture_2d<u32>, coords: vec2f) -> vec3f var yuv: vec3f; switch (yuv_layout) { - case YUV_LAYOUT_Y_UV_420: { + case YUV_LAYOUT_Y_UV420: { let uv_offset = u32(floor(texture_dim.y / 1.5)); let uv_row = u32(coords.y / 2); var uv_col = u32(coords.x / 2) * 2u; @@ -87,7 +87,7 @@ fn sample_yuv(yuv_layout: u32, texture: texture_2d<u32>, coords: vec2f) -> vec3f yuv[2] = f32(textureLoad(texture, vec2u((u32(uv_col) + 1u), uv_offset + uv_row), 0).r); } - case YUV_LAYOUT_YUYV_422: { + case YUV_LAYOUT_YUYV422: { // texture is 2 * width * height // every 4 bytes is 2 pixels let uv_row = u32(coords.y); diff --git a/crates/viewer/re_renderer/src/resource_managers/yuv_converter.rs b/crates/viewer/re_renderer/src/resource_managers/yuv_converter.rs index 86d516cfc545d..0340dc015cf70 100644 --- a/crates/viewer/re_renderer/src/resource_managers/yuv_converter.rs +++ b/crates/viewer/re_renderer/src/resource_managers/yuv_converter.rs @@ -29,7 +29,7 @@ use super::ColorPrimaries; /// /// Naming schema: /// * every time a plane starts add a `_` -/// * end with `_4xy` for 4:x:y subsampling. 
+/// * end with `4xy` for 4:x:y subsampling. /// /// This picture gives a great overview of how to interpret the 4:x:y naming scheme for subsampling: /// @@ -63,7 +63,7 @@ pub enum YuvPixelLayout { /// | | /// |_________| /// ``` - Y_U_V_444 = 0, + Y_U_V444 = 0, /// 4:2:2 subsampling with 3 separate planes. /// Also known as `I422` @@ -85,7 +85,7 @@ pub enum YuvPixelLayout { /// height/2 | V | /// |_________| /// ``` - Y_U_V_422 = 1, + Y_U_V422 = 1, /// 4:2:0 subsampling with 3 separate planes. /// Also known as `I420` @@ -105,7 +105,7 @@ pub enum YuvPixelLayout { /// height/4 |___◌̲U____| /// height/4 |___◌̲V____| /// ``` - Y_U_V_420 = 2, + Y_U_V420 = 2, // --------------------------- // Semi-planar formats // --------------------------- @@ -128,7 +128,7 @@ pub enum YuvPixelLayout { /// height/2 | U,V,U,… | /// |_________| /// ``` - Y_UV_420 = 100, + Y_UV420 = 100, // --------------------------- // Interleaved formats // --------------------------- @@ -147,19 +147,40 @@ pub enum YuvPixelLayout { /// height | Y0, U0, Y1, V0… | /// |_________________| /// ``` - YUYV_422 = 200, + YUYV422 = 200, + + // --------------------------- + // Monochrome formats + // --------------------------- + // + /// 4:0:0, single plane of luma only. + /// Also known as I400 + /// + /// Expects single channel texture format. + /// + /// Note that we still convert this to RGBA, for convenience. + /// + /// ```text + /// width + /// __________ + /// | | + /// height | Y | + /// | | + /// |_________| + /// ``` + Y400 = 300, } impl YuvPixelLayout { /// Given the dimensions of the output picture, what are the expected dimensions of the input data texture. 
pub fn data_texture_width_height(&self, [decoded_width, decoded_height]: [u32; 2]) -> [u32; 2] { match self { - Self::Y_U_V_444 => [decoded_width, decoded_height * 3], - Self::Y_U_V_422 => [decoded_width, decoded_height * 2], - Self::Y_U_V_420 => [decoded_width, decoded_height + decoded_height / 2], - Self::Y_UV_420 => [decoded_width, decoded_height + decoded_height / 2], - Self::YUYV_422 => [decoded_width * 2, decoded_height], - Self::Y_400 => [decoded_width, decoded_height], + Self::Y_U_V444 => [decoded_width, decoded_height * 3], + Self::Y_U_V422 => [decoded_width, decoded_height * 2], + Self::Y_U_V420 => [decoded_width, decoded_height + decoded_height / 2], + Self::Y_UV420 => [decoded_width, decoded_height + decoded_height / 2], + Self::YUYV422 => [decoded_width * 2, decoded_height], + Self::Y400 => [decoded_width, decoded_height], } } @@ -176,17 +197,17 @@ impl YuvPixelLayout { #[allow(clippy::match_same_arms)] match self { // Only thing that makes sense for 8 bit planar data is the R8Uint format. - Self::Y_U_V_444 | Self::Y_U_V_422 | Self::Y_U_V_420 => wgpu::TextureFormat::R8Uint, + Self::Y_U_V444 | Self::Y_U_V422 | Self::Y_U_V420 => wgpu::TextureFormat::R8Uint, // Same for planar - Self::Y_UV_420 => wgpu::TextureFormat::R8Uint, + Self::Y_UV420 => wgpu::TextureFormat::R8Uint, // Interleaved have opportunities here! // TODO(andreas): Why not use [`wgpu::TextureFormat::Rg8Uint`] here? - Self::YUYV_422 => wgpu::TextureFormat::R8Uint, + Self::YUYV422 => wgpu::TextureFormat::R8Uint, // Monochrome have only one channel anyways. 
- Self::Y_400 => wgpu::TextureFormat::R8Uint, + Self::Y400 => wgpu::TextureFormat::R8Uint, } } diff --git a/crates/viewer/re_viewer_context/src/gpu_bridge/image_to_gpu.rs b/crates/viewer/re_viewer_context/src/gpu_bridge/image_to_gpu.rs index c1e74342a859c..3c77b4ba76fc3 100644 --- a/crates/viewer/re_viewer_context/src/gpu_bridge/image_to_gpu.rs +++ b/crates/viewer/re_viewer_context/src/gpu_bridge/image_to_gpu.rs @@ -254,14 +254,14 @@ pub fn texture_creation_desc_from_color_image<'a>( PixelFormat::NV12 => ( cast_slice_to_cow(image.buffer.as_slice()), SourceImageDataFormat::Yuv { - format: YuvPixelLayout::Y_UV_420, + format: YuvPixelLayout::Y_UV420, primaries: ColorPrimaries::Bt601, }, ), PixelFormat::YUY2 => ( cast_slice_to_cow(image.buffer.as_slice()), SourceImageDataFormat::Yuv { - format: YuvPixelLayout::YUYV_422, + format: YuvPixelLayout::YUYV422, primaries: ColorPrimaries::Bt601, }, ),