diff --git a/Cargo.toml b/Cargo.toml index 4168bcbb78083f..e97be2d462eaa4 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -1294,6 +1294,16 @@ description = "Shows how to rumble a gamepad using force feedback" category = "Input" wasm = false +[[example]] +name = "gpu_picking" +path = "examples/input/gpu_picking.rs" + +[package.metadata.example.gpu_picking] +name = "GPU picking" +description = "Mouse picking using the gpu" +category = "Input" +wasm = true + [[example]] name = "keyboard_input" path = "examples/input/keyboard_input.rs" diff --git a/assets/shaders/gpu_picking_material.wgsl b/assets/shaders/gpu_picking_material.wgsl new file mode 100644 index 00000000000000..f62e85ce34a04f --- /dev/null +++ b/assets/shaders/gpu_picking_material.wgsl @@ -0,0 +1,29 @@ +// This shader shows how to enable the gpu picking feature for a material + +// You'll need the mesh binding because that's where the entity index is +#import bevy_pbr::mesh_bindings + +@group(1) @binding(0) +var color: vec4; + +// Gpu picking uses multiple fragment output +struct FragmentOutput { + @location(0) color: vec4, +// You can detect the feature with this flag +#ifdef GPU_PICKING + @location(1) entity: vec2, +#endif +}; + +@fragment +fn fragment( + #import bevy_pbr::mesh_vertex_output +) -> FragmentOutput { + var out: FragmentOutput; + out.color = color; +// make sure to output the entity index for gpu picking to work correctly +#ifdef GPU_PICKING + out.entity = mesh.entity; +#endif + return out; +} diff --git a/crates/bevy_core_pipeline/src/core_3d/main_opaque_pass_3d_node.rs b/crates/bevy_core_pipeline/src/core_3d/main_opaque_pass_3d_node.rs index 34d8c299c94c90..73d1e01088eeb6 100644 --- a/crates/bevy_core_pipeline/src/core_3d/main_opaque_pass_3d_node.rs +++ b/crates/bevy_core_pipeline/src/core_3d/main_opaque_pass_3d_node.rs @@ -7,6 +7,7 @@ use crate::{ use bevy_ecs::{prelude::*, query::QueryItem}; use bevy_render::{ camera::ExtractedCamera, + picking::{EntityTextures, ExtractedGpuPickingCamera}, 
render_graph::{NodeRunError, RenderGraphContext, ViewNode}, render_phase::RenderPhase, render_resource::{ @@ -34,8 +35,10 @@ impl ViewNode for MainOpaquePass3dNode { Option<&'static DepthPrepass>, Option<&'static NormalPrepass>, Option<&'static MotionVectorPrepass>, + Option<&'static ExtractedGpuPickingCamera>, Option<&'static SkyboxPipelineId>, Option<&'static SkyboxBindGroup>, + Option<&'static EntityTextures>, &'static ViewUniformOffset, ); @@ -53,8 +56,10 @@ impl ViewNode for MainOpaquePass3dNode { depth_prepass, normal_prepass, motion_vector_prepass, + gpu_picking_camera, skybox_pipeline, skybox_bind_group, + entity_index_textures, view_uniform_offset, ): QueryItem, world: &World, @@ -64,21 +69,34 @@ impl ViewNode for MainOpaquePass3dNode { #[cfg(feature = "trace")] let _main_opaque_pass_3d_span = info_span!("main_opaque_pass_3d").entered(); + let mut color_attachments = vec![Some(target.get_color_attachment(Operations { + load: match camera_3d.clear_color { + ClearColorConfig::Default => LoadOp::Clear(world.resource::().0.into()), + ClearColorConfig::Custom(color) => LoadOp::Clear(color.into()), + ClearColorConfig::None => LoadOp::Load, + }, + store: true, + }))]; + + if gpu_picking_camera.is_some() { + if let Some(picking_textures) = entity_index_textures { + color_attachments.push(Some(picking_textures.get_color_attachment(Operations { + load: match camera_3d.clear_color { + ClearColorConfig::None => LoadOp::Load, + // TODO clear this earlier? + _ => LoadOp::Clear(EntityTextures::no_entity_color()), + }, + store: true, + }))); + } + } + // Setup render pass let mut render_pass = render_context.begin_tracked_render_pass(RenderPassDescriptor { label: Some("main_opaque_pass_3d"), // NOTE: The opaque pass loads the color // buffer as well as writing to it. 
- color_attachments: &[Some(target.get_color_attachment(Operations { - load: match camera_3d.clear_color { - ClearColorConfig::Default => { - LoadOp::Clear(world.resource::().0.into()) - } - ClearColorConfig::Custom(color) => LoadOp::Clear(color.into()), - ClearColorConfig::None => LoadOp::Load, - }, - store: true, - }))], + color_attachments: &color_attachments, depth_stencil_attachment: Some(RenderPassDepthStencilAttachment { view: &depth.view, // NOTE: The opaque main pass loads the depth buffer and possibly overwrites it diff --git a/crates/bevy_core_pipeline/src/core_3d/main_transparent_pass_3d_node.rs b/crates/bevy_core_pipeline/src/core_3d/main_transparent_pass_3d_node.rs index ce5a2127f1d44f..b5cf6fe4793737 100644 --- a/crates/bevy_core_pipeline/src/core_3d/main_transparent_pass_3d_node.rs +++ b/crates/bevy_core_pipeline/src/core_3d/main_transparent_pass_3d_node.rs @@ -2,6 +2,7 @@ use crate::core_3d::Transparent3d; use bevy_ecs::{prelude::*, query::QueryItem}; use bevy_render::{ camera::ExtractedCamera, + picking::{EntityTextures, ExtractedGpuPickingCamera}, render_graph::{NodeRunError, RenderGraphContext, ViewNode}, render_phase::RenderPhase, render_resource::{LoadOp, Operations, RenderPassDepthStencilAttachment, RenderPassDescriptor}, @@ -21,12 +22,14 @@ impl ViewNode for MainTransparentPass3dNode { &'static RenderPhase, &'static ViewTarget, &'static ViewDepthTexture, + Option<&'static ExtractedGpuPickingCamera>, + Option<&'static EntityTextures>, ); fn run( &self, graph: &mut RenderGraphContext, render_context: &mut RenderContext, - (camera, transparent_phase, target, depth): QueryItem, + (camera, transparent_phase, target, depth, gpu_picking_camera, entity_index_textures): QueryItem, world: &World, ) -> Result<(), NodeRunError> { let view_entity = graph.view_entity(); @@ -37,13 +40,27 @@ impl ViewNode for MainTransparentPass3dNode { #[cfg(feature = "trace")] let _main_transparent_pass_3d_span = info_span!("main_transparent_pass_3d").entered(); + let mut 
color_attachments = vec![Some(target.get_color_attachment(Operations { + load: LoadOp::Load, + store: true, + }))]; + + if gpu_picking_camera.is_some() { + if let Some(entity_index_textures) = entity_index_textures { + color_attachments.push(Some(entity_index_textures.get_color_attachment( + Operations { + // The texture is already cleared in the opaque pass + load: LoadOp::Load, + store: true, + }, + ))); + } + } + let mut render_pass = render_context.begin_tracked_render_pass(RenderPassDescriptor { label: Some("main_transparent_pass_3d"), // NOTE: The transparent pass loads the color buffer as well as overwriting it where appropriate. - color_attachments: &[Some(target.get_color_attachment(Operations { - load: LoadOp::Load, - store: true, - }))], + color_attachments: &color_attachments, depth_stencil_attachment: Some(RenderPassDepthStencilAttachment { view: &depth.view, // NOTE: For the transparent pass we load the depth buffer. There should be no diff --git a/crates/bevy_core_pipeline/src/core_3d/mod.rs b/crates/bevy_core_pipeline/src/core_3d/mod.rs index 5d9a42989d1c0d..fd5237076e47e5 100644 --- a/crates/bevy_core_pipeline/src/core_3d/mod.rs +++ b/crates/bevy_core_pipeline/src/core_3d/mod.rs @@ -14,6 +14,7 @@ pub mod graph { pub const MAIN_OPAQUE_PASS: &str = "main_opaque_pass"; pub const MAIN_TRANSPARENT_PASS: &str = "main_transparent_pass"; pub const END_MAIN_PASS: &str = "end_main_pass"; + pub const ENTITY_INDEX_BUFFER_COPY: &str = "entity_index_buffer_copy"; pub const BLOOM: &str = "bloom"; pub const TONEMAPPING: &str = "tonemapping"; pub const FXAA: &str = "fxaa"; @@ -35,6 +36,7 @@ use bevy_ecs::prelude::*; use bevy_render::{ camera::{Camera, ExtractedCamera}, extract_component::ExtractComponentPlugin, + picking::{EntityTextures, ExtractedGpuPickingCamera, ENTITY_TEXTURE_FORMAT}, prelude::Msaa, render_graph::{EmptyNode, RenderGraphApp, ViewNodeRunner}, render_phase::{ @@ -91,7 +93,7 @@ impl Plugin for Core3dPlugin { prepare_core_3d_depth_textures 
.in_set(RenderSet::Prepare) .after(bevy_render::view::prepare_windows), - prepare_prepass_textures + prepare_entity_textures .in_set(RenderSet::Prepare) .after(bevy_render::view::prepare_windows), sort_phase_system::.in_set(RenderSet::PhaseSort), @@ -480,3 +482,63 @@ pub fn prepare_prepass_textures( }); } } + +/// Create the required buffers based on the camera size +pub fn prepare_entity_textures( + mut commands: Commands, + mut texture_cache: ResMut, + msaa: Res, + render_device: Res, + views_3d: Query< + (Entity, &ExtractedCamera, Option<&ExtractedGpuPickingCamera>), + (With>, With>), + >, +) { + for (entity, camera, gpu_picking_camera) in &views_3d { + if gpu_picking_camera.is_none() { + continue; + } + + let Some(physical_target_size) = camera.physical_target_size else { + continue; + }; + + let size = Extent3d { + depth_or_array_layers: 1, + width: physical_target_size.x, + height: physical_target_size.y, + }; + + let descriptor = TextureDescriptor { + label: None, + size, + mip_level_count: 1, + sample_count: 1, + dimension: TextureDimension::D2, + format: ENTITY_TEXTURE_FORMAT, + usage: TextureUsages::RENDER_ATTACHMENT | TextureUsages::COPY_SRC, + view_formats: &[], + }; + + let entity_textures = EntityTextures { + main: texture_cache.get( + &render_device, + TextureDescriptor { + label: Some("main_entity_texture"), + ..descriptor + }, + ), + sampled: (msaa.samples() > 1).then(|| { + texture_cache.get( + &render_device, + TextureDescriptor { + label: Some("main_entity_texture_sampled"), + sample_count: msaa.samples(), + ..descriptor + }, + ) + }), + }; + commands.entity(entity).insert(entity_textures); + } +} diff --git a/crates/bevy_core_pipeline/src/entity_index_buffer_copy/mod.rs b/crates/bevy_core_pipeline/src/entity_index_buffer_copy/mod.rs new file mode 100644 index 00000000000000..ff985a93d26aa4 --- /dev/null +++ b/crates/bevy_core_pipeline/src/entity_index_buffer_copy/mod.rs @@ -0,0 +1,51 @@ +use bevy_app::Plugin; +use bevy_ecs::{query::QueryItem, 
world::World}; +use bevy_render::{ + picking::{EntityTextures, ExtractedGpuPickingCamera}, + render_graph::{RenderGraphApp, RenderGraphContext, ViewNode, ViewNodeRunner}, + renderer::RenderContext, + RenderApp, +}; + +use crate::core_3d::CORE_3D; + +#[derive(Default)] +pub struct EntityIndexBufferCopyNode; +impl ViewNode for EntityIndexBufferCopyNode { + type ViewQuery = (&'static EntityTextures, &'static ExtractedGpuPickingCamera); + + fn run( + &self, + _graph: &mut RenderGraphContext, + render_context: &mut RenderContext, + (entity_index_textures, gpu_picking_camera): QueryItem, + _world: &World, + ) -> Result<(), bevy_render::render_graph::NodeRunError> { + let Some(buffers) = gpu_picking_camera.buffers.as_ref() else { + return Ok(()); + }; + + buffers.copy_texture_to_buffer( + render_context.command_encoder(), + &entity_index_textures.main.texture, + ); + + Ok(()) + } +} + +pub struct EntityIndexBufferCopyPlugin; +impl Plugin for EntityIndexBufferCopyPlugin { + fn build(&self, app: &mut bevy_app::App) { + let Ok(render_app) = app.get_sub_app_mut(RenderApp) else { return; }; + + // 3D + use crate::core_3d::graph::node::*; + render_app + .add_render_graph_node::>( + CORE_3D, + ENTITY_INDEX_BUFFER_COPY, + ) + .add_render_graph_edge(CORE_3D, UPSCALING, ENTITY_INDEX_BUFFER_COPY); + } +} diff --git a/crates/bevy_core_pipeline/src/lib.rs b/crates/bevy_core_pipeline/src/lib.rs index f169cdd89e08dd..7e2472bc851b93 100644 --- a/crates/bevy_core_pipeline/src/lib.rs +++ b/crates/bevy_core_pipeline/src/lib.rs @@ -6,6 +6,7 @@ pub mod clear_color; pub mod contrast_adaptive_sharpening; pub mod core_2d; pub mod core_3d; +pub mod entity_index_buffer_copy; pub mod fullscreen_vertex_shader; pub mod fxaa; pub mod msaa_writeback; @@ -40,6 +41,7 @@ use crate::{ contrast_adaptive_sharpening::CASPlugin, core_2d::Core2dPlugin, core_3d::Core3dPlugin, + entity_index_buffer_copy::EntityIndexBufferCopyPlugin, fullscreen_vertex_shader::FULLSCREEN_SHADER_HANDLE, fxaa::FxaaPlugin, 
msaa_writeback::MsaaWritebackPlugin, @@ -77,6 +79,7 @@ impl Plugin for CorePipelinePlugin { .add_plugin(UpscalingPlugin) .add_plugin(BloomPlugin) .add_plugin(FxaaPlugin) - .add_plugin(CASPlugin); + .add_plugin(CASPlugin) + .add_plugin(EntityIndexBufferCopyPlugin); } } diff --git a/crates/bevy_pbr/src/material.rs b/crates/bevy_pbr/src/material.rs index 0016634533eeb8..61015c33858697 100644 --- a/crates/bevy_pbr/src/material.rs +++ b/crates/bevy_pbr/src/material.rs @@ -23,6 +23,7 @@ use bevy_reflect::{TypePath, TypeUuid}; use bevy_render::{ extract_component::ExtractComponentPlugin, mesh::{Mesh, MeshVertexBufferLayout}, + picking::{ExtractedGpuPickingCamera, GpuPickingMesh}, prelude::Image, render_asset::{PrepareAssetSet, RenderAssets}, render_phase::{ @@ -379,7 +380,12 @@ pub fn queue_material_meshes( msaa: Res, render_meshes: Res>, render_materials: Res>, - material_meshes: Query<(&Handle, &Handle, &MeshUniform)>, + material_meshes: Query<( + &Handle, + &Handle, + &MeshUniform, + Option<&GpuPickingMesh>, + )>, images: Res>, mut views: Query<( &ExtractedView, @@ -388,6 +394,7 @@ pub fn queue_material_meshes( Option<&DebandDither>, Option<&EnvironmentMapLight>, Option<&NormalPrepass>, + Option<&ExtractedGpuPickingCamera>, Option<&TemporalAntiAliasSettings>, &mut RenderPhase, &mut RenderPhase, @@ -403,6 +410,7 @@ pub fn queue_material_meshes( dither, environment_map, normal_prepass, + gpu_picking_camera, taa_settings, mut opaque_phase, mut alpha_mask_phase, @@ -457,7 +465,7 @@ pub fn queue_material_meshes( let rangefinder = view.rangefinder3d(); for visible_entity in &visible_entities.entities { - if let Ok((material_handle, mesh_handle, mesh_uniform)) = + if let Ok((material_handle, mesh_handle, mesh_uniform, gpu_picking_mesh)) = material_meshes.get(*visible_entity) { if let (Some(mesh), Some(material)) = ( @@ -485,6 +493,14 @@ pub fn queue_material_meshes( _ => (), } + if gpu_picking_camera.is_some() { + // This is to indicate that the mesh pipeline needs to have 
the target + mesh_key |= MeshPipelineKey::ENTITY_INDEX_TEXTURE_TARGET; + if gpu_picking_mesh.is_some() { + mesh_key |= MeshPipelineKey::GPU_PICKING; + } + } + let pipeline_id = pipelines.specialize( &pipeline_cache, &material_pipeline, diff --git a/crates/bevy_pbr/src/render/mesh.rs b/crates/bevy_pbr/src/render/mesh.rs index d4a80c55acd323..8d0d329cdb4349 100644 --- a/crates/bevy_pbr/src/render/mesh.rs +++ b/crates/bevy_pbr/src/render/mesh.rs @@ -18,7 +18,7 @@ use bevy_ecs::{ query::ROQueryItem, system::{lifetimeless::*, SystemParamItem, SystemState}, }; -use bevy_math::{Mat3A, Mat4, Vec2}; +use bevy_math::{Mat3A, Mat4, UVec2, Vec2}; use bevy_reflect::TypeUuid; use bevy_render::{ extract_component::{ComponentUniforms, DynamicUniformIndex, UniformComponentPlugin}, @@ -27,6 +27,7 @@ use bevy_render::{ skinning::{SkinnedMesh, SkinnedMeshInverseBindposes}, GpuBufferInfo, Mesh, MeshVertexBufferLayout, }, + picking::{self, ENTITY_TEXTURE_FORMAT}, prelude::Msaa, render_asset::RenderAssets, render_phase::{PhaseItem, RenderCommand, RenderCommandResult, TrackedRenderPass}, @@ -131,6 +132,7 @@ pub struct MeshUniform { pub transform: Mat4, pub previous_transform: Mat4, pub inverse_transpose_model: Mat4, + pub entity: UVec2, pub flags: u32, } @@ -184,6 +186,7 @@ pub fn extract_meshes( flags: flags.bits(), transform, previous_transform, + entity: picking::entity_as_uvec2(entity), inverse_transpose_model: transform.inverse().transpose(), }; if not_caster.is_some() { @@ -588,6 +591,10 @@ bitflags::bitflags! 
{ const ENVIRONMENT_MAP = (1 << 7); const DEPTH_CLAMP_ORTHO = (1 << 8); const TAA = (1 << 9); + /// Indicates if the mesh should output it's entity index + const GPU_PICKING = (1 << 10); + /// Indicates if the entity index texture should be added as a target + const ENTITY_INDEX_TEXTURE_TARGET = (1 << 11); const BLEND_RESERVED_BITS = Self::BLEND_MASK_BITS << Self::BLEND_SHIFT_BITS; // ← Bitmask reserving bits for the blend state const BLEND_OPAQUE = (0 << Self::BLEND_SHIFT_BITS); // ← Values are just sequential within the mask, and can range from 0 to 3 const BLEND_PREMULTIPLIED_ALPHA = (1 << Self::BLEND_SHIFT_BITS); // @@ -816,6 +823,25 @@ impl SpecializedMeshPipeline for MeshPipeline { TextureFormat::bevy_default() }; + let mut targets = vec![Some(ColorTargetState { + format, + blend, + write_mask: ColorWrites::ALL, + })]; + + if key.contains(MeshPipelineKey::GPU_PICKING) { + shader_defs.push("GPU_PICKING".into()); + } + + if key.contains(MeshPipelineKey::ENTITY_INDEX_TEXTURE_TARGET) { + // we need to add the target even if the mesh isn't pickable + targets.push(Some(ColorTargetState { + format: ENTITY_TEXTURE_FORMAT, + blend: None, + write_mask: ColorWrites::ALL, + })); + } + Ok(RenderPipelineDescriptor { vertex: VertexState { shader: MESH_SHADER_HANDLE.typed::(), @@ -827,11 +853,7 @@ impl SpecializedMeshPipeline for MeshPipeline { shader: MESH_SHADER_HANDLE.typed::(), shader_defs, entry_point: "fragment".into(), - targets: vec![Some(ColorTargetState { - format, - blend, - write_mask: ColorWrites::ALL, - })], + targets, }), layout: bind_group_layout, push_constant_ranges: Vec::new(), diff --git a/crates/bevy_pbr/src/render/mesh_types.wgsl b/crates/bevy_pbr/src/render/mesh_types.wgsl index d2e3276fe696a6..dd1d5aa1a5f2b0 100644 --- a/crates/bevy_pbr/src/render/mesh_types.wgsl +++ b/crates/bevy_pbr/src/render/mesh_types.wgsl @@ -4,6 +4,7 @@ struct Mesh { model: mat4x4, previous_model: mat4x4, inverse_transpose_model: mat4x4, + entity: vec2, // 'flags' is a bit 
field indicating various options. u32 is 32 bits so we have up to 32 options. flags: u32, }; diff --git a/crates/bevy_pbr/src/render/pbr.wgsl b/crates/bevy_pbr/src/render/pbr.wgsl index 7043c4e92abe7c..3001c1d07fb764 100644 --- a/crates/bevy_pbr/src/render/pbr.wgsl +++ b/crates/bevy_pbr/src/render/pbr.wgsl @@ -19,8 +19,15 @@ struct FragmentInput { #import bevy_pbr::mesh_vertex_output }; +struct FragmentOutput { + @location(0) color: vec4, +#ifdef GPU_PICKING + @location(1) entity: vec2, +#endif +}; + @fragment -fn fragment(in: FragmentInput) -> @location(0) vec4 { +fn fragment(in: FragmentInput) -> FragmentOutput { let is_orthographic = view.projection[3].w == 1.0; let V = calculate_view(in.world_position, is_orthographic); #ifdef VERTEX_UVS @@ -151,5 +158,11 @@ fn fragment(in: FragmentInput) -> @location(0) vec4 { #ifdef PREMULTIPLY_ALPHA output_color = premultiply_alpha(material.flags, output_color); #endif - return output_color; + + var out: FragmentOutput; + out.color = output_color; +#ifdef GPU_PICKING + out.entity = mesh.entity; +#endif + return out; } diff --git a/crates/bevy_render/src/lib.rs b/crates/bevy_render/src/lib.rs index 3df8d2295436b2..5f235460bf206a 100644 --- a/crates/bevy_render/src/lib.rs +++ b/crates/bevy_render/src/lib.rs @@ -12,6 +12,7 @@ mod extract_param; pub mod extract_resource; pub mod globals; pub mod mesh; +pub mod picking; pub mod pipelined_rendering; pub mod primitives; pub mod render_asset; diff --git a/crates/bevy_render/src/picking.rs b/crates/bevy_render/src/picking.rs new file mode 100644 index 00000000000000..5296c1aa3ee167 --- /dev/null +++ b/crates/bevy_render/src/picking.rs @@ -0,0 +1,335 @@ +use crate::{ + camera::ExtractedCamera, + extract_component::ExtractComponentPlugin, + render_resource::{Buffer, Texture}, + renderer::RenderDevice, + texture::{CachedTexture, TextureFormatPixelInfo}, + Render, RenderApp, RenderSet, +}; +use async_channel::{Receiver, Sender}; +use bevy_app::{Plugin, Update}; +use bevy_ecs::prelude::*; 
+ +use bevy_math::UVec2; +use bevy_render_macros::ExtractComponent; +use bevy_tasks::AsyncComputeTaskPool; +use bevy_utils::default; +use wgpu::{ + BufferDescriptor, BufferUsages, Color, CommandEncoder, Extent3d, ImageDataLayout, MapMode, + Operations, RenderPassColorAttachment, TextureFormat, +}; + +pub const ENTITY_TEXTURE_FORMAT: TextureFormat = TextureFormat::Rg32Uint; + +/// This plugin enables the gpu picking feature of bevy. +/// +/// Gpu picking lets you know which entity is currently under the mouse. +/// +/// # How this works: +/// +/// - For each entity being rendered, it will output its entity id to a texture. +/// - Once everything is rendered it will copy that texture to the cpu and send it to the main world +/// - From the main world you can give it a position like the current mouse position and +/// know exactly which entity was rendered at that specific screen location. +/// - This works at the [`Camera`] level, so it will work with multiple windows or split screen +/// +/// # Api Overview: +/// +/// You need to add the [`GpuPickingCamera`] to any `Camera` that will be used for picking. +/// Then add the [`GpuPickingMesh`] component to any `Mesh` that will need to be picked.
+/// +/// Once those components are added, you can query for [`GpuPickingCamera`] +/// and use `GpuPickingCamera::get_entity(position)` to know which entity is at the given position on screen +pub struct GpuPickingPlugin; +impl Plugin for GpuPickingPlugin { + fn build(&self, app: &mut bevy_app::App) { + app.add_plugin(ExtractComponentPlugin::::default()) + .add_plugin(ExtractComponentPlugin::::default()) + .add_systems(Update, receive_buffer); + + let Ok(render_app) = app.get_sub_app_mut(RenderApp) else { return; }; + + render_app.add_systems( + Render, + ( + prepare_gpu_picking_buffers.in_set(RenderSet::Prepare), + map_and_send_buffer_async.in_set(RenderSet::RenderFlush), + ), + ); + } +} + +/// Maps the entity buffer and sends it to the main world asynchronously +fn map_and_send_buffer_async(query: Query<&ExtractedGpuPickingCamera, With>) { + for gpu_picking_camera in &query { + let Some(buffers) = gpu_picking_camera.buffers.as_ref() else { + return; + }; + let buffers = buffers.clone(); + let sender = gpu_picking_camera.sender.clone(); + + // Mapping the buffer is an asynchronous process. + // This means we need to wait until the buffer is mapped before sending it to the main world + let task = async move { + let (tx, rx) = async_channel::bounded(1); + + // Map entity buffer + let buffer_slice = buffers.entity_buffer.slice(..); + buffer_slice.map_async(MapMode::Read, move |result| match result { + Ok(_) => tx.try_send(()).unwrap(), + Err(err) => bevy_log::error!("Failed to map entity buffer: {err}"), + }); + + // This waits until the Buffer is mapped + rx.recv().await.unwrap(); + + // Send the buffer to the main world + let buffer_view = buffer_slice.get_mapped_range(); + // Copy the data to the CPU + let entity_data = Vec::from(&*buffer_view); + + // Because the channel is bounded to 1 entry, the channel will sometimes be full. + // This isn't ideal but not blocking makes it faster which is preferred. 
+ // + // The other possible error is for the channel to be closed and in that case we can't do anything + let _ = sender.try_send(GpuPickingData { + padded_bytes_per_row: buffers.buffer_dimensions.padded_bytes_per_row, + entity_data, + }); + }; + AsyncComputeTaskPool::get().spawn(task).detach(); + } +} + +/// Marker component to indicate that a mesh should be available for gpu picking +#[derive(Component, ExtractComponent, Clone)] +pub struct GpuPickingMesh; + +/// Data needed in the render world to manage the entity buffer +#[derive(Component)] +pub struct ExtractedGpuPickingCamera { + pub buffers: Option, + sender: Sender, +} + +/// Data sent between the render world and main world +#[derive(Default)] +struct GpuPickingData { + /// Padding required to compute the entity with the exact position in the buffer + padded_bytes_per_row: usize, + /// Buffer representing the entity texture + entity_data: Vec, +} + +/// This component is used to indicate if a camera should support gpu picking. +/// Any mesh with the [`GpuPickingMesh`] component that is visible from this camera +/// will be pickable. +#[derive(Component)] +pub struct GpuPickingCamera { + channel: (Sender, Receiver), + data: GpuPickingData, +} + +impl Default for GpuPickingCamera { + fn default() -> Self { + Self::new() + } +} + +impl GpuPickingCamera { + pub fn new() -> Self { + Self { + channel: async_channel::bounded(1), + data: GpuPickingData::default(), + } + } + + /// Get the entity at the given position. + /// If there is no entity, returns `None`. + pub fn get_entity(&self, pos: UVec2) -> Option { + // We know the position, but in order to find the true position of the bytes + // we're interested in, we have to know how wide a single line in the GPU written buffer is. + // Due to alignment requirements this may be wider than the physical camera size because + // of padding. 
+ let pixel_size = ENTITY_TEXTURE_FORMAT.pixel_size(); + + let start = + (pos.y as usize * self.data.padded_bytes_per_row) + (pos.x as usize * pixel_size); + let end = start + pixel_size; + if end > self.data.entity_data.len() { + return None; + } + + let index = u32::from_ne_bytes(self.data.entity_data[start..end][0..4].try_into().ok()?); + let generation = + u32::from_ne_bytes(self.data.entity_data[start..end][4..8].try_into().ok()?); + + // WARN this assumes that the data stored in the buffer was created from `entity_as_uvec2` + let bits = (generation as u64) << 32 | index as u64; + + let entity = Entity::from_bits(bits); + + if entity != Entity::PLACEHOLDER { + Some(entity) + } else { + None + } + } +} + +impl crate::extract_component::ExtractComponent for GpuPickingCamera { + type Query = &'static Self; + type Filter = (); + type Out = ExtractedGpuPickingCamera; + fn extract_component(item: bevy_ecs::query::QueryItem<'_, Self::Query>) -> Option { + let (sender, _) = item.channel.clone(); + Some(ExtractedGpuPickingCamera { + buffers: None, + sender, + }) + } +} + +/// Contains the buffer and it's dimension required for gpu picking +#[derive(Clone)] +pub struct GpuPickingCameraBuffers { + pub entity_buffer: Buffer, + buffer_dimensions: BufferDimensions, +} + +impl GpuPickingCameraBuffers { + /// Copies the given texture to the entity buffer + pub fn copy_texture_to_buffer(&self, encoder: &mut CommandEncoder, texture: &Texture) { + // This can't be in the Node because it needs access to wgpu but + // bevy_core_pipeline doesn't depend on wgpu + encoder.copy_texture_to_buffer( + texture.as_image_copy(), + wgpu::ImageCopyBuffer { + buffer: &self.entity_buffer, + layout: ImageDataLayout { + offset: 0, + bytes_per_row: Some(self.buffer_dimensions.padded_bytes_per_row as u32), + rows_per_image: None, + }, + }, + Extent3d { + width: self.buffer_dimensions.width as u32, + height: self.buffer_dimensions.height as u32, + ..default() + }, + ); + } +} + +/// Receives the 
entity buffer from the render world through an async channel +fn receive_buffer(mut q: Query<&mut GpuPickingCamera>) { + for mut cam in &mut q { + let (_, receiver) = cam.channel.clone(); + let Ok(data) = receiver.try_recv() else { continue; }; + cam.data = data; + } +} + +/// Creates a [`UVec2`] from an [`Entity`] +/// This uses `Entity::to_bits()` and splits the `u64` in 2 `u32` +/// The position of the index/generation is not guaranteed +/// +/// # Note +/// +/// This is intended to be used for gpu picking not for anything else. +pub fn entity_as_uvec2(entity: Entity) -> UVec2 { + let bits = entity.to_bits(); + UVec2::new(bits as u32, (bits >> 32) as u32) +} + +/// The textures used to draw the entity for each rendered mesh +#[derive(Component, Clone)] +pub struct EntityTextures { + pub main: CachedTexture, + pub sampled: Option, +} + +impl EntityTextures { + /// This is the color that will represent "no entity" in the entity buffer + pub fn no_entity_color() -> wgpu::Color { + let v = entity_as_uvec2(Entity::PLACEHOLDER); + // The texture only uses the red and green channel + Color { + r: v.x as f64, + g: v.y as f64, + b: 0.0, + a: 0.0, + } + } + + /// Retrieve this target's color attachment. This will use [`Self::sampled`] and resolve to [`Self::main`] if + /// the target has sampling enabled. Otherwise it will use [`Self::main`] directly. 
+ pub fn get_color_attachment(&self, ops: Operations) -> RenderPassColorAttachment { + match &self.sampled { + Some(sampled_texture) => RenderPassColorAttachment { + view: &sampled_texture.default_view, + resolve_target: Some(&self.main.default_view), + ops, + }, + None => RenderPassColorAttachment { + view: &self.main.default_view, + resolve_target: None, + ops, + }, + } + } +} + +/// This creates the required buffers for each camera +fn prepare_gpu_picking_buffers( + render_device: Res, + mut cameras: Query< + (&ExtractedCamera, &mut ExtractedGpuPickingCamera), + Changed, + >, +) { + for (camera, mut gpu_picking_camera) in &mut cameras { + let Some(size) = camera.physical_target_size else { continue; }; + + let buffer_dimensions = + BufferDimensions::new(size.x as usize, size.y as usize, ENTITY_TEXTURE_FORMAT); + // TODO We currently create one buffer per frame. This isn't ideal. + // We should have a way to only create a buffer when required. Unfortunately, the async nature of + // the buffer map api makes it hard to know how many buffers we'll need since we need at least one per frame. + let entity_buffer = render_device.create_buffer(&BufferDescriptor { + label: Some("Entity Buffer"), + size: (buffer_dimensions.padded_bytes_per_row * buffer_dimensions.height) as u64, + usage: BufferUsages::COPY_DST | BufferUsages::MAP_READ, + mapped_at_creation: false, + }); + + gpu_picking_camera.buffers = Some(GpuPickingCameraBuffers { + entity_buffer: entity_buffer.clone(), + buffer_dimensions, + }); + } +} + +/// Used to represent the size of a [`Buffer`] and the padding required for each row. +/// We need to know the padding because the rows need to be 256 byte aligned.
+#[derive(Clone, Copy)] +pub struct BufferDimensions { + width: usize, + height: usize, + padded_bytes_per_row: usize, +} + +impl BufferDimensions { + fn new(width: usize, height: usize, texture_format: TextureFormat) -> Self { + let bytes_per_pixel = texture_format.pixel_size(); + let unpadded_bytes_per_row = width * bytes_per_pixel; + let align = wgpu::COPY_BYTES_PER_ROW_ALIGNMENT as usize; + let padded_bytes_per_row_padding = (align - unpadded_bytes_per_row % align) % align; + let padded_bytes_per_row = unpadded_bytes_per_row + padded_bytes_per_row_padding; + Self { + width, + height, + padded_bytes_per_row, + } + } +} diff --git a/examples/README.md b/examples/README.md index 4751f537cfabf7..8c4eb706cc9029 100644 --- a/examples/README.md +++ b/examples/README.md @@ -240,6 +240,7 @@ Example | Description Example | Description --- | --- [Char Input Events](../examples/input/char_input_events.rs) | Prints out all chars as they are inputted +[GPU picking](../examples/input/gpu_picking.rs) | Mouse picking using the gpu [Gamepad Input](../examples/input/gamepad_input.rs) | Shows handling of gamepad input, connections, and disconnections [Gamepad Input Events](../examples/input/gamepad_input_events.rs) | Iterates and prints gamepad input and connection events [Gamepad Rumble](../examples/input/gamepad_rumble.rs) | Shows how to rumble a gamepad using force feedback diff --git a/examples/input/gpu_picking.rs b/examples/input/gpu_picking.rs new file mode 100644 index 00000000000000..657358b492ad84 --- /dev/null +++ b/examples/input/gpu_picking.rs @@ -0,0 +1,210 @@ +//! This example shows how to use the gpu picking api. +//! +//! Gpu picking is a way to generate a texture of all the rendered entity and +//! use this texture to determine exactly which entity is under the mouse. 
+
+use bevy::prelude::*;
+use bevy_internal::{
+    reflect::{TypePath, TypeUuid},
+    render::{
+        picking::{GpuPickingCamera, GpuPickingMesh, GpuPickingPlugin},
+        render_resource::{AsBindGroup, ShaderRef},
+    },
+    window::PresentMode,
+};
+
+fn main() {
+    App::new()
+        .add_plugins(DefaultPlugins.set(WindowPlugin {
+            primary_window: Some(Window {
+                present_mode: PresentMode::AutoNoVsync,
+                ..default()
+            }),
+            ..default()
+        }))
+        .add_plugin(MaterialPlugin::<GpuPickingMaterial>::default())
+        // Add the plugin
+        .add_plugin(GpuPickingPlugin)
+        .add_systems(Startup, setup)
+        .add_systems(Update, (mouse_picking, move_cube))
+        .run();
+}
+
+fn setup(
+    mut commands: Commands,
+    mut meshes: ResMut<Assets<Mesh>>,
+    mut materials: ResMut<Assets<StandardMaterial>>,
+    mut custom_materials: ResMut<Assets<GpuPickingMaterial>>,
+    asset_server: Res<AssetServer>,
+) {
+    // opaque cube
+    commands.spawn((
+        PbrBundle {
+            mesh: meshes.add(Mesh::from(shape::Cube { size: 1.0 })),
+            material: materials.add(Color::rgb(0.8, 0.7, 0.6).into()),
+            transform: Transform::from_xyz(0.0, 0.5, 0.0),
+            ..default()
+        },
+        // Add this component to any mesh that you want to be able to pick
+        GpuPickingMesh,
+    ));
+
+    // alpha mask cube
+    commands.spawn((
+        PbrBundle {
+            mesh: meshes.add(Mesh::from(shape::Cube { size: 1.0 })),
+            material: materials.add(StandardMaterial {
+                alpha_mode: AlphaMode::Mask(1.0),
+                base_color_texture: Some(asset_server.load("branding/icon.png")),
+                ..default()
+            }),
+            transform: Transform::from_xyz(1.0, 0.5, 0.0),
+            ..default()
+        },
+        GpuPickingMesh,
+    ));
+
+    // transparent cube
+    commands.spawn((
+        PbrBundle {
+            mesh: meshes.add(Mesh::from(shape::Cube { size: 1.0 })),
+            material: materials.add(Color::rgba(0.8, 0.7, 0.6, 0.5).into()),
+            transform: Transform::from_xyz(-1.0, 0.5, 0.0),
+            ..default()
+        },
+        GpuPickingMesh,
+    ));
+
+    // cube with custom material
+    commands.spawn((
+        MaterialMeshBundle {
+            mesh: meshes.add(Mesh::from(shape::Cube { size: 1.0 })),
+            transform: Transform::from_xyz(2.0, 0.5, 0.0),
+            material: custom_materials.add(GpuPickingMaterial {
+                color: Color::GREEN,
+            }),
+            ..default()
+        },
+        GpuPickingMesh,
+    ));
+
+    // This cube will move from left to right. It shows that picking works correctly when things are moving.
+    commands.spawn((
+        PbrBundle {
+            mesh: meshes.add(Mesh::from(shape::Cube { size: 1.0 })),
+            material: materials.add(Color::rgb(0.8, 0.7, 0.6).into()),
+            transform: Transform::from_xyz(0.0, 0.5, 1.0),
+            ..default()
+        },
+        GpuPickingMesh,
+        MoveCube,
+    ));
+
+    // plane
+    commands.spawn(PbrBundle {
+        mesh: meshes.add(shape::Plane::from_size(5.0).into()),
+        material: materials.add(Color::rgb(0.3, 0.5, 0.3).into()),
+        ..default()
+    });
+    // light
+    commands.spawn(PointLightBundle {
+        point_light: PointLight {
+            intensity: 1500.0,
+            shadows_enabled: true,
+            ..default()
+        },
+        transform: Transform::from_xyz(4.0, 8.0, 4.0),
+        ..default()
+    });
+    // camera
+    commands.spawn((
+        Camera3dBundle {
+            transform: Transform::from_xyz(-2.0, 2.5, 5.0).looking_at(Vec3::ZERO, Vec3::Y),
+            ..default()
+        },
+        GpuPickingCamera::default(),
+    ));
+}
+
+fn mouse_picking(
+    mut cursor_moved: EventReader<CursorMoved>,
+    gpu_picking_cameras: Query<&GpuPickingCamera>,
+    material_handle: Query<(
+        Option<&Handle<StandardMaterial>>,
+        Option<&Handle<GpuPickingMaterial>>,
+    )>,
+    mut materials: ResMut<Assets<StandardMaterial>>,
+    mut custom_materials: ResMut<Assets<GpuPickingMaterial>>,
+    mut hovered: Local<Option<Entity>>,
+) {
+    // Sets the color of the given entity
+    let mut set_color = |entity, color: Color| {
+        let (std_handle, custom_handle) = material_handle.get(entity).expect("Entity should exist");
+        if let Some(material) = std_handle.and_then(|h| materials.get_mut(h)) {
+            let a = material.base_color.a();
+            material.base_color = color.with_a(a);
+        };
+        if let Some(material) = custom_handle.and_then(|h| custom_materials.get_mut(h)) {
+            let a = material.color.a();
+            material.color = color.with_a(a);
+        };
+    };
+
+    let Some(moved_event) = cursor_moved.iter().last() else { return; };
+    let mouse_position = moved_event.position.as_uvec2();
+
+    for gpu_picking_camera in &gpu_picking_cameras {
+        // This will read the entity texture and get the entity that is at the given position
+        if let Some(entity) = gpu_picking_camera.get_entity(mouse_position) {
+            if let Some(hovered) = *hovered {
+                if entity != hovered {
+                    set_color(hovered, Color::BLUE);
+                }
+            }
+            set_color(entity, Color::RED);
+            *hovered = Some(entity);
+        } else {
+            if let Some(hovered) = *hovered {
+                set_color(hovered, Color::BLUE);
+            }
+            *hovered = None;
+        }
+    }
+}
+
+// You can also use a custom material with it, you just need to make sure it correctly outputs the entity id
+// See assets/shaders/gpu_picking_material.wgsl for more information
+#[derive(AsBindGroup, TypeUuid, TypePath, Debug, Clone)]
+#[uuid = "fb9ea5e0-316d-4992-852b-aa1faa2a5a0d"]
+pub struct GpuPickingMaterial {
+    #[uniform(0)]
+    color: Color,
+}
+
+impl Material for GpuPickingMaterial {
+    fn fragment_shader() -> ShaderRef {
+        "shaders/gpu_picking_material.wgsl".into()
+    }
+}
+
+#[derive(Component)]
+struct MoveCube;
+
+// Moves a mesh from left to right
+// Used to show that picking works even if things are moving
+fn move_cube(
+    mut q: Query<&mut Transform, With<MoveCube>>,
+    time: Res