Skip to content

Commit

Permalink
Add depthStencil support to WebGPU barebones samples
Browse files — browse the repository at this point in the history
  • Loading branch information
toji committed Oct 15, 2024
1 parent 2666b21 commit 97550eb
Show file tree
Hide file tree
Showing 2 changed files with 135 additions and 55 deletions.
93 changes: 67 additions & 26 deletions webgpu/ar-barebones.html
Original file line number Diff line number Diff line change
Expand Up @@ -49,14 +49,18 @@
// Because we are not using the 'secondary-views' feature we can be sure
// that WebXR will never provide more than two views.
const MAX_VIEWS = 2;
// We have two matrices per view, which is only 32 floats, but we're going
// to allocate 64 of them because uniform buffers bindings must be aligned
// to 256-bytes.
const UNIFORM_FLOATS_PER_VIEW = 64;

// A simple shader that draws a single triangle
const SHADER_SRC = `
struct Camera {
projection: mat4x4f,
view: mat4x4f,
}
@group(0) @binding(0) var<uniform> cameras: array<Camera, ${MAX_VIEWS}>;
@group(0) @binding(0) var<uniform> camera: Camera;
struct VertexOut {
@builtin(position) pos: vec4f,
Expand All @@ -65,7 +69,7 @@
@vertex
fn vertexMain(@builtin(vertex_index) vert_index: u32,
@builtin(instance_index) view_index: u32) -> VertexOut {
@builtin(instance_index) instance: u32) -> VertexOut {
var pos = array<vec4f, 3>(
vec4f(0.0, 0.25, -0.5, 1),
vec4f(-0.25, -0.25, -0.5, 1),
Expand All @@ -78,7 +82,9 @@
vec4f(0, 0, 1, 1)
);
let posOut = cameras[view_index].projection * cameras[view_index].view * pos[vert_index];
// Give each instance a small offset to help with the sense of depth.
let instancePos = pos[vert_index] + vec4f(0, 0, f32(instance) * -0.1, 0);
let posOut = camera.projection * camera.view * instancePos;
return VertexOut(posOut, color[vert_index]);
}
Expand All @@ -98,12 +104,15 @@
let gpuDevice = null;
let gpuContext = null;
let gpuUniformBuffer = null;
let gpuUniformArray = new Float32Array(32 * MAX_VIEWS); // Enough room for two matrices per view.
let gpuUniformArray = new Float32Array(UNIFORM_FLOATS_PER_VIEW * MAX_VIEWS);
let gpuBindGroupLayout = null;
let gpuBindGroup = null;
let gpuBindGroups = [];
let gpuModule = null;
let gpuPipeline = null;
let gpuDepthTexture = null;
let colorFormat = null;
let depthStencilFormat = 'depth24plus';


// WebXR/WebGPU interop globals.
let xrGpuBinding = null;
Expand Down Expand Up @@ -191,9 +200,16 @@
device: gpuDevice,
});

// A depth texture to use when not in an immersive session.
gpuDepthTexture = gpuDevice.createTexture({
size: { width: webgpu_canvas.width, height: webgpu_canvas.height },
format: depthStencilFormat,
usage: GPUTextureUsage.RENDER_ATTACHMENT,
});

// Allocate a uniform buffer with enough space for two uniforms per-view
gpuUniformBuffer = gpuDevice.createBuffer({
size: Float32Array.BYTES_PER_ELEMENT * 32 * MAX_VIEWS,
size: Float32Array.BYTES_PER_ELEMENT * UNIFORM_FLOATS_PER_VIEW * MAX_VIEWS,
usage: GPUBufferUsage.UNIFORM | GPUBufferUsage.COPY_DST,
});

Expand All @@ -212,7 +228,7 @@

gpuDevice.queue.writeBuffer(gpuUniformBuffer, 0, gpuUniformArray);

// Create a bind group for the uniforms
// Create a bind group layout for the uniforms
gpuBindGroupLayout = gpuDevice.createBindGroupLayout({
entries: [{
binding: 0,
Expand All @@ -221,13 +237,19 @@
}]
});

gpuBindGroup = gpuDevice.createBindGroup({
layout: gpuBindGroupLayout,
entries: [{
binding: 0,
resource: { buffer: gpuUniformBuffer }
}]
});
// Create a bind group for each potential view
for (let i = 0; i < MAX_VIEWS; ++i) {
gpuBindGroups.push(gpuDevice.createBindGroup({
layout: gpuBindGroupLayout,
entries: [{
binding: 0,
resource: {
buffer: gpuUniformBuffer,
offset: Float32Array.BYTES_PER_ELEMENT * UNIFORM_FLOATS_PER_VIEW * i
}
}]
}));
}

gpuModule = gpuDevice.createShaderModule({ code: SHADER_SRC });
}
Expand All @@ -238,6 +260,11 @@
module: gpuModule,
entryPoint: 'vertexMain',
},
depthStencil: {
format: depthStencilFormat,
depthWriteEnabled: true,
depthCompare: 'less-equal',
},
fragment: {
module: gpuModule,
entryPoint: 'fragmentMain',
Expand Down Expand Up @@ -282,7 +309,8 @@
}

projectionLayer = xrGpuBinding.createProjectionLayer({
colorFormat
colorFormat,
depthStencilFormat,
});

// Set the session's layers to display the projection layer. This allows
Expand All @@ -304,7 +332,7 @@
// session.end() or when the UA has ended the session for any reason.
// At this point the session object is no longer usable and should be
// discarded.
function onSessionEnded(event) {
async function onSessionEnded(event) {
xrSession = null;
xrGpuBinding = null;
xrButton.textContent = 'Enter AR';
Expand All @@ -313,6 +341,7 @@
// pipeline again upon switching back.
if (colorFormat != navigator.gpu.getPreferredCanvasFormat()) {
colorFormat = navigator.gpu.getPreferredCanvasFormat();
await initWebGPU();
}

requestAnimationFrame(onFrame);
Expand Down Expand Up @@ -343,7 +372,7 @@
// matrices into the uniform buffer.
for (let viewIndex = 0; viewIndex < pose.views.length; ++viewIndex) {
const view = pose.views[viewIndex];
const offset = 32 * viewIndex;
const offset = UNIFORM_FLOATS_PER_VIEW * viewIndex;
gpuUniformArray.set(view.projectionMatrix, offset);
gpuUniformArray.set(view.transform.inverse.matrix, offset + 16);
}
Expand All @@ -366,7 +395,14 @@
// Clear the canvas to transparent black so the user's environment
// shows through.
clearValue: [0.0, 0.0, 0.0, 0.0],
}]
}],
depthStencilAttachment: {
view: subImage.depthStencilTexture.createView(subImage.getViewDescriptor()),
// Clear the depth texture
depthLoadOp: 'clear',
depthStoreOp: 'store',
depthClearValue: 1.0,
}
});

let vp = subImage.viewport;
Expand Down Expand Up @@ -400,7 +436,14 @@
loadOp: 'clear',
storeOp: 'store',
clearValue: [0.1, 0.1, 0.4, 1.0],
}]
}],
depthStencilAttachment: {
view: gpuDepthTexture.createView(),
// Clear the depth texture
depthLoadOp: 'clear',
depthStoreOp: 'store',
depthClearValue: 1.0,
}
});

drawScene(renderPass);
Expand All @@ -412,14 +455,12 @@
}

function drawScene(renderPass, viewIndex = 0) {
// Render from the viewpoint of view using view.projectionMatrix as
// the projection matrix and view.transform to position the virtual
// camera. If you need a view matrix, use view.transform.inverse.matrix.
// Renders the scene using the uniforms saved for view[viewIndex], which
// are accessible in gpuBindGroups[viewIndex].
renderPass.setPipeline(gpuPipeline);
renderPass.setBindGroup(0, gpuBindGroup);
// Passing viewIndex as the firstInstance as an easy way to tell the
// shader which camera uniforms to use.
renderPass.draw(3, 1, 0, viewIndex);
renderPass.setBindGroup(0, gpuBindGroups[viewIndex]);
// Draw 5 instances of the triangle so that our scene has some depth
renderPass.draw(3, 5);
}

// Start the XR application.
Expand Down
Loading

0 comments on commit 97550eb

Please sign in to comment.