diff --git a/sample/cubemap/main.ts b/sample/cubemap/main.ts
index 615cd99a..aa206c5e 100644
--- a/sample/cubemap/main.ts
+++ b/sample/cubemap/main.ts
@@ -1,15 +1,6 @@
 import { mat4 } from 'wgpu-matrix';
-import {
-  cubeVertexArray,
-  cubeVertexSize,
-  cubeUVOffset,
-  cubePositionOffset,
-  cubeVertexCount,
-} from '../../meshes/cube';
-
-import basicVertWGSL from '../../shaders/basic.vert.wgsl';
-import sampleCubemapWGSL from './sampleCubemap.frag.wgsl';
+import sampleCubemapWGSL from './sampleCubemap.wgsl';
 import { quitIfWebGPUNotAvailableOrMissingFeatures } from '../util';
 
 const canvas = document.querySelector('canvas') as HTMLCanvasElement;
@@ -31,73 +22,18 @@ context.configure({
   format: presentationFormat,
 });
 
-// Create a vertex buffer from the cube data.
-const verticesBuffer = device.createBuffer({
-  size: cubeVertexArray.byteLength,
-  usage: GPUBufferUsage.VERTEX,
-  mappedAtCreation: true,
-});
-new Float32Array(verticesBuffer.getMappedRange()).set(cubeVertexArray);
-verticesBuffer.unmap();
-
+const module = device.createShaderModule({ code: sampleCubemapWGSL });
 const pipeline = device.createRenderPipeline({
   layout: 'auto',
-  vertex: {
-    module: device.createShaderModule({
-      code: basicVertWGSL,
-    }),
-    buffers: [
-      {
-        arrayStride: cubeVertexSize,
-        attributes: [
-          {
-            // position
-            shaderLocation: 0,
-            offset: cubePositionOffset,
-            format: 'float32x4',
-          },
-          {
-            // uv
-            shaderLocation: 1,
-            offset: cubeUVOffset,
-            format: 'float32x2',
-          },
-        ],
-      },
-    ],
-  },
+  vertex: { module },
   fragment: {
-    module: device.createShaderModule({
-      code: sampleCubemapWGSL,
-    }),
+    module,
     targets: [
       {
         format: presentationFormat,
       },
     ],
   },
-  primitive: {
-    topology: 'triangle-list',
-
-    // Since we are seeing from inside of the cube
-    // and we are using the regular cube geomtry data with outward-facing normals,
-    // the cullMode should be 'front' or 'none'.
-    cullMode: 'none',
-  },
-
-  // Enable depth testing so that the fragment closest to the camera
-  // is rendered in front.
-  depthStencil: {
-    depthWriteEnabled: true,
-    depthCompare: 'less',
-    format: 'depth24plus',
-  },
-});
-
-const depthTexture = device.createTexture({
-  size: [canvas.width, canvas.height],
-  format: 'depth24plus',
-  usage: GPUTextureUsage.RENDER_ATTACHMENT,
 });
 
 // Fetch the 6 separate images for negative/positive x, y, z axis of a cubemap
@@ -181,25 +117,18 @@ const renderPassDescriptor: GPURenderPassDescriptor = {
       storeOp: 'store',
     },
   ],
-  depthStencilAttachment: {
-    view: depthTexture.createView(),
-
-    depthClearValue: 1.0,
-    depthLoadOp: 'clear',
-    depthStoreOp: 'store',
-  },
 };
 
 const aspect = canvas.width / canvas.height;
 const projectionMatrix = mat4.perspective((2 * Math.PI) / 5, aspect, 1, 3000);
-const modelMatrix = mat4.scaling([1000, 1000, 1000]);
-const modelViewProjectionMatrix = mat4.create();
+const modelMatrix = mat4.identity();
+const modelViewProjectionInverseMatrix = mat4.create();
 const viewMatrix = mat4.identity();
 const tmpMat4 = mat4.create();
 
-// Comppute camera movement:
+// Compute camera movement:
 // It rotates around Y axis with a slight pitch movement.
 function updateTransformationMatrix() {
   const now = Date.now() / 800;
@@ -207,11 +136,15 @@ function updateTransformationMatrix() {
   mat4.rotate(viewMatrix, [1, 0, 0], (Math.PI / 10) * Math.sin(now), tmpMat4);
   mat4.rotate(tmpMat4, [0, 1, 0], now * 0.2, tmpMat4);
 
-  mat4.multiply(tmpMat4, modelMatrix, modelViewProjectionMatrix);
+  mat4.multiply(tmpMat4, modelMatrix, modelViewProjectionInverseMatrix);
   mat4.multiply(
     projectionMatrix,
-    modelViewProjectionMatrix,
-    modelViewProjectionMatrix
+    modelViewProjectionInverseMatrix,
+    modelViewProjectionInverseMatrix
+  );
+  mat4.inverse(
+    modelViewProjectionInverseMatrix,
+    modelViewProjectionInverseMatrix
   );
 }
 
@@ -220,9 +153,9 @@ function frame() {
   device.queue.writeBuffer(
     uniformBuffer,
     0,
-    modelViewProjectionMatrix.buffer,
-    modelViewProjectionMatrix.byteOffset,
-    modelViewProjectionMatrix.byteLength
+    modelViewProjectionInverseMatrix.buffer,
+    modelViewProjectionInverseMatrix.byteOffset,
+    modelViewProjectionInverseMatrix.byteLength
   );
 
   renderPassDescriptor.colorAttachments[0].view = context
@@ -232,9 +165,8 @@
   const commandEncoder = device.createCommandEncoder();
   const passEncoder = commandEncoder.beginRenderPass(renderPassDescriptor);
   passEncoder.setPipeline(pipeline);
-  passEncoder.setVertexBuffer(0, verticesBuffer);
   passEncoder.setBindGroup(0, uniformBindGroup);
-  passEncoder.draw(cubeVertexCount);
+  passEncoder.draw(3);
   passEncoder.end();
 
   device.queue.submit([commandEncoder.finish()]);
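The core of this change is in `updateTransformationMatrix`: instead of building a model-view-projection matrix for a 1000-unit cube, main.ts now uploads the inverse of projection times a rotation-only view matrix, which the fragment shader uses to turn clip-space positions back into world-space view directions. Below is a condensed, standalone sketch of that computation (it assumes the same wgpu-matrix calls the sample already imports; the aspect ratio is hard-coded to 1 and the identity `modelMatrix` multiply is dropped for brevity):

```ts
import { mat4 } from 'wgpu-matrix';

const projectionMatrix = mat4.perspective((2 * Math.PI) / 5, 1, 1, 3000);

function computeViewDirectionProjectionInverse(now: number) {
  // Rotation-only view matrix: a pitch oscillation plus a slow yaw spin,
  // as in the diff. No translation, so the skybox stays centered on the
  // viewer no matter how the camera "moves".
  const view = mat4.rotate(
    mat4.identity(),
    [1, 0, 0],
    (Math.PI / 10) * Math.sin(now)
  );
  mat4.rotate(view, [0, 1, 0], now * 0.2, view);

  // projection * view, then inverted: this is what lets the fragment
  // shader map clip space back to world-space view directions.
  const m = mat4.multiply(projectionMatrix, view);
  return mat4.inverse(m, m);
}
```

Inverting once per frame on the CPU keeps the per-fragment work down to a single matrix multiply and a normalize.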
diff --git a/sample/cubemap/meta.ts b/sample/cubemap/meta.ts
index 028b3ec9..6dcb223b 100644
--- a/sample/cubemap/meta.ts
+++ b/sample/cubemap/meta.ts
@@ -1,12 +1,12 @@
 export default {
   name: 'Cubemap',
-  description:
-    'This example shows how to render and sample from a cubemap texture. Cubemap image available under a Creative Commons Attribution 3.0 Unported License at ',
+  description: `
+This example shows how to render and sample from a cubemap texture.
+See [this article](https://webgpufundamentals.org/webgpu/lessons/webgpu-skybox.html)
+for more details.
+Cubemap image available under a Creative Commons Attribution 3.0 Unported License at
+
+`,
   filename: __DIRNAME__,
-  sources: [
-    { path: 'main.ts' },
-    { path: '../../shaders/basic.vert.wgsl' },
-    { path: './sampleCubemap.frag.wgsl' },
-    { path: '../../meshes/cube.ts' },
-  ],
+  sources: [{ path: 'main.ts' }, { path: './sampleCubemap.wgsl' }],
 };
diff --git a/sample/cubemap/sampleCubemap.frag.wgsl b/sample/cubemap/sampleCubemap.frag.wgsl
deleted file mode 100644
index 1e04597c..00000000
--- a/sample/cubemap/sampleCubemap.frag.wgsl
+++ /dev/null
@@ -1,18 +0,0 @@
-@group(0) @binding(1) var mySampler: sampler;
-@group(0) @binding(2) var myTexture: texture_cube<f32>;
-
-@fragment
-fn main(
-  @location(0) fragUV: vec2f,
-  @location(1) fragPosition: vec4f
-) -> @location(0) vec4f {
-  // Our camera and the skybox cube are both centered at (0, 0, 0)
-  // so we can use the cube geometry position to get viewing vector to sample
-  // the cube texture. The magnitude of the vector doesn't matter.
-  var cubemapVec = fragPosition.xyz - vec3(0.5);
-  // When viewed from the inside, cubemaps are left-handed (z away from viewer),
-  // but common camera matrix convention results in a right-handed world space
-  // (z toward viewer), so we have to flip it.
-  cubemapVec.z *= -1;
-  return textureSample(myTexture, mySampler, cubemapVec);
-}
diff --git a/sample/cubemap/sampleCubemap.wgsl b/sample/cubemap/sampleCubemap.wgsl
new file mode 100644
index 00000000..9eb7bea5
--- /dev/null
+++ b/sample/cubemap/sampleCubemap.wgsl
@@ -0,0 +1,39 @@
+@group(0) @binding(0) var<uniform> viewDirectionProjectionInverse: mat4x4f;
+@group(0) @binding(1) var mySampler: sampler;
+@group(0) @binding(2) var myTexture: texture_cube<f32>;
+
+struct VertexOutput {
+  @builtin(position) position: vec4f,
+  @location(1) direction: vec4f,
+};
+
+@vertex
+fn mainVS(
+  @builtin(vertex_index) vertexIndex: u32
+) -> VertexOutput {
+  // A single triangle large enough to cover all of clip space.
+  let pos = array(
+    vec2f(-1, -1),
+    vec2f(-1,  3),
+    vec2f( 3, -1),
+  );
+  let p = pos[vertexIndex];
+  // We return the position twice: once for @builtin(position) and once
+  // for the fragment shader, where the interpolated values go from
+  // -1,-1 to 1,1 across the entire render target.
+  return VertexOutput(
+    vec4f(p, 0, 1),
+    vec4f(p, -1, 1),
+  );
+}
+
+@fragment
+fn mainFS(
+  in: VertexOutput,
+) -> @location(0) vec4f {
+  // Orient the direction to the view.
+  let t = viewDirectionProjectionInverse * in.direction;
+  // Remove the perspective.
+  let uvw = normalize(t.xyz / t.w);
+  return textureSample(myTexture, mySampler, uvw);
+}
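One detail worth verifying is the comment in `mainVS` that a single triangle covers all of clip space. A small standalone check (illustrative only; `insideTriangle` is a hypothetical helper, not part of the sample) confirms that the clip-space square [-1, 1] x [-1, 1] lies entirely inside the triangle (-1, -1), (-1, 3), (3, -1):

```ts
// The triangle with corners (-1, -1), (-1, 3), (3, -1) is bounded by
// x >= -1, y >= -1, and the hypotenuse x + y <= 2.
function insideTriangle(x: number, y: number): boolean {
  return x >= -1 && y >= -1 && x + y <= 2;
}

// The triangle is convex, so checking the square's corners suffices;
// the farthest corner (1, 1) sits exactly on the hypotenuse (1 + 1 = 2).
for (const x of [-1, 1]) {
  for (const y of [-1, 1]) {
    console.assert(insideTriangle(x, y), `(${x}, ${y}) is not covered`);
  }
}
```

Everything outside the square is clipped away, so the fragment shader still runs exactly once per pixel. That is why main.ts can drop the cube vertex buffer, the depth attachment, and the culling configuration, and simply call draw(3).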