<!DOCTYPE html>
<html>
<head>
<meta name="viewport" content="width=1000">
<title>WebGPU Cube</title>
<script src="scripts/gl-matrix-min.js"></script>
<link rel="stylesheet" href="css/style.css"/>
<style>
body {
font-family: system-ui;
color: #f7f7ff;
background-color: rgb(13, 77, 153);
text-align: center;
}
canvas {
margin: 0 auto;
}
p {
margin: 0 8px;
}
</style>
</head>
<body>
<div id="contents">
<h1>Textured Cube</h1>
<p>
This demo uploads a PNG image as texture data and uses it on the faces of a cube.
</p>
<canvas width="1200" height="1200"></canvas>
</div>
<div id="error">
<h2>WebGPU not available</h2>
<p>
Make sure you are on a system with WebGPU enabled. In
Safari, first make sure the Develop menu is visible (Preferences >
Advanced), then enable Develop > Experimental Features > WebGPU.
</p>
<p>
In addition, you must be using Safari Technology Preview 92 or above.
You can get the latest STP <a href="https://developer.apple.com/safari/download/">here</a>.
</p>
</div>
<script>
if (!navigator.gpu || GPUBufferUsage.COPY_SRC === undefined)
document.body.className = 'error';
const positionAttributeNum = 0;
const texCoordsAttributeNum = 1;
const transformBindingNum = 0;
const textureBindingNum = 1;
const samplerBindingNum = 2;
const bindGroupIndex = 0;
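// The WHLSL shader below: vertex_main transforms each vertex by the
// model-view-projection matrix and passes texture coordinates through;
// fragment_main samples the face texture with those coordinates.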
const shader = `
struct FragmentData {
float4 position : SV_Position;
float2 texCoords : attribute(${texCoordsAttributeNum});
}
vertex FragmentData vertex_main(
float4 position : attribute(${positionAttributeNum}),
float2 texCoords : attribute(${texCoordsAttributeNum}),
constant float4x4[] modelViewProjectionMatrix : register(b${transformBindingNum}))
{
FragmentData out;
out.position = mul(modelViewProjectionMatrix[0], position);
out.texCoords = texCoords;
return out;
}
fragment float4 fragment_main(
float2 texCoords : attribute(${texCoordsAttributeNum}),
Texture2D<float4> faceTexture : register(t${textureBindingNum}),
sampler faceSampler : register(s${samplerBindingNum})) : SV_Target 0
{
return Sample(faceTexture, faceSampler, texCoords);
}
`;
let device, swapChain, verticesBuffer, bindGroupLayout, pipeline, renderPassDescriptor, queue, textureViewBinding, samplerBinding;
let projectionMatrix = mat4.create();
const texCoordsOffset = 4 * 4;
const vertexSize = 4 * 6;
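// 36 vertices (6 faces x 2 triangles x 3 vertices), interleaved as a
// float4 position followed by float2 texCoords, 24 bytes per vertex.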
const verticesArray = new Float32Array([
// float4 position, float2 texCoords
1, -1, -1, 1, 0, 1,
-1, -1, -1, 1, 1, 1,
-1, 1, -1, 1, 1, 0,
1, 1, -1, 1, 0, 0,
1, -1, -1, 1, 0, 1,
-1, 1, -1, 1, 1, 0,
1, 1, 1, 1, 0, 0,
1, -1, 1, 1, 0, 1,
1, -1, -1, 1, 1, 1,
1, 1, -1, 1, 1, 0,
1, 1, 1, 1, 0, 0,
1, -1, -1, 1, 1, 1,
1, -1, 1, 1, 1, 0,
-1, -1, 1, 1, 0, 0,
-1, -1, -1, 1, 0, 1,
1, -1, -1, 1, 1, 1,
1, -1, 1, 1, 1, 0,
-1, -1, -1, 1, 0, 1,
-1, 1, 1, 1, 0, 1,
1, 1, 1, 1, 1, 1,
1, 1, -1, 1, 1, 0,
-1, 1, -1, 1, 0, 0,
-1, 1, 1, 1, 0, 1,
1, 1, -1, 1, 1, 0,
-1, -1, 1, 1, 1, 1,
-1, 1, 1, 1, 1, 0,
-1, 1, -1, 1, 0, 0,
-1, -1, -1, 1, 0, 1,
-1, -1, 1, 1, 1, 1,
-1, 1, -1, 1, 0, 0,
1, 1, 1, 1, 1, 0,
-1, 1, 1, 1, 0, 0,
-1, -1, 1, 1, 0, 1,
-1, -1, 1, 1, 0, 1,
1, -1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 0,
]);
async function init() {
const adapter = await navigator.gpu.requestAdapter();
device = await adapter.requestDevice();
const canvas = document.querySelector('canvas');
const aspect = Math.abs(canvas.width / canvas.height);
mat4.perspective(projectionMatrix, (2 * Math.PI) / 5, aspect, 1, 100.0);
const context = canvas.getContext('gpu');
const swapChainDescriptor = {
device: device,
format: "bgra8unorm"
};
swapChain = context.configureSwapChain(swapChainDescriptor);
// The shader above is written in WHLSL; the isWHLSL flag tells WebKit's WebGPU prototype how to compile it.
const shaderModuleDescriptor = { code: shader, isWHLSL: true };
const shaderModule = device.createShaderModule(shaderModuleDescriptor);
const verticesBufferDescriptor = {
size: verticesArray.byteLength,
usage: GPUBufferUsage.VERTEX | GPUBufferUsage.COPY_DST
};
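// createBufferMapped returns the GPUBuffer together with an ArrayBuffer
// that is filled with the vertex data and then unmapped to upload it.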
let verticesArrayBuffer;
[verticesBuffer, verticesArrayBuffer] = device.createBufferMapped(verticesBufferDescriptor);
const verticesWriteArray = new Float32Array(verticesArrayBuffer);
verticesWriteArray.set(verticesArray);
verticesBuffer.unmap();
// Vertex Input
const positionAttributeDescriptor = {
shaderLocation: positionAttributeNum, // attribute(0) in the shader.
offset: 0,
format: "float4"
};
const texCoordsAttributeDescriptor = {
shaderLocation: texCoordsAttributeNum,
offset: texCoordsOffset,
format: "float2"
};
const vertexBufferDescriptor = {
attributeSet: [positionAttributeDescriptor, texCoordsAttributeDescriptor],
stride: vertexSize,
stepMode: "vertex"
};
const vertexInputDescriptor = { vertexBuffers: [vertexBufferDescriptor] };
// Texture
// Load texture image
const image = new Image();
const imageLoadPromise = new Promise(resolve => {
image.onload = () => resolve();
image.src = "resources/safari-alpha.png";
});
await imageLoadPromise;
const textureSize = {
width: image.width,
height: image.height,
depth: 1
};
const textureDescriptor = {
size: textureSize,
arrayLayerCount: 1,
mipLevelCount: 1,
sampleCount: 1,
dimension: "2d",
format: "rgba8unorm",
usage: GPUTextureUsage.TRANSFER_DST | GPUTextureUsage.SAMPLED
};
const texture = device.createTexture(textureDescriptor);
// Texture data
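// Draw the image into a 2D canvas to read back its RGBA pixels, then
// stage them in a mapped buffer for the buffer-to-texture copy below.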
const canvas2d = document.createElement('canvas');
canvas2d.width = image.width;
canvas2d.height = image.height;
const context2d = canvas2d.getContext('2d');
context2d.drawImage(image, 0, 0);
const imageData = context2d.getImageData(0, 0, image.width, image.height);
const textureDataBufferDescriptor = {
size: imageData.data.length,
usage: GPUBufferUsage.COPY_SRC
};
const [textureDataBuffer, textureArrayBuffer] = device.createBufferMapped(textureDataBufferDescriptor);
const textureWriteArray = new Uint8Array(textureArrayBuffer);
textureWriteArray.set(imageData.data);
textureDataBuffer.unmap();
const dataCopyView = {
buffer: textureDataBuffer,
offset: 0,
rowPitch: image.width * 4,
imageHeight: 0
};
const textureCopyView = {
texture: texture,
mipLevel: 0,
arrayLayer: 0,
origin: { x: 0, y: 0, z: 0 }
};
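// Record a buffer-to-texture copy and submit it so the texture is
// populated before the first frame is rendered.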
const blitCommandEncoder = device.createCommandEncoder();
blitCommandEncoder.copyBufferToTexture(dataCopyView, textureCopyView, textureSize);
queue = device.getQueue();
queue.submit([blitCommandEncoder.finish()]);
// Bind group binding layout
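// Three bindings: the transform uniform buffer for the vertex stage, and
// the sampled texture plus sampler for the fragment stage.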
const transformBufferBindGroupLayoutBinding = {
binding: transformBindingNum, // register(b0) in the vertex shader.
visibility: GPUShaderStage.VERTEX,
type: "uniform-buffer"
};
const textureBindGroupLayoutBinding = {
binding: textureBindingNum,
visibility: GPUShaderStage.FRAGMENT,
type: "sampled-texture"
};
textureViewBinding = {
binding: textureBindingNum,
resource: texture.createDefaultView()
};
const samplerBindGroupLayoutBinding = {
binding: samplerBindingNum,
visibility: GPUShaderStage.FRAGMENT,
type: "sampler"
};
samplerBinding = {
binding: samplerBindingNum,
resource: device.createSampler({})
};
const bindGroupLayoutDescriptor = {
bindings: [transformBufferBindGroupLayoutBinding, textureBindGroupLayoutBinding, samplerBindGroupLayoutBinding]
};
bindGroupLayout = device.createBindGroupLayout(bindGroupLayoutDescriptor);
// Pipeline
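// The render pipeline ties together the shader stages, vertex layout,
// blending, and depth testing configured below.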
const depthStateDescriptor = {
depthWriteEnabled: true,
depthCompare: "less"
};
const pipelineLayoutDescriptor = { bindGroupLayouts: [bindGroupLayout] };
const pipelineLayout = device.createPipelineLayout(pipelineLayoutDescriptor);
const vertexStageDescriptor = {
module: shaderModule,
entryPoint: "vertex_main"
};
const fragmentStageDescriptor = {
module: shaderModule,
entryPoint: "fragment_main"
};
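// Blend the cube over the background using the texture's alpha channel.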
const colorState = {
format: "bgra8unorm",
alphaBlend: {
srcFactor: "src-alpha",
dstFactor: "one-minus-src-alpha",
operation: "add"
},
colorBlend: {
srcFactor: "src-alpha",
dstFactor: "one-minus-src-alpha",
operation: "add"
},
writeMask: GPUColorWrite.ALL
};
const pipelineDescriptor = {
layout: pipelineLayout,
vertexStage: vertexStageDescriptor,
fragmentStage: fragmentStageDescriptor,
primitiveTopology: "triangle-list",
colorStates: [colorState],
depthStencilState: depthStateDescriptor,
vertexInput: vertexInputDescriptor
};
pipeline = device.createRenderPipeline(pipelineDescriptor);
let colorAttachment = {
// attachment is acquired in render loop.
loadOp: "clear",
storeOp: "store",
clearColor: { r: 0.05, g: 0.3, b: 0.6, a: 1.0 } // GPUColor
};
// Depth stencil texture
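// A depth texture the same size as the canvas so nearer faces occlude farther ones.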
// GPUExtent3D
const depthSize = {
width: canvas.width,
height: canvas.height,
depth: 1
};
const depthTextureDescriptor = {
size: depthSize,
arrayLayerCount: 1,
mipLevelCount: 1,
sampleCount: 1,
dimension: "2d",
format: "depth32float-stencil8",
usage: GPUTextureUsage.OUTPUT_ATTACHMENT
};
const depthTexture = device.createTexture(depthTextureDescriptor);
// GPURenderPassDepthStencilAttachmentDescriptor
const depthAttachment = {
attachment: depthTexture.createDefaultView(),
depthLoadOp: "clear",
depthStoreOp: "store",
clearDepth: 1.0
};
renderPassDescriptor = {
colorAttachments: [colorAttachment],
depthStencilAttachment: depthAttachment
};
render();
}
/* Transform Buffers and Bindings */
const transformSize = 4 * 16;
const transformBufferDescriptor = {
size: transformSize,
usage: GPUBufferUsage.UNIFORM | GPUBufferUsage.MAP_WRITE
};
let mappedGroups = [];
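// Uniform buffers and their bind groups are recycled: a new one is created
// only when none has finished mapping for write since the last frame.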
function render() {
if (mappedGroups.length === 0) {
const [buffer, arrayBuffer] = device.createBufferMapped(transformBufferDescriptor);
const group = device.createBindGroup(createBindGroupDescriptor(buffer, textureViewBinding, samplerBinding));
let mappedGroup = { buffer: buffer, arrayBuffer: arrayBuffer, bindGroup: group };
drawCommands(mappedGroup);
} else
drawCommands(mappedGroups.shift());
}
function createBindGroupDescriptor(transformBuffer, textureViewBinding, samplerBinding) {
const transformBufferBinding = {
buffer: transformBuffer,
offset: 0,
size: transformSize
};
const transformBufferBindGroupBinding = {
binding: transformBindingNum,
resource: transformBufferBinding
};
return {
layout: bindGroupLayout,
bindings: [transformBufferBindGroupBinding, textureViewBinding, samplerBinding]
};
}
function drawCommands(mappedGroup) {
updateTransformArray(new Float32Array(mappedGroup.arrayBuffer));
mappedGroup.buffer.unmap();
const commandEncoder = device.createCommandEncoder();
renderPassDescriptor.colorAttachments[0].attachment = swapChain.getCurrentTexture().createDefaultView();
const passEncoder = commandEncoder.beginRenderPass(renderPassDescriptor);
// Encode drawing commands.
passEncoder.setPipeline(pipeline);
// Vertex attributes
passEncoder.setVertexBuffers(0, [verticesBuffer], [0]);
// Bind groups
passEncoder.setBindGroup(bindGroupIndex, mappedGroup.bindGroup);
passEncoder.draw(36, 1, 0, 0);
passEncoder.endPass();
queue.submit([commandEncoder.finish()]);
// Ready the current buffer for update after GPU is done with it.
mappedGroup.buffer.mapWriteAsync().then((arrayBuffer) => {
mappedGroup.arrayBuffer = arrayBuffer;
mappedGroups.push(mappedGroup);
});
requestAnimationFrame(render);
}
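// Write a fresh model-view-projection matrix into the mapped transform buffer each frame.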
function updateTransformArray(array) {
let viewMatrix = mat4.create();
mat4.translate(viewMatrix, viewMatrix, vec3.fromValues(0, 0, -5));
let now = Date.now() / 1000;
mat4.rotate(viewMatrix, viewMatrix, 1, vec3.fromValues(Math.sin(now), 1, 1));
let modelViewProjectionMatrix = mat4.create();
mat4.multiply(modelViewProjectionMatrix, projectionMatrix, viewMatrix);
mat4.copy(array, modelViewProjectionMatrix);
}
window.addEventListener("load", init);
</script>
</body>
</html>