renderer = {
/**
 * Minimal WebGPU renderer that draws a single THREE.BufferGeometry with a
 * debug shader (abs(normal) as color). Canvas, context and device come from
 * the notebook-scoped `gpu.init()` helper defined in another cell.
 *
 * Per-frame GPU resources are recreated on every renderGeometry() call (see
 * the TODO there); the previous frame's resources are destroyed on the next
 * call or in cleanup().
 */
class ThreeGeometryWebGPURenderer {
  canvas;
  context;
  device;
  pipeline;
  // Must match the format the context was configured with in gpu.init().
  // NOTE(review): the modern idiom is navigator.gpu.getPreferredCanvasFormat();
  // confirm against the gpu cell before changing.
  format = 'bgra8unorm';
  initialized = false;
  // Backing-store (device-pixel) size; the depth texture is sized from this.
  size = { width: 1, height: 1 };
  // GPU resources (buffers, depth texture) from the latest frame, pending
  // destruction once the queue no longer needs them.
  disposable = [];

  // `options` is currently unused; accepted for forward compatibility.
  constructor(options = {}) {
  }

  /** Destroys all outstanding per-frame resources, then the device itself. */
  cleanup() {
    this.disposable.forEach(d => d.destroy());
    this.disposable = [];
    this.device.destroy();
  }

  /**
   * Acquires canvas/context/device (via the notebook-scoped `gpu`, `width`,
   * `height` cells) and builds the render pipeline. Must be awaited before
   * renderGeometry().
   */
  async init() {
    const { context, device } = await gpu.init(width, height);
    this.context = context;
    this.canvas = context.canvas;
    this.device = device;
    // Track the real backing-store size so the depth texture created in
    // renderGeometry() matches the color attachment even if setSize() is
    // never called (WebGPU requires attachment sizes to agree; the previous
    // 1x1 default would fail validation).
    this.size = { width: this.canvas.width, height: this.canvas.height };
    await this.createPipeline();
    this.initialized = true;
  }

  /**
   * Compiles the debug shader (MVP transform + abs(normal) coloring) and
   * creates the render pipeline with two float32x4 vertex slots (position,
   * normal) and a depth24plus depth attachment.
   * @throws {Error} if init() has not provided a device, or creation fails.
   */
  async createPipeline() {
    if (!this.device) {
      throw new Error('Device not initialized');
    }
    const shaderCode = `
struct Uniforms {
modelViewProjectionMatrix: mat4x4f,
}
struct VertexInput {
@builtin(vertex_index) vertexIndex: u32,
@location(0) position: vec4f,
@location(1) normal: vec4f,
}
struct VertexOutput {
@builtin(position) position: vec4f,
@location(0) normal: vec4f,
}
@binding(0) @group(0) var<uniform> uniforms: Uniforms;
@vertex
fn vs(input: VertexInput) -> VertexOutput {
return VertexOutput(
uniforms.modelViewProjectionMatrix * input.position,
input.normal,
);
}
@fragment
fn fs(input: VertexOutput) -> @location(0) vec4f {
// let lightDir = normalize(vec3f(1.0, 1.0, 1.0));
// let diffuse = max(dot(normalize(input.normal.xyz), lightDir), 0.3);
// return vec4f(vec3f(diffuse) * abs(input.normal.xyz), 1.0);
return abs(input.normal);
}
`;
    try {
      // One module serves both entry points (the original compiled the same
      // source twice).
      const module = this.device.createShaderModule({ code: shaderCode });
      this.pipeline = this.device.createRenderPipeline({
        layout: 'auto',
        vertex: {
          module,
          entryPoint: 'vs',
          buffers: [
            // Slot 0: position, padded to vec4 (w = 1) on the CPU side.
            {
              arrayStride: 16,
              attributes: [
                { shaderLocation: 0, offset: 0, format: 'float32x4' },
              ],
            },
            // Slot 1: normal, padded to vec4 (w = 1) on the CPU side.
            {
              arrayStride: 16,
              attributes: [
                { shaderLocation: 1, offset: 0, format: 'float32x4' },
              ],
            },
          ],
        },
        fragment: {
          module,
          entryPoint: 'fs',
          targets: [
            { format: this.format },
          ],
        },
        primitive: {
          // `primitiveTopology` is a notebook-scoped cell.
          topology: primitiveTopology,
          // cullMode: 'back',
        },
        depthStencil: {
          depthWriteEnabled: true,
          depthCompare: 'less',
          format: 'depth24plus',
        },
      });
    } catch (error) {
      console.error('Failed to create render pipeline:', error);
      throw error;
    }
  }

  // Expands a 3-component BufferAttribute into a tightly packed vec4 array
  // (w = 1), matching the pipeline's float32x4 vertex layout.
  #attributeToVec4Array(attribute) {
    const out = new Float32Array(attribute.count * 4);
    for (let i = 0; i < attribute.count; i++) {
      const src = i * attribute.itemSize; // itemSize = 3
      const dst = i * 4;
      out[dst] = attribute.array[src];
      out[dst + 1] = attribute.array[src + 1];
      out[dst + 2] = attribute.array[src + 2];
      out[dst + 3] = 1;
    }
    return out;
  }

  // Creates a GPU vertex buffer and uploads `data` into it.
  #createVertexBuffer(data) {
    const buffer = this.device.createBuffer({
      size: data.byteLength,
      usage: GPUBufferUsage.VERTEX | GPUBufferUsage.COPY_DST,
    });
    this.device.queue.writeBuffer(
      buffer,
      0,
      data.buffer,
      data.byteOffset,
      data.byteLength,
    );
    return buffer;
  }

  /**
   * Renders `geometry` once using `camera` and `scene` world transforms.
   * Uploads fresh GPU buffers every call (TODO: cache); the previous frame's
   * resources are destroyed first (safe: that frame's submit has already been
   * queued and the queue retains its own references).
   * @param geometry THREE.BufferGeometry with position/normal attributes and an index.
   * @param camera   THREE.Camera providing view + projection matrices.
   * @param scene    THREE.Scene whose matrixWorld is the model matrix.
   */
  async renderGeometry(geometry, camera, scene) {
    if (!this.initialized || !this.device || !this.context) {
      console.error('Renderer not properly initialized');
      return;
    }
    if (!geometry.attributes.position || !geometry.attributes.normal) {
      console.error('Geometry must have position and normal attributes');
      return;
    }
    this.disposable.forEach(d => d.destroy());
    this.disposable = [];
    // TODO cache everything
    try {
      const commandEncoder = this.device.createCommandEncoder();
      const textureView = this.context.getCurrentTexture().createView();
      // Depth attachment must match the color attachment's dimensions.
      const depthTexture = this.device.createTexture({
        size: [this.size.width, this.size.height, 1],
        format: 'depth24plus',
        usage: GPUTextureUsage.RENDER_ATTACHMENT,
      });
      const renderPassDescriptor = {
        colorAttachments: [
          {
            view: textureView,
            clearValue: { r: 0.0, g: 0.0, b: 0.0, a: 1.0 },
            loadOp: 'clear',
            storeOp: 'store',
          },
        ],
        depthStencilAttachment: {
          view: depthTexture.createView(),
          depthClearValue: 1.0,
          depthLoadOp: 'clear',
          depthStoreOp: 'store',
        },
      };
      const positionArray = this.#attributeToVec4Array(geometry.attributes.position);
      const vertexBuffer = this.#createVertexBuffer(positionArray);
      const normalArray = this.#attributeToVec4Array(geometry.attributes.normal);
      const normalBuffer = this.#createVertexBuffer(normalArray);
      // One mat4x4f of f32: 4 * 4 * 4 bytes.
      const uniformBuffer = this.device.createBuffer({
        size: 4 * 4 * 4,
        usage: GPUBufferUsage.UNIFORM | GPUBufferUsage.COPY_DST,
      });
      // BUGFIX: the index array may be Uint16Array or Uint32Array depending on
      // vertex count — the original hard-coded uint16, corrupting indices for
      // large geometries.
      const indices = geometry.index.array;
      const indexFormat = indices.BYTES_PER_ELEMENT === 4 ? 'uint32' : 'uint16';
      const indexBuffer = this.device.createBuffer({
        // BUGFIX: mappedAtCreation requires a size that is a multiple of 4;
        // an odd-length Uint16Array would otherwise fail validation.
        size: Math.ceil(indices.byteLength / 4) * 4,
        usage: GPUBufferUsage.INDEX | GPUBufferUsage.COPY_DST,
        mappedAtCreation: true,
      });
      // View only the first `indices.length` elements so any padding stays 0.
      new indices.constructor(indexBuffer.getMappedRange(), 0, indices.length)
        .set(indices);
      indexBuffer.unmap();
      // First update camera matrices.
      camera.updateMatrixWorld();
      // Then calculate view matrix (inverse of camera's world matrix).
      const viewMatrix = camera.matrixWorldInverse
        .copy(camera.matrixWorld)
        .invert();
      // Calculate model matrix (from scene).
      const modelMatrix = scene.matrixWorld;
      // Calculate modelView matrix (view * model).
      const modelViewMatrix = new THREE.Matrix4();
      modelViewMatrix.multiplyMatrices(viewMatrix, modelMatrix);
      // Finally calculate modelViewProjection (projection * view * model).
      const modelViewProjectionMatrix = new THREE.Matrix4();
      modelViewProjectionMatrix.multiplyMatrices(
        camera.projectionMatrix,
        modelViewMatrix,
      );
      const uniformArray = new Float32Array(modelViewProjectionMatrix.elements);
      this.device.queue.writeBuffer(
        uniformBuffer,
        0,
        uniformArray.buffer,
        uniformArray.byteOffset,
        uniformArray.byteLength,
      );
      // BUGFIX: the depth texture was previously never destroyed, leaking one
      // full-canvas texture per frame.
      this.disposable.push(
        vertexBuffer,
        normalBuffer,
        indexBuffer,
        uniformBuffer,
        depthTexture,
      );
      const bindGroup = this.device.createBindGroup({
        layout: this.pipeline.getBindGroupLayout(0),
        entries: [
          {
            binding: 0,
            resource: { buffer: uniformBuffer },
          },
        ],
      });
      const passEncoder = commandEncoder.beginRenderPass(renderPassDescriptor);
      passEncoder.setPipeline(this.pipeline);
      passEncoder.setVertexBuffer(0, vertexBuffer);
      passEncoder.setVertexBuffer(1, normalBuffer);
      passEncoder.setIndexBuffer(indexBuffer, indexFormat);
      passEncoder.setBindGroup(0, bindGroup);
      passEncoder.drawIndexed(indices.length, 1, 0, 0, 0);
      passEncoder.end();
      this.device.queue.submit([commandEncoder.finish()]);
    } catch (error) {
      console.error('Error during rendering:', error);
    }
  }

  /**
   * Sets the canvas backing store to `width`x`height` CSS pixels scaled by
   * the device pixel ratio, keeping the CSS size at the logical dimensions.
   * Call only after init() (needs this.canvas).
   */
  setSize(width, height) {
    const dpr = window.devicePixelRatio;
    this.size = {
      width: width * dpr,
      height: height * dpr,
    };
    this.canvas.width = this.size.width;
    this.canvas.height = this.size.height;
    this.canvas.style.width = `${width}px`;
    this.canvas.style.height = `${height}px`;
  }
}
// This should all look very familiar...
/**
 * Notebook entry point: builds the renderer plus a minimal THREE
 * scene/camera/controls, wires resize handling and `invalidation` teardown,
 * and returns `{ canvas, animate }` for the display cell.
 */
async function init() {
  // Animation-frame id. NOTE(review): never assigned in this cell, so the
  // cancelAnimationFrame below is a no-op — presumably the display cell
  // drives its own loop via `animate`; confirm.
  let frame;
  let scene;
  let camera;
  let controls;
  let renderer;
  // const geometry = new THREE.BoxGeometry(1, 1, 1);
  // const geometry = new THREE.SphereGeometry(0.5, 32, 32);
  // const geometry = new THREE.TorusKnotGeometry(0.6, 0.2, 16, 8);
  const geometry = new THREE.TeapotGeometry(1, 10);
  const resize = () => {
    // `width`/`height` are reactive notebook cells captured by this closure.
    renderer.setSize(width, height);
    camera.aspect = renderer.size.width / renderer.size.height;
    camera.updateProjectionMatrix();
  };
  try {
    renderer = new ThreeGeometryWebGPURenderer();
    await renderer.init();
    scene = new THREE.Scene();
    camera = new THREE.PerspectiveCamera(
      70,
      renderer.size.width / renderer.size.height,
      0.001,
      1000,
    );
    controls = new THREE.OrbitControls(camera, renderer.canvas);
    camera.position.set(0, 0, 3);
    window.addEventListener('resize', resize);
    resize();
  } catch (error) {
    // Logged rather than rethrown so the console shows the root cause; note
    // that if `renderer` itself failed, the return below will still throw.
    console.error('Failed to initialize scene:', error);
  }
  invalidation.then(() => {
    cancelAnimationFrame(frame);
    // BUGFIX: guard teardown with optional chaining — if initialization
    // failed partway, `controls`/`renderer` are undefined and an unguarded
    // call threw here, leaking the resize listener (removeEventListener
    // never ran).
    controls?.dispose();
    renderer?.cleanup();
    window.removeEventListener('resize', resize);
  });
  return {
    canvas: renderer.canvas,
    animate: () => {
      renderer.renderGeometry(geometry, camera, scene);
    },
  };
}
return await init()
}