Normals


Normals are a vital concept in computer graphics - essentially the direction a surface is facing. They are used for culling (discarding faces that point away from the camera) and for lighting (how brightly a surface is lit depends on the angle between its normal and the light direction).
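For a flat triangle the normal can be computed directly from its vertices with a cross product. As a minimal sketch, using the gl-matrix library that this demo already loads (the helper name 'faceNormal' is just for illustration):

// Face normal of a triangle with corners a, b, c (each an [x,y,z] array).
// The winding order of the vertices decides which way the normal points.
function faceNormal(a, b, c)
{
    let ab = vec3.create();   vec3.subtract(ab, b, a);   // edge a -> b
    let ac = vec3.create();   vec3.subtract(ac, c, a);   // edge a -> c
    let n  = vec3.create();   vec3.cross(n, ab, ac);     // perpendicular to both edges
    vec3.normalize(n, n);                                // unit length
    return n;
}

// The demo triangle lies in the z=0 plane, so its normal points along the z-axis:
// faceNormal([-1,-1,0], [1,-1,0], [0,1,0])  ->  [0, 0, 1]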


A single triangle provides a point of reference for our camera - as the camera orbits around it, we see the triangle (and its normal line) from different angles.
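The orbit is driven by a simple angle counter inside the frame loop further down - the core of it looks like this (same variable names as the full listing):

// Move the eye in a circle of radius 3 around the origin and look back at it.
let cameraEye = [ Math.cos(counter)*3.0, 0.0, Math.sin(counter)*3.0 ];
mat4.lookAt(viewMatrix, cameraEye, [0,0,0], [0,1,0]);   // rebuild the view matrix
counter += 0.001;                                       // small step each frame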


Functions Used: setVertexBuffer(), setIndexBuffer(), drawIndexed(), createBuffer(), getMappedRange(), getContext(), requestAdapter(), getPreferredCanvasFormat(), createCommandEncoder(), beginRenderPass(), setPipeline(), draw(), end(), submit(), getCurrentTexture(), createView(), createShaderModule()


To visualize the normal we draw a line through the triangle along its normal direction (the z-axis) - the line is wrapped up in its own function object called 'lines'.
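The helper is used in two steps - created once at startup and then drawn every frame after the triangle, sharing the same matrices and depth buffer (taken from the main listing below):

// One-time setup
let line = new lines();
line.create(device, presentationFormat);

// Every frame, after the triangle pass
line.draw( device, context, depthTexture, modelMatrix, viewMatrix, projectionMatrix );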


lines = function()
{

this.create = function(device, presentationFormat)
{
    // Vertex shader - same MVP transform as the triangle's shader
    const vertWGSL = `
    struct Transforms {
        model      : mat4x4<f32>,
        view       : mat4x4<f32>,
        projection : mat4x4<f32>,
    };
    @group(0) @binding(0) var<uniform> transforms : Transforms;

    struct VSOut {
        @builtin(position) Position: vec4<f32>,
        @location(0)       color   : vec3<f32>,
    };

    @vertex
    fn main(@location(0) inPos  : vec3<f32>,
            @location(1) inColor: vec3<f32>) -> VSOut 
    {
        var mvp = transforms.projection * transforms.view * transforms.model;

        var vsOut: VSOut;
        vsOut.Position = mvp * vec4<f32>(inPos, 1.0);
        vsOut.color    = inColor;
        return vsOut;
    }
    `;

    const fragWGSL = `
    @fragment
    fn main(@location(0) inColor: vec3<f32>) -> @location(0) vec4<f32> 
    {
        return vec4<f32>(inColor, 1.0);
    }
    `;

    // Line endpoints run along the z-axis (the triangle's normal direction)
    const positions = new Float32Array([ 0.0, 0.0,  1.0,   // Position Vertex Buffer Data
                                         0.0, 0.0, -1.0 ]);
    const colors    = new Float32Array([ 1.0, 0.0, 0.0,    // Color Vertex Buffer Data
                                         0.0, 1.0, 0.0 ]);
    const indices   = new Uint16Array( [ 0, 1 ]);          // Index Buffer Data

    // Helper - create a GPUBuffer, copy the data in while it is mapped, then unmap
    const createBuffer = (arrData, usage) => {
      const buffer = device.createBuffer({ size            : ((arrData.byteLength + 3) & ~3), // round up to a multiple of 4
                                           usage           : usage,
                                           mappedAtCreation: true  });
      if ( arrData instanceof Float32Array )
      { (new Float32Array(buffer.getMappedRange())).set(arrData) }
      else
      { (new Uint16Array (buffer.getMappedRange())).set(arrData) }
      buffer.unmap();
      return buffer;
    }

    // Declare buffer handles (GPUBuffer)
    this.positionBuffer = createBuffer(positions, GPUBufferUsage.VERTEX);
    this.colorBuffer    = createBuffer(colors,    GPUBufferUsage.VERTEX);
    this.indexBuffer    = createBuffer(indices,   GPUBufferUsage.INDEX);

    // ----------------------------------------------------------------

    // Uniform buffer holding three mat4x4<f32> (model, view, projection)
    this.mvpUniformBuffer = device.createBuffer({
      size : 64*3,
      usage: GPUBufferUsage.UNIFORM | GPUBufferUsage.COPY_DST
    });

    // ----------------------------------------------------------------

    this.sceneUniformBindGroupLayout = device.createBindGroupLayout({
      entries: [ { binding: 0, visibility: GPUShaderStage.VERTEX, buffer: { type: "uniform" } } ]
    });

    this.uniformBindGroup = device.createBindGroup({
      layout : this.sceneUniformBindGroupLayout,
      entries: [ { binding: 0, resource: { buffer: this.mvpUniformBuffer } } ],
    });

    // ----------------------------------------------------------------

    this.pipeline = device.createRenderPipeline({
      layout: device.createPipelineLayout({ bindGroupLayouts: [this.sceneUniformBindGroupLayout] }),
      vertex:    { module     : device.createShaderModule({ code: vertWGSL }),
                   entryPoint : 'main',
                   buffers    : [ { arrayStride: 12, attributes: [{ shaderLocation: 0, format: "float32x3", offset: 0 }] },
                                  { arrayStride: 12, attributes: [{ shaderLocation: 1, format: "float32x3", offset: 0 }] }
                   ]
      },
      fragment:  { module     : device.createShaderModule({ code: fragWGSL }),
                   entryPoint : 'main',
                   targets    : [ { format: presentationFormat } ],
      },
      primitive: {
        topology : "line-list",      // two indices per line segment
        frontFace: "cw",
        cullMode : 'none'
      },
      depthStencil: {
        format           : "depth24plus",
        depthWriteEnabled: true,
        depthCompare     : "less"
      }
    });
}
// -----------------------------------------------------

this.draw = function( device, context, depthTexture, modelMatrix, viewMatrix, projectionMatrix )
{
    // Upload the three matrices (model at byte 0, view at 64, projection at 128)
    device.queue.writeBuffer(this.mvpUniformBuffer,      0,      modelMatrix);
    device.queue.writeBuffer(this.mvpUniformBuffer,      64,     viewMatrix);
    device.queue.writeBuffer(this.mvpUniformBuffer,      128,    projectionMatrix);

    const renderPassDescription = {
      colorAttachments: [{
        view      : context.getCurrentTexture().createView(),
        loadOp    : 'load',               // keep the triangle already rendered this frame
        clearValue: [0, 0.5, 0.5, 1],     // clear color (unused while loadOp is 'load')
        storeOp   : 'store'
      }],
      depthStencilAttachment: {
        view           : depthTexture.createView(),
        depthLoadOp    : 'load',          // keep the triangle's depth values
        depthClearValue: 1,
        depthStoreOp   : "store",
      }
    };

    renderPassDescription.colorAttachments[0].view = context.getCurrentTexture().createView();
    const commandEncoder = device.createCommandEncoder();
    const renderPass     = commandEncoder.beginRenderPass(renderPassDescription);

    renderPass.setBindGroup(0, this.uniformBindGroup);
    renderPass.setPipeline(this.pipeline);
    renderPass.setVertexBuffer(0, this.positionBuffer);
    renderPass.setVertexBuffer(1, this.colorBuffer);
    renderPass.setIndexBuffer(this.indexBuffer, 'uint16');
    renderPass.drawIndexed(2, 1);         // two indices -> one line segment
    renderPass.end();

    device.queue.submit([commandEncoder.finish()]);
}
} // end lines


The body of the code that draws the rotating triangle:

let promise      = await fetch('https://cdnjs.cloudflare.com/ajax/libs/gl-matrix/2.6.0/gl-matrix-min.js');
let text         = await promise.text();
let script       = document.createElement('script');
script.type      = 'text/javascript';
script.async     = false;
script.innerHTML = text;
document.body.appendChild(script); 

let canvas = document.createElement('canvas');
document.body.appendChild( canvas );
canvas.width = canvas.height = 512;

const adapter = await navigator.gpu.requestAdapter();
const device  = await adapter.requestDevice();
const context = canvas.getContext('webgpu');

const presentationSize   = [ canvas.width, canvas.height ];

const presentationFormat = navigator.gpu.getPreferredCanvasFormat();

context.configure({ device : device,
                    compositingAlphaMode: "opaque",   // newer WebGPU implementations call this option 'alphaMode'
                    format : presentationFormat,
                    size   : presentationSize });
const vertWGSL = `
struct Transforms {
    model      : mat4x4<f32>,
    view       : mat4x4<f32>,
    projection : mat4x4<f32>,
};
@group(0) @binding(0) var<uniform> transforms : Transforms;

struct VSOut {
    @builtin(position) Position: vec4<f32>,
    @location(0)       color   : vec3<f32>,
};

@vertex
fn main(@location(0) inPos  : vec3<f32>,
        @location(1) inColor: vec3<f32>) -> VSOut 
{
    var mvp = transforms.projection * transforms.view * transforms.model;
    
    var vsOut: VSOut;
    vsOut.Position = mvp * vec4<f32>(inPos, 1.0);
    vsOut.color    = inColor;
    return vsOut;
}
`;

const fragWGSL = `
@fragment
fn main(@location(0) inColor: vec3<f32>) -> @location(0) vec4<f32> 
{
    return vec4<f32>(inColor, 1.0);
}
`;

const positions = new Float32Array([-1.0, -1.0,  0.0,   // Position Vertex Buffer Data
                                      1.0, -1.0,  0.0,
                                      0.0,  1.0,  0.0 ]);
const colors    = new Float32Array([ 1.0,  0.0,  0.0,   // Color Vertex Buffer Data
                                     0.0,  1.0,  0.0,
                                     0.0,  0.0,  1.0 ]);
const indices   = new Uint16Array( [ 0, 1, 2 ]);        // Index Buffer Data

const createBuffer = (arrData, usage) => {
  const buffer = device.createBuffer({ size            : ((arrData.byteLength + 3) & ~3),  // round up to a multiple of 4
                                       usage           : usage,
                                       mappedAtCreation: true  });
  if ( arrData instanceof Float32Array )
  { (new Float32Array(buffer.getMappedRange())).set(arrData) }
  else
  { (new Uint16Array (buffer.getMappedRange())).set(arrData) }
  buffer.unmap();
  return buffer;
}

// Declare buffer handles (GPUBuffer)
var positionBuffer = createBuffer(positions, GPUBufferUsage.VERTEX);
var colorBuffer    = createBuffer(colors,    GPUBufferUsage.VERTEX);
var indexBuffer    = createBuffer(indices,   GPUBufferUsage.INDEX);

// ----------------------------------------------------------------

function buildMatrix( p, r, s )  // position, rotation, scale
{
    // if not set fall back to default values
    if (!s) s = {x:1, y:1, z:1};
    if (!r) r = {x:0, y:0, z:0};
    if (!p) p = {x:0, y:0, z:0};

    // Create the matrix in Javascript (using the gl-matrix library)
    const modelMatrix = mat4.create();

    // create the model transform with a rotation and translation
    let translateMat = mat4.create();   mat4.fromTranslation( translateMat, Object.values(p) );
    let rotateXMat   = mat4.create();   mat4.fromXRotation(rotateXMat, r.x);
    let rotateYMat   = mat4.create();   mat4.fromYRotation(rotateYMat, r.y);
    let rotateZMat   = mat4.create();   mat4.fromZRotation(rotateZMat, r.z);
    let scaleMat     = mat4.create();   mat4.fromScaling(scaleMat, Object.values(s) );

    mat4.multiply(modelMatrix, modelMatrix,   translateMat);
    mat4.multiply(modelMatrix, modelMatrix,   rotateXMat);
    mat4.multiply(modelMatrix, modelMatrix,   rotateYMat);
    mat4.multiply(modelMatrix, modelMatrix,   rotateZMat);
    mat4.multiply(modelMatrix, modelMatrix,   scaleMat);
    return modelMatrix;
}

// build a model matrix (scale, rotate and position it wherever we want)
let modelMatrix = buildMatrix();

// setup the projection
let projectionMatrix = mat4.create();
mat4.perspective(projectionMatrix, Math.PI / 2, canvas.width / canvas.height, 0.001, 5000.0);

// default camera `lookat` - camera is 4 units down the negative z-axis looking at '0,0,0'
let viewMatrix = mat4.create();
mat4.lookAt(viewMatrix, [0,0,-4],  [0,0,0], [0, 1, 0]);

// Uniform buffer holding three mat4x4<f32> (model, view, projection)
let mvpUniformBuffer = device.createBuffer({
  size : 64*3,
  usage: GPUBufferUsage.UNIFORM | GPUBufferUsage.COPY_DST
});

device.queue.writeBuffer(mvpUniformBuffer,      0,      modelMatrix);
device.queue.writeBuffer(mvpUniformBuffer,      64,     viewMatrix);
device.queue.writeBuffer(mvpUniformBuffer,      128,    projectionMatrix);

// ----------------------------------------------------------------

let sceneUniformBindGroupLayout = device.createBindGroupLayout({
  entries: [
    { binding: 0, visibility: GPUShaderStage.VERTEX,   buffer: { type: "uniform" } }
  ]
});

let uniformBindGroup = device.createBindGroup({
  layout : sceneUniformBindGroupLayout,
  entries: [
    { binding: 0, resource: { buffer: mvpUniformBuffer } }
  ],
});

// ----------------------------------------------------------------

const pipeline = device.createRenderPipeline({
  layout: device.createPipelineLayout({ bindGroupLayouts: [sceneUniformBindGroupLayout] }),
  vertex:    { module     : device.createShaderModule({ code: vertWGSL }),
               entryPoint : 'main',
               buffers    : [ { arrayStride: 12, attributes: [{ shaderLocation: 0,
                                                                format: "float32x3",
                                                                offset: 0  }]         },
                              { arrayStride: 12, attributes: [{ shaderLocation: 1,
                                                                format: "float32x3",
                                                                offset: 0  }]         }
    ]
  },
  fragment:  { module     : device.createShaderModule({ code: fragWGSL }),
               entryPoint : 'main',
               targets    : [ { format: presentationFormat } ],
  },
  primitive: {
    topology : "triangle-list",
    frontFace: "cw",
    cullMode : 'none'
  },
  depthStencil: {
    format           : "depth24plus",
    depthWriteEnabled: true,
    depthCompare     : "less"
  }
});

// Depth buffer shared by the triangle pass and the line pass
const depthTexture = device.createTexture({
  size  : [canvas.width, canvas.height, 1],
  format: "depth24plus",
  usage : GPUTextureUsage.RENDER_ATTACHMENT
});

// ---------------

let line = new lines();
line.create(device,presentationFormat);


let counter = 0.0;

function frame()
{
  // setup a transform for each triangle
  let tris = [  { p:{x:0,y:0,z:0}, r:{x:0,y:0.0,z:0.0}, s:{x:1.0, y:1.0, z:1.0} } ];

  // loop over each triangle and render it
  //tris.forEach( (t,k)=>{
  let t = tris[0];
  let k = 0;

    let modelMatrix = buildMatrix(t.p, t.r, t.s);
    // update the model matrix so each triangle can be drawn with its own transform
    device.queue.writeBuffer(mvpUniformBuffer,      0,      modelMatrix);

    // Rotate the camera around the origin in a circle
    let cameraEye = [ Math.cos(counter)*3.0, 0.0, Math.sin(counter)*3.0 ];
    mat4.lookAt(viewMatrix, cameraEye,  [0,0,0], [0, 1, 0]);
    device.queue.writeBuffer(mvpUniformBuffer,      64,     viewMatrix);

    // simple counter
    counter += 0.001;

    const renderPassDescription = {
      colorAttachments: [{
        view      : context.getCurrentTexture().createView(),
        loadOp    : (k==0 ? "clear" : "load"),
        clearValue: [0, 0.5, 0.5, 1], // clear screen to color
        storeOp   : 'store'
      }],
      depthStencilAttachment: {
        view           : depthTexture.createView(),
        depthLoadOp    : (k==0 ? "clear" : "load"),
        depthClearValue: 1,
        depthStoreOp   : "store",
      }
    };

    renderPassDescription.colorAttachments[0].view = context.getCurrentTexture().createView();
    const commandEncoder = device.createCommandEncoder();
    const renderPass     = commandEncoder.beginRenderPass(renderPassDescription);

    renderPass.setBindGroup(0, uniformBindGroup);
    renderPass.setPipeline(pipeline);
    renderPass.setVertexBuffer(0, positionBuffer);
    renderPass.setVertexBuffer(1, colorBuffer);
    renderPass.setIndexBuffer(indexBuffer, 'uint16');
    renderPass.drawIndexed(3, 1);   // three indices -> one triangle
    renderPass.end();
    device.queue.submit([commandEncoder.finish()]);
  //});

  // draw the normal line on top, reusing the same matrices and depth buffer
  line.draw( device, context, depthTexture, modelMatrix, viewMatrix, projectionMatrix );

  // animate - keep updating
  requestAnimationFrame(frame);
}


frame();


console.log('ready...');




Resources and Links


• WebGPU Lab Example [LINK]
