www.xbdev.net
xbdev - software development
Thursday February 19, 2026
Home | Contact | Support | WebGPU Graphics and Compute ... | WebGPU.. Games, Tutorials, Demos, Projects, and Code.....
     
 

WebGPU..

Games, Tutorials, Demos, Projects, and Code.....

 


Texture Mapping


Computer graphics would be nowhere without texturing! Textures are the butter on the bread. They are a key aspect in graphics - they let us create extra super amazingly detailed scenes without having quadrillions of vertices! And textures aren't just for color! Oh no, they're so much more - they can be used for height information, displacement data, normals, sub-surface scattering, ... Just no end to their juicy goodness.


Basic textured square - the texture is generated - simple xor square grid.
Basic textured square - the texture is generated - simple xor square grid.


Functions Used: setVertexBuffer(), setIndexBuffer(), drawIndexed(), createBuffer(), getMappedRange(), getContext(), requestAdapter(), getPreferredCanvasFormat(), createCommandEncoder(), beginRenderPass(), setPipeline(), draw(), end(), submit(), getCurrentTexture(), createView(), createShaderModule()

This tutorial is just about getting a texture on screen - putting it on a good old quad. The quad is just 4 vertices (positions for the 4 corners). These 4 corners are positions - but we'll add some extra information. This information will be the texture coordinates.

Texture coordinates are essentially references to the image (with (0,0) the top left and (1,1) the bottom right). The texture coordinates then map and interpolate the texture color onto the surface of the geometry (typically triangles).


// Builds a small procedurally generated test texture (a sine/xor-style grid)
// together with a linear-filtered sampler.
//
// device  - the GPUDevice used to allocate the sampler/texture.
// returns - { t: GPUTexture, s: GPUSampler }
function createTexture( device )
{
    const imgWidth  = 256;
    const imgHeight = 256;

    const textureSampler = device.createSampler({
        minFilter: "linear",
        magFilter: "linear"
    });
    const basicTexture = device.createTexture({
        size:   [imgWidth, imgHeight, 1],
        format: presentationFormat, // e.g. "bgra8unorm" or "rgba8unorm"
        usage:  GPUTextureUsage.COPY_DST | GPUTextureUsage.TEXTURE_BINDING
    });

    // CPU-side pixel buffer - 4 bytes (r,g,b,a) per texel.
    const textureData = new Uint8Array( imgWidth * imgHeight * 4 );

    for (let y = 0; y < imgHeight; y++) {
        for (let x = 0; x < imgWidth; x++) {
            const index = (y * imgWidth + x) * 4;
            // Sum of two sine waves gives a repeating checker-like pattern.
            // NOTE(review): the ternary arms were partially lost when this
            // listing was extracted - arm values assumed, verify visually.
            const on = Math.sin(32 * x / imgHeight) + Math.sin(32 * y / imgHeight) > 0.0;
            const r  = on ? 0   : 128;
            const g  = on ? 64  : 128;
            const b  = on ? 128 : 0;
            textureData[index + 0] = r;
            textureData[index + 1] = g;
            textureData[index + 2] = b;
            textureData[index + 3] = 255; // fully opaque - no alpha blending
        }
    }

    // NOTE(review): the data is written in r,g,b,a byte order - if
    // presentationFormat is "bgra8unorm" the red/blue channels will swap.
    device.queue.writeTexture(
        { texture: basicTexture },
        textureData,
        {   offset     : 0,
            bytesPerRow: imgWidth * 4,
            rowsPerImage: imgHeight
        },
        [ imgWidth, imgHeight, 1 ] );
    return { t: basicTexture, s: textureSampler };
}


Once we've got the texture - now it's a matter of linking it into the rest of the setup, this includes:

1. Adding it to the layout/binding - so the texture is passed to the fragment shader
2. Modifying the pipeline so it describes the streaming data (both position and texture coordinates)
3. Modifying the fragment shader so it knows about the texture and the sampler
4. Passing the UV coordinates to the vertex shader
5. Updating the vertex shader so it knows about the uv coordinates (as well as the position)
6. Pass the UV coordinates along from the vertex shader to the fragment shader
7. Fragment shader - use the UV coordinates and the texture/sampler - lookup the color from the texture and return that color to the screen


That's enough of that, let's see all the code that does this:

// Load matrix library dynamically (on-the-fly) - gl-matrix provides mat4.
let matprom = await fetch( 'https://cdnjs.cloudflare.com/ajax/libs/gl-matrix/2.6.0/gl-matrix-min.js' );
let mattex  = await matprom.text();
const script = document.createElement('script');
script.type  = 'text/javascript';
// textContent (not innerHTML) - the fetched source is injected as plain text.
script.textContent = mattex;
document.head.appendChild(script);

// ------------- canvas + WebGPU device/context setup -------------
let canvas = document.createElement('canvas');
document.body.appendChild( canvas ); canvas.height = canvas.width = 512;

const context = canvas.getContext('webgpu');
const adapter = await navigator.gpu.requestAdapter();
const device  = await adapter.requestDevice();
const presentationFormat = navigator.gpu.getPreferredCanvasFormat();
context.configure({ device: device, format: presentationFormat });

// Builds a small procedurally generated test texture (a sine/xor-style grid)
// together with a linear-filtered sampler.
//
// device  - the GPUDevice used to allocate the sampler/texture.
// returns - { t: GPUTexture, s: GPUSampler }
function createTexture( device )
{
    const imgWidth  = 256;
    const imgHeight = 256;

    const textureSampler = device.createSampler({
        minFilter: "linear",
        magFilter: "linear"
    });
    const basicTexture = device.createTexture({
        size:   [imgWidth, imgHeight, 1],
        format: presentationFormat, // e.g. "bgra8unorm" or "rgba8unorm"
        usage:  GPUTextureUsage.COPY_DST | GPUTextureUsage.TEXTURE_BINDING
    });

    // CPU-side pixel buffer - 4 bytes (r,g,b,a) per texel.
    const textureData = new Uint8Array( imgWidth * imgHeight * 4 );

    for (let y = 0; y < imgHeight; y++) {
        for (let x = 0; x < imgWidth; x++) {
            const index = (y * imgWidth + x) * 4;
            // Sum of two sine waves gives a repeating checker-like pattern.
            // NOTE(review): the ternary arms were partially lost when this
            // listing was extracted - arm values assumed, verify visually.
            const on = Math.sin(32 * x / imgHeight) + Math.sin(32 * y / imgHeight) > 0.0;
            const r  = on ? 0   : 128;
            const g  = on ? 64  : 128;
            const b  = on ? 128 : 0;
            textureData[index + 0] = r;
            textureData[index + 1] = g;
            textureData[index + 2] = b;
            textureData[index + 3] = 255; // fully opaque - no alpha blending
        }
    }

    // NOTE(review): the data is written in r,g,b,a byte order - if
    // presentationFormat is "bgra8unorm" the red/blue channels will swap.
    device.queue.writeTexture(
        { texture: basicTexture },
        textureData,
        {   offset     : 0,
            bytesPerRow: imgWidth * 4,
            rowsPerImage: imgHeight
        },
        [ imgWidth, imgHeight, 1 ] );
    return { t: basicTexture, s: textureSampler };
}

// Creates the geometry for a unit quad (4 corner positions, rendered as a
// triangle-strip) plus matching texture coordinates, uploaded into two
// separate vertex buffers.
//
// device  - the GPUDevice used to allocate the buffers.
// returns - { v: GPUBuffer (positions, vec3), t: GPUBuffer (uvs, vec2) }
function createTexturedSquare( device )
{
  // 4 corners (x, y, z) - ordered for 'triangle-strip' topology.
  const positionVertex = new Float32Array([
     0.5,    0.5,   0.0,
    -0.5,    0.5,   0.0,
     0.5,   -0.5,   0.0,
    -0.5,   -0.5,   0.0
  ]);
  const vBuffer = device.createBuffer({ size:  positionVertex.byteLength,
                                        usage: GPUBufferUsage.VERTEX | GPUBufferUsage.COPY_DST });
  device.queue.writeBuffer(vBuffer, 0, positionVertex);

  // Texture coordinates for each corner ((0,0) top-left, (1,1) bottom-right).
  const uvVertex = new Float32Array([
     1.0,   1.0,
     0.0,   1.0,
     1.0,   0.0,
     0.0,   0.0,
  ]);
  const uvBuffer = device.createBuffer({ size:  uvVertex.byteLength,
                                         usage: GPUBufferUsage.VERTEX | GPUBufferUsage.COPY_DST });
  device.queue.writeBuffer(uvBuffer, 0, uvVertex);

  // return the vertex and texture buffers
  return { v: vBuffer, t: uvBuffer };
}

// Builds a combined view-projection matrix on the CPU (using gl-matrix) and
// uploads it into a uniform buffer for the vertex shader.
//
// returns - GPUBuffer holding one mat4x4<f32> (64 bytes).
function createMatrixUniform( )
{
  // Create the matrices in JavaScript (using the gl-matrix library).
  const projectionMatrix     = mat4.create();
  const viewMatrix           = mat4.create();
  const viewProjectionMatrix = mat4.create();

  // NOTE(review): fov/eye values reconstructed from a garbled listing -
  // assumed 90-degree fov and a camera 1 unit down +z looking at the origin.
  mat4.perspective(projectionMatrix, Math.PI / 2, canvas.width / canvas.height, 0.001, 500.0);
  mat4.lookAt(viewMatrix, [0, 0, 1.0], [0, 0, 0], [0, 1, 0]);
  mat4.multiply(viewProjectionMatrix, projectionMatrix, viewMatrix);

  // Create a buffer using the WebGPU API (and copy the matrix into it).
  const matrixUniformBuffer = device.createBuffer({
     size:  viewProjectionMatrix.byteLength,
     usage: GPUBufferUsage.UNIFORM | GPUBufferUsage.COPY_DST
  });
  device.queue.writeBuffer(matrixUniformBuffer, 0, viewProjectionMatrix);

  return matrixUniformBuffer;
}

// WGSL shader source - vertex stage transforms the quad by the uniform
// view-projection matrix and passes the uvs through; fragment stage samples
// the bound texture at the interpolated uvs.
// (fix: the original listing was missing the opening '{' of vsmain's body.)
let shaderWGSL = `
@group(0) @binding(0) var<uniform> viewProjectionmMatrix : mat4x4<f32>;

struct vsout {
    @builtin(position) Position: vec4<f32>,
    @location(0)       uvs     : vec2<f32>
};

@vertex 
fn vsmain(@location(0) pos : vec3<f32>,
          @location(1) uvs : vec2<f32>) -> vsout
{
    var r:vsout;
    r.Position = viewProjectionmMatrix * vec4<f32>(pos, 1.0);
    r.uvs      = uvs;
    return r;
}

@group(0) @binding(1) var mySampler: sampler;
@group(0) @binding(2) var myTexture: texture_2d<f32>;

@fragment 
fn psmain(@location(0) uvs: vec2<f32>) -> @location(0) vec4<f32> 
{
    return textureSample(myTexture, mySampler, uvs );
    //return vec4<f32>(1.0, 0.0, 0.5, 1.0);
}
`;

// Build all GPU resources: texture+sampler, quad buffers, matrix uniform,
// compiled shader module.
const textureData         = createTexture( device );
const squareBuffer        = createTexturedSquare( device );
const matrixUniformBuffer = createMatrixUniform();
const shaderModule        = device.createShaderModule({ code: shaderWGSL });

// Define the layout information for the shader (uniforms) -
// binding 0: matrix (vertex), binding 1: sampler, binding 2: texture (fragment).
const sceneUniformBindGroupLayout = device.createBindGroupLayout({
  entries: [{ binding: 0, visibility: GPUShaderStage.VERTEX,   buffer:  { type: "uniform" }    },
            { binding: 1, visibility: GPUShaderStage.FRAGMENT, sampler: { type: "filtering" }  },
            { binding: 2, visibility: GPUShaderStage.FRAGMENT, texture: { sampleType: "float", viewDimension: "2d" } },
           ]
});

// Attach the actual resources to the bindings declared above.
const sceneUniformBindGroup = device.createBindGroup({
  layout: sceneUniformBindGroupLayout,
  entries: [{ binding: 0, resource: { buffer: matrixUniformBuffer }    },
            { binding: 1, resource: textureData.s                      },
            { binding: 2, resource: textureData.t.createView()         },
           ]
});

// Render pipeline: two vertex buffers (positions vec3, uvs vec2) streamed to
// shader locations 0/1, drawing a triangle-strip to the canvas format.
const pipeline = device.createRenderPipeline({
  layout: device.createPipelineLayout({ bindGroupLayouts: [sceneUniformBindGroupLayout] }),
  vertex:      {   module: shaderModule, entryPoint: 'vsmain',
                   buffers: [
                            { arrayStride: 4*3, attributes: [ { shaderLocation: 0, offset: 0, format: 'float32x3' } ] },
                            { arrayStride: 4*2, attributes: [ { shaderLocation: 1, offset: 0, format: 'float32x2' } ] }
                            ]
               },
  fragment:    {   module: shaderModule, entryPoint: 'psmain',
                   targets: [ { format: presentationFormat } ]
               },
  primitive:   {   topology: 'triangle-strip' },
});

// Records and submits one frame: clears the canvas to light grey then draws
// the textured quad (4 vertices, triangle-strip).
function draw() 
{
  const commandEncoder = device.createCommandEncoder();
  const renderPassDescriptor =                                       { // GPURenderPassDescriptor 
             colorAttachments: [ { view:       context.getCurrentTexture().createView(),
                                   loadOp:     "clear",
                                   clearValue: [0.8, 0.8, 0.8, 1], // clear screen to color/rgba
                                   storeOp:    'store' } ]           };
  const passEncoder = commandEncoder.beginRenderPass(renderPassDescriptor);
  // setViewport requires all six arguments - including minDepth/maxDepth.
  passEncoder.setViewport(0.0, 0.0,                    // x, y
                          canvas.width, canvas.height, // width, height
                          0, 1);                       // minDepth, maxDepth
  passEncoder.setPipeline(pipeline);
  passEncoder.setVertexBuffer(0, squareBuffer.v); // positions -> location 0
  passEncoder.setVertexBuffer(1, squareBuffer.t); // uvs       -> location 1
  passEncoder.setBindGroup(0, sceneUniformBindGroup);
  passEncoder.draw(4, 1, 0, 0); // 4 vertices, 1 instance
  passEncoder.end();
  device.queue.submit([commandEncoder.finish()]);
  //requestAnimationFrame(frame);
}
draw();
console.log('done...');





Resources and Links


• WebGPU Lab Example [LINK]


























WebGPU by Example: Fractals, Image Effects, Ray-Tracing, Procedural Geometry, 2D/3D, Particles, Simulations WebGPU Compute graphics and animations using the webgpu api 12 week course kenwright learn webgpu api kenwright programming compute and graphics applications with html5 and webgpu api kenwright real-time 3d graphics with webgpu kenwright webgpu api develompent a quick start guide kenwright webgpu by example 2022 kenwright webgpu gems kenwright webgpu interactive compute and graphics visualization cookbook kenwright wgsl webgpu shading language cookbook kenwright wgsl webgpugems shading language cookbook kenwright



 
Advert (Support Website)

 
 Visitor:
Copyright (c) 2002-2025 xbdev.net - All rights reserved.
Designated articles, tutorials and software are the property of their respective owners.