www.xbdev.net
xbdev - software development
Thursday February 19, 2026
Home | Contact | Support | WebGPU Graphics and Compute ... | WebGPU.. Games, Tutorials, Demos, Projects, and Code.....
     
 

WebGPU..

Games, Tutorials, Demos, Projects, and Code.....

 


Texture Mapping (Loading Textures)


While in the last tutorial we added a texture - this texture was procedural - we didn't load it from a file. We created a WebGPU texture buffer - and filled it with a test pattern. Now, we take it a bit further by showing you how to load a texture from a URL.



Loaded texture mapped to the surface of a square.
Loaded texture mapped to the surface of a square.


Functions Used: setVertexBuffer(), setIndexBuffer(), drawIndexed(), createBuffer(), getMappedRange(), getContext(), requestAdapter(), getPreferredCanvasFormat(), createCommandEncoder(), beginRenderPass(), setPipeline(), draw(), end(), submit(), getCurrentTexture(), createView(), createShaderModule()

The first thing you have to be aware of is that you're downloading your images over an internet connection - and that connection introduces a delay. Especially for large files - they won't just download instantly!

In the
loadTexture
function - we use async/await so that we can be sure the image has fully downloaded before we try to copy it to the WebGPU texture buffer.

A nifty little trick to check that the image is good to go, is
img.decode()
- which will only be finished when the image has been loaded and decoded - so if this has finished the image is safe to use.


/**
 * Load an image from a URL and upload its pixels into a WebGPU texture.
 * Uses img.decode() so the pixels are guaranteed ready before readback.
 * NOTE(review): relies on module-level `device` created elsewhere in the file.
 *
 * @param {string} fileName - URL of the image to load (must be CORS-accessible).
 * @returns {Promise<{w:number, h:number, s:GPUSampler, t:GPUTexture}>}
 *          width, height, sampler and texture for binding.
 */
async function loadTexture( fileName = "https://webgpulab.xbdev.net/var/images/earth.jpg" )
{
  // Load image
  const img = document.createElement("img");
  img.src = fileName;

  // decode() resolves only once the image is fully downloaded AND decoded,
  // so after this await the pixel data is safe to use.
  await img.decode();

  const imgWidth  = img.width;
  const imgHeight = img.height;

  // Draw the image into an offscreen 2d canvas so we can read back raw RGBA bytes.
  const imageCanvas = document.createElement('canvas');
  imageCanvas.width  = imgWidth;
  imageCanvas.height = imgHeight;
  const imageCanvasContext = imageCanvas.getContext('2d');
  imageCanvasContext.drawImage(img, 0, 0, imgWidth, imgHeight);
  const imageData   = imageCanvasContext.getImageData(0, 0, imgWidth, imgHeight);
  const textureData = imageData.data;
  console.log('textureData.byteLength:', textureData.byteLength);

  // Create a texture and a sampler using WebGPU
  const sampler = device.createSampler({
    minFilter: "linear",
    magFilter: "linear"
  });

  const texture = device.createTexture({
    size:   [imgWidth, imgHeight, 1],
    format: "rgba8unorm",
    usage:  GPUTextureUsage.COPY_DST | GPUTextureUsage.TEXTURE_BINDING
  });

  device.queue.writeTexture(
      { texture },
      textureData,
      { bytesPerRow: imgWidth * 4 },  // rgba8unorm = 4 bytes per texel
      [ imgWidth, imgHeight ]
  );
  return { w: imgWidth, h: imgHeight, s: sampler, t: texture };
}
// end loadTexture(..)


The rest of the code is pretty similar to the previous tutorial.


// Load matrix library dynamically (on-the-fly): fetch the gl-matrix source
// and inject it as a <script> element so `mat4` etc. become globals.
const matprom = await fetch( 'https://cdnjs.cloudflare.com/ajax/libs/gl-matrix/2.6.0/gl-matrix-min.js' );
const mattex  = await matprom.text();
const script  = document.createElement('script');
script.type      = 'text/javascript';
script.innerHTML = mattex;
document.head.appendChild(script);

// ------------- create canvas and initialise WebGPU -------------
const canvas = document.createElement('canvas');
document.body.appendChild( canvas );
canvas.height = canvas.width = 512;

const context = canvas.getContext('webgpu');
const adapter = await navigator.gpu.requestAdapter();
const device  = await adapter.requestDevice();
const presentationFormat = navigator.gpu.getPreferredCanvasFormat();
context.configure({ device: device, format: presentationFormat });

/**
 * Load an image from a URL and upload its pixels into a WebGPU texture.
 * img.decode() guarantees the image is fully loaded/decoded before readback.
 *
 * @param {string} fileName - URL of the image to load (must be CORS-accessible).
 * @returns {Promise<{w:number, h:number, s:GPUSampler, t:GPUTexture}>}
 *          width, height, sampler and texture for binding.
 */
async function loadTexture( fileName = "https://webgpulab.xbdev.net/var/images/test512.png" )
{
  console.log('loading image:', fileName);
  // Load image
  const img = document.createElement("img");
  img.src = fileName;

  // decode() resolves only once the image is fully downloaded AND decoded.
  await img.decode();

  const imgWidth  = img.width;
  const imgHeight = img.height;

  // Draw the image into an offscreen 2d canvas so we can read back raw RGBA bytes.
  const imageCanvas = document.createElement('canvas');
  imageCanvas.width  = imgWidth;
  imageCanvas.height = imgHeight;
  const imageCanvasContext = imageCanvas.getContext('2d');
  imageCanvasContext.drawImage(img, 0, 0, imgWidth, imgHeight);
  const imageData   = imageCanvasContext.getImageData(0, 0, imgWidth, imgHeight);
  const textureData = imageData.data;
  console.log('textureData.byteLength:', textureData.byteLength);

  // Create a texture and a sampler using WebGPU
  const sampler = device.createSampler({
    minFilter: "linear",
    magFilter: "linear"
  });

  const basicTexture = device.createTexture({
    size:   [imgWidth, imgHeight, 1],
    format: "rgba8unorm",
    usage:  GPUTextureUsage.COPY_DST | GPUTextureUsage.TEXTURE_BINDING
  });

  // writeTexture() is synchronous (returns undefined) - no await needed.
  device.queue.writeTexture(
      { texture: basicTexture },
      textureData,
      { bytesPerRow: imgWidth * 4 },  // rgba8unorm = 4 bytes per texel
      [ imgWidth, imgHeight ]
  );
  return { w: imgWidth, h: imgHeight, s: sampler, t: basicTexture };
}
// end loadTexture(..)

/**
 * Create position and UV vertex buffers for a unit square (triangle-strip order).
 *
 * @param {GPUDevice} device - WebGPU device used to allocate the buffers.
 * @returns {{v:GPUBuffer, t:GPUBuffer}} position buffer (v) and uv buffer (t).
 */
function createTexturedSquare( device )
{
  // 4 vertices at z=0, ordered for a 'triangle-strip' topology.
  const positionVertex = new Float32Array([
     0.5,    0.5,   0.0,
    -0.5,    0.5,   0.0,
     0.5,   -0.5,   0.0,
    -0.5,   -0.5,   0.0
  ]);
  const vBuffer = device.createBuffer({ size:  positionVertex.byteLength,
                                        usage: GPUBufferUsage.VERTEX | GPUBufferUsage.COPY_DST });
  device.queue.writeBuffer(vBuffer, 0, positionVertex);

  // One (u,v) pair per vertex, matching the strip order above.
  const uvVertex = new Float32Array([
     1.0,   0.0,
     0.0,   0.0,
     1.0,   1.0,
     0.0,   1.0,
  ]);
  const uvBuffer = device.createBuffer({ size:  uvVertex.byteLength,
                                        usage: GPUBufferUsage.VERTEX | GPUBufferUsage.COPY_DST });
  device.queue.writeBuffer(uvBuffer, 0, uvVertex);

  // return the vertex and texture-coordinate buffers
  return { v: vBuffer, t: uvBuffer };
}

/**
 * Build a view-projection matrix with gl-matrix and upload it to a
 * WebGPU uniform buffer.
 * NOTE(review): relies on module-level `device` and `canvas`, and on the
 * gl-matrix `mat4` global injected earlier in the file.
 *
 * @returns {GPUBuffer} uniform buffer holding the 4x4 view-projection matrix.
 */
function createMatrixUniform( )
{
  // Create the matrices in JavaScript (using the gl-matrix library)
  const projectionMatrix     = mat4.create();
  const viewMatrix           = mat4.create();
  const viewProjectionMatrix = mat4.create();

  // 90-degree vertical FOV; near/far chosen to comfortably contain the square.
  mat4.perspective(projectionMatrix, Math.PI / 2, canvas.width / canvas.height, 0.001, 500.0);
  mat4.lookAt(viewMatrix, [0, 0, 1.0], [0, 0, 0], [0, 1, 0]);
  mat4.multiply(viewProjectionMatrix, projectionMatrix, viewMatrix);

  // Create a buffer using the WebGPU API (copy the matrix into it)
  const matrixUniformBuffer = device.createBuffer({
     size:  viewProjectionMatrix.byteLength,
     usage: GPUBufferUsage.UNIFORM | GPUBufferUsage.COPY_DST
  });
  device.queue.writeBuffer(matrixUniformBuffer, 0, viewProjectionMatrix);

  return matrixUniformBuffer;
}

// WGSL shader: vertex stage transforms positions by the view-projection
// matrix and passes UVs through; fragment stage samples the texture.
// (Fixed: the opening '{' of vsmain's body was missing.)
let shaderWGSL = `
@group(0) @binding(0) var<uniform> viewProjectionmMatrix : mat4x4<f32>;

struct vsout {
    @builtin(position) Position: vec4<f32>,
    @location(0)       uvs     : vec2<f32>
};

@vertex 
fn vsmain(@location(0) pos : vec3<f32>,
          @location(1) uvs : vec2<f32>) -> vsout
{
    var r:vsout;
    r.Position = viewProjectionmMatrix * vec4<f32>(pos, 1.0);
    r.uvs      = uvs;
    return r;
}

@group(0) @binding(1) var mySampler: sampler;
@group(0) @binding(2) var myTexture: texture_2d<f32>;

@fragment 
fn psmain(@location(0) uvs: vec2<f32>) -> @location(0) vec4<f32> 
{
    return textureSample(myTexture, mySampler, uvs );
    //return vec4<f32>(1.0, 0.0, 0.5, 1.0);
}
`;

// Load resources and compile the shader module.
const textureData         = await loadTexture( );
const squareBuffer        = createTexturedSquare( device );
const matrixUniformBuffer = createMatrixUniform();
const shaderModule        = device.createShaderModule({ code: shaderWGSL });

// Define the layout information for the shader (uniforms)
const sceneUniformBindGroupLayout = device.createBindGroupLayout({
  entries: [{ binding: 0, visibility: GPUShaderStage.VERTEX,   buffer:  { type: "uniform" }   },
            { binding: 1, visibility: GPUShaderStage.FRAGMENT, sampler: { type: "filtering" } },
            { binding: 2, visibility: GPUShaderStage.FRAGMENT, texture: { sampleType: "float", viewDimension: "2d" } },
           ]
});

// Bind the actual resources (matrix buffer, sampler, texture view) to the layout.
const sceneUniformBindGroup = device.createBindGroup({
  layout: sceneUniformBindGroupLayout,
  entries: [{ binding: 0, resource: { buffer: matrixUniformBuffer } },
            { binding: 1, resource: textureData.s               },
            { binding: 2, resource: textureData.t.createView()  },
           ]
});

// Render pipeline: two vertex buffers (position float32x3, uv float32x2),
// drawn as a triangle strip into the canvas's preferred format.
const pipeline = device.createRenderPipeline({
  layout: device.createPipelineLayout({ bindGroupLayouts: [sceneUniformBindGroupLayout] }),
  vertex:      {   module: shaderModule, entryPoint: 'vsmain',
                   buffers: [
                            { arrayStride: 4*3, attributes: [ { shaderLocation: 0, offset: 0, format: 'float32x3' } ] },
                            { arrayStride: 4*2, attributes: [ { shaderLocation: 1, offset: 0, format: 'float32x2' } ] }
                            ]
               },
  fragment:    {   module: shaderModule, entryPoint: 'psmain',
                   targets: [ { format: presentationFormat } ]
               },
  primitive:   {   topology: 'triangle-strip' },
});

/**
 * Record and submit a single render pass that clears the canvas and draws
 * the textured square (4-vertex triangle strip).
 */
function draw() 
{
  const commandEncoder = device.createCommandEncoder();
  const renderPassDescriptor = {                       // GPURenderPassDescriptor
             colorAttachments: [ { view:       context.getCurrentTexture().createView(),
                                   loadOp:     "clear",
                                   clearValue: [0.8, 0.8, 0.8, 1], // clear screen to color/rgba
                                   storeOp:    'store' } ]         };
  const passEncoder = commandEncoder.beginRenderPass(renderPassDescriptor);
  passEncoder.setViewport(0.0, 0.0,                    // x, y
                          canvas.width, canvas.height, // width, height
                          0, 1);                       // minDepth, maxDepth (both required)
  passEncoder.setPipeline(pipeline);
  passEncoder.setVertexBuffer(0, squareBuffer.v);
  passEncoder.setVertexBuffer(1, squareBuffer.t);
  passEncoder.setBindGroup(0, sceneUniformBindGroup);
  passEncoder.draw(4, 1, 0, 0);   // 4 vertices, 1 instance
  passEncoder.end();
  device.queue.submit([commandEncoder.finish()]);
  //requestAnimationFrame(frame);
}
draw();
console.log('done...');



Resources and Links


• WebGPU Lab Example [LINK]




















WebGPU by Example: Fractals, Image Effects, Ray-Tracing, Procedural Geometry, 2D/3D, Particles, Simulations WebGPU Compute graphics and animations using the webgpu api 12 week course kenwright learn webgpu api kenwright programming compute and graphics applications with html5 and webgpu api kenwright real-time 3d graphics with webgpu kenwright webgpu api develompent a quick start guide kenwright webgpu by example 2022 kenwright webgpu gems kenwright webgpu interactive compute and graphics visualization cookbook kenwright wgsl webgpu shading language cookbook kenwright wgsl webgpugems shading language cookbook kenwright



 
Advert (Support Website)

 
 Visitor:
Copyright (c) 2002-2025 xbdev.net - All rights reserved.
Designated articles, tutorials and software are the property of their respective owners.