Lighting
Add Phong shading to a textured cube using luma.gl's shader module system.
It is assumed you've set up your development environment as described in Setup.
The base shaders handle geometry and texture sampling. Lighting calculations are
delegated to shader modules which implement the Phong shading model and a
configurable material. A ShaderInputs instance wires module uniforms together
and exposes light and material parameters to JavaScript. The current example
uses a DynamicTexture for the logo image and lets the shader modules reserve
their own binding slots in the generated pipeline layout.
The built-in lighting module accepts lights: Light[] on the JavaScript side,
but packs those values into a fixed-size, trailing array of light structs inside
its uniform block for portability across WebGL2 and WebGPU. The shader reads the
active portion of that array using pointLightCount, spotLightCount, and
directionalLightCount.
The example below introduces the lighting and phongMaterial shader modules. A
ShaderInputs instance manages uniform blocks and module settings, while the
Model is supplied with both WGSL and GLSL shaders for cross-platform rendering.
The WGSL side uses @binding(auto) throughout, so the binding numbers are
generated during assembly and the application code only binds resources by name.
import {NumberArray} from '@luma.gl/core';
import type {AnimationProps} from '@luma.gl/engine';
import {
AnimationLoopTemplate,
makeAnimationLoop,
Model,
CubeGeometry,
ShaderInputs,
loadImageBitmap,
DynamicTexture
} from '@luma.gl/engine';
import {lighting, phongMaterial, ShaderModule} from '@luma.gl/shadertools';
import {Matrix4} from '@math.gl/core';
import {webgl2Adapter} from '@luma.gl/webgl';
import {webgpuAdapter} from '@luma.gl/webgpu';
// WGSL shader pair (vertex + fragment) used on the WebGPU path.
// `@binding(auto)` lets luma.gl assign binding slots during shader assembly,
// so the application binds resources by name rather than by index.
// NOTE(review): `lighting_getLightColor2` is not defined in this string — it
// is presumably injected by the `lighting` shader module at assembly time;
// confirm against the module's WGSL export.
const WGSL_SHADER = /* wgsl */ `
struct Uniforms {
modelMatrix : mat4x4<f32>,
mvpMatrix : mat4x4<f32>,
eyePosition : vec3<f32>,
};
@group(0) @binding(auto) var<uniform> app : Uniforms;
@group(0) @binding(auto) var uTexture : texture_2d<f32>;
@group(0) @binding(auto) var uTextureSampler : sampler;
struct VertexInputs {
@location(0) positions : vec3<f32>,
@location(1) normals : vec3<f32>,
@location(2) texCoords : vec2<f32>
};
struct FragmentInputs {
@builtin(position) Position : vec4<f32>,
@location(0) fragUV : vec2<f32>,
@location(1) fragPosition: vec3<f32>,
@location(2) fragNormal: vec3<f32>
}
@vertex
fn vertexMain(inputs: VertexInputs) -> FragmentInputs {
var outputs : FragmentInputs;
outputs.Position = app.mvpMatrix * vec4<f32>(inputs.positions, 1);
outputs.fragUV = inputs.texCoords;
outputs.fragPosition = (app.modelMatrix * vec4<f32>(inputs.positions, 1.0)).xyz;
let mat3 = mat3x3(app.modelMatrix[0].xyz, app.modelMatrix[1].xyz, app.modelMatrix[2].xyz);
outputs.fragNormal = mat3 * inputs.normals;
return outputs;
}
@fragment
fn fragmentMain(inputs: FragmentInputs) -> @location(0) vec4<f32> {
let surfaceColor = textureSample(
uTexture,
uTextureSampler,
vec2<f32>(inputs.fragUV.x, 1.0 - inputs.fragUV.y)
).rgb;
let litColor = lighting_getLightColor2(
surfaceColor,
app.eyePosition,
inputs.fragPosition,
normalize(inputs.fragNormal)
);
return vec4<f32>(litColor, 1.0);
}
`;
// GLSL vertex shader for the WebGL2 path. Declares the same `appUniforms`
// block as the WGSL shader. When the lighting module is configured for
// per-vertex lighting (LIGHTING_VERTEX define), the lit color is computed
// here and forwarded to the fragment stage as vColor.
// NOTE(review): `lighting_getLightColor` is presumably injected by the
// `lighting` shader module at assembly time — it is not defined here.
const VS_GLSL = /* glsl */ `
#version 300 es
in vec3 positions;
in vec3 normals;
in vec2 texCoords;
out vec3 vPosition;
out vec3 vNormal;
out vec2 vUV;
out vec3 vColor;
uniform appUniforms {
mat4 modelMatrix;
mat4 mvpMatrix;
vec3 eyePosition;
} app;
void main(void) {
vPosition = (app.modelMatrix * vec4(positions, 1.0)).xyz;
vNormal = mat3(app.modelMatrix) * normals;
vUV = texCoords;
#ifdef LIGHTING_VERTEX
vColor = lighting_getLightColor(vec3(1.0), app.eyePosition, vPosition, normalize(vNormal));
#endif
gl_Position = app.mvpMatrix * vec4(positions, 1.0);
}
`;
// GLSL fragment shader for the WebGL2 path. In per-fragment mode
// (LIGHTING_FRAGMENT define) it samples the texture and applies the lighting
// module's color function; in per-vertex mode it just outputs the
// interpolated vColor computed in the vertex stage.
const FS_GLSL = /* glsl */ `
#version 300 es
precision highp float;
in vec3 vPosition;
in vec3 vNormal;
in vec2 vUV;
in vec3 vColor;
uniform sampler2D uTexture;
uniform appUniforms {
mat4 modelMatrix;
mat4 mvpMatrix;
vec3 eyePosition;
} app;
out vec4 fragColor;
void main(void) {
#ifdef LIGHTING_FRAGMENT
vec3 surfaceColor = texture(uTexture, vec2(vUV.x, 1.0 - vUV.y)).rgb;
surfaceColor = lighting_getLightColor(surfaceColor, app.eyePosition, vPosition, normalize(vNormal));
fragColor = vec4(surfaceColor, 1.0);
#endif
#ifdef LIGHTING_VERTEX
fragColor = vec4(vColor, 1.0);
#endif
}
`;
// JavaScript-side shape of the `app` uniform block declared in both shaders.
type AppUniforms = {
modelMatrix: NumberArray;
mvpMatrix: NumberArray;
eyePosition: NumberArray;
};
// Shader-module descriptor that lets ShaderInputs lay out and upload the
// uniform block. The `name: 'app'` matches the `app` block instance used in
// the WGSL and GLSL shaders above.
// NOTE(review): field order here presumably must match the shader-side block
// layout — confirm before reordering.
const app: ShaderModule<AppUniforms, AppUniforms> = {
name: 'app',
uniformTypes: {
modelMatrix: 'mat4x4<f32>',
mvpMatrix: 'mat4x4<f32>',
eyePosition: 'vec3<f32>'
}
};
// Camera position; also fed to the lighting model via the app uniforms.
const eyePosition = [0, 0, 5];
/**
 * Animation loop that renders a rotating, textured cube shaded by the
 * `lighting` and `phongMaterial` shader modules (one ambient light, four
 * spot lights, and one directional light).
 */
class AppAnimationLoopTemplate extends AnimationLoopTemplate {
  model: Model;

  /**
   * Wires the app/lighting/phongMaterial uniform blocks together and exposes
   * their settings to JavaScript via setProps().
   */
  shaderInputs = new ShaderInputs<{
    app: typeof app.props;
    lighting: typeof lighting.props;
    phongMaterial: typeof phongMaterial.props;
  }>({app, lighting, phongMaterial});

  modelMatrix = new Matrix4();
  // Fixed camera looking at the origin from `eyePosition`.
  viewMatrix = new Matrix4().lookAt({eye: eyePosition});
  mvpMatrix = new Matrix4();

  constructor({device}: AnimationProps) {
    super();

    // One-time light/material setup; per-frame matrices are written in onRender().
    this.shaderInputs.setProps({
      app: {
        eyePosition
      },
      lighting: {
        useByteColors: false,
        lights: [
          {type: 'ambient', color: [1, 1, 1], intensity: 0.15},
          {
            type: 'spot',
            color: [1, 0.47, 0.04],
            position: [2, 4, 3],
            direction: [-2, -4, -3],
            innerConeAngle: 0.2,
            outerConeAngle: 0.55
          },
          {
            type: 'spot',
            color: [0, 1, 0.04],
            position: [-2, 1, 3],
            direction: [2, -1, -3],
            innerConeAngle: 0.2,
            outerConeAngle: 0.5
          },
          {
            type: 'spot',
            color: [0.31, 0.63, 1],
            position: [-3, -2, 2],
            direction: [3, 2, -2],
            innerConeAngle: 0.2,
            outerConeAngle: 0.6
          },
          {
            type: 'spot',
            color: [1, 0.31, 0.71],
            position: [3, -3, 2],
            direction: [-3, 3, -2],
            innerConeAngle: 0.25,
            outerConeAngle: 0.7
          },
          {type: 'directional', color: [1, 1, 0.86], direction: [-1, -0.5, -1]}
        ]
      },
      phongMaterial: {specularColor: [1, 1, 1], useByteColors: false, shininess: 100}
    });

    // DynamicTexture is handed the image promise directly; it uploads the
    // bitmap when loadImageBitmap() resolves.
    const texture = new DynamicTexture(device, {data: loadImageBitmap('vis-logo.png')});

    this.model = new Model(device, {
      source: WGSL_SHADER, // WebGPU path
      vs: VS_GLSL, // WebGL2 path
      fs: FS_GLSL,
      shaderInputs: this.shaderInputs,
      geometry: new CubeGeometry(),
      instanceCount: 1,
      bindings: {uTexture: texture},
      parameters: {depthWriteEnabled: true, depthCompare: 'less-equal'}
    });
  }

  onFinalize(): void {
    this.model.destroy();
  }

  // Fix: the destructured parameter was untyped (implicit `any` under
  // strict mode); annotate with AnimationProps to match the constructor.
  onRender({device, aspect, tick}: AnimationProps): void {
    // Spin the cube at slightly different rates per axis.
    this.modelMatrix
      .identity()
      .rotateX(tick * 0.01)
      .rotateY(tick * 0.013);

    // mvp = projection * view * model
    this.mvpMatrix
      .perspective({fovy: Math.PI / 3, aspect})
      .multiplyRight(this.viewMatrix)
      .multiplyRight(this.modelMatrix);

    this.shaderInputs.setProps({
      app: {modelMatrix: this.modelMatrix, mvpMatrix: this.mvpMatrix}
    });

    const renderPass = device.beginRenderPass({clearColor: [0, 0, 0, 1], clearDepth: true});
    this.model.draw(renderPass);
    renderPass.end();
  }
}
// Create and start the loop. Both adapters are supplied so the example runs
// on WebGPU and on WebGL2; WebGPU is listed first and is presumably tried
// first — confirm adapter selection order in makeAnimationLoop.
const animationLoop = makeAnimationLoop(AppAnimationLoopTemplate, {
adapters: [webgpuAdapter, webgl2Adapter]
});
animationLoop.start();
Each frame the animation loop updates the model and projection matrices, writes
them through ShaderInputs, and draws the cube. The lighting module then
computes diffuse and specular contributions so the textured cube appears shaded
under the configured spot lights, directional light, and ambient light.