Lighting
Add Phong shading to a textured cube using luma.gl's shader module system.
Tutorials are maintained on a best-effort basis and may not be fully up to date (contributions welcome).
Drawing a Phong-shaded cube
It is assumed you've set up your development environment as described in Setup.
The base shaders handle geometry and texture sampling. Lighting calculations are
delegated to shader modules which implement the Phong shading model and a
configurable material. A ShaderInputs instance wires module uniforms together
and exposes light and material parameters to JavaScript.
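Conceptually, the wiring looks like this. The snippet below is a condensed sketch of the same calls used in the full example that follows, showing only the lighting and phongMaterial modules:

import {ShaderInputs} from '@luma.gl/engine';
import {lighting, phongMaterial} from '@luma.gl/shadertools';

// Declare which shader modules contribute uniform blocks.
const shaderInputs = new ShaderInputs<{
  lighting: typeof lighting.props;
  phongMaterial: typeof phongMaterial.props;
}>({lighting, phongMaterial});

// Light and material parameters are plain JavaScript props;
// ShaderInputs turns them into the modules' uniform values.
shaderInputs.setProps({
  lighting: {lights: [{type: 'ambient', color: [255, 255, 255]}]},
  phongMaterial: {shininess: 100}
});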
The example below introduces the lighting and phongMaterial shader modules. A
ShaderInputs instance manages uniform blocks and module settings, while the
Model is supplied with both WGSL and GLSL shaders for cross-platform rendering.
import type {NumberArray} from '@luma.gl/core';
import type {AnimationProps} from '@luma.gl/engine';
import {
  AnimationLoopTemplate,
  Model,
  CubeGeometry,
  ShaderInputs,
  makeAnimationLoop,
  loadImageBitmap,
  AsyncTexture
} from '@luma.gl/engine';
import {lighting, phongMaterial, type ShaderModule} from '@luma.gl/shadertools';
import {Matrix4} from '@math.gl/core';
import {webgl2Adapter} from '@luma.gl/webgl';
import {webgpuAdapter} from '@luma.gl/webgpu';
const WGSL_SHADER = /* wgsl */ `
struct Uniforms {
  modelMatrix : mat4x4<f32>,
  mvpMatrix : mat4x4<f32>,
  eyePosition : vec3<f32>,
};

@binding(0) @group(0) var<uniform> app : Uniforms;
@group(0) @binding(1) var uTexture : texture_2d<f32>;
@group(0) @binding(2) var uTextureSampler : sampler;

struct VertexInputs {
  @location(0) positions : vec3<f32>,
  @location(1) normals : vec3<f32>,
  @location(2) texCoords : vec2<f32>
};

struct FragmentInputs {
  @builtin(position) Position : vec4<f32>,
  @location(0) fragUV : vec2<f32>,
  @location(1) fragPosition: vec3<f32>,
  @location(2) fragNormal: vec3<f32>
}

@vertex
fn vertexMain(inputs: VertexInputs) -> FragmentInputs {
  var outputs : FragmentInputs;
  // mvpMatrix already contains the model transform (see onRender below).
  outputs.Position = app.mvpMatrix * vec4(inputs.positions, 1.0);
  outputs.fragUV = inputs.texCoords;
  outputs.fragPosition = (app.modelMatrix * vec4(inputs.positions, 1.0)).xyz;
  // Use the rotation part of the model matrix to transform normals.
  let normalMatrix = mat3x3(app.modelMatrix[0].xyz, app.modelMatrix[1].xyz, app.modelMatrix[2].xyz);
  outputs.fragNormal = normalMatrix * inputs.normals;
  return outputs;
}

// Note: the WGSL fragment shader only samples the texture; the Phong lighting
// is applied in the GLSL fragment shader below.
@fragment
fn fragmentMain(inputs: FragmentInputs) -> @location(0) vec4<f32> {
  return textureSample(uTexture, uTextureSampler, inputs.fragUV);
}
`;
const VS_GLSL = /* glsl */ `
#version 300 es

in vec3 positions;
in vec3 normals;
in vec2 texCoords;

out vec3 vPosition;
out vec3 vNormal;
out vec2 vUV;

uniform appUniforms {
  mat4 modelMatrix;
  mat4 mvpMatrix;
  vec3 eyePosition;
} app;

void main(void) {
  vPosition = (app.modelMatrix * vec4(positions, 1.0)).xyz;
  vNormal = mat3(app.modelMatrix) * normals;
  vUV = texCoords;
  gl_Position = app.mvpMatrix * vec4(positions, 1.0);
}
`;
const FS_GLSL = /* glsl */ `
#version 300 es
precision highp float;

in vec3 vPosition;
in vec3 vNormal;
in vec2 vUV;

uniform sampler2D uTexture;

uniform appUniforms {
  mat4 modelMatrix;
  mat4 mvpMatrix;
  vec3 eyePosition;
} app;

out vec4 fragColor;

void main(void) {
  vec3 surfaceColor = texture(uTexture, vec2(vUV.x, 1.0 - vUV.y)).rgb;
  // lighting_getLightColor is injected by the lighting and phongMaterial shader modules.
  surfaceColor = lighting_getLightColor(surfaceColor, app.eyePosition, vPosition, normalize(vNormal));
  fragColor = vec4(surfaceColor, 1.0);
}
`;
type AppUniforms = {
  modelMatrix: NumberArray;
  mvpMatrix: NumberArray;
  eyePosition: NumberArray;
};

const app: ShaderModule<AppUniforms, AppUniforms> = {
  name: 'app',
  uniformTypes: {
    modelMatrix: 'mat4x4<f32>',
    mvpMatrix: 'mat4x4<f32>',
    eyePosition: 'vec3<f32>'
  }
};
const eyePosition = [0, 0, 5];
class AppAnimationLoopTemplate extends AnimationLoopTemplate {
  model: Model;

  // Wires the app, lighting and phongMaterial uniform blocks together.
  shaderInputs = new ShaderInputs<{
    app: typeof app.props;
    lighting: typeof lighting.props;
    phongMaterial: typeof phongMaterial.props;
  }>({app, lighting, phongMaterial});

  modelMatrix = new Matrix4();
  viewMatrix = new Matrix4().lookAt({eye: eyePosition});
  mvpMatrix = new Matrix4();

  constructor({device}: AnimationProps) {
    super();

    // One ambient light and one point light, plus a shiny Phong material.
    this.shaderInputs.setProps({
      lighting: {
        lights: [
          {type: 'ambient', color: [255, 255, 255]},
          {type: 'point', color: [255, 255, 255], position: [1, 2, 1]}
        ]
      },
      phongMaterial: {specularColor: [255, 255, 255], shininess: 100}
    });

    const texture = new AsyncTexture(device, {data: loadImageBitmap('vis-logo.png')});

    this.model = new Model(device, {
      source: WGSL_SHADER,
      vs: VS_GLSL,
      fs: FS_GLSL,
      shaderInputs: this.shaderInputs,
      geometry: new CubeGeometry(),
      bindings: {uTexture: texture},
      parameters: {depthWriteEnabled: true, depthCompare: 'less-equal'}
    });
  }

  onFinalize() {
    this.model.destroy();
  }

  onRender({device, aspect, tick}: AnimationProps) {
    // Spin the cube and rebuild the combined model-view-projection matrix.
    this.modelMatrix.identity().rotateX(tick * 0.01).rotateY(tick * 0.013);
    this.mvpMatrix
      .perspective({fovy: Math.PI / 3, aspect})
      .multiplyRight(this.viewMatrix)
      .multiplyRight(this.modelMatrix);
    this.shaderInputs.setProps({
      app: {modelMatrix: this.modelMatrix, mvpMatrix: this.mvpMatrix, eyePosition}
    });

    const renderPass = device.beginRenderPass({clearColor: [0, 0, 0, 1], clearDepth: 1});
    this.model.draw(renderPass);
    renderPass.end();
  }
}
const animationLoop = makeAnimationLoop(AppAnimationLoopTemplate, {
  adapters: [webgpuAdapter, webgl2Adapter]
});
animationLoop.start();
Each frame, the animation loop updates the model matrix and the combined model-view-projection matrix, writes them to the app uniform block through ShaderInputs, and draws the cube. In the GLSL fragment shader, the lighting and phongMaterial modules compute the ambient, diffuse, and specular contributions, so the textured cube appears shaded by a point light and an ambient light.
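Because the light and material parameters flow through the same ShaderInputs instance, they can also be changed while the app is running. The sketch below is a hypothetical extension, not part of the tutorial code: the updateLight helper and its motion constants are made up for illustration, but it relies only on the setProps call already shown above, so calling it from onRender each frame makes the point light orbit the cube.

import {ShaderInputs} from '@luma.gl/engine';
import {lighting} from '@luma.gl/shadertools';

// Hypothetical helper: pass the example's shaderInputs instance and the current tick.
function updateLight(
  shaderInputs: ShaderInputs<{lighting: typeof lighting.props}>,
  tick: number
): void {
  // Move the point light in a circle around the cube.
  const position: [number, number, number] = [
    3 * Math.cos(tick * 0.02),
    2,
    3 * Math.sin(tick * 0.02)
  ];
  shaderInputs.setProps({
    lighting: {
      lights: [
        {type: 'ambient', color: [255, 255, 255]},
        {type: 'point', color: [255, 255, 255], position}
      ]
    }
  });
}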