多重渲染目标(Multiple Render Targets)是一种允许在单个渲染通道中将数据输出到多个纹理的技术。这在延迟渲染、屏幕空间效果、G-Buffer创建等高级渲染技术中非常有用。
javascript
import * as THREE from 'three';
// 1. Create several independent render targets, one texture each.
const width = 1024;
const height = 1024;

// Per-slot precision: slot 0 stores 8-bit color, slots 1-2 carry float data
// (normals / positions need more precision than a byte per channel).
const mrt = [
  THREE.UnsignedByteType,
  THREE.FloatType,
  THREE.FloatType
].map((type) => new THREE.WebGLRenderTarget(width, height, {
  format: THREE.RGBAFormat,
  type,
  minFilter: THREE.NearestFilter,
  magFilter: THREE.NearestFilter
}));
提示:Three.js 中 WebGLMultipleRenderTargets 的变更背景
WebGLMultipleRenderTargets 关键版本与变更细节
1. r162 版本:WebGLMultipleRenderTargets 被标记为废弃(Deprecated)
在 Three.js r162 中,官方对多渲染目标的实现进行了重构:
WebGLMultipleRenderTargets 的功能被合并到 WebGLRenderTarget 类中,不再需要单独的 WMRT 类。WebGLRenderTarget 通过构造选项 count 指定附件数量,并通过 textures 数组访问各个附件纹理,统一了单 / 多渲染目标的使用方式。
2. 后续版本:彻底移除 WMRT 类
在之后的版本(约 r172)中,WebGLMultipleRenderTargets 类被完全从源码中移除,成为历史 API。
可以使用如下代码实现版本兼容:
javascript
import * as THREE from 'three';
/**
 * Create a multi-render-target setup that works across three.js versions.
 *
 * Returns the legacy WebGLMultipleRenderTargets object when that class still
 * exists, otherwise an array of plain WebGLRenderTargets (one per attachment).
 *
 * @param {THREE.WebGLRenderer} renderer - kept for interface compatibility (unused)
 * @param {number} width  - target width in pixels
 * @param {number} height - target height in pixels
 * @param {number} count  - number of attachments
 * @returns {THREE.WebGLMultipleRenderTargets|THREE.WebGLRenderTarget[]}
 */
function createModernMRT(renderer, width, height, count) {
  // Texture settings shared by every attachment.
  const textureOptions = {
    format: THREE.RGBAFormat,
    type: THREE.FloatType,
    minFilter: THREE.NearestFilter,
    magFilter: THREE.NearestFilter
  };

  // Legacy path: the dedicated WMRT class is still present in this build.
  if (THREE.WebGLMultipleRenderTargets) {
    const target = new THREE.WebGLMultipleRenderTargets(width, height, count);
    for (let i = 0; i < count; i++) {
      Object.assign(target.texture[i], textureOptions);
    }
    return target;
  }

  // Fallback path: one independent WebGLRenderTarget per attachment.
  const targets = [];
  for (let i = 0; i < count; i++) {
    targets.push(new THREE.WebGLRenderTarget(width, height, { ...textureOptions }));
  }
  return targets;
}
顶点着色器:
glsl
// G-Buffer vertex shader: forwards normal, world position and UVs to the
// fragment stage.
varying vec3 vNormal;
varying vec3 vPosition;
varying vec2 vUv;
void main() {
// NOTE(review): normalMatrix in three.js maps into VIEW space, although the
// fragment stage labels this output "world-space normal" — confirm intent.
vNormal = normalize(normalMatrix * normal);
// World-space position (model transform only, no view/projection).
vPosition = (modelMatrix * vec4(position, 1.0)).xyz;
vUv = uv;
gl_Position = projectionMatrix * modelViewMatrix * vec4(position, 1.0);
}
片段着色器(输出到多个缓冲区):
glsl
// G-Buffer fragment shader: writes one vec4 per attachment via gl_FragData.
// NOTE(review): gl_FragData is the WebGL1 / GLSL ES 1.00 mechanism
// (WEBGL_draw_buffers); WebGL2 / GLSL ES 3.00 uses `layout(location = N) out`
// variables instead — confirm which pipeline this targets.
varying vec3 vNormal;
varying vec3 vPosition;
varying vec2 vUv;
uniform vec3 uColor;
void main() {
// Attachment 0: diffuse color
gl_FragData[0] = vec4(uColor, 1.0);
// Attachment 1: normal remapped from [-1,1] to [0,1]
gl_FragData[1] = vec4(normalize(vNormal) * 0.5 + 0.5, 1.0);
// Attachment 2: world-space position (requires a float render target)
gl_FragData[2] = vec4(vPosition, 1.0);
// Attachment 3: custom data (UV coordinates here)
// NOTE(review): four outputs are written, but the earlier snippet only
// creates three render targets — confirm attachment counts match.
gl_FragData[3] = vec4(vUv, 0.0, 1.0);
}
javascript
// Create a custom ShaderMaterial whose fragment stage writes to several
// attachments at once via gl_FragData (WebGL1-style MRT output).
const mrtMaterial = new THREE.ShaderMaterial({
uniforms: {
uColor: { value: new THREE.Color(0xff0000) }
},
vertexShader: `
varying vec3 vNormal;
varying vec3 vPosition;
varying vec2 vUv;
void main() {
vNormal = normalize(normalMatrix * normal);
vPosition = (modelMatrix * vec4(position, 1.0)).xyz;
vUv = uv;
gl_Position = projectionMatrix * modelViewMatrix * vec4(position, 1.0);
}
`,
fragmentShader: `
varying vec3 vNormal;
varying vec3 vPosition;
varying vec2 vUv;
uniform vec3 uColor;
void main() {
// 输出到多个缓冲区
gl_FragData[0] = vec4(uColor, 1.0);
gl_FragData[1] = vec4(normalize(vNormal) * 0.5 + 0.5, 1.0);
gl_FragData[2] = vec4(vPosition, 1.0);
gl_FragData[3] = vec4(vUv, 0.0, 1.0);
}
`
});
// Create a demo object that uses the MRT material.
const geometry = new THREE.BoxGeometry(1, 1, 1);
const mesh = new THREE.Mesh(geometry, mrtMaterial);
// NOTE(review): `scene` is assumed to be created elsewhere on the page.
scene.add(mesh);
javascript
// Fallback MRT setup: `mrt` is a plain array of WebGLRenderTargets, one per
// G-Buffer attachment.
// NOTE(review): `options` must be defined before this snippet runs.
const mrt = [
new THREE.WebGLRenderTarget(width, height, options), // color
new THREE.WebGLRenderTarget(width, height, options), // normal
new THREE.WebGLRenderTarget(width, height, options), // position
new THREE.WebGLRenderTarget(width, height, options) // other data
];
// mesh -> original material, filled while the MRT materials are applied.
const originalMaterials = new Map();
// One dedicated material per render target (different shader / uniforms).
// NOTE(review): createColorMaterial() etc. are assumed to be defined elsewhere.
const mrtMaterials = [
createColorMaterial(), // color material
createNormalMaterial(), // normal material
createPositionMaterial(), // position material
createDataMaterial() // data material
];
/**
 * Render the scene once per MRT target (multi-pass fallback): each pass binds
 * one target, swaps in that target's material, renders, and restores the
 * originals. The previously active render target is reinstated at the end.
 *
 * @param {THREE.Scene} scene
 * @param {THREE.Camera} camera
 */
function renderToMRT(scene, camera) {
  // Remember the active target so it can be restored afterwards.
  const previousTarget = renderer.getRenderTarget();
  mrt.forEach((target, index) => {
    renderer.setRenderTarget(target);
    // Wipe color, depth and stencil before each pass.
    renderer.clear(true, true, true);
    applyMRTMaterialToScene(scene, mrtMaterials[index]);
    renderer.render(scene, camera);
    restoreOriginalMaterials(scene);
  });
  renderer.setRenderTarget(previousTarget);
}
/**
 * Helper: swap every mesh's material for the given MRT material, remembering
 * the first-seen original in `originalMaterials` so it can be restored.
 *
 * @param {THREE.Scene} scene
 * @param {THREE.Material} mrtMaterial
 */
function applyMRTMaterialToScene(scene, mrtMaterial) {
  scene.traverse((node) => {
    if (!node.isMesh) return;
    // Only record the original once, even across repeated passes.
    if (!originalMaterials.has(node)) {
      originalMaterials.set(node, node.material);
    }
    node.material = mrtMaterial;
  });
}
/**
 * Helper: put back every material recorded by applyMRTMaterialToScene and
 * empty the bookkeeping map.
 *
 * @param {THREE.Scene} scene
 */
function restoreOriginalMaterials(scene) {
  scene.traverse((node) => {
    if (!node.isMesh) return;
    if (!originalMaterials.has(node)) return;
    node.material = originalMaterials.get(node);
  });
  originalMaterials.clear();
}
// Convenience handles to each attachment's texture after rendering.
const colorTexture = mrt[0].texture;
const normalTexture = mrt[1].texture;
const positionTexture = mrt[2].texture;
const dataTexture = mrt[3].texture;
javascript
class GBufferPass {
  /**
   * Multi-pass G-Buffer generator: renders the scene once per attachment
   * (diffuse / normal / position / material) into four independent
   * WebGLRenderTargets, selected via the `outputType` uniform.
   *
   * Fixes over the original version:
   * - Helper textures use RGBAFormat with 4-byte texels (THREE.RGBFormat was
   *   removed from three.js in r137, so the old 3-byte DataTextures break).
   * - Helper textures are cached instead of allocating a new GPU texture per
   *   mesh per frame (which leaked memory), and are disposed in dispose().
   *
   * NOTE(review): a single shared ShaderMaterial is applied to every mesh, so
   * one render() pass draws ALL meshes with the uniforms of the LAST mesh
   * visited in renderSceneWithGBufferMaterial. Per-mesh material properties
   * require per-mesh material clones — confirm whether that matters here.
   *
   * @param {number} width  - render target width in pixels
   * @param {number} height - render target height in pixels
   */
  constructor(width, height) {
    this.width = width;
    this.height = height;

    // Four independent WebGLRenderTargets acting as the G-Buffer.
    this.gBuffer = this.createGBuffer();

    // Caches for helper textures (keyed solid colors, default flat normal).
    this._solidColorTextures = new Map(); // "r,g,b" -> DataTexture
    this._defaultNormalTexture = null;

    // Shared geometry material; `outputType` selects the buffer being written.
    this.geometryMaterial = new THREE.ShaderMaterial({
      uniforms: {
        diffuseMap: { value: null },
        normalMap: { value: null },
        roughness: { value: 0.5 },
        metalness: { value: 0.0 },
        emissive: { value: new THREE.Color(0x000000) },
        opacity: { value: 1.0 },
        outputType: { value: 0 } // 0:diffuse, 1:normal, 2:position, 3:material
      },
      vertexShader: this.getVertexShader(),
      fragmentShader: this.getFragmentShader()
    });
  }

  /**
   * Create the four render targets that make up the G-Buffer.
   * @returns {THREE.WebGLRenderTarget[]}
   */
  createGBuffer() {
    const gBuffer = [];
    // Buffer 0: Diffuse (RGB) + Alpha. Owns the only depth buffer.
    gBuffer[0] = new THREE.WebGLRenderTarget(this.width, this.height, {
      minFilter: THREE.NearestFilter,
      magFilter: THREE.NearestFilter,
      format: THREE.RGBAFormat,
      type: THREE.UnsignedByteType,
      depthBuffer: true,
      stencilBuffer: false
    });
    // Buffer 1: Normal (RGB) + reserved (A). Float precision, no depth.
    gBuffer[1] = new THREE.WebGLRenderTarget(this.width, this.height, {
      minFilter: THREE.NearestFilter,
      magFilter: THREE.NearestFilter,
      format: THREE.RGBAFormat,
      type: THREE.FloatType,
      depthBuffer: false
    });
    // Buffer 2: Position (RGB) + reserved (A). Float precision, no depth.
    gBuffer[2] = new THREE.WebGLRenderTarget(this.width, this.height, {
      minFilter: THREE.NearestFilter,
      magFilter: THREE.NearestFilter,
      format: THREE.RGBAFormat,
      type: THREE.FloatType,
      depthBuffer: false
    });
    // Buffer 3: Material (R:roughness, G:metalness, B:emissive intensity, A:reserved).
    gBuffer[3] = new THREE.WebGLRenderTarget(this.width, this.height, {
      minFilter: THREE.NearestFilter,
      magFilter: THREE.NearestFilter,
      format: THREE.RGBAFormat,
      type: THREE.UnsignedByteType,
      depthBuffer: false
    });
    return gBuffer;
  }

  /**
   * Vertex shader: forwards normal, world position, UVs and a tangent basis.
   * @returns {string}
   */
  getVertexShader() {
    return `
      varying vec3 vNormal;
      varying vec3 vPosition;
      varying vec2 vUv;
      varying vec3 vTangent;
      varying vec3 vBitangent;
      attribute vec3 tangent;
      void main() {
        vUv = uv;
        vNormal = normalize(normalMatrix * normal);
        vPosition = (modelMatrix * vec4(position, 1.0)).xyz;
        // 计算切线空间基向量
        vTangent = normalize(normalMatrix * tangent);
        vBitangent = cross(vNormal, vTangent);
        gl_Position = projectionMatrix * modelViewMatrix * vec4(position, 1.0);
      }
    `;
  }

  /**
   * Fragment shader: writes one of the four G-Buffer layouts depending on
   * the `outputType` uniform. The unused `normalizedEmissive` computation
   * from the original version has been removed (dead code).
   *
   * NOTE(review): `textureSize()` is a GLSL ES 3.00 built-in while the rest
   * of the shader is written in ES 1.00 style — confirm this compiles under
   * the renderer/glslVersion actually in use.
   *
   * @returns {string}
   */
  getFragmentShader() {
    return `
      varying vec3 vNormal;
      varying vec3 vPosition;
      varying vec2 vUv;
      varying vec3 vTangent;
      varying vec3 vBitangent;
      uniform sampler2D diffuseMap;
      uniform sampler2D normalMap;
      uniform float roughness;
      uniform float metalness;
      uniform vec3 emissive;
      uniform float opacity;
      uniform int outputType;
      void main() {
        if (outputType == 0) {
          // Buffer 0: 漫反射颜色 + 透明度
          vec4 diffuse = texture2D(diffuseMap, vUv);
          gl_FragColor = vec4(diffuse.rgb, opacity * diffuse.a);
        }
        else if (outputType == 1) {
          // Buffer 1: 世界空间法线
          vec3 normal = vNormal;
          // 如果有法线贴图,应用它
          if (textureSize(normalMap, 0).x > 1) {
            vec3 tangentNormal = texture2D(normalMap, vUv).xyz * 2.0 - 1.0;
            // 构建TBN矩阵
            mat3 TBN = mat3(
              normalize(vTangent),
              normalize(vBitangent),
              normalize(vNormal)
            );
            normal = normalize(TBN * tangentNormal);
          }
          // 编码法线到[0,1]范围
          gl_FragColor = vec4(normal * 0.5 + 0.5, 1.0);
        }
        else if (outputType == 2) {
          // Buffer 2: 世界空间位置
          gl_FragColor = vec4(vPosition, 1.0);
        }
        else if (outputType == 3) {
          // Buffer 3: 材质属性
          float emissiveIntensity = length(emissive);
          gl_FragColor = vec4(
            roughness,
            metalness,
            emissiveIntensity,
            1.0
          );
        }
      }
    `;
  }

  /**
   * Render the scene once per G-Buffer attachment and restore the previously
   * bound render target afterwards.
   *
   * @param {THREE.WebGLRenderer} renderer
   * @param {THREE.Scene} scene
   * @param {THREE.Camera} camera
   * @returns {THREE.Texture[]} the four attachment textures
   */
  render(renderer, scene, camera) {
    const currentRenderTarget = renderer.getRenderTarget();
    for (let i = 0; i < this.gBuffer.length; i++) {
      // Select which layout the shared shader writes during this pass.
      this.geometryMaterial.uniforms.outputType.value = i;
      renderer.setRenderTarget(this.gBuffer[i]);
      if (i === 0) {
        // Only the first target owns a depth buffer: clear color+depth+stencil.
        renderer.clear(true, true, true);
      } else {
        // Remaining targets are color-only.
        renderer.clear(true, false, false);
      }
      this.renderSceneWithGBufferMaterial(renderer, scene, camera);
    }
    renderer.setRenderTarget(currentRenderTarget);
    return this.gBuffer.map((target) => target.texture);
  }

  /**
   * Temporarily swap every mesh's material for the shared G-Buffer material,
   * render, then restore the originals.
   *
   * NOTE(review): uniforms are overwritten per mesh during traversal, so the
   * single render() call uses the LAST mesh's values for every mesh.
   */
  renderSceneWithGBufferMaterial(renderer, scene, camera) {
    const materialCache = new Map();
    scene.traverse((object) => {
      if (object.isMesh && object.material) {
        // Remember the original so it can be put back after the pass.
        materialCache.set(object, object.material);
        const material = object.material;
        if (material.map) {
          this.geometryMaterial.uniforms.diffuseMap.value = material.map;
        } else if (material.color) {
          // No texture: substitute a cached 1x1 solid-color texture.
          this.geometryMaterial.uniforms.diffuseMap.value =
            this.createSolidColorTexture(material.color);
        }
        if (material.normalMap) {
          this.geometryMaterial.uniforms.normalMap.value = material.normalMap;
        } else {
          // No normal map: use the cached 1x1 flat-normal texture.
          this.geometryMaterial.uniforms.normalMap.value = this.getDefaultNormalTexture();
        }
        this.geometryMaterial.uniforms.roughness.value =
          material.roughness !== undefined ? material.roughness : 0.5;
        this.geometryMaterial.uniforms.metalness.value =
          material.metalness !== undefined ? material.metalness : 0.0;
        this.geometryMaterial.uniforms.emissive.value =
          material.emissive || new THREE.Color(0x000000);
        this.geometryMaterial.uniforms.opacity.value =
          material.opacity !== undefined ? material.opacity : 1.0;
        object.material = this.geometryMaterial;
      }
    });
    renderer.render(scene, camera);
    // Restore the original materials.
    materialCache.forEach((material, object) => {
      object.material = material;
    });
  }

  /**
   * Get a cached 1x1 solid-color texture for materials without a diffuse map.
   * RGBA is used because THREE.RGBFormat was removed from three.js (r137+).
   *
   * @param {THREE.Color} color - color with r/g/b components in [0,1]
   * @returns {THREE.DataTexture}
   */
  createSolidColorTexture(color) {
    const key = `${color.r},${color.g},${color.b}`;
    const cached = this._solidColorTextures.get(key);
    if (cached) return cached;
    const data = new Uint8Array([
      Math.floor(color.r * 255),
      Math.floor(color.g * 255),
      Math.floor(color.b * 255),
      255
    ]);
    const texture = new THREE.DataTexture(data, 1, 1, THREE.RGBAFormat);
    texture.needsUpdate = true;
    this._solidColorTextures.set(key, texture);
    return texture;
  }

  /**
   * Get the cached 1x1 default normal texture: flat normal (0, 0, 1)
   * encoded as (0.5, 0.5, 1.0).
   *
   * @returns {THREE.DataTexture}
   */
  getDefaultNormalTexture() {
    if (!this._defaultNormalTexture) {
      const data = new Uint8Array([127, 127, 255, 255]);
      const texture = new THREE.DataTexture(data, 1, 1, THREE.RGBAFormat);
      texture.needsUpdate = true;
      this._defaultNormalTexture = texture;
    }
    return this._defaultNormalTexture;
  }

  /**
   * Get one G-Buffer texture by index, or null when out of range.
   * @param {number} index
   * @returns {THREE.Texture|null}
   */
  getTexture(index) {
    return this.gBuffer[index] ? this.gBuffer[index].texture : null;
  }

  /**
   * Get all G-Buffer textures as a named map.
   * @returns {{diffuse: THREE.Texture, normal: THREE.Texture, position: THREE.Texture, material: THREE.Texture}}
   */
  getTextures() {
    return {
      diffuse: this.gBuffer[0].texture,
      normal: this.gBuffer[1].texture,
      position: this.gBuffer[2].texture,
      material: this.gBuffer[3].texture
    };
  }

  /**
   * Resize every render target (e.g. on window resize).
   * @param {number} width
   * @param {number} height
   */
  setSize(width, height) {
    this.width = width;
    this.height = height;
    this.gBuffer.forEach((target) => {
      target.setSize(width, height);
    });
  }

  /** Release every GPU resource owned by this pass, including cached textures. */
  dispose() {
    this.gBuffer.forEach((target) => {
      target.dispose();
    });
    this.geometryMaterial.dispose();
    this._solidColorTextures.forEach((texture) => texture.dispose());
    this._solidColorTextures.clear();
    if (this._defaultNormalTexture) {
      this._defaultNormalTexture.dispose();
      this._defaultNormalTexture = null;
    }
  }
}
// Usage example: create a G-Buffer pass sized to the window.
const width = window.innerWidth;
const height = window.innerHeight;
const gBufferPass = new GBufferPass(width, height);

// Render loop: fill the G-Buffer each frame, then (optionally) run a
// deferred lighting pass with the returned textures.
function render() {
  const gBufferTextures = gBufferPass.render(renderer, scene, camera);
  // Deferred lighting would consume the attachment textures here:
  // const diffuseTex = gBufferTextures[0];
  // const normalTex = gBufferTextures[1];
  // const positionTex = gBufferTextures[2];
  // const materialTex = gBufferTextures[3];
  requestAnimationFrame(render);
}
// FIX: the loop was defined but never started — kick it off once.
render();

// Keep the G-Buffer in sync with the window size.
window.addEventListener('resize', () => {
  gBufferPass.setSize(window.innerWidth, window.innerHeight);
});
// Debug: visualize the four G-Buffer attachments as a row of quads.
/**
 * Build a small debug scene showing each G-Buffer attachment on its own quad.
 *
 * Fix over the original: the position-buffer debug material's vertex shader
 * never declared or wrote `vUv`, while its fragment shader reads it — that
 * shader could not link. The vertex shader now forwards the UVs.
 *
 * @param {GBufferPass} gBufferPass - pass whose textures are displayed
 * @returns {{scene: THREE.Scene, camera: THREE.OrthographicCamera}}
 */
function visualizeGBuffer(gBufferPass) {
  const debugScene = new THREE.Scene();
  const debugCamera = new THREE.OrthographicCamera(-2, 2, 2, -2, 0.1, 10);
  debugCamera.position.z = 1;
  const textures = gBufferPass.getTextures();
  const planeSize = 0.8;

  // Helper: add one debug quad at horizontal offset x with the given material.
  const addPlane = (material, x) => {
    const plane = new THREE.Mesh(
      new THREE.PlaneGeometry(planeSize, planeSize),
      material
    );
    plane.position.set(x, 0.5, 0);
    debugScene.add(plane);
  };

  // Diffuse and normal buffers can be shown directly.
  addPlane(new THREE.MeshBasicMaterial({ map: textures.diffuse }), -1.5);
  addPlane(new THREE.MeshBasicMaterial({ map: textures.normal }), -0.5);

  // Position buffer stores world positions that may exceed [0,1]; remap for display.
  const positionMaterial = new THREE.ShaderMaterial({
    vertexShader: `
      varying vec2 vUv;
      void main() {
        vUv = uv;
        gl_Position = projectionMatrix * modelViewMatrix * vec4(position, 1.0);
      }
    `,
    fragmentShader: `
      uniform sampler2D tPosition;
      varying vec2 vUv;
      void main() {
        vec4 position = texture2D(tPosition, vUv);
        // 可视化位置:将位置值归一化
        vec3 normalizedPos = normalize(position.xyz);
        gl_FragColor = vec4(normalizedPos * 0.5 + 0.5, 1.0);
      }
    `,
    uniforms: {
      tPosition: { value: textures.position }
    }
  });
  addPlane(positionMaterial, 0.5);

  // Material buffer (roughness / metalness / emissive intensity).
  addPlane(new THREE.MeshBasicMaterial({ map: textures.material }), 1.5);

  return { scene: debugScene, camera: debugCamera };
}
javascript
class ScreenSpaceEffects {
  /**
   * Full-screen composite pass: reads the four G-Buffer textures
   * (diffuse / normal / position / data) and produces a lit image with a
   * simple Lambert diffuse + Phong specular model.
   *
   * @param {THREE.WebGLRenderer} renderer - renderer used for the final pass
   * @param {THREE.Texture[]} mrtTextures  - [diffuse, normal, position, data]
   */
  constructor(renderer, mrtTextures) {
    this.renderer = renderer;
    this.textures = mrtTextures;

    // Pass-through vertex stage for a full-screen quad in clip space.
    const vertexShader = `
      varying vec2 vUv;
      void main() {
        vUv = uv;
        gl_Position = vec4(position, 1.0);
      }
    `;

    // Composite stage: sample every G-Buffer attachment and shade.
    const fragmentShader = `
      uniform sampler2D tDiffuse;
      uniform sampler2D tNormal;
      uniform sampler2D tPosition;
      uniform sampler2D tData;
      uniform vec3 cameraPos;
      uniform vec3 lightPos;
      varying vec2 vUv;
      void main() {
        // 从G-Buffer读取数据
        vec4 diffuse = texture2D(tDiffuse, vUv);
        vec3 normal = texture2D(tNormal, vUv).xyz * 2.0 - 1.0;
        vec3 position = texture2D(tPosition, vUv).xyz;
        vec4 data = texture2D(tData, vUv);
        // 简单的光照计算
        vec3 lightDir = normalize(lightPos - position);
        float diff = max(dot(normal, lightDir), 0.0);
        // 视图方向
        vec3 viewDir = normalize(cameraPos - position);
        vec3 reflectDir = reflect(-lightDir, normal);
        float spec = pow(max(dot(viewDir, reflectDir), 0.0), 32.0);
        // 组合结果
        vec3 color = diffuse.rgb * diff + vec3(0.3) * spec;
        gl_FragColor = vec4(color, diffuse.a);
      }
    `;

    // Material wiring each G-Buffer attachment to its sampler uniform.
    this.compositeMaterial = new THREE.ShaderMaterial({
      uniforms: {
        tDiffuse: { value: this.textures[0] },
        tNormal: { value: this.textures[1] },
        tPosition: { value: this.textures[2] },
        tData: { value: this.textures[3] },
        cameraPos: { value: new THREE.Vector3() },
        lightPos: { value: new THREE.Vector3(5, 5, 5) }
      },
      vertexShader: vertexShader,
      fragmentShader: fragmentShader
    });

    // Full-screen quad plus a trivial orthographic camera for the final pass.
    this.quad = new THREE.Mesh(
      new THREE.PlaneGeometry(2, 2),
      this.compositeMaterial
    );
    this.scene = new THREE.Scene();
    this.scene.add(this.quad);
    this.camera = new THREE.OrthographicCamera(-1, 1, 1, -1, 0, 1);
  }

  /**
   * Run the composite pass into the default framebuffer.
   *
   * @param {THREE.Vector3} cameraPos - world-space camera position
   * @param {THREE.Vector3} lightPos  - world-space light position
   */
  render(cameraPos, lightPos) {
    const uniforms = this.compositeMaterial.uniforms;
    uniforms.cameraPos.value.copy(cameraPos);
    uniforms.lightPos.value.copy(lightPos);
    this.renderer.setRenderTarget(null);
    this.renderer.render(this.scene, this.camera);
  }
}
性能考虑:
限制MRT数量(通常4-8个)
使用适当的数据格式(RGBA32F 等浮点格式占用内存较大,精度够用时优先选择 8 位格式)
合理设置纹理分辨率
数据编码:
glsl
// Encode a [-1,1] normal into the [0,1] range a color attachment can store.
vec3 encodedNormal = normal * 0.5 + 0.5;
// Encoding position data:
// Option 1: store the position directly (requires a high-precision/float target)
// Option 2: store only depth and reconstruct the position in the shader
调试MRT:
javascript
// Visualize every MRT attachment on a row of quads.
/**
 * Debug helper: add one textured quad per MRT attachment to the global scene.
 *
 * Fix over the original: it assumed the legacy WebGLMultipleRenderTargets
 * shape (`mrt.texture` is an array). With the modern fallback — a plain
 * array of WebGLRenderTargets, as used throughout this document —
 * `mrt.texture` is undefined and the function crashed. Both shapes are now
 * accepted.
 *
 * @param {THREE.WebGLMultipleRenderTargets|THREE.WebGLRenderTarget[]} mrt
 */
function debugMRT(mrt) {
  // Normalize both API shapes to a flat array of textures.
  const textures = Array.isArray(mrt)
    ? mrt.map((target) => target.texture)
    : mrt.texture;
  textures.forEach((texture, index) => {
    const material = new THREE.MeshBasicMaterial({ map: texture });
    const plane = new THREE.Mesh(
      new THREE.PlaneGeometry(1, 1),
      material
    );
    // Lay the quads out left-to-right, roughly centered on the origin.
    plane.position.set(index * 1.2 - (textures.length * 0.6), 0, 0);
    scene.add(plane);
  });
}
清除缓冲区:
javascript
// Clear every buffer (color, depth, stencil) of the bound target.
// NOTE(review): this form expects a single (multi-attachment) render target;
// if `mrt` is an array of WebGLRenderTargets — the fallback used elsewhere in
// this document — each target must be bound and cleared individually.
renderer.setRenderTarget(mrt);
renderer.clear(true, true, true);
WebGL 2.0:原生完全支持 MRT(drawBuffers)
WebGL 1.0:需通过 WEBGL_draw_buffers 扩展支持;旧版 Three.js 的多渲染目标实现会自动处理该扩展(注意:WebGLMultipleRenderTargets 类已被移除,且较新的 Three.js 版本已不再支持 WebGL 1 渲染)
MRT是高级渲染技术的核心组件,合理使用可以显著提升渲染效果和性能。