Three.js 屏幕空间反射(Screen Space Reflections, SSR)
屏幕空间反射是一种高效的实时反射技术,它仅使用当前屏幕可见的信息来计算反射效果。
SSR 基于以下步骤:
1. 对每个像素,沿反射方向进行射线步进;
2. 在深度缓冲区中查找交点;
3. 在颜色缓冲区中采样反射颜色;
4. 应用模糊和衰减。
javascript
import * as THREE from 'three';
import { EffectComposer } from 'three/examples/jsm/postprocessing/EffectComposer.js';
import { RenderPass } from 'three/examples/jsm/postprocessing/RenderPass.js';
import { SSAOPass } from 'three/examples/jsm/postprocessing/SSAOPass.js';
import { SSRPass } from 'three/examples/jsm/postprocessing/SSRPass.js';
import { UnrealBloomPass } from 'three/examples/jsm/postprocessing/UnrealBloomPass.js';
import { OutputPass } from 'three/examples/jsm/postprocessing/OutputPass.js';
// Demo: screen-space reflections using three.js' stock SSRPass inside an
// EffectComposer chain (render -> SSR -> output).
class SSRExample {
constructor() {
this.initScene();
this.initSSR();
this.animate();
}
// Build scene, camera and renderer, then populate the scene.
initScene() {
// Scene
this.scene = new THREE.Scene();
// Camera
this.camera = new THREE.PerspectiveCamera(
75,
window.innerWidth / window.innerHeight,
0.1,
1000
);
this.camera.position.set(5, 5, 5);
this.camera.lookAt(0, 0, 0);
// Renderer
this.renderer = new THREE.WebGLRenderer({ antialias: true });
this.renderer.setSize(window.innerWidth, window.innerHeight);
this.renderer.setPixelRatio(window.devicePixelRatio);
// Bug fix: objects below set castShadow/receiveShadow and the directional
// light casts shadows, but the shadow map was never enabled, so no shadows
// were ever rendered.
this.renderer.shadowMap.enabled = true;
// NOTE(review): physicallyCorrectLights is deprecated/removed in recent
// three.js releases (r150+ uses `useLegacyLights = false`) — confirm the
// three.js version this project pins.
this.renderer.physicallyCorrectLights = true;
document.body.appendChild(this.renderer.domElement);
// Populate the scene
this.addObjects();
this.addLights();
}
// Assemble the post-processing chain containing the SSR pass.
initSSR() {
// Post-processing composer
this.composer = new EffectComposer(this.renderer);
// 1. Base render pass
const renderPass = new RenderPass(this.scene, this.camera);
this.composer.addPass(renderPass);
// 2. SSR pass
this.ssrPass = new SSRPass({
renderer: this.renderer,
scene: this.scene,
camera: this.camera,
width: window.innerWidth,
height: window.innerHeight,
groundReflector: null,
selects: null
});
// SSR tuning
this.ssrPass.thickness = 0.018; // depth-intersection thickness
this.ssrPass.distanceAttenuation = 2.0; // fade with distance
this.ssrPass.fresnel = true; // Fresnel falloff
this.ssrPass.infiniteThick = false; // do not treat surfaces as infinitely thick
// Optional: blurred reflections
this.ssrPass.blur = true;
this.ssrPass.blurKernel = 2;
this.ssrPass.blurSharpness = 10;
this.composer.addPass(this.ssrPass);
// 3. Output pass (tone mapping / color-space conversion)
const outputPass = new OutputPass();
this.composer.addPass(outputPass);
}
// Ground plane plus a few reflective primitives.
addObjects() {
// Ground (reflective surface)
const groundGeometry = new THREE.PlaneGeometry(20, 20);
const groundMaterial = new THREE.MeshStandardMaterial({
color: 0x333333,
roughness: 0.1,
metalness: 0.5
});
this.ground = new THREE.Mesh(groundGeometry, groundMaterial);
this.ground.rotation.x = -Math.PI / 2;
this.ground.receiveShadow = true;
this.scene.add(this.ground);
// Reflective box
const boxGeometry = new THREE.BoxGeometry(1, 1, 1);
const boxMaterial = new THREE.MeshStandardMaterial({
color: 0xff0000,
roughness: 0.0,
metalness: 1.0
});
this.box = new THREE.Mesh(boxGeometry, boxMaterial);
this.box.position.set(0, 0.5, 0);
this.box.castShadow = true;
this.scene.add(this.box);
// Sphere
const sphereGeometry = new THREE.SphereGeometry(0.5, 32, 32);
const sphereMaterial = new THREE.MeshStandardMaterial({
color: 0x00ff00,
roughness: 0.1,
metalness: 0.9
});
this.sphere = new THREE.Mesh(sphereGeometry, sphereMaterial);
this.sphere.position.set(2, 0.5, 2);
// Bug fix: was `this.spene.add(...)` (typo) — threw a TypeError and left
// the sphere out of the scene.
this.scene.add(this.sphere);
// Cylinder
const cylinderGeometry = new THREE.CylinderGeometry(0.3, 0.3, 2, 16);
const cylinderMaterial = new THREE.MeshStandardMaterial({
color: 0x0000ff,
roughness: 0.05,
metalness: 0.95
});
this.cylinder = new THREE.Mesh(cylinderGeometry, cylinderMaterial);
this.cylinder.position.set(-2, 1, -2);
this.scene.add(this.cylinder);
}
// Ambient + shadow-casting directional + point light.
addLights() {
// Ambient light
const ambientLight = new THREE.AmbientLight(0xffffff, 0.5);
this.scene.add(ambientLight);
// Directional light
const directionalLight = new THREE.DirectionalLight(0xffffff, 1);
directionalLight.position.set(5, 10, 7);
directionalLight.castShadow = true;
this.scene.add(directionalLight);
// Point light
const pointLight = new THREE.PointLight(0xffff00, 1, 100);
pointLight.position.set(0, 5, 0);
this.scene.add(pointLight);
}
// Render loop: animate objects and draw through the composer.
animate() {
requestAnimationFrame(() => this.animate());
// Simple animation
this.box.rotation.y += 0.01;
this.sphere.rotation.x += 0.01;
// Render through the post-processing chain
this.composer.render();
}
}
对于更精细的控制,可以实现自定义 SSR:
javascript
// Custom screen-space-reflection pass built on raw render targets and a
// full-screen shader quad, for finer control than the stock SSRPass.
//
// NOTE(review): as written, render() only ever renders into colorTarget —
// depthTarget and normalTarget are never filled, so the shader's tDepth and
// tNormal sample empty textures. Presumably the G-buffer fill passes were
// omitted from this excerpt; confirm before use.
class CustomSSR {
// renderer/scene/camera are held by reference; buffers are sized from the
// renderer's size at construction time only (no resize handling).
constructor(renderer, scene, camera) {
this.renderer = renderer;
this.scene = scene;
this.camera = camera;
this.initBuffers();
this.initSSRShader();
}
// Allocate the color / depth / normal render targets used as a G-buffer.
initBuffers() {
// Current drawing-buffer size.
const size = new THREE.Vector2();
this.renderer.getSize(size);
// Color buffer — float RGBA so scene colors survive for reflection sampling.
this.colorTarget = new THREE.WebGLRenderTarget(
size.width, size.height,
{
minFilter: THREE.LinearFilter,
magFilter: THREE.LinearFilter,
format: THREE.RGBAFormat,
type: THREE.FloatType
}
);
// Depth buffer.
// NOTE(review): DepthFormat is not a valid *color* texture format for a
// WebGLRenderTarget in three.js; depth readback normally uses a
// THREE.DepthTexture attached via the target's `depthTexture` option —
// confirm against the pinned three.js version.
this.depthTarget = new THREE.WebGLRenderTarget(
size.width, size.height,
{
minFilter: THREE.NearestFilter,
magFilter: THREE.NearestFilter,
format: THREE.DepthFormat,
type: THREE.UnsignedShortType
}
);
// Normal buffer (the shader decodes normals with n * 2 - 1, so this is
// expected to hold 0..1-encoded world-space normals).
this.normalTarget = new THREE.WebGLRenderTarget(
size.width, size.height,
{
minFilter: THREE.NearestFilter,
magFilter: THREE.NearestFilter,
format: THREE.RGBAFormat,
type: THREE.FloatType
}
);
}
// Build the SSR shader material and the full-screen quad scene it renders.
initSSRShader() {
this.ssrMaterial = new THREE.ShaderMaterial({
uniforms: {
tColor: { value: this.colorTarget.texture },
tDepth: { value: this.depthTarget.texture },
tNormal: { value: this.normalTarget.texture },
// Live reference — follows later projection changes automatically.
cameraProjectionMatrix: { value: this.camera.projectionMatrix },
// Snapshot — copied once here, so it goes stale if the projection ever
// changes (resize/zoom). NOTE(review): refresh per frame if needed.
cameraProjectionMatrixInverse: { value: new THREE.Matrix4().copy(this.camera.projectionMatrix).invert() },
cameraWorldMatrix: { value: this.camera.matrixWorld },
resolution: { value: new THREE.Vector2(window.innerWidth, window.innerHeight) },
// SSR parameters
maxDistance: { value: 20.0 },
thickness: { value: 0.01 },
distanceAttenuation: { value: 2.0 },
fadeDistance: { value: 10.0 },
// Ray-march parameters
maxSteps: { value: 20 },
refineSteps: { value: 5 },
stepSize: { value: 0.1 },
// Blur parameters (declared but not read by the fragment shader below)
blurAmount: { value: 2.0 },
blurSharpness: { value: 10.0 }
},
vertexShader: `
varying vec2 vUv;
void main() {
vUv = uv;
gl_Position = projectionMatrix * modelViewMatrix * vec4(position, 1.0);
}
`,
fragmentShader: `
uniform sampler2D tColor;
uniform sampler2D tDepth;
uniform sampler2D tNormal;
uniform mat4 cameraProjectionMatrix;
uniform mat4 cameraProjectionMatrixInverse;
uniform mat4 cameraWorldMatrix;
uniform vec2 resolution;
uniform float maxDistance;
uniform float thickness;
uniform float distanceAttenuation;
uniform float fadeDistance;
uniform int maxSteps;
uniform int refineSteps;
uniform float stepSize;
uniform float blurAmount;
uniform float blurSharpness;
varying vec2 vUv;
// 从深度重建位置
vec3 getPosition(vec2 uv, float depth) {
vec4 clipSpace = vec4(uv * 2.0 - 1.0, depth * 2.0 - 1.0, 1.0);
vec4 viewSpace = cameraProjectionMatrixInverse * clipSpace;
viewSpace.xyz /= viewSpace.w;
vec4 worldSpace = cameraWorldMatrix * viewSpace;
return worldSpace.xyz;
}
// 屏幕空间射线步进
vec4 traceReflection(vec3 position, vec3 normal, vec3 viewDir) {
// 计算反射方向
vec3 reflectDir = reflect(viewDir, normal);
// 转换为屏幕空间
vec3 rayStart = position;
vec3 rayEnd = rayStart + reflectDir * maxDistance;
// 转换为齐次坐标
vec4 rayStartClip = cameraProjectionMatrix * vec4(rayStart, 1.0);
vec4 rayEndClip = cameraProjectionMatrix * vec4(rayEnd, 1.0);
rayStartClip.xyz /= rayStartClip.w;
rayEndClip.xyz /= rayEndClip.w;
// 转换为屏幕坐标
vec2 rayStartScreen = rayStartClip.xy * 0.5 + 0.5;
vec2 rayEndScreen = rayEndClip.xy * 0.5 + 0.5;
vec2 rayDirScreen = rayEndScreen - rayStartScreen;
float rayLengthScreen = length(rayDirScreen);
if (rayLengthScreen < 0.001) return vec4(0.0);
vec2 rayStepScreen = rayDirScreen / rayLengthScreen * stepSize;
float rayStepLength = length(rayStepScreen);
vec2 currentUV = rayStartScreen;
float currentDepth = rayStartClip.z;
for (int i = 0; i < 20; i++) {
if (i >= maxSteps) break;
currentUV += rayStepScreen;
// 边界检查
if (currentUV.x < 0.0 || currentUV.x > 1.0 ||
currentUV.y < 0.0 || currentUV.y > 1.0) {
break;
}
// 采样深度
float sceneDepth = texture2D(tDepth, currentUV).r;
vec3 scenePos = getPosition(currentUV, sceneDepth);
// 计算当前射线位置
float rayDistance = float(i) * rayStepLength;
vec3 rayPos = rayStart + reflectDir * rayDistance;
// 深度比较
float depthDiff = scenePos.z - rayPos.z;
if (depthDiff > 0.0 && depthDiff < thickness) {
// 找到交点,进行精细化
for (int j = 0; j < 5; j++) {
if (j >= refineSteps) break;
// 二分法精细化
rayStepScreen *= 0.5;
currentUV -= rayStepScreen;
sceneDepth = texture2D(tDepth, currentUV).r;
scenePos = getPosition(currentUV, sceneDepth);
rayDistance = float(i) * rayStepLength - float(j) * rayStepLength * 0.5;
rayPos = rayStart + reflectDir * rayDistance;
depthDiff = scenePos.z - rayPos.z;
if (depthDiff < 0.0) {
currentUV += rayStepScreen;
}
}
// 计算衰减
float distance = length(scenePos - position);
float attenuation = 1.0 - smoothstep(0.0, fadeDistance, distance);
attenuation *= pow(1.0 - distance / maxDistance, distanceAttenuation);
// 采样颜色
vec4 reflectionColor = texture2D(tColor, currentUV);
return vec4(reflectionColor.rgb, attenuation);
}
}
return vec4(0.0);
}
void main() {
// 采样G-Buffer
vec4 color = texture2D(tColor, vUv);
float depth = texture2D(tDepth, vUv).r;
vec4 normalData = texture2D(tNormal, vUv);
// 重建位置和法线
vec3 position = getPosition(vUv, depth);
vec3 normal = normalize(normalData.xyz * 2.0 - 1.0);
// 计算视线方向
vec3 viewDir = normalize(position - cameraWorldMatrix[3].xyz);
// 追踪反射
vec4 reflection = traceReflection(position, normal, viewDir);
// 混合反射
float fresnel = pow(1.0 - max(dot(normal, -viewDir), 0.0), 5.0);
float reflectionStrength = normalData.a * fresnel;
vec3 finalColor = mix(color.rgb, reflection.rgb, reflection.a * reflectionStrength);
gl_FragColor = vec4(finalColor, 1.0);
}
`
});
// Full-screen quad; the identity camera below leaves the 2x2 plane covering
// exactly the NDC viewport.
this.ssrQuad = new THREE.Mesh(
new THREE.PlaneGeometry(2, 2),
this.ssrMaterial
);
this.ssrScene = new THREE.Scene();
this.ssrScene.add(this.ssrQuad);
}
// Two-pass render: fill the color target, then composite SSR to the screen.
render() {
// Pass 1: render the scene into the color target.
this.renderer.setRenderTarget(this.colorTarget);
this.renderer.render(this.scene, this.camera);
// Pass 2: draw the SSR quad to the default framebuffer.
// NOTE(review): allocates a fresh THREE.Camera every frame — consider
// caching one; also see the class-level note about unfilled depth/normal
// targets.
this.renderer.setRenderTarget(null);
this.renderer.render(this.ssrScene, new THREE.Camera());
}
}
javascript
// Performance-oriented SSR controller: adaptive resolution, step size and
// step count, driven by the measured frame rate.
// NOTE(review): this class does not create `this.ssrPass` itself — it expects
// one to be attached externally before updateQualitySettings() can tune it.
class OptimizedSSR {
constructor() {
this.initAdaptiveSSR();
}
// Initialize all adaptive-quality knobs and the downsample targets.
initAdaptiveSSR() {
// Adaptive resolution
this.downsampleFactor = 1; // 1 = full resolution, 2 = half resolution
this.adaptiveQuality = true;
// Create the downsampled render target(s)
this.createDownsampleTargets();
// Variable ray-march step size
this.adaptiveStepSize = true;
this.minStepSize = 0.05;
this.maxStepSize = 0.2;
// Adaptive maximum step count
this.adaptiveMaxSteps = true;
this.baseMaxSteps = 30;
// Performance monitoring
this.frameTimes = [];
this.targetFPS = 60;
}
// (Re)create the reduced-resolution target and the downsampling material.
createDownsampleTargets() {
const width = window.innerWidth / this.downsampleFactor;
const height = window.innerHeight / this.downsampleFactor;
this.halfResTarget = new THREE.WebGLRenderTarget(width, height, {
minFilter: THREE.LinearFilter,
magFilter: THREE.LinearFilter,
format: THREE.RGBAFormat
});
// Downsampling material
this.downsampleMaterial = new THREE.ShaderMaterial({
uniforms: {
tDiffuse: { value: null },
resolution: { value: new THREE.Vector2(window.innerWidth, window.innerHeight) }
},
vertexShader: `...`, // simple pass-through vertex shader
fragmentShader: `
uniform sampler2D tDiffuse;
uniform vec2 resolution;
varying vec2 vUv;
void main() {
vec2 pixelSize = 1.0 / resolution;
vec4 color = vec4(0.0);
// 5x5 盒式滤波(25 个采样)
for(int x = -2; x <= 2; x++) {
for(int y = -2; y <= 2; y++) {
color += texture2D(tDiffuse, vUv + vec2(x, y) * pixelSize);
}
}
color /= 25.0;
gl_FragColor = color;
}
`
});
}
// Adjust quality based on the measured frame rate.
updateQualitySettings(fps) {
// Resolution: drop to half res when slow, restore when fast again.
if (fps < 50 && this.downsampleFactor < 2) {
this.downsampleFactor = 2;
this.createDownsampleTargets();
console.log('SSR: 降低到半分辨率');
} else if (fps > 58 && this.downsampleFactor > 1) {
this.downsampleFactor = 1;
this.createDownsampleTargets();
console.log('SSR: 恢复到全分辨率');
}
// Step-size tuning. Bug fix: guard on this.ssrPass — it is never created
// by this class, and the original dereferenced it unconditionally, throwing
// a TypeError whenever no pass had been attached.
if (this.adaptiveStepSize && this.ssrPass) {
const stepScale = Math.min(fps / this.targetFPS, 1.0);
const stepSize = this.minStepSize + (this.maxStepSize - this.minStepSize) * stepScale;
// NOTE(review): this writes the step size into `thickness` — it looks
// like it should target a step-size uniform instead; confirm intent.
this.ssrPass.thickness = stepSize;
}
if (this.adaptiveMaxSteps) {
// Bug fix: the computed value was previously dropped on the floor.
// Expose it (clamped to >= 1) so the caller can push it into the SSR
// shader uniform.
this.currentMaxSteps = Math.max(1, Math.floor(this.baseMaxSteps * (fps / this.targetFPS)));
}
}
// Frustum-culling helper: returns an updater + containment test so SSR work
// can be skipped for off-screen objects.
frustumCullSSR() {
const frustum = new THREE.Frustum();
const projScreenMatrix = new THREE.Matrix4();
return {
update: function(camera) {
camera.updateMatrixWorld();
projScreenMatrix.multiplyMatrices(
camera.projectionMatrix,
camera.matrixWorldInverse
);
frustum.setFromProjectionMatrix(projScreenMatrix);
return frustum;
},
isInFrustum: function(object) {
const box = new THREE.Box3().setFromObject(object);
return frustum.intersectsBox(box);
}
};
}
// Layered SSR (LOD): quality tiers by distance from the camera.
setupLayeredSSR() {
// Near: high-quality reflections
this.nearLayer = {
maxDistance: 5.0,
resolution: 1.0,
maxSteps: 40,
blurSize: 1.0
};
// Middle: medium quality
this.midLayer = {
maxDistance: 15.0,
resolution: 0.5,
maxSteps: 20,
blurSize: 2.0
};
// Far: low quality
this.farLayer = {
maxDistance: 50.0,
resolution: 0.25,
maxSteps: 10,
blurSize: 4.0
};
}
}
javascript
// Catalog of common SSR problems with suggested parameter remedies, plus a
// TAA (temporal anti-aliasing) blend material to reduce SSR flicker and a
// generator for edge-artifact-suppression GLSL helpers.
class SSRProblemSolver {
constructor() {
this.solutions = {
// 1. Edge artifacts
edgeArtifacts: {
depthBias: 0.001, // depth offset
thickness: 0.02, // increase intersection thickness
edgeFade: 0.1, // edge fade-out
screenEdgeFade: true // fade near screen borders
},
// 2. Performance issues
performance: {
downsampling: 1, // downsample factor
maxRayDistance: 50, // maximum ray distance
adaptiveSteps: true, // adaptive step count
distanceCulling: true // distance culling
},
// 3. Missing reflections
missingReflections: {
fallbackCubemap: null, // fallback cubemap
maxSteps: 100, // more march steps
binarySearch: true, // enable binary refinement
jitter: 0.01 // add jitter
},
// 4. Noise problems
noise: {
temporalAccumulation: true, // temporal accumulation
blurPasses: 2, // blur pass count
bilateralFilter: true, // bilateral filter
denoiseStrength: 0.5 // denoise strength
}
};
this.initTemporalAA();
}
// Build the TAA material blending the current frame into the history buffer.
initTemporalAA() {
// Temporal anti-aliasing (TAA) to reduce SSR flicker
this.historyBuffer = null;
this.frameCount = 0;
this.taaMaterial = new THREE.ShaderMaterial({
uniforms: {
tCurrent: { value: null },
tPrevious: { value: null },
resolution: { value: new THREE.Vector2() },
jitterOffset: { value: new THREE.Vector2() }
},
vertexShader: `...`,
fragmentShader: `
uniform sampler2D tCurrent;
uniform sampler2D tPrevious;
uniform vec2 resolution;
uniform vec2 jitterOffset;
varying vec2 vUv;
void main() {
// 当前帧
vec2 jitteredUV = vUv + jitterOffset / resolution;
vec4 current = texture2D(tCurrent, jitteredUV);
// 上一帧
vec4 previous = texture2D(tPrevious, vUv);
// 混合(95%历史 + 5%当前)
vec4 result = mix(previous, current, 0.05);
// 防止颜色泄漏
vec3 minColor = min(current.rgb, previous.rgb);
vec3 maxColor = max(current.rgb, previous.rgb);
result.rgb = clamp(result.rgb, minColor, maxColor);
gl_FragColor = result;
}
`
});
// Bug fix (comment only): mix(previous, current, 0.05) keeps 95% of the
// *history* and 5% of the *current* frame; the original shader comment
// stated the ratio the other way around.
}
// GLSL helpers for suppressing SSR edge artifacts (screen-border, depth and
// normal discontinuity factors). Returns the snippet as a string to splice
// into a larger shader.
createEdgeFixShader() {
return `
// 屏幕空间边缘检测
float getScreenEdgeFactor(vec2 uv) {
vec2 edge = abs(uv - 0.5) * 2.0;
edge = smoothstep(0.8, 1.0, edge);
return 1.0 - max(edge.x, edge.y);
}
// 深度边缘检测
float getDepthEdgeFactor(sampler2D depthTex, vec2 uv, vec2 pixelSize) {
float depthCenter = texture2D(depthTex, uv).r;
float depthDiff = 0.0;
for(int x = -1; x <= 1; x++) {
for(int y = -1; y <= 1; y++) {
if(x == 0 && y == 0) continue;
float depth = texture2D(depthTex, uv + vec2(x, y) * pixelSize).r;
depthDiff += abs(depth - depthCenter);
}
}
depthDiff /= 8.0;
return 1.0 - smoothstep(0.001, 0.01, depthDiff);
}
// 法线边缘检测
float getNormalEdgeFactor(sampler2D normalTex, vec2 uv, vec2 pixelSize) {
vec3 normalCenter = texture2D(normalTex, uv).rgb * 2.0 - 1.0;
float normalDiff = 0.0;
for(int x = -1; x <= 1; x++) {
for(int y = -1; y <= 1; y++) {
if(x == 0 && y == 0) continue;
vec3 normal = texture2D(normalTex, uv + vec2(x, y) * pixelSize).rgb * 2.0 - 1.0;
normalDiff += 1.0 - dot(normalCenter, normal);
}
}
normalDiff /= 8.0;
return 1.0 - smoothstep(0.1, 0.5, normalDiff);
}
`;
}
}
javascript
// Full post-processing chain combining SSAO, SSR, bloom and tone-mapped
// output, with runtime effect toggles and quality presets.
// NOTE(review): relies on module-scope `renderer`, `scene`, `camera`, `width`
// and `height` globals defined elsewhere — confirm they exist before
// construction.
class CombinedEffects {
constructor() {
this.composer = new EffectComposer(renderer);
// Build the complete post-processing chain
this.setupPostProcessingChain();
}
setupPostProcessingChain() {
// Bug fix: the passes were previously stored in local consts, so
// toggleEffect()'s lookups of this.ssrPass / this.ssaoPass / this.bloomPass
// always found undefined and toggling silently did nothing. Store them on
// the instance instead.
// 1. Base render pass
this.renderPass = new RenderPass(scene, camera);
this.composer.addPass(this.renderPass);
// 2. SSAO (screen-space ambient occlusion)
this.ssaoPass = new SSAOPass(scene, camera, width, height);
this.ssaoPass.kernelRadius = 0.1;
this.ssaoPass.minDistance = 0.001;
this.ssaoPass.maxDistance = 0.02;
this.composer.addPass(this.ssaoPass);
// 3. SSR
this.ssrPass = new SSRPass({
renderer: renderer,
scene: scene,
camera: camera,
width: width,
height: height
});
this.composer.addPass(this.ssrPass);
// 4. Bloom
// Bug fix (comments only): UnrealBloomPass takes (resolution, strength,
// radius, threshold); the original labels called 0.4 the threshold and
// 0.85 the smoothing.
this.bloomPass = new UnrealBloomPass(
new THREE.Vector2(width, height),
1.5, // strength
0.4, // radius
0.85 // threshold
);
this.composer.addPass(this.bloomPass);
// 5. Tone mapping / color-space output
this.outputPass = new OutputPass();
this.composer.addPass(this.outputPass);
}
// Enable/disable a named effect pass at runtime. Unknown names are ignored.
toggleEffect(effectName, enabled) {
const passes = {
'ssr': this.ssrPass,
'ssao': this.ssaoPass,
'bloom': this.bloomPass
};
if (passes[effectName]) {
passes[effectName].enabled = enabled;
}
}
// Apply a preset config: flips the enabled flag of each known pass.
// Bug fix: setQualityPreset() called this.applyConfig(), which did not exist
// and always threw. The remaining preset keys (resolutions, step counts, …)
// are pass-internal tuning and are currently ignored here.
applyConfig(config) {
if ('ssrEnabled' in config) this.toggleEffect('ssr', config.ssrEnabled);
if ('ssaoEnabled' in config) this.toggleEffect('ssao', config.ssaoEnabled);
if ('bloomEnabled' in config) this.toggleEffect('bloom', config.bloomEnabled);
}
// Quality presets: named bundles of effect flags and SSR tuning values.
setQualityPreset(preset) {
const presets = {
'low': {
ssrEnabled: false,
ssaoEnabled: false,
bloomEnabled: false,
resolution: 0.5
},
'medium': {
ssrEnabled: true,
ssaoEnabled: true,
bloomEnabled: false,
ssrResolution: 0.75,
ssrMaxSteps: 20
},
'high': {
ssrEnabled: true,
ssaoEnabled: true,
bloomEnabled: true,
ssrResolution: 1.0,
ssrMaxSteps: 40,
ssrBinarySearch: true
},
'ultra': {
ssrEnabled: true,
ssaoEnabled: true,
bloomEnabled: true,
ssrResolution: 1.0,
ssrMaxSteps: 60,
ssrBinarySearch: true,
ssrTemporalAA: true,
ssrDenoise: true
}
};
const config = presets[preset];
if (config) {
this.applyConfig(config);
}
}
}
SSR 优势:
实时性能好
支持动态场景
反射内容准确(限于屏幕内可见内容)
局限性:
只能反射屏幕内的内容
边缘容易产生瑕疵
需要深度/法线缓冲区
最佳实践:
结合立方体贴图作为后备
使用时间抗锯齿减少闪烁
根据性能动态调整质量
合理设置射线步进参数
性能关键:
控制最大步数
使用降采样
距离衰减
视锥体剔除
SSR 是实时渲染中非常强大的技术,正确使用可以显著提升场景的真实感。