javascript - Post-processing effects in three.js with a static background image


So I found a neat code snippet by Andrew Berg (it can be seen here: https://codepen.io/abberg/pen/pbWkjg?editors=0010) for creating volumetric light in three.js; he uses post-processing and shaders to create a volumetric light sphere.

My question is how to modify my code so that a static image is rendered in the background while still using the composer renderer that creates the volumetric light; at the moment I can do one or the other, but not both.

If I remove the render(); call in the animate function and uncomment
//renderer.render(backgroundScene,backgroundCamera);
//renderer.render(scene,camera);

I can see the static background image, but the post-processing effect that creates the volumetric light is not applied. Conversely, if I keep the render(); call and leave those lines commented out, I can see the post-processing effect and the light-sphere shader, but no background.

main.js

var scene, camera, renderer,
testcube, composer;

var backgroundMesh, backgroundScene, backgroundCamera,
backgroundComposer;

var occlusionComposer, occlusionRenderTarget,
occlusionBox, lightSphere, sphereUniforms;

var OCCLUSION_LAYER, DEFAULT_LAYER;

var incr;

function init(){

incr = 0;
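// Objects on OCCLUSION_LAYER (1) are rendered only during the occlusion pass; everything else stays on DEFAULT_LAYER (0).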
OCCLUSION_LAYER = 1;
DEFAULT_LAYER = 0;

renderer = new THREE.WebGLRenderer({ antialias: false, alpha:true });
renderer.setSize(window.innerWidth,window.innerHeight);
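// autoClear is disabled so that multiple render calls per frame can draw into the same canvas.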
renderer.autoClear = false;
renderer.setClearColor(0x000000);


scene = new THREE.Scene();

var sphereGeo = new THREE.SphereGeometry(5,10,10);
var sphereMat = new THREE.MeshBasicMaterial({color:0xffffff});
lightSphere = new THREE.Mesh(sphereGeo,sphereMat);
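// The light sphere is the only object placed on the occlusion layer, so the occlusion pass sees it in isolation.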
lightSphere.layers.set(OCCLUSION_LAYER);
lightSphere.position.z = 0;
lightSphere.position.y = -150;
lightSphere.position.x = -150;

scene.add(lightSphere);

var boxGeo = new THREE.BoxGeometry(50,50,50);
var boxMat = new THREE.MeshBasicMaterial({color:0xffff13});
testcube = new THREE.Mesh(boxGeo,boxMat);

scene.add(testcube);

testcube.position.z = 0;
testcube.position.y = -100;
testcube.position.x = -90;

camera = new THREE.PerspectiveCamera(50, window.innerWidth/window.innerHeight, 0.1,7000);

camera.position.z = 200;
camera.position.x = -150;
camera.position.y = -150;
scene.add(camera);

var light = new THREE.AmbientLight(0xffffff);
scene.add(light);

var manager = new THREE.LoadingManager();
var loader = new THREE.TextureLoader( manager );

backgroundScene = new THREE.Scene();
backgroundCamera = new THREE.Camera();
backgroundScene.add( backgroundCamera );

loader.load( 'img/background.png', function( texture ) {
backgroundMesh = new THREE.Mesh(
new THREE.PlaneGeometry(2, 2, 0),
new THREE.MeshBasicMaterial({
map: texture
})
);

backgroundMesh.material.depthTest = false;
backgroundMesh.material.depthWrite = false;
backgroundScene.add( backgroundMesh );
});


document.body.appendChild(renderer.domElement);


}
function preprocess(){

var pass;
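// The occlusion composer renders the occlusion layer into a half-resolution target and applies the volumetric light shader to it.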
occlusionRenderTarget = new THREE.WebGLRenderTarget( window.innerWidth * 0.5 , window.innerHeight * 0.5 );
occlusionComposer = new THREE.EffectComposer( renderer, occlusionRenderTarget);
occlusionComposer.addPass( new THREE.RenderPass( scene, camera ) );
pass = new THREE.ShaderPass( THREE.VolumetericLightShader );
pass.needsSwap = false;
occlusionComposer.addPass( pass );

sphereUniforms = pass.uniforms;

composer = new THREE.EffectComposer( renderer );
composer.addPass( new THREE.RenderPass( scene, camera ) );
pass = new THREE.ShaderPass( THREE.AdditiveBlendingShader );
pass.uniforms.tAdd.value = occlusionRenderTarget.texture;
composer.addPass( pass );
pass.renderToScreen = true;

}
function render(){


camera.layers.set(OCCLUSION_LAYER);
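// The camera now sees only the occlusion layer; render it through the volumetric light shader.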
occlusionComposer.render();
//backgroundComposer.render();
camera.layers.set(DEFAULT_LAYER);
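// Back on the default layer: render the normal scene and additively blend the occlusion result on top.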
composer.render();

}
function animate(){


requestAnimationFrame(animate);
renderer.clear();

render();
//renderer.render(backgroundScene,backgroundCamera);
//renderer.render(scene,camera);
}
init();
preprocess();
animate();

shader.js

THREE.VolumetericLightShader = {
uniforms: {
tDiffuse: {value:null},
lightPosition: {value: new THREE.Vector2(0.5, 0.5)},
exposure: {value: 0.18},
decay: {value: 0.95},
density: {value: 0.8},
weight: {value: 0.62},
samples: {value: 100}
},

vertexShader: [
"varying vec2 vUv;",
"void main() {",
"vUv = uv;",
"gl_Position = projectionMatrix * modelViewMatrix * vec4( position, 1.0 );",
"}"
].join("\n"),

fragmentShader: [
"varying vec2 vUv;",
"uniform sampler2D tDiffuse;",
"uniform vec2 lightPosition;",
"uniform float exposure;",
"uniform float decay;",
"uniform float density;",
"uniform float weight;",
"uniform int samples;",
"const int MAX_SAMPLES = 100;",
"void main()",
"{",
"vec2 texCoord = vUv;",
"vec2 deltaTextCoord = texCoord - lightPosition;",
"deltaTextCoord *= 1.0 / float(samples) * density;",
"vec4 color = texture2D(tDiffuse, texCoord);",
"float illuminationDecay = 1.0;",
"for(int i=0; i < MAX_SAMPLES; i++)",
"{",
"if(i == samples){",
"break;",
"}",
"texCoord -= deltaTextCoord;",
"vec4 sample = texture2D(tDiffuse, texCoord);",
"sample *= illuminationDecay * weight;",
"color += sample;",
"illuminationDecay *= decay;",
"}",
"gl_FragColor = color * exposure;",
"}"
].join("\n")
};

THREE.AdditiveBlendingShader = {
uniforms: {
tDiffuse: { value:null },
tAdd: { value:null }
},

vertexShader: [
"varying vec2 vUv;",
"void main() {",
"vUv = uv;",
"gl_Position = projectionMatrix * modelViewMatrix * vec4( position, 1.0 );",
"}"
].join("\n"),

fragmentShader: [
"uniform sampler2D tDiffuse;",
"uniform sampler2D tAdd;",
"varying vec2 vUv;",
"void main() {",
"vec4 color = texture2D( tDiffuse, vUv );",
"vec4 add = texture2D( tAdd, vUv );",
"gl_FragColor = color + add;",
"}"
].join("\n")
};

THREE.PassThroughShader = {
uniforms: {
tDiffuse: { value: null }
},

vertexShader: [
"varying vec2 vUv;",
"void main() {",
"vUv = uv;",
"gl_Position = projectionMatrix * modelViewMatrix * vec4( position, 1.0 );",
"}"
].join( "\n" ),

fragmentShader: [
"uniform sampler2D tDiffuse;",
"varying vec2 vUv;",
"void main() {",
"gl_FragColor = texture2D( tDiffuse, vec2( vUv.x, vUv.y ) );",
"}"
].join( "\n" )
};

Best Answer

First, create a THREE.Scene for the background and set the scene's background property to the background texture:

backgroundScene = new THREE.Scene();
backgroundCamera = new THREE.Camera();
backgroundScene.add( backgroundCamera );

var manager = new THREE.LoadingManager();
loader = new THREE.TextureLoader( manager );
loader.setCrossOrigin("");

var backTexture = loader.load( "img/background.png",
function ( texture ) {
var img = texture.image;
bgWidth= img.width;
bgHeight = img.height;
}
);
backgroundScene.background = backTexture;

Make sure that the transparent property of the THREE.Material (the material property) of the final THREE.ShaderPass is set to true, so that the composer output is blended over the background that has already been drawn to the canvas:

pass = new THREE.ShaderPass( THREE.AdditiveBlendingShader );
pass.uniforms.tAdd.value = occlusionRenderTarget.texture;
composer.addPass( pass );
pass.renderToScreen = true;
pass.material.transparent = true;

Finally, you have to render the background scene before rendering the THREE.EffectComposers (in the question's code, keep the render() call and draw the background scene at the start of render()):

renderer.render(backgroundScene,backgroundCamera);

camera.layers.set(OCCLUSION_LAYER);
occlusionComposer.render();
camera.layers.set(DEFAULT_LAYER);
composer.render();


See the example below:

THREE.VolumetericLightShader = {
uniforms: {
tDiffuse: {value:null},
lightPosition: {value: new THREE.Vector2(0.5, 0.5)},
exposure: {value: 0.18},
decay: {value: 0.95},
density: {value: 0.8},
weight: {value: 0.62},
samples: {value: 100}
},

vertexShader: [
"varying vec2 vUv;",
"void main() {",
"vUv = uv;",
"gl_Position = projectionMatrix * modelViewMatrix * vec4( position, 1.0 );",
"}"
].join("\n"),

fragmentShader: [
"varying vec2 vUv;",
"uniform sampler2D tDiffuse;",
"uniform vec2 lightPosition;",
"uniform float exposure;",
"uniform float decay;",
"uniform float density;",
"uniform float weight;",
"uniform int samples;",
"const int MAX_SAMPLES = 100;",
"void main()",
"{",
"vec2 texCoord = vUv;",
"vec2 deltaTextCoord = texCoord - lightPosition;",
"deltaTextCoord *= 1.0 / float(samples) * density;",
"vec4 color = texture2D(tDiffuse, texCoord);",
"float illuminationDecay = 1.0;",
"for(int i=0; i < MAX_SAMPLES; i++)",
"{",
"if(i == samples){",
"break;",
"}",
"texCoord -= deltaTextCoord;",
"vec4 sample = texture2D(tDiffuse, texCoord);",
"sample *= illuminationDecay * weight;",
"color += sample;",
"illuminationDecay *= decay;",
"}",
"gl_FragColor = color * exposure;",
"}"
].join("\n")
};

THREE.AdditiveBlendingShader = {
uniforms: {
tDiffuse: { value:null },
tAdd: { value:null },
},

vertexShader: [
"varying vec2 vUv;",
"void main() {",
"vUv = uv;",
"gl_Position = projectionMatrix * modelViewMatrix * vec4( position, 1.0 );",
"}"
].join("\n"),

fragmentShader: [
"uniform sampler2D tDiffuse;",
"uniform sampler2D tAdd;",
"varying vec2 vUv;",
"void main() {",
"vec4 color = texture2D( tDiffuse, vUv );",
"vec4 add = texture2D( tAdd, vUv );",
"gl_FragColor = color + add;",
"}"
].join("\n")
};

var scene, camera, renderer, composer;
var backgroundScene, backgroundCamera;
var occlusionComposer, occlusionRenderTarget,
occlusionBox, lightSphere, sphereUniforms;

var OCCLUSION_LAYER, DEFAULT_LAYER;

function init(){

OCCLUSION_LAYER = 1;
DEFAULT_LAYER = 0;

renderer = new THREE.WebGLRenderer({ antialias: false, alpha:true });
renderer.setSize(window.innerWidth,window.innerHeight);
renderer.autoClear = false;
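// Clear to fully transparent black; the background scene rendered each frame provides the visible backdrop.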
renderer.setClearColor(0x000000, 0);
window.onresize = resize;

scene = new THREE.Scene();

var sphereGeo = new THREE.SphereGeometry(5,10,10);
var sphereMat = new THREE.MeshBasicMaterial({color:0xffffff});
lightSphere = new THREE.Mesh(sphereGeo,sphereMat);
lightSphere.layers.set(OCCLUSION_LAYER);
lightSphere.position.z = 0;
lightSphere.position.y = -150;
lightSphere.position.x = -150;
scene.add(lightSphere);

var boxGeo = new THREE.BoxGeometry(50,50,50);
var boxMat = new THREE.MeshBasicMaterial({color:0xffff13});
var testcube = new THREE.Mesh(boxGeo,boxMat);
scene.add(testcube);
testcube.position.z = 0;
testcube.position.y = -100;
testcube.position.x = -90;

camera = new THREE.PerspectiveCamera(50, window.innerWidth/window.innerHeight, 0.1,7000);
camera.position.z = 200;
camera.position.x = -150;
camera.position.y = -150;
scene.add(camera);

var light = new THREE.AmbientLight(0xffffff);
scene.add(light);

backgroundScene = new THREE.Scene();
backgroundCamera = new THREE.Camera();
backgroundScene.add( backgroundCamera );

var manager = new THREE.LoadingManager();
loader = new THREE.TextureLoader( manager );
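// The demo texture is loaded from another origin, so CORS has to be enabled on the loader.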
loader.setCrossOrigin("");

var backTexture = loader.load("https://raw.githubusercontent.com/Rabbid76/graphics-snippets/master/resource/texture/background.jpg",
function ( texture ) {
var img = texture.image;
bgWidth= img.width;
bgHeight = img.height;
}
);
backgroundScene.background = backTexture;

document.body.appendChild(renderer.domElement);
}

function preprocess(){

occlusionRenderTarget = new THREE.WebGLRenderTarget( window.innerWidth * 0.5 , window.innerHeight * 0.5 );
occlusionComposer = new THREE.EffectComposer( renderer, occlusionRenderTarget);
occlusionComposer.addPass( new THREE.RenderPass( scene, camera ) );
var pass = new THREE.ShaderPass( THREE.VolumetericLightShader );
pass.needsSwap = false;
occlusionComposer.addPass( pass );

composer = new THREE.EffectComposer( renderer );
composer.addPass( new THREE.RenderPass( scene, camera ) );
pass = new THREE.ShaderPass( THREE.AdditiveBlendingShader );
pass.uniforms.tAdd.value = occlusionRenderTarget.texture;
composer.addPass( pass );
pass.renderToScreen = true;
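// The final pass must be transparent so the background already drawn to the canvas stays visible.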
pass.material.transparent = true;
}

function render(){
renderer.clear();
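// Draw the static background first; with autoClear off, the composer passes are layered on top of it.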
renderer.render(backgroundScene,backgroundCamera);

camera.layers.set(OCCLUSION_LAYER);
occlusionComposer.render();
camera.layers.set(DEFAULT_LAYER);
composer.render();
}

function resize() {

var aspect = window.innerWidth / window.innerHeight;
renderer.setSize(window.innerWidth, window.innerHeight);
camera.aspect = aspect;
camera.updateProjectionMatrix();
}

function animate(){
requestAnimationFrame(animate);
render();
}

init();
preprocess();
animate();
<script src="https://threejs.org/build/three.min.js"></script>
<script src="https://threejs.org/examples/js/controls/OrbitControls.js"></script>
<script src="https://abberg.github.io/lib/shaders/CopyShader.js"></script>
<script src="https://abberg.github.io/lib/postprocessing/EffectComposer.js"></script>
<script src="https://abberg.github.io/lib/postprocessing/RenderPass.js"></script>
<script src="https://abberg.github.io/lib/postprocessing/ShaderPass.js"></script>

Regarding javascript - post-processing effects in three.js with a static background image, a similar question can be found on Stack Overflow: https://stackoverflow.com/questions/48635441/
