Examples: webgl_postprocessing_hbao (#27114)

Gernot Steinegger · 1 year ago · commit d962a78185

+ 1 - 0
docs/examples/en/postprocessing/EffectComposer.html

@@ -39,6 +39,7 @@
 			[example:webgl_postprocessing_fxaa postprocessing fxaa]<br />
 			[example:webgl_postprocessing_glitch postprocessing glitch]<br />
 			[example:webgl_postprocessing_godrays postprocessing godrays]<br />
+			[example:webgl_postprocessing_hbao postprocessing hbao]<br />
 			[example:webgl_postprocessing_masking postprocessing masking]<br />
 			[example:webgl_postprocessing_outline postprocessing outline]<br />
 			[example:webgl_postprocessing_pixel postprocessing pixelate]<br />

+ 1 - 0
docs/examples/zh/postprocessing/EffectComposer.html

@@ -38,6 +38,7 @@
 			[example:webgl_postprocessing_fxaa postprocessing fxaa]<br />
 			[example:webgl_postprocessing_glitch postprocessing glitch]<br />
 			[example:webgl_postprocessing_godrays postprocessing godrays]<br />
+			[example:webgl_postprocessing_hbao postprocessing hbao]<br />
 			[example:webgl_postprocessing_masking postprocessing masking]<br />
 			[example:webgl_postprocessing_outline postprocessing outline]<br />
 			[example:webgl_postprocessing_pixel postprocessing pixelate]<br />

+ 1 - 0
examples/files.json

@@ -245,6 +245,7 @@
 		"webgl_postprocessing_fxaa",
 		"webgl_postprocessing_glitch",
 		"webgl_postprocessing_godrays",
+		"webgl_postprocessing_hbao",
 		"webgl_postprocessing_rgb_halftone",
 		"webgl_postprocessing_masking",
 		"webgl_postprocessing_ssaa",

+ 501 - 0
examples/jsm/postprocessing/HBAOPass.js

@@ -0,0 +1,501 @@
+import {
+	AddEquation,
+	Color,
+	CustomBlending,
+	DataTexture,
+	DepthStencilFormat,
+	DepthTexture,
+	DstAlphaFactor,
+	DstColorFactor,
+	HalfFloatType,
+	MeshNormalMaterial,
+	NearestFilter,
+	NoBlending,
+	RepeatWrapping,
+	RGBAFormat,
+	ShaderMaterial,
+	UniformsUtils,
+	UnsignedByteType,
+	UnsignedInt248Type,
+	WebGLRenderTarget,
+	ZeroFactor
+} from 'three';
+import { Pass, FullScreenQuad } from './Pass.js';
+import { generateHaboSampleKernelInitializer, HBAOShader, HBAODepthShader } from '../shaders/HBAOShader.js';
+import { generatePdSamplePointInitializer, PoissonDenoiseShader } from '../shaders/PoissonDenoiseShader.js';
+import { CopyShader } from '../shaders/CopyShader.js';
+import { SimplexNoise } from '../math/SimplexNoise.js';
+
+class HBAOPass extends Pass {
+
+	constructor( scene, camera, width, height, parameters ) {
+
+		super();
+
+		this.width = ( width !== undefined ) ? width : 512;
+		this.height = ( height !== undefined ) ? height : 512;
+		this.clear = true;
+		this.camera = camera;
+		this.scene = scene;
+		this.output = 0;
+		this._renderGBuffer = true;
+		this._visibilityCache = new Map();
+
+		this.rings = 4;
+		this.samples = 16;
+
+		this.noiseTexture = this.generateNoise();
+
+		this.hbaoRenderTarget = new WebGLRenderTarget( this.width, this.height, { type: HalfFloatType } );
+		this.pdRenderTarget = this.hbaoRenderTarget.clone();
+
+		this.hbaoMaterial = new ShaderMaterial( {
+			defines: Object.assign( {}, HBAOShader.defines ),
+			uniforms: UniformsUtils.clone( HBAOShader.uniforms ),
+			vertexShader: HBAOShader.vertexShader,
+			fragmentShader: HBAOShader.fragmentShader,
+			blending: NoBlending,
+			depthTest: false,
+			depthWrite: false,
+		} );
+		this.hbaoMaterial.defines[ 'PERSPECTIVE_CAMERA' ] = this.camera.isPerspectiveCamera ? 1 : 0;
+		this.hbaoMaterial.uniforms[ 'tNoise' ].value = this.noiseTexture;
+		this.hbaoMaterial.uniforms[ 'resolution' ].value.set( this.width, this.height );
+		this.hbaoMaterial.uniforms[ 'cameraNear' ].value = this.camera.near;
+		this.hbaoMaterial.uniforms[ 'cameraFar' ].value = this.camera.far;
+		this.hbaoMaterial.uniforms[ 'radius' ].value = 2;
+		this.hbaoMaterial.uniforms[ 'distanceExponent' ].value = 2;
+		this.hbaoMaterial.uniforms[ 'bias' ].value = 0.01;
+
+		this.normalMaterial = new MeshNormalMaterial();
+		this.normalMaterial.blending = NoBlending;
+
+		this.pdMaterial = new ShaderMaterial( {
+			defines: Object.assign( {}, PoissonDenoiseShader.defines ),
+			uniforms: UniformsUtils.clone( PoissonDenoiseShader.uniforms ),
+			vertexShader: PoissonDenoiseShader.vertexShader,
+			fragmentShader: PoissonDenoiseShader.fragmentShader,
+			depthTest: false,
+			depthWrite: false,
+		} );
+		this.pdMaterial.uniforms[ 'tDiffuse' ].value = this.hbaoRenderTarget.texture;
+		this.pdMaterial.uniforms[ 'tNoise' ].value = this.noiseTexture;
+		this.pdMaterial.uniforms[ 'resolution' ].value.set( this.width, this.height );
+		this.pdMaterial.uniforms[ 'lumaPhi' ].value = 10;
+		this.pdMaterial.uniforms[ 'depthPhi' ].value = 2;
+		this.pdMaterial.uniforms[ 'normalPhi' ].value = 3;
+
+		this.depthRenderMaterial = new ShaderMaterial( {
+			defines: Object.assign( {}, HBAODepthShader.defines ),
+			uniforms: UniformsUtils.clone( HBAODepthShader.uniforms ),
+			vertexShader: HBAODepthShader.vertexShader,
+			fragmentShader: HBAODepthShader.fragmentShader,
+			blending: NoBlending
+		} );
+		this.depthRenderMaterial.uniforms[ 'cameraNear' ].value = this.camera.near;
+		this.depthRenderMaterial.uniforms[ 'cameraFar' ].value = this.camera.far;
+
+		this.copyMaterial = new ShaderMaterial( {
+			uniforms: UniformsUtils.clone( CopyShader.uniforms ),
+			vertexShader: CopyShader.vertexShader,
+			fragmentShader: CopyShader.fragmentShader,
+			transparent: true,
+			depthTest: false,
+			depthWrite: false,
+			blendSrc: DstColorFactor,
+			blendDst: ZeroFactor,
+			blendEquation: AddEquation,
+			blendSrcAlpha: DstAlphaFactor,
+			blendDstAlpha: ZeroFactor,
+			blendEquationAlpha: AddEquation
+		} );
+
+		this.fsQuad = new FullScreenQuad( null );
+
+		this.originalClearColor = new Color();
+
+		this.setTextures( parameters ? parameters.depthTexture : undefined, parameters ? parameters.normalTexture : undefined );
+
+	}
+
+	dispose() {
+
+		this.noiseTexture.dispose();
+		this.normalRenderTarget.dispose();
+		this.hbaoRenderTarget.dispose();
+		this.pdRenderTarget.dispose();
+		this.normalMaterial.dispose();
+		this.pdMaterial.dispose();
+		this.copyMaterial.dispose();
+		this.depthRenderMaterial.dispose();
+		this.fsQuad.dispose();
+
+	}
+
+	setTextures( depthTexture, normalTexture ) {
+
+		if ( depthTexture !== undefined ) {
+
+			this.depthTexture = depthTexture;
+			this.normalTexture = normalTexture;
+			this._renderGBuffer = false;
+
+		} else {
+
+			this.depthTexture = new DepthTexture();
+			this.depthTexture.format = DepthStencilFormat;
+			this.depthTexture.type = UnsignedInt248Type;
+
+			this.normalRenderTarget = new WebGLRenderTarget( this.width, this.height, {
+				minFilter: NearestFilter,
+				magFilter: NearestFilter,
+				type: HalfFloatType,
+				depthTexture: this.depthTexture
+			} );
+			this.normalTexture = this.normalRenderTarget.texture;
+			this._renderGBuffer = true;
+
+		}
+
+		const normalVectorType = ( this.normalTexture ) ? 1 : 0;
+		const depthValueSource = ( this.depthTexture === this.normalTexture ) ? 1 : 0;
+
+		this.hbaoMaterial.defines[ 'NORMAL_VECTOR_TYPE' ] = normalVectorType;
+		this.hbaoMaterial.defines[ 'DEPTH_VALUE_SOURCE' ] = depthValueSource;
+		this.hbaoMaterial.uniforms[ 'tNormal' ].value = this.normalTexture;
+		this.hbaoMaterial.uniforms[ 'tDepth' ].value = this.depthTexture;
+
+		this.pdMaterial.defines[ 'NORMAL_VECTOR_TYPE' ] = normalVectorType;
+		this.pdMaterial.defines[ 'DEPTH_VALUE_SOURCE' ] = depthValueSource;
+		this.pdMaterial.uniforms[ 'tNormal' ].value = this.normalTexture;
+		this.pdMaterial.uniforms[ 'tDepth' ].value = this.depthTexture;
+
+		// use the pass-level depth texture so this also works when an external depth texture is supplied
+		this.depthRenderMaterial.uniforms[ 'tDepth' ].value = this.depthTexture;
+
+	}
+
+	updateHbaoMaterial( parameters ) {
+
+		if ( parameters.radius !== undefined ) {
+
+			this.hbaoMaterial.uniforms[ 'radius' ].value = parameters.radius;
+
+		}
+
+		if ( parameters.distanceExponent !== undefined ) {
+
+			this.hbaoMaterial.uniforms[ 'distanceExponent' ].value = parameters.distanceExponent;
+
+		}
+
+		if ( parameters.bias !== undefined ) {
+
+			this.hbaoMaterial.uniforms[ 'bias' ].value = parameters.bias;
+
+		}
+
+		if ( parameters.samples !== undefined && parameters.samples !== this.hbaoMaterial.defines[ 'SAMPLES' ] ) {
+
+			this.hbaoMaterial.defines[ 'SAMPLES' ] = parameters.samples;
+			this.hbaoMaterial.defines[ 'SAMPLE_VECTORS' ] = generateHaboSampleKernelInitializer( parameters.samples );
+			this.hbaoMaterial.needsUpdate = true;
+
+		}
+
+	}
+
+	updatePdMaterial( parameters ) {
+
+		let updateShader = false;
+
+		if ( parameters.lumaPhi !== undefined ) {
+
+			this.pdMaterial.uniforms[ 'lumaPhi' ].value = parameters.lumaPhi;
+
+		}
+
+		if ( parameters.depthPhi !== undefined ) {
+
+			this.pdMaterial.uniforms[ 'depthPhi' ].value = parameters.depthPhi;
+
+		}
+
+		if ( parameters.normalPhi !== undefined ) {
+
+			this.pdMaterial.uniforms[ 'normalPhi' ].value = parameters.normalPhi;
+
+		}
+
+		if ( parameters.radius !== undefined && parameters.radius !== this.radius ) {
+
+			this.pdMaterial.uniforms[ 'radius' ].value = parameters.radius;
+
+		}
+
+		if ( parameters.rings !== undefined && parameters.rings !== this.rings ) {
+
+			this.rings = parameters.rings;
+			updateShader = true;
+
+		}
+
+		if ( parameters.samples !== undefined && parameters.samples !== this.samples ) {
+
+			this.samples = parameters.samples;
+			updateShader = true;
+
+		}
+
+		if ( updateShader ) {
+
+			// use the cached values so a rings-only update does not clear SAMPLES
+			this.pdMaterial.defines[ 'SAMPLES' ] = this.samples;
+			this.pdMaterial.defines[ 'SAMPLE_VECTORS' ] = generatePdSamplePointInitializer( this.samples, this.rings );
+			this.pdMaterial.needsUpdate = true;
+
+		}
+
+	}
+
+	render( renderer, writeBuffer, readBuffer /*, deltaTime, maskActive */ ) {
+
+		// render normals and depth (honor only meshes, points and lines do not contribute to HBAO)
+
+		if ( this._renderGBuffer ) {
+
+			this.overrideVisibility();
+			this.renderOverride( renderer, this.normalMaterial, this.normalRenderTarget, 0x7777ff, 1.0 );
+			this.restoreVisibility();
+
+		}
+
+		// render HBAO
+
+		this.hbaoMaterial.uniforms[ 'cameraNear' ].value = this.camera.near;
+		this.hbaoMaterial.uniforms[ 'cameraFar' ].value = this.camera.far;
+		this.hbaoMaterial.uniforms[ 'cameraProjectionMatrix' ].value.copy( this.camera.projectionMatrix );
+		this.hbaoMaterial.uniforms[ 'cameraProjectionMatrixInverse' ].value.copy( this.camera.projectionMatrixInverse );
+		this.renderPass( renderer, this.hbaoMaterial, this.hbaoRenderTarget, 0xffffff, 1.0 );
+
+		// render poisson denoise
+
+		this.pdMaterial.uniforms[ 'cameraProjectionMatrixInverse' ].value.copy( this.camera.projectionMatrixInverse );
+		this.renderPass( renderer, this.pdMaterial, this.pdRenderTarget, 0xffffff, 1.0 );
+
+		// output result to screen
+
+		switch ( this.output ) {
+
+			case HBAOPass.OUTPUT.Diffuse:
+
+				this.copyMaterial.uniforms[ 'tDiffuse' ].value = readBuffer.texture;
+				this.copyMaterial.blending = NoBlending;
+				this.renderPass( renderer, this.copyMaterial, this.renderToScreen ? null : writeBuffer );
+
+				break;
+
+			case HBAOPass.OUTPUT.HBAO:
+
+				this.copyMaterial.uniforms[ 'tDiffuse' ].value = this.hbaoRenderTarget.texture;
+				this.copyMaterial.blending = NoBlending;
+				this.renderPass( renderer, this.copyMaterial, this.renderToScreen ? null : writeBuffer );
+
+				break;
+
+			case HBAOPass.OUTPUT.Denoise:
+
+				this.copyMaterial.uniforms[ 'tDiffuse' ].value = this.pdRenderTarget.texture;
+				this.copyMaterial.blending = NoBlending;
+				this.renderPass( renderer, this.copyMaterial, this.renderToScreen ? null : writeBuffer );
+
+				break;
+
+			case HBAOPass.OUTPUT.Depth:
+
+				this.depthRenderMaterial.uniforms[ 'cameraNear' ].value = this.camera.near;
+				this.depthRenderMaterial.uniforms[ 'cameraFar' ].value = this.camera.far;
+				this.renderPass( renderer, this.depthRenderMaterial, this.renderToScreen ? null : writeBuffer );
+
+				break;
+
+			case HBAOPass.OUTPUT.Normal:
+
+				this.copyMaterial.uniforms[ 'tDiffuse' ].value = this.normalRenderTarget.texture;
+				this.copyMaterial.blending = NoBlending;
+				this.renderPass( renderer, this.copyMaterial, this.renderToScreen ? null : writeBuffer );
+
+				break;
+
+			case HBAOPass.OUTPUT.Default:
+
+				this.copyMaterial.uniforms[ 'tDiffuse' ].value = readBuffer.texture;
+				this.copyMaterial.blending = NoBlending;
+				this.renderPass( renderer, this.copyMaterial, this.renderToScreen ? null : writeBuffer );
+
+				this.copyMaterial.uniforms[ 'tDiffuse' ].value = this.pdRenderTarget.texture;
+				this.copyMaterial.blending = CustomBlending;
+				this.renderPass( renderer, this.copyMaterial, this.renderToScreen ? null : writeBuffer );
+
+				break;
+
+			default:
+				console.warn( 'THREE.HBAOPass: Unknown output type.' );
+
+		}
+
+	}
+
+	renderPass( renderer, passMaterial, renderTarget, clearColor, clearAlpha ) {
+
+		// save original state
+		renderer.getClearColor( this.originalClearColor );
+		const originalClearAlpha = renderer.getClearAlpha();
+		const originalAutoClear = renderer.autoClear;
+
+		renderer.setRenderTarget( renderTarget );
+
+		// setup pass state
+		renderer.autoClear = false;
+		if ( ( clearColor !== undefined ) && ( clearColor !== null ) ) {
+
+			renderer.setClearColor( clearColor );
+			renderer.setClearAlpha( clearAlpha || 0.0 );
+			renderer.clear();
+
+		}
+
+		this.fsQuad.material = passMaterial;
+		this.fsQuad.render( renderer );
+
+		// restore original state
+		renderer.autoClear = originalAutoClear;
+		renderer.setClearColor( this.originalClearColor );
+		renderer.setClearAlpha( originalClearAlpha );
+
+	}
+
+	renderOverride( renderer, overrideMaterial, renderTarget, clearColor, clearAlpha ) {
+
+		renderer.getClearColor( this.originalClearColor );
+		const originalClearAlpha = renderer.getClearAlpha();
+		const originalAutoClear = renderer.autoClear;
+
+		renderer.setRenderTarget( renderTarget );
+		renderer.autoClear = false;
+
+		clearColor = overrideMaterial.clearColor || clearColor;
+		clearAlpha = overrideMaterial.clearAlpha || clearAlpha;
+
+		if ( ( clearColor !== undefined ) && ( clearColor !== null ) ) {
+
+			renderer.setClearColor( clearColor );
+			renderer.setClearAlpha( clearAlpha || 0.0 );
+			renderer.clear();
+
+		}
+
+		this.scene.overrideMaterial = overrideMaterial;
+		renderer.render( this.scene, this.camera );
+		this.scene.overrideMaterial = null;
+
+		// restore original state
+
+		renderer.autoClear = originalAutoClear;
+		renderer.setClearColor( this.originalClearColor );
+		renderer.setClearAlpha( originalClearAlpha );
+
+	}
+
+	setSize( width, height ) {
+
+		this.width = width;
+		this.height = height;
+
+		this.hbaoRenderTarget.setSize( width, height );
+		this.normalRenderTarget.setSize( width, height );
+		this.pdRenderTarget.setSize( width, height );
+
+		this.hbaoMaterial.uniforms[ 'resolution' ].value.set( width, height );
+		this.hbaoMaterial.uniforms[ 'cameraProjectionMatrix' ].value.copy( this.camera.projectionMatrix );
+		this.hbaoMaterial.uniforms[ 'cameraProjectionMatrixInverse' ].value.copy( this.camera.projectionMatrixInverse );
+
+		this.pdMaterial.uniforms[ 'resolution' ].value.set( width, height );
+		this.pdMaterial.uniforms[ 'cameraProjectionMatrixInverse' ].value.copy( this.camera.projectionMatrixInverse );
+
+	}
+
+	overrideVisibility() {
+
+		const scene = this.scene;
+		const cache = this._visibilityCache;
+
+		scene.traverse( function ( object ) {
+
+			cache.set( object, object.visible );
+
+			if ( object.isPoints || object.isLine ) object.visible = false;
+
+		} );
+
+	}
+
+	restoreVisibility() {
+
+		const scene = this.scene;
+		const cache = this._visibilityCache;
+
+		scene.traverse( function ( object ) {
+
+			const visible = cache.get( object );
+			object.visible = visible;
+
+		} );
+
+		cache.clear();
+
+	}
+
+	generateNoise( size = 64 ) {
+
+		const simplex = new SimplexNoise();
+
+		const arraySize = size * size * 4;
+		const data = new Uint8Array( arraySize );
+
+		for ( let i = 0; i < size; i ++ ) {
+
+			for ( let j = 0; j < size; j ++ ) {
+
+				const x = i;
+				const y = j;
+
+				data[ ( i * size + j ) * 4 ] = ( simplex.noise( x, y ) + 1.0 ) * 255.0;
+				data[ ( i * size + j ) * 4 + 1 ] = ( simplex.noise( x + size, y ) + 1.0 ) * 255.0;
+				data[ ( i * size + j ) * 4 + 2 ] = ( simplex.noise( x, y + size ) + 1.0 ) * 255.0;
+				data[ ( i * size + j ) * 4 + 3 ] = ( simplex.noise( x + size, y + size ) + 1.0 ) * 255.0;
+
+			}
+
+		}
+
+		const noiseTexture = new DataTexture( data, size, size, RGBAFormat, UnsignedByteType );
+		noiseTexture.wrapS = RepeatWrapping;
+		noiseTexture.wrapT = RepeatWrapping;
+		noiseTexture.needsUpdate = true;
+
+		return noiseTexture;
+
+	}
+
+}
+
+HBAOPass.OUTPUT = {
+	'Default': 0,
+	'Diffuse': 1,
+	'Depth': 2,
+	'Normal': 3,
+	'HBAO': 4,
+	'Denoise': 5,
+};
+
+export { HBAOPass };
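
A minimal sketch of wiring the new pass into a composer chain, condensed from the example page added below (`renderer`, `scene` and `camera` are assumed to already exist):

	import { EffectComposer } from 'three/addons/postprocessing/EffectComposer.js';
	import { RenderPass } from 'three/addons/postprocessing/RenderPass.js';
	import { OutputPass } from 'three/addons/postprocessing/OutputPass.js';
	import { HBAOPass } from 'three/addons/postprocessing/HBAOPass.js';

	const composer = new EffectComposer( renderer );
	composer.addPass( new RenderPass( scene, camera ) );

	// by default the pass renders its own G-buffer (normals + depth);
	// existing textures can be shared via the optional parameters argument,
	// e.g. new HBAOPass( scene, camera, w, h, { depthTexture, normalTexture } )
	const hbaoPass = new HBAOPass( scene, camera, window.innerWidth, window.innerHeight );
	hbaoPass.output = HBAOPass.OUTPUT.Default; // denoised AO multiplied over the beauty pass

	composer.addPass( hbaoPass );
	composer.addPass( new OutputPass() );

	// AO and denoiser settings are applied through the update helpers
	hbaoPass.updateHbaoMaterial( { radius: 2, distanceExponent: 1, bias: 0.01, samples: 16 } );
	hbaoPass.updatePdMaterial( { lumaPhi: 10, depthPhi: 2, normalPhi: 3, radius: 10, rings: 4, samples: 16 } );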

+ 331 - 0
examples/jsm/shaders/HBAOShader.js

@@ -0,0 +1,331 @@
+import {
+	Matrix4,
+	Vector2,
+	Vector4,
+} from 'three';
+
+/**
+ * References:
+ * http://citeseerx.ist.psu.edu/viewdoc/download?doi=10.1.1.577.2286&rep=rep1&type=pdf
+ * https://ceur-ws.org/Vol-3027/paper5.pdf
+ * http://www.derschmale.com/2013/12/20/an-alternative-implementation-for-hbao-2
+ * https://github.com/N8python/n8ao
+ * https://github.com/0beqz/realism-effects
+ * https://github.com/scanberg/hbao/blob/master/resources/shaders/hbao_frag.glsl
+ * https://github.com/nvpro-samples/gl_ssao/blob/master/hbao.frag.glsl
+ */
+
+const HBAOShader = {
+
+	name: 'HBAOShader',
+
+	defines: {
+		'PERSPECTIVE_CAMERA': 1,
+		'SAMPLES': 16,
+		'SAMPLE_VECTORS': generateHaboSampleKernelInitializer( 16 ),
+		'NORMAL_VECTOR_TYPE': 1,
+		'DEPTH_VALUE_SOURCE': 0,
+		'COSINE_SAMPLE_HEMISPHERE': 0,
+	},
+
+	uniforms: {
+		'tNormal': { value: null },
+		'tDepth': { value: null },
+		'tNoise': { value: null },
+		'resolution': { value: new Vector2() },
+		'cameraNear': { value: null },
+		'cameraFar': { value: null },
+		'cameraProjectionMatrix': { value: new Matrix4() },
+		'cameraProjectionMatrixInverse': { value: new Matrix4() },
+		'radius': { value: 2. },
+		'distanceExponent': { value: 1. },
+		'bias': { value: 0.01 },
+	},
+
+	vertexShader: /* glsl */`
+
+		varying vec2 vUv;
+
+		void main() {
+
+			vUv = uv;
+
+			gl_Position = projectionMatrix * modelViewMatrix * vec4( position, 1.0 );
+
+		}`,
+
+	fragmentShader: /* glsl */`
+
+		varying vec2 vUv;
+
+		uniform sampler2D tNormal;
+		uniform sampler2D tDepth;
+		uniform sampler2D tNoise;
+		uniform vec2 resolution;
+		uniform float cameraNear;
+		uniform float cameraFar;
+		uniform mat4 cameraProjectionMatrix;
+		uniform mat4 cameraProjectionMatrixInverse;		
+		uniform float radius;
+		uniform float distanceExponent;
+		uniform float bias;
+		
+		#include <common>
+		#include <packing>
+
+		#ifndef FRAGMENT_OUTPUT
+		#define FRAGMENT_OUTPUT vec4(vec3(ao), 1.)
+		#endif
+
+		const vec4 sampleKernel[SAMPLES] = SAMPLE_VECTORS;
+
+		vec3 getViewPosition(const in vec2 screenPosition, const in float depth) {
+			vec4 clipSpacePosition = vec4(vec3(screenPosition, depth) * 2.0 - 1.0, 1.0);
+			vec4 viewSpacePosition = cameraProjectionMatrixInverse * clipSpacePosition;
+			return viewSpacePosition.xyz / viewSpacePosition.w;
+		}
+
+		float getDepth(const vec2 uv) {
+		#if DEPTH_VALUE_SOURCE == 1    
+			return textureLod(tDepth, uv.xy, 0.0).a;
+		#else
+			return textureLod(tDepth, uv.xy, 0.0).r;
+		#endif
+		}
+
+		float fetchDepth(const ivec2 uv) {
+			#if DEPTH_VALUE_SOURCE == 1    
+				return texelFetch(tDepth, uv.xy, 0).a;
+			#else
+				return texelFetch(tDepth, uv.xy, 0).r;
+			#endif
+		}
+
+		float getViewZ(const in float depth) {
+			#if PERSPECTIVE_CAMERA == 1
+				return perspectiveDepthToViewZ(depth, cameraNear, cameraFar);
+			#else
+				return orthographicDepthToViewZ(depth, cameraNear, cameraFar);
+			#endif
+		}
+
+		vec3 computeNormalFromDepth(const vec2 uv) {
+            vec2 size = vec2(textureSize(tDepth, 0));
+            ivec2 p = ivec2(uv * size);
+            float c0 = fetchDepth(p);
+            float l2 = fetchDepth(p - ivec2(2, 0));
+            float l1 = fetchDepth(p - ivec2(1, 0));
+            float r1 = fetchDepth(p + ivec2(1, 0));
+            float r2 = fetchDepth(p + ivec2(2, 0));
+            float b2 = fetchDepth(p - ivec2(0, 2));
+            float b1 = fetchDepth(p - ivec2(0, 1));
+            float t1 = fetchDepth(p + ivec2(0, 1));
+            float t2 = fetchDepth(p + ivec2(0, 2));
+            float dl = abs((2.0 * l1 - l2) - c0);
+            float dr = abs((2.0 * r1 - r2) - c0);
+            float db = abs((2.0 * b1 - b2) - c0);
+            float dt = abs((2.0 * t1 - t2) - c0);
+            vec3 ce = getViewPosition(uv, c0).xyz;
+            vec3 dpdx = (dl < dr) ?  ce - getViewPosition((uv - vec2(1.0 / size.x, 0.0)), l1).xyz
+                                  : -ce + getViewPosition((uv + vec2(1.0 / size.x, 0.0)), r1).xyz;
+            vec3 dpdy = (db < dt) ?  ce - getViewPosition((uv - vec2(0.0, 1.0 / size.y)), b1).xyz
+                                  : -ce + getViewPosition((uv + vec2(0.0, 1.0 / size.y)), t1).xyz;
+            return normalize(cross(dpdx, dpdy));
+		}
+
+		vec3 getViewNormal(const vec2 uv) {
+		#if NORMAL_VECTOR_TYPE == 2
+			return normalize(textureLod(tNormal, uv, 0.).rgb);
+		#elif NORMAL_VECTOR_TYPE == 1
+			return unpackRGBToNormal(textureLod(tNormal, uv, 0.).rgb);
+		#else
+			return computeNormalFromDepth(uv);
+		#endif
+		}
+
+		// source: https://www.shadertoy.com/view/cll3R4
+		vec3 cosineSampleHemisphere(const vec3 n, const vec2 u) {
+			float r = sqrt(u.x);
+			float theta = 2.0 * PI * u.y;
+			vec3 b = normalize(cross(n, vec3(0.0, 1.0, 1.0)));
+			vec3 t = cross(b, n);
+			return normalize(r * sin(theta) * b + sqrt(1.0 - u.x) * n + r * cos(theta) * t);
+		}
+		
+		float getOcclusion(const vec2 uv, const vec3 viewPos, const vec3 viewNormal, const float depth, const vec4 sampleViewDir, inout float totalWeight) {
+			
+			vec3 sampleViewPos = viewPos + sampleViewDir.xyz * radius * pow(sampleViewDir.w, distanceExponent);
+			vec4 sampleClipPos = cameraProjectionMatrix * vec4(sampleViewPos, 1.);
+			vec2 sampleUv = sampleClipPos.xy / sampleClipPos.w * 0.5 + 0.5;
+			float sampleDepth = getDepth(sampleUv);
+			float distSample = abs(getViewZ(sampleDepth));
+			float distWorld = abs(sampleViewPos.z);
+			float distanceFalloffToUse = radius;
+			float rangeCheck = smoothstep(0.0, 1.0, distanceFalloffToUse / (abs(distSample - distWorld)));
+			float weight = dot(viewNormal, sampleViewDir.xyz);
+			vec2 diff = (uv - sampleUv) * resolution;
+			vec2 clipRangeCheck = step(0., sampleUv) * step(sampleUv, vec2(1.));
+			float occlusion = rangeCheck * weight * step(distSample + bias, distWorld) * step(0.707, dot(diff, diff)) * clipRangeCheck.x * clipRangeCheck.y;
+			totalWeight += weight;
+
+			return occlusion;
+		}
+		
+		void main() {
+			float depth = getDepth(vUv.xy);
+			if (depth == 1.0) {
+				discard;
+				return;
+			}
+			vec3 viewPos = getViewPosition(vUv, depth);
+			vec3 viewNormal = getViewNormal(vUv);
+			
+			vec2 noiseResolution = vec2(textureSize(tNoise, 0));
+			vec2 noiseUv = vUv * resolution / noiseResolution;
+			vec4 noiseTexel = textureLod(tNoise, noiseUv, 0.0);
+			vec3 randomVec = noiseTexel.xyz * 2.0 - 1.0;
+			vec3 tangent = normalize(randomVec - viewNormal * dot(randomVec, viewNormal));
+			vec3 bitangent = cross(viewNormal, tangent);
+			mat3 kernelMatrix = mat3(tangent, bitangent, viewNormal);
+
+			float ao = 0.0, totalWeight = 0.0;
+			for (int i = 0; i < SAMPLES; i++) {		
+				#if COSINE_SAMPLE_HEMISPHERE == 1
+					vec4 sampleNoise = noiseTexel;
+					if (i != 0) {
+						const vec4 hn = vec4(0.618033988749895, 0.3247179572447458, 0.2207440846057596, 0.1673039782614187);
+						sampleNoise = fract(sampleNoise + hn * float(i));
+						sampleNoise = mix(sampleNoise, 1.0 - sampleNoise, step(0.5, sampleNoise)) * 2.0;
+					}
+					vec3 hemisphereDir = cosineSampleHemisphere( viewNormal, sampleNoise.rg );
+					vec4 sampleViewDir = vec4(hemisphereDir, sampleNoise.b );
+				#else
+					vec4 sampleViewDir = sampleKernel[i];
+					sampleViewDir.xyz = normalize(kernelMatrix * sampleViewDir.xyz);
+				#endif
+				float occlusion = getOcclusion(vUv, viewPos, viewNormal, depth, sampleViewDir, totalWeight);
+				ao += occlusion;
+			}		
+			if (totalWeight > 0.) { 
+				ao /= totalWeight;
+			}
+			ao = clamp(1. - ao, 0., 1.);
+			gl_FragColor = FRAGMENT_OUTPUT;
+		}`
+
+};
+
+const HBAODepthShader = {
+
+	name: 'HBAODepthShader',
+
+	defines: {
+		'PERSPECTIVE_CAMERA': 1
+	},
+
+	uniforms: {
+
+		'tDepth': { value: null },
+		'cameraNear': { value: null },
+		'cameraFar': { value: null },
+
+	},
+
+	vertexShader: /* glsl */`
+
+		varying vec2 vUv;
+
+		void main() {
+
+			vUv = uv;
+			gl_Position = projectionMatrix * modelViewMatrix * vec4( position, 1.0 );
+
+		}`,
+
+	fragmentShader: /* glsl */`
+
+		uniform sampler2D tDepth;
+
+		uniform float cameraNear;
+		uniform float cameraFar;
+
+		varying vec2 vUv;
+
+		#include <packing>
+
+		float getLinearDepth( const in vec2 screenPosition ) {
+
+			#if PERSPECTIVE_CAMERA == 1
+
+				float fragCoordZ = texture2D( tDepth, screenPosition ).x;
+				float viewZ = perspectiveDepthToViewZ( fragCoordZ, cameraNear, cameraFar );
+				return viewZToOrthographicDepth( viewZ, cameraNear, cameraFar );
+
+			#else
+
+				return texture2D( tDepth, screenPosition ).x;
+
+			#endif
+
+		}
+
+		void main() {
+
+			float depth = getLinearDepth( vUv );
+			gl_FragColor = vec4( vec3( 1.0 - depth ), 1.0 );
+
+		}`
+
+};
+
+function generateHaboSampleKernelInitializer( samples ) {
+
+	const poissonDisk = generateHaboSamples( samples );
+
+	let glslCode = 'vec4[SAMPLES](';
+
+	for ( let i = 0; i < samples; i ++ ) {
+
+		const sample = poissonDisk[ i ];
+		glslCode += `vec4(${sample.x}, ${sample.y}, ${sample.z}, ${sample.w})`;
+
+		if ( i < samples - 1 ) {
+
+			glslCode += ',';
+
+		}
+
+	}
+
+	glslCode += ')';
+
+	return glslCode;
+
+}
+
+function generateHaboSamples( samples ) {
+
+	const kernel = [];
+	for ( let kernelIndex = 0; kernelIndex < samples; kernelIndex ++ ) {
+
+		const spiralAngle = kernelIndex * Math.PI * ( 3 - Math.sqrt( 5 ) );
+		const z = Math.sqrt( 0.99 - ( kernelIndex / ( samples - 1 ) ) * 0.98 );
+		const radius = Math.sqrt( 1 - z * z );
+		const x = Math.cos( spiralAngle ) * radius;
+		const y = Math.sin( spiralAngle ) * radius;
+		const scaleStep = 8;
+		const scaleRange = Math.floor( samples / scaleStep );
+		const scaleIndex =
+			Math.floor( kernelIndex / scaleStep ) +
+			( kernelIndex % scaleStep ) * scaleRange;
+		let scale = 1 - scaleIndex / samples;
+		scale = 0.1 + 0.9 * scale;
+		kernel.push( new Vector4( x, y, z, scale ) );
+
+	}
+
+	return kernel;
+
+}
+
+export { generateHaboSampleKernelInitializer, HBAOShader, HBAODepthShader };
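
Both the sample count and the sample kernel are baked into the HBAO fragment shader as compile-time constants (the SAMPLES and SAMPLE_VECTORS defines), so changing them means regenerating the initializer string and recompiling the material. A short sketch of what HBAOPass.updateHbaoMaterial() does for this, assuming `hbaoMaterial` is a ShaderMaterial built from HBAOShader:

	// hypothetical new kernel size; generateHaboSampleKernelInitializer() returns
	// a GLSL initializer string of the form vec4[SAMPLES](vec4(x, y, z, scale), ...)
	const samples = 32;
	hbaoMaterial.defines[ 'SAMPLES' ] = samples;
	hbaoMaterial.defines[ 'SAMPLE_VECTORS' ] = generateHaboSampleKernelInitializer( samples );
	hbaoMaterial.needsUpdate = true; // triggers shader recompilation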

+ 248 - 0
examples/jsm/shaders/PoissonDenoiseShader.js

@@ -0,0 +1,248 @@
+import {
+	Matrix4,
+	Vector2,
+} from 'three';
+
+/**
+ * References:
+ * https://github.com/0beqz/realism-effects
+ * https://github.com/N8python/n8ao
+ */
+
+const PoissonDenoiseShader = {
+
+	name: 'PoissonDenoiseShader',
+
+	defines: {
+		'SAMPLES': 16,
+		'SAMPLE_VECTORS': generatePdSamplePointInitializer( 16, 4 ),
+		'NORMAL_VECTOR_TYPE': 1,
+		'DEPTH_VALUE_SOURCE': 0,
+	},
+
+	uniforms: {
+		'tDiffuse': { value: null },
+		'tNormal': { value: null },
+		'tDepth': { value: null },
+		'tNoise': { value: null },
+		'resolution': { value: new Vector2() },
+		'cameraProjectionMatrixInverse': { value: new Matrix4() },
+		'lumaPhi': { value: 5. },
+		'depthPhi': { value: 5. },
+		'normalPhi': { value: 5. },
+		'radius': { value: 10. },
+		'index': { value: 0 }
+	},
+
+	vertexShader: /* glsl */`
+
+		varying vec2 vUv;
+
+		void main() {
+
+			vUv = uv;
+
+			gl_Position = projectionMatrix * modelViewMatrix * vec4( position, 1.0 );
+
+		}`,
+
+	fragmentShader: /* glsl */`
+
+		varying vec2 vUv;
+
+		uniform sampler2D tDiffuse;
+		uniform sampler2D tNormal;
+		uniform sampler2D tDepth;
+		uniform sampler2D tNoise;
+		uniform vec2 resolution;
+		uniform mat4 cameraProjectionMatrixInverse;
+		uniform float lumaPhi;
+		uniform float depthPhi;
+		uniform float normalPhi;
+		uniform float radius;
+		uniform int index;
+		
+		#include <common>
+		#include <packing>
+
+		#ifndef SAMPLE_LUMINANCE
+		#define SAMPLE_LUMINANCE dot(vec3(0.2125, 0.7154, 0.0721), a)
+		#endif
+
+		#ifndef FRAGMENT_OUTPUT
+		#define FRAGMENT_OUTPUT vec4(denoised, 1.)
+		#endif
+
+		float getLuminance(const in vec3 a) {
+			return SAMPLE_LUMINANCE;
+		}
+
+		const vec2 poissonDisk[SAMPLES] = SAMPLE_VECTORS;
+
+		vec3 getViewPosition(const in vec2 screenPosition, const in float depth) {
+			vec4 clipSpacePosition = vec4(vec3(screenPosition, depth) * 2.0 - 1.0, 1.0);
+			vec4 viewSpacePosition = cameraProjectionMatrixInverse * clipSpacePosition;
+			return viewSpacePosition.xyz / viewSpacePosition.w;
+		}
+		
+		float getDepth(const vec2 uv) {
+		#if DEPTH_VALUE_SOURCE == 1    
+			return textureLod(tDepth, uv.xy, 0.0).a;
+		#else
+			return textureLod(tDepth, uv.xy, 0.0).r;
+		#endif
+		}
+
+		float fetchDepth(const ivec2 uv) {
+			#if DEPTH_VALUE_SOURCE == 1    
+				return texelFetch(tDepth, uv.xy, 0).a;
+			#else
+				return texelFetch(tDepth, uv.xy, 0).r;
+			#endif
+		}
+
+		vec3 computeNormalFromDepth(const vec2 uv) {
+			vec2 size = vec2(textureSize(tDepth, 0));
+			ivec2 p = ivec2(uv * size);
+			float c0 = fetchDepth(p);
+			float l2 = fetchDepth(p - ivec2(2, 0));
+			float l1 = fetchDepth(p - ivec2(1, 0));
+			float r1 = fetchDepth(p + ivec2(1, 0));
+			float r2 = fetchDepth(p + ivec2(2, 0));
+			float b2 = fetchDepth(p - ivec2(0, 2));
+			float b1 = fetchDepth(p - ivec2(0, 1));
+			float t1 = fetchDepth(p + ivec2(0, 1));
+			float t2 = fetchDepth(p + ivec2(0, 2));
+			float dl = abs((2.0 * l1 - l2) - c0);
+			float dr = abs((2.0 * r1 - r2) - c0);
+			float db = abs((2.0 * b1 - b2) - c0);
+			float dt = abs((2.0 * t1 - t2) - c0);
+			vec3 ce = getViewPosition(uv, c0).xyz;
+			vec3 dpdx = (dl < dr) ?  ce - getViewPosition((uv - vec2(1.0 / size.x, 0.0)), l1).xyz
+									: -ce + getViewPosition((uv + vec2(1.0 / size.x, 0.0)), r1).xyz;
+			vec3 dpdy = (db < dt) ?  ce - getViewPosition((uv - vec2(0.0, 1.0 / size.y)), b1).xyz
+									: -ce + getViewPosition((uv + vec2(0.0, 1.0 / size.y)), t1).xyz;
+			return normalize(cross(dpdx, dpdy));
+		}
+
+		vec3 getViewNormal(const vec2 uv) {
+		#if NORMAL_VECTOR_TYPE == 2
+			return normalize(textureLod(tNormal, uv, 0.).rgb);
+		#elif NORMAL_VECTOR_TYPE == 1
+			return unpackRGBToNormal(textureLod(tNormal, uv, 0.).rgb);
+		#else
+			return computeNormalFromDepth(uv);
+		#endif
+		}
+		
+		float distToPlane(const vec3 viewPos, const vec3 neighborViewPos, const vec3 viewNormal) {
+			return abs(dot(viewPos - neighborViewPos, viewNormal));
+		}
+		
+		void main() {
+			float depth = getDepth(vUv.xy);	
+			vec3 viewNormal = getViewNormal(vUv);	
+			if (depth == 1. || dot(viewNormal, viewNormal) == 0.) {
+				discard;
+				return;
+			}
+			vec4 texel = textureLod(tDiffuse, vUv, 0.0);
+			vec3 denoised = texel.rgb;
+			vec3 center = texel.rgb;
+			vec3 viewPos = getViewPosition(vUv, depth);
+
+			vec2 noiseResolution = vec2(textureSize(tNoise, 0));
+			vec2 noiseUv = vUv * resolution / noiseResolution;
+			vec4 noiseTexel = textureLod(tNoise, noiseUv, 0.0);
+			//vec2 noiseVec = normalize((index % 2 == 0 ? noiseTexel.xy : noiseTexel.yz) * 2.0 - 1.0);
+			vec2 noiseVec = vec2(sin(noiseTexel[index % 4] * 2. * PI), cos(noiseTexel[index % 4] * 2. * PI));
+			mat2 rotationMatrix = mat2(noiseVec.x, -noiseVec.y, noiseVec.x, noiseVec.y);
+		
+			float totalWeight = 1.0;
+			for (int i = 0; i < SAMPLES; i++) {
+				vec2 offset = rotationMatrix * (poissonDisk[i] * radius / resolution);
+				vec2 sampleUv = vUv + offset;
+				vec4 sampleTexel = textureLod(tDiffuse, sampleUv, 0.0);
+				float sampleDepth = getDepth(sampleUv);
+				vec3 sampleNormal = getViewNormal(sampleUv);
+				vec3 neighborColor = sampleTexel.rgb;
+		
+				vec3 viewPosSample = getViewPosition(sampleUv, sampleDepth);
+				
+				float normalDiff = dot(viewNormal, sampleNormal);
+				float normalSimilarity = pow(max(normalDiff, 0.), normalPhi);
+		
+				float lumaDiff = abs(getLuminance(neighborColor) - getLuminance(center));
+				float lumaSimilarity = max(1.0 - lumaDiff / lumaPhi, 0.0);
+		
+				float depthDiff = 1. - distToPlane(viewPos, viewPosSample, viewNormal);
+				float depthSimilarity = max(depthDiff / depthPhi, 0.);
+		
+				float w = lumaSimilarity * depthSimilarity * normalSimilarity;
+		
+				denoised += w * neighborColor;
+				totalWeight += w;
+			}
+		
+			if (totalWeight > 0.) { 
+				denoised /= totalWeight;
+			}
+			gl_FragColor = FRAGMENT_OUTPUT;
+		}`
+
+};
+
+function generatePdSamplePointInitializer( samples, rings ) {
+
+	const poissonDisk = generateDenoiseSamples( samples, rings );
+
+	let glslCode = 'vec2[SAMPLES](';
+
+	for ( let i = 0; i < samples; i ++ ) {
+
+		const sample = poissonDisk[ i ];
+		glslCode += `vec2(${sample.x}, ${sample.y})`;
+
+		if ( i < samples - 1 ) {
+
+			glslCode += ',';
+
+		}
+
+	}
+
+	glslCode += ')';
+
+	return glslCode;
+
+}
+
+function generateDenoiseSamples( numSamples, numRings ) {
+
+	const angleStep = ( 2 * Math.PI * numRings ) / numSamples;
+	const invNumSamples = 1.0 / numSamples;
+	const radiusStep = invNumSamples;
+	const samples = [];
+	let radius = invNumSamples;
+	let angle = 0;
+
+	for ( let i = 0; i < numSamples; i ++ ) {
+
+		const v = new Vector2( Math.cos( angle ), Math.sin( angle ) )
+			.multiplyScalar( Math.pow( radius, 0.75 ) );
+
+		samples.push( v );
+		radius += radiusStep;
+		angle += angleStep;
+
+	}
+
+	return samples;
+
+}
+
+export { generatePdSamplePointInitializer, PoissonDenoiseShader };
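
The denoiser samples a rotated spiral disk: with the default samples = 16 and rings = 4, the angle step is 2 * PI * 4 / 16 = PI / 2, so consecutive samples advance a quarter turn while the radius grows by 1/16 per sample (flattened by pow( radius, 0.75 )). As with HBAOShader, the disk is a compile-time constant; a sketch of rebuilding it, mirroring HBAOPass.updatePdMaterial() and assuming `pdMaterial` is a ShaderMaterial built from PoissonDenoiseShader:

	// hypothetical denoiser setup: 8 samples spread over 3 rings;
	// SAMPLE_VECTORS expands to a GLSL vec2[SAMPLES](...) initializer
	pdMaterial.defines[ 'SAMPLES' ] = 8;
	pdMaterial.defines[ 'SAMPLE_VECTORS' ] = generatePdSamplePointInitializer( 8, 3 );
	pdMaterial.needsUpdate = true;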

BIN
examples/screenshots/webgl_postprocessing_hbao.jpg


+ 1 - 0
examples/tags.json

@@ -86,6 +86,7 @@
 	"webgl_postprocessing_dof2": [ "bokeh" ],
 	"webgl_postprocessing_fxaa": [ "msaa", "multisampled" ],
 	"webgl_postprocessing_godrays": [ "light scattering" ],
+	"webgl_postprocessing_hbao": [ "ambient occlusion" ],
 	"webgl_shadowmap_progressive": [ "shadow", "soft", "lightmap", "onBeforeCompile" ],
 	"webgl_postprocessing_ssaa": [ "msaa", "multisampled" ],
 	"webgl_postprocessing_ssaa_unbiased": [ "msaa", "multisampled" ],

+ 198 - 0
examples/webgl_postprocessing_hbao.html

@@ -0,0 +1,198 @@
+<!DOCTYPE html>
+<html lang="en">
+	<head>
+		<title>three.js webgl - postprocessing - Horizon Based Ambient Occlusion</title>
+		<meta charset="utf-8">
+		<meta name="viewport" content="width=device-width, user-scalable=no, minimum-scale=1.0, maximum-scale=1.0">
+		<link type="text/css" rel="stylesheet" href="main.css">
+		<style>
+			body {
+				background-color: #bfe3dd;
+				color: #000;
+			}
+			a {
+				color: #2983ff;
+			}
+		</style>
+	</head>
+	<body>
+		<div id="info">
+			<a href="https://threejs.org" target="_blank" rel="noopener">three.js</a> - horizon based ambient occlusion (HBAO) by <a href="https://github.com/Rabbid76">Rabbid76</a><br/>
+		</div>
+
+		<script type="importmap">
+			{
+				"imports": {
+					"three": "../build/three.module.js",
+					"three/addons/": "./jsm/"
+				}
+			}
+		</script>
+
+		<script type="module">
+
+			import * as THREE from 'three';
+
+			import Stats from 'three/addons/libs/stats.module.js';
+			import { GUI } from 'three/addons/libs/lil-gui.module.min.js';
+
+			import { OrbitControls } from 'three/addons/controls/OrbitControls.js';
+			import { RoomEnvironment } from 'three/addons/environments/RoomEnvironment.js';
+			import { GLTFLoader } from 'three/addons/loaders/GLTFLoader.js';
+			import { DRACOLoader } from 'three/addons/loaders/DRACOLoader.js';
+
+			import { EffectComposer } from 'three/addons/postprocessing/EffectComposer.js';
+			import { RenderPass } from 'three/addons/postprocessing/RenderPass.js';
+			import { HBAOPass } from 'three/addons/postprocessing/HBAOPass.js';
+			import { OutputPass } from 'three/addons/postprocessing/OutputPass.js';
+
+			let mixer;
+
+			const clock = new THREE.Clock();
+			const container = document.createElement( 'div' );
+			document.body.appendChild( container );
+
+			const stats = new Stats();
+			container.appendChild( stats.dom );
+
+			const renderer = new THREE.WebGLRenderer( { antialias: true } );
+			renderer.setSize( window.innerWidth, window.innerHeight );
+			document.body.appendChild( renderer.domElement );
+
+			const pmremGenerator = new THREE.PMREMGenerator( renderer );
+
+			const scene = new THREE.Scene();
+			scene.background = new THREE.Color( 0xbfe3dd );
+			scene.environment = pmremGenerator.fromScene( new RoomEnvironment( renderer ), 0.04 ).texture;
+
+			const camera = new THREE.PerspectiveCamera( 40, window.innerWidth / window.innerHeight, 1, 100 );
+			camera.position.set( 5, 2, 8 );
+
+			const controls = new OrbitControls( camera, renderer.domElement );
+			controls.target.set( 0, 0.5, 0 );
+			controls.update();
+			controls.enablePan = false;
+			controls.enableDamping = true;
+
+			const dracoLoader = new DRACOLoader();
+			dracoLoader.setDecoderPath( 'jsm/libs/draco/gltf/' );
+
+			const loader = new GLTFLoader();
+			loader.setDRACOLoader( dracoLoader );
+			loader.load( 'models/gltf/LittlestTokyo.glb', function ( gltf ) {
+
+				const model = gltf.scene;
+				model.position.set( 1, 1, 0 );
+				model.scale.set( 0.01, 0.01, 0.01 );
+				scene.add( model );
+
+				mixer = new THREE.AnimationMixer( model );
+				mixer.clipAction( gltf.animations[ 0 ] ).play();
+
+				animate();
+
+			}, undefined, function ( e ) {
+
+				console.error( e );
+
+			} );
+
+			const width = window.innerWidth;
+			const height = window.innerHeight;
+			const pixelRatio = renderer.getPixelRatio();
+			const maxSamples = renderer.capabilities.maxSamples;
+
+			const renderTarget = new THREE.WebGLRenderTarget( width * pixelRatio, height * pixelRatio, {
+				type: THREE.HalfFloatType,
+				samples: maxSamples,
+			} );
+			renderTarget.texture.name = 'EffectComposer.rt1';
+			const composer = new EffectComposer( renderer, renderTarget );
+
+			const renderPass = new RenderPass( scene, camera );
+			composer.addPass( renderPass );
+
+			const hbaoPass = new HBAOPass( scene, camera, width, height );
+			hbaoPass.output = HBAOPass.OUTPUT.Denoise;
+			composer.addPass( hbaoPass );
+
+			const outputPass = new OutputPass();
+			composer.addPass( outputPass );
+
+			// Init gui
+			const gui = new GUI();
+
+			gui.add( hbaoPass, 'output', {
+				'Default': HBAOPass.OUTPUT.Default,
+				'Diffuse': HBAOPass.OUTPUT.Diffuse,
+				'HBAO Only': HBAOPass.OUTPUT.HBAO,
+				'HBAO Only + Denoise': HBAOPass.OUTPUT.Denoise,
+				'Depth': HBAOPass.OUTPUT.Depth,
+				'Normal': HBAOPass.OUTPUT.Normal
+			} ).onChange( function ( value ) {
+
+				hbaoPass.output = value;
+
+			} );
+
+			const hbaoParameters = {
+				radius: 2.,
+				distanceExponent: 1.,
+				bias: 0.01,
+				samples: 16,
+			};
+			const pdParameters = {
+				lumaPhi: 10.,
+				depthPhi: 2.,
+				normalPhi: 3.,
+				radius: 10.,
+				rings: 4,
+				samples: 16,
+			};
+			hbaoPass.updateHbaoMaterial( hbaoParameters );
+			hbaoPass.updatePdMaterial( pdParameters );
+			gui.add( hbaoParameters, 'radius' ).min( 0.01 ).max( 10 ).step( 0.01 ).onChange( () => hbaoPass.updateHbaoMaterial( hbaoParameters ) );
+			gui.add( hbaoParameters, 'distanceExponent' ).min( 1 ).max( 4 ).step( 0.01 ).onChange( () => hbaoPass.updateHbaoMaterial( hbaoParameters ) );
+			gui.add( hbaoParameters, 'bias' ).min( 0 ).max( 0.1 ).step( 0.001 ).onChange( () => hbaoPass.updateHbaoMaterial( hbaoParameters ) );
+			gui.add( hbaoParameters, 'samples' ).min( 1 ).max( 32 ).step( 1 ).onChange( () => hbaoPass.updateHbaoMaterial( hbaoParameters ) );
+			gui.add( pdParameters, 'lumaPhi' ).min( 0 ).max( 20 ).step( 0.01 ).onChange( () => hbaoPass.updatePdMaterial( pdParameters ) );
+			gui.add( pdParameters, 'depthPhi' ).min( 0.01 ).max( 20 ).step( 0.01 ).onChange( () => hbaoPass.updatePdMaterial( pdParameters ) );
+			gui.add( pdParameters, 'normalPhi' ).min( 0.01 ).max( 20 ).step( 0.01 ).onChange( () => hbaoPass.updatePdMaterial( pdParameters ) );
+			gui.add( pdParameters, 'radius' ).min( 0 ).max( 32 ).step( 1 ).onChange( () => hbaoPass.updatePdMaterial( pdParameters ) );
+			gui.add( pdParameters, 'rings' ).min( 0 ).max( 16 ).step( 0.125 ).onChange( () => hbaoPass.updatePdMaterial( pdParameters ) );
+			gui.add( pdParameters, 'samples' ).min( 1 ).max( 32 ).step( 1 ).onChange( () => hbaoPass.updatePdMaterial( pdParameters ) );
+
+			window.addEventListener( 'resize', onWindowResize );
+
+			function onWindowResize() {
+
+				const width = window.innerWidth;
+				const height = window.innerHeight;
+
+				camera.aspect = width / height;
+				camera.updateProjectionMatrix();
+
+				renderer.setSize( width, height );
+				composer.setSize( width, height );
+
+			}
+
+			function animate() {
+
+				requestAnimationFrame( animate );
+
+				const delta = clock.getDelta();
+
+				mixer.update( delta );
+
+				controls.update();
+
+				stats.begin();
+				composer.render();
+				stats.end();
+
+			}
+
+		</script>
+	</body>
+</html>