Browse Source

WebGPURenderer: New `AfterImageNode` for the PostProcessing renderer. (#27451)

* init afterimage node

* init value

* cleanup

---------
Renaud Rohlinger 1 năm trước
mục cha
commit
e852478997

+ 2 - 1
examples/files.json

@@ -364,7 +364,8 @@
 		"webgpu_textures_2d-array",
 		"webgpu_tsl_editor",
 		"webgpu_tsl_transpiler",
-		"webgpu_video_panorama"
+		"webgpu_video_panorama",
+		"webgpu_postprocessing_afterimage"
 	],
 	"webaudio": [
 		"webaudio_orientation",

+ 2 - 0
examples/jsm/nodes/Nodes.js

@@ -111,6 +111,8 @@ export { default as ViewportSharedTextureNode, viewportSharedTexture } from './d
 export { default as ViewportDepthTextureNode, viewportDepthTexture } from './display/ViewportDepthTextureNode.js';
 export { default as ViewportDepthNode, viewZToOrthographicDepth, orthographicDepthToViewZ, viewZToPerspectiveDepth, perspectiveDepthToViewZ, depth, depthTexture, depthPixel } from './display/ViewportDepthNode.js';
 export { default as GaussianBlurNode, gaussianBlur } from './display/GaussianBlurNode.js';
+export { default as AfterImageNode, afterImage } from './display/AfterImageNode.js';
+
 export { default as PassNode, pass, depthPass } from './display/PassNode.js';
 
 // code

+ 131 - 0
examples/jsm/nodes/display/AfterImageNode.js

@@ -0,0 +1,131 @@
+import TempNode from '../core/TempNode.js';
+import { nodeObject, addNodeElement, tslFn, float, vec4 } from '../shadernode/ShaderNode.js';
+import { NodeUpdateType } from '../core/constants.js';
+import { uv } from '../accessors/UVNode.js';
+import { texture } from '../accessors/TextureNode.js';
+import { uniform } from '../core/UniformNode.js';
+import { RenderTarget } from 'three';
+import { sign, max } from '../math/MathNode.js';
+import QuadMesh from '../../objects/QuadMesh.js';
+
// Shared fullscreen quad used to run the composite pass.
const quadMeshComp = new QuadMesh();

class AfterImageNode extends TempNode {

	/**
	 * Post-processing node that blends each new frame with a damped copy of the
	 * previous composite, producing a motion-trail ("after image") effect.
	 *
	 * @param {Node} textureNode - Texture node providing the current frame (e.g. a pass color output).
	 * @param {number} damp - Decay factor in [0, 1]; values closer to 1 make trails linger longer.
	 */
	constructor( textureNode, damp = 0.96 ) {

		// This node outputs a vec4 color. TempNode expects a node *type* here,
		// not a node instance — passing `textureNode` was a bug.
		super( 'vec4' );

		this.textureNode = textureNode;
		this.textureNodeOld = texture();
		this.damp = uniform( damp );

		// Ping-pong render targets: `_compRT` receives the newly composited
		// frame, `_oldRT` holds the previous composite used as feedback input.
		this._compRT = new RenderTarget();
		this._oldRT = new RenderTarget();

		// Re-composite once per render.
		this.updateBeforeType = NodeUpdateType.RENDER;

	}

	/**
	 * Resizes both internal render targets.
	 *
	 * @param {number} width
	 * @param {number} height
	 */
	setSize( width, height ) {

		this._compRT.setSize( width, height );
		this._oldRT.setSize( width, height );

	}

	/**
	 * Composites the current frame with the damped previous frame into
	 * `_compRT`, then swaps the ping-pong targets so the result becomes the
	 * feedback input of the next frame.
	 *
	 * @param {NodeFrame} frame
	 */
	updateBefore( frame ) {

		const { renderer } = frame;

		const textureNode = this.textureNode;

		const currentRenderTarget = renderer.getRenderTarget();
		const currentTexture = textureNode.value;

		this.textureNodeOld.value = this._oldRT.texture;

		// comp
		renderer.setRenderTarget( this._compRT );
		quadMeshComp.render( renderer );

		// swap the ping-pong targets
		const temp = this._oldRT;
		this._oldRT = this._compRT;
		this._compRT = temp;

		// keep the render targets in sync with the input texture's size
		this.setSize( currentTexture.image.width, currentTexture.image.height );

		renderer.setRenderTarget( currentRenderTarget );
		textureNode.value = currentTexture;

	}

	/**
	 * Builds the composite shader graph: max( newTexel, dampedOldTexel ).
	 *
	 * @param {NodeBuilder} builder
	 * @return {Node} A texture node sampling the composited render target.
	 */
	setup( builder ) {

		const textureNode = this.textureNode;
		const textureNodeOld = this.textureNodeOld;

		if ( textureNode.isTextureNode !== true ) {

			console.error( 'AfterImageNode requires a TextureNode.' );

			return vec4();

		}

		//

		const uvNode = textureNode.uvNode || uv();

		// the feedback texture is sampled with the same UVs as the input
		textureNodeOld.uvNode = uvNode;

		const sampleTexture = ( uv ) => textureNode.cache().context( { getUV: () => uv, forceUVContext: true } );

		// per-component step: 1 where x > y, 0 otherwise
		const when_gt = tslFn( ( [ x_immutable, y_immutable ] ) => {

			const y = float( y_immutable ).toVar();
			const x = vec4( x_immutable ).toVar();

			return max( sign( x.sub( y ) ), 0.0 );

		} );

		const afterImg = tslFn( () => {

			const texelOld = vec4( textureNodeOld );
			const texelNew = vec4( sampleTexture( uvNode ) );

			// decay the trail; the 0.1 threshold zeroes faint residue so it fades out completely
			texelOld.mulAssign( this.damp.mul( when_gt( texelOld, 0.1 ) ) );
			return max( texelNew, texelOld );

		} );

		//

		const materialComposed = this._materialComposed || ( this._materialComposed = builder.createNodeMaterial( 'MeshBasicNodeMaterial' ) );
		materialComposed.fragmentNode = afterImg();

		quadMeshComp.material = materialComposed;

		//

		const properties = builder.getNodeProperties( this );
		properties.textureNode = textureNode;

		//

		return texture( this._compRT.texture );

	}

}
+
/**
 * TSL factory: applies the after-image effect to the given node.
 *
 * @param {Node} node - Input color node (typically a pass texture node).
 * @param {number} [damp] - Optional decay factor, forwarded to AfterImageNode.
 * @return {Node} The effect node, wrapped for chaining.
 */
export const afterImage = ( node, damp ) => {

	const inputNode = nodeObject( node );

	return nodeObject( new AfterImageNode( inputNode, damp ) );

};

// makes `.afterImage()` available as a chainable element on other nodes
addNodeElement( 'afterImage', afterImage );

export default AfterImageNode;
+

BIN
examples/screenshots/webgpu_postprocessing_afterimage.jpg


+ 128 - 0
examples/webgpu_postprocessing_afterimage.html

@@ -0,0 +1,128 @@
+<!DOCTYPE html>
+<html lang="en">
+	<head>
+		<title>three.js webgpu - postprocessing - afterimage</title>
+		<meta charset="utf-8">
+		<meta name="viewport" content="width=device-width, user-scalable=no, minimum-scale=1.0, maximum-scale=1.0">
+		<link type="text/css" rel="stylesheet" href="main.css">
+	</head>
+	<body>
+		<script type="importmap">
+			{
+				"imports": {
+					"three": "../build/three.module.js",
+					"three/addons/": "./jsm/",
+					"three/nodes": "./jsm/nodes/Nodes.js"
+				}
+			}
+		</script>
+
+		<script type="module">
+
+			import * as THREE from 'three';
+
+			import { GUI } from 'three/addons/libs/lil-gui.module.min.js';
+
+			import WebGPU from 'three/addons/capabilities/WebGPU.js';
+			import WebGPURenderer from 'three/addons/renderers/webgpu/WebGPURenderer.js';
+			import PostProcessing from 'three/addons/renderers/common/PostProcessing.js';
+			import { pass } from 'three/nodes';
+
let camera, scene, renderer;
let mesh, postProcessing, combinedPass;

// GUI-tweakable parameters.
const params = {

	damp: 0.96

};

init();
createGUI();
animate();

// Sets up the scene, renderer and the after-image post-processing chain.
function init() {

	if ( ! WebGPU.isAvailable() ) {

		document.body.appendChild( WebGPU.getErrorMessage() );

		throw new Error( 'No WebGPU support' );

	}

	camera = new THREE.PerspectiveCamera( 70, window.innerWidth / window.innerHeight, 1, 1000 );
	camera.position.z = 400;

	scene = new THREE.Scene();
	scene.fog = new THREE.Fog( 0x000000, 1, 1000 );

	mesh = new THREE.Mesh(
		new THREE.TorusKnotGeometry( 100, 30, 100, 16 ),
		new THREE.MeshNormalMaterial()
	);
	scene.add( mesh );

	renderer = new WebGPURenderer( { antialias: true } );
	renderer.setPixelRatio( window.devicePixelRatio );
	renderer.setSize( window.innerWidth, window.innerHeight );
	document.body.appendChild( renderer.domElement );

	// postprocessing: feed the scene color pass into the after-image effect

	postProcessing = new PostProcessing( renderer );

	const scenePass = pass( scene, camera );

	combinedPass = scenePass.getTextureNode().afterImage( params.damp );

	postProcessing.outputNode = combinedPass;

	window.addEventListener( 'resize', onWindowResize );

}

// Exposes the effect's damp uniform in a small control panel.
function createGUI() {

	const gui = new GUI( { title: 'Damp setting' } );
	gui.add( combinedPass.damp, 'value', 0, 1 ).step( 0.001 );

}

function onWindowResize() {

	camera.aspect = window.innerWidth / window.innerHeight;
	camera.updateProjectionMatrix();

	renderer.setSize( window.innerWidth, window.innerHeight );

}

function render() {

	mesh.rotation.x += 0.0075;
	mesh.rotation.y += 0.015;

	// PostProcessing currently requires the WebGPU backend; otherwise fall
	// back to a plain render without the effect.
	if ( ! renderer.backend.isWebGPUBackend ) {

		renderer.render( scene, camera );

		return;

	}

	postProcessing.render();

}

function animate() {

	requestAnimationFrame( animate );
	render();

}
+
+		</script>
+	</body>
+</html>