Browse source files

WebGPU: Audio Processing using ShaderNode (#24918)

* WebGPURenderer: add .getArrayFromBuffer()

* Added webgpu_audio_processing example

* test if unmap() is really necessary

* improve example

* example update

* fix .isViewportNode

* added visual feedback

* optimize a bit
sunag 2 years ago
parent
commit
ac30ce09d3

+ 1 - 0
examples/files.json

@@ -324,6 +324,7 @@
 		"webgl2_volume_perlin"
 	],
 	"webgpu": [
+		"webgpu_audio_processing",
 		"webgpu_compute",
 		"webgpu_cubemap_adjustments",
 		"webgpu_cubemap_mix",

+ 1 - 1
examples/jsm/nodes/display/ViewportNode.js

@@ -20,7 +20,7 @@ class ViewportNode extends Node {
 
 		this.scope = scope;
 
-		this.isScreenNode = true;
+		this.isViewportNode = true;
 
 	}
 

+ 63 - 5
examples/jsm/renderers/webgpu/WebGPUAttributes.js

@@ -23,7 +23,7 @@ class WebGPUAttributes {
 
 		if ( data ) {
 
-			data.buffer.destroy();
+			this._destroyBuffers( data );
 
 			this.buffers.delete( attribute );
 
@@ -51,7 +51,7 @@ class WebGPUAttributes {
 
 		} else if ( usage && usage !== data.usage ) {
 
-			data.buffer.destroy();
+			this._destroyBuffers( data );
 
 			data = this._createBuffer( attribute, usage );
 
@@ -67,6 +67,53 @@ class WebGPUAttributes {
 
 	}
 
+	async getArrayBuffer( attribute ) {
+
+		const data = this.get( attribute );
+		const device = this.device;
+
+		const gpuBuffer = data.buffer;
+		const size = gpuBuffer.size;
+
+		let gpuReadBuffer = data.readBuffer;
+		let needsUnmap = true;
+
+		if ( gpuReadBuffer === null ) {
+
+			gpuReadBuffer = device.createBuffer( {
+				size,
+				usage: GPUBufferUsage.COPY_DST | GPUBufferUsage.MAP_READ
+			} );
+
+			needsUnmap = false;
+
+			data.readBuffer = gpuReadBuffer;
+
+		}
+
+		const cmdEncoder = device.createCommandEncoder( {} );
+
+		cmdEncoder.copyBufferToBuffer(
+			gpuBuffer,
+			0,
+			gpuReadBuffer,
+			0,
+			size
+		);
+
+		if ( needsUnmap ) gpuReadBuffer.unmap();
+
+		const gpuCommands = cmdEncoder.finish();
+		device.queue.submit( [ gpuCommands ] );
+
+		await gpuReadBuffer.mapAsync( GPUMapMode.READ );
+
+		const arrayBuffer = gpuReadBuffer.getMappedRange();
+
+		return new Float32Array( arrayBuffer );
+
+	}
+
 	_createBuffer( attribute, usage ) {
 
 		const array = attribute.array;
@@ -74,7 +121,7 @@ class WebGPUAttributes {
 
 		const buffer = this.device.createBuffer( {
 			size,
-			usage: usage | GPUBufferUsage.COPY_DST,
+			usage: usage | GPUBufferUsage.COPY_SRC | GPUBufferUsage.COPY_DST,
 			mappedAtCreation: true
 		} );
 
@@ -87,6 +134,7 @@ class WebGPUAttributes {
 		return {
 			version: attribute.version,
 			buffer,
+			readBuffer: null,
 			usage
 		};
 
@@ -94,6 +142,8 @@ class WebGPUAttributes {
 
 	_writeBuffer( buffer, attribute ) {
 
+		const device = this.device;
+
 		const array = attribute.array;
 		const updateRange = attribute.updateRange;
 
@@ -101,7 +151,7 @@ class WebGPUAttributes {
 
 			// Not using update ranges
 
-			this.device.queue.writeBuffer(
+			device.queue.writeBuffer(
 				buffer,
 				0,
 				array,
@@ -110,7 +160,7 @@ class WebGPUAttributes {
 
 		} else {
 
-			this.device.queue.writeBuffer(
+			device.queue.writeBuffer(
 				buffer,
 				0,
 				array,
@@ -124,6 +174,14 @@ class WebGPUAttributes {
 
 	}
 
+	_destroyBuffers( { buffer, readBuffer } ) {
+
+		buffer.destroy();
+
+		if ( readBuffer !== null ) readBuffer.destroy();
+
+	}
+
 }
 
 export default WebGPUAttributes;

+ 6 - 0
examples/jsm/renderers/webgpu/WebGPURenderer.js

@@ -384,6 +384,12 @@ class WebGPURenderer {
 
 	}
 
+	async getArrayFromBuffer( attribute ) {
+
+		return await this._attributes.getArrayBuffer( attribute );
+
+	}
+
 	getContext() {
 
 		return this._context;

BIN
examples/screenshots/webgpu_audio_processing.jpg


BIN
examples/sounds/webgpu-audio-processing.mp3


+ 249 - 0
examples/webgpu_audio_processing.html

@@ -0,0 +1,249 @@
+<html lang="en">
+	<head>
+		<title>three.js - WebGPU - Audio Processing</title>
+		<meta charset="utf-8">
+		<meta name="viewport" content="width=device-width, user-scalable=no, minimum-scale=1.0, maximum-scale=1.0">
+		<link type="text/css" rel="stylesheet" href="main.css">
+	</head>
+	<body>
+
+		<div id="overlay">
+			<button id="startButton">Play</button>
+		</div>
+
+		<div id="info">
+			<a href="https://threejs.org" target="_blank" rel="noopener">three.js</a> WebGPU - Audio Processing
+			<br>Click on the screen to process the audio using WebGPU.
+		</div>
+
+		<script async src="https://unpkg.com/[email protected]/dist/es-module-shims.js"></script>
+
+		<script type="importmap">
+			{
+				"imports": {
+					"three": "../build/three.module.js",
+					"three/addons/": "./jsm/",
+					"three/nodes": "./jsm/nodes/Nodes.js"
+				}
+			}
+		</script>
+
+		<script type="module">
+
+			import * as THREE from 'three';
+
+			import {
+				ShaderNode, compute,
+				uniform, element, storage, instanceIndex,
+				float, assign, add, sub, div, mul, texture, viewportTopLeft, color
+			} from 'three/nodes';
+
+			import { GUI } from 'three/addons/libs/lil-gui.module.min.js';
+
+			import WebGPU from 'three/addons/capabilities/WebGPU.js';
+			import WebGPURenderer from 'three/addons/renderers/webgpu/WebGPURenderer.js';
+
+			let camera, scene, renderer;
+			let computeNode;
+			let waveBuffer, sampleRate;
+			let waveGPUBuffer;
+			let currentAudio, currentAnalyser;
+			let analyserBuffer = new Uint8Array( 1024 );
+			let analyserTexture;
+
+			await init();
+
+			async function playAudioBuffer() {
+
+				if ( currentAudio ) currentAudio.stop();
+
+				// compute audio
+
+				renderer.compute( computeNode );
+
+				const waveArray = await renderer.getArrayFromBuffer( waveGPUBuffer );
+
+				// play result
+
+				const audioOutputContext = new AudioContext( { sampleRate } );
+				const audioOutputBuffer = audioOutputContext.createBuffer( 1, waveArray.length, sampleRate );
+
+				audioOutputBuffer.copyToChannel( waveArray, 0 );
+
+				const source = audioOutputContext.createBufferSource();
+				source.connect( audioOutputContext.destination );
+				source.buffer = audioOutputBuffer;
+				source.start();
+
+				currentAudio = source;
+
+				// visual feedback
+
+				currentAnalyser = audioOutputContext.createAnalyser();
+				currentAnalyser.fftSize = 2048;
+
+				source.connect( currentAnalyser );
+
+			}
+
+			async function init() {
+
+				if ( WebGPU.isAvailable() === false ) {
+
+					document.body.appendChild( WebGPU.getErrorMessage() );
+
+					throw new Error( 'No WebGPU support' );
+
+				}
+
+				document.onclick = () => {
+
+					const overlay = document.getElementById( 'overlay' );
+					if ( overlay !== null ) overlay.remove();
+
+					playAudioBuffer();
+
+				};
+
+				// audio buffer
+
+				const soundBuffer = await fetch( 'sounds/webgpu-audio-processing.mp3' ).then( res => res.arrayBuffer() );
+				const audioContext = new AudioContext();
+
+				const audioBuffer = await audioContext.decodeAudioData( soundBuffer );
+
+				waveBuffer = audioBuffer.getChannelData( 0 );
+
+				// adding extra silence to delay and pitch
+				waveBuffer = new Float32Array( [ ...waveBuffer, ...new Float32Array( 200000 ) ] );
+
+				sampleRate = audioBuffer.sampleRate / audioBuffer.numberOfChannels;
+
+
+				// create webgpu buffers
+
+				waveGPUBuffer = new THREE.InstancedBufferAttribute( waveBuffer, 1 );
+
+				const waveStorageNode = storage( waveGPUBuffer, 'float', waveBuffer.length );
+
+
+				// read-only buffer
+
+				const waveNode = storage( new THREE.InstancedBufferAttribute( waveBuffer, 1 ), 'float', waveBuffer.length );
+
+
+				// params
+
+				const pitch = uniform( 1.5 );
+				const delayVolume = uniform( .2 );
+				const delayOffset = uniform( .55 );
+
+
+				// compute (shader-node)
+
+				const computeShaderNode = new ShaderNode( ( inputs, builder ) => {
+
+					const index = float( instanceIndex );
+
+
+					// pitch
+
+					const time = mul( index, pitch );
+
+					let wave = element( waveNode, time );
+
+
+					// delay
+
+					for ( let i = 1; i < 7; i ++ ) {
+
+						const waveOffset = element( waveNode, mul( sub( index, mul( mul( delayOffset, sampleRate ), i ) ), pitch ) );
+						const waveOffsetVolume = mul( waveOffset, div( delayVolume, i * i ) );
+
+						wave = add( wave, waveOffsetVolume );
+
+					}
+
+
+					// store
+
+					const waveStorageElementNode = element( waveStorageNode, instanceIndex );
+
+					assign( waveStorageElementNode, wave ).build( builder );
+
+				} );
+
+
+				// compute
+
+				computeNode = compute( computeShaderNode, waveBuffer.length );
+
+
+				// gui
+
+				const gui = new GUI();
+
+				gui.add( pitch, 'value', .5, 2, 0.01 ).name( 'pitch' );
+				gui.add( delayVolume, 'value', 0, 1, .01 ).name( 'delayVolume' );
+				gui.add( delayOffset, 'value', .1, 1, .01 ).name( 'delayOffset' );
+
+
+				// renderer
+
+				const container = document.createElement( 'div' );
+				document.body.appendChild( container );
+
+				camera = new THREE.PerspectiveCamera( 45, window.innerWidth / window.innerHeight, 0.01, 30 );
+
+
+				// nodes
+
+				analyserTexture = new THREE.DataTexture( analyserBuffer, analyserBuffer.length, 1, THREE.RedFormat );
+
+				const spectrum = mul( texture( analyserTexture, viewportTopLeft.x ).x, viewportTopLeft.y );
+				const backgroundNode = mul( color( 0x0000FF ), spectrum );
+
+
+				// scene
+
+				scene = new THREE.Scene();
+				scene.backgroundNode = backgroundNode;
+
+				// renderer
+
+				renderer = new WebGPURenderer();
+				renderer.setPixelRatio( window.devicePixelRatio );
+				renderer.setSize( window.innerWidth, window.innerHeight );
+				renderer.setAnimationLoop( render );
+				container.appendChild( renderer.domElement );
+
+				window.addEventListener( 'resize', onWindowResize );
+
+			}
+
+			function onWindowResize() {
+
+				camera.aspect = window.innerWidth / window.innerHeight;
+				camera.updateProjectionMatrix();
+
+				renderer.setSize( window.innerWidth, window.innerHeight );
+
+			}
+
+			function render() {
+
+				if ( currentAnalyser ) {
+
+					currentAnalyser.getByteFrequencyData( analyserBuffer );
+
+					analyserTexture.needsUpdate = true;
+
+				}
+
+				renderer.render( scene, camera );
+
+			}
+
+		</script>
+	</body>
+</html>

+ 1 - 0
test/e2e/puppeteer.js

@@ -44,6 +44,7 @@ const exceptionList = [
 	// webxr
 	'webxr_ar_lighting',
 	// webgpu
+	'webgpu_audio_processing',
 	'webgpu_compute',
 	'webgpu_cubemap_adjustments',
 	'webgpu_cubemap_mix',