<html lang="en">
	<head>
		<title>three.js - WebGPU - Audio Processing</title>
		<meta charset="utf-8">
		<meta name="viewport" content="width=device-width, user-scalable=no, minimum-scale=1.0, maximum-scale=1.0">
		<link type="text/css" rel="stylesheet" href="main.css">
	</head>
	<body>

		<div id="overlay">
			<button id="startButton">Play</button>
		</div>

		<div id="info">
			<a href="https://threejs.org" target="_blank" rel="noopener">three.js</a> WebGPU - Audio Processing
			<br>Click on the screen to process the audio using WebGPU.
		</div>

		<script async src="https://unpkg.com/[email protected]/dist/es-module-shims.js"></script>

		<script type="importmap">
			{
				"imports": {
					"three": "../build/three.module.js",
					"three/addons/": "./jsm/",
					"three/nodes": "./jsm/nodes/Nodes.js"
				}
			}
		</script>

		<script type="module">

			import * as THREE from 'three';
			import {
				ShaderNode, compute,
				uniform, element, storage, instanceIndex,
				float, add, sub, div, mul, texture, viewportTopLeft, color
			} from 'three/nodes';

			import { GUI } from 'three/addons/libs/lil-gui.module.min.js';

			import WebGPU from 'three/addons/capabilities/WebGPU.js';
			import WebGPURenderer from 'three/addons/renderers/webgpu/WebGPURenderer.js';

			let camera, scene, renderer;
			let computeNode;

			let waveBuffer, sampleRate;
			let waveGPUBuffer;
			let currentAudio, currentAnalyser;

			let analyserBuffer = new Uint8Array( 1024 );
			let analyserTexture;

			await init();
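
			// Runs the compute pass, reads the processed samples back from the GPU
			// and plays them through a Web Audio buffer source. Called on every click,
			// so the current uniform values are applied each time.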
			async function playAudioBuffer() {

				// stop the sound that is currently playing, if any
				if ( currentAudio ) currentAudio.stop();

				// compute audio
				renderer.compute( computeNode );

				const waveArray = await renderer.getArrayFromBuffer( waveGPUBuffer );

				// play result
				const audioOutputContext = new AudioContext( { sampleRate } );
				const audioOutputBuffer = audioOutputContext.createBuffer( 1, waveArray.length, sampleRate );

				audioOutputBuffer.copyToChannel( waveArray, 0 );

				const source = audioOutputContext.createBufferSource();
				source.connect( audioOutputContext.destination );
				source.buffer = audioOutputBuffer;
				source.start();

				currentAudio = source;

				// visual feedback
				currentAnalyser = audioOutputContext.createAnalyser();
				currentAnalyser.fftSize = 2048;

				source.connect( currentAnalyser );

			}
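
			// Checks for WebGPU support, decodes the source audio, builds the pitch/delay
			// compute graph and sets up the GUI, the background visualizer and the renderer.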
			async function init() {

				if ( WebGPU.isAvailable() === false ) {

					document.body.appendChild( WebGPU.getErrorMessage() );

					throw new Error( 'No WebGPU support' );

				}

				document.onclick = () => {

					const overlay = document.getElementById( 'overlay' );
					if ( overlay !== null ) overlay.remove();

					playAudioBuffer();

				};

				// audio buffer

				const soundBuffer = await fetch( 'sounds/webgpu-audio-processing.mp3' ).then( res => res.arrayBuffer() );
				const audioContext = new AudioContext();

				const audioBuffer = await audioContext.decodeAudioData( soundBuffer );

				waveBuffer = audioBuffer.getChannelData( 0 );

				// append extra silence so the delayed and pitched tail has room to play out
				waveBuffer = new Float32Array( [ ...waveBuffer, ...new Float32Array( 200000 ) ] );

				sampleRate = audioBuffer.sampleRate / audioBuffer.numberOfChannels;

				// create webgpu buffers

				// writable storage buffer that receives the processed samples
				waveGPUBuffer = new THREE.InstancedBufferAttribute( waveBuffer, 1 );

				const waveStorageNode = storage( waveGPUBuffer, 'float', waveBuffer.length );

				// read-only buffer holding the source samples
				const waveNode = storage( new THREE.InstancedBufferAttribute( waveBuffer, 1 ), 'float', waveBuffer.length );

				// params

				const pitch = uniform( 1.5 ); // playback-rate multiplier applied to the sample index
				const delayVolume = uniform( .2 ); // base volume of the echo taps
				const delayOffset = uniform( .55 ); // echo spacing in seconds

				// compute (shader-node)

				const computeShaderNode = new ShaderNode( ( inputs, stack ) => {

					const index = float( instanceIndex );

					// pitch: reading the source at index * pitch changes the playback rate
					const time = mul( index, pitch );

					let wave = element( waveNode, time );

					// delay: add six echo taps, each delayOffset seconds further back and attenuated by 1 / ( i * i )
					for ( let i = 1; i < 7; i ++ ) {

						const waveOffset = element( waveNode, mul( sub( index, mul( mul( delayOffset, sampleRate ), i ) ), pitch ) );
						const waveOffsetVolume = mul( waveOffset, div( delayVolume, i * i ) );

						wave = add( wave, waveOffsetVolume );

					}

					// store the result for this sample index in the output buffer
					const waveStorageElementNode = element( waveStorageNode, instanceIndex );

					stack.assign( waveStorageElementNode, wave );

				} );

				// compute

				computeNode = compute( computeShaderNode, waveBuffer.length );

				// gui

				const gui = new GUI();

				gui.add( pitch, 'value', .5, 2, 0.01 ).name( 'pitch' );
				gui.add( delayVolume, 'value', 0, 1, .01 ).name( 'delayVolume' );
				gui.add( delayOffset, 'value', .1, 1, .01 ).name( 'delayOffset' );
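
				// the uniforms are only read when the compute pass runs, so GUI changes take effect the next time the audio is played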

				// renderer

				const container = document.createElement( 'div' );
				document.body.appendChild( container );

				camera = new THREE.PerspectiveCamera( 45, window.innerWidth / window.innerHeight, 0.01, 30 );

				// nodes

				analyserTexture = new THREE.DataTexture( analyserBuffer, analyserBuffer.length, 1, THREE.RedFormat );

				// sample the frequency data along the viewport x axis and scale it by the viewport y coordinate
				const spectrum = mul( texture( analyserTexture, viewportTopLeft.x ).x, viewportTopLeft.y );
				const backgroundNode = mul( color( 0x0000FF ), spectrum );

				// scene

				scene = new THREE.Scene();
				scene.backgroundNode = backgroundNode;

				// renderer

				renderer = new WebGPURenderer();
				renderer.setPixelRatio( window.devicePixelRatio );
				renderer.setSize( window.innerWidth, window.innerHeight );
				renderer.setAnimationLoop( render );
				container.appendChild( renderer.domElement );

				window.addEventListener( 'resize', onWindowResize );

			}

			function onWindowResize() {

				camera.aspect = window.innerWidth / window.innerHeight;
				camera.updateProjectionMatrix();

				renderer.setSize( window.innerWidth, window.innerHeight );

			}
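
			// copies the latest frequency data into the analyser texture every frame so the background follows the audio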
			function render() {

				if ( currentAnalyser ) {

					currentAnalyser.getByteFrequencyData( analyserBuffer );

					analyserTexture.needsUpdate = true;

				}

				renderer.render( scene, camera );

			}

		</script>
	</body>
</html>