<!DOCTYPE html>
<html lang="en">
	<head>
		<title>three.js - WebGPU - Audio Processing</title>
		<meta charset="utf-8">
		<meta name="viewport" content="width=device-width, user-scalable=no, minimum-scale=1.0, maximum-scale=1.0">
		<link type="text/css" rel="stylesheet" href="main.css">
	</head>
	<body>

		<div id="overlay">
			<button id="startButton">Play</button>
		</div>
		<div id="info">
			<a href="https://threejs.org" target="_blank" rel="noopener">three.js</a> WebGPU - Audio Processing
			<br>Click on screen to process the audio using WebGPU.
		</div>
		<script async src="https://unpkg.com/[email protected]/dist/es-module-shims.js"></script>

		<script type="importmap">
			{
				"imports": {
					"three": "../build/three.module.js",
					"three/addons/": "./jsm/",
					"three/nodes": "./jsm/nodes/Nodes.js"
				}
			}
		</script>

		<script type="module">
import * as THREE from 'three';
import {
	ShaderNode, compute,
	uniform, element, storage, instanceIndex,
	float, add, sub, div, mul, texture, viewportTopLeft, color
} from 'three/nodes';

import { GUI } from 'three/addons/libs/lil-gui.module.min.js';

import WebGPU from 'three/addons/capabilities/WebGPU.js';
import WebGPURenderer from 'three/addons/renderers/webgpu/WebGPURenderer.js';

let camera, scene, renderer;

// compute( ... ) node that runs the audio-processing shader each playback
let computeNode;

// decoded mono samples (plus trailing silence) and the playback sample rate
let waveBuffer, sampleRate;

// storage attribute the compute shader writes processed samples into
let waveGPUBuffer;

// currently playing AudioBufferSourceNode and its AnalyserNode (undefined until first click)
let currentAudio, currentAnalyser;

// frequency-spectrum readback target; uploaded to the GPU via analyserTexture
const analyserBuffer = new Uint8Array( 1024 );

let analyserTexture;

init();
  44. async function playAudioBuffer() {
  45. if ( currentAudio ) currentAudio.stop();
  46. // compute audio
  47. renderer.compute( computeNode );
  48. const waveArray = new Float32Array( await renderer.getArrayBuffer( waveGPUBuffer ) );
  49. // play result
  50. const audioOutputContext = new AudioContext( { sampleRate } );
  51. const audioOutputBuffer = audioOutputContext.createBuffer( 1, waveArray.length, sampleRate );
  52. audioOutputBuffer.copyToChannel( waveArray, 0 );
  53. const source = audioOutputContext.createBufferSource();
  54. source.connect( audioOutputContext.destination );
  55. source.buffer = audioOutputBuffer;
  56. source.start();
  57. currentAudio = source;
  58. // visual feedback
  59. currentAnalyser = audioOutputContext.createAnalyser();
  60. currentAnalyser.fftSize = 2048;
  61. source.connect( currentAnalyser );
  62. }
// One-time setup: checks WebGPU support, decodes the source audio, builds the
// compute shader (pitch shift + 6-tap delay), and creates the scene/renderer
// whose background visualizes the playback spectrum.
async function init() {

	// Bail out early, with a visible message, when the browser lacks WebGPU.
	if ( WebGPU.isAvailable() === false ) {

		document.body.appendChild( WebGPU.getErrorMessage() );

		throw new Error( 'No WebGPU support' );

	}

	// Any click dismisses the "Play" overlay (first time) and (re)starts playback.
	document.onclick = () => {

		const overlay = document.getElementById( 'overlay' );
		if ( overlay !== null ) overlay.remove();

		playAudioBuffer();

	};

	// audio buffer: fetch and decode the source sound
	const soundBuffer = await fetch( 'sounds/webgpu-audio-processing.mp3' ).then( res => res.arrayBuffer() );
	const audioContext = new AudioContext();

	const audioBuffer = await audioContext.decodeAudioData( soundBuffer );

	waveBuffer = audioBuffer.getChannelData( 0 );

	// adding extra silence to delay and pitch — leaves room for the echo tail
	waveBuffer = new Float32Array( [ ...waveBuffer, ...new Float32Array( 200000 ) ] );

	// NOTE(review): dividing by numberOfChannels is unusual given only channel 0
	// is read above — confirm this is the intended playback rate.
	sampleRate = audioBuffer.sampleRate / audioBuffer.numberOfChannels;

	// create webgpu buffers

	// writable storage buffer the compute shader stores processed samples into
	waveGPUBuffer = new THREE.InstancedBufferAttribute( waveBuffer, 1 );

	const waveStorageNode = storage( waveGPUBuffer, 'float', waveBuffer.length );

	// read-only buffer holding the original samples (separate attribute, same data)
	const waveNode = storage( new THREE.InstancedBufferAttribute( waveBuffer, 1 ), 'float', waveBuffer.length );

	// params — uniforms exposed through the GUI below
	const pitch = uniform( 1.5 );
	const delayVolume = uniform( .2 );
	const delayOffset = uniform( .55 );

	// compute (shader-node)
	const computeShaderNode = new ShaderNode( ( inputs, stack ) => {

		const index = float( instanceIndex );

		// pitch: read the source at an index scaled by the pitch factor
		const time = mul( index, pitch );

		let wave = element( waveNode, time );

		// delay: mix in 6 echo taps, each offset further back in time and
		// attenuated by delayVolume / i^2
		for ( let i = 1; i < 7; i ++ ) {

			const waveOffset = element( waveNode, mul( sub( index, mul( mul( delayOffset, sampleRate ), i ) ), pitch ) );
			const waveOffsetVolume = mul( waveOffset, div( delayVolume, i * i ) );

			wave = add( wave, waveOffsetVolume );

		}

		// store the mixed sample at this invocation's slot in the output buffer
		const waveStorageElementNode = element( waveStorageNode, instanceIndex );

		stack.assign( waveStorageElementNode, wave );

	} );

	// compute: one invocation per output sample
	computeNode = compute( computeShaderNode, waveBuffer.length );

	// gui
	const gui = new GUI();

	gui.add( pitch, 'value', .5, 2, 0.01 ).name( 'pitch' );
	gui.add( delayVolume, 'value', 0, 1, .01 ).name( 'delayVolume' );
	gui.add( delayOffset, 'value', .1, 1, .01 ).name( 'delayOffset' );

	// renderer
	const container = document.createElement( 'div' );
	document.body.appendChild( container );

	camera = new THREE.PerspectiveCamera( 45, window.innerWidth / window.innerHeight, 0.01, 30 );

	// nodes: background brightness driven by the live frequency spectrum,
	// sampled horizontally across the viewport and faded vertically
	analyserTexture = new THREE.DataTexture( analyserBuffer, analyserBuffer.length, 1, THREE.RedFormat );

	const spectrum = mul( texture( analyserTexture, viewportTopLeft.x ).x, viewportTopLeft.y );
	const backgroundNode = mul( color( 0x0000FF ), spectrum );

	// scene
	scene = new THREE.Scene();
	scene.backgroundNode = backgroundNode;

	// renderer
	renderer = new WebGPURenderer();
	renderer.setPixelRatio( window.devicePixelRatio );
	renderer.setSize( window.innerWidth, window.innerHeight );
	renderer.setAnimationLoop( render );
	container.appendChild( renderer.domElement );

	window.addEventListener( 'resize', onWindowResize );

}
  132. function onWindowResize() {
  133. camera.aspect = window.innerWidth / window.innerHeight;
  134. camera.updateProjectionMatrix();
  135. renderer.setSize( window.innerWidth, window.innerHeight );
  136. }
  137. function render() {
  138. if ( currentAnalyser ) {
  139. currentAnalyser.getByteFrequencyData( analyserBuffer );
  140. analyserTexture.needsUpdate = true;
  141. }
  142. renderer.render( scene, camera );
  143. }
		</script>
	</body>
</html>