WebXRManager.js

import { ArrayCamera } from '../../cameras/ArrayCamera.js';
import { EventDispatcher } from '../../core/EventDispatcher.js';
import { PerspectiveCamera } from '../../cameras/PerspectiveCamera.js';
import { Vector3 } from '../../math/Vector3.js';
import { Vector4 } from '../../math/Vector4.js';
import { WebGLAnimation } from '../webgl/WebGLAnimation.js';
import { WebXRController } from './WebXRController.js';

class WebXRManager extends EventDispatcher {

	constructor( renderer, gl ) {

		super();

		const scope = this;
		const state = renderer.state;

		let session = null;
		let framebufferScaleFactor = 1.0;

		let referenceSpace = null;
		let referenceSpaceType = 'local-floor';

		let pose = null;
		let glBinding = null;
		let glFramebuffer = null;
		let glProjLayer = null;
		let glBaseLayer = null;
		let isMultisample = false;
		let glMultisampledFramebuffer = null;
		let glColorRenderbuffer = null;
		let glDepthRenderbuffer = null;
		let xrFrame = null;
		let depthStyle = null;
		let clearStyle = null;

		const msaartcSupported = renderer.extensions.has( 'EXT_multisampled_render_to_texture' );
		let msaaExt = null;

		const controllers = [];
		const inputSourcesMap = new Map();

		//

		const cameraL = new PerspectiveCamera();
		cameraL.layers.enable( 1 );
		cameraL.viewport = new Vector4();

		const cameraR = new PerspectiveCamera();
		cameraR.layers.enable( 2 );
		cameraR.viewport = new Vector4();

		const cameras = [ cameraL, cameraR ];

		const cameraVR = new ArrayCamera();
		cameraVR.layers.enable( 1 );
		cameraVR.layers.enable( 2 );

		let _currentDepthNear = null;
		let _currentDepthFar = null;

		//

		this.cameraAutoUpdate = true;
		this.enabled = false;

		this.isPresenting = false;

		this.getController = function ( index ) {

			let controller = controllers[ index ];

			if ( controller === undefined ) {
				controller = new WebXRController();
				controllers[ index ] = controller;
			}

			return controller.getTargetRaySpace();

		};

		this.getControllerGrip = function ( index ) {

			let controller = controllers[ index ];

			if ( controller === undefined ) {
				controller = new WebXRController();
				controllers[ index ] = controller;
			}

			return controller.getGripSpace();

		};

		this.getHand = function ( index ) {

			let controller = controllers[ index ];

			if ( controller === undefined ) {
				controller = new WebXRController();
				controllers[ index ] = controller;
			}

			return controller.getHandSpace();

		};
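		// Usage sketch (hypothetical application code, not part of this module):
		// the spaces returned above are Group objects that an app adds to its scene
		// and that start tracking once a matching XR input source connects, e.g.
		//
		//   const controller = renderer.xr.getController( 0 );
		//   const grip = renderer.xr.getControllerGrip( 0 );
		//   scene.add( controller, grip );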
		//

		function onSessionEvent( event ) {

			const controller = inputSourcesMap.get( event.inputSource );

			if ( controller ) {
				controller.dispatchEvent( { type: event.type, data: event.inputSource } );
			}

		}

		function onSessionEnd() {

			inputSourcesMap.forEach( function ( controller, inputSource ) {
				controller.disconnect( inputSource );
			} );

			inputSourcesMap.clear();

			_currentDepthNear = null;
			_currentDepthFar = null;

			// restore framebuffer/rendering state
			state.bindXRFramebuffer( null );
			renderer.setRenderTarget( renderer.getRenderTarget() );

			if ( glFramebuffer ) gl.deleteFramebuffer( glFramebuffer );
			if ( glMultisampledFramebuffer ) gl.deleteFramebuffer( glMultisampledFramebuffer );
			if ( glColorRenderbuffer ) gl.deleteRenderbuffer( glColorRenderbuffer );
			if ( glDepthRenderbuffer ) gl.deleteRenderbuffer( glDepthRenderbuffer );

			glFramebuffer = null;
			glMultisampledFramebuffer = null;
			glColorRenderbuffer = null;
			glDepthRenderbuffer = null;
			glBaseLayer = null;
			glProjLayer = null;
			glBinding = null;
			session = null;

			//

			animation.stop();

			scope.isPresenting = false;

			scope.dispatchEvent( { type: 'sessionend' } );

		}

		this.setFramebufferScaleFactor = function ( value ) {

			framebufferScaleFactor = value;

			if ( scope.isPresenting === true ) {
				console.warn( 'THREE.WebXRManager: Cannot change framebuffer scale while presenting.' );
			}

		};

		this.setReferenceSpaceType = function ( value ) {

			referenceSpaceType = value;

			if ( scope.isPresenting === true ) {
				console.warn( 'THREE.WebXRManager: Cannot change reference space type while presenting.' );
			}

		};

		this.getReferenceSpace = function () {

			return referenceSpace;

		};

		this.getBaseLayer = function () {

			return glProjLayer !== null ? glProjLayer : glBaseLayer;

		};

		this.getBinding = function () {

			return glBinding;

		};

		this.getFrame = function () {

			return xrFrame;

		};

		this.getSession = function () {

			return session;

		};

		this.setSession = async function ( value ) {

			session = value;

			if ( session !== null ) {

				session.addEventListener( 'select', onSessionEvent );
				session.addEventListener( 'selectstart', onSessionEvent );
				session.addEventListener( 'selectend', onSessionEvent );
				session.addEventListener( 'squeeze', onSessionEvent );
				session.addEventListener( 'squeezestart', onSessionEvent );
				session.addEventListener( 'squeezeend', onSessionEvent );
				session.addEventListener( 'end', onSessionEnd );
				session.addEventListener( 'inputsourceschange', onInputSourcesChange );

				const attributes = gl.getContextAttributes();

				if ( attributes.xrCompatible !== true ) {
					await gl.makeXRCompatible();
				}

				if ( session.renderState.layers === undefined ) {

					const layerInit = {
						antialias: attributes.antialias,
						alpha: attributes.alpha,
						depth: attributes.depth,
						stencil: attributes.stencil,
						framebufferScaleFactor: framebufferScaleFactor
					};

					glBaseLayer = new XRWebGLLayer( session, gl, layerInit );

					session.updateRenderState( { baseLayer: glBaseLayer } );

				} else if ( gl instanceof WebGLRenderingContext ) {
					// Use an old-style XRWebGLLayer here: the projection-layer MSAA path
					// below requires WebGL 2, which this context does not provide.
					const layerInit = {
						antialias: true,
						alpha: attributes.alpha,
						depth: attributes.depth,
						stencil: attributes.stencil,
						framebufferScaleFactor: framebufferScaleFactor
					};

					glBaseLayer = new XRWebGLLayer( session, gl, layerInit );

					session.updateRenderState( { layers: [ glBaseLayer ] } );

				} else {

					isMultisample = attributes.antialias;
					let depthFormat = null;

					if ( attributes.depth ) {
						clearStyle = gl.DEPTH_BUFFER_BIT;
						if ( attributes.stencil ) clearStyle |= gl.STENCIL_BUFFER_BIT;
						depthStyle = attributes.stencil ? gl.DEPTH_STENCIL_ATTACHMENT : gl.DEPTH_ATTACHMENT;
						depthFormat = attributes.stencil ? gl.DEPTH24_STENCIL8 : gl.DEPTH_COMPONENT24;
					}

					const projectionlayerInit = {
						colorFormat: attributes.alpha ? gl.RGBA8 : gl.RGB8,
						depthFormat: depthFormat,
						scaleFactor: framebufferScaleFactor
					};

					glBinding = new XRWebGLBinding( session, gl );

					glProjLayer = glBinding.createProjectionLayer( projectionlayerInit );

					glFramebuffer = gl.createFramebuffer();

					session.updateRenderState( { layers: [ glProjLayer ] } );

					if ( isMultisample && msaartcSupported ) {

						msaaExt = renderer.extensions.get( 'EXT_multisampled_render_to_texture' );

					} else if ( isMultisample ) {

						glMultisampledFramebuffer = gl.createFramebuffer();
						glColorRenderbuffer = gl.createRenderbuffer();
						gl.bindRenderbuffer( gl.RENDERBUFFER, glColorRenderbuffer );
						gl.renderbufferStorageMultisample(
							gl.RENDERBUFFER,
							4,
							gl.RGBA8,
							glProjLayer.textureWidth,
							glProjLayer.textureHeight );
						state.bindFramebuffer( gl.FRAMEBUFFER, glMultisampledFramebuffer );
						gl.framebufferRenderbuffer( gl.FRAMEBUFFER, gl.COLOR_ATTACHMENT0, gl.RENDERBUFFER, glColorRenderbuffer );
						gl.bindRenderbuffer( gl.RENDERBUFFER, null );

						if ( depthFormat !== null ) {
							glDepthRenderbuffer = gl.createRenderbuffer();
							gl.bindRenderbuffer( gl.RENDERBUFFER, glDepthRenderbuffer );
							gl.renderbufferStorageMultisample( gl.RENDERBUFFER, 4, depthFormat, glProjLayer.textureWidth, glProjLayer.textureHeight );
							gl.framebufferRenderbuffer( gl.FRAMEBUFFER, depthStyle, gl.RENDERBUFFER, glDepthRenderbuffer );
							gl.bindRenderbuffer( gl.RENDERBUFFER, null );
						}

						state.bindFramebuffer( gl.FRAMEBUFFER, null );

					}

				}

				referenceSpace = await session.requestReferenceSpace( referenceSpaceType );

				animation.setContext( session );
				animation.start();

				scope.isPresenting = true;

				scope.dispatchEvent( { type: 'sessionstart' } );

			}

		};
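		// Usage sketch (hypothetical application code): the session itself is requested
		// by the app and handed to this manager, e.g.
		//
		//   const session = await navigator.xr.requestSession( 'immersive-vr', {
		//     optionalFeatures: [ 'local-floor' ]
		//   } );
		//   await renderer.xr.setSession( session );
		//
		// setSession() then makes the GL context XR compatible if necessary, sets up the
		// base or projection layer, requests the reference space and starts the XR
		// animation loop defined further below.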
		function onInputSourcesChange( event ) {

			const inputSources = session.inputSources;

			// Assign inputSources to available controllers
			for ( let i = 0; i < controllers.length; i ++ ) {
				inputSourcesMap.set( inputSources[ i ], controllers[ i ] );
			}

			// Notify disconnected
			for ( let i = 0; i < event.removed.length; i ++ ) {

				const inputSource = event.removed[ i ];
				const controller = inputSourcesMap.get( inputSource );

				if ( controller ) {
					controller.dispatchEvent( { type: 'disconnected', data: inputSource } );
					inputSourcesMap.delete( inputSource );
				}

			}

			// Notify connected
			for ( let i = 0; i < event.added.length; i ++ ) {

				const inputSource = event.added[ i ];
				const controller = inputSourcesMap.get( inputSource );

				if ( controller ) {
					controller.dispatchEvent( { type: 'connected', data: inputSource } );
				}

			}

		}

		//

		const cameraLPos = new Vector3();
		const cameraRPos = new Vector3();
		/**
		 * Assumes 2 cameras that are parallel and share an X-axis, that the
		 * cameras' projection and world matrices have already been set, and
		 * that the near and far planes are identical for both cameras.
		 * Visualization of this technique: https://computergraphics.stackexchange.com/a/4765
		 */
		function setProjectionFromUnion( camera, cameraL, cameraR ) {

			cameraLPos.setFromMatrixPosition( cameraL.matrixWorld );
			cameraRPos.setFromMatrixPosition( cameraR.matrixWorld );

			const ipd = cameraLPos.distanceTo( cameraRPos );

			const projL = cameraL.projectionMatrix.elements;
			const projR = cameraR.projectionMatrix.elements;

			// VR systems will have identical far and near planes, and
			// most likely identical top and bottom frustum extents.
			// Use the left camera for these values.
			const near = projL[ 14 ] / ( projL[ 10 ] - 1 );
			const far = projL[ 14 ] / ( projL[ 10 ] + 1 );
			const topFov = ( projL[ 9 ] + 1 ) / projL[ 5 ];
			const bottomFov = ( projL[ 9 ] - 1 ) / projL[ 5 ];

			const leftFov = ( projL[ 8 ] - 1 ) / projL[ 0 ];
			const rightFov = ( projR[ 8 ] + 1 ) / projR[ 0 ];
			const left = near * leftFov;
			const right = near * rightFov;

			// Calculate the new camera's position offset from the
			// left camera. xOffset should be roughly half `ipd`.
			const zOffset = ipd / ( - leftFov + rightFov );
			const xOffset = zOffset * - leftFov;

			// TODO: Better way to apply this offset?
			cameraL.matrixWorld.decompose( camera.position, camera.quaternion, camera.scale );
			camera.translateX( xOffset );
			camera.translateZ( zOffset );
			camera.matrixWorld.compose( camera.position, camera.quaternion, camera.scale );
			camera.matrixWorldInverse.copy( camera.matrixWorld ).invert();

			// Find the union of the frustum values of the cameras and scale
			// the values so that the near plane's position does not change in world space,
			// although must now be relative to the new union camera.
			const near2 = near + zOffset;
			const far2 = far + zOffset;
			const left2 = left - xOffset;
			const right2 = right + ( ipd - xOffset );
			const top2 = topFov * far / far2 * near2;
			const bottom2 = bottomFov * far / far2 * near2;

			camera.projectionMatrix.makePerspective( left2, right2, top2, bottom2, near2, far2 );

		}

		function updateCamera( camera, parent ) {

			if ( parent === null ) {
				camera.matrixWorld.copy( camera.matrix );
			} else {
				camera.matrixWorld.multiplyMatrices( parent.matrixWorld, camera.matrix );
			}

			camera.matrixWorldInverse.copy( camera.matrixWorld ).invert();

		}

		this.updateCamera = function ( camera ) {

			if ( session === null ) return;

			cameraVR.near = cameraR.near = cameraL.near = camera.near;
			cameraVR.far = cameraR.far = cameraL.far = camera.far;

			if ( _currentDepthNear !== cameraVR.near || _currentDepthFar !== cameraVR.far ) {

				// Note that the new renderState won't apply until the next frame. See #18320
				session.updateRenderState( {
					depthNear: cameraVR.near,
					depthFar: cameraVR.far
				} );

				_currentDepthNear = cameraVR.near;
				_currentDepthFar = cameraVR.far;

			}

			const parent = camera.parent;
			const cameras = cameraVR.cameras;

			updateCamera( cameraVR, parent );

			for ( let i = 0; i < cameras.length; i ++ ) {
				updateCamera( cameras[ i ], parent );
			}

			cameraVR.matrixWorld.decompose( cameraVR.position, cameraVR.quaternion, cameraVR.scale );

			// update user camera and its children
			camera.position.copy( cameraVR.position );
			camera.quaternion.copy( cameraVR.quaternion );
			camera.scale.copy( cameraVR.scale );
			camera.matrix.copy( cameraVR.matrix );
			camera.matrixWorld.copy( cameraVR.matrixWorld );

			const children = camera.children;

			for ( let i = 0, l = children.length; i < l; i ++ ) {
				children[ i ].updateMatrixWorld( true );
			}

			// update projection matrix for proper view frustum culling
			if ( cameras.length === 2 ) {
				setProjectionFromUnion( cameraVR, cameraL, cameraR );
			} else {
				// assume single camera setup (AR)
				cameraVR.projectionMatrix.copy( cameraL.projectionMatrix );
			}

		};

		this.getCamera = function () {

			return cameraVR;

		};

		this.getFoveation = function () {

			if ( glProjLayer !== null ) {
				return glProjLayer.fixedFoveation;
			}

			if ( glBaseLayer !== null ) {
				return glBaseLayer.fixedFoveation;
			}

			return undefined;

		};

		this.setFoveation = function ( foveation ) {

			// 0 = no foveation = full resolution
			// 1 = maximum foveation = the edges render at lower resolution
			if ( glProjLayer !== null ) {
				glProjLayer.fixedFoveation = foveation;
			}

			if ( glBaseLayer !== null && glBaseLayer.fixedFoveation !== undefined ) {
				glBaseLayer.fixedFoveation = foveation;
			}

		};
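		// Usage sketch (hypothetical application code): foveation is a single value in
		// the range [ 0, 1 ] that is forwarded to whichever layer is active, e.g.
		//
		//   renderer.xr.setFoveation( 0.5 ); // render the periphery at reduced resolution
		//
		// If the runtime's layer does not expose fixedFoveation, getFoveation() returns
		// undefined and setFoveation() has no effect on that layer.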
		// Animation Loop

		let onAnimationFrameCallback = null;

		function onAnimationFrame( time, frame ) {

			pose = frame.getViewerPose( referenceSpace );
			xrFrame = frame;

			if ( pose !== null ) {

				const views = pose.views;

				if ( glBaseLayer !== null ) {
					state.bindXRFramebuffer( glBaseLayer.framebuffer );
				}

				let cameraVRNeedsUpdate = false;

				// check if it's necessary to rebuild cameraVR's camera list
				if ( views.length !== cameraVR.cameras.length ) {
					cameraVR.cameras.length = 0;
					cameraVRNeedsUpdate = true;
				}

				for ( let i = 0; i < views.length; i ++ ) {

					const view = views[ i ];

					let viewport = null;

					if ( glBaseLayer !== null ) {

						viewport = glBaseLayer.getViewport( view );

					} else {

						const glSubImage = glBinding.getViewSubImage( glProjLayer, view );

						state.bindXRFramebuffer( glFramebuffer );

						if ( isMultisample && msaartcSupported ) {

							if ( glSubImage.depthStencilTexture !== undefined ) {
								msaaExt.framebufferTexture2DMultisampleEXT( gl.FRAMEBUFFER, depthStyle, gl.TEXTURE_2D, glSubImage.depthStencilTexture, 0, 4 );
							}

							msaaExt.framebufferTexture2DMultisampleEXT( gl.FRAMEBUFFER, gl.COLOR_ATTACHMENT0, gl.TEXTURE_2D, glSubImage.colorTexture, 0, 4 );

						} else {

							if ( glSubImage.depthStencilTexture !== undefined ) {
								gl.framebufferTexture2D( gl.FRAMEBUFFER, depthStyle, gl.TEXTURE_2D, glSubImage.depthStencilTexture, 0 );
							}

							gl.framebufferTexture2D( gl.FRAMEBUFFER, gl.COLOR_ATTACHMENT0, gl.TEXTURE_2D, glSubImage.colorTexture, 0 );

						}

						viewport = glSubImage.viewport;

					}

					const camera = cameras[ i ];

					camera.matrix.fromArray( view.transform.matrix );
					camera.projectionMatrix.fromArray( view.projectionMatrix );
					camera.viewport.set( viewport.x, viewport.y, viewport.width, viewport.height );

					if ( i === 0 ) {
						cameraVR.matrix.copy( camera.matrix );
					}

					if ( cameraVRNeedsUpdate === true ) {
						cameraVR.cameras.push( camera );
					}

				}

				if ( isMultisample && ! msaartcSupported ) {

					state.bindXRFramebuffer( glMultisampledFramebuffer );

					if ( clearStyle !== null ) gl.clear( clearStyle );

				}

			}

			//

			const inputSources = session.inputSources;

			for ( let i = 0; i < controllers.length; i ++ ) {

				const controller = controllers[ i ];
				const inputSource = inputSources[ i ];

				controller.update( inputSource, frame, referenceSpace );

			}

			if ( onAnimationFrameCallback ) onAnimationFrameCallback( time, frame );

			if ( isMultisample && ! msaartcSupported ) {

				const width = glProjLayer.textureWidth;
				const height = glProjLayer.textureHeight;

				state.bindFramebuffer( gl.READ_FRAMEBUFFER, glMultisampledFramebuffer );
				state.bindFramebuffer( gl.DRAW_FRAMEBUFFER, glFramebuffer );

				// Invalidate the depth here to avoid flush of the depth data to main memory.
				gl.invalidateFramebuffer( gl.READ_FRAMEBUFFER, [ depthStyle ] );
				gl.invalidateFramebuffer( gl.DRAW_FRAMEBUFFER, [ depthStyle ] );

				gl.blitFramebuffer( 0, 0, width, height, 0, 0, width, height, gl.COLOR_BUFFER_BIT, gl.NEAREST );

				// Invalidate the MSAA buffer because it's not needed anymore.
				gl.invalidateFramebuffer( gl.READ_FRAMEBUFFER, [ gl.COLOR_ATTACHMENT0 ] );

				state.bindFramebuffer( gl.READ_FRAMEBUFFER, null );
				state.bindFramebuffer( gl.DRAW_FRAMEBUFFER, null );

				state.bindFramebuffer( gl.FRAMEBUFFER, glMultisampledFramebuffer );

			}

			xrFrame = null;

		}

		const animation = new WebGLAnimation();

		animation.setAnimationLoop( onAnimationFrame );

		this.setAnimationLoop = function ( callback ) {

			onAnimationFrameCallback = callback;

		};
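		// Usage note (referring to the surrounding renderer code): WebGLRenderer.setAnimationLoop()
		// also forwards the user's callback here, so while a session is presenting the callback
		// runs inside onAnimationFrame() above, after the per-view cameras and the controllers
		// have been updated from the current XRFrame.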
		this.dispose = function () {};

	}

}

export { WebXRManager };