瀏覽代碼

Merge pull request #1688 from jamesu/dx11_openvr_pr

Basic OpenVR Support code
Areloch 9 年之前
父節點
當前提交
7e4095d610
共有 74 個文件被更改,包括 6002 次插入和 1398 次刪除
  1. 0 38
      Engine/source/T3D/camera.cpp
  2. 0 1
      Engine/source/T3D/camera.h
  3. 89 45
      Engine/source/T3D/gameBase/extended/extendedMove.cpp
  4. 9 5
      Engine/source/T3D/gameBase/extended/extendedMove.h
  5. 5 5
      Engine/source/T3D/gameBase/extended/extendedMoveList.cpp
  6. 22 4
      Engine/source/T3D/gameBase/gameConnection.cpp
  7. 4 0
      Engine/source/T3D/gameBase/gameConnection.h
  8. 6 7
      Engine/source/T3D/gameFunctions.cpp
  9. 41 30
      Engine/source/T3D/lightFlareData.cpp
  10. 111 66
      Engine/source/T3D/player.cpp
  11. 8 1
      Engine/source/T3D/player.h
  12. 7 53
      Engine/source/T3D/shapeBase.cpp
  13. 0 3
      Engine/source/T3D/shapeBase.h
  14. 2 2
      Engine/source/environment/scatterSky.cpp
  15. 400 397
      Engine/source/gfx/D3D11/gfxD3D11Device.cpp
  16. 4 0
      Engine/source/gfx/D3D11/gfxD3D11Device.h
  17. 1 0
      Engine/source/gfx/D3D11/gfxD3D11EnumTranslate.cpp
  18. 25 18
      Engine/source/gfx/D3D11/gfxD3D11Target.cpp
  19. 3 3
      Engine/source/gfx/D3D11/gfxD3D11TextureObject.cpp
  20. 2 0
      Engine/source/gfx/D3D9/pc/gfxD3D9EnumTranslate.pc.cpp
  21. 1 0
      Engine/source/gfx/bitmap/gBitmap.cpp
  22. 4 3
      Engine/source/gfx/bitmap/loaders/bitmapPng.cpp
  23. 10 0
      Engine/source/gfx/gfxAdapter.h
  24. 2 1
      Engine/source/gfx/gfxDevice.cpp
  25. 27 8
      Engine/source/gfx/gfxDevice.h
  26. 1 0
      Engine/source/gfx/gfxDrawUtil.cpp
  27. 6 3
      Engine/source/gfx/gfxEnums.h
  28. 2 0
      Engine/source/gfx/gfxFontRenderBatcher.cpp
  29. 54 2
      Engine/source/gfx/gfxInit.cpp
  30. 6 0
      Engine/source/gfx/gfxInit.h
  31. 5 1
      Engine/source/gfx/gfxTextureProfile.h
  32. 73 2
      Engine/source/gfx/sim/debugDraw.cpp
  33. 8 3
      Engine/source/gfx/sim/debugDraw.h
  34. 340 211
      Engine/source/gui/3d/guiTSControl.cpp
  35. 17 2
      Engine/source/gui/3d/guiTSControl.h
  36. 1 0
      Engine/source/gui/controls/guiTextEditCtrl.h
  37. 2 0
      Engine/source/gui/core/guiControl.h
  38. 10 3
      Engine/source/gui/core/guiOffscreenCanvas.cpp
  39. 1 0
      Engine/source/gui/core/guiOffscreenCanvas.h
  40. 1 0
      Engine/source/gui/worldEditor/editTSCtrl.cpp
  41. 0 23
      Engine/source/lighting/advanced/advancedLightBinManager.cpp
  42. 20 2
      Engine/source/math/util/frustum.cpp
  43. 19 0
      Engine/source/platform/input/event.cpp
  44. 3 0
      Engine/source/platform/input/event.h
  45. 29 44
      Engine/source/platform/input/oculusVR/oculusVRDevice.cpp
  46. 8 3
      Engine/source/platform/input/oculusVR/oculusVRDevice.h
  47. 360 266
      Engine/source/platform/input/oculusVR/oculusVRHMDDevice.cpp
  48. 21 31
      Engine/source/platform/input/oculusVR/oculusVRHMDDevice.h
  49. 1 1
      Engine/source/platform/input/oculusVR/oculusVRSensorData.h
  50. 23 18
      Engine/source/platform/input/oculusVR/oculusVRSensorDevice.cpp
  51. 1 1
      Engine/source/platform/input/oculusVR/oculusVRSensorDevice.h
  52. 1 4
      Engine/source/platform/input/oculusVR/oculusVRUtil.cpp
  53. 1 1
      Engine/source/platform/input/oculusVR/oculusVRUtil.h
  54. 546 0
      Engine/source/platform/input/openVR/openVROverlay.cpp
  55. 105 0
      Engine/source/platform/input/openVR/openVROverlay.h
  56. 1761 0
      Engine/source/platform/input/openVR/openVRProvider.cpp
  57. 388 0
      Engine/source/platform/input/openVR/openVRProvider.h
  58. 981 0
      Engine/source/platform/input/openVR/openVRTrackedObject.cpp
  59. 155 0
      Engine/source/platform/input/openVR/openVRTrackedObject.h
  60. 21 5
      Engine/source/platform/output/IDisplayDevice.h
  61. 0 1
      Engine/source/postFx/postEffect.cpp
  62. 1 0
      Engine/source/postFx/postEffectManager.h
  63. 54 9
      Engine/source/scene/reflectionManager.cpp
  64. 94 43
      Engine/source/scene/reflector.cpp
  65. 6 3
      Engine/source/scene/reflector.h
  66. 13 2
      Engine/source/scene/sceneCameraState.cpp
  67. 6 0
      Engine/source/scene/sceneCameraState.h
  68. 17 7
      Engine/source/scene/sceneManager.cpp
  69. 3 3
      Engine/source/scene/sceneRenderState.cpp
  70. 0 9
      Engine/source/scene/sceneRenderState.h
  71. 2 3
      Engine/source/sim/actionMap.cpp
  72. 2 2
      Engine/source/windowManager/windowInputGenerator.cpp
  73. 18 0
      Templates/Full/game/scripts/client/default.bind.cs
  74. 32 0
      Tools/CMake/modules/module_openvr.cmake

+ 0 - 38
Engine/source/T3D/camera.cpp

@@ -393,44 +393,6 @@ void Camera::getEyeCameraTransform(IDisplayDevice *displayDevice, U32 eyeId, Mat
    }
 }
 
-DisplayPose Camera::calcCameraDeltaPose(GameConnection *con, const DisplayPose& inPose)
-{
-   // NOTE: this is intended to be similar to updateMove
-   DisplayPose outPose;
-   outPose.orientation = EulerF(0,0,0);
-   outPose.position = inPose.position;
-
-   // Pitch
-   outPose.orientation.x = (inPose.orientation.x - mLastAbsolutePitch);
-
-   // Constrain the range of mRot.x
-   while (outPose.orientation.x  < -M_PI_F) 
-      outPose.orientation.x += M_2PI_F;
-   while (outPose.orientation.x  > M_PI_F) 
-      outPose.orientation.x -= M_2PI_F;
-
-   // Yaw
-   outPose.orientation.z = (inPose.orientation.z - mLastAbsoluteYaw);
-
-   // Constrain the range of mRot.z
-   while (outPose.orientation.z < -M_PI_F) 
-      outPose.orientation.z += M_2PI_F;
-   while (outPose.orientation.z > M_PI_F) 
-      outPose.orientation.z -= M_2PI_F;
-
-   // Bank
-   if (mDataBlock->cameraCanBank)
-   {
-      outPose.orientation.y = (inPose.orientation.y - mLastAbsoluteRoll);
-   }
-
-   // Constrain the range of mRot.y
-   while (outPose.orientation.y > M_PI_F) 
-      outPose.orientation.y -= M_2PI_F;
-
-   return outPose;
-}
-
 //----------------------------------------------------------------------------
 
 F32 Camera::getCameraFov()

+ 0 - 1
Engine/source/T3D/camera.h

@@ -237,7 +237,6 @@ class Camera: public ShapeBase
       virtual void interpolateTick( F32 delta);
       virtual void getCameraTransform( F32* pos,MatrixF* mat );
       virtual void getEyeCameraTransform( IDisplayDevice *display, U32 eyeId, MatrixF *outMat );
-      virtual DisplayPose calcCameraDeltaPose(GameConnection *con, const DisplayPose& inPose);
 
       virtual void writePacketData( GameConnection* conn, BitStream* stream );
       virtual void readPacketData( GameConnection* conn, BitStream* stream );

+ 89 - 45
Engine/source/T3D/gameBase/extended/extendedMove.cpp

@@ -1,6 +1,7 @@
 #include "T3D/gameBase/extended/extendedMove.h"
 #include "core/stream/bitStream.h"
 #include "math/mathIO.h"
+#include "math/mAngAxis.h"
 #include "core/module.h"
 #include "console/consoleTypes.h"
 #include "core/strings/stringFunctions.h"
@@ -15,15 +16,17 @@ MODULE_BEGIN( ExtendedMoveManager )
 
 MODULE_END;
 
-S32 ExtendedMoveManager::mPosX[ExtendedMove::MaxPositionsRotations] = { 0, };
-S32 ExtendedMoveManager::mPosY[ExtendedMove::MaxPositionsRotations] = { 0, };
-S32 ExtendedMoveManager::mPosZ[ExtendedMove::MaxPositionsRotations] = { 0, };
+F32 ExtendedMoveManager::mPosX[ExtendedMove::MaxPositionsRotations] = { 0, };
+F32 ExtendedMoveManager::mPosY[ExtendedMove::MaxPositionsRotations] = { 0, };
+F32 ExtendedMoveManager::mPosZ[ExtendedMove::MaxPositionsRotations] = { 0, };
 bool ExtendedMoveManager::mRotIsEuler[ExtendedMove::MaxPositionsRotations] = { 0, };
 F32 ExtendedMoveManager::mRotAX[ExtendedMove::MaxPositionsRotations] = { 0, };
 F32 ExtendedMoveManager::mRotAY[ExtendedMove::MaxPositionsRotations] = { 0, };
 F32 ExtendedMoveManager::mRotAZ[ExtendedMove::MaxPositionsRotations] = { 0, };
 F32 ExtendedMoveManager::mRotAA[ExtendedMove::MaxPositionsRotations] = { 1, };
 
+F32 ExtendedMoveManager::mPosScale = 2.0f;
+
 void ExtendedMoveManager::init()
 {
    for(U32 i = 0; i < ExtendedMove::MaxPositionsRotations; ++i)
@@ -31,19 +34,19 @@ void ExtendedMoveManager::init()
       char varName[256];
 
       dSprintf(varName, sizeof(varName), "mvPosX%d", i);
-      Con::addVariable(varName, TypeS32, &mPosX[i], 
+      Con::addVariable(varName, TypeF32, &mPosX[i], 
          "X position of controller in millimeters.  Only 13 bits are networked.\n"
-	      "@ingroup Game");
+         "@ingroup Game");
 
       dSprintf(varName, sizeof(varName), "mvPosY%d", i);
-      Con::addVariable(varName, TypeS32, &mPosY[i], 
+      Con::addVariable(varName, TypeF32, &mPosY[i],
          "Y position of controller in millimeters.  Only 13 bits are networked.\n"
-	      "@ingroup Game");
+         "@ingroup Game");
 
       dSprintf(varName, sizeof(varName), "mvPosZ%d", i);
-      Con::addVariable(varName, TypeS32, &mPosZ[i], 
+      Con::addVariable(varName, TypeF32, &mPosZ[i],
          "Z position of controller in millimeters.  Only 13 bits are networked.\n"
-	      "@ingroup Game");
+         "@ingroup Game");
 
       dSprintf(varName, sizeof(varName), "mvRotIsEuler%d", i);
       Con::addVariable(varName, TypeBool, &mRotIsEuler[i], 
@@ -52,33 +55,39 @@ void ExtendedMoveManager::init()
          "(a vector and angle).  When true, the given rotation is a three component "
          "Euler angle.  When using Euler angles, the $mvRotA component of the ExtendedMove "
          "is ignored for this set of rotations.\n"
-	      "@ingroup Game");
+         "@ingroup Game");
 
       dSprintf(varName, sizeof(varName), "mvRotX%d", i);
       Con::addVariable(varName, TypeF32, &mRotAX[i], 
          "X rotation vector component of controller.\n"
-	      "@ingroup Game");
+         "@ingroup Game");
 
       dSprintf(varName, sizeof(varName), "mvRotY%d", i);
       Con::addVariable(varName, TypeF32, &mRotAY[i], 
          "Y rotation vector component of controller.\n"
-	      "@ingroup Game");
+         "@ingroup Game");
 
       dSprintf(varName, sizeof(varName), "mvRotZ%d", i);
       Con::addVariable(varName, TypeF32, &mRotAZ[i], 
          "Z rotation vector component of controller.\n"
-	      "@ingroup Game");
+         "@ingroup Game");
 
       dSprintf(varName, sizeof(varName), "mvRotA%d", i);
       Con::addVariable(varName, TypeF32, &mRotAA[i], 
          "Angle rotation (in degrees) component of controller.\n"
-	      "@ingroup Game");
+         "@ingroup Game");
    }
+
+   Con::addVariable("mvPosScale", TypeF32, &mPosScale,
+      "@brief Indicates the scale to be given to mvPos values.\n\n"
+      ""
+      "@ingroup Game");
 }
 
 const ExtendedMove NullExtendedMove;
 
-#define CLAMPPOS(x) (x<0 ? -((-x) & (1<<(MaxPositionBits-1))-1) : (x & (1<<(MaxPositionBits-1))-1))
+#define CLAMPPOS(x) ((S32)(((x + 1) * .5) * ((1 << MaxPositionBits) - 1)) & ((1<<MaxPositionBits)-1))
+#define UNCLAMPPOS(x) ((F32)(x * 2 / F32((1 << MaxPositionBits) - 1) - 1.0f))
 #define CLAMPROT(f) ((S32)(((f + 1) * .5) * ((1 << MaxRotationBits) - 1)) & ((1<<MaxRotationBits)-1))
 #define UNCLAMPROT(x) ((F32)(x * 2 / F32((1 << MaxRotationBits) - 1) - 1.0f))
 
@@ -94,6 +103,10 @@ ExtendedMove::ExtendedMove() : Move()
       rotZ[i] = 0;
       rotW[i] = 1;
 
+      cposX[i] = 0;
+      cposY[i] = 0;
+      cposZ[i] = 0;
+
       EulerBasedRotation[i] = false;
    }
 }
@@ -133,20 +146,20 @@ void ExtendedMove::pack(BitStream *stream, const Move * basemove)
       {
          // Position
          if(stream->writeFlag(posX[i] != extBaseMove->posX[i]))
-            stream->writeSignedInt(posX[i], MaxPositionBits);
+            stream->writeInt(cposX[i], MaxPositionBits);
          if(stream->writeFlag(posY[i] != extBaseMove->posY[i]))
-            stream->writeSignedInt(posY[i], MaxPositionBits);
+            stream->writeInt(cposY[i], MaxPositionBits);
          if(stream->writeFlag(posZ[i] != extBaseMove->posZ[i]))
-            stream->writeSignedInt(posZ[i], MaxPositionBits);
+            stream->writeInt(cposZ[i], MaxPositionBits);
 
          // Rotation
          stream->writeFlag(EulerBasedRotation[i]);
          if(stream->writeFlag(rotX[i] != extBaseMove->rotX[i]))
-            stream->writeInt(crotX[i], MaxRotationBits);
+            stream->writeInt(crotX[i], EulerBasedRotation[i] ? MaxRotationBits : MaxPositionBits);
          if(stream->writeFlag(rotY[i] != extBaseMove->rotY[i]))
-            stream->writeInt(crotY[i], MaxRotationBits);
+            stream->writeInt(crotY[i], EulerBasedRotation[i] ? MaxRotationBits : MaxPositionBits);
          if(stream->writeFlag(rotZ[i] != extBaseMove->rotZ[i]))
-            stream->writeInt(crotZ[i], MaxRotationBits);
+            stream->writeInt(crotZ[i], EulerBasedRotation[i] ? MaxRotationBits : MaxPositionBits);
          if(!EulerBasedRotation[i])
          {
             if(stream->writeFlag(rotW[i] != extBaseMove->rotW[i]))
@@ -175,18 +188,27 @@ void ExtendedMove::unpack(BitStream *stream, const Move * basemove)
       for(U32 i=0; i<MaxPositionsRotations; ++i)
       {
          // Position
-         if(stream->readFlag())
-            posX[i] = stream->readSignedInt(MaxPositionBits);
+         if (stream->readFlag())
+         {
+            cposX[i] = stream->readInt(MaxPositionBits);
+            posX[i] = UNCLAMPPOS(cposX[i]) * ExtendedMoveManager::mPosScale;
+         }
          else
             posX[i] = extBaseMove->posX[i];
 
-         if(stream->readFlag())
-            posY[i] = stream->readSignedInt(MaxPositionBits);
+         if (stream->readFlag())
+         {
+            cposY[i] = stream->readInt(MaxPositionBits);
+            posY[i] = UNCLAMPPOS(cposY[i]) * ExtendedMoveManager::mPosScale;
+         }
          else
             posY[i] = extBaseMove->posY[i];
 
-         if(stream->readFlag())
-            posZ[i] = stream->readSignedInt(MaxPositionBits);
+         if (stream->readFlag())
+         {
+            cposZ[i] = stream->readInt(MaxPositionBits);
+            posZ[i] = UNCLAMPPOS(cposZ[i]) * ExtendedMoveManager::mPosScale;
+         }
          else
             posZ[i] = extBaseMove->posZ[i];
 
@@ -197,8 +219,8 @@ void ExtendedMove::unpack(BitStream *stream, const Move * basemove)
             scale = M_2PI_F;
          if(stream->readFlag())
          {
-            crotX[i] = stream->readInt(MaxRotationBits);
-            rotX[i] = UNCLAMPROT(crotX[i]) * scale;
+            crotX[i] = stream->readInt(EulerBasedRotation[i] ? MaxRotationBits : MaxPositionBits);
+            rotX[i] = EulerBasedRotation[i] ? (UNCLAMPROT(crotX[i]) * scale) : UNCLAMPPOS(crotX[i]);
          }
          else
          {
@@ -207,8 +229,8 @@ void ExtendedMove::unpack(BitStream *stream, const Move * basemove)
 
          if(stream->readFlag())
          {
-            crotY[i] = stream->readInt(MaxRotationBits);
-            rotY[i] = UNCLAMPROT(crotY[i]) * scale;
+            crotY[i] = stream->readInt(EulerBasedRotation[i] ? MaxRotationBits : MaxPositionBits);
+            rotY[i] = EulerBasedRotation[i] ? (UNCLAMPROT(crotY[i]) * scale) : UNCLAMPPOS(crotY[i]);
          }
          else
          {
@@ -217,8 +239,8 @@ void ExtendedMove::unpack(BitStream *stream, const Move * basemove)
 
          if(stream->readFlag())
          {
-            crotZ[i] = stream->readInt(MaxRotationBits);
-            rotZ[i] = UNCLAMPROT(crotZ[i]) * scale;
+            crotZ[i] = stream->readInt(EulerBasedRotation[i] ? MaxRotationBits : MaxPositionBits);
+            rotZ[i] = EulerBasedRotation[i] ? (UNCLAMPROT(crotZ[i]) * scale) : UNCLAMPPOS(crotZ[i]);
          }
          else
          {
@@ -230,7 +252,7 @@ void ExtendedMove::unpack(BitStream *stream, const Move * basemove)
             if(stream->readFlag())
             {
                crotW[i] = stream->readInt(MaxRotationBits);
-               rotW[i] = UNCLAMPROT(crotW[i]);
+               rotW[i] = UNCLAMPROT(crotW[i]) * M_2PI_F;
             }
             else
             {
@@ -252,9 +274,9 @@ void ExtendedMove::clamp()
    for(U32 i=0; i<MaxPositionsRotations; ++i)
    {
       // Positions
-      posX[i] = CLAMPPOS(posX[i]);
-      posY[i] = CLAMPPOS(posY[i]);
-      posZ[i] = CLAMPPOS(posZ[i]);
+      cposX[i] = CLAMPPOS(posX[i] / ExtendedMoveManager::mPosScale);
+      cposY[i] = CLAMPPOS(posY[i] / ExtendedMoveManager::mPosScale);
+      cposZ[i] = CLAMPPOS(posZ[i] / ExtendedMoveManager::mPosScale);
 
       // Rotations
       if(EulerBasedRotation[i])
@@ -265,11 +287,29 @@ void ExtendedMove::clamp()
       }
       else
       {
-         crotX[i] = CLAMPROT(rotX[i]);
-         crotY[i] = CLAMPROT(rotY[i]);
-         crotZ[i] = CLAMPROT(rotZ[i]);
-         crotW[i] = CLAMPROT(rotW[i]);
+         crotX[i] = CLAMPPOS(rotX[i]);
+         crotY[i] = CLAMPPOS(rotY[i]);
+         crotZ[i] = CLAMPPOS(rotZ[i]);
+         crotW[i] = CLAMPROT(rotW[i] / M_2PI_F);
       }
+
+      #ifdef DEBUG_CONTROLLER_MOVE
+      if (i == 1)
+      {
+          F32 x, y, z, a;
+          x = UNCLAMPPOS(crotX[i]);
+          y = UNCLAMPPOS(crotY[i]);
+          z = UNCLAMPPOS(crotZ[i]);
+          a = UNCLAMPROT(crotW[i]) * M_2PI_F;
+
+          Con::printf("INPUT POS == %f,%f,%f", ExtendedMoveManager::mPosX[i], ExtendedMoveManager::mPosY[i], ExtendedMoveManager::mPosZ[i]);
+          Con::printf("rot %f,%f,%f,%f clamped to %f,%f,%f,%f", rotX[i], rotY[i], rotZ[i], rotW[i], x,y,z,a);
+          x = UNCLAMPPOS(cposX[i]) * ExtendedMoveManager::mPosScale;
+          y = UNCLAMPPOS(cposX[i]) * ExtendedMoveManager::mPosScale;
+          z = UNCLAMPPOS(cposX[i]) * ExtendedMoveManager::mPosScale;
+          Con::printf("pos %f,%f,%f clamped to %f,%f,%f", posX[i], posY[i], posZ[i], x, y, z);
+      }
+      #endif
    }
 
    // Perform the standard Move clamp
@@ -281,6 +321,10 @@ void ExtendedMove::unclamp()
    // Unclamp the values the same as for net traffic so the client matches the server
    for(U32 i=0; i<MaxPositionsRotations; ++i)
    {
+      posX[i] = UNCLAMPPOS(cposX[i]) * ExtendedMoveManager::mPosScale;
+      posY[i] = UNCLAMPPOS(cposY[i]) * ExtendedMoveManager::mPosScale;
+      posZ[i] = UNCLAMPPOS(cposZ[i]) * ExtendedMoveManager::mPosScale;
+
       // Rotations
       if(EulerBasedRotation[i])
       {
@@ -290,10 +334,10 @@ void ExtendedMove::unclamp()
       }
       else
       {
-         rotX[i] = UNCLAMPROT(crotX[i]);
-         rotY[i] = UNCLAMPROT(crotY[i]);
-         rotZ[i] = UNCLAMPROT(crotZ[i]);
-         rotW[i] = UNCLAMPROT(crotW[i]);
+         rotX[i] = UNCLAMPPOS(crotX[i]);
+         rotY[i] = UNCLAMPPOS(crotY[i]);
+         rotZ[i] = UNCLAMPPOS(crotZ[i]);
+         rotW[i] = UNCLAMPROT(crotW[i]) * M_2PI_F;
       }
    }
 

+ 9 - 5
Engine/source/T3D/gameBase/extended/extendedMove.h

@@ -11,12 +11,14 @@ struct ExtendedMove : public Move
    enum Constants {
       MaxPositionsRotations = 3,
 
-      MaxPositionBits = 13,
+      MaxPositionBits = 16,
       MaxRotationBits = 16,
    };
 
    // Position is in millimeters
-   S32 posX[MaxPositionsRotations], posY[MaxPositionsRotations], posZ[MaxPositionsRotations];
+   F32 posX[MaxPositionsRotations], posY[MaxPositionsRotations], posZ[MaxPositionsRotations];
+
+   S32 cposX[MaxPositionsRotations], cposY[MaxPositionsRotations], cposZ[MaxPositionsRotations];
 
    bool EulerBasedRotation[MaxPositionsRotations];
 
@@ -39,15 +41,17 @@ extern const ExtendedMove NullExtendedMove;
 class ExtendedMoveManager
 {
 public:
-   static S32 mPosX[ExtendedMove::MaxPositionsRotations];
-   static S32 mPosY[ExtendedMove::MaxPositionsRotations];
-   static S32 mPosZ[ExtendedMove::MaxPositionsRotations];
+   static F32 mPosX[ExtendedMove::MaxPositionsRotations];
+   static F32 mPosY[ExtendedMove::MaxPositionsRotations];
+   static F32 mPosZ[ExtendedMove::MaxPositionsRotations];
    static bool mRotIsEuler[ExtendedMove::MaxPositionsRotations];
    static F32 mRotAX[ExtendedMove::MaxPositionsRotations];
    static F32 mRotAY[ExtendedMove::MaxPositionsRotations];
    static F32 mRotAZ[ExtendedMove::MaxPositionsRotations];
    static F32 mRotAA[ExtendedMove::MaxPositionsRotations];
 
+   static F32 mPosScale;
+
    static void init();
 };
 

+ 5 - 5
Engine/source/T3D/gameBase/extended/extendedMoveList.cpp

@@ -75,11 +75,11 @@ bool ExtendedMoveList::getNextExtMove( ExtendedMove &curMove )
       else
       {
          //Rotation is passed in as an Angle Axis in degrees.  We need to convert this into a Quat.
-         QuatF q(Point3F(ExtendedMoveManager::mRotAX[i], ExtendedMoveManager::mRotAY[i], ExtendedMoveManager::mRotAZ[i]), mDegToRad(ExtendedMoveManager::mRotAA[i]));
-         curMove.rotX[i] = q.x;
-         curMove.rotY[i] = q.y;
-         curMove.rotZ[i] = q.z;
-         curMove.rotW[i] = q.w;
+         AngAxisF q(Point3F(ExtendedMoveManager::mRotAX[i], ExtendedMoveManager::mRotAY[i], ExtendedMoveManager::mRotAZ[i]), mDegToRad(ExtendedMoveManager::mRotAA[i]));
+         curMove.rotX[i] = q.axis.x;
+         curMove.rotY[i] = q.axis.y;
+         curMove.rotZ[i] = q.axis.z;
+         curMove.rotW[i] = q.angle;
       }
    }
 

+ 22 - 4
Engine/source/T3D/gameBase/gameConnection.cpp

@@ -469,8 +469,8 @@ bool GameConnection::readConnectRequest(BitStream *stream, const char **errorStr
 
    for(U32 i = 0; i < mConnectArgc+3; i++)
    {
-	   connectArgv[i].value = &connectArgvValue[i];
-	   connectArgvValue[i].init();
+      connectArgv[i].value = &connectArgvValue[i];
+      connectArgvValue[i].init();
    }
 
    for(U32 i = 0; i < mConnectArgc; i++)
@@ -681,6 +681,24 @@ bool GameConnection::getControlCameraTransform(F32 dt, MatrixF* mat)
    return true;
 }
 
+bool GameConnection::getControlCameraHeadTransform(IDisplayDevice *display, MatrixF *transform)
+{
+   GameBase* obj = getCameraObject();
+   if (!obj)
+      return false;
+
+   GameBase* cObj = obj;
+   while ((cObj = cObj->getControlObject()) != 0)
+   {
+      if (cObj->useObjsEyePoint())
+         obj = cObj;
+   }
+
+   obj->getEyeCameraTransform(display, -1, transform);
+
+   return true;
+}
+
 bool GameConnection::getControlCameraEyeTransforms(IDisplayDevice *display, MatrixF *transforms)
 {
    GameBase* obj = getCameraObject();
@@ -896,8 +914,8 @@ void GameConnection::onRemove()
       // clientgroup and what not (this is so that we can disconnect from a local server
       // without needing to destroy and recreate the server before we can connect to it 
       // again).
-	   // Safe-delete as we don't know whether the server connection is currently being
-	   // worked on.
+      // Safe-delete as we don't know whether the server connection is currently being
+      // worked on.
       getRemoteConnection()->safeDeleteObject();
       setRemoteConnectionObject(NULL);
    }

+ 4 - 0
Engine/source/T3D/gameBase/gameConnection.h

@@ -267,6 +267,10 @@ public:
    bool getControlCameraTransform(F32 dt,MatrixF* mat);
    bool getControlCameraVelocity(Point3F *vel);
 
+   /// Returns the head transform for the control object, using supplemental information
+   /// from the provided IDisplayDevice
+   bool getControlCameraHeadTransform(IDisplayDevice *display, MatrixF *transform);
+
    /// Returns the eye transforms for the control object, using supplemental information 
    /// from the provided IDisplayDevice.
    bool getControlCameraEyeTransforms(IDisplayDevice *display, MatrixF *transforms);

+ 6 - 7
Engine/source/T3D/gameFunctions.cpp

@@ -348,13 +348,13 @@ bool GameProcessCameraQuery(CameraQuery *query)
       query->farPlane = gClientSceneGraph->getVisibleDistance() * CameraAndFOV::sVisDistanceScale;
 
       // Provide some default values
-      query->projectionOffset = Point2F::Zero;
       query->stereoTargets[0] = 0;
       query->stereoTargets[1] = 0;
       query->eyeOffset[0] = Point3F::Zero;
       query->eyeOffset[1] = Point3F::Zero;
       query->hasFovPort = false;
       query->hasStereoTargets = false;
+      query->displayDevice = NULL;
       
       F32 cameraFov = 0.0f;
       bool fovSet = false;
@@ -364,6 +364,9 @@ bool GameProcessCameraQuery(CameraQuery *query)
       if(!gEditingMission && connection->hasDisplayDevice())
       {
          IDisplayDevice* display = connection->getDisplayDevice();
+
+         query->displayDevice = display;
+
          // Note: all eye values are invalid until this is called
          display->setDrawCanvas(query->drawCanvas);
 
@@ -372,12 +375,6 @@ bool GameProcessCameraQuery(CameraQuery *query)
          // Display may activate AFTER so we need to call this again just in case
          display->onStartFrame();
 
-         // The connection's display device may want to set the projection offset
-         if(display->providesProjectionOffset())
-         {
-            query->projectionOffset = display->getProjectionOffset();
-         }
-
          // The connection's display device may want to set the eye offset
          if(display->providesEyeOffsets())
          {
@@ -394,6 +391,7 @@ bool GameProcessCameraQuery(CameraQuery *query)
          
          // Grab the latest overriding render view transforms
          connection->getControlCameraEyeTransforms(display, query->eyeTransforms);
+         connection->getControlCameraHeadTransform(display, &query->headMatrix);
 
          display->getStereoViewports(query->stereoViewports);
          display->getStereoTargets(query->stereoTargets);
@@ -403,6 +401,7 @@ bool GameProcessCameraQuery(CameraQuery *query)
       {
          query->eyeTransforms[0] = query->cameraMatrix;
          query->eyeTransforms[1] = query->cameraMatrix;
+         query->headMatrix = query->cameraMatrix;
       }
 
      // Use the connection's FOV settings if required

+ 41 - 30
Engine/source/T3D/lightFlareData.cpp

@@ -33,6 +33,7 @@
 #include "gfx/gfxOcclusionQuery.h"
 #include "gfx/gfxDrawUtil.h"
 #include "gfx/gfxTextureManager.h"
+#include "gfx/sim/debugDraw.h"
 #include "renderInstance/renderPassManager.h"
 #include "T3D/gameBase/gameConnection.h"
 #include "T3D/gameBase/processList.h"
@@ -275,12 +276,10 @@ bool LightFlareData::_testVisibility(const SceneRenderState *state, LightFlareSt
   // is on screen at all... if not then return
    // the last result.
    const Point3F &lightPos = flareState->lightMat.getPosition();  
-   const RectI &viewport = GFX->getViewport();
-   MatrixF projMatrix;
-   state->getCameraFrustum().getProjectionMatrix(&projMatrix);
-   if( state->isReflectPass() )
-      projMatrix = state->getSceneManager()->getNonClipProjection();
-   bool onScreen = MathUtils::mProjectWorldToScreen( lightPos, outLightPosSS, viewport, GFX->getWorldMatrix(), projMatrix );
+   const RectI &viewport = RectI(Point2I(0, 0), GFX->getViewport().extent);
+   MatrixF camProjMatrix = state->getSceneManager()->getNonClipProjection();
+
+   bool onScreen = MathUtils::mProjectWorldToScreen( lightPos, outLightPosSS, viewport, GFX->getWorldMatrix(), camProjMatrix );
 
    // It is onscreen, so raycast as a simple occlusion test.
    const LightInfo *lightInfo = flareState->lightInfo;
@@ -297,7 +296,7 @@ bool LightFlareData::_testVisibility(const SceneRenderState *state, LightFlareSt
       // Always treat light as onscreen if using HOQ
       // it will be faded out if offscreen anyway.
       onScreen = true;
-	  needsRaycast = false;
+      needsRaycast = false;
 
       // Test the hardware queries for rendered pixels.
       U32 pixels = 0, fullPixels = 0;
@@ -400,63 +399,75 @@ bool LightFlareData::_testVisibility(const SceneRenderState *state, LightFlareSt
    return lightVisible;
 }
 
-void LightFlareData::prepRender( SceneRenderState *state, LightFlareState *flareState )
+void LightFlareData::prepRender(SceneRenderState *state, LightFlareState *flareState)
 {
-   PROFILE_SCOPE( LightFlareData_prepRender );
+   PROFILE_SCOPE(LightFlareData_prepRender);
 
    const LightInfo *lightInfo = flareState->lightInfo;
 
-   if (  mIsZero( flareState->fullBrightness ) ||
-         mIsZero( lightInfo->getBrightness() ) )
-      return;
+   if (mIsZero(flareState->fullBrightness) ||
+       mIsZero(lightInfo->getBrightness()))
+   return;
 
    // Figure out the element count to render.
    U32 elementCount = mElementCount;
    const bool isReflectPass = state->isReflectPass();
-   if ( isReflectPass )
+   if (isReflectPass)
    {
       // Then we don't render anything this pass.
-      if ( !mRenderReflectPass )
+      if (!mRenderReflectPass)
          return;
 
       // Find the zero distance elements which make 
       // up the corona of the light flare.
       elementCount = 0.0f;
-      for ( U32 i=0; i < mElementCount; i++ )
-         if ( mIsZero( mElementDist[i] ) )
-            elementCount++;
+      for (U32 i = 0; i < mElementCount; i++)
+         if (mIsZero(mElementDist[i]))
+      elementCount++;
    }
 
    // Better have something to render.
-   if ( elementCount == 0 )
+   if (elementCount == 0)
       return;
-  
+
    U32 visDelta = U32_MAX;
    F32 occlusionFade = 1.0f;
    Point3F lightPosSS;
-   bool lightVisible = _testVisibility( state, flareState, &visDelta, &occlusionFade, &lightPosSS );
-   
+   bool lightVisible = _testVisibility(state, flareState, &visDelta, &occlusionFade, &lightPosSS);
+
+   //DebugDrawer::get()->drawBox(flareState->lightMat.getPosition() + Point3F(-0.5, -0.5, -0.5) * 4, flareState->lightMat.getPosition() + Point3F(0.5, 0.5, 0.5) * 4, ColorI::BLUE);
+
    // We can only skip rendering if the light is not 
    // visible, and it has elapsed the fade out time.
-   if (  mIsZero( occlusionFade ) ||
-         !lightVisible && visDelta > FadeOutTime )
+   if (mIsZero(occlusionFade) ||
+      !lightVisible && visDelta > FadeOutTime)
       return;
 
    const RectI &viewport = GFX->getViewport();
-   Point3F oneOverViewportExtent( 1.0f / (F32)viewport.extent.x, 1.0f / (F32)viewport.extent.y, 0.0f );
+   Point3F oneOverViewportExtent(1.0f / (F32)viewport.extent.x, 1.0f / (F32)viewport.extent.y, 0.0f);
 
-   // Really convert it to screen space.
-   lightPosSS.x -= viewport.point.x;
-   lightPosSS.y -= viewport.point.y;
    lightPosSS *= oneOverViewportExtent;
-   lightPosSS = ( lightPosSS * 2.0f ) - Point3F::One;
+   lightPosSS = (lightPosSS * 2.0f) - Point3F::One;
    lightPosSS.y = -lightPosSS.y;
    lightPosSS.z = 0.0f;
 
+   // Determine the center of the current projection so we can converge there
+   Point3F centerProj(0);
+   {
+      MatrixF camProjMatrix = state->getSceneManager()->getNonClipProjection();
+      Point3F outCenterPos;
+      RectI centerViewport = RectI(Point2I(0, 0), viewport.extent);
+      MathUtils::mProjectWorldToScreen(Point3F(0,state->getSceneManager()->getNearClip(),0), &outCenterPos, centerViewport, MatrixF::Identity, camProjMatrix);
+      centerProj = outCenterPos;
+      centerProj *= oneOverViewportExtent;
+      centerProj = (centerProj * 2.0f) - Point3F::One;
+      centerProj.y = -centerProj.y;
+      centerProj.z = 0.0f;
+   }
+
    // Take any projection offset into account so that the point where the flare's
    // elements converge is at the 'eye' point rather than the center of the viewport.
-   const Point2F& projOffset = state->getCameraFrustum().getProjectionOffset();
-   Point3F flareVec( -lightPosSS + Point3F(projOffset.x, projOffset.y, 0.0f) );
+   Point3F flareVec( centerProj - lightPosSS );
    const F32 flareLength = flareVec.len();
    if ( flareLength > 0.0f )
       flareVec *= 1.0f / flareLength;

+ 111 - 66
Engine/source/T3D/player.cpp

@@ -56,11 +56,18 @@
 #include "T3D/decal/decalManager.h"
 #include "T3D/decal/decalData.h"
 #include "materials/baseMatInstance.h"
+#include "math/mathUtils.h"
+#include "gfx/sim/debugDraw.h"
 
 #ifdef TORQUE_EXTENDED_MOVE
    #include "T3D/gameBase/extended/extendedMove.h"
 #endif
 
+#ifdef TORQUE_OPENVR
+#include "platform/input/openVR/openVRProvider.h"
+#include "platform/input/openVR/openVRTrackedObject.h"
+#endif
+
 // Amount of time if takes to transition to a new action sequence.
 static F32 sAnimationTransitionTime = 0.25f;
 static bool sUseAnimationTransitions = true;
@@ -1776,7 +1783,7 @@ void Player::onRemove()
    mWorkingQueryBox.minExtents.set(-1e9f, -1e9f, -1e9f);
    mWorkingQueryBox.maxExtents.set(-1e9f, -1e9f, -1e9f);
 
-   SAFE_DELETE( mPhysicsRep );		
+   SAFE_DELETE( mPhysicsRep );
 
    Parent::onRemove();
 }
@@ -2489,10 +2496,25 @@ void Player::allowAllPoses()
    mAllowSwimming = true;
 }
 
+AngAxisF gPlayerMoveRot;
+
 void Player::updateMove(const Move* move)
 {
    delta.move = *move;
 
+#ifdef TORQUE_OPENVR
+   if (mControllers[0])
+   {
+      mControllers[0]->processTick(move);
+   }
+
+   if (mControllers[1])
+   {
+      mControllers[1]->processTick(move);
+   }
+
+#endif
+
    // Is waterCoverage high enough to be 'swimming'?
    {
       bool swimming = mWaterCoverage > 0.65f && canSwim();      
@@ -2531,6 +2553,7 @@ void Player::updateMove(const Move* move)
       delta.headVec = mHead;
 
       bool doStandardMove = true;
+      bool absoluteDelta = false;
       GameConnection* con = getControllingClient();
 
 #ifdef TORQUE_EXTENDED_MOVE
@@ -2618,6 +2641,38 @@ void Player::updateMove(const Move* move)
             while (mHead.y > M_PI_F) 
                mHead.y -= M_2PI_F;
          }
+         else
+         {
+            // Orient the player so we are looking towards the required position, ignoring any banking
+            AngAxisF moveRot(Point3F(emove->rotX[emoveIndex], emove->rotY[emoveIndex], emove->rotZ[emoveIndex]), emove->rotW[emoveIndex]);
+            MatrixF trans(1);
+            moveRot.setMatrix(&trans);
+            trans.inverse();
+
+            Point3F vecForward(0, 10, 0);
+            Point3F viewAngle;
+            Point3F orient;
+            EulerF rot;
+            trans.mulV(vecForward);
+            viewAngle = vecForward;
+            vecForward.z = 0; // flatten
+            vecForward.normalizeSafe();
+
+            F32 yawAng;
+            F32 pitchAng;
+            MathUtils::getAnglesFromVector(vecForward, yawAng, pitchAng);
+
+            mRot = EulerF(0);
+            mRot.z = yawAng;
+            mHead = EulerF(0);
+
+            while (mRot.z < 0.0f)
+               mRot.z += M_2PI_F;
+            while (mRot.z > M_2PI_F)
+               mRot.z -= M_2PI_F;
+
+            absoluteDelta = true;
+         }
       }
 #endif
 
@@ -2666,6 +2721,13 @@ void Player::updateMove(const Move* move)
 
       delta.head = mHead;
       delta.headVec -= mHead;
+
+      if (absoluteDelta)
+      {
+         delta.headVec = Point3F(0, 0, 0);
+         delta.rotVec = Point3F(0, 0, 0);
+      }
+
       for(U32 i=0; i<3; ++i)
       {
          if (delta.headVec[i] > M_PI_F)
@@ -3275,9 +3337,9 @@ bool Player::canCrouch()
    if ( mDataBlock->actionList[PlayerData::CrouchRootAnim].sequence == -1 )
       return false;       
 
-	// We are already in this pose, so don't test it again...
-	if ( mPose == CrouchPose )
-		return true;
+   // We are already in this pose, so don't test it again...
+   if ( mPose == CrouchPose )
+      return true;
 
    // Do standard Torque physics test here!
    if ( !mPhysicsRep )
@@ -3327,8 +3389,8 @@ bool Player::canStand()
       return false;
 
    // We are already in this pose, so don't test it again...
-	if ( mPose == StandPose )
-		return true;
+   if ( mPose == StandPose )
+      return true;
 
    // Do standard Torque physics test here!
    if ( !mPhysicsRep )
@@ -3391,9 +3453,9 @@ bool Player::canProne()
    if ( !mPhysicsRep )
       return true;
 
-	// We are already in this pose, so don't test it again...
-	if ( mPose == PronePose )
-		return true;
+   // We are already in this pose, so don't test it again...
+   if ( mPose == PronePose )
+      return true;
 
    return mPhysicsRep->testSpacials( getPosition(), mDataBlock->proneBoxSize );
 }
@@ -3590,7 +3652,7 @@ MatrixF * Player::Death::fallToGround(F32 dt, const Point3F& loc, F32 curZ, F32
          normal.normalize();
          mat.set(EulerF (0.0f, 0.0f, curZ));
          mat.mulV(upY, & ahead);
-	      mCross(ahead, normal, &sideVec);
+         mCross(ahead, normal, &sideVec);
          sideVec.normalize();
          mCross(normal, sideVec, &ahead);
 
@@ -5589,58 +5651,6 @@ void Player::getMuzzleTransform(U32 imageSlot,MatrixF* mat)
    *mat = nmat;
 }
 
-DisplayPose Player::calcCameraDeltaPose(GameConnection *con, const DisplayPose& inPose)
-{
-   // NOTE: this is intended to be similar to updateMove
-   DisplayPose outPose;
-   outPose.orientation = getRenderTransform().toEuler();
-   outPose.position = inPose.position;
-
-   if (con && con->getControlSchemeAbsoluteRotation())
-   {
-      // Pitch
-      outPose.orientation.x = (inPose.orientation.x - mLastAbsolutePitch);
-
-      // Constrain the range of mRot.x
-      while (outPose.orientation.x  < -M_PI_F) 
-         outPose.orientation.x += M_2PI_F;
-      while (outPose.orientation.x  > M_PI_F) 
-         outPose.orientation.x -= M_2PI_F;
-
-      // Yaw
-
-      // Rotate (heading) head or body?
-      if ((isMounted() && getMountNode() == 0) || (con && !con->isFirstPerson()))
-      {
-         // Rotate head
-         outPose.orientation.z = (inPose.orientation.z - mLastAbsoluteYaw);
-      }
-      else
-      {
-         // Rotate body
-         outPose.orientation.z = (inPose.orientation.z - mLastAbsoluteYaw);
-      }
-
-      // Constrain the range of mRot.z
-      while (outPose.orientation.z < 0.0f)
-         outPose.orientation.z += M_2PI_F;
-      while (outPose.orientation.z > M_2PI_F)
-         outPose.orientation.z -= M_2PI_F;
-
-      // Bank
-      if (mDataBlock->cameraCanBank)
-      {
-         outPose.orientation.y = (inPose.orientation.y - mLastAbsoluteRoll);
-      }
-
-      // Constrain the range of mRot.y
-      while (outPose.orientation.y > M_PI_F) 
-         outPose.orientation.y -= M_2PI_F;
-   }
-
-   return outPose;
-}
-
 void Player::getRenderMuzzleTransform(U32 imageSlot,MatrixF* mat)
 {
    disableHeadZCalc();
@@ -5836,7 +5846,7 @@ F32 Player::getSpeed() const
 
 void Player::setVelocity(const VectorF& vel)
 {
-	AssertFatal( !mIsNaN( vel ), "Player::setVelocity() - The velocity is NaN!" );
+   AssertFatal( !mIsNaN( vel ), "Player::setVelocity() - The velocity is NaN!" );
 
    mVelocity = vel;
    setMaskBits(MoveMask);
@@ -5844,7 +5854,7 @@ void Player::setVelocity(const VectorF& vel)
 
 void Player::applyImpulse(const Point3F&,const VectorF& vec)
 {
-	AssertFatal( !mIsNaN( vec ), "Player::applyImpulse() - The vector is NaN!" );
+   AssertFatal( !mIsNaN( vec ), "Player::applyImpulse() - The vector is NaN!" );
 
    // Players ignore angular velocity
    VectorF vel;
@@ -6192,7 +6202,7 @@ U32 Player::packUpdate(NetConnection *con, U32 mask, BitStream *stream)
       stream->writeFlag(mSwimming);
       stream->writeFlag(mJetting);  
       stream->writeInt(mPose, NumPoseBits);
-	  
+     
       stream->writeInt(mState,NumStateBits);
       if (stream->writeFlag(mState == RecoverState))
          stream->writeInt(mRecoverTicks,PlayerData::RecoverDelayBits);
@@ -6293,7 +6303,7 @@ void Player::unpackUpdate(NetConnection *con, BitStream *stream)
       mSwimming = stream->readFlag();
       mJetting = stream->readFlag();  
       mPose = (Pose)(stream->readInt(NumPoseBits)); 
-	  
+     
       ActionState actionState = (ActionState)stream->readInt(NumStateBits);
       if (stream->readFlag()) {
          mRecoverTicks = stream->readInt(PlayerData::RecoverDelayBits);
@@ -7160,3 +7170,38 @@ void Player::renderConvex( ObjectRenderInst *ri, SceneRenderState *state, BaseMa
    mConvex.renderWorkingList();
    GFX->leaveDebugEvent();
 }
+
+#ifdef TORQUE_OPENVR
+void Player::setControllers(Vector<OpenVRTrackedObject*> controllerList)
+{
+   mControllers[0] = controllerList.size() > 0 ? controllerList[0] : NULL;
+   mControllers[1] = controllerList.size() > 1 ? controllerList[1] : NULL;
+}
+
+ConsoleMethod(Player, setVRControllers, void, 4, 4, "")
+{
+   OpenVRTrackedObject *controllerL, *controllerR;
+   Vector<OpenVRTrackedObject*> list;
+
+   if (Sim::findObject(argv[2], controllerL))
+   {
+      list.push_back(controllerL);
+   }
+   else
+   {
+      list.push_back(NULL);
+   }
+
+   if (Sim::findObject(argv[3], controllerR))
+   {
+      list.push_back(controllerR);
+   }
+   else
+   {
+      list.push_back(NULL);
+   }
+
+   object->setControllers(list);
+}
+
+#endif

+ 8 - 1
Engine/source/T3D/player.h

@@ -39,6 +39,7 @@ class DecalData;
 class SplashData;
 class PhysicsPlayer;
 class Player;
+class OpenVRTrackedObject;
 
 //----------------------------------------------------------------------------
 
@@ -518,6 +519,8 @@ protected:
    Point3F mLastPos;          ///< Holds the last position for physics updates
    Point3F mLastWaterPos;     ///< Same as mLastPos, but for water
 
+   SimObjectPtr<OpenVRTrackedObject> mControllers[2];
+
    struct ContactInfo 
    {
       bool contacted, jump, run;
@@ -577,12 +580,17 @@ protected:
 
    PhysicsPlayer* getPhysicsRep() const { return mPhysicsRep; }
 
+#ifdef TORQUE_OPENVR
+   void setControllers(Vector<OpenVRTrackedObject*> controllerList);
+#endif
+
   protected:
    virtual void reSkin();
 
    void setState(ActionState state, U32 ticks=0);
    void updateState();
 
+
    // Jetting
    bool mJetting;
 
@@ -686,7 +694,6 @@ public:
    void getEyeBaseTransform(MatrixF* mat, bool includeBank);
    void getRenderEyeTransform(MatrixF* mat);
    void getRenderEyeBaseTransform(MatrixF* mat, bool includeBank);
-   virtual DisplayPose calcCameraDeltaPose(GameConnection *con, const DisplayPose& inPose);
    void getCameraParameters(F32 *min, F32 *max, Point3F *offset, MatrixF *rot);
    void getMuzzleTransform(U32 imageSlot,MatrixF* mat);
    void getRenderMuzzleTransform(U32 imageSlot,MatrixF* mat);   

+ 7 - 53
Engine/source/T3D/shapeBase.cpp

@@ -1992,67 +1992,21 @@ void ShapeBase::getEyeCameraTransform(IDisplayDevice *displayDevice, U32 eyeId,
    Point3F eyePos;
    Point3F rotEyePos;
 
-   DisplayPose inPose;
-   displayDevice->getFrameEyePose(&inPose, eyeId);
-   DisplayPose newPose = calcCameraDeltaPose(displayDevice->getCurrentConnection(), inPose);
+   DisplayPose newPose;
+   displayDevice->getFrameEyePose(&newPose, eyeId);
 
    // Ok, basically we just need to add on newPose to the camera transform
    // NOTE: currently we dont support third-person camera in this mode
    MatrixF cameraTransform(1);
    F32 fakePos = 0;
+   //cameraTransform = getRenderTransform(); // use this for controllers TODO
    getCameraTransform(&fakePos, &cameraTransform);
 
-   QuatF baserot = cameraTransform;
-   QuatF qrot = QuatF(newPose.orientation);
-   QuatF concatRot;
-   concatRot.mul(baserot, qrot);
-   concatRot.setMatrix(&temp);
-   temp.setPosition(cameraTransform.getPosition() + concatRot.mulP(newPose.position, &rotEyePos));
+   temp = MatrixF(1);
+   newPose.orientation.setMatrix(&temp);
+   temp.setPosition(newPose.position);
 
-   *outMat = temp;
-}
-
-DisplayPose ShapeBase::calcCameraDeltaPose(GameConnection *con, const DisplayPose& inPose)
-{
-   // NOTE: this is intended to be similar to updateMove
-   // WARNING: does not take into account any move values
-
-   DisplayPose outPose;
-   outPose.orientation = getRenderTransform().toEuler();
-   outPose.position = inPose.position;
-
-   if (con && con->getControlSchemeAbsoluteRotation())
-   {
-      // Pitch
-      outPose.orientation.x = inPose.orientation.x;
-
-      // Constrain the range of mRot.x
-      while (outPose.orientation.x < -M_PI_F) 
-         outPose.orientation.x += M_2PI_F;
-      while (outPose.orientation.x > M_PI_F) 
-         outPose.orientation.x -= M_2PI_F;
-
-      // Yaw
-      outPose.orientation.z = inPose.orientation.z;
-
-      // Constrain the range of mRot.z
-      while (outPose.orientation.z < -M_PI_F) 
-         outPose.orientation.z += M_2PI_F;
-      while (outPose.orientation.z > M_PI_F) 
-         outPose.orientation.z -= M_2PI_F;
-
-      // Bank
-      if (mDataBlock->cameraCanBank)
-      {
-         outPose.orientation.y = inPose.orientation.y;
-      }
-
-      // Constrain the range of mRot.y
-      while (outPose.orientation.y > M_PI_F) 
-         outPose.orientation.y -= M_2PI_F;
-   }
-
-   return outPose;
+   *outMat = cameraTransform * temp;
 }
 
 void ShapeBase::getCameraParameters(F32 *min,F32* max,Point3F* off,MatrixF* rot)

+ 0 - 3
Engine/source/T3D/shapeBase.h

@@ -1588,9 +1588,6 @@ public:
    /// orient and position values of the display device.
    virtual void getEyeCameraTransform( IDisplayDevice *display, U32 eyeId, MatrixF *outMat );
 
-   /// Calculates a delta camera angle and view position based on inPose
-   virtual DisplayPose calcCameraDeltaPose(GameConnection *con, const DisplayPose& inPose);
-
    /// Gets the index of a node inside a mounted image given the name
    /// @param   imageSlot   Image slot
    /// @param   nodeName    Node name

+ 2 - 2
Engine/source/environment/scatterSky.cpp

@@ -667,11 +667,11 @@ void ScatterSky::prepRenderImage( SceneRenderState *state )
       mFlareState.scale = mFlareScale;
       mFlareState.lightInfo = mLight;
 
-      Point3F lightPos = state->getCameraPosition() - state->getFarPlane() * mLight->getDirection() * 0.9f;
+      Point3F lightPos = state->getDiffuseCameraPosition() - state->getFarPlane() * mLight->getDirection() * 0.9f;
       mFlareState.lightMat.identity();
       mFlareState.lightMat.setPosition( lightPos );
 
-      F32 dist = ( lightPos - state->getCameraPosition( ) ).len( );
+      F32 dist = ( lightPos - state->getDiffuseCameraPosition( ) ).len( );
       F32 coronaScale = 0.5f;
       F32 screenRadius = GFX->getViewport( ).extent.y * coronaScale * 0.5f;
       mFlareState.worldRadius = screenRadius * dist / state->getWorldToScreenScale( ).y;

文件差異過大導致無法顯示
+ 400 - 397
Engine/source/gfx/D3D11/gfxD3D11Device.cpp


+ 4 - 0
Engine/source/gfx/D3D11/gfxD3D11Device.h

@@ -42,6 +42,8 @@
 
 class PlatformWindow;
 class GFXD3D11ShaderConstBuffer;
+class OculusVRHMDDevice;
+class D3D11OculusTexture;
 
 //------------------------------------------------------------------------------
 
@@ -53,6 +55,8 @@ class GFXD3D11Device : public GFXDevice
    friend class GFXD3D11TextureObject;
    friend class GFXD3D11TextureTarget;
    friend class GFXD3D11WindowTarget;
+	friend class OculusVRHMDDevice;
+	friend class D3D11OculusTexture;
 
    virtual GFXFormat selectSupportedFormat(GFXTextureProfile *profile,
    const Vector<GFXFormat> &formats, bool texture, bool mustblend, bool mustfilter);

+ 1 - 0
Engine/source/gfx/D3D11/gfxD3D11EnumTranslate.cpp

@@ -73,6 +73,7 @@ void GFXD3D11EnumTranslate::init()
    GFXD3D11TextureFormat[GFXFormatD24FS8] = DXGI_FORMAT_UNKNOWN;
    GFXD3D11TextureFormat[GFXFormatD16] = DXGI_FORMAT_D16_UNORM;
    GFXD3D11TextureFormat[GFXFormatR8G8B8A8_SRGB] = DXGI_FORMAT_B8G8R8A8_UNORM_SRGB;
+   GFXD3D11TextureFormat[GFXFormatR8G8B8A8_LINEAR_FORCE] = DXGI_FORMAT_R8G8B8A8_UNORM;
 //------------------------------------------------------------------------------
 //------------------------------------------------------------------------------
    GFXD3D11TextureFilter[GFXTextureFilterNone] = D3D11_FILTER_MIN_MAG_MIP_POINT;

+ 25 - 18
Engine/source/gfx/D3D11/gfxD3D11Target.cpp

@@ -97,9 +97,9 @@ void GFXD3D11TextureTarget::attachTexture( RenderSlot slot, GFXTextureObject *te
    if( tex == GFXTextureTarget::sDefaultDepthStencil )
    {
       mTargets[slot] = D3D11->mDeviceDepthStencil;
-	   mTargetViews[slot] = D3D11->mDeviceDepthStencilView;
-	   mTargets[slot]->AddRef();
-	   mTargetViews[slot]->AddRef();
+      mTargetViews[slot] = D3D11->mDeviceDepthStencilView;
+      mTargets[slot]->AddRef();
+      mTargetViews[slot]->AddRef();
    }
    else
    {
@@ -110,14 +110,14 @@ void GFXD3D11TextureTarget::attachTexture( RenderSlot slot, GFXTextureObject *te
 
       // Grab the surface level.
       if( slot == DepthStencil )
-      {		 
+      {       
          mTargets[slot] = d3dto->getSurface();
          if ( mTargets[slot] )
             mTargets[slot]->AddRef();
 
-		   mTargetViews[slot] = d3dto->getDSView();
-		   if( mTargetViews[slot])
-			   mTargetViews[slot]->AddRef();         
+         mTargetViews[slot] = d3dto->getDSView();
+         if( mTargetViews[slot])
+            mTargetViews[slot]->AddRef();         
 
       }
       else
@@ -126,12 +126,12 @@ void GFXD3D11TextureTarget::attachTexture( RenderSlot slot, GFXTextureObject *te
          // if the surface that it needs to render to is different than the mip level
          // in the actual texture. This will happen with MSAA.
          if( d3dto->getSurface() == NULL )
-		   {
+         {
             
-			   mTargets[slot] = d3dto->get2DTex();
-			   mTargets[slot]->AddRef();
-			   mTargetViews[slot] = d3dto->getRTView();
-			   mTargetViews[slot]->AddRef();			
+            mTargets[slot] = d3dto->get2DTex();
+            mTargets[slot]->AddRef();
+            mTargetViews[slot] = d3dto->getRTView();
+            mTargetViews[slot]->AddRef();         
          } 
          else 
          {
@@ -163,6 +163,13 @@ void GFXD3D11TextureTarget::attachTexture( RenderSlot slot, GFXTextureObject *te
             mTargetSize = Point2I(sd.Width, sd.Height);
 
             S32 format = sd.Format;
+
+            if (format == DXGI_FORMAT_R8G8B8A8_TYPELESS || format == DXGI_FORMAT_B8G8R8A8_TYPELESS)
+            {
+               mTargetFormat = GFXFormatR8G8B8A8;
+               return;
+            }
+
             GFXREVERSE_LOOKUP( GFXD3D11TextureFormat, GFXFormat, format );
             mTargetFormat = (GFXFormat)format;
          }
@@ -276,7 +283,7 @@ void GFXD3D11TextureTarget::resolve()
       if (mResolveTargets[i])
       {
          D3D11_TEXTURE2D_DESC desc;
-		   mTargets[i]->GetDesc(&desc);
+         mTargets[i]->GetDesc(&desc);
          D3D11DEVICECONTEXT->CopySubresourceRegion(mResolveTargets[i]->get2DTex(), 0, 0, 0, 0, mTargets[i], 0, NULL);
       }
    }
@@ -400,10 +407,10 @@ void GFXD3D11WindowTarget::activate()
 
 void GFXD3D11WindowTarget::resolveTo(GFXTextureObject *tex)
 {
-	GFXDEBUGEVENT_SCOPE(GFXPCD3D11WindowTarget_resolveTo, ColorI::RED);
+   GFXDEBUGEVENT_SCOPE(GFXPCD3D11WindowTarget_resolveTo, ColorI::RED);
 
-	D3D11_TEXTURE2D_DESC desc;
-	ID3D11Texture2D* surf = ((GFXD3D11TextureObject*)(tex))->get2DTex();
-	surf->GetDesc(&desc);
-	D3D11DEVICECONTEXT->ResolveSubresource(surf, 0, D3D11->mDeviceBackbuffer, 0, desc.Format);
+   D3D11_TEXTURE2D_DESC desc;
+   ID3D11Texture2D* surf = ((GFXD3D11TextureObject*)(tex))->get2DTex();
+   surf->GetDesc(&desc);
+   D3D11DEVICECONTEXT->ResolveSubresource(surf, 0, D3D11->mDeviceBackbuffer, 0, desc.Format);
 }

+ 3 - 3
Engine/source/gfx/D3D11/gfxD3D11TextureObject.cpp

@@ -180,8 +180,8 @@ bool GFXD3D11TextureObject::copyToBmp(GBitmap* bmp)
    // check format limitations
    // at the moment we only support RGBA for the source (other 4 byte formats should
    // be easy to add though)
-   AssertFatal(mFormat == GFXFormatR8G8B8A8, "copyToBmp: invalid format");
-   if (mFormat != GFXFormatR8G8B8A8)
+   AssertFatal(mFormat == GFXFormatR8G8B8A8 || mFormat == GFXFormatR8G8B8A8_LINEAR_FORCE, "copyToBmp: invalid format");
+   if (mFormat != GFXFormatR8G8B8A8 && mFormat != GFXFormatR8G8B8A8_LINEAR_FORCE)
       return false;
 
    PROFILE_START(GFXD3D11TextureObject_copyToBmp);
@@ -197,7 +197,7 @@ bool GFXD3D11TextureObject::copyToBmp(GBitmap* bmp)
    const U32 sourceBytesPerPixel = 4;
    U32 destBytesPerPixel = 0;
 
-   if(bmp->getFormat() == GFXFormatR8G8B8A8)
+   if (bmp->getFormat() == GFXFormatR8G8B8A8 || bmp->getFormat() == GFXFormatR8G8B8A8_LINEAR_FORCE)
       destBytesPerPixel = 4;
    else if(bmp->getFormat() == GFXFormatR8G8B8)
       destBytesPerPixel = 3;

+ 2 - 0
Engine/source/gfx/D3D9/pc/gfxD3D9EnumTranslate.pc.cpp

@@ -115,6 +115,8 @@ void GFXD3D9EnumTranslate::init()
    GFXD3D9TextureFormat[GFXFormatD24FS8] = D3DFMT_D24FS8;
    GFXD3D9TextureFormat[GFXFormatD16] = D3DFMT_D16;
    GFXD3D9TextureFormat[GFXFormatR8G8B8A8_SRGB] = D3DFMT_UNKNOWN;
+
+   GFXD3D9TextureFormat[GFXFormatR8G8B8A8_LINEAR_FORCE] = D3DFMT_A8R8G8B8;
    VALIDATE_LOOKUPTABLE( GFXD3D9TextureFormat, GFXFormat);
 //------------------------------------------------------------------------------
 //------------------------------------------------------------------------------

+ 1 - 0
Engine/source/gfx/bitmap/gBitmap.cpp

@@ -293,6 +293,7 @@ void GBitmap::allocateBitmap(const U32 in_width, const U32 in_height, const bool
       break;
      case GFXFormatR8G8B8:       mBytesPerPixel = 3;
       break;
+     case GFXFormatR8G8B8A8_LINEAR_FORCE:
      case GFXFormatR8G8B8X8:
      case GFXFormatR8G8B8A8:     mBytesPerPixel = 4;
       break;

+ 4 - 3
Engine/source/gfx/bitmap/loaders/bitmapPng.cpp

@@ -328,13 +328,14 @@ static bool _writePNG(GBitmap *bitmap, Stream &stream, U32 compressionLevel, U32
                   format == GFXFormatR8G8B8A8 || 
                   format == GFXFormatR8G8B8X8 || 
                   format == GFXFormatA8 ||
-                  format == GFXFormatR5G6B5, "_writePNG: ONLY RGB bitmap writing supported at this time.");
+                  format == GFXFormatR5G6B5 ||
+                  format == GFXFormatR8G8B8A8_LINEAR_FORCE, "_writePNG: ONLY RGB bitmap writing supported at this time.");
 
    if (  format != GFXFormatR8G8B8 && 
          format != GFXFormatR8G8B8A8 && 
          format != GFXFormatR8G8B8X8 && 
          format != GFXFormatA8 &&
-         format != GFXFormatR5G6B5 )
+         format != GFXFormatR5G6B5 && format != GFXFormatR8G8B8A8_LINEAR_FORCE)
       return false;
 
    png_structp png_ptr = png_create_write_struct_2(PNG_LIBPNG_VER_STRING,
@@ -381,7 +382,7 @@ static bool _writePNG(GBitmap *bitmap, Stream &stream, U32 compressionLevel, U32
          NULL,                        // compression type
          NULL);                       // filter type
    }
-   else if (format == GFXFormatR8G8B8A8 || format == GFXFormatR8G8B8X8)
+   else if (format == GFXFormatR8G8B8A8 || format == GFXFormatR8G8B8X8 || format == GFXFormatR8G8B8A8_LINEAR_FORCE)
    {
       png_set_IHDR(png_ptr, info_ptr,
          width, height,               // the width & height

+ 10 - 0
Engine/source/gfx/gfxAdapter.h

@@ -35,6 +35,12 @@
 #include "core/util/delegate.h"
 #endif
 
+struct GFXAdapterLUID
+{
+   unsigned long LowPart;
+   long HighPart;
+};
+
 struct GFXAdapter 
 {
 public:
@@ -58,6 +64,9 @@ public:
    /// Supported shader model. 0.f means none supported.
    F32 mShaderModel;
 
+	/// LUID for windows oculus support
+	GFXAdapterLUID mLUID;
+
    const char * getName() const { return mName; }
    const char * getOutputName() const { return mOutputName; }
    GFXAdapterType mType;
@@ -72,6 +81,7 @@ public:
       mOutputName[0] = 0;
       mShaderModel = 0.f;
       mIndex = 0;
+		dMemset(&mLUID, '\0', sizeof(mLUID));
    }
 
    ~GFXAdapter()

+ 2 - 1
Engine/source/gfx/gfxDevice.cpp

@@ -160,7 +160,8 @@ GFXDevice::GFXDevice()
    // misc
    mAllowRender = true;
    mCurrentRenderStyle = RS_Standard;
-   mCurrentProjectionOffset = Point2F::Zero;
+   mCurrentStereoTarget = -1;
+   mStereoHeadTransform = MatrixF(1);
    mCanCurrentlyRender = false;
    mInitialized = false;
    

+ 27 - 8
Engine/source/gfx/gfxDevice.h

@@ -219,6 +219,12 @@ public:
       /// The device has started rendering a frame's field (such as for side-by-side rendering)
       deStartOfField,
 
+     /// left stereo frame has been rendered
+     deLeftStereoFrameRendered,
+
+     /// right stereo frame has been rendered
+     deRightStereoFrameRendered,
+
       /// The device is about to finish rendering a frame's field
       deEndOfField,
    };
@@ -248,6 +254,7 @@ public:
    {
       RS_Standard          = 0,
       RS_StereoSideBySide  = (1<<0),     // Render into current Render Target side-by-side
+     RS_StereoSeparate    = (1<<1)      // Render in two separate passes (then combined by vr compositor)
    };
 
    enum GFXDeviceLimits
@@ -281,13 +288,19 @@ protected:
    /// The style of rendering that is to be performed, based on GFXDeviceRenderStyles
    U32 mCurrentRenderStyle;
 
-   /// The current projection offset.  May be used during side-by-side rendering, for example.
-   Point2F mCurrentProjectionOffset;
+   /// Current stereo target being rendered to
+   S32 mCurrentStereoTarget;
 
    /// Eye offset used when using a stereo rendering style
    Point3F mStereoEyeOffset[NumStereoPorts];
 
+   /// Center matrix for head
+   MatrixF mStereoHeadTransform;
+
+   /// Left and right matrix for eyes
    MatrixF mStereoEyeTransforms[NumStereoPorts];
+
+   /// Inverse of mStereoEyeTransforms
    MatrixF mInverseStereoEyeTransforms[NumStereoPorts];
 
    /// Fov port settings
@@ -338,21 +351,25 @@ public:
    /// Retrieve the current rendering style based on GFXDeviceRenderStyles
    U32 getCurrentRenderStyle() const { return mCurrentRenderStyle; }
 
+   /// Retrieve the current stereo target being rendered to
+   S32 getCurrentStereoTarget() const { return mCurrentStereoTarget; }
+
    /// Set the current rendering style, based on GFXDeviceRenderStyles
    void setCurrentRenderStyle(U32 style) { mCurrentRenderStyle = style; }
 
-   /// Set the current projection offset used during stereo rendering
-   const Point2F& getCurrentProjectionOffset() { return mCurrentProjectionOffset; }
-
-   /// Get the current projection offset used during stereo rendering
-   void setCurrentProjectionOffset(const Point2F& offset) { mCurrentProjectionOffset = offset; }
+   /// Set the current stereo target being rendered to (in case we're doing anything with postfx)
+   void setCurrentStereoTarget(const F32 targetId) { mCurrentStereoTarget = targetId; }
 
    /// Get the current eye offset used during stereo rendering
    const Point3F* getStereoEyeOffsets() { return mStereoEyeOffset; }
 
+   const MatrixF& getStereoHeadTransform() { return mStereoHeadTransform;  }
    const MatrixF* getStereoEyeTransforms() { return mStereoEyeTransforms; }
    const MatrixF* getInverseStereoEyeTransforms() { return mInverseStereoEyeTransforms; }
 
+   /// Sets the head matrix for stereo rendering
+   void setStereoHeadTransform(const MatrixF &mat) { mStereoHeadTransform = mat; }
+
    /// Set the current eye offset used during stereo rendering
    void setStereoEyeOffsets(Point3F *offsets) { dMemcpy(mStereoEyeOffset, offsets, sizeof(Point3F) * NumStereoPorts); }
 
@@ -391,6 +408,8 @@ public:
          }
          setViewport(mStereoViewports[eyeId]);
       }
+
+      mCurrentStereoTarget = eyeId;
    }
 
    GFXCardProfiler* getCardProfiler() const { return mCardProfiler; }
@@ -462,7 +481,7 @@ public:
    /// Returns the first format from the list which meets all 
    /// the criteria of the texture profile and query options.      
    virtual GFXFormat selectSupportedFormat(GFXTextureProfile *profile,
-	   const Vector<GFXFormat> &formats, bool texture, bool mustblend, bool mustfilter) = 0;
+      const Vector<GFXFormat> &formats, bool texture, bool mustblend, bool mustfilter) = 0;
 
    /// @}
 

+ 1 - 0
Engine/source/gfx/gfxDrawUtil.cpp

@@ -61,6 +61,7 @@ void GFXDrawUtil::_setupStateBlocks()
    bitmapStretchSR.setZReadWrite(false);
    bitmapStretchSR.setBlend(true, GFXBlendSrcAlpha, GFXBlendInvSrcAlpha);
    bitmapStretchSR.samplersDefined = true;
+   bitmapStretchSR.setColorWrites(true, true, true, false); // NOTE: comment this out if alpha write is needed
 
    // Linear: Create wrap SB
    bitmapStretchSR.samplers[0] = GFXSamplerStateDesc::getWrapLinear();

+ 6 - 3
Engine/source/gfx/gfxEnums.h

@@ -192,6 +192,12 @@ enum GFXFormat
    GFXFormatD24S8,   
    GFXFormatD24FS8,
 
+   // sRGB formats
+   GFXFormatR8G8B8A8_SRGB,
+
+   // Guaranteed RGBA8 (for apis which really dont like bgr)
+   GFXFormatR8G8B8A8_LINEAR_FORCE,
+
    // 64 bit texture formats...
    GFXFormatR16G16B16A16,// first in group...
    GFXFormatR16G16B16A16F,
@@ -206,9 +212,6 @@ enum GFXFormat
    GFXFormatDXT4,
    GFXFormatDXT5,
 
-   // sRGB formats
-   GFXFormatR8G8B8A8_SRGB,
-
    GFXFormat_COUNT,
 
    GFXFormat_8BIT = GFXFormatA8,

+ 2 - 0
Engine/source/gfx/gfxFontRenderBatcher.cpp

@@ -50,6 +50,8 @@ FontRenderBatcher::FontRenderBatcher() : mStorage(8096)
       // result in the text always being black.  This may not be the case in OpenGL
       // so it may have to change.  -bramage
       f.samplers[0].textureColorOp = GFXTOPAdd;
+
+      f.setColorWrites(true, true, true, false); // NOTE: comment this out if alpha write is needed
       mFontSB = GFX->createStateBlock(f);
    }
 }

+ 54 - 2
Engine/source/gfx/gfxInit.cpp

@@ -198,6 +198,22 @@ GFXAdapter* GFXInit::getAdapterOfType( GFXAdapterType type, const char* outputDe
    return NULL;
 }
 
+GFXAdapter* GFXInit::getAdapterOfType(GFXAdapterType type, S32 outputDeviceIndex)
+{
+   for (U32 i = 0; i < smAdapters.size(); i++)
+   {
+      if (smAdapters[i]->mType == type)
+      {
+         if (smAdapters[i]->mIndex == outputDeviceIndex)
+         {
+            return smAdapters[i];
+         }
+      }
+   }
+
+   return NULL;
+}
+
 GFXAdapter* GFXInit::chooseAdapter( GFXAdapterType type, const char* outputDevice)
 {
    GFXAdapter* adapter = GFXInit::getAdapterOfType(type, outputDevice);
@@ -219,6 +235,27 @@ GFXAdapter* GFXInit::chooseAdapter( GFXAdapterType type, const char* outputDevic
    return adapter;
 }
 
+GFXAdapter* GFXInit::chooseAdapter(GFXAdapterType type, S32 outputDeviceIndex)
+{
+   GFXAdapter* adapter = GFXInit::getAdapterOfType(type, outputDeviceIndex);
+
+   if (!adapter && type != OpenGL)
+   {
+      Con::errorf("The requested renderer, %s, doesn't seem to be available."
+         " Trying the default, OpenGL.", getAdapterNameFromType(type));
+      adapter = GFXInit::getAdapterOfType(OpenGL, outputDeviceIndex);
+   }
+
+   if (!adapter)
+   {
+      Con::errorf("The OpenGL renderer doesn't seem to be available. Trying the GFXNulDevice.");
+      adapter = GFXInit::getAdapterOfType(NullDevice, 0);
+   }
+
+   AssertFatal(adapter, "There is no rendering device available whatsoever.");
+   return adapter;
+}
+
 const char* GFXInit::getAdapterNameFromType(GFXAdapterType type)
 {
    // must match GFXAdapterType order
@@ -256,8 +293,23 @@ GFXAdapter *GFXInit::getBestAdapterChoice()
    // Get the user's preference for device...
    const String   renderer   = Con::getVariable("$pref::Video::displayDevice");
    const String   outputDevice = Con::getVariable("$pref::Video::displayOutputDevice");
-   GFXAdapterType adapterType = getAdapterTypeFromName(renderer.c_str());
-   GFXAdapter     *adapter    = chooseAdapter(adapterType, outputDevice.c_str());
+   const String   adapterDevice = Con::getVariable("$Video::forceDisplayAdapter");
+
+   GFXAdapterType adapterType = getAdapterTypeFromName(renderer.c_str());;
+   GFXAdapter     *adapter = NULL;
+
+   if (adapterDevice.isEmpty())
+   {
+      adapter = chooseAdapter(adapterType, outputDevice.c_str());
+   }
+   else
+   {
+     S32 adapterIdx = dAtoi(adapterDevice.c_str());
+     if (adapterIdx == -1)
+        adapter = chooseAdapter(adapterType, outputDevice.c_str());
+     else
+        adapter = chooseAdapter(adapterType, adapterIdx);
+   }
 
    // Did they have one? Return it.
    if(adapter)

+ 6 - 0
Engine/source/gfx/gfxInit.h

@@ -74,10 +74,16 @@ public:
    /// This method never returns NULL.
    static GFXAdapter *chooseAdapter( GFXAdapterType type, const char* outputDevice);
 
+   /// Override which chooses an adapter based on an index instead
+   static GFXAdapter *chooseAdapter( GFXAdapterType type, S32 outputDeviceIndex );
+
    /// Gets the first adapter of the requested type (and on the requested output device)
    /// from the list of enumerated adapters. Should only call this after a call to
    /// enumerateAdapters.
    static GFXAdapter *getAdapterOfType( GFXAdapterType type, const char* outputDevice );
+
+   /// Override which gets an adapter based on an index instead
+   static GFXAdapter *getAdapterOfType( GFXAdapterType type, S32 outputDeviceIndex );
       
    /// Converts a GFXAdapterType to a string name. Useful for writing out prefs
    static const char *getAdapterNameFromType( GFXAdapterType type );

+ 5 - 1
Engine/source/gfx/gfxTextureProfile.h

@@ -100,7 +100,10 @@ public:
       /// of a target texture after presentation or deactivated.
       ///
       /// This is mainly a depth buffer optimization.
-      NoDiscard = BIT(10)
+      NoDiscard = BIT(10),
+
+      /// Texture is managed by another process, thus should not be modified
+      NoModify = BIT(11)
 
    };
 
@@ -164,6 +167,7 @@ public:
    inline bool noMip() const { return testFlag(NoMipmap); }
    inline bool isPooled() const { return testFlag(Pooled); }
    inline bool canDiscard() const { return !testFlag(NoDiscard); }
+   inline bool canModify() const { return !testFlag(NoModify); }
 
 private:
    /// These constants control the packing for the profile; if you add flags, types, or

+ 73 - 2
Engine/source/gfx/sim/debugDraw.cpp

@@ -139,7 +139,78 @@ void DebugDrawer::setupStateBlocks()
    mRenderAlpha = GFX->createStateBlock(d);
 }
 
-void DebugDrawer::render()
+void DebugDrawer::drawBoxOutline(const Point3F &a, const Point3F &b, const ColorF &color)
+{
+   Point3F point0(a.x, a.y, a.z);
+   Point3F point1(a.x, b.y, a.z);
+   Point3F point2(b.x, b.y, a.z);
+   Point3F point3(b.x, a.y, a.z);
+
+   Point3F point4(a.x, a.y, b.z);
+   Point3F point5(a.x, b.y, b.z);
+   Point3F point6(b.x, b.y, b.z);
+   Point3F point7(b.x, a.y, b.z);
+
+   // Draw one plane
+   drawLine(point0, point1, color);
+   drawLine(point1, point2, color);
+   drawLine(point2, point3, color);
+   drawLine(point3, point0, color);
+
+   // Draw the other plane
+   drawLine(point4, point5, color);
+   drawLine(point5, point6, color);
+   drawLine(point6, point7, color);
+   drawLine(point7, point4, color);
+
+   // Draw the connecting corners
+   drawLine(point0, point4, color);
+   drawLine(point1, point5, color);
+   drawLine(point2, point6, color);
+   drawLine(point3, point7, color);
+}
+
+void DebugDrawer::drawTransformedBoxOutline(const Point3F &a, const Point3F &b, const ColorF &color, const MatrixF& transform)
+{
+   Point3F point0(a.x, a.y, a.z);
+   Point3F point1(a.x, b.y, a.z);
+   Point3F point2(b.x, b.y, a.z);
+   Point3F point3(b.x, a.y, a.z);
+
+   Point3F point4(a.x, a.y, b.z);
+   Point3F point5(a.x, b.y, b.z);
+   Point3F point6(b.x, b.y, b.z);
+   Point3F point7(b.x, a.y, b.z);
+
+   transform.mulP(point0);
+   transform.mulP(point1);
+   transform.mulP(point2);
+   transform.mulP(point3);
+   transform.mulP(point4);
+   transform.mulP(point5);
+   transform.mulP(point6);
+   transform.mulP(point7);
+
+   // Draw one plane
+   drawLine(point0, point1, color);
+   drawLine(point1, point2, color);
+   drawLine(point2, point3, color);
+   drawLine(point3, point0, color);
+
+   // Draw the other plane
+   drawLine(point4, point5, color);
+   drawLine(point5, point6, color);
+   drawLine(point6, point7, color);
+   drawLine(point7, point4, color);
+
+   // Draw the connecting corners
+   drawLine(point0, point4, color);
+   drawLine(point1, point5, color);
+   drawLine(point2, point6, color);
+   drawLine(point3, point7, color);
+}
+
+void DebugDrawer::render(bool clear)
 {
 #ifdef ENABLE_DEBUGDRAW
    if(!isDrawing)
@@ -264,7 +335,7 @@ void DebugDrawer::render()
          shouldToggleFreeze = false;
       }
 
-      if(p->dieTime <= curTime && !isFrozen && p->dieTime != U32_MAX)
+      if(clear && p->dieTime <= curTime && !isFrozen && p->dieTime != U32_MAX)
       {
          *walk = p->next;
          mPrimChunker.free(p);

+ 8 - 3
Engine/source/gfx/sim/debugDraw.h

@@ -105,7 +105,9 @@ public:
    static void init();
 
    /// Called globally to render debug draw state. Also does state updates.
-   void render();
+   void render(bool clear=true);
+
+   bool willDraw() { return isDrawing && mHead; }
 
    void toggleFreeze()  { shouldToggleFreeze = true; };
    void toggleDrawing() 
@@ -120,8 +122,11 @@ public:
    ///
    /// @{
 
+   void drawBoxOutline(const Point3F &a, const Point3F &b, const ColorF &color = ColorF(1.0f, 1.0f, 1.0f));
+   void drawTransformedBoxOutline(const Point3F &a, const Point3F &b, const ColorF &color, const MatrixF& transform);
+
    void drawBox(const Point3F &a, const Point3F &b, const ColorF &color = ColorF(1.0f,1.0f,1.0f));
-   void drawLine(const Point3F &a, const Point3F &b, const ColorF &color = ColorF(1.0f,1.0f,1.0f));	
+   void drawLine(const Point3F &a, const Point3F &b, const ColorF &color = ColorF(1.0f,1.0f,1.0f));
    void drawTri(const Point3F &a, const Point3F &b, const Point3F &c, const ColorF &color = ColorF(1.0f,1.0f,1.0f));
    void drawText(const Point3F& pos, const String& text, const ColorF &color = ColorF(1.0f,1.0f,1.0f));
    void drawCapsule(const Point3F &a, const F32 &radius, const F32 &height, const ColorF &color = ColorF(1.0f, 1.0f, 1.0f));
@@ -176,7 +181,7 @@ private:
          DirectionLine,
          OutlinedText,
          Capsule,
-      } type;	   ///< Type of the primitive. The meanings of a,b,c are determined by this.
+      } type;      ///< Type of the primitive. The meanings of a,b,c are determined by this.
 
       SimTime dieTime;   ///< Time at which we should remove this from the list.
       bool useZ; ///< If true, do z-checks for this primitive.      

+ 340 - 211
Engine/source/gui/3d/guiTSControl.cpp

@@ -37,8 +37,9 @@
 #include "gfx/gfxTransformSaver.h"
 #include "gfx/gfxDrawUtil.h"
 #include "gfx/gfxDebugEvent.h"
-
-GFXTextureObject *gLastStereoTexture = NULL;
+#include "core/stream/fileStream.h"
+#include "platform/output/IDisplayDevice.h"
+#include "T3D/gameBase/extended/extendedMove.h"
 
 #define TS_OVERLAY_SCREEN_WIDTH 0.75
 
@@ -63,8 +64,9 @@ Vector<GuiTSCtrl*> GuiTSCtrl::smAwakeTSCtrls;
 ImplementEnumType( GuiTSRenderStyles,
    "Style of rendering for a GuiTSCtrl.\n\n"
    "@ingroup Gui3D" )
-	{ GuiTSCtrl::RenderStyleStandard,         "standard"              },
-	{ GuiTSCtrl::RenderStyleStereoSideBySide, "stereo side by side"   },
+   { GuiTSCtrl::RenderStyleStandard,         "standard"              },
+   { GuiTSCtrl::RenderStyleStereoSideBySide, "stereo side by side"   },
+   { GuiTSCtrl::RenderStyleStereoSeparate,   "stereo separate" },
 EndImplementEnumType;
 
 //-----------------------------------------------------------------------------
@@ -158,7 +160,6 @@ GuiTSCtrl::GuiTSCtrl()
    mLastCameraQuery.farPlane = 10.0f;
    mLastCameraQuery.nearPlane = 0.01f;
 
-   mLastCameraQuery.projectionOffset = Point2F::Zero;
    mLastCameraQuery.hasFovPort = false;
    mLastCameraQuery.hasStereoTargets = false;
 
@@ -198,9 +199,9 @@ void GuiTSCtrl::initPersistFields()
 void GuiTSCtrl::consoleInit()
 {
    Con::addVariable("$TSControl::frameCount", TypeS32, &smFrameCount, "The number of frames that have been rendered since this control was created.\n"
-	   "@ingroup Rendering\n");
+      "@ingroup Rendering\n");
    Con::addVariable("$TSControl::useLatestDisplayTransform", TypeBool, &smUseLatestDisplayTransform, "Use the latest view transform when rendering stereo instead of the one calculated by the last move.\n"
-	   "@ingroup Rendering\n");
+      "@ingroup Rendering\n");
 }
 
 //-----------------------------------------------------------------------------
@@ -352,36 +353,175 @@ static FovPort CalculateFovPortForCanvas(const RectI viewport, const CameraQuery
    return fovPort;
 }
 
+void GuiTSCtrl::_internalRender(RectI guiViewport, RectI renderViewport, Frustum &frustum)
+{
+   GFXTransformSaver saver;
+   Point2I renderSize = renderViewport.extent;
+   GFXTarget *origTarget = GFX->getActiveRenderTarget();
+   S32 origStereoTarget = GFX->getCurrentStereoTarget();
+
+   if (mForceFOV != 0)
+      mLastCameraQuery.fov = mDegToRad(mForceFOV);
+
+   if (mCameraZRot)
+   {
+      MatrixF rotMat(EulerF(0, 0, mDegToRad(mCameraZRot)));
+      mLastCameraQuery.cameraMatrix.mul(rotMat);
+   }
+
+   if (mReflectPriority > 0)
+   {
+      // Get the total reflection priority.
+      F32 totalPriority = 0;
+      for (U32 i = 0; i < smAwakeTSCtrls.size(); i++)
+         if (smAwakeTSCtrls[i]->isVisible())
+            totalPriority += smAwakeTSCtrls[i]->mReflectPriority;
+
+      REFLECTMGR->update(mReflectPriority / totalPriority,
+         renderSize,
+         mLastCameraQuery);
+   }
+
+   GFX->setActiveRenderTarget(origTarget);
+   GFX->setCurrentStereoTarget(origStereoTarget);
+   GFX->setViewport(renderViewport);
+
+   // Clear the zBuffer so GUI doesn't hose object rendering accidentally
+   GFX->clear(GFXClearZBuffer, ColorI(20, 20, 20), 1.0f, 0);
+
+   GFX->setFrustum(frustum);
+   mSaveProjection = GFX->getProjectionMatrix();
+
+   if (mLastCameraQuery.ortho)
+   {
+      mOrthoWidth = frustum.getWidth();
+      mOrthoHeight = frustum.getHeight();
+   }
+
+   // We're going to be displaying this render at size of this control in
+   // pixels - let the scene know so that it can calculate e.g. reflections
+   // correctly for that final display result.
+   gClientSceneGraph->setDisplayTargetResolution(renderSize);
+
+   // Set the GFX world matrix to the world-to-camera transform, but don't 
+   // change the cameraMatrix in mLastCameraQuery. This is because 
+   // mLastCameraQuery.cameraMatrix is supposed to contain the camera-to-world
+   // transform. In-place invert would save a copy but mess up any GUIs that
+   // depend on that value.
+   MatrixF worldToCamera = mLastCameraQuery.cameraMatrix;
+   worldToCamera.inverse();
+   GFX->setWorldMatrix(worldToCamera);
+
+   mSaveProjection = GFX->getProjectionMatrix();
+   mSaveModelview = GFX->getWorldMatrix();
+   mSaveViewport = guiViewport;
+   mSaveWorldToScreenScale = GFX->getWorldToScreenScale();
+   mSaveFrustum = GFX->getFrustum();
+   mSaveFrustum.setTransform(mLastCameraQuery.cameraMatrix);
+
+   // Set the default non-clip projection as some 
+   // objects depend on this even in non-reflect cases.
+   gClientSceneGraph->setNonClipProjection(mSaveProjection);
+
+   // Give the post effect manager the worldToCamera, and cameraToScreen matrices
+   PFXMGR->setFrameMatrices(mSaveModelview, mSaveProjection);
+
+   renderWorld(guiViewport);
+
+   DebugDrawer* debugDraw = DebugDrawer::get();
+   if (mRenderStyle == RenderStyleStereoSideBySide && debugDraw->willDraw())
+   {
+      // For SBS we need to render over each viewport
+      Frustum frustum;
+
+      GFX->setViewport(mLastCameraQuery.stereoViewports[0]);
+      MathUtils::makeFovPortFrustum(&frustum, mLastCameraQuery.ortho, mLastCameraQuery.nearPlane, mLastCameraQuery.farPlane, mLastCameraQuery.fovPort[0]);
+      GFX->setFrustum(frustum);
+      debugDraw->render(false);
+
+      GFX->setViewport(mLastCameraQuery.stereoViewports[1]);
+      MathUtils::makeFovPortFrustum(&frustum, mLastCameraQuery.ortho, mLastCameraQuery.nearPlane, mLastCameraQuery.farPlane, mLastCameraQuery.fovPort[1]);
+      GFX->setFrustum(frustum);
+      debugDraw->render();
+   }
+   else
+   {
+      debugDraw->render();
+   }
+
+   saver.restore();
+}
+
 //-----------------------------------------------------------------------------
 
 void GuiTSCtrl::onRender(Point2I offset, const RectI &updateRect)
 {
-	// Save the current transforms so we can restore
+   // Save the current transforms so we can restore
    // it for child control rendering below.
    GFXTransformSaver saver;
    bool renderingToTarget = false;
 
-   if(!processCameraQuery(&mLastCameraQuery))
+   mLastCameraQuery.displayDevice = NULL;
+
+   if (!processCameraQuery(&mLastCameraQuery))
    {
       // We have no camera, but render the GUI children 
       // anyway.  This makes editing GuiTSCtrl derived
       // controls easier in the GuiEditor.
-      renderChildControls( offset, updateRect );
+      renderChildControls(offset, updateRect);
       return;
    }
 
+   // jamesu - currently a little bit of a hack. Ideally we need to ditch the viewports in the query data and just rely on the display device
+   if (mLastCameraQuery.displayDevice)
+   {
+      if (mRenderStyle == RenderStyleStereoSideBySide)
+      {
+         mLastCameraQuery.displayDevice->setDrawMode(GFXDevice::RS_StereoSideBySide);
+      }
+      else if (mRenderStyle == RenderStyleStereoSeparate)
+      {
+         mLastCameraQuery.displayDevice->setDrawMode(GFXDevice::RS_StereoSeparate);
+      }
+      else
+      {
+         mLastCameraQuery.displayDevice->setDrawMode(GFXDevice::RS_Standard);
+      }
+
+      // The connection's display device may want to set the eye offset
+      if (mLastCameraQuery.displayDevice->providesEyeOffsets())
+      {
+         mLastCameraQuery.displayDevice->getEyeOffsets(mLastCameraQuery.eyeOffset);
+      }
+
+      // Grab field of view for both eyes
+      if (mLastCameraQuery.displayDevice->providesFovPorts())
+      {
+         mLastCameraQuery.displayDevice->getFovPorts(mLastCameraQuery.fovPort);
+         mLastCameraQuery.hasFovPort = true;
+      }
+
+      mLastCameraQuery.displayDevice->getStereoViewports(mLastCameraQuery.stereoViewports);
+      mLastCameraQuery.displayDevice->getStereoTargets(mLastCameraQuery.stereoTargets);
+
+      mLastCameraQuery.hasStereoTargets = mLastCameraQuery.stereoTargets[0];
+   }
+
    GFXTargetRef origTarget = GFX->getActiveRenderTarget();
+   U32 origStyle = GFX->getCurrentRenderStyle();
 
    // Set up the appropriate render style
    U32 prevRenderStyle = GFX->getCurrentRenderStyle();
-   Point2F prevProjectionOffset = GFX->getCurrentProjectionOffset();
    Point2I renderSize = getExtent();
+   Frustum frustum;
 
-   if(mRenderStyle == RenderStyleStereoSideBySide)
+   mLastCameraQuery.currentEye = -1;
+
+   if (mRenderStyle == RenderStyleStereoSideBySide)
    {
       GFX->setCurrentRenderStyle(GFXDevice::RS_StereoSideBySide);
-      GFX->setCurrentProjectionOffset(mLastCameraQuery.projectionOffset);
       GFX->setStereoEyeOffsets(mLastCameraQuery.eyeOffset);
+      GFX->setStereoHeadTransform(mLastCameraQuery.headMatrix);
 
       if (!mLastCameraQuery.hasStereoTargets)
       {
@@ -398,13 +538,13 @@ void GuiTSCtrl::onRender(Point2I offset, const RectI &updateRect)
          mLastCameraQuery.fovPort[0] = CalculateFovPortForCanvas(mLastCameraQuery.stereoViewports[0], mLastCameraQuery);
          mLastCameraQuery.fovPort[1] = CalculateFovPortForCanvas(mLastCameraQuery.stereoViewports[1], mLastCameraQuery);
       }
-         
-      GFX->setStereoFovPort(mLastCameraQuery.fovPort); // NOTE: this specifies fov for BOTH eyes
 
+      GFX->setStereoFovPort(mLastCameraQuery.fovPort); // NOTE: this specifies fov for BOTH eyes
       GFX->setSteroViewports(mLastCameraQuery.stereoViewports);
       GFX->setStereoTargets(mLastCameraQuery.stereoTargets);
 
       MatrixF myTransforms[2];
+      Frustum frustum;
 
       if (smUseLatestDisplayTransform)
       {
@@ -417,6 +557,7 @@ void GuiTSCtrl::onRender(Point2I offset, const RectI &updateRect)
          // Use the view matrix determined from the control object
          myTransforms[0] = mLastCameraQuery.cameraMatrix;
          myTransforms[1] = mLastCameraQuery.cameraMatrix;
+         mLastCameraQuery.headMatrix = mLastCameraQuery.cameraMatrix; // override head
 
          QuatF qrot = mLastCameraQuery.cameraMatrix;
          Point3F pos = mLastCameraQuery.cameraMatrix.getPosition();
@@ -431,55 +572,121 @@ void GuiTSCtrl::onRender(Point2I offset, const RectI &updateRect)
       // Allow render size to originate from the render target
       if (mLastCameraQuery.stereoTargets[0])
       {
-         renderSize = mLastCameraQuery.stereoViewports[0].extent;
+         renderSize = mLastCameraQuery.stereoTargets[0]->getSize();
          renderingToTarget = true;
       }
+
+      // NOTE: these calculations are essentially overridden later by the fov port settings when rendering each eye.
+      MathUtils::makeFovPortFrustum(&frustum, mLastCameraQuery.ortho, mLastCameraQuery.nearPlane, mLastCameraQuery.farPlane, mLastCameraQuery.fovPort[0]);
+
+      GFX->activateStereoTarget(-1);
+      _internalRender(RectI(updateRect.point, updateRect.extent), RectI(Point2I(0,0), renderSize), frustum);
+     
+      // Notify device we've rendered the right, thus the last stereo frame.
+      GFX->getDeviceEventSignal().trigger(GFXDevice::deRightStereoFrameRendered);
+
+      // Render preview
+      if (mLastCameraQuery.displayDevice)
+      {
+         GFXTexHandle previewTexture = mLastCameraQuery.displayDevice->getPreviewTexture();
+         if (!previewTexture.isNull())
+         {
+            GFX->setActiveRenderTarget(origTarget);
+            GFX->setCurrentRenderStyle(origStyle);
+            GFX->setClipRect(updateRect);
+            renderDisplayPreview(updateRect, previewTexture);
+         }
+      }
    }
-   else
+   else if (mRenderStyle == RenderStyleStereoSeparate && mLastCameraQuery.displayDevice)
    {
-      GFX->setCurrentRenderStyle(GFXDevice::RS_Standard);
-   }
+      // In this case we render the scene twice to different render targets, then
+      // render the final composite view 
+      GFX->setCurrentRenderStyle(GFXDevice::RS_StereoSeparate);
+      GFX->setStereoEyeOffsets(mLastCameraQuery.eyeOffset);
+      GFX->setStereoHeadTransform(mLastCameraQuery.headMatrix);
+      GFX->setStereoFovPort(mLastCameraQuery.fovPort); // NOTE: this specifies fov for BOTH eyes
+      GFX->setSteroViewports(mLastCameraQuery.stereoViewports);
+      GFX->setStereoTargets(mLastCameraQuery.stereoTargets);
 
-   if ( mReflectPriority > 0 )
-   {
-      // Get the total reflection priority.
-      F32 totalPriority = 0;
-      for ( U32 i=0; i < smAwakeTSCtrls.size(); i++ )
-         if ( smAwakeTSCtrls[i]->isVisible() )
-            totalPriority += smAwakeTSCtrls[i]->mReflectPriority;
+      MatrixF myTransforms[2];
 
-      REFLECTMGR->update(  mReflectPriority / totalPriority,
-                           getExtent(),
-                           mLastCameraQuery );
-   }
+      if (smUseLatestDisplayTransform)
+      {
+         // Use the view matrix determined from the display device
+         myTransforms[0] = mLastCameraQuery.eyeTransforms[0];
+         myTransforms[1] = mLastCameraQuery.eyeTransforms[1];
+      }
+      else
+      {
+         // Use the view matrix determined from the control object
+         myTransforms[0] = mLastCameraQuery.cameraMatrix;
+         myTransforms[1] = mLastCameraQuery.cameraMatrix;
 
-   if(mForceFOV != 0)
-      mLastCameraQuery.fov = mDegToRad(mForceFOV);
+         QuatF qrot = mLastCameraQuery.cameraMatrix;
+         Point3F pos = mLastCameraQuery.cameraMatrix.getPosition();
+         Point3F rotEyePos;
 
-   if(mCameraZRot)
-   {
-      MatrixF rotMat(EulerF(0, 0, mDegToRad(mCameraZRot)));
-      mLastCameraQuery.cameraMatrix.mul(rotMat);
-   }
+         myTransforms[0].setPosition(pos + qrot.mulP(mLastCameraQuery.eyeOffset[0], &rotEyePos));
+         myTransforms[1].setPosition(pos + qrot.mulP(mLastCameraQuery.eyeOffset[1], &rotEyePos));
+      }
 
-   Frustum frustum;
-   if(mRenderStyle == RenderStyleStereoSideBySide)
-   {
-      // NOTE: these calculations are essentially overridden later by the fov port settings when rendering each eye.
-      MathUtils::makeFovPortFrustum(&frustum, mLastCameraQuery.ortho,  mLastCameraQuery.nearPlane, mLastCameraQuery.farPlane, mLastCameraQuery.fovPort[0]);
+      MatrixF origMatrix = mLastCameraQuery.cameraMatrix;
+
+      // Left
+      MathUtils::makeFovPortFrustum(&frustum, mLastCameraQuery.ortho, mLastCameraQuery.nearPlane, mLastCameraQuery.farPlane, mLastCameraQuery.fovPort[0]);
+      mLastCameraQuery.cameraMatrix = myTransforms[0];
+      frustum.update();
+     GFX->activateStereoTarget(0);
+     mLastCameraQuery.currentEye = 0;
+     GFX->beginField();
+     _internalRender(RectI(Point2I(0, 0), mLastCameraQuery.stereoTargets[0]->getSize()), RectI(Point2I(0, 0), mLastCameraQuery.stereoTargets[0]->getSize()), frustum);
+      GFX->getDeviceEventSignal().trigger(GFXDevice::deLeftStereoFrameRendered);
+     GFX->endField();
+
+      // Right
+     GFX->activateStereoTarget(1);
+     mLastCameraQuery.currentEye = 1;
+      MathUtils::makeFovPortFrustum(&frustum, mLastCameraQuery.ortho, mLastCameraQuery.nearPlane, mLastCameraQuery.farPlane, mLastCameraQuery.fovPort[1]);
+      mLastCameraQuery.cameraMatrix = myTransforms[1];
+     frustum.update();
+     GFX->beginField();
+     _internalRender(RectI(Point2I(0, 0), mLastCameraQuery.stereoTargets[1]->getSize()), RectI(Point2I(0, 0), mLastCameraQuery.stereoTargets[1]->getSize()), frustum);
+     GFX->getDeviceEventSignal().trigger(GFXDevice::deRightStereoFrameRendered);
+     GFX->endField();
+
+      mLastCameraQuery.cameraMatrix = origMatrix;
+
+      // Render preview
+      if (mLastCameraQuery.displayDevice)
+      {
+         GFXTexHandle previewTexture = mLastCameraQuery.displayDevice->getPreviewTexture();
+         if (!previewTexture.isNull())
+         {
+            GFX->setActiveRenderTarget(origTarget);
+            GFX->setCurrentRenderStyle(origStyle);
+            GFX->setClipRect(updateRect);
+            renderDisplayPreview(updateRect, previewTexture);
+         }
+      }
    }
    else
    {
+#ifdef TORQUE_OS_MAC
+      Point2I screensize = getRoot()->getWindowSize();
+      tempRect.point.y = screensize.y - (tempRect.point.y + tempRect.extent.y);
+#endif
+
       // set up the camera and viewport stuff:
       F32 wwidth;
       F32 wheight;
       F32 renderWidth = F32(renderSize.x);
       F32 renderHeight = F32(renderSize.y);
       F32 aspectRatio = renderWidth / renderHeight;
-   
+
       // Use the FOV to calculate the viewport height scale
       // then generate the width scale from the aspect ratio.
-      if(!mLastCameraQuery.ortho)
+      if (!mLastCameraQuery.ortho)
       {
          wheight = mLastCameraQuery.nearPlane * mTan(mLastCameraQuery.fov / 2.0f);
          wwidth = aspectRatio * wheight;
@@ -498,184 +705,33 @@ void GuiTSCtrl::onRender(Point2I offset, const RectI &updateRect)
       F32 top = wheight - vscale * (updateRect.point.y - offset.y);
       F32 bottom = wheight - vscale * (updateRect.point.y + updateRect.extent.y - offset.y);
 
-      frustum.set( mLastCameraQuery.ortho, left, right, top, bottom, mLastCameraQuery.nearPlane, mLastCameraQuery.farPlane );
-   }
-
-	// Manipulate the frustum for tiled screenshots
-	const bool screenShotMode = gScreenShot && gScreenShot->isPending();
-   if ( screenShotMode )
-   {
-      gScreenShot->tileFrustum( frustum );      
-      GFX->setViewMatrix(MatrixF::Identity);
-   }
-      
-   RectI tempRect = updateRect;
-   
-   if (!renderingToTarget)
-   {
-   #ifdef TORQUE_OS_MAC
-      Point2I screensize = getRoot()->getWindowSize();
-      tempRect.point.y = screensize.y - (tempRect.point.y + tempRect.extent.y);
-   #endif
-
-      GFX->setViewport( tempRect );
-   }
-   else
-   {
-      // Activate stereo RT
-      GFX->activateStereoTarget(-1);
-   }
-
-   // Clear the zBuffer so GUI doesn't hose object rendering accidentally
-   GFX->clear( GFXClearZBuffer , ColorI(20,20,20), 1.0f, 0 );
-   //GFX->clear( GFXClearTarget, ColorI(255,0,0), 1.0f, 0);
-
-   GFX->setFrustum( frustum );
-   if(mLastCameraQuery.ortho)
-   {
-      mOrthoWidth = frustum.getWidth();
-      mOrthoHeight = frustum.getHeight();
-   }
-
-   // We're going to be displaying this render at size of this control in
-   // pixels - let the scene know so that it can calculate e.g. reflections
-   // correctly for that final display result.
-   gClientSceneGraph->setDisplayTargetResolution(renderSize);
-
-   // Set the GFX world matrix to the world-to-camera transform, but don't 
-   // change the cameraMatrix in mLastCameraQuery. This is because 
-   // mLastCameraQuery.cameraMatrix is supposed to contain the camera-to-world
-   // transform. In-place invert would save a copy but mess up any GUIs that
-   // depend on that value.
-   MatrixF worldToCamera = mLastCameraQuery.cameraMatrix;
-   worldToCamera.inverse();
-   GFX->setWorldMatrix( worldToCamera );
-
-   mSaveProjection = GFX->getProjectionMatrix();
-   mSaveModelview = GFX->getWorldMatrix();
-   mSaveViewport = updateRect;
-   mSaveWorldToScreenScale = GFX->getWorldToScreenScale();
-   mSaveFrustum = GFX->getFrustum();
-   mSaveFrustum.setTransform( mLastCameraQuery.cameraMatrix );
-
-   // Set the default non-clip projection as some 
-   // objects depend on this even in non-reflect cases.
-   gClientSceneGraph->setNonClipProjection( mSaveProjection );
-
-   // Give the post effect manager the worldToCamera, and cameraToScreen matrices
-   PFXMGR->setFrameMatrices( mSaveModelview, mSaveProjection );
+      frustum.set(mLastCameraQuery.ortho, left, right, top, bottom, mLastCameraQuery.nearPlane, mLastCameraQuery.farPlane);
 
-   renderWorld(updateRect);
-   DebugDrawer::get()->render();
-
-   // Render the canvas overlay if its available
-   if (mRenderStyle == RenderStyleStereoSideBySide && mStereoGuiTarget.getPointer())
-   {
-      GFXDEBUGEVENT_SCOPE( StereoGui_Render, ColorI( 255, 0, 0 ) );
-      MatrixF proj(1);
-      
-      Frustum originalFrustum = GFX->getFrustum();
-      GFXTextureObject *texObject = mStereoGuiTarget->getTexture(0);
-      const FovPort *currentFovPort = GFX->getStereoFovPort();
-      const MatrixF *eyeTransforms = GFX->getStereoEyeTransforms();
-      const Point3F *eyeOffset = GFX->getStereoEyeOffsets();
-      Frustum gfxFrustum = originalFrustum;
-
-      for (U32 i=0; i<2; i++)
+      // Manipulate the frustum for tiled screenshots
+      const bool screenShotMode = gScreenShot && gScreenShot->isPending();
+      if (screenShotMode)
       {
-         GFX->activateStereoTarget(i);
-         MathUtils::makeFovPortFrustum(&gfxFrustum, true, gfxFrustum.getNearDist(), gfxFrustum.getFarDist(), currentFovPort[i], eyeTransforms[i]);
-         GFX->setFrustum(gfxFrustum);
-
-         MatrixF eyeWorldTrans(1);
-         eyeWorldTrans.setPosition(Point3F(eyeOffset[i].x,eyeOffset[i].y,eyeOffset[i].z));
-         MatrixF eyeWorld(1);
-         eyeWorld.mul(eyeWorldTrans);
-         eyeWorld.inverse();
-         
-         GFX->setWorldMatrix(eyeWorld);
+         gScreenShot->tileFrustum(frustum);
          GFX->setViewMatrix(MatrixF::Identity);
-
-         if (!mStereoOverlayVB.getPointer())
-         {
-            mStereoOverlayVB.set(GFX, 4, GFXBufferTypeStatic);
-            GFXVertexPCT *verts = mStereoOverlayVB.lock(0, 4);
-
-            F32 texLeft   = 0.0f;
-            F32 texRight  = 1.0f;
-            F32 texTop    = 1.0f;
-            F32 texBottom = 0.0f;
-
-            F32 rectRatio = gfxFrustum.getWidth() / gfxFrustum.getHeight();
-            F32 rectWidth = gfxFrustum.getWidth() * TS_OVERLAY_SCREEN_WIDTH;
-            F32 rectHeight = rectWidth * rectRatio;
-
-            F32 screenLeft   = -rectWidth * 0.5;
-            F32 screenRight  = rectWidth * 0.5;
-            F32 screenTop    = -rectHeight * 0.5;
-            F32 screenBottom = rectHeight * 0.5;
-
-            const F32 fillConv = 0.0f;
-            const F32 frustumDepthAdjusted = gfxFrustum.getNearDist() + 0.012;
-            verts[0].point.set( screenLeft  - fillConv, frustumDepthAdjusted, screenTop    - fillConv );
-            verts[1].point.set( screenRight - fillConv, frustumDepthAdjusted, screenTop    - fillConv );
-            verts[2].point.set( screenLeft  - fillConv, frustumDepthAdjusted, screenBottom - fillConv );
-            verts[3].point.set( screenRight - fillConv, frustumDepthAdjusted, screenBottom - fillConv );
-
-            verts[0].color = verts[1].color = verts[2].color = verts[3].color = ColorI(255,255,255,255);
-
-            verts[0].texCoord.set( texLeft,  texTop );
-            verts[1].texCoord.set( texRight, texTop );
-            verts[2].texCoord.set( texLeft,  texBottom );
-            verts[3].texCoord.set( texRight, texBottom );
-
-            mStereoOverlayVB.unlock();
-         }
-
-         if (!mStereoGuiSB.getPointer())
-         {
-            // DrawBitmapStretchSR
-            GFXStateBlockDesc bitmapStretchSR;
-            bitmapStretchSR.setCullMode(GFXCullNone);
-            bitmapStretchSR.setZReadWrite(false, false);
-            bitmapStretchSR.setBlend(true, GFXBlendSrcAlpha, GFXBlendInvSrcAlpha);
-            bitmapStretchSR.samplersDefined = true;
-
-            bitmapStretchSR.samplers[0] = GFXSamplerStateDesc::getClampLinear();
-            bitmapStretchSR.samplers[0].minFilter = GFXTextureFilterPoint;
-            bitmapStretchSR.samplers[0].mipFilter = GFXTextureFilterPoint;
-            bitmapStretchSR.samplers[0].magFilter = GFXTextureFilterPoint;
-
-            mStereoGuiSB = GFX->createStateBlock(bitmapStretchSR);
-         }
-
-         GFX->setVertexBuffer(mStereoOverlayVB);
-         GFX->setStateBlock(mStereoGuiSB);
-         GFX->setTexture( 0, texObject );
-         GFX->setupGenericShaders( GFXDevice::GSModColorTexture );
-         GFX->drawPrimitive( GFXTriangleStrip, 0, 2 );
       }
-   }
 
-	// Restore the previous matrix state before
-   // we begin rendering the child controls.
-   saver.restore();
+      RectI tempRect = updateRect;
 
-   // Restore the render style and any stereo parameters
-   GFX->setActiveRenderTarget(origTarget);
-   GFX->setCurrentRenderStyle(prevRenderStyle);
-   GFX->setCurrentProjectionOffset(prevProjectionOffset);
+#ifdef TORQUE_OS_MAC
+      Point2I screensize = getRoot()->getWindowSize();
+      tempRect.point.y = screensize.y - (tempRect.point.y + tempRect.extent.y);
+#endif
 
-   
-   if(mRenderStyle == RenderStyleStereoSideBySide && gLastStereoTexture)
-   {
-      GFX->setClipRect(updateRect);
-      GFX->getDrawUtil()->drawBitmapStretch(gLastStereoTexture, updateRect);
+      _internalRender(tempRect, tempRect, frustum);
    }
 
+   // TODO: Some render to sort of overlay system?
+
    // Allow subclasses to render 2D elements.
+   GFX->setActiveRenderTarget(origTarget);
+   GFX->setCurrentRenderStyle(origStyle);
    GFX->setClipRect(updateRect);
-   renderGui( offset, updateRect );
+   renderGui(offset, updateRect);
 
    if (shouldRenderChildControls())
    {
@@ -711,10 +767,83 @@ void GuiTSCtrl::drawLineList( const Vector<Point3F> &points, const ColorI color,
       drawLine( points[i], points[i+1], color, width );
 }
 
+//-----------------------------------------------------------------------------
 
 void GuiTSCtrl::setStereoGui(GuiOffscreenCanvas *canvas)
 {
    mStereoGuiTarget = canvas ? canvas->getTarget() : NULL;
+   mStereoCanvas = canvas;
+}
+
+
+//-----------------------------------------------------------------------------
+
+void GuiTSCtrl::renderDisplayPreview(const RectI &updateRect, GFXTexHandle &previewTexture)
+{
+   GFX->setWorldMatrix(MatrixF(1));
+   GFX->setViewMatrix(MatrixF::Identity);
+   GFX->setClipRect(updateRect);
+
+   GFX->getDrawUtil()->drawRectFill(RectI(Point2I(0, 0), Point2I(1024, 768)), ColorI::BLACK);
+   GFX->getDrawUtil()->drawRect(RectI(Point2I(0, 0), Point2I(1024, 768)), ColorI::RED);
+
+   if (!mStereoPreviewVB.getPointer())
+   {
+      mStereoPreviewVB.set(GFX, 4, GFXBufferTypeStatic);
+      GFXVertexPCT *verts = mStereoPreviewVB.lock(0, 4);
+
+      F32 texLeft = 0.0f;
+      F32 texRight = 1.0f;
+      F32 texTop = 0.0f;
+      F32 texBottom = 1.0f;
+
+      F32 rectWidth = updateRect.extent.x;
+      F32 rectHeight = updateRect.extent.y;
+
+      F32 screenLeft = 0;
+      F32 screenRight = rectWidth;
+      F32 screenTop = 0;
+      F32 screenBottom = rectHeight;
+
+      const F32 fillConv = 0.0f;
+      const F32 frustumDepthAdjusted = 0.0f;
+      verts[0].point.set(screenLeft - fillConv, screenTop - fillConv, 0.f);
+      verts[1].point.set(screenRight - fillConv, screenTop - fillConv, 0.f);
+      verts[2].point.set(screenLeft - fillConv, screenBottom - fillConv, 0.f);
+      verts[3].point.set(screenRight - fillConv, screenBottom - fillConv, 0.f);
+
+      verts[0].color = verts[1].color = verts[2].color = verts[3].color = ColorI(255, 255, 255, 255);
+
+      verts[0].texCoord.set(texLeft, texTop);
+      verts[1].texCoord.set(texRight, texTop);
+      verts[2].texCoord.set(texLeft, texBottom);
+      verts[3].texCoord.set(texRight, texBottom);
+
+      mStereoPreviewVB.unlock();
+   }
+
+   if (!mStereoPreviewSB.getPointer())
+   {
+      // DrawBitmapStretchSR
+      GFXStateBlockDesc bitmapStretchSR;
+      bitmapStretchSR.setCullMode(GFXCullNone);
+      bitmapStretchSR.setZReadWrite(false, false);
+      bitmapStretchSR.setBlend(false, GFXBlendSrcAlpha, GFXBlendInvSrcAlpha);
+      bitmapStretchSR.samplersDefined = true;
+
+      bitmapStretchSR.samplers[0] = GFXSamplerStateDesc::getClampLinear();
+      bitmapStretchSR.samplers[0].minFilter = GFXTextureFilterPoint;
+      bitmapStretchSR.samplers[0].mipFilter = GFXTextureFilterPoint;
+      bitmapStretchSR.samplers[0].magFilter = GFXTextureFilterPoint;
+
+      mStereoPreviewSB = GFX->createStateBlock(bitmapStretchSR);
+   }
+
+   GFX->setVertexBuffer(mStereoPreviewVB);
+   GFX->setStateBlock(mStereoPreviewSB);
+   GFX->setTexture(0, previewTexture);
+   GFX->setupGenericShaders(GFXDevice::GSModColorTexture);
+   GFX->drawPrimitive(GFXTriangleStrip, 0, 2);
 }
 
 //=============================================================================

+ 17 - 2
Engine/source/gui/3d/guiTSControl.h

@@ -35,6 +35,10 @@
 #include "materials/matTextureTarget.h"
 #endif
 
+#ifndef _GUIOFFSCREENCANVAS_H_
+#include "gui/core/guiOffscreenCanvas.h"
+#endif
+
 class IDisplayDevice;
 class GuiOffscreenCanvas;
 
@@ -45,16 +49,19 @@ struct CameraQuery
    F32         farPlane;
    F32         fov;
    FovPort     fovPort[2]; // fov for each eye
-   Point2F     projectionOffset;
    Point3F     eyeOffset[2];
    MatrixF     eyeTransforms[2];
    bool        ortho;
    bool        hasFovPort;
    bool        hasStereoTargets;
    MatrixF     cameraMatrix;
+   MatrixF     headMatrix; // center matrix (for HMDs)
+   S32         currentEye;
    RectI       stereoViewports[2]; // destination viewports
    GFXTextureTarget* stereoTargets[2];
    GuiCanvas* drawCanvas; // Canvas we are drawing to. Needed for VR
+
+   IDisplayDevice* displayDevice;
 };
 
 /// Abstract base class for 3D viewport GUIs.
@@ -65,7 +72,8 @@ class GuiTSCtrl : public GuiContainer
 public:
    enum RenderStyles {
       RenderStyleStandard           = 0,
-      RenderStyleStereoSideBySide   = (1<<0)
+      RenderStyleStereoSideBySide   = (1<<0),
+      RenderStyleStereoSeparate     = (1<<1),
    };
 
 protected:
@@ -104,12 +112,18 @@ protected:
    NamedTexTargetRef mStereoGuiTarget;
    GFXVertexBufferHandle<GFXVertexPCT> mStereoOverlayVB;
    GFXStateBlockRef mStereoGuiSB;
+
+   GFXVertexBufferHandle<GFXVertexPCT> mStereoPreviewVB;
+   GFXStateBlockRef mStereoPreviewSB;
+
+   SimObjectPtr<GuiOffscreenCanvas> mStereoCanvas;
    
 public:
    
    GuiTSCtrl();
 
    void onPreRender();
+   void _internalRender(RectI guiViewport, RectI renderViewport, Frustum &frustum);
    void onRender(Point2I offset, const RectI &updateRect);
    virtual bool processCameraQuery(CameraQuery *query);
 
@@ -178,6 +192,7 @@ public:
    bool shouldRenderChildControls() { return mRenderStyle == RenderStyleStandard; }
 
    void setStereoGui(GuiOffscreenCanvas *canvas);
+   void renderDisplayPreview(const RectI &updateRect, GFXTexHandle &previewTexture);
 
    DECLARE_CONOBJECT(GuiTSCtrl);
    DECLARE_CATEGORY( "Gui 3D" );

+ 1 - 0
Engine/source/gui/controls/guiTextEditCtrl.h

@@ -124,6 +124,7 @@ public:
    void invalidText(bool playSound = true);
    void validText();
    bool isValidText();
+	inline bool isPasswordText() { return mPasswordText; }
 
    bool isAllTextSelected();
    void selectAllText();

+ 2 - 0
Engine/source/gui/core/guiControl.h

@@ -286,6 +286,8 @@ class GuiControl : public SimGroup
       const char * getConsoleCommand(); ///< Returns the name of the function bound to this GuiControl
       LangTable *getGUILangTable(void);
       const UTF8 *getGUIString(S32 id);
+
+      inline String& getTooltip() { return mTooltip; } ///< Returns the tooltip
       
       /// @}
       

+ 10 - 3
Engine/source/gui/core/guiOffscreenCanvas.cpp

@@ -176,7 +176,7 @@ void GuiOffscreenCanvas::renderFrame(bool preRenderOnly, bool bufferSwap /* = tr
    GFX->setWorldMatrix( MatrixF::Identity );
    GFX->setViewMatrix( MatrixF::Identity );
    GFX->setProjectionMatrix( MatrixF::Identity );
-
+   
    RectI contentRect(Point2I(0,0), mTargetSize);
    {
       // Render active GUI Dialogs
@@ -193,7 +193,7 @@ void GuiOffscreenCanvas::renderFrame(bool preRenderOnly, bool bufferSwap /* = tr
 
       // Fill Blue if no Dialogs
       if(this->size() == 0)
-         GFX->clear( GFXClearTarget, ColorF(0,0,1,1), 1.0f, 0 );
+         GFX->clear( GFXClearTarget, ColorF(0,0,0,1), 1.0f, 0 );
 
       GFX->setClipRect( contentRect );
 
@@ -210,7 +210,7 @@ void GuiOffscreenCanvas::renderFrame(bool preRenderOnly, bool bufferSwap /* = tr
 
       GFX->getDrawUtil()->clearBitmapModulation();
    }
-   
+
    mTarget->resolve();
    GFX->popActiveRenderTarget();
 
@@ -219,6 +219,13 @@ void GuiOffscreenCanvas::renderFrame(bool preRenderOnly, bool bufferSwap /* = tr
    // Keep track of the last time we rendered.
    mLastRenderMs = Platform::getRealMilliseconds();
    mTargetDirty = mDynamicTarget;
+
+   onFrameRendered();
+}
+
+void GuiOffscreenCanvas::onFrameRendered()
+{
+
 }
 
 Point2I GuiOffscreenCanvas::getWindowSize()

+ 1 - 0
Engine/source/gui/core/guiOffscreenCanvas.h

@@ -23,6 +23,7 @@ public:
    void onRemove();
    
    void renderFrame(bool preRenderOnly, bool bufferSwap);
+   virtual void onFrameRendered();
    
    Point2I getWindowSize();
 

+ 1 - 0
Engine/source/gui/worldEditor/editTSCtrl.cpp

@@ -1162,6 +1162,7 @@ bool EditTSCtrl::processCameraQuery(CameraQuery * query)
 
          query->cameraMatrix = camRot;
          query->cameraMatrix.setPosition(camPos);
+         query->headMatrix = query->cameraMatrix;
          query->fov = mOrthoFOV;
       }
 

+ 0 - 23
Engine/source/lighting/advanced/advancedLightBinManager.cpp

@@ -453,30 +453,7 @@ void AdvancedLightBinManager::_setupPerFrameParameters( const SceneRenderState *
 
    // Perform a camera offset.  We need to manually perform this offset on the sun (or vector) light's
    // polygon, which is at the far plane.
-   const Point2F& projOffset = frustum.getProjectionOffset();
    Point3F cameraOffsetPos = cameraPos;
-   if(!projOffset.isZero())
-   {
-      // First we need to calculate the offset at the near plane.  The projOffset
-      // given above can be thought of a percent as it ranges from 0..1 (or 0..-1).
-      F32 nearOffset = frustum.getNearRight() * projOffset.x;
-
-      // Now given the near plane distance from the camera we can solve the right
-      // triangle and calcuate the SIN theta for the offset at the near plane.
-      // SIN theta = x/y
-      F32 sinTheta = nearOffset / frustum.getNearDist();
-
-      // Finally, we can calcuate the offset at the far plane, which is where our sun (or vector)
-      // light's polygon is drawn.
-      F32 farOffset = frustum.getFarDist() * sinTheta;
-
-      // We can now apply this far plane offset to the far plane itself, which then compensates
-      // for the project offset.
-      MatrixF camTrans = frustum.getTransform();
-      VectorF offset = camTrans.getRightVector();
-      offset *= farOffset;
-      cameraOffsetPos += offset;
-   }
 
    // Now build the quad for drawing full-screen vector light
    // passes.... this is a volatile VB and updates every frame.

+ 20 - 2
Engine/source/math/util/frustum.cpp

@@ -214,8 +214,26 @@ void Frustum::setNearFarDist( F32 nearDist, F32 farDist )
       return;
 
    // Recalculate the frustum.
-   MatrixF xfm( mTransform ); 
-   set( mIsOrtho, getFov(), getAspectRatio(), nearDist, farDist, xfm );
+   MatrixF xfm( mTransform );
+
+   const F32 CENTER_EPSILON = 0.001;
+   F32 centerX = mNearLeft + (mNearRight - mNearLeft) * 0.5;
+   F32 centerY = mNearBottom + (mNearTop - mNearBottom) * 0.5;
+   if ((centerX > CENTER_EPSILON || centerX < -CENTER_EPSILON) || (centerY > CENTER_EPSILON || centerY < -CENTER_EPSILON) )
+   {
+      // Off-center projection, so re-calc use the new distances
+      FovPort expectedFovPort;
+      expectedFovPort.leftTan = -(mNearLeft / mNearDist);
+      expectedFovPort.rightTan = (mNearRight / mNearDist);
+      expectedFovPort.upTan = (mNearTop / mNearDist);
+      expectedFovPort.downTan = -(mNearBottom / mNearDist);
+      MathUtils::makeFovPortFrustum(this, mIsOrtho, nearDist, farDist, expectedFovPort);
+   }
+   else
+   {
+      // Projection is not off-center, use the normal code
+      set(mIsOrtho, getFov(), getAspectRatio(), nearDist, farDist, xfm);
+   }
 }
 
 //-----------------------------------------------------------------------------

+ 19 - 0
Engine/source/platform/input/event.cpp

@@ -27,6 +27,7 @@
 #include "core/stringTable.h"
 #include "platform/platformInput.h"
 #include "math/mQuat.h"
+#include "math/mAngAxis.h"
 
 MODULE_BEGIN( InputEventManager )
 
@@ -546,3 +547,21 @@ void InputEventManager::buildInputEvent(U32 deviceType, U32 deviceInst, InputEve
 
    newEvent.postToSignal(Input::smInputEvent);
 }
+
+void InputEventManager::buildInputEvent(U32 deviceType, U32 deviceInst, InputEventType objType, InputObjectInstances objInst, InputActionType action, AngAxisF& aValue)
+{
+   InputEventInfo newEvent;
+
+   newEvent.deviceType = deviceType;
+   newEvent.deviceInst = deviceInst;
+   newEvent.objType = objType;
+   newEvent.objInst = objInst;
+   newEvent.action = action;
+   newEvent.fValue = aValue.axis.x;
+   newEvent.fValue2 = aValue.axis.y;
+   newEvent.fValue3 = aValue.axis.z;
+   newEvent.fValue4 = aValue.angle;
+
+   newEvent.postToSignal(Input::smInputEvent);
+}
+

+ 3 - 0
Engine/source/platform/input/event.h

@@ -504,6 +504,9 @@ public:
    /// Build an input event based on a QuatF
    void buildInputEvent(U32 deviceType, U32 deviceInst, InputEventType objType, InputObjectInstances objInst, InputActionType action, QuatF& qValue);
 
+   /// Build an input event based on a AngAxisF
+   void buildInputEvent(U32 deviceType, U32 deviceInst, InputEventType objType, InputObjectInstances objInst, InputActionType action, AngAxisF& qValue);
+
 protected:
    U32 mNextDeviceTypeCode;
    U32 mNextDeviceCode;

+ 29 - 44
Engine/source/platform/input/oculusVR/oculusVRDevice.cpp

@@ -62,7 +62,7 @@ MODULE_END;
 // OculusVRDevice
 //-----------------------------------------------------------------------------
 
-bool OculusVRDevice::smEnableDevice = true;
+bool OculusVRDevice::smEnableDevice = false;
 
 bool OculusVRDevice::smSimulateHMD = true;
 
@@ -156,26 +156,27 @@ void OculusVRDevice::buildCodeTable()
    OculusVRSensorDevice::buildCodeTable();
 }
 
-void OculusVRDevice::addHMDDevice(ovrHmd hmd)
+void OculusVRDevice::addHMDDevice(ovrHmd hmd, ovrGraphicsLuid luid)
 {
    if(!hmd)
       return;
 
    OculusVRHMDDevice* hmdd = new OculusVRHMDDevice();
-   hmdd->set(hmd,mHMDDevices.size());
+   hmdd->set(hmd, luid, mHMDDevices.size());
    mHMDDevices.push_back(hmdd);
 
-   Con::printf("   HMD found: %s by %s [v%d]", hmd->ProductName, hmd->Manufacturer, hmd->Type);
+	ovrHmdDesc desc = ovr_GetHmdDesc(hmd);
+   Con::printf("   HMD found: %s by %s [v%d]", desc.ProductName, desc.Manufacturer, desc.Type);
 }
 
 void OculusVRDevice::createSimulatedHMD()
-{
+{/* TOFIX
    OculusVRHMDDevice* hmdd = new OculusVRHMDDevice();
-   ovrHmd hmd = ovrHmd_CreateDebug(ovrHmd_DK2);
+   ovrHmd hmd = ovr_CreateDebug(ovrHmd_DK2);
    hmdd->set(hmd,mHMDDevices.size());
    mHMDDevices.push_back(hmdd);
 
-   Con::printf("   HMD simulated: %s by %s [v%d]", hmdd->getProductName(), hmdd->getManufacturer(), hmdd->getVersion());
+   Con::printf("   HMD simulated: %s by %s [v%d]", hmdd->getProductName(), hmdd->getManufacturer(), hmdd->getVersion()); */
 }
 
 bool OculusVRDevice::enable()
@@ -185,16 +186,17 @@ bool OculusVRDevice::enable()
 
    Con::printf("Oculus VR Device Init:");
 
-   if(sOcculusEnabled && ovr_Initialize())
+   if(sOcculusEnabled && OVR_SUCCESS(ovr_Initialize(0)))
    {
       mEnabled = true;
 
       // Enumerate HMDs and pick the first one
-      ovrHmd hmd = ovrHmd_Create(0);
-      if(hmd)
+		ovrHmd hmd;
+		ovrGraphicsLuid luid;
+      if(OVR_SUCCESS(ovr_Create(&hmd, &luid)))
       {
          // Add the HMD to our list
-         addHMDDevice(hmd);
+         addHMDDevice(hmd, luid);
 
          setActive(true);
       }
@@ -316,17 +318,6 @@ void OculusVRDevice::getEyeOffsets(Point3F *dest) const
    hmd->getEyeOffsets(dest);
 }
 
-bool OculusVRDevice::providesFovPorts() const
-{
-   if(!mHMDDevices.size())
-      return false;
-
-   const OculusVRHMDDevice* hmd = getHMDDevice(mActiveDeviceId);
-   if(!hmd)
-      return Point3F::Zero;
-
-   return true;
-}
 
 void OculusVRDevice::getFovPorts(FovPort *out) const
 {
@@ -560,6 +551,20 @@ GameConnection* OculusVRDevice::getCurrentConnection()
 
 //-----------------------------------------------------------------------------
 
+GFXTexHandle OculusVRDevice::getPreviewTexture()
+{
+   if (!mHMDDevices.size())
+      return NULL;
+
+   OculusVRHMDDevice* hmd = getHMDDevice(mActiveDeviceId);
+   if (!hmd)
+      return NULL;
+
+   return hmd->getPreviewTexture();
+}
+
+//-----------------------------------------------------------------------------
+
 DefineEngineFunction(isOculusVRDeviceActive, bool, (),,
    "@brief Used to determine if the Oculus VR input device is active\n\n"
 
@@ -700,7 +705,7 @@ DefineEngineFunction(getOVRHMDVersion, S32, (S32 index),,
    return hmd->getVersion();
 }
 
-DefineEngineFunction(getOVRHMDDisplayDeviceName, const char*, (S32 index),,
+DefineEngineFunction(getOVRHMDDisplayDeviceType, const char*, (S32 index),,
    "@brief Windows display device name used in EnumDisplaySettings/CreateDC.\n\n"
    "@param index The HMD index.\n"
    "@return The name of the HMD display device, if any.\n"
@@ -717,7 +722,7 @@ DefineEngineFunction(getOVRHMDDisplayDeviceName, const char*, (S32 index),,
       return "";
    }
 
-   return hmd->getDisplayDeviceName();
+   return hmd->getDisplayDeviceType();
 }
 
 DefineEngineFunction(getOVRHMDDisplayDeviceId, S32, (S32 index),,
@@ -740,26 +745,6 @@ DefineEngineFunction(getOVRHMDDisplayDeviceId, S32, (S32 index),,
    return hmd->getDisplayDeviceId();
 }
 
-DefineEngineFunction(getOVRHMDDisplayDesktopPos, Point2I, (S32 index),,
-   "@brief Desktop coordinate position of the screen (can be negative; may not be present on all platforms).\n\n"
-   "@param index The HMD index.\n"
-   "@return Position of the screen.\n"
-   "@ingroup Game")
-{
-   if(!ManagedSingleton<OculusVRDevice>::instanceOrNull())
-   {
-      return Point2I::Zero;
-   }
-
-   const OculusVRHMDDevice* hmd = OCULUSVRDEV->getHMDDevice(index);
-   if(!hmd)
-   {
-      return Point2I::Zero;
-   }
-
-   return hmd->getDesktopPosition();
-}
-
 DefineEngineFunction(getOVRHMDResolution, Point2I, (S32 index),,
    "@brief Provides the OVR HMD screen resolution.\n\n"
    "@param index The HMD index.\n"

+ 8 - 3
Engine/source/platform/input/oculusVR/oculusVRDevice.h

@@ -33,7 +33,7 @@
 #include "math/mQuat.h"
 #include "math/mPoint4.h"
 #include "gfx/gfxDevice.h"
-#include "OVR_CAPI_0_5_0.h"
+#include "OVR_CAPI_0_8_0.h"
 
 #define DEFAULT_RIFT_UNIT 0
 
@@ -83,6 +83,9 @@ protected:
    /// Which HMD is the active one
    U32 mActiveDeviceId;
 
+   /// Device id we need to use to hook up with oculus
+   ovrGraphicsLuid mLuid;
+
 protected:
    void cleanUp();
 
@@ -90,7 +93,7 @@ protected:
    /// Input Event Manager
    void buildCodeTable();
 
-   void addHMDDevice(ovrHmd hmd);
+   void addHMDDevice(ovrHmd hmd, ovrGraphicsLuid luid);
 
    void createSimulatedHMD();
 
@@ -112,8 +115,8 @@ public:
    virtual bool providesFrameEyePose() const;
    virtual void getFrameEyePose(DisplayPose *outPose, U32 eyeId) const;
    virtual bool providesEyeOffsets() const;
+   virtual bool providesFovPorts() const { return true;  }
    virtual void getEyeOffsets(Point3F *dest) const;
-   virtual bool providesFovPorts() const;
    virtual void getFovPorts(FovPort *out) const;
    virtual bool providesProjectionOffset() const;
    virtual const Point2F& getProjectionOffset() const;
@@ -151,6 +154,8 @@ public:
    virtual void setCurrentConnection(GameConnection *connection);
    virtual GameConnection* getCurrentConnection();
 
+   GFXTexHandle getPreviewTexture();
+
    bool _handleDeviceEvent( GFXDevice::GFXDeviceEventType evt );
 
 public:

+ 360 - 266
Engine/source/platform/input/oculusVR/oculusVRHMDDevice.cpp

@@ -26,11 +26,17 @@
 #include "postFx/postEffectCommon.h"
 #include "gui/core/guiCanvas.h"
 #include "platform/input/oculusVR/oculusVRUtil.h"
+#include "core/stream/fileStream.h"
 
-#include "gfx/D3D9/gfxD3D9Device.h"
-// Use D3D9 for win32
+
+#include "gfx/D3D11/gfxD3D11Device.h"
+#include "gfx/D3D11/gfxD3D11EnumTranslate.h"
+#include "gfx/gfxStringEnumTranslate.h"
+#undef D3D11
+
+// Use D3D11 for win32
 #ifdef TORQUE_OS_WIN
-#define OVR_D3D_VERSION 9
+#define OVR_D3D_VERSION 11
 #include "OVR_CAPI_D3D.h"
 #define OCULUS_USE_D3D
 #else
@@ -38,15 +44,125 @@
 #define OCULUS_USE_GL
 #endif
 
-extern GFXTextureObject *gLastStereoTexture;
+struct OculusTexture
+{
+   virtual void AdvanceToNextTexture() = 0;
+
+   virtual ~OculusTexture() {
+   }
+};
+
+//------------------------------------------------------------
+// ovrSwapTextureSet wrapper class that also maintains the render target views
+// needed for D3D11 rendering.
+struct D3D11OculusTexture : public OculusTexture
+{
+   ovrHmd                   hmd;
+   ovrSwapTextureSet      * TextureSet;
+   static const int         TextureCount = 2;
+   GFXTexHandle  TexRtv[TextureCount];
+   GFXDevice *Owner;
+
+   D3D11OculusTexture(GFXDevice* owner) :
+      hmd(nullptr),
+      TextureSet(nullptr),
+      Owner(owner)
+   {
+      TexRtv[0] = TexRtv[1] = nullptr;
+   }
+
+   bool Init(ovrHmd _hmd, int sizeW, int sizeH)
+   {
+      hmd = _hmd;
+
+      D3D11_TEXTURE2D_DESC dsDesc;
+      dsDesc.Width = sizeW;
+      dsDesc.Height = sizeH;
+      dsDesc.MipLevels = 1;
+      dsDesc.ArraySize = 1;
+      dsDesc.Format = DXGI_FORMAT_B8G8R8A8_UNORM_SRGB;// DXGI_FORMAT_R8G8B8A8_UNORM_SRGB;
+      dsDesc.SampleDesc.Count = 1;   // No multi-sampling allowed
+      dsDesc.SampleDesc.Quality = 0;
+      dsDesc.Usage = D3D11_USAGE_DEFAULT;
+      dsDesc.CPUAccessFlags = 0;
+      dsDesc.MiscFlags = 0;
+      dsDesc.BindFlags = D3D11_BIND_SHADER_RESOURCE | D3D11_BIND_RENDER_TARGET;
+
+
+      GFXD3D11Device* device = static_cast<GFXD3D11Device*>(GFX);
+      ovrResult result = ovr_CreateSwapTextureSetD3D11(hmd, device->mD3DDevice, &dsDesc, ovrSwapTextureSetD3D11_Typeless, &TextureSet);
+      if (!OVR_SUCCESS(result))
+         return false;
+
+      AssertFatal(TextureSet->TextureCount == TextureCount, "TextureCount mismatch.");
+
+      for (int i = 0; i < TextureCount; ++i)
+      {
+         ovrD3D11Texture* tex = (ovrD3D11Texture*)&TextureSet->Textures[i];
+         D3D11_RENDER_TARGET_VIEW_DESC rtvd = {};
+         rtvd.Format = DXGI_FORMAT_B8G8R8A8_UNORM;// DXGI_FORMAT_R8G8B8A8_UNORM;
+         rtvd.ViewDimension = D3D11_RTV_DIMENSION_TEXTURE2D;
+
+         GFXD3D11TextureObject* object = new GFXD3D11TextureObject(GFX, &VRTextureProfile);
+         object->registerResourceWithDevice(GFX);
+         *(object->getSRViewPtr()) = tex->D3D11.pSRView;
+         *(object->get2DTexPtr()) = tex->D3D11.pTexture;
+         device->mD3DDevice->CreateRenderTargetView(tex->D3D11.pTexture, &rtvd, object->getRTViewPtr());
+
+         // Add refs for texture release later on
+         if (object->getSRView()) object->getSRView()->AddRef();
+         //object->getRTView()->AddRef();
+         if (object->get2DTex()) object->get2DTex()->AddRef();
+         object->isManaged = true;
+
+         // Get the actual size of the texture...
+         D3D11_TEXTURE2D_DESC probeDesc;
+         ZeroMemory(&probeDesc, sizeof(D3D11_TEXTURE2D_DESC));
+         object->get2DTex()->GetDesc(&probeDesc);
+
+         object->mTextureSize.set(probeDesc.Width, probeDesc.Height, 0);
+         object->mBitmapSize = object->mTextureSize;
+         int fmt = probeDesc.Format;
+
+         if (fmt == DXGI_FORMAT_R8G8B8A8_TYPELESS || fmt == DXGI_FORMAT_B8G8R8A8_TYPELESS)
+         {
+            object->mFormat = GFXFormatR8G8B8A8; // usual case
+         }
+         else
+         {
+            // TODO: improve this. this can be very bad.
+            GFXREVERSE_LOOKUP(GFXD3D11TextureFormat, GFXFormat, fmt);
+            object->mFormat = (GFXFormat)fmt;
+         }
+         TexRtv[i] = object;
+      }
+
+      return true;
+   }
+
+   ~D3D11OculusTexture()
+   {
+      for (int i = 0; i < TextureCount; ++i)
+      {
+         SAFE_DELETE(TexRtv[i]);
+      }
+      if (TextureSet)
+      {
+         ovr_DestroySwapTextureSet(hmd, TextureSet);
+      }
+   }
+
+   void AdvanceToNextTexture()
+   {
+      TextureSet->CurrentIndex = (TextureSet->CurrentIndex + 1) % TextureSet->TextureCount;
+   }
+};
+
 
-OculusVRHMDDevice::OculusVRHMDDevice() :
-mWindowSize(1280,800)
+OculusVRHMDDevice::OculusVRHMDDevice()
 {
    mIsValid = false;
    mDevice = NULL;
-   mSupportedDistortionCaps = 0;
-   mCurrentDistortionCaps = 0;
    mCurrentCaps = 0;
    mSupportedCaps = 0;
    mVsync = true;
@@ -60,6 +176,7 @@ mWindowSize(1280,800)
    mConnection = NULL;
    mSensor = NULL;
    mActionCodeIndex = 0;
+   mTextureSwapSet = NULL;
 }
 
 OculusVRHMDDevice::~OculusVRHMDDevice()
@@ -79,14 +196,14 @@ void OculusVRHMDDevice::cleanUp()
 
    if(mDevice)
    {
-      ovrHmd_Destroy(mDevice);
+      ovr_Destroy(mDevice);
       mDevice = NULL;
    }
 
    mIsValid = false;
 }
 
-void OculusVRHMDDevice::set(ovrHmd hmd, U32 actionCodeIndex)
+void OculusVRHMDDevice::set(ovrHmd hmd, ovrGraphicsLuid luid, U32 actionCodeIndex)
 {
    cleanUp();
 
@@ -95,50 +212,42 @@ void OculusVRHMDDevice::set(ovrHmd hmd, U32 actionCodeIndex)
 
    mDevice = hmd;
 
-   mSupportedCaps = hmd->HmdCaps;
-   mCurrentCaps = mSupportedCaps & (ovrHmdCap_DynamicPrediction | ovrHmdCap_LowPersistence | (!mVsync ? ovrHmdCap_NoVSync : 0));
+   ovrHmdDesc desc = ovr_GetHmdDesc(hmd);
+   int caps = ovr_GetTrackingCaps(hmd);
 
-   mSupportedDistortionCaps = hmd->DistortionCaps;
-   mCurrentDistortionCaps   = mSupportedDistortionCaps & (ovrDistortionCap_TimeWarp | ovrDistortionCap_Vignette | ovrDistortionCap_Overdrive);
-	
-   mTimewarp = mSupportedDistortionCaps & ovrDistortionCap_TimeWarp;
+   mSupportedCaps = desc.AvailableHmdCaps;
+   mCurrentCaps = mSupportedCaps;
+   
+   mTimewarp = true;
 
    // DeviceInfo
-   mProductName = hmd->ProductName;
-   mManufacturer = hmd->Manufacturer;
-   mVersion = hmd->FirmwareMajor;
-
-   mDisplayDeviceName = hmd->DisplayDeviceName;
-   mDisplayId = hmd->DisplayId;
+   mProductName = desc.ProductName;
+   mManufacturer = desc.Manufacturer;
+   mVersion = desc.FirmwareMajor;
 
-   mDesktopPosition.x = hmd->WindowsPos.x;
-   mDesktopPosition.y = hmd->WindowsPos.y;
+   //
+   Vector<GFXAdapter*> adapterList;
+   GFXD3D11Device::enumerateAdapters(adapterList);
 
-   mResolution.x = hmd->Resolution.w;
-   mResolution.y = hmd->Resolution.h;
+   dMemcpy(&mLuid, &luid, sizeof(mLuid));
+   mDisplayId = -1;
 
-   mProfileInterpupillaryDistance = ovrHmd_GetFloat(hmd, OVR_KEY_IPD, OVR_DEFAULT_IPD);
-   mLensSeparation = ovrHmd_GetFloat(hmd, "LensSeparation", 0);
-   ovrHmd_GetFloatArray(hmd, "ScreenSize", &mScreenSize.x, 2);
-
-   dMemcpy(mCurrentFovPorts, mDevice->DefaultEyeFov, sizeof(mDevice->DefaultEyeFov));
-
-   for (U32 i=0; i<2; i++)
+   for (U32 i = 0, sz = adapterList.size(); i < sz; i++)
    {
-      mCurrentFovPorts[i].UpTan = mDevice->DefaultEyeFov[i].UpTan;
-      mCurrentFovPorts[i].DownTan = mDevice->DefaultEyeFov[i].DownTan;
-      mCurrentFovPorts[i].LeftTan = mDevice->DefaultEyeFov[i].LeftTan;
-      mCurrentFovPorts[i].RightTan = mDevice->DefaultEyeFov[i].RightTan;
+      GFXAdapter* adapter = adapterList[i];
+      if (dMemcmp(&adapter->mLUID, &mLuid, sizeof(mLuid)) == 0)
+      {
+         mDisplayId = adapter->mIndex;
+         mDisplayDeviceType = "D3D11"; // TOFIX this
+      }
    }
 
-   if (mDevice->HmdCaps & ovrHmdCap_ExtendDesktop)
-   {
-      mWindowSize = Point2I(mDevice->Resolution.w, mDevice->Resolution.h);
-   }
-   else
-   {
-      mWindowSize = Point2I(1100, 618);
-   }
+   mResolution.x = desc.Resolution.w;
+   mResolution.y = desc.Resolution.h;
+
+   mProfileInterpupillaryDistance = ovr_GetFloat(hmd, OVR_KEY_IPD, OVR_DEFAULT_IPD);
+   mLensSeparation = ovr_GetFloat(hmd, "LensSeparation", 0);
+   ovr_GetFloatArray(hmd, "ScreenSize", &mScreenSize.x, 2);
 
    mActionCodeIndex = actionCodeIndex;
 
@@ -147,6 +256,8 @@ void OculusVRHMDDevice::set(ovrHmd hmd, U32 actionCodeIndex)
    mSensor = new OculusVRSensorDevice();
    mSensor->set(mDevice, mActionCodeIndex);
 
+   mDebugMirrorTexture = NULL;
+
    updateCaps();
 }
 
@@ -163,25 +274,26 @@ void OculusVRHMDDevice::setOptimalDisplaySize(GuiCanvas *canvas)
    PlatformWindow *window = canvas->getPlatformWindow();
    GFXTarget *target = window->getGFXTarget();
 
-   if (target && target->getSize() != mWindowSize)
+   Point2I requiredSize(0, 0);
+
+   ovrHmdDesc desc = ovr_GetHmdDesc(mDevice);
+   ovrSizei leftSize = ovr_GetFovTextureSize(mDevice, ovrEye_Left, desc.DefaultEyeFov[0], mCurrentPixelDensity);
+   ovrSizei rightSize = ovr_GetFovTextureSize(mDevice, ovrEye_Right, desc.DefaultEyeFov[1], mCurrentPixelDensity);
+
+   requiredSize.x = leftSize.w + rightSize.h;
+   requiredSize.y = mMax(leftSize.h, rightSize.h);
+   
+   if (target && target->getSize() != requiredSize)
    {
       GFXVideoMode newMode;
       newMode.antialiasLevel = 0;
       newMode.bitDepth = 32;
       newMode.fullScreen = false;
       newMode.refreshRate = 75;
-      newMode.resolution = mWindowSize;
+      newMode.resolution = requiredSize;
       newMode.wideScreen = false;
       window->setVideoMode(newMode);
-      //AssertFatal(window->getClientExtent().x == mWindowSize[0] && window->getClientExtent().y == mWindowSize[1], "Window didn't resize to correct dimensions");
-   }
-
-   // Need to move window over to the rift side of the desktop
-   if (mDevice->HmdCaps & ovrHmdCap_ExtendDesktop && !OculusVRDevice::smWindowDebug)
-   {
-#ifndef OCULUS_WINDOW_DEBUG
-        window->setPosition(getDesktopPosition());
-#endif
+      //AssertFatal(window->getClientExtent().x == requiredSize.x && window->getClientExtent().y == requiredSize.y, "Window didn't resize to correct dimensions");
    }
 }
 
@@ -190,53 +302,161 @@ bool OculusVRHMDDevice::isDisplayingWarning()
    if (!mIsValid || !mDevice)
       return false;
 
+   return false;/*
    ovrHSWDisplayState displayState;
    ovrHmd_GetHSWDisplayState(mDevice, &displayState);
 
-   return displayState.Displayed;
+   return displayState.Displayed;*/
 }
 
 void OculusVRHMDDevice::dismissWarning()
 {
    if (!mIsValid || !mDevice)
       return;
-   ovrHmd_DismissHSWDisplay(mDevice);
+   //ovr_DismissHSWDisplay(mDevice);
+}
+
+GFXTexHandle OculusVRHMDDevice::getPreviewTexture()
+{
+   if (!mIsValid || !mDevice)
+      return NULL;
+
+   return mDebugMirrorTextureHandle;
 }
 
 bool OculusVRHMDDevice::setupTargets()
 {
-   ovrFovPort eyeFov[2] = {mDevice->DefaultEyeFov[0], mDevice->DefaultEyeFov[1]};
+   // Create eye render buffers
+   ID3D11RenderTargetView * eyeRenderTexRtv[2];
+   ovrLayerEyeFov           ld = { { ovrLayerType_EyeFov } };
+   mRenderLayer = ld;
+
+   GFXD3D11Device* device = static_cast<GFXD3D11Device*>(GFX);
+
+   ovrHmdDesc desc = ovr_GetHmdDesc(mDevice);
+   for (int i = 0; i < 2; i++)
+   {
+      mRenderLayer.Fov[i] = desc.DefaultEyeFov[i];
+      mRenderLayer.Viewport[i].Size = ovr_GetFovTextureSize(mDevice, (ovrEyeType)i, mRenderLayer.Fov[i], mCurrentPixelDensity);
+      mEyeRenderDesc[i] = ovr_GetRenderDesc(mDevice, (ovrEyeType_)(ovrEye_Left+i), mRenderLayer.Fov[i]);
+   }
 
-   mRecomendedEyeTargetSize[0] = ovrHmd_GetFovTextureSize(mDevice, ovrEye_Left,  eyeFov[0], mCurrentPixelDensity);
-   mRecomendedEyeTargetSize[1] = ovrHmd_GetFovTextureSize(mDevice, ovrEye_Right, eyeFov[1], mCurrentPixelDensity);
+   ovrSizei recommendedEyeTargetSize[2];
+   recommendedEyeTargetSize[0] = mRenderLayer.Viewport[0].Size;
+   recommendedEyeTargetSize[1] = mRenderLayer.Viewport[1].Size;
+
+   if (mTextureSwapSet)
+   {
+      delete mTextureSwapSet;
+      mTextureSwapSet = NULL;
+   }
 
    // Calculate render target size
    if (mDesiredRenderingMode == GFXDevice::RS_StereoSideBySide)
    {
       // Setup a single texture, side-by-side viewports
       Point2I rtSize(
-         mRecomendedEyeTargetSize[0].w + mRecomendedEyeTargetSize[1].w,
-         mRecomendedEyeTargetSize[0].h > mRecomendedEyeTargetSize[1].h ? mRecomendedEyeTargetSize[0].h : mRecomendedEyeTargetSize[1].h
+         recommendedEyeTargetSize[0].w + recommendedEyeTargetSize[1].w,
+         recommendedEyeTargetSize[0].h > recommendedEyeTargetSize[1].h ? recommendedEyeTargetSize[0].h : recommendedEyeTargetSize[1].h
          );
 
       GFXFormat targetFormat = GFX->getActiveRenderTarget()->getFormat();
       mRTFormat = targetFormat;
 
-      rtSize = generateRenderTarget(mStereoRT, mStereoTexture, mStereoDepthTexture, rtSize);
-      
+      rtSize = generateRenderTarget(mStereoRT, mStereoDepthTexture, rtSize);
+
+      // Generate the swap texture we need to store the final image
+      D3D11OculusTexture* tex = new D3D11OculusTexture(GFX);
+      if (tex->Init(mDevice, rtSize.x, rtSize.y))
+      {
+         mTextureSwapSet = tex;
+      }
+
+      mRenderLayer.ColorTexture[0] = tex->TextureSet;
+      mRenderLayer.ColorTexture[1] = tex->TextureSet;
+
+      mRenderLayer.Viewport[0].Pos.x = 0;
+      mRenderLayer.Viewport[0].Pos.y = 0;
+      mRenderLayer.Viewport[1].Pos.x = (rtSize.x + 1) / 2;
+      mRenderLayer.Viewport[1].Pos.y = 0;
+
       // Left
-      mEyeRenderSize[0] = rtSize;
       mEyeRT[0] = mStereoRT;
-      mEyeTexture[0] = mStereoTexture;
-      mEyeViewport[0] = RectI(Point2I(0,0), Point2I((mRecomendedEyeTargetSize[0].w+1)/2, mRecomendedEyeTargetSize[0].h));
+      mEyeViewport[0] = RectI(Point2I(mRenderLayer.Viewport[0].Pos.x, mRenderLayer.Viewport[0].Pos.y), Point2I(mRenderLayer.Viewport[0].Size.w, mRenderLayer.Viewport[0].Size.h));
 
       // Right
-      mEyeRenderSize[1] = rtSize;
       mEyeRT[1] = mStereoRT;
-      mEyeTexture[1] = mStereoTexture;
-      mEyeViewport[1] = RectI(Point2I((mRecomendedEyeTargetSize[0].w+1)/2,0), Point2I((mRecomendedEyeTargetSize[1].w+1)/2, mRecomendedEyeTargetSize[1].h));
+      mEyeViewport[1] = RectI(Point2I(mRenderLayer.Viewport[1].Pos.x, mRenderLayer.Viewport[1].Pos.y), Point2I(mRenderLayer.Viewport[1].Size.w, mRenderLayer.Viewport[1].Size.h));
+
+      GFXD3D11Device* device = static_cast<GFXD3D11Device*>(GFX);
+
+      D3D11_TEXTURE2D_DESC dsDesc;
+      dsDesc.Width = rtSize.x;
+      dsDesc.Height = rtSize.y;
+      dsDesc.MipLevels = 1;
+      dsDesc.ArraySize = 1;
+      dsDesc.Format = DXGI_FORMAT_B8G8R8A8_UNORM_SRGB;// DXGI_FORMAT_R8G8B8A8_UNORM_SRGB;
+      dsDesc.SampleDesc.Count = 1;
+      dsDesc.SampleDesc.Quality = 0;
+      dsDesc.Usage = D3D11_USAGE_DEFAULT;
+      dsDesc.BindFlags = D3D11_BIND_SHADER_RESOURCE;
+      dsDesc.CPUAccessFlags = 0;
+      dsDesc.MiscFlags = 0;
+
+      // Create typeless when we are rendering as non-sRGB since we will override the texture format in the RTV
+      bool reinterpretSrgbAsLinear = true;
+      unsigned compositorTextureFlags = 0;
+      if (reinterpretSrgbAsLinear)
+         compositorTextureFlags |= ovrSwapTextureSetD3D11_Typeless;
+
+      ovrResult result = ovr_CreateMirrorTextureD3D11(mDevice, device->mD3DDevice, &dsDesc, compositorTextureFlags, &mDebugMirrorTexture);
+      
+      if (result == ovrError_DisplayLost || !mDebugMirrorTexture)
+      {
+         AssertFatal(false, "Something went wrong");
+         return NULL;
+      }
+
+      // Create texture handle so we can render it in-game
+      ovrD3D11Texture* mirror_tex = (ovrD3D11Texture*)mDebugMirrorTexture;
+      D3D11_RENDER_TARGET_VIEW_DESC rtvd = {};
+      rtvd.Format = DXGI_FORMAT_B8G8R8A8_UNORM;// DXGI_FORMAT_R8G8B8A8_UNORM;
+      rtvd.ViewDimension = D3D11_RTV_DIMENSION_TEXTURE2D;
+
+      GFXD3D11TextureObject* object = new GFXD3D11TextureObject(GFX, &VRTextureProfile);
+      object->registerResourceWithDevice(GFX);
+      *(object->getSRViewPtr()) = mirror_tex->D3D11.pSRView;
+      *(object->get2DTexPtr()) = mirror_tex->D3D11.pTexture;
+      device->mD3DDevice->CreateRenderTargetView(mirror_tex->D3D11.pTexture, &rtvd, object->getRTViewPtr());
+
+
+      // Add refs for texture release later on
+      if (object->getSRView()) object->getSRView()->AddRef();
+      //object->getRTView()->AddRef();
+      if (object->get2DTex()) object->get2DTex()->AddRef();
+      object->isManaged = true;
+
+      // Get the actual size of the texture...
+      D3D11_TEXTURE2D_DESC probeDesc;
+      ZeroMemory(&probeDesc, sizeof(D3D11_TEXTURE2D_DESC));
+      object->get2DTex()->GetDesc(&probeDesc);
 
-      gLastStereoTexture = mEyeTexture[0];
+      object->mTextureSize.set(probeDesc.Width, probeDesc.Height, 0);
+      object->mBitmapSize = object->mTextureSize;
+      int fmt = probeDesc.Format;
+
+      if (fmt == DXGI_FORMAT_R8G8B8A8_TYPELESS || fmt == DXGI_FORMAT_B8G8R8A8_TYPELESS)
+      {
+         object->mFormat = GFXFormatR8G8B8A8; // usual case
+      }
+      else
+      {
+         // TODO: improve this. this can be very bad.
+         GFXREVERSE_LOOKUP(GFXD3D11TextureFormat, GFXFormat, fmt);
+         object->mFormat = (GFXFormat)fmt;
+      }
+      
+      mDebugMirrorTextureHandle = object;
    }
    else
    {
@@ -261,17 +481,14 @@ String OculusVRHMDDevice::dumpMetrics()
    F32 ipd = this->getIPD();
    U32 lastStatus = mSensor->getLastTrackingStatus();
 
-   sb.format("   | OVR Sensor %i | rot: %f %f %f, pos: %f %f %f, FOV (%f %f %f %f, %f %f %f %f), IPD %f, Track:%s%s, Disort:%s%s%s",
+   sb.format("   | OVR Sensor %i | rot: %f %f %f, pos: %f %f %f, FOV (%f %f %f %f, %f %f %f %f), IPD %f, Track:%s%s",
              mActionCodeIndex,
              rot.x, rot.y, rot.z,
              pos.x, pos.y, pos.z,
              eyeFov[0].upTan, eyeFov[0].downTan, eyeFov[0].leftTan, eyeFov[0].rightTan, eyeFov[1].upTan, eyeFov[1].downTan, eyeFov[1].leftTan, eyeFov[1].rightTan,
              getIPD(),
              lastStatus & ovrStatus_OrientationTracked ? " ORIENT" : "",
-             lastStatus & ovrStatus_PositionTracked ? " POS" : "",
-             mCurrentDistortionCaps & ovrDistortionCap_TimeWarp ? " TIMEWARP" : "",
-             mCurrentDistortionCaps & ovrDistortionCap_Vignette ? " VIGNETTE" : "",
-             mCurrentDistortionCaps & ovrDistortionCap_Overdrive ? " OVERDRIVE" : "");
+             lastStatus & ovrStatus_PositionTracked ? " POS" : "");
 
    return sb.data();
 }
@@ -292,82 +509,23 @@ void OculusVRHMDDevice::updateRenderInfo()
       return;
    
    PlatformWindow *window = mDrawCanvas->getPlatformWindow();
-   ovrFovPort eyeFov[2] = {mDevice->DefaultEyeFov[0], mDevice->DefaultEyeFov[1]};
+
+   ovrHmdDesc desc = ovr_GetHmdDesc(mDevice);
 
    // Update window size if it's incorrect
    Point2I backbufferSize = mDrawCanvas->getBounds().extent;
 
-   // Reset
-   ovrHmd_ConfigureRendering(mDevice, NULL, 0, NULL, NULL);
-
-#ifdef OCULUS_USE_D3D
-   // Generate render target textures
-   GFXD3D9Device *d3d9GFX = dynamic_cast<GFXD3D9Device*>(GFX);
-   if (d3d9GFX)
+   // Finally setup!
+   if (!setupTargets())
    {
-      ovrD3D9Config cfg;
-      cfg.D3D9.Header.API = ovrRenderAPI_D3D9;
-      cfg.D3D9.Header.Multisample = 0;
-      cfg.D3D9.Header.BackBufferSize = OVR::Sizei(backbufferSize.x, backbufferSize.y);
-      cfg.D3D9.pDevice = d3d9GFX->getDevice();
-      cfg.D3D9.pDevice->GetSwapChain(0, &cfg.D3D9.pSwapChain);
-
-      // Finally setup!
-      if (!setupTargets())
-      {
-         onDeviceDestroy();
-         return;
-      }
-
-      ovrHmd_AttachToWindow(mDevice, window->getPlatformDrawable(), NULL, NULL);
-
-      if (!ovrHmd_ConfigureRendering( mDevice, &cfg.Config, mCurrentDistortionCaps, eyeFov, mEyeRenderDesc ))
-      {
-         Con::errorf("Couldn't configure oculus rendering!");
-         return;
-      }
-   }
-#endif
-
-#ifdef OCULUS_USE_GL
-   // Generate render target textures
-   GFXGLDevice *glGFX = dynamic_cast<GFXGLDevice*>(GFX);
-   if (glGFX)
-   {
-      ovrGLConfig cfg;
-      cfg.OGL.Header.API = ovrRenderAPI_OpenGL;
-      cfg.OGL.Header.Multisample = 0;
-      cfg.OGL.Header.BackBufferSize = OVR::Sizei(backbufferSize.x, backbufferSize.y);
-
-#ifdef WIN32
-      cfg.OGL.Window = GetActiveWindow();//window->getPlatformDrawable();
-      cfg.OGL.DC = wglGetCurrentDC();
-#else
-      cfg.OGL.Disp = NULL;
-#endif
-
-      // Finally setup!
-      if (!setupTargets())
-      {
-         onDeviceDestroy();
-         return;
-      }
-
-      ovrHmd_AttachToWindow(mDevice, window->getPlatformDrawable(), NULL, NULL);
-
-      if (!ovrHmd_ConfigureRendering( mDevice, &cfg.Config, mCurrentDistortionCaps, eyeFov, mEyeRenderDesc ))
-      {
-         Con::errorf("Couldn't configure oculus rendering!");
-         return;
-      }
+      onDeviceDestroy();
+      return;
    }
-#endif
-
 
    mRenderConfigurationDirty = false;
 }
 
-Point2I OculusVRHMDDevice::generateRenderTarget(GFXTextureTargetRef &target, GFXTexHandle &texture, GFXTexHandle &depth, Point2I desiredSize)
+Point2I OculusVRHMDDevice::generateRenderTarget(GFXTextureTargetRef &target, GFXTexHandle &depth, Point2I desiredSize)
 {
     // Texture size that we already have might be big enough.
     Point2I newRTSize;
@@ -402,12 +560,12 @@ Point2I OculusVRHMDDevice::generateRenderTarget(GFXTextureTargetRef &target, GFX
     newRTSize.setMax(Point2I(64, 64));
 
     // Stereo RT needs to be the same size as the recommended RT
-    if ( newRT || texture.getWidthHeight() != newRTSize )
+    /*if ( newRT || mDebugStereoTexture.getWidthHeight() != newRTSize )
     {
-       texture.set( newRTSize.x, newRTSize.y, mRTFormat, &VRTextureProfile,  avar( "%s() - (line %d)", __FUNCTION__, __LINE__ ) );
-       target->attachTexture( GFXTextureTarget::Color0, texture );
-       Con::printf("generateRenderTarget generated %x", texture.getPointer());
-    }
+       mDebugStereoTexture.set( newRTSize.x, newRTSize.y, mRTFormat, &VRTextureProfile,  avar( "%s() - (line %d)", __FUNCTION__, __LINE__ ) );
+       target->attachTexture( GFXTextureTarget::Color0, mDebugStereoTexture);
+       Con::printf("generateRenderTarget generated %x", mDebugStereoTexture.getPointer());
+    }*/
 
     if ( depth.getWidthHeight() != newRTSize )
     {
@@ -424,6 +582,13 @@ void OculusVRHMDDevice::clearRenderTargets()
    mStereoRT = NULL;
    mEyeRT[0] = NULL;
    mEyeRT[1] = NULL;
+
+   if (mDebugMirrorTexture)
+   {
+      ovr_DestroyMirrorTexture(mDevice, mDebugMirrorTexture);
+      mDebugMirrorTexture = NULL;
+      mDebugMirrorTextureHandle = NULL;
+   }
 }
 
 void OculusVRHMDDevice::updateCaps()
@@ -431,34 +596,7 @@ void OculusVRHMDDevice::updateCaps()
    if (!mIsValid || !mDevice)
       return;
 
-   U32 oldDistortionCaps = mCurrentDistortionCaps;
-   
-   // Distortion
-   if (mTimewarp)
-   {
-      mCurrentDistortionCaps |= ovrDistortionCap_TimeWarp;
-   }
-   else
-   {
-      mCurrentDistortionCaps &= ~ovrDistortionCap_TimeWarp;
-   }
-
-   if (oldDistortionCaps != mCurrentDistortionCaps)
-   {
-      mRenderConfigurationDirty = true;
-   }
-
-   // Device
-   if (!mVsync)
-   {
-      mCurrentCaps |= ovrHmdCap_NoVSync;
-   }
-   else
-   {
-      mCurrentCaps &= ~ovrHmdCap_NoVSync;
-   }
-   
-   ovrHmd_SetEnabledCaps(mDevice, mCurrentCaps);
+   ovr_SetEnabledCaps(mDevice, mCurrentCaps);
 }
 
 static bool sInFrame = false; // protects against recursive onStartFrame calls
@@ -469,108 +607,64 @@ void OculusVRHMDDevice::onStartFrame()
       return;
 
    sInFrame = true;
-   
-#ifndef OCULUS_DEBUG_FRAME
-   ovrHmd_BeginFrame(mDevice, 0);
-#endif
 
    ovrVector3f hmdToEyeViewOffset[2] = { mEyeRenderDesc[0].HmdToEyeViewOffset, mEyeRenderDesc[1].HmdToEyeViewOffset };
-   ovrHmd_GetEyePoses(mDevice, 0, hmdToEyeViewOffset, mCurrentEyePoses, &mLastTrackingState);
+   ovrTrackingState hmdState = ovr_GetTrackingState(mDevice, 0, ovrTrue);
+   ovr_CalcEyePoses(hmdState.HeadPose.ThePose, hmdToEyeViewOffset, mRenderLayer.RenderPose);
 
    for (U32 i=0; i<2; i++)
    {
-      mCurrentEyePoses[i].Position.x *= OculusVRDevice::smPositionTrackingScale;
-      mCurrentEyePoses[i].Position.y *= OculusVRDevice::smPositionTrackingScale;
-      mCurrentEyePoses[i].Position.z *= OculusVRDevice::smPositionTrackingScale;
+      mRenderLayer.RenderPose[i].Position.x *= OculusVRDevice::smPositionTrackingScale;
+      mRenderLayer.RenderPose[i].Position.y *= OculusVRDevice::smPositionTrackingScale;
+      mRenderLayer.RenderPose[i].Position.z *= OculusVRDevice::smPositionTrackingScale;
    }
 
+   mRenderLayer.SensorSampleTime = ovr_GetTimeInSeconds();
+
+   // Set current dest texture on stereo render target
+   D3D11OculusTexture* texSwap = (D3D11OculusTexture*)mTextureSwapSet;
+   mStereoRT->attachTexture(GFXTextureTarget::Color0, texSwap->TexRtv[texSwap->TextureSet->CurrentIndex]);
+
    sInFrame = false;
    mFrameReady = true;
 }
 
 void OculusVRHMDDevice::onEndFrame()
 {
-   if (!mIsValid || !mDevice || !mDrawCanvas || sInFrame || !mFrameReady)
+   if (!mIsValid || !mDevice || !mDrawCanvas || sInFrame || !mFrameReady || !mTextureSwapSet)
       return;
 
    Point2I eyeSize;
    GFXTarget *windowTarget = mDrawCanvas->getPlatformWindow()->getGFXTarget();
 
-#ifndef OCULUS_DEBUG_FRAME
-   
-#ifdef OCULUS_USE_D3D
-   GFXD3D9Device *d3d9GFX = dynamic_cast<GFXD3D9Device*>(GFX);
-   if (d3d9GFX && mEyeRT[0].getPointer())
-   {
-      // Left
-      ovrD3D9Texture eyeTextures[2];
-      eyeSize = mEyeTexture[0].getWidthHeight();
-      eyeTextures[0].D3D9.Header.API = ovrRenderAPI_D3D9;
-      eyeTextures[0].D3D9.Header.RenderViewport.Pos.x = mEyeViewport[0].point.x;
-      eyeTextures[0].D3D9.Header.RenderViewport.Pos.y = mEyeViewport[0].point.y;
-      eyeTextures[0].D3D9.Header.RenderViewport.Size.w = mEyeViewport[0].extent.x;
-      eyeTextures[0].D3D9.Header.RenderViewport.Size.h = mEyeViewport[0].extent.y;
-      eyeTextures[0].D3D9.Header.TextureSize.w = eyeSize.x;
-      eyeTextures[0].D3D9.Header.TextureSize.h = eyeSize.y;
-      eyeTextures[0].D3D9.pTexture = mEyeRT[0].getPointer() ? static_cast<GFXD3D9TextureObject*>(mEyeTexture[0].getPointer())->get2DTex() : NULL;
+   GFXD3D11Device *d3d11GFX = dynamic_cast<GFXD3D11Device*>(GFX);
 
-      // Right
-      eyeSize = mEyeTexture[1].getWidthHeight();
-      eyeTextures[1].D3D9.Header.API = ovrRenderAPI_D3D9;
-      eyeTextures[1].D3D9.Header.RenderViewport.Pos.x = mEyeViewport[1].point.x;
-      eyeTextures[1].D3D9.Header.RenderViewport.Pos.y = mEyeViewport[1].point.y;
-      eyeTextures[1].D3D9.Header.RenderViewport.Size.w = mEyeViewport[1].extent.x;
-      eyeTextures[1].D3D9.Header.RenderViewport.Size.h = mEyeViewport[1].extent.y;
-      eyeTextures[1].D3D9.Header.TextureSize.w = eyeSize.x;
-      eyeTextures[1].D3D9.Header.TextureSize.h = eyeSize.y;
-      eyeTextures[1].D3D9.pTexture = mEyeRT[0].getPointer() ? static_cast<GFXD3D9TextureObject*>(mEyeTexture[1].getPointer())->get2DTex() : NULL;
-
-      // Submit!
-      GFX->disableShaders();
-
-      GFX->setActiveRenderTarget(windowTarget);
-      GFX->clear(GFXClearZBuffer | GFXClearStencil | GFXClearTarget, ColorI(255,0,0), 1.0f, 0);
-      ovrHmd_EndFrame(mDevice, mCurrentEyePoses, (ovrTexture*)(&eyeTextures[0]));
-   }
-#endif
+   ovrViewScaleDesc viewScaleDesc;
+   ovrVector3f hmdToEyeViewOffset[2] = { mEyeRenderDesc[0].HmdToEyeViewOffset, mEyeRenderDesc[1].HmdToEyeViewOffset };
+   viewScaleDesc.HmdSpaceToWorldScaleInMeters = 1.0f;
+   viewScaleDesc.HmdToEyeViewOffset[0] = hmdToEyeViewOffset[0];
+   viewScaleDesc.HmdToEyeViewOffset[1] = hmdToEyeViewOffset[1];
 
-#ifdef OCULUS_USE_GL
-   GFXGLDevice *glGFX = dynamic_cast<GFXGLDevice*>(GFX);
-   if (glGFX && mEyeRT[0].getPointer())
-   {
-      // Left
-      ovrGLTexture eyeTextures[2];
-      eyeSize = mEyeTexture[0].getWidthHeight();
-      eyeTextures[0].OGL.Header.API = ovrRenderAPI_GL;
-      eyeTextures[0].OGL.Header.RenderViewport.Pos.x = mEyeViewport[0].point.x;
-      eyeTextures[0].OGL.Header.RenderViewport.Pos.y = mEyeViewport[0].point.y;
-      eyeTextures[0].OGL.Header.RenderViewport.Size.w = mEyeViewport[0].extent.x;
-      eyeTextures[0].OGL.Header.RenderViewport.Size.h = mEyeViewport[0].extent.y;
-      eyeTextures[0].OGL.Header.TextureSize.w = eyeSize.x;
-      eyeTextures[0].OGL.Header.TextureSize.h = eyeSize.y;
-      eyeTextures[0].OGL.TexId = mEyeRT[0].getPointer() ? static_cast<GFXGLTextureObject*>(mEyeTexture[0].getPointer())->getHandle() : 0;
 
-      // Right
-      eyeSize = mEyeTexture[1].getWidthHeight();
-      eyeTextures[1].OGL.Header.API = ovrRenderAPI_GL;
-      eyeTextures[1].OGL.Header.RenderViewport.Pos.x = mEyeViewport[1].point.x;
-      eyeTextures[1].OGL.Header.RenderViewport.Pos.y = mEyeViewport[1].point.y;
-      eyeTextures[1].OGL.Header.RenderViewport.Size.w = mEyeViewport[1].extent.x;
-      eyeTextures[1].OGL.Header.RenderViewport.Size.h = mEyeViewport[1].extent.y;
-      eyeTextures[1].OGL.Header.TextureSize.w = eyeSize.x;
-      eyeTextures[1].OGL.Header.TextureSize.h = eyeSize.y;
-      eyeTextures[0].OGL.TexId = mEyeRT[1].getPointer() ? static_cast<GFXGLTextureObject*>(mEyeTexture[1].getPointer())->getHandle() : 0;
-
-      // Submit!
-      GFX->disableShaders();
-
-      GFX->setActiveRenderTarget(windowTarget);
-      GFX->clear(GFXClearZBuffer | GFXClearStencil | GFXClearTarget, ColorI(255,0,0), 1.0f, 0);
-      ovrHmd_EndFrame(mDevice, mCurrentEyePoses, (ovrTexture*)(&eyeTextures[0]));
+   ovrLayerDirect           ld = { { ovrLayerType_Direct } };
+   mDebugRenderLayer = ld;
+
+   mDebugRenderLayer.ColorTexture[0] = mRenderLayer.ColorTexture[0];
+   mDebugRenderLayer.ColorTexture[1] = mRenderLayer.ColorTexture[1];
+   mDebugRenderLayer.Viewport[0] = mRenderLayer.Viewport[0];
+   mDebugRenderLayer.Viewport[1] = mRenderLayer.Viewport[1];
+
+   // TODO: use ovrViewScaleDesc
+   ovrLayerHeader* layers = &mRenderLayer.Header;
+   ovrResult result = ovr_SubmitFrame(mDevice, 0, &viewScaleDesc, &layers, 1);
+   mTextureSwapSet->AdvanceToNextTexture();
+
+   if (OVR_SUCCESS(result))
+   {
+      int woo = 1;
    }
-#endif
 
-#endif
+   // TODO: render preview in display?
 
    mFrameReady = false;
 }
@@ -578,14 +672,15 @@ void OculusVRHMDDevice::onEndFrame()
 void OculusVRHMDDevice::getFrameEyePose(DisplayPose *outPose, U32 eyeId) const
 {
    // Directly set the rotation and position from the eye transforms
-   ovrPosef pose = mCurrentEyePoses[eyeId];
+   ovrPosef pose = mRenderLayer.RenderPose[eyeId];
    OVR::Quatf orientation = pose.Orientation;
    const OVR::Vector3f position = pose.Position;
 
-   EulerF rotEuler;
-   OculusVRUtil::convertRotation(orientation, rotEuler);
+   MatrixF torqueMat(1);
+   OVR::Matrix4f mat(orientation);
+   OculusVRUtil::convertRotation(mat.M, torqueMat);
 
-   outPose->orientation = rotEuler;
+   outPose->orientation = QuatF(torqueMat);
    outPose->position = Point3F(-position.x, position.z, -position.y);
 }
 
@@ -605,18 +700,17 @@ void OculusVRHMDDevice::onDeviceDestroy()
       mEyeRT[1]->zombify();
    }
 
+   if (mTextureSwapSet)
+   {
+      delete mTextureSwapSet;
+      mTextureSwapSet = NULL;
+   }
+
    mStereoRT = NULL;
-   mStereoTexture = NULL;
    mStereoDepthTexture = NULL;
 
-   mEyeTexture[0] = NULL;
-   mEyeDepthTexture[0] = NULL;
-   mEyeTexture[1] = NULL;
-   mEyeDepthTexture[1] = NULL;
    mEyeRT[0] = NULL;
    mEyeRT[1] = NULL;
 
    mRenderConfigurationDirty = true;
-   
-   ovrHmd_ConfigureRendering(mDevice, NULL, 0, NULL, NULL);
 }

+ 21 - 31
Engine/source/platform/input/oculusVR/oculusVRHMDDevice.h

@@ -34,12 +34,14 @@
 #include "math/mRect.h"
 #include "gfx/gfxDevice.h"
 
-#include "OVR_CAPI_0_5_0.h"
+#include "OVR_CAPI.h"
 
 class GuiCanvas;
 class GameConnection;
 struct DisplayPose;
 class OculusVRSensorDevice;
+struct OculusTexture;
+
 
 class OculusVRHMDDevice
 {
@@ -59,9 +61,6 @@ protected:
 
    ovrHmd mDevice;
 
-   U32 mSupportedDistortionCaps;
-   U32 mCurrentDistortionCaps;
-
    U32 mSupportedCaps;
    U32 mCurrentCaps;
 
@@ -70,15 +69,12 @@ protected:
    String   mManufacturer;
    U32      mVersion;
 
-   // Windows display device name used in EnumDisplaySettings/CreateDC
-   String   mDisplayDeviceName;
+   // Device type (D3D11, etc)
+   String   mDisplayDeviceType;
 
-   // MacOS display ID
+   // Adapter index
    S32      mDisplayId;
 
-   // Desktop coordinate position of the screen (can be negative; may not be present on all platforms)
-   Point2I  mDesktopPosition;
-
    // Whole screen resolution
    Point2I  mResolution;
 
@@ -99,18 +95,15 @@ protected:
    Point2F mProjectionCenterOffset;
 
    // Current pose of eyes
-   ovrPosef         mCurrentEyePoses[2];
    ovrEyeRenderDesc mEyeRenderDesc[2];
 
-   ovrFovPort mCurrentFovPorts[2];
-
-   Point2I mWindowSize;
-
    GameConnection *mConnection;
 
    OculusVRSensorDevice *mSensor;
    U32 mActionCodeIndex;
 
+   ovrGraphicsLuid mLuid;
+
 protected:
    void updateRenderInfo();
 
@@ -121,7 +114,7 @@ public:
    void cleanUp();
 
    // Set the HMD properties based on information from the OVR device
-   void set(ovrHmd hmd, U32 actionCodeIndex);
+   void set(ovrHmd hmd, ovrGraphicsLuid luid, U32 actionCodeIndex);
 
    // Sets optimal display size for canvas
    void setOptimalDisplaySize(GuiCanvas *canvas);
@@ -133,14 +126,11 @@ public:
    U32 getVersion() const { return mVersion; }
 
    // Windows display device name used in EnumDisplaySettings/CreateDC
-   const char* getDisplayDeviceName() const { return mDisplayDeviceName.c_str(); }
+   const char* getDisplayDeviceType () const { return mDisplayDeviceType.c_str(); }
 
    // MacOS display ID
    S32 getDisplayDeviceId() const { return mDisplayId; }
 
-   // Desktop coordinate position of the screen (can be negative; may not be present on all platforms)
-   const Point2I& getDesktopPosition() const { return mDesktopPosition; }
-
    // Whole screen resolution
    const Point2I& getResolution() const { return mResolution; }
 
@@ -166,7 +156,7 @@ public:
    void getStereoViewports(RectI *dest) const { dMemcpy(dest, mEyeViewport, sizeof(mEyeViewport)); }
    void getStereoTargets(GFXTextureTarget **dest) const { dest[0] = mEyeRT[0]; dest[1] = mEyeRT[1]; }
 
-   void getFovPorts(FovPort *dest) const { dMemcpy(dest, mCurrentFovPorts, sizeof(mCurrentFovPorts)); }
+   void getFovPorts(FovPort *dest) const { dMemcpy(dest, &mRenderLayer.Fov[0], sizeof(mRenderLayer.Fov)); }
    
    /// Returns eye offsets in torque coordinate space, i.e. z being up, x being left-right, and y being depth (forward).
    void getEyeOffsets(Point3F *offsets) const { 
@@ -181,7 +171,7 @@ public:
    void onEndFrame();
    void onDeviceDestroy();
 
-   Point2I generateRenderTarget(GFXTextureTargetRef &target, GFXTexHandle &texture, GFXTexHandle &depth, Point2I desiredSize);
+   Point2I generateRenderTarget(GFXTextureTargetRef &target, GFXTexHandle &depth, Point2I desiredSize);
    void clearRenderTargets();
 
    bool isDisplayingWarning();
@@ -195,23 +185,17 @@ public:
    virtual void setCurrentConnection(GameConnection *connection) { mConnection = connection; }
    virtual GameConnection* getCurrentConnection() { return mConnection; }
 
+   GFXTexHandle getPreviewTexture();
+
    String dumpMetrics();
 
    // Stereo RT
-   GFXTexHandle mStereoTexture;
+   GFXTexHandle mDebugStereoTexture;
    GFXTexHandle mStereoDepthTexture;
    GFXTextureTargetRef mStereoRT;
 
    // Eye RTs (if we are using separate targets)
    GFXTextureTargetRef mEyeRT[2];
-   GFXTexHandle mEyeTexture[2];
-   GFXTexHandle mEyeDepthTexture[2];
-
-   // Current render target size for each eye
-   Point2I mEyeRenderSize[2];
-
-   // Recommended eye target size for each eye
-   ovrSizei mRecomendedEyeTargetSize[2];
 
    // Desired viewport for each eye
    RectI mEyeViewport[2];
@@ -220,6 +204,12 @@ public:
    F32 smDesiredPixelDensity;
 
    ovrTrackingState mLastTrackingState;
+   OculusTexture* mTextureSwapSet;
+   ovrLayerEyeFov mRenderLayer;
+   ovrLayerDirect mDebugRenderLayer;
+   ovrViewScaleDesc mScaleDesc;
+   ovrTexture* mDebugMirrorTexture;
+   GFXTexHandle mDebugMirrorTextureHandle;
 
    GFXDevice::GFXDeviceRenderStyles mDesiredRenderingMode;
 

+ 1 - 1
Engine/source/platform/input/oculusVR/oculusVRSensorData.h

@@ -27,7 +27,7 @@
 #include "math/mMatrix.h"
 #include "math/mQuat.h"
 #include "math/mPoint2.h"
-#include "OVR_CAPI_0_5_0.h"
+#include "OVR_CAPI_0_8_0.h"
 
 struct OculusVRSensorData
 {

+ 23 - 18
Engine/source/platform/input/oculusVR/oculusVRSensorDevice.cpp

@@ -24,8 +24,10 @@
 #include "platform/input/oculusVR/oculusVRSensorData.h"
 #include "platform/input/oculusVR/oculusVRUtil.h"
 #include "platform/platformInput.h"
-#include"console/simBase.h"
+#include "console/simBase.h"
 #include "console/engineAPI.h" 
+#include "math/mAngAxis.h"
+#include "OVR_CAPI_0_8_0.h"
 
 U32 OculusVRSensorDevice::OVR_SENSORROT[OculusVRConstants::MaxSensors] = {0};
 U32 OculusVRSensorDevice::OVR_SENSORROTANG[OculusVRConstants::MaxSensors] = {0};
@@ -66,7 +68,7 @@ void OculusVRSensorDevice::cleanUp()
 {
    mIsValid = false;
 
-   ovrHmd_ConfigureTracking(mDevice, 0, 0);
+   ovr_ConfigureTracking(mDevice, 0, 0);
 }
 
 void OculusVRSensorDevice::set(ovrHmd sensor, S32 actionCodeIndex)
@@ -74,7 +76,7 @@ void OculusVRSensorDevice::set(ovrHmd sensor, S32 actionCodeIndex)
    mIsValid = false;
    mDevice = sensor;
 
-   mSupportedTrackingCaps = sensor->TrackingCaps;
+   mSupportedTrackingCaps = ovr_GetTrackingCaps(sensor);
    mCurrentTrackingCaps = ovrTrackingCap_Orientation | ovrTrackingCap_MagYawCorrection | ovrTrackingCap_Position;
 
    mCurrentTrackingCaps = mSupportedTrackingCaps & mCurrentTrackingCaps;
@@ -82,15 +84,17 @@ void OculusVRSensorDevice::set(ovrHmd sensor, S32 actionCodeIndex)
 
    mPositionTrackingDisabled = !(mCurrentTrackingCaps & ovrTrackingCap_Position);
 
+	ovrHmdDesc desc = ovr_GetHmdDesc(sensor);
+
    // DeviceInfo
-   mProductName = sensor->ProductName;
-   mManufacturer = sensor->Manufacturer;
-   mVersion = sensor->Type;
+   mProductName = desc.ProductName;
+   mManufacturer = desc.Manufacturer;
+   mVersion = desc.Type;
 
    // SensorInfo
-   mVendorId = sensor->VendorId;
-   mProductId = sensor->ProductId;
-   mSerialNumber = sensor->SerialNumber;
+   mVendorId = desc.VendorId;
+   mProductId = desc.ProductId;
+   mSerialNumber = desc.SerialNumber;
 
    mActionCodeIndex = actionCodeIndex;
 
@@ -163,7 +167,7 @@ bool OculusVRSensorDevice::process(U32 deviceType, bool generateRotAsAngAxis, bo
       return false;
 
    // Grab current state
-   ovrTrackingState ts = ovrHmd_GetTrackingState(mDevice, ovr_GetTimeInSeconds());
+   ovrTrackingState ts = ovr_GetTrackingState(mDevice, ovr_GetTimeInSeconds(), ovrTrue);
    mLastStatus = ts.StatusFlags;
 
    // Store the current data from the sensor and compare with previous data
@@ -181,7 +185,8 @@ bool OculusVRSensorDevice::process(U32 deviceType, bool generateRotAsAngAxis, bo
    {
       if(generateRotAsAngAxis)
       {
-         INPUTMGR->buildInputEvent(deviceType, OculusVRConstants::DefaultOVRBase, SI_ROT, OVR_SENSORROT[mActionCodeIndex], SI_MOVE, currentBuffer->mRotQuat);
+         AngAxisF axisAA(currentBuffer->mRotQuat);
+         INPUTMGR->buildInputEvent(deviceType, OculusVRConstants::DefaultOVRBase, SI_ROT, OVR_SENSORROT[mActionCodeIndex], SI_MOVE, axisAA);
       }
 
       if(generateRotAsEuler)
@@ -249,7 +254,7 @@ void OculusVRSensorDevice::reset()
    if(!mIsValid)
       return;
 
-   ovrHmd_RecenterPose(mDevice);
+   ovr_RecenterPose(mDevice);
 }
 
 bool OculusVRSensorDevice::getYawCorrection() const
@@ -322,7 +327,7 @@ EulerF OculusVRSensorDevice::getEulerRotation()
    if(!mIsValid)
       return Point3F::Zero;
 
-   ovrTrackingState ts = ovrHmd_GetTrackingState(mDevice, ovr_GetTimeInSeconds());
+   ovrTrackingState ts = ovr_GetTrackingState(mDevice, ovr_GetTimeInSeconds(), ovrTrue);
    OVR::Quatf orientation = ts.HeadPose.ThePose.Orientation;
 
    // Sensor rotation in Euler format
@@ -337,7 +342,7 @@ EulerF OculusVRSensorDevice::getRawEulerRotation()
    if(!mIsValid)
       return Point3F::Zero;
 
-   ovrTrackingState ts = ovrHmd_GetTrackingState(mDevice, ovr_GetTimeInSeconds());
+   ovrTrackingState ts = ovr_GetTrackingState(mDevice, ovr_GetTimeInSeconds(), ovrTrue);
    OVR::Quatf orientation = ts.HeadPose.ThePose.Orientation;
 
    // Sensor rotation in Euler format
@@ -351,7 +356,7 @@ VectorF OculusVRSensorDevice::getAcceleration()
    if(!mIsValid)
       return VectorF::Zero;
    
-   ovrTrackingState ts = ovrHmd_GetTrackingState(mDevice, ovr_GetTimeInSeconds());
+   ovrTrackingState ts = ovr_GetTrackingState(mDevice, ovr_GetTimeInSeconds(), ovrTrue);
    OVR::Vector3f a = ts.HeadPose.LinearAcceleration;
 
    // Sensor acceleration in VectorF format
@@ -366,7 +371,7 @@ EulerF OculusVRSensorDevice::getAngularVelocity()
    if(!mIsValid)
       return EulerF::Zero;
    
-   ovrTrackingState ts = ovrHmd_GetTrackingState(mDevice, ovr_GetTimeInSeconds());
+   ovrTrackingState ts = ovr_GetTrackingState(mDevice, ovr_GetTimeInSeconds(), ovrTrue);
    OVR::Vector3f v = ts.HeadPose.AngularVelocity;
    
    // Sensor angular velocity in EulerF format
@@ -381,7 +386,7 @@ Point3F OculusVRSensorDevice::getPosition()
    if(!mIsValid)
       return Point3F();
    
-   ovrTrackingState ts = ovrHmd_GetTrackingState(mDevice, ovr_GetTimeInSeconds());
+   ovrTrackingState ts = ovr_GetTrackingState(mDevice, ovr_GetTimeInSeconds(), ovrTrue);
    OVR::Vector3f v = ts.HeadPose.ThePose.Position;
    return Point3F(-v.x, v.z, -v.y);
 }
@@ -399,5 +404,5 @@ void OculusVRSensorDevice::updateTrackingCaps()
    if (!mPositionTrackingDisabled)
       mCurrentTrackingCaps |= ovrTrackingCap_Position;
 
-   ovrHmd_ConfigureTracking(mDevice, mCurrentTrackingCaps, 0);
+   ovr_ConfigureTracking(mDevice, mCurrentTrackingCaps, 0);
 }

+ 1 - 1
Engine/source/platform/input/oculusVR/oculusVRSensorDevice.h

@@ -30,7 +30,7 @@
 #include "math/mPoint4.h"
 #include "platform/input/oculusVR/oculusVRConstants.h"
 #include "platform/types.h"
-#include "OVR_CAPI_0_5_0.h"
+#include "OVR_CAPI.h"
 
 struct OculusVRSensorData;
 

+ 1 - 4
Engine/source/platform/input/oculusVR/oculusVRUtil.cpp

@@ -44,10 +44,7 @@ void convertRotation(const F32 inRotMat[4][4], MatrixF& outRotation)
 void convertRotation(OVR::Quatf& inRotation, EulerF& outRotation)
 {
    F32 yaw, pitch, roll;
-   inRotation.GetEulerAngles<OVR::Axis_Y, OVR::Axis_X, OVR::Axis_Z>(&yaw, &pitch, &roll);
-   outRotation.x = -pitch;
-   outRotation.y = roll;
-   outRotation.z = -yaw;
+   inRotation.GetEulerAngles<OVR::Axis_X, OVR::Axis_Z, OVR::Axis_Y, OVR::Rotate_CW, OVR::Handed_R>(&outRotation.x, &outRotation.y, &outRotation.z);
 }
 
 void calculateAxisRotation(const MatrixF& inRotation, const F32& maxAxisRadius, Point2F& outRotation)

+ 1 - 1
Engine/source/platform/input/oculusVR/oculusVRUtil.h

@@ -25,7 +25,7 @@
 
 #include "math/mPoint2.h"
 #include "math/mMatrix.h"
-#include "OVR_CAPI_0_5_0.h"
+#include "OVR_CAPI_0_8_0.h"
 
 // NOTE: math code in oculus uses "Offset" which is a preprocessor macro
 #define TorqueOffset Offset

+ 546 - 0
Engine/source/platform/input/openVR/openVROverlay.cpp

@@ -0,0 +1,546 @@
+#include "platform/input/openVR/openVRProvider.h"
+#include "platform/input/openVR/openVROverlay.h"
+
+#include "gfx/D3D11/gfxD3D11Device.h"
+#include "gfx/D3D11/gfxD3D11TextureObject.h"
+#include "gfx/D3D11/gfxD3D11EnumTranslate.h"
+
+#ifdef TORQUE_OPENGL
+#include "gfx/gl/gfxGLDevice.h"
+#include "gfx/gl/gfxGLTextureObject.h"
+#include "gfx/gl/gfxGLEnumTranslate.h"
+#endif
+
+#include "postFx/postEffectCommon.h"
+#include "gui/controls/guiTextEditCtrl.h"
+
+ImplementEnumType(OpenVROverlayType,
+   "Desired overlay type for OpenVROverlay.\n\n"
+   "@ingroup OpenVR")
+{ OpenVROverlay::OVERLAYTYPE_OVERLAY, "Overlay" },
+{ OpenVROverlay::OVERLAYTYPE_DASHBOARD, "Dashboard" },
+EndImplementEnumType;
+
+IMPLEMENT_CONOBJECT(OpenVROverlay);
+
+OpenVROverlay::OpenVROverlay()
+{
+   mTransform = MatrixF(1);
+   mOverlayWidth = 1.5f;
+   mOverlayFlags = 0;
+
+   mOverlayColor = ColorF(1, 1, 1, 1);
+   mTrackingOrigin = vr::TrackingUniverseSeated;
+
+   mTargetFormat = GFXFormatR8G8B8A8_LINEAR_FORCE; // needed for openvr!
+   mManualMouseHandling = true;
+
+   mMouseScale = Point2F(1, 1);
+}
+
+OpenVROverlay::~OpenVROverlay()
+{
+
+}
+
+static bool setProtectedOverlayTypeDirty(void *obj, const char *array, const char *data)
+{
+   OpenVROverlay *object = static_cast<OpenVROverlay*>(obj);
+   object->mOverlayTypeDirty = true;
+   return true;
+}
+
+static bool setProtectedOverlayDirty(void *obj, const char *array, const char *data)
+{
+   OpenVROverlay *object = static_cast<OpenVROverlay*>(obj);
+   object->mOverlayDirty = true;
+   return true;
+}
+
+void OpenVROverlay::initPersistFields()
+{
+   addProtectedField("overlayType", TypeOpenVROverlayType, Offset(mOverlayType, OpenVROverlay), &setProtectedOverlayTypeDirty, &defaultProtectedGetFn,
+      "Type of overlay.");
+   addProtectedField("overlayFlags", TypeS32, Offset(mOverlayFlags, OpenVROverlay), &setProtectedOverlayDirty, &defaultProtectedGetFn,
+      "Flags for overlay.");
+   addProtectedField("overlayWidth", TypeF32, Offset(mOverlayWidth, OpenVROverlay), &setProtectedOverlayDirty, &defaultProtectedGetFn,
+      "Width of overlay.");
+   addProtectedField("overlayColor", TypeColorF, Offset(mOverlayColor, OpenVROverlay), &setProtectedOverlayDirty, &defaultProtectedGetFn,
+      "Backing color of overlay.");
+
+   addProtectedField("transformType", TypeOpenVROverlayTransformType, Offset(mOverlayTransformType, OpenVROverlay), &setProtectedOverlayDirty, &defaultProtectedGetFn,
+      "Transform type of overlay.");
+   addProtectedField("transformPosition", TypeMatrixPosition, Offset(mTransform, OpenVROverlay), &setProtectedOverlayDirty, &defaultProtectedGetFn,
+      "Position of overlay.");
+   addProtectedField("transformRotation", TypeMatrixRotation, Offset(mTransform, OpenVROverlay), &setProtectedOverlayDirty, &defaultProtectedGetFn,
+      "Rotation of overlay.");
+   addProtectedField("transformDeviceIndex", TypeS32, Offset(mTransformDeviceIndex, OpenVROverlay), &setProtectedOverlayDirty, &defaultProtectedGetFn,
+      "Index of the tracked device the overlay transform is relative to.");
+   addProtectedField("transformDeviceComponent", TypeString, Offset(mTransformDeviceComponent, OpenVROverlay), &setProtectedOverlayDirty, &defaultProtectedGetFn,
+      "Name of the tracked device component the overlay transform attaches to.");
+
+   addProtectedField("inputMethod", TypeOpenVROverlayInputMethod, Offset(mInputMethod, OpenVROverlay), &setProtectedOverlayDirty, &defaultProtectedGetFn,
+      "Type of input method.");
+   addProtectedField("mouseScale", TypePoint2F, Offset(mMouseScale, OpenVROverlay), &setProtectedOverlayDirty, &defaultProtectedGetFn,
+      "Scale of mouse input.");
+
+   addProtectedField("trackingOrigin", TypeOpenVRTrackingUniverseOrigin, Offset(mTrackingOrigin, OpenVROverlay), &setProtectedOverlayDirty, &defaultProtectedGetFn,
+      "Tracking origin.");
+
+   addProtectedField("controllerDevice", TypeS32, Offset(mControllerDeviceIndex, OpenVROverlay), &setProtectedOverlayDirty, &defaultProtectedGetFn,
+      "Index of controller to attach overlay to.");
+
+   addField("manualMouseHandling", TypeBool, Offset(mManualMouseHandling, OpenVROverlay), "Forces openvr to create mouse events for overlay");
+
+   Parent::initPersistFields();
+}
+
+bool OpenVROverlay::onAdd()
+{
+   if (Parent::onAdd())
+   {
+      mOverlayTypeDirty = true;
+      mOverlayDirty = true;
+
+      if (OPENVR)
+      {
+         OPENVR->registerOverlay(this);
+      }
+
+      return true;
+   }
+
+   return false;
+}
+
+void OpenVROverlay::onRemove()
+{
+   if (mOverlayHandle)
+   {
+      vr::VROverlay()->DestroyOverlay(mOverlayHandle);
+      mOverlayHandle = NULL;
+   }
+
+   if (mThumbOverlayHandle)
+   {
+      vr::VROverlay()->DestroyOverlay(mThumbOverlayHandle);
+      mThumbOverlayHandle = NULL;
+   }
+
+   if (ManagedSingleton<OpenVRProvider>::instanceOrNull())
+   {
+      OPENVR->unregisterOverlay(this);
+   }
+}
+
+void OpenVROverlay::resetOverlay()
+{
+   vr::IVROverlay *overlay = vr::VROverlay();
+   if (!overlay)
+      return;
+
+   if (mOverlayHandle)
+   {
+      overlay->DestroyOverlay(mOverlayHandle);
+      mOverlayHandle = NULL;
+   }
+
+   if (mThumbOverlayHandle)
+   {
+      overlay->DestroyOverlay(mThumbOverlayHandle);
+      mThumbOverlayHandle = NULL;
+   }
+
+   if (mOverlayType == OpenVROverlay::OVERLAYTYPE_DASHBOARD)
+   {
+      overlay->CreateDashboardOverlay(mInternalName, mInternalName, &mOverlayHandle, &mThumbOverlayHandle);
+   }
+   else
+   {
+      overlay->CreateOverlay(mInternalName, mInternalName, &mOverlayHandle);
+   }
+
+   mOverlayDirty = true;
+   mOverlayTypeDirty = false;
+
+   // Pre-render start frame so we have a texture available
+   if (!mTarget)
+   {
+      renderFrame(false, false);
+   }
+}
+
+void OpenVROverlay::updateOverlay()
+{
+   if (mOverlayTypeDirty)
+      resetOverlay();
+
+   // Update params
+   vr::IVROverlay *overlay = vr::VROverlay();
+   if (!overlay || !mOverlayHandle)
+      return;
+
+   if (!mOverlayDirty)
+      return;
+
+   MatrixF vrMat(1);
+   vr::HmdMatrix34_t ovrMat;
+   vr::HmdVector2_t ovrMouseScale;
+   ovrMouseScale.v[0] = mMouseScale.x;
+   ovrMouseScale.v[1] = mMouseScale.y;
+
+   OpenVRUtil::convertTransformToOVR(mTransform, vrMat);
+   OpenVRUtil::convertMatrixFPlainToSteamVRAffineMatrix(vrMat, ovrMat);
+
+   MatrixF reverseMat = OpenVRUtil::convertSteamVRAffineMatrixToMatrixFPlain(ovrMat);
+   MatrixF finalReverseMat(1);
+   OpenVRUtil::convertTransformFromOVR(reverseMat, finalReverseMat);
+
+   switch (mOverlayTransformType)
+   {
+      case vr::VROverlayTransform_Absolute:
+         overlay->SetOverlayTransformAbsolute(mOverlayHandle, mTrackingOrigin, &ovrMat);
+         break;
+      case vr::VROverlayTransform_TrackedDeviceRelative:
+         overlay->SetOverlayTransformTrackedDeviceRelative(mOverlayHandle, mTransformDeviceIndex, &ovrMat);
+         break;
+      case vr::VROverlayTransform_TrackedComponent:
+         overlay->SetOverlayTransformTrackedDeviceComponent(mOverlayHandle, mTransformDeviceIndex, mTransformDeviceComponent.c_str());
+         break;
+      // NOTE: system not handled here - doesn't seem possible to create these
+      default:
+         break;
+   }
+
+  // overlay->SetOverlayColor(mOverlayHandle, mOverlayColor.red, mOverlayColor.green, mOverlayColor.blue);
+   overlay->SetOverlayAlpha(mOverlayHandle, mOverlayColor.alpha);
+   overlay->SetOverlayMouseScale(mOverlayHandle, &ovrMouseScale);
+   overlay->SetOverlayInputMethod(mOverlayHandle, mInputMethod);
+   overlay->SetOverlayWidthInMeters(mOverlayHandle, mOverlayWidth);
+
+   // NOTE: if flags in openvr change, double check this
+   for (U32 i = vr::VROverlayFlags_None; i <= vr::VROverlayFlags_ShowTouchPadScrollWheel; i++)
+   {
+      overlay->SetOverlayFlag(mOverlayHandle, (vr::VROverlayFlags)i, mOverlayFlags & (1 << i));
+   }
+
+   mOverlayDirty = false;
+}
+
+void OpenVROverlay::showOverlay()
+{
+   updateOverlay();
+   if (mOverlayHandle == NULL)
+      return;
+
+   if (mOverlayType != OVERLAYTYPE_DASHBOARD)
+   {
+      vr::EVROverlayError err = vr::VROverlay()->ShowOverlay(mOverlayHandle);
+      if (err != vr::VROverlayError_None)
+      {
+         Con::errorf("VR Overlay error!");
+      }
+   }
+
+   if (!mStagingTexture)
+   {
+      renderFrame(false, false);
+   }
+}
+
+void OpenVROverlay::hideOverlay()
+{
+   if (mOverlayHandle == NULL)
+      return;
+
+   if (mOverlayType != OVERLAYTYPE_DASHBOARD)
+   {
+      vr::VROverlay()->HideOverlay(mOverlayHandle);
+   }
+}
+
+
+bool OpenVROverlay::isOverlayVisible()
+{
+   if (mOverlayHandle == NULL)
+      return false;
+
+   return vr::VROverlay()->IsOverlayVisible(mOverlayHandle);
+}
+
+bool OpenVROverlay::isOverlayHoverTarget()
+{
+   if (mOverlayHandle == NULL)
+      return false;
+
+   return vr::VROverlay()->IsHoverTargetOverlay(mOverlayHandle);
+}
+
+
+bool OpenVROverlay::isGamepadFocussed()
+{
+   if (mOverlayHandle == NULL)
+      return false;
+
+   return vr::VROverlay()->GetGamepadFocusOverlay() == mOverlayHandle;
+}
+
+bool OpenVROverlay::isActiveDashboardOverlay()
+{
+   return false; // TODO: verify — unclear which OpenVR API reports the active dashboard overlay
+}
+
+MatrixF OpenVROverlay::getTransformForOverlayCoordinates(const Point2F &pos)
+{
+   if (mOverlayHandle == NULL)
+      return MatrixF::Identity;
+
+   vr::HmdVector2_t vec;
+   vec.v[0] = pos.x;
+   vec.v[1] = pos.y;
+   vr::HmdMatrix34_t outMat;
+   MatrixF outTorqueMat;
+   if (vr::VROverlay()->GetTransformForOverlayCoordinates(mOverlayHandle, mTrackingOrigin, vec, &outMat) != vr::VROverlayError_None)
+      return MatrixF::Identity;
+
+   MatrixF vrMat(1);
+   vrMat = OpenVRUtil::convertSteamVRAffineMatrixToMatrixFPlain(outMat);
+   OpenVRUtil::convertTransformFromOVR(vrMat, outTorqueMat);
+   return outTorqueMat;
+}
+
+bool OpenVROverlay::castRay(const Point3F &origin, const Point3F &direction, RayInfo *info)
+{
+   if (mOverlayHandle == NULL)
+      return false;
+
+   vr::VROverlayIntersectionParams_t params;
+   vr::VROverlayIntersectionResults_t result;
+
+   Point3F ovrOrigin = OpenVRUtil::convertPointToOVR(origin);
+   Point3F ovrDirection = OpenVRUtil::convertPointToOVR(direction);
+
+   params.eOrigin = mTrackingOrigin;
+   params.vSource.v[0] = ovrOrigin.x;
+   params.vSource.v[1] = ovrOrigin.y;
+   params.vSource.v[2] = ovrOrigin.z;
+   params.vDirection.v[0] = ovrDirection.x;
+   params.vDirection.v[1] = ovrDirection.y;
+   params.vDirection.v[2] = ovrDirection.z;
+
+   bool rayHit = vr::VROverlay()->ComputeOverlayIntersection(mOverlayHandle, &params, &result);
+
+   if (rayHit && info)
+   {
+      info->t = result.fDistance;
+      info->point = OpenVRUtil::convertPointFromOVR(result.vPoint); // TODO: need to transform this FROM vr-space
+      info->normal = OpenVRUtil::convertPointFromOVR(result.vNormal);
+      info->texCoord = Point2F(result.vUVs.v[0], result.vUVs.v[1]);
+      info->object = NULL;
+      info->userData = this;
+   }
+
+   return rayHit;
+}
+
+void OpenVROverlay::moveGamepadFocusToNeighbour()
+{
+
+}
+
+void OpenVROverlay::handleOpenVREvents()
+{
+   if (mManualMouseHandling)
+   {
+      // tell OpenVR to make some events for us
+      for (vr::TrackedDeviceIndex_t unDeviceId = 1; unDeviceId < vr::k_unControllerStateAxisCount; unDeviceId++)
+      {
+         if (vr::VROverlay()->HandleControllerOverlayInteractionAsMouse(mOverlayHandle, unDeviceId))
+         {
+            break;
+         }
+      }
+   }
+
+
+   vr::VREvent_t vrEvent;
+   while (vr::VROverlay()->PollNextOverlayEvent(mOverlayHandle, &vrEvent, sizeof(vrEvent)))
+   {
+      InputEventInfo eventInfo;
+      eventInfo.deviceType = MouseDeviceType;
+      eventInfo.deviceInst = 0;
+      eventInfo.objType = SI_AXIS;
+      eventInfo.modifier = (InputModifiers)0;
+      eventInfo.ascii = 0;
+
+      //Con::printf("Overlay event %i", vrEvent.eventType);
+
+      switch (vrEvent.eventType)
+      {
+      case vr::VREvent_MouseMove:
+      {
+         //Con::printf("mousemove %f,%f", vrEvent.data.mouse.x, vrEvent.data.mouse.y);
+         eventInfo.objType = SI_AXIS;
+         eventInfo.objInst = SI_XAXIS;
+         eventInfo.action = SI_MAKE;
+         eventInfo.fValue = getExtent().x * vrEvent.data.mouse.x;
+         processMouseEvent(eventInfo);
+
+         eventInfo.objType = SI_AXIS;
+         eventInfo.objInst = SI_YAXIS;
+         eventInfo.action = SI_MAKE;
+         eventInfo.fValue = getExtent().y * (1.0 - vrEvent.data.mouse.y);
+         processMouseEvent(eventInfo);
+      }
+      break;
+
+      case vr::VREvent_MouseButtonDown:
+      {
+         eventInfo.objType = SI_BUTTON;
+         eventInfo.objInst = (InputObjectInstances)OpenVRUtil::convertOpenVRButtonToTorqueButton(vrEvent.data.mouse.button);
+         eventInfo.action = SI_MAKE;
+         eventInfo.fValue = 1.0f;
+         processMouseEvent(eventInfo);
+      }
+      break;
+
+      case vr::VREvent_MouseButtonUp:
+      {
+         eventInfo.objType = SI_BUTTON;
+         eventInfo.objInst = (InputObjectInstances)OpenVRUtil::convertOpenVRButtonToTorqueButton(vrEvent.data.mouse.button);
+         eventInfo.action = SI_BREAK;
+         eventInfo.fValue = 0.0f;
+         processMouseEvent(eventInfo);
+      }
+      break;
+
+      case vr::VREvent_OverlayShown:
+      {
+         markDirty();
+      }
+      break;
+
+      case vr::VREvent_Quit:
+         AssertFatal(false, "WTF is going on here");
+         break;
+
+      case vr::VREvent_KeyboardCharInput:
+      case vr::VREvent_KeyboardDone:
+         updateTextControl((GuiControl*)vrEvent.data.keyboard.uUserValue);
+         break;
+      }
+
+   }
+
+   if (mThumbOverlayHandle != vr::k_ulOverlayHandleInvalid)
+   {
+      while (vr::VROverlay()->PollNextOverlayEvent(mThumbOverlayHandle, &vrEvent, sizeof(vrEvent)))
+      {
+         switch (vrEvent.eventType)
+         {
+         case vr::VREvent_OverlayShown:
+         {
+            markDirty();
+         }
+         break;
+         }
+      }
+   }
+}
+
+void OpenVROverlay::updateTextControl(GuiControl* ctrl)
+{
+   if (!ctrl)
+      return;
+
+   GuiTextCtrl* textCtrl = dynamic_cast<GuiTextCtrl*>(ctrl);
+   if (textCtrl)
+   {
+      char text[GuiTextCtrl::MAX_STRING_LENGTH];
+      vr::VROverlay()->GetKeyboardText(text, GuiTextCtrl::MAX_STRING_LENGTH);
+      textCtrl->setText(text);
+   }
+}
+
+void OpenVROverlay::onFrameRendered()
+{
+   vr::IVROverlay *overlay = vr::VROverlay();
+   if (!overlay || !mOverlayHandle)
+      return;
+
+   updateOverlay();
+
+   Point2I desiredSize = mTarget->getSize();
+   if (mStagingTexture.isNull() || mStagingTexture.getWidthHeight() != desiredSize)
+   {
+      Point2I sz = mStagingTexture.getWidthHeight();
+      mStagingTexture.set(desiredSize.x, desiredSize.y, mTargetFormat, &VRTextureProfile, "OpenVROverlay staging texture");
+   }
+   mTarget->resolveTo(mStagingTexture);
+
+   vr::Texture_t tex;
+   if (GFX->getAdapterType() == Direct3D11)
+   {
+      tex = { (void*)static_cast<GFXD3D11TextureObject*>(mStagingTexture.getPointer())->getResource(), vr::API_DirectX, vr::ColorSpace_Auto };
+   }
+#ifdef TORQUE_OPENGL
+   else if (GFX->getAdapterType() == OpenGL)
+   {
+      tex = { (void*)static_cast<GFXGLTextureObject*>(mStagingTexture.getPointer())->getHandle(), vr::API_OpenGL, vr::ColorSpace_Auto };
+
+   }
+#endif
+   else
+   {
+      return;
+   }
+
+   //mStagingTexture->dumpToDisk("PNG", "D:\\test.png");
+
+   vr::EVROverlayError err = overlay->SetOverlayTexture(mOverlayHandle, &tex);
+   if (err != vr::VROverlayError_None)
+   {
+      Con::errorf("VR: Error setting overlay texture.");
+   }
+
+   //Con::printf("Overlay visible ? %s", vr::VROverlay()->IsOverlayVisible(mOverlayHandle) ? "YES" : "NO");
+}
+
+void OpenVROverlay::enableKeyboardTranslation()
+{
+   vr::IVROverlay *overlay = vr::VROverlay();
+   if (!overlay || !mOverlayHandle)
+      return;
+
+   GuiTextEditCtrl* ctrl = dynamic_cast<GuiTextEditCtrl*>(getFirstResponder());
+   if (ctrl)
+   {
+      vr::EGamepadTextInputMode inputMode = ctrl->isPasswordText() ? vr::k_EGamepadTextInputModePassword : vr::k_EGamepadTextInputModeNormal;
+      char text[GuiTextCtrl::MAX_STRING_LENGTH + 1];
+      ctrl->getText(text);
+      overlay->ShowKeyboardForOverlay(mOverlayHandle, inputMode, vr::k_EGamepadTextInputLineModeSingleLine, ctrl->getTooltip().c_str(), GuiTextCtrl::MAX_STRING_LENGTH, text, false, (uint64_t)ctrl);
+   }
+}
+
+void OpenVROverlay::disableKeyboardTranslation()
+{
+   vr::IVROverlay *overlay = vr::VROverlay();
+   if (!overlay || !mOverlayHandle)
+      return;
+
+   overlay->HideKeyboard();
+}
+
+void OpenVROverlay::setNativeAcceleratorsEnabled(bool enabled)
+{
+}
+
+DefineEngineMethod(OpenVROverlay, showOverlay, void, (), , "")
+{
+   object->showOverlay();
+}
+
+DefineEngineMethod(OpenVROverlay, hideOverlay, void, (), , "")
+{
+   object->hideOverlay();
+}

+ 105 - 0
Engine/source/platform/input/openVR/openVROverlay.h

@@ -0,0 +1,105 @@
+#ifndef _OPENVROVERLAY_H_
+#define _OPENVROVERLAY_H_
+
+#ifndef _GUIOFFSCREENCANVAS_H_
+#include "gui/core/guiOffscreenCanvas.h"
+#endif
+#ifndef _OPENVRDEVICE_H_
+#include "platform/input/openVR/openVRProvider.h"
+#endif
+#ifndef _COLLISION_H_
+#include "collision/collision.h"
+#endif
+
+
+typedef vr::VROverlayInputMethod OpenVROverlayInputMethod;
+typedef vr::VROverlayTransformType OpenVROverlayTransformType;
+typedef vr::EGamepadTextInputMode OpenVRGamepadTextInputMode;
+typedef vr::EGamepadTextInputLineMode OpenVRGamepadTextInputLineMode;
+typedef vr::ETrackingResult OpenVRTrackingResult;
+typedef vr::ETrackingUniverseOrigin OpenVRTrackingUniverseOrigin;
+typedef vr::EOverlayDirection OpenVROverlayDirection;
+typedef vr::EVRState OpenVRState;
+
+class OpenVROverlay : public GuiOffscreenCanvas
+{
+public:
+   typedef GuiOffscreenCanvas Parent;
+
+   enum OverlayType
+   {
+      OVERLAYTYPE_OVERLAY,
+      OVERLAYTYPE_DASHBOARD,
+   };
+
+   vr::VROverlayHandle_t mOverlayHandle;
+   vr::VROverlayHandle_t mThumbOverlayHandle;
+
+   // Desired OpenVR state
+   U32 mOverlayFlags;
+   F32 mOverlayWidth;
+
+   vr::VROverlayTransformType mOverlayTransformType;
+   MatrixF mTransform;
+   vr::TrackedDeviceIndex_t mTransformDeviceIndex;
+   String mTransformDeviceComponent;
+
+
+   vr::VROverlayInputMethod mInputMethod;
+   Point2F mMouseScale;
+
+   vr::ETrackingUniverseOrigin mTrackingOrigin;
+   vr::TrackedDeviceIndex_t mControllerDeviceIndex;
+
+   GFXTexHandle mStagingTexture; ///< Texture used by openvr
+
+   ColorF mOverlayColor;
+
+   bool mOverlayTypeDirty; ///< Overlay type is dirty
+   bool mOverlayDirty; ///< Overlay properties are dirty
+   bool mManualMouseHandling;
+   OverlayType mOverlayType;
+
+   //
+
+   OpenVROverlay();
+   virtual ~OpenVROverlay();
+
+   static void initPersistFields();
+
+   DECLARE_CONOBJECT(OpenVROverlay);
+
+   bool onAdd();
+   void onRemove();
+
+   void resetOverlay();
+   void updateOverlay();
+
+   void showOverlay();
+   void hideOverlay();
+
+   bool isOverlayVisible();
+   bool isOverlayHoverTarget();
+
+   bool isGamepadFocussed();
+   bool isActiveDashboardOverlay();
+
+   MatrixF getTransformForOverlayCoordinates(const Point2F &pos);
+   bool castRay(const Point3F &origin, const Point3F &direction, RayInfo *info);
+
+   void moveGamepadFocusToNeighbour();
+
+   void handleOpenVREvents();
+   void updateTextControl(GuiControl* ctrl);
+   void onFrameRendered();
+
+   virtual void enableKeyboardTranslation();
+   virtual void disableKeyboardTranslation();
+   virtual void setNativeAcceleratorsEnabled(bool enabled);
+};
+
+typedef OpenVROverlay::OverlayType OpenVROverlayType;
+DefineEnumType(OpenVROverlayType);
+
+
+#endif

+ 1761 - 0
Engine/source/platform/input/openVR/openVRProvider.cpp

@@ -0,0 +1,1761 @@
+#include "platform/input/openVR/openVRProvider.h"
+#include "platform/input/openVR/openVROverlay.h"
+#include "platform/platformInput.h"
+#include "core/module.h"
+#include "console/engineAPI.h"
+#include "T3D/gameBase/gameConnection.h"
+#include "gui/core/guiCanvas.h"
+#include "postFx/postEffectCommon.h"
+#include "renderInstance/renderPassManager.h"
+#include "scene/sceneRenderState.h"
+#include "materials/baseMatInstance.h"
+#include "materials/materialManager.h"
+#include "console/consoleInternal.h"
+#include "core/stream/fileStream.h"
+
+#include "gfx/D3D11/gfxD3D11Device.h"
+#include "gfx/D3D11/gfxD3D11TextureObject.h"
+#include "gfx/D3D11/gfxD3D11EnumTranslate.h"
+#include "gfx/gfxStringEnumTranslate.h"
+
+
+#include "gfx/D3D9/gfxD3D9Device.h"
+#include "gfx/D3D9/gfxD3D9TextureObject.h"
+#include "gfx/D3D9/gfxD3D9EnumTranslate.h"
+
+#include "materials/matTextureTarget.h"
+
+#ifdef TORQUE_OPENGL
+#include "gfx/gl/gfxGLDevice.h"
+#include "gfx/gl/gfxGLTextureObject.h"
+#include "gfx/gl/gfxGLEnumTranslate.h"
+#endif
+
+struct OpenVRLoadedTexture
+{
+   vr::TextureID_t texId;
+   NamedTexTarget texTarget;
+};
+
+AngAxisF gLastMoveRot; // jamesu - this is just here for temp debugging
+
+namespace OpenVRUtil
+{
+   void convertTransformFromOVR(const MatrixF &inRotTMat, MatrixF& outRotation)
+   {
+      Point4F col0; inRotTMat.getColumn(0, &col0);
+      Point4F col1; inRotTMat.getColumn(1, &col1);
+      Point4F col2; inRotTMat.getColumn(2, &col2);
+      Point4F col3; inRotTMat.getColumn(3, &col3);
+
+      // Set rotation.  We need to convert from sensor coordinates to
+      // Torque coordinates.  The sensor matrix is stored row-major.
+      // The conversion is:
+      //
+      // Sensor                       Torque
+      // a b c         a  b  c        a -c  b
+      // d e f   -->  -g -h -i  -->  -g  i -h
+      // g h i         d  e  f        d -f  e
+      outRotation.setColumn(0, Point4F( col0.x, -col2.x, col1.x, 0.0f));
+      outRotation.setColumn(1, Point4F(-col0.z, col2.z, -col1.z, 0.0f));
+      outRotation.setColumn(2, Point4F( col0.y, -col2.y, col1.y, 0.0f));
+      outRotation.setColumn(3, Point4F(-col3.x, col3.z, -col3.y, 1.0f));
+   }
+
+   void convertTransformToOVR(const MatrixF& inRotation, MatrixF& outRotation)
+   {
+      Point4F col0; inRotation.getColumn(0, &col0);
+      Point4F col1; inRotation.getColumn(1, &col1);
+      Point4F col2; inRotation.getColumn(2, &col2);
+      Point4F col3; inRotation.getColumn(3, &col3);
+
+      // This is basically a reverse of what is in convertTransformFromOVR
+      outRotation.setColumn(0, Point4F(col0.x, col2.x, -col1.x, 0.0f));
+      outRotation.setColumn(1, Point4F(col0.z, col2.z, -col1.z, 0.0f));
+      outRotation.setColumn(2, Point4F(-col0.y, -col2.y, col1.y, 0.0f));
+      outRotation.setColumn(3, Point4F(-col3.x, -col3.z, col3.y, 1.0f));
+   }
+
+   MatrixF convertSteamVRAffineMatrixToMatrixFPlain(const vr::HmdMatrix34_t &mat)
+   {
+      MatrixF outMat(1);
+
+      outMat.setColumn(0, Point4F(mat.m[0][0], mat.m[1][0], mat.m[2][0], 0.0));
+      outMat.setColumn(1, Point4F(mat.m[0][1], mat.m[1][1], mat.m[2][1], 0.0));
+      outMat.setColumn(2, Point4F(mat.m[0][2], mat.m[1][2], mat.m[2][2], 0.0));
+      outMat.setColumn(3, Point4F(mat.m[0][3], mat.m[1][3], mat.m[2][3], 1.0f)); // pos
+
+      return outMat;
+   }
+
+
+
+   void convertMatrixFPlainToSteamVRAffineMatrix(const MatrixF &inMat, vr::HmdMatrix34_t &outMat)
+   {
+      Point4F row0; inMat.getRow(0, &row0);
+      Point4F row1; inMat.getRow(1, &row1);
+      Point4F row2; inMat.getRow(2, &row2);
+
+      outMat.m[0][0] = row0.x;
+      outMat.m[0][1] = row0.y;
+      outMat.m[0][2] = row0.z;
+      outMat.m[0][3] = row0.w;
+
+      outMat.m[1][0] = row1.x;
+      outMat.m[1][1] = row1.y;
+      outMat.m[1][2] = row1.z;
+      outMat.m[1][3] = row1.w;
+
+      outMat.m[2][0] = row2.x;
+      outMat.m[2][1] = row2.y;
+      outMat.m[2][2] = row2.z;
+      outMat.m[2][3] = row2.w;
+   }
+
+   U32 convertOpenVRButtonToTorqueButton(uint32_t vrButton)
+   {
+      switch (vrButton)
+      {
+      case vr::VRMouseButton_Left:
+         return KEY_BUTTON0;
+      case vr::VRMouseButton_Right:
+         return KEY_BUTTON1;
+      case vr::VRMouseButton_Middle:
+         return KEY_BUTTON2;
+      default:
+         return KEY_NULL;
+      }
+   }
+
+
+   vr::VRTextureBounds_t TorqueRectToBounds(const RectI &rect, const Point2I &widthHeight)
+   {
+      vr::VRTextureBounds_t bounds;
+      F32 xRatio = 1.0 / (F32)widthHeight.x;
+      F32 yRatio = 1.0 / (F32)widthHeight.y;
+      bounds.uMin = rect.point.x * xRatio;
+      bounds.vMin = rect.point.y * yRatio;
+      bounds.uMax = (rect.point.x + rect.extent.x) * xRatio;
+      bounds.vMax = (rect.point.y + rect.extent.y) * yRatio;
+      return bounds;
+   }
+
+   String GetTrackedDeviceString(vr::IVRSystem *pHmd, vr::TrackedDeviceIndex_t unDevice, vr::TrackedDeviceProperty prop, vr::TrackedPropertyError *peError = NULL)
+   {
+      uint32_t unRequiredBufferLen = pHmd->GetStringTrackedDeviceProperty(unDevice, prop, NULL, 0, peError);
+      if (unRequiredBufferLen == 0)
+         return "";
+
+      char *pchBuffer = new char[unRequiredBufferLen];
+      unRequiredBufferLen = pHmd->GetStringTrackedDeviceProperty(unDevice, prop, pchBuffer, unRequiredBufferLen, peError);
+      String sResult = pchBuffer;
+      delete[] pchBuffer;
+      return sResult;
+   }
+
+}
+
+//------------------------------------------------------------
+
+bool OpenVRRenderModel::init(const vr::RenderModel_t & vrModel, StringTableEntry materialName)
+{
+   SAFE_DELETE(mMaterialInstance);
+   mMaterialInstance = MATMGR->createMatInstance(materialName, getGFXVertexFormat< VertexType >());
+   if (!mMaterialInstance)
+      return false;
+
+   mLocalBox = Box3F::Invalid;
+
+   // Prepare primitives
+   U16 *indPtr = NULL;
+   GFXPrimitive *primPtr = NULL;
+   mPrimitiveBuffer.set(GFX, vrModel.unTriangleCount * 3, 1, GFXBufferTypeStatic, "OpenVR Controller buffer");
+
+   mPrimitiveBuffer.lock(&indPtr, &primPtr);
+   if (!indPtr || !primPtr)
+      return false;
+
+   primPtr->minIndex = 0;
+   primPtr->numPrimitives = vrModel.unTriangleCount;
+   primPtr->numVertices = vrModel.unVertexCount;
+   primPtr->startIndex = 0;
+   primPtr->startVertex = 0;
+   primPtr->type = GFXTriangleList;
+
+   //dMemcpy(indPtr, vrModel.rIndexData, sizeof(U16) * vrModel.unTriangleCount * 3);
+
+   for (U32 i = 0; i < vrModel.unTriangleCount; i++)
+   {
+      const U32 idx = i * 3;
+      indPtr[idx + 0] = vrModel.rIndexData[idx + 2];
+      indPtr[idx + 1] = vrModel.rIndexData[idx + 1];
+      indPtr[idx + 2] = vrModel.rIndexData[idx + 0];
+   }
+
+   mPrimitiveBuffer.unlock();
+
+   // Prepare verts
+   mVertexBuffer.set(GFX, vrModel.unVertexCount, GFXBufferTypeStatic);
+   VertexType *vertPtr = mVertexBuffer.lock();
+   if (!vertPtr)
+      return false;
+
+   // Convert to torque coordinate system
+   for (U32 i = 0; i < vrModel.unVertexCount; i++)
+   {
+      const vr::RenderModel_Vertex_t &vert = vrModel.rVertexData[i];
+      vertPtr->point = OpenVRUtil::convertPointFromOVR(vert.vPosition);
+      vertPtr->point.x = -vertPtr->point.x;
+      vertPtr->point.y = -vertPtr->point.y;
+      vertPtr->point.z = -vertPtr->point.z;
+      vertPtr->normal = OpenVRUtil::convertPointFromOVR(vert.vNormal);
+      vertPtr->normal.x = -vertPtr->normal.x;
+      vertPtr->normal.y = -vertPtr->normal.y;
+      vertPtr->normal.z = -vertPtr->normal.z;
+      vertPtr->texCoord = Point2F(vert.rfTextureCoord[0], vert.rfTextureCoord[1]);
+      vertPtr++;
+   }
+
+   mVertexBuffer.unlock();
+
+   for (U32 i = 0, sz = vrModel.unVertexCount; i < sz; i++)
+   {
+      Point3F pos = Point3F(vrModel.rVertexData[i].vPosition.v[0], vrModel.rVertexData[i].vPosition.v[1], vrModel.rVertexData[i].vPosition.v[2]);
+      mLocalBox.extend(pos);
+   }
+
+   return true;
+}
+
+void OpenVRRenderModel::draw(SceneRenderState *state, MeshRenderInst* renderInstance)
+{
+   renderInstance->type = RenderPassManager::RIT_Mesh;
+   renderInstance->matInst = state->getOverrideMaterial(mMaterialInstance);
+   if (!renderInstance->matInst)
+      return;
+
+   renderInstance->vertBuff = &mVertexBuffer;
+   renderInstance->primBuff = &mPrimitiveBuffer;
+   renderInstance->prim = NULL;
+   renderInstance->primBuffIndex = 0;
+
+   if (renderInstance->matInst->getMaterial()->isTranslucent())
+   {
+      renderInstance->type = RenderPassManager::RIT_Translucent;
+      renderInstance->translucentSort = true;
+   }
+
+   renderInstance->defaultKey = renderInstance->matInst->getStateHint();
+   renderInstance->defaultKey2 = (uintptr_t)renderInstance->vertBuff;
+}
+
+//------------------------------------------------------------
+
+
+
+DECLARE_SCOPE(OpenVR);
+IMPLEMENT_SCOPE(OpenVR, OpenVRProvider, , "");
+ConsoleDoc(
+   "@class OpenVRProvider\n"
+   "@brief This class is the interface between TorqueScript and OpenVR.\n\n"
+   "@ingroup OpenVR\n"
+   );
+
+// Enum impls
+
+ImplementEnumType(OpenVROverlayInputMethod,
+   "Types of input supported by VR Overlays. .\n\n"
+   "@ingroup OpenVR")
+{ vr::VROverlayInputMethod_None, "None" },
+{ vr::VROverlayInputMethod_Mouse, "Mouse" },
+EndImplementEnumType;
+
+ImplementEnumType(OpenVROverlayTransformType,
+   "Allows the caller to figure out which overlay transform getter to call. .\n\n"
+   "@ingroup OpenVR")
+{ vr::VROverlayTransform_Absolute, "Absolute" },
+{ vr::VROverlayTransform_TrackedDeviceRelative, "TrackedDeviceRelative" },
+{ vr::VROverlayTransform_SystemOverlay, "SystemOverlay" },
+{ vr::VROverlayTransform_TrackedComponent, "TrackedComponent" },
+EndImplementEnumType;
+
+ImplementEnumType(OpenVRGamepadTextInputMode,
+   "Text input modes for the OpenVR gamepad keyboard.\n\n"
+   "@ingroup OpenVR")
+{ vr::k_EGamepadTextInputModeNormal, "Normal", },
+{ vr::k_EGamepadTextInputModePassword, "Password", },
+{ vr::k_EGamepadTextInputModeSubmit, "Submit" },
+EndImplementEnumType;
+
+ImplementEnumType(OpenVRGamepadTextInputLineMode,
+   "Line modes for OpenVR gamepad text input.\n\n"
+   "@ingroup OpenVR")
+{ vr::k_EGamepadTextInputLineModeSingleLine, "SingleLine" },
+{ vr::k_EGamepadTextInputLineModeMultipleLines, "MultipleLines" },
+EndImplementEnumType;
+
+ImplementEnumType(OpenVRTrackingResult,
+   "Tracking result states reported for tracked devices.\n\n"
+   "@ingroup OpenVR")
+{ vr::TrackingResult_Uninitialized, "None" },
+{ vr::TrackingResult_Calibrating_InProgress, "Calibrating_InProgress" },
+{ vr::TrackingResult_Calibrating_OutOfRange, "Calibrating_OutOfRange" },
+{ vr::TrackingResult_Running_OK, "Running_Ok" },
+{ vr::TrackingResult_Running_OutOfRange, "Running_OutOfRange" },
+EndImplementEnumType;
+
+ImplementEnumType(OpenVRTrackingUniverseOrigin,
+   "Identifies which style of tracking origin the application wants to use for the poses it is requesting. .\n\n"
+   "@ingroup OpenVR")
+{ vr::TrackingUniverseSeated, "Seated" },
+{ vr::TrackingUniverseStanding, "Standing" },
+{ vr::TrackingUniverseRawAndUncalibrated, "RawAndUncalibrated" },
+EndImplementEnumType;
+
+ImplementEnumType(OpenVROverlayDirection,
+   "Directions for changing focus between overlays with the gamepad. .\n\n"
+   "@ingroup OpenVR")
+{ vr::OverlayDirection_Up, "Up" },
+{ vr::OverlayDirection_Down, "Down" },
+{ vr::OverlayDirection_Left, "Left" },
+{ vr::OverlayDirection_Right, "Right" },
+EndImplementEnumType;
+
+ImplementEnumType(OpenVRState,
+   "Status of the overall system or tracked objects. .\n\n"
+   "@ingroup OpenVR")
+{ vr::VRState_Undefined, "Undefined" },
+{ vr::VRState_Off, "Off" },
+{ vr::VRState_Searching, "Searching" },
+{ vr::VRState_Searching_Alert, "Searching_Alert" },
+{ vr::VRState_Ready, "Ready" },
+{ vr::VRState_Ready_Alert, "Ready_Alert" },
+{ vr::VRState_NotReady, "NotReady" },
+EndImplementEnumType;
+
+ImplementEnumType(OpenVRTrackedDeviceClass,
+   "Types of devices which are tracked .\n\n"
+   "@ingroup OpenVR")
+{ vr::TrackedDeviceClass_Invalid, "Invalid" },
+{ vr::TrackedDeviceClass_HMD, "HMD" },
+{ vr::TrackedDeviceClass_Controller, "Controller" },
+{ vr::TrackedDeviceClass_TrackingReference, "TrackingReference" },
+{ vr::TrackedDeviceClass_Other, "Other" },
+EndImplementEnumType;
+
+//------------------------------------------------------------
+
+// Per-tracked-device input event codes. Allocated from the input manager in
+// buildInputCodeTable() at startup and used when emitting pose/button/axis
+// input events for each of the k_unMaxTrackedDeviceCount possible devices.
+U32 OpenVRProvider::OVR_SENSORROT[vr::k_unMaxTrackedDeviceCount] = { 0 };
+U32 OpenVRProvider::OVR_SENSORROTANG[vr::k_unMaxTrackedDeviceCount] = { 0 };
+U32 OpenVRProvider::OVR_SENSORVELOCITY[vr::k_unMaxTrackedDeviceCount] = { 0 };
+U32 OpenVRProvider::OVR_SENSORANGVEL[vr::k_unMaxTrackedDeviceCount] = { 0 };
+U32 OpenVRProvider::OVR_SENSORMAGNETOMETER[vr::k_unMaxTrackedDeviceCount] = { 0 };
+U32 OpenVRProvider::OVR_SENSORPOSITION[vr::k_unMaxTrackedDeviceCount] = { 0 };
+
+U32 OpenVRProvider::OVR_BUTTONPRESSED[vr::k_unMaxTrackedDeviceCount];
+U32 OpenVRProvider::OVR_BUTTONTOUCHED[vr::k_unMaxTrackedDeviceCount];
+
+U32 OpenVRProvider::OVR_AXISNONE[vr::k_unMaxTrackedDeviceCount] = { 0 };
+U32 OpenVRProvider::OVR_AXISTRACKPAD[vr::k_unMaxTrackedDeviceCount] = { 0 };
+U32 OpenVRProvider::OVR_AXISJOYSTICK[vr::k_unMaxTrackedDeviceCount] = { 0 };
+U32 OpenVRProvider::OVR_AXISTRIGGER[vr::k_unMaxTrackedDeviceCount] = { 0 };
+
+// Accumulated rotation offset applied to the HMD; exposed to script as
+// $OpenVR::HMDRotOffset* (see staticInit()).
+EulerF OpenVRProvider::smHMDRotOffset(0);
+// Pending yaw/pitch deltas, consumed and zeroed each frame by process().
+F32 OpenVRProvider::smHMDmvYaw = 0;
+F32 OpenVRProvider::smHMDmvPitch = 0;
+// When true, the left/right move actions and left stick X feed into
+// smHMDmvYaw each frame (see process()).
+bool OpenVRProvider::smRotateYawWithMoveActions = false;
+
+// Fetch a string property from a tracked device, returning "" when the device
+// reports no value. Mirrors the helper used in the OpenVR SDK samples; any
+// error code is reported through the optional peError out-parameter.
+static String GetTrackedDeviceString(vr::IVRSystem *pHmd, vr::TrackedDeviceIndex_t unDevice, vr::TrackedDeviceProperty prop, vr::TrackedPropertyError *peError = NULL)
+{
+   // First call with a NULL buffer just queries the required buffer length.
+   const uint32_t requiredLen = pHmd->GetStringTrackedDeviceProperty(unDevice, prop, NULL, 0, peError);
+   if (requiredLen == 0)
+      return "";
+
+   char *propBuffer = new char[requiredLen];
+   pHmd->GetStringTrackedDeviceProperty(unDevice, prop, propBuffer, requiredLen, peError);
+   String value = propBuffer;
+   delete[] propBuffer;
+   return value;
+}
+
+// Engine module glue: constructs the OpenVRProvider singleton after the input
+// event manager exists, and destroys it before the manager is torn down.
+MODULE_BEGIN(OpenVRProvider)
+
+MODULE_INIT_AFTER(InputEventManager)
+MODULE_SHUTDOWN_BEFORE(InputEventManager)
+
+MODULE_INIT
+{
+   OpenVRProvider::staticInit();
+   ManagedSingleton< OpenVRProvider >::createSingleton();
+}
+
+MODULE_SHUTDOWN
+{
+   ManagedSingleton< OpenVRProvider >::deleteSingleton();
+}
+
+MODULE_END;
+
+
+// (Re)creates the shared stereo color/depth render target for the requested
+// render style. Returns false when no HMD is bound; a no-op when the mode is
+// unchanged. RS_Standard tears the VR targets down again via reset().
+bool OpenVRRenderState::setupRenderTargets(GFXDevice::GFXDeviceRenderStyles mode)
+{
+   if (!mHMD)
+      return false;
+
+   if (mRenderMode == mode)
+      return true;
+
+   mRenderMode = mode;
+
+   if (mode == GFXDevice::RS_Standard)
+   {
+      // Plain (non-VR) rendering: release the stereo targets.
+      reset(mHMD);
+      return true;
+   }
+
+   U32 sizeX, sizeY;
+   Point2I newRTSize;
+   // Per-eye resolution recommended by the OpenVR runtime.
+   mHMD->GetRecommendedRenderTargetSize(&sizeX, &sizeY);
+
+   if (mode == GFXDevice::RS_StereoSeparate)
+   {
+      // One pass per eye; both eyes use the full-size target.
+      mEyeViewport[0] = RectI(Point2I(0, 0), Point2I(sizeX, sizeY));
+      mEyeViewport[1] = RectI(Point2I(0, 0), Point2I(sizeX, sizeY));
+
+      newRTSize.x = sizeX;
+      newRTSize.y = sizeY;
+   }
+   else
+   {
+      // Side-by-side layout: double-width target, right eye offset by sizeX.
+      mEyeViewport[0] = RectI(Point2I(0, 0), Point2I(sizeX, sizeY));
+      mEyeViewport[1] = RectI(Point2I(sizeX, 0), Point2I(sizeX, sizeY));
+
+      newRTSize.x = sizeX * 2;
+      newRTSize.y = sizeY;
+   }
+
+   GFXTexHandle stereoTexture;
+   stereoTexture.set(newRTSize.x, newRTSize.y, GFXFormatR8G8B8A8, &VRTextureProfile, "OpenVR Stereo RT Color");
+   mStereoRenderTexture = stereoTexture;
+
+   GFXTexHandle stereoDepthTexture;
+   stereoDepthTexture.set(newRTSize.x, newRTSize.y, GFXFormatD24S8, &VRDepthProfile, "OpenVR Depth");
+   mStereoDepthTexture = stereoDepthTexture;
+
+   // Single render target shared by both eyes (see getStereoTargets()).
+   mStereoRT = GFX->allocRenderToTextureTarget();
+   mStereoRT->attachTexture(GFXTextureTarget::Color0, stereoTexture);
+   mStereoRT->attachTexture(GFXTextureTarget::DepthStencil, stereoDepthTexture);
+
+   // Ring of output textures the finished frame is resolved into before being
+   // handed to the compositor (see onEyeRendered()).
+   mOutputEyeTextures.init(newRTSize.x, newRTSize.y, GFXFormatR8G8B8A8, &VRTextureProfile, "OpenVR Stereo RT Color OUTPUT");
+
+   return true;
+}
+
+// Placeholder for drawing a desktop mirror/preview of the HMD view; not
+// implemented yet (getPreviewTexture() currently returns the raw stereo RT).
+void OpenVRRenderState::renderPreview()
+{
+
+}
+
+// Releases the stereo render targets and rebinds this state to the given HMD.
+// Passing NULL fully clears the state; with a valid HMD the per-eye
+// transforms/projections are refreshed.
+void OpenVRRenderState::reset(vr::IVRSystem* hmd)
+{
+   mHMD = hmd;
+
+   mStereoRT = NULL;
+
+   mStereoRenderTexture = NULL;
+   mStereoDepthTexture = NULL;
+
+   mOutputEyeTextures.clear();
+
+   if (!mHMD)
+      return;
+
+   updateHMDProjection();
+}
+
+// Pulls the current per-eye transforms and raw projection tangents from the
+// runtime. Called every frame (from updateTrackedPoses) so IPD changes made in
+// the SteamVR settings take effect immediately.
+void OpenVRRenderState::updateHMDProjection()
+{
+   // Eye-to-head transforms, inverted to get head-to-eye poses.
+   vr::HmdMatrix34_t mat = mHMD->GetEyeToHeadTransform(vr::Eye_Left);
+   mEyePose[0] = OpenVRUtil::convertSteamVRAffineMatrixToMatrixFPlain(mat);
+   mEyePose[0].inverse();
+
+   mat = mHMD->GetEyeToHeadTransform(vr::Eye_Right);
+   mEyePose[1] = OpenVRUtil::convertSteamVRAffineMatrixToMatrixFPlain(mat);
+   mEyePose[1].inverse();
+
+   // GetProjectionRaw returns signed half-tangents of the frustum edges.
+   mHMD->GetProjectionRaw(vr::Eye_Left, &mEyeFov[0].leftTan, &mEyeFov[0].rightTan, &mEyeFov[0].upTan, &mEyeFov[0].downTan);
+   mHMD->GetProjectionRaw(vr::Eye_Right, &mEyeFov[1].leftTan, &mEyeFov[1].rightTan, &mEyeFov[1].upTan, &mEyeFov[1].downTan);
+
+   // Flip up/left signs — presumably to match FovPort's all-positive tangent
+   // convention. NOTE(review): confirm against FovPort's definition.
+   mEyeFov[0].upTan = -mEyeFov[0].upTan;
+   mEyeFov[0].leftTan = -mEyeFov[0].leftTan;
+   mEyeFov[1].upTan = -mEyeFov[1].upTan;
+   mEyeFov[1].leftTan = -mEyeFov[1].leftTan;
+}
+
+// Registers the provider with the input manager and hooks GFX device events.
+// The VR runtime itself is not started here — see enable().
+OpenVRProvider::OpenVRProvider() :
+   mHMD(NULL),
+   mRenderModels(NULL),
+   mDrawCanvas(NULL),
+   mGameConnection(NULL)
+{
+   dStrcpy(mName, "openvr");
+   mDeviceType = INPUTMGR->getNextDeviceType();
+   buildInputCodeTable();
+   GFXDevice::getDeviceEventSignal().notify(this, &OpenVRProvider::_handleDeviceEvent);
+   INPUTMGR->registerDevice(this);
+   dMemset(&mLUID, '\0', sizeof(mLUID));
+
+   mTrackingSpace = vr::TrackingUniverseStanding;
+}
+
+// NOTE(review): destructor does not unhook the GFX device event signal or
+// unregister from the input manager — verify singleton teardown order makes
+// that safe.
+OpenVRProvider::~OpenVRProvider()
+{
+
+}
+
+// Exposes overlay-flag bitmask constants and HMD tuning variables to
+// TorqueScript. Called once from MODULE_INIT before the singleton exists.
+void OpenVRProvider::staticInit()
+{
+   // Overlay flags, pre-shifted so script can OR them together directly.
+   Con::setIntVariable("$OpenVR::OverlayFlags_None", 1 << (U32)vr::VROverlayFlags_None);
+   Con::setIntVariable("$OpenVR::OverlayFlags_Curved", 1 << (U32)vr::VROverlayFlags_Curved);
+   Con::setIntVariable("$OpenVR::OverlayFlags_RGSS4X", 1 << (U32)vr::VROverlayFlags_RGSS4X);
+   Con::setIntVariable("$OpenVR::OverlayFlags_NoDashboardTab", 1 << (U32)vr::VROverlayFlags_NoDashboardTab);
+   Con::setIntVariable("$OpenVR::OverlayFlags_AcceptsGamepadEvents", 1 << (U32)vr::VROverlayFlags_AcceptsGamepadEvents);
+   Con::setIntVariable("$OpenVR::OverlayFlags_ShowGamepadFocus", 1 << (U32)vr::VROverlayFlags_ShowGamepadFocus);
+   Con::setIntVariable("$OpenVR::OverlayFlags_SendVRScrollEvents", 1 << (U32)vr::VROverlayFlags_SendVRScrollEvents);
+   Con::setIntVariable("$OpenVR::OverlayFlags_SendVRTouchpadEvents", 1 << (U32)vr::VROverlayFlags_SendVRTouchpadEvents);
+   Con::setIntVariable("$OpenVR::OverlayFlags_ShowTouchPadScrollWheel", 1 << (U32)vr::VROverlayFlags_ShowTouchPadScrollWheel);
+
+   // Script-writable HMD rotation offset and per-frame yaw/pitch deltas
+   // (consumed by process()).
+   Con::addVariable("$OpenVR::HMDRotOffsetX", TypeF32, &smHMDRotOffset.x);
+   Con::addVariable("$OpenVR::HMDRotOffsetY", TypeF32, &smHMDRotOffset.y);
+   Con::addVariable("$OpenVR::HMDRotOffsetZ", TypeF32, &smHMDRotOffset.z);
+
+   Con::addVariable("$OpenVR::HMDmvYaw", TypeF32, &smHMDmvYaw);
+   Con::addVariable("$OpenVR::HMDmvPitch", TypeF32, &smHMDmvPitch);
+
+   Con::addVariable("$OpenVR::HMDRotateYawWithMoveActions", TypeBool, &smRotateYawWithMoveActions);
+}
+
+// Starts the OpenVR runtime and caches everything needed for rendering and
+// input: the IVRSystem/IVRRenderModels interfaces, the display adapter LUID
+// (Windows), and a reset render state. Returns false (with a console message)
+// on any runtime failure; the provider is then left disabled.
+bool OpenVRProvider::enable()
+{
+   mOpenVRNS = Namespace::find(StringTable->insert("OpenVR"));
+
+   // Tear down any previous session before re-initializing.
+   disable();
+
+   // Load openvr runtime
+   vr::EVRInitError eError = vr::VRInitError_None;
+   mHMD = vr::VR_Init(&eError, vr::VRApplication_Scene);
+
+   dMemset(mDeviceClassChar, '\0', sizeof(mDeviceClassChar));
+
+   if (eError != vr::VRInitError_None)
+   {
+      mHMD = NULL;
+      char buf[1024];
+      // NOTE(review): sprintf_s is MSVC-specific; consider dSprintf for
+      // portability.
+      sprintf_s(buf, sizeof(buf), "Unable to init VR runtime: %s", vr::VR_GetVRInitErrorAsEnglishDescription(eError));
+      Con::printf(buf);
+      return false;
+   }
+
+   dMemset(&mLUID, '\0', sizeof(mLUID));
+
+#ifdef TORQUE_OS_WIN32
+
+   // For windows we need to lookup the DXGI record for this and grab the LUID for the display adapter. We need the LUID since 
+   // T3D uses EnumAdapters1 not EnumAdapters whereas openvr uses EnumAdapters.
+   int32_t AdapterIdx;
+   IDXGIAdapter* EnumAdapter;
+   IDXGIFactory1* DXGIFactory;
+   mHMD->GetDXGIOutputInfo(&AdapterIdx);
+   // Get the LUID of the device
+
+   HRESULT hr = CreateDXGIFactory1(__uuidof(IDXGIFactory1), reinterpret_cast<void**>(&DXGIFactory));
+
+   if (FAILED(hr))
+      AssertFatal(false, "OpenVRProvider::enable -> CreateDXGIFactory1 call failure");
+
+   hr = DXGIFactory->EnumAdapters(AdapterIdx, &EnumAdapter);
+
+   if (FAILED(hr))
+   {
+      Con::warnf("VR: HMD device has an invalid adapter.");
+   }
+   else
+   {
+      DXGI_ADAPTER_DESC desc;
+      hr = EnumAdapter->GetDesc(&desc);
+      if (FAILED(hr))
+      {
+         Con::warnf("VR: HMD device has an invalid adapter.");
+      }
+      else
+      {
+         // Stored LUID is matched against T3D's adapter list in
+         // getDisplayDeviceId().
+         dMemcpy(&mLUID, &desc.AdapterLuid, sizeof(mLUID));
+      }
+      SAFE_RELEASE(EnumAdapter);
+   }
+
+   SAFE_RELEASE(DXGIFactory);
+#endif
+
+
+
+   mRenderModels = (vr::IVRRenderModels *)vr::VR_GetGenericInterface(vr::IVRRenderModels_Version, &eError);
+   if (!mRenderModels)
+   {
+      // Render-model interface is required; back out the whole init.
+      mHMD = NULL;
+      vr::VR_Shutdown();
+
+      char buf[1024];
+      sprintf_s(buf, sizeof(buf), "Unable to get render model interface: %s", vr::VR_GetVRInitErrorAsEnglishDescription(eError));
+      Con::printf(buf);
+      return false;
+   }
+
+   // Informational strings for logging/diagnostics.
+   mDriver = GetTrackedDeviceString(mHMD, vr::k_unTrackedDeviceIndex_Hmd, vr::Prop_TrackingSystemName_String);
+   mDisplay = GetTrackedDeviceString(mHMD, vr::k_unTrackedDeviceIndex_Hmd, vr::Prop_SerialNumber_String);
+
+   // Identity poses until the first WaitGetPoses() supplies real tracking data.
+   mHMDRenderState.mHMDPose = MatrixF(1);
+   mHMDRenderState.mEyePose[0] = MatrixF(1);
+   mHMDRenderState.mEyePose[1] = MatrixF(1);
+
+   mHMDRenderState.reset(mHMD);
+   mHMD->ResetSeatedZeroPose();
+   dMemset(mPreviousInputTrackedDevicePose, '\0', sizeof(mPreviousInputTrackedDevicePose));
+
+   mEnabled = true;
+
+   dMemset(mCurrentControllerState, '\0', sizeof(mCurrentControllerState));
+   dMemset(mPreviousCurrentControllerState, '\0', sizeof(mPreviousCurrentControllerState));
+
+   return true;
+}
+
+// Shuts the VR runtime down and clears all cached HMD state. Safe to call
+// when the runtime was never started; always returns true.
+bool OpenVRProvider::disable()
+{
+   mEnabled = false;
+
+   if (!mHMD)
+      return true;
+
+   mHMD = NULL;
+   mRenderModels = NULL;
+   mHMDRenderState.reset(NULL);
+   vr::VR_Shutdown();
+
+   return true;
+}
+
+// Allocates one input-device code per tracked device for every pose/button/
+// axis channel, then registers the matching "opvr_*%d" virtual-map names so
+// script bind statements can reference them.
+void OpenVRProvider::buildInputCodeTable()
+{
+   // Obtain all of the device codes
+   for (U32 i = 0; i < vr::k_unMaxTrackedDeviceCount; ++i)
+   {
+      OVR_SENSORROT[i] = INPUTMGR->getNextDeviceCode();
+
+      OVR_SENSORROTANG[i] = INPUTMGR->getNextDeviceCode();
+
+      OVR_SENSORVELOCITY[i] = INPUTMGR->getNextDeviceCode();
+      OVR_SENSORANGVEL[i] = INPUTMGR->getNextDeviceCode();
+      OVR_SENSORMAGNETOMETER[i] = INPUTMGR->getNextDeviceCode();
+
+      OVR_SENSORPOSITION[i] = INPUTMGR->getNextDeviceCode();
+
+
+      OVR_BUTTONPRESSED[i] = INPUTMGR->getNextDeviceCode();
+      OVR_BUTTONTOUCHED[i] = INPUTMGR->getNextDeviceCode();
+
+      OVR_AXISNONE[i] = INPUTMGR->getNextDeviceCode();
+      OVR_AXISTRACKPAD[i] = INPUTMGR->getNextDeviceCode();
+      OVR_AXISJOYSTICK[i] = INPUTMGR->getNextDeviceCode();
+      OVR_AXISTRIGGER[i] = INPUTMGR->getNextDeviceCode();
+   }
+
+   // Build out the virtual map
+   char buffer[64];
+   for (U32 i = 0; i < vr::k_unMaxTrackedDeviceCount; ++i)
+   {
+      dSprintf(buffer, 64, "opvr_sensorrot%d", i);
+      INPUTMGR->addVirtualMap(buffer, SI_ROT, OVR_SENSORROT[i]);
+
+      dSprintf(buffer, 64, "opvr_sensorrotang%d", i);
+      INPUTMGR->addVirtualMap(buffer, SI_POS, OVR_SENSORROTANG[i]);
+
+      dSprintf(buffer, 64, "opvr_sensorvelocity%d", i);
+      INPUTMGR->addVirtualMap(buffer, SI_POS, OVR_SENSORVELOCITY[i]);
+
+      dSprintf(buffer, 64, "opvr_sensorangvel%d", i);
+      INPUTMGR->addVirtualMap(buffer, SI_POS, OVR_SENSORANGVEL[i]);
+
+      dSprintf(buffer, 64, "opvr_sensormagnetometer%d", i);
+      INPUTMGR->addVirtualMap(buffer, SI_POS, OVR_SENSORMAGNETOMETER[i]);
+
+      dSprintf(buffer, 64, "opvr_sensorpos%d", i);
+      INPUTMGR->addVirtualMap(buffer, SI_POS, OVR_SENSORPOSITION[i]);
+
+      dSprintf(buffer, 64, "opvr_buttonpressed%d", i);
+      INPUTMGR->addVirtualMap(buffer, SI_INT, OVR_BUTTONPRESSED[i]);
+      dSprintf(buffer, 64, "opvr_buttontouched%d", i);
+      INPUTMGR->addVirtualMap(buffer, SI_INT, OVR_BUTTONTOUCHED[i]);
+
+      dSprintf(buffer, 64, "opvr_axis_none%d", i);
+      INPUTMGR->addVirtualMap(buffer, SI_POS, OVR_AXISNONE[i]);
+      dSprintf(buffer, 64, "opvr_axis_trackpad%d", i);
+      INPUTMGR->addVirtualMap(buffer, SI_POS, OVR_AXISTRACKPAD[i]);
+      dSprintf(buffer, 64, "opvr_axis_joystick%d", i);
+      INPUTMGR->addVirtualMap(buffer, SI_POS, OVR_AXISJOYSTICK[i]);
+      // NOTE(review): trigger is registered as SI_INT while the other axes are
+      // SI_POS — confirm this is intentional (trigger is an analog axis in
+      // OpenVR's k_eControllerAxis_Trigger).
+      dSprintf(buffer, 64, "opvr_axis_trigger%d", i);
+      INPUTMGR->addVirtualMap(buffer, SI_INT, OVR_AXISTRIGGER[i]);
+   }
+}
+
+// Per-frame input pump: applies queued yaw/pitch offsets, drains SteamVR and
+// overlay events, snapshots controller state, then refreshes tracked poses and
+// emits input-change events. Always returns true.
+bool OpenVRProvider::process()
+{
+   if (!mHMD)
+      return true;
+
+   // Compositor must be up before we can call WaitGetPoses() downstream.
+   if (!vr::VRCompositor())
+      return true;
+
+   if (smRotateYawWithMoveActions)
+   {
+      // Feed strafe-style move actions into the HMD yaw offset.
+      smHMDmvYaw += MoveManager::mRightAction - MoveManager::mLeftAction + MoveManager::mXAxis_L;
+   }
+
+   // Update HMD rotation offset
+   smHMDRotOffset.z += smHMDmvYaw;
+   smHMDRotOffset.x += smHMDmvPitch;
+
+   // Wrap pitch/yaw into [-pi, pi].
+   while (smHMDRotOffset.x < -M_PI_F)
+      smHMDRotOffset.x += M_2PI_F;
+   while (smHMDRotOffset.x > M_PI_F)
+      smHMDRotOffset.x -= M_2PI_F;
+   while (smHMDRotOffset.z < -M_PI_F)
+      smHMDRotOffset.z += M_2PI_F;
+   while (smHMDRotOffset.z > M_PI_F)
+      smHMDRotOffset.z -= M_2PI_F;
+
+   // Deltas are consumed; script sets fresh values each frame.
+   smHMDmvYaw = 0;
+   smHMDmvPitch = 0;
+
+   // Process SteamVR events
+   vr::VREvent_t event;
+   while (mHMD->PollNextEvent(&event, sizeof(event)))
+   {
+      processVREvent(event);
+   }
+
+   // process overlay events
+   for (U32 i = 0; i < mOverlays.size(); i++)
+   {
+      mOverlays[i]->handleOpenVREvents();
+   }
+
+   // Process SteamVR controller state
+   // NOTE(review): the two-argument GetControllerState() is the pre-1.0.4
+   // OpenVR signature; newer SDKs require a size parameter.
+   for (vr::TrackedDeviceIndex_t unDevice = 0; unDevice < vr::k_unMaxTrackedDeviceCount; unDevice++)
+   {
+      vr::VRControllerState_t state;
+      if (mHMD->GetControllerState(unDevice, &state))
+      {
+        mCurrentControllerState[unDevice] = state;
+      }
+   }
+
+   // Update input poses
+   updateTrackedPoses();
+   submitInputChanges();
+
+   return true;
+}
+
+// Per-frame eye poses are available whenever an HMD is connected.
+bool OpenVRProvider::providesFrameEyePose() const
+{
+   return NULL != mHMD;
+}
+
+// Map an OpenVR vector into Torque space (axis swap/negation — assumed to
+// match convertTransformFromOVR; TODO confirm handedness).
+inline Point3F OpenVRVecToTorqueVec(vr::HmdVector3_t vec)
+{
+   const float *v = vec.v;
+   return Point3F(-v[0], v[2], -v[1]);
+}
+
+// Convert an OpenVR device transform into a Torque-space rotation + position
+// suitable for an IDevicePose.
+void OpenVRTransformToRotPos(MatrixF mat, QuatF &outRot, Point3F &outPos)
+{
+   MatrixF torqueMat(1);
+   OpenVRUtil::convertTransformFromOVR(mat, torqueMat);
+
+   const Point3F translation = torqueMat.getPosition();
+   outRot = QuatF(torqueMat);
+   outPos = translation;
+   // jamesu - position needs to be multiplied by rotation in this case
+   outRot.mulP(translation, &outPos);
+}
+
+// Same as OpenVRTransformToRotPos, but also hands back the converted
+// Torque-space matrix itself.
+void OpenVRTransformToRotPosMat(MatrixF mat, QuatF &outRot, Point3F &outPos, MatrixF &outMat)
+{
+   MatrixF torqueMat(1);
+   OpenVRUtil::convertTransformFromOVR(mat, torqueMat);
+
+   const Point3F translation = torqueMat.getPosition();
+   outRot = QuatF(torqueMat);
+   outPos = translation;
+   // jamesu - position needs to be multiplied by rotation in this case
+   outRot.mulP(translation, &outPos);
+   outMat = torqueMat;
+}
+
+// Fills pose with the current-frame pose for an eye. eyeId -1 requests the
+// head pose itself; 0/1 combine the per-eye offset with the head pose.
+// Velocity fields are not tracked per-eye and are always zeroed.
+void OpenVRProvider::getFrameEyePose(IDevicePose *pose, S32 eyeId) const
+{
+   AssertFatal(eyeId >= -1 && eyeId < 2, "Out of bounds eye");
+
+   // Same composition order as the openvr example code.
+   MatrixF mat = (eyeId == -1) ? mHMDRenderState.mHMDPose
+                               : mHMDRenderState.mEyePose[eyeId] * mHMDRenderState.mHMDPose;
+
+#ifdef DEBUG_DISPLAY_POSE
+   pose->originalMatrix = mat;
+   OpenVRTransformToRotPosMat(mat, pose->orientation, pose->position, pose->actualMatrix);
+#else
+   OpenVRTransformToRotPos(mat, pose->orientation, pose->position);
+#endif
+
+   pose->velocity = Point3F(0);
+   pose->angularVelocity = Point3F(0);
+}
+
+bool OpenVRProvider::providesEyeOffsets() const
+{
+   return NULL != mHMD;
+}
+
+/// Returns eye offset not taking into account any position tracking info
+void OpenVRProvider::getEyeOffsets(Point3F *dest) const
+{
+   for (U32 eye = 0; eye < 2; eye++)
+   {
+      const Point3F p = mHMDRenderState.mEyePose[eye].getPosition();
+      dest[eye] = Point3F(-p.x, p.y, p.z); // convert from vr-space
+   }
+}
+
+bool OpenVRProvider::providesFovPorts() const
+{
+   return NULL != mHMD;
+}
+
+// Copies both per-eye fov ports (kept current by updateHMDProjection()).
+void OpenVRProvider::getFovPorts(FovPort *out) const
+{
+   out[0] = mHMDRenderState.mEyeFov[0];
+   out[1] = mHMDRenderState.mEyeFov[1];
+}
+
+void OpenVRProvider::getStereoViewports(RectI *out) const
+{
+   for (U32 eye = 0; eye < 2; eye++)
+      out[eye] = mHMDRenderState.mEyeViewport[eye];
+}
+
+// Both eyes render into the same shared stereo render target.
+void OpenVRProvider::getStereoTargets(GFXTextureTarget **out) const
+{
+   out[0] = mHMDRenderState.mStereoRT;
+   out[1] = mHMDRenderState.mStereoRT;
+}
+
+// Attaches the canvas the VR view is mirrored to. On a canvas change (or when
+// the HMD render state has been cleared) the render targets are reset to the
+// plain RS_Standard path; setDrawMode() switches to a stereo mode afterwards.
+// Fix: removed the unused local 'peError' the original declared and never read.
+void OpenVRProvider::setDrawCanvas(GuiCanvas *canvas)
+{
+   if (!vr::VRCompositor())
+   {
+      Con::errorf("VR: Compositor initialization failed. See log file for details\n");
+      return;
+   }
+
+   if (mDrawCanvas != canvas || mHMDRenderState.mHMD == NULL)
+   {
+      mHMDRenderState.setupRenderTargets(GFXDevice::RS_Standard);
+   }
+   mDrawCanvas = canvas;
+}
+
+// Switches the HMD render targets to the given stereo render style.
+void OpenVRProvider::setDrawMode(GFXDevice::GFXDeviceRenderStyles style)
+{
+   mHMDRenderState.setupRenderTargets(style);
+}
+
+// Game connection whose control object consumes VR input (set from script).
+void OpenVRProvider::setCurrentConnection(GameConnection *connection)
+{
+   mGameConnection = connection;
+}
+
+GameConnection* OpenVRProvider::getCurrentConnection()
+{
+   return mGameConnection;
+}
+
+// Texture shown as the desktop mirror of the VR view.
+GFXTexHandle OpenVRProvider::getPreviewTexture()
+{
+   return mHMDRenderState.mStereoRenderTexture; // TODO: render distortion preview
+}
+
+// Called from _handleDeviceEvent at the start of each GFX frame. Currently a
+// stub beyond the HMD guard.
+void OpenVRProvider::onStartFrame()
+{
+   if (!mHMD)
+      return;
+
+}
+
+// Called from _handleDeviceEvent after each GFX frame. Currently a stub.
+void OpenVRProvider::onEndFrame()
+{
+   if (!mHMD)
+      return;
+}
+
+// Called after an eye finishes rendering. Resolves the shared stereo RT into
+// an output texture and submits it to the OpenVR compositor with the correct
+// per-eye bounds, using the API-specific native texture handle. In
+// side-by-side mode both eyes are submitted together once the right eye
+// (index 1) is done.
+void OpenVRProvider::onEyeRendered(U32 index)
+{
+   if (!mHMD)
+      return;
+
+   vr::EVRCompositorError err = vr::VRCompositorError_None;
+   vr::VRTextureBounds_t bounds;
+   U32 textureIdxToSubmit = index;
+
+   GFXTexHandle eyeTex = mHMDRenderState.mOutputEyeTextures.getTextureHandle();
+   if (mHMDRenderState.mRenderMode == GFXDevice::RS_StereoSeparate)
+   {
+      // One resolve + submit per eye.
+      mHMDRenderState.mStereoRT->resolveTo(eyeTex);
+      mHMDRenderState.mOutputEyeTextures.advance();
+   }
+   else
+   {
+      // assuming side-by-side, so the right eye will be next
+      if (index == 1)
+      {
+         mHMDRenderState.mStereoRT->resolveTo(eyeTex);
+         mHMDRenderState.mOutputEyeTextures.advance();
+      }
+      else
+      {
+         // Left eye of a side-by-side frame: nothing to submit yet.
+         return;
+      }
+   }
+
+   if (GFX->getAdapterType() == Direct3D11)
+   {
+      vr::Texture_t eyeTexture;
+      if (mHMDRenderState.mRenderMode == GFXDevice::RS_StereoSeparate)
+      {
+         // whatever eye we are on
+         eyeTexture = { (void*)static_cast<GFXD3D11TextureObject*>(eyeTex.getPointer())->get2DTex(), vr::API_DirectX, vr::ColorSpace_Gamma };
+         bounds = OpenVRUtil::TorqueRectToBounds(mHMDRenderState.mEyeViewport[index], mHMDRenderState.mStereoRenderTexture.getWidthHeight());
+         err = vr::VRCompositor()->Submit((vr::EVREye)(vr::Eye_Left + index), &eyeTexture, &bounds);
+      }
+      else
+      {
+         // left & right at the same time
+         eyeTexture = { (void*)static_cast<GFXD3D11TextureObject*>(eyeTex.getPointer())->get2DTex(), vr::API_DirectX, vr::ColorSpace_Gamma };
+         bounds = OpenVRUtil::TorqueRectToBounds(mHMDRenderState.mEyeViewport[0], mHMDRenderState.mStereoRenderTexture.getWidthHeight());
+         err = vr::VRCompositor()->Submit((vr::EVREye)(vr::Eye_Left), &eyeTexture, &bounds);
+         bounds = OpenVRUtil::TorqueRectToBounds(mHMDRenderState.mEyeViewport[1], mHMDRenderState.mStereoRenderTexture.getWidthHeight());
+         err = vr::VRCompositor()->Submit((vr::EVREye)(vr::Eye_Right), &eyeTexture, &bounds);
+      }
+   }
+   else if (GFX->getAdapterType() == Direct3D9)
+   {
+      // D3D9 submission is not implemented.
+      //vr::Texture_t eyeTexture = { (void*)static_cast<GFXD3D9TextureObject*>(mHMDRenderState.mStereoRenderTextures[index].getPointer())->get2DTex(), vr::API_DirectX, vr::ColorSpace_Gamma };
+      //err = vr::VRCompositor()->Submit((vr::EVREye)(vr::Eye_Left + index), &eyeTexture);
+   }
+#ifdef TORQUE_OPENGL
+   else if (GFX->getAdapterType() == OpenGL)
+   {
+      vr::Texture_t eyeTexture;
+      if (mHMDRenderState.mRenderMode == GFXDevice::RS_StereoSeparate)
+      {
+         // whatever eye we are on
+         eyeTexture = { (void*)static_cast<GFXGLTextureObject*>(eyeTex.getPointer())->getHandle(), vr::API_OpenGL, vr::ColorSpace_Gamma };
+         bounds = OpenVRUtil::TorqueRectToBounds(mHMDRenderState.mEyeViewport[index], mHMDRenderState.mStereoRenderTexture.getWidthHeight());
+         err = vr::VRCompositor()->Submit((vr::EVREye)(vr::Eye_Left + index), &eyeTexture, &bounds);
+      }
+      else
+      {
+         // left & right at the same time
+         eyeTexture = { (void*)static_cast<GFXGLTextureObject*>(eyeTex.getPointer())->getHandle(), vr::API_OpenGL, vr::ColorSpace_Gamma };
+         bounds = OpenVRUtil::TorqueRectToBounds(mHMDRenderState.mEyeViewport[0], mHMDRenderState.mStereoRenderTexture.getWidthHeight());
+         err = vr::VRCompositor()->Submit((vr::EVREye)(vr::Eye_Left), &eyeTexture, &bounds);
+         bounds = OpenVRUtil::TorqueRectToBounds(mHMDRenderState.mEyeViewport[1], mHMDRenderState.mStereoRenderTexture.getWidthHeight());
+         err = vr::VRCompositor()->Submit((vr::EVREye)(vr::Eye_Right), &eyeTexture, &bounds);
+      }
+   }
+#endif
+
+   // NOTE(review): only the result of the last Submit() reaches this assert,
+   // and the D3D9 path leaves err at VRCompositorError_None.
+   AssertFatal(err == vr::VRCompositorError_None, "VR compositor error!");
+}
+
+// Selects standing (room-scale) vs seated tracking and pushes the choice to
+// the compositor when it is available; updateTrackedPoses() re-applies it
+// otherwise.
+void OpenVRProvider::setRoomTracking(bool room)
+{
+   mTrackingSpace = room ? vr::TrackingUniverseStanding : vr::TrackingUniverseSeated;
+
+   vr::IVRCompositor* compositor = vr::VRCompositor();
+   if (compositor)
+      compositor->SetTrackingSpace(mTrackingSpace);
+}
+
+// GFX device event hook: drives per-frame VR bookkeeping and per-eye
+// compositor submission. Always returns true so other listeners still run.
+bool OpenVRProvider::_handleDeviceEvent(GFXDevice::GFXDeviceEventType evt)
+{
+   // Events can still arrive while the singleton is being torn down.
+   if (!ManagedSingleton<OpenVRProvider>::instanceOrNull())
+      return true;
+
+   switch (evt)
+   {
+   case GFXDevice::deStartOfFrame:
+      onStartFrame();
+      break;
+
+   case GFXDevice::dePostFrame:
+      onEndFrame();
+      break;
+
+   case GFXDevice::deDestroy:
+      // Need to reinit rendering
+      break;
+
+   case GFXDevice::deLeftStereoFrameRendered:
+      onEyeRendered(0);
+      break;
+
+   case GFXDevice::deRightStereoFrameRendered:
+      onEyeRendered(1);
+      break;
+
+   default:
+      break;
+   }
+
+   return true;
+}
+
+// Returns the index of the T3D graphics adapter that drives the HMD by
+// matching the LUID captured in enable() against the enumerated D3D11
+// adapters. Returns -1 when unknown (non-Windows, non-D3D11, or no match).
+S32 OpenVRProvider::getDisplayDeviceId() const
+{
+#if defined(TORQUE_OS_WIN64) || defined(TORQUE_OS_WIN32)
+   if (GFX && GFX->getAdapterType() == Direct3D11)
+   {
+      Vector<GFXAdapter*> adapterList;
+      GFXD3D11Device::enumerateAdapters(adapterList);
+
+      for (U32 i = 0, sz = adapterList.size(); i < sz; i++)
+      {
+         GFXAdapter* adapter = adapterList[i];
+         if (dMemcmp(&adapter->mLUID, &mLUID, sizeof(mLUID)) == 0)
+         {
+            return adapter->mIndex;
+         }
+      }
+   }
+#endif
+
+   return -1;
+}
+
+// Dispatches a single SteamVR event. All registered listeners see it through
+// mVREventSignal; the switch below is a placeholder for engine-side handling
+// of device lifecycle events (currently no-ops).
+void OpenVRProvider::processVREvent(const vr::VREvent_t & evt)
+{
+   mVREventSignal.trigger(evt);
+
+   switch (evt.eventType)
+   {
+   case vr::VREvent_InputFocusCaptured:
+      // TODO: notify script (Con::executef)
+      break;
+
+   case vr::VREvent_TrackedDeviceActivated:
+      // TODO: set up a render model for the newly attached device
+      break;
+
+   case vr::VREvent_TrackedDeviceDeactivated:
+      // TODO: handle device deactivation
+      break;
+
+   case vr::VREvent_TrackedDeviceUpdated:
+      // TODO: handle device update
+      break;
+   }
+}
+
+// Blocks on the compositor (WaitGetPoses) for this frame's device poses, then
+// converts each valid pose into Torque space and caches it in
+// mCurrentDevicePose. The HMD's pose additionally feeds mHMDRenderState
+// (inverted, as a view matrix).
+void OpenVRProvider::updateTrackedPoses()
+{
+   if (!mHMD)
+      return;
+
+   vr::IVRCompositor* compositor = vr::VRCompositor();
+
+   if (!compositor)
+      return;
+
+   // Re-apply the tracking space if something else changed it.
+   if (compositor->GetTrackingSpace() != mTrackingSpace)
+   {
+      compositor->SetTrackingSpace(mTrackingSpace);
+   }
+
+   compositor->WaitGetPoses(mTrackedDevicePose, vr::k_unMaxTrackedDeviceCount, NULL, 0);
+
+   // Make sure we're using the latest eye offset in case user has changed IPD
+   mHMDRenderState.updateHMDProjection();
+
+   mValidPoseCount = 0;
+
+   // NOTE(review): signed loop index vs. unsigned k_unMaxTrackedDeviceCount;
+   // harmless at this size but triggers sign-compare warnings.
+   for (int nDevice = 0; nDevice < vr::k_unMaxTrackedDeviceCount; ++nDevice)
+   {
+      IDevicePose &inPose = mCurrentDevicePose[nDevice];
+      if (mTrackedDevicePose[nDevice].bPoseIsValid)
+      {
+         mValidPoseCount++;
+         MatrixF mat = OpenVRUtil::convertSteamVRAffineMatrixToMatrixFPlain(mTrackedDevicePose[nDevice].mDeviceToAbsoluteTracking);
+
+         if (nDevice == vr::k_unTrackedDeviceIndex_Hmd)
+         {
+            mHMDRenderState.mHMDPose = mat;
+
+         /*
+            MatrixF rotOffset(1);
+            EulerF localRot(-smHMDRotOffset.x, -smHMDRotOffset.z, smHMDRotOffset.y);
+
+            // NOTE: offsetting before is probably the best we're going to be able to do here, since if we apply the matrix AFTER 
+            // we will get correct movements relative to the camera HOWEVER this also distorts any future movements from the HMD since 
+            // we will then be on a really weird rotation axis.
+            QuatF(localRot).setMatrix(&rotOffset);
+            rotOffset.inverse();
+            mHMDRenderState.mHMDPose = mat = rotOffset * mHMDRenderState.mHMDPose;
+         */
+
+            // jamesu - store the last rotation for temp debugging
+            MatrixF torqueMat(1);
+            OpenVRUtil::convertTransformFromOVR(mat, torqueMat);
+            gLastMoveRot = AngAxisF(torqueMat);
+            //Con::printf("gLastMoveRot = %f,%f,%f,%f", gLastMoveRot.axis.x, gLastMoveRot.axis.y, gLastMoveRot.axis.z, gLastMoveRot.angle);
+            // Render state wants the inverse (world-to-head view matrix).
+            mHMDRenderState.mHMDPose.inverse();
+         }
+
+         // NOTE(review): 'outPose' is actually the *source* SteamVR pose here;
+         // the name is misleading.
+         vr::TrackedDevicePose_t &outPose = mTrackedDevicePose[nDevice];
+         OpenVRTransformToRotPos(mat, inPose.orientation, inPose.position);
+
+#ifdef DEBUG_DISPLAY_POSE
+       OpenVRUtil::convertTransformFromOVR(mat, inPose.actualMatrix);
+       inPose.originalMatrix = mat;
+#endif
+
+         inPose.state = outPose.eTrackingResult;
+         inPose.valid = outPose.bPoseIsValid;
+         inPose.connected = outPose.bDeviceIsConnected;
+
+         inPose.velocity = OpenVRVecToTorqueVec(outPose.vVelocity);
+         inPose.angularVelocity = OpenVRVecToTorqueVec(outPose.vAngularVelocity);
+      }
+      else
+      {
+         inPose.valid = false;
+      }
+   }
+}
+
+// Diffs this frame's tracked-device poses against the previous frame and
+// emits input events for any channel that changed. Only devices mapped to an
+// event index via mapDeviceToEvent() produce events; invalid/disconnected
+// devices are skipped.
+// Fix: the angular-velocity event previously read curPose.velocity (linear)
+// instead of curPose.angularVelocity.
+void OpenVRProvider::submitInputChanges()
+{
+   // Diff current frame with previous frame
+   for (U32 i = 0; i < vr::k_unMaxTrackedDeviceCount; i++)
+   {
+      // References: nothing mutates these arrays inside this loop.
+      const IDevicePose &curPose = mCurrentDevicePose[i];
+      const IDevicePose &prevPose = mPreviousInputTrackedDevicePose[i];
+
+      S32 eventIdx = -1;
+
+      if (!mDeviceEventMap.tryGetValue(i, eventIdx) || eventIdx < 0)
+         continue;
+
+      if (!curPose.valid || !curPose.connected)
+         continue;
+
+      if (curPose.orientation != prevPose.orientation)
+      {
+         AngAxisF axisAA(curPose.orientation);
+         INPUTMGR->buildInputEvent(mDeviceType, 0, SI_ROT, OVR_SENSORROT[eventIdx], SI_MOVE, axisAA);
+      }
+
+      if (curPose.position != prevPose.position)
+      {
+         INPUTMGR->buildInputEvent(mDeviceType, 0, SI_POS, OVR_SENSORPOSITION[eventIdx], SI_MOVE, curPose.position);
+      }
+
+      if (curPose.velocity != prevPose.velocity)
+      {
+         // Linear velocity is passed through unconverted.
+         VectorF linearVel;
+         linearVel.x = curPose.velocity.x;
+         linearVel.y = curPose.velocity.y;
+         linearVel.z = curPose.velocity.z;
+
+         INPUTMGR->buildInputEvent(mDeviceType, 0, SI_POS, OVR_SENSORVELOCITY[eventIdx], SI_MOVE, linearVel);
+      }
+
+      if (curPose.angularVelocity != prevPose.angularVelocity)
+      {
+         // Convert angular velocity (radians/sec from OpenVR) to degrees.
+         VectorF angles;
+         angles[0] = mRadToDeg(curPose.angularVelocity.x);
+         angles[1] = mRadToDeg(curPose.angularVelocity.y);
+         angles[2] = mRadToDeg(curPose.angularVelocity.z);
+
+         INPUTMGR->buildInputEvent(mDeviceType, 0, SI_POS, OVR_SENSORANGVEL[eventIdx], SI_MOVE, angles);
+      }
+      /*
+      if (curPose.connected != prevPose.connected)
+      {
+         if (Con::isFunction("onOVRConnectionChanged"))
+         {
+            Con::executef("onOVRConnectionChanged", curPose.connected);
+         }
+      }*/
+
+      if (curPose.state != prevPose.state)
+      {
+         if (Con::isFunction("onOVRStateChanged"))
+         {
+            Con::executef("onOVRStateChanged", curPose.state);
+         }
+      }
+   }
+
+   // Current frame becomes the baseline for the next diff.
+   dMemcpy(mPreviousInputTrackedDevicePose, mCurrentDevicePose, sizeof(mPreviousInputTrackedDevicePose));
+}
+
+// Re-centers the seated zero pose on the HMD's current position/orientation.
+void OpenVRProvider::resetSensors()
+{
+   if (!mHMD)
+      return;
+
+   mHMD->ResetSeatedZeroPose();
+}
+
+// Maps a tracked-device index to the event-code slot used when emitting input
+// events for it (see submitInputChanges()).
+void OpenVRProvider::mapDeviceToEvent(U32 deviceIdx, S32 eventIdx)
+{
+   mDeviceEventMap[deviceIdx] = eventIdx;
+}
+
+// Clears all device-to-event mappings; unmapped devices emit no input events.
+void OpenVRProvider::resetEventMap()
+{
+   mDeviceEventMap.clear();
+}
+
+// Returns the most recent pose for a tracked device. Out-of-range indices get
+// a pose flagged as disconnected and invalid.
+IDevicePose OpenVRProvider::getTrackedDevicePose(U32 idx)
+{
+   if (idx < vr::k_unMaxTrackedDeviceCount)
+      return mCurrentDevicePose[idx];
+
+   IDevicePose invalidPose;
+   invalidPose.connected = false;
+   invalidPose.valid = false;
+   return invalidPose;
+}
+
+// Adds an overlay to the list serviced each frame in process().
+void OpenVRProvider::registerOverlay(OpenVROverlay* overlay)
+{
+   mOverlays.push_back(overlay);
+}
+
+// Removes a previously registered overlay; a no-op if it was never registered.
+void OpenVRProvider::unregisterOverlay(OpenVROverlay* overlay)
+{
+   // NOTE(review): find_next with no start index — presumably searches from
+   // the beginning; confirm against Torque's Vector API.
+   S32 index = mOverlays.find_next(overlay);
+   if (index != -1)
+   {
+      mOverlays.erase(index);
+   }
+}
+
+// Reserves a slot in mLoadedTextures for the given OpenVR render-model texture
+// id and registers a named texture target ("openvrtex_<id>") for it. The
+// actual texture data is loaded asynchronously later (entry starts in the
+// VRRenderModelError_Loading state). Returns the slot index; idempotent for
+// ids already seen.
+const S32 OpenVRProvider::preloadRenderModelTexture(U32 index)
+{
+   S32 idx = -1;
+   if (mLoadedTextureLookup.tryGetValue(index, idx))
+      return idx;
+
+   char buffer[256];
+   dSprintf(buffer, sizeof(buffer), "openvrtex_%u", index);
+
+   OpenVRProvider::LoadedRenderTexture loadedTexture;
+   loadedTexture.vrTextureId = index;
+   loadedTexture.vrTexture = NULL;
+   loadedTexture.texture = NULL;
+   loadedTexture.textureError = vr::VRRenderModelError_Loading;
+   loadedTexture.targetTexture = new NamedTexTarget();
+   loadedTexture.targetTexture->registerWithName(buffer);
+   mLoadedTextures.push_back(loadedTexture);
+   mLoadedTextureLookup[index] = mLoadedTextures.size() - 1;
+
+   return mLoadedTextures.size() - 1;
+}
+
+// Reserves a slot in mLoadedModels for the named OpenVR render model. The
+// model (and its texture) are loaded asynchronously by getRenderModel();
+// the entry starts in the VRRenderModelError_Loading state. Returns the slot
+// index; idempotent for names already seen.
+const S32 OpenVRProvider::preloadRenderModel(StringTableEntry name)
+{
+   S32 idx = -1;
+   if (mLoadedModelLookup.tryGetValue(name, idx))
+      return idx;
+
+   OpenVRProvider::LoadedRenderModel loadedModel;
+   loadedModel.name = name;
+   loadedModel.model = NULL;
+   loadedModel.vrModel = NULL;
+   loadedModel.modelError = vr::VRRenderModelError_Loading;
+   loadedModel.loadedTexture = false;
+   loadedModel.textureId = -1;
+   mLoadedModels.push_back(loadedModel);
+   mLoadedModelLookup[name] = mLoadedModels.size() - 1;
+
+   return mLoadedModels.size() - 1;
+}
+
+
+// Incrementally loads the render model registered at slot 'idx'
+// (see preloadRenderModel). OpenVR loads models asynchronously, so this is
+// designed to be polled:
+//   - returns false while the async load is still in progress;
+//   - returns true when loading has finished, with 'failed' set if it errored.
+// On success, '*ret' receives the loaded model when 'ret' is non-NULL.
+bool OpenVRProvider::getRenderModel(S32 idx, OpenVRRenderModel **ret, bool &failed)
+{
+   // BUG FIX: was 'idx > mLoadedModels.size()', which let idx == size()
+   // through to an out-of-range vector access below.
+   if (idx < 0 || idx >= mLoadedModels.size())
+   {
+      failed = true;
+      return true;
+   }
+
+   OpenVRProvider::LoadedRenderModel &loadedModel = mLoadedModels[idx];
+   //Con::printf("RenderModel[%i] STAGE 1", idx);
+
+   failed = false;
+
+   // Any error code beyond 'Loading' means a previous attempt failed for good.
+   if (loadedModel.modelError > vr::VRRenderModelError_Loading)
+   {
+      failed = true;
+      return true;
+   }
+
+   // Stage 1 : model geometry
+   if (!loadedModel.model)
+   {
+      loadedModel.modelError = vr::VRRenderModels()->LoadRenderModel_Async(loadedModel.name, &loadedModel.vrModel);
+      //Con::printf(" vr::VRRenderModels()->LoadRenderModel_Async(\"%s\", %x); -> %i", loadedModel.name, &loadedModel.vrModel, loadedModel.modelError);
+      if (loadedModel.modelError == vr::VRRenderModelError_None)
+      {
+         if (loadedModel.vrModel == NULL)
+         {
+            failed = true;
+            return true;
+         }
+         // Load the model
+         loadedModel.model = new OpenVRRenderModel();
+      }
+      else if (loadedModel.modelError == vr::VRRenderModelError_Loading)
+      {
+         // Still loading; caller should poll again next frame.
+         return false;
+      }
+   }
+
+   //Con::printf("RenderModel[%i] STAGE 2 (texId == %i)", idx, loadedModel.vrModel->diffuseTextureId);
+
+   // Stage 2 : diffuse texture (itself loaded asynchronously)
+   if (!loadedModel.loadedTexture && loadedModel.model)
+   {
+      if (loadedModel.textureId == -1)
+      {
+         loadedModel.textureId = preloadRenderModelTexture(loadedModel.vrModel->diffuseTextureId);
+      }
+
+      if (loadedModel.textureId == -1)
+      {
+         failed = true;
+         return true;
+      }
+
+      if (!getRenderModelTexture(loadedModel.textureId, NULL, failed))
+      {
+         return false;
+      }
+
+      if (failed)
+      {
+         return true;
+      }
+
+      loadedModel.loadedTexture = true;
+
+      //Con::printf("RenderModel[%i] GOT TEXTURE");
+
+      // Now we can load the model. Note we first need to get a Material for the mapped texture
+      NamedTexTarget *namedTexture = mLoadedTextures[loadedModel.textureId].targetTexture;
+      String materialName = MATMGR->getMapEntry(namedTexture->getName().c_str());
+      if (materialName.isEmpty())
+      {
+         // No material mapped yet; synthesize a placeholder emissive material
+         // mapped to the named texture target ("#<targetName>").
+         char buffer[256];
+         dSprintf(buffer, sizeof(buffer), "#%s", namedTexture->getName().c_str());
+         materialName = buffer;
+
+         //Con::printf("RenderModel[%i] materialName == %s", idx, buffer);
+
+         Material* mat = new Material();
+         mat->mMapTo = namedTexture->getName();
+         mat->mDiffuseMapFilename[0] = buffer;
+         mat->mEmissive[0] = true;
+
+         dSprintf(buffer, sizeof(buffer), "%s_Material", namedTexture->getName().c_str());
+         if (!mat->registerObject(buffer))
+         {
+            Con::errorf("Couldn't create placeholder openvr material %s!", buffer);
+            failed = true;
+            return true;
+         }
+
+         materialName = buffer;
+      }
+      
+      loadedModel.model->init(*loadedModel.vrModel, materialName);
+   }
+
+   // Final verdict: either stage may have recorded a hard error.
+   if ((loadedModel.modelError > vr::VRRenderModelError_Loading) || 
+       (loadedModel.textureId >= 0 && mLoadedTextures[loadedModel.textureId].textureError > vr::VRRenderModelError_Loading))
+   {
+      failed = true;
+   }
+
+   if (!failed && ret)
+   {
+      *ret = loadedModel.model;
+   }
+   return true;
+}
+
+// Incrementally loads the render-model texture at slot 'idx' (indexes
+// mLoadedTextures, see preloadRenderModelTexture). Same polling contract as
+// getRenderModel: returns false while loading, true when done ('failed' set
+// on error). On success '*outTex' receives the texture when non-NULL.
+bool OpenVRProvider::getRenderModelTexture(S32 idx, GFXTextureObject **outTex, bool &failed)
+{
+   // BUG FIX: was 'idx > mLoadedModels.size()' — wrong container (this
+   // function indexes mLoadedTextures) and off-by-one at idx == size().
+   if (idx < 0 || idx >= mLoadedTextures.size())
+   {
+      failed = true;
+      return true;
+   }
+
+   failed = false;
+
+   OpenVRProvider::LoadedRenderTexture &loadedTexture = mLoadedTextures[idx];
+
+   if (loadedTexture.textureError > vr::VRRenderModelError_Loading)
+   {
+      failed = true;
+      return true;
+   }
+
+   if (!loadedTexture.texture)
+   {
+      if (!loadedTexture.vrTexture)
+      {
+         loadedTexture.textureError = vr::VRRenderModels()->LoadTexture_Async(loadedTexture.vrTextureId, &loadedTexture.vrTexture);
+         if (loadedTexture.textureError == vr::VRRenderModelError_None)
+         {
+            // Load the texture: OpenVR hands back RGBA8 data; swizzle into a
+            // GBitmap which the GFX layer takes ownership of via tex.set().
+            GFXTexHandle tex;
+
+            const U32 sz = loadedTexture.vrTexture->unWidth * loadedTexture.vrTexture->unHeight * 4;
+            GBitmap *bmp = new GBitmap(loadedTexture.vrTexture->unWidth, loadedTexture.vrTexture->unHeight, false, GFXFormatR8G8B8A8);
+
+            Swizzles::bgra.ToBuffer(bmp->getAddress(0,0,0), loadedTexture.vrTexture->rubTextureMapData, sz);
+
+            char buffer[256];
+            dSprintf(buffer, 256, "OVRTEX-%i.png", loadedTexture.vrTextureId);
+
+            // NOTE(review): this dumps every loaded texture to disk — looks
+            // like leftover debug output. TODO: remove or gate behind a
+            // debug flag before shipping.
+            FileStream fs;
+            fs.open(buffer, Torque::FS::File::Write);
+            bmp->writeBitmap("PNG", fs);
+            fs.close();
+
+            tex.set(bmp, &GFXDefaultStaticDiffuseProfile, true, "OpenVR Texture");
+            //tex.set(loadedTexture.vrTexture->unWidth, loadedTexture.vrTexture->unHeight, 1, (void*)pixels, GFXFormatR8G8B8A8, &GFXDefaultStaticDiffuseProfile, "OpenVR Texture", 1);
+
+
+            loadedTexture.targetTexture->setTexture(tex);
+            loadedTexture.texture = tex;
+         }
+         else if (loadedTexture.textureError == vr::VRRenderModelError_Loading)
+         {
+            // Still loading; caller should poll again next frame.
+            return false;
+         }
+      }
+   }
+
+   if (loadedTexture.textureError > vr::VRRenderModelError_Loading)
+   {
+      failed = true;
+   }
+
+   if (!failed && outTex)
+   {
+      *outTex = loadedTexture.texture;
+   }
+
+   return true;
+}
+
+// Retrieves the registered NamedTexTarget name for the texture at slot 'idx'.
+// Returns false for an out-of-range index or a slot without a target texture.
+bool OpenVRProvider::getRenderModelTextureName(S32 idx, String &outName)
+{
+   if (idx < 0 || idx >= mLoadedTextures.size())
+      return false;
+
+   if (mLoadedTextures[idx].targetTexture)
+   {
+      outName = mLoadedTextures[idx].targetTexture->getName();
+      return true;
+   }
+
+   return false;
+}
+
+// Frees every loaded render model and texture (both our wrappers and the
+// native OpenVR resources) and clears all lookup tables. Any previously
+// returned OpenVRRenderModel* / GFXTextureObject* becomes invalid.
+void OpenVRProvider::resetRenderModels()
+{
+   for (U32 i = 0, sz = mLoadedModels.size(); i < sz; i++)
+   {
+      SAFE_DELETE(mLoadedModels[i].model);
+      if (mLoadedModels[i].vrModel) mRenderModels->FreeRenderModel(mLoadedModels[i].vrModel);
+   }
+   for (U32 i = 0, sz = mLoadedTextures.size(); i < sz; i++)
+   {
+      SAFE_DELETE(mLoadedTextures[i].targetTexture);
+      if (mLoadedTextures[i].vrTexture) mRenderModels->FreeTexture(mLoadedTextures[i].vrTexture);
+   }
+   mLoadedModels.clear();
+   mLoadedTextures.clear();
+   mLoadedModelLookup.clear();
+   mLoadedTextureLookup.clear();
+}
+
+// --- Overlay / dashboard / keyboard API ---
+// The following functions are unimplemented placeholders in this initial
+// OpenVR drop; they return neutral values and do nothing.
+
+OpenVROverlay *OpenVRProvider::getGamepadFocusOverlay()
+{
+   // Stub: no gamepad-focus overlay tracking yet.
+   return NULL;
+}
+
+void OpenVRProvider::setOverlayNeighbour(vr::EOverlayDirection dir, OpenVROverlay *overlay)
+{
+   // Stub: overlay neighbour navigation not implemented.
+}
+
+
+bool OpenVRProvider::isDashboardVisible()
+{
+   // Stub: dashboard visibility query not implemented.
+   return false;
+}
+
+void OpenVRProvider::showDashboard(const char *overlayToShow)
+{
+   // Stub: dashboard activation not implemented.
+}
+
+vr::TrackedDeviceIndex_t OpenVRProvider::getPrimaryDashboardDevice()
+{
+   // Stub: -1 wraps to the maximum unsigned value for this index type,
+   // i.e. "no device".
+   return -1;
+}
+
+void OpenVRProvider::setKeyboardTransformAbsolute(const MatrixF &xfm)
+{
+   // Stub: should eventually position the VR keyboard in mTrackingSpace.
+   // mTrackingSpace
+}
+
+void OpenVRProvider::setKeyboardPositionForOverlay(OpenVROverlay *overlay, const RectI &rect)
+{
+   // Stub: keyboard-over-overlay positioning not implemented.
+}
+
+// Appends to 'outList' the indexes of all currently-connected tracked
+// devices whose OpenVR device class matches 'deviceClass'.
+void OpenVRProvider::getControllerDeviceIndexes(vr::TrackedDeviceClass &deviceClass, Vector<S32> &outList)
+{
+   for (U32 i = 0; i<vr::k_unMaxTrackedDeviceCount; i++)
+   {
+      // Skip devices that are not reporting a pose this frame.
+      if (!mCurrentDevicePose[i].connected)
+         continue;
+
+      vr::TrackedDeviceClass klass = mHMD->GetTrackedDeviceClass(i);
+      if (klass == deviceClass)
+      {
+         outList.push_back(i);
+      }
+   }
+}
+
+// Returns the render-model name property for tracked device 'idx' as an
+// interned string, or NULL for an out-of-range index or when the render
+// model interface is unavailable.
+StringTableEntry OpenVRProvider::getControllerModel(U32 idx)
+{
+   if (idx >= vr::k_unMaxTrackedDeviceCount || !mRenderModels)
+      return NULL;
+
+   String str = GetTrackedDeviceString(mHMD, idx, vr::Prop_RenderModelName_String, NULL);
+   return StringTable->insert(str, true);
+}
+
+// Console binding: OpenVR::getControllerDeviceIndexes(class).
+// Returns "" when the provider singleton does not exist.
+DefineEngineStaticMethod(OpenVR, getControllerDeviceIndexes, const char*, (OpenVRTrackedDeviceClass klass),,
+   "@brief Gets the indexes of devices which match the required device class")
+{
+   if (!ManagedSingleton<OpenVRProvider>::instanceOrNull())
+   {
+      return "";
+   }
+
+   Vector<S32> outList;
+   OPENVR->getControllerDeviceIndexes(klass, outList);
+   return EngineMarshallData<Vector<S32>>(outList);
+}
+
+// Console binding: OpenVR::getControllerModel(idx).
+// Returns "" when the provider singleton does not exist.
+DefineEngineStaticMethod(OpenVR, getControllerModel, const char*, (S32 idx), ,
+   // BUG FIX: doc string was copy-pasted from getControllerDeviceIndexes.
+   "@brief Gets the render model name of the tracked device at the given index")
+{
+   if (!ManagedSingleton<OpenVRProvider>::instanceOrNull())
+   {
+      return "";
+   }
+
+   return OPENVR->getControllerModel(idx);
+}
+
+// Console binding: OpenVR::isDeviceActive().
+DefineEngineStaticMethod(OpenVR, isDeviceActive, bool, (), ,
+   "@brief Used to determine if the OpenVR input device is active\n\n"
+
+   "The OpenVR device is considered active when the library has been "
+   "initialized and either a real of simulated HMD is present.\n\n"
+
+   "@return True if the OpenVR input device is active.\n"
+
+   "@ingroup Game")
+{
+   if (!ManagedSingleton<OpenVRProvider>::instanceOrNull())
+   {
+      return false;
+   }
+
+   return OPENVR->getActive();
+}
+
+
+// Console binding: OpenVR::setEnabled(value).
+// Returns false when the provider singleton does not exist.
+DefineEngineStaticMethod(OpenVR, setEnabled, bool, (bool value), ,
+   // BUG FIX: doc string was copy-pasted from isDeviceActive and described
+   // the wrong behavior.
+   "@brief Enables or disables the OpenVR display device.\n\n"
+
+   "@param value True to enable the device, false to disable it.\n"
+
+   "@return True if the operation succeeded.\n"
+
+   "@ingroup Game")
+{
+   if (!ManagedSingleton<OpenVRProvider>::instanceOrNull())
+   {
+      return false;
+   }
+
+   return value ? OPENVR->enable() : OPENVR->disable();
+}
+
+
+// Console binding: OpenVR::setHMDAsGameConnectionDisplayDevice(conn).
+DefineEngineStaticMethod(OpenVR, setHMDAsGameConnectionDisplayDevice, bool, (GameConnection* conn), ,
+   "@brief Sets the first HMD to be a GameConnection's display device\n\n"
+   "@param conn The GameConnection to set.\n"
+   "@return True if the GameConnection display device was set.\n"
+   "@ingroup Game")
+{
+   if (!ManagedSingleton<OpenVRProvider>::instanceOrNull())
+   {
+      // BUG FIX: error messages referenced the Oculus provider and the wrong
+      // function name (copy-paste from the Oculus device code).
+      Con::errorf("OpenVR::setHMDAsGameConnectionDisplayDevice(): No OpenVR device present.");
+      return false;
+   }
+
+   if (!conn)
+   {
+      Con::errorf("OpenVR::setHMDAsGameConnectionDisplayDevice(): Invalid GameConnection.");
+      return false;
+   }
+
+   conn->setDisplayDevice(OPENVR);
+   return true;
+}
+
+
+// Console binding: OpenVR::getDisplayDeviceId().
+// Returns -1 when the provider singleton does not exist.
+DefineEngineStaticMethod(OpenVR, getDisplayDeviceId, S32, (), ,
+   "@brief MacOS display ID.\n\n"
+   "@param index The HMD index.\n"
+   "@return The ID of the HMD display device, if any.\n"
+   "@ingroup Game")
+{
+   if (!ManagedSingleton<OpenVRProvider>::instanceOrNull())
+   {
+      return -1;
+   }
+
+   return OPENVR->getDisplayDeviceId();
+}
+
+// Console binding: OpenVR::resetSensors().
+DefineEngineStaticMethod(OpenVR, resetSensors, void, (), ,
+   // BUG FIX: doc string said "Oculus VR sensors" (copy-paste from the
+   // Oculus device code).
+   "@brief Resets all OpenVR sensors.\n\n"
+   "This resets all sensor orientations such that their 'normal' rotation "
+   "is defined when this function is called.  This defines an HMD's forwards "
+   "and up direction, for example."
+   "@ingroup Game")
+{
+   if (!ManagedSingleton<OpenVRProvider>::instanceOrNull())
+   {
+      return;
+   }
+
+   OPENVR->resetSensors();
+}
+
+// Console binding: OpenVR::mapDeviceToEvent(deviceId, eventId).
+DefineEngineStaticMethod(OpenVR, mapDeviceToEvent, void, (S32 deviceId, S32 eventId), ,
+   "@brief Maps a device to an event code.\n\n"
+   "@ingroup Game")
+{
+   if (!ManagedSingleton<OpenVRProvider>::instanceOrNull())
+   {
+      return;
+   }
+
+   OPENVR->mapDeviceToEvent(deviceId, eventId);
+}
+
+// Console binding: OpenVR::resetEventMap().
+DefineEngineStaticMethod(OpenVR, resetEventMap, void, (), ,
+   "@brief Resets event map.\n\n"
+   "@ingroup Game")
+{
+   if (!ManagedSingleton<OpenVRProvider>::instanceOrNull())
+   {
+      return;
+   }
+
+   OPENVR->resetEventMap();
+}
+
+// Overlay stuff
+
+// Script query: returns true in builds that compile in OpenVR support
+// (a stub presumably returns false when the module is excluded —
+// TODO confirm against the non-OpenVR build).
+DefineEngineFunction(OpenVRIsCompiledIn, bool, (), , "")
+{
+   return true;
+}

+ 388 - 0
Engine/source/platform/input/openVR/openVRProvider.h

@@ -0,0 +1,388 @@
+
+#ifndef _OPENVRDEVICE_H_
+#define _OPENVRDEVICE_H_
+
+#include "math/mQuat.h"
+#include "math/mPoint4.h"
+#include "math/util/frustum.h"
+#include "core/util/tSingleton.h"
+
+#include "gfx/gfxDevice.h"
+#include "gfx/gfxVertexBuffer.h"
+#include "gfx/gfxPrimitiveBuffer.h"
+#include "gfx/gfxTarget.h"
+
+#include "platform/input/IInputDevice.h"
+#include "platform/input/event.h"
+#include "platform/output/IDisplayDevice.h"
+
+#include <openvr.h>
+
+class OpenVRHMDDevice;
+class OpenVROverlay;
+class BaseMatInstance;
+class SceneRenderState;
+struct MeshRenderInst;
+class Namespace;
+class NamedTexTarget;
+
+typedef vr::VROverlayInputMethod OpenVROverlayInputMethod;
+typedef vr::VROverlayTransformType OpenVROverlayTransformType;
+typedef vr::EGamepadTextInputMode OpenVRGamepadTextInputMode;
+typedef vr::EGamepadTextInputLineMode OpenVRGamepadTextInputLineMode;
+typedef vr::ETrackingResult OpenVRTrackingResult;
+typedef vr::ETrackingUniverseOrigin OpenVRTrackingUniverseOrigin;
+typedef vr::EOverlayDirection OpenVROverlayDirection;
+typedef vr::EVRState OpenVRState;
+typedef vr::TrackedDeviceClass OpenVRTrackedDeviceClass;
+
+DefineEnumType(OpenVROverlayInputMethod);
+DefineEnumType(OpenVROverlayTransformType);
+DefineEnumType(OpenVRGamepadTextInputMode);
+DefineEnumType(OpenVRGamepadTextInputLineMode);
+DefineEnumType(OpenVRTrackingResult);
+DefineEnumType(OpenVRTrackingUniverseOrigin);
+DefineEnumType(OpenVROverlayDirection);
+DefineEnumType(OpenVRState);
+DefineEnumType(OpenVRTrackedDeviceClass);
+
+/// Helpers for converting between OpenVR's coordinate system and Torque's.
+namespace OpenVRUtil
+{
+   /// Convert a matrix in OVR space to torque space
+   void convertTransformFromOVR(const MatrixF &inRotTMat, MatrixF& outRotation);
+
+   /// Convert a matrix in torque space to OVR space
+   void convertTransformToOVR(const MatrixF& inRotation, MatrixF& outRotation);
+
+   /// Converts vr::HmdMatrix34_t to a MatrixF
+   MatrixF convertSteamVRAffineMatrixToMatrixFPlain(const vr::HmdMatrix34_t &mat);
+
+   /// Converts a MatrixF to a vr::HmdMatrix34_t
+   void convertMatrixFPlainToSteamVRAffineMatrix(const MatrixF &inMat, vr::HmdMatrix34_t &outMat);
+
+   /// Maps an OpenVR controller button id to the engine's button code.
+   U32 convertOpenVRButtonToTorqueButton(uint32_t vrButton);
+
+   /// Converts a point to OVR coords (axis swap: x,y,z -> -x,-z,y)
+   inline Point3F convertPointToOVR(const Point3F &point)
+   {
+      return Point3F(-point.x, -point.z, point.y);
+   }
+
+   /// Converts a point from OVR coords (inverse of convertPointToOVR)
+   inline Point3F convertPointFromOVR(const Point3F &point)
+   {
+      return Point3F(-point.x, point.z, -point.y);
+   }
+
+   // Converts a point from OVR coords, from an input float array
+   inline Point3F convertPointFromOVR(const vr::HmdVector3_t& v)
+   {
+      return Point3F(-v.v[0], v.v[2], -v.v[1]);
+   }
+};
+
+/// Fixed-size ring of render textures, used to round-robin eye render
+/// targets so the compositor can consume one texture while the next frame
+/// renders into another.
+template<int TEXSIZE> class VRTextureSet
+{
+public:
+   static const int TextureCount = TEXSIZE;
+   GFXTexHandle mTextures[TEXSIZE];
+   U32 mIndex; ///< Index of the texture currently in use.
+
+   VRTextureSet() : mIndex(0)
+   {
+   }
+
+   /// Allocates every texture in the ring with the given dimensions/format.
+   void init(U32 width, U32 height, GFXFormat fmt, GFXTextureProfile *profile, const String &desc)
+   {
+      for (U32 i = 0; i < TextureCount; i++)
+      {
+         mTextures[i].set(width, height, fmt, profile, desc);
+      }
+   }
+
+   /// Releases every texture in the ring.
+   void clear()
+   {
+      for (U32 i = 0; i < TextureCount; i++)
+      {
+         mTextures[i] = NULL;
+      }
+   }
+
+   /// Steps to the next texture in the ring (wraps around).
+   void advance()
+   {
+      mIndex = (mIndex + 1) % TextureCount;
+   }
+
+   /// Returns the texture for the current ring position.
+   GFXTexHandle& getTextureHandle()
+   {
+      return mTextures[mIndex];
+   }
+};
+
+/// Simple class to handle rendering native OpenVR model data
+class OpenVRRenderModel
+{
+public:
+   typedef GFXVertexPNT VertexType;
+   GFXVertexBufferHandle<VertexType> mVertexBuffer;
+   GFXPrimitiveBufferHandle mPrimitiveBuffer;
+   BaseMatInstance* mMaterialInstance; ///< Material to use for rendering. NOTE: owned by this object; deleted in the destructor.
+   Box3F mLocalBox; ///< Model-space bounding box.
+
+   OpenVRRenderModel() : mMaterialInstance(NULL)
+   {
+   }
+
+   ~OpenVRRenderModel()
+   {
+      SAFE_DELETE(mMaterialInstance);
+   }
+
+   /// Returns the local bounds transformed into the space of 'mat'.
+   Box3F getWorldBox(MatrixF &mat)
+   {
+      Box3F ret = mLocalBox;
+      mat.mul(ret);
+      return ret;
+   }
+
+   /// Builds GPU buffers from native OpenVR model data, using the named material.
+   bool init(const vr::RenderModel_t & vrModel, StringTableEntry materialName);
+   void draw(SceneRenderState *state, MeshRenderInst* renderInstance);
+};
+
+/// Per-HMD rendering state: eye projections/poses, stereo render targets
+/// and the ring of textures handed off to the OpenVR compositor.
+struct OpenVRRenderState
+{
+   vr::IVRSystem *mHMD;
+
+   FovPort mEyeFov[2];      ///< Per-eye field of view.
+   MatrixF mEyePose[2];     ///< Per-eye pose transform.
+   MatrixF mHMDPose;        ///< Current head pose.
+
+   RectI mEyeViewport[2];   ///< Per-eye viewport within the stereo target.
+   GFXTextureTargetRef mStereoRT;
+
+   GFXTexHandle mStereoRenderTexture;
+   GFXTexHandle mStereoDepthTexture;
+
+   VRTextureSet<4> mOutputEyeTextures; ///< Ring of output textures for the compositor.
+
+   GFXDevice::GFXDeviceRenderStyles mRenderMode;
+
+   bool setupRenderTargets(GFXDevice::GFXDeviceRenderStyles mode);
+
+   void renderPreview();
+
+   /// Re-binds state to the given HMD and rebuilds cached data.
+   void reset(vr::IVRSystem* hmd);
+   void updateHMDProjection();
+};
+
+/// Central OpenVR integration point: acts as both the engine's display
+/// device (stereo rendering / compositor submission) and input device
+/// (tracked poses, controller state), and manages async loading of native
+/// render models and textures.
+class OpenVRProvider : public IDisplayDevice, public IInputDevice
+{
+public:
+
+   /// Bit flags describing which parts of a device's tracking data changed
+   /// since the last update (used when submitting input changes).
+   enum DataDifferences {
+      DIFF_NONE = 0,
+      DIFF_ROT = (1 << 0),
+      DIFF_ROTAXISX = (1 << 1),
+      DIFF_ROTAXISY = (1 << 2),
+      DIFF_ACCEL = (1 << 3),
+      DIFF_ANGVEL = (1 << 4),
+      DIFF_MAG = (1 << 5),
+      DIFF_POS = (1 << 6),
+      DIFF_STATUS = (1 << 7),
+
+      DIFF_ROTAXIS = (DIFF_ROTAXISX | DIFF_ROTAXISY),
+      DIFF_RAW = (DIFF_ACCEL | DIFF_ANGVEL | DIFF_MAG),
+   };
+
+   /// Bookkeeping for one asynchronously-loaded native render model.
+   struct LoadedRenderModel
+   {
+      StringTableEntry name;
+      vr::RenderModel_t *vrModel;
+      OpenVRRenderModel *model;
+      vr::EVRRenderModelError modelError;
+      S32 textureId;       ///< Slot in mLoadedTextures, -1 until requested.
+      bool loadedTexture;
+   };
+
+   /// Bookkeeping for one asynchronously-loaded render-model texture.
+   struct LoadedRenderTexture
+   {
+      U32 vrTextureId;
+      vr::RenderModel_TextureMap_t *vrTexture;
+      GFXTextureObject *texture;
+      NamedTexTarget *targetTexture;
+      vr::EVRRenderModelError textureError;
+   };
+
+   OpenVRProvider();
+   ~OpenVRProvider();
+
+   typedef Signal <void(const vr::VREvent_t &evt)> VREventSignal;
+   VREventSignal& getVREventSignal() { return mVREventSignal;  }
+
+   static void staticInit();
+
+   bool enable();
+   bool disable();
+
+   /// Active means an HMD interface has been acquired.
+   bool getActive() { return mHMD != NULL; }
+   inline vr::IVRRenderModels* getRenderModels() { return mRenderModels; }
+
+   /// @name Input handling
+   /// {
+   void buildInputCodeTable();
+   virtual bool process();
+   /// }
+
+   /// @name Display handling
+   /// {
+   virtual bool providesFrameEyePose() const;
+   virtual void getFrameEyePose(IDevicePose *pose, S32 eyeId) const;
+
+   virtual bool providesEyeOffsets() const;
+   /// Returns eye offset not taking into account any position tracking info
+   virtual void getEyeOffsets(Point3F *dest) const;
+
+   virtual bool providesFovPorts() const;
+   virtual void getFovPorts(FovPort *out) const;
+
+   virtual void getStereoViewports(RectI *out) const;
+   virtual void getStereoTargets(GFXTextureTarget **out) const;
+
+   virtual void setDrawCanvas(GuiCanvas *canvas);
+   virtual void setDrawMode(GFXDevice::GFXDeviceRenderStyles style);
+
+   virtual void setCurrentConnection(GameConnection *connection);
+   virtual GameConnection* getCurrentConnection();
+
+   virtual GFXTexHandle getPreviewTexture();
+
+   virtual void onStartFrame();
+   virtual void onEndFrame();
+
+   virtual void onEyeRendered(U32 index);
+
+   virtual void setRoomTracking(bool room);
+
+   bool _handleDeviceEvent(GFXDevice::GFXDeviceEventType evt);
+
+   S32 getDisplayDeviceId() const;
+   /// }
+
+   /// @name OpenVR handling
+   /// {
+   void processVREvent(const vr::VREvent_t & event);
+
+   void updateTrackedPoses();
+   void submitInputChanges();
+
+   void resetSensors();
+
+   void mapDeviceToEvent(U32 deviceIdx, S32 eventIdx);
+   void resetEventMap();
+
+   IDevicePose getTrackedDevicePose(U32 idx);
+   /// }
+
+   /// @name Overlay registration
+   /// {
+   void registerOverlay(OpenVROverlay* overlay);
+   void unregisterOverlay(OpenVROverlay* overlay);
+   /// }
+
+   /// @name Model loading
+   /// Async: preload* registers a slot, get* is polled until it returns true.
+   /// {
+   const S32 preloadRenderModel(StringTableEntry name);
+   const S32 preloadRenderModelTexture(U32 index);
+   bool getRenderModel(S32 idx, OpenVRRenderModel **ret, bool &failed);
+   bool getRenderModelTexture(S32 idx, GFXTextureObject **outTex, bool &failed);
+   bool getRenderModelTextureName(S32 idx, String &outName);
+   void resetRenderModels();
+   /// }
+
+
+   /// @name Console API
+   /// {
+   OpenVROverlay *getGamepadFocusOverlay();
+   void setOverlayNeighbour(vr::EOverlayDirection dir, OpenVROverlay *overlay);
+
+   bool isDashboardVisible();
+   void showDashboard(const char *overlayToShow);
+
+   vr::TrackedDeviceIndex_t getPrimaryDashboardDevice();
+
+   void setKeyboardTransformAbsolute(const MatrixF &xfm);
+   void setKeyboardPositionForOverlay(OpenVROverlay *overlay, const RectI &rect);
+
+   void getControllerDeviceIndexes(vr::TrackedDeviceClass &deviceClass, Vector<S32> &outList);
+   StringTableEntry getControllerModel(U32 idx);
+   /// }
+
+   /// @name OpenVR state
+   /// {
+   vr::IVRSystem *mHMD;
+   vr::IVRRenderModels *mRenderModels;
+   String mDriver;
+   String mDisplay;
+   vr::TrackedDevicePose_t mTrackedDevicePose[vr::k_unMaxTrackedDeviceCount];
+   IDevicePose mCurrentDevicePose[vr::k_unMaxTrackedDeviceCount];
+   IDevicePose mPreviousInputTrackedDevicePose[vr::k_unMaxTrackedDeviceCount];
+   U32 mValidPoseCount;
+
+   vr::VRControllerState_t mCurrentControllerState[vr::k_unMaxTrackedDeviceCount];
+   vr::VRControllerState_t mPreviousCurrentControllerState[vr::k_unMaxTrackedDeviceCount];
+
+   char mDeviceClassChar[vr::k_unMaxTrackedDeviceCount];
+
+   OpenVRRenderState mHMDRenderState;
+   GFXAdapterLUID mLUID;
+
+   vr::ETrackingUniverseOrigin mTrackingSpace;
+
+   Vector<OpenVROverlay*> mOverlays;
+
+   VREventSignal mVREventSignal;
+   Namespace *mOpenVRNS;
+
+   // Render model/texture slots plus name->slot lookups (see Model loading).
+   Vector<LoadedRenderModel> mLoadedModels;
+   Vector<LoadedRenderTexture> mLoadedTextures;
+   Map<StringTableEntry, S32> mLoadedModelLookup;
+   Map<U32, S32> mLoadedTextureLookup;
+
+   Map<U32, S32> mDeviceEventMap;
+   /// }
+
+   GuiCanvas* mDrawCanvas;
+   GameConnection* mGameConnection;
+
+   // Per-device input event code tables, filled in by buildInputCodeTable().
+   static U32 OVR_SENSORROT[vr::k_unMaxTrackedDeviceCount];
+   static U32 OVR_SENSORROTANG[vr::k_unMaxTrackedDeviceCount];
+   static U32 OVR_SENSORVELOCITY[vr::k_unMaxTrackedDeviceCount];
+   static U32 OVR_SENSORANGVEL[vr::k_unMaxTrackedDeviceCount];
+   static U32 OVR_SENSORMAGNETOMETER[vr::k_unMaxTrackedDeviceCount];
+   static U32 OVR_SENSORPOSITION[vr::k_unMaxTrackedDeviceCount];
+
+   static U32 OVR_BUTTONPRESSED[vr::k_unMaxTrackedDeviceCount];
+   static U32 OVR_BUTTONTOUCHED[vr::k_unMaxTrackedDeviceCount];
+
+   static U32 OVR_AXISNONE[vr::k_unMaxTrackedDeviceCount];
+   static U32 OVR_AXISTRACKPAD[vr::k_unMaxTrackedDeviceCount];
+   static U32 OVR_AXISJOYSTICK[vr::k_unMaxTrackedDeviceCount];
+   static U32 OVR_AXISTRIGGER[vr::k_unMaxTrackedDeviceCount];
+
+   /// @name HMD Rotation offset
+   /// {
+   static EulerF smHMDRotOffset;
+   static F32 smHMDmvYaw;
+   static F32 smHMDmvPitch;
+   static bool smRotateYawWithMoveActions;
+   /// }
+
+public:
+   // For ManagedSingleton.
+   static const char* getSingletonName() { return "OpenVRProvider"; }
+};
+
+/// Returns the OpenVRProvider singleton.
+#define OPENVR ManagedSingleton<OpenVRProvider>::instance()
+
+#endif   // _OPENVRDEVICE_H_

+ 981 - 0
Engine/source/platform/input/openVR/openVRTrackedObject.cpp

@@ -0,0 +1,981 @@
+#include "platform/platform.h"
+#include "platform/input/openVR/openVRTrackedObject.h"
+#include "platform/input/openVR/openVRProvider.h"
+
+#include "math/mathIO.h"
+#include "scene/sceneRenderState.h"
+#include "console/consoleTypes.h"
+#include "core/stream/bitStream.h"
+#include "core/resourceManager.h"
+#include "materials/materialManager.h"
+#include "materials/baseMatInstance.h"
+#include "renderInstance/renderPassManager.h"
+#include "lighting/lightQuery.h"
+#include "console/engineAPI.h"
+#include "gfx/gfxTextureManager.h"
+#include "gfx/sim/debugDraw.h"
+#include "gfx/gfxTransformSaver.h"
+#include "environment/skyBox.h"
+#include "collision/boxConvex.h"
+#include "collision/concretePolyList.h"
+#include "T3D/physics/physicsPlugin.h"
+#include "T3D/physics/physicsCollision.h"
+#include "T3D/physics/physicsBody.h"
+
+#ifdef TORQUE_EXTENDED_MOVE
+#include "T3D/gameBase/extended/extendedMove.h"
+#endif
+
+
+// Debug-draw toggles, exposed to script in initPersistFields() as
+// $OpenVRTrackedObject::debugController[Move]Position.
+bool OpenVRTrackedObject::smDebugControllerMovePosition = true;
+bool OpenVRTrackedObject::smDebugControllerPosition = false;
+
+// Object-type mask used for controller collision queries.
+static const U32 sCollisionMoveMask = (PlayerObjectType |
+   StaticShapeObjectType | VehicleObjectType);
+
+U32 OpenVRTrackedObject::sServerCollisionMask = sCollisionMoveMask; // ItemObjectType
+U32 OpenVRTrackedObject::sClientCollisionMask = sCollisionMoveMask;
+
+//-----------------------------------------------------------------------------
+
+IMPLEMENT_CO_DATABLOCK_V1(OpenVRTrackedObjectData);
+
+// Default collision box roughly matches a hand controller's extents.
+OpenVRTrackedObjectData::OpenVRTrackedObjectData() :
+   mShapeFile(NULL)
+{
+   mCollisionBoxMin = Point3F(-0.02, -0.20, -0.02);
+   mCollisionBoxMax = Point3F(0.02, 0.05, 0.02);
+}
+
+OpenVRTrackedObjectData::~OpenVRTrackedObjectData()
+{
+}
+
+// Nothing datablock-specific to initialize; defer entirely to the parent.
+bool OpenVRTrackedObjectData::onAdd()
+{
+   return Parent::onAdd();
+}
+
+// Loads client-side resources for the datablock (the controller shape).
+// Returns false if the parent preload fails.
+bool OpenVRTrackedObjectData::preload(bool server, String &errorStr)
+{
+   if (!Parent::preload(server, errorStr))
+      return false;
+
+   bool error = false;
+   if (!server)
+   {
+      // Shape is only needed for rendering, so only load it client-side.
+      mShape = mShapeFile ? ResourceManager::get().load(mShapeFile) : NULL;
+   }
+
+   // BUG FIX: the original fell off the end of this non-void function
+   // (undefined behavior). Report success unless an error was recorded.
+   return !error;
+}
+
+// Exposes the datablock's script-editable fields.
+void OpenVRTrackedObjectData::initPersistFields()
+{
+   addGroup("Render Components");
+   addField("shape", TypeShapeFilename, Offset(mShapeFile, OpenVRTrackedObjectData), "Shape file to use for controller model.");
+   addField("collisionMin", TypePoint3F, Offset(mCollisionBoxMin, OpenVRTrackedObjectData), "Box min");
+   // BUG FIX: description previously said "Box min" (copy-paste error).
+   addField("collisionMax", TypePoint3F, Offset(mCollisionBoxMax, OpenVRTrackedObjectData), "Box max");
+   endGroup("Render Components");
+
+   Parent::initPersistFields();
+}
+
+// Serializes the datablock to clients; must stay in sync with unpackData.
+void OpenVRTrackedObjectData::packData(BitStream* stream)
+{
+   Parent::packData(stream);
+
+   stream->writeString(mShapeFile);
+}
+
+// Deserializes the datablock on the client (mirror of packData).
+void OpenVRTrackedObjectData::unpackData(BitStream* stream)
+{
+   Parent::unpackData(stream);
+
+   mShapeFile = stream->readSTString();
+}
+
+//-----------------------------------------------------------------------------
+
+
+// Network-object registration and script-facing class documentation.
+IMPLEMENT_CO_NETOBJECT_V1(OpenVRTrackedObject);
+
+ConsoleDocClass(OpenVRTrackedObject,
+   "@brief Renders and handles interactions OpenVR controllers and tracked objects.\n\n"
+   "This class implements basic rendering and interactions with OpenVR controllers.\n\n"
+   "The object should be controlled by a player object. Controllers will be rendered at\n"
+   "the correct position regardless of the current transform of the object.\n"
+   "@ingroup OpenVR\n");
+
+
+//-----------------------------------------------------------------------------
+// Object setup and teardown
+//-----------------------------------------------------------------------------
+// Constructor: no device bound yet (mDeviceIndex/mMappedMoveIndex == -1);
+// the object is always ghosted to clients.
+OpenVRTrackedObject::OpenVRTrackedObject() :
+   mDataBlock(NULL),
+   mShapeInstance(NULL),
+   mBasicModel(NULL),
+   mDeviceIndex(-1),
+   mMappedMoveIndex(-1),
+   mIgnoreParentRotation(true),
+   mConvexList(new Convex()),
+   mPhysicsRep(NULL)
+{
+   // Flag this object so that it will always
+   // be sent across the network to clients
+   mNetFlags.set(Ghostable | ScopeAlways);
+
+   // Set it as a "static" object that casts shadows
+   mTypeMask |= StaticObjectType | StaticShapeObjectType;
+
+   // No tracking data until the first pose update arrives.
+   mPose.connected = false;
+}
+
+// Destructor: releases render-model references and the owned convex list.
+OpenVRTrackedObject::~OpenVRTrackedObject()
+{
+   clearRenderData();
+   delete mConvexList;
+}
+
+// Rebuilds render data: prefers the datablock's TSShape if one is set,
+// otherwise falls back to the native OpenVR render model.
+void OpenVRTrackedObject::updateRenderData()
+{
+   clearRenderData();
+
+   if (!mDataBlock)
+      return;
+
+   // Are we using a model?
+   if (mDataBlock->mShape)
+   {
+      // Recreate the shape instance if the datablock's shape changed.
+      if (mShapeInstance && mShapeInstance->getShape() != mDataBlock->mShape)
+      {
+         delete mShapeInstance;
+         mShapeInstance = NULL;
+      }
+
+      if (!mShapeInstance)
+      {
+         mShapeInstance = new TSShapeInstance(mDataBlock->mShape, isClientObject());
+      }
+   }
+   else
+   {
+      setupRenderDataFromModel(isClientObject());
+   }
+}
+
+// Loads the native OpenVR render model (and, on the client, its per-component
+// sub-models) for the controller named by mModelName.
+// NOTE(review): the while-loops below busy-wait on OpenVR's async model
+// loader until it either completes or fails — this can stall the main
+// thread; consider polling across frames instead.
+void OpenVRTrackedObject::setupRenderDataFromModel(bool loadComponentModels)
+{
+   clearRenderData();
+   
+   if (!OPENVR || !OPENVR->isEnabled())
+      return;
+
+   vr::IVRRenderModels *models = OPENVR->getRenderModels();
+   if (!models)
+      return;
+
+   // Whole-controller model (only if no TSShape instance took precedence).
+   if (!mShapeInstance && mModelName && mModelName[0] != '\0')
+   {
+      bool failed = false;
+      S32 idx = OPENVR->preloadRenderModel(mModelName);
+      while (!OPENVR->getRenderModel(idx, &mBasicModel, failed))
+      {
+         if (failed)
+            break;
+      }
+   }
+
+   if (loadComponentModels)
+   {
+      // One slot per named component (buttons, trigger, body, ...).
+      mRenderComponents.setSize(models->GetComponentCount(mModelName));
+
+      for (U32 i = 0, sz = mRenderComponents.size(); i < sz; i++)
+      {
+         RenderModelSlot &slot = mRenderComponents[i];
+         char buffer[1024];
+
+         slot.mappedNodeIdx = -1;
+         slot.componentName = NULL;
+         slot.nativeModel = NULL;
+
+         U32 result = models->GetComponentName(mModelName, i, buffer, sizeof(buffer));
+         if (result == 0)
+            continue;
+
+#ifdef DEBUG_CONTROLLER_MODELS
+         Con::printf("Controller[%s] component %i NAME == %s", mModelName, i, buffer);
+#endif
+
+         slot.componentName = StringTable->insert(buffer, true);
+
+         // Some components (e.g. attachment points) have no render model.
+         result = models->GetComponentRenderModelName(mModelName, slot.componentName, buffer, sizeof(buffer));
+         if (result == 0)
+         {
+#ifdef DEBUG_CONTROLLER_MODELS
+            Con::printf("Controller[%s] component %i NO MODEL", mModelName, i);
+#endif
+            continue;
+         }
+
+#ifdef DEBUG_CONTROLLER_MODELS
+         Con::printf("Controller[%s] component %i == %s", mModelName, i, slot.componentName);
+#endif
+
+         bool failed = false;
+         S32 idx = OPENVR->preloadRenderModel(StringTable->insert(buffer, true));
+         while (!OPENVR->getRenderModel(idx, &slot.nativeModel, failed))
+         {
+            if (failed)
+               break;
+         }
+      }
+   }
+}
+
+// Drops references to native render models. The models themselves are owned
+// and freed by OpenVRProvider (resetRenderModels), not by this object.
+void OpenVRTrackedObject::clearRenderData()
+{
+   mBasicModel = NULL;
+   mRenderComponents.clear();
+}
+
+//-----------------------------------------------------------------------------
+// Object Editing
+//-----------------------------------------------------------------------------
+// Exposes script-editable fields and registers the debug console variables.
+void OpenVRTrackedObject::initPersistFields()
+{
+   // SceneObject already handles exposing the transform
+   Parent::initPersistFields();
+
+   // BUG FIX: the original registered "deviceIndex" three times (duplicate
+   // addField calls pasted onto the following lines) and reused the
+   // mappedMoveIndex description for ignoreParentRotation.
+   addField("deviceIndex", TypeS32, Offset(mDeviceIndex, OpenVRTrackedObject), "Index of device to track");
+   addField("mappedMoveIndex", TypeS32, Offset(mMappedMoveIndex, OpenVRTrackedObject), "Index of movemanager state to track");
+   addField("ignoreParentRotation", TypeBool, Offset(mIgnoreParentRotation, OpenVRTrackedObject), "If true, ignore the parent object's rotation when placing the tracked model");
+
+   static bool conInit = false;
+   if (!conInit)
+   {
+      Con::addVariable("$OpenVRTrackedObject::debugControllerPosition", TypeBool, &smDebugControllerPosition);
+      Con::addVariable("$OpenVRTrackedObject::debugControllerMovePosition", TypeBool, &smDebugControllerMovePosition);
+      conInit = true;
+   }
+}
+
+// Called after editor changes are applied; pushes the new state to clients.
+void OpenVRTrackedObject::inspectPostApply()
+{
+   Parent::inspectPostApply();
+
+   // Flag the network mask to send the updates
+   // to the client object
+   setMaskBits(UpdateMask);
+}
+
+// Registers the object with the scene. Uses the datablock's collision box
+// when available; otherwise falls back to global bounds (always in scope).
+bool OpenVRTrackedObject::onAdd()
+{
+   if (!Parent::onAdd())
+      return false;
+
+   // Set up a 1x1x1 bounding box
+   mObjBox.set(Point3F(-0.5f, -0.5f, -0.5f),
+      Point3F(0.5f, 0.5f, 0.5f));
+
+   resetWorldBox();
+
+   // Add this object to the scene
+   addToScene();
+
+   if (mDataBlock)
+   {
+      mObjBox.minExtents = mDataBlock->mCollisionBoxMin;
+      mObjBox.maxExtents = mDataBlock->mCollisionBoxMax;
+      resetWorldBox();
+   }
+   else
+   {
+      setGlobalBounds();
+   }
+
+   return true;
+}
+
+// Unregisters from the scene and releases render/physics resources.
+void OpenVRTrackedObject::onRemove()
+{
+   // Remove this object from the scene
+   removeFromScene();
+
+   clearRenderData();
+
+   SAFE_DELETE(mPhysicsRep);
+
+   Parent::onRemove();
+}
+
+// (Re)creates the kinematic trigger body that mirrors the controller's
+// collision box in the active physics plugin. No-op without a plugin.
+void OpenVRTrackedObject::_updatePhysics()
+{
+   SAFE_DELETE(mPhysicsRep);
+
+   if (!PHYSICSMGR)
+      return;
+
+   PhysicsCollision *colShape = NULL;
+   MatrixF offset(true);
+   colShape = PHYSICSMGR->createCollision();
+   colShape->addBox(getObjBox().getExtents() * 0.5f * mObjScale, offset);
+
+   if (colShape)
+   {
+      // Kinematic trigger: follows the tracked pose, reports overlaps only.
+      PhysicsWorld *world = PHYSICSMGR->getWorld(isServerObject() ? "server" : "client");
+      mPhysicsRep = PHYSICSMGR->createBody();
+      mPhysicsRep->init(colShape, 0, PhysicsBody::BF_TRIGGER | PhysicsBody::BF_KINEMATIC, this, world);
+      mPhysicsRep->setTransform(getTransform());
+   }
+}
+
+// Binds a new OpenVRTrackedObjectData datablock: rebuilds render data,
+// adopts the datablock collision extents, rebuilds physics and notifies
+// script. Returns false if dptr is not the expected datablock type.
+bool OpenVRTrackedObject::onNewDataBlock(GameBaseData *dptr, bool reload)
+{
+   mDataBlock = dynamic_cast<OpenVRTrackedObjectData*>(dptr);
+   if (!mDataBlock || !Parent::onNewDataBlock(dptr, reload))
+      return false;
+
+   // Setup the models
+   clearRenderData();
+
+   mObjBox.minExtents = mDataBlock->mCollisionBoxMin;
+   mObjBox.maxExtents = mDataBlock->mCollisionBoxMax;
+
+   // Datablock bounds are authoritative; drop any global-bounds fallback.
+   mGlobalBounds = false;
+
+   resetWorldBox();
+
+   _updatePhysics();
+
+   scriptOnNewDataBlock();
+
+   return true;
+}
+
+// Records the object the controller is interacting with and whether it
+// should be treated as held (pickup logic).
+void OpenVRTrackedObject::setInteractObject(SceneObject* object, bool holding)
+{
+   mInteractObject = object;
+   mHoldInteractedObject = holding;
+}
+
+// Applies a new world transform and dirties UpdateMask so the change is
+// networked to the client ghost.
+void OpenVRTrackedObject::setTransform(const MatrixF & mat)
+{
+   // Let SceneObject handle all of the matrix manipulation
+   Parent::setTransform(mat);
+
+   // Dirty our network mask so that the new transform gets
+   // transmitted to the client object
+   setMaskBits(UpdateMask);
+}
+
+// Sets the OpenVR render-model name. Server-side only; the name is
+// interned (case-sensitive) and replicated via UpdateMask.
+void OpenVRTrackedObject::setModelName(String &modelName)
+{
+   if (!isServerObject())
+      return;
+
+   mModelName = StringTable->insert(modelName.c_str(), true);
+   setMaskBits(UpdateMask);
+}
+
+// Serializes ghost state to the client. Field order here must exactly
+// mirror unpackUpdate below: transform, scale, device index, mapped
+// move index, model name.
+U32 OpenVRTrackedObject::packUpdate(NetConnection *conn, U32 mask, BitStream *stream)
+{
+   // Allow the Parent to get a crack at writing its info
+   U32 retMask = Parent::packUpdate(conn, mask, stream);
+
+   // Write our transform information
+   if (stream->writeFlag(mask & UpdateMask))
+   {
+      mathWrite(*stream, getTransform());
+      mathWrite(*stream, getScale());
+
+      // Indices are narrowed to S16 for the wire format.
+      stream->write((S16)mDeviceIndex);
+      stream->write((S16)mMappedMoveIndex);
+      stream->writeString(mModelName);
+   }
+
+   return retMask;
+}
+
+// Client-side counterpart of packUpdate; reads fields in the same order
+// and refreshes the render data for the (possibly new) model name.
+void OpenVRTrackedObject::unpackUpdate(NetConnection *conn, BitStream *stream)
+{
+   // Let the Parent read any info it sent
+   Parent::unpackUpdate(conn, stream);
+
+   if (stream->readFlag())  // UpdateMask
+   {
+      mathRead(*stream, &mObjToWorld);
+      mathRead(*stream, &mObjScale);
+
+      setTransform(mObjToWorld);
+      
+      S16 readDeviceIndex;
+      S16 readMoveIndex;
+      stream->read(&readDeviceIndex);
+      stream->read(&readMoveIndex);
+
+      mDeviceIndex = readDeviceIndex;
+      mMappedMoveIndex = readMoveIndex;
+      mModelName = stream->readSTString();
+
+      updateRenderData();
+   }
+
+}
+
+// First-packet data: nothing beyond Parent state is written here.
+void OpenVRTrackedObject::writePacketData(GameConnection *conn, BitStream *stream)
+{
+   Parent::writePacketData(conn, stream);
+}
+
+// First-packet data: nothing beyond Parent state is read here.
+void OpenVRTrackedObject::readPacketData(GameConnection *conn, BitStream *stream)
+{
+   Parent::readPacketData(conn, stream);
+}
+
+// Builds a matrix from the *live* OpenVR pose for this device index
+// (rotation from the pose quaternion, translation from its position).
+MatrixF OpenVRTrackedObject::getTrackedTransform()
+{
+   IDevicePose pose = OPENVR->getTrackedDevicePose(mDeviceIndex);
+   MatrixF trackedMat(1);
+
+   pose.orientation.setMatrix(&trackedMat);
+   trackedMat.setPosition(pose.position);
+
+   return trackedMat;
+}
+
+// Same as getTrackedTransform but uses the cached pose (mPose) last
+// stored by updateMove / replication, not a fresh OpenVR query.
+MatrixF OpenVRTrackedObject::getLastTrackedTransform()
+{
+   MatrixF trackedMat(1);
+
+   mPose.orientation.setMatrix(&trackedMat);
+   trackedMat.setPosition(mPose.position);
+
+   return trackedMat;
+}
+
+// Returns the transform of the mount point the tracking space is
+// relative to (identity when unmounted). With mIgnoreParentRotation set,
+// only the mount's position is kept.
+MatrixF OpenVRTrackedObject::getBaseTrackingTransform()
+{
+   if (isMounted())
+   {
+      MatrixF mat;
+
+      mMount.object->getMountTransform(mMount.node, mMount.xfm, &mat);
+      if (mIgnoreParentRotation)
+      {
+         Point3F pos = mat.getPosition();
+         mat = MatrixF(1);
+         mat.setPosition(pos);
+      }
+      //mat.inverse();
+      return mat;
+   }
+
+   return MatrixF(1);
+}
+
+// Submits render instances for the tracked controller. Three render
+// paths, in priority order: a Torque TSShapeInstance, per-component
+// native OpenVR render models, or a single basic native model. Also
+// draws DebugDrawer overlays when the debug console variables are set.
+void OpenVRTrackedObject::prepRenderImage(SceneRenderState *state)
+{
+   RenderPassManager *renderPass = state->getRenderPass();
+
+   // debug rendering for now
+
+   if (mDeviceIndex < 0)
+      return;
+
+   // Current pose
+   IDevicePose pose = OPENVR->getTrackedDevicePose(mDeviceIndex);
+   IDevicePose hmdPose = OPENVR->getTrackedDevicePose(0);
+
+   // Nothing to draw if the device was never connected.
+   if (!pose.connected && !mPose.connected)
+      return;
+
+   MatrixF offsetMat = getBaseTrackingTransform();
+   //offsetMat.inverse();
+
+   Point3F pos = offsetMat.getPosition();
+   //Con::printf("Base offs == %f,%f,%f", pos.x, pos.y, pos.z);
+
+   const F32 CONTROLLER_SCALE = 0.1;
+
+   // Debug path 1: draw the raw OpenVR controller/HMD poses.
+   if (smDebugControllerPosition)
+   {
+      // Green when the pose is valid, red otherwise.
+      ColorI drawColor = ColorI::GREEN;
+      if (!pose.valid)
+      {
+         drawColor = ColorI::RED;
+      }
+
+      // Draw Camera
+      /*
+      DisplayPose cameraPose;
+      OPENVR->getFrameEyePose(&cameraPose, -1);
+      Point3F cameraCenter(0);
+      MatrixF cameraMat(1);
+      cameraPose.orientation.setMatrix(&cameraMat);
+      cameraMat.setPosition(cameraPose.position);
+      cameraMat.mulP(cameraCenter);
+      //DebugDrawer::get()->drawBox(cameraCenter - Point3F(0.1), cameraCenter + Point3F(0.1), ColorI::GREEN);
+      
+      DebugDrawer::get()->drawTransformedBoxOutline(Point3F(-0.5, -0.1, -0.5), Point3F(0.5, 0.1, 0.5), ColorI::WHITE, cameraMat); // general box 
+      */
+
+      // Draw Tracked HMD Pos
+      Point3F hmdCenter(0, 0, 0);
+      MatrixF hmdMat(1);
+      hmdPose.orientation.setMatrix(&hmdMat);
+      hmdMat.setPosition(hmdPose.position);
+      hmdMat.inverse(); // -> world mat (as opposed to world -> tracked pos)
+      hmdMat = offsetMat * hmdMat;
+      hmdMat.mulP(hmdCenter);
+      DebugDrawer::get()->drawBox(hmdCenter - Point3F(0.1), hmdCenter + Point3F(0.1), ColorI::RED);
+      DebugDrawer::get()->drawTransformedBoxOutline(Point3F(-0.5, -0.1, -0.5), Point3F(0.5, 0.1, 0.5), ColorI::GREEN, hmdMat); // general box 
+
+
+      // Draw Controller
+      MatrixF mat(1);
+      pose.orientation.setMatrix(&mat);
+      mat.setPosition(pose.position);
+      mat.inverse(); // same as HMD
+      mat = offsetMat * mat;
+
+      Point3F middleStart(0, -1 * CONTROLLER_SCALE, 0);
+      Point3F middleEnd(0, 1 * CONTROLLER_SCALE, 0);
+      Point3F middle(0, 0, 0);
+
+      Point3F center(0, 0, 0);
+      mat.mulP(center);
+
+      //DebugDrawer::get()->drawBox(center - Point3F(0.1), center + Point3F(0.1), ColorI::BLUE);
+
+      mat.mulP(middleStart);
+      mat.mulP(middle);
+      mat.mulP(middleEnd);
+
+      char buffer[256];
+      dSprintf(buffer, 256, "%f %f %f", center.x, center.y, center.z);
+      DebugDrawer::get()->drawText(middle, buffer);
+      DebugDrawer::get()->drawLine(middleStart, middle, ColorI(0, 255, 0)); // axis back
+      DebugDrawer::get()->drawLine(middleEnd, middle, ColorI(255, 0, 0)); // axis forward
+      DebugDrawer::get()->drawTransformedBoxOutline(Point3F(-0.5, -1, -0.5) * CONTROLLER_SCALE, Point3F(0.5, 1, 0.5) * CONTROLLER_SCALE, drawColor, mat); // general box 
+      DebugDrawer::get()->drawBoxOutline(Point3F(-1), Point3F(1), ColorI::WHITE);
+   }
+
+   // Debug path 2 (client only): draw the pose derived from the move
+   // stream, mirroring the server object's mPose for comparison.
+   if (isClientObject() && smDebugControllerMovePosition)
+   {
+      MatrixF transform = getRenderTransform();
+      transform.scale(mObjScale);
+      DebugDrawer::get()->drawTransformedBoxOutline(mObjBox.minExtents, mObjBox.maxExtents, ColorI::RED, transform);
+      
+      // jamesu - grab server object pose for debugging
+      OpenVRTrackedObject* tracked = static_cast<OpenVRTrackedObject*>(getServerObject());
+      if (tracked)
+      {
+         mPose = tracked->mPose;
+      }
+
+      ColorI drawColor = ColorI::GREEN;
+      if (!pose.valid)
+      {
+         drawColor = ColorI::RED;
+      }
+                                                                                                 // Draw Controller
+      MatrixF mat(1);
+      mPose.orientation.setMatrix(&mat);
+      mat.setPosition(mPose.position);
+      mat.inverse(); // same as HMD
+      mat = offsetMat * mat;
+
+      Point3F middleStart(0, -1 * CONTROLLER_SCALE, 0);
+      Point3F middleEnd(0, 1 * CONTROLLER_SCALE, 0);
+      Point3F middle(0, 0, 0);
+
+      Point3F center(0, 0, 0);
+      mat.mulP(center);
+
+      //DebugDrawer::get()->drawBox(center - Point3F(0.1), center + Point3F(0.1), ColorI::BLUE);
+
+      mat.mulP(middleStart);
+      mat.mulP(middle);
+      mat.mulP(middleEnd);
+
+      char buffer[256];
+      dSprintf(buffer, 256, "%f %f %f", center.x, center.y, center.z);
+      DebugDrawer::get()->drawText(middle, buffer);
+      DebugDrawer::get()->drawLine(middleStart, middle, ColorI(0, 255, 0)); // axis back
+      DebugDrawer::get()->drawLine(middleEnd, middle, ColorI(255, 0, 0)); // axis forward
+      DebugDrawer::get()->drawTransformedBoxOutline(Point3F(-0.5, -1, -0.5) * CONTROLLER_SCALE, Point3F(0.5, 1, 0.5) * CONTROLLER_SCALE, drawColor, mat); // general box 
+      DebugDrawer::get()->drawBoxOutline(Point3F(-1), Point3F(1), ColorI::WHITE);
+   }
+
+   // Controller matrix base
+   MatrixF trackedMat = getTrackedTransform();
+   MatrixF invTrackedMat(1);
+
+   invTrackedMat = trackedMat;
+   invTrackedMat.inverse(); // -> world mat (as opposed to world -> tracked pos)
+
+   invTrackedMat = getBaseTrackingTransform() * invTrackedMat;
+   trackedMat = invTrackedMat;
+   trackedMat.inverse();
+
+   // Render the controllers, using either the render model or the shape
+   if (mShapeInstance)
+   {
+      // Calculate the distance of this object from the camera
+      Point3F cameraOffset = invTrackedMat.getPosition();
+      cameraOffset -= state->getDiffuseCameraPosition();
+      F32 dist = cameraOffset.len();
+      // Clamp distance for LOD selection (note: body is mis-indented,
+      // but the clamp applies only to the if above).
+      if (dist < 0.01f)
+      dist = 0.01f;
+
+      // Set up the LOD for the shape
+      F32 invScale = (1.0f / getMax(getMax(mObjScale.x, mObjScale.y), mObjScale.z));
+
+      mShapeInstance->setDetailFromDistance(state, dist * invScale);
+
+      // Make sure we have a valid level of detail
+      if (mShapeInstance->getCurrentDetail() < 0)
+         return;
+
+      // GFXTransformSaver is a handy helper class that restores
+      // the current GFX matrices to their original values when
+      // it goes out of scope at the end of the function
+      GFXTransformSaver saver;
+
+      // Set up our TS render state
+      TSRenderState rdata;
+      rdata.setSceneState(state);
+      rdata.setFadeOverride(1.0f);
+
+      // We might have some forward lit materials
+      // so pass down a query to gather lights.
+      LightQuery query;
+      query.init(getWorldSphere());
+      rdata.setLightQuery(&query);
+
+      // Set the world matrix to the objects render transform
+      MatrixF mat = trackedMat;
+
+      mat.scale(mObjScale);
+      GFX->setWorldMatrix(mat);
+
+      // TODO: move the nodes about for components
+
+      mShapeInstance->animate();
+      mShapeInstance->render(rdata);
+   }
+   else if (mRenderComponents.size() > 0)
+   {
+      vr::IVRRenderModels *models = OPENVR->getRenderModels();
+      if (!models)
+         return;
+
+      vr::IVRSystem* vrs = vr::VRSystem();
+
+      // Component visibility depends on the live controller state.
+      if (!vrs->GetControllerState(mDeviceIndex, &mCurrentControllerState))
+      {
+         return;
+      }
+
+      for (U32 i = 0, sz = mRenderComponents.size(); i < sz; i++)
+      {
+         RenderModelSlot slot = mRenderComponents[i];
+         vr::RenderModel_ControllerMode_State_t modeState;
+         vr::RenderModel_ComponentState_t componentState;
+
+         modeState.bScrollWheelVisible = false;
+
+         if (models->GetComponentState(mModelName, slot.componentName, &mCurrentControllerState, &modeState, &componentState))
+         {
+            MeshRenderInst *ri = renderPass->allocInst<MeshRenderInst>();
+
+            // Set our RenderInst as a standard mesh render
+            ri->type = RenderPassManager::RIT_Mesh;
+
+            // Calculate our sorting point
+            if (state && slot.nativeModel)
+            {
+               // Calculate our sort point manually.
+               const Box3F rBox = slot.nativeModel->getWorldBox(invTrackedMat);
+               ri->sortDistSq = rBox.getSqDistanceToPoint(state->getCameraPosition());
+            }
+            else
+            {
+               ri->sortDistSq = 0.0f;
+            }
+
+            MatrixF newTransform = trackedMat;
+            MatrixF controllerOffsMat = OpenVRUtil::convertSteamVRAffineMatrixToMatrixFPlain(componentState.mTrackingToComponentRenderModel);
+            MatrixF offComponentMat(1);
+            OpenVRUtil::convertTransformFromOVR(controllerOffsMat, offComponentMat);
+
+            newTransform = offComponentMat * newTransform;
+
+            newTransform.inverse();
+
+            //DebugDrawer::get()->drawBox(newTransform.getPosition() - Point3F(0.001), newTransform.getPosition() + Point3F(0.001), ColorI::BLUE);
+
+            if (!slot.nativeModel)
+               continue;
+            // NOTE(review): this unconditionally skips component 0 — looks
+            // like leftover debug filtering; confirm before relying on it.
+            if (i < 1)
+               continue;
+
+            // Set up our transforms
+            ri->objectToWorld = renderPass->allocUniqueXform(newTransform);
+            ri->worldToCamera = renderPass->allocSharedXform(RenderPassManager::View);
+            ri->projection = renderPass->allocSharedXform(RenderPassManager::Projection);
+
+            // If our material needs lights then fill the RIs
+            // light vector with the best lights.
+            if (true)
+            {
+               LightQuery query;
+               Point3F center(0, 0, 0);
+               invTrackedMat.mulP(center);
+               query.init(SphereF(center, 10.0f));
+               query.getLights(ri->lights, 8);
+            }
+
+            // Draw model
+            slot.nativeModel->draw(state, ri);
+            state->getRenderPass()->addInst(ri);
+         }
+      }
+   }
+   else if (mBasicModel)
+   {
+      MeshRenderInst *ri = renderPass->allocInst<MeshRenderInst>();
+
+      // Set our RenderInst as a standard mesh render
+      ri->type = RenderPassManager::RIT_Mesh;
+
+      // Calculate our sorting point
+      if (state)
+      {
+         // Calculate our sort point manually.
+         const Box3F rBox = mBasicModel->getWorldBox(invTrackedMat);
+         ri->sortDistSq = rBox.getSqDistanceToPoint(state->getCameraPosition());
+      }
+      else
+      {
+         ri->sortDistSq = 0.0f;
+      }
+
+      MatrixF newTransform = invTrackedMat;
+      // Set up our transforms
+      ri->objectToWorld = renderPass->allocUniqueXform(newTransform);
+      ri->worldToCamera = renderPass->allocSharedXform(RenderPassManager::View);
+      ri->projection = renderPass->allocSharedXform(RenderPassManager::Projection);
+
+      // If our material needs lights then fill the RIs
+      // light vector with the best lights.
+      if (true)
+      {
+         LightQuery query;
+         Point3F center(0, 0, 0);
+         invTrackedMat.mulP(center);
+         query.init(SphereF(center, 10.0f));
+         query.getLights(ri->lights, 8);
+      }
+
+      // Draw model
+      mBasicModel->draw(state, ri);
+      state->getRenderPass()->addInst(ri);
+   }
+}
+
+// Returns the static collision mask matching this object's network side.
+U32 OpenVRTrackedObject::getCollisionMask()
+{
+   if (isServerObject())
+      return sServerCollisionMask;
+   else
+      return sClientCollisionMask;
+}
+
+// Refreshes the convex working list around the object, expanding the
+// bounds by an assumed max movement of 50 units/sec per tick plus a
+// fudge factor. Collision is disabled during the update so the object
+// does not collide with itself.
+void OpenVRTrackedObject::updateWorkingCollisionSet()
+{
+   const U32 mask = getCollisionMask();
+   Box3F convexBox = mConvexList->getBoundingBox(getTransform(), getScale());
+   F32 len = (50) * TickSec;
+   F32 l = (len * 1.1) + 0.1;  // fudge factor
+   convexBox.minExtents -= Point3F(l, l, l);
+   convexBox.maxExtents += Point3F(l, l, l);
+
+   disableCollision();
+   mConvexList->updateWorkingList(convexBox, mask);
+   enableCollision();
+}
+
+// Server-side: reconstructs the controller pose from the extended move
+// stream (slot mMappedMoveIndex) and applies it as the object transform,
+// relative to the base tracking transform. Compiled out unless
+// TORQUE_EXTENDED_MOVE is defined.
+void OpenVRTrackedObject::updateMove(const Move *move)
+{
+   // Set transform based on move
+
+#ifdef TORQUE_EXTENDED_MOVE
+
+   const ExtendedMove* emove = dynamic_cast<const ExtendedMove*>(move);
+   if (!emove)
+      return;
+
+   // Clamp the move slot index into range.
+   U32 emoveIndex = mMappedMoveIndex;
+   if (emoveIndex >= ExtendedMove::MaxPositionsRotations)
+      emoveIndex = 0;
+
+   //IDevicePose pose = OPENVR->getTrackedDevicePose(mDeviceIndex);
+   //Con::printf("OpenVRTrackedObject::processTick move %i", emoveIndex);
+
+   // Only quaternion (angle-axis) rotations are handled; euler-based
+   // move slots are ignored here.
+   if (!emove->EulerBasedRotation[emoveIndex])
+   {
+      AngAxisF inRot = AngAxisF(Point3F(emove->rotX[emoveIndex], emove->rotY[emoveIndex], emove->rotZ[emoveIndex]), emove->rotW[emoveIndex]);
+      // Update our pose based on the move info
+      mPose.orientation = inRot;
+      mPose.position = Point3F(emove->posX[emoveIndex], emove->posY[emoveIndex], emove->posZ[emoveIndex]);
+      mPose.valid = true;
+      mPose.connected = true;
+   }
+
+   // Set transform based on move pose
+   MatrixF trackedMat(1);
+   MatrixF invTrackedMat(1);
+
+   mPose.orientation.setMatrix(&trackedMat);
+   trackedMat.setPosition(mPose.position);
+
+   invTrackedMat = trackedMat;
+   invTrackedMat.inverse(); // -> world mat (as opposed to world -> tracked pos)
+
+   invTrackedMat = getBaseTrackingTransform() * invTrackedMat;
+   trackedMat = invTrackedMat;
+   trackedMat.inverse();
+
+   // Bypass our own setTransform override (which would dirty UpdateMask
+   // every tick); SceneObject applies the matrix directly.
+   SceneObject::setTransform(invTrackedMat);
+
+   if (mPhysicsRep)
+      mPhysicsRep->setTransform(invTrackedMat);
+#endif
+}
+
+// Per-tick update: on the server, applies the move-derived pose and —
+// when no physics body exists — refreshes the convex working set used
+// for raw collision queries.
+void OpenVRTrackedObject::processTick(const Move *move)
+{
+   // Perform collision checks
+   if (isServerObject())
+   {
+      updateMove(move);
+
+      if (!mPhysicsRep)
+      {
+         updateWorkingCollisionSet();
+      }
+   }
+
+   Parent::processTick(move);
+}
+
+// Client interpolation between ticks; no extra work beyond Parent.
+void OpenVRTrackedObject::interpolateTick(F32 delta)
+{
+   // Set latest transform
+
+   Parent::interpolateTick(delta);
+}
+
+// Per-frame time advance; no extra work beyond Parent.
+void OpenVRTrackedObject::advanceTime(F32 dt)
+{
+   Parent::advanceTime(dt);
+}
+
+// Ray/object intersection using a slab test against mObjBox (start/end
+// are assumed to be in object space — standard Torque castRay contract).
+// Fails immediately when the device pose is not connected and valid.
+// On hit, fills info with entry time t, point, and a normal pointing
+// back along the ray.
+bool OpenVRTrackedObject::castRay(const Point3F &start, const Point3F &end, RayInfo* info)
+{
+   if (!mPose.connected || !mPose.valid)
+      return false;
+
+   // Collide against bounding box.
+   // Classic slab method: walk x/y/z via pointer increments, tracking
+   // the furthest entry (fst) and nearest exit (fet) parameters.
+   F32 st, et, fst = 0.0f, fet = 1.0f;
+   F32 *bmin = &mObjBox.minExtents.x;
+   F32 *bmax = &mObjBox.maxExtents.x;
+   F32 const *si = &start.x;
+   F32 const *ei = &end.x;
+
+   for (S32 i = 0; i < 3; i++) {
+      if (*si < *ei) {
+         if (*si > *bmax || *ei < *bmin)
+            return false;
+         F32 di = *ei - *si;
+         st = (*si < *bmin) ? (*bmin - *si) / di : 0.0f;
+         et = (*ei > *bmax) ? (*bmax - *si) / di : 1.0f;
+      }
+      else {
+         if (*ei > *bmax || *si < *bmin)
+            return false;
+         F32 di = *ei - *si;
+         st = (*si > *bmax) ? (*bmax - *si) / di : 0.0f;
+         et = (*ei < *bmin) ? (*bmin - *si) / di : 1.0f;
+      }
+      if (st > fst) fst = st;
+      if (et < fet) fet = et;
+      if (fet < fst)
+         return false;
+      bmin++; bmax++;
+      si++; ei++;
+   }
+
+   // Normal points back toward the ray origin, rotated into world space.
+   info->normal = start - end;
+   info->normal.normalizeSafe();
+   getTransform().mulV(info->normal);
+
+   info->t = fst;
+   info->object = this;
+   info->point.interpolate(start, end, fst);
+   info->material = 0;
+   return true;
+}
+
+// Contributes a single BoxConvex (the whole object box) to the given
+// convex working list when the query box overlaps this object.
+void OpenVRTrackedObject::buildConvex(const Box3F& box, Convex* convex)
+{
+   // These should really come out of a pool
+   mConvexList->collectGarbage();
+
+   // Transform the query box into object space before the overlap test.
+   Box3F realBox = box;
+   mWorldToObj.mul(realBox);
+   realBox.minExtents.convolveInverse(mObjScale);
+   realBox.maxExtents.convolveInverse(mObjScale);
+
+   if (realBox.isOverlapped(getObjBox()) == false)
+      return;
+
+   // Just return a box convex for the entire shape...
+   // First check whether our convex is already on the working list.
+   Convex* cc = 0;
+   CollisionWorkingList& wl = convex->getWorkingList();
+   for (CollisionWorkingList* itr = wl.wLink.mNext; itr != &wl; itr = itr->wLink.mNext) {
+      if (itr->mConvex->getType() == BoxConvexType &&
+         itr->mConvex->getObject() == this) {
+         cc = itr->mConvex;
+         break;
+      }
+   }
+   if (cc)
+      return;
+
+   // Create a new convex.
+   BoxConvex* cp = new BoxConvex;
+   mConvexList->registerObject(cp);
+   convex->addToWorkingList(cp);
+   cp->init(this);
+
+   // Half-extents of the object box define the convex size.
+   mObjBox.getCenter(&cp->mCenter);
+   cp->mSize.x = mObjBox.len_x() / 2.0f;
+   cp->mSize.y = mObjBox.len_y() / 2.0f;
+   cp->mSize.z = mObjBox.len_z() / 2.0f;
+}
+
+// Trigger-style overlap test; not implemented yet, always false.
+bool OpenVRTrackedObject::testObject(SceneObject* enter)
+{
+   return false; // TODO
+}
+
+// Script binding: %obj.setModelName(name) — forwards to the C++ setter
+// (which is a no-op on client objects; see setModelName above).
+DefineEngineMethod(OpenVRTrackedObject, setModelName, void, (String modelName),, "Set model name. Typically you should do this from the client to update the server representation.")
+{
+   object->setModelName(modelName);
+}

+ 155 - 0
Engine/source/platform/input/openVR/openVRTrackedObject.h

@@ -0,0 +1,155 @@
+// openVRTrackedObject.h — datablock and GameBase object for rendering
+// and colliding an OpenVR-tracked device (HMD/controller) in the scene.
+#ifndef _OPENVR_TRACKED_OBJECT_H_
+#define _OPENVR_TRACKED_OBJECT_H_
+
+#ifndef _GAMEBASE_H_
+#include "T3D/gameBase/gameBase.h"
+#endif
+#ifndef _GFXVERTEXBUFFER_H_
+#include "gfx/gfxVertexBuffer.h"
+#endif
+#ifndef _GFXPRIMITIVEBUFFER_H_
+#include "gfx/gfxPrimitiveBuffer.h"
+#endif
+#ifndef _TSSHAPEINSTANCE_H_
+#include "ts/tsShapeInstance.h"
+#endif
+#include "collision/earlyOutPolyList.h"
+
+#include <openvr.h>
+
+class BaseMatInstance;
+class OpenVRRenderModel;
+class PhysicsBody;
+
+/// Datablock: shape resource plus collision box extents shared by
+/// OpenVRTrackedObject instances.
+class OpenVRTrackedObjectData : public GameBaseData {
+public:
+   typedef GameBaseData Parent;
+
+   StringTableEntry mShapeFile;
+   Resource<TSShape> mShape; ///< Torque model
+
+   // Object-space collision box extents applied to instances.
+   Point3F mCollisionBoxMin;
+   Point3F mCollisionBoxMax;
+
+public:
+
+   OpenVRTrackedObjectData();
+   ~OpenVRTrackedObjectData();
+
+   DECLARE_CONOBJECT(OpenVRTrackedObjectData);
+
+   bool onAdd();
+   bool preload(bool server, String &errorStr);
+
+   static void  initPersistFields();
+
+   virtual void packData(BitStream* stream);
+   virtual void unpackData(BitStream* stream);
+};
+
+/// Implements a GameObject which tracks an OpenVR controller
+class OpenVRTrackedObject : public GameBase
+{
+   typedef GameBase Parent;
+
+   enum MaskBits
+   {
+      UpdateMask = Parent::NextFreeMask << 0,
+      NextFreeMask = Parent::NextFreeMask << 1
+   };
+
+   /// Binds one named OpenVR render-model component to a shape node.
+   struct RenderModelSlot
+   {
+      StringTableEntry componentName; ///< Component name
+      S16 mappedNodeIdx; ///< Mapped node idx in mShape
+      OpenVRRenderModel *nativeModel; ///< Native model
+   };
+
+   OpenVRTrackedObjectData *mDataBlock;
+
+   /// @name Rendering
+   /// {
+   TSShapeInstance *mShapeInstance; ///< Shape used to render controller (uses native model otherwise)
+   StringTableEntry mModelName;
+   OpenVRRenderModel *mBasicModel; ///< Basic model
+   Vector<RenderModelSlot> mRenderComponents;
+   /// }
+
+   S32 mDeviceIndex; ///< Controller idx in openvr (for direct updating)
+   S32 mMappedMoveIndex; ///< Movemanager move index for rotation
+
+   // Current and previous OpenVR controller button/axis state snapshots.
+   vr::VRControllerState_t mCurrentControllerState;
+   vr::VRControllerState_t mPreviousControllerState;
+
+   IDevicePose mPose; ///< Current openvr pose data, or reconstructed data from the client
+
+   // Collision support: convex working list, clipped poly list and an
+   // optional physics-plugin body.
+   Convex* mConvexList;
+   EarlyOutPolyList     mClippedList;
+   PhysicsBody *mPhysicsRep;
+
+   SimObjectPtr<SceneObject> mCollisionObject; ///< Object we're currently colliding with
+   SimObjectPtr<SceneObject> mInteractObject;  ///< Object we've designated as important to interact with
+
+   bool mHoldInteractedObject; ///< Performs pickup logic with mInteractObject
+   bool mIgnoreParentRotation; ///< Ignores the rotation of the parent object
+
+   static bool smDebugControllerPosition; ///< Shows latest controller position in DebugDrawer
+   static bool smDebugControllerMovePosition; ///< Shows move position in DebugDrawer
+   static U32 sServerCollisionMask;
+   static U32 sClientCollisionMask;
+
+public:
+   OpenVRTrackedObject();
+   virtual ~OpenVRTrackedObject();
+
+   void updateRenderData();
+   void setupRenderDataFromModel(bool loadComponentModels);
+
+   void clearRenderData();
+
+   DECLARE_CONOBJECT(OpenVRTrackedObject);
+
+   static void initPersistFields();
+
+   virtual void inspectPostApply();
+
+   bool onAdd();
+   void onRemove();
+
+
+   void _updatePhysics();
+   bool onNewDataBlock(GameBaseData *dptr, bool reload);
+
+   void setInteractObject(SceneObject* object, bool holding);
+
+   void setTransform(const MatrixF &mat);
+   void setModelName(String &modelName);
+
+   // Network replication.
+   U32  packUpdate(NetConnection *conn, U32 mask, BitStream *stream);
+   void unpackUpdate(NetConnection *conn, BitStream *stream);
+   void writePacketData(GameConnection *conn, BitStream *stream);
+   void readPacketData(GameConnection *conn, BitStream *stream);
+
+   void prepRenderImage(SceneRenderState *state);
+
+   // Pose/transform helpers (live OpenVR pose, cached pose, mount base).
+   MatrixF getTrackedTransform();
+   MatrixF getLastTrackedTransform();
+   MatrixF getBaseTrackingTransform();
+
+   U32 getCollisionMask();
+   void updateWorkingCollisionSet();
+
+   // Time management
+   void updateMove(const Move *move);
+   void processTick(const Move *move);
+   void interpolateTick(F32 delta);
+   void advanceTime(F32 dt);
+
+   // Collision
+   bool castRay(const Point3F &start, const Point3F &end, RayInfo* info);
+   void buildConvex(const Box3F& box, Convex* convex);
+   bool testObject(SceneObject* enter);
+
+};
+
+#endif // _OPENVR_TRACKED_OBJECT_H_

+ 21 - 5
Engine/source/platform/output/IDisplayDevice.h

@@ -34,15 +34,30 @@ class GuiCanvas;
 /// Defines the basic display pose common to most display devices
 typedef struct DisplayPose
 {
-   EulerF orientation;  /// Direction device is facing
+   QuatF orientation;  /// Direction device is facing
    Point3F position;    /// Relative position of device in view space
+
+   Point3F velocity;
+   Point3F angularVelocity;
+
+#ifdef DEBUG_DISPLAY_POSE 
+   MatrixF actualMatrix;
+   MatrixF originalMatrix;
+#endif
+
+   U32 state; /// Generic state
+
+   bool valid; /// Pose set
+   bool connected; /// Device connected
 } IDevicePose;
 
 class IDisplayDevice
 {
 public:
    virtual bool providesFrameEyePose() const = 0;
-   virtual void getFrameEyePose(IDevicePose *pose, U32 eye) const = 0;
+
+	/// Get a display pose for the specified eye, or the HMD if eyeId is -1.
+   virtual void getFrameEyePose(IDevicePose *pose, S32 eyeId) const = 0;
 
    virtual bool providesEyeOffsets() const = 0;
    /// Returns eye offset not taking into account any position tracking info
@@ -51,18 +66,19 @@ public:
    virtual bool providesFovPorts() const = 0;
    virtual void getFovPorts(FovPort *out) const = 0;
 
-   virtual bool providesProjectionOffset() const = 0;
-   virtual const Point2F& getProjectionOffset() const = 0;
-
    virtual void getStereoViewports(RectI *out) const = 0;
    virtual void getStereoTargets(GFXTextureTarget **out) const = 0;
 
    virtual void setDrawCanvas(GuiCanvas *canvas) = 0;
+   virtual void setDrawMode(GFXDevice::GFXDeviceRenderStyles style) = 0;
 
    virtual void setCurrentConnection(GameConnection *connection) = 0;
    virtual GameConnection* getCurrentConnection() = 0;
 
    virtual void onStartFrame() = 0;
+
+   /// Returns a texture handle representing a preview of the composited VR view
+   virtual GFXTexHandle getPreviewTexture() = 0;
 };
 
 #endif   // _IDISPLAYDEVICE_H_

+ 0 - 1
Engine/source/postFx/postEffect.cpp

@@ -154,7 +154,6 @@ GFX_ImplementTextureProfile( VRTextureProfile,
 GFX_ImplementTextureProfile( VRDepthProfile,
                             GFXTextureProfile::DiffuseMap,
                             GFXTextureProfile::PreserveSize |
-                            GFXTextureProfile::RenderTarget |
                             GFXTextureProfile::NoMipmap |
                             GFXTextureProfile::ZTarget,
                             GFXTextureProfile::NONE );

+ 1 - 0
Engine/source/postFx/postEffectManager.h

@@ -127,6 +127,7 @@ public:
    const PFXFrameState &getFrameState() const { return mFrameState[mFrameStateSwitch]; }
    const PFXFrameState &getLastFrameState() const { return mFrameState[!mFrameStateSwitch]; }
 
+   void setFrameState(const PFXFrameState& newState) { mFrameState[mFrameStateSwitch] = newState; }
    void setFrameMatrices( const MatrixF &worldToCamera, const MatrixF &cameraToScreen );
    
    // For ManagedSingleton.

+ 54 - 9
Engine/source/scene/reflectionManager.cpp

@@ -28,6 +28,7 @@
 #include "console/consoleTypes.h"
 #include "core/tAlgorithm.h"
 #include "math/mMathFn.h"
+#include "math/mathUtils.h"
 #include "T3D/gameBase/gameConnection.h"
 #include "ts/tsShapeInstance.h"
 #include "gui/3d/guiTSControl.h"
@@ -94,9 +95,9 @@ ReflectionManager::ReflectionManager()
 void ReflectionManager::initConsole()
 {
    Con::addVariable( "$pref::Reflect::refractTexScale", TypeF32, &ReflectionManager::smRefractTexScale, "RefractTex has dimensions equal to the active render target scaled in both x and y by this float.\n"
-	   "@ingroup Rendering");
+      "@ingroup Rendering");
    Con::addVariable( "$pref::Reflect::frameLimitMS", TypeS32, &ReflectionManager::smFrameReflectionMS, "ReflectionManager tries not to spend more than this amount of time updating reflections per frame.\n"
-	   "@ingroup Rendering");
+      "@ingroup Rendering");
 }
 
 ReflectionManager::~ReflectionManager()
@@ -134,12 +135,49 @@ void ReflectionManager::update(  F32 timeSlice,
    // Setup a culler for testing the 
    // visibility of reflectors.
    Frustum culler;
-   culler.set( false,
-               query.fov,
-               (F32)resolution.x / (F32)resolution.y,
-               query.nearPlane, 
-               query.farPlane,
-               query.cameraMatrix );
+
+   // jamesu - normally we just need a frustum which covers the current ports, however for SBS mode 
+   // we need something which covers both viewports.
+   S32 stereoTarget = GFX->getCurrentStereoTarget();
+   if (stereoTarget != -1)
+   {
+      // In this case we're rendering in stereo using a specific eye
+      MathUtils::makeFovPortFrustum(&culler, false, query.nearPlane, query.farPlane, query.fovPort[stereoTarget], query.headMatrix);
+   }
+   else if (GFX->getCurrentRenderStyle() == GFXDevice::RS_StereoSideBySide)
+   {
+      // Calculate an ideal culling size here, we'll just assume double fov based on the first fovport based on 
+      // the head position.
+      FovPort port = query.fovPort[0];
+      F32 leftSize = query.nearPlane * port.leftTan;
+      F32 rightSize = query.nearPlane * port.rightTan;
+      F32 upSize = query.nearPlane * port.upTan;
+      F32 downSize = query.nearPlane * port.downTan;
+
+      F32 left = -leftSize;
+      F32 right = rightSize;
+      F32 top = upSize;
+      F32 bottom = -downSize;
+
+      F32 fovInRadians = mAtan2((top - bottom) / 2.0f, query.nearPlane) * 3.0f;
+
+      culler.set(false,
+         fovInRadians,
+         (F32)(query.stereoViewports[0].extent.x + query.stereoViewports[1].extent.x) / (F32)query.stereoViewports[0].extent.y,
+         query.nearPlane,
+         query.farPlane,
+         query.headMatrix);
+   }
+   else
+   {
+      // Normal culling
+      culler.set(false,
+         query.fov,
+         (F32)resolution.x / (F32)resolution.y,
+         query.nearPlane,
+         query.farPlane,
+         query.cameraMatrix);
+   }
 
    // Manipulate the frustum for tiled screenshots
    const bool screenShotMode = gScreenShot && gScreenShot->isPending();
@@ -159,6 +197,7 @@ void ReflectionManager::update(  F32 timeSlice,
    refparams.viewportExtent = resolution;
    refparams.culler = culler;
    refparams.startOfUpdateMs = startOfUpdateMs;
+   refparams.eyeId = stereoTarget;
 
    // Update the reflection score.
    ReflectorList::iterator reflectorIter = mReflectors.begin();
@@ -173,6 +212,7 @@ void ReflectionManager::update(  F32 timeSlice,
    mTimer->getElapsedMs();
    mTimer->reset();
    U32 numUpdated = 0;
+   U32 currentTarget = stereoTarget >= 0 ? stereoTarget : 0;
    reflectorIter = mReflectors.begin();
    for ( ; reflectorIter != mReflectors.end(); reflectorIter++ )
    {      
@@ -182,7 +222,12 @@ void ReflectionManager::update(  F32 timeSlice,
          break;
 
       (*reflectorIter)->updateReflection( refparams );
-      (*reflectorIter)->lastUpdateMs = startOfUpdateMs;
+
+     if (stereoTarget != 0) // only update MS if we're not rendering the left eye in separate mode
+     {
+        (*reflectorIter)->lastUpdateMs = startOfUpdateMs;
+     }
+
       numUpdated++;
 
       // If we run out of update time then stop.

+ 94 - 43
Engine/source/scene/reflector.cpp

@@ -39,6 +39,7 @@
 #include "math/mathUtils.h"
 #include "math/util/frustum.h"
 #include "gfx/screenshot.h"
+#include "postFx/postEffectManager.h"
 
 extern ColorI gCanvasClearColor;
 
@@ -418,7 +419,7 @@ void CubeReflector::updateFace( const ReflectParams &params, U32 faceidx )
    );
 
    reflectRenderState.getMaterialDelegate().bind( REFLECTMGR, &ReflectionManager::getReflectionMaterial );
-   reflectRenderState.setDiffuseCameraTransform( params.query->cameraMatrix );
+   reflectRenderState.setDiffuseCameraTransform( params.query->headMatrix );
 
    // render scene
    LIGHTMGR->registerGlobalLights( &reflectRenderState.getCullingFrustum(), false );
@@ -532,31 +533,48 @@ void PlaneReflector::updateReflection( const ReflectParams &params )
    texDim = getMin( texDim, params.viewportExtent.x );
    texDim = getMin( texDim, params.viewportExtent.y );
 
-   bool texResize = ( texDim != mLastTexSize );  
-   mLastTexSize = texDim;
+   S32 currentTarget = params.eyeId >= 0 ? params.eyeId : 0;
 
-   const Point2I texSize( texDim, texDim );
+   const Point2I texSize = Point2I(texDim, texDim);
+
+   bool texResize = (texSize != mLastTexSize);
+   mLastTexSize = texSize;
 
    if (  texResize || 
-         reflectTex.isNull() ||
+         innerReflectTex[currentTarget].isNull() || 
+       innerReflectTex[currentTarget]->getSize() != texSize || 
          reflectTex->getFormat() != REFLECTMGR->getReflectFormat() )
    {
-      reflectTex = REFLECTMGR->allocRenderTarget( texSize );
-      depthBuff = LightShadowMap::_getDepthTarget( texSize.x, texSize.y );
+      innerReflectTex[currentTarget] = REFLECTMGR->allocRenderTarget( texSize );
    }
 
-   // store current matrices
-   GFXTransformSaver saver;
-   
-   Point2I viewport(params.viewportExtent);
-   if(GFX->getCurrentRenderStyle() == GFXDevice::RS_StereoSideBySide)
+   if ( texResize || depthBuff.isNull() )
    {
-      viewport.x *= 0.5f;
+     depthBuff = LightShadowMap::_getDepthTarget(texSize.x, texSize.y);
    }
-   F32 aspectRatio = F32( viewport.x ) / F32( viewport.y );
+
+   reflectTex = innerReflectTex[currentTarget];
+
+   // store current matrices
+   GFXTransformSaver saver;
 
    Frustum frustum;
-   frustum.set(false, params.query->fov, aspectRatio, params.query->nearPlane, params.query->farPlane);
+
+   S32 stereoTarget = GFX->getCurrentStereoTarget();
+   if (stereoTarget != -1)
+   {
+      MathUtils::makeFovPortFrustum(&frustum, false, params.query->nearPlane, params.query->farPlane, params.query->fovPort[stereoTarget]);
+   }
+   else
+   {
+      Point2I viewport(params.viewportExtent);
+      if (GFX->getCurrentRenderStyle() == GFXDevice::RS_StereoSideBySide)
+      {
+         viewport.x *= 0.5f;
+      }
+      F32 aspectRatio = F32(viewport.x) / F32(viewport.y);
+      frustum.set(false, params.query->fov, aspectRatio, params.query->nearPlane, params.query->farPlane);
+   }
 
    // Manipulate the frustum for tiled screenshots
    const bool screenShotMode = gScreenShot && gScreenShot->isPending();
@@ -578,10 +596,10 @@ void PlaneReflector::updateReflection( const ReflectParams &params )
 
    if(reflectTarget.isNull())
       reflectTarget = GFX->allocRenderToTextureTarget();
-   reflectTarget->attachTexture( GFXTextureTarget::Color0, reflectTex );
+   reflectTarget->attachTexture( GFXTextureTarget::Color0, innerReflectTex[currentTarget] );
    reflectTarget->attachTexture( GFXTextureTarget::DepthStencil, depthBuff );
    GFX->pushActiveRenderTarget();
-   GFX->setActiveRenderTarget( reflectTarget );   
+   GFX->setActiveRenderTarget( reflectTarget );
 
    U32 objTypeFlag = -1;
    SceneCameraState reflectCameraState = SceneCameraState::fromGFX();
@@ -603,10 +621,21 @@ void PlaneReflector::updateReflection( const ReflectParams &params )
    {
       // Store previous values
       RectI originalVP = GFX->getViewport();
-
-      Point2F projOffset = GFX->getCurrentProjectionOffset();
-      const FovPort *currentFovPort = GFX->getStereoFovPort();
-      MatrixF inverseEyeTransforms[2];
+      MatrixF origNonClipProjection = gClientSceneGraph->getNonClipProjection();
+      PFXFrameState origPFXState = PFXMGR->getFrameState();
+
+     const FovPort *currentFovPort = params.query->fovPort;
+     MatrixF inverseEyeTransforms[2];
+     Frustum gfxFrustum;
+
+     // Calculate viewport based on texture size
+     RectI stereoViewports[2];
+     stereoViewports[0] = params.query->stereoViewports[0];
+     stereoViewports[1] = params.query->stereoViewports[1];
+     stereoViewports[0].extent.x = stereoViewports[1].extent.x = texSize.x / 2;
+     stereoViewports[0].extent.y = stereoViewports[1].extent.y = texSize.y;
+     stereoViewports[0].point.x = 0;
+     stereoViewports[1].point.x = stereoViewports[0].extent.x;
 
       // Calculate world transforms for eyes
       inverseEyeTransforms[0] = params.query->eyeTransforms[0];
@@ -614,50 +643,64 @@ void PlaneReflector::updateReflection( const ReflectParams &params )
       inverseEyeTransforms[0].inverse();
       inverseEyeTransforms[1].inverse();
 
-      Frustum originalFrustum = GFX->getFrustum();
-
+     //
       // Render left half of display
-      GFX->activateStereoTarget(0);
-      GFX->setWorldMatrix(params.query->eyeTransforms[0]);
+      //
 
-      Frustum gfxFrustum = originalFrustum;
-      MathUtils::makeFovPortFrustum(&gfxFrustum, gfxFrustum.isOrtho(), gfxFrustum.getNearDist(), gfxFrustum.getFarDist(), currentFovPort[0], inverseEyeTransforms[0]);
+     GFX->setViewport(stereoViewports[0]);
+     GFX->setCurrentStereoTarget(0);
+      MathUtils::makeFovPortFrustum(&gfxFrustum, params.query->ortho, params.query->nearPlane, params.query->farPlane, params.query->fovPort[0]);
+     gfxFrustum.update();
       GFX->setFrustum(gfxFrustum);
 
       setGFXMatrices( params.query->eyeTransforms[0] );
 
-      SceneCameraState cameraStateLeft = SceneCameraState::fromGFX();
-      SceneRenderState renderStateLeft( gClientSceneGraph, SPT_Reflect, cameraStateLeft );
+     SceneRenderState renderStateLeft
+      (
+        gClientSceneGraph,
+        SPT_Reflect,
+        SceneCameraState::fromGFX()
+      );
+
       renderStateLeft.setSceneRenderStyle(SRS_SideBySide);
-      renderStateLeft.setSceneRenderField(0);
       renderStateLeft.getMaterialDelegate().bind( REFLECTMGR, &ReflectionManager::getReflectionMaterial );
-      renderStateLeft.setDiffuseCameraTransform( params.query->eyeTransforms[0] );
+     renderStateLeft.setDiffuseCameraTransform(params.query->headMatrix);
+     //renderStateLeft.disableAdvancedLightingBins(true);
 
       gClientSceneGraph->renderSceneNoLights( &renderStateLeft, objTypeFlag );
 
+     //
       // Render right half of display
-      GFX->activateStereoTarget(1);
-      GFX->setWorldMatrix(params.query->eyeTransforms[1]);
+     //
 
-      gfxFrustum = originalFrustum;
-      MathUtils::makeFovPortFrustum(&gfxFrustum, gfxFrustum.isOrtho(), gfxFrustum.getNearDist(), gfxFrustum.getFarDist(), currentFovPort[1], inverseEyeTransforms[1]);
+     GFX->setViewport(stereoViewports[1]);
+     GFX->setCurrentStereoTarget(1);
+     MathUtils::makeFovPortFrustum(&gfxFrustum, params.query->ortho, params.query->nearPlane, params.query->farPlane, params.query->fovPort[1]);
+     gfxFrustum.update();
       GFX->setFrustum(gfxFrustum);
 
       setGFXMatrices( params.query->eyeTransforms[1] );
 
-      SceneCameraState cameraStateRight = SceneCameraState::fromGFX();
-      SceneRenderState renderStateRight( gClientSceneGraph, SPT_Reflect, cameraStateRight );
+     SceneRenderState renderStateRight
+     (
+        gClientSceneGraph,
+        SPT_Reflect,
+        SceneCameraState::fromGFX()
+     );
+
       renderStateRight.setSceneRenderStyle(SRS_SideBySide);
-      renderStateRight.setSceneRenderField(1);
       renderStateRight.getMaterialDelegate().bind( REFLECTMGR, &ReflectionManager::getReflectionMaterial );
-      renderStateRight.setDiffuseCameraTransform( params.query->eyeTransforms[1] );
-      renderStateRight.disableAdvancedLightingBins(true);
+      renderStateRight.setDiffuseCameraTransform( params.query->headMatrix );
+      //renderStateRight.disableAdvancedLightingBins(true);
 
       gClientSceneGraph->renderSceneNoLights( &renderStateRight, objTypeFlag );
 
       // Restore previous values
-      GFX->setFrustum(gfxFrustum);
+      GFX->setFrustum(frustum);
       GFX->setViewport(originalVP);
+      gClientSceneGraph->setNonClipProjection(origNonClipProjection);
+      PFXMGR->setFrameState(origPFXState);
+     GFX->setCurrentStereoTarget(-1);
    }
    else
    {
@@ -669,7 +712,7 @@ void PlaneReflector::updateReflection( const ReflectParams &params )
       );
 
       reflectRenderState.getMaterialDelegate().bind( REFLECTMGR, &ReflectionManager::getReflectionMaterial );
-      reflectRenderState.setDiffuseCameraTransform( params.query->cameraMatrix );
+      reflectRenderState.setDiffuseCameraTransform( params.query->headMatrix );
 
       gClientSceneGraph->renderSceneNoLights( &reflectRenderState, objTypeFlag );
    }
@@ -680,6 +723,14 @@ void PlaneReflector::updateReflection( const ReflectParams &params )
    reflectTarget->resolve();
    GFX->popActiveRenderTarget();
 
+#ifdef DEBUG_REFLECT_TEX
+   static U32 reflectStage = 0;
+   char buf[128]; dSprintf(buf, 128, "F:\\REFLECT-OUT%i.PNG", reflectStage);
+   //reflectTex->dumpToDisk("PNG", buf);
+   reflectStage++;
+   if (reflectStage > 1) reflectStage = 0;
+#endif
+
    // Restore detail adjust amount.
    TSShapeInstance::smDetailAdjust = detailAdjustBackup;
 
@@ -793,7 +844,7 @@ MatrixF PlaneReflector::getFrustumClipProj( MatrixF &modelview )
    // as (sgn(clipPlane.x), sgn(clipPlane.y), 1, 1) and
    // transform it into camera space by multiplying it
    // by the inverse of the projection matrix
-   Vector4F	q;
+   Vector4F   q;
    q.x = sgn(clipPlane.x) / proj(0,0);
    q.y = sgn(clipPlane.y) / proj(1,1);
    q.z = -1.0F;

+ 6 - 3
Engine/source/scene/reflector.h

@@ -53,6 +53,7 @@ struct ReflectParams
    Point2I viewportExtent;
    Frustum culler;
    U32 startOfUpdateMs;
+   S8 eyeId;
 };
 
 
@@ -191,7 +192,7 @@ public:
    {
       refplane.set( Point3F(0,0,0), Point3F(0,0,1) );
       objectSpace = false;
-      mLastTexSize = 0;
+      mLastTexSize = Point2I(0,0);
    }
 
    virtual ~PlaneReflector() {}
@@ -213,7 +214,7 @@ public:
 
 protected:
 
-   U32 mLastTexSize;
+   Point2I mLastTexSize;
 
    // The camera position at the last update.
    Point3F mLastPos;
@@ -224,7 +225,9 @@ protected:
 public:
 
    GFXTextureTargetRef reflectTarget;
-   GFXTexHandle reflectTex;
+
+   GFXTexHandle innerReflectTex[2]; /// < Textures we actually render to
+   GFXTexHandle reflectTex; ///< Last texture we rendered to
    GFXTexHandle depthBuff;
    PlaneF refplane;
    bool objectSpace;

+ 13 - 2
Engine/source/scene/sceneCameraState.cpp

@@ -32,6 +32,7 @@ SceneCameraState::SceneCameraState( const RectI& viewport, const Frustum& frustu
    : mViewport( viewport ),
      mFrustum( frustum ),
      mWorldViewMatrix( worldView ),
+     mHeadWorldViewMatrix( worldView ),
      mProjectionMatrix( projection )
 {
    mViewDirection = frustum.getTransform().getForwardVector();
@@ -39,7 +40,7 @@ SceneCameraState::SceneCameraState( const RectI& viewport, const Frustum& frustu
 
 //-----------------------------------------------------------------------------
 
-SceneCameraState SceneCameraState::fromGFX()
+SceneCameraState SceneCameraState::fromGFX( )
 {
    return fromGFXWithViewport( GFX->getViewport() );
 }
@@ -56,10 +57,20 @@ SceneCameraState SceneCameraState::fromGFXWithViewport( const RectI& viewport )
    Frustum frustum = GFX->getFrustum();
    frustum.setTransform( camera );
 
-   return SceneCameraState(
+   SceneCameraState ret = SceneCameraState(
       viewport,
       frustum,
       world,
       GFX->getProjectionMatrix()
    );
+
+   // If rendering to stereo, make sure we get the head matrix
+   S32 stereoTarget = GFX->getCurrentStereoTarget();
+   if (stereoTarget != -1)
+   {
+      ret.mHeadWorldViewMatrix = GFX->getStereoHeadTransform();
+      ret.mHeadWorldViewMatrix.inverse();
+   }
+
+   return ret;
 }

+ 6 - 0
Engine/source/scene/sceneCameraState.h

@@ -51,6 +51,9 @@ class SceneCameraState
       /// The inverse of the frustum's transform stored here for caching.
       MatrixF mWorldViewMatrix;
 
+      /// Actual head position (will be - eye pos)
+      MatrixF mHeadWorldViewMatrix;
+
       /// The projection matrix.
       MatrixF mProjectionMatrix;
 
@@ -88,6 +91,9 @@ class SceneCameraState
       /// Return the world-space view vector.
       const Point3F& getViewDirection() const { return mViewDirection; }
 
+      /// Returns the world->view transform for the head (used to calculate various display metrics)
+      const MatrixF& getHeadWorldViewMatrix() const { return mHeadWorldViewMatrix; }
+
       /// Return the view->world transform.  This is a shortcut for getFrustum().getTransform().
       const MatrixF& getViewWorldMatrix() const { return mFrustum.getTransform(); }
 

+ 17 - 7
Engine/source/scene/sceneManager.cpp

@@ -41,6 +41,8 @@
 // For player object bounds workaround.
 #include "T3D/player.h"
 
+#include "postFx/postEffectManager.h"
+
 extern bool gEditingMission;
 
 
@@ -239,7 +241,10 @@ void SceneManager::renderScene( SceneRenderState* renderState, U32 objectMask, S
       MatrixF originalWorld = GFX->getWorldMatrix();
       Frustum originalFrustum = GFX->getFrustum();
 
-      Point2F projOffset = GFX->getCurrentProjectionOffset();
+      // Save PFX & SceneManager projections
+      MatrixF origNonClipProjection = renderState->getSceneManager()->getNonClipProjection();
+      PFXFrameState origPFXState = PFXMGR->getFrameState();
+
       const FovPort *currentFovPort = GFX->getStereoFovPort();
       const MatrixF *eyeTransforms = GFX->getStereoEyeTransforms();
       const MatrixF *worldEyeTransforms = GFX->getInverseStereoEyeTransforms();
@@ -251,15 +256,16 @@ void SceneManager::renderScene( SceneRenderState* renderState, U32 objectMask, S
       GFX->setWorldMatrix(worldEyeTransforms[0]);
 
       Frustum gfxFrustum = originalFrustum;
-      MathUtils::makeFovPortFrustum(&gfxFrustum, gfxFrustum.isOrtho(), gfxFrustum.getNearDist(), gfxFrustum.getFarDist(), currentFovPort[0], eyeTransforms[0]);
+      MathUtils::makeFovPortFrustum(&gfxFrustum, gfxFrustum.isOrtho(), gfxFrustum.getNearDist(), gfxFrustum.getFarDist(), currentFovPort[0]);
       GFX->setFrustum(gfxFrustum);
 
       SceneCameraState cameraStateLeft = SceneCameraState::fromGFX();
       SceneRenderState renderStateLeft( this, renderState->getScenePassType(), cameraStateLeft );
+      renderStateLeft.getSceneManager()->setNonClipProjection(GFX->getProjectionMatrix());
       renderStateLeft.setSceneRenderStyle(SRS_SideBySide);
-      renderStateLeft.setSceneRenderField(0);
+      PFXMGR->setFrameMatrices(GFX->getWorldMatrix(), GFX->getProjectionMatrix());
 
-      renderSceneNoLights( &renderStateLeft, objectMask, baseObject, baseZone );
+      renderSceneNoLights( &renderStateLeft, objectMask, baseObject, baseZone ); // left
 
       // Indicate that we've just finished a field
       //GFX->clear(GFXClearTarget | GFXClearZBuffer | GFXClearStencil, ColorI(255,0,0), 1.0f, 0);
@@ -271,21 +277,25 @@ void SceneManager::renderScene( SceneRenderState* renderState, U32 objectMask, S
       GFX->setWorldMatrix(worldEyeTransforms[1]);
 
       gfxFrustum = originalFrustum;
-      MathUtils::makeFovPortFrustum(&gfxFrustum, gfxFrustum.isOrtho(), gfxFrustum.getNearDist(), gfxFrustum.getFarDist(), currentFovPort[1], eyeTransforms[1]);
+      MathUtils::makeFovPortFrustum(&gfxFrustum, gfxFrustum.isOrtho(), gfxFrustum.getNearDist(), gfxFrustum.getFarDist(), currentFovPort[1]);
       GFX->setFrustum(gfxFrustum);
 
       SceneCameraState cameraStateRight = SceneCameraState::fromGFX();
       SceneRenderState renderStateRight( this, renderState->getScenePassType(), cameraStateRight );
+      renderStateRight.getSceneManager()->setNonClipProjection(GFX->getProjectionMatrix());
       renderStateRight.setSceneRenderStyle(SRS_SideBySide);
-      renderStateRight.setSceneRenderField(1);
+      PFXMGR->setFrameMatrices(GFX->getWorldMatrix(), GFX->getProjectionMatrix());
 
-      renderSceneNoLights( &renderStateRight, objectMask, baseObject, baseZone );
+      renderSceneNoLights( &renderStateRight, objectMask, baseObject, baseZone ); // right
 
       // Indicate that we've just finished a field
       //GFX->clear(GFXClearTarget | GFXClearZBuffer | GFXClearStencil, ColorI(0,255,0), 1.0f, 0);
       GFX->endField();
 
       // Restore previous values
+      renderState->getSceneManager()->setNonClipProjection(origNonClipProjection);
+      PFXMGR->setFrameState(origPFXState);
+
       GFX->setWorldMatrix(originalWorld);
       GFX->setFrustum(originalFrustum);
       GFX->setViewport(originalVP);

+ 3 - 3
Engine/source/scene/sceneRenderState.cpp

@@ -48,11 +48,11 @@ SceneRenderState::SceneRenderState( SceneManager* sceneManager,
       mDisableAdvancedLightingBins( false ),
       mRenderArea( view.getFrustum().getBounds() ),
       mAmbientLightColor( sceneManager->getAmbientLightColor() ),
-      mSceneRenderStyle( SRS_Standard ),
-      mRenderField( 0 )
+      mSceneRenderStyle( SRS_Standard )
 {
    // Setup the default parameters for the screen metrics methods.
-   mDiffuseCameraTransform = view.getViewWorldMatrix();
+   mDiffuseCameraTransform = view.getHeadWorldViewMatrix();
+   mDiffuseCameraTransform.inverse();
 
    // The vector eye is the camera vector with its 
    // length normalized to 1 / zFar.

+ 0 - 9
Engine/source/scene/sceneRenderState.h

@@ -72,9 +72,6 @@ class SceneRenderState
       /// The render style being performed
       SceneRenderStyle mSceneRenderStyle;
 
-      /// When doing stereo rendering, the current field that is being rendered
-      S32 mRenderField;
-
       /// The render pass which we are setting up with this scene state.
       RenderPassManager* mRenderPass;
 
@@ -237,12 +234,6 @@ class SceneRenderState
       /// Set the rendering style used for the scene
       void setSceneRenderStyle(SceneRenderStyle style) { mSceneRenderStyle = style; }
 
-      /// Get the stereo field being rendered
-      S32 getSceneRenderField() const { return mRenderField; }
-
-      /// Set the stereo field being rendered
-      void setSceneRenderField(S32 field) { mRenderField = field; }
-
       /// @}
 
       /// @name Transforms, projections, and viewports.

+ 2 - 3
Engine/source/sim/actionMap.cpp

@@ -1450,9 +1450,8 @@ bool ActionMap::processAction(const InputEventInfo* pEvent)
          }
          else
          {
-            // Handle rotation (QuatF)
-            QuatF quat(pEvent->fValue, pEvent->fValue2, pEvent->fValue3, pEvent->fValue4);
-            AngAxisF aa(quat);
+            // Handle rotation (AngAxisF)
+            AngAxisF aa(Point3F(pEvent->fValue, pEvent->fValue2, pEvent->fValue3), pEvent->fValue4);
             aa.axis.normalize();
             argv[1] = Con::getFloatArg( aa.axis.x );
             argv[2] = Con::getFloatArg( aa.axis.y );

+ 2 - 2
Engine/source/windowManager/windowInputGenerator.cpp

@@ -82,7 +82,7 @@ WindowInputGenerator::~WindowInputGenerator()
 //-----------------------------------------------------------------------------
 void WindowInputGenerator::generateInputEvent( InputEventInfo &inputEvent )
 {
-   if (!mInputController || !mFocused)
+   if (!mInputController)// || !mFocused)
       return;
 
    if (inputEvent.action == SI_MAKE && inputEvent.deviceType == KeyboardDeviceType)
@@ -331,7 +331,7 @@ void WindowInputGenerator::handleKeyboard( WindowId did, U32 modifier, U32 actio
 void WindowInputGenerator::handleInputEvent( U32 deviceInst, F32 fValue, F32 fValue2, F32 fValue3, F32 fValue4, S32 iValue, U16 deviceType, U16 objType, U16 ascii, U16 objInst, U8 action, U8 modifier )
 {
    // Skip it if we don't have focus.
-   if(!mInputController || !mFocused)
+   if(!mInputController)// || !mFocused)
       return;
 
    // Convert to an InputEventInfo and pass it around for processing.

+ 18 - 0
Templates/Full/game/scripts/client/default.bind.cs

@@ -752,3 +752,21 @@ vehicleMap.bind( gamepad, btn_b, brake );
 vehicleMap.bind( gamepad, btn_x, movebackward );
 // bind exiting the vehicle to a button
 vehicleMap.bindCmd(gamepad, btn_y,"getout();","");
+
+
+// ----------------------------------------------------------------------------
+// Oculus Rift
+// ----------------------------------------------------------------------------
+
+function OVRSensorRotEuler(%pitch, %roll, %yaw)
+{
+   //echo("Sensor euler: " @ %pitch SPC %roll SPC %yaw);
+   $mvRotZ0 = %yaw;
+   $mvRotX0 = %pitch;
+   $mvRotY0 = %roll;
+}
+
+$mvRotIsEuler0 = true;
+$OculusVR::GenerateAngleAxisRotationEvents = false;
+$OculusVR::GenerateEulerRotationEvents = true;
+moveMap.bind( oculusvr, ovr_sensorrotang0, OVRSensorRotEuler );

+ 32 - 0
Tools/CMake/modules/module_openvr.cmake

@@ -0,0 +1,32 @@
+
+# module openvr
+
+option(TORQUE_OPENVR "Enable openvr module" OFF)
+mark_as_advanced(TORQUE_OPENVR)
+if(TORQUE_OPENVR)
+	if(TORQUE_OPENVR_SDK_PATH STREQUAL "")
+		set(TORQUE_OPENVR_SDK_PATH "" CACHE PATH "openvr library path" FORCE)
+	endif()
+else() # hide variable
+	set(TORQUE_OPENVR_SDK_PATH "" CACHE INTERNAL "" FORCE) 
+endif() 
+ 
+if(TORQUE_OPENVR)
+	# Source
+	addPathRec( "${srcDir}/platform/input/openvr" )
+
+	# Includes
+	addInclude( "${TORQUE_OPENVR_SDK_PATH}/headers" )
+	 
+	# Libs
+	if( WIN32 ) 
+		if( TORQUE_CPU_X64 )
+		link_directories( "${TORQUE_OPENVR_SDK_PATH}/lib/win64" )
+		else()
+		link_directories( "${TORQUE_OPENVR_SDK_PATH}/lib/win32" )
+		endif()
+		addLib( "openvr_api" )
+	endif()
+
+    addDef(TORQUE_OPENVR)
+endif()

部分文件因文件數量過多而無法顯示