
Add basic support for showing openvr controllers and tracked objects

James Urquhart, 9 years ago
commit e6159a590a

+ 34 - 19
Engine/source/T3D/gameBase/extended/extendedMove.cpp

@@ -16,15 +16,17 @@ MODULE_BEGIN( ExtendedMoveManager )
 
 MODULE_END;
 
-S32 ExtendedMoveManager::mPosX[ExtendedMove::MaxPositionsRotations] = { 0, };
-S32 ExtendedMoveManager::mPosY[ExtendedMove::MaxPositionsRotations] = { 0, };
-S32 ExtendedMoveManager::mPosZ[ExtendedMove::MaxPositionsRotations] = { 0, };
+F32 ExtendedMoveManager::mPosX[ExtendedMove::MaxPositionsRotations] = { 0, };
+F32 ExtendedMoveManager::mPosY[ExtendedMove::MaxPositionsRotations] = { 0, };
+F32 ExtendedMoveManager::mPosZ[ExtendedMove::MaxPositionsRotations] = { 0, };
 bool ExtendedMoveManager::mRotIsEuler[ExtendedMove::MaxPositionsRotations] = { 0, };
 F32 ExtendedMoveManager::mRotAX[ExtendedMove::MaxPositionsRotations] = { 0, };
 F32 ExtendedMoveManager::mRotAY[ExtendedMove::MaxPositionsRotations] = { 0, };
 F32 ExtendedMoveManager::mRotAZ[ExtendedMove::MaxPositionsRotations] = { 0, };
 F32 ExtendedMoveManager::mRotAA[ExtendedMove::MaxPositionsRotations] = { 1, };
 
+F32 ExtendedMoveManager::mPosScale = 2.0f;
+
 void ExtendedMoveManager::init()
 {
    for(U32 i = 0; i < ExtendedMove::MaxPositionsRotations; ++i)
@@ -32,17 +34,17 @@ void ExtendedMoveManager::init()
       char varName[256];
 
       dSprintf(varName, sizeof(varName), "mvPosX%d", i);
-      Con::addVariable(varName, TypeS32, &mPosX[i], 
+      Con::addVariable(varName, TypeF32, &mPosX[i], 
         "X position of controller in millimeters.  Only 13 bits are networked.\n"
	      "@ingroup Game");
 
       dSprintf(varName, sizeof(varName), "mvPosY%d", i);
-      Con::addVariable(varName, TypeS32, &mPosY[i], 
+      Con::addVariable(varName, TypeF32, &mPosY[i],
         "Y position of controller in millimeters.  Only 13 bits are networked.\n"
	      "@ingroup Game");
 
       dSprintf(varName, sizeof(varName), "mvPosZ%d", i);
-      Con::addVariable(varName, TypeS32, &mPosZ[i], 
+      Con::addVariable(varName, TypeF32, &mPosZ[i],
         "Z position of controller in millimeters.  Only 13 bits are networked.\n"
	      "@ingroup Game");
 
@@ -75,6 +77,11 @@ void ExtendedMoveManager::init()
         "Angle rotation (in degrees) component of controller.\n"
	      "@ingroup Game");
    }
+
+   Con::addVariable("mvPosScale", TypeF32, &mPosScale,
+	   "@brief Indicates the scale to be given to mvPos values.\n\n"
+	   ""
+	   "@ingroup Game");
 }
 
 const ExtendedMove NullExtendedMove;
@@ -183,8 +190,8 @@ void ExtendedMove::unpack(BitStream *stream, const Move * basemove)
         // Position
         if (stream->readFlag())
         {
-            posX[i] = stream->readInt(MaxPositionBits);
-            cposX[i] = UNCLAMPPOS(posX[i]);
+            cposX[i] = stream->readInt(MaxPositionBits);
+            posX[i] = UNCLAMPPOS(cposX[i]) * ExtendedMoveManager::mPosScale;
         }
         else
            posX[i] = extBaseMove->posX[i];
@@ -192,7 +199,7 @@ void ExtendedMove::unpack(BitStream *stream, const Move * basemove)
         if (stream->readFlag())
         {
            cposY[i] = stream->readInt(MaxPositionBits);
-            posY[i] = UNCLAMPPOS(cposY[i]);
+            posY[i] = UNCLAMPPOS(cposY[i]) * ExtendedMoveManager::mPosScale;
         }
         else
            posY[i] = extBaseMove->posY[i];
@@ -200,7 +207,7 @@ void ExtendedMove::unpack(BitStream *stream, const Move * basemove)
         if (stream->readFlag())
         {
            cposZ[i] = stream->readInt(MaxPositionBits);
-            posZ[i] = UNCLAMPPOS(cposZ[i]);
+            posZ[i] = UNCLAMPPOS(cposZ[i]) * ExtendedMoveManager::mPosScale;
         }
         else
            posZ[i] = extBaseMove->posZ[i];
@@ -267,9 +274,9 @@ void ExtendedMove::clamp()
    for(U32 i=0; i<MaxPositionsRotations; ++i)
    {
       // Positions
-      cposX[i] = CLAMPPOS(posX[i]);
-      cposY[i] = CLAMPPOS(posY[i]);
-      cposZ[i] = CLAMPPOS(posZ[i]);
+      cposX[i] = CLAMPPOS(posX[i] / ExtendedMoveManager::mPosScale);
+      cposY[i] = CLAMPPOS(posY[i] / ExtendedMoveManager::mPosScale);
+      cposZ[i] = CLAMPPOS(posZ[i] / ExtendedMoveManager::mPosScale);
 
       // Rotations
       if(EulerBasedRotation[i])
@@ -286,15 +293,23 @@ void ExtendedMove::clamp()
         crotW[i] = CLAMPROT(rotW[i] / M_2PI_F);
      }
 
-      /*if (i == 0)
+	  #ifdef DEBUG_CONTROLLER_MOVE
+      if (i == 1)
       {
           F32 x, y, z, a;
           x = UNCLAMPPOS(crotX[i]);
           y = UNCLAMPPOS(crotY[i]);
           z = UNCLAMPPOS(crotZ[i]);
           a = UNCLAMPROT(crotW[i]) * M_2PI_F;
-          //Con::printf("rot %f,%f,%f,%f clamped to %f,%f,%f,%f", rotX[i], rotY[i], rotZ[i], rotW[i], x,y,z,a);
-      }*/
+
+		  Con::printf("INPUT POS == %f,%f,%f", ExtendedMoveManager::mPosX[i], ExtendedMoveManager::mPosY[i], ExtendedMoveManager::mPosZ[i]);
+          Con::printf("rot %f,%f,%f,%f clamped to %f,%f,%f,%f", rotX[i], rotY[i], rotZ[i], rotW[i], x,y,z,a);
+		  x = UNCLAMPPOS(cposX[i]) * ExtendedMoveManager::mPosScale;
+		  y = UNCLAMPPOS(cposY[i]) * ExtendedMoveManager::mPosScale;
+		  z = UNCLAMPPOS(cposZ[i]) * ExtendedMoveManager::mPosScale;
+		  Con::printf("pos %f,%f,%f clamped to %f,%f,%f", posX[i], posY[i], posZ[i], x, y, z);
+      }
+	  #endif
    }
 
    // Perform the standard Move clamp
@@ -306,9 +321,9 @@ void ExtendedMove::unclamp()
    // Unclamp the values the same as for net traffic so the client matches the server
    for(U32 i=0; i<MaxPositionsRotations; ++i)
    {
-      posX[i] = UNCLAMPPOS(cposX[i]);
-      posY[i] = UNCLAMPPOS(cposY[i]);
-      posZ[i] = UNCLAMPPOS(cposZ[i]);
+      posX[i] = UNCLAMPPOS(cposX[i]) * ExtendedMoveManager::mPosScale;
+      posY[i] = UNCLAMPPOS(cposY[i]) * ExtendedMoveManager::mPosScale;
+      posZ[i] = UNCLAMPPOS(cposZ[i]) * ExtendedMoveManager::mPosScale;
 
       // Rotations
       if(EulerBasedRotation[i])

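Note on the extendedMove changes above: mvPosX/Y/Z are now floats, and clamp() divides by mvPosScale before the 13-bit quantization while unpack()/unclamp() multiply it back in, trading precision for range on the networked controller positions. A standalone sketch of that round trip, using hypothetical quantizePos/dequantizePos helpers in place of the engine's CLAMPPOS/UNCLAMPPOS macros (their exact definitions are not part of this diff):

#include <algorithm>
#include <cstdint>
#include <cstdio>

// Hypothetical stand-ins for CLAMPPOS/UNCLAMPPOS: 13 signed bits, roughly [-4096, 4095].
static int32_t quantizePos(float v)     { return (int32_t)std::max(-4096.0f, std::min(4095.0f, v)); }
static float   dequantizePos(int32_t v) { return (float)v; }

int main()
{
   const float posScale = 2.0f;   // mirrors the new ExtendedMoveManager::mPosScale default
   float posX = 5125.0f;          // raw controller position in millimeters

   int32_t cposX = quantizePos(posX / posScale);        // clamp(): scale down, then quantize
   float recovered = dequantizePos(cposX) * posScale;   // unpack()/unclamp(): dequantize, scale back up

   printf("networked %d, recovered %.1f\n", cposX, recovered); // networked 2562, recovered 5124.0
   return 0;
}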
+ 5 - 3
Engine/source/T3D/gameBase/extended/extendedMove.h

@@ -41,15 +41,17 @@ extern const ExtendedMove NullExtendedMove;
 class ExtendedMoveManager
 {
 public:
-   static S32 mPosX[ExtendedMove::MaxPositionsRotations];
-   static S32 mPosY[ExtendedMove::MaxPositionsRotations];
-   static S32 mPosZ[ExtendedMove::MaxPositionsRotations];
+   static F32 mPosX[ExtendedMove::MaxPositionsRotations];
+   static F32 mPosY[ExtendedMove::MaxPositionsRotations];
+   static F32 mPosZ[ExtendedMove::MaxPositionsRotations];
    static bool mRotIsEuler[ExtendedMove::MaxPositionsRotations];
    static F32 mRotAX[ExtendedMove::MaxPositionsRotations];
    static F32 mRotAY[ExtendedMove::MaxPositionsRotations];
    static F32 mRotAZ[ExtendedMove::MaxPositionsRotations];
    static F32 mRotAA[ExtendedMove::MaxPositionsRotations];
 
+   static F32 mPosScale;
+
    static void init();
 };
 

+ 67 - 2
Engine/source/T3D/player.cpp

@@ -57,11 +57,17 @@
 #include "T3D/decal/decalData.h"
 #include "materials/baseMatInstance.h"
 #include "math/mathUtils.h"
+#include "gfx/sim/debugDraw.h"
 
 #ifdef TORQUE_EXTENDED_MOVE
    #include "T3D/gameBase/extended/extendedMove.h"
 #endif
 
+#ifdef TORQUE_OPENVR
+#include "platform/input/openVR/openVRProvider.h"
+#include "platform/input/openVR/openVRTrackedObject.h"
+#endif
+
 // Amount of time if takes to transition to a new action sequence.
 static F32 sAnimationTransitionTime = 0.25f;
 static bool sUseAnimationTransitions = true;
@@ -2496,6 +2502,19 @@ void Player::updateMove(const Move* move)
 {
    delta.move = *move;
 
+#ifdef TORQUE_OPENVR
+   if (mControllers[0])
+   {
+	   mControllers[0]->processTick(move);
+   }
+
+   if (mControllers[1])
+   {
+	   mControllers[1]->processTick(move);
+   }
+
+#endif
+
    // Is waterCoverage high enough to be 'swimming'?
    {
       bool swimming = mWaterCoverage > 0.65f && canSwim();      
@@ -2628,18 +2647,29 @@ void Player::updateMove(const Move* move)
             AngAxisF moveRot(Point3F(emove->rotX[emoveIndex], emove->rotY[emoveIndex], emove->rotZ[emoveIndex]), emove->rotW[emoveIndex]);
             MatrixF trans(1);
             moveRot.setMatrix(&trans);
+            trans.inverse();
 
-            Point3F vecForward(0, 1, 0);
+            Point3F vecForward(0, 10, 0);
+            Point3F viewAngle;
             Point3F orient;
             EulerF rot;
             trans.mulV(vecForward);
+            viewAngle = vecForward;
+            vecForward.z = 0; // flatten
+            vecForward.normalizeSafe();
 
             F32 yawAng;
             F32 pitchAng;
             MathUtils::getAnglesFromVector(vecForward, yawAng, pitchAng);
+
+            mRot = EulerF(0);
             mRot.z = yawAng;
             mHead = EulerF(0);
-            mHead.x = -pitchAng;
+
+            while (mRot.z < 0.0f)
+               mRot.z += M_2PI_F;
+            while (mRot.z > M_2PI_F)
+               mRot.z -= M_2PI_F;
 
             absoluteDelta = true;
          }
@@ -7140,3 +7170,38 @@ void Player::renderConvex( ObjectRenderInst *ri, SceneRenderState *state, BaseMa
    mConvex.renderWorkingList();
    GFX->leaveDebugEvent();
 }
+
+#ifdef TORQUE_OPENVR
+void Player::setControllers(Vector<OpenVRTrackedObject*> controllerList)
+{
+	mControllers[0] = controllerList.size() > 0 ? controllerList[0] : NULL;
+	mControllers[1] = controllerList.size() > 1 ? controllerList[1] : NULL;
+}
+
+ConsoleMethod(Player, setVRControllers, void, 4, 4, "")
+{
+	OpenVRTrackedObject *controllerL, *controllerR;
+	Vector<OpenVRTrackedObject*> list;
+
+	if (Sim::findObject(argv[2], controllerL))
+	{
+		list.push_back(controllerL);
+	}
+	else
+	{
+		list.push_back(NULL);
+	}
+
+	if (Sim::findObject(argv[3], controllerR))
+	{
+		list.push_back(controllerR);
+	}
+	else
+	{
+		list.push_back(NULL);
+	}
+
+	object->setControllers(list);
+}
+
+#endif

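For reference, the setVRControllers console method above resolves its two arguments with Sim::findObject and forwards them to Player::setControllers, which fills the two SimObjectPtr slots ticked at the top of Player::updateMove. A rough engine-side sketch of the same wiring (the controller object names are placeholders, not part of this commit):

// Sketch only: assumes two OpenVRTrackedObject instances exist under these
// hypothetical names; a missing controller simply leaves its slot NULL.
void attachControllersToPlayer(Player *player)
{
   OpenVRTrackedObject *left = NULL, *right = NULL;
   Sim::findObject("LeftHandTracker", left);
   Sim::findObject("RightHandTracker", right);

   Vector<OpenVRTrackedObject*> controllers;
   controllers.push_back(left);
   controllers.push_back(right);
   player->setControllers(controllers);   // slots [0] and [1] then get processTick(move) each update
}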
+ 8 - 0
Engine/source/T3D/player.h

@@ -39,6 +39,7 @@ class DecalData;
 class SplashData;
 class PhysicsPlayer;
 class Player;
+class OpenVRTrackedObject;
 
 //----------------------------------------------------------------------------
 
@@ -518,6 +519,8 @@ protected:
    Point3F mLastPos;          ///< Holds the last position for physics updates
    Point3F mLastWaterPos;     ///< Same as mLastPos, but for water
 
+   SimObjectPtr<OpenVRTrackedObject> mControllers[2];
+
    struct ContactInfo 
    {
       bool contacted, jump, run;
@@ -577,12 +580,17 @@
 
    PhysicsPlayer* getPhysicsRep() const { return mPhysicsRep; }
 
+#ifdef TORQUE_OPENVR
+   void setControllers(Vector<OpenVRTrackedObject*> controllerList);
+#endif
+
   protected:
    virtual void reSkin();
 
    void setState(ActionState state, U32 ticks=0);
    void updateState();
 
+
    // Jetting
    bool mJetting;
 

+ 5 - 8
Engine/source/T3D/shapeBase.cpp

@@ -1999,17 +1999,14 @@ void ShapeBase::getEyeCameraTransform(IDisplayDevice *displayDevice, U32 eyeId,
    // NOTE: currently we dont support third-person camera in this mode
    MatrixF cameraTransform(1);
    F32 fakePos = 0;
+   //cameraTransform = getRenderTransform(); // use this for controllers TODO
    getCameraTransform(&fakePos, &cameraTransform);
 
-   QuatF baserot = cameraTransform;
-   QuatF qrot = QuatF(newPose.orientation);
-   //QuatF concatRot;
-   //concatRot.mul(baserot, qrot);
-   qrot.setMatrix(&temp);
+   temp = MatrixF(1);
+   newPose.orientation.setMatrix(&temp);
+   temp.setPosition(newPose.position);
 
-   temp.setPosition(cameraTransform.getPosition() + qrot.mulP(newPose.position, &rotEyePos));
-   
-   *outMat = temp;
+   *outMat = cameraTransform * temp;
 }
 
 void ShapeBase::getCameraParameters(F32 *min,F32* max,Point3F* off,MatrixF* rot)

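The getEyeCameraTransform rewrite above drops the quaternion offset math and instead treats the tracked pose as a local transform parented to the camera transform. In outline (assuming the usual Torque math types; this is a paraphrase of the new code, not a verbatim copy):

// Build the eye/HMD pose as a local matrix, then append it to the camera transform.
MatrixF local(1);
newPose.orientation.setMatrix(&local);    // rotation from the display device pose
local.setPosition(newPose.position);      // translation from the display device pose

MatrixF world = cameraTransform * local;  // equivalent to *outMat = cameraTransform * temp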
+ 4 - 4
Engine/source/platform/input/openVR/openVROverlay.cpp

@@ -63,7 +63,7 @@ void OpenVROverlay::initPersistFields()
       "Type of overlay.");
    addProtectedField("overlayFlags", TypeS32, Offset(mOverlayFlags, OpenVROverlay), &setProtectedOverlayDirty, &defaultProtectedGetFn,
       "Flags for overlay.");
-   addProtectedField("overlayWidth", TypeS32, Offset(mOverlayWidth, OpenVROverlay), &setProtectedOverlayDirty, &defaultProtectedGetFn,
+   addProtectedField("overlayWidth", TypeF32, Offset(mOverlayWidth, OpenVROverlay), &setProtectedOverlayDirty, &defaultProtectedGetFn,
       "Width of overlay.");
    addProtectedField("overlayColor", TypeColorF, Offset(mOverlayColor, OpenVROverlay), &setProtectedOverlayDirty, &defaultProtectedGetFn,
       "Backing color of overlay.");
@@ -127,7 +127,7 @@ void OpenVROverlay::onRemove()
       mThumbOverlayHandle = NULL;
    }
 
-	if (OPENVR)
+	if (ManagedSingleton<OpenVRProvider>::instanceOrNull())
 	{
 		OPENVR->unregisterOverlay(this);
 	}
@@ -373,13 +373,13 @@ void OpenVROverlay::handleOpenVREvents()
       eventInfo.modifier = (InputModifiers)0;
       eventInfo.ascii = 0;
 
-		Con::printf("Overlay event %i", vrEvent.eventType);
+		//Con::printf("Overlay event %i", vrEvent.eventType);
 
       switch (vrEvent.eventType)
       {
       case vr::VREvent_MouseMove:
       {
-			Con::printf("mousemove %f,%f", vrEvent.data.mouse.x, vrEvent.data.mouse.y);
+			//Con::printf("mousemove %f,%f", vrEvent.data.mouse.x, vrEvent.data.mouse.y);
          eventInfo.objType = SI_AXIS;
          eventInfo.objInst = SI_XAXIS;
          eventInfo.action = SI_MAKE;

+ 546 - 9
Engine/source/platform/input/openVR/openVRProvider.cpp

@@ -6,6 +6,12 @@
 #include "T3D/gameBase/gameConnection.h"
 #include "gui/core/guiCanvas.h"
 #include "postFx/postEffectCommon.h"
+#include "renderInstance/renderPassManager.h"
+#include "scene/sceneRenderState.h"
+#include "materials/baseMatInstance.h"
+#include "materials/materialManager.h"
+#include "console/consoleInternal.h"
+#include "core/stream/fileStream.h"
 
 #include "gfx/D3D11/gfxD3D11Device.h"
 #include "gfx/D3D11/gfxD3D11TextureObject.h"
@@ -17,12 +23,20 @@
 #include "gfx/D3D9/gfxD3D9TextureObject.h"
 #include "gfx/D3D9/gfxD3D9EnumTranslate.h"
 
+#include "materials/matTextureTarget.h"
+
 #ifdef TORQUE_OPENGL
 #include "gfx/gl/gfxGLDevice.h"
 #include "gfx/gl/gfxGLTextureObject.h"
 #include "gfx/gl/gfxGLEnumTranslate.h"
 #endif
 
+struct OpenVRLoadedTexture
+{
+	vr::TextureID_t texId;
+	NamedTexTarget texTarget;
+};
+
 AngAxisF gLastMoveRot; // jamesu - this is just here for temp debugging
 
 namespace OpenVRUtil
@@ -74,6 +88,8 @@ namespace OpenVRUtil
       return outMat;
    }
 
+
+
    void convertMatrixFPlainToSteamVRAffineMatrix(const MatrixF &inMat, vr::HmdMatrix34_t &outMat)
    {
       Point4F row0; inMat.getRow(0, &row0);
@@ -123,6 +139,114 @@ namespace OpenVRUtil
       bounds.vMax = (rect.point.y + rect.extent.y) * yRatio;
       return bounds;
    }
+
+   String GetTrackedDeviceString(vr::IVRSystem *pHmd, vr::TrackedDeviceIndex_t unDevice, vr::TrackedDeviceProperty prop, vr::TrackedPropertyError *peError = NULL)
+   {
+	   uint32_t unRequiredBufferLen = pHmd->GetStringTrackedDeviceProperty(unDevice, prop, NULL, 0, peError);
+	   if (unRequiredBufferLen == 0)
+		   return "";
+
+	   char *pchBuffer = new char[unRequiredBufferLen];
+	   unRequiredBufferLen = pHmd->GetStringTrackedDeviceProperty(unDevice, prop, pchBuffer, unRequiredBufferLen, peError);
+	   String sResult = pchBuffer;
+	   delete[] pchBuffer;
+	   return sResult;
+   }
+
+}
+
+//------------------------------------------------------------
+
+bool OpenVRRenderModel::init(const vr::RenderModel_t & vrModel, StringTableEntry materialName)
+{
+	SAFE_DELETE(mMaterialInstance);
+	mMaterialInstance = MATMGR->createMatInstance(materialName, getGFXVertexFormat< VertexType >());
+	if (!mMaterialInstance)
+		return false;
+
+	mLocalBox = Box3F::Invalid;
+
+	// Prepare primitives
+	U16 *indPtr = NULL;
+	GFXPrimitive *primPtr = NULL;
+	mPrimitiveBuffer.set(GFX, vrModel.unTriangleCount * 3, 1, GFXBufferTypeStatic, "OpenVR Controller buffer");
+
+	mPrimitiveBuffer.lock(&indPtr, &primPtr);
+	if (!indPtr || !primPtr)
+		return false;
+
+	primPtr->minIndex = 0;
+	primPtr->numPrimitives = vrModel.unTriangleCount;
+	primPtr->numVertices = vrModel.unVertexCount;
+	primPtr->startIndex = 0;
+	primPtr->startVertex = 0;
+	primPtr->type = GFXTriangleList;
+
+	//dMemcpy(indPtr, vrModel.rIndexData, sizeof(U16) * vrModel.unTriangleCount * 3);
+
+	for (U32 i = 0; i < vrModel.unTriangleCount; i++)
+	{
+		const U32 idx = i * 3;
+		indPtr[idx + 0] = vrModel.rIndexData[idx + 2];
+		indPtr[idx + 1] = vrModel.rIndexData[idx + 1];
+		indPtr[idx + 2] = vrModel.rIndexData[idx + 0];
+	}
+
+	mPrimitiveBuffer.unlock();
+
+	// Prepare verts
+	mVertexBuffer.set(GFX, vrModel.unVertexCount, GFXBufferTypeStatic);
+	VertexType *vertPtr = mVertexBuffer.lock();
+	if (!vertPtr)
+		return false;
+
+	// Convert to torque coordinate system
+	for (U32 i = 0; i < vrModel.unVertexCount; i++)
+	{
+		const vr::RenderModel_Vertex_t &vert = vrModel.rVertexData[i];
+		vertPtr->point = OpenVRUtil::convertPointFromOVR(vert.vPosition);
+		vertPtr->point.x = -vertPtr->point.x;
+		vertPtr->point.y = -vertPtr->point.y;
+		vertPtr->point.z = -vertPtr->point.z;
+		vertPtr->normal = OpenVRUtil::convertPointFromOVR(vert.vNormal);
+		vertPtr->normal.x = -vertPtr->normal.x;
+		vertPtr->normal.y = -vertPtr->normal.y;
+		vertPtr->normal.z = -vertPtr->normal.z;
+		vertPtr->texCoord = Point2F(vert.rfTextureCoord[0], vert.rfTextureCoord[1]);
+		vertPtr++;
+	}
+
+	mVertexBuffer.unlock();
+
+	for (U32 i = 0, sz = vrModel.unVertexCount; i < sz; i++)
+	{
+		Point3F pos = Point3F(vrModel.rVertexData[i].vPosition.v[0], vrModel.rVertexData[i].vPosition.v[1], vrModel.rVertexData[i].vPosition.v[2]);
+		mLocalBox.extend(pos);
+	}
+
+	return true;
+}
+
+void OpenVRRenderModel::draw(SceneRenderState *state, MeshRenderInst* renderInstance)
+{
+	renderInstance->type = RenderPassManager::RIT_Mesh;
+	renderInstance->matInst = state->getOverrideMaterial(mMaterialInstance);
+	if (!renderInstance->matInst)
+		return;
+
+	renderInstance->vertBuff = &mVertexBuffer;
+	renderInstance->primBuff = &mPrimitiveBuffer;
+	renderInstance->prim = NULL;
+	renderInstance->primBuffIndex = 0;
+
+	if (renderInstance->matInst->getMaterial()->isTranslucent())
+	{
+		renderInstance->type = RenderPassManager::RIT_Translucent;
+		renderInstance->translucentSort = true;
+	}
+
+	renderInstance->defaultKey = renderInstance->matInst->getStateHint();
+	renderInstance->defaultKey2 = (uintptr_t)renderInstance->vertBuff;
 }
 
 //------------------------------------------------------------
@@ -209,6 +333,16 @@ ImplementEnumType(OpenVRState,
 { vr::VRState_NotReady, "NotReady" },
 EndImplementEnumType;
 
+ImplementEnumType(OpenVRTrackedDeviceClass,
+	"Types of devices which are tracked .\n\n"
+	"@ingroup OpenVR")
+{ vr::TrackedDeviceClass_Invalid, "Invalid" },
+{ vr::TrackedDeviceClass_HMD, "HMD" },
+{ vr::TrackedDeviceClass_Controller, "Controller" },
+{ vr::TrackedDeviceClass_TrackingReference, "TrackingReference" },
+{ vr::TrackedDeviceClass_Other, "Other" },
+EndImplementEnumType;
+
 //------------------------------------------------------------
 
 U32 OpenVRProvider::OVR_SENSORROT[vr::k_unMaxTrackedDeviceCount] = { 0 };
@@ -371,7 +505,7 @@ OpenVRProvider::OpenVRProvider() :
    INPUTMGR->registerDevice(this);
    dMemset(&mLUID, '\0', sizeof(mLUID));
 
-   mTrackingSpace = vr::TrackingUniverseSeated;
+   mTrackingSpace = vr::TrackingUniverseStanding;
 }
 
 OpenVRProvider::~OpenVRProvider()
@@ -404,6 +538,8 @@ void OpenVRProvider::staticInit()
 
 bool OpenVRProvider::enable()
 {
+   mOpenVRNS = Namespace::find(StringTable->insert("OpenVR"));
+
    disable();
 
    // Load openvr runtime
    mDriver = GetTrackedDeviceString(mHMD, vr::k_unTrackedDeviceIndex_Hmd, vr::Prop_TrackingSystemName_String);
    mDriver = GetTrackedDeviceString(mHMD, vr::k_unTrackedDeviceIndex_Hmd, vr::Prop_TrackingSystemName_String);
    mDisplay = GetTrackedDeviceString(mHMD, vr::k_unTrackedDeviceIndex_Hmd, vr::Prop_SerialNumber_String);
 
+   mHMDRenderState.mHMDPose = MatrixF(1);
+   mHMDRenderState.mEyePose[0] = MatrixF(1);
+   mHMDRenderState.mEyePose[1] = MatrixF(1);
+
    mHMDRenderState.reset(mHMD);
    mHMD->ResetSeatedZeroPose();
    dMemset(mPreviousInputTrackedDevicePose, '\0', sizeof(mPreviousInputTrackedDevicePose));
 
    mEnabled = true;
 
+   dMemset(mCurrentControllerState, '\0', sizeof(mCurrentControllerState));
+   dMemset(mPreviousCurrentControllerState, '\0', sizeof(mPreviousCurrentControllerState));
+
    return true;
 }
 
       vr::VRControllerState_t state;
       vr::VRControllerState_t state;
       if (mHMD->GetControllerState(unDevice, &state))
       {
-         // TODO
+		  mCurrentControllerState[unDevice] = state;
       }
    }
 
 
 
    Point3F pos = torqueMat.getPosition();
    Point3F pos = torqueMat.getPosition();
    outRot = QuatF(torqueMat);
    outRot = QuatF(torqueMat);
-   outPos = pos;// Point3F(-pos.x, pos.z, -pos.y);
+   outPos = pos;
+   outRot.mulP(pos, &outPos); // jamesu - position needs to be multiplied by rotation in this case
+}
+
+void OpenVRTransformToRotPosMat(MatrixF mat, QuatF &outRot, Point3F &outPos, MatrixF &outMat)
+{
+	// Directly set the rotation and position from the eye transforms
+	MatrixF torqueMat(1);
+	OpenVRUtil::convertTransformFromOVR(mat, torqueMat);
+
+	Point3F pos = torqueMat.getPosition();
+	outRot = QuatF(torqueMat);
+	outPos = pos;
+	outRot.mulP(pos, &outPos); // jamesu - position needs to be multiplied by rotation in this case
+	outMat = torqueMat;
 }
 }
 
 
 void OpenVRProvider::getFrameEyePose(IDevicePose *pose, S32 eyeId) const
 void OpenVRProvider::getFrameEyePose(IDevicePose *pose, S32 eyeId) const
@@ -655,15 +812,29 @@ void OpenVRProvider::getFrameEyePose(IDevicePose *pose, S32 eyeId) const
 		// NOTE: this is codename for "head"
 		// NOTE: this is codename for "head"
 		MatrixF mat = mHMDRenderState.mHMDPose; // same order as in the openvr example
 
+#ifdef DEBUG_DISPLAY_POSE
+		pose->originalMatrix = mat;
+		OpenVRTransformToRotPosMat(mat, pose->orientation, pose->position, pose->actualMatrix);
+#else
 		OpenVRTransformToRotPos(mat, pose->orientation, pose->position);
+#endif
+
 		pose->velocity = Point3F(0);
 		pose->angularVelocity = Point3F(0);
 	}
 	else
 	{
 		MatrixF mat = mHMDRenderState.mEyePose[eyeId] * mHMDRenderState.mHMDPose; // same order as in the openvr example
+		//mat =  mHMDRenderState.mHMDPose * mHMDRenderState.mEyePose[eyeId]; // same order as in the openvr example
+
 
+#ifdef DEBUG_DISPLAY_POSE
+		pose->originalMatrix = mat;
+		OpenVRTransformToRotPosMat(mat, pose->orientation, pose->position, pose->actualMatrix);
+#else
 		OpenVRTransformToRotPos(mat, pose->orientation, pose->position);
+#endif
+
 		pose->velocity = Point3F(0);
 		pose->angularVelocity = Point3F(0);
 	}
@@ -914,10 +1085,14 @@ S32 OpenVRProvider::getDisplayDeviceId() const
 	return -1;
 }
 
-void OpenVRProvider::processVREvent(const vr::VREvent_t & event)
+void OpenVRProvider::processVREvent(const vr::VREvent_t & evt)
 {
-   switch (event.eventType)
+   mVREventSignal.trigger(evt);
+   switch (evt.eventType)
    {
+   case vr::VREvent_InputFocusCaptured:
+	   //Con::executef()
+	   break;
    case vr::VREvent_TrackedDeviceActivated:
    {
       // Setup render model
@@ -969,6 +1144,8 @@ void OpenVRProvider::updateTrackedPoses()
          if (nDevice == vr::k_unTrackedDeviceIndex_Hmd)
          {
             mHMDRenderState.mHMDPose = mat;
+
+			/*
             MatrixF rotOffset(1);
             EulerF localRot(-smHMDRotOffset.x, -smHMDRotOffset.z, smHMDRotOffset.y);
 
@@ -978,6 +1155,7 @@ void OpenVRProvider::updateTrackedPoses()
             QuatF(localRot).setMatrix(&rotOffset);
             rotOffset.inverse();
             mHMDRenderState.mHMDPose = mat = rotOffset * mHMDRenderState.mHMDPose;
+			*/
 
             // jamesu - store the last rotation for temp debugging
             MatrixF torqueMat(1);
@@ -990,6 +1168,11 @@ void OpenVRProvider::updateTrackedPoses()
          vr::TrackedDevicePose_t &outPose = mTrackedDevicePose[nDevice];
          OpenVRTransformToRotPos(mat, inPose.orientation, inPose.position);
 
+#ifdef DEBUG_DISPLAY_POSE
+		 OpenVRUtil::convertTransformFromOVR(mat, inPose.actualMatrix);
+		 inPose.originalMatrix = mat;
+#endif
+
          inPose.state = outPose.eTrackingResult;
          inPose.valid = outPose.bPoseIsValid;
          inPose.connected = outPose.bDeviceIsConnected;
@@ -1012,18 +1195,23 @@ void OpenVRProvider::submitInputChanges()
       IDevicePose curPose = mCurrentDevicePose[i];
       IDevicePose prevPose = mPreviousInputTrackedDevicePose[i];
 
+	  S32 eventIdx = -1;
+	  
+	  if (!mDeviceEventMap.tryGetValue(i, eventIdx) || eventIdx < 0)
+		  continue;
+
       if (!curPose.valid || !curPose.connected)
          continue;
 
       if (curPose.orientation != prevPose.orientation)
       {
          AngAxisF axisAA(curPose.orientation);
-         INPUTMGR->buildInputEvent(mDeviceType, 0, SI_ROT, OVR_SENSORROT[i], SI_MOVE, axisAA);
+         INPUTMGR->buildInputEvent(mDeviceType, 0, SI_ROT, OVR_SENSORROT[eventIdx], SI_MOVE, axisAA);
       }
 
       if (curPose.position != prevPose.position)
       {
-         INPUTMGR->buildInputEvent(mDeviceType, 0, SI_POS, OVR_SENSORPOSITION[i], SI_MOVE, curPose.position);
+         INPUTMGR->buildInputEvent(mDeviceType, 0, SI_POS, OVR_SENSORPOSITION[eventIdx], SI_MOVE, curPose.position);
       }
 
       if (curPose.velocity != prevPose.velocity)
@@ -1034,7 +1222,7 @@ void OpenVRProvider::submitInputChanges()
          angles.y = curPose.velocity.y;
          angles.z = curPose.velocity.z;
 
-         INPUTMGR->buildInputEvent(mDeviceType, 0, SI_POS, OVR_SENSORVELOCITY[i], SI_MOVE, angles);
+         INPUTMGR->buildInputEvent(mDeviceType, 0, SI_POS, OVR_SENSORVELOCITY[eventIdx], SI_MOVE, angles);
       }
 
       if (curPose.angularVelocity != prevPose.angularVelocity)
@@ -1045,7 +1233,7 @@ void OpenVRProvider::submitInputChanges()
          angles[1] = mRadToDeg(curPose.velocity.y);
          angles[2] = mRadToDeg(curPose.velocity.z);
 
-         INPUTMGR->buildInputEvent(mDeviceType, 0, SI_POS, OVR_SENSORANGVEL[i], SI_MOVE, angles);
+         INPUTMGR->buildInputEvent(mDeviceType, 0, SI_POS, OVR_SENSORANGVEL[eventIdx], SI_MOVE, angles);
       }
       /*
       if (curPose.connected != prevPose.connected)
@@ -1076,6 +1264,28 @@ void OpenVRProvider::resetSensors()
    }
 }
 
+void OpenVRProvider::mapDeviceToEvent(U32 deviceIdx, S32 eventIdx)
+{
+	mDeviceEventMap[deviceIdx] = eventIdx;
+}
+
+void OpenVRProvider::resetEventMap()
+{
+	mDeviceEventMap.clear();
+}
+
+IDevicePose OpenVRProvider::getTrackedDevicePose(U32 idx)
+{
+	if (idx >= vr::k_unMaxTrackedDeviceCount)
+	{
+		IDevicePose ret;
+		ret.connected = ret.valid = false;
+		return ret;
+	}
+
+	return mCurrentDevicePose[idx];
+}
+
 void OpenVRProvider::registerOverlay(OpenVROverlay* overlay)
 {
 	mOverlays.push_back(overlay);
@@ -1090,6 +1300,261 @@ void OpenVRProvider::unregisterOverlay(OpenVROverlay* overlay)
 	}
 }
 
+const S32 OpenVRProvider::preloadRenderModelTexture(U32 index)
+{
+	S32 idx = -1;
+	if (mLoadedTextureLookup.tryGetValue(index, idx))
+		return idx;
+
+	char buffer[256];
+	dSprintf(buffer, sizeof(buffer), "openvrtex_%u", index);
+
+	OpenVRProvider::LoadedRenderTexture loadedTexture;
+	loadedTexture.vrTextureId = index;
+	loadedTexture.vrTexture = NULL;
+	loadedTexture.texture = NULL;
+	loadedTexture.textureError = vr::VRRenderModelError_Loading;
+	loadedTexture.targetTexture = new NamedTexTarget();
+	loadedTexture.targetTexture->registerWithName(buffer);
+	mLoadedTextures.push_back(loadedTexture);
+	mLoadedTextureLookup[index] = mLoadedTextures.size() - 1;
+
+	return mLoadedTextures.size() - 1;
+}
+
+const S32 OpenVRProvider::preloadRenderModel(StringTableEntry name)
+{
+	S32 idx = -1;
+	if (mLoadedModelLookup.tryGetValue(name, idx))
+		return idx;
+
+	OpenVRProvider::LoadedRenderModel loadedModel;
+	loadedModel.name = name;
+	loadedModel.model = NULL;
+	loadedModel.vrModel = NULL;
+	loadedModel.modelError = vr::VRRenderModelError_Loading;
+	loadedModel.loadedTexture = false;
+	loadedModel.textureId = -1;
+	mLoadedModels.push_back(loadedModel);
+	mLoadedModelLookup[name] = mLoadedModels.size() - 1;
+
+	return mLoadedModels.size() - 1;
+}
+
+
+bool OpenVRProvider::getRenderModel(S32 idx, OpenVRRenderModel **ret, bool &failed)
+{
+	if (idx < 0 || idx >= mLoadedModels.size())
+	{
+		failed = true;
+		return true;
+	}
+
+	OpenVRProvider::LoadedRenderModel &loadedModel = mLoadedModels[idx];
+	//Con::printf("RenderModel[%i] STAGE 1", idx);
+
+	failed = false;
+
+	if (loadedModel.modelError > vr::VRRenderModelError_Loading)
+	{
+		failed = true;
+		return true;
+	}
+
+	// Stage 1 : model
+	if (!loadedModel.model)
+	{
+		loadedModel.modelError = vr::VRRenderModels()->LoadRenderModel_Async(loadedModel.name, &loadedModel.vrModel);
+		//Con::printf(" vr::VRRenderModels()->LoadRenderModel_Async(\"%s\", %x); -> %i", loadedModel.name, &loadedModel.vrModel, loadedModel.modelError);
+		if (loadedModel.modelError == vr::VRRenderModelError_None)
+		{
+			if (loadedModel.vrModel == NULL)
+			{
+				failed = true;
+				return true;
+			}
+			// Load the model
+			loadedModel.model = new OpenVRRenderModel();
+		}
+		else if (loadedModel.modelError == vr::VRRenderModelError_Loading)
+		{
+			return false;
+		}
+	}
+
+	//Con::printf("RenderModel[%i] STAGE 2 (texId == %i)", idx, loadedModel.vrModel->diffuseTextureId);
+
+	// Stage 2 : texture
+	if (!loadedModel.loadedTexture && loadedModel.model)
+	{
+		if (loadedModel.textureId == -1)
+		{
+			loadedModel.textureId = preloadRenderModelTexture(loadedModel.vrModel->diffuseTextureId);
+		}
+
+		if (loadedModel.textureId == -1)
+		{
+			failed = true;
+			return true;
+		}
+
+		if (!getRenderModelTexture(loadedModel.textureId, NULL, failed))
+		{
+			return false;
+		}
+
+		if (failed)
+		{
+			return true;
+		}
+
+		loadedModel.loadedTexture = true;
+
+		//Con::printf("RenderModel[%i] GOT TEXTURE");
+
+		// Now we can load the model. Note we first need to get a Material for the mapped texture
+		NamedTexTarget *namedTexture = mLoadedTextures[loadedModel.textureId].targetTexture;
+		String materialName = MATMGR->getMapEntry(namedTexture->getName().c_str());
+		if (materialName.isEmpty())
+		{
+			char buffer[256];
+			dSprintf(buffer, sizeof(buffer), "#%s", namedTexture->getName().c_str());
+			materialName = buffer;
+
+			//Con::printf("RenderModel[%i] materialName == %s", idx, buffer);
+
+			Material* mat = new Material();
+			mat->mMapTo = namedTexture->getName();
+			mat->mDiffuseMapFilename[0] = buffer;
+			mat->mEmissive[0] = true;
+
+			dSprintf(buffer, sizeof(buffer), "%s_Material", namedTexture->getName().c_str());
+			if (!mat->registerObject(buffer))
+			{
+				Con::errorf("Couldn't create placeholder openvr material %s!", buffer);
+				failed = true;
+				return true;
+			}
+
+			materialName = buffer;
+		}
+		
+		loadedModel.model->init(*loadedModel.vrModel, materialName);
+	}
+
+	if ((loadedModel.modelError > vr::VRRenderModelError_Loading) || 
+	    (loadedModel.textureId >= 0 && mLoadedTextures[loadedModel.textureId].textureError > vr::VRRenderModelError_Loading))
+	{
+		failed = true;
+	}
+
+	if (!failed && ret)
+	{
+		*ret = loadedModel.model;
+	}
+	return true;
+}
+
+bool OpenVRProvider::getRenderModelTexture(S32 idx, GFXTextureObject **outTex, bool &failed)
+{
+	if (idx < 0 || idx >= mLoadedTextures.size())
+	{
+		failed = true;
+		return true;
+	}
+
+	failed = false;
+
+	OpenVRProvider::LoadedRenderTexture &loadedTexture = mLoadedTextures[idx];
+
+	if (loadedTexture.textureError > vr::VRRenderModelError_Loading)
+	{
+		failed = true;
+		return true;
+	}
+
+	if (!loadedTexture.texture)
+	{
+		if (!loadedTexture.vrTexture)
+		{
+			loadedTexture.textureError = vr::VRRenderModels()->LoadTexture_Async(loadedTexture.vrTextureId, &loadedTexture.vrTexture);
+			if (loadedTexture.textureError == vr::VRRenderModelError_None)
+			{
+				// Load the texture
+				GFXTexHandle tex;
+
+				const U32 sz = loadedTexture.vrTexture->unWidth * loadedTexture.vrTexture->unHeight * 4;
+				GBitmap *bmp = new GBitmap(loadedTexture.vrTexture->unWidth, loadedTexture.vrTexture->unHeight, false, GFXFormatR8G8B8A8);
+
+				Swizzles::bgra.ToBuffer(bmp->getAddress(0,0,0), loadedTexture.vrTexture->rubTextureMapData, sz);
+
+				char buffer[256];
+				dSprintf(buffer, 256, "OVRTEX-%i.png", loadedTexture.vrTextureId);
+
+				FileStream fs;
+				fs.open(buffer, Torque::FS::File::Write);
+				bmp->writeBitmap("PNG", fs);
+				fs.close();
+
+				tex.set(bmp, &GFXDefaultStaticDiffuseProfile, true, "OpenVR Texture");
+				//tex.set(loadedTexture.vrTexture->unWidth, loadedTexture.vrTexture->unHeight, 1, (void*)pixels, GFXFormatR8G8B8A8, &GFXDefaultStaticDiffuseProfile, "OpenVR Texture", 1);
+
+
+				loadedTexture.targetTexture->setTexture(tex);
+				loadedTexture.texture = tex;
+			}
+			else if (loadedTexture.textureError == vr::VRRenderModelError_Loading)
+			{
+				return false;
+			}
+		}
+	}
+
+	if (loadedTexture.textureError > vr::VRRenderModelError_Loading)
+	{
+		failed = true;
+	}
+
+	if (!failed && outTex)
+	{
+		*outTex = loadedTexture.texture;
+	}
+
+	return true;
+}
+
+bool OpenVRProvider::getRenderModelTextureName(S32 idx, String &outName)
+{
+	if (idx < 0 || idx >= mLoadedTextures.size())
+		return false;
+
+	if (mLoadedTextures[idx].targetTexture)
+	{
+		outName = mLoadedTextures[idx].targetTexture->getName();
+		return true;
+	}
+
+	return false;
+}
+
+void OpenVRProvider::resetRenderModels()
+{
+	for (U32 i = 0, sz = mLoadedModels.size(); i < sz; i++)
+	{
+		SAFE_DELETE(mLoadedModels[i].model);
+		if (mLoadedModels[i].vrModel) mRenderModels->FreeRenderModel(mLoadedModels[i].vrModel);
+	}
+	for (U32 i = 0, sz = mLoadedTextures.size(); i < sz; i++)
+	{
+		SAFE_DELETE(mLoadedTextures[i].targetTexture);
+		if (mLoadedTextures[i].vrTexture) mRenderModels->FreeTexture(mLoadedTextures[i].vrTexture);
+	}
+	mLoadedModels.clear();
+	mLoadedTextures.clear();
+	mLoadedModelLookup.clear();
+	mLoadedTextureLookup.clear();
+}
+
 OpenVROverlay *OpenVRProvider::getGamepadFocusOverlay()
 {
    return NULL;
@@ -1126,6 +1591,54 @@ void OpenVRProvider::setKeyboardPositionForOverlay(OpenVROverlay *overlay, const
 
 }
 
+void OpenVRProvider::getControllerDeviceIndexes(vr::TrackedDeviceClass &deviceClass, Vector<S32> &outList)
+{
+	for (U32 i = 0; i<vr::k_unMaxTrackedDeviceCount; i++)
+	{
+		if (!mCurrentDevicePose[i].connected)
+			continue;
+
+		vr::TrackedDeviceClass klass = mHMD->GetTrackedDeviceClass(i);
+		if (klass == deviceClass)
+		{
+			outList.push_back(i);
+		}
+	}
+}
+
+StringTableEntry OpenVRProvider::getControllerModel(U32 idx)
+{
+	if (idx >= vr::k_unMaxTrackedDeviceCount || !mRenderModels)
+		return NULL;
+
+	String str = GetTrackedDeviceString(mHMD, idx, vr::Prop_RenderModelName_String, NULL);
+	return StringTable->insert(str, true);
+}
+
+DefineEngineStaticMethod(OpenVR, getControllerDeviceIndexes, const char*, (OpenVRTrackedDeviceClass klass),,
+	"@brief Gets the indexes of devices which match the required device class")
+{
+	if (!ManagedSingleton<OpenVRProvider>::instanceOrNull())
+	{
+		return "";
+	}
+
+	Vector<S32> outList;
+	OPENVR->getControllerDeviceIndexes(klass, outList);
+	return EngineMarshallData<Vector<S32>>(outList);
+}
+
+DefineEngineStaticMethod(OpenVR, getControllerModel, const char*, (S32 idx), ,
+	"@brief Gets the render model name of the tracked device at the given index")
+{
+	if (!ManagedSingleton<OpenVRProvider>::instanceOrNull())
+	{
+		return "";
+	}
+
+	return OPENVR->getControllerModel(idx);
+}
+
 DefineEngineStaticMethod(OpenVR, isDeviceActive, bool, (), ,
    "@brief Used to determine if the OpenVR input device is active\n\n"
 
@@ -1216,6 +1729,30 @@ DefineEngineStaticMethod(OpenVR, resetSensors, void, (), ,
    OPENVR->resetSensors();
 }
 
+DefineEngineStaticMethod(OpenVR, mapDeviceToEvent, void, (S32 deviceId, S32 eventId), ,
+	"@brief Maps a device to an event code.\n\n"
+	"@ingroup Game")
+{
+	if (!ManagedSingleton<OpenVRProvider>::instanceOrNull())
+	{
+		return;
+	}
+
+	OPENVR->mapDeviceToEvent(deviceId, eventId);
+}
+
+DefineEngineStaticMethod(OpenVR, resetEventMap, void, (), ,
+	"@brief Resets event map.\n\n"
+	"@ingroup Game")
+{
+	if (!ManagedSingleton<OpenVRProvider>::instanceOrNull())
+	{
+		return;
+	}
+
+	OPENVR->resetEventMap();
+}
+
 // Overlay stuff
 
 DefineEngineFunction(OpenVRIsCompiledIn, bool, (), , "")

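One detail worth calling out in openVRProvider.cpp: submitInputChanges() now consults mDeviceEventMap, so a tracked device only generates SI_ROT/SI_POS input events if script has mapped it to an event slot with OpenVR::mapDeviceToEvent, and the mapped slot (not the raw OpenVR device index) selects the OVR_SENSOR* code. A standalone sketch of that filtering, with std::map standing in for the engine's Map and printf standing in for INPUTMGR->buildInputEvent (both stand-ins are assumptions, not engine API):

#include <cstdint>
#include <cstdio>
#include <map>

// Stand-in for mDeviceEventMap: OpenVR device index -> move/event slot.
static std::map<uint32_t, int32_t> deviceEventMap;

static void submitPose(uint32_t deviceIdx)
{
   // Mirrors the new check: unmapped devices are skipped entirely.
   std::map<uint32_t, int32_t>::const_iterator it = deviceEventMap.find(deviceIdx);
   if (it == deviceEventMap.end() || it->second < 0)
      return;

   printf("device %u -> rotation/position events on slot %d\n", deviceIdx, it->second);
}

int main()
{
   deviceEventMap[3] = 0;   // e.g. script: OpenVR::mapDeviceToEvent(3, 0); for the left controller
   deviceEventMap[4] = 1;   // e.g. script: OpenVR::mapDeviceToEvent(4, 1); for the right controller

   for (uint32_t i = 0; i < 16; i++)
      submitPose(i);        // only devices 3 and 4 produce events
   return 0;
}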
+ 91 - 0
Engine/source/platform/input/openVR/openVRProvider.h

@@ -20,6 +20,11 @@
 
 class OpenVRHMDDevice;
 class OpenVROverlay;
+class BaseMatInstance;
+class SceneRenderState;
+struct MeshRenderInst;
+class Namespace;
+class NamedTexTarget;
 
 typedef vr::VROverlayInputMethod OpenVROverlayInputMethod;
 typedef vr::VROverlayTransformType OpenVROverlayTransformType;
@@ -29,6 +34,7 @@ typedef vr::ETrackingResult OpenVRTrackingResult;
 typedef vr::ETrackingUniverseOrigin OpenVRTrackingUniverseOrigin;
 typedef vr::EOverlayDirection OpenVROverlayDirection;
 typedef vr::EVRState OpenVRState;
+typedef vr::TrackedDeviceClass OpenVRTrackedDeviceClass;
 
 DefineEnumType(OpenVROverlayInputMethod);
 DefineEnumType(OpenVROverlayTransformType);
@@ -38,6 +44,7 @@ DefineEnumType(OpenVRTrackingResult);
 DefineEnumType(OpenVRTrackingUniverseOrigin);
 DefineEnumType(OpenVROverlayDirection);
 DefineEnumType(OpenVRState);
+DefineEnumType(OpenVRTrackedDeviceClass);
 
 namespace OpenVRUtil
 {
@@ -112,6 +119,36 @@ public:
 	}
 };
 
+/// Simple class to handle rendering native OpenVR model data
+class OpenVRRenderModel
+{
+public:
+	typedef GFXVertexPNT VertexType;
+	GFXVertexBufferHandle<VertexType> mVertexBuffer;
+	GFXPrimitiveBufferHandle mPrimitiveBuffer;
+	BaseMatInstance* mMaterialInstance; ///< Material to use for rendering. NOTE:  
+	Box3F mLocalBox;
+
+	OpenVRRenderModel() : mMaterialInstance(NULL)
+	{
+	}
+
+	~OpenVRRenderModel()
+	{
+		SAFE_DELETE(mMaterialInstance);
+	}
+
+	Box3F getWorldBox(MatrixF &mat)
+	{
+		Box3F ret = mLocalBox;
+		mat.mul(ret);
+		return ret;
+	}
+
+	bool init(const vr::RenderModel_t & vrModel, StringTableEntry materialName);
+	void draw(SceneRenderState *state, MeshRenderInst* renderInstance);
+};
+
 struct OpenVRRenderState
 {
    vr::IVRSystem *mHMD;
@@ -157,15 +194,38 @@ public:
       DIFF_RAW = (DIFF_ACCEL | DIFF_ANGVEL | DIFF_MAG),
    };
 
+   struct LoadedRenderModel
+   {
+	   StringTableEntry name;
+	   vr::RenderModel_t *vrModel;
+	   OpenVRRenderModel *model;
+	   vr::EVRRenderModelError modelError;
+	   S32 textureId;
+	   bool loadedTexture;
+   };
+
+   struct LoadedRenderTexture
+   {
+	   U32 vrTextureId;
+	   vr::RenderModel_TextureMap_t *vrTexture;
+	   GFXTextureObject *texture;
+	   NamedTexTarget *targetTexture;
+	   vr::EVRRenderModelError textureError;
+   };
+
    OpenVRProvider();
    ~OpenVRProvider();
 
+   typedef Signal <void(const vr::VREvent_t &evt)> VREventSignal;
+   VREventSignal& getVREventSignal() { return mVREventSignal;  }
+
    static void staticInit();
 
    bool enable();
    bool disable();
 
    bool getActive() { return mHMD != NULL; }
+   inline vr::IVRRenderModels* getRenderModels() { return mRenderModels; }
 
    /// @name Input handling
    /// {
@@ -216,6 +276,11 @@ public:
    void submitInputChanges();
 
    void resetSensors();
+
+   void mapDeviceToEvent(U32 deviceIdx, S32 eventIdx);
+   void resetEventMap();
+
+   IDevicePose getTrackedDevicePose(U32 idx);
    /// }
 
 	/// @name Overlay registration
@@ -224,6 +289,16 @@ public:
 	void unregisterOverlay(OpenVROverlay* overlay);
 	/// }
 
+	/// @name Model loading
+	/// {
+	const S32 preloadRenderModel(StringTableEntry name);
+	const S32 preloadRenderModelTexture(U32 index);
+	bool getRenderModel(S32 idx, OpenVRRenderModel **ret, bool &failed);
+	bool getRenderModelTexture(S32 idx, GFXTextureObject **outTex, bool &failed);
+	bool getRenderModelTextureName(S32 idx, String &outName);
+	void resetRenderModels();
+	/// }
+
 
    /// @name Console API
    /// {
@@ -237,6 +312,9 @@
 
    void setKeyboardTransformAbsolute(const MatrixF &xfm);
    void setKeyboardPositionForOverlay(OpenVROverlay *overlay, const RectI &rect);
+
+   void getControllerDeviceIndexes(vr::TrackedDeviceClass &deviceClass, Vector<S32> &outList);
+   StringTableEntry getControllerModel(U32 idx);
    /// }
 
    /// @name OpenVR state
@@ -250,6 +328,9 @@ public:
    IDevicePose mPreviousInputTrackedDevicePose[vr::k_unMaxTrackedDeviceCount];
    U32 mValidPoseCount;
 
+   vr::VRControllerState_t mCurrentControllerState[vr::k_unMaxTrackedDeviceCount];
+   vr::VRControllerState_t mPreviousCurrentControllerState[vr::k_unMaxTrackedDeviceCount];
+
    char mDeviceClassChar[vr::k_unMaxTrackedDeviceCount];
 
    OpenVRRenderState mHMDRenderState;
@@ -258,6 +339,16 @@ public:
    vr::ETrackingUniverseOrigin mTrackingSpace;
 
 	Vector<OpenVROverlay*> mOverlays;
+
+	VREventSignal mVREventSignal;
+	Namespace *mOpenVRNS;
+
+	Vector<LoadedRenderModel> mLoadedModels;
+	Vector<LoadedRenderTexture> mLoadedTextures;
+	Map<StringTableEntry, S32> mLoadedModelLookup;
+	Map<U32, S32> mLoadedTextureLookup;
+
+	Map<U32, S32> mDeviceEventMap;
    /// }
 
    GuiCanvas* mDrawCanvas;

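The render model API declared above (preloadRenderModel/getRenderModel and the texture equivalents) wraps OpenVR's asynchronous loaders: vr::VRRenderModels()->LoadRenderModel_Async keeps returning a "still loading" error code until the data is ready, so callers poll until they get either a model or a hard failure. A minimal standalone sketch of that polling contract, with a fake loader in place of the OpenVR call:

#include <cstdio>

// Stand-in for the vr::EVRRenderModelError contract: "Loading" means try again
// later, "None" means success, anything else is a hard failure.
enum class LoadState { None, Loading, Error };

static int attemptsLeft = 3;
static LoadState loadAsync()   // pretend vr::VRRenderModels()->LoadRenderModel_Async
{
   return (--attemptsLeft > 0) ? LoadState::Loading : LoadState::None;
}

// Mirrors the getRenderModel(idx, &model, failed) pattern: returns false while the
// model is still loading, true once finished, with 'failed' separating success from error.
static bool getModel(bool &failed)
{
   LoadState s = loadAsync();
   failed = (s == LoadState::Error);
   return s != LoadState::Loading;
}

int main()
{
   bool failed = false;
   while (!getModel(failed))   // same poll loop OpenVRTrackedObject uses when building render data
   {
      if (failed) break;
   }
   puts(failed ? "model failed to load" : "model ready");
   return 0;
}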
+ 981 - 0
Engine/source/platform/input/openVR/openVRTrackedObject.cpp

@@ -0,0 +1,981 @@
+#include "platform/platform.h"
+#include "platform/input/openVR/openVRTrackedObject.h"
+#include "platform/input/openVR/openVRProvider.h"
+
+#include "math/mathIO.h"
+#include "scene/sceneRenderState.h"
+#include "console/consoleTypes.h"
+#include "core/stream/bitStream.h"
+#include "core/resourceManager.h"
+#include "materials/materialManager.h"
+#include "materials/baseMatInstance.h"
+#include "renderInstance/renderPassManager.h"
+#include "lighting/lightQuery.h"
+#include "console/engineAPI.h"
+#include "gfx/gfxTextureManager.h"
+#include "gfx/sim/debugDraw.h"
+#include "gfx/gfxTransformSaver.h"
+#include "environment/skyBox.h"
+#include "collision/boxConvex.h"
+#include "collision/concretePolyList.h"
+#include "T3D/physics/physicsPlugin.h"
+#include "T3D/physics/physicsCollision.h"
+#include "T3D/physics/physicsBody.h"
+
+#ifdef TORQUE_EXTENDED_MOVE
+#include "T3D/gameBase/extended/extendedMove.h"
+#endif
+
+
+bool OpenVRTrackedObject::smDebugControllerMovePosition = true;
+bool OpenVRTrackedObject::smDebugControllerPosition = false;
+
+static const U32 sCollisionMoveMask = (PlayerObjectType |
+	StaticShapeObjectType | VehicleObjectType);
+
+U32 OpenVRTrackedObject::sServerCollisionMask = sCollisionMoveMask; // ItemObjectType
+U32 OpenVRTrackedObject::sClientCollisionMask = sCollisionMoveMask;
+
+//-----------------------------------------------------------------------------
+
+IMPLEMENT_CO_DATABLOCK_V1(OpenVRTrackedObjectData);
+
+OpenVRTrackedObjectData::OpenVRTrackedObjectData() :
+   mShapeFile(NULL)
+{
+	mCollisionBoxMin = Point3F(-0.02, -0.20, -0.02);
+	mCollisionBoxMax = Point3F(0.02, 0.05, 0.02);
+}
+
+OpenVRTrackedObjectData::~OpenVRTrackedObjectData()
+{
+}
+
+bool OpenVRTrackedObjectData::onAdd()
+{
+	if (Parent::onAdd())
+	{
+		return true;
+	}
+
+	return false;
+}
+
+bool OpenVRTrackedObjectData::preload(bool server, String &errorStr)
+{
+	if (!Parent::preload(server, errorStr))
+		return false;
+
+	bool error = false;
+	if (!server)
+	{
+		mShape = mShapeFile ? ResourceManager::get().load(mShapeFile) : NULL;
+	}
+
+	return !error;
+}
+
+void OpenVRTrackedObjectData::initPersistFields()
+{
+	addGroup("Render Components");
+	addField("shape", TypeShapeFilename, Offset(mShapeFile, OpenVRTrackedObjectData), "Shape file to use for controller model.");
+	addField("collisionMin", TypePoint3F, Offset(mCollisionBoxMin, OpenVRTrackedObjectData), "Box min");
+	addField("collisionMax", TypePoint3F, Offset(mCollisionBoxMax, OpenVRTrackedObjectData), "Box max");
+	endGroup("Render Components");
+
+	Parent::initPersistFields();
+}
+
+void OpenVRTrackedObjectData::packData(BitStream* stream)
+{
+	Parent::packData(stream);
+
+	stream->writeString(mShapeFile);
+}
+
+void OpenVRTrackedObjectData::unpackData(BitStream* stream)
+{
+	Parent::unpackData(stream);
+
+	mShapeFile = stream->readSTString();
+}
+
+//-----------------------------------------------------------------------------
+
+
+IMPLEMENT_CO_NETOBJECT_V1(OpenVRTrackedObject);
+
+ConsoleDocClass(OpenVRTrackedObject,
+	"@brief Renders and handles interactions with OpenVR controllers and tracked objects.\n\n"
+	"This class implements basic rendering and interactions with OpenVR controllers.\n\n"
+	"The object should be controlled by a player object. Controllers will be rendered at\n"
+	"the correct position regardless of the current transform of the object.\n"
+	"@ingroup OpenVR\n");
+
+
+//-----------------------------------------------------------------------------
+// Object setup and teardown
+//-----------------------------------------------------------------------------
+OpenVRTrackedObject::OpenVRTrackedObject() :
+   mDataBlock(NULL),
+   mShapeInstance(NULL),
+   mBasicModel(NULL),
+   mDeviceIndex(-1),
+   mMappedMoveIndex(-1),
+   mIgnoreParentRotation(true),
+   mConvexList(new Convex()),
+   mPhysicsRep(NULL)
+{
+	// Flag this object so that it will always
+	// be sent across the network to clients
+	mNetFlags.set(Ghostable | ScopeAlways);
+
+	// Set it as a "static" object that casts shadows
+	mTypeMask |= StaticObjectType | StaticShapeObjectType;
+
+	mPose.connected = false;
+}
+
+OpenVRTrackedObject::~OpenVRTrackedObject()
+{
+	clearRenderData();
+	delete mConvexList;
+}
+
+void OpenVRTrackedObject::updateRenderData()
+{
+	clearRenderData();
+
+	if (!mDataBlock)
+		return;
+
+	// Are we using a model?
+	if (mDataBlock->mShape)
+	{
+		if (mShapeInstance && mShapeInstance->getShape() != mDataBlock->mShape)
+		{
+			delete mShapeInstance;
+			mShapeInstance = NULL;
+		}
+
+		if (!mShapeInstance)
+		{
+			mShapeInstance = new TSShapeInstance(mDataBlock->mShape, isClientObject());
+		}
+	}
+	else
+	{
+		setupRenderDataFromModel(isClientObject());
+	}
+}
+
+void OpenVRTrackedObject::setupRenderDataFromModel(bool loadComponentModels)
+{
+	clearRenderData();
+	
+	if (!OPENVR || !OPENVR->isEnabled())
+		return;
+
+	vr::IVRRenderModels *models = OPENVR->getRenderModels();
+	if (!models)
+		return;
+
+	if (!mShapeInstance && mModelName && mModelName[0] != '\0')
+	{
+		bool failed = false;
+		S32 idx = OPENVR->preloadRenderModel(mModelName);
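+		// Spin until OPENVR reports the render model as loaded or failed.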
+		while (!OPENVR->getRenderModel(idx, &mBasicModel, failed))
+		{
+			if (failed)
+				break;
+		}
+	}
+
+	if (loadComponentModels)
+	{
+		mRenderComponents.setSize(models->GetComponentCount(mModelName));
+
+		for (U32 i = 0, sz = mRenderComponents.size(); i < sz; i++)
+		{
+			RenderModelSlot &slot = mRenderComponents[i];
+			char buffer[1024];
+
+			slot.mappedNodeIdx = -1;
+			slot.componentName = NULL;
+			slot.nativeModel = NULL;
+
+			U32 result = models->GetComponentName(mModelName, i, buffer, sizeof(buffer));
+			if (result == 0)
+				continue;
+
+#ifdef DEBUG_CONTROLLER_MODELS
+			Con::printf("Controller[%s] component %i NAME == %s", mModelName, i, buffer);
+#endif
+
+			slot.componentName = StringTable->insert(buffer, true);
+
+			result = models->GetComponentRenderModelName(mModelName, slot.componentName, buffer, sizeof(buffer));
+			if (result == 0)
+			{
+#ifdef DEBUG_CONTROLLER_MODELS
+				Con::printf("Controller[%s] component %i NO MODEL", mModelName, i);
+#endif
+				continue;
+			}
+
+#ifdef DEBUG_CONTROLLER_MODELS
+			Con::printf("Controller[%s] component %i == %s", mModelName, i, slot.componentName);
+#endif
+
+			bool failed = false;
+			S32 idx = OPENVR->preloadRenderModel(StringTable->insert(buffer, true));
+			while (!OPENVR->getRenderModel(idx, &slot.nativeModel, failed))
+			{
+				if (failed)
+					break;
+			}
+		}
+	}
+}
+
+void OpenVRTrackedObject::clearRenderData()
+{
+	mBasicModel = NULL;
+	mRenderComponents.clear();
+}
+
+//-----------------------------------------------------------------------------
+// Object Editing
+//-----------------------------------------------------------------------------
+void OpenVRTrackedObject::initPersistFields()
+{
+	// SceneObject already handles exposing the transform
+	Parent::initPersistFields();
+
+	addField("deviceIndex", TypeS32, Offset(mDeviceIndex, OpenVRTrackedObject), "Index of device to track");
+	addField("mappedMoveIndex", TypeS32, Offset(mMappedMoveIndex, OpenVRTrackedObject), "Index of movemanager state to track"); addField("deviceIndex", TypeS32, Offset(mDeviceIndex, OpenVRTrackedObject), "Index of device to track");
+	addField("ignoreParentRotation", TypeBool, Offset(mIgnoreParentRotation, OpenVRTrackedObject), "Index of movemanager state to track"); addField("deviceIndex", TypeS32, Offset(mDeviceIndex, OpenVRTrackedObject), "Index of device to track");
+
+	static bool conInit = false;
+	if (!conInit)
+	{
+		Con::addVariable("$OpenVRTrackedObject::debugControllerPosition", TypeBool, &smDebugControllerPosition);
+		Con::addVariable("$OpenVRTrackedObject::debugControllerMovePosition", TypeBool, &smDebugControllerMovePosition);
+		conInit = true;
+	}
+}
+
+void OpenVRTrackedObject::inspectPostApply()
+{
+	Parent::inspectPostApply();
+
+	// Flag the network mask to send the updates
+	// to the client object
+	setMaskBits(UpdateMask);
+}
+
+bool OpenVRTrackedObject::onAdd()
+{
+	if (!Parent::onAdd())
+		return false;
+
+	// Set up a 1x1x1 bounding box
+	mObjBox.set(Point3F(-0.5f, -0.5f, -0.5f),
+		Point3F(0.5f, 0.5f, 0.5f));
+
+	resetWorldBox();
+
+	// Add this object to the scene
+	addToScene();
+
+	if (mDataBlock)
+	{
+		mObjBox.minExtents = mDataBlock->mCollisionBoxMin;
+		mObjBox.maxExtents = mDataBlock->mCollisionBoxMax;
+		resetWorldBox();
+	}
+	else
+	{
+		setGlobalBounds();
+	}
+
+	return true;
+}
+
+void OpenVRTrackedObject::onRemove()
+{
+	// Remove this object from the scene
+	removeFromScene();
+
+	clearRenderData();
+
+	SAFE_DELETE(mPhysicsRep);
+
+	Parent::onRemove();
+}
+
+void OpenVRTrackedObject::_updatePhysics()
+{
+	SAFE_DELETE(mPhysicsRep);
+
+	if (!PHYSICSMGR)
+		return;
+
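+	// Build a simple kinematic trigger body from the collision box so the
+	// controller can generate overlap events without being pushed by the
+	// simulation.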
+	PhysicsCollision *colShape = NULL;
+	MatrixF offset(true);
+	colShape = PHYSICSMGR->createCollision();
+	colShape->addBox(getObjBox().getExtents() * 0.5f * mObjScale, offset);
+
+	if (colShape)
+	{
+		PhysicsWorld *world = PHYSICSMGR->getWorld(isServerObject() ? "server" : "client");
+		mPhysicsRep = PHYSICSMGR->createBody();
+		mPhysicsRep->init(colShape, 0, PhysicsBody::BF_TRIGGER | PhysicsBody::BF_KINEMATIC, this, world);
+		mPhysicsRep->setTransform(getTransform());
+	}
+}
+
+bool OpenVRTrackedObject::onNewDataBlock(GameBaseData *dptr, bool reload)
+{
+	mDataBlock = dynamic_cast<OpenVRTrackedObjectData*>(dptr);
+	if (!mDataBlock || !Parent::onNewDataBlock(dptr, reload))
+		return false;
+
+	// Setup the models
+	clearRenderData();
+
+	mObjBox.minExtents = mDataBlock->mCollisionBoxMin;
+	mObjBox.maxExtents = mDataBlock->mCollisionBoxMax;
+
+	mGlobalBounds = false;
+
+	resetWorldBox();
+
+	_updatePhysics();
+
+	scriptOnNewDataBlock();
+
+	return true;
+}
+
+void OpenVRTrackedObject::setInteractObject(SceneObject* object, bool holding)
+{
+	mInteractObject = object;
+	mHoldInteractedObject = holding;
+}
+
+void OpenVRTrackedObject::setTransform(const MatrixF & mat)
+{
+	// Let SceneObject handle all of the matrix manipulation
+	Parent::setTransform(mat);
+
+	// Dirty our network mask so that the new transform gets
+	// transmitted to the client object
+	setMaskBits(UpdateMask);
+}
+
+void OpenVRTrackedObject::setModelName(String &modelName)
+{
+	if (!isServerObject())
+		return;
+
+	mModelName = StringTable->insert(modelName.c_str(), true);
+	setMaskBits(UpdateMask);
+}
+
+U32 OpenVRTrackedObject::packUpdate(NetConnection *conn, U32 mask, BitStream *stream)
+{
+	// Allow the Parent to get a crack at writing its info
+	U32 retMask = Parent::packUpdate(conn, mask, stream);
+
+	// Write our transform information
+	if (stream->writeFlag(mask & UpdateMask))
+	{
+		mathWrite(*stream, getTransform());
+		mathWrite(*stream, getScale());
+
+		stream->write((S16)mDeviceIndex);
+		stream->write((S16)mMappedMoveIndex);
+		stream->writeString(mModelName);
+	}
+
+	return retMask;
+}
+
+void OpenVRTrackedObject::unpackUpdate(NetConnection *conn, BitStream *stream)
+{
+	// Let the Parent read any info it sent
+	Parent::unpackUpdate(conn, stream);
+
+	if (stream->readFlag())  // UpdateMask
+	{
+		mathRead(*stream, &mObjToWorld);
+		mathRead(*stream, &mObjScale);
+
+		setTransform(mObjToWorld);
+		
+		S16 readDeviceIndex;
+		S16 readMoveIndex;
+		stream->read(&readDeviceIndex);
+		stream->read(&readMoveIndex);
+
+		mDeviceIndex = readDeviceIndex;
+		mMappedMoveIndex = readMoveIndex;
+		mModelName = stream->readSTString();
+
+		updateRenderData();
+	}
+
+}
+
+void OpenVRTrackedObject::writePacketData(GameConnection *conn, BitStream *stream)
+{
+	Parent::writePacketData(conn, stream);
+}
+
+void OpenVRTrackedObject::readPacketData(GameConnection *conn, BitStream *stream)
+{
+	Parent::readPacketData(conn, stream);
+}
+
+MatrixF OpenVRTrackedObject::getTrackedTransform()
+{
+	IDevicePose pose = OPENVR->getTrackedDevicePose(mDeviceIndex);
+	MatrixF trackedMat(1);
+
+	pose.orientation.setMatrix(&trackedMat);
+	trackedMat.setPosition(pose.position);
+
+	return trackedMat;
+}
+
+MatrixF OpenVRTrackedObject::getLastTrackedTransform()
+{
+	MatrixF trackedMat(1);
+
+	mPose.orientation.setMatrix(&trackedMat);
+	trackedMat.setPosition(mPose.position);
+
+	return trackedMat;
+}
+
+MatrixF OpenVRTrackedObject::getBaseTrackingTransform()
+{
+	if (isMounted())
+	{
+		MatrixF mat;
+
+		mMount.object->getMountTransform(mMount.node, mMount.xfm, &mat);
+		if (mIgnoreParentRotation)
+		{
+			Point3F pos = mat.getPosition();
+			mat = MatrixF(1);
+			mat.setPosition(pos);
+		}
+		//mat.inverse();
+		return mat;
+	}
+
+	return MatrixF(1);
+}
+
+void OpenVRTrackedObject::prepRenderImage(SceneRenderState *state)
+{
+	RenderPassManager *renderPass = state->getRenderPass();
+
+	// debug rendering for now
+
+	if (mDeviceIndex < 0)
+		return;
+
+	// Current pose
+	IDevicePose pose = OPENVR->getTrackedDevicePose(mDeviceIndex);
+	IDevicePose hmdPose = OPENVR->getTrackedDevicePose(0);
+
+	if (!pose.connected && !mPose.connected)
+		return;
+
+	MatrixF offsetMat = getBaseTrackingTransform();
+	//offsetMat.inverse();
+
+	Point3F pos = offsetMat.getPosition();
+	//Con::printf("Base offs == %f,%f,%f", pos.x, pos.y, pos.z);
+
+	const F32 CONTROLLER_SCALE = 0.1;
+
+	if (smDebugControllerPosition)
+	{
+		ColorI drawColor = ColorI::GREEN;
+		if (!pose.valid)
+		{
+			drawColor = ColorI::RED;
+		}
+
+		// Draw Camera
+		/*
+		DisplayPose cameraPose;
+		OPENVR->getFrameEyePose(&cameraPose, -1);
+		Point3F cameraCenter(0);
+		MatrixF cameraMat(1);
+		cameraPose.orientation.setMatrix(&cameraMat);
+		cameraMat.setPosition(cameraPose.position);
+		cameraMat.mulP(cameraCenter);
+		//DebugDrawer::get()->drawBox(cameraCenter - Point3F(0.1), cameraCenter + Point3F(0.1), ColorI::GREEN);
+		
+		DebugDrawer::get()->drawTransformedBoxOutline(Point3F(-0.5, -0.1, -0.5), Point3F(0.5, 0.1, 0.5), ColorI::WHITE, cameraMat); // general box 
+		*/
+
+		// Draw Tracked HMD Pos
+		Point3F hmdCenter(0, 0, 0);
+		MatrixF hmdMat(1);
+		hmdPose.orientation.setMatrix(&hmdMat);
+		hmdMat.setPosition(hmdPose.position);
+		hmdMat.inverse(); // -> world mat (as opposed to world -> tracked pos)
+		hmdMat = offsetMat * hmdMat;
+		hmdMat.mulP(hmdCenter);
+		DebugDrawer::get()->drawBox(hmdCenter - Point3F(0.1), hmdCenter + Point3F(0.1), ColorI::RED);
+		DebugDrawer::get()->drawTransformedBoxOutline(Point3F(-0.5, -0.1, -0.5), Point3F(0.5, 0.1, 0.5), ColorI::GREEN, hmdMat); // general box 
+
+
+		// Draw Controller
+		MatrixF mat(1);
+		pose.orientation.setMatrix(&mat);
+		mat.setPosition(pose.position);
+		mat.inverse(); // same as HMD
+		mat = offsetMat * mat;
+
+		Point3F middleStart(0, -1 * CONTROLLER_SCALE, 0);
+		Point3F middleEnd(0, 1 * CONTROLLER_SCALE, 0);
+		Point3F middle(0, 0, 0);
+
+		Point3F center(0, 0, 0);
+		mat.mulP(center);
+
+		//DebugDrawer::get()->drawBox(center - Point3F(0.1), center + Point3F(0.1), ColorI::BLUE);
+
+		mat.mulP(middleStart);
+		mat.mulP(middle);
+		mat.mulP(middleEnd);
+
+		char buffer[256];
+		dSprintf(buffer, 256, "%f %f %f", center.x, center.y, center.z);
+		DebugDrawer::get()->drawText(middle, buffer);
+		DebugDrawer::get()->drawLine(middleStart, middle, ColorI(0, 255, 0)); // axis back
+		DebugDrawer::get()->drawLine(middleEnd, middle, ColorI(255, 0, 0)); // axis forward
+		DebugDrawer::get()->drawTransformedBoxOutline(Point3F(-0.5, -1, -0.5) * CONTROLLER_SCALE, Point3F(0.5, 1, 0.5) * CONTROLLER_SCALE, drawColor, mat); // general box 
+		DebugDrawer::get()->drawBoxOutline(Point3F(-1), Point3F(1), ColorI::WHITE);
+	}
+
+	if (isClientObject() && smDebugControllerMovePosition)
+	{
+		MatrixF transform = getRenderTransform();
+		transform.scale(mObjScale);
+		DebugDrawer::get()->drawTransformedBoxOutline(mObjBox.minExtents, mObjBox.maxExtents, ColorI::RED, transform);
+		
+		// jamesu - grab server object pose for debugging
+		OpenVRTrackedObject* tracked = static_cast<OpenVRTrackedObject*>(getServerObject());
+		if (tracked)
+		{
+			mPose = tracked->mPose;
+		}
+
+		ColorI drawColor = ColorI::GREEN;
+		if (!pose.valid)
+		{
+			drawColor = ColorI::RED;
+		}
+
+		// Draw Controller
+		MatrixF mat(1);
+		mPose.orientation.setMatrix(&mat);
+		mat.setPosition(mPose.position);
+		mat.inverse(); // same as HMD
+		mat = offsetMat * mat;
+
+		Point3F middleStart(0, -1 * CONTROLLER_SCALE, 0);
+		Point3F middleEnd(0, 1 * CONTROLLER_SCALE, 0);
+		Point3F middle(0, 0, 0);
+
+		Point3F center(0, 0, 0);
+		mat.mulP(center);
+
+		//DebugDrawer::get()->drawBox(center - Point3F(0.1), center + Point3F(0.1), ColorI::BLUE);
+
+		mat.mulP(middleStart);
+		mat.mulP(middle);
+		mat.mulP(middleEnd);
+
+		char buffer[256];
+		dSprintf(buffer, 256, "%f %f %f", center.x, center.y, center.z);
+		DebugDrawer::get()->drawText(middle, buffer);
+		DebugDrawer::get()->drawLine(middleStart, middle, ColorI(0, 255, 0)); // axis back
+		DebugDrawer::get()->drawLine(middleEnd, middle, ColorI(255, 0, 0)); // axis forward
+		DebugDrawer::get()->drawTransformedBoxOutline(Point3F(-0.5, -1, -0.5) * CONTROLLER_SCALE, Point3F(0.5, 1, 0.5) * CONTROLLER_SCALE, drawColor, mat); // general box 
+		DebugDrawer::get()->drawBoxOutline(Point3F(-1), Point3F(1), ColorI::WHITE);
+	}
+
+	// Controller matrix base
+	MatrixF trackedMat = getTrackedTransform();
+	MatrixF invTrackedMat(1);
+
+	invTrackedMat = trackedMat;
+	invTrackedMat.inverse(); // -> world mat (as opposed to world -> tracked pos)
+
+	invTrackedMat = getBaseTrackingTransform() * invTrackedMat;
+	trackedMat = invTrackedMat;
+	trackedMat.inverse();
+
+	// Render the controllers, using either the render model or the shape
+	if (mShapeInstance)
+	{
+		// Calculate the distance of this object from the camera
+		Point3F cameraOffset = invTrackedMat.getPosition();
+		cameraOffset -= state->getDiffuseCameraPosition();
+		F32 dist = cameraOffset.len();
+		if (dist < 0.01f)
+			dist = 0.01f;
+
+		// Set up the LOD for the shape
+		F32 invScale = (1.0f / getMax(getMax(mObjScale.x, mObjScale.y), mObjScale.z));
+
+		mShapeInstance->setDetailFromDistance(state, dist * invScale);
+
+		// Make sure we have a valid level of detail
+		if (mShapeInstance->getCurrentDetail() < 0)
+			return;
+
+		// GFXTransformSaver is a handy helper class that restores
+		// the current GFX matrices to their original values when
+		// it goes out of scope at the end of the function
+		GFXTransformSaver saver;
+
+		// Set up our TS render state
+		TSRenderState rdata;
+		rdata.setSceneState(state);
+		rdata.setFadeOverride(1.0f);
+
+		// We might have some forward lit materials
+		// so pass down a query to gather lights.
+		LightQuery query;
+		query.init(getWorldSphere());
+		rdata.setLightQuery(&query);
+
+		// Set the world matrix to the objects render transform
+		MatrixF mat = trackedMat;
+
+		mat.scale(mObjScale);
+		GFX->setWorldMatrix(mat);
+
+		// TODO: move the nodes about for components
+
+		mShapeInstance->animate();
+		mShapeInstance->render(rdata);
+	}
+	else if (mRenderComponents.size() > 0)
+	{
+		vr::IVRRenderModels *models = OPENVR->getRenderModels();
+		if (!models)
+			return;
+
+		vr::IVRSystem* vrs = vr::VRSystem();
+
+		if (!vrs->GetControllerState(mDeviceIndex, &mCurrentControllerState))
+		{
+			return;
+		}
+
+		for (U32 i = 0, sz = mRenderComponents.size(); i < sz; i++)
+		{
+			RenderModelSlot slot = mRenderComponents[i];
+			vr::RenderModel_ControllerMode_State_t modeState;
+			vr::RenderModel_ComponentState_t componentState;
+
+			modeState.bScrollWheelVisible = false;
+
+			if (models->GetComponentState(mModelName, slot.componentName, &mCurrentControllerState, &modeState, &componentState))
+			{
+				MeshRenderInst *ri = renderPass->allocInst<MeshRenderInst>();
+
+				// Set our RenderInst as a standard mesh render
+				ri->type = RenderPassManager::RIT_Mesh;
+
+				// Calculate our sorting point
+				if (state && slot.nativeModel)
+				{
+					// Calculate our sort point manually.
+					const Box3F rBox = slot.nativeModel->getWorldBox(invTrackedMat);
+					ri->sortDistSq = rBox.getSqDistanceToPoint(state->getCameraPosition());
+				}
+				else
+				{
+					ri->sortDistSq = 0.0f;
+				}
+
+				MatrixF newTransform = trackedMat;
+				MatrixF controllerOffsMat = OpenVRUtil::convertSteamVRAffineMatrixToMatrixFPlain(componentState.mTrackingToComponentRenderModel);
+				MatrixF offComponentMat(1);
+				OpenVRUtil::convertTransformFromOVR(controllerOffsMat, offComponentMat);
+
+				newTransform = offComponentMat * newTransform;
+
+				newTransform.inverse();
+
+				//DebugDrawer::get()->drawBox(newTransform.getPosition() - Point3F(0.001), newTransform.getPosition() + Point3F(0.001), ColorI::BLUE);
+
+				if (!slot.nativeModel)
+					continue;
+				if (i < 1)
+					continue;
+
+				// Set up our transforms
+				ri->objectToWorld = renderPass->allocUniqueXform(newTransform);
+				ri->worldToCamera = renderPass->allocSharedXform(RenderPassManager::View);
+				ri->projection = renderPass->allocSharedXform(RenderPassManager::Projection);
+
+				// If our material needs lights then fill the RIs
+				// light vector with the best lights.
+				if (true)
+				{
+					LightQuery query;
+					Point3F center(0, 0, 0);
+					invTrackedMat.mulP(center);
+					query.init(SphereF(center, 10.0f));
+					query.getLights(ri->lights, 8);
+				}
+
+				// Draw model
+				slot.nativeModel->draw(state, ri);
+				state->getRenderPass()->addInst(ri);
+			}
+		}
+	}
+	else if (mBasicModel)
+	{
+		MeshRenderInst *ri = renderPass->allocInst<MeshRenderInst>();
+
+		// Set our RenderInst as a standard mesh render
+		ri->type = RenderPassManager::RIT_Mesh;
+
+		// Calculate our sorting point
+		if (state)
+		{
+			// Calculate our sort point manually.
+			const Box3F rBox = mBasicModel->getWorldBox(invTrackedMat);
+			ri->sortDistSq = rBox.getSqDistanceToPoint(state->getCameraPosition());
+		}
+		else
+		{
+			ri->sortDistSq = 0.0f;
+		}
+
+		MatrixF newTransform = invTrackedMat;
+		// Set up our transforms
+		ri->objectToWorld = renderPass->allocUniqueXform(newTransform);
+		ri->worldToCamera = renderPass->allocSharedXform(RenderPassManager::View);
+		ri->projection = renderPass->allocSharedXform(RenderPassManager::Projection);
+
+		// If our material needs lights then fill the RIs
+		// light vector with the best lights.
+		if (true)
+		{
+			LightQuery query;
+			Point3F center(0, 0, 0);
+			invTrackedMat.mulP(center);
+			query.init(SphereF(center, 10.0f));
+			query.getLights(ri->lights, 8);
+		}
+
+		// Draw model
+		mBasicModel->draw(state, ri);
+		state->getRenderPass()->addInst(ri);
+	}
+}
+
+U32 OpenVRTrackedObject::getCollisionMask()
+{
+	if (isServerObject())
+		return sServerCollisionMask;
+	else
+		return sClientCollisionMask;
+}
+
+void OpenVRTrackedObject::updateWorkingCollisionSet()
+{
+	const U32 mask = getCollisionMask();
+	Box3F convexBox = mConvexList->getBoundingBox(getTransform(), getScale());
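+	// Pad the box by roughly one tick of travel at an assumed maximum speed of
+	// 50 world units per second, plus a small fudge factor.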
+	F32 len = (50) * TickSec;
+	F32 l = (len * 1.1) + 0.1;  // fudge factor
+	convexBox.minExtents -= Point3F(l, l, l);
+	convexBox.maxExtents += Point3F(l, l, l);
+
+	disableCollision();
+	mConvexList->updateWorkingList(convexBox, mask);
+	enableCollision();
+}
+
+void OpenVRTrackedObject::updateMove(const Move *move)
+{
+	// Set transform based on move
+
+#ifdef TORQUE_EXTENDED_MOVE
+
+	const ExtendedMove* emove = dynamic_cast<const ExtendedMove*>(move);
+	if (!emove)
+		return;
+
+	U32 emoveIndex = mMappedMoveIndex;
+	if (emoveIndex >= ExtendedMove::MaxPositionsRotations)
+		emoveIndex = 0;
+
+	//IDevicePose pose = OPENVR->getTrackedDevicePose(mDeviceIndex);
+	//Con::printf("OpenVRTrackedObject::processTick move %i", emoveIndex);
+
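+	// Only axis-angle (non-Euler) rotations are reconstructed here; Euler-based
+	// moves leave the previous pose untouched.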
+	if (!emove->EulerBasedRotation[emoveIndex])
+	{
+		AngAxisF inRot = AngAxisF(Point3F(emove->rotX[emoveIndex], emove->rotY[emoveIndex], emove->rotZ[emoveIndex]), emove->rotW[emoveIndex]);
+		// Update our pose based on the move info
+		mPose.orientation = inRot;
+		mPose.position = Point3F(emove->posX[emoveIndex], emove->posY[emoveIndex], emove->posZ[emoveIndex]);
+		mPose.valid = true;
+		mPose.connected = true;
+	}
+
+	// Set transform based on move pose
+	MatrixF trackedMat(1);
+	MatrixF invTrackedMat(1);
+
+	mPose.orientation.setMatrix(&trackedMat);
+	trackedMat.setPosition(mPose.position);
+
+	invTrackedMat = trackedMat;
+	invTrackedMat.inverse(); // -> world mat (as opposed to world -> tracked pos)
+
+	invTrackedMat = getBaseTrackingTransform() * invTrackedMat;
+	trackedMat = invTrackedMat;
+	trackedMat.inverse();
+
+	SceneObject::setTransform(invTrackedMat);
+
+	if (mPhysicsRep)
+		mPhysicsRep->setTransform(invTrackedMat);
+#endif
+}
+
+void OpenVRTrackedObject::processTick(const Move *move)
+{
+	// Perform collision checks
+	if (isServerObject())
+	{
+		updateMove(move);
+
+		if (!mPhysicsRep)
+		{
+			updateWorkingCollisionSet();
+		}
+	}
+
+	Parent::processTick(move);
+}
+
+void OpenVRTrackedObject::interpolateTick(F32 delta)
+{
+	// Set latest transform
+
+	Parent::interpolateTick(delta);
+}
+
+void OpenVRTrackedObject::advanceTime(F32 dt)
+{
+	Parent::advanceTime(dt);
+}
+
+bool OpenVRTrackedObject::castRay(const Point3F &start, const Point3F &end, RayInfo* info)
+{
+	if (!mPose.connected || !mPose.valid)
+		return false;
+
+	// Collide against bounding box.
+	F32 st, et, fst = 0.0f, fet = 1.0f;
+	F32 *bmin = &mObjBox.minExtents.x;
+	F32 *bmax = &mObjBox.maxExtents.x;
+	F32 const *si = &start.x;
+	F32 const *ei = &end.x;
+
+	for (S32 i = 0; i < 3; i++) {
+		if (*si < *ei) {
+			if (*si > *bmax || *ei < *bmin)
+				return false;
+			F32 di = *ei - *si;
+			st = (*si < *bmin) ? (*bmin - *si) / di : 0.0f;
+			et = (*ei > *bmax) ? (*bmax - *si) / di : 1.0f;
+		}
+		else {
+			if (*ei > *bmax || *si < *bmin)
+				return false;
+			F32 di = *ei - *si;
+			st = (*si > *bmax) ? (*bmax - *si) / di : 0.0f;
+			et = (*ei < *bmin) ? (*bmin - *si) / di : 1.0f;
+		}
+		if (st > fst) fst = st;
+		if (et < fet) fet = et;
+		if (fet < fst)
+			return false;
+		bmin++; bmax++;
+		si++; ei++;
+	}
+
+	info->normal = start - end;
+	info->normal.normalizeSafe();
+	getTransform().mulV(info->normal);
+
+	info->t = fst;
+	info->object = this;
+	info->point.interpolate(start, end, fst);
+	info->material = 0;
+	return true;
+}
+
+void OpenVRTrackedObject::buildConvex(const Box3F& box, Convex* convex)
+{
+	// These should really come out of a pool
+	mConvexList->collectGarbage();
+
+	Box3F realBox = box;
+	mWorldToObj.mul(realBox);
+	realBox.minExtents.convolveInverse(mObjScale);
+	realBox.maxExtents.convolveInverse(mObjScale);
+
+	if (realBox.isOverlapped(getObjBox()) == false)
+		return;
+
+	// Just return a box convex for the entire shape...
+	Convex* cc = 0;
+	CollisionWorkingList& wl = convex->getWorkingList();
+	for (CollisionWorkingList* itr = wl.wLink.mNext; itr != &wl; itr = itr->wLink.mNext) {
+		if (itr->mConvex->getType() == BoxConvexType &&
+			itr->mConvex->getObject() == this) {
+			cc = itr->mConvex;
+			break;
+		}
+	}
+	if (cc)
+		return;
+
+	// Create a new convex.
+	BoxConvex* cp = new BoxConvex;
+	mConvexList->registerObject(cp);
+	convex->addToWorkingList(cp);
+	cp->init(this);
+
+	mObjBox.getCenter(&cp->mCenter);
+	cp->mSize.x = mObjBox.len_x() / 2.0f;
+	cp->mSize.y = mObjBox.len_y() / 2.0f;
+	cp->mSize.z = mObjBox.len_z() / 2.0f;
+}
+
+bool OpenVRTrackedObject::testObject(SceneObject* enter)
+{
+	return false; // TODO
+}
+
+DefineEngineMethod(OpenVRTrackedObject, setModelName, void, (String modelName),, "Set the OpenVR render model name. Only takes effect on the server object; the name is then networked to client ghosts.")
+{
+	object->setModelName(modelName);
+}
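+
+// Hypothetical script usage on the server object (render model names are
+// reported by the OpenVR runtime):
+//
+//    %trackedObj.setModelName("vr_controller_vive_1_5");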

+ 155 - 0
Engine/source/platform/input/openVR/openVRTrackedObject.h

@@ -0,0 +1,155 @@
+#ifndef _OPENVR_TRACKED_OBJECT_H_
+#define _OPENVR_TRACKED_OBJECT_H_
+
+#ifndef _GAMEBASE_H_
+#include "T3D/gameBase/gameBase.h"
+#endif
+#ifndef _GFXVERTEXBUFFER_H_
+#include "gfx/gfxVertexBuffer.h"
+#endif
+#ifndef _GFXPRIMITIVEBUFFER_H_
+#include "gfx/gfxPrimitiveBuffer.h"
+#endif
+#ifndef _TSSHAPEINSTANCE_H_
+#include "ts/tsShapeInstance.h"
+#endif
+#include "collision/earlyOutPolyList.h"
+
+#include <openvr.h>
+
+class BaseMatInstance;
+class OpenVRRenderModel;
+class PhysicsBody;
+
+class OpenVRTrackedObjectData : public GameBaseData {
+public:
+	typedef GameBaseData Parent;
+
+	StringTableEntry mShapeFile;
+	Resource<TSShape> mShape; ///< Torque model
+
+	Point3F mCollisionBoxMin;
+	Point3F mCollisionBoxMax;
+
+public:
+
+	OpenVRTrackedObjectData();
+	~OpenVRTrackedObjectData();
+
+	DECLARE_CONOBJECT(OpenVRTrackedObjectData);
+
+	bool onAdd();
+	bool preload(bool server, String &errorStr);
+
+	static void  initPersistFields();
+
+	virtual void packData(BitStream* stream);
+	virtual void unpackData(BitStream* stream);
+};
+
+/// Implements a GameObject which tracks an OpenVR controller
+class OpenVRTrackedObject : public GameBase
+{
+	typedef GameBase Parent;
+
+	enum MaskBits
+	{
+		UpdateMask = Parent::NextFreeMask << 0,
+		NextFreeMask = Parent::NextFreeMask << 1
+	};
+
+	struct RenderModelSlot
+	{
+		StringTableEntry componentName; ///< Component name
+		S16 mappedNodeIdx; ///< Mapped node idx in mShape
+		OpenVRRenderModel *nativeModel; ///< Native model
+	};
+
+	OpenVRTrackedObjectData *mDataBlock;
+
+	/// @name Rendering
+	/// @{
+	TSShapeInstance *mShapeInstance; ///< Shape used to render controller (uses native model otherwise)
+	StringTableEntry mModelName;
+	OpenVRRenderModel *mBasicModel; ///< Basic model
+	Vector<RenderModelSlot> mRenderComponents;
+	/// @}
+
+	S32 mDeviceIndex; ///< Controller idx in openvr (for direct updating)
+	S32 mMappedMoveIndex; ///< Movemanager move index for rotation
+
+	vr::VRControllerState_t mCurrentControllerState;
+	vr::VRControllerState_t mPreviousControllerState;
+
+	IDevicePose mPose; ///< Current openvr pose data, or reconstructed data from the client
+
+	Convex* mConvexList;
+	EarlyOutPolyList     mClippedList;
+	PhysicsBody *mPhysicsRep;
+
+	SimObjectPtr<SceneObject> mCollisionObject; ///< Object we're currently colliding with
+	SimObjectPtr<SceneObject> mInteractObject;  ///< Object we've designated as important to interact with
+
+	bool mHoldInteractedObject; ///< Performs pickup logic with mInteractObject
+	bool mIgnoreParentRotation; ///< Ignores the rotation of the parent object
+
+	static bool smDebugControllerPosition; ///< Shows latest controller position in DebugDrawer
+	static bool smDebugControllerMovePosition; ///< Shows move position in DebugDrawer
+	static U32 sServerCollisionMask;
+	static U32 sClientCollisionMask;
+
+public:
+	OpenVRTrackedObject();
+	virtual ~OpenVRTrackedObject();
+
+	void updateRenderData();
+	void setupRenderDataFromModel(bool loadComponentModels);
+
+	void clearRenderData();
+
+	DECLARE_CONOBJECT(OpenVRTrackedObject);
+
+	static void initPersistFields();
+
+	virtual void inspectPostApply();
+
+	bool onAdd();
+	void onRemove();
+
+
+	void _updatePhysics();
+	bool onNewDataBlock(GameBaseData *dptr, bool reload);
+
+	void setInteractObject(SceneObject* object, bool holding);
+
+	void setTransform(const MatrixF &mat);
+	void setModelName(String &modelName);
+
+	U32  packUpdate(NetConnection *conn, U32 mask, BitStream *stream);
+	void unpackUpdate(NetConnection *conn, BitStream *stream);
+	void writePacketData(GameConnection *conn, BitStream *stream);
+	void readPacketData(GameConnection *conn, BitStream *stream);
+
+	void prepRenderImage(SceneRenderState *state);
+
+	MatrixF getTrackedTransform();
+	MatrixF getLastTrackedTransform();
+	MatrixF getBaseTrackingTransform();
+
+	U32 getCollisionMask();
+	void updateWorkingCollisionSet();
+
+	// Time management
+	void updateMove(const Move *move);
+	void processTick(const Move *move);
+	void interpolateTick(F32 delta);
+	void advanceTime(F32 dt);
+
+	// Collision
+	bool castRay(const Point3F &start, const Point3F &end, RayInfo* info);
+	void buildConvex(const Box3F& box, Convex* convex);
+	bool testObject(SceneObject* enter);
+
+};
+
+#endif // _OPENVR_TRACKED_OBJECT_H_

+ 5 - 0
Engine/source/platform/output/IDisplayDevice.h

@@ -40,6 +40,11 @@ typedef struct DisplayPose
    Point3F velocity;
    Point3F angularVelocity;
 
+#ifdef DEBUG_DISPLAY_POSE 
+   MatrixF actualMatrix;
+   MatrixF originalMatrix;
+#endif
+
    U32 state; /// Generic state
 
    bool valid; /// Pose set

+ 2 - 0
Tools/CMake/modules/module_openvr.cmake

@@ -27,4 +27,6 @@ if(TORQUE_OPENVR)
 		endif()
 		addLib( "openvr_api" )
 	endif()
+
+    addDef(TORQUE_OPENVR)
 endif()