|
@@ -16,17 +16,21 @@
|
|
|
#include "gfx/D3D9/gfxD3D9TextureObject.h"
|
|
|
#include "gfx/D3D9/gfxD3D9EnumTranslate.h"
|
|
|
|
|
|
-/*
|
|
|
+#ifdef TORQUE_OPENGL
|
|
|
#include "gfx/gl/gfxGLDevice.h"
|
|
|
#include "gfx/gl/gfxGLTextureObject.h"
|
|
|
#include "gfx/gl/gfxGLEnumTranslate.h"
|
|
|
-*/
|
|
|
+#endif
|
|
|
|
|
|
namespace OpenVRUtil
|
|
|
{
|
|
|
- /// Convert an OVR sensor's rotation to a Torque 3D matrix
|
|
|
- void convertRotation(const F32 inRotMat[4][4], MatrixF& outRotation)
|
|
|
- {
|
|
|
+ void convertTransformFromOVR(const MatrixF &inRotTMat, MatrixF& outRotation)
|
|
|
+ {
|
|
|
+ Point4F col0; inRotTMat.getColumn(0, &col0);
|
|
|
+ Point4F col1; inRotTMat.getColumn(1, &col1);
|
|
|
+ Point4F col2; inRotTMat.getColumn(2, &col2);
|
|
|
+ Point4F col3; inRotTMat.getColumn(3, &col3);
|
|
|
+
|
|
|
// Set rotation. We need to convert from sensor coordinates to
|
|
|
// Torque coordinates. The sensor matrix is stored row-major.
|
|
|
// The conversion is:
|
|
@@ -35,15 +39,158 @@ namespace OpenVRUtil
|
|
|
// a b c a b c a -c b
|
|
|
// d e f --> -g -h -i --> -g i -h
|
|
|
// g h i d e f d -f e
|
|
|
- outRotation.setColumn(0, Point4F( inRotMat[0][0], -inRotMat[2][0], inRotMat[1][0], 0.0f));
|
|
|
- outRotation.setColumn(1, Point4F(-inRotMat[0][2], inRotMat[2][2], -inRotMat[1][2], 0.0f));
|
|
|
- outRotation.setColumn(2, Point4F( inRotMat[0][1], -inRotMat[2][1], inRotMat[1][1], 0.0f));
|
|
|
- outRotation.setPosition(Point3F::Zero);
|
|
|
+ outRotation.setColumn(0, Point4F( col0.x, -col2.x, col1.x, 0.0f));
|
|
|
+ outRotation.setColumn(1, Point4F(-col0.z, col2.z, -col1.z, 0.0f));
|
|
|
+ outRotation.setColumn(2, Point4F( col0.y, -col2.y, col1.y, 0.0f));
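+ // The translation column gets the same axis remap: OVR (x, y, z) -> Torque (-x, z, -y).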
|
|
|
+ outRotation.setColumn(3, Point4F(-col3.x, col3.z, -col3.y, 1.0f));
|
|
|
+ }
|
|
|
+
|
|
|
+ void convertTransformToOVR(const MatrixF& inRotation, MatrixF& outRotation)
|
|
|
+ {
|
|
|
+ Point4F col0; inRotation.getColumn(0, &col0);
|
|
|
+ Point4F col1; inRotation.getColumn(1, &col1);
|
|
|
+ Point4F col2; inRotation.getColumn(2, &col2);
|
|
|
+ Point4F col3; inRotation.getColumn(3, &col3);
|
|
|
+
|
|
|
+ // This is basically a reverse of what is in convertTransformFromOVR
|
|
|
+ outRotation.setColumn(0, Point4F(col0.x, col2.x, -col1.x, 0.0f));
|
|
|
+ outRotation.setColumn(1, Point4F(col0.z, col2.z, -col1.z, 0.0f));
|
|
|
+ outRotation.setColumn(2, Point4F(-col0.y, -col2.y, col1.y, 0.0f));
|
|
|
+ outRotation.setColumn(3, Point4F(-col3.x, -col3.z, col3.y, 1.0f));
|
|
|
+ }
|
|
|
+
|
|
|
+ MatrixF convertSteamVRAffineMatrixToMatrixFPlain(const vr::HmdMatrix34_t &mat)
|
|
|
+ {
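+ // "Plain" copy: transpose the row-major 3x4 SteamVR matrix into MatrixF columns with no axis conversion.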
|
|
|
+ MatrixF outMat(1);
|
|
|
+
|
|
|
+ outMat.setColumn(0, Point4F(mat.m[0][0], mat.m[1][0], mat.m[2][0], 0.0));
|
|
|
+ outMat.setColumn(1, Point4F(mat.m[0][1], mat.m[1][1], mat.m[2][1], 0.0));
|
|
|
+ outMat.setColumn(2, Point4F(mat.m[0][2], mat.m[1][2], mat.m[2][2], 0.0));
|
|
|
+ outMat.setColumn(3, Point4F(mat.m[0][3], mat.m[1][3], mat.m[2][3], 1.0f)); // pos
|
|
|
+
|
|
|
+ return outMat;
|
|
|
+ }
|
|
|
+
|
|
|
+ void convertMatrixFPlainToSteamVRAffineMatrix(const MatrixF &inMat, vr::HmdMatrix34_t &outMat)
|
|
|
+ {
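+ // Reverse of the above: write the first three rows of the MatrixF back into the 3x4 SteamVR matrix.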
|
|
|
+ Point4F row0; inMat.getRow(0, &row0);
|
|
|
+ Point4F row1; inMat.getRow(1, &row1);
|
|
|
+ Point4F row2; inMat.getRow(2, &row2);
|
|
|
+
|
|
|
+ outMat.m[0][0] = row0.x;
|
|
|
+ outMat.m[0][1] = row0.y;
|
|
|
+ outMat.m[0][2] = row0.z;
|
|
|
+ outMat.m[0][3] = row0.w;
|
|
|
+
|
|
|
+ outMat.m[1][0] = row1.x;
|
|
|
+ outMat.m[1][1] = row1.y;
|
|
|
+ outMat.m[1][2] = row1.z;
|
|
|
+ outMat.m[1][3] = row1.w;
|
|
|
+
|
|
|
+ outMat.m[2][0] = row2.x;
|
|
|
+ outMat.m[2][1] = row2.y;
|
|
|
+ outMat.m[2][2] = row2.z;
|
|
|
+ outMat.m[2][3] = row2.w;
|
|
|
+ }
|
|
|
+
|
|
|
+
|
|
|
+ vr::VRTextureBounds_t TorqueRectToBounds(const RectI &rect, const Point2I &widthHeight)
|
|
|
+ {
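+ // Convert a pixel-space rect into normalized [0..1] UV bounds for the compositor.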
|
|
|
+ vr::VRTextureBounds_t bounds;
|
|
|
+ F32 xRatio = 1.0f / (F32)widthHeight.x;
|
|
|
+ F32 yRatio = 1.0f / (F32)widthHeight.y;
|
|
|
+ bounds.uMin = rect.point.x * xRatio;
|
|
|
+ bounds.vMin = rect.point.y * yRatio;
|
|
|
+ bounds.uMax = (rect.point.x + rect.extent.x) * xRatio;
|
|
|
+ bounds.vMax = (rect.point.y + rect.extent.y) * yRatio;
|
|
|
+ return bounds;
|
|
|
}
|
|
|
}
|
|
|
|
|
|
+//------------------------------------------------------------
|
|
|
+
|
|
|
|
|
|
|
|
|
+DECLARE_SCOPE(OpenVR);
|
|
|
+IMPLEMENT_SCOPE(OpenVR, OpenVRProvider, , "");
|
|
|
+ConsoleDoc(
|
|
|
+ "@class OpenVRProvider\n"
|
|
|
+ "@brief This class is the interface between TorqueScript and OpenVR.\n\n"
|
|
|
+ "@ingroup OpenVR\n"
|
|
|
+ );
|
|
|
+
|
|
|
+// Enum impls
|
|
|
+
|
|
|
+ImplementEnumType(OpenVROverlayInputMethod,
|
|
|
+ "Types of input supported by VR Overlays. .\n\n"
|
|
|
+ "@ingroup OpenVR")
|
|
|
+{ vr::VROverlayInputMethod_None, "None" },
|
|
|
+{ vr::VROverlayInputMethod_Mouse, "Mouse" },
|
|
|
+EndImplementEnumType;
|
|
|
+
|
|
|
+ImplementEnumType(OpenVROverlayTransformType,
|
|
|
+ "Allows the caller to figure out which overlay transform getter to call. .\n\n"
|
|
|
+ "@ingroup OpenVR")
|
|
|
+{ vr::VROverlayTransform_Absolute, "Absolute" },
|
|
|
+{ vr::VROverlayTransform_TrackedDeviceRelative, "TrackedDeviceRelative" },
|
|
|
+{ vr::VROverlayTransform_SystemOverlay, "SystemOverlay" },
|
|
|
+{ vr::VROverlayTransform_TrackedComponent, "TrackedComponent" },
|
|
|
+EndImplementEnumType;
|
|
|
+
|
|
|
+ImplementEnumType(OpenVRGamepadTextInputMode,
|
|
|
+ "Types of input supported by VR Overlays. .\n\n"
|
|
|
+ "@ingroup OpenVR")
|
|
|
+{ vr::k_EGamepadTextInputModeNormal, "Normal", },
|
|
|
+{ vr::k_EGamepadTextInputModePassword, "Password", },
|
|
|
+{ vr::k_EGamepadTextInputModeSubmit, "Submit" },
|
|
|
+EndImplementEnumType;
|
|
|
+
|
|
|
+ImplementEnumType(OpenVRGamepadTextInputLineMode,
|
|
|
+ "Types of input supported by VR Overlays. .\n\n"
|
|
|
+ "@ingroup OpenVR")
|
|
|
+{ vr::k_EGamepadTextInputLineModeSingleLine, "SingleLine" },
|
|
|
+{ vr::k_EGamepadTextInputLineModeMultipleLines, "MultipleLines" },
|
|
|
+EndImplementEnumType;
|
|
|
+
|
|
|
+ImplementEnumType(OpenVRTrackingResult,
|
|
|
+ ". .\n\n"
|
|
|
+ "@ingroup OpenVR")
|
|
|
+{ vr::TrackingResult_Uninitialized, "None" },
|
|
|
+{ vr::TrackingResult_Calibrating_InProgress, "Calibrating_InProgress" },
|
|
|
+{ vr::TrackingResult_Calibrating_OutOfRange, "Calibrating_OutOfRange" },
|
|
|
+{ vr::TrackingResult_Running_OK, "Running_Ok" },
|
|
|
+{ vr::TrackingResult_Running_OutOfRange, "Running_OutOfRange" },
|
|
|
+EndImplementEnumType;
|
|
|
+
|
|
|
+ImplementEnumType(OpenVRTrackingUniverseOrigin,
|
|
|
+ "Identifies which style of tracking origin the application wants to use for the poses it is requesting. .\n\n"
|
|
|
+ "@ingroup OpenVR")
|
|
|
+{ vr::TrackingUniverseSeated, "Seated" },
|
|
|
+{ vr::TrackingUniverseStanding, "Standing" },
|
|
|
+{ vr::TrackingUniverseRawAndUncalibrated, "RawAndUncalibrated" },
|
|
|
+EndImplementEnumType;
|
|
|
+
|
|
|
+ImplementEnumType(OpenVROverlayDirection,
|
|
|
+ "Directions for changing focus between overlays with the gamepad. .\n\n"
|
|
|
+ "@ingroup OpenVR")
|
|
|
+{ vr::OverlayDirection_Up, "Up" },
|
|
|
+{ vr::OverlayDirection_Down, "Down" },
|
|
|
+{ vr::OverlayDirection_Left, "Left" },
|
|
|
+{ vr::OverlayDirection_Right, "Right" },
|
|
|
+EndImplementEnumType;
|
|
|
+
|
|
|
+ImplementEnumType(OpenVRState,
|
|
|
+ "Status of the overall system or tracked objects. .\n\n"
|
|
|
+ "@ingroup OpenVR")
|
|
|
+{ vr::VRState_Undefined, "Undefined" },
|
|
|
+{ vr::VRState_Off, "Off" },
|
|
|
+{ vr::VRState_Searching, "Searching" },
|
|
|
+{ vr::VRState_Searching_Alert, "Searching_Alert" },
|
|
|
+{ vr::VRState_Ready, "Ready" },
|
|
|
+{ vr::VRState_Ready_Alert, "Ready_Alert" },
|
|
|
+{ vr::VRState_NotReady, "NotReady" },
|
|
|
+EndImplementEnumType;
|
|
|
+
|
|
|
//------------------------------------------------------------
|
|
|
|
|
|
U32 OpenVRProvider::OVR_SENSORROT[vr::k_unMaxTrackedDeviceCount] = { 0 };
|
|
@@ -74,18 +221,6 @@ static String GetTrackedDeviceString(vr::IVRSystem *pHmd, vr::TrackedDeviceIndex
|
|
|
return sResult;
|
|
|
}
|
|
|
|
|
|
-static MatrixF ConvertSteamVRAffineMatrixToMatrixFPlain(const vr::HmdMatrix34_t &mat)
|
|
|
-{
|
|
|
- MatrixF outMat(1);
|
|
|
-
|
|
|
- outMat.setColumn(0, Point4F(mat.m[0][0], mat.m[1][0], mat.m[2][0], 0.0));
|
|
|
- outMat.setColumn(1, Point4F(mat.m[0][1], mat.m[1][1], mat.m[2][1], 0.0));
|
|
|
- outMat.setColumn(2, Point4F(mat.m[0][2], mat.m[1][2], mat.m[2][2], 0.0));
|
|
|
- outMat.setColumn(3, Point4F(mat.m[0][3], mat.m[1][3], mat.m[2][3], 1.0f)); // pos
|
|
|
-
|
|
|
- return outMat;
|
|
|
-}
|
|
|
-
|
|
|
MODULE_BEGIN(OpenVRProvider)
|
|
|
|
|
|
MODULE_INIT_AFTER(InputEventManager)
|
|
@@ -105,184 +240,60 @@ MODULE_SHUTDOWN
|
|
|
MODULE_END;
|
|
|
|
|
|
|
|
|
-bool OpenVRRenderState::setupRenderTargets(U32 mode)
|
|
|
+bool OpenVRRenderState::setupRenderTargets(GFXDevice::GFXDeviceRenderStyles mode)
|
|
|
{
|
|
|
if (!mHMD)
|
|
|
return false;
|
|
|
|
|
|
+ if (mRenderMode == mode)
|
|
|
+ return true;
|
|
|
+
|
|
|
+ mRenderMode = mode;
|
|
|
+
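+ // Standard (non-stereo) rendering needs no stereo targets; just reset to the plain HMD state.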
|
|
|
+ if (mode == GFXDevice::RS_Standard)
|
|
|
+ {
|
|
|
+ reset(mHMD);
|
|
|
+ return true;
|
|
|
+ }
|
|
|
+
|
|
|
U32 sizeX, sizeY;
|
|
|
Point2I newRTSize;
|
|
|
mHMD->GetRecommendedRenderTargetSize(&sizeX, &sizeY);
|
|
|
|
|
|
- mEyeViewport[0] = RectI(Point2I(0, 0), Point2I(sizeX, sizeY));
|
|
|
- mEyeViewport[1] = RectI(Point2I(0, 0), Point2I(sizeX, sizeY));
|
|
|
+ if (mode == GFXDevice::RS_StereoSeparate)
|
|
|
+ {
|
|
|
+ mEyeViewport[0] = RectI(Point2I(0, 0), Point2I(sizeX, sizeY));
|
|
|
+ mEyeViewport[1] = RectI(Point2I(0, 0), Point2I(sizeX, sizeY));
|
|
|
+
|
|
|
+ newRTSize.x = sizeX;
|
|
|
+ newRTSize.y = sizeY;
|
|
|
+ }
|
|
|
+ else
|
|
|
+ {
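+ // Side-by-side stereo: both eyes share one double-width target, with the right eye viewport offset by sizeX.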
|
|
|
+ mEyeViewport[0] = RectI(Point2I(0, 0), Point2I(sizeX, sizeY));
|
|
|
+ mEyeViewport[1] = RectI(Point2I(sizeX, 0), Point2I(sizeX, sizeY));
|
|
|
|
|
|
- newRTSize.x = sizeX;
|
|
|
- newRTSize.y = sizeY;
|
|
|
+ newRTSize.x = sizeX * 2;
|
|
|
+ newRTSize.y = sizeY;
|
|
|
+ }
|
|
|
|
|
|
GFXTexHandle stereoTexture;
|
|
|
stereoTexture.set(newRTSize.x, newRTSize.y, GFXFormatR8G8B8A8, &VRTextureProfile, "OpenVR Stereo RT Color");
|
|
|
- mStereoRenderTextures[0] = mStereoRenderTextures[1] = stereoTexture;
|
|
|
+ mStereoRenderTexture = stereoTexture;
|
|
|
|
|
|
GFXTexHandle stereoDepthTexture;
|
|
|
stereoDepthTexture.set(newRTSize.x, newRTSize.y, GFXFormatD24S8, &VRDepthProfile, "OpenVR Depth");
|
|
|
- mStereoDepthTextures[0] = mStereoDepthTextures[1] = stereoDepthTexture;
|
|
|
+ mStereoDepthTexture = stereoDepthTexture;
|
|
|
|
|
|
mStereoRT = GFX->allocRenderToTextureTarget();
|
|
|
mStereoRT->attachTexture(GFXTextureTarget::Color0, stereoTexture);
|
|
|
mStereoRT->attachTexture(GFXTextureTarget::DepthStencil, stereoDepthTexture);
|
|
|
|
|
|
- mEyeRT[0] = mEyeRT[1] = mStereoRT;
|
|
|
-
|
|
|
- mOutputEyeTextures[0].init(newRTSize.x, newRTSize.y, GFXFormatR8G8B8A8, &VRTextureProfile, "OpenVR Stereo RT Color OUTPUT");
|
|
|
- mOutputEyeTextures[1].init(newRTSize.x, newRTSize.y, GFXFormatR8G8B8A8, &VRTextureProfile, "OpenVR Stereo RT Color OUTPUT");
|
|
|
+ mOutputEyeTextures.init(newRTSize.x, newRTSize.y, GFXFormatR8G8B8A8, &VRTextureProfile, "OpenVR Stereo RT Color OUTPUT");
|
|
|
|
|
|
return true;
|
|
|
}
|
|
|
|
|
|
-void OpenVRRenderState::setupDistortion()
|
|
|
-{
|
|
|
- if (!mHMD)
|
|
|
- return;
|
|
|
-
|
|
|
- U16 m_iLensGridSegmentCountH = 43;
|
|
|
- U16 m_iLensGridSegmentCountV = 43;
|
|
|
-
|
|
|
- float w = (float)(1.0 / float(m_iLensGridSegmentCountH - 1));
|
|
|
- float h = (float)(1.0 / float(m_iLensGridSegmentCountV - 1));
|
|
|
-
|
|
|
- float u, v = 0;
|
|
|
-
|
|
|
- Vector<GFXVertexPTTT> vVerts(0);
|
|
|
- GFXVertexPTTT *vert;
|
|
|
-
|
|
|
- vVerts.reserve((m_iLensGridSegmentCountV * m_iLensGridSegmentCountH) * 2);
|
|
|
-
|
|
|
- mDistortionVerts.set(GFX, (m_iLensGridSegmentCountV * m_iLensGridSegmentCountH) * 2, GFXBufferTypeStatic);
|
|
|
-
|
|
|
- vert = mDistortionVerts.lock();
|
|
|
-
|
|
|
- //left eye distortion verts
|
|
|
- float Xoffset = -1;
|
|
|
- for (int y = 0; y < m_iLensGridSegmentCountV; y++)
|
|
|
- {
|
|
|
- for (int x = 0; x < m_iLensGridSegmentCountH; x++)
|
|
|
- {
|
|
|
- u = x*w; v = 1 - y*h;
|
|
|
- vert->point = Point3F(Xoffset + u, -1 + 2 * y*h, 0.0f);
|
|
|
-
|
|
|
- vr::DistortionCoordinates_t dc0 = mHMD->ComputeDistortion(vr::Eye_Left, u, v);
|
|
|
-
|
|
|
- vert->texCoord1 = Point2F(dc0.rfRed[0], 1 - dc0.rfRed[1]); // r
|
|
|
- vert->texCoord2 = Point2F(dc0.rfGreen[0], 1 - dc0.rfGreen[1]); // g
|
|
|
- vert->texCoord3 = Point2F(dc0.rfBlue[0], 1 - dc0.rfBlue[1]); // b
|
|
|
-
|
|
|
- vert++;
|
|
|
- }
|
|
|
- }
|
|
|
-
|
|
|
- //right eye distortion verts
|
|
|
- Xoffset = 0;
|
|
|
- for (int y = 0; y < m_iLensGridSegmentCountV; y++)
|
|
|
- {
|
|
|
- for (int x = 0; x < m_iLensGridSegmentCountH; x++)
|
|
|
- {
|
|
|
- u = x*w; v = 1 - y*h;
|
|
|
- vert->point = Point3F(Xoffset + u, -1 + 2 * y*h, 0.0f);
|
|
|
-
|
|
|
- vr::DistortionCoordinates_t dc0 = mHMD->ComputeDistortion(vr::Eye_Right, u, v);
|
|
|
-
|
|
|
- vert->texCoord1 = Point2F(dc0.rfRed[0], 1 - dc0.rfRed[1]);
|
|
|
- vert->texCoord2 = Point2F(dc0.rfGreen[0], 1 - dc0.rfGreen[1]);
|
|
|
- vert->texCoord3 = Point2F(dc0.rfBlue[0], 1 - dc0.rfBlue[1]);
|
|
|
-
|
|
|
- vert++;
|
|
|
- }
|
|
|
- }
|
|
|
-
|
|
|
- mDistortionVerts.unlock();
|
|
|
-
|
|
|
- mDistortionInds.set(GFX, m_iLensGridSegmentCountV * m_iLensGridSegmentCountH * 6 * 2, 0, GFXBufferTypeStatic);
|
|
|
-
|
|
|
- GFXPrimitive *prim;
|
|
|
- U16 *index;
|
|
|
-
|
|
|
- mDistortionInds.lock(&index, &prim);
|
|
|
- U16 a, b, c, d;
|
|
|
-
|
|
|
- U16 offset = 0;
|
|
|
- for (U16 y = 0; y < m_iLensGridSegmentCountV - 1; y++)
|
|
|
- {
|
|
|
- for (U16 x = 0; x < m_iLensGridSegmentCountH - 1; x++)
|
|
|
- {
|
|
|
- a = m_iLensGridSegmentCountH*y + x + offset;
|
|
|
- b = m_iLensGridSegmentCountH*y + x + 1 + offset;
|
|
|
- c = (y + 1)*m_iLensGridSegmentCountH + x + 1 + offset;
|
|
|
- d = (y + 1)*m_iLensGridSegmentCountH + x + offset;
|
|
|
- *index++ = a;
|
|
|
- *index++ = b;
|
|
|
- *index++ = c;
|
|
|
-
|
|
|
- *index++ = a;
|
|
|
- *index++ = c;
|
|
|
- *index++ = d;
|
|
|
- }
|
|
|
- }
|
|
|
-
|
|
|
- offset = (m_iLensGridSegmentCountH)*(m_iLensGridSegmentCountV);
|
|
|
- for (U16 y = 0; y < m_iLensGridSegmentCountV - 1; y++)
|
|
|
- {
|
|
|
- for (U16 x = 0; x < m_iLensGridSegmentCountH - 1; x++)
|
|
|
- {
|
|
|
- a = m_iLensGridSegmentCountH*y + x + offset;
|
|
|
- b = m_iLensGridSegmentCountH*y + x + 1 + offset;
|
|
|
- c = (y + 1)*m_iLensGridSegmentCountH + x + 1 + offset;
|
|
|
- d = (y + 1)*m_iLensGridSegmentCountH + x + offset;
|
|
|
- *index++ = a;
|
|
|
- *index++ = b;
|
|
|
- *index++ = c;
|
|
|
-
|
|
|
- *index++ = a;
|
|
|
- *index++ = c;
|
|
|
- *index++ = d;
|
|
|
- }
|
|
|
- }
|
|
|
-
|
|
|
- mDistortionInds.unlock();
|
|
|
-}
|
|
|
-
|
|
|
-void OpenVRRenderState::renderDistortion(U32 eye)
|
|
|
-{
|
|
|
- // Updates distortion for an eye (this should only be the case for backend APIS where image should be predistorted)
|
|
|
- /*
|
|
|
-
|
|
|
- glDisable(GL_DEPTH_TEST);
|
|
|
- glViewport( 0, 0, m_nWindowWidth, m_nWindowHeight );
|
|
|
-
|
|
|
- glBindVertexArray( m_unLensVAO );
|
|
|
- glUseProgram( m_unLensProgramID );
|
|
|
-
|
|
|
- //render left lens (first half of index array )
|
|
|
- glBindTexture(GL_TEXTURE_2D, leftEyeDesc.m_nResolveTextureId );
|
|
|
- glTexParameteri( GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE );
|
|
|
- glTexParameteri( GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE );
|
|
|
- glTexParameteri( GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR );
|
|
|
- glTexParameteri( GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR_MIPMAP_LINEAR );
|
|
|
- glDrawElements( GL_TRIANGLES, m_uiIndexSize/2, GL_UNSIGNED_SHORT, 0 );
|
|
|
-
|
|
|
- //render right lens (second half of index array )
|
|
|
- glBindTexture(GL_TEXTURE_2D, rightEyeDesc.m_nResolveTextureId );
|
|
|
- glTexParameteri( GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE );
|
|
|
- glTexParameteri( GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE );
|
|
|
- glTexParameteri( GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR );
|
|
|
- glTexParameteri( GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR_MIPMAP_LINEAR );
|
|
|
- glDrawElements( GL_TRIANGLES, m_uiIndexSize/2, GL_UNSIGNED_SHORT, (const void *)(m_uiIndexSize) );
|
|
|
-
|
|
|
- glBindVertexArray( 0 );
|
|
|
- glUseProgram( 0 );
|
|
|
- */
|
|
|
-}
|
|
|
-
|
|
|
void OpenVRRenderState::renderPreview()
|
|
|
{
|
|
|
|
|
@@ -293,26 +304,21 @@ void OpenVRRenderState::reset(vr::IVRSystem* hmd)
|
|
|
mHMD = hmd;
|
|
|
|
|
|
mStereoRT = NULL;
|
|
|
- mEyeRT[0] = mEyeRT[1] = NULL;
|
|
|
-
|
|
|
- mStereoRenderTextures[0] = mStereoRenderTextures[1] = NULL;
|
|
|
- mStereoDepthTextures[0] = mStereoDepthTextures[1] = NULL;
|
|
|
|
|
|
- mDistortionVerts = NULL;
|
|
|
- mDistortionInds = NULL;
|
|
|
+ mStereoRenderTexture = NULL;
|
|
|
+ mStereoDepthTexture = NULL;
|
|
|
|
|
|
- mOutputEyeTextures[0].clear();
|
|
|
- mOutputEyeTextures[1].clear();
|
|
|
+ mOutputEyeTextures.clear();
|
|
|
|
|
|
if (!mHMD)
|
|
|
return;
|
|
|
|
|
|
vr::HmdMatrix34_t mat = mHMD->GetEyeToHeadTransform(vr::Eye_Left);
|
|
|
- mEyePose[0] = ConvertSteamVRAffineMatrixToMatrixFPlain(mat);
|
|
|
+ mEyePose[0] = OpenVRUtil::convertSteamVRAffineMatrixToMatrixFPlain(mat);
|
|
|
mEyePose[0].inverse();
|
|
|
|
|
|
mat = mHMD->GetEyeToHeadTransform(vr::Eye_Right);
|
|
|
- mEyePose[1] = ConvertSteamVRAffineMatrixToMatrixFPlain(mat);
|
|
|
+ mEyePose[1] = OpenVRUtil::convertSteamVRAffineMatrixToMatrixFPlain(mat);
|
|
|
mEyePose[1].inverse();
|
|
|
|
|
|
mHMD->GetProjectionRaw(vr::Eye_Left, &mEyeFov[0].leftTan, &mEyeFov[0].rightTan, &mEyeFov[0].upTan, &mEyeFov[0].downTan);
|
|
@@ -336,6 +342,8 @@ OpenVRProvider::OpenVRProvider() :
|
|
|
GFXDevice::getDeviceEventSignal().notify(this, &OpenVRProvider::_handleDeviceEvent);
|
|
|
INPUTMGR->registerDevice(this);
|
|
|
dMemset(&mLUID, '\0', sizeof(mLUID));
|
|
|
+
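+ // Default to the seated tracking universe; setRoomTracking(true) switches to standing (room-scale).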
|
|
|
+ mTrackingSpace = vr::TrackingUniverseSeated;
|
|
|
}
|
|
|
|
|
|
OpenVRProvider::~OpenVRProvider()
|
|
@@ -345,7 +353,16 @@ OpenVRProvider::~OpenVRProvider()
|
|
|
|
|
|
void OpenVRProvider::staticInit()
|
|
|
{
|
|
|
- // TODO: Add console vars
|
|
|
+ // Overlay flags
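+ // Expose the vr::VROverlayFlags_* values to script as bit masks under $OpenVR::OverlayFlags_*.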
|
|
|
+ Con::setIntVariable("$OpenVR::OverlayFlags_None", 1 << (U32)vr::VROverlayFlags_None);
|
|
|
+ Con::setIntVariable("$OpenVR::OverlayFlags_Curved", 1 << (U32)vr::VROverlayFlags_Curved);
|
|
|
+ Con::setIntVariable("$OpenVR::OverlayFlags_RGSS4X", 1 << (U32)vr::VROverlayFlags_RGSS4X);
|
|
|
+ Con::setIntVariable("$OpenVR::OverlayFlags_NoDashboardTab", 1 << (U32)vr::VROverlayFlags_NoDashboardTab);
|
|
|
+ Con::setIntVariable("$OpenVR::OverlayFlags_AcceptsGamepadEvents", 1 << (U32)vr::VROverlayFlags_AcceptsGamepadEvents);
|
|
|
+ Con::setIntVariable("$OpenVR::OverlayFlags_ShowGamepadFocus", 1 << (U32)vr::VROverlayFlags_ShowGamepadFocus);
|
|
|
+ Con::setIntVariable("$OpenVR::OverlayFlags_SendVRScrollEvents", 1 << (U32)vr::VROverlayFlags_SendVRScrollEvents);
|
|
|
+ Con::setIntVariable("$OpenVR::OverlayFlags_SendVRTouchpadEvents", 1 << (U32)vr::VROverlayFlags_SendVRTouchpadEvents);
|
|
|
+ Con::setIntVariable("$OpenVR::OverlayFlags_ShowTouchPadScrollWheel", 1 << (U32)vr::VROverlayFlags_ShowTouchPadScrollWheel);
|
|
|
}
|
|
|
|
|
|
bool OpenVRProvider::enable()
|
|
@@ -558,34 +575,11 @@ void OpenVRTransformToRotPos(MatrixF mat, QuatF &outRot, Point3F &outPos)
|
|
|
{
|
|
|
// Directly set the rotation and position from the eye transforms
|
|
|
MatrixF torqueMat(1);
|
|
|
-
|
|
|
- F32 inRotMat[4][4];
|
|
|
- Point4F col0; mat.getColumn(0, &col0);
|
|
|
- Point4F col1; mat.getColumn(1, &col1);
|
|
|
- Point4F col2; mat.getColumn(2, &col2);
|
|
|
- Point4F col3; mat.getColumn(3, &col3);
|
|
|
- inRotMat[0][0] = col0.x;
|
|
|
- inRotMat[0][1] = col0.y;
|
|
|
- inRotMat[0][2] = col0.z;
|
|
|
- inRotMat[0][3] = col0.w;
|
|
|
- inRotMat[1][0] = col1.x;
|
|
|
- inRotMat[1][1] = col1.y;
|
|
|
- inRotMat[1][2] = col1.z;
|
|
|
- inRotMat[1][3] = col1.w;
|
|
|
- inRotMat[2][0] = col2.x;
|
|
|
- inRotMat[2][1] = col2.y;
|
|
|
- inRotMat[2][2] = col2.z;
|
|
|
- inRotMat[2][3] = col2.w;
|
|
|
- inRotMat[3][0] = col3.x;
|
|
|
- inRotMat[3][1] = col3.y;
|
|
|
- inRotMat[3][2] = col3.z;
|
|
|
- inRotMat[3][3] = col3.w;
|
|
|
-
|
|
|
- OpenVRUtil::convertRotation(inRotMat, torqueMat);
|
|
|
+ OpenVRUtil::convertTransformFromOVR(mat, torqueMat);
|
|
|
|
|
|
Point3F pos = torqueMat.getPosition();
|
|
|
outRot = QuatF(torqueMat);
|
|
|
- outPos = Point3F(-pos.x, pos.z, -pos.y);
|
|
|
+ outPos = pos; // position (previously Point3F(-pos.x, pos.z, -pos.y)) is already converted by convertTransformFromOVR
|
|
|
}
|
|
|
|
|
|
void OpenVRProvider::getFrameEyePose(IDevicePose *pose, U32 eye) const
|
|
@@ -639,8 +633,8 @@ void OpenVRProvider::getStereoViewports(RectI *out) const
|
|
|
|
|
|
void OpenVRProvider::getStereoTargets(GFXTextureTarget **out) const
|
|
|
{
|
|
|
- out[0] = mHMDRenderState.mEyeRT[0];
|
|
|
- out[1] = mHMDRenderState.mEyeRT[1];
|
|
|
+ out[0] = mHMDRenderState.mStereoRT;
|
|
|
+ out[1] = mHMDRenderState.mStereoRT;
|
|
|
}
|
|
|
|
|
|
void OpenVRProvider::setDrawCanvas(GuiCanvas *canvas)
|
|
@@ -655,11 +649,16 @@ void OpenVRProvider::setDrawCanvas(GuiCanvas *canvas)
|
|
|
|
|
|
if (mDrawCanvas != canvas || mHMDRenderState.mHMD == NULL)
|
|
|
{
|
|
|
- mHMDRenderState.setupRenderTargets(0);
|
|
|
+ mHMDRenderState.setupRenderTargets(GFXDevice::RS_Standard);
|
|
|
}
|
|
|
mDrawCanvas = canvas;
|
|
|
}
|
|
|
|
|
|
+void OpenVRProvider::setDrawMode(GFXDevice::GFXDeviceRenderStyles style)
|
|
|
+{
|
|
|
+ mHMDRenderState.setupRenderTargets(style);
|
|
|
+}
|
|
|
+
|
|
|
void OpenVRProvider::setCurrentConnection(GameConnection *connection)
|
|
|
{
|
|
|
mGameConnection = connection;
|
|
@@ -672,7 +671,7 @@ GameConnection* OpenVRProvider::getCurrentConnection()
|
|
|
|
|
|
GFXTexHandle OpenVRProvider::getPreviewTexture()
|
|
|
{
|
|
|
- return mHMDRenderState.mStereoRenderTextures[0]; // TODO: render distortion preview
|
|
|
+ return mHMDRenderState.mStereoRenderTexture; // TODO: render distortion preview
|
|
|
}
|
|
|
|
|
|
void OpenVRProvider::onStartFrame()
|
|
@@ -694,31 +693,87 @@ void OpenVRProvider::onEyeRendered(U32 index)
|
|
|
return;
|
|
|
|
|
|
vr::EVRCompositorError err = vr::VRCompositorError_None;
|
|
|
+ vr::VRTextureBounds_t bounds;
|
|
|
+ U32 textureIdxToSubmit = index;
|
|
|
|
|
|
- GFXTexHandle eyeTex = mHMDRenderState.mOutputEyeTextures[index].getTextureHandle();
|
|
|
- mHMDRenderState.mEyeRT[0]->resolveTo(eyeTex);
|
|
|
- mHMDRenderState.mOutputEyeTextures[index].advance();
|
|
|
+ GFXTexHandle eyeTex = mHMDRenderState.mOutputEyeTextures.getTextureHandle();
|
|
|
+ if (mHMDRenderState.mRenderMode == GFXDevice::RS_StereoSeparate)
|
|
|
+ {
|
|
|
+ mHMDRenderState.mStereoRT->resolveTo(eyeTex);
|
|
|
+ mHMDRenderState.mOutputEyeTextures.advance();
|
|
|
+ }
|
|
|
+ else
|
|
|
+ {
|
|
|
+ // assuming side-by-side, so the right eye will be next
|
|
|
+ if (index == 1)
|
|
|
+ {
|
|
|
+ mHMDRenderState.mStereoRT->resolveTo(eyeTex);
|
|
|
+ mHMDRenderState.mOutputEyeTextures.advance();
|
|
|
+ }
|
|
|
+ else
|
|
|
+ {
|
|
|
+ return;
|
|
|
+ }
|
|
|
+ }
|
|
|
|
|
|
if (GFX->getAdapterType() == Direct3D11)
|
|
|
{
|
|
|
- GFXFormat fmt1 = eyeTex->getFormat();
|
|
|
- vr::Texture_t eyeTexture = { (void*)static_cast<GFXD3D11TextureObject*>(eyeTex.getPointer())->get2DTex(), vr::API_DirectX, vr::ColorSpace_Gamma };
|
|
|
- err = vr::VRCompositor()->Submit((vr::EVREye)(vr::Eye_Left + index), &eyeTexture);
|
|
|
+ vr::Texture_t eyeTexture;
|
|
|
+ if (mHMDRenderState.mRenderMode == GFXDevice::RS_StereoSeparate)
|
|
|
+ {
|
|
|
+ // whatever eye we are on
|
|
|
+ eyeTexture = { (void*)static_cast<GFXD3D11TextureObject*>(eyeTex.getPointer())->get2DTex(), vr::API_DirectX, vr::ColorSpace_Gamma };
|
|
|
+ bounds = OpenVRUtil::TorqueRectToBounds(mHMDRenderState.mEyeViewport[index], mHMDRenderState.mStereoRenderTexture.getWidthHeight());
|
|
|
+ err = vr::VRCompositor()->Submit((vr::EVREye)(vr::Eye_Left + index), &eyeTexture, &bounds);
|
|
|
+ }
|
|
|
+ else
|
|
|
+ {
|
|
|
+ // left & right at the same time
|
|
|
+ eyeTexture = { (void*)static_cast<GFXD3D11TextureObject*>(eyeTex.getPointer())->get2DTex(), vr::API_DirectX, vr::ColorSpace_Gamma };
|
|
|
+ bounds = OpenVRUtil::TorqueRectToBounds(mHMDRenderState.mEyeViewport[0], mHMDRenderState.mStereoRenderTexture.getWidthHeight());
|
|
|
+ err = vr::VRCompositor()->Submit((vr::EVREye)(vr::Eye_Left), &eyeTexture, &bounds);
|
|
|
+ bounds = OpenVRUtil::TorqueRectToBounds(mHMDRenderState.mEyeViewport[1], mHMDRenderState.mStereoRenderTexture.getWidthHeight());
|
|
|
+ err = vr::VRCompositor()->Submit((vr::EVREye)(vr::Eye_Right), &eyeTexture, &bounds);
|
|
|
+ }
|
|
|
}
|
|
|
else if (GFX->getAdapterType() == Direct3D9)
|
|
|
{
|
|
|
//vr::Texture_t eyeTexture = { (void*)static_cast<GFXD3D9TextureObject*>(mHMDRenderState.mStereoRenderTextures[index].getPointer())->get2DTex(), vr::API_DirectX, vr::ColorSpace_Gamma };
|
|
|
//err = vr::VRCompositor()->Submit((vr::EVREye)(vr::Eye_Left + index), &eyeTexture);
|
|
|
}
|
|
|
+#ifdef TORQUE_OPENGL
|
|
|
else if (GFX->getAdapterType() == OpenGL)
|
|
|
- {/*
|
|
|
- vr::Texture_t eyeTexture = { (void*)static_cast<GFXGLTextureObject*>(mHMDRenderState.mStereoRenderTextures[index].getPointer())->getHandle(), vr::API_OpenGL, vr::ColorSpace_Gamma };
|
|
|
- vr::VRCompositor()->Submit((vr::EVREye)(vr::Eye_Left + index), &eyeTexture);*/
|
|
|
+ {
|
|
|
+ vr::Texture_t eyeTexture;
|
|
|
+ if (mHMDRenderState.mRenderMode == GFXDevice::RS_StereoSeparate)
|
|
|
+ {
|
|
|
+ // whatever eye we are on
|
|
|
+ eyeTexture = { (void*)static_cast<GFXGLTextureObject*>(eyeTex.getPointer())->getHandle(), vr::API_OpenGL, vr::ColorSpace_Gamma };
|
|
|
+ bounds = OpenVRUtil::TorqueRectToBounds(mHMDRenderState.mEyeViewport[index], mHMDRenderState.mStereoRenderTexture.getWidthHeight());
|
|
|
+ err = vr::VRCompositor()->Submit((vr::EVREye)(vr::Eye_Left + index), &eyeTexture, &bounds);
|
|
|
+ }
|
|
|
+ else
|
|
|
+ {
|
|
|
+ // left & right at the same time
|
|
|
+ eyeTexture = { (void*)static_cast<GFXGLTextureObject*>(eyeTex.getPointer())->getHandle(), vr::API_OpenGL, vr::ColorSpace_Gamma };
|
|
|
+ bounds = OpenVRUtil::TorqueRectToBounds(mHMDRenderState.mEyeViewport[0], mHMDRenderState.mStereoRenderTexture.getWidthHeight());
|
|
|
+ err = vr::VRCompositor()->Submit((vr::EVREye)(vr::Eye_Left), &eyeTexture, &bounds);
|
|
|
+ bounds = OpenVRUtil::TorqueRectToBounds(mHMDRenderState.mEyeViewport[1], mHMDRenderState.mStereoRenderTexture.getWidthHeight());
|
|
|
+ err = vr::VRCompositor()->Submit((vr::EVREye)(vr::Eye_Right), &eyeTexture, &bounds);
|
|
|
+ }
|
|
|
}
|
|
|
+#endif
|
|
|
|
|
|
AssertFatal(err == vr::VRCompositorError_None, "VR compositor error!");
|
|
|
}
|
|
|
|
|
|
+void OpenVRProvider::setRoomTracking(bool room)
|
|
|
+{
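+   // Standing = room-scale origin at the calibrated floor; seated = origin at the user's seated zero pose.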
|
|
|
+ vr::IVRCompositor* compositor = vr::VRCompositor();
|
|
|
+ mTrackingSpace = room ? vr::TrackingUniverseStanding : vr::TrackingUniverseSeated;
|
|
|
+ if (compositor) compositor->SetTrackingSpace(mTrackingSpace);
|
|
|
+}
|
|
|
+
|
|
|
bool OpenVRProvider::_handleDeviceEvent(GFXDevice::GFXDeviceEventType evt)
|
|
|
{
|
|
|
if (!ManagedSingleton<OpenVRProvider>::instanceOrNull())
|
|
@@ -770,9 +825,8 @@ bool OpenVRProvider::_handleDeviceEvent(GFXDevice::GFXDeviceEventType evt)
|
|
|
|
|
|
S32 OpenVRProvider::getDisplayDeviceId() const
|
|
|
{
|
|
|
- return -1;
|
|
|
-#ifdef TORQUE_OS_WIN32
|
|
|
- if (GFX->getAdapterType() == Direct3D11)
|
|
|
+#if defined(TORQUE_OS_WIN64) || defined(TORQUE_OS_WIN32)
|
|
|
+ if (GFX && GFX->getAdapterType() == Direct3D11)
|
|
|
{
|
|
|
Vector<GFXAdapter*> adapterList;
|
|
|
GFXD3D11Device::enumerateAdapters(adapterList);
|
|
@@ -818,7 +872,17 @@ void OpenVRProvider::updateTrackedPoses()
|
|
|
if (!mHMD)
|
|
|
return;
|
|
|
|
|
|
- vr::VRCompositor()->WaitGetPoses(mTrackedDevicePose, vr::k_unMaxTrackedDeviceCount, NULL, 0);
|
|
|
+ vr::IVRCompositor* compositor = vr::VRCompositor();
|
|
|
+
|
|
|
+ if (!compositor)
|
|
|
+ return;
|
|
|
+
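+   // Keep the compositor's tracking universe in sync with the value chosen via setRoomTracking().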
|
|
|
+ if (compositor->GetTrackingSpace() != mTrackingSpace)
|
|
|
+ {
|
|
|
+ compositor->SetTrackingSpace(mTrackingSpace);
|
|
|
+ }
|
|
|
+
|
|
|
+ compositor->WaitGetPoses(mTrackedDevicePose, vr::k_unMaxTrackedDeviceCount, NULL, 0);
|
|
|
|
|
|
mValidPoseCount = 0;
|
|
|
|
|
@@ -828,7 +892,7 @@ void OpenVRProvider::updateTrackedPoses()
|
|
|
if (mTrackedDevicePose[nDevice].bPoseIsValid)
|
|
|
{
|
|
|
mValidPoseCount++;
|
|
|
- MatrixF mat = ConvertSteamVRAffineMatrixToMatrixFPlain(mTrackedDevicePose[nDevice].mDeviceToAbsoluteTracking);
|
|
|
+ MatrixF mat = OpenVRUtil::convertSteamVRAffineMatrixToMatrixFPlain(mTrackedDevicePose[nDevice].mDeviceToAbsoluteTracking);
|
|
|
mat.inverse();
|
|
|
|
|
|
if (nDevice == vr::k_unTrackedDeviceIndex_Hmd)
|
|
@@ -925,7 +989,43 @@ void OpenVRProvider::resetSensors()
|
|
|
}
|
|
|
}
|
|
|
|
|
|
-DefineEngineFunction(isOpenVRDeviceActive, bool, (), ,
|
|
|
+OpenVROverlay *OpenVRProvider::getGamepadFocusOverlay()
|
|
|
+{
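+   // Stub: gamepad focus overlays are not tracked yet.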
|
|
|
+ return NULL;
|
|
|
+}
|
|
|
+
|
|
|
+void OpenVRProvider::setOverlayNeighbour(vr::EOverlayDirection dir, OpenVROverlay *overlay)
|
|
|
+{
|
|
|
+
|
|
|
+}
|
|
|
+
|
|
|
+
|
|
|
+bool OpenVRProvider::isDashboardVisible()
|
|
|
+{
|
|
|
+ return false;
|
|
|
+}
|
|
|
+
|
|
|
+void OpenVRProvider::showDashboard(const char *overlayToShow)
|
|
|
+{
|
|
|
+
|
|
|
+}
|
|
|
+
|
|
|
+vr::TrackedDeviceIndex_t OpenVRProvider::getPrimaryDashboardDevice()
|
|
|
+{
|
|
|
+ return -1;
|
|
|
+}
|
|
|
+
|
|
|
+void OpenVRProvider::setKeyboardTransformAbsolute(const MatrixF &xfm)
|
|
|
+{
|
|
|
+ // mTrackingSpace
|
|
|
+}
|
|
|
+
|
|
|
+void OpenVRProvider::setKeyboardPositionForOverlay(OpenVROverlay *overlay, const RectI &rect)
|
|
|
+{
|
|
|
+
|
|
|
+}
|
|
|
+
|
|
|
+DefineEngineStaticMethod(OpenVR, isDeviceActive, bool, (), ,
|
|
|
"@brief Used to determine if the OpenVR input device is active\n\n"
|
|
|
|
|
|
"The OpenVR device is considered active when the library has been "
|
|
@@ -940,11 +1040,11 @@ DefineEngineFunction(isOpenVRDeviceActive, bool, (), ,
|
|
|
return false;
|
|
|
}
|
|
|
|
|
|
- return OCULUSVRDEV->getActive();
|
|
|
+ return OPENVR->getActive();
|
|
|
}
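+// Illustrative TorqueScript usage (assuming the standard static-method binding): %active = OpenVR::isDeviceActive();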
|
|
|
|
|
|
|
|
|
-DefineEngineFunction(OpenVRSetEnabled, bool, (bool value), ,
|
|
|
+DefineEngineStaticMethod(OpenVR, setEnabled, bool, (bool value), ,
|
|
|
"@brief Used to determine if the OpenVR input device is active\n\n"
|
|
|
|
|
|
"The OpenVR device is considered active when the library has been "
|
|
@@ -959,12 +1059,11 @@ DefineEngineFunction(OpenVRSetEnabled, bool, (bool value), ,
|
|
|
return false;
|
|
|
}
|
|
|
|
|
|
- return value ? ManagedSingleton<OpenVRProvider>::instance()->enable() : ManagedSingleton<OpenVRProvider>::instance()->disable();
|
|
|
+ return value ? OPENVR->enable() : OPENVR->disable();
|
|
|
}
|
|
|
|
|
|
|
|
|
-
|
|
|
-DefineEngineFunction(setOpenVRHMDAsGameConnectionDisplayDevice, bool, (GameConnection* conn), ,
|
|
|
+DefineEngineStaticMethod(OpenVR, setHMDAsGameConnectionDisplayDevice, bool, (GameConnection* conn), ,
|
|
|
"@brief Sets the first HMD to be a GameConnection's display device\n\n"
|
|
|
"@param conn The GameConnection to set.\n"
|
|
|
"@return True if the GameConnection display device was set.\n"
|
|
@@ -982,12 +1081,12 @@ DefineEngineFunction(setOpenVRHMDAsGameConnectionDisplayDevice, bool, (GameConne
|
|
|
return false;
|
|
|
}
|
|
|
|
|
|
- conn->setDisplayDevice(ManagedSingleton<OpenVRProvider>::instance());
|
|
|
+ conn->setDisplayDevice(OPENVR);
|
|
|
return true;
|
|
|
}
|
|
|
|
|
|
|
|
|
-DefineEngineFunction(OpenVRGetDisplayDeviceId, S32, (), ,
|
|
|
+DefineEngineStaticMethod(OpenVR, getDisplayDeviceId, S32, (), ,
|
|
|
"@brief MacOS display ID.\n\n"
|
|
|
"@param index The HMD index.\n"
|
|
|
"@return The ID of the HMD display device, if any.\n"
|
|
@@ -998,10 +1097,10 @@ DefineEngineFunction(OpenVRGetDisplayDeviceId, S32, (), ,
|
|
|
return -1;
|
|
|
}
|
|
|
|
|
|
- return ManagedSingleton<OpenVRProvider>::instance()->getDisplayDeviceId();
|
|
|
+ return OPENVR->getDisplayDeviceId();
|
|
|
}
|
|
|
|
|
|
-DefineEngineFunction(OpenVRResetSensors, void, (), ,
|
|
|
+DefineEngineStaticMethod(OpenVR, resetSensors, void, (), ,
|
|
|
"@brief Resets all Oculus VR sensors.\n\n"
|
|
|
"This resets all sensor orientations such that their 'normal' rotation "
|
|
|
"is defined when this function is called. This defines an HMD's forwards "
|
|
@@ -1013,5 +1112,7 @@ DefineEngineFunction(OpenVRResetSensors, void, (), ,
|
|
|
return;
|
|
|
}
|
|
|
|
|
|
- ManagedSingleton<OpenVRProvider>::instance()->resetSensors();
|
|
|
+ OPENVR->resetSensors();
|
|
|
}
|
|
|
+
|
|
|
+// Overlay stuff
|