Browse Source

Make OculusVR.getHMDVectorPoseLeftEye return the HMD relative, not world relative, eye positions.

Campbell Suter, 8 years ago
parent
commit
760277f61d

+ 19 - 32
jme3-vr/src/main/java/com/jme3/input/vr/OculusVR.java

@@ -95,14 +95,11 @@ public class OculusVR implements VRAPI {
     private final Matrix4f[] hmdRelativeEyePoses = new Matrix4f[2];
 
     /**
-     * The eye poses relative to the world, as used during rendering.
-     */
-    private final OVRPosef eyePosesPtr[] = new OVRPosef[2];
-
-    /**
-     * The eye positions relative to the world, as used by jME.
+     * Store the positions for each eye, relative to the HMD.
+     *
+     * @see #getHMDVectorPoseLeftEye()
      */
-    private final Vector3f eyePositions[] = new Vector3f[2];
+    private final Vector3f[] hmdRelativeEyePositions = new Vector3f[2];
 
     /**
      * The position and orientation of the user's head.
@@ -227,6 +224,9 @@ public class OculusVR implements VRAPI {
         for (int eye = 0; eye < 2; eye++) {
             projections[eye] = OVRMatrix4f.malloc();
             //1.3 was right handed, now none flag
+
+            hmdRelativeEyePoses[eye] = new Matrix4f();
+            hmdRelativeEyePositions[eye] = new Vector3f();
         }
 
         // step 6 - render desc
@@ -240,12 +240,11 @@ public class OculusVR implements VRAPI {
 
             OVRPosef pose = eyeRenderDesc[eye].HmdToEyePose();
 
-            Matrix4f jPose = new Matrix4f();
-            jPose.setTranslation(vecO2J(pose.Position(), new Vector3f()));
-            jPose.setRotationQuaternion(quatO2J(pose.Orientation(), new Quaternion()));
+            vecO2J(pose.Position(), hmdRelativeEyePositions[eye]);
 
-            hmdRelativeEyePoses[eye] = jPose;
-            eyePositions[eye] = new Vector3f(); // Set the absolute position up for later.
+            hmdRelativeEyePoses[eye].loadIdentity();
+            hmdRelativeEyePoses[eye].setTranslation(hmdRelativeEyePositions[eye]);
+            hmdRelativeEyePoses[eye].setRotationQuaternion(quatO2J(pose.Orientation(), new Quaternion()));
         }
 
         // step 7 - recenter
@@ -268,22 +267,6 @@ public class OculusVR implements VRAPI {
         //get head pose
         headPose = hmdState.HeadPose().ThePose();
         hmdState.free();
-
-        //build view offsets struct
-        OVRPosef.Buffer hmdToEyeOffsets = OVRPosef.calloc(2);
-        hmdToEyeOffsets.put(0, eyeRenderDesc[ovrEye_Left].HmdToEyePose());
-        hmdToEyeOffsets.put(1, eyeRenderDesc[ovrEye_Right].HmdToEyePose());
-
-        //calculate eye poses
-        OVRPosef.Buffer outEyePoses = OVRPosef.create(2);
-        OVRUtil.ovr_CalcEyePoses(headPose, hmdToEyeOffsets, outEyePoses);
-        hmdToEyeOffsets.free();
-        eyePosesPtr[ovrEye_Left] = outEyePoses.get(0);
-        eyePosesPtr[ovrEye_Right] = outEyePoses.get(1);
-
-        for (int i = 0; i < eyePosesPtr.length; i++) {
-            vecO2J(eyePosesPtr[i].Position(), eyePositions[i]);
-        }
     }
 
     @Override
@@ -383,12 +366,12 @@ public class OculusVR implements VRAPI {
 
     @Override
     public Vector3f getHMDVectorPoseLeftEye() {
-        return eyePositions[ovrEye_Left];
+        return hmdRelativeEyePositions[ovrEye_Left];
     }
 
     @Override
     public Vector3f getHMDVectorPoseRightEye() {
-        return eyePositions[ovrEye_Right];
+        return hmdRelativeEyePositions[ovrEye_Right];
     }
 
     @Override
@@ -640,8 +623,12 @@ public class OculusVR implements VRAPI {
         return fovPorts[ovrEye_Left]; // TODO checking the left and right eyes match
     }
 
-    public OVRPosef[] getEyePosesPtr() {
-        return eyePosesPtr;
+    public OVRPosef getHeadPose() {
+        return headPose;
+    }
+
+    public OVRPosef getEyePose(int eye) {
+        return eyeRenderDesc[eye].HmdToEyePose();
     }
 }
 

+ 13 - 4
jme3-vr/src/main/java/com/jme3/util/VRViewManagerOculus.java

@@ -169,11 +169,20 @@ public class VRViewManagerOculus extends AbstractVRViewManager {
 
     @Override
     public void render() {
-        for (int eye = 0; eye < 2; eye++) {
-            // TODO do we need this? Don't we set the camera positions ourselves?
-            OVRPosef eyePose = hardware.getEyePosesPtr()[eye];
-            hardware.getLayer0().RenderPose(eye, eyePose);
 
+        // Calculate the render pose (translation/rotation) for each eye.
+        // LibOVR takes the difference between this and the real position of each eye at display time
+        // to apply ATW (Asynchronous TimeWarp).
+
+        OVRPosef.Buffer hmdToEyeOffsets = OVRPosef.calloc(2);
+        hmdToEyeOffsets.put(0, hardware.getEyePose(ovrEye_Left));
+        hmdToEyeOffsets.put(1, hardware.getEyePose(ovrEye_Right));
+
+        //calculate eye poses
+        OVRUtil.ovr_CalcEyePoses(hardware.getHeadPose(), hmdToEyeOffsets, hardware.getLayer0().RenderPose());
+        hmdToEyeOffsets.free();
+
+        for (int eye = 0; eye < 2; eye++) {
             IntBuffer currentIndexB = BufferUtils.createIntBuffer(1);
             ovr_GetTextureSwapChainCurrentIndex(session(), hardware.getChain(eye), currentIndexB);
             int index = currentIndexB.get();