Преглед изворни кода

Merge pull request #608 from galibzon/OpenXRFixes

This PR improves XrSpaces location calculation.

Previously, XrSpaces were being located each frame
during OpenXRVkInput::PollActions() which was called
before OpenXRVk::Device::BeginInternal(). The problem
with that approach is that xrLocateSpace needs
 a "predicted display time" for the CURRENT frame.
But the "predicted display time" is calculated during
OpenXRVk::Device::BeginInternal(). This means that all XrPoses
for the CURRENT frame were calculated with the "predicted
display time" of the PREVIOUS frame. This was causing the rendered
frames to look jittery.

In this PR, the XrSpaces are now located during
OpenXRVk::Device::BeginInternal(). This guarantees that
when xrLocateSpace is called it is using the correct
"predicted display time" for the CURRENT frame. This reduces
the jitter because the camera View Srg transforms now feed the correct
transform value for each frame, improving the stability of the rendered
views.

Another improvement is that the Base Space used in xrLocateSpace
is the LOCAL space instead of the hard coded VIEW space. Also, APIs were
added to change the Base Space for both Visualized spaces and Controller (Joysticks)
spaces.

Also the engine is now notified via AZ::RPI::XRSpaceNotificationBus::OnXRSpaceLocationsChanged
at the right time so the engine can update the ViewSrg for each eye
at the correct time.

Another improvement is that the AZ::RPI::PoseData reported to the engine
is now given according to the O3DE convention: X+ Right, Y+ Forward, Z+ Up.
Also added equivalent APIs that report the data as AZ::Transform.

Fixed bug when the Xr Poses would be corrupted
if the Proximity Sensor was enabled.

Now the XrSpaces are reset each time an XR_SESSION_STATE_READY
event is received (typically because the Proximity Sensor is disabled).
galibzon пре 1 година
родитељ
комит
a30927eace

+ 0 - 1
Gems/OpenXRVk/Code/Include/OpenXRVk/OpenXRVkDevice.h

@@ -79,6 +79,5 @@ namespace OpenXRVk
         XrCompositionLayerProjection m_xrLayer{ XR_TYPE_COMPOSITION_LAYER_PROJECTION };
         AZStd::vector<XrCompositionLayerProjectionView> m_projectionLayerViews;
         AZStd::vector<XrView> m_views;
-        uint32_t m_viewCountOutput = 0;
     };
 }

+ 38 - 8
Gems/OpenXRVk/Code/Include/OpenXRVk/OpenXRVkInput.h

@@ -16,6 +16,8 @@
 
 namespace OpenXRVk
 {
+    class Device;
+
     // Class that will help manage XrActionSet/XrAction
     class Input final
         : public XR::Input
@@ -24,10 +26,25 @@ namespace OpenXRVk
         AZ_CLASS_ALLOCATOR(Input, AZ::SystemAllocator);
         AZ_RTTI(Input, "{97ADD1FE-27DF-4F36-9F61-683F881F9477}", XR::Input);
 
+        static constexpr char LogName[] = "OpenXRVkInput";
+
         static XR::Ptr<Input> Create();
 
-        //! Sync all the actions and update controller
-        //! as well as various tracked space poses
+        //! Called by the session when the predicted display time has been updated (typically
+        //! the device updates the predicted display time during BeginFrame).
+        //! \param[in] device The device that emitted this event.
+        //! \param[in] predictedTime The predicted display time for the current frame.
+        //! \param[out] xrViews Vector where each Eye Pose will be stored. Eye poses are always relative to the VIEW space.
+        //!                     The VIEW pose typically represents the pose of the Head (The Head is typically centered
+        //!                     between both eyes). Subscript 0 is the left eye, while subscript 1 is the right eye.                   
+        //! Returns true if the number of Eye Poses matches the size of @xrViews.
+        bool UpdateXrSpaceLocations(const OpenXRVk::Device& device, XrTime predictedDisplayTime, AZStd::vector<XrView>& xrViews);
+
+        //! Sync all the actions and update controller.
+        //! REMARK: XrPoses are not updated in this function. Instead, poses are updated upon UpdateXrSpaceLocations().
+        //! Why? Because PollActions() is called on the main thread outside of the BeginFrame()/EndFrame() loop.
+        //! This means that if Poses are updated during PollActions(), those poses would be using the predicted display time
+        //! of the previous frame instead of the current frame.
         void PollActions();
 
         //! Initialize various actions/actions sets and add support for Oculus touch bindings
@@ -39,20 +56,30 @@ namespace OpenXRVk
         //! Attach action sets
         AZ::RHI::ResultCode InitializeActionSets(XrSession xrSession) const;
 
-        //! Update Controller space information
+        //! Updates a Controller/Joystick pose.
         void LocateControllerSpace(XrTime predictedDisplayTime, XrSpace baseSpace, AZ::u32 handIndex);
 
-        //! Update information for a specific tracked space type (i.e visualizedSpaceType)
+        //! Updates pose information for a specific visualized space type (i.e. @visualizedSpaceType).
         void LocateVisualizedSpace(XrTime predictedDisplayTime, XrSpace space, XrSpace baseSpace, OpenXRVk::SpaceType visualizedSpaceType);
 
         //! Return Pose data for a controller attached to a hand index
-        AZ::RHI::ResultCode GetControllerPose(AZ::u32 handIndex, AZ::RPI::PoseData& outPoseData) const;
+        //! By default the pose data is converted per O3DE convention: Xright, Yfront, Zup.
+        //! You can read the raw XR Pose data by setting @convertToO3de to false (Not recommended, but useful for debugging).
+        AZ::RHI::ResultCode GetControllerPose(AZ::u32 handIndex, AZ::RPI::PoseData& outPoseData, bool convertToO3de = true) const;
+
+        //! Same as above but returns the pose data as an AZ::Transform. The AZ::Transform also includes the controller scale.
+        AZ::RHI::ResultCode GetControllerTransform(AZ::u32 handIndex, AZ::Transform& outTransform, bool convertToO3de = true) const;
 
-        //! Return scale for a controller attached to a hand index
+        //! Returns scale for a controller attached to a hand index
         float GetControllerScale(AZ::u32 handIndex) const;
 
-        //! Return Pose data for a tracked space type (i.e visualizedSpaceType)
-        AZ::RHI::ResultCode GetVisualizedSpacePose(OpenXRVk::SpaceType visualizedSpaceType, AZ::RPI::PoseData& outPoseData) const;
+        //! Return Pose data for a tracked space type (i.e visualizedSpaceType).
+        //! By default the pose data is converted per O3DE convention: Xright, Yfront, Zup.
+        //! You can read the raw XR Pose data by setting @convertToO3de to false (Not recommended, but useful for debugging).
+        AZ::RHI::ResultCode GetVisualizedSpacePose(OpenXRVk::SpaceType visualizedSpaceType, AZ::RPI::PoseData& outPoseData, bool convertToO3de = true) const;
+
+        //! Same as above but returns the pose data as an AZ::Transform
+        AZ::RHI::ResultCode GetVisualizedSpaceTransform(OpenXRVk::SpaceType visualizedSpaceType, AZ::Transform& outTransform, bool convertToO3de = true) const;
 
         //! Get the Squeeze action
         XrAction GetSqueezeAction(AZ::u32 handIndex) const;
@@ -107,6 +134,9 @@ namespace OpenXRVk
         //! Destroy native objects
         void ShutdownInternal() override;
 
+        //! Returns true if the number of Eye Poses matches the size of @xrViews.
+        bool LocateEyeViews(XrTime predictedDisplayTime, AZStd::vector<XrView>& xrViews);
+
         XrActionSet m_actionSet{ XR_NULL_HANDLE };
 
         XrAction m_hapticAction{};

+ 29 - 0
Gems/OpenXRVk/Code/Include/OpenXRVk/OpenXRVkSession.h

@@ -40,6 +40,21 @@ namespace OpenXRVk
         //! Return the Xrspace related to the SpaceType enum
         XrSpace GetXrSpace(SpaceType spaceType) const;
 
+        ////////////////////////////////////////////////////////////////////////////////////////////
+        //! Called by a Device when the predicted display time has been updated (typically
+        //! the device updates the predicted display time during BeginFrame).
+        //! See OpenXRVkInput.h UpdateXrSpaceLocations(...) for more details.
+        void UpdateXrSpaceLocations(const OpenXRVk::Device& device, XrTime predictedDisplayTime, AZStd::vector<XrView>& xrViews);
+
+        //! Setters and Getters for the base spaces that will be used
+        //! when calling xrLocateSpace().
+        //! By default, the base space for visualization is SpaceType::Local
+        //! and the base space for Joysticks/controllers is SpaceType::View (aka the Head)
+        void SetBaseSpaceTypeForVisualization(SpaceType spaceType);
+        void SetBaseSpaceTypeForControllers(SpaceType spaceType);
+        SpaceType GetBaseSpaceTypeForVisualization() const;
+        SpaceType GetBaseSpaceTypeForControllers() const;
+
         //////////////////////////////////////////////////////////////////////////
         // XR::Session overrides
         AZ::RHI::ResultCode InitInternal(AZ::RHI::XRSessionDescriptor* descriptor) override;
@@ -50,6 +65,7 @@ namespace OpenXRVk
         void PollEvents() override;
         void LocateControllerSpace(AZ::u32 handIndex) override;
         AZ::RHI::ResultCode GetControllerPose(AZ::u32 handIndex, AZ::RPI::PoseData& outPoseData) const override;
+        AZ::RHI::ResultCode GetControllerTransform(AZ::u32 handIndex, AZ::Transform& outTransform) const override;
         AZ::RHI::ResultCode GetControllerStagePose(AZ::u32 handIndex, AZ::RPI::PoseData& outPoseData) const override;
         AZ::RHI::ResultCode GetViewFrontPose(AZ::RPI::PoseData& outPoseData) const override;
         AZ::RHI::ResultCode GetViewLocalPose(AZ::RPI::PoseData& outPoseData) const override;
@@ -68,12 +84,25 @@ namespace OpenXRVk
         void ShutdownInternal() override;
         void LogActionSourceName(XrAction action, const AZStd::string_view actionName) const;
         Input* GetNativeInput() const;
+        // Spaces are reset every time the proximity sensor turns off, or the user wears the headset
+        // when the proximity sensor is ON.
+        void ResetSpaces();
 
         XrSession m_session = XR_NULL_HANDLE;
         XrSessionState m_sessionState = XR_SESSION_STATE_UNKNOWN;
         XrEventDataBuffer m_eventDataBuffer;
         XrInstance m_xrInstance = XR_NULL_HANDLE;
         XrGraphicsBindingVulkan2KHR m_graphicsBinding{ XR_TYPE_GRAPHICS_BINDING_VULKAN_KHR };
+        
+        // Application defined base space that will be used to calculate
+        // the relative pose of all other spaces.
+        // Typically SpaceType::Local or SpaceType::Stage.
+        SpaceType m_baseSpaceTypeForVisualization = SpaceType::Local;
+
+        // Application defined base space that will be used to calculate
+        // the relative pose of the joysticks (aka XR Controllers).
+        // Typically SpaceType::View, but could be SpaceType::Local or SpaceType::Stage.
+        SpaceType m_baseSpaceTypeForControllers = SpaceType::View;
 
         bool m_sessionRunning = false;
         bool m_exitRenderLoop = false;

+ 8 - 0
Gems/OpenXRVk/Code/Include/OpenXRVk/OpenXRVkUtils.h

@@ -8,9 +8,11 @@
 
 #pragma once
 
+#include <AzCore/Math/Transform.h>
 #include <OpenXRVk_Platform.h>
 #include <XR/XRBase.h>
 
+
 // Macro to generate stringify functions for OpenXR enumerations based data provided in openxr_reflection.h
 #define ENUM_CASE_STR(name, val) case name: return #name;
 #define MAKE_XR_TO_STRING_FUNC(enumType)                  \
@@ -63,4 +65,10 @@ namespace OpenXRVk
     //! Iterate through the characters while caching the starting pointer to a string
     //! and every time ' ' is encountered replace it with '\0' to indicate the end of a string.
     AZStd::vector<const char*> ParseExtensionString(char* names);
+
+    AZ::Quaternion AzQuaternionFromXrPose(const XrPosef& pose, bool convertCoordinates = true);
+    AZ::Vector3 AzPositionFromXrPose(const XrPosef& pose, bool convertCoordinates = true);
+    AZ::Transform AzTransformFromXrPose(const XrPosef& pose, bool convertCoordinates = true);
+    XrPosef XrPoseFromAzTransform(const AZ::Transform& tm, bool convertCoordinates = true);
+
 }

+ 18 - 42
Gems/OpenXRVk/Code/Source/OpenXRVkDevice.cpp

@@ -52,6 +52,11 @@ namespace OpenXRVk
         {
             WARN_IF_UNSUCCESSFUL(result);
         }
+
+        // Notify the input system that we have a new predicted display time.
+        // The new predicted display time will be used to calculate XrPoses for the current frame.
+        session->UpdateXrSpaceLocations(*this, m_frameState.predictedDisplayTime, m_views);
+
         //Always return true as we want EndFrame to always be called. 
         return true;
     }
@@ -112,36 +117,10 @@ namespace OpenXRVk
     {
         XR::SwapChain::View* baseSwapChainView = baseSwapChain->GetView(viewIndex);
         SwapChain::View* swapChainView = static_cast<SwapChain::View*>(baseSwapChainView);
-        Space* xrSpace = static_cast<Space*>(GetSession()->GetSpace());
-        Instance* instance = static_cast<Instance*>(GetDescriptor().m_instance.get());
-        Session* session = static_cast<Session*>(GetSession().get());
-        XrSession xrSession = session->GetXrSession();
         XrSwapchain swapChainHandle = swapChainView->GetSwapChainHandle();
 
-        XrViewState viewState{ XR_TYPE_VIEW_STATE };
-        uint32_t viewCapacityInput = aznumeric_cast<uint32_t>(m_views.size());
-
-        XrViewLocateInfo viewLocateInfo{ XR_TYPE_VIEW_LOCATE_INFO };
-        viewLocateInfo.viewConfigurationType = instance->GetViewConfigType(); 
-        viewLocateInfo.displayTime = m_frameState.predictedDisplayTime;
-        viewLocateInfo.space = xrSpace->GetXrSpace(OpenXRVk::SpaceType::View);
-
-        XrResult result = xrLocateViews(xrSession, &viewLocateInfo, &viewState, viewCapacityInput, &m_viewCountOutput, m_views.data());
-        ASSERT_IF_UNSUCCESSFUL(result);
-        
-        if ((viewState.viewStateFlags & XR_VIEW_STATE_POSITION_VALID_BIT) == 0 ||
-            (viewState.viewStateFlags & XR_VIEW_STATE_ORIENTATION_VALID_BIT) == 0)
-        {
-            //There is no valid tracking poses for the views
-            return false;
-        }
-
-        AZ_Assert(m_viewCountOutput == viewCapacityInput, "Size mismatch between xrLocateViews %i and xrEnumerateViewConfigurationViews %i", m_viewCountOutput, viewCapacityInput);
-        AZ_Assert(m_viewCountOutput == static_cast<SwapChain*>(baseSwapChain)->GetViewConfigs().size(), "Size mismatch between xrLocateViews %i and xrEnumerateViewConfigurationViews %i", m_viewCountOutput, static_cast<SwapChain*>(baseSwapChain)->GetViewConfigs().size());
-
-        m_projectionLayerViews.resize(m_viewCountOutput);
         XrSwapchainImageAcquireInfo acquireInfo{ XR_TYPE_SWAPCHAIN_IMAGE_ACQUIRE_INFO };
-        result = xrAcquireSwapchainImage(swapChainHandle, &acquireInfo, &baseSwapChainView->m_activeImageIndex);
+        auto result = xrAcquireSwapchainImage(swapChainHandle, &acquireInfo, &baseSwapChainView->m_activeImageIndex);
         baseSwapChainView->m_isImageAcquired = (result == XR_SUCCESS);
         WARN_IF_UNSUCCESSFUL(result);
         
@@ -150,6 +129,9 @@ namespace OpenXRVk
         result = xrWaitSwapchainImage(swapChainHandle, &waitInfo);
         ASSERT_IF_UNSUCCESSFUL(result);
 
+        // REMARK: The data in m_views was updated during BeginFrameInternal(), which
+        // calls session->UpdateXrSpaceLocations(...).
+        m_projectionLayerViews.resize(m_views.size());
         m_projectionLayerViews[viewIndex] = { XR_TYPE_COMPOSITION_LAYER_PROJECTION_VIEW };
         m_projectionLayerViews[viewIndex].pose = m_views[viewIndex].pose;
         m_projectionLayerViews[viewIndex].fov = m_views[viewIndex].fov;
@@ -189,12 +171,12 @@ namespace OpenXRVk
 
     AZ::RHI::ResultCode Device::GetViewFov(AZ::u32 viewIndex, AZ::RPI::FovData& outFovData) const
     {
-        if(viewIndex < m_projectionLayerViews.size())
+        if(viewIndex < m_views.size())
         { 
-            outFovData.m_angleLeft = m_projectionLayerViews[viewIndex].fov.angleLeft;
-            outFovData.m_angleRight = m_projectionLayerViews[viewIndex].fov.angleRight;
-            outFovData.m_angleUp = m_projectionLayerViews[viewIndex].fov.angleUp;
-            outFovData.m_angleDown = m_projectionLayerViews[viewIndex].fov.angleDown;
+            outFovData.m_angleLeft = m_views[viewIndex].fov.angleLeft;
+            outFovData.m_angleRight = m_views[viewIndex].fov.angleRight;
+            outFovData.m_angleUp = m_views[viewIndex].fov.angleUp;
+            outFovData.m_angleDown = m_views[viewIndex].fov.angleDown;
             return AZ::RHI::ResultCode::Success;
         }
         return AZ::RHI::ResultCode::Fail;
@@ -202,17 +184,10 @@ namespace OpenXRVk
 
     AZ::RHI::ResultCode Device::GetViewPose(AZ::u32 viewIndex, AZ::RPI::PoseData& outPoseData) const
     { 
-        if (viewIndex < m_projectionLayerViews.size())
+        if (viewIndex < m_views.size())
         {
-            const XrQuaternionf& orientation = m_projectionLayerViews[viewIndex].pose.orientation;
-            const XrVector3f& position = m_projectionLayerViews[viewIndex].pose.position;
-            outPoseData.m_orientation.Set(orientation.x,
-                                          orientation.y, 
-                                          orientation.z, 
-                                          orientation.w);
-            outPoseData.m_position.Set(position.x,
-                                       position.y, 
-                                       position.z);
+            outPoseData.m_orientation = AzQuaternionFromXrPose(m_views[viewIndex].pose);
+            outPoseData.m_position = AzPositionFromXrPose(m_views[viewIndex].pose);
             return AZ::RHI::ResultCode::Success;
         }
         return AZ::RHI::ResultCode::Fail;
@@ -231,4 +206,5 @@ namespace OpenXRVk
         m_xrVkDevice = VK_NULL_HANDLE;
         m_xrVkPhysicalDevice = VK_NULL_HANDLE;
     }
+
 }

+ 109 - 36
Gems/OpenXRVk/Code/Source/OpenXRVkInput.cpp

@@ -14,6 +14,8 @@
 #include <OpenXRVk/OpenXRVkUtils.h>
 #include <AzCore/Casting/numeric_cast.h>
 
+#include <Atom/RPI.Public/XR/XRSpaceNotificationBus.h>
+
 namespace OpenXRVk
 {
     XR::Ptr<Input> Input::Create()
@@ -221,7 +223,6 @@ namespace OpenXRVk
     {
         const auto session = static_cast<Session*>(GetDescriptor().m_session.get());
         XrSession xrSession = session->GetXrSession();
-        const auto device = static_cast<Device*>(GetDescriptor().m_device.get());
         m_handActive = { XR_FALSE, XR_FALSE };
 
         auto& rawControllerData = m_xrControllerImpl->GetRawState();
@@ -335,6 +336,31 @@ namespace OpenXRVk
         rawControllerData.m_leftMotorVibrationValue = 0.f;
         rawControllerData.m_rightMotorVibrationValue = 0.f;
 
+        // Check if the Quit (Home) button was pressed this sync...
+        const bool quitPressed = GetButtonState(InputDeviceXRController::Button::Home);
+        if (quitPressed && !m_wasQuitPressedLastSync)
+        {
+            result = xrRequestExitSession(xrSession);
+            WARN_IF_UNSUCCESSFUL(result);
+        }
+        m_wasQuitPressedLastSync = quitPressed;
+    }
+
+
+    bool Input::UpdateXrSpaceLocations(const OpenXRVk::Device& device, XrTime predictedDisplayTime, AZStd::vector<XrView>& xrViews)
+    {
+        const auto thisDevice = static_cast<Device*>(GetDescriptor().m_device.get());
+        if (thisDevice != &device)
+        {
+            return false;
+        }
+
+        auto& rawControllerData = m_xrControllerImpl->GetRawState();
+        const auto session = static_cast<Session*>(GetDescriptor().m_session.get());
+        XrSession xrSession = session->GetXrSession();
+        XrSpace xrBaseSpaceForVisualization = session->GetXrSpace(session->GetBaseSpaceTypeForVisualization());
+        XrSpace xrBaseSpaceForJoysticks = session->GetXrSpace(session->GetBaseSpaceTypeForControllers());
+
         // Update poses
         for (const auto hand : { XR::Side::Left, XR::Side::Right })
         {
@@ -345,51 +371,80 @@ namespace OpenXRVk
             XrActionStatePose poseState{};
             poseState.type = XR_TYPE_ACTION_STATE_POSE;
 
-            result = xrGetActionStatePose(xrSession, &getInfo, &poseState);
+            XrResult result = xrGetActionStatePose(xrSession, &getInfo, &poseState);
             WARN_IF_UNSUCCESSFUL(result);
             m_handActive[static_cast<AZ::u32>(hand)] = poseState.isActive;
 
-            LocateControllerSpace(device->GetPredictedDisplayTime(), session->GetXrSpace(OpenXRVk::SpaceType::View), static_cast<AZ::u32>(hand));
+            LocateControllerSpace(predictedDisplayTime, xrBaseSpaceForJoysticks, static_cast<AZ::u32>(hand));
         }
 
         // Cache 3d location information
         for (AZ::u32 i = 0; i < static_cast<AZ::u32>(SpaceType::Count); i++)
         {
             const auto spaceType = static_cast<SpaceType>(i);
-            LocateVisualizedSpace(device->GetPredictedDisplayTime(), session->GetXrSpace(spaceType),
-                                  session->GetXrSpace(OpenXRVk::SpaceType::View), spaceType);
+            LocateVisualizedSpace(predictedDisplayTime, session->GetXrSpace(spaceType),
+                xrBaseSpaceForVisualization, spaceType);
         }
 
-        // XR to AZ vector conversion...
-        // Goes from y-up to z-up configuration (keeping Right Handed system)
-        const auto convertVector3 = [](const XrVector3f& xrVec3) -> AZ::Vector3
-        {
-            return AZ::Vector3{ xrVec3.x, -xrVec3.z, xrVec3.y };
-        };
+        rawControllerData.m_leftPositionState = AzPositionFromXrPose(m_handSpaceLocation[static_cast<AZ::u32>(XR::Side::Left)].pose);
+        rawControllerData.m_rightPositionState = AzPositionFromXrPose(m_handSpaceLocation[static_cast<AZ::u32>(XR::Side::Right)].pose);
 
-        // XR to AZ quaternion conversion...
-        // Goes from y-up to z-up configuration (keeping Right Handed system)
-        const auto convertQuat = [](const XrQuaternionf& xrQuat) -> AZ::Quaternion
+        rawControllerData.m_leftOrientationState = AzQuaternionFromXrPose(m_handSpaceLocation[static_cast<AZ::u32>(XR::Side::Left)].pose);
+        rawControllerData.m_rightOrientationState = AzQuaternionFromXrPose(m_handSpaceLocation[static_cast<AZ::u32>(XR::Side::Right)].pose);
+
+        if (LocateEyeViews(predictedDisplayTime, xrViews))
         {
-            return AZ::Quaternion{ xrQuat.x, -xrQuat.z, xrQuat.y, xrQuat.w };
-        };
+            //! Time to notify the engine that we have new poses.
+            const auto& xrSpaceLocationHeadToBase = m_xrVisualizedSpaceLocations[OpenXRVk::SpaceType::View];
+            const auto baseToHeadTm = AzTransformFromXrPose(xrSpaceLocationHeadToBase.pose);
+            const auto headToLeftEyeTm = AzTransformFromXrPose(xrViews[0].pose);
+            const auto headToRightEyeTm = AzTransformFromXrPose(xrViews[1].pose);
 
-        rawControllerData.m_leftPositionState = convertVector3(m_handSpaceLocation[static_cast<AZ::u32>(XR::Side::Left)].pose.position);
-        rawControllerData.m_rightPositionState = convertVector3(m_handSpaceLocation[static_cast<AZ::u32>(XR::Side::Right)].pose.position);
+            AZ::RPI::XRSpaceNotificationBus::Broadcast(&AZ::RPI::XRSpaceNotifications::OnXRSpaceLocationsChanged,
+                baseToHeadTm, headToLeftEyeTm, headToRightEyeTm);
 
-        rawControllerData.m_leftOrientationState = convertQuat(m_handSpaceLocation[static_cast<AZ::u32>(XR::Side::Left)].pose.orientation);
-        rawControllerData.m_rightOrientationState = convertQuat(m_handSpaceLocation[static_cast<AZ::u32>(XR::Side::Right)].pose.orientation);
+            return true;
+        }
 
-        // Check if the Quit (Home) button was pressed this sync...
-        const bool quitPressed = GetButtonState(InputDeviceXRController::Button::Home);
-        if (quitPressed && !m_wasQuitPressedLastSync)
+        return false;
+    }
+
+    bool Input::LocateEyeViews(XrTime predictedDisplayTime, AZStd::vector<XrView>& xrViews)
+    {
+        const auto session = static_cast<Session*>(GetDescriptor().m_session.get());
+        XrSession xrSession = session->GetXrSession();
+        const auto xrVkInstance = static_cast<Instance*>(GetDescriptor().m_instance.get());
+
+        // Let's get the FOV data, which for most practical purposes is always the same
+        // across all frames. But most importantly we need to get the location of each Eye relative to the View Space pose.
+
+        Space* xrSpace = static_cast<Space*>(session->GetSpace());
+
+        XrViewState viewState{ XR_TYPE_VIEW_STATE };
+        uint32_t viewCapacityInput = aznumeric_cast<uint32_t>(xrViews.size());
+        uint32_t viewCountOutput = 0;
+
+        XrViewLocateInfo viewLocateInfo{ XR_TYPE_VIEW_LOCATE_INFO };
+        viewLocateInfo.viewConfigurationType = xrVkInstance->GetViewConfigType();
+        viewLocateInfo.displayTime = predictedDisplayTime;
+        viewLocateInfo.space = xrSpace->GetXrSpace(OpenXRVk::SpaceType::View);
+
+        XrResult result = xrLocateViews(xrSession, &viewLocateInfo, &viewState, viewCapacityInput, &viewCountOutput, xrViews.data());
+        ASSERT_IF_UNSUCCESSFUL(result);
+
+        if ((viewState.viewStateFlags & XR_VIEW_STATE_POSITION_VALID_BIT) == 0 ||
+            (viewState.viewStateFlags & XR_VIEW_STATE_ORIENTATION_VALID_BIT) == 0)
         {
-            result = xrRequestExitSession(xrSession);
-            WARN_IF_UNSUCCESSFUL(result);
+            //There is no valid tracking poses for the views
+            return false;
         }
-        m_wasQuitPressedLastSync = quitPressed;
+
+        AZ_Error(LogName, viewCountOutput == viewCapacityInput, "Size mismatch between xrLocateViews %i and xrEnumerateViewConfigurationViews %i", viewCountOutput, viewCapacityInput);
+
+        return (viewCountOutput == viewCapacityInput);
     }
 
+
     void Input::LocateControllerSpace(XrTime predictedDisplayTime, XrSpace baseSpace, AZ::u32 handIndex)
     {
         XrSpaceLocation spaceLocation{};
@@ -420,28 +475,46 @@ namespace OpenXRVk
         }
     }
 
-    AZ::RHI::ResultCode Input::GetControllerPose(AZ::u32 handIndex, AZ::RPI::PoseData& outPoseData) const
+    AZ::RHI::ResultCode Input::GetControllerPose(AZ::u32 handIndex, AZ::RPI::PoseData& outPoseData, bool convertToO3de) const
     {
         if (handIndex < AZStd::size(m_handSpaceLocation))
         {
-            const XrQuaternionf& orientation = m_handSpaceLocation[handIndex].pose.orientation;
-            const XrVector3f& position = m_handSpaceLocation[handIndex].pose.position;
-            outPoseData.m_orientation.Set(orientation.x, orientation.y, orientation.z, orientation.w);
-            outPoseData.m_position.Set(position.x, position.y, position.z);
+            outPoseData.m_orientation = AzQuaternionFromXrPose(m_handSpaceLocation[handIndex].pose, convertToO3de);
+            outPoseData.m_position = AzPositionFromXrPose(m_handSpaceLocation[handIndex].pose, convertToO3de);
+            return AZ::RHI::ResultCode::Success;
+        }
+        return AZ::RHI::ResultCode::Fail;
+    }
+
+    AZ::RHI::ResultCode Input::GetControllerTransform(AZ::u32 handIndex, AZ::Transform& outTransform, bool convertToO3de) const
+    {
+        if (handIndex < AZStd::size(m_handSpaceLocation))
+        {
+            outTransform = AzTransformFromXrPose(m_handSpaceLocation[handIndex].pose, convertToO3de);
+            outTransform.SetUniformScale(m_handScale[handIndex]);
+            return AZ::RHI::ResultCode::Success;
+        }
+        return AZ::RHI::ResultCode::Fail;
+    }
+
+    AZ::RHI::ResultCode Input::GetVisualizedSpacePose(OpenXRVk::SpaceType visualizedSpaceType, AZ::RPI::PoseData& outPoseData, bool convertToO3de) const
+    {
+        const auto spaceIndex = static_cast<AZ::u32>(visualizedSpaceType);
+        if (spaceIndex < AZStd::size(m_xrVisualizedSpaceLocations))
+        {
+            outPoseData.m_orientation = AzQuaternionFromXrPose(m_xrVisualizedSpaceLocations[spaceIndex].pose, convertToO3de);
+            outPoseData.m_position = AzPositionFromXrPose(m_xrVisualizedSpaceLocations[spaceIndex].pose, convertToO3de);
             return AZ::RHI::ResultCode::Success;
         }
         return AZ::RHI::ResultCode::Fail;
     }
 
-    AZ::RHI::ResultCode Input::GetVisualizedSpacePose(OpenXRVk::SpaceType visualizedSpaceType, AZ::RPI::PoseData& outPoseData) const
+    AZ::RHI::ResultCode Input::GetVisualizedSpaceTransform(OpenXRVk::SpaceType visualizedSpaceType, AZ::Transform& outTransform, bool convertToO3de) const
     {
         const auto spaceIndex = static_cast<AZ::u32>(visualizedSpaceType);
         if (spaceIndex < AZStd::size(m_xrVisualizedSpaceLocations))
         {
-            const XrQuaternionf& orientation = m_xrVisualizedSpaceLocations[spaceIndex].pose.orientation;
-            const XrVector3f& position = m_xrVisualizedSpaceLocations[spaceIndex].pose.position;
-            outPoseData.m_orientation.Set(orientation.x, orientation.y, orientation.z, orientation.w);
-            outPoseData.m_position.Set(position.x, position.y, position.z);
+            outTransform = AzTransformFromXrPose(m_xrVisualizedSpaceLocations[spaceIndex].pose, convertToO3de);
             return AZ::RHI::ResultCode::Success;
         }
         return AZ::RHI::ResultCode::Fail;

+ 44 - 1
Gems/OpenXRVk/Code/Source/OpenXRVkSession.cpp

@@ -112,6 +112,12 @@ namespace OpenXRVk
                 XrResult result = xrBeginSession(m_session, &sessionBeginInfo);
                 WARN_IF_UNSUCCESSFUL(result);
                 m_sessionRunning = true;
+                // It's important to reset the spaces when this event is received.
+                // Typically the Proximity Sensor is ON, which reduces battery usage when the user
+                // is not wearing the headset. Each time the proximity sensor is disabled or the user
+                // decides to wear the headset, the XrSpaces need to be recreated, otherwise their
+                // poses would be corrupted.
+                ResetSpaces();
                 break;
             }
             case XR_SESSION_STATE_STOPPING:
@@ -142,6 +148,13 @@ namespace OpenXRVk
             }
         }
     }
+
+    void Session::ResetSpaces()
+    {
+        Space* xrVkSpace = static_cast<Space*>(GetSpace());
+        xrVkSpace->ShutdownInternal();
+        xrVkSpace->CreateVisualizedSpaces(m_session);
+    }
     
     const XrEventDataBaseHeader* Session::TryReadNextEvent()
     {
@@ -222,6 +235,26 @@ namespace OpenXRVk
         }
     }
 
+    void Session::SetBaseSpaceTypeForVisualization(SpaceType spaceType)
+    {
+        m_baseSpaceTypeForVisualization = spaceType;
+    }
+
+    void Session::SetBaseSpaceTypeForControllers(SpaceType spaceType)
+    {
+        m_baseSpaceTypeForControllers = spaceType;
+    }
+
+    SpaceType Session::GetBaseSpaceTypeForVisualization() const
+    {
+        return m_baseSpaceTypeForVisualization;
+    }
+
+    SpaceType Session::GetBaseSpaceTypeForControllers() const
+    {
+        return m_baseSpaceTypeForControllers;
+    }
+
     void Session::LogActionSourceName(XrAction action, const AZStd::string_view actionName) const
     {
         XrBoundSourcesForActionEnumerateInfo getInfo = { XR_TYPE_BOUND_SOURCES_FOR_ACTION_ENUMERATE_INFO };
@@ -278,6 +311,11 @@ namespace OpenXRVk
     {
         return GetNativeInput()->GetControllerPose(handIndex, outPoseData);
     }
+
+    AZ::RHI::ResultCode Session::GetControllerTransform(AZ::u32 handIndex, AZ::Transform& outTransform) const
+    {
+        return GetNativeInput()->GetControllerTransform(handIndex, outTransform);
+    }
     
     AZ::RHI::ResultCode Session::GetControllerStagePose(AZ::u32 handIndex, AZ::RPI::PoseData& outPoseData) const
     {
@@ -293,7 +331,7 @@ namespace OpenXRVk
 
     AZ::RHI::ResultCode Session::GetViewLocalPose(AZ::RPI::PoseData& outPoseData) const
     {
-        return GetNativeInput()->GetVisualizedSpacePose(OpenXRVk::SpaceType::Local, outPoseData);
+        return GetNativeInput()->GetVisualizedSpacePose(OpenXRVk::SpaceType::View, outPoseData);
     }
 
     float Session::GetControllerScale(AZ::u32 handIndex) const
@@ -384,4 +422,9 @@ namespace OpenXRVk
     {
         return static_cast<Input*>(GetInput());
     }
+
+    void Session::UpdateXrSpaceLocations(const OpenXRVk::Device& device, XrTime predictedDisplayTime, AZStd::vector<XrView>& xrViews)
+    {
+        GetNativeInput()->UpdateXrSpaceLocations(device, predictedDisplayTime, xrViews);
+    }
 }

+ 44 - 0
Gems/OpenXRVk/Code/Source/OpenXRVkUtils.cpp

@@ -74,4 +74,48 @@ namespace OpenXRVk
         }
         return list;
     }
+
+    AZ::Quaternion AzQuaternionFromXrPose(const XrPosef& pose, bool convertCoordinates)
+    {
+        if (convertCoordinates)
+        {
+            return AZ::Quaternion(pose.orientation.x, -pose.orientation.z, pose.orientation.y, pose.orientation.w);
+        }
+        return AZ::Quaternion(pose.orientation.x, pose.orientation.y, pose.orientation.z, pose.orientation.w);
+    }
+
+    AZ::Vector3 AzPositionFromXrPose(const XrPosef& pose, bool convertCoordinates)
+    {
+        return AZ::Vector3(pose.position.x,
+                           convertCoordinates ? -pose.position.z : pose.position.y,
+                           convertCoordinates ? pose.position.y : pose.position.z);
+    }
+
+    AZ::Transform AzTransformFromXrPose(const XrPosef& pose, bool convertCoordinates)
+    {
+        const auto azQuat = AzQuaternionFromXrPose(pose, convertCoordinates);
+        const auto azVec = AzPositionFromXrPose(pose, convertCoordinates);
+        AZ::Transform tm = AZ::Transform::CreateFromQuaternionAndTranslation(azQuat, azVec);
+        return tm;
+    }
+
+    XrPosef XrPoseFromAzTransform(const AZ::Transform& tm, bool convertCoordinates)
+    {
+        const auto &azQuat = tm.GetRotation();
+        const auto &azPos = tm.GetTranslation();
+        
+        XrPosef pose;
+        
+        pose.orientation.x = azQuat.GetX();
+        pose.orientation.y = convertCoordinates ? -azQuat.GetZ() : azQuat.GetY();
+        pose.orientation.z = convertCoordinates ? azQuat.GetY() : azQuat.GetZ();
+        pose.orientation.w = azQuat.GetW();
+
+        pose.position.x = azPos.GetX();
+        pose.position.y = convertCoordinates ? -azPos.GetZ() : azPos.GetY();
+        pose.position.z = convertCoordinates ?  azPos.GetY() : azPos.GetZ();
+
+        return pose; 
+    }
+
 }

+ 49 - 25
Gems/OpenXRVk/Code/Source/XRCameraMovementComponent.cpp

@@ -25,24 +25,6 @@
 
 namespace OpenXRVk
 {
-    static AZ::Transform GetCameraTransformFromCurrentView()
-    {
-        if (const auto viewportContextMgr = AZ::Interface<AZ::RPI::ViewportContextRequestsInterface>::Get();
-            viewportContextMgr != nullptr)
-        {
-            if (const AZ::RPI::ViewportContextPtr viewportContext = viewportContextMgr->GetDefaultViewportContext();
-                viewportContext != nullptr)
-            {
-                if (const AZ::RPI::ViewPtr view = viewportContext->GetDefaultView();
-                    view != nullptr)
-                {
-                    return view->GetCameraTransform();
-                }
-            }
-        }
-        return AZ::Transform::CreateIdentity();
-    }
-
     void XRCameraMovementComponent::Reflect(AZ::ReflectContext* context)
     {
         if (auto serializeContext = azrtti_cast<AZ::SerializeContext*>(context))
@@ -99,25 +81,39 @@ namespace OpenXRVk
 
     void XRCameraMovementComponent::Activate()
     {
-        AzFramework::InputChannelEventListener::Connect();
-        AZ::TickBus::Handler::BusConnect();
+        Camera::CameraNotificationBus::Handler::BusConnect();
+        if (m_isActive)
+        {
+            AzFramework::InputChannelEventListener::Connect();
+            AZ::TickBus::Handler::BusConnect();
+        }
     }
 
     void XRCameraMovementComponent::Deactivate()
     {
-        AZ::TickBus::Handler::BusDisconnect();
-        AzFramework::InputChannelEventListener::Disconnect();
+        if (AZ::TickBus::Handler::BusIsConnected())
+        {
+            AZ::TickBus::Handler::BusDisconnect();
+        }
+        if (AzFramework::InputChannelEventListener::BusIsConnected())
+        {
+            AzFramework::InputChannelEventListener::Disconnect();
+        }
+        
+        Camera::CameraNotificationBus::Handler::BusDisconnect();
     }
 
     void XRCameraMovementComponent::OnTick(float deltaTime, [[maybe_unused]] AZ::ScriptTimePoint timePoint)
     {
-        AZ::Transform cameraTransform = GetCameraTransformFromCurrentView();
+        AZ::Transform cameraTransform;
+        AZ::TransformBus::EventResult(cameraTransform, GetEntityId(), &AZ::TransformBus::Events::GetWorldTM);
 
         // Update movement...
         const float moveSpeed = m_moveSpeed * deltaTime;
-        const AZ::Vector3 movementVec = (cameraTransform.GetBasisX() * m_movement.GetX())
+        const AZ::Vector3 movementVec = 
+              (cameraTransform.GetBasisX() * m_movement.GetX())
             + (cameraTransform.GetBasisY() * m_movement.GetY())
-            + (AZ::Vector3{0.f, 0.f, 1.f} * m_movement.GetZ()); // use a fixed UP for the Z direction
+            + (cameraTransform.GetBasisZ() * m_movement.GetZ());
         const AZ::Vector3 newPosition{ (cameraTransform.GetTranslation() + (movementVec * moveSpeed)) };
         cameraTransform.SetTranslation(newPosition);
 
@@ -163,4 +159,32 @@ namespace OpenXRVk
         }
     }
 
+    // Camera::CameraNotificationBus::Handler overrides
+    void XRCameraMovementComponent::OnActiveViewChanged(const AZ::EntityId& activeEntityId)
+    {
+        m_isActive = activeEntityId == GetEntityId();
+        if (m_isActive)
+        {
+            if (!AZ::TickBus::Handler::BusIsConnected())
+            {
+                AZ::TickBus::Handler::BusConnect();
+            }
+            if (!AzFramework::InputChannelEventListener::BusIsConnected())
+            {
+                AzFramework::InputChannelEventListener::Connect();
+            }
+        }
+        else
+        {
+            if (AZ::TickBus::Handler::BusIsConnected())
+            {
+                AZ::TickBus::Handler::BusDisconnect();
+            }
+            if (AzFramework::InputChannelEventListener::BusIsConnected())
+            {
+                AzFramework::InputChannelEventListener::Disconnect();
+            }
+        }
+    }
+
 } // namespace OpenXRVk

+ 8 - 0
Gems/OpenXRVk/Code/Source/XRCameraMovementComponent.h

@@ -11,6 +11,7 @@
 #include <AzCore/Component/Component.h>
 #include <AzCore/Component/TickBus.h>
 #include <AzFramework/Input/Events/InputChannelEventListener.h>
+#include <AzFramework/Components/CameraBus.h>
 
 
 namespace OpenXRVk
@@ -22,6 +23,7 @@ namespace OpenXRVk
         : public AZ::Component
         , public AZ::TickBus::Handler
         , public AzFramework::InputChannelEventListener
+        , public Camera::CameraNotificationBus::Handler
     {
     public:
         AZ_COMPONENT(OpenXRVk::XRCameraMovementComponent, "{7FEC0A04-D994-445C-B8DE-190D03BC3820}");
@@ -44,6 +46,9 @@ namespace OpenXRVk
         // AzFramework::InputChannelEventListener
         bool OnInputChannelEventFiltered(const AzFramework::InputChannel& inputChannel) override;
 
+        // Camera::CameraNotificationBus::Handler overrides
+        void OnActiveViewChanged(const AZ::EntityId&) override;
+
     private:
         void OnXRControllerEvent(const AzFramework::InputChannel& inputChannel);
 
@@ -54,6 +59,9 @@ namespace OpenXRVk
         // Serialized data...
         float m_moveSpeed = 20.f;
         float m_movementSensitivity = 0.025f;
+
+        // We will process XR Actions only if the entity that owns this component is the active camera. 
+        bool m_isActive = false;
     };
 
 } // namespace OpenXRVk

+ 2 - 2
Gems/OpenXRVk/gem.json

@@ -6,7 +6,7 @@
     "origin": "Open 3D Engine - o3de.org",
     "origin_url": "https://github.com/o3de/o3de",
     "type": "Code",
-    "summary": "OpenXR Vulcan for Atom",
+    "summary": "OpenXR Vulkan for Atom",
     "canonical_tags": [
         "Gem"
     ],
@@ -16,5 +16,5 @@
     "dependencies": [],
     "repo_uri": "https://raw.githubusercontent.com/o3de/o3de-extras/development",
     "download_source_uri": "https://github.com/o3de/o3de-extras/releases/download/2.0/openxrvk-1.0.0-gem.zip",
-    "version": "1.0.0"
+    "version": "1.0.1"
 }

+ 3 - 0
Gems/XR/Code/Include/XR/XRSession.h

@@ -76,6 +76,9 @@ namespace XR
         //! Api to retrieve the controller space data
         virtual AZ::RHI::ResultCode GetControllerPose(AZ::u32 handIndex, AZ::RPI::PoseData& outPoseData) const = 0;
 
+        //! Same as above, but conveniently returns a transform
+        virtual AZ::RHI::ResultCode GetControllerTransform(AZ::u32 handIndex, AZ::Transform& outTransform) const = 0;
+
         //! Api to retrieve the controller space data associated with local view translated and rotated by 60 deg left or right based on handIndex
         virtual AZ::RHI::ResultCode GetControllerStagePose(AZ::u32 handIndex, AZ::RPI::PoseData& outPoseData) const = 0;
 

+ 2 - 0
Gems/XR/Code/Include/XR/XRSystem.h

@@ -71,6 +71,8 @@ namespace XR
         AZ::RHI::ResultCode GetViewLocalPose(AZ::RPI::PoseData& outPoseData) const override;
         AZ::RHI::ResultCode GetControllerStagePose(AZ::u32 handIndex, AZ::RPI::PoseData& outPoseData) const override;
         AZ::RHI::ResultCode GetControllerPose(AZ::u32 handIndex, AZ::RPI::PoseData& outPoseData) const override;
+        AZ::RHI::ResultCode GetControllerTransform(const AZ::u32 handIndex, AZ::Transform& outTransform) const override;
+
         float GetControllerScale(AZ::u32 handIndex) const override;
         bool ShouldRender() const override;
         AZ::Matrix4x4 CreateStereoscopicProjection(float angleLeft, float angleRight,

+ 9 - 0
Gems/XR/Code/Source/XRSystem.cpp

@@ -254,6 +254,15 @@ namespace XR
         return AZ::RHI::ResultCode::NotReady;
     }
 
+    AZ::RHI::ResultCode System::GetControllerTransform(const AZ::u32 handIndex, AZ::Transform& outTransform) const
+    {
+        if (m_session->IsSessionRunning())
+        {
+            return m_session->GetControllerTransform(handIndex, outTransform);
+        }
+        return AZ::RHI::ResultCode::NotReady;
+    }
+
     AZ::RHI::ResultCode System::GetControllerStagePose(AZ::u32 handIndex, AZ::RPI::PoseData& outPoseData) const
     {
         if (m_session->IsSessionRunning())

+ 1 - 1
Gems/XR/gem.json

@@ -16,5 +16,5 @@
     "dependencies": [],
     "repo_uri": "https://raw.githubusercontent.com/o3de/o3de-extras/development",
     "download_source_uri": "https://github.com/o3de/o3de-extras/releases/download/2.0/xr-1.0.0-gem.zip",
-    "version": "1.0.0"
+    "version": "1.0.1"
 }

+ 2 - 2
Projects/OpenXRTest/project.json

@@ -21,7 +21,7 @@
     ],
     "restricted": "OpenXRTest",
     "gem_names": [
-        "XR",
-        "OpenXRVk"
+        "XR>=1.0.1",
+        "OpenXRVk>=1.0.1"
     ]
 }