  1. /********************************************************************************//**
  2. \file OVR_CAPI_Util.h
  3. \brief This header provides LibOVR utility function declarations
  4. \copyright Copyright 2015 Oculus VR, LLC All Rights reserved.
  5. *************************************************************************************/
  6. #ifndef OVR_CAPI_Util_h
  7. #define OVR_CAPI_Util_h
  8. #include "OVR_CAPI.h"
  9. #ifdef __cplusplus
  10. extern "C" {
  11. #endif
  12. /// Enumerates modifications to the projection matrix based on the application's needs.
  13. ///
  14. /// \see ovrMatrix4f_Projection
  15. ///
  16. typedef enum ovrProjectionModifier_
  17. {
  18. /// Use for generating a default projection matrix that is:
  19. /// * Left-handed.
  20. /// * Near depth values stored in the depth buffer are smaller than far depth values.
  21. /// * Both near and far are explicitly defined.
  22. /// * With a clipping range that is (0 to w).
  23. ovrProjection_None = 0x00,
  24. /// Enable if using right-handed transformations in your application.
  25. ovrProjection_RightHanded = 0x01,
  26. /// After the projection transform is applied, far values stored in the depth buffer will be less than closer depth values.
  27. /// NOTE: Enable only if the application is using a floating-point depth buffer for proper precision.
  28. ovrProjection_FarLessThanNear = 0x02,
  29. /// When this flag is used, the zfar value pushed into ovrMatrix4f_Projection() will be ignored
  30. /// NOTE: Enable only if ovrProjection_FarLessThanNear is also enabled where the far clipping plane will be pushed to infinity.
  31. ovrProjection_FarClipAtInfinity = 0x04,
  32. /// Enable if the application is rendering with OpenGL and expects a projection matrix with a clipping range of (-w to w).
  33. /// Ignore this flag if your application already handles the conversion from D3D range (0 to w) to OpenGL.
  34. ovrProjection_ClipRangeOpenGL = 0x08,
  35. } ovrProjectionModifier;
  36. /// Return values for ovr_Detect.
  37. ///
  38. /// \see ovr_Detect
  39. ///
  40. typedef struct OVR_ALIGNAS(8) ovrDetectResult_
  41. {
  42. /// Is ovrFalse when the Oculus Service is not running.
  43. /// This means that the Oculus Service is either uninstalled or stopped.
  44. /// IsOculusHMDConnected will be ovrFalse in this case.
  45. /// Is ovrTrue when the Oculus Service is running.
  46. /// This means that the Oculus Service is installed and running.
  47. /// IsOculusHMDConnected will reflect the state of the HMD.
  48. ovrBool IsOculusServiceRunning;
  49. /// Is ovrFalse when an Oculus HMD is not detected.
  50. /// If the Oculus Service is not running, this will be ovrFalse.
  51. /// Is ovrTrue when an Oculus HMD is detected.
  52. /// This implies that the Oculus Service is also installed and running.
  53. ovrBool IsOculusHMDConnected;
  54. OVR_UNUSED_STRUCT_PAD(pad0, 6) ///< \internal struct padding
  55. } ovrDetectResult;
  56. OVR_STATIC_ASSERT(sizeof(ovrDetectResult) == 8, "ovrDetectResult size mismatch");
  57. /// Detects Oculus Runtime and Device Status
  58. ///
  59. /// Checks for Oculus Runtime and Oculus HMD device status without loading the LibOVRRT
  60. /// shared library. This may be called before ovr_Initialize() to help decide whether or
  61. /// not to initialize LibOVR.
  62. ///
  63. /// \param[in] timeoutMsec Specifies a timeout to wait for HMD to be attached or 0 to poll.
  64. ///
  65. /// \return Returns an ovrDetectResult object indicating the result of detection.
  66. ///
  67. /// \see ovrDetectResult
  68. ///
  69. OVR_PUBLIC_FUNCTION(ovrDetectResult) ovr_Detect(int timeoutMsec);
  70. // On the Windows platform,
  71. #ifdef _WIN32
  72. /// This is the Windows Named Event name that is used to check for HMD connected state.
  73. #define OVR_HMD_CONNECTED_EVENT_NAME L"OculusHMDConnected"
  74. #endif // _WIN32
  75. /// Used to generate projection from ovrEyeDesc::Fov.
  76. ///
  77. /// \param[in] fov Specifies the ovrFovPort to use.
  78. /// \param[in] znear Distance to near Z limit.
  79. /// \param[in] zfar Distance to far Z limit.
  80. /// \param[in] projectionModFlags A combination of the ovrProjectionModifier flags.
  81. ///
  82. /// \return Returns the calculated projection matrix.
  83. ///
  84. /// \see ovrProjectionModifier
  85. ///
  86. OVR_PUBLIC_FUNCTION(ovrMatrix4f) ovrMatrix4f_Projection(ovrFovPort fov, float znear, float zfar, unsigned int projectionModFlags);
  87. /// Extracts the required data from the result of ovrMatrix4f_Projection.
  88. ///
  89. /// \param[in] projection Specifies the project matrix from which to extract ovrTimewarpProjectionDesc.
  90. /// \param[in] projectionModFlags A combination of the ovrProjectionModifier flags.
  91. /// \return Returns the extracted ovrTimewarpProjectionDesc.
  92. /// \see ovrTimewarpProjectionDesc
  93. ///
  94. OVR_PUBLIC_FUNCTION(ovrTimewarpProjectionDesc) ovrTimewarpProjectionDesc_FromProjection(ovrMatrix4f projection, unsigned int projectionModFlags);
  95. /// Generates an orthographic sub-projection.
  96. ///
  97. /// Used for 2D rendering, Y is down.
  98. ///
  99. /// \param[in] projection The perspective matrix that the orthographic matrix is derived from.
  100. /// \param[in] orthoScale Equal to 1.0f / pixelsPerTanAngleAtCenter.
  101. /// \param[in] orthoDistance Equal to the distance from the camera in meters, such as 0.8m.
  102. /// \param[in] hmdToEyeViewOffsetX Specifies the offset of the eye from the center.
  103. ///
  104. /// \return Returns the calculated projection matrix.
  105. ///
  106. OVR_PUBLIC_FUNCTION(ovrMatrix4f) ovrMatrix4f_OrthoSubProjection(ovrMatrix4f projection, ovrVector2f orthoScale,
  107. float orthoDistance, float hmdToEyeViewOffsetX);
  108. /// Computes offset eye poses based on headPose returned by ovrTrackingState.
  109. ///
  110. /// \param[in] headPose Indicates the HMD position and orientation to use for the calculation.
  111. /// \param[in] hmdToEyeViewOffset Can be ovrEyeRenderDesc.HmdToEyeViewOffset returned from
  112. /// ovr_GetRenderDesc. For monoscopic rendering, use a vector that is the average
  113. /// of the two vectors for both eyes.
  114. /// \param[out] outEyePoses If outEyePoses are used for rendering, they should be passed to
  115. /// ovr_SubmitFrame in ovrLayerEyeFov::RenderPose or ovrLayerEyeFovDepth::RenderPose.
  116. ///
  117. OVR_PUBLIC_FUNCTION(void) ovr_CalcEyePoses(ovrPosef headPose,
  118. const ovrVector3f hmdToEyeViewOffset[2],
  119. ovrPosef outEyePoses[2]);
  120. /// Returns the predicted head pose in outHmdTrackingState and offset eye poses in outEyePoses.
  121. ///
  122. /// This is a thread-safe function where caller should increment frameIndex with every frame
  123. /// and pass that index where applicable to functions called on the rendering thread.
  124. /// Assuming outEyePoses are used for rendering, it should be passed as a part of ovrLayerEyeFov.
  125. /// The caller does not need to worry about applying HmdToEyeViewOffset to the returned outEyePoses variables.
  126. ///
  127. /// \param[in] hmd Specifies an ovrHmd previously returned by ovr_Create.
  128. /// \param[in] frameIndex Specifies the targeted frame index, or 0 to refer to one frame after
  129. /// the last time ovr_SubmitFrame was called.
  130. /// \param[in] hmdToEyeViewOffset Can be ovrEyeRenderDesc.HmdToEyeViewOffset returned from
  131. /// ovr_GetRenderDesc. For monoscopic rendering, use a vector that is the average
  132. /// of the two vectors for both eyes.
  133. /// \param[in] latencyMarker Specifies that this call is the point in time where
  134. /// the "App-to-Mid-Photon" latency timer starts from. If a given ovrLayer
  135. /// provides "SensorSampleTimestamp", that will override the value stored here.
  136. /// \param[out] outEyePoses The predicted eye poses.
  137. /// \param[out] outHmdTrackingState The predicted ovrTrackingState. May be NULL, in which case it is ignored.
  138. ///
  139. OVR_PUBLIC_FUNCTION(void) ovr_GetEyePoses(ovrSession session, long long frameIndex, ovrBool latencyMarker,
  140. const ovrVector3f hmdToEyeViewOffset[2],
  141. ovrPosef outEyePoses[2],
  142. ovrTrackingState* outHmdTrackingState);
  143. #ifdef __cplusplus
  144. } /* extern "C" */
  145. #endif
  146. #endif // Header include guard