瀏覽代碼

Motion Matching: Added section about trajectory prediction to the ReadMe.md

Signed-off-by: Benjamin Jillich <[email protected]>
Benjamin Jillich 3 年之前
父節點
當前提交
23299b6a60

+ 1 - 1
Gems/MotionMatching/Code/Source/MotionMatchingInstance.cpp

@@ -289,7 +289,7 @@ namespace EMotionFX::MotionMatching
         m_trajectoryHistory.Update(timePassedInSeconds);
 
         // Update the trajectory query control points.
-        m_trajectoryQuery.Update(m_actorInstance,
+        m_trajectoryQuery.Update(*m_actorInstance,
             m_cachedTrajectoryFeature,
             m_trajectoryHistory,
             mode,

+ 10 - 8
Gems/MotionMatching/Code/Source/TrajectoryQuery.cpp

@@ -21,13 +21,13 @@ namespace EMotionFX::MotionMatching
         return displacement;
     }
 
-    void TrajectoryQuery::PredictFutureTrajectory(const ActorInstance* actorInstance,
+    void TrajectoryQuery::PredictFutureTrajectory(const ActorInstance& actorInstance,
         const FeatureTrajectory* trajectoryFeature,
         const AZ::Vector3& targetPos,
         [[maybe_unused]] const AZ::Vector3& targetFacingDir)
     {
-        const AZ::Vector3 actorInstanceWorldPosition = actorInstance->GetWorldSpaceTransform().m_position;
-        const AZ::Quaternion actorInstanceWorldRotation = actorInstance->GetWorldSpaceTransform().m_rotation;
+        const AZ::Vector3 actorInstanceWorldPosition = actorInstance.GetWorldSpaceTransform().m_position;
+        const AZ::Quaternion actorInstanceWorldRotation = actorInstance.GetWorldSpaceTransform().m_rotation;
         const AZ::Vector3 actorInstanceToTarget = (targetPos - actorInstanceWorldPosition);
 
         const size_t numFutureSamples = trajectoryFeature->GetNumFutureSamples();
@@ -41,6 +41,8 @@ namespace EMotionFX::MotionMatching
             AZ_Assert(trajectoryFeature->GetFutureTimeRange() > AZ::Constants::FloatEpsilon, "Trajectory feature future time range is too small.");
             const float velocity = actorInstanceToTarget.GetLength() / trajectoryFeature->GetFutureTimeRange();
 
+            linearDisplacementPerSample = (velocity / numSections);
+
             // Use the direction from the current actor instance position to the target as the target facing direction
             // and convert the direction vector to a quaternion.
             targetFacingDirQuat = AZ::Quaternion::CreateShortestArc(trajectoryFeature->GetFacingAxisDir(), actorInstanceToTarget);
@@ -61,7 +63,7 @@ namespace EMotionFX::MotionMatching
                 // Interpolate between the linear direction to target and the facing direction from the previous sample.
                 // This will make sure the facing direction close to the current time matches the current facing direction and
                 // the facing direction in the most far future matches the desired target facing direction.
-                const float weight = 1.0f - powf(1.0f - t, m_positionBias);
+                const float weight = 1.0f - AZStd::pow(1.0f - t, m_positionBias);
                 const AZ::Vector3 interpolatedPosDelta = prevFacingDir.Lerp(actorInstanceToTarget.GetNormalized(), weight);
 
                 // Scale it by the desired velocity.
@@ -73,7 +75,7 @@ namespace EMotionFX::MotionMatching
             // Facing direction
             {
                 // Interpolate facing direction from current character facing direction (first sample) to the target facing direction (most far future sample).
-                const float weight = 1.0f - powf(1.0f - t, m_rotationBias);
+                const float weight = 1.0f - AZStd::pow(1.0f - t, m_rotationBias);
                 const AZ::Quaternion interpolatedRotation = actorInstanceWorldRotation.Slerp(targetFacingDirQuat, weight);
 
                 // Convert the interpolated rotation result back to a facing direction vector.
@@ -84,7 +86,7 @@ namespace EMotionFX::MotionMatching
         }
     }
 
-    void TrajectoryQuery::Update(const ActorInstance* actorInstance,
+    void TrajectoryQuery::Update(const ActorInstance& actorInstance,
         const FeatureTrajectory* trajectoryFeature,
         const TrajectoryHistory& trajectoryHistory,
         EMode mode,
@@ -101,7 +103,7 @@ namespace EMotionFX::MotionMatching
 
         for (size_t i = 0; i < numPastSamples; ++i)
         {
-            const float sampleTimeNormalized = i / static_cast<float>(numPastSamples - 1);
+            const float sampleTimeNormalized = i / aznumeric_cast<float>(numPastSamples - 1);
             const TrajectoryHistory::Sample sample = trajectoryHistory.Evaluate(sampleTimeNormalized * pastTimeRange);
             m_pastControlPoints[i] = { sample.m_position, sample.m_facingDirection };
         }
@@ -123,7 +125,7 @@ namespace EMotionFX::MotionMatching
                 const float offset = i * 0.1f;
                 const AZ::Vector3 curSample = SampleFunction(offset, pathRadius, m_automaticModePhase);
                 AZ::Vector3 displacement = curSample - base;
-                m_futureControlPoints[i].m_position = actorInstance->GetWorldSpaceTransform().m_position + displacement;
+                m_futureControlPoints[i].m_position = actorInstance.GetWorldSpaceTransform().m_position + displacement;
 
                 // Evaluate a control point slightly further into the future than the actual
                 // one and use the position difference as the facing direction.

+ 2 - 2
Gems/MotionMatching/Code/Source/TrajectoryQuery.h

@@ -39,7 +39,7 @@ namespace EMotionFX::MotionMatching
             MODE_AUTOMATIC = 1
         };
 
-        void Update(const ActorInstance* actorInstance,
+        void Update(const ActorInstance& actorInstance,
             const FeatureTrajectory* trajectoryFeature,
             const TrajectoryHistory& trajectoryHistory,
             EMode mode,
@@ -59,7 +59,7 @@ namespace EMotionFX::MotionMatching
             const AZStd::vector<ControlPoint>& controlPoints,
             const AZ::Color& color);
 
-        void PredictFutureTrajectory(const ActorInstance* actorInstance,
+        void PredictFutureTrajectory(const ActorInstance& actorInstance,
             const FeatureTrajectory* trajectoryFeature,
             const AZ::Vector3& targetPos,
             const AZ::Vector3& targetFacingDir);

+ 6 - 0
Gems/MotionMatching/README.md

@@ -101,6 +101,12 @@ The trajectory history stores world space position and facing direction data of
 
 ![Trajectory Feature](https://user-images.githubusercontent.com/43751992/151819315-beb8d9a1-69ca-49cd-bec0-ba2bae2dc469.png)
 
+## Trajectory prediction
+
+The user controls the character via its future trajectory. The future trajectory describes the path the character is expected to move along: whether it should accelerate, move faster, or come to a stop, and whether it should walk forward while turning or strafe sideways. Based on the joystick position, we need to predict the future trajectory and build the path and the facing-direction vectors across the control points. The trajectory feature defines the time window of the prediction and the number of samples to be generated. We generate an exponential curve that starts in the character's current direction and then bends towards the given target.
+
+https://user-images.githubusercontent.com/43751992/156741698-d2306bac-cdf5-4a25-96bd-0fc4422b598b.mp4
+
 ## Motion Matching data
 
 Data based on a given skeleton but independent of the instance like the motion capture database, the feature schema or feature matrix is stored here. It is just a wrapper to group the sharable data.