瀏覽代碼

Work in progress to move common functionality to shared functions

Signed-off-by: rgba16f <[email protected]>
rgba16f 3 年之前
父節點
當前提交
d0b68aae5c

+ 79 - 0
Project/Code/PythonTests/Automated/benchmark_runner_periodic_suite_common.py

@@ -0,0 +1,79 @@
+"""
+Copyright (c) Contributors to the Open 3D Engine Project.
+For complete copyright and license terms please see the LICENSE at the root of this distribution.
+
+SPDX-License-Identifier: Apache-2.0 OR MIT
+"""
+import logging
+import os
+import subprocess
+import psutil
+
+import ly_test_tools.environment.process_utils as process_utils
+import ly_test_tools.environment.file_system as file_system
+import ly_test_tools.launchers.platforms.base
+from ly_test_tools.benchmark.data_aggregator import BenchmarkDataAggregator
+
+logger = logging.getLogger(__name__)
+
def filebeat_service_running():
    """
    Checks if the filebeat service is currently running on the OS.
    :return: True if filebeat service detected and running, False otherwise.
    """
    try:
        # win_service_get raises NoSuchProcess when the service is not installed.
        service_info = psutil.win_service_get('filebeat').as_dict()
    except psutil.NoSuchProcess:
        return False
    return service_info['status'] == 'running'
+
+
class LoftSampleException(Exception):
    """Raised when a LoftSample benchmark test fails to complete."""
+
+
def LoftSampleFrameTimingTest_GatherBenchmarkMetrics_Common(
        self, request, workspace, rhi, loftsample_gamelauncher_log_monitor):
    """
    Launches LoftSample.GameLauncher with frame-time recording enabled and waits
    for the in-game script to report that profile data capture has completed.
    :param self: the calling test-class instance (no instance state is read).
    :param request: pytest request fixture, used to register process teardown.
    :param workspace: ly_test_tools workspace fixture providing build/project paths.
    :param rhi: render hardware interface to benchmark (e.g. 'dx12', 'vulkan').
    :param loftsample_gamelauncher_log_monitor: log monitor fixture watching Game.log.
    :raises LoftSampleException: if the completion line is not seen before the timeout.
    """
    benchmark_name = f'LoftSample_{rhi}'
    # Join only the executable path; the arguments are plain string concatenation
    # (the original joined the whole command line, which only worked incidentally).
    launcher_exe = os.path.join(workspace.paths.build_directory(), 'LoftSample.GameLauncher.exe')
    cmd = (f'{launcher_exe} '
           f'--project-path={workspace.paths.project()} '
           f'--rhi {rhi} '
           '--regset="/O3DE/Performance/FrameTimeRecording/Activate=true" '
           '--regset="/O3DE/Performance/FrameTimeRecording/QuitOnComplete=false" '
           f'--regset="/O3DE/Performance/FrameTimeRecording/ProfileName={benchmark_name}" '
           '+loadlevel levels/archvis/loft/interior_03.spawnable')

    def teardown():
        # Ensure the launcher and AssetProcessor are killed even if the test fails mid-run.
        process_utils.kill_processes_named(['AssetProcessor', 'LoftSample.GameLauncher'], ignore_extensions=True)
    request.addfinalizer(teardown)

    # Delete any pre-existing benchmark data so a previous run cannot pollute results.
    benchmark_data_folder = [os.path.join(
            workspace.paths.project(), "user", "Scripts", "PerformanceBenchmarks", benchmark_name)]
    file_system.delete(benchmark_data_folder, True, True)

    # Execute test. The process handle is intentionally not waited on; completion is
    # detected by watching Game.log for the script's "complete" line instead.
    subprocess.Popen(cmd, stderr=subprocess.STDOUT, encoding='UTF-8', shell=True)
    try:
        expected_lines = ["(Script) - OutputProfileData complete"]
        loftsample_gamelauncher_log_monitor.monitor_log_for_lines(expected_lines, timeout=180)
    except ly_test_tools.log.log_monitor.LogMonitorException as e:
        # Chain the original error so the log-monitor traceback is preserved.
        raise LoftSampleException(f'Data capturing did not complete in time for RHI {rhi}, got error: {e}') from e
+
def LoftSampleFrameTimingTest_SendBenchmarkMetrics_Common(
        workspace, launcher_platform, rhi):
    """
    Gathers the benchmark metrics and uses filebeat to send the metrics data.
    :param workspace: ly_test_tools workspace fixture for the benchmark project.
    :param launcher_platform: platform the launcher ran on (e.g. 'windows').
    :param rhi: render hardware interface the metrics were captured with.
    """
    BenchmarkDataAggregator(workspace, logger, 'periodic').upload_metrics(f'{launcher_platform}_{rhi}')
+

+ 8 - 68
Project/Code/PythonTests/Automated/benchmark_runner_periodic_suite_dx12.py

@@ -4,87 +4,27 @@ For complete copyright and license terms please see the LICENSE at the root of t
 
 SPDX-License-Identifier: Apache-2.0 OR MIT
 """
-import logging
-import os
-import subprocess
-import psutil
-import time
 
 import pytest
-
-import ly_test_tools.environment.process_utils as process_utils
-import ly_test_tools.environment.file_system as file_system
 import ly_test_tools.launchers.platforms.base
-from ly_test_tools.benchmark.data_aggregator import BenchmarkDataAggregator
-
-logger = logging.getLogger(__name__)
-
-def filebeat_service_running():
-    """
-    Checks if the filebeat service is currently running on the OS.
-    :return: True if filebeat service detected and running, False otherwise.
-    """
-    result = False
-    try:
-        filebeat_service = psutil.win_service_get('filebeat')
-        filebeat_service_info = filebeat_service.as_dict()
-        if filebeat_service_info['status'] == 'running':
-            result = True
-    except psutil.NoSuchProcess:
-        return result
-
-    return result
-
-
-class LoftSampleException(Exception):
-    """Custom Exception class for LoftSample tests."""
-    pass
-
+from Automated.benchmark_runner_periodic_suite_common import LoftSampleFrameTimingTest_GatherBenchmarkMetrics_Common
+from Automated.benchmark_runner_periodic_suite_common import LoftSampleFrameTimingTest_SendBenchmarkMetrics_Common
+from Automated.benchmark_runner_periodic_suite_common import filebeat_service_running
 
 @pytest.mark.parametrize('launcher_platform', ['windows'])
[email protected]("project", ["LoftSample"])
[email protected]('rhi', ['dx12'])
 @pytest.mark.usefixtures("clean_loftsample_gamelauncher_logs", "loftsample_gamelauncher_log_monitor")
 class TestPerformanceBenchmarksPeriodicSuite:
 
-    @pytest.mark.parametrize('rhi', ['-rhi=dx12'])
     def test_LoftSampleFrameTimingTest_GatherBenchmarkMetrics_DX12(
             self, request, workspace, launcher_platform, rhi, loftsample_gamelauncher_log_monitor):
-        benchmark_name = 'LoftSample_dx12'
-        cmd = os.path.join(workspace.paths.build_directory(),
-                           'LoftSample.GameLauncher.exe '
-                           f'--project-path={workspace.paths.project()} '
-                           '--rhi dx12 '
-                           '--regset="/O3DE/Performance/FrameTimeRecording/Activate=true" '
-                           '--regset="/O3DE/Performance/FrameTimeRecording/QuitOnComplete=false" '
-                           f'--regset="/O3DE/Performance/FrameTimeRecording/ProfileName={benchmark_name}" '
-                           '+loadlevel levels/archvis/loft/interior_03.spawnable')
-
-        def teardown():
-            process_utils.kill_processes_named(['AssetProcessor', 'LoftSample.GameLauncher'], ignore_extensions=True)
-        request.addfinalizer(teardown)
-
-        # delete any pre-existing data
-        benchmark_data_folder = [os.path.join(
-                workspace.paths.project(), "user", "Scripts", "PerformanceBenchmarks", benchmark_name)]
-        file_system.delete(benchmark_data_folder, True, True)
-
-
-        # Execute test.
-        launcherPid = subprocess.Popen(cmd, stderr=subprocess.STDOUT, encoding='UTF-8', shell=True).pid
-        time.sleep(30) # Game.log doesn't exist for about 30 seconds after the process launches
-        try:
-            expected_lines = ["(Script) - OutputProfileData complete"]
-            loftsample_gamelauncher_log_monitor.monitor_log_for_lines(expected_lines, timeout=180)
-        except ly_test_tools.log.log_monitor.LogMonitorException as e:
-            raise LoftSampleException(f'Data capturing did not complete in time for RHI {rhi}, got error: {e}')
+            LoftSampleFrameTimingTest_GatherBenchmarkMetrics_Common(self, request, workspace, rhi, loftsample_gamelauncher_log_monitor)
 
     @pytest.mark.skipif(not filebeat_service_running(), reason="filebeat service not running")
     def test_LoftSampleFrameTimingTest_SendBenchmarkMetrics_DX12(
-            self, request, editor, workspace, project, launcher_platform, level):
+            self, request, editor, workspace, project, launcher_platform, rhi, level):
         """
         Gathers the DX12 benchmark metrics and uses filebeat to send the metrics data.
         """
-
-        aggregator = BenchmarkDataAggregator(workspace, logger, 'periodic')
-        aggregator.upload_metrics('windows_dx12')
-
+        LoftSampleFrameTimingTest_SendBenchmarkMetrics_Common(
+                    workspace, launcher_platform, rhi)

+ 3 - 3
Project/Code/PythonTests/conftest.py

@@ -4,7 +4,7 @@ For complete copyright and license terms please see the LICENSE at the root of t
 
 SPDX-License-Identifier: Apache-2.0 OR MIT
 
-pytest test configuration file for launching AtomSampleViewerStandalone tests.
+pytest test configuration file for launching LoftSample.GameLauncher tests.
 """
 
 import logging
@@ -22,7 +22,7 @@ logger = logging.getLogger(__name__)
 
 @pytest.fixture(scope="function", autouse=True)
 def clean_loftsample_gamelauncher_logs(request, workspace):
-    """Deletes any AtomSampleViewer log files so that the test run can start with empty logs."""
+    """Deletes any LoftSample log files so that the test run can start with empty logs."""
     logs = ['Game.log']
     logger.info(f'Deleting log files for LoftSample.GameLauncher tests: {logs}')
 
@@ -43,7 +43,7 @@ def loftsample_gamelauncher_log_monitor(request, workspace):
     launcher = ly_test_tools.launchers.platforms.base.Launcher(workspace, [])  # Needed for log monitor to work.
     launcher.is_alive = types.MethodType(is_alive, launcher)
     file_to_monitor = os.path.join(workspace.paths.project_log(), 'Game.log')
-    log_monitor = ly_test_tools.log.log_monitor.LogMonitor(launcher=launcher, log_file_path=file_to_monitor)
+    log_monitor = ly_test_tools.log.log_monitor.LogMonitor(launcher=launcher, log_file_path=file_to_monitor,  log_creation_max_wait_time=40)
     log_monitor.file_to_monitor = file_to_monitor
 
     return log_monitor