  1. """
  2. Copyright (c) Contributors to the Open 3D Engine Project.
  3. For complete copyright and license terms please see the LICENSE at the root of this distribution.
  4. SPDX-License-Identifier: Apache-2.0 OR MIT
  5. """
  6. import logging
  7. import os
  8. import pytest
  9. import typing
  10. from datetime import datetime
  11. import ly_test_tools.log.log_monitor
  12. from AWS.common import constants
  13. from AWS.common.resource_mappings import AWS_RESOURCE_MAPPINGS_ACCOUNT_ID_KEY
  14. from .aws_metrics_custom_thread import AWSMetricsThread
  15. # fixture imports
  16. from assetpipeline.ap_fixtures.asset_processor_fixture import asset_processor
  17. from .aws_metrics_utils import aws_metrics_utils
  18. AWS_METRICS_FEATURE_NAME = 'AWSMetrics'
  19. logger = logging.getLogger(__name__)
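# The tests below run slow AWS verification steps on AWSMetricsThread workers and rely on
# join() re-raising any exception (including assertion failures) raised on the worker, so
# a failed check on a worker thread still fails the test. A minimal sketch of that pattern
# is shown here for context; it is an assumption about aws_metrics_custom_thread, not a
# copy of its implementation.
import threading


class _ExceptionPropagatingThreadSketch(threading.Thread):
    """Illustrative only: a thread that re-raises the target's exception on join()."""

    def run(self):
        self.exc = None
        try:
            super().run()
        except BaseException as exc:  # also captures AssertionError from test helpers
            self.exc = exc

    def join(self, timeout=None):
        super().join(timeout)
        if self.exc:
            raise self.exc
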
def _setup(launcher: pytest.fixture, asset_processor: pytest.fixture) -> pytest.fixture:
    """
    Set up the resource mapping configuration and start the log monitor.

    :param launcher: Client launcher for running the test level.
    :param asset_processor: asset_processor fixture.
    :return: Log monitor object.
    """
    asset_processor.start()
    asset_processor.wait_for_idle()

    file_to_monitor = os.path.join(launcher.workspace.paths.project_log(), constants.GAME_LOG_NAME)

    # Initialize the log monitor.
    log_monitor = ly_test_tools.log.log_monitor.LogMonitor(launcher=launcher, log_file_path=file_to_monitor)

    return log_monitor

def monitor_metrics_submission(log_monitor: pytest.fixture) -> None:
    """
    Monitor the messages and notifications for submitting metrics.

    :param log_monitor: Log monitor to check the log messages.
    """
    expected_lines = [
        '(Script) - Submitted metrics without buffer.',
        '(Script) - Submitted metrics with buffer.',
        '(Script) - Flushed the buffered metrics.',
        '(Script) - Metrics is sent successfully.'
    ]

    unexpected_lines = [
        '(Script) - Failed to submit metrics without buffer.',
        '(Script) - Failed to submit metrics with buffer.',
        '(Script) - Failed to send metrics.'
    ]

    result = log_monitor.monitor_log_for_lines(
        expected_lines=expected_lines,
        unexpected_lines=unexpected_lines,
        halt_on_unexpected=True)

    # Assert the log monitor detected expected lines and did not detect any unexpected lines.
    assert result, (
        f'Log monitoring failed. Used expected_lines values: {expected_lines} & '
        f'unexpected_lines values: {unexpected_lines}')

def query_metrics_from_s3(aws_metrics_utils: pytest.fixture, resource_mappings: pytest.fixture) -> None:
    """
    Verify that the metrics events are delivered to the S3 bucket and can be queried.

    :param aws_metrics_utils: aws_metrics_utils fixture.
    :param resource_mappings: resource_mappings fixture.
    """
    aws_metrics_utils.verify_s3_delivery(
        resource_mappings.get_resource_name_id('AWSMetrics.AnalyticsBucketName')
    )
    logger.info('Metrics are sent to S3.')

    aws_metrics_utils.run_glue_crawler(
        resource_mappings.get_resource_name_id('AWSMetrics.EventsCrawlerName'))

    # Remove the events_json table if it exists so that the sample query can create a table with the same name.
    aws_metrics_utils.try_delete_table(
        resource_mappings.get_resource_name_id('AWSMetrics.EventDatabaseName'), 'events_json')
    aws_metrics_utils.run_named_queries(
        resource_mappings.get_resource_name_id('AWSMetrics.AthenaWorkGroupName'))
    logger.info('Queried metrics from S3 successfully.')

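# For context: run_glue_crawler (from the aws_metrics_utils fixture) presumably starts the
# events crawler and blocks until the run completes. A rough boto3 sketch of that flow is
# shown below; the fixture's actual polling and error handling may differ.
def _run_glue_crawler_sketch(events_crawler_name: str) -> None:
    import time

    import boto3

    glue = boto3.client('glue')
    glue.start_crawler(Name=events_crawler_name)
    # The crawler returns to the READY state once the run has finished.
    while glue.get_crawler(Name=events_crawler_name)['Crawler']['State'] != 'READY':
        time.sleep(10)
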
def verify_operational_metrics(aws_metrics_utils: pytest.fixture,
                               resource_mappings: pytest.fixture, start_time: datetime) -> None:
    """
    Verify that operational health metrics are delivered to CloudWatch.

    :param aws_metrics_utils: aws_metrics_utils fixture.
    :param resource_mappings: resource_mappings fixture.
    :param start_time: Time when the game launcher starts.
    """
    aws_metrics_utils.verify_cloud_watch_delivery(
        'AWS/Lambda',
        'Invocations',
        [{'Name': 'FunctionName',
          'Value': resource_mappings.get_resource_name_id('AWSMetrics.AnalyticsProcessingLambdaName')}],
        start_time)
    logger.info('AnalyticsProcessingLambda metrics are sent to CloudWatch.')

    aws_metrics_utils.verify_cloud_watch_delivery(
        'AWS/Lambda',
        'Invocations',
        [{'Name': 'FunctionName',
          'Value': resource_mappings.get_resource_name_id('AWSMetrics.EventProcessingLambdaName')}],
        start_time)
    logger.info('EventsProcessingLambda metrics are sent to CloudWatch.')

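# For context: verify_cloud_watch_delivery presumably asserts that data points exist for
# the given namespace/metric/dimensions since start_time. A rough boto3 sketch of such a
# check; the fixture's actual implementation may poll or filter differently.
def _has_cloud_watch_datapoints_sketch(namespace: str, metric_name: str,
                                       dimensions: list, start_time: datetime) -> bool:
    import boto3

    cloudwatch = boto3.client('cloudwatch')
    response = cloudwatch.get_metric_statistics(
        Namespace=namespace,
        MetricName=metric_name,
        Dimensions=dimensions,
        StartTime=start_time,
        EndTime=datetime.utcnow(),
        Period=60,  # one-minute granularity
        Statistics=['Sum'])
    return len(response['Datapoints']) > 0
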
def update_kinesis_analytics_application_status(aws_metrics_utils: pytest.fixture,
                                                resource_mappings: pytest.fixture, start_application: bool) -> None:
    """
    Start or stop the Kinesis analytics application.

    :param aws_metrics_utils: aws_metrics_utils fixture.
    :param resource_mappings: resource_mappings fixture.
    :param start_application: Whether to start or stop the application.
    """
    if start_application:
        aws_metrics_utils.start_kinesis_data_analytics_application(
            resource_mappings.get_resource_name_id('AWSMetrics.AnalyticsApplicationName'))
    else:
        aws_metrics_utils.stop_kinesis_data_analytics_application(
            resource_mappings.get_resource_name_id('AWSMetrics.AnalyticsApplicationName'))

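# For context: the start/stop helpers on the fixture presumably wrap the Kinesis Data
# Analytics control plane. A rough boto3 sketch, assuming a Kinesis Data Analytics v2
# application (the fixture likely also waits for the status transition to complete):
def _set_analytics_application_running_sketch(application_name: str, run: bool) -> None:
    import boto3

    client = boto3.client('kinesisanalyticsv2')
    if run:
        client.start_application(ApplicationName=application_name)
    else:
        client.stop_application(ApplicationName=application_name)
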
@pytest.mark.SUITE_awsi
@pytest.mark.usefixtures('automatic_process_killer')
@pytest.mark.usefixtures('aws_credentials')
@pytest.mark.usefixtures('resource_mappings')
@pytest.mark.parametrize('assume_role_arn', [constants.ASSUME_ROLE_ARN])
@pytest.mark.parametrize('feature_name', [AWS_METRICS_FEATURE_NAME])
@pytest.mark.parametrize('profile_name', ['AWSAutomationTest'])
@pytest.mark.parametrize('project', ['AutomatedTesting'])
@pytest.mark.parametrize('region_name', [constants.AWS_REGION])
@pytest.mark.parametrize('resource_mappings_filename', [constants.AWS_RESOURCE_MAPPING_FILE_NAME])
@pytest.mark.parametrize('session_name', [constants.SESSION_NAME])
@pytest.mark.parametrize('stacks', [[f'{constants.AWS_PROJECT_NAME}-{AWS_METRICS_FEATURE_NAME}-{constants.AWS_REGION}']])
class TestAWSMetricsWindows(object):
    """
    Test class to verify the real-time and batch analytics for metrics.
    """
    @pytest.mark.parametrize('level', ['levels/aws/metrics/metrics.spawnable'])
    def test_realtime_and_batch_analytics(self,
                                          level: str,
                                          launcher: pytest.fixture,
                                          asset_processor: pytest.fixture,
                                          workspace: pytest.fixture,
                                          aws_utils: pytest.fixture,
                                          resource_mappings: pytest.fixture,
                                          aws_metrics_utils: pytest.fixture):
        """
        Verify that the metrics events are sent to CloudWatch and S3 for analytics.
        """
        # Start the Kinesis analytics application on a separate thread to avoid blocking the test.
        kinesis_analytics_application_thread = AWSMetricsThread(target=update_kinesis_analytics_application_status,
                                                                args=(aws_metrics_utils, resource_mappings, True))
        kinesis_analytics_application_thread.start()

        log_monitor = _setup(launcher=launcher, asset_processor=asset_processor)

        # The Kinesis analytics application needs to be in the running state before we start the game launcher.
        kinesis_analytics_application_thread.join()

        launcher.args = ['+LoadLevel', level]
        # Use the null renderer so the launcher can run without a GPU.
        launcher.args.extend(['-rhi=null'])
        start_time = datetime.utcnow()
        with launcher.start(launch_ap=False):
            monitor_metrics_submission(log_monitor)

            # Verify that real-time analytics metrics are delivered to CloudWatch.
            aws_metrics_utils.verify_cloud_watch_delivery(
                AWS_METRICS_FEATURE_NAME,
                'TotalLogins',
                [],
                start_time)
            logger.info('Real-time metrics are sent to CloudWatch.')

        # Run time-consuming operations on separate threads to avoid blocking the test.
        operational_threads = list()
        operational_threads.append(
            AWSMetricsThread(target=query_metrics_from_s3,
                             args=(aws_metrics_utils, resource_mappings)))
        operational_threads.append(
            AWSMetricsThread(target=verify_operational_metrics,
                             args=(aws_metrics_utils, resource_mappings, start_time)))
        operational_threads.append(
            AWSMetricsThread(target=update_kinesis_analytics_application_status,
                             args=(aws_metrics_utils, resource_mappings, False)))
        for thread in operational_threads:
            thread.start()
        for thread in operational_threads:
            thread.join()

    @pytest.mark.parametrize('level', ['levels/aws/metrics/metrics.spawnable'])
    def test_realtime_and_batch_analytics_no_global_accountid(self,
                                                              level: str,
                                                              launcher: pytest.fixture,
                                                              asset_processor: pytest.fixture,
                                                              workspace: pytest.fixture,
                                                              aws_utils: pytest.fixture,
                                                              resource_mappings: pytest.fixture,
                                                              aws_metrics_utils: pytest.fixture):
        """
        Verify that the metrics events are sent to CloudWatch and S3 for analytics
        when the global account ID is removed from the resource mappings.
        """
        # Remove the top-level account ID from the resource mappings.
        resource_mappings.clear_select_keys([AWS_RESOURCE_MAPPINGS_ACCOUNT_ID_KEY])

        # Start the Kinesis analytics application on a separate thread to avoid blocking the test.
        kinesis_analytics_application_thread = AWSMetricsThread(target=update_kinesis_analytics_application_status,
                                                                args=(aws_metrics_utils, resource_mappings, True))
        kinesis_analytics_application_thread.start()

        log_monitor = _setup(launcher=launcher, asset_processor=asset_processor)

        # The Kinesis analytics application needs to be in the running state before we start the game launcher.
        kinesis_analytics_application_thread.join()

        launcher.args = ['+LoadLevel', level]
        launcher.args.extend(['-rhi=null'])
        start_time = datetime.utcnow()
        with launcher.start(launch_ap=False):
            monitor_metrics_submission(log_monitor)

            # Verify that real-time analytics metrics are delivered to CloudWatch.
            aws_metrics_utils.verify_cloud_watch_delivery(
                AWS_METRICS_FEATURE_NAME,
                'TotalLogins',
                [],
                start_time)
            logger.info('Real-time metrics are sent to CloudWatch.')

        # Run time-consuming operations on separate threads to avoid blocking the test.
        operational_threads = list()
        operational_threads.append(
            AWSMetricsThread(target=query_metrics_from_s3,
                             args=(aws_metrics_utils, resource_mappings)))
        operational_threads.append(
            AWSMetricsThread(target=verify_operational_metrics,
                             args=(aws_metrics_utils, resource_mappings, start_time)))
        operational_threads.append(
            AWSMetricsThread(target=update_kinesis_analytics_application_status,
                             args=(aws_metrics_utils, resource_mappings, False)))
        for thread in operational_threads:
            thread.start()
        for thread in operational_threads:
            thread.join()

    @pytest.mark.parametrize('level', ['levels/aws/metrics/metrics.spawnable'])
    def test_unauthorized_user_request_rejected(self,
                                                level: str,
                                                launcher: pytest.fixture,
                                                asset_processor: pytest.fixture,
                                                workspace: pytest.fixture):
        """
        Verify that unauthorized users cannot send metrics events to the AWS backend.
        """
        log_monitor = _setup(launcher=launcher, asset_processor=asset_processor)

        # Set invalid AWS credentials (the example credentials from the AWS documentation).
        launcher.args = ['+LoadLevel', level, '+cl_awsAccessKey', 'AKIAIOSFODNN7EXAMPLE',
                         '+cl_awsSecretKey', 'wJalrXUtnFEMI/K7MDENG/bPxRfiCYEXAMPLEKEY']
        launcher.args.extend(['-rhi=null'])

        with launcher.start(launch_ap=False):
            result = log_monitor.monitor_log_for_lines(
                expected_lines=['(Script) - Failed to send metrics.'],
                unexpected_lines=['(Script) - Metrics is sent successfully.'],
                halt_on_unexpected=True)
            assert result, 'Metrics events were sent successfully by an unauthorized user.'
            logger.info('Request from an unauthorized user was rejected.')

    def test_clean_up_s3_bucket(self,
                                aws_utils: pytest.fixture,
                                resource_mappings: pytest.fixture,
                                aws_metrics_utils: pytest.fixture):
        """
        Clear the analytics bucket objects so that the S3 bucket can be destroyed during teardown.
        """
        aws_metrics_utils.empty_bucket(
            resource_mappings.get_resource_name_id('AWSMetrics.AnalyticsBucketName'))

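
# For context: empty_bucket presumably deletes every object (and, if the bucket is
# versioned, every object version) so that CloudFormation can delete the bucket during
# teardown. A rough boto3 sketch of that clean-up:
def _empty_bucket_sketch(bucket_name: str) -> None:
    import boto3

    bucket = boto3.resource('s3').Bucket(bucket_name)
    # The collection delete() calls issue batched DeleteObjects requests under the hood.
    bucket.object_versions.delete()
    bucket.objects.all().delete()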