// Jenkinsfile — o3de-atom-sampleviewer CI pipeline (pasted line-number gutter removed)
  1. #!/usr/bin/env groovy
  2. /*
  3. * Copyright (c) Contributors to the Open 3D Engine Project.
  4. * For complete copyright and license terms please see the LICENSE at the root of this distribution.
  5. *
  6. * SPDX-License-Identifier: Apache-2.0 OR MIT
  7. *
  8. */
  9. import groovy.json.JsonOutput
  10. PIPELINE_CONFIG_FILE = 'scripts/build/Jenkins/lumberyard.json'
  11. INCREMENTAL_BUILD_SCRIPT_PATH = 'scripts/build/bootstrap/incremental_build_util.py'
  12. PIPELINE_RETRY_ATTEMPTS = 3
  13. EMPTY_JSON = readJSON text: '{}'
  14. PROJECT_REPOSITORY_NAME = 'o3de-atom-sampleviewer'
  15. PROJECT_ORGANIZATION_NAME = 'o3de'
  16. ENGINE_REPOSITORY_NAME = 'o3de'
  17. ENGINE_ORGANIZATION_NAME = 'o3de'
  18. ENGINE_BRANCH_DEFAULT = "${env.BRANCH_DEFAULT}" ?: "${env.BRANCH_NAME}"
  19. // Branches with build snapshots
  20. BUILD_SNAPSHOTS = ['development', 'stabilization/2205']
  21. // Build snapshots with empty snapshot (for use with 'SNAPSHOT' pipeline parameter)
  22. BUILD_SNAPSHOTS_WITH_EMPTY = BUILD_SNAPSHOTS + ''
  23. // The default build snapshot to be selected in the 'SNAPSHOT' pipeline parameter
  24. DEFAULT_BUILD_SNAPSHOT = BUILD_SNAPSHOTS_WITH_EMPTY.get(0)
  25. // Branches with build snapshots as comma separated value string
  26. env.BUILD_SNAPSHOTS = BUILD_SNAPSHOTS.join(",")
// Accumulates job properties (filled in later by the pipeline body).
def pipelineProperties = []
// User-facing build parameters. Jenkins exposes each parameter to build jobs
// as an environment variable of the same name.
def pipelineParameters = [
    // Build/clean Parameters
    // The CLEAN_OUTPUT_DIRECTORY is used by ci_build scripts. Creating the parameter here passes it as an environment variable to jobs and is consumed that way
    booleanParam(defaultValue: false, description: 'Deletes the contents of the output directory before building. This will cause a \"clean\" build. NOTE: does not imply CLEAN_ASSETS', name: 'CLEAN_OUTPUT_DIRECTORY'),
    booleanParam(defaultValue: false, description: 'Deletes the contents of the output directories of the AssetProcessor before building.', name: 'CLEAN_ASSETS'),
    booleanParam(defaultValue: false, description: 'Deletes the contents of the workspace and forces a complete pull.', name: 'CLEAN_WORKSPACE'),
    booleanParam(defaultValue: false, description: 'Recreates the volume used for the workspace. The volume will be created out of a snapshot taken from main.', name: 'RECREATE_VOLUME'),
    // Which engine branch/commit to pair with this project branch; defaults to the matching branch name.
    stringParam(defaultValue: "${ENGINE_BRANCH_DEFAULT}", description: 'Sets a different branch from o3de engine repo to use or use commit id. Default is branchname', trim: true, name: 'ENGINE_BRANCH')
]
  37. def palSh(cmd, lbl = '', winSlashReplacement = true) {
  38. if (env.IS_UNIX) {
  39. sh label: lbl,
  40. script: cmd
  41. } else if (winSlashReplacement) {
  42. bat label: lbl,
  43. script: cmd.replace('/','\\')
  44. } else {
  45. bat label: lbl,
  46. script: cmd
  47. }
  48. }
  49. def palMkdir(path) {
  50. if (env.IS_UNIX) {
  51. sh label: "Making directories ${path}",
  52. script: "mkdir -p ${path}"
  53. } else {
  54. def win_path = path.replace('/','\\')
  55. bat label: "Making directories ${win_path}",
  56. script: "mkdir ${win_path}."
  57. }
  58. }
  59. def palRm(path) {
  60. if (env.IS_UNIX) {
  61. sh label: "Removing ${path}",
  62. script: "rm ${path}"
  63. } else {
  64. def win_path = path.replace('/','\\')
  65. bat label: "Removing ${win_path}",
  66. script: "del /Q ${win_path}"
  67. }
  68. }
  69. def palRmDir(path) {
  70. if (env.IS_UNIX) {
  71. sh label: "Removing ${path}",
  72. script: "rm -rf ${path}"
  73. } else {
  74. def win_path = path.replace('/','\\')
  75. bat label: "Removing ${win_path}",
  76. script: "rd /s /q ${win_path}"
  77. }
  78. }
  79. def IsPullRequest(branchName) {
  80. // temporarily using the name to detect if we are in a PR
  81. // In the future we will check with github
  82. return branchName.startsWith('PR-')
  83. }
  84. def IsJobEnabled(branchName, buildTypeMap, pipelineName, platformName) {
  85. if (IsPullRequest(branchName)) {
  86. return buildTypeMap.value.TAGS && buildTypeMap.value.TAGS.contains(pipelineName)
  87. }
  88. def job_list_override = params.JOB_LIST_OVERRIDE ? params.JOB_LIST_OVERRIDE.tokenize(',') : ''
  89. if (!job_list_override.isEmpty()) {
  90. return params[platformName] && job_list_override.contains(buildTypeMap.key);
  91. } else {
  92. return params[platformName] && buildTypeMap.value.TAGS && buildTypeMap.value.TAGS.contains(pipelineName)
  93. }
  94. }
  95. def IsAPLogUpload(branchName, jobName) {
  96. return !IsPullRequest(branchName) && jobName.toLowerCase().contains('asset') && env.AP_LOGS_S3_BUCKET
  97. }
  98. def GetRunningPipelineName(JENKINS_JOB_NAME) {
  99. // If the job name has an underscore
  100. def job_parts = JENKINS_JOB_NAME.tokenize('/')[0].tokenize('_')
  101. if (job_parts.size() > 1) {
  102. return [job_parts.take(job_parts.size() - 1).join('_'), job_parts[job_parts.size()-1]]
  103. }
  104. return [job_parts[0], 'default']
  105. }
  106. @NonCPS
  107. def RegexMatcher(str, regex) {
  108. def matcher = (str =~ regex)
  109. return matcher ? matcher.group(1) : null
  110. }
// Reads the root pipeline config JSON, then expands the per-platform
// pipeline-environment and build-config JSON files it references into
//   pipelineConfig.platforms[<platform>].PIPELINE_ENV
//   pipelineConfig.platforms[<platform>].build_types
// Each config file is deleted after being read so stale copies cannot linger
// on the mounted workspace volume.
// NOTE(review): pipelineName/branchName are unused in this body — presumably
// kept for interface symmetry; confirm before removing.
// NOTE(review): EMPTY_JSON is a single shared object created once at script
// start; assigning it both to 'platforms' and to each platform entry looks
// like it aliases one object — verify multi-platform configs populate as
// intended before relying on this.
def LoadPipelineConfig(String pipelineName, String branchName) {
    echo 'Loading pipeline config'
    def pipelineConfig = {}
    pipelineConfig = readJSON file: PIPELINE_CONFIG_FILE
    palRm(PIPELINE_CONFIG_FILE)
    pipelineConfig.platforms = EMPTY_JSON
    // Load the pipeline configs per platform
    pipelineConfig.PIPELINE_CONFIGS.each { pipeline_config ->
        // Turn the glob into a regex: the '*' wildcard becomes capture group 1,
        // which is the platform name extracted below.
        def platform_regex = pipeline_config.replace('.','\\.').replace('*', '(.*)')
        if (!env.IS_UNIX) {
            // findFiles yields backslash-separated paths on Windows
            platform_regex = platform_regex.replace('/','\\\\')
        }
        echo "Searching platform pipeline configs in ${pipeline_config} using ${platform_regex}"
        for (pipeline_config_path in findFiles(glob: pipeline_config)) {
            echo "\tFound platform pipeline config ${pipeline_config_path}"
            def platform = RegexMatcher(pipeline_config_path, platform_regex)
            if(platform) {
                pipelineConfig.platforms[platform] = EMPTY_JSON
                pipelineConfig.platforms[platform].PIPELINE_ENV = readJSON file: pipeline_config_path.toString()
            }
            palRm(pipeline_config_path.toString())
        }
    }
    // Load the build configs
    pipelineConfig.BUILD_CONFIGS.each { build_config ->
        def platform_regex = build_config.replace('.','\\.').replace('*', '(.*)')
        if (!env.IS_UNIX) {
            platform_regex = platform_regex.replace('/','\\\\')
        }
        echo "Searching configs in ${build_config} using ${platform_regex}"
        for (build_config_path in findFiles(glob: build_config)) {
            echo "\tFound config ${build_config_path}"
            def platform = RegexMatcher(build_config_path, platform_regex)
            if(platform) {
                pipelineConfig.platforms[platform].build_types = readJSON file: build_config_path.toString()
            }
        }
    }
    return pipelineConfig
}
  151. def GetBuildEnvVars(Map platformEnv, Map buildTypeEnv, String pipelineName) {
  152. def envVarMap = [:]
  153. platformPipelineEnv = platformEnv['ENV'] ?: [:]
  154. platformPipelineEnv.each { var ->
  155. envVarMap[var.key] = var.value
  156. }
  157. platformEnvOverride = platformEnv['PIPELINE_ENV_OVERRIDE'] ?: [:]
  158. platformPipelineEnvOverride = platformEnvOverride[pipelineName] ?: [:]
  159. platformPipelineEnvOverride.each { var ->
  160. envVarMap[var.key] = var.value
  161. }
  162. buildTypeEnv.each { var ->
  163. // This may override the above one if there is an entry defined by the job
  164. envVarMap[var.key] = var.value
  165. }
  166. // Environment that only applies to to Jenkins tweaks.
  167. // For 3rdParty downloads, we store them in the EBS volume so we can reuse them across node
  168. // instances. This allow us to scale up and down without having to re-download 3rdParty
  169. envVarMap['LY_PACKAGE_DOWNLOAD_CACHE_LOCATION'] = "${envVarMap['WORKSPACE']}/3rdParty/downloaded_packages"
  170. envVarMap['LY_PACKAGE_UNPACK_LOCATION'] = "${envVarMap['WORKSPACE']}/3rdParty/packages"
  171. return envVarMap
  172. }
  173. def GetEnvStringList(Map envVarMap) {
  174. def strList = []
  175. envVarMap.each { var ->
  176. strList.add("${var.key}=${var.value}")
  177. }
  178. return strList
  179. }
  180. def getEngineRemoteConfig(remoteConfigs) {
  181. def engineRemoteConfigs = [name: "${ENGINE_REPOSITORY_NAME}",
  182. url: remoteConfigs.url[0]
  183. .replace("${PROJECT_REPOSITORY_NAME}", "${ENGINE_REPOSITORY_NAME}")
  184. .replace("/${PROJECT_ORGANIZATION_NAME}/", "/${ENGINE_ORGANIZATION_NAME}/"),
  185. credentialsId: remoteConfigs.credentialsId[0]
  186. ]
  187. return engineRemoteConfigs
  188. }
// Sparse-checkout only the build/bootstrap script folders from the ENGINE
// repository, so bootstrap can run before the full (large) checkout happens.
def CheckoutBootstrapScripts(String branchName) {
    checkout([$class: 'GitSCM',
        branches: [[name: "*/${branchName}"]],
        doGenerateSubmoduleConfigurations: false,
        extensions: [
            [$class: 'PruneStaleBranch'],
            [$class: 'AuthorInChangelog'],
            // Only these three paths are needed to drive the pipeline itself.
            [$class: 'SparseCheckoutPaths', sparseCheckoutPaths: [
                [ $class: 'SparseCheckoutPath', path: 'scripts/build/Jenkins/' ],
                [ $class: 'SparseCheckoutPath', path: 'scripts/build/bootstrap/' ],
                [ $class: 'SparseCheckoutPath', path: 'scripts/build/Platform' ]
            ]],
            // Shallow checkouts break changelog computation. Do not enable.
            [$class: 'CloneOption', noTags: false, reference: '', shallow: false]
        ],
        submoduleCfg: [],
        userRemoteConfigs: [getEngineRemoteConfig(scm.userRemoteConfigs)]
    ])
}
// Check out both the engine repo and the project repo side by side in the
// current directory, repair repos left locked by volume snapshots, retry the
// checkout with staggered sleeps to dodge GitHub rate limits, and finally
// export CHANGE_ID / CHANGE_DATE from the project repo's HEAD.
def CheckoutRepo(boolean disableSubmodules = false) {
    // Map of target directory -> remote config for the two checkouts.
    def projectsAndUrl = [
        "${ENGINE_REPOSITORY_NAME}": getEngineRemoteConfig(scm.userRemoteConfigs),
        "${PROJECT_REPOSITORY_NAME}": scm.userRemoteConfigs[0]
    ]
    projectsAndUrl.each { projectAndUrl ->
        if(!fileExists(projectAndUrl.key)) {
            palMkdir(projectAndUrl.key)
        }
        dir(projectAndUrl.key) {
            if(fileExists('.git')) {
                // If the repository after checkout is locked, likely we took a snapshot while git was running,
                // to leave the repo in a usable state, garbage collect.
                def indexLockFile = '.git/index.lock'
                if(fileExists(indexLockFile)) {
                    palSh('git gc', 'Git GarbageCollect')
                }
                if(fileExists(indexLockFile)) { // if it is still there, remove it
                    palRm(indexLockFile)
                }
            }
        }
    }
    def random = new Random()
    def retryAttempt = 0
    retry(5) {
        if (retryAttempt > 0) {
            sleep random.nextInt(60 * retryAttempt) // Stagger checkouts to prevent HTTP 429 (Too Many Requests) response from Github
        }
        retryAttempt = retryAttempt + 1
        projectsAndUrl.each { projectAndUrl ->
            dir(projectAndUrl.key) {
                // The project follows scm.branches; the engine follows the
                // ENGINE_BRANCH pipeline parameter (branch name or commit id).
                def branchName = scm.branches
                if(projectAndUrl.key == "${ENGINE_REPOSITORY_NAME}") {
                    branchName = [[name: params.ENGINE_BRANCH]]
                }
                checkout scm: [
                    $class: 'GitSCM',
                    branches: branchName,
                    extensions: [
                        [$class: 'PruneStaleBranch'],
                        [$class: 'AuthorInChangelog'],
                        [$class: 'SubmoduleOption', disableSubmodules: disableSubmodules, recursiveSubmodules: true],
                        [$class: 'CheckoutOption', timeout: 60]
                    ],
                    userRemoteConfigs: [projectAndUrl.value]
                ]
            }
        }
    }
    // CHANGE_ID is used by some scripts to identify uniquely the current change (usually metric jobs)
    dir(PROJECT_REPOSITORY_NAME) {
        palSh('git rev-parse HEAD > commitid', 'Getting commit id')
        env.CHANGE_ID = readFile file: 'commitid'
        env.CHANGE_ID = env.CHANGE_ID.trim()
        palRm('commitid')
        // CHANGE_DATE is used by the installer to provide some ability to sort tagged builds in addition to BRANCH_NAME and CHANGE_ID
        // '%%cI' escapes the percent sign for Windows bat; unix sh gets '%cI'.
        commitDateFmt = '%%cI'
        if (env.IS_UNIX) commitDateFmt = '%cI'
        palSh("git show -s --format=${commitDateFmt} ${env.CHANGE_ID} > commitdate", 'Getting commit date')
        env.CHANGE_DATE = readFile file: 'commitdate'
        env.CHANGE_DATE = env.CHANGE_DATE.trim()
        palRm('commitdate')
    }
}
  273. def HandleDriveMount(String snapshot, String repositoryName, String projectName, String pipeline, String branchName, String platform, String buildType, String workspace, boolean recreateVolume = false) {
  274. unstash name: 'incremental_build_script'
  275. def pythonCmd = ''
  276. if(env.IS_UNIX) pythonCmd = 'sudo -E python3 -u '
  277. else pythonCmd = 'python3 -u '
  278. if(recreateVolume) {
  279. palSh("${pythonCmd} ${INCREMENTAL_BUILD_SCRIPT_PATH} --action delete --repository_name ${repositoryName} --project ${projectName} --pipeline ${pipeline} --branch ${branchName} --platform ${platform} --build_type ${buildType}", 'Deleting volume', winSlashReplacement=false)
  280. }
  281. palSh("${pythonCmd} ${INCREMENTAL_BUILD_SCRIPT_PATH} --action mount --snapshot ${snapshot} --repository_name ${repositoryName} --project ${projectName} --pipeline ${pipeline} --branch ${branchName} --platform ${platform} --build_type ${buildType}", 'Mounting volume', winSlashReplacement=false)
  282. if(env.IS_UNIX) {
  283. sh label: 'Setting volume\'s ownership',
  284. script: """
  285. if sudo test ! -d "${workspace}"; then
  286. sudo mkdir -p ${workspace}
  287. cd ${workspace}/..
  288. sudo chown -R lybuilder:root .
  289. fi
  290. """
  291. }
  292. }
// Shared pre-build setup: mount (or recreate) the EBS workspace volume, clear
// the workspace when requested or when a legacy repo layout is detected,
// check out engine + project, bootstrap python, run the platform clean step,
// and pull git-lfs objects when the engine repo uses LFS.
def PreBuildCommonSteps(Map pipelineConfig, String snapshot, String repositoryName, String projectName, String pipeline, String branchName, String platform, String buildType, String workspace, boolean mount = true, boolean disableSubmodules = false) {
    echo 'Starting pre-build common steps...'
    if (mount) {
        // RECREATE_VOLUME is the pipeline parameter surfaced as an env var.
        if(env.RECREATE_VOLUME?.toBoolean()){
            echo 'Starting to recreating drive...'
            HandleDriveMount(snapshot, repositoryName, projectName, pipeline, branchName, platform, buildType, workspace, true)
        } else {
            echo 'Starting to mounting drive...'
            HandleDriveMount(snapshot, repositoryName, projectName, pipeline, branchName, platform, buildType, workspace, false)
        }
    }
    // Cleanup previous repo location, we are currently at the root of the workspace, if we have a .git folder
    // we need to cleanup. Once all branches take this relocation, we can remove this
    if(env.CLEAN_WORKSPACE?.toBoolean() || fileExists("${workspace}/.git")) {
        if(fileExists(workspace)) {
            palRmDir(workspace)
        }
    }
    dir(workspace) {
        // Add folder where we will store the 3rdParty downloads and packages
        if(!fileExists('3rdParty')) {
            palMkdir('3rdParty')
        }
        CheckoutRepo(disableSubmodules)
    }
    dir("${workspace}/${ENGINE_REPOSITORY_NAME}") {
        // Get python (the engine bundles its own interpreter bootstrap)
        if(env.IS_UNIX) {
            sh label: 'Getting python',
               script: 'python/get_python.sh'
        } else {
            bat label: 'Getting python',
                script: 'python/get_python.bat'
        }
        // Always run the clean step, the scripts detect what variables were set, but it also cleans if
        // the NODE_LABEL has changed
        def command = "${pipelineConfig.PYTHON_DIR}/python"
        if(env.IS_UNIX) command += '.sh'
        else command += '.cmd'
        command += " -u ${pipelineConfig.BUILD_ENTRY_POINT} --platform ${platform} --type clean"
        palSh(command, "Running ${platform} clean")
        if(fileExists('.lfsconfig')) {
            palSh("git lfs install", "LFS config exists. Installing LFS hooks to local repo")
            palSh("git lfs pull", "Pulling new LFS objects")
        }
    }
}
// Run one build job type through the engine's build entry point, with an
// inactivity timeout. When EXECUTE_FROM_PROJECT is unset, the project is
// first registered with the engine so the build can resolve it.
def Build(Map pipelineConfig, String platform, String type, String workspace) {
    // If EXECUTE_FROM_PROJECT is defined, we execute the script from the project instead of from the engine
    // In both cases, the scripts are in the engine, is just what the current dir is and how we get to the scripts
    // NOTE(review): currentDir and pathToEngine are assigned but never used
    // below - the final dir() always targets the engine repo. Looks like
    // leftover or unfinished EXECUTE_FROM_PROJECT support; confirm intent.
    def currentDir = "${workspace}/${ENGINE_REPOSITORY_NAME}"
    def pathToEngine = ""
    // 'activity: true' resets the timer on log output, so only a hung build trips it.
    timeout(time: env.TIMEOUT, unit: 'MINUTES', activity: true) {
        def command = "${pipelineConfig.PYTHON_DIR}/python"
        def ext = ''
        if(env.IS_UNIX) {
            command += '.sh'
            ext = '.sh'
        }
        else command += '.cmd'
        // Setup environment for project execution, otherwise, register the project
        if (env.EXECUTE_FROM_PROJECT?.toBoolean()) {
            currentDir = "${workspace}/${PROJECT_REPOSITORY_NAME}"
            pathToEngine = "../${ENGINE_REPOSITORY_NAME}/"
        } else {
            dir("${workspace}/${ENGINE_REPOSITORY_NAME}") {
                palSh("scripts/o3de${ext} register --project-path ${workspace}/${PROJECT_REPOSITORY_NAME}", "Registering project ${PROJECT_REPOSITORY_NAME}")
            }
        }
        command += " -u ${pipelineConfig.BUILD_ENTRY_POINT} --platform ${platform} --type ${type}"
        dir("${workspace}/${ENGINE_REPOSITORY_NAME}") {
            palSh(command, "Running ${platform} ${type}")
        }
    }
}
// Publish ctest metrics to the MARS ingestion system by checking out the mars
// tooling repo inside the engine workspace and running its scraper.
// Errors are swallowed (catchError with null results) so metrics publishing
// never affects the build outcome.
// NOTE(review): this uses `bat` and %VAR% expansion directly, so it only runs
// on Windows nodes - presumably metrics jobs are Windows-only; confirm.
def TestMetrics(Map pipelineConfig, String workspace, String branchName, String repoName, String buildJobName, String outputDirectory, String configuration) {
    catchError(buildResult: null, stageResult: null) {
        def cmakeBuildDir = [workspace, ENGINE_REPOSITORY_NAME, outputDirectory].join('/')
        dir("${workspace}/${ENGINE_REPOSITORY_NAME}") {
            checkout scm: [
                $class: 'GitSCM',
                branches: [[name: '*/main']],
                extensions: [
                    [$class: 'AuthorInChangelog'],
                    [$class: 'RelativeTargetDirectory', relativeTargetDir: 'mars']
                ],
                userRemoteConfigs: [[url: "${env.MARS_REPO}", name: 'mars', credentialsId: "${env.GITHUB_USER}"]]
            ]
            withCredentials([usernamePassword(credentialsId: "${env.SERVICE_USER}", passwordVariable: 'apitoken', usernameVariable: 'username')]) {
                // %username%/%apitoken% are expanded by cmd.exe at run time so the
                // secrets never appear in the Jenkins log.
                def command = "${pipelineConfig.PYTHON_DIR}/python.cmd -u mars/scripts/python/ctest_test_metric_scraper.py " +
                    '-e jenkins.creds.user %username% -e jenkins.creds.pass %apitoken% ' +
                    "-e jenkins.base_url ${env.JENKINS_URL} " +
                    "${cmakeBuildDir} ${branchName} %BUILD_NUMBER% AR ${configuration} ${repoName} --url ${env.BUILD_URL.replace('%','%%')}"
                bat label: "Publishing ${buildJobName} Test Metrics",
                    script: command
            }
        }
    }
}
// Publish benchmark results via the mars tooling repo's benchmark scraper.
// Errors are swallowed (catchError with null results) so publishing never
// affects the build outcome.
// NOTE(review): the command hardcodes python.cmd (Windows) but is run through
// palSh, which would use `sh` on unix nodes - presumably this stage only runs
// on Windows; confirm before reusing elsewhere.
def BenchmarkMetrics(Map pipelineConfig, String workspace, String branchName, String outputDirectory) {
    catchError(buildResult: null, stageResult: null) {
        def cmakeBuildDir = [workspace, ENGINE_REPOSITORY_NAME, outputDirectory].join('/')
        dir("${workspace}/${ENGINE_REPOSITORY_NAME}") {
            checkout scm: [
                $class: 'GitSCM',
                branches: [[name: '*/main']],
                extensions: [
                    [$class: 'AuthorInChangelog'],
                    [$class: 'RelativeTargetDirectory', relativeTargetDir: 'mars']
                ],
                userRemoteConfigs: [[url: "${env.MARS_REPO}", name: 'mars', credentialsId: "${env.GITHUB_USER}"]]
            ]
            def command = "${pipelineConfig.PYTHON_DIR}/python.cmd -u mars/scripts/python/benchmark_scraper.py ${cmakeBuildDir} ${branchName}"
            palSh(command, "Publishing Benchmark Metrics")
        }
    }
}
// Publish JUnit XML produced under <output>/Testing, then reset the directory
// layout the test runners expect on the next run. Failures are downgraded to
// a stage-level FAILURE so result export never fails the build.
def ExportTestResults(Map options, String platform, String type, String workspace, Map params) {
    catchError(message: "Error exporting tests results (this won't fail the build)", buildResult: 'SUCCESS', stageResult: 'FAILURE') {
        def o3deroot = "${workspace}/${ENGINE_REPOSITORY_NAME}"
        dir("${o3deroot}/${params.OUTPUT_DIRECTORY}") {
            junit testResults: "Testing/**/*.xml", skipPublishingChecks: true
            palRmDir("Testing")
            // Recreate test runner xml directories that need to be pre generated
            palMkdir("Testing/Pytest")
            palMkdir("Testing/Gtest")
        }
    }
}
// Upload screenshot-test output to S3, keyed by project/branch/build/job, so
// failed image comparisons can be inspected. Failures are downgraded so the
// upload never fails the build.
def ExportTestScreenshots(Map options, String branchName, String platformName, String jobName, String workspace, Map params) {
    catchError(message: "Error exporting test screenshots (this won't fail the build)", buildResult: 'SUCCESS', stageResult: 'FAILURE') {
        dir("${workspace}/${ENGINE_REPOSITORY_NAME}") {
            def screenshotsFolder = "${workspace}/${PROJECT_REPOSITORY_NAME}/user/scripts/Screenshots"
            def s3Uploader = "scripts/build/tools/upload_to_s3.py"
            // The double-escaped quotes survive palSh's shell invocation so the
            // regex and JSON extra_args reach the python script intact.
            def command = "${options.PYTHON_DIR}/python.cmd -u ${s3Uploader} --base_dir ${screenshotsFolder} " +
                '--file_regex \\"(.*\$)\\" ' +
                "--bucket ${env.TEST_SCREENSHOT_BUCKET} " +
                "--search_subdirectories True --key_prefix ${PROJECT_REPOSITORY_NAME}/${branchName}/${env.BUILD_NUMBER}/${jobName} " +
                '--extra_args {\\"ACL\\":\\"bucket-owner-full-control\\"}'
            palSh(command, "Uploading test screenshots for ${jobName}")
        }
    }
}
  436. def UploadAPLogs(Map options, String branchName, String platformName, String jobName, String workspace, Map params) {
  437. catchError(message: "Error exporting logs (this won't fail the build)", buildResult: 'SUCCESS', stageResult: 'FAILURE') {
  438. dir("${workspace}/${ENGINE_REPOSITORY_NAME}") {
  439. def apLogsPath = "${workspace}/${PROJECT_REPOSITORY_NAME}/user/log"
  440. def s3UploadScriptPath = "scripts/build/tools/upload_to_s3.py"
  441. if(env.IS_UNIX) {
  442. pythonPath = "${options.PYTHON_DIR}/python.sh"
  443. }
  444. else {
  445. pythonPath = "${options.PYTHON_DIR}/python.cmd"
  446. }
  447. def command = "${pythonPath} -u ${s3UploadScriptPath} --base_dir ${apLogsPath} " +
  448. "--file_regex \".*\" --bucket ${env.AP_LOGS_S3_BUCKET} " +
  449. "--search_subdirectories True --key_prefix ${PROJECT_REPOSITORY_NAME}/${branchName}/${env.BUILD_NUMBER}/${platformName}/${jobName} " +
  450. '--extra_args {\\"ACL\\":\\"bucket-owner-full-control\\"}'
  451. palSh(command, "Uploading AP logs for job ${jobName} for branch ${branchName}", false)
  452. }
  453. }
  454. }
  455. def PostBuildCommonSteps(String workspace, boolean mount = true) {
  456. echo 'Starting post-build common steps...'
  457. if (mount) {
  458. def pythonCmd = ''
  459. if(env.IS_UNIX) pythonCmd = 'sudo -E python3 -u '
  460. else pythonCmd = 'python3 -u '
  461. try {
  462. timeout(5) {
  463. palSh("${pythonCmd} ${INCREMENTAL_BUILD_SCRIPT_PATH} --action unmount", 'Unmounting volume')
  464. }
  465. } catch (Exception e) {
  466. echo "Unmount script error ${e}"
  467. }
  468. }
  469. }
  470. def CreateSetupStage(Map pipelineConfig, String snapshot, String repositoryName, String projectName, String pipelineName, String branchName, String platformName, String jobName, Map environmentVars, boolean onlyMountEBSVolume = false) {
  471. return {
  472. stage('Setup') {
  473. if(onlyMountEBSVolume) {
  474. HandleDriveMount(snapshot, repositoryName, projectName, pipelineName, branchName, platformName, jobName, environmentVars['WORKSPACE'], false)
  475. } else {
  476. PreBuildCommonSteps(pipelineConfig, snapshot, repositoryName, projectName, pipelineName, branchName, platformName, jobName, environmentVars['WORKSPACE'], environmentVars['MOUNT_VOLUME'])
  477. }
  478. }
  479. }
  480. }
  481. def CreateBuildStage(Map pipelineConfig, String platformName, String jobName, Map environmentVars) {
  482. return {
  483. stage("${jobName}") {
  484. Build(pipelineConfig, platformName, jobName, environmentVars['WORKSPACE'])
  485. }
  486. }
  487. }
  488. def CreateTestMetricsStage(Map pipelineConfig, String branchName, Map environmentVars, String buildJobName, String outputDirectory, String configuration) {
  489. return {
  490. stage("${buildJobName}_metrics") {
  491. TestMetrics(pipelineConfig, environmentVars['WORKSPACE'], branchName, env.DEFAULT_REPOSITORY_NAME, buildJobName, outputDirectory, configuration)
  492. BenchmarkMetrics(pipelineConfig, environmentVars['WORKSPACE'], branchName, outputDirectory)
  493. }
  494. }
  495. }
  496. def CreateExportTestResultsStage(Map pipelineConfig, String platformName, String jobName, Map environmentVars, Map params) {
  497. return {
  498. stage("${jobName}_results") {
  499. ExportTestResults(pipelineConfig, platformName, jobName, environmentVars['WORKSPACE'], params)
  500. }
  501. }
  502. }
  503. def CreateExportTestScreenshotsStage(Map pipelineConfig, String branchName, String platformName, String jobName, Map environmentVars, Map params) {
  504. return {
  505. stage("${jobName}_screenshots") {
  506. ExportTestScreenshots(pipelineConfig, branchName, platformName, jobName, environmentVars['WORKSPACE'], params)
  507. }
  508. }
  509. }
  510. def CreateUploadAPLogsStage(Map pipelineConfig, String branchName, String platformName, String jobName, String workspace, Map params) {
  511. return {
  512. stage("${jobName}_upload_ap_logs") {
  513. UploadAPLogs(pipelineConfig, branchName, platformName, jobName, workspace, params)
  514. }
  515. }
  516. }
  517. def CreateTeardownStage(Map environmentVars) {
  518. return {
  519. stage('Teardown') {
  520. PostBuildCommonSteps(environmentVars['WORKSPACE'], environmentVars['MOUNT_VOLUME'])
  521. }
  522. }
  523. }
// Builds the closure that runs one build job (or a multi-step pipe) on a
// single node: setup, build stage(s), failure-time AP-log upload, and a
// finally block that publishes metrics/results/screenshots and tears down.
// The whole node session is retried to survive node disconnects; the final
// result is raised only after the retry block so transient failures on a
// dead node do not mark the build.
// NOTE(review): 'snapshot' (used in CreateSetupStage below) is not a
// parameter here - presumably a script-binding global set by the pipeline
// body outside this chunk; confirm.
// NOTE(review): 'pipelineEnvVars' is assigned without 'def' and therefore
// lives in the script binding; confirm no cross-job interference is possible.
def CreateSingleNode(Map pipelineConfig, def platform, def build_job, Map envVars, String branchName, String pipelineName, String repositoryName, String projectName, boolean onlyMountEBSVolume = false) {
    def nodeLabel = envVars['NODE_LABEL']
    return {
        def currentResult = ''
        def currentException = ''
        retry(PIPELINE_RETRY_ATTEMPTS) {
            node("${nodeLabel}") {
                if(isUnix()) { // Has to happen inside a node
                    envVars['IS_UNIX'] = 1
                }
                withEnv(GetEnvStringList(envVars)) {
                    // Tracks the step currently executing so failure handling
                    // reports the right job name for multi-step pipes.
                    def build_job_name = build_job.key
                    try {
                        CreateSetupStage(pipelineConfig, snapshot, repositoryName, projectName, pipelineName, branchName, platform.key, build_job.key, envVars, onlyMountEBSVolume).call()
                        if(build_job.value.steps) { //this is a pipe with many steps so create all the build stages
                            pipelineEnvVars = GetBuildEnvVars(platform.value.PIPELINE_ENV ?: EMPTY_JSON, build_job.value.PIPELINE_ENV ?: EMPTY_JSON, pipelineName)
                            build_job.value.steps.each { build_step ->
                                build_job_name = build_step
                                // This addition of maps makes it that the right operand will override entries if they overlap with the left operand
                                envVars = pipelineEnvVars + GetBuildEnvVars(platform.value.PIPELINE_ENV ?: EMPTY_JSON, platform.value.build_types[build_step].PIPELINE_ENV ?: EMPTY_JSON, pipelineName)
                                try {
                                    CreateBuildStage(pipelineConfig, platform.key, build_step, envVars).call()
                                }
                                catch (Exception e) {
                                    // Non-blocking steps mark the build unstable instead of failing it.
                                    if (envVars['NONBLOCKING_STEP']?.toBoolean()) {
                                        unstable(message: "Build step ${build_step} failed but it's a non-blocking step in build job ${build_job.key}")
                                    } else {
                                        throw e
                                    }
                                }
                            }
                        } else {
                            CreateBuildStage(pipelineConfig, platform.key, build_job.key, envVars).call()
                        }
                    }
                    catch(Exception e) {
                        if (e instanceof org.jenkinsci.plugins.workflow.steps.FlowInterruptedException) {
                            def causes = e.getCauses().toString()
                            if (causes.contains('RemovedNodeCause')) {
                                error "Node disconnected during build: ${e}" // Error raised to retry stage on a new node
                            }
                        }
                        if (IsAPLogUpload(branchName, build_job_name)) {
                            CreateUploadAPLogsStage(pipelineConfig, branchName, platform.key, build_job_name, envVars['WORKSPACE'], platform.value.build_types[build_job_name].PARAMETERS).call()
                        }
                        // All other errors will be raised outside the retry block
                        currentResult = envVars['ON_FAILURE_MARK'] ?: 'FAILURE'
                        currentException = e.toString()
                    }
                    finally {
                        def params = platform.value.build_types[build_job_name].PARAMETERS
                        if (env.MARS_REPO && params && params.containsKey('TEST_METRICS') && params.TEST_METRICS == 'True') {
                            def output_directory = params.OUTPUT_DIRECTORY
                            def configuration = params.CONFIGURATION
                            CreateTestMetricsStage(pipelineConfig, branchName, envVars, build_job_name, output_directory, configuration).call()
                        }
                        if (params && params.containsKey('TEST_RESULTS') && params.TEST_RESULTS == 'True') {
                            CreateExportTestResultsStage(pipelineConfig, platform.key, build_job_name, envVars, params).call()
                        }
                        if (params && params.containsKey('TEST_SCREENSHOTS') && params.TEST_SCREENSHOTS == 'True' && currentResult == 'FAILURE') {
                            CreateExportTestScreenshotsStage(pipelineConfig, branchName, platform.key, build_job_name, envVars, params).call()
                        }
                        CreateTeardownStage(envVars).call()
                    }
                }
            }
        }
        // https://github.com/jenkinsci/jenkins/blob/master/core/src/main/java/hudson/model/Result.java
        // {SUCCESS,UNSTABLE,FAILURE,NOT_BUILT,ABORTED}
        if (currentResult == 'FAILURE') {
            currentBuild.result = 'FAILURE'
            error "FAILURE: ${currentException}"
        } else if (currentResult == 'UNSTABLE') {
            currentBuild.result = 'UNSTABLE'
            unstable(message: "UNSTABLE: ${currentException}")
        }
    }
}
// Used in CreateBuildJobs() to preprocess the build_job steps to programmatically create
// Node sections with a set of steps that can run on that node.
  604. class PipeStepJobData {
  605. String m_nodeLabel = ""
  606. def m_steps = []
  607. PipeStepJobData(String label) {
  608. this.m_nodeLabel = label
  609. }
  610. def addStep(def step) {
  611. this.m_steps.add(step)
  612. }
  613. }
  614. def CreateBuildJobs(Map pipelineConfig, def platform, def build_job, Map envVars, String branchName, String pipelineName, String repositoryName, String projectName) {
  615. // if this is a pipeline, split jobs based on the NODE_LABEL
  616. if(build_job.value.steps) {
  617. def defaultLabel = envVars['NODE_LABEL']
  618. def lastNodeLable = ""
  619. def jobList = []
  620. def currentIdx = -1;
  621. // iterate the steps to build the order of node label + steps sets.
  622. // Order matters, as it is executed from first to last.
  623. // example layout.
  624. // node A
  625. // step 1
  626. // step 2
  627. // node B
  628. // step 3
  629. // node C
  630. // step 4
  631. build_job.value.steps.each { build_step ->
  632. //if node label defined
  633. if(platform.value.build_types[build_step] && platform.value.build_types[build_step].PIPELINE_ENV &&
  634. platform.value.build_types[build_step].PIPELINE_ENV['NODE_LABEL']) {
  635. //if the last node label doen't match the new one, append it.
  636. if(platform.value.build_types[build_step].PIPELINE_ENV['NODE_LABEL'] != lastNodeLable) {
  637. lastNodeLable = platform.value.build_types[build_step].PIPELINE_ENV['NODE_LABEL']
  638. jobList.add(new PipeStepJobData(lastNodeLable))
  639. currentIdx++
  640. }
  641. }
  642. //no label define, so it needs to run on the default node label
  643. else if(lastNodeLable != defaultLabel) { //if the last node is not the default, append default
  644. lastNodeLable = defaultLabel
  645. jobList.add(new PipeStepJobData(lastNodeLable))
  646. currentIdx++
  647. }
  648. //add the build_step to the current node
  649. jobList[currentIdx].addStep(build_step)
  650. }
  651. return {
  652. jobList.eachWithIndex{ element, idx ->
  653. //update the node label + steps to the discovered data
  654. envVars['NODE_LABEL'] = element.m_nodeLabel
  655. build_job.value.steps = element.m_steps
  656. //no any additional nodes just mount the drive, do not handle clean parameters as that will be done by the first node.
  657. boolean onlyMountEBSVolume = idx != 0;
  658. //add this node
  659. CreateSingleNode(pipelineConfig, platform, build_job, envVars, branchName, pipelineName, repositoryName, projectName, onlyMountEBSVolume).call()
  660. }
  661. }
  662. } else {
  663. return CreateSingleNode(pipelineConfig, platform, build_job, envVars, branchName, pipelineName, repositoryName, projectName)
  664. }
  665. }
// ---------------------------------------------------------------------------
// Top-level pipeline driver: configure parameters/config on the controller,
// fan the build jobs out in parallel, and always send result notifications.
// ---------------------------------------------------------------------------
// Script-level state populated during the 'Setup Pipeline' stage.
def projectName = ''
def pipelineName = ''
def branchName = ''
def pipelineConfig = {}
// Start Pipeline
try {
    stage('Setup Pipeline') {
        node('controller') {
            def envVarList = []
            if(isUnix()) {
                // Flag for downstream scripts that check the node's OS family.
                envVarList.add('IS_UNIX=1')
            }
            withEnv(envVarList) {
                timestamps {
                    // Derive the "org/repo" name from the git remote URL.
                    repositoryUrl = scm.getUserRemoteConfigs()[0].getUrl()
                    // repositoryName is the full repository name
                    repositoryName = (repositoryUrl =~ /https:\/\/github.com\/(.*)\.git/)[0][1]
                    env.REPOSITORY_NAME = repositoryName
                    (projectName, pipelineName) = GetRunningPipelineName(env.JOB_NAME) // env.JOB_NAME is the name of the job given by Jenkins
                    env.PIPELINE_NAME = pipelineName
                    if(env.BRANCH_NAME) {
                        branchName = env.BRANCH_NAME
                    } else {
                        branchName = scm.branches[0].name // for non-multibranch pipelines
                        env.BRANCH_NAME = branchName // so scripts that read this environment have it (e.g. incremental_build_util.py)
                    }
                    if(env.CHANGE_TARGET) {
                        // PR builds
                        // Use the snapshot matching the PR's destination branch when one
                        // exists; otherwise fall back to the default snapshot.
                        // NOTE(review): BUILD_SNAPSHOTS / DEFAULT_BUILD_SNAPSHOT are defined
                        // earlier in this file (outside this chunk).
                        if(BUILD_SNAPSHOTS.contains(env.CHANGE_TARGET)) {
                            snapshot = env.CHANGE_TARGET
                            echo "Snapshot for destination branch \"${env.CHANGE_TARGET}\" found."
                        } else {
                            snapshot = DEFAULT_BUILD_SNAPSHOT
                            echo "Snapshot for destination branch \"${env.CHANGE_TARGET}\" does not exist, defaulting to snapshot \"${snapshot}\""
                        }
                    } else {
                        // Non-PR builds
                        // Expose the snapshot selection as a user-facing build parameter.
                        pipelineParameters.add(choice(defaultValue: DEFAULT_BUILD_SNAPSHOT, name: 'SNAPSHOT', choices: BUILD_SNAPSHOTS_WITH_EMPTY, description: 'Selects the build snapshot to use. A more diverted snapshot will cause longer build times, but will not cause build failures.'))
                        snapshot = env.SNAPSHOT
                        echo "Snapshot \"${snapshot}\" selected."
                    }
                    pipelineProperties.add(disableConcurrentBuilds())
                    def engineBranch = params.ENGINE_BRANCH ?: "${ENGINE_BRANCH_DEFAULT}" // This allows the first run to work with parameters having null value, but use the engine branch parameter afterwards
                    echo "Running repository: \"${repositoryName}\", pipeline: \"${pipelineName}\", branch: \"${branchName}\" on engine branch \"${engineBranch}\"..."
                    CheckoutBootstrapScripts(engineBranch)
                    // Load configs
                    pipelineConfig = LoadPipelineConfig(pipelineName, branchName)
                    // Add each platform as a parameter that the user can disable if needed
                    if (!IsPullRequest(branchName)) {
                        pipelineParameters.add(stringParam(defaultValue: '', description: 'Filters and overrides the list of jobs to run for each of the below platforms (comma-separated). Can\'t be used during a pull request.', name: 'JOB_LIST_OVERRIDE'))
                        pipelineConfig.platforms.each { platform ->
                            pipelineParameters.add(booleanParam(defaultValue: true, description: '', name: platform.key))
                        }
                    }
                    // Add additional Jenkins parameters
                    // Each platform's config may declare extra Jenkins parameters keyed by
                    // pipeline name; materialize them below according to declared type.
                    pipelineConfig.platforms.each { platform ->
                        platformEnv = platform.value.PIPELINE_ENV
                        pipelineJenkinsParameters = platformEnv['PIPELINE_JENKINS_PARAMETERS'] ?: [:]
                        jenkinsParametersToAdd = pipelineJenkinsParameters[pipelineName] ?: [:]
                        jenkinsParametersToAdd.each{ jenkinsParameter ->
                            defaultValue = jenkinsParameter['default_value']
                            // Use last run's value as default value so we can save values in different Jenkins environment
                            if (jenkinsParameter['use_last_run_value']?.toBoolean()) {
                                defaultValue = params."${jenkinsParameter['parameter_name']}" ?: jenkinsParameter['default_value']
                            }
                            // Unknown parameter_type values fall through silently (no default case).
                            switch (jenkinsParameter['parameter_type']) {
                                case 'string':
                                    pipelineParameters.add(stringParam(defaultValue: defaultValue,
                                        description: jenkinsParameter['description'],
                                        name: jenkinsParameter['parameter_name']
                                    ))
                                    break
                                case 'boolean':
                                    pipelineParameters.add(booleanParam(defaultValue: defaultValue,
                                        description: jenkinsParameter['description'],
                                        name: jenkinsParameter['parameter_name']
                                    ))
                                    break
                                case 'password':
                                    pipelineParameters.add(password(defaultValue: defaultValue,
                                        description: jenkinsParameter['description'],
                                        name: jenkinsParameter['parameter_name']
                                    ))
                                    break
                            }
                        }
                    }
                    // Register the accumulated parameters/properties with Jenkins in one shot.
                    pipelineProperties.add(parameters(pipelineParameters))
                    properties(pipelineProperties)
                    // Stash the INCREMENTAL_BUILD_SCRIPT_PATH since all nodes will use it
                    stash name: 'incremental_build_script',
                        includes: INCREMENTAL_BUILD_SCRIPT_PATH
                }
            }
        }
    }
    if(env.BUILD_NUMBER == '1' && !IsPullRequest(branchName)) {
        // Exit pipeline early on the intial build. This allows Jenkins to load the pipeline for the branch and enables users
        // to select build parameters on their first actual build. See https://issues.jenkins.io/browse/JENKINS-41929
        currentBuild.result = 'SUCCESS'
        return
    }
    def someBuildHappened = false
    // Build and Post-Build Testing Stage
    def enginebuildConfigs = [:]
    def platformBuildConfigs =[:]
    // Platform Builds run on EC2
    // First pass: engine builds, keyed "<platform> [<job>]" for the parallel step.
    pipelineConfig.platforms.each { platform ->
        platform.value.build_types.each { build_job ->
            if (IsJobEnabled(branchName, build_job, pipelineName, platform.key)) { // User can filter jobs, jobs are tagged by pipeline
                def envVars = GetBuildEnvVars(platform.value.PIPELINE_ENV ?: EMPTY_JSON, build_job.value.PIPELINE_ENV ?: EMPTY_JSON, pipelineName)
                envVars['JOB_NAME'] = "${branchName}_${platform.key}_${build_job.key}" // backwards compatibility, some scripts rely on this
                def nodeLabel = envVars['NODE_LABEL']
                someBuildHappened = true
                enginebuildConfigs["${platform.key} [${build_job.key}]"] = CreateBuildJobs(pipelineConfig, platform, build_job, envVars, branchName, pipelineName, repositoryName, projectName)
            }
        }
    }
    // Project Platform Builds run on EC2
    // Second pass: same fan-out, but pointed at the project repo via CMAKE_LY_PROJECTS.
    pipelineConfig.platforms.each { platform ->
        platform.value.build_types.each { build_job ->
            if (IsJobEnabled(branchName, build_job, pipelineName, platform.key)) { // User can filter jobs, jobs are tagged by pipeline
                def envVars = GetBuildEnvVars(platform.value.PIPELINE_ENV ?: EMPTY_JSON, build_job.value.PIPELINE_ENV ?: EMPTY_JSON, pipelineName)
                envVars['JOB_NAME'] = "${branchName}_${platform.key}_${build_job.key}" // backwards compatibility, some scripts rely on this
                def nodeLabel = envVars['NODE_LABEL']
                someBuildHappened = true
                envVars['CMAKE_LY_PROJECTS'] = "../${PROJECT_REPOSITORY_NAME}"
                platformBuildConfigs["${platform.key} [${build_job.key}]"] = CreateBuildJobs(pipelineConfig, platform, build_job, envVars, branchName, pipelineName, repositoryName, projectName)
            }
        }
    }
    timestamps {
        // Engine builds first, then project builds. catchError marks the stage
        // FAILURE but keeps the overall build going to the next stage.
        stage("${ENGINE_REPOSITORY_NAME} Build") {
            catchError(buildResult: 'SUCCESS', stageResult: 'FAILURE') {
                parallel enginebuildConfigs // Run parallel builds
            }
        }
        stage("${PROJECT_REPOSITORY_NAME} Build") {
            catchError(buildResult: 'SUCCESS', stageResult: 'FAILURE') {
                parallel platformBuildConfigs // Run parallel builds
            }
        }
        echo 'All builds successful'
    }
    if (!someBuildHappened) {
        // Every job was filtered out or disabled; mark the run accordingly.
        currentBuild.result = 'NOT_BUILT'
    }
}
catch(Exception e) {
    error "Exception: ${e}"
}
finally {
    // Notification is best-effort: nothing in this block may change the result.
    try {
        node('controller') {
            if("${currentBuild.currentResult}" == "SUCCESS") {
                buildFailure = ""
                emailBody = "${BUILD_URL}\nSuccess!"
            } else {
                // Single-quoted on purpose: '${BUILD_FAILURE_ANALYZER}' is expanded by the
                // tm() token-macro step, not by Groovy string interpolation.
                buildFailure = tm('${BUILD_FAILURE_ANALYZER}')
                emailBody = "${BUILD_URL}\n${buildFailure}!"
            }
            if(env.POST_AR_BUILD_SNS_TOPIC) {
                // Publish the build outcome to SNS for downstream automation.
                message_json = [
                    "build_url": env.BUILD_URL,
                    "build_number": env.BUILD_NUMBER,
                    "repository_name": env.REPOSITORY_NAME,
                    "branch_name": env.BRANCH_NAME,
                    "build_result": "${currentBuild.currentResult}",
                    "build_failure": buildFailure,
                    "recreate_volume": env.RECREATE_VOLUME,
                    "clean_output_directory": env.CLEAN_OUTPUT_DIRECTORY,
                    "clean_assets": env.CLEAN_ASSETS
                ]
                snsPublish(
                    topicArn: env.POST_AR_BUILD_SNS_TOPIC,
                    subject:'Build Result',
                    message:JsonOutput.toJson(message_json)
                )
            }
            // Email the requester with the final result.
            emailext (
                body: "${emailBody}",
                subject: "${currentBuild.currentResult}: ${JOB_NAME} - Build # ${BUILD_NUMBER}",
                recipientProviders: [
                    [$class: 'RequesterRecipientProvider']
                ]
            )
        }
    } catch(Exception e) {
        // Deliberately swallowed: a notification failure must not fail the build.
    }
}