#!/usr/bin/env groovy
/*
 * Copyright (c) Contributors to the Open 3D Engine Project.
 * For complete copyright and license terms please see the LICENSE at the root of this distribution.
 *
 * SPDX-License-Identifier: Apache-2.0 OR MIT
 *
 */

PIPELINE_CONFIG_FILE = 'scripts/build/Jenkins/lumberyard.json'
INCREMENTAL_BUILD_SCRIPT_PATH = 'scripts/build/bootstrap/incremental_build_util.py'
EMPTY_JSON = readJSON text: '{}'

PROJECT_REPOSITORY_NAME = 'o3de-netsoaktest'
PROJECT_ORGANIZATION_NAME = 'o3de'
ENGINE_REPOSITORY_NAME = 'o3de'
ENGINE_ORGANIZATION_NAME = 'o3de'
ENGINE_BRANCH_DEFAULT = env.BRANCH_DEFAULT ?: env.BRANCH_NAME // Use BRANCH_DEFAULT when set, otherwise fall back to the current branch

def pipelineProperties = []

def pipelineParameters = [
    // Build/clean parameters
    // CLEAN_OUTPUT_DIRECTORY is used by the ci_build scripts. Creating the parameter here passes it to jobs as an environment variable, which is how they consume it.
    booleanParam(defaultValue: false, description: 'Deletes the contents of the output directory before building. This will cause a "clean" build. NOTE: does not imply CLEAN_ASSETS', name: 'CLEAN_OUTPUT_DIRECTORY'),
    booleanParam(defaultValue: false, description: 'Deletes the contents of the output directories of the AssetProcessor before building.', name: 'CLEAN_ASSETS'),
    booleanParam(defaultValue: false, description: 'Deletes the contents of the workspace and forces a complete pull.', name: 'CLEAN_WORKSPACE'),
    booleanParam(defaultValue: false, description: 'Recreates the volume used for the workspace. The volume will be created out of a snapshot taken from main.', name: 'RECREATE_VOLUME'),
    stringParam(defaultValue: "${ENGINE_BRANCH_DEFAULT}", description: 'Sets a different branch or commit id of the o3de engine repo to use. Defaults to the branch name.', trim: true, name: 'ENGINE_BRANCH')
]
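
// Platform helpers: wrap sh/bat so the same command strings can run on both Unix and Windows agents.
// env.IS_UNIX is set per node; on Windows, forward slashes are converted to backslashes unless the
// caller opts out via winSlashReplacement.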
def PlatformSh(cmd, lbl = '', winSlashReplacement = true) {
    if (env.IS_UNIX) {
        sh label: lbl,
           script: cmd
    } else if (winSlashReplacement) {
        bat label: lbl,
            script: cmd.replace('/','\\')
    } else {
        bat label: lbl,
            script: cmd
    }
}

def PlatformMkdir(path) {
    if (env.IS_UNIX) {
        sh label: "Making directories ${path}",
           script: "mkdir -p ${path}"
    } else {
        def win_path = path.replace('/','\\')
        bat label: "Making directories ${win_path}",
            script: "mkdir ${win_path}."
    }
}

def PlatformRm(path) {
    if (env.IS_UNIX) {
        sh label: "Removing ${path}",
           script: "rm ${path}"
    } else {
        def win_path = path.replace('/','\\')
        bat label: "Removing ${win_path}",
            script: "del /Q ${win_path}"
    }
}

def PlatformRmDir(path) {
    if (env.IS_UNIX) {
        sh label: "Removing ${path}",
           script: "rm -rf ${path}"
    } else {
        def win_path = path.replace('/','\\')
        bat label: "Removing ${win_path}",
            script: "rd /s /q ${win_path}"
    }
}

def IsPullRequest(branchName) {
    // Temporarily use the branch name to detect whether we are in a pull request.
    // In the future this will query GitHub instead.
    return branchName.startsWith('PR-')
}
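
// Decides whether a build job should run: pull requests run only jobs tagged with the current pipeline,
// while regular branches additionally honor the per-platform checkbox parameters and the optional
// JOB_LIST_OVERRIDE filter.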
def IsJobEnabled(branchName, buildTypeMap, pipelineName, platformName) {
    if (IsPullRequest(branchName)) {
        return buildTypeMap.value.TAGS && buildTypeMap.value.TAGS.contains(pipelineName)
    }
    def job_list_override = params.JOB_LIST_OVERRIDE ? params.JOB_LIST_OVERRIDE.tokenize(',') : ''
    if (!job_list_override.isEmpty()) {
        return params[platformName] && job_list_override.contains(buildTypeMap.key)
    } else {
        return params[platformName] && buildTypeMap.value.TAGS && buildTypeMap.value.TAGS.contains(pipelineName)
    }
}
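
// Splits the Jenkins job name into [project, pipeline]: everything before the last underscore is the
// project, the rest is the pipeline. For example, a job named 'myproject_nightly' (illustrative name)
// yields ['myproject', 'nightly']; a name without an underscore falls back to the 'default' pipeline.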
def GetRunningPipelineName(JENKINS_JOB_NAME) {
    // If the job name contains underscores, the segment after the last underscore is the pipeline name
    def job_parts = JENKINS_JOB_NAME.tokenize('/')[0].tokenize('_')
    if (job_parts.size() > 1) {
        return [job_parts.take(job_parts.size() - 1).join('_'), job_parts[job_parts.size() - 1]]
    }
    return [job_parts[0], 'default']
}

@NonCPS
def RegexMatcher(str, regex) {
    def matcher = (str =~ regex)
    return matcher ? matcher.group(1) : null
}
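
// Reads the main pipeline config (lumberyard.json), then discovers the per-platform pipeline and build
// configs it points to via glob patterns, keyed by the platform name captured from each file path.
// The config files are deleted after being read so they don't linger in the controller workspace.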
def LoadPipelineConfig(String pipelineName, String branchName) {
    echo 'Loading pipeline config'
    def pipelineConfig = {}
    pipelineConfig = readJSON file: PIPELINE_CONFIG_FILE
    PlatformRm(PIPELINE_CONFIG_FILE)
    pipelineConfig.platforms = EMPTY_JSON
    // Load the pipeline configs per platform
    pipelineConfig.PIPELINE_CONFIGS.each { pipeline_config ->
        def platform_regex = pipeline_config.replace('.','\\.').replace('*', '(.*)')
        if (!env.IS_UNIX) {
            platform_regex = platform_regex.replace('/','\\\\')
        }
        echo "Searching platform pipeline configs in ${pipeline_config} using ${platform_regex}"
        for (pipeline_config_path in findFiles(glob: pipeline_config)) {
            echo "\tFound platform pipeline config ${pipeline_config_path}"
            def platform = RegexMatcher(pipeline_config_path, platform_regex)
            if (platform) {
                pipelineConfig.platforms[platform] = EMPTY_JSON
                pipelineConfig.platforms[platform].PIPELINE_ENV = readJSON file: pipeline_config_path.toString()
            }
            PlatformRm(pipeline_config_path.toString())
        }
    }
    // Load the build configs
    pipelineConfig.BUILD_CONFIGS.each { build_config ->
        def platform_regex = build_config.replace('.','\\.').replace('*', '(.*)')
        if (!env.IS_UNIX) {
            platform_regex = platform_regex.replace('/','\\\\')
        }
        echo "Searching configs in ${build_config} using ${platform_regex}"
        for (build_config_path in findFiles(glob: build_config)) {
            echo "\tFound config ${build_config_path}"
            def platform = RegexMatcher(build_config_path, platform_regex)
            if (platform) {
                pipelineConfig.platforms[platform].build_types = readJSON file: build_config_path.toString()
            }
        }
    }
    return pipelineConfig
}
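
// Builds the environment for a job by layering, in increasing precedence: the platform's ENV, the
// platform's PIPELINE_ENV_OVERRIDE for the current pipeline, and the build type's own environment.
// It then pins the 3rdParty package cache locations to the workspace volume.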
def GetBuildEnvVars(Map platformEnv, Map buildTypeEnv, String pipelineName) {
    def envVarMap = [:]
    platformPipelineEnv = platformEnv['ENV'] ?: [:]
    platformPipelineEnv.each { var ->
        envVarMap[var.key] = var.value
    }
    platformEnvOverride = platformEnv['PIPELINE_ENV_OVERRIDE'] ?: [:]
    platformPipelineEnvOverride = platformEnvOverride[pipelineName] ?: [:]
    platformPipelineEnvOverride.each { var ->
        envVarMap[var.key] = var.value
    }
    buildTypeEnv.each { var ->
        // This may override the entries above if the job defines its own value
        envVarMap[var.key] = var.value
    }
    // Environment that only applies to Jenkins tweaks.
    // 3rdParty downloads are stored on the EBS volume so they can be reused across node
    // instances. This allows us to scale up and down without re-downloading 3rdParty.
    envVarMap['LY_PACKAGE_DOWNLOAD_CACHE_LOCATION'] = "${envVarMap['WORKSPACE']}/3rdParty/downloaded_packages"
    envVarMap['LY_PACKAGE_UNPACK_LOCATION'] = "${envVarMap['WORKSPACE']}/3rdParty/packages"
    return envVarMap
}

def GetEnvStringList(Map envVarMap) {
    def strList = []
    envVarMap.each { var ->
        strList.add("${var.key}=${var.value}")
    }
    return strList
}

def GetEngineRemoteConfig(remoteConfigs) {
    def engineRemoteConfigs = [name: "${ENGINE_REPOSITORY_NAME}",
                               url: remoteConfigs.url[0]
                                       .replace("${PROJECT_REPOSITORY_NAME}", "${ENGINE_REPOSITORY_NAME}")
                                       .replace("/${PROJECT_ORGANIZATION_NAME}/", "/${ENGINE_ORGANIZATION_NAME}/"),
                               credentialsId: remoteConfigs.credentialsId[0]
    ]
    return engineRemoteConfigs
}
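
// Sparse-checks-out only the build scripts (Jenkins, bootstrap, Platform) from the engine repo, using
// the remote derived above, so the controller can load the pipeline configs before any full checkout.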
def CheckoutBootstrapScripts(String branchName) {
    checkout([$class: 'GitSCM',
        branches: [[name: "*/${branchName}"]],
        doGenerateSubmoduleConfigurations: false,
        extensions: [
            [$class: 'PruneStaleBranch'],
            [$class: 'AuthorInChangelog'],
            [$class: 'SparseCheckoutPaths', sparseCheckoutPaths: [
                [$class: 'SparseCheckoutPath', path: 'scripts/build/Jenkins/'],
                [$class: 'SparseCheckoutPath', path: 'scripts/build/bootstrap/'],
                [$class: 'SparseCheckoutPath', path: 'scripts/build/Platform']
            ]],
            // Shallow checkouts break changelog computation. Do not enable.
            [$class: 'CloneOption', noTags: false, reference: '', shallow: false]
        ],
        submoduleCfg: [],
        userRemoteConfigs: [GetEngineRemoteConfig(scm.userRemoteConfigs)]
    ])
}
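
// Checks out both the engine repo and the project repo side by side in the workspace. Stale git index
// locks (e.g. left behind by volume snapshots) are cleaned first, and checkouts are retried with a
// randomized delay to avoid GitHub rate limiting when many nodes start at once.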
def CheckoutRepo(boolean disableSubmodules = false) {
    def projectsAndUrl = [
        "${ENGINE_REPOSITORY_NAME}": GetEngineRemoteConfig(scm.userRemoteConfigs),
        "${PROJECT_REPOSITORY_NAME}": scm.userRemoteConfigs[0]
    ]
    projectsAndUrl.each { projectAndUrl ->
        if (!fileExists(projectAndUrl.key)) {
            PlatformMkdir(projectAndUrl.key)
        }
        dir(projectAndUrl.key) {
            if (fileExists('.git')) {
                // If the repository is locked after checkout, we likely took a snapshot while git was
                // running; garbage collect to leave the repo in a usable state.
                def indexLockFile = '.git/index.lock'
                if (fileExists(indexLockFile)) {
                    PlatformSh('git gc', 'Git GarbageCollect')
                }
                if (fileExists(indexLockFile)) { // If it is still there, remove it
                    PlatformRm(indexLockFile)
                }
            }
        }
    }
    def random = new Random()
    def retryAttempt = 0
    retry(5) {
        if (retryAttempt > 0) {
            sleep random.nextInt(60 * retryAttempt) // Stagger checkouts to prevent HTTP 429 (Too Many Requests) responses from GitHub
        }
        retryAttempt = retryAttempt + 1
        projectsAndUrl.each { projectAndUrl ->
            dir(projectAndUrl.key) {
                def branchName = scm.branches
                if (projectAndUrl.key == "${ENGINE_REPOSITORY_NAME}") {
                    branchName = [[name: params.ENGINE_BRANCH]]
                }
                checkout scm: [
                    $class: 'GitSCM',
                    branches: branchName,
                    extensions: [
                        [$class: 'PruneStaleBranch'],
                        [$class: 'AuthorInChangelog'],
                        [$class: 'SubmoduleOption', disableSubmodules: disableSubmodules, recursiveSubmodules: true],
                        [$class: 'CheckoutOption', timeout: 60]
                    ],
                    userRemoteConfigs: [projectAndUrl.value]
                ]
            }
        }
    }
    // CHANGE_ID is used by some scripts (usually metric jobs) to uniquely identify the current change
    dir(PROJECT_REPOSITORY_NAME) {
        PlatformSh('git rev-parse HEAD > commitid', 'Getting commit id')
        env.CHANGE_ID = readFile file: 'commitid'
        env.CHANGE_ID = env.CHANGE_ID.trim()
        PlatformRm('commitid')

        // CHANGE_DATE is used by the installer to provide some ability to sort tagged builds in addition to BRANCH_NAME and CHANGE_ID
        commitDateFmt = '%%cI'
        if (env.IS_UNIX) commitDateFmt = '%cI'

        PlatformSh("git show -s --format=${commitDateFmt} ${env.CHANGE_ID} > commitdate", 'Getting commit date')
        env.CHANGE_DATE = readFile file: 'commitdate'
        env.CHANGE_DATE = env.CHANGE_DATE.trim()
        PlatformRm('commitdate')
    }
}
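
// Pre-build setup: optionally recreates and mounts the per-branch/platform EBS build volume via
// incremental_build_util.py, cleans the workspace when requested, checks out the repos, bootstraps
// python, and runs the platform's clean step.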
def PreBuildCommonSteps(Map pipelineConfig, String repositoryName, String projectName, String pipeline, String branchName, String platform, String buildType, String workspace, boolean mount = true, boolean disableSubmodules = false) {
    echo 'Starting pre-build common steps...'
    if (mount) {
        unstash name: 'incremental_build_script'
        def pythonCmd = ''
        if (env.IS_UNIX) pythonCmd = 'sudo -E python3 -u '
        else pythonCmd = 'python3 -u '
        if (env.RECREATE_VOLUME?.toBoolean()) {
            PlatformSh("${pythonCmd} ${INCREMENTAL_BUILD_SCRIPT_PATH} --action delete --repository_name ${repositoryName} --project ${projectName} --pipeline ${pipeline} --branch ${branchName} --platform ${platform} --build_type ${buildType}", 'Deleting volume', false /* winSlashReplacement */)
        }
        timeout(5) {
            PlatformSh("${pythonCmd} ${INCREMENTAL_BUILD_SCRIPT_PATH} --action mount --repository_name ${repositoryName} --project ${projectName} --pipeline ${pipeline} --branch ${branchName} --platform ${platform} --build_type ${buildType}", 'Mounting volume', false /* winSlashReplacement */)
        }
        if (env.IS_UNIX) {
            sh label: 'Setting volume\'s ownership',
               script: """
                   if sudo test ! -d "${workspace}"; then
                       sudo mkdir -p ${workspace}
                       cd ${workspace}/..
                       sudo chown -R lybuilder:root .
                   fi
               """
        }
    }
    // Clean up the previous repo location. We are currently at the root of the workspace; if there is a
    // .git folder here, we need to clean it up. Once all branches take this relocation, this can be removed.
    if (env.CLEAN_WORKSPACE?.toBoolean() || fileExists("${workspace}/.git")) {
        if (fileExists(workspace)) {
            PlatformRmDir(workspace)
        }
    }
    dir(workspace) {
        // Add the folder where we will store the 3rdParty downloads and packages
        if (!fileExists('3rdParty')) {
            PlatformMkdir('3rdParty')
        }
        CheckoutRepo(disableSubmodules)
    }
    dir("${workspace}/${ENGINE_REPOSITORY_NAME}") {
        // Get python
        if (env.IS_UNIX) {
            sh label: 'Getting python',
               script: 'python/get_python.sh'
        } else {
            bat label: 'Getting python',
                script: 'python/get_python.bat'
        }
        // Always run the clean step; the scripts detect which CLEAN_* variables were set, and it also
        // cleans if the NODE_LABEL has changed.
        def command = "${pipelineConfig.PYTHON_DIR}/python"
        if (env.IS_UNIX) command += '.sh'
        else command += '.cmd'
        command += " -u ${pipelineConfig.BUILD_ENTRY_POINT} --platform ${platform} --type clean"
        PlatformSh(command, "Running ${platform} clean")
    }
}
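
// Runs a single build type through the engine's build entry point. When EXECUTE_FROM_PROJECT is set,
// the build is prepared to run from the project directory; otherwise the project is first registered
// with the engine via scripts/o3de.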
def Build(Map pipelineConfig, String platform, String type, String workspace) {
    // If EXECUTE_FROM_PROJECT is defined, we execute the script from the project instead of from the engine.
    // In both cases the scripts live in the engine; the only difference is the current directory and how we
    // reach the scripts from it.
    def currentDir = "${workspace}/${ENGINE_REPOSITORY_NAME}"
    def pathToEngine = ""
    timeout(time: env.TIMEOUT, unit: 'MINUTES', activity: true) {
        def command = "${pipelineConfig.PYTHON_DIR}/python"
        def ext = ''
        if (env.IS_UNIX) {
            command += '.sh'
            ext = '.sh'
        }
        else command += '.cmd'
        // Set up the environment for project execution; otherwise, register the project with the engine
        if (env.EXECUTE_FROM_PROJECT?.toBoolean()) {
            currentDir = "${workspace}/${PROJECT_REPOSITORY_NAME}"
            pathToEngine = "../${ENGINE_REPOSITORY_NAME}/"
        } else {
            dir("${workspace}/${ENGINE_REPOSITORY_NAME}") {
                PlatformSh("scripts/o3de${ext} register --project-path ${workspace}/${PROJECT_REPOSITORY_NAME}", "Registering project ${PROJECT_REPOSITORY_NAME}")
            }
        }
        command += " -u ${pipelineConfig.BUILD_ENTRY_POINT} --platform ${platform} --type ${type}"
        dir("${workspace}/${ENGINE_REPOSITORY_NAME}") {
            PlatformSh(command, "Running ${platform} ${type}")
        }
    }
}
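
// Publishes JUnit test results from the build output's Testing folder, then resets the Testing/Pytest
// and Testing/Gtest directories for the next run. Failures here do not fail the build.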
def ExportTestResults(Map options, String platform, String type, String workspace, Map params) {
    catchError(message: "Error exporting tests results (this won't fail the build)", buildResult: 'SUCCESS', stageResult: 'FAILURE') {
        def o3deroot = "${workspace}/${ENGINE_REPOSITORY_NAME}"
        dir("${o3deroot}/${params.OUTPUT_DIRECTORY}") {
            junit testResults: "Testing/**/*.xml"
            PlatformRmDir("Testing")
            // Recreate the test runner xml directories that need to be pre-generated
            PlatformMkdir("Testing/Pytest")
            PlatformMkdir("Testing/Gtest")
        }
    }
}

def PostBuildCommonSteps(String workspace, boolean mount = true) {
    echo 'Starting post-build common steps...'
    if (mount) {
        def pythonCmd = ''
        if (env.IS_UNIX) pythonCmd = 'sudo -E python3 -u '
        else pythonCmd = 'python3 -u '
        try {
            timeout(5) {
                PlatformSh("${pythonCmd} ${INCREMENTAL_BUILD_SCRIPT_PATH} --action unmount", 'Unmounting volume')
            }
        } catch (Exception e) {
            echo "Unmount script error ${e}"
        }
    }
}
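
// Stage factories: each returns a closure wrapping one stage, so the per-job code below can compose
// Setup -> Build(s) -> ExportTestResults -> Teardown inside a single node/withEnv block.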
def CreateSetupStage(Map pipelineConfig, String repositoryName, String projectName, String pipelineName, String branchName, String platformName, String jobName, Map environmentVars) {
    return {
        stage('Setup') {
            PreBuildCommonSteps(pipelineConfig, repositoryName, projectName, pipelineName, branchName, platformName, jobName, environmentVars['WORKSPACE'], environmentVars['MOUNT_VOLUME'])
        }
    }
}

def CreateBuildStage(Map pipelineConfig, String platformName, String jobName, Map environmentVars) {
    return {
        stage("${jobName}") {
            Build(pipelineConfig, platformName, jobName, environmentVars['WORKSPACE'])
        }
    }
}

def CreateExportTestResultsStage(Map pipelineConfig, String platformName, String jobName, Map environmentVars, Map params) {
    return {
        stage("${jobName}_results") {
            ExportTestResults(pipelineConfig, platformName, jobName, environmentVars['WORKSPACE'], params)
        }
    }
}

def CreateTeardownStage(Map environmentVars) {
    return {
        stage('Teardown') {
            PostBuildCommonSteps(environmentVars['WORKSPACE'], environmentVars['MOUNT_VOLUME'])
        }
    }
}
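
// Pipeline flow: the 'Setup Pipeline' stage runs on the controller to read the repo/branch, load the
// pipeline config, and register build parameters; then one parallel branch per enabled platform/build
// type runs Setup, Build, optional test-result export, and Teardown on its own node.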
def projectName = ''
def pipelineName = ''
def branchName = ''
def pipelineConfig = {}

// Start Pipeline
try {
    stage('Setup Pipeline') {
        node('controller') {
            def envVarList = []
            if (isUnix()) {
                envVarList.add('IS_UNIX=1')
            }
            withEnv(envVarList) {
                timestamps {
                    repositoryUrl = scm.getUserRemoteConfigs()[0].getUrl()
                    // repositoryName is the full repository name
                    repositoryName = (repositoryUrl =~ /https:\/\/github.com\/(.*)\.git/)[0][1]
                    (projectName, pipelineName) = GetRunningPipelineName(env.JOB_NAME) // env.JOB_NAME is the name of the job given by Jenkins
                    if (env.BRANCH_NAME) {
                        branchName = env.BRANCH_NAME
                    } else {
                        branchName = scm.branches[0].name // for non-multibranch pipelines
                        env.BRANCH_NAME = branchName // so scripts that read this environment variable have it (e.g. incremental_build_util.py)
                    }
                    pipelineProperties.add(disableConcurrentBuilds())

                    def engineBranch = params.ENGINE_BRANCH ?: "${ENGINE_BRANCH_DEFAULT}" // Lets the first run work while the parameters are still null; later runs use the ENGINE_BRANCH parameter
                    echo "Running repository: \"${repositoryName}\", pipeline: \"${pipelineName}\", branch: \"${branchName}\" on engine branch \"${engineBranch}\"..."
                    CheckoutBootstrapScripts(engineBranch)

                    // Load configs
                    pipelineConfig = LoadPipelineConfig(pipelineName, branchName)

                    // Add each platform as a parameter that the user can disable if needed
                    if (!IsPullRequest(branchName)) {
                        pipelineParameters.add(stringParam(defaultValue: '', description: 'Filters and overrides the list of jobs to run for each of the below platforms (comma-separated). Can\'t be used during a pull request.', name: 'JOB_LIST_OVERRIDE'))
                        pipelineConfig.platforms.each { platform ->
                            pipelineParameters.add(booleanParam(defaultValue: true, description: '', name: platform.key))
                        }
                    }
                    pipelineProperties.add(parameters(pipelineParameters))
                    properties(pipelineProperties)

                    // Stash the INCREMENTAL_BUILD_SCRIPT_PATH since all nodes will use it
                    stash name: 'incremental_build_script',
                          includes: INCREMENTAL_BUILD_SCRIPT_PATH
                }
            }
        }
    }

    if (env.BUILD_NUMBER == '1' && !IsPullRequest(branchName)) {
        // Exit the pipeline early on the initial build. This allows Jenkins to load the pipeline for the
        // branch and enables users to select build parameters on their first actual build.
        // See https://issues.jenkins.io/browse/JENKINS-41929
        currentBuild.result = 'SUCCESS'
        return
    }
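
    // Each enabled platform/build type contributes one closure to buildConfigs; the 'parallel' step in
    // the Build stage below runs them concurrently, each on its own node.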
    def someBuildHappened = false

    // Build and Post-Build Testing Stage
    def buildConfigs = [:]

    // Platform builds run on EC2
    pipelineConfig.platforms.each { platform ->
        platform.value.build_types.each { build_job ->
            if (IsJobEnabled(branchName, build_job, pipelineName, platform.key)) { // Users can filter jobs; jobs are tagged by pipeline
                def envVars = GetBuildEnvVars(platform.value.PIPELINE_ENV ?: EMPTY_JSON, build_job.value.PIPELINE_ENV ?: EMPTY_JSON, pipelineName)
                envVars['JOB_NAME'] = "${branchName}_${platform.key}_${build_job.key}" // Backwards compatibility, some scripts rely on this
                envVars['CMAKE_LY_PROJECTS'] = "../${PROJECT_REPOSITORY_NAME}"
                def nodeLabel = envVars['NODE_LABEL']
                someBuildHappened = true

                buildConfigs["${platform.key} [${build_job.key}]"] = {
                    node("${nodeLabel}") {
                        if (isUnix()) { // Has to happen inside a node
                            envVars['IS_UNIX'] = 1
                        }
                        withEnv(GetEnvStringList(envVars)) {
                            def build_job_name = build_job.key
                            try {
                                CreateSetupStage(pipelineConfig, repositoryName, projectName, pipelineName, branchName, platform.key, build_job.key, envVars).call()

                                if (build_job.value.steps) { // This is a multi-step job, so create a build stage per step
                                    build_job.value.steps.each { build_step ->
                                        build_job_name = build_step
                                        CreateBuildStage(pipelineConfig, platform.key, build_step, envVars).call()
                                    }
                                } else {
                                    CreateBuildStage(pipelineConfig, platform.key, build_job.key, envVars).call()
                                }
                            }
                            catch (Exception e) {
                                // https://github.com/jenkinsci/jenkins/blob/master/core/src/main/java/hudson/model/Result.java
                                // {SUCCESS,UNSTABLE,FAILURE,NOT_BUILT,ABORTED}
                                def currentResult = envVars['ON_FAILURE_MARK'] ?: 'FAILURE'
                                if (currentResult == 'FAILURE') {
                                    currentBuild.result = 'FAILURE'
                                    error "FAILURE: ${e}"
                                } else if (currentResult == 'UNSTABLE') {
                                    currentBuild.result = 'UNSTABLE'
                                    unstable(message: "UNSTABLE: ${e}")
                                }
                            }
                            finally {
                                def params = platform.value.build_types[build_job_name].PARAMETERS
                                if (params && params.containsKey('TEST_RESULTS') && params.TEST_RESULTS == 'True') {
                                    CreateExportTestResultsStage(pipelineConfig, platform.key, build_job_name, envVars, params).call()
                                }
                                CreateTeardownStage(envVars).call()
                            }
                        }
                    }
                }
            }
        }
    }

    timestamps {
        stage('Build') {
            parallel buildConfigs // Run parallel builds
        }
        echo 'All builds successful'
    }

    if (!someBuildHappened) {
        currentBuild.result = 'NOT_BUILT'
    }
}
catch (Exception e) {
    error "Exception: ${e}"
}
finally {
    try {
        if (env.SNS_TOPIC) {
            snsPublish(
                topicArn: env.SNS_TOPIC,
                subject: 'Build Result',
                message: "${currentBuild.currentResult}:${BUILD_URL}:${env.RECREATE_VOLUME}:${env.CLEAN_OUTPUT_DIRECTORY}:${env.CLEAN_ASSETS}"
            )
        }
        node('controller') {
            step([
                $class: 'Mailer',
                notifyEveryUnstableBuild: true,
                recipients: emailextrecipients([
                    [$class: 'RequesterRecipientProvider']
                ])
            ])
        }
    } catch (Exception e) {
        // Swallow notification errors so they don't change the build result
    }
}