#!/usr/bin/env groovy
/*
* All or portions of this file Copyright (c) Amazon.com, Inc. or its affiliates or
* its licensors.
*
* For complete copyright and license terms please see the LICENSE at the root of this
* distribution (the "License"). All use of this software is governed by the License,
* or, if provided, by the license below or the license accompanying this file. Do not
* remove or modify any license notices. This file is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
*
*/

PIPELINE_CONFIG_FILE = 'AutomatedReview/lumberyard.json'
INCREMENTAL_BUILD_SCRIPT_PATH = 'scripts/build/bootstrap/incremental_build_util.py'
EMPTY_JSON = readJSON text: '{}'

def pipelineProperties = []

def pipelineParameters = [
    // Build/clean Parameters
    // The CLEAN_OUTPUT_DIRECTORY is used by ci_build scripts. Creating the parameter here passes it as an environment variable to jobs and is consumed that way
    booleanParam(defaultValue: false, description: 'Deletes the contents of the output directory before building. This will cause a \"clean\" build', name: 'CLEAN_OUTPUT_DIRECTORY'),
    booleanParam(defaultValue: false, description: 'Deletes the contents of the workspace and forces a complete pull.', name: 'CLEAN_WORKSPACE'),
    booleanParam(defaultValue: false, description: 'Recreates the volume used for the workspace. The volume will be created out of a snapshot taken from main.', name: 'RECREATE_VOLUME'),
    string(defaultValue: '', description: 'Filters and overrides the list of jobs to run for each of the below platforms (comma-separated). Can\'t be used during a pull request.', name: 'JOB_LIST_OVERRIDE'),

    // Pull Request Parameters
    string(defaultValue: '', description: '', name: 'DESTINATION_BRANCH'),
    string(defaultValue: '', description: '', name: 'DESTINATION_COMMIT'),
    string(defaultValue: '', description: '', name: 'PULL_REQUEST_ID'),
    string(defaultValue: '', description: '', name: 'REPOSITORY_NAME'),
    string(defaultValue: '', description: '', name: 'SOURCE_BRANCH'),
    string(defaultValue: '', description: '', name: 'SOURCE_COMMIT')
]
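
// Platform-abstraction helpers: run the same step with sh on Unix agents or bat on Windows agents,
// translating forward slashes to backslashes where Windows needs it. env.IS_UNIX is set per node below.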
def palSh(cmd, lbl = '', winSlashReplacement = true) {
    if (env.IS_UNIX) {
        sh label: lbl,
           script: cmd
    } else if (winSlashReplacement) {
        bat label: lbl,
            script: cmd.replace('/','\\')
    } else {
        bat label: lbl,
            script: cmd
    }
}

def palMkdir(path) {
    if (env.IS_UNIX) {
        sh label: "Making directories ${path}",
           script: "mkdir -p ${path}"
    } else {
        def win_path = path.replace('/','\\')
        bat label: "Making directories ${win_path}",
            script: "mkdir ${win_path}."
    }
}

def palRm(path) {
    if (env.IS_UNIX) {
        sh label: "Removing ${path}",
           script: "rm ${path}"
    } else {
        def win_path = path.replace('/','\\')
        bat label: "Removing ${win_path}",
            script: "del ${win_path}"
    }
}

def palRmDir(path) {
    if (env.IS_UNIX) {
        sh label: "Removing ${path}",
           script: "rm -rf ${path}"
    } else {
        def win_path = path.replace('/','\\')
        bat label: "Removing ${win_path}",
            script: "rd /s /q ${win_path}"
    }
}
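
// Decides whether a build type runs in this pipeline: pull-request builds always follow the config's
// TAGS (they cannot filter jobs), otherwise JOB_LIST_OVERRIDE (if set) filters by job name, and by
// default a job runs when its platform parameter is enabled and it is tagged for the current pipeline.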
def IsJobEnabled(buildTypeMap, pipelineName, platformName) {
    def job_list_override = params.JOB_LIST_OVERRIDE.tokenize(',')
    if (params.PULL_REQUEST_ID) { // don't allow pull requests to filter platforms/jobs
        if (buildTypeMap.value.TAGS) {
            return buildTypeMap.value.TAGS.contains(pipelineName)
        }
    } else if (!job_list_override.isEmpty()) {
        return params[platformName] && job_list_override.contains(buildTypeMap.key)
    } else {
        if (params[platformName]) {
            if (buildTypeMap.value.TAGS) {
                return buildTypeMap.value.TAGS.contains(pipelineName)
            }
        }
    }
    return false
}

def GetRunningPipelineName(JENKINS_JOB_NAME) {
    // If the job name has an underscore, the pipeline name is the suffix after the last underscore
    def job_parts = JENKINS_JOB_NAME.tokenize('/')[0].tokenize('_')
    if (job_parts.size() > 1) {
        return job_parts[job_parts.size()-1]
    }
    return 'default'
}

@NonCPS
def RegexMatcher(str, regex) {
    def matcher = (str =~ regex)
    return matcher ? matcher.group(1) : null
}
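
// Loads the top-level pipeline config (PIPELINE_CONFIG_FILE), then the per-platform pipeline and build
// configs it references, keying each entry by the platform name captured by the '*' wildcard.
// Illustrative shape only -- field names are inferred from how they are read below, and the example
// paths are hypothetical, not taken from the actual config:
//   { "PIPELINE_CONFIGS": ["AutomatedReview/*/pipeline.json"],
//     "BUILD_CONFIGS":    ["AutomatedReview/*/build_config.json"] }
// Each build config maps job names to entries that may carry TAGS, PIPELINE_ENV, steps and PARAMETERS.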
def LoadPipelineConfig(String pipelineName, String branchName) {
    echo 'Loading pipeline config'
    PullFilesFromGit(PIPELINE_CONFIG_FILE, branchName)
    def pipelineConfig = {}
    pipelineConfig = readJSON file: PIPELINE_CONFIG_FILE
    palRm(PIPELINE_CONFIG_FILE)
    pipelineConfig.platforms = EMPTY_JSON

    // Load the pipeline configs per platform
    pipelineConfig.PIPELINE_CONFIGS.each { pipeline_config ->
        def platform_regex = pipeline_config.replace('.','\\.').replace('*', '(.*)')
        if (!env.IS_UNIX) {
            platform_regex = platform_regex.replace('/','\\\\')
        }
        echo "Downloading platform pipeline configs ${pipeline_config}"
        PullFilesFromGit(pipeline_config, branchName)
        echo "Searching platform pipeline configs in ${pipeline_config} using ${platform_regex}"
        for (pipeline_config_path in findFiles(glob: pipeline_config)) {
            echo "\tFound platform pipeline config ${pipeline_config_path}"
            def platform = RegexMatcher(pipeline_config_path, platform_regex)
            if (platform) {
                pipelineConfig.platforms[platform] = EMPTY_JSON
                pipelineConfig.platforms[platform].PIPELINE_ENV = readJSON file: pipeline_config_path.toString()
            }
            palRm(pipeline_config_path.toString())
        }
    }

    // Load the build configs
    pipelineConfig.BUILD_CONFIGS.each { build_config ->
        def platform_regex = build_config.replace('.','\\.').replace('*', '(.*)')
        if (!env.IS_UNIX) {
            platform_regex = platform_regex.replace('/','\\\\')
        }
        echo "Downloading configs ${build_config}"
        PullFilesFromGit(build_config, branchName)
        echo "Searching configs in ${build_config} using ${platform_regex}"
        for (build_config_path in findFiles(glob: build_config)) {
            echo "\tFound config ${build_config_path}"
            def platform = RegexMatcher(build_config_path, platform_regex)
            if (platform) {
                pipelineConfig.platforms[platform].build_types = readJSON file: build_config_path.toString()
            }
            palRm(build_config_path.toString())
        }
    }
    return pipelineConfig
}
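
// Derives the pipeline's AWS region from the SCM remote URL. This assumes a CodeCommit-style URL
// such as https://git-codecommit.<region>.amazonaws.com/..., where the second dot-separated token is the region.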
def GetPipelineRegion() {
    def gitUrl = scm.getUserRemoteConfigs()[0].getUrl()
    def gitUrlList = gitUrl.tokenize('.') as String[]
    def pipelineRegion = gitUrlList[1]
    return pipelineRegion
}
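
// Merges environment variables with increasing precedence: the platform's ENV block first, then the
// platform's PIPELINE_ENV_OVERRIDE entry for this pipeline, then the build type's own env block.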
def GetBuildEnvVars(Map platformEnv, Map buildTypeEnv, String pipelineName) {
    def envVarMap = [:]
    platformPipelineEnv = platformEnv['ENV'] ?: [:]
    platformPipelineEnv.each { var ->
        envVarMap[var.key] = var.value
    }
    platformEnvOverride = platformEnv['PIPELINE_ENV_OVERRIDE'] ?: [:]
    platformPipelineEnvOverride = platformEnvOverride[pipelineName] ?: [:]
    platformPipelineEnvOverride.each { var ->
        envVarMap[var.key] = var.value
    }
    buildTypeEnv.each { var ->
        // This may override the above one if there is an entry defined by the job
        envVarMap[var.key] = var.value
    }
    return envVarMap
}

def GetEnvStringList(Map envVarMap) {
    def strList = []
    envVarMap.each { var ->
        strList.add("${var.key}=${var.value}")
    }
    return strList
}

// Pulls/downloads files from the repo through CodeCommit. Glob matching is NOT supported, but '*' is
// supported as a whole folder or filename (not as a portion; it has to be the entire folder or filename).
def PullFilesFromGit(String filenamePath, String branchName, boolean failIfNotFound = true, String repositoryName = env.DEFAULT_REPOSITORY_NAME) {
    echo "PullFilesFromGit filenamePath=${filenamePath} branchName=${branchName}"
    def folderPathParts = filenamePath.tokenize('/')
    def filename = folderPathParts[folderPathParts.size()-1]
    folderPathParts.remove(folderPathParts.size()-1) // remove the filename
    def folderPath = folderPathParts.join('/')

    if (folderPath.contains('*')) {
        try {
            def currentPath = ''
            for (int i = 0; i < folderPathParts.size(); i++) {
                if (folderPathParts[i] == '*') {
                    palMkdir(currentPath)
                    retry(3) { palSh("aws codecommit get-folder --repository-name ${repositoryName} --commit-specifier ${branchName} --folder-path ${currentPath} > ${currentPath}/.codecommit", "GetFolder ${currentPath}") }
                    def folderInfo = readJSON file: "${currentPath}/.codecommit"
                    folderInfo.subFolders.each { folder ->
                        def newSubPath = currentPath + '/' + folder.relativePath
                        for (int j = i+1; j < folderPathParts.size(); j++) {
                            newSubPath = newSubPath + '/' + folderPathParts[j]
                        }
                        newSubPath = newSubPath + '/' + filename
                        PullFilesFromGit(newSubPath, branchName, false, repositoryName)
                    }
                    palRm("${currentPath}/.codecommit")
                }
                if (i == 0) {
                    currentPath = folderPathParts[i]
                } else {
                    currentPath = currentPath + '/' + folderPathParts[i]
                }
            }
        } catch(Exception e) {
        }
    } else if (filename.contains('*')) {
        try {
            palMkdir(folderPath)
            retry(3) { palSh("aws codecommit get-folder --repository-name ${repositoryName} --commit-specifier ${branchName} --folder-path ${folderPath} > ${folderPath}/.codecommit", "GetFolder ${folderPath}") }
            def folderInfo = readJSON file: "${folderPath}/.codecommit"
            folderInfo.files.each { file ->
                PullFilesFromGit("${folderPath}/${file.relativePath}", branchName, false, repositoryName)
            }
            palRm("${folderPath}/.codecommit")
        } catch(Exception e) {
        }
    } else {
        def errorFile = "${folderPath}/error.txt"
        palMkdir(folderPath)
        retry(3) {
            try {
                if (env.IS_UNIX) {
                    sh label: "Downloading ${filenamePath}",
                       script: "aws codecommit get-file --repository-name ${repositoryName} --commit-specifier ${branchName} --file-path ${filenamePath} --query fileContent --output text 2>${errorFile} > ${filenamePath}_encoded"
                    sh label: 'Decoding',
                       script: "base64 --decode ${filenamePath}_encoded > ${filenamePath}"
                } else {
                    errorFile = errorFile.replace('/','\\')
                    win_filenamePath = filenamePath.replace('/', '\\')
                    bat label: "Downloading ${win_filenamePath}",
                        script: "aws codecommit get-file --repository-name ${repositoryName} --commit-specifier ${branchName} --file-path ${filenamePath} --query fileContent --output text 2>${errorFile} > ${win_filenamePath}_encoded"
                    bat label: 'Decoding',
                        script: "certutil -decode ${win_filenamePath}_encoded ${win_filenamePath}"
                }
                palRm("${filenamePath}_encoded")
            } catch (Exception ex) {
                def error = ''
                if (fileExists(errorFile)) {
                    error = readFile errorFile
                }
                if (!error || !(!failIfNotFound && error.contains('FileDoesNotExistException'))) {
                    palRm("${errorFile} ${filenamePath}_encoded ${filenamePath}")
                    throw new Exception("Could not get file: ${filenamePath}, ex: ${ex}, stderr: ${error}")
                }
            }
            palRm(errorFile)
        }
    }
}
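
// Checks out the repository into the current directory, retrying with a staggered delay to avoid
// CodeCommit throttling; pull-request builds pre-merge the source branch into DESTINATION_BRANCH.
// Also exports env.CHANGE_ID with the checked-out commit hash.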
def CheckoutRepo(boolean disableSubmodules = false) {
    if (fileExists('.git')) {
        // If the repository after checkout is locked, we likely took a snapshot while git was running,
        // so garbage-collect to leave the repo in a usable state
        def indexLockFile = '.git/index.lock'
        if (fileExists(indexLockFile)) {
            palSh('git gc', 'Git GarbageCollect')
        }
        if (fileExists(indexLockFile)) { // if it is still there, remove it
            palRm(indexLockFile)
        }
        palSh('git remote prune origin', 'Git prune origin')
    }

    def random = new Random()
    def retryAttempt = 0
    retry(5) {
        if (retryAttempt > 0) {
            sleep random.nextInt(60 * retryAttempt) // Stagger checkouts to prevent HTTP 429 (Too Many Requests) responses from CodeCommit
        }
        retryAttempt = retryAttempt + 1
        if (params.PULL_REQUEST_ID) {
            // This is a pull request build. Perform a merge with the destination branch before building.
            checkout scm: [
                $class: 'GitSCM',
                branches: scm.branches,
                extensions: [
                    [$class: 'PreBuildMerge', options: [mergeRemote: 'origin', mergeTarget: params.DESTINATION_BRANCH]],
                    [$class: 'SubmoduleOption', disableSubmodules: disableSubmodules, recursiveSubmodules: true],
                    [$class: 'CheckoutOption', timeout: 60]
                ],
                userRemoteConfigs: scm.userRemoteConfigs
            ]
        } else {
            checkout scm: [
                $class: 'GitSCM',
                branches: scm.branches,
                extensions: [
                    [$class: 'SubmoduleOption', disableSubmodules: disableSubmodules, recursiveSubmodules: true],
                    [$class: 'CheckoutOption', timeout: 60]
                ],
                userRemoteConfigs: scm.userRemoteConfigs
            ]
        }
    }

    // CHANGE_ID is used by some scripts to uniquely identify the current change (usually metric jobs)
    palSh('git rev-parse HEAD > commitid', 'Getting commit id')
    env.CHANGE_ID = readFile file: 'commitid'
    env.CHANGE_ID = env.CHANGE_ID.trim()
    palRm('commitid')
}
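
// Per-node setup before a build: optionally (re)mounts the per-branch incremental-build volume via the
// stashed helper script (honoring RECREATE_VOLUME), wipes the workspace when CLEAN_WORKSPACE is set,
// then checks out the repository and bootstraps python inside the workspace.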
def PreBuildCommonSteps(String pipeline, String branchName, String platform, String buildType, String workspace, boolean mount = true, boolean disableSubmodules = false) {
    echo 'Starting pre-build common steps...'
    if (mount) {
        unstash name: 'incremental_build_script'

        def pythonCmd = ''
        if (env.IS_UNIX) pythonCmd = 'sudo -E python -u '
        else pythonCmd = 'python -u '

        if (params.RECREATE_VOLUME) {
            palSh("${pythonCmd} ${INCREMENTAL_BUILD_SCRIPT_PATH} --action delete --pipeline ${pipeline} --branch ${branchName} --platform ${platform} --build_type ${buildType}", 'Deleting volume')
        }
        timeout(5) {
            palSh("${pythonCmd} ${INCREMENTAL_BUILD_SCRIPT_PATH} --action mount --pipeline ${pipeline} --branch ${branchName} --platform ${platform} --build_type ${buildType}", 'Mounting volume')
        }

        if (env.IS_UNIX) {
            sh label: 'Setting volume\'s ownership',
               script: """
                   if sudo test ! -d "${workspace}"; then
                       sudo mkdir -p ${workspace}
                       cd ${workspace}/..
                       sudo chown -R lybuilder:root .
                   fi
               """
        }
    }
    if (params.CLEAN_WORKSPACE) {
        if (fileExists(workspace)) {
            palRmDir(workspace)
        }
    }
    dir(workspace) {
        CheckoutRepo(disableSubmodules)

        // Get python
        if (env.IS_UNIX) {
            sh label: 'Getting python',
               script: 'python/get_python.sh'
        } else {
            bat label: 'Getting python',
                script: 'python/get_python.bat'
        }
    }
}
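
// Invokes the repository's build entry point through the bundled python wrapper for the given
// platform and build type, from inside the mounted workspace.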
def Build(Map options, String platform, String type, String workspace) {
    def command = "${options.BUILD_ENTRY_POINT} --platform ${platform} --type ${type}"
    dir(workspace) {
        if (env.IS_UNIX) {
            sh label: "Running ${platform} ${type}",
               script: "${options.PYTHON_DIR}/python.sh -u ${command}"
        } else {
            bat label: "Running ${platform} ${type}",
                script: "${options.PYTHON_DIR}/python.cmd -u ${command}".replace('/','\\')
        }
    }
}
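
// Publishes CTest results to the metrics (MARS) system: checks out the mars repo inside the workspace
// and runs its scraper against the CMake build output directory under the service-user credentials.
// Windows-only in practice, since the command is issued through bat and the caller gates on Windows.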
def TestMetrics(Map options, Map buildType, String workspace, String branchName, String repoName) {
    catchError(buildResult: null, stageResult: null) {
        def cmakeBuildDir = [workspace, buildType.value.PARAMETERS.OUTPUT_DIRECTORY].join('/')
        def command = "${options.PYTHON_DIR}/python.cmd -u mars/scripts/python/ctest_test_metric_scraper.py -e jenkins.creds.user ${username} -e jenkins.creds.pass ${apitoken} ${cmakeBuildDir} ${branchName} %BUILD_NUMBER% AR ${buildType.value.PARAMETERS.CONFIGURATION} ${repoName} "
        if (params.DESTINATION_BRANCH)
            command += '--destination-branch "$DESTINATION_BRANCH" '
        dir(workspace) {
            checkout scm: [
                $class: 'GitSCM',
                extensions: [[$class: 'RelativeTargetDirectory', relativeTargetDir: 'mars']],
                userRemoteConfigs: [[url: "${env.MARS_REPO}", name: 'mars']]
            ]
            withCredentials([usernamePassword(credentialsId: "${env.SERVICE_USER}", passwordVariable: 'apitoken', usernameVariable: 'username')]) {
                bat label: "Publishing ${buildType.key} Test Metrics",
                    script: command
            }
        }
    }
}
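
// Per-node teardown after a build: discards any pull-request merge left in the workspace and unmounts
// the incremental-build volume (unmount failures are logged but do not fail the build).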
def PostBuildCommonSteps(String workspace, boolean mount = true) {
    echo 'Starting post-build common steps...'
    if (params.PULL_REQUEST_ID) {
        dir(workspace) {
            if (fileExists('.git')) {
                palSh('git reset --hard HEAD', 'Discard PR merge, git reset')
            }
        }
    }
    if (mount) {
        def pythonCmd = ''
        if (env.IS_UNIX) pythonCmd = 'sudo -E python -u '
        else pythonCmd = 'python -u '

        try {
            timeout(5) {
                palSh("${pythonCmd} ${INCREMENTAL_BUILD_SCRIPT_PATH} --action unmount", 'Unmounting volume')
            }
        } catch (Exception e) {
            echo "Unmount script error ${e}"
        }
    }
}
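
// Stage factories: each returns a closure wrapping a single stage so the per-platform/per-job node
// blocks below can compose Setup -> Build (one or more steps) -> optional Test Metrics -> Teardown.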
def CreateSetupStage(String pipelineName, String branchName, String platformName, String jobName, Map environmentVars) {
    return {
        stage("Setup") {
            PreBuildCommonSteps(pipelineName, branchName, platformName, jobName, environmentVars['WORKSPACE'], environmentVars['MOUNT_VOLUME'])
        }
    }
}

def CreateBuildStage(Map pipelineConfig, String platformName, String jobName, Map environmentVars) {
    return {
        stage("${jobName}") {
            Build(pipelineConfig, platformName, jobName, environmentVars['WORKSPACE'])
        }
    }
}

def CreateTestMetricsStage(Map pipelineConfig, Map buildJob, String branchName, Map environmentVars) {
    return {
        stage("${buildJob.key}") {
            TestMetrics(pipelineConfig, buildJob, environmentVars['WORKSPACE'], branchName, env.DEFAULT_REPOSITORY_NAME)
        }
    }
}

def CreateTeardownStage(Map environmentVars) {
    return {
        stage("Teardown") {
            PostBuildCommonSteps(environmentVars['WORKSPACE'], environmentVars['MOUNT_VOLUME'])
        }
    }
}

def pipelineName = ''
def pipelineRegion = ''
def branchName = ''
def pipelineConfig = {}

// Start Pipeline
try {
    stage('Setup Pipeline') {
        node('controller') {
            def envVarList = []
            if (isUnix()) {
                envVarList.add('IS_UNIX=1')
            }
            withEnv(envVarList) {
                timestamps {
                    pipelineName = GetRunningPipelineName(env.JOB_NAME) // env.JOB_NAME is the name of the job given by Jenkins
                    pipelineRegion = GetPipelineRegion()
                    if (env.BRANCH_NAME) {
                        branchName = env.BRANCH_NAME
                    } else {
                        branchName = scm.branches[0].name // for non-multibranch pipelines
                        env.BRANCH_NAME = branchName // so scripts that read this environment variable have it (e.g. incremental_build_util.py)
                    }
                    pipelineProperties.add(disableConcurrentBuilds())
                    echo "Running \"${pipelineName}\" for \"${branchName}\", region: \"${pipelineRegion}\"..."

                    // Load configs
                    pipelineConfig = LoadPipelineConfig(pipelineName, branchName)

                    // Add each platform as a parameter that the user can disable if needed
                    pipelineConfig.platforms.each { platform ->
                        pipelineParameters.add(booleanParam(defaultValue: true, description: '', name: platform.key))
                    }
                    pipelineProperties.add(parameters(pipelineParameters))
                    properties(pipelineProperties)

                    // Stash the INCREMENTAL_BUILD_SCRIPT_PATH since all nodes will use it
                    PullFilesFromGit(INCREMENTAL_BUILD_SCRIPT_PATH, branchName)
                    stash name: 'incremental_build_script',
                          includes: INCREMENTAL_BUILD_SCRIPT_PATH
                }
            }
        }
    }

    if (env.BUILD_NUMBER == '1') {
        // Exit the pipeline early on the initial build. This allows Jenkins to load the pipeline for the branch and enables users
        // to select build parameters on their first actual build. See https://issues.jenkins.io/browse/JENKINS-41929
        currentBuild.result = 'SUCCESS'
        return
    }

    // Build and Post-Build Testing Stage
    def buildConfigs = [:]

    // Platform Builds run on EC2
    pipelineConfig.platforms.each { platform ->
        platform.value.build_types.each { build_job ->
            if (IsJobEnabled(build_job, pipelineName, platform.key)) { // User can filter jobs; jobs are tagged by pipeline
                def envVars = GetBuildEnvVars(platform.value.PIPELINE_ENV ?: EMPTY_JSON, build_job.value.PIPELINE_ENV ?: EMPTY_JSON, pipelineName)
                envVars['JOB_NAME'] = "${branchName}_${platform.key}_${build_job.key}" // backwards compatibility, some scripts rely on this
                def nodeLabel = envVars['NODE_LABEL']

                buildConfigs["${platform.key} [${build_job.key}]"] = {
                    node("${nodeLabel}-${pipelineRegion}") {
                        if (isUnix()) { // Has to happen inside a node
                            envVars['IS_UNIX'] = 1
                        }
                        withEnv(GetEnvStringList(envVars)) {
                            timeout(time: envVars['TIMEOUT'], unit: 'MINUTES', activity: true) {
                                try {
                                    CreateSetupStage(pipelineName, branchName, platform.key, build_job.key, envVars).call()

                                    if (build_job.value.steps) { // this job defines multiple steps, so create a build stage for each
                                        build_job.value.steps.each { build_step ->
                                            CreateBuildStage(pipelineConfig, platform.key, build_step, envVars).call()
                                        }
                                    } else {
                                        CreateBuildStage(pipelineConfig, platform.key, build_job.key, envVars).call()
                                    }

                                    if (env.MARS_REPO && platform.key == 'Windows' && build_job.key.startsWith('test')) {
                                        CreateTestMetricsStage(pipelineConfig, build_job, branchName, envVars).call()
                                    }
                                }
                                catch (Exception e) {
                                    // https://github.com/jenkinsci/jenkins/blob/master/core/src/main/java/hudson/model/Result.java
                                    // {SUCCESS,UNSTABLE,FAILURE,NOT_BUILT,ABORTED}
                                    def currentResult = envVars['ON_FAILURE_MARK'] ?: 'FAILURE'
                                    if (currentResult == 'FAILURE') {
                                        currentBuild.result = 'FAILURE'
                                        error "FAILURE: ${e}"
                                    } else if (currentResult == 'UNSTABLE') {
                                        currentBuild.result = 'UNSTABLE'
                                        unstable(message: "UNSTABLE: ${e}")
                                    }
                                }
                                finally {
                                    CreateTeardownStage(envVars).call()
                                }
                            }
                        }
                    }
                }
            }
        }
    }

    timestamps {
        stage('Build') {
            parallel buildConfigs // Run parallel builds
        }
        echo 'All builds successful'
    }
}
catch (Exception e) {
    error "Exception: ${e}"
}
finally {
    try {
        if (env.SNS_TOPIC) {
            snsPublish(
                topicArn: env.SNS_TOPIC,
                subject: 'Build Result',
                message: "${currentBuild.currentResult}:${params.REPOSITORY_NAME}:${params.SOURCE_BRANCH}:${params.SOURCE_COMMIT}:${params.DESTINATION_COMMIT}:${params.PULL_REQUEST_ID}:${BUILD_URL}:${params.RECREATE_VOLUME}:${params.CLEAN_OUTPUT_DIRECTORY}"
            )
        }
        step([
            $class: 'Mailer',
            notifyEveryUnstableBuild: true,
            sendToIndividuals: true,
            recipients: emailextrecipients([
                [$class: 'CulpritsRecipientProvider'],
                [$class: 'RequesterRecipientProvider']
            ])
        ])
    } catch (Exception e) {
    }
}