#!/usr/bin/env groovy
/*
* All or portions of this file Copyright (c) Amazon.com, Inc. or its affiliates or
* its licensors.
*
* For complete copyright and license terms please see the LICENSE at the root of this
* distribution (the "License"). All use of this software is governed by the License,
* or, if provided, by the license below or the license accompanying this file. Do not
* remove or modify any license notices. This file is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
*
*/
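// Repository paths fetched individually from CodeCommit (see PullFilesFromGit) before the full checkout:
// the pipeline config drives which platform/build configs are loaded, and the incremental build script
// mounts/unmounts the EBS volume that holds each job's workspace.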
PIPELINE_CONFIG_FILE = 'AutomatedReview/lumberyard.json'
INCREMENTAL_BUILD_SCRIPT_PATH = "scripts/build/bootstrap/incremental_build_util.py"
def pipelineProperties = []

def pipelineParameters = [
    // Build/clean Parameters
    // The CLEAN_OUTPUT_DIRECTORY is used by ci_build scripts. Creating the parameter here passes it as an environment variable to jobs and is consumed that way
    booleanParam(defaultValue: false, description: 'Deletes the contents of the output directory before building. This will cause a \"clean\" build', name: 'CLEAN_OUTPUT_DIRECTORY'),
    booleanParam(defaultValue: false, description: 'Deletes the contents of the workspace and forces a complete pull.', name: 'CLEAN_WORKSPACE'),
    booleanParam(defaultValue: false, description: 'Recreates the volume used for the workspace. The volume will be created out of a snapshot taken from main.', name: 'RECREATE_VOLUME'),
    string(defaultValue: '', description: 'Filters and overrides the list of jobs to run for each of the below platforms (comma-separated). Can\'t be used during a pull request.', name: "JOB_LIST_OVERRIDE"),

    // Pull Request Parameters
    string(defaultValue: '', description: '', name: 'DESTINATION_BRANCH'),
    string(defaultValue: '', description: '', name: 'DESTINATION_COMMIT'),
    string(defaultValue: '', description: '', name: 'PULL_REQUEST_ID'),
    string(defaultValue: '', description: '', name: 'REPOSITORY_NAME'),
    string(defaultValue: '', description: '', name: 'SOURCE_BRANCH'),
    string(defaultValue: '', description: '', name: 'SOURCE_COMMIT')
]
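// pal* helpers: platform abstraction layer for running steps on either Unix (sh) or Windows (bat) nodes.
// On Windows, forward slashes in commands and paths are converted to backslashes.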
def palSh(cmd, lbl = '') {
    if (env.IS_UNIX) {
        sh label: lbl,
           script: cmd
    } else {
        bat label: lbl,
            script: cmd.replace('/','\\')
    }
}

def palMkdir(path) {
    if (env.IS_UNIX) {
        sh label: "Making directories ${path}",
           script: "mkdir -p ${path}"
    } else {
        def win_path = path.replace('/','\\')
        bat label: "Making directories ${win_path}",
            script: "mkdir ${win_path}."
    }
}

def palRm(path) {
    if (env.IS_UNIX) {
        sh label: "Removing ${path}",
           script: "rm ${path}"
    } else {
        def win_path = path.replace('/','\\')
        bat label: "Removing ${win_path}",
            script: "del ${win_path}"
    }
}

def palRmDir(path) {
    if (env.IS_UNIX) {
        sh label: "Removing ${path}",
           script: "rm -rf ${path}"
    } else {
        def win_path = path.replace('/','\\')
        bat label: "Removing ${win_path}",
            script: "rd /s /q ${win_path}"
    }
}
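// The job list override is ignored for pull request builds; PR jobs are always selected by their TAGS.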
def GetJobListOverride() {
    if (params.PULL_REQUEST_ID) {
        return ''
    } else {
        return params.JOB_LIST_OVERRIDE
    }
}
def GetRunningPipelineName(JENKINS_JOB_NAME) {
    // If the top-level job name contains an underscore, the pipeline name is the segment after the last
    // underscore; otherwise fall back to "default"
    def job_parts = JENKINS_JOB_NAME.tokenize('/')[0].tokenize('_')
    if (job_parts.size() > 1) {
        return job_parts[job_parts.size()-1]
    }
    return "default"
}
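// Downloads the top-level pipeline config, then each per-platform pipeline config it references via
// PIPELINE_CONFIGS, keyed by the platform name captured from the '*' in the config path.
// Illustrative shape only (inferred from the keys this script reads; see the actual JSON files in the repo):
//   lumberyard.json:           { "PIPELINE_CONFIGS": [...], "BUILD_CONFIGS": [...], "BUILD_ENTRY_POINT": "...", "PYTHON_DIR": "...", "platforms": {} }
//   per-platform pipeline cfg: { "JOB_NAME": "...", "LABEL": "...", "WORKSPACE": "...", "MOUNT_VOLUME": true|false, "TIMEOUT": <minutes> }
//   per-platform build cfg:    { "<build_type>": { "TAGS": ["<pipeline_name>", ...] } }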
def LoadPipelineConfig(String pipelineConfigFile, String branchName) {
    echo 'Loading pipeline config'
    PullFilesFromGit(pipelineConfigFile, branchName)
    def pipelineConfig = readJSON file: "${pipelineConfigFile}"
    // Load the pipeline configs per platform
    pipelineConfig.PIPELINE_CONFIGS.each { pipeline_config ->
        def platform_regex = pipeline_config.replace('.','\\.').replace('*', '(.*)')
        if (!env.IS_UNIX) {
            platform_regex = platform_regex.replace('/','\\\\')
        }
        echo "Downloading platform pipeline configs ${pipeline_config}"
        PullFilesFromGit(pipeline_config, branchName)
        echo "Searching platform pipeline configs in ${pipeline_config} using ${platform_regex}"
        for (pipeline_config_path in findFiles(glob: pipeline_config)) {
            echo "\tFound platform pipeline config ${pipeline_config_path}"
            def platform = RegexMatcher(pipeline_config_path, platform_regex)
            if (platform) {
                pipelineConfig.platforms[platform] = readJSON file: "${pipeline_config_path}"
            }
        }
    }
    return pipelineConfig
}
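// Derives the AWS region from the remote URL; assumes a CodeCommit-style URL (git-codecommit.<region>.amazonaws.com).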
def GetPipelineRegion() {
    def gitUrl = scm.getUserRemoteConfigs()[0].getUrl()
    def gitUrlList = gitUrl.tokenize('.') as String[]
    def pipelineRegion = gitUrlList[1]
    return pipelineRegion
}
def SetBuildEnvVars(Map platformConfig, String type, String branchName) {
    // Returns list of build env vars
    def envVarList = []
    platformConfig.each { config ->
        envVarList.add("${config.key}=${config.value}")
    }
    envVarList.add("JOB_NAME=${branchName}_${platformConfig.JOB_NAME}_${type}")
    if (isUnix()) {
        envVarList.add("IS_UNIX=1")
    }
    return envVarList
}
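// Returns the first capture group of regex matched against str, or null if there is no match.
// @NonCPS because java.util.regex.Matcher is not serializable by the pipeline CPS engine.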
@NonCPS
def RegexMatcher(str, regex) {
    def matcher = (str =~ "${regex}")
    return matcher ? matcher.group(1) : null
}
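// Builds a map of platform -> build types to run. A build type is included if one of its TAGS matches the
// running pipeline name, or unconditionally when JOB_LIST_OVERRIDE is set (the override list is applied later).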
def LoadPlatformBuildConfigs(Map options, String pipelineName, String branchName) {
    echo 'Loading build configs'
    def buildTypes = [:]
    options.BUILD_CONFIGS.each { build_config ->
        def platform_regex = build_config.replace('.','\\.').replace('*', '(.*)')
        if (!env.IS_UNIX) {
            platform_regex = platform_regex.replace('/','\\\\')
        }
        echo "Downloading configs ${build_config}"
        PullFilesFromGit(build_config, branchName)
        echo "Searching configs in ${build_config} using ${platform_regex}"
        for (build_config_path in findFiles(glob: build_config)) {
            echo "\tFound config ${build_config_path}"
            def platform = RegexMatcher(build_config_path, platform_regex)
            if (platform) {
                def buildConfig = readJSON file: "${build_config_path}"
                def types = []
                buildConfig.each { type ->
                    if (GetJobListOverride()) {
                        echo "\t\tAdding build type \"${type.key}\" to \"${pipelineName}\" because JOB_LIST_OVERRIDE was defined"
                        types.add(type.key)
                    } else if (type.value.TAGS) {
                        type.value.TAGS.each { tag ->
                            if (tag == pipelineName) {
                                echo "\t\tAdding build type \"${type.key}\" to \"${tag}\""
                                types.add(type.key)
                            }
                        }
                    }
                }
                buildTypes[platform] = types
            }
        }
    }
    return buildTypes
}
// Pulls/downloads files from the repo through CodeCommit. Glob matching is NOT supported, but '*' is supported
// as a folder or filename (not a portion of one; it has to be the whole folder or filename).
def PullFilesFromGit(String filenamePath, String branchName, boolean failIfNotFound = true, String repositoryName = env.DEFAULT_REPOSITORY_NAME) {
    echo "PullFilesFromGit filenamePath=${filenamePath} branchName=${branchName}"
    def folderPathParts = filenamePath.tokenize('/')
    def filename = folderPathParts[folderPathParts.size()-1]
    folderPathParts.remove(folderPathParts.size()-1) // remove the filename
    def folderPath = folderPathParts.join('/')
    if (folderPath.contains('*')) {
        def currentPath = ""
        for (int i = 0; i < folderPathParts.size(); i++) {
            if (folderPathParts[i] == '*') {
                palMkdir(currentPath)
                retry(3) { palSh("aws codecommit get-folder --repository-name ${repositoryName} --commit-specifier ${branchName} --folder-path ${currentPath} > ${currentPath}/.codecommit", "GetFolder ${currentPath}") }
                def folderInfo = readJSON file: "${currentPath}/.codecommit"
                folderInfo.subFolders.each { folder ->
                    def newSubPath = currentPath + '/' + folder.relativePath
                    for (int j = i+1; j < folderPathParts.size(); j++) {
                        newSubPath = newSubPath + '/' + folderPathParts[j]
                    }
                    newSubPath = newSubPath + '/' + filename
                    PullFilesFromGit(newSubPath, branchName, false, repositoryName)
                }
                palRm("${currentPath}/.codecommit")
            }
            if (i == 0) {
                currentPath = folderPathParts[i]
            } else {
                currentPath = currentPath + '/' + folderPathParts[i]
            }
        }
    } else if (filename.contains('*')) {
        palMkdir(folderPath)
        retry(3) { palSh("aws codecommit get-folder --repository-name ${repositoryName} --commit-specifier ${branchName} --folder-path ${folderPath} > ${folderPath}/.codecommit", "GetFolder ${folderPath}") }
        def folderInfo = readJSON file: "${folderPath}/.codecommit"
        folderInfo.files.each { file ->
            // Pull each file listed by get-folder (re-using the wildcard filename here would recurse into this same branch forever)
            PullFilesFromGit("${folderPath}/${file.relativePath}", branchName, false, repositoryName)
        }
        palRm("${folderPath}/.codecommit")
    } else {
        def errorFile = "${folderPath}/error.txt"
        palMkdir(folderPath)
        retry(3) {
            try {
                if (env.IS_UNIX) {
                    sh label: "Downloading ${filenamePath}",
                       script: "aws codecommit get-file --repository-name ${repositoryName} --commit-specifier ${branchName} --file-path ${filenamePath} --query fileContent --output text 2>${errorFile} > ${filenamePath}_encoded"
                    sh label: "Decoding",
                       script: "base64 --decode ${filenamePath}_encoded > ${filenamePath}"
                } else {
                    errorFile = errorFile.replace('/','\\')
                    win_filenamePath = filenamePath.replace('/', '\\')
                    bat label: "Downloading ${win_filenamePath}",
                        script: "aws codecommit get-file --repository-name ${repositoryName} --commit-specifier ${branchName} --file-path ${filenamePath} --query fileContent --output text 2>${errorFile} > ${win_filenamePath}_encoded"
                    bat label: "Decoding",
                        script: "certutil -decode ${win_filenamePath}_encoded ${win_filenamePath}"
                }
                palRm("${filenamePath}_encoded")
            } catch (Exception ex) {
                def error = ''
                if (fileExists(errorFile)) {
                    error = readFile errorFile
                }
                if (!error || !(!failIfNotFound && error.contains("FileDoesNotExistException"))) {
                    palRm("${errorFile} ${filenamePath}_encoded ${filenamePath}")
                    throw new Exception("Could not get file: ${filenamePath}, ex: ${ex}, stderr: ${error}")
                }
            }
            palRm(errorFile)
        }
    }
}
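// Checks out the repository with retries; pull request builds merge the source branch into
// params.DESTINATION_BRANCH before building.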
def CheckoutRepo(boolean disableSubmodules = false) {
    def random = new Random()
    def retryAttempt = 0
    retry(5) {
        if (retryAttempt > 0) {
            sleep random.nextInt(60 * retryAttempt) // Stagger checkouts to prevent HTTP 429 (Too Many Requests) response from CodeCommit
        }
        retryAttempt = retryAttempt + 1
        if (params.PULL_REQUEST_ID) {
            // This is a pull request build. Perform merge with destination branch before building.
            checkout scm: [
                $class: 'GitSCM',
                branches: scm.branches,
                extensions: [
                    [$class: 'PreBuildMerge', options: [mergeRemote: 'origin', mergeTarget: params.DESTINATION_BRANCH]],
                    [$class: 'SubmoduleOption', disableSubmodules: disableSubmodules, recursiveSubmodules: true],
                    [$class: 'CheckoutOption', timeout: 60]
                ],
                userRemoteConfigs: scm.userRemoteConfigs
            ]
        } else {
            checkout scm: [
                $class: 'GitSCM',
                branches: scm.branches,
                extensions: [
                    [$class: 'SubmoduleOption', disableSubmodules: disableSubmodules, recursiveSubmodules: true],
                    [$class: 'CheckoutOption', timeout: 60]
                ],
                userRemoteConfigs: scm.userRemoteConfigs
            ]
        }
    }
}
def SendEmailNotification() {
    // Email notification
    node('controller') {
        step([
            $class: 'Mailer',
            notifyEveryUnstableBuild: true,
            sendToIndividuals: true,
            recipients: emailextrecipients([
                [$class: 'CulpritsRecipientProvider'],
                [$class: 'RequesterRecipientProvider']
            ])
        ])
    }
}
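// Mounts the per-pipeline/branch/platform/build-type EBS volume (optionally recreating it), cleans the
// workspace if requested, checks out the repository, records the commit id in CHANGE_ID, and bootstraps python.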
def PreBuildCommonSteps(String pipeline, String branchName, String platform, String buildType, String workspace, boolean mount = true, boolean disableSubmodules = false) {
    echo 'Starting pre-build common steps...'
    if (mount) {
        PullFilesFromGit(INCREMENTAL_BUILD_SCRIPT_PATH, branchName)
        def pythonCmd = ""
        if (env.IS_UNIX) pythonCmd = "sudo -E python -u "
        else pythonCmd = "python -u "
        if (params.RECREATE_VOLUME) {
            palSh("${pythonCmd} ${INCREMENTAL_BUILD_SCRIPT_PATH} --action delete --pipeline ${pipeline} --branch ${branchName} --platform ${platform} --build_type ${buildType}", 'Deleting volume')
        }
        timeout(5) {
            palSh("${pythonCmd} ${INCREMENTAL_BUILD_SCRIPT_PATH} --action mount --pipeline ${pipeline} --branch ${branchName} --platform ${platform} --build_type ${buildType}", 'Mounting volume')
        }
        if (env.IS_UNIX) {
            sh label: 'Setting volume\'s ownership',
               script: """
                   if sudo test ! -d "${workspace}"; then
                       sudo mkdir -p ${workspace}
                       cd ${workspace}/..
                       sudo chown -R lybuilder:root .
                   fi
               """
        }
    }
    if (params.CLEAN_WORKSPACE) {
        palRmDir("${workspace}")
    }
    dir(workspace) {
        if (fileExists(".git")) {
            palSh("""
                git remote prune origin
                git reset --hard HEAD
            """, 'Git reset')
        }
        CheckoutRepo(disableSubmodules)
        // If the repository is locked after checkout, we likely took the volume snapshot while git was running;
        // run garbage collection to leave the repo in a usable state
        if (fileExists(".git/index.lock")) {
            palSh('git gc', 'Git GarbageCollect')
        }
        palSh('git rev-parse HEAD > commitid', 'Getting commit id')
        env.CHANGE_ID = readFile file: 'commitid'
        env.CHANGE_ID = env.CHANGE_ID.trim()
        palRm('commitid')
        // Get python
        if (env.IS_UNIX) {
            sh label: 'Getting python',
               script: 'python/get_python.sh'
        } else {
            bat label: 'Getting python',
                script: 'python/get_python.bat'
        }
    }
}
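// Runs the build entry point defined in the pipeline config (BUILD_ENTRY_POINT) through the repository's
// bundled python wrapper (PYTHON_DIR).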
def Build(Map options, String platform, String type, String workspace) {
    def command = "${options.BUILD_ENTRY_POINT} --platform ${platform} --type ${type}"
    dir(workspace) {
        if (env.IS_UNIX) {
            sh label: "Running ${platform} ${type}",
               script: "${options.PYTHON_DIR}/python.sh -u ${command}"
        } else {
            bat label: "Running ${platform} ${type}",
                script: "${options.PYTHON_DIR}/python.cmd -u ${command}".replace('/','\\')
        }
    }
}
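// Discards the temporary pull request merge (if any) and unmounts the EBS build volume.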
def PostBuildCommonSteps(String workspace, boolean mount = true) {
    echo 'Starting post-build common steps...'
    if (params.PULL_REQUEST_ID) {
        dir(workspace) {
            if (fileExists(".git")) {
                palSh('git reset --hard HEAD', 'Discard PR merge, git reset')
            }
        }
    }
    if (mount) {
        def pythonCmd = ""
        if (env.IS_UNIX) pythonCmd = "sudo -E python -u "
        else pythonCmd = "python -u "
        try {
            timeout(5) {
                palSh("${pythonCmd} ${INCREMENTAL_BUILD_SCRIPT_PATH} --action unmount", 'Unmounting volume')
            }
        }
        catch (e) {
            // Incremental build script will reboot the node if the EBS detach fails
            echo "Node disconnected, error: \"${e}\", continuing..."
        }
    }
}
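// Pipeline state shared across stages; populated in the 'Setup Pipeline' stage below.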
def pipelineName = ""
def pipelineRegion = ""
def branchName = ""
def pipelineConfig = {}
def buildTypes = {}
// Start Pipeline
try {
    stage('Setup Pipeline') {
        node('controller') {
            def envVarList = []
            if (isUnix()) {
                envVarList.add("IS_UNIX=1")
            }
            withEnv(envVarList) {
                timestamps {
                    pipelineName = GetRunningPipelineName(env.JOB_NAME)
                    pipelineRegion = GetPipelineRegion()
                    if (env.BRANCH_NAME) {
                        branchName = env.BRANCH_NAME
                    } else {
                        branchName = scm.branches[0].name // for non-multibranch pipelines
                        env.BRANCH_NAME = branchName // so scripts that read this environment have it (e.g. incremental_build_util.py)
                    }
                    pipelineProperties.add(disableConcurrentBuilds())
                    echo "Running \"${pipelineName}\" for \"${branchName}\", region: \"${pipelineRegion}\"..."
                    // Load configs
                    pipelineConfig = LoadPipelineConfig(PIPELINE_CONFIG_FILE, branchName)
                    buildTypes = LoadPlatformBuildConfigs(pipelineConfig, pipelineName, branchName)
                    // Add each platform as a parameter that the user can disable if needed
                    buildTypes.each { platform ->
                        pipelineParameters.add(booleanParam(defaultValue: true, description: '', name: "${platform.key}"))
                    }
                    pipelineProperties.add(parameters(pipelineParameters))
                    properties(pipelineProperties)
                }
            }
        }
    }
    if (env.BUILD_NUMBER == '1') {
        // Exit the pipeline early on the initial build. This allows Jenkins to load the pipeline for the branch and enables users
        // to select build parameters on their first actual build. See https://issues.jenkins.io/browse/JENKINS-41929
        currentBuild.result = 'SUCCESS'
        return
    }
    // Build and Post-Build Testing Stage
    def buildConfigs = [:]
    def job_list_override = GetJobListOverride().tokenize(',')
    // Platform Builds run on EC2
    buildTypes.each { platform ->
        if (params["${platform.key}"]) {
            def pipelinePlatformConfig = pipelineConfig.platforms["${platform.key}"]
            if (pipelinePlatformConfig) {
                platform.value.each { build_type ->
                    if (job_list_override.isEmpty() || job_list_override.contains(build_type)) {
                        buildConfigs["${pipelinePlatformConfig.JOB_NAME} [${build_type}]"] = {
                            node("${pipelinePlatformConfig.LABEL}-${pipelineRegion}") {
                                stage("${pipelinePlatformConfig.JOB_NAME} [${build_type}]") {
                                    def envVars = SetBuildEnvVars(pipelinePlatformConfig, build_type, branchName)
                                    withEnv(envVars) {
                                        timeout(time: pipelinePlatformConfig.TIMEOUT, unit: 'MINUTES', activity: true) {
                                            try {
                                                PreBuildCommonSteps(pipelineName, branchName, platform.key, build_type, pipelinePlatformConfig.WORKSPACE, pipelinePlatformConfig.MOUNT_VOLUME)
                                                Build(pipelineConfig, platform.key, build_type, pipelinePlatformConfig.WORKSPACE)
                                            }
                                            catch (e) {
                                                currentBuild.result = 'FAILURE'
                                                throw e
                                            }
                                            finally {
                                                PostBuildCommonSteps(pipelinePlatformConfig.WORKSPACE, pipelinePlatformConfig.MOUNT_VOLUME)
                                            }
                                        }
                                    }
                                }
                            }
                        }
                    }
                }
            } else {
                echo "[WARN] Could not find pipeline config for ${platform.key}, skipping platform"
            }
        }
    }
    timestamps {
        stage('Build') {
            parallel buildConfigs // Run parallel builds
        }
        echo 'All builds successful'
        currentBuild.result = 'SUCCESS'
    }
}
catch (e) {
    currentBuild.result = 'FAILURE'
    throw e
}
finally {
    if (env.SNS_TOPIC) {
        snsPublish(
            topicArn: "${env.SNS_TOPIC}",
            subject: 'Build Result',
            message: "${currentBuild.currentResult}:${params.REPOSITORY_NAME}:${params.SOURCE_BRANCH}:${params.SOURCE_COMMIT}:${params.DESTINATION_COMMIT}:${params.PULL_REQUEST_ID}:${BUILD_URL}:${params.RECREATE_VOLUME}:${params.CLEAN_OUTPUT_DIRECTORY}"
        )
    }
    SendEmailNotification()
}