@@ -0,0 +1,604 @@
+#!/usr/bin/env groovy
+/*
+ * Copyright (c) Contributors to the Open 3D Engine Project.
+ * For complete copyright and license terms please see the LICENSE at the root of this distribution.
+ *
+ * SPDX-License-Identifier: Apache-2.0 OR MIT
+ *
+ */
+
+PIPELINE_CONFIG_FILE = 'scripts/build/Jenkins/lumberyard.json'
+INCREMENTAL_BUILD_SCRIPT_PATH = 'scripts/build/bootstrap/incremental_build_util.py'
+
+EMPTY_JSON = readJSON text: '{}'
+
+PROJECT_REPOSITORY_NAME = 'o3de-netsoaktest'
+PROJECT_ORGANIZATION_NAME = 'o3de'
+ENGINE_REPOSITORY_NAME = 'o3de'
+ENGINE_ORGANIZATION_NAME = 'o3de'
+ENGINE_BRANCH_DEFAULT = env.BRANCH_DEFAULT ?: env.BRANCH_NAME
+
+def pipelineProperties = []
+
+def pipelineParameters = [
+    // Build/clean Parameters
+    // CLEAN_OUTPUT_DIRECTORY is used by the ci_build scripts. Creating the parameter here passes it to jobs as an environment variable, which is how it is consumed.
+    booleanParam(defaultValue: false, description: 'Deletes the contents of the output directory before building. This will cause a \"clean\" build. NOTE: does not imply CLEAN_ASSETS', name: 'CLEAN_OUTPUT_DIRECTORY'),
+    booleanParam(defaultValue: false, description: 'Deletes the contents of the output directories of the AssetProcessor before building.', name: 'CLEAN_ASSETS'),
+    booleanParam(defaultValue: false, description: 'Deletes the contents of the workspace and forces a complete pull.', name: 'CLEAN_WORKSPACE'),
+    booleanParam(defaultValue: false, description: 'Recreates the volume used for the workspace. The volume will be created out of a snapshot taken from main.', name: 'RECREATE_VOLUME'),
+    stringParam(defaultValue: "${ENGINE_BRANCH_DEFAULT}", description: 'Sets a different branch or commit id of the o3de engine repo to use. Defaults to the branch name.', trim: true, name: 'ENGINE_BRANCH')
+]
+
+def PlatformSh(cmd, lbl = '', winSlashReplacement = true) {
+    if (env.IS_UNIX) {
+        sh label: lbl,
+           script: cmd
+    } else if (winSlashReplacement) {
+        bat label: lbl,
+            script: cmd.replace('/','\\')
+    } else {
+        bat label: lbl,
+            script: cmd
+    }
+}
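+// Usage example (illustrative): PlatformSh('git status', 'Checking repo state') runs through `sh` on
+// Unix nodes and through `bat` on Windows nodes, rewriting '/' to '\' unless winSlashReplacement is
+// false (useful when forward slashes are command-line arguments rather than paths).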
+
+def PlatformMkdir(path) {
+    if (env.IS_UNIX) {
+        sh label: "Making directories ${path}",
+           script: "mkdir -p ${path}"
+    } else {
+        def win_path = path.replace('/','\\')
+        bat label: "Making directories ${win_path}",
+            script: "mkdir ${win_path}."
+    }
+}
+
+def PlatformRm(path) {
+    if (env.IS_UNIX) {
+        sh label: "Removing ${path}",
+           script: "rm ${path}"
+    } else {
+        def win_path = path.replace('/','\\')
+        bat label: "Removing ${win_path}",
+            script: "del /Q ${win_path}"
+    }
+}
+
+def PlatformRmDir(path) {
+    if (env.IS_UNIX) {
+        sh label: "Removing ${path}",
+           script: "rm -rf ${path}"
+    } else {
+        def win_path = path.replace('/','\\')
+        bat label: "Removing ${win_path}",
+            script: "rd /s /q ${win_path}"
+    }
+}
+
+def IsPullRequest(branchName) {
+    // Temporarily using the branch name to detect if we are in a PR.
+    // In the future we will check with GitHub.
+    return branchName.startsWith('PR-')
+}
+
+def IsJobEnabled(branchName, buildTypeMap, pipelineName, platformName) {
+    if (IsPullRequest(branchName)) {
+        return buildTypeMap.value.TAGS && buildTypeMap.value.TAGS.contains(pipelineName)
+    }
+    def job_list_override = params.JOB_LIST_OVERRIDE ? params.JOB_LIST_OVERRIDE.tokenize(',') : ''
+    if (!job_list_override.isEmpty()) {
+        return params[platformName] && job_list_override.contains(buildTypeMap.key)
+    } else {
+        return params[platformName] && buildTypeMap.value.TAGS && buildTypeMap.value.TAGS.contains(pipelineName)
+    }
+}
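+// Example (hypothetical values): on branch 'PR-123' a build type runs only if its TAGS contain the
+// current pipeline name. On other branches the platform checkbox parameter must be enabled, and
+// JOB_LIST_OVERRIDE='profile,test_profile' would further limit the run to those build type keys.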
+
+def GetRunningPipelineName(JENKINS_JOB_NAME) {
+    // If the job name has an underscore, the suffix after the last underscore is the pipeline name
+    def job_parts = JENKINS_JOB_NAME.tokenize('/')[0].tokenize('_')
+    if (job_parts.size() > 1) {
+        return [job_parts.take(job_parts.size() - 1).join('_'), job_parts[job_parts.size()-1]]
+    }
+    return [job_parts[0], 'default']
+}
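+// Example (hypothetical job names): 'o3de-netsoaktest_periodic-incremental-daily/development' yields
+// ['o3de-netsoaktest', 'periodic-incremental-daily']; 'o3de-netsoaktest/development' (no underscore)
+// yields ['o3de-netsoaktest', 'default'].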
+
+@NonCPS
+def RegexMatcher(str, regex) {
+    def matcher = (str =~ regex)
+    return matcher ? matcher.group(1) : null
+}
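+// Illustrative example, assuming a config glob such as 'Platform/*/build_config.json' converted to
+// the regex 'Platform/(.*)/build_config\.json' below: for the path 'Platform/Windows/build_config.json'
+// this returns the first captured group, 'Windows'.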
+
+def LoadPipelineConfig(String pipelineName, String branchName) {
+    echo 'Loading pipeline config'
+    def pipelineConfig = {}
+    pipelineConfig = readJSON file: PIPELINE_CONFIG_FILE
+    PlatformRm(PIPELINE_CONFIG_FILE)
+    pipelineConfig.platforms = EMPTY_JSON
+
+    // Load the pipeline configs per platform
+    pipelineConfig.PIPELINE_CONFIGS.each { pipeline_config ->
+        def platform_regex = pipeline_config.replace('.','\\.').replace('*', '(.*)')
+        if (!env.IS_UNIX) {
+            platform_regex = platform_regex.replace('/','\\\\')
+        }
+        echo "Searching platform pipeline configs in ${pipeline_config} using ${platform_regex}"
+        for (pipeline_config_path in findFiles(glob: pipeline_config)) {
+            echo "\tFound platform pipeline config ${pipeline_config_path}"
+            def platform = RegexMatcher(pipeline_config_path, platform_regex)
+            if (platform) {
+                pipelineConfig.platforms[platform] = EMPTY_JSON
+                pipelineConfig.platforms[platform].PIPELINE_ENV = readJSON file: pipeline_config_path.toString()
+            }
+            PlatformRm(pipeline_config_path.toString())
+        }
+    }
+
+    // Load the build configs
+    pipelineConfig.BUILD_CONFIGS.each { build_config ->
+        def platform_regex = build_config.replace('.','\\.').replace('*', '(.*)')
+        if (!env.IS_UNIX) {
+            platform_regex = platform_regex.replace('/','\\\\')
+        }
+        echo "Searching configs in ${build_config} using ${platform_regex}"
+        for (build_config_path in findFiles(glob: build_config)) {
+            echo "\tFound config ${build_config_path}"
+            def platform = RegexMatcher(build_config_path, platform_regex)
+            if (platform) {
+                pipelineConfig.platforms[platform].build_types = readJSON file: build_config_path.toString()
+            }
+        }
+    }
+    return pipelineConfig
+}
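+// Resulting shape (illustrative; the actual platform keys and fields come from lumberyard.json and the
+// per-platform JSON files found above):
+//   pipelineConfig.platforms['Windows'].PIPELINE_ENV  - the platform's pipeline environment config
+//   pipelineConfig.platforms['Windows'].build_types   - the platform's build/test job definitions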
+
+def GetBuildEnvVars(Map platformEnv, Map buildTypeEnv, String pipelineName) {
+    def envVarMap = [:]
+    platformPipelineEnv = platformEnv['ENV'] ?: [:]
+    platformPipelineEnv.each { var ->
+        envVarMap[var.key] = var.value
+    }
+    platformEnvOverride = platformEnv['PIPELINE_ENV_OVERRIDE'] ?: [:]
+    platformPipelineEnvOverride = platformEnvOverride[pipelineName] ?: [:]
+    platformPipelineEnvOverride.each { var ->
+        envVarMap[var.key] = var.value
+    }
+    buildTypeEnv.each { var ->
+        // This may override the above values if there is an entry defined by the job
+        envVarMap[var.key] = var.value
+    }
+
+    // Environment that only applies to Jenkins tweaks.
+    // 3rdParty downloads are stored on the EBS volume so we can reuse them across node
+    // instances. This allows us to scale up and down without having to re-download 3rdParty packages.
+    envVarMap['LY_PACKAGE_DOWNLOAD_CACHE_LOCATION'] = "${envVarMap['WORKSPACE']}/3rdParty/downloaded_packages"
+    envVarMap['LY_PACKAGE_UNPACK_LOCATION'] = "${envVarMap['WORKSPACE']}/3rdParty/packages"
+
+    return envVarMap
+}
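+// Precedence example (illustrative values): if the platform ENV sets NODE_LABEL='windows', the
+// pipeline override sets NODE_LABEL='windows-big' and the build type sets nothing, the job runs with
+// NODE_LABEL='windows-big'; an entry on the build type itself would win over both.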
+
+def GetEnvStringList(Map envVarMap) {
+    def strList = []
+    envVarMap.each { var ->
+        strList.add("${var.key}=${var.value}")
+    }
+    return strList
+}
+
+def GetEngineRemoteConfig(remoteConfigs) {
+    def engineRemoteConfigs = [name: "${ENGINE_REPOSITORY_NAME}",
+                               url: remoteConfigs.url[0]
+                                   .replace("${PROJECT_REPOSITORY_NAME}", "${ENGINE_REPOSITORY_NAME}")
+                                   .replace("/${PROJECT_ORGANIZATION_NAME}/", "/${ENGINE_ORGANIZATION_NAME}/"),
+                               credentialsId: remoteConfigs.credentialsId[0]
+    ]
+    return engineRemoteConfigs
+}
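+// Example (hypothetical remote): https://github.com/o3de/o3de-netsoaktest.git is rewritten to
+// https://github.com/o3de/o3de.git, reusing the credentialsId of the project's first remote.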
+
+def CheckoutBootstrapScripts(String branchName) {
+    checkout([$class: 'GitSCM',
+        branches: [[name: "*/${branchName}"]],
+        doGenerateSubmoduleConfigurations: false,
+        extensions: [
+            [$class: 'PruneStaleBranch'],
+            [$class: 'AuthorInChangelog'],
+            [$class: 'SparseCheckoutPaths', sparseCheckoutPaths: [
+                [ $class: 'SparseCheckoutPath', path: 'scripts/build/Jenkins/' ],
+                [ $class: 'SparseCheckoutPath', path: 'scripts/build/bootstrap/' ],
+                [ $class: 'SparseCheckoutPath', path: 'scripts/build/Platform' ]
+            ]],
+            // Shallow checkouts break changelog computation. Do not enable.
+            [$class: 'CloneOption', noTags: false, reference: '', shallow: false]
+        ],
+        submoduleCfg: [],
+        userRemoteConfigs: [GetEngineRemoteConfig(scm.userRemoteConfigs)]
+    ])
+}
+
+def CheckoutRepo(boolean disableSubmodules = false) {
+
+    def projectsAndUrl = [
+        "${ENGINE_REPOSITORY_NAME}": GetEngineRemoteConfig(scm.userRemoteConfigs),
+        "${PROJECT_REPOSITORY_NAME}": scm.userRemoteConfigs[0]
+    ]
+
+    projectsAndUrl.each { projectAndUrl ->
+        if (!fileExists(projectAndUrl.key)) {
+            PlatformMkdir(projectAndUrl.key)
+        }
+        dir(projectAndUrl.key) {
+            if (fileExists('.git')) {
+                // If the repository is locked after checkout, we likely took a snapshot while git was
+                // running; garbage collect to leave the repo in a usable state.
+                def indexLockFile = '.git/index.lock'
+                if (fileExists(indexLockFile)) {
+                    PlatformSh('git gc', 'Git GarbageCollect')
+                }
+                if (fileExists(indexLockFile)) { // if it is still there, remove it
+                    PlatformRm(indexLockFile)
+                }
+            }
+        }
+    }
+
+    def random = new Random()
+    def retryAttempt = 0
+    retry(5) {
+        if (retryAttempt > 0) {
+            sleep random.nextInt(60 * retryAttempt) // Stagger checkouts to prevent HTTP 429 (Too Many Requests) responses from GitHub
+        }
+        retryAttempt = retryAttempt + 1
+        projectsAndUrl.each { projectAndUrl ->
+            dir(projectAndUrl.key) {
+                def branchName = scm.branches
+                if (projectAndUrl.key == "${ENGINE_REPOSITORY_NAME}") {
+                    branchName = [[name: params.ENGINE_BRANCH]]
+                }
+                checkout scm: [
+                    $class: 'GitSCM',
+                    branches: branchName,
+                    extensions: [
+                        [$class: 'PruneStaleBranch'],
+                        [$class: 'AuthorInChangelog'],
+                        [$class: 'SubmoduleOption', disableSubmodules: disableSubmodules, recursiveSubmodules: true],
+                        [$class: 'CheckoutOption', timeout: 60]
+                    ],
+                    userRemoteConfigs: [projectAndUrl.value]
+                ]
+            }
+        }
+    }
+
+    // CHANGE_ID is used by some scripts (usually metric jobs) to uniquely identify the current change
+    dir(PROJECT_REPOSITORY_NAME) {
+        PlatformSh('git rev-parse HEAD > commitid', 'Getting commit id')
+        env.CHANGE_ID = readFile file: 'commitid'
+        env.CHANGE_ID = env.CHANGE_ID.trim()
+        PlatformRm('commitid')
+        // CHANGE_DATE is used by the installer to provide a way to sort tagged builds, in addition to BRANCH_NAME and CHANGE_ID
+        commitDateFmt = '%%cI'
+        if (env.IS_UNIX) commitDateFmt = '%cI'
+
+        PlatformSh("git show -s --format=${commitDateFmt} ${env.CHANGE_ID} > commitdate", 'Getting commit date')
+        env.CHANGE_DATE = readFile file: 'commitdate'
+        env.CHANGE_DATE = env.CHANGE_DATE.trim()
+        PlatformRm('commitdate')
+    }
+}
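+// Note: %cI / %%cI is the committer date in strict ISO 8601, so CHANGE_DATE ends up looking like
+// 2024-01-15T10:32:05-08:00 (illustrative value).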
+
+def PreBuildCommonSteps(Map pipelineConfig, String repositoryName, String projectName, String pipeline, String branchName, String platform, String buildType, String workspace, boolean mount = true, boolean disableSubmodules = false) {
+    echo 'Starting pre-build common steps...'
+
+    if (mount) {
+        unstash name: 'incremental_build_script'
+
+        def pythonCmd = ''
+        if (env.IS_UNIX) pythonCmd = 'sudo -E python3 -u '
+        else pythonCmd = 'python3 -u '
+
+        if (env.RECREATE_VOLUME?.toBoolean()) {
+            PlatformSh("${pythonCmd} ${INCREMENTAL_BUILD_SCRIPT_PATH} --action delete --repository_name ${repositoryName} --project ${projectName} --pipeline ${pipeline} --branch ${branchName} --platform ${platform} --build_type ${buildType}", 'Deleting volume', winSlashReplacement=false)
+        }
+        timeout(5) {
+            PlatformSh("${pythonCmd} ${INCREMENTAL_BUILD_SCRIPT_PATH} --action mount --repository_name ${repositoryName} --project ${projectName} --pipeline ${pipeline} --branch ${branchName} --platform ${platform} --build_type ${buildType}", 'Mounting volume', winSlashReplacement=false)
+        }
+
+        if (env.IS_UNIX) {
+            sh label: 'Setting volume\'s ownership',
+               script: """
+                   if sudo test ! -d "${workspace}"; then
+                       sudo mkdir -p ${workspace}
+                       cd ${workspace}/..
+                       sudo chown -R lybuilder:root .
+                   fi
+               """
+        }
+    }
+
+    // Clean up the previous repo location. We are currently at the root of the workspace; if there is
+    // a .git folder, we need to clean up. Once all branches have taken this relocation, this can be removed.
+    if (env.CLEAN_WORKSPACE?.toBoolean() || fileExists("${workspace}/.git")) {
+        if (fileExists(workspace)) {
+            PlatformRmDir(workspace)
+        }
+    }
+
+    dir(workspace) {
+        // Add the folder where we will store the 3rdParty downloads and packages
+        if (!fileExists('3rdParty')) {
+            PlatformMkdir('3rdParty')
+        }
+        CheckoutRepo(disableSubmodules)
+    }
+    dir("${workspace}/${ENGINE_REPOSITORY_NAME}") {
+        // Get python
+        if (env.IS_UNIX) {
+            sh label: 'Getting python',
+               script: 'python/get_python.sh'
+        } else {
+            bat label: 'Getting python',
+                script: 'python/get_python.bat'
+        }
+
+        // Always run the clean step; the scripts detect which clean variables were set, and also clean
+        // if the NODE_LABEL has changed
+        def command = "${pipelineConfig.PYTHON_DIR}/python"
+        if (env.IS_UNIX) command += '.sh'
+        else command += '.cmd'
+        command += " -u ${pipelineConfig.BUILD_ENTRY_POINT} --platform ${platform} --type clean"
+        PlatformSh(command, "Running ${platform} clean")
+    }
+}
+
+def Build(Map pipelineConfig, String platform, String type, String workspace) {
+    // If EXECUTE_FROM_PROJECT is defined, we execute the script from the project instead of from the engine.
+    // In both cases the scripts live in the engine; what changes is the current directory and the path used to reach the scripts.
+    def currentDir = "${workspace}/${ENGINE_REPOSITORY_NAME}"
+    def pathToEngine = ""
+
+    timeout(time: env.TIMEOUT, unit: 'MINUTES', activity: true) {
+        def command = "${pipelineConfig.PYTHON_DIR}/python"
+        def ext = ''
+        if (env.IS_UNIX) {
+            command += '.sh'
+            ext = '.sh'
+        }
+        else command += '.cmd'
+
+        // Set up the environment for project execution; otherwise, register the project with the engine
+        if (env.EXECUTE_FROM_PROJECT?.toBoolean()) {
+            currentDir = "${workspace}/${PROJECT_REPOSITORY_NAME}"
+            pathToEngine = "../${ENGINE_REPOSITORY_NAME}/"
+        } else {
+            dir("${workspace}/${ENGINE_REPOSITORY_NAME}") {
+                PlatformSh("scripts/o3de${ext} register --project-path ${workspace}/${PROJECT_REPOSITORY_NAME}", "Registering project ${PROJECT_REPOSITORY_NAME}")
+            }
+        }
+        command += " -u ${pipelineConfig.BUILD_ENTRY_POINT} --platform ${platform} --type ${type}"
+        dir("${workspace}/${ENGINE_REPOSITORY_NAME}") {
+            PlatformSh(command, "Running ${platform} ${type}")
+        }
+    }
+}
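+// Illustrative command assembled above, assuming PYTHON_DIR and BUILD_ENTRY_POINT values from
+// lumberyard.json (hypothetical): python/python.sh -u scripts/build/ci_build.py --platform Linux --type profile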
+
+def ExportTestResults(Map options, String platform, String type, String workspace, Map params) {
+    catchError(message: "Error exporting test results (this won't fail the build)", buildResult: 'SUCCESS', stageResult: 'FAILURE') {
+        def o3deroot = "${workspace}/${ENGINE_REPOSITORY_NAME}"
+        dir("${o3deroot}/${params.OUTPUT_DIRECTORY}") {
+            junit testResults: "Testing/**/*.xml"
+            PlatformRmDir("Testing")
+            // Recreate the test runner xml directories that need to be pre-generated
+            PlatformMkdir("Testing/Pytest")
+            PlatformMkdir("Testing/Gtest")
+        }
+    }
+}
+
+def PostBuildCommonSteps(String workspace, boolean mount = true) {
+    echo 'Starting post-build common steps...'
+
+    if (mount) {
+        def pythonCmd = ''
+        if (env.IS_UNIX) pythonCmd = 'sudo -E python3 -u '
+        else pythonCmd = 'python3 -u '
+
+        try {
+            timeout(5) {
+                PlatformSh("${pythonCmd} ${INCREMENTAL_BUILD_SCRIPT_PATH} --action unmount", 'Unmounting volume')
+            }
+        } catch (Exception e) {
+            echo "Unmount script error ${e}"
+        }
+    }
+}
+
+def CreateSetupStage(Map pipelineConfig, String repositoryName, String projectName, String pipelineName, String branchName, String platformName, String jobName, Map environmentVars) {
+    return {
+        stage('Setup') {
+            PreBuildCommonSteps(pipelineConfig, repositoryName, projectName, pipelineName, branchName, platformName, jobName, environmentVars['WORKSPACE'], environmentVars['MOUNT_VOLUME'])
+        }
+    }
+}
+
+def CreateBuildStage(Map pipelineConfig, String platformName, String jobName, Map environmentVars) {
+    return {
+        stage("${jobName}") {
+            Build(pipelineConfig, platformName, jobName, environmentVars['WORKSPACE'])
+        }
+    }
+}
+
+def CreateExportTestResultsStage(Map pipelineConfig, String platformName, String jobName, Map environmentVars, Map params) {
+    return {
+        stage("${jobName}_results") {
+            ExportTestResults(pipelineConfig, platformName, jobName, environmentVars['WORKSPACE'], params)
+        }
+    }
+}
+
+def CreateTeardownStage(Map environmentVars) {
+    return {
+        stage('Teardown') {
+            PostBuildCommonSteps(environmentVars['WORKSPACE'], environmentVars['MOUNT_VOLUME'])
+        }
+    }
+}
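+// Each per-platform job below is assembled from these factories and runs as:
+// Setup -> one build stage per step (or a single build stage) -> optional test-result export -> Teardown.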
+
+def projectName = ''
+def pipelineName = ''
+def branchName = ''
+def pipelineConfig = {}
+
+// Start Pipeline
+try {
+    stage('Setup Pipeline') {
+        node('controller') {
+            def envVarList = []
+            if (isUnix()) {
+                envVarList.add('IS_UNIX=1')
+            }
+            withEnv(envVarList) {
+                timestamps {
+                    repositoryUrl = scm.getUserRemoteConfigs()[0].getUrl()
+                    // repositoryName is the full organization/repository name
+                    repositoryName = (repositoryUrl =~ /https:\/\/github.com\/(.*)\.git/)[0][1]
+                    (projectName, pipelineName) = GetRunningPipelineName(env.JOB_NAME) // env.JOB_NAME is the name of the job given by Jenkins
+
+                    if (env.BRANCH_NAME) {
+                        branchName = env.BRANCH_NAME
+                    } else {
+                        branchName = scm.branches[0].name // for non-multibranch pipelines
+                        env.BRANCH_NAME = branchName // so scripts that read this environment variable have it (e.g. incremental_build_util.py)
+                    }
+                    pipelineProperties.add(disableConcurrentBuilds())
+
+                    def engineBranch = params.ENGINE_BRANCH ?: "${ENGINE_BRANCH_DEFAULT}" // Allows the first run to work while parameters still have null values; the engine branch parameter is used afterwards
+                    echo "Running repository: \"${repositoryName}\", pipeline: \"${pipelineName}\", branch: \"${branchName}\" on engine branch \"${engineBranch}\"..."
+
+                    CheckoutBootstrapScripts(engineBranch)
+
+                    // Load configs
+                    pipelineConfig = LoadPipelineConfig(pipelineName, branchName)
+
+                    // Add each platform as a parameter that the user can disable if needed
+                    if (!IsPullRequest(branchName)) {
+                        pipelineParameters.add(stringParam(defaultValue: '', description: 'Filters and overrides the list of jobs to run for each of the below platforms (comma-separated). Can\'t be used during a pull request.', name: 'JOB_LIST_OVERRIDE'))
+
+                        pipelineConfig.platforms.each { platform ->
+                            pipelineParameters.add(booleanParam(defaultValue: true, description: '', name: platform.key))
+                        }
+                    }
+                    pipelineProperties.add(parameters(pipelineParameters))
+                    properties(pipelineProperties)
+
+                    // Stash the INCREMENTAL_BUILD_SCRIPT_PATH since all nodes will use it
+                    stash name: 'incremental_build_script',
+                          includes: INCREMENTAL_BUILD_SCRIPT_PATH
+                }
+            }
+        }
+    }
+
+    if (env.BUILD_NUMBER == '1' && !IsPullRequest(branchName)) {
+        // Exit the pipeline early on the initial build. This allows Jenkins to load the pipeline for the branch and enables users
+        // to select build parameters on their first actual build. See https://issues.jenkins.io/browse/JENKINS-41929
+        currentBuild.result = 'SUCCESS'
+        return
+    }
+
+    def someBuildHappened = false
+
+    // Build and Post-Build Testing Stage
+    def buildConfigs = [:]
+
+    // Platform Builds run on EC2
+    pipelineConfig.platforms.each { platform ->
+        platform.value.build_types.each { build_job ->
+            if (IsJobEnabled(branchName, build_job, pipelineName, platform.key)) { // User can filter jobs, jobs are tagged by pipeline
+                def envVars = GetBuildEnvVars(platform.value.PIPELINE_ENV ?: EMPTY_JSON, build_job.value.PIPELINE_ENV ?: EMPTY_JSON, pipelineName)
+                envVars['JOB_NAME'] = "${branchName}_${platform.key}_${build_job.key}" // backwards compatibility, some scripts rely on this
+                envVars['CMAKE_LY_PROJECTS'] = "../${PROJECT_REPOSITORY_NAME}"
+                def nodeLabel = envVars['NODE_LABEL']
+                someBuildHappened = true
+
+                buildConfigs["${platform.key} [${build_job.key}]"] = {
+                    node("${nodeLabel}") {
+                        if (isUnix()) { // Has to happen inside a node
+                            envVars['IS_UNIX'] = 1
+                        }
+                        withEnv(GetEnvStringList(envVars)) {
+                            def build_job_name = build_job.key
+                            try {
+                                CreateSetupStage(pipelineConfig, repositoryName, projectName, pipelineName, branchName, platform.key, build_job.key, envVars).call()
+
+                                if (build_job.value.steps) { // This is a pipeline with multiple steps, so create a build stage per step
+                                    build_job.value.steps.each { build_step ->
+                                        build_job_name = build_step
+                                        CreateBuildStage(pipelineConfig, platform.key, build_step, envVars).call()
+                                    }
+                                } else {
+                                    CreateBuildStage(pipelineConfig, platform.key, build_job.key, envVars).call()
+                                }
+                            }
+                            catch(Exception e) {
+                                // https://github.com/jenkinsci/jenkins/blob/master/core/src/main/java/hudson/model/Result.java
+                                // {SUCCESS,UNSTABLE,FAILURE,NOT_BUILT,ABORTED}
+                                def currentResult = envVars['ON_FAILURE_MARK'] ?: 'FAILURE'
+                                if (currentResult == 'FAILURE') {
+                                    currentBuild.result = 'FAILURE'
+                                    error "FAILURE: ${e}"
+                                } else if (currentResult == 'UNSTABLE') {
+                                    currentBuild.result = 'UNSTABLE'
+                                    unstable(message: "UNSTABLE: ${e}")
+                                }
+                            }
+                            finally {
+                                def params = platform.value.build_types[build_job_name].PARAMETERS
+                                if (params && params.containsKey('TEST_RESULTS') && params.TEST_RESULTS == 'True') {
+                                    CreateExportTestResultsStage(pipelineConfig, platform.key, build_job_name, envVars, params).call()
+                                }
+                                CreateTeardownStage(envVars).call()
+                            }
+                        }
+                    }
+                }
+            }
+        }
+    }
+
+    timestamps {
+
+        stage('Build') {
+            parallel buildConfigs // Run parallel builds
+        }
+
+        echo 'All builds successful'
+    }
+    if (!someBuildHappened) {
+        currentBuild.result = 'NOT_BUILT'
+    }
+}
+catch(Exception e) {
+    error "Exception: ${e}"
+}
+finally {
+    try {
+        if (env.SNS_TOPIC) {
+            snsPublish(
+                topicArn: env.SNS_TOPIC,
+                subject: 'Build Result',
+                message: "${currentBuild.currentResult}:${BUILD_URL}:${env.RECREATE_VOLUME}:${env.CLEAN_OUTPUT_DIRECTORY}:${env.CLEAN_ASSETS}"
+            )
+        }
+        node('controller') {
+            step([
+                $class: 'Mailer',
+                notifyEveryUnstableBuild: true,
+                recipients: emailextrecipients([
+                    [$class: 'RequesterRecipientProvider']
+                ])
+            ])
+        }
+    } catch(Exception e) {
+    }
+}