Jelajahi Sumber

Create Jenkins AR jobs for ASV

This enables AR jobs within the ASV project through a nested call from a Project Jenkinsfile (this file) to the O3DE Jenkinsfile. This is accomplished through the following:

- Two separate Git pulls are made (ASV and o3de) into separate workspace folders. A new function loops through the repos to get common parameters
- The ASV Jenkinsfile takes in a new engine branch and refspec parameter (defaults to HEAD of O3DE mainline)
- This Jenkinsfile calls the O3DE Jenkinsfile, then overrides the CMAKE_LY_PROJECTS and SCM branch/refspec parameters
- The build is called using an engine-centric workflow, but can be adjusted to work in a project-centric manner by setting a PROJECT_ENABLED parameter in build_config.json on the O3DE repo
- The same default builds used in the O3DE AR will be used (though redundant) for now as a 1st pass. Future work is to transfer a common O3DE engine binary between project centric workflows to reduce build steps/times
Mike Chang 4 tahun lalu
induk
melakukan
d49091092d
2 mengubah file dengan 643 tambahan dan 1 penghapusan
  1. 5 1
      .gitignore
  2. 638 0
      Scripts/build/Jenkins/Jenkinsfile

+ 5 - 1
.gitignore

@@ -1,6 +1,10 @@
 _savebackup/
 .mayaSwatches/
 *.swatches
-[Bb]uild/
+.vscode/
+__pycache__
+AssetProcessorTemp/**
+[Bb]uild/**
 [Cc]ache/
 [Uu]ser/
+.DS_Store

+ 638 - 0
Scripts/build/Jenkins/Jenkinsfile

@@ -0,0 +1,638 @@
+#!/usr/bin/env groovy
+/*
+* All or portions of this file Copyright (c) Amazon.com, Inc. or its affiliates or
+* its licensors.
+*
+* For complete copyright and license terms please see the LICENSE at the root of this
+* distribution (the "License"). All use of this software is governed by the License,
+* or, if provided, by the license below or the license accompanying this file. Do not
+* remove or modify any license notices. This file is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+*
+*/
+
+// Engine pipeline config consumed by LoadPipelineConfig (checked out from o3de).
+PIPELINE_CONFIG_FILE = 'scripts/build/Jenkins/lumberyard.json'
+// Helper script (stashed on the controller) that mounts/unmounts per-job EBS volumes.
+INCREMENTAL_BUILD_SCRIPT_PATH = 'scripts/build/bootstrap/incremental_build_util.py'
+
+// NOTE(review): this is a single shared JSONObject instance; every use below
+// aliases the same object. Verify no call site mutates it (see LoadPipelineConfig).
+EMPTY_JSON = readJSON text: '{}'
+
+// Project (this repo) and engine (o3de) repo/org names, used to derive the
+// engine remote URL from the project's SCM configuration.
+PROJECT_REPOSITORY_NAME = 'o3de-atom-sampleviewer'
+PROJECT_ORGANIZATION_NAME = 'aws-lumberyard'
+ENGINE_REPOSITORY_NAME = 'o3de'
+ENGINE_ORGANIZATION_NAME = 'aws-lumberyard'
+
+// Job properties accumulated during 'Setup Pipeline' (concurrency settings,
+// parameters) and applied once via properties().
+def pipelineProperties = []
+
+// Base build parameters present on every run. Per-platform enable toggles and
+// JOB_LIST_OVERRIDE are appended later for non-PR builds.
+def pipelineParameters = [
+    // Build/clean Parameters
+    // The CLEAN_OUTPUT_DIRECTORY is used by ci_build scripts. Creating the parameter here passes it as an environment variable to jobs and is consumed that way
+    booleanParam(defaultValue: false, description: 'Deletes the contents of the output directory before building. This will cause a \"clean\" build. NOTE: does not imply CLEAN_ASSETS', name: 'CLEAN_OUTPUT_DIRECTORY'),
+    booleanParam(defaultValue: false, description: 'Deletes the contents of the output directories of the AssetProcessor before building.', name: 'CLEAN_ASSETS'),
+    booleanParam(defaultValue: false, description: 'Deletes the contents of the workspace and forces a complete pull.', name: 'CLEAN_WORKSPACE'),
+    booleanParam(defaultValue: false, description: 'Recreates the volume used for the workspace. The volume will be created out of a snapshot taken from main.', name: 'RECREATE_VOLUME'),
+    // Engine override knobs: which o3de branch/refspec the nested engine checkout uses.
+    stringParam(defaultValue: 'main', description: 'Sets a different branch from o3de engine repo to use or use commit id. Default is mainline', trim: true, name: 'ENGINE_BRANCH'),
+    stringParam(defaultValue: 'origin/main', description: 'Sets a refspec for the mainline branch. Default is head of main', trim: true, name: 'ENGINE_REFSPEC')
+]
+
+// Platform-abstracted shell step: 'sh' on Unix, 'bat' on Windows.
+// When winSlashReplacement is true (default), forward slashes in the command
+// are rewritten to backslashes for cmd.exe; pass false for commands where
+// slashes are significant (e.g. URLs, git refspecs).
+def palSh(cmd, lbl = '', winSlashReplacement = true) {
+    if (env.IS_UNIX) {
+        sh label: lbl,
+           script: cmd
+    } else if (winSlashReplacement) {
+        bat label: lbl,
+            script: cmd.replace('/','\\')
+    } else {
+        bat label: lbl,
+            script: cmd
+    }
+}
+
+// Platform-abstracted recursive directory creation.
+// On Windows, forward slashes are converted to backslashes because cmd's
+// mkdir does not accept '/' separators; cmd's mkdir is recursive by default
+// when command extensions are enabled.
+def palMkdir(path) {
+    if (env.IS_UNIX) {
+        sh label: "Making directories ${path}",
+           script: "mkdir -p ${path}"
+    } else {
+        def win_path = path.replace('/','\\')
+        bat label: "Making directories ${win_path}",
+            // Fix: removed stray trailing '.' after the path (typo; Windows
+            // silently strips trailing dots, but it was unintended and misleading).
+            script: "mkdir ${win_path}"
+    }
+}
+
+// Platform-abstracted single-file delete ('rm' / 'del').
+def palRm(path) {
+    if (env.IS_UNIX) {
+        sh label: "Removing ${path}",
+           script: "rm ${path}"
+    } else {
+        def win_path = path.replace('/','\\')
+        bat label: "Removing ${win_path}",
+            script: "del ${win_path}"
+    }
+}
+
+// Platform-abstracted recursive directory delete ('rm -rf' / 'rd /s /q').
+def palRmDir(path) {
+    if (env.IS_UNIX) {
+        sh label: "Removing ${path}",
+           script: "rm -rf ${path}"
+    } else {
+        def win_path = path.replace('/','\\')
+        bat label: "Removing ${win_path}",
+            script: "rd /s /q ${win_path}"
+    }
+}
+
+// Returns true when this build is for a GitHub pull request.
+def IsPullRequest(branchName) {
+    // temporarily using the name to detect if we are in a PR
+    // In the future we will check with github
+    return branchName.startsWith('PR-')
+}
+
+// Decides whether a build job runs in this invocation.
+// - PR builds: only jobs whose TAGS include the current pipeline name.
+// - Non-PR builds: the platform's checkbox parameter must be enabled, and
+//   either the job name appears in the comma-separated JOB_LIST_OVERRIDE
+//   filter, or (with no override) the job is tagged for this pipeline.
+def IsJobEnabled(branchName, buildTypeMap, pipelineName, platformName) {
+    if (IsPullRequest(branchName)) {
+        return buildTypeMap.value.TAGS && buildTypeMap.value.TAGS.contains(pipelineName)
+    }
+    def job_list_override = params.JOB_LIST_OVERRIDE ? params.JOB_LIST_OVERRIDE.tokenize(',') : ''
+    if (!job_list_override.isEmpty()) {
+        return params[platformName] && job_list_override.contains(buildTypeMap.key);
+    } else {
+        return params[platformName] && buildTypeMap.value.TAGS && buildTypeMap.value.TAGS.contains(pipelineName)
+    }
+}
+
+// Splits a Jenkins job name into [projectName, pipelineName].
+// Only the first path segment of JOB_NAME is considered; it is split on '_':
+// everything before the last underscore is the project, the final segment is
+// the pipeline. A name without underscores maps to the 'default' pipeline.
+def GetRunningPipelineName(JENKINS_JOB_NAME) {
+    def segments = JENKINS_JOB_NAME.tokenize('/')[0].tokenize('_')
+    if (segments.size() <= 1) {
+        return [segments[0], 'default']
+    }
+    def pipeline = segments.last()
+    def project = segments[0..-2].join('_')
+    return [project, pipeline]
+}
+
+// Returns the first capture group of 'regex' matched against 'str', or null.
+// @NonCPS because java.util.regex.Matcher is not serializable, so this must
+// run outside Jenkins' CPS-transformed pipeline code.
+@NonCPS
+def RegexMatcher(str, regex) {
+    def matcher = (str =~ regex)
+    return matcher ? matcher.group(1) : null
+}
+
+// Reads the engine's pipeline config JSON (then deletes it so later glob
+// scans don't pick it up), and discovers per-platform pipeline/build configs
+// via the glob patterns the config declares. The platform name is extracted
+// from each matched path using the '*' position in the glob.
+// Returns the enriched config: .platforms[<name>].PIPELINE_ENV / .build_types
+// NOTE(review): 'branchName' is currently unused here.
+def LoadPipelineConfig(String pipelineName, String branchName) {
+    echo 'Loading pipeline config'
+    // NOTE(review): '{}' is an empty closure, immediately replaced by readJSON
+    // on the next line; harmless but misleading.
+    def pipelineConfig = {}
+    pipelineConfig = readJSON file: PIPELINE_CONFIG_FILE
+    palRm(PIPELINE_CONFIG_FILE)
+    // NOTE(review): EMPTY_JSON is one shared JSONObject instance, assigned both
+    // here and per-platform below, so .platforms and each platform entry alias
+    // the same object until overwritten — verify this cannot leak state across
+    // platforms; a fresh `readJSON text: '{}'` per use would be safer.
+    pipelineConfig.platforms = EMPTY_JSON
+
+    // Load the pipeline configs per platform
+    pipelineConfig.PIPELINE_CONFIGS.each { pipeline_config ->
+        def platform_regex = pipeline_config.replace('.','\\.').replace('*', '(.*)')
+        if (!env.IS_UNIX) {
+            platform_regex = platform_regex.replace('/','\\\\')
+        }
+        echo "Searching platform pipeline configs in ${pipeline_config} using ${platform_regex}"
+        for (pipeline_config_path in findFiles(glob: pipeline_config)) {
+            echo "\tFound platform pipeline config ${pipeline_config_path}"
+            def platform = RegexMatcher(pipeline_config_path, platform_regex)
+            if(platform) {
+                pipelineConfig.platforms[platform] = EMPTY_JSON
+                pipelineConfig.platforms[platform].PIPELINE_ENV = readJSON file: pipeline_config_path.toString()
+            }
+            palRm(pipeline_config_path.toString())
+        }
+    }
+
+    // Load the build configs
+    pipelineConfig.BUILD_CONFIGS.each { build_config ->
+        def platform_regex = build_config.replace('.','\\.').replace('*', '(.*)')
+        if (!env.IS_UNIX) {
+            platform_regex = platform_regex.replace('/','\\\\')
+        }
+        echo "Searching configs in ${build_config} using ${platform_regex}"
+        for (build_config_path in findFiles(glob: build_config)) {
+            echo "\tFound config ${build_config_path}"
+            def platform = RegexMatcher(build_config_path, platform_regex)
+            if(platform) {
+                pipelineConfig.platforms[platform].build_types = readJSON file: build_config_path.toString()
+            }
+        }
+    }
+    return pipelineConfig
+}
+
+// Merges environment maps in increasing precedence:
+//   platform ENV < platform PIPELINE_ENV_OVERRIDE[pipelineName] < build-type env
+// then appends Jenkins-specific 3rdParty package cache locations.
+// Returns a plain map suitable for GetEnvStringList/withEnv.
+def GetBuildEnvVars(Map platformEnv, Map buildTypeEnv, String pipelineName) {
+    def envVarMap = [:]
+    // Fix: declare intermediates with 'def'. They were previously implicit
+    // script-binding globals, which is unsafe when parallel build branches
+    // call this function concurrently.
+    def platformPipelineEnv = platformEnv['ENV'] ?: [:]
+    platformPipelineEnv.each { var ->
+        envVarMap[var.key] = var.value
+    }
+    def platformEnvOverride = platformEnv['PIPELINE_ENV_OVERRIDE'] ?: [:]
+    def platformPipelineEnvOverride = platformEnvOverride[pipelineName] ?: [:]
+    platformPipelineEnvOverride.each { var ->
+        envVarMap[var.key] = var.value
+    }
+    buildTypeEnv.each { var ->
+        // This may override the above one if there is an entry defined by the job
+        envVarMap[var.key] = var.value
+    }
+
+    // Environment that only applies to Jenkins tweaks.
+    // For 3rdParty downloads, we store them in the EBS volume so we can reuse them across node
+    // instances. This allow us to scale up and down without having to re-download 3rdParty
+    envVarMap['LY_PACKAGE_DOWNLOAD_CACHE_LOCATION'] = "${envVarMap['WORKSPACE']}/3rdParty/downloaded_packages"
+    envVarMap['LY_PACKAGE_UNPACK_LOCATION'] = "${envVarMap['WORKSPACE']}/3rdParty/packages"
+
+    return envVarMap
+}
+
+// Flattens an env-var map into the "KEY=VALUE" string list withEnv() expects.
+def GetEnvStringList(Map envVarMap) {
+    return envVarMap.collect { var -> "${var.key}=${var.value}" }
+}
+
+// Runs a command and returns its captured stdout (used to read .lfsconfig
+// values). NOTE(review): despite the name this is a generic run-and-capture
+// helper; 'cmd' is passed through unmodified (no slash replacement).
+def GetLfsConfig(cmd, lbl = '') {
+    if (env.IS_UNIX) {
+        sh label: lbl,
+           script: cmd,
+           returnStdout: true
+    } else {
+        powershell label: lbl, // Powershell is used due to bat output returning the prompt as well
+           script: cmd,
+           returnStdout: true
+    }
+}
+
+// Derives the engine (o3de) SCM remote from this project's remote by swapping
+// the repository and organization names inside the URL; reuses the project's
+// credentials.
+def getEngineRemoteConfig(remoteConfigs) {
+    def engineRemoteConfigs = [name: "${ENGINE_REPOSITORY_NAME}",
+        url: remoteConfigs.url[0]
+            .replace("${PROJECT_REPOSITORY_NAME}", "${ENGINE_REPOSITORY_NAME}")
+            .replace("/${PROJECT_ORGANIZATION_NAME}/", "/${ENGINE_ORGANIZATION_NAME}/"),
+        // NOTE(review): only refs/heads/main is ever fetched here, regardless of
+        // ENGINE_BRANCH, and it is mapped under refs/remotes/<ENGINE_REFSPEC>.
+        // Checking out an ENGINE_BRANCH other than 'main' (or a commit not
+        // reachable from main) will likely fail — confirm, and consider deriving
+        // the fetch refspec from params.ENGINE_BRANCH instead.
+        refspec: "+refs/heads/main:refs/remotes/${params.ENGINE_REFSPEC}",
+        credentialsId: remoteConfigs.credentialsId[0]
+        ]
+    return engineRemoteConfigs
+}
+
+// Sparse, shallow checkout of only the engine's build scripts
+// (scripts/build/Jenkins, bootstrap, Platform) from the o3de repo at the
+// given branch, into the current directory. Runs on the controller before
+// the full dual-repo sync so configs/scripts can be loaded early.
+def CheckoutBootstrapScripts(String branchName) {
+    checkout([$class: "GitSCM",
+        branches: [[name: "*/${branchName}"]],
+        doGenerateSubmoduleConfigurations: false,
+        extensions: [
+            [$class: "PruneStaleBranch"],
+            [$class: "SparseCheckoutPaths",
+                sparseCheckoutPaths: [
+                    [ $class: "SparseCheckoutPath", path: "scripts/build/Jenkins/" ],
+                    [ $class: "SparseCheckoutPath", path: "scripts/build/bootstrap/" ],
+                    [ $class: "SparseCheckoutPath", path: "scripts/build/Platform" ]
+                ]
+            ],
+            [$class: "CloneOption", depth: 1, noTags: false, reference: "", shallow: true]
+        ],
+        submoduleCfg: [],
+        userRemoteConfigs: [getEngineRemoteConfig(scm.userRemoteConfigs)]
+    ])
+}
+
+// Checks out both repos (engine 'o3de' and this project) into sibling
+// directories of the current workspace, with retries and LFS handling.
+// Also exports env.CHANGE_ID with the project's HEAD commit id.
+def CheckoutRepo(boolean disableSubmodules = false) {
+
+    def projectsAndUrl = [
+        "${ENGINE_REPOSITORY_NAME}": getEngineRemoteConfig(scm.userRemoteConfigs),
+        "${PROJECT_REPOSITORY_NAME}": scm.userRemoteConfigs[0]
+    ]
+
+    // Pre-checkout repair pass: recover repos left locked by EBS snapshots.
+    projectsAndUrl.each { projectAndUrl ->
+        if(!fileExists(projectAndUrl.key)) {
+            palMkdir(projectAndUrl.key)
+        }
+        dir(projectAndUrl.key) {
+            if(fileExists('.git')) {
+                // If the repository is locked, we likely took a volume snapshot while
+                // git was running; run 'git gc' to leave the repo in a usable state.
+                // (original comment was truncated here — presumably it also covered
+                // builds aborted mid-git-operation)
+                def indexLockFile = '.git/index.lock'
+                if(fileExists(indexLockFile)) {
+                    palSh('git gc', 'Git GarbageCollect')
+                }
+                if(fileExists(indexLockFile)) { // if it is still there, remove it
+                    palRm(indexLockFile)
+                }
+            }
+        }
+    }
+
+    def random = new Random()
+    def retryAttempt = 0
+    retry(5) {
+        if (retryAttempt > 0) {
+            sleep random.nextInt(60 * retryAttempt)  // Stagger checkouts to prevent HTTP 429 (Too Many Requests) response from CodeCommit
+        }
+        retryAttempt = retryAttempt + 1
+        projectsAndUrl.each { projectAndUrl ->
+            dir(projectAndUrl.key) {
+                def branchName = scm.branches
+                palSh('git lfs uninstall', 'Git LFS Uninstall') // Prevent git from pulling lfs objects during checkout
+                // The engine repo is checked out at the user-selected ENGINE_BRANCH;
+                // the project repo follows the branch that triggered this build.
+                if(projectAndUrl.key == "${ENGINE_REPOSITORY_NAME}") {
+                    branchName = [[name: params.ENGINE_BRANCH]]
+                }
+                checkout scm: [
+                    $class: 'GitSCM',
+                    branches: branchName,
+                    extensions: [
+                        [$class: 'PruneStaleBranch'],
+                        [$class: 'SubmoduleOption', disableSubmodules: disableSubmodules, recursiveSubmodules: true],
+                        [$class: 'CheckoutOption', timeout: 60]
+                    ],
+                    userRemoteConfigs: [projectAndUrl.value]
+                ]
+                if(fileExists(".lfsconfig")) {
+                    // NOTE(review): this writes the access token into .lfsconfig in the
+                    // working copy (persisted on the build volume) — consider a git
+                    // credential helper instead of embedding credentials in the URL.
+                    def localLfsUrl = GetLfsConfig("git config -f .lfsconfig --get lfs.url", "Getting LFS URL").replace("https://","").trim() // Read the lfs file instead of relying on env var
+                    withCredentials([usernamePassword(credentialsId: "${env.GITHUB_USER}", passwordVariable: 'accesstoken', usernameVariable: 'username')]) {
+                        palSh("git config -f .lfsconfig lfs.url https://${username}:${accesstoken}@${localLfsUrl}", 'Set credentials', false)
+                    }
+                }
+
+                palSh('git lfs install', 'Git LFS Install')
+                palSh('git lfs pull', 'Git LFS Pull')
+            }
+        }
+    }
+
+    // CHANGE_ID is used by some scripts to identify uniquely the current change (usually metric jobs)
+    dir(PROJECT_REPOSITORY_NAME) {
+        palSh('git rev-parse HEAD > commitid', 'Getting commit id')
+        env.CHANGE_ID = readFile file: 'commitid'
+        env.CHANGE_ID = env.CHANGE_ID.trim()
+        palRm('commitid')
+    }
+}
+
+// Prepares a build node: mounts the per-job incremental-build EBS volume,
+// cleans up stale/legacy workspace layouts, checks out both repos side by
+// side, bootstraps the engine's Python, and optionally runs a 'clean' step.
+def PreBuildCommonSteps(Map pipelineConfig, String repositoryName, String projectName, String pipeline, String branchName, String platform, String buildType, String workspace, boolean mount = true, boolean disableSubmodules = false) {
+    echo 'Starting pre-build common steps...'
+
+    if (mount) {
+        unstash name: 'incremental_build_script'
+
+        def pythonCmd = ''
+        if(env.IS_UNIX) pythonCmd = 'sudo -E python -u '
+        else pythonCmd = 'python -u '
+
+        if(env.RECREATE_VOLUME?.toBoolean()) {
+            // NOTE(review): 'winSlashReplacement=false' is not a named argument —
+            // Groovy evaluates it as an assignment to a binding variable and passes
+            // false positionally. It works, but plain 'false' would be clearer.
+            palSh("${pythonCmd} ${INCREMENTAL_BUILD_SCRIPT_PATH} --action delete --repository_name ${repositoryName} --project ${projectName} --pipeline ${pipeline} --branch ${branchName} --platform ${platform} --build_type ${buildType}", 'Deleting volume', winSlashReplacement=false)
+        }
+        timeout(5) {
+            palSh("${pythonCmd} ${INCREMENTAL_BUILD_SCRIPT_PATH} --action mount --repository_name ${repositoryName} --project ${projectName} --pipeline ${pipeline} --branch ${branchName} --platform ${platform} --build_type ${buildType}", 'Mounting volume', winSlashReplacement=false)
+        }
+
+        if(env.IS_UNIX) {
+            // Freshly created volumes are root-owned; hand them to the build user.
+            sh label: 'Setting volume\'s ownership',
+               script: """
+                if sudo test ! -d "${workspace}"; then
+                    sudo mkdir -p ${workspace}
+                    cd ${workspace}/..
+                    sudo chown -R lybuilder:root .
+                fi
+               """
+        }
+    }
+
+    // Cleanup previous repo location, we are currently at the root of the workspace, if we have a .git folder
+    // we need to cleanup. Once all branches take this relocation, we can remove this
+    if(env.CLEAN_WORKSPACE?.toBoolean() || fileExists("${workspace}/.git")) {
+        if(fileExists(workspace)) {
+            palRmDir(workspace)
+        }
+    }
+
+    dir(workspace) {
+        // Add folder where we will store the 3rdParty downloads and packages
+        if(!fileExists('3rdParty')) {
+            palMkdir('3rdParty')
+        }
+
+        CheckoutRepo(disableSubmodules)
+    }
+
+    dir("${workspace}/${ENGINE_REPOSITORY_NAME}") {
+        // Get python
+        if(env.IS_UNIX) {
+            sh label: 'Getting python',
+               script: 'python/get_python.sh'
+        } else {
+            bat label: 'Getting python',
+                script: 'python/get_python.bat'
+        }
+
+        // Honor the CLEAN_* parameters via the engine's build entry point.
+        if(env.CLEAN_OUTPUT_DIRECTORY?.toBoolean() || env.CLEAN_ASSETS?.toBoolean()) {
+            def command = "${pipelineConfig.BUILD_ENTRY_POINT} --platform ${platform} --type clean"
+            if (env.IS_UNIX) {
+                sh label: "Running ${platform} clean",
+                   script: "${pipelineConfig.PYTHON_DIR}/python.sh -u ${command}"
+            } else {
+                bat label: "Running ${platform} clean",
+                    script: "${pipelineConfig.PYTHON_DIR}/python.cmd -u ${command}".replace('/','\\')
+            }
+        }
+    }
+}
+
+// Runs one build step through the engine's build entry point.
+// If EXECUTE_FROM_PROJECT is set, the working directory is the project repo and
+// the engine's scripts are reached via a relative path; otherwise we run from
+// the engine repo directly. The scripts themselves always live in the engine —
+// only the current directory and the path prefix to reach them change.
+def Build(Map options, String platform, String type, String workspace) {
+    def currentDir = "${workspace}/${ENGINE_REPOSITORY_NAME}"
+    def pathToEngine = ""
+    if (env.EXECUTE_FROM_PROJECT?.toBoolean()) {
+        currentDir = "${workspace}/${PROJECT_REPOSITORY_NAME}"
+        pathToEngine = "../${ENGINE_REPOSITORY_NAME}/"
+    }
+    def command = "${pathToEngine}${options.BUILD_ENTRY_POINT} --platform ${platform} --type ${type}"
+    dir("${currentDir}") {
+        if (env.IS_UNIX) {
+            sh label: "Running ${platform} ${type}",
+               script: "${pathToEngine}${options.PYTHON_DIR}/python.sh -u ${command}"
+        } else {
+            bat label: "Running ${platform} ${type}",
+                script: "${pathToEngine}${options.PYTHON_DIR}/python.cmd -u ${command}".replace('/','\\')
+        }
+    }
+}
+
+// Publishes ctest results to MARS by checking out the metrics-scraper repo
+// inside the engine checkout and running it against the cmake build dir.
+// Windows-only as written ('bat' and %VAR% expansion). Wrapped in catchError
+// so metrics publishing can never fail or destabilize the build/stage result.
+def TestMetrics(Map options, String workspace, String branchName, String repoName, String buildJobName, String outputDirectory, String configuration) {
+    catchError(buildResult: null, stageResult: null) {
+        def cmakeBuildDir = [workspace, ENGINE_REPOSITORY_NAME, outputDirectory].join('/')
+        dir("${workspace}/${ENGINE_REPOSITORY_NAME}") {
+            checkout scm: [
+                $class: 'GitSCM',
+                branches: [[name: '*/main']],
+                extensions: [[$class: 'RelativeTargetDirectory', relativeTargetDir: 'mars']],
+                userRemoteConfigs: [[url: "${env.MARS_REPO}", name: 'mars', credentialsId: "${env.GITHUB_USER}"]]
+            ]
+            withCredentials([usernamePassword(credentialsId: "${env.SERVICE_USER}", passwordVariable: 'apitoken', usernameVariable: 'username')]) {
+                def command = "${options.PYTHON_DIR}/python.cmd -u mars/scripts/python/ctest_test_metric_scraper.py " +
+                              '-e jenkins.creds.user %username% -e jenkins.creds.pass %apitoken% ' +
+                              "-e jenkins.base_url ${env.JENKINS_URL} " +
+                              "${cmakeBuildDir} ${branchName} %BUILD_NUMBER% AR ${configuration} ${repoName} "
+                bat label: "Publishing ${buildJobName} Test Metrics",
+                    script: command
+            }
+        }
+    }
+}
+
+// Unmounts the incremental-build volume after a job. Unmount failures are
+// logged but never fail the build (best-effort cleanup).
+def PostBuildCommonSteps(String workspace, boolean mount = true) {
+    echo 'Starting post-build common steps...'
+
+    if (mount) {
+        def pythonCmd = ''
+        if(env.IS_UNIX) pythonCmd = 'sudo -E python -u '
+        else pythonCmd = 'python -u '
+
+        try {
+            timeout(5) {
+                palSh("${pythonCmd} ${INCREMENTAL_BUILD_SCRIPT_PATH} --action unmount", 'Unmounting volume')
+            }
+        } catch (Exception e) {
+            echo "Unmount script error ${e}"
+        }
+    }
+}
+
+// Returns a closure wrapping PreBuildCommonSteps in a named 'Setup' stage.
+def CreateSetupStage(Map pipelineConfig, String repositoryName, String projectName, String pipelineName, String branchName, String platformName, String jobName, Map environmentVars) {
+    return {
+        stage("Setup") {
+            PreBuildCommonSteps(pipelineConfig, repositoryName, projectName, pipelineName, branchName, platformName, jobName,  environmentVars['WORKSPACE'], environmentVars['MOUNT_VOLUME'])
+        }
+    }
+}
+
+// Returns a closure wrapping Build() in a stage named after the job.
+def CreateBuildStage(Map pipelineConfig, String platformName, String jobName, Map environmentVars) {
+    return {
+        stage("${jobName}") {
+            Build(pipelineConfig, platformName, jobName, environmentVars['WORKSPACE'])
+        }
+    }
+}
+
+// Returns a closure wrapping TestMetrics in a '<job>_metrics' stage.
+// NOTE(review): passes env.DEFAULT_REPOSITORY_NAME as the repo name — verify
+// this env var is set in this project's context (other call sites use the
+// 'repositoryName' derived from the SCM URL).
+def CreateTestMetricsStage(Map pipelineConfig, String branchName, Map environmentVars, String buildJobName, String outputDirectory, String configuration) {
+    return {
+        stage("${buildJobName}_metrics") {
+            TestMetrics(pipelineConfig, environmentVars['WORKSPACE'], branchName, env.DEFAULT_REPOSITORY_NAME, buildJobName, outputDirectory, configuration)
+        }
+    }
+}
+
+// Returns a closure wrapping PostBuildCommonSteps in a 'Teardown' stage.
+def CreateTeardownStage(Map environmentVars) {
+    return {
+        stage("Teardown") {
+            PostBuildCommonSteps(environmentVars['WORKSPACE'], environmentVars['MOUNT_VOLUME'])
+        }
+    }
+}
+
+// ---- Pipeline entry point ------------------------------------------------
+def projectName = ''
+def pipelineName = ''
+def branchName = ''
+def pipelineConfig = {}
+
+// Start Pipeline
+try {
+    // Phase 1 (controller): discover repo/branch, fetch the engine's bootstrap
+    // scripts at ENGINE_BRANCH, load configs, and register build parameters.
+    stage('Setup Pipeline') {
+        node('controller') {
+            def envVarList = []
+            if(isUnix()) {
+                envVarList.add('IS_UNIX=1')
+            }
+            withEnv(envVarList) {
+                timestamps {
+                    // NOTE(review): repositoryUrl/repositoryName have no 'def', making
+                    // them script-binding globals — repositoryName is read later inside
+                    // the parallel build closures, so this appears intentional.
+                    repositoryUrl = scm.getUserRemoteConfigs()[0].getUrl()
+                    // repositoryName is the full repository name
+                    repositoryName = (repositoryUrl =~ /https:\/\/github.com\/(.*)\.git/)[0][1]
+                    (projectName, pipelineName) = GetRunningPipelineName(env.JOB_NAME) // env.JOB_NAME is the name of the job given by Jenkins
+
+                    if(env.BRANCH_NAME) {
+                        branchName = env.BRANCH_NAME
+                    } else {
+                        branchName = scm.branches[0].name // for non-multibranch pipelines
+                        env.BRANCH_NAME = branchName // so scripts that read this environment have it (e.g. incremental_build_util.py)
+                    }
+                    pipelineProperties.add(disableConcurrentBuilds())
+
+                    echo "Running \"${pipelineName}\" for \"${branchName}\" on engine branch \"${params.ENGINE_BRANCH}\"..."
+
+                    CheckoutBootstrapScripts(params.ENGINE_BRANCH)
+
+                    // Load configs
+                    pipelineConfig = LoadPipelineConfig(pipelineName, branchName)
+
+                    // Add each platform as a parameter that the user can disable if needed
+                    if (!IsPullRequest(branchName)) {
+                        pipelineParameters.add(stringParam(defaultValue: '', description: 'Filters and overrides the list of jobs to run for each of the below platforms (comma-separated). Can\'t be used during a pull request.', name: 'JOB_LIST_OVERRIDE'))
+
+                        pipelineConfig.platforms.each { platform ->
+                            pipelineParameters.add(booleanParam(defaultValue: true, description: '', name: platform.key))
+                        }
+                    }
+                    pipelineProperties.add(parameters(pipelineParameters))
+                    properties(pipelineProperties)
+
+                    // Stash the INCREMENTAL_BUILD_SCRIPT_PATH since all nodes will use it
+                    stash name: 'incremental_build_script',
+                          includes: INCREMENTAL_BUILD_SCRIPT_PATH
+                 }
+            }
+        }
+    }
+
+    if(env.BUILD_NUMBER == '1' && !IsPullRequest(branchName)) {
+        // Exit pipeline early on the initial build. This allows Jenkins to load the pipeline for the branch and enables users
+        // to select build parameters on their first actual build. See https://issues.jenkins.io/browse/JENKINS-41929
+        currentBuild.result = 'SUCCESS'
+        return
+    }
+
+    def someBuildHappened = false
+
+    // Build and Post-Build Testing Stage
+    def buildConfigs = [:]
+
+    // Platform Builds run on EC2
+    // Phase 2: assemble one parallel branch per enabled (platform, job) pair.
+    pipelineConfig.platforms.each { platform ->
+        platform.value.build_types.each { build_job ->
+            if (IsJobEnabled(branchName, build_job, pipelineName, platform.key)) {   // User can filter jobs, jobs are tagged by pipeline
+                def envVars = GetBuildEnvVars(platform.value.PIPELINE_ENV ?: EMPTY_JSON, build_job.value.PIPELINE_ENV ?: EMPTY_JSON, pipelineName)
+                envVars['JOB_NAME'] = "${branchName}_${platform.key}_${build_job.key}" // backwards compatibility, some scripts rely on this
+                // Key override for the nested-project workflow: point the engine
+                // build at this project's checkout, which sits next to the engine.
+                envVars['CMAKE_LY_PROJECTS'] = "../${PROJECT_REPOSITORY_NAME}"
+                def nodeLabel = envVars['NODE_LABEL']
+                someBuildHappened = true
+
+                buildConfigs["${platform.key} [${build_job.key}]"] = {
+                    node("${nodeLabel}") {
+                        if(isUnix()) { // Has to happen inside a node
+                            envVars['IS_UNIX'] = 1
+                        }
+                        withEnv(GetEnvStringList(envVars)) {
+                            try {
+                                def build_job_name = build_job.key
+
+                                CreateSetupStage(pipelineConfig, repositoryName, projectName, pipelineName, branchName, platform.key, build_job.key, envVars).call()
+
+                                if(build_job.value.steps) { //this is a pipe with many steps so create all the build stages
+                                    build_job.value.steps.each { build_step ->
+                                        build_job_name = build_step
+
+                                        def buildTypeJson = platform.value.build_types["${build_job_name}"]
+                                        def buildTypePipelineEnv = buildTypeJson ? buildTypeJson.PIPELINE_ENV : EMPTY_JSON
+                                        // NOTE(review): this aliases envVars (no copy), so per-step
+                                        // overrides accumulate across steps and leak back into
+                                        // envVars itself — confirm intended; otherwise use
+                                        // envVars.clone(). (Passing envVars to CreateBuildStage
+                                        // below is equivalent because of the aliasing.)
+                                        def jobEnvVars = envVars
+                                        buildTypePipelineEnv.each { var ->
+                                            jobEnvVars[var.key] = var.value
+                                        }
+
+                                        withEnv(GetEnvStringList(jobEnvVars)) {
+                                            CreateBuildStage(pipelineConfig,  platform.key, build_step, envVars).call()
+                                        }
+                                    }
+                                } else {
+                                    CreateBuildStage(pipelineConfig,  platform.key, build_job.key, envVars).call()
+                                }
+
+                                // Metrics only publish for the last-run 'test*' job on Windows.
+                                if (env.MARS_REPO && platform.key == 'Windows' && build_job_name.startsWith('test')) {
+                                    def output_directory = platform.value.build_types[build_job_name].PARAMETERS.OUTPUT_DIRECTORY
+                                    def configuration = platform.value.build_types[build_job_name].PARAMETERS.CONFIGURATION
+                                    CreateTestMetricsStage(pipelineConfig, branchName, envVars, build_job_name, output_directory, configuration).call()
+                                }
+                            }
+                            catch(Exception e) {
+                                //  https://github.com/jenkinsci/jenkins/blob/master/core/src/main/java/hudson/model/Result.java
+                                //  {SUCCESS,UNSTABLE,FAILURE,NOT_BUILT,ABORTED}
+                                def currentResult = envVars['ON_FAILURE_MARK'] ?: 'FAILURE'
+                                if (currentResult == 'FAILURE') {
+                                    currentBuild.result = 'FAILURE'
+                                    error "FAILURE: ${e}"
+                                } else if (currentResult == 'UNSTABLE') {
+                                    currentBuild.result = 'UNSTABLE'
+                                    unstable(message: "UNSTABLE: ${e}")
+                                }
+                            }
+                            finally {
+                                CreateTeardownStage(envVars).call()
+                            }
+                        }
+                    }
+                }
+            }
+        }
+    }
+
+    timestamps {
+
+        stage('Build') {
+            parallel buildConfigs // Run parallel builds
+        }
+
+        echo 'All builds successful'
+    }
+    if (!someBuildHappened) {
+        currentBuild.result = 'NOT_BUILT'
+    }
+}
+catch(Exception e) {
+    error "Exception: ${e}"
+}
+finally {
+    // Always notify (SNS + email), regardless of result.
+    try {
+        if(env.SNS_TOPIC) {
+            snsPublish(
+                topicArn: env.SNS_TOPIC,
+                subject:'Build Result',
+                message:"${currentBuild.currentResult}:${BUILD_URL}:${env.RECREATE_VOLUME}:${env.CLEAN_OUTPUT_DIRECTORY}:${env.CLEAN_ASSETS}"
+            )
+        }
+        step([
+            $class: 'Mailer',
+            notifyEveryUnstableBuild: true,
+            sendToIndividuals: true,
+            recipients: emailextrecipients([
+                [$class: 'CulpritsRecipientProvider'],
+                [$class: 'RequesterRecipientProvider']
+            ])
+        ])
+    } catch(Exception e) {
+        // NOTE(review): notification failures are deliberately swallowed so they
+        // cannot mask the build result; consider at least echoing 'e'.
+    }
+}