Commit 9aafb3fb authored by Mike Marchetti

Update to support Gerrit merge/patchset triggers


Signed-off-by: Mike Marchetti <mmarchetti@sandvine.com>
parent 6bb17890
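In brief, the hunks below stop hard-coding the downstream stage name and instead derive it from the GerRIT event type: change-merged events dispatch to the "-merge" flavour of stage_2, patchset-created events to the regular stage_2 job. A condensed sketch of that mapping, assuming the mdg and GERRIT_* variables already defined in the pipeline:

// Sketch only: map the Gerrit trigger to the downstream multibranch job name.
// Anything other than the two known event types falls back to the raw event type.
def stage_name = GERRIT_EVENT_TYPE
switch (GERRIT_EVENT_TYPE) {
    case 'change-merged':    stage_name = 'stage_2-merge'; break
    case 'patchset-created': stage_name = 'stage_2';       break
}
def downstream_job_name = "${mdg}-${stage_name}/${GERRIT_BRANCH}"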
@@ -36,6 +36,19 @@ node("${params.NODE}") {
}
stage('downstream') {
// initially use stage_name as the event_type
def stage_name = GERRIT_EVENT_TYPE
switch(GERRIT_EVENT_TYPE) {
case "change-merged":
stage_name = "stage_2-merge"
break
case "patchset-created":
stage_name = "stage_2"
break
}
// pipeline running from gerrit trigger.
// kickoff the downstream multibranch pipeline
def downstream_params = [
@@ -46,8 +59,7 @@ node("${params.NODE}") {
string(name: 'PROJECT_URL_PREFIX', value: params.PROJECT_URL_PREFIX),
booleanParam(name: 'TEST_INSTALL', value: params.TEST_INSTALL),
]
stage_name = "stage_2"
if ( params.STAGE )
{
// go directly to stage 3 (osm system)
@@ -59,11 +71,12 @@ node("${params.NODE}") {
return
}
}
println("TEST_INSTALL = ${params.TEST_INSTALL}")
// callout to stage_2. This is a multi-branch pipeline.
upstream_job_name = "${mdg}-${stage_name}/${GERRIT_BRANCH}"
downstream_job_name = "${mdg}-${stage_name}/${GERRIT_BRANCH}"
println("TEST_INSTALL = ${params.TEST_INSTALL}, downstream job: ${downstream_job_name}")
stage_2_result = build job: "${upstream_job_name}", parameters: downstream_params, propagate: true
stage_2_result = build job: "${downstream_job_name}", parameters: downstream_params, propagate: true
if (stage_2_result.getResult() != 'SUCCESS') {
project = stage_2_result.getProjectName()
build = stage_2_result.getNumber()
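The dispatch itself is unchanged apart from the rename to downstream_job_name: the per-MDG stage_2 multibranch job is built synchronously and its result inspected. A condensed sketch of that call, assuming the downstream_params list assembled above:

// Sketch: trigger the per-MDG stage_2 job for this Gerrit branch.
// propagate: true means a failing downstream build also fails this pipeline.
def stage_2_result = build job: downstream_job_name,
                           parameters: downstream_params,
                           propagate: true
if (stage_2_result.getResult() != 'SUCCESS') {
    // identify which downstream project/build did not succeed
    def project = stage_2_result.getProjectName()
    def build_num = stage_2_result.getNumber()
}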
@@ -74,9 +74,13 @@ def ci_pipeline(mdg,url_prefix,project,branch,refspec,revision,build_system,arti
string(name: 'UPSTREAM_JOB_NAME', value: "${JOB_NAME}" ),
string(name: 'UPSTREAM_JOB_NUMBER', value: "${BUILD_NUMBER}" ),
]
stage_3_job = "osm-stage_3"
if ( JOB_NAME.contains('merge') ) {
stage_3_job += '-merge'
}
// callout to stage_3. This is the system build
result = build job: "osm-stage_3/${branch}", parameters: downstream_params_stage_3, propagate: true
// callout to stage_3. This is the system build
result = build job: "${stage_3_job}/${branch}", parameters: downstream_params_stage_3, propagate: true
if (result.getResult() != 'SUCCESS') {
project = result.getProjectName()
build = result.getNumber()
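The call-out to the system build follows the same naming convention: a merge-triggered pipeline dispatches to the -merge flavour of osm-stage_3. Equivalently, as a short sketch using the same JOB_NAME, branch and parameter variables:

// Sketch: merge pipelines call osm-stage_3-merge, patchset pipelines call osm-stage_3.
def stage_3_job = JOB_NAME.contains('merge') ? 'osm-stage_3-merge' : 'osm-stage_3'
result = build job: "${stage_3_job}/${branch}", parameters: downstream_params_stage_3, propagate: true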
@@ -31,7 +31,7 @@ properties([
string(defaultValue: 'dpkg1', description: '', name: 'GPG_KEY_NAME'),
string(defaultValue: 'artifactory-osm', description: '', name: 'ARTIFACTORY_SERVER'),
booleanParam(defaultValue: false, description: '', name: 'SAVE_CONTAINER_ON_FAIL'),
booleanParam(defaultValue: false, description: '', name: 'SAVE_CONTAINER_ON_PASS'),
booleanParam(defaultValue: false, description: '', name: 'SAVE_CONTAINER_ON_PASS')
])
])
@@ -49,78 +49,87 @@ node("${params.NODE}") {
ci_helper = load "jenkins/ci-pipelines/ci_helper.groovy"
def upstream_main_job = params.UPSTREAM_SUFFIX
def save_artifacts = false
if ( JOB_NAME.contains('merge') ) {
upstream_main_job += '-merge'
save_artifacts = true
println("merge job, saving artifacts")
}
// Copy the artifacts from the upstream jobs
stage("Copy Artifacts") {
// cleanup any previous repo
sh 'rm -rf repo'
if ( params.UPSTREAM_SUFFIX ) {
dir("repo") {
// grab all stable upstream builds based on the
// given target UPSTREAM_SUFFIX
dir("${RELEASE}") {
def list = ["SO", "UI", "RO", "openvim", "osmclient", "IM"]
for (component in list) {
step ([$class: 'CopyArtifact',
projectName: "${component}${params.UPSTREAM_SUFFIX}/${GERRIT_BRANCH}"])
// grab the build name/number
//options = get_env_from_build('build.env')
build_num = ci_helper.get_env_value('build.env','BUILD_NUMBER')
//build_num = sh(returnStdout:true, script: "cat build.env | awk -F= '/BUILD_NUMBER/{print \$2}'").trim()
ci_helper.get_archive(params.ARTIFACTORY_SERVER,component,GERRIT_BRANCH, "${component}-stage_2 :: ${GERRIT_BRANCH}", build_num)
// clean up any previously defined dists
sh "rm -rf dists"
}
// check if an upstream artifact based on a specific build number has been requested.
// This is the case for a merge build whose upstream merge build is not yet complete (it is not
// yet deemed a successful build). The upstream job calls this downstream job with its build artifact.
if ( params.UPSTREAM_JOB_NAME ) {
step ([$class: 'CopyArtifact',
projectName: "${params.UPSTREAM_JOB_NAME}",
selector: [$class: 'SpecificBuildSelector', buildNumber: "${params.UPSTREAM_JOB_NUMBER}"]
])
//options = get_env_from_build('build.env')
// grab the build name/number
//build_num = sh(returnStdout:true, script: "cat build.env | awk -F= '/BUILD_NUMBER/{print \$2}'").trim()
build_num = ci_helper.get_env_value('build.env','BUILD_NUMBER')
component = ci_helper.get_mdg_from_project(ci_helper.get_env_value('build.env','GERRIT_PROJECT'))
ci_helper.get_archive(params.ARTIFACTORY_SERVER,component,GERRIT_BRANCH, "${component}-stage_2 :: ${GERRIT_BRANCH}", build_num)
sh "rm -rf dists"
}
// sign all the components
for (component in list) {
sh "dpkg-sig --sign builder -k ${GPG_KEY_NAME} pool/${component}/*"
}
// now create the distro
for (component in list) {
sh "mkdir -p dists/${params.REPO_DISTRO}/${component}/binary-amd64/"
sh "apt-ftparchive packages pool/${component} > dists/${params.REPO_DISTRO}/${component}/binary-amd64/Packages"
sh "gzip -9fk dists/${params.REPO_DISTRO}/${component}/binary-amd64/Packages"
}
// create and sign the release file
sh "apt-ftparchive release dists/${params.REPO_DISTRO} > dists/${params.REPO_DISTRO}/Release"
sh "gpg --yes -abs -u ${GPG_KEY_NAME} -o dists/${params.REPO_DISTRO}/Release.gpg dists/${params.REPO_DISTRO}/Release"
// copy the public key into the release folder
// this pulls the key from the home dir of the current user (jenkins)
sh "cp ~/${REPO_KEY_NAME} ."
dir("repo") {
// grab all stable upstream builds based on the upstream main job
dir("${RELEASE}") {
def list = ["SO", "UI", "RO", "openvim", "osmclient", "IM"]
for (component in list) {
step ([$class: 'CopyArtifact',
projectName: "${component}${upstream_main_job}/${GERRIT_BRANCH}"])
// grab the build name/number
//options = get_env_from_build('build.env')
build_num = ci_helper.get_env_value('build.env','BUILD_NUMBER')
// grab the archives from the stage_2 builds (i.e. the artifacts stored by a merge build)
ci_helper.get_archive(params.ARTIFACTORY_SERVER,component,GERRIT_BRANCH, "${component}${upstream_main_job} :: ${GERRIT_BRANCH}", build_num)
// clean up any previously defined dists
sh "rm -rf dists"
}
// start an apache server to serve up the images
http_server_name = "${container_name}-apache"
pwd = sh(returnStdout:true, script: 'pwd').trim()
repo_base_url = ci_helper.start_http_server(pwd,http_server_name)
// check if an upstream artifact based on a specific build number has been requested.
// This is the case for a merge build whose upstream merge build is not yet complete (it is not
// yet deemed a successful build). The upstream job calls this downstream job with its build artifact.
if ( params.UPSTREAM_JOB_NAME ) {
step ([$class: 'CopyArtifact',
projectName: "${params.UPSTREAM_JOB_NAME}",
selector: [$class: 'SpecificBuildSelector', buildNumber: "${params.UPSTREAM_JOB_NUMBER}"]
])
//options = get_env_from_build('build.env')
// grab the build name/number
//build_num = sh(returnStdout:true, script: "cat build.env | awk -F= '/BUILD_NUMBER/{print \$2}'").trim()
build_num = ci_helper.get_env_value('build.env','BUILD_NUMBER')
component = ci_helper.get_mdg_from_project(ci_helper.get_env_value('build.env','GERRIT_PROJECT'))
// the upstream job name has the branch appended; strip the branch off to get the project name
def project_without_branch = params.UPSTREAM_JOB_NAME.split('/')[0]
ci_helper.get_archive(params.ARTIFACTORY_SERVER,component,GERRIT_BRANCH, "${project_without_branch} :: ${GERRIT_BRANCH}", build_num)
sh "rm -rf dists"
}
// sign all the components
for (component in list) {
sh "dpkg-sig --sign builder -k ${GPG_KEY_NAME} pool/${component}/*"
}
// now create the distro
for (component in list) {
sh "mkdir -p dists/${params.REPO_DISTRO}/${component}/binary-amd64/"
sh "apt-ftparchive packages pool/${component} > dists/${params.REPO_DISTRO}/${component}/binary-amd64/Packages"
sh "gzip -9fk dists/${params.REPO_DISTRO}/${component}/binary-amd64/Packages"
}
// create and sign the release file
sh "apt-ftparchive release dists/${params.REPO_DISTRO} > dists/${params.REPO_DISTRO}/Release"
sh "gpg --yes -abs -u ${GPG_KEY_NAME} -o dists/${params.REPO_DISTRO}/Release.gpg dists/${params.REPO_DISTRO}/Release"
// copy the public key into the release folder
// this pulls the key from the home dir of the current user (jenkins)
sh "cp ~/${REPO_KEY_NAME} ."
}
// start an apache server to serve up the images
http_server_name = "${container_name}-apache"
pwd = sh(returnStdout:true, script: 'pwd').trim()
repo_base_url = ci_helper.start_http_server(pwd,http_server_name)
}
}
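For reference, the repository assembly in this stage is driven entirely by standard Debian tooling invoked from sh steps: packages are signed per component, per-component Packages indices are generated, and a signed Release file is produced. A condensed sketch of that sequence, using the same variables and key names as the hunk above:

// Sketch: sign packages, build per-component indices, then sign the Release file.
def components = ['SO', 'UI', 'RO', 'openvim', 'osmclient', 'IM']
for (component in components) {
    sh "dpkg-sig --sign builder -k ${GPG_KEY_NAME} pool/${component}/*"
    sh "mkdir -p dists/${params.REPO_DISTRO}/${component}/binary-amd64/"
    sh "apt-ftparchive packages pool/${component} > dists/${params.REPO_DISTRO}/${component}/binary-amd64/Packages"
    sh "gzip -9fk dists/${params.REPO_DISTRO}/${component}/binary-amd64/Packages"
}
// create and sign the top-level Release file for the distro
sh "apt-ftparchive release dists/${params.REPO_DISTRO} > dists/${params.REPO_DISTRO}/Release"
sh "gpg --yes -abs -u ${GPG_KEY_NAME} -o dists/${params.REPO_DISTRO}/Release.gpg dists/${params.REPO_DISTRO}/Release"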
@@ -174,13 +183,16 @@ node("${params.NODE}") {
junit '*.xml'
}
stage("Archive") {
sh "echo ${container_name} > build_version.txt"
archiveArtifacts artifacts: "build_version.txt", fingerprint: true
// save the artifacts of this build if this is a merge job
if ( save_artifacts ) {
stage("Archive") {
sh "echo ${container_name} > build_version.txt"
archiveArtifacts artifacts: "build_version.txt", fingerprint: true
// Archive the tested repo
dir("repo/${RELEASE}") {
ci_helper.archive(params.ARTIFACTORY_SERVER,RELEASE,GERRIT_BRANCH,'tested')
// Archive the tested repo
dir("repo/${RELEASE}") {
ci_helper.archive(params.ARTIFACTORY_SERVER,RELEASE,GERRIT_BRANCH,'tested')
}
}
}
}
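Finally, archiving is now gated on the merge flavour: only merge jobs record the build version and push the tested repository to Artifactory. A minimal sketch, assuming the save_artifacts flag set from JOB_NAME earlier in the pipeline:

// Sketch: archive tested artifacts only for merge jobs.
if (save_artifacts) {
    stage('Archive') {
        sh "echo ${container_name} > build_version.txt"
        archiveArtifacts artifacts: 'build_version.txt', fingerprint: true
        dir("repo/${RELEASE}") {
            // label this repository as 'tested' in Artifactory
            ci_helper.archive(params.ARTIFACTORY_SERVER, RELEASE, GERRIT_BRANCH, 'tested')
        }
    }
}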