/* Copyright ETSI Contributors and Others
 *
 * Licensed under the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License. You may obtain
 * a copy of the License at
 *
 *         http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 * License for the specific language governing permissions and limitations
 * under the License.
 */
// Jenkins job parameters for this OSM system-test pipeline.
// NOTE(review): this file is a corrupted numbered listing — every line still
// carries its original source line number, and the enclosing
// properties([parameters([ ... ])]) wrapper is missing from this view.
// Verify against the pristine Jenkinsfile before executing.
// --- Branch / node / upstream-artifact selection ---
20 string(defaultValue: env.GERRIT_BRANCH, description: '', name: 'GERRIT_BRANCH'),
21 string(defaultValue: 'system', description: '', name: 'NODE'),
22 string(defaultValue: '', description: '', name: 'BUILD_FROM_SOURCE'),
23 string(defaultValue: 'unstable', description: '', name: 'REPO_DISTRO'),
24 string(defaultValue: '', description: '', name: 'COMMIT_ID'),
25 string(defaultValue: '-stage_2', description: '', name: 'UPSTREAM_SUFFIX'),
26 string(defaultValue: 'pubkey.asc', description: '', name: 'REPO_KEY_NAME'),
27 string(defaultValue: 'release', description: '', name: 'RELEASE'),
28 string(defaultValue: '', description: '', name: 'UPSTREAM_JOB_NAME'),
29 string(defaultValue: '', description: '', name: 'UPSTREAM_JOB_NUMBER'),
// --- Artifact signing / storage / downstream publication ---
30 string(defaultValue: 'OSMETSI', description: '', name: 'GPG_KEY_NAME'),
31 string(defaultValue: 'artifactory-osm', description: '', name: 'ARTIFACTORY_SERVER'),
32 string(defaultValue: 'osm-stage_4', description: '', name: 'DOWNSTREAM_STAGE_NAME'),
33 string(defaultValue: 'testing-daily', description: '', name: 'DOCKER_TAG'),
// --- OpenStack VM used for the remote installation (see 'Spawn Remote VM') ---
34 string(defaultValue: 'ubuntu22.04', description: '', name: 'OPENSTACK_BASE_IMAGE'),
35 string(defaultValue: 'osm.sanity', description: '', name: 'OPENSTACK_OSM_FLAVOR'),
// --- Stage toggles and artifact/container retention flags ---
36 booleanParam(defaultValue: false, description: '', name: 'TRY_OLD_SERVICE_ASSURANCE'),
37 booleanParam(defaultValue: false, description: '', name: 'SAVE_CONTAINER_ON_FAIL'),
38 booleanParam(defaultValue: false, description: '', name: 'SAVE_CONTAINER_ON_PASS'),
39 booleanParam(defaultValue: true, description: '', name: 'SAVE_ARTIFACTS_ON_SMOKE_SUCCESS'),
40 booleanParam(defaultValue: true, description: '', name: 'DO_BUILD'),
41 booleanParam(defaultValue: true, description: '', name: 'DO_INSTALL'),
42 booleanParam(defaultValue: true, description: '', name: 'DO_DOCKERPUSH'),
43 booleanParam(defaultValue: false, description: '', name: 'SAVE_ARTIFACTS_OVERRIDE'),
44 string(defaultValue: '/home/jenkins/hive/openstack-etsi.rc', description: '', name: 'HIVE_VIM_1'),
// --- Robot system-test configuration (paths are on the Jenkins agent) ---
45 booleanParam(defaultValue: true, description: '', name: 'DO_ROBOT'),
46 string(defaultValue: 'sanity', description: 'sanity/regression/daily are the common options',
47 name: 'ROBOT_TAG_NAME'),
48 string(defaultValue: '/home/jenkins/hive/robot-systest.cfg', description: '', name: 'ROBOT_VIM'),
49 string(defaultValue: '/home/jenkins/hive/port-mapping-etsi-vim.yaml',
50 description: 'Port mapping file for SDN assist in ETSI VIM',
51 name: 'ROBOT_PORT_MAPPING_VIM'),
52 string(defaultValue: '/home/jenkins/hive/kubeconfig.yaml', description: '', name: 'KUBECONFIG'),
53 string(defaultValue: '/home/jenkins/hive/clouds.yaml', description: '', name: 'CLOUDS'),
54 string(defaultValue: 'Default', description: '', name: 'INSTALLER'),
// Thresholds are forwarded to RobotPublisher in run_robot_systest().
55 string(defaultValue: '100.0', description: '% passed Robot tests to mark the build as passed',
56 name: 'ROBOT_PASS_THRESHOLD'),
57 string(defaultValue: '80.0', description: '% passed Robot tests to mark the build as unstable ' +
58 '(if lower, it will be failed)', name: 'ROBOT_UNSTABLE_THRESHOLD'),
62 ////////////////////////////////////////////////////////////////////////////////////////
64 ////////////////////////////////////////////////////////////////////////////////////////
// Runs the Robot Framework system tests inside the opensourcemano/tests:<tagName>
// container against an installed OSM, copies the resulting XML/HTML reports back
// into the workspace, and publishes them via RobotPublisher using the supplied
// pass/unstable thresholds.
// NOTE(review): corrupted numbered listing — several original lines are missing
// from this view (e.g. the declarations of testName/envfile/osmHostname/clouds/
// hostfile used below, the 'if (envfile)' branch, the 'step([' opener, and
// closing braces). Do not reflow this code without the pristine Jenkinsfile.
65 void run_robot_systest(String tagName,
68 String prometheusHostname,
69 Integer prometheusPort=null,
71 String portmappingfile=null,
72 String kubeconfig=null,
75 String jujuPassword=null,
76 String osmRSAfile=null,
// passThreshold / unstableThreshold are forwarded verbatim to RobotPublisher.
77 String passThreshold='0.0',
78 String unstableThreshold='0.0') {
// Scratch directory; mounted into the container as /robot-systest/reports.
79 tempdir = sh(returnStdout: true, script: 'mktemp -d').trim()
80 String environmentFile = ''
// Use the provided env file when set; otherwise create an empty one in tempdir
// (the surrounding if/else lines are missing from this view — confirm).
82 environmentFile = envfile
84 sh(script: "touch ${tempdir}/env")
85 environmentFile = "${tempdir}/env"
// Optional docker --env flags: left as empty strings when not supplied.
87 PROMETHEUS_PORT_VAR = ''
88 if (prometheusPort != null) {
89 PROMETHEUS_PORT_VAR = "--env PROMETHEUS_PORT=${prometheusPort}"
93 hostfilemount = "-v ${hostfile}:/etc/hosts"
96 JUJU_PASSWORD_VAR = ''
97 if (jujuPassword != null) {
98 JUJU_PASSWORD_VAR = "--env JUJU_PASSWORD=${jujuPassword}"
// Launch the test container; '-c -t <testName>' selects the tests to run.
102 sh("""docker run --env OSM_HOSTNAME=${osmHostname} --env PROMETHEUS_HOSTNAME=${prometheusHostname} \
103 ${PROMETHEUS_PORT_VAR} ${JUJU_PASSWORD_VAR} --env-file ${environmentFile} \
104 -v ${clouds}:/etc/openstack/clouds.yaml \
105 -v ${osmRSAfile}:/root/osm_id_rsa -v ${kubeconfig}:/root/.kube/config -v ${tempdir}:/robot-systest/reports \
106 -v ${portmappingfile}:/root/port-mapping.yaml ${hostfilemount} opensourcemano/tests:${tagName} \
107 -c -t ${testName}""")
// Collect the generated Robot reports into the workspace for publishing.
109 sh("cp ${tempdir}/*.xml .")
110 sh("cp ${tempdir}/*.html .")
111 outputDirectory = sh(returnStdout: true, script: 'pwd').trim()
112 println("Present Directory is : ${outputDirectory}")
// RobotPublisher options (the opening 'step([' line is missing from this view).
114 $class : 'RobotPublisher',
115 outputPath : "${outputDirectory}",
116 outputFileName : '*.xml',
117 disableArchiveOutput : false,
118 reportFileName : 'report.html',
119 logFileName : 'log.html',
120 passThreshold : passThreshold,
121 unstableThreshold: unstableThreshold,
122 otherFiles : '*.png',
// Gathers OSM logs from the remote installation VM into ./logs and archives them:
//   - charmed installer: one log per pod in the 'osm' namespace (file named after
//     the pod-name prefix before the first '-')
//   - otherwise: one log per deployment and per statefulset (all containers)
//   - plus the Airflow scheduler DAG logs copied out of the scheduler pod
// Finally pulls the 'logs' directory back with sshGet and archives the *.log
// files as build artifacts.
// NOTE(review): corrupted numbered listing — the 'done' terminators and closing
// ''' quotes of the embedded shell scripts are missing from this view; do not
// edit the script bodies without the pristine Jenkinsfile. Also relies on the
// script-level 'useCharmedInstaller' set in the node block below.
127 void archive_logs(Map remote) {
129 sshCommand remote: remote, command: '''mkdir -p logs/dags'''
130 if (useCharmedInstaller) {
131 sshCommand remote: remote, command: '''
132 for pod in `kubectl get pods -n osm | grep -v operator | grep -v NAME| awk '{print $1}'`; do
133 logfile=`echo $pod | cut -d- -f1`
134 echo "Extracting log for $logfile"
135 kubectl logs -n osm $pod --timestamps=true 2>&1 > logs/$logfile.log
139 sshCommand remote: remote, command: '''
140 for deployment in `kubectl -n osm get deployments | grep -v operator | grep -v NAME| awk '{print $1}'`; do
141 echo "Extracting log for $deployment"
142 kubectl -n osm logs deployments/$deployment --timestamps=true --all-containers 2>&1 \
143 > logs/$deployment.log
146 sshCommand remote: remote, command: '''
147 for statefulset in `kubectl -n osm get statefulsets | grep -v operator | grep -v NAME| awk '{print $1}'`; do
148 echo "Extracting log for $statefulset"
149 kubectl -n osm logs statefulsets/$statefulset --timestamps=true --all-containers 2>&1 \
150 > logs/$statefulset.log
153 sshCommand remote: remote, command: '''
154 schedulerPod="$(kubectl get pods -n osm | grep airflow-scheduler| awk '{print $1; exit}')"; \
155 echo "Extracting logs from Airflow DAGs from pod ${schedulerPod}"; \
156 kubectl cp -n osm ${schedulerPod}:/opt/airflow/logs/scheduler/latest/dags logs/dags -c scheduler
161 sshCommand remote: remote, command: '''ls -al logs'''
162 sshGet remote: remote, from: 'logs', into: '.', override: true
163 archiveArtifacts artifacts: 'logs/*.log, logs/dags/*.log'
/**
 * Parses the table output of an OpenStack CLI command (rows shaped like
 * '| key | value |') and returns the value column for the given key.
 *
 * @param key    row label to look up (e.g. 'id', 'addresses')
 * @param output raw multi-line CLI output to scan
 * @return trimmed value of the first row whose key matches, or null when the
 *         key is absent (callers test the result against null)
 */
String get_value(String key, String output) {
    for (String line : output.split('\n')) {
        // Columns are '|'-delimited; a well-formed data row splits into at
        // least ['', ' key ', ' value ']. Requiring length > 2 (the original
        // checked > 1) avoids an ArrayIndexOutOfBoundsException on malformed
        // rows such as border lines or a truncated '| key' fragment.
        String[] columns = line.split('\\|')
        if (columns.length > 2 && columns[1].trim() == key) {
            return columns[2].trim()
        }
    }
    // Key not found in any row.
    return null
}
177 ////////////////////////////////////////////////////////////////////////////////////////
179 ////////////////////////////////////////////////////////////////////////////////////////
180 node("${params.NODE}") {
182 INTERNAL_DOCKER_REGISTRY = 'osm.etsi.org:5050/devops/cicd/'
183 INTERNAL_DOCKER_PROXY = 'http://172.21.1.1:5000'
184 APT_PROXY = 'http://172.21.1.1:3142'
185 SSH_KEY = '~/hive/cicd_rsa'
186 ARCHIVE_LOGS_FLAG = false
189 tag_or_branch = params.GERRIT_BRANCH.replaceAll(/\./, '')
195 ci_helper = load 'jenkins/ci-pipelines/ci_helper.groovy'
197 def upstreamMainJob = params.UPSTREAM_SUFFIX
199 // upstream jobs always use merged artifacts
200 upstreamMainJob += '-merge'
201 containerNamePrefix = "osm-${tag_or_branch}"
202 containerName = "${containerNamePrefix}"
204 keep_artifacts = false
205 if ( JOB_NAME.contains('merge') ) {
206 containerName += '-merge'
208 // On a merge job, we keep artifacts on smoke success
209 keep_artifacts = params.SAVE_ARTIFACTS_ON_SMOKE_SUCCESS
211 containerName += "-${BUILD_NUMBER}"
214 http_server_name = null
216 useCharmedInstaller = params.INSTALLER.equalsIgnoreCase('charmed')
220 ///////////////////////////////////////////////////////////////////////////////////////
221 // Fetch stage 2 .deb artifacts
222 ///////////////////////////////////////////////////////////////////////////////////////
223 stage('Copy Artifacts') {
224 // cleanup any previous repo
225 sh "tree -fD repo || exit 0"
227 sh "tree -fD repo && lsof repo || exit 0"
231 RELEASE_DIR = sh(returnStdout:true, script: 'pwd').trim()
233 // check if an upstream artifact based on specific build number has been requested
234 // This is the case of a merge build and the upstream merge build is not yet complete
235 // (it is not deemed a successful build yet). The upstream job is calling this downstream
236 // job (with the its build artifact)
237 def upstreamComponent = ''
238 if (params.UPSTREAM_JOB_NAME) {
239 println("Fetching upstream job artifact from ${params.UPSTREAM_JOB_NAME}")
240 lock('Artifactory') {
241 step ([$class: 'CopyArtifact',
242 projectName: "${params.UPSTREAM_JOB_NAME}",
243 selector: [$class: 'SpecificBuildSelector',
244 buildNumber: "${params.UPSTREAM_JOB_NUMBER}"]
247 upstreamComponent = ci_helper.get_mdg_from_project(
248 ci_helper.get_env_value('build.env','GERRIT_PROJECT'))
249 def buildNumber = ci_helper.get_env_value('build.env','BUILD_NUMBER')
250 dir("$upstreamComponent") {
251 // the upstream job name contains suffix with the project. Need this stripped off
252 project_without_branch = params.UPSTREAM_JOB_NAME.split('/')[0]
253 packages = ci_helper.get_archive(params.ARTIFACTORY_SERVER,
256 "${project_without_branch} :: ${GERRIT_BRANCH}",
259 packageList.addAll(packages)
260 println("Fetched pre-merge ${params.UPSTREAM_JOB_NAME}: ${packages}")
262 } // lock artifactory
266 list = ['RO', 'osmclient', 'IM', 'devops', 'MON', 'N2VC', 'NBI',
267 'common', 'LCM', 'POL', 'NG-UI', 'NG-SA', 'PLA', 'tests']
268 if (upstreamComponent.length() > 0) {
269 println("Skipping upstream fetch of ${upstreamComponent}")
270 list.remove(upstreamComponent)
272 for (buildStep in list) {
273 def component = buildStep
274 parallelSteps[component] = {
276 println("Fetching artifact for ${component}")
277 step([$class: 'CopyArtifact',
278 projectName: "${component}${upstreamMainJob}/${GERRIT_BRANCH}"])
280 // grab the archives from the stage_2 builds
281 // (ie. this will be the artifacts stored based on a merge)
282 packages = ci_helper.get_archive(params.ARTIFACTORY_SERVER,
285 "${component}${upstreamMainJob} :: ${GERRIT_BRANCH}",
286 ci_helper.get_env_value('build.env', 'BUILD_NUMBER'))
287 packageList.addAll(packages)
288 println("Fetched ${component}: ${packages}")
293 lock('Artifactory') {
294 parallel parallelSteps
297 ///////////////////////////////////////////////////////////////////////////////////////
298 // Create Devops APT repository
299 ///////////////////////////////////////////////////////////////////////////////////////
301 for (component in [ 'devops', 'IM', 'osmclient' ]) {
302 sh "ls -al ${component}/pool/"
303 sh "cp -r ${component}/pool/* pool/"
304 sh "dpkg-sig --sign builder -k ${GPG_KEY_NAME} pool/${component}/*"
305 sh "mkdir -p dists/${params.REPO_DISTRO}/${component}/binary-amd64/"
306 sh("""apt-ftparchive packages pool/${component} \
307 > dists/${params.REPO_DISTRO}/${component}/binary-amd64/Packages""")
308 sh "gzip -9fk dists/${params.REPO_DISTRO}/${component}/binary-amd64/Packages"
311 // create and sign the release file
312 sh "apt-ftparchive release dists/${params.REPO_DISTRO} > dists/${params.REPO_DISTRO}/Release"
313 sh("""gpg --yes -abs -u ${GPG_KEY_NAME} \
314 -o dists/${params.REPO_DISTRO}/Release.gpg dists/${params.REPO_DISTRO}/Release""")
316 // copy the public key into the release folder
317 // this pulls the key from the home dir of the current user (jenkins)
318 sh "cp ~/${REPO_KEY_NAME} 'OSM ETSI Release Key.gpg'"
319 sh "cp ~/${REPO_KEY_NAME} ."
322 // start an apache server to serve up the packages
323 http_server_name = "${containerName}-apache"
325 pwd = sh(returnStdout:true, script: 'pwd').trim()
326 repo_port = sh(script: 'echo $(python -c \'import socket; s=socket.socket(); s.bind(("", 0));' +
327 'print(s.getsockname()[1]); s.close()\');',
328 returnStdout: true).trim()
329 internal_docker_http_server_url = ci_helper.start_http_server(pwd, http_server_name, repo_port)
330 NODE_IP_ADDRESS = sh(returnStdout: true, script:
331 "echo ${SSH_CONNECTION} | awk '{print \$3}'").trim()
332 ci_helper.check_status_http_server(NODE_IP_ADDRESS, repo_port)
337 // Unpack devops package into temporary location so that we use it from upstream if it was part of a patch
338 osm_devops_dpkg = sh(returnStdout: true, script: 'find ./repo/release/pool/ -name osm-devops*.deb').trim()
339 devopstempdir = sh(returnStdout: true, script: 'mktemp -d').trim()
340 println("Extracting local devops package ${osm_devops_dpkg} into ${devopstempdir} for docker build step")
341 sh "dpkg -x ${osm_devops_dpkg} ${devopstempdir}"
342 OSM_DEVOPS = "${devopstempdir}/usr/share/osm-devops"
343 // Convert URLs from stage 2 packages to arguments that can be passed to docker build
344 for (remotePath in packageList) {
345 packageName = remotePath[remotePath.lastIndexOf('/') + 1 .. -1]
346 packageName = packageName[0 .. packageName.indexOf('_') - 1]
347 builtModules[packageName] = remotePath
351 ///////////////////////////////////////////////////////////////////////////////////////
352 // Build docker containers
353 ///////////////////////////////////////////////////////////////////////////////////////
357 if ( params.DO_BUILD ) {
358 withCredentials([[$class: 'UsernamePasswordMultiBinding', credentialsId: 'gitlab-registry',
359 usernameVariable: 'USERNAME', passwordVariable: 'PASSWORD']]) {
360 sh "docker login ${INTERNAL_DOCKER_REGISTRY} -u ${USERNAME} -p ${PASSWORD}"
362 datetime = sh(returnStdout: true, script: 'date +%Y-%m-%d:%H:%M:%S').trim()
363 moduleBuildArgs = " --build-arg CACHE_DATE=${datetime}"
364 for (packageName in builtModules.keySet()) {
365 envName = packageName.replaceAll('-', '_').toUpperCase() + '_URL'
366 moduleBuildArgs += " --build-arg ${envName}=" + builtModules[packageName]
370 containerList = sh(returnStdout: true, script:
371 "find . -name Dockerfile -printf '%h\\n' | sed 's|\\./||'")
372 containerList = Arrays.asList(containerList.split('\n'))
375 for (buildStep in containerList) {
376 def module = buildStep
377 def moduleName = buildStep.toLowerCase()
378 def moduleTag = containerName
379 parallelSteps[module] = {
381 sh("""docker build --build-arg APT_PROXY=${APT_PROXY} \
382 -t opensourcemano/${moduleName}:${moduleTag} ${moduleBuildArgs} .""")
383 println("Tagging ${moduleName}:${moduleTag}")
384 sh("""docker tag opensourcemano/${moduleName}:${moduleTag} \
385 ${INTERNAL_DOCKER_REGISTRY}opensourcemano/${moduleName}:${moduleTag}""")
387 ${INTERNAL_DOCKER_REGISTRY}opensourcemano/${moduleName}:${moduleTag}""")
391 parallel parallelSteps
394 } // if (params.DO_BUILD)
396 if (params.DO_INSTALL) {
397 ///////////////////////////////////////////////////////////////////////////////////////
399 ///////////////////////////////////////////////////////////////////////////////////////
400 stage('Spawn Remote VM') {
401 println('Launching new VM')
402 output = sh(returnStdout: true, script: """#!/bin/sh -e
403 for line in `grep OS ~/hive/robot-systest.cfg | grep -v OS_CLOUD` ; do export \$line ; done
404 openstack server create --flavor ${OPENSTACK_OSM_FLAVOR} \
405 --image ${OPENSTACK_BASE_IMAGE} \
407 --property build_url="${BUILD_URL}" \
408 --nic net-id=osm-ext \
412 server_id = get_value('id', output)
414 if (server_id == null) {
415 println('VM launch output: ')
417 throw new Exception('VM Launch failed')
419 println("Target VM is ${server_id}, waiting for IP address to be assigned")
423 while (IP_ADDRESS == '') {
424 output = sh(returnStdout: true, script: """#!/bin/sh -e
425 for line in `grep OS ~/hive/robot-systest.cfg | grep -v OS_CLOUD` ; do export \$line ; done
426 openstack server show ${server_id}
428 IP_ADDRESS = get_value('addresses', output)
430 IP_ADDRESS = IP_ADDRESS.split('=')[1]
431 println("Waiting for VM at ${IP_ADDRESS} to be reachable")
434 timeout(time: 1, unit: 'MINUTES') {
438 script: "ssh -T -i ${SSH_KEY} " +
439 "-o StrictHostKeyChecking=no " +
440 "-o UserKnownHostsFile=/dev/null " +
441 "-o ConnectTimeout=5 ubuntu@${IP_ADDRESS} 'echo Alive'")
442 alive = (output == 0)
445 println('VM is ready and accepting ssh connections')
447 //////////////////////////////////////////////////////////////////////////////////////////////
448 println('Applying sshd config workaround for Ubuntu 22.04 and old jsch client in Jenkins...')
450 sh( returnStatus: true,
451 script: "ssh -T -i ${SSH_KEY} " +
452 "-o StrictHostKeyChecking=no " +
453 "-o UserKnownHostsFile=/dev/null " +
454 "ubuntu@${IP_ADDRESS} " +
455 "'echo HostKeyAlgorithms +ssh-rsa | sudo tee -a /etc/ssh/sshd_config'")
456 sh( returnStatus: true,
457 script: "ssh -T -i ${SSH_KEY} " +
458 "-o StrictHostKeyChecking=no " +
459 "-o UserKnownHostsFile=/dev/null " +
460 "ubuntu@${IP_ADDRESS} " +
461 "'echo PubkeyAcceptedKeyTypes +ssh-rsa | sudo tee -a /etc/ssh/sshd_config'")
462 sh( returnStatus: true,
463 script: "ssh -T -i ${SSH_KEY} " +
464 "-o StrictHostKeyChecking=no " +
465 "-o UserKnownHostsFile=/dev/null " +
466 "ubuntu@${IP_ADDRESS} " +
467 "'sudo systemctl restart sshd'")
468 //////////////////////////////////////////////////////////////////////////////////////////////
470 } // stage("Spawn Remote VM")
472 ///////////////////////////////////////////////////////////////////////////////////////
473 // Checks before installation
474 ///////////////////////////////////////////////////////////////////////////////////////
475 stage('Checks before installation') {
480 identityFile: SSH_KEY,
486 // Ensure the VM is ready
487 sshCommand remote: remote, command: 'cloud-init status --wait'
488 // Force time sync to avoid clock drift and invalid certificates
489 sshCommand remote: remote, command: 'sudo apt-get -y update'
490 sshCommand remote: remote, command: 'sudo apt-get -y install chrony'
491 sshCommand remote: remote, command: 'sudo service chrony stop'
492 sshCommand remote: remote, command: 'sudo chronyd -vq'
493 sshCommand remote: remote, command: 'sudo service chrony start'
495 } // stage("Checks before installation")
496 ///////////////////////////////////////////////////////////////////////////////////////
498 ///////////////////////////////////////////////////////////////////////////////////////
505 if (params.COMMIT_ID) {
506 commit_id = "-b ${params.COMMIT_ID}"
508 if (params.REPO_DISTRO) {
509 repo_distro = "-r ${params.REPO_DISTRO}"
511 if (params.REPO_KEY_NAME) {
512 repo_key_name = "-k ${params.REPO_KEY_NAME}"
514 if (params.RELEASE) {
515 release = "-R ${params.RELEASE}"
517 if (params.REPOSITORY_BASE) {
518 repo_base_url = "-u ${params.REPOSITORY_BASE}"
520 repo_base_url = "-u http://${NODE_IP_ADDRESS}:${repo_port}"
527 identityFile: SSH_KEY,
533 sshCommand remote: remote, command: '''
534 wget https://osm-download.etsi.org/ftp/osm-13.0-thirteen/install_osm.sh
535 chmod +x ./install_osm.sh
536 sed -i '1 i\\export PATH=/snap/bin:\$PATH' ~/.bashrc
539 Map gitlabCredentialsMap = [$class: 'UsernamePasswordMultiBinding',
540 credentialsId: 'gitlab-registry',
541 usernameVariable: 'USERNAME',
542 passwordVariable: 'PASSWORD']
543 if (useCharmedInstaller) {
544 // Use local proxy for docker hub
545 sshCommand remote: remote, command: '''
546 sudo snap install microk8s --classic --channel=1.19/stable
547 sudo sed -i "s|https://registry-1.docker.io|http://172.21.1.1:5000|" \
548 /var/snap/microk8s/current/args/containerd-template.toml
549 sudo systemctl restart snap.microk8s.daemon-containerd.service
550 sudo snap alias microk8s.kubectl kubectl
553 withCredentials([gitlabCredentialsMap]) {
554 sshCommand remote: remote, command: """
555 ./install_osm.sh -y \
558 ${release} -r unstable \
560 --registry ${USERNAME}:${PASSWORD}@${INTERNAL_DOCKER_REGISTRY} \
561 --tag ${containerName}
564 prometheusHostname = "prometheus.${IP_ADDRESS}.nip.io"
566 osmHostname = "nbi.${IP_ADDRESS}.nip.io:443"
568 // Run -k8s installer here specifying internal docker registry and docker proxy
569 osm_installation_options = ""
570 if (params.TRY_OLD_SERVICE_ASSURANCE) {
571 osm_installation_options = "--old-sa"
573 withCredentials([gitlabCredentialsMap]) {
574 sshCommand remote: remote, command: """
575 ./install_osm.sh -y \
578 ${release} -r unstable \
579 -d ${USERNAME}:${PASSWORD}@${INTERNAL_DOCKER_REGISTRY} \
580 -p ${INTERNAL_DOCKER_PROXY} \
581 -t ${containerName} \
582 ${osm_installation_options}
585 prometheusHostname = IP_ADDRESS
586 prometheusPort = 9091
587 osmHostname = IP_ADDRESS
589 } // stage("Install")
590 ///////////////////////////////////////////////////////////////////////////////////////
591 // Health check of installed OSM in remote vm
592 ///////////////////////////////////////////////////////////////////////////////////////
593 stage('OSM Health') {
594 // if this point is reached, logs should be archived
595 ARCHIVE_LOGS_FLAG = true
597 sshCommand remote: remote, command: """
598 /usr/share/osm-devops/installers/osm_health.sh -k -s ${stackName}
600 } // stage("OSM Health")
601 } // if ( params.DO_INSTALL )
604 ///////////////////////////////////////////////////////////////////////////////////////
605 // Execute Robot tests
606 ///////////////////////////////////////////////////////////////////////////////////////
607 stage_archive = false
608 if ( params.DO_ROBOT ) {
610 stage('System Integration Test') {
611 if (useCharmedInstaller) {
612 tempdir = sh(returnStdout: true, script: 'mktemp -d').trim()
613 sh(script: "touch ${tempdir}/hosts")
614 hostfile = "${tempdir}/hosts"
615 sh """cat << EOF > ${hostfile}
617 ${remote.host} prometheus.${remote.host}.nip.io nbi.${remote.host}.nip.io
623 jujuPassword = sshCommand remote: remote, command: '''
624 echo `juju gui 2>&1 | grep password | cut -d: -f2`
629 params.ROBOT_TAG_NAME,
634 params.ROBOT_PORT_MAPPING_VIM,
640 params.ROBOT_PASS_THRESHOLD,
641 params.ROBOT_UNSTABLE_THRESHOLD
643 } // stage("System Integration Test")
645 stage('After System Integration test') {
646 if (currentBuild.result != 'FAILURE') {
647 stage_archive = keep_artifacts
649 println('Systest test failed, throwing error')
650 error = new Exception('Systest test failed')
651 currentBuild.result = 'FAILURE'
656 } // if ( params.DO_ROBOT )
658 if (params.SAVE_ARTIFACTS_OVERRIDE || stage_archive) {
660 // Archive the tested repo
661 dir("${RELEASE_DIR}") {
662 ci_helper.archive(params.ARTIFACTORY_SERVER, RELEASE, GERRIT_BRANCH, 'tested')
664 if (params.DO_DOCKERPUSH) {
665 stage('Publish to Dockerhub') {
667 for (buildStep in containerList) {
668 def module = buildStep
669 def moduleName = buildStep.toLowerCase()
670 def dockerTag = params.DOCKER_TAG
671 def moduleTag = containerName
673 parallelSteps[module] = {
675 sh("docker pull ${INTERNAL_DOCKER_REGISTRY}opensourcemano/${moduleName}:${moduleTag}")
676 sh("""docker tag ${INTERNAL_DOCKER_REGISTRY}opensourcemano/${moduleName}:${moduleTag} \
677 opensourcemano/${moduleName}:${dockerTag}""")
678 sh "docker push opensourcemano/${moduleName}:${dockerTag}"
682 parallel parallelSteps
684 stage('Snap promotion') {
685 withCredentials([string(credentialsId: 'Snapstore', variable: 'SNAPCRAFT_STORE_CREDENTIALS')]) {
686 snaps = ['osmclient']
687 for (snap in snaps) {
689 if (BRANCH_NAME.startsWith('v')) {
690 channel = BRANCH_NAME.substring(1) + '/'
691 } else if (BRANCH_NAME != 'master') {
692 channel += '/' + BRANCH_NAME.replaceAll('/', '-')
694 track = channel + 'edge\\*'
695 edge_rev = sh(returnStdout: true,
696 script: "snapcraft revisions $snap | " +
697 "grep \"$track\" | tail -1 | awk '{print \$1}'").trim()
698 track = channel + 'beta\\*'
699 beta_rev = sh(returnStdout: true,
700 script: "snapcraft revisions $snap | " +
701 "grep \"$track\" | tail -1 | awk '{print \$1}'").trim()
703 print "Edge: $edge_rev, Beta: $beta_rev"
705 if (edge_rev != beta_rev) {
706 print "Promoting $edge_rev to beta in place of $beta_rev"
707 beta_track = channel + 'beta'
708 sh "snapcraft release $snap $edge_rev $beta_track"
712 } // stage('Snap promotion')
713 stage('Charm promotion') {
719 'mongodb-exporter-k8s',
720 'mysqld-exporter-k8s',
728 'osm-update-db-operator',
729 'osm-vca-integrator',
731 for (charm in charms) {
734 if (BRANCH_NAME.startsWith('v')) {
735 channel = BRANCH_NAME.substring(1)
736 } else if (BRANCH_NAME != 'master') {
737 channel += '/' + BRANCH_NAME.replaceAll('/', '-')
740 withCredentials([string(credentialsId: 'Charmstore', variable: 'CHARMCRAFT_AUTH')]) {
741 sh "charmcraft status $charm --format json > ${charm}.json"
742 isCharm = sh(returnStdout: true, script: "grep architecture ${charm}.json | wc -l").trim() as int
743 resourceArgument = ""
745 jqScriptEdge = "cat ${charm}.json | jq -r '.[] | select(.track==\"$channel\") | .mappings[] | select(.base.architecture==\"amd64\" and .base.channel==\"20.04\") | .releases[] | select(.channel==\"$channel/edge/merged\")| .version'|head -1"
746 jqScriptBeta = "cat ${charm}.json | jq -r '.[] | select(.track==\"$channel\") | .mappings[] | select(.base.architecture==\"amd64\" and .base.channel==\"20.04\") | .releases[] | select(.channel==\"$channel/beta\")| .version'|head -1"
749 resourceNameScript = "cat ${charm}.json | jq -r '.[] | select(.track==\"$channel\") | .mappings[] | select(.base.architecture==\"amd64\" and .base.channel==\"20.04\") | .releases[] | select(.channel==\"$channel/edge/merged\")| .resources[$index].name'|head -1"
750 resourceRevsScript = "cat ${charm}.json | jq -r '.[] | select(.track==\"$channel\") | .mappings[] | select(.base.architecture==\"amd64\" and .base.channel==\"20.04\") | .releases[] | select(.channel==\"$channel/edge/merged\")| .resources[$index].revision'|head -1"
751 resourceName = sh(returnStdout: true, script: resourceNameScript).trim()
752 resourceRevs = sh(returnStdout: true, script: resourceRevsScript).trim()
753 if (resourceName != "null") {
754 resourceArgument += " --resource ${resourceName}:${resourceRevs}"
761 jqScriptEdge = "cat ${charm}.json | jq -r '.[] | select(.track==\"$channel\") | .mappings[].releases[] | select(.channel==\"$channel/edge/merged\")| .version'|head -1"
762 jqScriptBeta = "cat ${charm}.json | jq -r '.[] | select(.track==\"$channel\") | .mappings[].releases[] | select(.channel==\"$channel/beta\")| .version'|head -1"
764 // edge/merged is used in place of /edge as 10.1.0 LTS uses latest/edge
765 edge_rev = sh(returnStdout: true, script: jqScriptEdge).trim()
766 beta_rev = sh(returnStdout: true, script: jqScriptBeta).trim()
767 try { edge_rev = edge_rev as int } catch (NumberFormatException nfe) {edge_rev = 0}
768 try { beta_rev = beta_rev as int } catch (NumberFormatException nfe) {beta_rev = 0}
770 print "Edge: $edge_rev, Beta: $beta_rev $resourceArgument"
772 if (edge_rev > beta_rev) {
773 print "Promoting $edge_rev to beta in place of $beta_rev"
774 beta_track = channel + 'beta'
775 sh "charmcraft release ${charm} --revision=${edge_rev} ${resourceArgument} --channel=${channel}/beta"
780 } // stage('Charm promotion')
781 } // if (params.DO_DOCKERPUSH)
782 } // stage('Archive')
783 } // if (params.SAVE_ARTIFACTS_OVERRIDE || stage_archive)
789 stage('Archive Container Logs') {
790 if ( ARCHIVE_LOGS_FLAG ) {
797 identityFile: SSH_KEY,
802 println('Archiving container logs')
804 } catch (Exception e) {
805 println('Error fetching logs: '+ e.getMessage())
807 } // end if ( ARCHIVE_LOGS_FLAG )
810 if ( params.DO_INSTALL && server_id != null) {
812 if (error && params.SAVE_CONTAINER_ON_FAIL ) {
815 if (!error && params.SAVE_CONTAINER_ON_PASS ) {
820 if (server_id != null) {
821 println("Deleting VM: $server_id")
823 for line in `grep OS ~/hive/robot-systest.cfg | grep -v OS_CLOUD` ; do export \$line ; done
824 openstack server delete ${server_id}
827 println("Saved VM $server_id in ETSI VIM")
831 if ( http_server_name != null ) {
832 sh "docker stop ${http_server_name} || true"
833 sh "docker rm ${http_server_name} || true"
836 if ( devopstempdir != null ) {
837 sh "rm -rf ${devopstempdir}"