Started by timer Obtained jenkins/public-clouds-tests/Jenkinsfile from git https://osm.etsi.org/gerrit/osm/devops Running in Durability level: MAX_SURVIVABILITY [Pipeline] properties [Pipeline] node Running on osm worker 4 in /home/jenkins/workspace/azure_robot_tests [Pipeline] { [Pipeline] stage [Pipeline] { (Declarative: Checkout SCM) [Pipeline] checkout No credentials specified > git rev-parse --is-inside-work-tree # timeout=10 Fetching changes from the remote Git repository > git config remote.origin.url https://osm.etsi.org/gerrit/osm/devops # timeout=10 Fetching upstream changes from https://osm.etsi.org/gerrit/osm/devops > git --version # timeout=10 > git fetch --tags --force --progress https://osm.etsi.org/gerrit/osm/devops +refs/heads/*:refs/remotes/origin/* > git rev-parse refs/remotes/origin/master^{commit} # timeout=10 > git rev-parse refs/remotes/origin/origin/master^{commit} # timeout=10 Checking out Revision e400dfdf8dd2bdd67321fa70a56cef6458533dbc (refs/remotes/origin/master) > git config core.sparsecheckout # timeout=10 > git checkout -f e400dfdf8dd2bdd67321fa70a56cef6458533dbc Commit message: "Prepare installers and Jenkins for Release FIFTEEN" > git rev-list --no-walk e400dfdf8dd2bdd67321fa70a56cef6458533dbc # timeout=10 [Pipeline] } [Pipeline] // stage [Pipeline] stage [Pipeline] { (Declarative: Agent Setup) [Pipeline] sh [azure_robot_tests] Running shell script + docker pull opensourcemano/tests:testing-daily testing-daily: Pulling from opensourcemano/tests Digest: sha256:696d1acb95ead0e99c2ea1d287f976d3e47cc8565a0b9ff4602e2a52fd1bb148 Status: Image is up to date for opensourcemano/tests:testing-daily docker.io/opensourcemano/tests:testing-daily [Pipeline] } [Pipeline] // stage [Pipeline] sh [azure_robot_tests] Running shell script + docker inspect -f . opensourcemano/tests:testing-daily . [Pipeline] withDockerContainer osm worker 4 does not seem to be running inside a container $ docker run -t -d -u 1001:1001 -u root:root --entrypoint= -w /home/jenkins/workspace/azure_robot_tests -v /home/jenkins/workspace/azure_robot_tests:/home/jenkins/workspace/azure_robot_tests:rw,z -v /home/jenkins/workspace/azure_robot_tests@tmp:/home/jenkins/workspace/azure_robot_tests@tmp:rw,z -e ******** -e ******** -e ******** -e ******** -e ******** -e ******** -e ******** -e ******** -e ******** -e ******** -e ******** -e ******** -e ******** -e ******** -e ******** -e ******** -e ******** -e ******** -e ******** -e ******** -e ******** -e ******** -e ******** -e ******** -e ******** -e ******** -e ******** -e ******** -e ******** -e ******** -e ******** -e ******** -e ******** -e ******** -e ******** -e ******** --entrypoint cat opensourcemano/tests:testing-daily [Pipeline] { [Pipeline] withEnv [Pipeline] { [Pipeline] stage [Pipeline] { (Set environment) [Pipeline] script [Pipeline] { [Pipeline] sh [azure_robot_tests] Running shell script + mkdir -m 700 /root/.ssh [Pipeline] sh [azure_robot_tests] Running shell script + ssh-keygen -t rsa -f /root/.ssh/id_rsa -N Generating public/private rsa key pair. Your identification has been saved in /root/.ssh/id_rsa Your public key has been saved in /root/.ssh/id_rsa.pub The key fingerprint is: SHA256:52cVU71OU5oaXuWGhrvlm5TG9JrqZXqZr7KpPSKwZLk root@47440e0bbe8f The key's randomart image is: +---[RSA 3072]----+ | o| | .+| | .o*o| | o **o| | .S .. *=..| | = o ++.+ | | o + . o+Bo.| | E . 
.++B=+ | | ..+B**+.| +----[SHA256]-----+
[Pipeline] sh
[azure_robot_tests] Running shell script
+ cp /root/.ssh/id_rsa /root/osm_id_rsa
[Pipeline] sh
[azure_robot_tests] Running shell script
+ echo Reading credential azure-credentials
Reading credential azure-credentials
[Pipeline] }
[Pipeline] // script
[Pipeline] withCredentials
[Pipeline] {
[Pipeline] sh
[azure_robot_tests] Running shell script
+ cp **** /root/azure-creds.json
[Pipeline] sh
[azure_robot_tests] Running shell script
+ set +x
[
  {
    "cloudName": "AzureCloud",
    "homeTenantId": "e6746ab5-ebdc-4e9d-821b-a71bdaf63d9b",
    "id": "8fb7e78d-097b-413d-bc65-41d29be6bab1",
    "isDefault": true,
    "managedByTenants": [],
    "name": "Azure in Open",
    "state": "Enabled",
    "tenantId": "e6746ab5-ebdc-4e9d-821b-a71bdaf63d9b",
    "user": {
      "name": "7c5ba2e6-2013-49a0-bf9a-f2592030f7ff",
      "type": "servicePrincipal"
    }
  }
]
[Pipeline] sh
[azure_robot_tests] Running shell script
+ az vm list -o table
Name              ResourceGroup       Location    Zones
----------------  ------------------  ----------  -------
vm-CICD-Host      OSM_CICD_GROUP      westeurope  1
vm-VPN-Host       OSM_GROUP           westeurope
VPN-Gateway       OSM_GROUP           westeurope
vm-Hackfest-Host  OSM_HACKFEST_GROUP  westeurope
[Pipeline] }
[Pipeline] // withCredentials
[Pipeline] }
[Pipeline] // stage
[Pipeline] stage
[Pipeline] { (Create k8s cluster)
[Pipeline] sh
[azure_robot_tests] Running shell script
+ /robot-systest/cloud-scripts/create-k8s.sh
Creating a new IaaS k8s cluster in azure
+ az vm create --resource-group OSM_CICD_GROUP --name k8stest202312311208 --image Canonical:0001-com-ubuntu-server-jammy:22_04-lts:latest --size Standard_A2_v2 --vnet-name OSM-CICD-net --subnet OSM-CICD-subnet --public-ip-address '' --admin-username ubuntu --priority Regular
Selecting "uksouth" may reduce your costs. The region you've selected may cost more for the same services. You can disable this message in the future with the command "az config set core.display_region_identified=false". Learn more at https://go.microsoft.com/fwlink/?linkid=222571
WARNING: Consider upgrading security for your workloads using Azure Trusted Launch VMs. To know more about Trusted Launch, please visit https://aka.ms/TrustedLaunch.
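Before the az vm create output that follows, one note on the Azure session set up in the 'Set environment' stage above: the service-principal credentials are copied from a Jenkins file credential to /root/azure-creds.json, and the login command itself is masked by set +x, so only its subscription listing (with "type": "servicePrincipal") is visible. A minimal sketch of what that masked step presumably looks like; the variable names and the way the values reach the shell are assumptions, not taken from the log:

# Illustrative reconstruction of the masked login; only the az commands themselves are standard CLI.
az login --service-principal -u "$AZURE_CLIENT_ID" -p "$AZURE_SECRET" --tenant "$AZURE_TENANT" > /dev/null
az account show -o json    # confirms the servicePrincipal session is active
az vm list -o table        # same reachability sanity check as in the log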
{ "fqdns": "", "id": "/subscriptions/8fb7e78d-097b-413d-bc65-41d29be6bab1/resourceGroups/OSM_CICD_GROUP/providers/Microsoft.Compute/virtualMachines/k8stest202312311208", "location": "westeurope", "macAddress": "00-0D-3A-AC-93-3A", "powerState": "VM running", "privateIpAddress": "172.21.23.10", "publicIpAddress": "", "resourceGroup": "OSM_CICD_GROUP", "zones": "" } ++ az vm show -d -g OSM_CICD_GROUP -n k8stest202312311208 --query privateIps ++ tr -d '"' + export K8S_IP=172.21.23.10 + K8S_IP=172.21.23.10 ++ az vm show --resource-group OSM_CICD_GROUP --name k8stest202312311208 --query 'networkProfile.networkInterfaces[0].id' + INTERFACE_ID='"/subscriptions/8fb7e78d-097b-413d-bc65-41d29be6bab1/resourceGroups/OSM_CICD_GROUP/providers/Microsoft.Network/networkInterfaces/k8stest202312311208VMNic"' + INTERFACE_ID=/subscriptions/8fb7e78d-097b-413d-bc65-41d29be6bab1/resourceGroups/OSM_CICD_GROUP/providers/Microsoft.Network/networkInterfaces/k8stest202312311208VMNic ++ az network nic show --id /subscriptions/8fb7e78d-097b-413d-bc65-41d29be6bab1/resourceGroups/OSM_CICD_GROUP/providers/Microsoft.Network/networkInterfaces/k8stest202312311208VMNic --query networkSecurityGroup.id + SECURITY_GROUP_ID='"/subscriptions/8fb7e78d-097b-413d-bc65-41d29be6bab1/resourceGroups/OSM_CICD_GROUP/providers/Microsoft.Network/networkSecurityGroups/k8stest202312311208NSG"' + SECURITY_GROUP_ID=/subscriptions/8fb7e78d-097b-413d-bc65-41d29be6bab1/resourceGroups/OSM_CICD_GROUP/providers/Microsoft.Network/networkSecurityGroups/k8stest202312311208NSG ++ az resource show --ids /subscriptions/8fb7e78d-097b-413d-bc65-41d29be6bab1/resourceGroups/OSM_CICD_GROUP/providers/Microsoft.Network/networkSecurityGroups/k8stest202312311208NSG --query name + SECURITY_GROUP_NAME='"k8stest202312311208NSG"' + SECURITY_GROUP_NAME=k8stest202312311208NSG + az network nsg rule create -n microk8s --nsg-name k8stest202312311208NSG --priority 2000 -g OSM_CICD_GROUP --description 'Microk8s port' --protocol TCP --destination-port-ranges 16443 { "access": "Allow", "description": "Microk8s port", "destinationAddressPrefix": "*", "destinationAddressPrefixes": [], "destinationPortRange": "16443", "destinationPortRanges": [], "direction": "Inbound", "etag": "W/\"acdd51d1-b8d5-4adf-b9e3-2dd5dd80776f\"", "id": "/subscriptions/8fb7e78d-097b-413d-bc65-41d29be6bab1/resourceGroups/OSM_CICD_GROUP/providers/Microsoft.Network/networkSecurityGroups/k8stest202312311208NSG/securityRules/microk8s", "name": "microk8s", "priority": 2000, "protocol": "Tcp", "provisioningState": "Succeeded", "resourceGroup": "OSM_CICD_GROUP", "sourceAddressPrefix": "*", "sourceAddressPrefixes": [], "sourcePortRange": "*", "sourcePortRanges": [], "type": "Microsoft.Network/networkSecurityGroups/securityRules" } + echo 'export K8S_IP="172.21.23.10"' + echo 'export K8S_IMAGE_NAME="k8stest202312311208"' + install_remote_microk8s + set +e + ssh -T -o StrictHostKeyChecking=no -o UserKnownHostsFile=/dev/null ubuntu@172.21.23.10 'sudo apt-get update -y && sudo apt-get upgrade -y && sudo reboot' Warning: Permanently added '172.21.23.10' (ED25519) to the list of known hosts. 
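Stripped of the az bookkeeping, create-k8s.sh does three things before it ever logs into the VM: create an Ubuntu 22.04 VM with no public IP inside the CI/CD subnet, read back its private IP, and open TCP 16443 (the MicroK8s API port) on the NSG that az vm create generated. A condensed sketch with the same names as above; deriving the NSG name as "${NAME}NSG" is an assumption for brevity (the script resolves it through the NIC instead):

RG=OSM_CICD_GROUP
NAME=k8stest202312311208    # in the script the name is derived from the current timestamp
az vm create --resource-group "$RG" --name "$NAME" \
    --image Canonical:0001-com-ubuntu-server-jammy:22_04-lts:latest \
    --size Standard_A2_v2 --vnet-name OSM-CICD-net --subnet OSM-CICD-subnet \
    --public-ip-address '' --admin-username ubuntu --priority Regular
K8S_IP=$(az vm show -d -g "$RG" -n "$NAME" --query privateIps -o tsv)
az network nsg rule create -n microk8s --nsg-name "${NAME}NSG" -g "$RG" \
    --priority 2000 --description 'Microk8s port' --protocol Tcp \
    --destination-port-ranges 16443

The remote update and upgrade of the new VM follows below.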
Hit:1 http://azure.archive.ubuntu.com/ubuntu jammy InRelease Get:2 http://azure.archive.ubuntu.com/ubuntu jammy-updates InRelease [119 kB] Get:3 http://azure.archive.ubuntu.com/ubuntu jammy-backports InRelease [109 kB] Get:4 http://azure.archive.ubuntu.com/ubuntu jammy-security InRelease [110 kB] Get:5 http://azure.archive.ubuntu.com/ubuntu jammy/universe amd64 Packages [14.1 MB] Get:6 http://azure.archive.ubuntu.com/ubuntu jammy/universe Translation-en [5652 kB] Get:7 http://azure.archive.ubuntu.com/ubuntu jammy/universe amd64 c-n-f Metadata [286 kB] Get:8 http://azure.archive.ubuntu.com/ubuntu jammy/multiverse amd64 Packages [217 kB] Get:9 http://azure.archive.ubuntu.com/ubuntu jammy/multiverse Translation-en [112 kB] Get:10 http://azure.archive.ubuntu.com/ubuntu jammy/multiverse amd64 c-n-f Metadata [8372 B] Get:11 http://azure.archive.ubuntu.com/ubuntu jammy-updates/main amd64 Packages [1263 kB] Get:12 http://azure.archive.ubuntu.com/ubuntu jammy-updates/main Translation-en [260 kB] Get:13 http://azure.archive.ubuntu.com/ubuntu jammy-updates/restricted amd64 Packages [1250 kB] Get:14 http://azure.archive.ubuntu.com/ubuntu jammy-updates/restricted Translation-en [203 kB] Get:15 http://azure.archive.ubuntu.com/ubuntu jammy-updates/universe amd64 Packages [1020 kB] Get:16 http://azure.archive.ubuntu.com/ubuntu jammy-updates/universe Translation-en [226 kB] Get:17 http://azure.archive.ubuntu.com/ubuntu jammy-updates/universe amd64 c-n-f Metadata [22.1 kB] Get:18 http://azure.archive.ubuntu.com/ubuntu jammy-updates/multiverse amd64 Packages [41.6 kB] Get:19 http://azure.archive.ubuntu.com/ubuntu jammy-updates/multiverse Translation-en [9768 B] Get:20 http://azure.archive.ubuntu.com/ubuntu jammy-updates/multiverse amd64 c-n-f Metadata [472 B] Get:21 http://azure.archive.ubuntu.com/ubuntu jammy-backports/main amd64 Packages [41.7 kB] Get:22 http://azure.archive.ubuntu.com/ubuntu jammy-backports/main Translation-en [10.5 kB] Get:23 http://azure.archive.ubuntu.com/ubuntu jammy-backports/main amd64 c-n-f Metadata [388 B] Get:24 http://azure.archive.ubuntu.com/ubuntu jammy-backports/restricted amd64 c-n-f Metadata [116 B] Get:25 http://azure.archive.ubuntu.com/ubuntu jammy-backports/universe amd64 Packages [24.3 kB] Get:26 http://azure.archive.ubuntu.com/ubuntu jammy-backports/universe Translation-en [16.5 kB] Get:27 http://azure.archive.ubuntu.com/ubuntu jammy-backports/universe amd64 c-n-f Metadata [644 B] Get:28 http://azure.archive.ubuntu.com/ubuntu jammy-backports/multiverse amd64 c-n-f Metadata [116 B] Get:29 http://azure.archive.ubuntu.com/ubuntu jammy-security/main amd64 Packages [1051 kB] Get:30 http://azure.archive.ubuntu.com/ubuntu jammy-security/main Translation-en [200 kB] Get:31 http://azure.archive.ubuntu.com/ubuntu jammy-security/restricted amd64 Packages [1226 kB] Get:32 http://azure.archive.ubuntu.com/ubuntu jammy-security/restricted Translation-en [200 kB] Get:33 http://azure.archive.ubuntu.com/ubuntu jammy-security/universe amd64 Packages [823 kB] Get:34 http://azure.archive.ubuntu.com/ubuntu jammy-security/universe Translation-en [156 kB] Get:35 http://azure.archive.ubuntu.com/ubuntu jammy-security/universe amd64 c-n-f Metadata [16.8 kB] Get:36 http://azure.archive.ubuntu.com/ubuntu jammy-security/multiverse amd64 Packages [36.5 kB] Get:37 http://azure.archive.ubuntu.com/ubuntu jammy-security/multiverse Translation-en [7060 B] Get:38 http://azure.archive.ubuntu.com/ubuntu jammy-security/multiverse amd64 c-n-f Metadata [260 B] Fetched 28.8 MB in 11s (2615 kB/s) Reading package 
lists... Reading package lists... Building dependency tree... Reading state information... Calculating upgrade... The following packages have been kept back: linux-tools-common python3-update-manager update-manager-core The following packages will be upgraded: binutils binutils-common binutils-x86-64-linux-gnu cryptsetup cryptsetup-bin cryptsetup-initramfs curl kpartx libbinutils libc-bin libc6 libcryptsetup12 libctf-nobfd0 libctf0 libcurl3-gnutls libcurl4 libssh-4 linux-cloud-tools-common locales multipath-tools openssh-client openssh-server openssh-sftp-server python3-cryptography systemd-hwe-hwdb tar vim vim-common vim-runtime vim-tiny xxd 31 upgraded, 0 newly installed, 0 to remove and 3 not upgraded. Need to get 24.9 MB of archives. After this operation, 26.6 kB of additional disk space will be used. Get:1 http://azure.archive.ubuntu.com/ubuntu jammy-updates/main amd64 tar amd64 1.34+dfsg-1ubuntu0.1.22.04.2 [295 kB] Get:2 http://azure.archive.ubuntu.com/ubuntu jammy-updates/main amd64 libc6 amd64 2.35-0ubuntu3.5 [3235 kB] Get:3 http://azure.archive.ubuntu.com/ubuntu jammy-updates/main amd64 libc-bin amd64 2.35-0ubuntu3.5 [706 kB] Get:4 http://azure.archive.ubuntu.com/ubuntu jammy-updates/main amd64 openssh-sftp-server amd64 1:8.9p1-3ubuntu0.5 [38.7 kB] Get:5 http://azure.archive.ubuntu.com/ubuntu jammy-updates/main amd64 openssh-server amd64 1:8.9p1-3ubuntu0.5 [435 kB] Get:6 http://azure.archive.ubuntu.com/ubuntu jammy-updates/main amd64 openssh-client amd64 1:8.9p1-3ubuntu0.5 [906 kB] Get:7 http://azure.archive.ubuntu.com/ubuntu jammy-updates/main amd64 libcryptsetup12 amd64 2:2.4.3-1ubuntu1.2 [211 kB] Get:8 http://azure.archive.ubuntu.com/ubuntu jammy-updates/main amd64 locales all 2.35-0ubuntu3.5 [4245 kB] Get:9 http://azure.archive.ubuntu.com/ubuntu jammy-updates/main amd64 vim amd64 2:8.2.3995-1ubuntu2.15 [1735 kB] Get:10 http://azure.archive.ubuntu.com/ubuntu jammy-updates/main amd64 vim-tiny amd64 2:8.2.3995-1ubuntu2.15 [710 kB] Get:11 http://azure.archive.ubuntu.com/ubuntu jammy-updates/main amd64 vim-runtime all 2:8.2.3995-1ubuntu2.15 [6835 kB] Get:12 http://azure.archive.ubuntu.com/ubuntu jammy-updates/main amd64 xxd amd64 2:8.2.3995-1ubuntu2.15 [55.2 kB] Get:13 http://azure.archive.ubuntu.com/ubuntu jammy-updates/main amd64 vim-common all 2:8.2.3995-1ubuntu2.15 [81.5 kB] Get:14 http://azure.archive.ubuntu.com/ubuntu jammy-updates/main amd64 libctf0 amd64 2.38-4ubuntu2.4 [103 kB] Get:15 http://azure.archive.ubuntu.com/ubuntu jammy-updates/main amd64 libctf-nobfd0 amd64 2.38-4ubuntu2.4 [108 kB] Get:16 http://azure.archive.ubuntu.com/ubuntu jammy-updates/main amd64 binutils-x86-64-linux-gnu amd64 2.38-4ubuntu2.4 [2327 kB] Get:17 http://azure.archive.ubuntu.com/ubuntu jammy-updates/main amd64 libbinutils amd64 2.38-4ubuntu2.4 [662 kB] Get:18 http://azure.archive.ubuntu.com/ubuntu jammy-updates/main amd64 binutils amd64 2.38-4ubuntu2.4 [3194 B] Get:19 http://azure.archive.ubuntu.com/ubuntu jammy-updates/main amd64 binutils-common amd64 2.38-4ubuntu2.4 [222 kB] Get:20 http://azure.archive.ubuntu.com/ubuntu jammy-updates/main amd64 cryptsetup-initramfs all 2:2.4.3-1ubuntu1.2 [26.2 kB] Get:21 http://azure.archive.ubuntu.com/ubuntu jammy-updates/main amd64 libssh-4 amd64 0.9.6-2ubuntu0.22.04.2 [186 kB] Get:22 http://azure.archive.ubuntu.com/ubuntu jammy-updates/main amd64 cryptsetup-bin amd64 2:2.4.3-1ubuntu1.2 [145 kB] Get:23 http://azure.archive.ubuntu.com/ubuntu jammy-updates/main amd64 cryptsetup amd64 2:2.4.3-1ubuntu1.2 [193 kB] Get:24 http://azure.archive.ubuntu.com/ubuntu 
jammy-updates/main amd64 curl amd64 7.81.0-1ubuntu1.15 [194 kB] Get:25 http://azure.archive.ubuntu.com/ubuntu jammy-updates/main amd64 libcurl4 amd64 7.81.0-1ubuntu1.15 [289 kB] Get:26 http://azure.archive.ubuntu.com/ubuntu jammy-updates/main amd64 libcurl3-gnutls amd64 7.81.0-1ubuntu1.15 [284 kB] Get:27 http://azure.archive.ubuntu.com/ubuntu jammy-updates/main amd64 linux-cloud-tools-common all 5.15.0-91.101 [95.8 kB] Get:28 http://azure.archive.ubuntu.com/ubuntu jammy-updates/main amd64 python3-cryptography amd64 3.4.8-1ubuntu2.1 [236 kB] Get:29 http://azure.archive.ubuntu.com/ubuntu jammy-updates/main amd64 systemd-hwe-hwdb all 249.11.4 [2978 B] Get:30 http://azure.archive.ubuntu.com/ubuntu jammy-updates/main amd64 kpartx amd64 0.8.8-1ubuntu1.22.04.4 [28.8 kB] Get:31 http://azure.archive.ubuntu.com/ubuntu jammy-updates/main amd64 multipath-tools amd64 0.8.8-1ubuntu1.22.04.4 [331 kB] debconf: unable to initialize frontend: Dialog debconf: (Dialog frontend will not work on a dumb terminal, an emacs shell buffer, or without a controlling terminal.) debconf: falling back to frontend: Readline debconf: unable to initialize frontend: Readline debconf: (This frontend requires a controlling tty.) debconf: falling back to frontend: Teletype dpkg-preconfigure: unable to re-open stdin: Fetched 24.9 MB in 1s (34.1 MB/s) (Reading database ... (Reading database ... 5% (Reading database ... 10% (Reading database ... 15% (Reading database ... 20% (Reading database ... 25% (Reading database ... 30% (Reading database ... 35% (Reading database ... 40% (Reading database ... 45% (Reading database ... 50% (Reading database ... 55% (Reading database ... 60% (Reading database ... 65% (Reading database ... 70% (Reading database ... 75% (Reading database ... 80% (Reading database ... 85% (Reading database ... 90% (Reading database ... 95% (Reading database ... 100% (Reading database ... 61587 files and directories currently installed.) Preparing to unpack .../tar_1.34+dfsg-1ubuntu0.1.22.04.2_amd64.deb ... Unpacking tar (1.34+dfsg-1ubuntu0.1.22.04.2) over (1.34+dfsg-1ubuntu0.1.22.04.1) ... Setting up tar (1.34+dfsg-1ubuntu0.1.22.04.2) ... (Reading database ... (Reading database ... 5% (Reading database ... 10% (Reading database ... 15% (Reading database ... 20% (Reading database ... 25% (Reading database ... 30% (Reading database ... 35% (Reading database ... 40% (Reading database ... 45% (Reading database ... 50% (Reading database ... 55% (Reading database ... 60% (Reading database ... 65% (Reading database ... 70% (Reading database ... 75% (Reading database ... 80% (Reading database ... 85% (Reading database ... 90% (Reading database ... 95% (Reading database ... 100% (Reading database ... 61587 files and directories currently installed.) Preparing to unpack .../libc6_2.35-0ubuntu3.5_amd64.deb ... debconf: unable to initialize frontend: Dialog debconf: (Dialog frontend will not work on a dumb terminal, an emacs shell buffer, or without a controlling terminal.) debconf: falling back to frontend: Readline Unpacking libc6:amd64 (2.35-0ubuntu3.5) over (2.35-0ubuntu3.4) ... Setting up libc6:amd64 (2.35-0ubuntu3.5) ... debconf: unable to initialize frontend: Dialog debconf: (Dialog frontend will not work on a dumb terminal, an emacs shell buffer, or without a controlling terminal.) debconf: falling back to frontend: Readline (Reading database ... (Reading database ... 5% (Reading database ... 10% (Reading database ... 15% (Reading database ... 20% (Reading database ... 25% (Reading database ... 
30% (Reading database ... 35% (Reading database ... 40% (Reading database ... 45% (Reading database ... 50% (Reading database ... 55% (Reading database ... 60% (Reading database ... 65% (Reading database ... 70% (Reading database ... 75% (Reading database ... 80% (Reading database ... 85% (Reading database ... 90% (Reading database ... 95% (Reading database ... 100% (Reading database ... 61587 files and directories currently installed.) Preparing to unpack .../libc-bin_2.35-0ubuntu3.5_amd64.deb ... Unpacking libc-bin (2.35-0ubuntu3.5) over (2.35-0ubuntu3.4) ... Setting up libc-bin (2.35-0ubuntu3.5) ... (Reading database ... (Reading database ... 5% (Reading database ... 10% (Reading database ... 15% (Reading database ... 20% (Reading database ... 25% (Reading database ... 30% (Reading database ... 35% (Reading database ... 40% (Reading database ... 45% (Reading database ... 50% (Reading database ... 55% (Reading database ... 60% (Reading database ... 65% (Reading database ... 70% (Reading database ... 75% (Reading database ... 80% (Reading database ... 85% (Reading database ... 90% (Reading database ... 95% (Reading database ... 100% (Reading database ... 61587 files and directories currently installed.) Preparing to unpack .../00-openssh-sftp-server_1%3a8.9p1-3ubuntu0.5_amd64.deb ... Unpacking openssh-sftp-server (1:8.9p1-3ubuntu0.5) over (1:8.9p1-3ubuntu0.4) ... Preparing to unpack .../01-openssh-server_1%3a8.9p1-3ubuntu0.5_amd64.deb ... Unpacking openssh-server (1:8.9p1-3ubuntu0.5) over (1:8.9p1-3ubuntu0.4) ... Preparing to unpack .../02-openssh-client_1%3a8.9p1-3ubuntu0.5_amd64.deb ... Unpacking openssh-client (1:8.9p1-3ubuntu0.5) over (1:8.9p1-3ubuntu0.4) ... Preparing to unpack .../03-libcryptsetup12_2%3a2.4.3-1ubuntu1.2_amd64.deb ... Unpacking libcryptsetup12:amd64 (2:2.4.3-1ubuntu1.2) over (2:2.4.3-1ubuntu1.1) ... Preparing to unpack .../04-locales_2.35-0ubuntu3.5_all.deb ... Unpacking locales (2.35-0ubuntu3.5) over (2.35-0ubuntu3.4) ... Preparing to unpack .../05-vim_2%3a8.2.3995-1ubuntu2.15_amd64.deb ... Unpacking vim (2:8.2.3995-1ubuntu2.15) over (2:8.2.3995-1ubuntu2.13) ... Preparing to unpack .../06-vim-tiny_2%3a8.2.3995-1ubuntu2.15_amd64.deb ... Unpacking vim-tiny (2:8.2.3995-1ubuntu2.15) over (2:8.2.3995-1ubuntu2.13) ... Preparing to unpack .../07-vim-runtime_2%3a8.2.3995-1ubuntu2.15_all.deb ... Unpacking vim-runtime (2:8.2.3995-1ubuntu2.15) over (2:8.2.3995-1ubuntu2.13) ... Preparing to unpack .../08-xxd_2%3a8.2.3995-1ubuntu2.15_amd64.deb ... Unpacking xxd (2:8.2.3995-1ubuntu2.15) over (2:8.2.3995-1ubuntu2.13) ... Preparing to unpack .../09-vim-common_2%3a8.2.3995-1ubuntu2.15_all.deb ... Unpacking vim-common (2:8.2.3995-1ubuntu2.15) over (2:8.2.3995-1ubuntu2.13) ... Preparing to unpack .../10-libctf0_2.38-4ubuntu2.4_amd64.deb ... Unpacking libctf0:amd64 (2.38-4ubuntu2.4) over (2.38-4ubuntu2.3) ... Preparing to unpack .../11-libctf-nobfd0_2.38-4ubuntu2.4_amd64.deb ... Unpacking libctf-nobfd0:amd64 (2.38-4ubuntu2.4) over (2.38-4ubuntu2.3) ... Preparing to unpack .../12-binutils-x86-64-linux-gnu_2.38-4ubuntu2.4_amd64.deb ... Unpacking binutils-x86-64-linux-gnu (2.38-4ubuntu2.4) over (2.38-4ubuntu2.3) ... Preparing to unpack .../13-libbinutils_2.38-4ubuntu2.4_amd64.deb ... Unpacking libbinutils:amd64 (2.38-4ubuntu2.4) over (2.38-4ubuntu2.3) ... Preparing to unpack .../14-binutils_2.38-4ubuntu2.4_amd64.deb ... Unpacking binutils (2.38-4ubuntu2.4) over (2.38-4ubuntu2.3) ... Preparing to unpack .../15-binutils-common_2.38-4ubuntu2.4_amd64.deb ... 
Unpacking binutils-common:amd64 (2.38-4ubuntu2.4) over (2.38-4ubuntu2.3) ... Preparing to unpack .../16-cryptsetup-initramfs_2%3a2.4.3-1ubuntu1.2_all.deb ... debconf: unable to initialize frontend: Dialog debconf: (Dialog frontend will not work on a dumb terminal, an emacs shell buffer, or without a controlling terminal.) debconf: falling back to frontend: Readline Unpacking cryptsetup-initramfs (2:2.4.3-1ubuntu1.2) over (2:2.4.3-1ubuntu1.1) ... Preparing to unpack .../17-libssh-4_0.9.6-2ubuntu0.22.04.2_amd64.deb ... Unpacking libssh-4:amd64 (0.9.6-2ubuntu0.22.04.2) over (0.9.6-2ubuntu0.22.04.1) ... Preparing to unpack .../18-cryptsetup-bin_2%3a2.4.3-1ubuntu1.2_amd64.deb ... Unpacking cryptsetup-bin (2:2.4.3-1ubuntu1.2) over (2:2.4.3-1ubuntu1.1) ... Preparing to unpack .../19-cryptsetup_2%3a2.4.3-1ubuntu1.2_amd64.deb ... debconf: unable to initialize frontend: Dialog debconf: (Dialog frontend will not work on a dumb terminal, an emacs shell buffer, or without a controlling terminal.) debconf: falling back to frontend: Readline Unpacking cryptsetup (2:2.4.3-1ubuntu1.2) over (2:2.4.3-1ubuntu1.1) ... Preparing to unpack .../20-curl_7.81.0-1ubuntu1.15_amd64.deb ... Unpacking curl (7.81.0-1ubuntu1.15) over (7.81.0-1ubuntu1.14) ... Preparing to unpack .../21-libcurl4_7.81.0-1ubuntu1.15_amd64.deb ... Unpacking libcurl4:amd64 (7.81.0-1ubuntu1.15) over (7.81.0-1ubuntu1.14) ... Preparing to unpack .../22-libcurl3-gnutls_7.81.0-1ubuntu1.15_amd64.deb ... Unpacking libcurl3-gnutls:amd64 (7.81.0-1ubuntu1.15) over (7.81.0-1ubuntu1.14) ... Preparing to unpack .../23-linux-cloud-tools-common_5.15.0-91.101_all.deb ... Unpacking linux-cloud-tools-common (5.15.0-91.101) over (5.15.0-89.99) ... Preparing to unpack .../24-python3-cryptography_3.4.8-1ubuntu2.1_amd64.deb ... Unpacking python3-cryptography (3.4.8-1ubuntu2.1) over (3.4.8-1ubuntu2) ... Preparing to unpack .../25-systemd-hwe-hwdb_249.11.4_all.deb ... Unpacking systemd-hwe-hwdb (249.11.4) over (249.11.3) ... Preparing to unpack .../26-kpartx_0.8.8-1ubuntu1.22.04.4_amd64.deb ... Unpacking kpartx (0.8.8-1ubuntu1.22.04.4) over (0.8.8-1ubuntu1.22.04.3) ... Preparing to unpack .../27-multipath-tools_0.8.8-1ubuntu1.22.04.4_amd64.deb ... Unpacking multipath-tools (0.8.8-1ubuntu1.22.04.4) over (0.8.8-1ubuntu1.22.04.3) ... Setting up openssh-client (1:8.9p1-3ubuntu0.5) ... Setting up binutils-common:amd64 (2.38-4ubuntu2.4) ... Setting up libctf-nobfd0:amd64 (2.38-4ubuntu2.4) ... Setting up linux-cloud-tools-common (5.15.0-91.101) ... Setting up locales (2.35-0ubuntu3.5) ... debconf: unable to initialize frontend: Dialog debconf: (Dialog frontend will not work on a dumb terminal, an emacs shell buffer, or without a controlling terminal.) debconf: falling back to frontend: Readline Generating locales (this might take a while)... en_US.UTF-8... done Generation complete. Setting up xxd (2:8.2.3995-1ubuntu2.15) ... Setting up vim-common (2:8.2.3995-1ubuntu2.15) ... Setting up python3-cryptography (3.4.8-1ubuntu2.1) ... Setting up libssh-4:amd64 (0.9.6-2ubuntu0.22.04.2) ... Setting up systemd-hwe-hwdb (249.11.4) ... Setting up kpartx (0.8.8-1ubuntu1.22.04.4) ... Setting up libcurl4:amd64 (7.81.0-1ubuntu1.15) ... Setting up libcryptsetup12:amd64 (2:2.4.3-1ubuntu1.2) ... Setting up curl (7.81.0-1ubuntu1.15) ... Setting up libbinutils:amd64 (2.38-4ubuntu2.4) ... Setting up vim-runtime (2:8.2.3995-1ubuntu2.15) ... Setting up libctf0:amd64 (2.38-4ubuntu2.4) ... Setting up cryptsetup-bin (2:2.4.3-1ubuntu1.2) ... Setting up openssh-sftp-server (1:8.9p1-3ubuntu0.5) ... 
Setting up vim (2:8.2.3995-1ubuntu2.15) ... Setting up openssh-server (1:8.9p1-3ubuntu0.5) ... debconf: unable to initialize frontend: Dialog debconf: (Dialog frontend will not work on a dumb terminal, an emacs shell buffer, or without a controlling terminal.) debconf: falling back to frontend: Readline rescue-ssh.target is a disabled or a static unit not running, not starting it. ssh.socket is a disabled or a static unit not running, not starting it. Setting up cryptsetup (2:2.4.3-1ubuntu1.2) ... debconf: unable to initialize frontend: Dialog debconf: (Dialog frontend will not work on a dumb terminal, an emacs shell buffer, or without a controlling terminal.) debconf: falling back to frontend: Readline Setting up libcurl3-gnutls:amd64 (7.81.0-1ubuntu1.15) ... Setting up vim-tiny (2:8.2.3995-1ubuntu2.15) ... Setting up multipath-tools (0.8.8-1ubuntu1.22.04.4) ... Could not execute systemctl: at /usr/bin/deb-systemd-invoke line 142. Setting up cryptsetup-initramfs (2:2.4.3-1ubuntu1.2) ... debconf: unable to initialize frontend: Dialog debconf: (Dialog frontend will not work on a dumb terminal, an emacs shell buffer, or without a controlling terminal.) debconf: falling back to frontend: Readline update-initramfs: deferring update (trigger activated) Setting up binutils-x86-64-linux-gnu (2.38-4ubuntu2.4) ... Setting up binutils (2.38-4ubuntu2.4) ... Processing triggers for udev (249.11-0ubuntu3.11) ... Processing triggers for initramfs-tools (0.140ubuntu13.4) ... update-initramfs: Generating /boot/initrd.img-6.2.0-1018-azure Processing triggers for libc-bin (2.35-0ubuntu3.5) ... Processing triggers for ufw (0.36.1-4ubuntu0.1) ... Processing triggers for man-db (2.10.2-1) ... Running kernel seems to be up-to-date. Services to be restarted: systemctl restart chrony.service systemctl restart cron.service systemctl restart irqbalance.service systemctl restart packagekit.service systemctl restart polkit.service systemctl restart rsyslog.service systemctl restart serial-getty@ttyS0.service systemctl restart snapd.service systemctl restart systemd-journald.service systemctl restart systemd-networkd.service systemctl restart systemd-resolved.service systemctl restart systemd-udevd.service systemctl restart walinuxagent.service Service restarts being deferred: /etc/needrestart/restart.d/dbus.service systemctl restart getty@tty1.service systemctl restart networkd-dispatcher.service systemctl restart systemd-logind.service systemctl restart unattended-upgrades.service systemctl restart user@1000.service No containers need to be restarted. No user sessions are running outdated binaries. No VM guests are running outdated hypervisor (qemu) binaries on this host. Connection to 172.21.23.10 closed by remote host. + sleep 90 + ssh -T -o StrictHostKeyChecking=no -o UserKnownHostsFile=/dev/null ubuntu@172.21.23.10 Warning: Permanently added '172.21.23.10' (ED25519) to the list of known hosts. Welcome to Ubuntu 22.04.3 LTS (GNU/Linux 6.2.0-1018-azure x86_64) * Documentation: https://help.ubuntu.com * Management: https://landscape.canonical.com * Support: https://ubuntu.com/advantage System information as of Sun Dec 31 12:13:37 UTC 2023 System load: 0.6259765625 Processes: 124 Usage of /: 5.9% of 28.89GB Users logged in: 0 Memory usage: 7% IPv4 address for eth0: 172.21.23.10 Swap usage: 0% Expanded Security Maintenance for Applications is not enabled. 2 updates can be applied immediately. 1 of these updates is a standard security update. 
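The upgrade-and-reboot step just completed uses a deliberately simple pattern: run the whole upgrade plus reboot as one remote command under set +e (the reboot drops the SSH connection, as the 'closed by remote host' line shows), wait a fixed 90 seconds, then reconnect. Sketched with the SSH options from the log:

SSH_OPTS='-T -o StrictHostKeyChecking=no -o UserKnownHostsFile=/dev/null'
set +e
ssh $SSH_OPTS ubuntu@"$K8S_IP" 'sudo apt-get update -y && sudo apt-get upgrade -y && sudo reboot'
sleep 90                         # fixed wait; the logged run does not poll for the host to come back
ssh $SSH_OPTS ubuntu@"$K8S_IP"   # subsequent provisioning commands go through this session

The login banner of that post-reboot session continues below.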
To see these additional updates run: apt list --upgradable Enable ESM Apps to receive additional future security updates. See https://ubuntu.com/esm or run: sudo pro status + sudo snap install yq yq v4.40.4 from Mike Farah (mikefarah) installed + sudo snap install microk8s --classic microk8s (1.28/stable) v1.28.3 from Canonical** installed + sudo usermod -a -G microk8s ubuntu + newgrp microk8s microk8s is running high-availability: no datastore master nodes: 127.0.0.1:19001 datastore standby nodes: none addons: enabled: dns # (core) CoreDNS ha-cluster # (core) Configure high availability on the current node helm # (core) Helm - the package manager for Kubernetes helm3 # (core) Helm 3 - the package manager for Kubernetes disabled: cert-manager # (core) Cloud native certificate management cis-hardening # (core) Apply CIS K8s hardening community # (core) The community addons repository dashboard # (core) The Kubernetes dashboard gpu # (core) Automatic enablement of Nvidia CUDA host-access # (core) Allow Pods connecting to Host services smoothly hostpath-storage # (core) Storage class; allocates storage from host directory ingress # (core) Ingress controller for external access kube-ovn # (core) An advanced network fabric for Kubernetes mayastor # (core) OpenEBS MayaStor metallb # (core) Loadbalancer for your Kubernetes cluster metrics-server # (core) K8s Metrics Server for API access to service metrics minio # (core) MinIO object storage observability # (core) A lightweight observability stack for logs, traces and metrics prometheus # (core) Prometheus operator for monitoring and logging rbac # (core) Role-Based Access Control for authorisation registry # (core) Private image registry exposed on localhost:32000 rook-ceph # (core) Distributed Ceph storage using Rook storage # (core) Alias to hostpath-storage add-on, deprecated WARNING: Do not enable or disable multiple addons in one command. This form of chained operations on addons will be DEPRECATED in the future. Please, enable one addon at a time: 'microk8s enable ' Infer repository core for addon storage Infer repository core for addon dns DEPRECATION WARNING: 'storage' is deprecated and will soon be removed. Please use 'hostpath-storage' instead. Infer repository core for addon hostpath-storage Enabling default storage class. WARNING: Hostpath storage is not suitable for production environments. A hostpath volume can grow beyond the size limit set in the volume claim manifest. deployment.apps/hostpath-provisioner created storageclass.storage.k8s.io/microk8s-hostpath created serviceaccount/microk8s-hostpath created clusterrole.rbac.authorization.k8s.io/microk8s-hostpath created clusterrolebinding.rbac.authorization.k8s.io/microk8s-hostpath created Storage will be available soon. Addon core/dns is already enabled + ssh -T -o StrictHostKeyChecking=no -o UserKnownHostsFile=/dev/null ubuntu@172.21.23.10 Warning: Permanently added '172.21.23.10' (ED25519) to the list of known hosts. Welcome to Ubuntu 22.04.3 LTS (GNU/Linux 6.2.0-1018-azure x86_64) * Documentation: https://help.ubuntu.com * Management: https://landscape.canonical.com * Support: https://ubuntu.com/advantage System information as of Sun Dec 31 12:13:37 UTC 2023 System load: 0.6259765625 Processes: 124 Usage of /: 5.9% of 28.89GB Users logged in: 0 Memory usage: 7% IPv4 address for eth0: 172.21.23.10 Swap usage: 0% Expanded Security Maintenance for Applications is not enabled. 2 updates can be applied immediately. 1 of these updates is a standard security update. 
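With the VM back up, the cluster itself is only MicroK8s from snap plus two core addons. Taking the warnings printed above at face value (enable one addon per command, prefer hostpath-storage over the deprecated storage alias), an equivalent sequence would look roughly like this; the explicit --channel flag and the --wait-ready call are assumptions, since the logged run installs from the default channel and shows the status output right after 'newgrp microk8s':

sudo snap install yq
sudo snap install microk8s --classic --channel=1.28/stable
sudo usermod -a -G microk8s ubuntu
sudo microk8s status --wait-ready       # block until the node reports ready
sudo microk8s enable hostpath-storage   # default StorageClass; not for production, as the warning above says
sudo microk8s enable dns                # CoreDNS; already enabled by default in the logged run

The MetalLB step follows in the next SSH session below.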
To see these additional updates run: apt list --upgradable Enable ESM Apps to receive additional future security updates. See https://ubuntu.com/esm or run: sudo pro status ++ hostname -I ++ awk '{print $1}' 172.21.23.10 + PRIVATE_IP=172.21.23.10 + echo 172.21.23.10 + sudo microk8s.enable metallb:172.21.23.10-172.21.23.10 Infer repository core for addon metallb Enabling MetalLB Applying Metallb manifest customresourcedefinition.apiextensions.k8s.io/addresspools.metallb.io created customresourcedefinition.apiextensions.k8s.io/bfdprofiles.metallb.io created customresourcedefinition.apiextensions.k8s.io/bgpadvertisements.metallb.io created customresourcedefinition.apiextensions.k8s.io/bgppeers.metallb.io created customresourcedefinition.apiextensions.k8s.io/communities.metallb.io created customresourcedefinition.apiextensions.k8s.io/ipaddresspools.metallb.io created customresourcedefinition.apiextensions.k8s.io/l2advertisements.metallb.io created namespace/metallb-system created serviceaccount/controller created serviceaccount/speaker created clusterrole.rbac.authorization.k8s.io/metallb-system:controller created clusterrole.rbac.authorization.k8s.io/metallb-system:speaker created role.rbac.authorization.k8s.io/controller created role.rbac.authorization.k8s.io/pod-lister created clusterrolebinding.rbac.authorization.k8s.io/metallb-system:controller created clusterrolebinding.rbac.authorization.k8s.io/metallb-system:speaker created rolebinding.rbac.authorization.k8s.io/controller created secret/webhook-server-cert created service/webhook-service created rolebinding.rbac.authorization.k8s.io/pod-lister created daemonset.apps/speaker created deployment.apps/controller created validatingwebhookconfiguration.admissionregistration.k8s.io/validating-webhook-configuration created Waiting for Metallb controller to be ready. error: timed out waiting for the condition on deployments/controller MetalLB controller is still not ready deployment.apps/controller condition met ipaddresspool.metallb.io/default-addresspool created l2advertisement.metallb.io/default-advertise-all-pools created MetalLB is enabled + ssh -T -o StrictHostKeyChecking=no -o UserKnownHostsFile=/dev/null ubuntu@172.21.23.10 Warning: Permanently added '172.21.23.10' (ED25519) to the list of known hosts. Welcome to Ubuntu 22.04.3 LTS (GNU/Linux 6.2.0-1018-azure x86_64) * Documentation: https://help.ubuntu.com * Management: https://landscape.canonical.com * Support: https://ubuntu.com/advantage System information as of Sun Dec 31 12:13:37 UTC 2023 System load: 0.6259765625 Processes: 124 Usage of /: 5.9% of 28.89GB Users logged in: 0 Memory usage: 7% IPv4 address for eth0: 172.21.23.10 Swap usage: 0% Expanded Security Maintenance for Applications is not enabled. 2 updates can be applied immediately. 1 of these updates is a standard security update. To see these additional updates run: apt list --upgradable Enable ESM Apps to receive additional future security updates. 
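MetalLB is enabled with a one-address pool consisting solely of the node's own private IP, so any LoadBalancer service resolves to the VM itself; the 'timed out waiting' message above appears to be the enable script polling before the controller pod was scheduled, and the condition is met on the next check. A sketch of the same step for a single-node cluster:

PRIVATE_IP=$(hostname -I | awk '{print $1}')
sudo microk8s enable metallb:${PRIVATE_IP}-${PRIVATE_IP}
# Illustrative verification, not part of the logged run:
sudo microk8s kubectl -n metallb-system get ipaddresspools.metallb.io

The next SSH session, which patches the API server certificate template, follows below.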
See https://ubuntu.com/esm or run: sudo pro status + sudo sed -i 's/\#MOREIPS/IP.3 = 172.21.23.10/g' /var/snap/microk8s/current/certs/csr.conf.template + cat /var/snap/microk8s/current/certs/csr.conf.template [ req ] default_bits = 2048 prompt = no default_md = sha256 req_extensions = req_ext distinguished_name = dn [ dn ] C = GB ST = Canonical L = Canonical O = Canonical OU = Canonical CN = 127.0.0.1 [ req_ext ] subjectAltName = @alt_names [ alt_names ] DNS.1 = kubernetes DNS.2 = kubernetes.default DNS.3 = kubernetes.default.svc DNS.4 = kubernetes.default.svc.cluster DNS.5 = kubernetes.default.svc.cluster.local IP.1 = 127.0.0.1 IP.2 = 10.152.183.1 IP.3 = 172.21.23.10 [ v3_ext ] authorityKeyIdentifier=keyid,issuer:always basicConstraints=CA:FALSE keyUsage=keyEncipherment,dataEncipherment,digitalSignature extendedKeyUsage=serverAuth,clientAuth subjectAltName=@alt_names + echo ================================================================ ================================================================ + echo K8s cluster credentials: K8s cluster credentials: + echo ================================================================ ================================================================ + echo + ssh -T -o StrictHostKeyChecking=no -o UserKnownHostsFile=/dev/null ubuntu@172.21.23.10 'sudo microk8s.config' + sed 's/server: .*/server: https:\/\/172.21.23.10:16443/g' + tee /robot-systest/results/kubeconfig.yaml Warning: Permanently added '172.21.23.10' (ED25519) to the list of known hosts. apiVersion: v1 clusters: - cluster: certificate-authority-data: LS0tLS1CRUdJTiBDRVJUSUZJQ0FURS0tLS0tCk1JSUREekNDQWZlZ0F3SUJBZ0lVSjZ4WmU5V1MvTXBKTlRtN0gzS3NVQm5Fejlrd0RRWUpLb1pJaHZjTkFRRUwKQlFBd0Z6RVZNQk1HQTFVRUF3d01NVEF1TVRVeUxqRTRNeTR4TUI0WERUSXpNVEl6TVRFeU1UUXlNVm9YRFRNegpNVEl5T0RFeU1UUXlNVm93RnpFVk1CTUdBMVVFQXd3TU1UQXVNVFV5TGpFNE15NHhNSUlCSWpBTkJna3Foa2lHCjl3MEJBUUVGQUFPQ0FROEFNSUlCQ2dLQ0FRRUF4WVZxeTdHZ05Qb1hQRTQ2RlhvWFl6WFRuOVZscTIvZ1JpcXgKdVN3Smp6bzlZRlBUYVdXOTN1dGhvcXB6VDByQzhmR0ZBOHFpR1FIMnh2MGZYTVFoMTIrbkdyalRjYzcvc3pCNAo1ZEFSYlBEcUhMN3ZvcmlyUkx4ejRXdWxIZjZiaisyb1ljTmxpTkZUcCtvY3J3SW0rdEtWWXN2Q1FFbWQ1aTFKClZmVjBvQ2JoZ1YvbTZWV2tYTGsycUpBckdKUFVwMTNmL3NNOHRGb29ld0xTQ0MwTFFYNmM3cEh3ajk2c0hmdVAKelViN0RiN3FQMnJ2WWNhMXRvWGc2SWdkdnVreXhvUitRNEpIUWdIRGVld2VaTUQ3RTRhK2w5K25Obm9BcEJROApMMk9zSEp1MzNNRURWNGtqWktLNU02cS9adDlXUUxjOHdwNEE0dEVTeG9ZUSs3d2FHd0lEQVFBQm8xTXdVVEFkCkJnTlZIUTRFRmdRVURvOGJEdTdVOWtZMW1kMm5QZkFDM0JqMk0xWXdId1lEVlIwakJCZ3dGb0FVRG84YkR1N1UKOWtZMW1kMm5QZkFDM0JqMk0xWXdEd1lEVlIwVEFRSC9CQVV3QXdFQi96QU5CZ2txaGtpRzl3MEJBUXNGQUFPQwpBUUVBdEdYbVNGcVd4b2E0SnNxYnliRzlNcHlrTWdobjBVTlBYYnlrM0k4dWF6enIzbHh3M2d0ckhZUVR3VnlhCkwvZjVXcnExTGhwbkZTWTJXQmwvOWYxeUdHREp0Z3dxZE14a3ZpUWlsWHE3R0JESThKZE5ML3Y0blhiOXdoOGIKY1VQL0JIaGRCRHd3cjYyR2ttZUVGSW5LYXA2UVIyeWZpcmFWRDZIMktldCtyTG9qRDQvMzk3SjhGUHhvTmM0dQpBWTdCNEo0T1B6dFlzbm11SGozZnhjSnhFeXI2d1RpaGZqVys0eXBOdk5vTzJGVWpaSVpRb24wYit2clRCMVdOCktCaFhYVGRxVHpqSTMvNUVVb3R6VXFuYk5RcGFvaDdyY01sYmxFa1pQaHYrcXFzU0ZSbXdBZUtFNVZxdHFUUUoKeUhaR05jK25lUDJlRlNWNHlHZU1ndC91Y1E9PQotLS0tLUVORCBDRVJUSUZJQ0FURS0tLS0tCg== server: https://172.21.23.10:16443 name: microk8s-cluster contexts: - context: cluster: microk8s-cluster user: admin name: microk8s current-context: microk8s kind: Config preferences: {} users: - name: admin user: client-certificate-data: 
LS0tLS1CRUdJTiBDRVJUSUZJQ0FURS0tLS0tCk1JSUN6RENDQWJTZ0F3SUJBZ0lVVlhMMk9hajFlQnRxdEs4Z3NQdmoxZXc0S3hFd0RRWUpLb1pJaHZjTkFRRUwKQlFBd0Z6RVZNQk1HQTFVRUF3d01NVEF1TVRVeUxqRTRNeTR4TUI0WERUSXpNVEl6TVRFeU1UUXlNMW9YRFRNegpNVEl5T0RFeU1UUXlNMW93S1RFT01Bd0dBMVVFQXd3RllXUnRhVzR4RnpBVkJnTlZCQW9NRG5ONWMzUmxiVHB0CllYTjBaWEp6TUlJQklqQU5CZ2txaGtpRzl3MEJBUUVGQUFPQ0FROEFNSUlCQ2dLQ0FRRUF6eE9RUnNHM29jZmEKcUYzbkgzZ1hxSVlEUmN5S3c4MzhrbXdhZng5MWREeVJ3L05nNG13UlZKSWE3UGtlQ0hWcXFuQ3FqcTZkdXkwQgpQZUpSR1hodEtoQ3NrOWZmZkFFUThQc0dNQjhqYzBvMDA2STFvN29ESDB5ZWU4N3V1WTRROFR0NnI4R0FWazg5Cjd2NmMvUi9tU3FRYitHZ09kbjBPM0VnUktkN3V4NzJxZzBvM25tSXBINHRDS1dQaUJGT05nSWh0aXY1WlZDb0wKa2Nsek9TVWE1cE92TVZkVElWVlZmQ0d4WnFDenp0TDBLME9tM0RrY21IL2ZJUnN5ajZsVFFCNW1ySGZ1QndMOAp6YVFVWnpuaDVLT3cxV0lCODQweVUvc2RQeW82c1VvRXJ4QktsNGNPNzM5MmxzalY1UGJjN0xpY3FCWUVvYnlxCm1UajBXc3Jnc3dJREFRQUJNQTBHQ1NxR1NJYjNEUUVCQ3dVQUE0SUJBUUMrcldGeUFDSVVoMVFUc1ZnSHp6VzMKSjU4STFPN004K1RsZG9KTmlLaDRYSlVoTzFaSXJ1RjJCdVQ2djhFUjFyTmphT1EzTkE4SG1rYVF1ckdLSmlKYgpmdk5tZjJJbnU4YnB4RzRuNGJuVndlbWlDZVlmU3YxeDFaVlBuYlZtNW9JQ0xYOW9ZSjZGVjhrM0k0Nmh2RVUzCisvTEV2b3RvZThTUGNRQ2FZeENPbFNUaC8yTmVkNlB5bzJ3ajZ3RWNXMnh0dmdBRkxUeFlZTUx6eGh2UFBoTEMKYVJ1QTB5TkNYWWFIS1h5L0hlVWhDSTZUNDJrYkRXOFJ0eXBNVWc4aXZzYXN0Z24ydi9ZVkUzTnk3ZmpBSW9sVgo5K2VWcnh6cjV5NEJqZ0ptNjRFN1JtS29aRkFGMWNGS0JTRm9uTmFHZUQ0YSt0Tmh0d3ptLzRnRU8yZlliRGRKCi0tLS0tRU5EIENFUlRJRklDQVRFLS0tLS0K client-key-data: LS0tLS1CRUdJTiBSU0EgUFJJVkFURSBLRVktLS0tLQpNSUlFcEFJQkFBS0NBUUVBenhPUVJzRzNvY2ZhcUYzbkgzZ1hxSVlEUmN5S3c4MzhrbXdhZng5MWREeVJ3L05nCjRtd1JWSklhN1BrZUNIVnFxbkNxanE2ZHV5MEJQZUpSR1hodEtoQ3NrOWZmZkFFUThQc0dNQjhqYzBvMDA2STEKbzdvREgweWVlODd1dVk0UThUdDZyOEdBVms4OTd2NmMvUi9tU3FRYitHZ09kbjBPM0VnUktkN3V4NzJxZzBvMwpubUlwSDR0Q0tXUGlCRk9OZ0lodGl2NVpWQ29Ma2Nsek9TVWE1cE92TVZkVElWVlZmQ0d4WnFDenp0TDBLME9tCjNEa2NtSC9mSVJzeWo2bFRRQjVtckhmdUJ3TDh6YVFVWnpuaDVLT3cxV0lCODQweVUvc2RQeW82c1VvRXJ4QksKbDRjTzczOTJsc2pWNVBiYzdMaWNxQllFb2J5cW1UajBXc3Jnc3dJREFRQUJBb0lCQUcrTmdYYXNtdWtiU2M3VApyMU9EbFhNaXBwYXVacjZ1cy9hSktBajlnSCthdFlmVEQ0bEtZRmVuMGJ1QlJFMllPMXNRRU1HR2pkQXNvWE9kCi9NUjAzSklCbzRhTmo0WTA2TlI0MWsrTUpzUFA0UDkxYmVJQ3JmWHBoNkw0eWgvSnlaUzZ6dy9wZE9LS0U3b3gKRFk5SzVQdW14cTZlRjlSNjhFdDR3MDVFRzJ3YjVvSmNWOHRsT3NlL3N0L3JqcjdmQk1nOTlxOVFYM1R3NXUzVgpYZE0wWVhpNEhzRnlxcnNvVi9XSkwrc2owakhGcmFlTnNZa25tSVVmaTFIclNpOHcyWmJzUDJodVNLTzE5M0VOClE3ZXovUHZMbHh1TDVYbWpFUjh1a3ZVaVlTc0Z6Y29zeExaUEU4dEFnY294aVJXWENIUzExMjlHZWVjVmt2bGEKbHR2dVYxRUNnWUVBL2UxTXMzU3lxSGdST3FPZ1A4YXp3cEs4YVRnT0QvSXNsSGhTK1JjdU1MT0MzTW8vMi9WaQpqQ2h0cC9GSnpycE1CUmppV3hJdHFqSzhOSzFPbisybVM0V0E1Y1Btekg2RjdOQXFhUUZDSWhUcXB1REdjeFF3CkRMQ094YzliRGhBa0hFVks4RlpXakMwZzR0MytEdXMzOUJLOUlINy9NeURneU5TRUdsRHNUOXNDZ1lFQTBNUlkKL3JjdEVZdTVKeVcyaUkrOHdZNy9lS0VybUZTWVA0TzBLRXJxenMrMDlpZGIwOXRCOC94dGtVYTdORUFvYWFnOAphTGpqUGV4aFp5cUNBazJ6L05GVmlMU3FjYTM0NlB5QmVTOUlPV1krZ3JNbEM4YXJnUTd6c2wvNGJrNllNM1pSCmlmSE9WbUZSamNOOE1CWXBkUFdGRm9rWUpOOVdWb2lJWXUyaDFna0NnWUVBekg3T0ZYdjI0WndJYkF3WVl5UTMKNG92ZnRuRVJTaUJEYkxKV1NBbzJCYnlidUFvZFVad1Y2RUFxeDJTWGh5Z2JUc09Tajd0ZEVVTll0S2VadVV4ZgpiMVN0ME54cXcrZGxIYWtyYm5NMytsQ1VCUjdpVXcxQUFRZ3pndlNTSlFvMHVwdkhWVFNDOE5Mck9DbUZrQ2c0CllKU3l3VGZsZUJ5VFNzbHpOZXBUMVo4Q2dZRUFvZldod1VJa2diY3ZGakxJMEZzS3FmTWhPTmw0Yzc0MU94a1cKL0NkYTBNVU04K2FHOCszbTFiMGcxNUxUQ3U4M0t2anNuS1hMRGJCRHhJQ1N5ZXkwVk0rbUZ0OXFWN1VMSk4wNApLT2JJbGxjKzZiREhiY0pjTFV5SFBQdjlSTDVPajZZY0RmTjlDMENHdjM5SDZocFhZVGhVWHE1Ty85UVFMQ1BTCkEwV3R4dGtDZ1lCOFpLaExNMkFhMXREbVpqQnlvVHIweXNBSHNuZWNsa0htMjE3NmtoTDhQNWJwTmpYWk9wRHIKcGRKdC83UCtVWWVNWUJWZUhqaFRycWNZUDRmL0RNQ29GTTZwVEtNaDkrSUZaWHYxdHF6MGlJWGdiSC91b1NwZAp6MmcvbU1yWUgrcm9jT1JMNzIzUEF5Q3NiOWtOcjNGR
EswcEF5Z2VTb21Ib2ZPMEIvWVhpY0E9PQotLS0tLUVORCBSU0EgUFJJVkFURSBLRVktLS0tLQo= + echo 'export K8S_CREDENTIALS=/robot-systest/results/kubeconfig.yaml' + echo File with new environment was created at /robot-systest/results/k8s_environment.rc File with new environment was created at /robot-systest/results/k8s_environment.rc [Pipeline] sh [azure_robot_tests] Running shell script + cat /robot-systest/results/k8s_environment.rc export CLOUD_TYPE="azure" export USE_PAAS_K8S="FALSE" export K8S_IP="172.21.23.10" export K8S_IMAGE_NAME="k8stest202312311208" export K8S_CREDENTIALS=/robot-systest/results/kubeconfig.yaml [Pipeline] } [Pipeline] // stage [Pipeline] stage [Pipeline] { (Install OSM) [Pipeline] sh [azure_robot_tests] Running shell script + /robot-systest/cloud-scripts/create-osm-vm.sh + az vm create --resource-group OSM_CICD_GROUP --name osmtest202312311216 --image Canonical:0001-com-ubuntu-server-jammy:22_04-lts:latest --size Standard_D4as_v4 --vnet-name OSM-CICD-net --subnet OSM-CICD-subnet --public-ip-address '' --admin-username ubuntu --priority Regular --os-disk-size-gb 64 Selecting "uksouth" may reduce your costs. The region you've selected may cost more for the same services. You can disable this message in the future with the command "az config set core.display_region_identified=false". Learn more at https://go.microsoft.com/fwlink/?linkid=222571 WARNING: Consider upgrading security for your workloads using Azure Trusted Launch VMs. To know more about Trusted Launch, please visit https://aka.ms/TrustedLaunch. { "fqdns": "", "id": "/subscriptions/8fb7e78d-097b-413d-bc65-41d29be6bab1/resourceGroups/OSM_CICD_GROUP/providers/Microsoft.Compute/virtualMachines/osmtest202312311216", "location": "westeurope", "macAddress": "00-0D-3A-C2-7B-D6", "powerState": "VM running", "privateIpAddress": "172.21.23.11", "publicIpAddress": "", "resourceGroup": "OSM_CICD_GROUP", "zones": "" } ++ az vm show -d -g OSM_CICD_GROUP -n osmtest202312311216 --query privateIps ++ tr -d '"' + export NEW_OSM_IP=172.21.23.11 + NEW_OSM_IP=172.21.23.11 ++ az vm show --resource-group OSM_CICD_GROUP --name osmtest202312311216 --query 'networkProfile.networkInterfaces[0].id' + INTERFACE_ID='"/subscriptions/8fb7e78d-097b-413d-bc65-41d29be6bab1/resourceGroups/OSM_CICD_GROUP/providers/Microsoft.Network/networkInterfaces/osmtest202312311216VMNic"' + INTERFACE_ID=/subscriptions/8fb7e78d-097b-413d-bc65-41d29be6bab1/resourceGroups/OSM_CICD_GROUP/providers/Microsoft.Network/networkInterfaces/osmtest202312311216VMNic ++ az network nic show --id /subscriptions/8fb7e78d-097b-413d-bc65-41d29be6bab1/resourceGroups/OSM_CICD_GROUP/providers/Microsoft.Network/networkInterfaces/osmtest202312311216VMNic --query networkSecurityGroup.id + SECURITY_GROUP_ID='"/subscriptions/8fb7e78d-097b-413d-bc65-41d29be6bab1/resourceGroups/OSM_CICD_GROUP/providers/Microsoft.Network/networkSecurityGroups/osmtest202312311216NSG"' + SECURITY_GROUP_ID=/subscriptions/8fb7e78d-097b-413d-bc65-41d29be6bab1/resourceGroups/OSM_CICD_GROUP/providers/Microsoft.Network/networkSecurityGroups/osmtest202312311216NSG ++ az resource show --ids /subscriptions/8fb7e78d-097b-413d-bc65-41d29be6bab1/resourceGroups/OSM_CICD_GROUP/providers/Microsoft.Network/networkSecurityGroups/osmtest202312311216NSG --query name + SECURITY_GROUP_NAME='"osmtest202312311216NSG"' + SECURITY_GROUP_NAME=osmtest202312311216NSG + az network nsg rule create -n osm --nsg-name osmtest202312311216NSG --priority 2000 -g OSM_CICD_GROUP --description 'NBI and Prometheus ports' --protocol TCP 
--destination-port-ranges 9999 9091 { "access": "Allow", "description": "NBI and Prometheus ports", "destinationAddressPrefix": "*", "destinationAddressPrefixes": [], "destinationPortRanges": [ "9999", "9091" ], "direction": "Inbound", "etag": "W/\"cb7a771e-5548-4e47-b135-055cb791028d\"", "id": "/subscriptions/8fb7e78d-097b-413d-bc65-41d29be6bab1/resourceGroups/OSM_CICD_GROUP/providers/Microsoft.Network/networkSecurityGroups/osmtest202312311216NSG/securityRules/osm", "name": "osm", "priority": 2000, "protocol": "Tcp", "provisioningState": "Succeeded", "resourceGroup": "OSM_CICD_GROUP", "sourceAddressPrefix": "*", "sourceAddressPrefixes": [], "sourcePortRange": "*", "sourcePortRanges": [], "type": "Microsoft.Network/networkSecurityGroups/securityRules" } + mkdir -p /robot-systest/results + cat + echo File with new environment was created at /robot-systest/results/osm_environment.rc File with new environment was created at /robot-systest/results/osm_environment.rc [Pipeline] sh [azure_robot_tests] Running shell script + cat /robot-systest/results/osm_environment.rc export CLOUD_TYPE="azure" export OSM_HOSTNAME="172.21.23.11" export OSM_IMAGE_NAME="osmtest202312311216" [Pipeline] sh [azure_robot_tests] Running shell script + . /robot-systest/results/osm_environment.rc + export CLOUD_TYPE=azure + export OSM_HOSTNAME=172.21.23.11 + export OSM_IMAGE_NAME=osmtest202312311216 + /robot-systest/cloud-scripts/remote-install-osm.sh Warning: Permanently added '172.21.23.11' (ED25519) to the list of known hosts. Hit:1 http://azure.archive.ubuntu.com/ubuntu jammy InRelease Get:2 http://azure.archive.ubuntu.com/ubuntu jammy-updates InRelease [119 kB] Get:3 http://azure.archive.ubuntu.com/ubuntu jammy-backports InRelease [109 kB] Get:4 http://azure.archive.ubuntu.com/ubuntu jammy-security InRelease [110 kB] Get:5 http://azure.archive.ubuntu.com/ubuntu jammy/universe amd64 Packages [14.1 MB] Get:6 http://azure.archive.ubuntu.com/ubuntu jammy/universe Translation-en [5652 kB] Get:7 http://azure.archive.ubuntu.com/ubuntu jammy/universe amd64 c-n-f Metadata [286 kB] Get:8 http://azure.archive.ubuntu.com/ubuntu jammy/multiverse amd64 Packages [217 kB] Get:9 http://azure.archive.ubuntu.com/ubuntu jammy/multiverse Translation-en [112 kB] Get:10 http://azure.archive.ubuntu.com/ubuntu jammy/multiverse amd64 c-n-f Metadata [8372 B] Get:11 http://azure.archive.ubuntu.com/ubuntu jammy-updates/main amd64 Packages [1263 kB] Get:12 http://azure.archive.ubuntu.com/ubuntu jammy-updates/main Translation-en [260 kB] Get:13 http://azure.archive.ubuntu.com/ubuntu jammy-updates/restricted amd64 Packages [1250 kB] Get:14 http://azure.archive.ubuntu.com/ubuntu jammy-updates/restricted Translation-en [203 kB] Get:15 http://azure.archive.ubuntu.com/ubuntu jammy-updates/universe amd64 Packages [1020 kB] Get:16 http://azure.archive.ubuntu.com/ubuntu jammy-updates/universe Translation-en [226 kB] Get:17 http://azure.archive.ubuntu.com/ubuntu jammy-updates/universe amd64 c-n-f Metadata [22.1 kB] Get:18 http://azure.archive.ubuntu.com/ubuntu jammy-updates/multiverse amd64 Packages [41.6 kB] Get:19 http://azure.archive.ubuntu.com/ubuntu jammy-updates/multiverse Translation-en [9768 B] Get:20 http://azure.archive.ubuntu.com/ubuntu jammy-updates/multiverse amd64 c-n-f Metadata [472 B] Get:21 http://azure.archive.ubuntu.com/ubuntu jammy-backports/main amd64 Packages [41.7 kB] Get:22 http://azure.archive.ubuntu.com/ubuntu jammy-backports/main Translation-en [10.5 kB] Get:23 http://azure.archive.ubuntu.com/ubuntu jammy-backports/main amd64 
c-n-f Metadata [388 B] Get:24 http://azure.archive.ubuntu.com/ubuntu jammy-backports/restricted amd64 c-n-f Metadata [116 B] Get:25 http://azure.archive.ubuntu.com/ubuntu jammy-backports/universe amd64 Packages [24.3 kB] Get:26 http://azure.archive.ubuntu.com/ubuntu jammy-backports/universe Translation-en [16.5 kB] Get:27 http://azure.archive.ubuntu.com/ubuntu jammy-backports/universe amd64 c-n-f Metadata [644 B] Get:28 http://azure.archive.ubuntu.com/ubuntu jammy-backports/multiverse amd64 c-n-f Metadata [116 B] Get:29 http://azure.archive.ubuntu.com/ubuntu jammy-security/main amd64 Packages [1051 kB] Get:30 http://azure.archive.ubuntu.com/ubuntu jammy-security/main Translation-en [200 kB] Get:31 http://azure.archive.ubuntu.com/ubuntu jammy-security/restricted amd64 Packages [1226 kB] Get:32 http://azure.archive.ubuntu.com/ubuntu jammy-security/restricted Translation-en [200 kB] Get:33 http://azure.archive.ubuntu.com/ubuntu jammy-security/universe amd64 Packages [823 kB] Get:34 http://azure.archive.ubuntu.com/ubuntu jammy-security/universe Translation-en [156 kB] Get:35 http://azure.archive.ubuntu.com/ubuntu jammy-security/universe amd64 c-n-f Metadata [16.8 kB] Get:36 http://azure.archive.ubuntu.com/ubuntu jammy-security/multiverse amd64 Packages [36.5 kB] Get:37 http://azure.archive.ubuntu.com/ubuntu jammy-security/multiverse Translation-en [7060 B] Get:38 http://azure.archive.ubuntu.com/ubuntu jammy-security/multiverse amd64 c-n-f Metadata [260 B] Fetched 28.8 MB in 3s (10.1 MB/s) Reading package lists... Reading package lists... Building dependency tree... Reading state information... Calculating upgrade... The following packages have been kept back: linux-tools-common python3-update-manager update-manager-core The following packages will be upgraded: binutils binutils-common binutils-x86-64-linux-gnu cryptsetup cryptsetup-bin cryptsetup-initramfs curl kpartx libbinutils libc-bin libc6 libcryptsetup12 libctf-nobfd0 libctf0 libcurl3-gnutls libcurl4 libssh-4 linux-cloud-tools-common locales multipath-tools openssh-client openssh-server openssh-sftp-server python3-cryptography systemd-hwe-hwdb tar vim vim-common vim-runtime vim-tiny xxd 31 upgraded, 0 newly installed, 0 to remove and 3 not upgraded. Need to get 24.9 MB of archives. After this operation, 26.6 kB of additional disk space will be used. 
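The Install OSM stage begun above mirrors the cluster-VM provisioning, only with a larger Standard_D4as_v4 VM, a 64 GB OS disk, and an NSG rule that opens the NBI (9999) and Prometheus (9091) ports before remote-install-osm.sh takes over. Condensed into a sketch with the names from the log; as before, deriving the NSG name as "${NAME}NSG" is an assumption:

RG=OSM_CICD_GROUP
NAME=osmtest202312311216
az vm create --resource-group "$RG" --name "$NAME" \
    --image Canonical:0001-com-ubuntu-server-jammy:22_04-lts:latest \
    --size Standard_D4as_v4 --vnet-name OSM-CICD-net --subnet OSM-CICD-subnet \
    --public-ip-address '' --admin-username ubuntu --priority Regular --os-disk-size-gb 64
NEW_OSM_IP=$(az vm show -d -g "$RG" -n "$NAME" --query privateIps -o tsv)
az network nsg rule create -n osm --nsg-name "${NAME}NSG" -g "$RG" \
    --priority 2000 --description 'NBI and Prometheus ports' --protocol Tcp \
    --destination-port-ranges 9999 9091

The package upgrade that remote-install-osm.sh runs on the new host continues below.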
Get:1 http://azure.archive.ubuntu.com/ubuntu jammy-updates/main amd64 tar amd64 1.34+dfsg-1ubuntu0.1.22.04.2 [295 kB] Get:2 http://azure.archive.ubuntu.com/ubuntu jammy-updates/main amd64 libc6 amd64 2.35-0ubuntu3.5 [3235 kB] Get:3 http://azure.archive.ubuntu.com/ubuntu jammy-updates/main amd64 libc-bin amd64 2.35-0ubuntu3.5 [706 kB] Get:4 http://azure.archive.ubuntu.com/ubuntu jammy-updates/main amd64 openssh-sftp-server amd64 1:8.9p1-3ubuntu0.5 [38.7 kB] Get:5 http://azure.archive.ubuntu.com/ubuntu jammy-updates/main amd64 openssh-server amd64 1:8.9p1-3ubuntu0.5 [435 kB] Get:6 http://azure.archive.ubuntu.com/ubuntu jammy-updates/main amd64 openssh-client amd64 1:8.9p1-3ubuntu0.5 [906 kB] Get:7 http://azure.archive.ubuntu.com/ubuntu jammy-updates/main amd64 libcryptsetup12 amd64 2:2.4.3-1ubuntu1.2 [211 kB] Get:8 http://azure.archive.ubuntu.com/ubuntu jammy-updates/main amd64 locales all 2.35-0ubuntu3.5 [4245 kB] Get:9 http://azure.archive.ubuntu.com/ubuntu jammy-updates/main amd64 vim amd64 2:8.2.3995-1ubuntu2.15 [1735 kB] Get:10 http://azure.archive.ubuntu.com/ubuntu jammy-updates/main amd64 vim-tiny amd64 2:8.2.3995-1ubuntu2.15 [710 kB] Get:11 http://azure.archive.ubuntu.com/ubuntu jammy-updates/main amd64 vim-runtime all 2:8.2.3995-1ubuntu2.15 [6835 kB] Get:12 http://azure.archive.ubuntu.com/ubuntu jammy-updates/main amd64 xxd amd64 2:8.2.3995-1ubuntu2.15 [55.2 kB] Get:13 http://azure.archive.ubuntu.com/ubuntu jammy-updates/main amd64 vim-common all 2:8.2.3995-1ubuntu2.15 [81.5 kB] Get:14 http://azure.archive.ubuntu.com/ubuntu jammy-updates/main amd64 libctf0 amd64 2.38-4ubuntu2.4 [103 kB] Get:15 http://azure.archive.ubuntu.com/ubuntu jammy-updates/main amd64 libctf-nobfd0 amd64 2.38-4ubuntu2.4 [108 kB] Get:16 http://azure.archive.ubuntu.com/ubuntu jammy-updates/main amd64 binutils-x86-64-linux-gnu amd64 2.38-4ubuntu2.4 [2327 kB] Get:17 http://azure.archive.ubuntu.com/ubuntu jammy-updates/main amd64 libbinutils amd64 2.38-4ubuntu2.4 [662 kB] Get:18 http://azure.archive.ubuntu.com/ubuntu jammy-updates/main amd64 binutils amd64 2.38-4ubuntu2.4 [3194 B] Get:19 http://azure.archive.ubuntu.com/ubuntu jammy-updates/main amd64 binutils-common amd64 2.38-4ubuntu2.4 [222 kB] Get:20 http://azure.archive.ubuntu.com/ubuntu jammy-updates/main amd64 cryptsetup-initramfs all 2:2.4.3-1ubuntu1.2 [26.2 kB] Get:21 http://azure.archive.ubuntu.com/ubuntu jammy-updates/main amd64 libssh-4 amd64 0.9.6-2ubuntu0.22.04.2 [186 kB] Get:22 http://azure.archive.ubuntu.com/ubuntu jammy-updates/main amd64 cryptsetup-bin amd64 2:2.4.3-1ubuntu1.2 [145 kB] Get:23 http://azure.archive.ubuntu.com/ubuntu jammy-updates/main amd64 cryptsetup amd64 2:2.4.3-1ubuntu1.2 [193 kB] Get:24 http://azure.archive.ubuntu.com/ubuntu jammy-updates/main amd64 curl amd64 7.81.0-1ubuntu1.15 [194 kB] Get:25 http://azure.archive.ubuntu.com/ubuntu jammy-updates/main amd64 libcurl4 amd64 7.81.0-1ubuntu1.15 [289 kB] Get:26 http://azure.archive.ubuntu.com/ubuntu jammy-updates/main amd64 libcurl3-gnutls amd64 7.81.0-1ubuntu1.15 [284 kB] Get:27 http://azure.archive.ubuntu.com/ubuntu jammy-updates/main amd64 linux-cloud-tools-common all 5.15.0-91.101 [95.8 kB] Get:28 http://azure.archive.ubuntu.com/ubuntu jammy-updates/main amd64 python3-cryptography amd64 3.4.8-1ubuntu2.1 [236 kB] Get:29 http://azure.archive.ubuntu.com/ubuntu jammy-updates/main amd64 systemd-hwe-hwdb all 249.11.4 [2978 B] Get:30 http://azure.archive.ubuntu.com/ubuntu jammy-updates/main amd64 kpartx amd64 0.8.8-1ubuntu1.22.04.4 [28.8 kB] Get:31 http://azure.archive.ubuntu.com/ubuntu 
jammy-updates/main amd64 multipath-tools amd64 0.8.8-1ubuntu1.22.04.4 [331 kB] debconf: unable to initialize frontend: Dialog debconf: (Dialog frontend will not work on a dumb terminal, an emacs shell buffer, or without a controlling terminal.) debconf: falling back to frontend: Readline debconf: unable to initialize frontend: Readline debconf: (This frontend requires a controlling tty.) debconf: falling back to frontend: Teletype dpkg-preconfigure: unable to re-open stdin: Fetched 24.9 MB in 0s (79.7 MB/s) (Reading database ... (Reading database ... 5% (Reading database ... 10% (Reading database ... 15% (Reading database ... 20% (Reading database ... 25% (Reading database ... 30% (Reading database ... 35% (Reading database ... 40% (Reading database ... 45% (Reading database ... 50% (Reading database ... 55% (Reading database ... 60% (Reading database ... 65% (Reading database ... 70% (Reading database ... 75% (Reading database ... 80% (Reading database ... 85% (Reading database ... 90% (Reading database ... 95% (Reading database ... 100% (Reading database ... 61587 files and directories currently installed.) Preparing to unpack .../tar_1.34+dfsg-1ubuntu0.1.22.04.2_amd64.deb ... Unpacking tar (1.34+dfsg-1ubuntu0.1.22.04.2) over (1.34+dfsg-1ubuntu0.1.22.04.1) ... Setting up tar (1.34+dfsg-1ubuntu0.1.22.04.2) ... (Reading database ... (Reading database ... 5% (Reading database ... 10% (Reading database ... 15% (Reading database ... 20% (Reading database ... 25% (Reading database ... 30% (Reading database ... 35% (Reading database ... 40% (Reading database ... 45% (Reading database ... 50% (Reading database ... 55% (Reading database ... 60% (Reading database ... 65% (Reading database ... 70% (Reading database ... 75% (Reading database ... 80% (Reading database ... 85% (Reading database ... 90% (Reading database ... 95% (Reading database ... 100% (Reading database ... 61587 files and directories currently installed.) Preparing to unpack .../libc6_2.35-0ubuntu3.5_amd64.deb ... debconf: unable to initialize frontend: Dialog debconf: (Dialog frontend will not work on a dumb terminal, an emacs shell buffer, or without a controlling terminal.) debconf: falling back to frontend: Readline Unpacking libc6:amd64 (2.35-0ubuntu3.5) over (2.35-0ubuntu3.4) ... Setting up libc6:amd64 (2.35-0ubuntu3.5) ... debconf: unable to initialize frontend: Dialog debconf: (Dialog frontend will not work on a dumb terminal, an emacs shell buffer, or without a controlling terminal.) debconf: falling back to frontend: Readline (Reading database ... (Reading database ... 5% (Reading database ... 10% (Reading database ... 15% (Reading database ... 20% (Reading database ... 25% (Reading database ... 30% (Reading database ... 35% (Reading database ... 40% (Reading database ... 45% (Reading database ... 50% (Reading database ... 55% (Reading database ... 60% (Reading database ... 65% (Reading database ... 70% (Reading database ... 75% (Reading database ... 80% (Reading database ... 85% (Reading database ... 90% (Reading database ... 95% (Reading database ... 100% (Reading database ... 61587 files and directories currently installed.) Preparing to unpack .../libc-bin_2.35-0ubuntu3.5_amd64.deb ... Unpacking libc-bin (2.35-0ubuntu3.5) over (2.35-0ubuntu3.4) ... Setting up libc-bin (2.35-0ubuntu3.5) ... (Reading database ... (Reading database ... 5% (Reading database ... 10% (Reading database ... 15% (Reading database ... 20% (Reading database ... 25% (Reading database ... 30% (Reading database ... 35% (Reading database ... 
40% (Reading database ... 45% (Reading database ... 50% (Reading database ... 55% (Reading database ... 60% (Reading database ... 65% (Reading database ... 70% (Reading database ... 75% (Reading database ... 80% (Reading database ... 85% (Reading database ... 90% (Reading database ... 95% (Reading database ... 100% (Reading database ... 61587 files and directories currently installed.) Preparing to unpack .../00-openssh-sftp-server_1%3a8.9p1-3ubuntu0.5_amd64.deb ... Unpacking openssh-sftp-server (1:8.9p1-3ubuntu0.5) over (1:8.9p1-3ubuntu0.4) ... Preparing to unpack .../01-openssh-server_1%3a8.9p1-3ubuntu0.5_amd64.deb ... Unpacking openssh-server (1:8.9p1-3ubuntu0.5) over (1:8.9p1-3ubuntu0.4) ... Preparing to unpack .../02-openssh-client_1%3a8.9p1-3ubuntu0.5_amd64.deb ... Unpacking openssh-client (1:8.9p1-3ubuntu0.5) over (1:8.9p1-3ubuntu0.4) ... Preparing to unpack .../03-libcryptsetup12_2%3a2.4.3-1ubuntu1.2_amd64.deb ... Unpacking libcryptsetup12:amd64 (2:2.4.3-1ubuntu1.2) over (2:2.4.3-1ubuntu1.1) ... Preparing to unpack .../04-locales_2.35-0ubuntu3.5_all.deb ... Unpacking locales (2.35-0ubuntu3.5) over (2.35-0ubuntu3.4) ... Preparing to unpack .../05-vim_2%3a8.2.3995-1ubuntu2.15_amd64.deb ... Unpacking vim (2:8.2.3995-1ubuntu2.15) over (2:8.2.3995-1ubuntu2.13) ... Preparing to unpack .../06-vim-tiny_2%3a8.2.3995-1ubuntu2.15_amd64.deb ... Unpacking vim-tiny (2:8.2.3995-1ubuntu2.15) over (2:8.2.3995-1ubuntu2.13) ... Preparing to unpack .../07-vim-runtime_2%3a8.2.3995-1ubuntu2.15_all.deb ... Unpacking vim-runtime (2:8.2.3995-1ubuntu2.15) over (2:8.2.3995-1ubuntu2.13) ... Preparing to unpack .../08-xxd_2%3a8.2.3995-1ubuntu2.15_amd64.deb ... Unpacking xxd (2:8.2.3995-1ubuntu2.15) over (2:8.2.3995-1ubuntu2.13) ... Preparing to unpack .../09-vim-common_2%3a8.2.3995-1ubuntu2.15_all.deb ... Unpacking vim-common (2:8.2.3995-1ubuntu2.15) over (2:8.2.3995-1ubuntu2.13) ... Preparing to unpack .../10-libctf0_2.38-4ubuntu2.4_amd64.deb ... Unpacking libctf0:amd64 (2.38-4ubuntu2.4) over (2.38-4ubuntu2.3) ... Preparing to unpack .../11-libctf-nobfd0_2.38-4ubuntu2.4_amd64.deb ... Unpacking libctf-nobfd0:amd64 (2.38-4ubuntu2.4) over (2.38-4ubuntu2.3) ... Preparing to unpack .../12-binutils-x86-64-linux-gnu_2.38-4ubuntu2.4_amd64.deb ... Unpacking binutils-x86-64-linux-gnu (2.38-4ubuntu2.4) over (2.38-4ubuntu2.3) ... Preparing to unpack .../13-libbinutils_2.38-4ubuntu2.4_amd64.deb ... Unpacking libbinutils:amd64 (2.38-4ubuntu2.4) over (2.38-4ubuntu2.3) ... Preparing to unpack .../14-binutils_2.38-4ubuntu2.4_amd64.deb ... Unpacking binutils (2.38-4ubuntu2.4) over (2.38-4ubuntu2.3) ... Preparing to unpack .../15-binutils-common_2.38-4ubuntu2.4_amd64.deb ... Unpacking binutils-common:amd64 (2.38-4ubuntu2.4) over (2.38-4ubuntu2.3) ... Preparing to unpack .../16-cryptsetup-initramfs_2%3a2.4.3-1ubuntu1.2_all.deb ... debconf: unable to initialize frontend: Dialog debconf: (Dialog frontend will not work on a dumb terminal, an emacs shell buffer, or without a controlling terminal.) debconf: falling back to frontend: Readline Unpacking cryptsetup-initramfs (2:2.4.3-1ubuntu1.2) over (2:2.4.3-1ubuntu1.1) ... Preparing to unpack .../17-libssh-4_0.9.6-2ubuntu0.22.04.2_amd64.deb ... Unpacking libssh-4:amd64 (0.9.6-2ubuntu0.22.04.2) over (0.9.6-2ubuntu0.22.04.1) ... Preparing to unpack .../18-cryptsetup-bin_2%3a2.4.3-1ubuntu1.2_amd64.deb ... Unpacking cryptsetup-bin (2:2.4.3-1ubuntu1.2) over (2:2.4.3-1ubuntu1.1) ... Preparing to unpack .../19-cryptsetup_2%3a2.4.3-1ubuntu1.2_amd64.deb ... 
debconf: unable to initialize frontend: Dialog debconf: (Dialog frontend will not work on a dumb terminal, an emacs shell buffer, or without a controlling terminal.) debconf: falling back to frontend: Readline Unpacking cryptsetup (2:2.4.3-1ubuntu1.2) over (2:2.4.3-1ubuntu1.1) ... Preparing to unpack .../20-curl_7.81.0-1ubuntu1.15_amd64.deb ... Unpacking curl (7.81.0-1ubuntu1.15) over (7.81.0-1ubuntu1.14) ... Preparing to unpack .../21-libcurl4_7.81.0-1ubuntu1.15_amd64.deb ... Unpacking libcurl4:amd64 (7.81.0-1ubuntu1.15) over (7.81.0-1ubuntu1.14) ... Preparing to unpack .../22-libcurl3-gnutls_7.81.0-1ubuntu1.15_amd64.deb ... Unpacking libcurl3-gnutls:amd64 (7.81.0-1ubuntu1.15) over (7.81.0-1ubuntu1.14) ... Preparing to unpack .../23-linux-cloud-tools-common_5.15.0-91.101_all.deb ... Unpacking linux-cloud-tools-common (5.15.0-91.101) over (5.15.0-89.99) ... Preparing to unpack .../24-python3-cryptography_3.4.8-1ubuntu2.1_amd64.deb ... Unpacking python3-cryptography (3.4.8-1ubuntu2.1) over (3.4.8-1ubuntu2) ... Preparing to unpack .../25-systemd-hwe-hwdb_249.11.4_all.deb ... Unpacking systemd-hwe-hwdb (249.11.4) over (249.11.3) ... Preparing to unpack .../26-kpartx_0.8.8-1ubuntu1.22.04.4_amd64.deb ... Unpacking kpartx (0.8.8-1ubuntu1.22.04.4) over (0.8.8-1ubuntu1.22.04.3) ... Preparing to unpack .../27-multipath-tools_0.8.8-1ubuntu1.22.04.4_amd64.deb ... Unpacking multipath-tools (0.8.8-1ubuntu1.22.04.4) over (0.8.8-1ubuntu1.22.04.3) ... Setting up openssh-client (1:8.9p1-3ubuntu0.5) ... Setting up binutils-common:amd64 (2.38-4ubuntu2.4) ... Setting up libctf-nobfd0:amd64 (2.38-4ubuntu2.4) ... Setting up linux-cloud-tools-common (5.15.0-91.101) ... Setting up locales (2.35-0ubuntu3.5) ... debconf: unable to initialize frontend: Dialog debconf: (Dialog frontend will not work on a dumb terminal, an emacs shell buffer, or without a controlling terminal.) debconf: falling back to frontend: Readline Generating locales (this might take a while)... en_US.UTF-8... done Generation complete. Setting up xxd (2:8.2.3995-1ubuntu2.15) ... Setting up vim-common (2:8.2.3995-1ubuntu2.15) ... Setting up python3-cryptography (3.4.8-1ubuntu2.1) ... Setting up libssh-4:amd64 (0.9.6-2ubuntu0.22.04.2) ... Setting up systemd-hwe-hwdb (249.11.4) ... Setting up kpartx (0.8.8-1ubuntu1.22.04.4) ... Setting up libcurl4:amd64 (7.81.0-1ubuntu1.15) ... Setting up libcryptsetup12:amd64 (2:2.4.3-1ubuntu1.2) ... Setting up curl (7.81.0-1ubuntu1.15) ... Setting up libbinutils:amd64 (2.38-4ubuntu2.4) ... Setting up vim-runtime (2:8.2.3995-1ubuntu2.15) ... Setting up libctf0:amd64 (2.38-4ubuntu2.4) ... Setting up cryptsetup-bin (2:2.4.3-1ubuntu1.2) ... Setting up openssh-sftp-server (1:8.9p1-3ubuntu0.5) ... Setting up vim (2:8.2.3995-1ubuntu2.15) ... Setting up openssh-server (1:8.9p1-3ubuntu0.5) ... debconf: unable to initialize frontend: Dialog debconf: (Dialog frontend will not work on a dumb terminal, an emacs shell buffer, or without a controlling terminal.) debconf: falling back to frontend: Readline rescue-ssh.target is a disabled or a static unit not running, not starting it. ssh.socket is a disabled or a static unit not running, not starting it. Setting up cryptsetup (2:2.4.3-1ubuntu1.2) ... debconf: unable to initialize frontend: Dialog debconf: (Dialog frontend will not work on a dumb terminal, an emacs shell buffer, or without a controlling terminal.) debconf: falling back to frontend: Readline Setting up libcurl3-gnutls:amd64 (7.81.0-1ubuntu1.15) ... Setting up vim-tiny (2:8.2.3995-1ubuntu2.15) ... 
Setting up multipath-tools (0.8.8-1ubuntu1.22.04.4) ... Could not execute systemctl: at /usr/bin/deb-systemd-invoke line 142. Setting up cryptsetup-initramfs (2:2.4.3-1ubuntu1.2) ... debconf: unable to initialize frontend: Dialog debconf: (Dialog frontend will not work on a dumb terminal, an emacs shell buffer, or without a controlling terminal.) debconf: falling back to frontend: Readline update-initramfs: deferring update (trigger activated) Setting up binutils-x86-64-linux-gnu (2.38-4ubuntu2.4) ... Setting up binutils (2.38-4ubuntu2.4) ... Processing triggers for udev (249.11-0ubuntu3.11) ... Processing triggers for initramfs-tools (0.140ubuntu13.4) ... update-initramfs: Generating /boot/initrd.img-6.2.0-1018-azure Processing triggers for libc-bin (2.35-0ubuntu3.5) ... Processing triggers for ufw (0.36.1-4ubuntu0.1) ... Processing triggers for man-db (2.10.2-1) ... Running kernel seems to be up-to-date. Services to be restarted: systemctl restart chrony.service systemctl restart cron.service systemctl restart irqbalance.service systemctl restart packagekit.service systemctl restart polkit.service systemctl restart rsyslog.service systemctl restart serial-getty@ttyS0.service systemctl restart snapd.service systemctl restart systemd-journald.service systemctl restart systemd-networkd.service systemctl restart systemd-resolved.service systemctl restart systemd-udevd.service systemctl restart walinuxagent.service Service restarts being deferred: /etc/needrestart/restart.d/dbus.service systemctl restart getty@tty1.service systemctl restart networkd-dispatcher.service systemctl restart systemd-logind.service systemctl restart unattended-upgrades.service systemctl restart user@1000.service No containers need to be restarted. No user sessions are running outdated binaries. No VM guests are running outdated hypervisor (qemu) binaries on this host. Connection to 172.21.23.11 closed by remote host. INSTALLER_URL: https://osm-download.etsi.org/ftp/osm-15.0-fifteen/install_osm.sh INSTALLER_PARAMETERS: -R testing-daily -t testing-daily -r testing -y Warning: Permanently added '172.21.23.11' (ED25519) to the list of known hosts. Welcome to Ubuntu 22.04.3 LTS (GNU/Linux 6.2.0-1018-azure x86_64) * Documentation: https://help.ubuntu.com * Management: https://landscape.canonical.com * Support: https://ubuntu.com/advantage System information as of Sun Dec 31 12:20:26 UTC 2023 System load: 0.111328125 Processes: 141 Usage of /: 2.7% of 61.84GB Users logged in: 0 Memory usage: 1% IPv4 address for eth0: 172.21.23.11 Swap usage: 0% Expanded Security Maintenance for Applications is not enabled. 2 updates can be applied immediately. 1 of these updates is a standard security update. To see these additional updates run: apt list --upgradable Enable ESM Apps to receive additional future security updates. See https://ubuntu.com/esm or run: sudo pro status + wget https://osm-download.etsi.org/ftp/osm-15.0-fifteen/install_osm.sh --2023-12-31 12:20:26-- https://osm-download.etsi.org/ftp/osm-15.0-fifteen/install_osm.sh Resolving osm-download.etsi.org (osm-download.etsi.org)... 195.238.226.47 Connecting to osm-download.etsi.org (osm-download.etsi.org)|195.238.226.47|:443... connected. HTTP request sent, awaiting response... 200 OK Length: 10475 (10K) [text/x-sh] Saving to: ‘install_osm.sh’ 0K .......... 
100% 10.5M=0.001s 2023-12-31 12:20:26 (10.5 MB/s) - ‘install_osm.sh’ saved [10475/10475] + chmod +x install_osm.sh + ./install_osm.sh -R testing-daily -t testing-daily -r testing -y + tee osm_install_log.txt Checking required packages to add ETSI OSM debian repo: software-properties-common apt-transport-https OK Hit:1 http://azure.archive.ubuntu.com/ubuntu jammy InRelease Hit:2 http://azure.archive.ubuntu.com/ubuntu jammy-updates InRelease Hit:3 http://azure.archive.ubuntu.com/ubuntu jammy-backports InRelease Hit:4 http://azure.archive.ubuntu.com/ubuntu jammy-security InRelease Get:5 https://osm-download.etsi.org/repository/osm/debian/testing-daily testing InRelease [4086 B] Get:6 https://osm-download.etsi.org/repository/osm/debian/testing-daily testing/devops amd64 Packages [442 B] Fetched 4528 B in 1s (7844 B/s) Reading package lists... W: https://osm-download.etsi.org/repository/osm/debian/testing-daily/dists/testing/InRelease: Key is stored in legacy trusted.gpg keyring (/etc/apt/trusted.gpg), see the DEPRECATION section in apt-key(8) for details. W: Conflicting distribution: https://osm-download.etsi.org/repository/osm/debian/testing-daily testing InRelease (expected testing but got ) Repository: 'deb [arch=amd64] https://osm-download.etsi.org/repository/osm/debian/testing-daily testing devops' Description: Archive for codename: testing components: devops More info: https://osm-download.etsi.org/repository/osm/debian/testing-daily Adding repository. Adding deb entry to /etc/apt/sources.list.d/archive_uri-https_osm-download_etsi_org_repository_osm_debian_testing-daily-jammy.list Adding disabled deb-src entry to /etc/apt/sources.list.d/archive_uri-https_osm-download_etsi_org_repository_osm_debian_testing-daily-jammy.list Hit:1 http://azure.archive.ubuntu.com/ubuntu jammy InRelease Hit:2 http://azure.archive.ubuntu.com/ubuntu jammy-updates InRelease Hit:3 http://azure.archive.ubuntu.com/ubuntu jammy-backports InRelease Hit:4 http://azure.archive.ubuntu.com/ubuntu jammy-security InRelease Hit:5 https://osm-download.etsi.org/repository/osm/debian/testing-daily testing InRelease Reading package lists... W: https://osm-download.etsi.org/repository/osm/debian/testing-daily/dists/testing/InRelease: Key is stored in legacy trusted.gpg keyring (/etc/apt/trusted.gpg), see the DEPRECATION section in apt-key(8) for details. W: Conflicting distribution: https://osm-download.etsi.org/repository/osm/debian/testing-daily testing InRelease (expected testing but got ) Hit:1 http://azure.archive.ubuntu.com/ubuntu jammy InRelease Hit:2 http://azure.archive.ubuntu.com/ubuntu jammy-updates InRelease Hit:3 http://azure.archive.ubuntu.com/ubuntu jammy-backports InRelease Hit:4 http://azure.archive.ubuntu.com/ubuntu jammy-security InRelease Hit:5 https://osm-download.etsi.org/repository/osm/debian/testing-daily testing InRelease Reading package lists... W: https://osm-download.etsi.org/repository/osm/debian/testing-daily/dists/testing/InRelease: Key is stored in legacy trusted.gpg keyring (/etc/apt/trusted.gpg), see the DEPRECATION section in apt-key(8) for details. W: Conflicting distribution: https://osm-download.etsi.org/repository/osm/debian/testing-daily testing InRelease (expected testing but got ) Reading package lists... Building dependency tree... Reading state information... The following NEW packages will be installed: osm-devops 0 upgraded, 1 newly installed, 0 to remove and 3 not upgraded. Need to get 20.4 MB of archives. After this operation, 108 MB of additional disk space will be used. 
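For reference, the installer bootstrap and ETSI repository setup shown above amount roughly to the following manual sequence. This is a sketch reconstructed from the commands and messages echoed in this log; install_osm.sh also imports the repository key (hence the apt-key legacy-keyring deprecation warning above) and runs additional checks that are not repeated here.

    # Fetch and run the OSM installer exactly as echoed above
    wget https://osm-download.etsi.org/ftp/osm-15.0-fifteen/install_osm.sh
    chmod +x install_osm.sh
    ./install_osm.sh -R testing-daily -t testing-daily -r testing -y 2>&1 | tee osm_install_log.txt

    # The repository entry it adds (visible in the "Adding repository" output) is
    # approximately equivalent to:
    sudo add-apt-repository -y \
        "deb [arch=amd64] https://osm-download.etsi.org/repository/osm/debian/testing-daily testing devops"
    sudo apt-get update
    sudo apt-get install -y osm-devops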
Get:1 https://osm-download.etsi.org/repository/osm/debian/testing-daily testing/devops amd64 osm-devops all 15.0.0-1 [20.4 MB] Fetched 20.4 MB in 1s (16.1 MB/s) Selecting previously unselected package osm-devops. (Reading database ... (Reading database ... 5% (Reading database ... 10% (Reading database ... 15% (Reading database ... 20% (Reading database ... 25% (Reading database ... 30% (Reading database ... 35% (Reading database ... 40% (Reading database ... 45% (Reading database ... 50% (Reading database ... 55% (Reading database ... 60% (Reading database ... 65% (Reading database ... 70% (Reading database ... 75% (Reading database ... 80% (Reading database ... 85% (Reading database ... 90% (Reading database ... 95% (Reading database ... 100% (Reading database ... 61587 files and directories currently installed.) Preparing to unpack .../osm-devops_15.0.0-1_all.deb ... Unpacking osm-devops (15.0.0-1) ... Setting up osm-devops (15.0.0-1) ... NEEDRESTART-VER: 3.5 NEEDRESTART-KCUR: 6.2.0-1018-azure NEEDRESTART-KEXP: 6.2.0-1018-azure NEEDRESTART-KSTA: 1 ## Sun Dec 31 12:20:41 UTC 2023 source: INFO: logging sourced ## Sun Dec 31 12:20:41 UTC 2023 source: INFO: config sourced ## Sun Dec 31 12:20:41 UTC 2023 source: INFO: container sourced ## Sun Dec 31 12:20:41 UTC 2023 source: INFO: git_functions sourced ## Sun Dec 31 12:20:41 UTC 2023 source: INFO: track sourced Ubuntu 22.04 OSM_BEHIND_PROXY= OSM_PROXY_ENV_VARIABLES= This machine is not behind a proxy Checking required packages: git wget curl tar snapd /. /etc /etc/bash_completion.d /etc/bash_completion.d/git-prompt /usr /usr/bin /usr/bin/git /usr/bin/git-shell /usr/lib /usr/lib/git-core /usr/lib/git-core/git /usr/lib/git-core/git-add--interactive /usr/lib/git-core/git-bisect /usr/lib/git-core/git-daemon /usr/lib/git-core/git-difftool--helper /usr/lib/git-core/git-filter-branch /usr/lib/git-core/git-http-backend /usr/lib/git-core/git-http-fetch /usr/lib/git-core/git-http-push /usr/lib/git-core/git-imap-send /usr/lib/git-core/git-instaweb /usr/lib/git-core/git-merge-octopus /usr/lib/git-core/git-merge-one-file /usr/lib/git-core/git-merge-resolve /usr/lib/git-core/git-mergetool /usr/lib/git-core/git-mergetool--lib /usr/lib/git-core/git-quiltimport /usr/lib/git-core/git-remote-http /usr/lib/git-core/git-request-pull /usr/lib/git-core/git-sh-i18n /usr/lib/git-core/git-sh-i18n--envsubst /usr/lib/git-core/git-sh-prompt /usr/lib/git-core/git-sh-setup /usr/lib/git-core/git-shell /usr/lib/git-core/git-submodule /usr/lib/git-core/git-subtree /usr/lib/git-core/git-web--browse /usr/lib/git-core/mergetools /usr/lib/git-core/mergetools/araxis /usr/lib/git-core/mergetools/bc /usr/lib/git-core/mergetools/codecompare /usr/lib/git-core/mergetools/deltawalker /usr/lib/git-core/mergetools/diffmerge /usr/lib/git-core/mergetools/diffuse /usr/lib/git-core/mergetools/ecmerge /usr/lib/git-core/mergetools/emerge /usr/lib/git-core/mergetools/examdiff /usr/lib/git-core/mergetools/guiffy /usr/lib/git-core/mergetools/gvimdiff /usr/lib/git-core/mergetools/kdiff3 /usr/lib/git-core/mergetools/kompare /usr/lib/git-core/mergetools/meld /usr/lib/git-core/mergetools/nvimdiff /usr/lib/git-core/mergetools/opendiff /usr/lib/git-core/mergetools/p4merge /usr/lib/git-core/mergetools/smerge /usr/lib/git-core/mergetools/tkdiff /usr/lib/git-core/mergetools/tortoisemerge /usr/lib/git-core/mergetools/vimdiff /usr/lib/git-core/mergetools/winmerge /usr/lib/git-core/mergetools/xxdiff /usr/share /usr/share/bash-completion /usr/share/bash-completion/completions 
/usr/share/bash-completion/completions/git /usr/share/doc /usr/share/doc/git /usr/share/doc/git/NEWS.Debian.gz /usr/share/doc/git/README.Debian /usr/share/doc/git/README.emacs /usr/share/doc/git/README.md /usr/share/doc/git/README.source /usr/share/doc/git/RelNotes /usr/share/doc/git/RelNotes/1.5.0.1.txt /usr/share/doc/git/RelNotes/1.5.0.2.txt /usr/share/doc/git/RelNotes/1.5.0.3.txt /usr/share/doc/git/RelNotes/1.5.0.4.txt /usr/share/doc/git/RelNotes/1.5.0.5.txt /usr/share/doc/git/RelNotes/1.5.0.6.txt /usr/share/doc/git/RelNotes/1.5.0.7.txt /usr/share/doc/git/RelNotes/1.5.0.txt /usr/share/doc/git/RelNotes/1.5.1.1.txt /usr/share/doc/git/RelNotes/1.5.1.2.txt /usr/share/doc/git/RelNotes/1.5.1.3.txt /usr/share/doc/git/RelNotes/1.5.1.4.txt /usr/share/doc/git/RelNotes/1.5.1.5.txt /usr/share/doc/git/RelNotes/1.5.1.6.txt /usr/share/doc/git/RelNotes/1.5.1.txt /usr/share/doc/git/RelNotes/1.5.2.1.txt /usr/share/doc/git/RelNotes/1.5.2.2.txt /usr/share/doc/git/RelNotes/1.5.2.3.txt /usr/share/doc/git/RelNotes/1.5.2.4.txt /usr/share/doc/git/RelNotes/1.5.2.5.txt /usr/share/doc/git/RelNotes/1.5.2.txt /usr/share/doc/git/RelNotes/1.5.3.1.txt /usr/share/doc/git/RelNotes/1.5.3.2.txt /usr/share/doc/git/RelNotes/1.5.3.3.txt /usr/share/doc/git/RelNotes/1.5.3.4.txt /usr/share/doc/git/RelNotes/1.5.3.5.txt /usr/share/doc/git/RelNotes/1.5.3.6.txt /usr/share/doc/git/RelNotes/1.5.3.7.txt /usr/share/doc/git/RelNotes/1.5.3.8.txt /usr/share/doc/git/RelNotes/1.5.3.txt /usr/share/doc/git/RelNotes/1.5.4.1.txt /usr/share/doc/git/RelNotes/1.5.4.2.txt /usr/share/doc/git/RelNotes/1.5.4.3.txt /usr/share/doc/git/RelNotes/1.5.4.4.txt /usr/share/doc/git/RelNotes/1.5.4.5.txt /usr/share/doc/git/RelNotes/1.5.4.6.txt /usr/share/doc/git/RelNotes/1.5.4.7.txt /usr/share/doc/git/RelNotes/1.5.4.txt /usr/share/doc/git/RelNotes/1.5.5.1.txt /usr/share/doc/git/RelNotes/1.5.5.2.txt /usr/share/doc/git/RelNotes/1.5.5.3.txt /usr/share/doc/git/RelNotes/1.5.5.4.txt /usr/share/doc/git/RelNotes/1.5.5.5.txt /usr/share/doc/git/RelNotes/1.5.5.6.txt /usr/share/doc/git/RelNotes/1.5.5.txt /usr/share/doc/git/RelNotes/1.5.6.1.txt /usr/share/doc/git/RelNotes/1.5.6.2.txt /usr/share/doc/git/RelNotes/1.5.6.3.txt /usr/share/doc/git/RelNotes/1.5.6.4.txt /usr/share/doc/git/RelNotes/1.5.6.5.txt /usr/share/doc/git/RelNotes/1.5.6.6.txt /usr/share/doc/git/RelNotes/1.5.6.txt /usr/share/doc/git/RelNotes/1.6.0.1.txt /usr/share/doc/git/RelNotes/1.6.0.2.txt /usr/share/doc/git/RelNotes/1.6.0.3.txt /usr/share/doc/git/RelNotes/1.6.0.4.txt /usr/share/doc/git/RelNotes/1.6.0.5.txt /usr/share/doc/git/RelNotes/1.6.0.6.txt /usr/share/doc/git/RelNotes/1.6.0.txt /usr/share/doc/git/RelNotes/1.6.1.1.txt /usr/share/doc/git/RelNotes/1.6.1.2.txt /usr/share/doc/git/RelNotes/1.6.1.3.txt /usr/share/doc/git/RelNotes/1.6.1.4.txt /usr/share/doc/git/RelNotes/1.6.1.txt /usr/share/doc/git/RelNotes/1.6.2.1.txt /usr/share/doc/git/RelNotes/1.6.2.2.txt /usr/share/doc/git/RelNotes/1.6.2.3.txt /usr/share/doc/git/RelNotes/1.6.2.4.txt /usr/share/doc/git/RelNotes/1.6.2.5.txt /usr/share/doc/git/RelNotes/1.6.2.txt /usr/share/doc/git/RelNotes/1.6.3.1.txt /usr/share/doc/git/RelNotes/1.6.3.2.txt /usr/share/doc/git/RelNotes/1.6.3.3.txt /usr/share/doc/git/RelNotes/1.6.3.4.txt /usr/share/doc/git/RelNotes/1.6.3.txt /usr/share/doc/git/RelNotes/1.6.4.1.txt /usr/share/doc/git/RelNotes/1.6.4.2.txt /usr/share/doc/git/RelNotes/1.6.4.3.txt /usr/share/doc/git/RelNotes/1.6.4.4.txt /usr/share/doc/git/RelNotes/1.6.4.5.txt /usr/share/doc/git/RelNotes/1.6.4.txt /usr/share/doc/git/RelNotes/1.6.5.1.txt 
/usr/share/doc/git/RelNotes/1.6.5.2.txt /usr/share/doc/git/RelNotes/1.6.5.3.txt /usr/share/doc/git/RelNotes/1.6.5.4.txt /usr/share/doc/git/RelNotes/1.6.5.5.txt /usr/share/doc/git/RelNotes/1.6.5.6.txt /usr/share/doc/git/RelNotes/1.6.5.7.txt /usr/share/doc/git/RelNotes/1.6.5.8.txt /usr/share/doc/git/RelNotes/1.6.5.9.txt /usr/share/doc/git/RelNotes/1.6.5.txt /usr/share/doc/git/RelNotes/1.6.6.1.txt /usr/share/doc/git/RelNotes/1.6.6.2.txt /usr/share/doc/git/RelNotes/1.6.6.3.txt /usr/share/doc/git/RelNotes/1.6.6.txt /usr/share/doc/git/RelNotes/1.7.0.1.txt /usr/share/doc/git/RelNotes/1.7.0.2.txt /usr/share/doc/git/RelNotes/1.7.0.3.txt /usr/share/doc/git/RelNotes/1.7.0.4.txt /usr/share/doc/git/RelNotes/1.7.0.5.txt /usr/share/doc/git/RelNotes/1.7.0.6.txt /usr/share/doc/git/RelNotes/1.7.0.7.txt /usr/share/doc/git/RelNotes/1.7.0.8.txt /usr/share/doc/git/RelNotes/1.7.0.9.txt /usr/share/doc/git/RelNotes/1.7.0.txt /usr/share/doc/git/RelNotes/1.7.1.1.txt /usr/share/doc/git/RelNotes/1.7.1.2.txt /usr/share/doc/git/RelNotes/1.7.1.3.txt /usr/share/doc/git/RelNotes/1.7.1.4.txt /usr/share/doc/git/RelNotes/1.7.1.txt /usr/share/doc/git/RelNotes/1.7.10.1.txt /usr/share/doc/git/RelNotes/1.7.10.2.txt /usr/share/doc/git/RelNotes/1.7.10.3.txt /usr/share/doc/git/RelNotes/1.7.10.4.txt /usr/share/doc/git/RelNotes/1.7.10.5.txt /usr/share/doc/git/RelNotes/1.7.10.txt /usr/share/doc/git/RelNotes/1.7.11.1.txt /usr/share/doc/git/RelNotes/1.7.11.2.txt /usr/share/doc/git/RelNotes/1.7.11.3.txt /usr/share/doc/git/RelNotes/1.7.11.4.txt /usr/share/doc/git/RelNotes/1.7.11.5.txt /usr/share/doc/git/RelNotes/1.7.11.6.txt /usr/share/doc/git/RelNotes/1.7.11.7.txt /usr/share/doc/git/RelNotes/1.7.11.txt /usr/share/doc/git/RelNotes/1.7.12.1.txt /usr/share/doc/git/RelNotes/1.7.12.2.txt /usr/share/doc/git/RelNotes/1.7.12.3.txt /usr/share/doc/git/RelNotes/1.7.12.4.txt /usr/share/doc/git/RelNotes/1.7.12.txt /usr/share/doc/git/RelNotes/1.7.2.1.txt /usr/share/doc/git/RelNotes/1.7.2.2.txt /usr/share/doc/git/RelNotes/1.7.2.3.txt /usr/share/doc/git/RelNotes/1.7.2.4.txt /usr/share/doc/git/RelNotes/1.7.2.5.txt /usr/share/doc/git/RelNotes/1.7.2.txt /usr/share/doc/git/RelNotes/1.7.3.1.txt /usr/share/doc/git/RelNotes/1.7.3.2.txt /usr/share/doc/git/RelNotes/1.7.3.3.txt /usr/share/doc/git/RelNotes/1.7.3.4.txt /usr/share/doc/git/RelNotes/1.7.3.5.txt /usr/share/doc/git/RelNotes/1.7.3.txt /usr/share/doc/git/RelNotes/1.7.4.1.txt /usr/share/doc/git/RelNotes/1.7.4.2.txt /usr/share/doc/git/RelNotes/1.7.4.3.txt /usr/share/doc/git/RelNotes/1.7.4.4.txt /usr/share/doc/git/RelNotes/1.7.4.5.txt /usr/share/doc/git/RelNotes/1.7.4.txt /usr/share/doc/git/RelNotes/1.7.5.1.txt /usr/share/doc/git/RelNotes/1.7.5.2.txt /usr/share/doc/git/RelNotes/1.7.5.3.txt /usr/share/doc/git/RelNotes/1.7.5.4.txt /usr/share/doc/git/RelNotes/1.7.5.txt /usr/share/doc/git/RelNotes/1.7.6.1.txt /usr/share/doc/git/RelNotes/1.7.6.2.txt /usr/share/doc/git/RelNotes/1.7.6.3.txt /usr/share/doc/git/RelNotes/1.7.6.4.txt /usr/share/doc/git/RelNotes/1.7.6.5.txt /usr/share/doc/git/RelNotes/1.7.6.6.txt /usr/share/doc/git/RelNotes/1.7.6.txt /usr/share/doc/git/RelNotes/1.7.7.1.txt /usr/share/doc/git/RelNotes/1.7.7.2.txt /usr/share/doc/git/RelNotes/1.7.7.3.txt /usr/share/doc/git/RelNotes/1.7.7.4.txt /usr/share/doc/git/RelNotes/1.7.7.5.txt /usr/share/doc/git/RelNotes/1.7.7.6.txt /usr/share/doc/git/RelNotes/1.7.7.7.txt /usr/share/doc/git/RelNotes/1.7.7.txt /usr/share/doc/git/RelNotes/1.7.8.1.txt /usr/share/doc/git/RelNotes/1.7.8.2.txt /usr/share/doc/git/RelNotes/1.7.8.3.txt /usr/share/doc/git/RelNotes/1.7.8.4.txt 
/usr/share/doc/git/RelNotes/1.7.8.5.txt /usr/share/doc/git/RelNotes/1.7.8.6.txt /usr/share/doc/git/RelNotes/1.7.8.txt /usr/share/doc/git/RelNotes/1.7.9.1.txt /usr/share/doc/git/RelNotes/1.7.9.2.txt /usr/share/doc/git/RelNotes/1.7.9.3.txt /usr/share/doc/git/RelNotes/1.7.9.4.txt /usr/share/doc/git/RelNotes/1.7.9.5.txt /usr/share/doc/git/RelNotes/1.7.9.6.txt /usr/share/doc/git/RelNotes/1.7.9.7.txt /usr/share/doc/git/RelNotes/1.7.9.txt /usr/share/doc/git/RelNotes/1.8.0.1.txt /usr/share/doc/git/RelNotes/1.8.0.2.txt /usr/share/doc/git/RelNotes/1.8.0.3.txt /usr/share/doc/git/RelNotes/1.8.0.txt /usr/share/doc/git/RelNotes/1.8.1.1.txt /usr/share/doc/git/RelNotes/1.8.1.2.txt /usr/share/doc/git/RelNotes/1.8.1.3.txt /usr/share/doc/git/RelNotes/1.8.1.4.txt /usr/share/doc/git/RelNotes/1.8.1.5.txt /usr/share/doc/git/RelNotes/1.8.1.6.txt /usr/share/doc/git/RelNotes/1.8.1.txt /usr/share/doc/git/RelNotes/1.8.2.1.txt /usr/share/doc/git/RelNotes/1.8.2.2.txt /usr/share/doc/git/RelNotes/1.8.2.3.txt /usr/share/doc/git/RelNotes/1.8.2.txt /usr/share/doc/git/RelNotes/1.8.3.1.txt /usr/share/doc/git/RelNotes/1.8.3.2.txt /usr/share/doc/git/RelNotes/1.8.3.3.txt /usr/share/doc/git/RelNotes/1.8.3.4.txt /usr/share/doc/git/RelNotes/1.8.3.txt /usr/share/doc/git/RelNotes/1.8.4.1.txt /usr/share/doc/git/RelNotes/1.8.4.2.txt /usr/share/doc/git/RelNotes/1.8.4.3.txt /usr/share/doc/git/RelNotes/1.8.4.4.txt /usr/share/doc/git/RelNotes/1.8.4.5.txt /usr/share/doc/git/RelNotes/1.8.4.txt /usr/share/doc/git/RelNotes/1.8.5.1.txt /usr/share/doc/git/RelNotes/1.8.5.2.txt /usr/share/doc/git/RelNotes/1.8.5.3.txt /usr/share/doc/git/RelNotes/1.8.5.4.txt /usr/share/doc/git/RelNotes/1.8.5.5.txt /usr/share/doc/git/RelNotes/1.8.5.6.txt /usr/share/doc/git/RelNotes/1.8.5.txt /usr/share/doc/git/RelNotes/1.9.0.txt /usr/share/doc/git/RelNotes/1.9.1.txt /usr/share/doc/git/RelNotes/1.9.2.txt /usr/share/doc/git/RelNotes/1.9.3.txt /usr/share/doc/git/RelNotes/1.9.4.txt /usr/share/doc/git/RelNotes/1.9.5.txt /usr/share/doc/git/RelNotes/2.0.0.txt /usr/share/doc/git/RelNotes/2.0.1.txt /usr/share/doc/git/RelNotes/2.0.2.txt /usr/share/doc/git/RelNotes/2.0.3.txt /usr/share/doc/git/RelNotes/2.0.4.txt /usr/share/doc/git/RelNotes/2.0.5.txt /usr/share/doc/git/RelNotes/2.1.0.txt /usr/share/doc/git/RelNotes/2.1.1.txt /usr/share/doc/git/RelNotes/2.1.2.txt /usr/share/doc/git/RelNotes/2.1.3.txt /usr/share/doc/git/RelNotes/2.1.4.txt /usr/share/doc/git/RelNotes/2.10.0.txt /usr/share/doc/git/RelNotes/2.10.1.txt /usr/share/doc/git/RelNotes/2.10.2.txt /usr/share/doc/git/RelNotes/2.10.3.txt /usr/share/doc/git/RelNotes/2.10.4.txt /usr/share/doc/git/RelNotes/2.10.5.txt /usr/share/doc/git/RelNotes/2.11.0.txt /usr/share/doc/git/RelNotes/2.11.1.txt /usr/share/doc/git/RelNotes/2.11.2.txt /usr/share/doc/git/RelNotes/2.11.3.txt /usr/share/doc/git/RelNotes/2.11.4.txt /usr/share/doc/git/RelNotes/2.12.0.txt /usr/share/doc/git/RelNotes/2.12.1.txt /usr/share/doc/git/RelNotes/2.12.2.txt /usr/share/doc/git/RelNotes/2.12.3.txt /usr/share/doc/git/RelNotes/2.12.4.txt /usr/share/doc/git/RelNotes/2.12.5.txt /usr/share/doc/git/RelNotes/2.13.0.txt /usr/share/doc/git/RelNotes/2.13.1.txt /usr/share/doc/git/RelNotes/2.13.2.txt /usr/share/doc/git/RelNotes/2.13.3.txt /usr/share/doc/git/RelNotes/2.13.4.txt /usr/share/doc/git/RelNotes/2.13.5.txt /usr/share/doc/git/RelNotes/2.13.6.txt /usr/share/doc/git/RelNotes/2.13.7.txt /usr/share/doc/git/RelNotes/2.14.0.txt /usr/share/doc/git/RelNotes/2.14.1.txt /usr/share/doc/git/RelNotes/2.14.2.txt /usr/share/doc/git/RelNotes/2.14.3.txt 
/usr/share/doc/git/RelNotes/2.14.4.txt /usr/share/doc/git/RelNotes/2.14.5.txt /usr/share/doc/git/RelNotes/2.14.6.txt /usr/share/doc/git/RelNotes/2.15.0.txt /usr/share/doc/git/RelNotes/2.15.1.txt /usr/share/doc/git/RelNotes/2.15.2.txt /usr/share/doc/git/RelNotes/2.15.3.txt /usr/share/doc/git/RelNotes/2.15.4.txt /usr/share/doc/git/RelNotes/2.16.0.txt /usr/share/doc/git/RelNotes/2.16.1.txt /usr/share/doc/git/RelNotes/2.16.2.txt /usr/share/doc/git/RelNotes/2.16.3.txt /usr/share/doc/git/RelNotes/2.16.4.txt /usr/share/doc/git/RelNotes/2.16.5.txt /usr/share/doc/git/RelNotes/2.16.6.txt /usr/share/doc/git/RelNotes/2.17.0.txt /usr/share/doc/git/RelNotes/2.17.1.txt /usr/share/doc/git/RelNotes/2.17.2.txt /usr/share/doc/git/RelNotes/2.17.3.txt /usr/share/doc/git/RelNotes/2.17.4.txt /usr/share/doc/git/RelNotes/2.17.5.txt /usr/share/doc/git/RelNotes/2.17.6.txt /usr/share/doc/git/RelNotes/2.18.0.txt /usr/share/doc/git/RelNotes/2.18.1.txt /usr/share/doc/git/RelNotes/2.18.2.txt /usr/share/doc/git/RelNotes/2.18.3.txt /usr/share/doc/git/RelNotes/2.18.4.txt /usr/share/doc/git/RelNotes/2.18.5.txt /usr/share/doc/git/RelNotes/2.19.0.txt /usr/share/doc/git/RelNotes/2.19.1.txt /usr/share/doc/git/RelNotes/2.19.2.txt /usr/share/doc/git/RelNotes/2.19.3.txt /usr/share/doc/git/RelNotes/2.19.4.txt /usr/share/doc/git/RelNotes/2.19.5.txt /usr/share/doc/git/RelNotes/2.19.6.txt /usr/share/doc/git/RelNotes/2.2.0.txt /usr/share/doc/git/RelNotes/2.2.1.txt /usr/share/doc/git/RelNotes/2.2.2.txt /usr/share/doc/git/RelNotes/2.2.3.txt /usr/share/doc/git/RelNotes/2.20.0.txt /usr/share/doc/git/RelNotes/2.20.1.txt /usr/share/doc/git/RelNotes/2.20.2.txt /usr/share/doc/git/RelNotes/2.20.3.txt /usr/share/doc/git/RelNotes/2.20.4.txt /usr/share/doc/git/RelNotes/2.20.5.txt /usr/share/doc/git/RelNotes/2.21.0.txt /usr/share/doc/git/RelNotes/2.21.1.txt /usr/share/doc/git/RelNotes/2.21.2.txt /usr/share/doc/git/RelNotes/2.21.3.txt /usr/share/doc/git/RelNotes/2.21.4.txt /usr/share/doc/git/RelNotes/2.22.0.txt /usr/share/doc/git/RelNotes/2.22.1.txt /usr/share/doc/git/RelNotes/2.22.2.txt /usr/share/doc/git/RelNotes/2.22.3.txt /usr/share/doc/git/RelNotes/2.22.4.txt /usr/share/doc/git/RelNotes/2.22.5.txt /usr/share/doc/git/RelNotes/2.23.0.txt /usr/share/doc/git/RelNotes/2.23.1.txt /usr/share/doc/git/RelNotes/2.23.2.txt /usr/share/doc/git/RelNotes/2.23.3.txt /usr/share/doc/git/RelNotes/2.23.4.txt /usr/share/doc/git/RelNotes/2.24.0.txt /usr/share/doc/git/RelNotes/2.24.1.txt /usr/share/doc/git/RelNotes/2.24.2.txt /usr/share/doc/git/RelNotes/2.24.3.txt /usr/share/doc/git/RelNotes/2.24.4.txt /usr/share/doc/git/RelNotes/2.25.0.txt /usr/share/doc/git/RelNotes/2.25.1.txt /usr/share/doc/git/RelNotes/2.25.2.txt /usr/share/doc/git/RelNotes/2.25.3.txt /usr/share/doc/git/RelNotes/2.25.4.txt /usr/share/doc/git/RelNotes/2.25.5.txt /usr/share/doc/git/RelNotes/2.26.0.txt /usr/share/doc/git/RelNotes/2.26.1.txt /usr/share/doc/git/RelNotes/2.26.2.txt /usr/share/doc/git/RelNotes/2.26.3.txt /usr/share/doc/git/RelNotes/2.27.0.txt /usr/share/doc/git/RelNotes/2.27.1.txt /usr/share/doc/git/RelNotes/2.28.0.txt /usr/share/doc/git/RelNotes/2.28.1.txt /usr/share/doc/git/RelNotes/2.29.0.txt /usr/share/doc/git/RelNotes/2.29.1.txt /usr/share/doc/git/RelNotes/2.29.2.txt /usr/share/doc/git/RelNotes/2.29.3.txt /usr/share/doc/git/RelNotes/2.3.0.txt /usr/share/doc/git/RelNotes/2.3.1.txt /usr/share/doc/git/RelNotes/2.3.10.txt /usr/share/doc/git/RelNotes/2.3.2.txt /usr/share/doc/git/RelNotes/2.3.3.txt /usr/share/doc/git/RelNotes/2.3.4.txt /usr/share/doc/git/RelNotes/2.3.5.txt 
/usr/share/doc/git/RelNotes/2.3.6.txt /usr/share/doc/git/RelNotes/2.3.7.txt /usr/share/doc/git/RelNotes/2.3.8.txt /usr/share/doc/git/RelNotes/2.3.9.txt /usr/share/doc/git/RelNotes/2.30.0.txt /usr/share/doc/git/RelNotes/2.30.1.txt /usr/share/doc/git/RelNotes/2.30.2.txt /usr/share/doc/git/RelNotes/2.31.0.txt /usr/share/doc/git/RelNotes/2.31.1.txt /usr/share/doc/git/RelNotes/2.32.0.txt /usr/share/doc/git/RelNotes/2.33.0.txt /usr/share/doc/git/RelNotes/2.33.1.txt /usr/share/doc/git/RelNotes/2.34.0.txt /usr/share/doc/git/RelNotes/2.34.1.txt /usr/share/doc/git/RelNotes/2.4.0.txt /usr/share/doc/git/RelNotes/2.4.1.txt /usr/share/doc/git/RelNotes/2.4.10.txt /usr/share/doc/git/RelNotes/2.4.11.txt /usr/share/doc/git/RelNotes/2.4.12.txt /usr/share/doc/git/RelNotes/2.4.2.txt /usr/share/doc/git/RelNotes/2.4.3.txt /usr/share/doc/git/RelNotes/2.4.4.txt /usr/share/doc/git/RelNotes/2.4.5.txt /usr/share/doc/git/RelNotes/2.4.6.txt /usr/share/doc/git/RelNotes/2.4.7.txt /usr/share/doc/git/RelNotes/2.4.8.txt /usr/share/doc/git/RelNotes/2.4.9.txt /usr/share/doc/git/RelNotes/2.5.0.txt /usr/share/doc/git/RelNotes/2.5.1.txt /usr/share/doc/git/RelNotes/2.5.2.txt /usr/share/doc/git/RelNotes/2.5.3.txt /usr/share/doc/git/RelNotes/2.5.4.txt /usr/share/doc/git/RelNotes/2.5.5.txt /usr/share/doc/git/RelNotes/2.5.6.txt /usr/share/doc/git/RelNotes/2.6.0.txt /usr/share/doc/git/RelNotes/2.6.1.txt /usr/share/doc/git/RelNotes/2.6.2.txt /usr/share/doc/git/RelNotes/2.6.3.txt /usr/share/doc/git/RelNotes/2.6.4.txt /usr/share/doc/git/RelNotes/2.6.5.txt /usr/share/doc/git/RelNotes/2.6.6.txt /usr/share/doc/git/RelNotes/2.6.7.txt /usr/share/doc/git/RelNotes/2.7.0.txt /usr/share/doc/git/RelNotes/2.7.1.txt /usr/share/doc/git/RelNotes/2.7.2.txt /usr/share/doc/git/RelNotes/2.7.3.txt /usr/share/doc/git/RelNotes/2.7.4.txt /usr/share/doc/git/RelNotes/2.7.5.txt /usr/share/doc/git/RelNotes/2.7.6.txt /usr/share/doc/git/RelNotes/2.8.0.txt /usr/share/doc/git/RelNotes/2.8.1.txt /usr/share/doc/git/RelNotes/2.8.2.txt /usr/share/doc/git/RelNotes/2.8.3.txt /usr/share/doc/git/RelNotes/2.8.4.txt /usr/share/doc/git/RelNotes/2.8.5.txt /usr/share/doc/git/RelNotes/2.8.6.txt /usr/share/doc/git/RelNotes/2.9.0.txt /usr/share/doc/git/RelNotes/2.9.1.txt /usr/share/doc/git/RelNotes/2.9.2.txt /usr/share/doc/git/RelNotes/2.9.3.txt /usr/share/doc/git/RelNotes/2.9.4.txt /usr/share/doc/git/RelNotes/2.9.5.txt /usr/share/doc/git/changelog.Debian.gz /usr/share/doc/git/contrib /usr/share/doc/git/contrib/README /usr/share/doc/git/contrib/buildsystems /usr/share/doc/git/contrib/buildsystems/CMakeLists.txt /usr/share/doc/git/contrib/buildsystems/Generators /usr/share/doc/git/contrib/buildsystems/Generators/QMake.pm /usr/share/doc/git/contrib/buildsystems/Generators/Vcproj.pm /usr/share/doc/git/contrib/buildsystems/Generators/Vcxproj.pm /usr/share/doc/git/contrib/buildsystems/Generators.pm /usr/share/doc/git/contrib/buildsystems/engine.pl /usr/share/doc/git/contrib/buildsystems/generate /usr/share/doc/git/contrib/buildsystems/parse.pl /usr/share/doc/git/contrib/coccinelle /usr/share/doc/git/contrib/coccinelle/README /usr/share/doc/git/contrib/coccinelle/array.cocci /usr/share/doc/git/contrib/coccinelle/commit.cocci /usr/share/doc/git/contrib/coccinelle/flex_alloc.cocci /usr/share/doc/git/contrib/coccinelle/free.cocci /usr/share/doc/git/contrib/coccinelle/hashmap.cocci /usr/share/doc/git/contrib/coccinelle/object_id.cocci /usr/share/doc/git/contrib/coccinelle/preincr.cocci /usr/share/doc/git/contrib/coccinelle/qsort.cocci /usr/share/doc/git/contrib/coccinelle/strbuf.cocci 
/usr/share/doc/git/contrib/coccinelle/swap.cocci /usr/share/doc/git/contrib/coccinelle/the_repository.pending.cocci /usr/share/doc/git/contrib/coccinelle/xcalloc.cocci /usr/share/doc/git/contrib/coccinelle/xopen.cocci /usr/share/doc/git/contrib/coccinelle/xstrdup_or_null.cocci /usr/share/doc/git/contrib/contacts /usr/share/doc/git/contrib/contacts/Makefile /usr/share/doc/git/contrib/contacts/git-contacts /usr/share/doc/git/contrib/contacts/git-contacts.txt /usr/share/doc/git/contrib/coverage-diff.sh /usr/share/doc/git/contrib/credential /usr/share/doc/git/contrib/credential/gnome-keyring /usr/share/doc/git/contrib/credential/gnome-keyring/Makefile /usr/share/doc/git/contrib/credential/gnome-keyring/git-credential-gnome-keyring.c /usr/share/doc/git/contrib/credential/libsecret /usr/share/doc/git/contrib/credential/libsecret/Makefile /usr/share/doc/git/contrib/credential/libsecret/git-credential-libsecret.c /usr/share/doc/git/contrib/credential/netrc /usr/share/doc/git/contrib/credential/netrc/Makefile /usr/share/doc/git/contrib/credential/netrc/git-credential-netrc.perl /usr/share/doc/git/contrib/credential/netrc/t-git-credential-netrc.sh /usr/share/doc/git/contrib/credential/netrc/test.command-option-gpg /usr/share/doc/git/contrib/credential/netrc/test.git-config-gpg /usr/share/doc/git/contrib/credential/netrc/test.netrc /usr/share/doc/git/contrib/credential/netrc/test.pl /usr/share/doc/git/contrib/credential/osxkeychain /usr/share/doc/git/contrib/credential/osxkeychain/Makefile /usr/share/doc/git/contrib/credential/osxkeychain/git-credential-osxkeychain.c /usr/share/doc/git/contrib/credential/wincred /usr/share/doc/git/contrib/credential/wincred/Makefile /usr/share/doc/git/contrib/credential/wincred/git-credential-wincred.c /usr/share/doc/git/contrib/diff-highlight /usr/share/doc/git/contrib/diff-highlight/DiffHighlight.pm /usr/share/doc/git/contrib/diff-highlight/Makefile /usr/share/doc/git/contrib/diff-highlight/README /usr/share/doc/git/contrib/diff-highlight/diff-highlight /usr/share/doc/git/contrib/diff-highlight/diff-highlight.perl /usr/share/doc/git/contrib/diff-highlight/shebang.perl /usr/share/doc/git/contrib/diff-highlight/t /usr/share/doc/git/contrib/diff-highlight/t/Makefile /usr/share/doc/git/contrib/diff-highlight/t/t9400-diff-highlight.sh /usr/share/doc/git/contrib/examples /usr/share/doc/git/contrib/examples/README /usr/share/doc/git/contrib/fast-import /usr/share/doc/git/contrib/fast-import/git-import.perl /usr/share/doc/git/contrib/fast-import/git-import.sh /usr/share/doc/git/contrib/fast-import/git-p4.README /usr/share/doc/git/contrib/fast-import/import-directories.perl /usr/share/doc/git/contrib/fast-import/import-tars.perl /usr/share/doc/git/contrib/fast-import/import-zips.py /usr/share/doc/git/contrib/git-jump /usr/share/doc/git/contrib/git-jump/README /usr/share/doc/git/contrib/git-jump/git-jump /usr/share/doc/git/contrib/git-resurrect.sh /usr/share/doc/git/contrib/git-shell-commands /usr/share/doc/git/contrib/git-shell-commands/README /usr/share/doc/git/contrib/git-shell-commands/help /usr/share/doc/git/contrib/git-shell-commands/list /usr/share/doc/git/contrib/hg-to-git /usr/share/doc/git/contrib/hg-to-git/hg-to-git.py /usr/share/doc/git/contrib/hg-to-git/hg-to-git.txt /usr/share/doc/git/contrib/long-running-filter /usr/share/doc/git/contrib/long-running-filter/example.pl /usr/share/doc/git/contrib/persistent-https /usr/share/doc/git/contrib/persistent-https/Makefile /usr/share/doc/git/contrib/persistent-https/README 
/usr/share/doc/git/contrib/persistent-https/client.go /usr/share/doc/git/contrib/persistent-https/main.go /usr/share/doc/git/contrib/persistent-https/proxy.go /usr/share/doc/git/contrib/persistent-https/socket.go /usr/share/doc/git/contrib/remote-helpers /usr/share/doc/git/contrib/remote-helpers/README /usr/share/doc/git/contrib/remote-helpers/git-remote-bzr /usr/share/doc/git/contrib/remote-helpers/git-remote-hg /usr/share/doc/git/contrib/remotes2config.sh /usr/share/doc/git/contrib/rerere-train.sh /usr/share/doc/git/contrib/stats /usr/share/doc/git/contrib/stats/git-common-hash /usr/share/doc/git/contrib/stats/mailmap.pl /usr/share/doc/git/contrib/stats/packinfo.pl /usr/share/doc/git/contrib/subtree /usr/share/doc/git/contrib/subtree/INSTALL /usr/share/doc/git/contrib/subtree/Makefile /usr/share/doc/git/contrib/subtree/README /usr/share/doc/git/contrib/subtree/git-subtree /usr/share/doc/git/contrib/subtree/git-subtree.sh /usr/share/doc/git/contrib/subtree/git-subtree.txt /usr/share/doc/git/contrib/subtree/t /usr/share/doc/git/contrib/subtree/t/Makefile /usr/share/doc/git/contrib/subtree/t/t7900-subtree.sh /usr/share/doc/git/contrib/subtree/todo /usr/share/doc/git/contrib/thunderbird-patch-inline /usr/share/doc/git/contrib/thunderbird-patch-inline/README /usr/share/doc/git/contrib/thunderbird-patch-inline/appp.sh /usr/share/doc/git/contrib/update-unicode /usr/share/doc/git/contrib/update-unicode/README /usr/share/doc/git/contrib/update-unicode/update_unicode.sh /usr/share/doc/git/contrib/vscode /usr/share/doc/git/contrib/vscode/README.md /usr/share/doc/git/contrib/vscode/init.sh /usr/share/doc/git/contrib/workdir /usr/share/doc/git/contrib/workdir/git-new-workdir /usr/share/doc/git/copyright /usr/share/git-core /usr/share/git-core/contrib /usr/share/git-core/contrib/hooks /usr/share/git-core/contrib/hooks/post-receive-email /usr/share/git-core/contrib/hooks/pre-auto-gc-battery /usr/share/git-core/contrib/hooks/setgitperms.perl /usr/share/git-core/contrib/hooks/update-paranoid /usr/share/git-core/templates /usr/share/git-core/templates/branches /usr/share/git-core/templates/description /usr/share/git-core/templates/hooks /usr/share/git-core/templates/hooks/applypatch-msg.sample /usr/share/git-core/templates/hooks/commit-msg.sample /usr/share/git-core/templates/hooks/fsmonitor-watchman.sample /usr/share/git-core/templates/hooks/post-update.sample /usr/share/git-core/templates/hooks/pre-applypatch.sample /usr/share/git-core/templates/hooks/pre-commit.sample /usr/share/git-core/templates/hooks/pre-merge-commit.sample /usr/share/git-core/templates/hooks/pre-push.sample /usr/share/git-core/templates/hooks/pre-rebase.sample /usr/share/git-core/templates/hooks/pre-receive.sample /usr/share/git-core/templates/hooks/prepare-commit-msg.sample /usr/share/git-core/templates/hooks/push-to-checkout.sample /usr/share/git-core/templates/hooks/update.sample /usr/share/git-core/templates/info /usr/share/git-core/templates/info/exclude /usr/share/gitweb /usr/share/gitweb/gitweb.cgi /usr/share/gitweb/static /usr/share/gitweb/static/git-favicon.png /usr/share/gitweb/static/git-logo.png /usr/share/gitweb/static/gitweb.css /usr/share/gitweb/static/gitweb.js /usr/share/lintian /usr/share/lintian/overrides /usr/share/lintian/overrides/git /usr/share/perl5 /usr/share/perl5/Git /usr/share/perl5/Git/I18N.pm /usr/share/perl5/Git/IndexInfo.pm /usr/share/perl5/Git/LoadCPAN /usr/share/perl5/Git/LoadCPAN/Error.pm /usr/share/perl5/Git/LoadCPAN/Mail /usr/share/perl5/Git/LoadCPAN/Mail/Address.pm 
/usr/share/perl5/Git/LoadCPAN.pm /usr/share/perl5/Git/Packet.pm /usr/share/perl5/Git.pm /var /var/lib /var/lib/git /usr/bin/git-receive-pack /usr/bin/git-upload-archive /usr/bin/git-upload-pack /usr/lib/git-core/git-add /usr/lib/git-core/git-am /usr/lib/git-core/git-annotate /usr/lib/git-core/git-apply /usr/lib/git-core/git-archive /usr/lib/git-core/git-bisect--helper /usr/lib/git-core/git-blame /usr/lib/git-core/git-branch /usr/lib/git-core/git-bugreport /usr/lib/git-core/git-bundle /usr/lib/git-core/git-cat-file /usr/lib/git-core/git-check-attr /usr/lib/git-core/git-check-ignore /usr/lib/git-core/git-check-mailmap /usr/lib/git-core/git-check-ref-format /usr/lib/git-core/git-checkout /usr/lib/git-core/git-checkout--worker /usr/lib/git-core/git-checkout-index /usr/lib/git-core/git-cherry /usr/lib/git-core/git-cherry-pick /usr/lib/git-core/git-clean /usr/lib/git-core/git-clone /usr/lib/git-core/git-column /usr/lib/git-core/git-commit /usr/lib/git-core/git-commit-graph /usr/lib/git-core/git-commit-tree /usr/lib/git-core/git-config /usr/lib/git-core/git-count-objects /usr/lib/git-core/git-credential /usr/lib/git-core/git-credential-cache /usr/lib/git-core/git-credential-cache--daemon /usr/lib/git-core/git-credential-store /usr/lib/git-core/git-describe /usr/lib/git-core/git-diff /usr/lib/git-core/git-diff-files /usr/lib/git-core/git-diff-index /usr/lib/git-core/git-diff-tree /usr/lib/git-core/git-difftool /usr/lib/git-core/git-env--helper /usr/lib/git-core/git-fast-export /usr/lib/git-core/git-fast-import /usr/lib/git-core/git-fetch /usr/lib/git-core/git-fetch-pack /usr/lib/git-core/git-fmt-merge-msg /usr/lib/git-core/git-for-each-ref /usr/lib/git-core/git-for-each-repo /usr/lib/git-core/git-format-patch /usr/lib/git-core/git-fsck /usr/lib/git-core/git-fsck-objects /usr/lib/git-core/git-gc /usr/lib/git-core/git-get-tar-commit-id /usr/lib/git-core/git-grep /usr/lib/git-core/git-hash-object /usr/lib/git-core/git-help /usr/lib/git-core/git-index-pack /usr/lib/git-core/git-init /usr/lib/git-core/git-init-db /usr/lib/git-core/git-interpret-trailers /usr/lib/git-core/git-log /usr/lib/git-core/git-ls-files /usr/lib/git-core/git-ls-remote /usr/lib/git-core/git-ls-tree /usr/lib/git-core/git-mailinfo /usr/lib/git-core/git-mailsplit /usr/lib/git-core/git-maintenance /usr/lib/git-core/git-merge /usr/lib/git-core/git-merge-base /usr/lib/git-core/git-merge-file /usr/lib/git-core/git-merge-index /usr/lib/git-core/git-merge-ours /usr/lib/git-core/git-merge-recursive /usr/lib/git-core/git-merge-subtree /usr/lib/git-core/git-merge-tree /usr/lib/git-core/git-mktag /usr/lib/git-core/git-mktree /usr/lib/git-core/git-multi-pack-index /usr/lib/git-core/git-mv /usr/lib/git-core/git-name-rev /usr/lib/git-core/git-notes /usr/lib/git-core/git-pack-objects /usr/lib/git-core/git-pack-redundant /usr/lib/git-core/git-pack-refs /usr/lib/git-core/git-patch-id /usr/lib/git-core/git-prune /usr/lib/git-core/git-prune-packed /usr/lib/git-core/git-pull /usr/lib/git-core/git-push /usr/lib/git-core/git-range-diff /usr/lib/git-core/git-read-tree /usr/lib/git-core/git-rebase /usr/lib/git-core/git-receive-pack /usr/lib/git-core/git-reflog /usr/lib/git-core/git-remote /usr/lib/git-core/git-remote-ext /usr/lib/git-core/git-remote-fd /usr/lib/git-core/git-remote-ftp /usr/lib/git-core/git-remote-ftps /usr/lib/git-core/git-remote-https /usr/lib/git-core/git-repack /usr/lib/git-core/git-replace /usr/lib/git-core/git-rerere /usr/lib/git-core/git-reset /usr/lib/git-core/git-restore /usr/lib/git-core/git-rev-list 
/usr/lib/git-core/git-rev-parse /usr/lib/git-core/git-revert /usr/lib/git-core/git-rm /usr/lib/git-core/git-send-pack /usr/lib/git-core/git-shortlog /usr/lib/git-core/git-show /usr/lib/git-core/git-show-branch /usr/lib/git-core/git-show-index /usr/lib/git-core/git-show-ref /usr/lib/git-core/git-sparse-checkout /usr/lib/git-core/git-stage /usr/lib/git-core/git-stash /usr/lib/git-core/git-status /usr/lib/git-core/git-stripspace /usr/lib/git-core/git-submodule--helper /usr/lib/git-core/git-switch /usr/lib/git-core/git-symbolic-ref /usr/lib/git-core/git-tag /usr/lib/git-core/git-unpack-file /usr/lib/git-core/git-unpack-objects /usr/lib/git-core/git-update-index /usr/lib/git-core/git-update-ref /usr/lib/git-core/git-update-server-info /usr/lib/git-core/git-upload-archive /usr/lib/git-core/git-upload-pack /usr/lib/git-core/git-var /usr/lib/git-core/git-verify-commit /usr/lib/git-core/git-verify-pack /usr/lib/git-core/git-verify-tag /usr/lib/git-core/git-whatchanged /usr/lib/git-core/git-worktree /usr/lib/git-core/git-write-tree /usr/share/bash-completion/completions/gitk /usr/share/doc/git/contrib/hooks /usr/share/doc/git/contrib/persistent-https/LICENSE /usr/share/doc/git/contrib/subtree/COPYING /usr/share/gitweb/index.cgi /. /etc /etc/wgetrc /usr /usr/bin /usr/bin/wget /usr/share /usr/share/doc /usr/share/doc/wget /usr/share/doc/wget/AUTHORS /usr/share/doc/wget/MAILING-LIST /usr/share/doc/wget/NEWS.gz /usr/share/doc/wget/README /usr/share/doc/wget/changelog.Debian.gz /usr/share/doc/wget/copyright /usr/share/info /usr/share/info/wget.info.gz /usr/share/man /usr/share/man/man1 /usr/share/man/man1/wget.1.gz /. /usr /usr/bin /usr/bin/curl /usr/share /usr/share/doc /usr/share/doc/curl /usr/share/doc/curl/copyright /usr/share/man /usr/share/man/man1 /usr/share/man/man1/curl.1.gz /usr/share/zsh /usr/share/zsh/vendor-completions /usr/share/zsh/vendor-completions/_curl /usr/share/doc/curl/NEWS.Debian.gz /usr/share/doc/curl/changelog.Debian.gz /. /bin /bin/tar /etc /usr /usr/lib /usr/lib/mime /usr/lib/mime/packages /usr/lib/mime/packages/tar /usr/sbin /usr/sbin/rmt-tar /usr/sbin/tarcat /usr/share /usr/share/doc /usr/share/doc/tar /usr/share/doc/tar/AUTHORS /usr/share/doc/tar/NEWS.gz /usr/share/doc/tar/README.Debian /usr/share/doc/tar/THANKS.gz /usr/share/doc/tar/changelog.Debian.gz /usr/share/doc/tar/copyright /usr/share/man /usr/share/man/man1 /usr/share/man/man1/tar.1.gz /usr/share/man/man1/tarcat.1.gz /usr/share/man/man8 /usr/share/man/man8/rmt-tar.8.gz /etc/rmt /. 
/etc /etc/apparmor.d /etc/apparmor.d/usr.lib.snapd.snap-confine.real /etc/apt /etc/apt/apt.conf.d /etc/apt/apt.conf.d/20snapd.conf /etc/profile.d /etc/profile.d/apps-bin-path.sh /etc/xdg /etc/xdg/autostart /etc/xdg/autostart/snap-userd-autostart.desktop /lib /lib/systemd /lib/systemd/system /lib/systemd/system/snapd.aa-prompt-listener.service /lib/systemd/system/snapd.apparmor.service /lib/systemd/system/snapd.autoimport.service /lib/systemd/system/snapd.core-fixup.service /lib/systemd/system/snapd.failure.service /lib/systemd/system/snapd.mounts-pre.target /lib/systemd/system/snapd.mounts.target /lib/systemd/system/snapd.recovery-chooser-trigger.service /lib/systemd/system/snapd.seeded.service /lib/systemd/system/snapd.service /lib/systemd/system/snapd.snap-repair.service /lib/systemd/system/snapd.snap-repair.timer /lib/systemd/system/snapd.socket /lib/systemd/system/snapd.system-shutdown.service /lib/systemd/system-generators /lib/systemd/system-generators/snapd-generator /lib/udev /lib/udev/rules.d /lib/udev/rules.d/66-snapd-autoimport.rules /snap /usr /usr/bin /usr/bin/snap /usr/bin/snapfuse /usr/lib /usr/lib/environment.d /usr/lib/environment.d/990-snapd.conf /usr/lib/snapd /usr/lib/snapd/complete.sh /usr/lib/snapd/etelpmoc.sh /usr/lib/snapd/info /usr/lib/snapd/snap-bootstrap /usr/lib/snapd/snap-confine /usr/lib/snapd/snap-device-helper /usr/lib/snapd/snap-discard-ns /usr/lib/snapd/snap-exec /usr/lib/snapd/snap-failure /usr/lib/snapd/snap-fde-keymgr /usr/lib/snapd/snap-gdb-shim /usr/lib/snapd/snap-gdbserver-shim /usr/lib/snapd/snap-mgmt /usr/lib/snapd/snap-preseed /usr/lib/snapd/snap-recovery-chooser /usr/lib/snapd/snap-repair /usr/lib/snapd/snap-seccomp /usr/lib/snapd/snap-update-ns /usr/lib/snapd/snapctl /usr/lib/snapd/snapd /usr/lib/snapd/snapd-aa-prompt-listener /usr/lib/snapd/snapd-aa-prompt-ui /usr/lib/snapd/snapd-apparmor /usr/lib/snapd/snapd.core-fixup.sh /usr/lib/snapd/snapd.run-from-snap /usr/lib/snapd/system-shutdown /usr/lib/systemd /usr/lib/systemd/system-environment-generators /usr/lib/systemd/system-environment-generators/snapd-env-generator /usr/lib/systemd/user /usr/lib/systemd/user/snapd.aa-prompt-ui.service /usr/lib/systemd/user/snapd.session-agent.service /usr/lib/systemd/user/snapd.session-agent.socket /usr/lib/systemd/user/sockets.target.wants /usr/lib/tmpfiles.d /usr/lib/tmpfiles.d/snapd.conf /usr/share /usr/share/applications /usr/share/applications/io.snapcraft.SessionAgent.desktop /usr/share/applications/snap-handle-link.desktop /usr/share/bash-completion /usr/share/bash-completion/completions /usr/share/bash-completion/completions/snap /usr/share/dbus-1 /usr/share/dbus-1/services /usr/share/dbus-1/services/io.snapcraft.Launcher.service /usr/share/dbus-1/services/io.snapcraft.Prompt.service /usr/share/dbus-1/services/io.snapcraft.SessionAgent.service /usr/share/dbus-1/services/io.snapcraft.Settings.service /usr/share/dbus-1/session.d /usr/share/dbus-1/session.d/snapd.session-services.conf /usr/share/dbus-1/system.d /usr/share/dbus-1/system.d/snapd.system-services.conf /usr/share/doc /usr/share/doc/snapd /usr/share/doc/snapd/changelog.gz /usr/share/doc/snapd/copyright /usr/share/fish /usr/share/fish/vendor_conf.d /usr/share/fish/vendor_conf.d/snapd.fish /usr/share/man /usr/share/man/man8 /usr/share/man/man8/snap-confine.8.gz /usr/share/man/man8/snap-discard-ns.8.gz /usr/share/man/man8/snap.8.gz /usr/share/man/man8/snapd-env-generator.8.gz /usr/share/polkit-1 /usr/share/polkit-1/actions /usr/share/polkit-1/actions/io.snapcraft.snapd.policy /usr/share/zsh 
/usr/share/zsh/vendor-completions /usr/share/zsh/vendor-completions/_snap /var /var/cache /var/cache/snapd /var/lib /var/lib/snapd /var/lib/snapd/apparmor /var/lib/snapd/apparmor/snap-confine /var/lib/snapd/auto-import /var/lib/snapd/dbus-1 /var/lib/snapd/dbus-1/services /var/lib/snapd/dbus-1/system-services /var/lib/snapd/desktop /var/lib/snapd/desktop/applications /var/lib/snapd/environment /var/lib/snapd/firstboot /var/lib/snapd/hostfs /var/lib/snapd/inhibit /var/lib/snapd/lib /var/lib/snapd/lib/gl /var/lib/snapd/lib/gl32 /var/lib/snapd/lib/glvnd /var/lib/snapd/lib/vulkan /var/lib/snapd/snaps /var/lib/snapd/snaps/partial /var/lib/snapd/ssl /var/lib/snapd/ssl/store-certs /var/lib/snapd/void /var/snap /lib/udev/snappy-app-dev /usr/bin/snapctl /usr/bin/ubuntu-core-launcher /usr/lib/systemd/user/sockets.target.wants/snapd.session-agent.socket Required packages are present: git wget curl tar snapd Track start release: https://osm.etsi.org/InstallLog.php?&installation_id=1704025241-XLdS97dvDXZdtpEe&local_ts=1704025241&event=start&operation=release&value=testing-daily&comment=&tags= Track start docker_tag: https://osm.etsi.org/InstallLog.php?&installation_id=1704025241-XLdS97dvDXZdtpEe&local_ts=1704025241&event=start&operation=docker_tag&value=testing-daily&comment=&tags= Track start installation_type: https://osm.etsi.org/InstallLog.php?&installation_id=1704025241-XLdS97dvDXZdtpEe&local_ts=1704025241&event=start&operation=installation_type&value=Default&comment=&tags= Track start os_info: https://osm.etsi.org/InstallLog.php?&installation_id=1704025241-XLdS97dvDXZdtpEe&local_ts=1704025241&event=start&operation=os_info&value=Ubuntu_22.04&comment=&tags= Track checks checkingroot_ok: https://osm.etsi.org/InstallLog.php?&installation_id=1704025241-XLdS97dvDXZdtpEe&local_ts=1704025242&event=checks&operation=checkingroot_ok&value=&comment=&tags= Track checks noroot_ok: https://osm.etsi.org/InstallLog.php?&installation_id=1704025241-XLdS97dvDXZdtpEe&local_ts=1704025242&event=checks&operation=noroot_ok&value=&comment=&tags= Track checks proceed_ok: https://osm.etsi.org/InstallLog.php?&installation_id=1704025241-XLdS97dvDXZdtpEe&local_ts=1704025242&event=checks&operation=proceed_ok&value=&comment=&tags= Installing OSM Determining IP address of the interface with the default route Track prereq prereqok_ok: https://osm.etsi.org/InstallLog.php?&installation_id=1704025241-XLdS97dvDXZdtpEe&local_ts=1704025242&event=prereq&operation=prereqok_ok&value=&comment=&tags= DEBUG_INSTALL= DOCKER_PROXY_URL= OSM_BEHIND_PROXY= USER=ubuntu Removing previous installation of docker ... Reading package lists... Building dependency tree... Reading state information... Package 'docker.io' is not installed, so not removed 0 upgraded, 0 newly installed, 0 to remove and 3 not upgraded. Reading package lists... Building dependency tree... Reading state information... Package 'docker-doc' is not installed, so not removed 0 upgraded, 0 newly installed, 0 to remove and 3 not upgraded. Reading package lists... Building dependency tree... Reading state information... Package 'docker-compose' is not installed, so not removed 0 upgraded, 0 newly installed, 0 to remove and 3 not upgraded. Reading package lists... Building dependency tree... Reading state information... Package 'podman-docker' is not installed, so not removed 0 upgraded, 0 newly installed, 0 to remove and 3 not upgraded. Reading package lists... Building dependency tree... Reading state information... 
Package 'containerd' is not installed, so not removed 0 upgraded, 0 newly installed, 0 to remove and 3 not upgraded. Reading package lists... Building dependency tree... Reading state information... Package 'runc' is not installed, so not removed 0 upgraded, 0 newly installed, 0 to remove and 3 not upgraded. Installing Docker CE ... Hit:1 http://azure.archive.ubuntu.com/ubuntu jammy InRelease Hit:2 http://azure.archive.ubuntu.com/ubuntu jammy-updates InRelease Hit:3 http://azure.archive.ubuntu.com/ubuntu jammy-backports InRelease Hit:4 http://azure.archive.ubuntu.com/ubuntu jammy-security InRelease Hit:5 https://osm-download.etsi.org/repository/osm/debian/testing-daily testing InRelease Reading package lists... W: https://osm-download.etsi.org/repository/osm/debian/testing-daily/dists/testing/InRelease: Key is stored in legacy trusted.gpg keyring (/etc/apt/trusted.gpg), see the DEPRECATION section in apt-key(8) for details. W: Conflicting distribution: https://osm-download.etsi.org/repository/osm/debian/testing-daily testing InRelease (expected testing but got ) Reading package lists... Building dependency tree... Reading state information... ca-certificates is already the newest version (20230311ubuntu0.22.04.1). ca-certificates set to manually installed. gnupg is already the newest version (2.2.27-3ubuntu2.1). gnupg set to manually installed. software-properties-common is already the newest version (0.99.22.8). software-properties-common set to manually installed. The following NEW packages will be installed: apt-transport-https 0 upgraded, 1 newly installed, 0 to remove and 3 not upgraded. Need to get 1510 B of archives. After this operation, 170 kB of additional disk space will be used. Get:1 http://azure.archive.ubuntu.com/ubuntu jammy-updates/universe amd64 apt-transport-https all 2.4.11 [1510 B] debconf: unable to initialize frontend: Dialog debconf: (Dialog frontend will not work on a dumb terminal, an emacs shell buffer, or without a controlling terminal.) debconf: falling back to frontend: Readline debconf: unable to initialize frontend: Readline debconf: (This frontend requires a controlling tty.) debconf: falling back to frontend: Teletype dpkg-preconfigure: unable to re-open stdin: Fetched 1510 B in 0s (86.4 kB/s) Selecting previously unselected package apt-transport-https. (Reading database ... (Reading database ... 5% (Reading database ... 10% (Reading database ... 15% (Reading database ... 20% (Reading database ... 25% (Reading database ... 30% (Reading database ... 35% (Reading database ... 40% (Reading database ... 45% (Reading database ... 50% (Reading database ... 55% (Reading database ... 60% (Reading database ... 65% (Reading database ... 70% (Reading database ... 75% (Reading database ... 80% (Reading database ... 85% (Reading database ... 90% (Reading database ... 95% (Reading database ... 100% (Reading database ... 62320 files and directories currently installed.) Preparing to unpack .../apt-transport-https_2.4.11_all.deb ... Unpacking apt-transport-https (2.4.11) ... Setting up apt-transport-https (2.4.11) ... Running kernel seems to be up-to-date. No services need to be restarted. No containers need to be restarted. No user sessions are running outdated binaries. No VM guests are running outdated hypervisor (qemu) binaries on this host. 
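The "Installing Docker CE" step that follows pulls docker-ce, docker-ce-cli, containerd.io and the buildx/compose plugins from download.docker.com. The exact repo and key handling used by the OSM installer is not echoed in this log; the sketch below is the standard upstream procedure for Ubuntu jammy and should produce the same apt sources seen in the next lines.

    # Add Docker's official APT repository (standard upstream steps, assumed here)
    sudo install -m 0755 -d /etc/apt/keyrings
    curl -fsSL https://download.docker.com/linux/ubuntu/gpg | \
        sudo gpg --dearmor -o /etc/apt/keyrings/docker.gpg
    echo "deb [arch=amd64 signed-by=/etc/apt/keyrings/docker.gpg] \
        https://download.docker.com/linux/ubuntu jammy stable" | \
        sudo tee /etc/apt/sources.list.d/docker.list
    sudo apt-get update
    sudo apt-get install -y docker-ce docker-ce-cli containerd.io \
        docker-buildx-plugin docker-compose-plugin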
Hit:1 http://azure.archive.ubuntu.com/ubuntu jammy InRelease Hit:2 http://azure.archive.ubuntu.com/ubuntu jammy-updates InRelease Hit:3 http://azure.archive.ubuntu.com/ubuntu jammy-backports InRelease Hit:4 http://azure.archive.ubuntu.com/ubuntu jammy-security InRelease Get:5 https://download.docker.com/linux/ubuntu jammy InRelease [48.8 kB] Hit:6 https://osm-download.etsi.org/repository/osm/debian/testing-daily testing InRelease Get:7 https://download.docker.com/linux/ubuntu jammy/stable amd64 Packages [23.0 kB] Fetched 71.8 kB in 1s (123 kB/s) Reading package lists... W: https://osm-download.etsi.org/repository/osm/debian/testing-daily/dists/testing/InRelease: Key is stored in legacy trusted.gpg keyring (/etc/apt/trusted.gpg), see the DEPRECATION section in apt-key(8) for details. W: Conflicting distribution: https://osm-download.etsi.org/repository/osm/debian/testing-daily testing InRelease (expected testing but got ) Reading package lists... Building dependency tree... Reading state information... The following additional packages will be installed: containerd.io docker-buildx-plugin docker-ce-cli docker-ce-rootless-extras docker-compose-plugin libltdl7 libslirp0 pigz slirp4netns Suggested packages: aufs-tools cgroupfs-mount | cgroup-lite The following NEW packages will be installed: containerd.io docker-buildx-plugin docker-ce docker-ce-cli docker-ce-rootless-extras docker-compose-plugin libltdl7 libslirp0 pigz slirp4netns 0 upgraded, 10 newly installed, 0 to remove and 3 not upgraded. Need to get 115 MB of archives. After this operation, 411 MB of additional disk space will be used. Get:1 http://azure.archive.ubuntu.com/ubuntu jammy/universe amd64 pigz amd64 2.6-1 [63.6 kB] Get:2 http://azure.archive.ubuntu.com/ubuntu jammy/main amd64 libltdl7 amd64 2.4.6-15build2 [39.6 kB] Get:3 http://azure.archive.ubuntu.com/ubuntu jammy/main amd64 libslirp0 amd64 4.6.1-1build1 [61.5 kB] Get:4 http://azure.archive.ubuntu.com/ubuntu jammy/universe amd64 slirp4netns amd64 1.0.1-2 [28.2 kB] Get:5 https://download.docker.com/linux/ubuntu jammy/stable amd64 containerd.io amd64 1.6.26-1 [29.5 MB] Get:6 https://download.docker.com/linux/ubuntu jammy/stable amd64 docker-buildx-plugin amd64 0.11.2-1~ubuntu.22.04~jammy [28.2 MB] Get:7 https://download.docker.com/linux/ubuntu jammy/stable amd64 docker-ce-cli amd64 5:24.0.7-1~ubuntu.22.04~jammy [13.3 MB] Get:8 https://download.docker.com/linux/ubuntu jammy/stable amd64 docker-ce amd64 5:24.0.7-1~ubuntu.22.04~jammy [22.6 MB] Get:9 https://download.docker.com/linux/ubuntu jammy/stable amd64 docker-ce-rootless-extras amd64 5:24.0.7-1~ubuntu.22.04~jammy [9030 kB] Get:10 https://download.docker.com/linux/ubuntu jammy/stable amd64 docker-compose-plugin amd64 2.21.0-1~ubuntu.22.04~jammy [11.9 MB] debconf: unable to initialize frontend: Dialog debconf: (Dialog frontend will not work on a dumb terminal, an emacs shell buffer, or without a controlling terminal.) debconf: falling back to frontend: Readline debconf: unable to initialize frontend: Readline debconf: (This frontend requires a controlling tty.) debconf: falling back to frontend: Teletype dpkg-preconfigure: unable to re-open stdin: Fetched 115 MB in 1s (81.2 MB/s) Selecting previously unselected package pigz. (Reading database ... (Reading database ... 5% (Reading database ... 10% (Reading database ... 15% (Reading database ... 20% (Reading database ... 25% (Reading database ... 30% (Reading database ... 35% (Reading database ... 40% (Reading database ... 45% (Reading database ... 50% (Reading database ... 
55% (Reading database ... 60% (Reading database ... 65% (Reading database ... 70% (Reading database ... 75% (Reading database ... 80% (Reading database ... 85% (Reading database ... 90% (Reading database ... 95% (Reading database ... 100% (Reading database ... 62324 files and directories currently installed.) Preparing to unpack .../0-pigz_2.6-1_amd64.deb ... Unpacking pigz (2.6-1) ... Selecting previously unselected package containerd.io. Preparing to unpack .../1-containerd.io_1.6.26-1_amd64.deb ... Unpacking containerd.io (1.6.26-1) ... Selecting previously unselected package docker-buildx-plugin. Preparing to unpack .../2-docker-buildx-plugin_0.11.2-1~ubuntu.22.04~jammy_amd64.deb ... Unpacking docker-buildx-plugin (0.11.2-1~ubuntu.22.04~jammy) ... Selecting previously unselected package docker-ce-cli. Preparing to unpack .../3-docker-ce-cli_5%3a24.0.7-1~ubuntu.22.04~jammy_amd64.deb ... Unpacking docker-ce-cli (5:24.0.7-1~ubuntu.22.04~jammy) ... Selecting previously unselected package docker-ce. Preparing to unpack .../4-docker-ce_5%3a24.0.7-1~ubuntu.22.04~jammy_amd64.deb ... Unpacking docker-ce (5:24.0.7-1~ubuntu.22.04~jammy) ... Selecting previously unselected package docker-ce-rootless-extras. Preparing to unpack .../5-docker-ce-rootless-extras_5%3a24.0.7-1~ubuntu.22.04~jammy_amd64.deb ... Unpacking docker-ce-rootless-extras (5:24.0.7-1~ubuntu.22.04~jammy) ... Selecting previously unselected package docker-compose-plugin. Preparing to unpack .../6-docker-compose-plugin_2.21.0-1~ubuntu.22.04~jammy_amd64.deb ... Unpacking docker-compose-plugin (2.21.0-1~ubuntu.22.04~jammy) ... Selecting previously unselected package libltdl7:amd64. Preparing to unpack .../7-libltdl7_2.4.6-15build2_amd64.deb ... Unpacking libltdl7:amd64 (2.4.6-15build2) ... Selecting previously unselected package libslirp0:amd64. Preparing to unpack .../8-libslirp0_4.6.1-1build1_amd64.deb ... Unpacking libslirp0:amd64 (4.6.1-1build1) ... Selecting previously unselected package slirp4netns. Preparing to unpack .../9-slirp4netns_1.0.1-2_amd64.deb ... Unpacking slirp4netns (1.0.1-2) ... Setting up docker-buildx-plugin (0.11.2-1~ubuntu.22.04~jammy) ... Setting up containerd.io (1.6.26-1) ... Created symlink /etc/systemd/system/multi-user.target.wants/containerd.service → /lib/systemd/system/containerd.service. Setting up docker-compose-plugin (2.21.0-1~ubuntu.22.04~jammy) ... Setting up libltdl7:amd64 (2.4.6-15build2) ... Setting up docker-ce-cli (5:24.0.7-1~ubuntu.22.04~jammy) ... Setting up libslirp0:amd64 (4.6.1-1build1) ... Setting up pigz (2.6-1) ... Setting up docker-ce-rootless-extras (5:24.0.7-1~ubuntu.22.04~jammy) ... Setting up slirp4netns (1.0.1-2) ... Setting up docker-ce (5:24.0.7-1~ubuntu.22.04~jammy) ... Created symlink /etc/systemd/system/multi-user.target.wants/docker.service → /lib/systemd/system/docker.service. Created symlink /etc/systemd/system/sockets.target.wants/docker.socket → /lib/systemd/system/docker.socket. Processing triggers for man-db (2.10.2-1) ... Processing triggers for libc-bin (2.35-0ubuntu3.5) ... Running kernel seems to be up-to-date. No services need to be restarted. No containers need to be restarted. No user sessions are running outdated binaries. No VM guests are running outdated hypervisor (qemu) binaries on this host. Adding user to group 'docker' Configuring containerd to expose CRI, use systemd cgroup and use DOCKER_PROXY_URL as registry mirror Testing Docker CE installation ... 
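The "Adding user to group 'docker'" and "Configuring containerd to expose CRI, use systemd cgroup ..." messages above usually correspond to steps like the following sketch; the exact commands are assumptions, and since DOCKER_PROXY_URL is empty in this run no registry mirror gets configured. The docker version and hello-world output that follows is the installer's smoke test of the resulting setup.

    # Allow the non-root user to use the Docker CLI (takes effect on next login)
    sudo usermod -aG docker "$USER"

    # Regenerate containerd's default config so the CRI plugin is enabled,
    # then switch the runc runtime to the systemd cgroup driver expected by kubelet
    containerd config default | sudo tee /etc/containerd/config.toml > /dev/null
    sudo sed -i 's/SystemdCgroup = false/SystemdCgroup = true/' /etc/containerd/config.toml
    sudo systemctl restart containerd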
Client: Docker Engine - Community Version: 24.0.7 API version: 1.43 Go version: go1.20.10 Git commit: afdd53b Built: Thu Oct 26 09:07:41 2023 OS/Arch: linux/amd64 Context: default Server: Docker Engine - Community Engine: Version: 24.0.7 API version: 1.43 (minimum version 1.12) Go version: go1.20.10 Git commit: 311b9ff Built: Thu Oct 26 09:07:41 2023 OS/Arch: linux/amd64 Experimental: false containerd: Version: 1.6.26 GitCommit: 3dd1e886e55dd695541fdcd67420c2888645a495 runc: Version: 1.1.10 GitCommit: v1.1.10-0-g18a0cb0 docker-init: Version: 0.19.0 GitCommit: de40ad0 Unable to find image 'hello-world:latest' locally latest: Pulling from library/hello-world c1ec31eb5944: Pulling fs layer c1ec31eb5944: Download complete c1ec31eb5944: Pull complete Digest: sha256:ac69084025c660510933cca701f615283cdbb3aa0963188770b54c31c8962493 Status: Downloaded newer image for hello-world:latest Hello from Docker! This message shows that your installation appears to be working correctly. To generate this message, Docker took the following steps: 1. The Docker client contacted the Docker daemon. 2. The Docker daemon pulled the "hello-world" image from the Docker Hub. (amd64) 3. The Docker daemon created a new container from that image which runs the executable that produces the output you are currently reading. 4. The Docker daemon streamed that output to the Docker client, which sent it to your terminal. To try something more ambitious, you can run an Ubuntu container with: $ docker run -it ubuntu bash Share images, automate workflows, and more with a free Docker ID: https://hub.docker.com/ For more examples and ideas, visit: https://docs.docker.com/get-started/ ... Docker CE installation done Track docker_ce docker_ce_ok: https://osm.etsi.org/InstallLog.php?&installation_id=1704025241-XLdS97dvDXZdtpEe&local_ts=1704025270&event=docker_ce&operation=docker_ce_ok&value=&comment=&tags= Creating folders for installation DEBUG_INSTALL= DEFAULT_IP=172.21.23.11 OSM_DEVOPS=/usr/share/osm-devops OSM_CLUSTER_WORK_DIR=/etc/osm INSTALL_K8S_MONITOR= HOME=/home/ubuntu Hit:1 http://azure.archive.ubuntu.com/ubuntu jammy InRelease Hit:2 http://azure.archive.ubuntu.com/ubuntu jammy-updates InRelease Hit:3 http://azure.archive.ubuntu.com/ubuntu jammy-backports InRelease Hit:4 http://azure.archive.ubuntu.com/ubuntu jammy-security InRelease Hit:5 https://download.docker.com/linux/ubuntu jammy InRelease Hit:6 https://osm-download.etsi.org/repository/osm/debian/testing-daily testing InRelease Reading package lists... W: https://osm-download.etsi.org/repository/osm/debian/testing-daily/dists/testing/InRelease: Key is stored in legacy trusted.gpg keyring (/etc/apt/trusted.gpg), see the DEPRECATION section in apt-key(8) for details. W: Conflicting distribution: https://osm-download.etsi.org/repository/osm/debian/testing-daily testing InRelease (expected testing but got ) Reading package lists... Building dependency tree... Reading state information... ca-certificates is already the newest version (20230311ubuntu0.22.04.1). curl is already the newest version (7.81.0-1ubuntu1.15). curl set to manually installed. apt-transport-https is already the newest version (2.4.11). 0 upgraded, 0 newly installed, 0 to remove and 3 not upgraded. 
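The next line registers the upstream Kubernetes apt repository (kubernetes-xenial, since deprecated in favour of pkgs.k8s.io). A sketch of how such a repository entry is typically created; the key URL is an assumption, while the keyring path matches the signed-by path shown below:

    # Download the repository signing key and register the package source
    sudo mkdir -p /etc/apt/keyrings
    sudo curl -fsSLo /etc/apt/keyrings/kubernetes-archive-keyring.gpg \
        https://packages.cloud.google.com/apt/doc/apt-key.gpg
    echo "deb [signed-by=/etc/apt/keyrings/kubernetes-archive-keyring.gpg] https://apt.kubernetes.io/ kubernetes-xenial main" \
        | sudo tee /etc/apt/sources.list.d/kubernetes.list
    sudo apt-get update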
deb [signed-by=/etc/apt/keyrings/kubernetes-archive-keyring.gpg] https://apt.kubernetes.io/ kubernetes-xenial main Hit:1 http://azure.archive.ubuntu.com/ubuntu jammy InRelease Hit:2 http://azure.archive.ubuntu.com/ubuntu jammy-updates InRelease Hit:3 http://azure.archive.ubuntu.com/ubuntu jammy-backports InRelease Hit:4 http://azure.archive.ubuntu.com/ubuntu jammy-security InRelease Hit:5 https://download.docker.com/linux/ubuntu jammy InRelease Hit:6 https://osm-download.etsi.org/repository/osm/debian/testing-daily testing InRelease Get:7 https://packages.cloud.google.com/apt kubernetes-xenial InRelease [8993 B] Ign:8 https://packages.cloud.google.com/apt kubernetes-xenial/main amd64 Packages Get:8 https://apt.kubernetes.io kubernetes-xenial/main amd64 Packages [69.9 kB] Fetched 78.9 kB in 1s (57.2 kB/s) Reading package lists... W: https://osm-download.etsi.org/repository/osm/debian/testing-daily/dists/testing/InRelease: Key is stored in legacy trusted.gpg keyring (/etc/apt/trusted.gpg), see the DEPRECATION section in apt-key(8) for details. W: Conflicting distribution: https://osm-download.etsi.org/repository/osm/debian/testing-daily testing InRelease (expected testing but got ) Installing Kubernetes Packages ... Reading package lists... Building dependency tree... Reading state information... The following additional packages will be installed: conntrack cri-tools ebtables kubernetes-cni socat The following NEW packages will be installed: conntrack cri-tools ebtables kubeadm kubectl kubelet kubernetes-cni socat 0 upgraded, 8 newly installed, 0 to remove and 3 not upgraded. Need to get 87.3 MB of archives. After this operation, 341 MB of additional disk space will be used. Get:1 http://azure.archive.ubuntu.com/ubuntu jammy/main amd64 conntrack amd64 1:1.4.6-2build2 [33.5 kB] Get:3 http://azure.archive.ubuntu.com/ubuntu jammy/main amd64 ebtables amd64 2.0.11-4build2 [84.9 kB] Get:2 https://packages.cloud.google.com/apt kubernetes-xenial/main amd64 cri-tools amd64 1.26.0-00 [18.9 MB] Get:5 http://azure.archive.ubuntu.com/ubuntu jammy/main amd64 socat amd64 1.7.4.1-3ubuntu4 [349 kB] Get:4 https://packages.cloud.google.com/apt kubernetes-xenial/main amd64 kubernetes-cni amd64 1.2.0-00 [27.6 MB] Get:6 https://packages.cloud.google.com/apt kubernetes-xenial/main amd64 kubelet amd64 1.26.5-00 [20.5 MB] Get:7 https://packages.cloud.google.com/apt kubernetes-xenial/main amd64 kubectl amd64 1.26.5-00 [10.1 MB] Get:8 https://packages.cloud.google.com/apt kubernetes-xenial/main amd64 kubeadm amd64 1.26.5-00 [9742 kB] debconf: unable to initialize frontend: Dialog debconf: (Dialog frontend will not work on a dumb terminal, an emacs shell buffer, or without a controlling terminal.) debconf: falling back to frontend: Readline debconf: unable to initialize frontend: Readline debconf: (This frontend requires a controlling tty.) debconf: falling back to frontend: Teletype dpkg-preconfigure: unable to re-open stdin: Fetched 87.3 MB in 7s (13.1 MB/s) Selecting previously unselected package conntrack. (Reading database ... (Reading database ... 5% (Reading database ... 10% (Reading database ... 15% (Reading database ... 20% (Reading database ... 25% (Reading database ... 30% (Reading database ... 35% (Reading database ... 40% (Reading database ... 45% (Reading database ... 50% (Reading database ... 55% (Reading database ... 60% (Reading database ... 65% (Reading database ... 70% (Reading database ... 75% (Reading database ... 80% (Reading database ... 85% (Reading database ... 90% (Reading database ... 
95% (Reading database ... 100% (Reading database ... 62591 files and directories currently installed.) Preparing to unpack .../0-conntrack_1%3a1.4.6-2build2_amd64.deb ... Unpacking conntrack (1:1.4.6-2build2) ... Selecting previously unselected package cri-tools. Preparing to unpack .../1-cri-tools_1.26.0-00_amd64.deb ... Unpacking cri-tools (1.26.0-00) ... Selecting previously unselected package ebtables. Preparing to unpack .../2-ebtables_2.0.11-4build2_amd64.deb ... Unpacking ebtables (2.0.11-4build2) ... Selecting previously unselected package kubernetes-cni. Preparing to unpack .../3-kubernetes-cni_1.2.0-00_amd64.deb ... Unpacking kubernetes-cni (1.2.0-00) ... Selecting previously unselected package socat. Preparing to unpack .../4-socat_1.7.4.1-3ubuntu4_amd64.deb ... Unpacking socat (1.7.4.1-3ubuntu4) ... Selecting previously unselected package kubelet. Preparing to unpack .../5-kubelet_1.26.5-00_amd64.deb ... Unpacking kubelet (1.26.5-00) ... Selecting previously unselected package kubectl. Preparing to unpack .../6-kubectl_1.26.5-00_amd64.deb ... Unpacking kubectl (1.26.5-00) ... Selecting previously unselected package kubeadm. Preparing to unpack .../7-kubeadm_1.26.5-00_amd64.deb ... Unpacking kubeadm (1.26.5-00) ... Setting up conntrack (1:1.4.6-2build2) ... Setting up kubectl (1.26.5-00) ... Setting up ebtables (2.0.11-4build2) ... Setting up socat (1.7.4.1-3ubuntu4) ... Setting up cri-tools (1.26.0-00) ... Setting up kubernetes-cni (1.2.0-00) ... Setting up kubelet (1.26.5-00) ... Created symlink /etc/systemd/system/multi-user.target.wants/kubelet.service → /lib/systemd/system/kubelet.service. Setting up kubeadm (1.26.5-00) ... Processing triggers for man-db (2.10.2-1) ... Running kernel seems to be up-to-date. No services need to be restarted. No containers need to be restarted. No user sessions are running outdated binaries. No VM guests are running outdated hypervisor (qemu) binaries on this host. kubelet set on hold. kubeadm set on hold. kubectl set on hold. 
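Holding the packages at the end ("kubelet set on hold." and so on) keeps unattended upgrades from moving the cluster off the installed minor version. A sketch of the install-and-pin step, using the versions recorded in the log:

    # Install matching kubelet/kubeadm/kubectl versions, then pin them
    sudo apt-get install -y kubelet=1.26.5-00 kubeadm=1.26.5-00 kubectl=1.26.5-00
    sudo apt-mark hold kubelet kubeadm kubectl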
Track k8scluster install_k8s_ok: https://osm.etsi.org/InstallLog.php?&installation_id=1704025241-XLdS97dvDXZdtpEe&local_ts=1704025292&event=k8scluster&operation=install_k8s_ok&value=&comment=&tags= Track k8scluster kubelet: https://osm.etsi.org/InstallLog.php?&installation_id=1704025241-XLdS97dvDXZdtpEe&local_ts=1704025292&event=k8scluster&operation=kubelet&value=1.26.5-00&comment=&tags= Track k8scluster kubeadm: https://osm.etsi.org/InstallLog.php?&installation_id=1704025241-XLdS97dvDXZdtpEe&local_ts=1704025292&event=k8scluster&operation=kubeadm&value=1.26.5-00&comment=&tags= Track k8scluster kubectl: https://osm.etsi.org/InstallLog.php?&installation_id=1704025241-XLdS97dvDXZdtpEe&local_ts=1704025292&event=k8scluster&operation=kubectl&value=1.26.5-00&comment=&tags= I1231 12:21:33.427303 5669 version.go:256] remote version is much newer: v1.29.0; falling back to: stable-1.26 [init] Using Kubernetes version: v1.26.12 [preflight] Running pre-flight checks [preflight] Would pull the required images (like 'kubeadm config images pull') [certs] Using certificateDir folder "/etc/kubernetes/tmp/kubeadm-init-dryrun3783892854" [certs] Generating "ca" certificate and key [certs] Generating "apiserver" certificate and key [certs] apiserver serving cert is signed for DNS names [kubernetes kubernetes.default kubernetes.default.svc kubernetes.default.svc.cluster.local osmtest202312311216] and IPs [10.96.0.1 172.21.23.11] [certs] Generating "apiserver-kubelet-client" certificate and key [certs] Generating "front-proxy-ca" certificate and key [certs] Generating "front-proxy-client" certificate and key [certs] Generating "etcd/ca" certificate and key [certs] Generating "etcd/server" certificate and key [certs] etcd/server serving cert is signed for DNS names [localhost osmtest202312311216] and IPs [172.21.23.11 127.0.0.1 ::1] [certs] Generating "etcd/peer" certificate and key [certs] etcd/peer serving cert is signed for DNS names [localhost osmtest202312311216] and IPs [172.21.23.11 127.0.0.1 ::1] [certs] Generating "etcd/healthcheck-client" certificate and key [certs] Generating "apiserver-etcd-client" certificate and key [certs] Generating "sa" key and public key [kubeconfig] Using kubeconfig folder "/etc/kubernetes/tmp/kubeadm-init-dryrun3783892854" [kubeconfig] Writing "admin.conf" kubeconfig file [kubeconfig] Writing "kubelet.conf" kubeconfig file [kubeconfig] Writing "controller-manager.conf" kubeconfig file [kubeconfig] Writing "scheduler.conf" kubeconfig file [kubelet-start] Writing kubelet environment file with flags to file "/etc/kubernetes/tmp/kubeadm-init-dryrun3783892854/kubeadm-flags.env" [kubelet-start] Writing kubelet configuration to file "/etc/kubernetes/tmp/kubeadm-init-dryrun3783892854/config.yaml" [control-plane] Using manifest folder "/etc/kubernetes/tmp/kubeadm-init-dryrun3783892854" [control-plane] Creating static Pod manifest for "kube-apiserver" [control-plane] Creating static Pod manifest for "kube-controller-manager" [control-plane] Creating static Pod manifest for "kube-scheduler" [etcd] Would ensure that "/var/lib/etcd" directory is present [etcd] Creating static Pod manifest for local etcd in "/etc/kubernetes/tmp/kubeadm-init-dryrun3783892854" [dryrun] Wrote certificates, kubeconfig files and control plane manifests to the "/etc/kubernetes/tmp/kubeadm-init-dryrun3783892854" directory [dryrun] The certificates or kubeconfig files would not be printed due to their sensitive nature [dryrun] Please examine the "/etc/kubernetes/tmp/kubeadm-init-dryrun3783892854" directory for 
details about what would be written [dryrun] Would write file "/etc/kubernetes/manifests/kube-apiserver.yaml" with content: apiVersion: v1 kind: Pod metadata: annotations: kubeadm.kubernetes.io/kube-apiserver.advertise-address.endpoint: 172.21.23.11:6443 creationTimestamp: null labels: component: kube-apiserver tier: control-plane name: kube-apiserver namespace: kube-system spec: containers: - command: - kube-apiserver - --advertise-address=172.21.23.11 - --allow-privileged=true - --authorization-mode=Node,RBAC - --client-ca-file=/etc/kubernetes/pki/ca.crt - --enable-admission-plugins=NodeRestriction - --enable-bootstrap-token-auth=true - --etcd-cafile=/etc/kubernetes/pki/etcd/ca.crt - --etcd-certfile=/etc/kubernetes/pki/apiserver-etcd-client.crt - --etcd-keyfile=/etc/kubernetes/pki/apiserver-etcd-client.key - --etcd-servers=https://127.0.0.1:2379 - --kubelet-client-certificate=/etc/kubernetes/pki/apiserver-kubelet-client.crt - --kubelet-client-key=/etc/kubernetes/pki/apiserver-kubelet-client.key - --kubelet-preferred-address-types=InternalIP,ExternalIP,Hostname - --proxy-client-cert-file=/etc/kubernetes/pki/front-proxy-client.crt - --proxy-client-key-file=/etc/kubernetes/pki/front-proxy-client.key - --requestheader-allowed-names=front-proxy-client - --requestheader-client-ca-file=/etc/kubernetes/pki/front-proxy-ca.crt - --requestheader-extra-headers-prefix=X-Remote-Extra- - --requestheader-group-headers=X-Remote-Group - --requestheader-username-headers=X-Remote-User - --secure-port=6443 - --service-account-issuer=https://kubernetes.default.svc.cluster.local - --service-account-key-file=/etc/kubernetes/pki/sa.pub - --service-account-signing-key-file=/etc/kubernetes/pki/sa.key - --service-cluster-ip-range=10.96.0.0/12 - --service-node-port-range=80-32767 - --tls-cert-file=/etc/kubernetes/pki/apiserver.crt - --tls-private-key-file=/etc/kubernetes/pki/apiserver.key image: registry.k8s.io/kube-apiserver:v1.26.12 imagePullPolicy: IfNotPresent livenessProbe: failureThreshold: 8 httpGet: host: 172.21.23.11 path: /livez port: 6443 scheme: HTTPS initialDelaySeconds: 10 periodSeconds: 10 timeoutSeconds: 15 name: kube-apiserver readinessProbe: failureThreshold: 3 httpGet: host: 172.21.23.11 path: /readyz port: 6443 scheme: HTTPS periodSeconds: 1 timeoutSeconds: 15 resources: requests: cpu: 250m startupProbe: failureThreshold: 24 httpGet: host: 172.21.23.11 path: /livez port: 6443 scheme: HTTPS initialDelaySeconds: 10 periodSeconds: 10 timeoutSeconds: 15 volumeMounts: - mountPath: /etc/ssl/certs name: ca-certs readOnly: true - mountPath: /etc/ca-certificates name: etc-ca-certificates readOnly: true - mountPath: /etc/kubernetes/pki name: k8s-certs readOnly: true - mountPath: /usr/local/share/ca-certificates name: usr-local-share-ca-certificates readOnly: true - mountPath: /usr/share/ca-certificates name: usr-share-ca-certificates readOnly: true hostNetwork: true priorityClassName: system-node-critical securityContext: seccompProfile: type: RuntimeDefault volumes: - hostPath: path: /etc/ssl/certs type: DirectoryOrCreate name: ca-certs - hostPath: path: /etc/ca-certificates type: DirectoryOrCreate name: etc-ca-certificates - hostPath: path: /etc/kubernetes/pki type: DirectoryOrCreate name: k8s-certs - hostPath: path: /usr/local/share/ca-certificates type: DirectoryOrCreate name: usr-local-share-ca-certificates - hostPath: path: /usr/share/ca-certificates type: DirectoryOrCreate name: usr-share-ca-certificates status: {} [dryrun] Would write file "/etc/kubernetes/manifests/kube-controller-manager.yaml" 
with content: apiVersion: v1 kind: Pod metadata: creationTimestamp: null labels: component: kube-controller-manager tier: control-plane name: kube-controller-manager namespace: kube-system spec: containers: - command: - kube-controller-manager - --allocate-node-cidrs=true - --authentication-kubeconfig=/etc/kubernetes/controller-manager.conf - --authorization-kubeconfig=/etc/kubernetes/controller-manager.conf - --bind-address=127.0.0.1 - --client-ca-file=/etc/kubernetes/pki/ca.crt - --cluster-cidr=10.244.0.0/16 - --cluster-name=kubernetes - --cluster-signing-cert-file=/etc/kubernetes/pki/ca.crt - --cluster-signing-key-file=/etc/kubernetes/pki/ca.key - --controllers=*,bootstrapsigner,tokencleaner - --kubeconfig=/etc/kubernetes/controller-manager.conf - --leader-elect=true - --requestheader-client-ca-file=/etc/kubernetes/pki/front-proxy-ca.crt - --root-ca-file=/etc/kubernetes/pki/ca.crt - --service-account-private-key-file=/etc/kubernetes/pki/sa.key - --service-cluster-ip-range=10.96.0.0/12 - --use-service-account-credentials=true image: registry.k8s.io/kube-controller-manager:v1.26.12 imagePullPolicy: IfNotPresent livenessProbe: failureThreshold: 8 httpGet: host: 127.0.0.1 path: /healthz port: 10257 scheme: HTTPS initialDelaySeconds: 10 periodSeconds: 10 timeoutSeconds: 15 name: kube-controller-manager resources: requests: cpu: 200m startupProbe: failureThreshold: 24 httpGet: host: 127.0.0.1 path: /healthz port: 10257 scheme: HTTPS initialDelaySeconds: 10 periodSeconds: 10 timeoutSeconds: 15 volumeMounts: - mountPath: /etc/ssl/certs name: ca-certs readOnly: true - mountPath: /etc/ca-certificates name: etc-ca-certificates readOnly: true - mountPath: /usr/libexec/kubernetes/kubelet-plugins/volume/exec name: flexvolume-dir - mountPath: /etc/kubernetes/pki name: k8s-certs readOnly: true - mountPath: /etc/kubernetes/controller-manager.conf name: kubeconfig readOnly: true - mountPath: /usr/local/share/ca-certificates name: usr-local-share-ca-certificates readOnly: true - mountPath: /usr/share/ca-certificates name: usr-share-ca-certificates readOnly: true hostNetwork: true priorityClassName: system-node-critical securityContext: seccompProfile: type: RuntimeDefault volumes: - hostPath: path: /etc/ssl/certs type: DirectoryOrCreate name: ca-certs - hostPath: path: /etc/ca-certificates type: DirectoryOrCreate name: etc-ca-certificates - hostPath: path: /usr/libexec/kubernetes/kubelet-plugins/volume/exec type: DirectoryOrCreate name: flexvolume-dir - hostPath: path: /etc/kubernetes/pki type: DirectoryOrCreate name: k8s-certs - hostPath: path: /etc/kubernetes/controller-manager.conf type: FileOrCreate name: kubeconfig - hostPath: path: /usr/local/share/ca-certificates type: DirectoryOrCreate name: usr-local-share-ca-certificates - hostPath: path: /usr/share/ca-certificates type: DirectoryOrCreate name: usr-share-ca-certificates status: {} [dryrun] Would write file "/etc/kubernetes/manifests/kube-scheduler.yaml" with content: apiVersion: v1 kind: Pod metadata: creationTimestamp: null labels: component: kube-scheduler tier: control-plane name: kube-scheduler namespace: kube-system spec: containers: - command: - kube-scheduler - --authentication-kubeconfig=/etc/kubernetes/scheduler.conf - --authorization-kubeconfig=/etc/kubernetes/scheduler.conf - --bind-address=127.0.0.1 - --kubeconfig=/etc/kubernetes/scheduler.conf - --leader-elect=true image: registry.k8s.io/kube-scheduler:v1.26.12 imagePullPolicy: IfNotPresent livenessProbe: failureThreshold: 8 httpGet: host: 127.0.0.1 path: /healthz port: 10259 
scheme: HTTPS initialDelaySeconds: 10 periodSeconds: 10 timeoutSeconds: 15 name: kube-scheduler resources: requests: cpu: 100m startupProbe: failureThreshold: 24 httpGet: host: 127.0.0.1 path: /healthz port: 10259 scheme: HTTPS initialDelaySeconds: 10 periodSeconds: 10 timeoutSeconds: 15 volumeMounts: - mountPath: /etc/kubernetes/scheduler.conf name: kubeconfig readOnly: true hostNetwork: true priorityClassName: system-node-critical securityContext: seccompProfile: type: RuntimeDefault volumes: - hostPath: path: /etc/kubernetes/scheduler.conf type: FileOrCreate name: kubeconfig status: {} [dryrun] Would write file "/var/lib/kubelet/config.yaml" with content: apiVersion: kubelet.config.k8s.io/v1beta1 authentication: anonymous: enabled: false webhook: cacheTTL: 0s enabled: true x509: clientCAFile: /etc/kubernetes/pki/ca.crt authorization: mode: Webhook webhook: cacheAuthorizedTTL: 0s cacheUnauthorizedTTL: 0s cgroupDriver: systemd clusterDNS: - 10.96.0.10 clusterDomain: cluster.local cpuManagerReconcilePeriod: 0s evictionPressureTransitionPeriod: 0s fileCheckFrequency: 0s healthzBindAddress: 127.0.0.1 healthzPort: 10248 httpCheckFrequency: 0s imageMinimumGCAge: 0s kind: KubeletConfiguration logging: flushFrequency: 0 options: json: infoBufferSize: "0" verbosity: 0 memorySwap: {} nodeStatusReportFrequency: 0s nodeStatusUpdateFrequency: 0s resolvConf: /run/systemd/resolve/resolv.conf rotateCertificates: true runtimeRequestTimeout: 0s shutdownGracePeriod: 0s shutdownGracePeriodCriticalPods: 0s staticPodPath: /etc/kubernetes/manifests streamingConnectionIdleTimeout: 0s syncFrequency: 0s volumeStatsAggPeriod: 0s [dryrun] Would write file "/var/lib/kubelet/kubeadm-flags.env" with content: KUBELET_KUBEADM_ARGS="--container-runtime-endpoint=unix:///var/run/containerd/containerd.sock --pod-infra-container-image=registry.k8s.io/pause:3.9" [wait-control-plane] Waiting for the kubelet to boot up the control plane as static Pods from directory "/etc/kubernetes/tmp/kubeadm-init-dryrun3783892854". 
This can take up to 4m0s [upload-config] Storing the configuration used in ConfigMap "kubeadm-config" in the "kube-system" Namespace [dryrun] Would perform action CREATE on resource "configmaps" in API group "core/v1" [dryrun] Attached object: apiVersion: v1 data: ClusterConfiguration: | apiServer: extraArgs: authorization-mode: Node,RBAC service-node-port-range: 80-32767 timeoutForControlPlane: 4m0s apiVersion: kubeadm.k8s.io/v1beta3 certificatesDir: /etc/kubernetes/pki clusterName: kubernetes controllerManager: {} dns: {} etcd: local: dataDir: /var/lib/etcd imageRepository: registry.k8s.io kind: ClusterConfiguration kubernetesVersion: v1.26.12 networking: dnsDomain: cluster.local podSubnet: 10.244.0.0/16 serviceSubnet: 10.96.0.0/12 scheduler: {} kind: ConfigMap metadata: creationTimestamp: null name: kubeadm-config namespace: kube-system [dryrun] Would perform action CREATE on resource "roles" in API group "rbac.authorization.k8s.io/v1" [dryrun] Attached object: apiVersion: rbac.authorization.k8s.io/v1 kind: Role metadata: creationTimestamp: null name: kubeadm:nodes-kubeadm-config namespace: kube-system rules: - apiGroups: - "" resourceNames: - kubeadm-config resources: - configmaps verbs: - get [dryrun] Would perform action CREATE on resource "rolebindings" in API group "rbac.authorization.k8s.io/v1" [dryrun] Attached object: apiVersion: rbac.authorization.k8s.io/v1 kind: RoleBinding metadata: creationTimestamp: null name: kubeadm:nodes-kubeadm-config namespace: kube-system roleRef: apiGroup: rbac.authorization.k8s.io kind: Role name: kubeadm:nodes-kubeadm-config subjects: - kind: Group name: system:bootstrappers:kubeadm:default-node-token - kind: Group name: system:nodes [kubelet] Creating a ConfigMap "kubelet-config" in namespace kube-system with the configuration for the kubelets in the cluster [dryrun] Would perform action CREATE on resource "configmaps" in API group "core/v1" [dryrun] Attached object: apiVersion: v1 data: kubelet: | apiVersion: kubelet.config.k8s.io/v1beta1 authentication: anonymous: enabled: false webhook: cacheTTL: 0s enabled: true x509: clientCAFile: /etc/kubernetes/pki/ca.crt authorization: mode: Webhook webhook: cacheAuthorizedTTL: 0s cacheUnauthorizedTTL: 0s cgroupDriver: systemd clusterDNS: - 10.96.0.10 clusterDomain: cluster.local cpuManagerReconcilePeriod: 0s evictionPressureTransitionPeriod: 0s fileCheckFrequency: 0s healthzBindAddress: 127.0.0.1 healthzPort: 10248 httpCheckFrequency: 0s imageMinimumGCAge: 0s kind: KubeletConfiguration logging: flushFrequency: 0 options: json: infoBufferSize: "0" verbosity: 0 memorySwap: {} nodeStatusReportFrequency: 0s nodeStatusUpdateFrequency: 0s resolvConf: /run/systemd/resolve/resolv.conf rotateCertificates: true runtimeRequestTimeout: 0s shutdownGracePeriod: 0s shutdownGracePeriodCriticalPods: 0s staticPodPath: /etc/kubernetes/manifests streamingConnectionIdleTimeout: 0s syncFrequency: 0s volumeStatsAggPeriod: 0s kind: ConfigMap metadata: annotations: kubeadm.kubernetes.io/component-config.hash: sha256:000a5dd81630f64bd6f310899359dfb9e8818fc8fe011b7fdc0ec73783a42452 creationTimestamp: null name: kubelet-config namespace: kube-system [dryrun] Would perform action CREATE on resource "roles" in API group "rbac.authorization.k8s.io/v1" [dryrun] Attached object: apiVersion: rbac.authorization.k8s.io/v1 kind: Role metadata: creationTimestamp: null name: kubeadm:kubelet-config namespace: kube-system rules: - apiGroups: - "" resourceNames: - kubelet-config resources: - configmaps verbs: - get [dryrun] Would perform action 
CREATE on resource "rolebindings" in API group "rbac.authorization.k8s.io/v1" [dryrun] Attached object: apiVersion: rbac.authorization.k8s.io/v1 kind: RoleBinding metadata: creationTimestamp: null name: kubeadm:kubelet-config namespace: kube-system roleRef: apiGroup: rbac.authorization.k8s.io kind: Role name: kubeadm:kubelet-config subjects: - kind: Group name: system:nodes - kind: Group name: system:bootstrappers:kubeadm:default-node-token [dryrun] Would perform action GET on resource "nodes" in API group "core/v1" [dryrun] Resource name: "osmtest202312311216" [dryrun] Would perform action PATCH on resource "nodes" in API group "core/v1" [dryrun] Resource name: "osmtest202312311216" [dryrun] Attached patch: {"metadata":{"annotations":{"kubeadm.alpha.kubernetes.io/cri-socket":"unix:///var/run/containerd/containerd.sock"}}} [upload-certs] Skipping phase. Please see --upload-certs [mark-control-plane] Marking the node osmtest202312311216 as control-plane by adding the labels: [node-role.kubernetes.io/control-plane node.kubernetes.io/exclude-from-external-load-balancers] [mark-control-plane] Marking the node osmtest202312311216 as control-plane by adding the taints [node-role.kubernetes.io/control-plane:NoSchedule] [dryrun] Would perform action GET on resource "nodes" in API group "core/v1" [dryrun] Resource name: "osmtest202312311216" [dryrun] Would perform action PATCH on resource "nodes" in API group "core/v1" [dryrun] Resource name: "osmtest202312311216" [dryrun] Attached patch: {"metadata":{"labels":{"node-role.kubernetes.io/control-plane":"","node.kubernetes.io/exclude-from-external-load-balancers":""}},"spec":{"taints":[{"effect":"NoSchedule","key":"node-role.kubernetes.io/control-plane"}]}} [bootstrap-token] Using token: ueeus9.ojpz4xplz2in4poq [bootstrap-token] Configuring bootstrap tokens, cluster-info ConfigMap, RBAC Roles [dryrun] Would perform action GET on resource "secrets" in API group "core/v1" [dryrun] Resource name: "bootstrap-token-ueeus9" [dryrun] Would perform action CREATE on resource "secrets" in API group "core/v1" [dryrun] Attached object: apiVersion: v1 data: auth-extra-groups: c3lzdGVtOmJvb3RzdHJhcHBlcnM6a3ViZWFkbTpkZWZhdWx0LW5vZGUtdG9rZW4= expiration: MjAyNC0wMS0wMVQxMjoyMTozNlo= token-id: dWVldXM5 token-secret: b2pwejR4cGx6MmluNHBvcQ== usage-bootstrap-authentication: dHJ1ZQ== usage-bootstrap-signing: dHJ1ZQ== kind: Secret metadata: creationTimestamp: null name: bootstrap-token-ueeus9 namespace: kube-system type: bootstrap.kubernetes.io/token [bootstrap-token] Configured RBAC rules to allow Node Bootstrap tokens to get nodes [dryrun] Would perform action CREATE on resource "clusterroles" in API group "rbac.authorization.k8s.io/v1" [dryrun] Attached object: apiVersion: rbac.authorization.k8s.io/v1 kind: ClusterRole metadata: creationTimestamp: null name: kubeadm:get-nodes namespace: kube-system rules: - apiGroups: - "" resources: - nodes verbs: - get [dryrun] Would perform action CREATE on resource "clusterrolebindings" in API group "rbac.authorization.k8s.io/v1" [dryrun] Attached object: apiVersion: rbac.authorization.k8s.io/v1 kind: ClusterRoleBinding metadata: creationTimestamp: null name: kubeadm:get-nodes namespace: kube-system roleRef: apiGroup: rbac.authorization.k8s.io kind: ClusterRole name: kubeadm:get-nodes subjects: - kind: Group name: system:bootstrappers:kubeadm:default-node-token [bootstrap-token] Configured RBAC rules to allow Node Bootstrap tokens to post CSRs in order for nodes to get long term certificate credentials [dryrun] Would perform action 
CREATE on resource "clusterrolebindings" in API group "rbac.authorization.k8s.io/v1" [dryrun] Attached object: apiVersion: rbac.authorization.k8s.io/v1 kind: ClusterRoleBinding metadata: creationTimestamp: null name: kubeadm:kubelet-bootstrap roleRef: apiGroup: rbac.authorization.k8s.io kind: ClusterRole name: system:node-bootstrapper subjects: - kind: Group name: system:bootstrappers:kubeadm:default-node-token [bootstrap-token] Configured RBAC rules to allow the csrapprover controller automatically approve CSRs from a Node Bootstrap Token [dryrun] Would perform action CREATE on resource "clusterrolebindings" in API group "rbac.authorization.k8s.io/v1" [dryrun] Attached object: apiVersion: rbac.authorization.k8s.io/v1 kind: ClusterRoleBinding metadata: creationTimestamp: null name: kubeadm:node-autoapprove-bootstrap roleRef: apiGroup: rbac.authorization.k8s.io kind: ClusterRole name: system:certificates.k8s.io:certificatesigningrequests:nodeclient subjects: - kind: Group name: system:bootstrappers:kubeadm:default-node-token [bootstrap-token] Configured RBAC rules to allow certificate rotation for all node client certificates in the cluster [dryrun] Would perform action CREATE on resource "clusterrolebindings" in API group "rbac.authorization.k8s.io/v1" [dryrun] Attached object: apiVersion: rbac.authorization.k8s.io/v1 kind: ClusterRoleBinding metadata: creationTimestamp: null name: kubeadm:node-autoapprove-certificate-rotation roleRef: apiGroup: rbac.authorization.k8s.io kind: ClusterRole name: system:certificates.k8s.io:certificatesigningrequests:selfnodeclient subjects: - kind: Group name: system:nodes [bootstrap-token] Creating the "cluster-info" ConfigMap in the "kube-public" namespace [dryrun] Would perform action CREATE on resource "configmaps" in API group "core/v1" [dryrun] Attached object: apiVersion: v1 data: kubeconfig: | apiVersion: v1 clusters: - cluster: certificate-authority-data: LS0tLS1CRUdJTiBDRVJUSUZJQ0FURS0tLS0tCk1JSUMvakNDQWVhZ0F3SUJBZ0lCQURBTkJna3Foa2lHOXcwQkFRc0ZBREFWTVJNd0VRWURWUVFERXdwcmRXSmwKY201bGRHVnpNQjRYRFRJek1USXpNVEV5TWpFek0xb1hEVE16TVRJeU9ERXlNakV6TTFvd0ZURVRNQkVHQTFVRQpBeE1LYTNWaVpYSnVaWFJsY3pDQ0FTSXdEUVlKS29aSWh2Y05BUUVCQlFBRGdnRVBBRENDQVFvQ2dnRUJBTzF4CnNCdEU0b0UxeTNwTEdOU0xBWnUvWTR3b29FL2EvL1NPZ2RLK0UrU0NPMGUwY1F4RW1uRzFkMlJrenNwampVTmgKVnVjTE5VNE5EcnBCaXNTRVRiSWJIT2NWbDIwZDZSV0gwYlpacEUrZlcrZVFsSFhhUTlRVXhKbUhWeXU4R1Z4MgpOaVpQQXltK3hkK04zUDN5elVZU3ZnZmJEWXl6MS9zbERhL0xIMmNUekxNbzV0Z2JxVURJN1puNVJYck12d3kzCnc1RjVPS3ZjR20wQUs2WVNjSGRYbGxOckw4Wi9mVGl2amlBUmNxR0YvYys5RkRoSTVsZUdzemdNQnZjL3RVeGkKQTVucTg3ZlQ1ZkdvRHFGeldtSzZpWXphYzZtMERuWHEyUXdLVkhxZjBOM2ZNQXdVcERHa2pmMitCQ0xtQVltVwpNaDN0ZHlFTXdNWnNHNkdQVVdzQ0F3RUFBYU5aTUZjd0RnWURWUjBQQVFIL0JBUURBZ0trTUE4R0ExVWRFd0VCCi93UUZNQU1CQWY4d0hRWURWUjBPQkJZRUZMQUlkYkNkWVlwRjR0Rm1Zd3huQjg1NEVYUERNQlVHQTFVZEVRUU8KTUF5Q0NtdDFZbVZ5Ym1WMFpYTXdEUVlKS29aSWh2Y05BUUVMQlFBRGdnRUJBTkpyTGJ4aktOdWJGYUtQVkM2RAo3OElhMnNCbk9LWlRoZUQrOVQ2ZzBBd0t4ZXpNNW5IbzRMK2ZNNERpTGJyTzRIelYybGRpYWhQYldUNzdUTjkzCmNsUG1HZVp6T3JEcjJMRXdrSXFycUFLNzJ0WG41YTRPNGxEa0Z3RHF4bDdHakpJNlB4Q3ZyUGVtYnlEeFlSWUoKY1QybXdtMXBaSnBTNy8zN3JnZGYvaDVxZUlrM3hoR2czVExhYXBkMGFlUmFxdXRjY3U3VjQycC92VU1Sb0ZVMQp2cUJJSTN4RXNWejB0U293dmdWSGNUNHV1aTBxNmZONk1pZFJRVmpQaE94WFpNTEZBb3l3bFllMWRhRlo4bWR1Cnk5T0lJbm45c0UvWjYxZ05lVjFURDEyUzZKeGRsTjZKdHZMSWdxY2ZCcStxdkcrTm1jUExpTUxiekxaY2xxTDEKblFRPQotLS0tLUVORCBDRVJUSUZJQ0FURS0tLS0tCg== server: https://172.21.23.11:6443 name: "" contexts: null current-context: "" kind: Config preferences: {} users: null kind: ConfigMap metadata: 
creationTimestamp: null name: cluster-info namespace: kube-public [dryrun] Would perform action CREATE on resource "roles" in API group "rbac.authorization.k8s.io/v1" [dryrun] Attached object: apiVersion: rbac.authorization.k8s.io/v1 kind: Role metadata: creationTimestamp: null name: kubeadm:bootstrap-signer-clusterinfo namespace: kube-public rules: - apiGroups: - "" resourceNames: - cluster-info resources: - configmaps verbs: - get [dryrun] Would perform action CREATE on resource "rolebindings" in API group "rbac.authorization.k8s.io/v1" [dryrun] Attached object: apiVersion: rbac.authorization.k8s.io/v1 kind: RoleBinding metadata: creationTimestamp: null name: kubeadm:bootstrap-signer-clusterinfo namespace: kube-public roleRef: apiGroup: rbac.authorization.k8s.io kind: Role name: kubeadm:bootstrap-signer-clusterinfo subjects: - kind: User name: system:anonymous [dryrun] Would perform action LIST on resource "deployments" in API group "apps/v1" [dryrun] Would perform action GET on resource "configmaps" in API group "core/v1" [dryrun] Resource name: "coredns" [dryrun] Would perform action CREATE on resource "configmaps" in API group "core/v1" [dryrun] Attached object: apiVersion: v1 data: Corefile: | .:53 { errors health { lameduck 5s } ready kubernetes cluster.local in-addr.arpa ip6.arpa { pods insecure fallthrough in-addr.arpa ip6.arpa ttl 30 } prometheus :9153 forward . /etc/resolv.conf { max_concurrent 1000 } cache 30 loop reload loadbalance } kind: ConfigMap metadata: creationTimestamp: null name: coredns namespace: kube-system [dryrun] Would perform action CREATE on resource "clusterroles" in API group "rbac.authorization.k8s.io/v1" [dryrun] Attached object: apiVersion: rbac.authorization.k8s.io/v1 kind: ClusterRole metadata: creationTimestamp: null name: system:coredns rules: - apiGroups: - "" resources: - endpoints - services - pods - namespaces verbs: - list - watch - apiGroups: - "" resources: - nodes verbs: - get - apiGroups: - discovery.k8s.io resources: - endpointslices verbs: - list - watch [dryrun] Would perform action CREATE on resource "clusterrolebindings" in API group "rbac.authorization.k8s.io/v1" [dryrun] Attached object: apiVersion: rbac.authorization.k8s.io/v1 kind: ClusterRoleBinding metadata: creationTimestamp: null name: system:coredns roleRef: apiGroup: rbac.authorization.k8s.io kind: ClusterRole name: system:coredns subjects: - kind: ServiceAccount name: coredns namespace: kube-system [dryrun] Would perform action CREATE on resource "serviceaccounts" in API group "core/v1" [dryrun] Attached object: apiVersion: v1 kind: ServiceAccount metadata: creationTimestamp: null name: coredns namespace: kube-system [dryrun] Would perform action CREATE on resource "deployments" in API group "apps/v1" [dryrun] Attached object: apiVersion: apps/v1 kind: Deployment metadata: creationTimestamp: null labels: k8s-app: kube-dns name: coredns namespace: kube-system spec: replicas: 2 selector: matchLabels: k8s-app: kube-dns strategy: rollingUpdate: maxUnavailable: 1 type: RollingUpdate template: metadata: creationTimestamp: null labels: k8s-app: kube-dns spec: affinity: podAntiAffinity: preferredDuringSchedulingIgnoredDuringExecution: - podAffinityTerm: labelSelector: matchExpressions: - key: k8s-app operator: In values: - kube-dns topologyKey: kubernetes.io/hostname weight: 100 containers: - args: - -conf - /etc/coredns/Corefile image: registry.k8s.io/coredns/coredns:v1.9.3 imagePullPolicy: IfNotPresent livenessProbe: failureThreshold: 5 httpGet: path: /health port: 8080 scheme: HTTP 
initialDelaySeconds: 60 successThreshold: 1 timeoutSeconds: 5 name: coredns ports: - containerPort: 53 name: dns protocol: UDP - containerPort: 53 name: dns-tcp protocol: TCP - containerPort: 9153 name: metrics protocol: TCP readinessProbe: httpGet: path: /ready port: 8181 scheme: HTTP resources: limits: memory: 170Mi requests: cpu: 100m memory: 70Mi securityContext: allowPrivilegeEscalation: false capabilities: add: - NET_BIND_SERVICE drop: - all readOnlyRootFilesystem: true volumeMounts: - mountPath: /etc/coredns name: config-volume readOnly: true dnsPolicy: Default nodeSelector: kubernetes.io/os: linux priorityClassName: system-cluster-critical serviceAccountName: coredns tolerations: - key: CriticalAddonsOnly operator: Exists - effect: NoSchedule key: node-role.kubernetes.io/control-plane volumes: - configMap: items: - key: Corefile path: Corefile name: coredns name: config-volume status: {} [dryrun] Would perform action CREATE on resource "services" in API group "core/v1" [dryrun] Attached object: apiVersion: v1 kind: Service metadata: annotations: prometheus.io/port: "9153" prometheus.io/scrape: "true" creationTimestamp: null labels: k8s-app: kube-dns kubernetes.io/cluster-service: "true" kubernetes.io/name: CoreDNS name: kube-dns namespace: kube-system resourceVersion: "0" spec: clusterIP: 10.96.0.10 ports: - name: dns port: 53 protocol: UDP targetPort: 53 - name: dns-tcp port: 53 protocol: TCP targetPort: 53 - name: metrics port: 9153 protocol: TCP targetPort: 9153 selector: k8s-app: kube-dns status: loadBalancer: {} [addons] Applied essential addon: CoreDNS [dryrun] Would perform action CREATE on resource "configmaps" in API group "core/v1" [dryrun] Attached object: apiVersion: v1 data: config.conf: |- apiVersion: kubeproxy.config.k8s.io/v1alpha1 bindAddress: 0.0.0.0 bindAddressHardFail: false clientConnection: acceptContentTypes: "" burst: 0 contentType: "" kubeconfig: /var/lib/kube-proxy/kubeconfig.conf qps: 0 clusterCIDR: 10.244.0.0/16 configSyncPeriod: 0s conntrack: maxPerCore: null min: null tcpCloseWaitTimeout: null tcpEstablishedTimeout: null detectLocal: bridgeInterface: "" interfaceNamePrefix: "" detectLocalMode: "" enableProfiling: false healthzBindAddress: "" hostnameOverride: "" iptables: localhostNodePorts: null masqueradeAll: false masqueradeBit: null minSyncPeriod: 0s syncPeriod: 0s ipvs: excludeCIDRs: null minSyncPeriod: 0s scheduler: "" strictARP: false syncPeriod: 0s tcpFinTimeout: 0s tcpTimeout: 0s udpTimeout: 0s kind: KubeProxyConfiguration metricsBindAddress: "" mode: "" nodePortAddresses: null oomScoreAdj: null portRange: "" showHiddenMetricsForVersion: "" winkernel: enableDSR: false forwardHealthCheckVip: false networkName: "" rootHnsEndpointName: "" sourceVip: "" kubeconfig.conf: |- apiVersion: v1 kind: Config clusters: - cluster: certificate-authority: /var/run/secrets/kubernetes.io/serviceaccount/ca.crt server: https://172.21.23.11:6443 name: default contexts: - context: cluster: default namespace: default user: default name: default current-context: default users: - name: default user: tokenFile: /var/run/secrets/kubernetes.io/serviceaccount/token kind: ConfigMap metadata: annotations: kubeadm.kubernetes.io/component-config.hash: sha256:49070dca166611c9a463535f86effe210177eb9fe4c00c1943429079a0be10c0 creationTimestamp: null labels: app: kube-proxy name: kube-proxy namespace: kube-system [dryrun] Would perform action CREATE on resource "daemonsets" in API group "apps/v1" [dryrun] Attached object: apiVersion: apps/v1 kind: DaemonSet metadata: 
creationTimestamp: null labels: k8s-app: kube-proxy name: kube-proxy namespace: kube-system spec: selector: matchLabels: k8s-app: kube-proxy template: metadata: creationTimestamp: null labels: k8s-app: kube-proxy spec: containers: - command: - /usr/local/bin/kube-proxy - --config=/var/lib/kube-proxy/config.conf - --hostname-override=$(NODE_NAME) env: - name: NODE_NAME valueFrom: fieldRef: fieldPath: spec.nodeName image: registry.k8s.io/kube-proxy:v1.26.12 imagePullPolicy: IfNotPresent name: kube-proxy resources: {} securityContext: privileged: true volumeMounts: - mountPath: /var/lib/kube-proxy name: kube-proxy - mountPath: /run/xtables.lock name: xtables-lock - mountPath: /lib/modules name: lib-modules readOnly: true hostNetwork: true nodeSelector: kubernetes.io/os: linux priorityClassName: system-node-critical serviceAccountName: kube-proxy tolerations: - operator: Exists volumes: - configMap: name: kube-proxy name: kube-proxy - hostPath: path: /run/xtables.lock type: FileOrCreate name: xtables-lock - hostPath: path: /lib/modules name: lib-modules updateStrategy: type: RollingUpdate status: currentNumberScheduled: 0 desiredNumberScheduled: 0 numberMisscheduled: 0 numberReady: 0 [dryrun] Would perform action CREATE on resource "serviceaccounts" in API group "core/v1" [dryrun] Attached object: apiVersion: v1 kind: ServiceAccount metadata: creationTimestamp: null name: kube-proxy namespace: kube-system [dryrun] Would perform action CREATE on resource "clusterrolebindings" in API group "rbac.authorization.k8s.io/v1" [dryrun] Attached object: apiVersion: rbac.authorization.k8s.io/v1 kind: ClusterRoleBinding metadata: creationTimestamp: null name: kubeadm:node-proxier roleRef: apiGroup: rbac.authorization.k8s.io kind: ClusterRole name: system:node-proxier subjects: - kind: ServiceAccount name: kube-proxy namespace: kube-system [dryrun] Would perform action CREATE on resource "roles" in API group "rbac.authorization.k8s.io/v1" [dryrun] Attached object: apiVersion: rbac.authorization.k8s.io/v1 kind: Role metadata: creationTimestamp: null name: kube-proxy namespace: kube-system rules: - apiGroups: - "" resourceNames: - kube-proxy resources: - configmaps verbs: - get [dryrun] Would perform action CREATE on resource "rolebindings" in API group "rbac.authorization.k8s.io/v1" [dryrun] Attached object: apiVersion: rbac.authorization.k8s.io/v1 kind: RoleBinding metadata: creationTimestamp: null name: kube-proxy namespace: kube-system roleRef: apiGroup: rbac.authorization.k8s.io kind: Role name: kube-proxy subjects: - kind: Group name: system:bootstrappers:kubeadm:default-node-token [addons] Applied essential addon: kube-proxy Your Kubernetes control-plane has initialized successfully! To start using your cluster, you need to run the following as a regular user: mkdir -p $HOME/.kube sudo cp -i /etc/kubernetes/tmp/kubeadm-init-dryrun3783892854/admin.conf $HOME/.kube/config sudo chown $(id -u):$(id -g) $HOME/.kube/config Alternatively, if you are the root user, you can run: export KUBECONFIG=/etc/kubernetes/admin.conf You should now deploy a pod network to the cluster. 
Run "kubectl apply -f [podnetwork].yaml" with one of the options listed at: https://kubernetes.io/docs/concepts/cluster-administration/addons/ Then you can join any number of worker nodes by running the following on each as root: kubeadm join 172.21.23.11:6443 --token ueeus9.ojpz4xplz2in4poq \ --discovery-token-ca-cert-hash sha256:a70bb3789fad0b5db914116f148c1997c7c994e22f23b405aab4b8378f0c61dc I1231 12:21:36.921544 5697 version.go:256] remote version is much newer: v1.29.0; falling back to: stable-1.26 [init] Using Kubernetes version: v1.26.12 [preflight] Running pre-flight checks [preflight] Pulling images required for setting up a Kubernetes cluster [preflight] This might take a minute or two, depending on the speed of your internet connection [preflight] You can also perform this action in beforehand using 'kubeadm config images pull' [certs] Using certificateDir folder "/etc/kubernetes/pki" [certs] Generating "ca" certificate and key [certs] Generating "apiserver" certificate and key [certs] apiserver serving cert is signed for DNS names [kubernetes kubernetes.default kubernetes.default.svc kubernetes.default.svc.cluster.local osmtest202312311216] and IPs [10.96.0.1 172.21.23.11] [certs] Generating "apiserver-kubelet-client" certificate and key [certs] Generating "front-proxy-ca" certificate and key [certs] Generating "front-proxy-client" certificate and key [certs] Generating "etcd/ca" certificate and key [certs] Generating "etcd/server" certificate and key [certs] etcd/server serving cert is signed for DNS names [localhost osmtest202312311216] and IPs [172.21.23.11 127.0.0.1 ::1] [certs] Generating "etcd/peer" certificate and key [certs] etcd/peer serving cert is signed for DNS names [localhost osmtest202312311216] and IPs [172.21.23.11 127.0.0.1 ::1] [certs] Generating "etcd/healthcheck-client" certificate and key [certs] Generating "apiserver-etcd-client" certificate and key [certs] Generating "sa" key and public key [kubeconfig] Using kubeconfig folder "/etc/kubernetes" [kubeconfig] Writing "admin.conf" kubeconfig file [kubeconfig] Writing "kubelet.conf" kubeconfig file [kubeconfig] Writing "controller-manager.conf" kubeconfig file [kubeconfig] Writing "scheduler.conf" kubeconfig file [kubelet-start] Writing kubelet environment file with flags to file "/var/lib/kubelet/kubeadm-flags.env" [kubelet-start] Writing kubelet configuration to file "/var/lib/kubelet/config.yaml" [kubelet-start] Starting the kubelet [control-plane] Using manifest folder "/etc/kubernetes/manifests" [control-plane] Creating static Pod manifest for "kube-apiserver" [control-plane] Creating static Pod manifest for "kube-controller-manager" [control-plane] Creating static Pod manifest for "kube-scheduler" [etcd] Creating static Pod manifest for local etcd in "/etc/kubernetes/manifests" [wait-control-plane] Waiting for the kubelet to boot up the control plane as static Pods from directory "/etc/kubernetes/manifests". This can take up to 4m0s [apiclient] All control plane components are healthy after 5.502688 seconds [upload-config] Storing the configuration used in ConfigMap "kubeadm-config" in the "kube-system" Namespace [kubelet] Creating a ConfigMap "kubelet-config" in namespace kube-system with the configuration for the kubelets in the cluster [upload-certs] Skipping phase. 
Please see --upload-certs [mark-control-plane] Marking the node osmtest202312311216 as control-plane by adding the labels: [node-role.kubernetes.io/control-plane node.kubernetes.io/exclude-from-external-load-balancers] [mark-control-plane] Marking the node osmtest202312311216 as control-plane by adding the taints [node-role.kubernetes.io/control-plane:NoSchedule] [bootstrap-token] Using token: z07d56.s5trvh17c6gjrgo5 [bootstrap-token] Configuring bootstrap tokens, cluster-info ConfigMap, RBAC Roles [bootstrap-token] Configured RBAC rules to allow Node Bootstrap tokens to get nodes [bootstrap-token] Configured RBAC rules to allow Node Bootstrap tokens to post CSRs in order for nodes to get long term certificate credentials [bootstrap-token] Configured RBAC rules to allow the csrapprover controller automatically approve CSRs from a Node Bootstrap Token [bootstrap-token] Configured RBAC rules to allow certificate rotation for all node client certificates in the cluster [bootstrap-token] Creating the "cluster-info" ConfigMap in the "kube-public" namespace [kubelet-finalize] Updating "/etc/kubernetes/kubelet.conf" to point to a rotatable kubelet client certificate and key [addons] Applied essential addon: CoreDNS [addons] Applied essential addon: kube-proxy Your Kubernetes control-plane has initialized successfully! To start using your cluster, you need to run the following as a regular user: mkdir -p $HOME/.kube sudo cp -i /etc/kubernetes/admin.conf $HOME/.kube/config sudo chown $(id -u):$(id -g) $HOME/.kube/config Alternatively, if you are the root user, you can run: export KUBECONFIG=/etc/kubernetes/admin.conf You should now deploy a pod network to the cluster. Run "kubectl apply -f [podnetwork].yaml" with one of the options listed at: https://kubernetes.io/docs/concepts/cluster-administration/addons/ Then you can join any number of worker nodes by running the following on each as root: kubeadm join 172.21.23.11:6443 --token z07d56.s5trvh17c6gjrgo5 \ --discovery-token-ca-cert-hash sha256:6f4483023737c422982d1009445de62611f5bc766ca011607f5cedc9532262d3 Reading existing namespaces NAME STATUS AGE default Active 7s kube-node-lease Active 8s kube-public Active 8s kube-system Active 8s Track k8scluster init_k8s_ok: https://osm.etsi.org/InstallLog.php?&installation_id=1704025241-XLdS97dvDXZdtpEe&local_ts=1704025326&event=k8scluster&operation=init_k8s_ok&value=&comment=&tags= --2023-12-31 12:22:06-- https://raw.githubusercontent.com/coreos/flannel/master/Documentation/kube-flannel.yml Resolving raw.githubusercontent.com (raw.githubusercontent.com)... 185.199.110.133, 185.199.111.133, 185.199.108.133, ... Connecting to raw.githubusercontent.com (raw.githubusercontent.com)|185.199.110.133|:443... connected. HTTP request sent, awaiting response... 200 OK Length: 4398 (4.3K) [text/plain] Saving to: ‘/tmp/flannel.AQkAW3/kube-flannel.yml’ 0K .... 100% 32.2M=0s 2023-12-31 12:22:07 (32.2 MB/s) - ‘/tmp/flannel.AQkAW3/kube-flannel.yml’ saved [4398/4398] namespace/kube-flannel created clusterrole.rbac.authorization.k8s.io/flannel created clusterrolebinding.rbac.authorization.k8s.io/flannel created serviceaccount/flannel created configmap/kube-flannel-cfg created daemonset.apps/kube-flannel-ds created node/osmtest202312311216 untainted LAST SEEN TYPE REASON OBJECT MESSAGE 20s Normal Starting node/osmtest202312311216 Starting kubelet. 
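The block above covers the real kubeadm init plus the follow-up steps whose effects are visible in the log: copying the admin kubeconfig, applying the downloaded Flannel manifest, and untainting the single control-plane node so workloads can schedule on it. Roughly, using the paths and node name from this run (the installer's exact invocations may differ):

    # Make kubectl usable for the admin user, as suggested by kubeadm
    mkdir -p "$HOME/.kube"
    sudo cp -i /etc/kubernetes/admin.conf "$HOME/.kube/config"
    sudo chown "$(id -u):$(id -g)" "$HOME/.kube/config"

    # Deploy the Flannel CNI (10.244.0.0/16 pod network) and allow pods on the control-plane node
    kubectl apply -f /tmp/flannel.AQkAW3/kube-flannel.yml
    kubectl taint nodes osmtest202312311216 node-role.kubernetes.io/control-plane:NoSchedule-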
20s Warning InvalidDiskCapacity node/osmtest202312311216 invalid capacity 0 on image filesystem 20s Normal NodeAllocatableEnforced node/osmtest202312311216 Updated Node Allocatable limit across pods 20s Normal NodeHasSufficientMemory node/osmtest202312311216 Node osmtest202312311216 status is now: NodeHasSufficientMemory 20s Normal NodeHasNoDiskPressure node/osmtest202312311216 Node osmtest202312311216 status is now: NodeHasNoDiskPressure 20s Normal NodeHasSufficientPID node/osmtest202312311216 Node osmtest202312311216 status is now: NodeHasSufficientPID 4s Normal RegisteredNode node/osmtest202312311216 Node osmtest202312311216 event: Registered Node osmtest202312311216 in Controller 2s Normal Starting node/osmtest202312311216 Track k8scluster k8s_ready_before_helm: https://osm.etsi.org/InstallLog.php?&installation_id=1704025241-XLdS97dvDXZdtpEe&local_ts=1704025341&event=k8scluster&operation=k8s_ready_before_helm&value=&comment=&tags= Deleting existing namespace osm: kubectl delete ns osm Helm3 is not installed, installing ... % Total % Received % Xferd Average Speed Time Time Time Current Dload Upload Total Spent Left Speed 0 0 0 0 0 0 0 0 --:--:-- --:--:-- --:--:-- 0 100 14.7M 100 14.7M 0 0 20.5M 0 --:--:-- --:--:-- --:--:-- 20.6M linux-amd64/ linux-amd64/LICENSE linux-amd64/README.md linux-amd64/helm version.BuildInfo{Version:"v3.11.3", GitCommit:"323249351482b3bbfc9f5004f65d400aa70f9ae7", GitTreeState:"clean", GoVersion:"go1.20.3"} "stable" has been added to your repositories Hang tight while we grab the latest from your chart repositories... ...Successfully got an update from the "stable" chart repository Update Complete. ⎈Happy Helming!⎈ Track k8scluster install_helm_ok: https://osm.etsi.org/InstallLog.php?&installation_id=1704025241-XLdS97dvDXZdtpEe&local_ts=1704025344&event=k8scluster&operation=install_helm_ok&value=&comment=&tags= Installing OpenEBS "openebs" has been added to your repositories Hang tight while we grab the latest from your chart repositories... ...Successfully got an update from the "openebs" chart repository ...Successfully got an update from the "stable" chart repository Update Complete. ⎈Happy Helming!⎈ NAME: openebs LAST DEPLOYED: Sun Dec 31 12:22:28 2023 NAMESPACE: openebs STATUS: deployed REVISION: 1 TEST SUITE: None NOTES: Successfully installed OpenEBS. Check the status by running: kubectl get pods -n openebs The default values will install NDM and enable OpenEBS hostpath and device storage engines along with their default StorageClasses. Use `kubectl get sc` to see the list of installed OpenEBS StorageClasses. **Note**: If you are upgrading from the older helm chart that was using cStor and Jiva (non-csi) volumes, you will have to run the following command to include the older provisioners: helm upgrade openebs openebs/openebs \ --namespace openebs \ --set legacy.enabled=true \ --reuse-values For other engines, you will need to perform a few more additional steps to enable the engine, configure the engines (e.g. creating pools) and create StorageClasses. For example, cStor can be enabled using commands like: helm upgrade openebs openebs/openebs \ --namespace openebs \ --set cstor.enabled=true \ --reuse-values For more information, - view the online documentation at https://openebs.io/docs or - connect with an active community on Kubernetes slack #openebs channel. 
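The Helm and OpenEBS steps above can be approximated as follows; the download URL and chart repository are assumptions consistent with the versions in the log (Helm v3.11.3, chart openebs-3.7.0), not necessarily the installer's exact commands:

    # Install Helm 3 from the upstream release tarball
    curl -fsSL https://get.helm.sh/helm-v3.11.3-linux-amd64.tar.gz | tar -xzf - -C /tmp
    sudo mv /tmp/linux-amd64/helm /usr/local/bin/helm

    # Install OpenEBS to provide hostpath-backed StorageClasses
    helm repo add openebs https://openebs.github.io/charts
    helm repo update
    helm install openebs openebs/openebs --namespace openebs --create-namespace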
NAME NAMESPACE REVISION UPDATED STATUS CHART APP VERSION openebs openebs 1 2023-12-31 12:22:28.73553724 +0000 UTC deployed openebs-3.7.0 3.7.0 Waiting for storageclass Storageclass available storageclass.storage.k8s.io/openebs-hostpath patched Track k8scluster k8s_storageclass_ok: https://osm.etsi.org/InstallLog.php?&installation_id=1704025241-XLdS97dvDXZdtpEe&local_ts=1704025349&event=k8scluster&operation=k8s_storageclass_ok&value=&comment=&tags= Installing MetalLB "metallb" has been added to your repositories Hang tight while we grab the latest from your chart repositories... ...Successfully got an update from the "metallb" chart repository ...Successfully got an update from the "openebs" chart repository ...Successfully got an update from the "stable" chart repository Update Complete. ⎈Happy Helming!⎈ NAME: metallb LAST DEPLOYED: Sun Dec 31 12:22:31 2023 NAMESPACE: metallb-system STATUS: deployed REVISION: 1 TEST SUITE: None NOTES: MetalLB is now running in the cluster. Now you can configure it via its CRs. Please refer to the metallb official docs on how to use the CRs. Track k8scluster k8s_metallb_ok: https://osm.etsi.org/InstallLog.php?&installation_id=1704025241-XLdS97dvDXZdtpEe&local_ts=1704025351&event=k8scluster&operation=k8s_metallb_ok&value=&comment=&tags= Installing cert-manager "jetstack" has been added to your repositories Hang tight while we grab the latest from your chart repositories... ...Successfully got an update from the "metallb" chart repository ...Successfully got an update from the "openebs" chart repository ...Successfully got an update from the "jetstack" chart repository ...Successfully got an update from the "stable" chart repository Update Complete. ⎈Happy Helming!⎈ NAME: cert-manager LAST DEPLOYED: Sun Dec 31 12:22:33 2023 NAMESPACE: cert-manager STATUS: deployed REVISION: 1 TEST SUITE: None NOTES: cert-manager v1.9.1 has been deployed successfully! In order to begin issuing certificates, you will need to set up a ClusterIssuer or Issuer resource (for example, by creating a 'letsencrypt-staging' issuer). More information on the different types of issuers and how to configure them can be found in our documentation: https://cert-manager.io/docs/configuration/ For information on how to configure cert-manager to automatically provision Certificates for Ingress resources, take a look at the `ingress-shim` documentation: https://cert-manager.io/docs/usage/ingress/ Track k8scluster k8s_certmanager_ok: https://osm.etsi.org/InstallLog.php?&installation_id=1704025241-XLdS97dvDXZdtpEe&local_ts=1704025386&event=k8scluster&operation=k8s_certmanager_ok&value=&comment=&tags= Bootstraping... 1 checks of 100 MetalLB: Waiting for 1 of 2 pods to be ready: metallb-speaker-76lq6 0/4 CertManager: Waiting for 1 of 1 pods to be ready: No resources Bootstraping... 2 checks of 100 MetalLB: Waiting for 1 of 2 pods to be ready: metallb-speaker-76lq6 3/4 CertManager: Waiting for 1 of 1 pods to be ready: No resources Bootstraping... 3 checks of 100 MetalLB: Waiting for 1 of 2 pods to be ready: metallb-speaker-76lq6 3/4 CertManager: Waiting for 1 of 1 pods to be ready: No resources Bootstraping... 4 checks of 100 MetalLB: Waiting for 1 of 2 pods to be ready: metallb-speaker-76lq6 3/4 CertManager: Waiting for 1 of 1 pods to be ready: No resources Bootstraping... 5 checks of 100 MetalLB: Waiting for 1 of 2 pods to be ready: metallb-speaker-76lq6 3/4 CertManager: Waiting for 1 of 1 pods to be ready: No resources Bootstraping... 
6 checks of 100
MetalLB: Waiting for 1 of 2 pods to be ready: metallb-speaker-76lq6 3/4
CertManager: Waiting for 1 of 1 pods to be ready: No resources
===> Successful checks: 1/10
===> Successful checks: 2/10
===> Successful checks: 3/10
===> Successful checks: 4/10
===> Successful checks: 5/10
===> Successful checks: 6/10
===> Successful checks: 7/10
===> Successful checks: 8/10
===> Successful checks: 9/10
===> Successful checks: 10/10
K8S CLUSTER IS READY
Track k8scluster k8s_ready_ok: https://osm.etsi.org/InstallLog.php?&installation_id=1704025241-XLdS97dvDXZdtpEe&local_ts=1704025421&event=k8scluster&operation=k8s_ready_ok&value=&comment=&tags=
Creating IP address pool manifest: /etc/osm/metallb-ipaddrpool.yaml
apiVersion: metallb.io/v1beta1
kind: IPAddressPool
metadata:
  name: first-pool
  namespace: metallb-system
spec:
  addresses:
  - 172.21.23.11/32
Applying IP address pool manifest: kubectl apply -f /etc/osm/metallb-ipaddrpool.yaml
ipaddresspool.metallb.io/first-pool created
Track k8scluster k8scluster_ok: https://osm.etsi.org/InstallLog.php?&installation_id=1704025241-XLdS97dvDXZdtpEe&local_ts=1704025422&event=k8scluster&operation=k8scluster_ok&value=&comment=&tags=
Track juju juju_ok: https://osm.etsi.org/InstallLog.php?&installation_id=1704025241-XLdS97dvDXZdtpEe&local_ts=1704025422&event=juju&operation=juju_ok&value=&comment=&tags=
Track docker_images docker_images_ok: https://osm.etsi.org/InstallLog.php?&installation_id=1704025241-XLdS97dvDXZdtpEe&local_ts=1704025422&event=docker_images&operation=docker_images_ok&value=&comment=&tags=
DEBUG_INSTALL=
OSM_DEVOPS=/usr/share/osm-devops
OSM_DOCKER_TAG=testing-daily
OSM_HELM_WORK_DIR=/etc/osm/helm
"bitnami" has been added to your repositories
Hang tight while we grab the latest from your chart repositories...
...Successfully got an update from the "metallb" chart repository
...Successfully got an update from the "openebs" chart repository
...Successfully got an update from the "jetstack" chart repository
...Successfully got an update from the "bitnami" chart repository
...Successfully got an update from the "stable" chart repository
Update Complete. ⎈Happy Helming!⎈
Release "mongodb-k8s" does not exist. Installing it now.
NAME: mongodb-k8s LAST DEPLOYED: Sun Dec 31 12:23:46 2023 NAMESPACE: osm STATUS: deployed REVISION: 1 TEST SUITE: None NOTES: CHART NAME: mongodb CHART VERSION: 13.9.4 APP VERSION: 6.0.5 ** Please be patient while the chart is being deployed ** MongoDB® can be accessed on the following DNS name(s) and ports from within your cluster: mongodb-k8s-0.mongodb-k8s-headless.osm.svc.cluster.local:27017 mongodb-k8s-1.mongodb-k8s-headless.osm.svc.cluster.local:27017 To connect to your database, create a MongoDB® client container: kubectl run --namespace osm mongodb-k8s-client --rm --tty -i --restart='Never' --env="MONGODB_ROOT_PASSWORD=$MONGODB_ROOT_PASSWORD" --image docker.io/bitnami/mongodb:6.0.5-debian-11-r4 --command -- bash Then, run the following command: mongosh admin --host "mongodb-k8s-0.mongodb-k8s-headless.osm.svc.cluster.local:27017,mongodb-k8s-1.mongodb-k8s-headless.osm.svc.cluster.local:27017" Track deploy_osm deploy_mongodb_ok: https://osm.etsi.org/InstallLog.php?&installation_id=1704025241-XLdS97dvDXZdtpEe&local_ts=1704025427&event=deploy_osm&operation=deploy_mongodb_ok&value=&comment=&tags= helm install -n osm --create-namespace osm /usr/share/osm-devops/installers/helm/osm --set global.image.repositoryBase=opensourcemano --set mysql.dbHostPath=/var/lib/osm/osm NAME: osm LAST DEPLOYED: Sun Dec 31 12:23:47 2023 NAMESPACE: osm STATUS: deployed REVISION: 1 TEST SUITE: None NOTES: 1. Get the application URL by running these commands: export NODE_PORT=$(kubectl get --namespace osm -o jsonpath="{.spec.ports[0].nodePort}" services nbi) export NODE_IP=$(kubectl get nodes --namespace osm -o jsonpath="{.items[0].status.addresses[0].address}") echo http://$NODE_IP:$NODE_PORT USER-SUPPLIED VALUES: global: image: repositoryBase: opensourcemano mysql: dbHostPath: /var/lib/osm/osm Track deploy_osm deploy_osm_services_k8s_ok: https://osm.etsi.org/InstallLog.php?&installation_id=1704025241-XLdS97dvDXZdtpEe&local_ts=1704025428&event=deploy_osm&operation=deploy_osm_services_k8s_ok&value=&comment=&tags= DEBUG_INSTALL= OSM_DEVOPS=/usr/share/osm-devops OSM_DOCKER_TAG=testing-daily OSM_HELM_WORK_DIR=/etc/osm/helm Updating Helm values file helm/values/airflow-values.yaml to use defaultAirflowTag: testing-daily Updating Helm values file helm/values/airflow-values.yaml to use defaultAirflowRepository: opensourcemano/airflow "apache-airflow" has been added to your repositories Hang tight while we grab the latest from your chart repositories... ...Successfully got an update from the "metallb" chart repository ...Successfully got an update from the "apache-airflow" chart repository ...Successfully got an update from the "openebs" chart repository ...Successfully got an update from the "jetstack" chart repository ...Successfully got an update from the "bitnami" chart repository ...Successfully got an update from the "stable" chart repository Update Complete. ⎈Happy Helming!⎈ Release "airflow" does not exist. Installing it now. NAME: airflow LAST DEPLOYED: Sun Dec 31 12:23:50 2023 NAMESPACE: osm STATUS: deployed REVISION: 1 TEST SUITE: None NOTES: Thank you for installing Apache Airflow 2.5.3! Your release is named airflow. 
You can now access your dashboard(s) by executing the following command(s) and visiting the corresponding port at localhost in your browser: Airflow Webserver: kubectl port-forward svc/airflow-webserver 8080:8080 --namespace osm Default Webserver (Airflow UI) Login credentials: username: admin password: admin Default Postgres connection credentials: username: postgres password: postgres port: 5432 You can get Fernet Key value by running the following: echo Fernet Key: $(kubectl get secret --namespace osm airflow-fernet-key -o jsonpath="{.data.fernet-key}" | base64 --decode) Track deploy_osm airflow_ok: https://osm.etsi.org/InstallLog.php?&installation_id=1704025241-XLdS97dvDXZdtpEe&local_ts=1704025744&event=deploy_osm&operation=airflow_ok&value=&comment=&tags= "prometheus-community" has been added to your repositories Hang tight while we grab the latest from your chart repositories... ...Successfully got an update from the "metallb" chart repository ...Successfully got an update from the "apache-airflow" chart repository ...Successfully got an update from the "openebs" chart repository ...Successfully got an update from the "jetstack" chart repository ...Successfully got an update from the "prometheus-community" chart repository ...Successfully got an update from the "stable" chart repository ...Successfully got an update from the "bitnami" chart repository Update Complete. ⎈Happy Helming!⎈ Release "pushgateway" does not exist. Installing it now. NAME: pushgateway LAST DEPLOYED: Sun Dec 31 12:29:09 2023 NAMESPACE: osm STATUS: deployed REVISION: 1 TEST SUITE: None NOTES: 1. Get the application URL by running these commands: export POD_NAME=$(kubectl get pods --namespace osm -l "app=prometheus-pushgateway,release=pushgateway" -o jsonpath="{.items[0].metadata.name}") echo "Visit http://127.0.0.1:8080 to use your application" kubectl port-forward $POD_NAME 8080:80 Track deploy_osm pushgateway_ok: https://osm.etsi.org/InstallLog.php?&installation_id=1704025241-XLdS97dvDXZdtpEe&local_ts=1704025749&event=deploy_osm&operation=pushgateway_ok&value=&comment=&tags= "prometheus-community" already exists with the same configuration, skipping Hang tight while we grab the latest from your chart repositories... ...Successfully got an update from the "metallb" chart repository ...Successfully got an update from the "apache-airflow" chart repository ...Successfully got an update from the "openebs" chart repository ...Successfully got an update from the "jetstack" chart repository ...Successfully got an update from the "prometheus-community" chart repository ...Successfully got an update from the "stable" chart repository ...Successfully got an update from the "bitnami" chart repository Update Complete. ⎈Happy Helming!⎈ Release "alertmanager" does not exist. Installing it now. NAME: alertmanager LAST DEPLOYED: Sun Dec 31 12:29:14 2023 NAMESPACE: osm STATUS: deployed REVISION: 1 NOTES: 1. 
Get the application URL by running these commands: export NODE_PORT=$(kubectl get --namespace osm -o jsonpath="{.spec.ports[0].nodePort}" services alertmanager) export NODE_IP=$(kubectl get nodes --namespace osm -o jsonpath="{.items[0].status.addresses[0].address}") echo http://$NODE_IP:$NODE_PORT Track deploy_osm alertmanager_ok: https://osm.etsi.org/InstallLog.php?&installation_id=1704025241-XLdS97dvDXZdtpEe&local_ts=1704025754&event=deploy_osm&operation=alertmanager_ok&value=&comment=&tags= Track deploy_osm install_osm_ngsa_ok: https://osm.etsi.org/InstallLog.php?&installation_id=1704025241-XLdS97dvDXZdtpEe&local_ts=1704025755&event=deploy_osm&operation=install_osm_ngsa_ok&value=&comment=&tags= % Total % Received % Xferd Average Speed Time Time Time Current Dload Upload Total Spent Left Speed 0 0 0 0 0 0 0 0 --:--:-- --:--:-- --:--:-- 0 100 3102 100 3102 0 0 15501 0 --:--:-- --:--:-- --:--:-- 15587 OK Hit:1 http://azure.archive.ubuntu.com/ubuntu jammy InRelease Hit:2 http://azure.archive.ubuntu.com/ubuntu jammy-updates InRelease Hit:3 http://azure.archive.ubuntu.com/ubuntu jammy-backports InRelease Hit:4 http://azure.archive.ubuntu.com/ubuntu jammy-security InRelease Hit:5 https://download.docker.com/linux/ubuntu jammy InRelease Hit:6 https://packages.cloud.google.com/apt kubernetes-xenial InRelease Hit:7 https://osm-download.etsi.org/repository/osm/debian/testing-daily testing InRelease Get:8 https://osm-download.etsi.org/repository/osm/debian/testing-daily testing/osmclient amd64 Packages [482 B] Get:9 https://osm-download.etsi.org/repository/osm/debian/testing-daily testing/IM amd64 Packages [851 B] Fetched 1333 B in 1s (1208 B/s) Reading package lists... W: https://osm-download.etsi.org/repository/osm/debian/testing-daily/dists/testing/InRelease: Key is stored in legacy trusted.gpg keyring (/etc/apt/trusted.gpg), see the DEPRECATION section in apt-key(8) for details. W: Conflicting distribution: https://osm-download.etsi.org/repository/osm/debian/testing-daily testing InRelease (expected testing but got ) Repository: 'deb [arch=amd64] https://osm-download.etsi.org/repository/osm/debian/testing-daily testing osmclient IM' Description: Archive for codename: testing components: osmclient,IM More info: https://osm-download.etsi.org/repository/osm/debian/testing-daily Adding repository. Adding deb entry to /etc/apt/sources.list.d/archive_uri-https_osm-download_etsi_org_repository_osm_debian_testing-daily-jammy.list Adding disabled deb-src entry to /etc/apt/sources.list.d/archive_uri-https_osm-download_etsi_org_repository_osm_debian_testing-daily-jammy.list Hit:1 http://azure.archive.ubuntu.com/ubuntu jammy InRelease Hit:2 http://azure.archive.ubuntu.com/ubuntu jammy-updates InRelease Hit:3 http://azure.archive.ubuntu.com/ubuntu jammy-backports InRelease Hit:4 https://download.docker.com/linux/ubuntu jammy InRelease Hit:5 http://azure.archive.ubuntu.com/ubuntu jammy-security InRelease Hit:7 https://osm-download.etsi.org/repository/osm/debian/testing-daily testing InRelease Hit:6 https://packages.cloud.google.com/apt kubernetes-xenial InRelease Reading package lists... W: https://osm-download.etsi.org/repository/osm/debian/testing-daily/dists/testing/InRelease: Key is stored in legacy trusted.gpg keyring (/etc/apt/trusted.gpg), see the DEPRECATION section in apt-key(8) for details. W: Conflicting distribution: https://osm-download.etsi.org/repository/osm/debian/testing-daily testing InRelease (expected testing but got ) Reading package lists... Building dependency tree... 
Reading state information... The following additional packages will be installed: build-essential bzip2 cpp cpp-11 dpkg-dev fakeroot fontconfig-config fonts-dejavu-core g++ g++-11 gcc gcc-11 gcc-11-base javascript-common libalgorithm-diff-perl libalgorithm-diff-xs-perl libalgorithm-merge-perl libasan6 libatomic1 libc-dev-bin libc-devtools libc6-dev libcc1-0 libcrypt-dev libdeflate0 libdpkg-perl libexpat1-dev libfakeroot libfile-fcntllock-perl libfontconfig1 libgcc-11-dev libgd3 libgomp1 libisl23 libitm1 libjbig0 libjpeg-turbo8 libjpeg8 libjs-jquery libjs-sphinxdoc libjs-underscore liblsan0 libmpc3 libnsl-dev libpython3-dev libpython3.10-dev libquadmath0 libstdc++-11-dev libtiff5 libtirpc-dev libtsan0 libubsan1 libwebp7 libxpm4 linux-libc-dev lto-disabled-list make manpages-dev python3-dev python3-wheel python3.10-dev rpcsvc-proto zlib1g-dev Suggested packages: bzip2-doc cpp-doc gcc-11-locales debian-keyring g++-multilib g++-11-multilib gcc-11-doc gcc-multilib autoconf automake libtool flex bison gdb gcc-doc gcc-11-multilib apache2 | lighttpd | httpd glibc-doc bzr libgd-tools libstdc++-11-doc make-doc The following NEW packages will be installed: build-essential bzip2 cpp cpp-11 dpkg-dev fakeroot fontconfig-config fonts-dejavu-core g++ g++-11 gcc gcc-11 gcc-11-base javascript-common libalgorithm-diff-perl libalgorithm-diff-xs-perl libalgorithm-merge-perl libasan6 libatomic1 libc-dev-bin libc-devtools libc6-dev libcc1-0 libcrypt-dev libdeflate0 libdpkg-perl libexpat1-dev libfakeroot libfile-fcntllock-perl libfontconfig1 libgcc-11-dev libgd3 libgomp1 libisl23 libitm1 libjbig0 libjpeg-turbo8 libjpeg8 libjs-jquery libjs-sphinxdoc libjs-underscore liblsan0 libmpc3 libnsl-dev libpython3-dev libpython3.10-dev libquadmath0 libstdc++-11-dev libtiff5 libtirpc-dev libtsan0 libubsan1 libwebp7 libxpm4 linux-libc-dev lto-disabled-list make manpages-dev python3-dev python3-pip python3-wheel python3.10-dev rpcsvc-proto zlib1g-dev 0 upgraded, 64 newly installed, 0 to remove and 6 not upgraded. Need to get 71.3 MB of archives. After this operation, 239 MB of additional disk space will be used. 
Get:1 http://azure.archive.ubuntu.com/ubuntu jammy-updates/main amd64 libc-dev-bin amd64 2.35-0ubuntu3.5 [20.3 kB] Get:2 http://azure.archive.ubuntu.com/ubuntu jammy-updates/main amd64 linux-libc-dev amd64 5.15.0-91.101 [1332 kB] Get:3 http://azure.archive.ubuntu.com/ubuntu jammy/main amd64 libcrypt-dev amd64 1:4.4.27-1 [112 kB] Get:4 http://azure.archive.ubuntu.com/ubuntu jammy/main amd64 rpcsvc-proto amd64 1.4.2-0ubuntu6 [68.5 kB] Get:5 http://azure.archive.ubuntu.com/ubuntu jammy-updates/main amd64 libtirpc-dev amd64 1.3.2-2ubuntu0.1 [192 kB] Get:6 http://azure.archive.ubuntu.com/ubuntu jammy/main amd64 libnsl-dev amd64 1.3.0-2build2 [71.3 kB] Get:7 http://azure.archive.ubuntu.com/ubuntu jammy-updates/main amd64 libc6-dev amd64 2.35-0ubuntu3.5 [2098 kB] Get:8 http://azure.archive.ubuntu.com/ubuntu jammy-updates/main amd64 gcc-11-base amd64 11.4.0-1ubuntu1~22.04 [20.2 kB] Get:9 http://azure.archive.ubuntu.com/ubuntu jammy/main amd64 libisl23 amd64 0.24-2build1 [727 kB] Get:10 http://azure.archive.ubuntu.com/ubuntu jammy/main amd64 libmpc3 amd64 1.2.1-2build1 [46.9 kB] Get:11 http://azure.archive.ubuntu.com/ubuntu jammy-updates/main amd64 cpp-11 amd64 11.4.0-1ubuntu1~22.04 [10.0 MB] Get:12 http://azure.archive.ubuntu.com/ubuntu jammy/main amd64 cpp amd64 4:11.2.0-1ubuntu1 [27.7 kB] Get:13 http://azure.archive.ubuntu.com/ubuntu jammy-updates/main amd64 libcc1-0 amd64 12.3.0-1ubuntu1~22.04 [48.3 kB] Get:14 http://azure.archive.ubuntu.com/ubuntu jammy-updates/main amd64 libgomp1 amd64 12.3.0-1ubuntu1~22.04 [126 kB] Get:15 http://azure.archive.ubuntu.com/ubuntu jammy-updates/main amd64 libitm1 amd64 12.3.0-1ubuntu1~22.04 [30.2 kB] Get:16 http://azure.archive.ubuntu.com/ubuntu jammy-updates/main amd64 libatomic1 amd64 12.3.0-1ubuntu1~22.04 [10.4 kB] Get:17 http://azure.archive.ubuntu.com/ubuntu jammy-updates/main amd64 libasan6 amd64 11.4.0-1ubuntu1~22.04 [2282 kB] Get:18 http://azure.archive.ubuntu.com/ubuntu jammy-updates/main amd64 liblsan0 amd64 12.3.0-1ubuntu1~22.04 [1069 kB] Get:19 http://azure.archive.ubuntu.com/ubuntu jammy-updates/main amd64 libtsan0 amd64 11.4.0-1ubuntu1~22.04 [2260 kB] Get:20 http://azure.archive.ubuntu.com/ubuntu jammy-updates/main amd64 libubsan1 amd64 12.3.0-1ubuntu1~22.04 [976 kB] Get:21 http://azure.archive.ubuntu.com/ubuntu jammy-updates/main amd64 libquadmath0 amd64 12.3.0-1ubuntu1~22.04 [154 kB] Get:22 http://azure.archive.ubuntu.com/ubuntu jammy-updates/main amd64 libgcc-11-dev amd64 11.4.0-1ubuntu1~22.04 [2517 kB] Get:23 http://azure.archive.ubuntu.com/ubuntu jammy-updates/main amd64 gcc-11 amd64 11.4.0-1ubuntu1~22.04 [20.1 MB] Get:24 http://azure.archive.ubuntu.com/ubuntu jammy/main amd64 gcc amd64 4:11.2.0-1ubuntu1 [5112 B] Get:25 http://azure.archive.ubuntu.com/ubuntu jammy-updates/main amd64 libstdc++-11-dev amd64 11.4.0-1ubuntu1~22.04 [2101 kB] Get:26 http://azure.archive.ubuntu.com/ubuntu jammy-updates/main amd64 g++-11 amd64 11.4.0-1ubuntu1~22.04 [11.4 MB] Get:27 http://azure.archive.ubuntu.com/ubuntu jammy/main amd64 g++ amd64 4:11.2.0-1ubuntu1 [1412 B] Get:28 http://azure.archive.ubuntu.com/ubuntu jammy/main amd64 make amd64 4.3-4.1build1 [180 kB] Get:29 http://azure.archive.ubuntu.com/ubuntu jammy-updates/main amd64 libdpkg-perl all 1.21.1ubuntu2.2 [237 kB] Get:30 http://azure.archive.ubuntu.com/ubuntu jammy/main amd64 bzip2 amd64 1.0.8-5build1 [34.8 kB] Get:31 http://azure.archive.ubuntu.com/ubuntu jammy/main amd64 lto-disabled-list all 24 [12.5 kB] Get:32 http://azure.archive.ubuntu.com/ubuntu jammy-updates/main amd64 dpkg-dev all 
1.21.1ubuntu2.2 [922 kB] Get:33 http://azure.archive.ubuntu.com/ubuntu jammy/main amd64 build-essential amd64 12.9ubuntu3 [4744 B] Get:34 http://azure.archive.ubuntu.com/ubuntu jammy/main amd64 libfakeroot amd64 1.28-1ubuntu1 [31.5 kB] Get:35 http://azure.archive.ubuntu.com/ubuntu jammy/main amd64 fakeroot amd64 1.28-1ubuntu1 [60.4 kB] Get:36 http://azure.archive.ubuntu.com/ubuntu jammy/main amd64 fonts-dejavu-core all 2.37-2build1 [1041 kB] Get:37 http://azure.archive.ubuntu.com/ubuntu jammy/main amd64 fontconfig-config all 2.13.1-4.2ubuntu5 [29.1 kB] Get:38 http://azure.archive.ubuntu.com/ubuntu jammy/main amd64 javascript-common all 11+nmu1 [5936 B] Get:39 http://azure.archive.ubuntu.com/ubuntu jammy/main amd64 libalgorithm-diff-perl all 1.201-1 [41.8 kB] Get:40 http://azure.archive.ubuntu.com/ubuntu jammy/main amd64 libalgorithm-diff-xs-perl amd64 0.04-6build3 [11.9 kB] Get:41 http://azure.archive.ubuntu.com/ubuntu jammy/main amd64 libalgorithm-merge-perl all 0.08-3 [12.0 kB] Get:42 http://azure.archive.ubuntu.com/ubuntu jammy/main amd64 libfontconfig1 amd64 2.13.1-4.2ubuntu5 [131 kB] Get:43 http://azure.archive.ubuntu.com/ubuntu jammy/main amd64 libjpeg-turbo8 amd64 2.1.2-0ubuntu1 [134 kB] Get:44 http://azure.archive.ubuntu.com/ubuntu jammy/main amd64 libjpeg8 amd64 8c-2ubuntu10 [2264 B] Get:45 http://azure.archive.ubuntu.com/ubuntu jammy/main amd64 libdeflate0 amd64 1.10-2 [70.9 kB] Get:46 http://azure.archive.ubuntu.com/ubuntu jammy-updates/main amd64 libjbig0 amd64 2.1-3.1ubuntu0.22.04.1 [29.2 kB] Get:47 http://azure.archive.ubuntu.com/ubuntu jammy-updates/main amd64 libwebp7 amd64 1.2.2-2ubuntu0.22.04.2 [206 kB] Get:48 http://azure.archive.ubuntu.com/ubuntu jammy-updates/main amd64 libtiff5 amd64 4.3.0-6ubuntu0.7 [185 kB] Get:49 http://azure.archive.ubuntu.com/ubuntu jammy-updates/main amd64 libxpm4 amd64 1:3.5.12-1ubuntu0.22.04.2 [36.7 kB] Get:50 http://azure.archive.ubuntu.com/ubuntu jammy/main amd64 libgd3 amd64 2.3.0-2ubuntu2 [129 kB] Get:51 http://azure.archive.ubuntu.com/ubuntu jammy-updates/main amd64 libc-devtools amd64 2.35-0ubuntu3.5 [28.9 kB] Get:52 http://azure.archive.ubuntu.com/ubuntu jammy-updates/main amd64 libexpat1-dev amd64 2.4.7-1ubuntu0.2 [147 kB] Get:53 http://azure.archive.ubuntu.com/ubuntu jammy/main amd64 libfile-fcntllock-perl amd64 0.22-3build7 [33.9 kB] Get:54 http://azure.archive.ubuntu.com/ubuntu jammy/main amd64 libjs-jquery all 3.6.0+dfsg+~3.5.13-1 [321 kB] Get:55 http://azure.archive.ubuntu.com/ubuntu jammy/main amd64 libjs-underscore all 1.13.2~dfsg-2 [118 kB] Get:56 http://azure.archive.ubuntu.com/ubuntu jammy/main amd64 libjs-sphinxdoc all 4.3.2-1 [139 kB] Get:57 http://azure.archive.ubuntu.com/ubuntu jammy-updates/main amd64 zlib1g-dev amd64 1:1.2.11.dfsg-2ubuntu9.2 [164 kB] Get:58 http://azure.archive.ubuntu.com/ubuntu jammy-updates/main amd64 libpython3.10-dev amd64 3.10.12-1~22.04.3 [4762 kB] Get:59 http://azure.archive.ubuntu.com/ubuntu jammy-updates/main amd64 libpython3-dev amd64 3.10.6-1~22.04 [7166 B] Get:60 http://azure.archive.ubuntu.com/ubuntu jammy/main amd64 manpages-dev all 5.10-1ubuntu1 [2309 kB] Get:61 http://azure.archive.ubuntu.com/ubuntu jammy-updates/main amd64 python3.10-dev amd64 3.10.12-1~22.04.3 [507 kB] Get:62 http://azure.archive.ubuntu.com/ubuntu jammy-updates/main amd64 python3-dev amd64 3.10.6-1~22.04 [26.0 kB] Get:63 http://azure.archive.ubuntu.com/ubuntu jammy-updates/universe amd64 python3-wheel all 0.37.1-2ubuntu0.22.04.1 [32.0 kB] Get:64 http://azure.archive.ubuntu.com/ubuntu jammy-updates/universe amd64 
python3-pip all 22.0.2+dfsg-1ubuntu0.4 [1305 kB] Fetched 71.3 MB in 2s (40.0 MB/s) Selecting previously unselected package libc-dev-bin. (Reading database ... (Reading database ... 5% (Reading database ... 10% (Reading database ... 15% (Reading database ... 20% (Reading database ... 25% (Reading database ... 30% (Reading database ... 35% (Reading database ... 40% (Reading database ... 45% (Reading database ... 50% (Reading database ... 55% (Reading database ... 60% (Reading database ... 65% (Reading database ... 70% (Reading database ... 75% (Reading database ... 80% (Reading database ... 85% (Reading database ... 90% (Reading database ... 95% (Reading database ... 100% (Reading database ... 62685 files and directories currently installed.) Preparing to unpack .../00-libc-dev-bin_2.35-0ubuntu3.5_amd64.deb ... Unpacking libc-dev-bin (2.35-0ubuntu3.5) ... Selecting previously unselected package linux-libc-dev:amd64. Preparing to unpack .../01-linux-libc-dev_5.15.0-91.101_amd64.deb ... Unpacking linux-libc-dev:amd64 (5.15.0-91.101) ... Selecting previously unselected package libcrypt-dev:amd64. Preparing to unpack .../02-libcrypt-dev_1%3a4.4.27-1_amd64.deb ... Unpacking libcrypt-dev:amd64 (1:4.4.27-1) ... Selecting previously unselected package rpcsvc-proto. Preparing to unpack .../03-rpcsvc-proto_1.4.2-0ubuntu6_amd64.deb ... Unpacking rpcsvc-proto (1.4.2-0ubuntu6) ... Selecting previously unselected package libtirpc-dev:amd64. Preparing to unpack .../04-libtirpc-dev_1.3.2-2ubuntu0.1_amd64.deb ... Unpacking libtirpc-dev:amd64 (1.3.2-2ubuntu0.1) ... Selecting previously unselected package libnsl-dev:amd64. Preparing to unpack .../05-libnsl-dev_1.3.0-2build2_amd64.deb ... Unpacking libnsl-dev:amd64 (1.3.0-2build2) ... Selecting previously unselected package libc6-dev:amd64. Preparing to unpack .../06-libc6-dev_2.35-0ubuntu3.5_amd64.deb ... Unpacking libc6-dev:amd64 (2.35-0ubuntu3.5) ... Selecting previously unselected package gcc-11-base:amd64. Preparing to unpack .../07-gcc-11-base_11.4.0-1ubuntu1~22.04_amd64.deb ... Unpacking gcc-11-base:amd64 (11.4.0-1ubuntu1~22.04) ... Selecting previously unselected package libisl23:amd64. Preparing to unpack .../08-libisl23_0.24-2build1_amd64.deb ... Unpacking libisl23:amd64 (0.24-2build1) ... Selecting previously unselected package libmpc3:amd64. Preparing to unpack .../09-libmpc3_1.2.1-2build1_amd64.deb ... Unpacking libmpc3:amd64 (1.2.1-2build1) ... Selecting previously unselected package cpp-11. Preparing to unpack .../10-cpp-11_11.4.0-1ubuntu1~22.04_amd64.deb ... Unpacking cpp-11 (11.4.0-1ubuntu1~22.04) ... Selecting previously unselected package cpp. Preparing to unpack .../11-cpp_4%3a11.2.0-1ubuntu1_amd64.deb ... Unpacking cpp (4:11.2.0-1ubuntu1) ... Selecting previously unselected package libcc1-0:amd64. Preparing to unpack .../12-libcc1-0_12.3.0-1ubuntu1~22.04_amd64.deb ... Unpacking libcc1-0:amd64 (12.3.0-1ubuntu1~22.04) ... Selecting previously unselected package libgomp1:amd64. Preparing to unpack .../13-libgomp1_12.3.0-1ubuntu1~22.04_amd64.deb ... Unpacking libgomp1:amd64 (12.3.0-1ubuntu1~22.04) ... Selecting previously unselected package libitm1:amd64. Preparing to unpack .../14-libitm1_12.3.0-1ubuntu1~22.04_amd64.deb ... Unpacking libitm1:amd64 (12.3.0-1ubuntu1~22.04) ... Selecting previously unselected package libatomic1:amd64. Preparing to unpack .../15-libatomic1_12.3.0-1ubuntu1~22.04_amd64.deb ... Unpacking libatomic1:amd64 (12.3.0-1ubuntu1~22.04) ... Selecting previously unselected package libasan6:amd64. 
Preparing to unpack .../16-libasan6_11.4.0-1ubuntu1~22.04_amd64.deb ... Unpacking libasan6:amd64 (11.4.0-1ubuntu1~22.04) ... Selecting previously unselected package liblsan0:amd64. Preparing to unpack .../17-liblsan0_12.3.0-1ubuntu1~22.04_amd64.deb ... Unpacking liblsan0:amd64 (12.3.0-1ubuntu1~22.04) ... Selecting previously unselected package libtsan0:amd64. Preparing to unpack .../18-libtsan0_11.4.0-1ubuntu1~22.04_amd64.deb ... Unpacking libtsan0:amd64 (11.4.0-1ubuntu1~22.04) ... Selecting previously unselected package libubsan1:amd64. Preparing to unpack .../19-libubsan1_12.3.0-1ubuntu1~22.04_amd64.deb ... Unpacking libubsan1:amd64 (12.3.0-1ubuntu1~22.04) ... Selecting previously unselected package libquadmath0:amd64. Preparing to unpack .../20-libquadmath0_12.3.0-1ubuntu1~22.04_amd64.deb ... Unpacking libquadmath0:amd64 (12.3.0-1ubuntu1~22.04) ... Selecting previously unselected package libgcc-11-dev:amd64. Preparing to unpack .../21-libgcc-11-dev_11.4.0-1ubuntu1~22.04_amd64.deb ... Unpacking libgcc-11-dev:amd64 (11.4.0-1ubuntu1~22.04) ... Selecting previously unselected package gcc-11. Preparing to unpack .../22-gcc-11_11.4.0-1ubuntu1~22.04_amd64.deb ... Unpacking gcc-11 (11.4.0-1ubuntu1~22.04) ... Selecting previously unselected package gcc. Preparing to unpack .../23-gcc_4%3a11.2.0-1ubuntu1_amd64.deb ... Unpacking gcc (4:11.2.0-1ubuntu1) ... Selecting previously unselected package libstdc++-11-dev:amd64. Preparing to unpack .../24-libstdc++-11-dev_11.4.0-1ubuntu1~22.04_amd64.deb ... Unpacking libstdc++-11-dev:amd64 (11.4.0-1ubuntu1~22.04) ... Selecting previously unselected package g++-11. Preparing to unpack .../25-g++-11_11.4.0-1ubuntu1~22.04_amd64.deb ... Unpacking g++-11 (11.4.0-1ubuntu1~22.04) ... Selecting previously unselected package g++. Preparing to unpack .../26-g++_4%3a11.2.0-1ubuntu1_amd64.deb ... Unpacking g++ (4:11.2.0-1ubuntu1) ... Selecting previously unselected package make. Preparing to unpack .../27-make_4.3-4.1build1_amd64.deb ... Unpacking make (4.3-4.1build1) ... Selecting previously unselected package libdpkg-perl. Preparing to unpack .../28-libdpkg-perl_1.21.1ubuntu2.2_all.deb ... Unpacking libdpkg-perl (1.21.1ubuntu2.2) ... Selecting previously unselected package bzip2. Preparing to unpack .../29-bzip2_1.0.8-5build1_amd64.deb ... Unpacking bzip2 (1.0.8-5build1) ... Selecting previously unselected package lto-disabled-list. Preparing to unpack .../30-lto-disabled-list_24_all.deb ... Unpacking lto-disabled-list (24) ... Selecting previously unselected package dpkg-dev. Preparing to unpack .../31-dpkg-dev_1.21.1ubuntu2.2_all.deb ... Unpacking dpkg-dev (1.21.1ubuntu2.2) ... Selecting previously unselected package build-essential. Preparing to unpack .../32-build-essential_12.9ubuntu3_amd64.deb ... Unpacking build-essential (12.9ubuntu3) ... Selecting previously unselected package libfakeroot:amd64. Preparing to unpack .../33-libfakeroot_1.28-1ubuntu1_amd64.deb ... Unpacking libfakeroot:amd64 (1.28-1ubuntu1) ... Selecting previously unselected package fakeroot. Preparing to unpack .../34-fakeroot_1.28-1ubuntu1_amd64.deb ... Unpacking fakeroot (1.28-1ubuntu1) ... Selecting previously unselected package fonts-dejavu-core. Preparing to unpack .../35-fonts-dejavu-core_2.37-2build1_all.deb ... Unpacking fonts-dejavu-core (2.37-2build1) ... Selecting previously unselected package fontconfig-config. Preparing to unpack .../36-fontconfig-config_2.13.1-4.2ubuntu5_all.deb ... Unpacking fontconfig-config (2.13.1-4.2ubuntu5) ... 
Selecting previously unselected package javascript-common. Preparing to unpack .../37-javascript-common_11+nmu1_all.deb ... Unpacking javascript-common (11+nmu1) ... Selecting previously unselected package libalgorithm-diff-perl. Preparing to unpack .../38-libalgorithm-diff-perl_1.201-1_all.deb ... Unpacking libalgorithm-diff-perl (1.201-1) ... Selecting previously unselected package libalgorithm-diff-xs-perl. Preparing to unpack .../39-libalgorithm-diff-xs-perl_0.04-6build3_amd64.deb ... Unpacking libalgorithm-diff-xs-perl (0.04-6build3) ... Selecting previously unselected package libalgorithm-merge-perl. Preparing to unpack .../40-libalgorithm-merge-perl_0.08-3_all.deb ... Unpacking libalgorithm-merge-perl (0.08-3) ... Selecting previously unselected package libfontconfig1:amd64. Preparing to unpack .../41-libfontconfig1_2.13.1-4.2ubuntu5_amd64.deb ... Unpacking libfontconfig1:amd64 (2.13.1-4.2ubuntu5) ... Selecting previously unselected package libjpeg-turbo8:amd64. Preparing to unpack .../42-libjpeg-turbo8_2.1.2-0ubuntu1_amd64.deb ... Unpacking libjpeg-turbo8:amd64 (2.1.2-0ubuntu1) ... Selecting previously unselected package libjpeg8:amd64. Preparing to unpack .../43-libjpeg8_8c-2ubuntu10_amd64.deb ... Unpacking libjpeg8:amd64 (8c-2ubuntu10) ... Selecting previously unselected package libdeflate0:amd64. Preparing to unpack .../44-libdeflate0_1.10-2_amd64.deb ... Unpacking libdeflate0:amd64 (1.10-2) ... Selecting previously unselected package libjbig0:amd64. Preparing to unpack .../45-libjbig0_2.1-3.1ubuntu0.22.04.1_amd64.deb ... Unpacking libjbig0:amd64 (2.1-3.1ubuntu0.22.04.1) ... Selecting previously unselected package libwebp7:amd64. Preparing to unpack .../46-libwebp7_1.2.2-2ubuntu0.22.04.2_amd64.deb ... Unpacking libwebp7:amd64 (1.2.2-2ubuntu0.22.04.2) ... Selecting previously unselected package libtiff5:amd64. Preparing to unpack .../47-libtiff5_4.3.0-6ubuntu0.7_amd64.deb ... Unpacking libtiff5:amd64 (4.3.0-6ubuntu0.7) ... Selecting previously unselected package libxpm4:amd64. Preparing to unpack .../48-libxpm4_1%3a3.5.12-1ubuntu0.22.04.2_amd64.deb ... Unpacking libxpm4:amd64 (1:3.5.12-1ubuntu0.22.04.2) ... Selecting previously unselected package libgd3:amd64. Preparing to unpack .../49-libgd3_2.3.0-2ubuntu2_amd64.deb ... Unpacking libgd3:amd64 (2.3.0-2ubuntu2) ... Selecting previously unselected package libc-devtools. Preparing to unpack .../50-libc-devtools_2.35-0ubuntu3.5_amd64.deb ... Unpacking libc-devtools (2.35-0ubuntu3.5) ... Selecting previously unselected package libexpat1-dev:amd64. Preparing to unpack .../51-libexpat1-dev_2.4.7-1ubuntu0.2_amd64.deb ... Unpacking libexpat1-dev:amd64 (2.4.7-1ubuntu0.2) ... Selecting previously unselected package libfile-fcntllock-perl. Preparing to unpack .../52-libfile-fcntllock-perl_0.22-3build7_amd64.deb ... Unpacking libfile-fcntllock-perl (0.22-3build7) ... Selecting previously unselected package libjs-jquery. Preparing to unpack .../53-libjs-jquery_3.6.0+dfsg+~3.5.13-1_all.deb ... Unpacking libjs-jquery (3.6.0+dfsg+~3.5.13-1) ... Selecting previously unselected package libjs-underscore. Preparing to unpack .../54-libjs-underscore_1.13.2~dfsg-2_all.deb ... Unpacking libjs-underscore (1.13.2~dfsg-2) ... Selecting previously unselected package libjs-sphinxdoc. Preparing to unpack .../55-libjs-sphinxdoc_4.3.2-1_all.deb ... Unpacking libjs-sphinxdoc (4.3.2-1) ... Selecting previously unselected package zlib1g-dev:amd64. Preparing to unpack .../56-zlib1g-dev_1%3a1.2.11.dfsg-2ubuntu9.2_amd64.deb ... 
Unpacking zlib1g-dev:amd64 (1:1.2.11.dfsg-2ubuntu9.2) ... Selecting previously unselected package libpython3.10-dev:amd64. Preparing to unpack .../57-libpython3.10-dev_3.10.12-1~22.04.3_amd64.deb ... Unpacking libpython3.10-dev:amd64 (3.10.12-1~22.04.3) ... Selecting previously unselected package libpython3-dev:amd64. Preparing to unpack .../58-libpython3-dev_3.10.6-1~22.04_amd64.deb ... Unpacking libpython3-dev:amd64 (3.10.6-1~22.04) ... Selecting previously unselected package manpages-dev. Preparing to unpack .../59-manpages-dev_5.10-1ubuntu1_all.deb ... Unpacking manpages-dev (5.10-1ubuntu1) ... Selecting previously unselected package python3.10-dev. Preparing to unpack .../60-python3.10-dev_3.10.12-1~22.04.3_amd64.deb ... Unpacking python3.10-dev (3.10.12-1~22.04.3) ... Selecting previously unselected package python3-dev. Preparing to unpack .../61-python3-dev_3.10.6-1~22.04_amd64.deb ... Unpacking python3-dev (3.10.6-1~22.04) ... Selecting previously unselected package python3-wheel. Preparing to unpack .../62-python3-wheel_0.37.1-2ubuntu0.22.04.1_all.deb ... Unpacking python3-wheel (0.37.1-2ubuntu0.22.04.1) ... Selecting previously unselected package python3-pip. Preparing to unpack .../63-python3-pip_22.0.2+dfsg-1ubuntu0.4_all.deb ... Unpacking python3-pip (22.0.2+dfsg-1ubuntu0.4) ... Setting up javascript-common (11+nmu1) ... Setting up gcc-11-base:amd64 (11.4.0-1ubuntu1~22.04) ... Setting up manpages-dev (5.10-1ubuntu1) ... Setting up lto-disabled-list (24) ... Setting up libxpm4:amd64 (1:3.5.12-1ubuntu0.22.04.2) ... Setting up libfile-fcntllock-perl (0.22-3build7) ... Setting up libalgorithm-diff-perl (1.201-1) ... Setting up libdeflate0:amd64 (1.10-2) ... Setting up linux-libc-dev:amd64 (5.15.0-91.101) ... Setting up libgomp1:amd64 (12.3.0-1ubuntu1~22.04) ... Setting up bzip2 (1.0.8-5build1) ... Setting up python3-wheel (0.37.1-2ubuntu0.22.04.1) ... Setting up libjbig0:amd64 (2.1-3.1ubuntu0.22.04.1) ... Setting up libfakeroot:amd64 (1.28-1ubuntu1) ... Setting up libasan6:amd64 (11.4.0-1ubuntu1~22.04) ... Setting up fakeroot (1.28-1ubuntu1) ... update-alternatives: using /usr/bin/fakeroot-sysv to provide /usr/bin/fakeroot (fakeroot) in auto mode Setting up libtirpc-dev:amd64 (1.3.2-2ubuntu0.1) ... Setting up rpcsvc-proto (1.4.2-0ubuntu6) ... Setting up make (4.3-4.1build1) ... Setting up libquadmath0:amd64 (12.3.0-1ubuntu1~22.04) ... Setting up libmpc3:amd64 (1.2.1-2build1) ... Setting up libatomic1:amd64 (12.3.0-1ubuntu1~22.04) ... Setting up fonts-dejavu-core (2.37-2build1) ... Setting up python3-pip (22.0.2+dfsg-1ubuntu0.4) ... Setting up libjpeg-turbo8:amd64 (2.1.2-0ubuntu1) ... Setting up libdpkg-perl (1.21.1ubuntu2.2) ... Setting up libwebp7:amd64 (1.2.2-2ubuntu0.22.04.2) ... Setting up libubsan1:amd64 (12.3.0-1ubuntu1~22.04) ... Setting up libnsl-dev:amd64 (1.3.0-2build2) ... Setting up libcrypt-dev:amd64 (1:4.4.27-1) ... Setting up libjs-jquery (3.6.0+dfsg+~3.5.13-1) ... Setting up libisl23:amd64 (0.24-2build1) ... Setting up libc-dev-bin (2.35-0ubuntu3.5) ... Setting up libalgorithm-diff-xs-perl (0.04-6build3) ... Setting up libcc1-0:amd64 (12.3.0-1ubuntu1~22.04) ... Setting up liblsan0:amd64 (12.3.0-1ubuntu1~22.04) ... Setting up libitm1:amd64 (12.3.0-1ubuntu1~22.04) ... Setting up libjs-underscore (1.13.2~dfsg-2) ... Setting up libalgorithm-merge-perl (0.08-3) ... Setting up libtsan0:amd64 (11.4.0-1ubuntu1~22.04) ... Setting up libjpeg8:amd64 (8c-2ubuntu10) ... Setting up cpp-11 (11.4.0-1ubuntu1~22.04) ... Setting up fontconfig-config (2.13.1-4.2ubuntu5) ... 
Setting up dpkg-dev (1.21.1ubuntu2.2) ... Setting up libjs-sphinxdoc (4.3.2-1) ... Setting up libgcc-11-dev:amd64 (11.4.0-1ubuntu1~22.04) ... Setting up gcc-11 (11.4.0-1ubuntu1~22.04) ... Setting up cpp (4:11.2.0-1ubuntu1) ... Setting up libc6-dev:amd64 (2.35-0ubuntu3.5) ... Setting up libtiff5:amd64 (4.3.0-6ubuntu0.7) ... Setting up libfontconfig1:amd64 (2.13.1-4.2ubuntu5) ... Setting up gcc (4:11.2.0-1ubuntu1) ... Setting up libexpat1-dev:amd64 (2.4.7-1ubuntu0.2) ... Setting up libgd3:amd64 (2.3.0-2ubuntu2) ... Setting up libstdc++-11-dev:amd64 (11.4.0-1ubuntu1~22.04) ... Setting up zlib1g-dev:amd64 (1:1.2.11.dfsg-2ubuntu9.2) ... Setting up libc-devtools (2.35-0ubuntu3.5) ... Setting up g++-11 (11.4.0-1ubuntu1~22.04) ... Setting up libpython3.10-dev:amd64 (3.10.12-1~22.04.3) ... Setting up python3.10-dev (3.10.12-1~22.04.3) ... Setting up g++ (4:11.2.0-1ubuntu1) ... update-alternatives: using /usr/bin/g++ to provide /usr/bin/c++ (c++) in auto mode Setting up build-essential (12.9ubuntu3) ... Setting up libpython3-dev:amd64 (3.10.6-1~22.04) ... Setting up python3-dev (3.10.6-1~22.04) ... Processing triggers for man-db (2.10.2-1) ... Processing triggers for libc-bin (2.35-0ubuntu3.5) ... NEEDRESTART-VER: 3.5 NEEDRESTART-KCUR: 6.2.0-1018-azure NEEDRESTART-KEXP: 6.2.0-1018-azure NEEDRESTART-KSTA: 1 Requirement already satisfied: pip in /usr/lib/python3/dist-packages (22.0.2) Collecting pip Downloading pip-23.3.2-py3-none-any.whl (2.1 MB) ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━ 2.1/2.1 MB 18.6 MB/s eta 0:00:00 Installing collected packages: pip Attempting uninstall: pip Found existing installation: pip 22.0.2 Not uninstalling pip at /usr/lib/python3/dist-packages, outside environment /usr Can't uninstall 'pip'. No files were found to uninstall. Successfully installed pip-23.3.2 WARNING: Running pip as the 'root' user can result in broken permissions and conflicting behaviour with the system package manager. It is recommended to use a virtual environment instead: https://pip.pypa.io/warnings/venv Reading package lists... Building dependency tree... Reading state information... The following NEW packages will be installed: python3-osm-im python3-osmclient 0 upgraded, 2 newly installed, 0 to remove and 6 not upgraded. Need to get 245 kB of archives. After this operation, 8441 kB of additional disk space will be used. Get:1 https://osm-download.etsi.org/repository/osm/debian/testing-daily testing/IM amd64 python3-osm-im all 15.0.0+g09d7979-1 [176 kB] Get:2 https://osm-download.etsi.org/repository/osm/debian/testing-daily testing/osmclient amd64 python3-osmclient all 11.0.0rc1.post59+gb46c7c6-1 [68.6 kB] Fetched 245 kB in 0s (1123 kB/s) Selecting previously unselected package python3-osm-im. (Reading database ... (Reading database ... 5% (Reading database ... 10% (Reading database ... 15% (Reading database ... 20% (Reading database ... 25% (Reading database ... 30% (Reading database ... 35% (Reading database ... 40% (Reading database ... 45% (Reading database ... 50% (Reading database ... 55% (Reading database ... 60% (Reading database ... 65% (Reading database ... 70% (Reading database ... 75% (Reading database ... 80% (Reading database ... 85% (Reading database ... 90% (Reading database ... 95% (Reading database ... 100% (Reading database ... 69472 files and directories currently installed.) Preparing to unpack .../python3-osm-im_15.0.0+g09d7979-1_all.deb ... Unpacking python3-osm-im (15.0.0+g09d7979-1) ... Selecting previously unselected package python3-osmclient. 
Preparing to unpack .../python3-osmclient_11.0.0rc1.post59+gb46c7c6-1_all.deb ... Unpacking python3-osmclient (11.0.0rc1.post59+gb46c7c6-1) ... Setting up python3-osmclient (11.0.0rc1.post59+gb46c7c6-1) ... Setting up python3-osm-im (15.0.0+g09d7979-1) ... NEEDRESTART-VER: 3.5 NEEDRESTART-KCUR: 6.2.0-1018-azure NEEDRESTART-KEXP: 6.2.0-1018-azure NEEDRESTART-KSTA: 1 Defaulting to user installation because normal site-packages is not writeable Collecting enum34==1.1.10 (from -r /usr/lib/python3/dist-packages/osm_im/requirements.txt (line 17)) Downloading enum34-1.1.10-py3-none-any.whl (11 kB) Collecting lxml==4.9.3 (from -r /usr/lib/python3/dist-packages/osm_im/requirements.txt (line 19)) Downloading lxml-4.9.3-cp310-cp310-manylinux_2_28_x86_64.whl.metadata (3.8 kB) Collecting pyang==2.5.3 (from -r /usr/lib/python3/dist-packages/osm_im/requirements.txt (line 23)) Downloading pyang-2.5.3-py2.py3-none-any.whl (592 kB) ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━ 592.9/592.9 kB 14.0 MB/s eta 0:00:00 Collecting pyangbind==0.8.3.post1 (from -r /usr/lib/python3/dist-packages/osm_im/requirements.txt (line 27)) Downloading pyangbind-0.8.3.post1-py3-none-any.whl.metadata (4.2 kB) Collecting pyyaml==6.0.1 (from -r /usr/lib/python3/dist-packages/osm_im/requirements.txt (line 29)) Downloading PyYAML-6.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl.metadata (2.1 kB) Collecting regex==2023.8.8 (from -r /usr/lib/python3/dist-packages/osm_im/requirements.txt (line 31)) Downloading regex-2023.8.8-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl.metadata (40 kB) ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━ 40.9/40.9 kB 2.5 MB/s eta 0:00:00 Requirement already satisfied: six==1.16.0 in /usr/lib/python3/dist-packages (from -r /usr/lib/python3/dist-packages/osm_im/requirements.txt (line 33)) (1.16.0) Downloading lxml-4.9.3-cp310-cp310-manylinux_2_28_x86_64.whl (7.9 MB) ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━ 7.9/7.9 MB 60.4 MB/s eta 0:00:00 Downloading pyangbind-0.8.3.post1-py3-none-any.whl (51 kB) ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━ 51.8/51.8 kB 4.2 MB/s eta 0:00:00 Downloading PyYAML-6.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl (705 kB) ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━ 705.5/705.5 kB 27.7 MB/s eta 0:00:00 Downloading regex-2023.8.8-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl (771 kB) ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━ 771.9/771.9 kB 27.3 MB/s eta 0:00:00 Installing collected packages: enum34, regex, pyyaml, lxml, pyang, pyangbind Successfully installed enum34-1.1.10 lxml-4.9.3 pyang-2.5.3 pyangbind-0.8.3.post1 pyyaml-6.0.1 regex-2023.8.8 Reading package lists... Building dependency tree... Reading state information... libmagic1 is already the newest version (1:5.41-3ubuntu0.1). libmagic1 set to manually installed. 0 upgraded, 0 newly installed, 0 to remove and 6 not upgraded. 
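pip warned above about running as the root user and recommended a virtual environment (https://pip.pypa.io/warnings/venv). The installer continues with user-level installs, but a minimal sketch of the venv-based alternative, assuming python3-venv is available on the host and using a hypothetical ~/osmclient-venv path, would be:

  python3 -m venv ~/osmclient-venv            # create an isolated environment
  . ~/osmclient-venv/bin/activate             # activate it for the current shell
  # install the same requirements files the installer uses for the IM and the client
  pip install -r /usr/lib/python3/dist-packages/osm_im/requirements.txt
  pip install -r /usr/lib/python3/dist-packages/osmclient/requirements.txt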
Defaulting to user installation because normal site-packages is not writeable Collecting certifi==2023.7.22 (from -r /usr/lib/python3/dist-packages/osmclient/requirements.txt (line 17)) Downloading certifi-2023.7.22-py3-none-any.whl.metadata (2.2 kB) Collecting charset-normalizer==3.2.0 (from -r /usr/lib/python3/dist-packages/osmclient/requirements.txt (line 19)) Downloading charset_normalizer-3.2.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl.metadata (31 kB) Collecting click==8.1.7 (from -r /usr/lib/python3/dist-packages/osmclient/requirements.txt (line 21)) Downloading click-8.1.7-py3-none-any.whl.metadata (3.0 kB) Collecting idna==3.4 (from -r /usr/lib/python3/dist-packages/osmclient/requirements.txt (line 23)) Downloading idna-3.4-py3-none-any.whl (61 kB) ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━ 61.5/61.5 kB 3.2 MB/s eta 0:00:00 Collecting jinja2==3.1.2 (from -r /usr/lib/python3/dist-packages/osmclient/requirements.txt (line 25)) Downloading Jinja2-3.1.2-py3-none-any.whl (133 kB) ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━ 133.1/133.1 kB 9.0 MB/s eta 0:00:00 Collecting jsonpath-ng==1.6.0 (from -r /usr/lib/python3/dist-packages/osmclient/requirements.txt (line 27)) Downloading jsonpath_ng-1.6.0-py3-none-any.whl.metadata (17 kB) Collecting markupsafe==2.1.3 (from -r /usr/lib/python3/dist-packages/osmclient/requirements.txt (line 29)) Downloading MarkupSafe-2.1.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl.metadata (3.0 kB) Collecting packaging==23.1 (from -r /usr/lib/python3/dist-packages/osmclient/requirements.txt (line 31)) Downloading packaging-23.1-py3-none-any.whl (48 kB) ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━ 48.9/48.9 kB 2.4 MB/s eta 0:00:00 Collecting ply==3.11 (from -r /usr/lib/python3/dist-packages/osmclient/requirements.txt (line 33)) Downloading ply-3.11-py2.py3-none-any.whl (49 kB) ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━ 49.6/49.6 kB 1.9 MB/s eta 0:00:00 Collecting prettytable==3.9.0 (from -r /usr/lib/python3/dist-packages/osmclient/requirements.txt (line 35)) Downloading prettytable-3.9.0-py3-none-any.whl.metadata (26 kB) Collecting python-magic==0.4.27 (from -r /usr/lib/python3/dist-packages/osmclient/requirements.txt (line 37)) Downloading python_magic-0.4.27-py2.py3-none-any.whl (13 kB) Requirement already satisfied: pyyaml==6.0.1 in ./.local/lib/python3.10/site-packages (from -r /usr/lib/python3/dist-packages/osmclient/requirements.txt (line 39)) (6.0.1) Collecting requests==2.31.0 (from -r /usr/lib/python3/dist-packages/osmclient/requirements.txt (line 41)) Downloading requests-2.31.0-py3-none-any.whl.metadata (4.6 kB) Collecting urllib3==2.0.5 (from -r /usr/lib/python3/dist-packages/osmclient/requirements.txt (line 43)) Downloading urllib3-2.0.5-py3-none-any.whl.metadata (6.6 kB) Collecting verboselogs==1.7 (from -r /usr/lib/python3/dist-packages/osmclient/requirements.txt (line 45)) Downloading verboselogs-1.7-py2.py3-none-any.whl (11 kB) Collecting wcwidth==0.2.6 (from -r /usr/lib/python3/dist-packages/osmclient/requirements.txt (line 47)) Downloading wcwidth-0.2.6-py2.py3-none-any.whl (29 kB) Downloading certifi-2023.7.22-py3-none-any.whl (158 kB) ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━ 158.3/158.3 kB 13.5 MB/s eta 0:00:00 Downloading charset_normalizer-3.2.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl (201 kB) ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━ 201.8/201.8 kB 9.7 MB/s eta 0:00:00 Downloading click-8.1.7-py3-none-any.whl (97 kB) ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━ 97.9/97.9 kB 4.5 MB/s eta 0:00:00 Downloading 
jsonpath_ng-1.6.0-py3-none-any.whl (29 kB)
Downloading MarkupSafe-2.1.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl (25 kB)
Downloading prettytable-3.9.0-py3-none-any.whl (27 kB)
Downloading requests-2.31.0-py3-none-any.whl (62 kB)
   ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━ 62.6/62.6 kB 3.5 MB/s eta 0:00:00
Downloading urllib3-2.0.5-py3-none-any.whl (123 kB)
   ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━ 123.8/123.8 kB 5.5 MB/s eta 0:00:00
Installing collected packages: wcwidth, verboselogs, ply, urllib3, python-magic, prettytable, packaging, markupsafe, jsonpath-ng, idna, click, charset-normalizer, certifi, requests, jinja2
WARNING: The script jsonpath_ng is installed in '/home/ubuntu/.local/bin' which is not on PATH.
Consider adding this directory to PATH or, if you prefer to suppress this warning, use --no-warn-script-location.
WARNING: The script normalizer is installed in '/home/ubuntu/.local/bin' which is not on PATH.
Consider adding this directory to PATH or, if you prefer to suppress this warning, use --no-warn-script-location.
Successfully installed certifi-2023.7.22 charset-normalizer-3.2.0 click-8.1.7 idna-3.4 jinja2-3.1.2 jsonpath-ng-1.6.0 markupsafe-2.1.3 packaging-23.1 ply-3.11 prettytable-3.9.0 python-magic-0.4.27 requests-2.31.0 urllib3-2.0.5 verboselogs-1.7 wcwidth-0.2.6
OSM client installed
OSM client assumes that OSM host is running in localhost (127.0.0.1).
In case you want to interact with a different OSM host, you will have to configure this env variable in your .bashrc file:
export OSM_HOSTNAME=
Track osmclient osmclient_ok: https://osm.etsi.org/InstallLog.php?&installation_id=1704025241-XLdS97dvDXZdtpEe&local_ts=1704025797&event=osmclient&operation=osmclient_ok&value=&comment=&tags=
Checking OSM health state...
helm -n osm list
NAME          NAMESPACE  REVISION  UPDATED                                   STATUS    CHART                          APP VERSION
airflow       osm        1         2023-12-31 12:23:50.870365319 +0000 UTC   deployed  airflow-1.9.0                  2.5.3
alertmanager  osm        1         2023-12-31 12:29:14.474646815 +0000 UTC   deployed  alertmanager-0.22.0            v0.24.0
mongodb-k8s   osm        1         2023-12-31 12:23:46.701918986 +0000 UTC   deployed  mongodb-13.9.4                 6.0.5
osm           osm        1         2023-12-31 12:23:47.280267087 +0000 UTC   deployed  osm-0.0.1                      15
pushgateway   osm        1         2023-12-31 12:29:09.493104116 +0000 UTC   deployed  prometheus-pushgateway-1.18.2  1.4.2
helm -n osm status osm
NAME: osm
LAST DEPLOYED: Sun Dec 31 12:23:47 2023
NAMESPACE: osm
STATUS: deployed
REVISION: 1
TEST SUITE: None
NOTES:
1.
Get the application URL by running these commands: export NODE_PORT=$(kubectl get --namespace osm -o jsonpath="{.spec.ports[0].nodePort}" services nbi) export NODE_IP=$(kubectl get nodes --namespace osm -o jsonpath="{.items[0].status.addresses[0].address}") echo http://$NODE_IP:$NODE_PORT ===> Successful checks: 1/24 ===> Successful checks: 2/24 ===> Successful checks: 3/24 ===> Successful checks: 4/24 ===> Successful checks: 5/24 ===> Successful checks: 6/24 ===> Successful checks: 7/24 ===> Successful checks: 8/24 ===> Successful checks: 9/24 ===> Successful checks: 10/24 ===> Successful checks: 11/24 ===> Successful checks: 12/24 ===> Successful checks: 13/24 ===> Successful checks: 14/24 ===> Successful checks: 15/24 ===> Successful checks: 16/24 ===> Successful checks: 17/24 ===> Successful checks: 18/24 ===> Successful checks: 19/24 ===> Successful checks: 20/24 ===> Successful checks: 21/24 ===> Successful checks: 22/24 ===> Successful checks: 23/24 ===> Successful checks: 24/24 SYSTEM IS READY Track healthchecks after_healthcheck_ok: https://osm.etsi.org/InstallLog.php?&installation_id=1704025241-XLdS97dvDXZdtpEe&local_ts=1704025921&event=healthchecks&operation=after_healthcheck_ok&value=&comment=&tags= 1ad1f26a-169d-4a38-b714-85d1b56b3221 1066bc04-43e7-407b-9943-9911d430cd12 Track final_ops add_local_k8scluster_ok: https://osm.etsi.org/InstallLog.php?&installation_id=1704025241-XLdS97dvDXZdtpEe&local_ts=1704025924&event=final_ops&operation=add_local_k8scluster_ok&value=&comment=&tags= Fixing firewall so docker and LXD can share the same host without affecting each other. tee: /etc/iptables/rules.v4: No such file or directory # Generated by iptables-save v1.8.7 on Sun Dec 31 12:32:04 2023 *mangle :PREROUTING ACCEPT [0:0] :INPUT ACCEPT [0:0] :FORWARD ACCEPT [0:0] :OUTPUT ACCEPT [0:0] :POSTROUTING ACCEPT [0:0] :KUBE-IPTABLES-HINT - [0:0] :KUBE-KUBELET-CANARY - [0:0] :KUBE-PROXY-CANARY - [0:0] COMMIT # Completed on Sun Dec 31 12:32:04 2023 # Generated by iptables-save v1.8.7 on Sun Dec 31 12:32:04 2023 *security :INPUT ACCEPT [0:0] :FORWARD ACCEPT [0:0] :OUTPUT ACCEPT [292686:124643561] -A OUTPUT -d 168.63.129.16/32 -p tcp -m tcp --dport 53 -j ACCEPT -A OUTPUT -d 168.63.129.16/32 -p tcp -m owner --uid-owner 0 -j ACCEPT -A OUTPUT -d 168.63.129.16/32 -p tcp -m conntrack --ctstate INVALID,NEW -j DROP COMMIT # Completed on Sun Dec 31 12:32:04 2023 # Generated by iptables-save v1.8.7 on Sun Dec 31 12:32:04 2023 *filter :INPUT ACCEPT [0:0] :FORWARD DROP [0:0] :OUTPUT ACCEPT [0:0] :DOCKER - [0:0] :DOCKER-ISOLATION-STAGE-1 - [0:0] :DOCKER-ISOLATION-STAGE-2 - [0:0] :DOCKER-USER - [0:0] :FLANNEL-FWD - [0:0] :KUBE-EXTERNAL-SERVICES - [0:0] :KUBE-FIREWALL - [0:0] :KUBE-FORWARD - [0:0] :KUBE-KUBELET-CANARY - [0:0] :KUBE-NODEPORTS - [0:0] :KUBE-PROXY-CANARY - [0:0] :KUBE-PROXY-FIREWALL - [0:0] :KUBE-SERVICES - [0:0] -A INPUT -m conntrack --ctstate NEW -m comment --comment "kubernetes load balancer firewall" -j KUBE-PROXY-FIREWALL -A INPUT -m comment --comment "kubernetes health check service ports" -j KUBE-NODEPORTS -A INPUT -m conntrack --ctstate NEW -m comment --comment "kubernetes externally-visible service portals" -j KUBE-EXTERNAL-SERVICES -A INPUT -j KUBE-FIREWALL -A FORWARD -m conntrack --ctstate NEW -m comment --comment "kubernetes load balancer firewall" -j KUBE-PROXY-FIREWALL -A FORWARD -m comment --comment "kubernetes forwarding rules" -j KUBE-FORWARD -A FORWARD -m conntrack --ctstate NEW -m comment --comment "kubernetes service portals" -j KUBE-SERVICES -A FORWARD -m conntrack --ctstate 
NEW -m comment --comment "kubernetes externally-visible service portals" -j KUBE-EXTERNAL-SERVICES -A FORWARD -j DOCKER-USER -A FORWARD -j DOCKER-ISOLATION-STAGE-1 -A FORWARD -o docker0 -m conntrack --ctstate RELATED,ESTABLISHED -j ACCEPT -A FORWARD -o docker0 -j DOCKER -A FORWARD -i docker0 ! -o docker0 -j ACCEPT -A FORWARD -i docker0 -o docker0 -j ACCEPT -A FORWARD -m comment --comment "flanneld forward" -j FLANNEL-FWD -A OUTPUT -m conntrack --ctstate NEW -m comment --comment "kubernetes load balancer firewall" -j KUBE-PROXY-FIREWALL -A OUTPUT -m conntrack --ctstate NEW -m comment --comment "kubernetes service portals" -j KUBE-SERVICES -A OUTPUT -j KUBE-FIREWALL -A DOCKER-ISOLATION-STAGE-1 -i docker0 ! -o docker0 -j DOCKER-ISOLATION-STAGE-2 -A DOCKER-ISOLATION-STAGE-1 -j RETURN -A DOCKER-ISOLATION-STAGE-2 -o docker0 -j DROP -A DOCKER-ISOLATION-STAGE-2 -j RETURN -A DOCKER-USER -j ACCEPT -A DOCKER-USER -j RETURN -A FLANNEL-FWD -s 10.244.0.0/16 -m comment --comment "flanneld forward" -j ACCEPT -A FLANNEL-FWD -d 10.244.0.0/16 -m comment --comment "flanneld forward" -j ACCEPT -A KUBE-FIREWALL ! -s 127.0.0.0/8 -d 127.0.0.0/8 -m comment --comment "block incoming localnet connections" -m conntrack ! --ctstate RELATED,ESTABLISHED,DNAT -j DROP -A KUBE-FIREWALL -m comment --comment "kubernetes firewall for dropping marked packets" -m mark --mark 0x8000/0x8000 -j DROP -A KUBE-FORWARD -m conntrack --ctstate INVALID -j DROP -A KUBE-FORWARD -m comment --comment "kubernetes forwarding rules" -m mark --mark 0x4000/0x4000 -j ACCEPT -A KUBE-FORWARD -m comment --comment "kubernetes forwarding conntrack rule" -m conntrack --ctstate RELATED,ESTABLISHED -j ACCEPT COMMIT # Completed on Sun Dec 31 12:32:04 2023 # Generated by iptables-save v1.8.7 on Sun Dec 31 12:32:04 2023 *nat :PREROUTING ACCEPT [0:0] :INPUT ACCEPT [0:0] :OUTPUT ACCEPT [0:0] :POSTROUTING ACCEPT [0:0] :DOCKER - [0:0] :FLANNEL-POSTRTG - [0:0] :KUBE-EXT-BF2LB5WJRBPLA42J - [0:0] :KUBE-EXT-FQUXG555W5IVIWW3 - [0:0] :KUBE-EXT-GZN4S7ND4PF6YXD6 - [0:0] :KUBE-EXT-MD4PSIBW5SPF2DB3 - [0:0] :KUBE-EXT-PQIZCPF63EFIBBJY - [0:0] :KUBE-EXT-XUD4OEZNIHB47KQL - [0:0] :KUBE-EXT-YA74QX5VY2UAABIX - [0:0] :KUBE-KUBELET-CANARY - [0:0] :KUBE-MARK-DROP - [0:0] :KUBE-MARK-MASQ - [0:0] :KUBE-NODEPORTS - [0:0] :KUBE-POSTROUTING - [0:0] :KUBE-PROXY-CANARY - [0:0] :KUBE-SEP-2EKL53EI67NMIYCL - [0:0] :KUBE-SEP-4IHIWJZYTE5RO3PV - [0:0] :KUBE-SEP-4TE7OZLRL6YAGNFP - [0:0] :KUBE-SEP-7ZL4UUQ5KSMYQBTK - [0:0] :KUBE-SEP-C2Z3JOEZYT4GUPVJ - [0:0] :KUBE-SEP-C523AIPH4Y2GJ7FW - [0:0] :KUBE-SEP-CEWR77HQLZDHWWAJ - [0:0] :KUBE-SEP-EYGXM42F6P7YZK7G - [0:0] :KUBE-SEP-HJ7EWOW62IX6GL6R - [0:0] :KUBE-SEP-JIWKU7LWBAE46CYF - [0:0] :KUBE-SEP-JIZ4GTHNGPIHE43F - [0:0] :KUBE-SEP-LBMQNJ35ID4UIQ2A - [0:0] :KUBE-SEP-MHWK5N6X4ER6N5YC - [0:0] :KUBE-SEP-MNWGAGGUHIHEBZJ5 - [0:0] :KUBE-SEP-MOFIWPYHXNLC5UHH - [0:0] :KUBE-SEP-OP4AXEAS4OXHBEQX - [0:0] :KUBE-SEP-OYQECDV34TM4MQCT - [0:0] :KUBE-SEP-R4ZUHGWZDVOCHPCZ - [0:0] :KUBE-SEP-R7EMXN5TTQQVP4UW - [0:0] :KUBE-SEP-S7EHPJDYTVDKWBVK - [0:0] :KUBE-SEP-S7MPVVC7MGYVFSF3 - [0:0] :KUBE-SEP-SDEOVHTVPO7QT7XS - [0:0] :KUBE-SEP-UJOLMLJZLWUP36UK - [0:0] :KUBE-SEP-XZVNOPGBKFOKTOOW - [0:0] :KUBE-SEP-YO6GK7QQMSG42MKC - [0:0] :KUBE-SEP-YV3BY7OPHE3HCHQL - [0:0] :KUBE-SEP-Z3MJ67D3H4OQ7N4P - [0:0] :KUBE-SEP-ZZLFH7IUJGJIPKFD - [0:0] :KUBE-SERVICES - [0:0] :KUBE-SVC-57RXGDQGDROYCT5J - [0:0] :KUBE-SVC-5QOWUZVRO3UICSLI - [0:0] :KUBE-SVC-BF2LB5WJRBPLA42J - [0:0] :KUBE-SVC-ERIFXISQEP7F7OF4 - [0:0] :KUBE-SVC-FQUXG555W5IVIWW3 - [0:0] :KUBE-SVC-GZ25SP4UFGF7SAVL - [0:0] 
:KUBE-SVC-GZN4S7ND4PF6YXD6 - [0:0] :KUBE-SVC-JD5MR3NA4I4DYORP - [0:0] :KUBE-SVC-KJWF6GU7OHHSDT5H - [0:0] :KUBE-SVC-MD4PSIBW5SPF2DB3 - [0:0] :KUBE-SVC-NPX46M4PTMTKRN6Y - [0:0] :KUBE-SVC-O36IMWM6WEZJKHBK - [0:0] :KUBE-SVC-PQIZCPF63EFIBBJY - [0:0] :KUBE-SVC-QE77U7R3P7AE7O5U - [0:0] :KUBE-SVC-QSOISDZI64RJ2IKG - [0:0] :KUBE-SVC-S4GQNWHOWTOQDYZX - [0:0] :KUBE-SVC-TCOU7JCQXEZGVUNU - [0:0] :KUBE-SVC-TTTQGL2HNUNQKPOG - [0:0] :KUBE-SVC-USIDOZAE2VTXK5OJ - [0:0] :KUBE-SVC-XUD4OEZNIHB47KQL - [0:0] :KUBE-SVC-YA74QX5VY2UAABIX - [0:0] :KUBE-SVC-YFBWLFEGVQGI5ORM - [0:0] :KUBE-SVC-ZUD4L6KQKCHD52W4 - [0:0] -A PREROUTING -m comment --comment "kubernetes service portals" -j KUBE-SERVICES -A PREROUTING -m addrtype --dst-type LOCAL -j DOCKER -A OUTPUT -m comment --comment "kubernetes service portals" -j KUBE-SERVICES -A OUTPUT ! -d 127.0.0.0/8 -m addrtype --dst-type LOCAL -j DOCKER -A POSTROUTING -m comment --comment "kubernetes postrouting rules" -j KUBE-POSTROUTING -A POSTROUTING -s 172.17.0.0/16 ! -o docker0 -j MASQUERADE -A POSTROUTING -m comment --comment "flanneld masq" -j FLANNEL-POSTRTG -A DOCKER -i docker0 -j RETURN -A FLANNEL-POSTRTG -m comment --comment "flanneld masq" -j RETURN -A FLANNEL-POSTRTG -s 10.244.0.0/24 -d 10.244.0.0/16 -m comment --comment "flanneld masq" -j RETURN -A FLANNEL-POSTRTG -s 10.244.0.0/16 -d 10.244.0.0/24 -m comment --comment "flanneld masq" -j RETURN -A FLANNEL-POSTRTG ! -s 10.244.0.0/16 -d 10.244.0.0/24 -m comment --comment "flanneld masq" -j RETURN -A FLANNEL-POSTRTG -s 10.244.0.0/16 ! -d 224.0.0.0/4 -m comment --comment "flanneld masq" -j MASQUERADE --random-fully -A FLANNEL-POSTRTG ! -s 10.244.0.0/16 -d 10.244.0.0/16 -m comment --comment "flanneld masq" -j MASQUERADE --random-fully -A KUBE-EXT-BF2LB5WJRBPLA42J -m comment --comment "masquerade traffic for osm/airflow-webserver:airflow-ui external destinations" -j KUBE-MARK-MASQ -A KUBE-EXT-BF2LB5WJRBPLA42J -j KUBE-SVC-BF2LB5WJRBPLA42J -A KUBE-EXT-FQUXG555W5IVIWW3 -m comment --comment "masquerade traffic for osm/nbi external destinations" -j KUBE-MARK-MASQ -A KUBE-EXT-FQUXG555W5IVIWW3 -j KUBE-SVC-FQUXG555W5IVIWW3 -A KUBE-EXT-GZN4S7ND4PF6YXD6 -m comment --comment "masquerade traffic for osm/alertmanager:http external destinations" -j KUBE-MARK-MASQ -A KUBE-EXT-GZN4S7ND4PF6YXD6 -j KUBE-SVC-GZN4S7ND4PF6YXD6 -A KUBE-EXT-MD4PSIBW5SPF2DB3 -m comment --comment "masquerade traffic for osm/prometheus external destinations" -j KUBE-MARK-MASQ -A KUBE-EXT-MD4PSIBW5SPF2DB3 -j KUBE-SVC-MD4PSIBW5SPF2DB3 -A KUBE-EXT-PQIZCPF63EFIBBJY -m comment --comment "masquerade traffic for osm/grafana:service external destinations" -j KUBE-MARK-MASQ -A KUBE-EXT-PQIZCPF63EFIBBJY -j KUBE-SVC-PQIZCPF63EFIBBJY -A KUBE-EXT-XUD4OEZNIHB47KQL -m comment --comment "masquerade traffic for osm/webhook-translator external destinations" -j KUBE-MARK-MASQ -A KUBE-EXT-XUD4OEZNIHB47KQL -j KUBE-SVC-XUD4OEZNIHB47KQL -A KUBE-EXT-YA74QX5VY2UAABIX -m comment --comment "masquerade traffic for osm/ng-ui external destinations" -j KUBE-MARK-MASQ -A KUBE-EXT-YA74QX5VY2UAABIX -j KUBE-SVC-YA74QX5VY2UAABIX -A KUBE-MARK-DROP -j MARK --set-xmark 0x8000/0x8000 -A KUBE-MARK-MASQ -j MARK --set-xmark 0x4000/0x4000 -A KUBE-NODEPORTS -p tcp -m comment --comment "osm/webhook-translator" -m tcp --dport 9998 -j KUBE-EXT-XUD4OEZNIHB47KQL -A KUBE-NODEPORTS -p tcp -m comment --comment "osm/nbi" -m tcp --dport 9999 -j KUBE-EXT-FQUXG555W5IVIWW3 -A KUBE-NODEPORTS -p tcp -m comment --comment "osm/prometheus" -m tcp --dport 9091 -j KUBE-EXT-MD4PSIBW5SPF2DB3 -A KUBE-NODEPORTS -p tcp -m comment 
--comment "osm/grafana:service" -m tcp --dport 3000 -j KUBE-EXT-PQIZCPF63EFIBBJY -A KUBE-NODEPORTS -p tcp -m comment --comment "osm/ng-ui" -m tcp --dport 80 -j KUBE-EXT-YA74QX5VY2UAABIX -A KUBE-NODEPORTS -p tcp -m comment --comment "osm/airflow-webserver:airflow-ui" -m tcp --dport 27643 -j KUBE-EXT-BF2LB5WJRBPLA42J -A KUBE-NODEPORTS -p tcp -m comment --comment "osm/alertmanager:http" -m tcp --dport 9093 -j KUBE-EXT-GZN4S7ND4PF6YXD6 -A KUBE-POSTROUTING -m mark ! --mark 0x4000/0x4000 -j RETURN -A KUBE-POSTROUTING -j MARK --set-xmark 0x4000/0x0 -A KUBE-POSTROUTING -m comment --comment "kubernetes service traffic requiring SNAT" -j MASQUERADE --random-fully -A KUBE-SEP-2EKL53EI67NMIYCL -s 10.244.0.28/32 -m comment --comment "osm/airflow-statsd:statsd-scrape" -j KUBE-MARK-MASQ -A KUBE-SEP-2EKL53EI67NMIYCL -p tcp -m comment --comment "osm/airflow-statsd:statsd-scrape" -m tcp -j DNAT --to-destination 10.244.0.28:9102 -A KUBE-SEP-4IHIWJZYTE5RO3PV -s 10.244.0.45/32 -m comment --comment "osm/kafka:tcp-client" -j KUBE-MARK-MASQ -A KUBE-SEP-4IHIWJZYTE5RO3PV -p tcp -m comment --comment "osm/kafka:tcp-client" -m tcp -j DNAT --to-destination 10.244.0.45:9092 -A KUBE-SEP-4TE7OZLRL6YAGNFP -s 10.244.0.47/32 -m comment --comment "osm/pushgateway-prometheus-pushgateway:http" -j KUBE-MARK-MASQ -A KUBE-SEP-4TE7OZLRL6YAGNFP -p tcp -m comment --comment "osm/pushgateway-prometheus-pushgateway:http" -m tcp -j DNAT --to-destination 10.244.0.47:9091 -A KUBE-SEP-7ZL4UUQ5KSMYQBTK -s 10.244.0.15/32 -m comment --comment "osm/prometheus" -j KUBE-MARK-MASQ -A KUBE-SEP-7ZL4UUQ5KSMYQBTK -p tcp -m comment --comment "osm/prometheus" -m tcp -j DNAT --to-destination 10.244.0.15:9090 -A KUBE-SEP-C2Z3JOEZYT4GUPVJ -s 10.244.0.5/32 -m comment --comment "cert-manager/cert-manager-webhook:https" -j KUBE-MARK-MASQ -A KUBE-SEP-C2Z3JOEZYT4GUPVJ -p tcp -m comment --comment "cert-manager/cert-manager-webhook:https" -m tcp -j DNAT --to-destination 10.244.0.5:10250 -A KUBE-SEP-C523AIPH4Y2GJ7FW -s 172.21.23.11/32 -m comment --comment "default/kubernetes:https" -j KUBE-MARK-MASQ -A KUBE-SEP-C523AIPH4Y2GJ7FW -p tcp -m comment --comment "default/kubernetes:https" -m tcp -j DNAT --to-destination 172.21.23.11:6443 -A KUBE-SEP-CEWR77HQLZDHWWAJ -s 10.244.0.9/32 -m comment --comment "kube-system/kube-dns:metrics" -j KUBE-MARK-MASQ -A KUBE-SEP-CEWR77HQLZDHWWAJ -p tcp -m comment --comment "kube-system/kube-dns:metrics" -m tcp -j DNAT --to-destination 10.244.0.9:9153 -A KUBE-SEP-EYGXM42F6P7YZK7G -s 10.244.0.14/32 -m comment --comment "osm/nbi" -j KUBE-MARK-MASQ -A KUBE-SEP-EYGXM42F6P7YZK7G -p tcp -m comment --comment "osm/nbi" -m tcp -j DNAT --to-destination 10.244.0.14:9999 -A KUBE-SEP-HJ7EWOW62IX6GL6R -s 10.244.0.10/32 -m comment --comment "kube-system/kube-dns:metrics" -j KUBE-MARK-MASQ -A KUBE-SEP-HJ7EWOW62IX6GL6R -p tcp -m comment --comment "kube-system/kube-dns:metrics" -m tcp -j DNAT --to-destination 10.244.0.10:9153 -A KUBE-SEP-JIWKU7LWBAE46CYF -s 10.244.0.37/32 -m comment --comment "osm/airflow-redis:redis-db" -j KUBE-MARK-MASQ -A KUBE-SEP-JIWKU7LWBAE46CYF -p tcp -m comment --comment "osm/airflow-redis:redis-db" -m tcp -j DNAT --to-destination 10.244.0.37:6379 -A KUBE-SEP-JIZ4GTHNGPIHE43F -s 10.244.0.26/32 -m comment --comment "osm/airflow-webserver:airflow-ui" -j KUBE-MARK-MASQ -A KUBE-SEP-JIZ4GTHNGPIHE43F -p tcp -m comment --comment "osm/airflow-webserver:airflow-ui" -m tcp -j DNAT --to-destination 10.244.0.26:8080 -A KUBE-SEP-LBMQNJ35ID4UIQ2A -s 10.244.0.9/32 -m comment --comment "kube-system/kube-dns:dns" -j KUBE-MARK-MASQ -A 
KUBE-SEP-LBMQNJ35ID4UIQ2A -p udp -m comment --comment "kube-system/kube-dns:dns" -m udp -j DNAT --to-destination 10.244.0.9:53 -A KUBE-SEP-MHWK5N6X4ER6N5YC -s 10.244.0.21/32 -m comment --comment "osm/ng-ui" -j KUBE-MARK-MASQ -A KUBE-SEP-MHWK5N6X4ER6N5YC -p tcp -m comment --comment "osm/ng-ui" -m tcp -j DNAT --to-destination 10.244.0.21:80 -A KUBE-SEP-MNWGAGGUHIHEBZJ5 -s 10.244.0.40/32 -m comment --comment "osm/zookeeper:tcp-client" -j KUBE-MARK-MASQ -A KUBE-SEP-MNWGAGGUHIHEBZJ5 -p tcp -m comment --comment "osm/zookeeper:tcp-client" -m tcp -j DNAT --to-destination 10.244.0.40:2181 -A KUBE-SEP-MOFIWPYHXNLC5UHH -s 10.244.0.41/32 -m comment --comment "osm/mysql:mysql" -j KUBE-MARK-MASQ -A KUBE-SEP-MOFIWPYHXNLC5UHH -p tcp -m comment --comment "osm/mysql:mysql" -m tcp -j DNAT --to-destination 10.244.0.41:3306 -A KUBE-SEP-OP4AXEAS4OXHBEQX -s 10.244.0.10/32 -m comment --comment "kube-system/kube-dns:dns-tcp" -j KUBE-MARK-MASQ -A KUBE-SEP-OP4AXEAS4OXHBEQX -p tcp -m comment --comment "kube-system/kube-dns:dns-tcp" -m tcp -j DNAT --to-destination 10.244.0.10:53 -A KUBE-SEP-OYQECDV34TM4MQCT -s 10.244.0.40/32 -m comment --comment "osm/zookeeper:tcp-follower" -j KUBE-MARK-MASQ -A KUBE-SEP-OYQECDV34TM4MQCT -p tcp -m comment --comment "osm/zookeeper:tcp-follower" -m tcp -j DNAT --to-destination 10.244.0.40:2888 -A KUBE-SEP-R4ZUHGWZDVOCHPCZ -s 10.244.0.16/32 -m comment --comment "osm/grafana:service" -j KUBE-MARK-MASQ -A KUBE-SEP-R4ZUHGWZDVOCHPCZ -p tcp -m comment --comment "osm/grafana:service" -m tcp -j DNAT --to-destination 10.244.0.16:3000 -A KUBE-SEP-R7EMXN5TTQQVP4UW -s 10.244.0.10/32 -m comment --comment "kube-system/kube-dns:dns" -j KUBE-MARK-MASQ -A KUBE-SEP-R7EMXN5TTQQVP4UW -p udp -m comment --comment "kube-system/kube-dns:dns" -m udp -j DNAT --to-destination 10.244.0.10:53 -A KUBE-SEP-S7EHPJDYTVDKWBVK -s 10.244.0.49/32 -m comment --comment "osm/alertmanager:http" -j KUBE-MARK-MASQ -A KUBE-SEP-S7EHPJDYTVDKWBVK -p tcp -m comment --comment "osm/alertmanager:http" -m tcp -j DNAT --to-destination 10.244.0.49:9093 -A KUBE-SEP-S7MPVVC7MGYVFSF3 -s 10.244.0.9/32 -m comment --comment "kube-system/kube-dns:dns-tcp" -j KUBE-MARK-MASQ -A KUBE-SEP-S7MPVVC7MGYVFSF3 -p tcp -m comment --comment "kube-system/kube-dns:dns-tcp" -m tcp -j DNAT --to-destination 10.244.0.9:53 -A KUBE-SEP-SDEOVHTVPO7QT7XS -s 10.244.0.28/32 -m comment --comment "osm/airflow-statsd:statsd-ingest" -j KUBE-MARK-MASQ -A KUBE-SEP-SDEOVHTVPO7QT7XS -p udp -m comment --comment "osm/airflow-statsd:statsd-ingest" -m udp -j DNAT --to-destination 10.244.0.28:9125 -A KUBE-SEP-UJOLMLJZLWUP36UK -s 10.244.0.35/32 -m comment --comment "osm/kafka:tcp-client" -j KUBE-MARK-MASQ -A KUBE-SEP-UJOLMLJZLWUP36UK -p tcp -m comment --comment "osm/kafka:tcp-client" -m tcp -j DNAT --to-destination 10.244.0.35:9092 -A KUBE-SEP-XZVNOPGBKFOKTOOW -s 10.244.0.4/32 -m comment --comment "metallb-system/metallb-webhook-service" -j KUBE-MARK-MASQ -A KUBE-SEP-XZVNOPGBKFOKTOOW -p tcp -m comment --comment "metallb-system/metallb-webhook-service" -m tcp -j DNAT --to-destination 10.244.0.4:9443 -A KUBE-SEP-YO6GK7QQMSG42MKC -s 10.244.0.40/32 -m comment --comment "osm/zookeeper:tcp-election" -j KUBE-MARK-MASQ -A KUBE-SEP-YO6GK7QQMSG42MKC -p tcp -m comment --comment "osm/zookeeper:tcp-election" -m tcp -j DNAT --to-destination 10.244.0.40:3888 -A KUBE-SEP-YV3BY7OPHE3HCHQL -s 10.244.0.32/32 -m comment --comment "osm/kafka:tcp-client" -j KUBE-MARK-MASQ -A KUBE-SEP-YV3BY7OPHE3HCHQL -p tcp -m comment --comment "osm/kafka:tcp-client" -m tcp -j DNAT --to-destination 10.244.0.32:9092 -A 
KUBE-SEP-Z3MJ67D3H4OQ7N4P -s 10.244.0.17/32 -m comment --comment "osm/webhook-translator" -j KUBE-MARK-MASQ -A KUBE-SEP-Z3MJ67D3H4OQ7N4P -p tcp -m comment --comment "osm/webhook-translator" -m tcp -j DNAT --to-destination 10.244.0.17:9998 -A KUBE-SEP-ZZLFH7IUJGJIPKFD -s 10.244.0.43/32 -m comment --comment "osm/airflow-postgresql:tcp-postgresql" -j KUBE-MARK-MASQ -A KUBE-SEP-ZZLFH7IUJGJIPKFD -p tcp -m comment --comment "osm/airflow-postgresql:tcp-postgresql" -m tcp -j DNAT --to-destination 10.244.0.43:5432 -A KUBE-SERVICES -d 10.105.149.136/32 -p tcp -m comment --comment "osm/mysql:mysql cluster IP" -m tcp --dport 3306 -j KUBE-SVC-S4GQNWHOWTOQDYZX -A KUBE-SERVICES -d 10.106.117.123/32 -p tcp -m comment --comment "osm/webhook-translator cluster IP" -m tcp --dport 9998 -j KUBE-SVC-XUD4OEZNIHB47KQL -A KUBE-SERVICES -d 10.98.115.33/32 -p tcp -m comment --comment "osm/nbi cluster IP" -m tcp --dport 9999 -j KUBE-SVC-FQUXG555W5IVIWW3 -A KUBE-SERVICES -d 10.106.48.168/32 -p tcp -m comment --comment "osm/zookeeper:tcp-follower cluster IP" -m tcp --dport 2888 -j KUBE-SVC-57RXGDQGDROYCT5J -A KUBE-SERVICES -d 10.106.48.168/32 -p tcp -m comment --comment "osm/zookeeper:tcp-election cluster IP" -m tcp --dport 3888 -j KUBE-SVC-KJWF6GU7OHHSDT5H -A KUBE-SERVICES -d 10.99.25.163/32 -p tcp -m comment --comment "osm/airflow-statsd:statsd-scrape cluster IP" -m tcp --dport 9102 -j KUBE-SVC-TTTQGL2HNUNQKPOG -A KUBE-SERVICES -d 10.99.248.10/32 -p tcp -m comment --comment "osm/airflow-postgresql:tcp-postgresql cluster IP" -m tcp --dport 5432 -j KUBE-SVC-QE77U7R3P7AE7O5U -A KUBE-SERVICES -d 10.96.0.10/32 -p tcp -m comment --comment "kube-system/kube-dns:dns-tcp cluster IP" -m tcp --dport 53 -j KUBE-SVC-ERIFXISQEP7F7OF4 -A KUBE-SERVICES -d 10.102.90.156/32 -p tcp -m comment --comment "metallb-system/metallb-webhook-service cluster IP" -m tcp --dport 443 -j KUBE-SVC-GZ25SP4UFGF7SAVL -A KUBE-SERVICES -d 10.102.202.37/32 -p tcp -m comment --comment "cert-manager/cert-manager-webhook:https cluster IP" -m tcp --dport 443 -j KUBE-SVC-ZUD4L6KQKCHD52W4 -A KUBE-SERVICES -d 10.102.89.249/32 -p tcp -m comment --comment "osm/kafka:tcp-client cluster IP" -m tcp --dport 9092 -j KUBE-SVC-QSOISDZI64RJ2IKG -A KUBE-SERVICES -d 10.100.84.120/32 -p tcp -m comment --comment "osm/prometheus cluster IP" -m tcp --dport 9090 -j KUBE-SVC-MD4PSIBW5SPF2DB3 -A KUBE-SERVICES -d 10.99.25.163/32 -p udp -m comment --comment "osm/airflow-statsd:statsd-ingest cluster IP" -m udp --dport 9125 -j KUBE-SVC-O36IMWM6WEZJKHBK -A KUBE-SERVICES -d 10.105.217.83/32 -p tcp -m comment --comment "osm/pushgateway-prometheus-pushgateway:http cluster IP" -m tcp --dport 9091 -j KUBE-SVC-5QOWUZVRO3UICSLI -A KUBE-SERVICES -d 10.96.0.1/32 -p tcp -m comment --comment "default/kubernetes:https cluster IP" -m tcp --dport 443 -j KUBE-SVC-NPX46M4PTMTKRN6Y -A KUBE-SERVICES -d 10.96.0.10/32 -p tcp -m comment --comment "kube-system/kube-dns:metrics cluster IP" -m tcp --dport 9153 -j KUBE-SVC-JD5MR3NA4I4DYORP -A KUBE-SERVICES -d 10.108.45.246/32 -p tcp -m comment --comment "osm/grafana:service cluster IP" -m tcp --dport 3000 -j KUBE-SVC-PQIZCPF63EFIBBJY -A KUBE-SERVICES -d 10.110.178.34/32 -p tcp -m comment --comment "osm/ng-ui cluster IP" -m tcp --dport 80 -j KUBE-SVC-YA74QX5VY2UAABIX -A KUBE-SERVICES -d 10.105.14.102/32 -p tcp -m comment --comment "osm/airflow-webserver:airflow-ui cluster IP" -m tcp --dport 8080 -j KUBE-SVC-BF2LB5WJRBPLA42J -A KUBE-SERVICES -d 10.104.34.214/32 -p tcp -m comment --comment "osm/airflow-redis:redis-db cluster IP" -m tcp --dport 6379 -j 
KUBE-SVC-USIDOZAE2VTXK5OJ -A KUBE-SERVICES -d 10.96.0.10/32 -p udp -m comment --comment "kube-system/kube-dns:dns cluster IP" -m udp --dport 53 -j KUBE-SVC-TCOU7JCQXEZGVUNU -A KUBE-SERVICES -d 10.106.48.168/32 -p tcp -m comment --comment "osm/zookeeper:tcp-client cluster IP" -m tcp --dport 2181 -j KUBE-SVC-YFBWLFEGVQGI5ORM -A KUBE-SERVICES -d 10.111.49.15/32 -p tcp -m comment --comment "osm/alertmanager:http cluster IP" -m tcp --dport 9093 -j KUBE-SVC-GZN4S7ND4PF6YXD6 -A KUBE-SERVICES -m comment --comment "kubernetes service nodeports; NOTE: this must be the last rule in this chain" -m addrtype --dst-type LOCAL -j KUBE-NODEPORTS -A KUBE-SVC-57RXGDQGDROYCT5J ! -s 10.244.0.0/16 -d 10.106.48.168/32 -p tcp -m comment --comment "osm/zookeeper:tcp-follower cluster IP" -m tcp --dport 2888 -j KUBE-MARK-MASQ -A KUBE-SVC-57RXGDQGDROYCT5J -m comment --comment "osm/zookeeper:tcp-follower -> 10.244.0.40:2888" -j KUBE-SEP-OYQECDV34TM4MQCT -A KUBE-SVC-5QOWUZVRO3UICSLI ! -s 10.244.0.0/16 -d 10.105.217.83/32 -p tcp -m comment --comment "osm/pushgateway-prometheus-pushgateway:http cluster IP" -m tcp --dport 9091 -j KUBE-MARK-MASQ -A KUBE-SVC-5QOWUZVRO3UICSLI -m comment --comment "osm/pushgateway-prometheus-pushgateway:http -> 10.244.0.47:9091" -j KUBE-SEP-4TE7OZLRL6YAGNFP -A KUBE-SVC-BF2LB5WJRBPLA42J ! -s 10.244.0.0/16 -d 10.105.14.102/32 -p tcp -m comment --comment "osm/airflow-webserver:airflow-ui cluster IP" -m tcp --dport 8080 -j KUBE-MARK-MASQ -A KUBE-SVC-BF2LB5WJRBPLA42J -m comment --comment "osm/airflow-webserver:airflow-ui -> 10.244.0.26:8080" -j KUBE-SEP-JIZ4GTHNGPIHE43F -A KUBE-SVC-ERIFXISQEP7F7OF4 ! -s 10.244.0.0/16 -d 10.96.0.10/32 -p tcp -m comment --comment "kube-system/kube-dns:dns-tcp cluster IP" -m tcp --dport 53 -j KUBE-MARK-MASQ -A KUBE-SVC-ERIFXISQEP7F7OF4 -m comment --comment "kube-system/kube-dns:dns-tcp -> 10.244.0.10:53" -m statistic --mode random --probability 0.50000000000 -j KUBE-SEP-OP4AXEAS4OXHBEQX -A KUBE-SVC-ERIFXISQEP7F7OF4 -m comment --comment "kube-system/kube-dns:dns-tcp -> 10.244.0.9:53" -j KUBE-SEP-S7MPVVC7MGYVFSF3 -A KUBE-SVC-FQUXG555W5IVIWW3 ! -s 10.244.0.0/16 -d 10.98.115.33/32 -p tcp -m comment --comment "osm/nbi cluster IP" -m tcp --dport 9999 -j KUBE-MARK-MASQ -A KUBE-SVC-FQUXG555W5IVIWW3 -m comment --comment "osm/nbi -> 10.244.0.14:9999" -j KUBE-SEP-EYGXM42F6P7YZK7G -A KUBE-SVC-GZ25SP4UFGF7SAVL ! -s 10.244.0.0/16 -d 10.102.90.156/32 -p tcp -m comment --comment "metallb-system/metallb-webhook-service cluster IP" -m tcp --dport 443 -j KUBE-MARK-MASQ -A KUBE-SVC-GZ25SP4UFGF7SAVL -m comment --comment "metallb-system/metallb-webhook-service -> 10.244.0.4:9443" -j KUBE-SEP-XZVNOPGBKFOKTOOW -A KUBE-SVC-GZN4S7ND4PF6YXD6 ! -s 10.244.0.0/16 -d 10.111.49.15/32 -p tcp -m comment --comment "osm/alertmanager:http cluster IP" -m tcp --dport 9093 -j KUBE-MARK-MASQ -A KUBE-SVC-GZN4S7ND4PF6YXD6 -m comment --comment "osm/alertmanager:http -> 10.244.0.49:9093" -j KUBE-SEP-S7EHPJDYTVDKWBVK -A KUBE-SVC-JD5MR3NA4I4DYORP ! -s 10.244.0.0/16 -d 10.96.0.10/32 -p tcp -m comment --comment "kube-system/kube-dns:metrics cluster IP" -m tcp --dport 9153 -j KUBE-MARK-MASQ -A KUBE-SVC-JD5MR3NA4I4DYORP -m comment --comment "kube-system/kube-dns:metrics -> 10.244.0.10:9153" -m statistic --mode random --probability 0.50000000000 -j KUBE-SEP-HJ7EWOW62IX6GL6R -A KUBE-SVC-JD5MR3NA4I4DYORP -m comment --comment "kube-system/kube-dns:metrics -> 10.244.0.9:9153" -j KUBE-SEP-CEWR77HQLZDHWWAJ -A KUBE-SVC-KJWF6GU7OHHSDT5H ! 
-s 10.244.0.0/16 -d 10.106.48.168/32 -p tcp -m comment --comment "osm/zookeeper:tcp-election cluster IP" -m tcp --dport 3888 -j KUBE-MARK-MASQ -A KUBE-SVC-KJWF6GU7OHHSDT5H -m comment --comment "osm/zookeeper:tcp-election -> 10.244.0.40:3888" -j KUBE-SEP-YO6GK7QQMSG42MKC -A KUBE-SVC-MD4PSIBW5SPF2DB3 ! -s 10.244.0.0/16 -d 10.100.84.120/32 -p tcp -m comment --comment "osm/prometheus cluster IP" -m tcp --dport 9090 -j KUBE-MARK-MASQ -A KUBE-SVC-MD4PSIBW5SPF2DB3 -m comment --comment "osm/prometheus -> 10.244.0.15:9090" -j KUBE-SEP-7ZL4UUQ5KSMYQBTK -A KUBE-SVC-NPX46M4PTMTKRN6Y ! -s 10.244.0.0/16 -d 10.96.0.1/32 -p tcp -m comment --comment "default/kubernetes:https cluster IP" -m tcp --dport 443 -j KUBE-MARK-MASQ -A KUBE-SVC-NPX46M4PTMTKRN6Y -m comment --comment "default/kubernetes:https -> 172.21.23.11:6443" -j KUBE-SEP-C523AIPH4Y2GJ7FW -A KUBE-SVC-O36IMWM6WEZJKHBK ! -s 10.244.0.0/16 -d 10.99.25.163/32 -p udp -m comment --comment "osm/airflow-statsd:statsd-ingest cluster IP" -m udp --dport 9125 -j KUBE-MARK-MASQ -A KUBE-SVC-O36IMWM6WEZJKHBK -m comment --comment "osm/airflow-statsd:statsd-ingest -> 10.244.0.28:9125" -j KUBE-SEP-SDEOVHTVPO7QT7XS -A KUBE-SVC-PQIZCPF63EFIBBJY ! -s 10.244.0.0/16 -d 10.108.45.246/32 -p tcp -m comment --comment "osm/grafana:service cluster IP" -m tcp --dport 3000 -j KUBE-MARK-MASQ -A KUBE-SVC-PQIZCPF63EFIBBJY -m comment --comment "osm/grafana:service -> 10.244.0.16:3000" -j KUBE-SEP-R4ZUHGWZDVOCHPCZ -A KUBE-SVC-QE77U7R3P7AE7O5U ! -s 10.244.0.0/16 -d 10.99.248.10/32 -p tcp -m comment --comment "osm/airflow-postgresql:tcp-postgresql cluster IP" -m tcp --dport 5432 -j KUBE-MARK-MASQ -A KUBE-SVC-QE77U7R3P7AE7O5U -m comment --comment "osm/airflow-postgresql:tcp-postgresql -> 10.244.0.43:5432" -j KUBE-SEP-ZZLFH7IUJGJIPKFD -A KUBE-SVC-QSOISDZI64RJ2IKG ! -s 10.244.0.0/16 -d 10.102.89.249/32 -p tcp -m comment --comment "osm/kafka:tcp-client cluster IP" -m tcp --dport 9092 -j KUBE-MARK-MASQ -A KUBE-SVC-QSOISDZI64RJ2IKG -m comment --comment "osm/kafka:tcp-client -> 10.244.0.32:9092" -m statistic --mode random --probability 0.33333333349 -j KUBE-SEP-YV3BY7OPHE3HCHQL -A KUBE-SVC-QSOISDZI64RJ2IKG -m comment --comment "osm/kafka:tcp-client -> 10.244.0.35:9092" -m statistic --mode random --probability 0.50000000000 -j KUBE-SEP-UJOLMLJZLWUP36UK -A KUBE-SVC-QSOISDZI64RJ2IKG -m comment --comment "osm/kafka:tcp-client -> 10.244.0.45:9092" -j KUBE-SEP-4IHIWJZYTE5RO3PV -A KUBE-SVC-S4GQNWHOWTOQDYZX ! -s 10.244.0.0/16 -d 10.105.149.136/32 -p tcp -m comment --comment "osm/mysql:mysql cluster IP" -m tcp --dport 3306 -j KUBE-MARK-MASQ -A KUBE-SVC-S4GQNWHOWTOQDYZX -m comment --comment "osm/mysql:mysql -> 10.244.0.41:3306" -j KUBE-SEP-MOFIWPYHXNLC5UHH -A KUBE-SVC-TCOU7JCQXEZGVUNU ! -s 10.244.0.0/16 -d 10.96.0.10/32 -p udp -m comment --comment "kube-system/kube-dns:dns cluster IP" -m udp --dport 53 -j KUBE-MARK-MASQ -A KUBE-SVC-TCOU7JCQXEZGVUNU -m comment --comment "kube-system/kube-dns:dns -> 10.244.0.10:53" -m statistic --mode random --probability 0.50000000000 -j KUBE-SEP-R7EMXN5TTQQVP4UW -A KUBE-SVC-TCOU7JCQXEZGVUNU -m comment --comment "kube-system/kube-dns:dns -> 10.244.0.9:53" -j KUBE-SEP-LBMQNJ35ID4UIQ2A -A KUBE-SVC-TTTQGL2HNUNQKPOG ! -s 10.244.0.0/16 -d 10.99.25.163/32 -p tcp -m comment --comment "osm/airflow-statsd:statsd-scrape cluster IP" -m tcp --dport 9102 -j KUBE-MARK-MASQ -A KUBE-SVC-TTTQGL2HNUNQKPOG -m comment --comment "osm/airflow-statsd:statsd-scrape -> 10.244.0.28:9102" -j KUBE-SEP-2EKL53EI67NMIYCL -A KUBE-SVC-USIDOZAE2VTXK5OJ ! 
-s 10.244.0.0/16 -d 10.104.34.214/32 -p tcp -m comment --comment "osm/airflow-redis:redis-db cluster IP" -m tcp --dport 6379 -j KUBE-MARK-MASQ -A KUBE-SVC-USIDOZAE2VTXK5OJ -m comment --comment "osm/airflow-redis:redis-db -> 10.244.0.37:6379" -j KUBE-SEP-JIWKU7LWBAE46CYF -A KUBE-SVC-XUD4OEZNIHB47KQL ! -s 10.244.0.0/16 -d 10.106.117.123/32 -p tcp -m comment --comment "osm/webhook-translator cluster IP" -m tcp --dport 9998 -j KUBE-MARK-MASQ -A KUBE-SVC-XUD4OEZNIHB47KQL -m comment --comment "osm/webhook-translator -> 10.244.0.17:9998" -j KUBE-SEP-Z3MJ67D3H4OQ7N4P -A KUBE-SVC-YA74QX5VY2UAABIX ! -s 10.244.0.0/16 -d 10.110.178.34/32 -p tcp -m comment --comment "osm/ng-ui cluster IP" -m tcp --dport 80 -j KUBE-MARK-MASQ -A KUBE-SVC-YA74QX5VY2UAABIX -m comment --comment "osm/ng-ui -> 10.244.0.21:80" -j KUBE-SEP-MHWK5N6X4ER6N5YC -A KUBE-SVC-YFBWLFEGVQGI5ORM ! -s 10.244.0.0/16 -d 10.106.48.168/32 -p tcp -m comment --comment "osm/zookeeper:tcp-client cluster IP" -m tcp --dport 2181 -j KUBE-MARK-MASQ -A KUBE-SVC-YFBWLFEGVQGI5ORM -m comment --comment "osm/zookeeper:tcp-client -> 10.244.0.40:2181" -j KUBE-SEP-MNWGAGGUHIHEBZJ5 -A KUBE-SVC-ZUD4L6KQKCHD52W4 ! -s 10.244.0.0/16 -d 10.102.202.37/32 -p tcp -m comment --comment "cert-manager/cert-manager-webhook:https cluster IP" -m tcp --dport 443 -j KUBE-MARK-MASQ -A KUBE-SVC-ZUD4L6KQKCHD52W4 -m comment --comment "cert-manager/cert-manager-webhook:https -> 10.244.0.5:10250" -j KUBE-SEP-C2Z3JOEZYT4GUPVJ COMMIT # Completed on Sun Dec 31 12:32:04 2023 tee: /etc/iptables/rules.v6: No such file or directory # Generated by ip6tables-save v1.8.7 on Sun Dec 31 12:32:04 2023 *mangle :PREROUTING ACCEPT [0:0] :INPUT ACCEPT [0:0] :FORWARD ACCEPT [0:0] :OUTPUT ACCEPT [0:0] :POSTROUTING ACCEPT [0:0] :KUBE-IPTABLES-HINT - [0:0] :KUBE-KUBELET-CANARY - [0:0] :KUBE-PROXY-CANARY - [0:0] COMMIT # Completed on Sun Dec 31 12:32:04 2023 # Generated by ip6tables-save v1.8.7 on Sun Dec 31 12:32:04 2023 *filter :INPUT ACCEPT [0:0] :FORWARD ACCEPT [0:0] :OUTPUT ACCEPT [0:0] :KUBE-EXTERNAL-SERVICES - [0:0] :KUBE-FIREWALL - [0:0] :KUBE-FORWARD - [0:0] :KUBE-KUBELET-CANARY - [0:0] :KUBE-NODEPORTS - [0:0] :KUBE-PROXY-CANARY - [0:0] :KUBE-PROXY-FIREWALL - [0:0] :KUBE-SERVICES - [0:0] -A INPUT -m conntrack --ctstate NEW -m comment --comment "kubernetes load balancer firewall" -j KUBE-PROXY-FIREWALL -A INPUT -m comment --comment "kubernetes health check service ports" -j KUBE-NODEPORTS -A INPUT -m conntrack --ctstate NEW -m comment --comment "kubernetes externally-visible service portals" -j KUBE-EXTERNAL-SERVICES -A FORWARD -m conntrack --ctstate NEW -m comment --comment "kubernetes load balancer firewall" -j KUBE-PROXY-FIREWALL -A FORWARD -m comment --comment "kubernetes forwarding rules" -j KUBE-FORWARD -A FORWARD -m conntrack --ctstate NEW -m comment --comment "kubernetes service portals" -j KUBE-SERVICES -A FORWARD -m conntrack --ctstate NEW -m comment --comment "kubernetes externally-visible service portals" -j KUBE-EXTERNAL-SERVICES -A OUTPUT -m conntrack --ctstate NEW -m comment --comment "kubernetes load balancer firewall" -j KUBE-PROXY-FIREWALL -A OUTPUT -m conntrack --ctstate NEW -m comment --comment "kubernetes service portals" -j KUBE-SERVICES -A KUBE-FIREWALL -m comment --comment "kubernetes firewall for dropping marked packets" -m mark --mark 0x8000/0x8000 -j DROP -A KUBE-FORWARD -m conntrack --ctstate INVALID -j DROP -A KUBE-FORWARD -m comment --comment "kubernetes forwarding rules" -m mark --mark 0x4000/0x4000 -j ACCEPT -A KUBE-FORWARD -m comment --comment "kubernetes 
forwarding conntrack rule" -m conntrack --ctstate RELATED,ESTABLISHED -j ACCEPT COMMIT # Completed on Sun Dec 31 12:32:04 2023 # Generated by ip6tables-save v1.8.7 on Sun Dec 31 12:32:04 2023 *nat :PREROUTING ACCEPT [0:0] :INPUT ACCEPT [0:0] :OUTPUT ACCEPT [0:0] :POSTROUTING ACCEPT [0:0] :KUBE-KUBELET-CANARY - [0:0] :KUBE-MARK-DROP - [0:0] :KUBE-MARK-MASQ - [0:0] :KUBE-NODEPORTS - [0:0] :KUBE-POSTROUTING - [0:0] :KUBE-PROXY-CANARY - [0:0] :KUBE-SERVICES - [0:0] -A PREROUTING -m comment --comment "kubernetes service portals" -j KUBE-SERVICES -A OUTPUT -m comment --comment "kubernetes service portals" -j KUBE-SERVICES -A POSTROUTING -m comment --comment "kubernetes postrouting rules" -j KUBE-POSTROUTING -A KUBE-MARK-DROP -j MARK --set-xmark 0x8000/0x8000 -A KUBE-MARK-MASQ -j MARK --set-xmark 0x4000/0x4000 -A KUBE-POSTROUTING -m mark ! --mark 0x4000/0x4000 -j RETURN -A KUBE-POSTROUTING -j MARK --set-xmark 0x4000/0x0 -A KUBE-POSTROUTING -m comment --comment "kubernetes service traffic requiring SNAT" -j MASQUERADE --random-fully -A KUBE-SERVICES ! -d ::1/128 -m comment --comment "kubernetes service nodeports; NOTE: this must be the last rule in this chain" -m addrtype --dst-type LOCAL -j KUBE-NODEPORTS COMMIT # Completed on Sun Dec 31 12:32:04 2023 Track end end: https://osm.etsi.org/InstallLog.php?&installation_id=1704025241-XLdS97dvDXZdtpEe&local_ts=1704025925&event=end&operation=end&value=&comment=&tags= /etc/osm /etc/osm/helm /etc/osm/helm/alertmanager-values.yaml /etc/osm/helm/mongodb-values.yaml /etc/osm/helm/airflow-values.yaml /etc/osm/helm/osm-values.yaml /etc/osm/metallb-ipaddrpool.yaml /etc/osm/kubeadm-config.yaml DONE + set +eux Warning: Permanently added '172.21.23.11' (ED25519) to the list of known hosts. Defaulted container "lcm" out of: lcm, kafka-ro-mongo-test (init) Unable to use a TTY - input is not a terminal or the right kind of file + export JUJU_PASSWORD=secret + JUJU_PASSWORD=secret + cat + echo Environment was updated at /robot-systest/results/osm_environment.rc Environment was updated at /robot-systest/results/osm_environment.rc [Pipeline] } [Pipeline] // stage [Pipeline] stage [Pipeline] { (Add VIM and K8s cluster to OSM) [Pipeline] sh [azure_robot_tests] Running shell script + . /robot-systest/results/osm_environment.rc + export CLOUD_TYPE=azure + export OSM_HOSTNAME=172.21.23.11 + export OSM_IMAGE_NAME=osmtest202312311216 + export JUJU_PASSWORD=secret + . 
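The "Add VIM and K8s cluster to OSM" stage that starts here registers the Azure VIM account and the newly created microk8s cluster with OSM before any Robot suite runs. The wrapper script does not echo the full osm CLI calls, but judging from the attributes printed in the two tables below, the registration is roughly equivalent to the following sketch; the flags and values are assumptions reconstructed from that output, and the password placeholder is hypothetical:

  # Register the Azure VIM account (values taken from the VIM table below)
  osm vim-create --name azure-etsi \
      --account_type azure \
      --auth_url http://www.azure.com \
      --user 7c5ba2e6-2013-49a0-bf9a-f2592030f7ff \
      --password '<service-principal-secret>' \
      --tenant e6746ab5-ebdc-4e9d-821b-a71bdaf63d9b \
      --config '{region_name: westeurope, resource_group: OSM_CICD_GROUP, subscription_id: 8fb7e78d-097b-413d-bc65-41d29be6bab1, vnet_name: OSM-CICD-net, flavors_pattern: "^Standard"}'

  # Attach the IaaS k8s cluster created earlier, using the kubeconfig saved by the pipeline
  osm k8scluster-add k8stest202312311208 \
      --creds /robot-systest/results/kubeconfig.yaml \
      --vim azure-etsi \
      --k8s-nets '{net1: OSM-CICD-subnet}' \
      --version v1 \
      --description "Robot cluster"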
/robot-systest/results/k8s_environment.rc + export CLOUD_TYPE=azure + export USE_PAAS_K8S=FALSE + export K8S_IP=172.21.23.10 + export K8S_IMAGE_NAME=k8stest202312311208 + export K8S_CREDENTIALS=/robot-systest/results/kubeconfig.yaml + osm version Server version: 9.0.0.post115+gc72bc8e 2020-04-17 Client version: 11.0.0rc1.post59+gb46c7c6 + set +x Adding VIM to OSM 1eb731ff-8cf0-4be1-a4c5-0e9177a9917e +-----------------+--------------------------------------------------------------+ | key | attribute | +-----------------+--------------------------------------------------------------+ | _id | "1eb731ff-8cf0-4be1-a4c5-0e9177a9917e" | | name | "azure-etsi" | | vim_type | "azure" | | description | "None" | | vim_url | "http://www.azure.com" | | vim_user | "7c5ba2e6-2013-49a0-bf9a-f2592030f7ff" | | vim_password | "********" | | vim_tenant_name | "e6746ab5-ebdc-4e9d-821b-a71bdaf63d9b" | | config | { | | | "region_name": "westeurope", | | | "resource_group": "OSM_CICD_GROUP", | | | "subscription_id": "8fb7e78d-097b-413d-bc65-41d29be6bab1", | | | "vnet_name": "OSM-CICD-net", | | | "flavors_pattern": "^Standard" | | | } | | _admin | { | | | "created": 1704025929.2144847, | | | "modified": 1704025929.2144847, | | | "projects_read": [ | | | "7d0dfa29-ed12-4986-ae3d-7ac1556d33a5" | | | ], | | | "projects_write": [ | | | "7d0dfa29-ed12-4986-ae3d-7ac1556d33a5" | | | ], | | | "operationalState": "ENABLED", | | | "operations": [ | | | { | | | "lcmOperationType": "create", | | | "operationState": "COMPLETED", | | | "startTime": 1704025929.214673, | | | "statusEnteredTime": 1704025929.2374957, | | | "operationParams": null | | | } | | | ], | | | "current_operation": null, | | | "detailed-status": "" | | | } | | schema_version | "1.11" | | admin | { | | | "current_operation": 0 | | | } | +-----------------+--------------------------------------------------------------+ Adding K8s cluster to OSM 83f90d55-fa1a-4878-9c05-ca0cd236f650 +----------------+------------------------------------------------------------------------------------------------------+ | key | attribute | +----------------+------------------------------------------------------------------------------------------------------+ | _id | "83f90d55-fa1a-4878-9c05-ca0cd236f650" | | name | "k8stest202312311208" | | credentials | { | | | "apiVersion": "v1", | | | "clusters": [ | | | { | | | "cluster": { | | | "certificate-authority-data": "LS0tLS1CRUdJTiBDRVJUSUZJQ0FURS0tLS0tCk1JSUREekNDQWZlZ0F3SUJBZ | | | 0lVSjZ4WmU5V1MvTXBKTlRtN0gzS3NVQm5Fejlrd0RRWUpLb1pJaHZjTkFRRUwKQlFBd0Z6RVZNQk1HQTFVRUF3d01NVEF1TVRVe | | | UxqRTRNeTR4TUI0WERUSXpNVEl6TVRFeU1UUXlNVm9YRFRNegpNVEl5T0RFeU1UUXlNVm93RnpFVk1CTUdBMVVFQXd3TU1UQXVNV | | | FV5TGpFNE15NHhNSUlCSWpBTkJna3Foa2lHCjl3MEJBUUVGQUFPQ0FROEFNSUlCQ2dLQ0FRRUF4WVZxeTdHZ05Qb1hQRTQ2RlhvW | | | Fl6WFRuOVZscTIvZ1JpcXgKdVN3Smp6bzlZRlBUYVdXOTN1dGhvcXB6VDByQzhmR0ZBOHFpR1FIMnh2MGZYTVFoMTIrbkdyalRjY | | | zcvc3pCNAo1ZEFSYlBEcUhMN3ZvcmlyUkx4ejRXdWxIZjZiaisyb1ljTmxpTkZUcCtvY3J3SW0rdEtWWXN2Q1FFbWQ1aTFKClZmV | | | jBvQ2JoZ1YvbTZWV2tYTGsycUpBckdKUFVwMTNmL3NNOHRGb29ld0xTQ0MwTFFYNmM3cEh3ajk2c0hmdVAKelViN0RiN3FQMnJ2W | | | WNhMXRvWGc2SWdkdnVreXhvUitRNEpIUWdIRGVld2VaTUQ3RTRhK2w5K25Obm9BcEJROApMMk9zSEp1MzNNRURWNGtqWktLNU02c | | | S9adDlXUUxjOHdwNEE0dEVTeG9ZUSs3d2FHd0lEQVFBQm8xTXdVVEFkCkJnTlZIUTRFRmdRVURvOGJEdTdVOWtZMW1kMm5QZkFDM | | | 0JqMk0xWXdId1lEVlIwakJCZ3dGb0FVRG84YkR1N1UKOWtZMW1kMm5QZkFDM0JqMk0xWXdEd1lEVlIwVEFRSC9CQVV3QXdFQi96Q | | | U5CZ2txaGtpRzl3MEJBUXNGQUFPQwpBUUVBdEdYbVNGcVd4b2E0SnNxYnliRzlNcHlrTWdobjBVTlBYYnlrM0k4dWF6enIzbHh3M | | 
| 2d0ckhZUVR3VnlhCkwvZjVXcnExTGhwbkZTWTJXQmwvOWYxeUdHREp0Z3dxZE14a3ZpUWlsWHE3R0JESThKZE5ML3Y0blhiOXdoO | | | GIKY1VQL0JIaGRCRHd3cjYyR2ttZUVGSW5LYXA2UVIyeWZpcmFWRDZIMktldCtyTG9qRDQvMzk3SjhGUHhvTmM0dQpBWTdCNEo0T | | | 1B6dFlzbm11SGozZnhjSnhFeXI2d1RpaGZqVys0eXBOdk5vTzJGVWpaSVpRb24wYit2clRCMVdOCktCaFhYVGRxVHpqSTMvNUVVb | | | 3R6VXFuYk5RcGFvaDdyY01sYmxFa1pQaHYrcXFzU0ZSbXdBZUtFNVZxdHFUUUoKeUhaR05jK25lUDJlRlNWNHlHZU1ndC91Y1E9P | | | QotLS0tLUVORCBDRVJUSUZJQ0FURS0tLS0tCg==", | | | "server": "https://172.21.23.10:16443" | | | }, | | | "name": "microk8s-cluster" | | | } | | | ], | | | "contexts": [ | | | { | | | "context": { | | | "cluster": "microk8s-cluster", | | | "user": "admin" | | | }, | | | "name": "microk8s" | | | } | | | ], | | | "current-context": "microk8s", | | | "kind": "Config", | | | "preferences": {}, | | | "users": [ | | | { | | | "name": "admin", | | | "user": { | | | "client-certificate-data": "LS0tLS1CRUdJTiBDRVJUSUZJQ0FURS0tLS0tCk1JSUN6RENDQWJTZ0F3SUJBZ0lV | | | VlhMMk9hajFlQnRxdEs4Z3NQdmoxZXc0S3hFd0RRWUpLb1pJaHZjTkFRRUwKQlFBd0Z6RVZNQk1HQTFVRUF3d01NVEF1TVRVeUxq | | | RTRNeTR4TUI0WERUSXpNVEl6TVRFeU1UUXlNMW9YRFRNegpNVEl5T0RFeU1UUXlNMW93S1RFT01Bd0dBMVVFQXd3RllXUnRhVzR4 | | | RnpBVkJnTlZCQW9NRG5ONWMzUmxiVHB0CllYTjBaWEp6TUlJQklqQU5CZ2txaGtpRzl3MEJBUUVGQUFPQ0FROEFNSUlCQ2dLQ0FR | | | RUF6eE9RUnNHM29jZmEKcUYzbkgzZ1hxSVlEUmN5S3c4MzhrbXdhZng5MWREeVJ3L05nNG13UlZKSWE3UGtlQ0hWcXFuQ3FqcTZk | | | dXkwQgpQZUpSR1hodEtoQ3NrOWZmZkFFUThQc0dNQjhqYzBvMDA2STFvN29ESDB5ZWU4N3V1WTRROFR0NnI4R0FWazg5Cjd2NmMv | | | Ui9tU3FRYitHZ09kbjBPM0VnUktkN3V4NzJxZzBvM25tSXBINHRDS1dQaUJGT05nSWh0aXY1WlZDb0wKa2Nsek9TVWE1cE92TVZk | | | VElWVlZmQ0d4WnFDenp0TDBLME9tM0RrY21IL2ZJUnN5ajZsVFFCNW1ySGZ1QndMOAp6YVFVWnpuaDVLT3cxV0lCODQweVUvc2RQ | | | eW82c1VvRXJ4QktsNGNPNzM5MmxzalY1UGJjN0xpY3FCWUVvYnlxCm1UajBXc3Jnc3dJREFRQUJNQTBHQ1NxR1NJYjNEUUVCQ3dV | | | QUE0SUJBUUMrcldGeUFDSVVoMVFUc1ZnSHp6VzMKSjU4STFPN004K1RsZG9KTmlLaDRYSlVoTzFaSXJ1RjJCdVQ2djhFUjFyTmph | | | T1EzTkE4SG1rYVF1ckdLSmlKYgpmdk5tZjJJbnU4YnB4RzRuNGJuVndlbWlDZVlmU3YxeDFaVlBuYlZtNW9JQ0xYOW9ZSjZGVjhr | | | M0k0Nmh2RVUzCisvTEV2b3RvZThTUGNRQ2FZeENPbFNUaC8yTmVkNlB5bzJ3ajZ3RWNXMnh0dmdBRkxUeFlZTUx6eGh2UFBoTEMK | | | YVJ1QTB5TkNYWWFIS1h5L0hlVWhDSTZUNDJrYkRXOFJ0eXBNVWc4aXZzYXN0Z24ydi9ZVkUzTnk3ZmpBSW9sVgo5K2VWcnh6cjV5 | | | NEJqZ0ptNjRFN1JtS29aRkFGMWNGS0JTRm9uTmFHZUQ0YSt0Tmh0d3ptLzRnRU8yZlliRGRKCi0tLS0tRU5EIENFUlRJRklDQVRF | | | LS0tLS0K", | | | "client-key-data": "LS0tLS1CRUdJTiBSU0EgUFJJVkFURSBLRVktLS0tLQpNSUlFcEFJQkFBS0NBUUVBenhPUVJz | | | RzNvY2ZhcUYzbkgzZ1hxSVlEUmN5S3c4MzhrbXdhZng5MWREeVJ3L05nCjRtd1JWSklhN1BrZUNIVnFxbkNxanE2ZHV5MEJQZUpS | | | R1hodEtoQ3NrOWZmZkFFUThQc0dNQjhqYzBvMDA2STEKbzdvREgweWVlODd1dVk0UThUdDZyOEdBVms4OTd2NmMvUi9tU3FRYitH | | | Z09kbjBPM0VnUktkN3V4NzJxZzBvMwpubUlwSDR0Q0tXUGlCRk9OZ0lodGl2NVpWQ29Ma2Nsek9TVWE1cE92TVZkVElWVlZmQ0d4 | | | WnFDenp0TDBLME9tCjNEa2NtSC9mSVJzeWo2bFRRQjVtckhmdUJ3TDh6YVFVWnpuaDVLT3cxV0lCODQweVUvc2RQeW82c1VvRXJ4 | | | QksKbDRjTzczOTJsc2pWNVBiYzdMaWNxQllFb2J5cW1UajBXc3Jnc3dJREFRQUJBb0lCQUcrTmdYYXNtdWtiU2M3VApyMU9EbFhN | | | aXBwYXVacjZ1cy9hSktBajlnSCthdFlmVEQ0bEtZRmVuMGJ1QlJFMllPMXNRRU1HR2pkQXNvWE9kCi9NUjAzSklCbzRhTmo0WTA2 | | | TlI0MWsrTUpzUFA0UDkxYmVJQ3JmWHBoNkw0eWgvSnlaUzZ6dy9wZE9LS0U3b3gKRFk5SzVQdW14cTZlRjlSNjhFdDR3MDVFRzJ3 | | | YjVvSmNWOHRsT3NlL3N0L3JqcjdmQk1nOTlxOVFYM1R3NXUzVgpYZE0wWVhpNEhzRnlxcnNvVi9XSkwrc2owakhGcmFlTnNZa25t | | | SVVmaTFIclNpOHcyWmJzUDJodVNLTzE5M0VOClE3ZXovUHZMbHh1TDVYbWpFUjh1a3ZVaVlTc0Z6Y29zeExaUEU4dEFnY294aVJX | | | 
WENIUzExMjlHZWVjVmt2bGEKbHR2dVYxRUNnWUVBL2UxTXMzU3lxSGdST3FPZ1A4YXp3cEs4YVRnT0QvSXNsSGhTK1JjdU1MT0Mz | | | TW8vMi9WaQpqQ2h0cC9GSnpycE1CUmppV3hJdHFqSzhOSzFPbisybVM0V0E1Y1Btekg2RjdOQXFhUUZDSWhUcXB1REdjeFF3CkRM | | | Q094YzliRGhBa0hFVks4RlpXakMwZzR0MytEdXMzOUJLOUlINy9NeURneU5TRUdsRHNUOXNDZ1lFQTBNUlkKL3JjdEVZdTVKeVcy | | | aUkrOHdZNy9lS0VybUZTWVA0TzBLRXJxenMrMDlpZGIwOXRCOC94dGtVYTdORUFvYWFnOAphTGpqUGV4aFp5cUNBazJ6L05GVmlM | | | U3FjYTM0NlB5QmVTOUlPV1krZ3JNbEM4YXJnUTd6c2wvNGJrNllNM1pSCmlmSE9WbUZSamNOOE1CWXBkUFdGRm9rWUpOOVdWb2lJ | | | WXUyaDFna0NnWUVBekg3T0ZYdjI0WndJYkF3WVl5UTMKNG92ZnRuRVJTaUJEYkxKV1NBbzJCYnlidUFvZFVad1Y2RUFxeDJTWGh5 | | | Z2JUc09Tajd0ZEVVTll0S2VadVV4ZgpiMVN0ME54cXcrZGxIYWtyYm5NMytsQ1VCUjdpVXcxQUFRZ3pndlNTSlFvMHVwdkhWVFND | | | OE5Mck9DbUZrQ2c0CllKU3l3VGZsZUJ5VFNzbHpOZXBUMVo4Q2dZRUFvZldod1VJa2diY3ZGakxJMEZzS3FmTWhPTmw0Yzc0MU94 | | | a1cKL0NkYTBNVU04K2FHOCszbTFiMGcxNUxUQ3U4M0t2anNuS1hMRGJCRHhJQ1N5ZXkwVk0rbUZ0OXFWN1VMSk4wNApLT2JJbGxj | | | KzZiREhiY0pjTFV5SFBQdjlSTDVPajZZY0RmTjlDMENHdjM5SDZocFhZVGhVWHE1Ty85UVFMQ1BTCkEwV3R4dGtDZ1lCOFpLaExN | | | MkFhMXREbVpqQnlvVHIweXNBSHNuZWNsa0htMjE3NmtoTDhQNWJwTmpYWk9wRHIKcGRKdC83UCtVWWVNWUJWZUhqaFRycWNZUDRm | | | L0RNQ29GTTZwVEtNaDkrSUZaWHYxdHF6MGlJWGdiSC91b1NwZAp6MmcvbU1yWUgrcm9jT1JMNzIzUEF5Q3NiOWtOcjNGREswcEF5 | | | Z2VTb21Ib2ZPMEIvWVhpY0E9PQotLS0tLUVORCBSU0EgUFJJVkFURSBLRVktLS0tLQo=" | | | } | | | } | | | ] | | | } | | k8s_version | "v1" | | vim_account | "1eb731ff-8cf0-4be1-a4c5-0e9177a9917e" | | nets | { | | | "net1": "OSM-CICD-subnet" | | | } | | description | "Robot cluster" | | namespace | "kube-system" | | _admin | { | | | "created": 1704025942.2386892, | | | "modified": 1704025942.2386892, | | | "projects_read": [ | | | "7d0dfa29-ed12-4986-ae3d-7ac1556d33a5" | | | ], | | | "projects_write": [ | | | "7d0dfa29-ed12-4986-ae3d-7ac1556d33a5" | | | ], | | | "operationalState": "PROCESSING", | | | "operations": [ | | | { | | | "lcmOperationType": "create", | | | "operationState": "PROCESSING", | | | "startTime": 1704025942.2387137, | | | "statusEnteredTime": 1704025942.2387137, | | | "detailed-status": "", | | | "operationParams": null | | | } | | | ], | | | "current_operation": null, | | | "helm_chart_repos": [], | | | "juju_bundle_repos": [] | | | } | | schema_version | "1.11" | +----------------+------------------------------------------------------------------------------------------------------+ [Pipeline] } [Pipeline] // stage [Pipeline] stage [Pipeline] { (Run Robot tests) [Pipeline] sh [azure_robot_tests] Running shell script + . /robot-systest/results/osm_environment.rc + export CLOUD_TYPE=azure + export OSM_HOSTNAME=172.21.23.11 + export OSM_IMAGE_NAME=osmtest202312311216 + export JUJU_PASSWORD=secret + . /robot-systest/results/k8s_environment.rc + export CLOUD_TYPE=azure + export USE_PAAS_K8S=FALSE + export K8S_IP=172.21.23.10 + export K8S_IMAGE_NAME=k8stest202312311208 + export K8S_CREDENTIALS=/robot-systest/results/kubeconfig.yaml + /robot-systest/run_test.sh -t azure ============================================================================== Testsuite ============================================================================== Testsuite.Basic 05-Instantiation Parameters In Cloud Init :: [BASIC-05] Ins... ============================================================================== Create Cloudinit VNF Descriptor :: Upload VNF package for the test... | PASS | ------------------------------------------------------------------------------ Create Cloudinit NS Descriptor :: Upload NS package for the testsu... 
| PASS | ------------------------------------------------------------------------------ Instantiate Cloudinit Network Service Using Instantiation Paramete... | PASS | ------------------------------------------------------------------------------ Get Management Ip Addresses :: Retrieve VNF mgmt IP address from OSM. | PASS | ------------------------------------------------------------------------------ Test SSH Access With The New Password :: Test SSH access with the ... | PASS | ------------------------------------------------------------------------------ Delete NS Instance :: Delete NS instance. | PASS | ------------------------------------------------------------------------------ Delete NS Descriptor :: Delete NS package from OSM. | PASS | ------------------------------------------------------------------------------ Delete VNF Descriptor :: Delete VNF package from OSM. | PASS | ------------------------------------------------------------------------------ Testsuite.Basic 05-Instantiation Parameters In Cloud Init :: [BASI... | PASS | 8 tests, 8 passed, 0 failed ============================================================================== Testsuite.Basic 06-Vnf With Charm :: [BASIC-06] VNF with Charm. ============================================================================== Create Charm VNF Descriptor :: Upload VNF package for the testsuite. | PASS | ------------------------------------------------------------------------------ Create Charm NS Descriptor :: Upload NS package for the testsuite. | PASS | ------------------------------------------------------------------------------ Instantiate Charm Network Service :: Instantiates the NS for the t... | FAIL | '+---------------------+--------------------------------------+---------------------+----------+-------------------+-----------------------------------------+ | ns instance name | id | date | ns state | current operation | error details | +---------------------+--------------------------------------+---------------------+----------+-------------------+-----------------------------------------+ | basic_06_charm_test | e01333db-5b17-4309-b8ca-03ad5dd362aa | 2023-12-31T12:37:22 | BROKEN | IDLE (None) | Operation: INSTANTIATING.332b4eab-c97e- | | | | | | | 4d16-a91f-3e0f75029965, Stage 2/5: | | | | | | | deployment of KDUs, VMs and execution | | | | | | | environments. | | | | | | | Detail: Deploying VCA vnf1.: create | | | | | | | execution environment. 'cacert'. | | | | | | | Deploying VCA vnf2.: create execution | | | | | | | environment. 'cacert' | +---------------------+--------------------------------------+---------------------+----------+-------------------+-----------------------------------------+ To get the history of all operations over a NS, run "osm ns-op-list NS_ID" For more details on the current operation, run "osm ns-op-show OPERATION_ID"' contains 'BROKEN' ------------------------------------------------------------------------------ Check VNF Charm Application Name :: Checks that the charm applicat... | FAIL | Variable '${MODEL_NAME}' not found. ------------------------------------------------------------------------------ Get Management Ip Addresses :: Get the mgmt IP addresses of both V... | FAIL | Variable '${NS_ID}' not found. ------------------------------------------------------------------------------ Test SSH Access :: Check that both VNF are accessible via SSH in t... 
| FAIL | msg=IP address of the management VNF 'vnf1' is not available ------------------------------------------------------------------------------ Check Remote Files Created Via Day 1 Operations :: The Charm VNF h... | FAIL | Variable '${VNF_1_IP_ADDR}' not found. ------------------------------------------------------------------------------ Execute Day 2 Operations :: Performs one Day 2 operation per VNF t... | FAIL | msg=Network service instance is not available ------------------------------------------------------------------------------ Check Remote Files Created Via Day 2 Operations :: Check whether t... | FAIL | Variable '${VNF_1_IP_ADDR}' not found. ------------------------------------------------------------------------------ Delete NS Instance :: Delete NS instance. | FAIL | Keyword 'Check For NS Instance To Be Deleted' failed after retrying for 16 minutes. The last error was: basic_06_charm_test == basic_06_charm_test ------------------------------------------------------------------------------ Delete NS Descriptor :: Delete NS package from OSM. | FAIL | 1 != 0 ------------------------------------------------------------------------------ Delete VNF Descriptor :: Delete VNF package from OSM. | FAIL | 1 != 0 ------------------------------------------------------------------------------ Testsuite.Basic 06-Vnf With Charm :: [BASIC-06] VNF with Charm. | FAIL | 12 tests, 2 passed, 10 failed ============================================================================== Testsuite.Basic 07-Secure Key Management :: [BASIC-07] Secure key management. ============================================================================== Create Nopasswd Charm VNF Descriptor :: Upload VNF package for the... | PASS | ------------------------------------------------------------------------------ Create Nopasswd Charm NS Descriptor :: Upload NS package for the t... | PASS | ------------------------------------------------------------------------------ Instantiate Nopasswd Charm Network Service :: Instantiate NS for t... | FAIL | '+--------------------------------+--------------------------------------+---------------------+----------+-------------------+-----------------------------------------+ | ns instance name | id | date | ns state | current operation | error details | +--------------------------------+--------------------------------------+---------------------+----------+-------------------+-----------------------------------------+ | basic_07_secure_key_management | 2f52dfbe-d770-4598-a56f-dd0a02b72e6e | 2023-12-31T13:13:36 | BROKEN | IDLE (None) | Operation: INSTANTIATING.389e1c2c-c2ae- | | | | | | | 45ac-9b33-349d314247eb, Stage 2/5: | | | | | | | deployment of KDUs, VMs and execution | | | | | | | environments. | | | | | | | Detail: Deploying VCA vnf1.: create | | | | | | | execution environment. 'cacert'. | | | | | | | Deploying VCA vnf2.: create execution | | | | | | | environment. 'cacert' | +--------------------------------+--------------------------------------+---------------------+----------+-------------------+-----------------------------------------+ To get the history of all operations over a NS, run "osm ns-op-list NS_ID" For more details on the current operation, run "osm ns-op-show OPERATION_ID"' contains 'BROKEN' ------------------------------------------------------------------------------ Get Ns Id :: Get NS instance id. 
| PASS | ------------------------------------------------------------------------------ Get Management Ip Addresses :: Get the mgmt IP address of the VNF ... | PASS | ------------------------------------------------------------------------------ Test SSH Access :: Check that the VNF is accessible via SSH in its... | PASS | ------------------------------------------------------------------------------ Check Remote Files Created Via Day 1 Operations :: The Nopasswd VN... | FAIL | 2 != 0 ------------------------------------------------------------------------------ Execute Day 2 Operations :: Performs one Day 2 operation that crea... | FAIL | The ns-action with id 22cee008-ab59-4484-8a1b-2805ef4e206b was not completed ------------------------------------------------------------------------------ Check Remote Files Created Via Day 2 Operations :: Check whether t... | FAIL | 2 != 0 ------------------------------------------------------------------------------ Delete NS Instance :: Delete NS instance. | FAIL | Keyword 'Check For NS Instance To Be Deleted' failed after retrying for 16 minutes. The last error was: basic_07_secure_key_management == basic_07_secure_key_management ------------------------------------------------------------------------------ Delete NS Descriptor :: Delete NS package from OSM. | FAIL | 1 != 0 ------------------------------------------------------------------------------ Delete VNF Descriptor :: Delete VNF package from OSM. | FAIL | 1 != 0 ------------------------------------------------------------------------------ Testsuite.Basic 07-Secure Key Management :: [BASIC-07] Secure key ... | FAIL | 12 tests, 5 passed, 7 failed ============================================================================== Testsuite.Basic 09-Manual Vdu Scaling :: [BASIC-09] Manual VNF/VDU Scaling. ============================================================================== Create Scaling VNF Descriptor :: Upload VNF package for the testsu... | PASS | ------------------------------------------------------------------------------ Create Scaling NS Descriptor :: Upload NS package for the testsuite. | PASS | ------------------------------------------------------------------------------ Instantiate Scaling Network Service :: Instantiate NS for the test... | PASS | ------------------------------------------------------------------------------ Get Ns Id :: Retrieve NS instance id to be used later on. | PASS | ------------------------------------------------------------------------------ Get Vnf Id :: Retrieve VNF instance id to be used later on. | PASS | ------------------------------------------------------------------------------ Get Vdus Before Scale Out :: Check the number of VDUs instances be... | PASS | ------------------------------------------------------------------------------ Perform Manual Vdu Scale Out :: Perform a manual scale-out operati... | PASS | ------------------------------------------------------------------------------ Check Vdus After Scale Out :: Check whether there is one more VDU ... | PASS | ------------------------------------------------------------------------------ Perform Manual Vdu Scale In :: Perform a manual scale-in operation... | PASS | ------------------------------------------------------------------------------ Check Vdus After Scaling In :: Check whether there is one less VDU... | PASS | ------------------------------------------------------------------------------ Delete NS Instance :: Delete NS instance. 
| PASS | ------------------------------------------------------------------------------ Delete NS Descriptor :: Delete NS package from OSM. | PASS | ------------------------------------------------------------------------------ Delete VNF Descriptor :: Delete VNF package from OSM. | PASS | ------------------------------------------------------------------------------ Testsuite.Basic 09-Manual Vdu Scaling :: [BASIC-09] Manual VNF/VDU... | PASS | 13 tests, 13 passed, 0 failed ============================================================================== Testsuite.Basic 11-Native Charms :: [BASIC-11] Native Charms ============================================================================== Create Charm VNF Descriptor :: Upload VNF package for the testsuite. | PASS | ------------------------------------------------------------------------------ Create Charm NS Descriptor :: Upload NS package for the testsuite. | PASS | ------------------------------------------------------------------------------ Instantiate Charm Network Service :: Instantiate NS for the testsu... | FAIL | '+------------------------+--------------------------------------+---------------------+----------+-------------------+-----------------------------------------+ | ns instance name | id | date | ns state | current operation | error details | +------------------------+--------------------------------------+---------------------+----------+-------------------+-----------------------------------------+ | basic_11_native_charms | 466d09a7-cf5a-4942-adea-6f2ef086c0db | 2023-12-31T13:56:57 | BROKEN | IDLE (None) | Operation: INSTANTIATING.641c1004-32ad- | | | | | | | 40d6-96e3-4411e574f522, Stage 2/5: | | | | | | | deployment of KDUs, VMs and execution | | | | | | | environments. ... [ Message content over the limit has been removed. ] ... | | | | | {'hostname': '172.21.23.5', 'username': | | | | | | | 'ubuntu'}. 'cacert'. Deploying VCA | | | | | | | vnf2.mgmtVM: register execution | | | | | | | environment {'hostname': '172.21.23.6', | | | | | | | 'username': 'ubuntu'}. 'cacert' | +------------------------+--------------------------------------+---------------------+----------+-------------------+-----------------------------------------+ To get the history of all operations over a NS, run "osm ns-op-list NS_ID" For more details on the current operation, run "osm ns-op-show OPERATION_ID"' contains 'BROKEN' ------------------------------------------------------------------------------ Get Management Ip Addresses :: Get the mgmt IP address of the two ... | FAIL | Variable '${NS_ID}' not found. ------------------------------------------------------------------------------ Test SSH Access :: Check that both VNF are accessible via SSH in t... | FAIL | msg=IP address of the management VNF 'vnf1' is not available ------------------------------------------------------------------------------ Check Remote Files Created Via Day 1 Operations :: The Charm VNF h... | FAIL | Variable '${VNF_1_IP_ADDR}' not found. ------------------------------------------------------------------------------ Delete NS Instance :: Delete NS instance. | FAIL | Keyword 'Check For NS Instance To Be Deleted' failed after retrying for 16 minutes. The last error was: basic_11_native_charms == basic_11_native_charms ------------------------------------------------------------------------------ Delete NS Descriptor :: Delete NS package from OSM. 
| FAIL | 1 != 0 ------------------------------------------------------------------------------ Delete VNF Descriptor :: Delete VNF package from OSM. | FAIL | 1 != 0 ------------------------------------------------------------------------------ Testsuite.Basic 11-Native Charms :: [BASIC-11] Native Charms | FAIL | 9 tests, 2 passed, 7 failed ============================================================================== Testsuite.Basic 12-Ns Primitives :: [BASIC-12] NS Primitives ============================================================================== Change Juju Password :: NS package needs to be updated with the Ju... | PASS | ------------------------------------------------------------------------------ Upload Vnfds :: Upload VNF packages for the testsuite. | PASS | ------------------------------------------------------------------------------ Upload Nsd :: Upload NS package for the testsuite. | PASS | ------------------------------------------------------------------------------ Instantiate NS :: Instantiate NS for the testsuite. | FAIL | '+------------------+--------------------------------------+---------------------+----------+-------------------+-----------------------------------------+ | ns instance name | id | date | ns state | current operation | error details | +------------------+--------------------------------------+---------------------+----------+-------------------+-----------------------------------------+ | test_nscharm | 7e191243-83f1-4b7f-848f-60b52fe7f552 | 2023-12-31T14:32:45 | BROKEN | IDLE (None) | Operation: INSTANTIATING.6fc68eb3-c865- | | | | | | | 4884-ab7f-05cda9a25edd, Stage 2/5: | | | | | | | deployment of KDUs, VMs and execution | | | | | | | environments. | | | | | | | Detail: Deploying VCA 1.userVM: create | | | | | | | execution environment. 'cacert'. | | | | | | | Deploying VCA 2.policyVM: create | | | | | | | execution environment. 'cacert'. | | | | | | | Deploying VCA .: create execution | | | | | | | environment. 'cacert' | +------------------+--------------------------------------+---------------------+----------+-------------------+-----------------------------------------+ To get the history of all operations over a NS, run "osm ns-op-list NS_ID" For more details on the current operation, run "osm ns-op-show OPERATION_ID"' contains 'BROKEN' ------------------------------------------------------------------------------ Check NS Charm Application Name :: Check that the NS charm has the... | FAIL | Variable '${MODEL_NAME}' not found. ------------------------------------------------------------------------------ Check VDU Charm Application Name :: Check that the VDU charm has t... | FAIL | Variable '${MODEL_NAME}' not found. ------------------------------------------------------------------------------ Delete NS :: Delete NS instance. | FAIL | Keyword 'Check For NS Instance To Be Deleted' failed after retrying for 16 minutes. The last error was: test_nscharm == test_nscharm ------------------------------------------------------------------------------ Delete NS Descriptor :: Delete NS package from OSM. | FAIL | 1 != 0 ------------------------------------------------------------------------------ Delete VNF Descriptors :: Delete VNF package from OSM. 
| FAIL | 1 != 0 ------------------------------------------------------------------------------ Testsuite.Basic 12-Ns Primitives :: [BASIC-12] NS Primitives | FAIL | 9 tests, 3 passed, 6 failed ============================================================================== Testsuite.Basic 13-Ns Relations :: [BASIC-13] NS Relations ============================================================================== Create Charm VNF Descriptor Provides :: Upload VNF package for the... | PASS | ------------------------------------------------------------------------------ Create Charm VNF Descriptor Requires :: Upload VNF package for the... | PASS | ------------------------------------------------------------------------------ Create Charm NS Descriptor :: Upload NS package for the testsuite. | PASS | ------------------------------------------------------------------------------ Instantiate Charm Network Service :: Instantiate NS for the testsu... | FAIL | '+----------------------------+--------------------------------------+---------------------+----------+-------------------+-----------------------------------------+ | ns instance name | id | date | ns state | current operation | error details | +----------------------------+--------------------------------------+---------------------+----------+-------------------+-----------------------------------------+ | basic_13_ns_relations_test | eba177cd-ab7f-4d1f-a86e-5f8c01db16e9 | 2023-12-31T15:10:46 | BROKEN | IDLE (None) | Operation: INSTANTIATING.5788bc6e-6b43- | | | | | | | 436c-b845-cce01b624d80, Stage 2/5: | | | | | | | deployment of KDUs, VMs and execution | | | | | | | environments. ... [ Message content over the limit has been removed. ] ... | | | | | '172.21.23.5', 'username': 'ubuntu'}. | | | | | | | 'cacert'. Deploying VCA vnf2.: register | | | | | | | execution environment {'hostname': | | | | | | | '172.21.23.6', 'username': 'ubuntu'}. | | | | | | | 'cacert' | +----------------------------+--------------------------------------+---------------------+----------+-------------------+-----------------------------------------+ To get the history of all operations over a NS, run "osm ns-op-list NS_ID" For more details on the current operation, run "osm ns-op-show OPERATION_ID"' contains 'BROKEN' ------------------------------------------------------------------------------ Delete NS Instance :: Delete NS instance. | FAIL | Keyword 'Check For NS Instance To Be Deleted' failed after retrying for 16 minutes. The last error was: basic_13_ns_relations_test == basic_13_ns_relations_test ------------------------------------------------------------------------------ Delete NS Descriptor :: Delete NS package from OSM. | FAIL | 1 != 0 ------------------------------------------------------------------------------ Delete VNF Descriptor Provides :: Delete first VNF package from OSM. | FAIL | 1 != 0 ------------------------------------------------------------------------------ Delete VNF Descriptor Requires :: Delete second VNF package from OSM. | FAIL | 1 != 0 ------------------------------------------------------------------------------ Testsuite.Basic 13-Ns Relations :: [BASIC-13] NS Relations | FAIL | 8 tests, 3 passed, 5 failed ============================================================================== Testsuite.Basic 14-Vnf Relations :: [BASIC-14] VNF Relations ============================================================================== Create Charm VNF Descriptor :: Upload VNF package for the testsuite. 
| PASS | ------------------------------------------------------------------------------ Create Charm NS Descriptor :: Upload NS package for the testsuite. | PASS | ------------------------------------------------------------------------------ Instantiate Charm Network Service :: Instantiate NS for the testsu... | FAIL | '+-----------------------------+--------------------------------------+---------------------+----------+-------------------+-----------------------------------------+ | ns instance name | id | date | ns state | current operation | error details | +-----------------------------+--------------------------------------+---------------------+----------+-------------------+-----------------------------------------+ | basic_14_vnf_relations_test | e57f7818-0b08-4e17-9b7d-dda23447767f | 2023-12-31T15:47:43 | BROKEN | IDLE (None) | Operation: INSTANTIATING.802fda99-4cef- | | | | | | | 468d-b5c5-2c5cbcd355e4, Stage 2/5: | | | | | | | deployment of KDUs, VMs and execution | | | | | | | environments. ... [ Message content over the limit has been removed. ] ... | | | | | 'username': 'ubuntu'}. 'cacert'. | | | | | | | Deploying VCA vnf.simple_provides: | | | | | | | register execution environment | | | | | | | {'hostname': '172.21.23.6', 'username': | | | | | | | 'ubuntu'}. 'cacert' | +-----------------------------+--------------------------------------+---------------------+----------+-------------------+-----------------------------------------+ To get the history of all operations over a NS, run "osm ns-op-list NS_ID" For more details on the current operation, run "osm ns-op-show OPERATION_ID"' contains 'BROKEN' ------------------------------------------------------------------------------ Delete NS Instance :: Delete NS instance. | FAIL | Keyword 'Check For NS Instance To Be Deleted' failed after retrying for 16 minutes. The last error was: basic_14_vnf_relations_test == basic_14_vnf_relations_test ------------------------------------------------------------------------------ Delete NS Descriptor :: Delete NS package from OSM. | FAIL | 1 != 0 ------------------------------------------------------------------------------ Delete VNF Descriptor :: Delete VNF package from OSM. | FAIL | 1 != 0 ------------------------------------------------------------------------------ Testsuite.Basic 14-Vnf Relations :: [BASIC-14] VNF Relations | FAIL | 6 tests, 2 passed, 4 failed ============================================================================== Testsuite.Basic 15-Rbac Configurations :: [BASIC-15] RBAC Configurations. ============================================================================== Create And Validate User :: Create a user in OSM. | PASS | ------------------------------------------------------------------------------ Assign Role To User :: Assign a user to a project with a role | PASS | ------------------------------------------------------------------------------ Run Action As User :: Update the password of the user. | PASS | ------------------------------------------------------------------------------ Create And Update Project :: Create a project and update its name. | PASS | ------------------------------------------------------------------------------ Create And Validate Role :: Create a role and check that exists in... | PASS | ------------------------------------------------------------------------------ Update Role Information :: Update a role with a capability. 
| PASS | ------------------------------------------------------------------------------ Delete Allocated Resources :: Delete user, project and role. | PASS | ------------------------------------------------------------------------------ Testsuite.Basic 15-Rbac Configurations :: [BASIC-15] RBAC Configur... | PASS | 7 tests, 7 passed, 0 failed ============================================================================== Testsuite.Basic 16-Advanced Onboarding And Scaling :: [BASIC-16] Advanced o... ============================================================================== Create Scaling VNF Descriptor :: Upload VNF package for the testsu... | PASS | ------------------------------------------------------------------------------ Create Scaling NS Descriptor :: Upload NS package for the testsuite. | PASS | ------------------------------------------------------------------------------ Instantiate Scaling Network Service :: Instantiate NS for the test... | PASS | ------------------------------------------------------------------------------ Get Vnf Id :: Retrieve VNF instance id to be used later on. | PASS | ------------------------------------------------------------------------------ Check Vdus Before Scale Out :: Check the number of VDUs instances ... | PASS | ------------------------------------------------------------------------------ Perform Manual Vdu Scale Out :: Perform a manual scale-out operati... | PASS | ------------------------------------------------------------------------------ Check Vdus After Scale Out :: Check whether there is one extra VDU... | PASS | ------------------------------------------------------------------------------ Perform Manual Vdu Scale In :: Perform a manual scale-in operation... | PASS | ------------------------------------------------------------------------------ Check Vdus After Scaling In :: Check whether there is one less VDU... | PASS | ------------------------------------------------------------------------------ Delete NS Instance :: Delete NS intance. | PASS | ------------------------------------------------------------------------------ Delete NS Descriptor :: Delete NS package from OSM. | PASS | ------------------------------------------------------------------------------ Delete VNF Descriptor :: Delete VNF package from OSM. | PASS | ------------------------------------------------------------------------------ Testsuite.Basic 16-Advanced Onboarding And Scaling :: [BASIC-16] A... | PASS | 12 tests, 12 passed, 0 failed ============================================================================== Testsuite.Basic 17-Delete Vnf Package :: [BASIC-17] Delete VNF Package Befo... ============================================================================== Create VNF Package :: Upload VNF package for the testsuite. | PASS | ------------------------------------------------------------------------------ Create NS Package :: Upload NS package for the testsuite. | PASS | ------------------------------------------------------------------------------ Cannot Delete VNF Package :: Assert that the VNF package cannot be... | PASS | ------------------------------------------------------------------------------ Delete NS Package :: Delete NS package from OSM. | PASS | ------------------------------------------------------------------------------ Delete VNF Package :: Delete VNF package from OSM. | PASS | ------------------------------------------------------------------------------ Testsuite.Basic 17-Delete Vnf Package :: [BASIC-17] Delete VNF Pac... 
| PASS | 5 tests, 5 passed, 0 failed ============================================================================== Testsuite.Basic 20-Manual Native Charm Vdu Scaling :: [BASIC-20] Manual VDU... ============================================================================== Create VNF Descriptor :: Upload VNF package for the testsuite. | PASS | ------------------------------------------------------------------------------ Create NS Descriptor :: Upload NS package for the testsuite. | PASS | ------------------------------------------------------------------------------ Instantiate Network Service :: Instantiate NS for the testsuite. | FAIL | '+----------------------------------+--------------------------------------+---------------------+----------+-------------------+-----------------------------------------+ | ns instance name | id | date | ns state | current operation | error details | +----------------------------------+--------------------------------------+---------------------+----------+-------------------+-----------------------------------------+ | basic_20_manual_vdu_scaling_test | ff616017-bcb4-457f-83f9-4ef07508b087 | 2023-12-31T16:35:19 | BROKEN | IDLE (None) | Operation: INSTANTIATING.b248ec48-5286- | | | | | | | 4583-968b-37772e1216d4, Stage 2/5: | | | | | | | deployment of KDUs, VMs and execution | | | | | | | environments. ... [ Message content over the limit has been removed. ] ... | | | | | {'hostname': '172.21.23.5', 'username': | | | | | | | 'ubuntu'}. 'cacert'. Deploying VCA | | | | | | | vnf.mgmtVM: register execution | | | | | | | environment {'hostname': '172.21.23.6', | | | | | | | 'username': 'ubuntu'}. 'cacert' | +----------------------------------+--------------------------------------+---------------------+----------+-------------------+-----------------------------------------+ To get the history of all operations over a NS, run "osm ns-op-list NS_ID" For more details on the current operation, run "osm ns-op-show OPERATION_ID"' contains 'BROKEN' ------------------------------------------------------------------------------ Get Vnf Id :: Retrieve VNF instance id to be used later on | FAIL | msg=Network service instance is not available ------------------------------------------------------------------------------ Get Vdus Before Scale Out :: Get the number of VDU records before ... | FAIL | Variable '${VNF_ID}' not found. Did you mean: ${VNFD_PKG} ------------------------------------------------------------------------------ Get Application Name Before Scale Out :: Get the application names. | PASS | ------------------------------------------------------------------------------ Check Number of Duplicated Application Name Before Scale Out :: Ch... | PASS | ------------------------------------------------------------------------------ Perform Manual Vdu Scale Out :: Perform a manual scale-out operati... | FAIL | msg=Network service instance is not available ------------------------------------------------------------------------------ Check Vdus After Scale Out :: Check whether there is one more VDU ... | FAIL | msg=Network service instance is not available ------------------------------------------------------------------------------ Get Application Name After Scale Out :: Get the application names. | PASS | ------------------------------------------------------------------------------ Check Number of Duplicated Application Name After Scale Out :: Che... 
| FAIL | There are not 2 application name in the application list after Scale Out ------------------------------------------------------------------------------ Perform Manual Vdu Scale In :: Perform a manual scale-in operation... | FAIL | msg=Network service instance is not available ------------------------------------------------------------------------------ Check Vdus After Scaling In :: Check whether there is one less VDU... | FAIL | msg=Network service instance is not available ------------------------------------------------------------------------------ Get Application Name After Scale In :: Get the application names. | PASS | ------------------------------------------------------------------------------ Check Number of Duplicated Application Name After Scale In :: Chec... | PASS | ------------------------------------------------------------------------------ Delete NS Instance :: Delete NS instance. | FAIL | Keyword 'Check For NS Instance To Be Deleted' failed after retrying for 16 minutes. The last error was: basic_20_manual_vdu_scaling_test == basic_20_manual_vdu_scaling_test ------------------------------------------------------------------------------ Delete NS Descriptor :: Delete NS package from OSM. | FAIL | 1 != 0 ------------------------------------------------------------------------------ Delete VNF Descriptor :: Delete VNF package from OSM. | FAIL | 1 != 0 ------------------------------------------------------------------------------ Testsuite.Basic 20-Manual Native Charm Vdu Scaling :: [BASIC-20] M... | FAIL | 18 tests, 7 passed, 11 failed ============================================================================== Testsuite.Basic 23-Sol004 Sol007 Packages :: [BASIC-23] Creation and deleti... ============================================================================== Create VNF Package :: Upload VNF package for the testsuite. | PASS | ------------------------------------------------------------------------------ Create NS Package :: Upload NS package for the testsuite. | PASS | ------------------------------------------------------------------------------ Cannot Delete VNF Package :: Assert that the VNF package cannot be... | PASS | ------------------------------------------------------------------------------ Delete NS Package :: Delete NS package. | PASS | ------------------------------------------------------------------------------ Delete VNF Package :: Delete VNF package. | PASS | ------------------------------------------------------------------------------ Testsuite.Basic 23-Sol004 Sol007 Packages :: [BASIC-23] Creation a... | PASS | 5 tests, 5 passed, 0 failed ============================================================================== Testsuite.Hackfest Basic :: [HACKFEST-BASIC] Basic NS with a single-VDU VNF ============================================================================== Create Hackfest Basic VNF Descriptor :: Upload VNF package for the... | PASS | ------------------------------------------------------------------------------ Create Hackfest Basic NS Descriptor :: Upload NS package for the t... | PASS | ------------------------------------------------------------------------------ Network Service Instance Test :: Instantiate NS for the testsuite. | PASS | ------------------------------------------------------------------------------ Get Vnf Ip Address :: Get the mgmt IP address of the VNF of the NS. 
| PASS | ------------------------------------------------------------------------------ Test Ping :: Test that the mgmt IP address of the VNF is reachable... | PASS | ------------------------------------------------------------------------------ Test SSH Access :: Check that the VNF is accessible via SSH in its... | PASS | ------------------------------------------------------------------------------ Delete NS Instance Test :: Delete NS instance. | PASS | ------------------------------------------------------------------------------ Delete NS Descriptor Test :: Delete NS package from OSM. | PASS | ------------------------------------------------------------------------------ Delete VNF Descriptor Test :: Delete VNF package from OSM. | PASS | ------------------------------------------------------------------------------ Testsuite.Hackfest Basic :: [HACKFEST-BASIC] Basic NS with a singl... | PASS | 9 tests, 9 passed, 0 failed ============================================================================== Testsuite.Hackfest Cloudinit :: [HACKFEST-CLOUDINIT] Basic NS with two mult... ============================================================================== Create Hackfest Cloudinit VNF Descriptor :: Upload VNF package for... | PASS | ------------------------------------------------------------------------------ Create Hackfest Cloudinit NS Descriptor :: Upload NS package for t... | PASS | ------------------------------------------------------------------------------ Network Service Instance Test :: Instantiate NS for the testsuite. | PASS | ------------------------------------------------------------------------------ Get Vnf Ip Address :: Get the mgmt IP address of the VNF of the NS. | PASS | ------------------------------------------------------------------------------ Test SSH Access :: Check that the VNF is accessible via SSH in its... | PASS | ------------------------------------------------------------------------------ Check Remote File Injected Via Cloud-init :: Check that the day-0 ... | PASS | ------------------------------------------------------------------------------ Delete NS Instance Test :: Delete NS instance. | FAIL | Keyword 'Check For NS Instance To Be Deleted' failed after retrying for 16 minutes. The last error was: hfcloudinit == hfcloudinit ------------------------------------------------------------------------------ Delete NS Descriptor Test :: Delete NS package from OSM. | FAIL | 1 != 0 ------------------------------------------------------------------------------ Delete VNF Descriptor Test :: Delete VNF package from OSM. | FAIL | 1 != 0 ------------------------------------------------------------------------------ Testsuite.Hackfest Cloudinit :: [HACKFEST-CLOUDINIT] Basic NS with... | FAIL | 9 tests, 6 passed, 3 failed ============================================================================== Testsuite.Hackfest Multivdu :: [HACKFEST-MULTIVDU] Basic NS with two multi-... ============================================================================== Create Hackfest multivdu VNF Descriptor :: Upload VNF package for ... | PASS | ------------------------------------------------------------------------------ Create Hackfest Multivdu NS Descriptor :: Upload NS package for th... | PASS | ------------------------------------------------------------------------------ Network Service Instance Test :: Instantiate NS for the testsuite. 
| PASS | ------------------------------------------------------------------------------ Get Vnf Ip Address :: Get the mgmt IP address of the VNF of the NS. | PASS | ------------------------------------------------------------------------------ Test Ping :: Test that the mgmt IP address of the VNF is reachable... | PASS | ------------------------------------------------------------------------------ Test SSH Access :: Check that the VNF is accessible via SSH in its... | PASS | ------------------------------------------------------------------------------ Delete NS Instance Test :: Delete NS instance. | FAIL | Keyword 'Check For NS Instance To Be Deleted' failed after retrying for 16 minutes. The last error was: hfmultivdu == hfmultivdu ------------------------------------------------------------------------------ Delete NS Descriptor Test :: Delete NS package. | FAIL | 1 != 0 ------------------------------------------------------------------------------ Delete VNF Descriptor Test :: Delete VNF package. | FAIL | 1 != 0 ------------------------------------------------------------------------------ Testsuite.Hackfest Multivdu :: [HACKFEST-MULTIVDU] Basic NS with t... | FAIL | 9 tests, 6 passed, 3 failed ============================================================================== Testsuite.K8S 02-K8Scluster Creation :: [K8s-02] K8s cluster addition. ============================================================================== Create VIM Target Basic :: Creates a VIM for the K8s cluster to be... | PASS | ------------------------------------------------------------------------------ Add K8s Cluster To OSM :: Creates a VIM for the K8s cluster to be ... | PASS | ------------------------------------------------------------------------------ Remove K8s Cluster from OSM :: Delete K8s cluster. | PASS | ------------------------------------------------------------------------------ Delete VIM Target By ID :: Delete the VIM Target created in previo... | PASS | ------------------------------------------------------------------------------ Testsuite.K8S 02-K8Scluster Creation :: [K8s-02] K8s cluster addit... | PASS | 4 tests, 4 passed, 0 failed ============================================================================== Testsuite.K8S 03-Simple K8S :: [K8s-03] Simple K8s. ============================================================================== Create Simple K8s VNF Descriptor :: Upload NF package for the test... | PASS | ------------------------------------------------------------------------------ Create Simple K8s Descriptor :: Upload NS package for the testsuite. | PASS | ------------------------------------------------------------------------------ Network Service K8s Instance Test :: Instantiate NS for the testsu... | FAIL | '+------------------+--------------------------------------+---------------------+----------+-------------------+-----------------------------------------+ | ns instance name | id | date | ns state | current operation | error details | +------------------+--------------------------------------+---------------------+----------+-------------------+-----------------------------------------+ | native-k8s | 7a22f225-c19b-4546-864e-926f247775a6 | 2023-12-31T17:57:22 | BROKEN | IDLE (None) | Operation: INSTANTIATING.0261352f-23e3- | | | | | | | 4f56-aa81-73def6f2183a, Stage 2/5: | | | | | | | deployment of KDUs, VMs and execution | | | | | | | environments. 
| | | | | | | Detail: K8s cluster | | | | | | | '83f90d55-fa1a-4878-9c05-ca0cd236f650' | | | | | | | has not been initialized for 'juju- | | | | | | | bundle' | +------------------+--------------------------------------+---------------------+----------+-------------------+-----------------------------------------+ To get the history of all operations over a NS, run "osm ns-op-list NS_ID" For more details on the current operation, run "osm ns-op-show OPERATION_ID"' contains 'BROKEN' ------------------------------------------------------------------------------ Execute Day 2 Operations :: Performs one Day 2 operation per VNF t... | FAIL | The ns-action with id afcc6d5f-0e31-40a2-ae5d-ed0339455c7a was not completed ------------------------------------------------------------------------------ Delete NS K8s Instance Test :: Delete NS instance. | PASS | ------------------------------------------------------------------------------ Delete NS Descriptor Test :: Delete NS package from OSM. | PASS | ------------------------------------------------------------------------------ Delete VNF Descriptor Test :: Delete NF package from OSM. | PASS | ------------------------------------------------------------------------------ Delete VNF NS Packages :: Delete tar.gz NF and NS package files. | PASS | ------------------------------------------------------------------------------ Testsuite.K8S 03-Simple K8S :: [K8s-03] Simple K8s. | FAIL | 8 tests, 6 passed, 2 failed ============================================================================== Testsuite.K8S 04-Openldap Helm :: [K8s-04] Openldap Helm chart. ============================================================================== Create Package For OpenLDAP CNF :: Upload NF package for the tests... | PASS | ------------------------------------------------------------------------------ Create Package For OpenLDAP NS :: Upload NS package for the testsu... | PASS | ------------------------------------------------------------------------------ Create Network Service Instance :: Instantiate NS for the testsuite. | PASS | ------------------------------------------------------------------------------ Get Ns Id :: Retrieve NS instance id to be used later on. | PASS | ------------------------------------------------------------------------------ Get Vnf Id :: Retrieve NF instance id to be used later on. | PASS | ------------------------------------------------------------------------------ Execute Upgrade Operation :: Perform OSM action to upgrade the num... | PASS | ------------------------------------------------------------------------------ Check Replicas After Upgrade Operation :: Check that the number of... | PASS | ------------------------------------------------------------------------------ Execute Rollback Operation :: Perform OSM action to rollback the p... | PASS | ------------------------------------------------------------------------------ Check Replicas After Rollback Operation :: Check that the number o... | PASS | ------------------------------------------------------------------------------ Delete Network Service Instance :: Delete NS instance. | PASS | ------------------------------------------------------------------------------ Delete NS Descriptor Test :: Delete NS package from OSM. | PASS | ------------------------------------------------------------------------------ Delete VNF Descriptor Test :: Delete NF package from OSM. 
| PASS | ------------------------------------------------------------------------------ Testsuite.K8S 04-Openldap Helm :: [K8s-04] Openldap Helm chart. | PASS | 12 tests, 12 passed, 0 failed ============================================================================== Testsuite.K8S 05-K8S Proxy Charms :: [K8s-05] K8s Proxy Charm. ============================================================================== Create Charm VNF Descriptor :: Upload VNF package for the testsuite. | PASS | ------------------------------------------------------------------------------ Create Charm NS Descriptor :: Upload NS package for the testsuite. | PASS | ------------------------------------------------------------------------------ Instantiate Charm Network Service :: Instantiate NS for the testsu... | FAIL | '+------------------------+--------------------------------------+---------------------+----------+-------------------+-----------------------------------------+ | ns instance name | id | date | ns state | current operation | error details | +------------------------+--------------------------------------+---------------------+----------+-------------------+-----------------------------------------+ | k8s_05-k8s_proxy_charm | a39614c6-7bce-46b8-913d-aba290a994ed | 2023-12-31T18:01:12 | BROKEN | IDLE (None) | Operation: INSTANTIATING.7f4c10fe-a60f- | | | | | | | 48a8-9137-f58cac099f31, Stage 2/5: | | | | | | | deployment of KDUs, VMs and execution | | | | | | | environments. | | | | | | | Detail: Deploying VCA vnf1.: create | | | | | | | execution environment. 'cacert'. | | | | | | | Deploying VCA vnf2.: create execution | | | | | | | environment. 'cacert' | +------------------------+--------------------------------------+---------------------+----------+-------------------+-----------------------------------------+ To get the history of all operations over a NS, run "osm ns-op-list NS_ID" For more details on the current operation, run "osm ns-op-show OPERATION_ID"' contains 'BROKEN' ------------------------------------------------------------------------------ Get Management Ip Addresses :: Get the mgmt IP address of the two ... | FAIL | Variable '${NS_ID}' not found. ------------------------------------------------------------------------------ Test SSH Access :: Check that both VNF are accessible via SSH in t... | FAIL | msg=IP address of the management VNF 'vnf1' is not available ------------------------------------------------------------------------------ Check Remote Files Created Via Day 1 Operations :: The Charm VNF h... | FAIL | Variable '${VNF_1_IP_ADDR}' not found. ------------------------------------------------------------------------------ Execute Day 2 Operations :: Performs one Day 2 operation per VNF t... | FAIL | msg=Network service instance is not available ------------------------------------------------------------------------------ Check Remote Files Created Via Day 2 Operations :: Check whether t... | FAIL | Variable '${VNF_1_IP_ADDR}' not found. ------------------------------------------------------------------------------ Delete NS Instance :: Delete NS instance. | FAIL | Keyword 'Check For NS Instance To Be Deleted' failed after retrying for 16 minutes. The last error was: k8s_05-k8s_proxy_charm == k8s_05-k8s_proxy_charm ------------------------------------------------------------------------------ Delete NS Descriptor :: Delete NS package from OSM. 
| FAIL | 1 != 0 ------------------------------------------------------------------------------ Delete VNF Descriptor :: Delete NF package from OSM. | FAIL | 1 != 0 ------------------------------------------------------------------------------ Testsuite.K8S 05-K8S Proxy Charms :: [K8s-05] K8s Proxy Charm. | FAIL | 11 tests, 2 passed, 9 failed ============================================================================== Testsuite.K8S 06-K8S Secure Key Management :: [K8s-06] K8s Secure Key Manag... ============================================================================== Create Charm VNF Descriptor :: Upload VNF package for the testsuite. | PASS | ------------------------------------------------------------------------------ Create Charm NS Descriptor :: Upload NS package for the testsuite. | PASS | ------------------------------------------------------------------------------ Instantiate Charm Network Service :: Instantiate NS for the testsu... | FAIL | '+---------------------------------+--------------------------------------+---------------------+----------+-------------------+-----------------------------------------+ | ns instance name | id | date | ns state | current operation | error details | +---------------------------------+--------------------------------------+---------------------+----------+-------------------+-----------------------------------------+ | k8s_06-nopasswd_k8s_proxy_charm | c42d9204-7180-4ff6-ae6b-92ceac847631 | 2023-12-31T18:37:10 | BROKEN | IDLE (None) | Operation: INSTANTIATING.be6eeb41-39d9- | | | | | | | 4e6e-8f16-1a3773dcac56, Stage 2/5: | | | | | | | deployment of KDUs, VMs and execution | | | | | | | environments. | | | | | | | Detail: Deploying VCA vnf1.: create | | | | | | | execution environment. 'cacert'. | | | | | | | Deploying VCA vnf2.: create execution | | | | | | | environment. 'cacert' | +---------------------------------+--------------------------------------+---------------------+----------+-------------------+-----------------------------------------+ To get the history of all operations over a NS, run "osm ns-op-list NS_ID" For more details on the current operation, run "osm ns-op-show OPERATION_ID"' contains 'BROKEN' ------------------------------------------------------------------------------ Get Ns Id :: Get the NS id and save it NS_ID suite variable to use... | PASS | ------------------------------------------------------------------------------ Get Management Ip Addresses :: Get the mgmt IP address of the two ... | PASS | ------------------------------------------------------------------------------ Test SSH Access :: Check that both VNF are accessible via SSH in t... | PASS | ------------------------------------------------------------------------------ Check Remote Files Created Via Day 1 Operations :: The Charm VNF h... | FAIL | 2 != 0 ------------------------------------------------------------------------------ Execute Day 2 Operations :: Performs one Day 2 operation per VNF t... | FAIL | The ns-action with id 9431b761-dfd2-4c8c-8746-045548562f6b was not completed ------------------------------------------------------------------------------ Check Remote Files Created Via Day 2 Operations :: Check whether t... | FAIL | 2 != 0 ------------------------------------------------------------------------------ Delete NS Instance :: Delete NS instance. | FAIL | Keyword 'Check For NS Instance To Be Deleted' failed after retrying for 16 minutes. 
The last error was: k8s_06-nopasswd_k8s_proxy_charm == k8s_06-nopasswd_k8s_proxy_charm ------------------------------------------------------------------------------ Delete NS Descriptor :: Delete NS package from OSM. | FAIL | 1 != 0 ------------------------------------------------------------------------------ Delete VNF Descriptor :: Delete NF package from OSM. | FAIL | 1 != 0 ------------------------------------------------------------------------------ Testsuite.K8S 06-K8S Secure Key Management :: [K8s-06] K8s Secure ... | FAIL | 12 tests, 5 passed, 7 failed ============================================================================== Testsuite.K8S 07-Dummy Helm :: [K8s-07] Openldap Helm in isolated cluster w... ============================================================================== Create Package For OpenLDAP CNF :: Upload NF package for the tests... | PASS | ------------------------------------------------------------------------------ Create Package For OpenLDAP NS :: Upload NS package for the testsu... | PASS | ------------------------------------------------------------------------------ Create Dummy VIM :: Register a VIM of type dummy in OSM. | PASS | ------------------------------------------------------------------------------ Add K8s Cluster To OSM :: Register a K8s cluster associated to the... | PASS | ------------------------------------------------------------------------------ Create Network Service Instance :: Instantiate NS for the testsuite. | PASS | ------------------------------------------------------------------------------ Delete Network Service Instance :: Delete NS instance. | PASS | ------------------------------------------------------------------------------ Remove K8s Cluster from OSM :: Remove K8s cluster from OSM. | PASS | ------------------------------------------------------------------------------ Delete VIM :: Remove VIM from OSM. | PASS | ------------------------------------------------------------------------------ Delete NS Descriptor Test :: Delete NS package from OSM. | PASS | ------------------------------------------------------------------------------ Delete VNF Descriptor Test :: Delete NF package from OSM. | PASS | ------------------------------------------------------------------------------ Testsuite.K8S 07-Dummy Helm :: [K8s-07] Openldap Helm in isolated ... | PASS | 10 tests, 10 passed, 0 failed ============================================================================== Testsuite.K8S 08-Simple K8S Scaling :: [K8s-08] Simple K8s Scale. ============================================================================== Create Simple K8s Scale VNF Descriptor :: Upload VNF package for t... | PASS | ------------------------------------------------------------------------------ Create Simple K8s Scale NS Descriptor :: Upload NS package for the... | PASS | ------------------------------------------------------------------------------ Network Service K8s Instance Test :: Instantiate NS for the testsu... 
| FAIL | '+------------------+--------------------------------------+---------------------+----------+-------------------+-----------------------------------------+ | ns instance name | id | date | ns state | current operation | error details | +------------------+--------------------------------------+---------------------+----------+-------------------+-----------------------------------------+ | native-k8s-scale | c7a1182f-cc61-4c3a-aed6-91a2665a08b4 | 2023-12-31T19:16:13 | BROKEN | IDLE (None) | Operation: INSTANTIATING.2c5ecc3c-de6c- | | | | | | | 404a-a6f0-14d310503a0a, Stage 2/5: | | | | | | | deployment of KDUs, VMs and execution | | | | | | | environments. | | | | | | | Detail: K8s cluster | | | | | | | '83f90d55-fa1a-4878-9c05-ca0cd236f650' | | | | | | | has not been initialized for 'juju- | | | | | | | bundle' | +------------------+--------------------------------------+---------------------+----------+-------------------+-----------------------------------------+ To get the history of all operations over a NS, run "osm ns-op-list NS_ID" For more details on the current operation, run "osm ns-op-show OPERATION_ID"' contains 'BROKEN' ------------------------------------------------------------------------------ Get KDU Model Name :: Get the model name of the network service k8... | FAIL | native-kdu ------------------------------------------------------------------------------ Get Scale Count Before Scale Out :: Get the scale count of the app... | FAIL | Invalid IF condition: Evaluating expression ' != 2' failed: SyntaxError: invalid syntax (, line 1) Variables in the original expression '${INITIAL_KDU_COUNT} != ${INSTANTIATION_COUNT}' were resolved before the expression was evaluated. Try using '$INITIAL_KDU_COUNT != $INSTANTIATION_COUNT' syntax to avoid that. See Evaluating Expressions appendix in Robot Framework User Guide for more details. ------------------------------------------------------------------------------ Perform Manual KDU Scale Out :: Scale out the application of netwo... | FAIL | The ns-action with id 9c31a946-39ac-4f7d-85e0-0d842ef6bc7e was not completed ------------------------------------------------------------------------------ Check Scale Count After Scale Out :: Check whether the scale count... | FAIL | Invalid IF condition: Evaluating expression ' != + 1' failed: SyntaxError: invalid syntax (, line 1) Variables in the original expression '${kdu_count} != ${INITIAL_KDU_COUNT} + 1' were resolved before the expression was evaluated. Try using '$kdu_count != $INITIAL_KDU_COUNT + 1' syntax to avoid that. See Evaluating Expressions appendix in Robot Framework User Guide for more details. ------------------------------------------------------------------------------ Perform Manual KDU Scale In :: Scale in the application of network... | FAIL | The ns-action with id ee2e6539-4caa-435c-ba90-47f9c2cf9aa7 was not completed ------------------------------------------------------------------------------ Check Scale Count After Scale In :: Check whether the scale count ... | FAIL | Invalid IF condition: Evaluating expression ' != ' failed: SyntaxError: invalid syntax (, line 1) Variables in the original expression '${kdu_count} != ${INITIAL_KDU_COUNT}' were resolved before the expression was evaluated. Try using '$kdu_count != $INITIAL_KDU_COUNT' syntax to avoid that. See Evaluating Expressions appendix in Robot Framework User Guide for more details. 
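The K8s-08 failures above repeat the two patterns already seen in earlier suites: the NS goes BROKEN because the K8s cluster was never initialized for 'juju-bundle', and the follow-on Robot checks fail because suite variables resolved to empty strings. A minimal triage sketch in shell, assuming the osm client inside the test container is already pointed at this stack (OSM_HOSTNAME exported) and reusing the ids printed in the log above; passing the cluster id (rather than its name) to k8scluster-show is an assumption:

# Inspect the failed instantiation of native-k8s-scale (ids copied from the log above).
osm ns-op-list c7a1182f-cc61-4c3a-aed6-91a2665a08b4
osm ns-op-show 2c5ecc3c-de6c-404a-a6f0-14d310503a0a
# Check whether the referenced cluster ever finished initialization for juju-bundle deployments.
osm k8scluster-list
osm k8scluster-show 83f90d55-fa1a-4878-9c05-ca0cd236f650

If the cluster never reached an enabled state for juju-bundle, re-registering it (osm k8scluster-delete / k8scluster-add) before re-running the K8s suites would be the natural next step.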
------------------------------------------------------------------------------ Delete NS K8s Instance Test :: Delete NS instance. | PASS | ------------------------------------------------------------------------------ Delete NS Descriptor Test :: Delete NS package from OSM. | PASS | ------------------------------------------------------------------------------ Delete VNF Descriptor Test :: Delete NF package from OSM. | PASS | ------------------------------------------------------------------------------ Testsuite.K8S 08-Simple K8S Scaling :: [K8s-08] Simple K8s Scale. | FAIL | 12 tests, 5 passed, 7 failed ============================================================================== Testsuite.K8S 09-Pebble Charm K8S :: [K8s-09] Pebble Charm. ============================================================================== Create Simple K8s VNF Descriptor :: Upload VNF package for the tes... | PASS | ------------------------------------------------------------------------------ Create Simple K8s Descriptor :: Upload NS package for the testsuite. | PASS | ------------------------------------------------------------------------------ Network Service K8s Instance Test :: Instantiate NS for the testsu... | FAIL | '+------------------+--------------------------------------+---------------------+----------+-------------------+-----------------------------------------+ | ns instance name | id | date | ns state | current operation | error details | +------------------+--------------------------------------+---------------------+----------+-------------------+-----------------------------------------+ | pebble-charm-k8s | 87e02a27-c078-4097-bd7e-5e3d98d5d342 | 2023-12-31T19:17:01 | BROKEN | IDLE (None) | Operation: INSTANTIATING.68a55822-c487- | | | | | | | 49d5-b37e-d5618a39ea59, Stage 2/5: | | | | | | | deployment of KDUs, VMs and execution | | | | | | | environments. | | | | | | | Detail: K8s cluster | | | | | | | '83f90d55-fa1a-4878-9c05-ca0cd236f650' | | | | | | | has not been initialized for 'juju- | | | | | | | bundle' | +------------------+--------------------------------------+---------------------+----------+-------------------+-----------------------------------------+ To get the history of all operations over a NS, run "osm ns-op-list NS_ID" For more details on the current operation, run "osm ns-op-show OPERATION_ID"' contains 'BROKEN' ------------------------------------------------------------------------------ Execute Day 2 Operations :: Performs one Day 2 operation per VNF t... | FAIL | The ns-action with id 4b3a464a-3198-4827-89b4-b836b3e992bc was not completed ------------------------------------------------------------------------------ Delete NS K8s Instance Test :: Delete NS instance. | PASS | ------------------------------------------------------------------------------ Delete NS Descriptor Test :: Delete the NS package. | PASS | ------------------------------------------------------------------------------ Delete VNF Descriptor Test :: Delete the VNF package. | PASS | ------------------------------------------------------------------------------ Delete VNF NS Packages :: Delete the tar.gz files associated to th... | PASS | ------------------------------------------------------------------------------ Testsuite.K8S 09-Pebble Charm K8S :: [K8s-09] Pebble Charm. | FAIL | 8 tests, 6 passed, 2 failed ============================================================================== Testsuite.K8S 10-Sol004 Sol007 With K8S Proxy Charms :: [K8s-10] K8s Proxy ... 
============================================================================== Create Charm VNF Descriptor :: Upload VNF package for the testsuite. | PASS | ------------------------------------------------------------------------------ Create Charm NS Descriptor :: Upload NS package for the testsuite. | PASS | ------------------------------------------------------------------------------ Instantiate Charm Network Service :: Instantiate NS for the testsu... | FAIL | '+------------------------+--------------------------------------+---------------------+----------+-------------------+-----------------------------------------+ | ns instance name | id | date | ns state | current operation | error details | +------------------------+--------------------------------------+---------------------+----------+-------------------+-----------------------------------------+ | k8s_10-k8s_proxy_charm | fb3920b5-c6cd-477d-bde7-127dbb1a442c | 2023-12-31T19:17:37 | BROKEN | IDLE (None) | Operation: INSTANTIATING.cf51c34b-55ef- | | | | | | | 43bd-b0e2-4dd981cd3e69, Stage 2/5: | | | | | | | deployment of KDUs, VMs and execution | | | | | | | environments. | | | | | | | Detail: Deploying VCA vnf1.: create | | | | | | | execution environment. 'cacert'. | | | | | | | Deploying VCA vnf2.: create execution | | | | | | | environment. 'cacert' | +------------------------+--------------------------------------+---------------------+----------+-------------------+-----------------------------------------+ To get the history of all operations over a NS, run "osm ns-op-list NS_ID" For more details on the current operation, run "osm ns-op-show OPERATION_ID"' contains 'BROKEN' ------------------------------------------------------------------------------ Get Management Ip Addresses :: Get the mgmt IP addresses of both V... | FAIL | Variable '${NS_ID}' not found. ------------------------------------------------------------------------------ Test SSH Access :: Check that both VNF are accessible via SSH in t... | FAIL | msg=IP address of the management VNF 'vnf1' is not available ------------------------------------------------------------------------------ Check Remote Files Created Via Day 1 Operations :: The Charm VNF h... | FAIL | Variable '${VNF_1_IP_ADDR}' not found. ------------------------------------------------------------------------------ Execute Day 2 Operations :: Performs one Day 2 operation per VNF t... | FAIL | msg=Network service instance is not available ------------------------------------------------------------------------------ Check Remote Files Created Via Day 2 Operations :: Check whether t... | FAIL | Variable '${VNF_1_IP_ADDR}' not found. ------------------------------------------------------------------------------ Delete NS Instance :: Delete NS instance. | FAIL | Keyword 'Check For NS Instance To Be Deleted' failed after retrying for 16 minutes. The last error was: k8s_10-k8s_proxy_charm == k8s_10-k8s_proxy_charm ------------------------------------------------------------------------------ Delete NS Descriptor :: Delete NS package from OSM. | FAIL | 1 != 0 ------------------------------------------------------------------------------ Delete VNF Descriptor :: Delete VNF package from OSM. | FAIL | 1 != 0 ------------------------------------------------------------------------------ Testsuite.K8S 10-Sol004 Sol007 With K8S Proxy Charms :: [K8s-10] K... 
| FAIL | 11 tests, 2 passed, 9 failed ============================================================================== Testsuite.K8S 12-Openldap Helm Day-2 :: [K8s-12] Openldap Helm chart. ============================================================================== Create Package For OpenLDAP CNF :: Upload VNF package for the test... | PASS | ------------------------------------------------------------------------------ Create Package For OpenLDAP NS :: Upload NS package for the testsu... | PASS | ------------------------------------------------------------------------------ Add K8s Cluster To OSM :: Register K8s cluster in OSM. | FAIL | 1 != 0 ------------------------------------------------------------------------------ Create Network Service Instance :: Instantiate NS for the testsuite. | FAIL | '+------------------+--------------------------------------+---------------------+----------+-------------------+-----------------------------------------+ | ns instance name | id | date | ns state | current operation | error details | +------------------+--------------------------------------+---------------------+----------+-------------------+-----------------------------------------+ | ldap | b333d955-ca44-4606-b284-c8a052f0605a | 2023-12-31T19:54:04 | BROKEN | IDLE (None) | Operation: INSTANTIATING.ad5c26aa-2eeb- | | | | | | | 420c-9e67-ae4e20a012ba, Stage 2/5: | | | | | | | deployment of KDUs, VMs and execution | | | | | | | environments. | | | | | | | Detail: Deploying VCA openldap.: create | | | | | | | execution environment. 'cacert' | +------------------+--------------------------------------+---------------------+----------+-------------------+-----------------------------------------+ To get the history of all operations over a NS, run "osm ns-op-list NS_ID" For more details on the current operation, run "osm ns-op-show OPERATION_ID"' contains 'BROKEN' ------------------------------------------------------------------------------ Get Ns Id :: Get NS instance id from OSM. | PASS | ------------------------------------------------------------------------------ Get Vnf Id :: Get VNF instance id from OSM. /bin/sh: 2: Syntax error: "|" unexpected | FAIL | +--------------------------------------+------+--------------------------------------+------------------+--------------+--------------------------------------+------------+ | vnf id | name | ns id | vnf member index | vnfd name | vim account id | ip address | +--------------------------------------+------+--------------------------------------+------------------+--------------+--------------------------------------+------------+ | 00e2ca59-2b60-4aa1-ab1a-056fd3fe0996 | - | b333d955-ca44-4606-b284-c8a052f0605a | openldap | openldap_knf | 1eb731ff-8cf0-4be1-a4c5-0e9177a9917e | None | +--------------------------------------+------+--------------------------------------+------------------+--------------+--------------------------------------+------------+ ------------------------------------------------------------------------------ Execute Day 2 Operations :: Performs one Day 2 operation. | FAIL | The ns-action with id 113befb1-e152-49e8-959d-a1ebdb546478 was not completed ------------------------------------------------------------------------------ Execute Upgrade Operation :: Perform OSM action to upgrade the num... | PASS | ------------------------------------------------------------------------------ Check Replicas After Upgrade Operation :: Check that the number of... | FAIL | Variable '${VNF_ID}' not found. 
Did you mean: ${VNFD_PKG} ${NS_ID} ------------------------------------------------------------------------------ Execute Rollback Operation :: Perform OSM action to rollback the p... | PASS | ------------------------------------------------------------------------------ Check Replicas After Rollback Operation :: Check that the number o... | FAIL | Variable '${VNF_ID}' not found. Did you mean: ${VNFD_PKG} ${NS_ID} ------------------------------------------------------------------------------ Delete Network Service Instance :: Delete NS instance. | FAIL | Keyword 'Check For NS Instance To Be Deleted' failed after retrying for 16 minutes. The last error was: ldap == ldap ------------------------------------------------------------------------------ Remove K8s Cluster from OSM :: Delete K8s cluster from OSM. | PASS | ------------------------------------------------------------------------------ Delete NS Descriptor Test :: Delete NS package from OSM. | FAIL | 1 != 0 ------------------------------------------------------------------------------ Delete VNF Descriptor Test :: Delete NF package from OSM. | FAIL | 1 != 0 ------------------------------------------------------------------------------ Testsuite.K8S 12-Openldap Helm Day-2 :: [K8s-12] Openldap Helm chart. | FAIL | 15 tests, 6 passed, 9 failed ============================================================================== [ ERROR ] Error in file '/robot-systest/testsuite/k8s_13-two_helm_kdu.robot' on line 61: Setting variable '${REPO_PASSWORD}' failed: Environment variable '%{OCI_REGISTRY_PASSWORD}' not found. [ ERROR ] Error in file '/robot-systest/testsuite/k8s_13-two_helm_kdu.robot' on line 59: Setting variable '${REPO_URI}' failed: Environment variable '%{OCI_REGISTRY_URL}' not found. [ ERROR ] Error in file '/robot-systest/testsuite/k8s_13-two_helm_kdu.robot' on line 60: Setting variable '${REPO_USER}' failed: Environment variable '%{OCI_REGISTRY_USER}' not found. Testsuite.K8S 13-Two Helm Kdu :: [K8s-13] Two Helm-based KDU stored in publ... ============================================================================== Create Package For CNF :: Create Package For CNF | PASS | ------------------------------------------------------------------------------ Create Package For NS :: Create Package For NS | PASS | ------------------------------------------------------------------------------ Create Helm OCI Repo :: Create Helm OCI Repo for openldap kdu | FAIL | Variable '${REPO_URI}' not found. ------------------------------------------------------------------------------ Create Network Service Instance :: Create Network Service Instance | FAIL | '+------------------+--------------------------------------+---------------------+----------+------------------------------------------------------+------------------------------------------+ | ns instance name | id | date | ns state | current operation | error details | +------------------+--------------------------------------+---------------------+----------+------------------------------------------------------+------------------------------------------+ | ldap | b333d955-ca44-4606-b284-c8a052f0605a | 2023-12-31T19:54:04 | BROKEN | IDLE (None) | Operation: TERMINATING.3158e9ff-6d7a- | | | | | | | 44d0-a05d-fc105847864c, Stage 3/3 delete | | | | | | | all.. 
| | | | | | | Detail: Terminating all VCA: 'cacert' | | ldap | a2b2a27a-64d9-41be-8185-4dbbd996fbbe | 2023-12-31T20:31:23 | BUILDING | INSTANTIATING (711fa3af-633e-494b-9b0f-77846dd997c4) | N/A | +------------------+--------------------------------------+---------------------+----------+------------------------------------------------------+------------------------------------------+ To get the history of all operations over a NS, run "osm ns-op-list NS_ID" For more details on the current operation, run "osm ns-op-show OPERATION_ID"' contains 'BROKEN' ------------------------------------------------------------------------------ Get Ns Id :: Get ID of NS instance | PASS | ------------------------------------------------------------------------------ Get Vnf Id :: Get ID of VNF /bin/sh: 2: a2b2a27a-64d9-41be-8185-4dbbd996fbbe: not found /bin/sh: 3: a2b2a27a-64d9-41be-8185-4dbbd996fbbe: not found | PASS | ------------------------------------------------------------------------------ Execute Upgrade Operation over first KDU :: Execute Upgrade Operat... | FAIL | ERROR: failed to exec operation ldap: error: Error 400: { "code": "BAD_REQUEST", "status": 400, "detail": "Invalid parameter member_vnf_index='two_helm_oci' is not one of the nsd:constituent-vnfd" } ------------------------------------------------------------------------------ Check Replicas After Upgrade Operation over first KDU :: Check Rep... | FAIL | '' cannot be converted to an integer: ValueError: invalid literal for int() with base 10: '' ------------------------------------------------------------------------------ Execute Rollback Operation over first KDU :: Execute Rollback Oper... | FAIL | ERROR: failed to exec operation ldap: error: Error 400: { "code": "BAD_REQUEST", "status": 400, "detail": "Invalid parameter member_vnf_index='two_helm_oci' is not one of the nsd:constituent-vnfd" } ------------------------------------------------------------------------------ Check Replicas After Rollback Operation over first KDU :: Check Re... | PASS | ------------------------------------------------------------------------------ Execute Upgrade Operation over second KDU :: Execute Upgrade Opera... | FAIL | ERROR: failed to exec operation ldap: error: Error 400: { "code": "BAD_REQUEST", "status": 400, "detail": "Invalid parameter member_vnf_index='two_helm_oci' is not one of the nsd:constituent-vnfd" } ------------------------------------------------------------------------------ Check Replicas After Upgrade Operation over second KDU :: Check Re... | FAIL | '' cannot be converted to an integer: ValueError: invalid literal for int() with base 10: '' ------------------------------------------------------------------------------ Execute Rollback Operation over second KDU :: Execute Rollback Ope... | FAIL | ERROR: failed to exec operation ldap: error: Error 400: { "code": "BAD_REQUEST", "status": 400, "detail": "Invalid parameter member_vnf_index='two_helm_oci' is not one of the nsd:constituent-vnfd" } ------------------------------------------------------------------------------ Check Replicas After Rollback Operation over second KDU :: Check R... 
| PASS | ------------------------------------------------------------------------------ Delete Network Service Instance :: Delete Network Service Instance | PASS | ------------------------------------------------------------------------------ Delete Helm OCI Repo :: Delete Helm OCI Repo | FAIL | 1 != 0 ------------------------------------------------------------------------------ Delete NS Descriptor Test :: Delete NS Descriptor Test | FAIL | 1 != 0 ------------------------------------------------------------------------------ Delete VNF Descriptor Test :: Delete VNF Descriptor Test | FAIL | 1 != 0 ------------------------------------------------------------------------------ Testsuite.K8S 13-Two Helm Kdu :: [K8s-13] Two Helm-based KDU store... | FAIL | 18 tests, 7 passed, 11 failed ============================================================================== Testsuite.Sa 02-Vnf With Vim Metrics And Autoscaling :: [SA-02] VNF with VI... ============================================================================== Create VNF Descriptor :: Upload VNF package for the testsuite. | PASS | ------------------------------------------------------------------------------ Get Thresholds From VNF :: Read metric threshold and threshold tim... | PASS | ------------------------------------------------------------------------------ Create NS Descriptor :: Upload NS package for the testsuite. | PASS | ------------------------------------------------------------------------------ Instantiate Network Service :: Instantiate the NS for the testsuite. | PASS | ------------------------------------------------------------------------------ Get VNF Id :: Retrieve VNF instance id to be used later on. | PASS | ------------------------------------------------------------------------------ Get VNF IP Address :: Get the mgmt IP address of the VNF to be use... | PASS | ------------------------------------------------------------------------------ Get VNF VIM-based Metric Before Auto-scaling :: Get from Prometheu... 
[ WARN ] Retrying (RetryAdapter(total=0, connect=None, read=None, redirect=None, status=None)) after connection broken by 'NameResolutionError(": Failed to resolve 'unknown' ([Errno -2] Name or service not known)")': /api/v1/query?query=osm_cpu_utilization%7Bns_id=%2222bd7cf2-aab3-4dc6-993a-d2600f08cb52%22,%7D | FAIL | Keyword 'Get Metric' failed after retrying 15 times. The last error was: ConnectionError: HTTPConnectionPool(host='unknown', port=9090): Max retries exceeded with url: /api/v1/query?query=osm_cpu_utilization%7Bns_id=%2222bd7cf2-aab3-4dc6-993a-d2600f08cb52%22,%7D (Caused by NameResolutionError(": Failed to resolve 'unknown' ([Errno -2] Name or service not known)")) ------------------------------------------------------------------------------ Increase VIM-based Metric To Force Auto-scaling :: Connect to the ... | PASS | ------------------------------------------------------------------------------ Wait VIM-based Metric To Exceed Threshold :: Wait until the VIM me... 
[ WARN ] Retrying (RetryAdapter(total=0, connect=None, read=None, redirect=None, status=None)) after connection broken by
'NameResolutionError(": Failed to resolve 'unknown' ([Errno -2] Name or service not known)")': /api/v1/query?query=osm_cpu_utilization%7Bns_id=%2222bd7cf2-aab3-4dc6-993a-d2600f08cb52%22,%7D [ WARN ] Retrying (RetryAdapter(total=0, connect=None, read=None, redirect=None, status=None)) after connection broken by 'NameResolutionError(": Failed to resolve 'unknown' ([Errno -2] Name or service not known)")': /api/v1/query?query=osm_cpu_utilization%7Bns_id=%2222bd7cf2-aab3-4dc6-993a-d2600f08cb52%22,%7D [ WARN ] Retrying (RetryAdapter(total=0, connect=None, read=None, redirect=None, status=None)) after connection broken by 'NameResolutionError(": Failed to resolve 'unknown' ([Errno -2] Name or service not known)")': /api/v1/query?query=osm_cpu_utilization%7Bns_id=%2222bd7cf2-aab3-4dc6-993a-d2600f08cb52%22,%7D [ WARN ] Retrying (RetryAdapter(total=0, connect=None, read=None, redirect=None, status=None)) after connection broken by 'NameResolutionError(": Failed to resolve 'unknown' ([Errno -2] Name or service not known)")': /api/v1/query?query=osm_cpu_utilization%7Bns_id=%2222bd7cf2-aab3-4dc6-993a-d2600f08cb52%22,%7D | FAIL | Keyword 'Check VIM-based Metric Exceeds Threshold' failed after retrying 15 times. The last error was: ConnectionError: HTTPConnectionPool(host='unknown', port=9090): Max retries exceeded with url: /api/v1/query?query=osm_cpu_utilization%7Bns_id=%2222bd7cf2-aab3-4dc6-993a-d2600f08cb52%22,%7D (Caused by NameResolutionError(": Failed to resolve 'unknown' ([Errno -2] Name or service not known)")) ------------------------------------------------------------------------------ Wait Threshold Time :: Wait until the VIM metric has exceeded thre... | PASS | ------------------------------------------------------------------------------ Check VIM-based Metric Exceeds Threshold After Threshold-time :: C... [ WARN ] Retrying (RetryAdapter(total=0, connect=None, read=None, redirect=None, status=None)) after connection broken by 'NameResolutionError(": Failed to resolve 'unknown' ([Errno -2] Name or service not known)")': /api/v1/query?query=osm_cpu_utilization%7Bns_id=%2222bd7cf2-aab3-4dc6-993a-d2600f08cb52%22,%7D | FAIL | ConnectionError: HTTPConnectionPool(host='unknown', port=9090): Max retries exceeded with url: /api/v1/query?query=osm_cpu_utilization%7Bns_id=%2222bd7cf2-aab3-4dc6-993a-d2600f08cb52%22,%7D (Caused by NameResolutionError(": Failed to resolve 'unknown' ([Errno -2] Name or service not known)")) ------------------------------------------------------------------------------ Get VDUs After Auto-scaling :: Check that the VNF has scaled up an... | PASS | ------------------------------------------------------------------------------ Delete NS Instance :: Delete NS instance. | PASS | ------------------------------------------------------------------------------ Delete NS Descriptor :: Delete NS package from OSM. | PASS | ------------------------------------------------------------------------------ Delete VNF Descriptor :: Delete VNF package from OSM. | PASS | ------------------------------------------------------------------------------ Testsuite.Sa 02-Vnf With Vim Metrics And Autoscaling :: [SA-02] VN... 
Testsuite.Sa 02-Vnf With Vim Metrics And Autoscaling :: [SA-02] VN... | FAIL |
15 tests, 12 passed, 3 failed
==============================================================================
Testsuite | FAIL |
287 tests, 172 passed, 115 failed
==============================================================================
Output: /robot-systest/reports/output.xml
Log: /robot-systest/reports/log.html
Report: /robot-systest/reports/report.html
[Pipeline] }
[Pipeline] // stage
[Pipeline] stage
[Pipeline] { (Declarative: Post Actions)
[Pipeline] echo
Retrieve container logs
[Pipeline] sh
[azure_robot_tests] Running shell script
+ . /robot-systest/results/osm_environment.rc
+ export CLOUD_TYPE=azure
+ export OSM_HOSTNAME=172.21.23.11
+ export OSM_IMAGE_NAME=osmtest202312311216
+ export JUJU_PASSWORD=secret
+ /robot-systest/cloud-scripts/remote-extract-logs.sh
Saving grafana logs...
Warning: Permanently added '172.21.23.11' (ED25519) to the list of known hosts.
[... the same host-key warning is printed for each remaining component ...]
Saving keystone logs...
Saving lcm logs...
Saving mon logs...
Saving nbi logs...
Saving pol logs...
Saving ro logs...
Saving ngui logs...
Saving airflow-scheduler logs...
Saving pushgateway-prometheus-pushgateway logs...
Saving webhook-translator logs...
Saving kafka logs...
Saving mongo logs...
Saving mysql logs...
Saving prometheus logs...
Saving zookeeper logs...
Saving alertmanager logs...
All logs saved to /robot-systest/results/
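remote-extract-logs.sh itself is not shown in this output, but the file names copied in the next step (osm-deploy-*.log and osm-sts-*.log) suggest it ssh-es into the OSM host and dumps the logs of each Kubernetes deployment and statefulset of the OSM installation. A minimal sketch under those assumptions follows; the osm namespace, the component lists, the key path and the ubuntu user are inferred from this run and may not match the real script.

  # Sketch only, NOT the real remote-extract-logs.sh: assumes the "osm" namespace,
  # kubectl available on the OSM host, and ssh access as ubuntu with the CI key.
  RESULTS_DIR=/robot-systest/results
  DEPLOYMENTS="grafana keystone lcm mon nbi pol ro ngui airflow-scheduler pushgateway-prometheus-pushgateway webhook-translator"
  STATEFULSETS="kafka mongo mysql prometheus zookeeper alertmanager"
  for d in ${DEPLOYMENTS}; do
      echo "Saving ${d} logs..."
      ssh -i /root/osm_id_rsa "ubuntu@${OSM_HOSTNAME}" \
          "kubectl -n osm logs deployment/${d} --all-containers" \
          > "${RESULTS_DIR}/osm-deploy-${d}.log"
  done
  for s in ${STATEFULSETS}; do
      echo "Saving ${s} logs..."
      ssh -i /root/osm_id_rsa "ubuntu@${OSM_HOSTNAME}" \
          "kubectl -n osm logs statefulset/${s} --all-containers" \
          > "${RESULTS_DIR}/osm-sts-${s}.log"
  done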
[Pipeline] echo
Save results
[Pipeline] sh
[azure_robot_tests] Running shell script
+ rm -rf results
[Pipeline] sh
[azure_robot_tests] Running shell script
+ cp -var /robot-systest/results /robot-systest/reports/log.html /robot-systest/reports/output.xml /robot-systest/reports/report.html .
'/robot-systest/results' -> './results'
'/robot-systest/results/k8s_environment.rc' -> './results/k8s_environment.rc'
'/robot-systest/results/kubeconfig.yaml' -> './results/kubeconfig.yaml'
'/robot-systest/results/osm_environment.rc' -> './results/osm_environment.rc'
'/robot-systest/results/osm-deploy-airflow-scheduler.log' -> './results/osm-deploy-airflow-scheduler.log'
'/robot-systest/results/osm-deploy-ngui.log' -> './results/osm-deploy-ngui.log'
'/robot-systest/results/osm-deploy-pushgateway-prometheus-pushgateway.log' -> './results/osm-deploy-pushgateway-prometheus-pushgateway.log'
'/robot-systest/results/osm-deploy-pol.log' -> './results/osm-deploy-pol.log'
'/robot-systest/results/osm-deploy-webhook-translator.log' -> './results/osm-deploy-webhook-translator.log'
'/robot-systest/results/osm-sts-kafka.log' -> './results/osm-sts-kafka.log'
'/robot-systest/results/osm-deploy-grafana.log' -> './results/osm-deploy-grafana.log'
'/robot-systest/results/osm-sts-mongo.log' -> './results/osm-sts-mongo.log'
'/robot-systest/results/osm-sts-mysql.log' -> './results/osm-sts-mysql.log'
'/robot-systest/results/osm-deploy-keystone.log' -> './results/osm-deploy-keystone.log'
'/robot-systest/results/osm-sts-prometheus.log' -> './results/osm-sts-prometheus.log'
'/robot-systest/results/osm-deploy-lcm.log' -> './results/osm-deploy-lcm.log'
'/robot-systest/results/osm-sts-zookeeper.log' -> './results/osm-sts-zookeeper.log'
'/robot-systest/results/osm-sts-alertmanager.log' -> './results/osm-sts-alertmanager.log'
'/robot-systest/results/osm-deploy-mon.log' -> './results/osm-deploy-mon.log'
'/robot-systest/results/osm-deploy-nbi.log' -> './results/osm-deploy-nbi.log'
'/robot-systest/results/osm-deploy-ro.log' -> './results/osm-deploy-ro.log'
'/robot-systest/reports/log.html' -> './log.html'
'/robot-systest/reports/output.xml' -> './output.xml'
'/robot-systest/reports/report.html' -> './report.html'
Archiving artifacts
[Pipeline] step
Recording fingerprints
[Pipeline] echo
Updates the Robot dashboard in Jenkins
[Pipeline] robot
Robot results publisher started...
-Parsing output xml: Done!
-Copying log files to build dir: Done!
-Assigning results to build: Done!
-Checking thresholds: Done!
Done publishing Robot results.
[Pipeline] echo
Destroy the K8s cluster
[Pipeline] sh
[azure_robot_tests] Running shell script
+ . /robot-systest/results/k8s_environment.rc
+ export CLOUD_TYPE=azure
+ export USE_PAAS_K8S=FALSE
+ export K8S_IP=172.21.23.10
+ export K8S_IMAGE_NAME=k8stest202312311208
+ export K8S_CREDENTIALS=/robot-systest/results/kubeconfig.yaml
+ /robot-systest/cloud-scripts/delete-k8s.sh
Deleting IaaS k8s cluster in azure
++ az vm show --resource-group OSM_CICD_GROUP --name k8stest202312311208 --query 'networkProfile.networkInterfaces[0].id'
+ INTERFACE_ID='"/subscriptions/8fb7e78d-097b-413d-bc65-41d29be6bab1/resourceGroups/OSM_CICD_GROUP/providers/Microsoft.Network/networkInterfaces/k8stest202312311208VMNic"'
+ INTERFACE_ID=/subscriptions/8fb7e78d-097b-413d-bc65-41d29be6bab1/resourceGroups/OSM_CICD_GROUP/providers/Microsoft.Network/networkInterfaces/k8stest202312311208VMNic
++ az vm show --resource-group OSM_CICD_GROUP --name k8stest202312311208 --query storageProfile.osDisk.managedDisk.id
+ OS_DISK_ID='"/subscriptions/8fb7e78d-097b-413d-bc65-41d29be6bab1/resourceGroups/OSM_CICD_GROUP/providers/Microsoft.Compute/disks/k8stest202312311208_OsDisk_1_abc8ee22be80410895d9b8a7bbf6ef2e"'
+ OS_DISK_ID=/subscriptions/8fb7e78d-097b-413d-bc65-41d29be6bab1/resourceGroups/OSM_CICD_GROUP/providers/Microsoft.Compute/disks/k8stest202312311208_OsDisk_1_abc8ee22be80410895d9b8a7bbf6ef2e
++ az network nic show --id /subscriptions/8fb7e78d-097b-413d-bc65-41d29be6bab1/resourceGroups/OSM_CICD_GROUP/providers/Microsoft.Network/networkInterfaces/k8stest202312311208VMNic --query networkSecurityGroup.id
+ SECURITY_GROUP_ID='"/subscriptions/8fb7e78d-097b-413d-bc65-41d29be6bab1/resourceGroups/OSM_CICD_GROUP/providers/Microsoft.Network/networkSecurityGroups/k8stest202312311208NSG"'
+ SECURITY_GROUP_ID=/subscriptions/8fb7e78d-097b-413d-bc65-41d29be6bab1/resourceGroups/OSM_CICD_GROUP/providers/Microsoft.Network/networkSecurityGroups/k8stest202312311208NSG
++ az network nic show --id /subscriptions/8fb7e78d-097b-413d-bc65-41d29be6bab1/resourceGroups/OSM_CICD_GROUP/providers/Microsoft.Network/networkInterfaces/k8stest202312311208VMNic --query 'ipConfigurations[0].publicIpAddress.id'
+ PUBLIC_IP_ID=
+ PUBLIC_IP_ID=
+ az vm delete --resource-group OSM_CICD_GROUP --name k8stest202312311208 --yes
+ az network nic delete --id /subscriptions/8fb7e78d-097b-413d-bc65-41d29be6bab1/resourceGroups/OSM_CICD_GROUP/providers/Microsoft.Network/networkInterfaces/k8stest202312311208VMNic
+ az disk delete --id /subscriptions/8fb7e78d-097b-413d-bc65-41d29be6bab1/resourceGroups/OSM_CICD_GROUP/providers/Microsoft.Compute/disks/k8stest202312311208_OsDisk_1_abc8ee22be80410895d9b8a7bbf6ef2e --yes
+ az network nsg delete --id /subscriptions/8fb7e78d-097b-413d-bc65-41d29be6bab1/resourceGroups/OSM_CICD_GROUP/providers/Microsoft.Network/networkSecurityGroups/k8stest202312311208NSG
+ '[' -n '' ']'
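The trace above is the usual pattern for removing a standalone Azure VM without leaving orphans: az vm delete removes only the VM object, so the script first captures the IDs of the dependent resources (NIC, OS disk, NSG and, if present, a public IP) and deletes them explicitly afterwards. Condensed into a generic sketch below; -o tsv is used to avoid the quoted IDs the traced script strips by hand, the names are the ones from this run, and the public-ip branch is an assumption since none was attached here.

  # Condensed sketch of the teardown pattern traced above (not a verbatim copy of delete-k8s.sh).
  GROUP=OSM_CICD_GROUP
  VM_NAME=k8stest202312311208   # example name from this run
  NIC_ID=$(az vm show -g "$GROUP" -n "$VM_NAME" --query 'networkProfile.networkInterfaces[0].id' -o tsv)
  DISK_ID=$(az vm show -g "$GROUP" -n "$VM_NAME" --query 'storageProfile.osDisk.managedDisk.id' -o tsv)
  NSG_ID=$(az network nic show --id "$NIC_ID" --query 'networkSecurityGroup.id' -o tsv)
  IP_ID=$(az network nic show --id "$NIC_ID" --query 'ipConfigurations[0].publicIpAddress.id' -o tsv)
  az vm delete -g "$GROUP" -n "$VM_NAME" --yes       # removes the VM only
  az network nic delete --id "$NIC_ID"               # then its NIC
  az disk delete --id "$DISK_ID" --yes               # then its OS disk
  az network nsg delete --id "$NSG_ID"               # then its NSG
  [ -n "$IP_ID" ] && az network public-ip delete --ids "$IP_ID"   # only if a public IP was attached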
[Pipeline] echo
Destroy the OSM host
[Pipeline] sh
[azure_robot_tests] Running shell script
+ . /robot-systest/results/osm_environment.rc
+ export CLOUD_TYPE=azure
+ export OSM_HOSTNAME=172.21.23.11
+ export OSM_IMAGE_NAME=osmtest202312311216
+ export JUJU_PASSWORD=secret
+ /robot-systest/cloud-scripts/delete-osm-vm.sh
++ az vm show --resource-group OSM_CICD_GROUP --name osmtest202312311216 --query 'networkProfile.networkInterfaces[0].id'
+ INTERFACE_ID='"/subscriptions/8fb7e78d-097b-413d-bc65-41d29be6bab1/resourceGroups/OSM_CICD_GROUP/providers/Microsoft.Network/networkInterfaces/osmtest202312311216VMNic"'
+ INTERFACE_ID=/subscriptions/8fb7e78d-097b-413d-bc65-41d29be6bab1/resourceGroups/OSM_CICD_GROUP/providers/Microsoft.Network/networkInterfaces/osmtest202312311216VMNic
++ az vm show --resource-group OSM_CICD_GROUP --name osmtest202312311216 --query storageProfile.osDisk.managedDisk.id
+ OS_DISK_ID='"/subscriptions/8fb7e78d-097b-413d-bc65-41d29be6bab1/resourceGroups/OSM_CICD_GROUP/providers/Microsoft.Compute/disks/osmtest202312311216_OsDisk_1_8837f7fa04eb46698938239731b9db00"'
+ OS_DISK_ID=/subscriptions/8fb7e78d-097b-413d-bc65-41d29be6bab1/resourceGroups/OSM_CICD_GROUP/providers/Microsoft.Compute/disks/osmtest202312311216_OsDisk_1_8837f7fa04eb46698938239731b9db00
++ az network nic show --id /subscriptions/8fb7e78d-097b-413d-bc65-41d29be6bab1/resourceGroups/OSM_CICD_GROUP/providers/Microsoft.Network/networkInterfaces/osmtest202312311216VMNic --query networkSecurityGroup.id
+ SECURITY_GROUP_ID='"/subscriptions/8fb7e78d-097b-413d-bc65-41d29be6bab1/resourceGroups/OSM_CICD_GROUP/providers/Microsoft.Network/networkSecurityGroups/osmtest202312311216NSG"'
+ SECURITY_GROUP_ID=/subscriptions/8fb7e78d-097b-413d-bc65-41d29be6bab1/resourceGroups/OSM_CICD_GROUP/providers/Microsoft.Network/networkSecurityGroups/osmtest202312311216NSG
++ az network nic show --id /subscriptions/8fb7e78d-097b-413d-bc65-41d29be6bab1/resourceGroups/OSM_CICD_GROUP/providers/Microsoft.Network/networkInterfaces/osmtest202312311216VMNic --query 'ipConfigurations[0].publicIpAddress.id'
+ PUBLIC_IP_ID=
+ PUBLIC_IP_ID=
+ az vm delete --resource-group OSM_CICD_GROUP --name osmtest202312311216 --yes
+ az network nic delete --id /subscriptions/8fb7e78d-097b-413d-bc65-41d29be6bab1/resourceGroups/OSM_CICD_GROUP/providers/Microsoft.Network/networkInterfaces/osmtest202312311216VMNic
+ az disk delete --id /subscriptions/8fb7e78d-097b-413d-bc65-41d29be6bab1/resourceGroups/OSM_CICD_GROUP/providers/Microsoft.Compute/disks/osmtest202312311216_OsDisk_1_8837f7fa04eb46698938239731b9db00 --yes
+ az network nsg delete --id /subscriptions/8fb7e78d-097b-413d-bc65-41d29be6bab1/resourceGroups/OSM_CICD_GROUP/providers/Microsoft.Network/networkSecurityGroups/osmtest202312311216NSG
+ '[' -n '' ']'
[Pipeline] sh
[azure_robot_tests] Running shell script
+ az vm list -o table
Name              ResourceGroup       Location    Zones
----------------  ------------------  ----------  -------
vm-CICD-Host      OSM_CICD_GROUP      westeurope  1
vm-VPN-Host       OSM_GROUP           westeurope
VPN-Gateway       OSM_GROUP           westeurope
vm-Hackfest-Host  OSM_HACKFEST_GROUP  westeurope
[Pipeline] }
[Pipeline] // stage
[Pipeline] }
[Pipeline] // withEnv
[Pipeline] }
$ docker stop --time=1 47440e0bbe8f55705a1ea7ace455183a7b2014673a7894d3142e9c9696ff9721
$ docker rm -f 47440e0bbe8f55705a1ea7ace455183a7b2014673a7894d3142e9c9696ff9721
[Pipeline] // withDockerContainer
[Pipeline] }
[Pipeline] // node
[Pipeline] End of Pipeline
Finished: SUCCESS
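The closing az vm list confirms that only the long-lived infrastructure VMs remain and that both per-run machines (k8stest202312311208 and osmtest202312311216) are gone. A broader leftover check that also covers NICs, disks and NSGs could look like the sketch below; it is illustrative only and not part of this pipeline.

  # Illustrative only, not part of the pipeline: list anything in the CI resource
  # group that still carries one of this run's name prefixes.
  for prefix in k8stest202312311208 osmtest202312311216; do
      az resource list --resource-group OSM_CICD_GROUP \
          --query "[?starts_with(name, '${prefix}')].{name:name, type:type}" -o table
  done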