diff --git a/osm_lcm/tests/test_ns.py b/osm_lcm/tests/test_ns.py index dcf5020..d7192c9 100644 --- a/osm_lcm/tests/test_ns.py +++ b/osm_lcm/tests/test_ns.py @@ -12,30 +12,38 @@ # under the License. # # For those usages not covered by the Apache License, Version 2.0 please -# contact: esousa@whitestack.com or alfonso.tiernosepulveda@telefonica.com +# contact: alfonso.tiernosepulveda@telefonica.com ## -import asynctest # pip3 install asynctest --user +import asynctest  # pip3 install asynctest --user import asyncio import yaml -# import logging +import copy from os import getenv -from osm_lcm.ns import NsLcm -from osm_common.dbmongo import DbMongo +from osm_lcm import ns from osm_common.msgkafka import MsgKafka -from osm_common.fslocal import FsLocal from osm_lcm.lcm_utils import TaskRegistry -from n2vc.vnf import N2VC +from osm_lcm.ng_ro import NgRoClient +from osm_lcm.data_utils.database.database import Database +from osm_lcm.data_utils.filesystem.filesystem import Filesystem +from osm_lcm.data_utils.vnfd import find_software_version +from osm_lcm.lcm_utils import check_juju_bundle_existence, get_charm_artifact_path +from osm_lcm.lcm_utils import LcmException from uuid import uuid4 +from unittest.mock import Mock, patch + +from osm_lcm.tests import test_db_descriptors as descriptors __author__ = "Alfonso Tierno <alfonso.tiernosepulveda@telefonica.com>" """ Perform unittests using asynctest of osm_lcm.ns module It allows, if some testing ENV are supplied, testing without mocking some external libraries for debugging: OSMLCMTEST_NS_PUBKEY: public ssh-key returned by N2VC to inject to VMs + OSMLCMTEST_NS_NAME: change name of NS OSMLCMTEST_PACKAGES_PATH: path where the vnf-packages are stored (de-compressed), each one in a 'vnfd_id' folder OSMLCMTEST_NS_IPADDRESS: IP address where emulated VMs are reached. Comma-separated list + OSMLCMTEST_RO_VIMID: VIM id of RO target vim IP.
Obtain it with openmano datacenter-list on RO container OSMLCMTEST_VCA_NOMOCK: Do not mock the VCA, N2VC library, for debugging it OSMLCMTEST_RO_NOMOCK: Do not mock the ROClient library, for debugging it OSMLCMTEST_DB_NOMOCK: Do not mock the database library, for debugging it @@ -45,924 +53,47 @@ It allows, if some testing ENV are supplied, testing without mocking some extern OSMLCM_RO_XXX: configuration of RO """ - -vca_config = { # TODO replace with os.get_env to get other configurations - "host": getenv("OSMLCM_VCA_HOST", "vca"), - "port": getenv("OSMLCM_VCA_PORT", 17070), - "user": getenv("OSMLCM_VCA_USER", "admin"), - "secret": getenv("OSMLCM_VCA_SECRET", "vca"), - "pubkey": getenv("OSMLCM_VCA_PUBKEY", None), - 'cacert': getenv("OSMLCM_VCA_CACERT", None) -} - -ro_config = { - "endpoint_url": "http://{}:{}/openmano".format(getenv("OSMLCM_RO_HOST", "ro"), getenv("OSMLCM_RO_PORT", "9090")), - "tenant": getenv("OSMLCM_RO_TENANT", "osm"), - "logger_name": "lcm.ROclient", - "loglevel": "DEBUG", +lcm_config = { + "global": {"loglevel": "DEBUG"}, + "timeout": {}, + "VCA": { # TODO replace with os.getenv to get other configurations + "host": getenv("OSMLCM_VCA_HOST", "vca"), + "port": getenv("OSMLCM_VCA_PORT", 17070), + "user": getenv("OSMLCM_VCA_USER", "admin"), + "secret": getenv("OSMLCM_VCA_SECRET", "vca"), + "public_key": getenv("OSMLCM_VCA_PUBKEY", None), + "ca_cert": getenv("OSMLCM_VCA_CACERT", None), + "apiproxy": getenv("OSMLCM_VCA_APIPROXY", "192.168.1.1"), + }, + "ro_config": { + "uri": "http://{}:{}/openmano".format( + getenv("OSMLCM_RO_HOST", "ro"), getenv("OSMLCM_RO_PORT", "9090") + ), + "tenant": getenv("OSMLCM_RO_TENANT", "osm"), + "logger_name": "lcm.ROclient", + "loglevel": "DEBUG", + "ng": True, + }, } -db_vim_accounts_text = """ ---- -- _admin: - created: 1566818150.3024442 - current_operation: 0 - deployed: - RO: dc51ce6c-c7f2-11e9-b9c0-02420aff0004 - RO-account: dc5c67fa-c7f2-11e9-b9c0-02420aff0004 - detailed-status: Done - modified: 1566818150.3024442 - operationalState: ENABLED - operations: - - detailed-status: Done - lcmOperationType: create - operationParams: null - operationState: COMPLETED - startTime: 1566818150.3025382 - statusEnteredTime: 1566818150.3025382 - worker: 86434c2948e2 - projects_read: - - 25b5aebf-3da1-49ed-99de-1d2b4a86d6e4 - projects_write: - - 25b5aebf-3da1-49ed-99de-1d2b4a86d6e4 - _id: ea958ba5-4e58-4405-bf42-6e3be15d4c3a - description: Openstack site 2, based on Mirantis, also called DSS9000-1, with - tenant tid - name: ost2-mrt-tid - schema_version: '1.1' - vim_password: 5g0yGX86qIhprX86YTMcpg== - vim_tenant_name: osm - vim_type: openstack - vim_url: http://10.95.87.162:5000/v2.0 - vim_user: osm -""" - -db_vnfds_text = """ ---- -- _admin: - created: 1566823352.7154346 - modified: 1566823353.9295402 - onboardingState: ONBOARDED - operationalState: ENABLED - projects_read: - - 25b5aebf-3da1-49ed-99de-1d2b4a86d6e4 - projects_write: - - 25b5aebf-3da1-49ed-99de-1d2b4a86d6e4 - storage: - descriptor: hackfest_3charmed_vnfd/hackfest_3charmed_vnfd.yaml - folder: 7637bcf8-cf14-42dc-ad70-c66fcf1e6e77 - fs: local - path: /app/storage/ - pkg-dir: hackfest_3charmed_vnfd - zipfile: package.tar.gz - type: vnfd - usageState: NOT_IN_USE - userDefinedData: {} - _id: 7637bcf8-cf14-42dc-ad70-c66fcf1e6e77 - connection-point: - - id: vnf-mgmt - name: vnf-mgmt - short-name: vnf-mgmt - type: VPORT - - id: vnf-data - name: vnf-data - short-name: vnf-data - type: VPORT - description: A VNF consisting of 2 VDUs connected to an internal VL, and one VDU - with cloud-init - id:
hackfest3charmed-vnf - internal-vld: - - id: internal - internal-connection-point: - - id-ref: mgmtVM-internal - - id-ref: dataVM-internal - name: internal - short-name: internal - type: ELAN - logo: osm.png - mgmt-interface: - cp: vnf-mgmt - monitoring-param: - - aggregation-type: AVERAGE - id: monitor1 - name: monitor1 - vdu-monitoring-param: - vdu-monitoring-param-ref: dataVM_cpu_util - vdu-ref: dataVM - name: hackfest3charmed-vnf - scaling-group-descriptor: - - max-instance-count: 10 - name: scale_dataVM - scaling-config-action: - - trigger: post-scale-out - vnf-config-primitive-name-ref: touch - - trigger: pre-scale-in - vnf-config-primitive-name-ref: touch - scaling-policy: - - cooldown-time: 60 - name: auto_cpu_util_above_threshold - scaling-criteria: - - name: cpu_util_above_threshold - scale-in-relational-operation: LE - scale-in-threshold: '15.0000000000' - scale-out-relational-operation: GE - scale-out-threshold: '60.0000000000' - vnf-monitoring-param-ref: monitor1 - scaling-type: automatic - threshold-time: 0 - vdu: - - count: 1 - vdu-id-ref: dataVM - short-name: hackfest3charmed-vnf - vdu: - - count: '1' - cloud-init-file: cloud-config.txt - id: mgmtVM - image: hackfest3-mgmt - interface: - - external-connection-point-ref: vnf-mgmt - name: mgmtVM-eth0 - position: 1 - type: EXTERNAL - virtual-interface: - type: VIRTIO - - internal-connection-point-ref: mgmtVM-internal - name: mgmtVM-eth1 - position: 2 - type: INTERNAL - virtual-interface: - type: VIRTIO - internal-connection-point: - - id: mgmtVM-internal - name: mgmtVM-internal - short-name: mgmtVM-internal - type: VPORT - name: mgmtVM - vm-flavor: - memory-mb: '1024' - storage-gb: '10' - vcpu-count: 1 - - count: '1' - id: dataVM - image: hackfest3-mgmt - interface: - - internal-connection-point-ref: dataVM-internal - name: dataVM-eth0 - position: 1 - type: INTERNAL - virtual-interface: - type: VIRTIO - - external-connection-point-ref: vnf-data - name: dataVM-xe0 - position: 2 - type: EXTERNAL - virtual-interface: - type: VIRTIO - internal-connection-point: - - id: dataVM-internal - name: dataVM-internal - short-name: dataVM-internal - type: VPORT - monitoring-param: - - id: dataVM_cpu_util - nfvi-metric: cpu_utilization - name: dataVM - vm-flavor: - memory-mb: '1024' - storage-gb: '10' - vcpu-count: 1 - version: '1.0' - vnf-configuration: - config-primitive: - - name: touch - parameter: - - data-type: STRING - default-value: - name: filename - initial-config-primitive: - - name: config - parameter: - - name: ssh-hostname - value: - - name: ssh-username - value: ubuntu - - name: ssh-password - value: osm4u - seq: '1' - - name: touch - parameter: - - name: filename - value: - seq: '2' - juju: - charm: simple -""" - -db_nsds_text = """ ---- -- _admin: - created: 1566823353.971486 - modified: 1566823353.971486 - onboardingState: ONBOARDED - operationalState: ENABLED - projects_read: - - 25b5aebf-3da1-49ed-99de-1d2b4a86d6e4 - projects_write: - - 25b5aebf-3da1-49ed-99de-1d2b4a86d6e4 - storage: - descriptor: hackfest_3charmed_nsd/hackfest_3charmed_nsd.yaml - folder: 8c2f8b95-bb1b-47ee-8001-36dc090678da - fs: local - path: /app/storage/ - pkg-dir: hackfest_3charmed_nsd - zipfile: package.tar.gz - usageState: NOT_IN_USE - userDefinedData: {} - _id: 8c2f8b95-bb1b-47ee-8001-36dc090678da - constituent-vnfd: - - member-vnf-index: '1' - vnfd-id-ref: hackfest3charmed-vnf - - member-vnf-index: '2' - vnfd-id-ref: hackfest3charmed-vnf - description: NS with 2 VNFs hackfest3charmed-vnf connected by datanet and mgmtnet - VLs - id: 
hackfest3charmed-ns - logo: osm.png - name: hackfest3charmed-ns - short-name: hackfest3charmed-ns - version: '1.0' - vld: - - id: mgmt - mgmt-network: true - name: mgmt - short-name: mgmt - type: ELAN - vim-network-name: mgmt - vnfd-connection-point-ref: - - member-vnf-index-ref: '1' - vnfd-connection-point-ref: vnf-mgmt - vnfd-id-ref: hackfest3charmed-vnf - - member-vnf-index-ref: '2' - vnfd-connection-point-ref: vnf-mgmt - vnfd-id-ref: hackfest3charmed-vnf - - id: datanet - name: datanet - short-name: datanet - type: ELAN - vnfd-connection-point-ref: - - member-vnf-index-ref: '1' - vnfd-connection-point-ref: vnf-data - vnfd-id-ref: hackfest3charmed-vnf - - member-vnf-index-ref: '2' - vnfd-connection-point-ref: vnf-data - vnfd-id-ref: hackfest3charmed-vnf -""" - -db_nsrs_text = """ ---- -- _admin: - created: 1566823354.3716335 - deployed: - RO: - nsd_id: 876573b5-968d-40b9-b52b-91bf5c5844f7 - nsr_id: c9fe9908-3180-430d-b633-fca2f68db008 - nsr_status: ACTIVE - vnfd: - - id: 1ab2a418-9fe3-4358-bf17-411e5155535f - member-vnf-index: '1' - - id: 0de348e3-c201-4f6a-91cc-7f957e2d5504 - member-vnf-index: '2' - VCA: - - application: alf-b-aa - detailed-status: Ready! - member-vnf-index: '1' - model: f48163a6-c807-47bc-9682-f72caef5af85 - operational-status: active - primitive_id: null - ssh-public-key: ssh-rsa pub-key root@juju-145d3e-0 - step: ssh-public-key-obtained - vdu_count_index: null - vdu_id: null - vdu_name: null - vnfd_id: hackfest3charmed-vnf - - application: alf-c-ab - detailed-status: Ready! - member-vnf-index: '2' - model: f48163a6-c807-47bc-9682-f72caef5af85 - operational-status: active - primitive_id: null - ssh-public-key: ssh-rsa pub-key root@juju-145d3e-0 - step: ssh-public-key-obtained - vdu_count_index: null - vdu_id: null - vdu_name: null - vnfd_id: hackfest3charmed-vnf - VCA-model-name: f48163a6-c807-47bc-9682-f72caef5af85 - modified: 1566823354.3716335 - nsState: INSTANTIATED - nslcmop: null - projects_read: - - 25b5aebf-3da1-49ed-99de-1d2b4a86d6e4 - projects_write: - - 25b5aebf-3da1-49ed-99de-1d2b4a86d6e4 - _id: f48163a6-c807-47bc-9682-f72caef5af85 - additionalParamsForNs: null - admin-status: ENABLED - config-status: init - constituent-vnfr-ref: - - 88d90b0c-faff-4b9f-bccd-017f33985984 - - 1ca3bb1a-b29b-49fe-bed6-5f3076d77434 - create-time: 1566823354.36234 - datacenter: ea958ba5-4e58-4405-bf42-6e3be15d4c3a - description: default description - detailed-status: 'ERROR executing proxy charm initial primitives for member_vnf_index=1 - vdu_id=None: charm error executing primitive verify-ssh-credentials for member_vnf_index=1 - vdu_id=None: ''timeout after 600 seconds''' - id: f48163a6-c807-47bc-9682-f72caef5af85 - instantiate_params: - nsDescription: default description - nsName: ALF - nsdId: 8c2f8b95-bb1b-47ee-8001-36dc090678da - vimAccountId: ea958ba5-4e58-4405-bf42-6e3be15d4c3a - name: ALF - name-ref: ALF - ns-instance-config-ref: f48163a6-c807-47bc-9682-f72caef5af85 - nsd: - _admin: - created: 1566823353.971486 - modified: 1566823353.971486 - onboardingState: ONBOARDED - operationalState: ENABLED - projects_read: - - 25b5aebf-3da1-49ed-99de-1d2b4a86d6e4 - projects_write: - - 25b5aebf-3da1-49ed-99de-1d2b4a86d6e4 - storage: - descriptor: hackfest_3charmed_nsd/hackfest_3charmed_nsd.yaml - folder: 8c2f8b95-bb1b-47ee-8001-36dc090678da - fs: local - path: /app/storage/ - pkg-dir: hackfest_3charmed_nsd - zipfile: package.tar.gz - usageState: NOT_IN_USE - userDefinedData: {} - _id: 8c2f8b95-bb1b-47ee-8001-36dc090678da - constituent-vnfd: - - member-vnf-index: '1' - vnfd-id-ref: 
hackfest3charmed-vnf - - member-vnf-index: '2' - vnfd-id-ref: hackfest3charmed-vnf - description: NS with 2 VNFs hackfest3charmed-vnf connected by datanet and - mgmtnet VLs - id: hackfest3charmed-ns - logo: osm.png - name: hackfest3charmed-ns - short-name: hackfest3charmed-ns - version: '1.0' - vld: - - id: mgmt - mgmt-network: true - name: mgmt - short-name: mgmt - type: ELAN - vim-network-name: mgmt - vnfd-connection-point-ref: - - member-vnf-index-ref: '1' - vnfd-connection-point-ref: vnf-mgmt - vnfd-id-ref: hackfest3charmed-vnf - - member-vnf-index-ref: '2' - vnfd-connection-point-ref: vnf-mgmt - vnfd-id-ref: hackfest3charmed-vnf - - id: datanet - name: datanet - short-name: datanet - type: ELAN - vnfd-connection-point-ref: - - member-vnf-index-ref: '1' - vnfd-connection-point-ref: vnf-data - vnfd-id-ref: hackfest3charmed-vnf - - member-vnf-index-ref: '2' - vnfd-connection-point-ref: vnf-data - vnfd-id-ref: hackfest3charmed-vnf - nsd-id: 8c2f8b95-bb1b-47ee-8001-36dc090678da - nsd-name-ref: hackfest3charmed-ns - nsd-ref: hackfest3charmed-ns - operational-events: [] - operational-status: failed - orchestration-progress: {} - resource-orchestrator: osmopenmano - short-name: ALF - ssh-authorized-key: null - vld: - - id: mgmt - name: null - status: ACTIVE - status-detailed: null - vim-id: f99ae780-0e2f-4985-af41-574eae6919c0 - vim-network-name: mgmt - - id: datanet - name: ALF-datanet - status: ACTIVE - status-detailed: null - vim-id: c31364ba-f573-4ab6-bf1a-fed30ede39a8 - vnfd-id: - - 7637bcf8-cf14-42dc-ad70-c66fcf1e6e77 -""" - -db_nslcmops_text = """ ---- -- _admin: - created: 1566823354.4148262 - modified: 1566823354.4148262 - projects_read: - - 25b5aebf-3da1-49ed-99de-1d2b4a86d6e4 - projects_write: - - 25b5aebf-3da1-49ed-99de-1d2b4a86d6e4 - worker: 86434c2948e2 - _id: a639fac7-e0bb-4225-8ecb-c1f8efcc125e - detailed-status: 'FAILED executing proxy charm initial primitives for member_vnf_index=1 - vdu_id=None: charm error executing primitive verify-ssh-credentials for member_vnf_index=1 - vdu_id=None: ''timeout after 600 seconds''' - id: a639fac7-e0bb-4225-8ecb-c1f8efcc125e - isAutomaticInvocation: false - isCancelPending: false - lcmOperationType: instantiate - links: - nsInstance: /osm/nslcm/v1/ns_instances/f48163a6-c807-47bc-9682-f72caef5af85 - self: /osm/nslcm/v1/ns_lcm_op_occs/a639fac7-e0bb-4225-8ecb-c1f8efcc125e - nsInstanceId: f48163a6-c807-47bc-9682-f72caef5af85 - operationParams: - additionalParamsForVnf: - - additionalParams: - touch_filename: /home/ubuntu/first-touch-1 - touch_filename2: /home/ubuntu/second-touch-1 - member-vnf-index: '1' - - additionalParams: - touch_filename: /home/ubuntu/first-touch-2 - touch_filename2: /home/ubuntu/second-touch-2 - member-vnf-index: '2' - lcmOperationType: instantiate - nsDescription: default description - nsInstanceId: f48163a6-c807-47bc-9682-f72caef5af85 - nsName: ALF - nsdId: 8c2f8b95-bb1b-47ee-8001-36dc090678da - vimAccountId: ea958ba5-4e58-4405-bf42-6e3be15d4c3a - operationState: FAILED - startTime: 1566823354.414689 - statusEnteredTime: 1566824534.5112448 -""" - -db_vnfrs_text = """ ---- -- _admin: - created: 1566823354.3668208 - modified: 1566823354.3668208 - nsState: NOT_INSTANTIATED - projects_read: - - 25b5aebf-3da1-49ed-99de-1d2b4a86d6e4 - projects_write: - - 25b5aebf-3da1-49ed-99de-1d2b4a86d6e4 - _id: 88d90b0c-faff-4b9f-bccd-017f33985984 - additionalParamsForVnf: - touch_filename: /home/ubuntu/first-touch-1 - touch_filename2: /home/ubuntu/second-touch-1 - connection-point: - - connection-point-id: vnf-mgmt - id: vnf-mgmt - name: 
vnf-mgmt - - connection-point-id: vnf-data - id: vnf-data - name: vnf-data - created-time: 1566823354.36234 - id: 88d90b0c-faff-4b9f-bccd-017f33985984 - ip-address: 10.205.1.46 - member-vnf-index-ref: '1' - nsr-id-ref: f48163a6-c807-47bc-9682-f72caef5af85 - vdur: - - _id: f0e7d7ce-2443-4dcb-ad0b-5ab9f3b13d37 - count-index: 0 - interfaces: - - ip-address: 10.205.1.46 - mac-address: fa:16:3e:b4:3e:b1 - mgmt-vnf: true - name: mgmtVM-eth0 - ns-vld-id: mgmt - - ip-address: 192.168.54.2 - mac-address: fa:16:3e:6e:7e:78 - name: mgmtVM-eth1 - vnf-vld-id: internal - internal-connection-point: - - connection-point-id: mgmtVM-internal - id: mgmtVM-internal - name: mgmtVM-internal - ip-address: 10.205.1.46 - name: ALF-1-mgmtVM-1 - status: ACTIVE - status-detailed: null - vdu-id-ref: mgmtVM - vim-id: c2538499-4c30-41c0-acd5-80cb92f48061 - - _id: ab453219-2d9a-45c2-864d-2c0788385028 - count-index: 0 - interfaces: - - ip-address: 192.168.54.3 - mac-address: fa:16:3e:d9:7a:5d - name: dataVM-eth0 - vnf-vld-id: internal - - ip-address: 192.168.24.3 - mac-address: fa:16:3e:d1:6c:0d - name: dataVM-xe0 - ns-vld-id: datanet - internal-connection-point: - - connection-point-id: dataVM-internal - id: dataVM-internal - name: dataVM-internal - ip-address: null - name: ALF-1-dataVM-1 - status: ACTIVE - status-detailed: null - vdu-id-ref: dataVM - vim-id: 87973c3f-365d-4227-95c2-7a8abc74349c - vim-account-id: ea958ba5-4e58-4405-bf42-6e3be15d4c3a - vld: - - id: internal - name: ALF-internal - status: ACTIVE - status-detailed: null - vim-id: ff181e6d-2597-4244-b40b-bb0174bdfeb6 - vnfd-id: 7637bcf8-cf14-42dc-ad70-c66fcf1e6e77 - vnfd-ref: hackfest3charmed-vnf -- _admin: - created: 1566823354.3703845 - modified: 1566823354.3703845 - nsState: NOT_INSTANTIATED - projects_read: - - 25b5aebf-3da1-49ed-99de-1d2b4a86d6e4 - projects_write: - - 25b5aebf-3da1-49ed-99de-1d2b4a86d6e4 - _id: 1ca3bb1a-b29b-49fe-bed6-5f3076d77434 - additionalParamsForVnf: - touch_filename: /home/ubuntu/first-touch-2 - touch_filename2: /home/ubuntu/second-touch-2 - connection-point: - - connection-point-id: vnf-mgmt - id: vnf-mgmt - name: vnf-mgmt - - connection-point-id: vnf-data - id: vnf-data - name: vnf-data - created-time: 1566823354.36234 - id: 1ca3bb1a-b29b-49fe-bed6-5f3076d77434 - ip-address: 10.205.1.47 - member-vnf-index-ref: '2' - nsr-id-ref: f48163a6-c807-47bc-9682-f72caef5af85 - vdur: - - _id: 190b4a2c-4f85-4cfe-9406-4cef7ffb1e67 - count-index: 0 - interfaces: - - ip-address: 10.205.1.47 - mac-address: fa:16:3e:cb:9f:c7 - mgmt-vnf: true - name: mgmtVM-eth0 - ns-vld-id: mgmt - - ip-address: 192.168.231.1 - mac-address: fa:16:3e:1a:89:24 - name: mgmtVM-eth1 - vnf-vld-id: internal - internal-connection-point: - - connection-point-id: mgmtVM-internal - id: mgmtVM-internal - name: mgmtVM-internal - ip-address: 10.205.1.47 - name: ALF-2-mgmtVM-1 - status: ACTIVE - status-detailed: null - vdu-id-ref: mgmtVM - vim-id: 248077b2-e3b8-4a37-8b72-575abb8ed912 - - _id: 889b874d-e1c3-4e75-aa45-53a9b0ddabd9 - count-index: 0 - interfaces: - - ip-address: 192.168.231.3 - mac-address: fa:16:3e:7e:ba:8c - name: dataVM-eth0 - vnf-vld-id: internal - - ip-address: 192.168.24.4 - mac-address: fa:16:3e:d2:e1:f5 - name: dataVM-xe0 - ns-vld-id: datanet - internal-connection-point: - - connection-point-id: dataVM-internal - id: dataVM-internal - name: dataVM-internal - ip-address: null - name: ALF-2-dataVM-1 - status: ACTIVE - status-detailed: null - vdu-id-ref: dataVM - vim-id: a4ce4372-e0ad-4ae3-8f9f-1c969f32e77b - vim-account-id: 
ea958ba5-4e58-4405-bf42-6e3be15d4c3a - vld: - - id: internal - name: ALF-internal - status: ACTIVE - status-detailed: null - vim-id: ff181e6d-2597-4244-b40b-bb0174bdfeb6 - vnfd-id: 7637bcf8-cf14-42dc-ad70-c66fcf1e6e77 - vnfd-ref: hackfest3charmed-vnf -""" - -ro_ns_text = """ -datacenter_tenant_id: dc5c67fa-c7f2-11e9-b9c0-02420aff0004 -description: null -name: ALF -nets: -- created: false - datacenter_id: dc51ce6c-c7f2-11e9-b9c0-02420aff0004 - datacenter_tenant_id: dc5c67fa-c7f2-11e9-b9c0-02420aff0004 - error_msg: null - ns_net_osm_id: mgmt - related: c6bac394-fa27-4c43-bb34-42f621a9d343 - sce_net_id: 8f215bab-c35e-41e6-a035-42bfaa07af9f - sdn_net_id: null - status: ACTIVE - uuid: c6bac394-fa27-4c43-bb34-42f621a9d343 - vim_info: "{vim_info: null}" - vim_name: null - vim_net_id: f99ae780-0e2f-4985-af41-574eae6919c0 - vnf_net_id: null - vnf_net_osm_id: null -- created: true - datacenter_id: dc51ce6c-c7f2-11e9-b9c0-02420aff0004 - datacenter_tenant_id: dc5c67fa-c7f2-11e9-b9c0-02420aff0004 - error_msg: null - ns_net_osm_id: datanet - related: 509d576c-120f-493a-99a1-5fea99dfe041 - sce_net_id: 3d766bbc-33a8-41aa-a986-2f35e8d25c16 - sdn_net_id: null - status: ACTIVE - uuid: 509d576c-120f-493a-99a1-5fea99dfe041 - vim_info: "{vim_info: null}" - vim_name: ALF-datanet - vim_net_id: c31364ba-f573-4ab6-bf1a-fed30ede39a8 - vnf_net_id: null - vnf_net_osm_id: null -- created: true - datacenter_id: dc51ce6c-c7f2-11e9-b9c0-02420aff0004 - datacenter_tenant_id: dc5c67fa-c7f2-11e9-b9c0-02420aff0004 - error_msg: null - ns_net_osm_id: null - related: 277fed09-3220-4bfd-9052-b96b21a32daf - sce_net_id: null - sdn_net_id: null - status: ACTIVE - uuid: 277fed09-3220-4bfd-9052-b96b21a32daf - vim_info: "{vim_info: null}" - vim_name: ALF-internal - vim_net_id: ff181e6d-2597-4244-b40b-bb0174bdfeb6 - vnf_net_id: 62e62fae-c12b-4ebc-9a9b-30031c6c16fa - vnf_net_osm_id: internal -- created: true - datacenter_id: dc51ce6c-c7f2-11e9-b9c0-02420aff0004 - datacenter_tenant_id: dc5c67fa-c7f2-11e9-b9c0-02420aff0004 - error_msg: null - ns_net_osm_id: null - related: 92534d1a-e697-4372-a84d-aa0aa643b68a - sce_net_id: null - sdn_net_id: null - status: ACTIVE - uuid: 92534d1a-e697-4372-a84d-aa0aa643b68a - vim_info: "{vim_info: null}" - vim_name: ALF-internal - vim_net_id: 09655387-b639-421a-b5f6-72b26d685fb4 - vnf_net_id: 13c6c77d-86a5-4914-832c-990d4ec7b54e - vnf_net_osm_id: internal -nsd_osm_id: f48163a6-c807-47bc-9682-f72caef5af85.2.hackfest3charmed-ns -scenario_id: 876573b5-968d-40b9-b52b-91bf5c5844f7 -scenario_name: hackfest3charmed-ns -sfis: [] -sfps: [] -sfs: [] -tenant_id: 0ea38bd0-2729-47a9-ae07-c6ce76115eb2 -uuid: c9fe9908-3180-430d-b633-fca2f68db008 -vnfs: -- datacenter_id: dc51ce6c-c7f2-11e9-b9c0-02420aff0004 - datacenter_tenant_id: dc5c67fa-c7f2-11e9-b9c0-02420aff0004 - ip_address: 10.205.1.46 - member_vnf_index: '1' - mgmt_access: '{interface_id: 61549ee3-cd6c-4930-8b90-eaad97fe345b, required: ''False'', - vm_id: 6cf4a48f-3b6c-4395-8221-119fa37de24a} - - ' - sce_vnf_id: 83be04a8-c513-42ba-9908-22728f686d31 - uuid: 94724042-7576-4fb0-82ec-6a7ab642741c - vms: - - created_at: '2019-08-26T12:50:38' - error_msg: null - interfaces: - - external_name: vnf-mgmt - instance_net_id: c6bac394-fa27-4c43-bb34-42f621a9d343 - internal_name: mgmtVM-eth0 - ip_address: 10.205.1.46 - mac_address: fa:16:3e:b4:3e:b1 - sdn_port_id: null - type: mgmt - vim_info: "{vim_info: null}" - vim_interface_id: 4d3cb8fd-7040-4169-a0ad-2486d2b006a1 - - external_name: null - instance_net_id: 277fed09-3220-4bfd-9052-b96b21a32daf - internal_name: mgmtVM-eth1 - 
ip_address: 192.168.54.2 - mac_address: fa:16:3e:6e:7e:78 - sdn_port_id: null - type: bridge - vim_info: "{vim_info: null}" - vim_interface_id: 54ed68e2-9802-4dfe-b68a-280b3fc6e02d - ip_address: 10.205.1.46 - name: mgmtVM - related: d0b91293-a91d-4f08-b15f-0bf841216dfe - status: ACTIVE - uuid: d0b91293-a91d-4f08-b15f-0bf841216dfe - vdu_osm_id: mgmtVM - vim_info: "{vim_info: null}" - vim_name: ALF-1-mgmtVM-1 - vim_vm_id: c2538499-4c30-41c0-acd5-80cb92f48061 - - created_at: '2019-08-26T12:50:38' - error_msg: null - interfaces: - - external_name: null - instance_net_id: 277fed09-3220-4bfd-9052-b96b21a32daf - internal_name: dataVM-eth0 - ip_address: 192.168.54.3 - mac_address: fa:16:3e:d9:7a:5d - sdn_port_id: null - type: bridge - vim_info: "{vim_info: null}" - vim_interface_id: 1637f350-8840-4241-8ed0-4616bdcecfcf - - external_name: vnf-data - instance_net_id: 509d576c-120f-493a-99a1-5fea99dfe041 - internal_name: dataVM-xe0 - ip_address: 192.168.24.3 - mac_address: fa:16:3e:d1:6c:0d - sdn_port_id: null - type: bridge - vim_info: "{vim_info: null}" - vim_interface_id: 54c73e83-7059-41fe-83a9-4c4ae997b481 - name: dataVM - related: 5c08253d-8a35-474f-b0d3-c5297d174c13 - status: ACTIVE - uuid: 5c08253d-8a35-474f-b0d3-c5297d174c13 - vdu_osm_id: dataVM - vim_info: "{vim_info: null}" - vim_name: ALF-1-dataVM-1 - vim_vm_id: 87973c3f-365d-4227-95c2-7a8abc74349c - - created_at: '2019-08-26T13:40:54' - error_msg: null - interfaces: - - external_name: null - instance_net_id: 277fed09-3220-4bfd-9052-b96b21a32daf - internal_name: dataVM-eth0 - ip_address: 192.168.54.5 - mac_address: fa:16:3e:e4:17:45 - sdn_port_id: null - type: bridge - vim_info: "{vim_info: null}" - vim_interface_id: 7e246e40-8710-4c33-9c95-78fc3c02bc5b - - external_name: vnf-data - instance_net_id: 509d576c-120f-493a-99a1-5fea99dfe041 - internal_name: dataVM-xe0 - ip_address: 192.168.24.5 - mac_address: fa:16:3e:29:6f:a6 - sdn_port_id: null - type: bridge - vim_info: "{vim_info: null}" - vim_interface_id: ce81af7a-9adf-494b-950e-6581fd04ecc4 - name: dataVM - related: 1ae5a0a2-c15a-49a4-a77c-2991d97f6dbe - status: ACTIVE - uuid: 1ae5a0a2-c15a-49a4-a77c-2991d97f6dbe - vdu_osm_id: dataVM - vim_info: "{vim_info: null}" - vim_name: ALF-1-dataVM-2 - vim_vm_id: 4916533e-36c6-4861-9fe3-366a8fb0a5f8 - vnf_id: 1ab2a418-9fe3-4358-bf17-411e5155535f - vnf_name: hackfest3charmed-vnf.1 - vnfd_osm_id: f48163a6-c807-47bc-9682-f72caef5af85.0.1 -- datacenter_id: dc51ce6c-c7f2-11e9-b9c0-02420aff0004 - datacenter_tenant_id: dc5c67fa-c7f2-11e9-b9c0-02420aff0004 - ip_address: 10.205.1.47 - member_vnf_index: '2' - mgmt_access: '{interface_id: 538604c3-5c5e-41eb-8f84-c0239c7fabcd, required: ''False'', - vm_id: dd04d792-05c9-4ecc-bf28-f77384d00311} - - ' - sce_vnf_id: c4f3607a-08ff-4f75-893c-fce507e2f240 - uuid: 00020403-e80f-4ef2-bb7e-b29669643035 - vms: - - created_at: '2019-08-26T12:50:38' - error_msg: null - interfaces: - - external_name: vnf-mgmt - instance_net_id: c6bac394-fa27-4c43-bb34-42f621a9d343 - internal_name: mgmtVM-eth0 - ip_address: 10.205.1.47 - mac_address: fa:16:3e:cb:9f:c7 - sdn_port_id: null - type: mgmt - vim_info: "{vim_info: null}" - vim_interface_id: dcd6d2de-3c68-481c-883e-e9d38c671dc4 - - external_name: null - instance_net_id: 92534d1a-e697-4372-a84d-aa0aa643b68a - internal_name: mgmtVM-eth1 - ip_address: 192.168.231.1 - mac_address: fa:16:3e:1a:89:24 - sdn_port_id: null - type: bridge - vim_info: "{vim_info: null}" - vim_interface_id: 50e538e3-aba0-4652-93bb-20487f3f28e1 - ip_address: 10.205.1.47 - name: mgmtVM - related: 
4543ab5d-578c-427c-9df2-affd17e21b66 - status: ACTIVE - uuid: 4543ab5d-578c-427c-9df2-affd17e21b66 - vdu_osm_id: mgmtVM - vim_info: "{vim_info: null}" - vim_name: ALF-2-mgmtVM-1 - vim_vm_id: 248077b2-e3b8-4a37-8b72-575abb8ed912 - - created_at: '2019-08-26T12:50:38' - error_msg: null - interfaces: - - external_name: null - instance_net_id: 92534d1a-e697-4372-a84d-aa0aa643b68a - internal_name: dataVM-eth0 - ip_address: 192.168.231.3 - mac_address: fa:16:3e:7e:ba:8c - sdn_port_id: null - type: bridge - vim_info: "{vim_info: null}" - vim_interface_id: 15274862-14ea-4527-b405-101cae8bc1a0 - - external_name: vnf-data - instance_net_id: 509d576c-120f-493a-99a1-5fea99dfe041 - internal_name: dataVM-xe0 - ip_address: 192.168.24.4 - mac_address: fa:16:3e:d2:e1:f5 - sdn_port_id: null - type: bridge - vim_info: "{vim_info: null}" - vim_interface_id: 253ebe4e-38d5-46be-8777-dbb57510a2ec - name: dataVM - related: 6f03f16b-295a-47a1-9a69-2d069d574a33 - status: ACTIVE - uuid: 6f03f16b-295a-47a1-9a69-2d069d574a33 - vdu_osm_id: dataVM - vim_info: "{vim_info: null}" - vim_name: ALF-2-dataVM-1 - vim_vm_id: a4ce4372-e0ad-4ae3-8f9f-1c969f32e77b - vnf_id: 0de348e3-c201-4f6a-91cc-7f957e2d5504 - vnf_name: hackfest3charmed-vnf.2 - vnfd_osm_id: f48163a6-c807-47bc-9682-f72caef5af85.1.2 -""" - class TestMyNS(asynctest.TestCase): - - def _db_get_one(self, table, q_filter=None, fail_on_empty=True, fail_on_more=True): - if table not in self.db_content: - self.assertTrue(False, "db.get_one called with table={}".format(table)) - for db_item in self.db_content[table]: - if db_item["_id"] == q_filter["_id"]: - return db_item - else: - self.assertTrue(False, "db.get_one, table={}, not found _id={}".format(table, q_filter["_id"])) - - def _db_get_list(self, table, q_filter=None): - if table not in self.db_content: - self.assertTrue(False, "db.get_list called with table={} not found".format(table)) - return self.db_content[table] - - def _db_set_one(self, table, q_filter, update_dict, fail_on_empty=True, unset=None, pull=None, push=None): - db_item = self._db_get_one(table, q_filter, fail_on_empty=fail_on_empty) - for k, v in update_dict.items(): - db_nested = db_item - k_list = k.split(".") - for k_nested in k_list[0:-1]: - if isinstance(db_nested, list): - db_nested = db_nested[int(k_nested)] - else: - if k_nested not in db_nested: - db_nested[k_nested] = {} - db_nested = db_nested[k_nested] - k_nested = k_list[-1] - if isinstance(db_nested, list): - if int(k_nested) < len(db_nested): - db_nested[int(k_nested)] = v - else: - db_nested.insert(int(k_nested), v) - else: - db_nested[k_nested] = v - - async def _n2vc_DeployCharms(self, model_name, application_name, vnfd, charm_path, params={}, machine_spec={}, - callback=None, *callback_args): + async def _n2vc_DeployCharms( + self, + model_name, + application_name, + vnfd, + charm_path, + params={}, + machine_spec={}, + callback=None, + *callback_args + ): if callback: - for status, message in (("maintenance", "installing sofwware"), ("active", "Ready!")): + for status, message in ( + ("maintenance", "installing software"), + ("active", "Ready!"), + ): # call callback after some time await asyncio.sleep(5, loop=self.loop) callback(model_name, application_name, status, message, *callback_args) @@ -974,7 +105,9 @@ class TestMyNS(asynctest.TestCase): yield "app_name-{}".format(num_calls) num_calls += 1 - def _n2vc_CreateExecutionEnvironment(self, namespace): + def _n2vc_CreateExecutionEnvironment( + self, namespace, reuse_ee_id, db_dict, *args, **kwargs + ): k_list = namespace.split(".")
ee_id = k_list[1] + "." if len(k_list) >= 2: @@ -982,10 +115,26 @@ class TestMyNS(asynctest.TestCase): ee_id += k[:8] else: ee_id += "_NS_" - return ee_id + return ee_id, {} - def _ro_show(self, *args, **kwargs): - ro_ns_desc = yaml.load(ro_ns_text) + def _ro_status(self, *args, **kwargs): + print("Args > {}".format(args)) + print("kwargs > {}".format(kwargs)) + if args: + if "update" in args: + ro_ns_desc = yaml.load( + descriptors.ro_update_action_text, Loader=yaml.Loader + ) + while True: + yield ro_ns_desc + if kwargs.get("delete"): + ro_ns_desc = yaml.load( + descriptors.ro_delete_action_text, Loader=yaml.Loader + ) + while True: + yield ro_ns_desc + + ro_ns_desc = yaml.load(descriptors.ro_ns_text, Loader=yaml.Loader) # if ip address provided, replace descriptor ip_addresses = getenv("OSMLCMTEST_NS_IPADDRESS", "") @@ -1013,36 +162,62 @@ class TestMyNS(asynctest.TestCase): vm["status"] = "ACTIVE" break - def _ro_create(self, *args, **kwargs): - while True: - yield {"uuid": str(uuid4())} + def _ro_deploy(self, *args, **kwargs): + return {"action_id": args[1]["action_id"], "nsr_id": args[0], "status": "ok"} def _return_uuid(self, *args, **kwargs): return str(uuid4()) async def setUp(self): + # Mock DB if not getenv("OSMLCMTEST_DB_NOMOCK"): - self.db = asynctest.Mock(DbMongo()) - self.db.get_one.side_effect = self._db_get_one - self.db.get_list.side_effect = self._db_get_list - self.db.set_one.side_effect = self._db_set_one - self.db_content = { - "nsrs": yaml.load(db_nsrs_text), - "nslcmops": yaml.load(db_nslcmops_text), - "vnfrs": yaml.load(db_vnfrs_text), - "vnfds": yaml.load(db_vnfds_text), - "vim_accounts": yaml.load(db_vim_accounts_text), - } - self.db_vim_accounts = yaml.load(db_vim_accounts_text) + # Cleanup singleton Database instance + Database.instance = None + + self.db = Database({"database": {"driver": "memory"}}).instance.db + self.db.create_list( + "vnfds", yaml.load(descriptors.db_vnfds_text, Loader=yaml.Loader) + ) + self.db.create_list( + "vnfds_revisions", + yaml.load(descriptors.db_vnfds_revisions_text, Loader=yaml.Loader), + ) + self.db.create_list( + "nsds", yaml.load(descriptors.db_nsds_text, Loader=yaml.Loader) + ) + self.db.create_list( + "nsrs", yaml.load(descriptors.db_nsrs_text, Loader=yaml.Loader) + ) + self.db.create_list( + "vim_accounts", + yaml.load(descriptors.db_vim_accounts_text, Loader=yaml.Loader), + ) + self.db.create_list( + "k8sclusters", + yaml.load(descriptors.db_k8sclusters_text, Loader=yaml.Loader), + ) + self.db.create_list( + "nslcmops", yaml.load(descriptors.db_nslcmops_text, Loader=yaml.Loader) + ) + self.db.create_list( + "vnfrs", yaml.load(descriptors.db_vnfrs_text, Loader=yaml.Loader) + ) + self.db_vim_accounts = yaml.load( + descriptors.db_vim_accounts_text, Loader=yaml.Loader + ) # Mock kafka self.msg = asynctest.Mock(MsgKafka()) # Mock filesystem if not getenv("OSMLCMTEST_FS_NOMOCK"): - self.fs = asynctest.Mock(FsLocal()) - self.fs.get_params.return_value = {"path": getenv("OSMLCMTEST_PACKAGES_PATH", "./test/temp/packages")} + self.fs = asynctest.Mock( + Filesystem({"storage": {"driver": "local", "path": "/"}}).instance.fs + ) + self.fs.get_params.return_value = { + "path": getenv("OSMLCMTEST_PACKAGES_PATH", "./test/temp/packages") + } self.fs.file_open = asynctest.mock_open() # self.fs.file_open.return_value.__enter__.return_value = asynctest.MagicMock() # called on a python "with" # self.fs.file_open.return_value.__enter__.return_value.read.return_value = "" # empty file @@ -1053,8 +228,21 @@ class TestMyNS(asynctest.TestCase): 
self.lcm_tasks.waitfor_related_HA.return_value = None self.lcm_tasks.lookfor_related.return_value = ("", []) + # Mock VCA - K8s + if not getenv("OSMLCMTEST_VCA_K8s_NOMOCK"): + ns.K8sJujuConnector = asynctest.MagicMock(ns.K8sJujuConnector) + ns.K8sHelmConnector = asynctest.MagicMock(ns.K8sHelmConnector) + ns.K8sHelm3Connector = asynctest.MagicMock(ns.K8sHelm3Connector) + + if not getenv("OSMLCMTEST_VCA_NOMOCK"): + ns.N2VCJujuConnector = asynctest.MagicMock(ns.N2VCJujuConnector) + ns.LCMHelmConn = asynctest.MagicMock(ns.LCMHelmConn) + # Create NsLcm class - self.my_ns = NsLcm(self.db, self.msg, self.fs, self.lcm_tasks, ro_config, vca_config, self.loop) + self.my_ns = ns.NsLcm(self.msg, self.lcm_tasks, lcm_config, self.loop) + self.my_ns.fs = self.fs + self.my_ns.db = self.db + self.my_ns._wait_dependent_n2vc = asynctest.CoroutineMock() # Mock logging if not getenv("OSMLCMTEST_LOGGING_NOMOCK"): @@ -1063,67 +251,1451 @@ # Mock VCA - N2VC if not getenv("OSMLCMTEST_VCA_NOMOCK"): pub_key = getenv("OSMLCMTEST_NS_PUBKEY", "ssh-rsa test-pub-key t@osm.com") - self.my_ns.n2vc = asynctest.Mock(N2VC()) - self.my_ns.n2vc.GetPublicKey.return_value = getenv("OSMLCM_VCA_PUBKEY", "public_key") + # self.my_ns.n2vc = asynctest.Mock(N2VC()) + self.my_ns.n2vc.GetPublicKey.return_value = getenv( + "OSMLCM_VCA_PUBKEY", "public_key" + ) # allow several versions of n2vc - self.my_ns.n2vc.FormatApplicationName = asynctest.Mock(side_effect=self._n2vc_FormatApplicationName()) - self.my_ns.n2vc.DeployCharms = asynctest.CoroutineMock(side_effect=self._n2vc_DeployCharms) - self.my_ns.n2vc.CreateExecutionEnvironment = asynctest.CoroutineMock( - side_effect=self._n2vc_CreateExecutionEnvironment) - self.my_ns.n2vc.InstallConfigurationSW = asynctest.CoroutineMock(return_value=pub_key) - self.my_ns.n2vc.ExecutePrimitive = asynctest.CoroutineMock(side_effect=self._return_uuid) - self.my_ns.n2vc.GetPrimitiveStatus = asynctest.CoroutineMock(return_value="completed") - self.my_ns.n2vc.GetPrimitiveOutput = asynctest.CoroutineMock(return_value={"result": "ok", - "pubkey": pub_key}) + self.my_ns.n2vc.FormatApplicationName = asynctest.Mock( + side_effect=self._n2vc_FormatApplicationName() + ) + self.my_ns.n2vc.DeployCharms = asynctest.CoroutineMock( + side_effect=self._n2vc_DeployCharms + ) + self.my_ns.n2vc.create_execution_environment = asynctest.CoroutineMock( + side_effect=self._n2vc_CreateExecutionEnvironment + ) + self.my_ns.n2vc.install_configuration_sw = asynctest.CoroutineMock( + return_value=pub_key + ) + self.my_ns.n2vc.get_ee_ssh_public__key = asynctest.CoroutineMock( + return_value=pub_key + ) + self.my_ns.n2vc.exec_primitive = asynctest.CoroutineMock( + side_effect=self._return_uuid + ) + self.my_ns.n2vc.GetPrimitiveStatus = asynctest.CoroutineMock( + return_value="completed" + ) + self.my_ns.n2vc.GetPrimitiveOutput = asynctest.CoroutineMock( + return_value={"result": "ok", "pubkey": pub_key} + ) + self.my_ns.n2vc.delete_execution_environment = asynctest.CoroutineMock( + return_value=None + ) + self.my_ns.n2vc.get_public_key = asynctest.CoroutineMock( + return_value=getenv("OSMLCM_VCA_PUBKEY", "public_key") + ) + self.my_ns.n2vc.delete_namespace = asynctest.CoroutineMock( + return_value=None + ) # Mock RO if not getenv("OSMLCMTEST_RO_NOMOCK"): - # self.my_ns.RO = asynctest.Mock(ROclient.ROClient(self.loop, **ro_config)) + self.my_ns.RO = asynctest.Mock( + NgRoClient(self.loop,
**lcm_config["ro_config"]) + ) # TODO first time should be empty list, following should return a dict - self.my_ns.RO.get_list = asynctest.CoroutineMock(self.my_ns.RO.get_list, return_value=[]) - self.my_ns.RO.create = asynctest.CoroutineMock(self.my_ns.RO.create, side_effect=self._ro_create()) - self.my_ns.RO.show = asynctest.CoroutineMock(self.my_ns.RO.show, side_effect=self._ro_show()) + # self.my_ns.RO.get_list = asynctest.CoroutineMock(self.my_ns.RO.get_list, return_value=[]) + self.my_ns.RO.deploy = asynctest.CoroutineMock( + self.my_ns.RO.deploy, side_effect=self._ro_deploy + ) + # self.my_ns.RO.status = asynctest.CoroutineMock(self.my_ns.RO.status, side_effect=self._ro_status) + # self.my_ns.RO.create_action = asynctest.CoroutineMock(self.my_ns.RO.create_action, + # return_value={"vm-id": {"vim_result": 200, + # "description": "done"}}) + self.my_ns.RO.delete = asynctest.CoroutineMock(self.my_ns.RO.delete) - @asynctest.fail_on(active_handles=True) # all async tasks must be completed - async def test_instantiate(self): - nsr_id = self.db_content["nsrs"][0]["_id"] - nslcmop_id = self.db_content["nslcmops"][0]["_id"] - print("Test instantiate started") + # @asynctest.fail_on(active_handles=True) # all async tasks must be completed + # async def test_instantiate(self): + # nsr_id = descriptors.test_ids["TEST-A"]["ns"] + # nslcmop_id = descriptors.test_ids["TEST-A"]["instantiate"] + # # print("Test instantiate started") - # delete deployed information of database - if not getenv("OSMLCMTEST_DB_NOMOCK"): - if self.db_content["nsrs"][0]["_admin"].get("deployed"): - del self.db_content["nsrs"][0]["_admin"]["deployed"] - for db_vnfr in self.db_content["vnfrs"]: - db_vnfr.pop("ip_address", None) - for db_vdur in db_vnfr["vdur"]: - db_vdur.pop("ip_address", None) - db_vdur.pop("mac_address", None) - - await self.my_ns.instantiate(nsr_id, nslcmop_id) - - print("instantiate_result: {}".format(self._db_get_one("nslcmops", {"_id": nslcmop_id}).get("detailed-status"))) - - self.msg.aiowrite.assert_called_once_with("ns", "instantiated", - {"nsr_id": nsr_id, "nslcmop_id": nslcmop_id, - "operationState": "COMPLETED"}, - loop=self.loop) - self.lcm_tasks.lock_HA.assert_called_once_with('ns', 'nslcmops', nslcmop_id) - if not getenv("OSMLCMTEST_LOGGING_NOMOCK"): - self.assertTrue(self.my_ns.logger.debug.called, "Debug method not called") - self.my_ns.logger.error.assert_not_called() - self.my_ns.logger.exception().assert_not_called() + # # delete deployed information of database + # if not getenv("OSMLCMTEST_DB_NOMOCK"): + # if self.db.get_list("nsrs")[0]["_admin"].get("deployed"): + # del self.db.get_list("nsrs")[0]["_admin"]["deployed"] + # for db_vnfr in self.db.get_list("vnfrs"): + # db_vnfr.pop("ip_address", None) + # for db_vdur in db_vnfr["vdur"]: + # db_vdur.pop("ip_address", None) + # db_vdur.pop("mac_address", None) + # if getenv("OSMLCMTEST_RO_VIMID"): + # self.db.get_list("vim_accounts")[0]["_admin"]["deployed"]["RO"] = getenv("OSMLCMTEST_RO_VIMID") + # if getenv("OSMLCMTEST_RO_VIMID"): + # self.db.get_list("nsrs")[0]["_admin"]["deployed"]["RO"] = getenv("OSMLCMTEST_RO_VIMID") - if not getenv("OSMLCMTEST_DB_NOMOCK"): - self.assertTrue(self.db.set_one.called, "db.set_one not called") + # await self.my_ns.instantiate(nsr_id, nslcmop_id) + + # self.msg.aiowrite.assert_called_once_with("ns", "instantiated", + # {"nsr_id": nsr_id, "nslcmop_id": nslcmop_id, + # "operationState": "COMPLETED"}, + # loop=self.loop) + # self.lcm_tasks.lock_HA.assert_called_once_with('ns', 'nslcmops', nslcmop_id) + # if 
not getenv("OSMLCMTEST_LOGGING_NOMOCK"): + # self.assertTrue(self.my_ns.logger.debug.called, "Debug method not called") + # self.my_ns.logger.error.assert_not_called() + # self.my_ns.logger.exception().assert_not_called() + + # if not getenv("OSMLCMTEST_DB_NOMOCK"): + # self.assertTrue(self.db.set_one.called, "db.set_one not called") + # db_nsr = self.db.get_one("nsrs", {"_id": nsr_id}) + # db_vnfrs_list = self.db.get_list("vnfrs", {"nsr-id-ref": nsr_id}) + # self.assertEqual(db_nsr["_admin"].get("nsState"), "INSTANTIATED", "Not instantiated") + # for vnfr in db_vnfrs_list: + # self.assertEqual(vnfr["_admin"].get("nsState"), "INSTANTIATED", "Not instantiated") + + # if not getenv("OSMLCMTEST_VCA_NOMOCK"): + # # check intial-primitives called + # self.assertTrue(self.my_ns.n2vc.exec_primitive.called, + # "Exec primitive not called for initial config primitive") + # for _call in self.my_ns.n2vc.exec_primitive.call_args_list: + # self.assertIn(_call[1]["primitive_name"], ("config", "touch"), + # "called exec primitive with a primitive different than config or touch") + + # # TODO add more checks of called methods + # # TODO add a terminate + + # async def test_instantiate_ee_list(self): + # # Using modern IM where configuration is in the new format of execution_environment_list + # ee_descriptor_id = "charm_simple" + # non_used_initial_primitive = { + # "name": "not_to_be_called", + # "seq": 3, + # "execution-environment-ref": "not_used_ee" + # } + # ee_list = [ + # { + # "id": ee_descriptor_id, + # "juju": {"charm": "simple"}, - # TODO add more checks of called methods - # TODO add a terminate + # }, + # ] - @asynctest.fail_on(active_handles=True) # all async tasks must be completed + # self.db.set_one( + # "vnfds", + # q_filter={"_id": "7637bcf8-cf14-42dc-ad70-c66fcf1e6e77"}, + # update_dict={"vnf-configuration.0.execution-environment-list": ee_list, + # "vnf-configuration.0.initial-config-primitive.0.execution-environment-ref": ee_descriptor_id, + # "vnf-configuration.0.initial-config-primitive.1.execution-environment-ref": ee_descriptor_id, + # "vnf-configuration.0.initial-config-primitive.2": non_used_initial_primitive, + # "vnf-configuration.0.config-primitive.0.execution-environment-ref": ee_descriptor_id, + # "vnf-configuration.0.config-primitive.0.execution-environment-primitive": "touch_charm", + # }, + # unset={"vnf-configuration.juju": None}) + # await self.test_instantiate() + # # this will check that the initial-congig-primitive 'not_to_be_called' is not called + + @asynctest.fail_on(active_handles=True) + async def test_start_stop_rebuild_pass(self): + nsr_id = descriptors.test_ids["TEST-OP-VNF"]["ns"] + nslcmop_id = descriptors.test_ids["TEST-OP-VNF"]["nslcmops"] + vnf_id = descriptors.test_ids["TEST-OP-VNF"]["vnfrs"] + additional_param = {"count-index": "0"} + operation_type = "start" + await self.my_ns.rebuild_start_stop( + nsr_id, nslcmop_id, vnf_id, additional_param, operation_type + ) + expected_value = "COMPLETED" + return_value = self.db.get_one("nslcmops", {"_id": nslcmop_id}).get( + "operationState" + ) + self.assertEqual(return_value, expected_value) + + @asynctest.fail_on(active_handles=True) + async def test_start_stop_rebuild_fail(self): + nsr_id = descriptors.test_ids["TEST-OP-VNF"]["ns"] + nslcmop_id = descriptors.test_ids["TEST-OP-VNF"]["nslcmops1"] + vnf_id = descriptors.test_ids["TEST-OP-VNF"]["vnfrs"] + additional_param = {"count-index": "0"} + operation_type = "stop" + await self.my_ns.rebuild_start_stop( + nsr_id, nslcmop_id, vnf_id, additional_param, 
operation_type + ) + expected_value = "Error" + return_value = self.db.get_one("nslcmops", {"_id": nslcmop_id}).get( + "operationState" + ) + self.assertEqual(return_value, expected_value) + + # Test scale() and related methods + @asynctest.fail_on(active_handles=True) # all async tasks must be completed async def test_scale(self): - pass + # print("Test scale started") + + # TODO: Add more higher-lever tests here, for example: + # scale-out/scale-in operations with success/error result + + # Test scale() with missing 'scaleVnfData', should return operationState = 'FAILED' + nsr_id = descriptors.test_ids["TEST-A"]["ns"] + nslcmop_id = descriptors.test_ids["TEST-A"]["instantiate"] + await self.my_ns.scale(nsr_id, nslcmop_id) + expected_value = "FAILED" + return_value = self.db.get_one("nslcmops", {"_id": nslcmop_id}).get( + "operationState" + ) + self.assertEqual(return_value, expected_value) + # print("scale_result: {}".format(self.db.get_one("nslcmops", {"_id": nslcmop_id}).get("detailed-status"))) + + # Test scale() for native kdu + # this also includes testing _scale_kdu() + nsr_id = descriptors.test_ids["TEST-NATIVE-KDU"]["ns"] + nslcmop_id = descriptors.test_ids["TEST-NATIVE-KDU"]["instantiate"] + + self.my_ns.k8sclusterjuju.scale = asynctest.mock.CoroutineMock() + self.my_ns.k8sclusterjuju.exec_primitive = asynctest.mock.CoroutineMock() + self.my_ns.k8sclusterjuju.get_scale_count = asynctest.mock.CoroutineMock( + return_value=1 + ) + await self.my_ns.scale(nsr_id, nslcmop_id) + expected_value = "COMPLETED" + return_value = self.db.get_one("nslcmops", {"_id": nslcmop_id}).get( + "operationState" + ) + self.assertEqual(return_value, expected_value) + self.my_ns.k8sclusterjuju.scale.assert_called_once() + + # Test scale() for native kdu with 2 resource + nsr_id = descriptors.test_ids["TEST-NATIVE-KDU-2"]["ns"] + nslcmop_id = descriptors.test_ids["TEST-NATIVE-KDU-2"]["instantiate"] + + self.my_ns.k8sclusterjuju.get_scale_count.return_value = 2 + await self.my_ns.scale(nsr_id, nslcmop_id) + expected_value = "COMPLETED" + return_value = self.db.get_one("nslcmops", {"_id": nslcmop_id}).get( + "operationState" + ) + self.assertEqual(return_value, expected_value) + self.my_ns.k8sclusterjuju.scale.assert_called() + + async def test_vca_status_refresh(self): + nsr_id = descriptors.test_ids["TEST-A"]["ns"] + nslcmop_id = descriptors.test_ids["TEST-A"]["instantiate"] + await self.my_ns.vca_status_refresh(nsr_id, nslcmop_id) + expected_value = dict() + return_value = dict() + vnf_descriptors = self.db.get_list("vnfds") + for i, _ in enumerate(vnf_descriptors): + for j, value in enumerate(vnf_descriptors[i]["df"]): + if "lcm-operations-configuration" in vnf_descriptors[i]["df"][j]: + if ( + "day1-2" + in value["lcm-operations-configuration"][ + "operate-vnf-op-config" + ] + ): + for k, v in enumerate( + value["lcm-operations-configuration"][ + "operate-vnf-op-config" + ]["day1-2"] + ): + if ( + v.get("execution-environment-list") + and "juju" in v["execution-environment-list"][k] + ): + expected_value = self.db.get_list("nsrs")[i][ + "vcaStatus" + ] + await self.my_ns._on_update_n2vc_db( + "nsrs", {"_id": nsr_id}, "_admin.deployed.VCA.0", {} + ) + return_value = self.db.get_list("nsrs")[i]["vcaStatus"] + self.assertEqual(return_value, expected_value) + + # Test _retry_or_skip_suboperation() + # Expected result: + # - if a suboperation's 'operationState' is marked as 'COMPLETED', SUBOPERATION_STATUS_SKIP is expected + # - if marked as anything but 'COMPLETED', the suboperation index is expected + 
def test_scale_retry_or_skip_suboperation(self): + # Load an alternative 'nslcmops' YAML for this test + nslcmop_id = descriptors.test_ids["TEST-A"]["instantiate"] + db_nslcmop = self.db.get_one("nslcmops", {"_id": nslcmop_id}) + op_index = 2 + # Test when 'operationState' is 'COMPLETED' + db_nslcmop["_admin"]["operations"][op_index]["operationState"] = "COMPLETED" + return_value = self.my_ns._retry_or_skip_suboperation(db_nslcmop, op_index) + expected_value = self.my_ns.SUBOPERATION_STATUS_SKIP + self.assertEqual(return_value, expected_value) + # Test when 'operationState' is not 'COMPLETED' + db_nslcmop["_admin"]["operations"][op_index]["operationState"] = None + return_value = self.my_ns._retry_or_skip_suboperation(db_nslcmop, op_index) + expected_value = op_index + self.assertEqual(return_value, expected_value) + + # Test _find_suboperation() + # Expected result: index of the found sub-operation, or SUBOPERATION_STATUS_NOT_FOUND if not found + def test_scale_find_suboperation(self): + # Load an alternative 'nslcmops' YAML for this test + nslcmop_id = descriptors.test_ids["TEST-A"]["instantiate"] + db_nslcmop = self.db.get_one("nslcmops", {"_id": nslcmop_id}) + # Find this sub-operation + op_index = 2 + vnf_index = db_nslcmop["_admin"]["operations"][op_index]["member_vnf_index"] + primitive = db_nslcmop["_admin"]["operations"][op_index]["primitive"] + primitive_params = db_nslcmop["_admin"]["operations"][op_index][ + "primitive_params" + ] + match = { + "member_vnf_index": vnf_index, + "primitive": primitive, + "primitive_params": primitive_params, + } + found_op_index = self.my_ns._find_suboperation(db_nslcmop, match) + self.assertEqual(found_op_index, op_index) + # Test with not-matching params + match = { + "member_vnf_index": vnf_index, + "primitive": "", + "primitive_params": primitive_params, + } + found_op_index = self.my_ns._find_suboperation(db_nslcmop, match) + self.assertEqual(found_op_index, self.my_ns.SUBOPERATION_STATUS_NOT_FOUND) + # Test with None + match = None + found_op_index = self.my_ns._find_suboperation(db_nslcmop, match) + self.assertEqual(found_op_index, self.my_ns.SUBOPERATION_STATUS_NOT_FOUND) + + # Test _update_suboperation_status() + def test_scale_update_suboperation_status(self): + self.db.set_one = asynctest.Mock() + nslcmop_id = descriptors.test_ids["TEST-A"]["instantiate"] + db_nslcmop = self.db.get_one("nslcmops", {"_id": nslcmop_id}) + op_index = 0 + # Force the initial values to be distinct from the updated ones + q_filter = {"_id": db_nslcmop["_id"]} + # Test to change 'operationState' and 'detailed-status' + operationState = "COMPLETED" + detailed_status = "Done" + expected_update_dict = { + "_admin.operations.0.operationState": operationState, + "_admin.operations.0.detailed-status": detailed_status, + } + self.my_ns._update_suboperation_status( + db_nslcmop, op_index, operationState, detailed_status + ) + self.db.set_one.assert_called_once_with( + "nslcmops", + q_filter=q_filter, + update_dict=expected_update_dict, + fail_on_empty=False, + ) + + def test_scale_add_suboperation(self): + nslcmop_id = descriptors.test_ids["TEST-A"]["instantiate"] + db_nslcmop = self.db.get_one("nslcmops", {"_id": nslcmop_id}) + vnf_index = "1" + num_ops_before = len(db_nslcmop.get("_admin", {}).get("operations", [])) - 1 + vdu_id = None + vdu_count_index = None + vdu_name = None + primitive = "touch" + mapped_primitive_params = { + "parameter": [ + { + "data-type": "STRING", + "name": "filename", + "default-value": "", + } + ], + "name": "touch", + } + 
operationState = "PROCESSING" + detailed_status = "In progress" + operationType = "PRE-SCALE" + # Add a 'pre-scale' suboperation + op_index_after = self.my_ns._add_suboperation( + db_nslcmop, + vnf_index, + vdu_id, + vdu_count_index, + vdu_name, + primitive, + mapped_primitive_params, + operationState, + detailed_status, + operationType, + ) + self.assertEqual(op_index_after, num_ops_before + 1) + + # Delete all suboperations and add the same operation again + del db_nslcmop["_admin"]["operations"] + op_index_zero = self.my_ns._add_suboperation( + db_nslcmop, + vnf_index, + vdu_id, + vdu_count_index, + vdu_name, + primitive, + mapped_primitive_params, + operationState, + detailed_status, + operationType, + ) + self.assertEqual(op_index_zero, 0) + + # Add a 'RO' suboperation + RO_nsr_id = "1234567890" + RO_scaling_info = [ + { + "type": "create", + "count": 1, + "member-vnf-index": "1", + "osm_vdu_id": "dataVM", + } + ] + op_index = self.my_ns._add_suboperation( + db_nslcmop, + vnf_index, + vdu_id, + vdu_count_index, + vdu_name, + primitive, + mapped_primitive_params, + operationState, + detailed_status, + operationType, + RO_nsr_id, + RO_scaling_info, + ) + db_RO_nsr_id = db_nslcmop["_admin"]["operations"][op_index]["RO_nsr_id"] + self.assertEqual(op_index, 1) + self.assertEqual(RO_nsr_id, db_RO_nsr_id) + + # Try to add an invalid suboperation, should return SUBOPERATION_STATUS_NOT_FOUND + op_index_invalid = self.my_ns._add_suboperation( + None, None, None, None, None, None, None, None, None, None, None + ) + self.assertEqual(op_index_invalid, self.my_ns.SUBOPERATION_STATUS_NOT_FOUND) + + # Test _check_or_add_scale_suboperation() and _check_or_add_scale_suboperation_RO() + # check the possible return values: + # - SUBOPERATION_STATUS_NEW: This is a new sub-operation + # - op_index (non-negative number): This is an existing sub-operation, operationState != 'COMPLETED' + # - SUBOPERATION_STATUS_SKIP: This is an existing sub-operation, operationState == 'COMPLETED' + def test_scale_check_or_add_scale_suboperation(self): + nslcmop_id = descriptors.test_ids["TEST-A"]["instantiate"] + db_nslcmop = self.db.get_one("nslcmops", {"_id": nslcmop_id}) + operationType = "PRE-SCALE" + vnf_index = "1" + primitive = "touch" + primitive_params = { + "parameter": [ + { + "data-type": "STRING", + "name": "filename", + "default-value": "", + } + ], + "name": "touch", + } + + # Delete all sub-operations to be sure this is a new sub-operation + del db_nslcmop["_admin"]["operations"] + + # Add a new sub-operation + # For new sub-operations, operationState is set to 'PROCESSING' by default + op_index_new = self.my_ns._check_or_add_scale_suboperation( + db_nslcmop, vnf_index, primitive, primitive_params, operationType + ) + self.assertEqual(op_index_new, self.my_ns.SUBOPERATION_STATUS_NEW) + + # Use the same parameters again to match the already added sub-operation + # which has status 'PROCESSING' (!= 'COMPLETED') by default + # The expected return value is a non-negative number + op_index_existing = self.my_ns._check_or_add_scale_suboperation( + db_nslcmop, vnf_index, primitive, primitive_params, operationType + ) + self.assertTrue(op_index_existing >= 0) + + # Change operationState 'manually' for this sub-operation + db_nslcmop["_admin"]["operations"][op_index_existing][ + "operationState" + ] = "COMPLETED" + # Then use the same parameters again to match the already added sub-operation, + # which now has status 'COMPLETED' + # The expected return value is SUBOPERATION_STATUS_SKIP + op_index_skip = 
self.my_ns._check_or_add_scale_suboperation( + db_nslcmop, vnf_index, primitive, primitive_params, operationType + ) + self.assertEqual(op_index_skip, self.my_ns.SUBOPERATION_STATUS_SKIP) + + # RO sub-operation test: + # Repeat tests for the very similar _check_or_add_scale_suboperation_RO(), + RO_nsr_id = "1234567890" + RO_scaling_info = [ + { + "type": "create", + "count": 1, + "member-vnf-index": "1", + "osm_vdu_id": "dataVM", + } + ] + op_index_new_RO = self.my_ns._check_or_add_scale_suboperation( + db_nslcmop, vnf_index, None, None, "SCALE-RO", RO_nsr_id, RO_scaling_info + ) + self.assertEqual(op_index_new_RO, self.my_ns.SUBOPERATION_STATUS_NEW) + + # Use the same parameters again to match the already added RO sub-operation + op_index_existing_RO = self.my_ns._check_or_add_scale_suboperation( + db_nslcmop, vnf_index, None, None, "SCALE-RO", RO_nsr_id, RO_scaling_info + ) + self.assertTrue(op_index_existing_RO >= 0) + + # Change operationState 'manually' for this RO sub-operation + db_nslcmop["_admin"]["operations"][op_index_existing_RO][ + "operationState" + ] = "COMPLETED" + # Then use the same parameters again to match the already added sub-operation, + # which now has status 'COMPLETED' + # The expected return value is SUBOPERATION_STATUS_SKIP + op_index_skip_RO = self.my_ns._check_or_add_scale_suboperation( + db_nslcmop, vnf_index, None, None, "SCALE-RO", RO_nsr_id, RO_scaling_info + ) + self.assertEqual(op_index_skip_RO, self.my_ns.SUBOPERATION_STATUS_SKIP) + + async def test_deploy_kdus(self): + nsr_id = descriptors.test_ids["TEST-KDU"]["ns"] + nslcmop_id = descriptors.test_ids["TEST-KDU"]["instantiate"] + db_nsr = self.db.get_one("nsrs", {"_id": nsr_id}) + db_vnfr = self.db.get_one( + "vnfrs", {"nsr-id-ref": nsr_id, "member-vnf-index-ref": "multikdu"} + ) + db_vnfrs = {"multikdu": db_vnfr} + db_vnfd = self.db.get_one("vnfds", {"_id": db_vnfr["vnfd-id"]}) + db_vnfds = [db_vnfd] + task_register = {} + logging_text = "KDU" + self.my_ns.k8sclusterhelm3.generate_kdu_instance_name = asynctest.mock.Mock() + self.my_ns.k8sclusterhelm3.generate_kdu_instance_name.return_value = "k8s_id" + self.my_ns.k8sclusterhelm3.install = asynctest.CoroutineMock() + self.my_ns.k8sclusterhelm3.synchronize_repos = asynctest.CoroutineMock( + return_value=("", "") + ) + self.my_ns.k8sclusterhelm3.get_services = asynctest.CoroutineMock( + return_value=([]) + ) + await self.my_ns.deploy_kdus( + logging_text, nsr_id, nslcmop_id, db_vnfrs, db_vnfds, task_register + ) + await asyncio.wait(list(task_register.keys()), timeout=100) + db_nsr = self.db.get_list("nsrs")[1] + self.assertIn( + "K8s", + db_nsr["_admin"]["deployed"], + "K8s entry not created at '_admin.deployed'", + ) + self.assertIsInstance( + db_nsr["_admin"]["deployed"]["K8s"], list, "K8s entry is not of type list" + ) + self.assertEqual( + len(db_nsr["_admin"]["deployed"]["K8s"]), 2, "K8s entry is not of type list" + ) + k8s_instace_info = { + "kdu-instance": "k8s_id", + "k8scluster-uuid": "73d96432-d692-40d2-8440-e0c73aee209c", + "k8scluster-type": "helm-chart-v3", + "kdu-name": "ldap", + "member-vnf-index": "multikdu", + "namespace": None, + "kdu-deployment-name": None, + } + + nsr_result = copy.deepcopy(db_nsr["_admin"]["deployed"]["K8s"][0]) + nsr_kdu_model_result = nsr_result.pop("kdu-model") + expected_kdu_model = "stable/openldap:1.2.1" + self.assertEqual(nsr_result, k8s_instace_info) + self.assertTrue( + nsr_kdu_model_result in expected_kdu_model + or expected_kdu_model in nsr_kdu_model_result + ) + nsr_result = 
+    # Test remove_vnf() and related methods
+    @asynctest.fail_on(active_handles=True)  # all async tasks must be completed
+    async def test_remove_vnf(self):
+        # Test REMOVE_VNF
+        nsr_id = descriptors.test_ids["TEST-UPDATE"]["ns"]
+        nslcmop_id = descriptors.test_ids["TEST-UPDATE"]["removeVnf"]
+        vnf_instance_id = descriptors.test_ids["TEST-UPDATE"]["vnf"]
+        mock_wait_ng_ro = asynctest.CoroutineMock()
+        with patch("osm_lcm.ns.NsLcm._wait_ng_ro", mock_wait_ng_ro):
+            await self.my_ns.update(nsr_id, nslcmop_id)
+            expected_value = "COMPLETED"
+            return_value = self.db.get_one("nslcmops", {"_id": nslcmop_id}).get(
+                "operationState"
+            )
+            self.assertEqual(return_value, expected_value)
+            with self.assertRaises(Exception) as context:
+                self.db.get_one("vnfrs", {"_id": vnf_instance_id})
+            self.assertTrue(
+                "database exception Not found entry with filter"
+                in str(context.exception)
+            )
+
+    # Test that vertical scale executes successfully
+    # @patch("osm_lcm.ng_ro.status.response")
+    @asynctest.fail_on(active_handles=True)
+    async def test_vertical_scaling(self):
+        nsr_id = descriptors.test_ids["TEST-V-SCALE"]["ns"]
+        nslcmop_id = descriptors.test_ids["TEST-V-SCALE"]["instantiate"]
+
+        # Call the vertical scale function
+        # self.my_ns.RO.status = asynctest.CoroutineMock(self.my_ns.RO.status, side_effect=self._ro_status("update"))
+        mock_wait_ng_ro = asynctest.CoroutineMock()
+        with patch("osm_lcm.ns.NsLcm._wait_ng_ro", mock_wait_ng_ro):
+            await self.my_ns.vertical_scale(nsr_id, nslcmop_id)
+            return_value = self.db.get_one("nslcmops", {"_id": nslcmop_id}).get(
+                "operationState"
+            )
+            expected_value = "COMPLETED"
+            self.assertEqual(return_value, expected_value)
+
+    # Test that vertical scale fails
+    @asynctest.fail_on(active_handles=True)
+    async def test_vertical_scaling_fail(self):
+        # Get the nsr and nslcmop ids from the descriptors
+        nsr_id = descriptors.test_ids["TEST-V-SCALE"]["ns"]
+        nslcmop_id = descriptors.test_ids["TEST-V-SCALE"]["instantiate-1"]
+
+        # Call the vertical scale function
+        await self.my_ns.vertical_scale(nsr_id, nslcmop_id)
+        return_value = self.db.get_one("nslcmops", {"_id": nslcmop_id}).get(
+            "operationState"
+        )
+        expected_value = "FAILED"
+        self.assertEqual(return_value, expected_value)
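Both test_remove_vnf() and test_vertical_scaling() avoid blocking on RO by patching the coroutine NsLcm._wait_ng_ro with a CoroutineMock. Reduced to its essentials (sketch with a hypothetical Worker class standing in for NsLcm):

    import asyncio
    import asynctest
    from unittest.mock import patch

    class Worker:
        async def _wait_ready(self):  # stands in for NsLcm._wait_ng_ro
            raise RuntimeError("would poll an external service")

        async def run(self):
            await self._wait_ready()  # patched away in the test
            return "COMPLETED"

    async def demo():
        with patch.object(Worker, "_wait_ready", asynctest.CoroutineMock()):
            assert await Worker().run() == "COMPLETED"

    asyncio.get_event_loop().run_until_complete(demo())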
+    # async def test_instantiate_pdu(self):
+    #     nsr_id = descriptors.test_ids["TEST-A"]["ns"]
+    #     nslcmop_id = descriptors.test_ids["TEST-A"]["instantiate"]
+    #     # Modify vnfd/vnfr to change KDU for PDU. Adding keys that NBI will already set
+    #     self.db.set_one("vnfrs", {"nsr-id-ref": nsr_id, "member-vnf-index-ref": "1"},
+    #                     update_dict={"ip-address": "10.205.1.46",
+    #                                  "vdur.0.pdu-id": "53e1ec21-2464-451e-a8dc-6e311d45b2c8",
+    #                                  "vdur.0.pdu-type": "PDU-TYPE-1",
+    #                                  "vdur.0.ip-address": "10.205.1.46",
+    #                                  },
+    #                     unset={"vdur.status": None})
+    #     self.db.set_one("vnfrs", {"nsr-id-ref": nsr_id, "member-vnf-index-ref": "2"},
+    #                     update_dict={"ip-address": "10.205.1.47",
+    #                                  "vdur.0.pdu-id": "53e1ec21-2464-451e-a8dc-6e311d45b2c8",
+    #                                  "vdur.0.pdu-type": "PDU-TYPE-1",
+    #                                  "vdur.0.ip-address": "10.205.1.47",
+    #                                  },
+    #                     unset={"vdur.status": None})
+
+    #     await self.my_ns.instantiate(nsr_id, nslcmop_id)
+    #     db_nsr = self.db.get_one("nsrs", {"_id": nsr_id})
+    #     self.assertEqual(db_nsr.get("nsState"), "READY", str(db_nsr.get("errorDescription ")))
+    #     self.assertEqual(db_nsr.get("currentOperation"), "IDLE", "currentOperation different than 'IDLE'")
+    #     self.assertEqual(db_nsr.get("currentOperationID"), None, "currentOperationID different than None")
+    #     self.assertEqual(db_nsr.get("errorDescription "), None, "errorDescription different than None")
+    #     self.assertEqual(db_nsr.get("errorDetail"), None, "errorDetail different than None")
+
+    # @asynctest.fail_on(active_handles=True)  # all async tasks must be completed
+    # async def test_terminate_without_configuration(self):
+    #     nsr_id = descriptors.test_ids["TEST-A"]["ns"]
+    #     nslcmop_id = descriptors.test_ids["TEST-A"]["terminate"]
+    #     # set instantiation task as completed
+    #     self.db.set_list("nslcmops", {"nsInstanceId": nsr_id, "_id.ne": nslcmop_id},
+    #                      update_dict={"operationState": "COMPLETED"})
+    #     self.db.set_one("nsrs", {"_id": nsr_id},
+    #                     update_dict={"_admin.deployed.VCA.0": None, "_admin.deployed.VCA.1": None})
+
+    #     await self.my_ns.terminate(nsr_id, nslcmop_id)
+    #     db_nslcmop = self.db.get_one("nslcmops", {"_id": nslcmop_id})
+    #     self.assertEqual(db_nslcmop.get("operationState"), 'COMPLETED', db_nslcmop.get("detailed-status"))
+    #     db_nsr = self.db.get_one("nsrs", {"_id": nsr_id})
+    #     self.assertEqual(db_nsr.get("nsState"), "NOT_INSTANTIATED", str(db_nsr.get("errorDescription ")))
+    #     self.assertEqual(db_nsr["_admin"].get("nsState"), "NOT_INSTANTIATED", str(db_nsr.get("errorDescription ")))
+    #     self.assertEqual(db_nsr.get("currentOperation"), "IDLE", "currentOperation different than 'IDLE'")
+    #     self.assertEqual(db_nsr.get("currentOperationID"), None, "currentOperationID different than None")
+    #     self.assertEqual(db_nsr.get("errorDescription "), None, "errorDescription different than None")
+    #     self.assertEqual(db_nsr.get("errorDetail"), None, "errorDetail different than None")
+    #     db_vnfrs_list = self.db.get_list("vnfrs", {"nsr-id-ref": nsr_id})
+    #     for vnfr in db_vnfrs_list:
+    #         self.assertEqual(vnfr["_admin"].get("nsState"), "NOT_INSTANTIATED", "Not instantiated")
"member-vnf-index-ref": "1"}) + # self.db.set_one("vnfds", {"_id": db_vnfr["vnfd-id"]}, + # {"vnf-configuration.0.terminate-config-primitive": terminate_primitive}) + + # await self.my_ns.terminate(nsr_id, nslcmop_id) + # db_nslcmop = self.db.get_one("nslcmops", {"_id": nslcmop_id}) + # self.assertEqual(db_nslcmop.get("operationState"), 'COMPLETED', db_nslcmop.get("detailed-status")) + # db_nsr = self.db.get_one("nsrs", {"_id": nsr_id}) + # self.assertEqual(db_nsr.get("nsState"), "NOT_INSTANTIATED", str(db_nsr.get("errorDescription "))) + # self.assertEqual(db_nsr["_admin"].get("nsState"), "NOT_INSTANTIATED", str(db_nsr.get("errorDescription "))) + # self.assertEqual(db_nsr.get("currentOperation"), "IDLE", "currentOperation different than 'IDLE'") + # self.assertEqual(db_nsr.get("currentOperationID"), None, "currentOperationID different than None") + # self.assertEqual(db_nsr.get("errorDescription "), None, "errorDescription different than None") + # self.assertEqual(db_nsr.get("errorDetail"), None, "errorDetail different than None") + + # Test update method + + async def test_update(self): + + nsr_id = descriptors.test_ids["TEST-A"]["ns"] + nslcmop_id = descriptors.test_ids["TEST-A"]["update"] + vnfr_id = "6421c7c9-d865-4fb4-9a13-d4275d243e01" + vnfd_id = "7637bcf8-cf14-42dc-ad70-c66fcf1e6e77" + + def mock_reset(): + mock_charm_hash.reset_mock() + mock_juju_bundle.reset_mock() + fs.sync.reset_mock() + mock_charm_upgrade.reset_mock() + mock_software_version.reset_mock() + + with self.subTest( + i=1, + t="Update type: CHANGE_VNFPKG, latest_vnfd revision changed," + "Charm package changed, sw-version is not changed.", + ): + + self.db.set_one( + "vnfds", + q_filter={"_id": vnfd_id}, + update_dict={"_admin.revision": 3, "kdu": []}, + ) + + self.db.set_one( + "vnfds_revisions", + q_filter={"_id": vnfd_id + ":1"}, + update_dict={"_admin.revision": 1, "kdu": []}, + ) + + self.db.set_one( + "vnfrs", q_filter={"_id": vnfr_id}, update_dict={"revision": 1} + ) + + mock_charm_hash = Mock(autospec=True) + mock_charm_hash.return_value = True + + mock_juju_bundle = Mock(return_value=None) + + mock_software_version = Mock(autospec=True) + mock_software_version.side_effect = ["1.0", "1.0"] + + mock_charm_upgrade = asynctest.Mock(autospec=True) + task = asyncio.Future() + task.set_result(("COMPLETED", "some_output")) + mock_charm_upgrade.return_value = task + + fs = Mock(autospec=True) + fs.path.__add__ = Mock() + fs.path.side_effect = ["/", "/", "/", "/"] + fs.sync.side_effect = [None, None] + + instance = self.my_ns + + expected_operation_state = "COMPLETED" + expected_operation_error = "" + expected_vnfr_revision = 3 + expected_ns_state = "INSTANTIATED" + expected_ns_operational_state = "running" + + with patch.object(instance, "fs", fs), patch( + "osm_lcm.lcm_utils.LcmBase.check_charm_hash_changed", mock_charm_hash + ), patch("osm_lcm.ns.NsLcm._ns_charm_upgrade", mock_charm_upgrade), patch( + "osm_lcm.data_utils.vnfd.find_software_version", mock_software_version + ), patch( + "osm_lcm.lcm_utils.check_juju_bundle_existence", mock_juju_bundle + ): + + await instance.update(nsr_id, nslcmop_id) + return_operation_state = self.db.get_one( + "nslcmops", {"_id": nslcmop_id} + ).get("operationState") + return_operation_error = self.db.get_one( + "nslcmops", {"_id": nslcmop_id} + ).get("errorMessage") + return_ns_operational_state = self.db.get_one( + "nsrs", {"_id": nsr_id} + ).get("operational-status") + + return_vnfr_revision = self.db.get_one("vnfrs", {"_id": vnfr_id}).get( + "revision" + ) + + 
+        with self.subTest(
+            i=2, t="Update type: CHANGE_VNFPKG, latest_vnfd revision not changed"
+        ):
+            self.db.set_one(
+                "vnfds", q_filter={"_id": vnfd_id}, update_dict={"_admin.revision": 1}
+            )
+
+            self.db.set_one(
+                "vnfrs", q_filter={"_id": vnfr_id}, update_dict={"revision": 1}
+            )
+
+            mock_charm_hash = Mock(autospec=True)
+            mock_charm_hash.return_value = True
+
+            mock_juju_bundle = Mock(return_value=None)
+            mock_software_version = Mock(autospec=True)
+
+            mock_charm_upgrade = asynctest.Mock(autospec=True)
+            task = asyncio.Future()
+            task.set_result(("COMPLETED", "some_output"))
+            mock_charm_upgrade.return_value = task
+
+            fs = Mock(autospec=True)
+            fs.path.__add__ = Mock()
+            fs.path.side_effect = ["/", "/", "/", "/"]
+            fs.sync.side_effect = [None, None]
+
+            instance = self.my_ns
+
+            expected_operation_state = "COMPLETED"
+            expected_operation_error = ""
+            expected_vnfr_revision = 1
+            expected_ns_state = "INSTANTIATED"
+            expected_ns_operational_state = "running"
+
+            with patch.object(instance, "fs", fs), patch(
+                "osm_lcm.lcm_utils.LcmBase.check_charm_hash_changed", mock_charm_hash
+            ), patch("osm_lcm.ns.NsLcm._ns_charm_upgrade", mock_charm_upgrade), patch(
+                "osm_lcm.lcm_utils.check_juju_bundle_existence", mock_juju_bundle
+            ):
+                await instance.update(nsr_id, nslcmop_id)
+
+                return_operation_state = self.db.get_one(
+                    "nslcmops", {"_id": nslcmop_id}
+                ).get("operationState")
+
+                return_operation_error = self.db.get_one(
+                    "nslcmops", {"_id": nslcmop_id}
+                ).get("errorMessage")
+
+                return_ns_operational_state = self.db.get_one(
+                    "nsrs", {"_id": nsr_id}
+                ).get("operational-status")
+
+                return_ns_state = self.db.get_one("nsrs", {"_id": nsr_id}).get(
+                    "nsState"
+                )
+
+                return_vnfr_revision = self.db.get_one("vnfrs", {"_id": vnfr_id}).get(
+                    "revision"
+                )
+
+                mock_charm_hash.assert_not_called()
+                mock_software_version.assert_not_called()
+                mock_juju_bundle.assert_not_called()
+                mock_charm_upgrade.assert_not_called()
+                fs.sync.assert_not_called()
+
+                self.assertEqual(return_ns_state, expected_ns_state)
+                self.assertEqual(return_operation_state, expected_operation_state)
+                self.assertEqual(return_operation_error, expected_operation_error)
+                self.assertEqual(
+                    return_ns_operational_state, expected_ns_operational_state
+                )
+                self.assertEqual(return_vnfr_revision, expected_vnfr_revision)
+
+            mock_reset()
+        with self.subTest(
+            i=3,
+            t="Update type: CHANGE_VNFPKG, latest_vnfd revision changed, "
+            "Charm package is not changed, sw-version is not changed.",
+        ):
+            self.db.set_one(
+                "vnfds", q_filter={"_id": vnfd_id}, update_dict={"_admin.revision": 3}
+            )
+
+            self.db.set_one(
+                "vnfds_revisions",
+                q_filter={"_id": vnfd_id + ":1"},
+                update_dict={"_admin.revision": 1},
+            )
+
+            self.db.set_one(
+                "vnfrs", q_filter={"_id": vnfr_id}, update_dict={"revision": 1}
+            )
+
+            mock_charm_hash = Mock(autospec=True)
+            mock_charm_hash.return_value = False
+
+            mock_juju_bundle = Mock(return_value=None)
+
+            mock_software_version = Mock(autospec=True)
+
+            mock_charm_upgrade = asynctest.Mock(autospec=True)
+            task = asyncio.Future()
+            task.set_result(("COMPLETED", "some_output"))
+            mock_charm_upgrade.return_value = task
+            mock_software_version.side_effect = ["1.0", "1.0"]
+
+            fs = Mock(autospec=True)
+            fs.path.__add__ = Mock()
+            fs.path.side_effect = ["/", "/", "/", "/"]
+            fs.sync.side_effect = [None, None]
+
+            instance = self.my_ns
+
+            expected_operation_state = "COMPLETED"
+            expected_operation_error = ""
+            expected_vnfr_revision = 3
+            expected_ns_state = "INSTANTIATED"
+            expected_ns_operational_state = "running"
+
+            with patch.object(instance, "fs", fs), patch(
+                "osm_lcm.lcm_utils.LcmBase.check_charm_hash_changed", mock_charm_hash
+            ), patch("osm_lcm.ns.NsLcm._ns_charm_upgrade", mock_charm_upgrade), patch(
+                "osm_lcm.lcm_utils.check_juju_bundle_existence", mock_juju_bundle
+            ):
+                await instance.update(nsr_id, nslcmop_id)
+
+                return_operation_state = self.db.get_one(
+                    "nslcmops", {"_id": nslcmop_id}
+                ).get("operationState")
+
+                return_operation_error = self.db.get_one(
+                    "nslcmops", {"_id": nslcmop_id}
+                ).get("errorMessage")
+
+                return_ns_operational_state = self.db.get_one(
+                    "nsrs", {"_id": nsr_id}
+                ).get("operational-status")
+
+                return_vnfr_revision = self.db.get_one("vnfrs", {"_id": vnfr_id}).get(
+                    "revision"
+                )
+
+                return_ns_state = self.db.get_one("nsrs", {"_id": nsr_id}).get(
+                    "nsState"
+                )
+
+                mock_charm_hash.assert_called_with(
+                    "7637bcf8-cf14-42dc-ad70-c66fcf1e6e77:1/hackfest_3charmed_vnfd/charms/simple",
+                    "7637bcf8-cf14-42dc-ad70-c66fcf1e6e77:3/hackfest_3charmed_vnfd/charms/simple",
+                )
+
+                self.assertEqual(fs.sync.call_count, 2)
+                self.assertEqual(mock_charm_hash.call_count, 1)
+
+                mock_juju_bundle.assert_not_called()
+                mock_charm_upgrade.assert_not_called()
+
+                self.assertEqual(return_ns_state, expected_ns_state)
+                self.assertEqual(return_operation_state, expected_operation_state)
+                self.assertEqual(return_operation_error, expected_operation_error)
+                self.assertEqual(
+                    return_ns_operational_state, expected_ns_operational_state
+                )
+                self.assertEqual(return_vnfr_revision, expected_vnfr_revision)
+
+            mock_reset()
+        with self.subTest(
+            i=4,
+            t="Update type: CHANGE_VNFPKG, latest_vnfd revision changed, "
+            "Charm package exists, sw-version changed.",
+        ):
+            self.db.set_one(
+                "vnfds",
+                q_filter={"_id": vnfd_id},
+                update_dict={"_admin.revision": 3, "software-version": "3.0"},
+            )
+
+            self.db.set_one(
+                "vnfds_revisions",
+                q_filter={"_id": vnfd_id + ":1"},
+                update_dict={"_admin.revision": 1},
+            )
+
+            self.db.set_one(
+                "vnfrs",
+                q_filter={"_id": vnfr_id},
+                update_dict={"revision": 1},
+            )
+
+            mock_charm_hash = Mock(autospec=True)
+            mock_charm_hash.return_value = False
+
+            mock_juju_bundle = Mock(return_value=None)
+
+            mock_charm_upgrade = asynctest.Mock(autospec=True)
+            task = asyncio.Future()
+            task.set_result(("COMPLETED", "some_output"))
+            mock_charm_upgrade.return_value = task
+
+            mock_charm_artifact = Mock(autospec=True)
+            mock_charm_artifact.side_effect = [
+                "7637bcf8-cf14-42dc-ad70-c66fcf1e6e77:1/hackfest_3charmed_vnfd/charms/simple",
+                "7637bcf8-cf14-42dc-ad70-c66fcf1e6e77/hackfest_3charmed_vnfd/charms/simple",
+            ]
+
+            fs = Mock(autospec=True)
+            fs.path.__add__ = Mock()
+            fs.path.side_effect = ["/", "/", "/", "/"]
+            fs.sync.side_effect = [None, None]
+
+            instance = self.my_ns
+
+            expected_operation_state = "FAILED"
+            expected_operation_error = "FAILED Checking if existing VNF has charm: Software version change is not supported as VNF instance 6421c7c9-d865-4fb4-9a13-d4275d243e01 has charm."
+            expected_vnfr_revision = 1
+            expected_ns_state = "INSTANTIATED"
+            expected_ns_operational_state = "running"
+
+            with patch.object(instance, "fs", fs), patch(
+                "osm_lcm.lcm_utils.LcmBase.check_charm_hash_changed", mock_charm_hash
+            ), patch("osm_lcm.ns.NsLcm._ns_charm_upgrade", mock_charm_upgrade), patch(
+                "osm_lcm.lcm_utils.get_charm_artifact_path", mock_charm_artifact
+            ):
+                await instance.update(nsr_id, nslcmop_id)
+
+                return_operation_state = self.db.get_one(
+                    "nslcmops", {"_id": nslcmop_id}
+                ).get("operationState")
+
+                return_operation_error = self.db.get_one(
+                    "nslcmops", {"_id": nslcmop_id}
+                ).get("errorMessage")
+
+                return_ns_operational_state = self.db.get_one(
+                    "nsrs", {"_id": nsr_id}
+                ).get("operational-status")
+
+                return_vnfr_revision = self.db.get_one("vnfrs", {"_id": vnfr_id}).get(
+                    "revision"
+                )
+
+                return_ns_state = self.db.get_one("nsrs", {"_id": nsr_id}).get(
+                    "nsState"
+                )
+
+                self.assertEqual(fs.sync.call_count, 2)
+                mock_charm_hash.assert_not_called()
+
+                mock_juju_bundle.assert_not_called()
+                mock_charm_upgrade.assert_not_called()
+
+                self.assertEqual(return_ns_state, expected_ns_state)
+                self.assertEqual(return_operation_state, expected_operation_state)
+                self.assertEqual(return_operation_error, expected_operation_error)
+                self.assertEqual(
+                    return_ns_operational_state, expected_ns_operational_state
+                )
+                self.assertEqual(return_vnfr_revision, expected_vnfr_revision)
+
+            mock_reset()
+        with self.subTest(
+            i=5,
+            t="Update type: CHANGE_VNFPKG, latest_vnfd revision changed, "
+            "Charm package exists, sw-version not changed, juju-bundle exists",
+        ):
+            self.db.set_one(
+                "vnfds",
+                q_filter={"_id": vnfd_id},
+                update_dict={
+                    "_admin.revision": 3,
+                    "software-version": "1.0",
+                    "kdu.0.juju-bundle": "stable/native-kdu",
+                },
+            )
+
+            self.db.set_one(
+                "vnfds_revisions",
+                q_filter={"_id": vnfd_id + ":1"},
+                update_dict={
+                    "_admin.revision": 1,
+                    "software-version": "1.0",
+                    "kdu.0.juju-bundle": "stable/native-kdu",
+                },
+            )
+
+            self.db.set_one(
+                "vnfrs", q_filter={"_id": vnfr_id}, update_dict={"revision": 1}
+            )
+
+            mock_charm_hash = Mock(autospec=True)
+            mock_charm_hash.return_value = True
+
+            mock_charm_artifact = Mock(autospec=True)
+            mock_charm_artifact.side_effect = [
+                "7637bcf8-cf14-42dc-ad70-c66fcf1e6e77:1/hackfest_3charmed_vnfd/charms/simple",
+                "7637bcf8-cf14-42dc-ad70-c66fcf1e6e77/hackfest_3charmed_vnfd/charms/simple",
+            ]
+
+            fs = Mock(autospec=True)
+            fs.path.__add__ = Mock()
+            fs.path.side_effect = ["/", "/", "/", "/"]
+            fs.sync.side_effect = [None, None]
+
+            instance = self.my_ns
+
+            expected_operation_state = "FAILED"
+            expected_operation_error = "FAILED Checking whether VNF uses juju bundle: Charm upgrade is not supported for the instance which uses juju-bundle: stable/native-kdu"
+            expected_vnfr_revision = 1
+            expected_ns_state = "INSTANTIATED"
+            expected_ns_operational_state = "running"
+
+            with patch.object(instance, "fs", fs), patch(
+                "osm_lcm.lcm_utils.LcmBase.check_charm_hash_changed", mock_charm_hash
+            ), patch("osm_lcm.lcm_utils.get_charm_artifact_path", mock_charm_artifact):
+                await instance.update(nsr_id, nslcmop_id)
+
+                return_operation_state = self.db.get_one(
+                    "nslcmops", {"_id": nslcmop_id}
+                ).get("operationState")
+
+                return_operation_error = self.db.get_one(
+                    "nslcmops", {"_id": nslcmop_id}
+                ).get("errorMessage")
+
+                return_ns_operational_state = self.db.get_one(
+                    "nsrs", {"_id": nsr_id}
+                ).get("operational-status")
+
+                return_vnfr_revision = self.db.get_one("vnfrs", {"_id": vnfr_id}).get(
+                    "revision"
+                )
+
+                return_ns_state = self.db.get_one("nsrs", {"_id": nsr_id}).get(
+                    "nsState"
+                )
+
+                self.assertEqual(fs.sync.call_count, 2)
+                self.assertEqual(mock_charm_hash.call_count, 1)
+
+                mock_charm_upgrade.assert_not_called()
+
+                self.assertEqual(return_ns_state, expected_ns_state)
+                self.assertEqual(return_operation_state, expected_operation_state)
+                self.assertEqual(return_operation_error, expected_operation_error)
+                self.assertEqual(
+                    return_ns_operational_state, expected_ns_operational_state
+                )
+                self.assertEqual(return_vnfr_revision, expected_vnfr_revision)
+
+            mock_reset()
+
+        with self.subTest(
+            i=6,
+            t="Update type: CHANGE_VNFPKG, latest_vnfd revision changed, "
+            "Charm package exists, sw-version not changed, charm-upgrade failed",
+        ):
+            self.db.set_one(
+                "vnfds",
+                q_filter={"_id": vnfd_id},
+                update_dict={
+                    "_admin.revision": 3,
+                    "software-version": "1.0",
+                    "kdu": [],
+                },
+            )
+
+            self.db.set_one(
+                "vnfds_revisions",
+                q_filter={"_id": vnfd_id + ":1"},
+                update_dict={
+                    "_admin.revision": 1,
+                    "software-version": "1.0",
+                    "kdu": [],
+                },
+            )
+
+            self.db.set_one(
+                "vnfrs", q_filter={"_id": vnfr_id}, update_dict={"revision": 1}
+            )
+
+            mock_charm_hash = Mock(autospec=True)
+            mock_charm_hash.return_value = True
+
+            mock_charm_upgrade = asynctest.Mock(autospec=True)
+            task = asyncio.Future()
+            task.set_result(("FAILED", "some_error"))
+            mock_charm_upgrade.return_value = task
+
+            mock_charm_artifact = Mock(autospec=True)
+            mock_charm_artifact.side_effect = [
+                "7637bcf8-cf14-42dc-ad70-c66fcf1e6e77:1/hackfest_3charmed_vnfd/charms/simple",
+                "7637bcf8-cf14-42dc-ad70-c66fcf1e6e77/hackfest_3charmed_vnfd/charms/simple",
+            ]
+
+            fs = Mock(autospec=True)
+            fs.path.__add__ = Mock()
+            fs.path.side_effect = ["/", "/", "/", "/"]
+            fs.sync.side_effect = [None, None]
+
+            instance = self.my_ns
+
+            expected_operation_state = "FAILED"
+            expected_operation_error = "some_error"
+            expected_vnfr_revision = 1
+            expected_ns_state = "INSTANTIATED"
+            expected_ns_operational_state = "running"
+
+            with patch.object(instance, "fs", fs), patch(
+                "osm_lcm.lcm_utils.LcmBase.check_charm_hash_changed", mock_charm_hash
+            ), patch("osm_lcm.ns.NsLcm._ns_charm_upgrade", mock_charm_upgrade):
+                await instance.update(nsr_id, nslcmop_id)
+
+                return_operation_state = self.db.get_one(
+                    "nslcmops", {"_id": nslcmop_id}
+                ).get("operationState")
+
+                return_operation_error = self.db.get_one(
+                    "nslcmops", {"_id": nslcmop_id}
+                ).get("errorMessage")
+
+                return_ns_operational_state = self.db.get_one(
+                    "nsrs", {"_id": nsr_id}
+                ).get("operational-status")
+
+                return_vnfr_revision = self.db.get_one("vnfrs", {"_id": vnfr_id}).get(
+                    "revision"
+                )
+
+                return_ns_state = self.db.get_one("nsrs", {"_id": nsr_id}).get(
+                    "nsState"
+                )
+
+                self.assertEqual(fs.sync.call_count, 2)
+                self.assertEqual(mock_charm_hash.call_count, 1)
+                self.assertEqual(mock_charm_upgrade.call_count, 1)
+
+                self.assertEqual(return_ns_state, expected_ns_state)
+                self.assertEqual(return_operation_state, expected_operation_state)
+                self.assertEqual(return_operation_error, expected_operation_error)
+                self.assertEqual(
+                    return_ns_operational_state, expected_ns_operational_state
+                )
+                self.assertEqual(return_vnfr_revision, expected_vnfr_revision)
+
+            mock_reset()
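The helper-method tests below hinge on find_software_version() falling back to a default when the descriptor omits the field. Judging purely from the assertions in subTests i=1 and i=2, its observable behaviour is equivalent to this sketch (an assumption inferred from the tests, not the helper's actual code):

    def find_software_version_sketch(vnfd: dict) -> str:
        # VNFDs without an explicit software-version are treated as "1.0"
        return vnfd.get("software-version", "1.0")

    assert find_software_version_sketch({}) == "1.0"
    assert find_software_version_sketch({"software-version": "3.1"}) == "3.1"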
+    def test_ns_update_helper_methods(self):
+        def mock_reset():
+            fs.reset_mock()
+            mock_path.reset_mock()
+            mock_checksumdir.reset_mock()
+
+        with self.subTest(
+            i=1, t="Find software version, VNFD does not have software version"
+        ):
+            # Testing method find_software_version
+            db_vnfd = self.db.get_one(
+                "vnfds", {"_id": "7637bcf8-cf14-42dc-ad70-c66fcf1e6e77"}
+            )
+            expected_result = "1.0"
+            result = find_software_version(db_vnfd)
+            self.assertEqual(
+                result, expected_result, "Default sw version should be 1.0"
+            )
+
+        with self.subTest(
+            i=2, t="Find software version, VNFD includes software version"
+        ):
+            # Testing method find_software_version
+            db_vnfd = self.db.get_one(
+                "vnfds", {"_id": "7637bcf8-cf14-42dc-ad70-c66fcf1e6e77"}
+            )
+            db_vnfd["software-version"] = "3.1"
+            expected_result = "3.1"
+            result = find_software_version(db_vnfd)
+            self.assertEqual(result, expected_result, "VNFD software version is wrong")
+
+        with self.subTest(i=3, t="Check charm hash, Hash did not change"):
+            # Testing method check_charm_hash_changed
+            current_path, target_path = "/tmp/charm1", "/tmp/charm1"
+            fs = Mock(autospec=True)
+            fs.path.__add__ = Mock()
+            fs.path.side_effect = ["/", "/", "/", "/"]
+
+            mock_path = Mock(autospec=True)
+            mock_path.exists.side_effect = [True, True]
+
+            mock_checksumdir = Mock(autospec=True)
+            mock_checksumdir.dirhash.side_effect = ["hash_value", "hash_value"]
+
+            instance = self.my_ns
+            expected_result = False
+
+            with patch.object(instance, "fs", fs), patch(
+                "checksumdir.dirhash", mock_checksumdir.dirhash
+            ), patch("os.path.exists", mock_path.exists):
+                result = instance.check_charm_hash_changed(current_path, target_path)
+                self.assertEqual(
+                    result, expected_result, "Wrong charm hash control value"
+                )
+                self.assertEqual(mock_path.exists.call_count, 2)
+                self.assertEqual(mock_checksumdir.dirhash.call_count, 2)
+
+            mock_reset()
+
+        with self.subTest(i=4, t="Check charm hash, Hash has changed"):
+            # Testing method check_charm_hash_changed
+            current_path, target_path = "/tmp/charm1", "/tmp/charm2"
+            fs = Mock(autospec=True)
+            fs.path.__add__ = Mock()
+            fs.path.side_effect = ["/", "/", "/", "/"]
+
+            mock_path = Mock(autospec=True)
+            mock_path.exists.side_effect = [True, True]
+
+            mock_checksumdir = Mock(autospec=True)
+            mock_checksumdir.dirhash.side_effect = ["hash_value", "another_hash_value"]
+
+            instance = self.my_ns
+            expected_result = True
+
+            with patch.object(instance, "fs", fs), patch(
+                "checksumdir.dirhash", mock_checksumdir.dirhash
+            ), patch("os.path.exists", mock_path.exists):
+                result = instance.check_charm_hash_changed(current_path, target_path)
+                self.assertEqual(
+                    result, expected_result, "Wrong charm hash control value"
+                )
+                self.assertEqual(mock_path.exists.call_count, 2)
+                self.assertEqual(mock_checksumdir.dirhash.call_count, 2)
+
+            mock_reset()
+
+        with self.subTest(i=5, t="Check charm hash, Charm path does not exist"):
+            # Testing method check_charm_hash_changed
+            current_path, target_path = "/tmp/charm1", "/tmp/charm2"
+            fs = Mock(autospec=True)
+            fs.path.__add__ = Mock()
+            fs.path.side_effect = ["/", "/", "/", "/"]
+
+            mock_path = Mock(autospec=True)
+            mock_path.exists.side_effect = [True, False]
+
+            mock_checksumdir = Mock(autospec=True)
+            mock_checksumdir.dirhash.side_effect = ["hash_value", "hash_value"]
+
+            instance = self.my_ns
+
+            with patch.object(instance, "fs", fs), patch(
+                "checksumdir.dirhash", mock_checksumdir.dirhash
+            ), patch("os.path.exists", mock_path.exists):
+                with self.assertRaises(LcmException):
+                    instance.check_charm_hash_changed(current_path, target_path)
+                self.assertEqual(mock_path.exists.call_count, 2)
+                self.assertEqual(mock_checksumdir.dirhash.call_count, 0)
+
+            mock_reset()
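The three hash subTests above stub checksumdir.dirhash and os.path.exists; against a real filesystem, the behaviour they pin down amounts to comparing two directory digests and erroring out when a path is missing. A sketch under those assumptions (checksumdir is the same third-party package the tests patch; the exception type is simplified):

    import os
    import checksumdir

    def charm_hash_changed_sketch(current_path: str, target_path: str) -> bool:
        # Missing charm path -> error; otherwise report whether digests differ
        if not (os.path.exists(current_path) and os.path.exists(target_path)):
            raise RuntimeError("charm path not found")  # NsLcm raises LcmException here
        return checksumdir.dirhash(current_path) != checksumdir.dirhash(target_path)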
+        with self.subTest(i=6, t="Check juju bundle existence"):
+            # Testing method check_juju_bundle_existence
+            test_vnfd1 = self.db.get_one(
+                "vnfds", {"_id": "7637bcf8-cf14-42dc-ad70-c66fcf1e6e77"}
+            )
+            test_vnfd2 = self.db.get_one(
+                "vnfds", {"_id": "d96b1cdf-5ad6-49f7-bf65-907ada989293"}
+            )
+
+            expected_result = None
+            result = check_juju_bundle_existence(test_vnfd1)
+            self.assertEqual(result, expected_result, "Wrong juju bundle name")
+
+            expected_result = "stable/native-kdu"
+            result = check_juju_bundle_existence(test_vnfd2)
+            self.assertEqual(result, expected_result, "Wrong juju bundle name")
+
+        with self.subTest(i=7, t="Check charm artifacts"):
+            # Testing method get_charm_artifact_path
+            base_folder = {
+                "folder": "7637bcf8-cf14-42dc-ad70-c66fcf1e6e77",
+                "pkg-dir": "hackfest_3charmed_vnfd",
+            }
+            charm_name = "simple"
+            charm_type = "lxc_proxy_charm"
+            revision = 3
+
+            expected_result = "7637bcf8-cf14-42dc-ad70-c66fcf1e6e77:3/hackfest_3charmed_vnfd/charms/simple"
+            result = get_charm_artifact_path(
+                base_folder, charm_name, charm_type, revision
+            )
+            self.assertEqual(result, expected_result, "Wrong charm artifact path")
+
+            # SOL004 packages
+            base_folder = {
+                "folder": "7637bcf8-cf14-42dc-ad70-c66fcf1e6e77",
+            }
+            charm_name = "basic"
+            charm_type = ""
+            revision = ""
+
+            expected_result = (
+                "7637bcf8-cf14-42dc-ad70-c66fcf1e6e77/Scripts/helm-charts/basic"
+            )
+            result = get_charm_artifact_path(
+                base_folder, charm_name, charm_type, revision
+            )
+            self.assertEqual(result, expected_result, "Wrong charm artifact path")
+

-if __name__ == '__main__':
+if __name__ == "__main__":
     asynctest.main()