Bug 1830 fixed: maps completed operations to original operation types 87/13987/2 master
authorGabriel Cuba <gcuba@whitestack.com>
Wed, 8 Nov 2023 05:14:33 +0000 (00:14 -0500)
committergarciadeblas <gerardo.garciadeblas@telefonica.com>
Tue, 30 Jan 2024 14:36:12 +0000 (15:36 +0100)
Change-Id: I1a2f60f183ede39cabd9a7441ae64f10d7557232
Signed-off-by: Gabriel Cuba <gcuba@whitestack.com>
52 files changed:
Dockerfile
Dockerfile.local
Jenkinsfile
attic/run_test.py [new file with mode: 0755]
devops-stages/stage-archive.sh
devops-stages/stage-test.sh
osm_nbi/admin_topics.py
osm_nbi/auth.py
osm_nbi/authconn.py
osm_nbi/authconn_internal.py
osm_nbi/authconn_keystone.py
osm_nbi/base_topic.py
osm_nbi/descriptor_topics.py
osm_nbi/engine.py
osm_nbi/html_out.py
osm_nbi/instance_topics.py
osm_nbi/nbi.cfg
osm_nbi/nbi.py
osm_nbi/notifications.py
osm_nbi/osm_vnfm/__init__.py [new file with mode: 0644]
osm_nbi/osm_vnfm/base_methods.py [new file with mode: 0644]
osm_nbi/osm_vnfm/vnf_instance_actions.py [new file with mode: 0644]
osm_nbi/osm_vnfm/vnf_instances.py [new file with mode: 0644]
osm_nbi/osm_vnfm/vnf_subscription.py [new file with mode: 0644]
osm_nbi/resources_to_operations.yml
osm_nbi/roles_to_operations.yml
osm_nbi/subscription_topics.py
osm_nbi/subscriptions.py
osm_nbi/tests/run_test.py [deleted file]
osm_nbi/tests/send_kafka.py [deleted file]
osm_nbi/tests/test_admin_topics.py
osm_nbi/tests/test_base_topic.py
osm_nbi/tests/test_db_descriptors.py
osm_nbi/tests/test_descriptor_topics.py
osm_nbi/tests/test_instance_topics.py
osm_nbi/tests/test_osm_vnfm.py [new file with mode: 0644]
osm_nbi/tests/test_pkg_descriptors.py
osm_nbi/tests/test_pmjobs_topic.py
osm_nbi/tests/upload.py [deleted file]
osm_nbi/utils.py
osm_nbi/validation.py
osm_nbi/vnf_instance_topics.py [new file with mode: 0644]
pyangbind.patch [new file with mode: 0644]
requirements-dev.in
requirements-dev.txt
requirements-dist.in
requirements-dist.txt
requirements-test.in
requirements-test.txt
requirements.in
requirements.txt
tox.ini

index ebfab05..c4c9b6c 100644 (file)
 #   devops-stages/stage-build.sh
 #
 
 #   devops-stages/stage-build.sh
 #
 
-FROM ubuntu:18.04
+FROM ubuntu:22.04
+
+ARG APT_PROXY
+RUN if [ ! -z $APT_PROXY ] ; then \
+    echo "Acquire::http::Proxy \"$APT_PROXY\";" > /etc/apt/apt.conf.d/proxy.conf ;\
+    echo "Acquire::https::Proxy \"$APT_PROXY\";" >> /etc/apt/apt.conf.d/proxy.conf ;\
+    fi
 
 RUN DEBIAN_FRONTEND=noninteractive apt-get update && \
     DEBIAN_FRONTEND=noninteractive apt-get -y install \
         debhelper \
 
 RUN DEBIAN_FRONTEND=noninteractive apt-get update && \
     DEBIAN_FRONTEND=noninteractive apt-get -y install \
         debhelper \
+        dh-python \
         git \
         python3 \
         python3-all \
         python3-dev \
         git \
         python3 \
         python3-all \
         python3-dev \
-        python3-setuptools
+        python3-setuptools \
+        python3-pip \
+        tox
 
 
-RUN python3 -m easy_install pip==21.0.1
-RUN pip3 install tox==3.22.0
+ENV LC_ALL C.UTF-8
+ENV LANG C.UTF-8
 
 RUN DEBIAN_FRONTEND=noninteractive apt-get -y install wget
 
 
 RUN DEBIAN_FRONTEND=noninteractive apt-get -y install wget
 
index 2816d0a..04f2d2e 100644 (file)
@@ -16,7 +16,7 @@
 
 ########################################################################
 
 
 ########################################################################
 
-FROM ubuntu:18.04 as INSTALL
+FROM ubuntu:20.04 as INSTALL
 
 WORKDIR /build
 
 
 WORKDIR /build
 
@@ -46,13 +46,13 @@ RUN python3 -m build /build && \
     python3 -m pip install /build/dist/*.whl
 
 
     python3 -m pip install /build/dist/*.whl
 
 
-FROM ubuntu:18.04 as FINAL
+FROM ubuntu:20.04 as FINAL
 
 RUN DEBIAN_FRONTEND=noninteractive apt-get --yes update && \
     DEBIAN_FRONTEND=noninteractive apt-get --yes install python3-minimal
 
 COPY --from=INSTALL /usr/lib/python3/dist-packages /usr/lib/python3/dist-packages
 
 RUN DEBIAN_FRONTEND=noninteractive apt-get --yes update && \
     DEBIAN_FRONTEND=noninteractive apt-get --yes install python3-minimal
 
 COPY --from=INSTALL /usr/lib/python3/dist-packages /usr/lib/python3/dist-packages
-COPY --from=INSTALL /usr/local/lib/python3.6/dist-packages  /usr/local/lib/python3.6/dist-packages
+COPY --from=INSTALL /usr/local/lib/python3.8/dist-packages  /usr/local/lib/python3.8/dist-packages
 
 RUN mkdir -p /app/storage/kafka && mkdir -p /app/log
 
 
 RUN mkdir -p /app/storage/kafka && mkdir -p /app/log
 
@@ -60,9 +60,9 @@ WORKDIR /app/osm_nbi
 
 EXPOSE 9999
 
 
 EXPOSE 9999
 
-RUN cp -R /usr/local/lib/python3.6/dist-packages/osm_nbi/html_public /app/osm_nbi/html_public
-RUN cp /usr/local/lib/python3.6/dist-packages/osm_nbi/nbi.cfg /app/osm_nbi/
-RUN cp -R /usr/local/lib/python3.6/dist-packages/osm_nbi/http /app/osm_nbi/
+RUN cp -R /usr/local/lib/python3.8/dist-packages/osm_nbi/html_public /app/osm_nbi/html_public
+RUN cp /usr/local/lib/python3.8/dist-packages/osm_nbi/nbi.cfg /app/osm_nbi/
+RUN cp -R /usr/local/lib/python3.8/dist-packages/osm_nbi/http /app/osm_nbi/
 
 # Used for local storage
 VOLUME /app/storage
 
 # Used for local storage
 VOLUME /app/storage
index e283f05..9c0924a 100644 (file)
@@ -1,17 +1,20 @@
-/*
-  Licensed under the Apache License, Version 2.0 (the "License");
-  you may not use this file except in compliance with the License.
-  You may obtain a copy of the License at
+/* Copyright ETSI OSM and others
+ *
+ * All Rights Reserved.
+ *
+ *   Licensed under the Apache License, Version 2.0 (the "License"); you may
+ *   not use this file except in compliance with the License. You may obtain
+ *   a copy of the License at
+ *
+ *        http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *   Unless required by applicable law or agreed to in writing, software
+ *   distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ *   WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ *   License for the specific language governing permissions and limitations
+ *   under the License.
+ */
 
 
-     http://www.apache.org/licenses/LICENSE-2.0
-
-  Unless required by applicable law or agreed to in writing, software
-  distributed under the License is distributed on an "AS IS" BASIS,
-  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
-  implied.
-  See the License for the specific language governing permissions and
-  limitations under the License.
-*/
 properties([
     parameters([
         string(defaultValue: env.BRANCH_NAME, description: '', name: 'GERRIT_BRANCH'),
 properties([
     parameters([
         string(defaultValue: env.BRANCH_NAME, description: '', name: 'GERRIT_BRANCH'),
@@ -30,7 +33,7 @@ def devops_checkout() {
     }
 }
 
     }
 }
 
-node('docker') {
+node('stage_2') {
     checkout scm
     devops_checkout()
 
     checkout scm
     devops_checkout()
 
diff --git a/attic/run_test.py b/attic/run_test.py
new file mode 100755 (executable)
index 0000000..b7768ba
--- /dev/null
@@ -0,0 +1,5889 @@
+#! /usr/bin/python3
+# -*- coding: utf-8 -*-
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+# implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import getopt
+import sys
+import requests
+import json
+import logging
+import yaml
+
+# import json
+# import tarfile
+from time import sleep
+from random import randint
+import os
+from sys import stderr
+from uuid import uuid4
+import re
+
+__author__ = "Alfonso Tierno, alfonso.tiernosepulveda@telefonica.com"
+__date__ = "$2018-03-01$"
+__version__ = "0.3"
+version_date = "Oct 2018"
+
+
def usage():
    """Print the command-line help for this NBI system-test script to stdout.

    Reads the module-level defaults ``timeout``, ``timeout_deploy`` and
    ``timeout_configure`` to show them in the help text. Returns None.
    """
    print("Usage: ", sys.argv[0], "[options]")
    print(
        "      Performs system tests over running NBI. It can be used for real OSM test using option '--test-osm'"
    )
    print(
        "      If this is the case env variables 'OSMNBITEST_VIM_NAME' must be supplied to create a VIM if not exist "
        "where deployment is done"
    )
    print("OPTIONS")
    print("      -h|--help: shows this help")
    print("      --insecure: Allows non trusted https NBI server")
    print("      --list: list available tests")
    print(
        "      --manual-check: Deployment tests stop after deployed to allow manual inspection. Only make sense with "
        "'--test-osm'"
    )
    print("      -p|--password PASSWORD: NBI access password. 'admin' by default")
    # fixed: this line used to read "---project" (three dashes)
    print("      --project PROJECT: NBI access project. 'admin' by default")
    print(
        "      --test TEST[,...]: Execute only a test or a comma separated list of tests"
    )
    print(
        "      --params key=val: params to the previous test. key can be vnfd-files, nsd-file, ns-name, ns-config"
    )
    print(
        "      --test-osm: If missing this test is intended for NBI only, no other OSM components are expected. Use "
        "this flag to test the system. LCM and RO components are expected to be up and running"
    )
    print(
        "      --timeout TIMEOUT: General NBI timeout, by default {}s".format(timeout)
    )
    print(
        "      --timeout-deploy TIMEOUT: Timeout used for getting NS deployed, by default {}s".format(
            timeout_deploy
        )
    )
    print(
        "      --timeout-configure TIMEOUT: Timeout used for getting NS deployed and configured,"
        " by default {}s".format(timeout_configure)
    )
    print("      -u|--user USERNAME: NBI access username. 'admin' by default")
    # fixed: the example URL used to read "https//..." (missing colon)
    print(
        "      --url URL: complete NBI server URL. 'https://localhost:9999/osm' by default"
    )
    print("      -v|--verbose print debug information, can be used several times")
    print("      --no-verbose remove verbosity")
    print("      --version: prints current version")
    print("ENV variables used for real deployment tests with option osm-test.")
    print("      export OSMNBITEST_VIM_NAME=vim-name")
    print("      export OSMNBITEST_VIM_URL=vim-url")
    print("      export OSMNBITEST_VIM_TYPE=vim-type")
    print("      export OSMNBITEST_VIM_TENANT=vim-tenant")
    print("      export OSMNBITEST_VIM_USER=vim-user")
    print("      export OSMNBITEST_VIM_PASSWORD=vim-password")
    print('      export OSMNBITEST_VIM_CONFIG="vim-config"')
    print('      export OSMNBITEST_NS_NAME="vim-config"')
    return
+
+
# Header templates shared by every test below.
# "r_header_*" variants carry only Content-type and are used to check the
# *response* headers; "headers_*" variants also set Accept and are sent with
# the *request*.
r_header_json = {"Content-type": "application/json"}
headers_json = {"Content-type": "application/json", "Accept": "application/json"}
r_header_yaml = {"Content-type": "application/yaml"}
headers_yaml = {"Content-type": "application/yaml", "Accept": "application/yaml"}
r_header_text = {"Content-type": "text/plain"}
r_header_octect = {"Content-type": "application/octet-stream"}
headers_text = {"Accept": "text/plain,application/yaml"}
r_header_zip = {"Content-type": "application/zip"}
headers_zip = {"Accept": "application/zip,application/yaml"}
headers_zip_yaml = {"Accept": "application/yaml", "Content-type": "application/zip"}
headers_zip_json = {"Accept": "application/json", "Content-type": "application/zip"}
headers_txt_json = {"Accept": "application/json", "Content-type": "text/plain"}
# Expected response headers (Location is a prefix match) for each kind of
# created resource.
r_headers_yaml_location_vnfd = {
    "Location": "/vnfpkgm/v1/vnf_packages_content/",
    "Content-Type": "application/yaml",
}
r_headers_yaml_location_nsd = {
    "Location": "/nsd/v1/ns_descriptors_content/",
    "Content-Type": "application/yaml",
}
# NOTE(review): unlike its siblings this Location has no trailing "/" -- confirm intended
r_headers_yaml_location_nst = {
    "Location": "/nst/v1/netslice_templates_content",
    "Content-Type": "application/yaml",
}
# NOTE(review): missing the leading "/" present in the other Locations -- confirm intended
r_headers_yaml_location_nslcmop = {
    "Location": "nslcm/v1/ns_lcm_op_occs/",
    "Content-Type": "application/yaml",
}
r_headers_yaml_location_nsilcmop = {
    "Location": "/osm/nsilcm/v1/nsi_lcm_op_occs/",
    "Content-Type": "application/yaml",
}

# test ones authorized
# Each tuple: (test id, description, HTTP method, URL, request headers,
# payload, expected status code, expected response headers,
# expected payload kind) -- same positional layout as TestRest.test().
test_authorized_list = (
    (
        "AU1",
        "Invalid vnfd id",
        "GET",
        "/vnfpkgm/v1/vnf_packages/non-existing-id",
        headers_json,
        None,
        404,
        r_header_json,
        "json",
    ),
    (
        "AU2",
        "Invalid nsd id",
        "GET",
        "/nsd/v1/ns_descriptors/non-existing-id",
        headers_yaml,
        None,
        404,
        r_header_yaml,
        "yaml",
    ),
    (
        "AU3",
        "Invalid nsd id",
        "DELETE",
        "/nsd/v1/ns_descriptors_content/non-existing-id",
        headers_yaml,
        None,
        404,
        r_header_yaml,
        "yaml",
    ),
)
timeout = 120  # general timeout
timeout_deploy = 60 * 10  # timeout for NS deploying without charms
timeout_configure = 60 * 20  # timeout for NS deploying and configuring
+
+
class TestException(Exception):
    """Raised when a subtest gets an unexpected status code, header or payload."""

    pass
+
+
class TestRest:
    """Small REST test driver against a running NBI.

    Wraps a ``requests`` session and keeps per-run state: the auth token, the
    id extracted from the last ``Location`` response header (``last_id``) and
    the passed/failed subtest counters.
    """

    def __init__(
        self,
        url_base,
        header_base=None,
        verify=False,
        user="admin",
        password="admin",
        project="admin",
    ):
        """
        :param url_base: base NBI URL prepended to relative test URLs
        :param header_base: optional dict of headers sent with every request
        :param verify: TLS verification flag passed to requests
        :param user: NBI username used by get_autorization()
        :param password: NBI password
        :param project: NBI project
        """
        self.url_base = url_base
        if header_base is None:
            self.header_base = {}
        else:
            self.header_base = header_base.copy()
        self.s = requests.session()
        self.s.headers = self.header_base
        self.verify = verify
        self.token = False
        self.user = user
        self.password = password
        self.project = project
        self.vim_id = None
        # contains ID of tests obtained from Location response header. "" key contains last obtained id
        self.last_id = ""
        self.test_name = None
        self.step = 0  # number of subtest under test
        self.passed_tests = 0
        self.failed_tests = 0

    def set_test_name(self, test_name):
        """Start a new named test: reset the step counter and last_id."""
        self.test_name = test_name
        self.step = 0
        self.last_id = ""

    def set_header(self, header):
        """Merge the given dict into the session headers."""
        self.s.headers.update(header)

    def set_tet_name(self, test_name):
        # NOTE(review): misspelled legacy variant of set_test_name that does
        # NOT reset step/last_id; kept unchanged because callers may rely on it
        self.test_name = test_name

    def unset_header(self, key):
        """Remove a header from the session if present (no-op otherwise)."""
        if key in self.s.headers:
            del self.s.headers[key]

    def test(
        self,
        description,
        method,
        url,
        headers,
        payload,
        expected_codes,
        expected_headers,
        expected_payload,
        store_file=None,
        pooling=False,
    ):
        """
        Performs an http request and check http code response. Exit if different than allowed. It get the returned id
        that can be used by following test in the URL with {name} where name is the name of the test
        :param description:  description of the test
        :param method: HTTP method: GET,PUT,POST,DELETE,...
        :param url: complete URL or relative URL
        :param headers: request headers to add to the base headers
        :param payload: Can be a dict, transformed to json, a text or a file if starts with '@'
        :param expected_codes: expected response codes, can be int, int tuple or int range
        :param expected_headers: expected response headers, dict with key values
        :param expected_payload: expected payload, 0 if empty, 'yaml', 'json', 'text', 'zip', 'octet-stream'
        :param store_file: filename to store content
        :param pooling: if True do not count neither log this test. Because a pooling is done with many equal requests
        :return: requests response
        """
        r = None
        try:
            if not self.s:
                self.s = requests.session()
            # URL
            if not url:
                url = self.url_base
            elif not url.startswith("http"):
                url = self.url_base + url

            # replace url <> with the last ID
            url = url.replace("<>", self.last_id)
            if payload:
                if isinstance(payload, str):
                    # "@file" loads text content; "@bfile" loads binary content
                    if payload.startswith("@"):
                        mode = "r"
                        file_name = payload[1:]
                        if payload.startswith("@b"):
                            mode = "rb"
                            file_name = payload[2:]
                        with open(file_name, mode) as f:
                            payload = f.read()
                elif isinstance(payload, dict):
                    payload = json.dumps(payload)

            if not pooling:
                test_description = "Test {}{} {} {} {}".format(
                    self.test_name, self.step, description, method, url
                )
                logger.warning(test_description)
                self.step += 1
            # stream the response when the body is binary or will be saved
            stream = False
            if expected_payload in ("zip", "octet-string") or store_file:
                stream = True
            # retry up to 3 times on connection errors
            __retry = 0
            while True:
                try:
                    r = getattr(self.s, method.lower())(
                        url,
                        data=payload,
                        headers=headers,
                        verify=self.verify,
                        stream=stream,
                    )
                    break
                except requests.exceptions.ConnectionError as e:
                    if __retry == 2:
                        raise
                    logger.error("Exception {}. Retrying".format(e))
                    __retry += 1

            if expected_payload in ("zip", "octet-string") or store_file:
                logger.debug("RX {}".format(r.status_code))
            else:
                logger.debug("RX {}: {}".format(r.status_code, r.text))

            # check response
            if expected_codes:
                if isinstance(expected_codes, int):
                    expected_codes = (expected_codes,)
                if r.status_code not in expected_codes:
                    raise TestException(
                        "Got status {}. Expected {}. {}".format(
                            r.status_code, expected_codes, r.text
                        )
                    )

            if expected_headers:
                for header_key, header_val in expected_headers.items():
                    if header_key.lower() not in r.headers:
                        raise TestException("Header {} not present".format(header_key))
                    if header_val and header_val.lower() not in r.headers[header_key]:
                        raise TestException(
                            "Header {} does not contain {} but {}".format(
                                header_key, header_val, r.headers[header_key]
                            )
                        )

            if expected_payload is not None:
                if expected_payload == 0 and len(r.content) > 0:
                    raise TestException("Expected empty payload")
                elif expected_payload == "json":
                    try:
                        r.json()
                    except Exception as e:
                        raise TestException(
                            "Expected json response payload, but got Exception {}".format(
                                e
                            )
                        )
                elif expected_payload == "yaml":
                    try:
                        yaml.safe_load(r.text)
                    except Exception as e:
                        raise TestException(
                            "Expected yaml response payload, but got Exception {}".format(
                                e
                            )
                        )
                elif expected_payload in ("zip", "octet-string"):
                    if len(r.content) == 0:
                        raise TestException(
                            "Expected some response payload, but got empty"
                        )
                    # try:
                    #     tar = tarfile.open(None, 'r:gz', fileobj=r.raw)
                    #     for tarinfo in tar:
                    #         tarname = tarinfo.name
                    #         print(tarname)
                    # except Exception as e:
                    #     raise TestException("Expected zip response payload, but got Exception {}".format(e))
                elif expected_payload == "text":
                    if len(r.content) == 0:
                        raise TestException(
                            "Expected some response payload, but got empty"
                        )
                    # r.text
            if store_file:
                with open(store_file, "wb") as fd:
                    for chunk in r.iter_content(chunk_size=128):
                        fd.write(chunk)

            # remember the id of a created resource for "<>" substitution
            location = r.headers.get("Location")
            if location:
                _id = location[location.rfind("/") + 1 :]
                if _id:
                    self.last_id = str(_id)
            if not pooling:
                self.passed_tests += 1
            return r
        except TestException as e:
            self.failed_tests += 1
            r_status_code = None
            r_text = None
            if r:
                r_status_code = r.status_code
                r_text = r.text
            logger.error("{} \nRX code{}: {}".format(e, r_status_code, r_text))
            return None
            # exit(1)
        except IOError as e:
            if store_file:
                logger.error("Cannot open file {}: {}".format(store_file, e))
            else:
                logger.error("Exception: {}".format(e), exc_info=True)
            self.failed_tests += 1
            return None
            # exit(1)
        except requests.exceptions.RequestException as e:
            logger.error("Exception: {}".format(e))

    def get_autorization(self):  # user=None, password=None, project=None):
        """Obtain a bearer token (once) and install it in the session headers.

        NOTE(review): misspelled name ("autorization") kept unchanged because
        it is part of the public interface used by the test classes.
        """
        if (
            self.token
        ):  # and self.user == user and self.password == password and self.project == project:
            return
        # self.user = user
        # self.password = password
        # self.project = project
        r = self.test(
            "Obtain token",
            "POST",
            "/admin/v1/tokens",
            headers_json,
            {
                "username": self.user,
                "password": self.password,
                "project_id": self.project,
            },
            (200, 201),
            r_header_json,
            "json",
        )
        if not r:
            return
        response = r.json()
        self.token = response["id"]
        self.set_header({"Authorization": "Bearer {}".format(self.token)})

    def remove_authorization(self):
        """Delete the current token on the server and drop the auth header."""
        if self.token:
            self.test(
                "Delete token",
                "DELETE",
                "/admin/v1/tokens/{}".format(self.token),
                headers_json,
                None,
                (200, 201, 204),
                None,
                None,
            )
        self.token = None
        self.unset_header("Authorization")

    def get_create_vim(self, test_osm):
        """Return the id of a usable VIM, creating it if needed.

        With test_osm a real VIM is built from OSMNBITEST_VIM_* env variables;
        otherwise a fake openstack VIM is created.
        """
        if self.vim_id:
            return self.vim_id
        self.get_autorization()
        if test_osm:
            vim_name = os.environ.get("OSMNBITEST_VIM_NAME")
            if not vim_name:
                raise TestException(
                    "Needed to define OSMNBITEST_VIM_XXX variables to create a real VIM for deployment"
                )
        else:
            vim_name = "fakeVim"
        # Get VIM
        r = self.test(
            "Get VIM ID",
            "GET",
            "/admin/v1/vim_accounts?name={}".format(vim_name),
            headers_json,
            None,
            200,
            r_header_json,
            "json",
        )
        if not r:
            return
        vims = r.json()
        if vims:
            return vims[0]["_id"]
        # Add VIM
        if test_osm:
            # check needed environ parameters:
            if not os.environ.get("OSMNBITEST_VIM_URL") or not os.environ.get(
                "OSMNBITEST_VIM_TENANT"
            ):
                raise TestException(
                    "Env OSMNBITEST_VIM_URL and OSMNBITEST_VIM_TENANT are needed for create a real VIM"
                    " to deploy on whit the --test-osm option"
                )
            # vim_data is a YAML flow mapping built as text; the closing "}"
            # is appended after the optional config below
            vim_data = (
                "{{schema_version: '1.0', name: '{}', vim_type: {}, vim_url: '{}',"
                "vim_tenant_name: '{}', "
                "vim_user: {}, vim_password: {}"
            ).format(
                vim_name,
                os.environ.get("OSMNBITEST_VIM_TYPE", "openstack"),
                os.environ.get("OSMNBITEST_VIM_URL"),
                os.environ.get("OSMNBITEST_VIM_TENANT"),
                os.environ.get("OSMNBITEST_VIM_USER"),
                os.environ.get("OSMNBITEST_VIM_PASSWORD"),
            )
            if os.environ.get("OSMNBITEST_VIM_CONFIG"):
                vim_data += " ,config: {}".format(
                    os.environ.get("OSMNBITEST_VIM_CONFIG")
                )
            vim_data += "}"
        else:
            vim_data = (
                "{schema_version: '1.0', name: fakeVim, vim_type: openstack, vim_url: 'http://10.11.12.13/fake'"
                ", vim_tenant_name: 'vimtenant', vim_user: vimuser, vim_password: vimpassword}"
            )
        self.test(
            "Create VIM",
            "POST",
            "/admin/v1/vim_accounts",
            headers_yaml,
            vim_data,
            (201, 202),
            {"Location": "/admin/v1/vim_accounts/", "Content-Type": "application/yaml"},
            "yaml",
        )
        return self.last_id

    def print_results(self):
        """Print the final passed/failed summary to stdout."""
        print("\n\n\n--------------------------------------------")
        print(
            "TEST RESULTS: Total: {}, Passed: {}, Failed: {}".format(
                self.passed_tests + self.failed_tests,
                self.passed_tests,
                self.failed_tests,
            )
        )
        print("--------------------------------------------")

    def wait_until_delete(self, url_op, timeout_delete):
        """
        Make a pooling until topic is not present, because of deleted
        :param url_op:
        :param timeout_delete:
        :return:
        """
        description = "Wait to topic being deleted"
        test_description = "Test {}{} {} {} {}".format(
            self.test_name, self.step, description, "GET", url_op
        )
        logger.warning(test_description)
        self.step += 1

        wait = timeout_delete
        while wait >= 0:
            # BUGFIX: a stray extra positional None used to shift the
            # arguments so that expected_payload received the r_header_yaml
            # dict and store_file received "yaml", silently writing every
            # polled response to a file named "yaml". Arguments now align
            # with the test() signature.
            r = self.test(
                description,
                "GET",
                url_op,
                headers_yaml,
                None,
                (200, 404),
                None,
                "yaml",
                pooling=True,
            )
            if not r:
                return
            if r.status_code == 404:
                self.passed_tests += 1
                break
            elif r.status_code == 200:
                wait -= 5
                sleep(5)
        else:
            # BUGFIX: the counter increment used to sit after the raise and
            # was unreachable; count the failure before raising
            self.failed_tests += 1
            raise TestException(
                "Topic is not deleted after {} seconds".format(timeout_delete)
            )

    def wait_operation_ready(self, ns_nsi, opp_id, timeout, expected_fail=False):
        """
        Wait until nslcmop or nsilcmop finished
        :param ns_nsi: "ns" o "nsi"
        :param opp_id: Id of the operation
        :param timeout:
        :param expected_fail:
        :return: None. Updates passed/failed_tests
        """
        if ns_nsi == "ns":
            url_op = "/nslcm/v1/ns_lcm_op_occs/{}".format(opp_id)
        else:
            url_op = "/nsilcm/v1/nsi_lcm_op_occs/{}".format(opp_id)
        description = "Wait to {} lcm operation complete".format(ns_nsi)
        test_description = "Test {}{} {} {} {}".format(
            self.test_name, self.step, description, "GET", url_op
        )
        logger.warning(test_description)
        self.step += 1
        wait = timeout
        while wait >= 0:
            r = self.test(
                description,
                "GET",
                url_op,
                headers_json,
                None,
                200,
                r_header_json,
                "json",
                pooling=True,
            )
            if not r:
                return
            nslcmop = r.json()
            if "COMPLETED" in nslcmop["operationState"]:
                if expected_fail:
                    logger.error(
                        "NS terminate has success, expecting failing: {}".format(
                            nslcmop["detailed-status"]
                        )
                    )
                    self.failed_tests += 1
                else:
                    self.passed_tests += 1
                break
            elif "FAILED" in nslcmop["operationState"]:
                if not expected_fail:
                    logger.error(
                        "NS terminate has failed: {}".format(nslcmop["detailed-status"])
                    )
                    self.failed_tests += 1
                else:
                    self.passed_tests += 1
                break

            print(".", end="", file=stderr)
            wait -= 10
            sleep(10)
        else:
            self.failed_tests += 1
            logger.error(
                "NS instantiate is not terminate after {} seconds".format(timeout)
            )
            return
        print("", file=stderr)
+
+
class TestNonAuthorized:
    """Checks that unauthenticated requests and bad URLs/versions are rejected."""

    description = "Test invalid URLs. methods and no authorization"

    @staticmethod
    def run(engine, test_osm, manual_check, test_params=None):
        """Run the three negative subtests: bad token, bad URL, bad version."""
        engine.set_test_name("NonAuth")
        engine.remove_authorization()
        # (description, method, url, request headers, payload,
        #  expected code, expected response headers, payload kind)
        cases = [
            (
                "Invalid token",
                "GET",
                "/admin/v1/users",
                headers_json,
                None,
                401,
                r_header_json,
                "json",
            ),
            (
                "Invalid URL",
                "POST",
                "/admin/v1/nonexist",
                headers_yaml,
                None,
                405,
                r_header_yaml,
                "yaml",
            ),
            (
                "Invalid version",
                "DELETE",
                "/admin/v2/users",
                headers_yaml,
                None,
                405,
                r_header_yaml,
                "yaml",
            ),
        ]
        for case in cases:
            engine.test(*case)
+
+
class TestUsersProjects:
    description = "test project and user creation"

    @staticmethod
    def run(engine, test_osm, manual_check, test_params=None):
        """End-to-end test of project/user CRUD through the NBI admin API.

        Phases:
          1. Create projects (normal, admin, bad-format) and look up roles.
          2. Create users with project/role mappings (including a forced
             creation referencing a non-existent project).
          3. Switch tokens to non-admin and admin projects and verify that
             authorization is enforced accordingly.
          4. Delete everything created, then repeat addressing projects and
             users both by name and by ID (create/get/rename/delete).

        :param engine: test engine providing test/set_header/get_autorization/...
        :param test_osm: unused here (kept for the common TestX.run signature)
        :param manual_check: unused here (kept for the common TestX.run signature)
        :param test_params: optional dict; its "backend" key is currently unused
        """
        engine.set_test_name("UserProject")
        # backend = test_params.get("backend") if test_params else None   # UNUSED

        # Initialisation
        p1 = p2 = p3 = None
        padmin = pbad = None
        u1 = u2 = u3 = u4 = None

        engine.get_autorization()

        res = engine.test(
            "Create project non admin 1",
            "POST",
            "/admin/v1/projects",
            headers_json,
            {"name": "P1"},
            (201, 204),
            {"Location": "/admin/v1/projects/", "Content-Type": "application/json"},
            "json",
        )
        p1 = engine.last_id if res else None

        res = engine.test(
            "Create project admin",
            "POST",
            "/admin/v1/projects",
            headers_json,
            {"name": "Padmin", "admin": True},
            (201, 204),
            {"Location": "/admin/v1/projects/", "Content-Type": "application/json"},
            "json",
        )
        padmin = engine.last_id if res else None

        res = engine.test(
            "Create project bad format",
            "POST",
            "/admin/v1/projects",
            headers_json,
            {"name": 1},
            (400, 422),
            r_header_json,
            "json",
        )
        pbad = engine.last_id if res else None

        # Resolve the role ids needed for project_role_mappings below
        res = engine.test(
            "Get project admin role",
            "GET",
            "/admin/v1/roles?name=project_admin",
            headers_json,
            {},
            (200),
            {"Content-Type": "application/json"},
            "json",
        )
        rpa = res.json()[0]["_id"] if res else None
        res = engine.test(
            "Get project user role",
            "GET",
            "/admin/v1/roles?name=project_user",
            headers_json,
            {},
            (200),
            {"Content-Type": "application/json"},
            "json",
        )
        rpu = res.json()[0]["_id"] if res else None
        res = engine.test(
            "Get system admin role",
            "GET",
            "/admin/v1/roles?name=system_admin",
            headers_json,
            {},
            (200),
            {"Content-Type": "application/json"},
            "json",
        )
        rsa = res.json()[0]["_id"] if res else None

        data = {"username": "U1", "password": "pw1"}
        # p2 is a random uuid: a project that does not exist, to exercise FORCE
        p2 = uuid4().hex
        data["project_role_mappings"] = [
            {"project": p1, "role": rpa},
            {"project": p2, "role": rpa},
            {"project": padmin, "role": rpu},
        ]
        rc = 201
        xhd = {"Location": "/admin/v1/users/", "Content-Type": "application/json"}
        res = engine.test(
            "Create user with bad project and force",
            "POST",
            "/admin/v1/users?FORCE=True",
            headers_json,
            data,
            rc,
            xhd,
            "json",
        )
        if res:
            u1 = engine.last_id
        else:
            # User is created sometimes even though an exception is raised
            res = engine.test(
                "Get user U1",
                "GET",
                "/admin/v1/users?username=U1",
                headers_json,
                {},
                (200),
                {"Content-Type": "application/json"},
                "json",
            )
            u1 = res.json()[0]["_id"] if res else None

        data = {"username": "U2", "password": "pw2"}
        data["project_role_mappings"] = [
            {"project": p1, "role": rpa},
            {"project": padmin, "role": rsa},
        ]
        res = engine.test(
            "Create user 2",
            "POST",
            "/admin/v1/users",
            headers_json,
            data,
            201,
            {"Location": "/admin/v1/users/", "Content-Type": "application/json"},
            "json",
        )
        u2 = engine.last_id if res else None

        if u1:
            ftt = "project_role_mappings"
            xpr = [{"project": p1, "role": rpa}, {"project": padmin, "role": rpu}]
            data = {ftt: xpr}
            engine.test(
                "Edit user U1, delete  P2 project",
                "PATCH",
                "/admin/v1/users/" + u1,
                headers_json,
                data,
                204,
                None,
                None,
            )
            res = engine.test(
                "Check user U1, contains the right projects",
                "GET",
                "/admin/v1/users/" + u1,
                headers_json,
                None,
                200,
                None,
                "json",  # BUG FIX: was the bare name `json` (the module object), not the format string
            )
            if res:
                rj = res.json()
                xpr[0]["project_name"] = "P1"
                xpr[0]["role_name"] = "project_admin"
                xpr[1]["project_name"] = "Padmin"
                xpr[1]["role_name"] = "project_user"
                ok = True
                for pr in rj[ftt]:
                    if pr not in xpr:
                        ok = False
                for pr in xpr:
                    if pr not in rj[ftt]:
                        ok = False
                if not ok:
                    logger.error(
                        "User {} '{}' are different than expected '{}'. Edition was not done properly".format(
                            ftt, rj[ftt], xpr
                        )
                    )
                    engine.failed_tests += 1

        p2 = None  # To prevent deletion attempts

        # Add a test of 'default project' for Keystone?

        if u2:
            engine.test(
                "Edit user U2, change password",
                "PUT",
                "/admin/v1/users/" + u2,
                headers_json,
                {"password": "pw2_new"},
                204,
                None,
                None,
            )

        if p1:
            engine.test(
                "Change to project P1 non existing",
                "POST",
                "/admin/v1/tokens/",
                headers_json,
                {"project_id": p1},
                401,
                r_header_json,
                "json",
            )

        if u2 and p1:
            res = engine.test(
                "Change to user U2 project P1",
                "POST",
                "/admin/v1/tokens",
                headers_json,
                {"username": "U2", "password": "pw2_new", "project_id": "P1"},
                (200, 201),
                r_header_json,
                "json",
            )
            if res:
                rj = res.json()
                engine.set_header({"Authorization": "Bearer {}".format(rj["id"])})

                # From here on we act as a non-admin user: writes must be rejected
                engine.test(
                    "Edit user projects non admin",
                    "PUT",
                    "/admin/v1/users/U1",
                    headers_json,
                    {"remove_project_role_mappings": [{"project": "P1", "role": None}]},
                    401,
                    r_header_json,
                    "json",
                )

                res = engine.test(
                    "Add new project non admin",
                    "POST",
                    "/admin/v1/projects",
                    headers_json,
                    {"name": "P2"},
                    401,
                    r_header_json,
                    "json",
                )
                if res is None or res.status_code == 201:
                    # The project has been created even though it shouldn't
                    res = engine.test(
                        "Get project P2",
                        "GET",
                        "/admin/v1/projects/P2",
                        headers_json,
                        None,
                        200,
                        r_header_json,
                        "json",
                    )
                    p2 = res.json()["_id"] if res else None

                if p1:
                    data = {"username": "U3", "password": "pw3"}
                    data["project_role_mappings"] = [{"project": p1, "role": rpu}]
                    res = engine.test(
                        "Add new user non admin",
                        "POST",
                        "/admin/v1/users",
                        headers_json,
                        data,
                        401,
                        r_header_json,
                        "json",
                    )
                    if res is None or res.status_code == 201:
                        # The user has been created even though it shouldn't
                        res = engine.test(
                            "Get user U3",
                            "GET",
                            "/admin/v1/users/U3",
                            headers_json,
                            None,
                            200,
                            r_header_json,
                            "json",
                        )
                        u3 = res.json()["_id"] if res else None
                else:
                    u3 = None

                if padmin:
                    res = engine.test(
                        "Change to user U2 project Padmin",
                        "POST",
                        "/admin/v1/tokens",
                        headers_json,
                        {
                            "project_id": "Padmin"
                        },  # Caused a Keystone authentication error
                        # {"username": "U2", "password": "pw2_new", "project_id": "Padmin"},
                        (200, 201),
                        r_header_json,
                        "json",
                    )
                    if res:
                        rj = res.json()
                        engine.set_header(
                            {"Authorization": "Bearer {}".format(rj["id"])}
                        )

                        # Now acting under the admin project: writes must succeed
                        res = engine.test(
                            "Add new project admin",
                            "POST",
                            "/admin/v1/projects",
                            headers_json,
                            {"name": "P3"},
                            (201, 204),
                            {
                                "Location": "/admin/v1/projects/",
                                "Content-Type": "application/json",
                            },
                            "json",
                        )
                        p3 = engine.last_id if res else None

                        if p1:
                            data = {"username": "U4", "password": "pw4"}
                            data["project_role_mappings"] = [
                                {"project": p1, "role": rpa}
                            ]
                            res = engine.test(
                                "Add new user admin",
                                "POST",
                                "/admin/v1/users",
                                headers_json,
                                data,
                                (201, 204),
                                {
                                    "Location": "/admin/v1/users/",
                                    "Content-Type": "application/json",
                                },
                                "json",
                            )
                            u4 = engine.last_id if res else None
                        else:
                            u4 = None

                        if u4 and p3:
                            data = {
                                "project_role_mappings": [{"project": p3, "role": rpa}]
                            }
                            engine.test(
                                "Edit user projects admin",
                                "PUT",
                                "/admin/v1/users/U4",
                                headers_json,
                                data,
                                204,
                                None,
                                None,
                            )
                            # Project is deleted even though it shouldn't - PROVISIONAL?
                            res = engine.test(
                                "Delete project P3 conflict",
                                "DELETE",
                                "/admin/v1/projects/" + p3,
                                headers_json,
                                None,
                                409,
                                None,
                                None,
                            )
                            if res and res.status_code in (200, 204):
                                p3 = None
                            if p3:
                                res = engine.test(
                                    "Delete project P3 forcing",
                                    "DELETE",
                                    "/admin/v1/projects/" + p3 + "?FORCE=True",
                                    headers_json,
                                    None,
                                    204,
                                    None,
                                    None,
                                )
                                if res and res.status_code in (200, 204):
                                    p3 = None

                        if u2:
                            res = engine.test(
                                "Delete user U2. Conflict deleting own user",
                                "DELETE",
                                "/admin/v1/users/" + u2,
                                headers_json,
                                None,
                                409,
                                r_header_json,
                                "json",
                            )
                            if res is None or res.status_code in (200, 204):
                                u2 = None
                        if u4:
                            res = engine.test(
                                "Delete user U4",
                                "DELETE",
                                "/admin/v1/users/" + u4,
                                headers_json,
                                None,
                                204,
                                None,
                                None,
                            )
                            if res and res.status_code in (200, 204):
                                u4 = None
                        if p3:
                            res = engine.test(
                                "Delete project P3",
                                "DELETE",
                                "/admin/v1/projects/" + p3,
                                headers_json,
                                None,
                                204,
                                None,
                                None,
                            )
                            if res and res.status_code in (200, 204):
                                p3 = None

                if u3:
                    res = engine.test(
                        "Delete user U3",
                        "DELETE",
                        "/admin/v1/users/" + u3,
                        headers_json,
                        None,
                        204,
                        None,
                        None,
                    )
                    if res:
                        u3 = None

        # change to admin
        engine.remove_authorization()  # To force get authorization
        engine.get_autorization()
        # Cleanup of anything still left over from the phases above
        if u1:
            engine.test(
                "Delete user U1",
                "DELETE",
                "/admin/v1/users/" + u1,
                headers_json,
                None,
                204,
                None,
                None,
            )
        if u2:
            engine.test(
                "Delete user U2",
                "DELETE",
                "/admin/v1/users/" + u2,
                headers_json,
                None,
                204,
                None,
                None,
            )
        if u3:
            engine.test(
                "Delete user U3",
                "DELETE",
                "/admin/v1/users/" + u3,
                headers_json,
                None,
                204,
                None,
                None,
            )
        if u4:
            engine.test(
                "Delete user U4",
                "DELETE",
                "/admin/v1/users/" + u4,
                headers_json,
                None,
                204,
                None,
                None,
            )
        if p1:
            engine.test(
                "Delete project P1",
                "DELETE",
                "/admin/v1/projects/" + p1,
                headers_json,
                None,
                204,
                None,
                None,
            )
        if p2:
            engine.test(
                "Delete project P2",
                "DELETE",
                "/admin/v1/projects/" + p2,
                headers_json,
                None,
                204,
                None,
                None,
            )
        if p3:
            engine.test(
                "Delete project P3",
                "DELETE",
                "/admin/v1/projects/" + p3,
                headers_json,
                None,
                204,
                None,
                None,
            )
        if padmin:
            engine.test(
                "Delete project Padmin",
                "DELETE",
                "/admin/v1/projects/" + padmin,
                headers_json,
                None,
                204,
                None,
                None,
            )
        if pbad:
            engine.test(
                "Delete bad project",
                "DELETE",
                "/admin/v1/projects/" + pbad,
                headers_json,
                None,
                204,
                None,
                None,
            )

        # BEGIN New Tests - Addressing Projects/Users by Name/ID
        pid1 = pid2 = None
        uid1 = uid2 = None
        res = engine.test(
            "Create new project P1",
            "POST",
            "/admin/v1/projects",
            headers_json,
            {"name": "P1"},
            201,
            {"Location": "/admin/v1/projects/", "Content-Type": "application/json"},
            "json",
        )
        if res:
            pid1 = res.json()["id"]
            # print("# pid =", pid1)
        res = engine.test(
            "Create new project P2",
            "POST",
            "/admin/v1/projects",
            headers_json,
            {"name": "P2"},
            201,
            {"Location": "/admin/v1/projects/", "Content-Type": "application/json"},
            "json",
        )
        if res:
            pid2 = res.json()["id"]
            # print("# pid =", pid2)
        data = {"username": "U1", "password": "pw1"}
        data["project_role_mappings"] = [{"project": pid1, "role": rpu}]
        res = engine.test(
            "Create new user U1",
            "POST",
            "/admin/v1/users",
            headers_json,
            data,
            201,
            {"Location": "/admin/v1/users/", "Content-Type": "application/json"},
            "json",
        )
        if res:
            uid1 = res.json()["id"]
            # print("# uid =", uid1)
        data = {"username": "U2", "password": "pw2"}
        data["project_role_mappings"] = [{"project": pid2, "role": rpu}]
        res = engine.test(
            "Create new user U2",
            "POST",
            "/admin/v1/users",
            headers_json,
            data,
            201,
            {"Location": "/admin/v1/users/", "Content-Type": "application/json"},
            "json",
        )
        if res:
            uid2 = res.json()["id"]
            # print("# uid =", uid2)
        if pid1:
            engine.test(
                "Get Project P1 by Name",
                "GET",
                "/admin/v1/projects/P1",
                headers_json,
                None,
                200,
                None,
                "json",
            )
            engine.test(
                "Get Project P1 by ID",
                "GET",
                "/admin/v1/projects/" + pid1,
                headers_json,
                None,
                200,
                None,
                "json",
            )
        if uid1:
            engine.test(
                "Get User U1 by Name",
                "GET",
                "/admin/v1/users/U1",
                headers_json,
                None,
                200,
                None,
                "json",
            )
            engine.test(
                "Get User U1 by ID",
                "GET",
                "/admin/v1/users/" + uid1,
                headers_json,
                None,
                200,
                None,
                "json",
            )
        if pid1:
            res = engine.test(
                "Rename Project P1 by Name",
                "PUT",
                "/admin/v1/projects/P1",
                headers_json,
                {"name": "P3"},
                204,
                None,
                None,
            )
            if res:
                engine.test(
                    "Get Project P1 by new Name",
                    "GET",
                    "/admin/v1/projects/P3",
                    headers_json,
                    None,
                    200,
                    None,
                    "json",
                )
        if pid2:
            res = engine.test(
                "Rename Project P2 by ID",
                "PUT",
                "/admin/v1/projects/" + pid2,
                headers_json,
                {"name": "P4"},
                204,
                None,
                None,
            )
            if res:
                engine.test(
                    "Get Project P2 by new Name",
                    "GET",
                    "/admin/v1/projects/P4",
                    headers_json,
                    None,
                    200,
                    None,
                    "json",
                )

        if uid1:
            res = engine.test(
                "Rename User U1 by Name",
                "PUT",
                "/admin/v1/users/U1",
                headers_json,
                {"username": "U3"},
                204,
                None,
                None,
            )
            if res:
                engine.test(
                    "Get User U1 by new Name",
                    "GET",
                    "/admin/v1/users/U3",
                    headers_json,
                    None,
                    200,
                    None,
                    "json",
                )

        if uid2:
            res = engine.test(
                "Rename User U2 by ID",
                "PUT",
                "/admin/v1/users/" + uid2,
                headers_json,
                {"username": "U4"},
                204,
                None,
                None,
            )
            if res:
                engine.test(
                    "Get User U2 by new Name",
                    "GET",
                    "/admin/v1/users/U4",
                    headers_json,
                    None,
                    200,
                    None,
                    "json",
                )
        if uid1:
            res = engine.test(
                "Delete User U1 by Name",
                "DELETE",
                "/admin/v1/users/U3",
                headers_json,
                None,
                204,
                None,
                None,
            )
            if res:
                uid1 = None

        if uid2:
            res = engine.test(
                "Delete User U2 by ID",
                "DELETE",
                "/admin/v1/users/" + uid2,
                headers_json,
                None,
                204,
                None,
                None,
            )
            if res:
                uid2 = None

        if pid1:
            res = engine.test(
                "Delete Project P1 by Name",
                "DELETE",
                "/admin/v1/projects/P3",
                headers_json,
                None,
                204,
                None,
                None,
            )
            if res:
                pid1 = None

        if pid2:
            res = engine.test(
                "Delete Project P2 by ID",
                "DELETE",
                "/admin/v1/projects/" + pid2,
                headers_json,
                None,
                204,
                None,
                None,
            )
            if res:
                pid2 = None

        # END New Tests - Addressing Projects/Users by Name

        # CLEANUP
        if pid1:
            engine.test(
                "Delete Project P1",
                "DELETE",
                "/admin/v1/projects/" + pid1,
                headers_json,
                None,
                204,
                None,
                None,
            )
        if pid2:
            engine.test(
                "Delete Project P2",
                "DELETE",
                "/admin/v1/projects/" + pid2,
                headers_json,
                None,
                204,
                None,
                None,
            )
        if uid1:
            engine.test(
                "Delete User U1",
                "DELETE",
                "/admin/v1/users/" + uid1,
                headers_json,
                None,
                204,
                None,
                None,
            )
        if uid2:
            engine.test(
                "Delete User U2",
                "DELETE",
                "/admin/v1/users/" + uid2,
                headers_json,
                None,
                204,
                None,
                None,
            )

        engine.remove_authorization()  # To finish
+
+class TestProjectsDescriptors:
+    description = "test descriptors visibility among projects"
+
+    @staticmethod
+    def run(engine, test_osm, manual_check, test_params=None):
+        vnfd_ids = []
+        engine.set_test_name("ProjectDescriptors")
+        engine.get_autorization()
+
+        project_admin_id = None
+        res = engine.test(
+            "Get my project Padmin",
+            "GET",
+            "/admin/v1/projects/{}".format(engine.project),
+            headers_json,
+            None,
+            200,
+            r_header_json,
+            "json",
+        )
+        if res:
+            response = res.json()
+            project_admin_id = response["_id"]
+        engine.test(
+            "Create project Padmin",
+            "POST",
+            "/admin/v1/projects",
+            headers_json,
+            {"name": "Padmin", "admin": True},
+            (201, 204),
+            {"Location": "/admin/v1/projects/", "Content-Type": "application/json"},
+            "json",
+        )
+        engine.test(
+            "Create project P2",
+            "POST",
+            "/admin/v1/projects",
+            headers_json,
+            {"name": "P2"},
+            (201, 204),
+            {"Location": "/admin/v1/projects/", "Content-Type": "application/json"},
+            "json",
+        )
+        engine.test(
+            "Create project P3",
+            "POST",
+            "/admin/v1/projects",
+            headers_json,
+            {"name": "P3"},
+            (201, 204),
+            {"Location": "/admin/v1/projects/", "Content-Type": "application/json"},
+            "json",
+        )
+
+        engine.test(
+            "Create user U1",
+            "POST",
+            "/admin/v1/users",
+            headers_json,
+            {
+                "username": "U1",
+                "password": "pw1",
+                "project_role_mappings": [
+                    {"project": "Padmin", "role": "system_admin"},
+                    {"project": "P2", "role": "project_admin"},
+                    {"project": "P3", "role": "project_admin"},
+                ],
+            },
+            201,
+            {"Location": "/admin/v1/users/", "Content-Type": "application/json"},
+            "json",
+        )
+
+        engine.test(
+            "Onboard VNFD id1",
+            "POST",
+            "/vnfpkgm/v1/vnf_packages_content?id=id1",
+            headers_yaml,
+            TestDescriptors.vnfd_empty,
+            201,
+            r_headers_yaml_location_vnfd,
+            "yaml",
+        )
+        vnfd_ids.append(engine.last_id)
+        engine.test(
+            "Onboard VNFD id2 PUBLIC",
+            "POST",
+            "/vnfpkgm/v1/vnf_packages_content?id=id2&PUBLIC=TRUE",
+            headers_yaml,
+            TestDescriptors.vnfd_empty,
+            201,
+            r_headers_yaml_location_vnfd,
+            "yaml",
+        )
+        vnfd_ids.append(engine.last_id)
+        engine.test(
+            "Onboard VNFD id3",
+            "POST",
+            "/vnfpkgm/v1/vnf_packages_content?id=id3&PUBLIC=FALSE",
+            headers_yaml,
+            TestDescriptors.vnfd_empty,
+            201,
+            r_headers_yaml_location_vnfd,
+            "yaml",
+        )
+        vnfd_ids.append(engine.last_id)
+
+        res = engine.test(
+            "Get VNFD descriptors",
+            "GET",
+            "/vnfpkgm/v1/vnf_packages?id=id1,id2,id3",
+            headers_json,
+            None,
+            200,
+            r_header_json,
+            "json",
+        )
+        response = res.json()
+        if len(response) != 3:
+            logger.error(
+                "Only 3 vnfds should be present for project admin. {} listed".format(
+                    len(response)
+                )
+            )
+            engine.failed_tests += 1
+
+        # Change to other project Padmin
+        res = engine.test(
+            "Change to user U1 project Padmin",
+            "POST",
+            "/admin/v1/tokens",
+            headers_json,
+            {"username": "U1", "password": "pw1", "project_id": "Padmin"},
+            (200, 201),
+            r_header_json,
+            "json",
+        )
+        if res:
+            response = res.json()
+            engine.set_header({"Authorization": "Bearer {}".format(response["id"])})
+
+        # list vnfds
+        res = engine.test(
+            "List VNFD descriptors for Padmin",
+            "GET",
+            "/vnfpkgm/v1/vnf_packages",
+            headers_json,
+            None,
+            200,
+            r_header_json,
+            "json",
+        )
+        response = res.json()
+        if len(response) != 0:
+            logger.error(
+                "Only 0 vnfds should be present for project Padmin. {} listed".format(
+                    len(response)
+                )
+            )
+            engine.failed_tests += 1
+
+        # list Public vnfds
+        res = engine.test(
+            "List VNFD public descriptors",
+            "GET",
+            "/vnfpkgm/v1/vnf_packages?PUBLIC=True",
+            headers_json,
+            None,
+            200,
+            r_header_json,
+            "json",
+        )
+        response = res.json()
+        if len(response) != 1:
+            logger.error(
+                "Only 1 vnfds should be present for project Padmin. {} listed".format(
+                    len(response)
+                )
+            )
+            engine.failed_tests += 1
+
+        # list vnfds belonging to project "admin"
+        res = engine.test(
+            "List VNFD of admin project",
+            "GET",
+            "/vnfpkgm/v1/vnf_packages?ADMIN={}".format(project_admin_id),
+            headers_json,
+            None,
+            200,
+            r_header_json,
+            "json",
+        )
+        if res:
+            response = res.json()
+            if len(response) != 3:
+                logger.error(
+                    "Only 3 vnfds should be present for project Padmin. {} listed".format(
+                        len(response)
+                    )
+                )
+                engine.failed_tests += 1
+
+        # Get Public vnfds
+        engine.test(
+            "Get VNFD public descriptors",
+            "GET",
+            "/vnfpkgm/v1/vnf_packages/{}".format(vnfd_ids[1]),
+            headers_json,
+            None,
+            200,
+            r_header_json,
+            "json",
+        )
+        # Edit not owned vnfd
+        engine.test(
+            "Edit VNFD ",
+            "PATCH",
+            "/vnfpkgm/v1/vnf_packages/{}".format(vnfd_ids[0]),
+            headers_yaml,
+            "{name: pepe}",
+            404,
+            r_header_yaml,
+            "yaml",
+        )
+
+        # Add to my catalog
+        engine.test(
+            "Add VNFD id2 to my catalog",
+            "PATCH",
+            "/vnfpkgm/v1/vnf_packages/{}?SET_PROJECT".format(vnfd_ids[1]),
+            headers_json,
+            None,
+            204,
+            None,
+            0,
+        )
+
+        # Add a new vnfd
+        engine.test(
+            "Onboard VNFD id4",
+            "POST",
+            "/vnfpkgm/v1/vnf_packages_content?id=id4",
+            headers_yaml,
+            TestDescriptors.vnfd_empty,
+            201,
+            r_headers_yaml_location_vnfd,
+            "yaml",
+        )
+        vnfd_ids.append(engine.last_id)
+
+        # list vnfds
+        res = engine.test(
+            "List VNFD public descriptors",
+            "GET",
+            "/vnfpkgm/v1/vnf_packages",
+            headers_json,
+            None,
+            200,
+            r_header_json,
+            "json",
+        )
+        response = res.json()
+        if len(response) != 2:
+            logger.error(
+                "Only 2 vnfds should be present for project Padmin. {} listed".format(
+                    len(response)
+                )
+            )
+            engine.failed_tests += 1
+
+        if manual_check:
+            input(
+                "VNFDs have been omboarded. Perform manual check and press enter to resume"
+            )
+
+        test_rest.test(
+            "Delete VNFD id2",
+            "DELETE",
+            "/vnfpkgm/v1/vnf_packages/{}".format(vnfd_ids[1]),
+            headers_yaml,
+            None,
+            204,
+            None,
+            0,
+        )
+
+        # change to admin project
+        engine.remove_authorization()  # To force get authorization
+        engine.get_autorization()
+        test_rest.test(
+            "Delete VNFD id1",
+            "DELETE",
+            "/vnfpkgm/v1/vnf_packages/{}".format(vnfd_ids[0]),
+            headers_yaml,
+            None,
+            204,
+            None,
+            0,
+        )
+        test_rest.test(
+            "Delete VNFD id2",
+            "DELETE",
+            "/vnfpkgm/v1/vnf_packages/{}".format(vnfd_ids[1]),
+            headers_yaml,
+            None,
+            204,
+            None,
+            0,
+        )
+        test_rest.test(
+            "Delete VNFD id3",
+            "DELETE",
+            "/vnfpkgm/v1/vnf_packages/{}".format(vnfd_ids[2]),
+            headers_yaml,
+            None,
+            204,
+            None,
+            0,
+        )
+        test_rest.test(
+            "Delete VNFD id4",
+            "DELETE",
+            "/vnfpkgm/v1/vnf_packages/{}".format(vnfd_ids[3]),
+            headers_yaml,
+            None,
+            404,
+            r_header_yaml,
+            "yaml",
+        )
+        test_rest.test(
+            "Delete VNFD id4",
+            "DELETE",
+            "/vnfpkgm/v1/vnf_packages/{}?ADMIN".format(vnfd_ids[3]),
+            headers_yaml,
+            None,
+            204,
+            None,
+            0,
+        )
+        # Get Public vnfds
+        engine.test(
+            "Get VNFD deleted id1",
+            "GET",
+            "/vnfpkgm/v1/vnf_packages/{}?ADMIN".format(vnfd_ids[0]),
+            headers_json,
+            None,
+            404,
+            r_header_json,
+            "json",
+        )
+        engine.test(
+            "Get VNFD deleted id2",
+            "GET",
+            "/vnfpkgm/v1/vnf_packages/{}?ADMIN".format(vnfd_ids[1]),
+            headers_json,
+            None,
+            404,
+            r_header_json,
+            "json",
+        )
+        engine.test(
+            "Get VNFD deleted id3",
+            "GET",
+            "/vnfpkgm/v1/vnf_packages/{}?ADMIN".format(vnfd_ids[2]),
+            headers_json,
+            None,
+            404,
+            r_header_json,
+            "json",
+        )
+        engine.test(
+            "Get VNFD deleted id4",
+            "GET",
+            "/vnfpkgm/v1/vnf_packages/{}?ADMIN".format(vnfd_ids[3]),
+            headers_json,
+            None,
+            404,
+            r_header_json,
+            "json",
+        )
+
+        engine.test(
+            "Delete user U1",
+            "DELETE",
+            "/admin/v1/users/U1",
+            headers_json,
+            None,
+            204,
+            None,
+            None,
+        )
+        engine.test(
+            "Delete project Padmin",
+            "DELETE",
+            "/admin/v1/projects/Padmin",
+            headers_json,
+            None,
+            204,
+            None,
+            None,
+        )
+        engine.test(
+            "Delete project P2",
+            "DELETE",
+            "/admin/v1/projects/P2",
+            headers_json,
+            None,
+            204,
+            None,
+            None,
+        )
+        engine.test(
+            "Delete project P3",
+            "DELETE",
+            "/admin/v1/projects/P3",
+            headers_json,
+            None,
+            204,
+            None,
+            None,
+        )
+
+
class TestFakeVim:
    """Create, edit and delete fake VIMs and SDN controllers."""

    description = "Creates/edit/delete fake VIMs and SDN controllers"

    def __init__(self):
        # Payload for POST /admin/v1/vim_accounts
        self.vim = {
            "schema_version": "1.0",
            "schema_type": "No idea",
            "name": "myVim",
            "description": "Descriptor name",
            "vim_type": "openstack",
            "vim_url": "http://localhost:/vim",
            "vim_tenant_name": "vimTenant",
            "vim_user": "user",
            "vim_password": "password",
            "config": {"config_param": 1},
        }
        # Payload for POST /admin/v1/sdns
        self.sdn = {
            "name": "sdn-name",
            "description": "sdn-description",
            "dpid": "50:50:52:54:00:94:21:21",
            "ip": "192.168.15.17",
            "port": 8080,
            "type": "opendaylight",
            "version": "3.5.6",
            "user": "user",
            "password": "passwd",
        }
        # Two fake compute nodes with two PCI ports each
        self.port_mapping = [
            {
                "compute_node": "compute node 1",
                "ports": [
                    {
                        "pci": "0000:81:00.0",
                        "switch_port": "port-2/1",
                        "switch_mac": "52:54:00:94:21:21",
                    },
                    {
                        "pci": "0000:81:00.1",
                        "switch_port": "port-2/2",
                        "switch_mac": "52:54:00:94:21:22",
                    },
                ],
            },
            {
                "compute_node": "compute node 2",
                "ports": [
                    {
                        "pci": "0000:81:00.0",
                        "switch_port": "port-2/3",
                        "switch_mac": "52:54:00:94:21:23",
                    },
                    {
                        "pci": "0000:81:00.1",
                        "switch_port": "port-2/4",
                        "switch_mac": "52:54:00:94:21:24",
                    },
                ],
            },
        ]

    def run(self, engine, test_osm, manual_check, test_params=None):
        """Exercise VIM creation (valid, schema-invalid and duplicated
        payloads), listing, detail retrieval and deletion."""
        incomplete_vim = dict(self.vim)
        incomplete_vim.pop("name")

        engine.set_test_name("FakeVim")
        engine.get_autorization()
        engine.test(
            "Create VIM",
            "POST",
            "/admin/v1/vim_accounts",
            headers_json,
            self.vim,
            (201, 202),
            {"Location": "/admin/v1/vim_accounts/", "Content-Type": "application/json"},
            "json",
        )
        vim_id = engine.last_id
        # A VIM without a mandatory name must fail schema validation
        engine.test(
            "Create VIM without name, bad schema",
            "POST",
            "/admin/v1/vim_accounts",
            headers_json,
            incomplete_vim,
            422,
            None,
            headers_json,
        )
        # Re-posting the same name must be rejected as a conflict
        engine.test(
            "Create VIM name repeated",
            "POST",
            "/admin/v1/vim_accounts",
            headers_json,
            self.vim,
            409,
            None,
            headers_json,
        )
        engine.test(
            "Show VIMs",
            "GET",
            "/admin/v1/vim_accounts",
            headers_yaml,
            None,
            200,
            r_header_yaml,
            "yaml",
        )
        engine.test(
            "Show VIM",
            "GET",
            f"/admin/v1/vim_accounts/{vim_id}",
            headers_yaml,
            None,
            200,
            r_header_yaml,
            "yaml",
        )
        if test_osm:
            # Real deletion is asynchronous: request it and poll until gone
            engine.test(
                "Delete VIM",
                "DELETE",
                f"/admin/v1/vim_accounts/{vim_id}",
                headers_yaml,
                None,
                202,
                None,
                0,
            )
            engine.wait_until_delete(
                f"/admin/v1/vim_accounts/{vim_id}", timeout
            )
        else:
            # Forced delete, then confirm the account no longer exists
            engine.test(
                "Delete VIM",
                "DELETE",
                f"/admin/v1/vim_accounts/{vim_id}?FORCE=True",
                headers_yaml,
                None,
                202,
                None,
                0,
            )
            engine.test(
                "Check VIM is deleted",
                "GET",
                f"/admin/v1/vim_accounts/{vim_id}",
                headers_yaml,
                None,
                404,
                r_header_yaml,
                "yaml",
            )
+
+
class TestVIMSDN(TestFakeVim):
    """Create a VIM wired to an SDN controller (with port mapping) plus a
    WIM, edit them, and delete everything at the end."""

    description = "Creates VIM with SDN editing SDN controllers and port_mapping"

    def __init__(self):
        TestFakeVim.__init__(self)
        # Payload for POST /admin/v1/wim_accounts
        self.wim = {
            "schema_version": "1.0",
            "schema_type": "No idea",
            "name": "myWim",
            "description": "Descriptor name",
            "wim_type": "odl",
            "wim_url": "http://localhost:/wim",
            "user": "user",
            "password": "password",
            "config": {"config_param": 1},
        }

    def run(self, engine, test_osm, manual_check, test_params=None):
        """Create SDN, VIM (attached to the SDN) and WIM; edit the SDN and
        the VIM port mapping; finally delete all three accounts (forced and
        verified when not against a real OSM, asynchronous otherwise)."""
        engine.set_test_name("VimSdn")
        engine.get_autorization()
        # Added SDN
        engine.test(
            "Create SDN",
            "POST",
            "/admin/v1/sdns",
            headers_json,
            self.sdn,
            (201, 202),
            {"Location": "/admin/v1/sdns/", "Content-Type": "application/json"},
            "json",
        )
        sdnc_id = engine.last_id
        # Edit SDN
        engine.test(
            "Edit SDN",
            "PATCH",
            "/admin/v1/sdns/{}".format(sdnc_id),
            headers_json,
            {"name": "new_sdn_name"},
            (202, 204),
            None,
            None,
        )
        # VIM with SDN: attach the controller and its port mapping
        self.vim["config"]["sdn-controller"] = sdnc_id
        self.vim["config"]["sdn-port-mapping"] = self.port_mapping
        # Fixed: removed stray trailing comma that turned this statement into
        # a one-element tuple expression.
        engine.test(
            "Create VIM",
            "POST",
            "/admin/v1/vim_accounts",
            headers_json,
            self.vim,
            (200, 202, 201),
            {"Location": "/admin/v1/vim_accounts/", "Content-Type": "application/json"},
            "json",
        )

        vim_id = engine.last_id
        self.port_mapping[0]["compute_node"] = "compute node XX"
        engine.test(
            "Edit VIM change port-mapping",
            "PUT",
            "/admin/v1/vim_accounts/{}".format(vim_id),
            headers_json,
            {"config": {"sdn-port-mapping": self.port_mapping}},
            (202, 204),
            None,
            None,
        )
        engine.test(
            "Edit VIM remove port-mapping",
            "PUT",
            "/admin/v1/vim_accounts/{}".format(vim_id),
            headers_json,
            {"config": {"sdn-port-mapping": None}},
            (202, 204),
            None,
            None,
        )

        # Fixed: removed stray trailing comma here as well.
        engine.test(
            "Create WIM",
            "POST",
            "/admin/v1/wim_accounts",
            headers_json,
            self.wim,
            (200, 202, 201),
            {"Location": "/admin/v1/wim_accounts/", "Content-Type": "application/json"},
            "json",
        )
        wim_id = engine.last_id

        if not test_osm:
            # delete with FORCE and verify each account is gone
            engine.test(
                "Delete VIM remove port-mapping",
                "DELETE",
                "/admin/v1/vim_accounts/{}?FORCE=True".format(vim_id),
                headers_json,
                None,
                202,
                None,
                0,
            )
            engine.test(
                "Delete SDNC",
                "DELETE",
                "/admin/v1/sdns/{}?FORCE=True".format(sdnc_id),
                headers_json,
                None,
                202,
                None,
                0,
            )

            engine.test(
                "Delete WIM",
                "DELETE",
                "/admin/v1/wim_accounts/{}?FORCE=True".format(wim_id),
                headers_json,
                None,
                202,
                None,
                0,
            )
            engine.test(
                "Check VIM is deleted",
                "GET",
                "/admin/v1/vim_accounts/{}".format(vim_id),
                headers_yaml,
                None,
                404,
                r_header_yaml,
                "yaml",
            )
            engine.test(
                "Check SDN is deleted",
                "GET",
                "/admin/v1/sdns/{}".format(sdnc_id),
                headers_yaml,
                None,
                404,
                r_header_yaml,
                "yaml",
            )
            engine.test(
                "Check WIM is deleted",
                "GET",
                "/admin/v1/wim_accounts/{}".format(wim_id),
                headers_yaml,
                None,
                404,
                r_header_yaml,
                "yaml",
            )
        else:
            if manual_check:
                input(
                    "VIM, SDN, WIM has been deployed. Perform manual check and press enter to resume"
                )
            # delete and wait until it is really deleted
            engine.test(
                "Delete VIM remove port-mapping",
                "DELETE",
                "/admin/v1/vim_accounts/{}".format(vim_id),
                headers_json,
                None,
                (202, 201, 204),
                None,
                0,
            )
            engine.test(
                "Delete SDN",
                "DELETE",
                "/admin/v1/sdns/{}".format(sdnc_id),
                headers_json,
                None,
                (202, 201, 204),
                None,
                0,
            )
            # Fixed: test label said "Delete VIM" for the WIM deletion
            engine.test(
                "Delete WIM",
                "DELETE",
                "/admin/v1/wim_accounts/{}".format(wim_id),
                headers_json,
                None,
                (202, 201, 204),
                None,
                0,
            )
            engine.wait_until_delete(
                "/admin/v1/vim_accounts/{}".format(vim_id), timeout
            )
            engine.wait_until_delete("/admin/v1/sdns/{}".format(sdnc_id), timeout)
            engine.wait_until_delete(
                "/admin/v1/wim_accounts/{}".format(wim_id), timeout
            )
+
+
class TestDeploy:
    """Base class: download descriptors from ETSI, onboard and deploy them
    in a real VIM. Subclasses override packages, commands and edits."""

    description = "Base class for downloading descriptors from ETSI, onboard and deploy in real VIM"

    def __init__(self):
        """Initialise default packages, credentials and timers."""
        self.test_name = "DEPLOY"
        # Identifiers assigned as the test progresses
        self.nsd_id = self.vim_id = self.ns_id = None
        self.vnfds_id = []
        # Default reference packages published for the 2nd hackfest
        self.descriptor_url = (
            "https://osm-download.etsi.org/ftp/osm-3.0-three/2nd-hackfest/packages/"
        )
        self.vnfd_filenames = ("cirros_vnf.tar.gz",)
        self.nsd_filename = "cirros_2vnf_ns.tar.gz"
        self.descriptor_edit = None
        self.uses_configuration = False
        # Per member-vnf-index ssh-check settings
        self.users, self.passwords = {}, {}
        self.commands, self.keys = {}, {}
        self.timeout = 120
        self.qforce = ""  # optional query suffix (e.g. "?FORCE=True") for onboarding
        self.ns_params = None
        self.vnfr_ip_list = {}
+
+    def create_descriptors(self, engine):
+        temp_dir = os.path.dirname(os.path.abspath(__file__)) + "/temp/"
+        if not os.path.exists(temp_dir):
+            os.makedirs(temp_dir)
+        for vnfd_index, vnfd_filename in enumerate(self.vnfd_filenames):
+            if "/" in vnfd_filename:
+                vnfd_filename_path = vnfd_filename
+                if not os.path.exists(vnfd_filename_path):
+                    raise TestException(
+                        "File '{}' does not exist".format(vnfd_filename_path)
+                    )
+            else:
+                vnfd_filename_path = temp_dir + vnfd_filename
+                if not os.path.exists(vnfd_filename_path):
+                    with open(vnfd_filename_path, "wb") as file:
+                        response = requests.get(self.descriptor_url + vnfd_filename)
+                        if response.status_code >= 300:
+                            raise TestException(
+                                "Error downloading descriptor from '{}': {}".format(
+                                    self.descriptor_url + vnfd_filename,
+                                    response.status_code,
+                                )
+                            )
+                        file.write(response.content)
+            if vnfd_filename_path.endswith(".yaml"):
+                headers = headers_yaml
+            else:
+                headers = headers_zip_yaml
+            if randint(0, 1) == 0:
+                # vnfd CREATE AND UPLOAD in one step:
+                engine.test(
+                    "Onboard VNFD in one step",
+                    "POST",
+                    "/vnfpkgm/v1/vnf_packages_content" + self.qforce,
+                    headers,
+                    "@b" + vnfd_filename_path,
+                    201,
+                    r_headers_yaml_location_vnfd,
+                    "yaml",
+                )
+                self.vnfds_id.append(engine.last_id)
+            else:
+                # vnfd CREATE AND UPLOAD ZIP
+                engine.test(
+                    "Onboard VNFD step 1",
+                    "POST",
+                    "/vnfpkgm/v1/vnf_packages",
+                    headers_json,
+                    None,
+                    201,
+                    {
+                        "Location": "/vnfpkgm/v1/vnf_packages/",
+                        "Content-Type": "application/json",
+                    },
+                    "json",
+                )
+                self.vnfds_id.append(engine.last_id)
+                engine.test(
+                    "Onboard VNFD step 2 as ZIP",
+                    "PUT",
+                    "/vnfpkgm/v1/vnf_packages/<>/package_content" + self.qforce,
+                    headers,
+                    "@b" + vnfd_filename_path,
+                    204,
+                    None,
+                    0,
+                )
+
+            if self.descriptor_edit:
+                if "vnfd{}".format(vnfd_index) in self.descriptor_edit:
+                    # Modify VNFD
+                    engine.test(
+                        "Edit VNFD ",
+                        "PATCH",
+                        "/vnfpkgm/v1/vnf_packages/{}".format(self.vnfds_id[-1]),
+                        headers_yaml,
+                        self.descriptor_edit["vnfd{}".format(vnfd_index)],
+                        204,
+                        None,
+                        None,
+                    )
+
+        if "/" in self.nsd_filename:
+            nsd_filename_path = self.nsd_filename
+            if not os.path.exists(nsd_filename_path):
+                raise TestException(
+                    "File '{}' does not exist".format(nsd_filename_path)
+                )
+        else:
+            nsd_filename_path = temp_dir + self.nsd_filename
+            if not os.path.exists(nsd_filename_path):
+                with open(nsd_filename_path, "wb") as file:
+                    response = requests.get(self.descriptor_url + self.nsd_filename)
+                    if response.status_code >= 300:
+                        raise TestException(
+                            "Error downloading descriptor from '{}': {}".format(
+                                self.descriptor_url + self.nsd_filename,
+                                response.status_code,
+                            )
+                        )
+                    file.write(response.content)
+        if nsd_filename_path.endswith(".yaml"):
+            headers = headers_yaml
+        else:
+            headers = headers_zip_yaml
+
+        if randint(0, 1) == 0:
+            # nsd CREATE AND UPLOAD in one step:
+            engine.test(
+                "Onboard NSD in one step",
+                "POST",
+                "/nsd/v1/ns_descriptors_content" + self.qforce,
+                headers,
+                "@b" + nsd_filename_path,
+                201,
+                r_headers_yaml_location_nsd,
+                yaml,
+            )
+            self.nsd_id = engine.last_id
+        else:
+            # nsd CREATE AND UPLOAD ZIP
+            engine.test(
+                "Onboard NSD step 1",
+                "POST",
+                "/nsd/v1/ns_descriptors",
+                headers_json,
+                None,
+                201,
+                {
+                    "Location": "/nsd/v1/ns_descriptors/",
+                    "Content-Type": "application/json",
+                },
+                "json",
+            )
+            self.nsd_id = engine.last_id
+            engine.test(
+                "Onboard NSD step 2 as ZIP",
+                "PUT",
+                "/nsd/v1/ns_descriptors/<>/nsd_content" + self.qforce,
+                headers,
+                "@b" + nsd_filename_path,
+                204,
+                None,
+                0,
+            )
+
+        if self.descriptor_edit and "nsd" in self.descriptor_edit:
+            # Modify NSD
+            engine.test(
+                "Edit NSD ",
+                "PATCH",
+                "/nsd/v1/ns_descriptors/{}".format(self.nsd_id),
+                headers_yaml,
+                self.descriptor_edit["nsd"],
+                204,
+                None,
+                None,
+            )
+
+    def delete_descriptors(self, engine):
+        # delete descriptors
+        engine.test(
+            "Delete NSSD SOL005",
+            "DELETE",
+            "/nsd/v1/ns_descriptors/{}".format(self.nsd_id),
+            headers_yaml,
+            None,
+            204,
+            None,
+            0,
+        )
+        for vnfd_id in self.vnfds_id:
+            engine.test(
+                "Delete VNFD SOL005",
+                "DELETE",
+                "/vnfpkgm/v1/vnf_packages/{}".format(vnfd_id),
+                headers_yaml,
+                None,
+                204,
+                None,
+                0,
+            )
+
+    def instantiate(self, engine, ns_data):
+        ns_data_text = yaml.safe_dump(ns_data, default_flow_style=True, width=256)
+        # create NS Two steps
+        r = engine.test(
+            "Create NS step 1",
+            "POST",
+            "/nslcm/v1/ns_instances",
+            headers_yaml,
+            ns_data_text,
+            (201, 202),
+            {"Location": "nslcm/v1/ns_instances/", "Content-Type": "application/yaml"},
+            "yaml",
+        )
+        if not r:
+            return
+        self.ns_id = engine.last_id
+        engine.test(
+            "Instantiate NS step 2",
+            "POST",
+            "/nslcm/v1/ns_instances/{}/instantiate".format(self.ns_id),
+            headers_yaml,
+            ns_data_text,
+            (201, 202),
+            r_headers_yaml_location_nslcmop,
+            "yaml",
+        )
+        nslcmop_id = engine.last_id
+
+        if test_osm:
+            # Wait until status is Ok
+            timeout = timeout_configure if self.uses_configuration else timeout_deploy
+            engine.wait_operation_ready("ns", nslcmop_id, timeout)
+
+    def terminate(self, engine):
+        # remove deployment
+        if test_osm:
+            engine.test(
+                "Terminate NS",
+                "POST",
+                "/nslcm/v1/ns_instances/{}/terminate".format(self.ns_id),
+                headers_yaml,
+                None,
+                (201, 202),
+                r_headers_yaml_location_nslcmop,
+                "yaml",
+            )
+            nslcmop2_id = engine.last_id
+            # Wait until status is Ok
+            engine.wait_operation_ready("ns", nslcmop2_id, timeout_deploy)
+
+            engine.test(
+                "Delete NS",
+                "DELETE",
+                "/nslcm/v1/ns_instances/{}".format(self.ns_id),
+                headers_yaml,
+                None,
+                204,
+                None,
+                0,
+            )
+        else:
+            engine.test(
+                "Delete NS with FORCE",
+                "DELETE",
+                "/nslcm/v1/ns_instances/{}?FORCE=True".format(self.ns_id),
+                headers_yaml,
+                None,
+                204,
+                None,
+                0,
+            )
+
+        # check all it is deleted
+        engine.test(
+            "Check NS is deleted",
+            "GET",
+            "/nslcm/v1/ns_instances/{}".format(self.ns_id),
+            headers_yaml,
+            None,
+            404,
+            None,
+            "yaml",
+        )
+        r = engine.test(
+            "Check NSLCMOPs are deleted",
+            "GET",
+            "/nslcm/v1/ns_lcm_op_occs?nsInstanceId={}".format(self.ns_id),
+            headers_json,
+            None,
+            200,
+            None,
+            "json",
+        )
+        if not r:
+            return
+        nslcmops = r.json()
+        if not isinstance(nslcmops, list) or nslcmops:
+            raise TestException(
+                "NS {} deleted but with ns_lcm_op_occ active: {}".format(
+                    self.ns_id, nslcmops
+                )
+            )
+
    def test_ns(
        self,
        engine,
        test_osm,
        commands=None,
        users=None,
        passwds=None,
        keys=None,
        timeout=0,
    ):
        """Check every deployed VNF by running command(s) over ssh.

        For each VNFR of the current NS, resolves an IP address (via
        ``self.get_vnfr_ip``) and executes the configured command through
        ``self.do_checks``, retrying every 20 seconds until ``timeout``
        (falling back to ``self.timeout`` when 0) is exhausted. Results are
        accumulated in ``engine.passed_tests`` / ``engine.failed_tests``.

        The ``commands``/``users``/``passwds``/``keys`` arguments are
        per-vnf-index dicts that override the corresponding instance
        attributes when provided.
        """
        r = engine.test(
            "GET VNFR IDs",
            "GET",
            "/nslcm/v1/ns_instances/{}".format(self.ns_id),
            headers_json,
            None,
            200,
            r_header_json,
            "json",
        )
        if not r:
            return
        ns_data = r.json()

        vnfr_list = ns_data["constituent-vnfr-ref"]
        time = 0
        # Explicit arguments win over the instance-level defaults
        _commands = commands if commands is not None else self.commands
        _users = users if users is not None else self.users
        _passwds = passwds if passwds is not None else self.passwords
        _keys = keys if keys is not None else self.keys
        _timeout = timeout if timeout != 0 else self.timeout

        # vnfr_list=[d8272263-6bd3-4680-84ca-6a4be23b3f2d, 88b22e2f-994a-4b61-94fd-4a3c90de3dc4]
        for vnfr_id in vnfr_list:
            r = engine.test(
                "Get VNFR to get IP_ADDRESS",
                "GET",
                "/nslcm/v1/vnfrs/{}".format(vnfr_id),
                headers_json,
                None,
                200,
                r_header_json,
                "json",
            )
            if not r:
                continue
            vnfr_data = r.json()

            vnf_index = str(vnfr_data["member-vnf-index-ref"])

            ip_address = self.get_vnfr_ip(engine, vnf_index)
            description = "Exec command='{}' at VNFR={} IP={}".format(
                _commands.get(vnf_index)[0], vnf_index, ip_address
            )
            engine.step += 1
            test_description = "{}{} {}".format(
                engine.test_name, engine.step, description
            )
            logger.warning(test_description)
            # Retry loop: 20-second steps until success, hard failure or
            # the retry budget (_timeout) runs out
            while _timeout >= time:
                result, message = self.do_checks(
                    [ip_address],
                    vnf_index=vnfr_data["member-vnf-index-ref"],
                    commands=_commands.get(vnf_index),
                    user=_users.get(vnf_index),
                    passwd=_passwds.get(vnf_index),
                    key=_keys.get(vnf_index),
                )
                if result == 1:
                    # Command succeeded
                    engine.passed_tests += 1
                    logger.debug(message)
                    break
                elif result == 0:
                    # Retryable timeout: consume 20s of the budget and retry
                    time += 20
                    sleep(20)
                elif result == -1:
                    # Hard failure: stop retrying
                    engine.failed_tests += 1
                    logger.error(message)
                    break
                else:
                    # NOTE(review): unexpected result code; decrementing
                    # `time` actually extends the retry window — confirm
                    # this is intended.
                    time -= 20
                    engine.failed_tests += 1
                    logger.error(message)
            else:
                # while/else: reached only when the budget expires without a
                # break. NOTE(review): the message claims a missing mgmt
                # address, but this branch also fires on plain timeout —
                # confirm the wording.
                engine.failed_tests += 1
                logger.error(
                    "VNFR {} has not mgmt address. Check failed".format(vnf_index)
                )
+
+    def do_checks(self, ip, vnf_index, commands=[], user=None, passwd=None, key=None):
+        try:
+            import urllib3
+            from pssh.clients import ParallelSSHClient
+            from pssh.utils import load_private_key
+            from ssh2 import exceptions as ssh2Exception
+        except ImportError as e:
+            logger.critical(
+                "Package <pssh> or/and <urllib3> is not installed. Please add them with 'pip3 install "
+                "parallel-ssh urllib3': {}".format(e)
+            )
+            return -1, "install needed packages 'pip3 install parallel-ssh urllib3'"
+        urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)
+        try:
+            p_host = os.environ.get("PROXY_HOST")
+            p_user = os.environ.get("PROXY_USER")
+            p_password = os.environ.get("PROXY_PASSWD")
+
+            if key:
+                pkey = load_private_key(key)
+            else:
+                pkey = None
+
+            client = ParallelSSHClient(
+                ip,
+                user=user,
+                password=passwd,
+                pkey=pkey,
+                proxy_host=p_host,
+                proxy_user=p_user,
+                proxy_password=p_password,
+                timeout=10,
+                num_retries=0,
+            )
+            for cmd in commands:
+                output = client.run_command(cmd)
+                client.join(output)
+                if output[ip[0]].exit_code:
+                    return -1, "VNFR {} command '{}' returns error: '{}'".format(
+                        ip[0], cmd, "\n".join(output[ip[0]].stderr)
+                    )
+                else:
+                    return 1, "VNFR {} command '{}' successful".format(ip[0], cmd)
+        except (
+            ssh2Exception.ChannelFailure,
+            ssh2Exception.SocketDisconnectError,
+            ssh2Exception.SocketTimeout,
+            ssh2Exception.SocketRecvError,
+        ) as e:
+            return 0, "Timeout accessing the VNFR {}: {}".format(ip[0], str(e))
+        except Exception as e:
+            return -1, "ERROR checking the VNFR {}: {}".format(ip[0], str(e))
+
+    def additional_operations(self, engine, test_osm, manual_check):
+        pass
+
+    def run(self, engine, test_osm, manual_check, test_params=None):
+        engine.set_test_name(self.test_name)
+        engine.get_autorization()
+        nsname = os.environ.get("OSMNBITEST_NS_NAME", "OSMNBITEST")
+        if test_params:
+            if "vnfd-files" in test_params:
+                self.vnfd_filenames = test_params["vnfd-files"].split(",")
+            if "nsd-file" in test_params:
+                self.nsd_filename = test_params["nsd-file"]
+            if test_params.get("ns-name"):
+                nsname = test_params["ns-name"]
+        self.create_descriptors(engine)
+
+        # create real VIM if not exist
+        self.vim_id = engine.get_create_vim(test_osm)
+        ns_data = {
+            "nsDescription": "default description",
+            "nsName": nsname,
+            "nsdId": self.nsd_id,
+            "vimAccountId": self.vim_id,
+        }
+        if self.ns_params:
+            ns_data.update(self.ns_params)
+        if test_params and test_params.get("ns-config"):
+            if isinstance(test_params["ns-config"], str):
+                ns_data.update(yaml.safe_load(test_params["ns-config"]))
+            else:
+                ns_data.update(test_params["ns-config"])
+        self.instantiate(engine, ns_data)
+
+        if manual_check:
+            input(
+                "NS has been deployed. Perform manual check and press enter to resume"
+            )
+        if test_osm and self.commands:
+            self.test_ns(engine, test_osm)
+        self.additional_operations(engine, test_osm, manual_check)
+        self.terminate(engine)
+        self.delete_descriptors(engine)
+
+    def get_first_ip(self, ip_string):
+        # When using a floating IP, the vnfr_data['ip-address'] contains a semicolon-separated list of IP:s.
+        first_ip = ip_string.split(";")[0] if ip_string else ""
+        return first_ip
+
+    def get_vnfr_ip(self, engine, vnfr_index_wanted):
+        # If the IP address list has been obtained before, it has been stored in 'vnfr_ip_list'
+        ip = self.vnfr_ip_list.get(vnfr_index_wanted, "")
+        if ip:
+            return self.get_first_ip(ip)
+        r = engine.test(
+            "Get VNFR to get IP_ADDRESS",
+            "GET",
+            "/nslcm/v1/vnfrs?member-vnf-index-ref={}&nsr-id-ref={}".format(
+                vnfr_index_wanted, self.ns_id
+            ),
+            headers_json,
+            None,
+            200,
+            r_header_json,
+            "json",
+        )
+        if not r:
+            return ""
+        vnfr_data = r.json()
+        if not (vnfr_data and vnfr_data[0]):
+            return ""
+        # Store the IP (or list of IPs) in 'vnfr_ip_list'
+        ip_list = vnfr_data[0].get("ip-address", "")
+        if ip_list:
+            self.vnfr_ip_list[vnfr_index_wanted] = ip_list
+            ip = self.get_first_ip(ip_list)
+        return ip
+
+
class TestDeployHackfestCirros(TestDeploy):
    """Deploy the Hackfest cirros_2vnf_ns example and check SSH access to its VNFs.

    Overrides terminate() to exercise the one-step terminate+delete endpoint
    (/nslcm/v1/ns_instances_content) instead of the base class' two-step flow,
    and then verifies that both the NS and its LCM operations are gone.
    """

    description = "Load and deploy Hackfest cirros_2vnf_ns example"

    def __init__(self):
        super().__init__()
        self.test_name = "CIRROS"
        self.vnfd_filenames = ("cirros_vnf.tar.gz",)
        self.nsd_filename = "cirros_2vnf_ns.tar.gz"
        # SSH checks executed by test_ns(), keyed by member-vnf-index
        self.commands = {
            "1": [
                "ls -lrt",
            ],
            "2": [
                "ls -lrt",
            ],
        }
        self.users = {"1": "cirros", "2": "cirros"}
        self.passwords = {"1": "cubswin:)", "2": "cubswin:)"}

    def terminate(self, engine):
        # Make a delete in one step, overriding the normal two step of TestDeploy that launched terminate and delete
        # NOTE(review): 'test_osm' is read from a module-level global here, not
        # from a parameter -- confirm it is set before terminate() is invoked.
        if test_osm:
            engine.test(
                "Terminate and delete NS in one step",
                "DELETE",
                "/nslcm/v1/ns_instances_content/{}".format(self.ns_id),
                headers_yaml,
                None,
                202,
                None,
                "yaml",
            )

            engine.wait_until_delete(
                "/nslcm/v1/ns_instances/{}".format(self.ns_id), timeout_deploy
            )
        else:
            # Without a real OSM behind, just force-delete the NS record
            engine.test(
                "Delete NS with FORCE",
                "DELETE",
                "/nslcm/v1/ns_instances/{}?FORCE=True".format(self.ns_id),
                headers_yaml,
                None,
                204,
                None,
                0,
            )

        # check all it is deleted
        engine.test(
            "Check NS is deleted",
            "GET",
            "/nslcm/v1/ns_instances/{}".format(self.ns_id),
            headers_yaml,
            None,
            404,
            None,
            "yaml",
        )
        r = engine.test(
            "Check NSLCMOPs are deleted",
            "GET",
            "/nslcm/v1/ns_lcm_op_occs?nsInstanceId={}".format(self.ns_id),
            headers_json,
            None,
            200,
            None,
            "json",
        )
        if not r:
            return
        nslcmops = r.json()
        # An empty list is expected; anything else means dangling operations
        if not isinstance(nslcmops, list) or nslcmops:
            raise TestException(
                "NS {} deleted but with ns_lcm_op_occ active: {}".format(
                    self.ns_id, nslcmops
                )
            )
+
+
class TestDeployHackfest1(TestDeploy):
    """Deploy the Hackfest_1_vnfd example.

    No per-VNF commands/credentials are configured, so test_ns() SSH checks
    are skipped for this test.
    """

    description = "Load and deploy Hackfest_1_vnfd example"

    def __init__(self):
        super().__init__()
        self.test_name = "HACKFEST1-"
        self.nsd_filename = "hackfest_1_nsd.tar.gz"
        self.vnfd_filenames = ("hackfest_1_vnfd.tar.gz",)
+
+
class TestDeployHackfestCirrosScaling(TestDeploy):
    """Deploy the cirros_2vnf_ns example with a scaling group added to the VNFD
    and exercise manual SCALE_OUT / SCALE_IN, including the over-limit case.
    """

    description = (
        "Load and deploy Hackfest cirros_2vnf_ns example with scaling modifications"
    )

    def __init__(self):
        super().__init__()
        self.test_name = "CIRROS-SCALE"
        self.vnfd_filenames = ("cirros_vnf.tar.gz",)
        self.nsd_filename = "cirros_2vnf_ns.tar.gz"
        # Modify VNFD to add scaling and count=2
        self.descriptor_edit = {
            "vnfd0": {
                "vdu": {"$id: 'cirros_vnfd-VM'": {"count": 2}},
                "scaling-group-descriptor": [
                    {
                        "name": "scale_cirros",
                        "max-instance-count": 2,
                        "vdu": [{"vdu-id-ref": "cirros_vnfd-VM", "count": 2}],
                    }
                ],
            }
        }

    def additional_operations(self, engine, test_osm, manual_check):
        """Scale out twice, scale in twice, then verify that one more scale-in
        beyond the limit makes the operation fail as expected."""
        if not test_osm:
            return
        # 2 perform scale out twice
        payload = (
            "{scaleType: SCALE_VNF, scaleVnfData: {scaleVnfType: SCALE_OUT, scaleByStepData: "
            '{scaling-group-descriptor: scale_cirros, member-vnf-index: "1"}}}'
        )
        for i in range(0, 2):
            engine.test(
                "Execute scale action over NS",
                "POST",
                "/nslcm/v1/ns_instances/{}/scale".format(self.ns_id),
                headers_yaml,
                payload,
                (201, 202),
                r_headers_yaml_location_nslcmop,
                "yaml",
            )
            nslcmop2_scale_out = engine.last_id
            engine.wait_operation_ready("ns", nslcmop2_scale_out, timeout_deploy)
            if manual_check:
                input("NS scale out done. Check that two more vdus are there")
            # TODO check automatic

        # 2 perform scale in
        payload = (
            "{scaleType: SCALE_VNF, scaleVnfData: {scaleVnfType: SCALE_IN, scaleByStepData: "
            '{scaling-group-descriptor: scale_cirros, member-vnf-index: "1"}}}'
        )
        for i in range(0, 2):
            engine.test(
                "Execute scale IN action over NS",
                "POST",
                "/nslcm/v1/ns_instances/{}/scale".format(self.ns_id),
                headers_yaml,
                payload,
                (201, 202),
                r_headers_yaml_location_nslcmop,
                "yaml",
            )
            nslcmop2_scale_in = engine.last_id
            engine.wait_operation_ready("ns", nslcmop2_scale_in, timeout_deploy)
            if manual_check:
                input("NS scale in done. Check that two less vdus are there")
            # TODO check automatic

        # perform scale in that must fail as reached limit
        engine.test(
            "Execute scale IN out of limit action over NS",
            "POST",
            "/nslcm/v1/ns_instances/{}/scale".format(self.ns_id),
            headers_yaml,
            payload,
            (201, 202),
            r_headers_yaml_location_nslcmop,
            "yaml",
        )
        nslcmop2_scale_in = engine.last_id
        # The operation is accepted by NBI but must end in a failed state
        engine.wait_operation_ready(
            "ns", nslcmop2_scale_in, timeout_deploy, expected_fail=True
        )
+
+
class TestDeployIpMac(TestDeploy):
    """Deploy the 2-VDU set-ip-mac example, forcing IP/MAC addresses through
    instantiation parameters (internal VLD ip-profile plus per-interface
    ip-address / mac-address overrides).
    """

    description = "Load and deploy descriptor examples setting mac, ip address at descriptor and instantiate params"

    def __init__(self):
        super().__init__()
        self.test_name = "SetIpMac"
        self.vnfd_filenames = (
            "vnfd_2vdu_set_ip_mac2.yaml",
            "vnfd_2vdu_set_ip_mac.yaml",
        )
        self.nsd_filename = "scenario_2vdu_set_ip_mac.yaml"
        # Descriptors are pulled from the RO repository test area
        self.descriptor_url = "https://osm.etsi.org/gitweb/?p=osm/RO.git;a=blob_plain;f=test/RO_tests/v3_2vdu_set_ip_mac/"
        self.commands = {
            "1": [
                "ls -lrt",
            ],
            "2": [
                "ls -lrt",
            ],
        }
        self.users = {"1": "osm", "2": "osm"}
        self.passwords = {"1": "osm4u", "2": "osm4u"}
        self.timeout = 360

    def run(self, engine, test_osm, manual_check, test_params=None):
        """Run the deployment once, injecting the IP/MAC settings as
        instantiation parameters (the descriptor-only variant is disabled).

        NOTE(review): the first super().run() below is intentionally commented
        out, so only the instantiation-parameters variant is executed.
        """
        # super().run(engine, test_osm, manual_check, test_params)
        # run again setting IPs with instantiate parameters
        instantiation_params = {
            "vnf": [
                {
                    "member-vnf-index": "1",
                    "internal-vld": [
                        {
                            "name": "internal_vld1",  # net_internal
                            "ip-profile": {
                                "ip-version": "ipv4",
                                "subnet-address": "10.9.8.0/24",
                                "dhcp-params": {
                                    "count": 100,
                                    "start-address": "10.9.8.100",
                                },
                            },
                            "internal-connection-point": [
                                {
                                    "id-ref": "eth2",
                                    "ip-address": "10.9.8.2",
                                },
                                {
                                    "id-ref": "eth3",
                                    "ip-address": "10.9.8.3",
                                },
                            ],
                        },
                    ],
                    "vdu": [
                        {
                            "id": "VM1",
                            "interface": [
                                # {
                                #     "name": "iface11",
                                #     "floating-ip-required": True,
                                # },
                                {"name": "iface13", "mac-address": "52:33:44:55:66:13"},
                            ],
                        },
                        {
                            "id": "VM2",
                            "interface": [
                                {
                                    "name": "iface21",
                                    "ip-address": "10.31.31.22",
                                    "mac-address": "52:33:44:55:66:21",
                                },
                            ],
                        },
                    ],
                },
            ]
        }

        super().run(
            engine,
            test_osm,
            manual_check,
            test_params={"ns-config": instantiation_params},
        )
+
+
class TestDeployHackfest4(TestDeploy):
    """Deploy the Hackfest 4 example, which uses VNF configuration (charms)."""

    description = "Load and deploy Hackfest 4 example."

    def __init__(self):
        super().__init__()
        self.test_name = "HACKFEST4-"
        self.vnfd_filenames = ("hackfest_4_vnfd.tar.gz",)
        self.nsd_filename = "hackfest_4_nsd.tar.gz"
        # This package carries day-1/day-2 configuration
        self.uses_configuration = True
        # SSH checks executed by test_ns(), keyed by member-vnf-index
        self.commands = {
            "1": ["ls -lrt"],
            "2": ["ls -lrt"],
        }
        self.users = {"1": "ubuntu", "2": "ubuntu"}
        self.passwords = {"1": "osm4u", "2": "osm4u"}
+
+
class TestDeployHackfest3Charmed(TestDeploy):
    """Deploy the Hackfest 3charmed_ns example (proxy-charm configured VNFs),
    adding terminate-config-primitives to the VNFD, and exercise an NS action
    primitive ('touch') over member-vnf-index "2".
    """

    description = "Load and deploy Hackfest 3charmed_ns example"

    def __init__(self):
        super().__init__()
        self.test_name = "HACKFEST3-"
        self.vnfd_filenames = ("hackfest_3charmed_vnfd.tar.gz",)
        self.nsd_filename = "hackfest_3charmed_nsd.tar.gz"
        self.uses_configuration = True
        # SSH checks: the charm's initial primitive must have created this file
        self.commands = {
            "1": ["ls -lrt /home/ubuntu/first-touch"],
            "2": ["ls -lrt /home/ubuntu/first-touch"],
        }
        self.users = {"1": "ubuntu", "2": "ubuntu"}
        self.passwords = {"1": "osm4u", "2": "osm4u"}
        # Add out-of-order terminate-config-primitives (seq 1,3,2) to the VNFD
        self.descriptor_edit = {
            "vnfd0": yaml.safe_load(
                """
                vnf-configuration:
                    terminate-config-primitive:
                    -   seq: '1'
                        name: touch
                        parameter:
                        -   name: filename
                            value: '/home/ubuntu/last-touch1'
                    -   seq: '3'
                        name: touch
                        parameter:
                        -   name: filename
                            value: '/home/ubuntu/last-touch3'
                    -   seq: '2'
                        name: touch
                        parameter:
                        -   name: filename
                            value: '/home/ubuntu/last-touch2'
                """
            )
        }

    def additional_operations(self, engine, test_osm, manual_check):
        """Execute the 'touch' service primitive on VNF "2" and verify over SSH
        that the file it creates is present."""
        if not test_osm:
            return
        # 1 perform action
        vnfr_index_selected = "2"
        payload = '{member_vnf_index: "2", primitive: touch, primitive_params: { filename: /home/ubuntu/OSMTESTNBI }}'
        engine.test(
            "Exec service primitive over NS",
            "POST",
            "/nslcm/v1/ns_instances/{}/action".format(self.ns_id),
            headers_yaml,
            payload,
            (201, 202),
            r_headers_yaml_location_nslcmop,
            "yaml",
        )
        nslcmop2_action = engine.last_id
        # Wait until status is Ok
        engine.wait_operation_ready("ns", nslcmop2_action, timeout_deploy)
        vnfr_ip = self.get_vnfr_ip(engine, vnfr_index_selected)
        if manual_check:
            input(
                "NS service primitive has been executed."
                "Check that file /home/ubuntu/OSMTESTNBI is present at {}".format(
                    vnfr_ip
                )
            )
        if test_osm:
            # Only VNF "2" received the primitive; VNF "1" has nothing to check
            commands = {
                "1": [""],
                "2": [
                    "ls -lrt /home/ubuntu/OSMTESTNBI",
                ],
            }
            self.test_ns(engine, test_osm, commands=commands)

        # # 2 perform scale out
        # payload = '{scaleType: SCALE_VNF, scaleVnfData: {scaleVnfType: SCALE_OUT, scaleByStepData: ' \
        #           '{scaling-group-descriptor: scale_dataVM, member-vnf-index: "1"}}}'
        # engine.test("Execute scale action over NS", "POST",
        #             "/nslcm/v1/ns_instances/{}/scale".format(self.ns_id), headers_yaml, payload,
        #             (201, 202), r_headers_yaml_location_nslcmop, "yaml")
        # nslcmop2_scale_out = engine.last_id
        # engine.wait_operation_ready("ns", nslcmop2_scale_out, timeout_deploy)
        # if manual_check:
        #     input('NS scale out done. Check that file /home/ubuntu/touched is present and new VM is created')
        # # TODO check automatic
        #
        # # 2 perform scale in
        # payload = '{scaleType: SCALE_VNF, scaleVnfData: {scaleVnfType: SCALE_IN, scaleByStepData: ' \
        #           '{scaling-group-descriptor: scale_dataVM, member-vnf-index: "1"}}}'
        # engine.test("Execute scale action over NS", "POST",
        #             "/nslcm/v1/ns_instances/{}/scale".format(self.ns_id), headers_yaml, payload,
        #             (201, 202), r_headers_yaml_location_nslcmop, "yaml")
        # nslcmop2_scale_in = engine.last_id
        # engine.wait_operation_ready("ns", nslcmop2_scale_in, timeout_deploy)
        # if manual_check:
        #     input('NS scale in done. Check that file /home/ubuntu/touched is updated and new VM is deleted')
        # # TODO check automatic
+
+
class TestDeployHackfest3Charmed2(TestDeployHackfest3Charmed):
    """Variant of Hackfest3Charmed that edits the descriptors to leave a single
    VDU acting as a PDU (second VDU, second CP and vnf-configuration removed),
    with the NSD adapted accordingly.

    NOTE(review): despite the class docstring mentioning dots in ids, the edits
    below rename the descriptors to 'vdu-as-pdu'/'nsd-as-pdu' -- the $[n]/None
    entries are interpreted by the descriptor-edit engine as index
    updates/deletions (presumably; confirm against create_descriptors).
    """

    description = (
        "Load and deploy Hackfest 3charmed_ns example modified version of descriptors to have dots in "
        "ids and member-vnf-index."
    )

    def __init__(self):
        super().__init__()
        self.test_name = "HACKFEST3v2-"
        # FORCE is needed because the edited descriptors are not fully valid
        self.qforce = "?FORCE=True"
        self.descriptor_edit = {
            "vnfd0": {
                "vdu": {
                    "$[0]": {
                        "interface": {
                            "$[0]": {"external-connection-point-ref": "pdu-mgmt"}
                        }
                    },
                    "$[1]": None,
                },
                "vnf-configuration": None,
                "connection-point": {
                    "$[0]": {
                        "id": "pdu-mgmt",
                        "name": "pdu-mgmt",
                        "short-name": "pdu-mgmt",
                    },
                    "$[1]": None,
                },
                "mgmt-interface": {"cp": "pdu-mgmt"},
                "description": "A vnf single vdu to be used as PDU",
                "id": "vdu-as-pdu",
                "internal-vld": {
                    "$[0]": {
                        "id": "pdu_internal",
                        "name": "pdu_internal",
                        "internal-connection-point": {"$[1]": None},
                        "short-name": "pdu_internal",
                        "type": "ELAN",
                    }
                },
            },
            # Modify NSD accordingly
            "nsd": {
                "constituent-vnfd": {
                    "$[0]": {"vnfd-id-ref": "vdu-as-pdu"},
                    "$[1]": None,
                },
                "description": "A nsd to deploy the vnf to act as as PDU",
                "id": "nsd-as-pdu",
                "name": "nsd-as-pdu",
                "short-name": "nsd-as-pdu",
                "vld": {
                    "$[0]": {
                        "id": "mgmt_pdu",
                        "name": "mgmt_pdu",
                        "short-name": "mgmt_pdu",
                        "vnfd-connection-point-ref": {
                            "$[0]": {
                                "vnfd-connection-point-ref": "pdu-mgmt",
                                "vnfd-id-ref": "vdu-as-pdu",
                            },
                            "$[1]": None,
                        },
                        "type": "ELAN",
                    },
                    "$[1]": None,
                },
            },
        }
+
+
class TestDeployHackfest3Charmed3(TestDeployHackfest3Charmed):
    """Variant of Hackfest3Charmed adding a scaling group with monitoring
    parameters to the VNFD and per-VNF additionalParams, then exercising a
    manual scale out / scale in of VNF "1"."""

    description = "Load and deploy Hackfest 3charmed_ns example modified version to test scaling and NS parameters"

    def __init__(self):
        super().__init__()
        self.test_name = "HACKFEST3v3-"
        # File names come from the <touch_filename> additionalParams below
        self.commands = {
            "1": ["ls -lrt /home/ubuntu/first-touch-1"],
            "2": ["ls -lrt /home/ubuntu/first-touch-2"],
        }
        self.descriptor_edit = {
            "vnfd0": yaml.safe_load(
                """
                scaling-group-descriptor:
                    -   name: "scale_dataVM"
                        max-instance-count: 10
                        scaling-policy:
                        -   name: "auto_cpu_util_above_threshold"
                            scaling-type: "automatic"
                            threshold-time: 0
                            cooldown-time: 60
                            scaling-criteria:
                            -   name: "cpu_util_above_threshold"
                                scale-in-threshold: 15
                                scale-in-relational-operation: "LE"
                                scale-out-threshold: 60
                                scale-out-relational-operation: "GE"
                                vnf-monitoring-param-ref: "monitor1"
                        vdu:
                        -   vdu-id-ref: dataVM
                            count: 1
                        scaling-config-action:
                        -   trigger: post-scale-out
                            vnf-config-primitive-name-ref: touch
                        -   trigger: pre-scale-in
                            vnf-config-primitive-name-ref: touch
                vdu:
                    "$id: dataVM":
                        monitoring-param:
                        -   id: "dataVM_cpu_util"
                            nfvi-metric: "cpu_utilization"

                monitoring-param:
                -   id: "monitor1"
                    name: "monitor1"
                    aggregation-type: AVERAGE
                    vdu-monitoring-param:
                      vdu-ref: "dataVM"
                      vdu-monitoring-param-ref: "dataVM_cpu_util"
                vnf-configuration:
                    initial-config-primitive:
                        "$[1]":
                            parameter:
                                "$[0]":
                                    value: "<touch_filename>"   # default-value: /home/ubuntu/first-touch
                    config-primitive:
                        "$[0]":
                            parameter:
                                "$[0]":
                                    default-value: "<touch_filename2>"
                """,
            )
        }
        # additionalParams substitute the <touch_filename*> placeholders above
        self.ns_params = {
            "additionalParamsForVnf": [
                {
                    "member-vnf-index": "1",
                    "additionalParams": {
                        "touch_filename": "/home/ubuntu/first-touch-1",
                        "touch_filename2": "/home/ubuntu/second-touch-1",
                    },
                },
                {
                    "member-vnf-index": "2",
                    "additionalParams": {
                        "touch_filename": "/home/ubuntu/first-touch-2",
                        "touch_filename2": "/home/ubuntu/second-touch-2",
                    },
                },
            ]
        }

    def additional_operations(self, engine, test_osm, manual_check):
        """Run the parent's action primitive test, then scale VNF "1" out and
        back in, checking the post-scale-out file over SSH."""
        super().additional_operations(engine, test_osm, manual_check)
        if not test_osm:
            return

        # 2 perform scale out
        payload = (
            "{scaleType: SCALE_VNF, scaleVnfData: {scaleVnfType: SCALE_OUT, scaleByStepData: "
            '{scaling-group-descriptor: scale_dataVM, member-vnf-index: "1"}}}'
        )
        engine.test(
            "Execute scale action over NS",
            "POST",
            "/nslcm/v1/ns_instances/{}/scale".format(self.ns_id),
            headers_yaml,
            payload,
            (201, 202),
            r_headers_yaml_location_nslcmop,
            "yaml",
        )
        nslcmop2_scale_out = engine.last_id
        engine.wait_operation_ready("ns", nslcmop2_scale_out, timeout_deploy)
        if manual_check:
            input(
                "NS scale out done. Check that file /home/ubuntu/second-touch-1 is present and new VM is created"
            )
        if test_osm:
            # The post-scale-out 'touch' primitive must have created this file
            commands = {
                "1": [
                    "ls -lrt /home/ubuntu/second-touch-1",
                ]
            }
            self.test_ns(engine, test_osm, commands=commands)
            # TODO check automatic connection to scaled VM

        # 2 perform scale in
        payload = (
            "{scaleType: SCALE_VNF, scaleVnfData: {scaleVnfType: SCALE_IN, scaleByStepData: "
            '{scaling-group-descriptor: scale_dataVM, member-vnf-index: "1"}}}'
        )
        engine.test(
            "Execute scale action over NS",
            "POST",
            "/nslcm/v1/ns_instances/{}/scale".format(self.ns_id),
            headers_yaml,
            payload,
            (201, 202),
            r_headers_yaml_location_nslcmop,
            "yaml",
        )
        nslcmop2_scale_in = engine.last_id
        engine.wait_operation_ready("ns", nslcmop2_scale_in, timeout_deploy)
        if manual_check:
            input(
                "NS scale in done. Check that file /home/ubuntu/second-touch-1 is updated and new VM is deleted"
            )
        # TODO check automatic
+
+
class TestDeploySimpleCharm(TestDeploy):
    """Deploy the 4th-hackfest hackfest_simplecharm example (proxy charm)."""

    description = "Deploy hackfest-4 hackfest_simplecharm example"

    def __init__(self):
        super().__init__()
        self.test_name = "HACKFEST-SIMPLE"
        # Packages are fetched from the 4th-hackfest download area
        self.descriptor_url = (
            "https://osm-download.etsi.org/ftp/osm-4.0-four/4th-hackfest/packages/"
        )
        self.vnfd_filenames = ("hackfest_simplecharm_vnf.tar.gz",)
        self.nsd_filename = "hackfest_simplecharm_ns.tar.gz"
        self.uses_configuration = True
        # VNF "1" has nothing to check; VNF "2" must hold the charm-created file
        self.commands = {
            "1": [""],
            "2": ["ls -lrt /home/ubuntu/first-touch"],
        }
        self.users = {"1": "ubuntu", "2": "ubuntu"}
        self.passwords = {"1": "osm4u", "2": "osm4u"}
+
+
class TestDeploySimpleCharm2(TestDeploySimpleCharm):
    """Variant of TestDeploySimpleCharm renaming descriptor ids to contain dots
    and member-vnf-index values to '$1'/'$2', to check NBI handling of such
    names (uploaded with FORCE because the edits break strict validation).
    """

    description = (
        "Deploy hackfest-4 hackfest_simplecharm example changing naming to contain dots on ids and "
        "vnf-member-index"
    )

    def __init__(self):
        super().__init__()
        self.test_name = "HACKFEST-SIMPLE2-"
        self.qforce = "?FORCE=True"
        # NOTE(review): '$[n]' entries are index-addressed edits applied by the
        # descriptor-edit engine; '$1'/'$2' look like placeholder member
        # indexes -- confirm against create_descriptors.
        self.descriptor_edit = {
            "vnfd0": {"id": "hackfest.simplecharm.vnf"},
            "nsd": {
                "id": "hackfest.simplecharm.ns",
                "constituent-vnfd": {
                    "$[0]": {
                        "vnfd-id-ref": "hackfest.simplecharm.vnf",
                        "member-vnf-index": "$1",
                    },
                    "$[1]": {
                        "vnfd-id-ref": "hackfest.simplecharm.vnf",
                        "member-vnf-index": "$2",
                    },
                },
                "vld": {
                    "$[0]": {
                        "vnfd-connection-point-ref": {
                            "$[0]": {
                                "member-vnf-index-ref": "$1",
                                "vnfd-id-ref": "hackfest.simplecharm.vnf",
                            },
                            "$[1]": {
                                "member-vnf-index-ref": "$2",
                                "vnfd-id-ref": "hackfest.simplecharm.vnf",
                            },
                        },
                    },
                    "$[1]": {
                        "vnfd-connection-point-ref": {
                            "$[0]": {
                                "member-vnf-index-ref": "$1",
                                "vnfd-id-ref": "hackfest.simplecharm.vnf",
                            },
                            "$[1]": {
                                "member-vnf-index-ref": "$2",
                                "vnfd-id-ref": "hackfest.simplecharm.vnf",
                            },
                        },
                    },
                },
            },
        }
+
+
class TestDeploySingleVdu(TestDeployHackfest3Charmed):
    """Edit the Hackfest3Charmed descriptors down to a single-VDU VNF and a
    matching single-member NS, so the deployment can later serve as a PDU."""

    description = (
        "Generate a single VDU base on editing Hackfest3Charmed descriptors and deploy"
    )

    def __init__(self):
        super().__init__()
        self.test_name = "SingleVDU"
        self.qforce = "?FORCE=True"

        # VNFD edits: keep only the first VDU, re-point its interface and the
        # management connection point to "pdu-mgmt", drop the charm config,
        # and rename the descriptor to "vdu-as-pdu". "$[1]": None entries
        # delete the second element of each list.
        vnfd_edit = {
            "vdu": {
                "$[0]": {
                    "interface": {
                        "$[0]": {"external-connection-point-ref": "pdu-mgmt"}
                    }
                },
                "$[1]": None,
            },
            "vnf-configuration": None,
            "connection-point": {
                "$[0]": {
                    "id": "pdu-mgmt",
                    "name": "pdu-mgmt",
                    "short-name": "pdu-mgmt",
                },
                "$[1]": None,
            },
            "mgmt-interface": {"cp": "pdu-mgmt"},
            "description": "A vnf single vdu to be used as PDU",
            "id": "vdu-as-pdu",
            "internal-vld": {
                "$[0]": {
                    "id": "pdu_internal",
                    "name": "pdu_internal",
                    "internal-connection-point": {"$[1]": None},
                    "short-name": "pdu_internal",
                    "type": "ELAN",
                }
            },
        }

        # NSD edits: a single constituent VNF and a single management VLD,
        # both renamed to the *-as-pdu / *_pdu identifiers.
        nsd_edit = {
            "constituent-vnfd": {
                "$[0]": {"vnfd-id-ref": "vdu-as-pdu"},
                "$[1]": None,
            },
            "description": "A nsd to deploy the vnf to act as as PDU",
            "id": "nsd-as-pdu",
            "name": "nsd-as-pdu",
            "short-name": "nsd-as-pdu",
            "vld": {
                "$[0]": {
                    "id": "mgmt_pdu",
                    "name": "mgmt_pdu",
                    "short-name": "mgmt_pdu",
                    "vnfd-connection-point-ref": {
                        "$[0]": {
                            "vnfd-connection-point-ref": "pdu-mgmt",
                            "vnfd-id-ref": "vdu-as-pdu",
                        },
                        "$[1]": None,
                    },
                    "type": "ELAN",
                },
                "$[1]": None,
            },
        }

        self.descriptor_edit = {"vnfd0": vnfd_edit, "nsd": nsd_edit}
+
+
class TestDeployHnfd(TestDeployHackfest3Charmed):
    """Deploy a hybrid network service on top of a PDU.

    First deploys a single-VDU VNF (via TestDeploySingleVdu) and registers it
    as a PDU; then deploys a second NS whose edited VNFD declares a pdu-type
    VDU, so the already-deployed machine is reused as that VDU.
    """

    description = (
        "Generate a HNFD base on editing Hackfest3Charmed descriptors and deploy"
    )

    def __init__(self):
        super().__init__()
        self.test_name = "HNFD"
        # Nested test object that deploys the VNF later registered as a PDU
        self.pduDeploy = TestDeploySingleVdu()
        # ip/mac data of the two PDU interfaces, filled in at run() time
        self.pdu_interface_0 = {}
        self.pdu_interface_1 = {}

        # _id of the onboarded PDU descriptor (set in create_descriptors)
        self.pdu_id = None
        # self.vnf_to_pdu = """
        #     vdu:
        #         "$[0]":
        #             pdu-type: PDU-TYPE-1
        #             interface:
        #                 "$[0]":
        #                     name: mgmt-iface
        #                 "$[1]":
        #                     name: pdu-iface-internal
        #     id: hfn1
        #     description: HFND, one PDU + One VDU
        #     name: hfn1
        #     short-name: hfn1
        #
        # """

        # PDU registration document; the "to override" / "to-override" values
        # are completed in create_descriptors() from the deployed VNF data
        self.pdu_descriptor = {
            "name": "my-PDU",
            "type": "PDU-TYPE-1",
            "vim_accounts": "to-override",
            "interfaces": [
                {
                    "name": "mgmt-iface",
                    "mgmt": True,
                    "type": "overlay",
                    "ip-address": "to override",
                    "mac-address": "mac_address",
                    "vim-network-name": "mgmt",
                },
                {
                    "name": "pdu-iface-internal",
                    "mgmt": False,
                    "type": "overlay",
                    "ip-address": "to override",
                    "mac-address": "mac_address",
                    "vim-network-name": "pdu_internal",  # OSMNBITEST-PDU-pdu_internal
                },
            ],
        }
        self.vnfd_filenames = (
            "hackfest_3charmed_vnfd.tar.gz",
            "hackfest_3charmed_vnfd.tar.gz",
        )

        # Turn the first VDU of vnfd0 into a pdu-type placeholder and point
        # the NSD references at the edited VNFD id "hfnd1"
        self.descriptor_edit = {
            "vnfd0": {
                "id": "hfnd1",
                "name": "hfn1",
                "short-name": "hfn1",
                "vdu": {
                    "$[0]": {
                        "pdu-type": "PDU-TYPE-1",
                        "interface": {
                            "$[0]": {"name": "mgmt-iface"},
                            "$[1]": {"name": "pdu-iface-internal"},
                        },
                    }
                },
            },
            "nsd": {
                "constituent-vnfd": {"$[1]": {"vnfd-id-ref": "hfnd1"}},
                "vld": {
                    "$[0]": {
                        "vnfd-connection-point-ref": {"$[1]": {"vnfd-id-ref": "hfnd1"}}
                    },
                    "$[1]": {
                        "vnfd-connection-point-ref": {"$[1]": {"vnfd-id-ref": "hfnd1"}}
                    },
                },
            },
        }

    def create_descriptors(self, engine):
        """Create the NS/VNF descriptors (parent behavior) and onboard the PDU
        descriptor, completed with the ip/mac of the deployed VNF.

        :param engine: TestRest-like helper used to issue the HTTP requests
        """
        super().create_descriptors(engine)

        # Create PDU
        self.pdu_descriptor["interfaces"][0].update(self.pdu_interface_0)
        self.pdu_descriptor["interfaces"][1].update(self.pdu_interface_1)
        self.pdu_descriptor["vim_accounts"] = [self.vim_id]
        # TODO get vim-network-name from vnfr.vld.name
        self.pdu_descriptor["interfaces"][1]["vim-network-name"] = "{}-{}-{}".format(
            os.environ.get("OSMNBITEST_NS_NAME", "OSMNBITEST"),
            "PDU",
            self.pdu_descriptor["interfaces"][1]["vim-network-name"],
        )
        engine.test(
            "Onboard PDU descriptor",
            "POST",
            "/pdu/v1/pdu_descriptors",
            {
                "Location": "/pdu/v1/pdu_descriptors/",
                "Content-Type": "application/yaml",
            },
            self.pdu_descriptor,
            201,
            r_header_yaml,
            "yaml",
        )
        self.pdu_id = engine.last_id

    def run(self, engine, test_osm, manual_check, test_params=None):
        """Deploy the PDU VNF, register it, then deploy the HNF NS on top.

        :param engine: TestRest-like helper used to issue the HTTP requests
        :param test_osm: when True the NS is really deployed against OSM; when
            False dummy ip/mac values are used for the PDU interfaces
        :param manual_check: when True, pause for operator inspection
        :param test_params: optional dict; key "ns-config" (str or dict) is
            merged into the NS instantiation parameters
        """
        engine.get_autorization()
        engine.set_test_name(self.test_name)
        nsname = os.environ.get("OSMNBITEST_NS_NAME", "OSMNBITEST")

        # create real VIM if not exist
        self.vim_id = engine.get_create_vim(test_osm)
        # instantiate PDU
        self.pduDeploy.create_descriptors(engine)
        self.pduDeploy.instantiate(
            engine,
            {
                "nsDescription": "to be used as PDU",
                "nsName": nsname + "-PDU",
                "nsdId": self.pduDeploy.nsd_id,
                "vimAccountId": self.vim_id,
            },
        )
        if manual_check:
            input(
                "VNF to be used as PDU has been deployed. Perform manual check and press enter to resume"
            )
        if test_osm:
            self.pduDeploy.test_ns(engine, test_osm)

        if test_osm:
            r = engine.test(
                "Get VNFR to obtain IP_ADDRESS",
                "GET",
                "/nslcm/v1/vnfrs?nsr-id-ref={}".format(self.pduDeploy.ns_id),
                headers_json,
                None,
                200,
                r_header_json,
                "json",
            )
            if not r:
                return
            vnfr_data = r.json()
            # print(vnfr_data)

            # Copy ip/mac of both interfaces of the first VDU into the PDU
            # registration data consumed by create_descriptors()
            self.pdu_interface_0["ip-address"] = vnfr_data[0]["vdur"][0]["interfaces"][
                0
            ].get("ip-address")
            self.pdu_interface_1["ip-address"] = vnfr_data[0]["vdur"][0]["interfaces"][
                1
            ].get("ip-address")
            self.pdu_interface_0["mac-address"] = vnfr_data[0]["vdur"][0]["interfaces"][
                0
            ].get("mac-address")
            self.pdu_interface_1["mac-address"] = vnfr_data[0]["vdur"][0]["interfaces"][
                1
            ].get("mac-address")
            if not self.pdu_interface_0["ip-address"]:
                raise TestException("Vnfr has not managment ip address")
        else:
            # Not hitting a real OSM: use fixed dummy addresses
            self.pdu_interface_0["ip-address"] = "192.168.10.10"
            self.pdu_interface_1["ip-address"] = "192.168.11.10"
            self.pdu_interface_0["mac-address"] = "52:33:44:55:66:13"
            self.pdu_interface_1["mac-address"] = "52:33:44:55:66:14"

        self.create_descriptors(engine)

        ns_data = {
            "nsDescription": "default description",
            "nsName": nsname,
            "nsdId": self.nsd_id,
            "vimAccountId": self.vim_id,
        }
        if test_params and test_params.get("ns-config"):
            if isinstance(test_params["ns-config"], str):
                ns_data.update(yaml.safe_load(test_params["ns-config"]))
            else:
                ns_data.update(test_params["ns-config"])

        self.instantiate(engine, ns_data)
        if manual_check:
            input(
                "NS has been deployed. Perform manual check and press enter to resume"
            )
        if test_osm:
            self.test_ns(engine, test_osm)
        self.additional_operations(engine, test_osm, manual_check)
        # Tear down in reverse order: HNF NS, PDU NS, then both descriptor sets
        self.terminate(engine)
        self.pduDeploy.terminate(engine)
        self.delete_descriptors(engine)
        self.pduDeploy.delete_descriptors(engine)

    def delete_descriptors(self, engine):
        """Delete the NS/VNF descriptors (parent behavior) plus the PDU."""
        super().delete_descriptors(engine)
        # delete pdu
        engine.test(
            "Delete PDU SOL005",
            "DELETE",
            "/pdu/v1/pdu_descriptors/{}".format(self.pdu_id),
            headers_yaml,
            None,
            204,
            None,
            0,
        )
+
+
class TestDescriptors:
    """CRUD tests for VNFD and NSD descriptors: onboarding, invalid-upload
    rejection (bug 605 and query-forced validation errors), retrieval of the
    descriptor / package / artifacts, and delete-conflict handling.
    """

    description = "Test VNFD, NSD, PDU descriptors CRUD and dependencies"
    # Minimal valid VNFD, onboarded first so invalid uploads can be tried on it
    vnfd_empty = """vnfd:vnfd-catalog:
        vnfd:
        -   name: prova
            short-name: prova
            id: prova
    """
    # Malformed VNFD (broken virtual-interface mapping); server must reject it
    vnfd_prova = """vnfd:vnfd-catalog:
        vnfd:
        -   connection-point:
            -   name: cp_0h8m
                type: VPORT
            id: prova
            name: prova
            short-name: prova
            vdu:
            -   id: vdu_z4bm
                image: ubuntu
                interface:
                -   external-connection-point-ref: cp_0h8m
                    name: eth0
                    virtual-interface:
                    type: VIRTIO
                name: vdu_z4bm
            version: '1.0'
    """

    def __init__(self):
        """Set descriptor package names and their download location."""
        self.vnfd_filename = "hackfest_3charmed_vnfd.tar.gz"
        self.nsd_filename = "hackfest_3charmed_nsd.tar.gz"
        self.descriptor_url = (
            "https://osm-download.etsi.org/ftp/osm-3.0-three/2nd-hackfest/packages/"
        )
        # Ids assigned by the NBI when the descriptors are onboarded
        self.vnfd_id = None
        self.nsd_id = None

    def run(self, engine, test_osm, manual_check, test_params=None):
        """Download the hackfest packages and exercise the descriptor CRUD API.

        :param engine: TestRest-like helper used to issue the HTTP requests
        :param test_osm: unused; kept for the common test-class interface
        :param manual_check: unused; kept for the common test-class interface
        :param test_params: unused; kept for the common test-class interface
        """
        engine.set_test_name("Descriptors")
        engine.get_autorization()
        temp_dir = os.path.dirname(os.path.abspath(__file__)) + "/temp/"
        if not os.path.exists(temp_dir):
            os.makedirs(temp_dir)

        # download the descriptor packages, only once per test session
        for filename in (self.vnfd_filename, self.nsd_filename):
            filename_path = temp_dir + filename
            if not os.path.exists(filename_path):
                with open(filename_path, "wb") as file:
                    response = requests.get(self.descriptor_url + filename)
                    if response.status_code >= 300:
                        raise TestException(
                            "Error downloading descriptor from '{}': {}".format(
                                self.descriptor_url + filename, response.status_code
                            )
                        )
                    file.write(response.content)

        vnfd_filename_path = temp_dir + self.vnfd_filename
        nsd_filename_path = temp_dir + self.nsd_filename

        engine.test(
            "Onboard empty VNFD in one step",
            "POST",
            "/vnfpkgm/v1/vnf_packages_content",
            headers_yaml,
            self.vnfd_empty,
            201,
            r_headers_yaml_location_vnfd,
            "yaml",
        )
        self.vnfd_id = engine.last_id

        # test bug 605
        engine.test(
            "Upload invalid VNFD ",
            "PUT",
            "/vnfpkgm/v1/vnf_packages/{}/package_content".format(self.vnfd_id),
            headers_yaml,
            self.vnfd_prova,
            422,
            r_header_yaml,
            "yaml",
        )

        engine.test(
            "Upload VNFD {}".format(self.vnfd_filename),
            "PUT",
            "/vnfpkgm/v1/vnf_packages/{}/package_content".format(self.vnfd_id),
            headers_zip_yaml,
            "@b" + vnfd_filename_path,
            204,
            None,
            0,
        )

        # Each query forces an inconsistency into the uploaded package, so the
        # NBI must reject the upload with 422
        queries = [
            "mgmt-interface.cp=mgmt",
            "vdu.0.interface.0.external-connection-point-ref=mgmt",
            "vdu.0.interface.1.internal-connection-point-ref=internal",
            "internal-vld.0.internal-connection-point.0.id-ref=internal",
            # Detection of duplicated VLD names in VNF Descriptors
            # URL: internal-vld=[
            #        {id: internal1, name: internal, type:ELAN,
            #            internal-connection-point: [{id-ref: mgmtVM-internal}, {id-ref: dataVM-internal}]},
            #        {id: internal2, name: internal, type:ELAN,
            #            internal-connection-point: [{id-ref: mgmtVM-internal}, {id-ref: dataVM-internal}]}
            #        ]
            "internal-vld=%5B%7Bid%3A%20internal1%2C%20name%3A%20internal%2C%20type%3A%20ELAN%2C%20"
            "internal-connection-point%3A%20%5B%7Bid-ref%3A%20mgmtVM-internal%7D%2C%20%7Bid-ref%3A%20"
            "dataVM-internal%7D%5D%7D%2C%20%7Bid%3A%20internal2%2C%20name%3A%20internal%2C%20type%3A%20"
            "ELAN%2C%20internal-connection-point%3A%20%5B%7Bid-ref%3A%20mgmtVM-internal%7D%2C%20%7B"
            "id-ref%3A%20dataVM-internal%7D%5D%7D%5D",
        ]
        for query in queries:
            engine.test(
                "Upload invalid VNFD ",
                "PUT",
                "/vnfpkgm/v1/vnf_packages/{}/package_content?{}".format(
                    self.vnfd_id, query
                ),
                headers_zip_yaml,
                "@b" + vnfd_filename_path,
                422,
                r_header_yaml,
                "yaml",
            )

        # test bug 605
        engine.test(
            "Upload invalid VNFD ",
            "PUT",
            "/vnfpkgm/v1/vnf_packages/{}/package_content".format(self.vnfd_id),
            headers_yaml,
            self.vnfd_prova,
            422,
            r_header_yaml,
            "yaml",
        )

        # get vnfd descriptor
        engine.test(
            "Get VNFD descriptor",
            "GET",
            "/vnfpkgm/v1/vnf_packages/{}".format(self.vnfd_id),
            headers_yaml,
            None,
            200,
            r_header_yaml,
            "yaml",
        )

        # get vnfd file descriptor
        engine.test(
            "Get VNFD file descriptor",
            "GET",
            "/vnfpkgm/v1/vnf_packages/{}/vnfd".format(self.vnfd_id),
            headers_text,
            None,
            200,
            r_header_text,
            "text",
            temp_dir + "vnfd-yaml",
        )
        # TODO compare files: diff vnfd-yaml hackfest_3charmed_vnfd/hackfest_3charmed_vnfd.yaml

        # get vnfd zip file package
        engine.test(
            "Get VNFD zip package",
            "GET",
            "/vnfpkgm/v1/vnf_packages/{}/package_content".format(self.vnfd_id),
            headers_zip,
            None,
            200,
            r_header_zip,
            "zip",
            temp_dir + "vnfd-zip",
        )
        # TODO compare files: diff vnfd-zip hackfest_3charmed_vnfd.tar.gz

        # get vnfd artifact
        engine.test(
            "Get VNFD artifact package",
            "GET",
            "/vnfpkgm/v1/vnf_packages/{}/artifacts/icons/osm.png".format(self.vnfd_id),
            headers_zip,
            None,
            200,
            r_header_octect,
            "octet-string",
            temp_dir + "vnfd-icon",
        )
        # TODO compare files: diff vnfd-icon hackfest_3charmed_vnfd/icons/osm.png

        # nsd CREATE AND UPLOAD in one step:
        engine.test(
            "Onboard NSD in one step",
            "POST",
            "/nsd/v1/ns_descriptors_content",
            headers_zip_yaml,
            "@b" + nsd_filename_path,
            201,
            r_headers_yaml_location_nsd,
            "yaml",
        )
        self.nsd_id = engine.last_id

        queries = ["vld.0.vnfd-connection-point-ref.0.vnfd-id-ref=hf"]
        for query in queries:
            engine.test(
                "Upload invalid NSD ",
                "PUT",
                "/nsd/v1/ns_descriptors/{}/nsd_content?{}".format(self.nsd_id, query),
                headers_zip_yaml,
                "@b" + nsd_filename_path,
                422,
                r_header_yaml,
                "yaml",
            )

        # get nsd descriptor
        engine.test(
            "Get NSD descriptor",
            "GET",
            "/nsd/v1/ns_descriptors/{}".format(self.nsd_id),
            headers_yaml,
            None,
            200,
            r_header_yaml,
            "yaml",
        )

        # get nsd file descriptor
        engine.test(
            "Get NSD file descriptor",
            "GET",
            "/nsd/v1/ns_descriptors/{}/nsd".format(self.nsd_id),
            headers_text,
            None,
            200,
            r_header_text,
            "text",
            temp_dir + "nsd-yaml",
        )
        # TODO compare files: diff nsd-yaml hackfest_3charmed_nsd/hackfest_3charmed_nsd.yaml

        # get nsd zip file package
        engine.test(
            "Get NSD zip package",
            "GET",
            "/nsd/v1/ns_descriptors/{}/nsd_content".format(self.nsd_id),
            headers_zip,
            None,
            200,
            r_header_zip,
            "zip",
            temp_dir + "nsd-zip",
        )
        # TODO compare files: diff nsd-zip hackfest_3charmed_nsd.tar.gz

        # get nsd artifact
        engine.test(
            "Get NSD artifact package",
            "GET",
            "/nsd/v1/ns_descriptors/{}/artifacts/icons/osm.png".format(self.nsd_id),
            headers_zip,
            None,
            200,
            r_header_octect,
            "octet-string",
            temp_dir + "nsd-icon",
        )
        # TODO compare files: diff nsd-icon hackfest_3charmed_nsd/icons/osm.png

        # vnfd DELETE
        # Bug fix: use the 'engine' received as parameter instead of the
        # module-global 'test_rest', which only exists when the script runs as
        # __main__ and may be a different engine instance.
        engine.test(
            "Delete VNFD conflict",
            "DELETE",
            "/vnfpkgm/v1/vnf_packages/{}".format(self.vnfd_id),
            headers_yaml,
            None,
            409,
            None,
            None,
        )

        engine.test(
            "Delete VNFD force",
            "DELETE",
            "/vnfpkgm/v1/vnf_packages/{}?FORCE=TRUE".format(self.vnfd_id),
            headers_yaml,
            None,
            204,
            None,
            0,
        )

        # nsd DELETE
        engine.test(
            "Delete NSD",
            "DELETE",
            "/nsd/v1/ns_descriptors/{}".format(self.nsd_id),
            headers_yaml,
            None,
            204,
            None,
            0,
        )
+
+
class TestNetSliceTemplates:
    """Onboard the slice_shared VNFDs/NSDs plus a Network Slice Template
    (NST), show the NST, then delete everything in reverse dependency order.
    """

    description = "Upload a NST to OSM"

    def __init__(self):
        """Set the local slice_shared descriptor file locations.

        The leading '@' tells the engine to read the payload from disk.
        """
        self.vnfd_filename = "@./slice_shared/vnfd/slice_shared_vnfd.yaml"
        self.vnfd_filename_middle = "@./slice_shared/vnfd/slice_shared_middle_vnfd.yaml"
        self.nsd_filename = "@./slice_shared/nsd/slice_shared_nsd.yaml"
        self.nsd_filename_middle = "@./slice_shared/nsd/slice_shared_middle_nsd.yaml"
        self.nst_filenames = "@./slice_shared/slice_shared_nstd.yaml"

    def run(self, engine, test_osm, manual_check, test_params=None):
        """Exercise the NST CRUD API.

        :param engine: TestRest-like helper used to issue the HTTP requests
        :param test_osm: unused; kept for the common test-class interface
        :param manual_check: unused; kept for the common test-class interface
        :param test_params: unused; kept for the common test-class interface
        """
        # nst CREATE
        engine.set_test_name("NST step ")
        engine.get_autorization()
        temp_dir = os.path.dirname(os.path.abspath(__file__)) + "/temp/"
        if not os.path.exists(temp_dir):
            os.makedirs(temp_dir)

        # Onboard VNFDs
        engine.test(
            "Onboard edge VNFD",
            "POST",
            "/vnfpkgm/v1/vnf_packages_content",
            headers_yaml,
            self.vnfd_filename,
            201,
            r_headers_yaml_location_vnfd,
            "yaml",
        )
        self.vnfd_edge_id = engine.last_id

        engine.test(
            "Onboard middle VNFD",
            "POST",
            "/vnfpkgm/v1/vnf_packages_content",
            headers_yaml,
            self.vnfd_filename_middle,
            201,
            r_headers_yaml_location_vnfd,
            "yaml",
        )
        self.vnfd_middle_id = engine.last_id

        # Onboard NSDs
        engine.test(
            "Onboard NSD edge",
            "POST",
            "/nsd/v1/ns_descriptors_content",
            headers_yaml,
            self.nsd_filename,
            201,
            r_headers_yaml_location_nsd,
            "yaml",
        )
        self.nsd_edge_id = engine.last_id

        engine.test(
            "Onboard NSD middle",
            "POST",
            "/nsd/v1/ns_descriptors_content",
            headers_yaml,
            self.nsd_filename_middle,
            201,
            r_headers_yaml_location_nsd,
            "yaml",
        )
        self.nsd_middle_id = engine.last_id

        # Onboard NST
        engine.test(
            "Onboard NST",
            "POST",
            "/nst/v1/netslice_templates_content",
            headers_yaml,
            self.nst_filenames,
            201,
            r_headers_yaml_location_nst,
            "yaml",
        )
        nst_id = engine.last_id

        # nstd SHOW OSM format
        engine.test(
            "Show NSTD OSM format",
            "GET",
            "/nst/v1/netslice_templates/{}".format(nst_id),
            headers_json,
            None,
            200,
            r_header_json,
            "json",
        )

        # nstd DELETE
        engine.test(
            "Delete NSTD",
            "DELETE",
            "/nst/v1/netslice_templates/{}".format(nst_id),
            headers_json,
            None,
            204,
            None,
            0,
        )

        # NSDs DELETE
        # Bug fix: use the 'engine' received as parameter instead of the
        # module-global 'test_rest', which only exists when the script runs as
        # __main__ and may be a different engine instance.
        engine.test(
            "Delete NSD middle",
            "DELETE",
            "/nsd/v1/ns_descriptors/{}".format(self.nsd_middle_id),
            headers_json,
            None,
            204,
            None,
            0,
        )

        engine.test(
            "Delete NSD edge",
            "DELETE",
            "/nsd/v1/ns_descriptors/{}".format(self.nsd_edge_id),
            headers_json,
            None,
            204,
            None,
            0,
        )

        # VNFDs DELETE
        engine.test(
            "Delete VNFD edge",
            "DELETE",
            "/vnfpkgm/v1/vnf_packages/{}".format(self.vnfd_edge_id),
            headers_yaml,
            None,
            204,
            None,
            0,
        )

        engine.test(
            "Delete VNFD middle",
            "DELETE",
            "/vnfpkgm/v1/vnf_packages/{}".format(self.vnfd_middle_id),
            headers_yaml,
            None,
            204,
            None,
            0,
        )
+
+
+class TestNetSliceInstances:
+    """
+    Test procedure:
+    1. Populate databases with VNFD, NSD, NST with the following scenario
+       +-----------------management-----------------+
+       |                     |                      |
+    +--+---+            +----+----+             +---+--+
+    |      |            |         |             |      |
+    | edge +---data1----+  middle +---data2-----+ edge |
+    |      |            |         |             |      |
+    +------+            +---------+             +------+
+                        shared-nss
+    2. Create NSI-1
+    3. Instantiate NSI-1
+    4. Create NSI-2
+    5. Instantiate NSI-2
+        Manual check - Are 2 slices instantiated correctly?
+        NSI-1 3 nss (2 nss-edges + 1 nss-middle)
+        NSI-2 2 nss (2 nss-edge sharing nss-middle)
+    6. Terminate NSI-1
+    7. Delete NSI-1
+        Manual check - Is slice NSI-1 deleted correctly?
+        NSI-2 with 2 nss-edge + 1 nss-middle (The one from NSI-1)
+    8. Create NSI-3
+    9. Instantiate NSI-3
+        Manual check - Is slice NSI-3 instantiated correctly?
+        NSI-3 reuse nss-middle. NSI-3 only create 2 nss-edge
+    10. Terminate NSI-2
+    11. Delete NSI-2
+    12. Terminate NSI-3
+    13. Delete NSI-3
+        Manual check - All cleaned correctly?
+        NSI-2 and NSI-3 were terminated and deleted
+    14. Cleanup database
+    """
+
+    description = "Upload a NST to OSM"
+
+    def __init__(self):
+        self.vim_id = None
+        self.vnfd_filename = "@./slice_shared/vnfd/slice_shared_vnfd.yaml"
+        self.vnfd_filename_middle = "@./slice_shared/vnfd/slice_shared_middle_vnfd.yaml"
+        self.nsd_filename = "@./slice_shared/nsd/slice_shared_nsd.yaml"
+        self.nsd_filename_middle = "@./slice_shared/nsd/slice_shared_middle_nsd.yaml"
+        self.nst_filenames = "@./slice_shared/slice_shared_nstd.yaml"
+
+    def create_slice(self, engine, nsi_data, name):
+        ns_data_text = yaml.safe_dump(nsi_data, default_flow_style=True, width=256)
+        r = engine.test(
+            name,
+            "POST",
+            "/nsilcm/v1/netslice_instances",
+            headers_yaml,
+            ns_data_text,
+            (201, 202),
+            {
+                "Location": "nsilcm/v1/netslice_instances/",
+                "Content-Type": "application/yaml",
+            },
+            "yaml",
+        )
+        return r
+
+    def instantiate_slice(self, engine, nsi_data, nsi_id, name):
+        ns_data_text = yaml.safe_dump(nsi_data, default_flow_style=True, width=256)
+        engine.test(
+            name,
+            "POST",
+            "/nsilcm/v1/netslice_instances/{}/instantiate".format(nsi_id),
+            headers_yaml,
+            ns_data_text,
+            (201, 202),
+            r_headers_yaml_location_nsilcmop,
+            "yaml",
+        )
+
+    def terminate_slice(self, engine, nsi_id, name):
+        engine.test(
+            name,
+            "POST",
+            "/nsilcm/v1/netslice_instances/{}/terminate".format(nsi_id),
+            headers_yaml,
+            None,
+            (201, 202),
+            r_headers_yaml_location_nsilcmop,
+            "yaml",
+        )
+
+    def delete_slice(self, engine, nsi_id, name):
+        engine.test(
+            name,
+            "DELETE",
+            "/nsilcm/v1/netslice_instances/{}".format(nsi_id),
+            headers_yaml,
+            None,
+            204,
+            None,
+            0,
+        )
+
+    def run(self, engine, test_osm, manual_check, test_params=None):
+        # nst CREATE
+        engine.set_test_name("NSI")
+        engine.get_autorization()
+
+        # Onboard VNFDs
+        engine.test(
+            "Onboard edge VNFD",
+            "POST",
+            "/vnfpkgm/v1/vnf_packages_content",
+            headers_yaml,
+            self.vnfd_filename,
+            201,
+            r_headers_yaml_location_vnfd,
+            "yaml",
+        )
+        self.vnfd_edge_id = engine.last_id
+
+        engine.test(
+            "Onboard middle VNFD",
+            "POST",
+            "/vnfpkgm/v1/vnf_packages_content",
+            headers_yaml,
+            self.vnfd_filename_middle,
+            201,
+            r_headers_yaml_location_vnfd,
+            "yaml",
+        )
+        self.vnfd_middle_id = engine.last_id
+
+        # Onboard NSDs
+        engine.test(
+            "Onboard NSD edge",
+            "POST",
+            "/nsd/v1/ns_descriptors_content",
+            headers_yaml,
+            self.nsd_filename,
+            201,
+            r_headers_yaml_location_nsd,
+            "yaml",
+        )
+        self.nsd_edge_id = engine.last_id
+
+        engine.test(
+            "Onboard NSD middle",
+            "POST",
+            "/nsd/v1/ns_descriptors_content",
+            headers_yaml,
+            self.nsd_filename_middle,
+            201,
+            r_headers_yaml_location_nsd,
+            "yaml",
+        )
+        self.nsd_middle_id = engine.last_id
+
+        # Onboard NST
+        engine.test(
+            "Onboard NST",
+            "POST",
+            "/nst/v1/netslice_templates_content",
+            headers_yaml,
+            self.nst_filenames,
+            201,
+            r_headers_yaml_location_nst,
+            "yaml",
+        )
+        nst_id = engine.last_id
+
+        self.vim_id = engine.get_create_vim(test_osm)
+
+        # CREATE NSI-1
+        ns_data = {
+            "nsiName": "Deploy-NSI-1",
+            "vimAccountId": self.vim_id,
+            "nstId": nst_id,
+            "nsiDescription": "default",
+        }
+        r = self.create_slice(engine, ns_data, "Create NSI-1 step 1")
+        if not r:
+            return
+        self.nsi_id1 = engine.last_id
+
+        # INSTANTIATE NSI-1
+        self.instantiate_slice(
+            engine, ns_data, self.nsi_id1, "Instantiate NSI-1 step 2"
+        )
+        nsilcmop_id1 = engine.last_id
+
+        # Waiting for NSI-1
+        if test_osm:
+            engine.wait_operation_ready("nsi", nsilcmop_id1, timeout_deploy)
+
+        # CREATE NSI-2
+        ns_data = {
+            "nsiName": "Deploy-NSI-2",
+            "vimAccountId": self.vim_id,
+            "nstId": nst_id,
+            "nsiDescription": "default",
+        }
+        r = self.create_slice(engine, ns_data, "Create NSI-2 step 1")
+        if not r:
+            return
+        self.nsi_id2 = engine.last_id
+
+        # INSTANTIATE NSI-2
+        self.instantiate_slice(
+            engine, ns_data, self.nsi_id2, "Instantiate NSI-2 step 2"
+        )
+        nsilcmop_id2 = engine.last_id
+
+        # Waiting for NSI-2
+        if test_osm:
+            engine.wait_operation_ready("nsi", nsilcmop_id2, timeout_deploy)
+
+        if manual_check:
+            input(
+                "NSI-1 AND NSI-2 has been deployed. Perform manual check and press enter to resume"
+            )
+
+        # TERMINATE NSI-1
+        if test_osm:
+            self.terminate_slice(engine, self.nsi_id1, "Terminate NSI-1")
+            nsilcmop1_id = engine.last_id
+
+            # Wait terminate NSI-1
+            engine.wait_operation_ready("nsi", nsilcmop1_id, timeout_deploy)
+
+        # DELETE NSI-1
+        self.delete_slice(engine, self.nsi_id1, "Delete NS")
+
+        if manual_check:
+            input(
+                "NSI-1 has been deleted. Perform manual check and press enter to resume"
+            )
+
+        # CREATE NSI-3
+        ns_data = {
+            "nsiName": "Deploy-NSI-3",
+            "vimAccountId": self.vim_id,
+            "nstId": nst_id,
+            "nsiDescription": "default",
+        }
+        r = self.create_slice(engine, ns_data, "Create NSI-3 step 1")
+
+        if not r:
+            return
+        self.nsi_id3 = engine.last_id
+
+        # INSTANTIATE NSI-3
+        self.instantiate_slice(
+            engine, ns_data, self.nsi_id3, "Instantiate NSI-3 step 2"
+        )
+        nsilcmop_id3 = engine.last_id
+
+        # Wait Instantiate NSI-3
+        if test_osm:
+            engine.wait_operation_ready("nsi", nsilcmop_id3, timeout_deploy)
+
+        if manual_check:
+            input(
+                "NSI-3 has been deployed. Perform manual check and press enter to resume"
+            )
+
+        # TERMINATE NSI-2
+        if test_osm:
+            self.terminate_slice(engine, self.nsi_id2, "Terminate NSI-2")
+            nsilcmop2_id = engine.last_id
+
+            # Wait terminate NSI-2
+            engine.wait_operation_ready("nsi", nsilcmop2_id, timeout_deploy)
+
+        # DELETE NSI-2
+        self.delete_slice(engine, self.nsi_id2, "DELETE NSI-2")
+
+        # TERMINATE NSI-3
+        if test_osm:
+            self.terminate_slice(engine, self.nsi_id3, "Terminate NSI-3")
+            nsilcmop3_id = engine.last_id
+
+            # Wait terminate NSI-3
+            engine.wait_operation_ready("nsi", nsilcmop3_id, timeout_deploy)
+
+        # DELETE NSI-3
+        self.delete_slice(engine, self.nsi_id3, "DELETE NSI-3")
+
+        if manual_check:
+            input(
+                "NSI-2 and NSI-3 has been deleted. Perform manual check and press enter to resume"
+            )
+
+        # nstd DELETE
+        engine.test(
+            "Delete NSTD",
+            "DELETE",
+            "/nst/v1/netslice_templates/{}".format(nst_id),
+            headers_json,
+            None,
+            204,
+            None,
+            0,
+        )
+
+        # NSDs DELETE
+        test_rest.test(
+            "Delete NSD middle",
+            "DELETE",
+            "/nsd/v1/ns_descriptors/{}".format(self.nsd_middle_id),
+            headers_json,
+            None,
+            204,
+            None,
+            0,
+        )
+
+        test_rest.test(
+            "Delete NSD edge",
+            "DELETE",
+            "/nsd/v1/ns_descriptors/{}".format(self.nsd_edge_id),
+            headers_json,
+            None,
+            204,
+            None,
+            0,
+        )
+
+        # VNFDs DELETE
+        test_rest.test(
+            "Delete VNFD edge",
+            "DELETE",
+            "/vnfpkgm/v1/vnf_packages/{}".format(self.vnfd_edge_id),
+            headers_yaml,
+            None,
+            204,
+            None,
+            0,
+        )
+
+        test_rest.test(
+            "Delete VNFD middle",
+            "DELETE",
+            "/vnfpkgm/v1/vnf_packages/{}".format(self.vnfd_middle_id),
+            headers_yaml,
+            None,
+            204,
+            None,
+            0,
+        )
+
+
+class TestAuthentication:
+    """End-to-end exercise of the NBI admin/authentication endpoints.
+
+    Lists tokens, projects, users and roles; creates a test project,
+    three roles (empty / default-permissions / token-permissions) and two
+    users; updates them; then deletes every entity it created.  Each call
+    goes through ``engine.test``, which is given the expected HTTP status
+    code and response headers to validate.
+    """
+
+    description = "Test Authentication"
+
+    @staticmethod
+    def run(engine, test_osm, manual_check, test_params=None):
+        """Run the authentication test sequence.
+
+        :param engine: test engine wrapping HTTP access to the NBI
+        :param test_osm: unused by this test (kept for the common run() signature)
+        :param manual_check: unused by this test (kept for the common run() signature)
+        :param test_params: optional extra parameters (currently unused)
+        """
+        engine.set_test_name("Authentication")
+        # backend = test_params.get("backend") if test_params else None   # UNUSED
+
+        # IDs of the entities handled below; each stays None until (and
+        # unless) the corresponding request succeeds.
+        admin_project_id = test_project_id = None
+        project_admin_role_id = project_user_role_id = None
+        test_user_id = empty_user_id = None
+        default_role_id = empty_role_id = token_role_id = None
+
+        # Acquire authorization first (sic: the engine API spells it "autorization")
+        engine.get_autorization()
+
+        # GET
+        engine.test(
+            "Get tokens",
+            "GET",
+            "/admin/v1/tokens",
+            headers_json,
+            {},
+            (200),
+            {"Content-Type": "application/json"},
+            "json",
+        )
+        engine.test(
+            "Get projects",
+            "GET",
+            "/admin/v1/projects",
+            headers_json,
+            {},
+            (200),
+            {"Content-Type": "application/json"},
+            "json",
+        )
+        engine.test(
+            "Get users",
+            "GET",
+            "/admin/v1/users",
+            headers_json,
+            {},
+            (200),
+            {"Content-Type": "application/json"},
+            "json",
+        )
+        engine.test(
+            "Get roles",
+            "GET",
+            "/admin/v1/roles",
+            headers_json,
+            {},
+            (200),
+            {"Content-Type": "application/json"},
+            "json",
+        )
+        # Look up the pre-existing admin project and built-in roles, to be
+        # used below in the project-role mappings.
+        # NOTE(review): engine.test appears to return the HTTP response on
+        # success and None on failure — hence the "if res else None" guards.
+        res = engine.test(
+            "Get admin project",
+            "GET",
+            "/admin/v1/projects?name=admin",
+            headers_json,
+            {},
+            (200),
+            {"Content-Type": "application/json"},
+            "json",
+        )
+        admin_project_id = res.json()[0]["_id"] if res else None
+        res = engine.test(
+            "Get project admin role",
+            "GET",
+            "/admin/v1/roles?name=project_admin",
+            headers_json,
+            {},
+            (200),
+            {"Content-Type": "application/json"},
+            "json",
+        )
+        project_admin_role_id = res.json()[0]["_id"] if res else None
+        res = engine.test(
+            "Get project user role",
+            "GET",
+            "/admin/v1/roles?name=project_user",
+            headers_json,
+            {},
+            (200),
+            {"Content-Type": "application/json"},
+            "json",
+        )
+        project_user_role_id = res.json()[0]["_id"] if res else None
+
+        # POST
+        # Create the test project, three roles and (below) two users.
+        res = engine.test(
+            "Create test project",
+            "POST",
+            "/admin/v1/projects",
+            headers_json,
+            {"name": "test"},
+            (201),
+            {"Location": "/admin/v1/projects/", "Content-Type": "application/json"},
+            "json",
+        )
+        test_project_id = engine.last_id if res else None
+        res = engine.test(
+            "Create role without permissions",
+            "POST",
+            "/admin/v1/roles",
+            headers_json,
+            {"name": "empty"},
+            (201),
+            {"Content-Type": "application/json"},
+            "json",
+        )
+        empty_role_id = engine.last_id if res else None
+        res = engine.test(
+            "Create role with default permissions",
+            "POST",
+            "/admin/v1/roles",
+            headers_json,
+            {"name": "default", "permissions": {"default": True}},
+            (201),
+            {"Location": "/admin/v1/roles/", "Content-Type": "application/json"},
+            "json",
+        )
+        default_role_id = engine.last_id if res else None
+        res = engine.test(
+            "Create role with token permissions",
+            "POST",
+            "/admin/v1/roles",
+            headers_json,
+            {
+                "name": "tokens",
+                "permissions": {"tokens": True},
+            },  # is default required ?
+            (201),
+            {"Location": "/admin/v1/roles/", "Content-Type": "application/json"},
+            "json",
+        )
+        token_role_id = engine.last_id if res else None
+        pr = "project-role mappings"
+        res = engine.test(
+            "Create user without " + pr,
+            "POST",
+            "/admin/v1/users",
+            headers_json,
+            {"username": "empty", "password": "empty"},
+            201,
+            {"Content-Type": "application/json"},
+            "json",
+        )
+        empty_user_id = engine.last_id if res else None
+        # A user with mappings can only be created if all four prerequisite
+        # IDs were obtained above.
+        if (
+            admin_project_id
+            and test_project_id
+            and project_admin_role_id
+            and project_user_role_id
+        ):
+            data = {"username": "test", "password": "test"}
+            data["project_role_mappings"] = [
+                {"project": test_project_id, "role": project_admin_role_id},
+                {"project": admin_project_id, "role": project_user_role_id},
+            ]
+            res = engine.test(
+                "Create user with " + pr,
+                "POST",
+                "/admin/v1/users",
+                headers_json,
+                data,
+                (201),
+                {"Content-Type": "application/json"},
+                "json",
+            )
+            test_user_id = engine.last_id if res else None
+
+        # PUT
+        if test_user_id:
+            engine.test(
+                "Modify test user's password",
+                "PUT",
+                "/admin/v1/users/" + test_user_id,
+                headers_json,
+                {"password": "password"},
+                (204),
+                {},
+                0,
+            )
+        # Give the mapping-less "empty" user the same project-role mappings
+        # as the "test" user (again only if every prerequisite ID exists).
+        if (
+            empty_user_id
+            and admin_project_id
+            and test_project_id
+            and project_admin_role_id
+            and project_user_role_id
+        ):
+            data = {
+                "project_role_mappings": [
+                    {"project": test_project_id, "role": project_admin_role_id},
+                    {"project": admin_project_id, "role": project_user_role_id},
+                ]
+            }
+            engine.test(
+                "Modify empty user's " + pr,
+                "PUT",
+                "/admin/v1/users/" + empty_user_id,
+                headers_json,
+                data,
+                (204),
+                {},
+                0,
+            )
+
+        # DELETE
+        # Clean up every entity created above; each deletion is guarded so a
+        # failed creation does not cascade into bogus delete failures.
+        if empty_user_id:
+            engine.test(
+                "Delete empty user",
+                "DELETE",
+                "/admin/v1/users/" + empty_user_id,
+                headers_json,
+                {},
+                (204),
+                {},
+                0,
+            )
+        if test_user_id:
+            engine.test(
+                "Delete test user",
+                "DELETE",
+                "/admin/v1/users/" + test_user_id,
+                headers_json,
+                {},
+                (204),
+                {},
+                0,
+            )
+        if empty_role_id:
+            engine.test(
+                "Delete empty role",
+                "DELETE",
+                "/admin/v1/roles/" + empty_role_id,
+                headers_json,
+                {},
+                (204),
+                {},
+                0,
+            )
+        if default_role_id:
+            engine.test(
+                "Delete default role",
+                "DELETE",
+                "/admin/v1/roles/" + default_role_id,
+                headers_json,
+                {},
+                (204),
+                {},
+                0,
+            )
+        if token_role_id:
+            engine.test(
+                "Delete token role",
+                "DELETE",
+                "/admin/v1/roles/" + token_role_id,
+                headers_json,
+                {},
+                (204),
+                {},
+                0,
+            )
+        if test_project_id:
+            engine.test(
+                "Delete test project",
+                "DELETE",
+                "/admin/v1/projects/" + test_project_id,
+                headers_json,
+                {},
+                (204),
+                {},
+                0,
+            )
+
+        # END Tests
+
+        engine.remove_authorization()  # To finish
+
+
+class TestNbiQuotas:
+    description = "Test NBI Quotas"
+
+    @staticmethod
+    def run(engine, test_osm, manual_check, test_params=None):
+        engine.set_test_name("NBI-Quotas_")
+        # backend = test_params.get("backend") if test_params else None   # UNUSED
+
+        test_username = "test-nbi-quotas"
+        test_password = "test-nbi-quotas"
+        test_project = "test-nbi-quotas"
+
+        test_vim = "test-nbi-quotas"
+        test_wim = "test-nbi-quotas"
+        test_sdn = "test-nbi-quotas"
+
+        test_user_id = None
+        test_project_id = None
+
+        test_vim_ids = []
+        test_wim_ids = []
+        test_sdn_ids = []
+        test_vnfd_ids = []
+        test_nsd_ids = []
+        test_nst_ids = []
+        test_pdu_ids = []
+        test_nsr_ids = []
+        test_nsi_ids = []
+
+        # Save admin access data
+        admin_username = engine.user
+        admin_password = engine.password
+        admin_project = engine.project
+
+        # Get admin access
+        engine.get_autorization()
+        admin_token = engine.last_id
+
+        # Check that test project,user do not exist
+        res1 = engine.test(
+            "Check that test project doesn't exist",
+            "GET",
+            "/admin/v1/projects/" + test_project,
+            headers_json,
+            {},
+            (404),
+            {},
+            True,
+        )
+        res2 = engine.test(
+            "Check that test user doesn't exist",
+            "GET",
+            "/admin/v1/users/" + test_username,
+            headers_json,
+            {},
+            (404),
+            {},
+            True,
+        )
+        if None in [res1, res2]:
+            engine.remove_authorization()
+            logger.error("Test project and/or user already exist")
+            return
+
+        # Create test project&user
+        res = engine.test(
+            "Create test project",
+            "POST",
+            "/admin/v1/projects",
+            headers_json,
+            {
+                "name": test_username,
+                "quotas": {
+                    "vnfds": 2,
+                    "nsds": 2,
+                    "nsts": 1,
+                    "pdus": 1,
+                    "nsrs": 2,
+                    "nsis": 1,
+                    "vim_accounts": 1,
+                    "wim_accounts": 1,
+                    "sdns": 1,
+                },
+            },
+            (201),
+            r_header_json,
+            "json",
+        )
+        test_project_id = engine.last_id if res else None
+        res = engine.test(
+            "Create test user",
+            "POST",
+            "/admin/v1/users",
+            headers_json,
+            {
+                "username": test_username,
+                "password": test_password,
+                "project_role_mappings": [
+                    {"project": test_project, "role": "project_admin"}
+                ],
+            },
+            (201),
+            r_header_json,
+            "json",
+        )
+        test_user_id = engine.last_id if res else None
+
+        if test_project_id and test_user_id:
+            # Get user access
+            engine.token = None
+            engine.user = test_username
+            engine.password = test_password
+            engine.project = test_project
+            engine.get_autorization()
+            user_token = engine.last_id
+
+            # Create test VIM
+            res = engine.test(
+                "Create test VIM",
+                "POST",
+                "/admin/v1/vim_accounts",
+                headers_json,
+                {
+                    "name": test_vim,
+                    "vim_type": "openvim",
+                    "vim_user": test_username,
+                    "vim_password": test_password,
+                    "vim_tenant_name": test_project,
+                    "vim_url": "https://0.0.0.0:0/v0.0",
+                },
+                (202),
+                r_header_json,
+                "json",
+            )
+            test_vim_ids += [engine.last_id if res else None]
+
+            res = engine.test(
+                "Try to create second test VIM",
+                "POST",
+                "/admin/v1/vim_accounts",
+                headers_json,
+                {
+                    "name": test_vim + "_2",
+                    "vim_type": "openvim",
+                    "vim_user": test_username,
+                    "vim_password": test_password,
+                    "vim_tenant_name": test_project,
+                    "vim_url": "https://0.0.0.0:0/v0.0",
+                },
+                (422),
+                r_header_json,
+                "json",
+            )
+            test_vim_ids += [engine.last_id if res is None else None]
+
+            res = engine.test(
+                "Try to create second test VIM with FORCE",
+                "POST",
+                "/admin/v1/vim_accounts?FORCE",
+                headers_json,
+                {
+                    "name": test_vim + "_3",
+                    "vim_type": "openvim",
+                    "vim_user": test_username,
+                    "vim_password": test_password,
+                    "vim_tenant_name": test_project,
+                    "vim_url": "https://0.0.0.0:0/v0.0",
+                },
+                (202),
+                r_header_json,
+                "json",
+            )
+            test_vim_ids += [engine.last_id if res else None]
+
+            if test_vim_ids[0]:
+                # Download descriptor files (if required)
+                test_dir = "/tmp/" + test_username + "/"
+                test_url = "https://osm-download.etsi.org/ftp/osm-6.0-six/7th-hackfest/packages/"
+                vnfd_filenames = [
+                    "slice_hackfest_vnfd.tar.gz",
+                    "slice_hackfest_middle_vnfd.tar.gz",
+                ]
+                nsd_filenames = [
+                    "slice_hackfest_nsd.tar.gz",
+                    "slice_hackfest_middle_nsd.tar.gz",
+                ]
+                nst_filenames = ["slice_hackfest_nstd.yaml"]
+                pdu_filenames = ["PDU_router.yaml"]
+                desc_filenames = (
+                    vnfd_filenames + nsd_filenames + nst_filenames + pdu_filenames
+                )
+                if not os.path.exists(test_dir):
+                    os.makedirs(test_dir)
+                for filename in desc_filenames:
+                    if not os.path.exists(test_dir + filename):
+                        res = requests.get(test_url + filename)
+                        if res.status_code < 300:
+                            with open(test_dir + filename, "wb") as file:
+                                file.write(res.content)
+
+                if all([os.path.exists(test_dir + p) for p in desc_filenames]):
+                    # Test VNFD Quotas
+                    res = engine.test(
+                        "Create test VNFD #1",
+                        "POST",
+                        "/vnfpkgm/v1/vnf_packages_content",
+                        headers_zip_json,
+                        "@b" + test_dir + vnfd_filenames[0],
+                        (201),
+                        r_header_json,
+                        "json",
+                    )
+                    test_vnfd_ids += [engine.last_id if res else None]
+                    res = engine.test(
+                        "Create test VNFD #2",
+                        "POST",
+                        "/vnfpkgm/v1/vnf_packages_content",
+                        headers_zip_json,
+                        "@b" + test_dir + vnfd_filenames[1],
+                        (201),
+                        r_header_json,
+                        "json",
+                    )
+                    test_vnfd_ids += [engine.last_id if res else None]
+                    res = engine.test(
+                        "Try to create extra test VNFD",
+                        "POST",
+                        "/vnfpkgm/v1/vnf_packages_content",
+                        headers_zip_json,
+                        "@b" + test_dir + vnfd_filenames[0],
+                        (422),
+                        r_header_json,
+                        "json",
+                    )
+                    test_vnfd_ids += [engine.last_id if res is None else None]
+                    res = engine.test(
+                        "Try to create extra test VNFD with FORCE",
+                        "POST",
+                        "/vnfpkgm/v1/vnf_packages_content?FORCE",
+                        headers_zip_json,
+                        "@b" + test_dir + vnfd_filenames[0],
+                        (201),
+                        r_header_json,
+                        "json",
+                    )
+                    test_vnfd_ids += [engine.last_id if res else None]
+
+                    # Remove extra VNFDs to prevent further errors
+                    for i in [2, 3]:
+                        if test_vnfd_ids[i]:
+                            res = engine.test(
+                                "Delete test VNFD #" + str(i),
+                                "DELETE",
+                                "/vnfpkgm/v1/vnf_packages_content/"
+                                + test_vnfd_ids[i]
+                                + "?FORCE",
+                                headers_json,
+                                {},
+                                (204),
+                                {},
+                                0,
+                            )
+                            if res:
+                                test_vnfd_ids[i] = None
+
+                    if test_vnfd_ids[0] and test_vnfd_ids[1]:
+                        # Test NSD Quotas
+                        res = engine.test(
+                            "Create test NSD #1",
+                            "POST",
+                            "/nsd/v1/ns_descriptors_content",
+                            headers_zip_json,
+                            "@b" + test_dir + nsd_filenames[0],
+                            (201),
+                            r_header_json,
+                            "json",
+                        )
+                        test_nsd_ids += [engine.last_id if res else None]
+                        res = engine.test(
+                            "Create test NSD #2",
+                            "POST",
+                            "/nsd/v1/ns_descriptors_content",
+                            headers_zip_json,
+                            "@b" + test_dir + nsd_filenames[1],
+                            (201),
+                            r_header_json,
+                            "json",
+                        )
+                        test_nsd_ids += [engine.last_id if res else None]
+                        res = engine.test(
+                            "Try to create extra test NSD",
+                            "POST",
+                            "/nsd/v1/ns_descriptors_content",
+                            headers_zip_json,
+                            "@b" + test_dir + nsd_filenames[0],
+                            (422),
+                            r_header_json,
+                            "json",
+                        )
+                        test_nsd_ids += [engine.last_id if res is None else None]
+                        res = engine.test(
+                            "Try to create extra test NSD with FORCE",
+                            "POST",
+                            "/nsd/v1/ns_descriptors_content?FORCE",
+                            headers_zip_json,
+                            "@b" + test_dir + nsd_filenames[0],
+                            (201),
+                            r_header_json,
+                            "json",
+                        )
+                        test_nsd_ids += [engine.last_id if res else None]
+
+                        # Remove extra NSDs to prevent further errors
+                        for i in [2, 3]:
+                            if test_nsd_ids[i]:
+                                res = engine.test(
+                                    "Delete test NSD #" + str(i),
+                                    "DELETE",
+                                    "/nsd/v1/ns_descriptors_content/"
+                                    + test_nsd_ids[i]
+                                    + "?FORCE",
+                                    headers_json,
+                                    {},
+                                    (204),
+                                    {},
+                                    0,
+                                )
+                                if res:
+                                    test_nsd_ids[i] = None
+
+                        if test_nsd_ids[0] and test_nsd_ids[1]:
+                            # Test NSR Quotas
+                            res = engine.test(
+                                "Create test NSR #1",
+                                "POST",
+                                "/nslcm/v1/ns_instances_content",
+                                headers_json,
+                                {
+                                    "nsName": test_username + "_1",
+                                    "nsdId": test_nsd_ids[0],
+                                    "vimAccountId": test_vim_ids[0],
+                                },
+                                (201),
+                                r_header_json,
+                                "json",
+                            )
+                            test_nsr_ids += [engine.last_id if res else None]
+                            res = engine.test(
+                                "Create test NSR #2",
+                                "POST",
+                                "/nslcm/v1/ns_instances_content",
+                                headers_json,
+                                {
+                                    "nsName": test_username + "_2",
+                                    "nsdId": test_nsd_ids[1],
+                                    "vimAccountId": test_vim_ids[0],
+                                },
+                                (201),
+                                r_header_json,
+                                "json",
+                            )
+                            test_nsr_ids += [engine.last_id if res else None]
+                            res = engine.test(
+                                "Try to create extra test NSR",
+                                "POST",
+                                "/nslcm/v1/ns_instances_content",
+                                headers_json,
+                                {
+                                    "nsName": test_username + "_3",
+                                    "nsdId": test_nsd_ids[0],
+                                    "vimAccountId": test_vim_ids[0],
+                                },
+                                (422),
+                                r_header_json,
+                                "json",
+                            )
+                            test_nsr_ids += [engine.last_id if res is None else None]
+                            res = engine.test(
+                                "Try to create test NSR with FORCE",
+                                "POST",
+                                "/nslcm/v1/ns_instances_content?FORCE",
+                                headers_json,
+                                {
+                                    "nsName": test_username + "_4",
+                                    "nsdId": test_nsd_ids[0],
+                                    "vimAccountId": test_vim_ids[0],
+                                },
+                                (201),
+                                r_header_json,
+                                "json",
+                            )
+                            test_nsr_ids += [engine.last_id if res else None]
+
+                            # Test NST Quotas
+                            res = engine.test(
+                                "Create test NST",
+                                "POST",
+                                "/nst/v1/netslice_templates_content",
+                                headers_txt_json,
+                                "@b" + test_dir + nst_filenames[0],
+                                (201),
+                                r_header_json,
+                                "json",
+                            )
+                            test_nst_ids += [engine.last_id if res else None]
+                            res = engine.test(
+                                "Try to create extra test NST",
+                                "POST",
+                                "/nst/v1/netslice_templates_content",
+                                headers_txt_json,
+                                "@b" + test_dir + nst_filenames[0],
+                                (422),
+                                r_header_json,
+                                "json",
+                            )
+                            test_nst_ids += [engine.last_id if res is None else None]
+                            res = engine.test(
+                                "Try to create extra test NST with FORCE",
+                                "POST",
+                                "/nst/v1/netslice_templates_content?FORCE",
+                                headers_txt_json,
+                                "@b" + test_dir + nst_filenames[0],
+                                (201),
+                                r_header_json,
+                                "json",
+                            )
+                            test_nst_ids += [engine.last_id if res else None]
+
+                            if test_nst_ids[0]:
+                                # Remove NSR Quota
+                                engine.set_header(
+                                    {"Authorization": "Bearer {}".format(admin_token)}
+                                )
+                                res = engine.test(
+                                    "Remove NSR Quota",
+                                    "PUT",
+                                    "/admin/v1/projects/" + test_project_id,
+                                    headers_json,
+                                    {"quotas": {"nsrs": None}},
+                                    (204),
+                                    {},
+                                    0,
+                                )
+                                engine.set_header(
+                                    {"Authorization": "Bearer {}".format(user_token)}
+                                )
+                                if res:
+                                    # Test NSI Quotas
+                                    res = engine.test(
+                                        "Create test NSI",
+                                        "POST",
+                                        "/nsilcm/v1/netslice_instances_content",
+                                        headers_json,
+                                        {
+                                            "nsiName": test_username,
+                                            "nstId": test_nst_ids[0],
+                                            "vimAccountId": test_vim_ids[0],
+                                        },
+                                        (201),
+                                        r_header_json,
+                                        "json",
+                                    )
+                                    test_nsi_ids += [engine.last_id if res else None]
+                                    res = engine.test(
+                                        "Try to create extra test NSI",
+                                        "POST",
+                                        "/nsilcm/v1/netslice_instances_content",
+                                        headers_json,
+                                        {
+                                            "nsiName": test_username,
+                                            "nstId": test_nst_ids[0],
+                                            "vimAccountId": test_vim_ids[0],
+                                        },
+                                        (400),
+                                        r_header_json,
+                                        "json",
+                                    )
+                                    test_nsi_ids += [
+                                        engine.last_id if res is None else None
+                                    ]
+                                    res = engine.test(
+                                        "Try to create extra test NSI with FORCE",
+                                        "POST",
+                                        "/nsilcm/v1/netslice_instances_content?FORCE",
+                                        headers_json,
+                                        {
+                                            "nsiName": test_username,
+                                            "nstId": test_nst_ids[0],
+                                            "vimAccountId": test_vim_ids[0],
+                                        },
+                                        (201),
+                                        r_header_json,
+                                        "json",
+                                    )
+                                    test_nsi_ids += [engine.last_id if res else None]
+
+                    # Test PDU Quotas
+                    with open(test_dir + pdu_filenames[0], "rb") as file:
+                        pdu_text = re.sub(
+                            r"ip-address: *\[[^\]]*\]",
+                            "ip-address: '0.0.0.0'",
+                            file.read().decode("utf-8"),
+                        )
+                    with open(test_dir + pdu_filenames[0], "wb") as file:
+                        file.write(pdu_text.encode("utf-8"))
+                    res = engine.test(
+                        "Create test PDU",
+                        "POST",
+                        "/pdu/v1/pdu_descriptors",
+                        headers_yaml,
+                        "@b" + test_dir + pdu_filenames[0],
+                        (201),
+                        r_header_yaml,
+                        "yaml",
+                    )
+                    test_pdu_ids += [engine.last_id if res else None]
+                    res = engine.test(
+                        "Try to create extra test PDU",
+                        "POST",
+                        "/pdu/v1/pdu_descriptors",
+                        headers_yaml,
+                        "@b" + test_dir + pdu_filenames[0],
+                        (422),
+                        r_header_yaml,
+                        "yaml",
+                    )
+                    test_pdu_ids += [engine.last_id if res is None else None]
+                    res = engine.test(
+                        "Try to create extra test PDU with FORCE",
+                        "POST",
+                        "/pdu/v1/pdu_descriptors?FORCE",
+                        headers_yaml,
+                        "@b" + test_dir + pdu_filenames[0],
+                        (201),
+                        r_header_yaml,
+                        "yaml",
+                    )
+                    test_pdu_ids += [engine.last_id if res else None]
+
+                    # Cleanup
+                    for i, id in enumerate(test_nsi_ids):
+                        if id:
+                            engine.test(
+                                "Delete test NSI #" + str(i),
+                                "DELETE",
+                                "/nsilcm/v1/netslice_instances_content/"
+                                + id
+                                + "?FORCE",
+                                headers_json,
+                                {},
+                                (204),
+                                {},
+                                0,
+                            )
+                    for i, id in enumerate(test_nsr_ids):
+                        if id:
+                            engine.test(
+                                "Delete test NSR #" + str(i),
+                                "DELETE",
+                                "/nslcm/v1/ns_instances_content/" + id + "?FORCE",
+                                headers_json,
+                                {},
+                                (204),
+                                {},
+                                0,
+                            )
+                    for i, id in enumerate(test_nst_ids):
+                        if id:
+                            engine.test(
+                                "Delete test NST #" + str(i),
+                                "DELETE",
+                                "/nst/v1/netslice_templates_content/" + id + "?FORCE",
+                                headers_json,
+                                {},
+                                (204),
+                                {},
+                                0,
+                            )
+                    for i, id in enumerate(test_nsd_ids):
+                        if id:
+                            engine.test(
+                                "Delete test NSD #" + str(i),
+                                "DELETE",
+                                "/nsd/v1/ns_descriptors_content/" + id + "?FORCE",
+                                headers_json,
+                                {},
+                                (204),
+                                {},
+                                0,
+                            )
+                    for i, id in enumerate(test_vnfd_ids):
+                        if id:
+                            engine.test(
+                                "Delete test VNFD #" + str(i),
+                                "DELETE",
+                                "/vnfpkgm/v1/vnf_packages_content/" + id + "?FORCE",
+                                headers_json,
+                                {},
+                                (204),
+                                {},
+                                0,
+                            )
+                    for i, id in enumerate(test_pdu_ids):
+                        if id:
+                            engine.test(
+                                "Delete test PDU #" + str(i),
+                                "DELETE",
+                                "/pdu/v1/pdu_descriptors/" + id + "?FORCE",
+                                headers_json,
+                                {},
+                                (204),
+                                {},
+                                0,
+                            )
+
+                    # END Test NBI Quotas
+
+            # Test WIM Quotas
+            res = engine.test(
+                "Create test WIM",
+                "POST",
+                "/admin/v1/wim_accounts",
+                headers_json,
+                {
+                    "name": test_wim,
+                    "wim_type": "onos",
+                    "wim_url": "https://0.0.0.0:0/v0.0",
+                },
+                (202),
+                r_header_json,
+                "json",
+            )
+            test_wim_ids += [engine.last_id if res else None]
+            res = engine.test(
+                "Try to create second test WIM",
+                "POST",
+                "/admin/v1/wim_accounts",
+                headers_json,
+                {
+                    "name": test_wim + "_2",
+                    "wim_type": "onos",
+                    "wim_url": "https://0.0.0.0:0/v0.0",
+                },
+                (422),
+                r_header_json,
+                "json",
+            )
+            test_wim_ids += [engine.last_id if res is None else None]
+            res = engine.test(
+                "Try to create second test WIM with FORCE",
+                "POST",
+                "/admin/v1/wim_accounts?FORCE",
+                headers_json,
+                {
+                    "name": test_wim + "_3",
+                    "wim_type": "onos",
+                    "wim_url": "https://0.0.0.0:0/v0.0",
+                },
+                (202),
+                r_header_json,
+                "json",
+            )
+            test_wim_ids += [engine.last_id if res else None]
+
+            # Test SDN Quotas
+            res = engine.test(
+                "Create test SDN",
+                "POST",
+                "/admin/v1/sdns",
+                headers_json,
+                {
+                    "name": test_sdn,
+                    "type": "onos",
+                    "ip": "0.0.0.0",
+                    "port": 9999,
+                    "dpid": "00:00:00:00:00:00:00:00",
+                },
+                (202),
+                r_header_json,
+                "json",
+            )
+            test_sdn_ids += [engine.last_id if res else None]
+            res = engine.test(
+                "Try to create second test SDN",
+                "POST",
+                "/admin/v1/sdns",
+                headers_json,
+                {
+                    "name": test_sdn + "_2",
+                    "type": "onos",
+                    "ip": "0.0.0.0",
+                    "port": 9999,
+                    "dpid": "00:00:00:00:00:00:00:00",
+                },
+                (422),
+                r_header_json,
+                "json",
+            )
+            test_sdn_ids += [engine.last_id if res is None else None]
+            res = engine.test(
+                "Try to create second test SDN with FORCE",
+                "POST",
+                "/admin/v1/sdns?FORCE",
+                headers_json,
+                {
+                    "name": test_sdn + "_3",
+                    "type": "onos",
+                    "ip": "0.0.0.0",
+                    "port": 9999,
+                    "dpid": "00:00:00:00:00:00:00:00",
+                },
+                (202),
+                r_header_json,
+                "json",
+            )
+            test_sdn_ids += [engine.last_id if res else None]
+
+            # Cleanup
+            for i, id in enumerate(test_vim_ids):
+                if id:
+                    engine.test(
+                        "Delete test VIM #" + str(i),
+                        "DELETE",
+                        "/admin/v1/vim_accounts/" + id + "?FORCE",
+                        headers_json,
+                        {},
+                        (202),
+                        {},
+                        0,
+                    )
+            for i, id in enumerate(test_wim_ids):
+                if id:
+                    engine.test(
+                        "Delete test WIM #" + str(i),
+                        "DELETE",
+                        "/admin/v1/wim_accounts/" + id + "?FORCE",
+                        headers_json,
+                        {},
+                        (202),
+                        {},
+                        0,
+                    )
+            for i, id in enumerate(test_sdn_ids):
+                if id:
+                    engine.test(
+                        "Delete test SDN #" + str(i),
+                        "DELETE",
+                        "/admin/v1/sdns/" + id + "?FORCE",
+                        headers_json,
+                        {},
+                        (202),
+                        {},
+                        0,
+                    )
+
+            # Release user access
+            engine.remove_authorization()
+
+        # Cleanup
+        engine.user = admin_username
+        engine.password = admin_password
+        engine.project = admin_project
+        engine.get_autorization()
+        if test_user_id:
+            engine.test(
+                "Delete test user",
+                "DELETE",
+                "/admin/v1/users/" + test_user_id + "?FORCE",
+                headers_json,
+                {},
+                (204),
+                {},
+                0,
+            )
+        if test_project_id:
+            engine.test(
+                "Delete test project",
+                "DELETE",
+                "/admin/v1/projects/" + test_project_id + "?FORCE",
+                headers_json,
+                {},
+                (204),
+                {},
+                0,
+            )
+        engine.remove_authorization()
+
+    # END class TestNbiQuotas
+
+
+if __name__ == "__main__":
+    global logger
+    test = ""
+
+    # Disable warnings from self-signed certificates.
+    requests.packages.urllib3.disable_warnings()
+    try:
+        logging.basicConfig(format="%(levelname)s %(message)s", level=logging.ERROR)
+        logger = logging.getLogger("NBI")
+        # load parameters and configuration
+        opts, args = getopt.getopt(
+            sys.argv[1:],
+            "hvu:p:",
+            [
+                "url=",
+                "user=",
+                "password=",
+                "help",
+                "version",
+                "verbose",
+                "no-verbose",
+                "project=",
+                "insecure",
+                "timeout=",
+                "timeout-deploy=",
+                "timeout-configure=",
+                "test=",
+                "list",
+                "test-osm",
+                "manual-check",
+                "params=",
+                "fail-fast",
+            ],
+        )
+        url = "https://localhost:9999/osm"
+        user = password = project = "admin"
+        test_osm = False
+        manual_check = False
+        verbose = 0
+        verify = True
+        fail_fast = False
+        test_classes = {
+            "NonAuthorized": TestNonAuthorized,
+            "FakeVIM": TestFakeVim,
+            "Users-Projects": TestUsersProjects,
+            "Projects-Descriptors": TestProjectsDescriptors,
+            "VIM-SDN": TestVIMSDN,
+            "Deploy-Custom": TestDeploy,
+            "Deploy-Hackfest-Cirros": TestDeployHackfestCirros,
+            "Deploy-Hackfest-Cirros-Scaling": TestDeployHackfestCirrosScaling,
+            "Deploy-Hackfest-3Charmed": TestDeployHackfest3Charmed,
+            "Deploy-Hackfest-3Charmed2": TestDeployHackfest3Charmed2,
+            "Deploy-Hackfest-3Charmed3": TestDeployHackfest3Charmed3,
+            "Deploy-Hackfest-4": TestDeployHackfest4,
+            "Deploy-CirrosMacIp": TestDeployIpMac,
+            "Descriptors": TestDescriptors,
+            "Deploy-Hackfest1": TestDeployHackfest1,
+            # "Deploy-MultiVIM": TestDeployMultiVIM,
+            "Deploy-SingleVdu": TestDeploySingleVdu,
+            "Deploy-Hnfd": TestDeployHnfd,
+            "Upload-Slice-Template": TestNetSliceTemplates,
+            "Deploy-Slice-Instance": TestNetSliceInstances,
+            "Deploy-SimpleCharm": TestDeploySimpleCharm,
+            "Deploy-SimpleCharm2": TestDeploySimpleCharm2,
+            "Authentication": TestAuthentication,
+            "NBI-Quotas": TestNbiQuotas,
+        }
+        test_to_do = []
+        test_params = {}
+
+        for o, a in opts:
+            # print("parameter:", o, a)
+            if o == "--version":
+                print("test version " + __version__ + " " + version_date)
+                exit()
+            elif o == "--list":
+                for test, test_class in sorted(test_classes.items()):
+                    print("{:32} {}".format(test + ":", test_class.description))
+                exit()
+            elif o in ("-v", "--verbose"):
+                verbose += 1
+            elif o == "--no-verbose":
+                verbose = -1
+            elif o in ("-h", "--help"):
+                usage()
+                sys.exit()
+            elif o == "--test-osm":
+                test_osm = True
+            elif o == "--manual-check":
+                manual_check = True
+            elif o == "--url":
+                url = a
+            elif o in ("-u", "--user"):
+                user = a
+            elif o in ("-p", "--password"):
+                password = a
+            elif o == "--project":
+                project = a
+            elif o == "--fail-fast":
+                fail_fast = True
+            elif o == "--test":
+                for _test in a.split(","):
+                    if _test not in test_classes:
+                        print(
+                            "Invalid test name '{}'. Use option '--list' to show available tests".format(
+                                _test
+                            ),
+                            file=sys.stderr,
+                        )
+                        exit(1)
+                    test_to_do.append(_test)
+            elif o == "--params":
+                param_key, _, param_value = a.partition("=")
+                text_index = len(test_to_do)
+                if text_index not in test_params:
+                    test_params[text_index] = {}
+                test_params[text_index][param_key] = param_value
+            elif o == "--insecure":
+                verify = False
+            elif o == "--timeout":
+                timeout = int(a)
+            elif o == "--timeout-deploy":
+                timeout_deploy = int(a)
+            elif o == "--timeout-configure":
+                timeout_configure = int(a)
+            else:
+                assert False, "Unhandled option"
+        if verbose == 0:
+            logger.setLevel(logging.WARNING)
+        elif verbose > 1:
+            logger.setLevel(logging.DEBUG)
+        else:
+            logger.setLevel(logging.ERROR)
+
+        test_rest = TestRest(url, user=user, password=password, project=project)
+        # print("tests to do:", test_to_do)
+        if test_to_do:
+            text_index = 0
+            for test in test_to_do:
+                if fail_fast and test_rest.failed_tests:
+                    break
+                text_index += 1
+                test_class = test_classes[test]
+                test_class().run(
+                    test_rest, test_osm, manual_check, test_params.get(text_index)
+                )
+        else:
+            for test, test_class in sorted(test_classes.items()):
+                if fail_fast and test_rest.failed_tests:
+                    break
+                test_class().run(test_rest, test_osm, manual_check, test_params.get(0))
+        test_rest.print_results()
+        exit(1 if test_rest.failed_tests else 0)
+
+    except TestException as e:
+        logger.error("Test {} Exception: {}".format(test, str(e)))
+        exit(1)
+    except getopt.GetoptError as e:
+        logger.error(e)
+        print(e, file=sys.stderr)
+        exit(1)
+    except Exception as e:
+        logger.critical(test + " Exception: " + str(e), exc_info=True)
+        exit(1)
index 36d341f..0c1e93c 100755 (executable)
@@ -18,7 +18,4 @@ rm -rf pool
 rm -rf dists
 mkdir -p pool/$MDG
 mv deb_dist/*.deb pool/$MDG/
 rm -rf dists
 mkdir -p pool/$MDG
 mv deb_dist/*.deb pool/$MDG/
-mkdir -p dists/unstable/$MDG/binary-amd64/
-apt-ftparchive packages pool/$MDG > dists/unstable/$MDG/binary-amd64/Packages
-gzip -9fk dists/unstable/$MDG/binary-amd64/Packages
-echo "dists/**,pool/$MDG/*.deb"
+
index e5a8e85..5a86dc9 100755 (executable)
@@ -1,4 +1,4 @@
-#!/bin/sh
+#!/bin/bash
 
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
 
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
@@ -13,5 +13,5 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-OUTPUT=$(TOX_PARALLEL_NO_SPINNER=1 tox --parallel=auto)
-printf "$OUTPUT"
+echo "Launching tox"
+TOX_PARALLEL_NO_SPINNER=1 tox --parallel=auto
index 786d237..de0ad1f 100644 (file)
@@ -345,7 +345,6 @@ class CommonVimWimSdn(BaseTopic):
                 schema_version
             ) or self.config_to_encrypt.get("default")
             if edit_content.get("config") and config_to_encrypt_keys:
                 schema_version
             ) or self.config_to_encrypt.get("default")
             if edit_content.get("config") and config_to_encrypt_keys:
-
                 for p in config_to_encrypt_keys:
                     if edit_content["config"].get(p):
                         final_content["config"][p] = self.db.encrypt(
                 for p in config_to_encrypt_keys:
                     if edit_content["config"].get(p):
                         final_content["config"][p] = self.db.encrypt(
@@ -395,6 +394,29 @@ class CommonVimWimSdn(BaseTopic):
         # create operation
         content["_admin"]["operations"] = [self._create_operation("create")]
         content["_admin"]["current_operation"] = None
         # create operation
         content["_admin"]["operations"] = [self._create_operation("create")]
         content["_admin"]["current_operation"] = None
+        # create Resource in Openstack based VIM
+        if content.get("vim_type"):
+            if content["vim_type"] == "openstack":
+                compute = {
+                    "ram": {"total": None, "used": None},
+                    "vcpus": {"total": None, "used": None},
+                    "instances": {"total": None, "used": None},
+                }
+                storage = {
+                    "volumes": {"total": None, "used": None},
+                    "snapshots": {"total": None, "used": None},
+                    "storage": {"total": None, "used": None},
+                }
+                network = {
+                    "networks": {"total": None, "used": None},
+                    "subnets": {"total": None, "used": None},
+                    "floating_ips": {"total": None, "used": None},
+                }
+                content["resources"] = {
+                    "compute": compute,
+                    "storage": storage,
+                    "network": network,
+                }
 
         return "{}:0".format(content["_id"])
 
 
         return "{}:0".format(content["_id"])
 
@@ -418,7 +440,7 @@ class CommonVimWimSdn(BaseTopic):
 
         # remove reference from project_read if there are more projects referencing it. If it last one,
         # do not remove reference, but order via kafka to delete it
 
         # remove reference from project_read if there are more projects referencing it. If it last one,
         # do not remove reference, but order via kafka to delete it
-        if session["project_id"] and session["project_id"]:
+        if session["project_id"]:
             other_projects_referencing = next(
                 (
                     p
             other_projects_referencing = next(
                 (
                     p
@@ -522,7 +544,7 @@ class WimAccountTopic(CommonVimWimSdn):
     schema_new = wim_account_new_schema
     schema_edit = wim_account_edit_schema
     multiproject = True
     schema_new = wim_account_new_schema
     schema_edit = wim_account_edit_schema
     multiproject = True
-    password_to_encrypt = "wim_password"
+    password_to_encrypt = "password"
     config_to_encrypt = {}
 
 
     config_to_encrypt = {}
 
 
@@ -978,6 +1000,8 @@ class UserTopicAuth(UserTopic):
                 or indata.get("project_role_mappings")
                 or indata.get("projects")
                 or indata.get("add_projects")
                 or indata.get("project_role_mappings")
                 or indata.get("projects")
                 or indata.get("add_projects")
+                or indata.get("unlock")
+                or indata.get("renew")
             ):
                 return _id
             if indata.get("project_role_mappings") and (
             ):
                 return _id
             if indata.get("project_role_mappings") and (
@@ -1051,7 +1075,6 @@ class UserTopicAuth(UserTopic):
                         mapping["role"],
                         mapping["role_name"],
                     ):
                         mapping["role"],
                         mapping["role_name"],
                     ):
-
                         if mapping in mappings_to_remove:  # do not remove
                             mappings_to_remove.remove(mapping)
                         break  # do not add, it is already at user
                         if mapping in mappings_to_remove:  # do not remove
                             mappings_to_remove.remove(mapping)
                         break  # do not add, it is already at user
@@ -1098,8 +1121,12 @@ class UserTopicAuth(UserTopic):
                     "_id": _id,
                     "username": indata.get("username"),
                     "password": indata.get("password"),
                     "_id": _id,
                     "username": indata.get("username"),
                     "password": indata.get("password"),
+                    "old_password": indata.get("old_password"),
                     "add_project_role_mappings": mappings_to_add,
                     "remove_project_role_mappings": mappings_to_remove,
                     "add_project_role_mappings": mappings_to_add,
                     "remove_project_role_mappings": mappings_to_remove,
+                    "system_admin_id": indata.get("system_admin_id"),
+                    "unlock": indata.get("unlock"),
+                    "renew": indata.get("renew"),
                 }
             )
             data_to_send = {"_id": _id, "changes": indata}
                 }
             )
             data_to_send = {"_id": _id, "changes": indata}
index eef2ae7..9c8c8d3 100644 (file)
@@ -44,6 +44,7 @@ from osm_nbi.authconn import AuthException, AuthconnException, AuthExceptionUnau
 from osm_nbi.authconn_keystone import AuthconnKeystone
 from osm_nbi.authconn_internal import AuthconnInternal
 from osm_nbi.authconn_tacacs import AuthconnTacacs
 from osm_nbi.authconn_keystone import AuthconnKeystone
 from osm_nbi.authconn_internal import AuthconnInternal
 from osm_nbi.authconn_tacacs import AuthconnTacacs
+from osm_nbi.utils import cef_event, cef_event_builder
 from osm_common import dbmemory, dbmongo, msglocal, msgkafka
 from osm_common.dbbase import DbException
 from osm_nbi.validation import is_valid_uuid
 from osm_common import dbmemory, dbmongo, msglocal, msgkafka
 from osm_common.dbbase import DbException
 from osm_nbi.validation import is_valid_uuid
@@ -88,6 +89,7 @@ class Authenticator:
         self.valid_query_string = valid_query_string
         self.system_admin_role_id = None  # system_role id
         self.test_project_id = None  # test_project_id
         self.valid_query_string = valid_query_string
         self.system_admin_role_id = None  # system_role id
         self.test_project_id = None  # test_project_id
+        self.cef_logger = None
 
     def start(self, config):
         """
 
     def start(self, config):
         """
@@ -98,6 +100,7 @@ class Authenticator:
         :param config: dictionary containing the relevant parameters for this object.
         """
         self.config = config
         :param config: dictionary containing the relevant parameters for this object.
         """
         self.config = config
+        self.cef_logger = cef_event_builder(config["authentication"])
 
         try:
             if not self.db:
 
         try:
             if not self.db:
@@ -249,7 +252,7 @@ class Authenticator:
         user_desc = {
             "username": "admin",
             "password": "admin",
         user_desc = {
             "username": "admin",
             "password": "admin",
-            "_admin": {"created": now, "modified": now},
+            "_admin": {"created": now, "modified": now, "user_status": "always-active"},
         }
         if project_id:
             pid = project_id
         }
         if project_id:
             pid = project_id
@@ -283,7 +286,7 @@ class Authenticator:
             (r for r in records if r["name"] == "system_admin"), None
         ):
             with open(self.roles_to_operations_file, "r") as stream:
             (r for r in records if r["name"] == "system_admin"), None
         ):
             with open(self.roles_to_operations_file, "r") as stream:
-                roles_to_operations_yaml = yaml.load(stream, Loader=yaml.Loader)
+                roles_to_operations_yaml = yaml.safe_load(stream)
 
             role_names = []
             for role_with_operations in roles_to_operations_yaml["roles"]:
 
             role_names = []
             for role_with_operations in roles_to_operations_yaml["roles"]:
@@ -449,9 +452,11 @@ class Authenticator:
                 elif auth_list[0].lower() == "basic":
                     user_passwd64 = auth_list[-1]
             if not token:
                 elif auth_list[0].lower() == "basic":
                     user_passwd64 = auth_list[-1]
             if not token:
-                if cherrypy.session.get("Authorization"):
+                if cherrypy.session.get("Authorization"):  # pylint: disable=E1101
                     # 2. Try using session before request a new token. If not, basic authentication will generate
                     # 2. Try using session before request a new token. If not, basic authentication will generate
-                    token = cherrypy.session.get("Authorization")
+                    token = cherrypy.session.get(  # pylint: disable=E1101
+                        "Authorization"
+                    )
                     if token == "logout":
                         token = None  # force Unauthorized response to insert user password again
                 elif user_passwd64 and cherrypy.request.config.get(
                     if token == "logout":
                         token = None  # force Unauthorized response to insert user password again
                 elif user_passwd64 and cherrypy.request.config.get(
@@ -466,10 +471,10 @@ class Authenticator:
                     except Exception:
                         pass
                     outdata = self.new_token(
                     except Exception:
                         pass
                     outdata = self.new_token(
-                        None, {"username": user, "password": passwd}
+                        None, {"username": user, "password": passwd}, None
                     )
                     token = outdata["_id"]
                     )
                     token = outdata["_id"]
-                    cherrypy.session["Authorization"] = token
+                    cherrypy.session["Authorization"] = token  # pylint: disable=E1101
 
             if not token:
                 raise AuthException(
 
             if not token:
                 raise AuthException(
@@ -503,13 +508,25 @@ class Authenticator:
                     item_id,
                 )
                 self.logger.info("RBAC_auth: {}".format(RBAC_auth))
                     item_id,
                 )
                 self.logger.info("RBAC_auth: {}".format(RBAC_auth))
+                if RBAC_auth:
+                    cef_event(
+                        self.cef_logger,
+                        {
+                            "name": "System Access",
+                            "sourceUserName": token_info.get("username"),
+                            "message": "Accessing account with system privileges, Project={}".format(
+                                token_info.get("project_name")
+                            ),
+                        },
+                    )
+                    self.logger.info("{}".format(self.cef_logger))
                 token_info["allow_show_user_project_role"] = RBAC_auth
 
             return token_info
         except AuthException as e:
             if not isinstance(e, AuthExceptionUnauthorized):
                 token_info["allow_show_user_project_role"] = RBAC_auth
 
             return token_info
         except AuthException as e:
             if not isinstance(e, AuthExceptionUnauthorized):
-                if cherrypy.session.get("Authorization"):
-                    del cherrypy.session["Authorization"]
+                if cherrypy.session.get("Authorization"):  # pylint: disable=E1101
+                    del cherrypy.session["Authorization"]  # pylint: disable=E1101
                 cherrypy.response.headers[
                     "WWW-Authenticate"
                 ] = 'Bearer realm="{}"'.format(e)
                 cherrypy.response.headers[
                     "WWW-Authenticate"
                 ] = 'Bearer realm="{}"'.format(e)
@@ -768,3 +785,30 @@ class Authenticator:
         else:
             self.tokens_cache.clear()
         self.msg.write("admin", "revoke_token", {"_id": token} if token else None)
         else:
             self.tokens_cache.clear()
         self.msg.write("admin", "revoke_token", {"_id": token} if token else None)
+
+    def check_password_expiry(self, outdata):
+        """
+        This method will check for password expiry of the user
+        :param outdata: user token information
+        """
+        user_list = None
+        present_time = time()
+        user = outdata["username"]
+        if self.config["authentication"].get("user_management"):
+            user_list = self.db.get_list("users", {"username": user})
+            if user_list:
+                user_content = user_list[0]
+                if not user_content.get("username") == "admin":
+                    user_content["_admin"]["modified"] = present_time
+                    if user_content.get("_admin").get("password_expire_time"):
+                        password_expire_time = user_content["_admin"][
+                            "password_expire_time"
+                        ]
+                    else:
+                        password_expire_time = present_time
+                    uid = user_content["_id"]
+                    self.db.set_one("users", {"_id": uid}, user_content)
+                    if not present_time < password_expire_time:
+                        return True
+        else:
+            pass
index 0f4b523..2f70405 100644 (file)
@@ -204,6 +204,7 @@ class Authconn:
         :param filter_q: dictionary to filter user list by name (username is also admited) and/or _id
         :return: returns a list of users.
         """
         :param filter_q: dictionary to filter user list by name (username is also admited) and/or _id
         :return: returns a list of users.
         """
+        return list()  # Default return value so that the method get_user passes pylint
 
     def get_user(self, _id, fail=True):
         """
 
     def get_user(self, _id, fail=True):
         """
index e342150..0f414b1 100644 (file)
@@ -33,9 +33,14 @@ __date__ = "$06-jun-2019 11:16:08$"
 import logging
 import re
 
 import logging
 import re
 
-from osm_nbi.authconn import Authconn, AuthException  # , AuthconnOperationException
+from osm_nbi.authconn import (
+    Authconn,
+    AuthException,
+    AuthconnConflictException,
+)  # , AuthconnOperationException
 from osm_common.dbbase import DbException
 from osm_nbi.base_topic import BaseTopic
 from osm_common.dbbase import DbException
 from osm_nbi.base_topic import BaseTopic
+from osm_nbi.utils import cef_event, cef_event_builder
 from osm_nbi.validation import is_valid_uuid
 from time import time, sleep
 from http import HTTPStatus
 from osm_nbi.validation import is_valid_uuid
 from time import time, sleep
 from http import HTTPStatus
@@ -64,6 +69,7 @@ class AuthconnInternal(Authconn):
 
         # To be Confirmed
         self.sess = None
 
         # To be Confirmed
         self.sess = None
+        self.cef_logger = cef_event_builder(config)
 
     def validate_token(self, token):
         """
 
     def validate_token(self, token):
         """
@@ -150,15 +156,134 @@ class AuthconnInternal(Authconn):
         user_rows = self.db.get_list(
             self.users_collection, {BaseTopic.id_field("users", user): user}
         )
         user_rows = self.db.get_list(
             self.users_collection, {BaseTopic.id_field("users", user): user}
         )
+        now = time()
         user_content = None
         user_content = None
-        if user_rows:
-            user_content = user_rows[0]
-            salt = user_content["_admin"]["salt"]
-            shadow_password = sha256(
-                password.encode("utf-8") + salt.encode("utf-8")
-            ).hexdigest()
-            if shadow_password != user_content["password"]:
-                user_content = None
+        if user:
+            user_rows = self.db.get_list(
+                self.users_collection,
+                {BaseTopic.id_field(self.users_collection, user): user},
+            )
+            if user_rows:
+                user_content = user_rows[0]
+                # Updating user_status for every system_admin id role login
+                mapped_roles = user_content.get("project_role_mappings")
+                for role in mapped_roles:
+                    role_id = role.get("role")
+                    role_assigned = self.db.get_one(
+                        self.roles_collection,
+                        {BaseTopic.id_field(self.roles_collection, role_id): role_id},
+                    )
+
+                    if role_assigned.get("permissions")["admin"]:
+                        if role_assigned.get("permissions")["default"]:
+                            if self.config.get("user_management"):
+                                filt = {}
+                                users = self.db.get_list(self.users_collection, filt)
+                                for user_info in users:
+                                    if not user_info.get("username") == "admin":
+                                        if not user_info.get("_admin").get(
+                                            "account_expire_time"
+                                        ):
+                                            expire = now + 86400 * self.config.get(
+                                                "account_expire_days"
+                                            )
+                                            self.db.set_one(
+                                                self.users_collection,
+                                                {"_id": user_info["_id"]},
+                                                {"_admin.account_expire_time": expire},
+                                            )
+                                        else:
+                                            if now > user_info.get("_admin").get(
+                                                "account_expire_time"
+                                            ):
+                                                self.db.set_one(
+                                                    self.users_collection,
+                                                    {"_id": user_info["_id"]},
+                                                    {"_admin.user_status": "expired"},
+                                                )
+                                break
+
+                # To add "admin" user_status key while upgrading osm setup with feature enabled
+                if user_content.get("username") == "admin":
+                    if self.config.get("user_management"):
+                        self.db.set_one(
+                            self.users_collection,
+                            {"_id": user_content["_id"]},
+                            {"_admin.user_status": "always-active"},
+                        )
+
+                if not user_content.get("username") == "admin":
+                    if self.config.get("user_management"):
+                        if not user_content.get("_admin").get("account_expire_time"):
+                            account_expire_time = now + 86400 * self.config.get(
+                                "account_expire_days"
+                            )
+                            self.db.set_one(
+                                self.users_collection,
+                                {"_id": user_content["_id"]},
+                                {"_admin.account_expire_time": account_expire_time},
+                            )
+                        else:
+                            account_expire_time = user_content.get("_admin").get(
+                                "account_expire_time"
+                            )
+
+                        if now > account_expire_time:
+                            self.db.set_one(
+                                self.users_collection,
+                                {"_id": user_content["_id"]},
+                                {"_admin.user_status": "expired"},
+                            )
+                            raise AuthException(
+                                "Account expired", http_code=HTTPStatus.UNAUTHORIZED
+                            )
+
+                        if user_content.get("_admin").get("user_status") == "locked":
+                            raise AuthException(
+                                "Failed to login as the account is locked due to MANY FAILED ATTEMPTS"
+                            )
+                        elif user_content.get("_admin").get("user_status") == "expired":
+                            raise AuthException(
+                                "Failed to login as the account is expired"
+                            )
+
+                salt = user_content["_admin"]["salt"]
+                shadow_password = sha256(
+                    password.encode("utf-8") + salt.encode("utf-8")
+                ).hexdigest()
+                if shadow_password != user_content["password"]:
+                    count = 1
+                    if user_content.get("_admin").get("retry_count") >= 0:
+                        count += user_content.get("_admin").get("retry_count")
+                        self.db.set_one(
+                            self.users_collection,
+                            {"_id": user_content["_id"]},
+                            {"_admin.retry_count": count},
+                        )
+                        self.logger.debug(
+                            "Failed Authentications count: {}".format(count)
+                        )
+
+                    if user_content.get("username") == "admin":
+                        user_content = None
+                    else:
+                        if not self.config.get("user_management"):
+                            user_content = None
+                        else:
+                            if (
+                                user_content.get("_admin").get("retry_count")
+                                >= self.config["max_pwd_attempt"] - 1
+                            ):
+                                self.db.set_one(
+                                    self.users_collection,
+                                    {"_id": user_content["_id"]},
+                                    {"_admin.user_status": "locked"},
+                                )
+                                raise AuthException(
+                                    "Failed to login as the account is locked due to MANY FAILED ATTEMPTS"
+                                )
+                            else:
+                                user_content = None
         return user_content
 
     def authenticate(self, credentials, token_info=None):
         return user_content
 
     def authenticate(self, credentials, token_info=None):
@@ -189,6 +314,18 @@ class AuthconnInternal(Authconn):
         if user:
             user_content = self.validate_user(user, password)
             if not user_content:
         if user:
             user_content = self.validate_user(user, password)
             if not user_content:
+                cef_event(
+                    self.cef_logger,
+                    {
+                        "name": "User login",
+                        "sourceUserName": user,
+                        "message": "Invalid username/password Project={} Outcome=Failure".format(
+                            project
+                        ),
+                        "severity": "3",
+                    },
+                )
+                self.logger.exception("{}".format(self.cef_logger))
                 raise AuthException(
                     "Invalid username/password", http_code=HTTPStatus.UNAUTHORIZED
                 )
                 raise AuthException(
                     "Invalid username/password", http_code=HTTPStatus.UNAUTHORIZED
                 )
@@ -218,10 +355,14 @@ class AuthconnInternal(Authconn):
             sleep(self.token_delay)
         # user_content["_admin"]["last_token_time"] = now
         # self.db.replace("users", user_content["_id"], user_content)   # might cause race conditions
             sleep(self.token_delay)
         # user_content["_admin"]["last_token_time"] = now
         # self.db.replace("users", user_content["_id"], user_content)   # might cause race conditions
+        user_data = {
+            "_admin.last_token_time": now,
+            "_admin.retry_count": 0,
+        }
         self.db.set_one(
             self.users_collection,
             {"_id": user_content["_id"]},
         self.db.set_one(
             self.users_collection,
             {"_id": user_content["_id"]},
-            {"_admin.last_token_time": now},
+            user_data,
         )
 
         token_id = "".join(
         )
 
         token_id = "".join(
@@ -281,6 +422,24 @@ class AuthconnInternal(Authconn):
             ]
             roles_list = [{"name": "project_admin", "id": rid}]
 
             ]
             roles_list = [{"name": "project_admin", "id": rid}]
 
+        login_count = user_content.get("_admin").get("retry_count")
+        last_token_time = user_content.get("_admin").get("last_token_time")
+
+        admin_show = False
+        user_show = False
+        if self.config.get("user_management"):
+            for role in roles_list:
+                role_id = role.get("id")
+                permission = self.db.get_one(
+                    self.roles_collection,
+                    {BaseTopic.id_field(self.roles_collection, role_id): role_id},
+                )
+                if permission.get("permissions")["admin"]:
+                    if permission.get("permissions")["default"]:
+                        admin_show = True
+                        break
+            else:
+                user_show = True
         new_token = {
             "issued_at": now,
             "expires": now + 3600,
         new_token = {
             "issued_at": now,
             "expires": now + 3600,
@@ -292,6 +451,10 @@ class AuthconnInternal(Authconn):
             "user_id": user_content["_id"],
             "admin": token_admin,
             "roles": roles_list,
             "user_id": user_content["_id"],
             "admin": token_admin,
             "roles": roles_list,
+            "login_count": login_count,
+            "last_login": last_token_time,
+            "admin_show": admin_show,
+            "user_show": user_show,
         }
 
         self.db.create(self.tokens_collection, new_token)
         }
 
         self.db.create(self.tokens_collection, new_token)
@@ -352,10 +515,24 @@ class AuthconnInternal(Authconn):
         BaseTopic.format_on_new(user_info, make_public=False)
         salt = uuid4().hex
         user_info["_admin"]["salt"] = salt
         BaseTopic.format_on_new(user_info, make_public=False)
         salt = uuid4().hex
         user_info["_admin"]["salt"] = salt
+        user_info["_admin"]["user_status"] = "active"
+        present = time()
+        if not user_info["username"] == "admin":
+            if self.config.get("user_management"):
+                user_info["_admin"]["modified"] = present
+                user_info["_admin"]["password_expire_time"] = present
+                account_expire_time = present + 86400 * self.config.get(
+                    "account_expire_days"
+                )
+                user_info["_admin"]["account_expire_time"] = account_expire_time
+
+        user_info["_admin"]["retry_count"] = 0
+        user_info["_admin"]["last_token_time"] = present
         if "password" in user_info:
             user_info["password"] = sha256(
                 user_info["password"].encode("utf-8") + salt.encode("utf-8")
             ).hexdigest()
         if "password" in user_info:
             user_info["password"] = sha256(
                 user_info["password"].encode("utf-8") + salt.encode("utf-8")
             ).hexdigest()
+            user_info["_admin"]["password_history"] = {salt: user_info["password"]}
         # "projects" are not stored any more
         if "projects" in user_info:
             del user_info["projects"]
         # "projects" are not stored any more
         if "projects" in user_info:
             del user_info["projects"]
@@ -369,9 +546,104 @@ class AuthconnInternal(Authconn):
         :param user_info: user info modifications
         """
         uid = user_info["_id"]
         :param user_info: user info modifications
         """
         uid = user_info["_id"]
+        old_pwd = user_info.get("old_password")
+        unlock = user_info.get("unlock")
+        renew = user_info.get("renew")
+        permission_id = user_info.get("system_admin_id")
+
         user_data = self.db.get_one(
             self.users_collection, {BaseTopic.id_field("users", uid): uid}
         )
         user_data = self.db.get_one(
             self.users_collection, {BaseTopic.id_field("users", uid): uid}
         )
+        if old_pwd:
+            salt = user_data["_admin"]["salt"]
+            shadow_password = sha256(
+                old_pwd.encode("utf-8") + salt.encode("utf-8")
+            ).hexdigest()
+            if shadow_password != user_data["password"]:
+                raise AuthconnConflictException(
+                    "Incorrect password", http_code=HTTPStatus.CONFLICT
+                )
+        # Unlocking the user
+        if unlock:
+            system_user = None
+            unlock_state = False
+            if not permission_id:
+                raise AuthconnConflictException(
+                    "system_admin_id is the required field to unlock the user",
+                    http_code=HTTPStatus.CONFLICT,
+                )
+            else:
+                system_user = self.db.get_one(
+                    self.users_collection,
+                    {
+                        BaseTopic.id_field(
+                            self.users_collection, permission_id
+                        ): permission_id
+                    },
+                )
+                mapped_roles = system_user.get("project_role_mappings")
+                for role in mapped_roles:
+                    role_id = role.get("role")
+                    role_assigned = self.db.get_one(
+                        self.roles_collection,
+                        {BaseTopic.id_field(self.roles_collection, role_id): role_id},
+                    )
+                    if role_assigned.get("permissions")["admin"]:
+                        if role_assigned.get("permissions")["default"]:
+                            user_data["_admin"]["retry_count"] = 0
+                            user_data["_admin"]["user_status"] = "active"
+                            unlock_state = True
+                            break
+                if not unlock_state:
+                    raise AuthconnConflictException(
+                        "User '{}' does not have the privilege to unlock the user".format(
+                            permission_id
+                        ),
+                        http_code=HTTPStatus.CONFLICT,
+                    )
+        # Renewing the user
+        if renew:
+            system_user = None
+            renew_state = False
+            if not permission_id:
+                raise AuthconnConflictException(
+                    "system_admin_id is the required field to renew the user",
+                    http_code=HTTPStatus.CONFLICT,
+                )
+            else:
+                system_user = self.db.get_one(
+                    self.users_collection,
+                    {
+                        BaseTopic.id_field(
+                            self.users_collection, permission_id
+                        ): permission_id
+                    },
+                )
+                mapped_roles = system_user.get("project_role_mappings")
+                for role in mapped_roles:
+                    role_id = role.get("role")
+                    role_assigned = self.db.get_one(
+                        self.roles_collection,
+                        {BaseTopic.id_field(self.roles_collection, role_id): role_id},
+                    )
+                    if role_assigned.get("permissions")["admin"]:
+                        if role_assigned.get("permissions")["default"]:
+                            present = time()
+                            account_expire = (
+                                present + 86400 * self.config["account_expire_days"]
+                            )
+                            user_data["_admin"]["modified"] = present
+                            user_data["_admin"]["account_expire_time"] = account_expire
+                            user_data["_admin"]["user_status"] = "active"
+                            renew_state = True
+                            break
+                if not renew_state:
+                    raise AuthconnConflictException(
+                        "User '{}' does not have the privilege to renew the user".format(
+                            permission_id
+                        ),
+                        http_code=HTTPStatus.CONFLICT,
+                    )
         BaseTopic.format_on_edit(user_data, user_info)
         # User Name
         usnm = user_info.get("username")
         BaseTopic.format_on_edit(user_data, user_info)
         # User Name
         usnm = user_info.get("username")
@@ -382,13 +654,46 @@ class AuthconnInternal(Authconn):
         if pswd and (
             len(pswd) != 64 or not re.match("[a-fA-F0-9]*", pswd)
         ):  # TODO: Improve check?
         if pswd and (
             len(pswd) != 64 or not re.match("[a-fA-F0-9]*", pswd)
         ):  # TODO: Improve check?
+            cef_event(
+                self.cef_logger,
+                {
+                    "name": "Change Password",
+                    "sourceUserName": user_data["username"],
+                    "message": "Changing Password for user, Outcome=Success",
+                    "severity": "2",
+                },
+            )
+            self.logger.info("{}".format(self.cef_logger))
             salt = uuid4().hex
             if "_admin" not in user_data:
                 user_data["_admin"] = {}
             salt = uuid4().hex
             if "_admin" not in user_data:
                 user_data["_admin"] = {}
+            if user_data.get("_admin").get("password_history"):
+                old_pwds = user_data.get("_admin").get("password_history")
+            else:
+                old_pwds = {}
+            for k, v in old_pwds.items():
+                shadow_password = sha256(
+                    pswd.encode("utf-8") + k.encode("utf-8")
+                ).hexdigest()
+                if v == shadow_password:
+                    raise AuthconnConflictException(
+                        "Password is used before", http_code=HTTPStatus.CONFLICT
+                    )
             user_data["_admin"]["salt"] = salt
             user_data["password"] = sha256(
                 pswd.encode("utf-8") + salt.encode("utf-8")
             ).hexdigest()
             user_data["_admin"]["salt"] = salt
             user_data["password"] = sha256(
                 pswd.encode("utf-8") + salt.encode("utf-8")
             ).hexdigest()
+            if len(old_pwds) >= 3:
+                old_pwds.pop(list(old_pwds.keys())[0])
+            old_pwds.update({salt: user_data["password"]})
+            user_data["_admin"]["password_history"] = old_pwds
+            if not user_data["username"] == "admin":
+                if self.config.get("user_management"):
+                    present = time()
+                    if self.config.get("pwd_expire_days"):
+                        expire = present + 86400 * self.config.get("pwd_expire_days")
+                        user_data["_admin"]["modified"] = present
+                        user_data["_admin"]["password_expire_time"] = expire
         # Project-Role Mappings
         # TODO: Check that user_info NEVER includes "project_role_mappings"
         if "project_role_mappings" not in user_data:
         # Project-Role Mappings
         # TODO: Check that user_info NEVER includes "project_role_mappings"
         if "project_role_mappings" not in user_data:
index 5e34485..a84b3d9 100644 (file)
@@ -328,7 +328,6 @@ class AuthconnKeystone(Authconn):
         :return: returns the id of the user in keystone.
         """
         try:
         :return: returns the id of the user in keystone.
         """
         try:
-
             if (
                 user_info.get("domain_name")
                 and user_info["domain_name"] in self.user_domain_ro_list
             if (
                 user_info.get("domain_name")
                 and user_info["domain_name"] in self.user_domain_ro_list
index 8c67c2d..820bd8e 100644 (file)
@@ -17,7 +17,7 @@ import logging
 from uuid import uuid4
 from http import HTTPStatus
 from time import time
 from uuid import uuid4
 from http import HTTPStatus
 from time import time
-from osm_common.dbbase import deep_update_rfc7396
+from osm_common.dbbase import deep_update_rfc7396, DbException
 from osm_nbi.validation import validate_input, ValidationError, is_valid_uuid
 from yaml import safe_load, YAMLError
 
 from osm_nbi.validation import validate_input, ValidationError, is_valid_uuid
 from yaml import safe_load, YAMLError
 
@@ -30,6 +30,20 @@ class EngineException(Exception):
         super(Exception, self).__init__(message)
 
 
         super(Exception, self).__init__(message)
 
 
+class NBIBadArgumentsException(Exception):
+    """
+    Bad argument values exception
+    """
+
+    def __init__(self, message: str = "", bad_args: list = None):
+        Exception.__init__(self, message)
+        self.message = message
+        self.bad_args = bad_args
+
+    def __str__(self):
+        return "{}, Bad arguments: {}".format(self.message, self.bad_args)
+
+
 def deep_get(target_dict, key_list):
     """
     Get a value from target_dict entering in the nested keys. If keys does not exist, it returns None
 def deep_get(target_dict, key_list):
     """
     Get a value from target_dict entering in the nested keys. If keys does not exist, it returns None
@@ -45,6 +59,85 @@ def deep_get(target_dict, key_list):
     return target_dict
 
 
     return target_dict
 
 
+def detect_descriptor_usage(descriptor: dict, db_collection: str, db: object) -> bool:
+    """Detect the descriptor usage state.
+
+    Args:
+        descriptor (dict):   VNF or NS Descriptor as dictionary
+        db_collection (str):   collection name which is looked for in DB
+        db (object):   name of db object
+
+    Returns:
+        True if descriptor is in use else None
+
+    """
+    try:
+        if not descriptor:
+            raise NBIBadArgumentsException(
+                "Argument is mandatory and can not be empty", "descriptor"
+            )
+
+        if not db:
+            raise NBIBadArgumentsException("A valid DB object should be provided", "db")
+
+        search_dict = {
+            "vnfds": ("vnfrs", "vnfd-id"),
+            "nsds": ("nsrs", "nsd-id"),
+        }
+
+        if db_collection not in search_dict:
+            raise NBIBadArgumentsException(
+                "db_collection should be equal to vnfds or nsds", "db_collection"
+            )
+
+        record_list = db.get_list(
+            search_dict[db_collection][0],
+            {search_dict[db_collection][1]: descriptor["_id"]},
+        )
+
+        if record_list:
+            return True
+
+    except (DbException, KeyError, NBIBadArgumentsException) as error:
+        raise EngineException(
+            f"Error occured while detecting the descriptor usage: {error}"
+        )
+
+
+def update_descriptor_usage_state(
+    descriptor: dict, db_collection: str, db: object
+) -> None:
+    """Updates the descriptor usage state.
+
+    Args:
+        descriptor (dict):   VNF or NS Descriptor as dictionary
+        db_collection (str):   collection name which is looked for in DB
+        db (object):   name of db object
+
+    Returns:
+        None
+
+    """
+    try:
+        descriptor_update = {
+            "_admin.usageState": "NOT_IN_USE",
+        }
+
+        if detect_descriptor_usage(descriptor, db_collection, db):
+            descriptor_update = {
+                "_admin.usageState": "IN_USE",
+            }
+
+        db.set_one(
+            db_collection, {"_id": descriptor["_id"]}, update_dict=descriptor_update
+        )
+
+    except (DbException, KeyError, NBIBadArgumentsException) as error:
+        raise EngineException(
+            f"Error occured while updating the descriptor usage state: {error}"
+        )
+
+
 def get_iterable(input_var):
     """
     Returns an iterable, in case input_var is None it just returns an empty tuple
 def get_iterable(input_var):
     """
     Returns an iterable, in case input_var is None it just returns an empty tuple
index 6bf437d..b165b76 100644 (file)
 import tarfile
 import yaml
 import json
 import tarfile
 import yaml
 import json
-import importlib
 import copy
 import copy
+import os
+import shutil
+import functools
+import re
 
 # import logging
 
 # import logging
+from deepdiff import DeepDiff
 from hashlib import md5
 from osm_common.dbbase import DbException, deep_update_rfc7396
 from http import HTTPStatus
 from time import time
 from uuid import uuid4
 from re import fullmatch
 from hashlib import md5
 from osm_common.dbbase import DbException, deep_update_rfc7396
 from http import HTTPStatus
 from time import time
 from uuid import uuid4
 from re import fullmatch
+from zipfile import ZipFile
+from urllib.parse import urlparse
 from osm_nbi.validation import (
     ValidationError,
     pdu_new_schema,
 from osm_nbi.validation import (
     ValidationError,
     pdu_new_schema,
@@ -33,10 +39,13 @@ from osm_nbi.validation import (
     validate_input,
     vnfpkgop_new_schema,
 )
     validate_input,
     vnfpkgop_new_schema,
 )
-from osm_nbi.base_topic import BaseTopic, EngineException, get_iterable
-
-etsi_nfv_vnfd = importlib.import_module("osm_im.etsi-nfv-vnfd")
-etsi_nfv_nsd = importlib.import_module("osm_im.etsi-nfv-nsd")
+from osm_nbi.base_topic import (
+    BaseTopic,
+    EngineException,
+    get_iterable,
+    detect_descriptor_usage,
+)
+from osm_im import etsi_nfv_vnfd, etsi_nfv_nsd
 from osm_im.nst import nst as nst_im
 from pyangbind.lib.serialise import pybindJSONDecoder
 import pyangbind.lib.pybindJSON as pybindJSON
 from osm_im.nst import nst as nst_im
 from pyangbind.lib.serialise import pybindJSONDecoder
 import pyangbind.lib.pybindJSON as pybindJSON
@@ -44,10 +53,17 @@ from osm_nbi import utils
 
 __author__ = "Alfonso Tierno <alfonso.tiernosepulveda@telefonica.com>"
 
 
 __author__ = "Alfonso Tierno <alfonso.tiernosepulveda@telefonica.com>"
 
+valid_helm_chart_re = re.compile(
+    r"^[a-z0-9]([-a-z0-9]*[a-z0-9]/)?([a-z0-9]([-a-z0-9]*[a-z0-9])?)*$"
+)
+
 
 class DescriptorTopic(BaseTopic):
     def __init__(self, db, fs, msg, auth):
 
 class DescriptorTopic(BaseTopic):
     def __init__(self, db, fs, msg, auth):
-        BaseTopic.__init__(self, db, fs, msg, auth)
+        super().__init__(db, fs, msg, auth)
+
+    def _validate_input_new(self, indata, storage_params, force=False):
+        return indata
 
     def check_conflict_on_edit(self, session, final_content, edit_content, _id):
         final_content = super().check_conflict_on_edit(
 
     def check_conflict_on_edit(self, session, final_content, edit_content, _id):
         final_content = super().check_conflict_on_edit(
@@ -114,7 +130,7 @@ class DescriptorTopic(BaseTopic):
             if self.db.get_one(self.topic, _filter, fail_on_empty=False):
                 raise EngineException(
                     "{} with id '{}' already exists for this project".format(
             if self.db.get_one(self.topic, _filter, fail_on_empty=False):
                 raise EngineException(
                     "{} with id '{}' already exists for this project".format(
-                        self.topic[:-1], final_content["id"]
+                        (str(self.topic))[:-1], final_content["id"]
                     ),
                     HTTPStatus.CONFLICT,
                 )
                     ),
                     HTTPStatus.CONFLICT,
                 )
@@ -139,6 +155,12 @@ class DescriptorTopic(BaseTopic):
         """
         self.fs.file_delete(_id, ignore_non_exist=True)
         self.fs.file_delete(_id + "_", ignore_non_exist=True)  # remove temp folder
         """
         self.fs.file_delete(_id, ignore_non_exist=True)
         self.fs.file_delete(_id + "_", ignore_non_exist=True)  # remove temp folder
+        # Remove file revisions
+        if "revision" in db_content["_admin"]:
+            revision = db_content["_admin"]["revision"]
+            while revision > 0:
+                self.fs.file_delete(_id + ":" + str(revision), ignore_non_exist=True)
+                revision = revision - 1
 
     @staticmethod
     def get_one_by_id(db, session, topic, id):
 
     @staticmethod
     def get_one_by_id(db, session, topic, id):
@@ -203,7 +225,8 @@ class DescriptorTopic(BaseTopic):
         # Avoid override in this case as the target is userDefinedData, but not vnfd,nsd descriptors
         # indata = DescriptorTopic._validate_input_new(self, indata, project_id=session["force"])
 
         # Avoid override in this case as the target is userDefinedData, but not vnfd,nsd descriptors
         # indata = DescriptorTopic._validate_input_new(self, indata, project_id=session["force"])
 
-        content = {"_admin": {"userDefinedData": indata}}
+        content = {"_admin": {"userDefinedData": indata, "revision": 0}}
+
         self.format_on_new(
             content, session["project_id"], make_public=session["public"]
         )
         self.format_on_new(
             content, session["project_id"], make_public=session["public"]
         )
@@ -234,15 +257,25 @@ class DescriptorTopic(BaseTopic):
             content_type
             and "application/gzip" in content_type
             or "application/x-gzip" in content_type
             content_type
             and "application/gzip" in content_type
             or "application/x-gzip" in content_type
-            or "application/zip" in content_type
         ):
             compressed = "gzip"
         ):
             compressed = "gzip"
+        if content_type and "application/zip" in content_type:
+            compressed = "zip"
         filename = headers.get("Content-Filename")
         filename = headers.get("Content-Filename")
-        if not filename:
-            filename = "package.tar.gz" if compressed else "package"
+        if not filename and compressed:
+            filename = "package.tar.gz" if compressed == "gzip" else "package.zip"
+        elif not filename:
+            filename = "package"
+
+        revision = 1
+        if "revision" in current_desc["_admin"]:
+            revision = current_desc["_admin"]["revision"] + 1
+
         # TODO change to Content-Disposition filename https://tools.ietf.org/html/rfc6266
         file_pkg = None
         error_text = ""
         # TODO change to Content-Disposition filename https://tools.ietf.org/html/rfc6266
         file_pkg = None
         error_text = ""
+        fs_rollback = []
+
         try:
             if content_range_text:
                 content_range = (
         try:
             if content_range_text:
                 content_range = (
@@ -257,23 +290,26 @@ class DescriptorTopic(BaseTopic):
                 total = int(content_range[3])
             else:
                 start = 0
                 total = int(content_range[3])
             else:
                 start = 0
-            temp_folder = (
-                _id + "_"
+            # Rather than using a temp folder, we will store the package in a folder based on
+            # the current revision.
+            proposed_revision_path = (
+                _id + ":" + str(revision)
             )  # all the content is upload here and if ok, it is rename from id_ to is folder
 
             if start:
             )  # all the content is upload here and if ok, it is rename from id_ to is folder
 
             if start:
-                if not self.fs.file_exists(temp_folder, "dir"):
+                if not self.fs.file_exists(proposed_revision_path, "dir"):
                     raise EngineException(
                         "invalid Transaction-Id header", HTTPStatus.NOT_FOUND
                     )
             else:
                     raise EngineException(
                         "invalid Transaction-Id header", HTTPStatus.NOT_FOUND
                     )
             else:
-                self.fs.file_delete(temp_folder, ignore_non_exist=True)
-                self.fs.mkdir(temp_folder)
+                self.fs.file_delete(proposed_revision_path, ignore_non_exist=True)
+                self.fs.mkdir(proposed_revision_path)
+                fs_rollback.append(proposed_revision_path)
 
             storage = self.fs.get_params()
 
             storage = self.fs.get_params()
-            storage["folder"] = _id
+            storage["folder"] = proposed_revision_path
 
 
-            file_path = (temp_folder, filename)
+            file_path = (proposed_revision_path, filename)
             if self.fs.file_exists(file_path, "file"):
                 file_size = self.fs.file_size(file_path)
             else:
             if self.fs.file_exists(file_path, "file"):
                 file_size = self.fs.file_size(file_path)
             else:
@@ -354,9 +390,48 @@ class DescriptorTopic(BaseTopic):
                     )
                 storage["descriptor"] = descriptor_file_name
                 storage["zipfile"] = filename
                     )
                 storage["descriptor"] = descriptor_file_name
                 storage["zipfile"] = filename
-                self.fs.file_extract(tar, temp_folder)
+                self.fs.file_extract(tar, proposed_revision_path)
                 with self.fs.file_open(
                 with self.fs.file_open(
-                    (temp_folder, descriptor_file_name), "r"
+                    (proposed_revision_path, descriptor_file_name), "r"
+                ) as descriptor_file:
+                    content = descriptor_file.read()
+            elif compressed == "zip":
+                zipfile = ZipFile(file_pkg)
+                descriptor_file_name = None
+                for package_file in zipfile.infolist():
+                    zipfilename = package_file.filename
+                    file_path = zipfilename.split("/")
+                    if (
+                        not file_path[0] or ".." in zipfilename
+                    ):  # if start with "/" means absolute path
+                        raise EngineException(
+                            "Absolute path or '..' are not allowed for package descriptor zip"
+                        )
+
+                    if (
+                        zipfilename.endswith(".yaml")
+                        or zipfilename.endswith(".json")
+                        or zipfilename.endswith(".yml")
+                    ) and (
+                        zipfilename.find("/") < 0
+                        or zipfilename.find("Definitions") >= 0
+                    ):
+                        storage["pkg-dir"] = ""
+                        if descriptor_file_name:
+                            raise EngineException(
+                                "Found more than one descriptor file at package descriptor zip"
+                            )
+                        descriptor_file_name = zipfilename
+                if not descriptor_file_name:
+                    raise EngineException(
+                        "Not found any descriptor file at package descriptor zip"
+                    )
+                storage["descriptor"] = descriptor_file_name
+                storage["zipfile"] = filename
+                self.fs.file_extract(zipfile, proposed_revision_path)
+
+                with self.fs.file_open(
+                    (proposed_revision_path, descriptor_file_name), "r"
                 ) as descriptor_file:
                     content = descriptor_file.read()
             else:
                 ) as descriptor_file:
                     content = descriptor_file.read()
             else:
@@ -368,11 +443,39 @@ class DescriptorTopic(BaseTopic):
                 indata = json.load(content)
             else:
                 error_text = "Invalid yaml format "
                 indata = json.load(content)
             else:
                 error_text = "Invalid yaml format "
-                indata = yaml.load(content, Loader=yaml.SafeLoader)
+                indata = yaml.safe_load(content)
 
 
-            current_desc["_admin"]["storage"] = storage
-            current_desc["_admin"]["onboardingState"] = "ONBOARDED"
-            current_desc["_admin"]["operationalState"] = "ENABLED"
+            # Need to close the file package here so it can be copied from the
+            # revision to the current, unrevisioned record
+            if file_pkg:
+                file_pkg.close()
+            file_pkg = None
+
+            # Fetch both the incoming, proposed revision and the original revision so we
+            # can call a validate method to compare them
+            current_revision_path = _id + "/"
+            self.fs.sync(from_path=current_revision_path)
+            self.fs.sync(from_path=proposed_revision_path)
+
+            if revision > 1:
+                try:
+                    self._validate_descriptor_changes(
+                        _id,
+                        descriptor_file_name,
+                        current_revision_path,
+                        proposed_revision_path,
+                    )
+                except Exception as e:
+                    shutil.rmtree(
+                        self.fs.path + current_revision_path, ignore_errors=True
+                    )
+                    shutil.rmtree(
+                        self.fs.path + proposed_revision_path, ignore_errors=True
+                    )
+                    # Only delete the new revision.  We need to keep the original version in place
+                    # as it has not been changed.
+                    self.fs.file_delete(proposed_revision_path, ignore_non_exist=True)
+                    raise e
 
             indata = self._remove_envelop(indata)
 
 
             indata = self._remove_envelop(indata)
 
@@ -380,13 +483,36 @@ class DescriptorTopic(BaseTopic):
             if kwargs:
                 self._update_input_with_kwargs(indata, kwargs)
 
             if kwargs:
                 self._update_input_with_kwargs(indata, kwargs)
 
+            current_desc["_admin"]["storage"] = storage
+            current_desc["_admin"]["onboardingState"] = "ONBOARDED"
+            current_desc["_admin"]["operationalState"] = "ENABLED"
+            current_desc["_admin"]["modified"] = time()
+            current_desc["_admin"]["revision"] = revision
+
             deep_update_rfc7396(current_desc, indata)
             current_desc = self.check_conflict_on_edit(
                 session, current_desc, indata, _id=_id
             )
             deep_update_rfc7396(current_desc, indata)
             current_desc = self.check_conflict_on_edit(
                 session, current_desc, indata, _id=_id
             )
-            current_desc["_admin"]["modified"] = time()
+
+            # Copy the revision to the active package name by its original id
+            shutil.rmtree(self.fs.path + current_revision_path, ignore_errors=True)
+            os.rename(
+                self.fs.path + proposed_revision_path,
+                self.fs.path + current_revision_path,
+            )
+            self.fs.file_delete(current_revision_path, ignore_non_exist=True)
+            self.fs.mkdir(current_revision_path)
+            self.fs.reverse_sync(from_path=current_revision_path)
+
+            shutil.rmtree(self.fs.path + _id)
+
             self.db.replace(self.topic, _id, current_desc)
             self.db.replace(self.topic, _id, current_desc)
-            self.fs.dir_rename(temp_folder, _id)
+
+            #  Store a copy of the package as a point in time revision
+            revision_desc = dict(current_desc)
+            revision_desc["_id"] = _id + ":" + str(revision_desc["_admin"]["revision"])
+            self.db.create(self.topic + "_revisions", revision_desc)
+            fs_rollback = []
 
             indata["_id"] = _id
             self._send_msg("edited", indata)
 
             indata["_id"] = _id
             self._send_msg("edited", indata)
@@ -418,6 +544,8 @@ class DescriptorTopic(BaseTopic):
         finally:
             if file_pkg:
                 file_pkg.close()
         finally:
             if file_pkg:
                 file_pkg.close()
+            for file in fs_rollback:
+                self.fs.file_delete(file, ignore_non_exist=True)
 
     def get_file(self, session, _id, path=None, accept_header=None):
         """
 
     def get_file(self, session, _id, path=None, accept_header=None):
         """
@@ -452,7 +580,7 @@ class DescriptorTopic(BaseTopic):
             )
         storage = content["_admin"]["storage"]
         if path is not None and path != "$DESCRIPTOR":  # artifacts
             )
         storage = content["_admin"]["storage"]
         if path is not None and path != "$DESCRIPTOR":  # artifacts
-            if not storage.get("pkg-dir"):
+            if not storage.get("pkg-dir") and not storage.get("folder"):
                 raise EngineException(
                     "Packages does not contains artifacts",
                     http_code=HTTPStatus.BAD_REQUEST,
                 raise EngineException(
                     "Packages does not contains artifacts",
                     http_code=HTTPStatus.BAD_REQUEST,
@@ -554,7 +682,7 @@ class DescriptorTopic(BaseTopic):
         # to preserve current expected behaviour
         if "userDefinedData" in indata:
             data = indata.pop("userDefinedData")
         # to preserve current expected behaviour
         if "userDefinedData" in indata:
             data = indata.pop("userDefinedData")
-            if type(data) == dict:
+            if isinstance(data, dict):
                 indata["_admin"]["userDefinedData"] = data
             else:
                 raise EngineException(
                 indata["_admin"]["userDefinedData"] = data
             else:
                 raise EngineException(
@@ -578,6 +706,20 @@ class DescriptorTopic(BaseTopic):
 
         return indata
 
 
         return indata
 
+    def _validate_descriptor_changes(
+        self,
+        descriptor_id,
+        descriptor_file_name,
+        old_descriptor_directory,
+        new_descriptor_directory,
+    ):
+        # Example:
+        #    raise EngineException(
+        #           "Error in validating new descriptor: <NODE> cannot be modified",
+        #           http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
+        #    )
+        pass
+
 
 class VnfdTopic(DescriptorTopic):
     topic = "vnfds"
 
 class VnfdTopic(DescriptorTopic):
     topic = "vnfds"
@@ -711,9 +853,29 @@ class VnfdTopic(DescriptorTopic):
         self.validate_internal_virtual_links(indata)
         self.validate_monitoring_params(indata)
         self.validate_scaling_group_descriptor(indata)
         self.validate_internal_virtual_links(indata)
         self.validate_monitoring_params(indata)
         self.validate_scaling_group_descriptor(indata)
+        self.validate_helm_chart(indata)
 
         return indata
 
 
         return indata
 
+    @staticmethod
+    def validate_helm_chart(indata):
+        def is_url(url):
+            result = urlparse(url)
+            return all([result.scheme, result.netloc])
+
+        kdus = indata.get("kdu", [])
+        for kdu in kdus:
+            helm_chart_value = kdu.get("helm-chart")
+            if not helm_chart_value:
+                continue
+            if not (
+                valid_helm_chart_re.match(helm_chart_value) or is_url(helm_chart_value)
+            ):
+                raise EngineException(
+                    "helm-chart '{}' is not valid".format(helm_chart_value),
+                    http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
+                )
+
     @staticmethod
     def validate_mgmt_interface_connection_point(indata):
         if not indata.get("vdu"):
     @staticmethod
     def validate_mgmt_interface_connection_point(indata):
         if not indata.get("vdu"):
@@ -793,6 +955,8 @@ class VnfdTopic(DescriptorTopic):
                         ):
                             if not self._validate_package_folders(
                                 storage_params, "charms"
                         ):
                             if not self._validate_package_folders(
                                 storage_params, "charms"
+                            ) and not self._validate_package_folders(
+                                storage_params, "Scripts/charms"
                             ):
                                 raise EngineException(
                                     "Charm defined in vnf[id={}] but not present in "
                             ):
                                 raise EngineException(
                                     "Charm defined in vnf[id={}] but not present in "
@@ -804,6 +968,8 @@ class VnfdTopic(DescriptorTopic):
             return
         if not self._validate_package_folders(
             storage_params, "cloud_init", vdu["cloud-init-file"]
             return
         if not self._validate_package_folders(
             storage_params, "cloud_init", vdu["cloud-init-file"]
+        ) and not self._validate_package_folders(
+            storage_params, "Scripts/cloud_init", vdu["cloud-init-file"]
         ):
             raise EngineException(
                 "Cloud-init defined in vnf[id={}]:vdu[id={}] but not present in "
         ):
             raise EngineException(
                 "Cloud-init defined in vnf[id={}]:vdu[id={}] but not present in "
@@ -828,14 +994,30 @@ class VnfdTopic(DescriptorTopic):
                         day_1_2_config.get("execution-environment-list", []),
                         lambda ee: "juju" in ee,
                     ):
                         day_1_2_config.get("execution-environment-list", []),
                         lambda ee: "juju" in ee,
                     ):
-                        if not self._validate_package_folders(storage_params, "charms"):
+                        if not self._validate_package_folders(
+                            storage_params, "charms"
+                        ) and not self._validate_package_folders(
+                            storage_params, "Scripts/charms"
+                        ):
                             raise EngineException(
                                 "Charm defined in vnf[id={}] but not present in "
                                 "package".format(indata["id"])
                             )
 
     def _validate_package_folders(self, storage_params, folder, file=None):
                             raise EngineException(
                                 "Charm defined in vnf[id={}] but not present in "
                                 "package".format(indata["id"])
                             )
 
     def _validate_package_folders(self, storage_params, folder, file=None):
-        if not storage_params or not storage_params.get("pkg-dir"):
+        if not storage_params:
+            return False
+        elif not storage_params.get("pkg-dir"):
+            if self.fs.file_exists("{}_".format(storage_params["folder"]), "dir"):
+                f = "{}_/{}".format(storage_params["folder"], folder)
+            else:
+                f = "{}/{}".format(storage_params["folder"], folder)
+            if file:
+                return self.fs.file_exists("{}/{}".format(f, file), "file")
+            else:
+                if self.fs.file_exists(f, "dir"):
+                    if self.fs.dir_ls(f):
+                        return True
             return False
         else:
             if self.fs.file_exists("{}_".format(storage_params["folder"]), "dir"):
             return False
         else:
             if self.fs.file_exists("{}_".format(storage_params["folder"]), "dir"):
@@ -1027,6 +1209,7 @@ class VnfdTopic(DescriptorTopic):
         """
         super().delete_extra(session, _id, db_content, not_send_msg)
         self.db.del_list("vnfpkgops", {"vnfPkgId": _id})
         """
         super().delete_extra(session, _id, db_content, not_send_msg)
         self.db.del_list("vnfpkgops", {"vnfPkgId": _id})
+        self.db.del_list(self.topic + "_revisions", {"_id": {"$regex": _id}})
 
     def sol005_projection(self, data):
         data["onboardingState"] = data["_admin"]["onboardingState"]
 
     def sol005_projection(self, data):
         data["onboardingState"] = data["_admin"]["onboardingState"]
@@ -1043,13 +1226,182 @@ class VnfdTopic(DescriptorTopic):
 
         return super().sol005_projection(data)
 
 
         return super().sol005_projection(data)
 
+    @staticmethod
+    def find_software_version(vnfd: dict) -> str:
+        """Find the sotware version in the VNFD descriptors
+
+        Args:
+            vnfd (dict): Descriptor as a dictionary
+
+        Returns:
+            software-version (str)
+        """
+        default_sw_version = "1.0"
+        if vnfd.get("vnfd"):
+            vnfd = vnfd["vnfd"]
+        if vnfd.get("software-version"):
+            return vnfd["software-version"]
+        else:
+            return default_sw_version
+
+    @staticmethod
+    def extract_policies(vnfd: dict) -> dict:
+        """Removes the policies from the VNFD descriptors
+
+        Args:
+            vnfd (dict):   Descriptor as a dictionary
+
+        Returns:
+            vnfd (dict): VNFD which does not include policies
+        """
+        for df in vnfd.get("df", {}):
+            for policy in ["scaling-aspect", "healing-aspect"]:
+                if df.get(policy, {}):
+                    df.pop(policy)
+        for vdu in vnfd.get("vdu", {}):
+            for alarm_policy in ["alarm", "monitoring-parameter"]:
+                if vdu.get(alarm_policy, {}):
+                    vdu.pop(alarm_policy)
+        return vnfd
+
+    @staticmethod
+    def extract_day12_primitives(vnfd: dict) -> dict:
+        """Removes the day12 primitives from the VNFD descriptors
+
+        Args:
+            vnfd (dict):   Descriptor as a dictionary
+
+        Returns:
+            vnfd (dict)
+        """
+        for df_id, df in enumerate(vnfd.get("df", {})):
+            if (
+                df.get("lcm-operations-configuration", {})
+                .get("operate-vnf-op-config", {})
+                .get("day1-2")
+            ):
+                day12 = df["lcm-operations-configuration"]["operate-vnf-op-config"].get(
+                    "day1-2"
+                )
+                for config_id, config in enumerate(day12):
+                    for key in [
+                        "initial-config-primitive",
+                        "config-primitive",
+                        "terminate-config-primitive",
+                    ]:
+                        config.pop(key, None)
+                        day12[config_id] = config
+                df["lcm-operations-configuration"]["operate-vnf-op-config"][
+                    "day1-2"
+                ] = day12
+            vnfd["df"][df_id] = df
+        return vnfd
+
+    def remove_modifiable_items(self, vnfd: dict) -> dict:
+        """Removes the modifiable parts from the VNFD descriptors
+
+        It calls different extract functions according to different update types
+        to clear all the modifiable items from VNFD
+
+        Args:
+            vnfd (dict): Descriptor as a dictionary
+
+        Returns:
+            vnfd (dict): Descriptor which does not include modifiable contents
+        """
+        if vnfd.get("vnfd"):
+            vnfd = vnfd["vnfd"]
+        vnfd.pop("_admin", None)
+        # If the other extractions need to be done from VNFD,
+        # the new extract methods could be appended to below list.
+        for extract_function in [self.extract_day12_primitives, self.extract_policies]:
+            vnfd_temp = extract_function(vnfd)
+            vnfd = vnfd_temp
+        return vnfd
+
+    def _validate_descriptor_changes(
+        self,
+        descriptor_id: str,
+        descriptor_file_name: str,
+        old_descriptor_directory: str,
+        new_descriptor_directory: str,
+    ):
+        """Compares the old and new VNFD descriptors and validates the new descriptor.
+
+        Args:
+            old_descriptor_directory (str):   Directory of descriptor which is in-use
+            new_descriptor_directory (str):   Directory of descriptor which is proposed to update (new revision)
+
+        Returns:
+            None
+
+        Raises:
+            EngineException:    In case of error when there are unallowed changes
+        """
+        try:
+            # If VNFD does not exist in DB or it is not in use by any NS,
+            # validation is not required.
+            vnfd = self.db.get_one("vnfds", {"_id": descriptor_id})
+            if not vnfd or not detect_descriptor_usage(vnfd, "vnfds", self.db):
+                return
+
+            # Get the old and new descriptor contents in order to compare them.
+            with self.fs.file_open(
+                (old_descriptor_directory.rstrip("/"), descriptor_file_name), "r"
+            ) as old_descriptor_file:
+                with self.fs.file_open(
+                    (new_descriptor_directory.rstrip("/"), descriptor_file_name), "r"
+                ) as new_descriptor_file:
+                    old_content = yaml.safe_load(old_descriptor_file.read())
+                    new_content = yaml.safe_load(new_descriptor_file.read())
+
+                    # If software version has changed, we do not need to validate
+                    # the differences anymore.
+                    if old_content and new_content:
+                        if self.find_software_version(
+                            old_content
+                        ) != self.find_software_version(new_content):
+                            return
+
+                        disallowed_change = DeepDiff(
+                            self.remove_modifiable_items(old_content),
+                            self.remove_modifiable_items(new_content),
+                        )
+
+                        if disallowed_change:
+                            changed_nodes = functools.reduce(
+                                lambda a, b: a + " , " + b,
+                                [
+                                    node.lstrip("root")
+                                    for node in disallowed_change.get(
+                                        "values_changed"
+                                    ).keys()
+                                ],
+                            )
+
+                            raise EngineException(
+                                f"Error in validating new descriptor: {changed_nodes} cannot be modified, "
+                                "there are disallowed changes in the vnf descriptor.",
+                                http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
+                            )
+        except (
+            DbException,
+            AttributeError,
+            IndexError,
+            KeyError,
+            ValueError,
+        ) as e:
+            raise type(e)(
+                "VNF Descriptor could not be processed with error: {}.".format(e)
+            )
+
 
 class NsdTopic(DescriptorTopic):
     topic = "nsds"
     topic_msg = "nsd"
 
     def __init__(self, db, fs, msg, auth):
 
 class NsdTopic(DescriptorTopic):
     topic = "nsds"
     topic_msg = "nsd"
 
     def __init__(self, db, fs, msg, auth):
-        DescriptorTopic.__init__(self, db, fs, msg, auth)
+        super().__init__(db, fs, msg, auth)
 
     def pyangbind_validation(self, item, data, force=False):
         if self._descriptor_data_is_in_old_format(data):
 
     def pyangbind_validation(self, item, data, force=False):
         if self._descriptor_data_is_in_old_format(data):
@@ -1115,6 +1467,8 @@ class NsdTopic(DescriptorTopic):
         # TODO validata that if contains cloud-init-file or charms, have artifacts _admin.storage."pkg-dir" is not none
         for vld in get_iterable(indata.get("virtual-link-desc")):
             self.validate_vld_mgmt_network_with_virtual_link_protocol_data(vld, indata)
         # TODO validata that if contains cloud-init-file or charms, have artifacts _admin.storage."pkg-dir" is not none
         for vld in get_iterable(indata.get("virtual-link-desc")):
             self.validate_vld_mgmt_network_with_virtual_link_protocol_data(vld, indata)
+        for fg in get_iterable(indata.get("vnffgd")):
+            self.validate_vnffgd_data(fg, indata)
 
         self.validate_vnf_profiles_vnfd_id(indata)
 
 
         self.validate_vnf_profiles_vnfd_id(indata)
 
@@ -1136,6 +1490,45 @@ class NsdTopic(DescriptorTopic):
                             http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
                         )
 
                             http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
                         )
 
+    @staticmethod
+    def validate_vnffgd_data(fg, indata):
+        position_list = []
+        all_vnf_ids = set(get_iterable(fg.get("vnf-profile-id")))
+        for fgposition in get_iterable(fg.get("nfp-position-element")):
+            position_list.append(fgposition["id"])
+
+        for nfpd in get_iterable(fg.get("nfpd")):
+            nfp_position = []
+            for position in get_iterable(nfpd.get("position-desc-id")):
+                nfp_position = position.get("nfp-position-element-id")
+                if position == "nfp-position-element-id":
+                    nfp_position = position.get("nfp-position-element-id")
+                if nfp_position[0] not in position_list:
+                    raise EngineException(
+                        "Error at vnffgd nfpd[id='{}']:nfp-position-element-id='{}' "
+                        "does not match any nfp-position-element".format(
+                            nfpd["id"], nfp_position[0]
+                        ),
+                        http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
+                    )
+
+                for cp in get_iterable(position.get("cp-profile-id")):
+                    for cpe in get_iterable(cp.get("constituent-profile-elements")):
+                        constituent_base_element_id = cpe.get(
+                            "constituent-base-element-id"
+                        )
+                        if (
+                            constituent_base_element_id
+                            and constituent_base_element_id not in all_vnf_ids
+                        ):
+                            raise EngineException(
+                                "Error at vnffgd constituent_profile[id='{}']:vnfd-id='{}' "
+                                "does not match any constituent-base-element-id".format(
+                                    cpe["id"], constituent_base_element_id
+                                ),
+                                http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
+                            )
+
     @staticmethod
     def validate_vnf_profiles_vnfd_id(indata):
         all_vnfd_ids = set(get_iterable(indata.get("vnfd-id")))
     @staticmethod
     def validate_vnf_profiles_vnfd_id(indata):
         all_vnfd_ids = set(get_iterable(indata.get("vnfd-id")))
@@ -1179,7 +1572,7 @@ class NsdTopic(DescriptorTopic):
         # to preserve current expected behaviour
         if "userDefinedData" in indata:
             data = indata.pop("userDefinedData")
         # to preserve current expected behaviour
         if "userDefinedData" in indata:
             data = indata.pop("userDefinedData")
-            if type(data) == dict:
+            if isinstance(data, dict):
                 indata["_admin"]["userDefinedData"] = data
             else:
                 raise EngineException(
                 indata["_admin"]["userDefinedData"] = data
             else:
                 raise EngineException(
@@ -1304,6 +1697,132 @@ class NsdTopic(DescriptorTopic):
                 http_code=HTTPStatus.CONFLICT,
             )
 
                 http_code=HTTPStatus.CONFLICT,
             )
 
+    def delete_extra(self, session, _id, db_content, not_send_msg=None):
+        """
+        Deletes associate file system storage (via super)
+        Deletes associated vnfpkgops from database.
+        :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
+        :param _id: server internal id
+        :param db_content: The database content of the descriptor
+        :return: None
+        :raises: FsException in case of error while deleting associated storage
+        """
+        super().delete_extra(session, _id, db_content, not_send_msg)
+        self.db.del_list(self.topic + "_revisions", {"_id": {"$regex": _id}})
+
+    @staticmethod
+    def extract_day12_primitives(nsd: dict) -> dict:
+        """Removes the day12 primitives from the NSD descriptors
+
+        Args:
+            nsd (dict):    Descriptor as a dictionary
+
+        Returns:
+            nsd (dict):    Cleared NSD
+        """
+        if nsd.get("ns-configuration"):
+            for key in [
+                "config-primitive",
+                "initial-config-primitive",
+                "terminate-config-primitive",
+            ]:
+                nsd["ns-configuration"].pop(key, None)
+        return nsd
+
+    def remove_modifiable_items(self, nsd: dict) -> dict:
+        """Removes the modifiable parts from the VNFD descriptors
+
+        It calls different extract functions according to different update types
+        to clear all the modifiable items from NSD
+
+        Args:
+            nsd (dict):  Descriptor as a dictionary
+
+        Returns:
+            nsd (dict):  Descriptor which does not include modifiable contents
+        """
+        while isinstance(nsd, dict) and nsd.get("nsd"):
+            nsd = nsd["nsd"]
+        if isinstance(nsd, list):
+            nsd = nsd[0]
+        nsd.pop("_admin", None)
+        # If the more extractions need to be done from NSD,
+        # the new extract methods could be appended to below list.
+        for extract_function in [self.extract_day12_primitives]:
+            nsd_temp = extract_function(nsd)
+            nsd = nsd_temp
+        return nsd
+
+    def _validate_descriptor_changes(
+        self,
+        descriptor_id: str,
+        descriptor_file_name: str,
+        old_descriptor_directory: str,
+        new_descriptor_directory: str,
+    ):
+        """Compares the old and new NSD descriptors and validates the new descriptor
+
+        Args:
+            old_descriptor_directory:   Directory of descriptor which is in-use
+            new_descriptor_directory:   Directory of descriptor which is proposed to update (new revision)
+
+        Returns:
+            None
+
+        Raises:
+            EngineException:    In case of error if the changes are not allowed
+        """
+
+        try:
+            # If NSD does not exist in DB, or it is not in use by any NS,
+            # validation is not required.
+            nsd = self.db.get_one("nsds", {"_id": descriptor_id}, fail_on_empty=False)
+            if not nsd or not detect_descriptor_usage(nsd, "nsds", self.db):
+                return
+
+            # Get the old and new descriptor contents in order to compare them.
+            with self.fs.file_open(
+                (old_descriptor_directory.rstrip("/"), descriptor_file_name), "r"
+            ) as old_descriptor_file:
+                with self.fs.file_open(
+                    (new_descriptor_directory.rstrip("/"), descriptor_file_name), "r"
+                ) as new_descriptor_file:
+                    old_content = yaml.safe_load(old_descriptor_file.read())
+                    new_content = yaml.safe_load(new_descriptor_file.read())
+
+                    if old_content and new_content:
+                        disallowed_change = DeepDiff(
+                            self.remove_modifiable_items(old_content),
+                            self.remove_modifiable_items(new_content),
+                        )
+
+                        if disallowed_change:
+                            changed_nodes = functools.reduce(
+                                lambda a, b: a + ", " + b,
+                                [
+                                    node.lstrip("root")
+                                    for node in disallowed_change.get(
+                                        "values_changed"
+                                    ).keys()
+                                ],
+                            )
+
+                            raise EngineException(
+                                f"Error in validating new descriptor: {changed_nodes} cannot be modified, "
+                                "there are disallowed changes in the ns descriptor. ",
+                                http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
+                            )
+        except (
+            DbException,
+            AttributeError,
+            IndexError,
+            KeyError,
+            ValueError,
+        ) as e:
+            raise type(e)(
+                "NS Descriptor could not be processed with error: {}.".format(e)
+            )
+
     def sol005_projection(self, data):
         data["nsdOnboardingState"] = data["_admin"]["onboardingState"]
         data["nsdOperationalState"] = data["_admin"]["operationalState"]
     def sol005_projection(self, data):
         data["nsdOnboardingState"] = data["_admin"]["onboardingState"]
         data["nsdOperationalState"] = data["_admin"]["operationalState"]
index cb27414..c4c8eb2 100644 (file)
@@ -52,8 +52,10 @@ from osm_nbi.instance_topics import (
     NsiTopic,
     NsiLcmOpTopic,
 )
     NsiTopic,
     NsiLcmOpTopic,
 )
+from osm_nbi.vnf_instance_topics import VnfInstances, VnfLcmOpTopic
 from osm_nbi.pmjobs_topics import PmJobsTopic
 from osm_nbi.subscription_topics import NslcmSubscriptionsTopic
 from osm_nbi.pmjobs_topics import PmJobsTopic
 from osm_nbi.subscription_topics import NslcmSubscriptionsTopic
+from osm_nbi.osm_vnfm.vnf_subscription import VnflcmSubscriptionsTopic
 from base64 import b64encode
 from os import urandom  # , path
 from threading import Lock
 from base64 import b64encode
 from os import urandom  # , path
 from threading import Lock
@@ -85,6 +87,9 @@ class Engine(object):
         "nsilcmops": NsiLcmOpTopic,
         "vnfpkgops": VnfPkgOpTopic,
         "nslcm_subscriptions": NslcmSubscriptionsTopic,
         "nsilcmops": NsiLcmOpTopic,
         "vnfpkgops": VnfPkgOpTopic,
         "nslcm_subscriptions": NslcmSubscriptionsTopic,
+        "vnf_instances": VnfInstances,
+        "vnflcmops": VnfLcmOpTopic,
+        "vnflcm_subscriptions": VnflcmSubscriptionsTopic,
         # [NEW_TOPIC]: add an entry here
         # "pm_jobs": PmJobsTopic will be added manually because it needs other parameters
     }
         # [NEW_TOPIC]: add an entry here
         # "pm_jobs": PmJobsTopic will be added manually because it needs other parameters
     }
@@ -200,7 +205,7 @@ class Engine(object):
             #                 "resources_to_operations file missing")
             #
             #     with open(resources_to_operations_file, 'r') as f:
             #                 "resources_to_operations file missing")
             #
             #     with open(resources_to_operations_file, 'r') as f:
-            #         resources_to_operations = yaml.load(f, Loader=yaml.Loader)
+            #         resources_to_operations = yaml.safe_load(f)
             #
             #     self.operations = []
             #
             #
             #     self.operations = []
             #
@@ -288,7 +293,9 @@ class Engine(object):
         :return: The list, it can be empty if no one match the filter_q.
         """
         if topic not in self.map_topic:
         :return: The list, it can be empty if no one match the filter_q.
         """
         if topic not in self.map_topic:
-            raise EngineException("Unknown topic {}!!!".format(topic), HTTPStatus.INTERNAL_SERVER_ERROR)
+            raise EngineException(
+                "Unknown topic {}!!!".format(topic), HTTPStatus.INTERNAL_SERVER_ERROR
+            )
         return self.map_topic[topic].list(session, filter_q, api_req)
 
     def get_item(self, session, topic, _id, filter_q=None, api_req=False):
         return self.map_topic[topic].list(session, filter_q, api_req)
 
     def get_item(self, session, topic, _id, filter_q=None, api_req=False):
@@ -302,7 +309,9 @@ class Engine(object):
         :return: dictionary, raise exception if not found.
         """
         if topic not in self.map_topic:
         :return: dictionary, raise exception if not found.
         """
         if topic not in self.map_topic:
-            raise EngineException("Unknown topic {}!!!".format(topic), HTTPStatus.INTERNAL_SERVER_ERROR)
+            raise EngineException(
+                "Unknown topic {}!!!".format(topic), HTTPStatus.INTERNAL_SERVER_ERROR
+            )
         return self.map_topic[topic].show(session, _id, filter_q, api_req)
 
     def get_file(self, session, topic, _id, path=None, accept_header=None):
         return self.map_topic[topic].show(session, _id, filter_q, api_req)
 
     def get_file(self, session, topic, _id, path=None, accept_header=None):
@@ -371,6 +380,26 @@ class Engine(object):
         with self.write_lock:
             return self.map_topic[topic].edit(session, _id, indata, kwargs)
 
         with self.write_lock:
             return self.map_topic[topic].edit(session, _id, indata, kwargs)
 
+    def cancel_item(
+        self, rollback, session, topic, indata=None, kwargs=None, headers=None
+    ):
+        """
+        Cancels an item
+        :param rollback: list to append created items at database in case a rollback must to be done
+        :param session: contains the used login username and working project, force to avoid checkins, public
+        :param topic: it can be: users, projects, vim_accounts, sdns, nsrs, nsds, vnfds
+        :param indata: data to be inserted
+        :param kwargs: used to override the indata descriptor
+        :param headers: http request headers
+        :return: _id: identity of the inserted data.
+        """
+        if topic not in self.map_topic:
+            raise EngineException(
+                "Unknown topic {}!!!".format(topic), HTTPStatus.INTERNAL_SERVER_ERROR
+            )
+        with self.write_lock:
+            self.map_topic[topic].cancel(rollback, session, indata, kwargs, headers)
+
     def upgrade_db(self, current_version, target_version):
         if target_version not in self.map_target_version_to_int.keys():
             raise EngineException(
     def upgrade_db(self, current_version, target_version):
         if target_version not in self.map_target_version_to_int.keys():
             raise EngineException(
index 2d5a929..f591a70 100644 (file)
@@ -197,9 +197,10 @@ def format(data, request, response, toke_info):
             body += '<a href="{}"> show </a>'.format(response.headers["Location"])
         else:
             _id = request.path_info[request.path_info.rfind("/") + 1 :]
             body += '<a href="{}"> show </a>'.format(response.headers["Location"])
         else:
             _id = request.path_info[request.path_info.rfind("/") + 1 :]
-            body += '<a href="/osm/{}?METHOD=DELETE"> <img src="/osm/static/delete.png" height="25" width="25"> </a>'.format(
-                request.path_info
-            )
+            body += (
+                '<a href="/osm/{}?METHOD=DELETE"> '
+                '<img src="/osm/static/delete.png" height="25" width="25"> </a>'
+            ).format(request.path_info)
             if request.path_info.startswith(
                 "/nslcm/v1/ns_instances_content/"
             ) or request.path_info.startswith("/nslcm/v1/ns_instances/"):
             if request.path_info.startswith(
                 "/nslcm/v1/ns_instances_content/"
             ) or request.path_info.startswith("/nslcm/v1/ns_instances/"):
index d76b0b1..695a8f8 100644 (file)
@@ -14,6 +14,7 @@
 # limitations under the License.
 
 # import logging
 # limitations under the License.
 
 # import logging
+import json
 from uuid import uuid4
 from http import HTTPStatus
 from time import time
 from uuid import uuid4
 from http import HTTPStatus
 from time import time
@@ -25,7 +26,12 @@ from osm_nbi.validation import (
     ns_terminate,
     ns_action,
     ns_scale,
     ns_terminate,
     ns_action,
     ns_scale,
+    ns_update,
+    ns_heal,
     nsi_instantiate,
     nsi_instantiate,
+    ns_migrate,
+    ns_verticalscale,
+    nslcmop_cancel,
 )
 from osm_nbi.base_topic import (
     BaseTopic,
 )
 from osm_nbi.base_topic import (
     BaseTopic,
@@ -33,6 +39,7 @@ from osm_nbi.base_topic import (
     get_iterable,
     deep_get,
     increment_ip_mac,
     get_iterable,
     deep_get,
     increment_ip_mac,
+    update_descriptor_usage_state,
 )
 from yaml import safe_dump
 from osm_common.dbbase import DbException
 )
 from yaml import safe_dump
 from osm_common.dbbase import DbException
@@ -55,24 +62,6 @@ class NsrTopic(BaseTopic):
     def __init__(self, db, fs, msg, auth):
         BaseTopic.__init__(self, db, fs, msg, auth)
 
     def __init__(self, db, fs, msg, auth):
         BaseTopic.__init__(self, db, fs, msg, auth)
 
-    def _check_descriptor_dependencies(self, session, descriptor):
-        """
-        Check that the dependent descriptors exist on a new descriptor or edition
-        :param session: client session information
-        :param descriptor: descriptor to be inserted or edit
-        :return: None or raises exception
-        """
-        if not descriptor.get("nsdId"):
-            return
-        nsd_id = descriptor["nsdId"]
-        if not self.get_item_list(session, "nsds", {"id": nsd_id}):
-            raise EngineException(
-                "Descriptor error at nsdId='{}' references a non exist nsd".format(
-                    nsd_id
-                ),
-                http_code=HTTPStatus.CONFLICT,
-            )
-
     @staticmethod
     def format_on_new(content, project_id=None, make_public=False):
         BaseTopic.format_on_new(content, project_id=project_id, make_public=make_public)
     @staticmethod
     def format_on_new(content, project_id=None, make_public=False):
         BaseTopic.format_on_new(content, project_id=project_id, make_public=make_public)
@@ -164,9 +153,14 @@ class NsrTopic(BaseTopic):
         ns_request, member_vnf_index=None, vdu_id=None, kdu_name=None, descriptor=None
     ):
         """
         ns_request, member_vnf_index=None, vdu_id=None, kdu_name=None, descriptor=None
     ):
         """
-        Get and format user additional params for NS or VNF
+        Get and format user additional params for NS or VNF.
+        The vdu_id and kdu_name params are mutually exclusive! If none of them are given, then the method will
+        exclusively search for the VNF/NS LCM additional params.
+
         :param ns_request: User instantiation additional parameters
         :param member_vnf_index: None for extract NS params, or member_vnf_index to extract VNF params
         :param ns_request: User instantiation additional parameters
         :param member_vnf_index: None for extract NS params, or member_vnf_index to extract VNF params
+        :vdu_id: VDU's ID against which we want to format the additional params
+        :kdu_name: KDU's name against which we want to format the additional params
         :param descriptor: If not None it check that needed parameters of descriptor are supplied
         :return: tuple with a formatted copy of additional params or None if not supplied, plus other parameters
         """
         :param descriptor: If not None it check that needed parameters of descriptor are supplied
         :return: tuple with a formatted copy of additional params or None if not supplied, plus other parameters
         """
@@ -239,14 +233,19 @@ class NsrTopic(BaseTopic):
                             where_, k
                         )
                     )
                             where_, k
                         )
                     )
-                if "." in k or "$" in k:
+                if "$" in k:
                     raise EngineException(
                     raise EngineException(
-                        "Invalid param at {}:{}. Keys must not contain dots or $".format(
+                        "Invalid param at {}:{}. Keys must not contain $ symbol".format(
                             where_, k
                         )
                     )
                 if isinstance(v, (dict, tuple, list)):
                     additional_params[k] = "!!yaml " + safe_dump(v)
                             where_, k
                         )
                     )
                 if isinstance(v, (dict, tuple, list)):
                     additional_params[k] = "!!yaml " + safe_dump(v)
+            if kdu_name:
+                additional_params = json.dumps(additional_params)
+
+        # Select the VDU ID, KDU name or NS/VNF ID, depending on the method's call intent
+        selector = vdu_id if vdu_id else kdu_name if kdu_name else descriptor.get("id")
 
         if descriptor:
             for df in descriptor.get("df", []):
 
         if descriptor:
             for df in descriptor.get("df", []):
@@ -262,10 +261,13 @@ class NsrTopic(BaseTopic):
                         for config in df["lcm-operations-configuration"][
                             "operate-vnf-op-config"
                         ].get("day1-2", []):
                         for config in df["lcm-operations-configuration"][
                             "operate-vnf-op-config"
                         ].get("day1-2", []):
-                            for primitive in get_iterable(
-                                config.get("initial-config-primitive")
-                            ):
-                                initial_primitives.append(primitive)
+                            # Verify the target object (VNF|NS|VDU|KDU) where we need to populate
+                            # the params with the additional ones given by the user
+                            if config.get("id") == selector:
+                                for primitive in get_iterable(
+                                    config.get("initial-config-primitive")
+                                ):
+                                    initial_primitives.append(primitive)
                 else:
                     initial_primitives = deep_get(
                         descriptor, ("ns-configuration", "initial-config-primitive")
                 else:
                     initial_primitives = deep_get(
                         descriptor, ("ns-configuration", "initial-config-primitive")
@@ -280,6 +282,7 @@ class NsrTopic(BaseTopic):
                                 "<rw_mgmt_ip>",
                                 "<VDU_SCALE_INFO>",
                                 "<ns_config_info>",
                                 "<rw_mgmt_ip>",
                                 "<VDU_SCALE_INFO>",
                                 "<ns_config_info>",
+                                "<OSM>",
                             ):
                                 continue
                             if (
                             ):
                                 continue
                             if (
@@ -309,8 +312,8 @@ class NsrTopic(BaseTopic):
             EngineException, ValidationError, DbException, FsException, MsgException.
             Note: Exceptions are not captured on purpose. They should be captured at called
         """
             EngineException, ValidationError, DbException, FsException, MsgException.
             Note: Exceptions are not captured on purpose. They should be captured at called
         """
+        step = "checking quotas"  # first step must be defined outside try
         try:
         try:
-            step = "checking quotas"
             self.check_quota(session)
 
             step = "validating input parameters"
             self.check_quota(session)
 
             step = "validating input parameters"
@@ -334,7 +337,7 @@ class NsrTopic(BaseTopic):
             # Create VNFRs
             needed_vnfds = {}
             # TODO: Change for multiple df support
             # Create VNFRs
             needed_vnfds = {}
             # TODO: Change for multiple df support
-            vnf_profiles = nsd.get("df", [[]])[0].get("vnf-profile", ())
+            vnf_profiles = nsd.get("df", [{}])[0].get("vnf-profile", ())
             for vnfp in vnf_profiles:
                 vnfd_id = vnfp.get("vnfd-id")
                 vnf_index = vnfp.get("id")
             for vnfp in vnf_profiles:
                 vnfd_id = vnfp.get("vnfd-id")
                 vnf_index = vnfp.get("id")
@@ -345,6 +348,9 @@ class NsrTopic(BaseTopic):
                 )
                 if vnfd_id not in needed_vnfds:
                     vnfd = self._get_vnfd_from_db(vnfd_id, session)
                 )
                 if vnfd_id not in needed_vnfds:
                     vnfd = self._get_vnfd_from_db(vnfd_id, session)
+                    if "revision" in vnfd["_admin"]:
+                        vnfd["revision"] = vnfd["_admin"]["revision"]
+                    vnfd.pop("_admin")
                     needed_vnfds[vnfd_id] = vnfd
                     nsr_descriptor["vnfd-id"].append(vnfd["_id"])
                 else:
                     needed_vnfds[vnfd_id] = vnfd
                     nsr_descriptor["vnfd-id"].append(vnfd["_id"])
                 else:
@@ -368,9 +374,13 @@ class NsrTopic(BaseTopic):
                 )
                 self._add_vnfr_to_db(vnfr_descriptor, rollback, session)
                 nsr_descriptor["constituent-vnfr-ref"].append(vnfr_descriptor["id"])
                 )
                 self._add_vnfr_to_db(vnfr_descriptor, rollback, session)
                 nsr_descriptor["constituent-vnfr-ref"].append(vnfr_descriptor["id"])
+                step = "Updating VNFD usageState"
+                update_descriptor_usage_state(vnfd, "vnfds", self.db)
 
             step = "creating nsr at database"
             self._add_nsr_to_db(nsr_descriptor, rollback, session)
 
             step = "creating nsr at database"
             self._add_nsr_to_db(nsr_descriptor, rollback, session)
+            step = "Updating NSD usageState"
+            update_descriptor_usage_state(nsd, "nsds", self.db)
 
             step = "creating nsr temporal folder"
             self.fs.mkdir(nsr_id)
 
             step = "creating nsr temporal folder"
             self.fs.mkdir(nsr_id)
@@ -394,7 +404,6 @@ class NsrTopic(BaseTopic):
         _filter = self._get_project_filter(session)
         _filter["id"] = vnfd_id
         vnfd = self.db.get_one("vnfds", _filter, fail_on_empty=True, fail_on_more=True)
         _filter = self._get_project_filter(session)
         _filter["id"] = vnfd_id
         vnfd = self.db.get_one("vnfds", _filter, fail_on_empty=True, fail_on_more=True)
-        vnfd.pop("_admin")
         return vnfd
 
     def _add_nsr_to_db(self, nsr_descriptor, rollback, session):
         return vnfd
 
     def _add_nsr_to_db(self, nsr_descriptor, rollback, session):
@@ -432,6 +441,83 @@ class NsrTopic(BaseTopic):
 
         return ns_k8s_namespace
 
 
         return ns_k8s_namespace
 
+    def _add_shared_volumes_to_nsr(
+        self, vdu, vnfd, nsr_descriptor, member_vnf_index, revision=None
+    ):
+        svsd = []
+        for vsd in vnfd.get("virtual-storage-desc", ()):
+            if vsd.get("vdu-storage-requirements"):
+                if (
+                    vsd.get("vdu-storage-requirements")[0].get("key") == "multiattach"
+                    and vsd.get("vdu-storage-requirements")[0].get("value") == "True"
+                ):
+                    # Avoid setting the volume name multiple times
+                    if not match(f"shared-.*-{vnfd['id']}", vsd["id"]):
+                        vsd["id"] = f"shared-{vsd['id']}-{vnfd['id']}"
+                    svsd.append(vsd)
+        if svsd:
+            nsr_descriptor["shared-volumes"] = svsd
+
+    def _add_flavor_to_nsr(
+        self, vdu, vnfd, nsr_descriptor, member_vnf_index, revision=None
+    ):
+        flavor_data = {}
+        guest_epa = {}
+        # Find this vdu compute and storage descriptors
+        vdu_virtual_compute = {}
+        vdu_virtual_storage = {}
+        for vcd in vnfd.get("virtual-compute-desc", ()):
+            if vcd.get("id") == vdu.get("virtual-compute-desc"):
+                vdu_virtual_compute = vcd
+        for vsd in vnfd.get("virtual-storage-desc", ()):
+            if vsd.get("id") == vdu.get("virtual-storage-desc", [[]])[0]:
+                vdu_virtual_storage = vsd
+        # Get this vdu vcpus, memory and storage info for flavor_data
+        if vdu_virtual_compute.get("virtual-cpu", {}).get("num-virtual-cpu"):
+            flavor_data["vcpu-count"] = vdu_virtual_compute["virtual-cpu"][
+                "num-virtual-cpu"
+            ]
+        if vdu_virtual_compute.get("virtual-memory", {}).get("size"):
+            flavor_data["memory-mb"] = (
+                float(vdu_virtual_compute["virtual-memory"]["size"]) * 1024.0
+            )
+        if vdu_virtual_storage.get("size-of-storage"):
+            flavor_data["storage-gb"] = vdu_virtual_storage["size-of-storage"]
+        # Get this vdu EPA info for guest_epa
+        if vdu_virtual_compute.get("virtual-cpu", {}).get("cpu-quota"):
+            guest_epa["cpu-quota"] = vdu_virtual_compute["virtual-cpu"]["cpu-quota"]
+        if vdu_virtual_compute.get("virtual-cpu", {}).get("pinning"):
+            vcpu_pinning = vdu_virtual_compute["virtual-cpu"]["pinning"]
+            if vcpu_pinning.get("thread-policy"):
+                guest_epa["cpu-thread-pinning-policy"] = vcpu_pinning["thread-policy"]
+            if vcpu_pinning.get("policy"):
+                cpu_policy = (
+                    "SHARED" if vcpu_pinning["policy"] == "dynamic" else "DEDICATED"
+                )
+                guest_epa["cpu-pinning-policy"] = cpu_policy
+        if vdu_virtual_compute.get("virtual-memory", {}).get("mem-quota"):
+            guest_epa["mem-quota"] = vdu_virtual_compute["virtual-memory"]["mem-quota"]
+        if vdu_virtual_compute.get("virtual-memory", {}).get("mempage-size"):
+            guest_epa["mempage-size"] = vdu_virtual_compute["virtual-memory"][
+                "mempage-size"
+            ]
+        if vdu_virtual_compute.get("virtual-memory", {}).get("numa-node-policy"):
+            guest_epa["numa-node-policy"] = vdu_virtual_compute["virtual-memory"][
+                "numa-node-policy"
+            ]
+        if vdu_virtual_storage.get("disk-io-quota"):
+            guest_epa["disk-io-quota"] = vdu_virtual_storage["disk-io-quota"]
+
+        if guest_epa:
+            flavor_data["guest-epa"] = guest_epa
+
+        revision = revision if revision is not None else 1
+        flavor_data["name"] = (
+            vdu["id"][:56] + "-" + member_vnf_index + "-" + str(revision) + "-flv"
+        )
+        flavor_data["id"] = str(len(nsr_descriptor["flavor"]))
+        nsr_descriptor["flavor"].append(flavor_data)
+
     def _create_nsr_descriptor_from_nsd(self, nsd, ns_request, nsr_id, session):
         now = time()
         additional_params, _ = self._format_additional_params(
     def _create_nsr_descriptor_from_nsd(self, nsd, ns_request, nsr_id, session):
         now = time()
         additional_params, _ = self._format_additional_params(
@@ -474,11 +560,16 @@ class NsrTopic(BaseTopic):
             "ssh-authorized-key": ns_request.get("ssh_keys"),  # TODO remove
             "flavor": [],
             "image": [],
             "ssh-authorized-key": ns_request.get("ssh_keys"),  # TODO remove
             "flavor": [],
             "image": [],
+            "affinity-or-anti-affinity-group": [],
+            "shared-volumes": [],
+            "vnffgd": [],
         }
         }
+        if "revision" in nsd["_admin"]:
+            nsr_descriptor["revision"] = nsd["_admin"]["revision"]
+
         ns_request["nsr_id"] = nsr_id
         if ns_request and ns_request.get("config-units"):
             nsr_descriptor["config-units"] = ns_request["config-units"]
         ns_request["nsr_id"] = nsr_id
         if ns_request and ns_request.get("config-units"):
             nsr_descriptor["config-units"] = ns_request["config-units"]
-
         # Create vld
         if nsd.get("virtual-link-desc"):
             nsr_vld = deepcopy(nsd.get("virtual-link-desc", []))
         # Create vld
         if nsd.get("virtual-link-desc"):
             nsr_vld = deepcopy(nsd.get("virtual-link-desc", []))
@@ -504,81 +595,14 @@ class NsrTopic(BaseTopic):
                         )
 
                 vnfd = self._get_vnfd_from_db(vnf_profile.get("vnfd-id"), session)
                         )
 
                 vnfd = self._get_vnfd_from_db(vnf_profile.get("vnfd-id"), session)
+                vnfd.pop("_admin")
 
                 for vdu in vnfd.get("vdu", ()):
 
                 for vdu in vnfd.get("vdu", ()):
-                    flavor_data = {}
-                    guest_epa = {}
-                    # Find this vdu compute and storage descriptors
-                    vdu_virtual_compute = {}
-                    vdu_virtual_storage = {}
-                    for vcd in vnfd.get("virtual-compute-desc", ()):
-                        if vcd.get("id") == vdu.get("virtual-compute-desc"):
-                            vdu_virtual_compute = vcd
-                    for vsd in vnfd.get("virtual-storage-desc", ()):
-                        if vsd.get("id") == vdu.get("virtual-storage-desc", [[]])[0]:
-                            vdu_virtual_storage = vsd
-                    # Get this vdu vcpus, memory and storage info for flavor_data
-                    if vdu_virtual_compute.get("virtual-cpu", {}).get(
-                        "num-virtual-cpu"
-                    ):
-                        flavor_data["vcpu-count"] = vdu_virtual_compute["virtual-cpu"][
-                            "num-virtual-cpu"
-                        ]
-                    if vdu_virtual_compute.get("virtual-memory", {}).get("size"):
-                        flavor_data["memory-mb"] = (
-                            float(vdu_virtual_compute["virtual-memory"]["size"])
-                            * 1024.0
-                        )
-                    if vdu_virtual_storage.get("size-of-storage"):
-                        flavor_data["storage-gb"] = vdu_virtual_storage[
-                            "size-of-storage"
-                        ]
-                    # Get this vdu EPA info for guest_epa
-                    if vdu_virtual_compute.get("virtual-cpu", {}).get("cpu-quota"):
-                        guest_epa["cpu-quota"] = vdu_virtual_compute["virtual-cpu"][
-                            "cpu-quota"
-                        ]
-                    if vdu_virtual_compute.get("virtual-cpu", {}).get("pinning"):
-                        vcpu_pinning = vdu_virtual_compute["virtual-cpu"]["pinning"]
-                        if vcpu_pinning.get("thread-policy"):
-                            guest_epa["cpu-thread-pinning-policy"] = vcpu_pinning[
-                                "thread-policy"
-                            ]
-                        if vcpu_pinning.get("policy"):
-                            cpu_policy = (
-                                "SHARED"
-                                if vcpu_pinning["policy"] == "dynamic"
-                                else "DEDICATED"
-                            )
-                            guest_epa["cpu-pinning-policy"] = cpu_policy
-                    if vdu_virtual_compute.get("virtual-memory", {}).get("mem-quota"):
-                        guest_epa["mem-quota"] = vdu_virtual_compute["virtual-memory"][
-                            "mem-quota"
-                        ]
-                    if vdu_virtual_compute.get("virtual-memory", {}).get(
-                        "mempage-size"
-                    ):
-                        guest_epa["mempage-size"] = vdu_virtual_compute[
-                            "virtual-memory"
-                        ]["mempage-size"]
-                    if vdu_virtual_compute.get("virtual-memory", {}).get(
-                        "numa-node-policy"
-                    ):
-                        guest_epa["numa-node-policy"] = vdu_virtual_compute[
-                            "virtual-memory"
-                        ]["numa-node-policy"]
-                    if vdu_virtual_storage.get("disk-io-quota"):
-                        guest_epa["disk-io-quota"] = vdu_virtual_storage[
-                            "disk-io-quota"
-                        ]
-
-                    if guest_epa:
-                        flavor_data["guest-epa"] = guest_epa
-
-                    flavor_data["name"] = vdu["id"][:56] + "-flv"
-                    flavor_data["id"] = str(len(nsr_descriptor["flavor"]))
-                    nsr_descriptor["flavor"].append(flavor_data)
-
+                    member_vnf_index = vnf_profile.get("id")
+                    self._add_flavor_to_nsr(vdu, vnfd, nsr_descriptor, member_vnf_index)
+                    self._add_shared_volumes_to_nsr(
+                        vdu, vnfd, nsr_descriptor, member_vnf_index
+                    )
                     sw_image_id = vdu.get("sw-image-desc")
                     if sw_image_id:
                         image_data = self._get_image_data_from_vnfd(vnfd, sw_image_id)
                     sw_image_id = vdu.get("sw-image-desc")
                     if sw_image_id:
                         image_data = self._get_image_data_from_vnfd(vnfd, sw_image_id)
@@ -589,15 +613,93 @@ class NsrTopic(BaseTopic):
                         image_data = self._get_image_data_from_vnfd(vnfd, alt_image)
                         self._add_image_to_nsr(nsr_descriptor, image_data)
 
                         image_data = self._get_image_data_from_vnfd(vnfd, alt_image)
                         self._add_image_to_nsr(nsr_descriptor, image_data)
 
+                # Add Affinity or Anti-affinity group information to NSR
+                vdu_profiles = vnfd.get("df", [[]])[0].get("vdu-profile", ())
+                affinity_group_prefix_name = "{}-{}".format(
+                    nsr_descriptor["name"][:16], vnf_profile.get("id")[:16]
+                )
+
+                for vdu_profile in vdu_profiles:
+                    affinity_group_data = {}
+                    for affinity_group in vdu_profile.get(
+                        "affinity-or-anti-affinity-group", ()
+                    ):
+                        affinity_group_data = (
+                            self._get_affinity_or_anti_affinity_group_data_from_vnfd(
+                                vnfd, affinity_group["id"]
+                            )
+                        )
+                        affinity_group_data["member-vnf-index"] = vnf_profile.get("id")
+                        self._add_affinity_or_anti_affinity_group_to_nsr(
+                            nsr_descriptor,
+                            affinity_group_data,
+                            affinity_group_prefix_name,
+                        )
+
             for vld in nsr_vld:
                 vld["vnfd-connection-point-ref"] = all_vld_connection_point_data.get(
                     vld.get("id"), []
                 )
                 vld["name"] = vld["id"]
             nsr_descriptor["vld"] = nsr_vld
             for vld in nsr_vld:
                 vld["vnfd-connection-point-ref"] = all_vld_connection_point_data.get(
                     vld.get("id"), []
                 )
                 vld["name"] = vld["id"]
             nsr_descriptor["vld"] = nsr_vld
+        if nsd.get("vnffgd"):
+            vnffgd = nsd.get("vnffgd")
+            for vnffg in vnffgd:
+                info = {}
+                for k, v in vnffg.items():
+                    if k == "id":
+                        info.update({k: v})
+                    if k == "nfpd":
+                        info.update({k: v})
+                nsr_descriptor["vnffgd"].append(info)
 
         return nsr_descriptor
 
 
         return nsr_descriptor
 
+    def _get_affinity_or_anti_affinity_group_data_from_vnfd(
+        self, vnfd, affinity_group_id
+    ):
+        """
+        Gets affinity-or-anti-affinity-group info from df and returns the desired affinity group
+        """
+        affinity_group = utils.find_in_list(
+            vnfd.get("df", [[]])[0].get("affinity-or-anti-affinity-group", ()),
+            lambda ag: ag["id"] == affinity_group_id,
+        )
+        affinity_group_data = {}
+        if affinity_group:
+            if affinity_group.get("id"):
+                affinity_group_data["ag-id"] = affinity_group["id"]
+            if affinity_group.get("type"):
+                affinity_group_data["type"] = affinity_group["type"]
+            if affinity_group.get("scope"):
+                affinity_group_data["scope"] = affinity_group["scope"]
+        return affinity_group_data
+
+    def _add_affinity_or_anti_affinity_group_to_nsr(
+        self, nsr_descriptor, affinity_group_data, affinity_group_prefix_name
+    ):
+        """
+        Adds affinity-or-anti-affinity-group to the nsr, checking first that it is not already added
+        """
+        affinity_group = next(
+            (
+                f
+                for f in nsr_descriptor["affinity-or-anti-affinity-group"]
+                if all(f.get(k) == affinity_group_data[k] for k in affinity_group_data)
+            ),
+            None,
+        )
+        if not affinity_group:
+            affinity_group_data["id"] = str(
+                len(nsr_descriptor["affinity-or-anti-affinity-group"])
+            )
+            affinity_group_data["name"] = "{}-{}".format(
+                affinity_group_prefix_name, affinity_group_data["ag-id"][:32]
+            )
+            nsr_descriptor["affinity-or-anti-affinity-group"].append(
+                affinity_group_data
+            )
+
     def _get_image_data_from_vnfd(self, vnfd, sw_image_id):
         sw_image_desc = utils.find_in_list(
             vnfd.get("sw-image-desc", ()), lambda sw: sw["id"] == sw_image_id
     def _get_image_data_from_vnfd(self, vnfd, sw_image_id):
         sw_image_desc = utils.find_in_list(
             vnfd.get("sw-image-desc", ()), lambda sw: sw["id"] == sw_image_id
@@ -636,6 +738,7 @@ class NsrTopic(BaseTopic):
         nsr_descriptor,
         ns_request,
         ns_k8s_namespace,
         nsr_descriptor,
         ns_request,
         ns_k8s_namespace,
+        revision=None,
     ):
         vnfr_id = str(uuid4())
         nsr_id = nsr_descriptor["id"]
     ):
         vnfr_id = str(uuid4())
         nsr_id = nsr_descriptor["id"]
@@ -660,6 +763,12 @@ class NsrTopic(BaseTopic):
             "connection-point": [],
             "ip-address": None,  # mgmt-interface filled by LCM
         }
             "connection-point": [],
             "ip-address": None,  # mgmt-interface filled by LCM
         }
+
+        # Revision backwards compatibility.  Only specify the revision in the record if
+        # the original VNFD has a revision.
+        if "revision" in vnfd:
+            vnfr_descriptor["revision"] = vnfd["revision"]
+
         vnf_k8s_namespace = ns_k8s_namespace
         if vnf_params:
             if vnf_params.get("k8s-namespace"):
         vnf_k8s_namespace = ns_k8s_namespace
         if vnf_params:
             if vnf_params.get("k8s-namespace"):
@@ -767,6 +876,14 @@ class NsrTopic(BaseTopic):
             additional_params, vdu_params = self._format_additional_params(
                 ns_request, vnf_index, vdu_id=vdu["id"], descriptor=vnfd
             )
             additional_params, vdu_params = self._format_additional_params(
                 ns_request, vnf_index, vdu_id=vdu["id"], descriptor=vnfd
             )
+
+            try:
+                vdu_virtual_storage_descriptors = utils.filter_in_list(
+                    vnfd.get("virtual-storage-desc", []),
+                    lambda stg_desc: stg_desc["id"] in vdu["virtual-storage-desc"],
+                )
+            except Exception:
+                vdu_virtual_storage_descriptors = []
             vdur = {
                 "vdu-id-ref": vdu["id"],
                 # TODO      "name": ""     Name of the VDU in the VIM
             vdur = {
                 "vdu-id-ref": vdu["id"],
                 # TODO      "name": ""     Name of the VDU in the VIM
@@ -776,6 +893,7 @@ class NsrTopic(BaseTopic):
                 "interfaces": [],
                 "additionalParams": additional_params,
                 "vdu-name": vdu["name"],
                 "interfaces": [],
                 "additionalParams": additional_params,
                 "vdu-name": vdu["name"],
+                "virtual-storages": vdu_virtual_storage_descriptors,
             }
             if vdu_params and vdu_params.get("config-units"):
                 vdur["config-units"] = vdu_params["config-units"]
             }
             if vdu_params and vdu_params.get("config-units"):
                 vdur["config-units"] = vdu_params["config-units"]
@@ -797,7 +915,10 @@ class NsrTopic(BaseTopic):
                 vdur["internal-connection-point"].append(vdu_icp)
 
                 for iface in icp.get("virtual-network-interface-requirement", ()):
                 vdur["internal-connection-point"].append(vdu_icp)
 
                 for iface in icp.get("virtual-network-interface-requirement", ()):
-                    iface_fields = ("name", "mac-address")
+                    # The name, mac-address, position and ip-address are taken from the
+                    # VNFD and included in the VNFR, so that RO can process this
+                    # information while creating the VDU.
+                    iface_fields = ("name", "mac-address", "position", "ip-address")
                     vdu_iface = {
                         x: iface[x] for x in iface_fields if iface.get(x) is not None
                     }
                     vdu_iface = {
                         x: iface[x] for x in iface_fields if iface.get(x) is not None
                     }
@@ -867,7 +988,7 @@ class NsrTopic(BaseTopic):
                                         if (
                                             cpd.get("constituent-cpd-id")
                                             == iface_ext_cp
                                         if (
                                             cpd.get("constituent-cpd-id")
                                             == iface_ext_cp
-                                        ):
+                                        ) and vnf_profile.get("id") == vnf_index:
                                             vdu_iface["ns-vld-id"] = vlc.get(
                                                 "virtual-link-profile-id"
                                             )
                                             vdu_iface["ns-vld-id"] = vlc.get(
                                                 "virtual-link-profile-id"
                                             )
@@ -921,7 +1042,10 @@ class NsrTopic(BaseTopic):
                     alt_image_ids.append(nsr_sw_image_data["id"])
                 vdur["alt-image-ids"] = alt_image_ids
 
                     alt_image_ids.append(nsr_sw_image_data["id"])
                 vdur["alt-image-ids"] = alt_image_ids
 
-            flavor_data_name = vdu["id"][:56] + "-flv"
+            revision = revision if revision is not None else 1
+            flavor_data_name = (
+                vdu["id"][:56] + "-" + vnf_index + "-" + str(revision) + "-flv"
+            )
             nsr_flavor_desc = utils.find_in_list(
                 nsr_descriptor["flavor"],
                 lambda flavor: flavor["name"] == flavor_data_name,
             nsr_flavor_desc = utils.find_in_list(
                 nsr_descriptor["flavor"],
                 lambda flavor: flavor["name"] == flavor_data_name,
@@ -930,6 +1054,64 @@ class NsrTopic(BaseTopic):
             if nsr_flavor_desc:
                 vdur["ns-flavor-id"] = nsr_flavor_desc["id"]
 
             if nsr_flavor_desc:
                 vdur["ns-flavor-id"] = nsr_flavor_desc["id"]
 
+            # Adding Shared Volume information to vdur
+            if vdur.get("virtual-storages"):
+                nsr_sv = []
+                for vsd in vdur["virtual-storages"]:
+                    if vsd.get("vdu-storage-requirements"):
+                        if (
+                            vsd["vdu-storage-requirements"][0].get("key")
+                            == "multiattach"
+                            and vsd["vdu-storage-requirements"][0].get("value")
+                            == "True"
+                        ):
+                            nsr_sv.append(vsd["id"])
+                if nsr_sv:
+                    vdur["shared-volumes-id"] = nsr_sv
+
+            # Adding Affinity groups information to vdur
+            try:
+                vdu_profile_affinity_group = utils.find_in_list(
+                    vnfd.get("df")[0]["vdu-profile"],
+                    lambda a_vdu: a_vdu["id"] == vdu["id"],
+                )
+            except Exception:
+                vdu_profile_affinity_group = None
+
+            if vdu_profile_affinity_group:
+                affinity_group_ids = []
+                for affinity_group in vdu_profile_affinity_group.get(
+                    "affinity-or-anti-affinity-group", ()
+                ):
+                    vdu_affinity_group = utils.find_in_list(
+                        vdu_profile_affinity_group.get(
+                            "affinity-or-anti-affinity-group", ()
+                        ),
+                        lambda ag_fp: ag_fp["id"] == affinity_group["id"],
+                    )
+                    nsr_affinity_group = utils.find_in_list(
+                        nsr_descriptor["affinity-or-anti-affinity-group"],
+                        lambda nsr_ag: (
+                            nsr_ag.get("ag-id") == vdu_affinity_group.get("id")
+                            and nsr_ag.get("member-vnf-index")
+                            == vnfr_descriptor.get("member-vnf-index-ref")
+                        ),
+                    )
+                    # Update Affinity Group VIM name if VDU instantiation parameter is present
+                    if vnf_params and vnf_params.get("affinity-or-anti-affinity-group"):
+                        vnf_params_affinity_group = utils.find_in_list(
+                            vnf_params["affinity-or-anti-affinity-group"],
+                            lambda vnfp_ag: (
+                                vnfp_ag.get("id") == vdu_affinity_group.get("id")
+                            ),
+                        )
+                        if vnf_params_affinity_group.get("vim-affinity-group-id"):
+                            nsr_affinity_group[
+                                "vim-affinity-group-id"
+                            ] = vnf_params_affinity_group["vim-affinity-group-id"]
+                    affinity_group_ids.append(nsr_affinity_group["id"])
+                vdur["affinity-or-anti-affinity-group-id"] = affinity_group_ids
+
             if vdu_instantiation_level:
                 count = vdu_instantiation_level.get("number-of-instances")
             else:
             if vdu_instantiation_level:
                 count = vdu_instantiation_level.get("number-of-instances")
             else:
@@ -947,7 +1129,6 @@ class NsrTopic(BaseTopic):
                 vdur["id"] = vdur["_id"]
                 vdur["count-index"] = index
                 vnfr_descriptor["vdur"].append(vdur)
                 vdur["id"] = vdur["_id"]
                 vdur["count-index"] = index
                 vnfr_descriptor["vdur"].append(vdur)
-
         return vnfr_descriptor
 
     def vca_status_refresh(self, session, ns_instance_content, filter_q):
         return vnfr_descriptor
 
     def vca_status_refresh(self, session, ns_instance_content, filter_q):
@@ -959,15 +1140,22 @@ class NsrTopic(BaseTopic):
         :param filter_q: dict: query parameter containing vcaStatus-refresh as true or false
         :return: None
         """
         :param filter_q: dict: query parameter containing vcaStatus-refresh as true or false
         :return: None
         """
-        time_now, time_delta = time(), time() - ns_instance_content["_admin"]["modified"]
-        force_refresh = isinstance(filter_q, dict) and filter_q.get('vcaStatusRefresh') == 'true'
+        time_now, time_delta = (
+            time(),
+            time() - ns_instance_content["_admin"]["modified"],
+        )
+        force_refresh = (
+            isinstance(filter_q, dict) and filter_q.get("vcaStatusRefresh") == "true"
+        )
         threshold_reached = time_delta > 120
         if force_refresh or threshold_reached:
             operation, _id = "vca_status_refresh", ns_instance_content["_id"]
             ns_instance_content["_admin"]["modified"] = time_now
             self.db.set_one(self.topic, {"_id": _id}, ns_instance_content)
             nslcmop_desc = NsLcmOpTopic._create_nslcmop(_id, operation, None)
         threshold_reached = time_delta > 120
         if force_refresh or threshold_reached:
             operation, _id = "vca_status_refresh", ns_instance_content["_id"]
             ns_instance_content["_admin"]["modified"] = time_now
             self.db.set_one(self.topic, {"_id": _id}, ns_instance_content)
             nslcmop_desc = NsLcmOpTopic._create_nslcmop(_id, operation, None)
-            self.format_on_new(nslcmop_desc, session["project_id"], make_public=session["public"])
+            self.format_on_new(
+                nslcmop_desc, session["project_id"], make_public=session["public"]
+            )
             nslcmop_desc["_admin"].pop("nsState")
             self.msg.write("ns", operation, nslcmop_desc)
         return
             nslcmop_desc["_admin"].pop("nsState")
             self.msg.write("ns", operation, nslcmop_desc)
         return
@@ -1021,18 +1209,24 @@ class NsLcmOpTopic(BaseTopic):
     operation_schema = {  # mapping between operation and jsonschema to validate
         "instantiate": ns_instantiate,
         "action": ns_action,
     operation_schema = {  # mapping between operation and jsonschema to validate
         "instantiate": ns_instantiate,
         "action": ns_action,
+        "update": ns_update,
         "scale": ns_scale,
         "scale": ns_scale,
+        "heal": ns_heal,
         "terminate": ns_terminate,
         "terminate": ns_terminate,
+        "migrate": ns_migrate,
+        "verticalscale": ns_verticalscale,
+        "cancel": nslcmop_cancel,
     }
 
     def __init__(self, db, fs, msg, auth):
         BaseTopic.__init__(self, db, fs, msg, auth)
     }
 
     def __init__(self, db, fs, msg, auth):
         BaseTopic.__init__(self, db, fs, msg, auth)
+        self.nsrtopic = NsrTopic(db, fs, msg, auth)
 
     def _check_ns_operation(self, session, nsr, operation, indata):
         """
         Check that user has enter right parameters for the operation
         :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
 
     def _check_ns_operation(self, session, nsr, operation, indata):
         """
         Check that user has enter right parameters for the operation
         :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
-        :param operation: it can be: instantiate, terminate, action, TODO: update, heal
+        :param operation: it can be: instantiate, terminate, action, update, heal
         :param indata: descriptor with the parameters of the operation
         :return: None
         """
         :param indata: descriptor with the parameters of the operation
         :return: None
         """
@@ -1040,6 +1234,10 @@ class NsLcmOpTopic(BaseTopic):
             self._check_action_ns_operation(indata, nsr)
         elif operation == "scale":
             self._check_scale_ns_operation(indata, nsr)
             self._check_action_ns_operation(indata, nsr)
         elif operation == "scale":
             self._check_scale_ns_operation(indata, nsr)
+        elif operation == "update":
+            self._check_update_ns_operation(indata, nsr)
+        elif operation == "heal":
+            self._check_heal_ns_operation(indata, nsr)
         elif operation == "instantiate":
             self._check_instantiate_ns_operation(indata, nsr, session)
 
         elif operation == "instantiate":
             self._check_instantiate_ns_operation(indata, nsr, session)
 
@@ -1132,6 +1330,94 @@ class NsLcmOpTopic(BaseTopic):
                 )
             )
 
                 )
             )
 
+    def _check_update_ns_operation(self, indata, nsr) -> None:
+        """Validates the ns-update request according to updateType
+
+        If updateType is CHANGE_VNFPKG:
+        - it checks that the vnfInstanceId is available under the ns instance
+        - it checks that the vnfdId matches the vnfd-id in the vnf-record of the specified VNF.
+        Otherwise an exception will be raised.
+        If updateType is REMOVE_VNF:
+        - it checks that the vnfInstanceId is available in the ns instance.
+        Otherwise an exception will be raised.
+
+        Args:
+            indata: includes updateType such as CHANGE_VNFPKG,
+            nsr: network service record
+
+        Raises:
+           EngineException:
+                a meaningful error if given update parameters are not proper such as
+                "Error in validating ns-update request: <ID> does not match
+                with the vnfd-id of vnfinstance
+                http_code=HTTPStatus.UNPROCESSABLE_ENTITY"
+
+        """
+        try:
+            if indata["updateType"] == "CHANGE_VNFPKG":
+                # vnfInstanceId, nsInstanceId, vnfdId are mandatory
+                vnf_instance_id = indata["changeVnfPackageData"]["vnfInstanceId"]
+                ns_instance_id = indata["nsInstanceId"]
+                vnfd_id_2update = indata["changeVnfPackageData"]["vnfdId"]
+
+                if vnf_instance_id not in nsr["constituent-vnfr-ref"]:
+                    raise EngineException(
+                        f"Error in validating ns-update request: vnf {vnf_instance_id} does not "
+                        f"belong to NS {ns_instance_id}",
+                        http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
+                    )
+
+                # Getting vnfrs through the ns_instance_id
+                vnfrs = self.db.get_list("vnfrs", {"nsr-id-ref": ns_instance_id})
+                constituent_vnfd_id = next(
+                    (
+                        vnfr["vnfd-id"]
+                        for vnfr in vnfrs
+                        if vnfr["id"] == vnf_instance_id
+                    ),
+                    None,
+                )
+
+                # Check the given vnfd-id belongs to given vnf instance
+                if constituent_vnfd_id and (vnfd_id_2update != constituent_vnfd_id):
+                    raise EngineException(
+                        f"Error in validating ns-update request: vnfd-id {vnfd_id_2update} does not "
+                        f"match with the vnfd-id: {constituent_vnfd_id} of VNF instance: {vnf_instance_id}",
+                        http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
+                    )
+
+                # Validating the ns update timeout
+                if (
+                    indata.get("timeout_ns_update")
+                    and indata["timeout_ns_update"] < 300
+                ):
+                    raise EngineException(
+                        "Error in validating ns-update request: {} second is not enough "
+                        "to upgrade the VNF instance: {}".format(
+                            indata["timeout_ns_update"], vnf_instance_id
+                        ),
+                        http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
+                    )
+            elif indata["updateType"] == "REMOVE_VNF":
+                vnf_instance_id = indata["removeVnfInstanceId"]
+                ns_instance_id = indata["nsInstanceId"]
+                if vnf_instance_id not in nsr["constituent-vnfr-ref"]:
+                    raise EngineException(
+                        "Invalid VNF Instance Id. '{}' is not "
+                        "present in the NS '{}'".format(vnf_instance_id, ns_instance_id)
+                    )
+
+        except (
+            DbException,
+            AttributeError,
+            IndexError,
+            KeyError,
+            ValueError,
+        ) as e:
+            raise type(e)(
+                "Ns update request could not be processed with error: {}.".format(e)
+            )
+
     def _check_scale_ns_operation(self, indata, nsr):
         vnfd = self._get_vnfd_from_vnf_member_index(
             indata["scaleVnfData"]["scaleByStepData"]["member-vnf-index"], nsr["_id"]
     def _check_scale_ns_operation(self, indata, nsr):
         vnfd = self._get_vnfd_from_vnf_member_index(
             indata["scaleVnfData"]["scaleByStepData"]["member-vnf-index"], nsr["_id"]
@@ -1152,6 +1438,9 @@ class NsLcmOpTopic(BaseTopic):
                 )
             )
 
                 )
             )
 
+    def _check_heal_ns_operation(self, indata, nsr):
+        return
+
     def _check_instantiate_ns_operation(self, indata, nsr, session):
         vnf_member_index_to_vnfd = {}  # map between vnf_member_index to vnf descriptor.
         vim_accounts = []
     def _check_instantiate_ns_operation(self, indata, nsr, session):
         vnf_member_index_to_vnfd = {}  # map between vnf_member_index to vnf descriptor.
         vim_accounts = []
@@ -1202,7 +1491,18 @@ class NsLcmOpTopic(BaseTopic):
                 "Invalid parameter member_vnf_index='{}' is not one of the "
                 "nsd:constituent-vnfd".format(member_vnf_index)
             )
                 "Invalid parameter member_vnf_index='{}' is not one of the "
                 "nsd:constituent-vnfd".format(member_vnf_index)
             )
-        vnfd = self.db.get_one("vnfds", {"_id": vnfr["vnfd-id"]}, fail_on_empty=False)
+
+        # Backwards compatibility: if there is no revision, get it from the one and only VNFD entry
+        if "revision" in vnfr:
+            vnfd_revision = vnfr["vnfd-id"] + ":" + str(vnfr["revision"])
+            vnfd = self.db.get_one(
+                "vnfds_revisions", {"_id": vnfd_revision}, fail_on_empty=False
+            )
+        else:
+            vnfd = self.db.get_one(
+                "vnfds", {"_id": vnfr["vnfd-id"]}, fail_on_empty=False
+            )
+
         if not vnfd:
             raise EngineException(
                 "vnfd id={} has been deleted!. Operation cannot be performed".format(
         if not vnfd:
             raise EngineException(
                 "vnfd id={} has been deleted!. Operation cannot be performed".format(
@@ -1239,7 +1539,7 @@ class NsLcmOpTopic(BaseTopic):
                 if in_vdu["id"] == vdu["id"]:
                     for volume in get_iterable(in_vdu.get("volume")):
                         for volumed in get_iterable(vdu.get("virtual-storage-desc")):
                 if in_vdu["id"] == vdu["id"]:
                     for volume in get_iterable(in_vdu.get("volume")):
                         for volumed in get_iterable(vdu.get("virtual-storage-desc")):
-                            if volumed["id"] == volume["name"]:
+                            if volumed == volume["name"]:
                                 break
                         else:
                             raise EngineException(
                                 break
                         else:
                             raise EngineException(
@@ -1259,7 +1559,7 @@ class NsLcmOpTopic(BaseTopic):
                         ):
                             vdu_if_names.add(iface.get("name"))
 
                         ):
                             vdu_if_names.add(iface.get("name"))
 
-                    for in_iface in get_iterable(in_vdu["interface"]):
+                    for in_iface in get_iterable(in_vdu.get("interface")):
                         if in_iface["name"] in vdu_if_names:
                             break
                         else:
                         if in_iface["name"] in vdu_if_names:
                             break
                         else:
@@ -1283,8 +1583,8 @@ class NsLcmOpTopic(BaseTopic):
             ivld.get("id"): set()
             for ivld in get_iterable(vnfd.get("int-virtual-link-desc"))
         }
             ivld.get("id"): set()
             for ivld in get_iterable(vnfd.get("int-virtual-link-desc"))
         }
-        for vdu in get_iterable(vnfd.get("vdu")):
-            for cpd in get_iterable(vnfd.get("int-cpd")):
+        for vdu in vnfd.get("vdu", {}):
+            for cpd in vdu.get("int-cpd", {}):
                 if cpd.get("int-virtual-link-desc"):
                     vnfd_ivlds_cpds[cpd.get("int-virtual-link-desc")] = cpd.get("id")
 
                 if cpd.get("int-virtual-link-desc"):
                     vnfd_ivlds_cpds[cpd.get("int-virtual-link-desc")] = cpd.get("id")
 
@@ -1326,13 +1626,23 @@ class NsLcmOpTopic(BaseTopic):
             )
         vim_accounts.append(vim_account)
 
             )
         vim_accounts.append(vim_account)
 
+    def _get_vim_account(self, vim_id: str, session):
+        try:
+            db_filter = self._get_project_filter(session)
+            db_filter["_id"] = vim_id
+            return self.db.get_one("vim_accounts", db_filter)
+        except Exception:
+            raise EngineException(
+                "Invalid vimAccountId='{}' not present for the project".format(vim_id)
+            )
+
     def _check_valid_wim_account(self, wim_account, wim_accounts, session):
         if not isinstance(wim_account, str):
             return
         if wim_account in wim_accounts:
             return
         try:
     def _check_valid_wim_account(self, wim_account, wim_accounts, session):
         if not isinstance(wim_account, str):
             return
         if wim_account in wim_accounts:
             return
         try:
-            db_filter = self._get_project_filter(session, write=False, show_all=True)
+            db_filter = self._get_project_filter(session)
             db_filter["_id"] = wim_account
             self.db.get_one("wim_accounts", db_filter)
         except Exception:
             db_filter["_id"] = wim_account
             self.db.get_one("wim_accounts", db_filter)
         except Exception:
@@ -1588,6 +1898,86 @@ class NsLcmOpTopic(BaseTopic):
             # TODO check that this forcing is not incompatible with other forcing
         return ifaces_forcing_vim_network
 
             # TODO check that this forcing is not incompatible with other forcing
         return ifaces_forcing_vim_network
 
+    def _update_vnfrs_from_nsd(self, nsr):
+        step = "Getting vnf_profiles from nsd"  # first step must be defined outside try
+        try:
+            nsr_id = nsr["_id"]
+            nsd = nsr["nsd"]
+
+            vnf_profiles = nsd.get("df", [{}])[0].get("vnf-profile", ())
+            vld_fixed_ip_connection_point_data = {}
+
+            step = "Getting ip-address info from vnf_profile if it exists"
+            for vnfp in vnf_profiles:
+                # Checking ip-address info from nsd.vnf_profile and storing
+                for vlc in vnfp.get("virtual-link-connectivity", ()):
+                    for cpd in vlc.get("constituent-cpd-id", ()):
+                        if cpd.get("ip-address"):
+                            step = "Storing ip-address info"
+                            vld_fixed_ip_connection_point_data.update(
+                                {
+                                    vlc.get("virtual-link-profile-id")
+                                    + "."
+                                    + cpd.get("constituent-base-element-id"): {
+                                        "vnfd-connection-point-ref": cpd.get(
+                                            "constituent-cpd-id"
+                                        ),
+                                        "ip-address": cpd.get("ip-address"),
+                                    }
+                                }
+                            )
+
+            # Inserting ip address to vnfr
+            if len(vld_fixed_ip_connection_point_data) > 0:
+                step = "Getting vnfrs"
+                vnfrs = self.db.get_list("vnfrs", {"nsr-id-ref": nsr_id})
+                for item in vld_fixed_ip_connection_point_data.keys():
+                    step = "Filtering vnfrs"
+                    vnfr = next(
+                        filter(
+                            lambda vnfr: vnfr["member-vnf-index-ref"]
+                            == item.split(".")[1],
+                            vnfrs,
+                        ),
+                        None,
+                    )
+                    if vnfr:
+                        vnfr_update = {}
+                        for vdur_index, vdur in enumerate(vnfr["vdur"]):
+                            for iface_index, iface in enumerate(vdur["interfaces"]):
+                                step = "Looking for matched interface"
+                                if (
+                                    iface.get("external-connection-point-ref")
+                                    == vld_fixed_ip_connection_point_data[item].get(
+                                        "vnfd-connection-point-ref"
+                                    )
+                                    and iface.get("ns-vld-id") == item.split(".")[0]
+                                ):
+                                    vnfr_update_text = "vdur.{}.interfaces.{}".format(
+                                        vdur_index, iface_index
+                                    )
+                                    step = "Storing info in order to update vnfr"
+                                    vnfr_update[
+                                        vnfr_update_text + ".ip-address"
+                                    ] = increment_ip_mac(
+                                        vld_fixed_ip_connection_point_data[item].get(
+                                            "ip-address"
+                                        ),
+                                        vdur.get("count-index", 0),
+                                    )
+                                    vnfr_update[vnfr_update_text + ".fixed-ip"] = True
+
+                        step = "updating vnfr at database"
+                        self.db.set_one("vnfrs", {"_id": vnfr["_id"]}, vnfr_update)
+        except (
+            ValidationError,
+            EngineException,
+            DbException,
+            MsgException,
+            FsException,
+        ) as e:
+            raise type(e)("{} while '{}'".format(e, step), http_code=e.http_code)
+
     def _update_vnfrs(self, session, rollback, nsr, indata):
         # get vnfr
         nsr_id = nsr["_id"]
     def _update_vnfrs(self, session, rollback, nsr, indata):
         # get vnfr
         nsr_id = nsr["_id"]
@@ -1600,15 +1990,14 @@ class NsLcmOpTopic(BaseTopic):
             # update vim-account-id
 
             vim_account = indata["vimAccountId"]
             # update vim-account-id
 
             vim_account = indata["vimAccountId"]
-            vca_id = indata.get("vcaId")
+            vca_id = self._get_vim_account(vim_account, session).get("vca")
             # check instantiate parameters
             for vnf_inst_params in get_iterable(indata.get("vnf")):
                 if vnf_inst_params["member-vnf-index"] != member_vnf_index:
                     continue
                 if vnf_inst_params.get("vimAccountId"):
                     vim_account = vnf_inst_params.get("vimAccountId")
             # check instantiate parameters
             for vnf_inst_params in get_iterable(indata.get("vnf")):
                 if vnf_inst_params["member-vnf-index"] != member_vnf_index:
                     continue
                 if vnf_inst_params.get("vimAccountId"):
                     vim_account = vnf_inst_params.get("vimAccountId")
-                if vnf_inst_params.get("vcaId"):
-                    vca_id = vnf_inst_params.get("vcaId")
+                    vca_id = self._get_vim_account(vim_account, session).get("vca")
 
                 # get vnf.vdu.interface instantiation params to update vnfr.vdur.interfaces ip, mac
                 for vdu_inst_param in get_iterable(vnf_inst_params.get("vdu")):
 
                 # get vnf.vdu.interface instantiation params to update vnfr.vdur.interfaces ip, mac
                 for vdu_inst_param in get_iterable(vnf_inst_params.get("vdu")):
@@ -1788,7 +2177,7 @@ class NsLcmOpTopic(BaseTopic):
         """
         Creates a ns-lcm-opp content to be stored at database.
         :param nsr_id: internal id of the instance
         """
         Creates a ns-lcm-opp content to be stored at database.
         :param nsr_id: internal id of the instance
-        :param operation: instantiate, terminate, scale, action, ...
+        :param operation: instantiate, terminate, scale, action, update ...
         :param params: user parameters for the operation
         :return: dictionary following SOL005 format
         """
         :param params: user parameters for the operation
         :return: dictionary following SOL005 format
         """
@@ -1844,7 +2233,7 @@ class NsLcmOpTopic(BaseTopic):
         :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
         :param indata: descriptor with the parameters of the operation. It must contains among others
             nsInstanceId: _id of the nsr to perform the operation
         :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
         :param indata: descriptor with the parameters of the operation. It must contains among others
             nsInstanceId: _id of the nsr to perform the operation
-            operation: it can be: instantiate, terminate, action, TODO: update, heal
+            operation: it can be: instantiate, terminate, action, update TODO: heal
         :param kwargs: used to override the indata descriptor
         :param headers: http request headers
         :return: id of the nslcmops
         :param kwargs: used to override the indata descriptor
         :param headers: http request headers
         :return: id of the nslcmops
@@ -1906,10 +2295,75 @@ class NsLcmOpTopic(BaseTopic):
                         HTTPStatus.CONFLICT,
                     )
             self._check_ns_operation(session, nsr, operation, indata)
                         HTTPStatus.CONFLICT,
                     )
             self._check_ns_operation(session, nsr, operation, indata)
+            if indata.get("primitive_params"):
+                indata["primitive_params"] = json.dumps(indata["primitive_params"])
+            elif indata.get("additionalParamsForVnf"):
+                indata["additionalParamsForVnf"] = json.dumps(
+                    indata["additionalParamsForVnf"]
+                )
 
             if operation == "instantiate":
 
             if operation == "instantiate":
+                self._update_vnfrs_from_nsd(nsr)
                 self._update_vnfrs(session, rollback, nsr, indata)
                 self._update_vnfrs(session, rollback, nsr, indata)
-
+            if (operation == "update") and (indata["updateType"] == "CHANGE_VNFPKG"):
+                nsr_update = {}
+                vnfd_id = indata["changeVnfPackageData"]["vnfdId"]
+                vnfd = self.db.get_one("vnfds", {"_id": vnfd_id})
+                nsd = self.db.get_one("nsds", {"_id": nsr["nsd-id"]})
+                ns_request = nsr["instantiate_params"]
+                vnfr = self.db.get_one(
+                    "vnfrs", {"_id": indata["changeVnfPackageData"]["vnfInstanceId"]}
+                )
+                latest_vnfd_revision = vnfd["_admin"].get("revision", 1)
+                vnfr_vnfd_revision = vnfr.get("revision", 1)
+                if latest_vnfd_revision != vnfr_vnfd_revision:
+                    old_vnfd_id = vnfd_id + ":" + str(vnfr_vnfd_revision)
+                    old_db_vnfd = self.db.get_one(
+                        "vnfds_revisions", {"_id": old_vnfd_id}
+                    )
+                    old_sw_version = old_db_vnfd.get("software-version", "1.0")
+                    new_sw_version = vnfd.get("software-version", "1.0")
+                    if new_sw_version != old_sw_version:
+                        vnf_index = vnfr["member-vnf-index-ref"]
+                        self.logger.info("nsr {}".format(nsr))
+                        for vdu in vnfd["vdu"]:
+                            self.nsrtopic._add_shared_volumes_to_nsr(
+                                vdu, vnfd, nsr, vnf_index, latest_vnfd_revision
+                            )
+                            self.nsrtopic._add_flavor_to_nsr(
+                                vdu, vnfd, nsr, vnf_index, latest_vnfd_revision
+                            )
+                            sw_image_id = vdu.get("sw-image-desc")
+                            if sw_image_id:
+                                image_data = self.nsrtopic._get_image_data_from_vnfd(
+                                    vnfd, sw_image_id
+                                )
+                                self.nsrtopic._add_image_to_nsr(nsr, image_data)
+                            for alt_image in vdu.get("alternative-sw-image-desc", ()):
+                                image_data = self.nsrtopic._get_image_data_from_vnfd(
+                                    vnfd, alt_image
+                                )
+                                self.nsrtopic._add_image_to_nsr(nsr, image_data)
+                        nsr_update["image"] = nsr["image"]
+                        nsr_update["flavor"] = nsr["flavor"]
+                        nsr_update["shared-volumes"] = nsr["shared-volumes"]
+                        self.db.set_one("nsrs", {"_id": nsr["_id"]}, nsr_update)
+                        ns_k8s_namespace = self.nsrtopic._get_ns_k8s_namespace(
+                            nsd, ns_request, session
+                        )
+                        vnfr_descriptor = (
+                            self.nsrtopic._create_vnfr_descriptor_from_vnfd(
+                                nsd,
+                                vnfd,
+                                vnfd_id,
+                                vnf_index,
+                                nsr,
+                                ns_request,
+                                ns_k8s_namespace,
+                                latest_vnfd_revision,
+                            )
+                        )
+                        indata["newVdur"] = vnfr_descriptor["vdur"]
             nslcmop_desc = self._create_nslcmop(nsInstanceId, operation, indata)
             _id = nslcmop_desc["_id"]
             self.format_on_new(
             nslcmop_desc = self._create_nslcmop(nsInstanceId, operation, indata)
             _id = nslcmop_desc["_id"]
             self.format_on_new(
@@ -1930,6 +2384,41 @@ class NsLcmOpTopic(BaseTopic):
         # except DbException as e:
         #     raise EngineException("Cannot get ns_instance '{}': {}".format(e), HTTPStatus.NOT_FOUND)
 
         # except DbException as e:
         #     raise EngineException("Cannot get ns_instance '{}': {}".format(e), HTTPStatus.NOT_FOUND)
 
+    def cancel(self, rollback, session, indata=None, kwargs=None, headers=None):
+        validate_input(indata, self.operation_schema["cancel"])
+        # Override descriptor with query string kwargs
+        self._update_input_with_kwargs(indata, kwargs, yaml_format=True)
+        nsLcmOpOccId = indata["nsLcmOpOccId"]
+        cancelMode = indata["cancelMode"]
+        # get nslcmop from nsLcmOpOccId
+        _filter = BaseTopic._get_project_filter(session)
+        _filter["_id"] = nsLcmOpOccId
+        nslcmop = self.db.get_one("nslcmops", _filter)
+        # Fail is this is not an ongoing nslcmop
+        if nslcmop.get("operationState") not in [
+            "STARTING",
+            "PROCESSING",
+            "ROLLING_BACK",
+        ]:
+            raise EngineException(
+                "Operation is not in STARTING, PROCESSING or ROLLING_BACK state",
+                http_code=HTTPStatus.CONFLICT,
+            )
+        nsInstanceId = nslcmop["nsInstanceId"]
+        update_dict = {
+            "isCancelPending": True,
+            "cancelMode": cancelMode,
+        }
+        self.db.set_one(
+            "nslcmops", q_filter=_filter, update_dict=update_dict, fail_on_empty=False
+        )
+        data = {
+            "_id": nsLcmOpOccId,
+            "nsInstanceId": nsInstanceId,
+            "cancelMode": cancelMode,
+        }
+        self.msg.write("nslcmops", "cancel", data)
+
     def delete(self, session, _id, dry_run=False, not_send_msg=None):
         raise EngineException(
             "Method delete called directly", HTTPStatus.INTERNAL_SERVER_ERROR
     def delete(self, session, _id, dry_run=False, not_send_msg=None):
         raise EngineException(
             "Method delete called directly", HTTPStatus.INTERNAL_SERVER_ERROR
@@ -1982,24 +2471,6 @@ class NsiTopic(BaseTopic):
                     additional_params[k] = "!!yaml " + safe_dump(v)
         return additional_params
 
                     additional_params[k] = "!!yaml " + safe_dump(v)
         return additional_params
 
-    def _check_descriptor_dependencies(self, session, descriptor):
-        """
-        Check that the dependent descriptors exist on a new descriptor or edition
-        :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
-        :param descriptor: descriptor to be inserted or edit
-        :return: None or raises exception
-        """
-        if not descriptor.get("nst-ref"):
-            return
-        nstd_id = descriptor["nst-ref"]
-        if not self.get_item_list(session, "nsts", {"id": nstd_id}):
-            raise EngineException(
-                "Descriptor error at nst-ref='{}' references a non exist nstd".format(
-                    nstd_id
-                ),
-                http_code=HTTPStatus.CONFLICT,
-            )
-
     def check_conflict_on_del(self, session, _id, db_content):
         """
         Check that NSI is not instantiated
     def check_conflict_on_del(self, session, _id, db_content):
         """
         Check that NSI is not instantiated
@@ -2085,8 +2556,8 @@ class NsiTopic(BaseTopic):
         :return: the _id of nsi descriptor created at database
         """
 
         :return: the _id of nsi descriptor created at database
         """
 
+        step = "checking quotas"  # first step must be defined outside try
         try:
         try:
-            step = "checking quotas"
             self.check_quota(session)
 
             step = ""
             self.check_quota(session)
 
             step = ""
@@ -2274,13 +2745,13 @@ class NsiTopic(BaseTopic):
             self.db.create("nsis", nsi_descriptor)
             rollback.append({"topic": "nsis", "_id": nsi_id})
             return nsi_id, None
             self.db.create("nsis", nsi_descriptor)
             rollback.append({"topic": "nsis", "_id": nsi_id})
             return nsi_id, None
+        except ValidationError as e:
+            raise EngineException(e, HTTPStatus.UNPROCESSABLE_ENTITY)
         except Exception as e:  # TODO remove try Except, it is captured at nbi.py
             self.logger.exception(
                 "Exception {} at NsiTopic.new()".format(e), exc_info=True
             )
             raise EngineException("Error {}: {}".format(step, e))
         except Exception as e:  # TODO remove try Except, it is captured at nbi.py
             self.logger.exception(
                 "Exception {} at NsiTopic.new()".format(e), exc_info=True
             )
             raise EngineException("Error {}: {}".format(step, e))
-        except ValidationError as e:
-            raise EngineException(e, HTTPStatus.UNPROCESSABLE_ENTITY)
 
     def edit(self, session, _id, indata=None, kwargs=None, content=None):
         raise EngineException(
 
     def edit(self, session, _id, indata=None, kwargs=None, content=None):
         raise EngineException(
index 60320ad..7035cae 100644 (file)
@@ -116,5 +116,16 @@ backend: "keystone"         # internal or keystone or tacacs
 # tacacs_port: 49    # Default value
 # tacacs_timeout: 10 # Default value
 
 # tacacs_port: 49    # Default value
 # tacacs_timeout: 10 # Default value
 
+# User Management configuration
+user_management: True
+pwd_expire_days: 30         # Password expiry Default value
+max_pwd_attempt: 5
+account_expire_days: 90     # Account expiry Default value
+
+# CEF Configuration
+version: "0"
+deviceVendor: "OSM"
+deviceProduct: "OSM"
+
 [rbac]
 # roles_to_operations: "roles_to_operations.yml"  # initial role generation when database
 [rbac]
 # roles_to_operations: "roles_to_operations.yml"  # initial role generation when database
index 67cf58b..8f87135 100644 (file)
@@ -28,6 +28,7 @@ from osm_nbi.authconn import AuthException, AuthconnException
 from osm_nbi.auth import Authenticator
 from osm_nbi.engine import Engine, EngineException
 from osm_nbi.subscriptions import SubscriptionThread
 from osm_nbi.auth import Authenticator
 from osm_nbi.engine import Engine, EngineException
 from osm_nbi.subscriptions import SubscriptionThread
+from osm_nbi.utils import cef_event, cef_event_builder
 from osm_nbi.validation import ValidationError
 from osm_common.dbbase import DbException
 from osm_common.fsbase import FsException
 from osm_nbi.validation import ValidationError
 from osm_common.dbbase import DbException
 from osm_common.fsbase import FsException
@@ -46,6 +47,7 @@ database_version = "1.2"
 auth_database_version = "1.0"
 nbi_server = None  # instance of Server class
 subscription_thread = None  # instance of SubscriptionThread class
 auth_database_version = "1.0"
 nbi_server = None  # instance of Server class
 subscription_thread = None  # instance of SubscriptionThread class
+cef_logger = None
 
 """
 North Bound Interface  (O: OSM specific; 5,X: SOL005 not implemented yet; O5: SOL005 implemented)
 
 """
 North Bound Interface  (O: OSM specific; 5,X: SOL005 not implemented yet; O5: SOL005 implemented)
@@ -85,10 +87,12 @@ URL: /osm                                                       GET     POST
                     terminate                                           O5
                     action                                              O
                     scale                                               O5
                     terminate                                           O5
                     action                                              O
                     scale                                               O5
-                    heal                                                5
+                    migrate                                             O
+                    update                                              05
+                    heal                                                O5
             /ns_lcm_op_occs                                     5       5
                 /<nsLcmOpOccId>                                 5                       5       5
             /ns_lcm_op_occs                                     5       5
                 /<nsLcmOpOccId>                                 5                       5       5
-                    TO BE COMPLETED                             5               5
+                    cancel                                              05
             /vnf_instances  (also vnfrs for compatibility)      O
                 /<vnfInstanceId>                                O
             /subscriptions                                      5       5
             /vnf_instances  (also vnfrs for compatibility)      O
                 /<vnfInstanceId>                                O
             /subscriptions                                      5       5
@@ -170,7 +174,7 @@ query string:
         ADMIN: To act as an administrator or a different project
         PUBLIC: To get public descriptors or set a descriptor as public
         SET_PROJECT: To make a descriptor available for other project
         ADMIN: To act as an administrator or a different project
         PUBLIC: To get public descriptors or set a descriptor as public
         SET_PROJECT: To make a descriptor available for other project
-        
+
 Header field name      Reference       Example Descriptions
     Accept     IETF RFC 7231 [19]      application/json        Content-Types that are acceptable for the response.
     This header field shall be present if the response is expected to have a non-empty message body.
 Header field name      Reference       Example Descriptions
     Accept     IETF RFC 7231 [19]      application/json        Content-Types that are acceptable for the response.
     This header field shall be present if the response is expected to have a non-empty message body.
@@ -431,6 +435,10 @@ valid_url_methods = {
                 "<ID>": {
                     "METHODS": ("GET", "DELETE"),
                     "ROLE_PERMISSION": "ns_instances:id:",
                 "<ID>": {
                     "METHODS": ("GET", "DELETE"),
                     "ROLE_PERMISSION": "ns_instances:id:",
+                    "heal": {
+                        "METHODS": ("POST",),
+                        "ROLE_PERMISSION": "ns_instances:id:heal:",
+                    },
                     "scale": {
                         "METHODS": ("POST",),
                         "ROLE_PERMISSION": "ns_instances:id:scale:",
                     "scale": {
                         "METHODS": ("POST",),
                         "ROLE_PERMISSION": "ns_instances:id:scale:",
@@ -443,10 +451,22 @@ valid_url_methods = {
                         "METHODS": ("POST",),
                         "ROLE_PERMISSION": "ns_instances:id:instantiate:",
                     },
                         "METHODS": ("POST",),
                         "ROLE_PERMISSION": "ns_instances:id:instantiate:",
                     },
+                    "migrate": {
+                        "METHODS": ("POST",),
+                        "ROLE_PERMISSION": "ns_instances:id:migrate:",
+                    },
                     "action": {
                         "METHODS": ("POST",),
                         "ROLE_PERMISSION": "ns_instances:id:action:",
                     },
                     "action": {
                         "METHODS": ("POST",),
                         "ROLE_PERMISSION": "ns_instances:id:action:",
                     },
+                    "update": {
+                        "METHODS": ("POST",),
+                        "ROLE_PERMISSION": "ns_instances:id:update:",
+                    },
+                    "verticalscale": {
+                        "METHODS": ("POST",),
+                        "ROLE_PERMISSION": "ns_instances:id:verticalscale:",
+                    },
                 },
             },
             "ns_lcm_op_occs": {
                 },
             },
             "ns_lcm_op_occs": {
@@ -455,6 +475,10 @@ valid_url_methods = {
                 "<ID>": {
                     "METHODS": ("GET",),
                     "ROLE_PERMISSION": "ns_instances:opps:id:",
                 "<ID>": {
                     "METHODS": ("GET",),
                     "ROLE_PERMISSION": "ns_instances:opps:id:",
+                    "cancel": {
+                        "METHODS": ("POST",),
+                        "ROLE_PERMISSION": "ns_instances:opps:cancel:",
+                    },
                 },
             },
             "vnfrs": {
                 },
             },
             "vnfrs": {
@@ -477,6 +501,46 @@ valid_url_methods = {
             },
         }
     },
             },
         }
     },
+    "vnflcm": {
+        "v1": {
+            "vnf_instances": {
+                "METHODS": ("GET", "POST"),
+                "ROLE_PERMISSION": "vnflcm_instances:",
+                "<ID>": {
+                    "METHODS": ("GET", "DELETE"),
+                    "ROLE_PERMISSION": "vnflcm_instances:id:",
+                    "scale": {
+                        "METHODS": ("POST",),
+                        "ROLE_PERMISSION": "vnflcm_instances:id:scale:",
+                    },
+                    "terminate": {
+                        "METHODS": ("POST",),
+                        "ROLE_PERMISSION": "vnflcm_instances:id:terminate:",
+                    },
+                    "instantiate": {
+                        "METHODS": ("POST",),
+                        "ROLE_PERMISSION": "vnflcm_instances:id:instantiate:",
+                    },
+                },
+            },
+            "vnf_lcm_op_occs": {
+                "METHODS": ("GET",),
+                "ROLE_PERMISSION": "vnf_instances:opps:",
+                "<ID>": {
+                    "METHODS": ("GET",),
+                    "ROLE_PERMISSION": "vnf_instances:opps:id:",
+                },
+            },
+            "subscriptions": {
+                "METHODS": ("GET", "POST"),
+                "ROLE_PERMISSION": "vnflcm_subscriptions:",
+                "<ID>": {
+                    "METHODS": ("GET", "DELETE"),
+                    "ROLE_PERMISSION": "vnflcm_subscriptions:id:",
+                },
+            },
+        }
+    },
     "nst": {
         "v1": {
             "netslice_templates_content": {
     "nst": {
         "v1": {
             "netslice_templates_content": {
@@ -571,12 +635,14 @@ valid_url_methods = {
     },
     "nsfm": {
         "v1": {
     },
     "nsfm": {
         "v1": {
-            "alarms": {"METHODS": ("GET", "PATCH"),
-                       "ROLE_PERMISSION": "alarms:",
-                       "<ID>": {"METHODS": ("GET", "PATCH"),
-                                "ROLE_PERMISSION": "alarms:id:",
-                                },
-                       }
+            "alarms": {
+                "METHODS": ("GET", "PATCH"),
+                "ROLE_PERMISSION": "alarms:",
+                "<ID>": {
+                    "METHODS": ("GET", "PATCH"),
+                    "ROLE_PERMISSION": "alarms:id:",
+                },
+            }
         },
     },
 }
         },
     },
 }
@@ -599,6 +665,7 @@ class Server(object):
         self.engine = Engine(self.authenticator)
 
     def _format_in(self, kwargs):
         self.engine = Engine(self.authenticator)
 
     def _format_in(self, kwargs):
+        error_text = ""  # error_text must be initialized outside try
         try:
             indata = None
             if cherrypy.request.body.length:
         try:
             indata = None
             if cherrypy.request.body.length:
@@ -611,9 +678,7 @@ class Server(object):
                         cherrypy.request.headers.pop("Content-File-MD5", None)
                     elif "application/yaml" in cherrypy.request.headers["Content-Type"]:
                         error_text = "Invalid yaml format "
                         cherrypy.request.headers.pop("Content-File-MD5", None)
                     elif "application/yaml" in cherrypy.request.headers["Content-Type"]:
                         error_text = "Invalid yaml format "
-                        indata = yaml.load(
-                            cherrypy.request.body, Loader=yaml.SafeLoader
-                        )
+                        indata = yaml.safe_load(cherrypy.request.body)
                         cherrypy.request.headers.pop("Content-File-MD5", None)
                     elif (
                         "application/binary" in cherrypy.request.headers["Content-Type"]
                         cherrypy.request.headers.pop("Content-File-MD5", None)
                     elif (
                         "application/binary" in cherrypy.request.headers["Content-Type"]
@@ -643,13 +708,11 @@ class Server(object):
                         #                          "Only 'Content-Type' of type 'application/json' or
                         # 'application/yaml' for input format are available")
                         error_text = "Invalid yaml format "
                         #                          "Only 'Content-Type' of type 'application/json' or
                         # 'application/yaml' for input format are available")
                         error_text = "Invalid yaml format "
-                        indata = yaml.load(
-                            cherrypy.request.body, Loader=yaml.SafeLoader
-                        )
+                        indata = yaml.safe_load(cherrypy.request.body)
                         cherrypy.request.headers.pop("Content-File-MD5", None)
                 else:
                     error_text = "Invalid yaml format "
                         cherrypy.request.headers.pop("Content-File-MD5", None)
                 else:
                     error_text = "Invalid yaml format "
-                    indata = yaml.load(cherrypy.request.body, Loader=yaml.SafeLoader)
+                    indata = yaml.safe_load(cherrypy.request.body)
                     cherrypy.request.headers.pop("Content-File-MD5", None)
             if not indata:
                 indata = {}
                     cherrypy.request.headers.pop("Content-File-MD5", None)
             if not indata:
                 indata = {}
@@ -664,7 +727,7 @@ class Server(object):
                         kwargs[k] = None
                     elif format_yaml:
                         try:
                         kwargs[k] = None
                     elif format_yaml:
                         try:
-                            kwargs[k] = yaml.load(v, Loader=yaml.SafeLoader)
+                            kwargs[k] = yaml.safe_load(v)
                         except Exception:
                             pass
                     elif (
                         except Exception:
                             pass
                     elif (
@@ -688,7 +751,7 @@ class Server(object):
                             v[index] = None
                         elif format_yaml:
                             try:
                             v[index] = None
                         elif format_yaml:
                             try:
-                                v[index] = yaml.load(v[index], Loader=yaml.SafeLoader)
+                                v[index] = yaml.safe_load(v[index])
                             except Exception:
                                 pass
 
                             except Exception:
                                 pass
 
@@ -835,55 +898,88 @@ class Server(object):
 
     # NS Fault Management
     @cherrypy.expose
 
     # NS Fault Management
     @cherrypy.expose
-    def nsfm(self, version=None, topic=None, uuid=None, project_name=None, ns_id=None, *args, **kwargs):
-        if topic == 'alarms':
+    def nsfm(
+        self,
+        version=None,
+        topic=None,
+        uuid=None,
+        project_name=None,
+        ns_id=None,
+        *args,
+        **kwargs
+    ):
+        if topic == "alarms":
             try:
                 method = cherrypy.request.method
             try:
                 method = cherrypy.request.method
-                role_permission = self._check_valid_url_method(method, "nsfm", version, topic, None, None, *args)
-                query_string_operations = self._extract_query_string_operations(kwargs, method)
+                role_permission = self._check_valid_url_method(
+                    method, "nsfm", version, topic, None, None, *args
+                )
+                query_string_operations = self._extract_query_string_operations(
+                    kwargs, method
+                )
 
 
-                self.authenticator.authorize(role_permission, query_string_operations, None)
+                self.authenticator.authorize(
+                    role_permission, query_string_operations, None
+                )
 
                 # to handle get request
 
                 # to handle get request
-                if cherrypy.request.method == 'GET':
+                if cherrypy.request.method == "GET":
                     # if request is on basis of uuid
                     # if request is on basis of uuid
-                    if uuid and uuid != 'None':
+                    if uuid and uuid != "None":
                         try:
                             alarm = self.engine.db.get_one("alarms", {"uuid": uuid})
                         try:
                             alarm = self.engine.db.get_one("alarms", {"uuid": uuid})
-                            alarm_action = self.engine.db.get_one("alarms_action", {"uuid": uuid})
+                            alarm_action = self.engine.db.get_one(
+                                "alarms_action", {"uuid": uuid}
+                            )
                             alarm.update(alarm_action)
                             alarm.update(alarm_action)
-                            vnf = self.engine.db.get_one("vnfrs", {"nsr-id-ref": alarm["tags"]["ns_id"]})
+                            vnf = self.engine.db.get_one(
+                                "vnfrs", {"nsr-id-ref": alarm["tags"]["ns_id"]}
+                            )
                             alarm["vnf-id"] = vnf["_id"]
                             return self._format_out(str(alarm))
                         except Exception:
                             return self._format_out("Please provide valid alarm uuid")
                             alarm["vnf-id"] = vnf["_id"]
                             return self._format_out(str(alarm))
                         except Exception:
                             return self._format_out("Please provide valid alarm uuid")
-                    elif ns_id and ns_id != 'None':
+                    elif ns_id and ns_id != "None":
                         # if request is on basis of ns_id
                         try:
                         # if request is on basis of ns_id
                         try:
-                            alarms = self.engine.db.get_list("alarms", {"tags.ns_id": ns_id})
+                            alarms = self.engine.db.get_list(
+                                "alarms", {"tags.ns_id": ns_id}
+                            )
                             for alarm in alarms:
                             for alarm in alarms:
-                                alarm_action = self.engine.db.get_one("alarms_action", {"uuid": alarm['uuid']})
+                                alarm_action = self.engine.db.get_one(
+                                    "alarms_action", {"uuid": alarm["uuid"]}
+                                )
                                 alarm.update(alarm_action)
                             return self._format_out(str(alarms))
                         except Exception:
                             return self._format_out("Please provide valid ns id")
                     else:
                         # to return only alarm which are related to given project
                                 alarm.update(alarm_action)
                             return self._format_out(str(alarms))
                         except Exception:
                             return self._format_out("Please provide valid ns id")
                     else:
                         # to return only alarm which are related to given project
-                        project = self.engine.db.get_one("projects", {"name": project_name})
-                        project_id = project.get('_id')
-                        ns_list = self.engine.db.get_list("nsrs", {"_admin.projects_read": project_id})
+                        project = self.engine.db.get_one(
+                            "projects", {"name": project_name}
+                        )
+                        project_id = project.get("_id")
+                        ns_list = self.engine.db.get_list(
+                            "nsrs", {"_admin.projects_read": project_id}
+                        )
                         ns_ids = []
                         for ns in ns_list:
                             ns_ids.append(ns.get("_id"))
                         alarms = self.engine.db.get_list("alarms")
                         ns_ids = []
                         for ns in ns_list:
                             ns_ids.append(ns.get("_id"))
                         alarms = self.engine.db.get_list("alarms")
-                        alarm_list = [alarm for alarm in alarms if alarm["tags"]["ns_id"] in ns_ids]
+                        alarm_list = [
+                            alarm
+                            for alarm in alarms
+                            if alarm["tags"]["ns_id"] in ns_ids
+                        ]
                         for alrm in alarm_list:
                         for alrm in alarm_list:
-                            action = self.engine.db.get_one("alarms_action", {"uuid": alrm.get("uuid")})
+                            action = self.engine.db.get_one(
+                                "alarms_action", {"uuid": alrm.get("uuid")}
+                            )
                             alrm.update(action)
                         return self._format_out(str(alarm_list))
                 # to handle patch request for alarm update
                             alrm.update(action)
                         return self._format_out(str(alarm_list))
                 # to handle patch request for alarm update
-                elif cherrypy.request.method == 'PATCH':
-                    data = yaml.load(cherrypy.request.body, Loader=yaml.SafeLoader)
+                elif cherrypy.request.method == "PATCH":
+                    data = yaml.safe_load(cherrypy.request.body)
                     try:
                         # check if uuid is valid
                         self.engine.db.get_one("alarms", {"uuid": data.get("uuid")})
                     try:
                         # check if uuid is valid
                         self.engine.db.get_one("alarms", {"uuid": data.get("uuid")})
@@ -891,24 +987,42 @@ class Server(object):
                         return self._format_out("Please provide valid alarm uuid.")
                     if data.get("is_enable") is not None:
                         if data.get("is_enable"):
                         return self._format_out("Please provide valid alarm uuid.")
                     if data.get("is_enable") is not None:
                         if data.get("is_enable"):
-                            alarm_status = 'ok'
+                            alarm_status = "ok"
                         else:
                         else:
-                            alarm_status = 'disabled'
-                        self.engine.db.set_one("alarms", {"uuid": data.get("uuid")},
-                                               {"alarm_status": alarm_status})
+                            alarm_status = "disabled"
+                        self.engine.db.set_one(
+                            "alarms",
+                            {"uuid": data.get("uuid")},
+                            {"alarm_status": alarm_status},
+                        )
                     else:
                     else:
-                        self.engine.db.set_one("alarms", {"uuid": data.get("uuid")},
-                                               {"threshold": data.get("threshold")})
+                        self.engine.db.set_one(
+                            "alarms",
+                            {"uuid": data.get("uuid")},
+                            {"threshold": data.get("threshold")},
+                        )
                     return self._format_out("Alarm updated")
             except Exception as e:
                     return self._format_out("Alarm updated")
             except Exception as e:
-                cherrypy.response.status = e.http_code.value
-                if isinstance(e, (NbiException, EngineException, DbException, FsException, MsgException, AuthException,
-                              ValidationError, AuthconnException)):
+                if isinstance(
+                    e,
+                    (
+                        NbiException,
+                        EngineException,
+                        DbException,
+                        FsException,
+                        MsgException,
+                        AuthException,
+                        ValidationError,
+                        AuthconnException,
+                    ),
+                ):
                     http_code_value = cherrypy.response.status = e.http_code.value
                     http_code_name = e.http_code.name
                     cherrypy.log("Exception {}".format(e))
                 else:
                     http_code_value = cherrypy.response.status = e.http_code.value
                     http_code_name = e.http_code.name
                     cherrypy.log("Exception {}".format(e))
                 else:
-                    http_code_value = cherrypy.response.status = HTTPStatus.BAD_REQUEST.value  # INTERNAL_SERVER_ERROR
+                    http_code_value = (
+                        cherrypy.response.status
+                    ) = HTTPStatus.BAD_REQUEST.value  # INTERNAL_SERVER_ERROR
                     cherrypy.log("CRITICAL: Exception {}".format(e), traceback=True)
                     http_code_name = HTTPStatus.BAD_REQUEST.name
                 problem_details = {
                     cherrypy.log("CRITICAL: Exception {}".format(e), traceback=True)
                     http_code_name = HTTPStatus.BAD_REQUEST.name
                 problem_details = {
@@ -949,13 +1063,30 @@ class Server(object):
             outdata = token_info = self.authenticator.new_token(
                 token_info, indata, cherrypy.request.remote
             )
             outdata = token_info = self.authenticator.new_token(
                 token_info, indata, cherrypy.request.remote
             )
-            cherrypy.session["Authorization"] = outdata["_id"]
+            cherrypy.session["Authorization"] = outdata["_id"]  # pylint: disable=E1101
             self._set_location_header("admin", "v1", "tokens", outdata["_id"])
             # for logging
             self._format_login(token_info)
             self._set_location_header("admin", "v1", "tokens", outdata["_id"])
             # for logging
             self._format_login(token_info)
-
+            # password expiry check
+            if self.authenticator.check_password_expiry(outdata):
+                outdata = {
+                    "id": outdata["id"],
+                    "message": "change_password",
+                    "user_id": outdata["user_id"],
+                }
             # cherrypy.response.cookie["Authorization"] = outdata["id"]
             # cherrypy.response.cookie["Authorization"]['expires'] = 3600
             # cherrypy.response.cookie["Authorization"] = outdata["id"]
             # cherrypy.response.cookie["Authorization"]['expires'] = 3600
+            cef_event(
+                cef_logger,
+                {
+                    "name": "User Login",
+                    "sourceUserName": token_info.get("username"),
+                    "message": "User Logged In, Project={} Outcome=Success".format(
+                        token_info.get("project_name")
+                    ),
+                },
+            )
+            cherrypy.log("{}".format(cef_logger))
         elif method == "DELETE":
             if not token_id and "id" in kwargs:
                 token_id = kwargs["id"]
         elif method == "DELETE":
             if not token_id and "id" in kwargs:
                 token_id = kwargs["id"]
@@ -964,9 +1095,27 @@ class Server(object):
                 # for logging
                 self._format_login(token_info)
                 token_id = token_info["_id"]
                 # for logging
                 self._format_login(token_info)
                 token_id = token_info["_id"]
+            if current_backend != "keystone":
+                token_details = self.engine.db.get_one("tokens", {"_id": token_id})
+                current_user = token_details.get("username")
+                current_project = token_details.get("project_name")
+            else:
+                current_user = "keystone backend"
+                current_project = "keystone backend"
             outdata = self.authenticator.del_token(token_id)
             token_info = None
             outdata = self.authenticator.del_token(token_id)
             token_info = None
-            cherrypy.session["Authorization"] = "logout"
+            cherrypy.session["Authorization"] = "logout"  # pylint: disable=E1101
+            cef_event(
+                cef_logger,
+                {
+                    "name": "User Logout",
+                    "sourceUserName": current_user,
+                    "message": "User Logged Out, Project={} Outcome=Success".format(
+                        current_project
+                    ),
+                },
+            )
+            cherrypy.log("{}".format(cef_logger))
             # cherrypy.response.cookie["Authorization"] = token_id
             # cherrypy.response.cookie["Authorization"]['expires'] = 0
         else:
             # cherrypy.response.cookie["Authorization"] = token_id
             # cherrypy.response.cookie["Authorization"]['expires'] = 0
         else:
@@ -994,7 +1143,8 @@ class Server(object):
         elif args and args[0] == "init":
             try:
                 # self.engine.load_dbase(cherrypy.request.app.config)
         elif args and args[0] == "init":
             try:
                 # self.engine.load_dbase(cherrypy.request.app.config)
-                self.engine.create_admin()
+                pid = self.authenticator.create_admin_project()
+                self.authenticator.create_admin_user(pid)
                 return "Done. User 'admin', password 'admin' created"
             except Exception:
                 cherrypy.response.status = HTTPStatus.FORBIDDEN.value
                 return "Done. User 'admin', password 'admin' created"
             except Exception:
                 cherrypy.response.status = HTTPStatus.FORBIDDEN.value
@@ -1052,13 +1202,13 @@ class Server(object):
             return_text = "<html><pre>{} ->\n".format(main_topic)
             try:
                 if cherrypy.request.method == "POST":
             return_text = "<html><pre>{} ->\n".format(main_topic)
             try:
                 if cherrypy.request.method == "POST":
-                    to_send = yaml.load(cherrypy.request.body, Loader=yaml.SafeLoader)
+                    to_send = yaml.safe_load(cherrypy.request.body)
                     for k, v in to_send.items():
                         self.engine.msg.write(main_topic, k, v)
                         return_text += "  {}: {}\n".format(k, v)
                 elif cherrypy.request.method == "GET":
                     for k, v in kwargs.items():
                     for k, v in to_send.items():
                         self.engine.msg.write(main_topic, k, v)
                         return_text += "  {}: {}\n".format(k, v)
                 elif cherrypy.request.method == "GET":
                     for k, v in kwargs.items():
-                        v_dict = yaml.load(v, Loader=yaml.SafeLoader)
+                        v_dict = yaml.safe_load(v)
                         self.engine.msg.write(main_topic, k, v_dict)
                         return_text += "  {}: {}\n".format(k, v_dict)
             except Exception as e:
                         self.engine.msg.write(main_topic, k, v_dict)
                         return_text += "  {}: {}\n".format(k, v_dict)
             except Exception as e:
@@ -1072,10 +1222,12 @@ class Server(object):
             + "  headers: {}\n".format(cherrypy.request.headers)
             + "  path_info: {}\n".format(cherrypy.request.path_info)
             + "  query_string: {}\n".format(cherrypy.request.query_string)
             + "  headers: {}\n".format(cherrypy.request.headers)
             + "  path_info: {}\n".format(cherrypy.request.path_info)
             + "  query_string: {}\n".format(cherrypy.request.query_string)
-            + "  session: {}\n".format(cherrypy.session)
+            + "  session: {}\n".format(cherrypy.session)  # pylint: disable=E1101
             + "  cookie: {}\n".format(cherrypy.request.cookie)
             + "  method: {}\n".format(cherrypy.request.method)
             + "  cookie: {}\n".format(cherrypy.request.cookie)
             + "  method: {}\n".format(cherrypy.request.method)
-            + "  session: {}\n".format(cherrypy.session.get("fieldname"))
+            + "  session: {}\n".format(
+                cherrypy.session.get("fieldname")  # pylint: disable=E1101
+            )
             + "  body:\n"
         )
         return_text += "    length: {}\n".format(cherrypy.request.body.length)
             + "  body:\n"
         )
         return_text += "    length: {}\n".format(cherrypy.request.body.length)
@@ -1267,12 +1419,20 @@ class Server(object):
         **kwargs
     ):
         token_info = None
         **kwargs
     ):
         token_info = None
-        outdata = None
+        outdata = {}
         _format = None
         method = "DONE"
         engine_topic = None
         rollback = []
         engine_session = None
         _format = None
         method = "DONE"
         engine_topic = None
         rollback = []
         engine_session = None
+        url_id = ""
+        log_mapping = {
+            "POST": "Creating",
+            "GET": "Fetching",
+            "DELETE": "Deleting",
+            "PUT": "Updating",
+            "PATCH": "Updating",
+        }
         try:
             if not main_topic or not version or not topic:
                 raise NbiException(
         try:
             if not main_topic or not version or not topic:
                 raise NbiException(
@@ -1288,6 +1448,7 @@ class Server(object):
                 "nst",
                 "nsilcm",
                 "nspm",
                 "nst",
                 "nsilcm",
                 "nspm",
+                "vnflcm",
             ):
                 raise NbiException(
                     "URL main_topic '{}' not supported".format(main_topic),
             ):
                 raise NbiException(
                     "URL main_topic '{}' not supported".format(main_topic),
@@ -1298,6 +1459,8 @@ class Server(object):
                     "URL version '{}' not supported".format(version),
                     HTTPStatus.METHOD_NOT_ALLOWED,
                 )
                     "URL version '{}' not supported".format(version),
                     HTTPStatus.METHOD_NOT_ALLOWED,
                 )
+            if _id is not None:
+                url_id = _id
 
             if (
                 kwargs
 
             if (
                 kwargs
@@ -1342,6 +1505,9 @@ class Server(object):
                     engine_topic = "nslcmops"
                 if topic == "vnfrs" or topic == "vnf_instances":
                     engine_topic = "vnfrs"
                     engine_topic = "nslcmops"
                 if topic == "vnfrs" or topic == "vnf_instances":
                     engine_topic = "vnfrs"
+            elif main_topic == "vnflcm":
+                if topic == "vnf_lcm_op_occs":
+                    engine_topic = "vnflcmops"
             elif main_topic == "nst":
                 engine_topic = "nsts"
             elif main_topic == "nsilcm":
             elif main_topic == "nst":
                 engine_topic = "nsts"
             elif main_topic == "nsilcm":
@@ -1395,7 +1561,9 @@ class Server(object):
                     filter_q = None
                     if "vcaStatusRefresh" in kwargs:
                         filter_q = {"vcaStatusRefresh": kwargs["vcaStatusRefresh"]}
                     filter_q = None
                     if "vcaStatusRefresh" in kwargs:
                         filter_q = {"vcaStatusRefresh": kwargs["vcaStatusRefresh"]}
-                    outdata = self.engine.get_item(engine_session, engine_topic, _id, filter_q, True)
+                    outdata = self.engine.get_item(
+                        engine_session, engine_topic, _id, filter_q, True
+                    )
 
             elif method == "POST":
                 cherrypy.response.status = HTTPStatus.CREATED.value
 
             elif method == "POST":
                 cherrypy.response.status = HTTPStatus.CREATED.value
@@ -1499,6 +1667,24 @@ class Server(object):
                         "_links": link,
                     }
                     cherrypy.response.status = HTTPStatus.CREATED.value
                         "_links": link,
                     }
                     cherrypy.response.status = HTTPStatus.CREATED.value
+                elif topic == "vnf_instances" and item:
+                    indata["lcmOperationType"] = item
+                    indata["vnfInstanceId"] = _id
+                    _id, _ = self.engine.new_item(
+                        rollback, engine_session, "vnflcmops", indata, kwargs
+                    )
+                    self._set_location_header(
+                        main_topic, version, "vnf_lcm_op_occs", _id
+                    )
+                    outdata = {"id": _id}
+                    cherrypy.response.status = HTTPStatus.ACCEPTED.value
+                elif topic == "ns_lcm_op_occs" and item == "cancel":
+                    indata["nsLcmOpOccId"] = _id
+                    self.engine.cancel_item(
+                        rollback, engine_session, "nslcmops", indata, None
+                    )
+                    self._set_location_header(main_topic, version, topic, _id)
+                    cherrypy.response.status = HTTPStatus.ACCEPTED.value
                 else:
                     _id, op_id = self.engine.new_item(
                         rollback,
                 else:
                     _id, op_id = self.engine.new_item(
                         rollback,
@@ -1610,6 +1796,36 @@ class Server(object):
             ):
                 self.authenticator.remove_token_from_cache()
 
             ):
                 self.authenticator.remove_token_from_cache()
 
+            if item is not None:
+                cef_event(
+                    cef_logger,
+                    {
+                        "name": "User Operation",
+                        "sourceUserName": token_info.get("username"),
+                        "message": "Performing {} operation on {} {}, Project={} Outcome=Success".format(
+                            item,
+                            topic,
+                            url_id,
+                            token_info.get("project_name"),
+                        ),
+                    },
+                )
+                cherrypy.log("{}".format(cef_logger))
+            else:
+                cef_event(
+                    cef_logger,
+                    {
+                        "name": "User Operation",
+                        "sourceUserName": token_info.get("username"),
+                        "message": "{} {} {}, Project={} Outcome=Success".format(
+                            log_mapping[method],
+                            topic,
+                            url_id,
+                            token_info.get("project_name"),
+                        ),
+                    },
+                )
+                cherrypy.log("{}".format(cef_logger))
             return self._format_out(outdata, token_info, _format)
         except Exception as e:
             if isinstance(
             return self._format_out(outdata, token_info, _format)
         except Exception as e:
             if isinstance(
@@ -1651,7 +1867,6 @@ class Server(object):
                         self.engine.db.del_list(
                             rollback_item["topic"],
                             rollback_item["filter"],
                         self.engine.db.del_list(
                             rollback_item["topic"],
                             rollback_item["filter"],
-                            fail_on_empty=False,
                         )
                     else:
                         self.engine.db.del_one(
                         )
                     else:
                         self.engine.db.del_one(
@@ -1673,6 +1888,38 @@ class Server(object):
                 "status": http_code_value,
                 "detail": error_text,
             }
                 "status": http_code_value,
                 "detail": error_text,
             }
+            if item is not None and token_info is not None:
+                cef_event(
+                    cef_logger,
+                    {
+                        "name": "User Operation",
+                        "sourceUserName": token_info.get("username", None),
+                        "message": "Performing {} operation on {} {}, Project={} Outcome=Failure".format(
+                            item,
+                            topic,
+                            url_id,
+                            token_info.get("project_name", None),
+                        ),
+                        "severity": "2",
+                    },
+                )
+                cherrypy.log("{}".format(cef_logger))
+            elif token_info is not None:
+                cef_event(
+                    cef_logger,
+                    {
+                        "name": "User Operation",
+                        "sourceUserName": token_info.get("username", None),
+                        "message": "{} {} {}, Project={} Outcome=Failure".format(
+                            item,
+                            topic,
+                            url_id,
+                            token_info.get("project_name", None),
+                        ),
+                        "severity": "2",
+                    },
+                )
+                cherrypy.log("{}".format(cef_logger))
             return self._format_out(problem_details, token_info)
             # raise cherrypy.HTTPError(e.http_code.value, str(e))
         finally:
             return self._format_out(problem_details, token_info)
             # raise cherrypy.HTTPError(e.http_code.value, str(e))
         finally:
@@ -1695,12 +1942,17 @@ def _start_service():
     """
     global nbi_server
     global subscription_thread
     """
     global nbi_server
     global subscription_thread
+    global cef_logger
+    global current_backend
     cherrypy.log.error("Starting osm_nbi")
     # update general cherrypy configuration
     update_dict = {}
 
     engine_config = cherrypy.tree.apps["/osm"].config
     for k, v in environ.items():
     cherrypy.log.error("Starting osm_nbi")
     # update general cherrypy configuration
     update_dict = {}
 
     engine_config = cherrypy.tree.apps["/osm"].config
     for k, v in environ.items():
+        if k == "OSMNBI_USER_MANAGEMENT":
+            feature_state = eval(v.title())
+            engine_config["authentication"]["user_management"] = feature_state
         if not k.startswith("OSMNBI_"):
             continue
         k1, _, k2 = k[7:].lower().partition("_")
         if not k.startswith("OSMNBI_"):
             continue
         k1, _, k2 = k[7:].lower().partition("_")
@@ -1727,7 +1979,9 @@ def _start_service():
         except ValueError as e:
             cherrypy.log.error("Ignoring environ '{}': " + str(e))
         except Exception as e:
         except ValueError as e:
             cherrypy.log.error("Ignoring environ '{}': " + str(e))
         except Exception as e:
-            cherrypy.log.warn("skipping environ '{}' on exception '{}'".format(k, e))
+            cherrypy.log(
+                "WARNING: skipping environ '{}' on exception '{}'".format(k, e)
+            )
 
     if update_dict:
         cherrypy.config.update(update_dict)
 
     if update_dict:
         cherrypy.config.update(update_dict)
@@ -1794,6 +2048,8 @@ def _start_service():
         target_version=auth_database_version
     )
 
         target_version=auth_database_version
     )
 
+    cef_logger = cef_event_builder(engine_config["authentication"])
+
     # start subscriptions thread:
     subscription_thread = SubscriptionThread(
         config=engine_config, engine=nbi_server.engine
     # start subscriptions thread:
     subscription_thread = SubscriptionThread(
         config=engine_config, engine=nbi_server.engine
@@ -1802,6 +2058,7 @@ def _start_service():
     # Do not capture except SubscriptionException
 
     backend = engine_config["authentication"]["backend"]
     # Do not capture except SubscriptionException
 
     backend = engine_config["authentication"]["backend"]
+    current_backend = backend
     cherrypy.log.error(
         "Starting OSM NBI Version '{} {}' with '{}' authentication backend".format(
             nbi_version, nbi_version_date, backend
     cherrypy.log.error(
         "Starting OSM NBI Version '{} {}' with '{}' authentication backend".format(
             nbi_version, nbi_version_date, backend
index 7b681a1..22413d0 100644 (file)
@@ -41,7 +41,6 @@ class NotificationException(Exception):
 
 
 class NotificationBase:
 
 
 class NotificationBase:
-
     response_models = None
     # Common HTTP payload header for all notifications.
     payload_header = {"Content-Type": "application/json", "Accept": "application/json"}
     response_models = None
     # Common HTTP payload header for all notifications.
     payload_header = {"Content-Type": "application/json", "Accept": "application/json"}
@@ -109,12 +108,12 @@ class NotificationBase:
         return payload
 
     async def send_notifications(
         return payload
 
     async def send_notifications(
-        self, subscribers: list, loop: asyncio.AbstractEventLoop = None
+        self,
+        subscribers: list,
     ):
         """
         Generate tasks for all notification for an event.
         :param subscribers: A list of subscribers who want to be notified for event.
     ):
         """
         Generate tasks for all notification for an event.
         :param subscribers: A list of subscribers who want to be notified for event.
-        :param loop: Event loop object.
         """
         notifications = []
         for subscriber in subscribers:
         """
         notifications = []
         for subscriber in subscribers:
@@ -155,21 +154,19 @@ class NotificationBase:
 
         if notifications:
             tasks = []
 
         if notifications:
             tasks = []
-            async with aiohttp.ClientSession(loop=loop) as session:
+            async with aiohttp.ClientSession() as session:
                 for notification in notifications:
                     tasks.append(
                         asyncio.ensure_future(
                 for notification in notifications:
                     tasks.append(
                         asyncio.ensure_future(
-                            self.send_notification(session, notification, loop=loop),
-                            loop=loop,
+                            self.send_notification(session, notification),
                         )
                     )
                         )
                     )
-                await asyncio.gather(*tasks, loop=loop)
+                await asyncio.gather(*tasks)
 
     async def send_notification(
         self,
         session: aiohttp.ClientSession,
         notification: dict,
 
     async def send_notification(
         self,
         session: aiohttp.ClientSession,
         notification: dict,
-        loop: asyncio.AbstractEventLoop = None,
         retry_count: int = 5,
         timeout: float = 5.0,
     ):
         retry_count: int = 5,
         timeout: float = 5.0,
     ):
@@ -178,7 +175,6 @@ class NotificationBase:
         after maximum number of reties, then notification is dropped.
         :param session: An aiohttp client session object to maintain http session.
         :param notification: A dictionary containing all necessary data to make POST request.
         after maximum number of reties, then notification is dropped.
         :param session: An aiohttp client session object to maintain http session.
         :param notification: A dictionary containing all necessary data to make POST request.
-        :param loop: Event loop object.
        :param retry_count: An integer specifying the maximum number of retries for a notification.
         :param timeout: A float representing client timeout of each HTTP request.
         """
        :param retry_count: An integer specifying the maximum number of retries for a notification.
         :param timeout: A float representing client timeout of each HTTP request.
         """
@@ -227,7 +223,7 @@ class NotificationBase:
                         notification["payload"]["subscriptionId"], backoff_delay
                     )
                 )
                         notification["payload"]["subscriptionId"], backoff_delay
                     )
                 )
-                await asyncio.sleep(backoff_delay, loop=loop)
+                await asyncio.sleep(backoff_delay)
         # Dropping notification
         self.logger.debug(
             "Notification {} sent failed to subscriber:{}.".format(
         # Dropping notification
         self.logger.debug(
             "Notification {} sent failed to subscriber:{}.".format(
@@ -239,7 +235,14 @@ class NotificationBase:
 
 
 class NsLcmNotification(NotificationBase):
 
 
 class NsLcmNotification(NotificationBase):
-
+    # maps kafka commands of completed operations to the original operation type
+    completed_operation_map = {
+        "INSTANTIATED": "INSTANTIATE",
+        "SCALED": "SCALE",
+        "TERMINATED": "TERMINATE",
+        "UPDATED": "UPDATE",
+        "HEALED": "HEAL",
+    }
     # SOL005 response model for nslcm notifications
     response_models = {
         "NsLcmOperationOccurrenceNotification": {
     # SOL005 response model for nslcm notifications
     response_models = {
         "NsLcmOperationOccurrenceNotification": {
@@ -348,15 +351,23 @@ class NsLcmNotification(NotificationBase):
         :param event_details: dict containing raw data of event occurred.
         :return: List of interested subscribers for occurred event.
         """
         :param event_details: dict containing raw data of event occurred.
         :return: List of interested subscribers for occurred event.
         """
+        notification_type = [
+            "NsLcmOperationOccurrenceNotification",
+            "NsChangeNotification",
+            "NsIdentifierCreationNotification",
+            "NsIdentifierDeletionNotification",
+        ]
         filter_q = {
             "identifier": [nsd_id, ns_instance_id],
             "operationStates": ["ANY"],
             "operationTypes": ["ANY"],
         filter_q = {
             "identifier": [nsd_id, ns_instance_id],
             "operationStates": ["ANY"],
             "operationTypes": ["ANY"],
+            "notificationType": notification_type,
         }
         if op_state:
             filter_q["operationStates"].append(op_state)
         if command:
         }
         if op_state:
             filter_q["operationStates"].append(op_state)
         if command:
-            filter_q["operationTypes"].append(command)
+            op_type = self.completed_operation_map.get(command, command)
+            filter_q["operationTypes"].append(op_type)
         # self.logger.debug("Db query is: {}".format(filter_q))
         subscribers = []
         try:
         # self.logger.debug("Db query is: {}".format(filter_q))
         subscribers = []
         try:
@@ -369,6 +380,129 @@ class NsLcmNotification(NotificationBase):
             return subscribers
 
 
             return subscribers
 
 
class VnfLcmNotification(NotificationBase):
    """Builds and dispatches SOL003 VNF lifecycle-management notifications."""

    # SOL003 response model for vnflcm notifications
    response_models = {
        "VnfLcmOperationOccurrenceNotification": {
            "id",
            "notificationType",
            "subscriptionId",
            "timeStamp",
            "notificationStatus",
            "operationState",
            "vnfInstanceId",
            "operation",
            "isAutomaticInvocation",
            "vnfLcmOpOccId",
            "affectedVnfcs",
            "affectedVirtualLinks",
            "affectedExtLinkPorts",
            "affectedVirtualStorages",
            "changedInfo",
            "changedExtConnectivity",
            "modificationsTriggeredByVnfPkgChange",
            "error",
            "_links",
        },
        "VnfIdentifierCreationNotification": {
            "id",
            "notificationType",
            "subscriptionId",
            "timeStamp",
            "vnfInstanceId",
            "_links",
        },
        "VnfIdentifierDeletionNotification": {
            "id",
            "notificationType",
            "subscriptionId",
            "timeStamp",
            "vnfInstanceId",
            "_links",
        },
    }

    def __init__(self, db) -> None:
        """
        Constructor of VnfLcmNotification class.
        :param db: Database handler.
        """
        super().__init__(db)
        # VNF LCM subscriptions live in their own collection.
        self.subscriber_collection = "mapped_subscriptions"

    def get_models(self) -> dict:
        """
        Return the SOL003 response models of this notification class.
        :return: dict of SOL003 data models.
        """
        return self.response_models

    def _format_vnflcm_subscribers(
        self, subscribers: list, event_details: dict
    ) -> list:
        """
        Merge event metadata into each subscriber record (records are
        modified in place).
        :param subscribers: subscribers to be notified of the event.
        :param event_details: dict containing all meta data of the event.
        :return: the same list, formatted for notification delivery.
        """
        shared_notification_id = str(uuid4())
        stamp = time.time()
        operation = event_details["command"]
        for entry in subscribers:
            entry["id"] = shared_notification_id
            entry["timeStamp"] = stamp
            # the DB "reference" field becomes the SOL003 subscriptionId
            entry["subscriptionId"] = entry.pop("reference")
            entry["operation"] = operation
            entry.pop("_id")
            entry.update(event_details["params"])
        return subscribers

    def get_subscribers(
        self,
        vnfd_id: str,
        vnf_instance_id: str,
        command: str,
        op_state: str,
        event_details: dict,
    ) -> list:
        """
        Query the database and return the list of interested subscribers.
        :param vnfd_id: Vnfd id of a VNF whose lifecycle has changed. (instantiated, scaled, terminated. etc)
        :param vnf_instance_id: Vnf instance id of a VNF whose lifecycle has changed.
        :param command: the command for event.
        :param op_state: the operation state of VNF.
        :param event_details: dict containing raw data of event occurred.
        :return: list of interested subscribers for the event (empty on error).
        """
        filter_q = {
            "identifier": [vnfd_id, vnf_instance_id],
            "operationStates": ["ANY"],
            "operationTypes": ["ANY"],
            "notificationType": [
                "VnfIdentifierCreationNotification",
                "VnfLcmOperationOccurrenceNotification",
                "VnfIdentifierDeletionNotification",
            ],
        }
        if op_state:
            filter_q["operationStates"].append(op_state)
        if command:
            filter_q["operationTypes"].append(command)
        subscribers = []
        try:
            subscribers = self.db.get_list(self.subscriber_collection, filter_q)
            subscribers = self._format_vnflcm_subscribers(subscribers, event_details)
        except Exception as e:
            error_text = type(e).__name__ + ": " + str(e)
            self.logger.debug("Error getting vnflcm subscribers: {}".format(error_text))
        finally:
            # Best-effort lookup: errors are logged and an empty list returned.
            return subscribers
+
+
 class NsdNotification(NotificationBase):
     def __init__(self, db):
         """
 class NsdNotification(NotificationBase):
     def __init__(self, db):
         """
diff --git a/osm_nbi/osm_vnfm/__init__.py b/osm_nbi/osm_vnfm/__init__.py
new file mode 100644 (file)
index 0000000..b62a185
--- /dev/null
@@ -0,0 +1,14 @@
+# Copyright 2021 K Sai Kiran (Tata Elxsi)
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+# implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
diff --git a/osm_nbi/osm_vnfm/base_methods.py b/osm_nbi/osm_vnfm/base_methods.py
new file mode 100644 (file)
index 0000000..bbf119f
--- /dev/null
@@ -0,0 +1,28 @@
+# Copyright 2021 K Sai Kiran (Tata Elxsi)
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+# implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+__author__ = "K Sai Kiran <saikiran.k@tataelxsi.co.in>, Selvi Jayaraman <selvi.j@tataelxsi.co.in>"
+__date__ = "$12-June-2021 8:30:59$"
+
+
+import logging
+
+
class BaseMethod:
    """Common base for the VNFM helper actions; provides a shared logger."""

    def __init__(self):
        """
        Constructor of the base method.
        """
        # All VNFM helpers log through the common NBI engine logger.
        logger_name = "nbi.engine"
        self.logger = logging.getLogger(logger_name)
diff --git a/osm_nbi/osm_vnfm/vnf_instance_actions.py b/osm_nbi/osm_vnfm/vnf_instance_actions.py
new file mode 100644 (file)
index 0000000..b34d203
--- /dev/null
@@ -0,0 +1,253 @@
+# Copyright 2021 K Sai Kiran (Tata Elxsi)
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+# implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+__author__ = "K Sai Kiran <saikiran.k@tataelxsi.co.in>, Selvi Jayaraman <selvi.j@tataelxsi.co.in>"
+__date__ = "$12-June-2021 8:30:59$"
+
+from osm_nbi.instance_topics import NsrTopic, NsLcmOpTopic, VnfrTopic
+from .base_methods import BaseMethod
+
+
class VnfLcmOp2NsLcmOp:
    """Facade translating SOL003 VNF LCM operation calls into NS LCM operations."""

    def __init__(self, db, fs, msg, auth):
        """
        Constructor of Vnf lcm op to Ns lcm op.
        Wires up one helper object per supported action.
        """
        helper_args = (db, fs, msg, auth)
        self.new_vnf_lcmop = NewVnfLcmOp(*helper_args)
        self.list_vnf_lcmop = ListVnfLcmOp(*helper_args)
        self.show_vnf_lcmop = ShowVnfLcmOp(*helper_args)

    def new(self, rollback, session, indata=None, kwargs=None, headers=None):
        """
        Creates a new entry into database.
        :param rollback: list to append created items at database in case a rollback has to be done
        :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
        :param indata: data to be inserted
        :param kwargs: used to override the indata descriptor
        :param headers: http request headers
        :return: _id, op_id:
            _id: identity of the inserted data.
            op_id: operation id if this is asynchronous, None otherwise
        """
        return self.new_vnf_lcmop.action(rollback, session, indata, kwargs, headers)

    def list(self, session, filter_q=None, api_req=False):
        """
        Get a list of the Vnf Lcm Operations that match a filter.
        :param session: contains the used login username and working project
        :param filter_q: filter of data to be applied
        :param api_req: True if this call is serving an external API request. False if serving internal request.
        :return: The list, it can be empty if no one match the filter.
        """
        return self.list_vnf_lcmop.action(session, filter_q, api_req)

    def show(self, session, _id, api_req=False):
        """
        Get complete information on a Vnf Lcm Operation.
        :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
        :param _id: server internal id
        :param api_req: True if this call is serving an external API request. False if serving internal request.
        :return: dictionary, raise exception if not found.
        """
        return self.show_vnf_lcmop.action(session, _id, api_req)
+
+
class NewVnfLcmOp(BaseMethod):
    """Creates a VNF LCM operation by delegating to the NS LCM machinery."""

    def __init__(self, db, fs, msg, auth):
        """
        Constructor of new Vnf Lcm Op.
        """
        super().__init__()
        self.msg = msg
        self.nslcmoptopic = NsLcmOpTopic(db, fs, msg, auth)
        self.nsrtopic = NsrTopic(db, fs, msg, auth)
        self.vnfrtopic = VnfrTopic(db, fs, msg, auth)

    def __get_nsdid(self, session, vnf_instance_id):
        """
        Resolve the nsd id from a vnf instance id.
        :param session: contains the used login username and working project
        :param vnf_instance_id: id of vnf instance
        :return: id of the nsd
        """
        ns_record = self.nsrtopic.show(session, vnf_instance_id)
        return ns_record["nsd"]["_id"]

    def __get_formatted_indata(self, session, indata):
        """
        Build the NS-level indata for the requested VNF lcm operation.
        :param session: contains the used login username and working project
        :param indata: contains information for the new lcm operation.
        :return: formatted indata for the new lcm op; empty dict for an
                 unrecognized operation type.
        """
        op_type = indata["lcmOperationType"]
        if op_type == "instantiate":
            return {
                "nsName": indata["vnfName"],
                "nsDescription": indata["vnfDescription"],
                "nsdId": self.__get_nsdid(session, indata["vnfInstanceId"]),
                "vimAccountId": indata["vimAccountId"],
                "nsr_id": indata["vnfInstanceId"],
                "lcmOperationType": op_type,
                "nsInstanceId": indata["vnfInstanceId"],
            }
        if op_type == "terminate":
            return {
                "lcmOperationType": op_type,
                "nsInstanceId": indata["vnfInstanceId"],
            }
        if op_type == "scale":
            return {
                "lcmOperationType": op_type,
                "nsInstanceId": indata["vnfInstanceId"],
                "scaleType": "SCALE_VNF",
                "scaleVnfData": {
                    "scaleVnfType": indata["type"],
                    "scaleByStepData": {
                        "scaling-group-descriptor": indata["aspectId"],
                        "member-vnf-index": indata["additionalParams"][
                            "member-vnf-index"
                        ],
                    },
                },
            }
        if op_type == "action":
            return {
                "lcmOperationType": op_type,
                "nsInstanceId": indata["vnfInstanceId"],
                "member_vnf_index": indata["member_vnf_index"],
                "primitive": indata["primitive"],
                "primitive_params": indata["primitive_params"],
            }
        return {}

    def notify_operation(self, session, _id, lcm_operation, op_id):
        """
        Format the operation message params and publish them on kafka.
        :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
        :param _id: vnf instance id
        :param lcm_operation: lcm operation type of a VNF (instantiate, scale, terminate)
        :param op_id: lcm operation id of a VNF
        :return: None
        """
        op_record = self.nslcmoptopic.show(session, op_id)
        vnf_record = self.vnfrtopic.show(session, _id)
        params = {
            "vnfdId": vnf_record["vnfd-ref"],
            "vnfInstanceId": _id,
            "operationState": op_record["operationState"],
            "vnfLcmOpOccId": op_id,
            "_links": {
                "self": "/osm/vnflcm/v1/vnf_lcm_op_occs/" + op_id,
                "vnfInstance": "/osm/vnflcm/v1/vnf_instances/" + _id,
            },
        }
        self.msg.write("vnf", lcm_operation, params)
        return None

    def action(self, rollback, session, indata=None, kwargs=None, headers=None):
        """
        Create a new lcm operation.
        :param rollback: list to append the created items at database in case a rollback must be done
        :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
        :param indata: params to be used for the nsr
        :param kwargs: used to override the indata
        :param headers: http request headers
        :return: id of the new lcm operation plus the async op id.
        """
        vnf_instance_id = indata["vnfInstanceId"]
        operation_type = indata["lcmOperationType"]
        vnf_record = self.vnfrtopic.show(session, vnf_instance_id)
        # NS topics operate on the owning NS record, not the VNF record.
        indata["vnfInstanceId"] = vnf_record.get("nsr-id-ref")
        ns_indata = self.__get_formatted_indata(session, indata)
        op_id, async_id = self.nslcmoptopic.new(
            rollback, session, ns_indata, kwargs, headers
        )
        self.notify_operation(session, vnf_instance_id, operation_type, op_id)
        return op_id, async_id
+
+
class ListVnfLcmOp(BaseMethod):
    """Lists VNF LCM operations, projected to the SOL003 format."""

    def __init__(self, db, fs, msg, auth):
        """
        Constructor call for listing vnf lcm operations.
        """
        super().__init__()
        self.nslcmoptopic = NsLcmOpTopic(db, fs, msg, auth)
        self.nsrtopic = NsrTopic(db, fs, msg, auth)

    def action(self, session, filter_q=None, api_req=False):
        """
        To get list of vnf lcm operations that matches a filter
        :param session: contains the used login username and working project
        :param filter_q: filter of data to be applied
        :param api_req: True if this call is serving an external API request. False if serving internal request.
        :return: The list, it can be empty if no one match the filter.
        """
        # "results" instead of the original local name, which shadowed the
        # builtin "list" within this method.
        results = []
        for record in self.nslcmoptopic.list(session, filter_q, api_req):
            ns_id = record.get("nsInstanceId")
            nsr = self.nsrtopic.show(session, ns_id)
            # project each NS op record onto its first constituent VNF instance
            vnf_instance_id = nsr["constituent-vnfr-ref"][0]
            results.append(sol003_projection(record, vnf_instance_id))
        return results
+
+
class ShowVnfLcmOp(BaseMethod):
    """Shows a single VNF LCM operation, projected to the SOL003 format."""

    def __init__(self, db, fs, msg, auth):
        """
        Constructor call for showing vnf lcm operation.
        """
        super().__init__()
        self.nslcmoptopic = NsLcmOpTopic(db, fs, msg, auth)
        self.nsrtopic = NsrTopic(db, fs, msg, auth)

    def action(self, session, _id, api_req=False):
        """
        Get complete information on a Vnf Lcm Operation.
        :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
        :param _id: Vnf Lcm operation id
        :param api_req: True if this call is serving an external API request. False if serving internal request.
        :return: dictionary, raise exception if not found.
        """
        lcmop_record = self.nslcmoptopic.show(session, _id, api_req)
        nsr = self.nsrtopic.show(session, lcmop_record.get("nsInstanceId"))
        # project the NS op record onto its first constituent VNF instance
        vnf_instance_id = nsr["constituent-vnfr-ref"][0]
        return sol003_projection(lcmop_record, vnf_instance_id)
+
+
def sol003_projection(data, vnfinstance_id):
    """
    Project an NS LCM operation record onto the SOL003 VNF LCM format.
    The input dict is modified in place: NS-specific fields are removed
    and the SOL003 "_links" and "vnfInstanceId" entries are added.
    :param data: dict containing Lcm Operation information (mutated in place)
    :param vnfinstance_id: id of vnf_instance
    :return: the same dict, SOL003 formatted
    """
    # Tolerate records missing the NS-only keys instead of raising KeyError.
    for ns_only_key in ("nsInstanceId", "operationParams", "links"):
        data.pop(ns_only_key, None)
    data["_links"] = {
        "self": "/osm/vnflcm/v1/vnf_lcm_op_occs/" + data["_id"],
        "vnfInstance": "/osm/vnflcm/v1/vnf_instances/" + vnfinstance_id,
    }
    data["vnfInstanceId"] = vnfinstance_id
    return data
diff --git a/osm_nbi/osm_vnfm/vnf_instances.py b/osm_nbi/osm_vnfm/vnf_instances.py
new file mode 100644 (file)
index 0000000..c9d05ac
--- /dev/null
@@ -0,0 +1,334 @@
+# Copyright 2021 K Sai Kiran (Tata Elxsi)
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+# implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+__author__ = "K Sai Kiran <saikiran.k@tataelxsi.co.in>, Selvi Jayaraman <selvi.j@tataelxsi.co.in>"
+__date__ = "$12-June-2021 8:30:59$"
+
+from copy import deepcopy
+from osm_nbi.descriptor_topics import NsdTopic
+from .base_methods import BaseMethod
+
+from osm_nbi.instance_topics import NsrTopic, VnfrTopic
+
+
class VnfInstances2NsInstances:
    """Facade mapping SOL003 VNF-instance operations onto NS-instance handlers."""

    def __init__(self, db, fs, msg, auth):
        """
        Instantiate one handler per supported VNF-instance operation.
        """
        self.new_vnf_instance = NewVnfInstance(db, fs, msg, auth)
        self.list_vnf_instance = ListVnfInstance(db, fs, msg, auth)
        self.show_vnf_instance = ShowVnfInstance(db, fs, msg, auth)
        self.delete_vnf_instance = DeleteVnfInstance(db, fs, msg, auth)

    def new(self, rollback, session, indata=None, kwargs=None, headers=None):
        """
        Creates a new entry into database.
        :param rollback: list to append created items at database in case a rollback may have to be done
        :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
        :param indata: data to be inserted
        :param kwargs: used to override the indata descriptor
        :param headers: http request headers
        :return: (_id, op_id) where _id identifies the inserted data and
            op_id is the operation id if asynchronous, None otherwise
        """
        handler = self.new_vnf_instance
        return handler.action(rollback, session, indata, kwargs, headers)

    def list(self, session, filter_q=None, api_req=False):
        """
        Get a list of the Vnfs that match a filter.
        :param session: contains the used login username and working project
        :param filter_q: filter of data to be applied
        :param api_req: True if this call is serving an external API request. False if serving internal request.
        :return: The list, it can be empty if no one match the filter.
        """
        handler = self.list_vnf_instance
        return handler.action(session, filter_q, api_req)

    def show(self, session, _id, api_req=False):
        """
        Get complete information on a Vnf.
        :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
        :param _id: server internal id
        :param api_req: True if this call is serving an external API request. False if serving internal request.
        :return: dictionary, raise exception if not found.
        """
        handler = self.show_vnf_instance
        return handler.action(session, _id, api_req)

    def delete(self, session, _id, dry_run=False, not_send_msg=None):
        """
        Delete item by its internal _id.
        :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
        :param _id: server internal id
        :param dry_run: make checking but do not delete
        :param not_send_msg: To not send message (False) or store content (list) instead
        :return: operation id (None if there is not operation), raise exception if error or not found, conflict, ...
        """
        handler = self.delete_vnf_instance
        return handler.action(session, _id, dry_run, not_send_msg)
+
+
class NewVnfInstance(BaseMethod):
    """
    Creates a VNF instance (SOL003 style) by generating a single-VNF NS
    descriptor around the requested VNFD and instantiating an NS record for it.
    """

    # sample ns descriptor, used only as a template: the single nsd entry is
    # fully replaced in __create_nsd before the descriptor content is uploaded
    sample_nsd = {
        "nsd": {
            "nsd": [
                {
                    "df": [
                        {
                            "id": "default-df",
                            "vnf-profile": [
                                {
                                    "id": 1,
                                    "virtual-link-connectivity": [
                                        {
                                            "constituent-cpd-id": [
                                                {
                                                    "constituent-base-element-id": 1,
                                                    "constituent-cpd-id": "eth0-ext",
                                                }
                                            ],
                                            "virtual-link-profile-id": "mgmtnet",
                                        }
                                    ],
                                    "vnfd-id": "cirros_vnfd",
                                }
                            ],
                        }
                    ],
                    "vnfd-id": ["cirros_vnfd"],
                    "description": "Generated by OSM pacakage generator",
                    "id": "cirros_2vnf_nsd",
                    "name": "cirros_2vnf_ns",
                    "short-name": "cirros_2vnf_ns",
                    "vendor": "OSM",
                    "version": "1.0",
                }
            ]
        }
    }

    @staticmethod
    def __get_formatted_indata(indata, nsd_id):
        """
        Create indata for nsd_id
        :param indata: Contains unformatted data for new vnf instance
        :param nsd_id: Id of nsd
        :return: formatted indata for nsd_id
        """
        formatted_indata = deepcopy(indata)
        formatted_indata["nsdId"] = nsd_id
        formatted_indata["nsName"] = indata["vnfInstanceName"] + "-ns"
        # Drop VNF-level keys that are not valid for NS instantiation.
        # Use pop's default so an absent optional key (e.g. a request without
        # vnfInstanceDescription) does not raise KeyError.
        for invalid_key in (
            "vnfdId",
            "vnfInstanceName",
            "vnfInstanceDescription",
            "additionalParams",
        ):
            formatted_indata.pop(invalid_key, None)
        return formatted_indata

    def __init__(self, db, fs, msg, auth):
        """
        Constructor for new vnf instance
        """
        super().__init__()
        self.msg = msg
        self.nsdtopic = NsdTopic(db, fs, msg, auth)
        self.nsrtopic = NsrTopic(db, fs, msg, auth)

    def __get_vnfd(self):
        # get vnfd from nfvo (placeholder: not implemented yet)
        pass

    def __onboard_vnfd(self):
        # placeholder: would fetch and onboard the vnfd before generating the NSD
        self.__get_vnfd()
        pass

    def __create_nsd(self, rollback, session, indata=None, kwargs=None, headers=None):
        """
        Creates new ns descriptor from a vnfd.
        :param rollback: list to append the created items at database in case a rollback must be done
        :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
        :param indata: params to be used for the nsr
        :param kwargs: used to override the indata
        :param headers: http request headers
        :return: tuple (id of new nsd created, nsd content to upload)
        """
        # create an empty descriptor record first; its content is uploaded later
        _id, *others = self.nsdtopic.new(rollback, session, {}, None, headers)
        new_nsd = deepcopy(NewVnfInstance.sample_nsd)
        vnf_content = {
            "id": "default-df",
            "vnf-profile": [
                {
                    "id": "1",
                    "virtual-link-connectivity": [
                        {
                            "constituent-cpd-id": [
                                {
                                    "constituent-base-element-id": "1",
                                    "constituent-cpd-id": indata["additionalParams"][
                                        "constituent-cpd-id"
                                    ],
                                }
                            ],
                            "virtual-link-profile-id": indata["additionalParams"][
                                "virtual-link-profile-id"
                            ],
                        }
                    ],
                    "vnfd-id": indata["vnfdId"],
                }
            ],
        }
        vnf_profile = vnf_content["vnf-profile"][0]
        virtual_link_connectivity = vnf_profile["virtual-link-connectivity"][0]
        constituent_cpd_id = virtual_link_connectivity["constituent-cpd-id"][0]
        # ip-address is optional in the request; propagate it only when present
        if "ip-address" in indata["additionalParams"]:
            constituent_cpd_id["ip-address"] = indata["additionalParams"]["ip-address"]
        new_nsd["nsd"]["nsd"][0] = {
            "description": indata["vnfInstanceDescription"],
            "designer": "OSM",
            "id": indata["vnfdId"] + "-ns",
            "name": indata["vnfdId"] + "-ns",
            "version": "1.0",
            "df": [
                vnf_content,
            ],
            "virtual-link-desc": indata["additionalParams"]["virtual-link-desc"],
            "vnfd-id": [indata["vnfdId"]],
        }
        return _id, new_nsd

    def __create_nsr(self, rollback, session, indata=None, kwargs=None, headers=None):
        """
        Creates a new ns record in database.
        :param rollback: list to append the created items at database in case a rollback must be done
        :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
        :param indata: params to be used for the nsr
        :param kwargs: used to override the indata
        :param headers: http request headers
        :return: id of new nsr
        """
        return self.nsrtopic.new(rollback, session, indata, kwargs, headers)

    def __action_pre_processing(
        self, rollback, session, indata=None, kwargs=None, headers=None
    ):
        """
        Pre process for creating new vnf instance: generates the wrapper NSD,
        uploads its content, creates the NS record and notifies subscribers.
        :param rollback: list to append the created items at database in case a rollback must be done
        :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
        :param indata: params to be used for the nsr
        :param kwargs: used to override the indata
        :param headers: http request headers
        :return: tuple (vnfr id, None) -- None because the creation is synchronous
        """
        self.__onboard_vnfd()
        nsd_id, nsd = self.__create_nsd(rollback, session, indata, kwargs, headers)
        self.nsdtopic.upload_content(session, nsd_id, nsd, kwargs, headers)
        formatted_indata = NewVnfInstance.__get_formatted_indata(indata, nsd_id)
        nsr_id, _ = self.__create_nsr(
            rollback, session, formatted_indata, kwargs, headers
        )
        nsr = self.nsrtopic.show(session, nsr_id)
        # the wrapper NS has a single VNF; its vnfr is the new VNF instance
        vnfr_id = nsr["constituent-vnfr-ref"][0]
        if vnfr_id:
            links = {"vnfInstance": "/osm/vnflcm/v1/vnf_instances/" + vnfr_id}
            indata["vnfInstanceId"] = vnfr_id
            indata["_links"] = links
            # publish the creation event so VNF LCM subscribers are notified
            self.msg.write("vnf", "create", indata)
        return vnfr_id, None

    def action(self, rollback, session, indata=None, kwargs=None, headers=None):
        """
        Creates a new vnf instance.
        :param rollback: list to append the created items at database in case a rollback must be done
        :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
        :param indata: params to be used for the nsr
        :param kwargs: used to override the indata
        :param headers: http request headers
        :return: id of new vnf instance
        """
        return self.__action_pre_processing(rollback, session, indata, kwargs, headers)
+
+
class ListVnfInstance(BaseMethod):
    """Lists VNF records that match an optional filter."""

    def __init__(self, db, fs, msg, auth):
        """
        Build the vnfr topic handler used for listing.
        """
        super().__init__()
        self.vnfrtopic = VnfrTopic(db, fs, msg, auth)

    def action(self, session, filter_q=None, api_req=False):
        """
        To get list of vnfs that matches a filter.
        :param session: contains the used login username and working project
        :param filter_q: filter of data to be applied
        :param api_req: True if this call is serving an external API request. False if serving internal request.
        :return: The list, it can be empty if no one match the filter.
        """
        matching_vnfrs = self.vnfrtopic.list(session, filter_q, api_req)
        return matching_vnfrs
+
+
class ShowVnfInstance(BaseMethod):
    # NOTE: original docstrings were copy-pasted from the LCM-operation class;
    # this class shows a VNF instance (vnfr), not an LCM operation.
    def __init__(self, db, fs, msg, auth):
        """
        Constructor call for showing a vnf instance
        """
        super().__init__()
        self.vnfrtopic = VnfrTopic(db, fs, msg, auth)

    def action(self, session, _id, api_req=False):
        """
        Get complete information on a Vnf instance.
        :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
        :param _id: Vnf instance (vnfr) id
        :param api_req: True if this call is serving an external API request. False if serving internal request.
        :return: dictionary, raise exception if not found.
        """
        return self.vnfrtopic.show(session, _id, api_req)
+
+
class DeleteVnfInstance(BaseMethod):
    """Deletes a VNF instance together with its wrapper NS record and descriptor."""

    def __init__(self, db, fs, msg, auth):
        """
        Build the topic handlers involved in the deletion.
        """
        super().__init__()
        self.msg = msg
        self.nsrtopic = NsrTopic(db, fs, msg, auth)
        self.nsdtopic = NsdTopic(db, fs, msg, auth)
        self.vnfrtopic = VnfrTopic(db, fs, msg, auth)

    def action(self, session, _id, dry_run=False, not_send_msg=None):
        """
        Delete vnf instance by its internal _id.
        :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
        :param _id: server internal id
        :param dry_run: make checking but do not delete
        :param not_send_msg: To not send message (False) or store content (list) instead
        :return: operation id (None if there is not operation), raise exception if error or not found, conflict, ...
        """
        vnfr = self.vnfrtopic.show(session, _id)
        wrapper_ns_id = vnfr.get("nsr-id-ref")
        wrapper_nsr = self.nsrtopic.show(session, wrapper_ns_id)
        wrapper_nsd_id = wrapper_nsr["nsd"]["_id"]
        # publish the deletion event for VNF LCM subscribers before removing records
        delete_params = {
            "vnfdId": vnfr["vnfd-ref"],
            "vnfInstanceId": _id,
            "_links": {"vnfInstance": "/osm/vnflcm/v1/vnf_instances/" + _id},
        }
        self.msg.write("vnf", "delete", delete_params)
        self.nsrtopic.delete(session, wrapper_ns_id, dry_run, not_send_msg)
        return self.nsdtopic.delete(session, wrapper_nsd_id, dry_run, not_send_msg)
diff --git a/osm_nbi/osm_vnfm/vnf_subscription.py b/osm_nbi/osm_vnfm/vnf_subscription.py
new file mode 100644 (file)
index 0000000..c04fbde
--- /dev/null
@@ -0,0 +1,71 @@
+# Copyright 2021 Selvi Jayaraman (Tata Elxsi)
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+# implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+__author__ = "Selvi Jayaraman <selvi.j@tataelxsi.co.in>"
+
+from osm_nbi.subscription_topics import CommonSubscriptions
+from osm_nbi.validation import vnf_subscription
+
+
class VnflcmSubscriptionsTopic(CommonSubscriptions):
    """Handles SOL003 VNF LCM subscriptions, expanding each request into per-notification rows."""

    schema_new = vnf_subscription

    def _subscription_mapper(self, _id, data, table):
        """
        Performs data transformation on subscription request
        :param _id: subscription reference id
        :param data: data to be transformed
        :param table: table in which transformed data are inserted
        """
        # fields shared by every row generated from this subscription
        base_row = {
            "reference": data.get("_id"),
            "CallbackUri": data.get("CallbackUri"),
        }
        if data.get("authentication"):
            base_row["authentication"] = data.get("authentication")
        rows = []
        subs_filter = data.get("filter")
        if subs_filter:
            instance_filter = subs_filter.get("VnfInstanceSubscriptionFilter")
            if instance_filter:
                # only the first filter key is honoured (e.g. vnfdIds or vnfInstanceIds)
                first_key = next(iter(instance_filter))
                base_row["identifier"] = instance_filter[first_key]
            for notif_type in subs_filter.get("notificationTypes") or []:
                row = base_row.copy()
                row["notificationType"] = notif_type
                if notif_type == "VnfIdentifierCreationNotification":
                    row["operationTypes"] = "CREATE"
                    row["operationStates"] = "ANY"
                elif notif_type == "VnfIdentifierDeletionNotification":
                    row["operationTypes"] = "DELETE"
                    row["operationStates"] = "ANY"
                elif notif_type == "VnfLcmOperationOccurrenceNotification":
                    # presence check (not truthiness) to mirror an explicit filter value
                    if "operationTypes" in subs_filter:
                        row["operationTypes"] = subs_filter["operationTypes"]
                    else:
                        row["operationTypes"] = "ANY"
                    if "operationStates" in subs_filter:
                        row["operationStates"] = subs_filter["operationStates"]
                    else:
                        row["operationStates"] = "ANY"
                else:
                    # unknown notification types are silently ignored
                    continue
                rows.append(row)
        self.db.create_list(table, rows)
        return None
index 07f6a8f..3c5ba3d 100644 (file)
@@ -131,8 +131,12 @@ resources_to_operations:
 
   "POST /nslcm/v1/ns_instances/<nsInstanceId>/action": "ns_instances:id:action:post"
 
 
   "POST /nslcm/v1/ns_instances/<nsInstanceId>/action": "ns_instances:id:action:post"
 
+  "POST /nslcm/v1/ns_instances/<nsInstanceId>/update": "ns_instances:id:update:post"
+
   "POST /nslcm/v1/ns_instances/<nsInstanceId>/scale": "ns_instances:id:scale:post"
 
   "POST /nslcm/v1/ns_instances/<nsInstanceId>/scale": "ns_instances:id:scale:post"
 
+  "POST /nslcm/v1/ns_instances/<nsInstanceId>/migrate": "ns_instances:id:migrate:post"
+
   "GET /nslcm/v1/ns_lcm_op_occs": "ns_instances:opps:get"
 
   "GET /nslcm/v1/ns_lcm_op_occs/<nsLcmOpOccId>": "ns_instances:opps:id:get"
   "GET /nslcm/v1/ns_lcm_op_occs": "ns_instances:opps:get"
 
   "GET /nslcm/v1/ns_lcm_op_occs/<nsLcmOpOccId>": "ns_instances:opps:id:get"
@@ -147,6 +151,12 @@ resources_to_operations:
   "GET /nslcm/v1/vnfrs/<vnfInstanceId>": "vnf_instances:id:get"
   "GET /nslcm/v1/vnf_instances/<vnfInstanceId>": "vnf_instances:id:get"
 
   "GET /nslcm/v1/vnfrs/<vnfInstanceId>": "vnf_instances:id:get"
   "GET /nslcm/v1/vnf_instances/<vnfInstanceId>": "vnf_instances:id:get"
 
+  "GET /vnflcm/v1/vnf_instances/": "vnflcm_instances:get"
+  "POST /vnflcm/v1/vnf_instances/": "vnflcm_instances:post"
+
+  "GET /vnflcm/v1/vnf_instances/<vnfInstanceId>": "vnflcm_instances:id:get"
+  "DELETE /vnflcm/v1/vnf_instances/<vnfInstanceId>": "vnflcm_instances:id:delete"
+
 ################################################################################
 #################################### Tokens ####################################
 ################################################################################
 ################################################################################
 #################################### Tokens ####################################
 ################################################################################
index 13cbffd..a97e0c1 100644 (file)
@@ -96,9 +96,11 @@ roles:
     permissions:
         default:  true
         admin:    false
     permissions:
         default:  true
         admin:    false
-        users:    false
         projects: false
         roles:    false
         projects: false
         roles:    false
+        # Users
+        users:          false
+        users:id:patch: true
 
   - name: "project_user"
     permissions:
 
   - name: "project_user"
     permissions:
@@ -113,9 +115,11 @@ roles:
         ns_instances:    true
         vnf_instances:   true
         slice_instances: true
         ns_instances:    true
         vnf_instances:   true
         slice_instances: true
-        users:    false
         projects: false
         roles:    false
         projects: false
         roles:    false
+        # Users
+        users:          false
+        users:id:patch: true
         # VIMs
         vims:        false
         vims:get:    true
         # VIMs
         vims:        false
         vims:get:    true
index 92c7417..0389483 100644 (file)
@@ -25,6 +25,14 @@ class CommonSubscriptions(BaseTopic):
     topic = "subscriptions"
     topic_msg = None
 
     topic = "subscriptions"
     topic_msg = None
 
    def _subscription_mapper(self, _id, data, table):
        """
        Performs data transformation on subscription request.

        Default implementation is a no-op; subscription subclasses override it
        to persist the transformed per-notification rows.
        :param _id: subscription reference id
        :param data: data to be transformed
        :param table: table in which transformed data are inserted
        """
        pass
+
     def format_subscription(self, subs_data):
         """
         Brings lexicographical order for list items at any nested level. For subscriptions max level of nesting is 4.
     def format_subscription(self, subs_data):
         """
         Brings lexicographical order for list items at any nested level. For subscriptions max level of nesting is 4.
index 6810ccd..846e7d3 100644 (file)
@@ -29,7 +29,7 @@ from osm_common import dbmongo, dbmemory, msglocal, msgkafka
 from osm_common.dbbase import DbException
 from osm_common.msgbase import MsgException
 from osm_nbi.engine import EngineException
 from osm_common.dbbase import DbException
 from osm_common.msgbase import MsgException
 from osm_nbi.engine import EngineException
-from osm_nbi.notifications import NsLcmNotification
+from osm_nbi.notifications import NsLcmNotification, VnfLcmNotification
 
 __author__ = "Alfonso Tierno <alfonso.tiernosepulveda@telefonica.com>"
 
 
 __author__ = "Alfonso Tierno <alfonso.tiernosepulveda@telefonica.com>"
 
@@ -53,7 +53,6 @@ class SubscriptionThread(threading.Thread):
         self.db = None
         self.msg = None
         self.engine = engine
         self.db = None
         self.msg = None
         self.engine = engine
-        self.loop = None
         self.logger = logging.getLogger("nbi.subscriptions")
         self.aiomain_task_admin = (
             None  # asyncio task for receiving admin actions from kafka bus
         self.logger = logging.getLogger("nbi.subscriptions")
         self.aiomain_task_admin = (
             None  # asyncio task for receiving admin actions from kafka bus
@@ -70,6 +69,7 @@ class SubscriptionThread(threading.Thread):
             "method": "delete",
         }
         self.nslcm = None
             "method": "delete",
         }
         self.nslcm = None
+        self.vnflcm = None
 
     async def start_kafka(self):
         # timeout_wait_for_kafka = 3*60
 
     async def start_kafka(self):
         # timeout_wait_for_kafka = 3*60
@@ -80,40 +80,38 @@ class SubscriptionThread(threading.Thread):
                 # created.
                 # Before subscribe, send dummy messages
                 await self.msg.aiowrite(
                 # created.
                 # Before subscribe, send dummy messages
                 await self.msg.aiowrite(
-                    "admin", "echo", "dummy message", loop=self.loop
+                    "admin",
+                    "echo",
+                    "dummy message",
                 )
                 )
-                await self.msg.aiowrite("ns", "echo", "dummy message", loop=self.loop)
-                await self.msg.aiowrite("nsi", "echo", "dummy message", loop=self.loop)
+                await self.msg.aiowrite("ns", "echo", "dummy message")
+                await self.msg.aiowrite("nsi", "echo", "dummy message")
+                await self.msg.aiowrite("vnf", "echo", "dummy message")
                 if not kafka_working:
                     self.logger.critical("kafka is working again")
                     kafka_working = True
                 if not self.aiomain_task_admin:
                 if not kafka_working:
                     self.logger.critical("kafka is working again")
                     kafka_working = True
                 if not self.aiomain_task_admin:
-                    await asyncio.sleep(10, loop=self.loop)
+                    await asyncio.sleep(10)
                     self.logger.debug("Starting admin subscription task")
                     self.aiomain_task_admin = asyncio.ensure_future(
                         self.msg.aioread(
                             ("admin",),
                     self.logger.debug("Starting admin subscription task")
                     self.aiomain_task_admin = asyncio.ensure_future(
                         self.msg.aioread(
                             ("admin",),
-                            loop=self.loop,
                             group_id=False,
                             aiocallback=self._msg_callback,
                         ),
                             group_id=False,
                             aiocallback=self._msg_callback,
                         ),
-                        loop=self.loop,
                     )
                 if not self.aiomain_task:
                     )
                 if not self.aiomain_task:
-                    await asyncio.sleep(10, loop=self.loop)
+                    await asyncio.sleep(10)
                     self.logger.debug("Starting non-admin subscription task")
                     self.aiomain_task = asyncio.ensure_future(
                         self.msg.aioread(
                     self.logger.debug("Starting non-admin subscription task")
                     self.aiomain_task = asyncio.ensure_future(
                         self.msg.aioread(
-                            ("ns", "nsi"),
-                            loop=self.loop,
+                            ("ns", "nsi", "vnf"),
                             aiocallback=self._msg_callback,
                         ),
                             aiocallback=self._msg_callback,
                         ),
-                        loop=self.loop,
                     )
                 done, _ = await asyncio.wait(
                     [self.aiomain_task, self.aiomain_task_admin],
                     timeout=None,
                     )
                 done, _ = await asyncio.wait(
                     [self.aiomain_task, self.aiomain_task_admin],
                     timeout=None,
-                    loop=self.loop,
                     return_when=asyncio.FIRST_COMPLETED,
                 )
                 try:
                     return_when=asyncio.FIRST_COMPLETED,
                 )
                 try:
@@ -140,14 +138,13 @@ class SubscriptionThread(threading.Thread):
                         "Error accessing kafka '{}'. Retrying ...".format(e)
                     )
                     kafka_working = False
                         "Error accessing kafka '{}'. Retrying ...".format(e)
                     )
                     kafka_working = False
-            await asyncio.sleep(10, loop=self.loop)
+            await asyncio.sleep(10)
 
     def run(self):
         """
         Start of the thread
         :return: None
         """
 
     def run(self):
         """
         Start of the thread
         :return: None
         """
-        self.loop = asyncio.new_event_loop()
         try:
             if not self.db:
                 if self.config["database"]["driver"] == "mongo":
         try:
             if not self.db:
                 if self.config["database"]["driver"] == "mongo":
@@ -164,7 +161,6 @@ class SubscriptionThread(threading.Thread):
                     )
             if not self.msg:
                 config_msg = self.config["message"].copy()
                     )
             if not self.msg:
                 config_msg = self.config["message"].copy()
-                config_msg["loop"] = self.loop
                 if config_msg["driver"] == "local":
                     self.msg = msglocal.MsgLocal()
                     self.msg.connect(config_msg)
                 if config_msg["driver"] == "local":
                     self.msg = msglocal.MsgLocal()
                     self.msg.connect(config_msg)
@@ -178,18 +174,14 @@ class SubscriptionThread(threading.Thread):
                         )
                     )
             self.nslcm = NsLcmNotification(self.db)
                         )
                     )
             self.nslcm = NsLcmNotification(self.db)
+            self.vnflcm = VnfLcmNotification(self.db)
         except (DbException, MsgException) as e:
             raise SubscriptionException(str(e), http_code=e.http_code)
 
         self.logger.debug("Starting")
         while not self.to_terminate:
             try:
         except (DbException, MsgException) as e:
             raise SubscriptionException(str(e), http_code=e.http_code)
 
         self.logger.debug("Starting")
         while not self.to_terminate:
             try:
-
-                self.loop.run_until_complete(
-                    asyncio.ensure_future(self.start_kafka(), loop=self.loop)
-                )
-            # except asyncio.CancelledError:
-            #     break  # if cancelled it should end, breaking loop
+                asyncio.run(self.start_kafka())
             except Exception as e:
                 if not self.to_terminate:
                     self.logger.exception(
             except Exception as e:
                 if not self.to_terminate:
                     self.logger.exception(
@@ -198,7 +190,6 @@ class SubscriptionThread(threading.Thread):
 
         self.logger.debug("Finishing")
         self._stop()
 
         self.logger.debug("Finishing")
         self._stop()
-        self.loop.close()
 
     async def _msg_callback(self, topic, command, params):
         """
 
     async def _msg_callback(self, topic, command, params):
         """
@@ -263,15 +254,36 @@ class SubscriptionThread(threading.Thread):
                             # self.logger.debug(subscribers)
                             if subscribers:
                                 asyncio.ensure_future(
                             # self.logger.debug(subscribers)
                             if subscribers:
                                 asyncio.ensure_future(
-                                    self.nslcm.send_notifications(
-                                        subscribers, loop=self.loop
-                                    ),
-                                    loop=self.loop,
+                                    self.nslcm.send_notifications(subscribers),
                                 )
                 else:
                     self.logger.debug(
                         "Message can not be used for notification of nslcm"
                     )
                                 )
                 else:
                     self.logger.debug(
                         "Message can not be used for notification of nslcm"
                     )
+            elif topic == "vnf":
+                if isinstance(params, dict):
+                    vnfd_id = params["vnfdId"]
+                    vnf_instance_id = params["vnfInstanceId"]
+                    if command == "create" or command == "delete":
+                        op_state = command
+                    else:
+                        op_state = params["operationState"]
+                    event_details = {
+                        "topic": topic,
+                        "command": command.upper(),
+                        "params": params,
+                    }
+                    subscribers = self.vnflcm.get_subscribers(
+                        vnfd_id,
+                        vnf_instance_id,
+                        command.upper(),
+                        op_state,
+                        event_details,
+                    )
+                    if subscribers:
+                        asyncio.ensure_future(
+                            self.vnflcm.send_notifications(subscribers),
+                        )
             elif topic == "nsi":
                 if command == "terminated" and params["operationState"] in (
                     "COMPLETED",
             elif topic == "nsi":
                 if command == "terminated" and params["operationState"] in (
                     "COMPLETED",
@@ -316,7 +328,7 @@ class SubscriptionThread(threading.Thread):
             # writing to kafka must be done with our own loop. For this reason it is not allowed Engine to do that,
             # but content to be written is stored at msg_to_send
             for msg in msg_to_send:
             # writing to kafka must be done with our own loop. For this reason it is not allowed Engine to do that,
             # but content to be written is stored at msg_to_send
             for msg in msg_to_send:
-                await self.msg.aiowrite(*msg, loop=self.loop)
+                await self.msg.aiowrite(*msg)
         except (EngineException, DbException, MsgException) as e:
             self.logger.error(
                 "Error while processing topic={} command={}: {}".format(
         except (EngineException, DbException, MsgException) as e:
             self.logger.error(
                 "Error while processing topic={} command={}: {}".format(
@@ -352,6 +364,8 @@ class SubscriptionThread(threading.Thread):
         """
         self.to_terminate = True
         if self.aiomain_task:
         """
         self.to_terminate = True
         if self.aiomain_task:
-            self.loop.call_soon_threadsafe(self.aiomain_task.cancel)
+            asyncio.get_event_loop().call_soon_threadsafe(self.aiomain_task.cancel)
         if self.aiomain_task_admin:
         if self.aiomain_task_admin:
-            self.loop.call_soon_threadsafe(self.aiomain_task_admin.cancel)
+            asyncio.get_event_loop().call_soon_threadsafe(
+                self.aiomain_task_admin.cancel
+            )
diff --git a/osm_nbi/tests/run_test.py b/osm_nbi/tests/run_test.py
deleted file mode 100755 (executable)
index f339354..0000000
+++ /dev/null
@@ -1,5893 +0,0 @@
-#! /usr/bin/python3
-# -*- coding: utf-8 -*-
-
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#    http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
-# implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import getopt
-import sys
-import requests
-import json
-import logging
-import yaml
-
-# import json
-# import tarfile
-from time import sleep
-from random import randint
-import os
-from sys import stderr
-from uuid import uuid4
-import re
-
-__author__ = "Alfonso Tierno, alfonso.tiernosepulveda@telefonica.com"
-__date__ = "$2018-03-01$"
-__version__ = "0.3"
-version_date = "Oct 2018"
-
-
-def usage():
-    print("Usage: ", sys.argv[0], "[options]")
-    print(
-        "      Performs system tests over running NBI. It can be used for real OSM test using option '--test-osm'"
-    )
-    print(
-        "      If this is the case env variables 'OSMNBITEST_VIM_NAME' must be supplied to create a VIM if not exist "
-        "where deployment is done"
-    )
-    print("OPTIONS")
-    print("      -h|--help: shows this help")
-    print("      --insecure: Allows non trusted https NBI server")
-    print("      --list: list available tests")
-    print(
-        "      --manual-check: Deployment tests stop after deployed to allow manual inspection. Only make sense with "
-        "'--test-osm'"
-    )
-    print("      -p|--password PASSWORD: NBI access password. 'admin' by default")
-    print("      ---project PROJECT: NBI access project. 'admin' by default")
-    print(
-        "      --test TEST[,...]: Execute only a test or a comma separated list of tests"
-    )
-    print(
-        "      --params key=val: params to the previous test. key can be vnfd-files, nsd-file, ns-name, ns-config"
-    )
-    print(
-        "      --test-osm: If missing this test is intended for NBI only, no other OSM components are expected. Use "
-        "this flag to test the system. LCM and RO components are expected to be up and running"
-    )
-    print(
-        "      --timeout TIMEOUT: General NBI timeout, by default {}s".format(timeout)
-    )
-    print(
-        "      --timeout-deploy TIMEOUT: Timeout used for getting NS deployed, by default {}s".format(
-            timeout_deploy
-        )
-    )
-    print(
-        "      --timeout-configure TIMEOUT: Timeout used for getting NS deployed and configured,"
-        " by default {}s".format(timeout_configure)
-    )
-    print("      -u|--user USERNAME: NBI access username. 'admin' by default")
-    print(
-        "      --url URL: complete NBI server URL. 'https//localhost:9999/osm' by default"
-    )
-    print("      -v|--verbose print debug information, can be used several times")
-    print("      --no-verbose remove verbosity")
-    print("      --version: prints current version")
-    print("ENV variables used for real deployment tests with option osm-test.")
-    print("      export OSMNBITEST_VIM_NAME=vim-name")
-    print("      export OSMNBITEST_VIM_URL=vim-url")
-    print("      export OSMNBITEST_VIM_TYPE=vim-type")
-    print("      export OSMNBITEST_VIM_TENANT=vim-tenant")
-    print("      export OSMNBITEST_VIM_USER=vim-user")
-    print("      export OSMNBITEST_VIM_PASSWORD=vim-password")
-    print('      export OSMNBITEST_VIM_CONFIG="vim-config"')
-    print('      export OSMNBITEST_NS_NAME="vim-config"')
-    return
-
-
-r_header_json = {"Content-type": "application/json"}
-headers_json = {"Content-type": "application/json", "Accept": "application/json"}
-r_header_yaml = {"Content-type": "application/yaml"}
-headers_yaml = {"Content-type": "application/yaml", "Accept": "application/yaml"}
-r_header_text = {"Content-type": "text/plain"}
-r_header_octect = {"Content-type": "application/octet-stream"}
-headers_text = {"Accept": "text/plain,application/yaml"}
-r_header_zip = {"Content-type": "application/zip"}
-headers_zip = {"Accept": "application/zip,application/yaml"}
-headers_zip_yaml = {"Accept": "application/yaml", "Content-type": "application/zip"}
-headers_zip_json = {"Accept": "application/json", "Content-type": "application/zip"}
-headers_txt_json = {"Accept": "application/json", "Content-type": "text/plain"}
-r_headers_yaml_location_vnfd = {
-    "Location": "/vnfpkgm/v1/vnf_packages_content/",
-    "Content-Type": "application/yaml",
-}
-r_headers_yaml_location_nsd = {
-    "Location": "/nsd/v1/ns_descriptors_content/",
-    "Content-Type": "application/yaml",
-}
-r_headers_yaml_location_nst = {
-    "Location": "/nst/v1/netslice_templates_content",
-    "Content-Type": "application/yaml",
-}
-r_headers_yaml_location_nslcmop = {
-    "Location": "nslcm/v1/ns_lcm_op_occs/",
-    "Content-Type": "application/yaml",
-}
-r_headers_yaml_location_nsilcmop = {
-    "Location": "/osm/nsilcm/v1/nsi_lcm_op_occs/",
-    "Content-Type": "application/yaml",
-}
-
-# test ones authorized
-test_authorized_list = (
-    (
-        "AU1",
-        "Invalid vnfd id",
-        "GET",
-        "/vnfpkgm/v1/vnf_packages/non-existing-id",
-        headers_json,
-        None,
-        404,
-        r_header_json,
-        "json",
-    ),
-    (
-        "AU2",
-        "Invalid nsd id",
-        "GET",
-        "/nsd/v1/ns_descriptors/non-existing-id",
-        headers_yaml,
-        None,
-        404,
-        r_header_yaml,
-        "yaml",
-    ),
-    (
-        "AU3",
-        "Invalid nsd id",
-        "DELETE",
-        "/nsd/v1/ns_descriptors_content/non-existing-id",
-        headers_yaml,
-        None,
-        404,
-        r_header_yaml,
-        "yaml",
-    ),
-)
-timeout = 120  # general timeout
-timeout_deploy = 60 * 10  # timeout for NS deploying without charms
-timeout_configure = 60 * 20  # timeout for NS deploying and configuring
-
-
-class TestException(Exception):
-    pass
-
-
-class TestRest:
-    def __init__(
-        self,
-        url_base,
-        header_base=None,
-        verify=False,
-        user="admin",
-        password="admin",
-        project="admin",
-    ):
-        self.url_base = url_base
-        if header_base is None:
-            self.header_base = {}
-        else:
-            self.header_base = header_base.copy()
-        self.s = requests.session()
-        self.s.headers = self.header_base
-        self.verify = verify
-        self.token = False
-        self.user = user
-        self.password = password
-        self.project = project
-        self.vim_id = None
-        # contains ID of tests obtained from Location response header. "" key contains last obtained id
-        self.last_id = ""
-        self.test_name = None
-        self.step = 0  # number of subtest under test
-        self.passed_tests = 0
-        self.failed_tests = 0
-
-    def set_test_name(self, test_name):
-        self.test_name = test_name
-        self.step = 0
-        self.last_id = ""
-
-    def set_header(self, header):
-        self.s.headers.update(header)
-
-    def set_tet_name(self, test_name):
-        self.test_name = test_name
-
-    def unset_header(self, key):
-        if key in self.s.headers:
-            del self.s.headers[key]
-
-    def test(
-        self,
-        description,
-        method,
-        url,
-        headers,
-        payload,
-        expected_codes,
-        expected_headers,
-        expected_payload,
-        store_file=None,
-        pooling=False,
-    ):
-        """
-        Performs an http request and check http code response. Exit if different than allowed. It get the returned id
-        that can be used by following test in the URL with {name} where name is the name of the test
-        :param description:  description of the test
-        :param method: HTTP method: GET,PUT,POST,DELETE,...
-        :param url: complete URL or relative URL
-        :param headers: request headers to add to the base headers
-        :param payload: Can be a dict, transformed to json, a text or a file if starts with '@'
-        :param expected_codes: expected response codes, can be int, int tuple or int range
-        :param expected_headers: expected response headers, dict with key values
-        :param expected_payload: expected payload, 0 if empty, 'yaml', 'json', 'text', 'zip', 'octet-stream'
-        :param store_file: filename to store content
-        :param pooling: if True do not count neither log this test. Because a pooling is done with many equal requests
-        :return: requests response
-        """
-        r = None
-        try:
-            if not self.s:
-                self.s = requests.session()
-            # URL
-            if not url:
-                url = self.url_base
-            elif not url.startswith("http"):
-                url = self.url_base + url
-
-            # replace url <> with the last ID
-            url = url.replace("<>", self.last_id)
-            if payload:
-                if isinstance(payload, str):
-                    if payload.startswith("@"):
-                        mode = "r"
-                        file_name = payload[1:]
-                        if payload.startswith("@b"):
-                            mode = "rb"
-                            file_name = payload[2:]
-                        with open(file_name, mode) as f:
-                            payload = f.read()
-                elif isinstance(payload, dict):
-                    payload = json.dumps(payload)
-
-            if not pooling:
-                test_description = "Test {}{} {} {} {}".format(
-                    self.test_name, self.step, description, method, url
-                )
-                logger.warning(test_description)
-                self.step += 1
-            stream = False
-            if expected_payload in ("zip", "octet-string") or store_file:
-                stream = True
-            __retry = 0
-            while True:
-                try:
-                    r = getattr(self.s, method.lower())(
-                        url,
-                        data=payload,
-                        headers=headers,
-                        verify=self.verify,
-                        stream=stream,
-                    )
-                    break
-                except requests.exceptions.ConnectionError as e:
-                    if __retry == 2:
-                        raise
-                    logger.error("Exception {}. Retrying".format(e))
-                    __retry += 1
-
-            if expected_payload in ("zip", "octet-string") or store_file:
-                logger.debug("RX {}".format(r.status_code))
-            else:
-                logger.debug("RX {}: {}".format(r.status_code, r.text))
-
-            # check response
-            if expected_codes:
-                if isinstance(expected_codes, int):
-                    expected_codes = (expected_codes,)
-                if r.status_code not in expected_codes:
-                    raise TestException(
-                        "Got status {}. Expected {}. {}".format(
-                            r.status_code, expected_codes, r.text
-                        )
-                    )
-
-            if expected_headers:
-                for header_key, header_val in expected_headers.items():
-                    if header_key.lower() not in r.headers:
-                        raise TestException("Header {} not present".format(header_key))
-                    if header_val and header_val.lower() not in r.headers[header_key]:
-                        raise TestException(
-                            "Header {} does not contain {} but {}".format(
-                                header_key, header_val, r.headers[header_key]
-                            )
-                        )
-
-            if expected_payload is not None:
-                if expected_payload == 0 and len(r.content) > 0:
-                    raise TestException("Expected empty payload")
-                elif expected_payload == "json":
-                    try:
-                        r.json()
-                    except Exception as e:
-                        raise TestException(
-                            "Expected json response payload, but got Exception {}".format(
-                                e
-                            )
-                        )
-                elif expected_payload == "yaml":
-                    try:
-                        yaml.safe_load(r.text)
-                    except Exception as e:
-                        raise TestException(
-                            "Expected yaml response payload, but got Exception {}".format(
-                                e
-                            )
-                        )
-                elif expected_payload in ("zip", "octet-string"):
-                    if len(r.content) == 0:
-                        raise TestException(
-                            "Expected some response payload, but got empty"
-                        )
-                    # try:
-                    #     tar = tarfile.open(None, 'r:gz', fileobj=r.raw)
-                    #     for tarinfo in tar:
-                    #         tarname = tarinfo.name
-                    #         print(tarname)
-                    # except Exception as e:
-                    #     raise TestException("Expected zip response payload, but got Exception {}".format(e))
-                elif expected_payload == "text":
-                    if len(r.content) == 0:
-                        raise TestException(
-                            "Expected some response payload, but got empty"
-                        )
-                    # r.text
-            if store_file:
-                with open(store_file, "wb") as fd:
-                    for chunk in r.iter_content(chunk_size=128):
-                        fd.write(chunk)
-
-            location = r.headers.get("Location")
-            if location:
-                _id = location[location.rfind("/") + 1 :]
-                if _id:
-                    self.last_id = str(_id)
-            if not pooling:
-                self.passed_tests += 1
-            return r
-        except TestException as e:
-            self.failed_tests += 1
-            r_status_code = None
-            r_text = None
-            if r:
-                r_status_code = r.status_code
-                r_text = r.text
-            logger.error("{} \nRX code{}: {}".format(e, r_status_code, r_text))
-            return None
-            # exit(1)
-        except IOError as e:
-            if store_file:
-                logger.error("Cannot open file {}: {}".format(store_file, e))
-            else:
-                logger.error("Exception: {}".format(e), exc_info=True)
-            self.failed_tests += 1
-            return None
-            # exit(1)
-        except requests.exceptions.RequestException as e:
-            logger.error("Exception: {}".format(e))
-
-    def get_autorization(self):  # user=None, password=None, project=None):
-        if (
-            self.token
-        ):  # and self.user == user and self.password == password and self.project == project:
-            return
-        # self.user = user
-        # self.password = password
-        # self.project = project
-        r = self.test(
-            "Obtain token",
-            "POST",
-            "/admin/v1/tokens",
-            headers_json,
-            {
-                "username": self.user,
-                "password": self.password,
-                "project_id": self.project,
-            },
-            (200, 201),
-            r_header_json,
-            "json",
-        )
-        if not r:
-            return
-        response = r.json()
-        self.token = response["id"]
-        self.set_header({"Authorization": "Bearer {}".format(self.token)})
-
-    def remove_authorization(self):
-        if self.token:
-            self.test(
-                "Delete token",
-                "DELETE",
-                "/admin/v1/tokens/{}".format(self.token),
-                headers_json,
-                None,
-                (200, 201, 204),
-                None,
-                None,
-            )
-        self.token = None
-        self.unset_header("Authorization")
-
-    def get_create_vim(self, test_osm):
-        if self.vim_id:
-            return self.vim_id
-        self.get_autorization()
-        if test_osm:
-            vim_name = os.environ.get("OSMNBITEST_VIM_NAME")
-            if not vim_name:
-                raise TestException(
-                    "Needed to define OSMNBITEST_VIM_XXX variables to create a real VIM for deployment"
-                )
-        else:
-            vim_name = "fakeVim"
-        # Get VIM
-        r = self.test(
-            "Get VIM ID",
-            "GET",
-            "/admin/v1/vim_accounts?name={}".format(vim_name),
-            headers_json,
-            None,
-            200,
-            r_header_json,
-            "json",
-        )
-        if not r:
-            return
-        vims = r.json()
-        if vims:
-            return vims[0]["_id"]
-        # Add VIM
-        if test_osm:
-            # check needed environ parameters:
-            if not os.environ.get("OSMNBITEST_VIM_URL") or not os.environ.get(
-                "OSMNBITEST_VIM_TENANT"
-            ):
-                raise TestException(
-                    "Env OSMNBITEST_VIM_URL and OSMNBITEST_VIM_TENANT are needed for create a real VIM"
-                    " to deploy on whit the --test-osm option"
-                )
-            vim_data = "{{schema_version: '1.0', name: '{}', vim_type: {}, vim_url: '{}', vim_tenant_name: '{}', " "vim_user: {}, vim_password: {}".format(
-                vim_name,
-                os.environ.get("OSMNBITEST_VIM_TYPE", "openstack"),
-                os.environ.get("OSMNBITEST_VIM_URL"),
-                os.environ.get("OSMNBITEST_VIM_TENANT"),
-                os.environ.get("OSMNBITEST_VIM_USER"),
-                os.environ.get("OSMNBITEST_VIM_PASSWORD"),
-            )
-            if os.environ.get("OSMNBITEST_VIM_CONFIG"):
-                vim_data += " ,config: {}".format(
-                    os.environ.get("OSMNBITEST_VIM_CONFIG")
-                )
-            vim_data += "}"
-        else:
-            vim_data = (
-                "{schema_version: '1.0', name: fakeVim, vim_type: openstack, vim_url: 'http://10.11.12.13/fake'"
-                ", vim_tenant_name: 'vimtenant', vim_user: vimuser, vim_password: vimpassword}"
-            )
-        self.test(
-            "Create VIM",
-            "POST",
-            "/admin/v1/vim_accounts",
-            headers_yaml,
-            vim_data,
-            (201, 202),
-            {"Location": "/admin/v1/vim_accounts/", "Content-Type": "application/yaml"},
-            "yaml",
-        )
-        return self.last_id
-
-    def print_results(self):
-        print("\n\n\n--------------------------------------------")
-        print(
-            "TEST RESULTS: Total: {}, Passed: {}, Failed: {}".format(
-                self.passed_tests + self.failed_tests,
-                self.passed_tests,
-                self.failed_tests,
-            )
-        )
-        print("--------------------------------------------")
-
-    def wait_until_delete(self, url_op, timeout_delete):
-        """
-        Make a pooling until topic is not present, because of deleted
-        :param url_op:
-        :param timeout_delete:
-        :return:
-        """
-        description = "Wait to topic being deleted"
-        test_description = "Test {}{} {} {} {}".format(
-            self.test_name, self.step, description, "GET", url_op
-        )
-        logger.warning(test_description)
-        self.step += 1
-
-        wait = timeout_delete
-        while wait >= 0:
-            r = self.test(
-                description,
-                "GET",
-                url_op,
-                headers_yaml,
-                None,
-                (200, 404),
-                None,
-                r_header_yaml,
-                "yaml",
-                pooling=True,
-            )
-            if not r:
-                return
-            if r.status_code == 404:
-                self.passed_tests += 1
-                break
-            elif r.status_code == 200:
-                wait -= 5
-                sleep(5)
-        else:
-            raise TestException(
-                "Topic is not deleted after {} seconds".format(timeout_delete)
-            )
-            self.failed_tests += 1
-
-    def wait_operation_ready(self, ns_nsi, opp_id, timeout, expected_fail=False):
-        """
-        Wait until nslcmop or nsilcmop finished
-        :param ns_nsi: "ns" o "nsi"
-        :param opp_id: Id o fthe operation
-        :param timeout:
-        :param expected_fail:
-        :return: None. Updates passed/failed_tests
-        """
-        if ns_nsi == "ns":
-            url_op = "/nslcm/v1/ns_lcm_op_occs/{}".format(opp_id)
-        else:
-            url_op = "/nsilcm/v1/nsi_lcm_op_occs/{}".format(opp_id)
-        description = "Wait to {} lcm operation complete".format(ns_nsi)
-        test_description = "Test {}{} {} {} {}".format(
-            self.test_name, self.step, description, "GET", url_op
-        )
-        logger.warning(test_description)
-        self.step += 1
-        wait = timeout
-        while wait >= 0:
-            r = self.test(
-                description,
-                "GET",
-                url_op,
-                headers_json,
-                None,
-                200,
-                r_header_json,
-                "json",
-                pooling=True,
-            )
-            if not r:
-                return
-            nslcmop = r.json()
-            if "COMPLETED" in nslcmop["operationState"]:
-                if expected_fail:
-                    logger.error(
-                        "NS terminate has success, expecting failing: {}".format(
-                            nslcmop["detailed-status"]
-                        )
-                    )
-                    self.failed_tests += 1
-                else:
-                    self.passed_tests += 1
-                break
-            elif "FAILED" in nslcmop["operationState"]:
-                if not expected_fail:
-                    logger.error(
-                        "NS terminate has failed: {}".format(nslcmop["detailed-status"])
-                    )
-                    self.failed_tests += 1
-                else:
-                    self.passed_tests += 1
-                break
-
-            print(".", end="", file=stderr)
-            wait -= 10
-            sleep(10)
-        else:
-            self.failed_tests += 1
-            logger.error(
-                "NS instantiate is not terminate after {} seconds".format(timeout)
-            )
-            return
-        print("", file=stderr)
-
-
-class TestNonAuthorized:
-    description = "Test invalid URLs. methods and no authorization"
-
-    @staticmethod
-    def run(engine, test_osm, manual_check, test_params=None):
-        engine.set_test_name("NonAuth")
-        engine.remove_authorization()
-        test_not_authorized_list = (
-            (
-                "Invalid token",
-                "GET",
-                "/admin/v1/users",
-                headers_json,
-                None,
-                401,
-                r_header_json,
-                "json",
-            ),
-            (
-                "Invalid URL",
-                "POST",
-                "/admin/v1/nonexist",
-                headers_yaml,
-                None,
-                405,
-                r_header_yaml,
-                "yaml",
-            ),
-            (
-                "Invalid version",
-                "DELETE",
-                "/admin/v2/users",
-                headers_yaml,
-                None,
-                405,
-                r_header_yaml,
-                "yaml",
-            ),
-        )
-        for t in test_not_authorized_list:
-            engine.test(*t)
-
-
-class TestUsersProjects:
-    description = "test project and user creation"
-
-    @staticmethod
-    def run(engine, test_osm, manual_check, test_params=None):
-        engine.set_test_name("UserProject")
-        # backend = test_params.get("backend") if test_params else None   # UNUSED
-
-        # Initialisation
-        p1 = p2 = p3 = None
-        padmin = pbad = None
-        u1 = u2 = u3 = u4 = None
-
-        engine.get_autorization()
-
-        res = engine.test(
-            "Create project non admin 1",
-            "POST",
-            "/admin/v1/projects",
-            headers_json,
-            {"name": "P1"},
-            (201, 204),
-            {"Location": "/admin/v1/projects/", "Content-Type": "application/json"},
-            "json",
-        )
-        p1 = engine.last_id if res else None
-
-        res = engine.test(
-            "Create project admin",
-            "POST",
-            "/admin/v1/projects",
-            headers_json,
-            {"name": "Padmin", "admin": True},
-            (201, 204),
-            {"Location": "/admin/v1/projects/", "Content-Type": "application/json"},
-            "json",
-        )
-        padmin = engine.last_id if res else None
-
-        res = engine.test(
-            "Create project bad format",
-            "POST",
-            "/admin/v1/projects",
-            headers_json,
-            {"name": 1},
-            (400, 422),
-            r_header_json,
-            "json",
-        )
-        pbad = engine.last_id if res else None
-
-        res = engine.test(
-            "Get project admin role",
-            "GET",
-            "/admin/v1/roles?name=project_admin",
-            headers_json,
-            {},
-            (200),
-            {"Content-Type": "application/json"},
-            "json",
-        )
-        rpa = res.json()[0]["_id"] if res else None
-        res = engine.test(
-            "Get project user role",
-            "GET",
-            "/admin/v1/roles?name=project_user",
-            headers_json,
-            {},
-            (200),
-            {"Content-Type": "application/json"},
-            "json",
-        )
-        rpu = res.json()[0]["_id"] if res else None
-        res = engine.test(
-            "Get system admin role",
-            "GET",
-            "/admin/v1/roles?name=system_admin",
-            headers_json,
-            {},
-            (200),
-            {"Content-Type": "application/json"},
-            "json",
-        )
-        rsa = res.json()[0]["_id"] if res else None
-
-        data = {"username": "U1", "password": "pw1"}
-        p2 = uuid4().hex
-        data["project_role_mappings"] = [
-            {"project": p1, "role": rpa},
-            {"project": p2, "role": rpa},
-            {"project": padmin, "role": rpu},
-        ]
-        rc = 201
-        xhd = {"Location": "/admin/v1/users/", "Content-Type": "application/json"}
-        res = engine.test(
-            "Create user with bad project and force",
-            "POST",
-            "/admin/v1/users?FORCE=True",
-            headers_json,
-            data,
-            rc,
-            xhd,
-            "json",
-        )
-        if res:
-            u1 = engine.last_id
-        else:
-            # User is created sometimes even though an exception is raised
-            res = engine.test(
-                "Get user U1",
-                "GET",
-                "/admin/v1/users?username=U1",
-                headers_json,
-                {},
-                (200),
-                {"Content-Type": "application/json"},
-                "json",
-            )
-            u1 = res.json()[0]["_id"] if res else None
-
-        data = {"username": "U2", "password": "pw2"}
-        data["project_role_mappings"] = [
-            {"project": p1, "role": rpa},
-            {"project": padmin, "role": rsa},
-        ]
-        res = engine.test(
-            "Create user 2",
-            "POST",
-            "/admin/v1/users",
-            headers_json,
-            data,
-            201,
-            {"Location": "/admin/v1/users/", "Content-Type": "application/json"},
-            "json",
-        )
-        u2 = engine.last_id if res else None
-
-        if u1:
-            ftt = "project_role_mappings"
-            xpr = [{"project": p1, "role": rpa}, {"project": padmin, "role": rpu}]
-            data = {ftt: xpr}
-            engine.test(
-                "Edit user U1, delete  P2 project",
-                "PATCH",
-                "/admin/v1/users/" + u1,
-                headers_json,
-                data,
-                204,
-                None,
-                None,
-            )
-            res = engine.test(
-                "Check user U1, contains the right projects",
-                "GET",
-                "/admin/v1/users/" + u1,
-                headers_json,
-                None,
-                200,
-                None,
-                json,
-            )
-            if res:
-                rj = res.json()
-                xpr[0]["project_name"] = "P1"
-                xpr[0]["role_name"] = "project_admin"
-                xpr[1]["project_name"] = "Padmin"
-                xpr[1]["role_name"] = "project_user"
-                ok = True
-                for pr in rj[ftt]:
-                    if pr not in xpr:
-                        ok = False
-                for pr in xpr:
-                    if pr not in rj[ftt]:
-                        ok = False
-                if not ok:
-                    logger.error(
-                        "User {} '{}' are different than expected '{}'. Edition was not done properly".format(
-                            ftt, rj[ftt], xpr
-                        )
-                    )
-                    engine.failed_tests += 1
-
-        p2 = None  # To prevent deletion attempts
-
-        # Add a test of 'default project' for Keystone?
-
-        if u2:
-            engine.test(
-                "Edit user U2, change password",
-                "PUT",
-                "/admin/v1/users/" + u2,
-                headers_json,
-                {"password": "pw2_new"},
-                204,
-                None,
-                None,
-            )
-
-        if p1:
-            engine.test(
-                "Change to project P1 non existing",
-                "POST",
-                "/admin/v1/tokens/",
-                headers_json,
-                {"project_id": p1},
-                401,
-                r_header_json,
-                "json",
-            )
-
-        if u2 and p1:
-            res = engine.test(
-                "Change to user U2 project P1",
-                "POST",
-                "/admin/v1/tokens",
-                headers_json,
-                {"username": "U2", "password": "pw2_new", "project_id": "P1"},
-                (200, 201),
-                r_header_json,
-                "json",
-            )
-            if res:
-                rj = res.json()
-                engine.set_header({"Authorization": "Bearer {}".format(rj["id"])})
-
-                engine.test(
-                    "Edit user projects non admin",
-                    "PUT",
-                    "/admin/v1/users/U1",
-                    headers_json,
-                    {"remove_project_role_mappings": [{"project": "P1", "role": None}]},
-                    401,
-                    r_header_json,
-                    "json",
-                )
-
-                res = engine.test(
-                    "Add new project non admin",
-                    "POST",
-                    "/admin/v1/projects",
-                    headers_json,
-                    {"name": "P2"},
-                    401,
-                    r_header_json,
-                    "json",
-                )
-                if res is None or res.status_code == 201:
-                    # The project has been created even though it shouldn't
-                    res = engine.test(
-                        "Get project P2",
-                        "GET",
-                        "/admin/v1/projects/P2",
-                        headers_json,
-                        None,
-                        200,
-                        r_header_json,
-                        "json",
-                    )
-                    p2 = res.json()["_id"] if res else None
-
-                if p1:
-                    data = {"username": "U3", "password": "pw3"}
-                    data["project_role_mappings"] = [{"project": p1, "role": rpu}]
-                    res = engine.test(
-                        "Add new user non admin",
-                        "POST",
-                        "/admin/v1/users",
-                        headers_json,
-                        data,
-                        401,
-                        r_header_json,
-                        "json",
-                    )
-                    if res is None or res.status_code == 201:
-                        # The user has been created even though it shouldn't
-                        res = engine.test(
-                            "Get user U3",
-                            "GET",
-                            "/admin/v1/users/U3",
-                            headers_json,
-                            None,
-                            200,
-                            r_header_json,
-                            "json",
-                        )
-                        u3 = res.json()["_id"] if res else None
-                else:
-                    u3 = None
-
-                if padmin:
-                    res = engine.test(
-                        "Change to user U2 project Padmin",
-                        "POST",
-                        "/admin/v1/tokens",
-                        headers_json,
-                        {
-                            "project_id": "Padmin"
-                        },  # Caused a Keystone authentication error
-                        # {"username": "U2", "password": "pw2_new", "project_id": "Padmin"},
-                        (200, 201),
-                        r_header_json,
-                        "json",
-                    )
-                    if res:
-                        rj = res.json()
-                        engine.set_header(
-                            {"Authorization": "Bearer {}".format(rj["id"])}
-                        )
-
-                        res = engine.test(
-                            "Add new project admin",
-                            "POST",
-                            "/admin/v1/projects",
-                            headers_json,
-                            {"name": "P3"},
-                            (201, 204),
-                            {
-                                "Location": "/admin/v1/projects/",
-                                "Content-Type": "application/json",
-                            },
-                            "json",
-                        )
-                        p3 = engine.last_id if res else None
-
-                        if p1:
-                            data = {"username": "U4", "password": "pw4"}
-                            data["project_role_mappings"] = [
-                                {"project": p1, "role": rpa}
-                            ]
-                            res = engine.test(
-                                "Add new user admin",
-                                "POST",
-                                "/admin/v1/users",
-                                headers_json,
-                                data,
-                                (201, 204),
-                                {
-                                    "Location": "/admin/v1/users/",
-                                    "Content-Type": "application/json",
-                                },
-                                "json",
-                            )
-                            u4 = engine.last_id if res else None
-                        else:
-                            u4 = None
-
-                        if u4 and p3:
-                            data = {
-                                "project_role_mappings": [{"project": p3, "role": rpa}]
-                            }
-                            engine.test(
-                                "Edit user projects admin",
-                                "PUT",
-                                "/admin/v1/users/U4",
-                                headers_json,
-                                data,
-                                204,
-                                None,
-                                None,
-                            )
-                            # Project is deleted even though it shouldn't - PROVISIONAL?
-                            res = engine.test(
-                                "Delete project P3 conflict",
-                                "DELETE",
-                                "/admin/v1/projects/" + p3,
-                                headers_json,
-                                None,
-                                409,
-                                None,
-                                None,
-                            )
-                            if res and res.status_code in (200, 204):
-                                p3 = None
-                            if p3:
-                                res = engine.test(
-                                    "Delete project P3 forcing",
-                                    "DELETE",
-                                    "/admin/v1/projects/" + p3 + "?FORCE=True",
-                                    headers_json,
-                                    None,
-                                    204,
-                                    None,
-                                    None,
-                                )
-                                if res and res.status_code in (200, 204):
-                                    p3 = None
-
-                        if u2:
-                            res = engine.test(
-                                "Delete user U2. Conflict deleting own user",
-                                "DELETE",
-                                "/admin/v1/users/" + u2,
-                                headers_json,
-                                None,
-                                409,
-                                r_header_json,
-                                "json",
-                            )
-                            if res is None or res.status_code in (200, 204):
-                                u2 = None
-                        if u4:
-                            res = engine.test(
-                                "Delete user U4",
-                                "DELETE",
-                                "/admin/v1/users/" + u4,
-                                headers_json,
-                                None,
-                                204,
-                                None,
-                                None,
-                            )
-                            if res and res.status_code in (200, 204):
-                                u4 = None
-                        if p3:
-                            res = engine.test(
-                                "Delete project P3",
-                                "DELETE",
-                                "/admin/v1/projects/" + p3,
-                                headers_json,
-                                None,
-                                204,
-                                None,
-                                None,
-                            )
-                            if res and res.status_code in (200, 204):
-                                p3 = None
-
-                if u3:
-                    res = engine.test(
-                        "Delete user U3",
-                        "DELETE",
-                        "/admin/v1/users/" + u3,
-                        headers_json,
-                        None,
-                        204,
-                        None,
-                        None,
-                    )
-                    if res:
-                        u3 = None
-
-        # change to admin
-        engine.remove_authorization()  # To force get authorization
-        engine.get_autorization()
-        if u1:
-            engine.test(
-                "Delete user U1",
-                "DELETE",
-                "/admin/v1/users/" + u1,
-                headers_json,
-                None,
-                204,
-                None,
-                None,
-            )
-        if u2:
-            engine.test(
-                "Delete user U2",
-                "DELETE",
-                "/admin/v1/users/" + u2,
-                headers_json,
-                None,
-                204,
-                None,
-                None,
-            )
-        if u3:
-            engine.test(
-                "Delete user U3",
-                "DELETE",
-                "/admin/v1/users/" + u3,
-                headers_json,
-                None,
-                204,
-                None,
-                None,
-            )
-        if u4:
-            engine.test(
-                "Delete user U4",
-                "DELETE",
-                "/admin/v1/users/" + u4,
-                headers_json,
-                None,
-                204,
-                None,
-                None,
-            )
-        if p1:
-            engine.test(
-                "Delete project P1",
-                "DELETE",
-                "/admin/v1/projects/" + p1,
-                headers_json,
-                None,
-                204,
-                None,
-                None,
-            )
-        if p2:
-            engine.test(
-                "Delete project P2",
-                "DELETE",
-                "/admin/v1/projects/" + p2,
-                headers_json,
-                None,
-                204,
-                None,
-                None,
-            )
-        if p3:
-            engine.test(
-                "Delete project P3",
-                "DELETE",
-                "/admin/v1/projects/" + p3,
-                headers_json,
-                None,
-                204,
-                None,
-                None,
-            )
-        if padmin:
-            engine.test(
-                "Delete project Padmin",
-                "DELETE",
-                "/admin/v1/projects/" + padmin,
-                headers_json,
-                None,
-                204,
-                None,
-                None,
-            )
-        if pbad:
-            engine.test(
-                "Delete bad project",
-                "DELETE",
-                "/admin/v1/projects/" + pbad,
-                headers_json,
-                None,
-                204,
-                None,
-                None,
-            )
-
-        # BEGIN New Tests - Addressing Projects/Users by Name/ID
-        pid1 = pid2 = None
-        uid1 = uid2 = None
-        res = engine.test(
-            "Create new project P1",
-            "POST",
-            "/admin/v1/projects",
-            headers_json,
-            {"name": "P1"},
-            201,
-            {"Location": "/admin/v1/projects/", "Content-Type": "application/json"},
-            "json",
-        )
-        if res:
-            pid1 = res.json()["id"]
-            # print("# pid =", pid1)
-        res = engine.test(
-            "Create new project P2",
-            "POST",
-            "/admin/v1/projects",
-            headers_json,
-            {"name": "P2"},
-            201,
-            {"Location": "/admin/v1/projects/", "Content-Type": "application/json"},
-            "json",
-        )
-        if res:
-            pid2 = res.json()["id"]
-            # print("# pid =", pid2)
-        data = {"username": "U1", "password": "pw1"}
-        data["project_role_mappings"] = [{"project": pid1, "role": rpu}]
-        res = engine.test(
-            "Create new user U1",
-            "POST",
-            "/admin/v1/users",
-            headers_json,
-            data,
-            201,
-            {"Location": "/admin/v1/users/", "Content-Type": "application/json"},
-            "json",
-        )
-        if res:
-            uid1 = res.json()["id"]
-            # print("# uid =", uid1)
-        data = {"username": "U2", "password": "pw2"}
-        data["project_role_mappings"] = [{"project": pid2, "role": rpu}]
-        res = engine.test(
-            "Create new user U2",
-            "POST",
-            "/admin/v1/users",
-            headers_json,
-            data,
-            201,
-            {"Location": "/admin/v1/users/", "Content-Type": "application/json"},
-            "json",
-        )
-        if res:
-            uid2 = res.json()["id"]
-            # print("# uid =", uid2)
-        if pid1:
-            engine.test(
-                "Get Project P1 by Name",
-                "GET",
-                "/admin/v1/projects/P1",
-                headers_json,
-                None,
-                200,
-                None,
-                "json",
-            )
-            engine.test(
-                "Get Project P1 by ID",
-                "GET",
-                "/admin/v1/projects/" + pid1,
-                headers_json,
-                None,
-                200,
-                None,
-                "json",
-            )
-        if uid1:
-            engine.test(
-                "Get User U1 by Name",
-                "GET",
-                "/admin/v1/users/U1",
-                headers_json,
-                None,
-                200,
-                None,
-                "json",
-            )
-            engine.test(
-                "Get User U1 by ID",
-                "GET",
-                "/admin/v1/users/" + uid1,
-                headers_json,
-                None,
-                200,
-                None,
-                "json",
-            )
-        if pid1:
-            res = engine.test(
-                "Rename Project P1 by Name",
-                "PUT",
-                "/admin/v1/projects/P1",
-                headers_json,
-                {"name": "P3"},
-                204,
-                None,
-                None,
-            )
-            if res:
-                engine.test(
-                    "Get Project P1 by new Name",
-                    "GET",
-                    "/admin/v1/projects/P3",
-                    headers_json,
-                    None,
-                    200,
-                    None,
-                    "json",
-                )
-        if pid2:
-            res = engine.test(
-                "Rename Project P2 by ID",
-                "PUT",
-                "/admin/v1/projects/" + pid2,
-                headers_json,
-                {"name": "P4"},
-                204,
-                None,
-                None,
-            )
-            if res:
-                engine.test(
-                    "Get Project P2 by new Name",
-                    "GET",
-                    "/admin/v1/projects/P4",
-                    headers_json,
-                    None,
-                    200,
-                    None,
-                    "json",
-                )
-
-        if uid1:
-            res = engine.test(
-                "Rename User U1 by Name",
-                "PUT",
-                "/admin/v1/users/U1",
-                headers_json,
-                {"username": "U3"},
-                204,
-                None,
-                None,
-            )
-            if res:
-                engine.test(
-                    "Get User U1 by new Name",
-                    "GET",
-                    "/admin/v1/users/U3",
-                    headers_json,
-                    None,
-                    200,
-                    None,
-                    "json",
-                )
-
-        if uid2:
-            res = engine.test(
-                "Rename User U2 by ID",
-                "PUT",
-                "/admin/v1/users/" + uid2,
-                headers_json,
-                {"username": "U4"},
-                204,
-                None,
-                None,
-            )
-            if res:
-                engine.test(
-                    "Get User U2 by new Name",
-                    "GET",
-                    "/admin/v1/users/U4",
-                    headers_json,
-                    None,
-                    200,
-                    None,
-                    "json",
-                )
-        if uid1:
-            res = engine.test(
-                "Delete User U1 by Name",
-                "DELETE",
-                "/admin/v1/users/U3",
-                headers_json,
-                None,
-                204,
-                None,
-                None,
-            )
-            if res:
-                uid1 = None
-
-        if uid2:
-            res = engine.test(
-                "Delete User U2 by ID",
-                "DELETE",
-                "/admin/v1/users/" + uid2,
-                headers_json,
-                None,
-                204,
-                None,
-                None,
-            )
-            if res:
-                uid2 = None
-
-        if pid1:
-            res = engine.test(
-                "Delete Project P1 by Name",
-                "DELETE",
-                "/admin/v1/projects/P3",
-                headers_json,
-                None,
-                204,
-                None,
-                None,
-            )
-            if res:
-                pid1 = None
-
-        if pid2:
-            res = engine.test(
-                "Delete Project P2 by ID",
-                "DELETE",
-                "/admin/v1/projects/" + pid2,
-                headers_json,
-                None,
-                204,
-                None,
-                None,
-            )
-            if res:
-                pid2 = None
-
-        # END New Tests - Addressing Projects/Users by Name
-
-        # CLEANUP
-        if pid1:
-            engine.test(
-                "Delete Project P1",
-                "DELETE",
-                "/admin/v1/projects/" + pid1,
-                headers_json,
-                None,
-                204,
-                None,
-                None,
-            )
-        if pid2:
-            engine.test(
-                "Delete Project P2",
-                "DELETE",
-                "/admin/v1/projects/" + pid2,
-                headers_json,
-                None,
-                204,
-                None,
-                None,
-            )
-        if uid1:
-            engine.test(
-                "Delete User U1",
-                "DELETE",
-                "/admin/v1/users/" + uid1,
-                headers_json,
-                None,
-                204,
-                None,
-                None,
-            )
-        if uid2:
-            engine.test(
-                "Delete User U2",
-                "DELETE",
-                "/admin/v1/users/" + uid2,
-                headers_json,
-                None,
-                204,
-                None,
-                None,
-            )
-
-        engine.remove_authorization()  # To finish
-
-
-class TestProjectsDescriptors:
-    description = "test descriptors visibility among projects"
-
-    @staticmethod
-    def run(engine, test_osm, manual_check, test_params=None):
-        vnfd_ids = []
-        engine.set_test_name("ProjectDescriptors")
-        engine.get_autorization()
-
-        project_admin_id = None
-        res = engine.test(
-            "Get my project Padmin",
-            "GET",
-            "/admin/v1/projects/{}".format(engine.project),
-            headers_json,
-            None,
-            200,
-            r_header_json,
-            "json",
-        )
-        if res:
-            response = res.json()
-            project_admin_id = response["_id"]
-        engine.test(
-            "Create project Padmin",
-            "POST",
-            "/admin/v1/projects",
-            headers_json,
-            {"name": "Padmin", "admin": True},
-            (201, 204),
-            {"Location": "/admin/v1/projects/", "Content-Type": "application/json"},
-            "json",
-        )
-        engine.test(
-            "Create project P2",
-            "POST",
-            "/admin/v1/projects",
-            headers_json,
-            {"name": "P2"},
-            (201, 204),
-            {"Location": "/admin/v1/projects/", "Content-Type": "application/json"},
-            "json",
-        )
-        engine.test(
-            "Create project P3",
-            "POST",
-            "/admin/v1/projects",
-            headers_json,
-            {"name": "P3"},
-            (201, 204),
-            {"Location": "/admin/v1/projects/", "Content-Type": "application/json"},
-            "json",
-        )
-
-        engine.test(
-            "Create user U1",
-            "POST",
-            "/admin/v1/users",
-            headers_json,
-            {
-                "username": "U1",
-                "password": "pw1",
-                "project_role_mappings": [
-                    {"project": "Padmin", "role": "system_admin"},
-                    {"project": "P2", "role": "project_admin"},
-                    {"project": "P3", "role": "project_admin"},
-                ],
-            },
-            201,
-            {"Location": "/admin/v1/users/", "Content-Type": "application/json"},
-            "json",
-        )
-
-        engine.test(
-            "Onboard VNFD id1",
-            "POST",
-            "/vnfpkgm/v1/vnf_packages_content?id=id1",
-            headers_yaml,
-            TestDescriptors.vnfd_empty,
-            201,
-            r_headers_yaml_location_vnfd,
-            "yaml",
-        )
-        vnfd_ids.append(engine.last_id)
-        engine.test(
-            "Onboard VNFD id2 PUBLIC",
-            "POST",
-            "/vnfpkgm/v1/vnf_packages_content?id=id2&PUBLIC=TRUE",
-            headers_yaml,
-            TestDescriptors.vnfd_empty,
-            201,
-            r_headers_yaml_location_vnfd,
-            "yaml",
-        )
-        vnfd_ids.append(engine.last_id)
-        engine.test(
-            "Onboard VNFD id3",
-            "POST",
-            "/vnfpkgm/v1/vnf_packages_content?id=id3&PUBLIC=FALSE",
-            headers_yaml,
-            TestDescriptors.vnfd_empty,
-            201,
-            r_headers_yaml_location_vnfd,
-            "yaml",
-        )
-        vnfd_ids.append(engine.last_id)
-
-        res = engine.test(
-            "Get VNFD descriptors",
-            "GET",
-            "/vnfpkgm/v1/vnf_packages?id=id1,id2,id3",
-            headers_json,
-            None,
-            200,
-            r_header_json,
-            "json",
-        )
-        response = res.json()
-        if len(response) != 3:
-            logger.error(
-                "Only 3 vnfds should be present for project admin. {} listed".format(
-                    len(response)
-                )
-            )
-            engine.failed_tests += 1
-
-        # Change to other project Padmin
-        res = engine.test(
-            "Change to user U1 project Padmin",
-            "POST",
-            "/admin/v1/tokens",
-            headers_json,
-            {"username": "U1", "password": "pw1", "project_id": "Padmin"},
-            (200, 201),
-            r_header_json,
-            "json",
-        )
-        if res:
-            response = res.json()
-            engine.set_header({"Authorization": "Bearer {}".format(response["id"])})
-
-        # list vnfds
-        res = engine.test(
-            "List VNFD descriptors for Padmin",
-            "GET",
-            "/vnfpkgm/v1/vnf_packages",
-            headers_json,
-            None,
-            200,
-            r_header_json,
-            "json",
-        )
-        response = res.json()
-        if len(response) != 0:
-            logger.error(
-                "Only 0 vnfds should be present for project Padmin. {} listed".format(
-                    len(response)
-                )
-            )
-            engine.failed_tests += 1
-
-        # list Public vnfds
-        res = engine.test(
-            "List VNFD public descriptors",
-            "GET",
-            "/vnfpkgm/v1/vnf_packages?PUBLIC=True",
-            headers_json,
-            None,
-            200,
-            r_header_json,
-            "json",
-        )
-        response = res.json()
-        if len(response) != 1:
-            logger.error(
-                "Only 1 vnfds should be present for project Padmin. {} listed".format(
-                    len(response)
-                )
-            )
-            engine.failed_tests += 1
-
-        # list vnfds belonging to project "admin"
-        res = engine.test(
-            "List VNFD of admin project",
-            "GET",
-            "/vnfpkgm/v1/vnf_packages?ADMIN={}".format(project_admin_id),
-            headers_json,
-            None,
-            200,
-            r_header_json,
-            "json",
-        )
-        if res:
-            response = res.json()
-            if len(response) != 3:
-                logger.error(
-                    "Only 3 vnfds should be present for project Padmin. {} listed".format(
-                        len(response)
-                    )
-                )
-                engine.failed_tests += 1
-
-        # Get Public vnfds
-        engine.test(
-            "Get VNFD public descriptors",
-            "GET",
-            "/vnfpkgm/v1/vnf_packages/{}".format(vnfd_ids[1]),
-            headers_json,
-            None,
-            200,
-            r_header_json,
-            "json",
-        )
-        # Edit not owned vnfd
-        engine.test(
-            "Edit VNFD ",
-            "PATCH",
-            "/vnfpkgm/v1/vnf_packages/{}".format(vnfd_ids[0]),
-            headers_yaml,
-            "{name: pepe}",
-            404,
-            r_header_yaml,
-            "yaml",
-        )
-
-        # Add to my catalog
-        engine.test(
-            "Add VNFD id2 to my catalog",
-            "PATCH",
-            "/vnfpkgm/v1/vnf_packages/{}?SET_PROJECT".format(vnfd_ids[1]),
-            headers_json,
-            None,
-            204,
-            None,
-            0,
-        )
-
-        # Add a new vnfd
-        engine.test(
-            "Onboard VNFD id4",
-            "POST",
-            "/vnfpkgm/v1/vnf_packages_content?id=id4",
-            headers_yaml,
-            TestDescriptors.vnfd_empty,
-            201,
-            r_headers_yaml_location_vnfd,
-            "yaml",
-        )
-        vnfd_ids.append(engine.last_id)
-
-        # list vnfds
-        res = engine.test(
-            "List VNFD public descriptors",
-            "GET",
-            "/vnfpkgm/v1/vnf_packages",
-            headers_json,
-            None,
-            200,
-            r_header_json,
-            "json",
-        )
-        response = res.json()
-        if len(response) != 2:
-            logger.error(
-                "Only 2 vnfds should be present for project Padmin. {} listed".format(
-                    len(response)
-                )
-            )
-            engine.failed_tests += 1
-
-        if manual_check:
-            input(
-                "VNFDs have been omboarded. Perform manual check and press enter to resume"
-            )
-
-        test_rest.test(
-            "Delete VNFD id2",
-            "DELETE",
-            "/vnfpkgm/v1/vnf_packages/{}".format(vnfd_ids[1]),
-            headers_yaml,
-            None,
-            204,
-            None,
-            0,
-        )
-
-        # change to admin project
-        engine.remove_authorization()  # To force get authorization
-        engine.get_autorization()
-        test_rest.test(
-            "Delete VNFD id1",
-            "DELETE",
-            "/vnfpkgm/v1/vnf_packages/{}".format(vnfd_ids[0]),
-            headers_yaml,
-            None,
-            204,
-            None,
-            0,
-        )
-        test_rest.test(
-            "Delete VNFD id2",
-            "DELETE",
-            "/vnfpkgm/v1/vnf_packages/{}".format(vnfd_ids[1]),
-            headers_yaml,
-            None,
-            204,
-            None,
-            0,
-        )
-        test_rest.test(
-            "Delete VNFD id3",
-            "DELETE",
-            "/vnfpkgm/v1/vnf_packages/{}".format(vnfd_ids[2]),
-            headers_yaml,
-            None,
-            204,
-            None,
-            0,
-        )
-        test_rest.test(
-            "Delete VNFD id4",
-            "DELETE",
-            "/vnfpkgm/v1/vnf_packages/{}".format(vnfd_ids[3]),
-            headers_yaml,
-            None,
-            404,
-            r_header_yaml,
-            "yaml",
-        )
-        test_rest.test(
-            "Delete VNFD id4",
-            "DELETE",
-            "/vnfpkgm/v1/vnf_packages/{}?ADMIN".format(vnfd_ids[3]),
-            headers_yaml,
-            None,
-            204,
-            None,
-            0,
-        )
-        # Get Public vnfds
-        engine.test(
-            "Get VNFD deleted id1",
-            "GET",
-            "/vnfpkgm/v1/vnf_packages/{}?ADMIN".format(vnfd_ids[0]),
-            headers_json,
-            None,
-            404,
-            r_header_json,
-            "json",
-        )
-        engine.test(
-            "Get VNFD deleted id2",
-            "GET",
-            "/vnfpkgm/v1/vnf_packages/{}?ADMIN".format(vnfd_ids[1]),
-            headers_json,
-            None,
-            404,
-            r_header_json,
-            "json",
-        )
-        engine.test(
-            "Get VNFD deleted id3",
-            "GET",
-            "/vnfpkgm/v1/vnf_packages/{}?ADMIN".format(vnfd_ids[2]),
-            headers_json,
-            None,
-            404,
-            r_header_json,
-            "json",
-        )
-        engine.test(
-            "Get VNFD deleted id4",
-            "GET",
-            "/vnfpkgm/v1/vnf_packages/{}?ADMIN".format(vnfd_ids[3]),
-            headers_json,
-            None,
-            404,
-            r_header_json,
-            "json",
-        )
-
-        engine.test(
-            "Delete user U1",
-            "DELETE",
-            "/admin/v1/users/U1",
-            headers_json,
-            None,
-            204,
-            None,
-            None,
-        )
-        engine.test(
-            "Delete project Padmin",
-            "DELETE",
-            "/admin/v1/projects/Padmin",
-            headers_json,
-            None,
-            204,
-            None,
-            None,
-        )
-        engine.test(
-            "Delete project P2",
-            "DELETE",
-            "/admin/v1/projects/P2",
-            headers_json,
-            None,
-            204,
-            None,
-            None,
-        )
-        engine.test(
-            "Delete project P3",
-            "DELETE",
-            "/admin/v1/projects/P3",
-            headers_json,
-            None,
-            204,
-            None,
-            None,
-        )
-
-
class TestFakeVim:
    # Exercises the admin VIM CRUD endpoints with a "fake" VIM whose URL does
    # not point at a real VIM, so no actual VIM connectivity is required.
    description = "Creates/edit/delete fake VIMs and SDN controllers"

    def __init__(self):
        # Minimal valid VIM-account payload; vim_url is intentionally bogus.
        self.vim = {
            "schema_version": "1.0",
            "schema_type": "No idea",
            "name": "myVim",
            "description": "Descriptor name",
            "vim_type": "openstack",
            "vim_url": "http://localhost:/vim",
            "vim_tenant_name": "vimTenant",
            "vim_user": "user",
            "vim_password": "password",
            "config": {"config_param": 1},
        }
        # SDN-controller payload (used by subclasses such as TestVIMSDN).
        self.sdn = {
            "name": "sdn-name",
            "description": "sdn-description",
            "dpid": "50:50:52:54:00:94:21:21",
            "ip": "192.168.15.17",
            "port": 8080,
            "type": "opendaylight",
            "version": "3.5.6",
            "user": "user",
            "password": "passwd",
        }
        # Compute-node PCI address -> switch-port mapping, injected as the
        # VIM "sdn-port-mapping" config by subclasses.
        self.port_mapping = [
            {
                "compute_node": "compute node 1",
                "ports": [
                    {
                        "pci": "0000:81:00.0",
                        "switch_port": "port-2/1",
                        "switch_mac": "52:54:00:94:21:21",
                    },
                    {
                        "pci": "0000:81:00.1",
                        "switch_port": "port-2/2",
                        "switch_mac": "52:54:00:94:21:22",
                    },
                ],
            },
            {
                "compute_node": "compute node 2",
                "ports": [
                    {
                        "pci": "0000:81:00.0",
                        "switch_port": "port-2/3",
                        "switch_mac": "52:54:00:94:21:23",
                    },
                    {
                        "pci": "0000:81:00.1",
                        "switch_port": "port-2/4",
                        "switch_mac": "52:54:00:94:21:24",
                    },
                ],
            },
        ]

    def run(self, engine, test_osm, manual_check, test_params=None):
        """Create a VIM, verify schema/duplicate errors, list/show it, delete it.

        When test_osm is False the VIM is force-deleted (synchronous) and the
        404 is checked immediately; otherwise a regular delete is issued and
        we poll until the record disappears (module-level 'timeout').
        test_params is unused here; kept for run() interface uniformity.
        """

        vim_bad = self.vim.copy()
        vim_bad.pop("name")

        engine.set_test_name("FakeVim")
        engine.get_autorization()
        engine.test(
            "Create VIM",
            "POST",
            "/admin/v1/vim_accounts",
            headers_json,
            self.vim,
            (201, 202),
            {"Location": "/admin/v1/vim_accounts/", "Content-Type": "application/json"},
            "json",
        )
        vim_id = engine.last_id
        # NOTE(review): the last argument below is headers_json where sibling
        # calls pass a payload-format string such as "json" -- confirm against
        # the engine.test() signature.
        engine.test(
            "Create VIM without name, bad schema",
            "POST",
            "/admin/v1/vim_accounts",
            headers_json,
            vim_bad,
            422,
            None,
            headers_json,
        )
        engine.test(
            "Create VIM name repeated",
            "POST",
            "/admin/v1/vim_accounts",
            headers_json,
            self.vim,
            409,
            None,
            headers_json,
        )
        engine.test(
            "Show VIMs",
            "GET",
            "/admin/v1/vim_accounts",
            headers_yaml,
            None,
            200,
            r_header_yaml,
            "yaml",
        )
        engine.test(
            "Show VIM",
            "GET",
            "/admin/v1/vim_accounts/{}".format(vim_id),
            headers_yaml,
            None,
            200,
            r_header_yaml,
            "yaml",
        )
        if not test_osm:
            # delete with FORCE
            engine.test(
                "Delete VIM",
                "DELETE",
                "/admin/v1/vim_accounts/{}?FORCE=True".format(vim_id),
                headers_yaml,
                None,
                202,
                None,
                0,
            )
            engine.test(
                "Check VIM is deleted",
                "GET",
                "/admin/v1/vim_accounts/{}".format(vim_id),
                headers_yaml,
                None,
                404,
                r_header_yaml,
                "yaml",
            )
        else:
            # delete and wait until is really deleted
            engine.test(
                "Delete VIM",
                "DELETE",
                "/admin/v1/vim_accounts/{}".format(vim_id),
                headers_yaml,
                None,
                202,
                None,
                0,
            )
            engine.wait_until_delete(
                "/admin/v1/vim_accounts/{}".format(vim_id), timeout
            )
-
-
class TestVIMSDN(TestFakeVim):
    """Create an SDN controller, attach it to a VIM via config, edit the
    port-mapping, create a WIM, and tear everything down.

    Inherits the fake VIM/SDN payloads and port mapping from TestFakeVim.
    """

    description = "Creates VIM with SDN editing SDN controllers and port_mapping"

    def __init__(self):
        TestFakeVim.__init__(self)
        # WIM-account payload; like the VIM, the URL is intentionally bogus.
        self.wim = {
            "schema_version": "1.0",
            "schema_type": "No idea",
            "name": "myWim",
            "description": "Descriptor name",
            "wim_type": "odl",
            "wim_url": "http://localhost:/wim",
            "user": "user",
            "password": "password",
            "config": {"config_param": 1},
        }

    def run(self, engine, test_osm, manual_check, test_params=None):
        """Execute the VIM+SDN+WIM CRUD scenario against the NBI.

        :param engine: test engine wrapping HTTP calls, counters and auth.
        :param test_osm: when True, deletes are asynchronous and we poll until
            the resources are really gone; otherwise FORCE deletes are used.
        :param manual_check: pause for operator inspection before deletion.
        :param test_params: unused here; kept for run() interface uniformity.
        """
        engine.set_test_name("VimSdn")
        engine.get_autorization()
        # Added SDN
        engine.test(
            "Create SDN",
            "POST",
            "/admin/v1/sdns",
            headers_json,
            self.sdn,
            (201, 202),
            {"Location": "/admin/v1/sdns/", "Content-Type": "application/json"},
            "json",
        )
        sdnc_id = engine.last_id
        # sleep(5)
        # Edit SDN
        engine.test(
            "Edit SDN",
            "PATCH",
            "/admin/v1/sdns/{}".format(sdnc_id),
            headers_json,
            {"name": "new_sdn_name"},
            (202, 204),
            None,
            None,
        )
        # sleep(5)
        # VIM with SDN: reference the controller and port mapping in config.
        self.vim["config"]["sdn-controller"] = sdnc_id
        self.vim["config"]["sdn-port-mapping"] = self.port_mapping
        # Fixed: a stray trailing comma used to turn this statement into an
        # accidental 1-tuple (harmless at runtime but clearly unintended).
        engine.test(
            "Create VIM",
            "POST",
            "/admin/v1/vim_accounts",
            headers_json,
            self.vim,
            (200, 202, 201),
            {"Location": "/admin/v1/vim_accounts/", "Content-Type": "application/json"},
            "json",
        )

        vim_id = engine.last_id
        self.port_mapping[0]["compute_node"] = "compute node XX"
        engine.test(
            "Edit VIM change port-mapping",
            "PUT",
            "/admin/v1/vim_accounts/{}".format(vim_id),
            headers_json,
            {"config": {"sdn-port-mapping": self.port_mapping}},
            (202, 204),
            None,
            None,
        )
        engine.test(
            "Edit VIM remove port-mapping",
            "PUT",
            "/admin/v1/vim_accounts/{}".format(vim_id),
            headers_json,
            {"config": {"sdn-port-mapping": None}},
            (202, 204),
            None,
            None,
        )

        # Fixed: same stray trailing comma removed here as well.
        engine.test(
            "Create WIM",
            "POST",
            "/admin/v1/wim_accounts",
            headers_json,
            self.wim,
            (200, 202, 201),
            {"Location": "/admin/v1/wim_accounts/", "Content-Type": "application/json"},
            "json",
        )
        wim_id = engine.last_id

        if not test_osm:
            # delete with FORCE
            engine.test(
                "Delete VIM remove port-mapping",
                "DELETE",
                "/admin/v1/vim_accounts/{}?FORCE=True".format(vim_id),
                headers_json,
                None,
                202,
                None,
                0,
            )
            engine.test(
                "Delete SDNC",
                "DELETE",
                "/admin/v1/sdns/{}?FORCE=True".format(sdnc_id),
                headers_json,
                None,
                202,
                None,
                0,
            )

            engine.test(
                "Delete WIM",
                "DELETE",
                "/admin/v1/wim_accounts/{}?FORCE=True".format(wim_id),
                headers_json,
                None,
                202,
                None,
                0,
            )
            engine.test(
                "Check VIM is deleted",
                "GET",
                "/admin/v1/vim_accounts/{}".format(vim_id),
                headers_yaml,
                None,
                404,
                r_header_yaml,
                "yaml",
            )
            engine.test(
                "Check SDN is deleted",
                "GET",
                "/admin/v1/sdns/{}".format(sdnc_id),
                headers_yaml,
                None,
                404,
                r_header_yaml,
                "yaml",
            )
            engine.test(
                "Check WIM is deleted",
                "GET",
                "/admin/v1/wim_accounts/{}".format(wim_id),
                headers_yaml,
                None,
                404,
                r_header_yaml,
                "yaml",
            )
        else:
            if manual_check:
                input(
                    "VIM, SDN, WIM has been deployed. Perform manual check and press enter to resume"
                )
            # delete and wait until is really deleted
            engine.test(
                "Delete VIM remove port-mapping",
                "DELETE",
                "/admin/v1/vim_accounts/{}".format(vim_id),
                headers_json,
                None,
                (202, 201, 204),
                None,
                0,
            )
            engine.test(
                "Delete SDN",
                "DELETE",
                "/admin/v1/sdns/{}".format(sdnc_id),
                headers_json,
                None,
                (202, 201, 204),
                None,
                0,
            )
            # Fixed mislabeled description: this call deletes the WIM account
            # (it targets /admin/v1/wim_accounts) but was labeled "Delete VIM".
            engine.test(
                "Delete WIM",
                "DELETE",
                "/admin/v1/wim_accounts/{}".format(wim_id),
                headers_json,
                None,
                (202, 201, 204),
                None,
                0,
            )
            engine.wait_until_delete(
                "/admin/v1/vim_accounts/{}".format(vim_id), timeout
            )
            engine.wait_until_delete("/admin/v1/sdns/{}".format(sdnc_id), timeout)
            engine.wait_until_delete(
                "/admin/v1/wim_accounts/{}".format(wim_id), timeout
            )
-
-
class TestDeploy:
    """Base scenario: download descriptor packages, onboard them, deploy an
    NS in a real VIM, optionally run SSH checks inside the VNFs, then
    terminate the NS and delete the descriptors.

    Subclasses customize descriptor filenames/URL, post-onboarding descriptor
    edits (self.descriptor_edit) and the per-VNF commands/credentials used by
    test_ns().
    """

    description = "Base class for downloading descriptors from ETSI, onboard and deploy in real VIM"

    def __init__(self):
        self.test_name = "DEPLOY"
        self.nsd_id = None
        self.vim_id = None
        self.ns_id = None
        self.vnfds_id = []
        # Base URL the descriptor packages are fetched from.
        self.descriptor_url = (
            "https://osm-download.etsi.org/ftp/osm-3.0-three/2nd-hackfest/packages/"
        )
        self.vnfd_filenames = ("cirros_vnf.tar.gz",)
        self.nsd_filename = "cirros_2vnf_ns.tar.gz"
        # Optional dict of descriptor patches, keyed "vnfd<i>" / "nsd".
        self.descriptor_edit = None
        self.uses_configuration = False
        # Per member-vnf-index SSH credentials/commands for test_ns().
        self.users = {}
        self.passwords = {}
        self.commands = {}
        self.keys = {}
        self.timeout = 120
        self.qforce = ""  # e.g. "?FORCE=True" to force onboarding
        self.ns_params = None
        # Cache: member-vnf-index -> "ip[;ip...]" string from the VNFR.
        self.vnfr_ip_list = {}

    def create_descriptors(self, engine):
        """Download (or reuse) the VNFD/NSD packages and onboard them.

        Randomly exercises either the one-step (…_content) or the two-step
        (create + upload) SOL005 onboarding flow, then applies the optional
        self.descriptor_edit patches.

        :param engine: test engine wrapping HTTP calls; IDs of onboarded
            descriptors are stored in self.vnfds_id / self.nsd_id.
        :raises TestException: when a locally-referenced file is missing or a
            descriptor download fails.
        """
        temp_dir = os.path.dirname(os.path.abspath(__file__)) + "/temp/"
        if not os.path.exists(temp_dir):
            os.makedirs(temp_dir)
        for vnfd_index, vnfd_filename in enumerate(self.vnfd_filenames):
            if "/" in vnfd_filename:
                # A path was given: the package must already exist locally.
                vnfd_filename_path = vnfd_filename
                if not os.path.exists(vnfd_filename_path):
                    raise TestException(
                        "File '{}' does not exist".format(vnfd_filename_path)
                    )
            else:
                # Bare filename: download into the temp dir if not cached.
                vnfd_filename_path = temp_dir + vnfd_filename
                if not os.path.exists(vnfd_filename_path):
                    with open(vnfd_filename_path, "wb") as file:
                        response = requests.get(self.descriptor_url + vnfd_filename)
                        if response.status_code >= 300:
                            raise TestException(
                                "Error downloading descriptor from '{}': {}".format(
                                    self.descriptor_url + vnfd_filename,
                                    response.status_code,
                                )
                            )
                        file.write(response.content)
            if vnfd_filename_path.endswith(".yaml"):
                headers = headers_yaml
            else:
                headers = headers_zip_yaml
            if randint(0, 1) == 0:
                # vnfd CREATE AND UPLOAD in one step:
                engine.test(
                    "Onboard VNFD in one step",
                    "POST",
                    "/vnfpkgm/v1/vnf_packages_content" + self.qforce,
                    headers,
                    "@b" + vnfd_filename_path,
                    201,
                    r_headers_yaml_location_vnfd,
                    "yaml",
                )
                self.vnfds_id.append(engine.last_id)
            else:
                # vnfd CREATE AND UPLOAD ZIP
                engine.test(
                    "Onboard VNFD step 1",
                    "POST",
                    "/vnfpkgm/v1/vnf_packages",
                    headers_json,
                    None,
                    201,
                    {
                        "Location": "/vnfpkgm/v1/vnf_packages/",
                        "Content-Type": "application/json",
                    },
                    "json",
                )
                self.vnfds_id.append(engine.last_id)
                engine.test(
                    "Onboard VNFD step 2 as ZIP",
                    "PUT",
                    "/vnfpkgm/v1/vnf_packages/<>/package_content" + self.qforce,
                    headers,
                    "@b" + vnfd_filename_path,
                    204,
                    None,
                    0,
                )

            if self.descriptor_edit:
                if "vnfd{}".format(vnfd_index) in self.descriptor_edit:
                    # Modify VNFD
                    engine.test(
                        "Edit VNFD ",
                        "PATCH",
                        "/vnfpkgm/v1/vnf_packages/{}".format(self.vnfds_id[-1]),
                        headers_yaml,
                        self.descriptor_edit["vnfd{}".format(vnfd_index)],
                        204,
                        None,
                        None,
                    )

        if "/" in self.nsd_filename:
            nsd_filename_path = self.nsd_filename
            if not os.path.exists(nsd_filename_path):
                raise TestException(
                    "File '{}' does not exist".format(nsd_filename_path)
                )
        else:
            nsd_filename_path = temp_dir + self.nsd_filename
            if not os.path.exists(nsd_filename_path):
                with open(nsd_filename_path, "wb") as file:
                    response = requests.get(self.descriptor_url + self.nsd_filename)
                    if response.status_code >= 300:
                        raise TestException(
                            "Error downloading descriptor from '{}': {}".format(
                                self.descriptor_url + self.nsd_filename,
                                response.status_code,
                            )
                        )
                    file.write(response.content)
        if nsd_filename_path.endswith(".yaml"):
            headers = headers_yaml
        else:
            headers = headers_zip_yaml

        if randint(0, 1) == 0:
            # nsd CREATE AND UPLOAD in one step:
            engine.test(
                "Onboard NSD in one step",
                "POST",
                "/nsd/v1/ns_descriptors_content" + self.qforce,
                headers,
                "@b" + nsd_filename_path,
                201,
                r_headers_yaml_location_nsd,
                # Bug fix: the yaml MODULE object was passed here instead of
                # the expected response-format string "yaml".
                "yaml",
            )
            self.nsd_id = engine.last_id
        else:
            # nsd CREATE AND UPLOAD ZIP
            engine.test(
                "Onboard NSD step 1",
                "POST",
                "/nsd/v1/ns_descriptors",
                headers_json,
                None,
                201,
                {
                    "Location": "/nsd/v1/ns_descriptors/",
                    "Content-Type": "application/json",
                },
                "json",
            )
            self.nsd_id = engine.last_id
            engine.test(
                "Onboard NSD step 2 as ZIP",
                "PUT",
                "/nsd/v1/ns_descriptors/<>/nsd_content" + self.qforce,
                headers,
                "@b" + nsd_filename_path,
                204,
                None,
                0,
            )

        if self.descriptor_edit and "nsd" in self.descriptor_edit:
            # Modify NSD
            engine.test(
                "Edit NSD ",
                "PATCH",
                "/nsd/v1/ns_descriptors/{}".format(self.nsd_id),
                headers_yaml,
                self.descriptor_edit["nsd"],
                204,
                None,
                None,
            )

    def delete_descriptors(self, engine):
        """Delete the onboarded NSD and every onboarded VNFD."""
        # delete descriptors
        engine.test(
            "Delete NSSD SOL005",
            "DELETE",
            "/nsd/v1/ns_descriptors/{}".format(self.nsd_id),
            headers_yaml,
            None,
            204,
            None,
            0,
        )
        for vnfd_id in self.vnfds_id:
            engine.test(
                "Delete VNFD SOL005",
                "DELETE",
                "/vnfpkgm/v1/vnf_packages/{}".format(vnfd_id),
                headers_yaml,
                None,
                204,
                None,
                0,
            )

    def instantiate(self, engine, ns_data):
        """Create the NS record and launch instantiation (SOL005 two-step).

        Stores the NS id in self.ns_id. When the module-level 'test_osm' flag
        is set, waits for the instantiation operation to complete.

        :param ns_data: dict with nsName/nsdId/vimAccountId etc.
        """
        ns_data_text = yaml.safe_dump(ns_data, default_flow_style=True, width=256)
        # create NS Two steps
        r = engine.test(
            "Create NS step 1",
            "POST",
            "/nslcm/v1/ns_instances",
            headers_yaml,
            ns_data_text,
            (201, 202),
            {"Location": "nslcm/v1/ns_instances/", "Content-Type": "application/yaml"},
            "yaml",
        )
        if not r:
            return
        self.ns_id = engine.last_id
        engine.test(
            "Instantiate NS step 2",
            "POST",
            "/nslcm/v1/ns_instances/{}/instantiate".format(self.ns_id),
            headers_yaml,
            ns_data_text,
            (201, 202),
            r_headers_yaml_location_nslcmop,
            "yaml",
        )
        nslcmop_id = engine.last_id

        # NOTE: 'test_osm' here is the module-level global, not a parameter.
        if test_osm:
            # Wait until status is Ok
            timeout = timeout_configure if self.uses_configuration else timeout_deploy
            engine.wait_operation_ready("ns", nslcmop_id, timeout)

    def terminate(self, engine):
        """Terminate and delete the NS, then verify it and its operations
        are gone.

        :raises TestException: when ns_lcm_op_occ records survive deletion.
        """
        # remove deployment. NOTE: 'test_osm' is the module-level global.
        if test_osm:
            engine.test(
                "Terminate NS",
                "POST",
                "/nslcm/v1/ns_instances/{}/terminate".format(self.ns_id),
                headers_yaml,
                None,
                (201, 202),
                r_headers_yaml_location_nslcmop,
                "yaml",
            )
            nslcmop2_id = engine.last_id
            # Wait until status is Ok
            engine.wait_operation_ready("ns", nslcmop2_id, timeout_deploy)

            engine.test(
                "Delete NS",
                "DELETE",
                "/nslcm/v1/ns_instances/{}".format(self.ns_id),
                headers_yaml,
                None,
                204,
                None,
                0,
            )
        else:
            engine.test(
                "Delete NS with FORCE",
                "DELETE",
                "/nslcm/v1/ns_instances/{}?FORCE=True".format(self.ns_id),
                headers_yaml,
                None,
                204,
                None,
                0,
            )

        # check all it is deleted
        engine.test(
            "Check NS is deleted",
            "GET",
            "/nslcm/v1/ns_instances/{}".format(self.ns_id),
            headers_yaml,
            None,
            404,
            None,
            "yaml",
        )
        r = engine.test(
            "Check NSLCMOPs are deleted",
            "GET",
            "/nslcm/v1/ns_lcm_op_occs?nsInstanceId={}".format(self.ns_id),
            headers_json,
            None,
            200,
            None,
            "json",
        )
        if not r:
            return
        nslcmops = r.json()
        if not isinstance(nslcmops, list) or nslcmops:
            raise TestException(
                "NS {} deleted but with ns_lcm_op_occ active: {}".format(
                    self.ns_id, nslcmops
                )
            )

    def test_ns(
        self,
        engine,
        test_osm,
        commands=None,
        users=None,
        passwds=None,
        keys=None,
        timeout=0,
    ):
        """Run per-VNF SSH command checks, retrying every 20s up to timeout.

        Arguments left as None/0 fall back to the corresponding self.*
        attributes. Updates engine.passed_tests / engine.failed_tests.
        """

        r = engine.test(
            "GET VNFR IDs",
            "GET",
            "/nslcm/v1/ns_instances/{}".format(self.ns_id),
            headers_json,
            None,
            200,
            r_header_json,
            "json",
        )
        if not r:
            return
        ns_data = r.json()

        vnfr_list = ns_data["constituent-vnfr-ref"]
        time = 0
        _commands = commands if commands is not None else self.commands
        _users = users if users is not None else self.users
        _passwds = passwds if passwds is not None else self.passwords
        _keys = keys if keys is not None else self.keys
        _timeout = timeout if timeout != 0 else self.timeout

        # vnfr_list=[d8272263-6bd3-4680-84ca-6a4be23b3f2d, 88b22e2f-994a-4b61-94fd-4a3c90de3dc4]
        for vnfr_id in vnfr_list:
            r = engine.test(
                "Get VNFR to get IP_ADDRESS",
                "GET",
                "/nslcm/v1/vnfrs/{}".format(vnfr_id),
                headers_json,
                None,
                200,
                r_header_json,
                "json",
            )
            if not r:
                continue
            vnfr_data = r.json()

            vnf_index = str(vnfr_data["member-vnf-index-ref"])

            ip_address = self.get_vnfr_ip(engine, vnf_index)
            description = "Exec command='{}' at VNFR={} IP={}".format(
                _commands.get(vnf_index)[0], vnf_index, ip_address
            )
            engine.step += 1
            test_description = "{}{} {}".format(
                engine.test_name, engine.step, description
            )
            logger.warning(test_description)
            # Retry loop: do_checks() returns 1 ok, 0 retry (SSH timeout),
            # -1 hard failure. The while-else fires when _timeout is exceeded.
            while _timeout >= time:
                result, message = self.do_checks(
                    [ip_address],
                    vnf_index=vnfr_data["member-vnf-index-ref"],
                    commands=_commands.get(vnf_index),
                    user=_users.get(vnf_index),
                    passwd=_passwds.get(vnf_index),
                    key=_keys.get(vnf_index),
                )
                if result == 1:
                    engine.passed_tests += 1
                    logger.debug(message)
                    break
                elif result == 0:
                    time += 20
                    sleep(20)
                elif result == -1:
                    engine.failed_tests += 1
                    logger.error(message)
                    break
                else:
                    time -= 20
                    engine.failed_tests += 1
                    logger.error(message)
            else:
                engine.failed_tests += 1
                logger.error(
                    "VNFR {} has not mgmt address. Check failed".format(vnf_index)
                )

    def do_checks(self, ip, vnf_index, commands=None, user=None, passwd=None, key=None):
        """SSH into the VNF at ip[0] (optionally via a PROXY_* jump host) and
        run the given commands.

        :returns: tuple (status, message) with status 1 on success, 0 when a
            retry makes sense (SSH timeout/disconnect), -1 on failure.
        """
        # Bug fix: 'commands=[]' was a mutable default argument.
        if commands is None:
            commands = []
        try:
            import urllib3
            from pssh.clients import ParallelSSHClient
            from pssh.utils import load_private_key
            from ssh2 import exceptions as ssh2Exception
        except ImportError as e:
            logger.critical(
                "Package <pssh> or/and <urllib3> is not installed. Please add them with 'pip3 install "
                "parallel-ssh urllib3': {}".format(e)
            )
            return -1, "install needed packages 'pip3 install parallel-ssh urllib3'"
        urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)
        try:
            p_host = os.environ.get("PROXY_HOST")
            p_user = os.environ.get("PROXY_USER")
            p_password = os.environ.get("PROXY_PASSWD")

            if key:
                pkey = load_private_key(key)
            else:
                pkey = None

            client = ParallelSSHClient(
                ip,
                user=user,
                password=passwd,
                pkey=pkey,
                proxy_host=p_host,
                proxy_user=p_user,
                proxy_password=p_password,
                timeout=10,
                num_retries=0,
            )
            # NOTE(review): this returns after the FIRST command (success or
            # failure); subsequent commands are never executed. Looks
            # unintended, but kept as-is to preserve behavior -- confirm.
            for cmd in commands:
                output = client.run_command(cmd)
                client.join(output)
                if output[ip[0]].exit_code:
                    return -1, "VNFR {} command '{}' returns error: '{}'".format(
                        ip[0], cmd, "\n".join(output[ip[0]].stderr)
                    )
                else:
                    return 1, "VNFR {} command '{}' successful".format(ip[0], cmd)
        except (
            ssh2Exception.ChannelFailure,
            ssh2Exception.SocketDisconnectError,
            ssh2Exception.SocketTimeout,
            ssh2Exception.SocketRecvError,
        ) as e:
            return 0, "Timeout accessing the VNFR {}: {}".format(ip[0], str(e))
        except Exception as e:
            return -1, "ERROR checking the VNFR {}: {}".format(ip[0], str(e))

    def additional_operations(self, engine, test_osm, manual_check):
        """Hook for subclasses: extra operations between deploy and teardown."""
        pass

    def run(self, engine, test_osm, manual_check, test_params=None):
        """Full scenario driver: onboard, instantiate, check, terminate, clean.

        :param test_params: optional dict overriding 'vnfd-files', 'nsd-file',
            'ns-name' and 'ns-config' (YAML string or dict merged into the
            instantiation payload).
        """
        engine.set_test_name(self.test_name)
        engine.get_autorization()
        nsname = os.environ.get("OSMNBITEST_NS_NAME", "OSMNBITEST")
        if test_params:
            if "vnfd-files" in test_params:
                self.vnfd_filenames = test_params["vnfd-files"].split(",")
            if "nsd-file" in test_params:
                self.nsd_filename = test_params["nsd-file"]
            if test_params.get("ns-name"):
                nsname = test_params["ns-name"]
        self.create_descriptors(engine)

        # create real VIM if not exist
        self.vim_id = engine.get_create_vim(test_osm)
        ns_data = {
            "nsDescription": "default description",
            "nsName": nsname,
            "nsdId": self.nsd_id,
            "vimAccountId": self.vim_id,
        }
        if self.ns_params:
            ns_data.update(self.ns_params)
        if test_params and test_params.get("ns-config"):
            if isinstance(test_params["ns-config"], str):
                # Bug fix: 'Loader=yaml.Loader' was previously passed to
                # dict.update() (adding a spurious "Loader" key to the NS
                # payload) instead of to yaml.load(). safe_load is used since
                # the config is plain data.
                ns_data.update(yaml.safe_load(test_params["ns-config"]))
            else:
                ns_data.update(test_params["ns-config"])
        self.instantiate(engine, ns_data)

        if manual_check:
            input(
                "NS has been deployed. Perform manual check and press enter to resume"
            )
        if test_osm and self.commands:
            self.test_ns(engine, test_osm)
        self.additional_operations(engine, test_osm, manual_check)
        self.terminate(engine)
        self.delete_descriptors(engine)

    def get_first_ip(self, ip_string):
        """Return the first IP of a possibly semicolon-separated IP list."""
        # When using a floating IP, the vnfr_data['ip-address'] contains a semicolon-separated list of IP:s.
        first_ip = ip_string.split(";")[0] if ip_string else ""
        return first_ip

    def get_vnfr_ip(self, engine, vnfr_index_wanted):
        """Return the management IP of the VNFR with the given member index,
        querying the NBI on first use and caching in self.vnfr_ip_list."""
        # If the IP address list has been obtained before, it has been stored in 'vnfr_ip_list'
        ip = self.vnfr_ip_list.get(vnfr_index_wanted, "")
        if ip:
            return self.get_first_ip(ip)
        r = engine.test(
            "Get VNFR to get IP_ADDRESS",
            "GET",
            "/nslcm/v1/vnfrs?member-vnf-index-ref={}&nsr-id-ref={}".format(
                vnfr_index_wanted, self.ns_id
            ),
            headers_json,
            None,
            200,
            r_header_json,
            "json",
        )
        if not r:
            return ""
        vnfr_data = r.json()
        if not (vnfr_data and vnfr_data[0]):
            return ""
        # Store the IP (or list of IPs) in 'vnfr_ip_list'
        ip_list = vnfr_data[0].get("ip-address", "")
        if ip_list:
            self.vnfr_ip_list[vnfr_index_wanted] = ip_list
            ip = self.get_first_ip(ip_list)
        return ip
-
-
-class TestDeployHackfestCirros(TestDeploy):
-    description = "Load and deploy Hackfest cirros_2vnf_ns example"
-
-    def __init__(self):
-        super().__init__()
-        self.test_name = "CIRROS"
-        self.vnfd_filenames = ("cirros_vnf.tar.gz",)
-        self.nsd_filename = "cirros_2vnf_ns.tar.gz"
-        self.commands = {
-            "1": [
-                "ls -lrt",
-            ],
-            "2": [
-                "ls -lrt",
-            ],
-        }
-        self.users = {"1": "cirros", "2": "cirros"}
-        self.passwords = {"1": "cubswin:)", "2": "cubswin:)"}
-
-    def terminate(self, engine):
-        # Make a delete in one step, overriding the normal two step of TestDeploy that launched terminate and delete
-        if test_osm:
-            engine.test(
-                "Terminate and delete NS in one step",
-                "DELETE",
-                "/nslcm/v1/ns_instances_content/{}".format(self.ns_id),
-                headers_yaml,
-                None,
-                202,
-                None,
-                "yaml",
-            )
-
-            engine.wait_until_delete(
-                "/nslcm/v1/ns_instances/{}".format(self.ns_id), timeout_deploy
-            )
-        else:
-            engine.test(
-                "Delete NS with FORCE",
-                "DELETE",
-                "/nslcm/v1/ns_instances/{}?FORCE=True".format(self.ns_id),
-                headers_yaml,
-                None,
-                204,
-                None,
-                0,
-            )
-
-        # check all it is deleted
-        engine.test(
-            "Check NS is deleted",
-            "GET",
-            "/nslcm/v1/ns_instances/{}".format(self.ns_id),
-            headers_yaml,
-            None,
-            404,
-            None,
-            "yaml",
-        )
-        r = engine.test(
-            "Check NSLCMOPs are deleted",
-            "GET",
-            "/nslcm/v1/ns_lcm_op_occs?nsInstanceId={}".format(self.ns_id),
-            headers_json,
-            None,
-            200,
-            None,
-            "json",
-        )
-        if not r:
-            return
-        nslcmops = r.json()
-        if not isinstance(nslcmops, list) or nslcmops:
-            raise TestException(
-                "NS {} deleted but with ns_lcm_op_occ active: {}".format(
-                    self.ns_id, nslcmops
-                )
-            )
-
-
-class TestDeployHackfest1(TestDeploy):
-    description = "Load and deploy Hackfest_1_vnfd example"
-
-    def __init__(self):
-        super().__init__()
-        self.test_name = "HACKFEST1-"
-        self.vnfd_filenames = ("hackfest_1_vnfd.tar.gz",)
-        self.nsd_filename = "hackfest_1_nsd.tar.gz"
-        # self.commands = {'1': ['ls -lrt', ], '2': ['ls -lrt', ]}
-        # self.users = {'1': "cirros", '2': "cirros"}
-        # self.passwords = {'1': "cubswin:)", '2': "cubswin:)"}
-
-
-class TestDeployHackfestCirrosScaling(TestDeploy):
-    description = (
-        "Load and deploy Hackfest cirros_2vnf_ns example with scaling modifications"
-    )
-
-    def __init__(self):
-        super().__init__()
-        self.test_name = "CIRROS-SCALE"
-        self.vnfd_filenames = ("cirros_vnf.tar.gz",)
-        self.nsd_filename = "cirros_2vnf_ns.tar.gz"
-        # Modify VNFD to add scaling and count=2
-        self.descriptor_edit = {
-            "vnfd0": {
-                "vdu": {"$id: 'cirros_vnfd-VM'": {"count": 2}},
-                "scaling-group-descriptor": [
-                    {
-                        "name": "scale_cirros",
-                        "max-instance-count": 2,
-                        "vdu": [{"vdu-id-ref": "cirros_vnfd-VM", "count": 2}],
-                    }
-                ],
-            }
-        }
-
-    def additional_operations(self, engine, test_osm, manual_check):
-        if not test_osm:
-            return
-        # 2 perform scale out twice
-        payload = (
-            "{scaleType: SCALE_VNF, scaleVnfData: {scaleVnfType: SCALE_OUT, scaleByStepData: "
-            '{scaling-group-descriptor: scale_cirros, member-vnf-index: "1"}}}'
-        )
-        for i in range(0, 2):
-            engine.test(
-                "Execute scale action over NS",
-                "POST",
-                "/nslcm/v1/ns_instances/{}/scale".format(self.ns_id),
-                headers_yaml,
-                payload,
-                (201, 202),
-                r_headers_yaml_location_nslcmop,
-                "yaml",
-            )
-            nslcmop2_scale_out = engine.last_id
-            engine.wait_operation_ready("ns", nslcmop2_scale_out, timeout_deploy)
-            if manual_check:
-                input("NS scale out done. Check that two more vdus are there")
-            # TODO check automatic
-
-        # 2 perform scale in
-        payload = (
-            "{scaleType: SCALE_VNF, scaleVnfData: {scaleVnfType: SCALE_IN, scaleByStepData: "
-            '{scaling-group-descriptor: scale_cirros, member-vnf-index: "1"}}}'
-        )
-        for i in range(0, 2):
-            engine.test(
-                "Execute scale IN action over NS",
-                "POST",
-                "/nslcm/v1/ns_instances/{}/scale".format(self.ns_id),
-                headers_yaml,
-                payload,
-                (201, 202),
-                r_headers_yaml_location_nslcmop,
-                "yaml",
-            )
-            nslcmop2_scale_in = engine.last_id
-            engine.wait_operation_ready("ns", nslcmop2_scale_in, timeout_deploy)
-            if manual_check:
-                input("NS scale in done. Check that two less vdus are there")
-            # TODO check automatic
-
-        # perform scale in that must fail as reached limit
-        engine.test(
-            "Execute scale IN out of limit action over NS",
-            "POST",
-            "/nslcm/v1/ns_instances/{}/scale".format(self.ns_id),
-            headers_yaml,
-            payload,
-            (201, 202),
-            r_headers_yaml_location_nslcmop,
-            "yaml",
-        )
-        nslcmop2_scale_in = engine.last_id
-        engine.wait_operation_ready(
-            "ns", nslcmop2_scale_in, timeout_deploy, expected_fail=True
-        )
-
-
-class TestDeployIpMac(TestDeploy):
-    description = "Load and deploy descriptor examples setting mac, ip address at descriptor and instantiate params"
-
-    def __init__(self):
-        super().__init__()
-        self.test_name = "SetIpMac"
-        self.vnfd_filenames = (
-            "vnfd_2vdu_set_ip_mac2.yaml",
-            "vnfd_2vdu_set_ip_mac.yaml",
-        )
-        self.nsd_filename = "scenario_2vdu_set_ip_mac.yaml"
-        self.descriptor_url = "https://osm.etsi.org/gitweb/?p=osm/RO.git;a=blob_plain;f=test/RO_tests/v3_2vdu_set_ip_mac/"
-        self.commands = {
-            "1": [
-                "ls -lrt",
-            ],
-            "2": [
-                "ls -lrt",
-            ],
-        }
-        self.users = {"1": "osm", "2": "osm"}
-        self.passwords = {"1": "osm4u", "2": "osm4u"}
-        self.timeout = 360
-
-    def run(self, engine, test_osm, manual_check, test_params=None):
-        # super().run(engine, test_osm, manual_check, test_params)
-        # run again setting IPs with instantiate parameters
-        instantiation_params = {
-            "vnf": [
-                {
-                    "member-vnf-index": "1",
-                    "internal-vld": [
-                        {
-                            "name": "internal_vld1",  # net_internal
-                            "ip-profile": {
-                                "ip-version": "ipv4",
-                                "subnet-address": "10.9.8.0/24",
-                                "dhcp-params": {
-                                    "count": 100,
-                                    "start-address": "10.9.8.100",
-                                },
-                            },
-                            "internal-connection-point": [
-                                {
-                                    "id-ref": "eth2",
-                                    "ip-address": "10.9.8.2",
-                                },
-                                {
-                                    "id-ref": "eth3",
-                                    "ip-address": "10.9.8.3",
-                                },
-                            ],
-                        },
-                    ],
-                    "vdu": [
-                        {
-                            "id": "VM1",
-                            "interface": [
-                                # {
-                                #     "name": "iface11",
-                                #     "floating-ip-required": True,
-                                # },
-                                {"name": "iface13", "mac-address": "52:33:44:55:66:13"},
-                            ],
-                        },
-                        {
-                            "id": "VM2",
-                            "interface": [
-                                {
-                                    "name": "iface21",
-                                    "ip-address": "10.31.31.22",
-                                    "mac-address": "52:33:44:55:66:21",
-                                },
-                            ],
-                        },
-                    ],
-                },
-            ]
-        }
-
-        super().run(
-            engine,
-            test_osm,
-            manual_check,
-            test_params={"ns-config": instantiation_params},
-        )
-
-
-class TestDeployHackfest4(TestDeploy):
-    description = "Load and deploy Hackfest 4 example."
-
-    def __init__(self):
-        super().__init__()
-        self.test_name = "HACKFEST4-"
-        self.vnfd_filenames = ("hackfest_4_vnfd.tar.gz",)
-        self.nsd_filename = "hackfest_4_nsd.tar.gz"
-        self.uses_configuration = True
-        self.commands = {
-            "1": [
-                "ls -lrt",
-            ],
-            "2": [
-                "ls -lrt",
-            ],
-        }
-        self.users = {"1": "ubuntu", "2": "ubuntu"}
-        self.passwords = {"1": "osm4u", "2": "osm4u"}
-        # Modify VNFD to add scaling
-        # self.descriptor_edit = {
-        #     "vnfd0": {
-        #         'vnf-configuration': {
-        #             'config-primitive': [{
-        #                 'name': 'touch',
-        #                 'parameter': [{
-        #                     'name': 'filename',
-        #                     'data-type': 'STRING',
-        #                     'default-value': '/home/ubuntu/touched'
-        #                 }]
-        #             }]
-        #         },
-        #         'scaling-group-descriptor': [{
-        #             'name': 'scale_dataVM',
-        #             'scaling-policy': [{
-        #                 'threshold-time': 0,
-        #                 'name': 'auto_cpu_util_above_threshold',
-        #                 'scaling-type': 'automatic',
-        #                 'scaling-criteria': [{
-        #                     'name': 'cpu_util_above_threshold',
-        #                     'vnf-monitoring-param-ref': 'all_aaa_cpu_util',
-        #                     'scale-out-relational-operation': 'GE',
-        #                     'scale-in-threshold': 15,
-        #                     'scale-out-threshold': 60,
-        #                     'scale-in-relational-operation': 'LE'
-        #                 }],
-        #                 'cooldown-time': 60
-        #             }],
-        #             'max-instance-count': 10,
-        #             'scaling-config-action': [
-        #                 {'vnf-config-primitive-name-ref': 'touch',
-        #                  'trigger': 'post-scale-out'},
-        #                 {'vnf-config-primitive-name-ref': 'touch',
-        #                  'trigger': 'pre-scale-in'}
-        #             ],
-        #             'vdu': [{
-        #                 'vdu-id-ref': 'dataVM',
-        #                 'count': 1
-        #             }]
-        #         }]
-        #     }
-        # }
-
-
-class TestDeployHackfest3Charmed(TestDeploy):
-    description = "Load and deploy Hackfest 3charmed_ns example"
-
-    def __init__(self):
-        super().__init__()
-        self.test_name = "HACKFEST3-"
-        self.vnfd_filenames = ("hackfest_3charmed_vnfd.tar.gz",)
-        self.nsd_filename = "hackfest_3charmed_nsd.tar.gz"
-        self.uses_configuration = True
-        self.commands = {
-            "1": ["ls -lrt /home/ubuntu/first-touch"],
-            "2": ["ls -lrt /home/ubuntu/first-touch"],
-        }
-        self.users = {"1": "ubuntu", "2": "ubuntu"}
-        self.passwords = {"1": "osm4u", "2": "osm4u"}
-        self.descriptor_edit = {
-            "vnfd0": yaml.safe_load(
-                """
-                vnf-configuration:
-                    terminate-config-primitive:
-                    -   seq: '1'
-                        name: touch
-                        parameter:
-                        -   name: filename
-                            value: '/home/ubuntu/last-touch1'
-                    -   seq: '3'
-                        name: touch
-                        parameter:
-                        -   name: filename
-                            value: '/home/ubuntu/last-touch3'
-                    -   seq: '2'
-                        name: touch
-                        parameter:
-                        -   name: filename
-                            value: '/home/ubuntu/last-touch2'
-                """
-            )
-        }
-
-    def additional_operations(self, engine, test_osm, manual_check):
-        if not test_osm:
-            return
-        # 1 perform action
-        vnfr_index_selected = "2"
-        payload = '{member_vnf_index: "2", primitive: touch, primitive_params: { filename: /home/ubuntu/OSMTESTNBI }}'
-        engine.test(
-            "Exec service primitive over NS",
-            "POST",
-            "/nslcm/v1/ns_instances/{}/action".format(self.ns_id),
-            headers_yaml,
-            payload,
-            (201, 202),
-            r_headers_yaml_location_nslcmop,
-            "yaml",
-        )
-        nslcmop2_action = engine.last_id
-        # Wait until status is Ok
-        engine.wait_operation_ready("ns", nslcmop2_action, timeout_deploy)
-        vnfr_ip = self.get_vnfr_ip(engine, vnfr_index_selected)
-        if manual_check:
-            input(
-                "NS service primitive has been executed."
-                "Check that file /home/ubuntu/OSMTESTNBI is present at {}".format(
-                    vnfr_ip
-                )
-            )
-        if test_osm:
-            commands = {
-                "1": [""],
-                "2": [
-                    "ls -lrt /home/ubuntu/OSMTESTNBI",
-                ],
-            }
-            self.test_ns(engine, test_osm, commands=commands)
-
-        # # 2 perform scale out
-        # payload = '{scaleType: SCALE_VNF, scaleVnfData: {scaleVnfType: SCALE_OUT, scaleByStepData: ' \
-        #           '{scaling-group-descriptor: scale_dataVM, member-vnf-index: "1"}}}'
-        # engine.test("Execute scale action over NS", "POST",
-        #             "/nslcm/v1/ns_instances/{}/scale".format(self.ns_id), headers_yaml, payload,
-        #             (201, 202), r_headers_yaml_location_nslcmop, "yaml")
-        # nslcmop2_scale_out = engine.last_id
-        # engine.wait_operation_ready("ns", nslcmop2_scale_out, timeout_deploy)
-        # if manual_check:
-        #     input('NS scale out done. Check that file /home/ubuntu/touched is present and new VM is created')
-        # # TODO check automatic
-        #
-        # # 2 perform scale in
-        # payload = '{scaleType: SCALE_VNF, scaleVnfData: {scaleVnfType: SCALE_IN, scaleByStepData: ' \
-        #           '{scaling-group-descriptor: scale_dataVM, member-vnf-index: "1"}}}'
-        # engine.test("Execute scale action over NS", "POST",
-        #             "/nslcm/v1/ns_instances/{}/scale".format(self.ns_id), headers_yaml, payload,
-        #             (201, 202), r_headers_yaml_location_nslcmop, "yaml")
-        # nslcmop2_scale_in = engine.last_id
-        # engine.wait_operation_ready("ns", nslcmop2_scale_in, timeout_deploy)
-        # if manual_check:
-        #     input('NS scale in done. Check that file /home/ubuntu/touched is updated and new VM is deleted')
-        # # TODO check automatic
-
-
-class TestDeployHackfest3Charmed2(TestDeployHackfest3Charmed):
-    description = (
-        "Load and deploy Hackfest 3charmed_ns example modified version of descriptors to have dots in "
-        "ids and member-vnf-index."
-    )
-
-    def __init__(self):
-        super().__init__()
-        self.test_name = "HACKFEST3v2-"
-        self.qforce = "?FORCE=True"
-        self.descriptor_edit = {
-            "vnfd0": {
-                "vdu": {
-                    "$[0]": {
-                        "interface": {
-                            "$[0]": {"external-connection-point-ref": "pdu-mgmt"}
-                        }
-                    },
-                    "$[1]": None,
-                },
-                "vnf-configuration": None,
-                "connection-point": {
-                    "$[0]": {
-                        "id": "pdu-mgmt",
-                        "name": "pdu-mgmt",
-                        "short-name": "pdu-mgmt",
-                    },
-                    "$[1]": None,
-                },
-                "mgmt-interface": {"cp": "pdu-mgmt"},
-                "description": "A vnf single vdu to be used as PDU",
-                "id": "vdu-as-pdu",
-                "internal-vld": {
-                    "$[0]": {
-                        "id": "pdu_internal",
-                        "name": "pdu_internal",
-                        "internal-connection-point": {"$[1]": None},
-                        "short-name": "pdu_internal",
-                        "type": "ELAN",
-                    }
-                },
-            },
-            # Modify NSD accordingly
-            "nsd": {
-                "constituent-vnfd": {
-                    "$[0]": {"vnfd-id-ref": "vdu-as-pdu"},
-                    "$[1]": None,
-                },
-                "description": "A nsd to deploy the vnf to act as as PDU",
-                "id": "nsd-as-pdu",
-                "name": "nsd-as-pdu",
-                "short-name": "nsd-as-pdu",
-                "vld": {
-                    "$[0]": {
-                        "id": "mgmt_pdu",
-                        "name": "mgmt_pdu",
-                        "short-name": "mgmt_pdu",
-                        "vnfd-connection-point-ref": {
-                            "$[0]": {
-                                "vnfd-connection-point-ref": "pdu-mgmt",
-                                "vnfd-id-ref": "vdu-as-pdu",
-                            },
-                            "$[1]": None,
-                        },
-                        "type": "ELAN",
-                    },
-                    "$[1]": None,
-                },
-            },
-        }
-
-
-class TestDeployHackfest3Charmed3(TestDeployHackfest3Charmed):
-    description = "Load and deploy Hackfest 3charmed_ns example modified version to test scaling and NS parameters"
-
-    def __init__(self):
-        super().__init__()
-        self.test_name = "HACKFEST3v3-"
-        self.commands = {
-            "1": ["ls -lrt /home/ubuntu/first-touch-1"],
-            "2": ["ls -lrt /home/ubuntu/first-touch-2"],
-        }
-        self.descriptor_edit = {
-            "vnfd0": yaml.load(
-                """
-                scaling-group-descriptor:
-                    -   name: "scale_dataVM"
-                        max-instance-count: 10
-                        scaling-policy:
-                        -   name: "auto_cpu_util_above_threshold"
-                            scaling-type: "automatic"
-                            threshold-time: 0
-                            cooldown-time: 60
-                            scaling-criteria:
-                            -   name: "cpu_util_above_threshold"
-                                scale-in-threshold: 15
-                                scale-in-relational-operation: "LE"
-                                scale-out-threshold: 60
-                                scale-out-relational-operation: "GE"
-                                vnf-monitoring-param-ref: "monitor1"
-                        vdu:
-                        -   vdu-id-ref: dataVM
-                            count: 1
-                        scaling-config-action:
-                        -   trigger: post-scale-out
-                            vnf-config-primitive-name-ref: touch
-                        -   trigger: pre-scale-in
-                            vnf-config-primitive-name-ref: touch
-                vdu:
-                    "$id: dataVM":
-                        monitoring-param:
-                        -   id: "dataVM_cpu_util"
-                            nfvi-metric: "cpu_utilization"
-
-                monitoring-param:
-                -   id: "monitor1"
-                    name: "monitor1"
-                    aggregation-type: AVERAGE
-                    vdu-monitoring-param:
-                      vdu-ref: "dataVM"
-                      vdu-monitoring-param-ref: "dataVM_cpu_util"
-                vnf-configuration:
-                    initial-config-primitive:
-                        "$[1]":
-                            parameter:
-                                "$[0]":
-                                    value: "<touch_filename>"   # default-value: /home/ubuntu/first-touch
-                    config-primitive:
-                        "$[0]":
-                            parameter:
-                                "$[0]":
-                                    default-value: "<touch_filename2>"
-                """,
-                Loader=yaml.Loader,
-            )
-        }
-        self.ns_params = {
-            "additionalParamsForVnf": [
-                {
-                    "member-vnf-index": "1",
-                    "additionalParams": {
-                        "touch_filename": "/home/ubuntu/first-touch-1",
-                        "touch_filename2": "/home/ubuntu/second-touch-1",
-                    },
-                },
-                {
-                    "member-vnf-index": "2",
-                    "additionalParams": {
-                        "touch_filename": "/home/ubuntu/first-touch-2",
-                        "touch_filename2": "/home/ubuntu/second-touch-2",
-                    },
-                },
-            ]
-        }
-
-    def additional_operations(self, engine, test_osm, manual_check):
-        super().additional_operations(engine, test_osm, manual_check)
-        if not test_osm:
-            return
-
-        # 2 perform scale out
-        payload = (
-            "{scaleType: SCALE_VNF, scaleVnfData: {scaleVnfType: SCALE_OUT, scaleByStepData: "
-            '{scaling-group-descriptor: scale_dataVM, member-vnf-index: "1"}}}'
-        )
-        engine.test(
-            "Execute scale action over NS",
-            "POST",
-            "/nslcm/v1/ns_instances/{}/scale".format(self.ns_id),
-            headers_yaml,
-            payload,
-            (201, 202),
-            r_headers_yaml_location_nslcmop,
-            "yaml",
-        )
-        nslcmop2_scale_out = engine.last_id
-        engine.wait_operation_ready("ns", nslcmop2_scale_out, timeout_deploy)
-        if manual_check:
-            input(
-                "NS scale out done. Check that file /home/ubuntu/second-touch-1 is present and new VM is created"
-            )
-        if test_osm:
-            commands = {
-                "1": [
-                    "ls -lrt /home/ubuntu/second-touch-1",
-                ]
-            }
-            self.test_ns(engine, test_osm, commands=commands)
-            # TODO check automatic connection to scaled VM
-
-        # 2 perform scale in
-        payload = (
-            "{scaleType: SCALE_VNF, scaleVnfData: {scaleVnfType: SCALE_IN, scaleByStepData: "
-            '{scaling-group-descriptor: scale_dataVM, member-vnf-index: "1"}}}'
-        )
-        engine.test(
-            "Execute scale action over NS",
-            "POST",
-            "/nslcm/v1/ns_instances/{}/scale".format(self.ns_id),
-            headers_yaml,
-            payload,
-            (201, 202),
-            r_headers_yaml_location_nslcmop,
-            "yaml",
-        )
-        nslcmop2_scale_in = engine.last_id
-        engine.wait_operation_ready("ns", nslcmop2_scale_in, timeout_deploy)
-        if manual_check:
-            input(
-                "NS scale in done. Check that file /home/ubuntu/second-touch-1 is updated and new VM is deleted"
-            )
-        # TODO check automatic
-
-
-class TestDeploySimpleCharm(TestDeploy):
-    description = "Deploy hackfest-4 hackfest_simplecharm example"
-
-    def __init__(self):
-        super().__init__()
-        self.test_name = "HACKFEST-SIMPLE"
-        self.descriptor_url = (
-            "https://osm-download.etsi.org/ftp/osm-4.0-four/4th-hackfest/packages/"
-        )
-        self.vnfd_filenames = ("hackfest_simplecharm_vnf.tar.gz",)
-        self.nsd_filename = "hackfest_simplecharm_ns.tar.gz"
-        self.uses_configuration = True
-        self.commands = {
-            "1": [""],
-            "2": [
-                "ls -lrt /home/ubuntu/first-touch",
-            ],
-        }
-        self.users = {"1": "ubuntu", "2": "ubuntu"}
-        self.passwords = {"1": "osm4u", "2": "osm4u"}
-
-
-class TestDeploySimpleCharm2(TestDeploySimpleCharm):
-    description = (
-        "Deploy hackfest-4 hackfest_simplecharm example changing naming to contain dots on ids and "
-        "vnf-member-index"
-    )
-
-    def __init__(self):
-        super().__init__()
-        self.test_name = "HACKFEST-SIMPLE2-"
-        self.qforce = "?FORCE=True"
-        self.descriptor_edit = {
-            "vnfd0": {"id": "hackfest.simplecharm.vnf"},
-            "nsd": {
-                "id": "hackfest.simplecharm.ns",
-                "constituent-vnfd": {
-                    "$[0]": {
-                        "vnfd-id-ref": "hackfest.simplecharm.vnf",
-                        "member-vnf-index": "$1",
-                    },
-                    "$[1]": {
-                        "vnfd-id-ref": "hackfest.simplecharm.vnf",
-                        "member-vnf-index": "$2",
-                    },
-                },
-                "vld": {
-                    "$[0]": {
-                        "vnfd-connection-point-ref": {
-                            "$[0]": {
-                                "member-vnf-index-ref": "$1",
-                                "vnfd-id-ref": "hackfest.simplecharm.vnf",
-                            },
-                            "$[1]": {
-                                "member-vnf-index-ref": "$2",
-                                "vnfd-id-ref": "hackfest.simplecharm.vnf",
-                            },
-                        },
-                    },
-                    "$[1]": {
-                        "vnfd-connection-point-ref": {
-                            "$[0]": {
-                                "member-vnf-index-ref": "$1",
-                                "vnfd-id-ref": "hackfest.simplecharm.vnf",
-                            },
-                            "$[1]": {
-                                "member-vnf-index-ref": "$2",
-                                "vnfd-id-ref": "hackfest.simplecharm.vnf",
-                            },
-                        },
-                    },
-                },
-            },
-        }
-
-
-class TestDeploySingleVdu(TestDeployHackfest3Charmed):
-    description = (
-        "Generate a single VDU base on editing Hackfest3Charmed descriptors and deploy"
-    )
-
-    def __init__(self):
-        super().__init__()
-        self.test_name = "SingleVDU"
-        self.qforce = "?FORCE=True"
-        self.descriptor_edit = {
-            # Modify VNFD to remove one VDU
-            "vnfd0": {
-                "vdu": {
-                    "$[0]": {
-                        "interface": {
-                            "$[0]": {"external-connection-point-ref": "pdu-mgmt"}
-                        }
-                    },
-                    "$[1]": None,
-                },
-                "vnf-configuration": None,
-                "connection-point": {
-                    "$[0]": {
-                        "id": "pdu-mgmt",
-                        "name": "pdu-mgmt",
-                        "short-name": "pdu-mgmt",
-                    },
-                    "$[1]": None,
-                },
-                "mgmt-interface": {"cp": "pdu-mgmt"},
-                "description": "A vnf single vdu to be used as PDU",
-                "id": "vdu-as-pdu",
-                "internal-vld": {
-                    "$[0]": {
-                        "id": "pdu_internal",
-                        "name": "pdu_internal",
-                        "internal-connection-point": {"$[1]": None},
-                        "short-name": "pdu_internal",
-                        "type": "ELAN",
-                    }
-                },
-            },
-            # Modify NSD accordingly
-            "nsd": {
-                "constituent-vnfd": {
-                    "$[0]": {"vnfd-id-ref": "vdu-as-pdu"},
-                    "$[1]": None,
-                },
-                "description": "A nsd to deploy the vnf to act as as PDU",
-                "id": "nsd-as-pdu",
-                "name": "nsd-as-pdu",
-                "short-name": "nsd-as-pdu",
-                "vld": {
-                    "$[0]": {
-                        "id": "mgmt_pdu",
-                        "name": "mgmt_pdu",
-                        "short-name": "mgmt_pdu",
-                        "vnfd-connection-point-ref": {
-                            "$[0]": {
-                                "vnfd-connection-point-ref": "pdu-mgmt",
-                                "vnfd-id-ref": "vdu-as-pdu",
-                            },
-                            "$[1]": None,
-                        },
-                        "type": "ELAN",
-                    },
-                    "$[1]": None,
-                },
-            },
-        }
-
-
-class TestDeployHnfd(TestDeployHackfest3Charmed):
-    description = (
-        "Generate a HNFD base on editing Hackfest3Charmed descriptors and deploy"
-    )
-
-    def __init__(self):
-        super().__init__()
-        self.test_name = "HNFD"
-        self.pduDeploy = TestDeploySingleVdu()
-        self.pdu_interface_0 = {}
-        self.pdu_interface_1 = {}
-
-        self.pdu_id = None
-        # self.vnf_to_pdu = """
-        #     vdu:
-        #         "$[0]":
-        #             pdu-type: PDU-TYPE-1
-        #             interface:
-        #                 "$[0]":
-        #                     name: mgmt-iface
-        #                 "$[1]":
-        #                     name: pdu-iface-internal
-        #     id: hfn1
-        #     description: HFND, one PDU + One VDU
-        #     name: hfn1
-        #     short-name: hfn1
-        #
-        # """
-
-        self.pdu_descriptor = {
-            "name": "my-PDU",
-            "type": "PDU-TYPE-1",
-            "vim_accounts": "to-override",
-            "interfaces": [
-                {
-                    "name": "mgmt-iface",
-                    "mgmt": True,
-                    "type": "overlay",
-                    "ip-address": "to override",
-                    "mac-address": "mac_address",
-                    "vim-network-name": "mgmt",
-                },
-                {
-                    "name": "pdu-iface-internal",
-                    "mgmt": False,
-                    "type": "overlay",
-                    "ip-address": "to override",
-                    "mac-address": "mac_address",
-                    "vim-network-name": "pdu_internal",  # OSMNBITEST-PDU-pdu_internal
-                },
-            ],
-        }
-        self.vnfd_filenames = (
-            "hackfest_3charmed_vnfd.tar.gz",
-            "hackfest_3charmed_vnfd.tar.gz",
-        )
-
-        self.descriptor_edit = {
-            "vnfd0": {
-                "id": "hfnd1",
-                "name": "hfn1",
-                "short-name": "hfn1",
-                "vdu": {
-                    "$[0]": {
-                        "pdu-type": "PDU-TYPE-1",
-                        "interface": {
-                            "$[0]": {"name": "mgmt-iface"},
-                            "$[1]": {"name": "pdu-iface-internal"},
-                        },
-                    }
-                },
-            },
-            "nsd": {
-                "constituent-vnfd": {"$[1]": {"vnfd-id-ref": "hfnd1"}},
-                "vld": {
-                    "$[0]": {
-                        "vnfd-connection-point-ref": {"$[1]": {"vnfd-id-ref": "hfnd1"}}
-                    },
-                    "$[1]": {
-                        "vnfd-connection-point-ref": {"$[1]": {"vnfd-id-ref": "hfnd1"}}
-                    },
-                },
-            },
-        }
-
-    def create_descriptors(self, engine):
-        super().create_descriptors(engine)
-
-        # Create PDU
-        self.pdu_descriptor["interfaces"][0].update(self.pdu_interface_0)
-        self.pdu_descriptor["interfaces"][1].update(self.pdu_interface_1)
-        self.pdu_descriptor["vim_accounts"] = [self.vim_id]
-        # TODO get vim-network-name from vnfr.vld.name
-        self.pdu_descriptor["interfaces"][1]["vim-network-name"] = "{}-{}-{}".format(
-            os.environ.get("OSMNBITEST_NS_NAME", "OSMNBITEST"),
-            "PDU",
-            self.pdu_descriptor["interfaces"][1]["vim-network-name"],
-        )
-        engine.test(
-            "Onboard PDU descriptor",
-            "POST",
-            "/pdu/v1/pdu_descriptors",
-            {
-                "Location": "/pdu/v1/pdu_descriptors/",
-                "Content-Type": "application/yaml",
-            },
-            self.pdu_descriptor,
-            201,
-            r_header_yaml,
-            "yaml",
-        )
-        self.pdu_id = engine.last_id
-
-    def run(self, engine, test_osm, manual_check, test_params=None):
-        engine.get_autorization()
-        engine.set_test_name(self.test_name)
-        nsname = os.environ.get("OSMNBITEST_NS_NAME", "OSMNBITEST")
-
-        # create real VIM if not exist
-        self.vim_id = engine.get_create_vim(test_osm)
-        # instantiate PDU
-        self.pduDeploy.create_descriptors(engine)
-        self.pduDeploy.instantiate(
-            engine,
-            {
-                "nsDescription": "to be used as PDU",
-                "nsName": nsname + "-PDU",
-                "nsdId": self.pduDeploy.nsd_id,
-                "vimAccountId": self.vim_id,
-            },
-        )
-        if manual_check:
-            input(
-                "VNF to be used as PDU has been deployed. Perform manual check and press enter to resume"
-            )
-        if test_osm:
-            self.pduDeploy.test_ns(engine, test_osm)
-
-        if test_osm:
-            r = engine.test(
-                "Get VNFR to obtain IP_ADDRESS",
-                "GET",
-                "/nslcm/v1/vnfrs?nsr-id-ref={}".format(self.pduDeploy.ns_id),
-                headers_json,
-                None,
-                200,
-                r_header_json,
-                "json",
-            )
-            if not r:
-                return
-            vnfr_data = r.json()
-            # print(vnfr_data)
-
-            self.pdu_interface_0["ip-address"] = vnfr_data[0]["vdur"][0]["interfaces"][
-                0
-            ].get("ip-address")
-            self.pdu_interface_1["ip-address"] = vnfr_data[0]["vdur"][0]["interfaces"][
-                1
-            ].get("ip-address")
-            self.pdu_interface_0["mac-address"] = vnfr_data[0]["vdur"][0]["interfaces"][
-                0
-            ].get("mac-address")
-            self.pdu_interface_1["mac-address"] = vnfr_data[0]["vdur"][0]["interfaces"][
-                1
-            ].get("mac-address")
-            if not self.pdu_interface_0["ip-address"]:
-                raise TestException("Vnfr has not managment ip address")
-        else:
-            self.pdu_interface_0["ip-address"] = "192.168.10.10"
-            self.pdu_interface_1["ip-address"] = "192.168.11.10"
-            self.pdu_interface_0["mac-address"] = "52:33:44:55:66:13"
-            self.pdu_interface_1["mac-address"] = "52:33:44:55:66:14"
-
-        self.create_descriptors(engine)
-
-        ns_data = {
-            "nsDescription": "default description",
-            "nsName": nsname,
-            "nsdId": self.nsd_id,
-            "vimAccountId": self.vim_id,
-        }
-        if test_params and test_params.get("ns-config"):
-            if isinstance(test_params["ns-config"], str):
-                ns_data.update(yaml.load(test_params["ns-config"]), Loader=yaml.Loader)
-            else:
-                ns_data.update(test_params["ns-config"])
-
-        self.instantiate(engine, ns_data)
-        if manual_check:
-            input(
-                "NS has been deployed. Perform manual check and press enter to resume"
-            )
-        if test_osm:
-            self.test_ns(engine, test_osm)
-        self.additional_operations(engine, test_osm, manual_check)
-        self.terminate(engine)
-        self.pduDeploy.terminate(engine)
-        self.delete_descriptors(engine)
-        self.pduDeploy.delete_descriptors(engine)
-
-    def delete_descriptors(self, engine):
-        super().delete_descriptors(engine)
-        # delete pdu
-        engine.test(
-            "Delete PDU SOL005",
-            "DELETE",
-            "/pdu/v1/pdu_descriptors/{}".format(self.pdu_id),
-            headers_yaml,
-            None,
-            204,
-            None,
-            0,
-        )
-
-
-class TestDescriptors:
-    description = "Test VNFD, NSD, PDU descriptors CRUD and dependencies"
-    vnfd_empty = """vnfd:vnfd-catalog:
-        vnfd:
-        -   name: prova
-            short-name: prova
-            id: prova
-    """
-    vnfd_prova = """vnfd:vnfd-catalog:
-        vnfd:
-        -   connection-point:
-            -   name: cp_0h8m
-                type: VPORT
-            id: prova
-            name: prova
-            short-name: prova
-            vdu:
-            -   id: vdu_z4bm
-                image: ubuntu
-                interface:
-                -   external-connection-point-ref: cp_0h8m
-                    name: eth0
-                    virtual-interface:
-                    type: VIRTIO
-                name: vdu_z4bm
-            version: '1.0'
-    """
-
-    def __init__(self):
-        self.vnfd_filename = "hackfest_3charmed_vnfd.tar.gz"
-        self.nsd_filename = "hackfest_3charmed_nsd.tar.gz"
-        self.descriptor_url = (
-            "https://osm-download.etsi.org/ftp/osm-3.0-three/2nd-hackfest/packages/"
-        )
-        self.vnfd_id = None
-        self.nsd_id = None
-
-    def run(self, engine, test_osm, manual_check, test_params=None):
-        engine.set_test_name("Descriptors")
-        engine.get_autorization()
-        temp_dir = os.path.dirname(os.path.abspath(__file__)) + "/temp/"
-        if not os.path.exists(temp_dir):
-            os.makedirs(temp_dir)
-
-        # download files
-        for filename in (self.vnfd_filename, self.nsd_filename):
-            filename_path = temp_dir + filename
-            if not os.path.exists(filename_path):
-                with open(filename_path, "wb") as file:
-                    response = requests.get(self.descriptor_url + filename)
-                    if response.status_code >= 300:
-                        raise TestException(
-                            "Error downloading descriptor from '{}': {}".format(
-                                self.descriptor_url + filename, response.status_code
-                            )
-                        )
-                    file.write(response.content)
-
-        vnfd_filename_path = temp_dir + self.vnfd_filename
-        nsd_filename_path = temp_dir + self.nsd_filename
-
-        engine.test(
-            "Onboard empty VNFD in one step",
-            "POST",
-            "/vnfpkgm/v1/vnf_packages_content",
-            headers_yaml,
-            self.vnfd_empty,
-            201,
-            r_headers_yaml_location_vnfd,
-            "yaml",
-        )
-        self.vnfd_id = engine.last_id
-
-        # test bug 605
-        engine.test(
-            "Upload invalid VNFD ",
-            "PUT",
-            "/vnfpkgm/v1/vnf_packages/{}/package_content".format(self.vnfd_id),
-            headers_yaml,
-            self.vnfd_prova,
-            422,
-            r_header_yaml,
-            "yaml",
-        )
-
-        engine.test(
-            "Upload VNFD {}".format(self.vnfd_filename),
-            "PUT",
-            "/vnfpkgm/v1/vnf_packages/{}/package_content".format(self.vnfd_id),
-            headers_zip_yaml,
-            "@b" + vnfd_filename_path,
-            204,
-            None,
-            0,
-        )
-
-        queries = [
-            "mgmt-interface.cp=mgmt",
-            "vdu.0.interface.0.external-connection-point-ref=mgmt",
-            "vdu.0.interface.1.internal-connection-point-ref=internal",
-            "internal-vld.0.internal-connection-point.0.id-ref=internal",
-            # Detection of duplicated VLD names in VNF Descriptors
-            # URL: internal-vld=[
-            #        {id: internal1, name: internal, type:ELAN,
-            #            internal-connection-point: [{id-ref: mgmtVM-internal}, {id-ref: dataVM-internal}]},
-            #        {id: internal2, name: internal, type:ELAN,
-            #            internal-connection-point: [{id-ref: mgmtVM-internal}, {id-ref: dataVM-internal}]}
-            #        ]
-            "internal-vld=%5B%7Bid%3A%20internal1%2C%20name%3A%20internal%2C%20type%3A%20ELAN%2C%20"
-            "internal-connection-point%3A%20%5B%7Bid-ref%3A%20mgmtVM-internal%7D%2C%20%7Bid-ref%3A%20"
-            "dataVM-internal%7D%5D%7D%2C%20%7Bid%3A%20internal2%2C%20name%3A%20internal%2C%20type%3A%20"
-            "ELAN%2C%20internal-connection-point%3A%20%5B%7Bid-ref%3A%20mgmtVM-internal%7D%2C%20%7B"
-            "id-ref%3A%20dataVM-internal%7D%5D%7D%5D",
-        ]
-        for query in queries:
-            engine.test(
-                "Upload invalid VNFD ",
-                "PUT",
-                "/vnfpkgm/v1/vnf_packages/{}/package_content?{}".format(
-                    self.vnfd_id, query
-                ),
-                headers_zip_yaml,
-                "@b" + vnfd_filename_path,
-                422,
-                r_header_yaml,
-                "yaml",
-            )
-
-        # test bug 605
-        engine.test(
-            "Upload invalid VNFD ",
-            "PUT",
-            "/vnfpkgm/v1/vnf_packages/{}/package_content".format(self.vnfd_id),
-            headers_yaml,
-            self.vnfd_prova,
-            422,
-            r_header_yaml,
-            "yaml",
-        )
-
-        # get vnfd descriptor
-        engine.test(
-            "Get VNFD descriptor",
-            "GET",
-            "/vnfpkgm/v1/vnf_packages/{}".format(self.vnfd_id),
-            headers_yaml,
-            None,
-            200,
-            r_header_yaml,
-            "yaml",
-        )
-
-        # get vnfd file descriptor
-        engine.test(
-            "Get VNFD file descriptor",
-            "GET",
-            "/vnfpkgm/v1/vnf_packages/{}/vnfd".format(self.vnfd_id),
-            headers_text,
-            None,
-            200,
-            r_header_text,
-            "text",
-            temp_dir + "vnfd-yaml",
-        )
-        # TODO compare files: diff vnfd-yaml hackfest_3charmed_vnfd/hackfest_3charmed_vnfd.yaml
-
-        # get vnfd zip file package
-        engine.test(
-            "Get VNFD zip package",
-            "GET",
-            "/vnfpkgm/v1/vnf_packages/{}/package_content".format(self.vnfd_id),
-            headers_zip,
-            None,
-            200,
-            r_header_zip,
-            "zip",
-            temp_dir + "vnfd-zip",
-        )
-        # TODO compare files: diff vnfd-zip hackfest_3charmed_vnfd.tar.gz
-
-        # get vnfd artifact
-        engine.test(
-            "Get VNFD artifact package",
-            "GET",
-            "/vnfpkgm/v1/vnf_packages/{}/artifacts/icons/osm.png".format(self.vnfd_id),
-            headers_zip,
-            None,
-            200,
-            r_header_octect,
-            "octet-string",
-            temp_dir + "vnfd-icon",
-        )
-        # TODO compare files: diff vnfd-icon hackfest_3charmed_vnfd/icons/osm.png
-
-        # nsd CREATE AND UPLOAD in one step:
-        engine.test(
-            "Onboard NSD in one step",
-            "POST",
-            "/nsd/v1/ns_descriptors_content",
-            headers_zip_yaml,
-            "@b" + nsd_filename_path,
-            201,
-            r_headers_yaml_location_nsd,
-            "yaml",
-        )
-        self.nsd_id = engine.last_id
-
-        queries = ["vld.0.vnfd-connection-point-ref.0.vnfd-id-ref=hf"]
-        for query in queries:
-            engine.test(
-                "Upload invalid NSD ",
-                "PUT",
-                "/nsd/v1/ns_descriptors/{}/nsd_content?{}".format(self.nsd_id, query),
-                headers_zip_yaml,
-                "@b" + nsd_filename_path,
-                422,
-                r_header_yaml,
-                "yaml",
-            )
-
-        # get nsd descriptor
-        engine.test(
-            "Get NSD descriptor",
-            "GET",
-            "/nsd/v1/ns_descriptors/{}".format(self.nsd_id),
-            headers_yaml,
-            None,
-            200,
-            r_header_yaml,
-            "yaml",
-        )
-
-        # get nsd file descriptor
-        engine.test(
-            "Get NSD file descriptor",
-            "GET",
-            "/nsd/v1/ns_descriptors/{}/nsd".format(self.nsd_id),
-            headers_text,
-            None,
-            200,
-            r_header_text,
-            "text",
-            temp_dir + "nsd-yaml",
-        )
-        # TODO compare files: diff nsd-yaml hackfest_3charmed_nsd/hackfest_3charmed_nsd.yaml
-
-        # get nsd zip file package
-        engine.test(
-            "Get NSD zip package",
-            "GET",
-            "/nsd/v1/ns_descriptors/{}/nsd_content".format(self.nsd_id),
-            headers_zip,
-            None,
-            200,
-            r_header_zip,
-            "zip",
-            temp_dir + "nsd-zip",
-        )
-        # TODO compare files: diff nsd-zip hackfest_3charmed_nsd.tar.gz
-
-        # get nsd artifact
-        engine.test(
-            "Get NSD artifact package",
-            "GET",
-            "/nsd/v1/ns_descriptors/{}/artifacts/icons/osm.png".format(self.nsd_id),
-            headers_zip,
-            None,
-            200,
-            r_header_octect,
-            "octet-string",
-            temp_dir + "nsd-icon",
-        )
-        # TODO compare files: diff nsd-icon hackfest_3charmed_nsd/icons/osm.png
-
-        # vnfd DELETE
-        test_rest.test(
-            "Delete VNFD conflict",
-            "DELETE",
-            "/vnfpkgm/v1/vnf_packages/{}".format(self.vnfd_id),
-            headers_yaml,
-            None,
-            409,
-            None,
-            None,
-        )
-
-        test_rest.test(
-            "Delete VNFD force",
-            "DELETE",
-            "/vnfpkgm/v1/vnf_packages/{}?FORCE=TRUE".format(self.vnfd_id),
-            headers_yaml,
-            None,
-            204,
-            None,
-            0,
-        )
-
-        # nsd DELETE
-        test_rest.test(
-            "Delete NSD",
-            "DELETE",
-            "/nsd/v1/ns_descriptors/{}".format(self.nsd_id),
-            headers_yaml,
-            None,
-            204,
-            None,
-            0,
-        )
-
-
-class TestNetSliceTemplates:
-    description = "Upload a NST to OSM"
-
-    def __init__(self):
-        self.vnfd_filename = "@./slice_shared/vnfd/slice_shared_vnfd.yaml"
-        self.vnfd_filename_middle = "@./slice_shared/vnfd/slice_shared_middle_vnfd.yaml"
-        self.nsd_filename = "@./slice_shared/nsd/slice_shared_nsd.yaml"
-        self.nsd_filename_middle = "@./slice_shared/nsd/slice_shared_middle_nsd.yaml"
-        self.nst_filenames = "@./slice_shared/slice_shared_nstd.yaml"
-
-    def run(self, engine, test_osm, manual_check, test_params=None):
-        # nst CREATE
-        engine.set_test_name("NST step ")
-        engine.get_autorization()
-        temp_dir = os.path.dirname(os.path.abspath(__file__)) + "/temp/"
-        if not os.path.exists(temp_dir):
-            os.makedirs(temp_dir)
-
-        # Onboard VNFDs
-        engine.test(
-            "Onboard edge VNFD",
-            "POST",
-            "/vnfpkgm/v1/vnf_packages_content",
-            headers_yaml,
-            self.vnfd_filename,
-            201,
-            r_headers_yaml_location_vnfd,
-            "yaml",
-        )
-        self.vnfd_edge_id = engine.last_id
-
-        engine.test(
-            "Onboard middle VNFD",
-            "POST",
-            "/vnfpkgm/v1/vnf_packages_content",
-            headers_yaml,
-            self.vnfd_filename_middle,
-            201,
-            r_headers_yaml_location_vnfd,
-            "yaml",
-        )
-        self.vnfd_middle_id = engine.last_id
-
-        # Onboard NSDs
-        engine.test(
-            "Onboard NSD edge",
-            "POST",
-            "/nsd/v1/ns_descriptors_content",
-            headers_yaml,
-            self.nsd_filename,
-            201,
-            r_headers_yaml_location_nsd,
-            "yaml",
-        )
-        self.nsd_edge_id = engine.last_id
-
-        engine.test(
-            "Onboard NSD middle",
-            "POST",
-            "/nsd/v1/ns_descriptors_content",
-            headers_yaml,
-            self.nsd_filename_middle,
-            201,
-            r_headers_yaml_location_nsd,
-            "yaml",
-        )
-        self.nsd_middle_id = engine.last_id
-
-        # Onboard NST
-        engine.test(
-            "Onboard NST",
-            "POST",
-            "/nst/v1/netslice_templates_content",
-            headers_yaml,
-            self.nst_filenames,
-            201,
-            r_headers_yaml_location_nst,
-            "yaml",
-        )
-        nst_id = engine.last_id
-
-        # nstd SHOW OSM format
-        engine.test(
-            "Show NSTD OSM format",
-            "GET",
-            "/nst/v1/netslice_templates/{}".format(nst_id),
-            headers_json,
-            None,
-            200,
-            r_header_json,
-            "json",
-        )
-
-        # nstd DELETE
-        engine.test(
-            "Delete NSTD",
-            "DELETE",
-            "/nst/v1/netslice_templates/{}".format(nst_id),
-            headers_json,
-            None,
-            204,
-            None,
-            0,
-        )
-
-        # NSDs DELETE
-        test_rest.test(
-            "Delete NSD middle",
-            "DELETE",
-            "/nsd/v1/ns_descriptors/{}".format(self.nsd_middle_id),
-            headers_json,
-            None,
-            204,
-            None,
-            0,
-        )
-
-        test_rest.test(
-            "Delete NSD edge",
-            "DELETE",
-            "/nsd/v1/ns_descriptors/{}".format(self.nsd_edge_id),
-            headers_json,
-            None,
-            204,
-            None,
-            0,
-        )
-
-        # VNFDs DELETE
-        test_rest.test(
-            "Delete VNFD edge",
-            "DELETE",
-            "/vnfpkgm/v1/vnf_packages/{}".format(self.vnfd_edge_id),
-            headers_yaml,
-            None,
-            204,
-            None,
-            0,
-        )
-
-        test_rest.test(
-            "Delete VNFD middle",
-            "DELETE",
-            "/vnfpkgm/v1/vnf_packages/{}".format(self.vnfd_middle_id),
-            headers_yaml,
-            None,
-            204,
-            None,
-            0,
-        )
-
-
-class TestNetSliceInstances:
-    """
-    Test procedure:
-    1. Populate databases with VNFD, NSD, NST with the following scenario
-       +-----------------management-----------------+
-       |                     |                      |
-    +--+---+            +----+----+             +---+--+
-    |      |            |         |             |      |
-    | edge +---data1----+  middle +---data2-----+ edge |
-    |      |            |         |             |      |
-    +------+            +---------+             +------+
-                        shared-nss
-    2. Create NSI-1
-    3. Instantiate NSI-1
-    4. Create NSI-2
-    5. Instantiate NSI-2
-        Manual check - Are 2 slices instantiated correctly?
-        NSI-1 3 nss (2 nss-edges + 1 nss-middle)
-        NSI-2 2 nss (2 nss-edge sharing nss-middle)
-    6. Terminate NSI-1
-    7. Delete NSI-1
-        Manual check - Is slice NSI-1 deleted correctly?
-        NSI-2 with 2 nss-edge + 1 nss-middle (The one from NSI-1)
-    8. Create NSI-3
-    9. Instantiate NSI-3
-        Manual check - Is slice NSI-3 instantiated correctly?
-        NSI-3 reuse nss-middle. NSI-3 only create 2 nss-edge
-    10. Delete NSI-2
-    11. Terminate NSI-2
-    12. Delete NSI-3
-    13. Terminate NSI-3
-        Manual check - All cleaned correctly?
-        NSI-2 and NSI-3 were terminated and deleted
-    14. Cleanup database
-    """
-
-    description = "Upload a NST to OSM"
-
-    def __init__(self):
-        self.vim_id = None
-        self.vnfd_filename = "@./slice_shared/vnfd/slice_shared_vnfd.yaml"
-        self.vnfd_filename_middle = "@./slice_shared/vnfd/slice_shared_middle_vnfd.yaml"
-        self.nsd_filename = "@./slice_shared/nsd/slice_shared_nsd.yaml"
-        self.nsd_filename_middle = "@./slice_shared/nsd/slice_shared_middle_nsd.yaml"
-        self.nst_filenames = "@./slice_shared/slice_shared_nstd.yaml"
-
-    def create_slice(self, engine, nsi_data, name):
-        ns_data_text = yaml.safe_dump(nsi_data, default_flow_style=True, width=256)
-        r = engine.test(
-            name,
-            "POST",
-            "/nsilcm/v1/netslice_instances",
-            headers_yaml,
-            ns_data_text,
-            (201, 202),
-            {
-                "Location": "nsilcm/v1/netslice_instances/",
-                "Content-Type": "application/yaml",
-            },
-            "yaml",
-        )
-        return r
-
-    def instantiate_slice(self, engine, nsi_data, nsi_id, name):
-        ns_data_text = yaml.safe_dump(nsi_data, default_flow_style=True, width=256)
-        engine.test(
-            name,
-            "POST",
-            "/nsilcm/v1/netslice_instances/{}/instantiate".format(nsi_id),
-            headers_yaml,
-            ns_data_text,
-            (201, 202),
-            r_headers_yaml_location_nsilcmop,
-            "yaml",
-        )
-
-    def terminate_slice(self, engine, nsi_id, name):
-        engine.test(
-            name,
-            "POST",
-            "/nsilcm/v1/netslice_instances/{}/terminate".format(nsi_id),
-            headers_yaml,
-            None,
-            (201, 202),
-            r_headers_yaml_location_nsilcmop,
-            "yaml",
-        )
-
-    def delete_slice(self, engine, nsi_id, name):
-        engine.test(
-            name,
-            "DELETE",
-            "/nsilcm/v1/netslice_instances/{}".format(nsi_id),
-            headers_yaml,
-            None,
-            204,
-            None,
-            0,
-        )
-
-    def run(self, engine, test_osm, manual_check, test_params=None):
-        # nst CREATE
-        engine.set_test_name("NSI")
-        engine.get_autorization()
-
-        # Onboard VNFDs
-        engine.test(
-            "Onboard edge VNFD",
-            "POST",
-            "/vnfpkgm/v1/vnf_packages_content",
-            headers_yaml,
-            self.vnfd_filename,
-            201,
-            r_headers_yaml_location_vnfd,
-            "yaml",
-        )
-        self.vnfd_edge_id = engine.last_id
-
-        engine.test(
-            "Onboard middle VNFD",
-            "POST",
-            "/vnfpkgm/v1/vnf_packages_content",
-            headers_yaml,
-            self.vnfd_filename_middle,
-            201,
-            r_headers_yaml_location_vnfd,
-            "yaml",
-        )
-        self.vnfd_middle_id = engine.last_id
-
-        # Onboard NSDs
-        engine.test(
-            "Onboard NSD edge",
-            "POST",
-            "/nsd/v1/ns_descriptors_content",
-            headers_yaml,
-            self.nsd_filename,
-            201,
-            r_headers_yaml_location_nsd,
-            "yaml",
-        )
-        self.nsd_edge_id = engine.last_id
-
-        engine.test(
-            "Onboard NSD middle",
-            "POST",
-            "/nsd/v1/ns_descriptors_content",
-            headers_yaml,
-            self.nsd_filename_middle,
-            201,
-            r_headers_yaml_location_nsd,
-            "yaml",
-        )
-        self.nsd_middle_id = engine.last_id
-
-        # Onboard NST
-        engine.test(
-            "Onboard NST",
-            "POST",
-            "/nst/v1/netslice_templates_content",
-            headers_yaml,
-            self.nst_filenames,
-            201,
-            r_headers_yaml_location_nst,
-            "yaml",
-        )
-        nst_id = engine.last_id
-
-        self.vim_id = engine.get_create_vim(test_osm)
-
-        # CREATE NSI-1
-        ns_data = {
-            "nsiName": "Deploy-NSI-1",
-            "vimAccountId": self.vim_id,
-            "nstId": nst_id,
-            "nsiDescription": "default",
-        }
-        r = self.create_slice(engine, ns_data, "Create NSI-1 step 1")
-        if not r:
-            return
-        self.nsi_id1 = engine.last_id
-
-        # INSTANTIATE NSI-1
-        self.instantiate_slice(
-            engine, ns_data, self.nsi_id1, "Instantiate NSI-1 step 2"
-        )
-        nsilcmop_id1 = engine.last_id
-
-        # Waiting for NSI-1
-        if test_osm:
-            engine.wait_operation_ready("nsi", nsilcmop_id1, timeout_deploy)
-
-        # CREATE NSI-2
-        ns_data = {
-            "nsiName": "Deploy-NSI-2",
-            "vimAccountId": self.vim_id,
-            "nstId": nst_id,
-            "nsiDescription": "default",
-        }
-        r = self.create_slice(engine, ns_data, "Create NSI-2 step 1")
-        if not r:
-            return
-        self.nsi_id2 = engine.last_id
-
-        # INSTANTIATE NSI-2
-        self.instantiate_slice(
-            engine, ns_data, self.nsi_id2, "Instantiate NSI-2 step 2"
-        )
-        nsilcmop_id2 = engine.last_id
-
-        # Waiting for NSI-2
-        if test_osm:
-            engine.wait_operation_ready("nsi", nsilcmop_id2, timeout_deploy)
-
-        if manual_check:
-            input(
-                "NSI-1 AND NSI-2 has been deployed. Perform manual check and press enter to resume"
-            )
-
-        # TERMINATE NSI-1
-        if test_osm:
-            self.terminate_slice(engine, self.nsi_id1, "Terminate NSI-1")
-            nsilcmop1_id = engine.last_id
-
-            # Wait terminate NSI-1
-            engine.wait_operation_ready("nsi", nsilcmop1_id, timeout_deploy)
-
-        # DELETE NSI-1
-        self.delete_slice(engine, self.nsi_id1, "Delete NS")
-
-        if manual_check:
-            input(
-                "NSI-1 has been deleted. Perform manual check and press enter to resume"
-            )
-
-        # CREATE NSI-3
-        ns_data = {
-            "nsiName": "Deploy-NSI-3",
-            "vimAccountId": self.vim_id,
-            "nstId": nst_id,
-            "nsiDescription": "default",
-        }
-        r = self.create_slice(engine, ns_data, "Create NSI-3 step 1")
-
-        if not r:
-            return
-        self.nsi_id3 = engine.last_id
-
-        # INSTANTIATE NSI-3
-        self.instantiate_slice(
-            engine, ns_data, self.nsi_id3, "Instantiate NSI-3 step 2"
-        )
-        nsilcmop_id3 = engine.last_id
-
-        # Wait Instantiate NSI-3
-        if test_osm:
-            engine.wait_operation_ready("nsi", nsilcmop_id3, timeout_deploy)
-
-        if manual_check:
-            input(
-                "NSI-3 has been deployed. Perform manual check and press enter to resume"
-            )
-
-        # TERMINATE NSI-2
-        if test_osm:
-            self.terminate_slice(engine, self.nsi_id2, "Terminate NSI-2")
-            nsilcmop2_id = engine.last_id
-
-            # Wait terminate NSI-2
-            engine.wait_operation_ready("nsi", nsilcmop2_id, timeout_deploy)
-
-        # DELETE NSI-2
-        self.delete_slice(engine, self.nsi_id2, "DELETE NSI-2")
-
-        # TERMINATE NSI-3
-        if test_osm:
-            self.terminate_slice(engine, self.nsi_id3, "Terminate NSI-3")
-            nsilcmop3_id = engine.last_id
-
-            # Wait terminate NSI-3
-            engine.wait_operation_ready("nsi", nsilcmop3_id, timeout_deploy)
-
-        # DELETE NSI-3
-        self.delete_slice(engine, self.nsi_id3, "DELETE NSI-3")
-
-        if manual_check:
-            input(
-                "NSI-2 and NSI-3 has been deleted. Perform manual check and press enter to resume"
-            )
-
-        # nstd DELETE
-        engine.test(
-            "Delete NSTD",
-            "DELETE",
-            "/nst/v1/netslice_templates/{}".format(nst_id),
-            headers_json,
-            None,
-            204,
-            None,
-            0,
-        )
-
-        # NSDs DELETE
-        test_rest.test(
-            "Delete NSD middle",
-            "DELETE",
-            "/nsd/v1/ns_descriptors/{}".format(self.nsd_middle_id),
-            headers_json,
-            None,
-            204,
-            None,
-            0,
-        )
-
-        test_rest.test(
-            "Delete NSD edge",
-            "DELETE",
-            "/nsd/v1/ns_descriptors/{}".format(self.nsd_edge_id),
-            headers_json,
-            None,
-            204,
-            None,
-            0,
-        )
-
-        # VNFDs DELETE
-        test_rest.test(
-            "Delete VNFD edge",
-            "DELETE",
-            "/vnfpkgm/v1/vnf_packages/{}".format(self.vnfd_edge_id),
-            headers_yaml,
-            None,
-            204,
-            None,
-            0,
-        )
-
-        test_rest.test(
-            "Delete VNFD middle",
-            "DELETE",
-            "/vnfpkgm/v1/vnf_packages/{}".format(self.vnfd_middle_id),
-            headers_yaml,
-            None,
-            204,
-            None,
-            0,
-        )
-
-
-class TestAuthentication:
-    description = "Test Authentication"
-
-    @staticmethod
-    def run(engine, test_osm, manual_check, test_params=None):
-        engine.set_test_name("Authentication")
-        # backend = test_params.get("backend") if test_params else None   # UNUSED
-
-        admin_project_id = test_project_id = None
-        project_admin_role_id = project_user_role_id = None
-        test_user_id = empty_user_id = None
-        default_role_id = empty_role_id = token_role_id = None
-
-        engine.get_autorization()
-
-        # GET
-        engine.test(
-            "Get tokens",
-            "GET",
-            "/admin/v1/tokens",
-            headers_json,
-            {},
-            (200),
-            {"Content-Type": "application/json"},
-            "json",
-        )
-        engine.test(
-            "Get projects",
-            "GET",
-            "/admin/v1/projects",
-            headers_json,
-            {},
-            (200),
-            {"Content-Type": "application/json"},
-            "json",
-        )
-        engine.test(
-            "Get users",
-            "GET",
-            "/admin/v1/users",
-            headers_json,
-            {},
-            (200),
-            {"Content-Type": "application/json"},
-            "json",
-        )
-        engine.test(
-            "Get roles",
-            "GET",
-            "/admin/v1/roles",
-            headers_json,
-            {},
-            (200),
-            {"Content-Type": "application/json"},
-            "json",
-        )
-        res = engine.test(
-            "Get admin project",
-            "GET",
-            "/admin/v1/projects?name=admin",
-            headers_json,
-            {},
-            (200),
-            {"Content-Type": "application/json"},
-            "json",
-        )
-        admin_project_id = res.json()[0]["_id"] if res else None
-        res = engine.test(
-            "Get project admin role",
-            "GET",
-            "/admin/v1/roles?name=project_admin",
-            headers_json,
-            {},
-            (200),
-            {"Content-Type": "application/json"},
-            "json",
-        )
-        project_admin_role_id = res.json()[0]["_id"] if res else None
-        res = engine.test(
-            "Get project user role",
-            "GET",
-            "/admin/v1/roles?name=project_user",
-            headers_json,
-            {},
-            (200),
-            {"Content-Type": "application/json"},
-            "json",
-        )
-        project_user_role_id = res.json()[0]["_id"] if res else None
-
-        # POST
-        res = engine.test(
-            "Create test project",
-            "POST",
-            "/admin/v1/projects",
-            headers_json,
-            {"name": "test"},
-            (201),
-            {"Location": "/admin/v1/projects/", "Content-Type": "application/json"},
-            "json",
-        )
-        test_project_id = engine.last_id if res else None
-        res = engine.test(
-            "Create role without permissions",
-            "POST",
-            "/admin/v1/roles",
-            headers_json,
-            {"name": "empty"},
-            (201),
-            {"Content-Type": "application/json"},
-            "json",
-        )
-        empty_role_id = engine.last_id if res else None
-        res = engine.test(
-            "Create role with default permissions",
-            "POST",
-            "/admin/v1/roles",
-            headers_json,
-            {"name": "default", "permissions": {"default": True}},
-            (201),
-            {"Location": "/admin/v1/roles/", "Content-Type": "application/json"},
-            "json",
-        )
-        default_role_id = engine.last_id if res else None
-        res = engine.test(
-            "Create role with token permissions",
-            "POST",
-            "/admin/v1/roles",
-            headers_json,
-            {
-                "name": "tokens",
-                "permissions": {"tokens": True},
-            },  # is default required ?
-            (201),
-            {"Location": "/admin/v1/roles/", "Content-Type": "application/json"},
-            "json",
-        )
-        token_role_id = engine.last_id if res else None
-        pr = "project-role mappings"
-        res = engine.test(
-            "Create user without " + pr,
-            "POST",
-            "/admin/v1/users",
-            headers_json,
-            {"username": "empty", "password": "empty"},
-            201,
-            {"Content-Type": "application/json"},
-            "json",
-        )
-        empty_user_id = engine.last_id if res else None
-        if (
-            admin_project_id
-            and test_project_id
-            and project_admin_role_id
-            and project_user_role_id
-        ):
-            data = {"username": "test", "password": "test"}
-            data["project_role_mappings"] = [
-                {"project": test_project_id, "role": project_admin_role_id},
-                {"project": admin_project_id, "role": project_user_role_id},
-            ]
-            res = engine.test(
-                "Create user with " + pr,
-                "POST",
-                "/admin/v1/users",
-                headers_json,
-                data,
-                (201),
-                {"Content-Type": "application/json"},
-                "json",
-            )
-            test_user_id = engine.last_id if res else None
-
-        # PUT
-        if test_user_id:
-            engine.test(
-                "Modify test user's password",
-                "PUT",
-                "/admin/v1/users/" + test_user_id,
-                headers_json,
-                {"password": "password"},
-                (204),
-                {},
-                0,
-            )
-        if (
-            empty_user_id
-            and admin_project_id
-            and test_project_id
-            and project_admin_role_id
-            and project_user_role_id
-        ):
-            data = {
-                "project_role_mappings": [
-                    {"project": test_project_id, "role": project_admin_role_id},
-                    {"project": admin_project_id, "role": project_user_role_id},
-                ]
-            }
-            engine.test(
-                "Modify empty user's " + pr,
-                "PUT",
-                "/admin/v1/users/" + empty_user_id,
-                headers_json,
-                data,
-                (204),
-                {},
-                0,
-            )
-
-        # DELETE
-        if empty_user_id:
-            engine.test(
-                "Delete empty user",
-                "DELETE",
-                "/admin/v1/users/" + empty_user_id,
-                headers_json,
-                {},
-                (204),
-                {},
-                0,
-            )
-        if test_user_id:
-            engine.test(
-                "Delete test user",
-                "DELETE",
-                "/admin/v1/users/" + test_user_id,
-                headers_json,
-                {},
-                (204),
-                {},
-                0,
-            )
-        if empty_role_id:
-            engine.test(
-                "Delete empty role",
-                "DELETE",
-                "/admin/v1/roles/" + empty_role_id,
-                headers_json,
-                {},
-                (204),
-                {},
-                0,
-            )
-        if default_role_id:
-            engine.test(
-                "Delete default role",
-                "DELETE",
-                "/admin/v1/roles/" + default_role_id,
-                headers_json,
-                {},
-                (204),
-                {},
-                0,
-            )
-        if token_role_id:
-            engine.test(
-                "Delete token role",
-                "DELETE",
-                "/admin/v1/roles/" + token_role_id,
-                headers_json,
-                {},
-                (204),
-                {},
-                0,
-            )
-        if test_project_id:
-            engine.test(
-                "Delete test project",
-                "DELETE",
-                "/admin/v1/projects/" + test_project_id,
-                headers_json,
-                {},
-                (204),
-                {},
-                0,
-            )
-
-        # END Tests
-
-        engine.remove_authorization()  # To finish
-
-
-class TestNbiQuotas:
-    description = "Test NBI Quotas"
-
-    @staticmethod
-    def run(engine, test_osm, manual_check, test_params=None):
-        engine.set_test_name("NBI-Quotas_")
-        # backend = test_params.get("backend") if test_params else None   # UNUSED
-
-        test_username = "test-nbi-quotas"
-        test_password = "test-nbi-quotas"
-        test_project = "test-nbi-quotas"
-
-        test_vim = "test-nbi-quotas"
-        test_wim = "test-nbi-quotas"
-        test_sdn = "test-nbi-quotas"
-
-        test_user_id = None
-        test_project_id = None
-
-        test_vim_ids = []
-        test_wim_ids = []
-        test_sdn_ids = []
-        test_vnfd_ids = []
-        test_nsd_ids = []
-        test_nst_ids = []
-        test_pdu_ids = []
-        test_nsr_ids = []
-        test_nsi_ids = []
-
-        # Save admin access data
-        admin_username = engine.user
-        admin_password = engine.password
-        admin_project = engine.project
-
-        # Get admin access
-        engine.get_autorization()
-        admin_token = engine.last_id
-
-        # Check that test project,user do not exist
-        res1 = engine.test(
-            "Check that test project doesn't exist",
-            "GET",
-            "/admin/v1/projects/" + test_project,
-            headers_json,
-            {},
-            (404),
-            {},
-            True,
-        )
-        res2 = engine.test(
-            "Check that test user doesn't exist",
-            "GET",
-            "/admin/v1/users/" + test_username,
-            headers_json,
-            {},
-            (404),
-            {},
-            True,
-        )
-        if None in [res1, res2]:
-            engine.remove_authorization()
-            logger.error("Test project and/or user already exist")
-            return
-
-        # Create test project&user
-        res = engine.test(
-            "Create test project",
-            "POST",
-            "/admin/v1/projects",
-            headers_json,
-            {
-                "name": test_username,
-                "quotas": {
-                    "vnfds": 2,
-                    "nsds": 2,
-                    "nsts": 1,
-                    "pdus": 1,
-                    "nsrs": 2,
-                    "nsis": 1,
-                    "vim_accounts": 1,
-                    "wim_accounts": 1,
-                    "sdns": 1,
-                },
-            },
-            (201),
-            r_header_json,
-            "json",
-        )
-        test_project_id = engine.last_id if res else None
-        res = engine.test(
-            "Create test user",
-            "POST",
-            "/admin/v1/users",
-            headers_json,
-            {
-                "username": test_username,
-                "password": test_password,
-                "project_role_mappings": [
-                    {"project": test_project, "role": "project_admin"}
-                ],
-            },
-            (201),
-            r_header_json,
-            "json",
-        )
-        test_user_id = engine.last_id if res else None
-
-        if test_project_id and test_user_id:
-
-            # Get user access
-            engine.token = None
-            engine.user = test_username
-            engine.password = test_password
-            engine.project = test_project
-            engine.get_autorization()
-            user_token = engine.last_id
-
-            # Create test VIM
-            res = engine.test(
-                "Create test VIM",
-                "POST",
-                "/admin/v1/vim_accounts",
-                headers_json,
-                {
-                    "name": test_vim,
-                    "vim_type": "openvim",
-                    "vim_user": test_username,
-                    "vim_password": test_password,
-                    "vim_tenant_name": test_project,
-                    "vim_url": "https://0.0.0.0:0/v0.0",
-                },
-                (202),
-                r_header_json,
-                "json",
-            )
-            test_vim_ids += [engine.last_id if res else None]
-
-            res = engine.test(
-                "Try to create second test VIM",
-                "POST",
-                "/admin/v1/vim_accounts",
-                headers_json,
-                {
-                    "name": test_vim + "_2",
-                    "vim_type": "openvim",
-                    "vim_user": test_username,
-                    "vim_password": test_password,
-                    "vim_tenant_name": test_project,
-                    "vim_url": "https://0.0.0.0:0/v0.0",
-                },
-                (422),
-                r_header_json,
-                "json",
-            )
-            test_vim_ids += [engine.last_id if res is None else None]
-
-            res = engine.test(
-                "Try to create second test VIM with FORCE",
-                "POST",
-                "/admin/v1/vim_accounts?FORCE",
-                headers_json,
-                {
-                    "name": test_vim + "_3",
-                    "vim_type": "openvim",
-                    "vim_user": test_username,
-                    "vim_password": test_password,
-                    "vim_tenant_name": test_project,
-                    "vim_url": "https://0.0.0.0:0/v0.0",
-                },
-                (202),
-                r_header_json,
-                "json",
-            )
-            test_vim_ids += [engine.last_id if res else None]
-
-            if test_vim_ids[0]:
-
-                # Download descriptor files (if required)
-                test_dir = "/tmp/" + test_username + "/"
-                test_url = "https://osm-download.etsi.org/ftp/osm-6.0-six/7th-hackfest/packages/"
-                vnfd_filenames = [
-                    "slice_hackfest_vnfd.tar.gz",
-                    "slice_hackfest_middle_vnfd.tar.gz",
-                ]
-                nsd_filenames = [
-                    "slice_hackfest_nsd.tar.gz",
-                    "slice_hackfest_middle_nsd.tar.gz",
-                ]
-                nst_filenames = ["slice_hackfest_nstd.yaml"]
-                pdu_filenames = ["PDU_router.yaml"]
-                desc_filenames = (
-                    vnfd_filenames + nsd_filenames + nst_filenames + pdu_filenames
-                )
-                if not os.path.exists(test_dir):
-                    os.makedirs(test_dir)
-                for filename in desc_filenames:
-                    if not os.path.exists(test_dir + filename):
-                        res = requests.get(test_url + filename)
-                        if res.status_code < 300:
-                            with open(test_dir + filename, "wb") as file:
-                                file.write(res.content)
-
-                if all([os.path.exists(test_dir + p) for p in desc_filenames]):
-
-                    # Test VNFD Quotas
-                    res = engine.test(
-                        "Create test VNFD #1",
-                        "POST",
-                        "/vnfpkgm/v1/vnf_packages_content",
-                        headers_zip_json,
-                        "@b" + test_dir + vnfd_filenames[0],
-                        (201),
-                        r_header_json,
-                        "json",
-                    )
-                    test_vnfd_ids += [engine.last_id if res else None]
-                    res = engine.test(
-                        "Create test VNFD #2",
-                        "POST",
-                        "/vnfpkgm/v1/vnf_packages_content",
-                        headers_zip_json,
-                        "@b" + test_dir + vnfd_filenames[1],
-                        (201),
-                        r_header_json,
-                        "json",
-                    )
-                    test_vnfd_ids += [engine.last_id if res else None]
-                    res = engine.test(
-                        "Try to create extra test VNFD",
-                        "POST",
-                        "/vnfpkgm/v1/vnf_packages_content",
-                        headers_zip_json,
-                        "@b" + test_dir + vnfd_filenames[0],
-                        (422),
-                        r_header_json,
-                        "json",
-                    )
-                    test_vnfd_ids += [engine.last_id if res is None else None]
-                    res = engine.test(
-                        "Try to create extra test VNFD with FORCE",
-                        "POST",
-                        "/vnfpkgm/v1/vnf_packages_content?FORCE",
-                        headers_zip_json,
-                        "@b" + test_dir + vnfd_filenames[0],
-                        (201),
-                        r_header_json,
-                        "json",
-                    )
-                    test_vnfd_ids += [engine.last_id if res else None]
-
-                    # Remove extra VNFDs to prevent further errors
-                    for i in [2, 3]:
-                        if test_vnfd_ids[i]:
-                            res = engine.test(
-                                "Delete test VNFD #" + str(i),
-                                "DELETE",
-                                "/vnfpkgm/v1/vnf_packages_content/"
-                                + test_vnfd_ids[i]
-                                + "?FORCE",
-                                headers_json,
-                                {},
-                                (204),
-                                {},
-                                0,
-                            )
-                            if res:
-                                test_vnfd_ids[i] = None
-
-                    if test_vnfd_ids[0] and test_vnfd_ids[1]:
-
-                        # Test NSD Quotas
-                        res = engine.test(
-                            "Create test NSD #1",
-                            "POST",
-                            "/nsd/v1/ns_descriptors_content",
-                            headers_zip_json,
-                            "@b" + test_dir + nsd_filenames[0],
-                            (201),
-                            r_header_json,
-                            "json",
-                        )
-                        test_nsd_ids += [engine.last_id if res else None]
-                        res = engine.test(
-                            "Create test NSD #2",
-                            "POST",
-                            "/nsd/v1/ns_descriptors_content",
-                            headers_zip_json,
-                            "@b" + test_dir + nsd_filenames[1],
-                            (201),
-                            r_header_json,
-                            "json",
-                        )
-                        test_nsd_ids += [engine.last_id if res else None]
-                        res = engine.test(
-                            "Try to create extra test NSD",
-                            "POST",
-                            "/nsd/v1/ns_descriptors_content",
-                            headers_zip_json,
-                            "@b" + test_dir + nsd_filenames[0],
-                            (422),
-                            r_header_json,
-                            "json",
-                        )
-                        test_nsd_ids += [engine.last_id if res is None else None]
-                        res = engine.test(
-                            "Try to create extra test NSD with FORCE",
-                            "POST",
-                            "/nsd/v1/ns_descriptors_content?FORCE",
-                            headers_zip_json,
-                            "@b" + test_dir + nsd_filenames[0],
-                            (201),
-                            r_header_json,
-                            "json",
-                        )
-                        test_nsd_ids += [engine.last_id if res else None]
-
-                        # Remove extra NSDs to prevent further errors
-                        for i in [2, 3]:
-                            if test_nsd_ids[i]:
-                                res = engine.test(
-                                    "Delete test NSD #" + str(i),
-                                    "DELETE",
-                                    "/nsd/v1/ns_descriptors_content/"
-                                    + test_nsd_ids[i]
-                                    + "?FORCE",
-                                    headers_json,
-                                    {},
-                                    (204),
-                                    {},
-                                    0,
-                                )
-                                if res:
-                                    test_nsd_ids[i] = None
-
-                        if test_nsd_ids[0] and test_nsd_ids[1]:
-
-                            # Test NSR Quotas
-                            res = engine.test(
-                                "Create test NSR #1",
-                                "POST",
-                                "/nslcm/v1/ns_instances_content",
-                                headers_json,
-                                {
-                                    "nsName": test_username + "_1",
-                                    "nsdId": test_nsd_ids[0],
-                                    "vimAccountId": test_vim_ids[0],
-                                },
-                                (201),
-                                r_header_json,
-                                "json",
-                            )
-                            test_nsr_ids += [engine.last_id if res else None]
-                            res = engine.test(
-                                "Create test NSR #2",
-                                "POST",
-                                "/nslcm/v1/ns_instances_content",
-                                headers_json,
-                                {
-                                    "nsName": test_username + "_2",
-                                    "nsdId": test_nsd_ids[1],
-                                    "vimAccountId": test_vim_ids[0],
-                                },
-                                (201),
-                                r_header_json,
-                                "json",
-                            )
-                            test_nsr_ids += [engine.last_id if res else None]
-                            res = engine.test(
-                                "Try to create extra test NSR",
-                                "POST",
-                                "/nslcm/v1/ns_instances_content",
-                                headers_json,
-                                {
-                                    "nsName": test_username + "_3",
-                                    "nsdId": test_nsd_ids[0],
-                                    "vimAccountId": test_vim_ids[0],
-                                },
-                                (422),
-                                r_header_json,
-                                "json",
-                            )
-                            test_nsr_ids += [engine.last_id if res is None else None]
-                            res = engine.test(
-                                "Try to create test NSR with FORCE",
-                                "POST",
-                                "/nslcm/v1/ns_instances_content?FORCE",
-                                headers_json,
-                                {
-                                    "nsName": test_username + "_4",
-                                    "nsdId": test_nsd_ids[0],
-                                    "vimAccountId": test_vim_ids[0],
-                                },
-                                (201),
-                                r_header_json,
-                                "json",
-                            )
-                            test_nsr_ids += [engine.last_id if res else None]
-
-                            # Test NST Quotas
-                            res = engine.test(
-                                "Create test NST",
-                                "POST",
-                                "/nst/v1/netslice_templates_content",
-                                headers_txt_json,
-                                "@b" + test_dir + nst_filenames[0],
-                                (201),
-                                r_header_json,
-                                "json",
-                            )
-                            test_nst_ids += [engine.last_id if res else None]
-                            res = engine.test(
-                                "Try to create extra test NST",
-                                "POST",
-                                "/nst/v1/netslice_templates_content",
-                                headers_txt_json,
-                                "@b" + test_dir + nst_filenames[0],
-                                (422),
-                                r_header_json,
-                                "json",
-                            )
-                            test_nst_ids += [engine.last_id if res is None else None]
-                            res = engine.test(
-                                "Try to create extra test NST with FORCE",
-                                "POST",
-                                "/nst/v1/netslice_templates_content?FORCE",
-                                headers_txt_json,
-                                "@b" + test_dir + nst_filenames[0],
-                                (201),
-                                r_header_json,
-                                "json",
-                            )
-                            test_nst_ids += [engine.last_id if res else None]
-
-                            if test_nst_ids[0]:
-                                # Remove NSR Quota
-                                engine.set_header(
-                                    {"Authorization": "Bearer {}".format(admin_token)}
-                                )
-                                res = engine.test(
-                                    "Remove NSR Quota",
-                                    "PUT",
-                                    "/admin/v1/projects/" + test_project_id,
-                                    headers_json,
-                                    {"quotas": {"nsrs": None}},
-                                    (204),
-                                    {},
-                                    0,
-                                )
-                                engine.set_header(
-                                    {"Authorization": "Bearer {}".format(user_token)}
-                                )
-                                if res:
-                                    # Test NSI Quotas
-                                    res = engine.test(
-                                        "Create test NSI",
-                                        "POST",
-                                        "/nsilcm/v1/netslice_instances_content",
-                                        headers_json,
-                                        {
-                                            "nsiName": test_username,
-                                            "nstId": test_nst_ids[0],
-                                            "vimAccountId": test_vim_ids[0],
-                                        },
-                                        (201),
-                                        r_header_json,
-                                        "json",
-                                    )
-                                    test_nsi_ids += [engine.last_id if res else None]
-                                    res = engine.test(
-                                        "Try to create extra test NSI",
-                                        "POST",
-                                        "/nsilcm/v1/netslice_instances_content",
-                                        headers_json,
-                                        {
-                                            "nsiName": test_username,
-                                            "nstId": test_nst_ids[0],
-                                            "vimAccountId": test_vim_ids[0],
-                                        },
-                                        (400),
-                                        r_header_json,
-                                        "json",
-                                    )
-                                    test_nsi_ids += [
-                                        engine.last_id if res is None else None
-                                    ]
-                                    res = engine.test(
-                                        "Try to create extra test NSI with FORCE",
-                                        "POST",
-                                        "/nsilcm/v1/netslice_instances_content?FORCE",
-                                        headers_json,
-                                        {
-                                            "nsiName": test_username,
-                                            "nstId": test_nst_ids[0],
-                                            "vimAccountId": test_vim_ids[0],
-                                        },
-                                        (201),
-                                        r_header_json,
-                                        "json",
-                                    )
-                                    test_nsi_ids += [engine.last_id if res else None]
-
-                    # Test PDU Quotas
-                    with open(test_dir + pdu_filenames[0], "rb") as file:
-                        pdu_text = re.sub(
-                            r"ip-address: *\[[^\]]*\]",
-                            "ip-address: '0.0.0.0'",
-                            file.read().decode("utf-8"),
-                        )
-                    with open(test_dir + pdu_filenames[0], "wb") as file:
-                        file.write(pdu_text.encode("utf-8"))
-                    res = engine.test(
-                        "Create test PDU",
-                        "POST",
-                        "/pdu/v1/pdu_descriptors",
-                        headers_yaml,
-                        "@b" + test_dir + pdu_filenames[0],
-                        (201),
-                        r_header_yaml,
-                        "yaml",
-                    )
-                    test_pdu_ids += [engine.last_id if res else None]
-                    res = engine.test(
-                        "Try to create extra test PDU",
-                        "POST",
-                        "/pdu/v1/pdu_descriptors",
-                        headers_yaml,
-                        "@b" + test_dir + pdu_filenames[0],
-                        (422),
-                        r_header_yaml,
-                        "yaml",
-                    )
-                    test_pdu_ids += [engine.last_id if res is None else None]
-                    res = engine.test(
-                        "Try to create extra test PDU with FORCE",
-                        "POST",
-                        "/pdu/v1/pdu_descriptors?FORCE",
-                        headers_yaml,
-                        "@b" + test_dir + pdu_filenames[0],
-                        (201),
-                        r_header_yaml,
-                        "yaml",
-                    )
-                    test_pdu_ids += [engine.last_id if res else None]
-
-                    # Cleanup
-                    for i, id in enumerate(test_nsi_ids):
-                        if id:
-                            engine.test(
-                                "Delete test NSI #" + str(i),
-                                "DELETE",
-                                "/nsilcm/v1/netslice_instances_content/"
-                                + id
-                                + "?FORCE",
-                                headers_json,
-                                {},
-                                (204),
-                                {},
-                                0,
-                            )
-                    for i, id in enumerate(test_nsr_ids):
-                        if id:
-                            engine.test(
-                                "Delete test NSR #" + str(i),
-                                "DELETE",
-                                "/nslcm/v1/ns_instances_content/" + id + "?FORCE",
-                                headers_json,
-                                {},
-                                (204),
-                                {},
-                                0,
-                            )
-                    for i, id in enumerate(test_nst_ids):
-                        if id:
-                            engine.test(
-                                "Delete test NST #" + str(i),
-                                "DELETE",
-                                "/nst/v1/netslice_templates_content/" + id + "?FORCE",
-                                headers_json,
-                                {},
-                                (204),
-                                {},
-                                0,
-                            )
-                    for i, id in enumerate(test_nsd_ids):
-                        if id:
-                            engine.test(
-                                "Delete test NSD #" + str(i),
-                                "DELETE",
-                                "/nsd/v1/ns_descriptors_content/" + id + "?FORCE",
-                                headers_json,
-                                {},
-                                (204),
-                                {},
-                                0,
-                            )
-                    for i, id in enumerate(test_vnfd_ids):
-                        if id:
-                            engine.test(
-                                "Delete test VNFD #" + str(i),
-                                "DELETE",
-                                "/vnfpkgm/v1/vnf_packages_content/" + id + "?FORCE",
-                                headers_json,
-                                {},
-                                (204),
-                                {},
-                                0,
-                            )
-                    for i, id in enumerate(test_pdu_ids):
-                        if id:
-                            engine.test(
-                                "Delete test PDU #" + str(i),
-                                "DELETE",
-                                "/pdu/v1/pdu_descriptors/" + id + "?FORCE",
-                                headers_json,
-                                {},
-                                (204),
-                                {},
-                                0,
-                            )
-
-                    # END Test NBI Quotas
-
-            # Test WIM Quotas
-            res = engine.test(
-                "Create test WIM",
-                "POST",
-                "/admin/v1/wim_accounts",
-                headers_json,
-                {
-                    "name": test_wim,
-                    "wim_type": "onos",
-                    "wim_url": "https://0.0.0.0:0/v0.0",
-                },
-                (202),
-                r_header_json,
-                "json",
-            )
-            test_wim_ids += [engine.last_id if res else None]
-            res = engine.test(
-                "Try to create second test WIM",
-                "POST",
-                "/admin/v1/wim_accounts",
-                headers_json,
-                {
-                    "name": test_wim + "_2",
-                    "wim_type": "onos",
-                    "wim_url": "https://0.0.0.0:0/v0.0",
-                },
-                (422),
-                r_header_json,
-                "json",
-            )
-            test_wim_ids += [engine.last_id if res is None else None]
-            res = engine.test(
-                "Try to create second test WIM with FORCE",
-                "POST",
-                "/admin/v1/wim_accounts?FORCE",
-                headers_json,
-                {
-                    "name": test_wim + "_3",
-                    "wim_type": "onos",
-                    "wim_url": "https://0.0.0.0:0/v0.0",
-                },
-                (202),
-                r_header_json,
-                "json",
-            )
-            test_wim_ids += [engine.last_id if res else None]
-
-            # Test SDN Quotas
-            res = engine.test(
-                "Create test SDN",
-                "POST",
-                "/admin/v1/sdns",
-                headers_json,
-                {
-                    "name": test_sdn,
-                    "type": "onos",
-                    "ip": "0.0.0.0",
-                    "port": 9999,
-                    "dpid": "00:00:00:00:00:00:00:00",
-                },
-                (202),
-                r_header_json,
-                "json",
-            )
-            test_sdn_ids += [engine.last_id if res else None]
-            res = engine.test(
-                "Try to create second test SDN",
-                "POST",
-                "/admin/v1/sdns",
-                headers_json,
-                {
-                    "name": test_sdn + "_2",
-                    "type": "onos",
-                    "ip": "0.0.0.0",
-                    "port": 9999,
-                    "dpid": "00:00:00:00:00:00:00:00",
-                },
-                (422),
-                r_header_json,
-                "json",
-            )
-            test_sdn_ids += [engine.last_id if res is None else None]
-            res = engine.test(
-                "Try to create second test SDN with FORCE",
-                "POST",
-                "/admin/v1/sdns?FORCE",
-                headers_json,
-                {
-                    "name": test_sdn + "_3",
-                    "type": "onos",
-                    "ip": "0.0.0.0",
-                    "port": 9999,
-                    "dpid": "00:00:00:00:00:00:00:00",
-                },
-                (202),
-                r_header_json,
-                "json",
-            )
-            test_sdn_ids += [engine.last_id if res else None]
-
-            # Cleanup
-            for i, id in enumerate(test_vim_ids):
-                if id:
-                    engine.test(
-                        "Delete test VIM #" + str(i),
-                        "DELETE",
-                        "/admin/v1/vim_accounts/" + id + "?FORCE",
-                        headers_json,
-                        {},
-                        (202),
-                        {},
-                        0,
-                    )
-            for i, id in enumerate(test_wim_ids):
-                if id:
-                    engine.test(
-                        "Delete test WIM #" + str(i),
-                        "DELETE",
-                        "/admin/v1/wim_accounts/" + id + "?FORCE",
-                        headers_json,
-                        {},
-                        (202),
-                        {},
-                        0,
-                    )
-            for i, id in enumerate(test_sdn_ids):
-                if id:
-                    engine.test(
-                        "Delete test SDN #" + str(i),
-                        "DELETE",
-                        "/admin/v1/sdns/" + id + "?FORCE",
-                        headers_json,
-                        {},
-                        (202),
-                        {},
-                        0,
-                    )
-
-            # Release user access
-            engine.remove_authorization()
-
-        # Cleanup
-        engine.user = admin_username
-        engine.password = admin_password
-        engine.project = admin_project
-        engine.get_autorization()
-        if test_user_id:
-            engine.test(
-                "Delete test user",
-                "DELETE",
-                "/admin/v1/users/" + test_user_id + "?FORCE",
-                headers_json,
-                {},
-                (204),
-                {},
-                0,
-            )
-        if test_project_id:
-            engine.test(
-                "Delete test project",
-                "DELETE",
-                "/admin/v1/projects/" + test_project_id + "?FORCE",
-                headers_json,
-                {},
-                (204),
-                {},
-                0,
-            )
-        engine.remove_authorization()
-
-    # END class TestNbiQuotas
-
-
-if __name__ == "__main__":
-    global logger
-    test = ""
-
-    # Disable warnings from self-signed certificates.
-    requests.packages.urllib3.disable_warnings()
-    try:
-        logging.basicConfig(format="%(levelname)s %(message)s", level=logging.ERROR)
-        logger = logging.getLogger("NBI")
-        # load parameters and configuration
-        opts, args = getopt.getopt(
-            sys.argv[1:],
-            "hvu:p:",
-            [
-                "url=",
-                "user=",
-                "password=",
-                "help",
-                "version",
-                "verbose",
-                "no-verbose",
-                "project=",
-                "insecure",
-                "timeout",
-                "timeout-deploy",
-                "timeout-configure",
-                "test=",
-                "list",
-                "test-osm",
-                "manual-check",
-                "params=",
-                "fail-fast",
-            ],
-        )
-        url = "https://localhost:9999/osm"
-        user = password = project = "admin"
-        test_osm = False
-        manual_check = False
-        verbose = 0
-        verify = True
-        fail_fast = False
-        test_classes = {
-            "NonAuthorized": TestNonAuthorized,
-            "FakeVIM": TestFakeVim,
-            "Users-Projects": TestUsersProjects,
-            "Projects-Descriptors": TestProjectsDescriptors,
-            "VIM-SDN": TestVIMSDN,
-            "Deploy-Custom": TestDeploy,
-            "Deploy-Hackfest-Cirros": TestDeployHackfestCirros,
-            "Deploy-Hackfest-Cirros-Scaling": TestDeployHackfestCirrosScaling,
-            "Deploy-Hackfest-3Charmed": TestDeployHackfest3Charmed,
-            "Deploy-Hackfest-3Charmed2": TestDeployHackfest3Charmed2,
-            "Deploy-Hackfest-3Charmed3": TestDeployHackfest3Charmed3,
-            "Deploy-Hackfest-4": TestDeployHackfest4,
-            "Deploy-CirrosMacIp": TestDeployIpMac,
-            "Descriptors": TestDescriptors,
-            "Deploy-Hackfest1": TestDeployHackfest1,
-            # "Deploy-MultiVIM": TestDeployMultiVIM,
-            "Deploy-SingleVdu": TestDeploySingleVdu,
-            "Deploy-Hnfd": TestDeployHnfd,
-            "Upload-Slice-Template": TestNetSliceTemplates,
-            "Deploy-Slice-Instance": TestNetSliceInstances,
-            "Deploy-SimpleCharm": TestDeploySimpleCharm,
-            "Deploy-SimpleCharm2": TestDeploySimpleCharm2,
-            "Authentication": TestAuthentication,
-            "NBI-Quotas": TestNbiQuotas,
-        }
-        test_to_do = []
-        test_params = {}
-
-        for o, a in opts:
-            # print("parameter:", o, a)
-            if o == "--version":
-                print("test version " + __version__ + " " + version_date)
-                exit()
-            elif o == "--list":
-                for test, test_class in sorted(test_classes.items()):
-                    print("{:32} {}".format(test + ":", test_class.description))
-                exit()
-            elif o in ("-v", "--verbose"):
-                verbose += 1
-            elif o == "no-verbose":
-                verbose = -1
-            elif o in ("-h", "--help"):
-                usage()
-                sys.exit()
-            elif o == "--test-osm":
-                test_osm = True
-            elif o == "--manual-check":
-                manual_check = True
-            elif o == "--url":
-                url = a
-            elif o in ("-u", "--user"):
-                user = a
-            elif o in ("-p", "--password"):
-                password = a
-            elif o == "--project":
-                project = a
-            elif o == "--fail-fast":
-                fail_fast = True
-            elif o == "--test":
-                for _test in a.split(","):
-                    if _test not in test_classes:
-                        print(
-                            "Invalid test name '{}'. Use option '--list' to show available tests".format(
-                                _test
-                            ),
-                            file=sys.stderr,
-                        )
-                        exit(1)
-                    test_to_do.append(_test)
-            elif o == "--params":
-                param_key, _, param_value = a.partition("=")
-                text_index = len(test_to_do)
-                if text_index not in test_params:
-                    test_params[text_index] = {}
-                test_params[text_index][param_key] = param_value
-            elif o == "--insecure":
-                verify = False
-            elif o == "--timeout":
-                timeout = int(a)
-            elif o == "--timeout-deploy":
-                timeout_deploy = int(a)
-            elif o == "--timeout-configure":
-                timeout_configure = int(a)
-            else:
-                assert False, "Unhandled option"
-        if verbose == 0:
-            logger.setLevel(logging.WARNING)
-        elif verbose > 1:
-            logger.setLevel(logging.DEBUG)
-        else:
-            logger.setLevel(logging.ERROR)
-
-        test_rest = TestRest(url, user=user, password=password, project=project)
-        # print("tests to do:", test_to_do)
-        if test_to_do:
-            text_index = 0
-            for test in test_to_do:
-                if fail_fast and test_rest.failed_tests:
-                    break
-                text_index += 1
-                test_class = test_classes[test]
-                test_class().run(
-                    test_rest, test_osm, manual_check, test_params.get(text_index)
-                )
-        else:
-            for test, test_class in sorted(test_classes.items()):
-                if fail_fast and test_rest.failed_tests:
-                    break
-                test_class().run(test_rest, test_osm, manual_check, test_params.get(0))
-        test_rest.print_results()
-        exit(1 if test_rest.failed_tests else 0)
-
-    except TestException as e:
-        logger.error(test + "Test {} Exception: {}".format(test, str(e)))
-        exit(1)
-    except getopt.GetoptError as e:
-        logger.error(e)
-        print(e, file=sys.stderr)
-        exit(1)
-    except Exception as e:
-        logger.critical(test + " Exception: " + str(e), exc_info=True)
diff --git a/osm_nbi/tests/send_kafka.py b/osm_nbi/tests/send_kafka.py
deleted file mode 100755 (executable)
index d066d14..0000000
+++ /dev/null
@@ -1,64 +0,0 @@
-#! /usr/bin/python3
-# -*- coding: utf-8 -*-
-
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#    http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
-# implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import sys
-import requests
-import yaml
-from os import getenv
-
-__author__ = "Alfonso Tierno, alfonso.tiernosepulveda@telefonica.com"
-__date__ = "$2019-05-31$"
-__version__ = "0.1"
-version_date = "May 2019"
-
-
-def usage():
-    print("Usage: ", sys.argv[0], "topic key message")
-    print("   Sends a kafka message using URL test of NBI")
-    print("  host is defined by env OSMNBI_HOST (localhost by default)")
-    print("  port is defined by env OSMNBI_PORT (9999 by default)")
-    return
-
-
-if __name__ == "__main__":
-    try:
-        if "--help" in sys.argv:
-            usage()
-            exit(0)
-
-        if len(sys.argv) != 4:
-            print(
-                "missing parameters. Type --help for more information", file=sys.stderr
-            )
-            exit(1)
-
-        topic, key, message = sys.argv[1:]
-        host = getenv("OSMNBI_HOST", "localhost")
-        port = getenv("OSMNBI_PORT", "9999")
-        url = "https://{host}:{port}/osm/test/message/{topic}".format(
-            host=host, port=port, topic=topic
-        )
-        print(url)
-        data = {key: message}
-
-        r = requests.post(url, data=yaml.safe_dump(data), verify=False)
-        if r.status_code not in (200, 201, 202, 204):
-            print("Received code={}, content='{}'".format(r.status_code, r.text))
-            exit(1)
-        print("{} -> {}: {}".format(topic, key, message))
-
-    except Exception:
-        raise
index 734a289..6a44365 100755 (executable)
@@ -18,13 +18,14 @@ __author__ = "Pedro de la Cruz Ramos, pedro.delacruzramos@altran.com"
 __date__ = "$2019-10-019"
 
 import unittest
 __date__ = "$2019-10-019"
 
 import unittest
+import random
 from unittest import TestCase
 from unittest.mock import Mock, patch, call
 from uuid import uuid4
 from http import HTTPStatus
 from time import time
 from unittest import TestCase
 from unittest.mock import Mock, patch, call
 from uuid import uuid4
 from http import HTTPStatus
 from time import time
-from random import randint
 from osm_common import dbbase, fsbase, msgbase
 from osm_common import dbbase, fsbase, msgbase
+from osm_common.dbmemory import DbMemory
 from osm_nbi import authconn, validation
 from osm_nbi.admin_topics import (
     ProjectTopicAuth,
 from osm_nbi import authconn, validation
 from osm_nbi.admin_topics import (
     ProjectTopicAuth,
@@ -35,6 +36,7 @@ from osm_nbi.admin_topics import (
 )
 from osm_nbi.engine import EngineException
 from osm_nbi.authconn import AuthconnNotFoundException
 )
 from osm_nbi.engine import EngineException
 from osm_nbi.authconn import AuthconnNotFoundException
+from osm_nbi.authconn_internal import AuthconnInternal
 
 
 test_pid = str(uuid4())
 
 
 test_pid = str(uuid4())
@@ -240,7 +242,10 @@ class Test_ProjectTopicAuth(TestCase):
         with self.subTest(i=1):
             self.auth.get_project_list.side_effect = [[proj], []]
             new_name = "new-project-name"
         with self.subTest(i=1):
             self.auth.get_project_list.side_effect = [[proj], []]
             new_name = "new-project-name"
-            quotas = {"vnfds": randint(0, 100), "nsds": randint(0, 100)}
+            quotas = {
+                "vnfds": random.SystemRandom().randint(0, 100),
+                "nsds": random.SystemRandom().randint(0, 100),
+            }
             self.topic.edit(
                 self.fake_session, pid, {"name": new_name, "quotas": quotas}
             )
             self.topic.edit(
                 self.fake_session, pid, {"name": new_name, "quotas": quotas}
             )
@@ -255,7 +260,7 @@ class Test_ProjectTopicAuth(TestCase):
             self.assertEqual(content["quotas"], quotas, "Wrong quotas")
         with self.subTest(i=2):
             new_name = "other-project-name"
             self.assertEqual(content["quotas"], quotas, "Wrong quotas")
         with self.subTest(i=2):
             new_name = "other-project-name"
-            quotas = {"baditems": randint(0, 100)}
+            quotas = {"baditems": random.SystemRandom().randint(0, 100)}
             self.auth.get_project_list.side_effect = [[proj], []]
             with self.assertRaises(EngineException, msg="Accepted wrong quotas") as e:
                 self.topic.edit(
             self.auth.get_project_list.side_effect = [[proj], []]
             with self.assertRaises(EngineException, msg="Accepted wrong quotas") as e:
                 self.topic.edit(
@@ -774,9 +779,11 @@ class Test_UserTopicAuth(TestCase):
     @classmethod
     def setUpClass(cls):
         cls.test_name = "test-user-topic"
     @classmethod
     def setUpClass(cls):
         cls.test_name = "test-user-topic"
+        cls.password = "Test@123"
 
     def setUp(self):
 
     def setUp(self):
-        self.db = Mock(dbbase.DbBase())
+        # self.db = Mock(dbbase.DbBase())
+        self.db = DbMemory()
         self.fs = Mock(fsbase.FsBase())
         self.msg = Mock(msgbase.MsgBase())
         self.auth = Mock(authconn.Authconn(None, None, None))
         self.fs = Mock(fsbase.FsBase())
         self.msg = Mock(msgbase.MsgBase())
         self.auth = Mock(authconn.Authconn(None, None, None))
@@ -809,7 +816,7 @@ class Test_UserTopicAuth(TestCase):
                 self.fake_session,
                 {
                     "username": self.test_name,
                 self.fake_session,
                 {
                     "username": self.test_name,
-                    "password": self.test_name,
+                    "password": self.password,
                     "project_role_mappings": prms_in,
                 },
             )
                     "project_role_mappings": prms_in,
                 },
             )
@@ -817,7 +824,7 @@ class Test_UserTopicAuth(TestCase):
             self.assertEqual(uid2, uid1, "Wrong project identifier")
             content = self.auth.create_user.call_args[0][0]
             self.assertEqual(content["username"], self.test_name, "Wrong project name")
             self.assertEqual(uid2, uid1, "Wrong project identifier")
             content = self.auth.create_user.call_args[0][0]
             self.assertEqual(content["username"], self.test_name, "Wrong project name")
-            self.assertEqual(content["password"], self.test_name, "Wrong password")
+            self.assertEqual(content["password"], self.password, "Wrong password")
             self.assertEqual(
                 content["project_role_mappings"],
                 prms_out,
             self.assertEqual(
                 content["project_role_mappings"],
                 prms_out,
@@ -841,7 +848,7 @@ class Test_UserTopicAuth(TestCase):
                 self.fake_session,
                 {
                     "username": self.test_name,
                 self.fake_session,
                 {
                     "username": self.test_name,
-                    "password": self.test_name,
+                    "password": self.password,
                     "projects": ["some_project"],
                 },
             )
                     "projects": ["some_project"],
                 },
             )
@@ -849,7 +856,7 @@ class Test_UserTopicAuth(TestCase):
             self.assertEqual(uid2, uid1, "Wrong project identifier")
             content = self.auth.create_user.call_args[0][0]
             self.assertEqual(content["username"], self.test_name, "Wrong project name")
             self.assertEqual(uid2, uid1, "Wrong project identifier")
             content = self.auth.create_user.call_args[0][0]
             self.assertEqual(content["username"], self.test_name, "Wrong project name")
-            self.assertEqual(content["password"], self.test_name, "Wrong password")
+            self.assertEqual(content["password"], self.password, "Wrong password")
             self.assertEqual(
                 content["project_role_mappings"],
                 prms_out,
             self.assertEqual(
                 content["project_role_mappings"],
                 prms_out,
@@ -871,7 +878,7 @@ class Test_UserTopicAuth(TestCase):
                     self.fake_session,
                     {
                         "username": "other-project-name",
                     self.fake_session,
                     {
                         "username": "other-project-name",
-                        "password": "other-password",
+                        "password": "Other@pwd1",
                         "project_role_mappings": [{}],
                     },
                 )
                         "project_role_mappings": [{}],
                     },
                 )
@@ -896,7 +903,7 @@ class Test_UserTopicAuth(TestCase):
                     self.fake_session,
                     {
                         "username": "other-project-name",
                     self.fake_session,
                     {
                         "username": "other-project-name",
-                        "password": "other-password",
+                        "password": "Other@pwd1",
                         "projects": [],
                     },
                 )
                         "projects": [],
                     },
                 )
@@ -947,7 +954,7 @@ class Test_UserTopicAuth(TestCase):
                 {"_id": rid1, "name": "role-1"},
             ]
             new_name = "new-user-name"
                 {"_id": rid1, "name": "role-1"},
             ]
             new_name = "new-user-name"
-            new_pasw = "new-password"
+            new_pasw = "New@pwd1"
             add_prms = [{"project": pid2, "role": rid2}]
             rem_prms = [{"project": pid1, "role": rid1}]
             self.topic.edit(
             add_prms = [{"project": pid2, "role": rid2}]
             rem_prms = [{"project": pid1, "role": rid1}]
             self.topic.edit(
@@ -999,6 +1006,24 @@ class Test_UserTopicAuth(TestCase):
                 norm(str(e.exception)),
                 "Wrong exception text",
             )
                 norm(str(e.exception)),
                 "Wrong exception text",
             )
+        with self.subTest(i=3):
+            self.auth.get_user_list.side_effect = [[user], []]
+            self.auth.get_user.return_value = user
+            old_password = self.password
+            new_pasw = "New@pwd1"
+            self.topic.edit(
+                self.fake_session,
+                uid,
+                {
+                    "old_password": old_password,
+                    "password": new_pasw,
+                },
+            )
+            content = self.auth.update_user.call_args[0][0]
+            self.assertEqual(
+                content["old_password"], old_password, "Wrong old password"
+            )
+            self.assertEqual(content["password"], new_pasw, "Wrong user password")
 
     def test_delete_user(self):
         with self.subTest(i=1):
 
     def test_delete_user(self):
         with self.subTest(i=1):
@@ -1032,7 +1057,7 @@ class Test_UserTopicAuth(TestCase):
                     self.fake_session,
                     {
                         "username": uid,
                     self.fake_session,
                     {
                         "username": uid,
-                        "password": self.test_name,
+                        "password": self.password,
                         "projects": [test_pid],
                     },
                 )
                         "projects": [test_pid],
                     },
                 )
@@ -1060,7 +1085,7 @@ class Test_UserTopicAuth(TestCase):
                     self.fake_session,
                     {
                         "username": self.test_name,
                     self.fake_session,
                     {
                         "username": self.test_name,
-                        "password": self.test_name,
+                        "password": self.password,
                         "projects": [test_pid],
                     },
                 )
                         "projects": [test_pid],
                     },
                 )
@@ -1085,7 +1110,7 @@ class Test_UserTopicAuth(TestCase):
                     self.fake_session,
                     {
                         "username": self.test_name,
                     self.fake_session,
                     {
                         "username": self.test_name,
-                        "password": self.test_name,
+                        "password": self.password,
                         "projects": [str(uuid4())],
                     },
                 )
                         "projects": [str(uuid4())],
                     },
                 )
@@ -1205,6 +1230,143 @@ class Test_UserTopicAuth(TestCase):
                 "Wrong exception text",
             )
 
                 "Wrong exception text",
             )
 
+    def test_user_management(self):
+        self.config = {
+            "user_management": True,
+            "pwd_expire_days": 30,
+            "max_pwd_attempt": 5,
+            "account_expire_days": 90,
+            "version": "dev",
+            "deviceVendor": "test",
+            "deviceProduct": "test",
+        }
+        self.permissions = {"admin": True, "default": True}
+        now = time()
+        rid = str(uuid4())
+        role = {
+            "_id": rid,
+            "name": self.test_name,
+            "permissions": self.permissions,
+            "_admin": {"created": now, "modified": now},
+        }
+        self.db.create("roles", role)
+        admin_user = {
+            "_id": "72cd0cd6-e8e2-482c-9bc2-15b413bb8500",
+            "username": "admin",
+            "password": "bf0d9f988ad9b404464cf8c8749b298209b05fd404119bae0c11e247efbbc4cb",
+            "_admin": {
+                "created": 1663058370.7721832,
+                "modified": 1663681183.5651639,
+                "salt": "37587e7e0c2f4dbfb9416f3fb5543e2b",
+                "last_token_time": 1666876472.2962265,
+                "user_status": "always-active",
+                "retry_count": 0,
+            },
+            "project_role_mappings": [
+                {"project": "a595ce4e-09dc-4b24-9d6f-e723830bc66b", "role": rid}
+            ],
+        }
+        self.db.create("users", admin_user)
+        with self.subTest(i=1):
+            self.user_create = AuthconnInternal(self.config, self.db, self.permissions)
+            user_info = {"username": "user_mgmt_true", "password": "Test@123"}
+            self.user_create.create_user(user_info)
+            user = self.db.get_one("users", {"username": user_info["username"]})
+            self.assertEqual(user["username"], user_info["username"], "Wrong user name")
+            self.assertEqual(
+                user["_admin"]["user_status"], "active", "User status is unknown"
+            )
+            self.assertIn("password_expire_time", user["_admin"], "Key is not there")
+            self.assertIn("account_expire_time", user["_admin"], "Key is not there")
+        with self.subTest(i=2):
+            self.user_update = AuthconnInternal(self.config, self.db, self.permissions)
+            locked_user = {
+                "username": "user_lock",
+                "password": "c94ba8cfe81985cf5c84dff16d5bac95814ab17e44a8871755eb4cf3a27b7d3d",
+                "_admin": {
+                    "created": 1667207552.2191198,
+                    "modified": 1667207552.2191815,
+                    "salt": "560a5d51b1d64bb4b9cae0ccff3f1102",
+                    "user_status": "locked",
+                    "password_expire_time": 1667207552.2191815,
+                    "account_expire_time": 1674983552.2191815,
+                    "retry_count": 5,
+                    "last_token_time": 1667207552.2191815,
+                },
+                "_id": "73bbbb71-ed38-4b79-9f58-ece19e7e32d6",
+            }
+            self.db.create("users", locked_user)
+            user_info = {
+                "_id": "73bbbb71-ed38-4b79-9f58-ece19e7e32d6",
+                "system_admin_id": "72cd0cd6-e8e2-482c-9bc2-15b413bb8500",
+                "unlock": True,
+            }
+            self.assertEqual(
+                locked_user["_admin"]["user_status"], "locked", "User status is unknown"
+            )
+            self.user_update.update_user(user_info)
+            user = self.db.get_one("users", {"username": locked_user["username"]})
+            self.assertEqual(
+                user["username"], locked_user["username"], "Wrong user name"
+            )
+            self.assertEqual(
+                user["_admin"]["user_status"], "active", "User status is unknown"
+            )
+            self.assertEqual(user["_admin"]["retry_count"], 0, "retry_count is unknown")
+        with self.subTest(i=3):
+            self.user_update = AuthconnInternal(self.config, self.db, self.permissions)
+            expired_user = {
+                "username": "user_expire",
+                "password": "c94ba8cfe81985cf5c84dff16d5bac95814ab17e44a8871755eb4cf3a27b7d3d",
+                "_admin": {
+                    "created": 1665602087.601298,
+                    "modified": 1665636442.1245084,
+                    "salt": "560a5d51b1d64bb4b9cae0ccff3f1102",
+                    "user_status": "expired",
+                    "password_expire_time": 1668248628.2191815,
+                    "account_expire_time": 1666952628.2191815,
+                    "retry_count": 0,
+                    "last_token_time": 1666779828.2171815,
+                },
+                "_id": "3266430f-8222-407f-b08f-3a242504ab94",
+            }
+            self.db.create("users", expired_user)
+            user_info = {
+                "_id": "3266430f-8222-407f-b08f-3a242504ab94",
+                "system_admin_id": "72cd0cd6-e8e2-482c-9bc2-15b413bb8500",
+                "renew": True,
+            }
+            self.assertEqual(
+                expired_user["_admin"]["user_status"],
+                "expired",
+                "User status is unknown",
+            )
+            self.user_update.update_user(user_info)
+            user = self.db.get_one("users", {"username": expired_user["username"]})
+            self.assertEqual(
+                user["username"], expired_user["username"], "Wrong user name"
+            )
+            self.assertEqual(
+                user["_admin"]["user_status"], "active", "User status is unknown"
+            )
+            self.assertGreater(
+                user["_admin"]["account_expire_time"],
+                expired_user["_admin"]["account_expire_time"],
+                "User expire time is not get extended",
+            )
+        with self.subTest(i=4):
+            self.config.update({"user_management": False})
+            self.user_create = AuthconnInternal(self.config, self.db, self.permissions)
+            user_info = {"username": "user_mgmt_false", "password": "Test@123"}
+            self.user_create.create_user(user_info)
+            user = self.db.get_one("users", {"username": user_info["username"]})
+            self.assertEqual(user["username"], user_info["username"], "Wrong user name")
+            self.assertEqual(
+                user["_admin"]["user_status"], "active", "User status is unknown"
+            )
+            self.assertNotIn("password_expire_time", user["_admin"], "Key is not there")
+            self.assertNotIn("account_expire_time", user["_admin"], "Key is not there")
+
 
 class Test_CommonVimWimSdn(TestCase):
     @classmethod
 
 class Test_CommonVimWimSdn(TestCase):
     @classmethod
index 5107680..d5863a6 100755 (executable)
 __author__ = "Alfonso Tierno, alfonso.tiernosepulveda@telefonica.com"
 __date__ = "2020-06-17"
 
 __author__ = "Alfonso Tierno, alfonso.tiernosepulveda@telefonica.com"
 __date__ = "2020-06-17"
 
+from copy import deepcopy
 import unittest
 from unittest import TestCase
 import unittest
 from unittest import TestCase
+from unittest.mock import patch, Mock
+from osm_nbi.base_topic import (
+    BaseTopic,
+    EngineException,
+    NBIBadArgumentsException,
+    detect_descriptor_usage,
+    update_descriptor_usage_state,
+)
+from osm_common import dbbase
+from osm_nbi.tests.test_pkg_descriptors import db_vnfds_text, db_nsds_text
+import yaml
 
 
-# from unittest.mock import Mock
-# from osm_common import dbbase, fsbase, msgbase
-from osm_nbi.base_topic import BaseTopic, EngineException
+db_vnfd_content = yaml.safe_load(db_vnfds_text)[0]
+db_nsd_content = yaml.safe_load(db_nsds_text)[0]
 
 
 class Test_BaseTopic(TestCase):
 
 
 class Test_BaseTopic(TestCase):
@@ -35,14 +46,9 @@ class Test_BaseTopic(TestCase):
         pass
 
     def setUp(self):
         pass
 
     def setUp(self):
-        pass
-        # self.db = Mock(dbbase.DbBase())
-        # self.fs = Mock(fsbase.FsBase())
-        # self.msg = Mock(msgbase.MsgBase())
-        # self.auth = Mock(authconn.Authconn(None, None, None))
+        self.db = Mock(dbbase.DbBase())
 
     def test_update_input_with_kwargs(self):
 
     def test_update_input_with_kwargs(self):
-
         test_set = (
             # (descriptor content, kwargs, expected descriptor (None=fails), message)
             (
         test_set = (
             # (descriptor content, kwargs, expected descriptor (None=fails), message)
             (
@@ -123,6 +129,110 @@ class Test_BaseTopic(TestCase):
                 BaseTopic._update_input_with_kwargs(desc, kwargs)
                 self.assertEqual(desc, expected, message)
 
                 BaseTopic._update_input_with_kwargs(desc, kwargs)
                 self.assertEqual(desc, expected, message)
 
+    def test_detect_descriptor_usage_empty_descriptor(self):
+        descriptor = {}
+        db_collection = "vnfds"
+        with self.assertRaises(EngineException) as error:
+            detect_descriptor_usage(descriptor, db_collection, self.db)
+            self.assertIn(
+                "Argument is mandatory and can not be empty, Bad arguments: descriptor",
+                error,
+                "Error message is wrong.",
+            )
+        self.db.get_list.assert_not_called()
+
+    def test_detect_descriptor_usage_empty_db_argument(self):
+        descriptor = deepcopy(db_vnfd_content)
+        db_collection = "vnfds"
+        db = None
+        with self.assertRaises(EngineException) as error:
+            detect_descriptor_usage(descriptor, db_collection, db)
+            self.assertIn(
+                "A valid DB object should be provided, Bad arguments: db",
+                error,
+                "Error message is wrong.",
+            )
+        self.db.get_list.assert_not_called()
+
+    def test_detect_descriptor_usage_which_is_in_use(self):
+        descriptor = deepcopy(db_vnfd_content)
+        db_collection = "vnfds"
+        self.db.get_list.side_effect = [deepcopy(db_vnfd_content)]
+        expected = True
+        result = detect_descriptor_usage(descriptor, db_collection, self.db)
+        self.assertEqual(result, expected, "wrong result")
+        self.db.get_list.assert_called_once_with(
+            "vnfrs", {"vnfd-id": descriptor["_id"]}
+        )
+
+    def test_detect_descriptor_usage_which_is_not_in_use(self):
+        descriptor = deepcopy(db_nsd_content)
+        self.db.get_list.return_value = []
+        db_collection = "nsds"
+        expected = None
+        result = detect_descriptor_usage(descriptor, db_collection, self.db)
+        self.assertEqual(result, expected, "wrong result")
+        self.db.get_list.assert_called_once_with("nsrs", {"nsd-id": descriptor["_id"]})
+
+    def test_detect_descriptor_usage_wrong_desc_format(self):
+        descriptor = deepcopy(db_nsd_content)
+        descriptor.pop("_id")
+        db_collection = "nsds"
+        with self.assertRaises(EngineException) as error:
+            detect_descriptor_usage(descriptor, db_collection, self.db)
+            self.assertIn("KeyError", error, "wrong error type")
+        self.db.get_list.assert_not_called()
+
+    def test_detect_descriptor_usage_wrong_db_collection(self):
+        descriptor = deepcopy(db_vnfd_content)
+        descriptor.pop("_id")
+        db_collection = "vnf"
+        with self.assertRaises(EngineException) as error:
+            detect_descriptor_usage(descriptor, db_collection, self.db)
+            self.assertIn(
+                "db_collection should be equal to vnfds or nsds, db_collection",
+                error,
+                "wrong error type",
+            )
+
+        self.db.get_list.assert_not_called()
+
+    @patch("osm_nbi.base_topic.detect_descriptor_usage")
+    def test_update_descriptor_usage_state_to_in_use(self, mock_descriptor_usage):
+        db_collection = "vnfds"
+        descriptor = deepcopy(db_vnfd_content)
+        mock_descriptor_usage.return_value = True
+        descriptor_update = {"_admin.usageState": "IN_USE"}
+        update_descriptor_usage_state(descriptor, db_collection, self.db)
+        self.db.set_one.assert_called_once_with(
+            db_collection, {"_id": descriptor["_id"]}, update_dict=descriptor_update
+        )
+
+    @patch("osm_nbi.base_topic.detect_descriptor_usage")
+    def test_update_descriptor_usage_state_to_not_in_use(self, mock_descriptor_usage):
+        db_collection = "nsds"
+        descriptor = deepcopy(db_nsd_content)
+        mock_descriptor_usage.return_value = False
+        descriptor_update = {"_admin.usageState": "NOT_IN_USE"}
+        update_descriptor_usage_state(descriptor, db_collection, self.db)
+        self.db.set_one.assert_called_once_with(
+            db_collection, {"_id": descriptor["_id"]}, update_dict=descriptor_update
+        )
+
+    @patch("osm_nbi.base_topic.detect_descriptor_usage")
+    def test_update_descriptor_usage_state_db_exception(self, mock_descriptor_usage):
+        db_collection = "nsd"
+        descriptor = deepcopy(db_nsd_content)
+        mock_descriptor_usage.side_effect = NBIBadArgumentsException
+        with self.assertRaises(EngineException) as error:
+            update_descriptor_usage_state(descriptor, db_collection, self.db)
+            self.assertIn(
+                "db_collection should be equal to vnfds or nsds, db_collection",
+                error,
+                "wrong error type",
+            )
+        self.db.set_one.assert_not_called()
+
 
 if __name__ == "__main__":
     unittest.main()
 
 if __name__ == "__main__":
     unittest.main()
index 8b0b226..1914190 100644 (file)
@@ -501,10 +501,12 @@ db_vnfrs_text = """
             mgmt-vnf: true
             name: mgmtVM-eth0
             ns-vld-id: mgmt
             mgmt-vnf: true
             name: mgmtVM-eth0
             ns-vld-id: mgmt
+            position: 1
         -   ip-address: 192.168.54.2
             mac-address: fa:16:3e:6e:7e:78
             name: mgmtVM-eth1
             vnf-vld-id: internal
         -   ip-address: 192.168.54.2
             mac-address: fa:16:3e:6e:7e:78
             name: mgmtVM-eth1
             vnf-vld-id: internal
+            position: 2
         internal-connection-point:
         -   connection-point-id: mgmtVM-internal
             id: mgmtVM-internal
         internal-connection-point:
         -   connection-point-id: mgmtVM-internal
             id: mgmtVM-internal
@@ -623,3 +625,148 @@ db_vnfrs_text = """
     vnfd-id: 7637bcf8-cf14-42dc-ad70-c66fcf1e6e77
     vnfd-ref: hackfest3charmed-vnf
 """
     vnfd-id: 7637bcf8-cf14-42dc-ad70-c66fcf1e6e77
     vnfd-ref: hackfest3charmed-vnf
 """
+
+db_vnfm_vnfd_text = """
+---
+-   _admin:
+        created: 1647529096.3635302
+        modified: 1650456936.518325
+        onboardingState: ONBOARDED
+        operationalState: ENABLED
+        projects_read:
+        - 25b5aebf-3da1-49ed-99de-1d2b4a86d6e4
+        projects_write:
+        - 25b5aebf-3da1-49ed-99de-1d2b4a86d6e4
+        storage:
+            descriptor: hackfest_basic_metrics_vnf/hackfest_basic_metrics_vnfd.yaml
+            folder: 70b47595-fafa-4f63-904b-fc3ada60eebb
+            fs: mongo
+            path: /app/storage/
+            pkg-dir: hackfest_basic_metrics_vnf
+            zipfile: package.tar.gz
+        type: vnfd
+        usageState: NOT_IN_USE
+        userDefinedData: {}
+    _id: 70b47595-fafa-4f63-904b-fc3ada60eebb
+    _links:
+        packageContent:
+            href: /vnfpkgm/v1/vnf_packages/70b47595-fafa-4f63-904b-fc3ada60eebb/package_content
+        self:
+            href: /vnfpkgm/v1/vnf_packages/70b47595-fafa-4f63-904b-fc3ada60eebb
+        vnfd:
+            href: /vnfpkgm/v1/vnf_packages/70b47595-fafa-4f63-904b-fc3ada60eebb/vnfd
+    description: A basic VNF descriptor with one VDU and VIM metrics
+    df:
+    -   id: default-df
+        instantiation-level:
+        -   id: default-instantiation-level
+            vdu-level:
+            -   number-of-instances: 1
+                vdu-id: hackfest_basic_metrics-VM
+        scaling-aspect:
+        -   aspect-delta-details:
+                deltas:
+                -   id: vdu_autoscale-delta
+                    vdu-delta:
+                    -   id: hackfest_basic_metrics-VM
+                        number-of-instances: 1
+            id: vdu_autoscale
+            max-scale-level: 1
+            name: vdu_autoscale
+            scaling-policy:
+            -   cooldown-time: 120
+                name: cpu_util_above_threshold
+                scaling-criteria:
+                -   name: cpu_util_above_threshold
+                    scale-in-relational-operation: LT
+                    scale-in-threshold: '10.0000000000'
+                    scale-out-relational-operation: GT
+                    scale-out-threshold: '60.0000000000'
+                    vnf-monitoring-param-ref: vnf_cpu_util
+                scaling-type: automatic
+                threshold-time: 10
+        vdu-profile:
+        -   id: hackfest_basic_metrics-VM
+            max-number-of-instances: 2
+            min-number-of-instances: 1
+    ext-cpd:
+    -   id: vnf-cp0-ext
+        int-cpd:
+            cpd: vdu-eth0-int
+            vdu-id: hackfest_basic_metrics-VM
+    id: hackfest_basic_metrics-vnf
+    mgmt-cp: vnf-cp0-ext
+    onboardingState: ONBOARDED
+    operationalState: ENABLED
+    product-name: hackfest_basic_metrics-vnf
+    sw-image-desc:
+    -   id: bionic
+        image: bionic
+        name: bionic
+    -   id: ubuntu18.04-aws
+        image: ubuntu/images/hvm-ssd/ubuntu-artful-17.10-amd64-server-20180509
+        name: ubuntu18.04-aws
+        vim-type: aws
+    -   id: ubuntu18.04-azure
+        image: Canonical:UbuntuServer:18.04-LTS:latest
+        name: ubuntu18.04-azure
+        vim-type: azure
+    -   id: ubuntu18.04-gcp
+        image: ubuntu-os-cloud:image-family:ubuntu-1804-lts
+        name: ubuntu18.04-gcp
+        vim-type: gcp
+    usageState: NOT_IN_USE
+    vdu:
+    -   alarm:
+        -   actions:
+                alarm:
+                -   url: https://webhook.site/b79f9bf9-4c19-429d-81ed-19be26a3d5d8
+                insufficient-data:
+                -   url: https://webhook.site/b79f9bf9-4c19-429d-81ed-19be26a3d5d8
+                ok:
+                -   url: https://webhook.site/b79f9bf9-4c19-429d-81ed-19be26a3d5d8
+            alarm-id: alarm-1
+            operation: LT
+            value: '20.0000'
+            vnf-monitoring-param-ref: vnf_cpu_util
+        alternative-sw-image-desc:
+        - ubuntu18.04-aws
+        - ubuntu18.04-azure
+        - ubuntu18.04-gcp
+        cloud-init-file: cloud-config
+        id: hackfest_basic_metrics-VM
+        int-cpd:
+        -   id: vdu-eth0-int
+            virtual-network-interface-requirement:
+            -   name: vdu-eth0
+                virtual-interface:
+                    type: PARAVIRT
+        monitoring-parameter:
+        -   id: vnf_cpu_util
+            name: vnf_cpu_util
+            performance-metric: cpu_utilization
+        -   id: vnf_memory_util
+            name: vnf_memory_util
+            performance-metric: average_memory_utilization
+        -   id: vnf_packets_sent
+            name: vnf_packets_sent
+            performance-metric: packets_sent
+        -   id: vnf_packets_received
+            name: vnf_packets_received
+            performance-metric: packets_received
+        name: hackfest_basic_metrics-VM
+        sw-image-desc: bionic
+        virtual-compute-desc: hackfest_basic_metrics-VM-compute
+        virtual-storage-desc:
+        - hackfest_basic_metrics-VM-storage
+    version: '1.0'
+    virtual-compute-desc:
+    -   id: hackfest_basic_metrics-VM-compute
+        virtual-cpu:
+            num-virtual-cpu: 1
+        virtual-memory:
+            size: 1.0
+    virtual-storage-desc:
+    -   id: hackfest_basic_metrics-VM-storage
+        size-of-storage: '10'
+"""
index b2df34f..f6d4001 100755 (executable)
 __author__ = "Pedro de la Cruz Ramos, pedro.delacruzramos@altran.com"
 __date__ = "2019-11-20"
 
 __author__ = "Pedro de la Cruz Ramos, pedro.delacruzramos@altran.com"
 __date__ = "2019-11-20"
 
+from contextlib import contextmanager
 import unittest
 from unittest import TestCase
 import unittest
 from unittest import TestCase
-from unittest.mock import Mock
+from unittest.mock import Mock, patch
 from uuid import uuid4
 from http import HTTPStatus
 from copy import deepcopy
 from time import time
 from osm_common import dbbase, fsbase, msgbase
 from osm_nbi import authconn
 from uuid import uuid4
 from http import HTTPStatus
 from copy import deepcopy
 from time import time
 from osm_common import dbbase, fsbase, msgbase
 from osm_nbi import authconn
-from osm_nbi.tests.test_pkg_descriptors import db_vnfds_text, db_nsds_text
+from osm_nbi.tests.test_pkg_descriptors import (
+    db_vnfds_text,
+    db_nsds_text,
+    vnfd_exploit_text,
+    vnfd_exploit_fixed_text,
+    db_sfc_nsds_text,
+)
 from osm_nbi.descriptor_topics import VnfdTopic, NsdTopic
 from osm_nbi.engine import EngineException
 from osm_common.dbbase import DbException
 import yaml
 from osm_nbi.descriptor_topics import VnfdTopic, NsdTopic
 from osm_nbi.engine import EngineException
 from osm_common.dbbase import DbException
 import yaml
+import tempfile
+import collections
+import collections.abc
+
+collections.MutableSequence = collections.abc.MutableSequence
 
 test_name = "test-user"
 
 test_name = "test-user"
-db_vnfd_content = yaml.load(db_vnfds_text, Loader=yaml.Loader)[0]
-db_nsd_content = yaml.load(db_nsds_text, Loader=yaml.Loader)[0]
+db_vnfd_content = yaml.safe_load(db_vnfds_text)[0]
+db_nsd_content = yaml.safe_load(db_nsds_text)[0]
 test_pid = db_vnfd_content["_admin"]["projects_read"][0]
 fake_session = {
     "username": test_name,
 test_pid = db_vnfd_content["_admin"]["projects_read"][0]
 fake_session = {
     "username": test_name,
@@ -45,11 +57,23 @@ fake_session = {
     "public": False,
     "allow_show_user_project_role": True,
 }
     "public": False,
     "allow_show_user_project_role": True,
 }
+UUID = "00000000-0000-0000-0000-000000000000"
+
+
+def admin_value():
+    return {"projects_read": []}
 
 
 
 
-def norm(str):
+def setup_mock_fs(fs):
+    fs.path = ""
+    fs.get_params.return_value = {}
+    fs.file_exists.return_value = False
+    fs.file_open.side_effect = lambda path, mode: tempfile.TemporaryFile(mode="a+b")
+
+
+def norm(s: str):
     """Normalize string for checking"""
     """Normalize string for checking"""
-    return " ".join(str.strip().split()).lower()
+    return " ".join(s.strip().split()).lower()
 
 
 def compare_desc(tc, d1, d2, k):
 
 
 def compare_desc(tc, d1, d2, k):
@@ -61,7 +85,7 @@ def compare_desc(tc, d1, d2, k):
     Lists of different length are not compared
     :param tc: Test Case wich provides context (in particular the assert* methods)
     :param d1,d2: Descriptors to be compared
     Lists of different length are not compared
     :param tc: Test Case wich provides context (in particular the assert* methods)
     :param d1,d2: Descriptors to be compared
-    :param key/item being compared
+    :param k: key/item being compared
     :return: Nothing
     """
     if isinstance(d1, dict) and isinstance(d2, dict):
     :return: Nothing
     """
     if isinstance(d1, dict) and isinstance(d2, dict):
@@ -92,502 +116,610 @@ class Test_VnfdTopic(TestCase):
         self.topic = VnfdTopic(self.db, self.fs, self.msg, self.auth)
         self.topic.check_quota = Mock(return_value=None)  # skip quota
 
         self.topic = VnfdTopic(self.db, self.fs, self.msg, self.auth)
         self.topic.check_quota = Mock(return_value=None)  # skip quota
 
-    def test_new_vnfd(self):
-        did = db_vnfd_content["_id"]
-        self.fs.get_params.return_value = {}
-        self.fs.file_exists.return_value = False
-        self.fs.file_open.side_effect = lambda path, mode: open(
-            "/tmp/" + str(uuid4()), "a+b"
-        )
+    @contextmanager
+    def assertNotRaises(self, exception_type=Exception):
+        try:
+            yield None
+        except exception_type:
+            raise self.failureException("{} raised".format(exception_type.__name__))
+
+    def create_desc_temp(self, template):
+        old_desc = deepcopy(template)
+        new_desc = deepcopy(template)
+        return old_desc, new_desc
+
+    def prepare_vnfd_creation(self):
+        setup_mock_fs(self.fs)
         test_vnfd = deepcopy(db_vnfd_content)
         test_vnfd = deepcopy(db_vnfd_content)
+        did = db_vnfd_content["_id"]
+        self.db.create.return_value = did
+        self.db.get_one.side_effect = [
+            {"_id": did, "_admin": deepcopy(db_vnfd_content["_admin"])},
+            None,
+        ]
+        return did, test_vnfd
+
+    def prepare_vnfd(self, vnfd_text):
+        setup_mock_fs(self.fs)
+        test_vnfd = yaml.safe_load(vnfd_text)
+        self.db.create.return_value = UUID
+        self.db.get_one.side_effect = [
+            {"_id": UUID, "_admin": admin_value()},
+            None,
+        ]
+        return UUID, test_vnfd
+
+    def prepare_test_vnfd(self, test_vnfd):
         del test_vnfd["_id"]
         del test_vnfd["_admin"]
         del test_vnfd["_id"]
         del test_vnfd["_admin"]
-        with self.subTest(i=1, t="Normal Creation"):
-            self.db.create.return_value = did
-            rollback = []
-            did2, oid = self.topic.new(rollback, fake_session, {})
-            db_args = self.db.create.call_args[0]
-            msg_args = self.msg.write.call_args[0]
-            self.assertEqual(len(rollback), 1, "Wrong rollback length")
-            self.assertEqual(msg_args[0], self.topic.topic_msg, "Wrong message topic")
-            self.assertEqual(msg_args[1], "created", "Wrong message action")
-            self.assertEqual(msg_args[2], {"_id": did}, "Wrong message content")
-            self.assertEqual(db_args[0], self.topic.topic, "Wrong DB topic")
-            self.assertEqual(did2, did, "Wrong DB VNFD id")
-            self.assertIsNotNone(db_args[1]["_admin"]["created"], "Wrong creation time")
-            self.assertEqual(
-                db_args[1]["_admin"]["modified"],
-                db_args[1]["_admin"]["created"],
-                "Wrong modification time",
-            )
-            self.assertEqual(
-                db_args[1]["_admin"]["projects_read"],
-                [test_pid],
-                "Wrong read-only project list",
+        del test_vnfd["vdu"][0]["cloud-init-file"]
+        del test_vnfd["df"][0]["lcm-operations-configuration"]["operate-vnf-op-config"][
+            "day1-2"
+        ][0]["execution-environment-list"][0]["juju"]
+        return test_vnfd
+
+    @patch("osm_nbi.descriptor_topics.shutil")
+    @patch("osm_nbi.descriptor_topics.os.rename")
+    def test_new_vnfd_normal_creation(self, mock_rename, mock_shutil):
+        did, test_vnfd = self.prepare_vnfd_creation()
+        test_vnfd = self.prepare_test_vnfd(test_vnfd)
+        rollback = []
+        did2, oid = self.topic.new(rollback, fake_session, {})
+        db_args = self.db.create.call_args[0]
+        msg_args = self.msg.write.call_args[0]
+
+        self.assertEqual(len(rollback), 1, "Wrong rollback length")
+        self.assertEqual(msg_args[0], self.topic.topic_msg, "Wrong message topic")
+        self.assertEqual(msg_args[1], "created", "Wrong message action")
+        self.assertEqual(msg_args[2], {"_id": did}, "Wrong message content")
+        self.assertEqual(db_args[0], self.topic.topic, "Wrong DB topic")
+        self.assertEqual(did2, did, "Wrong DB VNFD id")
+        self.assertIsNotNone(db_args[1]["_admin"]["created"], "Wrong creation time")
+        self.assertEqual(
+            db_args[1]["_admin"]["modified"],
+            db_args[1]["_admin"]["created"],
+            "Wrong modification time",
+        )
+        self.assertEqual(
+            db_args[1]["_admin"]["projects_read"],
+            [test_pid],
+            "Wrong read-only project list",
+        )
+        self.assertEqual(
+            db_args[1]["_admin"]["projects_write"],
+            [test_pid],
+            "Wrong read-write project list",
+        )
+
+        self.db.get_one.side_effect = [
+            {"_id": did, "_admin": deepcopy(db_vnfd_content["_admin"])},
+            None,
+        ]
+
+        self.topic.upload_content(
+            fake_session, did, test_vnfd, {}, {"Content-Type": []}
+        )
+        msg_args = self.msg.write.call_args[0]
+        test_vnfd["_id"] = did
+        self.assertEqual(msg_args[0], self.topic.topic_msg, "Wrong message topic")
+        self.assertEqual(msg_args[1], "edited", "Wrong message action")
+        self.assertEqual(msg_args[2], test_vnfd, "Wrong message content")
+
+        db_args = self.db.get_one.mock_calls[0][1]
+        self.assertEqual(db_args[0], self.topic.topic, "Wrong DB topic")
+        self.assertEqual(db_args[1]["_id"], did, "Wrong DB VNFD id")
+
+        db_args = self.db.replace.call_args[0]
+        self.assertEqual(db_args[0], self.topic.topic, "Wrong DB topic")
+        self.assertEqual(db_args[1], did, "Wrong DB VNFD id")
+
+        admin = db_args[2]["_admin"]
+        db_admin = deepcopy(db_vnfd_content["_admin"])
+        self.assertEqual(admin["type"], "vnfd", "Wrong descriptor type")
+        self.assertEqual(admin["created"], db_admin["created"], "Wrong creation time")
+        self.assertGreater(
+            admin["modified"], db_admin["created"], "Wrong modification time"
+        )
+        self.assertEqual(
+            admin["projects_read"],
+            db_admin["projects_read"],
+            "Wrong read-only project list",
+        )
+        self.assertEqual(
+            admin["projects_write"],
+            db_admin["projects_write"],
+            "Wrong read-write project list",
+        )
+        self.assertEqual(
+            admin["onboardingState"], "ONBOARDED", "Wrong onboarding state"
+        )
+        self.assertEqual(
+            admin["operationalState"], "ENABLED", "Wrong operational state"
+        )
+        self.assertEqual(admin["usageState"], "NOT_IN_USE", "Wrong usage state")
+
+        storage = admin["storage"]
+        self.assertEqual(storage["folder"], did + ":1", "Wrong storage folder")
+        self.assertEqual(storage["descriptor"], "package", "Wrong storage descriptor")
+        self.assertEqual(admin["revision"], 1, "Wrong revision number")
+        compare_desc(self, test_vnfd, db_args[2], "VNFD")
+
+    @patch("osm_nbi.descriptor_topics.shutil")
+    @patch("osm_nbi.descriptor_topics.os.rename")
+    def test_new_vnfd_exploit(self, mock_rename, mock_shutil):
+        id, test_vnfd = self.prepare_vnfd(vnfd_exploit_text)
+
+        with self.assertRaises(EngineException):
+            self.topic.upload_content(
+                fake_session, id, test_vnfd, {}, {"Content-Type": []}
             )
             )
-            self.assertEqual(
-                db_args[1]["_admin"]["projects_write"],
-                [test_pid],
-                "Wrong read-write project list",
+
+    @patch("osm_nbi.descriptor_topics.shutil")
+    @patch("osm_nbi.descriptor_topics.os.rename")
+    def test_new_vnfd_valid_helm_chart(self, mock_rename, mock_shutil):
+        id, test_vnfd = self.prepare_vnfd(vnfd_exploit_fixed_text)
+
+        with self.assertNotRaises():
+            self.topic.upload_content(
+                fake_session, id, test_vnfd, {}, {"Content-Type": []}
             )
             )
-            tmp1 = test_vnfd["vdu"][0]["cloud-init-file"]
-            tmp2 = test_vnfd["df"][0]["lcm-operations-configuration"][
-                "operate-vnf-op-config"
-            ]["day1-2"][0]["execution-environment-list"][0]["juju"]
-            del test_vnfd["vdu"][0]["cloud-init-file"]
-            del test_vnfd["df"][0]["lcm-operations-configuration"][
-                "operate-vnf-op-config"
-            ]["day1-2"][0]["execution-environment-list"][0]["juju"]
-            try:
-                self.db.get_one.side_effect = [
-                    {"_id": did, "_admin": deepcopy(db_vnfd_content["_admin"])},
-                    None,
-                ]
-                self.topic.upload_content(
-                    fake_session, did, test_vnfd, {}, {"Content-Type": []}
-                )
-                msg_args = self.msg.write.call_args[0]
-                test_vnfd["_id"] = did
-                self.assertEqual(
-                    msg_args[0], self.topic.topic_msg, "Wrong message topic"
-                )
-                self.assertEqual(msg_args[1], "edited", "Wrong message action")
-                self.assertEqual(msg_args[2], test_vnfd, "Wrong message content")
-                db_args = self.db.get_one.mock_calls[0][1]
-                self.assertEqual(db_args[0], self.topic.topic, "Wrong DB topic")
-                self.assertEqual(db_args[1]["_id"], did, "Wrong DB VNFD id")
-                db_args = self.db.replace.call_args[0]
-                self.assertEqual(db_args[0], self.topic.topic, "Wrong DB topic")
-                self.assertEqual(db_args[1], did, "Wrong DB VNFD id")
-                admin = db_args[2]["_admin"]
-                db_admin = deepcopy(db_vnfd_content["_admin"])
-                self.assertEqual(admin["type"], "vnfd", "Wrong descriptor type")
-                self.assertEqual(
-                    admin["created"], db_admin["created"], "Wrong creation time"
-                )
-                self.assertGreater(
-                    admin["modified"], db_admin["created"], "Wrong modification time"
-                )
-                self.assertEqual(
-                    admin["projects_read"],
-                    db_admin["projects_read"],
-                    "Wrong read-only project list",
-                )
-                self.assertEqual(
-                    admin["projects_write"],
-                    db_admin["projects_write"],
-                    "Wrong read-write project list",
-                )
-                self.assertEqual(
-                    admin["onboardingState"], "ONBOARDED", "Wrong onboarding state"
-                )
-                self.assertEqual(
-                    admin["operationalState"], "ENABLED", "Wrong operational state"
-                )
-                self.assertEqual(admin["usageState"], "NOT_IN_USE", "Wrong usage state")
-                storage = admin["storage"]
-                self.assertEqual(storage["folder"], did, "Wrong storage folder")
-                self.assertEqual(
-                    storage["descriptor"], "package", "Wrong storage descriptor"
-                )
-                compare_desc(self, test_vnfd, db_args[2], "VNFD")
-            finally:
-                test_vnfd["vdu"][0]["cloud-init-file"] = tmp1
-                test_vnfd["df"][0]["lcm-operations-configuration"][
-                    "operate-vnf-op-config"
-                ]["day1-2"][0]["execution-environment-list"][0]["juju"] = tmp2
+
+    @patch("osm_nbi.descriptor_topics.shutil")
+    @patch("osm_nbi.descriptor_topics.os.rename")
+    def test_new_vnfd_check_pyangbind_validation_additional_properties(
+        self, mock_rename, mock_shutil
+    ):
+        did, test_vnfd = self.prepare_vnfd_creation()
+        test_vnfd = self.prepare_test_vnfd(test_vnfd)
+        self.topic.upload_content(
+            fake_session, did, test_vnfd, {}, {"Content-Type": []}
+        )
+        test_vnfd["_id"] = did
+        test_vnfd["extra-property"] = 0
         self.db.get_one.side_effect = (
             lambda table, filter, fail_on_empty=None, fail_on_more=None: {
                 "_id": did,
                 "_admin": deepcopy(db_vnfd_content["_admin"]),
             }
         )
         self.db.get_one.side_effect = (
             lambda table, filter, fail_on_empty=None, fail_on_more=None: {
                 "_id": did,
                 "_admin": deepcopy(db_vnfd_content["_admin"]),
             }
         )
-        with self.subTest(i=2, t="Check Pyangbind Validation: additional properties"):
-            test_vnfd["extra-property"] = 0
-            try:
-                with self.assertRaises(
-                    EngineException, msg="Accepted VNFD with an additional property"
-                ) as e:
-                    self.topic.upload_content(
-                        fake_session, did, test_vnfd, {}, {"Content-Type": []}
-                    )
-                self.assertEqual(
-                    e.exception.http_code,
-                    HTTPStatus.UNPROCESSABLE_ENTITY,
-                    "Wrong HTTP status code",
-                )
-                self.assertIn(
-                    norm(
-                        "Error in pyangbind validation: {} ({})".format(
-                            "json object contained a key that did not exist",
-                            "extra-property",
-                        )
-                    ),
-                    norm(str(e.exception)),
-                    "Wrong exception text",
-                )
-            finally:
-                del test_vnfd["extra-property"]
-        with self.subTest(i=3, t="Check Pyangbind Validation: property types"):
-            tmp = test_vnfd["product-name"]
-            test_vnfd["product-name"] = {"key": 0}
-            try:
-                with self.assertRaises(
-                    EngineException, msg="Accepted VNFD with a wrongly typed property"
-                ) as e:
-                    self.topic.upload_content(
-                        fake_session, did, test_vnfd, {}, {"Content-Type": []}
-                    )
-                self.assertEqual(
-                    e.exception.http_code,
-                    HTTPStatus.UNPROCESSABLE_ENTITY,
-                    "Wrong HTTP status code",
-                )
-                self.assertIn(
-                    norm(
-                        "Error in pyangbind validation: {} ({})".format(
-                            "json object contained a key that did not exist", "key"
-                        )
-                    ),
-                    norm(str(e.exception)),
-                    "Wrong exception text",
+
+        with self.assertRaises(
+            EngineException, msg="Accepted VNFD with an additional property"
+        ) as e:
+            self.topic.upload_content(
+                fake_session, did, test_vnfd, {}, {"Content-Type": []}
+            )
+        self.assertEqual(
+            e.exception.http_code,
+            HTTPStatus.UNPROCESSABLE_ENTITY,
+            "Wrong HTTP status code",
+        )
+        self.assertIn(
+            norm(
+                "Error in pyangbind validation: {} ({})".format(
+                    "json object contained a key that did not exist", "extra-property"
                 )
                 )
-            finally:
-                test_vnfd["product-name"] = tmp
-        with self.subTest(i=4, t="Check Input Validation: cloud-init"):
-            with self.assertRaises(
-                EngineException, msg="Accepted non-existent cloud_init file"
-            ) as e:
-                self.topic.upload_content(
-                    fake_session, did, test_vnfd, {}, {"Content-Type": []}
+            ),
+            norm(str(e.exception)),
+            "Wrong exception text",
+        )
+        db_args = self.db.replace.call_args[0]
+        admin = db_args[2]["_admin"]
+        self.assertEqual(admin["revision"], 1, "Wrong revision number")
+
+    @patch("osm_nbi.descriptor_topics.shutil")
+    @patch("osm_nbi.descriptor_topics.os.rename")
+    def test_new_vnfd_check_pyangbind_validation_property_types(
+        self, mock_rename, mock_shutil
+    ):
+        did, test_vnfd = self.prepare_vnfd_creation()
+        test_vnfd = self.prepare_test_vnfd(test_vnfd)
+        test_vnfd["_id"] = did
+        test_vnfd["product-name"] = {"key": 0}
+
+        with self.assertRaises(
+            EngineException, msg="Accepted VNFD with a wrongly typed property"
+        ) as e:
+            self.topic.upload_content(
+                fake_session, did, test_vnfd, {}, {"Content-Type": []}
+            )
+        self.assertEqual(
+            e.exception.http_code,
+            HTTPStatus.UNPROCESSABLE_ENTITY,
+            "Wrong HTTP status code",
+        )
+        self.assertIn(
+            norm(
+                "Error in pyangbind validation: {} ({})".format(
+                    "json object contained a key that did not exist", "key"
                 )
                 )
-            self.assertEqual(
-                e.exception.http_code, HTTPStatus.BAD_REQUEST, "Wrong HTTP status code"
+            ),
+            norm(str(e.exception)),
+            "Wrong exception text",
+        )
+
+    @patch("osm_nbi.descriptor_topics.shutil")
+    @patch("osm_nbi.descriptor_topics.os.rename")
+    def test_new_vnfd_check_input_validation_cloud_init(self, mock_rename, mock_shutil):
+        did, test_vnfd = self.prepare_vnfd_creation()
+        del test_vnfd["df"][0]["lcm-operations-configuration"]["operate-vnf-op-config"][
+            "day1-2"
+        ][0]["execution-environment-list"][0]["juju"]
+
+        with self.assertRaises(
+            EngineException, msg="Accepted non-existent cloud_init file"
+        ) as e:
+            self.topic.upload_content(
+                fake_session, did, test_vnfd, {}, {"Content-Type": []}
             )
             )
-            self.assertIn(
-                norm(
-                    "{} defined in vnf[id={}]:vdu[id={}] but not present in package".format(
-                        "cloud-init", test_vnfd["id"], test_vnfd["vdu"][0]["id"]
-                    )
-                ),
-                norm(str(e.exception)),
-                "Wrong exception text",
+        self.assertEqual(
+            e.exception.http_code, HTTPStatus.BAD_REQUEST, "Wrong HTTP status code"
+        )
+        self.assertIn(
+            norm(
+                "{} defined in vnf[id={}]:vdu[id={}] but not present in package".format(
+                    "cloud-init", test_vnfd["id"], test_vnfd["vdu"][0]["id"]
+                )
+            ),
+            norm(str(e.exception)),
+            "Wrong exception text",
+        )
+
+    @patch("osm_nbi.descriptor_topics.shutil")
+    @patch("osm_nbi.descriptor_topics.os.rename")
+    def test_new_vnfd_check_input_validation_day12_configuration(
+        self, mock_rename, mock_shutil
+    ):
+        did, test_vnfd = self.prepare_vnfd_creation()
+        del test_vnfd["vdu"][0]["cloud-init-file"]
+
+        with self.assertRaises(
+            EngineException, msg="Accepted non-existent charm in VNF configuration"
+        ) as e:
+            self.topic.upload_content(
+                fake_session, did, test_vnfd, {}, {"Content-Type": []}
             )
             )
-        with self.subTest(i=5, t="Check Input Validation: day1-2 configuration[juju]"):
-            del test_vnfd["vdu"][0]["cloud-init-file"]
-            with self.assertRaises(
-                EngineException, msg="Accepted non-existent charm in VNF configuration"
-            ) as e:
-                self.topic.upload_content(
-                    fake_session, did, test_vnfd, {}, {"Content-Type": []}
+        self.assertEqual(
+            e.exception.http_code, HTTPStatus.BAD_REQUEST, "Wrong HTTP status code"
+        )
+        self.assertIn(
+            norm(
+                "{} defined in vnf[id={}] but not present in package".format(
+                    "charm", test_vnfd["id"]
                 )
                 )
-            print(str(e.exception))
-            self.assertEqual(
-                e.exception.http_code, HTTPStatus.BAD_REQUEST, "Wrong HTTP status code"
+            ),
+            norm(str(e.exception)),
+            "Wrong exception text",
+        )
+
+    @patch("osm_nbi.descriptor_topics.shutil")
+    @patch("osm_nbi.descriptor_topics.os.rename")
+    def test_new_vnfd_check_input_validation_mgmt_cp(self, mock_rename, mock_shutil):
+        did, test_vnfd = self.prepare_vnfd_creation()
+        test_vnfd = self.prepare_test_vnfd(test_vnfd)
+        del test_vnfd["mgmt-cp"]
+
+        with self.assertRaises(
+            EngineException, msg="Accepted VNFD without management interface"
+        ) as e:
+            self.topic.upload_content(
+                fake_session, did, test_vnfd, {}, {"Content-Type": []}
             )
             )
-            self.assertIn(
-                norm(
-                    "{} defined in vnf[id={}] but not present in package".format(
-                        "charm", test_vnfd["id"]
-                    )
-                ),
-                norm(str(e.exception)),
-                "Wrong exception text",
+        self.assertEqual(
+            e.exception.http_code,
+            HTTPStatus.UNPROCESSABLE_ENTITY,
+            "Wrong HTTP status code",
+        )
+        self.assertIn(
+            norm("'{}' is a mandatory field and it is not defined".format("mgmt-cp")),
+            norm(str(e.exception)),
+            "Wrong exception text",
+        )
+
+    @patch("osm_nbi.descriptor_topics.shutil")
+    @patch("osm_nbi.descriptor_topics.os.rename")
+    def test_new_vnfd_check_input_validation_mgmt_cp_connection_point(
+        self, mock_rename, mock_shutil
+    ):
+        did, test_vnfd = self.prepare_vnfd_creation()
+        test_vnfd = self.prepare_test_vnfd(test_vnfd)
+        test_vnfd["mgmt-cp"] = "wrong-cp"
+
+        with self.assertRaises(
+            EngineException, msg="Accepted wrong mgmt-cp connection point"
+        ) as e:
+            self.topic.upload_content(
+                fake_session, did, test_vnfd, {}, {"Content-Type": []}
             )
             )
-            del test_vnfd["df"][0]["lcm-operations-configuration"][
-                "operate-vnf-op-config"
-            ]["day1-2"][0]["execution-environment-list"][0]["juju"]
-        with self.subTest(i=6, t="Check Input Validation: mgmt-cp"):
-            tmp = test_vnfd["mgmt-cp"]
-            del test_vnfd["mgmt-cp"]
-            try:
-                with self.assertRaises(
-                    EngineException, msg="Accepted VNFD without management interface"
-                ) as e:
-                    self.topic.upload_content(
-                        fake_session, did, test_vnfd, {}, {"Content-Type": []}
-                    )
-                self.assertEqual(
-                    e.exception.http_code,
-                    HTTPStatus.UNPROCESSABLE_ENTITY,
-                    "Wrong HTTP status code",
-                )
-                self.assertIn(
-                    norm(
-                        "'{}' is a mandatory field and it is not defined".format(
-                            "mgmt-cp"
-                        )
-                    ),
-                    norm(str(e.exception)),
-                    "Wrong exception text",
-                )
-            finally:
-                test_vnfd["mgmt-cp"] = tmp
-        with self.subTest(i=7, t="Check Input Validation: mgmt-cp connection point"):
-            tmp = test_vnfd["mgmt-cp"]
-            test_vnfd["mgmt-cp"] = "wrong-cp"
-            try:
-                with self.assertRaises(
-                    EngineException, msg="Accepted wrong mgmt-cp connection point"
-                ) as e:
-                    self.topic.upload_content(
-                        fake_session, did, test_vnfd, {}, {"Content-Type": []}
-                    )
-                self.assertEqual(
-                    e.exception.http_code,
-                    HTTPStatus.UNPROCESSABLE_ENTITY,
-                    "Wrong HTTP status code",
-                )
-                self.assertIn(
-                    norm(
-                        "mgmt-cp='{}' must match an existing ext-cpd".format(
-                            test_vnfd["mgmt-cp"]
-                        )
-                    ),
-                    norm(str(e.exception)),
-                    "Wrong exception text",
-                )
-            finally:
-                test_vnfd["mgmt-cp"] = tmp
-        with self.subTest(i=8, t="Check Input Validation: vdu int-cpd"):
-            ext_cpd = test_vnfd["ext-cpd"][1]
-            tmp = ext_cpd["int-cpd"]["cpd"]
-            ext_cpd["int-cpd"]["cpd"] = "wrong-cpd"
-            try:
-                with self.assertRaises(
-                    EngineException,
-                    msg="Accepted wrong ext-cpd internal connection point",
-                ) as e:
-                    self.topic.upload_content(
-                        fake_session, did, test_vnfd, {}, {"Content-Type": []}
-                    )
-                self.assertEqual(
-                    e.exception.http_code,
-                    HTTPStatus.UNPROCESSABLE_ENTITY,
-                    "Wrong HTTP status code",
-                )
-                self.assertIn(
-                    norm(
-                        "ext-cpd[id='{}']:int-cpd must match an existing vdu int-cpd".format(
-                            ext_cpd["id"]
-                        )
-                    ),
-                    norm(str(e.exception)),
-                    "Wrong exception text",
-                )
-            finally:
-                ext_cpd["int-cpd"]["cpd"] = tmp
-        with self.subTest(i=9, t="Check Input Validation: Duplicated VLD"):
-            test_vnfd["int-virtual-link-desc"].insert(0, {"id": "internal"})
-            try:
-                with self.assertRaises(
-                    EngineException, msg="Accepted duplicated VLD name"
-                ) as e:
-                    self.topic.upload_content(
-                        fake_session, did, test_vnfd, {}, {"Content-Type": []}
-                    )
-                self.assertEqual(
-                    e.exception.http_code,
-                    HTTPStatus.UNPROCESSABLE_ENTITY,
-                    "Wrong HTTP status code",
-                )
-                self.assertIn(
-                    norm(
-                        "identifier id '{}' is not unique".format(
-                            test_vnfd["int-virtual-link-desc"][0]["id"]
-                        )
-                    ),
-                    norm(str(e.exception)),
-                    "Wrong exception text",
-                )
-            finally:
-                del test_vnfd["int-virtual-link-desc"][0]
-        with self.subTest(i=10, t="Check Input Validation: vdu int-virtual-link-desc"):
-            vdu = test_vnfd["vdu"][0]
-            int_cpd = vdu["int-cpd"][1]
-            tmp = int_cpd["int-virtual-link-desc"]
-            int_cpd["int-virtual-link-desc"] = "non-existing-int-virtual-link-desc"
-            try:
-                with self.assertRaises(
-                    EngineException, msg="Accepted int-virtual-link-desc"
-                ) as e:
-                    self.topic.upload_content(
-                        fake_session, did, test_vnfd, {}, {"Content-Type": []}
-                    )
-                self.assertEqual(
-                    e.exception.http_code,
-                    HTTPStatus.UNPROCESSABLE_ENTITY,
-                    "Wrong HTTP status code",
-                )
-                self.assertIn(
-                    norm(
-                        "vdu[id='{}']:int-cpd[id='{}']:int-virtual-link-desc='{}' must match an existing "
-                        "int-virtual-link-desc".format(
-                            vdu["id"], int_cpd["id"], int_cpd["int-virtual-link-desc"]
-                        )
-                    ),
-                    norm(str(e.exception)),
-                    "Wrong exception text",
-                )
-            finally:
-                int_cpd["int-virtual-link-desc"] = tmp
-        with self.subTest(i=11, t="Check Input Validation: virtual-link-profile)"):
-            fake_ivld_profile = {"id": "fake-profile-ref", "flavour": "fake-flavour"}
-            df = test_vnfd["df"][0]
-            df["virtual-link-profile"] = [fake_ivld_profile]
-            try:
-                with self.assertRaises(
-                    EngineException, msg="Accepted non-existent Profile Ref"
-                ) as e:
-                    self.topic.upload_content(
-                        fake_session, did, test_vnfd, {}, {"Content-Type": []}
-                    )
-                self.assertEqual(
-                    e.exception.http_code,
-                    HTTPStatus.UNPROCESSABLE_ENTITY,
-                    "Wrong HTTP status code",
-                )
-                self.assertIn(
-                    norm(
-                        "df[id='{}']:virtual-link-profile='{}' must match an existing "
-                        "int-virtual-link-desc".format(
-                            df["id"], fake_ivld_profile["id"]
-                        )
-                    ),
-                    norm(str(e.exception)),
-                    "Wrong exception text",
-                )
-            finally:
-                del df["virtual-link-profile"]
-        with self.subTest(
-            i=12, t="Check Input Validation: scaling-criteria monitoring-param-ref"
-        ):
-            vdu = test_vnfd["vdu"][1]
-            affected_df = test_vnfd["df"][0]
-            sa = affected_df["scaling-aspect"][0]
-            sp = sa["scaling-policy"][0]
-            sc = sp["scaling-criteria"][0]
-            tmp = vdu.pop("monitoring-parameter")
-            try:
-                with self.assertRaises(
-                    EngineException,
-                    msg="Accepted non-existent Scaling Group Policy Criteria",
-                ) as e:
-                    self.topic.upload_content(
-                        fake_session, did, test_vnfd, {}, {"Content-Type": []}
-                    )
-                self.assertEqual(
-                    e.exception.http_code,
-                    HTTPStatus.UNPROCESSABLE_ENTITY,
-                    "Wrong HTTP status code",
+        self.assertEqual(
+            e.exception.http_code,
+            HTTPStatus.UNPROCESSABLE_ENTITY,
+            "Wrong HTTP status code",
+        )
+        self.assertIn(
+            norm(
+                "mgmt-cp='{}' must match an existing ext-cpd".format(
+                    test_vnfd["mgmt-cp"]
                 )
                 )
-                self.assertIn(
-                    norm(
-                        "df[id='{}']:scaling-aspect[id='{}']:scaling-policy"
-                        "[name='{}']:scaling-criteria[name='{}']: "
-                        "vnf-monitoring-param-ref='{}' not defined in any monitoring-param".format(
-                            affected_df["id"],
-                            sa["id"],
-                            sp["name"],
-                            sc["name"],
-                            sc["vnf-monitoring-param-ref"],
-                        )
-                    ),
-                    norm(str(e.exception)),
-                    "Wrong exception text",
+            ),
+            norm(str(e.exception)),
+            "Wrong exception text",
+        )
+
+    @patch("osm_nbi.descriptor_topics.shutil")
+    @patch("osm_nbi.descriptor_topics.os.rename")
+    def test_new_vnfd_check_input_validation_vdu_int_cpd(
+        self, mock_rename, mock_shutil
+    ):
+        """Testing input validation during new vnfd creation
+        for vdu internal connection point"""
+        did, test_vnfd = self.prepare_vnfd_creation()
+        test_vnfd = self.prepare_test_vnfd(test_vnfd)
+        ext_cpd = test_vnfd["ext-cpd"][1]
+        ext_cpd["int-cpd"]["cpd"] = "wrong-cpd"
+
+        with self.assertRaises(
+            EngineException, msg="Accepted wrong ext-cpd internal connection point"
+        ) as e:
+            self.topic.upload_content(
+                fake_session, did, test_vnfd, {}, {"Content-Type": []}
+            )
+        self.assertEqual(
+            e.exception.http_code,
+            HTTPStatus.UNPROCESSABLE_ENTITY,
+            "Wrong HTTP status code",
+        )
+        self.assertIn(
+            norm(
+                "ext-cpd[id='{}']:int-cpd must match an existing vdu int-cpd".format(
+                    ext_cpd["id"]
                 )
                 )
-            finally:
-                vdu["monitoring-parameter"] = tmp
-        with self.subTest(
-            i=13, t="Check Input Validation: scaling-aspect vnf-configuration"
-        ):
-            df = test_vnfd["df"][0]
-            tmp = test_vnfd["df"][0]["lcm-operations-configuration"][
-                "operate-vnf-op-config"
-            ]["day1-2"].pop()
-            try:
-                with self.assertRaises(
-                    EngineException,
-                    msg="Accepted non-existent Scaling Group VDU ID Reference",
-                ) as e:
-                    self.topic.upload_content(
-                        fake_session, did, test_vnfd, {}, {"Content-Type": []}
-                    )
-                self.assertEqual(
-                    e.exception.http_code,
-                    HTTPStatus.UNPROCESSABLE_ENTITY,
-                    "Wrong HTTP status code",
+            ),
+            norm(str(e.exception)),
+            "Wrong exception text",
+        )
+
+    @patch("osm_nbi.descriptor_topics.shutil")
+    @patch("osm_nbi.descriptor_topics.os.rename")
+    def test_new_vnfd_check_input_validation_duplicated_vld(
+        self, mock_rename, mock_shutil
+    ):
+        """Testing input validation during new vnfd creation
+        for duplicated virtual link description"""
+        did, test_vnfd = self.prepare_vnfd_creation()
+        test_vnfd = self.prepare_test_vnfd(test_vnfd)
+        test_vnfd["int-virtual-link-desc"].insert(0, {"id": "internal"})
+
+        with self.assertRaises(
+            EngineException, msg="Accepted duplicated VLD name"
+        ) as e:
+            self.topic.upload_content(
+                fake_session, did, test_vnfd, {}, {"Content-Type": []}
+            )
+        self.assertEqual(
+            e.exception.http_code,
+            HTTPStatus.UNPROCESSABLE_ENTITY,
+            "Wrong HTTP status code",
+        )
+        self.assertIn(
+            norm(
+                "identifier id '{}' is not unique".format(
+                    test_vnfd["int-virtual-link-desc"][0]["id"]
                 )
                 )
-                self.assertIn(
-                    norm(
-                        "'day1-2 configuration' not defined in the descriptor but it is referenced "
-                        "by df[id='{}']:scaling-aspect[id='{}']:scaling-config-action".format(
-                            df["id"], df["scaling-aspect"][0]["id"]
-                        )
-                    ),
-                    norm(str(e.exception)),
-                    "Wrong exception text",
+            ),
+            norm(str(e.exception)),
+            "Wrong exception text",
+        )
+
+    @patch("osm_nbi.descriptor_topics.shutil")
+    @patch("osm_nbi.descriptor_topics.os.rename")
+    def test_new_vnfd_check_input_validation_vdu_int_virtual_link_desc(
+        self, mock_rename, mock_shutil
+    ):
+        """Testing input validation during new vnfd creation
+        for vdu internal virtual link description"""
+        did, test_vnfd = self.prepare_vnfd_creation()
+        test_vnfd = self.prepare_test_vnfd(test_vnfd)
+        vdu = test_vnfd["vdu"][0]
+        int_cpd = vdu["int-cpd"][1]
+        int_cpd["int-virtual-link-desc"] = "non-existing-int-virtual-link-desc"
+
+        with self.assertRaises(
+            EngineException, msg="Accepted int-virtual-link-desc"
+        ) as e:
+            self.topic.upload_content(
+                fake_session, did, test_vnfd, {}, {"Content-Type": []}
+            )
+        self.assertEqual(
+            e.exception.http_code,
+            HTTPStatus.UNPROCESSABLE_ENTITY,
+            "Wrong HTTP status code",
+        )
+        self.assertIn(
+            norm(
+                "vdu[id='{}']:int-cpd[id='{}']:int-virtual-link-desc='{}' must match an existing "
+                "int-virtual-link-desc".format(
+                    vdu["id"], int_cpd["id"], int_cpd["int-virtual-link-desc"]
                 )
                 )
-            finally:
-                test_vnfd["df"][0]["lcm-operations-configuration"][
-                    "operate-vnf-op-config"
-                ]["day1-2"].append(tmp)
-        with self.subTest(i=14, t="Check Input Validation: scaling-config-action"):
-            df = test_vnfd["df"][0]
-            tmp = (
-                test_vnfd["df"][0]
-                .get("lcm-operations-configuration")
-                .get("operate-vnf-op-config")["day1-2"][0]["config-primitive"]
-            )
-            test_vnfd["df"][0].get("lcm-operations-configuration").get(
-                "operate-vnf-op-config"
-            )["day1-2"][0]["config-primitive"] = [{"name": "wrong-primitive"}]
-            try:
-                with self.assertRaises(
-                    EngineException,
-                    msg="Accepted non-existent Scaling Group VDU ID Reference",
-                ) as e:
-                    self.topic.upload_content(
-                        fake_session, did, test_vnfd, {}, {"Content-Type": []}
-                    )
-                self.assertEqual(
-                    e.exception.http_code,
-                    HTTPStatus.UNPROCESSABLE_ENTITY,
-                    "Wrong HTTP status code",
+            ),
+            norm(str(e.exception)),
+            "Wrong exception text",
+        )
+
+    @patch("osm_nbi.descriptor_topics.shutil")
+    @patch("osm_nbi.descriptor_topics.os.rename")
+    def test_new_vnfd_check_input_validation_virtual_link_profile(
+        self, mock_rename, mock_shutil
+    ):
+        """Testing input validation during new vnfd creation
+        for virtual link profile"""
+        did, test_vnfd = self.prepare_vnfd_creation()
+        test_vnfd = self.prepare_test_vnfd(test_vnfd)
+        fake_ivld_profile = {"id": "fake-profile-ref", "flavour": "fake-flavour"}
+        df = test_vnfd["df"][0]
+        df["virtual-link-profile"] = [fake_ivld_profile]
+
+        with self.assertRaises(
+            EngineException, msg="Accepted non-existent Profile Ref"
+        ) as e:
+            self.topic.upload_content(
+                fake_session, did, test_vnfd, {}, {"Content-Type": []}
+            )
+        self.assertEqual(
+            e.exception.http_code,
+            HTTPStatus.UNPROCESSABLE_ENTITY,
+            "Wrong HTTP status code",
+        )
+        self.assertIn(
+            norm(
+                "df[id='{}']:virtual-link-profile='{}' must match an existing "
+                "int-virtual-link-desc".format(df["id"], fake_ivld_profile["id"])
+            ),
+            norm(str(e.exception)),
+            "Wrong exception text",
+        )
+
+    @patch("osm_nbi.descriptor_topics.shutil")
+    @patch("osm_nbi.descriptor_topics.os.rename")
+    def test_new_vnfd_check_input_validation_scaling_criteria_monitoring_param_ref(
+        self, mock_rename, mock_shutil
+    ):
+        """Testing input validation during new vnfd creation
+        for scaling criteria without monitoring parameter"""
+        did, test_vnfd = self.prepare_vnfd_creation()
+        test_vnfd = self.prepare_test_vnfd(test_vnfd)
+        vdu = test_vnfd["vdu"][1]
+        affected_df = test_vnfd["df"][0]
+        sa = affected_df["scaling-aspect"][0]
+        sp = sa["scaling-policy"][0]
+        sc = sp["scaling-criteria"][0]
+        vdu.pop("monitoring-parameter")
+
+        with self.assertRaises(
+            EngineException, msg="Accepted non-existent Scaling Group Policy Criteria"
+        ) as e:
+            self.topic.upload_content(
+                fake_session, did, test_vnfd, {}, {"Content-Type": []}
+            )
+        self.assertEqual(
+            e.exception.http_code,
+            HTTPStatus.UNPROCESSABLE_ENTITY,
+            "Wrong HTTP status code",
+        )
+        self.assertIn(
+            norm(
+                "df[id='{}']:scaling-aspect[id='{}']:scaling-policy"
+                "[name='{}']:scaling-criteria[name='{}']: "
+                "vnf-monitoring-param-ref='{}' not defined in any monitoring-param".format(
+                    affected_df["id"],
+                    sa["id"],
+                    sp["name"],
+                    sc["name"],
+                    sc["vnf-monitoring-param-ref"],
                 )
                 )
-                self.assertIn(
-                    norm(
-                        "df[id='{}']:scaling-aspect[id='{}']:scaling-config-action:vnf-"
-                        "config-primitive-name-ref='{}' does not match any "
-                        "day1-2 configuration:config-primitive:name".format(
-                            df["id"],
-                            df["scaling-aspect"][0]["id"],
-                            sa["scaling-config-action"][0][
-                                "vnf-config-primitive-name-ref"
-                            ],
-                        )
-                    ),
-                    norm(str(e.exception)),
-                    "Wrong exception text",
+            ),
+            norm(str(e.exception)),
+            "Wrong exception text",
+        )
+
+    @patch("osm_nbi.descriptor_topics.shutil")
+    @patch("osm_nbi.descriptor_topics.os.rename")
+    def test_new_vnfd_check_input_validation_scaling_aspect_vnf_configuration(
+        self, mock_rename, mock_shutil
+    ):
+        """Testing input validation during new vnfd creation
+        for scaling criteria without day12 configuration"""
+        did, test_vnfd = self.prepare_vnfd_creation()
+        test_vnfd = self.prepare_test_vnfd(test_vnfd)
+        test_vnfd["df"][0]["lcm-operations-configuration"]["operate-vnf-op-config"][
+            "day1-2"
+        ].pop()
+        df = test_vnfd["df"][0]
+
+        with self.assertRaises(
+            EngineException, msg="Accepted non-existent Scaling Group VDU ID Reference"
+        ) as e:
+            self.topic.upload_content(
+                fake_session, did, test_vnfd, {}, {"Content-Type": []}
+            )
+        self.assertEqual(
+            e.exception.http_code,
+            HTTPStatus.UNPROCESSABLE_ENTITY,
+            "Wrong HTTP status code",
+        )
+        self.assertIn(
+            norm(
+                "'day1-2 configuration' not defined in the descriptor but it is referenced "
+                "by df[id='{}']:scaling-aspect[id='{}']:scaling-config-action".format(
+                    df["id"], df["scaling-aspect"][0]["id"]
                 )
                 )
-            finally:
-                test_vnfd["df"][0].get("lcm-operations-configuration").get(
-                    "operate-vnf-op-config"
-                )["day1-2"][0]["config-primitive"] = tmp
-        with self.subTest(i=15, t="Check Input Validation: everything right"):
-            test_vnfd["id"] = "fake-vnfd-id"
-            test_vnfd["df"][0].get("lcm-operations-configuration").get(
-                "operate-vnf-op-config"
-            )["day1-2"][0]["id"] = "fake-vnfd-id"
-            self.db.get_one.side_effect = [
-                {"_id": did, "_admin": deepcopy(db_vnfd_content["_admin"])},
-                None,
-            ]
-            rc = self.topic.upload_content(
+            ),
+            norm(str(e.exception)),
+            "Wrong exception text",
+        )
+
+    @patch("osm_nbi.descriptor_topics.shutil")
+    @patch("osm_nbi.descriptor_topics.os.rename")
+    def test_new_vnfd_check_input_validation_scaling_config_action(
+        self, mock_rename, mock_shutil
+    ):
+        """Testing input validation during new vnfd creation
+        for scaling criteria wrong config primitive"""
+        did, test_vnfd = self.prepare_vnfd_creation()
+        test_vnfd = self.prepare_test_vnfd(test_vnfd)
+        df = test_vnfd["df"][0]
+        affected_df = test_vnfd["df"][0]
+        sa = affected_df["scaling-aspect"][0]
+        test_vnfd["df"][0].get("lcm-operations-configuration").get(
+            "operate-vnf-op-config"
+        )["day1-2"][0]["config-primitive"] = [{"name": "wrong-primitive"}]
+
+        with self.assertRaises(
+            EngineException, msg="Accepted non-existent Scaling Group VDU ID Reference"
+        ) as e:
+            self.topic.upload_content(
                 fake_session, did, test_vnfd, {}, {"Content-Type": []}
             )
                 fake_session, did, test_vnfd, {}, {"Content-Type": []}
             )
-            self.assertTrue(rc, "Input Validation: Unexpected failure")
-        return
+        self.assertEqual(
+            e.exception.http_code,
+            HTTPStatus.UNPROCESSABLE_ENTITY,
+            "Wrong HTTP status code",
+        )
+        self.assertIn(
+            norm(
+                "df[id='{}']:scaling-aspect[id='{}']:scaling-config-action:vnf-"
+                "config-primitive-name-ref='{}' does not match any "
+                "day1-2 configuration:config-primitive:name".format(
+                    df["id"],
+                    df["scaling-aspect"][0]["id"],
+                    sa["scaling-config-action"][0]["vnf-config-primitive-name-ref"],
+                )
+            ),
+            norm(str(e.exception)),
+            "Wrong exception text",
+        )
+
+    @patch("osm_nbi.descriptor_topics.shutil")
+    @patch("osm_nbi.descriptor_topics.os.rename")
+    def test_new_vnfd_check_input_validation_everything_right(
+        self, mock_rename, mock_shutil
+    ):
+        """Testing input validation during new vnfd creation
+        everything correct"""
+        did, test_vnfd = self.prepare_vnfd_creation()
+        test_vnfd = self.prepare_test_vnfd(test_vnfd)
+        test_vnfd["id"] = "fake-vnfd-id"
+        test_vnfd["df"][0].get("lcm-operations-configuration").get(
+            "operate-vnf-op-config"
+        )["day1-2"][0]["id"] = "fake-vnfd-id"
+        self.db.get_one.side_effect = [
+            {"_id": did, "_admin": deepcopy(db_vnfd_content["_admin"])},
+            None,
+        ]
+        rc = self.topic.upload_content(
+            fake_session, did, test_vnfd, {}, {"Content-Type": []}
+        )
+        self.assertTrue(rc, "Input Validation: Unexpected failure")
 
     def test_edit_vnfd(self):
         vnfd_content = deepcopy(db_vnfd_content)
 
     def test_edit_vnfd(self):
         vnfd_content = deepcopy(db_vnfd_content)
@@ -697,6 +829,18 @@ class Test_VnfdTopic(TestCase):
                 "Wrong DB NSD vnfd-id",
             )
 
                 "Wrong DB NSD vnfd-id",
             )
 
+            self.assertEqual(
+                self.db.del_list.call_args[0][0],
+                self.topic.topic + "_revisions",
+                "Wrong DB topic",
+            )
+
+            self.assertEqual(
+                self.db.del_list.call_args[0][1]["_id"]["$regex"],
+                did,
+                "Wrong ID for rexep delete",
+            )
+
             self.db.set_one.assert_not_called()
             fs_del_calls = self.fs.file_delete.call_args_list
             self.assertEqual(fs_del_calls[0][0][0], did, "Wrong FS file id")
             self.db.set_one.assert_not_called()
             fs_del_calls = self.fs.file_delete.call_args_list
             self.assertEqual(fs_del_calls[0][0][0], did, "Wrong FS file id")
@@ -768,13 +912,125 @@ class Test_VnfdTopic(TestCase):
             self.assertIsNone(
                 db_s1_args[1]["update_dict"], "Wrong DB update dictionary"
             )
             self.assertIsNone(
                 db_s1_args[1]["update_dict"], "Wrong DB update dictionary"
             )
-            self.assertEqual(
-                db_s1_args[1]["pull_list"],
-                {"_admin.projects_read": (p_id,), "_admin.projects_write": (p_id,)},
-                "Wrong DB pull_list dictionary",
+            self.assertEqual(
+                db_s1_args[1]["pull_list"],
+                {"_admin.projects_read": (p_id,), "_admin.projects_write": (p_id,)},
+                "Wrong DB pull_list dictionary",
+            )
+            self.fs.file_delete.assert_not_called()
+        return
+
+    def prepare_vnfd_validation(self):
+        descriptor_name = "test_descriptor"
+        self.fs.file_open.side_effect = lambda path, mode: open(
+            "/tmp/" + str(uuid4()), "a+b"
+        )
+        old_vnfd, new_vnfd = self.create_desc_temp(db_vnfd_content)
+        return descriptor_name, old_vnfd, new_vnfd
+
+    @patch("osm_nbi.descriptor_topics.detect_descriptor_usage")
+    @patch("osm_nbi.descriptor_topics.yaml.safe_load")
+    def test_validate_vnfd_changes_day12_config_primitive_changed(
+        self, mock_safe_load, mock_detect_usage
+    ):
+        """Validating VNFD for VNFD updates, day1-2 config primitive has changed"""
+        descriptor_name, old_vnfd, new_vnfd = self.prepare_vnfd_validation()
+        did = old_vnfd["_id"]
+        new_vnfd["df"][0]["lcm-operations-configuration"]["operate-vnf-op-config"][
+            "day1-2"
+        ][0]["config-primitive"][0]["name"] = "new_action"
+        mock_safe_load.side_effect = [old_vnfd, new_vnfd]
+        mock_detect_usage.return_value = True
+        self.db.get_one.return_value = old_vnfd
+
+        with self.assertNotRaises(EngineException):
+            self.topic._validate_descriptor_changes(
+                did, descriptor_name, "/tmp/", "/tmp:1/"
+            )
+        self.db.get_one.assert_called_once()
+        mock_detect_usage.assert_called_once()
+        self.assertEqual(mock_safe_load.call_count, 2)
+
+    @patch("osm_nbi.descriptor_topics.detect_descriptor_usage")
+    @patch("osm_nbi.descriptor_topics.yaml.safe_load")
+    def test_validate_vnfd_changes_sw_version_changed(
+        self, mock_safe_load, mock_detect_usage
+    ):
+        """Validating VNFD for updates, software version has changed"""
+        # old vnfd uses the default software version: 1.0
+        descriptor_name, old_vnfd, new_vnfd = self.prepare_vnfd_validation()
+        did = old_vnfd["_id"]
+        new_vnfd["software-version"] = "1.3"
+        new_vnfd["sw-image-desc"][0]["name"] = "new-image"
+        mock_safe_load.side_effect = [old_vnfd, new_vnfd]
+        mock_detect_usage.return_value = True
+        self.db.get_one.return_value = old_vnfd
+
+        with self.assertNotRaises(EngineException):
+            self.topic._validate_descriptor_changes(
+                did, descriptor_name, "/tmp/", "/tmp:1/"
+            )
+        self.db.get_one.assert_called_once()
+        mock_detect_usage.assert_called_once()
+        self.assertEqual(mock_safe_load.call_count, 2)
+
+    @patch("osm_nbi.descriptor_topics.detect_descriptor_usage")
+    @patch("osm_nbi.descriptor_topics.yaml.safe_load")
+    def test_validate_vnfd_changes_sw_version_not_changed_mgm_cp_changed(
+        self, mock_safe_load, mock_detect_usage
+    ):
+        """Validating VNFD for updates, software version has not
+        changed, mgmt-cp has changed."""
+        descriptor_name, old_vnfd, new_vnfd = self.prepare_vnfd_validation()
+        new_vnfd["mgmt-cp"] = "new-mgmt-cp"
+        mock_safe_load.side_effect = [old_vnfd, new_vnfd]
+        did = old_vnfd["_id"]
+        mock_detect_usage.return_value = True
+        self.db.get_one.return_value = old_vnfd
+
+        with self.assertRaises(
+            EngineException, msg="there are disallowed changes in the vnf descriptor"
+        ) as e:
+            self.topic._validate_descriptor_changes(
+                did, descriptor_name, "/tmp/", "/tmp:1/"
+            )
+
+        self.assertEqual(
+            e.exception.http_code,
+            HTTPStatus.UNPROCESSABLE_ENTITY,
+            "Wrong HTTP status code",
+        )
+        self.assertIn(
+            norm("there are disallowed changes in the vnf descriptor"),
+            norm(str(e.exception)),
+            "Wrong exception text",
+        )
+        self.db.get_one.assert_called_once()
+        mock_detect_usage.assert_called_once()
+        self.assertEqual(mock_safe_load.call_count, 2)
+
+    @patch("osm_nbi.descriptor_topics.detect_descriptor_usage")
+    @patch("osm_nbi.descriptor_topics.yaml.safe_load")
+    def test_validate_vnfd_changes_sw_version_not_changed_mgm_cp_changed_vnfd_not_in_use(
+        self, mock_safe_load, mock_detect_usage
+    ):
+        """Validating VNFD for updates, software version has not
+        changed, mgmt-cp has changed, vnfd is not in use."""
+        descriptor_name, old_vnfd, new_vnfd = self.prepare_vnfd_validation()
+        new_vnfd["mgmt-cp"] = "new-mgmt-cp"
+        mock_safe_load.side_effect = [old_vnfd, new_vnfd]
+        did = old_vnfd["_id"]
+        mock_detect_usage.return_value = None
+        self.db.get_one.return_value = old_vnfd
+
+        with self.assertNotRaises(EngineException):
+            self.topic._validate_descriptor_changes(
+                did, descriptor_name, "/tmp/", "/tmp:1/"
             )
             )
-            self.fs.file_delete.assert_not_called()
-        return
+
+        self.db.get_one.assert_called_once()
+        mock_detect_usage.assert_called_once()
+        mock_safe_load.assert_not_called()
 
     def test_validate_mgmt_interface_connection_point_on_valid_descriptor(self):
         indata = deepcopy(db_vnfd_content)
 
     def test_validate_mgmt_interface_connection_point_on_valid_descriptor(self):
         indata = deepcopy(db_vnfd_content)
@@ -1003,17 +1259,377 @@ class Test_VnfdTopic(TestCase):
             "Wrong exception text",
         )
 
             "Wrong exception text",
         )
 
-    def test_validate_monitoring_params_on_duplicated_vdu_monitoring_param(self):
-        indata = deepcopy(db_vnfd_content)
-        duplicated_mp = {
-            "id": "dataVM_cpu_util",
-            "name": "dataVM_cpu_util",
-            "performance_metric": "cpu",
-        }
-        affected_vdu = indata["vdu"][1]
-        affected_vdu["monitoring-parameter"].insert(0, duplicated_mp)
-        with self.assertRaises(EngineException) as e:
-            self.topic.validate_monitoring_params(indata)
+    def test_validate_monitoring_params_on_duplicated_vdu_monitoring_param(self):
+        indata = deepcopy(db_vnfd_content)
+        duplicated_mp = {
+            "id": "dataVM_cpu_util",
+            "name": "dataVM_cpu_util",
+            "performance_metric": "cpu",
+        }
+        affected_vdu = indata["vdu"][1]
+        affected_vdu["monitoring-parameter"].insert(0, duplicated_mp)
+        with self.assertRaises(EngineException) as e:
+            self.topic.validate_monitoring_params(indata)
+        self.assertEqual(
+            e.exception.http_code,
+            HTTPStatus.UNPROCESSABLE_ENTITY,
+            "Wrong HTTP status code",
+        )
+        self.assertIn(
+            norm(
+                "Duplicated monitoring-parameter id in "
+                "vdu[id='{}']:monitoring-parameter[id='{}']".format(
+                    affected_vdu["id"], duplicated_mp["id"]
+                )
+            ),
+            norm(str(e.exception)),
+            "Wrong exception text",
+        )
+
+    def test_validate_monitoring_params_on_duplicated_df_monitoring_param(self):
+        indata = deepcopy(db_vnfd_content)
+        duplicated_mp = {
+            "id": "memory",
+            "name": "memory",
+            "performance_metric": "memory",
+        }
+        affected_df = indata["df"][0]
+        affected_df["monitoring-parameter"] = [duplicated_mp, duplicated_mp]
+        with self.assertRaises(EngineException) as e:
+            self.topic.validate_monitoring_params(indata)
+        self.assertEqual(
+            e.exception.http_code,
+            HTTPStatus.UNPROCESSABLE_ENTITY,
+            "Wrong HTTP status code",
+        )
+        self.assertIn(
+            norm(
+                "Duplicated monitoring-parameter id in "
+                "df[id='{}']:monitoring-parameter[id='{}']".format(
+                    affected_df["id"], duplicated_mp["id"]
+                )
+            ),
+            norm(str(e.exception)),
+            "Wrong exception text",
+        )
+
+    def test_validate_scaling_group_descriptor_on_valid_descriptor(self):
+        indata = db_vnfd_content
+        self.topic.validate_scaling_group_descriptor(indata)
+
+    def test_validate_scaling_group_descriptor_when_missing_monitoring_param(self):
+        indata = deepcopy(db_vnfd_content)
+        vdu = indata["vdu"][1]
+        affected_df = indata["df"][0]
+        affected_sa = affected_df["scaling-aspect"][0]
+        affected_sp = affected_sa["scaling-policy"][0]
+        affected_sc = affected_sp["scaling-criteria"][0]
+        vdu.pop("monitoring-parameter")
+        with self.assertRaises(EngineException) as e:
+            self.topic.validate_scaling_group_descriptor(indata)
+        self.assertEqual(
+            e.exception.http_code,
+            HTTPStatus.UNPROCESSABLE_ENTITY,
+            "Wrong HTTP status code",
+        )
+        self.assertIn(
+            norm(
+                "df[id='{}']:scaling-aspect[id='{}']:scaling-policy"
+                "[name='{}']:scaling-criteria[name='{}']: "
+                "vnf-monitoring-param-ref='{}' not defined in any monitoring-param".format(
+                    affected_df["id"],
+                    affected_sa["id"],
+                    affected_sp["name"],
+                    affected_sc["name"],
+                    affected_sc["vnf-monitoring-param-ref"],
+                )
+            ),
+            norm(str(e.exception)),
+            "Wrong exception text",
+        )
+
+    def test_validate_scaling_group_descriptor_when_missing_vnf_configuration(self):
+        indata = deepcopy(db_vnfd_content)
+        df = indata["df"][0]
+        affected_sa = df["scaling-aspect"][0]
+        indata["df"][0]["lcm-operations-configuration"]["operate-vnf-op-config"][
+            "day1-2"
+        ].pop()
+        with self.assertRaises(EngineException) as e:
+            self.topic.validate_scaling_group_descriptor(indata)
+        self.assertEqual(
+            e.exception.http_code,
+            HTTPStatus.UNPROCESSABLE_ENTITY,
+            "Wrong HTTP status code",
+        )
+        self.assertIn(
+            norm(
+                "'day1-2 configuration' not defined in the descriptor but it is referenced "
+                "by df[id='{}']:scaling-aspect[id='{}']:scaling-config-action".format(
+                    df["id"], affected_sa["id"]
+                )
+            ),
+            norm(str(e.exception)),
+            "Wrong exception text",
+        )
+
+    def test_validate_scaling_group_descriptor_when_missing_scaling_config_action_primitive(
+        self,
+    ):
+        indata = deepcopy(db_vnfd_content)
+        df = indata["df"][0]
+        affected_sa = df["scaling-aspect"][0]
+        affected_sca_primitive = affected_sa["scaling-config-action"][0][
+            "vnf-config-primitive-name-ref"
+        ]
+        df["lcm-operations-configuration"]["operate-vnf-op-config"]["day1-2"][0][
+            "config-primitive"
+        ] = []
+        with self.assertRaises(EngineException) as e:
+            self.topic.validate_scaling_group_descriptor(indata)
+        self.assertEqual(
+            e.exception.http_code,
+            HTTPStatus.UNPROCESSABLE_ENTITY,
+            "Wrong HTTP status code",
+        )
+        self.assertIn(
+            norm(
+                "df[id='{}']:scaling-aspect[id='{}']:scaling-config-action:vnf-"
+                "config-primitive-name-ref='{}' does not match any "
+                "day1-2 configuration:config-primitive:name".format(
+                    df["id"], affected_sa["id"], affected_sca_primitive
+                )
+            ),
+            norm(str(e.exception)),
+            "Wrong exception text",
+        )
+
+    def test_new_vnfd_revision(self):
+        did = db_vnfd_content["_id"]
+        self.fs.get_params.return_value = {}
+        self.fs.file_exists.return_value = False
+        self.fs.file_open.side_effect = lambda path, mode: open(
+            "/tmp/" + str(uuid4()), "a+b"
+        )
+        test_vnfd = deepcopy(db_vnfd_content)
+        del test_vnfd["_id"]
+        del test_vnfd["_admin"]
+        self.db.create.return_value = did
+        rollback = []
+        did2, oid = self.topic.new(rollback, fake_session, {})
+        db_args = self.db.create.call_args[0]
+        self.assertEqual(
+            db_args[1]["_admin"]["revision"], 0, "New package should be at revision 0"
+        )
+
+    @patch("osm_nbi.descriptor_topics.shutil")
+    @patch("osm_nbi.descriptor_topics.os.rename")
+    def test_update_vnfd(self, mock_rename, mock_shutil):
+        old_revision = 5
+        did = db_vnfd_content["_id"]
+        self.fs.path = ""
+        self.fs.get_params.return_value = {}
+        self.fs.file_exists.return_value = False
+        self.fs.file_open.side_effect = lambda path, mode: open(
+            "/tmp/" + str(uuid4()), "a+b"
+        )
+        new_vnfd = deepcopy(db_vnfd_content)
+        del new_vnfd["_id"]
+        self.db.create.return_value = did
+        rollback = []
+        did2, oid = self.topic.new(rollback, fake_session, {})
+        del new_vnfd["vdu"][0]["cloud-init-file"]
+        del new_vnfd["df"][0]["lcm-operations-configuration"]["operate-vnf-op-config"][
+            "day1-2"
+        ][0]["execution-environment-list"][0]["juju"]
+
+        old_vnfd = {"_id": did, "_admin": deepcopy(db_vnfd_content["_admin"])}
+        old_vnfd["_admin"]["revision"] = old_revision
+
+        self.db.get_one.side_effect = [old_vnfd, old_vnfd, None]
+        self.topic.upload_content(fake_session, did, new_vnfd, {}, {"Content-Type": []})
+
+        db_args = self.db.replace.call_args[0]
+        self.assertEqual(
+            db_args[2]["_admin"]["revision"],
+            old_revision + 1,
+            "Revision should increment",
+        )
+
+
+class Test_NsdTopic(TestCase):
+    @classmethod
+    def setUpClass(cls):
+        cls.test_name = "test-nsd-topic"
+
+    @classmethod
+    def tearDownClass(cls):
+        pass
+
+    def setUp(self):
+        self.db = Mock(dbbase.DbBase())
+        self.fs = Mock(fsbase.FsBase())
+        self.msg = Mock(msgbase.MsgBase())
+        self.auth = Mock(authconn.Authconn(None, None, None))
+        self.topic = NsdTopic(self.db, self.fs, self.msg, self.auth)
+        self.topic.check_quota = Mock(return_value=None)  # skip quota
+
+    @contextmanager
+    def assertNotRaises(self, exception_type):
+        try:
+            yield None
+        except exception_type:
+            raise self.failureException("{} raised".format(exception_type.__name__))
+
+    def create_desc_temp(self, template):
+        old_desc = deepcopy(template)
+        new_desc = deepcopy(template)
+        return old_desc, new_desc
+
+    def prepare_nsd_creation(self):
+        self.fs.path = ""
+        did = db_nsd_content["_id"]
+        self.fs.get_params.return_value = {}
+        self.fs.file_exists.return_value = False
+        self.fs.file_open.side_effect = lambda path, mode: tempfile.TemporaryFile(
+            mode="a+b"
+        )
+        self.db.get_one.side_effect = [
+            {"_id": did, "_admin": deepcopy(db_nsd_content["_admin"])},
+            None,
+        ]
+        test_nsd = deepcopy(db_nsd_content)
+        del test_nsd["_id"]
+        del test_nsd["_admin"]
+        return did, test_nsd
+
+    @patch("osm_nbi.descriptor_topics.shutil")
+    @patch("osm_nbi.descriptor_topics.os.rename")
+    def test_new_nsd_normal_creation(self, mock_rename, mock_shutil):
+        did, test_nsd = self.prepare_nsd_creation()
+        self.db.create.return_value = did
+        rollback = []
+
+        did2, oid = self.topic.new(rollback, fake_session, {})
+        db_args = self.db.create.call_args[0]
+        msg_args = self.msg.write.call_args[0]
+        self.assertEqual(len(rollback), 1, "Wrong rollback length")
+        self.assertEqual(msg_args[0], self.topic.topic_msg, "Wrong message topic")
+        self.assertEqual(msg_args[1], "created", "Wrong message action")
+        self.assertEqual(msg_args[2], {"_id": did}, "Wrong message content")
+        self.assertEqual(db_args[0], self.topic.topic, "Wrong DB topic")
+        self.assertEqual(did2, did, "Wrong DB NSD id")
+        self.assertIsNotNone(db_args[1]["_admin"]["created"], "Wrong creation time")
+        self.assertEqual(
+            db_args[1]["_admin"]["modified"],
+            db_args[1]["_admin"]["created"],
+            "Wrong modification time",
+        )
+        self.assertEqual(
+            db_args[1]["_admin"]["projects_read"],
+            [test_pid],
+            "Wrong read-only project list",
+        )
+        self.assertEqual(
+            db_args[1]["_admin"]["projects_write"],
+            [test_pid],
+            "Wrong read-write project list",
+        )
+
+        self.db.get_list.return_value = [db_vnfd_content]
+
+        self.topic.upload_content(fake_session, did, test_nsd, {}, {"Content-Type": []})
+        msg_args = self.msg.write.call_args[0]
+        test_nsd["_id"] = did
+        self.assertEqual(msg_args[0], self.topic.topic_msg, "Wrong message topic")
+        self.assertEqual(msg_args[1], "edited", "Wrong message action")
+        self.assertEqual(msg_args[2], test_nsd, "Wrong message content")
+
+        db_args = self.db.get_one.mock_calls[0][1]
+        self.assertEqual(db_args[0], self.topic.topic, "Wrong DB topic")
+        self.assertEqual(db_args[1]["_id"], did, "Wrong DB NSD id")
+
+        db_args = self.db.replace.call_args[0]
+        self.assertEqual(db_args[0], self.topic.topic, "Wrong DB topic")
+        self.assertEqual(db_args[1], did, "Wrong DB NSD id")
+
+        admin = db_args[2]["_admin"]
+        db_admin = db_nsd_content["_admin"]
+        self.assertEqual(admin["created"], db_admin["created"], "Wrong creation time")
+        self.assertGreater(
+            admin["modified"], db_admin["created"], "Wrong modification time"
+        )
+        self.assertEqual(
+            admin["projects_read"],
+            db_admin["projects_read"],
+            "Wrong read-only project list",
+        )
+        self.assertEqual(
+            admin["projects_write"],
+            db_admin["projects_write"],
+            "Wrong read-write project list",
+        )
+        self.assertEqual(
+            admin["onboardingState"], "ONBOARDED", "Wrong onboarding state"
+        )
+        self.assertEqual(
+            admin["operationalState"], "ENABLED", "Wrong operational state"
+        )
+        self.assertEqual(admin["usageState"], "NOT_IN_USE", "Wrong usage state")
+
+        storage = admin["storage"]
+        self.assertEqual(storage["folder"], did + ":1", "Wrong storage folder")
+        self.assertEqual(storage["descriptor"], "package", "Wrong storage descriptor")
+
+        compare_desc(self, test_nsd, db_args[2], "NSD")
+        revision_args = self.db.create.call_args[0]
+        self.assertEqual(
+            revision_args[0], self.topic.topic + "_revisions", "Wrong topic"
+        )
+        self.assertEqual(revision_args[1]["id"], db_args[2]["id"], "Wrong revision id")
+        self.assertEqual(
+            revision_args[1]["_id"], db_args[2]["_id"] + ":1", "Wrong revision _id"
+        )
+
+    @patch("osm_nbi.descriptor_topics.shutil")
+    @patch("osm_nbi.descriptor_topics.os.rename")
+    def test_new_nsd_check_pyangbind_validation_required_properties(
+        self, mock_rename, mock_shutil
+    ):
+        did, test_nsd = self.prepare_nsd_creation()
+        del test_nsd["id"]
+
+        with self.assertRaises(
+            EngineException, msg="Accepted NSD with a missing required property"
+        ) as e:
+            self.topic.upload_content(
+                fake_session, did, test_nsd, {}, {"Content-Type": []}
+            )
+        self.assertEqual(
+            e.exception.http_code,
+            HTTPStatus.UNPROCESSABLE_ENTITY,
+            "Wrong HTTP status code",
+        )
+        self.assertIn(
+            norm("Error in pyangbind validation: '{}'".format("id")),
+            norm(str(e.exception)),
+            "Wrong exception text",
+        )
+
+    @patch("osm_nbi.descriptor_topics.shutil")
+    @patch("osm_nbi.descriptor_topics.os.rename")
+    def test_new_nsd_check_pyangbind_validation_additional_properties(
+        self, mock_rename, mock_shutil
+    ):
+        did, test_nsd = self.prepare_nsd_creation()
+        test_nsd["extra-property"] = 0
+
+        with self.assertRaises(
+            EngineException, msg="Accepted NSD with an additional property"
+        ) as e:
+            self.topic.upload_content(
+                fake_session, did, test_nsd, {}, {"Content-Type": []}
+            )
         self.assertEqual(
             e.exception.http_code,
             HTTPStatus.UNPROCESSABLE_ENTITY,
         self.assertEqual(
             e.exception.http_code,
             HTTPStatus.UNPROCESSABLE_ENTITY,
@@ -1021,26 +1637,28 @@ class Test_VnfdTopic(TestCase):
         )
         self.assertIn(
             norm(
         )
         self.assertIn(
             norm(
-                "Duplicated monitoring-parameter id in "
-                "vdu[id='{}']:monitoring-parameter[id='{}']".format(
-                    affected_vdu["id"], duplicated_mp["id"]
+                "Error in pyangbind validation: {} ({})".format(
+                    "json object contained a key that did not exist", "extra-property"
                 )
             ),
             norm(str(e.exception)),
             "Wrong exception text",
         )
 
                 )
             ),
             norm(str(e.exception)),
             "Wrong exception text",
         )
 
-    def test_validate_monitoring_params_on_duplicated_df_monitoring_param(self):
-        indata = deepcopy(db_vnfd_content)
-        duplicated_mp = {
-            "id": "memory",
-            "name": "memory",
-            "performance_metric": "memory",
-        }
-        affected_df = indata["df"][0]
-        affected_df["monitoring-parameter"] = [duplicated_mp, duplicated_mp]
-        with self.assertRaises(EngineException) as e:
-            self.topic.validate_monitoring_params(indata)
+    @patch("osm_nbi.descriptor_topics.shutil")
+    @patch("osm_nbi.descriptor_topics.os.rename")
+    def test_new_nsd_check_pyangbind_validation_property_types(
+        self, mock_rename, mock_shutil
+    ):
+        did, test_nsd = self.prepare_nsd_creation()
+        test_nsd["designer"] = {"key": 0}
+
+        with self.assertRaises(
+            EngineException, msg="Accepted NSD with a wrongly typed property"
+        ) as e:
+            self.topic.upload_content(
+                fake_session, did, test_nsd, {}, {"Content-Type": []}
+            )
         self.assertEqual(
             e.exception.http_code,
             HTTPStatus.UNPROCESSABLE_ENTITY,
         self.assertEqual(
             e.exception.http_code,
             HTTPStatus.UNPROCESSABLE_ENTITY,
@@ -1048,29 +1666,34 @@ class Test_VnfdTopic(TestCase):
         )
         self.assertIn(
             norm(
         )
         self.assertIn(
             norm(
-                "Duplicated monitoring-parameter id in "
-                "df[id='{}']:monitoring-parameter[id='{}']".format(
-                    affected_df["id"], duplicated_mp["id"]
+                "Error in pyangbind validation: {} ({})".format(
+                    "json object contained a key that did not exist", "key"
                 )
             ),
             norm(str(e.exception)),
             "Wrong exception text",
         )
 
                 )
             ),
             norm(str(e.exception)),
             "Wrong exception text",
         )
 
-    def test_validate_scaling_group_descriptor_on_valid_descriptor(self):
-        indata = db_vnfd_content
-        self.topic.validate_scaling_group_descriptor(indata)
+    @patch("osm_nbi.descriptor_topics.shutil")
+    @patch("osm_nbi.descriptor_topics.os.rename")
+    def test_new_nsd_check_input_validation_mgmt_network_virtual_link_protocol_data(
+        self, mock_rename, mock_shutil
+    ):
+        did, test_nsd = self.prepare_nsd_creation()
+        df = test_nsd["df"][0]
+        mgmt_profile = {
+            "id": "id",
+            "virtual-link-desc-id": "mgmt",
+            "virtual-link-protocol-data": {"associated-layer-protocol": "ipv4"},
+        }
+        df["virtual-link-profile"] = [mgmt_profile]
 
 
-    def test_validate_scaling_group_descriptor_when_missing_monitoring_param(self):
-        indata = deepcopy(db_vnfd_content)
-        vdu = indata["vdu"][1]
-        affected_df = indata["df"][0]
-        affected_sa = affected_df["scaling-aspect"][0]
-        affected_sp = affected_sa["scaling-policy"][0]
-        affected_sc = affected_sp["scaling-criteria"][0]
-        vdu.pop("monitoring-parameter")
-        with self.assertRaises(EngineException) as e:
-            self.topic.validate_scaling_group_descriptor(indata)
+        with self.assertRaises(
+            EngineException, msg="Accepted VLD with mgmt-network+ip-profile"
+        ) as e:
+            self.topic.upload_content(
+                fake_session, did, test_nsd, {}, {"Content-Type": []}
+            )
         self.assertEqual(
             e.exception.http_code,
             HTTPStatus.UNPROCESSABLE_ENTITY,
         self.assertEqual(
             e.exception.http_code,
             HTTPStatus.UNPROCESSABLE_ENTITY,
@@ -1078,59 +1701,62 @@ class Test_VnfdTopic(TestCase):
         )
         self.assertIn(
             norm(
         )
         self.assertIn(
             norm(
-                "df[id='{}']:scaling-aspect[id='{}']:scaling-policy"
-                "[name='{}']:scaling-criteria[name='{}']: "
-                "vnf-monitoring-param-ref='{}' not defined in any monitoring-param".format(
-                    affected_df["id"],
-                    affected_sa["id"],
-                    affected_sp["name"],
-                    affected_sc["name"],
-                    affected_sc["vnf-monitoring-param-ref"],
+                "Error at df[id='{}']:virtual-link-profile[id='{}']:virtual-link-protocol-data"
+                " You cannot set a virtual-link-protocol-data when mgmt-network is True".format(
+                    df["id"], mgmt_profile["id"]
                 )
             ),
             norm(str(e.exception)),
             "Wrong exception text",
         )
 
                 )
             ),
             norm(str(e.exception)),
             "Wrong exception text",
         )
 
-    def test_validate_scaling_group_descriptor_when_missing_vnf_configuration(self):
-        indata = deepcopy(db_vnfd_content)
-        df = indata["df"][0]
-        affected_sa = df["scaling-aspect"][0]
-        indata["df"][0]["lcm-operations-configuration"]["operate-vnf-op-config"][
-            "day1-2"
-        ].pop()
-        with self.assertRaises(EngineException) as e:
-            self.topic.validate_scaling_group_descriptor(indata)
+    @patch("osm_nbi.descriptor_topics.shutil")
+    @patch("osm_nbi.descriptor_topics.os.rename")
+    def test_new_nsd_check_descriptor_dependencies_vnfd_id(
+        self, mock_rename, mock_shutil
+    ):
+        did, test_nsd = self.prepare_nsd_creation()
+        self.db.get_list.return_value = []
+
+        with self.assertRaises(
+            EngineException, msg="Accepted wrong VNFD ID reference"
+        ) as e:
+            self.topic.upload_content(
+                fake_session, did, test_nsd, {}, {"Content-Type": []}
+            )
         self.assertEqual(
         self.assertEqual(
-            e.exception.http_code,
-            HTTPStatus.UNPROCESSABLE_ENTITY,
-            "Wrong HTTP status code",
+            e.exception.http_code, HTTPStatus.CONFLICT, "Wrong HTTP status code"
         )
         self.assertIn(
             norm(
         )
         self.assertIn(
             norm(
-                "'day1-2 configuration' not defined in the descriptor but it is referenced "
-                "by df[id='{}']:scaling-aspect[id='{}']:scaling-config-action".format(
-                    df["id"], affected_sa["id"]
+                "'vnfd-id'='{}' references a non existing vnfd".format(
+                    test_nsd["vnfd-id"][0]
                 )
             ),
             norm(str(e.exception)),
             "Wrong exception text",
         )
 
                 )
             ),
             norm(str(e.exception)),
             "Wrong exception text",
         )
 
-    def test_validate_scaling_group_descriptor_when_missing_scaling_config_action_primitive(
-        self,
+    @patch("osm_nbi.descriptor_topics.shutil")
+    @patch("osm_nbi.descriptor_topics.os.rename")
+    def test_new_nsd_check_descriptor_dependencies_vld_vnfd_connection_point_ref(
+        self, mock_rename, mock_shutil
     ):
     ):
-        indata = deepcopy(db_vnfd_content)
-        df = indata["df"][0]
-        affected_sa = df["scaling-aspect"][0]
-        affected_sca_primitive = affected_sa["scaling-config-action"][0][
-            "vnf-config-primitive-name-ref"
-        ]
-        df["lcm-operations-configuration"]["operate-vnf-op-config"]["day1-2"][0][
-            "config-primitive"
-        ] = []
-        with self.assertRaises(EngineException) as e:
-            self.topic.validate_scaling_group_descriptor(indata)
+        # Check Descriptor Dependencies: "vld[vnfd-connection-point-ref][vnfd-connection-point-ref]
+        did, test_nsd = self.prepare_nsd_creation()
+        vnfd_descriptor = deepcopy(db_vnfd_content)
+        df = test_nsd["df"][0]
+        affected_vnf_profile = df["vnf-profile"][0]
+        affected_virtual_link = affected_vnf_profile["virtual-link-connectivity"][1]
+        affected_cpd = vnfd_descriptor["ext-cpd"].pop()
+        self.db.get_list.return_value = [vnfd_descriptor]
+
+        with self.assertRaises(
+            EngineException, msg="Accepted wrong VLD CP reference"
+        ) as e:
+            self.topic.upload_content(
+                fake_session, did, test_nsd, {}, {"Content-Type": []}
+            )
         self.assertEqual(
             e.exception.http_code,
             HTTPStatus.UNPROCESSABLE_ENTITY,
         self.assertEqual(
             e.exception.http_code,
             HTTPStatus.UNPROCESSABLE_ENTITY,
@@ -1138,314 +1764,20 @@ class Test_VnfdTopic(TestCase):
         )
         self.assertIn(
             norm(
         )
         self.assertIn(
             norm(
-                "df[id='{}']:scaling-aspect[id='{}']:scaling-config-action:vnf-"
-                "config-primitive-name-ref='{}' does not match any "
-                "day1-2 configuration:config-primitive:name".format(
-                    df["id"], affected_sa["id"], affected_sca_primitive
+                "Error at df[id='{}']:vnf-profile[id='{}']:virtual-link-connectivity"
+                "[virtual-link-profile-id='{}']:constituent-cpd-id='{}' references a "
+                "non existing ext-cpd:id inside vnfd '{}'".format(
+                    df["id"],
+                    affected_vnf_profile["id"],
+                    affected_virtual_link["virtual-link-profile-id"],
+                    affected_cpd["id"],
+                    vnfd_descriptor["id"],
                 )
             ),
             norm(str(e.exception)),
             "Wrong exception text",
         )
 
                 )
             ),
             norm(str(e.exception)),
             "Wrong exception text",
         )
 
-
-class Test_NsdTopic(TestCase):
-    @classmethod
-    def setUpClass(cls):
-        cls.test_name = "test-nsd-topic"
-
-    @classmethod
-    def tearDownClass(cls):
-        pass
-
-    def setUp(self):
-        self.db = Mock(dbbase.DbBase())
-        self.fs = Mock(fsbase.FsBase())
-        self.msg = Mock(msgbase.MsgBase())
-        self.auth = Mock(authconn.Authconn(None, None, None))
-        self.topic = NsdTopic(self.db, self.fs, self.msg, self.auth)
-        self.topic.check_quota = Mock(return_value=None)  # skip quota
-
-    def test_new_nsd(self):
-        did = db_nsd_content["_id"]
-        self.fs.get_params.return_value = {}
-        self.fs.file_exists.return_value = False
-        self.fs.file_open.side_effect = lambda path, mode: open(
-            "/tmp/" + str(uuid4()), "a+b"
-        )
-        test_nsd = deepcopy(db_nsd_content)
-        del test_nsd["_id"]
-        del test_nsd["_admin"]
-        with self.subTest(i=1, t="Normal Creation"):
-            self.db.create.return_value = did
-            rollback = []
-            did2, oid = self.topic.new(rollback, fake_session, {})
-            db_args = self.db.create.call_args[0]
-            msg_args = self.msg.write.call_args[0]
-            self.assertEqual(len(rollback), 1, "Wrong rollback length")
-            self.assertEqual(msg_args[0], self.topic.topic_msg, "Wrong message topic")
-            self.assertEqual(msg_args[1], "created", "Wrong message action")
-            self.assertEqual(msg_args[2], {"_id": did}, "Wrong message content")
-            self.assertEqual(db_args[0], self.topic.topic, "Wrong DB topic")
-            self.assertEqual(did2, did, "Wrong DB NSD id")
-            self.assertIsNotNone(db_args[1]["_admin"]["created"], "Wrong creation time")
-            self.assertEqual(
-                db_args[1]["_admin"]["modified"],
-                db_args[1]["_admin"]["created"],
-                "Wrong modification time",
-            )
-            self.assertEqual(
-                db_args[1]["_admin"]["projects_read"],
-                [test_pid],
-                "Wrong read-only project list",
-            )
-            self.assertEqual(
-                db_args[1]["_admin"]["projects_write"],
-                [test_pid],
-                "Wrong read-write project list",
-            )
-            try:
-                self.db.get_one.side_effect = [
-                    {"_id": did, "_admin": db_nsd_content["_admin"]},
-                    None,
-                ]
-                self.db.get_list.return_value = [db_vnfd_content]
-                self.topic.upload_content(
-                    fake_session, did, test_nsd, {}, {"Content-Type": []}
-                )
-                msg_args = self.msg.write.call_args[0]
-                test_nsd["_id"] = did
-                self.assertEqual(
-                    msg_args[0], self.topic.topic_msg, "Wrong message topic"
-                )
-                self.assertEqual(msg_args[1], "edited", "Wrong message action")
-                self.assertEqual(msg_args[2], test_nsd, "Wrong message content")
-                db_args = self.db.get_one.mock_calls[0][1]
-                self.assertEqual(db_args[0], self.topic.topic, "Wrong DB topic")
-                self.assertEqual(db_args[1]["_id"], did, "Wrong DB NSD id")
-                db_args = self.db.replace.call_args[0]
-                self.assertEqual(db_args[0], self.topic.topic, "Wrong DB topic")
-                self.assertEqual(db_args[1], did, "Wrong DB NSD id")
-                admin = db_args[2]["_admin"]
-                db_admin = db_nsd_content["_admin"]
-                self.assertEqual(
-                    admin["created"], db_admin["created"], "Wrong creation time"
-                )
-                self.assertGreater(
-                    admin["modified"], db_admin["created"], "Wrong modification time"
-                )
-                self.assertEqual(
-                    admin["projects_read"],
-                    db_admin["projects_read"],
-                    "Wrong read-only project list",
-                )
-                self.assertEqual(
-                    admin["projects_write"],
-                    db_admin["projects_write"],
-                    "Wrong read-write project list",
-                )
-                self.assertEqual(
-                    admin["onboardingState"], "ONBOARDED", "Wrong onboarding state"
-                )
-                self.assertEqual(
-                    admin["operationalState"], "ENABLED", "Wrong operational state"
-                )
-                self.assertEqual(admin["usageState"], "NOT_IN_USE", "Wrong usage state")
-                storage = admin["storage"]
-                self.assertEqual(storage["folder"], did, "Wrong storage folder")
-                self.assertEqual(
-                    storage["descriptor"], "package", "Wrong storage descriptor"
-                )
-                compare_desc(self, test_nsd, db_args[2], "NSD")
-            finally:
-                pass
-        self.db.get_one.side_effect = (
-            lambda table, filter, fail_on_empty=None, fail_on_more=None: {
-                "_id": did,
-                "_admin": db_nsd_content["_admin"],
-            }
-        )
-        with self.subTest(i=2, t="Check Pyangbind Validation: required properties"):
-            tmp = test_nsd["id"]
-            del test_nsd["id"]
-            try:
-                with self.assertRaises(
-                    EngineException, msg="Accepted NSD with a missing required property"
-                ) as e:
-                    self.topic.upload_content(
-                        fake_session, did, test_nsd, {}, {"Content-Type": []}
-                    )
-                self.assertEqual(
-                    e.exception.http_code,
-                    HTTPStatus.UNPROCESSABLE_ENTITY,
-                    "Wrong HTTP status code",
-                )
-                self.assertIn(
-                    norm("Error in pyangbind validation: '{}'".format("id")),
-                    norm(str(e.exception)),
-                    "Wrong exception text",
-                )
-            finally:
-                test_nsd["id"] = tmp
-        with self.subTest(i=3, t="Check Pyangbind Validation: additional properties"):
-            test_nsd["extra-property"] = 0
-            try:
-                with self.assertRaises(
-                    EngineException, msg="Accepted NSD with an additional property"
-                ) as e:
-                    self.topic.upload_content(
-                        fake_session, did, test_nsd, {}, {"Content-Type": []}
-                    )
-                self.assertEqual(
-                    e.exception.http_code,
-                    HTTPStatus.UNPROCESSABLE_ENTITY,
-                    "Wrong HTTP status code",
-                )
-                self.assertIn(
-                    norm(
-                        "Error in pyangbind validation: {} ({})".format(
-                            "json object contained a key that did not exist",
-                            "extra-property",
-                        )
-                    ),
-                    norm(str(e.exception)),
-                    "Wrong exception text",
-                )
-            finally:
-                del test_nsd["extra-property"]
-        with self.subTest(i=4, t="Check Pyangbind Validation: property types"):
-            tmp = test_nsd["designer"]
-            test_nsd["designer"] = {"key": 0}
-            try:
-                with self.assertRaises(
-                    EngineException, msg="Accepted NSD with a wrongly typed property"
-                ) as e:
-                    self.topic.upload_content(
-                        fake_session, did, test_nsd, {}, {"Content-Type": []}
-                    )
-                self.assertEqual(
-                    e.exception.http_code,
-                    HTTPStatus.UNPROCESSABLE_ENTITY,
-                    "Wrong HTTP status code",
-                )
-                self.assertIn(
-                    norm(
-                        "Error in pyangbind validation: {} ({})".format(
-                            "json object contained a key that did not exist", "key"
-                        )
-                    ),
-                    norm(str(e.exception)),
-                    "Wrong exception text",
-                )
-            finally:
-                test_nsd["designer"] = tmp
-        with self.subTest(
-            i=5, t="Check Input Validation: mgmt-network+virtual-link-protocol-data"
-        ):
-            df = test_nsd["df"][0]
-            mgmt_profile = {
-                "id": "id",
-                "virtual-link-desc-id": "mgmt",
-                "virtual-link-protocol-data": {"associated-layer-protocol": "ipv4"},
-            }
-            df["virtual-link-profile"] = [mgmt_profile]
-            try:
-                with self.assertRaises(
-                    EngineException, msg="Accepted VLD with mgmt-network+ip-profile"
-                ) as e:
-                    self.topic.upload_content(
-                        fake_session, did, test_nsd, {}, {"Content-Type": []}
-                    )
-                self.assertEqual(
-                    e.exception.http_code,
-                    HTTPStatus.UNPROCESSABLE_ENTITY,
-                    "Wrong HTTP status code",
-                )
-                self.assertIn(
-                    norm(
-                        "Error at df[id='{}']:virtual-link-profile[id='{}']:virtual-link-protocol-data"
-                        " You cannot set a virtual-link-protocol-data when mgmt-network is True".format(
-                            df["id"], mgmt_profile["id"]
-                        )
-                    ),
-                    norm(str(e.exception)),
-                    "Wrong exception text",
-                )
-            finally:
-                del df["virtual-link-profile"]
-        with self.subTest(i=6, t="Check Descriptor Dependencies: vnfd-id[]"):
-            self.db.get_one.side_effect = [
-                {"_id": did, "_admin": db_nsd_content["_admin"]},
-                None,
-            ]
-            self.db.get_list.return_value = []
-            try:
-                with self.assertRaises(
-                    EngineException, msg="Accepted wrong VNFD ID reference"
-                ) as e:
-                    self.topic.upload_content(
-                        fake_session, did, test_nsd, {}, {"Content-Type": []}
-                    )
-                self.assertEqual(
-                    e.exception.http_code, HTTPStatus.CONFLICT, "Wrong HTTP status code"
-                )
-                self.assertIn(
-                    norm(
-                        "'vnfd-id'='{}' references a non existing vnfd".format(
-                            test_nsd["vnfd-id"][0]
-                        )
-                    ),
-                    norm(str(e.exception)),
-                    "Wrong exception text",
-                )
-            finally:
-                pass
-        with self.subTest(
-            i=7,
-            t="Check Descriptor Dependencies: "
-            "vld[vnfd-connection-point-ref][vnfd-connection-point-ref]",
-        ):
-            vnfd_descriptor = deepcopy(db_vnfd_content)
-            df = test_nsd["df"][0]
-            affected_vnf_profile = df["vnf-profile"][0]
-            affected_virtual_link = affected_vnf_profile["virtual-link-connectivity"][1]
-            affected_cpd = vnfd_descriptor["ext-cpd"].pop()
-            self.db.get_one.side_effect = [
-                {"_id": did, "_admin": db_nsd_content["_admin"]},
-                None,
-            ]
-            self.db.get_list.return_value = [vnfd_descriptor]
-            try:
-                with self.assertRaises(
-                    EngineException, msg="Accepted wrong VLD CP reference"
-                ) as e:
-                    self.topic.upload_content(
-                        fake_session, did, test_nsd, {}, {"Content-Type": []}
-                    )
-                self.assertEqual(
-                    e.exception.http_code,
-                    HTTPStatus.UNPROCESSABLE_ENTITY,
-                    "Wrong HTTP status code",
-                )
-                self.assertIn(
-                    norm(
-                        "Error at df[id='{}']:vnf-profile[id='{}']:virtual-link-connectivity"
-                        "[virtual-link-profile-id='{}']:constituent-cpd-id='{}' references a "
-                        "non existing ext-cpd:id inside vnfd '{}'".format(
-                            df["id"],
-                            affected_vnf_profile["id"],
-                            affected_virtual_link["virtual-link-profile-id"],
-                            affected_cpd["id"],
-                            vnfd_descriptor["id"],
-                        )
-                    ),
-                    norm(str(e.exception)),
-                    "Wrong exception text",
-                )
-            finally:
-                pass
-        return
-
     def test_edit_nsd(self):
         nsd_content = deepcopy(db_nsd_content)
         did = nsd_content["_id"]
     def test_edit_nsd(self):
         nsd_content = deepcopy(db_nsd_content)
         did = nsd_content["_id"]
@@ -1522,6 +1854,7 @@ class Test_NsdTopic(TestCase):
                 norm(str(e.exception)),
                 "Wrong exception text",
             )
                 norm(str(e.exception)),
                 "Wrong exception text",
             )
+        self.db.reset_mock()
         return
 
     def test_delete_nsd(self):
         return
 
     def test_delete_nsd(self):
@@ -1633,8 +1966,98 @@ class Test_NsdTopic(TestCase):
                 "Wrong DB pull_list dictionary",
             )
             self.fs.file_delete.assert_not_called()
                 "Wrong DB pull_list dictionary",
             )
             self.fs.file_delete.assert_not_called()
+        self.db.reset_mock()
         return
 
         return
 
+    def prepare_nsd_validation(self):
+        descriptor_name = "test_ns_descriptor"
+        self.fs.file_open.side_effect = lambda path, mode: open(
+            "/tmp/" + str(uuid4()), "a+b"
+        )
+        old_nsd, new_nsd = self.create_desc_temp(db_nsd_content)
+        return descriptor_name, old_nsd, new_nsd
+
+    @patch("osm_nbi.descriptor_topics.detect_descriptor_usage")
+    @patch("osm_nbi.descriptor_topics.yaml.safe_load")
+    def test_validate_descriptor_ns_configuration_changed(
+        self, mock_safe_load, mock_detect_usage
+    ):
+        """Validating NSD and NSD has changes in ns-configuration:config-primitive"""
+        descriptor_name, old_nsd, new_nsd = self.prepare_nsd_validation()
+        mock_safe_load.side_effect = [old_nsd, new_nsd]
+        mock_detect_usage.return_value = True
+        self.db.get_one.return_value = old_nsd
+        old_nsd.update(
+            {"ns-configuration": {"config-primitive": [{"name": "add-user"}]}}
+        )
+        new_nsd.update(
+            {"ns-configuration": {"config-primitive": [{"name": "del-user"}]}}
+        )
+
+        with self.assertNotRaises(EngineException):
+            self.topic._validate_descriptor_changes(
+                old_nsd["_id"], descriptor_name, "/tmp", "/tmp:1"
+            )
+        self.db.get_one.assert_called_once()
+        mock_detect_usage.assert_called_once()
+        self.assertEqual(mock_safe_load.call_count, 2)
+
+    @patch("osm_nbi.descriptor_topics.detect_descriptor_usage")
+    @patch("osm_nbi.descriptor_topics.yaml.safe_load")
+    def test_validate_descriptor_nsd_name_changed(
+        self, mock_safe_load, mock_detect_usage
+    ):
+        """Validating NSD, NSD name has changed."""
+        descriptor_name, old_nsd, new_nsd = self.prepare_nsd_validation()
+        did = old_nsd["_id"]
+        new_nsd["name"] = "nscharm-ns2"
+        mock_safe_load.side_effect = [old_nsd, new_nsd]
+        mock_detect_usage.return_value = True
+        self.db.get_one.return_value = old_nsd
+
+        with self.assertRaises(
+            EngineException, msg="there are disallowed changes in the ns descriptor"
+        ) as e:
+            self.topic._validate_descriptor_changes(
+                did, descriptor_name, "/tmp", "/tmp:1"
+            )
+        self.assertEqual(
+            e.exception.http_code,
+            HTTPStatus.UNPROCESSABLE_ENTITY,
+            "Wrong HTTP status code",
+        )
+        self.assertIn(
+            norm("there are disallowed changes in the ns descriptor"),
+            norm(str(e.exception)),
+            "Wrong exception text",
+        )
+
+        self.db.get_one.assert_called_once()
+        mock_detect_usage.assert_called_once()
+        self.assertEqual(mock_safe_load.call_count, 2)
+
+    @patch("osm_nbi.descriptor_topics.detect_descriptor_usage")
+    @patch("osm_nbi.descriptor_topics.yaml.safe_load")
+    def test_validate_descriptor_nsd_name_changed_nsd_not_in_use(
+        self, mock_safe_load, mock_detect_usage
+    ):
+        """Validating NSD, NSD name has changed, NSD is not in use."""
+        descriptor_name, old_nsd, new_nsd = self.prepare_nsd_validation()
+        did = old_nsd["_id"]
+        new_nsd["name"] = "nscharm-ns2"
+        mock_safe_load.side_effect = [old_nsd, new_nsd]
+        mock_detect_usage.return_value = None
+        self.db.get_one.return_value = old_nsd
+
+        with self.assertNotRaises(Exception):
+            self.topic._validate_descriptor_changes(
+                did, descriptor_name, "/tmp", "/tmp:1"
+            )
+
+        self.db.get_one.assert_called_once()
+        mock_detect_usage.assert_called_once()
+        mock_safe_load.assert_not_called()
+
     def test_validate_vld_mgmt_network_with_virtual_link_protocol_data_on_valid_descriptor(
         self,
     ):
     def test_validate_vld_mgmt_network_with_virtual_link_protocol_data_on_valid_descriptor(
         self,
     ):
@@ -1775,6 +2198,61 @@ class Test_NsdTopic(TestCase):
             "Wrong exception text",
         )
 
             "Wrong exception text",
         )
 
+    def test_validate_vnffgd_descriptor_on_valid_descriptor(self):
+        indata = yaml.safe_load(db_sfc_nsds_text)[0]
+        vnffgd = indata.get("vnffgd")
+        fg = vnffgd[0]
+        self.topic.validate_vnffgd_data(fg, indata)
+
+    def test_validate_vnffgd_descriptor_not_matching_nfp_position_element(self):
+        indata = yaml.safe_load(db_sfc_nsds_text)[0]
+        vnffgd = indata.get("vnffgd")
+        fg = vnffgd[0]
+        nfpd = fg.get("nfpd")[0]
+        with self.assertRaises(EngineException) as e:
+            fg.update({"nfp-position-element": [{"id": "test1"}]})
+            self.topic.validate_vnffgd_data(fg, indata)
+        self.assertEqual(
+            e.exception.http_code,
+            HTTPStatus.UNPROCESSABLE_ENTITY,
+            "Wrong HTTP status code",
+        )
+        self.assertIn(
+            norm(
+                "Error at vnffgd nfpd[id='{}']:nfp-position-element-id='{}' "
+                "does not match any nfp-position-element".format(nfpd["id"], "test")
+            ),
+            norm(str(e.exception)),
+            "Wrong exception text",
+        )
+
+    def test_validate_vnffgd_descriptor_not_matching_constituent_base_element_id(
+        self,
+    ):
+        indata = yaml.safe_load(db_sfc_nsds_text)[0]
+        vnffgd = indata.get("vnffgd")
+        fg = vnffgd[0]
+        fg["nfpd"][0]["position-desc-id"][0]["cp-profile-id"][0][
+            "constituent-profile-elements"
+        ][0]["constituent-base-element-id"] = "error_vnf"
+        with self.assertRaises(EngineException) as e:
+            self.topic.validate_vnffgd_data(fg, indata)
+        self.assertEqual(
+            e.exception.http_code,
+            HTTPStatus.UNPROCESSABLE_ENTITY,
+            "Wrong HTTP status code",
+        )
+        self.assertIn(
+            norm(
+                "Error at vnffgd constituent_profile[id='{}']:vnfd-id='{}' "
+                "does not match any constituent-base-element-id".format(
+                    "vnf1", "error_vnf"
+                )
+            ),
+            norm(str(e.exception)),
+            "Wrong exception text",
+        )
+
 
 if __name__ == "__main__":
     unittest.main()
 
 if __name__ == "__main__":
     unittest.main()
index 8efb1f9..b12a330 100644 (file)
 # contact: esousa@whitestack.com or alfonso.tiernosepulveda@telefonica.com
 ##
 
 # contact: esousa@whitestack.com or alfonso.tiernosepulveda@telefonica.com
 ##
 
+from contextlib import contextmanager
 import unittest
 from time import time
 import unittest
 from time import time
-from unittest.mock import Mock, mock_open   # patch, MagicMock
+from unittest.mock import Mock, mock_open  # patch, MagicMock
 from osm_common.dbbase import DbException
 from osm_nbi.engine import EngineException
 from osm_common.dbmemory import DbMemory
 from osm_common.fsbase import FsBase
 from osm_common.msgbase import MsgBase
 from osm_common.dbbase import DbException
 from osm_nbi.engine import EngineException
 from osm_common.dbmemory import DbMemory
 from osm_common.fsbase import FsBase
 from osm_common.msgbase import MsgBase
+from osm_common import dbbase
 from http import HTTPStatus
 from osm_nbi.instance_topics import NsLcmOpTopic, NsrTopic
 from osm_nbi.tests.test_db_descriptors import (
 from http import HTTPStatus
 from osm_nbi.instance_topics import NsLcmOpTopic, NsrTopic
 from osm_nbi.tests.test_db_descriptors import (
@@ -47,15 +49,12 @@ class TestNsLcmOpTopic(unittest.TestCase):
         self.nslcmop_topic = NsLcmOpTopic(self.db, self.fs, self.msg, None)
         self.nslcmop_topic.check_quota = Mock(return_value=None)  # skip quota
 
         self.nslcmop_topic = NsLcmOpTopic(self.db, self.fs, self.msg, None)
         self.nslcmop_topic.check_quota = Mock(return_value=None)  # skip quota
 
-        self.db.create_list(
-            "vim_accounts", yaml.load(db_vim_accounts_text, Loader=yaml.Loader)
-        )
-        self.db.create_list("nsds", yaml.load(db_nsds_text, Loader=yaml.Loader))
-        self.db.create_list("vnfds", yaml.load(db_vnfds_text, Loader=yaml.Loader))
-        self.db.create_list("vnfrs", yaml.load(db_vnfrs_text, Loader=yaml.Loader))
-        self.db.create_list("nsrs", yaml.load(db_nsrs_text, Loader=yaml.Loader))
+        self.db.create_list("vim_accounts", yaml.safe_load(db_vim_accounts_text))
+        self.db.create_list("nsds", yaml.safe_load(db_nsds_text))
+        self.db.create_list("vnfds", yaml.safe_load(db_vnfds_text))
+        self.db.create_list("vnfrs", yaml.safe_load(db_vnfrs_text))
+        self.db.create_list("nsrs", yaml.safe_load(db_nsrs_text))
         self.db.create = Mock(return_value="created_id")
         self.db.create = Mock(return_value="created_id")
-        self.db.set_one = Mock(return_value={"updated": 1})
         self.nsd = self.db.get_list("nsds")[0]
         self.nsd_id = self.nsd["_id"]
         self.nsr = self.db.get_list("nsrs")[0]
         self.nsd = self.db.get_list("nsds")[0]
         self.nsd_id = self.nsd["_id"]
         self.nsr = self.db.get_list("nsrs")[0]
@@ -66,6 +65,7 @@ class TestNsLcmOpTopic(unittest.TestCase):
         self.vim_id = self.vim["_id"]
 
     def test_create_instantiate(self):
         self.vim_id = self.vim["_id"]
 
     def test_create_instantiate(self):
+        self.db.set_one = Mock(return_value={"updated": 1})
         session = {
             "force": False,
             "admin": False,
         session = {
             "force": False,
             "admin": False,
@@ -226,6 +226,316 @@ class TestNsLcmOpTopic(unittest.TestCase):
                 "Engine exception bad http_code with {}".format(indata_copy),
             )
 
                 "Engine exception bad http_code with {}".format(indata_copy),
             )
 
+    def test_update_remove_vnf(self):
+        vnfr_id = self.db.get_list("vnfrs")[0]["_id"]
+        session = {}
+        self.db.set_one(
+            "nsrs",
+            {"_id": self.nsr_id},
+            {"_admin.nsState": "INSTANTIATED"},
+        )
+        indata = {
+            "lcmOperationType": "update",
+            "updateType": "REMOVE_VNF",
+            "nsInstanceId": self.nsr_id,
+            "removeVnfInstanceId": vnfr_id,
+        }
+
+        session = {
+            "force": False,
+            "admin": False,
+            "public": False,
+            "project_id": [self.nsr_project],
+            "method": "write",
+        }
+        rollback = []
+        headers = {}
+
+        nslcmop_id, _ = self.nslcmop_topic.new(
+            rollback, session, indata, kwargs=None, headers=headers
+        )
+
+        self.assertEqual(
+            self.db.create.call_count,
+            1,
+            "database create not called, or called more than once",
+        )
+        _call = self.db.create.call_args_list[0]
+        self.assertEqual(
+            _call[0][0], "nslcmops", "nslcmops entry must be created at database"
+        )
+        created_nslcmop = _call[0][1]
+        self.assertEqual(
+            self.nsr_id,
+            created_nslcmop["nsInstanceId"],
+            "mismatch between nsId '_id' in created nslcmop and database nsr",
+        )
+        self.assertTrue(
+            created_nslcmop["lcmOperationType"] == "update",
+            "Database record must contain 'lcmOperationType=update'",
+        )
+        self.assertTrue(
+            created_nslcmop["operationParams"]["updateType"] == "REMOVE_VNF",
+            "Database record must contain 'updateType=REMOVE_VNF'",
+        )
+
+    def test_migrate(self):
+        _ = self.db.get_list("vnfrs")[0]["_id"]
+        session = {}
+        self.db.set_one(
+            "nsrs",
+            {"_id": self.nsr_id},
+            {"_admin.nsState": "INSTANTIATED"},
+        )
+        session = {
+            "force": False,
+            "admin": False,
+            "public": False,
+            "project_id": [self.nsr_project],
+            "method": "write",
+        }
+        rollback = []
+        headers = {}
+
+        with self.subTest(i=1, t="Migration for Specific VM"):
+            indata = {
+                "lcmOperationType": "migrate",
+                "nsInstanceId": self.nsr_id,
+                "migrateToHost": "sample02",
+                "vdu": {"vduCountIndex": 0, "vduId": "mgmtVM"},
+                "vnfInstanceId": "9e8006df-cdfa-4f63-bf6a-fce860d71c1f",
+            }
+            nslcmop_id, _ = self.nslcmop_topic.new(
+                rollback, session, indata, kwargs=None, headers=headers
+            )
+
+            self.assertEqual(
+                self.db.create.call_count,
+                1,
+                "database create not called, or called more than once",
+            )
+            _call = self.db.create.call_args_list[0]
+            self.assertEqual(
+                _call[0][0], "nslcmops", "nslcmops entry must be created at database"
+            )
+            created_nslcmop = _call[0][1]
+            self.assertEqual(
+                self.nsr_id,
+                created_nslcmop["nsInstanceId"],
+                "mismatch between nsId '_id' in created nslcmop and database nsr",
+            )
+            self.assertTrue(
+                created_nslcmop["lcmOperationType"] == "migrate",
+                "Database record must contain 'lcmOperationType=migrate'",
+            )
+        with self.subTest(i=2, t="Migration of all VDUs in a VNF"):
+            indata = {
+                "lcmOperationType": "migrate",
+                "nsInstanceId": self.nsr_id,
+                "vnfInstanceId": "9e8006df-cdfa-4f63-bf6a-fce860d71c1f",
+            }
+            nslcmop_id, _ = self.nslcmop_topic.new(
+                rollback, session, indata, kwargs=None, headers=headers
+            )
+
+            self.assertEqual(
+                self.db.create.call_count,
+                2,
+                "database create not called, or called more than once",
+            )
+            _call = self.db.create.call_args_list[0]
+            self.assertEqual(
+                _call[0][0], "nslcmops", "nslcmops entry must be created at database"
+            )
+            created_nslcmop = _call[0][1]
+            self.assertEqual(
+                self.nsr_id,
+                created_nslcmop["nsInstanceId"],
+                "mismatch between nsId '_id' in created nslcmop and database nsr",
+            )
+            self.assertTrue(
+                created_nslcmop["lcmOperationType"] == "migrate",
+                "Database record must contain 'lcmOperationType=migrate'",
+            )
+        with self.subTest(i=3, t="Migration failure - vduId not provided in vdu "):
+            indata = {
+                "lcmOperationType": "migrate",
+                "nsInstanceId": self.nsr_id,
+                "migrateToHost": "sample02",
+                "vdu": {"vduCountIndex": 0},
+                "vnfInstanceId": "9e8006df-cdfa-4f63-bf6a-fce860d71c1f",
+            }
+
+            with self.assertRaises(Exception) as e:
+                nslcmop_id, _ = self.nslcmop_topic.new(
+                    rollback, session, indata, kwargs=None, headers=headers
+                )
+            self.assertTrue(
+                "Format error at 'vdu' ''vduId' is a required property'"
+                in str(e.exception)
+            )
+
+
+class TestNsLcmOpTopicWithMock(unittest.TestCase):
+    def setUp(self):
+        self.db = Mock(dbbase.DbBase())
+        self.fs = Mock(FsBase())
+        self.fs.get_params.return_value = {"./fake/folder"}
+        self.fs.file_open = mock_open()
+        self.msg = Mock(MsgBase())
+        # create class
+        self.nslcmop_topic = NsLcmOpTopic(self.db, self.fs, self.msg, None)
+
+    def test_get_vnfd_from_vnf_member_revision(self):
+        test_vnfr = yaml.safe_load(db_vnfrs_text)[0]
+        test_vnfd = yaml.safe_load(db_vnfds_text)
+        self.db.get_one.side_effect = [test_vnfr, test_vnfd]
+        _ = self.nslcmop_topic._get_vnfd_from_vnf_member_index("1", test_vnfr["_id"])
+        self.assertEqual(
+            self.db.get_one.call_args_list[0][0][0],
+            "vnfrs",
+            "Incorrect first DB lookup",
+        )
+        self.assertEqual(
+            self.db.get_one.call_args_list[1][0][0],
+            "vnfds",
+            "Incorrect second DB lookup",
+        )
+
+    def test_get_vnfd_from_vnf_member_no_revision(self):
+        test_vnfr = yaml.safe_load(db_vnfrs_text)[0]
+        test_vnfr["revision"] = 3
+        test_vnfd = yaml.safe_load(db_vnfds_text)
+        self.db.get_one.side_effect = [test_vnfr, test_vnfd]
+        _ = self.nslcmop_topic._get_vnfd_from_vnf_member_index("1", test_vnfr["_id"])
+        self.assertEqual(
+            self.db.get_one.call_args_list[0][0][0],
+            "vnfrs",
+            "Incorrect first DB lookup",
+        )
+        self.assertEqual(
+            self.db.get_one.call_args_list[1][0][0],
+            "vnfds_revisions",
+            "Incorrect second DB lookup",
+        )
+
+    @contextmanager
+    def assertNotRaises(self, exception_type):
+        try:
+            yield None
+        except exception_type:
+            raise self.failureException("{} raised".format(exception_type.__name__))
+
+    def test_check_ns_update_operation(self):
+        self.db = DbMemory()
+        self.nslcmop_topic = NsLcmOpTopic(self.db, self.fs, self.msg, None)
+        session = {}
+
+        with self.subTest(i=1, t="VNF instance does not belong to NS"):
+            test_vnfr = yaml.safe_load(db_vnfrs_text)
+            test_vnfr[0]["revision"] = 2
+            test_nsr = yaml.safe_load(db_nsrs_text)
+            test_nsr[0]["constituent-vnfr-ref"][
+                0
+            ] = "99d90b0c-faff-4b9f-bccd-017f33985984"
+            self.db.create_list("vnfrs", test_vnfr)
+            self.db.create_list("nsrs", test_nsr)
+            nsrs = self.db.get_list("nsrs")[0]
+            indata = {
+                "updateType": "CHANGE_VNFPKG",
+                "changeVnfPackageData": {
+                    "vnfInstanceId": "88d90b0c-faff-4b9f-bccd-017f33985984",
+                    "vnfdId": "7637bcf8-cf14-42dc-ad70-c66fcf1e6e77",
+                },
+                "nsInstanceId": "f48163a6-c807-47bc-9682-f72caef5af85",
+            }
+            with self.assertRaises(EngineException) as expected_exception:
+                self.nslcmop_topic._check_ns_operation(session, nsrs, "update", indata)
+            self.assertEqual(
+                str(expected_exception.exception),
+                "Error in validating ns-update request: vnf 88d90b0c-faff-4b9f-bccd-017f33985984"
+                " does not belong to NS f48163a6-c807-47bc-9682-f72caef5af85",
+            )
+
+        with self.subTest(i=2, t="Ns update request validated with no exception"):
+            test_vnfr = yaml.safe_load(db_vnfrs_text)
+            test_vnfr[0]["revision"] = 2
+            test_nsr = yaml.safe_load(db_nsrs_text)
+            self.db.create_list("vnfrs", test_vnfr)
+            self.db.create_list("nsrs", test_nsr)
+            nsrs = self.db.get_list("nsrs")[1]
+            indata = {
+                "updateType": "CHANGE_VNFPKG",
+                "changeVnfPackageData": {
+                    "vnfInstanceId": "88d90b0c-faff-4b9f-bccd-017f33985984",
+                    "vnfdId": "7637bcf8-cf14-42dc-ad70-c66fcf1e6e77",
+                },
+                "nsInstanceId": "f48163a6-c807-47bc-9682-f72caef5af85",
+            }
+            with self.assertNotRaises(EngineException):
+                self.nslcmop_topic._check_ns_operation(session, nsrs, "update", indata)
+
+        with self.subTest(
+            i=3, t="Ns update request rejected because of too small timeout"
+        ):
+            indata = {
+                "updateType": "CHANGE_VNFPKG",
+                "changeVnfPackageData": {
+                    "vnfInstanceId": "88d90b0c-faff-4b9f-bccd-017f33985984",
+                    "vnfdId": "7637bcf8-cf14-42dc-ad70-c66fcf1e6e77",
+                },
+                "nsInstanceId": "f48163a6-c807-47bc-9682-f72caef5af85",
+                "timeout_ns_update": 50,
+            }
+            with self.assertRaises(EngineException) as expected_exception:
+                self.nslcmop_topic._check_ns_operation(session, nsrs, "update", indata)
+            self.assertEqual(
+                str(expected_exception.exception),
+                "Error in validating ns-update request: 50 second is not enough "
+                "to upgrade the VNF instance: 88d90b0c-faff-4b9f-bccd-017f33985984",
+            )
+
+        with self.subTest(i=4, t="wrong vnfdid is given as an update parameter"):
+            test_vnfr = yaml.safe_load(db_vnfrs_text)
+            test_vnfr[0]["revision"] = 2
+            test_nsr = yaml.safe_load(db_nsrs_text)
+            self.db.create_list("vnfrs", test_vnfr)
+            self.db.create_list("nsrs", test_nsr)
+            nsrs = self.db.get_list("nsrs")[2]
+            indata = {
+                "updateType": "CHANGE_VNFPKG",
+                "changeVnfPackageData": {
+                    "vnfInstanceId": "88d90b0c-faff-4b9f-bccd-017f33985984",
+                    "vnfdId": "9637bcf8-cf14-42dc-ad70-c66fcf1e6e77",
+                },
+                "nsInstanceId": "f48163a6-c807-47bc-9682-f72caef5af85",
+            }
+            with self.assertRaises(EngineException) as expected_exception:
+                self.nslcmop_topic._check_ns_operation(session, nsrs, "update", indata)
+            self.assertEqual(
+                str(expected_exception.exception),
+                "Error in validating ns-update request: vnfd-id 9637bcf8-cf14-42dc-ad70-c66fcf1e6e77 does not "
+                "match with the vnfd-id: 7637bcf8-cf14-42dc-ad70-c66fcf1e6e77 of "
+                "VNF instance: 88d90b0c-faff-4b9f-bccd-017f33985984",
+            )
+
+        with self.subTest(
+            i=5, t="Ns update REMOVE_VNF request validated with no exception"
+        ):
+            test_vnfr = yaml.safe_load(db_vnfrs_text)
+            test_vnfr[0]["revision"] = 2
+            test_nsr = yaml.safe_load(db_nsrs_text)
+            self.db.create_list("vnfrs", test_vnfr)
+            self.db.create_list("nsrs", test_nsr)
+            nsrs = self.db.get_list("nsrs")[1]
+            indata = {
+                "updateType": "REMOVE_VNF",
+                "removeVnfInstanceId": "88d90b0c-faff-4b9f-bccd-017f33985984",
+                "nsInstanceId": "f48163a6-c807-47bc-9682-f72caef5af85",
+            }
+            with self.assertNotRaises(EngineException):
+                self.nslcmop_topic._check_ns_operation(session, nsrs, "update", indata)
+
 
 class TestNsrTopic(unittest.TestCase):
     def setUp(self):
 
 class TestNsrTopic(unittest.TestCase):
     def setUp(self):
@@ -238,11 +548,9 @@ class TestNsrTopic(unittest.TestCase):
         self.nsr_topic = NsrTopic(self.db, self.fs, self.msg, None)
         self.nsr_topic.check_quota = Mock(return_value=None)  # skip quota
 
         self.nsr_topic = NsrTopic(self.db, self.fs, self.msg, None)
         self.nsr_topic.check_quota = Mock(return_value=None)  # skip quota
 
-        self.db.create_list(
-            "vim_accounts", yaml.load(db_vim_accounts_text, Loader=yaml.Loader)
-        )
-        self.db.create_list("nsds", yaml.load(db_nsds_text, Loader=yaml.Loader))
-        self.db.create_list("vnfds", yaml.load(db_vnfds_text, Loader=yaml.Loader))
+        self.db.create_list("vim_accounts", yaml.safe_load(db_vim_accounts_text))
+        self.db.create_list("nsds", yaml.safe_load(db_nsds_text))
+        self.db.create_list("vnfds", yaml.safe_load(db_vnfds_text))
         self.db.create = Mock(return_value="created_id")
         self.nsd = self.db.get_list("nsds")[0]
         self.nsd_id = self.nsd["_id"]
         self.db.create = Mock(return_value="created_id")
         self.nsd = self.db.get_list("nsds")[0]
         self.nsd_id = self.nsd["_id"]
@@ -334,6 +642,19 @@ class TestNsrTopic(unittest.TestCase):
         self.assertEqual(
             len(created_vnfrs), 2, "created a mismatch number of vnfr at database"
         )
         self.assertEqual(
             len(created_vnfrs), 2, "created a mismatch number of vnfr at database"
         )
+
+        self.assertEqual(
+            created_vnfrs[0]["vdur"][0]["interfaces"][0]["position"],
+            1,
+            "vdur first interface position does not match",
+        )
+
+        self.assertEqual(
+            created_vnfrs[0]["vdur"][0]["interfaces"][1]["position"],
+            2,
+            "vdur second interface position does not match",
+        )
+
         self.assertEqual(
             len(created_nsrs), 1, "Only one nsrs must be created at database"
         )
         self.assertEqual(
             len(created_nsrs), 1, "Only one nsrs must be created at database"
         )
@@ -376,57 +697,80 @@ class TestNsrTopic(unittest.TestCase):
                 self.assertTrue(e.exception.http_code == expect_code)
             if expect_text_list:
                 for expect_text in expect_text_list:
                 self.assertTrue(e.exception.http_code == expect_code)
             if expect_text_list:
                 for expect_text in expect_text_list:
-                    self.assertIn(expect_text, str(e.exception).lower(),
-                                  "Expected '{}' at exception text".format(expect_text))
+                    self.assertIn(
+                        expect_text,
+                        str(e.exception).lower(),
+                        "Expected '{}' at exception text".format(expect_text),
+                    )
 
     def test_show_instance(self):
 
     def test_show_instance(self):
-        session = {"force": False, "admin": False, "public": False, "project_id": [self.nsd_project], "method": "write"}
+        session = {
+            "force": False,
+            "admin": False,
+            "public": False,
+            "project_id": [self.nsd_project],
+            "method": "write",
+        }
         filter_q = {}
         for refresh_status in ("true", "false"):
         filter_q = {}
         for refresh_status in ("true", "false"):
-            self.db.create_list("nsrs", yaml.load(db_nsrs_text, Loader=yaml.Loader))
+            self.db.create_list("nsrs", yaml.safe_load(db_nsrs_text))
             actual_nsr = self.db.get_list("nsrs")[0]
             nsr_id = actual_nsr["_id"]
             actual_nsr = self.db.get_list("nsrs")[0]
             nsr_id = actual_nsr["_id"]
-            filter_q['vcaStatus-refresh'] = refresh_status
+            filter_q["vcaStatus-refresh"] = refresh_status
             expected_nsr = self.nsr_topic.show(session, nsr_id, filter_q=filter_q)
             self.nsr_topic.delete(session, nsr_id)
             actual_nsr.pop("_admin")
             expected_nsr.pop("_admin")
             expected_nsr = self.nsr_topic.show(session, nsr_id, filter_q=filter_q)
             self.nsr_topic.delete(session, nsr_id)
             actual_nsr.pop("_admin")
             expected_nsr.pop("_admin")
-            self.assertEqual(expected_nsr, actual_nsr, "Database nsr and show() nsr do not match.")
+            self.assertEqual(
+                expected_nsr, actual_nsr, "Database nsr and show() nsr do not match."
+            )
 
     def test_vca_status_refresh(self):
 
     def test_vca_status_refresh(self):
-        session = {"force": False, "admin": False, "public": False, "project_id": [self.nsd_project], "method": "write"}
-        filter_q = {'vcaStatus-refresh': 'true'}
+        session = {
+            "force": False,
+            "admin": False,
+            "public": False,
+            "project_id": [self.nsd_project],
+            "method": "write",
+        }
+        filter_q = {"vcaStatus-refresh": "true"}
         time_delta = 120
         time_delta = 120
-        self.db.create_list("nsrs", yaml.load(db_nsrs_text, Loader=yaml.Loader))
+        self.db.create_list("nsrs", yaml.safe_load(db_nsrs_text))
         nsr = self.db.get_list("nsrs")[0]
 
         # When vcaStatus-refresh is true
         nsr = self.db.get_list("nsrs")[0]
 
         # When vcaStatus-refresh is true
-        filter_q['vcaStatus-refresh'] = "true"
+        filter_q["vcaStatus-refresh"] = "true"
         self.nsr_topic.vca_status_refresh(session, nsr, filter_q)
         msg_args = self.msg.write.call_args[0]
         self.assertEqual(msg_args[1], "vca_status_refresh", "Wrong message action")
         self.assertGreater(nsr["_admin"]["modified"], time() - time_delta)
 
         # When vcaStatus-refresh is false but modified time is within threshold
         self.nsr_topic.vca_status_refresh(session, nsr, filter_q)
         msg_args = self.msg.write.call_args[0]
         self.assertEqual(msg_args[1], "vca_status_refresh", "Wrong message action")
         self.assertGreater(nsr["_admin"]["modified"], time() - time_delta)
 
         # When vcaStatus-refresh is false but modified time is within threshold
-        filter_q['vcaStatus-refresh'] = "false"
+        filter_q["vcaStatus-refresh"] = "false"
         time_now = time()
         nsr["_admin"]["modified"] = time_now
         self.nsr_topic.vca_status_refresh(session, nsr, filter_q)
         msg_args = self.msg.write.call_args[1]
         self.assertEqual(msg_args, {}, "Message should not be sent.")
         time_now = time()
         nsr["_admin"]["modified"] = time_now
         self.nsr_topic.vca_status_refresh(session, nsr, filter_q)
         msg_args = self.msg.write.call_args[1]
         self.assertEqual(msg_args, {}, "Message should not be sent.")
-        self.assertEqual(nsr["_admin"]["modified"], time_now, "Modified time should not be changed.")
+        self.assertEqual(
+            nsr["_admin"]["modified"], time_now, "Modified time should not be changed."
+        )
 
         # When vcaStatus-refresh is false but modified time is less than threshold
 
         # When vcaStatus-refresh is false but modified time is less than threshold
-        filter_q['vcaStatus-refresh'] = "false"
-        nsr["_admin"]["modified"] = time() - (2*time_delta)
+        filter_q["vcaStatus-refresh"] = "false"
+        nsr["_admin"]["modified"] = time() - (2 * time_delta)
         self.nsr_topic.vca_status_refresh(session, nsr, filter_q)
         msg_args = self.msg.write.call_args[0]
         self.assertEqual(msg_args[1], "vca_status_refresh", "Wrong message action")
         self.nsr_topic.delete(session, nsr["_id"])
         self.nsr_topic.vca_status_refresh(session, nsr, filter_q)
         msg_args = self.msg.write.call_args[0]
         self.assertEqual(msg_args[1], "vca_status_refresh", "Wrong message action")
         self.nsr_topic.delete(session, nsr["_id"])
-        self.assertGreater(nsr["_admin"]["modified"], time() - time_delta, "Modified time is not changed.")
+        self.assertGreater(
+            nsr["_admin"]["modified"],
+            time() - time_delta,
+            "Modified time is not changed.",
+        )
 
     def test_delete_ns(self):
 
     def test_delete_ns(self):
-        self.db.create_list("nsrs", yaml.load(db_nsrs_text, Loader=yaml.Loader))
+        self.db.create_list("nsrs", yaml.safe_load(db_nsrs_text))
         self.nsr = self.db.get_list("nsrs")[0]
         self.nsr_id = self.nsr["_id"]
         self.db_set_one = self.db.set_one
         self.nsr = self.db.get_list("nsrs")[0]
         self.nsr_id = self.nsr["_id"]
         self.db_set_one = self.db.set_one
diff --git a/osm_nbi/tests/test_osm_vnfm.py b/osm_nbi/tests/test_osm_vnfm.py
new file mode 100644 (file)
index 0000000..f4e6e63
--- /dev/null
@@ -0,0 +1,220 @@
+# Copyright 2021 Selvi Jayaraman (Tata Elxsi)
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+# implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+__author__ = "Selvi Jayaraman <selvi.j@tataelxsi.co.in>"
+
+import unittest
+from uuid import uuid4
+from unittest.mock import Mock, patch, mock_open
+from osm_common.dbmemory import DbMemory
+from osm_common.fsbase import FsBase
+from osm_common.msgbase import MsgBase
+from osm_nbi.vnf_instance_topics import VnfInstances, VnfLcmOpTopic
+from osm_nbi.instance_topics import NsrTopic
+from osm_nbi.tests.test_db_descriptors import (
+    db_vim_accounts_text,
+    db_vnfm_vnfd_text,
+    db_nsds_text,
+    db_nsrs_text,
+    db_vnfrs_text,
+    db_nslcmops_text,
+)
+import yaml
+
+
+class TestVnfInstances(unittest.TestCase):
+    def setUp(self):
+        self.db = DbMemory()
+        self.fs = Mock(FsBase())
+        self.msg = Mock(MsgBase())
+        self.vnfinstances = VnfInstances(self.db, self.fs, self.msg, None)
+        self.nsrtopic = NsrTopic(self.db, self.fs, self.msg, None)
+        self.db.create_list("vim_accounts", yaml.safe_load(db_vim_accounts_text))
+        self.db.create_list("vnfds", yaml.safe_load(db_vnfm_vnfd_text))
+        self.vnfd = self.db.get_list("vnfds")[0]
+        self.vnfd_id = self.vnfd["id"]
+        self.vnfd_project = self.vnfd["_admin"]["projects_read"][0]
+
+        self.vim = self.db.get_list("vim_accounts")[0]
+        self.vim_id = self.vim["_id"]
+
+    @patch("osm_nbi.descriptor_topics.shutil")
+    @patch("osm_nbi.descriptor_topics.os.rename")
+    def test_create_identifier(self, mock_rename, mock_shutil):
+        session = {
+            "force": True,
+            "admin": False,
+            "public": False,
+            "project_id": [self.vnfd_project],
+            "method": "write",
+        }
+        indata = {
+            "vnfdId": self.vnfd_id,
+            "vnfInstanceName": "vnf_instance_name",
+            "vnfInstanceDescription": "vnf instance description",
+            "vimAccountId": self.vim_id,
+            "additionalParams": {
+                "virtual-link-desc": [{"id": "mgmt-net", "mgmt-network": True}],
+                "constituent-cpd-id": "vnf-cp0-ext",
+                "virtual-link-profile-id": "mgmt-net",
+            },
+        }
+        rollback = []
+        self.fs.path = ""
+        self.fs.get_params.return_value = {}
+        self.fs.file_exists.return_value = False
+        self.fs.file_open.side_effect = lambda path, mode: open(
+            "/tmp/" + str(uuid4()), "a+b"
+        )
+
+        vnfr_id, _ = self.vnfinstances.new(
+            rollback, session, indata, {}, headers={"Content-Type": []}
+        )
+        vnfr = self.db.get_one("vnfrs")
+        self.assertEqual(
+            vnfr_id, vnfr["id"], "Mismatch between return id and database id"
+        )
+        self.assertEqual(
+            "NOT_INSTANTIATED",
+            vnfr["_admin"]["nsState"],
+            "Database record must contain 'nsState' NOT_INSTANTIATED",
+        )
+        self.assertEqual(
+            self.vnfd_id,
+            vnfr["vnfd-ref"],
+            "vnfr record is not properly created for the given vnfd",
+        )
+
+    def test_show_vnfinstance(self):
+        session = {
+            "force": False,
+            "admin": False,
+            "public": False,
+            "project_id": [self.vnfd_project],
+            "method": "write",
+        }
+        filter_q = {}
+        self.db.create_list("vnfrs", yaml.safe_load(db_vnfrs_text))
+        actual_vnfr = self.db.get_list("vnfrs")[0]
+        id = actual_vnfr["_id"]
+        expected_vnfr = self.vnfinstances.show(session, id, filter_q)
+        self.assertEqual(
+            actual_vnfr["_id"],
+            expected_vnfr["_id"],
+            "Mismatch between return vnfr Id and database vnfr Id",
+        )
+
+    def test_delete_vnfinstance(self):
+        session = {
+            "force": False,
+            "admin": False,
+            "public": False,
+            "project_id": [self.vnfd_project],
+            "method": "delete",
+        }
+        self.db.create_list("vnfrs", yaml.safe_load(db_vnfrs_text))
+        self.db.create_list("nsrs", yaml.safe_load(db_nsrs_text))
+        self.db.create_list("nsds", yaml.safe_load(db_nsds_text))
+
+        self.vnfr = self.db.get_list("vnfrs")[0]
+        self.vnfr_id = self.vnfr["_id"]
+        self.db.set_one = self.db.set_one
+        self.db.set_one = Mock()
+
+        self.vnfinstances.delete(session, self.vnfr_id)
+        msg_args = self.msg.write.call_args[0]
+        self.assertEqual(msg_args[1], "deleted", "Wrong message action")
+
+
+class TestVnfLcmOpTopic(unittest.TestCase):
+    def setUp(self):
+        self.db = DbMemory()
+        self.fs = Mock(FsBase())
+        self.fs.get_params.return_value = {"./fake/folder"}
+        self.fs.file_open = mock_open()
+        self.msg = Mock(MsgBase())
+
+        self.vnflcmop_topic = VnfLcmOpTopic(self.db, self.fs, self.msg, None)
+        self.vnflcmop_topic.check_quota = Mock(return_value=None)  # skip quota
+
+        self.db.create_list("vim_accounts", yaml.safe_load(db_vim_accounts_text))
+        self.db.create_list("nsds", yaml.safe_load(db_nsds_text))
+        self.db.create_list("vnfds", yaml.safe_load(db_vnfm_vnfd_text))
+        self.db.create_list("vnfrs", yaml.safe_load(db_vnfrs_text))
+        self.db.create_list("nsrs", yaml.safe_load(db_nsrs_text))
+
+        self.vnfd = self.db.get_list("vnfds")[0]
+        self.vnfd_id = self.vnfd["_id"]
+        self.vnfr = self.db.get_list("vnfrs")[0]
+        self.vnfr_id = self.vnfr["_id"]
+
+        self.vnfd_project = self.vnfd["_admin"]["projects_read"][0]
+
+        self.vim = self.db.get_list("vim_accounts")[0]
+        self.vim_id = self.vim["_id"]
+
+    def test_create_vnf_instantiate(self):
+        session = {
+            "force": False,
+            "admin": False,
+            "public": False,
+            "project_id": [self.vnfd_project],
+            "method": "write",
+        }
+        indata = {
+            "vnfInstanceId": self.vnfr_id,
+            "lcmOperationType": "instantiate",
+            "vnfName": "vnf_instance_name",
+            "vnfDescription": "vnf instance description",
+            "vnfId": self.vnfd_id,
+            "vimAccountId": self.vim_id,
+        }
+        rollback = []
+        headers = {}
+        vnflcmop_id, _ = self.vnflcmop_topic.new(
+            rollback, session, indata, kwargs=None, headers=headers
+        )
+        vnflcmop_info = self.db.get_one("nslcmops")
+        self.assertEqual(
+            vnflcmop_id,
+            vnflcmop_info["_id"],
+            "Mismatch between return id and database '_id'",
+        )
+        self.assertTrue(
+            vnflcmop_info["lcmOperationType"] == "instantiate",
+            "Database record must contain 'lcmOperationType=instantiate'",
+        )
+
+    def test_show_vnflmcop(self):
+        session = {
+            "force": False,
+            "admin": False,
+            "public": False,
+            "project_id": [self.vnfd_project],
+            "method": "write",
+        }
+        self.db.create_list("nslcmops", yaml.safe_load(db_nslcmops_text))
+        filter_q = {}
+        actual_lcmop = self.db.get_list("nslcmops")[0]
+        id = actual_lcmop["_id"]
+        vnfr = self.db.get_list("vnfrs")[0]
+        vnfr_id = vnfr["_id"]
+        vnflcmop = self.vnflcmop_topic.show(session, id, filter_q)
+        _id = vnflcmop["vnfInstanceId"]
+        self.assertEqual(
+            _id,
+            vnfr_id,
+            "Mismatch between vnflcmop's vnfInstanceId and database vnfr's id",
+        )
index 91e5641..d77b79f 100644 (file)
 __author__ = "Pedro de la Cruz Ramos, pedro.delacruzramos@altran.com"
 __date__ = "2019-11-20"
 
 __author__ = "Pedro de la Cruz Ramos, pedro.delacruzramos@altran.com"
 __date__ = "2019-11-20"
 
+
+# Exploit exists in the key kdu.helm-chart
+vnfd_exploit_text = """
+  _id: 00000000-0000-0000-0000-000000000000
+  id: n2vc-rce_vnfd
+  df:
+  - id: default-df
+  kdu:
+  - name: exploit
+    helm-chart: "local/exploit --post-renderer /bin/bash"
+    helm-version: v3
+"""
+
+# Exploit in kdu.helm-chart is fixed
+vnfd_exploit_fixed_text = """
+  id: n2vc-rce_vnfd
+  df:
+  - id: default-df
+  kdu:
+  - name: exploit
+    helm-chart: "local/exploit"
+    helm-version: v3
+"""
+
 db_vnfds_text = """
 ---
 -   _admin:
 db_vnfds_text = """
 ---
 -   _admin:
@@ -49,7 +73,7 @@ db_vnfds_text = """
     product-name: hackfest3charmed-vnf
     version: '1.0'
     mgmt-cp: vnf-mgmt-ext
     product-name: hackfest3charmed-vnf
     version: '1.0'
     mgmt-cp: vnf-mgmt-ext
-  
+
     virtual-compute-desc:
       - id: mgmt-compute
         virtual-cpu:
     virtual-compute-desc:
       - id: mgmt-compute
         virtual-cpu:
@@ -61,17 +85,17 @@ db_vnfds_text = """
           num-virtual-cpu: 2
         virtual-memory:
           size: '2'
           num-virtual-cpu: 2
         virtual-memory:
           size: '2'
-  
+
     virtual-storage-desc:
       - id: mgmt-storage
         size-of-storage: '20'
       - id: data-storage
         size-of-storage: '20'
     virtual-storage-desc:
       - id: mgmt-storage
         size-of-storage: '20'
       - id: data-storage
         size-of-storage: '20'
-  
+
     sw-image-desc:
       - id: hackfest3-mgmt
         name: hackfest3-mgmt
     sw-image-desc:
       - id: hackfest3-mgmt
         name: hackfest3-mgmt
-  
+
     vdu:
       - id: mgmtVM
         name: mgmtVM
     vdu:
       - id: mgmtVM
         name: mgmtVM
@@ -118,10 +142,10 @@ db_vnfds_text = """
           - id: dataVM_cpu_util
             name: dataVM_cpu_util
             performance-metric: cpu_utilization
           - id: dataVM_cpu_util
             name: dataVM_cpu_util
             performance-metric: cpu_utilization
-  
+
     int-virtual-link-desc:
       - id: internal
     int-virtual-link-desc:
       - id: internal
-  
+
     ext-cpd:
       - id: vnf-mgmt-ext
         int-cpd: # Connection to int-cpd
     ext-cpd:
       - id: vnf-mgmt-ext
         int-cpd: # Connection to int-cpd
@@ -131,7 +155,7 @@ db_vnfds_text = """
         int-cpd: # Connection to int-cpd
           vdu-id: dataVM
           cpd: vnf-data
         int-cpd: # Connection to int-cpd
           vdu-id: dataVM
           cpd: vnf-data
-  
+
     df:
       - id: hackfest_default
         vdu-profile:
     df:
       - id: hackfest_default
         vdu-profile:
@@ -271,3 +295,77 @@ db_nsds_text = """
                   - constituent-base-element-id: hackfest_vnf2
                     constituent-cpd-id: vnf-data-ext
 """
                   - constituent-base-element-id: hackfest_vnf2
                     constituent-cpd-id: vnf-data-ext
 """
+
+db_sfc_nsds_text = """
+- _admin:
+    userDefinedData: {}
+    revision: 1
+    created: 1683713524.2696395
+    modified: 1683713524.3553684
+    projects_read:
+      - 93601899-b310-4a56-a765-91539d5f675d
+    projects_write:
+      - 93601899-b310-4a56-a765-91539d5f675d
+    onboardingState: ONBOARDED
+    operationalState: ENABLED
+    usageState: NOT_IN_USE
+    storage:
+      fs: mongo
+      path: /app/storage/
+      folder: '2eb45633-03e3-4909-a87d-a564f5943948:1'
+      pkg-dir: cirros_vnffg_ns
+      descriptor: cirros_vnffg_ns/cirros_vnffg_nsd.yaml
+      zipfile: package.tar.gz
+  _id: 2eb45633-03e3-4909-a87d-a564f5943948
+  id: cirros_vnffg-ns
+  designer: OSM
+  version: '1.0'
+  name: cirros_vnffg-ns
+
+  vnfd-id:
+    - cirros_vnffg-vnf
+
+  virtual-link-desc:
+    - id: osm-ext
+      mgmt-network: true
+
+  vnffgd:
+    - id: vnffg1
+      vnf-profile-id:
+        - Mid-vnf1
+      nfpd:
+        - id: forwardingpath1
+          position-desc-id:
+            - id: position1
+              cp-profile-id:
+                - id: cpprofile2
+                  constituent-profile-elements:
+                    - id: vnf1
+                      order: 0
+                      constituent-base-element-id: Mid-vnf1
+                      ingress-constituent-cpd-id: vnf-cp0-ext
+                      egress-constituent-cpd-id: vnf-cp0-ext
+              match-attributes:
+                - id: rule1_80
+                  ip-proto: 6
+                  source-ip-address: 20.20.1.2
+                  destination-ip-address: 20.20.3.5
+                  source-port: 0
+                  destination-port: 80
+              nfp-position-element-id:
+                - test
+      nfp-position-element:
+        - id: test
+
+  df:
+    - id: default-df
+      vnf-profile:
+        - id: '1'
+          virtual-link-connectivity:
+            - constituent-cpd-id:
+                - constituent-base-element-id: '1'
+                  constituent-cpd-id: eth0-ext
+              virtual-link-profile-id: osm-ext
+          vnfd-id: cirros_vnffg-vnf
+  description: Simple NS example with vnffgd
+"""
index 231818b..e5605c3 100644 (file)
@@ -43,10 +43,10 @@ class PmJobsTopicTest(asynctest.TestCase):
     def setUp(self):
         self.db = DbMemory()
         self.pmjobs_topic = PmJobsTopic(self.db, host="prometheus", port=9091)
     def setUp(self):
         self.db = DbMemory()
         self.pmjobs_topic = PmJobsTopic(self.db, host="prometheus", port=9091)
-        self.db.create_list("nsds", yaml.load(db_nsds_text, Loader=yaml.Loader))
-        self.db.create_list("vnfds", yaml.load(db_vnfds_text, Loader=yaml.Loader))
-        self.db.create_list("vnfrs", yaml.load(db_vnfrs_text, Loader=yaml.Loader))
-        self.db.create_list("nsrs", yaml.load(db_nsrs_text, Loader=yaml.Loader))
+        self.db.create_list("nsds", yaml.safe_load(db_nsds_text))
+        self.db.create_list("vnfds", yaml.safe_load(db_vnfds_text))
+        self.db.create_list("vnfrs", yaml.safe_load(db_vnfrs_text))
+        self.db.create_list("nsrs", yaml.safe_load(db_nsrs_text))
         self.nsr = self.db.get_list("nsrs")[0]
         self.nsr_id = self.nsr["_id"]
         project_id = self.nsr["_admin"]["projects_write"]
         self.nsr = self.db.get_list("nsrs")[0]
         self.nsr_id = self.nsr["_id"]
         project_id = self.nsr["_admin"]["projects_write"]
@@ -80,18 +80,18 @@ class PmJobsTopicTest(asynctest.TestCase):
         for metric in metric_list:
             endpoint = re.sub(r"metric_name", metric, site)
             if metric == "cpu_utilization":
         for metric in metric_list:
             endpoint = re.sub(r"metric_name", metric, site)
             if metric == "cpu_utilization":
-                response = yaml.load(cpu_utilization, Loader=yaml.Loader)
+                response = yaml.safe_load(cpu_utilization)
             elif metric == "users":
             elif metric == "users":
-                response = yaml.load(users, Loader=yaml.Loader)
+                response = yaml.safe_load(users)
             elif metric == "load":
             elif metric == "load":
-                response = yaml.load(load, Loader=yaml.Loader)
+                response = yaml.safe_load(load)
             else:
             else:
-                response = yaml.load(empty, Loader=yaml.Loader)
+                response = yaml.safe_load(empty)
             mock_res.get(endpoint, payload=response)
 
     async def test_prom_metric_request(self):
         with self.subTest("Test case1 failed in test_prom"):
             mock_res.get(endpoint, payload=response)
 
     async def test_prom_metric_request(self):
         with self.subTest("Test case1 failed in test_prom"):
-            prom_response = yaml.load(prom_res, Loader=yaml.Loader)
+            prom_response = yaml.safe_load(prom_res)
             with aioresponses() as mock_res:
                 self.set_get_mock_res(mock_res, self.nsr_id, self.metric_check_list)
                 result = await self.pmjobs_topic._prom_metric_request(
             with aioresponses() as mock_res:
                 self.set_get_mock_res(mock_res, self.nsr_id, self.metric_check_list)
                 result = await self.pmjobs_topic._prom_metric_request(
@@ -109,7 +109,7 @@ class PmJobsTopicTest(asynctest.TestCase):
 
     def test_show(self):
         with self.subTest("Test case1 failed in test_show"):
 
     def test_show(self):
         with self.subTest("Test case1 failed in test_show"):
-            show_response = yaml.load(show_res, Loader=yaml.Loader)
+            show_response = yaml.safe_load(show_res)
             with aioresponses() as mock_res:
                 self.set_get_mock_res(mock_res, self.nsr_id, self.metric_check_list)
                 result = self.pmjobs_topic.show(self.session, self.nsr_id)
             with aioresponses() as mock_res:
                 self.set_get_mock_res(mock_res, self.nsr_id, self.metric_check_list)
                 result = self.pmjobs_topic.show(self.session, self.nsr_id)
diff --git a/osm_nbi/tests/upload.py b/osm_nbi/tests/upload.py
deleted file mode 100755 (executable)
index dfd7302..0000000
+++ /dev/null
@@ -1,117 +0,0 @@
-#! /usr/bin/python3
-# -*- coding: utf-8 -*-
-
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#    http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
-# implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import getopt
-import sys
-import requests
-from os.path import getsize, basename
-from hashlib import md5
-
-__author__ = "Alfonso Tierno, alfonso.tiernosepulveda@telefonica.com"
-__date__ = "$2018-01-01$"
-__version__ = "0.1"
-version_date = "Jan 2018"
-
-
-def usage():
-    print("Usage: ", sys.argv[0], "[options]")
-    print("      --version: prints current version")
-    print("      -f|--file FILE: file to be sent")
-    print("      -h|--help: shows this help")
-    print("      -u|--url URL: complete server URL")
-    print("      -s|--chunk-size SIZE: size of chunks, by default 1000")
-    print("      -t|--token TOKEN: Authorizaton token, previously obtained from server")
-    print("      -v|--verbose print debug information, can be used several times")
-    return
-
-
-if __name__ == "__main__":
-    try:
-        # load parameters and configuration
-        opts, args = getopt.getopt(
-            sys.argv[1:],
-            "hvu:s:f:t:",
-            ["url=", "help", "version", "verbose", "file=", "chunk-size=", "token="],
-        )
-        url = None
-        chunk_size = 500
-        pkg_file = None
-        verbose = 0
-        token = None
-
-        for o, a in opts:
-            if o == "--version":
-                print("upload version " + __version__ + " " + version_date)
-                sys.exit()
-            elif o in ("-v", "--verbose"):
-                verbose += 1
-            elif o in ("-h", "--help"):
-                usage()
-                sys.exit()
-            elif o in ("-u", "--url"):
-                url = a
-            elif o in ("-s", "--chunk-size"):
-                chunk_size = int(a)
-            elif o in ("-f", "--file"):
-                pkg_file = a
-            elif o in ("-t", "--token"):
-                token = a
-            else:
-                assert False, "Unhandled option"
-        total_size = getsize(pkg_file)
-        index = 0
-        transaction_id = None
-        file_md5 = md5()
-        with open(pkg_file, "rb") as f:
-            headers = {
-                "Content-type": "application/gzip",
-                "Content-Filename": basename(pkg_file),
-                "Accept": "application/json",
-            }
-            if token:
-                headers["Authorization"] = token
-            while index < total_size:
-                chunk_data = f.read(chunk_size)
-                file_md5.update(chunk_data)
-                # payload = {"file_name": pkg_file, "chunk_data": base64.b64encode(chunk_data).decode("utf-8"),
-                #            "chunk_size": chunk_size}
-                if transaction_id:
-                    headers["Transaction-Id"] = transaction_id
-                if index + len(chunk_data) == total_size:
-                    headers["Content-File-MD5"] = file_md5.hexdigest()
-                #    payload["id"] = transaction_id
-                headers["Content-range"] = "bytes {}-{}/{}".format(
-                    index, index + len(chunk_data) - 1, total_size
-                )
-                # refers to rfc2616:  https://www.w3.org/Protocols/rfc2616/rfc2616-sec14.html
-                if verbose:
-                    print("TX chunk Headers: {}".format(headers))
-                r = requests.post(url, data=chunk_data, headers=headers, verify=False)
-                if r.status_code not in (200, 201):
-                    print("Got {}: {}".format(r.status_code, r.text))
-                    exit(1)
-                if verbose > 1:
-                    print("RX {}: {}".format(r.status_code, r.text))
-                response = r.json()
-                if not transaction_id:
-                    transaction_id = response["id"]
-                index += len(chunk_data)
-            if verbose <= 1:
-                print("RX {}: {}".format(r.status_code, r.text))
-            if "id" in response:
-                print("---\nid: {}".format(response["id"]))
-    except Exception:
-        raise
index 73fc40f..9b48ee8 100644 (file)
@@ -21,6 +21,8 @@
 # For those usages not covered by the Apache License, Version 2.0 please
 # contact: fbravo@whitestack.com or agarcia@whitestack.com
 ##
 # For those usages not covered by the Apache License, Version 2.0 please
 # contact: fbravo@whitestack.com or agarcia@whitestack.com
 ##
+from cefevent import CEFEvent
+from osm_nbi import version
 
 
 def find_in_list(the_list, condition_lambda):
 
 
 def find_in_list(the_list, condition_lambda):
@@ -64,3 +66,29 @@ def deep_update_dict(data, updated_data):
         return data
 
     return data
         return data
 
     return data
+
+
+def cef_event(cef_logger, cef_fields):
+    for key, value in cef_fields.items():
+        cef_logger.set_field(key, value)
+
+
+def cef_event_builder(config):
+    cef_logger = CEFEvent()
+    cef_fields = {
+        "version": config["version"],
+        "deviceVendor": config["deviceVendor"],
+        "deviceProduct": config["deviceProduct"],
+        "deviceVersion": get_version(),
+        "message": "CEF Logger",
+        "sourceUserName": "admin",
+        "severity": 1,
+    }
+    cef_event(cef_logger, cef_fields)
+    cef_logger.build_cef()
+    return cef_logger
+
+
+def get_version():
+    osm_version = version.split("+")
+    return osm_version[0]
index c5f3ef2..620272f 100644 (file)
@@ -35,6 +35,10 @@ shortname_schema = {
     "pattern": "^[^,;()\\.\\$'\"]+$",
 }
 passwd_schema = {"type": "string", "minLength": 1, "maxLength": 60}
     "pattern": "^[^,;()\\.\\$'\"]+$",
 }
 passwd_schema = {"type": "string", "minLength": 1, "maxLength": 60}
+user_passwd_schema = {
+    "type": "string",
+    "pattern": "^.*(?=.{8,})((?=.*[!@#$%^&*()\\-_=+{};:,<.>]){1})(?=.*\\d)((?=.*[a-z]){1})((?=.*[A-Z]){1}).*$",
+}
 name_schema = {
     "type": "string",
     "minLength": 1,
 name_schema = {
     "type": "string",
     "minLength": 1,
@@ -94,6 +98,10 @@ ip_schema = {
     "type": "string",
     "pattern": "^((25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\\.){3}(25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)$",
 }
     "type": "string",
     "pattern": "^((25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\\.){3}(25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)$",
 }
+ipv6_schema = {
+    "type": "string",
+    "pattern": "(([0-9a-fA-F]{1,4}:){7,7}[0-9a-fA-F]{1,4}|([0-9a-fA-F]{1,4}:){1,7}:|([0-9a-fA-F]{1,4}:){1,6}:[0-9a-fA-F]{1,4}|([0-9a-fA-F]{1,4}:){1,5}(:[0-9a-fA-F]{1,4}){1,2}|([0-9a-fA-F]{1,4}:){1,4}(:[0-9a-fA-F]{1,4}){1,3}|([0-9a-fA-F]{1,4}:){1,3}(:[0-9a-fA-F]{1,4}){1,4}|([0-9a-fA-F]{1,4}:){1,2}(:[0-9a-fA-F]{1,4}){1,5}|[0-9a-fA-F]{1,4}:((:[0-9a-fA-F]{1,4}){1,6})|:((:[0-9a-fA-F]{1,4}){1,7}|:)|fe80:(:[0-9a-fA-F]{0,4}){0,4}%[0-9a-zA-Z]{1,}|::(ffff(:0{1,4}){0,1}:){0,1}((25[0-5]|(2[0-4]|1{0,1}[0-9]){0,1}[0-9])\.){3,3}(25[0-5]|(2[0-4]|1{0,1}[0-9]){0,1}[0-9])|([0-9a-fA-F]{1,4}:){1,4}:((25[0-5]|(2[0-4]|1{0,1}[0-9]){0,1}[0-9])\.){3,3}(25[0-5]|(2[0-4]|1{0,1}[0-9]){0,1}[0-9]))",  # noqa: W605
+}
 ip_prefix_schema = {
     "type": "string",
     "pattern": "^((25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\\.){3}"
 ip_prefix_schema = {
     "type": "string",
     "pattern": "^((25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\\.){3}"
@@ -133,6 +141,7 @@ ns_instantiate_vdu = {
     "type": "object",
     "properties": {
         "id": name_schema,
     "type": "object",
     "properties": {
         "id": name_schema,
+        "vim-flavor-id": name_schema,
         "volume": {
             "type": "array",
             "minItems": 1,
         "volume": {
             "type": "array",
             "minItems": 1,
@@ -153,7 +162,7 @@ ns_instantiate_vdu = {
                 "type": "object",
                 "properties": {
                     "name": name_schema,
                 "type": "object",
                 "properties": {
                     "name": name_schema,
-                    "ip-address": ip_schema,
+                    "ip-address": {"oneOf": [ip_schema, ipv6_schema]},
                     "mac-address": mac_schema,
                     "floating-ip-required": bool_schema,
                 },
                     "mac-address": mac_schema,
                     "floating-ip-required": bool_schema,
                 },
@@ -172,7 +181,7 @@ ip_profile_dns_schema = {
     "items": {
         "type": "object",
         "properties": {
     "items": {
         "type": "object",
         "properties": {
-            "address": ip_schema,
+            "address": {"oneOf": [ip_schema, ipv6_schema]},
         },
         "required": ["address"],
         "additionalProperties": False,
         },
         "required": ["address"],
         "additionalProperties": False,
@@ -190,19 +199,6 @@ ip_profile_dhcp_schema = {
 }
 
 ip_profile_schema = {
 }
 
 ip_profile_schema = {
-    "title": "ip profile validation schema",
-    "$schema": "http://json-schema.org/draft-04/schema#",
-    "type": "object",
-    "properties": {
-        "ip-version": {"enum": ["ipv4", "ipv6"]},
-        "subnet-address": ip_prefix_schema,
-        "gateway-address": ip_schema,
-        "dns-server": ip_profile_dns_schema,
-        "dhcp-params": ip_profile_dhcp_schema,
-    },
-}
-
-ip_profile_update_schema = {
     "title": "ip profile validation schema",
     "$schema": "http://json-schema.org/draft-04/schema#",
     "type": "object",
     "title": "ip profile validation schema",
     "$schema": "http://json-schema.org/draft-04/schema#",
     "type": "object",
@@ -242,14 +238,14 @@ provider_network_schema = {
 }
 
 ns_instantiate_internal_vld = {
 }
 
 ns_instantiate_internal_vld = {
-    "title": "ns action instantiate input schema for vdu",
+    "title": "ns action instantiate input schema for vld",
     "$schema": "http://json-schema.org/draft-04/schema#",
     "type": "object",
     "properties": {
         "name": name_schema,
         "vim-network-name": name_schema,
         "vim-network-id": name_schema,
     "$schema": "http://json-schema.org/draft-04/schema#",
     "type": "object",
     "properties": {
         "name": name_schema,
         "vim-network-name": name_schema,
         "vim-network-id": name_schema,
-        "ip-profile": ip_profile_update_schema,
+        "ip-profile": ip_profile_schema,
         "provider-network": provider_network_schema,
         "internal-connection-point": {
             "type": "array",
         "provider-network": provider_network_schema,
         "internal-connection-point": {
             "type": "array",
@@ -312,6 +308,19 @@ additional_params_for_vnf = {
                     "additionalProperties": False,
                 },
             },
                     "additionalProperties": False,
                 },
             },
+            "affinity-or-anti-affinity-group": {
+                "type": "array",
+                "items": {
+                    "type": "object",
+                    "properties": {
+                        "id": name_schema,
+                        "vim-affinity-group-id": name_schema,
+                    },
+                    "required": ["id"],
+                    "minProperties": 2,
+                    "additionalProperties": False,
+                },
+            },
         },
         "required": ["member-vnf-index"],
         "minProperties": 2,
         },
         "required": ["member-vnf-index"],
         "minProperties": 2,
@@ -330,7 +339,6 @@ ns_instantiate = {
         "nsName": name_schema,
         "nsDescription": {"oneOf": [description_schema, null_schema]},
         "nsdId": id_schema,
         "nsName": name_schema,
         "nsDescription": {"oneOf": [description_schema, null_schema]},
         "nsdId": id_schema,
-        "vcaId": id_schema,
         "vimAccountId": id_schema,
         "wimAccountId": {"oneOf": [id_schema, bool_schema, null_schema]},
         "placement-engine": string_schema,
         "vimAccountId": id_schema,
         "wimAccountId": {"oneOf": [id_schema, bool_schema, null_schema]},
         "placement-engine": string_schema,
@@ -351,7 +359,6 @@ ns_instantiate = {
                 "properties": {
                     "member-vnf-index": name_schema,
                     "vimAccountId": id_schema,
                 "properties": {
                     "member-vnf-index": name_schema,
                     "vimAccountId": id_schema,
-                    "vcaId": id_schema,
                     "vdu": {
                         "type": "array",
                         "minItems": 1,
                     "vdu": {
                         "type": "array",
                         "minItems": 1,
@@ -379,7 +386,7 @@ ns_instantiate = {
                     "vim-network-id": {"oneOf": [string_schema, object_schema]},
                     "ns-net": object_schema,
                     "wimAccountId": {"oneOf": [id_schema, bool_schema, null_schema]},
                     "vim-network-id": {"oneOf": [string_schema, object_schema]},
                     "ns-net": object_schema,
                     "wimAccountId": {"oneOf": [id_schema, bool_schema, null_schema]},
-                    "ip-profile": object_schema,
+                    "ip-profile": ip_profile_schema,
                     "provider-network": provider_network_schema,
                     "vnfd-connection-point-ref": {
                         "type": "array",
                     "provider-network": provider_network_schema,
                     "vnfd-connection-point-ref": {
                         "type": "array",
@@ -389,7 +396,7 @@ ns_instantiate = {
                             "properties": {
                                 "member-vnf-index-ref": name_schema,
                                 "vnfd-connection-point-ref": name_schema,
                             "properties": {
                                 "member-vnf-index-ref": name_schema,
                                 "vnfd-connection-point-ref": name_schema,
-                                "ip-address": ip_schema,
+                                "ip-address": {"oneOf": [ip_schema, ipv6_schema]},
                                 # "mac-address": mac_schema,
                             },
                             "required": [
                                 # "mac-address": mac_schema,
                             },
                             "required": [
@@ -425,6 +432,62 @@ ns_terminate = {
     "additionalProperties": False,
 }
 
     "additionalProperties": False,
 }
 
+ns_update = {
+    "title": "ns update input schema",
+    "$schema": "http://json-schema.org/draft-04/schema#",
+    "type": "object",
+    "properties": {
+        "lcmOperationType": string_schema,
+        "nsInstanceId": id_schema,
+        "timeout_ns_update": integer1_schema,
+        "updateType": {
+            "enum": [
+                "CHANGE_VNFPKG",
+                "REMOVE_VNF",
+                "MODIFY_VNF_INFORMATION",
+                "OPERATE_VNF",
+            ]
+        },
+        "modifyVnfInfoData": {
+            "type": "object",
+            "properties": {
+                "vnfInstanceId": id_schema,
+                "vnfdId": id_schema,
+            },
+            "required": ["vnfInstanceId", "vnfdId"],
+        },
+        "removeVnfInstanceId": id_schema,
+        "changeVnfPackageData": {
+            "type": "object",
+            "properties": {
+                "vnfInstanceId": id_schema,
+                "vnfdId": id_schema,
+            },
+            "required": ["vnfInstanceId", "vnfdId"],
+        },
+        "operateVnfData": {
+            "type": "object",
+            "properties": {
+                "vnfInstanceId": id_schema,
+                "changeStateTo": name_schema,
+                "additionalParam": {
+                    "type": "object",
+                    "properties": {
+                        "run-day1": bool_schema,
+                        "vdu_id": name_schema,
+                        "count-index": integer0_schema,
+                    },
+                    "required": ["vdu_id", "count-index"],
+                    "additionalProperties": False,
+                },
+            },
+            "required": ["vnfInstanceId", "changeStateTo"],
+        },
+    },
+    "required": ["updateType"],
+    "additionalProperties": False,
+}
+
 ns_action = {  # TODO for the moment it is only contemplated the vnfd primitive execution
     "title": "ns action input schema",
     "$schema": "http://json-schema.org/draft-04/schema#",
 ns_action = {  # TODO for the moment it is only contemplated the vnfd primitive execution
     "title": "ns action input schema",
     "$schema": "http://json-schema.org/draft-04/schema#",
@@ -444,6 +507,7 @@ ns_action = {  # TODO for the moment it is only contemplated the vnfd primitive
     "required": ["primitive", "primitive_params"],  # TODO add member_vnf_index
     "additionalProperties": False,
 }
     "required": ["primitive", "primitive_params"],  # TODO add member_vnf_index
     "additionalProperties": False,
 }
+
 ns_scale = {  # TODO for the moment it is only VDU-scaling
     "title": "ns scale input schema",
     "$schema": "http://json-schema.org/draft-04/schema#",
 ns_scale = {  # TODO for the moment it is only VDU-scaling
     "title": "ns scale input schema",
     "$schema": "http://json-schema.org/draft-04/schema#",
@@ -478,6 +542,121 @@ ns_scale = {  # TODO for the moment it is only VDU-scaling
     "additionalProperties": False,
 }
 
     "additionalProperties": False,
 }
 
+ns_migrate = {
+    "title": "ns migrate input schema",
+    "$schema": "http://json-schema.org/draft-04/schema#",
+    "type": "object",
+    "properties": {
+        "lcmOperationType": string_schema,
+        "nsInstanceId": id_schema,
+        "vnfInstanceId": id_schema,
+        "migrateToHost": string_schema,
+        "vdu": {
+            "type": "object",
+            "properties": {
+                "vduId": name_schema,
+                "vduCountIndex": integer0_schema,
+            },
+            "required": ["vduId"],
+            "additionalProperties": False,
+        },
+    },
+    "required": ["vnfInstanceId"],
+    "additionalProperties": False,
+}
+
+ns_heal = {
+    "title": "ns heal input schema",
+    "$schema": "http://json-schema.org/draft-04/schema#",
+    "type": "object",
+    "properties": {
+        "lcmOperationType": string_schema,
+        "nsInstanceId": id_schema,
+        "timeout_ns_heal": integer1_schema,
+        "healVnfData": {
+            "type": "array",
+            "items": {
+                "type": "object",
+                "properties": {
+                    "vnfInstanceId": id_schema,
+                    "cause": description_schema,
+                    "additionalParams": {
+                        "type": "object",
+                        "properties": {
+                            "run-day1": bool_schema,
+                            "vdu": {
+                                "type": "array",
+                                "items": {
+                                    "type": "object",
+                                    "properties": {
+                                        "run-day1": bool_schema,
+                                        "vdu-id": name_schema,
+                                        "count-index": integer0_schema,
+                                    },
+                                    "required": ["vdu-id"],
+                                    "additionalProperties": False,
+                                },
+                            },
+                        },
+                        "additionalProperties": False,
+                    },
+                },
+                "required": ["vnfInstanceId"],
+                "additionalProperties": False,
+            },
+        },
+    },
+    "required": ["healVnfData"],
+    "additionalProperties": False,
+}
+
+ns_verticalscale = {
+    "title": "vertial scale input schema",
+    "$schema": "http://json-schema.org/draft-04/schema#",
+    "type": "object",
+    "properties": {
+        "lcmOperationType": string_schema,
+        "verticalScale": string_schema,
+        "nsInstanceId": id_schema,
+        "changeVnfFlavorData": {
+            "type": "object",
+            "properties": {
+                "vnfInstanceId": id_schema,
+                "additionalParams": {
+                    "type": "object",
+                    "properties": {
+                        "vduid": string_schema,
+                        "vduCountIndex": integer0_schema,
+                        "virtualMemory": integer1_schema,
+                        "sizeOfStorage": integer0_schema,
+                        "numVirtualCpu": integer1_schema,
+                    },
+                },
+            },
+            "required": ["vnfInstanceId", "additionalParams"],
+            "additionalProperties": False,
+        },
+    },
+    "required": ["lcmOperationType", "verticalScale", "nsInstanceId"],
+    "additionalProperties": False,
+}
+
+nslcmop_cancel = {
+    "title": "Cancel nslcmop input schema",
+    "$schema": "http://json-schema.org/draft-04/schema#",
+    "type": "object",
+    "properties": {
+        "nsLcmOpOccId": id_schema,
+        "cancelMode": {
+            "enum": [
+                "GRACEFUL",
+                "FORCEFUL",
+            ]
+        },
+    },
+    "required": ["cancelMode"],
+    "additionalProperties": False,
+}
 
 schema_version = {"type": "string", "enum": ["1.0"]}
 schema_type = {"type": "string"}
 
 schema_version = {"type": "string", "enum": ["1.0"]}
 schema_type = {"type": "string"}
@@ -501,6 +680,7 @@ vim_account_edit_schema = {
         "vim_password": passwd_schema,
         "vca": id_schema,
         "config": {"type": "object"},
         "vim_password": passwd_schema,
         "vca": id_schema,
         "config": {"type": "object"},
+        "prometheus-config": {"type": "object"},
     },
     "additionalProperties": False,
 }
     },
     "additionalProperties": False,
 }
@@ -525,6 +705,7 @@ vim_account_new_schema = {
         "vim_password": passwd_schema,
         "vca": id_schema,
         "config": {"type": "object"},
         "vim_password": passwd_schema,
         "vca": id_schema,
         "config": {"type": "object"},
+        "prometheus-config": {"type": "object"},
     },
     "required": [
         "name",
     },
     "required": [
         "name",
@@ -549,7 +730,7 @@ wim_account_edit_schema = {
         "wim": name_schema,
         "wim_type": wim_type,
         "wim_url": description_schema,
         "wim": name_schema,
         "wim_type": wim_type,
         "wim_url": description_schema,
-        "user": shortname_schema,
+        "user": string_schema,
         "password": passwd_schema,
         "config": {"type": "object"},
     },
         "password": passwd_schema,
         "config": {"type": "object"},
     },
@@ -568,7 +749,7 @@ wim_account_new_schema = {
         "wim": name_schema,
         "wim_type": wim_type,
         "wim_url": description_schema,
         "wim": name_schema,
         "wim_type": wim_type,
         "wim_url": description_schema,
-        "user": shortname_schema,
+        "user": string_schema,
         "password": passwd_schema,
         "config": {
             "type": "object",
         "password": passwd_schema,
         "config": {
             "type": "object",
@@ -583,7 +764,7 @@ sdn_properties = {
     "name": name_schema,
     "type": {"type": "string"},
     "url": {"type": "string"},
     "name": name_schema,
     "type": {"type": "string"},
     "url": {"type": "string"},
-    "user": shortname_schema,
+    "user": string_schema,
     "password": passwd_schema,
     "config": {"type": "object"},
     "description": description_schema,
     "password": passwd_schema,
     "config": {"type": "object"},
     "description": description_schema,
@@ -646,6 +827,17 @@ sdn_external_port_schema = {
 }
 
 # K8s Clusters
 }
 
 # K8s Clusters
+k8scluster_deploy_method_schema = {
+    "$schema": "http://json-schema.org/draft-04/schema#",
+    "title": "Deployment methods for K8s cluster",
+    "type": "object",
+    "properties": {
+        "juju-bundle": {"type": "boolean"},
+        "helm-chart-v3": {"type": "boolean"},
+    },
+    "additionalProperties": False,
+    "minProperties": 2,
+}
 k8scluster_nets_schema = {
     "title": "k8scluster nets input schema",
     "$schema": "http://json-schema.org/draft-04/schema#",
 k8scluster_nets_schema = {
     "title": "k8scluster nets input schema",
     "$schema": "http://json-schema.org/draft-04/schema#",
@@ -668,6 +860,7 @@ k8scluster_new_schema = {
         "vca_id": id_schema,
         "k8s_version": string_schema,
         "nets": k8scluster_nets_schema,
         "vca_id": id_schema,
         "k8s_version": string_schema,
         "nets": k8scluster_nets_schema,
+        "deployment_methods": k8scluster_deploy_method_schema,
         "namespace": name_schema,
         "cni": nameshort_list_schema,
     },
         "namespace": name_schema,
         "cni": nameshort_list_schema,
     },
@@ -703,7 +896,7 @@ vca_new_schema = {
         "name": name_schema,
         "description": description_schema,
         "endpoints": description_list_schema,
         "name": name_schema,
         "description": description_schema,
         "endpoints": description_list_schema,
-        "user": shortname_schema,
+        "user": string_schema,
         "secret": passwd_schema,
         "cacert": long_description_schema,
         "lxd-cloud": shortname_schema,
         "secret": passwd_schema,
         "cacert": long_description_schema,
         "lxd-cloud": shortname_schema,
@@ -734,7 +927,7 @@ vca_edit_schema = {
         "description": description_schema,
         "endpoints": description_list_schema,
         "port": integer1_schema,
         "description": description_schema,
         "endpoints": description_list_schema,
         "port": integer1_schema,
-        "user": shortname_schema,
+        "user": string_schema,
         "secret": passwd_schema,
         "cacert": long_description_schema,
         "lxd-cloud": shortname_schema,
         "secret": passwd_schema,
         "cacert": long_description_schema,
         "lxd-cloud": shortname_schema,
@@ -753,6 +946,10 @@ k8srepo_properties = {
     "description": description_schema,
     "type": k8srepo_types,
     "url": description_schema,
     "description": description_schema,
     "type": k8srepo_types,
     "url": description_schema,
+    "cacert": long_description_schema,
+    "user": string_schema,
+    "password": passwd_schema,
+    "oci": bool_schema,
 }
 k8srepo_new_schema = {
     "title": "k8scluster creation input schema",
 }
 k8srepo_new_schema = {
     "title": "k8scluster creation input schema",
@@ -777,7 +974,7 @@ osmrepo_properties = {
     "description": description_schema,
     "type": osmrepo_types,
     "url": description_schema
     "description": description_schema,
     "type": osmrepo_types,
     "url": description_schema
-    # "user": shortname_schema,
+    # "user": string_schema,
     # "password": passwd_schema
 }
 osmrepo_new_schema = {
     # "password": passwd_schema
 }
 osmrepo_new_schema = {
@@ -803,7 +1000,7 @@ pdu_interface = {
         "name": shortname_schema,
         "mgmt": bool_schema,
         "type": {"enum": ["overlay", "underlay"]},
         "name": shortname_schema,
         "mgmt": bool_schema,
         "type": {"enum": ["overlay", "underlay"]},
-        "ip-address": ip_schema,
+        "ip-address": {"oneOf": [ip_schema, ipv6_schema]},
         # TODO, add user, password, ssh-key
         "mac-address": mac_schema,
         "vim-network-name": shortname_schema,  # interface is connected to one vim network, or switch port
         # TODO, add user, password, ssh-key
         "mac-address": mac_schema,
         "vim-network-name": shortname_schema,  # interface is connected to one vim network, or switch port
@@ -907,9 +1104,9 @@ user_new_schema = {
     "title": "New user schema",
     "type": "object",
     "properties": {
     "title": "New user schema",
     "type": "object",
     "properties": {
-        "username": shortname_schema,
+        "username": string_schema,
         "domain_name": shortname_schema,
         "domain_name": shortname_schema,
-        "password": passwd_schema,
+        "password": user_passwd_schema,
         "projects": nameshort_list_schema,
         "project_role_mappings": project_role_mappings,
     },
         "projects": nameshort_list_schema,
         "project_role_mappings": project_role_mappings,
     },
@@ -921,12 +1118,16 @@ user_edit_schema = {
     "title": "User edit schema for administrators",
     "type": "object",
     "properties": {
     "title": "User edit schema for administrators",
     "type": "object",
     "properties": {
-        "password": passwd_schema,
-        "username": shortname_schema,  # To allow User Name modification
+        "password": user_passwd_schema,
+        "old_password": passwd_schema,
+        "username": string_schema,  # To allow User Name modification
         "projects": {"oneOf": [nameshort_list_schema, array_edition_schema]},
         "project_role_mappings": project_role_mappings,
         "add_project_role_mappings": project_role_mappings,
         "remove_project_role_mappings": project_role_mappings_optional,
         "projects": {"oneOf": [nameshort_list_schema, array_edition_schema]},
         "project_role_mappings": project_role_mappings,
         "add_project_role_mappings": project_role_mappings,
         "remove_project_role_mappings": project_role_mappings_optional,
+        "system_admin_id": id_schema,
+        "unlock": bool_schema,
+        "renew": bool_schema,
     },
     "minProperties": 1,
     "additionalProperties": False,
     },
     "minProperties": 1,
     "additionalProperties": False,
@@ -1030,6 +1231,8 @@ nbi_new_input_schemas = {
     "ns_instantiate": ns_instantiate,
     "ns_action": ns_action,
     "ns_scale": ns_scale,
     "ns_instantiate": ns_instantiate,
     "ns_action": ns_action,
     "ns_scale": ns_scale,
+    "ns_update": ns_update,
+    "ns_heal": ns_heal,
     "pdus": pdu_new_schema,
 }
 
     "pdus": pdu_new_schema,
 }
 
@@ -1216,7 +1419,7 @@ authentication_schema = {
         "paramsBasic": {
             "type": "object",
             "properties": {
         "paramsBasic": {
             "type": "object",
             "properties": {
-                "userName": shortname_schema,
+                "userName": string_schema,
                 "password": passwd_schema,
             },
         },
                 "password": passwd_schema,
             },
         },
@@ -1235,6 +1438,77 @@ subscription = {
     "required": ["CallbackUri"],
 }
 
     "required": ["CallbackUri"],
 }
 
+vnflcmsub_schema = {
+    "title": "vnflcmsubscription input schema",
+    "$schema": "http://json-schema.org/draft-07/schema#",
+    "type": "object",
+    "properties": {
+        "VnfInstanceSubscriptionFilter": {
+            "type": "object",
+            "properties": {
+                "vnfdIds": {"type": "array"},
+                "vnfInstanceIds": {"type": "array"},
+            },
+        },
+        "notificationTypes": {
+            "type": "array",
+            "items": {
+                "enum": [
+                    "VnfIdentifierCreationNotification",
+                    "VnfLcmOperationOccurrenceNotification",
+                    "VnfIdentifierDeletionNotification",
+                ]
+            },
+        },
+        "operationTypes": {
+            "type": "array",
+            "items": {
+                "enum": [
+                    "INSTANTIATE",
+                    "SCALE",
+                    "SCALE_TO_LEVEL",
+                    "CHANGE_FLAVOUR",
+                    "TERMINATE",
+                    "HEAL",
+                    "OPERATE",
+                    "CHANGE_EXT_CONN",
+                    "MODIFY_INFO",
+                    "CREATE_SNAPSHOT",
+                    "REVERT_TO_SNAPSHOT",
+                    "CHANGE_VNFPKG",
+                ]
+            },
+        },
+        "operationStates": {
+            "type": "array",
+            "items": {
+                "enum": [
+                    "STARTING",
+                    "PROCESSING",
+                    "COMPLETED",
+                    "FAILED_TEMP",
+                    "FAILED",
+                    "ROLLING_BACK",
+                    "ROLLED_BACK",
+                ]
+            },
+        },
+    },
+    "required": ["VnfInstanceSubscriptionFilter", "notificationTypes"],
+}
+
+vnf_subscription = {
+    "title": "vnf subscription input schema",
+    "$schema": "http://json-schema.org/draft-07/schema#",
+    "type": "object",
+    "properties": {
+        "filter": vnflcmsub_schema,
+        "CallbackUri": description_schema,
+        "authentication": authentication_schema,
+    },
+    "required": ["filter", "CallbackUri"],
+}
+
 
 class ValidationError(Exception):
     def __init__(self, message, http_code=HTTPStatus.UNPROCESSABLE_ENTITY):
 
 class ValidationError(Exception):
     def __init__(self, message, http_code=HTTPStatus.UNPROCESSABLE_ENTITY):
diff --git a/osm_nbi/vnf_instance_topics.py b/osm_nbi/vnf_instance_topics.py
new file mode 100644 (file)
index 0000000..8550534
--- /dev/null
@@ -0,0 +1,116 @@
+# Copyright 2021 K Sai Kiran (Tata Elxsi)
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+# implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+__author__ = "K Sai Kiran <saikiran.k@tataelxsi.co.in>, Selvi Jayaraman <selvi.j@tataelxsi.co.in>"
+__date__ = "$12-June-2021 8:30:59$"
+
+from osm_nbi.base_topic import BaseTopic
+from .osm_vnfm.vnf_instances import VnfInstances2NsInstances
+from .osm_vnfm.vnf_instance_actions import VnfLcmOp2NsLcmOp
+
+
+class VnfInstances(BaseTopic):
+    def __init__(self, db, fs, msg, auth):
+        """
+        Constructor call for vnf instance topic
+        """
+        BaseTopic.__init__(self, db, fs, msg, auth)
+        self.vnfinstances2nsinstances = VnfInstances2NsInstances(db, fs, msg, auth)
+
+    def new(self, rollback, session, indata=None, kwargs=None, headers=None):
+        """
+        Creates new vnf instance
+        :param rollback: list to append the created items at database in case a rollback must be done
+        :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
+        :param indata: params to be used for the vnf instance
+        :param kwargs: used to override the indata descriptor
+        :param headers: http request headers
+        :return: the _id of vnf instance created at database. Or an exception.
+        """
+        return self.vnfinstances2nsinstances.new(
+            rollback, session, indata, kwargs, headers
+        )
+
+    def list(self, session, filter_q=None, api_req=False):
+        """
+        Get a list of the vnf instances that match a filter
+        :param session: contains the used login username and working project
+        :param filter_q: filter of data to be applied
+        :param api_req: True if this call is serving an external API request. False if serving internal request.
+        :return: The list, it can be empty if no one match the filter.
+        """
+        return self.vnfinstances2nsinstances.list(session, filter_q, api_req)
+
+    def show(self, session, _id, filter_q=None, api_req=False):
+        """
+        Get complete information on an vnf instance
+        :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
+        :param _id: server internal id
+        :param api_req: True if this call is serving an external API request. False if serving internal request.
+        :return: dictionary, raise exception if not found.
+        """
+        return self.vnfinstances2nsinstances.show(session, _id, api_req)
+
+    def delete(self, session, _id, dry_run=False, not_send_msg=None):
+        """
+        Delete vnf instance by its internal _id
+        :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
+        :param _id: server internal id
+        :param dry_run: make checking but do not delete
+        :param not_send_msg: To not send message (False) or store content (list) instead
+        :return: operation id (None if there is not operation), raise exception if error or not found, conflict, ...
+        """
+        return self.vnfinstances2nsinstances.delete(session, _id, dry_run, not_send_msg)
+
+
+class VnfLcmOpTopic(BaseTopic):
+    def __init__(self, db, fs, msg, auth):
+        """
+        Constructor call for vnf lcm op topic
+        """
+        BaseTopic.__init__(self, db, fs, msg, auth)
+        self.vnflcmop2nslcmop = VnfLcmOp2NsLcmOp(db, fs, msg, auth)
+
+    def new(self, rollback, session, indata=None, kwargs=None, headers=None):
+        """
+        Creates new vnf lcm op
+        :param rollback: list to append the created items at database in case a rollback must be done
+        :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
+        :param indata: params to be used for the vnf instance
+        :param kwargs: used to override the indata descriptor
+        :param headers: http request headers
+        :return: the _id of vnf lcm op created at database. Or an exception.
+        """
+        return self.vnflcmop2nslcmop.new(rollback, session, indata, kwargs, headers)
+
+    def list(self, session, filter_q=None, api_req=False):
+        """
+        Get a list of the vnf lcm op that match a filter
+        :param session: contains the used login username and working project
+        :param filter_q: filter of data to be applied
+        :param api_req: True if this call is serving an external API request. False if serving internal request.
+        :return: The list, it can be empty if no one match the filter.
+        """
+        return self.vnflcmop2nslcmop.list(session, filter_q, api_req)
+
+    def show(self, session, _id, filter_q=None, api_req=False):
+        """
+        Get complete information on an vnf lcm op
+        :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
+        :param _id: server internal id
+        :param api_req: True if this call is serving an external API request. False if serving internal request.
+        :return: dictionary, raise exception if not found.
+        """
+        return self.vnflcmop2nslcmop.show(session, _id, api_req)
diff --git a/pyangbind.patch b/pyangbind.patch
new file mode 100644 (file)
index 0000000..3077299
--- /dev/null
@@ -0,0 +1,46 @@
+#######################################################################################
+# Copyright ETSI Contributors and Others.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+# implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#######################################################################################
+
+*** .tox/cover/lib/python3.10/site-packages/pyangbind/lib/yangtypes.py 2023-05-10 06:50:57.876027148 -0400
+--- .tox/cover/lib/python3.10/site-packages/pyangbind/lib/yangtypes.py 2023-05-10 06:51:11.772022417 -0400
+*************** limitations under the License.
+*** 22,27 ****
+--- 22,28 ----
+  from __future__ import unicode_literals
+
+  import collections
++ from six.moves import collections_abc
+  import copy
+  import uuid
+  from decimal import Decimal
+*************** def TypedListType(*args, **kwargs):
+*** 372,378 ****
+    if not isinstance(allowed_type, list):
+      allowed_type = [allowed_type]
+
+!   class TypedList(collections.MutableSequence):
+      _pybind_generated_by = "TypedListType"
+      _list = list()
+
+--- 373,379 ----
+    if not isinstance(allowed_type, list):
+      allowed_type = [allowed_type]
+
+!   class TypedList(collections_abc.MutableSequence):
+      _pybind_generated_by = "TypedListType"
+      _list = list()
+
index dcaa6bf..33a3ab6 100644 (file)
@@ -11,4 +11,7 @@
 # under the License.
 
 git+https://osm.etsi.org/gerrit/osm/common.git@master#egg=osm-common
 # under the License.
 
 git+https://osm.etsi.org/gerrit/osm/common.git@master#egg=osm-common
+-r https://osm.etsi.org/gitweb/?p=osm/common.git;a=blob_plain;f=requirements.txt;hb=master
+
 git+https://osm.etsi.org/gerrit/osm/IM.git@master#egg=osm-im
 git+https://osm.etsi.org/gerrit/osm/IM.git@master#egg=osm-im
+-r https://osm.etsi.org/gitweb/?p=osm/IM.git;a=blob_plain;f=requirements.txt;hb=master
index fa3825a..0bf9737 100644 (file)
@@ -1,37 +1,3 @@
-aiokafka==0.7.0
-    # via osm-common
-bitarray==1.8.1
-    # via pyangbind
-dataclasses==0.6
-    # via osm-common
-enum34==1.1.10
-    # via pyangbind
-kafka-python==2.0.2
-    # via aiokafka
-lxml==4.6.3
-    # via
-    #   pyang
-    #   pyangbind
-git+https://osm.etsi.org/gerrit/osm/common.git@master#egg=osm-common
-    # via -r requirements-dev.in
-git+https://osm.etsi.org/gerrit/osm/IM.git@master#egg=osm-im
-    # via -r requirements-dev.in
-pyang==2.4.0
-    # via
-    #   osm-im
-    #   pyangbind
-pyangbind==0.8.1
-    # via osm-im
-pycrypto==2.6.1
-    # via osm-common
-pymongo==3.11.3
-    # via osm-common
-pyyaml==5.4.1
-    # via osm-common
-regex==2021.3.17
-    # via pyangbind
-six==1.15.0
-    # via pyangbind
 # Copyright 2018 Telefonica S.A.
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # Copyright 2018 Telefonica S.A.
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
@@ -48,3 +14,62 @@ six==1.15.0
 # limitations under the License.
 
 
 # limitations under the License.
 
 
+aiokafka==0.8.1
+    # via -r https://osm.etsi.org/gitweb/?p=osm/common.git;a=blob_plain;f=requirements.txt;hb=master
+async-timeout==4.0.3
+    # via
+    #   -r https://osm.etsi.org/gitweb/?p=osm/common.git;a=blob_plain;f=requirements.txt;hb=master
+    #   aiokafka
+dataclasses==0.6
+    # via -r https://osm.etsi.org/gitweb/?p=osm/common.git;a=blob_plain;f=requirements.txt;hb=master
+dnspython==2.4.2
+    # via
+    #   -r https://osm.etsi.org/gitweb/?p=osm/common.git;a=blob_plain;f=requirements.txt;hb=master
+    #   pymongo
+enum34==1.1.10
+    # via
+    #   -r https://osm.etsi.org/gitweb/?p=osm/IM.git;a=blob_plain;f=requirements.txt;hb=master
+    #   pyangbind
+kafka-python==2.0.2
+    # via
+    #   -r https://osm.etsi.org/gitweb/?p=osm/common.git;a=blob_plain;f=requirements.txt;hb=master
+    #   aiokafka
+lxml==4.9.3
+    # via
+    #   -r https://osm.etsi.org/gitweb/?p=osm/IM.git;a=blob_plain;f=requirements.txt;hb=master
+    #   pyang
+    #   pyangbind
+motor==3.3.1
+    # via -r https://osm.etsi.org/gitweb/?p=osm/common.git;a=blob_plain;f=requirements.txt;hb=master
+osm-common @ git+https://osm.etsi.org/gerrit/osm/common.git@master
+    # via -r requirements-dev.in
+osm-im @ git+https://osm.etsi.org/gerrit/osm/IM.git@master
+    # via -r requirements-dev.in
+packaging==23.1
+    # via
+    #   -r https://osm.etsi.org/gitweb/?p=osm/common.git;a=blob_plain;f=requirements.txt;hb=master
+    #   aiokafka
+pyang==2.5.3
+    # via
+    #   -r https://osm.etsi.org/gitweb/?p=osm/IM.git;a=blob_plain;f=requirements.txt;hb=master
+    #   pyangbind
+pyangbind==0.8.3.post1
+    # via -r https://osm.etsi.org/gitweb/?p=osm/IM.git;a=blob_plain;f=requirements.txt;hb=master
+pycryptodome==3.19.0
+    # via -r https://osm.etsi.org/gitweb/?p=osm/common.git;a=blob_plain;f=requirements.txt;hb=master
+pymongo==4.5.0
+    # via
+    #   -r https://osm.etsi.org/gitweb/?p=osm/common.git;a=blob_plain;f=requirements.txt;hb=master
+    #   motor
+pyyaml==6.0.1
+    # via
+    #   -r https://osm.etsi.org/gitweb/?p=osm/IM.git;a=blob_plain;f=requirements.txt;hb=master
+    #   -r https://osm.etsi.org/gitweb/?p=osm/common.git;a=blob_plain;f=requirements.txt;hb=master
+regex==2023.8.8
+    # via
+    #   -r https://osm.etsi.org/gitweb/?p=osm/IM.git;a=blob_plain;f=requirements.txt;hb=master
+    #   pyangbind
+six==1.16.0
+    # via
+    #   -r https://osm.etsi.org/gitweb/?p=osm/IM.git;a=blob_plain;f=requirements.txt;hb=master
+    #   pyangbind
index 11f0a2a..4f8784f 100644 (file)
@@ -14,4 +14,5 @@
 # limitations under the License.
 
 stdeb
 # limitations under the License.
 
 stdeb
-setuptools-version-command
\ No newline at end of file
+setuptools-version-command
+setuptools<60
\ No newline at end of file
index c8f13c9..fa7c05f 100644 (file)
@@ -1,10 +1,3 @@
-setuptools-version-command==2.2
-    # via -r requirements-dist.in
-stdeb==0.10.0
-    # via -r requirements-dist.in
-
-# The following packages are considered to be unsafe in a requirements file:
-# setuptools
 # Copyright 2018 Telefonica S.A.
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # Copyright 2018 Telefonica S.A.
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
@@ -21,3 +14,13 @@ stdeb==0.10.0
 # limitations under the License.
 
 
 # limitations under the License.
 
 
+setuptools-version-command==99.9
+    # via -r requirements-dist.in
+stdeb==0.10.0
+    # via -r requirements-dist.in
+
+# The following packages are considered to be unsafe in a requirements file:
+setuptools==59.8.0
+    # via
+    #   -r requirements-dist.in
+    #   setuptools-version-command
index 4564164..5d6739a 100644 (file)
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-aiohttp>=2.3.10,<=3.6.2
 aioresponses
 asynctest
 coverage
 aioresponses
 asynctest
 coverage
+deepdiff
 nose2
 nose2
-requests==2.25.1
-pyang
index b4cf255..1abfb13 100644 (file)
@@ -1,47 +1,3 @@
-aiohttp==3.6.2
-    # via
-    #   -r requirements-test.in
-    #   aioresponses
-aioresponses==0.7.2
-    # via -r requirements-test.in
-async-timeout==3.0.1
-    # via aiohttp
-asynctest==0.13.0
-    # via -r requirements-test.in
-attrs==20.3.0
-    # via aiohttp
-certifi==2020.12.5
-    # via requests
-chardet==3.0.4
-    # via
-    #   aiohttp
-    #   requests
-coverage==5.5
-    # via
-    #   -r requirements-test.in
-    #   nose2
-idna==2.10
-    # via
-    #   requests
-    #   yarl
-lxml==4.6.3
-    # via pyang
-multidict==4.7.6
-    # via
-    #   aiohttp
-    #   yarl
-nose2==0.10.0
-    # via -r requirements-test.in
-pyang==2.4.0
-    # via -r requirements-test.in
-requests==2.25.1
-    # via -r requirements-test.in
-six==1.15.0
-    # via nose2
-urllib3==1.26.4
-    # via requests
-yarl==1.6.3
-    # via aiohttp
 # Copyright 2018 Telefonica S.A.
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # Copyright 2018 Telefonica S.A.
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
@@ -58,3 +14,37 @@ yarl==1.6.3
 # limitations under the License.
 
 
 # limitations under the License.
 
 
+aiohttp==3.8.5
+    # via aioresponses
+aioresponses==0.7.4
+    # via -r requirements-test.in
+aiosignal==1.3.1
+    # via aiohttp
+async-timeout==4.0.3
+    # via aiohttp
+asynctest==0.13.0
+    # via -r requirements-test.in
+attrs==23.1.0
+    # via aiohttp
+charset-normalizer==3.2.0
+    # via aiohttp
+coverage==7.3.1
+    # via -r requirements-test.in
+deepdiff==6.5.0
+    # via -r requirements-test.in
+frozenlist==1.4.0
+    # via
+    #   aiohttp
+    #   aiosignal
+idna==3.4
+    # via yarl
+multidict==6.0.4
+    # via
+    #   aiohttp
+    #   yarl
+nose2==0.13.0
+    # via -r requirements-test.in
+ordered-set==4.1.0
+    # via deepdiff
+yarl==1.9.2
+    # via aiohttp
index 8098683..9096b53 100644 (file)
 # License for the specific language governing permissions and limitations
 # under the License.
 
 # License for the specific language governing permissions and limitations
 # under the License.
 
-aiohttp>=2.3.10,<=3.6.2
+aiohttp
+cefevent
 CherryPy>=18.1.2
 CherryPy>=18.1.2
+deepdiff
 jsonschema>=3.2.0
 python-keystoneclient
 jsonschema>=3.2.0
 python-keystoneclient
-pyyaml
+pyyaml>6
 requests
 tacacs_plus
 requests
 tacacs_plus
index 30dabd5..cc7328f 100644 (file)
-aiohttp==3.6.2
+# Copyright 2018 Telefonica S.A.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+# implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+aiohttp==3.8.5
     # via -r requirements.in
     # via -r requirements.in
-async-timeout==3.0.1
+aiosignal==1.3.1
     # via aiohttp
     # via aiohttp
-attrs==20.3.0
+annotated-types==0.5.0
+    # via pydantic
+async-timeout==4.0.3
+    # via aiohttp
+attrs==23.1.0
     # via
     #   aiohttp
     #   jsonschema
     # via
     #   aiohttp
     #   jsonschema
-certifi==2020.12.5
+    #   referencing
+autocommand==2.2.2
+    # via jaraco-text
+cefevent==0.5.4
+    # via -r requirements.in
+certifi==2023.7.22
     # via requests
     # via requests
-chardet==3.0.4
+charset-normalizer==3.2.0
     # via
     #   aiohttp
     #   requests
     # via
     #   aiohttp
     #   requests
-cheroot==8.5.2
+cheroot==10.0.0
     # via cherrypy
     # via cherrypy
-cherrypy==18.6.0
+cherrypy==18.8.0
     # via -r requirements.in
     # via -r requirements.in
-debtcollector==2.2.0
+debtcollector==2.5.0
     # via
     # via
-    #   oslo.config
-    #   oslo.utils
+    #   oslo-config
+    #   oslo-utils
     #   python-keystoneclient
     #   python-keystoneclient
-idna==2.10
+deepdiff==6.5.0
+    # via -r requirements.in
+frozenlist==1.4.0
+    # via
+    #   aiohttp
+    #   aiosignal
+idna==3.4
     # via
     #   requests
     #   yarl
     # via
     #   requests
     #   yarl
-iso8601==0.1.14
+inflect==7.0.0
+    # via jaraco-text
+iso8601==2.0.0
     # via
     #   keystoneauth1
     # via
     #   keystoneauth1
-    #   oslo.utils
-jaraco.classes==3.2.1
-    # via jaraco.collections
-jaraco.collections==3.2.0
+    #   oslo-utils
+jaraco-collections==4.3.0
     # via cherrypy
     # via cherrypy
-jaraco.functools==3.2.1
+jaraco-context==4.3.0
+    # via jaraco-text
+jaraco-functools==3.9.0
     # via
     #   cheroot
     # via
     #   cheroot
-    #   jaraco.text
+    #   jaraco-text
     #   tempora
     #   tempora
-jaraco.text==3.5.0
-    # via jaraco.collections
-jsonschema==3.2.0
+jaraco-text==3.11.1
+    # via jaraco-collections
+jsonschema==4.19.1
     # via -r requirements.in
     # via -r requirements.in
-keystoneauth1==4.3.1
+jsonschema-specifications==2023.7.1
+    # via jsonschema
+keystoneauth1==5.3.0
     # via python-keystoneclient
     # via python-keystoneclient
-more-itertools==8.7.0
+more-itertools==10.1.0
     # via
     #   cheroot
     #   cherrypy
     # via
     #   cheroot
     #   cherrypy
-    #   jaraco.classes
-    #   jaraco.functools
-msgpack==1.0.2
-    # via oslo.serialization
-multidict==4.7.6
+    #   jaraco-functools
+    #   jaraco-text
+msgpack==1.0.7
+    # via oslo-serialization
+multidict==6.0.4
     # via
     #   aiohttp
     #   yarl
     # via
     #   aiohttp
     #   yarl
-netaddr==0.8.0
-    # via
-    #   oslo.config
-    #   oslo.utils
-netifaces==0.10.9
-    # via oslo.utils
+netaddr==0.9.0
+    # via
+    #   oslo-config
+    #   oslo-utils
+netifaces==0.11.0
+    # via oslo-utils
+ordered-set==4.1.0
+    # via deepdiff
 os-service-types==1.7.0
     # via keystoneauth1
 os-service-types==1.7.0
     # via keystoneauth1
-oslo.config==8.5.0
+oslo-config==9.2.0
     # via python-keystoneclient
     # via python-keystoneclient
-oslo.i18n==5.0.1
+oslo-i18n==6.1.0
     # via
     # via
-    #   oslo.config
-    #   oslo.utils
+    #   oslo-config
+    #   oslo-utils
     #   python-keystoneclient
     #   python-keystoneclient
-oslo.serialization==4.1.0
+oslo-serialization==5.2.0
     # via python-keystoneclient
     # via python-keystoneclient
-oslo.utils==4.8.0
+oslo-utils==6.2.1
+    # via
+    #   oslo-serialization
+    #   python-keystoneclient
+packaging==23.1
     # via
     # via
-    #   oslo.serialization
+    #   oslo-utils
     #   python-keystoneclient
     #   python-keystoneclient
-packaging==20.9
-    # via oslo.utils
-pbr==5.5.1
+pbr==5.11.1
     # via
     # via
-    #   debtcollector
     #   keystoneauth1
     #   os-service-types
     #   keystoneauth1
     #   os-service-types
-    #   oslo.i18n
-    #   oslo.serialization
-    #   oslo.utils
+    #   oslo-i18n
+    #   oslo-serialization
     #   python-keystoneclient
     #   stevedore
     #   python-keystoneclient
     #   stevedore
-portend==2.7.1
+portend==3.2.0
     # via cherrypy
     # via cherrypy
-pyparsing==2.4.7
-    # via
-    #   oslo.utils
-    #   packaging
-pyrsistent==0.17.3
-    # via jsonschema
-python-keystoneclient==4.2.0
+pydantic==2.4.2
+    # via inflect
+pydantic-core==2.10.1
+    # via pydantic
+pyparsing==3.1.1
+    # via oslo-utils
+python-keystoneclient==5.2.0
     # via -r requirements.in
     # via -r requirements.in
-pytz==2021.1
+pytz==2023.3.post1
     # via
     # via
-    #   oslo.serialization
-    #   oslo.utils
+    #   oslo-serialization
+    #   oslo-utils
     #   tempora
     #   tempora
-pyyaml==5.4.1
+pyyaml==6.0.1
     # via
     #   -r requirements.in
     # via
     #   -r requirements.in
-    #   oslo.config
-requests==2.25.1
+    #   oslo-config
+referencing==0.30.2
+    # via
+    #   jsonschema
+    #   jsonschema-specifications
+requests==2.31.0
     # via
     #   -r requirements.in
     #   keystoneauth1
     # via
     #   -r requirements.in
     #   keystoneauth1
-    #   oslo.config
+    #   oslo-config
     #   python-keystoneclient
     #   python-keystoneclient
-rfc3986==1.4.0
-    # via oslo.config
-six==1.15.0
+rfc3986==2.0.0
+    # via oslo-config
+rpds-py==0.10.3
     # via
     # via
-    #   cheroot
-    #   debtcollector
     #   jsonschema
     #   jsonschema
-    #   keystoneauth1
-    #   oslo.i18n
+    #   referencing
+six==1.16.0
+    # via
     #   python-keystoneclient
     #   tacacs-plus
     #   python-keystoneclient
     #   tacacs-plus
-stevedore==3.3.0
+stevedore==5.1.0
     # via
     #   keystoneauth1
     # via
     #   keystoneauth1
-    #   oslo.config
+    #   oslo-config
     #   python-keystoneclient
 tacacs-plus==2.6
     # via -r requirements.in
     #   python-keystoneclient
 tacacs-plus==2.6
     # via -r requirements.in
-tempora==4.0.1
+tempora==5.5.0
     # via portend
     # via portend
-urllib3==1.26.4
+typing-extensions==4.8.0
+    # via
+    #   inflect
+    #   jaraco-functools
+    #   pydantic
+    #   pydantic-core
+tzdata==2023.3
+    # via
+    #   oslo-serialization
+    #   oslo-utils
+urllib3==2.0.5
     # via requests
     # via requests
-wrapt==1.12.1
+wrapt==1.15.0
     # via debtcollector
     # via debtcollector
-yarl==1.6.3
+yarl==1.9.2
     # via aiohttp
     # via aiohttp
-zc.lockfile==2.0
+zc-lockfile==3.0.post1
     # via cherrypy
 
 # The following packages are considered to be unsafe in a requirements file:
 # setuptools
     # via cherrypy
 
 # The following packages are considered to be unsafe in a requirements file:
 # setuptools
-# Copyright 2018 Telefonica S.A.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#    http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
-# implied.
-# See the License for the specific language governing permissions and
diff --git a/tox.ini b/tox.ini
index 5cb66e7..3c0217e 100644 (file)
--- a/tox.ini
+++ b/tox.ini
@@ -22,18 +22,18 @@ toxworkdir = /tmp/.tox
 
 [testenv]
 usedevelop = True
 
 [testenv]
 usedevelop = True
-basepython = python3
+basepython = python3.10
 setenv = VIRTUAL_ENV={envdir}
          PYTHONDONTWRITEBYTECODE = 1
 deps =  -r{toxinidir}/requirements.txt
 
 #######################################################################################
 [testenv:black]
 setenv = VIRTUAL_ENV={envdir}
          PYTHONDONTWRITEBYTECODE = 1
 deps =  -r{toxinidir}/requirements.txt
 
 #######################################################################################
 [testenv:black]
-deps = black
+deps = black==23.12.1
 skip_install = true
 commands =
 skip_install = true
 commands =
-        black --check --diff osm_nbi/
-        black --check --diff setup.py
+        black --check --diff osm_nbi/
+        black --check --diff setup.py
 
 
 #######################################################################################
 
 
 #######################################################################################
@@ -48,14 +48,14 @@ commands =
         coverage report --omit='*tests*'
         coverage html -d ./cover --omit='*tests*'
         coverage xml -o coverage.xml --omit=*tests*
         coverage report --omit='*tests*'
         coverage html -d ./cover --omit='*tests*'
         coverage xml -o coverage.xml --omit=*tests*
-whitelist_externals = sh
+allowlist_externals = sh
 
 
 #######################################################################################
 [testenv:flake8]
 deps = flake8
 commands =
 
 
 #######################################################################################
 [testenv:flake8]
 deps = flake8
 commands =
-        flake8 osm_nbi/ setup.py
+        flake8 osm_nbi/ setup.py
 
 
 #######################################################################################
 
 
 #######################################################################################
@@ -65,7 +65,7 @@ deps =  {[testenv]deps}
         -r{toxinidir}/requirements-test.txt
         pylint
 commands =
         -r{toxinidir}/requirements-test.txt
         pylint
 commands =
-    - pylint -E osm_nbi
+        pylint -E osm_nbi
 
 
 #######################################################################################
 
 
 #######################################################################################
@@ -81,13 +81,18 @@ commands =
 
 #######################################################################################
 [testenv:pip-compile]
 
 #######################################################################################
 [testenv:pip-compile]
-deps =  pip-tools==5.5.0
+deps =  pip-tools==6.13.0
+skip_install = true
+allowlist_externals = bash
+        [
 commands =
 commands =
-        - sh -c 'for file in requirements*.in ; do pip-compile -rU --no-header $file ;\
-        out=`echo $file | sed "s/.in/.txt/"` ; \
-        head -16 tox.ini >> $out ;\
-        done'
-whitelist_externals = sh
+        - bash -c "for file in requirements*.in ; do \
+        UNSAFE="" ; \
+        if [[ $file =~ 'dist' ]] ; then UNSAFE='--allow-unsafe' ; fi ; \
+        pip-compile --resolver=backtracking -rU --no-header $UNSAFE $file ;\
+        out=`echo $file | sed 's/.in/.txt/'` ; \
+        sed -i -e '1 e head -16 tox.ini' $out ;\
+        done"
 
 
 #######################################################################################
 
 
 #######################################################################################
@@ -102,7 +107,7 @@ commands =
         python3 setup.py --command-packages=stdeb.command sdist_dsc
         sh -c 'cd deb_dist/osm-nbi*/ && dpkg-buildpackage -rfakeroot -uc -us'
         sh -c 'rm osm_nbi/requirements.txt'
         python3 setup.py --command-packages=stdeb.command sdist_dsc
         sh -c 'cd deb_dist/osm-nbi*/ && dpkg-buildpackage -rfakeroot -uc -us'
         sh -c 'rm osm_nbi/requirements.txt'
-whitelist_externals = sh
+allowlist_externals = sh
 
 #######################################################################################
 [flake8]
 
 #######################################################################################
 [flake8]
@@ -112,8 +117,10 @@ ignore =
         W503,
         E123,
         E125,
         W503,
         E123,
         E125,
+        E203,
         E226,
         E226,
-        E241
+        E241,
+        E501
 exclude =
         .git,
         __pycache__,
 exclude =
         .git,
         __pycache__,