Bug 1830 fixed: maps completed operations to original operation types
author    Gabriel Cuba <gcuba@whitestack.com>
Wed, 8 Nov 2023 05:14:33 +0000 (00:14 -0500)
committer    garciadeblas <gerardo.garciadeblas@telefonica.com>
Tue, 30 Jan 2024 14:36:12 +0000 (15:36 +0100)
Change-Id: I1a2f60f183ede39cabd9a7441ae64f10d7557232
Signed-off-by: Gabriel Cuba <gcuba@whitestack.com>
42 files changed:
Dockerfile
attic/run_test.py [new file with mode: 0755]
devops-stages/stage-archive.sh
devops-stages/stage-test.sh
osm_nbi/admin_topics.py
osm_nbi/auth.py
osm_nbi/authconn.py
osm_nbi/authconn_internal.py
osm_nbi/authconn_keystone.py
osm_nbi/base_topic.py
osm_nbi/descriptor_topics.py
osm_nbi/engine.py
osm_nbi/html_out.py
osm_nbi/instance_topics.py
osm_nbi/nbi.cfg
osm_nbi/nbi.py
osm_nbi/notifications.py
osm_nbi/osm_vnfm/base_methods.py
osm_nbi/osm_vnfm/vnf_instance_actions.py
osm_nbi/osm_vnfm/vnf_instances.py
osm_nbi/osm_vnfm/vnf_subscription.py
osm_nbi/subscription_topics.py
osm_nbi/subscriptions.py
osm_nbi/tests/run_test.py [deleted file]
osm_nbi/tests/send_kafka.py [deleted file]
osm_nbi/tests/test_admin_topics.py
osm_nbi/tests/test_descriptor_topics.py
osm_nbi/tests/test_instance_topics.py
osm_nbi/tests/test_osm_vnfm.py
osm_nbi/tests/test_pkg_descriptors.py
osm_nbi/tests/test_pmjobs_topic.py
osm_nbi/tests/upload.py [deleted file]
osm_nbi/utils.py
osm_nbi/validation.py
osm_nbi/vnf_instance_topics.py
pyangbind.patch [new file with mode: 0644]
requirements-dev.txt
requirements-test.in
requirements-test.txt
requirements.in
requirements.txt
tox.ini

diff --git a/Dockerfile b/Dockerfile
index 2a64fdc..c4c9b6c 100644
@@ -21,7 +21,7 @@
 #   devops-stages/stage-build.sh
 #
 
-FROM ubuntu:20.04
+FROM ubuntu:22.04
 
 ARG APT_PROXY
 RUN if [ ! -z $APT_PROXY ] ; then \
@@ -34,13 +34,15 @@ RUN DEBIAN_FRONTEND=noninteractive apt-get update && \
         debhelper \
         dh-python \
         git \
-        python3.8 \
+        python3 \
         python3-all \
-        python3.8-dev \
-        python3-setuptools
+        python3-dev \
+        python3-setuptools \
+        python3-pip \
+        tox
 
-RUN python3 -m easy_install pip==21.3.1
-RUN pip install tox==3.24.5
+ENV LC_ALL C.UTF-8
+ENV LANG C.UTF-8
 
 RUN DEBIAN_FRONTEND=noninteractive apt-get -y install wget
 
diff --git a/attic/run_test.py b/attic/run_test.py
new file mode 100755
index 0000000..b7768ba
--- /dev/null
+++ b/attic/run_test.py
@@ -0,0 +1,5889 @@
+#! /usr/bin/python3
+# -*- coding: utf-8 -*-
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+# implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import getopt
+import sys
+import requests
+import json
+import logging
+import yaml
+
+# import json
+# import tarfile
+from time import sleep
+from random import randint
+import os
+from sys import stderr
+from uuid import uuid4
+import re
+
+__author__ = "Alfonso Tierno, alfonso.tiernosepulveda@telefonica.com"
+__date__ = "$2018-03-01$"
+__version__ = "0.3"
+version_date = "Oct 2018"
+
+
+def usage():
+    print("Usage: ", sys.argv[0], "[options]")
+    print(
+        "      Performs system tests over running NBI. It can be used for real OSM test using option '--test-osm'"
+    )
+    print(
+        "      If this is the case env variables 'OSMNBITEST_VIM_NAME' must be supplied to create a VIM if not exist "
+        "where deployment is done"
+    )
+    print("OPTIONS")
+    print("      -h|--help: shows this help")
+    print("      --insecure: Allows non trusted https NBI server")
+    print("      --list: list available tests")
+    print(
+        "      --manual-check: Deployment tests stop after deployed to allow manual inspection. Only make sense with "
+        "'--test-osm'"
+    )
+    print("      -p|--password PASSWORD: NBI access password. 'admin' by default")
+    print("      ---project PROJECT: NBI access project. 'admin' by default")
+    print(
+        "      --test TEST[,...]: Execute only a test or a comma separated list of tests"
+    )
+    print(
+        "      --params key=val: params to the previous test. key can be vnfd-files, nsd-file, ns-name, ns-config"
+    )
+    print(
+        "      --test-osm: If missing this test is intended for NBI only, no other OSM components are expected. Use "
+        "this flag to test the system. LCM and RO components are expected to be up and running"
+    )
+    print(
+        "      --timeout TIMEOUT: General NBI timeout, by default {}s".format(timeout)
+    )
+    print(
+        "      --timeout-deploy TIMEOUT: Timeout used for getting NS deployed, by default {}s".format(
+            timeout_deploy
+        )
+    )
+    print(
+        "      --timeout-configure TIMEOUT: Timeout used for getting NS deployed and configured,"
+        " by default {}s".format(timeout_configure)
+    )
+    print("      -u|--user USERNAME: NBI access username. 'admin' by default")
+    print(
+        "      --url URL: complete NBI server URL. 'https//localhost:9999/osm' by default"
+    )
+    print("      -v|--verbose print debug information, can be used several times")
+    print("      --no-verbose remove verbosity")
+    print("      --version: prints current version")
+    print("ENV variables used for real deployment tests with option osm-test.")
+    print("      export OSMNBITEST_VIM_NAME=vim-name")
+    print("      export OSMNBITEST_VIM_URL=vim-url")
+    print("      export OSMNBITEST_VIM_TYPE=vim-type")
+    print("      export OSMNBITEST_VIM_TENANT=vim-tenant")
+    print("      export OSMNBITEST_VIM_USER=vim-user")
+    print("      export OSMNBITEST_VIM_PASSWORD=vim-password")
+    print('      export OSMNBITEST_VIM_CONFIG="vim-config"')
+    print('      export OSMNBITEST_NS_NAME="ns-name"')
+    return
+
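+# Example invocation (a sketch only: the URL, credentials and test name below are hypothetical
+# placeholders; the options themselves are the ones documented in usage() above):
+#   python3 run_test.py --url https://localhost:9999/osm -u admin -p admin --project admin \
+#       --insecure --test <TestName> -v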
+
+r_header_json = {"Content-type": "application/json"}
+headers_json = {"Content-type": "application/json", "Accept": "application/json"}
+r_header_yaml = {"Content-type": "application/yaml"}
+headers_yaml = {"Content-type": "application/yaml", "Accept": "application/yaml"}
+r_header_text = {"Content-type": "text/plain"}
+r_header_octect = {"Content-type": "application/octet-stream"}
+headers_text = {"Accept": "text/plain,application/yaml"}
+r_header_zip = {"Content-type": "application/zip"}
+headers_zip = {"Accept": "application/zip,application/yaml"}
+headers_zip_yaml = {"Accept": "application/yaml", "Content-type": "application/zip"}
+headers_zip_json = {"Accept": "application/json", "Content-type": "application/zip"}
+headers_txt_json = {"Accept": "application/json", "Content-type": "text/plain"}
+r_headers_yaml_location_vnfd = {
+    "Location": "/vnfpkgm/v1/vnf_packages_content/",
+    "Content-Type": "application/yaml",
+}
+r_headers_yaml_location_nsd = {
+    "Location": "/nsd/v1/ns_descriptors_content/",
+    "Content-Type": "application/yaml",
+}
+r_headers_yaml_location_nst = {
+    "Location": "/nst/v1/netslice_templates_content",
+    "Content-Type": "application/yaml",
+}
+r_headers_yaml_location_nslcmop = {
+    "Location": "nslcm/v1/ns_lcm_op_occs/",
+    "Content-Type": "application/yaml",
+}
+r_headers_yaml_location_nsilcmop = {
+    "Location": "/osm/nsilcm/v1/nsi_lcm_op_occs/",
+    "Content-Type": "application/yaml",
+}
+
+# tests that require authorization
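+# Each entry is a tuple whose layout, as inferred from the fields below, is:
+#   (test code, description, method, url, request headers, payload,
+#    expected status code, expected response headers, expected payload type)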
+test_authorized_list = (
+    (
+        "AU1",
+        "Invalid vnfd id",
+        "GET",
+        "/vnfpkgm/v1/vnf_packages/non-existing-id",
+        headers_json,
+        None,
+        404,
+        r_header_json,
+        "json",
+    ),
+    (
+        "AU2",
+        "Invalid nsd id",
+        "GET",
+        "/nsd/v1/ns_descriptors/non-existing-id",
+        headers_yaml,
+        None,
+        404,
+        r_header_yaml,
+        "yaml",
+    ),
+    (
+        "AU3",
+        "Invalid nsd id",
+        "DELETE",
+        "/nsd/v1/ns_descriptors_content/non-existing-id",
+        headers_yaml,
+        None,
+        404,
+        r_header_yaml,
+        "yaml",
+    ),
+)
+timeout = 120  # general timeout
+timeout_deploy = 60 * 10  # timeout for NS deploying without charms
+timeout_configure = 60 * 20  # timeout for NS deploying and configuring
+
+
+class TestException(Exception):
+    pass
+
+
+class TestRest:
+    def __init__(
+        self,
+        url_base,
+        header_base=None,
+        verify=False,
+        user="admin",
+        password="admin",
+        project="admin",
+    ):
+        self.url_base = url_base
+        if header_base is None:
+            self.header_base = {}
+        else:
+            self.header_base = header_base.copy()
+        self.s = requests.session()
+        self.s.headers = self.header_base
+        self.verify = verify
+        self.token = False
+        self.user = user
+        self.password = password
+        self.project = project
+        self.vim_id = None
+        # last ID obtained from the Location response header of the previous request
+        self.last_id = ""
+        self.test_name = None
+        self.step = 0  # number of subtest under test
+        self.passed_tests = 0
+        self.failed_tests = 0
+
+    def set_test_name(self, test_name):
+        self.test_name = test_name
+        self.step = 0
+        self.last_id = ""
+
+    def set_header(self, header):
+        self.s.headers.update(header)
+
+    def set_tet_name(self, test_name):
+        self.test_name = test_name
+
+    def unset_header(self, key):
+        if key in self.s.headers:
+            del self.s.headers[key]
+
+    def test(
+        self,
+        description,
+        method,
+        url,
+        headers,
+        payload,
+        expected_codes,
+        expected_headers,
+        expected_payload,
+        store_file=None,
+        pooling=False,
+    ):
+        """
+        Performs an http request and checks the http response code, registering the test as failed if the code is
+        not among the allowed ones. It stores the id returned in the Location header so that the following test can
+        reference it in the URL with '<>'
+        :param description:  description of the test
+        :param method: HTTP method: GET, PUT, POST, DELETE, ...
+        :param url: complete URL or relative URL
+        :param headers: request headers to add to the base headers
+        :param payload: can be a dict (transformed to json), a text, or a file if it starts with '@'
+        :param expected_codes: expected response codes; can be an int, an int tuple or an int range
+        :param expected_headers: expected response headers, dict with key values
+        :param expected_payload: expected payload: 0 if empty, 'yaml', 'json', 'text', 'zip', 'octet-string'
+        :param store_file: filename to store the response content
+        :param pooling: if True, neither count nor log this test (used when polling with many identical requests)
+        :return: requests response
+        """
+        r = None
+        try:
+            if not self.s:
+                self.s = requests.session()
+            # URL
+            if not url:
+                url = self.url_base
+            elif not url.startswith("http"):
+                url = self.url_base + url
+
+            # replace url <> with the last ID
+            url = url.replace("<>", self.last_id)
+            if payload:
+                if isinstance(payload, str):
+                    if payload.startswith("@"):
+                        mode = "r"
+                        file_name = payload[1:]
+                        if payload.startswith("@b"):
+                            mode = "rb"
+                            file_name = payload[2:]
+                        with open(file_name, mode) as f:
+                            payload = f.read()
+                elif isinstance(payload, dict):
+                    payload = json.dumps(payload)
+
+            if not pooling:
+                test_description = "Test {}{} {} {} {}".format(
+                    self.test_name, self.step, description, method, url
+                )
+                logger.warning(test_description)
+                self.step += 1
+            stream = False
+            if expected_payload in ("zip", "octet-string") or store_file:
+                stream = True
+            __retry = 0
+            while True:
+                try:
+                    r = getattr(self.s, method.lower())(
+                        url,
+                        data=payload,
+                        headers=headers,
+                        verify=self.verify,
+                        stream=stream,
+                    )
+                    break
+                except requests.exceptions.ConnectionError as e:
+                    if __retry == 2:
+                        raise
+                    logger.error("Exception {}. Retrying".format(e))
+                    __retry += 1
+
+            if expected_payload in ("zip", "octet-string") or store_file:
+                logger.debug("RX {}".format(r.status_code))
+            else:
+                logger.debug("RX {}: {}".format(r.status_code, r.text))
+
+            # check response
+            if expected_codes:
+                if isinstance(expected_codes, int):
+                    expected_codes = (expected_codes,)
+                if r.status_code not in expected_codes:
+                    raise TestException(
+                        "Got status {}. Expected {}. {}".format(
+                            r.status_code, expected_codes, r.text
+                        )
+                    )
+
+            if expected_headers:
+                for header_key, header_val in expected_headers.items():
+                    if header_key.lower() not in r.headers:
+                        raise TestException("Header {} not present".format(header_key))
+                    if header_val and header_val.lower() not in r.headers[header_key]:
+                        raise TestException(
+                            "Header {} does not contain {} but {}".format(
+                                header_key, header_val, r.headers[header_key]
+                            )
+                        )
+
+            if expected_payload is not None:
+                if expected_payload == 0 and len(r.content) > 0:
+                    raise TestException("Expected empty payload")
+                elif expected_payload == "json":
+                    try:
+                        r.json()
+                    except Exception as e:
+                        raise TestException(
+                            "Expected json response payload, but got Exception {}".format(
+                                e
+                            )
+                        )
+                elif expected_payload == "yaml":
+                    try:
+                        yaml.safe_load(r.text)
+                    except Exception as e:
+                        raise TestException(
+                            "Expected yaml response payload, but got Exception {}".format(
+                                e
+                            )
+                        )
+                elif expected_payload in ("zip", "octet-string"):
+                    if len(r.content) == 0:
+                        raise TestException(
+                            "Expected some response payload, but got empty"
+                        )
+                    # try:
+                    #     tar = tarfile.open(None, 'r:gz', fileobj=r.raw)
+                    #     for tarinfo in tar:
+                    #         tarname = tarinfo.name
+                    #         print(tarname)
+                    # except Exception as e:
+                    #     raise TestException("Expected zip response payload, but got Exception {}".format(e))
+                elif expected_payload == "text":
+                    if len(r.content) == 0:
+                        raise TestException(
+                            "Expected some response payload, but got empty"
+                        )
+                    # r.text
+            if store_file:
+                with open(store_file, "wb") as fd:
+                    for chunk in r.iter_content(chunk_size=128):
+                        fd.write(chunk)
+
+            location = r.headers.get("Location")
+            if location:
+                _id = location[location.rfind("/") + 1 :]
+                if _id:
+                    self.last_id = str(_id)
+            if not pooling:
+                self.passed_tests += 1
+            return r
+        except TestException as e:
+            self.failed_tests += 1
+            r_status_code = None
+            r_text = None
+            if r:
+                r_status_code = r.status_code
+                r_text = r.text
+            logger.error("{} \nRX code{}: {}".format(e, r_status_code, r_text))
+            return None
+            # exit(1)
+        except IOError as e:
+            if store_file:
+                logger.error("Cannot open file {}: {}".format(store_file, e))
+            else:
+                logger.error("Exception: {}".format(e), exc_info=True)
+            self.failed_tests += 1
+            return None
+            # exit(1)
+        except requests.exceptions.RequestException as e:
+            logger.error("Exception: {}".format(e))
+
+    def get_autorization(self):  # user=None, password=None, project=None):
+        if (
+            self.token
+        ):  # and self.user == user and self.password == password and self.project == project:
+            return
+        # self.user = user
+        # self.password = password
+        # self.project = project
+        r = self.test(
+            "Obtain token",
+            "POST",
+            "/admin/v1/tokens",
+            headers_json,
+            {
+                "username": self.user,
+                "password": self.password,
+                "project_id": self.project,
+            },
+            (200, 201),
+            r_header_json,
+            "json",
+        )
+        if not r:
+            return
+        response = r.json()
+        self.token = response["id"]
+        self.set_header({"Authorization": "Bearer {}".format(self.token)})
+
+    def remove_authorization(self):
+        if self.token:
+            self.test(
+                "Delete token",
+                "DELETE",
+                "/admin/v1/tokens/{}".format(self.token),
+                headers_json,
+                None,
+                (200, 201, 204),
+                None,
+                None,
+            )
+        self.token = None
+        self.unset_header("Authorization")
+
+    def get_create_vim(self, test_osm):
+        if self.vim_id:
+            return self.vim_id
+        self.get_autorization()
+        if test_osm:
+            vim_name = os.environ.get("OSMNBITEST_VIM_NAME")
+            if not vim_name:
+                raise TestException(
+                    "Needed to define OSMNBITEST_VIM_XXX variables to create a real VIM for deployment"
+                )
+        else:
+            vim_name = "fakeVim"
+        # Get VIM
+        r = self.test(
+            "Get VIM ID",
+            "GET",
+            "/admin/v1/vim_accounts?name={}".format(vim_name),
+            headers_json,
+            None,
+            200,
+            r_header_json,
+            "json",
+        )
+        if not r:
+            return
+        vims = r.json()
+        if vims:
+            return vims[0]["_id"]
+        # Add VIM
+        if test_osm:
+            # check needed environ parameters:
+            if not os.environ.get("OSMNBITEST_VIM_URL") or not os.environ.get(
+                "OSMNBITEST_VIM_TENANT"
+            ):
+                raise TestException(
+                    "Env OSMNBITEST_VIM_URL and OSMNBITEST_VIM_TENANT are needed for create a real VIM"
+                    " to deploy on whit the --test-osm option"
+                )
+            vim_data = (
+                "{{schema_version: '1.0', name: '{}', vim_type: {}, vim_url: '{}',"
+                "vim_tenant_name: '{}', "
+                "vim_user: {}, vim_password: {}"
+            ).format(
+                vim_name,
+                os.environ.get("OSMNBITEST_VIM_TYPE", "openstack"),
+                os.environ.get("OSMNBITEST_VIM_URL"),
+                os.environ.get("OSMNBITEST_VIM_TENANT"),
+                os.environ.get("OSMNBITEST_VIM_USER"),
+                os.environ.get("OSMNBITEST_VIM_PASSWORD"),
+            )
+            if os.environ.get("OSMNBITEST_VIM_CONFIG"):
+                vim_data += " ,config: {}".format(
+                    os.environ.get("OSMNBITEST_VIM_CONFIG")
+                )
+            vim_data += "}"
+        else:
+            vim_data = (
+                "{schema_version: '1.0', name: fakeVim, vim_type: openstack, vim_url: 'http://10.11.12.13/fake'"
+                ", vim_tenant_name: 'vimtenant', vim_user: vimuser, vim_password: vimpassword}"
+            )
+        self.test(
+            "Create VIM",
+            "POST",
+            "/admin/v1/vim_accounts",
+            headers_yaml,
+            vim_data,
+            (201, 202),
+            {"Location": "/admin/v1/vim_accounts/", "Content-Type": "application/yaml"},
+            "yaml",
+        )
+        return self.last_id
+
+    def print_results(self):
+        print("\n\n\n--------------------------------------------")
+        print(
+            "TEST RESULTS: Total: {}, Passed: {}, Failed: {}".format(
+                self.passed_tests + self.failed_tests,
+                self.passed_tests,
+                self.failed_tests,
+            )
+        )
+        print("--------------------------------------------")
+
+    def wait_until_delete(self, url_op, timeout_delete):
+        """
+        Poll the topic until it is no longer present (i.e. it has been deleted)
+        :param url_op:
+        :param timeout_delete:
+        :return:
+        """
+        description = "Wait to topic being deleted"
+        test_description = "Test {}{} {} {} {}".format(
+            self.test_name, self.step, description, "GET", url_op
+        )
+        logger.warning(test_description)
+        self.step += 1
+
+        wait = timeout_delete
+        while wait >= 0:
+            r = self.test(
+                description,
+                "GET",
+                url_op,
+                headers_yaml,
+                None,
+                (200, 404),
+                r_header_yaml,
+                "yaml",
+                pooling=True,
+            )
+            if not r:
+                return
+            if r.status_code == 404:
+                self.passed_tests += 1
+                break
+            elif r.status_code == 200:
+                wait -= 5
+                sleep(5)
+        else:
+            self.failed_tests += 1
+            raise TestException(
+                "Topic is not deleted after {} seconds".format(timeout_delete)
+            )
+
+    def wait_operation_ready(self, ns_nsi, opp_id, timeout, expected_fail=False):
+        """
+        Wait until the nslcmop or nsilcmop operation finishes
+        :param ns_nsi: "ns" or "nsi"
+        :param opp_id: Id of the operation
+        :param timeout:
+        :param expected_fail:
+        :return: None. Updates passed/failed_tests
+        """
+        if ns_nsi == "ns":
+            url_op = "/nslcm/v1/ns_lcm_op_occs/{}".format(opp_id)
+        else:
+            url_op = "/nsilcm/v1/nsi_lcm_op_occs/{}".format(opp_id)
+        description = "Wait to {} lcm operation complete".format(ns_nsi)
+        test_description = "Test {}{} {} {} {}".format(
+            self.test_name, self.step, description, "GET", url_op
+        )
+        logger.warning(test_description)
+        self.step += 1
+        wait = timeout
+        while wait >= 0:
+            r = self.test(
+                description,
+                "GET",
+                url_op,
+                headers_json,
+                None,
+                200,
+                r_header_json,
+                "json",
+                pooling=True,
+            )
+            if not r:
+                return
+            nslcmop = r.json()
+            if "COMPLETED" in nslcmop["operationState"]:
+                if expected_fail:
+                    logger.error(
+                        "NS terminate has success, expecting failing: {}".format(
+                            nslcmop["detailed-status"]
+                        )
+                    )
+                    self.failed_tests += 1
+                else:
+                    self.passed_tests += 1
+                break
+            elif "FAILED" in nslcmop["operationState"]:
+                if not expected_fail:
+                    logger.error(
+                        "NS terminate has failed: {}".format(nslcmop["detailed-status"])
+                    )
+                    self.failed_tests += 1
+                else:
+                    self.passed_tests += 1
+                break
+
+            print(".", end="", file=stderr)
+            wait -= 10
+            sleep(10)
+        else:
+            self.failed_tests += 1
+            logger.error(
+                "NS instantiate is not terminate after {} seconds".format(timeout)
+            )
+            return
+        print("", file=stderr)
+
+
+class TestNonAuthorized:
+    description = "Test invalid URLs. methods and no authorization"
+
+    @staticmethod
+    def run(engine, test_osm, manual_check, test_params=None):
+        engine.set_test_name("NonAuth")
+        engine.remove_authorization()
+        test_not_authorized_list = (
+            (
+                "Invalid token",
+                "GET",
+                "/admin/v1/users",
+                headers_json,
+                None,
+                401,
+                r_header_json,
+                "json",
+            ),
+            (
+                "Invalid URL",
+                "POST",
+                "/admin/v1/nonexist",
+                headers_yaml,
+                None,
+                405,
+                r_header_yaml,
+                "yaml",
+            ),
+            (
+                "Invalid version",
+                "DELETE",
+                "/admin/v2/users",
+                headers_yaml,
+                None,
+                405,
+                r_header_yaml,
+                "yaml",
+            ),
+        )
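+        # Each tuple provides the positional arguments of engine.test():
+        #   description, method, url, headers, payload, expected status code,
+        #   expected response headers, expected payload type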
+        for t in test_not_authorized_list:
+            engine.test(*t)
+
+
+class TestUsersProjects:
+    description = "test project and user creation"
+
+    @staticmethod
+    def run(engine, test_osm, manual_check, test_params=None):
+        engine.set_test_name("UserProject")
+        # backend = test_params.get("backend") if test_params else None   # UNUSED
+
+        # Initialisation
+        p1 = p2 = p3 = None
+        padmin = pbad = None
+        u1 = u2 = u3 = u4 = None
+
+        engine.get_autorization()
+
+        res = engine.test(
+            "Create project non admin 1",
+            "POST",
+            "/admin/v1/projects",
+            headers_json,
+            {"name": "P1"},
+            (201, 204),
+            {"Location": "/admin/v1/projects/", "Content-Type": "application/json"},
+            "json",
+        )
+        p1 = engine.last_id if res else None
+
+        res = engine.test(
+            "Create project admin",
+            "POST",
+            "/admin/v1/projects",
+            headers_json,
+            {"name": "Padmin", "admin": True},
+            (201, 204),
+            {"Location": "/admin/v1/projects/", "Content-Type": "application/json"},
+            "json",
+        )
+        padmin = engine.last_id if res else None
+
+        res = engine.test(
+            "Create project bad format",
+            "POST",
+            "/admin/v1/projects",
+            headers_json,
+            {"name": 1},
+            (400, 422),
+            r_header_json,
+            "json",
+        )
+        pbad = engine.last_id if res else None
+
+        res = engine.test(
+            "Get project admin role",
+            "GET",
+            "/admin/v1/roles?name=project_admin",
+            headers_json,
+            {},
+            (200),
+            {"Content-Type": "application/json"},
+            "json",
+        )
+        rpa = res.json()[0]["_id"] if res else None
+        res = engine.test(
+            "Get project user role",
+            "GET",
+            "/admin/v1/roles?name=project_user",
+            headers_json,
+            {},
+            (200),
+            {"Content-Type": "application/json"},
+            "json",
+        )
+        rpu = res.json()[0]["_id"] if res else None
+        res = engine.test(
+            "Get system admin role",
+            "GET",
+            "/admin/v1/roles?name=system_admin",
+            headers_json,
+            {},
+            (200),
+            {"Content-Type": "application/json"},
+            "json",
+        )
+        rsa = res.json()[0]["_id"] if res else None
+
+        data = {"username": "U1", "password": "pw1"}
+        p2 = uuid4().hex
+        data["project_role_mappings"] = [
+            {"project": p1, "role": rpa},
+            {"project": p2, "role": rpa},
+            {"project": padmin, "role": rpu},
+        ]
+        rc = 201
+        xhd = {"Location": "/admin/v1/users/", "Content-Type": "application/json"}
+        res = engine.test(
+            "Create user with bad project and force",
+            "POST",
+            "/admin/v1/users?FORCE=True",
+            headers_json,
+            data,
+            rc,
+            xhd,
+            "json",
+        )
+        if res:
+            u1 = engine.last_id
+        else:
+            # User is created sometimes even though an exception is raised
+            res = engine.test(
+                "Get user U1",
+                "GET",
+                "/admin/v1/users?username=U1",
+                headers_json,
+                {},
+                (200),
+                {"Content-Type": "application/json"},
+                "json",
+            )
+            u1 = res.json()[0]["_id"] if res else None
+
+        data = {"username": "U2", "password": "pw2"}
+        data["project_role_mappings"] = [
+            {"project": p1, "role": rpa},
+            {"project": padmin, "role": rsa},
+        ]
+        res = engine.test(
+            "Create user 2",
+            "POST",
+            "/admin/v1/users",
+            headers_json,
+            data,
+            201,
+            {"Location": "/admin/v1/users/", "Content-Type": "application/json"},
+            "json",
+        )
+        u2 = engine.last_id if res else None
+
+        if u1:
+            ftt = "project_role_mappings"
+            xpr = [{"project": p1, "role": rpa}, {"project": padmin, "role": rpu}]
+            data = {ftt: xpr}
+            engine.test(
+                "Edit user U1, delete  P2 project",
+                "PATCH",
+                "/admin/v1/users/" + u1,
+                headers_json,
+                data,
+                204,
+                None,
+                None,
+            )
+            res = engine.test(
+                "Check user U1, contains the right projects",
+                "GET",
+                "/admin/v1/users/" + u1,
+                headers_json,
+                None,
+                200,
+                None,
+                "json",
+            )
+            if res:
+                rj = res.json()
+                xpr[0]["project_name"] = "P1"
+                xpr[0]["role_name"] = "project_admin"
+                xpr[1]["project_name"] = "Padmin"
+                xpr[1]["role_name"] = "project_user"
+                ok = True
+                for pr in rj[ftt]:
+                    if pr not in xpr:
+                        ok = False
+                for pr in xpr:
+                    if pr not in rj[ftt]:
+                        ok = False
+                if not ok:
+                    logger.error(
+                        "User {} '{}' are different than expected '{}'. Edition was not done properly".format(
+                            ftt, rj[ftt], xpr
+                        )
+                    )
+                    engine.failed_tests += 1
+
+        p2 = None  # To prevent deletion attempts
+
+        # Add a test of 'default project' for Keystone?
+
+        if u2:
+            engine.test(
+                "Edit user U2, change password",
+                "PUT",
+                "/admin/v1/users/" + u2,
+                headers_json,
+                {"password": "pw2_new"},
+                204,
+                None,
+                None,
+            )
+
+        if p1:
+            engine.test(
+                "Change to project P1 non existing",
+                "POST",
+                "/admin/v1/tokens/",
+                headers_json,
+                {"project_id": p1},
+                401,
+                r_header_json,
+                "json",
+            )
+
+        if u2 and p1:
+            res = engine.test(
+                "Change to user U2 project P1",
+                "POST",
+                "/admin/v1/tokens",
+                headers_json,
+                {"username": "U2", "password": "pw2_new", "project_id": "P1"},
+                (200, 201),
+                r_header_json,
+                "json",
+            )
+            if res:
+                rj = res.json()
+                engine.set_header({"Authorization": "Bearer {}".format(rj["id"])})
+
+                engine.test(
+                    "Edit user projects non admin",
+                    "PUT",
+                    "/admin/v1/users/U1",
+                    headers_json,
+                    {"remove_project_role_mappings": [{"project": "P1", "role": None}]},
+                    401,
+                    r_header_json,
+                    "json",
+                )
+
+                res = engine.test(
+                    "Add new project non admin",
+                    "POST",
+                    "/admin/v1/projects",
+                    headers_json,
+                    {"name": "P2"},
+                    401,
+                    r_header_json,
+                    "json",
+                )
+                if res is None or res.status_code == 201:
+                    # The project has been created even though it shouldn't
+                    res = engine.test(
+                        "Get project P2",
+                        "GET",
+                        "/admin/v1/projects/P2",
+                        headers_json,
+                        None,
+                        200,
+                        r_header_json,
+                        "json",
+                    )
+                    p2 = res.json()["_id"] if res else None
+
+                if p1:
+                    data = {"username": "U3", "password": "pw3"}
+                    data["project_role_mappings"] = [{"project": p1, "role": rpu}]
+                    res = engine.test(
+                        "Add new user non admin",
+                        "POST",
+                        "/admin/v1/users",
+                        headers_json,
+                        data,
+                        401,
+                        r_header_json,
+                        "json",
+                    )
+                    if res is None or res.status_code == 201:
+                        # The user has been created even though it shouldn't
+                        res = engine.test(
+                            "Get user U3",
+                            "GET",
+                            "/admin/v1/users/U3",
+                            headers_json,
+                            None,
+                            200,
+                            r_header_json,
+                            "json",
+                        )
+                        u3 = res.json()["_id"] if res else None
+                else:
+                    u3 = None
+
+                if padmin:
+                    res = engine.test(
+                        "Change to user U2 project Padmin",
+                        "POST",
+                        "/admin/v1/tokens",
+                        headers_json,
+                        {
+                            "project_id": "Padmin"
+                        },  # Caused a Keystone authentication error
+                        # {"username": "U2", "password": "pw2_new", "project_id": "Padmin"},
+                        (200, 201),
+                        r_header_json,
+                        "json",
+                    )
+                    if res:
+                        rj = res.json()
+                        engine.set_header(
+                            {"Authorization": "Bearer {}".format(rj["id"])}
+                        )
+
+                        res = engine.test(
+                            "Add new project admin",
+                            "POST",
+                            "/admin/v1/projects",
+                            headers_json,
+                            {"name": "P3"},
+                            (201, 204),
+                            {
+                                "Location": "/admin/v1/projects/",
+                                "Content-Type": "application/json",
+                            },
+                            "json",
+                        )
+                        p3 = engine.last_id if res else None
+
+                        if p1:
+                            data = {"username": "U4", "password": "pw4"}
+                            data["project_role_mappings"] = [
+                                {"project": p1, "role": rpa}
+                            ]
+                            res = engine.test(
+                                "Add new user admin",
+                                "POST",
+                                "/admin/v1/users",
+                                headers_json,
+                                data,
+                                (201, 204),
+                                {
+                                    "Location": "/admin/v1/users/",
+                                    "Content-Type": "application/json",
+                                },
+                                "json",
+                            )
+                            u4 = engine.last_id if res else None
+                        else:
+                            u4 = None
+
+                        if u4 and p3:
+                            data = {
+                                "project_role_mappings": [{"project": p3, "role": rpa}]
+                            }
+                            engine.test(
+                                "Edit user projects admin",
+                                "PUT",
+                                "/admin/v1/users/U4",
+                                headers_json,
+                                data,
+                                204,
+                                None,
+                                None,
+                            )
+                            # Project is deleted even though it shouldn't - PROVISIONAL?
+                            res = engine.test(
+                                "Delete project P3 conflict",
+                                "DELETE",
+                                "/admin/v1/projects/" + p3,
+                                headers_json,
+                                None,
+                                409,
+                                None,
+                                None,
+                            )
+                            if res and res.status_code in (200, 204):
+                                p3 = None
+                            if p3:
+                                res = engine.test(
+                                    "Delete project P3 forcing",
+                                    "DELETE",
+                                    "/admin/v1/projects/" + p3 + "?FORCE=True",
+                                    headers_json,
+                                    None,
+                                    204,
+                                    None,
+                                    None,
+                                )
+                                if res and res.status_code in (200, 204):
+                                    p3 = None
+
+                        if u2:
+                            res = engine.test(
+                                "Delete user U2. Conflict deleting own user",
+                                "DELETE",
+                                "/admin/v1/users/" + u2,
+                                headers_json,
+                                None,
+                                409,
+                                r_header_json,
+                                "json",
+                            )
+                            if res is None or res.status_code in (200, 204):
+                                u2 = None
+                        if u4:
+                            res = engine.test(
+                                "Delete user U4",
+                                "DELETE",
+                                "/admin/v1/users/" + u4,
+                                headers_json,
+                                None,
+                                204,
+                                None,
+                                None,
+                            )
+                            if res and res.status_code in (200, 204):
+                                u4 = None
+                        if p3:
+                            res = engine.test(
+                                "Delete project P3",
+                                "DELETE",
+                                "/admin/v1/projects/" + p3,
+                                headers_json,
+                                None,
+                                204,
+                                None,
+                                None,
+                            )
+                            if res and res.status_code in (200, 204):
+                                p3 = None
+
+                if u3:
+                    res = engine.test(
+                        "Delete user U3",
+                        "DELETE",
+                        "/admin/v1/users/" + u3,
+                        headers_json,
+                        None,
+                        204,
+                        None,
+                        None,
+                    )
+                    if res:
+                        u3 = None
+
+        # change to admin
+        engine.remove_authorization()  # To force get authorization
+        engine.get_autorization()
+        if u1:
+            engine.test(
+                "Delete user U1",
+                "DELETE",
+                "/admin/v1/users/" + u1,
+                headers_json,
+                None,
+                204,
+                None,
+                None,
+            )
+        if u2:
+            engine.test(
+                "Delete user U2",
+                "DELETE",
+                "/admin/v1/users/" + u2,
+                headers_json,
+                None,
+                204,
+                None,
+                None,
+            )
+        if u3:
+            engine.test(
+                "Delete user U3",
+                "DELETE",
+                "/admin/v1/users/" + u3,
+                headers_json,
+                None,
+                204,
+                None,
+                None,
+            )
+        if u4:
+            engine.test(
+                "Delete user U4",
+                "DELETE",
+                "/admin/v1/users/" + u4,
+                headers_json,
+                None,
+                204,
+                None,
+                None,
+            )
+        if p1:
+            engine.test(
+                "Delete project P1",
+                "DELETE",
+                "/admin/v1/projects/" + p1,
+                headers_json,
+                None,
+                204,
+                None,
+                None,
+            )
+        if p2:
+            engine.test(
+                "Delete project P2",
+                "DELETE",
+                "/admin/v1/projects/" + p2,
+                headers_json,
+                None,
+                204,
+                None,
+                None,
+            )
+        if p3:
+            engine.test(
+                "Delete project P3",
+                "DELETE",
+                "/admin/v1/projects/" + p3,
+                headers_json,
+                None,
+                204,
+                None,
+                None,
+            )
+        if padmin:
+            engine.test(
+                "Delete project Padmin",
+                "DELETE",
+                "/admin/v1/projects/" + padmin,
+                headers_json,
+                None,
+                204,
+                None,
+                None,
+            )
+        if pbad:
+            engine.test(
+                "Delete bad project",
+                "DELETE",
+                "/admin/v1/projects/" + pbad,
+                headers_json,
+                None,
+                204,
+                None,
+                None,
+            )
+
+        # BEGIN New Tests - Addressing Projects/Users by Name/ID
+        pid1 = pid2 = None
+        uid1 = uid2 = None
+        res = engine.test(
+            "Create new project P1",
+            "POST",
+            "/admin/v1/projects",
+            headers_json,
+            {"name": "P1"},
+            201,
+            {"Location": "/admin/v1/projects/", "Content-Type": "application/json"},
+            "json",
+        )
+        if res:
+            pid1 = res.json()["id"]
+            # print("# pid =", pid1)
+        res = engine.test(
+            "Create new project P2",
+            "POST",
+            "/admin/v1/projects",
+            headers_json,
+            {"name": "P2"},
+            201,
+            {"Location": "/admin/v1/projects/", "Content-Type": "application/json"},
+            "json",
+        )
+        if res:
+            pid2 = res.json()["id"]
+            # print("# pid =", pid2)
+        data = {"username": "U1", "password": "pw1"}
+        data["project_role_mappings"] = [{"project": pid1, "role": rpu}]
+        res = engine.test(
+            "Create new user U1",
+            "POST",
+            "/admin/v1/users",
+            headers_json,
+            data,
+            201,
+            {"Location": "/admin/v1/users/", "Content-Type": "application/json"},
+            "json",
+        )
+        if res:
+            uid1 = res.json()["id"]
+            # print("# uid =", uid1)
+        data = {"username": "U2", "password": "pw2"}
+        data["project_role_mappings"] = [{"project": pid2, "role": rpu}]
+        res = engine.test(
+            "Create new user U2",
+            "POST",
+            "/admin/v1/users",
+            headers_json,
+            data,
+            201,
+            {"Location": "/admin/v1/users/", "Content-Type": "application/json"},
+            "json",
+        )
+        if res:
+            uid2 = res.json()["id"]
+            # print("# uid =", uid2)
+        if pid1:
+            engine.test(
+                "Get Project P1 by Name",
+                "GET",
+                "/admin/v1/projects/P1",
+                headers_json,
+                None,
+                200,
+                None,
+                "json",
+            )
+            engine.test(
+                "Get Project P1 by ID",
+                "GET",
+                "/admin/v1/projects/" + pid1,
+                headers_json,
+                None,
+                200,
+                None,
+                "json",
+            )
+        if uid1:
+            engine.test(
+                "Get User U1 by Name",
+                "GET",
+                "/admin/v1/users/U1",
+                headers_json,
+                None,
+                200,
+                None,
+                "json",
+            )
+            engine.test(
+                "Get User U1 by ID",
+                "GET",
+                "/admin/v1/users/" + uid1,
+                headers_json,
+                None,
+                200,
+                None,
+                "json",
+            )
+        if pid1:
+            res = engine.test(
+                "Rename Project P1 by Name",
+                "PUT",
+                "/admin/v1/projects/P1",
+                headers_json,
+                {"name": "P3"},
+                204,
+                None,
+                None,
+            )
+            if res:
+                engine.test(
+                    "Get Project P1 by new Name",
+                    "GET",
+                    "/admin/v1/projects/P3",
+                    headers_json,
+                    None,
+                    200,
+                    None,
+                    "json",
+                )
+        if pid2:
+            res = engine.test(
+                "Rename Project P2 by ID",
+                "PUT",
+                "/admin/v1/projects/" + pid2,
+                headers_json,
+                {"name": "P4"},
+                204,
+                None,
+                None,
+            )
+            if res:
+                engine.test(
+                    "Get Project P2 by new Name",
+                    "GET",
+                    "/admin/v1/projects/P4",
+                    headers_json,
+                    None,
+                    200,
+                    None,
+                    "json",
+                )
+
+        if uid1:
+            res = engine.test(
+                "Rename User U1 by Name",
+                "PUT",
+                "/admin/v1/users/U1",
+                headers_json,
+                {"username": "U3"},
+                204,
+                None,
+                None,
+            )
+            if res:
+                engine.test(
+                    "Get User U1 by new Name",
+                    "GET",
+                    "/admin/v1/users/U3",
+                    headers_json,
+                    None,
+                    200,
+                    None,
+                    "json",
+                )
+
+        if uid2:
+            res = engine.test(
+                "Rename User U2 by ID",
+                "PUT",
+                "/admin/v1/users/" + uid2,
+                headers_json,
+                {"username": "U4"},
+                204,
+                None,
+                None,
+            )
+            if res:
+                engine.test(
+                    "Get User U2 by new Name",
+                    "GET",
+                    "/admin/v1/users/U4",
+                    headers_json,
+                    None,
+                    200,
+                    None,
+                    "json",
+                )
+        if uid1:
+            res = engine.test(
+                "Delete User U1 by Name",
+                "DELETE",
+                "/admin/v1/users/U3",
+                headers_json,
+                None,
+                204,
+                None,
+                None,
+            )
+            if res:
+                uid1 = None
+
+        if uid2:
+            res = engine.test(
+                "Delete User U2 by ID",
+                "DELETE",
+                "/admin/v1/users/" + uid2,
+                headers_json,
+                None,
+                204,
+                None,
+                None,
+            )
+            if res:
+                uid2 = None
+
+        if pid1:
+            res = engine.test(
+                "Delete Project P1 by Name",
+                "DELETE",
+                "/admin/v1/projects/P3",
+                headers_json,
+                None,
+                204,
+                None,
+                None,
+            )
+            if res:
+                pid1 = None
+
+        if pid2:
+            res = engine.test(
+                "Delete Project P2 by ID",
+                "DELETE",
+                "/admin/v1/projects/" + pid2,
+                headers_json,
+                None,
+                204,
+                None,
+                None,
+            )
+            if res:
+                pid2 = None
+
+        # END New Tests - Addressing Projects/Users by Name
+
+        # CLEANUP
+        if pid1:
+            engine.test(
+                "Delete Project P1",
+                "DELETE",
+                "/admin/v1/projects/" + pid1,
+                headers_json,
+                None,
+                204,
+                None,
+                None,
+            )
+        if pid2:
+            engine.test(
+                "Delete Project P2",
+                "DELETE",
+                "/admin/v1/projects/" + pid2,
+                headers_json,
+                None,
+                204,
+                None,
+                None,
+            )
+        if uid1:
+            engine.test(
+                "Delete User U1",
+                "DELETE",
+                "/admin/v1/users/" + uid1,
+                headers_json,
+                None,
+                204,
+                None,
+                None,
+            )
+        if uid2:
+            engine.test(
+                "Delete User U2",
+                "DELETE",
+                "/admin/v1/users/" + uid2,
+                headers_json,
+                None,
+                204,
+                None,
+                None,
+            )
+
+        engine.remove_authorization()  # To finish
+
+
+class TestProjectsDescriptors:
+    description = "test descriptors visibility among projects"
+
+    @staticmethod
+    def run(engine, test_osm, manual_check, test_params=None):
+        vnfd_ids = []
+        engine.set_test_name("ProjectDescriptors")
+        engine.get_autorization()
+
+        project_admin_id = None
+        res = engine.test(
+            "Get my project Padmin",
+            "GET",
+            "/admin/v1/projects/{}".format(engine.project),
+            headers_json,
+            None,
+            200,
+            r_header_json,
+            "json",
+        )
+        if res:
+            response = res.json()
+            project_admin_id = response["_id"]
+        engine.test(
+            "Create project Padmin",
+            "POST",
+            "/admin/v1/projects",
+            headers_json,
+            {"name": "Padmin", "admin": True},
+            (201, 204),
+            {"Location": "/admin/v1/projects/", "Content-Type": "application/json"},
+            "json",
+        )
+        engine.test(
+            "Create project P2",
+            "POST",
+            "/admin/v1/projects",
+            headers_json,
+            {"name": "P2"},
+            (201, 204),
+            {"Location": "/admin/v1/projects/", "Content-Type": "application/json"},
+            "json",
+        )
+        engine.test(
+            "Create project P3",
+            "POST",
+            "/admin/v1/projects",
+            headers_json,
+            {"name": "P3"},
+            (201, 204),
+            {"Location": "/admin/v1/projects/", "Content-Type": "application/json"},
+            "json",
+        )
+
+        engine.test(
+            "Create user U1",
+            "POST",
+            "/admin/v1/users",
+            headers_json,
+            {
+                "username": "U1",
+                "password": "pw1",
+                "project_role_mappings": [
+                    {"project": "Padmin", "role": "system_admin"},
+                    {"project": "P2", "role": "project_admin"},
+                    {"project": "P3", "role": "project_admin"},
+                ],
+            },
+            201,
+            {"Location": "/admin/v1/users/", "Content-Type": "application/json"},
+            "json",
+        )
+
+        engine.test(
+            "Onboard VNFD id1",
+            "POST",
+            "/vnfpkgm/v1/vnf_packages_content?id=id1",
+            headers_yaml,
+            TestDescriptors.vnfd_empty,
+            201,
+            r_headers_yaml_location_vnfd,
+            "yaml",
+        )
+        vnfd_ids.append(engine.last_id)
+        engine.test(
+            "Onboard VNFD id2 PUBLIC",
+            "POST",
+            "/vnfpkgm/v1/vnf_packages_content?id=id2&PUBLIC=TRUE",
+            headers_yaml,
+            TestDescriptors.vnfd_empty,
+            201,
+            r_headers_yaml_location_vnfd,
+            "yaml",
+        )
+        vnfd_ids.append(engine.last_id)
+        engine.test(
+            "Onboard VNFD id3",
+            "POST",
+            "/vnfpkgm/v1/vnf_packages_content?id=id3&PUBLIC=FALSE",
+            headers_yaml,
+            TestDescriptors.vnfd_empty,
+            201,
+            r_headers_yaml_location_vnfd,
+            "yaml",
+        )
+        vnfd_ids.append(engine.last_id)
+
+        res = engine.test(
+            "Get VNFD descriptors",
+            "GET",
+            "/vnfpkgm/v1/vnf_packages?id=id1,id2,id3",
+            headers_json,
+            None,
+            200,
+            r_header_json,
+            "json",
+        )
+        response = res.json()
+        if len(response) != 3:
+            logger.error(
+                "Only 3 vnfds should be present for project admin. {} listed".format(
+                    len(response)
+                )
+            )
+            engine.failed_tests += 1
+
+        # Change to other project Padmin
+        res = engine.test(
+            "Change to user U1 project Padmin",
+            "POST",
+            "/admin/v1/tokens",
+            headers_json,
+            {"username": "U1", "password": "pw1", "project_id": "Padmin"},
+            (200, 201),
+            r_header_json,
+            "json",
+        )
+        if res:
+            response = res.json()
+            engine.set_header({"Authorization": "Bearer {}".format(response["id"])})
+
+        # list vnfds
+        res = engine.test(
+            "List VNFD descriptors for Padmin",
+            "GET",
+            "/vnfpkgm/v1/vnf_packages",
+            headers_json,
+            None,
+            200,
+            r_header_json,
+            "json",
+        )
+        response = res.json()
+        if len(response) != 0:
+            logger.error(
+                "Only 0 vnfds should be present for project Padmin. {} listed".format(
+                    len(response)
+                )
+            )
+            engine.failed_tests += 1
+
+        # list Public vnfds
+        res = engine.test(
+            "List VNFD public descriptors",
+            "GET",
+            "/vnfpkgm/v1/vnf_packages?PUBLIC=True",
+            headers_json,
+            None,
+            200,
+            r_header_json,
+            "json",
+        )
+        response = res.json()
+        if len(response) != 1:
+            logger.error(
+                "Only 1 vnfds should be present for project Padmin. {} listed".format(
+                    len(response)
+                )
+            )
+            engine.failed_tests += 1
+
+        # list vnfds belonging to project "admin"
+        res = engine.test(
+            "List VNFD of admin project",
+            "GET",
+            "/vnfpkgm/v1/vnf_packages?ADMIN={}".format(project_admin_id),
+            headers_json,
+            None,
+            200,
+            r_header_json,
+            "json",
+        )
+        if res:
+            response = res.json()
+            if len(response) != 3:
+                logger.error(
+                    "Only 3 vnfds should be present for project Padmin. {} listed".format(
+                        len(response)
+                    )
+                )
+                engine.failed_tests += 1
+
+        # Get Public vnfds
+        engine.test(
+            "Get VNFD public descriptors",
+            "GET",
+            "/vnfpkgm/v1/vnf_packages/{}".format(vnfd_ids[1]),
+            headers_json,
+            None,
+            200,
+            r_header_json,
+            "json",
+        )
+        # Edit not owned vnfd
+        engine.test(
+            "Edit VNFD ",
+            "PATCH",
+            "/vnfpkgm/v1/vnf_packages/{}".format(vnfd_ids[0]),
+            headers_yaml,
+            "{name: pepe}",
+            404,
+            r_header_yaml,
+            "yaml",
+        )
+
+        # Add to my catalog
+        engine.test(
+            "Add VNFD id2 to my catalog",
+            "PATCH",
+            "/vnfpkgm/v1/vnf_packages/{}?SET_PROJECT".format(vnfd_ids[1]),
+            headers_json,
+            None,
+            204,
+            None,
+            0,
+        )
+
+        # Add a new vnfd
+        engine.test(
+            "Onboard VNFD id4",
+            "POST",
+            "/vnfpkgm/v1/vnf_packages_content?id=id4",
+            headers_yaml,
+            TestDescriptors.vnfd_empty,
+            201,
+            r_headers_yaml_location_vnfd,
+            "yaml",
+        )
+        vnfd_ids.append(engine.last_id)
+
+        # list vnfds
+        res = engine.test(
+            "List VNFD public descriptors",
+            "GET",
+            "/vnfpkgm/v1/vnf_packages",
+            headers_json,
+            None,
+            200,
+            r_header_json,
+            "json",
+        )
+        response = res.json()
+        if len(response) != 2:
+            logger.error(
+                "Only 2 vnfds should be present for project Padmin. {} listed".format(
+                    len(response)
+                )
+            )
+            engine.failed_tests += 1
+
+        if manual_check:
+            input(
+                "VNFDs have been omboarded. Perform manual check and press enter to resume"
+            )
+
+        engine.test(
+            "Delete VNFD id2",
+            "DELETE",
+            "/vnfpkgm/v1/vnf_packages/{}".format(vnfd_ids[1]),
+            headers_yaml,
+            None,
+            204,
+            None,
+            0,
+        )
+
+        # change to admin project
+        engine.remove_authorization()  # To force get authorization
+        engine.get_autorization()
+        engine.test(
+            "Delete VNFD id1",
+            "DELETE",
+            "/vnfpkgm/v1/vnf_packages/{}".format(vnfd_ids[0]),
+            headers_yaml,
+            None,
+            204,
+            None,
+            0,
+        )
+        engine.test(
+            "Delete VNFD id2",
+            "DELETE",
+            "/vnfpkgm/v1/vnf_packages/{}".format(vnfd_ids[1]),
+            headers_yaml,
+            None,
+            204,
+            None,
+            0,
+        )
+        engine.test(
+            "Delete VNFD id3",
+            "DELETE",
+            "/vnfpkgm/v1/vnf_packages/{}".format(vnfd_ids[2]),
+            headers_yaml,
+            None,
+            204,
+            None,
+            0,
+        )
+        engine.test(
+            "Delete VNFD id4",
+            "DELETE",
+            "/vnfpkgm/v1/vnf_packages/{}".format(vnfd_ids[3]),
+            headers_yaml,
+            None,
+            404,
+            r_header_yaml,
+            "yaml",
+        )
+        engine.test(
+            "Delete VNFD id4",
+            "DELETE",
+            "/vnfpkgm/v1/vnf_packages/{}?ADMIN".format(vnfd_ids[3]),
+            headers_yaml,
+            None,
+            204,
+            None,
+            0,
+        )
+        # Check that the deleted VNFDs are no longer retrievable
+        engine.test(
+            "Get VNFD deleted id1",
+            "GET",
+            "/vnfpkgm/v1/vnf_packages/{}?ADMIN".format(vnfd_ids[0]),
+            headers_json,
+            None,
+            404,
+            r_header_json,
+            "json",
+        )
+        engine.test(
+            "Get VNFD deleted id2",
+            "GET",
+            "/vnfpkgm/v1/vnf_packages/{}?ADMIN".format(vnfd_ids[1]),
+            headers_json,
+            None,
+            404,
+            r_header_json,
+            "json",
+        )
+        engine.test(
+            "Get VNFD deleted id3",
+            "GET",
+            "/vnfpkgm/v1/vnf_packages/{}?ADMIN".format(vnfd_ids[2]),
+            headers_json,
+            None,
+            404,
+            r_header_json,
+            "json",
+        )
+        engine.test(
+            "Get VNFD deleted id4",
+            "GET",
+            "/vnfpkgm/v1/vnf_packages/{}?ADMIN".format(vnfd_ids[3]),
+            headers_json,
+            None,
+            404,
+            r_header_json,
+            "json",
+        )
+
+        engine.test(
+            "Delete user U1",
+            "DELETE",
+            "/admin/v1/users/U1",
+            headers_json,
+            None,
+            204,
+            None,
+            None,
+        )
+        engine.test(
+            "Delete project Padmin",
+            "DELETE",
+            "/admin/v1/projects/Padmin",
+            headers_json,
+            None,
+            204,
+            None,
+            None,
+        )
+        engine.test(
+            "Delete project P2",
+            "DELETE",
+            "/admin/v1/projects/P2",
+            headers_json,
+            None,
+            204,
+            None,
+            None,
+        )
+        engine.test(
+            "Delete project P3",
+            "DELETE",
+            "/admin/v1/projects/P3",
+            headers_json,
+            None,
+            204,
+            None,
+            None,
+        )
+
+
+class TestFakeVim:
+    description = "Creates/edit/delete fake VIMs and SDN controllers"
+
+    def __init__(self):
+        self.vim = {
+            "schema_version": "1.0",
+            "schema_type": "No idea",
+            "name": "myVim",
+            "description": "Descriptor name",
+            "vim_type": "openstack",
+            "vim_url": "http://localhost:/vim",
+            "vim_tenant_name": "vimTenant",
+            "vim_user": "user",
+            "vim_password": "password",
+            "config": {"config_param": 1},
+        }
+        self.sdn = {
+            "name": "sdn-name",
+            "description": "sdn-description",
+            "dpid": "50:50:52:54:00:94:21:21",
+            "ip": "192.168.15.17",
+            "port": 8080,
+            "type": "opendaylight",
+            "version": "3.5.6",
+            "user": "user",
+            "password": "passwd",
+        }
+        self.port_mapping = [
+            {
+                "compute_node": "compute node 1",
+                "ports": [
+                    {
+                        "pci": "0000:81:00.0",
+                        "switch_port": "port-2/1",
+                        "switch_mac": "52:54:00:94:21:21",
+                    },
+                    {
+                        "pci": "0000:81:00.1",
+                        "switch_port": "port-2/2",
+                        "switch_mac": "52:54:00:94:21:22",
+                    },
+                ],
+            },
+            {
+                "compute_node": "compute node 2",
+                "ports": [
+                    {
+                        "pci": "0000:81:00.0",
+                        "switch_port": "port-2/3",
+                        "switch_mac": "52:54:00:94:21:23",
+                    },
+                    {
+                        "pci": "0000:81:00.1",
+                        "switch_port": "port-2/4",
+                        "switch_mac": "52:54:00:94:21:24",
+                    },
+                ],
+            },
+        ]
+
+    def run(self, engine, test_osm, manual_check, test_params=None):
+        vim_bad = self.vim.copy()
+        vim_bad.pop("name")
+
+        engine.set_test_name("FakeVim")
+        engine.get_autorization()
+        engine.test(
+            "Create VIM",
+            "POST",
+            "/admin/v1/vim_accounts",
+            headers_json,
+            self.vim,
+            (201, 202),
+            {"Location": "/admin/v1/vim_accounts/", "Content-Type": "application/json"},
+            "json",
+        )
+        vim_id = engine.last_id
+        engine.test(
+            "Create VIM without name, bad schema",
+            "POST",
+            "/admin/v1/vim_accounts",
+            headers_json,
+            vim_bad,
+            422,
+            None,
+            headers_json,
+        )
+        engine.test(
+            "Create VIM name repeated",
+            "POST",
+            "/admin/v1/vim_accounts",
+            headers_json,
+            self.vim,
+            409,
+            None,
+            headers_json,
+        )
+        engine.test(
+            "Show VIMs",
+            "GET",
+            "/admin/v1/vim_accounts",
+            headers_yaml,
+            None,
+            200,
+            r_header_yaml,
+            "yaml",
+        )
+        engine.test(
+            "Show VIM",
+            "GET",
+            "/admin/v1/vim_accounts/{}".format(vim_id),
+            headers_yaml,
+            None,
+            200,
+            r_header_yaml,
+            "yaml",
+        )
+        if not test_osm:
+            # delete with FORCE
+            engine.test(
+                "Delete VIM",
+                "DELETE",
+                "/admin/v1/vim_accounts/{}?FORCE=True".format(vim_id),
+                headers_yaml,
+                None,
+                202,
+                None,
+                0,
+            )
+            engine.test(
+                "Check VIM is deleted",
+                "GET",
+                "/admin/v1/vim_accounts/{}".format(vim_id),
+                headers_yaml,
+                None,
+                404,
+                r_header_yaml,
+                "yaml",
+            )
+        else:
+            # delete and wait until it is really deleted
+            engine.test(
+                "Delete VIM",
+                "DELETE",
+                "/admin/v1/vim_accounts/{}".format(vim_id),
+                headers_yaml,
+                None,
+                202,
+                None,
+                0,
+            )
+            engine.wait_until_delete(
+                "/admin/v1/vim_accounts/{}".format(vim_id), timeout
+            )
+
+
+class TestVIMSDN(TestFakeVim):
+    description = "Creates VIM with SDN editing SDN controllers and port_mapping"
+
+    def __init__(self):
+        TestFakeVim.__init__(self)
+        self.wim = {
+            "schema_version": "1.0",
+            "schema_type": "No idea",
+            "name": "myWim",
+            "description": "Descriptor name",
+            "wim_type": "odl",
+            "wim_url": "http://localhost:/wim",
+            "user": "user",
+            "password": "password",
+            "config": {"config_param": 1},
+        }
+
+    def run(self, engine, test_osm, manual_check, test_params=None):
+        engine.set_test_name("VimSdn")
+        engine.get_autorization()
+        # Added SDN
+        engine.test(
+            "Create SDN",
+            "POST",
+            "/admin/v1/sdns",
+            headers_json,
+            self.sdn,
+            (201, 202),
+            {"Location": "/admin/v1/sdns/", "Content-Type": "application/json"},
+            "json",
+        )
+        sdnc_id = engine.last_id
+        # sleep(5)
+        # Edit SDN
+        engine.test(
+            "Edit SDN",
+            "PATCH",
+            "/admin/v1/sdns/{}".format(sdnc_id),
+            headers_json,
+            {"name": "new_sdn_name"},
+            (202, 204),
+            None,
+            None,
+        )
+        # sleep(5)
+        # VIM with SDN
+        self.vim["config"]["sdn-controller"] = sdnc_id
+        self.vim["config"]["sdn-port-mapping"] = self.port_mapping
+        engine.test(
+            "Create VIM",
+            "POST",
+            "/admin/v1/vim_accounts",
+            headers_json,
+            self.vim,
+            (200, 202, 201),
+            {"Location": "/admin/v1/vim_accounts/", "Content-Type": "application/json"},
+            "json",
+        )
+
+        vim_id = engine.last_id
+        self.port_mapping[0]["compute_node"] = "compute node XX"
+        engine.test(
+            "Edit VIM change port-mapping",
+            "PUT",
+            "/admin/v1/vim_accounts/{}".format(vim_id),
+            headers_json,
+            {"config": {"sdn-port-mapping": self.port_mapping}},
+            (202, 204),
+            None,
+            None,
+        )
+        engine.test(
+            "Edit VIM remove port-mapping",
+            "PUT",
+            "/admin/v1/vim_accounts/{}".format(vim_id),
+            headers_json,
+            {"config": {"sdn-port-mapping": None}},
+            (202, 204),
+            None,
+            None,
+        )
+
+        engine.test(
+            "Create WIM",
+            "POST",
+            "/admin/v1/wim_accounts",
+            headers_json,
+            self.wim,
+            (200, 202, 201),
+            {"Location": "/admin/v1/wim_accounts/", "Content-Type": "application/json"},
+            "json",
+        )
+        wim_id = engine.last_id
+
+        if not test_osm:
+            # delete with FORCE
+            engine.test(
+                "Delete VIM remove port-mapping",
+                "DELETE",
+                "/admin/v1/vim_accounts/{}?FORCE=True".format(vim_id),
+                headers_json,
+                None,
+                202,
+                None,
+                0,
+            )
+            engine.test(
+                "Delete SDNC",
+                "DELETE",
+                "/admin/v1/sdns/{}?FORCE=True".format(sdnc_id),
+                headers_json,
+                None,
+                202,
+                None,
+                0,
+            )
+
+            engine.test(
+                "Delete WIM",
+                "DELETE",
+                "/admin/v1/wim_accounts/{}?FORCE=True".format(wim_id),
+                headers_json,
+                None,
+                202,
+                None,
+                0,
+            )
+            engine.test(
+                "Check VIM is deleted",
+                "GET",
+                "/admin/v1/vim_accounts/{}".format(vim_id),
+                headers_yaml,
+                None,
+                404,
+                r_header_yaml,
+                "yaml",
+            )
+            engine.test(
+                "Check SDN is deleted",
+                "GET",
+                "/admin/v1/sdns/{}".format(sdnc_id),
+                headers_yaml,
+                None,
+                404,
+                r_header_yaml,
+                "yaml",
+            )
+            engine.test(
+                "Check WIM is deleted",
+                "GET",
+                "/admin/v1/wim_accounts/{}".format(wim_id),
+                headers_yaml,
+                None,
+                404,
+                r_header_yaml,
+                "yaml",
+            )
+        else:
+            if manual_check:
+                input(
+                    "VIM, SDN, WIM has been deployed. Perform manual check and press enter to resume"
+                )
+            # delete and wait until it is really deleted
+            engine.test(
+                "Delete VIM remove port-mapping",
+                "DELETE",
+                "/admin/v1/vim_accounts/{}".format(vim_id),
+                headers_json,
+                None,
+                (202, 201, 204),
+                None,
+                0,
+            )
+            engine.test(
+                "Delete SDN",
+                "DELETE",
+                "/admin/v1/sdns/{}".format(sdnc_id),
+                headers_json,
+                None,
+                (202, 201, 204),
+                None,
+                0,
+            )
+            engine.test(
+                "Delete VIM",
+                "DELETE",
+                "/admin/v1/wim_accounts/{}".format(wim_id),
+                headers_json,
+                None,
+                (202, 201, 204),
+                None,
+                0,
+            )
+            engine.wait_until_delete(
+                "/admin/v1/vim_accounts/{}".format(vim_id), timeout
+            )
+            engine.wait_until_delete("/admin/v1/sdns/{}".format(sdnc_id), timeout)
+            engine.wait_until_delete(
+                "/admin/v1/wim_accounts/{}".format(wim_id), timeout
+            )
+
+
+class TestDeploy:
+    description = "Base class for downloading descriptors from ETSI, onboard and deploy in real VIM"
+
+    def __init__(self):
+        self.test_name = "DEPLOY"
+        self.nsd_id = None
+        self.vim_id = None
+        self.ns_id = None
+        self.vnfds_id = []
+        self.descriptor_url = (
+            "https://osm-download.etsi.org/ftp/osm-3.0-three/2nd-hackfest/packages/"
+        )
+        self.vnfd_filenames = ("cirros_vnf.tar.gz",)
+        self.nsd_filename = "cirros_2vnf_ns.tar.gz"
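+        # Optional per-descriptor edits (keys "vnfd<N>" and "nsd") applied with PATCH
+        # right after onboarding; subclasses override this (see create_descriptors).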
+        self.descriptor_edit = None
+        self.uses_configuration = False
+        self.users = {}
+        self.passwords = {}
+        self.commands = {}
+        self.keys = {}
+        self.timeout = 120
+        self.qforce = ""
+        self.ns_params = None
+        self.vnfr_ip_list = {}
+
+    def create_descriptors(self, engine):
+        temp_dir = os.path.dirname(os.path.abspath(__file__)) + "/temp/"
+        if not os.path.exists(temp_dir):
+            os.makedirs(temp_dir)
+        for vnfd_index, vnfd_filename in enumerate(self.vnfd_filenames):
+            if "/" in vnfd_filename:
+                vnfd_filename_path = vnfd_filename
+                if not os.path.exists(vnfd_filename_path):
+                    raise TestException(
+                        "File '{}' does not exist".format(vnfd_filename_path)
+                    )
+            else:
+                vnfd_filename_path = temp_dir + vnfd_filename
+                if not os.path.exists(vnfd_filename_path):
+                    with open(vnfd_filename_path, "wb") as file:
+                        response = requests.get(self.descriptor_url + vnfd_filename)
+                        if response.status_code >= 300:
+                            raise TestException(
+                                "Error downloading descriptor from '{}': {}".format(
+                                    self.descriptor_url + vnfd_filename,
+                                    response.status_code,
+                                )
+                            )
+                        file.write(response.content)
+            if vnfd_filename_path.endswith(".yaml"):
+                headers = headers_yaml
+            else:
+                headers = headers_zip_yaml
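+            # Randomly exercise both VNFD onboarding paths: one-step create+upload of
+            # the package content, or two-step create then upload.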
+            if randint(0, 1) == 0:
+                # vnfd CREATE AND UPLOAD in one step:
+                engine.test(
+                    "Onboard VNFD in one step",
+                    "POST",
+                    "/vnfpkgm/v1/vnf_packages_content" + self.qforce,
+                    headers,
+                    "@b" + vnfd_filename_path,
+                    201,
+                    r_headers_yaml_location_vnfd,
+                    "yaml",
+                )
+                self.vnfds_id.append(engine.last_id)
+            else:
+                # vnfd CREATE AND UPLOAD ZIP
+                engine.test(
+                    "Onboard VNFD step 1",
+                    "POST",
+                    "/vnfpkgm/v1/vnf_packages",
+                    headers_json,
+                    None,
+                    201,
+                    {
+                        "Location": "/vnfpkgm/v1/vnf_packages/",
+                        "Content-Type": "application/json",
+                    },
+                    "json",
+                )
+                self.vnfds_id.append(engine.last_id)
+                engine.test(
+                    "Onboard VNFD step 2 as ZIP",
+                    "PUT",
+                    "/vnfpkgm/v1/vnf_packages/<>/package_content" + self.qforce,
+                    headers,
+                    "@b" + vnfd_filename_path,
+                    204,
+                    None,
+                    0,
+                )
+
+            if self.descriptor_edit:
+                if "vnfd{}".format(vnfd_index) in self.descriptor_edit:
+                    # Modify VNFD
+                    engine.test(
+                        "Edit VNFD ",
+                        "PATCH",
+                        "/vnfpkgm/v1/vnf_packages/{}".format(self.vnfds_id[-1]),
+                        headers_yaml,
+                        self.descriptor_edit["vnfd{}".format(vnfd_index)],
+                        204,
+                        None,
+                        None,
+                    )
+
+        if "/" in self.nsd_filename:
+            nsd_filename_path = self.nsd_filename
+            if not os.path.exists(nsd_filename_path):
+                raise TestException(
+                    "File '{}' does not exist".format(nsd_filename_path)
+                )
+        else:
+            nsd_filename_path = temp_dir + self.nsd_filename
+            if not os.path.exists(nsd_filename_path):
+                with open(nsd_filename_path, "wb") as file:
+                    response = requests.get(self.descriptor_url + self.nsd_filename)
+                    if response.status_code >= 300:
+                        raise TestException(
+                            "Error downloading descriptor from '{}': {}".format(
+                                self.descriptor_url + self.nsd_filename,
+                                response.status_code,
+                            )
+                        )
+                    file.write(response.content)
+        if nsd_filename_path.endswith(".yaml"):
+            headers = headers_yaml
+        else:
+            headers = headers_zip_yaml
+
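+        # Same random choice for the NSD: one-step or two-step onboarding.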
+        if randint(0, 1) == 0:
+            # nsd CREATE AND UPLOAD in one step:
+            engine.test(
+                "Onboard NSD in one step",
+                "POST",
+                "/nsd/v1/ns_descriptors_content" + self.qforce,
+                headers,
+                "@b" + nsd_filename_path,
+                201,
+                r_headers_yaml_location_nsd,
+                "yaml",
+            )
+            self.nsd_id = engine.last_id
+        else:
+            # nsd CREATE AND UPLOAD ZIP
+            engine.test(
+                "Onboard NSD step 1",
+                "POST",
+                "/nsd/v1/ns_descriptors",
+                headers_json,
+                None,
+                201,
+                {
+                    "Location": "/nsd/v1/ns_descriptors/",
+                    "Content-Type": "application/json",
+                },
+                "json",
+            )
+            self.nsd_id = engine.last_id
+            engine.test(
+                "Onboard NSD step 2 as ZIP",
+                "PUT",
+                "/nsd/v1/ns_descriptors/<>/nsd_content" + self.qforce,
+                headers,
+                "@b" + nsd_filename_path,
+                204,
+                None,
+                0,
+            )
+
+        if self.descriptor_edit and "nsd" in self.descriptor_edit:
+            # Modify NSD
+            engine.test(
+                "Edit NSD ",
+                "PATCH",
+                "/nsd/v1/ns_descriptors/{}".format(self.nsd_id),
+                headers_yaml,
+                self.descriptor_edit["nsd"],
+                204,
+                None,
+                None,
+            )
+
+    def delete_descriptors(self, engine):
+        # delete descriptors
+        engine.test(
+            "Delete NSSD SOL005",
+            "DELETE",
+            "/nsd/v1/ns_descriptors/{}".format(self.nsd_id),
+            headers_yaml,
+            None,
+            204,
+            None,
+            0,
+        )
+        for vnfd_id in self.vnfds_id:
+            engine.test(
+                "Delete VNFD SOL005",
+                "DELETE",
+                "/vnfpkgm/v1/vnf_packages/{}".format(vnfd_id),
+                headers_yaml,
+                None,
+                204,
+                None,
+                0,
+            )
+
+    def instantiate(self, engine, ns_data):
+        ns_data_text = yaml.safe_dump(ns_data, default_flow_style=True, width=256)
+        # create NS in two steps
+        r = engine.test(
+            "Create NS step 1",
+            "POST",
+            "/nslcm/v1/ns_instances",
+            headers_yaml,
+            ns_data_text,
+            (201, 202),
+            {"Location": "nslcm/v1/ns_instances/", "Content-Type": "application/yaml"},
+            "yaml",
+        )
+        if not r:
+            return
+        self.ns_id = engine.last_id
+        engine.test(
+            "Instantiate NS step 2",
+            "POST",
+            "/nslcm/v1/ns_instances/{}/instantiate".format(self.ns_id),
+            headers_yaml,
+            ns_data_text,
+            (201, 202),
+            r_headers_yaml_location_nslcmop,
+            "yaml",
+        )
+        nslcmop_id = engine.last_id
+
+        if test_osm:
+            # Wait until status is Ok
+            timeout = timeout_configure if self.uses_configuration else timeout_deploy
+            engine.wait_operation_ready("ns", nslcmop_id, timeout)
+
+    def terminate(self, engine):
+        # remove deployment
+        if test_osm:
+            engine.test(
+                "Terminate NS",
+                "POST",
+                "/nslcm/v1/ns_instances/{}/terminate".format(self.ns_id),
+                headers_yaml,
+                None,
+                (201, 202),
+                r_headers_yaml_location_nslcmop,
+                "yaml",
+            )
+            nslcmop2_id = engine.last_id
+            # Wait until status is Ok
+            engine.wait_operation_ready("ns", nslcmop2_id, timeout_deploy)
+
+            engine.test(
+                "Delete NS",
+                "DELETE",
+                "/nslcm/v1/ns_instances/{}".format(self.ns_id),
+                headers_yaml,
+                None,
+                204,
+                None,
+                0,
+            )
+        else:
+            engine.test(
+                "Delete NS with FORCE",
+                "DELETE",
+                "/nslcm/v1/ns_instances/{}?FORCE=True".format(self.ns_id),
+                headers_yaml,
+                None,
+                204,
+                None,
+                0,
+            )
+
+        # check everything is deleted
+        engine.test(
+            "Check NS is deleted",
+            "GET",
+            "/nslcm/v1/ns_instances/{}".format(self.ns_id),
+            headers_yaml,
+            None,
+            404,
+            None,
+            "yaml",
+        )
+        r = engine.test(
+            "Check NSLCMOPs are deleted",
+            "GET",
+            "/nslcm/v1/ns_lcm_op_occs?nsInstanceId={}".format(self.ns_id),
+            headers_json,
+            None,
+            200,
+            None,
+            "json",
+        )
+        if not r:
+            return
+        nslcmops = r.json()
+        if not isinstance(nslcmops, list) or nslcmops:
+            raise TestException(
+                "NS {} deleted but with ns_lcm_op_occ active: {}".format(
+                    self.ns_id, nslcmops
+                )
+            )
+
+    def test_ns(
+        self,
+        engine,
+        test_osm,
+        commands=None,
+        users=None,
+        passwds=None,
+        keys=None,
+        timeout=0,
+    ):
+        r = engine.test(
+            "GET VNFR IDs",
+            "GET",
+            "/nslcm/v1/ns_instances/{}".format(self.ns_id),
+            headers_json,
+            None,
+            200,
+            r_header_json,
+            "json",
+        )
+        if not r:
+            return
+        ns_data = r.json()
+
+        vnfr_list = ns_data["constituent-vnfr-ref"]
+        time = 0
+        _commands = commands if commands is not None else self.commands
+        _users = users if users is not None else self.users
+        _passwds = passwds if passwds is not None else self.passwords
+        _keys = keys if keys is not None else self.keys
+        _timeout = timeout if timeout != 0 else self.timeout
+
+        # vnfr_list=[d8272263-6bd3-4680-84ca-6a4be23b3f2d, 88b22e2f-994a-4b61-94fd-4a3c90de3dc4]
+        for vnfr_id in vnfr_list:
+            r = engine.test(
+                "Get VNFR to get IP_ADDRESS",
+                "GET",
+                "/nslcm/v1/vnfrs/{}".format(vnfr_id),
+                headers_json,
+                None,
+                200,
+                r_header_json,
+                "json",
+            )
+            if not r:
+                continue
+            vnfr_data = r.json()
+
+            vnf_index = str(vnfr_data["member-vnf-index-ref"])
+
+            ip_address = self.get_vnfr_ip(engine, vnf_index)
+            description = "Exec command='{}' at VNFR={} IP={}".format(
+                _commands.get(vnf_index)[0], vnf_index, ip_address
+            )
+            engine.step += 1
+            test_description = "{}{} {}".format(
+                engine.test_name, engine.step, description
+            )
+            logger.warning(test_description)
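+            # Poll do_checks() every 20 seconds until the command succeeds, fails hard,
+            # or _timeout is exceeded (the while-else below counts it as a failed test).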
+            while _timeout >= time:
+                result, message = self.do_checks(
+                    [ip_address],
+                    vnf_index=vnfr_data["member-vnf-index-ref"],
+                    commands=_commands.get(vnf_index),
+                    user=_users.get(vnf_index),
+                    passwd=_passwds.get(vnf_index),
+                    key=_keys.get(vnf_index),
+                )
+                if result == 1:
+                    engine.passed_tests += 1
+                    logger.debug(message)
+                    break
+                elif result == 0:
+                    time += 20
+                    sleep(20)
+                elif result == -1:
+                    engine.failed_tests += 1
+                    logger.error(message)
+                    break
+                else:
+                    time -= 20
+                    engine.failed_tests += 1
+                    logger.error(message)
+            else:
+                engine.failed_tests += 1
+                logger.error(
+                    "VNFR {} has not mgmt address. Check failed".format(vnf_index)
+                )
+
+    def do_checks(
+        self, ip, vnf_index, commands=None, user=None, passwd=None, key=None
+    ):
+        commands = commands or []
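+        # Returns a (result, message) tuple: 1 on success, 0 when the VNFR is not yet
+        # reachable (the caller retries), -1 on error. Note that only the first command
+        # of the list is executed before returning.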
+        try:
+            import urllib3
+            from pssh.clients import ParallelSSHClient
+            from pssh.utils import load_private_key
+            from ssh2 import exceptions as ssh2Exception
+        except ImportError as e:
+            logger.critical(
+                "Package <pssh> or/and <urllib3> is not installed. Please add them with 'pip3 install "
+                "parallel-ssh urllib3': {}".format(e)
+            )
+            return -1, "install needed packages 'pip3 install parallel-ssh urllib3'"
+        urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)
+        try:
+            p_host = os.environ.get("PROXY_HOST")
+            p_user = os.environ.get("PROXY_USER")
+            p_password = os.environ.get("PROXY_PASSWD")
+
+            if key:
+                pkey = load_private_key(key)
+            else:
+                pkey = None
+
+            client = ParallelSSHClient(
+                ip,
+                user=user,
+                password=passwd,
+                pkey=pkey,
+                proxy_host=p_host,
+                proxy_user=p_user,
+                proxy_password=p_password,
+                timeout=10,
+                num_retries=0,
+            )
+            for cmd in commands:
+                output = client.run_command(cmd)
+                client.join(output)
+                if output[ip[0]].exit_code:
+                    return -1, "VNFR {} command '{}' returns error: '{}'".format(
+                        ip[0], cmd, "\n".join(output[ip[0]].stderr)
+                    )
+                else:
+                    return 1, "VNFR {} command '{}' successful".format(ip[0], cmd)
+        except (
+            ssh2Exception.ChannelFailure,
+            ssh2Exception.SocketDisconnectError,
+            ssh2Exception.SocketTimeout,
+            ssh2Exception.SocketRecvError,
+        ) as e:
+            return 0, "Timeout accessing the VNFR {}: {}".format(ip[0], str(e))
+        except Exception as e:
+            return -1, "ERROR checking the VNFR {}: {}".format(ip[0], str(e))
+
+    def additional_operations(self, engine, test_osm, manual_check):
+        pass
+
+    def run(self, engine, test_osm, manual_check, test_params=None):
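+        # Overall flow: onboard descriptors, ensure a VIM exists, instantiate the NS,
+        # optionally run SSH command checks inside the VNFs, then terminate and clean up.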
+        engine.set_test_name(self.test_name)
+        engine.get_autorization()
+        nsname = os.environ.get("OSMNBITEST_NS_NAME", "OSMNBITEST")
+        if test_params:
+            if "vnfd-files" in test_params:
+                self.vnfd_filenames = test_params["vnfd-files"].split(",")
+            if "nsd-file" in test_params:
+                self.nsd_filename = test_params["nsd-file"]
+            if test_params.get("ns-name"):
+                nsname = test_params["ns-name"]
+        self.create_descriptors(engine)
+
+        # create real VIM if not exist
+        self.vim_id = engine.get_create_vim(test_osm)
+        ns_data = {
+            "nsDescription": "default description",
+            "nsName": nsname,
+            "nsdId": self.nsd_id,
+            "vimAccountId": self.vim_id,
+        }
+        if self.ns_params:
+            ns_data.update(self.ns_params)
+        if test_params and test_params.get("ns-config"):
+            if isinstance(test_params["ns-config"], str):
+                ns_data.update(yaml.safe_load(test_params["ns-config"]))
+            else:
+                ns_data.update(test_params["ns-config"])
+        self.instantiate(engine, ns_data)
+
+        if manual_check:
+            input(
+                "NS has been deployed. Perform manual check and press enter to resume"
+            )
+        if test_osm and self.commands:
+            self.test_ns(engine, test_osm)
+        self.additional_operations(engine, test_osm, manual_check)
+        self.terminate(engine)
+        self.delete_descriptors(engine)
+
+    def get_first_ip(self, ip_string):
+        # When using a floating IP, the vnfr_data['ip-address'] contains a semicolon-separated list of IPs.
+        first_ip = ip_string.split(";")[0] if ip_string else ""
+        return first_ip
+
+    def get_vnfr_ip(self, engine, vnfr_index_wanted):
+        # If the IP address list has been obtained before, it has been stored in 'vnfr_ip_list'
+        ip = self.vnfr_ip_list.get(vnfr_index_wanted, "")
+        if ip:
+            return self.get_first_ip(ip)
+        r = engine.test(
+            "Get VNFR to get IP_ADDRESS",
+            "GET",
+            "/nslcm/v1/vnfrs?member-vnf-index-ref={}&nsr-id-ref={}".format(
+                vnfr_index_wanted, self.ns_id
+            ),
+            headers_json,
+            None,
+            200,
+            r_header_json,
+            "json",
+        )
+        if not r:
+            return ""
+        vnfr_data = r.json()
+        if not (vnfr_data and vnfr_data[0]):
+            return ""
+        # Store the IP (or list of IPs) in 'vnfr_ip_list'
+        ip_list = vnfr_data[0].get("ip-address", "")
+        if ip_list:
+            self.vnfr_ip_list[vnfr_index_wanted] = ip_list
+            ip = self.get_first_ip(ip_list)
+        return ip
+
+
+class TestDeployHackfestCirros(TestDeploy):
+    description = "Load and deploy Hackfest cirros_2vnf_ns example"
+
+    def __init__(self):
+        super().__init__()
+        self.test_name = "CIRROS"
+        self.vnfd_filenames = ("cirros_vnf.tar.gz",)
+        self.nsd_filename = "cirros_2vnf_ns.tar.gz"
+        self.commands = {
+            "1": [
+                "ls -lrt",
+            ],
+            "2": [
+                "ls -lrt",
+            ],
+        }
+        self.users = {"1": "cirros", "2": "cirros"}
+        self.passwords = {"1": "cubswin:)", "2": "cubswin:)"}
+
+    def terminate(self, engine):
+        # Delete in one step, overriding TestDeploy's normal two-step terminate and delete
+        if test_osm:
+            engine.test(
+                "Terminate and delete NS in one step",
+                "DELETE",
+                "/nslcm/v1/ns_instances_content/{}".format(self.ns_id),
+                headers_yaml,
+                None,
+                202,
+                None,
+                "yaml",
+            )
+
+            engine.wait_until_delete(
+                "/nslcm/v1/ns_instances/{}".format(self.ns_id), timeout_deploy
+            )
+        else:
+            engine.test(
+                "Delete NS with FORCE",
+                "DELETE",
+                "/nslcm/v1/ns_instances/{}?FORCE=True".format(self.ns_id),
+                headers_yaml,
+                None,
+                204,
+                None,
+                0,
+            )
+
+        # check everything is deleted
+        engine.test(
+            "Check NS is deleted",
+            "GET",
+            "/nslcm/v1/ns_instances/{}".format(self.ns_id),
+            headers_yaml,
+            None,
+            404,
+            None,
+            "yaml",
+        )
+        r = engine.test(
+            "Check NSLCMOPs are deleted",
+            "GET",
+            "/nslcm/v1/ns_lcm_op_occs?nsInstanceId={}".format(self.ns_id),
+            headers_json,
+            None,
+            200,
+            None,
+            "json",
+        )
+        if not r:
+            return
+        nslcmops = r.json()
+        if not isinstance(nslcmops, list) or nslcmops:
+            raise TestException(
+                "NS {} deleted but with ns_lcm_op_occ active: {}".format(
+                    self.ns_id, nslcmops
+                )
+            )
+
+
+class TestDeployHackfest1(TestDeploy):
+    description = "Load and deploy Hackfest_1_vnfd example"
+
+    def __init__(self):
+        super().__init__()
+        self.test_name = "HACKFEST1-"
+        self.vnfd_filenames = ("hackfest_1_vnfd.tar.gz",)
+        self.nsd_filename = "hackfest_1_nsd.tar.gz"
+        # self.commands = {'1': ['ls -lrt', ], '2': ['ls -lrt', ]}
+        # self.users = {'1': "cirros", '2': "cirros"}
+        # self.passwords = {'1': "cubswin:)", '2': "cubswin:)"}
+
+
+class TestDeployHackfestCirrosScaling(TestDeploy):
+    description = (
+        "Load and deploy Hackfest cirros_2vnf_ns example with scaling modifications"
+    )
+
+    def __init__(self):
+        super().__init__()
+        self.test_name = "CIRROS-SCALE"
+        self.vnfd_filenames = ("cirros_vnf.tar.gz",)
+        self.nsd_filename = "cirros_2vnf_ns.tar.gz"
+        # Modify VNFD to add scaling and count=2
+        self.descriptor_edit = {
+            "vnfd0": {
+                "vdu": {"$id: 'cirros_vnfd-VM'": {"count": 2}},
+                "scaling-group-descriptor": [
+                    {
+                        "name": "scale_cirros",
+                        "max-instance-count": 2,
+                        "vdu": [{"vdu-id-ref": "cirros_vnfd-VM", "count": 2}],
+                    }
+                ],
+            }
+        }
+
+    def additional_operations(self, engine, test_osm, manual_check):
+        if not test_osm:
+            return
+        # 2 perform scale out twice
+        payload = (
+            "{scaleType: SCALE_VNF, scaleVnfData: {scaleVnfType: SCALE_OUT, scaleByStepData: "
+            '{scaling-group-descriptor: scale_cirros, member-vnf-index: "1"}}}'
+        )
+        for i in range(0, 2):
+            engine.test(
+                "Execute scale action over NS",
+                "POST",
+                "/nslcm/v1/ns_instances/{}/scale".format(self.ns_id),
+                headers_yaml,
+                payload,
+                (201, 202),
+                r_headers_yaml_location_nslcmop,
+                "yaml",
+            )
+            nslcmop2_scale_out = engine.last_id
+            engine.wait_operation_ready("ns", nslcmop2_scale_out, timeout_deploy)
+            if manual_check:
+                input("NS scale out done. Check that two more vdus are there")
+            # TODO check automatic
+
+        # 2 perform scale in
+        payload = (
+            "{scaleType: SCALE_VNF, scaleVnfData: {scaleVnfType: SCALE_IN, scaleByStepData: "
+            '{scaling-group-descriptor: scale_cirros, member-vnf-index: "1"}}}'
+        )
+        for i in range(0, 2):
+            engine.test(
+                "Execute scale IN action over NS",
+                "POST",
+                "/nslcm/v1/ns_instances/{}/scale".format(self.ns_id),
+                headers_yaml,
+                payload,
+                (201, 202),
+                r_headers_yaml_location_nslcmop,
+                "yaml",
+            )
+            nslcmop2_scale_in = engine.last_id
+            engine.wait_operation_ready("ns", nslcmop2_scale_in, timeout_deploy)
+            if manual_check:
+                input("NS scale in done. Check that two less vdus are there")
+            # TODO check automatic
+
+        # perform a scale in that must fail because the limit has been reached
+        engine.test(
+            "Execute scale IN out of limit action over NS",
+            "POST",
+            "/nslcm/v1/ns_instances/{}/scale".format(self.ns_id),
+            headers_yaml,
+            payload,
+            (201, 202),
+            r_headers_yaml_location_nslcmop,
+            "yaml",
+        )
+        nslcmop2_scale_in = engine.last_id
+        engine.wait_operation_ready(
+            "ns", nslcmop2_scale_in, timeout_deploy, expected_fail=True
+        )
+
+
+class TestDeployIpMac(TestDeploy):
+    description = "Load and deploy descriptor examples setting mac, ip address at descriptor and instantiate params"
+
+    def __init__(self):
+        super().__init__()
+        self.test_name = "SetIpMac"
+        self.vnfd_filenames = (
+            "vnfd_2vdu_set_ip_mac2.yaml",
+            "vnfd_2vdu_set_ip_mac.yaml",
+        )
+        self.nsd_filename = "scenario_2vdu_set_ip_mac.yaml"
+        self.descriptor_url = "https://osm.etsi.org/gitweb/?p=osm/RO.git;a=blob_plain;f=test/RO_tests/v3_2vdu_set_ip_mac/"
+        self.commands = {
+            "1": [
+                "ls -lrt",
+            ],
+            "2": [
+                "ls -lrt",
+            ],
+        }
+        self.users = {"1": "osm", "2": "osm"}
+        self.passwords = {"1": "osm4u", "2": "osm4u"}
+        self.timeout = 360
+
+    def run(self, engine, test_osm, manual_check, test_params=None):
+        # super().run(engine, test_osm, manual_check, test_params)
+        # run again setting IPs with instantiate parameters
+        instantiation_params = {
+            "vnf": [
+                {
+                    "member-vnf-index": "1",
+                    "internal-vld": [
+                        {
+                            "name": "internal_vld1",  # net_internal
+                            "ip-profile": {
+                                "ip-version": "ipv4",
+                                "subnet-address": "10.9.8.0/24",
+                                "dhcp-params": {
+                                    "count": 100,
+                                    "start-address": "10.9.8.100",
+                                },
+                            },
+                            "internal-connection-point": [
+                                {
+                                    "id-ref": "eth2",
+                                    "ip-address": "10.9.8.2",
+                                },
+                                {
+                                    "id-ref": "eth3",
+                                    "ip-address": "10.9.8.3",
+                                },
+                            ],
+                        },
+                    ],
+                    "vdu": [
+                        {
+                            "id": "VM1",
+                            "interface": [
+                                # {
+                                #     "name": "iface11",
+                                #     "floating-ip-required": True,
+                                # },
+                                {"name": "iface13", "mac-address": "52:33:44:55:66:13"},
+                            ],
+                        },
+                        {
+                            "id": "VM2",
+                            "interface": [
+                                {
+                                    "name": "iface21",
+                                    "ip-address": "10.31.31.22",
+                                    "mac-address": "52:33:44:55:66:21",
+                                },
+                            ],
+                        },
+                    ],
+                },
+            ]
+        }
+
+        super().run(
+            engine,
+            test_osm,
+            manual_check,
+            test_params={"ns-config": instantiation_params},
+        )
+
+
+class TestDeployHackfest4(TestDeploy):
+    description = "Load and deploy Hackfest 4 example."
+
+    def __init__(self):
+        super().__init__()
+        self.test_name = "HACKFEST4-"
+        self.vnfd_filenames = ("hackfest_4_vnfd.tar.gz",)
+        self.nsd_filename = "hackfest_4_nsd.tar.gz"
+        self.uses_configuration = True
+        self.commands = {
+            "1": [
+                "ls -lrt",
+            ],
+            "2": [
+                "ls -lrt",
+            ],
+        }
+        self.users = {"1": "ubuntu", "2": "ubuntu"}
+        self.passwords = {"1": "osm4u", "2": "osm4u"}
+        # Modify VNFD to add scaling
+        # self.descriptor_edit = {
+        #     "vnfd0": {
+        #         'vnf-configuration': {
+        #             'config-primitive': [{
+        #                 'name': 'touch',
+        #                 'parameter': [{
+        #                     'name': 'filename',
+        #                     'data-type': 'STRING',
+        #                     'default-value': '/home/ubuntu/touched'
+        #                 }]
+        #             }]
+        #         },
+        #         'scaling-group-descriptor': [{
+        #             'name': 'scale_dataVM',
+        #             'scaling-policy': [{
+        #                 'threshold-time': 0,
+        #                 'name': 'auto_cpu_util_above_threshold',
+        #                 'scaling-type': 'automatic',
+        #                 'scaling-criteria': [{
+        #                     'name': 'cpu_util_above_threshold',
+        #                     'vnf-monitoring-param-ref': 'all_aaa_cpu_util',
+        #                     'scale-out-relational-operation': 'GE',
+        #                     'scale-in-threshold': 15,
+        #                     'scale-out-threshold': 60,
+        #                     'scale-in-relational-operation': 'LE'
+        #                 }],
+        #                 'cooldown-time': 60
+        #             }],
+        #             'max-instance-count': 10,
+        #             'scaling-config-action': [
+        #                 {'vnf-config-primitive-name-ref': 'touch',
+        #                  'trigger': 'post-scale-out'},
+        #                 {'vnf-config-primitive-name-ref': 'touch',
+        #                  'trigger': 'pre-scale-in'}
+        #             ],
+        #             'vdu': [{
+        #                 'vdu-id-ref': 'dataVM',
+        #                 'count': 1
+        #             }]
+        #         }]
+        #     }
+        # }
+
+
+class TestDeployHackfest3Charmed(TestDeploy):
+    description = "Load and deploy Hackfest 3charmed_ns example"
+
+    def __init__(self):
+        super().__init__()
+        self.test_name = "HACKFEST3-"
+        self.vnfd_filenames = ("hackfest_3charmed_vnfd.tar.gz",)
+        self.nsd_filename = "hackfest_3charmed_nsd.tar.gz"
+        self.uses_configuration = True
+        self.commands = {
+            "1": ["ls -lrt /home/ubuntu/first-touch"],
+            "2": ["ls -lrt /home/ubuntu/first-touch"],
+        }
+        self.users = {"1": "ubuntu", "2": "ubuntu"}
+        self.passwords = {"1": "osm4u", "2": "osm4u"}
+        self.descriptor_edit = {
+            "vnfd0": yaml.safe_load(
+                """
+                vnf-configuration:
+                    terminate-config-primitive:
+                    -   seq: '1'
+                        name: touch
+                        parameter:
+                        -   name: filename
+                            value: '/home/ubuntu/last-touch1'
+                    -   seq: '3'
+                        name: touch
+                        parameter:
+                        -   name: filename
+                            value: '/home/ubuntu/last-touch3'
+                    -   seq: '2'
+                        name: touch
+                        parameter:
+                        -   name: filename
+                            value: '/home/ubuntu/last-touch2'
+                """
+            )
+        }
+
+    def additional_operations(self, engine, test_osm, manual_check):
+        if not test_osm:
+            return
+        # 1 perform action
+        vnfr_index_selected = "2"
+        payload = '{member_vnf_index: "2", primitive: touch, primitive_params: { filename: /home/ubuntu/OSMTESTNBI }}'
+        engine.test(
+            "Exec service primitive over NS",
+            "POST",
+            "/nslcm/v1/ns_instances/{}/action".format(self.ns_id),
+            headers_yaml,
+            payload,
+            (201, 202),
+            r_headers_yaml_location_nslcmop,
+            "yaml",
+        )
+        nslcmop2_action = engine.last_id
+        # Wait until status is Ok
+        engine.wait_operation_ready("ns", nslcmop2_action, timeout_deploy)
+        vnfr_ip = self.get_vnfr_ip(engine, vnfr_index_selected)
+        if manual_check:
+            input(
+                "NS service primitive has been executed."
+                "Check that file /home/ubuntu/OSMTESTNBI is present at {}".format(
+                    vnfr_ip
+                )
+            )
+        if test_osm:
+            commands = {
+                "1": [""],
+                "2": [
+                    "ls -lrt /home/ubuntu/OSMTESTNBI",
+                ],
+            }
+            self.test_ns(engine, test_osm, commands=commands)
+
+        # # 2 perform scale out
+        # payload = '{scaleType: SCALE_VNF, scaleVnfData: {scaleVnfType: SCALE_OUT, scaleByStepData: ' \
+        #           '{scaling-group-descriptor: scale_dataVM, member-vnf-index: "1"}}}'
+        # engine.test("Execute scale action over NS", "POST",
+        #             "/nslcm/v1/ns_instances/{}/scale".format(self.ns_id), headers_yaml, payload,
+        #             (201, 202), r_headers_yaml_location_nslcmop, "yaml")
+        # nslcmop2_scale_out = engine.last_id
+        # engine.wait_operation_ready("ns", nslcmop2_scale_out, timeout_deploy)
+        # if manual_check:
+        #     input('NS scale out done. Check that file /home/ubuntu/touched is present and new VM is created')
+        # # TODO check automatic
+        #
+        # # 2 perform scale in
+        # payload = '{scaleType: SCALE_VNF, scaleVnfData: {scaleVnfType: SCALE_IN, scaleByStepData: ' \
+        #           '{scaling-group-descriptor: scale_dataVM, member-vnf-index: "1"}}}'
+        # engine.test("Execute scale action over NS", "POST",
+        #             "/nslcm/v1/ns_instances/{}/scale".format(self.ns_id), headers_yaml, payload,
+        #             (201, 202), r_headers_yaml_location_nslcmop, "yaml")
+        # nslcmop2_scale_in = engine.last_id
+        # engine.wait_operation_ready("ns", nslcmop2_scale_in, timeout_deploy)
+        # if manual_check:
+        #     input('NS scale in done. Check that file /home/ubuntu/touched is updated and new VM is deleted')
+        # # TODO check automatic
+
+
+class TestDeployHackfest3Charmed2(TestDeployHackfest3Charmed):
+    description = (
+        "Load and deploy Hackfest 3charmed_ns example modified version of descriptors to have dots in "
+        "ids and member-vnf-index."
+    )
+
+    def __init__(self):
+        super().__init__()
+        self.test_name = "HACKFEST3v2-"
+        self.qforce = "?FORCE=True"
+        self.descriptor_edit = {
+            "vnfd0": {
+                "vdu": {
+                    "$[0]": {
+                        "interface": {
+                            "$[0]": {"external-connection-point-ref": "pdu-mgmt"}
+                        }
+                    },
+                    "$[1]": None,
+                },
+                "vnf-configuration": None,
+                "connection-point": {
+                    "$[0]": {
+                        "id": "pdu-mgmt",
+                        "name": "pdu-mgmt",
+                        "short-name": "pdu-mgmt",
+                    },
+                    "$[1]": None,
+                },
+                "mgmt-interface": {"cp": "pdu-mgmt"},
+                "description": "A vnf single vdu to be used as PDU",
+                "id": "vdu-as-pdu",
+                "internal-vld": {
+                    "$[0]": {
+                        "id": "pdu_internal",
+                        "name": "pdu_internal",
+                        "internal-connection-point": {"$[1]": None},
+                        "short-name": "pdu_internal",
+                        "type": "ELAN",
+                    }
+                },
+            },
+            # Modify NSD accordingly
+            "nsd": {
+                "constituent-vnfd": {
+                    "$[0]": {"vnfd-id-ref": "vdu-as-pdu"},
+                    "$[1]": None,
+                },
+                "description": "A nsd to deploy the vnf to act as a PDU",
+                "id": "nsd-as-pdu",
+                "name": "nsd-as-pdu",
+                "short-name": "nsd-as-pdu",
+                "vld": {
+                    "$[0]": {
+                        "id": "mgmt_pdu",
+                        "name": "mgmt_pdu",
+                        "short-name": "mgmt_pdu",
+                        "vnfd-connection-point-ref": {
+                            "$[0]": {
+                                "vnfd-connection-point-ref": "pdu-mgmt",
+                                "vnfd-id-ref": "vdu-as-pdu",
+                            },
+                            "$[1]": None,
+                        },
+                        "type": "ELAN",
+                    },
+                    "$[1]": None,
+                },
+            },
+        }
+
+
+class TestDeployHackfest3Charmed3(TestDeployHackfest3Charmed):
+    description = "Load and deploy a modified version of the Hackfest 3charmed_ns example to test scaling and NS parameters"
+
+    def __init__(self):
+        super().__init__()
+        self.test_name = "HACKFEST3v3-"
+        self.commands = {
+            "1": ["ls -lrt /home/ubuntu/first-touch-1"],
+            "2": ["ls -lrt /home/ubuntu/first-touch-2"],
+        }
+        self.descriptor_edit = {
+            "vnfd0": yaml.safe_load(
+                """
+                scaling-group-descriptor:
+                    -   name: "scale_dataVM"
+                        max-instance-count: 10
+                        scaling-policy:
+                        -   name: "auto_cpu_util_above_threshold"
+                            scaling-type: "automatic"
+                            threshold-time: 0
+                            cooldown-time: 60
+                            scaling-criteria:
+                            -   name: "cpu_util_above_threshold"
+                                scale-in-threshold: 15
+                                scale-in-relational-operation: "LE"
+                                scale-out-threshold: 60
+                                scale-out-relational-operation: "GE"
+                                vnf-monitoring-param-ref: "monitor1"
+                        vdu:
+                        -   vdu-id-ref: dataVM
+                            count: 1
+                        scaling-config-action:
+                        -   trigger: post-scale-out
+                            vnf-config-primitive-name-ref: touch
+                        -   trigger: pre-scale-in
+                            vnf-config-primitive-name-ref: touch
+                vdu:
+                    "$id: dataVM":
+                        monitoring-param:
+                        -   id: "dataVM_cpu_util"
+                            nfvi-metric: "cpu_utilization"
+
+                monitoring-param:
+                -   id: "monitor1"
+                    name: "monitor1"
+                    aggregation-type: AVERAGE
+                    vdu-monitoring-param:
+                      vdu-ref: "dataVM"
+                      vdu-monitoring-param-ref: "dataVM_cpu_util"
+                vnf-configuration:
+                    initial-config-primitive:
+                        "$[1]":
+                            parameter:
+                                "$[0]":
+                                    value: "<touch_filename>"   # default-value: /home/ubuntu/first-touch
+                    config-primitive:
+                        "$[0]":
+                            parameter:
+                                "$[0]":
+                                    default-value: "<touch_filename2>"
+                """,
+            )
+        }
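+        # The <touch_filename> and <touch_filename2> placeholders in the edited
+        # VNFD are expected to be filled from additionalParamsForVnf below at
+        # instantiation time, giving each member VNF its own file name.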
+        self.ns_params = {
+            "additionalParamsForVnf": [
+                {
+                    "member-vnf-index": "1",
+                    "additionalParams": {
+                        "touch_filename": "/home/ubuntu/first-touch-1",
+                        "touch_filename2": "/home/ubuntu/second-touch-1",
+                    },
+                },
+                {
+                    "member-vnf-index": "2",
+                    "additionalParams": {
+                        "touch_filename": "/home/ubuntu/first-touch-2",
+                        "touch_filename2": "/home/ubuntu/second-touch-2",
+                    },
+                },
+            ]
+        }
+
+    def additional_operations(self, engine, test_osm, manual_check):
+        super().additional_operations(engine, test_osm, manual_check)
+        if not test_osm:
+            return
+
+        # 2 perform scale out
+        payload = (
+            "{scaleType: SCALE_VNF, scaleVnfData: {scaleVnfType: SCALE_OUT, scaleByStepData: "
+            '{scaling-group-descriptor: scale_dataVM, member-vnf-index: "1"}}}'
+        )
+        engine.test(
+            "Execute scale action over NS",
+            "POST",
+            "/nslcm/v1/ns_instances/{}/scale".format(self.ns_id),
+            headers_yaml,
+            payload,
+            (201, 202),
+            r_headers_yaml_location_nslcmop,
+            "yaml",
+        )
+        nslcmop2_scale_out = engine.last_id
+        engine.wait_operation_ready("ns", nslcmop2_scale_out, timeout_deploy)
+        if manual_check:
+            input(
+                "NS scale out done. Check that file /home/ubuntu/second-touch-1 is present and a new VM is created"
+            )
+        if test_osm:
+            commands = {
+                "1": [
+                    "ls -lrt /home/ubuntu/second-touch-1",
+                ]
+            }
+            self.test_ns(engine, test_osm, commands=commands)
+            # TODO check automatic connection to scaled VM
+
+        # 2 perform scale in
+        payload = (
+            "{scaleType: SCALE_VNF, scaleVnfData: {scaleVnfType: SCALE_IN, scaleByStepData: "
+            '{scaling-group-descriptor: scale_dataVM, member-vnf-index: "1"}}}'
+        )
+        engine.test(
+            "Execute scale action over NS",
+            "POST",
+            "/nslcm/v1/ns_instances/{}/scale".format(self.ns_id),
+            headers_yaml,
+            payload,
+            (201, 202),
+            r_headers_yaml_location_nslcmop,
+            "yaml",
+        )
+        nslcmop2_scale_in = engine.last_id
+        engine.wait_operation_ready("ns", nslcmop2_scale_in, timeout_deploy)
+        if manual_check:
+            input(
+                "NS scale in done. Check that file /home/ubuntu/second-touch-1 is updated and the new VM is deleted"
+            )
+        # TODO check automatic
+
+
+class TestDeploySimpleCharm(TestDeploy):
+    description = "Deploy hackfest-4 hackfest_simplecharm example"
+
+    def __init__(self):
+        super().__init__()
+        self.test_name = "HACKFEST-SIMPLE"
+        self.descriptor_url = (
+            "https://osm-download.etsi.org/ftp/osm-4.0-four/4th-hackfest/packages/"
+        )
+        self.vnfd_filenames = ("hackfest_simplecharm_vnf.tar.gz",)
+        self.nsd_filename = "hackfest_simplecharm_ns.tar.gz"
+        self.uses_configuration = True
+        self.commands = {
+            "1": [""],
+            "2": [
+                "ls -lrt /home/ubuntu/first-touch",
+            ],
+        }
+        self.users = {"1": "ubuntu", "2": "ubuntu"}
+        self.passwords = {"1": "osm4u", "2": "osm4u"}
+
+
+class TestDeploySimpleCharm2(TestDeploySimpleCharm):
+    description = (
+        "Deploy hackfest-4 hackfest_simplecharm example changing naming to contain dots in ids and "
+        "member-vnf-index"
+    )
+
+    def __init__(self):
+        super().__init__()
+        self.test_name = "HACKFEST-SIMPLE2-"
+        self.qforce = "?FORCE=True"
+        self.descriptor_edit = {
+            "vnfd0": {"id": "hackfest.simplecharm.vnf"},
+            "nsd": {
+                "id": "hackfest.simplecharm.ns",
+                "constituent-vnfd": {
+                    "$[0]": {
+                        "vnfd-id-ref": "hackfest.simplecharm.vnf",
+                        "member-vnf-index": "$1",
+                    },
+                    "$[1]": {
+                        "vnfd-id-ref": "hackfest.simplecharm.vnf",
+                        "member-vnf-index": "$2",
+                    },
+                },
+                "vld": {
+                    "$[0]": {
+                        "vnfd-connection-point-ref": {
+                            "$[0]": {
+                                "member-vnf-index-ref": "$1",
+                                "vnfd-id-ref": "hackfest.simplecharm.vnf",
+                            },
+                            "$[1]": {
+                                "member-vnf-index-ref": "$2",
+                                "vnfd-id-ref": "hackfest.simplecharm.vnf",
+                            },
+                        },
+                    },
+                    "$[1]": {
+                        "vnfd-connection-point-ref": {
+                            "$[0]": {
+                                "member-vnf-index-ref": "$1",
+                                "vnfd-id-ref": "hackfest.simplecharm.vnf",
+                            },
+                            "$[1]": {
+                                "member-vnf-index-ref": "$2",
+                                "vnfd-id-ref": "hackfest.simplecharm.vnf",
+                            },
+                        },
+                    },
+                },
+            },
+        }
+
+
+class TestDeploySingleVdu(TestDeployHackfest3Charmed):
+    description = (
+        "Generate a single-VDU VNF based on editing the Hackfest3Charmed descriptors and deploy it"
+    )
+
+    def __init__(self):
+        super().__init__()
+        self.test_name = "SingleVDU"
+        self.qforce = "?FORCE=True"
+        self.descriptor_edit = {
+            # Modify VNFD to remove one VDU
+            "vnfd0": {
+                "vdu": {
+                    "$[0]": {
+                        "interface": {
+                            "$[0]": {"external-connection-point-ref": "pdu-mgmt"}
+                        }
+                    },
+                    "$[1]": None,
+                },
+                "vnf-configuration": None,
+                "connection-point": {
+                    "$[0]": {
+                        "id": "pdu-mgmt",
+                        "name": "pdu-mgmt",
+                        "short-name": "pdu-mgmt",
+                    },
+                    "$[1]": None,
+                },
+                "mgmt-interface": {"cp": "pdu-mgmt"},
+                "description": "A vnf single vdu to be used as PDU",
+                "id": "vdu-as-pdu",
+                "internal-vld": {
+                    "$[0]": {
+                        "id": "pdu_internal",
+                        "name": "pdu_internal",
+                        "internal-connection-point": {"$[1]": None},
+                        "short-name": "pdu_internal",
+                        "type": "ELAN",
+                    }
+                },
+            },
+            # Modify NSD accordingly
+            "nsd": {
+                "constituent-vnfd": {
+                    "$[0]": {"vnfd-id-ref": "vdu-as-pdu"},
+                    "$[1]": None,
+                },
+                "description": "A nsd to deploy the vnf to act as a PDU",
+                "id": "nsd-as-pdu",
+                "name": "nsd-as-pdu",
+                "short-name": "nsd-as-pdu",
+                "vld": {
+                    "$[0]": {
+                        "id": "mgmt_pdu",
+                        "name": "mgmt_pdu",
+                        "short-name": "mgmt_pdu",
+                        "vnfd-connection-point-ref": {
+                            "$[0]": {
+                                "vnfd-connection-point-ref": "pdu-mgmt",
+                                "vnfd-id-ref": "vdu-as-pdu",
+                            },
+                            "$[1]": None,
+                        },
+                        "type": "ELAN",
+                    },
+                    "$[1]": None,
+                },
+            },
+        }
+
+
+class TestDeployHnfd(TestDeployHackfest3Charmed):
+    description = (
+        "Generate an HNFD based on editing the Hackfest3Charmed descriptors and deploy it"
+    )
+
+    def __init__(self):
+        super().__init__()
+        self.test_name = "HNFD"
+        self.pduDeploy = TestDeploySingleVdu()
+        self.pdu_interface_0 = {}
+        self.pdu_interface_1 = {}
+
+        self.pdu_id = None
+        # self.vnf_to_pdu = """
+        #     vdu:
+        #         "$[0]":
+        #             pdu-type: PDU-TYPE-1
+        #             interface:
+        #                 "$[0]":
+        #                     name: mgmt-iface
+        #                 "$[1]":
+        #                     name: pdu-iface-internal
+        #     id: hfn1
+        #     description: HFND, one PDU + One VDU
+        #     name: hfn1
+        #     short-name: hfn1
+        #
+        # """
+
+        self.pdu_descriptor = {
+            "name": "my-PDU",
+            "type": "PDU-TYPE-1",
+            "vim_accounts": "to-override",
+            "interfaces": [
+                {
+                    "name": "mgmt-iface",
+                    "mgmt": True,
+                    "type": "overlay",
+                    "ip-address": "to override",
+                    "mac-address": "mac_address",
+                    "vim-network-name": "mgmt",
+                },
+                {
+                    "name": "pdu-iface-internal",
+                    "mgmt": False,
+                    "type": "overlay",
+                    "ip-address": "to override",
+                    "mac-address": "mac_address",
+                    "vim-network-name": "pdu_internal",  # OSMNBITEST-PDU-pdu_internal
+                },
+            ],
+        }
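+        # The "to override" / "to-override" values above are placeholders that
+        # create_descriptors() and run() replace with data taken from the VNF
+        # deployed as PDU (VIM account, IP and MAC addresses).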
+        self.vnfd_filenames = (
+            "hackfest_3charmed_vnfd.tar.gz",
+            "hackfest_3charmed_vnfd.tar.gz",
+        )
+
+        self.descriptor_edit = {
+            "vnfd0": {
+                "id": "hfnd1",
+                "name": "hfn1",
+                "short-name": "hfn1",
+                "vdu": {
+                    "$[0]": {
+                        "pdu-type": "PDU-TYPE-1",
+                        "interface": {
+                            "$[0]": {"name": "mgmt-iface"},
+                            "$[1]": {"name": "pdu-iface-internal"},
+                        },
+                    }
+                },
+            },
+            "nsd": {
+                "constituent-vnfd": {"$[1]": {"vnfd-id-ref": "hfnd1"}},
+                "vld": {
+                    "$[0]": {
+                        "vnfd-connection-point-ref": {"$[1]": {"vnfd-id-ref": "hfnd1"}}
+                    },
+                    "$[1]": {
+                        "vnfd-connection-point-ref": {"$[1]": {"vnfd-id-ref": "hfnd1"}}
+                    },
+                },
+            },
+        }
+
+    def create_descriptors(self, engine):
+        super().create_descriptors(engine)
+
+        # Create PDU
+        self.pdu_descriptor["interfaces"][0].update(self.pdu_interface_0)
+        self.pdu_descriptor["interfaces"][1].update(self.pdu_interface_1)
+        self.pdu_descriptor["vim_accounts"] = [self.vim_id]
+        # TODO get vim-network-name from vnfr.vld.name
+        self.pdu_descriptor["interfaces"][1]["vim-network-name"] = "{}-{}-{}".format(
+            os.environ.get("OSMNBITEST_NS_NAME", "OSMNBITEST"),
+            "PDU",
+            self.pdu_descriptor["interfaces"][1]["vim-network-name"],
+        )
+        engine.test(
+            "Onboard PDU descriptor",
+            "POST",
+            "/pdu/v1/pdu_descriptors",
+            {
+                "Location": "/pdu/v1/pdu_descriptors/",
+                "Content-Type": "application/yaml",
+            },
+            self.pdu_descriptor,
+            201,
+            r_header_yaml,
+            "yaml",
+        )
+        self.pdu_id = engine.last_id
+
+    def run(self, engine, test_osm, manual_check, test_params=None):
+        engine.get_autorization()
+        engine.set_test_name(self.test_name)
+        nsname = os.environ.get("OSMNBITEST_NS_NAME", "OSMNBITEST")
+
+        # create real VIM if not exist
+        self.vim_id = engine.get_create_vim(test_osm)
+        # instantiate PDU
+        self.pduDeploy.create_descriptors(engine)
+        self.pduDeploy.instantiate(
+            engine,
+            {
+                "nsDescription": "to be used as PDU",
+                "nsName": nsname + "-PDU",
+                "nsdId": self.pduDeploy.nsd_id,
+                "vimAccountId": self.vim_id,
+            },
+        )
+        if manual_check:
+            input(
+                "VNF to be used as PDU has been deployed. Perform manual check and press enter to resume"
+            )
+        if test_osm:
+            self.pduDeploy.test_ns(engine, test_osm)
+
+        if test_osm:
+            r = engine.test(
+                "Get VNFR to obtain IP_ADDRESS",
+                "GET",
+                "/nslcm/v1/vnfrs?nsr-id-ref={}".format(self.pduDeploy.ns_id),
+                headers_json,
+                None,
+                200,
+                r_header_json,
+                "json",
+            )
+            if not r:
+                return
+            vnfr_data = r.json()
+            # print(vnfr_data)
+
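+            # Interface 0 of the first VDU is used as the PDU management
+            # interface and interface 1 as its internal interface, matching the
+            # order of self.pdu_descriptor["interfaces"].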
+            self.pdu_interface_0["ip-address"] = vnfr_data[0]["vdur"][0]["interfaces"][
+                0
+            ].get("ip-address")
+            self.pdu_interface_1["ip-address"] = vnfr_data[0]["vdur"][0]["interfaces"][
+                1
+            ].get("ip-address")
+            self.pdu_interface_0["mac-address"] = vnfr_data[0]["vdur"][0]["interfaces"][
+                0
+            ].get("mac-address")
+            self.pdu_interface_1["mac-address"] = vnfr_data[0]["vdur"][0]["interfaces"][
+                1
+            ].get("mac-address")
+            if not self.pdu_interface_0["ip-address"]:
+                raise TestException("VNFR does not have a management IP address")
+        else:
+            self.pdu_interface_0["ip-address"] = "192.168.10.10"
+            self.pdu_interface_1["ip-address"] = "192.168.11.10"
+            self.pdu_interface_0["mac-address"] = "52:33:44:55:66:13"
+            self.pdu_interface_1["mac-address"] = "52:33:44:55:66:14"
+
+        self.create_descriptors(engine)
+
+        ns_data = {
+            "nsDescription": "default description",
+            "nsName": nsname,
+            "nsdId": self.nsd_id,
+            "vimAccountId": self.vim_id,
+        }
+        if test_params and test_params.get("ns-config"):
+            if isinstance(test_params["ns-config"], str):
+                ns_data.update(yaml.safe_load(test_params["ns-config"]))
+            else:
+                ns_data.update(test_params["ns-config"])
+
+        self.instantiate(engine, ns_data)
+        if manual_check:
+            input(
+                "NS has been deployed. Perform manual check and press enter to resume"
+            )
+        if test_osm:
+            self.test_ns(engine, test_osm)
+        self.additional_operations(engine, test_osm, manual_check)
+        self.terminate(engine)
+        self.pduDeploy.terminate(engine)
+        self.delete_descriptors(engine)
+        self.pduDeploy.delete_descriptors(engine)
+
+    def delete_descriptors(self, engine):
+        super().delete_descriptors(engine)
+        # delete pdu
+        engine.test(
+            "Delete PDU SOL005",
+            "DELETE",
+            "/pdu/v1/pdu_descriptors/{}".format(self.pdu_id),
+            headers_yaml,
+            None,
+            204,
+            None,
+            0,
+        )
+
+
+class TestDescriptors:
+    description = "Test VNFD, NSD, PDU descriptors CRUD and dependencies"
+    vnfd_empty = """vnfd:vnfd-catalog:
+        vnfd:
+        -   name: prova
+            short-name: prova
+            id: prova
+    """
+    vnfd_prova = """vnfd:vnfd-catalog:
+        vnfd:
+        -   connection-point:
+            -   name: cp_0h8m
+                type: VPORT
+            id: prova
+            name: prova
+            short-name: prova
+            vdu:
+            -   id: vdu_z4bm
+                image: ubuntu
+                interface:
+                -   external-connection-point-ref: cp_0h8m
+                    name: eth0
+                    virtual-interface:
+                    type: VIRTIO
+                name: vdu_z4bm
+            version: '1.0'
+    """
+
+    def __init__(self):
+        self.vnfd_filename = "hackfest_3charmed_vnfd.tar.gz"
+        self.nsd_filename = "hackfest_3charmed_nsd.tar.gz"
+        self.descriptor_url = (
+            "https://osm-download.etsi.org/ftp/osm-3.0-three/2nd-hackfest/packages/"
+        )
+        self.vnfd_id = None
+        self.nsd_id = None
+
+    def run(self, engine, test_osm, manual_check, test_params=None):
+        engine.set_test_name("Descriptors")
+        engine.get_autorization()
+        temp_dir = os.path.dirname(os.path.abspath(__file__)) + "/temp/"
+        if not os.path.exists(temp_dir):
+            os.makedirs(temp_dir)
+
+        # download files
+        for filename in (self.vnfd_filename, self.nsd_filename):
+            filename_path = temp_dir + filename
+            if not os.path.exists(filename_path):
+                # request first so that a failed download does not leave an
+                # empty file behind
+                response = requests.get(self.descriptor_url + filename)
+                if response.status_code >= 300:
+                    raise TestException(
+                        "Error downloading descriptor from '{}': {}".format(
+                            self.descriptor_url + filename, response.status_code
+                        )
+                    )
+                with open(filename_path, "wb") as file:
+                    file.write(response.content)
+
+        vnfd_filename_path = temp_dir + self.vnfd_filename
+        nsd_filename_path = temp_dir + self.nsd_filename
+
+        engine.test(
+            "Onboard empty VNFD in one step",
+            "POST",
+            "/vnfpkgm/v1/vnf_packages_content",
+            headers_yaml,
+            self.vnfd_empty,
+            201,
+            r_headers_yaml_location_vnfd,
+            "yaml",
+        )
+        self.vnfd_id = engine.last_id
+
+        # test bug 605
+        engine.test(
+            "Upload invalid VNFD ",
+            "PUT",
+            "/vnfpkgm/v1/vnf_packages/{}/package_content".format(self.vnfd_id),
+            headers_yaml,
+            self.vnfd_prova,
+            422,
+            r_header_yaml,
+            "yaml",
+        )
+
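+        # A payload starting with "@" is read from a local file by engine.test;
+        # "@b" is assumed here to mean the file is sent as binary content.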
+        engine.test(
+            "Upload VNFD {}".format(self.vnfd_filename),
+            "PUT",
+            "/vnfpkgm/v1/vnf_packages/{}/package_content".format(self.vnfd_id),
+            headers_zip_yaml,
+            "@b" + vnfd_filename_path,
+            204,
+            None,
+            0,
+        )
+
+        queries = [
+            "mgmt-interface.cp=mgmt",
+            "vdu.0.interface.0.external-connection-point-ref=mgmt",
+            "vdu.0.interface.1.internal-connection-point-ref=internal",
+            "internal-vld.0.internal-connection-point.0.id-ref=internal",
+            # Detection of duplicated VLD names in VNF Descriptors
+            # URL: internal-vld=[
+            #        {id: internal1, name: internal, type:ELAN,
+            #            internal-connection-point: [{id-ref: mgmtVM-internal}, {id-ref: dataVM-internal}]},
+            #        {id: internal2, name: internal, type:ELAN,
+            #            internal-connection-point: [{id-ref: mgmtVM-internal}, {id-ref: dataVM-internal}]}
+            #        ]
+            "internal-vld=%5B%7Bid%3A%20internal1%2C%20name%3A%20internal%2C%20type%3A%20ELAN%2C%20"
+            "internal-connection-point%3A%20%5B%7Bid-ref%3A%20mgmtVM-internal%7D%2C%20%7Bid-ref%3A%20"
+            "dataVM-internal%7D%5D%7D%2C%20%7Bid%3A%20internal2%2C%20name%3A%20internal%2C%20type%3A%20"
+            "ELAN%2C%20internal-connection-point%3A%20%5B%7Bid-ref%3A%20mgmtVM-internal%7D%2C%20%7B"
+            "id-ref%3A%20dataVM-internal%7D%5D%7D%5D",
+        ]
+        for query in queries:
+            engine.test(
+                "Upload invalid VNFD ",
+                "PUT",
+                "/vnfpkgm/v1/vnf_packages/{}/package_content?{}".format(
+                    self.vnfd_id, query
+                ),
+                headers_zip_yaml,
+                "@b" + vnfd_filename_path,
+                422,
+                r_header_yaml,
+                "yaml",
+            )
+
+        # test bug 605
+        engine.test(
+            "Upload invalid VNFD ",
+            "PUT",
+            "/vnfpkgm/v1/vnf_packages/{}/package_content".format(self.vnfd_id),
+            headers_yaml,
+            self.vnfd_prova,
+            422,
+            r_header_yaml,
+            "yaml",
+        )
+
+        # get vnfd descriptor
+        engine.test(
+            "Get VNFD descriptor",
+            "GET",
+            "/vnfpkgm/v1/vnf_packages/{}".format(self.vnfd_id),
+            headers_yaml,
+            None,
+            200,
+            r_header_yaml,
+            "yaml",
+        )
+
+        # get vnfd file descriptor
+        engine.test(
+            "Get VNFD file descriptor",
+            "GET",
+            "/vnfpkgm/v1/vnf_packages/{}/vnfd".format(self.vnfd_id),
+            headers_text,
+            None,
+            200,
+            r_header_text,
+            "text",
+            temp_dir + "vnfd-yaml",
+        )
+        # TODO compare files: diff vnfd-yaml hackfest_3charmed_vnfd/hackfest_3charmed_vnfd.yaml
+
+        # get vnfd zip file package
+        engine.test(
+            "Get VNFD zip package",
+            "GET",
+            "/vnfpkgm/v1/vnf_packages/{}/package_content".format(self.vnfd_id),
+            headers_zip,
+            None,
+            200,
+            r_header_zip,
+            "zip",
+            temp_dir + "vnfd-zip",
+        )
+        # TODO compare files: diff vnfd-zip hackfest_3charmed_vnfd.tar.gz
+
+        # get vnfd artifact
+        engine.test(
+            "Get VNFD artifact package",
+            "GET",
+            "/vnfpkgm/v1/vnf_packages/{}/artifacts/icons/osm.png".format(self.vnfd_id),
+            headers_zip,
+            None,
+            200,
+            r_header_octect,
+            "octet-string",
+            temp_dir + "vnfd-icon",
+        )
+        # TODO compare files: diff vnfd-icon hackfest_3charmed_vnfd/icons/osm.png
+
+        # nsd CREATE AND UPLOAD in one step:
+        engine.test(
+            "Onboard NSD in one step",
+            "POST",
+            "/nsd/v1/ns_descriptors_content",
+            headers_zip_yaml,
+            "@b" + nsd_filename_path,
+            201,
+            r_headers_yaml_location_nsd,
+            "yaml",
+        )
+        self.nsd_id = engine.last_id
+
+        queries = ["vld.0.vnfd-connection-point-ref.0.vnfd-id-ref=hf"]
+        for query in queries:
+            engine.test(
+                "Upload invalid NSD ",
+                "PUT",
+                "/nsd/v1/ns_descriptors/{}/nsd_content?{}".format(self.nsd_id, query),
+                headers_zip_yaml,
+                "@b" + nsd_filename_path,
+                422,
+                r_header_yaml,
+                "yaml",
+            )
+
+        # get nsd descriptor
+        engine.test(
+            "Get NSD descriptor",
+            "GET",
+            "/nsd/v1/ns_descriptors/{}".format(self.nsd_id),
+            headers_yaml,
+            None,
+            200,
+            r_header_yaml,
+            "yaml",
+        )
+
+        # get nsd file descriptor
+        engine.test(
+            "Get NSD file descriptor",
+            "GET",
+            "/nsd/v1/ns_descriptors/{}/nsd".format(self.nsd_id),
+            headers_text,
+            None,
+            200,
+            r_header_text,
+            "text",
+            temp_dir + "nsd-yaml",
+        )
+        # TODO compare files: diff nsd-yaml hackfest_3charmed_nsd/hackfest_3charmed_nsd.yaml
+
+        # get nsd zip file package
+        engine.test(
+            "Get NSD zip package",
+            "GET",
+            "/nsd/v1/ns_descriptors/{}/nsd_content".format(self.nsd_id),
+            headers_zip,
+            None,
+            200,
+            r_header_zip,
+            "zip",
+            temp_dir + "nsd-zip",
+        )
+        # TODO compare files: diff nsd-zip hackfest_3charmed_nsd.tar.gz
+
+        # get nsd artifact
+        engine.test(
+            "Get NSD artifact package",
+            "GET",
+            "/nsd/v1/ns_descriptors/{}/artifacts/icons/osm.png".format(self.nsd_id),
+            headers_zip,
+            None,
+            200,
+            r_header_octect,
+            "octet-string",
+            temp_dir + "nsd-icon",
+        )
+        # TODO compare files: diff nsd-icon hackfest_3charmed_nsd/icons/osm.png
+
+        # vnfd DELETE
+        engine.test(
+            "Delete VNFD conflict",
+            "DELETE",
+            "/vnfpkgm/v1/vnf_packages/{}".format(self.vnfd_id),
+            headers_yaml,
+            None,
+            409,
+            None,
+            None,
+        )
+
+        engine.test(
+            "Delete VNFD force",
+            "DELETE",
+            "/vnfpkgm/v1/vnf_packages/{}?FORCE=TRUE".format(self.vnfd_id),
+            headers_yaml,
+            None,
+            204,
+            None,
+            0,
+        )
+
+        # nsd DELETE
+        engine.test(
+            "Delete NSD",
+            "DELETE",
+            "/nsd/v1/ns_descriptors/{}".format(self.nsd_id),
+            headers_yaml,
+            None,
+            204,
+            None,
+            0,
+        )
+
+
+class TestNetSliceTemplates:
+    description = "Upload an NST to OSM"
+
+    def __init__(self):
+        self.vnfd_filename = "@./slice_shared/vnfd/slice_shared_vnfd.yaml"
+        self.vnfd_filename_middle = "@./slice_shared/vnfd/slice_shared_middle_vnfd.yaml"
+        self.nsd_filename = "@./slice_shared/nsd/slice_shared_nsd.yaml"
+        self.nsd_filename_middle = "@./slice_shared/nsd/slice_shared_middle_nsd.yaml"
+        self.nst_filenames = "@./slice_shared/slice_shared_nstd.yaml"
+
+    def run(self, engine, test_osm, manual_check, test_params=None):
+        # nst CREATE
+        engine.set_test_name("NST step ")
+        engine.get_autorization()
+        temp_dir = os.path.dirname(os.path.abspath(__file__)) + "/temp/"
+        if not os.path.exists(temp_dir):
+            os.makedirs(temp_dir)
+
+        # Onboard VNFDs
+        engine.test(
+            "Onboard edge VNFD",
+            "POST",
+            "/vnfpkgm/v1/vnf_packages_content",
+            headers_yaml,
+            self.vnfd_filename,
+            201,
+            r_headers_yaml_location_vnfd,
+            "yaml",
+        )
+        self.vnfd_edge_id = engine.last_id
+
+        engine.test(
+            "Onboard middle VNFD",
+            "POST",
+            "/vnfpkgm/v1/vnf_packages_content",
+            headers_yaml,
+            self.vnfd_filename_middle,
+            201,
+            r_headers_yaml_location_vnfd,
+            "yaml",
+        )
+        self.vnfd_middle_id = engine.last_id
+
+        # Onboard NSDs
+        engine.test(
+            "Onboard NSD edge",
+            "POST",
+            "/nsd/v1/ns_descriptors_content",
+            headers_yaml,
+            self.nsd_filename,
+            201,
+            r_headers_yaml_location_nsd,
+            "yaml",
+        )
+        self.nsd_edge_id = engine.last_id
+
+        engine.test(
+            "Onboard NSD middle",
+            "POST",
+            "/nsd/v1/ns_descriptors_content",
+            headers_yaml,
+            self.nsd_filename_middle,
+            201,
+            r_headers_yaml_location_nsd,
+            "yaml",
+        )
+        self.nsd_middle_id = engine.last_id
+
+        # Onboard NST
+        engine.test(
+            "Onboard NST",
+            "POST",
+            "/nst/v1/netslice_templates_content",
+            headers_yaml,
+            self.nst_filenames,
+            201,
+            r_headers_yaml_location_nst,
+            "yaml",
+        )
+        nst_id = engine.last_id
+
+        # nstd SHOW OSM format
+        engine.test(
+            "Show NSTD OSM format",
+            "GET",
+            "/nst/v1/netslice_templates/{}".format(nst_id),
+            headers_json,
+            None,
+            200,
+            r_header_json,
+            "json",
+        )
+
+        # nstd DELETE
+        engine.test(
+            "Delete NSTD",
+            "DELETE",
+            "/nst/v1/netslice_templates/{}".format(nst_id),
+            headers_json,
+            None,
+            204,
+            None,
+            0,
+        )
+
+        # NSDs DELETE
+        engine.test(
+            "Delete NSD middle",
+            "DELETE",
+            "/nsd/v1/ns_descriptors/{}".format(self.nsd_middle_id),
+            headers_json,
+            None,
+            204,
+            None,
+            0,
+        )
+
+        engine.test(
+            "Delete NSD edge",
+            "DELETE",
+            "/nsd/v1/ns_descriptors/{}".format(self.nsd_edge_id),
+            headers_json,
+            None,
+            204,
+            None,
+            0,
+        )
+
+        # VNFDs DELETE
+        engine.test(
+            "Delete VNFD edge",
+            "DELETE",
+            "/vnfpkgm/v1/vnf_packages/{}".format(self.vnfd_edge_id),
+            headers_yaml,
+            None,
+            204,
+            None,
+            0,
+        )
+
+        engine.test(
+            "Delete VNFD middle",
+            "DELETE",
+            "/vnfpkgm/v1/vnf_packages/{}".format(self.vnfd_middle_id),
+            headers_yaml,
+            None,
+            204,
+            None,
+            0,
+        )
+
+
+class TestNetSliceInstances:
+    """
+    Test procedure:
+    1. Populate databases with VNFD, NSD, NST with the following scenario
+       +-----------------management-----------------+
+       |                     |                      |
+    +--+---+            +----+----+             +---+--+
+    |      |            |         |             |      |
+    | edge +---data1----+  middle +---data2-----+ edge |
+    |      |            |         |             |      |
+    +------+            +---------+             +------+
+                        shared-nss
+    2. Create NSI-1
+    3. Instantiate NSI-1
+    4. Create NSI-2
+    5. Instantiate NSI-2
+        Manual check - Are 2 slices instantiated correctly?
+        NSI-1 3 nss (2 nss-edges + 1 nss-middle)
+        NSI-2 2 nss (2 nss-edge sharing nss-middle)
+    6. Terminate NSI-1
+    7. Delete NSI-1
+        Manual check - Is slice NSI-1 deleted correctly?
+        NSI-2 with 2 nss-edge + 1 nss-middle (The one from NSI-1)
+    8. Create NSI-3
+    9. Instantiate NSI-3
+        Manual check - Is slice NSI-3 instantiated correctly?
+        NSI-3 reuse nss-middle. NSI-3 only create 2 nss-edge
+    10. Terminate NSI-2
+    11. Delete NSI-2
+    12. Terminate NSI-3
+    13. Delete NSI-3
+        Manual check - All cleaned correctly?
+        NSI-2 and NSI-3 were terminated and deleted
+    14. Cleanup database
+    """
+
+    description = "Create, instantiate, share and delete network slice instances"
+
+    def __init__(self):
+        self.vim_id = None
+        self.vnfd_filename = "@./slice_shared/vnfd/slice_shared_vnfd.yaml"
+        self.vnfd_filename_middle = "@./slice_shared/vnfd/slice_shared_middle_vnfd.yaml"
+        self.nsd_filename = "@./slice_shared/nsd/slice_shared_nsd.yaml"
+        self.nsd_filename_middle = "@./slice_shared/nsd/slice_shared_middle_nsd.yaml"
+        self.nst_filenames = "@./slice_shared/slice_shared_nstd.yaml"
+
+    def create_slice(self, engine, nsi_data, name):
+        ns_data_text = yaml.safe_dump(nsi_data, default_flow_style=True, width=256)
+        r = engine.test(
+            name,
+            "POST",
+            "/nsilcm/v1/netslice_instances",
+            headers_yaml,
+            ns_data_text,
+            (201, 202),
+            {
+                "Location": "nsilcm/v1/netslice_instances/",
+                "Content-Type": "application/yaml",
+            },
+            "yaml",
+        )
+        return r
+
+    def instantiate_slice(self, engine, nsi_data, nsi_id, name):
+        ns_data_text = yaml.safe_dump(nsi_data, default_flow_style=True, width=256)
+        engine.test(
+            name,
+            "POST",
+            "/nsilcm/v1/netslice_instances/{}/instantiate".format(nsi_id),
+            headers_yaml,
+            ns_data_text,
+            (201, 202),
+            r_headers_yaml_location_nsilcmop,
+            "yaml",
+        )
+
+    def terminate_slice(self, engine, nsi_id, name):
+        engine.test(
+            name,
+            "POST",
+            "/nsilcm/v1/netslice_instances/{}/terminate".format(nsi_id),
+            headers_yaml,
+            None,
+            (201, 202),
+            r_headers_yaml_location_nsilcmop,
+            "yaml",
+        )
+
+    def delete_slice(self, engine, nsi_id, name):
+        engine.test(
+            name,
+            "DELETE",
+            "/nsilcm/v1/netslice_instances/{}".format(nsi_id),
+            headers_yaml,
+            None,
+            204,
+            None,
+            0,
+        )
+
+    def run(self, engine, test_osm, manual_check, test_params=None):
+        # nst CREATE
+        engine.set_test_name("NSI")
+        engine.get_autorization()
+
+        # Onboard VNFDs
+        engine.test(
+            "Onboard edge VNFD",
+            "POST",
+            "/vnfpkgm/v1/vnf_packages_content",
+            headers_yaml,
+            self.vnfd_filename,
+            201,
+            r_headers_yaml_location_vnfd,
+            "yaml",
+        )
+        self.vnfd_edge_id = engine.last_id
+
+        engine.test(
+            "Onboard middle VNFD",
+            "POST",
+            "/vnfpkgm/v1/vnf_packages_content",
+            headers_yaml,
+            self.vnfd_filename_middle,
+            201,
+            r_headers_yaml_location_vnfd,
+            "yaml",
+        )
+        self.vnfd_middle_id = engine.last_id
+
+        # Onboard NSDs
+        engine.test(
+            "Onboard NSD edge",
+            "POST",
+            "/nsd/v1/ns_descriptors_content",
+            headers_yaml,
+            self.nsd_filename,
+            201,
+            r_headers_yaml_location_nsd,
+            "yaml",
+        )
+        self.nsd_edge_id = engine.last_id
+
+        engine.test(
+            "Onboard NSD middle",
+            "POST",
+            "/nsd/v1/ns_descriptors_content",
+            headers_yaml,
+            self.nsd_filename_middle,
+            201,
+            r_headers_yaml_location_nsd,
+            "yaml",
+        )
+        self.nsd_middle_id = engine.last_id
+
+        # Onboard NST
+        engine.test(
+            "Onboard NST",
+            "POST",
+            "/nst/v1/netslice_templates_content",
+            headers_yaml,
+            self.nst_filenames,
+            201,
+            r_headers_yaml_location_nst,
+            "yaml",
+        )
+        nst_id = engine.last_id
+
+        self.vim_id = engine.get_create_vim(test_osm)
+
+        # CREATE NSI-1
+        ns_data = {
+            "nsiName": "Deploy-NSI-1",
+            "vimAccountId": self.vim_id,
+            "nstId": nst_id,
+            "nsiDescription": "default",
+        }
+        r = self.create_slice(engine, ns_data, "Create NSI-1 step 1")
+        if not r:
+            return
+        self.nsi_id1 = engine.last_id
+
+        # INSTANTIATE NSI-1
+        self.instantiate_slice(
+            engine, ns_data, self.nsi_id1, "Instantiate NSI-1 step 2"
+        )
+        nsilcmop_id1 = engine.last_id
+
+        # Waiting for NSI-1
+        if test_osm:
+            engine.wait_operation_ready("nsi", nsilcmop_id1, timeout_deploy)
+
+        # CREATE NSI-2
+        ns_data = {
+            "nsiName": "Deploy-NSI-2",
+            "vimAccountId": self.vim_id,
+            "nstId": nst_id,
+            "nsiDescription": "default",
+        }
+        r = self.create_slice(engine, ns_data, "Create NSI-2 step 1")
+        if not r:
+            return
+        self.nsi_id2 = engine.last_id
+
+        # INSTANTIATE NSI-2
+        self.instantiate_slice(
+            engine, ns_data, self.nsi_id2, "Instantiate NSI-2 step 2"
+        )
+        nsilcmop_id2 = engine.last_id
+
+        # Waiting for NSI-2
+        if test_osm:
+            engine.wait_operation_ready("nsi", nsilcmop_id2, timeout_deploy)
+
+        if manual_check:
+            input(
+                "NSI-1 and NSI-2 have been deployed. Perform manual check and press enter to resume"
+            )
+
+        # TERMINATE NSI-1
+        if test_osm:
+            self.terminate_slice(engine, self.nsi_id1, "Terminate NSI-1")
+            nsilcmop1_id = engine.last_id
+
+            # Wait terminate NSI-1
+            engine.wait_operation_ready("nsi", nsilcmop1_id, timeout_deploy)
+
+        # DELETE NSI-1
+        self.delete_slice(engine, self.nsi_id1, "Delete NSI-1")
+
+        if manual_check:
+            input(
+                "NSI-1 has been deleted. Perform manual check and press enter to resume"
+            )
+
+        # CREATE NSI-3
+        ns_data = {
+            "nsiName": "Deploy-NSI-3",
+            "vimAccountId": self.vim_id,
+            "nstId": nst_id,
+            "nsiDescription": "default",
+        }
+        r = self.create_slice(engine, ns_data, "Create NSI-3 step 1")
+
+        if not r:
+            return
+        self.nsi_id3 = engine.last_id
+
+        # INSTANTIATE NSI-3
+        self.instantiate_slice(
+            engine, ns_data, self.nsi_id3, "Instantiate NSI-3 step 2"
+        )
+        nsilcmop_id3 = engine.last_id
+
+        # Wait Instantiate NSI-3
+        if test_osm:
+            engine.wait_operation_ready("nsi", nsilcmop_id3, timeout_deploy)
+
+        if manual_check:
+            input(
+                "NSI-3 has been deployed. Perform manual check and press enter to resume"
+            )
+
+        # TERMINATE NSI-2
+        if test_osm:
+            self.terminate_slice(engine, self.nsi_id2, "Terminate NSI-2")
+            nsilcmop2_id = engine.last_id
+
+            # Wait terminate NSI-2
+            engine.wait_operation_ready("nsi", nsilcmop2_id, timeout_deploy)
+
+        # DELETE NSI-2
+        self.delete_slice(engine, self.nsi_id2, "Delete NSI-2")
+
+        # TERMINATE NSI-3
+        if test_osm:
+            self.terminate_slice(engine, self.nsi_id3, "Terminate NSI-3")
+            nsilcmop3_id = engine.last_id
+
+            # Wait terminate NSI-3
+            engine.wait_operation_ready("nsi", nsilcmop3_id, timeout_deploy)
+
+        # DELETE NSI-3
+        self.delete_slice(engine, self.nsi_id3, "Delete NSI-3")
+
+        if manual_check:
+            input(
+                "NSI-2 and NSI-3 have been deleted. Perform manual check and press enter to resume"
+            )
+
+        # nstd DELETE
+        engine.test(
+            "Delete NSTD",
+            "DELETE",
+            "/nst/v1/netslice_templates/{}".format(nst_id),
+            headers_json,
+            None,
+            204,
+            None,
+            0,
+        )
+
+        # NSDs DELETE
+        engine.test(
+            "Delete NSD middle",
+            "DELETE",
+            "/nsd/v1/ns_descriptors/{}".format(self.nsd_middle_id),
+            headers_json,
+            None,
+            204,
+            None,
+            0,
+        )
+
+        engine.test(
+            "Delete NSD edge",
+            "DELETE",
+            "/nsd/v1/ns_descriptors/{}".format(self.nsd_edge_id),
+            headers_json,
+            None,
+            204,
+            None,
+            0,
+        )
+
+        # VNFDs DELETE
+        engine.test(
+            "Delete VNFD edge",
+            "DELETE",
+            "/vnfpkgm/v1/vnf_packages/{}".format(self.vnfd_edge_id),
+            headers_yaml,
+            None,
+            204,
+            None,
+            0,
+        )
+
+        engine.test(
+            "Delete VNFD middle",
+            "DELETE",
+            "/vnfpkgm/v1/vnf_packages/{}".format(self.vnfd_middle_id),
+            headers_yaml,
+            None,
+            204,
+            None,
+            0,
+        )
+
+
+class TestAuthentication:
+    description = "Test Authentication"
+
+    @staticmethod
+    def run(engine, test_osm, manual_check, test_params=None):
+        engine.set_test_name("Authentication")
+        # backend = test_params.get("backend") if test_params else None   # UNUSED
+
+        admin_project_id = test_project_id = None
+        project_admin_role_id = project_user_role_id = None
+        test_user_id = empty_user_id = None
+        default_role_id = empty_role_id = token_role_id = None
+
+        engine.get_autorization()
+
+        # GET
+        engine.test(
+            "Get tokens",
+            "GET",
+            "/admin/v1/tokens",
+            headers_json,
+            {},
+            (200),
+            {"Content-Type": "application/json"},
+            "json",
+        )
+        engine.test(
+            "Get projects",
+            "GET",
+            "/admin/v1/projects",
+            headers_json,
+            {},
+            (200),
+            {"Content-Type": "application/json"},
+            "json",
+        )
+        engine.test(
+            "Get users",
+            "GET",
+            "/admin/v1/users",
+            headers_json,
+            {},
+            (200),
+            {"Content-Type": "application/json"},
+            "json",
+        )
+        engine.test(
+            "Get roles",
+            "GET",
+            "/admin/v1/roles",
+            headers_json,
+            {},
+            (200),
+            {"Content-Type": "application/json"},
+            "json",
+        )
+        res = engine.test(
+            "Get admin project",
+            "GET",
+            "/admin/v1/projects?name=admin",
+            headers_json,
+            {},
+            (200),
+            {"Content-Type": "application/json"},
+            "json",
+        )
+        admin_project_id = res.json()[0]["_id"] if res else None
+        res = engine.test(
+            "Get project admin role",
+            "GET",
+            "/admin/v1/roles?name=project_admin",
+            headers_json,
+            {},
+            (200),
+            {"Content-Type": "application/json"},
+            "json",
+        )
+        project_admin_role_id = res.json()[0]["_id"] if res else None
+        res = engine.test(
+            "Get project user role",
+            "GET",
+            "/admin/v1/roles?name=project_user",
+            headers_json,
+            {},
+            (200),
+            {"Content-Type": "application/json"},
+            "json",
+        )
+        project_user_role_id = res.json()[0]["_id"] if res else None
+
+        # POST
+        res = engine.test(
+            "Create test project",
+            "POST",
+            "/admin/v1/projects",
+            headers_json,
+            {"name": "test"},
+            (201),
+            {"Location": "/admin/v1/projects/", "Content-Type": "application/json"},
+            "json",
+        )
+        test_project_id = engine.last_id if res else None
+        res = engine.test(
+            "Create role without permissions",
+            "POST",
+            "/admin/v1/roles",
+            headers_json,
+            {"name": "empty"},
+            (201),
+            {"Content-Type": "application/json"},
+            "json",
+        )
+        empty_role_id = engine.last_id if res else None
+        res = engine.test(
+            "Create role with default permissions",
+            "POST",
+            "/admin/v1/roles",
+            headers_json,
+            {"name": "default", "permissions": {"default": True}},
+            (201),
+            {"Location": "/admin/v1/roles/", "Content-Type": "application/json"},
+            "json",
+        )
+        default_role_id = engine.last_id if res else None
+        res = engine.test(
+            "Create role with token permissions",
+            "POST",
+            "/admin/v1/roles",
+            headers_json,
+            {
+                "name": "tokens",
+                "permissions": {"tokens": True},
+            },  # is default required ?
+            (201),
+            {"Location": "/admin/v1/roles/", "Content-Type": "application/json"},
+            "json",
+        )
+        token_role_id = engine.last_id if res else None
+        pr = "project-role mappings"
+        res = engine.test(
+            "Create user without " + pr,
+            "POST",
+            "/admin/v1/users",
+            headers_json,
+            {"username": "empty", "password": "empty"},
+            201,
+            {"Content-Type": "application/json"},
+            "json",
+        )
+        empty_user_id = engine.last_id if res else None
+        if (
+            admin_project_id
+            and test_project_id
+            and project_admin_role_id
+            and project_user_role_id
+        ):
+            data = {"username": "test", "password": "test"}
+            data["project_role_mappings"] = [
+                {"project": test_project_id, "role": project_admin_role_id},
+                {"project": admin_project_id, "role": project_user_role_id},
+            ]
+            res = engine.test(
+                "Create user with " + pr,
+                "POST",
+                "/admin/v1/users",
+                headers_json,
+                data,
+                (201),
+                {"Content-Type": "application/json"},
+                "json",
+            )
+            test_user_id = engine.last_id if res else None
+
+        # PUT
+        if test_user_id:
+            engine.test(
+                "Modify test user's password",
+                "PUT",
+                "/admin/v1/users/" + test_user_id,
+                headers_json,
+                {"password": "password"},
+                (204),
+                {},
+                0,
+            )
+        if (
+            empty_user_id
+            and admin_project_id
+            and test_project_id
+            and project_admin_role_id
+            and project_user_role_id
+        ):
+            data = {
+                "project_role_mappings": [
+                    {"project": test_project_id, "role": project_admin_role_id},
+                    {"project": admin_project_id, "role": project_user_role_id},
+                ]
+            }
+            engine.test(
+                "Modify empty user's " + pr,
+                "PUT",
+                "/admin/v1/users/" + empty_user_id,
+                headers_json,
+                data,
+                (204),
+                {},
+                0,
+            )
+
+        # DELETE
+        if empty_user_id:
+            engine.test(
+                "Delete empty user",
+                "DELETE",
+                "/admin/v1/users/" + empty_user_id,
+                headers_json,
+                {},
+                (204),
+                {},
+                0,
+            )
+        if test_user_id:
+            engine.test(
+                "Delete test user",
+                "DELETE",
+                "/admin/v1/users/" + test_user_id,
+                headers_json,
+                {},
+                (204),
+                {},
+                0,
+            )
+        if empty_role_id:
+            engine.test(
+                "Delete empty role",
+                "DELETE",
+                "/admin/v1/roles/" + empty_role_id,
+                headers_json,
+                {},
+                (204),
+                {},
+                0,
+            )
+        if default_role_id:
+            engine.test(
+                "Delete default role",
+                "DELETE",
+                "/admin/v1/roles/" + default_role_id,
+                headers_json,
+                {},
+                (204),
+                {},
+                0,
+            )
+        if token_role_id:
+            engine.test(
+                "Delete token role",
+                "DELETE",
+                "/admin/v1/roles/" + token_role_id,
+                headers_json,
+                {},
+                (204),
+                {},
+                0,
+            )
+        if test_project_id:
+            engine.test(
+                "Delete test project",
+                "DELETE",
+                "/admin/v1/projects/" + test_project_id,
+                headers_json,
+                {},
+                (204),
+                {},
+                0,
+            )
+
+        # END Tests
+
+        engine.remove_authorization()  # To finish
+
+
+class TestNbiQuotas:
+    description = "Test NBI Quotas"
+
+    @staticmethod
+    def run(engine, test_osm, manual_check, test_params=None):
+        engine.set_test_name("NBI-Quotas_")
+        # backend = test_params.get("backend") if test_params else None   # UNUSED
+
+        test_username = "test-nbi-quotas"
+        test_password = "test-nbi-quotas"
+        test_project = "test-nbi-quotas"
+
+        test_vim = "test-nbi-quotas"
+        test_wim = "test-nbi-quotas"
+        test_sdn = "test-nbi-quotas"
+
+        test_user_id = None
+        test_project_id = None
+
+        test_vim_ids = []
+        test_wim_ids = []
+        test_sdn_ids = []
+        test_vnfd_ids = []
+        test_nsd_ids = []
+        test_nst_ids = []
+        test_pdu_ids = []
+        test_nsr_ids = []
+        test_nsi_ids = []
+
+        # Save admin access data
+        admin_username = engine.user
+        admin_password = engine.password
+        admin_project = engine.project
+
+        # Get admin access
+        engine.get_autorization()
+        admin_token = engine.last_id
+
+        # Check that the test project and user do not exist
+        res1 = engine.test(
+            "Check that test project doesn't exist",
+            "GET",
+            "/admin/v1/projects/" + test_project,
+            headers_json,
+            {},
+            (404),
+            {},
+            True,
+        )
+        res2 = engine.test(
+            "Check that test user doesn't exist",
+            "GET",
+            "/admin/v1/users/" + test_username,
+            headers_json,
+            {},
+            (404),
+            {},
+            True,
+        )
+        if None in [res1, res2]:
+            engine.remove_authorization()
+            logger.error("Test project and/or user already exist")
+            return
+
+        # Create test project and user
+        res = engine.test(
+            "Create test project",
+            "POST",
+            "/admin/v1/projects",
+            headers_json,
+            {
+                "name": test_username,
+                "quotas": {
+                    "vnfds": 2,
+                    "nsds": 2,
+                    "nsts": 1,
+                    "pdus": 1,
+                    "nsrs": 2,
+                    "nsis": 1,
+                    "vim_accounts": 1,
+                    "wim_accounts": 1,
+                    "sdns": 1,
+                },
+            },
+            (201),
+            r_header_json,
+            "json",
+        )
+        test_project_id = engine.last_id if res else None
+        res = engine.test(
+            "Create test user",
+            "POST",
+            "/admin/v1/users",
+            headers_json,
+            {
+                "username": test_username,
+                "password": test_password,
+                "project_role_mappings": [
+                    {"project": test_project, "role": "project_admin"}
+                ],
+            },
+            (201),
+            r_header_json,
+            "json",
+        )
+        test_user_id = engine.last_id if res else None
+
+        if test_project_id and test_user_id:
+            # Get user access
+            engine.token = None
+            engine.user = test_username
+            engine.password = test_password
+            engine.project = test_project
+            engine.get_autorization()
+            user_token = engine.last_id
+
+            # Create test VIM
+            res = engine.test(
+                "Create test VIM",
+                "POST",
+                "/admin/v1/vim_accounts",
+                headers_json,
+                {
+                    "name": test_vim,
+                    "vim_type": "openvim",
+                    "vim_user": test_username,
+                    "vim_password": test_password,
+                    "vim_tenant_name": test_project,
+                    "vim_url": "https://0.0.0.0:0/v0.0",
+                },
+                (202),
+                r_header_json,
+                "json",
+            )
+            test_vim_ids += [engine.last_id if res else None]
+
+            res = engine.test(
+                "Try to create second test VIM",
+                "POST",
+                "/admin/v1/vim_accounts",
+                headers_json,
+                {
+                    "name": test_vim + "_2",
+                    "vim_type": "openvim",
+                    "vim_user": test_username,
+                    "vim_password": test_password,
+                    "vim_tenant_name": test_project,
+                    "vim_url": "https://0.0.0.0:0/v0.0",
+                },
+                (422),
+                r_header_json,
+                "json",
+            )
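+            # A None result means the expected 422 was not returned and a VIM may have been
+            # created despite the quota, so keep its id so that the cleanup removes it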
+            test_vim_ids += [engine.last_id if res is None else None]
+
+            res = engine.test(
+                "Try to create second test VIM with FORCE",
+                "POST",
+                "/admin/v1/vim_accounts?FORCE",
+                headers_json,
+                {
+                    "name": test_vim + "_3",
+                    "vim_type": "openvim",
+                    "vim_user": test_username,
+                    "vim_password": test_password,
+                    "vim_tenant_name": test_project,
+                    "vim_url": "https://0.0.0.0:0/v0.0",
+                },
+                (202),
+                r_header_json,
+                "json",
+            )
+            test_vim_ids += [engine.last_id if res else None]
+
+            if test_vim_ids[0]:
+                # Download descriptor files (if required)
+                test_dir = "/tmp/" + test_username + "/"
+                test_url = "https://osm-download.etsi.org/ftp/osm-6.0-six/7th-hackfest/packages/"
+                vnfd_filenames = [
+                    "slice_hackfest_vnfd.tar.gz",
+                    "slice_hackfest_middle_vnfd.tar.gz",
+                ]
+                nsd_filenames = [
+                    "slice_hackfest_nsd.tar.gz",
+                    "slice_hackfest_middle_nsd.tar.gz",
+                ]
+                nst_filenames = ["slice_hackfest_nstd.yaml"]
+                pdu_filenames = ["PDU_router.yaml"]
+                desc_filenames = (
+                    vnfd_filenames + nsd_filenames + nst_filenames + pdu_filenames
+                )
+                if not os.path.exists(test_dir):
+                    os.makedirs(test_dir)
+                for filename in desc_filenames:
+                    if not os.path.exists(test_dir + filename):
+                        res = requests.get(test_url + filename)
+                        if res.status_code < 300:
+                            with open(test_dir + filename, "wb") as file:
+                                file.write(res.content)
+
+                if all([os.path.exists(test_dir + p) for p in desc_filenames]):
+                    # Test VNFD Quotas
+                    res = engine.test(
+                        "Create test VNFD #1",
+                        "POST",
+                        "/vnfpkgm/v1/vnf_packages_content",
+                        headers_zip_json,
+                        "@b" + test_dir + vnfd_filenames[0],
+                        (201),
+                        r_header_json,
+                        "json",
+                    )
+                    test_vnfd_ids += [engine.last_id if res else None]
+                    res = engine.test(
+                        "Create test VNFD #2",
+                        "POST",
+                        "/vnfpkgm/v1/vnf_packages_content",
+                        headers_zip_json,
+                        "@b" + test_dir + vnfd_filenames[1],
+                        (201),
+                        r_header_json,
+                        "json",
+                    )
+                    test_vnfd_ids += [engine.last_id if res else None]
+                    res = engine.test(
+                        "Try to create extra test VNFD",
+                        "POST",
+                        "/vnfpkgm/v1/vnf_packages_content",
+                        headers_zip_json,
+                        "@b" + test_dir + vnfd_filenames[0],
+                        (422),
+                        r_header_json,
+                        "json",
+                    )
+                    test_vnfd_ids += [engine.last_id if res is None else None]
+                    res = engine.test(
+                        "Try to create extra test VNFD with FORCE",
+                        "POST",
+                        "/vnfpkgm/v1/vnf_packages_content?FORCE",
+                        headers_zip_json,
+                        "@b" + test_dir + vnfd_filenames[0],
+                        (201),
+                        r_header_json,
+                        "json",
+                    )
+                    test_vnfd_ids += [engine.last_id if res else None]
+
+                    # Remove extra VNFDs to prevent further errors
+                    for i in [2, 3]:
+                        if test_vnfd_ids[i]:
+                            res = engine.test(
+                                "Delete test VNFD #" + str(i),
+                                "DELETE",
+                                "/vnfpkgm/v1/vnf_packages_content/"
+                                + test_vnfd_ids[i]
+                                + "?FORCE",
+                                headers_json,
+                                {},
+                                (204),
+                                {},
+                                0,
+                            )
+                            if res:
+                                test_vnfd_ids[i] = None
+
+                    if test_vnfd_ids[0] and test_vnfd_ids[1]:
+                        # Test NSD Quotas
+                        res = engine.test(
+                            "Create test NSD #1",
+                            "POST",
+                            "/nsd/v1/ns_descriptors_content",
+                            headers_zip_json,
+                            "@b" + test_dir + nsd_filenames[0],
+                            (201),
+                            r_header_json,
+                            "json",
+                        )
+                        test_nsd_ids += [engine.last_id if res else None]
+                        res = engine.test(
+                            "Create test NSD #2",
+                            "POST",
+                            "/nsd/v1/ns_descriptors_content",
+                            headers_zip_json,
+                            "@b" + test_dir + nsd_filenames[1],
+                            (201),
+                            r_header_json,
+                            "json",
+                        )
+                        test_nsd_ids += [engine.last_id if res else None]
+                        res = engine.test(
+                            "Try to create extra test NSD",
+                            "POST",
+                            "/nsd/v1/ns_descriptors_content",
+                            headers_zip_json,
+                            "@b" + test_dir + nsd_filenames[0],
+                            (422),
+                            r_header_json,
+                            "json",
+                        )
+                        test_nsd_ids += [engine.last_id if res is None else None]
+                        res = engine.test(
+                            "Try to create extra test NSD with FORCE",
+                            "POST",
+                            "/nsd/v1/ns_descriptors_content?FORCE",
+                            headers_zip_json,
+                            "@b" + test_dir + nsd_filenames[0],
+                            (201),
+                            r_header_json,
+                            "json",
+                        )
+                        test_nsd_ids += [engine.last_id if res else None]
+
+                        # Remove extra NSDs to prevent further errors
+                        for i in [2, 3]:
+                            if test_nsd_ids[i]:
+                                res = engine.test(
+                                    "Delete test NSD #" + str(i),
+                                    "DELETE",
+                                    "/nsd/v1/ns_descriptors_content/"
+                                    + test_nsd_ids[i]
+                                    + "?FORCE",
+                                    headers_json,
+                                    {},
+                                    (204),
+                                    {},
+                                    0,
+                                )
+                                if res:
+                                    test_nsd_ids[i] = None
+
+                        if test_nsd_ids[0] and test_nsd_ids[1]:
+                            # Test NSR Quotas
+                            res = engine.test(
+                                "Create test NSR #1",
+                                "POST",
+                                "/nslcm/v1/ns_instances_content",
+                                headers_json,
+                                {
+                                    "nsName": test_username + "_1",
+                                    "nsdId": test_nsd_ids[0],
+                                    "vimAccountId": test_vim_ids[0],
+                                },
+                                (201),
+                                r_header_json,
+                                "json",
+                            )
+                            test_nsr_ids += [engine.last_id if res else None]
+                            res = engine.test(
+                                "Create test NSR #2",
+                                "POST",
+                                "/nslcm/v1/ns_instances_content",
+                                headers_json,
+                                {
+                                    "nsName": test_username + "_2",
+                                    "nsdId": test_nsd_ids[1],
+                                    "vimAccountId": test_vim_ids[0],
+                                },
+                                (201),
+                                r_header_json,
+                                "json",
+                            )
+                            test_nsr_ids += [engine.last_id if res else None]
+                            res = engine.test(
+                                "Try to create extra test NSR",
+                                "POST",
+                                "/nslcm/v1/ns_instances_content",
+                                headers_json,
+                                {
+                                    "nsName": test_username + "_3",
+                                    "nsdId": test_nsd_ids[0],
+                                    "vimAccountId": test_vim_ids[0],
+                                },
+                                (422),
+                                r_header_json,
+                                "json",
+                            )
+                            test_nsr_ids += [engine.last_id if res is None else None]
+                            res = engine.test(
+                                "Try to create test NSR with FORCE",
+                                "POST",
+                                "/nslcm/v1/ns_instances_content?FORCE",
+                                headers_json,
+                                {
+                                    "nsName": test_username + "_4",
+                                    "nsdId": test_nsd_ids[0],
+                                    "vimAccountId": test_vim_ids[0],
+                                },
+                                (201),
+                                r_header_json,
+                                "json",
+                            )
+                            test_nsr_ids += [engine.last_id if res else None]
+
+                            # Test NST Quotas
+                            res = engine.test(
+                                "Create test NST",
+                                "POST",
+                                "/nst/v1/netslice_templates_content",
+                                headers_txt_json,
+                                "@b" + test_dir + nst_filenames[0],
+                                (201),
+                                r_header_json,
+                                "json",
+                            )
+                            test_nst_ids += [engine.last_id if res else None]
+                            res = engine.test(
+                                "Try to create extra test NST",
+                                "POST",
+                                "/nst/v1/netslice_templates_content",
+                                headers_txt_json,
+                                "@b" + test_dir + nst_filenames[0],
+                                (422),
+                                r_header_json,
+                                "json",
+                            )
+                            test_nst_ids += [engine.last_id if res is None else None]
+                            res = engine.test(
+                                "Try to create extra test NST with FORCE",
+                                "POST",
+                                "/nst/v1/netslice_templates_content?FORCE",
+                                headers_txt_json,
+                                "@b" + test_dir + nst_filenames[0],
+                                (201),
+                                r_header_json,
+                                "json",
+                            )
+                            test_nst_ids += [engine.last_id if res else None]
+
+                            if test_nst_ids[0]:
+                                # Remove NSR Quota
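+                                # Only an admin may edit project quotas, so switch to the
+                                # admin token here; the user token is restored after the PUT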
+                                engine.set_header(
+                                    {"Authorization": "Bearer {}".format(admin_token)}
+                                )
+                                res = engine.test(
+                                    "Remove NSR Quota",
+                                    "PUT",
+                                    "/admin/v1/projects/" + test_project_id,
+                                    headers_json,
+                                    {"quotas": {"nsrs": None}},
+                                    (204),
+                                    {},
+                                    0,
+                                )
+                                engine.set_header(
+                                    {"Authorization": "Bearer {}".format(user_token)}
+                                )
+                                if res:
+                                    # Test NSI Quotas
+                                    res = engine.test(
+                                        "Create test NSI",
+                                        "POST",
+                                        "/nsilcm/v1/netslice_instances_content",
+                                        headers_json,
+                                        {
+                                            "nsiName": test_username,
+                                            "nstId": test_nst_ids[0],
+                                            "vimAccountId": test_vim_ids[0],
+                                        },
+                                        (201),
+                                        r_header_json,
+                                        "json",
+                                    )
+                                    test_nsi_ids += [engine.last_id if res else None]
+                                    res = engine.test(
+                                        "Try to create extra test NSI",
+                                        "POST",
+                                        "/nsilcm/v1/netslice_instances_content",
+                                        headers_json,
+                                        {
+                                            "nsiName": test_username,
+                                            "nstId": test_nst_ids[0],
+                                            "vimAccountId": test_vim_ids[0],
+                                        },
+                                        (400),
+                                        r_header_json,
+                                        "json",
+                                    )
+                                    test_nsi_ids += [
+                                        engine.last_id if res is None else None
+                                    ]
+                                    res = engine.test(
+                                        "Try to create extra test NSI with FORCE",
+                                        "POST",
+                                        "/nsilcm/v1/netslice_instances_content?FORCE",
+                                        headers_json,
+                                        {
+                                            "nsiName": test_username,
+                                            "nstId": test_nst_ids[0],
+                                            "vimAccountId": test_vim_ids[0],
+                                        },
+                                        (201),
+                                        r_header_json,
+                                        "json",
+                                    )
+                                    test_nsi_ids += [engine.last_id if res else None]
+
+                    # Test PDU Quotas
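+                    # Rewrite the PDU descriptor so that every ip-address list becomes a
+                    # single placeholder address before uploading it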
+                    with open(test_dir + pdu_filenames[0], "rb") as file:
+                        pdu_text = re.sub(
+                            r"ip-address: *\[[^\]]*\]",
+                            "ip-address: '0.0.0.0'",
+                            file.read().decode("utf-8"),
+                        )
+                    with open(test_dir + pdu_filenames[0], "wb") as file:
+                        file.write(pdu_text.encode("utf-8"))
+                    res = engine.test(
+                        "Create test PDU",
+                        "POST",
+                        "/pdu/v1/pdu_descriptors",
+                        headers_yaml,
+                        "@b" + test_dir + pdu_filenames[0],
+                        (201),
+                        r_header_yaml,
+                        "yaml",
+                    )
+                    test_pdu_ids += [engine.last_id if res else None]
+                    res = engine.test(
+                        "Try to create extra test PDU",
+                        "POST",
+                        "/pdu/v1/pdu_descriptors",
+                        headers_yaml,
+                        "@b" + test_dir + pdu_filenames[0],
+                        (422),
+                        r_header_yaml,
+                        "yaml",
+                    )
+                    test_pdu_ids += [engine.last_id if res is None else None]
+                    res = engine.test(
+                        "Try to create extra test PDU with FORCE",
+                        "POST",
+                        "/pdu/v1/pdu_descriptors?FORCE",
+                        headers_yaml,
+                        "@b" + test_dir + pdu_filenames[0],
+                        (201),
+                        r_header_yaml,
+                        "yaml",
+                    )
+                    test_pdu_ids += [engine.last_id if res else None]
+
+                    # Cleanup
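+                    # Delete in dependency order: NSIs, NSRs, NSTs, NSDs, VNFDs and finally PDUs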
+                    for i, id in enumerate(test_nsi_ids):
+                        if id:
+                            engine.test(
+                                "Delete test NSI #" + str(i),
+                                "DELETE",
+                                "/nsilcm/v1/netslice_instances_content/"
+                                + id
+                                + "?FORCE",
+                                headers_json,
+                                {},
+                                (204),
+                                {},
+                                0,
+                            )
+                    for i, id in enumerate(test_nsr_ids):
+                        if id:
+                            engine.test(
+                                "Delete test NSR #" + str(i),
+                                "DELETE",
+                                "/nslcm/v1/ns_instances_content/" + id + "?FORCE",
+                                headers_json,
+                                {},
+                                (204),
+                                {},
+                                0,
+                            )
+                    for i, id in enumerate(test_nst_ids):
+                        if id:
+                            engine.test(
+                                "Delete test NST #" + str(i),
+                                "DELETE",
+                                "/nst/v1/netslice_templates_content/" + id + "?FORCE",
+                                headers_json,
+                                {},
+                                (204),
+                                {},
+                                0,
+                            )
+                    for i, id in enumerate(test_nsd_ids):
+                        if id:
+                            engine.test(
+                                "Delete test NSD #" + str(i),
+                                "DELETE",
+                                "/nsd/v1/ns_descriptors_content/" + id + "?FORCE",
+                                headers_json,
+                                {},
+                                (204),
+                                {},
+                                0,
+                            )
+                    for i, id in enumerate(test_vnfd_ids):
+                        if id:
+                            engine.test(
+                                "Delete test VNFD #" + str(i),
+                                "DELETE",
+                                "/vnfpkgm/v1/vnf_packages_content/" + id + "?FORCE",
+                                headers_json,
+                                {},
+                                (204),
+                                {},
+                                0,
+                            )
+                    for i, id in enumerate(test_pdu_ids):
+                        if id:
+                            engine.test(
+                                "Delete test PDU #" + str(i),
+                                "DELETE",
+                                "/pdu/v1/pdu_descriptors/" + id + "?FORCE",
+                                headers_json,
+                                {},
+                                (204),
+                                {},
+                                0,
+                            )
+
+                    # END Test NBI Quotas
+
+            # Test WIM Quotas
+            res = engine.test(
+                "Create test WIM",
+                "POST",
+                "/admin/v1/wim_accounts",
+                headers_json,
+                {
+                    "name": test_wim,
+                    "wim_type": "onos",
+                    "wim_url": "https://0.0.0.0:0/v0.0",
+                },
+                (202),
+                r_header_json,
+                "json",
+            )
+            test_wim_ids += [engine.last_id if res else None]
+            res = engine.test(
+                "Try to create second test WIM",
+                "POST",
+                "/admin/v1/wim_accounts",
+                headers_json,
+                {
+                    "name": test_wim + "_2",
+                    "wim_type": "onos",
+                    "wim_url": "https://0.0.0.0:0/v0.0",
+                },
+                (422),
+                r_header_json,
+                "json",
+            )
+            test_wim_ids += [engine.last_id if res is None else None]
+            res = engine.test(
+                "Try to create second test WIM with FORCE",
+                "POST",
+                "/admin/v1/wim_accounts?FORCE",
+                headers_json,
+                {
+                    "name": test_wim + "_3",
+                    "wim_type": "onos",
+                    "wim_url": "https://0.0.0.0:0/v0.0",
+                },
+                (202),
+                r_header_json,
+                "json",
+            )
+            test_wim_ids += [engine.last_id if res else None]
+
+            # Test SDN Quotas
+            res = engine.test(
+                "Create test SDN",
+                "POST",
+                "/admin/v1/sdns",
+                headers_json,
+                {
+                    "name": test_sdn,
+                    "type": "onos",
+                    "ip": "0.0.0.0",
+                    "port": 9999,
+                    "dpid": "00:00:00:00:00:00:00:00",
+                },
+                (202),
+                r_header_json,
+                "json",
+            )
+            test_sdn_ids += [engine.last_id if res else None]
+            res = engine.test(
+                "Try to create second test SDN",
+                "POST",
+                "/admin/v1/sdns",
+                headers_json,
+                {
+                    "name": test_sdn + "_2",
+                    "type": "onos",
+                    "ip": "0.0.0.0",
+                    "port": 9999,
+                    "dpid": "00:00:00:00:00:00:00:00",
+                },
+                (422),
+                r_header_json,
+                "json",
+            )
+            test_sdn_ids += [engine.last_id if res is None else None]
+            res = engine.test(
+                "Try to create second test SDN with FORCE",
+                "POST",
+                "/admin/v1/sdns?FORCE",
+                headers_json,
+                {
+                    "name": test_sdn + "_3",
+                    "type": "onos",
+                    "ip": "0.0.0.0",
+                    "port": 9999,
+                    "dpid": "00:00:00:00:00:00:00:00",
+                },
+                (202),
+                r_header_json,
+                "json",
+            )
+            test_sdn_ids += [engine.last_id if res else None]
+
+            # Cleanup
+            for i, id in enumerate(test_vim_ids):
+                if id:
+                    engine.test(
+                        "Delete test VIM #" + str(i),
+                        "DELETE",
+                        "/admin/v1/vim_accounts/" + id + "?FORCE",
+                        headers_json,
+                        {},
+                        (202),
+                        {},
+                        0,
+                    )
+            for i, id in enumerate(test_wim_ids):
+                if id:
+                    engine.test(
+                        "Delete test WIM #" + str(i),
+                        "DELETE",
+                        "/admin/v1/wim_accounts/" + id + "?FORCE",
+                        headers_json,
+                        {},
+                        (202),
+                        {},
+                        0,
+                    )
+            for i, id in enumerate(test_sdn_ids):
+                if id:
+                    engine.test(
+                        "Delete test SDN #" + str(i),
+                        "DELETE",
+                        "/admin/v1/sdns/" + id + "?FORCE",
+                        headers_json,
+                        {},
+                        (202),
+                        {},
+                        0,
+                    )
+
+            # Release user access
+            engine.remove_authorization()
+
+        # Cleanup
+        engine.user = admin_username
+        engine.password = admin_password
+        engine.project = admin_project
+        engine.get_autorization()
+        if test_user_id:
+            engine.test(
+                "Delete test user",
+                "DELETE",
+                "/admin/v1/users/" + test_user_id + "?FORCE",
+                headers_json,
+                {},
+                (204),
+                {},
+                0,
+            )
+        if test_project_id:
+            engine.test(
+                "Delete test project",
+                "DELETE",
+                "/admin/v1/projects/" + test_project_id + "?FORCE",
+                headers_json,
+                {},
+                (204),
+                {},
+                0,
+            )
+        engine.remove_authorization()
+
+    # END class TestNbiQuotas
+
+
+if __name__ == "__main__":
+    global logger
+    test = ""
+
+    # Disable warnings from self-signed certificates.
+    requests.packages.urllib3.disable_warnings()
+    try:
+        logging.basicConfig(format="%(levelname)s %(message)s", level=logging.ERROR)
+        logger = logging.getLogger("NBI")
+        # load parameters and configuration
+        opts, args = getopt.getopt(
+            sys.argv[1:],
+            "hvu:p:",
+            [
+                "url=",
+                "user=",
+                "password=",
+                "help",
+                "version",
+                "verbose",
+                "no-verbose",
+                "project=",
+                "insecure",
+                "timeout",
+                "timeout-deploy",
+                "timeout-configure",
+                "test=",
+                "list",
+                "test-osm",
+                "manual-check",
+                "params=",
+                "fail-fast",
+            ],
+        )
+        url = "https://localhost:9999/osm"
+        user = password = project = "admin"
+        test_osm = False
+        manual_check = False
+        verbose = 0
+        verify = True
+        fail_fast = False
+        test_classes = {
+            "NonAuthorized": TestNonAuthorized,
+            "FakeVIM": TestFakeVim,
+            "Users-Projects": TestUsersProjects,
+            "Projects-Descriptors": TestProjectsDescriptors,
+            "VIM-SDN": TestVIMSDN,
+            "Deploy-Custom": TestDeploy,
+            "Deploy-Hackfest-Cirros": TestDeployHackfestCirros,
+            "Deploy-Hackfest-Cirros-Scaling": TestDeployHackfestCirrosScaling,
+            "Deploy-Hackfest-3Charmed": TestDeployHackfest3Charmed,
+            "Deploy-Hackfest-3Charmed2": TestDeployHackfest3Charmed2,
+            "Deploy-Hackfest-3Charmed3": TestDeployHackfest3Charmed3,
+            "Deploy-Hackfest-4": TestDeployHackfest4,
+            "Deploy-CirrosMacIp": TestDeployIpMac,
+            "Descriptors": TestDescriptors,
+            "Deploy-Hackfest1": TestDeployHackfest1,
+            # "Deploy-MultiVIM": TestDeployMultiVIM,
+            "Deploy-SingleVdu": TestDeploySingleVdu,
+            "Deploy-Hnfd": TestDeployHnfd,
+            "Upload-Slice-Template": TestNetSliceTemplates,
+            "Deploy-Slice-Instance": TestNetSliceInstances,
+            "Deploy-SimpleCharm": TestDeploySimpleCharm,
+            "Deploy-SimpleCharm2": TestDeploySimpleCharm2,
+            "Authentication": TestAuthentication,
+            "NBI-Quotas": TestNbiQuotas,
+        }
+        test_to_do = []
+        test_params = {}
+
+        for o, a in opts:
+            # print("parameter:", o, a)
+            if o == "--version":
+                print("test version " + __version__ + " " + version_date)
+                exit()
+            elif o == "--list":
+                for test, test_class in sorted(test_classes.items()):
+                    print("{:32} {}".format(test + ":", test_class.description))
+                exit()
+            elif o in ("-v", "--verbose"):
+                verbose += 1
+            elif o == "no-verbose":
+                verbose = -1
+            elif o in ("-h", "--help"):
+                usage()
+                sys.exit()
+            elif o == "--test-osm":
+                test_osm = True
+            elif o == "--manual-check":
+                manual_check = True
+            elif o == "--url":
+                url = a
+            elif o in ("-u", "--user"):
+                user = a
+            elif o in ("-p", "--password"):
+                password = a
+            elif o == "--project":
+                project = a
+            elif o == "--fail-fast":
+                fail_fast = True
+            elif o == "--test":
+                for _test in a.split(","):
+                    if _test not in test_classes:
+                        print(
+                            "Invalid test name '{}'. Use option '--list' to show available tests".format(
+                                _test
+                            ),
+                            file=sys.stderr,
+                        )
+                        exit(1)
+                    test_to_do.append(_test)
+            elif o == "--params":
+                param_key, _, param_value = a.partition("=")
+                text_index = len(test_to_do)
+                if text_index not in test_params:
+                    test_params[text_index] = {}
+                test_params[text_index][param_key] = param_value
+            elif o == "--insecure":
+                verify = False
+            elif o == "--timeout":
+                timeout = int(a)
+            elif o == "--timeout-deploy":
+                timeout_deploy = int(a)
+            elif o == "--timeout-configure":
+                timeout_configure = int(a)
+            else:
+                assert False, "Unhandled option"
+        if verbose > 1:
+            logger.setLevel(logging.DEBUG)
+        elif verbose == 1:
+            logger.setLevel(logging.INFO)
+        elif verbose == 0:
+            logger.setLevel(logging.WARNING)
+        else:
+            logger.setLevel(logging.ERROR)
+
+        test_rest = TestRest(url, user=user, password=password, project=project)
+        # print("tests to do:", test_to_do)
+        if test_to_do:
+            text_index = 0
+            for test in test_to_do:
+                if fail_fast and test_rest.failed_tests:
+                    break
+                text_index += 1
+                test_class = test_classes[test]
+                test_class().run(
+                    test_rest, test_osm, manual_check, test_params.get(text_index)
+                )
+        else:
+            for test, test_class in sorted(test_classes.items()):
+                if fail_fast and test_rest.failed_tests:
+                    break
+                test_class().run(test_rest, test_osm, manual_check, test_params.get(0))
+        test_rest.print_results()
+        exit(1 if test_rest.failed_tests else 0)
+
+    except TestException as e:
+        logger.error(test + "Test {} Exception: {}".format(test, str(e)))
+        exit(1)
+    except getopt.GetoptError as e:
+        logger.error(e)
+        print(e, file=sys.stderr)
+        exit(1)
+    except Exception as e:
+        logger.critical(test + " Exception: " + str(e), exc_info=True)
index 36d341f..0c1e93c 100755 (executable)
@@ -18,7 +18,4 @@ rm -rf pool
 rm -rf dists
 mkdir -p pool/$MDG
 mv deb_dist/*.deb pool/$MDG/
-mkdir -p dists/unstable/$MDG/binary-amd64/
-apt-ftparchive packages pool/$MDG > dists/unstable/$MDG/binary-amd64/Packages
-gzip -9fk dists/unstable/$MDG/binary-amd64/Packages
-echo "dists/**,pool/$MDG/*.deb"
+
index a247f37..5a86dc9 100755 (executable)
@@ -13,7 +13,5 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-set -e
 echo "Launching tox"
-tox --parallel=auto
-
+TOX_PARALLEL_NO_SPINNER=1 tox --parallel=auto
index b695693..de0ad1f 100644 (file)
@@ -345,7 +345,6 @@ class CommonVimWimSdn(BaseTopic):
                 schema_version
             ) or self.config_to_encrypt.get("default")
             if edit_content.get("config") and config_to_encrypt_keys:
-
                 for p in config_to_encrypt_keys:
                     if edit_content["config"].get(p):
                         final_content["config"][p] = self.db.encrypt(
@@ -399,48 +398,25 @@ class CommonVimWimSdn(BaseTopic):
         if content.get("vim_type"):
             if content["vim_type"] == "openstack":
                 compute = {
-                    "ram": {
-                        "total": None,
-                        "used": None
-                    },
-                    "vcpus": {
-                        "total": None,
-                        "used": None
-                    },
-                    "instances": {
-                        "total": None,
-                        "used": None
-                    }
+                    "ram": {"total": None, "used": None},
+                    "vcpus": {"total": None, "used": None},
+                    "instances": {"total": None, "used": None},
                 }
                 storage = {
-                    "volumes": {
-                        "total": None,
-                        "used": None
-                    },
-                    "snapshots": {
-                        "total": None,
-                        "used": None
-                    },
-                    "storage": {
-                        "total": None,
-                        "used": None
-                    }
+                    "volumes": {"total": None, "used": None},
+                    "snapshots": {"total": None, "used": None},
+                    "storage": {"total": None, "used": None},
                 }
                 network = {
-                    "networks": {
-                        "total": None,
-                        "used": None
-                    },
-                    "subnets": {
-                        "total": None,
-                        "used": None
-                    },
-                    "floating_ips": {
-                        "total": None,
-                        "used": None
-                    }
+                    "networks": {"total": None, "used": None},
+                    "subnets": {"total": None, "used": None},
+                    "floating_ips": {"total": None, "used": None},
+                }
+                content["resources"] = {
+                    "compute": compute,
+                    "storage": storage,
+                    "network": network,
                 }
-                content["resources"] = {"compute": compute, "storage": storage, "network": network}
 
         return "{}:0".format(content["_id"])
 
@@ -464,7 +440,7 @@ class CommonVimWimSdn(BaseTopic):
 
         # remove reference from project_read if there are more projects referencing it. If it last one,
         # do not remove reference, but order via kafka to delete it
-        if session["project_id"] and session["project_id"]:
+        if session["project_id"]:
             other_projects_referencing = next(
                 (
                     p
@@ -1024,6 +1000,8 @@ class UserTopicAuth(UserTopic):
                 or indata.get("project_role_mappings")
                 or indata.get("projects")
                 or indata.get("add_projects")
+                or indata.get("unlock")
+                or indata.get("renew")
             ):
                 return _id
             if indata.get("project_role_mappings") and (
@@ -1097,7 +1075,6 @@ class UserTopicAuth(UserTopic):
                         mapping["role"],
                         mapping["role_name"],
                     ):
-
                         if mapping in mappings_to_remove:  # do not remove
                             mappings_to_remove.remove(mapping)
                         break  # do not add, it is already at user
@@ -1147,6 +1124,9 @@ class UserTopicAuth(UserTopic):
                     "old_password": indata.get("old_password"),
                     "add_project_role_mappings": mappings_to_add,
                     "remove_project_role_mappings": mappings_to_remove,
+                    "system_admin_id": indata.get("system_admin_id"),
+                    "unlock": indata.get("unlock"),
+                    "renew": indata.get("renew"),
                 }
             )
             data_to_send = {"_id": _id, "changes": indata}
index a99cea7..9c8c8d3 100644 (file)
@@ -44,6 +44,7 @@ from osm_nbi.authconn import AuthException, AuthconnException, AuthExceptionUnau
 from osm_nbi.authconn_keystone import AuthconnKeystone
 from osm_nbi.authconn_internal import AuthconnInternal
 from osm_nbi.authconn_tacacs import AuthconnTacacs
+from osm_nbi.utils import cef_event, cef_event_builder
 from osm_common import dbmemory, dbmongo, msglocal, msgkafka
 from osm_common.dbbase import DbException
 from osm_nbi.validation import is_valid_uuid
@@ -88,6 +89,7 @@ class Authenticator:
         self.valid_query_string = valid_query_string
         self.system_admin_role_id = None  # system_role id
         self.test_project_id = None  # test_project_id
+        self.cef_logger = None
 
     def start(self, config):
         """
@@ -98,6 +100,7 @@ class Authenticator:
         :param config: dictionary containing the relevant parameters for this object.
         """
         self.config = config
+        self.cef_logger = cef_event_builder(config["authentication"])
 
         try:
             if not self.db:
@@ -249,7 +252,7 @@ class Authenticator:
         user_desc = {
             "username": "admin",
             "password": "admin",
-            "_admin": {"created": now, "modified": now},
+            "_admin": {"created": now, "modified": now, "user_status": "always-active"},
         }
         if project_id:
             pid = project_id
@@ -283,7 +286,7 @@ class Authenticator:
             (r for r in records if r["name"] == "system_admin"), None
         ):
             with open(self.roles_to_operations_file, "r") as stream:
-                roles_to_operations_yaml = yaml.load(stream, Loader=yaml.Loader)
+                roles_to_operations_yaml = yaml.safe_load(stream)
 
             role_names = []
             for role_with_operations in roles_to_operations_yaml["roles"]:
@@ -449,9 +452,11 @@ class Authenticator:
                 elif auth_list[0].lower() == "basic":
                     user_passwd64 = auth_list[-1]
             if not token:
-                if cherrypy.session.get("Authorization"):
+                if cherrypy.session.get("Authorization"):  # pylint: disable=E1101
                     # 2. Try using session before request a new token. If not, basic authentication will generate
-                    token = cherrypy.session.get("Authorization")
+                    token = cherrypy.session.get(  # pylint: disable=E1101
+                        "Authorization"
+                    )
                     if token == "logout":
                         token = None  # force Unauthorized response to insert user password again
                 elif user_passwd64 and cherrypy.request.config.get(
@@ -466,10 +471,10 @@ class Authenticator:
                     except Exception:
                         pass
                     outdata = self.new_token(
-                        None, {"username": user, "password": passwd}
+                        None, {"username": user, "password": passwd}, None
                     )
                     token = outdata["_id"]
-                    cherrypy.session["Authorization"] = token
+                    cherrypy.session["Authorization"] = token  # pylint: disable=E1101
 
             if not token:
                 raise AuthException(
@@ -503,13 +508,25 @@ class Authenticator:
                     item_id,
                 )
                 self.logger.info("RBAC_auth: {}".format(RBAC_auth))
+                if RBAC_auth:
+                    cef_event(
+                        self.cef_logger,
+                        {
+                            "name": "System Access",
+                            "sourceUserName": token_info.get("username"),
+                            "message": "Accessing account with system privileges, Project={}".format(
+                                token_info.get("project_name")
+                            ),
+                        },
+                    )
+                    self.logger.info("{}".format(self.cef_logger))
                 token_info["allow_show_user_project_role"] = RBAC_auth
 
             return token_info
         except AuthException as e:
             if not isinstance(e, AuthExceptionUnauthorized):
-                if cherrypy.session.get("Authorization"):
-                    del cherrypy.session["Authorization"]
+                if cherrypy.session.get("Authorization"):  # pylint: disable=E1101
+                    del cherrypy.session["Authorization"]  # pylint: disable=E1101
                 cherrypy.response.headers[
                     "WWW-Authenticate"
                 ] = 'Bearer realm="{}"'.format(e)
@@ -774,21 +791,24 @@ class Authenticator:
         This method will check for password expiry of the user
         :param outdata: user token information
         """
-        user_content = None
-        detail = {}
+        user_list = None
         present_time = time()
         user = outdata["username"]
-        if self.config["authentication"].get("pwd_expiry_check"):
-            user_content = self.db.get_list("users", {"username": user})[0]
-            if not user_content.get("username") == "admin":
-                user_content["_admin"]["modified_time"] = present_time
-                if user_content.get("_admin").get("expire_time"):
-                    expire_time = user_content["_admin"]["expire_time"]
-                else:
-                    expire_time = present_time
-                uid = user_content["_id"]
-                self.db.set_one("users", {"_id": uid}, user_content)
-                if not present_time < expire_time:
-                    return True
+        if self.config["authentication"].get("user_management"):
+            user_list = self.db.get_list("users", {"username": user})
+            if user_list:
+                user_content = user_list[0]
+                if not user_content.get("username") == "admin":
+                    user_content["_admin"]["modified"] = present_time
+                    if user_content.get("_admin").get("password_expire_time"):
+                        password_expire_time = user_content["_admin"][
+                            "password_expire_time"
+                        ]
+                    else:
+                        password_expire_time = present_time
+                    uid = user_content["_id"]
+                    self.db.set_one("users", {"_id": uid}, user_content)
+                    if not present_time < password_expire_time:
+                        return True
         else:
             pass
index 0f4b523..2f70405 100644 (file)
@@ -204,6 +204,7 @@ class Authconn:
         :param filter_q: dictionary to filter user list by name (username is also admited) and/or _id
         :return: returns a list of users.
         """
+        return list()  # Default return value so that the method get_user passes pylint
 
     def get_user(self, _id, fail=True):
         """
index 99d18e4..0f414b1 100644 (file)
@@ -33,9 +33,14 @@ __date__ = "$06-jun-2019 11:16:08$"
 import logging
 import re
 
-from osm_nbi.authconn import Authconn, AuthException, AuthconnConflictException  # , AuthconnOperationException
+from osm_nbi.authconn import (
+    Authconn,
+    AuthException,
+    AuthconnConflictException,
+)  # , AuthconnOperationException
 from osm_common.dbbase import DbException
 from osm_nbi.base_topic import BaseTopic
+from osm_nbi.utils import cef_event, cef_event_builder
 from osm_nbi.validation import is_valid_uuid
 from time import time, sleep
 from http import HTTPStatus
@@ -64,6 +69,7 @@ class AuthconnInternal(Authconn):
 
         # To be Confirmed
         self.sess = None
+        self.cef_logger = cef_event_builder(config)
 
     def validate_token(self, token):
         """
@@ -150,15 +156,134 @@ class AuthconnInternal(Authconn):
         user_rows = self.db.get_list(
             self.users_collection, {BaseTopic.id_field("users", user): user}
         )
+        now = time()
         user_content = None
-        if user_rows:
-            user_content = user_rows[0]
-            salt = user_content["_admin"]["salt"]
-            shadow_password = sha256(
-                password.encode("utf-8") + salt.encode("utf-8")
-            ).hexdigest()
-            if shadow_password != user_content["password"]:
-                user_content = None
+        if user:
+            user_rows = self.db.get_list(
+                self.users_collection,
+                {BaseTopic.id_field(self.users_collection, user): user},
+            )
+            if user_rows:
+                user_content = user_rows[0]
+                # Updating user_status for every system_admin id role login
+                mapped_roles = user_content.get("project_role_mappings")
+                for role in mapped_roles:
+                    role_id = role.get("role")
+                    role_assigned = self.db.get_one(
+                        self.roles_collection,
+                        {BaseTopic.id_field(self.roles_collection, role_id): role_id},
+                    )
+
+                    if role_assigned.get("permissions")["admin"]:
+                        if role_assigned.get("permissions")["default"]:
+                            if self.config.get("user_management"):
+                                filt = {}
+                                users = self.db.get_list(self.users_collection, filt)
+                                for user_info in users:
+                                    if not user_info.get("username") == "admin":
+                                        if not user_info.get("_admin").get(
+                                            "account_expire_time"
+                                        ):
+                                            expire = now + 86400 * self.config.get(
+                                                "account_expire_days"
+                                            )
+                                            self.db.set_one(
+                                                self.users_collection,
+                                                {"_id": user_info["_id"]},
+                                                {"_admin.account_expire_time": expire},
+                                            )
+                                        else:
+                                            if now > user_info.get("_admin").get(
+                                                "account_expire_time"
+                                            ):
+                                                self.db.set_one(
+                                                    self.users_collection,
+                                                    {"_id": user_info["_id"]},
+                                                    {"_admin.user_status": "expired"},
+                                                )
+                                break
+
+                # To add "admin" user_status key while upgrading osm setup with feature enabled
+                if user_content.get("username") == "admin":
+                    if self.config.get("user_management"):
+                        self.db.set_one(
+                            self.users_collection,
+                            {"_id": user_content["_id"]},
+                            {"_admin.user_status": "always-active"},
+                        )
+
+                if not user_content.get("username") == "admin":
+                    if self.config.get("user_management"):
+                        if not user_content.get("_admin").get("account_expire_time"):
+                            account_expire_time = now + 86400 * self.config.get(
+                                "account_expire_days"
+                            )
+                            self.db.set_one(
+                                self.users_collection,
+                                {"_id": user_content["_id"]},
+                                {"_admin.account_expire_time": account_expire_time},
+                            )
+                        else:
+                            account_expire_time = user_content.get("_admin").get(
+                                "account_expire_time"
+                            )
+
+                        if now > account_expire_time:
+                            self.db.set_one(
+                                self.users_collection,
+                                {"_id": user_content["_id"]},
+                                {"_admin.user_status": "expired"},
+                            )
+                            raise AuthException(
+                                "Account expired", http_code=HTTPStatus.UNAUTHORIZED
+                            )
+
+                        if user_content.get("_admin").get("user_status") == "locked":
+                            raise AuthException(
+                                "Failed to login as the account is locked due to MANY FAILED ATTEMPTS"
+                            )
+                        elif user_content.get("_admin").get("user_status") == "expired":
+                            raise AuthException(
+                                "Failed to login as the account is expired"
+                            )
+
+                salt = user_content["_admin"]["salt"]
+                shadow_password = sha256(
+                    password.encode("utf-8") + salt.encode("utf-8")
+                ).hexdigest()
+                if shadow_password != user_content["password"]:
+                    count = 1
+                    if user_content.get("_admin").get("retry_count") >= 0:
+                        count += user_content.get("_admin").get("retry_count")
+                        self.db.set_one(
+                            self.users_collection,
+                            {"_id": user_content["_id"]},
+                            {"_admin.retry_count": count},
+                        )
+                        self.logger.debug(
+                            "Failed Authentications count: {}".format(count)
+                        )
+
+                    if user_content.get("username") == "admin":
+                        user_content = None
+                    else:
+                        if not self.config.get("user_management"):
+                            user_content = None
+                        else:
+                            if (
+                                user_content.get("_admin").get("retry_count")
+                                >= self.config["max_pwd_attempt"] - 1
+                            ):
+                                self.db.set_one(
+                                    self.users_collection,
+                                    {"_id": user_content["_id"]},
+                                    {"_admin.user_status": "locked"},
+                                )
+                                raise AuthException(
+                                    "Failed to login as the account is locked due to MANY FAILED ATTEMPTS"
+                                )
+                            else:
+                                user_content = None
         return user_content
 
     def authenticate(self, credentials, token_info=None):
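
A quick note on the validate_user changes above: they add an absolute account expiry (now + 86400 * account_expire_days, initialised lazily when the record has none) and a lockout once the failed-attempt counter reaches the configured maximum. The sketch below reproduces that decision logic in isolation; it is only an illustration, with thresholds simplified, a plain dict standing in for the Mongo-backed "_admin" sub-document, and config keys named as in the hunk.

import time

# Hypothetical stand-in for the NBI configuration used by the hunk.
CONFIG = {"user_management": True, "account_expire_days": 90, "max_pwd_attempt": 5}


def check_account_state(user_admin, now, config):
    """Return 'ok', 'expired' or 'locked' for a user's _admin sub-document."""
    if not config.get("user_management"):
        return "ok"
    # Lazily initialise the absolute expiry, as the hunk does on first login.
    if user_admin.get("account_expire_time") is None:
        user_admin["account_expire_time"] = now + 86400 * config["account_expire_days"]
    if now > user_admin["account_expire_time"]:
        user_admin["user_status"] = "expired"
        return "expired"
    if user_admin.get("user_status") == "locked":
        return "locked"
    return "ok"


def register_failed_attempt(user_admin, config):
    """Bump retry_count and lock the account once too many attempts accumulate."""
    user_admin["retry_count"] = user_admin.get("retry_count", 0) + 1
    if user_admin["retry_count"] >= config["max_pwd_attempt"]:
        user_admin["user_status"] = "locked"
        return "locked"
    return "retry"


if __name__ == "__main__":
    admin_doc = {"retry_count": 0, "user_status": "active"}
    now = time.time()
    print(check_account_state(admin_doc, now, CONFIG))    # ok
    for _ in range(5):
        print(register_failed_attempt(admin_doc, CONFIG))  # retry ... locked
    print(check_account_state(admin_doc, now, CONFIG))    # locked
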
@@ -189,6 +314,18 @@ class AuthconnInternal(Authconn):
         if user:
             user_content = self.validate_user(user, password)
             if not user_content:
+                cef_event(
+                    self.cef_logger,
+                    {
+                        "name": "User login",
+                        "sourceUserName": user,
+                        "message": "Invalid username/password Project={} Outcome=Failure".format(
+                            project
+                        ),
+                        "severity": "3",
+                    },
+                )
+                self.logger.exception("{}".format(self.cef_logger))
                 raise AuthException(
                     "Invalid username/password", http_code=HTTPStatus.UNAUTHORIZED
                 )
@@ -218,10 +355,14 @@ class AuthconnInternal(Authconn):
             sleep(self.token_delay)
         # user_content["_admin"]["last_token_time"] = now
         # self.db.replace("users", user_content["_id"], user_content)   # might cause race conditions
+        user_data = {
+            "_admin.last_token_time": now,
+            "_admin.retry_count": 0,
+        }
         self.db.set_one(
             self.users_collection,
             {"_id": user_content["_id"]},
-            {"_admin.last_token_time": now},
+            user_data,
         )
 
         token_id = "".join(
@@ -281,6 +422,24 @@ class AuthconnInternal(Authconn):
             ]
             roles_list = [{"name": "project_admin", "id": rid}]
 
+        login_count = user_content.get("_admin").get("retry_count")
+        last_token_time = user_content.get("_admin").get("last_token_time")
+
+        admin_show = False
+        user_show = False
+        if self.config.get("user_management"):
+            for role in roles_list:
+                role_id = role.get("id")
+                permission = self.db.get_one(
+                    self.roles_collection,
+                    {BaseTopic.id_field(self.roles_collection, role_id): role_id},
+                )
+                if permission.get("permissions")["admin"]:
+                    if permission.get("permissions")["default"]:
+                        admin_show = True
+                        break
+            else:
+                user_show = True
         new_token = {
             "issued_at": now,
             "expires": now + 3600,
@@ -292,6 +451,10 @@ class AuthconnInternal(Authconn):
             "user_id": user_content["_id"],
             "admin": token_admin,
             "roles": roles_list,
+            "login_count": login_count,
+            "last_login": last_token_time,
+            "admin_show": admin_show,
+            "user_show": user_show,
         }
 
         self.db.create(self.tokens_collection, new_token)
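
The token built in this hunk now also carries login_count, last_login and two display flags. admin_show and user_show are decided with a for/else over the user's roles: the else branch only runs when no role with both admin and default permissions was found (and both flags stay False when user_management is disabled). A small sketch of that selection, with an in-memory list of role documents replacing the lookup in the roles collection:

def classify_roles(roles):
    """Decide which display flag a token should carry, mirroring the for/else above.

    `roles` is assumed to be a list of dicts shaped like
    {"permissions": {"admin": bool, "default": bool}}.
    """
    admin_show = False
    user_show = False
    for role in roles:
        perms = role.get("permissions", {})
        if perms.get("admin") and perms.get("default"):
            admin_show = True
            break
    else:
        # Runs only when the loop finished without hitting `break`.
        user_show = True
    return {"admin_show": admin_show, "user_show": user_show}


print(classify_roles([{"permissions": {"admin": True, "default": True}}]))
# {'admin_show': True, 'user_show': False}
print(classify_roles([{"permissions": {"admin": False, "default": True}}]))
# {'admin_show': False, 'user_show': True}
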
@@ -352,15 +515,24 @@ class AuthconnInternal(Authconn):
         BaseTopic.format_on_new(user_info, make_public=False)
         salt = uuid4().hex
         user_info["_admin"]["salt"] = salt
+        user_info["_admin"]["user_status"] = "active"
         present = time()
         if not user_info["username"] == "admin":
-            if self.config.get("pwd_expiry_check"):
-                user_info["_admin"]["modified_time"] = present
-                user_info["_admin"]["expire_time"] = present
+            if self.config.get("user_management"):
+                user_info["_admin"]["modified"] = present
+                user_info["_admin"]["password_expire_time"] = present
+                account_expire_time = present + 86400 * self.config.get(
+                    "account_expire_days"
+                )
+                user_info["_admin"]["account_expire_time"] = account_expire_time
+
+        user_info["_admin"]["retry_count"] = 0
+        user_info["_admin"]["last_token_time"] = present
         if "password" in user_info:
             user_info["password"] = sha256(
                 user_info["password"].encode("utf-8") + salt.encode("utf-8")
             ).hexdigest()
+            user_info["_admin"]["password_history"] = {salt: user_info["password"]}
         # "projects" are not stored any more
         if "projects" in user_info:
             del user_info["projects"]
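
User creation above stores a random uuid4 salt, a shadow password of sha256(password + salt) in hex, and seeds password_history with that first salt/shadow pair. A minimal sketch of the hashing and verification sides of that scheme, using the same primitives (this is an illustration, not the Authconn API):

from hashlib import sha256
from uuid import uuid4


def make_shadow(password, salt=None):
    """Return (salt, shadow) where shadow = sha256(password + salt) as hex."""
    salt = salt or uuid4().hex
    shadow = sha256(password.encode("utf-8") + salt.encode("utf-8")).hexdigest()
    return salt, shadow


def verify(password, salt, stored_shadow):
    """Recompute the shadow with the stored salt and compare."""
    return make_shadow(password, salt)[1] == stored_shadow


salt, shadow = make_shadow("s3cret")
user = {"password": shadow, "_admin": {"salt": salt, "password_history": {salt: shadow}}}
print(verify("s3cret", user["_admin"]["salt"], user["password"]))   # True
print(verify("wrong", user["_admin"]["salt"], user["password"]))    # False
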
@@ -375,17 +547,103 @@ class AuthconnInternal(Authconn):
         """
         uid = user_info["_id"]
         old_pwd = user_info.get("old_password")
+        unlock = user_info.get("unlock")
+        renew = user_info.get("renew")
+        permission_id = user_info.get("system_admin_id")
+
         user_data = self.db.get_one(
             self.users_collection, {BaseTopic.id_field("users", uid): uid}
         )
         if old_pwd:
             salt = user_data["_admin"]["salt"]
-            shadow_password = sha256(old_pwd.encode('utf-8') + salt.encode('utf-8')).hexdigest()
+            shadow_password = sha256(
+                old_pwd.encode("utf-8") + salt.encode("utf-8")
+            ).hexdigest()
             if shadow_password != user_data["password"]:
                 raise AuthconnConflictException(
-                    "Incorrect password",
-                    http_code=HTTPStatus.CONFLICT
+                    "Incorrect password", http_code=HTTPStatus.CONFLICT
+                )
+        # Unlocking the user
+        if unlock:
+            system_user = None
+            unlock_state = False
+            if not permission_id:
+                raise AuthconnConflictException(
+                    "system_admin_id is the required field to unlock the user",
+                    http_code=HTTPStatus.CONFLICT,
+                )
+            else:
+                system_user = self.db.get_one(
+                    self.users_collection,
+                    {
+                        BaseTopic.id_field(
+                            self.users_collection, permission_id
+                        ): permission_id
+                    },
+                )
+                mapped_roles = system_user.get("project_role_mappings")
+                for role in mapped_roles:
+                    role_id = role.get("role")
+                    role_assigned = self.db.get_one(
+                        self.roles_collection,
+                        {BaseTopic.id_field(self.roles_collection, role_id): role_id},
+                    )
+                    if role_assigned.get("permissions")["admin"]:
+                        if role_assigned.get("permissions")["default"]:
+                            user_data["_admin"]["retry_count"] = 0
+                            user_data["_admin"]["user_status"] = "active"
+                            unlock_state = True
+                            break
+                if not unlock_state:
+                    raise AuthconnConflictException(
+                        "User '{}' does not have the privilege to unlock the user".format(
+                            permission_id
+                        ),
+                        http_code=HTTPStatus.CONFLICT,
+                    )
+        # Renewing the user
+        if renew:
+            system_user = None
+            renew_state = False
+            if not permission_id:
+                raise AuthconnConflictException(
+                    "system_admin_id is the required field to renew the user",
+                    http_code=HTTPStatus.CONFLICT,
+                )
+            else:
+                system_user = self.db.get_one(
+                    self.users_collection,
+                    {
+                        BaseTopic.id_field(
+                            self.users_collection, permission_id
+                        ): permission_id
+                    },
                 )
+                mapped_roles = system_user.get("project_role_mappings")
+                for role in mapped_roles:
+                    role_id = role.get("role")
+                    role_assigned = self.db.get_one(
+                        self.roles_collection,
+                        {BaseTopic.id_field(self.roles_collection, role_id): role_id},
+                    )
+                    if role_assigned.get("permissions")["admin"]:
+                        if role_assigned.get("permissions")["default"]:
+                            present = time()
+                            account_expire = (
+                                present + 86400 * self.config["account_expire_days"]
+                            )
+                            user_data["_admin"]["modified"] = present
+                            user_data["_admin"]["account_expire_time"] = account_expire
+                            user_data["_admin"]["user_status"] = "active"
+                            renew_state = True
+                            break
+                if not renew_state:
+                    raise AuthconnConflictException(
+                        "User '{}' does not have the privilege to renew the user".format(
+                            permission_id
+                        ),
+                        http_code=HTTPStatus.CONFLICT,
+                    )
         BaseTopic.format_on_edit(user_data, user_info)
         # User Name
         usnm = user_info.get("username")
@@ -396,20 +654,46 @@ class AuthconnInternal(Authconn):
         if pswd and (
             len(pswd) != 64 or not re.match("[a-fA-F0-9]*", pswd)
         ):  # TODO: Improve check?
+            cef_event(
+                self.cef_logger,
+                {
+                    "name": "Change Password",
+                    "sourceUserName": user_data["username"],
+                    "message": "Changing Password for user, Outcome=Success",
+                    "severity": "2",
+                },
+            )
+            self.logger.info("{}".format(self.cef_logger))
             salt = uuid4().hex
             if "_admin" not in user_data:
                 user_data["_admin"] = {}
+            if user_data.get("_admin").get("password_history"):
+                old_pwds = user_data.get("_admin").get("password_history")
+            else:
+                old_pwds = {}
+            for k, v in old_pwds.items():
+                shadow_password = sha256(
+                    pswd.encode("utf-8") + k.encode("utf-8")
+                ).hexdigest()
+                if v == shadow_password:
+                    raise AuthconnConflictException(
+                        "Password is used before", http_code=HTTPStatus.CONFLICT
+                    )
             user_data["_admin"]["salt"] = salt
             user_data["password"] = sha256(
                 pswd.encode("utf-8") + salt.encode("utf-8")
             ).hexdigest()
+            if len(old_pwds) >= 3:
+                old_pwds.pop(list(old_pwds.keys())[0])
+            old_pwds.update({salt: user_data["password"]})
+            user_data["_admin"]["password_history"] = old_pwds
             if not user_data["username"] == "admin":
-                if self.config.get("pwd_expiry_check"):
+                if self.config.get("user_management"):
                     present = time()
-                    if self.config.get("days"):
-                        expire = present + 86400 * self.config.get("days")
-                        user_data["_admin"]["modified_time"] = present
-                        user_data["_admin"]["expire_time"] = expire
+                    if self.config.get("pwd_expire_days"):
+                        expire = present + 86400 * self.config.get("pwd_expire_days")
+                        user_data["_admin"]["modified"] = present
+                        user_data["_admin"]["password_expire_time"] = expire
         # Project-Role Mappings
         # TODO: Check that user_info NEVER includes "project_role_mappings"
         if "project_role_mappings" not in user_data:
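
The password-change path above rejects any new password that, hashed with one of the historical salts, matches a stored shadow, and it keeps at most three history entries by dropping the oldest (which relies on dict insertion order). A compact sketch of that reuse check under the same assumptions:

from hashlib import sha256
from uuid import uuid4

MAX_HISTORY = 3  # the hunk keeps at most three (salt, shadow) pairs


def _shadow(password, salt):
    return sha256(password.encode("utf-8") + salt.encode("utf-8")).hexdigest()


def change_password(user, new_password):
    """Reject reuse of any password in the history, then rotate salt and shadow."""
    history = user["_admin"].setdefault("password_history", {})
    for old_salt, old_shadow in history.items():
        if _shadow(new_password, old_salt) == old_shadow:
            raise ValueError("Password has been used before")
    salt = uuid4().hex
    user["_admin"]["salt"] = salt
    user["password"] = _shadow(new_password, salt)
    if len(history) >= MAX_HISTORY:
        history.pop(next(iter(history)))  # drop the oldest entry
    history[salt] = user["password"]


user = {"password": "", "_admin": {}}
change_password(user, "first")
change_password(user, "second")
try:
    change_password(user, "first")   # still in the history, so it is rejected
except ValueError as exc:
    print(exc)
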
index 5e34485..a84b3d9 100644 (file)
@@ -328,7 +328,6 @@ class AuthconnKeystone(Authconn):
         :return: returns the id of the user in keystone.
         """
         try:
-
             if (
                 user_info.get("domain_name")
                 and user_info["domain_name"] in self.user_domain_ro_list
index 591d71a..820bd8e 100644 (file)
@@ -29,6 +29,7 @@ class EngineException(Exception):
         self.http_code = http_code
         super(Exception, self).__init__(message)
 
+
 class NBIBadArgumentsException(Exception):
     """
     Bad argument values exception
@@ -40,9 +41,8 @@ class NBIBadArgumentsException(Exception):
         self.bad_args = bad_args
 
     def __str__(self):
-        return "{}, Bad arguments: {}".format(
-            self.message, self.bad_args
-        )
+        return "{}, Bad arguments: {}".format(self.message, self.bad_args)
+
 
 def deep_get(target_dict, key_list):
     """
@@ -59,9 +59,7 @@ def deep_get(target_dict, key_list):
     return target_dict
 
 
-def detect_descriptor_usage(
-    descriptor: dict, db_collection: str, db: object
-) -> bool:
+def detect_descriptor_usage(descriptor: dict, db_collection: str, db: object) -> bool:
     """Detect the descriptor usage state.
 
     Args:
@@ -88,7 +86,9 @@ def detect_descriptor_usage(
         }
 
         if db_collection not in search_dict:
-            raise NBIBadArgumentsException("db_collection should be equal to vnfds or nsds", "db_collection")
+            raise NBIBadArgumentsException(
+                "db_collection should be equal to vnfds or nsds", "db_collection"
+            )
 
         record_list = db.get_list(
             search_dict[db_collection][0],
@@ -99,7 +99,9 @@ def detect_descriptor_usage(
             return True
 
     except (DbException, KeyError, NBIBadArgumentsException) as error:
-        raise EngineException(f"Error occured while detecting the descriptor usage: {error}")
+        raise EngineException(
+            f"Error occurred while detecting the descriptor usage: {error}"
+        )
 
 
 def update_descriptor_usage_state(
@@ -126,10 +128,14 @@ def update_descriptor_usage_state(
                 "_admin.usageState": "IN_USE",
             }
 
-        db.set_one(db_collection, {"_id": descriptor["_id"]}, update_dict=descriptor_update)
+        db.set_one(
+            db_collection, {"_id": descriptor["_id"]}, update_dict=descriptor_update
+        )
 
     except (DbException, KeyError, NBIBadArgumentsException) as error:
-        raise EngineException(f"Error occured while updating the descriptor usage state: {error}")
+        raise EngineException(
+            f"Error occurred while updating the descriptor usage state: {error}"
+        )
 
 
 def get_iterable(input_var):
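
For context on the reformatted helpers above: detect_descriptor_usage answers whether any record still references a descriptor, and update_descriptor_usage_state writes the corresponding _admin.usageState. A tiny in-memory sketch of that pattern, with a plain list standing in for the vnfrs/nsrs collections and only the usageState field shown (the rest of the update is omitted here):

def detect_descriptor_usage(descriptor, records, ref_key):
    """Return True if any record references the descriptor's _id under ref_key."""
    return any(record.get(ref_key) == descriptor["_id"] for record in records)


def usage_state_update(in_use):
    """Build the dot-notation update a db.set_one() call would receive."""
    return {"_admin.usageState": "IN_USE" if in_use else "NOT_IN_USE"}


vnfd = {"_id": "vnfd-1", "_admin": {}}
vnfrs = [{"_id": "vnfr-9", "vnfd-id": "vnfd-1"}]
print(usage_state_update(detect_descriptor_usage(vnfd, vnfrs, "vnfd-id")))
# {'_admin.usageState': 'IN_USE'}
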
index 50182fd..b165b76 100644 (file)
@@ -20,6 +20,7 @@ import copy
 import os
 import shutil
 import functools
+import re
 
 # import logging
 from deepdiff import DeepDiff
@@ -30,6 +31,7 @@ from time import time
 from uuid import uuid4
 from re import fullmatch
 from zipfile import ZipFile
+from urllib.parse import urlparse
 from osm_nbi.validation import (
     ValidationError,
     pdu_new_schema,
@@ -51,11 +53,17 @@ from osm_nbi import utils
 
 __author__ = "Alfonso Tierno <alfonso.tiernosepulveda@telefonica.com>"
 
+valid_helm_chart_re = re.compile(
+    r"^[a-z0-9]([-a-z0-9]*[a-z0-9]/)?([a-z0-9]([-a-z0-9]*[a-z0-9])?)*$"
+)
+
 
 class DescriptorTopic(BaseTopic):
     def __init__(self, db, fs, msg, auth):
+        super().__init__(db, fs, msg, auth)
 
-        BaseTopic.__init__(self, db, fs, msg, auth)
+    def _validate_input_new(self, indata, storage_params, force=False):
+        return indata
 
     def check_conflict_on_edit(self, session, final_content, edit_content, _id):
         final_content = super().check_conflict_on_edit(
@@ -122,7 +130,7 @@ class DescriptorTopic(BaseTopic):
             if self.db.get_one(self.topic, _filter, fail_on_empty=False):
                 raise EngineException(
                     "{} with id '{}' already exists for this project".format(
-                        self.topic[:-1], final_content["id"]
+                        (str(self.topic))[:-1], final_content["id"]
                     ),
                     HTTPStatus.CONFLICT,
                 )
@@ -154,7 +162,6 @@ class DescriptorTopic(BaseTopic):
                 self.fs.file_delete(_id + ":" + str(revision), ignore_non_exist=True)
                 revision = revision - 1
 
-
     @staticmethod
     def get_one_by_id(db, session, topic, id):
         # find owned by this project
@@ -218,10 +225,7 @@ class DescriptorTopic(BaseTopic):
         # Avoid override in this case as the target is userDefinedData, but not vnfd,nsd descriptors
         # indata = DescriptorTopic._validate_input_new(self, indata, project_id=session["force"])
 
-        content = {"_admin": {
-            "userDefinedData": indata,
-            "revision": 0
-            }}
+        content = {"_admin": {"userDefinedData": indata, "revision": 0}}
 
         self.format_on_new(
             content, session["project_id"], make_public=session["public"]
@@ -255,10 +259,7 @@ class DescriptorTopic(BaseTopic):
             or "application/x-gzip" in content_type
         ):
             compressed = "gzip"
-        if (
-            content_type
-            and "application/zip" in content_type
-        ):
+        if content_type and "application/zip" in content_type:
             compressed = "zip"
         filename = headers.get("Content-Filename")
         if not filename and compressed:
@@ -408,14 +409,12 @@ class DescriptorTopic(BaseTopic):
                         )
 
                     if (
-                        (
-                            zipfilename.endswith(".yaml")
-                            or zipfilename.endswith(".json")
-                            or zipfilename.endswith(".yml")
-                        ) and (
-                            zipfilename.find("/") < 0
-                            or zipfilename.find("Definitions") >= 0
-                        )
+                        zipfilename.endswith(".yaml")
+                        or zipfilename.endswith(".json")
+                        or zipfilename.endswith(".yml")
+                    ) and (
+                        zipfilename.find("/") < 0
+                        or zipfilename.find("Definitions") >= 0
                     ):
                         storage["pkg-dir"] = ""
                         if descriptor_file_name:
@@ -444,7 +443,7 @@ class DescriptorTopic(BaseTopic):
                 indata = json.load(content)
             else:
                 error_text = "Invalid yaml format "
-                indata = yaml.load(content, Loader=yaml.SafeLoader)
+                indata = yaml.safe_load(content)
 
             # Need to close the file package here so it can be copied from the
             # revision to the current, unrevisioned record
@@ -467,14 +466,17 @@ class DescriptorTopic(BaseTopic):
                         proposed_revision_path,
                     )
                 except Exception as e:
-                    shutil.rmtree(self.fs.path + current_revision_path, ignore_errors=True)
-                    shutil.rmtree(self.fs.path + proposed_revision_path, ignore_errors=True)
+                    shutil.rmtree(
+                        self.fs.path + current_revision_path, ignore_errors=True
+                    )
+                    shutil.rmtree(
+                        self.fs.path + proposed_revision_path, ignore_errors=True
+                    )
                     # Only delete the new revision.  We need to keep the original version in place
                     # as it has not been changed.
                     self.fs.file_delete(proposed_revision_path, ignore_non_exist=True)
                     raise e
 
-
             indata = self._remove_envelop(indata)
 
             # Override descriptor with query string kwargs
@@ -494,7 +496,10 @@ class DescriptorTopic(BaseTopic):
 
             # Copy the revision to the active package name by its original id
             shutil.rmtree(self.fs.path + current_revision_path, ignore_errors=True)
-            os.rename(self.fs.path + proposed_revision_path, self.fs.path + current_revision_path)
+            os.rename(
+                self.fs.path + proposed_revision_path,
+                self.fs.path + current_revision_path,
+            )
             self.fs.file_delete(current_revision_path, ignore_non_exist=True)
             self.fs.mkdir(current_revision_path)
             self.fs.reverse_sync(from_path=current_revision_path)
@@ -677,7 +682,7 @@ class DescriptorTopic(BaseTopic):
         # to preserve current expected behaviour
         if "userDefinedData" in indata:
             data = indata.pop("userDefinedData")
-            if type(data) == dict:
+            if isinstance(data, dict):
                 indata["_admin"]["userDefinedData"] = data
             else:
                 raise EngineException(
@@ -706,7 +711,7 @@ class DescriptorTopic(BaseTopic):
         descriptor_id,
         descriptor_file_name,
         old_descriptor_directory,
-        new_descriptor_directory
+        new_descriptor_directory,
     ):
         # Example:
         #    raise EngineException(
@@ -715,6 +720,7 @@ class DescriptorTopic(BaseTopic):
         #    )
         pass
 
+
 class VnfdTopic(DescriptorTopic):
     topic = "vnfds"
     topic_msg = "vnfd"
@@ -847,9 +853,29 @@ class VnfdTopic(DescriptorTopic):
         self.validate_internal_virtual_links(indata)
         self.validate_monitoring_params(indata)
         self.validate_scaling_group_descriptor(indata)
+        self.validate_helm_chart(indata)
 
         return indata
 
+    @staticmethod
+    def validate_helm_chart(indata):
+        def is_url(url):
+            result = urlparse(url)
+            return all([result.scheme, result.netloc])
+
+        kdus = indata.get("kdu", [])
+        for kdu in kdus:
+            helm_chart_value = kdu.get("helm-chart")
+            if not helm_chart_value:
+                continue
+            if not (
+                valid_helm_chart_re.match(helm_chart_value) or is_url(helm_chart_value)
+            ):
+                raise EngineException(
+                    "helm-chart '{}' is not valid".format(helm_chart_value),
+                    http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
+                )
+
     @staticmethod
     def validate_mgmt_interface_connection_point(indata):
         if not indata.get("vdu"):
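
validate_helm_chart above accepts a kdu's helm-chart value either when it matches valid_helm_chart_re (a lowercase, optionally repo-prefixed chart name) or when it parses as a URL with both a scheme and a network location. The check can be exercised on its own like this, reusing the same regular expression; the sample chart names are made up for illustration:

import re
from urllib.parse import urlparse

valid_helm_chart_re = re.compile(
    r"^[a-z0-9]([-a-z0-9]*[a-z0-9]/)?([a-z0-9]([-a-z0-9]*[a-z0-9])?)*$"
)


def is_valid_helm_chart(value):
    """True if the value looks like a repo/chart name or a full URL."""
    parsed = urlparse(value)
    is_url = all([parsed.scheme, parsed.netloc])
    return bool(valid_helm_chart_re.match(value)) or is_url


samples = (
    "stable/openldap",
    "openldap",
    "https://charts.example.com/openldap-1.2.3.tgz",
    "Bad_Chart",
)
for chart in samples:
    print(chart, "->", is_valid_helm_chart(chart))
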
@@ -983,13 +1009,9 @@ class VnfdTopic(DescriptorTopic):
             return False
         elif not storage_params.get("pkg-dir"):
             if self.fs.file_exists("{}_".format(storage_params["folder"]), "dir"):
-                f = "{}_/{}".format(
-                    storage_params["folder"], folder
-                )
+                f = "{}_/{}".format(storage_params["folder"], folder)
             else:
-                f = "{}/{}".format(
-                    storage_params["folder"], folder
-                )
+                f = "{}/{}".format(storage_params["folder"], folder)
             if file:
                 return self.fs.file_exists("{}/{}".format(f, file), "file")
             else:
@@ -1187,7 +1209,7 @@ class VnfdTopic(DescriptorTopic):
         """
         super().delete_extra(session, _id, db_content, not_send_msg)
         self.db.del_list("vnfpkgops", {"vnfPkgId": _id})
-        self.db.del_list(self.topic+"_revisions", {"_id": {"$regex": _id}})
+        self.db.del_list(self.topic + "_revisions", {"_id": {"$regex": _id}})
 
     def sol005_projection(self, data):
         data["onboardingState"] = data["_admin"]["onboardingState"]
@@ -1234,11 +1256,11 @@ class VnfdTopic(DescriptorTopic):
         """
         for df in vnfd.get("df", {}):
             for policy in ["scaling-aspect", "healing-aspect"]:
-                if (df.get(policy, {})):
+                if df.get(policy, {}):
                     df.pop(policy)
         for vdu in vnfd.get("vdu", {}):
             for alarm_policy in ["alarm", "monitoring-parameter"]:
-                if (vdu.get(alarm_policy, {})):
+                if vdu.get(alarm_policy, {}):
                     vdu.pop(alarm_policy)
         return vnfd
 
@@ -1327,11 +1349,9 @@ class VnfdTopic(DescriptorTopic):
             with self.fs.file_open(
                 (old_descriptor_directory.rstrip("/"), descriptor_file_name), "r"
             ) as old_descriptor_file:
-
                 with self.fs.file_open(
                     (new_descriptor_directory.rstrip("/"), descriptor_file_name), "r"
                 ) as new_descriptor_file:
-
                     old_content = yaml.safe_load(old_descriptor_file.read())
                     new_content = yaml.safe_load(new_descriptor_file.read())
 
@@ -1381,7 +1401,7 @@ class NsdTopic(DescriptorTopic):
     topic_msg = "nsd"
 
     def __init__(self, db, fs, msg, auth):
-        DescriptorTopic.__init__(self, db, fs, msg, auth)
+        super().__init__(db, fs, msg, auth)
 
     def pyangbind_validation(self, item, data, force=False):
         if self._descriptor_data_is_in_old_format(data):
@@ -1447,6 +1467,8 @@ class NsdTopic(DescriptorTopic):
         # TODO validata that if contains cloud-init-file or charms, have artifacts _admin.storage."pkg-dir" is not none
         for vld in get_iterable(indata.get("virtual-link-desc")):
             self.validate_vld_mgmt_network_with_virtual_link_protocol_data(vld, indata)
+        for fg in get_iterable(indata.get("vnffgd")):
+            self.validate_vnffgd_data(fg, indata)
 
         self.validate_vnf_profiles_vnfd_id(indata)
 
@@ -1468,6 +1490,45 @@ class NsdTopic(DescriptorTopic):
                             http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
                         )
 
+    @staticmethod
+    def validate_vnffgd_data(fg, indata):
+        position_list = []
+        all_vnf_ids = set(get_iterable(fg.get("vnf-profile-id")))
+        for fgposition in get_iterable(fg.get("nfp-position-element")):
+            position_list.append(fgposition["id"])
+
+        for nfpd in get_iterable(fg.get("nfpd")):
+            nfp_position = []
+            for position in get_iterable(nfpd.get("position-desc-id")):
+                nfp_position = position.get("nfp-position-element-id")
+                if position == "nfp-position-element-id":
+                    nfp_position = position.get("nfp-position-element-id")
+                if nfp_position[0] not in position_list:
+                    raise EngineException(
+                        "Error at vnffgd nfpd[id='{}']:nfp-position-element-id='{}' "
+                        "does not match any nfp-position-element".format(
+                            nfpd["id"], nfp_position[0]
+                        ),
+                        http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
+                    )
+
+                for cp in get_iterable(position.get("cp-profile-id")):
+                    for cpe in get_iterable(cp.get("constituent-profile-elements")):
+                        constituent_base_element_id = cpe.get(
+                            "constituent-base-element-id"
+                        )
+                        if (
+                            constituent_base_element_id
+                            and constituent_base_element_id not in all_vnf_ids
+                        ):
+                            raise EngineException(
+                                "Error at vnffgd constituent_profile[id='{}']:vnfd-id='{}' "
+                                "does not match any constituent-base-element-id".format(
+                                    cpe["id"], constituent_base_element_id
+                                ),
+                                http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
+                            )
+
     @staticmethod
     def validate_vnf_profiles_vnfd_id(indata):
         all_vnfd_ids = set(get_iterable(indata.get("vnfd-id")))
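
validate_vnffgd_data above enforces two referential rules inside a forwarding graph: every nfp-position-element-id used by an nfpd position descriptor must name a declared nfp-position-element, and every constituent-base-element-id must name one of the graph's vnf-profile-ids. The reduced sketch below checks the same two references over simplified, already-flattened input; it is not a drop-in replacement for the method:

def validate_vnffgd(fg):
    """Raise ValueError if an nfpd or constituent element points at an unknown id."""
    positions = {pos["id"] for pos in fg.get("nfp-position-element", [])}
    vnf_profiles = set(fg.get("vnf-profile-id", []))
    for nfpd in fg.get("nfpd", []):
        for desc in nfpd.get("position-desc-id", []):
            for pos_id in desc.get("nfp-position-element-id", []):
                if pos_id not in positions:
                    raise ValueError(
                        f"nfpd '{nfpd['id']}' references unknown position '{pos_id}'"
                    )
            for cp in desc.get("cp-profile-id", []):
                for cpe in cp.get("constituent-profile-elements", []):
                    base = cpe.get("constituent-base-element-id")
                    if base and base not in vnf_profiles:
                        raise ValueError(
                            f"element '{cpe['id']}' references unknown VNF '{base}'"
                        )


fg = {
    "vnf-profile-id": ["vnf1"],
    "nfp-position-element": [{"id": "pos1"}],
    "nfpd": [
        {
            "id": "path1",
            "position-desc-id": [
                {
                    "nfp-position-element-id": ["pos1"],
                    "cp-profile-id": [
                        {
                            "constituent-profile-elements": [
                                {"id": "cpe1", "constituent-base-element-id": "vnf1"}
                            ]
                        }
                    ],
                }
            ],
        }
    ],
}
validate_vnffgd(fg)  # passes silently; change "vnf1" to "vnf2" to see it fail
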
@@ -1511,7 +1572,7 @@ class NsdTopic(DescriptorTopic):
         # to preserve current expected behaviour
         if "userDefinedData" in indata:
             data = indata.pop("userDefinedData")
-            if type(data) == dict:
+            if isinstance(data, dict):
                 indata["_admin"]["userDefinedData"] = data
             else:
                 raise EngineException(
@@ -1647,7 +1708,7 @@ class NsdTopic(DescriptorTopic):
         :raises: FsException in case of error while deleting associated storage
         """
         super().delete_extra(session, _id, db_content, not_send_msg)
-        self.db.del_list(self.topic+"_revisions", { "_id": { "$regex": _id}})
+        self.db.del_list(self.topic + "_revisions", {"_id": {"$regex": _id}})
 
     @staticmethod
     def extract_day12_primitives(nsd: dict) -> dict:
@@ -1723,11 +1784,9 @@ class NsdTopic(DescriptorTopic):
             with self.fs.file_open(
                 (old_descriptor_directory.rstrip("/"), descriptor_file_name), "r"
             ) as old_descriptor_file:
-
                 with self.fs.file_open(
                     (new_descriptor_directory.rstrip("/"), descriptor_file_name), "r"
                 ) as new_descriptor_file:
-
                     old_content = yaml.safe_load(old_descriptor_file.read())
                     new_content = yaml.safe_load(new_descriptor_file.read())
 
index 37f1fb2..c4c8eb2 100644 (file)
@@ -205,7 +205,7 @@ class Engine(object):
             #                 "resources_to_operations file missing")
             #
             #     with open(resources_to_operations_file, 'r') as f:
-            #         resources_to_operations = yaml.load(f, Loader=yaml.Loader)
+            #         resources_to_operations = yaml.safe_load(f)
             #
             #     self.operations = []
             #
@@ -293,7 +293,9 @@ class Engine(object):
         :return: The list, it can be empty if no one match the filter_q.
         """
         if topic not in self.map_topic:
-            raise EngineException("Unknown topic {}!!!".format(topic), HTTPStatus.INTERNAL_SERVER_ERROR)
+            raise EngineException(
+                "Unknown topic {}!!!".format(topic), HTTPStatus.INTERNAL_SERVER_ERROR
+            )
         return self.map_topic[topic].list(session, filter_q, api_req)
 
     def get_item(self, session, topic, _id, filter_q=None, api_req=False):
@@ -307,7 +309,9 @@ class Engine(object):
         :return: dictionary, raise exception if not found.
         """
         if topic not in self.map_topic:
-            raise EngineException("Unknown topic {}!!!".format(topic), HTTPStatus.INTERNAL_SERVER_ERROR)
+            raise EngineException(
+                "Unknown topic {}!!!".format(topic), HTTPStatus.INTERNAL_SERVER_ERROR
+            )
         return self.map_topic[topic].show(session, _id, filter_q, api_req)
 
     def get_file(self, session, topic, _id, path=None, accept_header=None):
@@ -376,6 +380,26 @@ class Engine(object):
         with self.write_lock:
             return self.map_topic[topic].edit(session, _id, indata, kwargs)
 
+    def cancel_item(
+        self, rollback, session, topic, indata=None, kwargs=None, headers=None
+    ):
+        """
+        Cancels an item
+        :param rollback: list to append created items at database in case a rollback must be done
+        :param session: contains the used login username and working project, force to avoid checkins, public
+        :param topic: it can be: users, projects, vim_accounts, sdns, nsrs, nsds, vnfds
+        :param indata: data describing the operation to be cancelled
+        :param kwargs: used to override the indata descriptor
+        :param headers: http request headers
+        :return: None
+        """
+        if topic not in self.map_topic:
+            raise EngineException(
+                "Unknown topic {}!!!".format(topic), HTTPStatus.INTERNAL_SERVER_ERROR
+            )
+        with self.write_lock:
+            self.map_topic[topic].cancel(rollback, session, indata, kwargs, headers)
+
     def upgrade_db(self, current_version, target_version):
         if target_version not in self.map_target_version_to_int.keys():
             raise EngineException(
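
The new Engine.cancel_item follows the same dispatch pattern as the other entry points in this file: reject unknown topics, then delegate to the topic object under the write lock. A stripped-down sketch of that shape, with a threading.Lock and a dummy topic handler standing in for the real engine state:

import threading


class MiniEngine:
    """Illustrative dispatcher mirroring the shape of Engine.cancel_item."""

    def __init__(self, topics):
        self.map_topic = topics              # topic name -> handler object
        self.write_lock = threading.Lock()

    def cancel_item(self, rollback, session, topic, indata=None):
        if topic not in self.map_topic:
            raise ValueError("Unknown topic {}!!!".format(topic))
        with self.write_lock:
            return self.map_topic[topic].cancel(rollback, session, indata)


class DummyTopic:
    def cancel(self, rollback, session, indata):
        return "cancelled {}".format(indata.get("lcmOperationType"))


engine = MiniEngine({"nslcmops": DummyTopic()})
print(engine.cancel_item([], {"project_id": "p1"}, "nslcmops", {"lcmOperationType": "instantiate"}))
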
index 89e2f67..f591a70 100644 (file)
@@ -196,13 +196,11 @@ def format(data, request, response, toke_info):
         if "Location" in response.headers:
             body += '<a href="{}"> show </a>'.format(response.headers["Location"])
         else:
-            _id = request.path_info[request.path_info.rfind("/") + 1:]
+            _id = request.path_info[request.path_info.rfind("/") + 1 :]
             body += (
                 '<a href="/osm/{}?METHOD=DELETE"> '
                 '<img src="/osm/static/delete.png" height="25" width="25"> </a>'
-            ).format(
-                request.path_info
-            )
+            ).format(request.path_info)
             if request.path_info.startswith(
                 "/nslcm/v1/ns_instances_content/"
             ) or request.path_info.startswith("/nslcm/v1/ns_instances/"):
index 8cc7106..695a8f8 100644 (file)
@@ -31,6 +31,7 @@ from osm_nbi.validation import (
     nsi_instantiate,
     ns_migrate,
     ns_verticalscale,
+    nslcmop_cancel,
 )
 from osm_nbi.base_topic import (
     BaseTopic,
@@ -61,24 +62,6 @@ class NsrTopic(BaseTopic):
     def __init__(self, db, fs, msg, auth):
         BaseTopic.__init__(self, db, fs, msg, auth)
 
-    def _check_descriptor_dependencies(self, session, descriptor):
-        """
-        Check that the dependent descriptors exist on a new descriptor or edition
-        :param session: client session information
-        :param descriptor: descriptor to be inserted or edit
-        :return: None or raises exception
-        """
-        if not descriptor.get("nsdId"):
-            return
-        nsd_id = descriptor["nsdId"]
-        if not self.get_item_list(session, "nsds", {"id": nsd_id}):
-            raise EngineException(
-                "Descriptor error at nsdId='{}' references a non exist nsd".format(
-                    nsd_id
-                ),
-                http_code=HTTPStatus.CONFLICT,
-            )
-
     @staticmethod
     def format_on_new(content, project_id=None, make_public=False):
         BaseTopic.format_on_new(content, project_id=project_id, make_public=make_public)
@@ -278,7 +261,7 @@ class NsrTopic(BaseTopic):
                         for config in df["lcm-operations-configuration"][
                             "operate-vnf-op-config"
                         ].get("day1-2", []):
-                            # Verify the target object (VNF|NS|VDU|KDU) where we need to populate 
+                            # Verify the target object (VNF|NS|VDU|KDU) where we need to populate
                             # the params with the additional ones given by the user
                             if config.get("id") == selector:
                                 for primitive in get_iterable(
@@ -299,7 +282,7 @@ class NsrTopic(BaseTopic):
                                 "<rw_mgmt_ip>",
                                 "<VDU_SCALE_INFO>",
                                 "<ns_config_info>",
-                                "<OSM>"
+                                "<OSM>",
                             ):
                                 continue
                             if (
@@ -329,8 +312,8 @@ class NsrTopic(BaseTopic):
             EngineException, ValidationError, DbException, FsException, MsgException.
             Note: Exceptions are not captured on purpose. They should be captured at called
         """
+        step = "checking quotas"  # first step must be defined outside try
         try:
-            step = "checking quotas"
             self.check_quota(session)
 
             step = "validating input parameters"
@@ -458,7 +441,26 @@ class NsrTopic(BaseTopic):
 
         return ns_k8s_namespace
 
-    def _add_flavor_to_nsr(self, vdu, vnfd, nsr_descriptor, member_vnf_index, revision=None):
+    def _add_shared_volumes_to_nsr(
+        self, vdu, vnfd, nsr_descriptor, member_vnf_index, revision=None
+    ):
+        svsd = []
+        for vsd in vnfd.get("virtual-storage-desc", ()):
+            if vsd.get("vdu-storage-requirements"):
+                if (
+                    vsd.get("vdu-storage-requirements")[0].get("key") == "multiattach"
+                    and vsd.get("vdu-storage-requirements")[0].get("value") == "True"
+                ):
+                    # Avoid setting the volume name multiple times
+                    if not match(f"shared-.*-{vnfd['id']}", vsd["id"]):
+                        vsd["id"] = f"shared-{vsd['id']}-{vnfd['id']}"
+                    svsd.append(vsd)
+        if svsd:
+            nsr_descriptor["shared-volumes"] = svsd
+
+    def _add_flavor_to_nsr(
+        self, vdu, vnfd, nsr_descriptor, member_vnf_index, revision=None
+    ):
         flavor_data = {}
         guest_epa = {}
         # Find this vdu compute and storage descriptors
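
_add_shared_volumes_to_nsr above treats a virtual-storage-desc as a shared volume when its first vdu-storage-requirements entry is the key/value pair multiattach/True, and renames it to shared-<id>-<vnfd id> exactly once, guarded by a regex so an already-renamed volume is not prefixed again. A self-contained sketch of that selection:

import re


def shared_volumes_for_nsr(vnfd):
    """Return the multiattach virtual-storage-descs, renamed to a shared-* id."""
    shared = []
    for vsd in vnfd.get("virtual-storage-desc", ()):
        reqs = vsd.get("vdu-storage-requirements") or []
        if reqs and reqs[0].get("key") == "multiattach" and reqs[0].get("value") == "True":
            # Only prefix once, even if the VNFD is processed again (e.g. on upgrade).
            if not re.match(f"shared-.*-{vnfd['id']}", vsd["id"]):
                vsd["id"] = f"shared-{vsd['id']}-{vnfd['id']}"
            shared.append(vsd)
    return shared


vnfd = {
    "id": "myvnf",
    "virtual-storage-desc": [
        {"id": "data", "vdu-storage-requirements": [{"key": "multiattach", "value": "True"}]},
        {"id": "root", "size-of-storage": 10},
    ],
}
print([vsd["id"] for vsd in shared_volumes_for_nsr(vnfd)])   # ['shared-data-myvnf']
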
@@ -471,65 +473,48 @@ class NsrTopic(BaseTopic):
             if vsd.get("id") == vdu.get("virtual-storage-desc", [[]])[0]:
                 vdu_virtual_storage = vsd
         # Get this vdu vcpus, memory and storage info for flavor_data
-        if vdu_virtual_compute.get("virtual-cpu", {}).get(
-            "num-virtual-cpu"
-        ):
+        if vdu_virtual_compute.get("virtual-cpu", {}).get("num-virtual-cpu"):
             flavor_data["vcpu-count"] = vdu_virtual_compute["virtual-cpu"][
                 "num-virtual-cpu"
             ]
         if vdu_virtual_compute.get("virtual-memory", {}).get("size"):
             flavor_data["memory-mb"] = (
-                float(vdu_virtual_compute["virtual-memory"]["size"])
-                * 1024.0
+                float(vdu_virtual_compute["virtual-memory"]["size"]) * 1024.0
             )
         if vdu_virtual_storage.get("size-of-storage"):
-            flavor_data["storage-gb"] = vdu_virtual_storage[
-                "size-of-storage"
-            ]
+            flavor_data["storage-gb"] = vdu_virtual_storage["size-of-storage"]
         # Get this vdu EPA info for guest_epa
         if vdu_virtual_compute.get("virtual-cpu", {}).get("cpu-quota"):
-            guest_epa["cpu-quota"] = vdu_virtual_compute["virtual-cpu"][
-                "cpu-quota"
-            ]
+            guest_epa["cpu-quota"] = vdu_virtual_compute["virtual-cpu"]["cpu-quota"]
         if vdu_virtual_compute.get("virtual-cpu", {}).get("pinning"):
             vcpu_pinning = vdu_virtual_compute["virtual-cpu"]["pinning"]
             if vcpu_pinning.get("thread-policy"):
-                guest_epa["cpu-thread-pinning-policy"] = vcpu_pinning[
-                    "thread-policy"
-                ]
+                guest_epa["cpu-thread-pinning-policy"] = vcpu_pinning["thread-policy"]
             if vcpu_pinning.get("policy"):
                 cpu_policy = (
-                    "SHARED"
-                    if vcpu_pinning["policy"] == "dynamic"
-                    else "DEDICATED"
+                    "SHARED" if vcpu_pinning["policy"] == "dynamic" else "DEDICATED"
                 )
                 guest_epa["cpu-pinning-policy"] = cpu_policy
         if vdu_virtual_compute.get("virtual-memory", {}).get("mem-quota"):
-            guest_epa["mem-quota"] = vdu_virtual_compute["virtual-memory"][
-                "mem-quota"
+            guest_epa["mem-quota"] = vdu_virtual_compute["virtual-memory"]["mem-quota"]
+        if vdu_virtual_compute.get("virtual-memory", {}).get("mempage-size"):
+            guest_epa["mempage-size"] = vdu_virtual_compute["virtual-memory"][
+                "mempage-size"
             ]
-        if vdu_virtual_compute.get("virtual-memory", {}).get(
-            "mempage-size"
-        ):
-            guest_epa["mempage-size"] = vdu_virtual_compute[
-                "virtual-memory"
-            ]["mempage-size"]
-        if vdu_virtual_compute.get("virtual-memory", {}).get(
-            "numa-node-policy"
-        ):
-            guest_epa["numa-node-policy"] = vdu_virtual_compute[
-                "virtual-memory"
-            ]["numa-node-policy"]
-        if vdu_virtual_storage.get("disk-io-quota"):
-            guest_epa["disk-io-quota"] = vdu_virtual_storage[
-                "disk-io-quota"
+        if vdu_virtual_compute.get("virtual-memory", {}).get("numa-node-policy"):
+            guest_epa["numa-node-policy"] = vdu_virtual_compute["virtual-memory"][
+                "numa-node-policy"
             ]
+        if vdu_virtual_storage.get("disk-io-quota"):
+            guest_epa["disk-io-quota"] = vdu_virtual_storage["disk-io-quota"]
 
         if guest_epa:
             flavor_data["guest-epa"] = guest_epa
 
         revision = revision if revision is not None else 1
-        flavor_data["name"] = vdu["id"][:56] + "-" + member_vnf_index + "-" + str(revision) + "-flv"
+        flavor_data["name"] = (
+            vdu["id"][:56] + "-" + member_vnf_index + "-" + str(revision) + "-flv"
+        )
         flavor_data["id"] = str(len(nsr_descriptor["flavor"]))
         nsr_descriptor["flavor"].append(flavor_data)
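
The flavor entries built here get a deterministic name of the VDU id truncated to 56 characters, the member VNF index and the descriptor revision (defaulting to 1), joined with dashes and a -flv suffix, while the flavor id is simply the next index in the NSR's flavor list. A tiny sketch of that naming and indexing convention:

def flavor_name(vdu_id, member_vnf_index, revision=None):
    """Reproduce the '<vdu id[:56]>-<vnf index>-<revision>-flv' naming used above."""
    revision = revision if revision is not None else 1
    return vdu_id[:56] + "-" + member_vnf_index + "-" + str(revision) + "-flv"


nsr = {"flavor": []}
for vdu_id in ("mgmt-vdu", "data-vdu"):
    nsr["flavor"].append({"name": flavor_name(vdu_id, "1"), "id": str(len(nsr["flavor"]))})
print(nsr["flavor"])
# [{'name': 'mgmt-vdu-1-1-flv', 'id': '0'}, {'name': 'data-vdu-1-1-flv', 'id': '1'}]
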
 
@@ -576,6 +561,8 @@ class NsrTopic(BaseTopic):
             "flavor": [],
             "image": [],
             "affinity-or-anti-affinity-group": [],
+            "shared-volumes": [],
+            "vnffgd": [],
         }
         if "revision" in nsd["_admin"]:
             nsr_descriptor["revision"] = nsd["_admin"]["revision"]
@@ -613,6 +600,9 @@ class NsrTopic(BaseTopic):
                 for vdu in vnfd.get("vdu", ()):
                     member_vnf_index = vnf_profile.get("id")
                     self._add_flavor_to_nsr(vdu, vnfd, nsr_descriptor, member_vnf_index)
+                    self._add_shared_volumes_to_nsr(
+                        vdu, vnfd, nsr_descriptor, member_vnf_index
+                    )
                     sw_image_id = vdu.get("sw-image-desc")
                     if sw_image_id:
                         image_data = self._get_image_data_from_vnfd(vnfd, sw_image_id)
@@ -652,6 +642,16 @@ class NsrTopic(BaseTopic):
                 )
                 vld["name"] = vld["id"]
             nsr_descriptor["vld"] = nsr_vld
+        if nsd.get("vnffgd"):
+            vnffgd = nsd.get("vnffgd")
+            for vnffg in vnffgd:
+                info = {}
+                for k, v in vnffg.items():
+                    if k == "id":
+                        info.update({k: v})
+                    if k == "nfpd":
+                        info.update({k: v})
+                nsr_descriptor["vnffgd"].append(info)
 
         return nsr_descriptor
 
@@ -769,7 +769,6 @@ class NsrTopic(BaseTopic):
         if "revision" in vnfd:
             vnfr_descriptor["revision"] = vnfd["revision"]
 
-
         vnf_k8s_namespace = ns_k8s_namespace
         if vnf_params:
             if vnf_params.get("k8s-namespace"):
@@ -881,7 +880,7 @@ class NsrTopic(BaseTopic):
             try:
                 vdu_virtual_storage_descriptors = utils.filter_in_list(
                     vnfd.get("virtual-storage-desc", []),
-                    lambda stg_desc: stg_desc["id"] in vdu["virtual-storage-desc"]
+                    lambda stg_desc: stg_desc["id"] in vdu["virtual-storage-desc"],
                 )
             except Exception:
                 vdu_virtual_storage_descriptors = []
@@ -894,7 +893,7 @@ class NsrTopic(BaseTopic):
                 "interfaces": [],
                 "additionalParams": additional_params,
                 "vdu-name": vdu["name"],
-                "virtual-storages": vdu_virtual_storage_descriptors
+                "virtual-storages": vdu_virtual_storage_descriptors,
             }
             if vdu_params and vdu_params.get("config-units"):
                 vdur["config-units"] = vdu_params["config-units"]
@@ -919,7 +918,7 @@ class NsrTopic(BaseTopic):
                     # Name, mac-address and interface position is taken from VNFD
                     # and included into VNFR. By this way RO can process this information
                     # while creating the VDU.
-                    iface_fields = ("name", "mac-address", "position")
+                    iface_fields = ("name", "mac-address", "position", "ip-address")
                     vdu_iface = {
                         x: iface[x] for x in iface_fields if iface.get(x) is not None
                     }
@@ -989,7 +988,7 @@ class NsrTopic(BaseTopic):
                                         if (
                                             cpd.get("constituent-cpd-id")
                                             == iface_ext_cp
-                                        ):
+                                        ) and vnf_profile.get("id") == vnf_index:
                                             vdu_iface["ns-vld-id"] = vlc.get(
                                                 "virtual-link-profile-id"
                                             )
@@ -1044,7 +1043,9 @@ class NsrTopic(BaseTopic):
                 vdur["alt-image-ids"] = alt_image_ids
 
             revision = revision if revision is not None else 1
-            flavor_data_name = vdu["id"][:56] + "-" + vnf_index + "-" + str(revision) + "-flv"
+            flavor_data_name = (
+                vdu["id"][:56] + "-" + vnf_index + "-" + str(revision) + "-flv"
+            )
             nsr_flavor_desc = utils.find_in_list(
                 nsr_descriptor["flavor"],
                 lambda flavor: flavor["name"] == flavor_data_name,
@@ -1053,6 +1054,21 @@ class NsrTopic(BaseTopic):
             if nsr_flavor_desc:
                 vdur["ns-flavor-id"] = nsr_flavor_desc["id"]
 
+            # Adding Shared Volume information to vdur
+            if vdur.get("virtual-storages"):
+                nsr_sv = []
+                for vsd in vdur["virtual-storages"]:
+                    if vsd.get("vdu-storage-requirements"):
+                        if (
+                            vsd["vdu-storage-requirements"][0].get("key")
+                            == "multiattach"
+                            and vsd["vdu-storage-requirements"][0].get("value")
+                            == "True"
+                        ):
+                            nsr_sv.append(vsd["id"])
+                if nsr_sv:
+                    vdur["shared-volumes-id"] = nsr_sv
+
             # Adding Affinity groups information to vdur
             try:
                 vdu_profile_affinity_group = utils.find_in_list(
@@ -1113,7 +1129,6 @@ class NsrTopic(BaseTopic):
                 vdur["id"] = vdur["_id"]
                 vdur["count-index"] = index
                 vnfr_descriptor["vdur"].append(vdur)
-
         return vnfr_descriptor
 
     def vca_status_refresh(self, session, ns_instance_content, filter_q):
@@ -1125,15 +1140,22 @@ class NsrTopic(BaseTopic):
         :param filter_q: dict: query parameter containing vcaStatus-refresh as true or false
         :return: None
         """
-        time_now, time_delta = time(), time() - ns_instance_content["_admin"]["modified"]
-        force_refresh = isinstance(filter_q, dict) and filter_q.get('vcaStatusRefresh') == 'true'
+        time_now, time_delta = (
+            time(),
+            time() - ns_instance_content["_admin"]["modified"],
+        )
+        force_refresh = (
+            isinstance(filter_q, dict) and filter_q.get("vcaStatusRefresh") == "true"
+        )
         threshold_reached = time_delta > 120
         if force_refresh or threshold_reached:
             operation, _id = "vca_status_refresh", ns_instance_content["_id"]
             ns_instance_content["_admin"]["modified"] = time_now
             self.db.set_one(self.topic, {"_id": _id}, ns_instance_content)
             nslcmop_desc = NsLcmOpTopic._create_nslcmop(_id, operation, None)
-            self.format_on_new(nslcmop_desc, session["project_id"], make_public=session["public"])
+            self.format_on_new(
+                nslcmop_desc, session["project_id"], make_public=session["public"]
+            )
             nslcmop_desc["_admin"].pop("nsState")
             self.msg.write("ns", operation, nslcmop_desc)
         return
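
vca_status_refresh above triggers a refresh either when the caller passes vcaStatusRefresh=true in the query string or when more than 120 seconds have passed since the NSR was last modified. The throttling decision in isolation, assuming time() seconds and a dict-shaped filter:

from time import time

REFRESH_THRESHOLD = 120  # seconds since the NSR was last modified


def should_refresh(filter_q, last_modified, now=None):
    """True when the client forces a refresh or the record is older than the threshold."""
    now = now if now is not None else time()
    force_refresh = isinstance(filter_q, dict) and filter_q.get("vcaStatusRefresh") == "true"
    threshold_reached = (now - last_modified) > REFRESH_THRESHOLD
    return force_refresh or threshold_reached


now = time()
print(should_refresh({"vcaStatusRefresh": "true"}, now, now))   # True (forced)
print(should_refresh(None, now - 30, now))                      # False (still fresh)
print(should_refresh(None, now - 300, now))                     # True (stale)
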
@@ -1193,6 +1215,7 @@ class NsLcmOpTopic(BaseTopic):
         "terminate": ns_terminate,
         "migrate": ns_migrate,
         "verticalscale": ns_verticalscale,
+        "cancel": nslcmop_cancel,
     }
 
     def __init__(self, db, fs, msg, auth):
@@ -1338,7 +1361,6 @@ class NsLcmOpTopic(BaseTopic):
                 vnfd_id_2update = indata["changeVnfPackageData"]["vnfdId"]
 
                 if vnf_instance_id not in nsr["constituent-vnfr-ref"]:
-
                     raise EngineException(
                         f"Error in validating ns-update request: vnf {vnf_instance_id} does not "
                         f"belong to NS {ns_instance_id}",
@@ -1358,7 +1380,6 @@ class NsLcmOpTopic(BaseTopic):
 
                 # Check the given vnfd-id belongs to given vnf instance
                 if constituent_vnfd_id and (vnfd_id_2update != constituent_vnfd_id):
-
                     raise EngineException(
                         f"Error in validating ns-update request: vnfd-id {vnfd_id_2update} does not "
                         f"match with the vnfd-id: {constituent_vnfd_id} of VNF instance: {vnf_instance_id}",
@@ -1471,12 +1492,16 @@ class NsLcmOpTopic(BaseTopic):
                 "nsd:constituent-vnfd".format(member_vnf_index)
             )
 
-        ## Backwards compatibility: if there is no revision, get it from the one and only VNFD entry
+        # Backwards compatibility: if there is no revision, get it from the one and only VNFD entry
         if "revision" in vnfr:
             vnfd_revision = vnfr["vnfd-id"] + ":" + str(vnfr["revision"])
-            vnfd = self.db.get_one("vnfds_revisions", {"_id": vnfd_revision}, fail_on_empty=False)
+            vnfd = self.db.get_one(
+                "vnfds_revisions", {"_id": vnfd_revision}, fail_on_empty=False
+            )
         else:
-            vnfd = self.db.get_one("vnfds", {"_id": vnfr["vnfd-id"]}, fail_on_empty=False)
+            vnfd = self.db.get_one(
+                "vnfds", {"_id": vnfr["vnfd-id"]}, fail_on_empty=False
+            )
 
         if not vnfd:
             raise EngineException(
@@ -1558,8 +1583,8 @@ class NsLcmOpTopic(BaseTopic):
             ivld.get("id"): set()
             for ivld in get_iterable(vnfd.get("int-virtual-link-desc"))
         }
-        for vdu in get_iterable(vnfd.get("vdu")):
-            for cpd in get_iterable(vnfd.get("int-cpd")):
+        for vdu in vnfd.get("vdu", {}):
+            for cpd in vdu.get("int-cpd", {}):
                 if cpd.get("int-virtual-link-desc"):
                     vnfd_ivlds_cpds[cpd.get("int-virtual-link-desc")] = cpd.get("id")
 
@@ -1608,9 +1633,7 @@ class NsLcmOpTopic(BaseTopic):
             return self.db.get_one("vim_accounts", db_filter)
         except Exception:
             raise EngineException(
-                "Invalid vimAccountId='{}' not present for the project".format(
-                    vim_id
-                )
+                "Invalid vimAccountId='{}' not present for the project".format(vim_id)
             )
 
     def _check_valid_wim_account(self, wim_account, wim_accounts, session):
@@ -1876,11 +1899,11 @@ class NsLcmOpTopic(BaseTopic):
         return ifaces_forcing_vim_network
 
     def _update_vnfrs_from_nsd(self, nsr):
+        step = "Getting vnf_profiles from nsd"  # first step must be defined outside try
         try:
             nsr_id = nsr["_id"]
             nsd = nsr["nsd"]
 
-            step = "Getting vnf_profiles from nsd"
             vnf_profiles = nsd.get("df", [{}])[0].get("vnf-profile", ())
             vld_fixed_ip_connection_point_data = {}
 
@@ -1891,11 +1914,18 @@ class NsLcmOpTopic(BaseTopic):
                     for cpd in vlc.get("constituent-cpd-id", ()):
                         if cpd.get("ip-address"):
                             step = "Storing ip-address info"
-                            vld_fixed_ip_connection_point_data.update({vlc.get("virtual-link-profile-id") + '.' + cpd.get("constituent-base-element-id"): {
-                                "vnfd-connection-point-ref": cpd.get(
-                                    "constituent-cpd-id"),
-                                    "ip-address": cpd.get(
-                                    "ip-address")}})
+                            vld_fixed_ip_connection_point_data.update(
+                                {
+                                    vlc.get("virtual-link-profile-id")
+                                    + "."
+                                    + cpd.get("constituent-base-element-id"): {
+                                        "vnfd-connection-point-ref": cpd.get(
+                                            "constituent-cpd-id"
+                                        ),
+                                        "ip-address": cpd.get("ip-address"),
+                                    }
+                                }
+                            )
 
             # Inserting ip address to vnfr
             if len(vld_fixed_ip_connection_point_data) > 0:
@@ -1903,17 +1933,25 @@ class NsLcmOpTopic(BaseTopic):
                 vnfrs = self.db.get_list("vnfrs", {"nsr-id-ref": nsr_id})
                 for item in vld_fixed_ip_connection_point_data.keys():
                     step = "Filtering vnfrs"
-                    vnfr = next(filter(lambda vnfr: vnfr["member-vnf-index-ref"] == item.split('.')[1], vnfrs), None)
+                    vnfr = next(
+                        filter(
+                            lambda vnfr: vnfr["member-vnf-index-ref"]
+                            == item.split(".")[1],
+                            vnfrs,
+                        ),
+                        None,
+                    )
                     if vnfr:
                         vnfr_update = {}
                         for vdur_index, vdur in enumerate(vnfr["vdur"]):
                             for iface_index, iface in enumerate(vdur["interfaces"]):
                                 step = "Looking for matched interface"
                                 if (
-                                        iface.get("external-connection-point-ref")
-                                        == vld_fixed_ip_connection_point_data[item].get("vnfd-connection-point-ref") and
-                                        iface.get("ns-vld-id") == item.split('.')[0]
-
+                                    iface.get("external-connection-point-ref")
+                                    == vld_fixed_ip_connection_point_data[item].get(
+                                        "vnfd-connection-point-ref"
+                                    )
+                                    and iface.get("ns-vld-id") == item.split(".")[0]
                                 ):
                                     vnfr_update_text = "vdur.{}.interfaces.{}".format(
                                         vdur_index, iface_index
@@ -1921,19 +1959,22 @@ class NsLcmOpTopic(BaseTopic):
                                     step = "Storing info in order to update vnfr"
                                     vnfr_update[
                                         vnfr_update_text + ".ip-address"
-                                        ] = increment_ip_mac(
-                                        vld_fixed_ip_connection_point_data[item].get("ip-address"),
-                                        vdur.get("count-index", 0), )
+                                    ] = increment_ip_mac(
+                                        vld_fixed_ip_connection_point_data[item].get(
+                                            "ip-address"
+                                        ),
+                                        vdur.get("count-index", 0),
+                                    )
                                     vnfr_update[vnfr_update_text + ".fixed-ip"] = True
 
                         step = "updating vnfr at database"
                         self.db.set_one("vnfrs", {"_id": vnfr["_id"]}, vnfr_update)
         except (
-                ValidationError,
-                EngineException,
-                DbException,
-                MsgException,
-                FsException,
+            ValidationError,
+            EngineException,
+            DbException,
+            MsgException,
+            FsException,
         ) as e:
             raise type(e)("{} while '{}'".format(e, step), http_code=e.http_code)
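
_update_vnfrs_from_nsd above keys each fixed ip-address by '<virtual-link-profile-id>.<constituent-base-element-id>' and, for every matching VDU interface, writes the address incremented by the VDU's count-index plus a fixed-ip flag. increment_ip_mac is an OSM helper; the IPv4-only stand-in below is an assumption used purely to illustrate the per-replica assignment:

import ipaddress


def increment_ipv4(ip, offset):
    """Simplified, IPv4-only stand-in for OSM's increment_ip_mac helper."""
    return str(ipaddress.IPv4Address(ip) + offset)


def assign_fixed_ips(base_ip, vdurs):
    """Give each VDU replica base_ip + its count-index, flagging it as fixed."""
    updates = {}
    for idx, vdur in enumerate(vdurs):
        prefix = "vdur.{}.interfaces.0".format(idx)
        updates[prefix + ".ip-address"] = increment_ipv4(base_ip, vdur.get("count-index", 0))
        updates[prefix + ".fixed-ip"] = True
    return updates


vdurs = [{"count-index": 0}, {"count-index": 1}, {"count-index": 2}]
for key, value in assign_fixed_ips("10.0.0.10", vdurs).items():
    print(key, "=", value)
# vdur.0.interfaces.0.ip-address = 10.0.0.10 ... vdur.2.interfaces.0.ip-address = 10.0.0.12
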
 
@@ -2254,10 +2295,12 @@ class NsLcmOpTopic(BaseTopic):
                         HTTPStatus.CONFLICT,
                     )
             self._check_ns_operation(session, nsr, operation, indata)
-            if (indata.get("primitive_params")):
+            if indata.get("primitive_params"):
                 indata["primitive_params"] = json.dumps(indata["primitive_params"])
-            elif (indata.get("additionalParamsForVnf")):
-                indata["additionalParamsForVnf"] = json.dumps(indata["additionalParamsForVnf"])
+            elif indata.get("additionalParamsForVnf"):
+                indata["additionalParamsForVnf"] = json.dumps(
+                    indata["additionalParamsForVnf"]
+                )
 
             if operation == "instantiate":
                 self._update_vnfrs_from_nsd(nsr)
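
Note: the hunk above only re-wraps the lines, but the behaviour is worth spelling out: when primitive_params or additionalParamsForVnf are present they are serialized to a JSON string before the nslcmop is created. A small illustration:

    import json

    # illustrative input for an "action" request
    indata = {"primitive_params": {"backup": True, "target": "db"}}
    indata["primitive_params"] = json.dumps(indata["primitive_params"])
    # the nslcmop now carries the string '{"backup": true, "target": "db"}'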
@@ -2268,39 +2311,57 @@ class NsLcmOpTopic(BaseTopic):
                 vnfd = self.db.get_one("vnfds", {"_id": vnfd_id})
                 nsd = self.db.get_one("nsds", {"_id": nsr["nsd-id"]})
                 ns_request = nsr["instantiate_params"]
-                vnfr = self.db.get_one("vnfrs", {"_id": indata["changeVnfPackageData"]["vnfInstanceId"]})
+                vnfr = self.db.get_one(
+                    "vnfrs", {"_id": indata["changeVnfPackageData"]["vnfInstanceId"]}
+                )
                 latest_vnfd_revision = vnfd["_admin"].get("revision", 1)
                 vnfr_vnfd_revision = vnfr.get("revision", 1)
                 if latest_vnfd_revision != vnfr_vnfd_revision:
                     old_vnfd_id = vnfd_id + ":" + str(vnfr_vnfd_revision)
-                    old_db_vnfd = self.db.get_one("vnfds_revisions", {"_id": old_vnfd_id})
+                    old_db_vnfd = self.db.get_one(
+                        "vnfds_revisions", {"_id": old_vnfd_id}
+                    )
                     old_sw_version = old_db_vnfd.get("software-version", "1.0")
                     new_sw_version = vnfd.get("software-version", "1.0")
                     if new_sw_version != old_sw_version:
                         vnf_index = vnfr["member-vnf-index-ref"]
                         self.logger.info("nsr {}".format(nsr))
                         for vdu in vnfd["vdu"]:
-                            self.nsrtopic._add_flavor_to_nsr(vdu, vnfd, nsr, vnf_index, latest_vnfd_revision)
+                            self.nsrtopic._add_shared_volumes_to_nsr(
+                                vdu, vnfd, nsr, vnf_index, latest_vnfd_revision
+                            )
+                            self.nsrtopic._add_flavor_to_nsr(
+                                vdu, vnfd, nsr, vnf_index, latest_vnfd_revision
+                            )
                             sw_image_id = vdu.get("sw-image-desc")
                             if sw_image_id:
-                                image_data = self.nsrtopic._get_image_data_from_vnfd(vnfd, sw_image_id)
+                                image_data = self.nsrtopic._get_image_data_from_vnfd(
+                                    vnfd, sw_image_id
+                                )
                                 self.nsrtopic._add_image_to_nsr(nsr, image_data)
                             for alt_image in vdu.get("alternative-sw-image-desc", ()):
-                                image_data = self.nsrtopic._get_image_data_from_vnfd(vnfd, alt_image)
+                                image_data = self.nsrtopic._get_image_data_from_vnfd(
+                                    vnfd, alt_image
+                                )
                                 self.nsrtopic._add_image_to_nsr(nsr, image_data)
                         nsr_update["image"] = nsr["image"]
                         nsr_update["flavor"] = nsr["flavor"]
+                        nsr_update["shared-volumes"] = nsr["shared-volumes"]
                         self.db.set_one("nsrs", {"_id": nsr["_id"]}, nsr_update)
-                        ns_k8s_namespace = self.nsrtopic._get_ns_k8s_namespace(nsd, ns_request, session)
-                        vnfr_descriptor = self.nsrtopic._create_vnfr_descriptor_from_vnfd(
-                            nsd,
-                            vnfd,
-                            vnfd_id,
-                            vnf_index,
-                            nsr,
-                            ns_request,
-                            ns_k8s_namespace,
-                            latest_vnfd_revision,
+                        ns_k8s_namespace = self.nsrtopic._get_ns_k8s_namespace(
+                            nsd, ns_request, session
+                        )
+                        vnfr_descriptor = (
+                            self.nsrtopic._create_vnfr_descriptor_from_vnfd(
+                                nsd,
+                                vnfd,
+                                vnfd_id,
+                                vnf_index,
+                                nsr,
+                                ns_request,
+                                ns_k8s_namespace,
+                                latest_vnfd_revision,
+                            )
                         )
                         indata["newVdur"] = vnfr_descriptor["vdur"]
             nslcmop_desc = self._create_nslcmop(nsInstanceId, operation, indata)
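
Note: for changeVnfPackageData the descriptor revision and software version are compared before anything is rebuilt; only when both differ are flavors, the new shared-volumes entries and images re-added to the NS record and a fresh vdur list handed to LCM. A condensed sketch of that gate, reusing the names from the diff:

    latest_rev = vnfd["_admin"].get("revision", 1)
    current_rev = vnfr.get("revision", 1)
    if latest_rev != current_rev:
        if vnfd.get("software-version", "1.0") != old_db_vnfd.get("software-version", "1.0"):
            # rebuild flavor / shared-volumes / image lists in the nsr and
            # regenerate the vdur list passed on as indata["newVdur"]
            ...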
@@ -2323,6 +2384,41 @@ class NsLcmOpTopic(BaseTopic):
         # except DbException as e:
         #     raise EngineException("Cannot get ns_instance '{}': {}".format(e), HTTPStatus.NOT_FOUND)
 
+    def cancel(self, rollback, session, indata=None, kwargs=None, headers=None):
+        validate_input(indata, self.operation_schema["cancel"])
+        # Override descriptor with query string kwargs
+        self._update_input_with_kwargs(indata, kwargs, yaml_format=True)
+        nsLcmOpOccId = indata["nsLcmOpOccId"]
+        cancelMode = indata["cancelMode"]
+        # get nslcmop from nsLcmOpOccId
+        _filter = BaseTopic._get_project_filter(session)
+        _filter["_id"] = nsLcmOpOccId
+        nslcmop = self.db.get_one("nslcmops", _filter)
+        # Fail if this is not an ongoing nslcmop
+        if nslcmop.get("operationState") not in [
+            "STARTING",
+            "PROCESSING",
+            "ROLLING_BACK",
+        ]:
+            raise EngineException(
+                "Operation is not in STARTING, PROCESSING or ROLLING_BACK state",
+                http_code=HTTPStatus.CONFLICT,
+            )
+        nsInstanceId = nslcmop["nsInstanceId"]
+        update_dict = {
+            "isCancelPending": True,
+            "cancelMode": cancelMode,
+        }
+        self.db.set_one(
+            "nslcmops", q_filter=_filter, update_dict=update_dict, fail_on_empty=False
+        )
+        data = {
+            "_id": nsLcmOpOccId,
+            "nsInstanceId": nsInstanceId,
+            "cancelMode": cancelMode,
+        }
+        self.msg.write("nslcmops", "cancel", data)
+
     def delete(self, session, _id, dry_run=False, not_send_msg=None):
         raise EngineException(
             "Method delete called directly", HTTPStatus.INTERNAL_SERVER_ERROR
@@ -2375,24 +2471,6 @@ class NsiTopic(BaseTopic):
                     additional_params[k] = "!!yaml " + safe_dump(v)
         return additional_params
 
-    def _check_descriptor_dependencies(self, session, descriptor):
-        """
-        Check that the dependent descriptors exist on a new descriptor or edition
-        :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
-        :param descriptor: descriptor to be inserted or edit
-        :return: None or raises exception
-        """
-        if not descriptor.get("nst-ref"):
-            return
-        nstd_id = descriptor["nst-ref"]
-        if not self.get_item_list(session, "nsts", {"id": nstd_id}):
-            raise EngineException(
-                "Descriptor error at nst-ref='{}' references a non exist nstd".format(
-                    nstd_id
-                ),
-                http_code=HTTPStatus.CONFLICT,
-            )
-
     def check_conflict_on_del(self, session, _id, db_content):
         """
         Check that NSI is not instantiated
@@ -2478,8 +2556,8 @@ class NsiTopic(BaseTopic):
         :return: the _id of nsi descriptor created at database
         """
 
+        step = "checking quotas"  # first step must be defined outside try
         try:
-            step = "checking quotas"
             self.check_quota(session)
 
             step = ""
@@ -2667,13 +2745,13 @@ class NsiTopic(BaseTopic):
             self.db.create("nsis", nsi_descriptor)
             rollback.append({"topic": "nsis", "_id": nsi_id})
             return nsi_id, None
+        except ValidationError as e:
+            raise EngineException(e, HTTPStatus.UNPROCESSABLE_ENTITY)
         except Exception as e:  # TODO remove try Except, it is captured at nbi.py
             self.logger.exception(
                 "Exception {} at NsiTopic.new()".format(e), exc_info=True
             )
             raise EngineException("Error {}: {}".format(step, e))
-        except ValidationError as e:
-            raise EngineException(e, HTTPStatus.UNPROCESSABLE_ENTITY)
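
Note: moving the ValidationError clause ahead of the generic handler is a behavioural fix, not just formatting. Except clauses are tried top to bottom, so with the old ordering the broad Exception branch caught ValidationError first and the UNPROCESSABLE_ENTITY response was unreachable. A minimal illustration:

    class ValidationError(Exception):  # stand-in for osm_nbi.validation.ValidationError
        pass

    try:
        raise ValidationError("bad input")
    except Exception:
        print("generic handler")   # old ordering: always taken
    except ValidationError:
        print("never reached")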
 
     def edit(self, session, _id, indata=None, kwargs=None, content=None):
         raise EngineException(
index 383b462..7035cae 100644 (file)
@@ -116,9 +116,16 @@ backend: "keystone"         # internal or keystone or tacacs
 # tacacs_port: 49    # Default value
 # tacacs_timeout: 10 # Default value
 
-# Password expiry configuration
-# pwd_expiry_check: True      # Uncomment to enable the password expiry check
-# days: 30                    # Default value
+# User Management configuration
+user_management: True
+pwd_expire_days: 30         # Password expiry Default value
+max_pwd_attempt: 5
+account_expire_days: 90     # Account expiry Default value
+
+# CEF Configuration
+version: "0"
+deviceVendor: "OSM"
+deviceProduct: "OSM"
 
 [rbac]
 # roles_to_operations: "roles_to_operations.yml"  # initial role generation when database
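
Note: the new user-management knobs (password/account expiry, attempt limit) are consumed by the authentication backend, while version/deviceVendor/deviceProduct feed the CEF audit logger wired into nbi.py later in this patch. The exact rendering comes from cef_event_builder in osm_nbi/utils.py (not shown here), but a CEF record is conventionally of the form below, with suser/msg being the usual shorthand for the sourceUserName/message fields set by the events further down:

    CEF:0|OSM|OSM|<device version>|<event id>|User Login|<severity>|suser=<user> msg=User Logged In, Project=<project> Outcome=Success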
index d78379f..8f87135 100644 (file)
@@ -28,6 +28,7 @@ from osm_nbi.authconn import AuthException, AuthconnException
 from osm_nbi.auth import Authenticator
 from osm_nbi.engine import Engine, EngineException
 from osm_nbi.subscriptions import SubscriptionThread
+from osm_nbi.utils import cef_event, cef_event_builder
 from osm_nbi.validation import ValidationError
 from osm_common.dbbase import DbException
 from osm_common.fsbase import FsException
@@ -46,6 +47,7 @@ database_version = "1.2"
 auth_database_version = "1.0"
 nbi_server = None  # instance of Server class
 subscription_thread = None  # instance of SubscriptionThread class
+cef_logger = None
 
 """
 North Bound Interface  (O: OSM specific; 5,X: SOL005 not implemented yet; O5: SOL005 implemented)
@@ -90,7 +92,7 @@ URL: /osm                                                       GET     POST
                     heal                                                O5
             /ns_lcm_op_occs                                     5       5
                 /<nsLcmOpOccId>                                 5                       5       5
-                    TO BE COMPLETED                             5               5
+                    cancel                                              O5
             /vnf_instances  (also vnfrs for compatibility)      O
                 /<vnfInstanceId>                                O
             /subscriptions                                      5       5
@@ -463,8 +465,8 @@ valid_url_methods = {
                     },
                     "verticalscale": {
                         "METHODS": ("POST",),
-                        "ROLE_PERMISSION": "ns_instances:id:verticalscale:"
-                           },
+                        "ROLE_PERMISSION": "ns_instances:id:verticalscale:",
+                    },
                 },
             },
             "ns_lcm_op_occs": {
@@ -473,6 +475,10 @@ valid_url_methods = {
                 "<ID>": {
                     "METHODS": ("GET",),
                     "ROLE_PERMISSION": "ns_instances:opps:id:",
+                    "cancel": {
+                        "METHODS": ("POST",),
+                        "ROLE_PERMISSION": "ns_instances:opps:cancel:",
+                    },
                 },
             },
             "vnfrs": {
@@ -497,33 +503,42 @@ valid_url_methods = {
     },
     "vnflcm": {
         "v1": {
-            "vnf_instances": {"METHODS": ("GET", "POST"),
-                              "ROLE_PERMISSION": "vnflcm_instances:",
-                              "<ID>": {"METHODS": ("GET", "DELETE"),
-                                       "ROLE_PERMISSION": "vnflcm_instances:id:",
-                                       "scale": {"METHODS": ("POST",),
-                                                 "ROLE_PERMISSION": "vnflcm_instances:id:scale:"
-                                                },
-                                       "terminate": {"METHODS": ("POST",),
-                                                     "ROLE_PERMISSION": "vnflcm_instances:id:terminate:"
-                                                    },
-                                       "instantiate": {"METHODS": ("POST",),
-                                                       "ROLE_PERMISSION": "vnflcm_instances:id:instantiate:"
-                                                      },
-                                       }
-                            },
-            "vnf_lcm_op_occs": {"METHODS": ("GET",),
-                               "ROLE_PERMISSION": "vnf_instances:opps:",
-                               "<ID>": {"METHODS": ("GET",),
-                                        "ROLE_PERMISSION": "vnf_instances:opps:id:"
-                                        },
-                               },
-            "subscriptions": {"METHODS": ("GET", "POST"),
-                              "ROLE_PERMISSION": "vnflcm_subscriptions:",
-                              "<ID>": {"METHODS": ("GET", "DELETE"),
-                                       "ROLE_PERMISSION": "vnflcm_subscriptions:id:"
-                                       }
-                              },
+            "vnf_instances": {
+                "METHODS": ("GET", "POST"),
+                "ROLE_PERMISSION": "vnflcm_instances:",
+                "<ID>": {
+                    "METHODS": ("GET", "DELETE"),
+                    "ROLE_PERMISSION": "vnflcm_instances:id:",
+                    "scale": {
+                        "METHODS": ("POST",),
+                        "ROLE_PERMISSION": "vnflcm_instances:id:scale:",
+                    },
+                    "terminate": {
+                        "METHODS": ("POST",),
+                        "ROLE_PERMISSION": "vnflcm_instances:id:terminate:",
+                    },
+                    "instantiate": {
+                        "METHODS": ("POST",),
+                        "ROLE_PERMISSION": "vnflcm_instances:id:instantiate:",
+                    },
+                },
+            },
+            "vnf_lcm_op_occs": {
+                "METHODS": ("GET",),
+                "ROLE_PERMISSION": "vnf_instances:opps:",
+                "<ID>": {
+                    "METHODS": ("GET",),
+                    "ROLE_PERMISSION": "vnf_instances:opps:id:",
+                },
+            },
+            "subscriptions": {
+                "METHODS": ("GET", "POST"),
+                "ROLE_PERMISSION": "vnflcm_subscriptions:",
+                "<ID>": {
+                    "METHODS": ("GET", "DELETE"),
+                    "ROLE_PERMISSION": "vnflcm_subscriptions:id:",
+                },
+            },
         }
     },
     "nst": {
@@ -620,12 +635,14 @@ valid_url_methods = {
     },
     "nsfm": {
         "v1": {
-            "alarms": {"METHODS": ("GET", "PATCH"),
-                       "ROLE_PERMISSION": "alarms:",
-                       "<ID>": {"METHODS": ("GET", "PATCH"),
-                                "ROLE_PERMISSION": "alarms:id:",
-                                },
-                       }
+            "alarms": {
+                "METHODS": ("GET", "PATCH"),
+                "ROLE_PERMISSION": "alarms:",
+                "<ID>": {
+                    "METHODS": ("GET", "PATCH"),
+                    "ROLE_PERMISSION": "alarms:id:",
+                },
+            }
         },
     },
 }
@@ -648,6 +665,7 @@ class Server(object):
         self.engine = Engine(self.authenticator)
 
     def _format_in(self, kwargs):
+        error_text = ""  # error_text must be initialized outside try
         try:
             indata = None
             if cherrypy.request.body.length:
@@ -660,9 +678,7 @@ class Server(object):
                         cherrypy.request.headers.pop("Content-File-MD5", None)
                     elif "application/yaml" in cherrypy.request.headers["Content-Type"]:
                         error_text = "Invalid yaml format "
-                        indata = yaml.load(
-                            cherrypy.request.body, Loader=yaml.SafeLoader
-                        )
+                        indata = yaml.safe_load(cherrypy.request.body)
                         cherrypy.request.headers.pop("Content-File-MD5", None)
                     elif (
                         "application/binary" in cherrypy.request.headers["Content-Type"]
@@ -692,13 +708,11 @@ class Server(object):
                         #                          "Only 'Content-Type' of type 'application/json' or
                         # 'application/yaml' for input format are available")
                         error_text = "Invalid yaml format "
-                        indata = yaml.load(
-                            cherrypy.request.body, Loader=yaml.SafeLoader
-                        )
+                        indata = yaml.safe_load(cherrypy.request.body)
                         cherrypy.request.headers.pop("Content-File-MD5", None)
                 else:
                     error_text = "Invalid yaml format "
-                    indata = yaml.load(cherrypy.request.body, Loader=yaml.SafeLoader)
+                    indata = yaml.safe_load(cherrypy.request.body)
                     cherrypy.request.headers.pop("Content-File-MD5", None)
             if not indata:
                 indata = {}
@@ -713,7 +727,7 @@ class Server(object):
                         kwargs[k] = None
                     elif format_yaml:
                         try:
-                            kwargs[k] = yaml.load(v, Loader=yaml.SafeLoader)
+                            kwargs[k] = yaml.safe_load(v)
                         except Exception:
                             pass
                     elif (
@@ -737,7 +751,7 @@ class Server(object):
                             v[index] = None
                         elif format_yaml:
                             try:
-                                v[index] = yaml.load(v[index], Loader=yaml.SafeLoader)
+                                v[index] = yaml.safe_load(v[index])
                             except Exception:
                                 pass
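
Note: all of the yaml.load(..., Loader=yaml.SafeLoader) calls in this handler are swapped for the yaml.safe_load shorthand; the two forms parse identically:

    import yaml

    assert yaml.safe_load("a: 1") == yaml.load("a: 1", Loader=yaml.SafeLoader) == {"a": 1}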
 
@@ -884,55 +898,88 @@ class Server(object):
 
     # NS Fault Management
     @cherrypy.expose
-    def nsfm(self, version=None, topic=None, uuid=None, project_name=None, ns_id=None, *args, **kwargs):
-        if topic == 'alarms':
+    def nsfm(
+        self,
+        version=None,
+        topic=None,
+        uuid=None,
+        project_name=None,
+        ns_id=None,
+        *args,
+        **kwargs
+    ):
+        if topic == "alarms":
             try:
                 method = cherrypy.request.method
-                role_permission = self._check_valid_url_method(method, "nsfm", version, topic, None, None, *args)
-                query_string_operations = self._extract_query_string_operations(kwargs, method)
+                role_permission = self._check_valid_url_method(
+                    method, "nsfm", version, topic, None, None, *args
+                )
+                query_string_operations = self._extract_query_string_operations(
+                    kwargs, method
+                )
 
-                self.authenticator.authorize(role_permission, query_string_operations, None)
+                self.authenticator.authorize(
+                    role_permission, query_string_operations, None
+                )
 
                 # to handle get request
-                if cherrypy.request.method == 'GET':
+                if cherrypy.request.method == "GET":
                     # if request is on basis of uuid
-                    if uuid and uuid != 'None':
+                    if uuid and uuid != "None":
                         try:
                             alarm = self.engine.db.get_one("alarms", {"uuid": uuid})
-                            alarm_action = self.engine.db.get_one("alarms_action", {"uuid": uuid})
+                            alarm_action = self.engine.db.get_one(
+                                "alarms_action", {"uuid": uuid}
+                            )
                             alarm.update(alarm_action)
-                            vnf = self.engine.db.get_one("vnfrs", {"nsr-id-ref": alarm["tags"]["ns_id"]})
+                            vnf = self.engine.db.get_one(
+                                "vnfrs", {"nsr-id-ref": alarm["tags"]["ns_id"]}
+                            )
                             alarm["vnf-id"] = vnf["_id"]
                             return self._format_out(str(alarm))
                         except Exception:
                             return self._format_out("Please provide valid alarm uuid")
-                    elif ns_id and ns_id != 'None':
+                    elif ns_id and ns_id != "None":
                         # if request is on basis of ns_id
                         try:
-                            alarms = self.engine.db.get_list("alarms", {"tags.ns_id": ns_id})
+                            alarms = self.engine.db.get_list(
+                                "alarms", {"tags.ns_id": ns_id}
+                            )
                             for alarm in alarms:
-                                alarm_action = self.engine.db.get_one("alarms_action", {"uuid": alarm['uuid']})
+                                alarm_action = self.engine.db.get_one(
+                                    "alarms_action", {"uuid": alarm["uuid"]}
+                                )
                                 alarm.update(alarm_action)
                             return self._format_out(str(alarms))
                         except Exception:
                             return self._format_out("Please provide valid ns id")
                     else:
                         # to return only alarm which are related to given project
-                        project = self.engine.db.get_one("projects", {"name": project_name})
-                        project_id = project.get('_id')
-                        ns_list = self.engine.db.get_list("nsrs", {"_admin.projects_read": project_id})
+                        project = self.engine.db.get_one(
+                            "projects", {"name": project_name}
+                        )
+                        project_id = project.get("_id")
+                        ns_list = self.engine.db.get_list(
+                            "nsrs", {"_admin.projects_read": project_id}
+                        )
                         ns_ids = []
                         for ns in ns_list:
                             ns_ids.append(ns.get("_id"))
                         alarms = self.engine.db.get_list("alarms")
-                        alarm_list = [alarm for alarm in alarms if alarm["tags"]["ns_id"] in ns_ids]
+                        alarm_list = [
+                            alarm
+                            for alarm in alarms
+                            if alarm["tags"]["ns_id"] in ns_ids
+                        ]
                         for alrm in alarm_list:
-                            action = self.engine.db.get_one("alarms_action", {"uuid": alrm.get("uuid")})
+                            action = self.engine.db.get_one(
+                                "alarms_action", {"uuid": alrm.get("uuid")}
+                            )
                             alrm.update(action)
                         return self._format_out(str(alarm_list))
                 # to handle patch request for alarm update
-                elif cherrypy.request.method == 'PATCH':
-                    data = yaml.load(cherrypy.request.body, Loader=yaml.SafeLoader)
+                elif cherrypy.request.method == "PATCH":
+                    data = yaml.safe_load(cherrypy.request.body)
                     try:
                         # check if uuid is valid
                         self.engine.db.get_one("alarms", {"uuid": data.get("uuid")})
@@ -940,24 +987,42 @@ class Server(object):
                         return self._format_out("Please provide valid alarm uuid.")
                     if data.get("is_enable") is not None:
                         if data.get("is_enable"):
-                            alarm_status = 'ok'
+                            alarm_status = "ok"
                         else:
-                            alarm_status = 'disabled'
-                        self.engine.db.set_one("alarms", {"uuid": data.get("uuid")},
-                                               {"alarm_status": alarm_status})
+                            alarm_status = "disabled"
+                        self.engine.db.set_one(
+                            "alarms",
+                            {"uuid": data.get("uuid")},
+                            {"alarm_status": alarm_status},
+                        )
                     else:
-                        self.engine.db.set_one("alarms", {"uuid": data.get("uuid")},
-                                               {"threshold": data.get("threshold")})
+                        self.engine.db.set_one(
+                            "alarms",
+                            {"uuid": data.get("uuid")},
+                            {"threshold": data.get("threshold")},
+                        )
                     return self._format_out("Alarm updated")
             except Exception as e:
-                cherrypy.response.status = e.http_code.value
-                if isinstance(e, (NbiException, EngineException, DbException, FsException, MsgException, AuthException,
-                              ValidationError, AuthconnException)):
+                if isinstance(
+                    e,
+                    (
+                        NbiException,
+                        EngineException,
+                        DbException,
+                        FsException,
+                        MsgException,
+                        AuthException,
+                        ValidationError,
+                        AuthconnException,
+                    ),
+                ):
                     http_code_value = cherrypy.response.status = e.http_code.value
                     http_code_name = e.http_code.name
                     cherrypy.log("Exception {}".format(e))
                 else:
-                    http_code_value = cherrypy.response.status = HTTPStatus.BAD_REQUEST.value  # INTERNAL_SERVER_ERROR
+                    http_code_value = (
+                        cherrypy.response.status
+                    ) = HTTPStatus.BAD_REQUEST.value  # INTERNAL_SERVER_ERROR
                     cherrypy.log("CRITICAL: Exception {}".format(e), traceback=True)
                     http_code_name = HTTPStatus.BAD_REQUEST.name
                 problem_details = {
@@ -998,18 +1063,30 @@ class Server(object):
             outdata = token_info = self.authenticator.new_token(
                 token_info, indata, cherrypy.request.remote
             )
-            cherrypy.session["Authorization"] = outdata["_id"]
+            cherrypy.session["Authorization"] = outdata["_id"]  # pylint: disable=E1101
             self._set_location_header("admin", "v1", "tokens", outdata["_id"])
             # for logging
             self._format_login(token_info)
             # password expiry check
             if self.authenticator.check_password_expiry(outdata):
-                outdata = {"id": outdata["id"],
-                           "message": "change_password",
-                           "user_id": outdata["user_id"]
-                           }
+                outdata = {
+                    "id": outdata["id"],
+                    "message": "change_password",
+                    "user_id": outdata["user_id"],
+                }
             # cherrypy.response.cookie["Authorization"] = outdata["id"]
             # cherrypy.response.cookie["Authorization"]['expires'] = 3600
+            cef_event(
+                cef_logger,
+                {
+                    "name": "User Login",
+                    "sourceUserName": token_info.get("username"),
+                    "message": "User Logged In, Project={} Outcome=Success".format(
+                        token_info.get("project_name")
+                    ),
+                },
+            )
+            cherrypy.log("{}".format(cef_logger))
         elif method == "DELETE":
             if not token_id and "id" in kwargs:
                 token_id = kwargs["id"]
@@ -1018,9 +1095,27 @@ class Server(object):
                 # for logging
                 self._format_login(token_info)
                 token_id = token_info["_id"]
+            if current_backend != "keystone":
+                token_details = self.engine.db.get_one("tokens", {"_id": token_id})
+                current_user = token_details.get("username")
+                current_project = token_details.get("project_name")
+            else:
+                current_user = "keystone backend"
+                current_project = "keystone backend"
             outdata = self.authenticator.del_token(token_id)
             token_info = None
-            cherrypy.session["Authorization"] = "logout"
+            cherrypy.session["Authorization"] = "logout"  # pylint: disable=E1101
+            cef_event(
+                cef_logger,
+                {
+                    "name": "User Logout",
+                    "sourceUserName": current_user,
+                    "message": "User Logged Out, Project={} Outcome=Success".format(
+                        current_project
+                    ),
+                },
+            )
+            cherrypy.log("{}".format(cef_logger))
             # cherrypy.response.cookie["Authorization"] = token_id
             # cherrypy.response.cookie["Authorization"]['expires'] = 0
         else:
@@ -1048,7 +1143,8 @@ class Server(object):
         elif args and args[0] == "init":
             try:
                 # self.engine.load_dbase(cherrypy.request.app.config)
-                self.engine.create_admin()
+                pid = self.authenticator.create_admin_project()
+                self.authenticator.create_admin_user(pid)
                 return "Done. User 'admin', password 'admin' created"
             except Exception:
                 cherrypy.response.status = HTTPStatus.FORBIDDEN.value
@@ -1106,13 +1202,13 @@ class Server(object):
             return_text = "<html><pre>{} ->\n".format(main_topic)
             try:
                 if cherrypy.request.method == "POST":
-                    to_send = yaml.load(cherrypy.request.body, Loader=yaml.SafeLoader)
+                    to_send = yaml.safe_load(cherrypy.request.body)
                     for k, v in to_send.items():
                         self.engine.msg.write(main_topic, k, v)
                         return_text += "  {}: {}\n".format(k, v)
                 elif cherrypy.request.method == "GET":
                     for k, v in kwargs.items():
-                        v_dict = yaml.load(v, Loader=yaml.SafeLoader)
+                        v_dict = yaml.safe_load(v)
                         self.engine.msg.write(main_topic, k, v_dict)
                         return_text += "  {}: {}\n".format(k, v_dict)
             except Exception as e:
@@ -1126,10 +1222,12 @@ class Server(object):
             + "  headers: {}\n".format(cherrypy.request.headers)
             + "  path_info: {}\n".format(cherrypy.request.path_info)
             + "  query_string: {}\n".format(cherrypy.request.query_string)
-            + "  session: {}\n".format(cherrypy.session)
+            + "  session: {}\n".format(cherrypy.session)  # pylint: disable=E1101
             + "  cookie: {}\n".format(cherrypy.request.cookie)
             + "  method: {}\n".format(cherrypy.request.method)
-            + "  session: {}\n".format(cherrypy.session.get("fieldname"))
+            + "  session: {}\n".format(
+                cherrypy.session.get("fieldname")  # pylint: disable=E1101
+            )
             + "  body:\n"
         )
         return_text += "    length: {}\n".format(cherrypy.request.body.length)
@@ -1321,12 +1419,20 @@ class Server(object):
         **kwargs
     ):
         token_info = None
-        outdata = None
+        outdata = {}
         _format = None
         method = "DONE"
         engine_topic = None
         rollback = []
         engine_session = None
+        url_id = ""
+        log_mapping = {
+            "POST": "Creating",
+            "GET": "Fetching",
+            "DELETE": "Deleting",
+            "PUT": "Updating",
+            "PATCH": "Updating",
+        }
         try:
             if not main_topic or not version or not topic:
                 raise NbiException(
@@ -1353,6 +1459,8 @@ class Server(object):
                     "URL version '{}' not supported".format(version),
                     HTTPStatus.METHOD_NOT_ALLOWED,
                 )
+            if _id is not None:
+                url_id = _id
 
             if (
                 kwargs
@@ -1453,7 +1561,9 @@ class Server(object):
                     filter_q = None
                     if "vcaStatusRefresh" in kwargs:
                         filter_q = {"vcaStatusRefresh": kwargs["vcaStatusRefresh"]}
-                    outdata = self.engine.get_item(engine_session, engine_topic, _id, filter_q, True)
+                    outdata = self.engine.get_item(
+                        engine_session, engine_topic, _id, filter_q, True
+                    )
 
             elif method == "POST":
                 cherrypy.response.status = HTTPStatus.CREATED.value
@@ -1560,10 +1670,21 @@ class Server(object):
                 elif topic == "vnf_instances" and item:
                     indata["lcmOperationType"] = item
                     indata["vnfInstanceId"] = _id
-                    _id, _ = self.engine.new_item(rollback, engine_session, "vnflcmops", indata, kwargs)
-                    self._set_location_header(main_topic, version, "vnf_lcm_op_occs", _id)
+                    _id, _ = self.engine.new_item(
+                        rollback, engine_session, "vnflcmops", indata, kwargs
+                    )
+                    self._set_location_header(
+                        main_topic, version, "vnf_lcm_op_occs", _id
+                    )
                     outdata = {"id": _id}
                     cherrypy.response.status = HTTPStatus.ACCEPTED.value
+                elif topic == "ns_lcm_op_occs" and item == "cancel":
+                    indata["nsLcmOpOccId"] = _id
+                    self.engine.cancel_item(
+                        rollback, engine_session, "nslcmops", indata, None
+                    )
+                    self._set_location_header(main_topic, version, topic, _id)
+                    cherrypy.response.status = HTTPStatus.ACCEPTED.value
                 else:
                     _id, op_id = self.engine.new_item(
                         rollback,
@@ -1675,6 +1796,36 @@ class Server(object):
             ):
                 self.authenticator.remove_token_from_cache()
 
+            if item is not None:
+                cef_event(
+                    cef_logger,
+                    {
+                        "name": "User Operation",
+                        "sourceUserName": token_info.get("username"),
+                        "message": "Performing {} operation on {} {}, Project={} Outcome=Success".format(
+                            item,
+                            topic,
+                            url_id,
+                            token_info.get("project_name"),
+                        ),
+                    },
+                )
+                cherrypy.log("{}".format(cef_logger))
+            else:
+                cef_event(
+                    cef_logger,
+                    {
+                        "name": "User Operation",
+                        "sourceUserName": token_info.get("username"),
+                        "message": "{} {} {}, Project={} Outcome=Success".format(
+                            log_mapping[method],
+                            topic,
+                            url_id,
+                            token_info.get("project_name"),
+                        ),
+                    },
+                )
+                cherrypy.log("{}".format(cef_logger))
             return self._format_out(outdata, token_info, _format)
         except Exception as e:
             if isinstance(
@@ -1716,7 +1867,6 @@ class Server(object):
                         self.engine.db.del_list(
                             rollback_item["topic"],
                             rollback_item["filter"],
-                            fail_on_empty=False,
                         )
                     else:
                         self.engine.db.del_one(
@@ -1738,6 +1888,38 @@ class Server(object):
                 "status": http_code_value,
                 "detail": error_text,
             }
+            if item is not None and token_info is not None:
+                cef_event(
+                    cef_logger,
+                    {
+                        "name": "User Operation",
+                        "sourceUserName": token_info.get("username", None),
+                        "message": "Performing {} operation on {} {}, Project={} Outcome=Failure".format(
+                            item,
+                            topic,
+                            url_id,
+                            token_info.get("project_name", None),
+                        ),
+                        "severity": "2",
+                    },
+                )
+                cherrypy.log("{}".format(cef_logger))
+            elif token_info is not None:
+                cef_event(
+                    cef_logger,
+                    {
+                        "name": "User Operation",
+                        "sourceUserName": token_info.get("username", None),
+                        "message": "{} {} {}, Project={} Outcome=Failure".format(
+                            item,
+                            topic,
+                            url_id,
+                            token_info.get("project_name", None),
+                        ),
+                        "severity": "2",
+                    },
+                )
+                cherrypy.log("{}".format(cef_logger))
             return self._format_out(problem_details, token_info)
             # raise cherrypy.HTTPError(e.http_code.value, str(e))
         finally:
@@ -1760,12 +1942,17 @@ def _start_service():
     """
     global nbi_server
     global subscription_thread
+    global cef_logger
+    global current_backend
     cherrypy.log.error("Starting osm_nbi")
     # update general cherrypy configuration
     update_dict = {}
 
     engine_config = cherrypy.tree.apps["/osm"].config
     for k, v in environ.items():
+        if k == "OSMNBI_USER_MANAGEMENT":
+            feature_state = eval(v.title())
+            engine_config["authentication"]["user_management"] = feature_state
         if not k.startswith("OSMNBI_"):
             continue
         k1, _, k2 = k[7:].lower().partition("_")
@@ -1792,7 +1979,9 @@ def _start_service():
         except ValueError as e:
             cherrypy.log.error("Ignoring environ '{}': " + str(e))
         except Exception as e:
-            cherrypy.log.warn("skipping environ '{}' on exception '{}'".format(k, e))
+            cherrypy.log(
+                "WARNING: skipping environ '{}' on exception '{}'".format(k, e)
+            )
 
     if update_dict:
         cherrypy.config.update(update_dict)
@@ -1859,6 +2048,8 @@ def _start_service():
         target_version=auth_database_version
     )
 
+    cef_logger = cef_event_builder(engine_config["authentication"])
+
     # start subscriptions thread:
     subscription_thread = SubscriptionThread(
         config=engine_config, engine=nbi_server.engine
@@ -1867,6 +2058,7 @@ def _start_service():
     # Do not capture except SubscriptionException
 
     backend = engine_config["authentication"]["backend"]
+    current_backend = backend
     cherrypy.log.error(
         "Starting OSM NBI Version '{} {}' with '{}' authentication backend".format(
             nbi_version, nbi_version_date, backend
index 47a24ba..22413d0 100644 (file)
@@ -41,7 +41,6 @@ class NotificationException(Exception):
 
 
 class NotificationBase:
-
     response_models = None
     # Common HTTP payload header for all notifications.
     payload_header = {"Content-Type": "application/json", "Accept": "application/json"}
@@ -109,12 +108,12 @@ class NotificationBase:
         return payload
 
     async def send_notifications(
-        self, subscribers: list, loop: asyncio.AbstractEventLoop = None
+        self,
+        subscribers: list,
     ):
         """
         Generate tasks for all notification for an event.
         :param subscribers: A list of subscribers who want to be notified for event.
-        :param loop: Event loop object.
         """
         notifications = []
         for subscriber in subscribers:
@@ -155,21 +154,19 @@ class NotificationBase:
 
         if notifications:
             tasks = []
-            async with aiohttp.ClientSession(loop=loop) as session:
+            async with aiohttp.ClientSession() as session:
                 for notification in notifications:
                     tasks.append(
                         asyncio.ensure_future(
-                            self.send_notification(session, notification, loop=loop),
-                            loop=loop,
+                            self.send_notification(session, notification),
                         )
                     )
-                await asyncio.gather(*tasks, loop=loop)
+                await asyncio.gather(*tasks)
 
     async def send_notification(
         self,
         session: aiohttp.ClientSession,
         notification: dict,
-        loop: asyncio.AbstractEventLoop = None,
         retry_count: int = 5,
         timeout: float = 5.0,
     ):
@@ -178,7 +175,6 @@ class NotificationBase:
         after maximum number of retries, then notification is dropped.
         :param session: An aiohttp client session object to maintain http session.
         :param notification: A dictionary containing all necessary data to make POST request.
-        :param loop: Event loop object.
         :param retry_count: An integer specifying the maximum number of retries for a notification.
         :param timeout: A float representing client timeout of each HTTP request.
         """
@@ -227,7 +223,7 @@ class NotificationBase:
                         notification["payload"]["subscriptionId"], backoff_delay
                     )
                 )
-                await asyncio.sleep(backoff_delay, loop=loop)
+                await asyncio.sleep(backoff_delay)
         # Dropping notification
         self.logger.debug(
             "Notification {} sent failed to subscriber:{}.".format(
@@ -239,7 +235,14 @@ class NotificationBase:
 
 
 class NsLcmNotification(NotificationBase):
-
+    # maps kafka commands of completed operations to the original operation type
+    completed_operation_map = {
+        "INSTANTIATED": "INSTANTIATE",
+        "SCALED": "SCALE",
+        "TERMINATED": "TERMINATE",
+        "UPDATED": "UPDATE",
+        "HEALED": "HEAL",
+    }
     # SOL005 response model for nslcm notifications
     response_models = {
         "NsLcmOperationOccurrenceNotification": {
@@ -352,18 +355,19 @@ class NsLcmNotification(NotificationBase):
             "NsLcmOperationOccurrenceNotification",
             "NsChangeNotification",
             "NsIdentifierCreationNotification",
-            "NsIdentifierDeletionNotification"
+            "NsIdentifierDeletionNotification",
         ]
         filter_q = {
             "identifier": [nsd_id, ns_instance_id],
             "operationStates": ["ANY"],
             "operationTypes": ["ANY"],
-            "notificationType": notification_type
-            }
+            "notificationType": notification_type,
+        }
         if op_state:
             filter_q["operationStates"].append(op_state)
         if command:
-            filter_q["operationTypes"].append(command)
+            op_type = self.completed_operation_map.get(command, command)
+            filter_q["operationTypes"].append(op_type)
         # self.logger.debug("Db query is: {}".format(filter_q))
         subscribers = []
         try:
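
Note: this is the fix the commit subject refers to. LCM reports a finished operation on kafka with a past-tense command ("INSTANTIATED", "TERMINATED", ...), while subscriptions are filtered on the original operation types, so completion events could fail to match any subscriber. The map translates the command back before the filter is applied:

    command = "TERMINATED"   # kafka command emitted when the operation finishes
    op_type = NsLcmNotification.completed_operation_map.get(command, command)
    assert op_type == "TERMINATE"
    # a subscription whose operationTypes contains "TERMINATE" (or "ANY") now matches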
@@ -398,7 +402,7 @@ class VnfLcmNotification(NotificationBase):
             "changedExtConnectivity",
             "modificationsTriggeredByVnfPkgChange",
             "error",
-            "_links"
+            "_links",
         },
         "VnfIdentifierCreationNotification": {
             "id",
@@ -406,7 +410,7 @@ class VnfLcmNotification(NotificationBase):
             "subscriptionId",
             "timeStamp",
             "vnfInstanceId",
-            "_links"
+            "_links",
         },
         "VnfIdentifierDeletionNotification": {
             "id",
@@ -414,7 +418,7 @@ class VnfLcmNotification(NotificationBase):
             "subscriptionId",
             "timeStamp",
             "vnfInstanceId",
-            "_links"
+            "_links",
         },
     }
 
@@ -434,7 +438,9 @@ class VnfLcmNotification(NotificationBase):
         """
         return self.response_models
 
-    def _format_vnflcm_subscribers(self, subscribers: list, event_details: dict) -> list:
+    def _format_vnflcm_subscribers(
+        self, subscribers: list, event_details: dict
+    ) -> list:
         """
         Formats the raw event details from kafka message and subscriber details.
         :param subscribers: A list of subscribers whom the event needs to be notified.
@@ -454,8 +460,14 @@ class VnfLcmNotification(NotificationBase):
             subscriber.update(event_details["params"])
         return subscribers
 
-    def get_subscribers(self, vnfd_id: str, vnf_instance_id: str, command: str, op_state: str,
-                        event_details: dict) -> list:
+    def get_subscribers(
+        self,
+        vnfd_id: str,
+        vnf_instance_id: str,
+        command: str,
+        op_state: str,
+        event_details: dict,
+    ) -> list:
         """
         Queries database and returns list of subscribers.
         :param vnfd_id: Vnfd id of a VNF whose lifecycle has changed. (instantiated, scaled, terminated. etc)
@@ -468,13 +480,13 @@ class VnfLcmNotification(NotificationBase):
         notification_type = [
             "VnfIdentifierCreationNotification",
             "VnfLcmOperationOccurrenceNotification",
-            "VnfIdentifierDeletionNotification"
+            "VnfIdentifierDeletionNotification",
         ]
         filter_q = {
             "identifier": [vnfd_id, vnf_instance_id],
             "operationStates": ["ANY"],
             "operationTypes": ["ANY"],
-            "notificationType": notification_type
+            "notificationType": notification_type,
         }
         if op_state:
             filter_q["operationStates"].append(op_state)
index f7ca5f4..bbf119f 100644 (file)
@@ -21,7 +21,6 @@ import logging
 
 
 class BaseMethod:
-
     def __init__(self):
         """
         Constructor of the base method
index 947f0b7..b34d203 100644 (file)
@@ -21,7 +21,6 @@ from .base_methods import BaseMethod
 
 
 class VnfLcmOp2NsLcmOp:
-
     def __init__(self, db, fs, msg, auth):
         """
         Constructor of Vnf lcm op to Ns lcm op
@@ -66,7 +65,6 @@ class VnfLcmOp2NsLcmOp:
 
 
 class NewVnfLcmOp(BaseMethod):
-
     def __init__(self, db, fs, msg, auth):
         """
         Constructor of new Vnf Lcm Op
@@ -85,7 +83,7 @@ class NewVnfLcmOp(BaseMethod):
         :return: id of nsd id
         """
         nsr = self.nsrtopic.show(session, vnf_instance_id)
-        return nsr['nsd']['_id']
+        return nsr["nsd"]["_id"]
 
     def __get_formatted_indata(self, session, indata):
         """
@@ -103,12 +101,12 @@ class NewVnfLcmOp(BaseMethod):
                 "vimAccountId": indata["vimAccountId"],
                 "nsr_id": indata["vnfInstanceId"],
                 "lcmOperationType": indata["lcmOperationType"],
-                "nsInstanceId": indata["vnfInstanceId"]
+                "nsInstanceId": indata["vnfInstanceId"],
             }
         elif indata["lcmOperationType"] == "terminate":
             formatted_indata = {
                 "lcmOperationType": indata["lcmOperationType"],
-                "nsInstanceId": indata["vnfInstanceId"]
+                "nsInstanceId": indata["vnfInstanceId"],
             }
         elif indata["lcmOperationType"] == "scale":
             formatted_indata = {
@@ -119,9 +117,11 @@ class NewVnfLcmOp(BaseMethod):
                     "scaleVnfType": indata["type"],
                     "scaleByStepData": {
                         "scaling-group-descriptor": indata["aspectId"],
-                        "member-vnf-index": indata["additionalParams"]["member-vnf-index"]
-                    }
-                }
+                        "member-vnf-index": indata["additionalParams"][
+                            "member-vnf-index"
+                        ],
+                    },
+                },
             }
         elif indata["lcmOperationType"] == "action":
             formatted_indata = {
@@ -129,7 +129,7 @@ class NewVnfLcmOp(BaseMethod):
                 "nsInstanceId": indata["vnfInstanceId"],
                 "member_vnf_index": indata["member_vnf_index"],
                 "primitive": indata["primitive"],
-                "primitive_params": indata["primitive_params"]
+                "primitive_params": indata["primitive_params"],
             }
         return formatted_indata
 
@@ -147,14 +147,17 @@ class NewVnfLcmOp(BaseMethod):
         nslcmop_rec = self.nslcmoptopic.show(session, op_id)
         operation_status = nslcmop_rec["operationState"]
         vnfr = self.vnfrtopic.show(session, vnfInstanceId)
-        links = {"self": "/osm/vnflcm/v1/vnf_lcm_op_occs/" + op_id,
-                 "vnfInstance": "/osm/vnflcm/v1/vnf_instances/" + vnfInstanceId}
-        params = {"vnfdId": vnfr["vnfd-ref"],
-                  "vnfInstanceId": vnfInstanceId,
-                  "operationState": operation_status,
-                  "vnfLcmOpOccId": op_id,
-                  "_links": links
-                  }
+        links = {
+            "self": "/osm/vnflcm/v1/vnf_lcm_op_occs/" + op_id,
+            "vnfInstance": "/osm/vnflcm/v1/vnf_instances/" + vnfInstanceId,
+        }
+        params = {
+            "vnfdId": vnfr["vnfd-ref"],
+            "vnfInstanceId": vnfInstanceId,
+            "operationState": operation_status,
+            "vnfLcmOpOccId": op_id,
+            "_links": links,
+        }
         self.msg.write("vnf", operation, params)
         return None
 
@@ -179,7 +182,6 @@ class NewVnfLcmOp(BaseMethod):
 
 
 class ListVnfLcmOp(BaseMethod):
-
     def __init__(self, db, fs, msg, auth):
         """
         Constructor call for listing vnf lcm operations
@@ -201,14 +203,13 @@ class ListVnfLcmOp(BaseMethod):
         for record in records:
             ns_id = record.get("nsInstanceId")
             nsr = self.nsrtopic.show(session, ns_id)
-            vnfInstance_id = nsr['constituent-vnfr-ref'][0]
+            vnfInstance_id = nsr["constituent-vnfr-ref"][0]
             outdata = sol003_projection(record, vnfInstance_id)
             list.append(outdata)
         return list
 
 
 class ShowVnfLcmOp(BaseMethod):
-
     def __init__(self, db, fs, msg, auth):
         """
         Constructor call for showing vnf lcm operation
@@ -228,7 +229,7 @@ class ShowVnfLcmOp(BaseMethod):
         record = self.nslcmoptopic.show(session, _id, api_req)
         ns_id = record.get("nsInstanceId")
         nsr = self.nsrtopic.show(session, ns_id)
-        vnfinstance_id = nsr['constituent-vnfr-ref'][0]
+        vnfinstance_id = nsr["constituent-vnfr-ref"][0]
         outdata = sol003_projection(record, vnfinstance_id)
         return outdata
 
index a6a57fc..c9d05ac 100644 (file)
@@ -24,7 +24,6 @@ from osm_nbi.instance_topics import NsrTopic, VnfrTopic
 
 
 class VnfInstances2NsInstances:
-
     def __init__(self, db, fs, msg, auth):
         """
         Constructor of Vnf Instances to Ns Instances
@@ -81,7 +80,6 @@ class VnfInstances2NsInstances:
 
 
 class NewVnfInstance(BaseMethod):
-
     # sample ns descriptor
     sample_nsd = {
         "nsd": {
@@ -96,13 +94,15 @@ class NewVnfInstance(BaseMethod):
                                     "virtual-link-connectivity": [
                                         {
                                             "constituent-cpd-id": [
-                                                {"constituent-base-element-id": 1,
-                                                 "constituent-cpd-id": "eth0-ext"}
+                                                {
+                                                    "constituent-base-element-id": 1,
+                                                    "constituent-cpd-id": "eth0-ext",
+                                                }
                                             ],
                                             "virtual-link-profile-id": "mgmtnet",
                                         }
                                     ],
-                                    "vnfd-id": "cirros_vnfd"
+                                    "vnfd-id": "cirros_vnfd",
                                 }
                             ],
                         }
@@ -130,7 +130,12 @@ class NewVnfInstance(BaseMethod):
         formatted_indata = deepcopy(indata)
         formatted_indata["nsdId"] = nsd_id
         formatted_indata["nsName"] = indata["vnfInstanceName"] + "-ns"
-        for invalid_key in ("vnfdId", "vnfInstanceName", "vnfInstanceDescription", "additionalParams"):
+        for invalid_key in (
+            "vnfdId",
+            "vnfInstanceName",
+            "vnfInstanceDescription",
+            "additionalParams",
+        ):
             formatted_indata.pop(invalid_key)
         return formatted_indata
 
@@ -164,34 +169,45 @@ class NewVnfInstance(BaseMethod):
         _id, *others = self.nsdtopic.new(rollback, session, {}, None, headers)
         new_nsd = deepcopy(NewVnfInstance.sample_nsd)
         vnf_content = {
-              "id":"default-df",
-              "vnf-profile": [
+            "id": "default-df",
+            "vnf-profile": [
                 {
-                  "id": "1",
-                  "virtual-link-connectivity": [
-                    {
-                      "constituent-cpd-id": [
+                    "id": "1",
+                    "virtual-link-connectivity": [
                         {
-                          "constituent-base-element-id": "1",
-                          "constituent-cpd-id": indata["additionalParams"]["constituent-cpd-id"]
+                            "constituent-cpd-id": [
+                                {
+                                    "constituent-base-element-id": "1",
+                                    "constituent-cpd-id": indata["additionalParams"][
+                                        "constituent-cpd-id"
+                                    ],
+                                }
+                            ],
+                            "virtual-link-profile-id": indata["additionalParams"][
+                                "virtual-link-profile-id"
+                            ],
                         }
-                      ],
-                      "virtual-link-profile-id": indata["additionalParams"]["virtual-link-profile-id"]
-                    }
-                  ],
-                  "vnfd-id": indata["vnfdId"]
+                    ],
+                    "vnfd-id": indata["vnfdId"],
                 }
-              ]
+            ],
         }
+        vnf_profile = vnf_content["vnf-profile"][0]
+        virtual_link_connectivity = vnf_profile["virtual-link-connectivity"][0]
+        constituent_cpd_id = virtual_link_connectivity["constituent-cpd-id"][0]
+        if "ip-address" in indata["additionalParams"]:
+            constituent_cpd_id["ip-address"] = indata["additionalParams"]["ip-address"]
         new_nsd["nsd"]["nsd"][0] = {
             "description": indata["vnfInstanceDescription"],
             "designer": "OSM",
             "id": indata["vnfdId"] + "-ns",
             "name": indata["vnfdId"] + "-ns",
             "version": "1.0",
-            "df": [vnf_content, ],
+            "df": [
+                vnf_content,
+            ],
             "virtual-link-desc": indata["additionalParams"]["virtual-link-desc"],
-            "vnfd-id": [indata["vnfdId"]]
+            "vnfd-id": [indata["vnfdId"]],
         }
         return _id, new_nsd
 
@@ -207,7 +223,9 @@ class NewVnfInstance(BaseMethod):
         """
         return self.nsrtopic.new(rollback, session, indata, kwargs, headers)
 
-    def __action_pre_processing(self, rollback, session, indata=None, kwargs=None, headers=None):
+    def __action_pre_processing(
+        self, rollback, session, indata=None, kwargs=None, headers=None
+    ):
         """
         Pre process for creating new vnf instance
         :param rollback: list to append the created items at database in case a rollback must be done
@@ -221,9 +239,11 @@ class NewVnfInstance(BaseMethod):
         nsd_id, nsd = self.__create_nsd(rollback, session, indata, kwargs, headers)
         self.nsdtopic.upload_content(session, nsd_id, nsd, kwargs, headers)
         formatted_indata = NewVnfInstance.__get_formatted_indata(indata, nsd_id)
-        nsr_id, _ = self.__create_nsr(rollback, session, formatted_indata, kwargs, headers)
+        nsr_id, _ = self.__create_nsr(
+            rollback, session, formatted_indata, kwargs, headers
+        )
         nsr = self.nsrtopic.show(session, nsr_id)
-        vnfr_id =  nsr['constituent-vnfr-ref'][0]
+        vnfr_id = nsr["constituent-vnfr-ref"][0]
         if vnfr_id:
             links = {"vnfInstance": "/osm/vnflcm/v1/vnf_instances/" + vnfr_id}
             indata["vnfInstanceId"] = vnfr_id
@@ -245,7 +265,6 @@ class NewVnfInstance(BaseMethod):
 
 
 class ListVnfInstance(BaseMethod):
-
     def __init__(self, db, fs, msg, auth):
         """
         Constructor call for listing vnfs
@@ -265,7 +284,6 @@ class ListVnfInstance(BaseMethod):
 
 
 class ShowVnfInstance(BaseMethod):
-
     def __init__(self, db, fs, msg, auth):
         """
         Constructor call for showing vnf lcm operation
@@ -285,7 +303,6 @@ class ShowVnfInstance(BaseMethod):
 
 
 class DeleteVnfInstance(BaseMethod):
-
     def __init__(self, db, fs, msg, auth):
         """
         Constructor call for deleting vnf
@@ -309,11 +326,9 @@ class DeleteVnfInstance(BaseMethod):
         vnfr = self.vnfrtopic.show(session, vnfInstanceId)
         ns_id = vnfr.get("nsr-id-ref")
         nsr = self.nsrtopic.show(session, ns_id)
-        nsd_to_del = nsr['nsd']['_id']
+        nsd_to_del = nsr["nsd"]["_id"]
         links = {"vnfInstance": "/osm/vnflcm/v1/vnf_instances/" + _id}
-        params = {"vnfdId": vnfr["vnfd-ref"],
-                  "vnfInstanceId": _id,
-                  "_links": links}
+        params = {"vnfdId": vnfr["vnfd-ref"], "vnfInstanceId": _id, "_links": links}
         self.msg.write("vnf", "delete", params)
         self.nsrtopic.delete(session, ns_id, dry_run, not_send_msg)
         return self.nsdtopic.delete(session, nsd_to_del, dry_run, not_send_msg)
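
Note on the NewVnfInstance hunks above: most of the changes are formatting-only reflow, but one hunk adds behaviour — when the instantiation request carries an "ip-address" key in additionalParams, it is copied into the constituent-cpd-id entry of the generated wrapper NSD. A minimal, self-contained sketch of that logic follows; the additional_params values below are invented for illustration and are not taken from the patch.

# Illustrative only: mimics the optional "ip-address" handling added to the
# NSD-building step of NewVnfInstance, with made-up input values.
additional_params = {
    "constituent-cpd-id": "eth0-ext",
    "virtual-link-profile-id": "mgmtnet",
    "ip-address": "10.0.0.5",  # optional; omitted when no static IP is requested
}

constituent_cpd_id = {
    "constituent-base-element-id": "1",
    "constituent-cpd-id": additional_params["constituent-cpd-id"],
}
# Only propagate the static IP when the caller actually provided one.
if "ip-address" in additional_params:
    constituent_cpd_id["ip-address"] = additional_params["ip-address"]

print(constituent_cpd_id)
# {'constituent-base-element-id': '1', 'constituent-cpd-id': 'eth0-ext', 'ip-address': '10.0.0.5'}
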
diff --git a/osm_nbi/osm_vnfm/vnf_subscription.py b/osm_nbi/osm_vnfm/vnf_subscription.py
index 5371a44..c04fbde 100644 (file)
@@ -18,8 +18,10 @@ __author__ = "Selvi Jayaraman <selvi.j@tataelxsi.co.in>"
 from osm_nbi.subscription_topics import CommonSubscriptions
 from osm_nbi.validation import vnf_subscription
 
+
 class VnflcmSubscriptionsTopic(CommonSubscriptions):
     schema_new = vnf_subscription
+
     def _subscription_mapper(self, _id, data, table):
         """
         Performs data transformation on subscription request
@@ -30,7 +32,7 @@ class VnflcmSubscriptionsTopic(CommonSubscriptions):
         formatted_data = []
         formed_data = {
             "reference": data.get("_id"),
-            "CallbackUri": data.get("CallbackUri")
+            "CallbackUri": data.get("CallbackUri"),
         }
         if data.get("authentication"):
             formed_data.update({"authentication": data.get("authentication")})
@@ -53,11 +55,15 @@ class VnflcmSubscriptionsTopic(CommonSubscriptions):
                         formatted_data.append(update_dict)
                     elif elem == "VnfLcmOperationOccurrenceNotification":
                         if "operationTypes" in data["filter"].keys():
-                            update_dict["operationTypes"] = data["filter"]["operationTypes"]
+                            update_dict["operationTypes"] = data["filter"][
+                                "operationTypes"
+                            ]
                         else:
                             update_dict["operationTypes"] = "ANY"
                         if "operationStates" in data["filter"].keys():
-                            update_dict["operationStates"] = data["filter"]["operationStates"]
+                            update_dict["operationStates"] = data["filter"][
+                                "operationStates"
+                            ]
                         else:
                             update_dict["operationStates"] = "ANY"
                         formatted_data.append(update_dict)
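
The _subscription_mapper hunk above only re-wraps long dictionary lookups; the underlying behaviour it preserves is that a VnfLcmOperationOccurrenceNotification filter that omits operationTypes or operationStates defaults both to "ANY". A minimal sketch of that defaulting, written with dict.get for brevity rather than the if/else blocks in the patch; the request dict below is invented for illustration.

# Hypothetical subscription request; the filter keys match those visible in
# the diff above, the values are made up.
data = {
    "_id": "subs-0001",
    "CallbackUri": "http://consumer.example/notify",
    "filter": {"notificationTypes": ["VnfLcmOperationOccurrenceNotification"]},
}

update_dict = {"reference": data["_id"], "CallbackUri": data["CallbackUri"]}
# Equivalent to the if/else defaulting in the diff: missing keys become "ANY".
update_dict["operationTypes"] = data["filter"].get("operationTypes", "ANY")
update_dict["operationStates"] = data["filter"].get("operationStates", "ANY")

print(update_dict["operationTypes"], update_dict["operationStates"])  # ANY ANY
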
diff --git a/osm_nbi/subscription_topics.py b/osm_nbi/subscription_topics.py
index 92c7417..0389483 100644 (file)
@@ -25,6 +25,14 @@ class CommonSubscriptions(BaseTopic):
     topic = "subscriptions"
     topic_msg = None
 
+    def _subscription_mapper(self, _id, data, table):
+        """
+        Performs data transformation on subscription request
+        :param data: data to be transformed
+        :param table: table in which transformed data are inserted
+        """
+        pass
+
     def format_subscription(self, subs_data):
         """
         Brings lexicographical order for list items at any nested level. For subscriptions max level of nesting is 4.
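
The _subscription_mapper added to CommonSubscriptions above is a deliberate no-op hook: generic subscription code can call it unconditionally, while specialised topics (such as the VNF LCM subscription topic earlier in this diff) override it with a real mapping. A schematic example of that override pattern, using invented class names rather than the real NBI classes.

# Invented classes for illustration; only the override pattern matches the diff.
class CommonSubs:
    def _subscription_mapper(self, _id, data, table):
        pass  # base topics have nothing extra to map


class VnfSubs(CommonSubs):
    def _subscription_mapper(self, _id, data, table):
        table.append({"reference": _id, "CallbackUri": data.get("CallbackUri")})


mapped = []
for topic in (CommonSubs(), VnfSubs()):
    topic._subscription_mapper("subs-0001", {"CallbackUri": "http://cb"}, mapped)
print(mapped)  # only the VNF topic contributed an entry
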
diff --git a/osm_nbi/subscriptions.py b/osm_nbi/subscriptions.py
index 1f172dd..846e7d3 100644 (file)
@@ -53,7 +53,6 @@ class SubscriptionThread(threading.Thread):
         self.db = None
         self.msg = None
         self.engine = engine
-        self.loop = None
         self.logger = logging.getLogger("nbi.subscriptions")
         self.aiomain_task_admin = (
             None  # asyncio task for receiving admin actions from kafka bus
@@ -81,41 +80,38 @@ class SubscriptionThread(threading.Thread):
                 # created.
                 # Before subscribe, send dummy messages
                 await self.msg.aiowrite(
-                    "admin", "echo", "dummy message", loop=self.loop
+                    "admin",
+                    "echo",
+                    "dummy message",
                 )
-                await self.msg.aiowrite("ns", "echo", "dummy message", loop=self.loop)
-                await self.msg.aiowrite("nsi", "echo", "dummy message", loop=self.loop)
-                await self.msg.aiowrite("vnf", "echo", "dummy message", loop=self.loop)
+                await self.msg.aiowrite("ns", "echo", "dummy message")
+                await self.msg.aiowrite("nsi", "echo", "dummy message")
+                await self.msg.aiowrite("vnf", "echo", "dummy message")
                 if not kafka_working:
                     self.logger.critical("kafka is working again")
                     kafka_working = True
                 if not self.aiomain_task_admin:
-                    await asyncio.sleep(10, loop=self.loop)
+                    await asyncio.sleep(10)
                     self.logger.debug("Starting admin subscription task")
                     self.aiomain_task_admin = asyncio.ensure_future(
                         self.msg.aioread(
                             ("admin",),
-                            loop=self.loop,
                             group_id=False,
                             aiocallback=self._msg_callback,
                         ),
-                        loop=self.loop,
                     )
                 if not self.aiomain_task:
-                    await asyncio.sleep(10, loop=self.loop)
+                    await asyncio.sleep(10)
                     self.logger.debug("Starting non-admin subscription task")
                     self.aiomain_task = asyncio.ensure_future(
                         self.msg.aioread(
                             ("ns", "nsi", "vnf"),
-                            loop=self.loop,
                             aiocallback=self._msg_callback,
                         ),
-                        loop=self.loop,
                     )
                 done, _ = await asyncio.wait(
                     [self.aiomain_task, self.aiomain_task_admin],
                     timeout=None,
-                    loop=self.loop,
                     return_when=asyncio.FIRST_COMPLETED,
                 )
                 try:
@@ -142,14 +138,13 @@ class SubscriptionThread(threading.Thread):
                         "Error accessing kafka '{}'. Retrying ...".format(e)
                     )
                     kafka_working = False
-            await asyncio.sleep(10, loop=self.loop)
+            await asyncio.sleep(10)
 
     def run(self):
         """
         Start of the thread
         :return: None
         """
-        self.loop = asyncio.new_event_loop()
         try:
             if not self.db:
                 if self.config["database"]["driver"] == "mongo":
@@ -166,7 +161,6 @@ class SubscriptionThread(threading.Thread):
                     )
             if not self.msg:
                 config_msg = self.config["message"].copy()
-                config_msg["loop"] = self.loop
                 if config_msg["driver"] == "local":
                     self.msg = msglocal.MsgLocal()
                     self.msg.connect(config_msg)
@@ -187,12 +181,7 @@ class SubscriptionThread(threading.Thread):
         self.logger.debug("Starting")
         while not self.to_terminate:
             try:
-
-                self.loop.run_until_complete(
-                    asyncio.ensure_future(self.start_kafka(), loop=self.loop)
-                )
-            # except asyncio.CancelledError:
-            #     break  # if cancelled it should end, breaking loop
+                asyncio.run(self.start_kafka())
             except Exception as e:
                 if not self.to_terminate:
                     self.logger.exception(
@@ -201,7 +190,6 @@ class SubscriptionThread(threading.Thread):
 
         self.logger.debug("Finishing")
         self._stop()
-        self.loop.close()
 
     async def _msg_callback(self, topic, command, params):
         """
@@ -266,10 +254,7 @@ class SubscriptionThread(threading.Thread):
                             # self.logger.debug(subscribers)
                             if subscribers:
                                 asyncio.ensure_future(
-                                    self.nslcm.send_notifications(
-                                        subscribers, loop=self.loop
-                                    ),
-                                    loop=self.loop,
+                                    self.nslcm.send_notifications(subscribers),
                                 )
                 else:
                     self.logger.debug(
@@ -284,24 +269,21 @@ class SubscriptionThread(threading.Thread):
                     else:
                         op_state = params["operationState"]
                     event_details = {
-                            "topic": topic,
-                            "command": command.upper(),
-                            "params": params,
-                            }
+                        "topic": topic,
+                        "command": command.upper(),
+                        "params": params,
+                    }
                     subscribers = self.vnflcm.get_subscribers(
-                            vnfd_id,
-                            vnf_instance_id,
-                            command.upper(),
-                            op_state,
-                            event_details
-                            )
+                        vnfd_id,
+                        vnf_instance_id,
+                        command.upper(),
+                        op_state,
+                        event_details,
+                    )
                     if subscribers:
                         asyncio.ensure_future(
-                                self.vnflcm.send_notifications(
-                                    subscribers, loop=self.loop
-                                ),
-                                loop=self.loop
-                            )
+                            self.vnflcm.send_notifications(subscribers),
+                        )
             elif topic == "nsi":
                 if command == "terminated" and params["operationState"] in (
                     "COMPLETED",
@@ -346,7 +328,7 @@ class SubscriptionThread(threading.Thread):
             # writing to kafka must be done with our own loop. For this reason it is not allowed Engine to do that,
             # but content to be written is stored at msg_to_send
             for msg in msg_to_send:
-                await self.msg.aiowrite(*msg, loop=self.loop)
+                await self.msg.aiowrite(*msg)
         except (EngineException, DbException, MsgException) as e:
             self.logger.error(
                 "Error while processing topic={} command={}: {}".format(
@@ -382,6 +364,8 @@ class SubscriptionThread(threading.Thread):
         """
         self.to_terminate = True
         if self.aiomain_task:
-            self.loop.call_soon_threadsafe(self.aiomain_task.cancel)
+            asyncio.get_event_loop().call_soon_threadsafe(self.aiomain_task.cancel)
         if self.aiomain_task_admin:
-            self.loop.call_soon_threadsafe(self.aiomain_task_admin.cancel)
+            asyncio.get_event_loop().call_soon_threadsafe(
+                self.aiomain_task_admin.cancel
+            )
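
The subscriptions.py changes above drop the thread-owned event loop and every explicit loop= argument: asyncio.sleep, asyncio.ensure_future and asyncio.wait stopped accepting a loop parameter in Python 3.10, and the message-bus helpers are now called without one as well, with asyncio.run() creating and closing a loop per invocation of start_kafka. A minimal sketch of the resulting pattern, using a simplified, invented stand-in for SubscriptionThread.

# Simplified stand-in (names invented): the thread no longer stores self.loop;
# asyncio.run() builds a fresh loop, runs the coroutine, and closes the loop
# when it returns.
import asyncio
import threading


class Worker(threading.Thread):
    def __init__(self):
        super().__init__()
        self.to_terminate = False

    async def main(self):
        while not self.to_terminate:
            await asyncio.sleep(0.1)  # placeholder for the kafka read/write cycle

    def run(self):
        asyncio.run(self.main())  # replaces loop.run_until_complete(...)


worker = Worker()
worker.start()
worker.to_terminate = True
worker.join()
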
diff --git a/osm_nbi/tests/run_test.py b/osm_nbi/tests/run_test.py
deleted file mode 100755 (executable)
index 079b129..0000000
+++ /dev/null
@@ -1,5898 +0,0 @@
-#! /usr/bin/python3
-# -*- coding: utf-8 -*-
-
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#    http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
-# implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import getopt
-import sys
-import requests
-import json
-import logging
-import yaml
-
-# import json
-# import tarfile
-from time import sleep
-from random import randint
-import os
-from sys import stderr
-from uuid import uuid4
-import re
-
-__author__ = "Alfonso Tierno, alfonso.tiernosepulveda@telefonica.com"
-__date__ = "$2018-03-01$"
-__version__ = "0.3"
-version_date = "Oct 2018"
-
-
-def usage():
-    print("Usage: ", sys.argv[0], "[options]")
-    print(
-        "      Performs system tests over running NBI. It can be used for real OSM test using option '--test-osm'"
-    )
-    print(
-        "      If this is the case env variables 'OSMNBITEST_VIM_NAME' must be supplied to create a VIM if not exist "
-        "where deployment is done"
-    )
-    print("OPTIONS")
-    print("      -h|--help: shows this help")
-    print("      --insecure: Allows non trusted https NBI server")
-    print("      --list: list available tests")
-    print(
-        "      --manual-check: Deployment tests stop after deployed to allow manual inspection. Only make sense with "
-        "'--test-osm'"
-    )
-    print("      -p|--password PASSWORD: NBI access password. 'admin' by default")
-    print("      ---project PROJECT: NBI access project. 'admin' by default")
-    print(
-        "      --test TEST[,...]: Execute only a test or a comma separated list of tests"
-    )
-    print(
-        "      --params key=val: params to the previous test. key can be vnfd-files, nsd-file, ns-name, ns-config"
-    )
-    print(
-        "      --test-osm: If missing this test is intended for NBI only, no other OSM components are expected. Use "
-        "this flag to test the system. LCM and RO components are expected to be up and running"
-    )
-    print(
-        "      --timeout TIMEOUT: General NBI timeout, by default {}s".format(timeout)
-    )
-    print(
-        "      --timeout-deploy TIMEOUT: Timeout used for getting NS deployed, by default {}s".format(
-            timeout_deploy
-        )
-    )
-    print(
-        "      --timeout-configure TIMEOUT: Timeout used for getting NS deployed and configured,"
-        " by default {}s".format(timeout_configure)
-    )
-    print("      -u|--user USERNAME: NBI access username. 'admin' by default")
-    print(
-        "      --url URL: complete NBI server URL. 'https//localhost:9999/osm' by default"
-    )
-    print("      -v|--verbose print debug information, can be used several times")
-    print("      --no-verbose remove verbosity")
-    print("      --version: prints current version")
-    print("ENV variables used for real deployment tests with option osm-test.")
-    print("      export OSMNBITEST_VIM_NAME=vim-name")
-    print("      export OSMNBITEST_VIM_URL=vim-url")
-    print("      export OSMNBITEST_VIM_TYPE=vim-type")
-    print("      export OSMNBITEST_VIM_TENANT=vim-tenant")
-    print("      export OSMNBITEST_VIM_USER=vim-user")
-    print("      export OSMNBITEST_VIM_PASSWORD=vim-password")
-    print('      export OSMNBITEST_VIM_CONFIG="vim-config"')
-    print('      export OSMNBITEST_NS_NAME="vim-config"')
-    return
-
-
-r_header_json = {"Content-type": "application/json"}
-headers_json = {"Content-type": "application/json", "Accept": "application/json"}
-r_header_yaml = {"Content-type": "application/yaml"}
-headers_yaml = {"Content-type": "application/yaml", "Accept": "application/yaml"}
-r_header_text = {"Content-type": "text/plain"}
-r_header_octect = {"Content-type": "application/octet-stream"}
-headers_text = {"Accept": "text/plain,application/yaml"}
-r_header_zip = {"Content-type": "application/zip"}
-headers_zip = {"Accept": "application/zip,application/yaml"}
-headers_zip_yaml = {"Accept": "application/yaml", "Content-type": "application/zip"}
-headers_zip_json = {"Accept": "application/json", "Content-type": "application/zip"}
-headers_txt_json = {"Accept": "application/json", "Content-type": "text/plain"}
-r_headers_yaml_location_vnfd = {
-    "Location": "/vnfpkgm/v1/vnf_packages_content/",
-    "Content-Type": "application/yaml",
-}
-r_headers_yaml_location_nsd = {
-    "Location": "/nsd/v1/ns_descriptors_content/",
-    "Content-Type": "application/yaml",
-}
-r_headers_yaml_location_nst = {
-    "Location": "/nst/v1/netslice_templates_content",
-    "Content-Type": "application/yaml",
-}
-r_headers_yaml_location_nslcmop = {
-    "Location": "nslcm/v1/ns_lcm_op_occs/",
-    "Content-Type": "application/yaml",
-}
-r_headers_yaml_location_nsilcmop = {
-    "Location": "/osm/nsilcm/v1/nsi_lcm_op_occs/",
-    "Content-Type": "application/yaml",
-}
-
-# test ones authorized
-test_authorized_list = (
-    (
-        "AU1",
-        "Invalid vnfd id",
-        "GET",
-        "/vnfpkgm/v1/vnf_packages/non-existing-id",
-        headers_json,
-        None,
-        404,
-        r_header_json,
-        "json",
-    ),
-    (
-        "AU2",
-        "Invalid nsd id",
-        "GET",
-        "/nsd/v1/ns_descriptors/non-existing-id",
-        headers_yaml,
-        None,
-        404,
-        r_header_yaml,
-        "yaml",
-    ),
-    (
-        "AU3",
-        "Invalid nsd id",
-        "DELETE",
-        "/nsd/v1/ns_descriptors_content/non-existing-id",
-        headers_yaml,
-        None,
-        404,
-        r_header_yaml,
-        "yaml",
-    ),
-)
-timeout = 120  # general timeout
-timeout_deploy = 60 * 10  # timeout for NS deploying without charms
-timeout_configure = 60 * 20  # timeout for NS deploying and configuring
-
-
-class TestException(Exception):
-    pass
-
-
-class TestRest:
-    def __init__(
-        self,
-        url_base,
-        header_base=None,
-        verify=False,
-        user="admin",
-        password="admin",
-        project="admin",
-    ):
-        self.url_base = url_base
-        if header_base is None:
-            self.header_base = {}
-        else:
-            self.header_base = header_base.copy()
-        self.s = requests.session()
-        self.s.headers = self.header_base
-        self.verify = verify
-        self.token = False
-        self.user = user
-        self.password = password
-        self.project = project
-        self.vim_id = None
-        # contains ID of tests obtained from Location response header. "" key contains last obtained id
-        self.last_id = ""
-        self.test_name = None
-        self.step = 0  # number of subtest under test
-        self.passed_tests = 0
-        self.failed_tests = 0
-
-    def set_test_name(self, test_name):
-        self.test_name = test_name
-        self.step = 0
-        self.last_id = ""
-
-    def set_header(self, header):
-        self.s.headers.update(header)
-
-    def set_tet_name(self, test_name):
-        self.test_name = test_name
-
-    def unset_header(self, key):
-        if key in self.s.headers:
-            del self.s.headers[key]
-
-    def test(
-        self,
-        description,
-        method,
-        url,
-        headers,
-        payload,
-        expected_codes,
-        expected_headers,
-        expected_payload,
-        store_file=None,
-        pooling=False,
-    ):
-        """
-        Performs an http request and check http code response. Exit if different than allowed. It get the returned id
-        that can be used by following test in the URL with {name} where name is the name of the test
-        :param description:  description of the test
-        :param method: HTTP method: GET,PUT,POST,DELETE,...
-        :param url: complete URL or relative URL
-        :param headers: request headers to add to the base headers
-        :param payload: Can be a dict, transformed to json, a text or a file if starts with '@'
-        :param expected_codes: expected response codes, can be int, int tuple or int range
-        :param expected_headers: expected response headers, dict with key values
-        :param expected_payload: expected payload, 0 if empty, 'yaml', 'json', 'text', 'zip', 'octet-stream'
-        :param store_file: filename to store content
-        :param pooling: if True do not count neither log this test. Because a pooling is done with many equal requests
-        :return: requests response
-        """
-        r = None
-        try:
-            if not self.s:
-                self.s = requests.session()
-            # URL
-            if not url:
-                url = self.url_base
-            elif not url.startswith("http"):
-                url = self.url_base + url
-
-            # replace url <> with the last ID
-            url = url.replace("<>", self.last_id)
-            if payload:
-                if isinstance(payload, str):
-                    if payload.startswith("@"):
-                        mode = "r"
-                        file_name = payload[1:]
-                        if payload.startswith("@b"):
-                            mode = "rb"
-                            file_name = payload[2:]
-                        with open(file_name, mode) as f:
-                            payload = f.read()
-                elif isinstance(payload, dict):
-                    payload = json.dumps(payload)
-
-            if not pooling:
-                test_description = "Test {}{} {} {} {}".format(
-                    self.test_name, self.step, description, method, url
-                )
-                logger.warning(test_description)
-                self.step += 1
-            stream = False
-            if expected_payload in ("zip", "octet-string") or store_file:
-                stream = True
-            __retry = 0
-            while True:
-                try:
-                    r = getattr(self.s, method.lower())(
-                        url,
-                        data=payload,
-                        headers=headers,
-                        verify=self.verify,
-                        stream=stream,
-                    )
-                    break
-                except requests.exceptions.ConnectionError as e:
-                    if __retry == 2:
-                        raise
-                    logger.error("Exception {}. Retrying".format(e))
-                    __retry += 1
-
-            if expected_payload in ("zip", "octet-string") or store_file:
-                logger.debug("RX {}".format(r.status_code))
-            else:
-                logger.debug("RX {}: {}".format(r.status_code, r.text))
-
-            # check response
-            if expected_codes:
-                if isinstance(expected_codes, int):
-                    expected_codes = (expected_codes,)
-                if r.status_code not in expected_codes:
-                    raise TestException(
-                        "Got status {}. Expected {}. {}".format(
-                            r.status_code, expected_codes, r.text
-                        )
-                    )
-
-            if expected_headers:
-                for header_key, header_val in expected_headers.items():
-                    if header_key.lower() not in r.headers:
-                        raise TestException("Header {} not present".format(header_key))
-                    if header_val and header_val.lower() not in r.headers[header_key]:
-                        raise TestException(
-                            "Header {} does not contain {} but {}".format(
-                                header_key, header_val, r.headers[header_key]
-                            )
-                        )
-
-            if expected_payload is not None:
-                if expected_payload == 0 and len(r.content) > 0:
-                    raise TestException("Expected empty payload")
-                elif expected_payload == "json":
-                    try:
-                        r.json()
-                    except Exception as e:
-                        raise TestException(
-                            "Expected json response payload, but got Exception {}".format(
-                                e
-                            )
-                        )
-                elif expected_payload == "yaml":
-                    try:
-                        yaml.safe_load(r.text)
-                    except Exception as e:
-                        raise TestException(
-                            "Expected yaml response payload, but got Exception {}".format(
-                                e
-                            )
-                        )
-                elif expected_payload in ("zip", "octet-string"):
-                    if len(r.content) == 0:
-                        raise TestException(
-                            "Expected some response payload, but got empty"
-                        )
-                    # try:
-                    #     tar = tarfile.open(None, 'r:gz', fileobj=r.raw)
-                    #     for tarinfo in tar:
-                    #         tarname = tarinfo.name
-                    #         print(tarname)
-                    # except Exception as e:
-                    #     raise TestException("Expected zip response payload, but got Exception {}".format(e))
-                elif expected_payload == "text":
-                    if len(r.content) == 0:
-                        raise TestException(
-                            "Expected some response payload, but got empty"
-                        )
-                    # r.text
-            if store_file:
-                with open(store_file, "wb") as fd:
-                    for chunk in r.iter_content(chunk_size=128):
-                        fd.write(chunk)
-
-            location = r.headers.get("Location")
-            if location:
-                _id = location[location.rfind("/") + 1:]
-                if _id:
-                    self.last_id = str(_id)
-            if not pooling:
-                self.passed_tests += 1
-            return r
-        except TestException as e:
-            self.failed_tests += 1
-            r_status_code = None
-            r_text = None
-            if r:
-                r_status_code = r.status_code
-                r_text = r.text
-            logger.error("{} \nRX code{}: {}".format(e, r_status_code, r_text))
-            return None
-            # exit(1)
-        except IOError as e:
-            if store_file:
-                logger.error("Cannot open file {}: {}".format(store_file, e))
-            else:
-                logger.error("Exception: {}".format(e), exc_info=True)
-            self.failed_tests += 1
-            return None
-            # exit(1)
-        except requests.exceptions.RequestException as e:
-            logger.error("Exception: {}".format(e))
-
-    def get_autorization(self):  # user=None, password=None, project=None):
-        if (
-            self.token
-        ):  # and self.user == user and self.password == password and self.project == project:
-            return
-        # self.user = user
-        # self.password = password
-        # self.project = project
-        r = self.test(
-            "Obtain token",
-            "POST",
-            "/admin/v1/tokens",
-            headers_json,
-            {
-                "username": self.user,
-                "password": self.password,
-                "project_id": self.project,
-            },
-            (200, 201),
-            r_header_json,
-            "json",
-        )
-        if not r:
-            return
-        response = r.json()
-        self.token = response["id"]
-        self.set_header({"Authorization": "Bearer {}".format(self.token)})
-
-    def remove_authorization(self):
-        if self.token:
-            self.test(
-                "Delete token",
-                "DELETE",
-                "/admin/v1/tokens/{}".format(self.token),
-                headers_json,
-                None,
-                (200, 201, 204),
-                None,
-                None,
-            )
-        self.token = None
-        self.unset_header("Authorization")
-
-    def get_create_vim(self, test_osm):
-        if self.vim_id:
-            return self.vim_id
-        self.get_autorization()
-        if test_osm:
-            vim_name = os.environ.get("OSMNBITEST_VIM_NAME")
-            if not vim_name:
-                raise TestException(
-                    "Needed to define OSMNBITEST_VIM_XXX variables to create a real VIM for deployment"
-                )
-        else:
-            vim_name = "fakeVim"
-        # Get VIM
-        r = self.test(
-            "Get VIM ID",
-            "GET",
-            "/admin/v1/vim_accounts?name={}".format(vim_name),
-            headers_json,
-            None,
-            200,
-            r_header_json,
-            "json",
-        )
-        if not r:
-            return
-        vims = r.json()
-        if vims:
-            return vims[0]["_id"]
-        # Add VIM
-        if test_osm:
-            # check needed environ parameters:
-            if not os.environ.get("OSMNBITEST_VIM_URL") or not os.environ.get(
-                "OSMNBITEST_VIM_TENANT"
-            ):
-                raise TestException(
-                    "Env OSMNBITEST_VIM_URL and OSMNBITEST_VIM_TENANT are needed for create a real VIM"
-                    " to deploy on whit the --test-osm option"
-                )
-            vim_data = (
-                "{{schema_version: '1.0', name: '{}', vim_type: {}, vim_url: '{}',"
-                "vim_tenant_name: '{}', " "vim_user: {}, vim_password: {}"
-            ).format(
-                vim_name,
-                os.environ.get("OSMNBITEST_VIM_TYPE", "openstack"),
-                os.environ.get("OSMNBITEST_VIM_URL"),
-                os.environ.get("OSMNBITEST_VIM_TENANT"),
-                os.environ.get("OSMNBITEST_VIM_USER"),
-                os.environ.get("OSMNBITEST_VIM_PASSWORD"),
-            )
-            if os.environ.get("OSMNBITEST_VIM_CONFIG"):
-                vim_data += " ,config: {}".format(
-                    os.environ.get("OSMNBITEST_VIM_CONFIG")
-                )
-            vim_data += "}"
-        else:
-            vim_data = (
-                "{schema_version: '1.0', name: fakeVim, vim_type: openstack, vim_url: 'http://10.11.12.13/fake'"
-                ", vim_tenant_name: 'vimtenant', vim_user: vimuser, vim_password: vimpassword}"
-            )
-        self.test(
-            "Create VIM",
-            "POST",
-            "/admin/v1/vim_accounts",
-            headers_yaml,
-            vim_data,
-            (201, 202),
-            {"Location": "/admin/v1/vim_accounts/", "Content-Type": "application/yaml"},
-            "yaml",
-        )
-        return self.last_id
-
-    def print_results(self):
-        print("\n\n\n--------------------------------------------")
-        print(
-            "TEST RESULTS: Total: {}, Passed: {}, Failed: {}".format(
-                self.passed_tests + self.failed_tests,
-                self.passed_tests,
-                self.failed_tests,
-            )
-        )
-        print("--------------------------------------------")
-
-    def wait_until_delete(self, url_op, timeout_delete):
-        """
-        Make a pooling until topic is not present, because of deleted
-        :param url_op:
-        :param timeout_delete:
-        :return:
-        """
-        description = "Wait to topic being deleted"
-        test_description = "Test {}{} {} {} {}".format(
-            self.test_name, self.step, description, "GET", url_op
-        )
-        logger.warning(test_description)
-        self.step += 1
-
-        wait = timeout_delete
-        while wait >= 0:
-            r = self.test(
-                description,
-                "GET",
-                url_op,
-                headers_yaml,
-                None,
-                (200, 404),
-                None,
-                r_header_yaml,
-                "yaml",
-                pooling=True,
-            )
-            if not r:
-                return
-            if r.status_code == 404:
-                self.passed_tests += 1
-                break
-            elif r.status_code == 200:
-                wait -= 5
-                sleep(5)
-        else:
-            raise TestException(
-                "Topic is not deleted after {} seconds".format(timeout_delete)
-            )
-            self.failed_tests += 1
-
-    def wait_operation_ready(self, ns_nsi, opp_id, timeout, expected_fail=False):
-        """
-        Wait until nslcmop or nsilcmop finished
-        :param ns_nsi: "ns" o "nsi"
-        :param opp_id: Id o fthe operation
-        :param timeout:
-        :param expected_fail:
-        :return: None. Updates passed/failed_tests
-        """
-        if ns_nsi == "ns":
-            url_op = "/nslcm/v1/ns_lcm_op_occs/{}".format(opp_id)
-        else:
-            url_op = "/nsilcm/v1/nsi_lcm_op_occs/{}".format(opp_id)
-        description = "Wait to {} lcm operation complete".format(ns_nsi)
-        test_description = "Test {}{} {} {} {}".format(
-            self.test_name, self.step, description, "GET", url_op
-        )
-        logger.warning(test_description)
-        self.step += 1
-        wait = timeout
-        while wait >= 0:
-            r = self.test(
-                description,
-                "GET",
-                url_op,
-                headers_json,
-                None,
-                200,
-                r_header_json,
-                "json",
-                pooling=True,
-            )
-            if not r:
-                return
-            nslcmop = r.json()
-            if "COMPLETED" in nslcmop["operationState"]:
-                if expected_fail:
-                    logger.error(
-                        "NS terminate has success, expecting failing: {}".format(
-                            nslcmop["detailed-status"]
-                        )
-                    )
-                    self.failed_tests += 1
-                else:
-                    self.passed_tests += 1
-                break
-            elif "FAILED" in nslcmop["operationState"]:
-                if not expected_fail:
-                    logger.error(
-                        "NS terminate has failed: {}".format(nslcmop["detailed-status"])
-                    )
-                    self.failed_tests += 1
-                else:
-                    self.passed_tests += 1
-                break
-
-            print(".", end="", file=stderr)
-            wait -= 10
-            sleep(10)
-        else:
-            self.failed_tests += 1
-            logger.error(
-                "NS instantiate is not terminate after {} seconds".format(timeout)
-            )
-            return
-        print("", file=stderr)
-
-
-class TestNonAuthorized:
-    description = "Test invalid URLs. methods and no authorization"
-
-    @staticmethod
-    def run(engine, test_osm, manual_check, test_params=None):
-        engine.set_test_name("NonAuth")
-        engine.remove_authorization()
-        test_not_authorized_list = (
-            (
-                "Invalid token",
-                "GET",
-                "/admin/v1/users",
-                headers_json,
-                None,
-                401,
-                r_header_json,
-                "json",
-            ),
-            (
-                "Invalid URL",
-                "POST",
-                "/admin/v1/nonexist",
-                headers_yaml,
-                None,
-                405,
-                r_header_yaml,
-                "yaml",
-            ),
-            (
-                "Invalid version",
-                "DELETE",
-                "/admin/v2/users",
-                headers_yaml,
-                None,
-                405,
-                r_header_yaml,
-                "yaml",
-            ),
-        )
-        for t in test_not_authorized_list:
-            engine.test(*t)
-
-
-class TestUsersProjects:
-    description = "test project and user creation"
-
-    @staticmethod
-    def run(engine, test_osm, manual_check, test_params=None):
-        engine.set_test_name("UserProject")
-        # backend = test_params.get("backend") if test_params else None   # UNUSED
-
-        # Initialisation
-        p1 = p2 = p3 = None
-        padmin = pbad = None
-        u1 = u2 = u3 = u4 = None
-
-        engine.get_autorization()
-
-        res = engine.test(
-            "Create project non admin 1",
-            "POST",
-            "/admin/v1/projects",
-            headers_json,
-            {"name": "P1"},
-            (201, 204),
-            {"Location": "/admin/v1/projects/", "Content-Type": "application/json"},
-            "json",
-        )
-        p1 = engine.last_id if res else None
-
-        res = engine.test(
-            "Create project admin",
-            "POST",
-            "/admin/v1/projects",
-            headers_json,
-            {"name": "Padmin", "admin": True},
-            (201, 204),
-            {"Location": "/admin/v1/projects/", "Content-Type": "application/json"},
-            "json",
-        )
-        padmin = engine.last_id if res else None
-
-        res = engine.test(
-            "Create project bad format",
-            "POST",
-            "/admin/v1/projects",
-            headers_json,
-            {"name": 1},
-            (400, 422),
-            r_header_json,
-            "json",
-        )
-        pbad = engine.last_id if res else None
-
-        res = engine.test(
-            "Get project admin role",
-            "GET",
-            "/admin/v1/roles?name=project_admin",
-            headers_json,
-            {},
-            (200),
-            {"Content-Type": "application/json"},
-            "json",
-        )
-        rpa = res.json()[0]["_id"] if res else None
-        res = engine.test(
-            "Get project user role",
-            "GET",
-            "/admin/v1/roles?name=project_user",
-            headers_json,
-            {},
-            (200),
-            {"Content-Type": "application/json"},
-            "json",
-        )
-        rpu = res.json()[0]["_id"] if res else None
-        res = engine.test(
-            "Get system admin role",
-            "GET",
-            "/admin/v1/roles?name=system_admin",
-            headers_json,
-            {},
-            (200),
-            {"Content-Type": "application/json"},
-            "json",
-        )
-        rsa = res.json()[0]["_id"] if res else None
-
-        data = {"username": "U1", "password": "pw1"}
-        p2 = uuid4().hex
-        data["project_role_mappings"] = [
-            {"project": p1, "role": rpa},
-            {"project": p2, "role": rpa},
-            {"project": padmin, "role": rpu},
-        ]
-        rc = 201
-        xhd = {"Location": "/admin/v1/users/", "Content-Type": "application/json"}
-        res = engine.test(
-            "Create user with bad project and force",
-            "POST",
-            "/admin/v1/users?FORCE=True",
-            headers_json,
-            data,
-            rc,
-            xhd,
-            "json",
-        )
-        if res:
-            u1 = engine.last_id
-        else:
-            # User is created sometimes even though an exception is raised
-            res = engine.test(
-                "Get user U1",
-                "GET",
-                "/admin/v1/users?username=U1",
-                headers_json,
-                {},
-                (200),
-                {"Content-Type": "application/json"},
-                "json",
-            )
-            u1 = res.json()[0]["_id"] if res else None
-
-        data = {"username": "U2", "password": "pw2"}
-        data["project_role_mappings"] = [
-            {"project": p1, "role": rpa},
-            {"project": padmin, "role": rsa},
-        ]
-        res = engine.test(
-            "Create user 2",
-            "POST",
-            "/admin/v1/users",
-            headers_json,
-            data,
-            201,
-            {"Location": "/admin/v1/users/", "Content-Type": "application/json"},
-            "json",
-        )
-        u2 = engine.last_id if res else None
-
-        if u1:
-            ftt = "project_role_mappings"
-            xpr = [{"project": p1, "role": rpa}, {"project": padmin, "role": rpu}]
-            data = {ftt: xpr}
-            engine.test(
-                "Edit user U1, delete  P2 project",
-                "PATCH",
-                "/admin/v1/users/" + u1,
-                headers_json,
-                data,
-                204,
-                None,
-                None,
-            )
-            res = engine.test(
-                "Check user U1, contains the right projects",
-                "GET",
-                "/admin/v1/users/" + u1,
-                headers_json,
-                None,
-                200,
-                None,
-                json,
-            )
-            if res:
-                rj = res.json()
-                xpr[0]["project_name"] = "P1"
-                xpr[0]["role_name"] = "project_admin"
-                xpr[1]["project_name"] = "Padmin"
-                xpr[1]["role_name"] = "project_user"
-                ok = True
-                for pr in rj[ftt]:
-                    if pr not in xpr:
-                        ok = False
-                for pr in xpr:
-                    if pr not in rj[ftt]:
-                        ok = False
-                if not ok:
-                    logger.error(
-                        "User {} '{}' are different than expected '{}'. Edition was not done properly".format(
-                            ftt, rj[ftt], xpr
-                        )
-                    )
-                    engine.failed_tests += 1
-
-        p2 = None  # To prevent deletion attempts
-
-        # Add a test of 'default project' for Keystone?
-
-        if u2:
-            engine.test(
-                "Edit user U2, change password",
-                "PUT",
-                "/admin/v1/users/" + u2,
-                headers_json,
-                {"password": "pw2_new"},
-                204,
-                None,
-                None,
-            )
-
-        if p1:
-            engine.test(
-                "Change to project P1 non existing",
-                "POST",
-                "/admin/v1/tokens/",
-                headers_json,
-                {"project_id": p1},
-                401,
-                r_header_json,
-                "json",
-            )
-
-        if u2 and p1:
-            res = engine.test(
-                "Change to user U2 project P1",
-                "POST",
-                "/admin/v1/tokens",
-                headers_json,
-                {"username": "U2", "password": "pw2_new", "project_id": "P1"},
-                (200, 201),
-                r_header_json,
-                "json",
-            )
-            if res:
-                rj = res.json()
-                engine.set_header({"Authorization": "Bearer {}".format(rj["id"])})
-
-                engine.test(
-                    "Edit user projects non admin",
-                    "PUT",
-                    "/admin/v1/users/U1",
-                    headers_json,
-                    {"remove_project_role_mappings": [{"project": "P1", "role": None}]},
-                    401,
-                    r_header_json,
-                    "json",
-                )
-
-                res = engine.test(
-                    "Add new project non admin",
-                    "POST",
-                    "/admin/v1/projects",
-                    headers_json,
-                    {"name": "P2"},
-                    401,
-                    r_header_json,
-                    "json",
-                )
-                if res is None or res.status_code == 201:
-                    # The project has been created even though it shouldn't
-                    res = engine.test(
-                        "Get project P2",
-                        "GET",
-                        "/admin/v1/projects/P2",
-                        headers_json,
-                        None,
-                        200,
-                        r_header_json,
-                        "json",
-                    )
-                    p2 = res.json()["_id"] if res else None
-
-                if p1:
-                    data = {"username": "U3", "password": "pw3"}
-                    data["project_role_mappings"] = [{"project": p1, "role": rpu}]
-                    res = engine.test(
-                        "Add new user non admin",
-                        "POST",
-                        "/admin/v1/users",
-                        headers_json,
-                        data,
-                        401,
-                        r_header_json,
-                        "json",
-                    )
-                    if res is None or res.status_code == 201:
-                        # The user has been created even though it shouldn't
-                        res = engine.test(
-                            "Get user U3",
-                            "GET",
-                            "/admin/v1/users/U3",
-                            headers_json,
-                            None,
-                            200,
-                            r_header_json,
-                            "json",
-                        )
-                        u3 = res.json()["_id"] if res else None
-                else:
-                    u3 = None
-
-                if padmin:
-                    res = engine.test(
-                        "Change to user U2 project Padmin",
-                        "POST",
-                        "/admin/v1/tokens",
-                        headers_json,
-                        {
-                            "project_id": "Padmin"
-                        },  # Caused a Keystone authentication error
-                        # {"username": "U2", "password": "pw2_new", "project_id": "Padmin"},
-                        (200, 201),
-                        r_header_json,
-                        "json",
-                    )
-                    if res:
-                        rj = res.json()
-                        engine.set_header(
-                            {"Authorization": "Bearer {}".format(rj["id"])}
-                        )
-
-                        res = engine.test(
-                            "Add new project admin",
-                            "POST",
-                            "/admin/v1/projects",
-                            headers_json,
-                            {"name": "P3"},
-                            (201, 204),
-                            {
-                                "Location": "/admin/v1/projects/",
-                                "Content-Type": "application/json",
-                            },
-                            "json",
-                        )
-                        p3 = engine.last_id if res else None
-
-                        if p1:
-                            data = {"username": "U4", "password": "pw4"}
-                            data["project_role_mappings"] = [
-                                {"project": p1, "role": rpa}
-                            ]
-                            res = engine.test(
-                                "Add new user admin",
-                                "POST",
-                                "/admin/v1/users",
-                                headers_json,
-                                data,
-                                (201, 204),
-                                {
-                                    "Location": "/admin/v1/users/",
-                                    "Content-Type": "application/json",
-                                },
-                                "json",
-                            )
-                            u4 = engine.last_id if res else None
-                        else:
-                            u4 = None
-
-                        if u4 and p3:
-                            data = {
-                                "project_role_mappings": [{"project": p3, "role": rpa}]
-                            }
-                            engine.test(
-                                "Edit user projects admin",
-                                "PUT",
-                                "/admin/v1/users/U4",
-                                headers_json,
-                                data,
-                                204,
-                                None,
-                                None,
-                            )
-                            # Project is deleted even though it shouldn't - PROVISIONAL?
-                            res = engine.test(
-                                "Delete project P3 conflict",
-                                "DELETE",
-                                "/admin/v1/projects/" + p3,
-                                headers_json,
-                                None,
-                                409,
-                                None,
-                                None,
-                            )
-                            if res and res.status_code in (200, 204):
-                                p3 = None
-                            if p3:
-                                res = engine.test(
-                                    "Delete project P3 forcing",
-                                    "DELETE",
-                                    "/admin/v1/projects/" + p3 + "?FORCE=True",
-                                    headers_json,
-                                    None,
-                                    204,
-                                    None,
-                                    None,
-                                )
-                                if res and res.status_code in (200, 204):
-                                    p3 = None
-
-                        if u2:
-                            res = engine.test(
-                                "Delete user U2. Conflict deleting own user",
-                                "DELETE",
-                                "/admin/v1/users/" + u2,
-                                headers_json,
-                                None,
-                                409,
-                                r_header_json,
-                                "json",
-                            )
-                            if res is None or res.status_code in (200, 204):
-                                u2 = None
-                        if u4:
-                            res = engine.test(
-                                "Delete user U4",
-                                "DELETE",
-                                "/admin/v1/users/" + u4,
-                                headers_json,
-                                None,
-                                204,
-                                None,
-                                None,
-                            )
-                            if res and res.status_code in (200, 204):
-                                u4 = None
-                        if p3:
-                            res = engine.test(
-                                "Delete project P3",
-                                "DELETE",
-                                "/admin/v1/projects/" + p3,
-                                headers_json,
-                                None,
-                                204,
-                                None,
-                                None,
-                            )
-                            if res and res.status_code in (200, 204):
-                                p3 = None
-
-                if u3:
-                    res = engine.test(
-                        "Delete user U3",
-                        "DELETE",
-                        "/admin/v1/users/" + u3,
-                        headers_json,
-                        None,
-                        204,
-                        None,
-                        None,
-                    )
-                    if res:
-                        u3 = None
-
-        # change to admin
-        engine.remove_authorization()  # To force get authorization
-        engine.get_autorization()
-        if u1:
-            engine.test(
-                "Delete user U1",
-                "DELETE",
-                "/admin/v1/users/" + u1,
-                headers_json,
-                None,
-                204,
-                None,
-                None,
-            )
-        if u2:
-            engine.test(
-                "Delete user U2",
-                "DELETE",
-                "/admin/v1/users/" + u2,
-                headers_json,
-                None,
-                204,
-                None,
-                None,
-            )
-        if u3:
-            engine.test(
-                "Delete user U3",
-                "DELETE",
-                "/admin/v1/users/" + u3,
-                headers_json,
-                None,
-                204,
-                None,
-                None,
-            )
-        if u4:
-            engine.test(
-                "Delete user U4",
-                "DELETE",
-                "/admin/v1/users/" + u4,
-                headers_json,
-                None,
-                204,
-                None,
-                None,
-            )
-        if p1:
-            engine.test(
-                "Delete project P1",
-                "DELETE",
-                "/admin/v1/projects/" + p1,
-                headers_json,
-                None,
-                204,
-                None,
-                None,
-            )
-        if p2:
-            engine.test(
-                "Delete project P2",
-                "DELETE",
-                "/admin/v1/projects/" + p2,
-                headers_json,
-                None,
-                204,
-                None,
-                None,
-            )
-        if p3:
-            engine.test(
-                "Delete project P3",
-                "DELETE",
-                "/admin/v1/projects/" + p3,
-                headers_json,
-                None,
-                204,
-                None,
-                None,
-            )
-        if padmin:
-            engine.test(
-                "Delete project Padmin",
-                "DELETE",
-                "/admin/v1/projects/" + padmin,
-                headers_json,
-                None,
-                204,
-                None,
-                None,
-            )
-        if pbad:
-            engine.test(
-                "Delete bad project",
-                "DELETE",
-                "/admin/v1/projects/" + pbad,
-                headers_json,
-                None,
-                204,
-                None,
-                None,
-            )
-
-        # BEGIN New Tests - Addressing Projects/Users by Name/ID
-        pid1 = pid2 = None
-        uid1 = uid2 = None
-        res = engine.test(
-            "Create new project P1",
-            "POST",
-            "/admin/v1/projects",
-            headers_json,
-            {"name": "P1"},
-            201,
-            {"Location": "/admin/v1/projects/", "Content-Type": "application/json"},
-            "json",
-        )
-        if res:
-            pid1 = res.json()["id"]
-            # print("# pid =", pid1)
-        res = engine.test(
-            "Create new project P2",
-            "POST",
-            "/admin/v1/projects",
-            headers_json,
-            {"name": "P2"},
-            201,
-            {"Location": "/admin/v1/projects/", "Content-Type": "application/json"},
-            "json",
-        )
-        if res:
-            pid2 = res.json()["id"]
-            # print("# pid =", pid2)
-        data = {"username": "U1", "password": "pw1"}
-        data["project_role_mappings"] = [{"project": pid1, "role": rpu}]
-        res = engine.test(
-            "Create new user U1",
-            "POST",
-            "/admin/v1/users",
-            headers_json,
-            data,
-            201,
-            {"Location": "/admin/v1/users/", "Content-Type": "application/json"},
-            "json",
-        )
-        if res:
-            uid1 = res.json()["id"]
-            # print("# uid =", uid1)
-        data = {"username": "U2", "password": "pw2"}
-        data["project_role_mappings"] = [{"project": pid2, "role": rpu}]
-        res = engine.test(
-            "Create new user U2",
-            "POST",
-            "/admin/v1/users",
-            headers_json,
-            data,
-            201,
-            {"Location": "/admin/v1/users/", "Content-Type": "application/json"},
-            "json",
-        )
-        if res:
-            uid2 = res.json()["id"]
-            # print("# uid =", uid2)
-        if pid1:
-            engine.test(
-                "Get Project P1 by Name",
-                "GET",
-                "/admin/v1/projects/P1",
-                headers_json,
-                None,
-                200,
-                None,
-                "json",
-            )
-            engine.test(
-                "Get Project P1 by ID",
-                "GET",
-                "/admin/v1/projects/" + pid1,
-                headers_json,
-                None,
-                200,
-                None,
-                "json",
-            )
-        if uid1:
-            engine.test(
-                "Get User U1 by Name",
-                "GET",
-                "/admin/v1/users/U1",
-                headers_json,
-                None,
-                200,
-                None,
-                "json",
-            )
-            engine.test(
-                "Get User U1 by ID",
-                "GET",
-                "/admin/v1/users/" + uid1,
-                headers_json,
-                None,
-                200,
-                None,
-                "json",
-            )
-        if pid1:
-            res = engine.test(
-                "Rename Project P1 by Name",
-                "PUT",
-                "/admin/v1/projects/P1",
-                headers_json,
-                {"name": "P3"},
-                204,
-                None,
-                None,
-            )
-            if res:
-                engine.test(
-                    "Get Project P1 by new Name",
-                    "GET",
-                    "/admin/v1/projects/P3",
-                    headers_json,
-                    None,
-                    200,
-                    None,
-                    "json",
-                )
-        if pid2:
-            res = engine.test(
-                "Rename Project P2 by ID",
-                "PUT",
-                "/admin/v1/projects/" + pid2,
-                headers_json,
-                {"name": "P4"},
-                204,
-                None,
-                None,
-            )
-            if res:
-                engine.test(
-                    "Get Project P2 by new Name",
-                    "GET",
-                    "/admin/v1/projects/P4",
-                    headers_json,
-                    None,
-                    200,
-                    None,
-                    "json",
-                )
-
-        if uid1:
-            res = engine.test(
-                "Rename User U1 by Name",
-                "PUT",
-                "/admin/v1/users/U1",
-                headers_json,
-                {"username": "U3"},
-                204,
-                None,
-                None,
-            )
-            if res:
-                engine.test(
-                    "Get User U1 by new Name",
-                    "GET",
-                    "/admin/v1/users/U3",
-                    headers_json,
-                    None,
-                    200,
-                    None,
-                    "json",
-                )
-
-        if uid2:
-            res = engine.test(
-                "Rename User U2 by ID",
-                "PUT",
-                "/admin/v1/users/" + uid2,
-                headers_json,
-                {"username": "U4"},
-                204,
-                None,
-                None,
-            )
-            if res:
-                engine.test(
-                    "Get User U2 by new Name",
-                    "GET",
-                    "/admin/v1/users/U4",
-                    headers_json,
-                    None,
-                    200,
-                    None,
-                    "json",
-                )
-        if uid1:
-            res = engine.test(
-                "Delete User U1 by Name",
-                "DELETE",
-                "/admin/v1/users/U3",
-                headers_json,
-                None,
-                204,
-                None,
-                None,
-            )
-            if res:
-                uid1 = None
-
-        if uid2:
-            res = engine.test(
-                "Delete User U2 by ID",
-                "DELETE",
-                "/admin/v1/users/" + uid2,
-                headers_json,
-                None,
-                204,
-                None,
-                None,
-            )
-            if res:
-                uid2 = None
-
-        if pid1:
-            res = engine.test(
-                "Delete Project P1 by Name",
-                "DELETE",
-                "/admin/v1/projects/P3",
-                headers_json,
-                None,
-                204,
-                None,
-                None,
-            )
-            if res:
-                pid1 = None
-
-        if pid2:
-            res = engine.test(
-                "Delete Project P2 by ID",
-                "DELETE",
-                "/admin/v1/projects/" + pid2,
-                headers_json,
-                None,
-                204,
-                None,
-                None,
-            )
-            if res:
-                pid2 = None
-
-        # END New Tests - Addressing Projects/Users by Name
-
-        # CLEANUP
-        if pid1:
-            engine.test(
-                "Delete Project P1",
-                "DELETE",
-                "/admin/v1/projects/" + pid1,
-                headers_json,
-                None,
-                204,
-                None,
-                None,
-            )
-        if pid2:
-            engine.test(
-                "Delete Project P2",
-                "DELETE",
-                "/admin/v1/projects/" + pid2,
-                headers_json,
-                None,
-                204,
-                None,
-                None,
-            )
-        if uid1:
-            engine.test(
-                "Delete User U1",
-                "DELETE",
-                "/admin/v1/users/" + uid1,
-                headers_json,
-                None,
-                204,
-                None,
-                None,
-            )
-        if uid2:
-            engine.test(
-                "Delete User U2",
-                "DELETE",
-                "/admin/v1/users/" + uid2,
-                headers_json,
-                None,
-                204,
-                None,
-                None,
-            )
-
-        engine.remove_authorization()  # To finish
-
-
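-# The positional arguments passed to engine.test() throughout this file appear to be:
-#   (name, method, url, headers, payload, expected_status_codes,
-#    expected_response_headers, response_payload_type)
-# and engine.last_id presumably holds the id of the last created resource (taken from
-# the Location header or the returned body; the engine implementation is not shown here).
-# A minimal sketch of the same kind of check done with plain `requests`; `base_url`,
-# `token` and `user_id` are illustrative parameters, not values defined in this file:
-def _sketch_delete_user(base_url, token, user_id):
-    resp = requests.delete(
-        base_url + "/admin/v1/users/" + user_id,
-        headers={"Authorization": "Bearer " + token, "Accept": "application/json"},
-    )
-    # the tests above expect 204 (No Content) on a successful deletion
-    return resp.status_code == 204
-
-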
-class TestProjectsDescriptors:
-    description = "test descriptors visibility among projects"
-
-    @staticmethod
-    def run(engine, test_osm, manual_check, test_params=None):
-        vnfd_ids = []
-        engine.set_test_name("ProjectDescriptors")
-        engine.get_autorization()
-
-        project_admin_id = None
-        res = engine.test(
-            "Get my project Padmin",
-            "GET",
-            "/admin/v1/projects/{}".format(engine.project),
-            headers_json,
-            None,
-            200,
-            r_header_json,
-            "json",
-        )
-        if res:
-            response = res.json()
-            project_admin_id = response["_id"]
-        engine.test(
-            "Create project Padmin",
-            "POST",
-            "/admin/v1/projects",
-            headers_json,
-            {"name": "Padmin", "admin": True},
-            (201, 204),
-            {"Location": "/admin/v1/projects/", "Content-Type": "application/json"},
-            "json",
-        )
-        engine.test(
-            "Create project P2",
-            "POST",
-            "/admin/v1/projects",
-            headers_json,
-            {"name": "P2"},
-            (201, 204),
-            {"Location": "/admin/v1/projects/", "Content-Type": "application/json"},
-            "json",
-        )
-        engine.test(
-            "Create project P3",
-            "POST",
-            "/admin/v1/projects",
-            headers_json,
-            {"name": "P3"},
-            (201, 204),
-            {"Location": "/admin/v1/projects/", "Content-Type": "application/json"},
-            "json",
-        )
-
-        engine.test(
-            "Create user U1",
-            "POST",
-            "/admin/v1/users",
-            headers_json,
-            {
-                "username": "U1",
-                "password": "pw1",
-                "project_role_mappings": [
-                    {"project": "Padmin", "role": "system_admin"},
-                    {"project": "P2", "role": "project_admin"},
-                    {"project": "P3", "role": "project_admin"},
-                ],
-            },
-            201,
-            {"Location": "/admin/v1/users/", "Content-Type": "application/json"},
-            "json",
-        )
-
-        engine.test(
-            "Onboard VNFD id1",
-            "POST",
-            "/vnfpkgm/v1/vnf_packages_content?id=id1",
-            headers_yaml,
-            TestDescriptors.vnfd_empty,
-            201,
-            r_headers_yaml_location_vnfd,
-            "yaml",
-        )
-        vnfd_ids.append(engine.last_id)
-        engine.test(
-            "Onboard VNFD id2 PUBLIC",
-            "POST",
-            "/vnfpkgm/v1/vnf_packages_content?id=id2&PUBLIC=TRUE",
-            headers_yaml,
-            TestDescriptors.vnfd_empty,
-            201,
-            r_headers_yaml_location_vnfd,
-            "yaml",
-        )
-        vnfd_ids.append(engine.last_id)
-        engine.test(
-            "Onboard VNFD id3",
-            "POST",
-            "/vnfpkgm/v1/vnf_packages_content?id=id3&PUBLIC=FALSE",
-            headers_yaml,
-            TestDescriptors.vnfd_empty,
-            201,
-            r_headers_yaml_location_vnfd,
-            "yaml",
-        )
-        vnfd_ids.append(engine.last_id)
-
-        res = engine.test(
-            "Get VNFD descriptors",
-            "GET",
-            "/vnfpkgm/v1/vnf_packages?id=id1,id2,id3",
-            headers_json,
-            None,
-            200,
-            r_header_json,
-            "json",
-        )
-        response = res.json()
-        if len(response) != 3:
-            logger.error(
-                "Only 3 vnfds should be present for project admin. {} listed".format(
-                    len(response)
-                )
-            )
-            engine.failed_tests += 1
-
-        # Change to other project Padmin
-        res = engine.test(
-            "Change to user U1 project Padmin",
-            "POST",
-            "/admin/v1/tokens",
-            headers_json,
-            {"username": "U1", "password": "pw1", "project_id": "Padmin"},
-            (200, 201),
-            r_header_json,
-            "json",
-        )
-        if res:
-            response = res.json()
-            engine.set_header({"Authorization": "Bearer {}".format(response["id"])})
-
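-        # The project switch above is plain token creation; a minimal sketch with
-        # `requests` (illustrative `base_url`; the real engine keeps its own URL and session):
-        #
-        #   r = requests.post(base_url + "/admin/v1/tokens",
-        #                     json={"username": "U1", "password": "pw1",
-        #                           "project_id": "Padmin"})
-        #   headers = {"Authorization": "Bearer " + r.json()["id"]}
-        #
-        # Every request sent with these headers is then scoped to project Padmin.
-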
-        # list vnfds
-        res = engine.test(
-            "List VNFD descriptors for Padmin",
-            "GET",
-            "/vnfpkgm/v1/vnf_packages",
-            headers_json,
-            None,
-            200,
-            r_header_json,
-            "json",
-        )
-        response = res.json()
-        if len(response) != 0:
-            logger.error(
-                "Only 0 vnfds should be present for project Padmin. {} listed".format(
-                    len(response)
-                )
-            )
-            engine.failed_tests += 1
-
-        # list Public vnfds
-        res = engine.test(
-            "List VNFD public descriptors",
-            "GET",
-            "/vnfpkgm/v1/vnf_packages?PUBLIC=True",
-            headers_json,
-            None,
-            200,
-            r_header_json,
-            "json",
-        )
-        response = res.json()
-        if len(response) != 1:
-            logger.error(
-                "Only 1 vnfds should be present for project Padmin. {} listed".format(
-                    len(response)
-                )
-            )
-            engine.failed_tests += 1
-
-        # list vnfds belonging to project "admin"
-        res = engine.test(
-            "List VNFD of admin project",
-            "GET",
-            "/vnfpkgm/v1/vnf_packages?ADMIN={}".format(project_admin_id),
-            headers_json,
-            None,
-            200,
-            r_header_json,
-            "json",
-        )
-        if res:
-            response = res.json()
-            if len(response) != 3:
-                logger.error(
-                    "Only 3 vnfds should be present for project Padmin. {} listed".format(
-                        len(response)
-                    )
-                )
-                engine.failed_tests += 1
-
-        # Get Public vnfds
-        engine.test(
-            "Get VNFD public descriptors",
-            "GET",
-            "/vnfpkgm/v1/vnf_packages/{}".format(vnfd_ids[1]),
-            headers_json,
-            None,
-            200,
-            r_header_json,
-            "json",
-        )
-        # Edit not owned vnfd
-        engine.test(
-            "Edit VNFD ",
-            "PATCH",
-            "/vnfpkgm/v1/vnf_packages/{}".format(vnfd_ids[0]),
-            headers_yaml,
-            "{name: pepe}",
-            404,
-            r_header_yaml,
-            "yaml",
-        )
-
-        # Add to my catalog
-        engine.test(
-            "Add VNFD id2 to my catalog",
-            "PATCH",
-            "/vnfpkgm/v1/vnf_packages/{}?SET_PROJECT".format(vnfd_ids[1]),
-            headers_json,
-            None,
-            204,
-            None,
-            0,
-        )
-
-        # Add a new vnfd
-        engine.test(
-            "Onboard VNFD id4",
-            "POST",
-            "/vnfpkgm/v1/vnf_packages_content?id=id4",
-            headers_yaml,
-            TestDescriptors.vnfd_empty,
-            201,
-            r_headers_yaml_location_vnfd,
-            "yaml",
-        )
-        vnfd_ids.append(engine.last_id)
-
-        # list vnfds
-        res = engine.test(
-            "List VNFD public descriptors",
-            "GET",
-            "/vnfpkgm/v1/vnf_packages",
-            headers_json,
-            None,
-            200,
-            r_header_json,
-            "json",
-        )
-        response = res.json()
-        if len(response) != 2:
-            logger.error(
-                "Only 2 vnfds should be present for project Padmin. {} listed".format(
-                    len(response)
-                )
-            )
-            engine.failed_tests += 1
-
-        if manual_check:
-            input(
-                "VNFDs have been omboarded. Perform manual check and press enter to resume"
-            )
-
-        engine.test(
-            "Delete VNFD id2",
-            "DELETE",
-            "/vnfpkgm/v1/vnf_packages/{}".format(vnfd_ids[1]),
-            headers_yaml,
-            None,
-            204,
-            None,
-            0,
-        )
-
-        # change to admin project
-        engine.remove_authorization()  # To force get authorization
-        engine.get_autorization()
-        engine.test(
-            "Delete VNFD id1",
-            "DELETE",
-            "/vnfpkgm/v1/vnf_packages/{}".format(vnfd_ids[0]),
-            headers_yaml,
-            None,
-            204,
-            None,
-            0,
-        )
-        engine.test(
-            "Delete VNFD id2",
-            "DELETE",
-            "/vnfpkgm/v1/vnf_packages/{}".format(vnfd_ids[1]),
-            headers_yaml,
-            None,
-            204,
-            None,
-            0,
-        )
-        engine.test(
-            "Delete VNFD id3",
-            "DELETE",
-            "/vnfpkgm/v1/vnf_packages/{}".format(vnfd_ids[2]),
-            headers_yaml,
-            None,
-            204,
-            None,
-            0,
-        )
-        engine.test(
-            "Delete VNFD id4",
-            "DELETE",
-            "/vnfpkgm/v1/vnf_packages/{}".format(vnfd_ids[3]),
-            headers_yaml,
-            None,
-            404,
-            r_header_yaml,
-            "yaml",
-        )
-        engine.test(
-            "Delete VNFD id4",
-            "DELETE",
-            "/vnfpkgm/v1/vnf_packages/{}?ADMIN".format(vnfd_ids[3]),
-            headers_yaml,
-            None,
-            204,
-            None,
-            0,
-        )
-        # Get Public vnfds
-        engine.test(
-            "Get VNFD deleted id1",
-            "GET",
-            "/vnfpkgm/v1/vnf_packages/{}?ADMIN".format(vnfd_ids[0]),
-            headers_json,
-            None,
-            404,
-            r_header_json,
-            "json",
-        )
-        engine.test(
-            "Get VNFD deleted id2",
-            "GET",
-            "/vnfpkgm/v1/vnf_packages/{}?ADMIN".format(vnfd_ids[1]),
-            headers_json,
-            None,
-            404,
-            r_header_json,
-            "json",
-        )
-        engine.test(
-            "Get VNFD deleted id3",
-            "GET",
-            "/vnfpkgm/v1/vnf_packages/{}?ADMIN".format(vnfd_ids[2]),
-            headers_json,
-            None,
-            404,
-            r_header_json,
-            "json",
-        )
-        engine.test(
-            "Get VNFD deleted id4",
-            "GET",
-            "/vnfpkgm/v1/vnf_packages/{}?ADMIN".format(vnfd_ids[3]),
-            headers_json,
-            None,
-            404,
-            r_header_json,
-            "json",
-        )
-
-        engine.test(
-            "Delete user U1",
-            "DELETE",
-            "/admin/v1/users/U1",
-            headers_json,
-            None,
-            204,
-            None,
-            None,
-        )
-        engine.test(
-            "Delete project Padmin",
-            "DELETE",
-            "/admin/v1/projects/Padmin",
-            headers_json,
-            None,
-            204,
-            None,
-            None,
-        )
-        engine.test(
-            "Delete project P2",
-            "DELETE",
-            "/admin/v1/projects/P2",
-            headers_json,
-            None,
-            204,
-            None,
-            None,
-        )
-        engine.test(
-            "Delete project P3",
-            "DELETE",
-            "/admin/v1/projects/P3",
-            headers_json,
-            None,
-            204,
-            None,
-            None,
-        )
-
-
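-# The PUBLIC / ADMIN / SET_PROJECT query flags exercised above are plain query-string
-# parameters on the vnf_packages collection. A minimal sketch with `requests`, assuming
-# an illustrative `base_url` and an Authorization `headers` dict built as in the tests:
-def _sketch_list_public_vnf_packages(base_url, headers):
-    # list only the descriptors marked as PUBLIC, as in "List VNFD public descriptors"
-    resp = requests.get(
-        base_url + "/vnfpkgm/v1/vnf_packages",
-        headers=headers,
-        params={"PUBLIC": "True"},
-    )
-    return resp.json() if resp.status_code == 200 else []
-
-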
-class TestFakeVim:
-    description = "Creates/edit/delete fake VIMs and SDN controllers"
-
-    def __init__(self):
-        self.vim = {
-            "schema_version": "1.0",
-            "schema_type": "No idea",
-            "name": "myVim",
-            "description": "Descriptor name",
-            "vim_type": "openstack",
-            "vim_url": "http://localhost:/vim",
-            "vim_tenant_name": "vimTenant",
-            "vim_user": "user",
-            "vim_password": "password",
-            "config": {"config_param": 1},
-        }
-        self.sdn = {
-            "name": "sdn-name",
-            "description": "sdn-description",
-            "dpid": "50:50:52:54:00:94:21:21",
-            "ip": "192.168.15.17",
-            "port": 8080,
-            "type": "opendaylight",
-            "version": "3.5.6",
-            "user": "user",
-            "password": "passwd",
-        }
-        self.port_mapping = [
-            {
-                "compute_node": "compute node 1",
-                "ports": [
-                    {
-                        "pci": "0000:81:00.0",
-                        "switch_port": "port-2/1",
-                        "switch_mac": "52:54:00:94:21:21",
-                    },
-                    {
-                        "pci": "0000:81:00.1",
-                        "switch_port": "port-2/2",
-                        "switch_mac": "52:54:00:94:21:22",
-                    },
-                ],
-            },
-            {
-                "compute_node": "compute node 2",
-                "ports": [
-                    {
-                        "pci": "0000:81:00.0",
-                        "switch_port": "port-2/3",
-                        "switch_mac": "52:54:00:94:21:23",
-                    },
-                    {
-                        "pci": "0000:81:00.1",
-                        "switch_port": "port-2/4",
-                        "switch_mac": "52:54:00:94:21:24",
-                    },
-                ],
-            },
-        ]
-
-    def run(self, engine, test_osm, manual_check, test_params=None):
-
-        vim_bad = self.vim.copy()
-        vim_bad.pop("name")
-
-        engine.set_test_name("FakeVim")
-        engine.get_autorization()
-        engine.test(
-            "Create VIM",
-            "POST",
-            "/admin/v1/vim_accounts",
-            headers_json,
-            self.vim,
-            (201, 202),
-            {"Location": "/admin/v1/vim_accounts/", "Content-Type": "application/json"},
-            "json",
-        )
-        vim_id = engine.last_id
-        engine.test(
-            "Create VIM without name, bad schema",
-            "POST",
-            "/admin/v1/vim_accounts",
-            headers_json,
-            vim_bad,
-            422,
-            None,
-            "json",
-        )
-        engine.test(
-            "Create VIM name repeated",
-            "POST",
-            "/admin/v1/vim_accounts",
-            headers_json,
-            self.vim,
-            409,
-            None,
-            "json",
-        )
-        engine.test(
-            "Show VIMs",
-            "GET",
-            "/admin/v1/vim_accounts",
-            headers_yaml,
-            None,
-            200,
-            r_header_yaml,
-            "yaml",
-        )
-        engine.test(
-            "Show VIM",
-            "GET",
-            "/admin/v1/vim_accounts/{}".format(vim_id),
-            headers_yaml,
-            None,
-            200,
-            r_header_yaml,
-            "yaml",
-        )
-        if not test_osm:
-            # delete with FORCE
-            engine.test(
-                "Delete VIM",
-                "DELETE",
-                "/admin/v1/vim_accounts/{}?FORCE=True".format(vim_id),
-                headers_yaml,
-                None,
-                202,
-                None,
-                0,
-            )
-            engine.test(
-                "Check VIM is deleted",
-                "GET",
-                "/admin/v1/vim_accounts/{}".format(vim_id),
-                headers_yaml,
-                None,
-                404,
-                r_header_yaml,
-                "yaml",
-            )
-        else:
-            # delete and wait until is really deleted
-            engine.test(
-                "Delete VIM",
-                "DELETE",
-                "/admin/v1/vim_accounts/{}".format(vim_id),
-                headers_yaml,
-                None,
-                202,
-                None,
-                0,
-            )
-            engine.wait_until_delete(
-                "/admin/v1/vim_accounts/{}".format(vim_id), timeout
-            )
-
-
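-# When running against a real OSM (--test-osm), the deletions above return 202 and the
-# engine polls until the resource is gone (engine.wait_until_delete); without a real OSM,
-# ?FORCE=True is used so the NBI removes the record straight away. A minimal polling
-# sketch with `requests` (illustrative `base_url`/`headers`; the 5-second period is an
-# arbitrary choice):
-def _sketch_wait_until_deleted(base_url, headers, item_url, timeout=120):
-    elapsed = 0
-    while elapsed < timeout:
-        resp = requests.get(base_url + item_url, headers=headers)
-        if resp.status_code == 404:
-            return True
-        sleep(5)
-        elapsed += 5
-    return False
-
-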
-class TestVIMSDN(TestFakeVim):
-    description = "Creates VIM with SDN editing SDN controllers and port_mapping"
-
-    def __init__(self):
-        TestFakeVim.__init__(self)
-        self.wim = {
-            "schema_version": "1.0",
-            "schema_type": "No idea",
-            "name": "myWim",
-            "description": "Descriptor name",
-            "wim_type": "odl",
-            "wim_url": "http://localhost:/wim",
-            "user": "user",
-            "password": "password",
-            "config": {"config_param": 1},
-        }
-
-    def run(self, engine, test_osm, manual_check, test_params=None):
-        engine.set_test_name("VimSdn")
-        engine.get_autorization()
-        # Added SDN
-        engine.test(
-            "Create SDN",
-            "POST",
-            "/admin/v1/sdns",
-            headers_json,
-            self.sdn,
-            (201, 202),
-            {"Location": "/admin/v1/sdns/", "Content-Type": "application/json"},
-            "json",
-        )
-        sdnc_id = engine.last_id
-        # sleep(5)
-        # Edit SDN
-        engine.test(
-            "Edit SDN",
-            "PATCH",
-            "/admin/v1/sdns/{}".format(sdnc_id),
-            headers_json,
-            {"name": "new_sdn_name"},
-            (202, 204),
-            None,
-            None,
-        )
-        # sleep(5)
-        # VIM with SDN
-        self.vim["config"]["sdn-controller"] = sdnc_id
-        self.vim["config"]["sdn-port-mapping"] = self.port_mapping
-        engine.test(
-            "Create VIM",
-            "POST",
-            "/admin/v1/vim_accounts",
-            headers_json,
-            self.vim,
-            (200, 202, 201),
-            {"Location": "/admin/v1/vim_accounts/", "Content-Type": "application/json"},
-            "json",
-        )
-
-        vim_id = engine.last_id
-        self.port_mapping[0]["compute_node"] = "compute node XX"
-        engine.test(
-            "Edit VIM change port-mapping",
-            "PUT",
-            "/admin/v1/vim_accounts/{}".format(vim_id),
-            headers_json,
-            {"config": {"sdn-port-mapping": self.port_mapping}},
-            (202, 204),
-            None,
-            None,
-        )
-        engine.test(
-            "Edit VIM remove port-mapping",
-            "PUT",
-            "/admin/v1/vim_accounts/{}".format(vim_id),
-            headers_json,
-            {"config": {"sdn-port-mapping": None}},
-            (202, 204),
-            None,
-            None,
-        )
-
-        engine.test(
-            "Create WIM",
-            "POST",
-            "/admin/v1/wim_accounts",
-            headers_json,
-            self.wim,
-            (200, 202, 201),
-            {"Location": "/admin/v1/wim_accounts/", "Content-Type": "application/json"},
-            "json",
-        )
-        wim_id = engine.last_id
-
-        if not test_osm:
-            # delete with FORCE
-            engine.test(
-                "Delete VIM remove port-mapping",
-                "DELETE",
-                "/admin/v1/vim_accounts/{}?FORCE=True".format(vim_id),
-                headers_json,
-                None,
-                202,
-                None,
-                0,
-            )
-            engine.test(
-                "Delete SDNC",
-                "DELETE",
-                "/admin/v1/sdns/{}?FORCE=True".format(sdnc_id),
-                headers_json,
-                None,
-                202,
-                None,
-                0,
-            )
-
-            engine.test(
-                "Delete WIM",
-                "DELETE",
-                "/admin/v1/wim_accounts/{}?FORCE=True".format(wim_id),
-                headers_json,
-                None,
-                202,
-                None,
-                0,
-            )
-            engine.test(
-                "Check VIM is deleted",
-                "GET",
-                "/admin/v1/vim_accounts/{}".format(vim_id),
-                headers_yaml,
-                None,
-                404,
-                r_header_yaml,
-                "yaml",
-            )
-            engine.test(
-                "Check SDN is deleted",
-                "GET",
-                "/admin/v1/sdns/{}".format(sdnc_id),
-                headers_yaml,
-                None,
-                404,
-                r_header_yaml,
-                "yaml",
-            )
-            engine.test(
-                "Check WIM is deleted",
-                "GET",
-                "/admin/v1/wim_accounts/{}".format(wim_id),
-                headers_yaml,
-                None,
-                404,
-                r_header_yaml,
-                "yaml",
-            )
-        else:
-            if manual_check:
-                input(
-                    "VIM, SDN, WIM has been deployed. Perform manual check and press enter to resume"
-                )
-            # delete and wait until is really deleted
-            engine.test(
-                "Delete VIM remove port-mapping",
-                "DELETE",
-                "/admin/v1/vim_accounts/{}".format(vim_id),
-                headers_json,
-                None,
-                (202, 201, 204),
-                None,
-                0,
-            )
-            engine.test(
-                "Delete SDN",
-                "DELETE",
-                "/admin/v1/sdns/{}".format(sdnc_id),
-                headers_json,
-                None,
-                (202, 201, 204),
-                None,
-                0,
-            )
-            engine.test(
-                "Delete VIM",
-                "DELETE",
-                "/admin/v1/wim_accounts/{}".format(wim_id),
-                headers_json,
-                None,
-                (202, 201, 204),
-                None,
-                0,
-            )
-            engine.wait_until_delete(
-                "/admin/v1/vim_accounts/{}".format(vim_id), timeout
-            )
-            engine.wait_until_delete("/admin/v1/sdns/{}".format(sdnc_id), timeout)
-            engine.wait_until_delete(
-                "/admin/v1/wim_accounts/{}".format(wim_id), timeout
-            )
-
-
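-# TestVIMSDN above attaches the SDN controller to the VIM through the VIM "config" block;
-# the body sent in its "Create VIM" step ends up shaped roughly like this (sketch, values
-# illustrative):
-#
-#   config:
-#     config_param: 1
-#     sdn-controller: <id returned when the SDN controller was created>
-#     sdn-port-mapping:
-#       - compute_node: compute node 1
-#         ports:
-#           - {pci: "0000:81:00.0", switch_port: port-2/1, switch_mac: "52:54:00:94:21:21"}
-#
-# Editing or clearing the mapping later is just a PUT on the vim_account with a new
-# config.sdn-port-mapping value (or null to remove it), as done in the same test.
-
-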
-class TestDeploy:
-    description = "Base class for downloading descriptors from ETSI, onboard and deploy in real VIM"
-
-    def __init__(self):
-        self.test_name = "DEPLOY"
-        self.nsd_id = None
-        self.vim_id = None
-        self.ns_id = None
-        self.vnfds_id = []
-        self.descriptor_url = (
-            "https://osm-download.etsi.org/ftp/osm-3.0-three/2nd-hackfest/packages/"
-        )
-        self.vnfd_filenames = ("cirros_vnf.tar.gz",)
-        self.nsd_filename = "cirros_2vnf_ns.tar.gz"
-        self.descriptor_edit = None
-        self.uses_configuration = False
-        self.users = {}
-        self.passwords = {}
-        self.commands = {}
-        self.keys = {}
-        self.timeout = 120
-        self.qforce = ""
-        self.ns_params = None
-        self.vnfr_ip_list = {}
-
-    def create_descriptors(self, engine):
-        temp_dir = os.path.dirname(os.path.abspath(__file__)) + "/temp/"
-        if not os.path.exists(temp_dir):
-            os.makedirs(temp_dir)
-        for vnfd_index, vnfd_filename in enumerate(self.vnfd_filenames):
-            if "/" in vnfd_filename:
-                vnfd_filename_path = vnfd_filename
-                if not os.path.exists(vnfd_filename_path):
-                    raise TestException(
-                        "File '{}' does not exist".format(vnfd_filename_path)
-                    )
-            else:
-                vnfd_filename_path = temp_dir + vnfd_filename
-                if not os.path.exists(vnfd_filename_path):
-                    with open(vnfd_filename_path, "wb") as file:
-                        response = requests.get(self.descriptor_url + vnfd_filename)
-                        if response.status_code >= 300:
-                            raise TestException(
-                                "Error downloading descriptor from '{}': {}".format(
-                                    self.descriptor_url + vnfd_filename,
-                                    response.status_code,
-                                )
-                            )
-                        file.write(response.content)
-            if vnfd_filename_path.endswith(".yaml"):
-                headers = headers_yaml
-            else:
-                headers = headers_zip_yaml
-            if randint(0, 1) == 0:
-                # vnfd CREATE AND UPLOAD in one step:
-                engine.test(
-                    "Onboard VNFD in one step",
-                    "POST",
-                    "/vnfpkgm/v1/vnf_packages_content" + self.qforce,
-                    headers,
-                    "@b" + vnfd_filename_path,
-                    201,
-                    r_headers_yaml_location_vnfd,
-                    "yaml",
-                )
-                self.vnfds_id.append(engine.last_id)
-            else:
-                # vnfd CREATE AND UPLOAD ZIP
-                engine.test(
-                    "Onboard VNFD step 1",
-                    "POST",
-                    "/vnfpkgm/v1/vnf_packages",
-                    headers_json,
-                    None,
-                    201,
-                    {
-                        "Location": "/vnfpkgm/v1/vnf_packages/",
-                        "Content-Type": "application/json",
-                    },
-                    "json",
-                )
-                self.vnfds_id.append(engine.last_id)
-                engine.test(
-                    "Onboard VNFD step 2 as ZIP",
-                    "PUT",
-                    "/vnfpkgm/v1/vnf_packages/<>/package_content" + self.qforce,
-                    headers,
-                    "@b" + vnfd_filename_path,
-                    204,
-                    None,
-                    0,
-                )
-
-            if self.descriptor_edit:
-                if "vnfd{}".format(vnfd_index) in self.descriptor_edit:
-                    # Modify VNFD
-                    engine.test(
-                        "Edit VNFD ",
-                        "PATCH",
-                        "/vnfpkgm/v1/vnf_packages/{}".format(self.vnfds_id[-1]),
-                        headers_yaml,
-                        self.descriptor_edit["vnfd{}".format(vnfd_index)],
-                        204,
-                        None,
-                        None,
-                    )
-
-        if "/" in self.nsd_filename:
-            nsd_filename_path = self.nsd_filename
-            if not os.path.exists(nsd_filename_path):
-                raise TestException(
-                    "File '{}' does not exist".format(nsd_filename_path)
-                )
-        else:
-            nsd_filename_path = temp_dir + self.nsd_filename
-            if not os.path.exists(nsd_filename_path):
-                with open(nsd_filename_path, "wb") as file:
-                    response = requests.get(self.descriptor_url + self.nsd_filename)
-                    if response.status_code >= 300:
-                        raise TestException(
-                            "Error downloading descriptor from '{}': {}".format(
-                                self.descriptor_url + self.nsd_filename,
-                                response.status_code,
-                            )
-                        )
-                    file.write(response.content)
-        if nsd_filename_path.endswith(".yaml"):
-            headers = headers_yaml
-        else:
-            headers = headers_zip_yaml
-
-        if randint(0, 1) == 0:
-            # nsd CREATE AND UPLOAD in one step:
-            engine.test(
-                "Onboard NSD in one step",
-                "POST",
-                "/nsd/v1/ns_descriptors_content" + self.qforce,
-                headers,
-                "@b" + nsd_filename_path,
-                201,
-                r_headers_yaml_location_nsd,
-                "yaml",
-            )
-            self.nsd_id = engine.last_id
-        else:
-            # nsd CREATE AND UPLOAD ZIP
-            engine.test(
-                "Onboard NSD step 1",
-                "POST",
-                "/nsd/v1/ns_descriptors",
-                headers_json,
-                None,
-                201,
-                {
-                    "Location": "/nsd/v1/ns_descriptors/",
-                    "Content-Type": "application/json",
-                },
-                "json",
-            )
-            self.nsd_id = engine.last_id
-            engine.test(
-                "Onboard NSD step 2 as ZIP",
-                "PUT",
-                "/nsd/v1/ns_descriptors/<>/nsd_content" + self.qforce,
-                headers,
-                "@b" + nsd_filename_path,
-                204,
-                None,
-                0,
-            )
-
-        if self.descriptor_edit and "nsd" in self.descriptor_edit:
-            # Modify NSD
-            engine.test(
-                "Edit NSD ",
-                "PATCH",
-                "/nsd/v1/ns_descriptors/{}".format(self.nsd_id),
-                headers_yaml,
-                self.descriptor_edit["nsd"],
-                204,
-                None,
-                None,
-            )
-
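-    # The two-step onboarding above maps to plain SOL005 calls; a rough sketch with
-    # `requests` (illustrative `base_url`/`headers`; the gzip content type is an
-    # assumption, the real headers_zip_yaml constant is defined elsewhere in this file):
-    #
-    #   r = requests.post(base_url + "/vnfpkgm/v1/vnf_packages", headers=headers)
-    #   pkg_id = r.json()["id"]
-    #   with open("cirros_vnf.tar.gz", "rb") as f:
-    #       requests.put(
-    #           base_url + "/vnfpkgm/v1/vnf_packages/{}/package_content".format(pkg_id),
-    #           headers=dict(headers, **{"Content-Type": "application/gzip"}),
-    #           data=f,
-    #       )
-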
-    def delete_descriptors(self, engine):
-        # delete descriptors
-        engine.test(
-            "Delete NSSD SOL005",
-            "DELETE",
-            "/nsd/v1/ns_descriptors/{}".format(self.nsd_id),
-            headers_yaml,
-            None,
-            204,
-            None,
-            0,
-        )
-        for vnfd_id in self.vnfds_id:
-            engine.test(
-                "Delete VNFD SOL005",
-                "DELETE",
-                "/vnfpkgm/v1/vnf_packages/{}".format(vnfd_id),
-                headers_yaml,
-                None,
-                204,
-                None,
-                0,
-            )
-
-    def instantiate(self, engine, ns_data):
-        ns_data_text = yaml.safe_dump(ns_data, default_flow_style=True, width=256)
-        # create NS Two steps
-        r = engine.test(
-            "Create NS step 1",
-            "POST",
-            "/nslcm/v1/ns_instances",
-            headers_yaml,
-            ns_data_text,
-            (201, 202),
-            {"Location": "nslcm/v1/ns_instances/", "Content-Type": "application/yaml"},
-            "yaml",
-        )
-        if not r:
-            return
-        self.ns_id = engine.last_id
-        engine.test(
-            "Instantiate NS step 2",
-            "POST",
-            "/nslcm/v1/ns_instances/{}/instantiate".format(self.ns_id),
-            headers_yaml,
-            ns_data_text,
-            (201, 202),
-            r_headers_yaml_location_nslcmop,
-            "yaml",
-        )
-        nslcmop_id = engine.last_id
-
-        if test_osm:
-            # Wait until status is Ok
-            timeout = timeout_configure if self.uses_configuration else timeout_deploy
-            engine.wait_operation_ready("ns", nslcmop_id, timeout)
-
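-    # Instantiation above is the usual two-step SOL005 flow: POST the NS resource, then
-    # POST .../instantiate on it and wait for the returned ns_lcm_op_occ to reach a
-    # terminal state. A rough sketch with `requests` (illustrative `base_url`/`headers`,
-    # `nsd_id` and `vim_id`):
-    #
-    #   body = {"nsName": "demo", "nsDescription": "demo",
-    #           "nsdId": nsd_id, "vimAccountId": vim_id}
-    #   r = requests.post(base_url + "/nslcm/v1/ns_instances", headers=headers, json=body)
-    #   ns_id = r.json()["id"]
-    #   requests.post(base_url + "/nslcm/v1/ns_instances/{}/instantiate".format(ns_id),
-    #                 headers=headers, json=body)
-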
-    def terminate(self, engine):
-        # remove deployment
-        if test_osm:
-            engine.test(
-                "Terminate NS",
-                "POST",
-                "/nslcm/v1/ns_instances/{}/terminate".format(self.ns_id),
-                headers_yaml,
-                None,
-                (201, 202),
-                r_headers_yaml_location_nslcmop,
-                "yaml",
-            )
-            nslcmop2_id = engine.last_id
-            # Wait until status is Ok
-            engine.wait_operation_ready("ns", nslcmop2_id, timeout_deploy)
-
-            engine.test(
-                "Delete NS",
-                "DELETE",
-                "/nslcm/v1/ns_instances/{}".format(self.ns_id),
-                headers_yaml,
-                None,
-                204,
-                None,
-                0,
-            )
-        else:
-            engine.test(
-                "Delete NS with FORCE",
-                "DELETE",
-                "/nslcm/v1/ns_instances/{}?FORCE=True".format(self.ns_id),
-                headers_yaml,
-                None,
-                204,
-                None,
-                0,
-            )
-
-        # check everything is deleted
-        engine.test(
-            "Check NS is deleted",
-            "GET",
-            "/nslcm/v1/ns_instances/{}".format(self.ns_id),
-            headers_yaml,
-            None,
-            404,
-            None,
-            "yaml",
-        )
-        r = engine.test(
-            "Check NSLCMOPs are deleted",
-            "GET",
-            "/nslcm/v1/ns_lcm_op_occs?nsInstanceId={}".format(self.ns_id),
-            headers_json,
-            None,
-            200,
-            None,
-            "json",
-        )
-        if not r:
-            return
-        nslcmops = r.json()
-        if not isinstance(nslcmops, list) or nslcmops:
-            raise TestException(
-                "NS {} deleted but with ns_lcm_op_occ active: {}".format(
-                    self.ns_id, nslcmops
-                )
-            )
-
-    def test_ns(
-        self,
-        engine,
-        test_osm,
-        commands=None,
-        users=None,
-        passwds=None,
-        keys=None,
-        timeout=0,
-    ):
-
-        r = engine.test(
-            "GET VNFR IDs",
-            "GET",
-            "/nslcm/v1/ns_instances/{}".format(self.ns_id),
-            headers_json,
-            None,
-            200,
-            r_header_json,
-            "json",
-        )
-        if not r:
-            return
-        ns_data = r.json()
-
-        vnfr_list = ns_data["constituent-vnfr-ref"]
-        time = 0
-        _commands = commands if commands is not None else self.commands
-        _users = users if users is not None else self.users
-        _passwds = passwds if passwds is not None else self.passwords
-        _keys = keys if keys is not None else self.keys
-        _timeout = timeout if timeout != 0 else self.timeout
-
-        # vnfr_list=[d8272263-6bd3-4680-84ca-6a4be23b3f2d, 88b22e2f-994a-4b61-94fd-4a3c90de3dc4]
-        for vnfr_id in vnfr_list:
-            r = engine.test(
-                "Get VNFR to get IP_ADDRESS",
-                "GET",
-                "/nslcm/v1/vnfrs/{}".format(vnfr_id),
-                headers_json,
-                None,
-                200,
-                r_header_json,
-                "json",
-            )
-            if not r:
-                continue
-            vnfr_data = r.json()
-
-            vnf_index = str(vnfr_data["member-vnf-index-ref"])
-
-            ip_address = self.get_vnfr_ip(engine, vnf_index)
-            description = "Exec command='{}' at VNFR={} IP={}".format(
-                _commands.get(vnf_index)[0], vnf_index, ip_address
-            )
-            engine.step += 1
-            test_description = "{}{} {}".format(
-                engine.test_name, engine.step, description
-            )
-            logger.warning(test_description)
-            while _timeout >= time:
-                result, message = self.do_checks(
-                    [ip_address],
-                    vnf_index=vnfr_data["member-vnf-index-ref"],
-                    commands=_commands.get(vnf_index),
-                    user=_users.get(vnf_index),
-                    passwd=_passwds.get(vnf_index),
-                    key=_keys.get(vnf_index),
-                )
-                if result == 1:
-                    engine.passed_tests += 1
-                    logger.debug(message)
-                    break
-                elif result == 0:
-                    time += 20
-                    sleep(20)
-                elif result == -1:
-                    engine.failed_tests += 1
-                    logger.error(message)
-                    break
-                else:
-                    time -= 20
-                    engine.failed_tests += 1
-                    logger.error(message)
-            else:
-                engine.failed_tests += 1
-                logger.error(
-                    "VNFR {} has not mgmt address. Check failed".format(vnf_index)
-                )
-
-    def do_checks(self, ip, vnf_index, commands=[], user=None, passwd=None, key=None):
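-        # Return convention relied upon by test_ns() above:
-        #   ( 1, msg)  command succeeded on the VNF            -> test passes
-        #   ( 0, msg)  SSH not reachable yet (timeout/channel) -> caller sleeps and retries
-        #   (-1, msg)  command failed or unexpected error      -> test fails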
-        try:
-            import urllib3
-            from pssh.clients import ParallelSSHClient
-            from pssh.utils import load_private_key
-            from ssh2 import exceptions as ssh2Exception
-        except ImportError as e:
-            logger.critical(
-                "Package <pssh> or/and <urllib3> is not installed. Please add them with 'pip3 install "
-                "parallel-ssh urllib3': {}".format(e)
-            )
-            return -1, "install needed packages 'pip3 install parallel-ssh urllib3'"
-        urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)
-        try:
-            p_host = os.environ.get("PROXY_HOST")
-            p_user = os.environ.get("PROXY_USER")
-            p_password = os.environ.get("PROXY_PASSWD")
-
-            if key:
-                pkey = load_private_key(key)
-            else:
-                pkey = None
-
-            client = ParallelSSHClient(
-                ip,
-                user=user,
-                password=passwd,
-                pkey=pkey,
-                proxy_host=p_host,
-                proxy_user=p_user,
-                proxy_password=p_password,
-                timeout=10,
-                num_retries=0,
-            )
-            for cmd in commands:
-                output = client.run_command(cmd)
-                client.join(output)
-                if output[ip[0]].exit_code:
-                    return -1, "VNFR {} command '{}' returns error: '{}'".format(
-                        ip[0], cmd, "\n".join(output[ip[0]].stderr)
-                    )
-                else:
-                    return 1, "VNFR {} command '{}' successful".format(ip[0], cmd)
-        except (
-            ssh2Exception.ChannelFailure,
-            ssh2Exception.SocketDisconnectError,
-            ssh2Exception.SocketTimeout,
-            ssh2Exception.SocketRecvError,
-        ) as e:
-            return 0, "Timeout accessing the VNFR {}: {}".format(ip[0], str(e))
-        except Exception as e:
-            return -1, "ERROR checking the VNFR {}: {}".format(ip[0], str(e))
-
-    def additional_operations(self, engine, test_osm, manual_check):
-        pass
-
-    def run(self, engine, test_osm, manual_check, test_params=None):
-        engine.set_test_name(self.test_name)
-        engine.get_autorization()
-        nsname = os.environ.get("OSMNBITEST_NS_NAME", "OSMNBITEST")
-        if test_params:
-            if "vnfd-files" in test_params:
-                self.vnfd_filenames = test_params["vnfd-files"].split(",")
-            if "nsd-file" in test_params:
-                self.nsd_filename = test_params["nsd-file"]
-            if test_params.get("ns-name"):
-                nsname = test_params["ns-name"]
-        self.create_descriptors(engine)
-
-        # create real VIM if not exist
-        self.vim_id = engine.get_create_vim(test_osm)
-        ns_data = {
-            "nsDescription": "default description",
-            "nsName": nsname,
-            "nsdId": self.nsd_id,
-            "vimAccountId": self.vim_id,
-        }
-        if self.ns_params:
-            ns_data.update(self.ns_params)
-        if test_params and test_params.get("ns-config"):
-            if isinstance(test_params["ns-config"], str):
-                ns_data.update(yaml.load(test_params["ns-config"], Loader=yaml.Loader))
-            else:
-                ns_data.update(test_params["ns-config"])
-        self.instantiate(engine, ns_data)
-
-        if manual_check:
-            input(
-                "NS has been deployed. Perform manual check and press enter to resume"
-            )
-        if test_osm and self.commands:
-            self.test_ns(engine, test_osm)
-        self.additional_operations(engine, test_osm, manual_check)
-        self.terminate(engine)
-        self.delete_descriptors(engine)
-
-    def get_first_ip(self, ip_string):
-        # When using a floating IP, vnfr_data['ip-address'] contains a semicolon-separated list of IPs.
-        first_ip = ip_string.split(";")[0] if ip_string else ""
-        return first_ip
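-        # e.g. get_first_ip("10.0.0.15;172.21.248.2") -> "10.0.0.15"
-        #      get_first_ip("")                       -> ""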
-
-    def get_vnfr_ip(self, engine, vnfr_index_wanted):
-        # If the IP address list has been obtained before, it has been stored in 'vnfr_ip_list'
-        ip = self.vnfr_ip_list.get(vnfr_index_wanted, "")
-        if ip:
-            return self.get_first_ip(ip)
-        r = engine.test(
-            "Get VNFR to get IP_ADDRESS",
-            "GET",
-            "/nslcm/v1/vnfrs?member-vnf-index-ref={}&nsr-id-ref={}".format(
-                vnfr_index_wanted, self.ns_id
-            ),
-            headers_json,
-            None,
-            200,
-            r_header_json,
-            "json",
-        )
-        if not r:
-            return ""
-        vnfr_data = r.json()
-        if not (vnfr_data and vnfr_data[0]):
-            return ""
-        # Store the IP (or list of IPs) in 'vnfr_ip_list'
-        ip_list = vnfr_data[0].get("ip-address", "")
-        if ip_list:
-            self.vnfr_ip_list[vnfr_index_wanted] = ip_list
-            ip = self.get_first_ip(ip_list)
-        return ip
-
-
-class TestDeployHackfestCirros(TestDeploy):
-    description = "Load and deploy Hackfest cirros_2vnf_ns example"
-
-    def __init__(self):
-        super().__init__()
-        self.test_name = "CIRROS"
-        self.vnfd_filenames = ("cirros_vnf.tar.gz",)
-        self.nsd_filename = "cirros_2vnf_ns.tar.gz"
-        self.commands = {
-            "1": [
-                "ls -lrt",
-            ],
-            "2": [
-                "ls -lrt",
-            ],
-        }
-        self.users = {"1": "cirros", "2": "cirros"}
-        self.passwords = {"1": "cubswin:)", "2": "cubswin:)"}
-
-    def terminate(self, engine):
-        # Delete in one step, overriding the normal two-step flow of TestDeploy (terminate, then delete)
-        if test_osm:
-            engine.test(
-                "Terminate and delete NS in one step",
-                "DELETE",
-                "/nslcm/v1/ns_instances_content/{}".format(self.ns_id),
-                headers_yaml,
-                None,
-                202,
-                None,
-                "yaml",
-            )
-
-            engine.wait_until_delete(
-                "/nslcm/v1/ns_instances/{}".format(self.ns_id), timeout_deploy
-            )
-        else:
-            engine.test(
-                "Delete NS with FORCE",
-                "DELETE",
-                "/nslcm/v1/ns_instances/{}?FORCE=True".format(self.ns_id),
-                headers_yaml,
-                None,
-                204,
-                None,
-                0,
-            )
-
-        # check everything is deleted
-        engine.test(
-            "Check NS is deleted",
-            "GET",
-            "/nslcm/v1/ns_instances/{}".format(self.ns_id),
-            headers_yaml,
-            None,
-            404,
-            None,
-            "yaml",
-        )
-        r = engine.test(
-            "Check NSLCMOPs are deleted",
-            "GET",
-            "/nslcm/v1/ns_lcm_op_occs?nsInstanceId={}".format(self.ns_id),
-            headers_json,
-            None,
-            200,
-            None,
-            "json",
-        )
-        if not r:
-            return
-        nslcmops = r.json()
-        if not isinstance(nslcmops, list) or nslcmops:
-            raise TestException(
-                "NS {} deleted but with ns_lcm_op_occ active: {}".format(
-                    self.ns_id, nslcmops
-                )
-            )
-
-
-class TestDeployHackfest1(TestDeploy):
-    description = "Load and deploy Hackfest_1_vnfd example"
-
-    def __init__(self):
-        super().__init__()
-        self.test_name = "HACKFEST1-"
-        self.vnfd_filenames = ("hackfest_1_vnfd.tar.gz",)
-        self.nsd_filename = "hackfest_1_nsd.tar.gz"
-        # self.commands = {'1': ['ls -lrt', ], '2': ['ls -lrt', ]}
-        # self.users = {'1': "cirros", '2': "cirros"}
-        # self.passwords = {'1': "cubswin:)", '2': "cubswin:)"}
-
-
-class TestDeployHackfestCirrosScaling(TestDeploy):
-    description = (
-        "Load and deploy Hackfest cirros_2vnf_ns example with scaling modifications"
-    )
-
-    def __init__(self):
-        super().__init__()
-        self.test_name = "CIRROS-SCALE"
-        self.vnfd_filenames = ("cirros_vnf.tar.gz",)
-        self.nsd_filename = "cirros_2vnf_ns.tar.gz"
-        # Modify VNFD to add scaling and count=2
-        self.descriptor_edit = {
-            "vnfd0": {
-                "vdu": {"$id: 'cirros_vnfd-VM'": {"count": 2}},
-                "scaling-group-descriptor": [
-                    {
-                        "name": "scale_cirros",
-                        "max-instance-count": 2,
-                        "vdu": [{"vdu-id-ref": "cirros_vnfd-VM", "count": 2}],
-                    }
-                ],
-            }
-        }
-
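-    # The scale payloads used below are inline YAML; parsed, the scale-out request is
-    # equivalent to:
-    #   {"scaleType": "SCALE_VNF",
-    #    "scaleVnfData": {"scaleVnfType": "SCALE_OUT",
-    #                     "scaleByStepData": {"scaling-group-descriptor": "scale_cirros",
-    #                                         "member-vnf-index": "1"}}}
-    # (the scale-in requests only change scaleVnfType to SCALE_IN)
-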
-    def additional_operations(self, engine, test_osm, manual_check):
-        if not test_osm:
-            return
-        # 2 perform scale out twice
-        payload = (
-            "{scaleType: SCALE_VNF, scaleVnfData: {scaleVnfType: SCALE_OUT, scaleByStepData: "
-            '{scaling-group-descriptor: scale_cirros, member-vnf-index: "1"}}}'
-        )
-        for i in range(0, 2):
-            engine.test(
-                "Execute scale action over NS",
-                "POST",
-                "/nslcm/v1/ns_instances/{}/scale".format(self.ns_id),
-                headers_yaml,
-                payload,
-                (201, 202),
-                r_headers_yaml_location_nslcmop,
-                "yaml",
-            )
-            nslcmop2_scale_out = engine.last_id
-            engine.wait_operation_ready("ns", nslcmop2_scale_out, timeout_deploy)
-            if manual_check:
-                input("NS scale out done. Check that two more vdus are there")
-            # TODO check automatic
-
-        # 2 perform scale in
-        payload = (
-            "{scaleType: SCALE_VNF, scaleVnfData: {scaleVnfType: SCALE_IN, scaleByStepData: "
-            '{scaling-group-descriptor: scale_cirros, member-vnf-index: "1"}}}'
-        )
-        for i in range(0, 2):
-            engine.test(
-                "Execute scale IN action over NS",
-                "POST",
-                "/nslcm/v1/ns_instances/{}/scale".format(self.ns_id),
-                headers_yaml,
-                payload,
-                (201, 202),
-                r_headers_yaml_location_nslcmop,
-                "yaml",
-            )
-            nslcmop2_scale_in = engine.last_id
-            engine.wait_operation_ready("ns", nslcmop2_scale_in, timeout_deploy)
-            if manual_check:
-                input("NS scale in done. Check that two less vdus are there")
-            # TODO check automatic
-
-        # perform scale in that must fail as reached limit
-        engine.test(
-            "Execute scale IN out of limit action over NS",
-            "POST",
-            "/nslcm/v1/ns_instances/{}/scale".format(self.ns_id),
-            headers_yaml,
-            payload,
-            (201, 202),
-            r_headers_yaml_location_nslcmop,
-            "yaml",
-        )
-        nslcmop2_scale_in = engine.last_id
-        engine.wait_operation_ready(
-            "ns", nslcmop2_scale_in, timeout_deploy, expected_fail=True
-        )
-
-
-class TestDeployIpMac(TestDeploy):
-    description = "Load and deploy descriptor examples setting mac, ip address at descriptor and instantiate params"
-
-    def __init__(self):
-        super().__init__()
-        self.test_name = "SetIpMac"
-        self.vnfd_filenames = (
-            "vnfd_2vdu_set_ip_mac2.yaml",
-            "vnfd_2vdu_set_ip_mac.yaml",
-        )
-        self.nsd_filename = "scenario_2vdu_set_ip_mac.yaml"
-        self.descriptor_url = (
-            "https://osm.etsi.org/gitweb/?p=osm/RO.git;a=blob_plain;f=test/RO_tests/v3_2vdu_set_ip_mac/"
-        )
-        self.commands = {
-            "1": [
-                "ls -lrt",
-            ],
-            "2": [
-                "ls -lrt",
-            ],
-        }
-        self.users = {"1": "osm", "2": "osm"}
-        self.passwords = {"1": "osm4u", "2": "osm4u"}
-        self.timeout = 360
-
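-    # run() below (the descriptor-only super().run() call is left commented out)
-    # instantiates the NS passing explicit addressing as instantiation parameters:
-    # an ip-profile for the internal VLD plus fixed ip-address/mac-address values
-    # for selected interfaces.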
-    def run(self, engine, test_osm, manual_check, test_params=None):
-        # super().run(engine, test_osm, manual_check, test_params)
-        # run again setting IPs with instantiate parameters
-        instantiation_params = {
-            "vnf": [
-                {
-                    "member-vnf-index": "1",
-                    "internal-vld": [
-                        {
-                            "name": "internal_vld1",  # net_internal
-                            "ip-profile": {
-                                "ip-version": "ipv4",
-                                "subnet-address": "10.9.8.0/24",
-                                "dhcp-params": {
-                                    "count": 100,
-                                    "start-address": "10.9.8.100",
-                                },
-                            },
-                            "internal-connection-point": [
-                                {
-                                    "id-ref": "eth2",
-                                    "ip-address": "10.9.8.2",
-                                },
-                                {
-                                    "id-ref": "eth3",
-                                    "ip-address": "10.9.8.3",
-                                },
-                            ],
-                        },
-                    ],
-                    "vdu": [
-                        {
-                            "id": "VM1",
-                            "interface": [
-                                # {
-                                #     "name": "iface11",
-                                #     "floating-ip-required": True,
-                                # },
-                                {"name": "iface13", "mac-address": "52:33:44:55:66:13"},
-                            ],
-                        },
-                        {
-                            "id": "VM2",
-                            "interface": [
-                                {
-                                    "name": "iface21",
-                                    "ip-address": "10.31.31.22",
-                                    "mac-address": "52:33:44:55:66:21",
-                                },
-                            ],
-                        },
-                    ],
-                },
-            ]
-        }
-
-        super().run(
-            engine,
-            test_osm,
-            manual_check,
-            test_params={"ns-config": instantiation_params},
-        )
-
-
-class TestDeployHackfest4(TestDeploy):
-    description = "Load and deploy Hackfest 4 example."
-
-    def __init__(self):
-        super().__init__()
-        self.test_name = "HACKFEST4-"
-        self.vnfd_filenames = ("hackfest_4_vnfd.tar.gz",)
-        self.nsd_filename = "hackfest_4_nsd.tar.gz"
-        self.uses_configuration = True
-        self.commands = {
-            "1": [
-                "ls -lrt",
-            ],
-            "2": [
-                "ls -lrt",
-            ],
-        }
-        self.users = {"1": "ubuntu", "2": "ubuntu"}
-        self.passwords = {"1": "osm4u", "2": "osm4u"}
-        # Modify VNFD to add scaling
-        # self.descriptor_edit = {
-        #     "vnfd0": {
-        #         'vnf-configuration': {
-        #             'config-primitive': [{
-        #                 'name': 'touch',
-        #                 'parameter': [{
-        #                     'name': 'filename',
-        #                     'data-type': 'STRING',
-        #                     'default-value': '/home/ubuntu/touched'
-        #                 }]
-        #             }]
-        #         },
-        #         'scaling-group-descriptor': [{
-        #             'name': 'scale_dataVM',
-        #             'scaling-policy': [{
-        #                 'threshold-time': 0,
-        #                 'name': 'auto_cpu_util_above_threshold',
-        #                 'scaling-type': 'automatic',
-        #                 'scaling-criteria': [{
-        #                     'name': 'cpu_util_above_threshold',
-        #                     'vnf-monitoring-param-ref': 'all_aaa_cpu_util',
-        #                     'scale-out-relational-operation': 'GE',
-        #                     'scale-in-threshold': 15,
-        #                     'scale-out-threshold': 60,
-        #                     'scale-in-relational-operation': 'LE'
-        #                 }],
-        #                 'cooldown-time': 60
-        #             }],
-        #             'max-instance-count': 10,
-        #             'scaling-config-action': [
-        #                 {'vnf-config-primitive-name-ref': 'touch',
-        #                  'trigger': 'post-scale-out'},
-        #                 {'vnf-config-primitive-name-ref': 'touch',
-        #                  'trigger': 'pre-scale-in'}
-        #             ],
-        #             'vdu': [{
-        #                 'vdu-id-ref': 'dataVM',
-        #                 'count': 1
-        #             }]
-        #         }]
-        #     }
-        # }
-
-
-class TestDeployHackfest3Charmed(TestDeploy):
-    description = "Load and deploy Hackfest 3charmed_ns example"
-
-    def __init__(self):
-        super().__init__()
-        self.test_name = "HACKFEST3-"
-        self.vnfd_filenames = ("hackfest_3charmed_vnfd.tar.gz",)
-        self.nsd_filename = "hackfest_3charmed_nsd.tar.gz"
-        self.uses_configuration = True
-        self.commands = {
-            "1": ["ls -lrt /home/ubuntu/first-touch"],
-            "2": ["ls -lrt /home/ubuntu/first-touch"],
-        }
-        self.users = {"1": "ubuntu", "2": "ubuntu"}
-        self.passwords = {"1": "osm4u", "2": "osm4u"}
-        self.descriptor_edit = {
-            "vnfd0": yaml.safe_load(
-                """
-                vnf-configuration:
-                    terminate-config-primitive:
-                    -   seq: '1'
-                        name: touch
-                        parameter:
-                        -   name: filename
-                            value: '/home/ubuntu/last-touch1'
-                    -   seq: '3'
-                        name: touch
-                        parameter:
-                        -   name: filename
-                            value: '/home/ubuntu/last-touch3'
-                    -   seq: '2'
-                        name: touch
-                        parameter:
-                        -   name: filename
-                            value: '/home/ubuntu/last-touch2'
-                """
-            )
-        }
-
-    def additional_operations(self, engine, test_osm, manual_check):
-        if not test_osm:
-            return
-        # 1 perform action
-        vnfr_index_selected = "2"
-        payload = '{member_vnf_index: "2", primitive: touch, primitive_params: { filename: /home/ubuntu/OSMTESTNBI }}'
-        engine.test(
-            "Exec service primitive over NS",
-            "POST",
-            "/nslcm/v1/ns_instances/{}/action".format(self.ns_id),
-            headers_yaml,
-            payload,
-            (201, 202),
-            r_headers_yaml_location_nslcmop,
-            "yaml",
-        )
-        nslcmop2_action = engine.last_id
-        # Wait until status is Ok
-        engine.wait_operation_ready("ns", nslcmop2_action, timeout_deploy)
-        vnfr_ip = self.get_vnfr_ip(engine, vnfr_index_selected)
-        if manual_check:
-            input(
-                "NS service primitive has been executed."
-                "Check that file /home/ubuntu/OSMTESTNBI is present at {}".format(
-                    vnfr_ip
-                )
-            )
-        if test_osm:
-            commands = {
-                "1": [""],
-                "2": [
-                    "ls -lrt /home/ubuntu/OSMTESTNBI",
-                ],
-            }
-            self.test_ns(engine, test_osm, commands=commands)
-
-        # # 2 perform scale out
-        # payload = '{scaleType: SCALE_VNF, scaleVnfData: {scaleVnfType: SCALE_OUT, scaleByStepData: ' \
-        #           '{scaling-group-descriptor: scale_dataVM, member-vnf-index: "1"}}}'
-        # engine.test("Execute scale action over NS", "POST",
-        #             "/nslcm/v1/ns_instances/{}/scale".format(self.ns_id), headers_yaml, payload,
-        #             (201, 202), r_headers_yaml_location_nslcmop, "yaml")
-        # nslcmop2_scale_out = engine.last_id
-        # engine.wait_operation_ready("ns", nslcmop2_scale_out, timeout_deploy)
-        # if manual_check:
-        #     input('NS scale out done. Check that file /home/ubuntu/touched is present and new VM is created')
-        # # TODO check automatic
-        #
-        # # 2 perform scale in
-        # payload = '{scaleType: SCALE_VNF, scaleVnfData: {scaleVnfType: SCALE_IN, scaleByStepData: ' \
-        #           '{scaling-group-descriptor: scale_dataVM, member-vnf-index: "1"}}}'
-        # engine.test("Execute scale action over NS", "POST",
-        #             "/nslcm/v1/ns_instances/{}/scale".format(self.ns_id), headers_yaml, payload,
-        #             (201, 202), r_headers_yaml_location_nslcmop, "yaml")
-        # nslcmop2_scale_in = engine.last_id
-        # engine.wait_operation_ready("ns", nslcmop2_scale_in, timeout_deploy)
-        # if manual_check:
-        #     input('NS scale in done. Check that file /home/ubuntu/touched is updated and new VM is deleted')
-        # # TODO check automatic
-
-
-class TestDeployHackfest3Charmed2(TestDeployHackfest3Charmed):
-    description = (
-        "Load and deploy Hackfest 3charmed_ns example modified version of descriptors to have dots in "
-        "ids and member-vnf-index."
-    )
-
-    def __init__(self):
-        super().__init__()
-        self.test_name = "HACKFEST3v2-"
-        self.qforce = "?FORCE=True"
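-        # descriptor_edit addressing, as used throughout this file (assumed semantics of
-        # the recursive-edit helper defined elsewhere in this script): "$[N]" selects the
-        # N-th entry of a list, a value of None removes the selected entry, and
-        # "$id: <value>" selects a list entry by its id field.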
-        self.descriptor_edit = {
-            "vnfd0": {
-                "vdu": {
-                    "$[0]": {
-                        "interface": {
-                            "$[0]": {"external-connection-point-ref": "pdu-mgmt"}
-                        }
-                    },
-                    "$[1]": None,
-                },
-                "vnf-configuration": None,
-                "connection-point": {
-                    "$[0]": {
-                        "id": "pdu-mgmt",
-                        "name": "pdu-mgmt",
-                        "short-name": "pdu-mgmt",
-                    },
-                    "$[1]": None,
-                },
-                "mgmt-interface": {"cp": "pdu-mgmt"},
-                "description": "A vnf single vdu to be used as PDU",
-                "id": "vdu-as-pdu",
-                "internal-vld": {
-                    "$[0]": {
-                        "id": "pdu_internal",
-                        "name": "pdu_internal",
-                        "internal-connection-point": {"$[1]": None},
-                        "short-name": "pdu_internal",
-                        "type": "ELAN",
-                    }
-                },
-            },
-            # Modify NSD accordingly
-            "nsd": {
-                "constituent-vnfd": {
-                    "$[0]": {"vnfd-id-ref": "vdu-as-pdu"},
-                    "$[1]": None,
-                },
-                "description": "A nsd to deploy the vnf to act as as PDU",
-                "id": "nsd-as-pdu",
-                "name": "nsd-as-pdu",
-                "short-name": "nsd-as-pdu",
-                "vld": {
-                    "$[0]": {
-                        "id": "mgmt_pdu",
-                        "name": "mgmt_pdu",
-                        "short-name": "mgmt_pdu",
-                        "vnfd-connection-point-ref": {
-                            "$[0]": {
-                                "vnfd-connection-point-ref": "pdu-mgmt",
-                                "vnfd-id-ref": "vdu-as-pdu",
-                            },
-                            "$[1]": None,
-                        },
-                        "type": "ELAN",
-                    },
-                    "$[1]": None,
-                },
-            },
-        }
-
-
-class TestDeployHackfest3Charmed3(TestDeployHackfest3Charmed):
-    description = "Load and deploy Hackfest 3charmed_ns example modified version to test scaling and NS parameters"
-
-    def __init__(self):
-        super().__init__()
-        self.test_name = "HACKFEST3v3-"
-        self.commands = {
-            "1": ["ls -lrt /home/ubuntu/first-touch-1"],
-            "2": ["ls -lrt /home/ubuntu/first-touch-2"],
-        }
-        self.descriptor_edit = {
-            "vnfd0": yaml.load(
-                """
-                scaling-group-descriptor:
-                    -   name: "scale_dataVM"
-                        max-instance-count: 10
-                        scaling-policy:
-                        -   name: "auto_cpu_util_above_threshold"
-                            scaling-type: "automatic"
-                            threshold-time: 0
-                            cooldown-time: 60
-                            scaling-criteria:
-                            -   name: "cpu_util_above_threshold"
-                                scale-in-threshold: 15
-                                scale-in-relational-operation: "LE"
-                                scale-out-threshold: 60
-                                scale-out-relational-operation: "GE"
-                                vnf-monitoring-param-ref: "monitor1"
-                        vdu:
-                        -   vdu-id-ref: dataVM
-                            count: 1
-                        scaling-config-action:
-                        -   trigger: post-scale-out
-                            vnf-config-primitive-name-ref: touch
-                        -   trigger: pre-scale-in
-                            vnf-config-primitive-name-ref: touch
-                vdu:
-                    "$id: dataVM":
-                        monitoring-param:
-                        -   id: "dataVM_cpu_util"
-                            nfvi-metric: "cpu_utilization"
-
-                monitoring-param:
-                -   id: "monitor1"
-                    name: "monitor1"
-                    aggregation-type: AVERAGE
-                    vdu-monitoring-param:
-                      vdu-ref: "dataVM"
-                      vdu-monitoring-param-ref: "dataVM_cpu_util"
-                vnf-configuration:
-                    initial-config-primitive:
-                        "$[1]":
-                            parameter:
-                                "$[0]":
-                                    value: "<touch_filename>"   # default-value: /home/ubuntu/first-touch
-                    config-primitive:
-                        "$[0]":
-                            parameter:
-                                "$[0]":
-                                    default-value: "<touch_filename2>"
-                """,
-                Loader=yaml.Loader,
-            )
-        }
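-        # The "<touch_filename>" / "<touch_filename2>" placeholders injected above are
-        # expected to be resolved per member-vnf-index from the additionalParamsForVnf
-        # values below.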
-        self.ns_params = {
-            "additionalParamsForVnf": [
-                {
-                    "member-vnf-index": "1",
-                    "additionalParams": {
-                        "touch_filename": "/home/ubuntu/first-touch-1",
-                        "touch_filename2": "/home/ubuntu/second-touch-1",
-                    },
-                },
-                {
-                    "member-vnf-index": "2",
-                    "additionalParams": {
-                        "touch_filename": "/home/ubuntu/first-touch-2",
-                        "touch_filename2": "/home/ubuntu/second-touch-2",
-                    },
-                },
-            ]
-        }
-
-    def additional_operations(self, engine, test_osm, manual_check):
-        super().additional_operations(engine, test_osm, manual_check)
-        if not test_osm:
-            return
-
-        # 2 perform scale out
-        payload = (
-            "{scaleType: SCALE_VNF, scaleVnfData: {scaleVnfType: SCALE_OUT, scaleByStepData: "
-            '{scaling-group-descriptor: scale_dataVM, member-vnf-index: "1"}}}'
-        )
-        engine.test(
-            "Execute scale action over NS",
-            "POST",
-            "/nslcm/v1/ns_instances/{}/scale".format(self.ns_id),
-            headers_yaml,
-            payload,
-            (201, 202),
-            r_headers_yaml_location_nslcmop,
-            "yaml",
-        )
-        nslcmop2_scale_out = engine.last_id
-        engine.wait_operation_ready("ns", nslcmop2_scale_out, timeout_deploy)
-        if manual_check:
-            input(
-                "NS scale out done. Check that file /home/ubuntu/second-touch-1 is present and new VM is created"
-            )
-        if test_osm:
-            commands = {
-                "1": [
-                    "ls -lrt /home/ubuntu/second-touch-1",
-                ]
-            }
-            self.test_ns(engine, test_osm, commands=commands)
-            # TODO check automatic connection to scaled VM
-
-        # 2 perform scale in
-        payload = (
-            "{scaleType: SCALE_VNF, scaleVnfData: {scaleVnfType: SCALE_IN, scaleByStepData: "
-            '{scaling-group-descriptor: scale_dataVM, member-vnf-index: "1"}}}'
-        )
-        engine.test(
-            "Execute scale action over NS",
-            "POST",
-            "/nslcm/v1/ns_instances/{}/scale".format(self.ns_id),
-            headers_yaml,
-            payload,
-            (201, 202),
-            r_headers_yaml_location_nslcmop,
-            "yaml",
-        )
-        nslcmop2_scale_in = engine.last_id
-        engine.wait_operation_ready("ns", nslcmop2_scale_in, timeout_deploy)
-        if manual_check:
-            input(
-                "NS scale in done. Check that file /home/ubuntu/second-touch-1 is updated and new VM is deleted"
-            )
-        # TODO check automatic
-
-
-class TestDeploySimpleCharm(TestDeploy):
-    description = "Deploy hackfest-4 hackfest_simplecharm example"
-
-    def __init__(self):
-        super().__init__()
-        self.test_name = "HACKFEST-SIMPLE"
-        self.descriptor_url = (
-            "https://osm-download.etsi.org/ftp/osm-4.0-four/4th-hackfest/packages/"
-        )
-        self.vnfd_filenames = ("hackfest_simplecharm_vnf.tar.gz",)
-        self.nsd_filename = "hackfest_simplecharm_ns.tar.gz"
-        self.uses_configuration = True
-        self.commands = {
-            "1": [""],
-            "2": [
-                "ls -lrt /home/ubuntu/first-touch",
-            ],
-        }
-        self.users = {"1": "ubuntu", "2": "ubuntu"}
-        self.passwords = {"1": "osm4u", "2": "osm4u"}
-
-
-class TestDeploySimpleCharm2(TestDeploySimpleCharm):
-    description = (
-        "Deploy hackfest-4 hackfest_simplecharm example changing naming to contain dots on ids and "
-        "vnf-member-index"
-    )
-
-    def __init__(self):
-        super().__init__()
-        self.test_name = "HACKFEST-SIMPLE2-"
-        self.qforce = "?FORCE=True"
-        self.descriptor_edit = {
-            "vnfd0": {"id": "hackfest.simplecharm.vnf"},
-            "nsd": {
-                "id": "hackfest.simplecharm.ns",
-                "constituent-vnfd": {
-                    "$[0]": {
-                        "vnfd-id-ref": "hackfest.simplecharm.vnf",
-                        "member-vnf-index": "$1",
-                    },
-                    "$[1]": {
-                        "vnfd-id-ref": "hackfest.simplecharm.vnf",
-                        "member-vnf-index": "$2",
-                    },
-                },
-                "vld": {
-                    "$[0]": {
-                        "vnfd-connection-point-ref": {
-                            "$[0]": {
-                                "member-vnf-index-ref": "$1",
-                                "vnfd-id-ref": "hackfest.simplecharm.vnf",
-                            },
-                            "$[1]": {
-                                "member-vnf-index-ref": "$2",
-                                "vnfd-id-ref": "hackfest.simplecharm.vnf",
-                            },
-                        },
-                    },
-                    "$[1]": {
-                        "vnfd-connection-point-ref": {
-                            "$[0]": {
-                                "member-vnf-index-ref": "$1",
-                                "vnfd-id-ref": "hackfest.simplecharm.vnf",
-                            },
-                            "$[1]": {
-                                "member-vnf-index-ref": "$2",
-                                "vnfd-id-ref": "hackfest.simplecharm.vnf",
-                            },
-                        },
-                    },
-                },
-            },
-        }
-
-
-class TestDeploySingleVdu(TestDeployHackfest3Charmed):
-    description = (
-        "Generate a single VDU base on editing Hackfest3Charmed descriptors and deploy"
-    )
-
-    def __init__(self):
-        super().__init__()
-        self.test_name = "SingleVDU"
-        self.qforce = "?FORCE=True"
-        self.descriptor_edit = {
-            # Modify VNFD to remove one VDU
-            "vnfd0": {
-                "vdu": {
-                    "$[0]": {
-                        "interface": {
-                            "$[0]": {"external-connection-point-ref": "pdu-mgmt"}
-                        }
-                    },
-                    "$[1]": None,
-                },
-                "vnf-configuration": None,
-                "connection-point": {
-                    "$[0]": {
-                        "id": "pdu-mgmt",
-                        "name": "pdu-mgmt",
-                        "short-name": "pdu-mgmt",
-                    },
-                    "$[1]": None,
-                },
-                "mgmt-interface": {"cp": "pdu-mgmt"},
-                "description": "A vnf single vdu to be used as PDU",
-                "id": "vdu-as-pdu",
-                "internal-vld": {
-                    "$[0]": {
-                        "id": "pdu_internal",
-                        "name": "pdu_internal",
-                        "internal-connection-point": {"$[1]": None},
-                        "short-name": "pdu_internal",
-                        "type": "ELAN",
-                    }
-                },
-            },
-            # Modify NSD accordingly
-            "nsd": {
-                "constituent-vnfd": {
-                    "$[0]": {"vnfd-id-ref": "vdu-as-pdu"},
-                    "$[1]": None,
-                },
-                "description": "A nsd to deploy the vnf to act as as PDU",
-                "id": "nsd-as-pdu",
-                "name": "nsd-as-pdu",
-                "short-name": "nsd-as-pdu",
-                "vld": {
-                    "$[0]": {
-                        "id": "mgmt_pdu",
-                        "name": "mgmt_pdu",
-                        "short-name": "mgmt_pdu",
-                        "vnfd-connection-point-ref": {
-                            "$[0]": {
-                                "vnfd-connection-point-ref": "pdu-mgmt",
-                                "vnfd-id-ref": "vdu-as-pdu",
-                            },
-                            "$[1]": None,
-                        },
-                        "type": "ELAN",
-                    },
-                    "$[1]": None,
-                },
-            },
-        }
-
-
-class TestDeployHnfd(TestDeployHackfest3Charmed):
-    description = (
-        "Generate a HNFD base on editing Hackfest3Charmed descriptors and deploy"
-    )
-
-    def __init__(self):
-        super().__init__()
-        self.test_name = "HNFD"
-        self.pduDeploy = TestDeploySingleVdu()
-        self.pdu_interface_0 = {}
-        self.pdu_interface_1 = {}
-
-        self.pdu_id = None
-        # self.vnf_to_pdu = """
-        #     vdu:
-        #         "$[0]":
-        #             pdu-type: PDU-TYPE-1
-        #             interface:
-        #                 "$[0]":
-        #                     name: mgmt-iface
-        #                 "$[1]":
-        #                     name: pdu-iface-internal
-        #     id: hfn1
-        #     description: HFND, one PDU + One VDU
-        #     name: hfn1
-        #     short-name: hfn1
-        #
-        # """
-
-        self.pdu_descriptor = {
-            "name": "my-PDU",
-            "type": "PDU-TYPE-1",
-            "vim_accounts": "to-override",
-            "interfaces": [
-                {
-                    "name": "mgmt-iface",
-                    "mgmt": True,
-                    "type": "overlay",
-                    "ip-address": "to override",
-                    "mac-address": "mac_address",
-                    "vim-network-name": "mgmt",
-                },
-                {
-                    "name": "pdu-iface-internal",
-                    "mgmt": False,
-                    "type": "overlay",
-                    "ip-address": "to override",
-                    "mac-address": "mac_address",
-                    "vim-network-name": "pdu_internal",  # OSMNBITEST-PDU-pdu_internal
-                },
-            ],
-        }
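-        # The "to-override" / "to override" placeholders above are filled in by
-        # create_descriptors(): vim_accounts with the real VIM id, and the interface
-        # ip/mac with values captured from the PDU VNFR (or with static test values
-        # when not running against a real OSM).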
-        self.vnfd_filenames = (
-            "hackfest_3charmed_vnfd.tar.gz",
-            "hackfest_3charmed_vnfd.tar.gz",
-        )
-
-        self.descriptor_edit = {
-            "vnfd0": {
-                "id": "hfnd1",
-                "name": "hfn1",
-                "short-name": "hfn1",
-                "vdu": {
-                    "$[0]": {
-                        "pdu-type": "PDU-TYPE-1",
-                        "interface": {
-                            "$[0]": {"name": "mgmt-iface"},
-                            "$[1]": {"name": "pdu-iface-internal"},
-                        },
-                    }
-                },
-            },
-            "nsd": {
-                "constituent-vnfd": {"$[1]": {"vnfd-id-ref": "hfnd1"}},
-                "vld": {
-                    "$[0]": {
-                        "vnfd-connection-point-ref": {"$[1]": {"vnfd-id-ref": "hfnd1"}}
-                    },
-                    "$[1]": {
-                        "vnfd-connection-point-ref": {"$[1]": {"vnfd-id-ref": "hfnd1"}}
-                    },
-                },
-            },
-        }
-
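-    # Overall flow: deploy a single-VDU NS to act as the PDU, capture its interface
-    # ip/mac from the VNFR, register a PDU descriptor with that data, and then onboard
-    # and deploy the hybrid (VDU + PDU) descriptors edited above.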
-    def create_descriptors(self, engine):
-        super().create_descriptors(engine)
-
-        # Create PDU
-        self.pdu_descriptor["interfaces"][0].update(self.pdu_interface_0)
-        self.pdu_descriptor["interfaces"][1].update(self.pdu_interface_1)
-        self.pdu_descriptor["vim_accounts"] = [self.vim_id]
-        # TODO get vim-network-name from vnfr.vld.name
-        self.pdu_descriptor["interfaces"][1]["vim-network-name"] = "{}-{}-{}".format(
-            os.environ.get("OSMNBITEST_NS_NAME", "OSMNBITEST"),
-            "PDU",
-            self.pdu_descriptor["interfaces"][1]["vim-network-name"],
-        )
-        engine.test(
-            "Onboard PDU descriptor",
-            "POST",
-            "/pdu/v1/pdu_descriptors",
-            {
-                "Location": "/pdu/v1/pdu_descriptors/",
-                "Content-Type": "application/yaml",
-            },
-            self.pdu_descriptor,
-            201,
-            r_header_yaml,
-            "yaml",
-        )
-        self.pdu_id = engine.last_id
-
-    def run(self, engine, test_osm, manual_check, test_params=None):
-        engine.get_autorization()
-        engine.set_test_name(self.test_name)
-        nsname = os.environ.get("OSMNBITEST_NS_NAME", "OSMNBITEST")
-
-        # create real VIM if not exist
-        self.vim_id = engine.get_create_vim(test_osm)
-        # instantiate PDU
-        self.pduDeploy.create_descriptors(engine)
-        self.pduDeploy.instantiate(
-            engine,
-            {
-                "nsDescription": "to be used as PDU",
-                "nsName": nsname + "-PDU",
-                "nsdId": self.pduDeploy.nsd_id,
-                "vimAccountId": self.vim_id,
-            },
-        )
-        if manual_check:
-            input(
-                "VNF to be used as PDU has been deployed. Perform manual check and press enter to resume"
-            )
-        if test_osm:
-            self.pduDeploy.test_ns(engine, test_osm)
-
-        if test_osm:
-            r = engine.test(
-                "Get VNFR to obtain IP_ADDRESS",
-                "GET",
-                "/nslcm/v1/vnfrs?nsr-id-ref={}".format(self.pduDeploy.ns_id),
-                headers_json,
-                None,
-                200,
-                r_header_json,
-                "json",
-            )
-            if not r:
-                return
-            vnfr_data = r.json()
-            # print(vnfr_data)
-
-            self.pdu_interface_0["ip-address"] = vnfr_data[0]["vdur"][0]["interfaces"][
-                0
-            ].get("ip-address")
-            self.pdu_interface_1["ip-address"] = vnfr_data[0]["vdur"][0]["interfaces"][
-                1
-            ].get("ip-address")
-            self.pdu_interface_0["mac-address"] = vnfr_data[0]["vdur"][0]["interfaces"][
-                0
-            ].get("mac-address")
-            self.pdu_interface_1["mac-address"] = vnfr_data[0]["vdur"][0]["interfaces"][
-                1
-            ].get("mac-address")
-            if not self.pdu_interface_0["ip-address"]:
-                raise TestException("Vnfr has not managment ip address")
-        else:
-            self.pdu_interface_0["ip-address"] = "192.168.10.10"
-            self.pdu_interface_1["ip-address"] = "192.168.11.10"
-            self.pdu_interface_0["mac-address"] = "52:33:44:55:66:13"
-            self.pdu_interface_1["mac-address"] = "52:33:44:55:66:14"
-
-        self.create_descriptors(engine)
-
-        ns_data = {
-            "nsDescription": "default description",
-            "nsName": nsname,
-            "nsdId": self.nsd_id,
-            "vimAccountId": self.vim_id,
-        }
-        if test_params and test_params.get("ns-config"):
-            if isinstance(test_params["ns-config"], str):
-                ns_data.update(yaml.load(test_params["ns-config"], Loader=yaml.Loader))
-            else:
-                ns_data.update(test_params["ns-config"])
-
-        self.instantiate(engine, ns_data)
-        if manual_check:
-            input(
-                "NS has been deployed. Perform manual check and press enter to resume"
-            )
-        if test_osm:
-            self.test_ns(engine, test_osm)
-        self.additional_operations(engine, test_osm, manual_check)
-        self.terminate(engine)
-        self.pduDeploy.terminate(engine)
-        self.delete_descriptors(engine)
-        self.pduDeploy.delete_descriptors(engine)
-
-    def delete_descriptors(self, engine):
-        super().delete_descriptors(engine)
-        # delete pdu
-        engine.test(
-            "Delete PDU SOL005",
-            "DELETE",
-            "/pdu/v1/pdu_descriptors/{}".format(self.pdu_id),
-            headers_yaml,
-            None,
-            204,
-            None,
-            0,
-        )
-
-
-class TestDescriptors:
-    description = "Test VNFD, NSD, PDU descriptors CRUD and dependencies"
-    vnfd_empty = """vnfd:vnfd-catalog:
-        vnfd:
-        -   name: prova
-            short-name: prova
-            id: prova
-    """
-    vnfd_prova = """vnfd:vnfd-catalog:
-        vnfd:
-        -   connection-point:
-            -   name: cp_0h8m
-                type: VPORT
-            id: prova
-            name: prova
-            short-name: prova
-            vdu:
-            -   id: vdu_z4bm
-                image: ubuntu
-                interface:
-                -   external-connection-point-ref: cp_0h8m
-                    name: eth0
-                    virtual-interface:
-                    type: VIRTIO
-                name: vdu_z4bm
-            version: '1.0'
-    """
-
-    def __init__(self):
-        self.vnfd_filename = "hackfest_3charmed_vnfd.tar.gz"
-        self.nsd_filename = "hackfest_3charmed_nsd.tar.gz"
-        self.descriptor_url = (
-            "https://osm-download.etsi.org/ftp/osm-3.0-three/2nd-hackfest/packages/"
-        )
-        self.vnfd_id = None
-        self.nsd_id = None
-
-    def run(self, engine, test_osm, manual_check, test_params=None):
-        engine.set_test_name("Descriptors")
-        engine.get_autorization()
-        temp_dir = os.path.dirname(os.path.abspath(__file__)) + "/temp/"
-        if not os.path.exists(temp_dir):
-            os.makedirs(temp_dir)
-
-        # download files
-        for filename in (self.vnfd_filename, self.nsd_filename):
-            filename_path = temp_dir + filename
-            if not os.path.exists(filename_path):
-                with open(filename_path, "wb") as file:
-                    response = requests.get(self.descriptor_url + filename)
-                    if response.status_code >= 300:
-                        raise TestException(
-                            "Error downloading descriptor from '{}': {}".format(
-                                self.descriptor_url + filename, response.status_code
-                            )
-                        )
-                    file.write(response.content)
-
-        vnfd_filename_path = temp_dir + self.vnfd_filename
-        nsd_filename_path = temp_dir + self.nsd_filename
-
-        engine.test(
-            "Onboard empty VNFD in one step",
-            "POST",
-            "/vnfpkgm/v1/vnf_packages_content",
-            headers_yaml,
-            self.vnfd_empty,
-            201,
-            r_headers_yaml_location_vnfd,
-            "yaml",
-        )
-        self.vnfd_id = engine.last_id
-
-        # test bug 605
-        engine.test(
-            "Upload invalid VNFD ",
-            "PUT",
-            "/vnfpkgm/v1/vnf_packages/{}/package_content".format(self.vnfd_id),
-            headers_yaml,
-            self.vnfd_prova,
-            422,
-            r_header_yaml,
-            "yaml",
-        )
-
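-        # "@b" + <path> is used throughout this file to make the test engine send the
-        # file at <path> as a binary request body; the actual handling is assumed to
-        # live in the engine code defined elsewhere in this script.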
-        engine.test(
-            "Upload VNFD {}".format(self.vnfd_filename),
-            "PUT",
-            "/vnfpkgm/v1/vnf_packages/{}/package_content".format(self.vnfd_id),
-            headers_zip_yaml,
-            "@b" + vnfd_filename_path,
-            204,
-            None,
-            0,
-        )
-
-        queries = [
-            "mgmt-interface.cp=mgmt",
-            "vdu.0.interface.0.external-connection-point-ref=mgmt",
-            "vdu.0.interface.1.internal-connection-point-ref=internal",
-            "internal-vld.0.internal-connection-point.0.id-ref=internal",
-            # Detection of duplicated VLD names in VNF Descriptors
-            # URL: internal-vld=[
-            #        {id: internal1, name: internal, type:ELAN,
-            #            internal-connection-point: [{id-ref: mgmtVM-internal}, {id-ref: dataVM-internal}]},
-            #        {id: internal2, name: internal, type:ELAN,
-            #            internal-connection-point: [{id-ref: mgmtVM-internal}, {id-ref: dataVM-internal}]}
-            #        ]
-            "internal-vld=%5B%7Bid%3A%20internal1%2C%20name%3A%20internal%2C%20type%3A%20ELAN%2C%20"
-            "internal-connection-point%3A%20%5B%7Bid-ref%3A%20mgmtVM-internal%7D%2C%20%7Bid-ref%3A%20"
-            "dataVM-internal%7D%5D%7D%2C%20%7Bid%3A%20internal2%2C%20name%3A%20internal%2C%20type%3A%20"
-            "ELAN%2C%20internal-connection-point%3A%20%5B%7Bid-ref%3A%20mgmtVM-internal%7D%2C%20%7B"
-            "id-ref%3A%20dataVM-internal%7D%5D%7D%5D",
-        ]
-        for query in queries:
-            engine.test(
-                "Upload invalid VNFD ",
-                "PUT",
-                "/vnfpkgm/v1/vnf_packages/{}/package_content?{}".format(
-                    self.vnfd_id, query
-                ),
-                headers_zip_yaml,
-                "@b" + vnfd_filename_path,
-                422,
-                r_header_yaml,
-                "yaml",
-            )
-
-        # test bug 605
-        engine.test(
-            "Upload invalid VNFD ",
-            "PUT",
-            "/vnfpkgm/v1/vnf_packages/{}/package_content".format(self.vnfd_id),
-            headers_yaml,
-            self.vnfd_prova,
-            422,
-            r_header_yaml,
-            "yaml",
-        )
-
-        # get vnfd descriptor
-        engine.test(
-            "Get VNFD descriptor",
-            "GET",
-            "/vnfpkgm/v1/vnf_packages/{}".format(self.vnfd_id),
-            headers_yaml,
-            None,
-            200,
-            r_header_yaml,
-            "yaml",
-        )
-
-        # get vnfd file descriptor
-        engine.test(
-            "Get VNFD file descriptor",
-            "GET",
-            "/vnfpkgm/v1/vnf_packages/{}/vnfd".format(self.vnfd_id),
-            headers_text,
-            None,
-            200,
-            r_header_text,
-            "text",
-            temp_dir + "vnfd-yaml",
-        )
-        # TODO compare files: diff vnfd-yaml hackfest_3charmed_vnfd/hackfest_3charmed_vnfd.yaml
-
-        # get vnfd zip file package
-        engine.test(
-            "Get VNFD zip package",
-            "GET",
-            "/vnfpkgm/v1/vnf_packages/{}/package_content".format(self.vnfd_id),
-            headers_zip,
-            None,
-            200,
-            r_header_zip,
-            "zip",
-            temp_dir + "vnfd-zip",
-        )
-        # TODO compare files: diff vnfd-zip hackfest_3charmed_vnfd.tar.gz
-
-        # get vnfd artifact
-        engine.test(
-            "Get VNFD artifact package",
-            "GET",
-            "/vnfpkgm/v1/vnf_packages/{}/artifacts/icons/osm.png".format(self.vnfd_id),
-            headers_zip,
-            None,
-            200,
-            r_header_octect,
-            "octet-string",
-            temp_dir + "vnfd-icon",
-        )
-        # TODO compare files: diff vnfd-icon hackfest_3charmed_vnfd/icons/osm.png
-
-        # nsd CREATE AND UPLOAD in one step:
-        engine.test(
-            "Onboard NSD in one step",
-            "POST",
-            "/nsd/v1/ns_descriptors_content",
-            headers_zip_yaml,
-            "@b" + nsd_filename_path,
-            201,
-            r_headers_yaml_location_nsd,
-            "yaml",
-        )
-        self.nsd_id = engine.last_id
-
-        queries = ["vld.0.vnfd-connection-point-ref.0.vnfd-id-ref=hf"]
-        for query in queries:
-            engine.test(
-                "Upload invalid NSD ",
-                "PUT",
-                "/nsd/v1/ns_descriptors/{}/nsd_content?{}".format(self.nsd_id, query),
-                headers_zip_yaml,
-                "@b" + nsd_filename_path,
-                422,
-                r_header_yaml,
-                "yaml",
-            )
-
-        # get nsd descriptor
-        engine.test(
-            "Get NSD descriptor",
-            "GET",
-            "/nsd/v1/ns_descriptors/{}".format(self.nsd_id),
-            headers_yaml,
-            None,
-            200,
-            r_header_yaml,
-            "yaml",
-        )
-
-        # get nsd file descriptor
-        engine.test(
-            "Get NSD file descriptor",
-            "GET",
-            "/nsd/v1/ns_descriptors/{}/nsd".format(self.nsd_id),
-            headers_text,
-            None,
-            200,
-            r_header_text,
-            "text",
-            temp_dir + "nsd-yaml",
-        )
-        # TODO compare files: diff nsd-yaml hackfest_3charmed_nsd/hackfest_3charmed_nsd.yaml
-
-        # get nsd zip file package
-        engine.test(
-            "Get NSD zip package",
-            "GET",
-            "/nsd/v1/ns_descriptors/{}/nsd_content".format(self.nsd_id),
-            headers_zip,
-            None,
-            200,
-            r_header_zip,
-            "zip",
-            temp_dir + "nsd-zip",
-        )
-        # TODO compare files: diff nsd-zip hackfest_3charmed_nsd.tar.gz
-
-        # get nsd artifact
-        engine.test(
-            "Get NSD artifact package",
-            "GET",
-            "/nsd/v1/ns_descriptors/{}/artifacts/icons/osm.png".format(self.nsd_id),
-            headers_zip,
-            None,
-            200,
-            r_header_octect,
-            "octet-string",
-            temp_dir + "nsd-icon",
-        )
-        # TODO compare files: diff nsd-icon hackfest_3charmed_nsd/icons/osm.png
-
-        # vnfd DELETE
-        engine.test(
-            "Delete VNFD conflict",
-            "DELETE",
-            "/vnfpkgm/v1/vnf_packages/{}".format(self.vnfd_id),
-            headers_yaml,
-            None,
-            409,
-            None,
-            None,
-        )
-
-        engine.test(
-            "Delete VNFD force",
-            "DELETE",
-            "/vnfpkgm/v1/vnf_packages/{}?FORCE=TRUE".format(self.vnfd_id),
-            headers_yaml,
-            None,
-            204,
-            None,
-            0,
-        )
-
-        # nsd DELETE
-        engine.test(
-            "Delete NSD",
-            "DELETE",
-            "/nsd/v1/ns_descriptors/{}".format(self.nsd_id),
-            headers_yaml,
-            None,
-            204,
-            None,
-            0,
-        )
-
-
-class TestNetSliceTemplates:
-    description = "Upload a NST to OSM"
-
-    def __init__(self):
-        self.vnfd_filename = "@./slice_shared/vnfd/slice_shared_vnfd.yaml"
-        self.vnfd_filename_middle = "@./slice_shared/vnfd/slice_shared_middle_vnfd.yaml"
-        self.nsd_filename = "@./slice_shared/nsd/slice_shared_nsd.yaml"
-        self.nsd_filename_middle = "@./slice_shared/nsd/slice_shared_middle_nsd.yaml"
-        self.nst_filenames = "@./slice_shared/slice_shared_nstd.yaml"
-
-    def run(self, engine, test_osm, manual_check, test_params=None):
-        # nst CREATE
-        engine.set_test_name("NST step ")
-        engine.get_autorization()
-        temp_dir = os.path.dirname(os.path.abspath(__file__)) + "/temp/"
-        if not os.path.exists(temp_dir):
-            os.makedirs(temp_dir)
-
-        # Onboard VNFDs
-        engine.test(
-            "Onboard edge VNFD",
-            "POST",
-            "/vnfpkgm/v1/vnf_packages_content",
-            headers_yaml,
-            self.vnfd_filename,
-            201,
-            r_headers_yaml_location_vnfd,
-            "yaml",
-        )
-        self.vnfd_edge_id = engine.last_id
-
-        engine.test(
-            "Onboard middle VNFD",
-            "POST",
-            "/vnfpkgm/v1/vnf_packages_content",
-            headers_yaml,
-            self.vnfd_filename_middle,
-            201,
-            r_headers_yaml_location_vnfd,
-            "yaml",
-        )
-        self.vnfd_middle_id = engine.last_id
-
-        # Onboard NSDs
-        engine.test(
-            "Onboard NSD edge",
-            "POST",
-            "/nsd/v1/ns_descriptors_content",
-            headers_yaml,
-            self.nsd_filename,
-            201,
-            r_headers_yaml_location_nsd,
-            "yaml",
-        )
-        self.nsd_edge_id = engine.last_id
-
-        engine.test(
-            "Onboard NSD middle",
-            "POST",
-            "/nsd/v1/ns_descriptors_content",
-            headers_yaml,
-            self.nsd_filename_middle,
-            201,
-            r_headers_yaml_location_nsd,
-            "yaml",
-        )
-        self.nsd_middle_id = engine.last_id
-
-        # Onboard NST
-        engine.test(
-            "Onboard NST",
-            "POST",
-            "/nst/v1/netslice_templates_content",
-            headers_yaml,
-            self.nst_filenames,
-            201,
-            r_headers_yaml_location_nst,
-            "yaml",
-        )
-        nst_id = engine.last_id
-
-        # nstd SHOW OSM format
-        engine.test(
-            "Show NSTD OSM format",
-            "GET",
-            "/nst/v1/netslice_templates/{}".format(nst_id),
-            headers_json,
-            None,
-            200,
-            r_header_json,
-            "json",
-        )
-
-        # nstd DELETE
-        engine.test(
-            "Delete NSTD",
-            "DELETE",
-            "/nst/v1/netslice_templates/{}".format(nst_id),
-            headers_json,
-            None,
-            204,
-            None,
-            0,
-        )
-
-        # NSDs DELETE
-        engine.test(
-            "Delete NSD middle",
-            "DELETE",
-            "/nsd/v1/ns_descriptors/{}".format(self.nsd_middle_id),
-            headers_json,
-            None,
-            204,
-            None,
-            0,
-        )
-
-        engine.test(
-            "Delete NSD edge",
-            "DELETE",
-            "/nsd/v1/ns_descriptors/{}".format(self.nsd_edge_id),
-            headers_json,
-            None,
-            204,
-            None,
-            0,
-        )
-
-        # VNFDs DELETE
-        engine.test(
-            "Delete VNFD edge",
-            "DELETE",
-            "/vnfpkgm/v1/vnf_packages/{}".format(self.vnfd_edge_id),
-            headers_yaml,
-            None,
-            204,
-            None,
-            0,
-        )
-
-        engine.test(
-            "Delete VNFD middle",
-            "DELETE",
-            "/vnfpkgm/v1/vnf_packages/{}".format(self.vnfd_middle_id),
-            headers_yaml,
-            None,
-            204,
-            None,
-            0,
-        )
-
-
-class TestNetSliceInstances:
-    """
-    Test procedure:
-    1. Populate databases with VNFD, NSD, NST with the following scenario
-       +-----------------management-----------------+
-       |                     |                      |
-    +--+---+            +----+----+             +---+--+
-    |      |            |         |             |      |
-    | edge +---data1----+  middle +---data2-----+ edge |
-    |      |            |         |             |      |
-    +------+            +---------+             +------+
-                        shared-nss
-    2. Create NSI-1
-    3. Instantiate NSI-1
-    4. Create NSI-2
-    5. Instantiate NSI-2
-        Manual check - Are 2 slices instantiated correctly?
-        NSI-1 3 nss (2 nss-edges + 1 nss-middle)
-        NSI-2 2 nss (2 nss-edge sharing nss-middle)
-    6. Terminate NSI-1
-    7. Delete NSI-1
-        Manual check - Is slice NSI-1 deleted correctly?
-        NSI-2 with 2 nss-edge + 1 nss-middle (The one from NSI-1)
-    8. Create NSI-3
-    9. Instantiate NSI-3
-        Manual check - Is slice NSI-3 instantiated correctly?
-        NSI-3 reuse nss-middle. NSI-3 only create 2 nss-edge
-    10. Terminate NSI-2
-    11. Delete NSI-2
-    12. Terminate NSI-3
-    13. Delete NSI-3
-        Manual check - All cleaned correctly?
-        NSI-2 and NSI-3 were terminated and deleted
-    14. Cleanup database
-    """
-
-    description = "Upload a NST to OSM"
-
-    def __init__(self):
-        self.vim_id = None
-        self.vnfd_filename = "@./slice_shared/vnfd/slice_shared_vnfd.yaml"
-        self.vnfd_filename_middle = "@./slice_shared/vnfd/slice_shared_middle_vnfd.yaml"
-        self.nsd_filename = "@./slice_shared/nsd/slice_shared_nsd.yaml"
-        self.nsd_filename_middle = "@./slice_shared/nsd/slice_shared_middle_nsd.yaml"
-        self.nst_filenames = "@./slice_shared/slice_shared_nstd.yaml"
-
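-    # Thin wrappers around engine.test() for the NSI LCM endpoints: create, instantiate,
-    # terminate and delete a net slice instance.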
-    def create_slice(self, engine, nsi_data, name):
-        ns_data_text = yaml.safe_dump(nsi_data, default_flow_style=True, width=256)
-        r = engine.test(
-            name,
-            "POST",
-            "/nsilcm/v1/netslice_instances",
-            headers_yaml,
-            ns_data_text,
-            (201, 202),
-            {
-                "Location": "nsilcm/v1/netslice_instances/",
-                "Content-Type": "application/yaml",
-            },
-            "yaml",
-        )
-        return r
-
-    def instantiate_slice(self, engine, nsi_data, nsi_id, name):
-        ns_data_text = yaml.safe_dump(nsi_data, default_flow_style=True, width=256)
-        engine.test(
-            name,
-            "POST",
-            "/nsilcm/v1/netslice_instances/{}/instantiate".format(nsi_id),
-            headers_yaml,
-            ns_data_text,
-            (201, 202),
-            r_headers_yaml_location_nsilcmop,
-            "yaml",
-        )
-
-    def terminate_slice(self, engine, nsi_id, name):
-        engine.test(
-            name,
-            "POST",
-            "/nsilcm/v1/netslice_instances/{}/terminate".format(nsi_id),
-            headers_yaml,
-            None,
-            (201, 202),
-            r_headers_yaml_location_nsilcmop,
-            "yaml",
-        )
-
-    def delete_slice(self, engine, nsi_id, name):
-        engine.test(
-            name,
-            "DELETE",
-            "/nsilcm/v1/netslice_instances/{}".format(nsi_id),
-            headers_yaml,
-            None,
-            204,
-            None,
-            0,
-        )
-
-    def run(self, engine, test_osm, manual_check, test_params=None):
-        # nst CREATE
-        engine.set_test_name("NSI")
-        engine.get_autorization()
-
-        # Onboard VNFDs
-        engine.test(
-            "Onboard edge VNFD",
-            "POST",
-            "/vnfpkgm/v1/vnf_packages_content",
-            headers_yaml,
-            self.vnfd_filename,
-            201,
-            r_headers_yaml_location_vnfd,
-            "yaml",
-        )
-        self.vnfd_edge_id = engine.last_id
-
-        engine.test(
-            "Onboard middle VNFD",
-            "POST",
-            "/vnfpkgm/v1/vnf_packages_content",
-            headers_yaml,
-            self.vnfd_filename_middle,
-            201,
-            r_headers_yaml_location_vnfd,
-            "yaml",
-        )
-        self.vnfd_middle_id = engine.last_id
-
-        # Onboard NSDs
-        engine.test(
-            "Onboard NSD edge",
-            "POST",
-            "/nsd/v1/ns_descriptors_content",
-            headers_yaml,
-            self.nsd_filename,
-            201,
-            r_headers_yaml_location_nsd,
-            "yaml",
-        )
-        self.nsd_edge_id = engine.last_id
-
-        engine.test(
-            "Onboard NSD middle",
-            "POST",
-            "/nsd/v1/ns_descriptors_content",
-            headers_yaml,
-            self.nsd_filename_middle,
-            201,
-            r_headers_yaml_location_nsd,
-            "yaml",
-        )
-        self.nsd_middle_id = engine.last_id
-
-        # Onboard NST
-        engine.test(
-            "Onboard NST",
-            "POST",
-            "/nst/v1/netslice_templates_content",
-            headers_yaml,
-            self.nst_filenames,
-            201,
-            r_headers_yaml_location_nst,
-            "yaml",
-        )
-        nst_id = engine.last_id
-
-        self.vim_id = engine.get_create_vim(test_osm)
-
-        # CREATE NSI-1
-        ns_data = {
-            "nsiName": "Deploy-NSI-1",
-            "vimAccountId": self.vim_id,
-            "nstId": nst_id,
-            "nsiDescription": "default",
-        }
-        r = self.create_slice(engine, ns_data, "Create NSI-1 step 1")
-        if not r:
-            return
-        self.nsi_id1 = engine.last_id
-
-        # INSTANTIATE NSI-1
-        self.instantiate_slice(
-            engine, ns_data, self.nsi_id1, "Instantiate NSI-1 step 2"
-        )
-        nsilcmop_id1 = engine.last_id
-
-        # Waiting for NSI-1
-        if test_osm:
-            engine.wait_operation_ready("nsi", nsilcmop_id1, timeout_deploy)
-
-        # CREATE NSI-2
-        ns_data = {
-            "nsiName": "Deploy-NSI-2",
-            "vimAccountId": self.vim_id,
-            "nstId": nst_id,
-            "nsiDescription": "default",
-        }
-        r = self.create_slice(engine, ns_data, "Create NSI-2 step 1")
-        if not r:
-            return
-        self.nsi_id2 = engine.last_id
-
-        # INSTANTIATE NSI-2
-        self.instantiate_slice(
-            engine, ns_data, self.nsi_id2, "Instantiate NSI-2 step 2"
-        )
-        nsilcmop_id2 = engine.last_id
-
-        # Waiting for NSI-2
-        if test_osm:
-            engine.wait_operation_ready("nsi", nsilcmop_id2, timeout_deploy)
-
-        if manual_check:
-            input(
-                "NSI-1 AND NSI-2 has been deployed. Perform manual check and press enter to resume"
-            )
-
-        # TERMINATE NSI-1
-        if test_osm:
-            self.terminate_slice(engine, self.nsi_id1, "Terminate NSI-1")
-            nsilcmop1_id = engine.last_id
-
-            # Wait terminate NSI-1
-            engine.wait_operation_ready("nsi", nsilcmop1_id, timeout_deploy)
-
-        # DELETE NSI-1
-        self.delete_slice(engine, self.nsi_id1, "Delete NS")
-
-        if manual_check:
-            input(
-                "NSI-1 has been deleted. Perform manual check and press enter to resume"
-            )
-
-        # CREATE NSI-3
-        ns_data = {
-            "nsiName": "Deploy-NSI-3",
-            "vimAccountId": self.vim_id,
-            "nstId": nst_id,
-            "nsiDescription": "default",
-        }
-        r = self.create_slice(engine, ns_data, "Create NSI-3 step 1")
-
-        if not r:
-            return
-        self.nsi_id3 = engine.last_id
-
-        # INSTANTIATE NSI-3
-        self.instantiate_slice(
-            engine, ns_data, self.nsi_id3, "Instantiate NSI-3 step 2"
-        )
-        nsilcmop_id3 = engine.last_id
-
-        # Wait Instantiate NSI-3
-        if test_osm:
-            engine.wait_operation_ready("nsi", nsilcmop_id3, timeout_deploy)
-
-        if manual_check:
-            input(
-                "NSI-3 has been deployed. Perform manual check and press enter to resume"
-            )
-
-        # TERMINATE NSI-2
-        if test_osm:
-            self.terminate_slice(engine, self.nsi_id2, "Terminate NSI-2")
-            nsilcmop2_id = engine.last_id
-
-            # Wait terminate NSI-2
-            engine.wait_operation_ready("nsi", nsilcmop2_id, timeout_deploy)
-
-        # DELETE NSI-2
-        self.delete_slice(engine, self.nsi_id2, "DELETE NSI-2")
-
-        # TERMINATE NSI-3
-        if test_osm:
-            self.terminate_slice(engine, self.nsi_id3, "Terminate NSI-3")
-            nsilcmop3_id = engine.last_id
-
-            # Wait terminate NSI-3
-            engine.wait_operation_ready("nsi", nsilcmop3_id, timeout_deploy)
-
-        # DELETE NSI-3
-        self.delete_slice(engine, self.nsi_id3, "DELETE NSI-3")
-
-        if manual_check:
-            input(
-                "NSI-2 and NSI-3 has been deleted. Perform manual check and press enter to resume"
-            )
-
-        # nstd DELETE
-        engine.test(
-            "Delete NSTD",
-            "DELETE",
-            "/nst/v1/netslice_templates/{}".format(nst_id),
-            headers_json,
-            None,
-            204,
-            None,
-            0,
-        )
-
-        # NSDs DELETE
-        engine.test(
-            "Delete NSD middle",
-            "DELETE",
-            "/nsd/v1/ns_descriptors/{}".format(self.nsd_middle_id),
-            headers_json,
-            None,
-            204,
-            None,
-            0,
-        )
-
-        engine.test(
-            "Delete NSD edge",
-            "DELETE",
-            "/nsd/v1/ns_descriptors/{}".format(self.nsd_edge_id),
-            headers_json,
-            None,
-            204,
-            None,
-            0,
-        )
-
-        # VNFDs DELETE
-        engine.test(
-            "Delete VNFD edge",
-            "DELETE",
-            "/vnfpkgm/v1/vnf_packages/{}".format(self.vnfd_edge_id),
-            headers_yaml,
-            None,
-            204,
-            None,
-            0,
-        )
-
-        engine.test(
-            "Delete VNFD middle",
-            "DELETE",
-            "/vnfpkgm/v1/vnf_packages/{}".format(self.vnfd_middle_id),
-            headers_yaml,
-            None,
-            204,
-            None,
-            0,
-        )
-
-
-class TestAuthentication:
-    description = "Test Authentication"
-
-    @staticmethod
-    def run(engine, test_osm, manual_check, test_params=None):
-        engine.set_test_name("Authentication")
-        # backend = test_params.get("backend") if test_params else None   # UNUSED
-
-        admin_project_id = test_project_id = None
-        project_admin_role_id = project_user_role_id = None
-        test_user_id = empty_user_id = None
-        default_role_id = empty_role_id = token_role_id = None
-
-        engine.get_autorization()
-
-        # GET
-        engine.test(
-            "Get tokens",
-            "GET",
-            "/admin/v1/tokens",
-            headers_json,
-            {},
-            (200),
-            {"Content-Type": "application/json"},
-            "json",
-        )
-        engine.test(
-            "Get projects",
-            "GET",
-            "/admin/v1/projects",
-            headers_json,
-            {},
-            (200),
-            {"Content-Type": "application/json"},
-            "json",
-        )
-        engine.test(
-            "Get users",
-            "GET",
-            "/admin/v1/users",
-            headers_json,
-            {},
-            (200),
-            {"Content-Type": "application/json"},
-            "json",
-        )
-        engine.test(
-            "Get roles",
-            "GET",
-            "/admin/v1/roles",
-            headers_json,
-            {},
-            (200),
-            {"Content-Type": "application/json"},
-            "json",
-        )
-        res = engine.test(
-            "Get admin project",
-            "GET",
-            "/admin/v1/projects?name=admin",
-            headers_json,
-            {},
-            (200),
-            {"Content-Type": "application/json"},
-            "json",
-        )
-        admin_project_id = res.json()[0]["_id"] if res else None
-        res = engine.test(
-            "Get project admin role",
-            "GET",
-            "/admin/v1/roles?name=project_admin",
-            headers_json,
-            {},
-            (200),
-            {"Content-Type": "application/json"},
-            "json",
-        )
-        project_admin_role_id = res.json()[0]["_id"] if res else None
-        res = engine.test(
-            "Get project user role",
-            "GET",
-            "/admin/v1/roles?name=project_user",
-            headers_json,
-            {},
-            (200),
-            {"Content-Type": "application/json"},
-            "json",
-        )
-        project_user_role_id = res.json()[0]["_id"] if res else None
-
-        # POST
-        res = engine.test(
-            "Create test project",
-            "POST",
-            "/admin/v1/projects",
-            headers_json,
-            {"name": "test"},
-            (201),
-            {"Location": "/admin/v1/projects/", "Content-Type": "application/json"},
-            "json",
-        )
-        test_project_id = engine.last_id if res else None
-        res = engine.test(
-            "Create role without permissions",
-            "POST",
-            "/admin/v1/roles",
-            headers_json,
-            {"name": "empty"},
-            (201),
-            {"Content-Type": "application/json"},
-            "json",
-        )
-        empty_role_id = engine.last_id if res else None
-        res = engine.test(
-            "Create role with default permissions",
-            "POST",
-            "/admin/v1/roles",
-            headers_json,
-            {"name": "default", "permissions": {"default": True}},
-            (201),
-            {"Location": "/admin/v1/roles/", "Content-Type": "application/json"},
-            "json",
-        )
-        default_role_id = engine.last_id if res else None
-        res = engine.test(
-            "Create role with token permissions",
-            "POST",
-            "/admin/v1/roles",
-            headers_json,
-            {
-                "name": "tokens",
-                "permissions": {"tokens": True},
-            },  # is default required ?
-            (201),
-            {"Location": "/admin/v1/roles/", "Content-Type": "application/json"},
-            "json",
-        )
-        token_role_id = engine.last_id if res else None
-        pr = "project-role mappings"
-        res = engine.test(
-            "Create user without " + pr,
-            "POST",
-            "/admin/v1/users",
-            headers_json,
-            {"username": "empty", "password": "empty"},
-            201,
-            {"Content-Type": "application/json"},
-            "json",
-        )
-        empty_user_id = engine.last_id if res else None
-        if (
-            admin_project_id
-            and test_project_id
-            and project_admin_role_id
-            and project_user_role_id
-        ):
-            data = {"username": "test", "password": "test"}
-            data["project_role_mappings"] = [
-                {"project": test_project_id, "role": project_admin_role_id},
-                {"project": admin_project_id, "role": project_user_role_id},
-            ]
-            res = engine.test(
-                "Create user with " + pr,
-                "POST",
-                "/admin/v1/users",
-                headers_json,
-                data,
-                (201),
-                {"Content-Type": "application/json"},
-                "json",
-            )
-            test_user_id = engine.last_id if res else None
-
-        # PUT
-        if test_user_id:
-            engine.test(
-                "Modify test user's password",
-                "PUT",
-                "/admin/v1/users/" + test_user_id,
-                headers_json,
-                {"password": "password"},
-                (204),
-                {},
-                0,
-            )
-        if (
-            empty_user_id
-            and admin_project_id
-            and test_project_id
-            and project_admin_role_id
-            and project_user_role_id
-        ):
-            data = {
-                "project_role_mappings": [
-                    {"project": test_project_id, "role": project_admin_role_id},
-                    {"project": admin_project_id, "role": project_user_role_id},
-                ]
-            }
-            engine.test(
-                "Modify empty user's " + pr,
-                "PUT",
-                "/admin/v1/users/" + empty_user_id,
-                headers_json,
-                data,
-                (204),
-                {},
-                0,
-            )
-
-        # DELETE
-        if empty_user_id:
-            engine.test(
-                "Delete empty user",
-                "DELETE",
-                "/admin/v1/users/" + empty_user_id,
-                headers_json,
-                {},
-                (204),
-                {},
-                0,
-            )
-        if test_user_id:
-            engine.test(
-                "Delete test user",
-                "DELETE",
-                "/admin/v1/users/" + test_user_id,
-                headers_json,
-                {},
-                (204),
-                {},
-                0,
-            )
-        if empty_role_id:
-            engine.test(
-                "Delete empty role",
-                "DELETE",
-                "/admin/v1/roles/" + empty_role_id,
-                headers_json,
-                {},
-                (204),
-                {},
-                0,
-            )
-        if default_role_id:
-            engine.test(
-                "Delete default role",
-                "DELETE",
-                "/admin/v1/roles/" + default_role_id,
-                headers_json,
-                {},
-                (204),
-                {},
-                0,
-            )
-        if token_role_id:
-            engine.test(
-                "Delete token role",
-                "DELETE",
-                "/admin/v1/roles/" + token_role_id,
-                headers_json,
-                {},
-                (204),
-                {},
-                0,
-            )
-        if test_project_id:
-            engine.test(
-                "Delete test project",
-                "DELETE",
-                "/admin/v1/projects/" + test_project_id,
-                headers_json,
-                {},
-                (204),
-                {},
-                0,
-            )
-
-        # END Tests
-
-        engine.remove_authorization()  # To finish
-
-
-class TestNbiQuotas:
-    description = "Test NBI Quotas"
-
-    @staticmethod
-    def run(engine, test_osm, manual_check, test_params=None):
-        engine.set_test_name("NBI-Quotas_")
-        # backend = test_params.get("backend") if test_params else None   # UNUSED
-
-        test_username = "test-nbi-quotas"
-        test_password = "test-nbi-quotas"
-        test_project = "test-nbi-quotas"
-
-        test_vim = "test-nbi-quotas"
-        test_wim = "test-nbi-quotas"
-        test_sdn = "test-nbi-quotas"
-
-        test_user_id = None
-        test_project_id = None
-
-        test_vim_ids = []
-        test_wim_ids = []
-        test_sdn_ids = []
-        test_vnfd_ids = []
-        test_nsd_ids = []
-        test_nst_ids = []
-        test_pdu_ids = []
-        test_nsr_ids = []
-        test_nsi_ids = []
-
-        # Save admin access data
-        admin_username = engine.user
-        admin_password = engine.password
-        admin_project = engine.project
-
-        # Get admin access
-        engine.get_autorization()
-        admin_token = engine.last_id
-
-        # Check that test project and user do not exist
-        res1 = engine.test(
-            "Check that test project doesn't exist",
-            "GET",
-            "/admin/v1/projects/" + test_project,
-            headers_json,
-            {},
-            (404),
-            {},
-            True,
-        )
-        res2 = engine.test(
-            "Check that test user doesn't exist",
-            "GET",
-            "/admin/v1/users/" + test_username,
-            headers_json,
-            {},
-            (404),
-            {},
-            True,
-        )
-        if None in [res1, res2]:
-            engine.remove_authorization()
-            logger.error("Test project and/or user already exist")
-            return
-
-        # Create test project&user
-        res = engine.test(
-            "Create test project",
-            "POST",
-            "/admin/v1/projects",
-            headers_json,
-            {
-                "name": test_username,
-                "quotas": {
-                    "vnfds": 2,
-                    "nsds": 2,
-                    "nsts": 1,
-                    "pdus": 1,
-                    "nsrs": 2,
-                    "nsis": 1,
-                    "vim_accounts": 1,
-                    "wim_accounts": 1,
-                    "sdns": 1,
-                },
-            },
-            (201),
-            r_header_json,
-            "json",
-        )
-        test_project_id = engine.last_id if res else None
-        res = engine.test(
-            "Create test user",
-            "POST",
-            "/admin/v1/users",
-            headers_json,
-            {
-                "username": test_username,
-                "password": test_password,
-                "project_role_mappings": [
-                    {"project": test_project, "role": "project_admin"}
-                ],
-            },
-            (201),
-            r_header_json,
-            "json",
-        )
-        test_user_id = engine.last_id if res else None
-
-        if test_project_id and test_user_id:
-
-            # Get user access
-            engine.token = None
-            engine.user = test_username
-            engine.password = test_password
-            engine.project = test_project
-            engine.get_autorization()
-            user_token = engine.last_id
-
-            # Create test VIM
-            res = engine.test(
-                "Create test VIM",
-                "POST",
-                "/admin/v1/vim_accounts",
-                headers_json,
-                {
-                    "name": test_vim,
-                    "vim_type": "openvim",
-                    "vim_user": test_username,
-                    "vim_password": test_password,
-                    "vim_tenant_name": test_project,
-                    "vim_url": "https://0.0.0.0:0/v0.0",
-                },
-                (202),
-                r_header_json,
-                "json",
-            )
-            test_vim_ids += [engine.last_id if res else None]
-
-            res = engine.test(
-                "Try to create second test VIM",
-                "POST",
-                "/admin/v1/vim_accounts",
-                headers_json,
-                {
-                    "name": test_vim + "_2",
-                    "vim_type": "openvim",
-                    "vim_user": test_username,
-                    "vim_password": test_password,
-                    "vim_tenant_name": test_project,
-                    "vim_url": "https://0.0.0.0:0/v0.0",
-                },
-                (422),
-                r_header_json,
-                "json",
-            )
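-            # res is None when the expected 422 was not returned; keep last_id for cleanup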
-            test_vim_ids += [engine.last_id if res is None else None]
-
-            res = engine.test(
-                "Try to create second test VIM with FORCE",
-                "POST",
-                "/admin/v1/vim_accounts?FORCE",
-                headers_json,
-                {
-                    "name": test_vim + "_3",
-                    "vim_type": "openvim",
-                    "vim_user": test_username,
-                    "vim_password": test_password,
-                    "vim_tenant_name": test_project,
-                    "vim_url": "https://0.0.0.0:0/v0.0",
-                },
-                (202),
-                r_header_json,
-                "json",
-            )
-            test_vim_ids += [engine.last_id if res else None]
-
-            if test_vim_ids[0]:
-
-                # Download descriptor files (if required)
-                test_dir = "/tmp/" + test_username + "/"
-                test_url = "https://osm-download.etsi.org/ftp/osm-6.0-six/7th-hackfest/packages/"
-                vnfd_filenames = [
-                    "slice_hackfest_vnfd.tar.gz",
-                    "slice_hackfest_middle_vnfd.tar.gz",
-                ]
-                nsd_filenames = [
-                    "slice_hackfest_nsd.tar.gz",
-                    "slice_hackfest_middle_nsd.tar.gz",
-                ]
-                nst_filenames = ["slice_hackfest_nstd.yaml"]
-                pdu_filenames = ["PDU_router.yaml"]
-                desc_filenames = (
-                    vnfd_filenames + nsd_filenames + nst_filenames + pdu_filenames
-                )
-                if not os.path.exists(test_dir):
-                    os.makedirs(test_dir)
-                for filename in desc_filenames:
-                    if not os.path.exists(test_dir + filename):
-                        res = requests.get(test_url + filename)
-                        if res.status_code < 300:
-                            with open(test_dir + filename, "wb") as file:
-                                file.write(res.content)
-
-                if all([os.path.exists(test_dir + p) for p in desc_filenames]):
-
-                    # Test VNFD Quotas
-                    res = engine.test(
-                        "Create test VNFD #1",
-                        "POST",
-                        "/vnfpkgm/v1/vnf_packages_content",
-                        headers_zip_json,
-                        "@b" + test_dir + vnfd_filenames[0],
-                        (201),
-                        r_header_json,
-                        "json",
-                    )
-                    test_vnfd_ids += [engine.last_id if res else None]
-                    res = engine.test(
-                        "Create test VNFD #2",
-                        "POST",
-                        "/vnfpkgm/v1/vnf_packages_content",
-                        headers_zip_json,
-                        "@b" + test_dir + vnfd_filenames[1],
-                        (201),
-                        r_header_json,
-                        "json",
-                    )
-                    test_vnfd_ids += [engine.last_id if res else None]
-                    res = engine.test(
-                        "Try to create extra test VNFD",
-                        "POST",
-                        "/vnfpkgm/v1/vnf_packages_content",
-                        headers_zip_json,
-                        "@b" + test_dir + vnfd_filenames[0],
-                        (422),
-                        r_header_json,
-                        "json",
-                    )
-                    test_vnfd_ids += [engine.last_id if res is None else None]
-                    res = engine.test(
-                        "Try to create extra test VNFD with FORCE",
-                        "POST",
-                        "/vnfpkgm/v1/vnf_packages_content?FORCE",
-                        headers_zip_json,
-                        "@b" + test_dir + vnfd_filenames[0],
-                        (201),
-                        r_header_json,
-                        "json",
-                    )
-                    test_vnfd_ids += [engine.last_id if res else None]
-
-                    # Remove extra VNFDs to prevent further errors
-                    for i in [2, 3]:
-                        if test_vnfd_ids[i]:
-                            res = engine.test(
-                                "Delete test VNFD #" + str(i),
-                                "DELETE",
-                                "/vnfpkgm/v1/vnf_packages_content/"
-                                + test_vnfd_ids[i]
-                                + "?FORCE",
-                                headers_json,
-                                {},
-                                (204),
-                                {},
-                                0,
-                            )
-                            if res:
-                                test_vnfd_ids[i] = None
-
-                    if test_vnfd_ids[0] and test_vnfd_ids[1]:
-
-                        # Test NSD Quotas
-                        res = engine.test(
-                            "Create test NSD #1",
-                            "POST",
-                            "/nsd/v1/ns_descriptors_content",
-                            headers_zip_json,
-                            "@b" + test_dir + nsd_filenames[0],
-                            (201),
-                            r_header_json,
-                            "json",
-                        )
-                        test_nsd_ids += [engine.last_id if res else None]
-                        res = engine.test(
-                            "Create test NSD #2",
-                            "POST",
-                            "/nsd/v1/ns_descriptors_content",
-                            headers_zip_json,
-                            "@b" + test_dir + nsd_filenames[1],
-                            (201),
-                            r_header_json,
-                            "json",
-                        )
-                        test_nsd_ids += [engine.last_id if res else None]
-                        res = engine.test(
-                            "Try to create extra test NSD",
-                            "POST",
-                            "/nsd/v1/ns_descriptors_content",
-                            headers_zip_json,
-                            "@b" + test_dir + nsd_filenames[0],
-                            (422),
-                            r_header_json,
-                            "json",
-                        )
-                        test_nsd_ids += [engine.last_id if res is None else None]
-                        res = engine.test(
-                            "Try to create extra test NSD with FORCE",
-                            "POST",
-                            "/nsd/v1/ns_descriptors_content?FORCE",
-                            headers_zip_json,
-                            "@b" + test_dir + nsd_filenames[0],
-                            (201),
-                            r_header_json,
-                            "json",
-                        )
-                        test_nsd_ids += [engine.last_id if res else None]
-
-                        # Remove extra NSDs to prevent further errors
-                        for i in [2, 3]:
-                            if test_nsd_ids[i]:
-                                res = engine.test(
-                                    "Delete test NSD #" + str(i),
-                                    "DELETE",
-                                    "/nsd/v1/ns_descriptors_content/"
-                                    + test_nsd_ids[i]
-                                    + "?FORCE",
-                                    headers_json,
-                                    {},
-                                    (204),
-                                    {},
-                                    0,
-                                )
-                                if res:
-                                    test_nsd_ids[i] = None
-
-                        if test_nsd_ids[0] and test_nsd_ids[1]:
-
-                            # Test NSR Quotas
-                            res = engine.test(
-                                "Create test NSR #1",
-                                "POST",
-                                "/nslcm/v1/ns_instances_content",
-                                headers_json,
-                                {
-                                    "nsName": test_username + "_1",
-                                    "nsdId": test_nsd_ids[0],
-                                    "vimAccountId": test_vim_ids[0],
-                                },
-                                (201),
-                                r_header_json,
-                                "json",
-                            )
-                            test_nsr_ids += [engine.last_id if res else None]
-                            res = engine.test(
-                                "Create test NSR #2",
-                                "POST",
-                                "/nslcm/v1/ns_instances_content",
-                                headers_json,
-                                {
-                                    "nsName": test_username + "_2",
-                                    "nsdId": test_nsd_ids[1],
-                                    "vimAccountId": test_vim_ids[0],
-                                },
-                                (201),
-                                r_header_json,
-                                "json",
-                            )
-                            test_nsr_ids += [engine.last_id if res else None]
-                            res = engine.test(
-                                "Try to create extra test NSR",
-                                "POST",
-                                "/nslcm/v1/ns_instances_content",
-                                headers_json,
-                                {
-                                    "nsName": test_username + "_3",
-                                    "nsdId": test_nsd_ids[0],
-                                    "vimAccountId": test_vim_ids[0],
-                                },
-                                (422),
-                                r_header_json,
-                                "json",
-                            )
-                            test_nsr_ids += [engine.last_id if res is None else None]
-                            res = engine.test(
-                                "Try to create test NSR with FORCE",
-                                "POST",
-                                "/nslcm/v1/ns_instances_content?FORCE",
-                                headers_json,
-                                {
-                                    "nsName": test_username + "_4",
-                                    "nsdId": test_nsd_ids[0],
-                                    "vimAccountId": test_vim_ids[0],
-                                },
-                                (201),
-                                r_header_json,
-                                "json",
-                            )
-                            test_nsr_ids += [engine.last_id if res else None]
-
-                            # Test NST Quotas
-                            res = engine.test(
-                                "Create test NST",
-                                "POST",
-                                "/nst/v1/netslice_templates_content",
-                                headers_txt_json,
-                                "@b" + test_dir + nst_filenames[0],
-                                (201),
-                                r_header_json,
-                                "json",
-                            )
-                            test_nst_ids += [engine.last_id if res else None]
-                            res = engine.test(
-                                "Try to create extra test NST",
-                                "POST",
-                                "/nst/v1/netslice_templates_content",
-                                headers_txt_json,
-                                "@b" + test_dir + nst_filenames[0],
-                                (422),
-                                r_header_json,
-                                "json",
-                            )
-                            test_nst_ids += [engine.last_id if res is None else None]
-                            res = engine.test(
-                                "Try to create extra test NST with FORCE",
-                                "POST",
-                                "/nst/v1/netslice_templates_content?FORCE",
-                                headers_txt_json,
-                                "@b" + test_dir + nst_filenames[0],
-                                (201),
-                                r_header_json,
-                                "json",
-                            )
-                            test_nst_ids += [engine.last_id if res else None]
-
-                            if test_nst_ids[0]:
-                                # Remove NSR Quota
-                                engine.set_header(
-                                    {"Authorization": "Bearer {}".format(admin_token)}
-                                )
-                                res = engine.test(
-                                    "Remove NSR Quota",
-                                    "PUT",
-                                    "/admin/v1/projects/" + test_project_id,
-                                    headers_json,
-                                    {"quotas": {"nsrs": None}},
-                                    (204),
-                                    {},
-                                    0,
-                                )
-                                engine.set_header(
-                                    {"Authorization": "Bearer {}".format(user_token)}
-                                )
-                                if res:
-                                    # Test NSI Quotas
-                                    res = engine.test(
-                                        "Create test NSI",
-                                        "POST",
-                                        "/nsilcm/v1/netslice_instances_content",
-                                        headers_json,
-                                        {
-                                            "nsiName": test_username,
-                                            "nstId": test_nst_ids[0],
-                                            "vimAccountId": test_vim_ids[0],
-                                        },
-                                        (201),
-                                        r_header_json,
-                                        "json",
-                                    )
-                                    test_nsi_ids += [engine.last_id if res else None]
-                                    res = engine.test(
-                                        "Try to create extra test NSI",
-                                        "POST",
-                                        "/nsilcm/v1/netslice_instances_content",
-                                        headers_json,
-                                        {
-                                            "nsiName": test_username,
-                                            "nstId": test_nst_ids[0],
-                                            "vimAccountId": test_vim_ids[0],
-                                        },
-                                        (400),
-                                        r_header_json,
-                                        "json",
-                                    )
-                                    test_nsi_ids += [
-                                        engine.last_id if res is None else None
-                                    ]
-                                    res = engine.test(
-                                        "Try to create extra test NSI with FORCE",
-                                        "POST",
-                                        "/nsilcm/v1/netslice_instances_content?FORCE",
-                                        headers_json,
-                                        {
-                                            "nsiName": test_username,
-                                            "nstId": test_nst_ids[0],
-                                            "vimAccountId": test_vim_ids[0],
-                                        },
-                                        (201),
-                                        r_header_json,
-                                        "json",
-                                    )
-                                    test_nsi_ids += [engine.last_id if res else None]
-
-                    # Test PDU Quotas
-                    with open(test_dir + pdu_filenames[0], "rb") as file:
-                        pdu_text = re.sub(
-                            r"ip-address: *\[[^\]]*\]",
-                            "ip-address: '0.0.0.0'",
-                            file.read().decode("utf-8"),
-                        )
-                    with open(test_dir + pdu_filenames[0], "wb") as file:
-                        file.write(pdu_text.encode("utf-8"))
-                    res = engine.test(
-                        "Create test PDU",
-                        "POST",
-                        "/pdu/v1/pdu_descriptors",
-                        headers_yaml,
-                        "@b" + test_dir + pdu_filenames[0],
-                        (201),
-                        r_header_yaml,
-                        "yaml",
-                    )
-                    test_pdu_ids += [engine.last_id if res else None]
-                    res = engine.test(
-                        "Try to create extra test PDU",
-                        "POST",
-                        "/pdu/v1/pdu_descriptors",
-                        headers_yaml,
-                        "@b" + test_dir + pdu_filenames[0],
-                        (422),
-                        r_header_yaml,
-                        "yaml",
-                    )
-                    test_pdu_ids += [engine.last_id if res is None else None]
-                    res = engine.test(
-                        "Try to create extra test PDU with FORCE",
-                        "POST",
-                        "/pdu/v1/pdu_descriptors?FORCE",
-                        headers_yaml,
-                        "@b" + test_dir + pdu_filenames[0],
-                        (201),
-                        r_header_yaml,
-                        "yaml",
-                    )
-                    test_pdu_ids += [engine.last_id if res else None]
-
-                    # Cleanup
-                    for i, id in enumerate(test_nsi_ids):
-                        if id:
-                            engine.test(
-                                "Delete test NSI #" + str(i),
-                                "DELETE",
-                                "/nsilcm/v1/netslice_instances_content/"
-                                + id
-                                + "?FORCE",
-                                headers_json,
-                                {},
-                                (204),
-                                {},
-                                0,
-                            )
-                    for i, id in enumerate(test_nsr_ids):
-                        if id:
-                            engine.test(
-                                "Delete test NSR #" + str(i),
-                                "DELETE",
-                                "/nslcm/v1/ns_instances_content/" + id + "?FORCE",
-                                headers_json,
-                                {},
-                                (204),
-                                {},
-                                0,
-                            )
-                    for i, id in enumerate(test_nst_ids):
-                        if id:
-                            engine.test(
-                                "Delete test NST #" + str(i),
-                                "DELETE",
-                                "/nst/v1/netslice_templates_content/" + id + "?FORCE",
-                                headers_json,
-                                {},
-                                (204),
-                                {},
-                                0,
-                            )
-                    for i, id in enumerate(test_nsd_ids):
-                        if id:
-                            engine.test(
-                                "Delete test NSD #" + str(i),
-                                "DELETE",
-                                "/nsd/v1/ns_descriptors_content/" + id + "?FORCE",
-                                headers_json,
-                                {},
-                                (204),
-                                {},
-                                0,
-                            )
-                    for i, id in enumerate(test_vnfd_ids):
-                        if id:
-                            engine.test(
-                                "Delete test VNFD #" + str(i),
-                                "DELETE",
-                                "/vnfpkgm/v1/vnf_packages_content/" + id + "?FORCE",
-                                headers_json,
-                                {},
-                                (204),
-                                {},
-                                0,
-                            )
-                    for i, id in enumerate(test_pdu_ids):
-                        if id:
-                            engine.test(
-                                "Delete test PDU #" + str(i),
-                                "DELETE",
-                                "/pdu/v1/pdu_descriptors/" + id + "?FORCE",
-                                headers_json,
-                                {},
-                                (204),
-                                {},
-                                0,
-                            )
-
-                    # END Test NBI Quotas
-
-            # Test WIM Quotas
-            res = engine.test(
-                "Create test WIM",
-                "POST",
-                "/admin/v1/wim_accounts",
-                headers_json,
-                {
-                    "name": test_wim,
-                    "wim_type": "onos",
-                    "wim_url": "https://0.0.0.0:0/v0.0",
-                },
-                (202),
-                r_header_json,
-                "json",
-            )
-            test_wim_ids += [engine.last_id if res else None]
-            res = engine.test(
-                "Try to create second test WIM",
-                "POST",
-                "/admin/v1/wim_accounts",
-                headers_json,
-                {
-                    "name": test_wim + "_2",
-                    "wim_type": "onos",
-                    "wim_url": "https://0.0.0.0:0/v0.0",
-                },
-                (422),
-                r_header_json,
-                "json",
-            )
-            test_wim_ids += [engine.last_id if res is None else None]
-            res = engine.test(
-                "Try to create second test WIM with FORCE",
-                "POST",
-                "/admin/v1/wim_accounts?FORCE",
-                headers_json,
-                {
-                    "name": test_wim + "_3",
-                    "wim_type": "onos",
-                    "wim_url": "https://0.0.0.0:0/v0.0",
-                },
-                (202),
-                r_header_json,
-                "json",
-            )
-            test_wim_ids += [engine.last_id if res else None]
-
-            # Test SDN Quotas
-            res = engine.test(
-                "Create test SDN",
-                "POST",
-                "/admin/v1/sdns",
-                headers_json,
-                {
-                    "name": test_sdn,
-                    "type": "onos",
-                    "ip": "0.0.0.0",
-                    "port": 9999,
-                    "dpid": "00:00:00:00:00:00:00:00",
-                },
-                (202),
-                r_header_json,
-                "json",
-            )
-            test_sdn_ids += [engine.last_id if res else None]
-            res = engine.test(
-                "Try to create second test SDN",
-                "POST",
-                "/admin/v1/sdns",
-                headers_json,
-                {
-                    "name": test_sdn + "_2",
-                    "type": "onos",
-                    "ip": "0.0.0.0",
-                    "port": 9999,
-                    "dpid": "00:00:00:00:00:00:00:00",
-                },
-                (422),
-                r_header_json,
-                "json",
-            )
-            test_sdn_ids += [engine.last_id if res is None else None]
-            res = engine.test(
-                "Try to create second test SDN with FORCE",
-                "POST",
-                "/admin/v1/sdns?FORCE",
-                headers_json,
-                {
-                    "name": test_sdn + "_3",
-                    "type": "onos",
-                    "ip": "0.0.0.0",
-                    "port": 9999,
-                    "dpid": "00:00:00:00:00:00:00:00",
-                },
-                (202),
-                r_header_json,
-                "json",
-            )
-            test_sdn_ids += [engine.last_id if res else None]
-
-            # Cleanup
-            for i, id in enumerate(test_vim_ids):
-                if id:
-                    engine.test(
-                        "Delete test VIM #" + str(i),
-                        "DELETE",
-                        "/admin/v1/vim_accounts/" + id + "?FORCE",
-                        headers_json,
-                        {},
-                        (202),
-                        {},
-                        0,
-                    )
-            for i, id in enumerate(test_wim_ids):
-                if id:
-                    engine.test(
-                        "Delete test WIM #" + str(i),
-                        "DELETE",
-                        "/admin/v1/wim_accounts/" + id + "?FORCE",
-                        headers_json,
-                        {},
-                        (202),
-                        {},
-                        0,
-                    )
-            for i, id in enumerate(test_sdn_ids):
-                if id:
-                    engine.test(
-                        "Delete test SDN #" + str(i),
-                        "DELETE",
-                        "/admin/v1/sdns/" + id + "?FORCE",
-                        headers_json,
-                        {},
-                        (202),
-                        {},
-                        0,
-                    )
-
-            # Release user access
-            engine.remove_authorization()
-
-        # Cleanup
-        engine.user = admin_username
-        engine.password = admin_password
-        engine.project = admin_project
-        engine.get_autorization()
-        if test_user_id:
-            engine.test(
-                "Delete test user",
-                "DELETE",
-                "/admin/v1/users/" + test_user_id + "?FORCE",
-                headers_json,
-                {},
-                (204),
-                {},
-                0,
-            )
-        if test_project_id:
-            engine.test(
-                "Delete test project",
-                "DELETE",
-                "/admin/v1/projects/" + test_project_id + "?FORCE",
-                headers_json,
-                {},
-                (204),
-                {},
-                0,
-            )
-        engine.remove_authorization()
-
-    # END class TestNbiQuotas
-
-
-if __name__ == "__main__":
-    global logger
-    test = ""
-
-    # Disable warnings from self-signed certificates.
-    requests.packages.urllib3.disable_warnings()
-    try:
-        logging.basicConfig(format="%(levelname)s %(message)s", level=logging.ERROR)
-        logger = logging.getLogger("NBI")
-        # load parameters and configuration
-        opts, args = getopt.getopt(
-            sys.argv[1:],
-            "hvu:p:",
-            [
-                "url=",
-                "user=",
-                "password=",
-                "help",
-                "version",
-                "verbose",
-                "no-verbose",
-                "project=",
-                "insecure",
-                "timeout",
-                "timeout-deploy",
-                "timeout-configure",
-                "test=",
-                "list",
-                "test-osm",
-                "manual-check",
-                "params=",
-                "fail-fast",
-            ],
-        )
-        url = "https://localhost:9999/osm"
-        user = password = project = "admin"
-        test_osm = False
-        manual_check = False
-        verbose = 0
-        verify = True
-        fail_fast = False
-        test_classes = {
-            "NonAuthorized": TestNonAuthorized,
-            "FakeVIM": TestFakeVim,
-            "Users-Projects": TestUsersProjects,
-            "Projects-Descriptors": TestProjectsDescriptors,
-            "VIM-SDN": TestVIMSDN,
-            "Deploy-Custom": TestDeploy,
-            "Deploy-Hackfest-Cirros": TestDeployHackfestCirros,
-            "Deploy-Hackfest-Cirros-Scaling": TestDeployHackfestCirrosScaling,
-            "Deploy-Hackfest-3Charmed": TestDeployHackfest3Charmed,
-            "Deploy-Hackfest-3Charmed2": TestDeployHackfest3Charmed2,
-            "Deploy-Hackfest-3Charmed3": TestDeployHackfest3Charmed3,
-            "Deploy-Hackfest-4": TestDeployHackfest4,
-            "Deploy-CirrosMacIp": TestDeployIpMac,
-            "Descriptors": TestDescriptors,
-            "Deploy-Hackfest1": TestDeployHackfest1,
-            # "Deploy-MultiVIM": TestDeployMultiVIM,
-            "Deploy-SingleVdu": TestDeploySingleVdu,
-            "Deploy-Hnfd": TestDeployHnfd,
-            "Upload-Slice-Template": TestNetSliceTemplates,
-            "Deploy-Slice-Instance": TestNetSliceInstances,
-            "Deploy-SimpleCharm": TestDeploySimpleCharm,
-            "Deploy-SimpleCharm2": TestDeploySimpleCharm2,
-            "Authentication": TestAuthentication,
-            "NBI-Quotas": TestNbiQuotas,
-        }
-        test_to_do = []
-        test_params = {}
-
-        for o, a in opts:
-            # print("parameter:", o, a)
-            if o == "--version":
-                print("test version " + __version__ + " " + version_date)
-                exit()
-            elif o == "--list":
-                for test, test_class in sorted(test_classes.items()):
-                    print("{:32} {}".format(test + ":", test_class.description))
-                exit()
-            elif o in ("-v", "--verbose"):
-                verbose += 1
-            elif o == "no-verbose":
-                verbose = -1
-            elif o in ("-h", "--help"):
-                usage()
-                sys.exit()
-            elif o == "--test-osm":
-                test_osm = True
-            elif o == "--manual-check":
-                manual_check = True
-            elif o == "--url":
-                url = a
-            elif o in ("-u", "--user"):
-                user = a
-            elif o in ("-p", "--password"):
-                password = a
-            elif o == "--project":
-                project = a
-            elif o == "--fail-fast":
-                fail_fast = True
-            elif o == "--test":
-                for _test in a.split(","):
-                    if _test not in test_classes:
-                        print(
-                            "Invalid test name '{}'. Use option '--list' to show available tests".format(
-                                _test
-                            ),
-                            file=sys.stderr,
-                        )
-                        exit(1)
-                    test_to_do.append(_test)
-            elif o == "--params":
-                param_key, _, param_value = a.partition("=")
-                text_index = len(test_to_do)
-                if text_index not in test_params:
-                    test_params[text_index] = {}
-                test_params[text_index][param_key] = param_value
-            elif o == "--insecure":
-                verify = False
-            elif o == "--timeout":
-                timeout = int(a)
-            elif o == "--timeout-deploy":
-                timeout_deploy = int(a)
-            elif o == "--timeout-configure":
-                timeout_configure = int(a)
-            else:
-                assert False, "Unhandled option"
-        if verbose == 0:
-            logger.setLevel(logging.WARNING)
-        elif verbose >= 1:
-            logger.setLevel(logging.DEBUG)
-        else:
-            logger.setLevel(logging.ERROR)
-
-        test_rest = TestRest(url, user=user, password=password, project=project)
-        # print("tests to do:", test_to_do)
-        if test_to_do:
-            text_index = 0
-            for test in test_to_do:
-                if fail_fast and test_rest.failed_tests:
-                    break
-                text_index += 1
-                test_class = test_classes[test]
-                test_class().run(
-                    test_rest, test_osm, manual_check, test_params.get(text_index)
-                )
-        else:
-            for test, test_class in sorted(test_classes.items()):
-                if fail_fast and test_rest.failed_tests:
-                    break
-                test_class().run(test_rest, test_osm, manual_check, test_params.get(0))
-        test_rest.print_results()
-        exit(1 if test_rest.failed_tests else 0)
-
-    except TestException as e:
-        logger.error(test + "Test {} Exception: {}".format(test, str(e)))
-        exit(1)
-    except getopt.GetoptError as e:
-        logger.error(e)
-        print(e, file=sys.stderr)
-        exit(1)
-    except Exception as e:
-        logger.critical(test + " Exception: " + str(e), exc_info=True)
diff --git a/osm_nbi/tests/send_kafka.py b/osm_nbi/tests/send_kafka.py
deleted file mode 100755 (executable)
index d066d14..0000000
+++ /dev/null
@@ -1,64 +0,0 @@
-#! /usr/bin/python3
-# -*- coding: utf-8 -*-
-
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#    http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
-# implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import sys
-import requests
-import yaml
-from os import getenv
-
-__author__ = "Alfonso Tierno, alfonso.tiernosepulveda@telefonica.com"
-__date__ = "$2019-05-31$"
-__version__ = "0.1"
-version_date = "May 2019"
-
-
-def usage():
-    print("Usage: ", sys.argv[0], "topic key message")
-    print("   Sends a kafka message using URL test of NBI")
-    print("  host is defined by env OSMNBI_HOST (localhost by default)")
-    print("  port is defined by env OSMNBI_PORT (9999 by default)")
-    return
-
-
-if __name__ == "__main__":
-    try:
-        if "--help" in sys.argv:
-            usage()
-            exit(0)
-
-        if len(sys.argv) != 4:
-            print(
-                "missing parameters. Type --help for more information", file=sys.stderr
-            )
-            exit(1)
-
-        topic, key, message = sys.argv[1:]
-        host = getenv("OSMNBI_HOST", "localhost")
-        port = getenv("OSMNBI_PORT", "9999")
-        url = "https://{host}:{port}/osm/test/message/{topic}".format(
-            host=host, port=port, topic=topic
-        )
-        print(url)
-        data = {key: message}
-
-        r = requests.post(url, data=yaml.safe_dump(data), verify=False)
-        if r.status_code not in (200, 201, 202, 204):
-            print("Received code={}, content='{}'".format(r.status_code, r.text))
-            exit(1)
-        print("{} -> {}: {}".format(topic, key, message))
-
-    except Exception:
-        raise
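The helper deleted above only wrapped a single POST to NBI's test endpoint. If an ad-hoc equivalent is ever needed, the same call can be reproduced directly (a sketch only, assuming the '/osm/test/message/{topic}' URL and the OSMNBI_HOST/OSMNBI_PORT defaults shown above still apply; the topic, key and message values are hypothetical):

    import os
    import requests
    import yaml

    host = os.getenv("OSMNBI_HOST", "localhost")
    port = os.getenv("OSMNBI_PORT", "9999")
    topic, key, message = "ns", "example-key", "example-message"  # hypothetical values
    url = "https://{}:{}/osm/test/message/{}".format(host, port, topic)
    # verify=False because NBI typically runs with a self-signed certificate, as in the deleted helper
    r = requests.post(url, data=yaml.safe_dump({key: message}), verify=False)
    print(r.status_code, r.text)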
index 8124ce4..6a44365 100755 (executable)
@@ -18,13 +18,14 @@ __author__ = "Pedro de la Cruz Ramos, pedro.delacruzramos@altran.com"
 __date__ = "$2019-10-019"
 
 import unittest
+import random
 from unittest import TestCase
 from unittest.mock import Mock, patch, call
 from uuid import uuid4
 from http import HTTPStatus
 from time import time
-from random import randint
 from osm_common import dbbase, fsbase, msgbase
+from osm_common.dbmemory import DbMemory
 from osm_nbi import authconn, validation
 from osm_nbi.admin_topics import (
     ProjectTopicAuth,
@@ -35,6 +36,7 @@ from osm_nbi.admin_topics import (
 )
 from osm_nbi.engine import EngineException
 from osm_nbi.authconn import AuthconnNotFoundException
+from osm_nbi.authconn_internal import AuthconnInternal
 
 
 test_pid = str(uuid4())
@@ -240,7 +242,10 @@ class Test_ProjectTopicAuth(TestCase):
         with self.subTest(i=1):
             self.auth.get_project_list.side_effect = [[proj], []]
             new_name = "new-project-name"
-            quotas = {"vnfds": randint(0, 100), "nsds": randint(0, 100)}
+            quotas = {
+                "vnfds": random.SystemRandom().randint(0, 100),
+                "nsds": random.SystemRandom().randint(0, 100),
+            }
             self.topic.edit(
                 self.fake_session, pid, {"name": new_name, "quotas": quotas}
             )
@@ -255,7 +260,7 @@ class Test_ProjectTopicAuth(TestCase):
             self.assertEqual(content["quotas"], quotas, "Wrong quotas")
         with self.subTest(i=2):
             new_name = "other-project-name"
-            quotas = {"baditems": randint(0, 100)}
+            quotas = {"baditems": random.SystemRandom().randint(0, 100)}
             self.auth.get_project_list.side_effect = [[proj], []]
             with self.assertRaises(EngineException, msg="Accepted wrong quotas") as e:
                 self.topic.edit(
@@ -774,9 +779,11 @@ class Test_UserTopicAuth(TestCase):
     @classmethod
     def setUpClass(cls):
         cls.test_name = "test-user-topic"
+        cls.password = "Test@123"
 
     def setUp(self):
-        self.db = Mock(dbbase.DbBase())
+        # self.db = Mock(dbbase.DbBase())
+        self.db = DbMemory()
         self.fs = Mock(fsbase.FsBase())
         self.msg = Mock(msgbase.MsgBase())
         self.auth = Mock(authconn.Authconn(None, None, None))
@@ -809,7 +816,7 @@ class Test_UserTopicAuth(TestCase):
                 self.fake_session,
                 {
                     "username": self.test_name,
-                    "password": self.test_name,
+                    "password": self.password,
                     "project_role_mappings": prms_in,
                 },
             )
@@ -817,7 +824,7 @@ class Test_UserTopicAuth(TestCase):
             self.assertEqual(uid2, uid1, "Wrong project identifier")
             content = self.auth.create_user.call_args[0][0]
             self.assertEqual(content["username"], self.test_name, "Wrong project name")
-            self.assertEqual(content["password"], self.test_name, "Wrong password")
+            self.assertEqual(content["password"], self.password, "Wrong password")
             self.assertEqual(
                 content["project_role_mappings"],
                 prms_out,
@@ -841,7 +848,7 @@ class Test_UserTopicAuth(TestCase):
                 self.fake_session,
                 {
                     "username": self.test_name,
-                    "password": self.test_name,
+                    "password": self.password,
                     "projects": ["some_project"],
                 },
             )
@@ -849,7 +856,7 @@ class Test_UserTopicAuth(TestCase):
             self.assertEqual(uid2, uid1, "Wrong project identifier")
             content = self.auth.create_user.call_args[0][0]
             self.assertEqual(content["username"], self.test_name, "Wrong project name")
-            self.assertEqual(content["password"], self.test_name, "Wrong password")
+            self.assertEqual(content["password"], self.password, "Wrong password")
             self.assertEqual(
                 content["project_role_mappings"],
                 prms_out,
@@ -871,7 +878,7 @@ class Test_UserTopicAuth(TestCase):
                     self.fake_session,
                     {
                         "username": "other-project-name",
-                        "password": "other-password",
+                        "password": "Other@pwd1",
                         "project_role_mappings": [{}],
                     },
                 )
@@ -896,7 +903,7 @@ class Test_UserTopicAuth(TestCase):
                     self.fake_session,
                     {
                         "username": "other-project-name",
-                        "password": "other-password",
+                        "password": "Other@pwd1",
                         "projects": [],
                     },
                 )
@@ -947,7 +954,7 @@ class Test_UserTopicAuth(TestCase):
                 {"_id": rid1, "name": "role-1"},
             ]
             new_name = "new-user-name"
-            new_pasw = "new-password"
+            new_pasw = "New@pwd1"
             add_prms = [{"project": pid2, "role": rid2}]
             rem_prms = [{"project": pid1, "role": rid1}]
             self.topic.edit(
@@ -1002,8 +1009,8 @@ class Test_UserTopicAuth(TestCase):
         with self.subTest(i=3):
             self.auth.get_user_list.side_effect = [[user], []]
             self.auth.get_user.return_value = user
-            old_password = self.test_name
-            new_pasw = "new-password"
+            old_password = self.password
+            new_pasw = "New@pwd1"
             self.topic.edit(
                 self.fake_session,
                 uid,
@@ -1013,7 +1020,9 @@ class Test_UserTopicAuth(TestCase):
                 },
             )
             content = self.auth.update_user.call_args[0][0]
-            self.assertEqual(content["old_password"], old_password, "Wrong old password")
+            self.assertEqual(
+                content["old_password"], old_password, "Wrong old password"
+            )
             self.assertEqual(content["password"], new_pasw, "Wrong user password")
 
     def test_delete_user(self):
@@ -1048,7 +1057,7 @@ class Test_UserTopicAuth(TestCase):
                     self.fake_session,
                     {
                         "username": uid,
-                        "password": self.test_name,
+                        "password": self.password,
                         "projects": [test_pid],
                     },
                 )
@@ -1076,7 +1085,7 @@ class Test_UserTopicAuth(TestCase):
                     self.fake_session,
                     {
                         "username": self.test_name,
-                        "password": self.test_name,
+                        "password": self.password,
                         "projects": [test_pid],
                     },
                 )
@@ -1101,7 +1110,7 @@ class Test_UserTopicAuth(TestCase):
                     self.fake_session,
                     {
                         "username": self.test_name,
-                        "password": self.test_name,
+                        "password": self.password,
                         "projects": [str(uuid4())],
                     },
                 )
@@ -1221,6 +1230,143 @@ class Test_UserTopicAuth(TestCase):
                 "Wrong exception text",
             )
 
+    def test_user_management(self):
+        self.config = {
+            "user_management": True,
+            "pwd_expire_days": 30,
+            "max_pwd_attempt": 5,
+            "account_expire_days": 90,
+            "version": "dev",
+            "deviceVendor": "test",
+            "deviceProduct": "test",
+        }
+        self.permissions = {"admin": True, "default": True}
+        now = time()
+        rid = str(uuid4())
+        role = {
+            "_id": rid,
+            "name": self.test_name,
+            "permissions": self.permissions,
+            "_admin": {"created": now, "modified": now},
+        }
+        self.db.create("roles", role)
+        admin_user = {
+            "_id": "72cd0cd6-e8e2-482c-9bc2-15b413bb8500",
+            "username": "admin",
+            "password": "bf0d9f988ad9b404464cf8c8749b298209b05fd404119bae0c11e247efbbc4cb",
+            "_admin": {
+                "created": 1663058370.7721832,
+                "modified": 1663681183.5651639,
+                "salt": "37587e7e0c2f4dbfb9416f3fb5543e2b",
+                "last_token_time": 1666876472.2962265,
+                "user_status": "always-active",
+                "retry_count": 0,
+            },
+            "project_role_mappings": [
+                {"project": "a595ce4e-09dc-4b24-9d6f-e723830bc66b", "role": rid}
+            ],
+        }
+        self.db.create("users", admin_user)
+        with self.subTest(i=1):
+            self.user_create = AuthconnInternal(self.config, self.db, self.permissions)
+            user_info = {"username": "user_mgmt_true", "password": "Test@123"}
+            self.user_create.create_user(user_info)
+            user = self.db.get_one("users", {"username": user_info["username"]})
+            self.assertEqual(user["username"], user_info["username"], "Wrong user name")
+            self.assertEqual(
+                user["_admin"]["user_status"], "active", "Wrong user status"
+            )
+            self.assertIn("password_expire_time", user["_admin"], "Key is not there")
+            self.assertIn("account_expire_time", user["_admin"], "Key is not there")
+        with self.subTest(i=2):
+            self.user_update = AuthconnInternal(self.config, self.db, self.permissions)
+            locked_user = {
+                "username": "user_lock",
+                "password": "c94ba8cfe81985cf5c84dff16d5bac95814ab17e44a8871755eb4cf3a27b7d3d",
+                "_admin": {
+                    "created": 1667207552.2191198,
+                    "modified": 1667207552.2191815,
+                    "salt": "560a5d51b1d64bb4b9cae0ccff3f1102",
+                    "user_status": "locked",
+                    "password_expire_time": 1667207552.2191815,
+                    "account_expire_time": 1674983552.2191815,
+                    "retry_count": 5,
+                    "last_token_time": 1667207552.2191815,
+                },
+                "_id": "73bbbb71-ed38-4b79-9f58-ece19e7e32d6",
+            }
+            self.db.create("users", locked_user)
+            user_info = {
+                "_id": "73bbbb71-ed38-4b79-9f58-ece19e7e32d6",
+                "system_admin_id": "72cd0cd6-e8e2-482c-9bc2-15b413bb8500",
+                "unlock": True,
+            }
+            self.assertEqual(
+                locked_user["_admin"]["user_status"], "locked", "Wrong user status"
+            )
+            self.user_update.update_user(user_info)
+            user = self.db.get_one("users", {"username": locked_user["username"]})
+            self.assertEqual(
+                user["username"], locked_user["username"], "Wrong user name"
+            )
+            self.assertEqual(
+                user["_admin"]["user_status"], "active", "Wrong user status"
+            )
+            self.assertEqual(user["_admin"]["retry_count"], 0, "retry_count not reset")
+        with self.subTest(i=3):
+            self.user_update = AuthconnInternal(self.config, self.db, self.permissions)
+            expired_user = {
+                "username": "user_expire",
+                "password": "c94ba8cfe81985cf5c84dff16d5bac95814ab17e44a8871755eb4cf3a27b7d3d",
+                "_admin": {
+                    "created": 1665602087.601298,
+                    "modified": 1665636442.1245084,
+                    "salt": "560a5d51b1d64bb4b9cae0ccff3f1102",
+                    "user_status": "expired",
+                    "password_expire_time": 1668248628.2191815,
+                    "account_expire_time": 1666952628.2191815,
+                    "retry_count": 0,
+                    "last_token_time": 1666779828.2171815,
+                },
+                "_id": "3266430f-8222-407f-b08f-3a242504ab94",
+            }
+            self.db.create("users", expired_user)
+            user_info = {
+                "_id": "3266430f-8222-407f-b08f-3a242504ab94",
+                "system_admin_id": "72cd0cd6-e8e2-482c-9bc2-15b413bb8500",
+                "renew": True,
+            }
+            self.assertEqual(
+                expired_user["_admin"]["user_status"],
+                "expired",
+                "Wrong user status",
+            )
+            self.user_update.update_user(user_info)
+            user = self.db.get_one("users", {"username": expired_user["username"]})
+            self.assertEqual(
+                user["username"], expired_user["username"], "Wrong user name"
+            )
+            self.assertEqual(
+                user["_admin"]["user_status"], "active", "Wrong user status"
+            )
+            self.assertGreater(
+                user["_admin"]["account_expire_time"],
+                expired_user["_admin"]["account_expire_time"],
+                "User expire time was not extended",
+            )
+        with self.subTest(i=4):
+            self.config.update({"user_management": False})
+            self.user_create = AuthconnInternal(self.config, self.db, self.permissions)
+            user_info = {"username": "user_mgmt_false", "password": "Test@123"}
+            self.user_create.create_user(user_info)
+            user = self.db.get_one("users", {"username": user_info["username"]})
+            self.assertEqual(user["username"], user_info["username"], "Wrong user name")
+            self.assertEqual(
+                user["_admin"]["user_status"], "active", "Wrong user status"
+            )
+            self.assertNotIn("password_expire_time", user["_admin"], "Unexpected key")
+            self.assertNotIn("account_expire_time", user["_admin"], "Unexpected key")
+
 
 class Test_CommonVimWimSdn(TestCase):
     @classmethod
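
Note on what the new test_user_management case above exercises: with user_management enabled, newly created users get password and account expiry timestamps derived from pwd_expire_days and account_expire_days, and unlock/renew requests reset the corresponding _admin fields. A minimal sketch of that bookkeeping, with illustrative names rather than the actual AuthconnInternal code:

from time import time

DAY_SECONDS = 24 * 60 * 60


def stamp_user_admin(user_admin, config):
    """Illustrative only: produce the _admin fields the test asserts on."""
    if not config.get("user_management"):
        return user_admin  # subTest i=4: no expiry keys are added at all
    now = time()
    user_admin["user_status"] = "active"
    user_admin["retry_count"] = 0
    user_admin["password_expire_time"] = now + config["pwd_expire_days"] * DAY_SECONDS
    user_admin["account_expire_time"] = now + config["account_expire_days"] * DAY_SECONDS
    return user_admin
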
index a56916e..f6d4001 100755 (executable)
@@ -27,11 +27,22 @@ from copy import deepcopy
 from time import time
 from osm_common import dbbase, fsbase, msgbase
 from osm_nbi import authconn
-from osm_nbi.tests.test_pkg_descriptors import db_vnfds_text, db_nsds_text
+from osm_nbi.tests.test_pkg_descriptors import (
+    db_vnfds_text,
+    db_nsds_text,
+    vnfd_exploit_text,
+    vnfd_exploit_fixed_text,
+    db_sfc_nsds_text,
+)
 from osm_nbi.descriptor_topics import VnfdTopic, NsdTopic
 from osm_nbi.engine import EngineException
 from osm_common.dbbase import DbException
 import yaml
+import tempfile
+import collections
+import collections.abc
+
+collections.MutableSequence = collections.abc.MutableSequence
 
 test_name = "test-user"
 db_vnfd_content = yaml.safe_load(db_vnfds_text)[0]
@@ -46,11 +57,23 @@ fake_session = {
     "public": False,
     "allow_show_user_project_role": True,
 }
+UUID = "00000000-0000-0000-0000-000000000000"
+
+
+def admin_value():
+    return {"projects_read": []}
+
 
+def setup_mock_fs(fs):
+    fs.path = ""
+    fs.get_params.return_value = {}
+    fs.file_exists.return_value = False
+    fs.file_open.side_effect = lambda path, mode: tempfile.TemporaryFile(mode="a+b")
 
-def norm(str):
+
+def norm(s: str):
     """Normalize string for checking"""
-    return " ".join(str.strip().split()).lower()
+    return " ".join(s.strip().split()).lower()
 
 
 def compare_desc(tc, d1, d2, k):
@@ -94,7 +117,7 @@ class Test_VnfdTopic(TestCase):
         self.topic.check_quota = Mock(return_value=None)  # skip quota
 
     @contextmanager
-    def assertNotRaises(self, exception_type):
+    def assertNotRaises(self, exception_type=Exception):
         try:
             yield None
         except exception_type:
@@ -106,12 +129,7 @@ class Test_VnfdTopic(TestCase):
         return old_desc, new_desc
 
     def prepare_vnfd_creation(self):
-        self.fs.path = ""
-        self.fs.get_params.return_value = {}
-        self.fs.file_exists.return_value = False
-        self.fs.file_open.side_effect = lambda path, mode: open(
-            "/tmp/" + str(uuid4()), "a+b"
-        )
+        setup_mock_fs(self.fs)
         test_vnfd = deepcopy(db_vnfd_content)
         did = db_vnfd_content["_id"]
         self.db.create.return_value = did
@@ -121,6 +139,16 @@ class Test_VnfdTopic(TestCase):
         ]
         return did, test_vnfd
 
+    def prepare_vnfd(self, vnfd_text):
+        setup_mock_fs(self.fs)
+        test_vnfd = yaml.safe_load(vnfd_text)
+        self.db.create.return_value = UUID
+        self.db.get_one.side_effect = [
+            {"_id": UUID, "_admin": admin_value()},
+            None,
+        ]
+        return UUID, test_vnfd
+
     def prepare_test_vnfd(self, test_vnfd):
         del test_vnfd["_id"]
         del test_vnfd["_admin"]
@@ -216,6 +244,26 @@ class Test_VnfdTopic(TestCase):
         self.assertEqual(admin["revision"], 1, "Wrong revision number")
         compare_desc(self, test_vnfd, db_args[2], "VNFD")
 
+    @patch("osm_nbi.descriptor_topics.shutil")
+    @patch("osm_nbi.descriptor_topics.os.rename")
+    def test_new_vnfd_exploit(self, mock_rename, mock_shutil):
+        id, test_vnfd = self.prepare_vnfd(vnfd_exploit_text)
+
+        with self.assertRaises(EngineException):
+            self.topic.upload_content(
+                fake_session, id, test_vnfd, {}, {"Content-Type": []}
+            )
+
+    @patch("osm_nbi.descriptor_topics.shutil")
+    @patch("osm_nbi.descriptor_topics.os.rename")
+    def test_new_vnfd_valid_helm_chart(self, mock_rename, mock_shutil):
+        id, test_vnfd = self.prepare_vnfd(vnfd_exploit_fixed_text)
+
+        with self.assertNotRaises():
+            self.topic.upload_content(
+                fake_session, id, test_vnfd, {}, {"Content-Type": []}
+            )
+
     @patch("osm_nbi.descriptor_topics.shutil")
     @patch("osm_nbi.descriptor_topics.os.rename")
     def test_new_vnfd_check_pyangbind_validation_additional_properties(
@@ -228,10 +276,12 @@ class Test_VnfdTopic(TestCase):
         )
         test_vnfd["_id"] = did
         test_vnfd["extra-property"] = 0
-        self.db.get_one.side_effect = lambda table, filter, fail_on_empty=None, fail_on_more=None: {
-            "_id": did,
-            "_admin": deepcopy(db_vnfd_content["_admin"]),
-        }
+        self.db.get_one.side_effect = (
+            lambda table, filter, fail_on_empty=None, fail_on_more=None: {
+                "_id": did,
+                "_admin": deepcopy(db_vnfd_content["_admin"]),
+            }
+        )
 
         with self.assertRaises(
             EngineException, msg="Accepted VNFD with an additional property"
@@ -1441,8 +1491,8 @@ class Test_NsdTopic(TestCase):
         did = db_nsd_content["_id"]
         self.fs.get_params.return_value = {}
         self.fs.file_exists.return_value = False
-        self.fs.file_open.side_effect = lambda path, mode: open(
-            "/tmp/" + str(uuid4()), "a+b"
+        self.fs.file_open.side_effect = lambda path, mode: tempfile.TemporaryFile(
+            mode="a+b"
         )
         self.db.get_one.side_effect = [
             {"_id": did, "_admin": deepcopy(db_nsd_content["_admin"])},
@@ -1944,7 +1994,6 @@ class Test_NsdTopic(TestCase):
             {"ns-configuration": {"config-primitive": [{"name": "del-user"}]}}
         )
 
-
         with self.assertNotRaises(EngineException):
             self.topic._validate_descriptor_changes(
                 old_nsd["_id"], descriptor_name, "/tmp", "/tmp:1"
@@ -2149,6 +2198,61 @@ class Test_NsdTopic(TestCase):
             "Wrong exception text",
         )
 
+    def test_validate_vnffgd_descriptor_on_valid_descriptor(self):
+        indata = yaml.safe_load(db_sfc_nsds_text)[0]
+        vnffgd = indata.get("vnffgd")
+        fg = vnffgd[0]
+        self.topic.validate_vnffgd_data(fg, indata)
+
+    def test_validate_vnffgd_descriptor_not_matching_nfp_position_element(self):
+        indata = yaml.safe_load(db_sfc_nsds_text)[0]
+        vnffgd = indata.get("vnffgd")
+        fg = vnffgd[0]
+        nfpd = fg.get("nfpd")[0]
+        with self.assertRaises(EngineException) as e:
+            fg.update({"nfp-position-element": [{"id": "test1"}]})
+            self.topic.validate_vnffgd_data(fg, indata)
+        self.assertEqual(
+            e.exception.http_code,
+            HTTPStatus.UNPROCESSABLE_ENTITY,
+            "Wrong HTTP status code",
+        )
+        self.assertIn(
+            norm(
+                "Error at vnffgd nfpd[id='{}']:nfp-position-element-id='{}' "
+                "does not match any nfp-position-element".format(nfpd["id"], "test")
+            ),
+            norm(str(e.exception)),
+            "Wrong exception text",
+        )
+
+    def test_validate_vnffgd_descriptor_not_matching_constituent_base_element_id(
+        self,
+    ):
+        indata = yaml.safe_load(db_sfc_nsds_text)[0]
+        vnffgd = indata.get("vnffgd")
+        fg = vnffgd[0]
+        fg["nfpd"][0]["position-desc-id"][0]["cp-profile-id"][0][
+            "constituent-profile-elements"
+        ][0]["constituent-base-element-id"] = "error_vnf"
+        with self.assertRaises(EngineException) as e:
+            self.topic.validate_vnffgd_data(fg, indata)
+        self.assertEqual(
+            e.exception.http_code,
+            HTTPStatus.UNPROCESSABLE_ENTITY,
+            "Wrong HTTP status code",
+        )
+        self.assertIn(
+            norm(
+                "Error at vnffgd constituent_profile[id='{}']:vnfd-id='{}' "
+                "does not match any constituent-base-element-id".format(
+                    "vnf1", "error_vnf"
+                )
+            ),
+            norm(str(e.exception)),
+            "Wrong exception text",
+        )
+
 
 if __name__ == "__main__":
     unittest.main()
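
The test_new_vnfd_exploit / test_new_vnfd_valid_helm_chart pair above pins the expected behaviour: a kdu helm-chart value that smuggles extra command-line arguments (such as "--post-renderer /bin/bash") must be rejected, while a plain "repo/chart" reference is accepted. A rough sketch of such a check, using a hypothetical validate_kdu_helm_chart helper that is not the actual descriptor_topics implementation:

import re

from osm_nbi.engine import EngineException

# Accept only "chart" or "repo/chart" names; whitespace or option flags are rejected.
CHART_NAME_RE = re.compile(r"[A-Za-z0-9._-]+(/[A-Za-z0-9._-]+)?")


def validate_kdu_helm_chart(vnfd):
    """Illustrative check matching what the exploit tests expect."""
    for kdu in vnfd.get("kdu", []):
        if "helm-chart" not in kdu:
            continue  # e.g. juju-bundle based KDUs carry no helm-chart
        chart = kdu["helm-chart"]
        if not CHART_NAME_RE.fullmatch(chart):
            raise EngineException(
                "Invalid helm-chart '{}' in kdu '{}'".format(chart, kdu.get("name"))
            )
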
index 2c10632..b12a330 100644 (file)
@@ -18,7 +18,7 @@
 from contextlib import contextmanager
 import unittest
 from time import time
-from unittest.mock import Mock, mock_open   # patch, MagicMock
+from unittest.mock import Mock, mock_open  # patch, MagicMock
 from osm_common.dbbase import DbException
 from osm_nbi.engine import EngineException
 from osm_common.dbmemory import DbMemory
@@ -49,13 +49,11 @@ class TestNsLcmOpTopic(unittest.TestCase):
         self.nslcmop_topic = NsLcmOpTopic(self.db, self.fs, self.msg, None)
         self.nslcmop_topic.check_quota = Mock(return_value=None)  # skip quota
 
-        self.db.create_list(
-            "vim_accounts", yaml.load(db_vim_accounts_text, Loader=yaml.Loader)
-        )
-        self.db.create_list("nsds", yaml.load(db_nsds_text, Loader=yaml.Loader))
-        self.db.create_list("vnfds", yaml.load(db_vnfds_text, Loader=yaml.Loader))
-        self.db.create_list("vnfrs", yaml.load(db_vnfrs_text, Loader=yaml.Loader))
-        self.db.create_list("nsrs", yaml.load(db_nsrs_text, Loader=yaml.Loader))
+        self.db.create_list("vim_accounts", yaml.safe_load(db_vim_accounts_text))
+        self.db.create_list("nsds", yaml.safe_load(db_nsds_text))
+        self.db.create_list("vnfds", yaml.safe_load(db_vnfds_text))
+        self.db.create_list("vnfrs", yaml.safe_load(db_vnfrs_text))
+        self.db.create_list("nsrs", yaml.safe_load(db_nsrs_text))
         self.db.create = Mock(return_value="created_id")
         self.nsd = self.db.get_list("nsds")[0]
         self.nsd_id = self.nsd["_id"]
@@ -240,7 +238,7 @@ class TestNsLcmOpTopic(unittest.TestCase):
             "lcmOperationType": "update",
             "updateType": "REMOVE_VNF",
             "nsInstanceId": self.nsr_id,
-            "removeVnfInstanceId": vnfr_id
+            "removeVnfInstanceId": vnfr_id,
         }
 
         session = {
@@ -282,7 +280,7 @@ class TestNsLcmOpTopic(unittest.TestCase):
         )
 
     def test_migrate(self):
-        vnfr_id = self.db.get_list("vnfrs")[0]["_id"]
+        _ = self.db.get_list("vnfrs")[0]["_id"]
         session = {}
         self.db.set_one(
             "nsrs",
@@ -303,12 +301,9 @@ class TestNsLcmOpTopic(unittest.TestCase):
             indata = {
                 "lcmOperationType": "migrate",
                 "nsInstanceId": self.nsr_id,
-                "migrateToHost":"sample02",
-                "vdu": {
-                    "vduCountIndex": 0,
-                    "vduId": "mgmtVM"
-                },
-                "vnfInstanceId": "9e8006df-cdfa-4f63-bf6a-fce860d71c1f"
+                "migrateToHost": "sample02",
+                "vdu": {"vduCountIndex": 0, "vduId": "mgmtVM"},
+                "vnfInstanceId": "9e8006df-cdfa-4f63-bf6a-fce860d71c1f",
             }
             nslcmop_id, _ = self.nslcmop_topic.new(
                 rollback, session, indata, kwargs=None, headers=headers
@@ -337,7 +332,7 @@ class TestNsLcmOpTopic(unittest.TestCase):
             indata = {
                 "lcmOperationType": "migrate",
                 "nsInstanceId": self.nsr_id,
-                "vnfInstanceId": "9e8006df-cdfa-4f63-bf6a-fce860d71c1f"
+                "vnfInstanceId": "9e8006df-cdfa-4f63-bf6a-fce860d71c1f",
             }
             nslcmop_id, _ = self.nslcmop_topic.new(
                 rollback, session, indata, kwargs=None, headers=headers
@@ -366,18 +361,19 @@ class TestNsLcmOpTopic(unittest.TestCase):
             indata = {
                 "lcmOperationType": "migrate",
                 "nsInstanceId": self.nsr_id,
-                "migrateToHost":"sample02",
-                "vdu": {
-                    "vduCountIndex": 0
-                },
-                "vnfInstanceId": "9e8006df-cdfa-4f63-bf6a-fce860d71c1f"
+                "migrateToHost": "sample02",
+                "vdu": {"vduCountIndex": 0},
+                "vnfInstanceId": "9e8006df-cdfa-4f63-bf6a-fce860d71c1f",
             }
 
             with self.assertRaises(Exception) as e:
                 nslcmop_id, _ = self.nslcmop_topic.new(
-                rollback, session, indata, kwargs=None, headers=headers
+                    rollback, session, indata, kwargs=None, headers=headers
+                )
+            self.assertTrue(
+                "Format error at 'vdu' ''vduId' is a required property'"
+                in str(e.exception)
             )
-            self.assertTrue("Format error at 'vdu' ''vduId' is a required property'" in str(e.exception))
 
 
 class TestNsLcmOpTopicWithMock(unittest.TestCase):
@@ -391,21 +387,37 @@ class TestNsLcmOpTopicWithMock(unittest.TestCase):
         self.nslcmop_topic = NsLcmOpTopic(self.db, self.fs, self.msg, None)
 
     def test_get_vnfd_from_vnf_member_revision(self):
-        test_vnfr = yaml.load(db_vnfrs_text, Loader=yaml.Loader)[0]
-        test_vnfd = yaml.load(db_vnfds_text, Loader=yaml.Loader)
+        test_vnfr = yaml.safe_load(db_vnfrs_text)[0]
+        test_vnfd = yaml.safe_load(db_vnfds_text)
         self.db.get_one.side_effect = [test_vnfr, test_vnfd]
-        vnfr = self.nslcmop_topic._get_vnfd_from_vnf_member_index("1", test_vnfr['_id'])
-        self.assertEqual(self.db.get_one.call_args_list[0][0][0], 'vnfrs', "Incorrect first DB lookup")
-        self.assertEqual(self.db.get_one.call_args_list[1][0][0], 'vnfds', "Incorrect second DB lookup")
+        _ = self.nslcmop_topic._get_vnfd_from_vnf_member_index("1", test_vnfr["_id"])
+        self.assertEqual(
+            self.db.get_one.call_args_list[0][0][0],
+            "vnfrs",
+            "Incorrect first DB lookup",
+        )
+        self.assertEqual(
+            self.db.get_one.call_args_list[1][0][0],
+            "vnfds",
+            "Incorrect second DB lookup",
+        )
 
     def test_get_vnfd_from_vnf_member_no_revision(self):
-        test_vnfr = yaml.load(db_vnfrs_text, Loader=yaml.Loader)[0]
-        test_vnfr['revision'] = 3
-        test_vnfd = yaml.load(db_vnfds_text, Loader=yaml.Loader)
+        test_vnfr = yaml.safe_load(db_vnfrs_text)[0]
+        test_vnfr["revision"] = 3
+        test_vnfd = yaml.safe_load(db_vnfds_text)
         self.db.get_one.side_effect = [test_vnfr, test_vnfd]
-        vnfr = self.nslcmop_topic._get_vnfd_from_vnf_member_index("1", test_vnfr['_id'])
-        self.assertEqual(self.db.get_one.call_args_list[0][0][0], 'vnfrs', "Incorrect first DB lookup")
-        self.assertEqual(self.db.get_one.call_args_list[1][0][0], 'vnfds_revisions', "Incorrect second DB lookup")
+        _ = self.nslcmop_topic._get_vnfd_from_vnf_member_index("1", test_vnfr["_id"])
+        self.assertEqual(
+            self.db.get_one.call_args_list[0][0][0],
+            "vnfrs",
+            "Incorrect first DB lookup",
+        )
+        self.assertEqual(
+            self.db.get_one.call_args_list[1][0][0],
+            "vnfds_revisions",
+            "Incorrect second DB lookup",
+        )
 
     @contextmanager
     def assertNotRaises(self, exception_type):
@@ -420,9 +432,9 @@ class TestNsLcmOpTopicWithMock(unittest.TestCase):
         session = {}
 
         with self.subTest(i=1, t="VNF instance does not belong to NS"):
-            test_vnfr = yaml.load(db_vnfrs_text, Loader=yaml.Loader)
+            test_vnfr = yaml.safe_load(db_vnfrs_text)
             test_vnfr[0]["revision"] = 2
-            test_nsr = yaml.load(db_nsrs_text, Loader=yaml.Loader)
+            test_nsr = yaml.safe_load(db_nsrs_text)
             test_nsr[0]["constituent-vnfr-ref"][
                 0
             ] = "99d90b0c-faff-4b9f-bccd-017f33985984"
@@ -446,9 +458,9 @@ class TestNsLcmOpTopicWithMock(unittest.TestCase):
             )
 
         with self.subTest(i=2, t="Ns update request validated with no exception"):
-            test_vnfr = yaml.load(db_vnfrs_text, Loader=yaml.Loader)
+            test_vnfr = yaml.safe_load(db_vnfrs_text)
             test_vnfr[0]["revision"] = 2
-            test_nsr = yaml.load(db_nsrs_text, Loader=yaml.Loader)
+            test_nsr = yaml.safe_load(db_nsrs_text)
             self.db.create_list("vnfrs", test_vnfr)
             self.db.create_list("nsrs", test_nsr)
             nsrs = self.db.get_list("nsrs")[1]
@@ -484,9 +496,9 @@ class TestNsLcmOpTopicWithMock(unittest.TestCase):
             )
 
         with self.subTest(i=4, t="wrong vnfdid is given as an update parameter"):
-            test_vnfr = yaml.load(db_vnfrs_text, Loader=yaml.Loader)
+            test_vnfr = yaml.safe_load(db_vnfrs_text)
             test_vnfr[0]["revision"] = 2
-            test_nsr = yaml.load(db_nsrs_text, Loader=yaml.Loader)
+            test_nsr = yaml.safe_load(db_nsrs_text)
             self.db.create_list("vnfrs", test_vnfr)
             self.db.create_list("nsrs", test_nsr)
             nsrs = self.db.get_list("nsrs")[2]
@@ -507,10 +519,12 @@ class TestNsLcmOpTopicWithMock(unittest.TestCase):
                 "VNF instance: 88d90b0c-faff-4b9f-bccd-017f33985984",
             )
 
-        with self.subTest(i=5, t="Ns update REMOVE_VNF request validated with no exception"):
-            test_vnfr = yaml.load(db_vnfrs_text, Loader=yaml.Loader)
+        with self.subTest(
+            i=5, t="Ns update REMOVE_VNF request validated with no exception"
+        ):
+            test_vnfr = yaml.safe_load(db_vnfrs_text)
             test_vnfr[0]["revision"] = 2
-            test_nsr = yaml.load(db_nsrs_text, Loader=yaml.Loader)
+            test_nsr = yaml.safe_load(db_nsrs_text)
             self.db.create_list("vnfrs", test_vnfr)
             self.db.create_list("nsrs", test_nsr)
             nsrs = self.db.get_list("nsrs")[1]
@@ -522,6 +536,7 @@ class TestNsLcmOpTopicWithMock(unittest.TestCase):
             with self.assertNotRaises(EngineException):
                 self.nslcmop_topic._check_ns_operation(session, nsrs, "update", indata)
 
+
 class TestNsrTopic(unittest.TestCase):
     def setUp(self):
         self.db = DbMemory()
@@ -533,11 +548,9 @@ class TestNsrTopic(unittest.TestCase):
         self.nsr_topic = NsrTopic(self.db, self.fs, self.msg, None)
         self.nsr_topic.check_quota = Mock(return_value=None)  # skip quota
 
-        self.db.create_list(
-            "vim_accounts", yaml.load(db_vim_accounts_text, Loader=yaml.Loader)
-        )
-        self.db.create_list("nsds", yaml.load(db_nsds_text, Loader=yaml.Loader))
-        self.db.create_list("vnfds", yaml.load(db_vnfds_text, Loader=yaml.Loader))
+        self.db.create_list("vim_accounts", yaml.safe_load(db_vim_accounts_text))
+        self.db.create_list("nsds", yaml.safe_load(db_nsds_text))
+        self.db.create_list("vnfds", yaml.safe_load(db_vnfds_text))
         self.db.create = Mock(return_value="created_id")
         self.nsd = self.db.get_list("nsds")[0]
         self.nsd_id = self.nsd["_id"]
@@ -684,57 +697,80 @@ class TestNsrTopic(unittest.TestCase):
                 self.assertTrue(e.exception.http_code == expect_code)
             if expect_text_list:
                 for expect_text in expect_text_list:
-                    self.assertIn(expect_text, str(e.exception).lower(),
-                                  "Expected '{}' at exception text".format(expect_text))
+                    self.assertIn(
+                        expect_text,
+                        str(e.exception).lower(),
+                        "Expected '{}' at exception text".format(expect_text),
+                    )
 
     def test_show_instance(self):
-        session = {"force": False, "admin": False, "public": False, "project_id": [self.nsd_project], "method": "write"}
+        session = {
+            "force": False,
+            "admin": False,
+            "public": False,
+            "project_id": [self.nsd_project],
+            "method": "write",
+        }
         filter_q = {}
         for refresh_status in ("true", "false"):
-            self.db.create_list("nsrs", yaml.load(db_nsrs_text, Loader=yaml.Loader))
+            self.db.create_list("nsrs", yaml.safe_load(db_nsrs_text))
             actual_nsr = self.db.get_list("nsrs")[0]
             nsr_id = actual_nsr["_id"]
-            filter_q['vcaStatus-refresh'] = refresh_status
+            filter_q["vcaStatus-refresh"] = refresh_status
             expected_nsr = self.nsr_topic.show(session, nsr_id, filter_q=filter_q)
             self.nsr_topic.delete(session, nsr_id)
             actual_nsr.pop("_admin")
             expected_nsr.pop("_admin")
-            self.assertEqual(expected_nsr, actual_nsr, "Database nsr and show() nsr do not match.")
+            self.assertEqual(
+                expected_nsr, actual_nsr, "Database nsr and show() nsr do not match."
+            )
 
     def test_vca_status_refresh(self):
-        session = {"force": False, "admin": False, "public": False, "project_id": [self.nsd_project], "method": "write"}
-        filter_q = {'vcaStatus-refresh': 'true'}
+        session = {
+            "force": False,
+            "admin": False,
+            "public": False,
+            "project_id": [self.nsd_project],
+            "method": "write",
+        }
+        filter_q = {"vcaStatus-refresh": "true"}
         time_delta = 120
-        self.db.create_list("nsrs", yaml.load(db_nsrs_text, Loader=yaml.Loader))
+        self.db.create_list("nsrs", yaml.safe_load(db_nsrs_text))
         nsr = self.db.get_list("nsrs")[0]
 
         # When vcaStatus-refresh is true
-        filter_q['vcaStatus-refresh'] = "true"
+        filter_q["vcaStatus-refresh"] = "true"
         self.nsr_topic.vca_status_refresh(session, nsr, filter_q)
         msg_args = self.msg.write.call_args[0]
         self.assertEqual(msg_args[1], "vca_status_refresh", "Wrong message action")
         self.assertGreater(nsr["_admin"]["modified"], time() - time_delta)
 
         # When vcaStatus-refresh is false but modified time is within threshold
-        filter_q['vcaStatus-refresh'] = "false"
+        filter_q["vcaStatus-refresh"] = "false"
         time_now = time()
         nsr["_admin"]["modified"] = time_now
         self.nsr_topic.vca_status_refresh(session, nsr, filter_q)
         msg_args = self.msg.write.call_args[1]
         self.assertEqual(msg_args, {}, "Message should not be sent.")
-        self.assertEqual(nsr["_admin"]["modified"], time_now, "Modified time should not be changed.")
+        self.assertEqual(
+            nsr["_admin"]["modified"], time_now, "Modified time should not be changed."
+        )
 
         # When vcaStatus-refresh is false but modified time is less than threshold
-        filter_q['vcaStatus-refresh'] = "false"
-        nsr["_admin"]["modified"] = time() - (2*time_delta)
+        filter_q["vcaStatus-refresh"] = "false"
+        nsr["_admin"]["modified"] = time() - (2 * time_delta)
         self.nsr_topic.vca_status_refresh(session, nsr, filter_q)
         msg_args = self.msg.write.call_args[0]
         self.assertEqual(msg_args[1], "vca_status_refresh", "Wrong message action")
         self.nsr_topic.delete(session, nsr["_id"])
-        self.assertGreater(nsr["_admin"]["modified"], time() - time_delta, "Modified time is not changed.")
+        self.assertGreater(
+            nsr["_admin"]["modified"],
+            time() - time_delta,
+            "Modified time is not changed.",
+        )
 
     def test_delete_ns(self):
-        self.db.create_list("nsrs", yaml.load(db_nsrs_text, Loader=yaml.Loader))
+        self.db.create_list("nsrs", yaml.safe_load(db_nsrs_text))
         self.nsr = self.db.get_list("nsrs")[0]
         self.nsr_id = self.nsr["_id"]
         self.db_set_one = self.db.set_one
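
For orientation, test_vca_status_refresh above encodes a simple staleness rule: refresh when the request asks for it explicitly, or when the stored status is older than some threshold (the 120 s used in the test is only its tolerance, not necessarily the value inside NsrTopic). A hedged sketch of that decision:

from time import time

STATUS_STALE_AFTER = 120  # seconds; illustrative, taken from the test's time_delta


def needs_vca_status_refresh(nsr, filter_q):
    """Illustrative only: mirrors the three sub-cases of test_vca_status_refresh."""
    if filter_q.get("vcaStatus-refresh") == "true":
        return True
    return (time() - nsr["_admin"]["modified"]) > STATUS_STALE_AFTER
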
index a7ea8a3..f4e6e63 100644 (file)
@@ -29,7 +29,7 @@ from osm_nbi.tests.test_db_descriptors import (
     db_nsds_text,
     db_nsrs_text,
     db_vnfrs_text,
-    db_nslcmops_text
+    db_nslcmops_text,
 )
 import yaml
 
@@ -41,10 +41,8 @@ class TestVnfInstances(unittest.TestCase):
         self.msg = Mock(MsgBase())
         self.vnfinstances = VnfInstances(self.db, self.fs, self.msg, None)
         self.nsrtopic = NsrTopic(self.db, self.fs, self.msg, None)
-        self.db.create_list(
-            "vim_accounts", yaml.load(db_vim_accounts_text, Loader=yaml.Loader)
-        )
-        self.db.create_list("vnfds", yaml.load(db_vnfm_vnfd_text, Loader=yaml.Loader))
+        self.db.create_list("vim_accounts", yaml.safe_load(db_vim_accounts_text))
+        self.db.create_list("vnfds", yaml.safe_load(db_vnfm_vnfd_text))
         self.vnfd = self.db.get_list("vnfds")[0]
         self.vnfd_id = self.vnfd["id"]
         self.vnfd_project = self.vnfd["_admin"]["projects_read"][0]
@@ -68,43 +66,36 @@ class TestVnfInstances(unittest.TestCase):
             "vnfInstanceDescription": "vnf instance description",
             "vimAccountId": self.vim_id,
             "additionalParams": {
-                "virtual-link-desc": [
-                    {
-                        "id": "mgmt-net",
-                        "mgmt-network": True
-                    }
-                ],
+                "virtual-link-desc": [{"id": "mgmt-net", "mgmt-network": True}],
                 "constituent-cpd-id": "vnf-cp0-ext",
-                "virtual-link-profile-id": "mgmt-net"
-            }
+                "virtual-link-profile-id": "mgmt-net",
+            },
         }
         rollback = []
-        headers = {}
         self.fs.path = ""
         self.fs.get_params.return_value = {}
         self.fs.file_exists.return_value = False
         self.fs.file_open.side_effect = lambda path, mode: open(
-                "/tmp/" + str(uuid4()), "a+b"
-                )
+            "/tmp/" + str(uuid4()), "a+b"
+        )
 
         vnfr_id, _ = self.vnfinstances.new(
             rollback, session, indata, {}, headers={"Content-Type": []}
         )
         vnfr = self.db.get_one("vnfrs")
         self.assertEqual(
-                vnfr_id,
-                vnfr["id"],
-                "Mismatch between return id and database id"
-                )
+            vnfr_id, vnfr["id"], "Mismatch between return id and database id"
+        )
         self.assertEqual(
-                "NOT_INSTANTIATED",
-                vnfr["_admin"]["nsState"],
-                "Database record must contain 'nsState' NOT_INSTANTIATED"
-                )
+            "NOT_INSTANTIATED",
+            vnfr["_admin"]["nsState"],
+            "Database record must contain 'nsState' NOT_INSTANTIATED",
+        )
         self.assertEqual(
-                self.vnfd_id,
-                vnfr["vnfd-ref"],
-                "vnfr record is not properly created for the given vnfd")
+            self.vnfd_id,
+            vnfr["vnfd-ref"],
+            "vnfr record is not properly created for the given vnfd",
+        )
 
     def test_show_vnfinstance(self):
         session = {
@@ -115,14 +106,14 @@ class TestVnfInstances(unittest.TestCase):
             "method": "write",
         }
         filter_q = {}
-        self.db.create_list("vnfrs", yaml.load(db_vnfrs_text, Loader=yaml.Loader))
+        self.db.create_list("vnfrs", yaml.safe_load(db_vnfrs_text))
         actual_vnfr = self.db.get_list("vnfrs")[0]
         id = actual_vnfr["_id"]
         expected_vnfr = self.vnfinstances.show(session, id, filter_q)
         self.assertEqual(
             actual_vnfr["_id"],
             expected_vnfr["_id"],
-            "Mismatch between return vnfr Id and database vnfr Id"
+            "Mismatch between return vnfr Id and database vnfr Id",
         )
 
     def test_delete_vnfinstance(self):
@@ -133,9 +124,9 @@ class TestVnfInstances(unittest.TestCase):
             "project_id": [self.vnfd_project],
             "method": "delete",
         }
-        self.db.create_list("vnfrs", yaml.load(db_vnfrs_text, Loader=yaml.Loader))
-        self.db.create_list("nsrs", yaml.load(db_nsrs_text, Loader=yaml.Loader))
-        self.db.create_list("nsds", yaml.load(db_nsds_text, Loader=yaml.Loader))
+        self.db.create_list("vnfrs", yaml.safe_load(db_vnfrs_text))
+        self.db.create_list("nsrs", yaml.safe_load(db_nsrs_text))
+        self.db.create_list("nsds", yaml.safe_load(db_nsds_text))
 
         self.vnfr = self.db.get_list("vnfrs")[0]
         self.vnfr_id = self.vnfr["_id"]
@@ -158,13 +149,11 @@ class TestVnfLcmOpTopic(unittest.TestCase):
         self.vnflcmop_topic = VnfLcmOpTopic(self.db, self.fs, self.msg, None)
         self.vnflcmop_topic.check_quota = Mock(return_value=None)  # skip quota
 
-        self.db.create_list(
-            "vim_accounts", yaml.load(db_vim_accounts_text, Loader=yaml.Loader)
-        )
-        self.db.create_list("nsds", yaml.load(db_nsds_text, Loader=yaml.Loader))
-        self.db.create_list("vnfds", yaml.load(db_vnfm_vnfd_text, Loader=yaml.Loader))
-        self.db.create_list("vnfrs", yaml.load(db_vnfrs_text, Loader=yaml.Loader))
-        self.db.create_list("nsrs", yaml.load(db_nsrs_text, Loader=yaml.Loader))
+        self.db.create_list("vim_accounts", yaml.safe_load(db_vim_accounts_text))
+        self.db.create_list("nsds", yaml.safe_load(db_nsds_text))
+        self.db.create_list("vnfds", yaml.safe_load(db_vnfm_vnfd_text))
+        self.db.create_list("vnfrs", yaml.safe_load(db_vnfrs_text))
+        self.db.create_list("nsrs", yaml.safe_load(db_nsrs_text))
 
         self.vnfd = self.db.get_list("vnfds")[0]
         self.vnfd_id = self.vnfd["_id"]
@@ -190,7 +179,7 @@ class TestVnfLcmOpTopic(unittest.TestCase):
             "vnfName": "vnf_instance_name",
             "vnfDescription": "vnf instance description",
             "vnfId": self.vnfd_id,
-            "vimAccountId": self.vim_id
+            "vimAccountId": self.vim_id,
         }
         rollback = []
         headers = {}
@@ -216,7 +205,7 @@ class TestVnfLcmOpTopic(unittest.TestCase):
             "project_id": [self.vnfd_project],
             "method": "write",
         }
-        self.db.create_list("nslcmops", yaml.load(db_nslcmops_text, Loader=yaml.Loader))
+        self.db.create_list("nslcmops", yaml.safe_load(db_nslcmops_text))
         filter_q = {}
         actual_lcmop = self.db.get_list("nslcmops")[0]
         id = actual_lcmop["_id"]
@@ -224,4 +213,8 @@ class TestVnfLcmOpTopic(unittest.TestCase):
         vnfr_id = vnfr["_id"]
         vnflcmop = self.vnflcmop_topic.show(session, id, filter_q)
         _id = vnflcmop["vnfInstanceId"]
-        self.assertEqual(_id, vnfr_id, "Mismatch between vnflcmop's vnfInstanceId and database vnfr's id")
+        self.assertEqual(
+            _id,
+            vnfr_id,
+            "Mismatch between vnflcmop's vnfInstanceId and database vnfr's id",
+        )
index 91e5641..d77b79f 100644 (file)
 __author__ = "Pedro de la Cruz Ramos, pedro.delacruzramos@altran.com"
 __date__ = "2019-11-20"
 
+
+# Exploit exists in the key kdu.helm-chart
+vnfd_exploit_text = """
+  _id: 00000000-0000-0000-0000-000000000000
+  id: n2vc-rce_vnfd
+  df:
+  - id: default-df
+  kdu:
+  - name: exploit
+    helm-chart: "local/exploit --post-renderer /bin/bash"
+    helm-version: v3
+"""
+
+# Exploit in kdu.helm-chart is fixed
+vnfd_exploit_fixed_text = """
+  id: n2vc-rce_vnfd
+  df:
+  - id: default-df
+  kdu:
+  - name: exploit
+    helm-chart: "local/exploit"
+    helm-version: v3
+"""
+
 db_vnfds_text = """
 ---
 -   _admin:
@@ -49,7 +73,7 @@ db_vnfds_text = """
     product-name: hackfest3charmed-vnf
     version: '1.0'
     mgmt-cp: vnf-mgmt-ext
-  
+
     virtual-compute-desc:
       - id: mgmt-compute
         virtual-cpu:
@@ -61,17 +85,17 @@ db_vnfds_text = """
           num-virtual-cpu: 2
         virtual-memory:
           size: '2'
-  
+
     virtual-storage-desc:
       - id: mgmt-storage
         size-of-storage: '20'
       - id: data-storage
         size-of-storage: '20'
-  
+
     sw-image-desc:
       - id: hackfest3-mgmt
         name: hackfest3-mgmt
-  
+
     vdu:
       - id: mgmtVM
         name: mgmtVM
@@ -118,10 +142,10 @@ db_vnfds_text = """
           - id: dataVM_cpu_util
             name: dataVM_cpu_util
             performance-metric: cpu_utilization
-  
+
     int-virtual-link-desc:
       - id: internal
-  
+
     ext-cpd:
       - id: vnf-mgmt-ext
         int-cpd: # Connection to int-cpd
@@ -131,7 +155,7 @@ db_vnfds_text = """
         int-cpd: # Connection to int-cpd
           vdu-id: dataVM
           cpd: vnf-data
-  
+
     df:
       - id: hackfest_default
         vdu-profile:
@@ -271,3 +295,77 @@ db_nsds_text = """
                   - constituent-base-element-id: hackfest_vnf2
                     constituent-cpd-id: vnf-data-ext
 """
+
+db_sfc_nsds_text = """
+- _admin:
+    userDefinedData: {}
+    revision: 1
+    created: 1683713524.2696395
+    modified: 1683713524.3553684
+    projects_read:
+      - 93601899-b310-4a56-a765-91539d5f675d
+    projects_write:
+      - 93601899-b310-4a56-a765-91539d5f675d
+    onboardingState: ONBOARDED
+    operationalState: ENABLED
+    usageState: NOT_IN_USE
+    storage:
+      fs: mongo
+      path: /app/storage/
+      folder: '2eb45633-03e3-4909-a87d-a564f5943948:1'
+      pkg-dir: cirros_vnffg_ns
+      descriptor: cirros_vnffg_ns/cirros_vnffg_nsd.yaml
+      zipfile: package.tar.gz
+  _id: 2eb45633-03e3-4909-a87d-a564f5943948
+  id: cirros_vnffg-ns
+  designer: OSM
+  version: '1.0'
+  name: cirros_vnffg-ns
+
+  vnfd-id:
+    - cirros_vnffg-vnf
+
+  virtual-link-desc:
+    - id: osm-ext
+      mgmt-network: true
+
+  vnffgd:
+    - id: vnffg1
+      vnf-profile-id:
+        - Mid-vnf1
+      nfpd:
+        - id: forwardingpath1
+          position-desc-id:
+            - id: position1
+              cp-profile-id:
+                - id: cpprofile2
+                  constituent-profile-elements:
+                    - id: vnf1
+                      order: 0
+                      constituent-base-element-id: Mid-vnf1
+                      ingress-constituent-cpd-id: vnf-cp0-ext
+                      egress-constituent-cpd-id: vnf-cp0-ext
+              match-attributes:
+                - id: rule1_80
+                  ip-proto: 6
+                  source-ip-address: 20.20.1.2
+                  destination-ip-address: 20.20.3.5
+                  source-port: 0
+                  destination-port: 80
+              nfp-position-element-id:
+                - test
+      nfp-position-element:
+        - id: test
+
+  df:
+    - id: default-df
+      vnf-profile:
+        - id: '1'
+          virtual-link-connectivity:
+            - constituent-cpd-id:
+                - constituent-base-element-id: '1'
+                  constituent-cpd-id: eth0-ext
+              virtual-link-profile-id: osm-ext
+          vnfd-id: cirros_vnffg-vnf
+  description: Simple NS example with vnffgd
+"""
index 231818b..e5605c3 100644 (file)
@@ -43,10 +43,10 @@ class PmJobsTopicTest(asynctest.TestCase):
     def setUp(self):
         self.db = DbMemory()
         self.pmjobs_topic = PmJobsTopic(self.db, host="prometheus", port=9091)
-        self.db.create_list("nsds", yaml.load(db_nsds_text, Loader=yaml.Loader))
-        self.db.create_list("vnfds", yaml.load(db_vnfds_text, Loader=yaml.Loader))
-        self.db.create_list("vnfrs", yaml.load(db_vnfrs_text, Loader=yaml.Loader))
-        self.db.create_list("nsrs", yaml.load(db_nsrs_text, Loader=yaml.Loader))
+        self.db.create_list("nsds", yaml.safe_load(db_nsds_text))
+        self.db.create_list("vnfds", yaml.safe_load(db_vnfds_text))
+        self.db.create_list("vnfrs", yaml.safe_load(db_vnfrs_text))
+        self.db.create_list("nsrs", yaml.safe_load(db_nsrs_text))
         self.nsr = self.db.get_list("nsrs")[0]
         self.nsr_id = self.nsr["_id"]
         project_id = self.nsr["_admin"]["projects_write"]
@@ -80,18 +80,18 @@ class PmJobsTopicTest(asynctest.TestCase):
         for metric in metric_list:
             endpoint = re.sub(r"metric_name", metric, site)
             if metric == "cpu_utilization":
-                response = yaml.load(cpu_utilization, Loader=yaml.Loader)
+                response = yaml.safe_load(cpu_utilization)
             elif metric == "users":
-                response = yaml.load(users, Loader=yaml.Loader)
+                response = yaml.safe_load(users)
             elif metric == "load":
-                response = yaml.load(load, Loader=yaml.Loader)
+                response = yaml.safe_load(load)
             else:
-                response = yaml.load(empty, Loader=yaml.Loader)
+                response = yaml.safe_load(empty)
             mock_res.get(endpoint, payload=response)
 
     async def test_prom_metric_request(self):
         with self.subTest("Test case1 failed in test_prom"):
-            prom_response = yaml.load(prom_res, Loader=yaml.Loader)
+            prom_response = yaml.safe_load(prom_res)
             with aioresponses() as mock_res:
                 self.set_get_mock_res(mock_res, self.nsr_id, self.metric_check_list)
                 result = await self.pmjobs_topic._prom_metric_request(
@@ -109,7 +109,7 @@ class PmJobsTopicTest(asynctest.TestCase):
 
     def test_show(self):
         with self.subTest("Test case1 failed in test_show"):
-            show_response = yaml.load(show_res, Loader=yaml.Loader)
+            show_response = yaml.safe_load(show_res)
             with aioresponses() as mock_res:
                 self.set_get_mock_res(mock_res, self.nsr_id, self.metric_check_list)
                 result = self.pmjobs_topic.show(self.session, self.nsr_id)
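
The recurring change across these test modules, yaml.load(text, Loader=yaml.Loader) becoming yaml.safe_load(text), does not alter what the fixtures parse to; it only drops the ability to construct arbitrary Python objects from YAML tags. For the plain mappings and lists used here the two calls are interchangeable:

import yaml

doc = "- {_id: '1234', member-vnf-index-ref: '1'}"
assert yaml.safe_load(doc) == yaml.load(doc, Loader=yaml.Loader)
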
diff --git a/osm_nbi/tests/upload.py b/osm_nbi/tests/upload.py
deleted file mode 100755 (executable)
index dfd7302..0000000
+++ /dev/null
@@ -1,117 +0,0 @@
-#! /usr/bin/python3
-# -*- coding: utf-8 -*-
-
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#    http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
-# implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import getopt
-import sys
-import requests
-from os.path import getsize, basename
-from hashlib import md5
-
-__author__ = "Alfonso Tierno, alfonso.tiernosepulveda@telefonica.com"
-__date__ = "$2018-01-01$"
-__version__ = "0.1"
-version_date = "Jan 2018"
-
-
-def usage():
-    print("Usage: ", sys.argv[0], "[options]")
-    print("      --version: prints current version")
-    print("      -f|--file FILE: file to be sent")
-    print("      -h|--help: shows this help")
-    print("      -u|--url URL: complete server URL")
-    print("      -s|--chunk-size SIZE: size of chunks, by default 1000")
-    print("      -t|--token TOKEN: Authorizaton token, previously obtained from server")
-    print("      -v|--verbose print debug information, can be used several times")
-    return
-
-
-if __name__ == "__main__":
-    try:
-        # load parameters and configuration
-        opts, args = getopt.getopt(
-            sys.argv[1:],
-            "hvu:s:f:t:",
-            ["url=", "help", "version", "verbose", "file=", "chunk-size=", "token="],
-        )
-        url = None
-        chunk_size = 500
-        pkg_file = None
-        verbose = 0
-        token = None
-
-        for o, a in opts:
-            if o == "--version":
-                print("upload version " + __version__ + " " + version_date)
-                sys.exit()
-            elif o in ("-v", "--verbose"):
-                verbose += 1
-            elif o in ("-h", "--help"):
-                usage()
-                sys.exit()
-            elif o in ("-u", "--url"):
-                url = a
-            elif o in ("-s", "--chunk-size"):
-                chunk_size = int(a)
-            elif o in ("-f", "--file"):
-                pkg_file = a
-            elif o in ("-t", "--token"):
-                token = a
-            else:
-                assert False, "Unhandled option"
-        total_size = getsize(pkg_file)
-        index = 0
-        transaction_id = None
-        file_md5 = md5()
-        with open(pkg_file, "rb") as f:
-            headers = {
-                "Content-type": "application/gzip",
-                "Content-Filename": basename(pkg_file),
-                "Accept": "application/json",
-            }
-            if token:
-                headers["Authorization"] = token
-            while index < total_size:
-                chunk_data = f.read(chunk_size)
-                file_md5.update(chunk_data)
-                # payload = {"file_name": pkg_file, "chunk_data": base64.b64encode(chunk_data).decode("utf-8"),
-                #            "chunk_size": chunk_size}
-                if transaction_id:
-                    headers["Transaction-Id"] = transaction_id
-                if index + len(chunk_data) == total_size:
-                    headers["Content-File-MD5"] = file_md5.hexdigest()
-                #    payload["id"] = transaction_id
-                headers["Content-range"] = "bytes {}-{}/{}".format(
-                    index, index + len(chunk_data) - 1, total_size
-                )
-                # refers to rfc2616:  https://www.w3.org/Protocols/rfc2616/rfc2616-sec14.html
-                if verbose:
-                    print("TX chunk Headers: {}".format(headers))
-                r = requests.post(url, data=chunk_data, headers=headers, verify=False)
-                if r.status_code not in (200, 201):
-                    print("Got {}: {}".format(r.status_code, r.text))
-                    exit(1)
-                if verbose > 1:
-                    print("RX {}: {}".format(r.status_code, r.text))
-                response = r.json()
-                if not transaction_id:
-                    transaction_id = response["id"]
-                index += len(chunk_data)
-            if verbose <= 1:
-                print("RX {}: {}".format(r.status_code, r.text))
-            if "id" in response:
-                print("---\nid: {}".format(response["id"]))
-    except Exception:
-        raise
index 73fc40f..9b48ee8 100644 (file)
@@ -21,6 +21,8 @@
 # For those usages not covered by the Apache License, Version 2.0 please
 # contact: fbravo@whitestack.com or agarcia@whitestack.com
 ##
+from cefevent import CEFEvent
+from osm_nbi import version
 
 
 def find_in_list(the_list, condition_lambda):
@@ -64,3 +66,29 @@ def deep_update_dict(data, updated_data):
         return data
 
     return data
+
+
+def cef_event(cef_logger, cef_fields):
+    for key, value in cef_fields.items():
+        cef_logger.set_field(key, value)
+
+
+def cef_event_builder(config):
+    cef_logger = CEFEvent()
+    cef_fields = {
+        "version": config["version"],
+        "deviceVendor": config["deviceVendor"],
+        "deviceProduct": config["deviceProduct"],
+        "deviceVersion": get_version(),
+        "message": "CEF Logger",
+        "sourceUserName": "admin",
+        "severity": 1,
+    }
+    cef_event(cef_logger, cef_fields)
+    cef_logger.build_cef()
+    return cef_logger
+
+
+def get_version():
+    osm_version = version.split("+")
+    return osm_version[0]
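
A possible use of the new CEF helpers, mirroring the config keys that the test_user_management case supplies (the field values here are illustrative):

from osm_nbi.utils import cef_event, cef_event_builder

config = {"version": "dev", "deviceVendor": "test", "deviceProduct": "test"}
cef_logger = cef_event_builder(config)  # pre-populates vendor/product/version fields
cef_event(cef_logger, {"sourceUserName": "someuser", "message": "login", "severity": 3})
print(cef_logger.build_cef())  # CEF-formatted audit record
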
index a17e241..620272f 100644 (file)
@@ -35,6 +35,10 @@ shortname_schema = {
     "pattern": "^[^,;()\\.\\$'\"]+$",
 }
 passwd_schema = {"type": "string", "minLength": 1, "maxLength": 60}
+user_passwd_schema = {
+    "type": "string",
+    "pattern": "^.*(?=.{8,})((?=.*[!@#$%^&*()\\-_=+{};:,<.>]){1})(?=.*\\d)((?=.*[a-z]){1})((?=.*[A-Z]){1}).*$",
+}
 name_schema = {
     "type": "string",
     "minLength": 1,
@@ -96,7 +100,7 @@ ip_schema = {
 }
 ipv6_schema = {
     "type": "string",
-    "pattern": "(([0-9a-fA-F]{1,4}:){7,7}[0-9a-fA-F]{1,4}|([0-9a-fA-F]{1,4}:){1,7}:|([0-9a-fA-F]{1,4}:){1,6}:[0-9a-fA-F]{1,4}|([0-9a-fA-F]{1,4}:){1,5}(:[0-9a-fA-F]{1,4}){1,2}|([0-9a-fA-F]{1,4}:){1,4}(:[0-9a-fA-F]{1,4}){1,3}|([0-9a-fA-F]{1,4}:){1,3}(:[0-9a-fA-F]{1,4}){1,4}|([0-9a-fA-F]{1,4}:){1,2}(:[0-9a-fA-F]{1,4}){1,5}|[0-9a-fA-F]{1,4}:((:[0-9a-fA-F]{1,4}){1,6})|:((:[0-9a-fA-F]{1,4}){1,7}|:)|fe80:(:[0-9a-fA-F]{0,4}){0,4}%[0-9a-zA-Z]{1,}|::(ffff(:0{1,4}){0,1}:){0,1}((25[0-5]|(2[0-4]|1{0,1}[0-9]){0,1}[0-9])\.){3,3}(25[0-5]|(2[0-4]|1{0,1}[0-9]){0,1}[0-9])|([0-9a-fA-F]{1,4}:){1,4}:((25[0-5]|(2[0-4]|1{0,1}[0-9]){0,1}[0-9])\.){3,3}(25[0-5]|(2[0-4]|1{0,1}[0-9]){0,1}[0-9]))",
+    "pattern": "(([0-9a-fA-F]{1,4}:){7,7}[0-9a-fA-F]{1,4}|([0-9a-fA-F]{1,4}:){1,7}:|([0-9a-fA-F]{1,4}:){1,6}:[0-9a-fA-F]{1,4}|([0-9a-fA-F]{1,4}:){1,5}(:[0-9a-fA-F]{1,4}){1,2}|([0-9a-fA-F]{1,4}:){1,4}(:[0-9a-fA-F]{1,4}){1,3}|([0-9a-fA-F]{1,4}:){1,3}(:[0-9a-fA-F]{1,4}){1,4}|([0-9a-fA-F]{1,4}:){1,2}(:[0-9a-fA-F]{1,4}){1,5}|[0-9a-fA-F]{1,4}:((:[0-9a-fA-F]{1,4}){1,6})|:((:[0-9a-fA-F]{1,4}){1,7}|:)|fe80:(:[0-9a-fA-F]{0,4}){0,4}%[0-9a-zA-Z]{1,}|::(ffff(:0{1,4}){0,1}:){0,1}((25[0-5]|(2[0-4]|1{0,1}[0-9]){0,1}[0-9])\.){3,3}(25[0-5]|(2[0-4]|1{0,1}[0-9]){0,1}[0-9])|([0-9a-fA-F]{1,4}:){1,4}:((25[0-5]|(2[0-4]|1{0,1}[0-9]){0,1}[0-9])\.){3,3}(25[0-5]|(2[0-4]|1{0,1}[0-9]){0,1}[0-9]))",  # noqa: W605
 }
 ip_prefix_schema = {
     "type": "string",
@@ -137,6 +141,7 @@ ns_instantiate_vdu = {
     "type": "object",
     "properties": {
         "id": name_schema,
+        "vim-flavor-id": name_schema,
         "volume": {
             "type": "array",
             "minItems": 1,
@@ -194,19 +199,6 @@ ip_profile_dhcp_schema = {
 }
 
 ip_profile_schema = {
-    "title": "ip profile validation schema",
-    "$schema": "http://json-schema.org/draft-04/schema#",
-    "type": "object",
-    "properties": {
-        "ip-version": {"enum": ["ipv4", "ipv6"]},
-        "subnet-address": ip_prefix_schema,
-        "gateway-address": ip_schema,
-        "dns-server": ip_profile_dns_schema,
-        "dhcp-params": ip_profile_dhcp_schema,
-    },
-}
-
-ip_profile_update_schema = {
     "title": "ip profile validation schema",
     "$schema": "http://json-schema.org/draft-04/schema#",
     "type": "object",
@@ -253,7 +245,7 @@ ns_instantiate_internal_vld = {
         "name": name_schema,
         "vim-network-name": name_schema,
         "vim-network-id": name_schema,
-        "ip-profile": ip_profile_update_schema,
+        "ip-profile": ip_profile_schema,
         "provider-network": provider_network_schema,
         "internal-connection-point": {
             "type": "array",
@@ -394,7 +386,7 @@ ns_instantiate = {
                     "vim-network-id": {"oneOf": [string_schema, object_schema]},
                     "ns-net": object_schema,
                     "wimAccountId": {"oneOf": [id_schema, bool_schema, null_schema]},
-                    "ip-profile": object_schema,
+                    "ip-profile": ip_profile_schema,
                     "provider-network": provider_network_schema,
                     "vnfd-connection-point-ref": {
                         "type": "array",
@@ -449,7 +441,12 @@ ns_update = {
         "nsInstanceId": id_schema,
         "timeout_ns_update": integer1_schema,
         "updateType": {
-            "enum": ["CHANGE_VNFPKG", "REMOVE_VNF", "MODIFY_VNF_INFORMATION", "OPERATE_VNF"]
+            "enum": [
+                "CHANGE_VNFPKG",
+                "REMOVE_VNF",
+                "MODIFY_VNF_INFORMATION",
+                "OPERATE_VNF",
+            ]
         },
         "modifyVnfInfoData": {
             "type": "object",
@@ -482,10 +479,10 @@ ns_update = {
                     },
                     "required": ["vdu_id", "count-index"],
                     "additionalProperties": False,
-                }
+                },
             },
             "required": ["vnfInstanceId", "changeStateTo"],
-        }
+        },
     },
     "required": ["updateType"],
     "additionalProperties": False,
@@ -556,16 +553,16 @@ ns_migrate = {
         "migrateToHost": string_schema,
         "vdu": {
             "type": "object",
-                "properties": {
-                    "vduId": name_schema,
-                    "vduCountIndex": integer0_schema,
-                },
-                "required": ["vduId"],
-                "additionalProperties": False,
+            "properties": {
+                "vduId": name_schema,
+                "vduCountIndex": integer0_schema,
+            },
+            "required": ["vduId"],
+            "additionalProperties": False,
         },
     },
     "required": ["vnfInstanceId"],
-    "additionalProperties": False
+    "additionalProperties": False,
 }
 
 ns_heal = {
@@ -633,17 +630,34 @@ ns_verticalscale = {
                         "virtualMemory": integer1_schema,
                         "sizeOfStorage": integer0_schema,
                         "numVirtualCpu": integer1_schema,
-                        },
-                    }
+                    },
                 },
+            },
             "required": ["vnfInstanceId", "additionalParams"],
             "additionalProperties": False,
-            }
         },
+    },
     "required": ["lcmOperationType", "verticalScale", "nsInstanceId"],
     "additionalProperties": False,
 }
 
+nslcmop_cancel = {
+    "title": "Cancel nslcmop input schema",
+    "$schema": "http://json-schema.org/draft-04/schema#",
+    "type": "object",
+    "properties": {
+        "nsLcmOpOccId": id_schema,
+        "cancelMode": {
+            "enum": [
+                "GRACEFUL",
+                "FORCEFUL",
+            ]
+        },
+    },
+    "required": ["cancelMode"],
+    "additionalProperties": False,
+}
+
 schema_version = {"type": "string", "enum": ["1.0"]}
 schema_type = {"type": "string"}
 vim_type = shortname_schema  # {"enum": ["openstack", "openvim", "vmware", "opennebula", "aws", "azure", "fos"]}
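
The new nslcmop_cancel schema above admits bodies like the ones below; only cancelMode is mandatory. Checked here with jsonschema against a trimmed-down copy of the schema (id_schema replaced by a plain string type for brevity):

from jsonschema import validate

nslcmop_cancel_trimmed = {
    "type": "object",
    "properties": {
        "nsLcmOpOccId": {"type": "string"},
        "cancelMode": {"enum": ["GRACEFUL", "FORCEFUL"]},
    },
    "required": ["cancelMode"],
    "additionalProperties": False,
}

validate({"cancelMode": "GRACEFUL"}, nslcmop_cancel_trimmed)
validate(
    {"nsLcmOpOccId": "c0ffee00-0000-4000-8000-000000000000", "cancelMode": "FORCEFUL"},
    nslcmop_cancel_trimmed,
)
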
@@ -818,12 +832,11 @@ k8scluster_deploy_method_schema = {
     "title": "Deployment methods for K8s cluster",
     "type": "object",
     "properties": {
-        "helm-chart": {"type": "boolean"},
         "juju-bundle": {"type": "boolean"},
         "helm-chart-v3": {"type": "boolean"},
     },
     "additionalProperties": False,
-    "minProperties": 3,
+    "minProperties": 2,
 }
 k8scluster_nets_schema = {
     "title": "k8scluster nets input schema",
@@ -933,6 +946,10 @@ k8srepo_properties = {
     "description": description_schema,
     "type": k8srepo_types,
     "url": description_schema,
+    "cacert": long_description_schema,
+    "user": string_schema,
+    "password": passwd_schema,
+    "oci": bool_schema,
 }
 k8srepo_new_schema = {
     "title": "k8scluster creation input schema",
@@ -1089,7 +1106,7 @@ user_new_schema = {
     "properties": {
         "username": string_schema,
         "domain_name": shortname_schema,
-        "password": passwd_schema,
+        "password": user_passwd_schema,
         "projects": nameshort_list_schema,
         "project_role_mappings": project_role_mappings,
     },
@@ -1101,13 +1118,16 @@ user_edit_schema = {
     "title": "User edit schema for administrators",
     "type": "object",
     "properties": {
-        "password": passwd_schema,
+        "password": user_passwd_schema,
         "old_password": passwd_schema,
         "username": string_schema,  # To allow User Name modification
         "projects": {"oneOf": [nameshort_list_schema, array_edition_schema]},
         "project_role_mappings": project_role_mappings,
         "add_project_role_mappings": project_role_mappings,
         "remove_project_role_mappings": project_role_mappings_optional,
+        "system_admin_id": id_schema,
+        "unlock": bool_schema,
+        "renew": bool_schema,
     },
     "minProperties": 1,
     "additionalProperties": False,
@@ -1436,32 +1456,46 @@ vnflcmsub_schema = {
                 "enum": [
                     "VnfIdentifierCreationNotification",
                     "VnfLcmOperationOccurrenceNotification",
-                    "VnfIdentifierDeletionNotification"
-                    ]
-            }
+                    "VnfIdentifierDeletionNotification",
+                ]
+            },
         },
         "operationTypes": {
             "type": "array",
             "items": {
                 "enum": [
-                    "INSTANTIATE", "SCALE", "SCALE_TO_LEVEL", "CHANGE_FLAVOUR", "TERMINATE",
-                    "HEAL", "OPERATE", "CHANGE_EXT_CONN", "MODIFY_INFO", "CREATE_SNAPSHOT",
-                    "REVERT_TO_SNAPSHOT", "CHANGE_VNFPKG"
-                    ]
-            }
+                    "INSTANTIATE",
+                    "SCALE",
+                    "SCALE_TO_LEVEL",
+                    "CHANGE_FLAVOUR",
+                    "TERMINATE",
+                    "HEAL",
+                    "OPERATE",
+                    "CHANGE_EXT_CONN",
+                    "MODIFY_INFO",
+                    "CREATE_SNAPSHOT",
+                    "REVERT_TO_SNAPSHOT",
+                    "CHANGE_VNFPKG",
+                ]
+            },
         },
         "operationStates": {
             "type": "array",
             "items": {
                 "enum": [
-                    "STARTING", "PROCESSING", "COMPLETED", "FAILED_TEMP", "FAILED",
-                    "ROLLING_BACK", "ROLLED_BACK"
-                    ]
-            }
-        }
+                    "STARTING",
+                    "PROCESSING",
+                    "COMPLETED",
+                    "FAILED_TEMP",
+                    "FAILED",
+                    "ROLLING_BACK",
+                    "ROLLED_BACK",
+                ]
+            },
+        },
     },
-    "required": ["VnfInstanceSubscriptionFilter", "notificationTypes"]
- }
+    "required": ["VnfInstanceSubscriptionFilter", "notificationTypes"],
+}
 
 vnf_subscription = {
     "title": "vnf subscription input schema",
@@ -1470,9 +1504,9 @@ vnf_subscription = {
     "properties": {
         "filter": vnflcmsub_schema,
         "CallbackUri": description_schema,
-        "authentication": authentication_schema
+        "authentication": authentication_schema,
     },
-    "required": ["filter", "CallbackUri"]
+    "required": ["filter", "CallbackUri"],
 }
 
 
diff --git a/osm_nbi/vnf_instance_topics.py b/osm_nbi/vnf_instance_topics.py
index ac5fe41..8550534 100644 (file)
@@ -22,7 +22,6 @@ from .osm_vnfm.vnf_instance_actions import VnfLcmOp2NsLcmOp
 
 
 class VnfInstances(BaseTopic):
-
     def __init__(self, db, fs, msg, auth):
         """
         Constructor call for vnf instance topic
@@ -40,7 +39,9 @@ class VnfInstances(BaseTopic):
         :param headers: http request headers
         :return: the _id of vnf instance created at database. Or an exception.
         """
-        return self.vnfinstances2nsinstances.new(rollback, session, indata, kwargs, headers)
+        return self.vnfinstances2nsinstances.new(
+            rollback, session, indata, kwargs, headers
+        )
 
     def list(self, session, filter_q=None, api_req=False):
         """
@@ -75,7 +76,6 @@ class VnfInstances(BaseTopic):
 
 
 class VnfLcmOpTopic(BaseTopic):
-
     def __init__(self, db, fs, msg, auth):
         """
         Constructor call for vnf lcm op topic
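The VnfInstances and VnfLcmOpTopic hunks above are formatting-only (black line wrapping), but they make the topic's structure easy to see: the VNFM-facing topic keeps no logic of its own and forwards each call to an internal translator object (self.vnfinstances2nsinstances). A minimal, self-contained sketch of that forwarding pattern, using hypothetical stand-in classes rather than the real NBI ones:

# Hypothetical stand-ins; names and signatures are illustrative only.
class NsBackend:
    def new(self, rollback, session, indata, kwargs, headers):
        # Pretend an NS instance was created and return its identifier.
        return "fake-ns-instance-id"


class VnfTopic:
    def __init__(self, backend):
        self.backend = backend

    def new(self, rollback, session, indata, kwargs=None, headers=None):
        # The topic holds no logic: it delegates to the translator object.
        return self.backend.new(rollback, session, indata, kwargs, headers)


print(VnfTopic(NsBackend()).new([], {"project_id": ["admin"]}, {"vnfdId": "x"}))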
diff --git a/pyangbind.patch b/pyangbind.patch
new file mode 100644 (file)
index 0000000..3077299
--- /dev/null
+++ b/pyangbind.patch
@@ -0,0 +1,46 @@
+#######################################################################################
+# Copyright ETSI Contributors and Others.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+# implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#######################################################################################
+
+*** .tox/cover/lib/python3.10/site-packages/pyangbind/lib/yangtypes.py 2023-05-10 06:50:57.876027148 -0400
+--- .tox/cover/lib/python3.10/site-packages/pyangbind/lib/yangtypes.py 2023-05-10 06:51:11.772022417 -0400
+*************** limitations under the License.
+*** 22,27 ****
+--- 22,28 ----
+  from __future__ import unicode_literals
+
+  import collections
++ from six.moves import collections_abc
+  import copy
+  import uuid
+  from decimal import Decimal
+*************** def TypedListType(*args, **kwargs):
+*** 372,378 ****
+    if not isinstance(allowed_type, list):
+      allowed_type = [allowed_type]
+
+!   class TypedList(collections.MutableSequence):
+      _pybind_generated_by = "TypedListType"
+      _list = list()
+
+--- 373,379 ----
+    if not isinstance(allowed_type, list):
+      allowed_type = [allowed_type]
+
+!   class TypedList(collections_abc.MutableSequence):
+      _pybind_generated_by = "TypedListType"
+      _list = list()
+
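The pyangbind.patch above is needed because the move to Ubuntu 22.04 and Python 3.10 (see the Dockerfile and tox.ini changes) drops the long-deprecated collections.MutableSequence alias that pyangbind's generated TypedList still used; the patch switches it to the ABC exposed via six.moves.collections_abc. A short, standalone illustration of the same fix against the standard library:

# Illustration only: on Python >= 3.10, collections.MutableSequence no longer
# exists, while collections.abc.MutableSequence (what the patched import
# resolves to) keeps working. The class below stands in for pyangbind's TypedList.
import collections.abc


class TypedList(collections.abc.MutableSequence):
    def __init__(self):
        self._list = []

    def __getitem__(self, index):
        return self._list[index]

    def __setitem__(self, index, value):
        self._list[index] = value

    def __delitem__(self, index):
        del self._list[index]

    def __len__(self):
        return len(self._list)

    def insert(self, index, value):
        self._list.insert(index, value)


items = TypedList()
items.append("a")  # append() is a MutableSequence mixin method
print(list(items))  # ['a']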
diff --git a/requirements-dev.txt b/requirements-dev.txt
index b036158..0bf9737 100644 (file)
 # limitations under the License.
 
 
-aiokafka==0.7.2
+aiokafka==0.8.1
     # via -r https://osm.etsi.org/gitweb/?p=osm/common.git;a=blob_plain;f=requirements.txt;hb=master
-bitarray==2.5.1
+async-timeout==4.0.3
     # via
-    #   -r https://osm.etsi.org/gitweb/?p=osm/IM.git;a=blob_plain;f=requirements.txt;hb=master
-    #   pyangbind
+    #   -r https://osm.etsi.org/gitweb/?p=osm/common.git;a=blob_plain;f=requirements.txt;hb=master
+    #   aiokafka
 dataclasses==0.6
     # via -r https://osm.etsi.org/gitweb/?p=osm/common.git;a=blob_plain;f=requirements.txt;hb=master
+dnspython==2.4.2
+    # via
+    #   -r https://osm.etsi.org/gitweb/?p=osm/common.git;a=blob_plain;f=requirements.txt;hb=master
+    #   pymongo
 enum34==1.1.10
     # via
     #   -r https://osm.etsi.org/gitweb/?p=osm/IM.git;a=blob_plain;f=requirements.txt;hb=master
@@ -30,30 +34,38 @@ kafka-python==2.0.2
     # via
     #   -r https://osm.etsi.org/gitweb/?p=osm/common.git;a=blob_plain;f=requirements.txt;hb=master
     #   aiokafka
-lxml==4.9.0
+lxml==4.9.3
     # via
     #   -r https://osm.etsi.org/gitweb/?p=osm/IM.git;a=blob_plain;f=requirements.txt;hb=master
     #   pyang
     #   pyangbind
+motor==3.3.1
+    # via -r https://osm.etsi.org/gitweb/?p=osm/common.git;a=blob_plain;f=requirements.txt;hb=master
 osm-common @ git+https://osm.etsi.org/gerrit/osm/common.git@master
     # via -r requirements-dev.in
 osm-im @ git+https://osm.etsi.org/gerrit/osm/IM.git@master
     # via -r requirements-dev.in
+packaging==23.1
+    # via
+    #   -r https://osm.etsi.org/gitweb/?p=osm/common.git;a=blob_plain;f=requirements.txt;hb=master
+    #   aiokafka
 pyang==2.5.3
     # via
     #   -r https://osm.etsi.org/gitweb/?p=osm/IM.git;a=blob_plain;f=requirements.txt;hb=master
     #   pyangbind
-pyangbind==0.8.1
+pyangbind==0.8.3.post1
     # via -r https://osm.etsi.org/gitweb/?p=osm/IM.git;a=blob_plain;f=requirements.txt;hb=master
-pycrypto==2.6.1
+pycryptodome==3.19.0
     # via -r https://osm.etsi.org/gitweb/?p=osm/common.git;a=blob_plain;f=requirements.txt;hb=master
-pymongo==3.12.3
-    # via -r https://osm.etsi.org/gitweb/?p=osm/common.git;a=blob_plain;f=requirements.txt;hb=master
-pyyaml==5.4.1
+pymongo==4.5.0
+    # via
+    #   -r https://osm.etsi.org/gitweb/?p=osm/common.git;a=blob_plain;f=requirements.txt;hb=master
+    #   motor
+pyyaml==6.0.1
     # via
     #   -r https://osm.etsi.org/gitweb/?p=osm/IM.git;a=blob_plain;f=requirements.txt;hb=master
     #   -r https://osm.etsi.org/gitweb/?p=osm/common.git;a=blob_plain;f=requirements.txt;hb=master
-regex==2022.6.2
+regex==2023.8.8
     # via
     #   -r https://osm.etsi.org/gitweb/?p=osm/IM.git;a=blob_plain;f=requirements.txt;hb=master
     #   pyangbind
diff --git a/requirements-test.in b/requirements-test.in
index e9c47c6..5d6739a 100644 (file)
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-aiohttp>=2.3.10,<=3.6.2
 aioresponses
 asynctest
 coverage
 deepdiff
 nose2
-requests
-pyang
diff --git a/requirements-test.txt b/requirements-test.txt
index 24cfd87..1abfb13 100644 (file)
 # limitations under the License.
 
 
-aiohttp==3.6.2
-    # via
-    #   -r requirements-test.in
-    #   aioresponses
-aioresponses==0.7.3
+aiohttp==3.8.5
+    # via aioresponses
+aioresponses==0.7.4
     # via -r requirements-test.in
-async-timeout==3.0.1
+aiosignal==1.3.1
+    # via aiohttp
+async-timeout==4.0.3
     # via aiohttp
 asynctest==0.13.0
     # via -r requirements-test.in
-attrs==21.4.0
+attrs==23.1.0
     # via aiohttp
-certifi==2022.6.15
-    # via requests
-chardet==3.0.4
-    # via
-    #   aiohttp
-    #   requests
-coverage==6.4.1
-    # via
-    #   -r requirements-test.in
-    #   nose2
-deepdiff==5.8.1
+charset-normalizer==3.2.0
+    # via aiohttp
+coverage==7.3.1
     # via -r requirements-test.in
-idna==3.3
+deepdiff==6.5.0
+    # via -r requirements-test.in
+frozenlist==1.4.0
     # via
-    #   requests
-    #   yarl
-lxml==4.9.0
-    # via pyang
-multidict==4.7.6
+    #   aiohttp
+    #   aiosignal
+idna==3.4
+    # via yarl
+multidict==6.0.4
     # via
     #   aiohttp
     #   yarl
-nose2==0.11.0
+nose2==0.13.0
     # via -r requirements-test.in
 ordered-set==4.1.0
     # via deepdiff
-pyang==2.5.3
-    # via -r requirements-test.in
-requests==2.28.0
-    # via -r requirements-test.in
-six==1.16.0
-    # via nose2
-urllib3==1.26.9
-    # via requests
-yarl==1.7.2
+yarl==1.9.2
     # via aiohttp
diff --git a/requirements.in b/requirements.in
index 5476674..9096b53 100644 (file)
 # License for the specific language governing permissions and limitations
 # under the License.
 
-aiohttp>=2.3.10,<=3.6.2
+aiohttp
+cefevent
 CherryPy>=18.1.2
 deepdiff
 jsonschema>=3.2.0
 python-keystoneclient
-pyyaml==5.4.1
+pyyaml>6
 requests
 tacacs_plus
diff --git a/requirements.txt b/requirements.txt
index 8b1f3b4..cc7328f 100644 (file)
 # limitations under the License.
 
 
-aiohttp==3.6.2
+aiohttp==3.8.5
     # via -r requirements.in
-async-timeout==3.0.1
+aiosignal==1.3.1
     # via aiohttp
-attrs==21.4.0
+annotated-types==0.5.0
+    # via pydantic
+async-timeout==4.0.3
+    # via aiohttp
+attrs==23.1.0
     # via
     #   aiohttp
     #   jsonschema
-certifi==2022.6.15
-    # via requests
-chardet==3.0.4
-    # via aiohttp
-charset-normalizer==2.0.12
+    #   referencing
+autocommand==2.2.2
+    # via jaraco-text
+cefevent==0.5.4
+    # via -r requirements.in
+certifi==2023.7.22
     # via requests
-cheroot==8.6.0
+charset-normalizer==3.2.0
+    # via
+    #   aiohttp
+    #   requests
+cheroot==10.0.0
     # via cherrypy
-cherrypy==18.6.1
+cherrypy==18.8.0
     # via -r requirements.in
 debtcollector==2.5.0
     # via
     #   oslo-config
     #   oslo-utils
     #   python-keystoneclient
-deepdiff==5.8.1
+deepdiff==6.5.0
     # via -r requirements.in
-idna==3.3
+frozenlist==1.4.0
+    # via
+    #   aiohttp
+    #   aiosignal
+idna==3.4
     # via
     #   requests
     #   yarl
-importlib-resources==5.8.0
-    # via
-    #   jaraco-text
-    #   jsonschema
-iso8601==1.0.2
+inflect==7.0.0
+    # via jaraco-text
+iso8601==2.0.0
     # via
     #   keystoneauth1
     #   oslo-utils
-jaraco-classes==3.2.1
-    # via jaraco-collections
-jaraco-collections==3.5.1
+jaraco-collections==4.3.0
     # via cherrypy
-jaraco-context==4.1.1
+jaraco-context==4.3.0
     # via jaraco-text
-jaraco-functools==3.5.0
+jaraco-functools==3.9.0
     # via
     #   cheroot
     #   jaraco-text
     #   tempora
-jaraco-text==3.8.0
+jaraco-text==3.11.1
     # via jaraco-collections
-jsonschema==4.6.0
+jsonschema==4.19.1
     # via -r requirements.in
-keystoneauth1==4.6.0
+jsonschema-specifications==2023.7.1
+    # via jsonschema
+keystoneauth1==5.3.0
     # via python-keystoneclient
-more-itertools==8.13.0
+more-itertools==10.1.0
     # via
     #   cheroot
     #   cherrypy
-    #   jaraco-classes
     #   jaraco-functools
-msgpack==1.0.4
+    #   jaraco-text
+msgpack==1.0.7
     # via oslo-serialization
-multidict==4.7.6
+multidict==6.0.4
     # via
     #   aiohttp
     #   yarl
-netaddr==0.8.0
+netaddr==0.9.0
     # via
     #   oslo-config
     #   oslo-utils
@@ -90,24 +101,24 @@ ordered-set==4.1.0
     # via deepdiff
 os-service-types==1.7.0
     # via keystoneauth1
-oslo-config==8.8.0
+oslo-config==9.2.0
     # via python-keystoneclient
-oslo-i18n==5.1.0
+oslo-i18n==6.1.0
     # via
     #   oslo-config
     #   oslo-utils
     #   python-keystoneclient
-oslo-serialization==4.3.0
+oslo-serialization==5.2.0
     # via python-keystoneclient
-oslo-utils==6.0.0
+oslo-utils==6.2.1
     # via
     #   oslo-serialization
     #   python-keystoneclient
-packaging==21.3
+packaging==23.1
     # via
     #   oslo-utils
     #   python-keystoneclient
-pbr==5.9.0
+pbr==5.11.1
     # via
     #   keystoneauth1
     #   os-service-types
@@ -115,26 +126,30 @@ pbr==5.9.0
     #   oslo-serialization
     #   python-keystoneclient
     #   stevedore
-portend==3.1.0
+portend==3.2.0
     # via cherrypy
-pyparsing==3.0.9
-    # via
-    #   oslo-utils
-    #   packaging
-pyrsistent==0.18.1
-    # via jsonschema
-python-keystoneclient==4.5.0
+pydantic==2.4.2
+    # via inflect
+pydantic-core==2.10.1
+    # via pydantic
+pyparsing==3.1.1
+    # via oslo-utils
+python-keystoneclient==5.2.0
     # via -r requirements.in
-pytz==2022.1
+pytz==2023.3.post1
     # via
     #   oslo-serialization
     #   oslo-utils
     #   tempora
-pyyaml==5.4.1
+pyyaml==6.0.1
     # via
     #   -r requirements.in
     #   oslo-config
-requests==2.28.0
+referencing==0.30.2
+    # via
+    #   jsonschema
+    #   jsonschema-specifications
+requests==2.31.0
     # via
     #   -r requirements.in
     #   keystoneauth1
@@ -142,31 +157,41 @@ requests==2.28.0
     #   python-keystoneclient
 rfc3986==2.0.0
     # via oslo-config
+rpds-py==0.10.3
+    # via
+    #   jsonschema
+    #   referencing
 six==1.16.0
     # via
-    #   cheroot
-    #   keystoneauth1
     #   python-keystoneclient
     #   tacacs-plus
-stevedore==3.5.0
+stevedore==5.1.0
     # via
     #   keystoneauth1
     #   oslo-config
     #   python-keystoneclient
 tacacs-plus==2.6
     # via -r requirements.in
-tempora==5.0.1
+tempora==5.5.0
     # via portend
-urllib3==1.26.9
+typing-extensions==4.8.0
+    # via
+    #   inflect
+    #   jaraco-functools
+    #   pydantic
+    #   pydantic-core
+tzdata==2023.3
+    # via
+    #   oslo-serialization
+    #   oslo-utils
+urllib3==2.0.5
     # via requests
-wrapt==1.14.1
+wrapt==1.15.0
     # via debtcollector
-yarl==1.7.2
+yarl==1.9.2
     # via aiohttp
-zc-lockfile==2.0
+zc-lockfile==3.0.post1
     # via cherrypy
-zipp==3.8.0
-    # via importlib-resources
 
 # The following packages are considered to be unsafe in a requirements file:
 # setuptools
diff --git a/tox.ini b/tox.ini
index ada5cd1..3c0217e 100644 (file)
--- a/tox.ini
+++ b/tox.ini
@@ -22,18 +22,18 @@ toxworkdir = /tmp/.tox
 
 [testenv]
 usedevelop = True
-basepython = python3.8
+basepython = python3.10
 setenv = VIRTUAL_ENV={envdir}
          PYTHONDONTWRITEBYTECODE = 1
 deps =  -r{toxinidir}/requirements.txt
 
 #######################################################################################
 [testenv:black]
-deps = black
+deps = black==23.12.1
 skip_install = true
 commands =
-        black --check --diff osm_nbi/
-        black --check --diff setup.py
+        black --check --diff osm_nbi/
+        black --check --diff setup.py
 
 
 #######################################################################################
@@ -48,14 +48,14 @@ commands =
         coverage report --omit='*tests*'
         coverage html -d ./cover --omit='*tests*'
         coverage xml -o coverage.xml --omit=*tests*
-whitelist_externals = sh
+allowlist_externals = sh
 
 
 #######################################################################################
 [testenv:flake8]
 deps = flake8
 commands =
-        flake8 osm_nbi/ setup.py
+        flake8 osm_nbi/ setup.py
 
 
 #######################################################################################
@@ -65,7 +65,7 @@ deps =  {[testenv]deps}
         -r{toxinidir}/requirements-test.txt
         pylint
 commands =
-    - pylint -E osm_nbi
+        pylint -E osm_nbi
 
 
 #######################################################################################
@@ -81,15 +81,15 @@ commands =
 
 #######################################################################################
 [testenv:pip-compile]
-deps =  pip-tools==6.6.2
+deps =  pip-tools==6.13.0
 skip_install = true
-whitelist_externals = bash
+allowlist_externals = bash
         [
 commands =
         - bash -c "for file in requirements*.in ; do \
         UNSAFE="" ; \
         if [[ $file =~ 'dist' ]] ; then UNSAFE='--allow-unsafe' ; fi ; \
-        pip-compile -rU --no-header $UNSAFE $file ;\
+        pip-compile --resolver=backtracking -rU --no-header $UNSAFE $file ;\
         out=`echo $file | sed 's/.in/.txt/'` ; \
         sed -i -e '1 e head -16 tox.ini' $out ;\
         done"
@@ -107,7 +107,7 @@ commands =
         python3 setup.py --command-packages=stdeb.command sdist_dsc
         sh -c 'cd deb_dist/osm-nbi*/ && dpkg-buildpackage -rfakeroot -uc -us'
         sh -c 'rm osm_nbi/requirements.txt'
-whitelist_externals = sh
+allowlist_externals = sh
 
 #######################################################################################
 [flake8]
@@ -117,6 +117,7 @@ ignore =
         W503,
         E123,
         E125,
+        E203,
         E226,
         E241,
         E501