Reformat NBI to standardized format (change 35/10535/5)
author    garciadeblas <gerardo.garciadeblas@telefonica.com>
Wed, 24 Mar 2021 08:19:48 +0000 (09:19 +0100)
committer guzman <jmguzman@whitestack.com>
Tue, 18 May 2021 14:26:17 +0000 (16:26 +0200)
Change-Id: I17c2b029b83af84fbc97559bd2b616dfb6aef0fb
Signed-off-by: garciadeblas <gerardo.garciadeblas@telefonica.com>
28 files changed:
osm_nbi/__init__.py
osm_nbi/admin_topics.py
osm_nbi/auth.py
osm_nbi/authconn.py
osm_nbi/authconn_internal.py
osm_nbi/authconn_keystone.py
osm_nbi/authconn_tacacs.py
osm_nbi/base_topic.py
osm_nbi/descriptor_topics.py
osm_nbi/engine.py
osm_nbi/html_out.py
osm_nbi/instance_topics.py
osm_nbi/nbi.py
osm_nbi/notifications.py
osm_nbi/pmjobs_topics.py
osm_nbi/subscription_topics.py
osm_nbi/subscriptions.py
osm_nbi/tests/run_test.py
osm_nbi/tests/send_kafka.py
osm_nbi/tests/test_admin_topics.py
osm_nbi/tests/test_base_topic.py
osm_nbi/tests/test_descriptor_topics.py
osm_nbi/tests/test_instance_topics.py
osm_nbi/tests/test_pmjobs_topic.py
osm_nbi/tests/upload.py
osm_nbi/validation.py
setup.py
tox.ini
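
The diff below applies a uniform code style across the NBI sources: string literals move from single to double quotes, long import lists and call argument lists are wrapped one item per line with trailing commas, and inline comments are separated from code by two spaces. This is consistent with Black-style formatting (an assumption; the commit message does not name the formatter, though tox.ini is among the changed files). As an illustration only, not part of the commit, a long call such as the following hypothetical one would be rewritten as:

-    result = do_something(first_argument, second_argument, fail_on_empty=False, fail_on_more=False)
+    result = do_something(
+        first_argument,
+        second_argument,
+        fail_on_empty=False,
+        fail_on_more=False,
+    )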

osm_nbi/__init__.py
index 7bd5b6b..3bfe160 100644
 # under the License.
 ##
 
-version = '7.0.1.post23'
-version_date = '2020-04-17'
+version = "7.0.1.post23"
+version_date = "2020-04-17"
 
 # Obtain installed package version. Ignore if error, e.g. pkg_resources not installed
 try:
     from pkg_resources import get_distribution
+
     version = get_distribution("osm_nbi").version
 except Exception:
     pass
osm_nbi/admin_topics.py
index 24c99a9..0006917 100644
@@ -18,13 +18,31 @@ from uuid import uuid4
 from hashlib import sha256
 from http import HTTPStatus
 from time import time
-from osm_nbi.validation import user_new_schema, user_edit_schema, project_new_schema, project_edit_schema, \
-    vim_account_new_schema, vim_account_edit_schema, sdn_new_schema, sdn_edit_schema, \
-    wim_account_new_schema, wim_account_edit_schema, roles_new_schema, roles_edit_schema, \
-    k8scluster_new_schema, k8scluster_edit_schema, k8srepo_new_schema, k8srepo_edit_schema, \
-    vca_new_schema, vca_edit_schema, \
-    osmrepo_new_schema, osmrepo_edit_schema, \
-    validate_input, ValidationError, is_valid_uuid  # To check that User/Project Names don't look like UUIDs
+from osm_nbi.validation import (
+    user_new_schema,
+    user_edit_schema,
+    project_new_schema,
+    project_edit_schema,
+    vim_account_new_schema,
+    vim_account_edit_schema,
+    sdn_new_schema,
+    sdn_edit_schema,
+    wim_account_new_schema,
+    wim_account_edit_schema,
+    roles_new_schema,
+    roles_edit_schema,
+    k8scluster_new_schema,
+    k8scluster_edit_schema,
+    k8srepo_new_schema,
+    k8srepo_edit_schema,
+    vca_new_schema,
+    vca_edit_schema,
+    osmrepo_new_schema,
+    osmrepo_edit_schema,
+    validate_input,
+    ValidationError,
+    is_valid_uuid,
+)  # To check that User/Project Names don't look like UUIDs
 from osm_nbi.base_topic import BaseTopic, EngineException
 from osm_nbi.authconn import AuthconnNotFoundException, AuthconnConflictException
 from osm_common.dbbase import deep_update_rfc7396
@@ -58,15 +76,28 @@ class UserTopic(BaseTopic):
 
     def check_conflict_on_new(self, session, indata):
         # check username not exists
-        if self.db.get_one(self.topic, {"username": indata.get("username")}, fail_on_empty=False, fail_on_more=False):
-            raise EngineException("username '{}' exists".format(indata["username"]), HTTPStatus.CONFLICT)
+        if self.db.get_one(
+            self.topic,
+            {"username": indata.get("username")},
+            fail_on_empty=False,
+            fail_on_more=False,
+        ):
+            raise EngineException(
+                "username '{}' exists".format(indata["username"]), HTTPStatus.CONFLICT
+            )
         # check projects
         if not session["force"]:
             for p in indata.get("projects") or []:
                 # To allow project addressing by Name as well as ID
-                if not self.db.get_one("projects", {BaseTopic.id_field("projects", p): p}, fail_on_empty=False,
-                                       fail_on_more=False):
-                    raise EngineException("project '{}' does not exist".format(p), HTTPStatus.CONFLICT)
+                if not self.db.get_one(
+                    "projects",
+                    {BaseTopic.id_field("projects", p): p},
+                    fail_on_empty=False,
+                    fail_on_more=False,
+                ):
+                    raise EngineException(
+                        "project '{}' does not exist".format(p), HTTPStatus.CONFLICT
+                    )
 
     def check_conflict_on_del(self, session, _id, db_content):
         """
@@ -77,7 +108,9 @@ class UserTopic(BaseTopic):
         :return: None if ok or raises EngineException with the conflict
         """
         if _id == session["username"]:
-            raise EngineException("You cannot delete your own user", http_code=HTTPStatus.CONFLICT)
+            raise EngineException(
+                "You cannot delete your own user", http_code=HTTPStatus.CONFLICT
+            )
 
     @staticmethod
     def format_on_new(content, project_id=None, make_public=False):
@@ -87,9 +120,13 @@ class UserTopic(BaseTopic):
         salt = uuid4().hex
         content["_admin"]["salt"] = salt
         if content.get("password"):
-            content["password"] = sha256(content["password"].encode('utf-8') + salt.encode('utf-8')).hexdigest()
+            content["password"] = sha256(
+                content["password"].encode("utf-8") + salt.encode("utf-8")
+            ).hexdigest()
         if content.get("project_role_mappings"):
-            projects = [mapping["project"] for mapping in content["project_role_mappings"]]
+            projects = [
+                mapping["project"] for mapping in content["project_role_mappings"]
+            ]
 
             if content.get("projects"):
                 content["projects"] += projects
@@ -102,29 +139,42 @@ class UserTopic(BaseTopic):
         if edit_content.get("password"):
             salt = uuid4().hex
             final_content["_admin"]["salt"] = salt
-            final_content["password"] = sha256(edit_content["password"].encode('utf-8') +
-                                               salt.encode('utf-8')).hexdigest()
+            final_content["password"] = sha256(
+                edit_content["password"].encode("utf-8") + salt.encode("utf-8")
+            ).hexdigest()
         return None
 
     def edit(self, session, _id, indata=None, kwargs=None, content=None):
         if not session["admin"]:
-            raise EngineException("needed admin privileges", http_code=HTTPStatus.UNAUTHORIZED)
+            raise EngineException(
+                "needed admin privileges", http_code=HTTPStatus.UNAUTHORIZED
+            )
         # Names that look like UUIDs are not allowed
         name = (indata if indata else kwargs).get("username")
         if is_valid_uuid(name):
-            raise EngineException("Usernames that look like UUIDs are not allowed",
-                                  http_code=HTTPStatus.UNPROCESSABLE_ENTITY)
-        return BaseTopic.edit(self, session, _id, indata=indata, kwargs=kwargs, content=content)
+            raise EngineException(
+                "Usernames that look like UUIDs are not allowed",
+                http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
+            )
+        return BaseTopic.edit(
+            self, session, _id, indata=indata, kwargs=kwargs, content=content
+        )
 
     def new(self, rollback, session, indata=None, kwargs=None, headers=None):
         if not session["admin"]:
-            raise EngineException("needed admin privileges", http_code=HTTPStatus.UNAUTHORIZED)
+            raise EngineException(
+                "needed admin privileges", http_code=HTTPStatus.UNAUTHORIZED
+            )
         # Names that look like UUIDs are not allowed
         name = indata["username"] if indata else kwargs["username"]
         if is_valid_uuid(name):
-            raise EngineException("Usernames that look like UUIDs are not allowed",
-                                  http_code=HTTPStatus.UNPROCESSABLE_ENTITY)
-        return BaseTopic.new(self, rollback, session, indata=indata, kwargs=kwargs, headers=headers)
+            raise EngineException(
+                "Usernames that look like UUIDs are not allowed",
+                http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
+            )
+        return BaseTopic.new(
+            self, rollback, session, indata=indata, kwargs=kwargs, headers=headers
+        )
 
 
 class ProjectTopic(BaseTopic):
@@ -154,8 +204,15 @@ class ProjectTopic(BaseTopic):
         if not indata.get("name"):
             raise EngineException("missing 'name'")
         # check name not exists
-        if self.db.get_one(self.topic, {"name": indata.get("name")}, fail_on_empty=False, fail_on_more=False):
-            raise EngineException("name '{}' exists".format(indata["name"]), HTTPStatus.CONFLICT)
+        if self.db.get_one(
+            self.topic,
+            {"name": indata.get("name")},
+            fail_on_empty=False,
+            fail_on_more=False,
+        ):
+            raise EngineException(
+                "name '{}' exists".format(indata["name"]), HTTPStatus.CONFLICT
+            )
 
     @staticmethod
     def format_on_new(content, project_id=None, make_public=False):
@@ -172,38 +229,58 @@ class ProjectTopic(BaseTopic):
         :return: None if ok or raises EngineException with the conflict
         """
         if _id in session["project_id"]:
-            raise EngineException("You cannot delete your own project", http_code=HTTPStatus.CONFLICT)
+            raise EngineException(
+                "You cannot delete your own project", http_code=HTTPStatus.CONFLICT
+            )
         if session["force"]:
             return
         _filter = {"projects": _id}
         if self.db.get_list("users", _filter):
-            raise EngineException("There is some USER that contains this project", http_code=HTTPStatus.CONFLICT)
+            raise EngineException(
+                "There is some USER that contains this project",
+                http_code=HTTPStatus.CONFLICT,
+            )
 
     def edit(self, session, _id, indata=None, kwargs=None, content=None):
         if not session["admin"]:
-            raise EngineException("needed admin privileges", http_code=HTTPStatus.UNAUTHORIZED)
+            raise EngineException(
+                "needed admin privileges", http_code=HTTPStatus.UNAUTHORIZED
+            )
         # Names that look like UUIDs are not allowed
         name = (indata if indata else kwargs).get("name")
         if is_valid_uuid(name):
-            raise EngineException("Project names that look like UUIDs are not allowed",
-                                  http_code=HTTPStatus.UNPROCESSABLE_ENTITY)
-        return BaseTopic.edit(self, session, _id, indata=indata, kwargs=kwargs, content=content)
+            raise EngineException(
+                "Project names that look like UUIDs are not allowed",
+                http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
+            )
+        return BaseTopic.edit(
+            self, session, _id, indata=indata, kwargs=kwargs, content=content
+        )
 
     def new(self, rollback, session, indata=None, kwargs=None, headers=None):
         if not session["admin"]:
-            raise EngineException("needed admin privileges", http_code=HTTPStatus.UNAUTHORIZED)
+            raise EngineException(
+                "needed admin privileges", http_code=HTTPStatus.UNAUTHORIZED
+            )
         # Names that look like UUIDs are not allowed
         name = indata["name"] if indata else kwargs["name"]
         if is_valid_uuid(name):
-            raise EngineException("Project names that look like UUIDs are not allowed",
-                                  http_code=HTTPStatus.UNPROCESSABLE_ENTITY)
-        return BaseTopic.new(self, rollback, session, indata=indata, kwargs=kwargs, headers=headers)
+            raise EngineException(
+                "Project names that look like UUIDs are not allowed",
+                http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
+            )
+        return BaseTopic.new(
+            self, rollback, session, indata=indata, kwargs=kwargs, headers=headers
+        )
 
 
 class CommonVimWimSdn(BaseTopic):
     """Common class for VIM, WIM SDN just to unify methods that are equal to all of them"""
-    config_to_encrypt = {}     # what keys at config must be encrypted because contains passwords
-    password_to_encrypt = ""   # key that contains a password
+
+    config_to_encrypt = (
+        {}
+    )  # what keys at config must be encrypted because contains passwords
+    password_to_encrypt = ""  # key that contains a password
 
     @staticmethod
     def _create_operation(op_type, params=None):
@@ -259,21 +336,29 @@ class CommonVimWimSdn(BaseTopic):
         schema_version = final_content.get("schema_version")
         if schema_version:
             if edit_content.get(self.password_to_encrypt):
-                final_content[self.password_to_encrypt] = self.db.encrypt(edit_content[self.password_to_encrypt],
-                                                                          schema_version=schema_version,
-                                                                          salt=final_content["_id"])
-            config_to_encrypt_keys = self.config_to_encrypt.get(schema_version) or self.config_to_encrypt.get("default")
+                final_content[self.password_to_encrypt] = self.db.encrypt(
+                    edit_content[self.password_to_encrypt],
+                    schema_version=schema_version,
+                    salt=final_content["_id"],
+                )
+            config_to_encrypt_keys = self.config_to_encrypt.get(
+                schema_version
+            ) or self.config_to_encrypt.get("default")
             if edit_content.get("config") and config_to_encrypt_keys:
 
                 for p in config_to_encrypt_keys:
                     if edit_content["config"].get(p):
-                        final_content["config"][p] = self.db.encrypt(edit_content["config"][p],
-                                                                     schema_version=schema_version,
-                                                                     salt=final_content["_id"])
+                        final_content["config"][p] = self.db.encrypt(
+                            edit_content["config"][p],
+                            schema_version=schema_version,
+                            salt=final_content["_id"],
+                        )
 
         # create edit operation
         final_content["_admin"]["operations"].append(self._create_operation("edit"))
-        return "{}:{}".format(final_content["_id"], len(final_content["_admin"]["operations"]) - 1)
+        return "{}:{}".format(
+            final_content["_id"], len(final_content["_admin"]["operations"]) - 1
+        )
 
     def format_on_new(self, content, project_id=None, make_public=False):
         """
@@ -288,16 +373,22 @@ class CommonVimWimSdn(BaseTopic):
 
         # encrypt passwords
         if content.get(self.password_to_encrypt):
-            content[self.password_to_encrypt] = self.db.encrypt(content[self.password_to_encrypt],
-                                                                schema_version=schema_version,
-                                                                salt=content["_id"])
-        config_to_encrypt_keys = self.config_to_encrypt.get(schema_version) or self.config_to_encrypt.get("default")
+            content[self.password_to_encrypt] = self.db.encrypt(
+                content[self.password_to_encrypt],
+                schema_version=schema_version,
+                salt=content["_id"],
+            )
+        config_to_encrypt_keys = self.config_to_encrypt.get(
+            schema_version
+        ) or self.config_to_encrypt.get("default")
         if content.get("config") and config_to_encrypt_keys:
             for p in config_to_encrypt_keys:
                 if content["config"].get(p):
-                    content["config"][p] = self.db.encrypt(content["config"][p],
-                                                           schema_version=schema_version,
-                                                           salt=content["_id"])
+                    content["config"][p] = self.db.encrypt(
+                        content["config"][p],
+                        schema_version=schema_version,
+                        salt=content["_id"],
+                    )
 
         content["_admin"]["operationalState"] = "PROCESSING"
 
@@ -328,38 +419,64 @@ class CommonVimWimSdn(BaseTopic):
         # remove reference from project_read if there are more projects referencing it. If it last one,
         # do not remove reference, but order via kafka to delete it
         if session["project_id"] and session["project_id"]:
-            other_projects_referencing = next((p for p in db_content["_admin"]["projects_read"]
-                                               if p not in session["project_id"] and p != "ANY"), None)
+            other_projects_referencing = next(
+                (
+                    p
+                    for p in db_content["_admin"]["projects_read"]
+                    if p not in session["project_id"] and p != "ANY"
+                ),
+                None,
+            )
 
             # check if there are projects referencing it (apart from ANY, that means, public)....
             if other_projects_referencing:
                 # remove references but not delete
-                update_dict_pull = {"_admin.projects_read": session["project_id"],
-                                    "_admin.projects_write": session["project_id"]}
-                self.db.set_one(self.topic, filter_q, update_dict=None, pull_list=update_dict_pull)
+                update_dict_pull = {
+                    "_admin.projects_read": session["project_id"],
+                    "_admin.projects_write": session["project_id"],
+                }
+                self.db.set_one(
+                    self.topic, filter_q, update_dict=None, pull_list=update_dict_pull
+                )
                 return None
             else:
-                can_write = next((p for p in db_content["_admin"]["projects_write"] if p == "ANY" or
-                                  p in session["project_id"]), None)
+                can_write = next(
+                    (
+                        p
+                        for p in db_content["_admin"]["projects_write"]
+                        if p == "ANY" or p in session["project_id"]
+                    ),
+                    None,
+                )
                 if not can_write:
-                    raise EngineException("You have not write permission to delete it",
-                                          http_code=HTTPStatus.UNAUTHORIZED)
+                    raise EngineException(
+                        "You have not write permission to delete it",
+                        http_code=HTTPStatus.UNAUTHORIZED,
+                    )
 
         # It must be deleted
         if session["force"]:
             self.db.del_one(self.topic, {"_id": _id})
             op_id = None
-            self._send_msg("deleted", {"_id": _id, "op_id": op_id}, not_send_msg=not_send_msg)
+            self._send_msg(
+                "deleted", {"_id": _id, "op_id": op_id}, not_send_msg=not_send_msg
+            )
         else:
             update_dict = {"_admin.to_delete": True}
-            self.db.set_one(self.topic, {"_id": _id},
-                            update_dict=update_dict,
-                            push={"_admin.operations": self._create_operation("delete")}
-                            )
+            self.db.set_one(
+                self.topic,
+                {"_id": _id},
+                update_dict=update_dict,
+                push={"_admin.operations": self._create_operation("delete")},
+            )
             # the number of operations is the operation_id. db_content does not contains the new operation inserted,
             # so the -1 is not needed
-            op_id = "{}:{}".format(db_content["_id"], len(db_content["_admin"]["operations"]))
-            self._send_msg("delete", {"_id": _id, "op_id": op_id}, not_send_msg=not_send_msg)
+            op_id = "{}:{}".format(
+                db_content["_id"], len(db_content["_admin"]["operations"])
+            )
+            self._send_msg(
+                "delete", {"_id": _id, "op_id": op_id}, not_send_msg=not_send_msg
+            )
         return op_id
 
 
@@ -370,8 +487,15 @@ class VimAccountTopic(CommonVimWimSdn):
     schema_edit = vim_account_edit_schema
     multiproject = True
     password_to_encrypt = "vim_password"
-    config_to_encrypt = {"1.1": ("admin_password", "nsx_password", "vcenter_password"),
-                         "default": ("admin_password", "nsx_password", "vcenter_password", "vrops_password")}
+    config_to_encrypt = {
+        "1.1": ("admin_password", "nsx_password", "vcenter_password"),
+        "default": (
+            "admin_password",
+            "nsx_password",
+            "vcenter_password",
+            "vrops_password",
+        ),
+    }
 
     def check_conflict_on_del(self, session, _id, db_content):
         """
@@ -385,7 +509,10 @@ class VimAccountTopic(CommonVimWimSdn):
             return
         # check if used by VNF
         if self.db.get_list("vnfrs", {"vim-account-id": _id}):
-            raise EngineException("There is at least one VNF using this VIM account", http_code=HTTPStatus.CONFLICT)
+            raise EngineException(
+                "There is at least one VNF using this VIM account",
+                http_code=HTTPStatus.CONFLICT,
+            )
         super().check_conflict_on_del(session, _id, db_content)
 
 
@@ -411,12 +538,14 @@ class SdnTopic(CommonVimWimSdn):
 
     def _obtain_url(self, input, create):
         if input.get("ip") or input.get("port"):
-            if not input.get("ip") or not input.get("port") or input.get('url'):
-                raise ValidationError("You must provide both 'ip' and 'port' (deprecated); or just 'url' (prefered)")
-            input['url'] = "http://{}:{}/".format(input["ip"], input["port"])
+            if not input.get("ip") or not input.get("port") or input.get("url"):
+                raise ValidationError(
+                    "You must provide both 'ip' and 'port' (deprecated); or just 'url' (prefered)"
+                )
+            input["url"] = "http://{}:{}/".format(input["ip"], input["port"])
             del input["ip"]
             del input["port"]
-        elif create and not input.get('url'):
+        elif create and not input.get("url"):
             raise ValidationError("You must provide 'url'")
         return input
 
@@ -440,39 +569,59 @@ class K8sClusterTopic(CommonVimWimSdn):
 
     def format_on_new(self, content, project_id=None, make_public=False):
         oid = super().format_on_new(content, project_id, make_public)
-        self.db.encrypt_decrypt_fields(content["credentials"], 'encrypt', ['password', 'secret'],
-                                       schema_version=content["schema_version"], salt=content["_id"])
+        self.db.encrypt_decrypt_fields(
+            content["credentials"],
+            "encrypt",
+            ["password", "secret"],
+            schema_version=content["schema_version"],
+            salt=content["_id"],
+        )
         # Add Helm/Juju Repo lists
         repos = {"helm-chart": [], "juju-bundle": []}
         for proj in content["_admin"]["projects_read"]:
-            if proj != 'ANY':
-                for repo in self.db.get_list("k8srepos", {"_admin.projects_read": proj}):
+            if proj != "ANY":
+                for repo in self.db.get_list(
+                    "k8srepos", {"_admin.projects_read": proj}
+                ):
                     if repo["_id"] not in repos[repo["type"]]:
                         repos[repo["type"]].append(repo["_id"])
         for k in repos:
-            content["_admin"][k.replace('-', '_')+"_repos"] = repos[k]
+            content["_admin"][k.replace("-", "_") + "_repos"] = repos[k]
         return oid
 
     def format_on_edit(self, final_content, edit_content):
         if final_content.get("schema_version") and edit_content.get("credentials"):
-            self.db.encrypt_decrypt_fields(edit_content["credentials"], 'encrypt', ['password', 'secret'],
-                                           schema_version=final_content["schema_version"], salt=final_content["_id"])
-            deep_update_rfc7396(final_content["credentials"], edit_content["credentials"])
+            self.db.encrypt_decrypt_fields(
+                edit_content["credentials"],
+                "encrypt",
+                ["password", "secret"],
+                schema_version=final_content["schema_version"],
+                salt=final_content["_id"],
+            )
+            deep_update_rfc7396(
+                final_content["credentials"], edit_content["credentials"]
+            )
         oid = super().format_on_edit(final_content, edit_content)
         return oid
 
     def check_conflict_on_edit(self, session, final_content, edit_content, _id):
-        final_content = super(CommonVimWimSdn, self).check_conflict_on_edit(session, final_content, edit_content, _id)
-        final_content = super().check_conflict_on_edit(session, final_content, edit_content, _id)
+        final_content = super(CommonVimWimSdn, self).check_conflict_on_edit(
+            session, final_content, edit_content, _id
+        )
+        final_content = super().check_conflict_on_edit(
+            session, final_content, edit_content, _id
+        )
         # Update Helm/Juju Repo lists
         repos = {"helm-chart": [], "juju-bundle": []}
         for proj in session.get("set_project", []):
-            if proj != 'ANY':
-                for repo in self.db.get_list("k8srepos", {"_admin.projects_read": proj}):
+            if proj != "ANY":
+                for repo in self.db.get_list(
+                    "k8srepos", {"_admin.projects_read": proj}
+                ):
                     if repo["_id"] not in repos[repo["type"]]:
                         repos[repo["type"]].append(repo["_id"])
         for k in repos:
-            rlist = k.replace('-', '_') + "_repos"
+            rlist = k.replace("-", "_") + "_repos"
             if rlist not in final_content["_admin"]:
                 final_content["_admin"][rlist] = []
             final_content["_admin"][rlist] += repos[k]
@@ -493,7 +642,10 @@ class K8sClusterTopic(CommonVimWimSdn):
         if session["project_id"]:
             filter_q["_admin.projects_read.cont"] = session["project_id"]
         if self.db.get_list("vnfrs", filter_q):
-            raise EngineException("There is at least one VNF using this k8scluster", http_code=HTTPStatus.CONFLICT)
+            raise EngineException(
+                "There is at least one VNF using this k8scluster",
+                http_code=HTTPStatus.CONFLICT,
+            )
         super().check_conflict_on_del(session, _id, db_content)
 
 
@@ -510,9 +662,7 @@ class VcaTopic(CommonVimWimSdn):
         content["schema_version"] = schema_version = "1.11"
         for key in ["secret", "cacert"]:
             content[key] = self.db.encrypt(
-                content[key],
-                schema_version=schema_version,
-                salt=content["_id"]
+                content[key], schema_version=schema_version, salt=content["_id"]
             )
         return oid
 
@@ -524,7 +674,7 @@ class VcaTopic(CommonVimWimSdn):
                 final_content[key] = self.db.encrypt(
                     edit_content[key],
                     schema_version=schema_version,
-                    salt=final_content["_id"]
+                    salt=final_content["_id"],
                 )
         return oid
 
@@ -543,7 +693,10 @@ class VcaTopic(CommonVimWimSdn):
         if session["project_id"]:
             filter_q["_admin.projects_read.cont"] = session["project_id"]
         if self.db.get_list("vim_accounts", filter_q):
-            raise EngineException("There is at least one VIM account using this vca", http_code=HTTPStatus.CONFLICT)
+            raise EngineException(
+                "There is at least one VIM account using this vca",
+                http_code=HTTPStatus.CONFLICT,
+            )
         super().check_conflict_on_del(session, _id, db_content)
 
 
@@ -559,12 +712,18 @@ class K8sRepoTopic(CommonVimWimSdn):
     def format_on_new(self, content, project_id=None, make_public=False):
         oid = super().format_on_new(content, project_id, make_public)
         # Update Helm/Juju Repo lists
-        repo_list = content["type"].replace('-', '_')+"_repos"
+        repo_list = content["type"].replace("-", "_") + "_repos"
         for proj in content["_admin"]["projects_read"]:
-            if proj != 'ANY':
-                self.db.set_list("k8sclusters",
-                                 {"_admin.projects_read": proj, "_admin."+repo_list+".ne": content["_id"]}, {},
-                                 push={"_admin."+repo_list: content["_id"]})
+            if proj != "ANY":
+                self.db.set_list(
+                    "k8sclusters",
+                    {
+                        "_admin.projects_read": proj,
+                        "_admin." + repo_list + ".ne": content["_id"],
+                    },
+                    {},
+                    push={"_admin." + repo_list: content["_id"]},
+                )
         return oid
 
     def delete(self, session, _id, dry_run=False, not_send_msg=None):
@@ -572,8 +731,13 @@ class K8sRepoTopic(CommonVimWimSdn):
         oid = super().delete(session, _id, dry_run, not_send_msg)
         if oid:
             # Remove from Helm/Juju Repo lists
-            repo_list = type.replace('-', '_') + "_repos"
-            self.db.set_list("k8sclusters", {"_admin."+repo_list: _id}, {}, pull={"_admin."+repo_list: _id})
+            repo_list = type.replace("-", "_") + "_repos"
+            self.db.set_list(
+                "k8sclusters",
+                {"_admin." + repo_list: _id},
+                {},
+                pull={"_admin." + repo_list: _id},
+            )
         return oid
 
 
@@ -606,12 +770,16 @@ class UserTopicAuth(UserTopic):
         """
         username = indata.get("username")
         if is_valid_uuid(username):
-            raise EngineException("username '{}' cannot have a uuid format".format(username),
-                                  HTTPStatus.UNPROCESSABLE_ENTITY)
+            raise EngineException(
+                "username '{}' cannot have a uuid format".format(username),
+                HTTPStatus.UNPROCESSABLE_ENTITY,
+            )
 
         # Check that username is not used, regardless keystone already checks this
         if self.auth.get_user_list(filter_q={"name": username}):
-            raise EngineException("username '{}' is already used".format(username), HTTPStatus.CONFLICT)
+            raise EngineException(
+                "username '{}' is already used".format(username), HTTPStatus.CONFLICT
+            )
 
         if "projects" in indata.keys():
             # convert to new format project_role_mappings
@@ -619,7 +787,9 @@ class UserTopicAuth(UserTopic):
             if not role:
                 role = self.auth.get_role_list()
             if not role:
-                raise AuthconnNotFoundException("Can't find default role for user '{}'".format(username))
+                raise AuthconnNotFoundException(
+                    "Can't find default role for user '{}'".format(username)
+                )
             rid = role[0]["_id"]
             if not indata.get("project_role_mappings"):
                 indata["project_role_mappings"] = []
@@ -645,19 +815,29 @@ class UserTopicAuth(UserTopic):
         if "username" in edit_content:
             username = edit_content.get("username")
             if is_valid_uuid(username):
-                raise EngineException("username '{}' cannot have an uuid format".format(username),
-                                      HTTPStatus.UNPROCESSABLE_ENTITY)
+                raise EngineException(
+                    "username '{}' cannot have an uuid format".format(username),
+                    HTTPStatus.UNPROCESSABLE_ENTITY,
+                )
 
             # Check that username is not used, regardless keystone already checks this
             if self.auth.get_user_list(filter_q={"name": username}):
-                raise EngineException("username '{}' is already used".format(username), HTTPStatus.CONFLICT)
+                raise EngineException(
+                    "username '{}' is already used".format(username),
+                    HTTPStatus.CONFLICT,
+                )
 
         if final_content["username"] == "admin":
             for mapping in edit_content.get("remove_project_role_mappings", ()):
-                if mapping["project"] == "admin" and mapping.get("role") in (None, "system_admin"):
+                if mapping["project"] == "admin" and mapping.get("role") in (
+                    None,
+                    "system_admin",
+                ):
                     # TODO make this also available for project id and role id
-                    raise EngineException("You cannot remove system_admin role from admin user",
-                                          http_code=HTTPStatus.FORBIDDEN)
+                    raise EngineException(
+                        "You cannot remove system_admin role from admin user",
+                        http_code=HTTPStatus.FORBIDDEN,
+                    )
 
         return final_content
 
@@ -670,7 +850,9 @@ class UserTopicAuth(UserTopic):
         :return: None if ok or raises EngineException with the conflict
         """
         if db_content["username"] == session["username"]:
-            raise EngineException("You cannot delete your own login user ", http_code=HTTPStatus.CONFLICT)
+            raise EngineException(
+                "You cannot delete your own login user ", http_code=HTTPStatus.CONFLICT
+            )
         # TODO: Check that user is not logged in ? How? (Would require listing current tokens)
 
     @staticmethod
@@ -684,10 +866,14 @@ class UserTopicAuth(UserTopic):
         if "projects" in content:
             for project in content["projects"]:
                 for role in project["roles"]:
-                    project_role_mappings.append({"project": project["_id"],
-                                                  "project_name": project["name"],
-                                                  "role": role["_id"],
-                                                  "role_name": role["name"]})
+                    project_role_mappings.append(
+                        {
+                            "project": project["_id"],
+                            "project_name": project["name"],
+                            "role": role["_id"],
+                            "role_name": role["name"],
+                        }
+                    )
             del content["projects"]
         content["project_role_mappings"] = project_role_mappings
 
@@ -748,13 +934,17 @@ class UserTopicAuth(UserTopic):
         # Allow _id to be a name or uuid
         filter_q = {"username": _id}
         # users = self.auth.get_user_list(filter_q)
-        users = self.list(session, filter_q)   # To allow default filtering (Bug 853)
+        users = self.list(session, filter_q)  # To allow default filtering (Bug 853)
         if len(users) == 1:
             return users[0]
         elif len(users) > 1:
-            raise EngineException("Too many users found for '{}'".format(_id), HTTPStatus.CONFLICT)
+            raise EngineException(
+                "Too many users found for '{}'".format(_id), HTTPStatus.CONFLICT
+            )
         else:
-            raise EngineException("User '{}' not found".format(_id), HTTPStatus.NOT_FOUND)
+            raise EngineException(
+                "User '{}' not found".format(_id), HTTPStatus.NOT_FOUND
+            )
 
     def edit(self, session, _id, indata=None, kwargs=None, content=None):
         """
@@ -779,22 +969,36 @@ class UserTopicAuth(UserTopic):
             content = self.check_conflict_on_edit(session, content, indata, _id=_id)
             # self.format_on_edit(content, indata)
 
-            if not ("password" in indata or "username" in indata or indata.get("remove_project_role_mappings") or
-                    indata.get("add_project_role_mappings") or indata.get("project_role_mappings") or
-                    indata.get("projects") or indata.get("add_projects")):
+            if not (
+                "password" in indata
+                or "username" in indata
+                or indata.get("remove_project_role_mappings")
+                or indata.get("add_project_role_mappings")
+                or indata.get("project_role_mappings")
+                or indata.get("projects")
+                or indata.get("add_projects")
+            ):
                 return _id
-            if indata.get("project_role_mappings") \
-                    and (indata.get("remove_project_role_mappings") or indata.get("add_project_role_mappings")):
-                raise EngineException("Option 'project_role_mappings' is incompatible with 'add_project_role_mappings"
-                                      "' or 'remove_project_role_mappings'", http_code=HTTPStatus.BAD_REQUEST)
+            if indata.get("project_role_mappings") and (
+                indata.get("remove_project_role_mappings")
+                or indata.get("add_project_role_mappings")
+            ):
+                raise EngineException(
+                    "Option 'project_role_mappings' is incompatible with 'add_project_role_mappings"
+                    "' or 'remove_project_role_mappings'",
+                    http_code=HTTPStatus.BAD_REQUEST,
+                )
 
             if indata.get("projects") or indata.get("add_projects"):
                 role = self.auth.get_role_list({"name": "project_admin"})
                 if not role:
                     role = self.auth.get_role_list()
                 if not role:
-                    raise AuthconnNotFoundException("Can't find a default role for user '{}'"
-                                                    .format(content["username"]))
+                    raise AuthconnNotFoundException(
+                        "Can't find a default role for user '{}'".format(
+                            content["username"]
+                        )
+                    )
                 rid = role[0]["_id"]
                 if "add_project_role_mappings" not in indata:
                     indata["add_project_role_mappings"] = []
@@ -804,12 +1008,18 @@ class UserTopicAuth(UserTopic):
                     # backward compatible
                     for k, v in indata["projects"].items():
                         if k.startswith("$") and v is None:
-                            indata["remove_project_role_mappings"].append({"project": k[1:]})
+                            indata["remove_project_role_mappings"].append(
+                                {"project": k[1:]}
+                            )
                         elif k.startswith("$+"):
-                            indata["add_project_role_mappings"].append({"project": v, "role": rid})
+                            indata["add_project_role_mappings"].append(
+                                {"project": v, "role": rid}
+                            )
                     del indata["projects"]
                 for proj in indata.get("projects", []) + indata.get("add_projects", []):
-                    indata["add_project_role_mappings"].append({"project": proj, "role": rid})
+                    indata["add_project_role_mappings"].append(
+                        {"project": proj, "role": rid}
+                    )
 
             # user = self.show(session, _id)   # Already in 'content'
             original_mapping = content["project_role_mappings"]
@@ -820,17 +1030,28 @@ class UserTopicAuth(UserTopic):
             # remove
             for to_remove in indata.get("remove_project_role_mappings", ()):
                 for mapping in original_mapping:
-                    if to_remove["project"] in (mapping["project"], mapping["project_name"]):
-                        if not to_remove.get("role") or to_remove["role"] in (mapping["role"], mapping["role_name"]):
+                    if to_remove["project"] in (
+                        mapping["project"],
+                        mapping["project_name"],
+                    ):
+                        if not to_remove.get("role") or to_remove["role"] in (
+                            mapping["role"],
+                            mapping["role_name"],
+                        ):
                             mappings_to_remove.append(mapping)
 
             # add
             for to_add in indata.get("add_project_role_mappings", ()):
                 for mapping in original_mapping:
-                    if to_add["project"] in (mapping["project"], mapping["project_name"]) and \
-                            to_add["role"] in (mapping["role"], mapping["role_name"]):
-
-                        if mapping in mappings_to_remove:   # do not remove
+                    if to_add["project"] in (
+                        mapping["project"],
+                        mapping["project_name"],
+                    ) and to_add["role"] in (
+                        mapping["role"],
+                        mapping["role_name"],
+                    ):
+
+                        if mapping in mappings_to_remove:  # do not remove
                             mappings_to_remove.remove(mapping)
                         break  # do not add, it is already at user
                 else:
@@ -842,9 +1063,14 @@ class UserTopicAuth(UserTopic):
             if indata.get("project_role_mappings"):
                 for to_set in indata["project_role_mappings"]:
                     for mapping in original_mapping:
-                        if to_set["project"] in (mapping["project"], mapping["project_name"]) and \
-                                to_set["role"] in (mapping["role"], mapping["role_name"]):
-                            if mapping in mappings_to_remove:   # do not remove
+                        if to_set["project"] in (
+                            mapping["project"],
+                            mapping["project_name"],
+                        ) and to_set["role"] in (
+                            mapping["role"],
+                            mapping["role_name"],
+                        ):
+                            if mapping in mappings_to_remove:  # do not remove
                                 mappings_to_remove.remove(mapping)
                             break  # do not add, it is already at user
                     else:
@@ -853,19 +1079,29 @@ class UserTopicAuth(UserTopic):
                         mappings_to_add.append({"project": pid, "role": rid})
                 for mapping in original_mapping:
                     for to_set in indata["project_role_mappings"]:
-                        if to_set["project"] in (mapping["project"], mapping["project_name"]) and \
-                                to_set["role"] in (mapping["role"], mapping["role_name"]):
+                        if to_set["project"] in (
+                            mapping["project"],
+                            mapping["project_name"],
+                        ) and to_set["role"] in (
+                            mapping["role"],
+                            mapping["role_name"],
+                        ):
                             break
                     else:
                         # delete
-                        if mapping not in mappings_to_remove:   # do not remove
+                        if mapping not in mappings_to_remove:  # do not remove
                             mappings_to_remove.append(mapping)
 
-            self.auth.update_user({"_id": _id, "username": indata.get("username"), "password": indata.get("password"),
-                                   "add_project_role_mappings": mappings_to_add,
-                                   "remove_project_role_mappings": mappings_to_remove
-                                   })
-            data_to_send = {'_id': _id, "changes": indata}
+            self.auth.update_user(
+                {
+                    "_id": _id,
+                    "username": indata.get("username"),
+                    "password": indata.get("password"),
+                    "add_project_role_mappings": mappings_to_add,
+                    "remove_project_role_mappings": mappings_to_remove,
+                }
+            )
+            data_to_send = {"_id": _id, "changes": indata}
             self._send_msg("edited", data_to_send, not_send_msg=None)
 
             # return _id
@@ -883,7 +1119,9 @@ class UserTopicAuth(UserTopic):
         user_list = self.auth.get_user_list(filter_q)
         if not session["allow_show_user_project_role"]:
             # Bug 853 - Default filtering
-            user_list = [usr for usr in user_list if usr["username"] == session["username"]]
+            user_list = [
+                usr for usr in user_list if usr["username"] == session["username"]
+            ]
         return user_list
 
     def delete(self, session, _id, dry_run=False, not_send_msg=None):
@@ -928,13 +1166,17 @@ class ProjectTopicAuth(ProjectTopic):
         """
         project_name = indata.get("name")
         if is_valid_uuid(project_name):
-            raise EngineException("project name '{}' cannot have an uuid format".format(project_name),
-                                  HTTPStatus.UNPROCESSABLE_ENTITY)
+            raise EngineException(
+                "project name '{}' cannot have an uuid format".format(project_name),
+                HTTPStatus.UNPROCESSABLE_ENTITY,
+            )
 
         project_list = self.auth.get_project_list(filter_q={"name": project_name})
 
         if project_list:
-            raise EngineException("project '{}' exists".format(project_name), HTTPStatus.CONFLICT)
+            raise EngineException(
+                "project '{}' exists".format(project_name), HTTPStatus.CONFLICT
+            )
 
     def check_conflict_on_edit(self, session, final_content, edit_content, _id):
         """
@@ -950,15 +1192,24 @@ class ProjectTopicAuth(ProjectTopic):
         project_name = edit_content.get("name")
         if project_name != final_content["name"]:  # It is a true renaming
             if is_valid_uuid(project_name):
-                raise EngineException("project name '{}' cannot have an uuid format".format(project_name),
-                                      HTTPStatus.UNPROCESSABLE_ENTITY)
+                raise EngineException(
+                    "project name '{}' cannot have an uuid format".format(project_name),
+                    HTTPStatus.UNPROCESSABLE_ENTITY,
+                )
 
             if final_content["name"] == "admin":
-                raise EngineException("You cannot rename project 'admin'", http_code=HTTPStatus.CONFLICT)
+                raise EngineException(
+                    "You cannot rename project 'admin'", http_code=HTTPStatus.CONFLICT
+                )
 
             # Check that project name is not used, regardless keystone already checks this
-            if project_name and self.auth.get_project_list(filter_q={"name": project_name}):
-                raise EngineException("project '{}' is already used".format(project_name), HTTPStatus.CONFLICT)
+            if project_name and self.auth.get_project_list(
+                filter_q={"name": project_name}
+            ):
+                raise EngineException(
+                    "project '{}' is already used".format(project_name),
+                    HTTPStatus.CONFLICT,
+                )
         return final_content
 
     def check_conflict_on_del(self, session, _id, db_content):
@@ -973,23 +1224,39 @@ class ProjectTopicAuth(ProjectTopic):
 
         def check_rw_projects(topic, title, id_field):
             for desc in self.db.get_list(topic):
-                if _id in desc["_admin"]["projects_read"] + desc["_admin"]["projects_write"]:
-                    raise EngineException("Project '{}' ({}) is being used by {} '{}'"
-                                          .format(db_content["name"], _id, title, desc[id_field]), HTTPStatus.CONFLICT)
+                if (
+                    _id
+                    in desc["_admin"]["projects_read"]
+                    + desc["_admin"]["projects_write"]
+                ):
+                    raise EngineException(
+                        "Project '{}' ({}) is being used by {} '{}'".format(
+                            db_content["name"], _id, title, desc[id_field]
+                        ),
+                        HTTPStatus.CONFLICT,
+                    )
 
         if _id in session["project_id"]:
-            raise EngineException("You cannot delete your own project", http_code=HTTPStatus.CONFLICT)
+            raise EngineException(
+                "You cannot delete your own project", http_code=HTTPStatus.CONFLICT
+            )
 
         if db_content["name"] == "admin":
-            raise EngineException("You cannot delete project 'admin'", http_code=HTTPStatus.CONFLICT)
+            raise EngineException(
+                "You cannot delete project 'admin'", http_code=HTTPStatus.CONFLICT
+            )
 
         # If any user is using this project, raise CONFLICT exception
         if not session["force"]:
             for user in self.auth.get_user_list():
                 for prm in user.get("project_role_mappings"):
                     if prm["project"] == _id:
-                        raise EngineException("Project '{}' ({}) is being used by user '{}'"
-                                              .format(db_content["name"], _id, user["username"]), HTTPStatus.CONFLICT)
+                        raise EngineException(
+                            "Project '{}' ({}) is being used by user '{}'".format(
+                                db_content["name"], _id, user["username"]
+                            ),
+                            HTTPStatus.CONFLICT,
+                        )
 
         # If any VNFD, NSD, NST, PDU, etc. is using this project, raise CONFLICT exception
         if not session["force"]:
@@ -1018,7 +1285,9 @@ class ProjectTopicAuth(ProjectTopic):
             BaseTopic._update_input_with_kwargs(content, kwargs)
             content = self._validate_input_new(content, session["force"])
             self.check_conflict_on_new(session, content)
-            self.format_on_new(content, project_id=session["project_id"], make_public=session["public"])
+            self.format_on_new(
+                content, project_id=session["project_id"], make_public=session["public"]
+            )
             _id = self.auth.create_project(content)
             rollback.append({"topic": self.topic, "_id": _id})
             self._send_msg("created", content, not_send_msg=None)
@@ -1038,7 +1307,7 @@ class ProjectTopicAuth(ProjectTopic):
         # Allow _id to be a name or uuid
         filter_q = {self.id_field(self.topic, _id): _id}
         # projects = self.auth.get_project_list(filter_q=filter_q)
-        projects = self.list(session, filter_q)   # To allow default filtering (Bug 853)
+        projects = self.list(session, filter_q)  # To allow default filtering (Bug 853)
         if len(projects) == 1:
             return projects[0]
         elif len(projects) > 1:
@@ -1115,7 +1384,7 @@ class ProjectTopicAuth(ProjectTopic):
 
 class RoleTopicAuth(BaseTopic):
     topic = "roles"
-    topic_msg = None    # "roles"
+    topic_msg = None  # "roles"
     schema_new = roles_new_schema
     schema_edit = roles_edit_schema
     multiproject = False
@@ -1145,7 +1414,14 @@ class RoleTopicAuth(BaseTopic):
             if role_def[-1] == ":":
                 raise ValidationError("Operation cannot end with ':'")
 
-            match = next((op for op in operations if op == role_def or op.startswith(role_def + ":")), None)
+            match = next(
+                (
+                    op
+                    for op in operations
+                    if op == role_def or op.startswith(role_def + ":")
+                ),
+                None,
+            )
 
             if not match:
                 raise ValidationError("Invalid permission '{}'".format(role_def))
@@ -1189,13 +1465,17 @@ class RoleTopicAuth(BaseTopic):
         # check name is not uuid
         role_name = indata.get("name")
         if is_valid_uuid(role_name):
-            raise EngineException("role name '{}' cannot have an uuid format".format(role_name),
-                                  HTTPStatus.UNPROCESSABLE_ENTITY)
+            raise EngineException(
+                "role name '{}' cannot have an uuid format".format(role_name),
+                HTTPStatus.UNPROCESSABLE_ENTITY,
+            )
         # check name not exists
         name = indata["name"]
         # if self.db.get_one(self.topic, {"name": indata.get("name")}, fail_on_empty=False, fail_on_more=False):
         if self.auth.get_role_list({"name": name}):
-            raise EngineException("role name '{}' exists".format(name), HTTPStatus.CONFLICT)
+            raise EngineException(
+                "role name '{}' exists".format(name), HTTPStatus.CONFLICT
+            )
 
     def check_conflict_on_edit(self, session, final_content, edit_content, _id):
         """
@@ -1215,13 +1495,18 @@ class RoleTopicAuth(BaseTopic):
         # check name is not uuid
         role_name = edit_content.get("name")
         if is_valid_uuid(role_name):
-            raise EngineException("role name '{}' cannot have an uuid format".format(role_name),
-                                  HTTPStatus.UNPROCESSABLE_ENTITY)
+            raise EngineException(
+                "role name '{}' cannot have an uuid format".format(role_name),
+                HTTPStatus.UNPROCESSABLE_ENTITY,
+            )
 
         # Check renaming of admin roles
         role = self.auth.get_role(_id)
         if role["name"] in ["system_admin", "project_admin"]:
-            raise EngineException("You cannot rename role '{}'".format(role["name"]), http_code=HTTPStatus.FORBIDDEN)
+            raise EngineException(
+                "You cannot rename role '{}'".format(role["name"]),
+                http_code=HTTPStatus.FORBIDDEN,
+            )
 
         # check name not exists
         if "name" in edit_content:
@@ -1229,7 +1514,9 @@ class RoleTopicAuth(BaseTopic):
             # if self.db.get_one(self.topic, {"name":role_name,"_id.ne":_id}, fail_on_empty=False, fail_on_more=False):
             roles = self.auth.get_role_list({"name": role_name})
             if roles and roles[0][BaseTopic.id_field("roles", _id)] != _id:
-                raise EngineException("role name '{}' exists".format(role_name), HTTPStatus.CONFLICT)
+                raise EngineException(
+                    "role name '{}' exists".format(role_name), HTTPStatus.CONFLICT
+                )
 
         return final_content
 
@@ -1244,18 +1531,25 @@ class RoleTopicAuth(BaseTopic):
         """
         role = self.auth.get_role(_id)
         if role["name"] in ["system_admin", "project_admin"]:
-            raise EngineException("You cannot delete role '{}'".format(role["name"]), http_code=HTTPStatus.FORBIDDEN)
+            raise EngineException(
+                "You cannot delete role '{}'".format(role["name"]),
+                http_code=HTTPStatus.FORBIDDEN,
+            )
 
         # If any user is using this role, raise CONFLICT exception
         if not session["force"]:
             for user in self.auth.get_user_list():
                 for prm in user.get("project_role_mappings"):
                     if prm["role"] == _id:
-                        raise EngineException("Role '{}' ({}) is being used by user '{}'"
-                                              .format(role["name"], _id, user["username"]), HTTPStatus.CONFLICT)
+                        raise EngineException(
+                            "Role '{}' ({}) is being used by user '{}'".format(
+                                role["name"], _id, user["username"]
+                            ),
+                            HTTPStatus.CONFLICT,
+                        )
 
     @staticmethod
-    def format_on_new(content, project_id=None, make_public=False):   # TO BE REMOVED ?
+    def format_on_new(content, project_id=None, make_public=False):  # TO BE REMOVED ?
         """
         Modifies content descriptor to include _admin
 
@@ -1311,11 +1605,15 @@ class RoleTopicAuth(BaseTopic):
         """
         filter_q = {BaseTopic.id_field(self.topic, _id): _id}
         # roles = self.auth.get_role_list(filter_q)
-        roles = self.list(session, filter_q)   # To allow default filtering (Bug 853)
+        roles = self.list(session, filter_q)  # To allow default filtering (Bug 853)
         if not roles:
-            raise AuthconnNotFoundException("Not found any role with filter {}".format(filter_q))
+            raise AuthconnNotFoundException(
+                "Not found any role with filter {}".format(filter_q)
+            )
         elif len(roles) > 1:
-            raise AuthconnConflictException("Found more than one role with filter {}".format(filter_q))
+            raise AuthconnConflictException(
+                "Found more than one role with filter {}".format(filter_q)
+            )
         return roles[0]
 
     def list(self, session, filter_q=None, api_req=False):
@@ -1352,7 +1650,9 @@ class RoleTopicAuth(BaseTopic):
             self._update_input_with_kwargs(content, kwargs)
             content = self._validate_input_new(content, session["force"])
             self.check_conflict_on_new(session, content)
-            self.format_on_new(content, project_id=session["project_id"], make_public=session["public"])
+            self.format_on_new(
+                content, project_id=session["project_id"], make_public=session["public"]
+            )
             # role_name = content["name"]
             rid = self.auth.create_role(content)
             content["_id"] = rid
@@ -1376,9 +1676,13 @@ class RoleTopicAuth(BaseTopic):
         filter_q = {BaseTopic.id_field(self.topic, _id): _id}
         roles = self.auth.get_role_list(filter_q)
         if not roles:
-            raise AuthconnNotFoundException("Not found any role with filter {}".format(filter_q))
+            raise AuthconnNotFoundException(
+                "Not found any role with filter {}".format(filter_q)
+            )
         elif len(roles) > 1:
-            raise AuthconnConflictException("Found more than one role with filter {}".format(filter_q))
+            raise AuthconnConflictException(
+                "Found more than one role with filter {}".format(filter_q)
+            )
         rid = roles[0]["_id"]
         self.check_conflict_on_del(session, rid, None)
         # filter_q = {"_id": _id}
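
RoleTopicAuth rejects role names that are in UUID format (so they cannot be confused with an _id) and names that already exist. A minimal standalone sketch of that validation pattern, with a simplified is_valid_uuid and an in-memory set standing in for the real role lookup:

    from http import HTTPStatus
    from uuid import UUID


    def is_valid_uuid(value):
        # Simplified stand-in for osm_nbi.validation.is_valid_uuid
        try:
            UUID(str(value))
            return True
        except ValueError:
            return False


    def check_role_name(name, existing_names):
        # Names that parse as a UUID could shadow an _id lookup
        if is_valid_uuid(name):
            raise ValueError(
                "role name '{}' cannot have an uuid format ({})".format(
                    name, HTTPStatus.UNPROCESSABLE_ENTITY
                )
            )
        # Duplicate names are a conflict
        if name in existing_names:
            raise ValueError(
                "role name '{}' exists ({})".format(name, HTTPStatus.CONFLICT)
            )


    check_role_name("operator", existing_names={"system_admin", "project_admin"})  # passes silently
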
index dcf12c3..7c29f84 100644 (file)
@@ -34,6 +34,7 @@ import logging
 import yaml
 from base64 import standard_b64decode
 from copy import deepcopy
+
 # from functools import reduce
 from http import HTTPStatus
 from time import time
@@ -59,8 +60,10 @@ class Authenticator:
     This class must be threading safe
     """
 
-    periodin_db_pruning = 60 * 30  # for the internal backend only. every 30 minutes expired tokens will be pruned
-    token_limit = 500   # when reached, the token cache will be cleared
+    periodin_db_pruning = (
+        60 * 30
+    )  # for the internal backend only. every 30 minutes expired tokens will be pruned
+    token_limit = 500  # when reached, the token cache will be cleared
 
     def __init__(self, valid_methods, valid_query_string):
         """
@@ -72,7 +75,9 @@ class Authenticator:
         self.db = None
         self.msg = None
         self.tokens_cache = dict()
-        self.next_db_prune_time = 0  # time when next cleaning of expired tokens must be done
+        self.next_db_prune_time = (
+            0  # time when next cleaning of expired tokens must be done
+        )
         self.roles_to_operations_file = None
         # self.roles_to_operations_table = None
         self.resources_to_operations_mapping = {}
@@ -81,7 +86,7 @@ class Authenticator:
         self.role_permissions = []
         self.valid_methods = valid_methods
         self.valid_query_string = valid_query_string
-        self.system_admin_role_id = None   # system_role id
+        self.system_admin_role_id = None  # system_role id
         self.test_project_id = None  # test_project_id
 
     def start(self, config):
@@ -103,8 +108,11 @@ class Authenticator:
                     self.db = dbmemory.DbMemory()
                     self.db.db_connect(config["database"])
                 else:
-                    raise AuthException("Invalid configuration param '{}' at '[database]':'driver'"
-                                        .format(config["database"]["driver"]))
+                    raise AuthException(
+                        "Invalid configuration param '{}' at '[database]':'driver'".format(
+                            config["database"]["driver"]
+                        )
+                    )
             if not self.msg:
                 if config["message"]["driver"] == "local":
                     self.msg = msglocal.MsgLocal()
@@ -113,41 +121,60 @@ class Authenticator:
                     self.msg = msgkafka.MsgKafka()
                     self.msg.connect(config["message"])
                 else:
-                    raise AuthException("Invalid configuration param '{}' at '[message]':'driver'"
-                                        .format(config["message"]["driver"]))
+                    raise AuthException(
+                        "Invalid configuration param '{}' at '[message]':'driver'".format(
+                            config["message"]["driver"]
+                        )
+                    )
             if not self.backend:
                 if config["authentication"]["backend"] == "keystone":
-                    self.backend = AuthconnKeystone(self.config["authentication"], self.db, self.role_permissions)
+                    self.backend = AuthconnKeystone(
+                        self.config["authentication"], self.db, self.role_permissions
+                    )
                 elif config["authentication"]["backend"] == "internal":
-                    self.backend = AuthconnInternal(self.config["authentication"], self.db, self.role_permissions)
+                    self.backend = AuthconnInternal(
+                        self.config["authentication"], self.db, self.role_permissions
+                    )
                     self._internal_tokens_prune("tokens")
                 elif config["authentication"]["backend"] == "tacacs":
-                    self.backend = AuthconnTacacs(self.config["authentication"], self.db, self.role_permissions)
+                    self.backend = AuthconnTacacs(
+                        self.config["authentication"], self.db, self.role_permissions
+                    )
                     self._internal_tokens_prune("tokens_tacacs")
                 else:
-                    raise AuthException("Unknown authentication backend: {}"
-                                        .format(config["authentication"]["backend"]))
+                    raise AuthException(
+                        "Unknown authentication backend: {}".format(
+                            config["authentication"]["backend"]
+                        )
+                    )
 
             if not self.roles_to_operations_file:
                 if "roles_to_operations" in config["rbac"]:
-                    self.roles_to_operations_file = config["rbac"]["roles_to_operations"]
+                    self.roles_to_operations_file = config["rbac"][
+                        "roles_to_operations"
+                    ]
                 else:
                     possible_paths = (
-                        __file__[:__file__.rfind("auth.py")] + "roles_to_operations.yml",
-                        "./roles_to_operations.yml"
+                        __file__[: __file__.rfind("auth.py")]
+                        + "roles_to_operations.yml",
+                        "./roles_to_operations.yml",
                     )
                     for config_file in possible_paths:
                         if path.isfile(config_file):
                             self.roles_to_operations_file = config_file
                             break
                 if not self.roles_to_operations_file:
-                    raise AuthException("Invalid permission configuration: roles_to_operations file missing")
+                    raise AuthException(
+                        "Invalid permission configuration: roles_to_operations file missing"
+                    )
 
             # load role_permissions
             def load_role_permissions(method_dict):
                 for k in method_dict:
                     if k == "ROLE_PERMISSION":
-                        for method in chain(method_dict.get("METHODS", ()), method_dict.get("TODO", ())):
+                        for method in chain(
+                            method_dict.get("METHODS", ()), method_dict.get("TODO", ())
+                        ):
                             permission = method_dict["ROLE_PERMISSION"] + method.lower()
                             if permission not in self.role_permissions:
                                 self.role_permissions.append(permission)
@@ -164,11 +191,17 @@ class Authenticator:
                         self.role_permissions.append(permission)
 
             # get ids of role system_admin and test project
-            role_system_admin = self.db.get_one("roles", {"name": "system_admin"}, fail_on_empty=False)
+            role_system_admin = self.db.get_one(
+                "roles", {"name": "system_admin"}, fail_on_empty=False
+            )
             if role_system_admin:
                 self.system_admin_role_id = role_system_admin["_id"]
-            test_project_name = self.config["authentication"].get("project_not_authorized", "admin")
-            test_project = self.db.get_one("projects", {"name": test_project_name}, fail_on_empty=False)
+            test_project_name = self.config["authentication"].get(
+                "project_not_authorized", "admin"
+            )
+            test_project = self.db.get_one(
+                "projects", {"name": test_project_name}, fail_on_empty=False
+            )
             if test_project:
                 self.test_project_id = test_project["_id"]
 
@@ -197,7 +230,9 @@ class Authenticator:
         project_desc["_id"] = str(uuid4())
         project_desc["_admin"] = {"created": now, "modified": now}
         pid = self.backend.create_project(project_desc)
-        self.logger.info("Project '{}' created at database".format(project_desc["name"]))
+        self.logger.info(
+            "Project '{}' created at database".format(project_desc["name"])
+        )
         return pid
 
     def create_admin_user(self, project_id):
@@ -211,7 +246,11 @@ class Authenticator:
             return None
         # user_desc = {"username": "admin", "password": "admin", "projects": [project_id]}
         now = time()
-        user_desc = {"username": "admin", "password": "admin", "_admin": {"created": now, "modified": now}}
+        user_desc = {
+            "username": "admin",
+            "password": "admin",
+            "_admin": {"created": now, "modified": now},
+        }
         if project_id:
             pid = project_id
         else:
@@ -221,12 +260,14 @@ class Authenticator:
         # role = self.db.get_one("roles", {"name": "system_admin"}, fail_on_empty=False, fail_on_more=False)
         roles = self.backend.get_role_list({"name": "system_admin"})
         if pid and roles:
-            user_desc["project_role_mappings"] = [{"project": pid, "role": roles[0]["_id"]}]
+            user_desc["project_role_mappings"] = [
+                {"project": pid, "role": roles[0]["_id"]}
+            ]
         uid = self.backend.create_user(user_desc)
         self.logger.info("User '{}' created at database".format(user_desc["username"]))
         return uid
 
-    def init_db(self, target_version='1.0'):
+    def init_db(self, target_version="1.0"):
         """
         Check if the database has been initialized, with at least one user. If not, create the required tables
         and insert the predefined mappings between roles and permissions.
@@ -238,7 +279,9 @@ class Authenticator:
         records = self.backend.get_role_list()
 
         # Loading permissions to AUTH. At least system_admin must be present.
-        if not records or not next((r for r in records if r["name"] == "system_admin"), None):
+        if not records or not next(
+            (r for r in records if r["name"] == "system_admin"), None
+        ):
             with open(self.roles_to_operations_file, "r") as stream:
                 roles_to_operations_yaml = yaml.load(stream, Loader=yaml.Loader)
 
@@ -248,23 +291,36 @@ class Authenticator:
                 if role_with_operations["name"] not in role_names:
                     role_names.append(role_with_operations["name"])
                 else:
-                    raise AuthException("Duplicated role name '{}' at file '{}''"
-                                        .format(role_with_operations["name"], self.roles_to_operations_file))
+                    raise AuthException(
+                        "Duplicated role name '{}' at file '{}''".format(
+                            role_with_operations["name"], self.roles_to_operations_file
+                        )
+                    )
 
                 if not role_with_operations["permissions"]:
                     continue
 
-                for permission, is_allowed in role_with_operations["permissions"].items():
+                for permission, is_allowed in role_with_operations[
+                    "permissions"
+                ].items():
                     if not isinstance(is_allowed, bool):
-                        raise AuthException("Invalid value for permission '{}' at role '{}'; at file '{}'"
-                                            .format(permission, role_with_operations["name"],
-                                                    self.roles_to_operations_file))
+                        raise AuthException(
+                            "Invalid value for permission '{}' at role '{}'; at file '{}'".format(
+                                permission,
+                                role_with_operations["name"],
+                                self.roles_to_operations_file,
+                            )
+                        )
 
                     # TODO check permission is ok
                     if permission[-1] == ":":
-                        raise AuthException("Invalid permission '{}' terminated in ':' for role '{}'; at file {}"
-                                            .format(permission, role_with_operations["name"],
-                                                    self.roles_to_operations_file))
+                        raise AuthException(
+                            "Invalid permission '{}' terminated in ':' for role '{}'; at file {}".format(
+                                permission,
+                                role_with_operations["name"],
+                                self.roles_to_operations_file,
+                            )
+                        )
 
                 if "default" not in role_with_operations["permissions"]:
                     role_with_operations["permissions"]["default"] = False
@@ -280,11 +336,17 @@ class Authenticator:
                 # self.db.create(self.roles_to_operations_table, role_with_operations)
                 try:
                     self.backend.create_role(role_with_operations)
-                    self.logger.info("Role '{}' created".format(role_with_operations["name"]))
+                    self.logger.info(
+                        "Role '{}' created".format(role_with_operations["name"])
+                    )
                 except (AuthException, AuthconnException) as e:
                     if role_with_operations["name"] == "system_admin":
                         raise
-                    self.logger.error("Role '{}' cannot be created: {}".format(role_with_operations["name"], e))
+                    self.logger.error(
+                        "Role '{}' cannot be created: {}".format(
+                            role_with_operations["name"], e
+                        )
+                    )
 
         # Create admin project&user if required
         pid = self.create_admin_project()
@@ -310,11 +372,25 @@ class Authenticator:
                     if user_with_system_admin:
                         break
                 if not user_with_system_admin:
-                    self.backend.update_user({"_id": user_admin_id,
-                                              "add_project_role_mappings": [{"project": pid, "role": role_id}]})
-                    self.logger.info("Added role system admin to user='{}' project=admin".format(user_admin_id))
+                    self.backend.update_user(
+                        {
+                            "_id": user_admin_id,
+                            "add_project_role_mappings": [
+                                {"project": pid, "role": role_id}
+                            ],
+                        }
+                    )
+                    self.logger.info(
+                        "Added role system admin to user='{}' project=admin".format(
+                            user_admin_id
+                        )
+                    )
             except Exception as e:
-                self.logger.error("Error in Authorization DataBase initialization: {}: {}".format(type(e).__name__, e))
+                self.logger.error(
+                    "Error in Authorization DataBase initialization: {}: {}".format(
+                        type(e).__name__, e
+                    )
+                )
 
         self.load_operation_to_allowed_roles()
 
@@ -333,13 +409,21 @@ class Authenticator:
         for record in records:
             if not record.get("permissions"):
                 continue
-            record_permissions = {oper: record["permissions"].get("default", False) for oper in self.role_permissions}
-            operations_joined = [(oper, value) for oper, value in record["permissions"].items()
-                                 if oper not in ignore_fields]
+            record_permissions = {
+                oper: record["permissions"].get("default", False)
+                for oper in self.role_permissions
+            }
+            operations_joined = [
+                (oper, value)
+                for oper, value in record["permissions"].items()
+                if oper not in ignore_fields
+            ]
             operations_joined.sort(key=lambda x: x[0].count(":"))
 
             for oper in operations_joined:
-                match = list(filter(lambda x: x.find(oper[0]) == 0, record_permissions.keys()))
+                match = list(
+                    filter(lambda x: x.find(oper[0]) == 0, record_permissions.keys())
+                )
 
                 for m in match:
                     record_permissions[m] = oper[1]
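
load_operation_to_allowed_roles seeds every operation with the role's "default" value and then applies the explicit entries from least to most specific (sorted by the number of ":" separators), so a longer prefix such as nsds:id: overrides a shorter one. A small sketch of that resolution order, with made-up operation names and a simplified skip list:

    def resolve_permissions(role_permission_entries, all_operations):
        # Start every operation from the role's "default", then apply entries
        # from least to most specific so that "nsds:id:delete" overrides "nsds"
        resolved = {
            oper: role_permission_entries.get("default", False) for oper in all_operations
        }
        entries = [
            (oper, value)
            for oper, value in role_permission_entries.items()
            if oper not in ("default", "admin")
        ]
        entries.sort(key=lambda item: item[0].count(":"))
        for prefix, allowed in entries:
            for oper in resolved:
                if oper.startswith(prefix):
                    resolved[oper] = allowed
        return resolved


    operations = ["nsds:get", "nsds:id:get", "nsds:id:delete", "tokens:post"]
    role = {"default": False, "nsds": True, "nsds:id:delete": False}
    print(resolve_permissions(role, operations))
    # {'nsds:get': True, 'nsds:id:get': True, 'nsds:id:delete': False, 'tokens:post': False}
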
@@ -351,7 +435,9 @@ class Authenticator:
 
         self.operation_to_allowed_roles = permissions
 
-    def authorize(self, role_permission=None, query_string_operations=None, item_id=None):
+    def authorize(
+        self, role_permission=None, query_string_operations=None, item_id=None
+    ):
         token = None
         user_passwd64 = None
         try:
@@ -369,7 +455,9 @@ class Authenticator:
                     token = cherrypy.session.get("Authorization")
                     if token == "logout":
                         token = None  # force Unauthorized response to insert user password again
-                elif user_passwd64 and cherrypy.request.config.get("auth.allow_basic_authentication"):
+                elif user_passwd64 and cherrypy.request.config.get(
+                    "auth.allow_basic_authentication"
+                ):
                     # 3. Get new token from user password
                     user = None
                     passwd = None
@@ -378,13 +466,17 @@ class Authenticator:
                         user, _, passwd = user_passwd.partition(":")
                     except Exception:
                         pass
-                    outdata = self.new_token(None, {"username": user, "password": passwd})
+                    outdata = self.new_token(
+                        None, {"username": user, "password": passwd}
+                    )
                     token = outdata["_id"]
-                    cherrypy.session['Authorization'] = token
+                    cherrypy.session["Authorization"] = token
 
             if not token:
-                raise AuthException("Needed a token or Authorization http header",
-                                    http_code=HTTPStatus.UNAUTHORIZED)
+                raise AuthException(
+                    "Needed a token or Authorization http header",
+                    http_code=HTTPStatus.UNAUTHORIZED,
+                )
 
             # try to get from cache first
             now = time()
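
When only an Authorization header with the Basic scheme is available (and basic authentication is allowed), the block above base64-decodes it into user:password before requesting a new token. The decoding step in isolation, with a hypothetical helper name:

    from base64 import standard_b64decode


    def split_basic_auth(authorization_header):
        # "Basic dXNlcjpzZWNyZXQ=" -> ("user", "secret")
        scheme, _, user_passwd64 = authorization_header.partition(" ")
        if scheme.lower() != "basic" or not user_passwd64:
            return None, None
        user_passwd = standard_b64decode(user_passwd64).decode()
        user, _, passwd = user_passwd.partition(":")
        return user, passwd


    print(split_basic_auth("Basic dXNlcjpzZWNyZXQ="))  # ('user', 'secret')
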
@@ -404,22 +496,33 @@ class Authenticator:
             # TODO add to token info remote host, port
 
             if role_permission:
-                RBAC_auth = self.check_permissions(token_info, cherrypy.request.method, role_permission,
-                                                   query_string_operations, item_id)
+                RBAC_auth = self.check_permissions(
+                    token_info,
+                    cherrypy.request.method,
+                    role_permission,
+                    query_string_operations,
+                    item_id,
+                )
                 token_info["allow_show_user_project_role"] = RBAC_auth
 
             return token_info
         except AuthException as e:
             if not isinstance(e, AuthExceptionUnauthorized):
-                if cherrypy.session.get('Authorization'):
-                    del cherrypy.session['Authorization']
-                cherrypy.response.headers["WWW-Authenticate"] = 'Bearer realm="{}"'.format(e)
+                if cherrypy.session.get("Authorization"):
+                    del cherrypy.session["Authorization"]
+                cherrypy.response.headers[
+                    "WWW-Authenticate"
+                ] = 'Bearer realm="{}"'.format(e)
             if self.config["authentication"].get("user_not_authorized"):
-                return {"id": "testing-token", "_id": "testing-token",
-                        "project_id": self.test_project_id,
-                        "username": self.config["authentication"]["user_not_authorized"],
-                        "roles": [self.system_admin_role_id],
-                        "admin": True, "allow_show_user_project_role": True}
+                return {
+                    "id": "testing-token",
+                    "_id": "testing-token",
+                    "project_id": self.test_project_id,
+                    "username": self.config["authentication"]["user_not_authorized"],
+                    "roles": [self.system_admin_role_id],
+                    "admin": True,
+                    "allow_show_user_project_role": True,
+                }
             raise
 
     def new_token(self, token_info, indata, remote):
@@ -432,7 +535,9 @@ class Authenticator:
         if not new_token_info.get("expires"):
             new_token_info["expires"] = time() + 3600
         if not new_token_info.get("admin"):
-            new_token_info["admin"] = True if new_token_info.get("project_name") == "admin" else False
+            new_token_info["admin"] = (
+                True if new_token_info.get("project_name") == "admin" else False
+            )
             # TODO put admin in RBAC
 
         if remote.name:
@@ -448,8 +553,11 @@ class Authenticator:
             return self._internal_get_token_list(token_info)
         else:
             # TODO: check if this can be avoided. Backend may provide enough information
-            return [deepcopy(token) for token in self.tokens_cache.values()
-                    if token["username"] == token_info["username"]]
+            return [
+                deepcopy(token)
+                for token in self.tokens_cache.values()
+                if token["username"] == token_info["username"]
+            ]
 
     def get_token(self, token_info, token):
         if self.config["authentication"]["backend"] == "internal":
@@ -459,8 +567,13 @@ class Authenticator:
             token_value = self.tokens_cache.get(token)
             if not token_value:
                 raise AuthException("token not found", http_code=HTTPStatus.NOT_FOUND)
-            if token_value["username"] != token_info["username"] and not token_info["admin"]:
-                raise AuthException("needed admin privileges", http_code=HTTPStatus.UNAUTHORIZED)
+            if (
+                token_value["username"] != token_info["username"]
+                and not token_info["admin"]
+            ):
+                raise AuthException(
+                    "needed admin privileges", http_code=HTTPStatus.UNAUTHORIZED
+                )
             return token_value
 
     def del_token(self, token):
@@ -470,9 +583,18 @@ class Authenticator:
             self.remove_token_from_cache(token)
             return "token '{}' deleted".format(token)
         except KeyError:
-            raise AuthException("Token '{}' not found".format(token), http_code=HTTPStatus.NOT_FOUND)
-
-    def check_permissions(self, token_info, method, role_permission=None, query_string_operations=None, item_id=None):
+            raise AuthException(
+                "Token '{}' not found".format(token), http_code=HTTPStatus.NOT_FOUND
+            )
+
+    def check_permissions(
+        self,
+        token_info,
+        method,
+        role_permission=None,
+        query_string_operations=None,
+        item_id=None,
+    ):
         """
         Checks that operation has permissions to be done, based on the assigned roles to this user project
         :param token_info: Dictionary that contains "roles" with a list of assigned roles.
@@ -508,7 +630,10 @@ class Authenticator:
                 if not query_string_operations:
                     return True
                 for query_string_operation in query_string_operations:
-                    if role not in self.operation_to_allowed_roles[query_string_operation]:
+                    if (
+                        role
+                        not in self.operation_to_allowed_roles[query_string_operation]
+                    ):
                         break
                 else:
                     return True
@@ -517,8 +642,16 @@ class Authenticator:
         # User/Project/Role whole listings are filtered elsewhere
         # uid, pid, rid = ("user_id", "project_id", "id") if is_valid_uuid(id) else ("username", "project_name", "name")
         uid = "user_id" if is_valid_uuid(item_id) else "username"
-        if (role_permission in ["projects:get", "projects:id:get", "roles:get", "roles:id:get", "users:get"]) \
-                or (role_permission == "users:id:get" and item_id == token_info[uid]):
+        if (
+            role_permission
+            in [
+                "projects:get",
+                "projects:id:get",
+                "roles:get",
+                "roles:id:get",
+                "users:get",
+            ]
+        ) or (role_permission == "users:id:get" and item_id == token_info[uid]):
             # or (role_permission == "projects:id:get" and item_id == token_info[pid]) \
             # or (role_permission == "roles:id:get" and item_id in [role[rid] for role in token_info["roles"]]):
             return False
@@ -526,7 +659,9 @@ class Authenticator:
         if not operation_allowed:
             raise AuthExceptionUnauthorized("Access denied: lack of permissions.")
         else:
-            raise AuthExceptionUnauthorized("Access denied: You have not permissions to use these admin query string")
+            raise AuthExceptionUnauthorized(
+                "Access denied: You have not permissions to use these admin query string"
+            )
 
     def get_user_list(self):
         return self.backend.get_user_list()
@@ -534,12 +669,15 @@ class Authenticator:
     def _normalize_url(self, url, method):
         # DEPRECATED !!!
         # Removing query strings
-        normalized_url = url if '?' not in url else url[:url.find("?")]
+        normalized_url = url if "?" not in url else url[: url.find("?")]
         normalized_url_splitted = normalized_url.split("/")
         parameters = {}
 
-        filtered_keys = [key for key in self.resources_to_operations_mapping.keys()
-                         if method in key.split()[0]]
+        filtered_keys = [
+            key
+            for key in self.resources_to_operations_mapping.keys()
+            if method in key.split()[0]
+        ]
 
         for idx, path_part in enumerate(normalized_url_splitted):
             tmp_keys = []
@@ -551,33 +689,47 @@ class Authenticator:
                     if splitted[idx] == "<artifactPath>":
                         tmp_keys.append(tmp_key)
                         continue
-                    elif idx == len(normalized_url_splitted) - 1 and \
-                            len(normalized_url_splitted) != len(splitted):
+                    elif idx == len(normalized_url_splitted) - 1 and len(
+                        normalized_url_splitted
+                    ) != len(splitted):
                         continue
                     else:
                         tmp_keys.append(tmp_key)
                 elif splitted[idx] == path_part:
-                    if idx == len(normalized_url_splitted) - 1 and \
-                            len(normalized_url_splitted) != len(splitted):
+                    if idx == len(normalized_url_splitted) - 1 and len(
+                        normalized_url_splitted
+                    ) != len(splitted):
                         continue
                     else:
                         tmp_keys.append(tmp_key)
             filtered_keys = tmp_keys
-            if len(filtered_keys) == 1 and \
-                    filtered_keys[0].split("/")[-1] == "<artifactPath>":
+            if (
+                len(filtered_keys) == 1
+                and filtered_keys[0].split("/")[-1] == "<artifactPath>"
+            ):
                 break
 
         if len(filtered_keys) == 0:
-            raise AuthException("Cannot make an authorization decision. URL not found. URL: {0}".format(url))
+            raise AuthException(
+                "Cannot make an authorization decision. URL not found. URL: {0}".format(
+                    url
+                )
+            )
         elif len(filtered_keys) > 1:
-            raise AuthException("Cannot make an authorization decision. Multiple URLs found. URL: {0}".format(url))
+            raise AuthException(
+                "Cannot make an authorization decision. Multiple URLs found. URL: {0}".format(
+                    url
+                )
+            )
 
         filtered_key = filtered_keys[0]
 
         for idx, path_part in enumerate(filtered_key.split()[1].split("/")):
             if "<" in path_part and ">" in path_part:
                 if path_part == "<artifactPath>":
-                    parameters[path_part[1:-1]] = "/".join(normalized_url_splitted[idx:])
+                    parameters[path_part[1:-1]] = "/".join(
+                        normalized_url_splitted[idx:]
+                    )
                 else:
                     parameters[path_part[1:-1]] = normalized_url_splitted[idx]
 
@@ -585,15 +737,22 @@ class Authenticator:
 
     def _internal_get_token_list(self, token_info):
         now = time()
-        token_list = self.db.get_list("tokens", {"username": token_info["username"], "expires.gt": now})
+        token_list = self.db.get_list(
+            "tokens", {"username": token_info["username"], "expires.gt": now}
+        )
         return token_list
 
     def _internal_get_token(self, token_info, token_id):
         token_value = self.db.get_one("tokens", {"_id": token_id}, fail_on_empty=False)
         if not token_value:
             raise AuthException("token not found", http_code=HTTPStatus.NOT_FOUND)
-        if token_value["username"] != token_info["username"] and not token_info["admin"]:
-            raise AuthException("needed admin privileges", http_code=HTTPStatus.UNAUTHORIZED)
+        if (
+            token_value["username"] != token_info["username"]
+            and not token_info["admin"]
+        ):
+            raise AuthException(
+                "needed admin privileges", http_code=HTTPStatus.UNAUTHORIZED
+            )
         return token_value
 
     def _internal_tokens_prune(self, token_collection, now=None):
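
_internal_tokens_prune and the token_limit guard keep the token store from growing without bound by discarding entries whose expires timestamp has passed. A self-contained sketch over an in-memory cache; the real method works against the tokens / tokens_tacacs collections through self.db:

    from time import time


    def prune_expired_tokens(tokens_cache, now=None):
        # Remove every cached token whose "expires" timestamp has passed
        now = now or time()
        expired = [tid for tid, info in tokens_cache.items() if info["expires"] < now]
        for tid in expired:
            del tokens_cache[tid]
        return len(expired)


    cache = {
        "tok-old": {"username": "admin", "expires": time() - 10},
        "tok-new": {"username": "admin", "expires": time() + 3600},
    }
    prune_expired_tokens(cache)
    print(list(cache))  # ['tok-new']
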
index bbcf342..0f4b523 100644 (file)
@@ -23,8 +23,10 @@ Authconn implements an Abstract class for the Auth backend connector
 plugins with the definition of the methods to be implemented.
 """
 
-__author__ = "Eduardo Sousa <esousa@whitestack.com>, " \
-             "Pedro de la Cruz Ramos <pdelacruzramos@altran.com>"
+__author__ = (
+    "Eduardo Sousa <esousa@whitestack.com>, "
+    "Pedro de la Cruz Ramos <pdelacruzramos@altran.com>"
+)
 __date__ = "$27-jul-2018 23:59:59$"
 
 from http import HTTPStatus
@@ -35,6 +37,7 @@ class AuthException(Exception):
     """
     Authentication error, because token, user password not recognized
     """
+
     def __init__(self, message, http_code=HTTPStatus.UNAUTHORIZED):
         super(AuthException, self).__init__(message)
         self.http_code = http_code
@@ -44,6 +47,7 @@ class AuthExceptionUnauthorized(AuthException):
     """
     Authentication error, because not having rights to make this operation
     """
+
     pass
 
 
@@ -51,6 +55,7 @@ class AuthconnException(Exception):
     """
     Common and base class Exception for all authconn exceptions.
     """
+
     def __init__(self, message, http_code=HTTPStatus.UNAUTHORIZED):
         super(AuthconnException, self).__init__(message)
         self.http_code = http_code
@@ -60,6 +65,7 @@ class AuthconnConnectionException(AuthconnException):
     """
     Connectivity error with Auth backend.
     """
+
     def __init__(self, message, http_code=HTTPStatus.BAD_GATEWAY):
         super(AuthconnConnectionException, self).__init__(message, http_code)
 
@@ -68,6 +74,7 @@ class AuthconnNotSupportedException(AuthconnException):
     """
     The request is not supported by the Auth backend.
     """
+
     def __init__(self, message, http_code=HTTPStatus.NOT_IMPLEMENTED):
         super(AuthconnNotSupportedException, self).__init__(message, http_code)
 
@@ -76,6 +83,7 @@ class AuthconnNotImplementedException(AuthconnException):
     """
     The method is not implemented by the Auth backend.
     """
+
     def __init__(self, message, http_code=HTTPStatus.NOT_IMPLEMENTED):
         super(AuthconnNotImplementedException, self).__init__(message, http_code)
 
@@ -84,6 +92,7 @@ class AuthconnOperationException(AuthconnException):
     """
     The operation executed failed.
     """
+
     def __init__(self, message, http_code=HTTPStatus.INTERNAL_SERVER_ERROR):
         super(AuthconnOperationException, self).__init__(message, http_code)
 
@@ -92,6 +101,7 @@ class AuthconnNotFoundException(AuthconnException):
     """
     The operation executed failed because element not found.
     """
+
     def __init__(self, message, http_code=HTTPStatus.NOT_FOUND):
         super().__init__(message, http_code)
 
@@ -100,6 +110,7 @@ class AuthconnConflictException(AuthconnException):
     """
     The operation has conflicts.
     """
+
     def __init__(self, message, http_code=HTTPStatus.CONFLICT):
         super().__init__(message, http_code)
 
@@ -110,6 +121,7 @@ class Authconn:
     Each Auth backend connector plugin must be a subclass of
     Authconn class.
     """
+
     def __init__(self, config, db, role_permissions):
         """
         Constructor of the Authconn class.
@@ -204,7 +216,10 @@ class Authconn:
         users = self.get_user_list(filt)
         if not users:
             if fail:
-                raise AuthconnNotFoundException("User with {} not found".format(filt), http_code=HTTPStatus.NOT_FOUND)
+                raise AuthconnNotFoundException(
+                    "User with {} not found".format(filt),
+                    http_code=HTTPStatus.NOT_FOUND,
+                )
             else:
                 return None
         return users[0]
@@ -299,7 +314,9 @@ class Authconn:
         projs = self.get_project_list(filt)
         if not projs:
             if fail:
-                raise AuthconnNotFoundException("project with {} not found".format(filt))
+                raise AuthconnNotFoundException(
+                    "project with {} not found".format(filt)
+                )
             else:
                 return None
         return projs[0]
index b3de1cd..e342150 100644 (file)
@@ -24,14 +24,16 @@ AuthconnInternal implements the connector for
 OSM Internal Authentication Backend and leverages the RBAC model
 """
 
-__author__ = "Pedro de la Cruz Ramos <pdelacruzramos@altran.com>, " \
-             "Alfonso Tierno <alfonso.tiernosepulveda@telefoncia.com"
+__author__ = (
+    "Pedro de la Cruz Ramos <pdelacruzramos@altran.com>, "
+    "Alfonso Tierno <alfonso.tiernosepulveda@telefoncia.com"
+)
 __date__ = "$06-jun-2019 11:16:08$"
 
 import logging
 import re
 
-from osm_nbi.authconn import Authconn, AuthException   # , AuthconnOperationException
+from osm_nbi.authconn import Authconn, AuthException  # , AuthconnOperationException
 from osm_common.dbbase import DbException
 from osm_nbi.base_topic import BaseTopic
 from osm_nbi.validation import is_valid_uuid
@@ -44,8 +46,8 @@ from random import choice as random_choice
 
 
 class AuthconnInternal(Authconn):
-    token_time_window = 2   # seconds
-    token_delay = 1   # seconds to wait upon second request within time window
+    token_time_window = 2  # seconds
+    token_delay = 1  # seconds to wait upon second request within time window
 
     users_collection = "users"
     roles_collection = "roles"
@@ -81,7 +83,10 @@ class AuthconnInternal(Authconn):
 
         try:
             if not token:
-                raise AuthException("Needed a token or Authorization HTTP header", http_code=HTTPStatus.UNAUTHORIZED)
+                raise AuthException(
+                    "Needed a token or Authorization HTTP header",
+                    http_code=HTTPStatus.UNAUTHORIZED,
+                )
 
             now = time()
 
@@ -89,21 +94,31 @@ class AuthconnInternal(Authconn):
             # if not token_info:
             token_info = self.db.get_one(self.tokens_collection, {"_id": token})
             if token_info["expires"] < now:
-                raise AuthException("Expired Token or Authorization HTTP header", http_code=HTTPStatus.UNAUTHORIZED)
+                raise AuthException(
+                    "Expired Token or Authorization HTTP header",
+                    http_code=HTTPStatus.UNAUTHORIZED,
+                )
 
             return token_info
 
         except DbException as e:
             if e.http_code == HTTPStatus.NOT_FOUND:
-                raise AuthException("Invalid Token or Authorization HTTP header", http_code=HTTPStatus.UNAUTHORIZED)
+                raise AuthException(
+                    "Invalid Token or Authorization HTTP header",
+                    http_code=HTTPStatus.UNAUTHORIZED,
+                )
             else:
                 raise
         except AuthException:
             raise
         except Exception:
-            self.logger.exception("Error during token validation using internal backend")
-            raise AuthException("Error during token validation using internal backend",
-                                http_code=HTTPStatus.UNAUTHORIZED)
+            self.logger.exception(
+                "Error during token validation using internal backend"
+            )
+            raise AuthException(
+                "Error during token validation using internal backend",
+                http_code=HTTPStatus.UNAUTHORIZED,
+            )
 
     def revoke_token(self, token):
         """
@@ -117,7 +132,9 @@ class AuthconnInternal(Authconn):
             return True
         except DbException as e:
             if e.http_code == HTTPStatus.NOT_FOUND:
-                raise AuthException("Token '{}' not found".format(token), http_code=HTTPStatus.NOT_FOUND)
+                raise AuthException(
+                    "Token '{}' not found".format(token), http_code=HTTPStatus.NOT_FOUND
+                )
             else:
                 # raise
                 exmsg = "Error during token revocation using internal backend"
@@ -130,12 +147,16 @@ class AuthconnInternal(Authconn):
         :param user: username of the user.
         :param password: password to be validated.
         """
-        user_rows = self.db.get_list(self.users_collection, {BaseTopic.id_field("users", user): user})
+        user_rows = self.db.get_list(
+            self.users_collection, {BaseTopic.id_field("users", user): user}
+        )
         user_content = None
         if user_rows:
             user_content = user_rows[0]
             salt = user_content["_admin"]["salt"]
-            shadow_password = sha256(password.encode('utf-8') + salt.encode('utf-8')).hexdigest()
+            shadow_password = sha256(
+                password.encode("utf-8") + salt.encode("utf-8")
+            ).hexdigest()
             if shadow_password != user_content["password"]:
                 user_content = None
         return user_content
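
validate_user recomputes sha256(password + salt) and compares it with the stored shadow password. The same create/verify pair in isolation; the helper names are illustrative, the hashing scheme mirrors the lines above:

    from hashlib import sha256
    from uuid import uuid4


    def hash_password(password, salt=None):
        # Hex sha256 over the password concatenated with a random salt
        salt = salt or uuid4().hex
        shadow = sha256(password.encode("utf-8") + salt.encode("utf-8")).hexdigest()
        return shadow, salt


    def check_password(password, shadow, salt):
        candidate = sha256(password.encode("utf-8") + salt.encode("utf-8")).hexdigest()
        return candidate == shadow


    shadow, salt = hash_password("admin")
    print(check_password("admin", shadow, salt))  # True
    print(check_password("wrong", shadow, salt))  # False
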
@@ -168,28 +189,47 @@ class AuthconnInternal(Authconn):
         if user:
             user_content = self.validate_user(user, password)
             if not user_content:
-                raise AuthException("Invalid username/password", http_code=HTTPStatus.UNAUTHORIZED)
+                raise AuthException(
+                    "Invalid username/password", http_code=HTTPStatus.UNAUTHORIZED
+                )
             if not user_content.get("_admin", None):
-                raise AuthException("No default project for this user.", http_code=HTTPStatus.UNAUTHORIZED)
+                raise AuthException(
+                    "No default project for this user.",
+                    http_code=HTTPStatus.UNAUTHORIZED,
+                )
         elif token_info:
-            user_rows = self.db.get_list(self.users_collection, {"username": token_info["username"]})
+            user_rows = self.db.get_list(
+                self.users_collection, {"username": token_info["username"]}
+            )
             if user_rows:
                 user_content = user_rows[0]
             else:
                 raise AuthException("Invalid token", http_code=HTTPStatus.UNAUTHORIZED)
         else:
-            raise AuthException("Provide credentials: username/password or Authorization Bearer token",
-                                http_code=HTTPStatus.UNAUTHORIZED)
+            raise AuthException(
+                "Provide credentials: username/password or Authorization Bearer token",
+                http_code=HTTPStatus.UNAUTHORIZED,
+            )
         # Delay upon second request within time window
-        if now - user_content["_admin"].get("last_token_time", 0) < self.token_time_window:
+        if (
+            now - user_content["_admin"].get("last_token_time", 0)
+            < self.token_time_window
+        ):
             sleep(self.token_delay)
         # user_content["_admin"]["last_token_time"] = now
         # self.db.replace("users", user_content["_id"], user_content)   # might cause race conditions
-        self.db.set_one(self.users_collection,
-                        {"_id": user_content["_id"]}, {"_admin.last_token_time": now})
-
-        token_id = ''.join(random_choice('abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789')
-                           for _ in range(0, 32))
+        self.db.set_one(
+            self.users_collection,
+            {"_id": user_content["_id"]},
+            {"_admin.last_token_time": now},
+        )
+
+        token_id = "".join(
+            random_choice(
+                "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789"
+            )
+            for _ in range(0, 32)
+        )
 
         # projects = user_content.get("projects", [])
         prm_list = user_content.get("project_role_mappings", [])
@@ -197,17 +237,23 @@ class AuthconnInternal(Authconn):
         if not project:
             project = prm_list[0]["project"] if prm_list else None
         if not project:
-            raise AuthException("can't find a default project for this user", http_code=HTTPStatus.UNAUTHORIZED)
+            raise AuthException(
+                "can't find a default project for this user",
+                http_code=HTTPStatus.UNAUTHORIZED,
+            )
 
         projects = [prm["project"] for prm in prm_list]
 
-        proj = self.db.get_one(self.projects_collection,
-                               {BaseTopic.id_field("projects", project): project})
+        proj = self.db.get_one(
+            self.projects_collection, {BaseTopic.id_field("projects", project): project}
+        )
         project_name = proj["name"]
         project_id = proj["_id"]
         if project_name not in projects and project_id not in projects:
-            raise AuthException("project {} not allowed for this user".format(project),
-                                http_code=HTTPStatus.UNAUTHORIZED)
+            raise AuthException(
+                "project {} not allowed for this user".format(project),
+                http_code=HTTPStatus.UNAUTHORIZED,
+            )
 
         # TODO remove admin, this will be used by roles RBAC
         if project_name == "admin":
@@ -220,28 +266,33 @@ class AuthconnInternal(Authconn):
         roles_list = []
         for prm in prm_list:
             if prm["project"] in [project_id, project_name]:
-                role = self.db.get_one(self.roles_collection,
-                                       {BaseTopic.id_field("roles", prm["role"]): prm["role"]})
+                role = self.db.get_one(
+                    self.roles_collection,
+                    {BaseTopic.id_field("roles", prm["role"]): prm["role"]},
+                )
                 rid = role["_id"]
                 if rid not in roles:
                     rnm = role["name"]
                     roles.append(rid)
                     roles_list.append({"name": rnm, "id": rid})
         if not roles_list:
-            rid = self.db.get_one(self.roles_collection, {"name": "project_admin"})["_id"]
+            rid = self.db.get_one(self.roles_collection, {"name": "project_admin"})[
+                "_id"
+            ]
             roles_list = [{"name": "project_admin", "id": rid}]
 
-        new_token = {"issued_at": now,
-                     "expires": now + 3600,
-                     "_id": token_id,
-                     "id": token_id,
-                     "project_id": proj["_id"],
-                     "project_name": proj["name"],
-                     "username": user_content["username"],
-                     "user_id": user_content["_id"],
-                     "admin": token_admin,
-                     "roles": roles_list,
-                     }
+        new_token = {
+            "issued_at": now,
+            "expires": now + 3600,
+            "_id": token_id,
+            "id": token_id,
+            "project_id": proj["_id"],
+            "project_name": proj["name"],
+            "username": user_content["username"],
+            "user_id": user_content["_id"],
+            "admin": token_admin,
+            "roles": roles_list,
+        }
 
         self.db.create(self.tokens_collection, new_token)
         return deepcopy(new_token)
@@ -302,7 +353,9 @@ class AuthconnInternal(Authconn):
         salt = uuid4().hex
         user_info["_admin"]["salt"] = salt
         if "password" in user_info:
-            user_info["password"] = sha256(user_info["password"].encode('utf-8') + salt.encode('utf-8')).hexdigest()
+            user_info["password"] = sha256(
+                user_info["password"].encode("utf-8") + salt.encode("utf-8")
+            ).hexdigest()
         # "projects" are not stored any more
         if "projects" in user_info:
             del user_info["projects"]
@@ -316,7 +369,9 @@ class AuthconnInternal(Authconn):
         :param user_info: user info modifications
         """
         uid = user_info["_id"]
-        user_data = self.db.get_one(self.users_collection, {BaseTopic.id_field("users", uid): uid})
+        user_data = self.db.get_one(
+            self.users_collection, {BaseTopic.id_field("users", uid): uid}
+        )
         BaseTopic.format_on_edit(user_data, user_info)
         # User Name
         usnm = user_info.get("username")
@@ -324,12 +379,16 @@ class AuthconnInternal(Authconn):
             user_data["username"] = usnm
         # If a password is given and is not already encrypted
         pswd = user_info.get("password")
-        if pswd and (len(pswd) != 64 or not re.match('[a-fA-F0-9]*', pswd)):   # TODO: Improve check?
+        if pswd and (
+            len(pswd) != 64 or not re.match("[a-fA-F0-9]*", pswd)
+        ):  # TODO: Improve check?
             salt = uuid4().hex
             if "_admin" not in user_data:
                 user_data["_admin"] = {}
             user_data["_admin"]["salt"] = salt
-            user_data["password"] = sha256(pswd.encode('utf-8') + salt.encode('utf-8')).hexdigest()
+            user_data["password"] = sha256(
+                pswd.encode("utf-8") + salt.encode("utf-8")
+            ).hexdigest()
         # Project-Role Mappings
         # TODO: Check that user_info NEVER includes "project_role_mappings"
         if "project_role_mappings" not in user_data:
@@ -340,7 +399,9 @@ class AuthconnInternal(Authconn):
             for pidf in ["project", "project_name"]:
                 for ridf in ["role", "role_name"]:
                     try:
-                        user_data["project_role_mappings"].remove({"role": prm[ridf], "project": prm[pidf]})
+                        user_data["project_role_mappings"].remove(
+                            {"role": prm[ridf], "project": prm[pidf]}
+                        )
                     except KeyError:
                         pass
                     except ValueError:
@@ -389,9 +450,11 @@ class AuthconnInternal(Authconn):
                 for prm in prms:
                     project_id = prm["project"]
                     if project_id not in project_id_name:
-                        pr = self.db.get_one(self.projects_collection,
-                                             {BaseTopic.id_field("projects", project_id): project_id},
-                                             fail_on_empty=False)
+                        pr = self.db.get_one(
+                            self.projects_collection,
+                            {BaseTopic.id_field("projects", project_id): project_id},
+                            fail_on_empty=False,
+                        )
                         project_id_name[project_id] = pr["name"] if pr else None
                     prm["project_name"] = project_id_name[project_id]
                     if prm["project_name"] not in projects:
@@ -399,25 +462,32 @@ class AuthconnInternal(Authconn):
 
                     role_id = prm["role"]
                     if role_id not in role_id_name:
-                        role = self.db.get_one(self.roles_collection,
-                                               {BaseTopic.id_field("roles", role_id): role_id},
-                                               fail_on_empty=False)
+                        role = self.db.get_one(
+                            self.roles_collection,
+                            {BaseTopic.id_field("roles", role_id): role_id},
+                            fail_on_empty=False,
+                        )
                         role_id_name[role_id] = role["name"] if role else None
                     prm["role_name"] = role_id_name[role_id]
                 user["projects"] = projects  # for backward compatibility
             elif projects:
                 # user created with an old version. Create a project_role mapping with role project_admin
                 user["project_role_mappings"] = []
-                role = self.db.get_one(self.roles_collection,
-                                       {BaseTopic.id_field("roles", "project_admin"): "project_admin"})
+                role = self.db.get_one(
+                    self.roles_collection,
+                    {BaseTopic.id_field("roles", "project_admin"): "project_admin"},
+                )
                 for p_id_name in projects:
-                    pr = self.db.get_one(self.projects_collection,
-                                         {BaseTopic.id_field("projects", p_id_name): p_id_name})
-                    prm = {"project": pr["_id"],
-                           "project_name": pr["name"],
-                           "role_name": "project_admin",
-                           "role": role["_id"]
-                           }
+                    pr = self.db.get_one(
+                        self.projects_collection,
+                        {BaseTopic.id_field("projects", p_id_name): p_id_name},
+                    )
+                    prm = {
+                        "project": pr["_id"],
+                        "project_name": pr["name"],
+                        "role_name": "project_admin",
+                        "role": role["_id"],
+                    }
                     user["project_role_mappings"].append(prm)
             else:
                 user["projects"] = []
@@ -466,5 +536,8 @@ class AuthconnInternal(Authconn):
         :return: None
         :raises AuthconnOperationException: if project update failed.
         """
-        self.db.set_one(self.projects_collection, {BaseTopic.id_field("projects", project_id): project_id},
-                        project_info)
+        self.db.set_one(
+            self.projects_collection,
+            {BaseTopic.id_field("projects", project_id): project_id},
+            project_info,
+        )
index 05f803a..5e34485 100644 (file)
@@ -25,12 +25,19 @@ it for OSM.
 """
 
 
-__author__ = "Eduardo Sousa <esousa@whitestack.com>, " \
-             "Pedro de la Cruz Ramos <pdelacruzramos@altran.com>"
+__author__ = (
+    "Eduardo Sousa <esousa@whitestack.com>, "
+    "Pedro de la Cruz Ramos <pdelacruzramos@altran.com>"
+)
 __date__ = "$27-jul-2018 23:59:59$"
 
-from osm_nbi.authconn import Authconn, AuthException, AuthconnOperationException, AuthconnNotFoundException, \
-    AuthconnConflictException
+from osm_nbi.authconn import (
+    Authconn,
+    AuthException,
+    AuthconnOperationException,
+    AuthconnNotFoundException,
+    AuthconnConflictException,
+)
 
 import logging
 import requests
@@ -56,14 +63,19 @@ class AuthconnKeystone(Authconn):
         if config.get("auth_url"):
             validate_input(self.auth_url, http_schema)
         else:
-            self.auth_url = "http://{0}:{1}/v3".format(config.get("auth_host", "keystone"),
-                                                       config.get("auth_port", "5000"))
+            self.auth_url = "http://{0}:{1}/v3".format(
+                config.get("auth_host", "keystone"), config.get("auth_port", "5000")
+            )
         self.user_domain_name_list = config.get("user_domain_name", "default")
         self.user_domain_name_list = self.user_domain_name_list.split(",")
         # read only domain list
-        self.user_domain_ro_list = [x[:-3] for x in self.user_domain_name_list if x.endswith(":ro")]
+        self.user_domain_ro_list = [
+            x[:-3] for x in self.user_domain_name_list if x.endswith(":ro")
+        ]
         # remove the ":ro"
-        self.user_domain_name_list = [x if not x.endswith(":ro") else x[:-3] for x in self.user_domain_name_list]
+        self.user_domain_name_list = [
+            x if not x.endswith(":ro") else x[:-3] for x in self.user_domain_name_list
+        ]
 
         self.admin_project = config.get("service_project", "service")
         self.admin_username = config.get("service_username", "nbi")
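
The constructor above splits the comma-separated user_domain_name setting and treats a ":ro" suffix as a read-only marker, keeping both the full name list and the read-only subset. A short sketch of that parsing with a made-up configuration value:

    def parse_domain_list(value):
        # "default,ldap:ro" -> (["default", "ldap"], ["ldap"])
        names = value.split(",")
        read_only = [x[:-3] for x in names if x.endswith(":ro")]
        names = [x[:-3] if x.endswith(":ro") else x for x in names]
        return names, read_only


    print(parse_domain_list("default,ldap:ro"))  # (['default', 'ldap'], ['ldap'])
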
@@ -71,10 +83,12 @@ class AuthconnKeystone(Authconn):
         self.project_domain_name_list = config.get("project_domain_name", "default")
         self.project_domain_name_list = self.project_domain_name_list.split(",")
         if len(self.user_domain_name_list) != len(self.project_domain_name_list):
-            raise ValueError("Invalid configuration parameter fo authenticate. 'project_domain_name' and "
-                             "'user_domain_name' must be a comma-separated list with the same size. Revise "
-                             "configuration or/and 'OSMNBI_AUTHENTICATION_PROJECT_DOMAIN_NAME', "
-                             "'OSMNBI_AUTHENTICATION_USER_DOMAIN_NAME'  Variables")
+            raise ValueError(
+                "Invalid configuration parameter for authentication. 'project_domain_name' and "
+                "'user_domain_name' must be comma-separated lists of the same size. Revise "
+                "the configuration and/or the 'OSMNBI_AUTHENTICATION_PROJECT_DOMAIN_NAME' and "
+                "'OSMNBI_AUTHENTICATION_USER_DOMAIN_NAME' variables"
+            )
 
         # Waiting for Keystone to be up
         available = None
@@ -89,14 +103,18 @@ class AuthconnKeystone(Authconn):
                 if counter == 0:
                     raise AuthException("Keystone not available after 300s timeout")
 
-        self.auth = v3.Password(user_domain_name=self.user_domain_name_list[0],
-                                username=self.admin_username,
-                                password=self.admin_password,
-                                project_domain_name=self.project_domain_name_list[0],
-                                project_name=self.admin_project,
-                                auth_url=self.auth_url)
+        self.auth = v3.Password(
+            user_domain_name=self.user_domain_name_list[0],
+            username=self.admin_username,
+            password=self.admin_password,
+            project_domain_name=self.project_domain_name_list[0],
+            project_name=self.admin_project,
+            auth_url=self.auth_url,
+        )
         self.sess = session.Session(auth=self.auth)
-        self.keystone = client.Client(session=self.sess, endpoint_override=self.auth_url)
+        self.keystone = client.Client(
+            session=self.sess, endpoint_override=self.auth_url
+        )
 
     def authenticate(self, credentials, token_info=None):
         """
@@ -122,11 +140,11 @@ class AuthconnKeystone(Authconn):
         project_id = None
         project_name = None
         if credentials.get("project_domain_name"):
-            project_domain_name_list = (credentials["project_domain_name"], )
+            project_domain_name_list = (credentials["project_domain_name"],)
         else:
             project_domain_name_list = self.project_domain_name_list
         if credentials.get("user_domain_name"):
-            user_domain_name_list = (credentials["user_domain_name"], )
+            user_domain_name_list = (credentials["user_domain_name"],)
         else:
             user_domain_name_list = self.user_domain_name_list
 
@@ -146,19 +164,30 @@ class AuthconnKeystone(Authconn):
                         username=username,
                         password=credentials.get("password"),
                         user_domain_name=user_domain_name,
-                        project_domain_name=project_domain_name)
+                        project_domain_name=project_domain_name,
+                    )
                 elif token_info:
-                    unscoped_token = self.keystone.tokens.validate(token=token_info.get("_id"))
+                    unscoped_token = self.keystone.tokens.validate(
+                        token=token_info.get("_id")
+                    )
                 else:
-                    raise AuthException("Provide credentials: username/password or Authorization Bearer token",
-                                        http_code=HTTPStatus.UNAUTHORIZED)
+                    raise AuthException(
+                        "Provide credentials: username/password or Authorization Bearer token",
+                        http_code=HTTPStatus.UNAUTHORIZED,
+                    )
 
                 if not credentials.get("project_id"):
                     # get first project for the user
-                    project_list = self.keystone.projects.list(user=unscoped_token["user"]["id"])
+                    project_list = self.keystone.projects.list(
+                        user=unscoped_token["user"]["id"]
+                    )
                     if not project_list:
-                        raise AuthException("The user {} has not any project and cannot be used for authentication".
-                                            format(credentials.get("username")), http_code=HTTPStatus.UNAUTHORIZED)
+                        raise AuthException(
+                            "The user {} does not have any project and cannot be used for authentication".format(
+                                credentials.get("username")
+                            ),
+                            http_code=HTTPStatus.UNAUTHORIZED,
+                        )
                     project_id = project_list[0].id
                 else:
                     if is_valid_uuid(credentials["project_id"]):
@@ -172,7 +201,8 @@ class AuthconnKeystone(Authconn):
                     project_id=project_id,
                     user_domain_name=user_domain_name,
                     project_domain_name=project_domain_name,
-                    token=unscoped_token["auth_token"])
+                    token=unscoped_token["auth_token"],
+                )
 
                 auth_token = {
                     "_id": scoped_token.auth_token,
@@ -184,16 +214,21 @@ class AuthconnKeystone(Authconn):
                     "project_domain_name": scoped_token.project_domain_name,
                     "user_domain_name": scoped_token.user_domain_name,
                     "expires": scoped_token.expires.timestamp(),
-                    "issued_at": scoped_token.issued.timestamp()
+                    "issued_at": scoped_token.issued.timestamp(),
                 }
 
                 return auth_token
             except ClientException as e:
-                if index >= len(user_domain_name_list)-1 or index >= len(project_domain_name_list)-1:
+                if (
+                    index >= len(user_domain_name_list) - 1
+                    or index >= len(project_domain_name_list) - 1
+                ):
                     # if last try, launch exception
                     # self.logger.exception("Error during user authentication using keystone: {}".format(e))
-                    raise AuthException("Error during user authentication using Keystone: {}".format(e),
-                                        http_code=HTTPStatus.UNAUTHORIZED)
+                    raise AuthException(
+                        "Error during user authentication using Keystone: {}".format(e),
+                        http_code=HTTPStatus.UNAUTHORIZED,
+                    )
 
     def validate_token(self, token):
         """
@@ -222,14 +257,16 @@ class AuthconnKeystone(Authconn):
                 "username": token_info["user"]["name"],
                 "roles": token_info["roles"],
                 "expires": token_info.expires.timestamp(),
-                "issued_at": token_info.issued.timestamp()
+                "issued_at": token_info.issued.timestamp(),
             }
 
             return ses
         except ClientException as e:
             # self.logger.exception("Error during token validation using keystone: {}".format(e))
-            raise AuthException("Error during token validation using Keystone: {}".format(e),
-                                http_code=HTTPStatus.UNAUTHORIZED)
+            raise AuthException(
+                "Error during token validation using Keystone: {}".format(e),
+                http_code=HTTPStatus.UNAUTHORIZED,
+            )
 
     def revoke_token(self, token):
         """
@@ -244,8 +281,10 @@ class AuthconnKeystone(Authconn):
             return True
         except ClientException as e:
             # self.logger.exception("Error during token revocation using keystone: {}".format(e))
-            raise AuthException("Error during token revocation using Keystone: {}".format(e),
-                                http_code=HTTPStatus.UNAUTHORIZED)
+            raise AuthException(
+                "Error during token revocation using Keystone: {}".format(e),
+                http_code=HTTPStatus.UNAUTHORIZED,
+            )
 
     def _get_domain_id(self, domain_name, fail_if_not_found=True):
         """
@@ -262,7 +301,9 @@ class AuthconnKeystone(Authconn):
             # domain_name is already an id
             return domain_name
         if not domain_id and fail_if_not_found:
-            raise AuthconnNotFoundException("Domain {} cannot be found".format(domain_name))
+            raise AuthconnNotFoundException(
+                "Domain {} cannot be found".format(domain_name)
+            )
         return domain_id
 
     def _get_domains(self):
@@ -288,24 +329,38 @@ class AuthconnKeystone(Authconn):
         """
         try:
 
-            if user_info.get("domain_name") and user_info["domain_name"] in self.user_domain_ro_list:
-                raise AuthconnConflictException("Cannot create a user in the read only domain {}".
-                                                format(user_info["domain_name"]))
+            if (
+                user_info.get("domain_name")
+                and user_info["domain_name"] in self.user_domain_ro_list
+            ):
+                raise AuthconnConflictException(
+                    "Cannot create a user in the read only domain {}".format(
+                        user_info["domain_name"]
+                    )
+                )
 
             new_user = self.keystone.users.create(
-                user_info["username"], password=user_info["password"],
-                domain=self._get_domain_id(user_info.get("domain_name", self.user_domain_name_list[0])),
-                _admin=user_info["_admin"])
+                user_info["username"],
+                password=user_info["password"],
+                domain=self._get_domain_id(
+                    user_info.get("domain_name", self.user_domain_name_list[0])
+                ),
+                _admin=user_info["_admin"],
+            )
             if "project_role_mappings" in user_info.keys():
                 for mapping in user_info["project_role_mappings"]:
-                    self.assign_role_to_user(new_user, mapping["project"], mapping["role"])
+                    self.assign_role_to_user(
+                        new_user, mapping["project"], mapping["role"]
+                    )
             return {"username": new_user.name, "_id": new_user.id}
         except Conflict as e:
             # self.logger.exception("Error during user creation using keystone: {}".format(e))
             raise AuthconnOperationException(e, http_code=HTTPStatus.CONFLICT)
         except ClientException as e:
             # self.logger.exception("Error during user creation using keystone: {}".format(e))
-            raise AuthconnOperationException("Error during user creation using Keystone: {}".format(e))
+            raise AuthconnOperationException(
+                "Error during user creation using Keystone: {}".format(e)
+            )
 
     def update_user(self, user_info):
         """
@@ -322,14 +377,18 @@ class AuthconnKeystone(Authconn):
                 user_obj = None
             if not user_obj:
                 for user_domain in self.user_domain_name_list:
-                    domain_id = self._get_domain_id(user_domain, fail_if_not_found=False)
+                    domain_id = self._get_domain_id(
+                        user_domain, fail_if_not_found=False
+                    )
                     if not domain_id:
                         continue
-                    user_obj_list = self.keystone.users.list(name=user, domain=domain_id)
+                    user_obj_list = self.keystone.users.list(
+                        name=user, domain=domain_id
+                    )
                     if user_obj_list:
                         user_obj = user_obj_list[0]
                         break
-                else:   # user not found
+                else:  # user not found
                     raise AuthconnNotFoundException("User '{}' not found".format(user))
 
             user_id = user_obj.id
@@ -338,29 +397,51 @@ class AuthconnKeystone(Authconn):
 
             if domain_name in self.user_domain_ro_list:
                 if user_info.get("password") or user_info.get("username"):
-                    raise AuthconnConflictException("Cannot update the user {} belonging to a read only domain {}".
-                                                    format(user, domain_name))
-
-            elif user_info.get("password") or user_info.get("username") \
-                    or user_info.get("add_project_role_mappings") or user_info.get("remove_project_role_mappings"):
+                    raise AuthconnConflictException(
+                        "Cannot update the user {} belonging to a read only domain {}".format(
+                            user, domain_name
+                        )
+                    )
+
+            elif (
+                user_info.get("password")
+                or user_info.get("username")
+                or user_info.get("add_project_role_mappings")
+                or user_info.get("remove_project_role_mappings")
+            ):
                 # if user_index>0, it is an external domain, that should not be updated
-                ctime = user_obj._admin.get("created", 0) if hasattr(user_obj, "_admin") else 0
+                ctime = (
+                    user_obj._admin.get("created", 0)
+                    if hasattr(user_obj, "_admin")
+                    else 0
+                )
                 try:
-                    self.keystone.users.update(user_id, password=user_info.get("password"),
-                                               name=user_info.get("username"),
-                                               _admin={"created": ctime, "modified": time.time()})
+                    self.keystone.users.update(
+                        user_id,
+                        password=user_info.get("password"),
+                        name=user_info.get("username"),
+                        _admin={"created": ctime, "modified": time.time()},
+                    )
                 except Exception as e:
                     if user_info.get("username") or user_info.get("password"):
-                        raise AuthconnOperationException("Error during username/password change: {}".format(str(e)))
-                    self.logger.error("Error during updating user profile: {}".format(str(e)))
+                        raise AuthconnOperationException(
+                            "Error during username/password change: {}".format(str(e))
+                        )
+                    self.logger.error(
+                        "Error during updating user profile: {}".format(str(e))
+                    )
 
             for mapping in user_info.get("remove_project_role_mappings", []):
-                self.remove_role_from_user(user_obj, mapping["project"], mapping["role"])
+                self.remove_role_from_user(
+                    user_obj, mapping["project"], mapping["role"]
+                )
             for mapping in user_info.get("add_project_role_mappings", []):
                 self.assign_role_to_user(user_obj, mapping["project"], mapping["role"])
         except ClientException as e:
             # self.logger.exception("Error during user password/name update using keystone: {}".format(e))
-            raise AuthconnOperationException("Error during user update using Keystone: {}".format(e))
+            raise AuthconnOperationException(
+                "Error during user update using Keystone: {}".format(e)
+            )
 
     def delete_user(self, user_id):
         """
@@ -374,8 +455,11 @@ class AuthconnKeystone(Authconn):
             domain_id = user_obj.domain_id
             domain_name = self.domains_id2name.get(domain_id)
             if domain_name in self.user_domain_ro_list:
-                raise AuthconnConflictException("Cannot delete user {} belonging to a read only domain {}".
-                                                format(user_id, domain_name))
+                raise AuthconnConflictException(
+                    "Cannot delete user {} belonging to a read only domain {}".format(
+                        user_id, domain_name
+                    )
+                )
 
             result, detail = self.keystone.users.delete(user_id)
             if result.status_code != 204:
@@ -383,7 +467,9 @@ class AuthconnKeystone(Authconn):
             return True
         except ClientException as e:
             # self.logger.exception("Error during user deletion using keystone: {}".format(e))
-            raise AuthconnOperationException("Error during user deletion using Keystone: {}".format(e))
+            raise AuthconnOperationException(
+                "Error during user deletion using Keystone: {}".format(e)
+            )
 
     def get_user_list(self, filter_q=None):
         """
@@ -401,7 +487,9 @@ class AuthconnKeystone(Authconn):
             if filter_q:
                 filter_name = filter_q.get("name") or filter_q.get("username")
                 if filter_q.get("domain_name"):
-                    filter_domain = self._get_domain_id(filter_q["domain_name"], fail_if_not_found=False)
+                    filter_domain = self._get_domain_id(
+                        filter_q["domain_name"], fail_if_not_found=False
+                    )
                     # If domain is not found, use the same name to obtain an empty list
                     filter_domain = filter_domain or filter_q["domain_name"]
                 if filter_q.get("domain_id"):
@@ -411,7 +499,9 @@ class AuthconnKeystone(Authconn):
             # get users from user_domain_name_list[1:], because it will not be provided in case of LDAP
             if filter_domain is None and len(self.user_domain_name_list) > 1:
                 for user_domain in self.user_domain_name_list[1:]:
-                    domain_id = self._get_domain_id(user_domain, fail_if_not_found=False)
+                    domain_id = self._get_domain_id(
+                        user_domain, fail_if_not_found=False
+                    )
                     if not domain_id:
                         continue
                     # find if users of this domain are already provided. In this case ignore
@@ -419,7 +509,9 @@ class AuthconnKeystone(Authconn):
                         if u.domain_id == domain_id:
                             break
                     else:
-                        users += self.keystone.users.list(name=filter_name, domain=domain_id)
+                        users += self.keystone.users.list(
+                            name=filter_name, domain=domain_id
+                        )
 
             # if filter name matches a user id, provide it also
             if filter_name:
@@ -430,13 +522,17 @@ class AuthconnKeystone(Authconn):
                 except Exception:
                     pass
 
-            users = [{
-                "username": user.name,
-                "_id": user.id,
-                "id": user.id,
-                "_admin": user.to_dict().get("_admin", {}),   # TODO: REVISE
-                "domain_name": self.domains_id2name.get(user.domain_id)
-            } for user in users if user.name != self.admin_username]
+            users = [
+                {
+                    "username": user.name,
+                    "_id": user.id,
+                    "id": user.id,
+                    "_admin": user.to_dict().get("_admin", {}),  # TODO: REVISE
+                    "domain_name": self.domains_id2name.get(user.domain_id),
+                }
+                for user in users
+                if user.name != self.admin_username
+            ]
 
             if filter_q and filter_q.get("_id"):
                 users = [user for user in users if filter_q["_id"] == user["_id"]]
@@ -448,7 +544,9 @@ class AuthconnKeystone(Authconn):
                 for project in projects:
                     user["projects"].append(project.name)
 
-                    roles = self.keystone.roles.list(user=user["_id"], project=project.id)
+                    roles = self.keystone.roles.list(
+                        user=user["_id"], project=project.id
+                    )
                     for role in roles:
                         prm = {
                             "project": project.id,
@@ -461,7 +559,9 @@ class AuthconnKeystone(Authconn):
             return users
         except ClientException as e:
             # self.logger.exception("Error during user listing using keystone: {}".format(e))
-            raise AuthconnOperationException("Error during user listing using Keystone: {}".format(e))
+            raise AuthconnOperationException(
+                "Error during user listing using Keystone: {}".format(e)
+            )
 
     def get_role_list(self, filter_q=None):
         """
@@ -476,12 +576,16 @@ class AuthconnKeystone(Authconn):
                 filter_name = filter_q.get("name")
             roles_list = self.keystone.roles.list(name=filter_name)
 
-            roles = [{
-                "name": role.name,
-                "_id": role.id,
-                "_admin": role.to_dict().get("_admin", {}),
-                "permissions": role.to_dict().get("permissions", {})
-            } for role in roles_list if role.name != "service"]
+            roles = [
+                {
+                    "name": role.name,
+                    "_id": role.id,
+                    "_admin": role.to_dict().get("_admin", {}),
+                    "permissions": role.to_dict().get("permissions", {}),
+                }
+                for role in roles_list
+                if role.name != "service"
+            ]
 
             if filter_q and filter_q.get("_id"):
                 roles = [role for role in roles if filter_q["_id"] == role["_id"]]
@@ -489,8 +593,10 @@ class AuthconnKeystone(Authconn):
             return roles
         except ClientException as e:
             # self.logger.exception("Error during user role listing using keystone: {}".format(e))
-            raise AuthException("Error during user role listing using Keystone: {}".format(e),
-                                http_code=HTTPStatus.UNAUTHORIZED)
+            raise AuthException(
+                "Error during user role listing using Keystone: {}".format(e),
+                http_code=HTTPStatus.UNAUTHORIZED,
+            )
 
     def create_role(self, role_info):
         """
@@ -500,14 +606,19 @@ class AuthconnKeystone(Authconn):
         :raises AuthconnOperationException: if role creation failed.
         """
         try:
-            result = self.keystone.roles.create(role_info["name"], permissions=role_info.get("permissions"),
-                                                _admin=role_info.get("_admin"))
+            result = self.keystone.roles.create(
+                role_info["name"],
+                permissions=role_info.get("permissions"),
+                _admin=role_info.get("_admin"),
+            )
             return result.id
         except Conflict as ex:
             raise AuthconnConflictException(str(ex))
         except ClientException as e:
             # self.logger.exception("Error during role creation using keystone: {}".format(e))
-            raise AuthconnOperationException("Error during role creation using Keystone: {}".format(e))
+            raise AuthconnOperationException(
+                "Error during role creation using Keystone: {}".format(e)
+            )
 
     def delete_role(self, role_id):
         """
@@ -525,7 +636,9 @@ class AuthconnKeystone(Authconn):
             return True
         except ClientException as e:
             # self.logger.exception("Error during role deletion using keystone: {}".format(e))
-            raise AuthconnOperationException("Error during role deletion using Keystone: {}".format(e))
+            raise AuthconnOperationException(
+                "Error during role deletion using Keystone: {}".format(e)
+            )
 
     def update_role(self, role_info):
         """
@@ -535,16 +648,22 @@ class AuthconnKeystone(Authconn):
         """
         try:
             rid = role_info["_id"]
-            if not is_valid_uuid(rid):   # Is this required?
+            if not is_valid_uuid(rid):  # Is this required?
                 role_obj_list = self.keystone.roles.list(name=rid)
                 if not role_obj_list:
                     raise AuthconnNotFoundException("Role '{}' not found".format(rid))
                 rid = role_obj_list[0].id
-            self.keystone.roles.update(rid, name=role_info["name"], permissions=role_info.get("permissions"),
-                                       _admin=role_info.get("_admin"))
+            self.keystone.roles.update(
+                rid,
+                name=role_info["name"],
+                permissions=role_info.get("permissions"),
+                _admin=role_info.get("_admin"),
+            )
         except ClientException as e:
             # self.logger.exception("Error during role update using keystone: {}".format(e))
-            raise AuthconnOperationException("Error during role updating using Keystone: {}".format(e))
+            raise AuthconnOperationException(
+                "Error during role updating using Keystone: {}".format(e)
+            )
 
     def get_project_list(self, filter_q=None):
         """
@@ -563,25 +682,33 @@ class AuthconnKeystone(Authconn):
                 if filter_q.get("domain_id"):
                     filter_domain = filter_q["domain_id"]
 
-            projects = self.keystone.projects.list(name=filter_name, domain=filter_domain)
+            projects = self.keystone.projects.list(
+                name=filter_name, domain=filter_domain
+            )
 
-            projects = [{
-                "name": project.name,
-                "_id": project.id,
-                "_admin": project.to_dict().get("_admin", {}),  # TODO: REVISE
-                "quotas": project.to_dict().get("quotas", {}),  # TODO: REVISE
-                "domain_name": self.domains_id2name.get(project.domain_id)
-            } for project in projects]
+            projects = [
+                {
+                    "name": project.name,
+                    "_id": project.id,
+                    "_admin": project.to_dict().get("_admin", {}),  # TODO: REVISE
+                    "quotas": project.to_dict().get("quotas", {}),  # TODO: REVISE
+                    "domain_name": self.domains_id2name.get(project.domain_id),
+                }
+                for project in projects
+            ]
 
             if filter_q and filter_q.get("_id"):
-                projects = [project for project in projects
-                            if filter_q["_id"] == project["_id"]]
+                projects = [
+                    project for project in projects if filter_q["_id"] == project["_id"]
+                ]
 
             return projects
         except ClientException as e:
             # self.logger.exception("Error during user project listing using keystone: {}".format(e))
-            raise AuthException("Error during user project listing using Keystone: {}".format(e),
-                                http_code=HTTPStatus.UNAUTHORIZED)
+            raise AuthException(
+                "Error during user project listing using Keystone: {}".format(e),
+                http_code=HTTPStatus.UNAUTHORIZED,
+            )
 
     def create_project(self, project_info):
         """
@@ -594,14 +721,18 @@ class AuthconnKeystone(Authconn):
         try:
             result = self.keystone.projects.create(
                 project_info["name"],
-                domain=self._get_domain_id(project_info.get("domain_name", self.project_domain_name_list[0])),
+                domain=self._get_domain_id(
+                    project_info.get("domain_name", self.project_domain_name_list[0])
+                ),
                 _admin=project_info["_admin"],
-                quotas=project_info.get("quotas", {})
+                quotas=project_info.get("quotas", {}),
             )
             return result.id
         except ClientException as e:
             # self.logger.exception("Error during project creation using keystone: {}".format(e))
-            raise AuthconnOperationException("Error during project creation using Keystone: {}".format(e))
+            raise AuthconnOperationException(
+                "Error during project creation using Keystone: {}".format(e)
+            )
 
     def delete_project(self, project_id):
         """
@@ -622,7 +753,9 @@ class AuthconnKeystone(Authconn):
             return True
         except ClientException as e:
             # self.logger.exception("Error during project deletion using keystone: {}".format(e))
-            raise AuthconnOperationException("Error during project deletion using Keystone: {}".format(e))
+            raise AuthconnOperationException(
+                "Error during project deletion using Keystone: {}".format(e)
+            )
 
     def update_project(self, project_id, project_info):
         """
@@ -632,13 +765,17 @@ class AuthconnKeystone(Authconn):
         :return: None
         """
         try:
-            self.keystone.projects.update(project_id, name=project_info["name"],
-                                          _admin=project_info["_admin"],
-                                          quotas=project_info.get("quotas", {})
-                                          )
+            self.keystone.projects.update(
+                project_id,
+                name=project_info["name"],
+                _admin=project_info["_admin"],
+                quotas=project_info.get("quotas", {}),
+            )
         except ClientException as e:
             # self.logger.exception("Error during project update using keystone: {}".format(e))
-            raise AuthconnOperationException("Error during project update using Keystone: {}".format(e))
+            raise AuthconnOperationException(
+                "Error during project update using Keystone: {}".format(e)
+            )
 
     def assign_role_to_user(self, user_obj, project, role):
         """
@@ -655,7 +792,9 @@ class AuthconnKeystone(Authconn):
             except Exception:
                 project_obj_list = self.keystone.projects.list(name=project)
                 if not project_obj_list:
-                    raise AuthconnNotFoundException("Project '{}' not found".format(project))
+                    raise AuthconnNotFoundException(
+                        "Project '{}' not found".format(project)
+                    )
                 project_obj = project_obj_list[0]
 
             try:
@@ -669,8 +808,10 @@ class AuthconnKeystone(Authconn):
             self.keystone.roles.grant(role_obj, user=user_obj, project=project_obj)
         except ClientException as e:
             # self.logger.exception("Error during user role assignment using keystone: {}".format(e))
-            raise AuthconnOperationException("Error during role '{}' assignment to user '{}' and project '{}' using "
-                                             "Keystone: {}".format(role, user_obj.name, project, e))
+            raise AuthconnOperationException(
+                "Error during role '{}' assignment to user '{}' and project '{}' using "
+                "Keystone: {}".format(role, user_obj.name, project, e)
+            )
 
     def remove_role_from_user(self, user_obj, project, role):
         """
@@ -688,7 +829,9 @@ class AuthconnKeystone(Authconn):
             except Exception:
                 project_obj_list = self.keystone.projects.list(name=project)
                 if not project_obj_list:
-                    raise AuthconnNotFoundException("Project '{}' not found".format(project))
+                    raise AuthconnNotFoundException(
+                        "Project '{}' not found".format(project)
+                    )
                 project_obj = project_obj_list[0]
 
             try:
@@ -702,5 +845,7 @@ class AuthconnKeystone(Authconn):
             self.keystone.roles.revoke(role_obj, user=user_obj, project=project_obj)
         except ClientException as e:
             # self.logger.exception("Error during user role revocation using keystone: {}".format(e))
-            raise AuthconnOperationException("Error during role '{}' revocation to user '{}' and project '{}' using "
-                                             "Keystone: {}".format(role, user_obj.name, project, e))
+            raise AuthconnOperationException(
+                "Error during role '{}' revocation to user '{}' and project '{}' using "
+                "Keystone: {}".format(role, user_obj.name, project, e)
+            )
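
The layout changes in this file (long calls exploded to one keyword argument per line with trailing commas, long messages split into implicitly concatenated strings) match what the black formatter produces with its default settings; which tool and exact configuration the project standardizes on is an assumption here, since tox.ini is changed elsewhere in this commit. A minimal sketch of reproducing the style programmatically, assuming the black package is installed:

    import black

    # One of the calls touched above, written as a single long line.
    SRC = (
        "self.auth = v3.Password(user_domain_name=self.user_domain_name_list[0], "
        "username=self.admin_username, password=self.admin_password, "
        "project_domain_name=self.project_domain_name_list[0], "
        "project_name=self.admin_project, auth_url=self.auth_url)\n"
    )

    # black only needs syntactically valid Python; the call comes back exploded
    # to one keyword argument per line with a trailing comma, as in the hunk above.
    print(black.format_str(SRC, mode=black.FileMode()))
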
index 27f38e9..1b82935 100644 (file)
 ##
 
 
-""" 
+"""
 AuthconnTacacs implements the connector for TACACS.
 Leverages AuthconnInternal for token lifecycle management and the RBAC model.
 
 When NBI bootstraps, it tries to create the admin user with the admin role associated to the admin project.
 Hence, the TACACS server should contain an admin user.
-""" 
+"""
 
 __author__ = "K Sai Kiran <saikiran.k@tataelxsi.co.in>"
 __date__ = "$11-Nov-2020 11:04:00$"
 
 
-from osm_nbi.authconn import Authconn, AuthException  
+from osm_nbi.authconn import Authconn, AuthException
 from osm_nbi.authconn_internal import AuthconnInternal
 from osm_nbi.base_topic import BaseTopic
 
@@ -63,21 +63,31 @@ class AuthconnTacacs(AuthconnInternal):
         self.db = db
         self.tacacs_host = config["tacacs_host"]
         self.tacacs_secret = config["tacacs_secret"]
-        self.tacacs_port = config["tacacs_port"] if config.get("tacacs_port") else self.tacacs_def_port
-        self.tacacs_timeout = config["tacacs_timeout"] if config.get("tacacs_timeout") else self.tacacs_def_timeout
-        self.tacacs_cli = TACACSClient(self.tacacs_host, self.tacacs_port, self.tacacs_secret,
-                                       self.tacacs_timeout)
+        self.tacacs_port = (
+            config["tacacs_port"] if config.get("tacacs_port") else self.tacacs_def_port
+        )
+        self.tacacs_timeout = (
+            config["tacacs_timeout"]
+            if config.get("tacacs_timeout")
+            else self.tacacs_def_timeout
+        )
+        self.tacacs_cli = TACACSClient(
+            self.tacacs_host, self.tacacs_port, self.tacacs_secret, self.tacacs_timeout
+        )
 
     def validate_user(self, user, password):
-        """
-        """
+        """"""
         now = time()
         try:
             tacacs_authen = self.tacacs_cli.authenticate(user, password)
         except Exception as e:
-            raise AuthException("TACACS server error: {}".format(e), http_code=HTTPStatus.UNAUTHORIZED)
+            raise AuthException(
+                "TACACS server error: {}".format(e), http_code=HTTPStatus.UNAUTHORIZED
+            )
         user_content = None
-        user_rows = self.db.get_list(self.users_collection, {BaseTopic.id_field("users", user): user})
+        user_rows = self.db.get_list(
+            self.users_collection, {BaseTopic.id_field("users", user): user}
+        )
         if not tacacs_authen.valid:
             if user_rows:
                 # To remove TACACS stale user from system.
@@ -86,14 +96,12 @@ class AuthconnTacacs(AuthconnInternal):
         if user_rows:
             user_content = user_rows[0]
         else:
-            new_user = {'username': user,
-                        'password': password,
-                        '_admin': {
-                            'created': now,
-                            'modified': now
-                        },
-                        'project_role_mappings': []
-                        }
+            new_user = {
+                "username": user,
+                "password": password,
+                "_admin": {"created": now, "modified": now},
+                "project_role_mappings": [],
+            }
             user_content = self.create_user(new_user)
         return user_content
 
@@ -106,14 +114,21 @@ class AuthconnTacacs(AuthconnInternal):
         """
         BaseTopic.format_on_new(user_info, make_public=False)
         try:
-            authen = self.tacacs_cli.authenticate(user_info["username"], user_info["password"])
+            authen = self.tacacs_cli.authenticate(
+                user_info["username"], user_info["password"]
+            )
             if authen.valid:
                 user_info.pop("password")
                 self.db.create(self.users_collection, user_info)
             else:
-                raise AuthException("TACACS server error: Invalid credentials", http_code=HTTPStatus.FORBIDDEN)
+                raise AuthException(
+                    "TACACS server error: Invalid credentials",
+                    http_code=HTTPStatus.FORBIDDEN,
+                )
         except Exception as e:
-            raise AuthException("TACACS server error: {}".format(e), http_code=HTTPStatus.BAD_REQUEST)
+            raise AuthException(
+                "TACACS server error: {}".format(e), http_code=HTTPStatus.BAD_REQUEST
+            )
         return {"username": user_info["username"], "_id": user_info["_id"]}
 
     def update_user(self, user_info):
@@ -124,8 +139,12 @@ class AuthconnTacacs(AuthconnInternal):
         :param user_info: Full user information in dict.
         :return: returns None for successful add/remove of project and role map.
         """
-        if(user_info.get("username")):
-            raise AuthException("Can not update username of this user", http_code=HTTPStatus.FORBIDDEN)
-        if(user_info.get("password")):
-            raise AuthException("Can not update password of this user", http_code=HTTPStatus.FORBIDDEN)
+        if user_info.get("username"):
+            raise AuthException(
+                "Can not update username of this user", http_code=HTTPStatus.FORBIDDEN
+            )
+        if user_info.get("password"):
+            raise AuthException(
+                "Can not update password of this user", http_code=HTTPStatus.FORBIDDEN
+            )
         super(AuthconnTacacs, self).update_user(user_info)
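
A hedged sketch of the configuration consumed by the constructor reformatted above: tacacs_host and tacacs_secret are mandatory, while tacacs_port and tacacs_timeout fall back to the class defaults when absent. The concrete values below are illustrative only (49 is simply the standard TACACS+ port), and how this dict reaches the connector from NBI's configuration is an assumption.

    # Illustrative values; key names are taken from the hunks above.
    tacacs_config = {
        "tacacs_host": "10.0.0.10",
        "tacacs_secret": "sharedsecret",
        # Optional: omit either key to fall back to tacacs_def_port /
        # tacacs_def_timeout defined on the class.
        "tacacs_port": 49,
        "tacacs_timeout": 10,
    }

    # Same fallback expression as in __init__ above, shown standalone.
    TACACS_DEF_PORT = 49  # placeholder for AuthconnTacacs.tacacs_def_port
    port = (
        tacacs_config["tacacs_port"]
        if tacacs_config.get("tacacs_port")
        else TACACS_DEF_PORT
    )
    print(port)
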
index f597d17..722ff59 100644 (file)
@@ -25,7 +25,6 @@ __author__ = "Alfonso Tierno <alfonso.tiernosepulveda@telefonica.com>"
 
 
 class EngineException(Exception):
-
     def __init__(self, message, http_code=HTTPStatus.BAD_REQUEST):
         self.http_code = http_code
         super(Exception, self).__init__(message)
@@ -79,7 +78,9 @@ def increment_ip_mac(ip_mac, vm_index=1):
         if i > 0:
             i += 1
             # format in hex, len can be 2 for mac or 4 for ipv6
-            return ("{}{:0" + str(len(ip_mac) - i) + "x}").format(ip_mac[:i], int(ip_mac[i:], 16) + vm_index)
+            return ("{}{:0" + str(len(ip_mac) - i) + "x}").format(
+                ip_mac[:i], int(ip_mac[i:], 16) + vm_index
+            )
     except Exception:
         pass
     return None
@@ -87,21 +88,17 @@ def increment_ip_mac(ip_mac, vm_index=1):
 
 class BaseTopic:
     # static variables for all instance classes
-    topic = None        # to_override
-    topic_msg = None    # to_override
-    quota_name = None   # to_override. If not provided topic will be used for quota_name
-    schema_new = None   # to_override
+    topic = None  # to_override
+    topic_msg = None  # to_override
+    quota_name = None  # to_override. If not provided topic will be used for quota_name
+    schema_new = None  # to_override
     schema_edit = None  # to_override
     multiproject = True  # True if this Topic can be shared by several projects. Then it contains _admin.projects_read
 
     default_quota = 500
 
     # Alternative ID Fields for some Topics
-    alt_id_field = {
-        "projects": "name",
-        "users": "username",
-        "roles": "name"
-    }
+    alt_id_field = {"projects": "name", "users": "username", "roles": "name"}
 
     def __init__(self, db, fs, msg, auth):
         self.db = db
@@ -146,8 +143,12 @@ class BaseTopic:
             count = self.db.count(self.topic, {"_admin.projects_read": pid})
             if count >= quota:
                 name = proj["name"]
-                raise ValidationError("quota ({}={}) exceeded for project {} ({})".format(quota_name, quota, name, pid),
-                                      http_code=HTTPStatus.UNPROCESSABLE_ENTITY)
+                raise ValidationError(
+                    "quota ({}={}) exceeded for project {} ({})".format(
+                        quota_name, quota, name, pid
+                    ),
+                    http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
+                )
 
     def _validate_input_new(self, input, force=False):
         """
@@ -179,7 +180,7 @@ class BaseTopic:
         not present or contains ANY means public.
         :param session: contains:
             project_id: project list this session has rights to access. Can be empty, one or several
-            set_project: items created will contain this project list  
+            set_project: items created will contain this project list
             force: True or False
             public: True, False or None
             method: "list", "show", "write", "delete"
@@ -203,12 +204,16 @@ class BaseTopic:
             project_filter_n.append(session["PROJECT.ne"])
 
         if project_filter:
-            if session["method"] in ("list", "show", "delete") or session.get("set_project"):
+            if session["method"] in ("list", "show", "delete") or session.get(
+                "set_project"
+            ):
                 p_filter["_admin.projects_read.cont"] = project_filter
             else:
                 p_filter["_admin.projects_write.cont"] = project_filter
         if project_filter_n:
-            if session["method"] in ("list", "show", "delete") or session.get("set_project"):
+            if session["method"] in ("list", "show", "delete") or session.get(
+                "set_project"
+            ):
                 p_filter["_admin.projects_read.ncont"] = project_filter_n
             else:
                 p_filter["_admin.projects_write.ncont"] = project_filter_n
@@ -237,10 +242,16 @@ class BaseTopic:
             return final_content
         # Change public status
         if session["public"] is not None:
-            if session["public"] and "ANY" not in final_content["_admin"]["projects_read"]:
+            if (
+                session["public"]
+                and "ANY" not in final_content["_admin"]["projects_read"]
+            ):
                 final_content["_admin"]["projects_read"].append("ANY")
                 final_content["_admin"]["projects_write"].clear()
-            if not session["public"] and "ANY" in final_content["_admin"]["projects_read"]:
+            if (
+                not session["public"]
+                and "ANY" in final_content["_admin"]["projects_read"]
+            ):
                 final_content["_admin"]["projects_read"].remove("ANY")
 
         # Change project status
@@ -266,8 +277,13 @@ class BaseTopic:
         _filter["name"] = name
         if _id:
             _filter["_id.neq"] = _id
-        if self.db.get_one(self.topic, _filter, fail_on_empty=False, fail_on_more=False):
-            raise EngineException("name '{}' already exists for {}".format(name, self.topic), HTTPStatus.CONFLICT)
+        if self.db.get_one(
+            self.topic, _filter, fail_on_empty=False, fail_on_more=False
+        ):
+            raise EngineException(
+                "name '{}' already exists for {}".format(name, self.topic),
+                HTTPStatus.CONFLICT,
+            )
 
     @staticmethod
     def format_on_new(content, project_id=None, make_public=False):
@@ -355,25 +371,39 @@ class BaseTopic:
                         kitem_old = int(kitem)
                         # if index greater than list, extend the list
                         if kitem_old >= len(update_content):
-                            update_content += [None] * (kitem_old - len(update_content) + 1)
+                            update_content += [None] * (
+                                kitem_old - len(update_content) + 1
+                            )
                         if not isinstance(update_content[kitem_old], (dict, list)):
                             update_content[kitem_old] = {}
                     else:
                         raise EngineException(
-                            "Invalid query string '{}'. Descriptor is not a list nor dict at '{}'".format(k, kitem))
+                            "Invalid query string '{}'. Descriptor is neither a list nor a dict at '{}'".format(
+                                k, kitem
+                            )
+                        )
                 if v is None:
                     del update_content[kitem_old]
                 else:
                     update_content[kitem_old] = v if not yaml_format else safe_load(v)
         except KeyError:
             raise EngineException(
-                "Invalid query string '{}'. Descriptor does not contain '{}'".format(k, kitem_old))
+                "Invalid query string '{}'. Descriptor does not contain '{}'".format(
+                    k, kitem_old
+                )
+            )
         except ValueError:
-            raise EngineException("Invalid query string '{}'. Expected integer index list instead of '{}'".format(
-                k, kitem))
+            raise EngineException(
+                "Invalid query string '{}'. Expected integer index list instead of '{}'".format(
+                    k, kitem
+                )
+            )
         except IndexError:
             raise EngineException(
-                "Invalid query string '{}'. Index '{}' out of  range".format(k, kitem_old))
+                "Invalid query string '{}'. Index '{}' out of range".format(
+                    k, kitem_old
+                )
+            )
         except YAMLError:
             raise EngineException("Invalid query string '{}' yaml format".format(k))
 
@@ -402,7 +432,7 @@ class BaseTopic:
             self.sol005_projection(data)
 
         return data
-        
+
         # TODO transform data for SOL005 URL requests
         # TODO remove _admin if not admin
 
@@ -415,7 +445,9 @@ class BaseTopic:
         :param accept_header: Content of Accept header. Must contain application/zip and/or text/plain
         :return: opened file or raises an exception
         """
-        raise EngineException("Method get_file not valid for this topic", HTTPStatus.INTERNAL_SERVER_ERROR)
+        raise EngineException(
+            "Method get_file not valid for this topic", HTTPStatus.INTERNAL_SERVER_ERROR
+        )
 
     def list(self, session, filter_q=None, api_req=False):
         """
@@ -437,7 +469,7 @@ class BaseTopic:
         # Only perform SOL005 projection if we are serving an external request
         if api_req:
             data = [self.sol005_projection(inst) for inst in data]
-                
+
         return data
 
     def new(self, rollback, session, indata=None, kwargs=None, headers=None):
@@ -462,7 +494,9 @@ class BaseTopic:
             self._update_input_with_kwargs(content, kwargs)
             content = self._validate_input_new(content, force=session["force"])
             self.check_conflict_on_new(session, content)
-            op_id = self.format_on_new(content, project_id=session["project_id"], make_public=session["public"])
+            op_id = self.format_on_new(
+                content, project_id=session["project_id"], make_public=session["public"]
+            )
             _id = self.db.create(self.topic, content)
             rollback.append({"topic": self.topic, "_id": _id})
             if op_id:
@@ -484,7 +518,10 @@ class BaseTopic:
         :return: True if the package has been completely uploaded or False if partial content has been uploaded.
             Raise exception on error
         """
-        raise EngineException("Method upload_content not valid for this topic", HTTPStatus.INTERNAL_SERVER_ERROR)
+        raise EngineException(
+            "Method upload_content not valid for this topic",
+            HTTPStatus.INTERNAL_SERVER_ERROR,
+        )
 
     def delete_list(self, session, filter_q=None):
         """
@@ -534,26 +571,44 @@ class BaseTopic:
         self.check_conflict_on_del(session, _id, item_content)
         if dry_run:
             return None
-        
+
         if self.multiproject and session["project_id"]:
             # remove reference from project_read if there are more projects referencing it. If it last one,
             # do not remove reference, but delete
-            other_projects_referencing = next((p for p in item_content["_admin"]["projects_read"]
-                                               if p not in session["project_id"] and p != "ANY"), None)
+            other_projects_referencing = next(
+                (
+                    p
+                    for p in item_content["_admin"]["projects_read"]
+                    if p not in session["project_id"] and p != "ANY"
+                ),
+                None,
+            )
 
             # check if there are projects referencing it (apart from ANY, that means, public)....
             if other_projects_referencing:
                 # remove references but not delete
-                update_dict_pull = {"_admin.projects_read": session["project_id"],
-                                    "_admin.projects_write": session["project_id"]}
-                self.db.set_one(self.topic, filter_q, update_dict=None, pull_list=update_dict_pull)
+                update_dict_pull = {
+                    "_admin.projects_read": session["project_id"],
+                    "_admin.projects_write": session["project_id"],
+                }
+                self.db.set_one(
+                    self.topic, filter_q, update_dict=None, pull_list=update_dict_pull
+                )
                 return None
             else:
-                can_write = next((p for p in item_content["_admin"]["projects_write"] if p == "ANY" or
-                                  p in session["project_id"]), None)
+                can_write = next(
+                    (
+                        p
+                        for p in item_content["_admin"]["projects_write"]
+                        if p == "ANY" or p in session["project_id"]
+                    ),
+                    None,
+                )
                 if not can_write:
-                    raise EngineException("You have not write permission to delete it",
-                                          http_code=HTTPStatus.UNAUTHORIZED)
+                    raise EngineException(
+                        "You do not have write permission to delete it",
+                        http_code=HTTPStatus.UNAUTHORIZED,
+                    )
 
         # delete
         self.db.del_one(self.topic, filter_q)
@@ -578,8 +633,10 @@ class BaseTopic:
             self._update_input_with_kwargs(indata, kwargs)
         try:
             if indata and session.get("set_project"):
-                raise EngineException("Cannot edit content and set to project (query string SET_PROJECT) at same time",
-                                      HTTPStatus.UNPROCESSABLE_ENTITY)
+                raise EngineException(
+                    "Cannot edit content and set to project (query string SET_PROJECT) at same time",
+                    HTTPStatus.UNPROCESSABLE_ENTITY,
+                )
             # TODO self._check_edition(session, indata, _id, force)
             if not content:
                 content = self.show(session, _id)
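
The _get_project_filter() hunks above choose between the "_admin.projects_read.cont" and "_admin.projects_write.cont" keys depending on the HTTP method and the set_project flag. The simplified, self-contained sketch below shows that decision in isolation; it deliberately ignores the force/"ANY" handling and the PROJECT.ne branch, so it is an approximation rather than the project's code.

    def project_filter_sketch(session):
        p_filter = {}
        project_ids = session.get("project_id") or ()  # empty means no restriction
        if not project_ids:
            return p_filter
        if session["method"] in ("list", "show", "delete") or session.get("set_project"):
            p_filter["_admin.projects_read.cont"] = list(project_ids)
        else:
            p_filter["_admin.projects_write.cont"] = list(project_ids)
        return p_filter

    # A "list" request scoped to one project only needs read access:
    # {'_admin.projects_read.cont': ['p1']}
    print(project_filter_sketch({"project_id": ("p1",), "method": "list"}))

    # A "write" (e.g. edit) request is checked against projects_write instead:
    # {'_admin.projects_write.cont': ['p1']}
    print(project_filter_sketch({"project_id": ("p1",), "method": "write"}))
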
index df218cc..6bf437d 100644 (file)
@@ -18,6 +18,7 @@ import yaml
 import json
 import importlib
 import copy
+
 # import logging
 from hashlib import md5
 from osm_common.dbbase import DbException, deep_update_rfc7396
@@ -25,9 +26,15 @@ from http import HTTPStatus
 from time import time
 from uuid import uuid4
 from re import fullmatch
-from osm_nbi.validation import ValidationError, pdu_new_schema, pdu_edit_schema, \
-    validate_input, vnfpkgop_new_schema
+from osm_nbi.validation import (
+    ValidationError,
+    pdu_new_schema,
+    pdu_edit_schema,
+    validate_input,
+    vnfpkgop_new_schema,
+)
 from osm_nbi.base_topic import BaseTopic, EngineException, get_iterable
+
 etsi_nfv_vnfd = importlib.import_module("osm_im.etsi-nfv-vnfd")
 etsi_nfv_nsd = importlib.import_module("osm_im.etsi-nfv-nsd")
 from osm_im.nst import nst as nst_im
@@ -39,12 +46,13 @@ __author__ = "Alfonso Tierno <alfonso.tiernosepulveda@telefonica.com>"
 
 
 class DescriptorTopic(BaseTopic):
-
     def __init__(self, db, fs, msg, auth):
         BaseTopic.__init__(self, db, fs, msg, auth)
 
     def check_conflict_on_edit(self, session, final_content, edit_content, _id):
-        final_content = super().check_conflict_on_edit(session, final_content, edit_content, _id)
+        final_content = super().check_conflict_on_edit(
+            session, final_content, edit_content, _id
+        )
 
         def _check_unique_id_name(descriptor, position=""):
             for desc_key, desc_item in descriptor.items():
@@ -53,17 +61,27 @@ class DescriptorTopic(BaseTopic):
                     desc_item_id = None
                     for index, list_item in enumerate(desc_item):
                         if isinstance(list_item, dict):
-                            _check_unique_id_name(list_item, "{}.{}[{}]"
-                                                  .format(position, desc_key, index))
+                            _check_unique_id_name(
+                                list_item, "{}.{}[{}]".format(position, desc_key, index)
+                            )
                             # Base case
-                            if index == 0 and (list_item.get("id") or list_item.get("name")):
+                            if index == 0 and (
+                                list_item.get("id") or list_item.get("name")
+                            ):
                                 desc_item_id = "id" if list_item.get("id") else "name"
                             if desc_item_id and list_item.get(desc_item_id):
                                 if list_item[desc_item_id] in used_ids:
-                                    position = "{}.{}[{}]".format(position, desc_key, index)
-                                    raise EngineException("Error: identifier {} '{}' is not unique and repeats at '{}'"
-                                                          .format(desc_item_id, list_item[desc_item_id],
-                                                                  position), HTTPStatus.UNPROCESSABLE_ENTITY)
+                                    position = "{}.{}[{}]".format(
+                                        position, desc_key, index
+                                    )
+                                    raise EngineException(
+                                        "Error: identifier {} '{}' is not unique and repeats at '{}'".format(
+                                            desc_item_id,
+                                            list_item[desc_item_id],
+                                            position,
+                                        ),
+                                        HTTPStatus.UNPROCESSABLE_ENTITY,
+                                    )
                                 used_ids.append(list_item[desc_item_id])
 
         _check_unique_id_name(final_content)
@@ -74,7 +92,9 @@ class DescriptorTopic(BaseTopic):
             if k in final_content:
                 internal_keys[k] = final_content.pop(k)
         storage_params = internal_keys["_admin"].get("storage")
-        serialized = self._validate_input_new(final_content, storage_params, session["force"])
+        serialized = self._validate_input_new(
+            final_content, storage_params, session["force"]
+        )
 
         # 1.2. modify final_content with a serialized version
         final_content = copy.deepcopy(serialized)
@@ -92,9 +112,12 @@ class DescriptorTopic(BaseTopic):
             _filter["_id.neq"] = _id
 
             if self.db.get_one(self.topic, _filter, fail_on_empty=False):
-                raise EngineException("{} with id '{}' already exists for this project".format(self.topic[:-1],
-                                                                                               final_content["id"]),
-                                      HTTPStatus.CONFLICT)
+                raise EngineException(
+                    "{} with id '{}' already exists for this project".format(
+                        self.topic[:-1], final_content["id"]
+                    ),
+                    HTTPStatus.CONFLICT,
+                )
 
         return final_content
 
@@ -126,20 +149,31 @@ class DescriptorTopic(BaseTopic):
         if len(desc_list) == 1:
             return desc_list[0]
         elif len(desc_list) > 1:
-            raise DbException("Found more than one {} with id='{}' belonging to this project".format(topic[:-1], id),
-                              HTTPStatus.CONFLICT)
+            raise DbException(
+                "Found more than one {} with id='{}' belonging to this project".format(
+                    topic[:-1], id
+                ),
+                HTTPStatus.CONFLICT,
+            )
 
         # not found any: try to find public
         _filter = BaseTopic._get_project_filter(session)
         _filter["id"] = id
         desc_list = db.get_list(topic, _filter)
         if not desc_list:
-            raise DbException("Not found any {} with id='{}'".format(topic[:-1], id), HTTPStatus.NOT_FOUND)
+            raise DbException(
+                "Not found any {} with id='{}'".format(topic[:-1], id),
+                HTTPStatus.NOT_FOUND,
+            )
         elif len(desc_list) == 1:
             return desc_list[0]
         else:
-            raise DbException("Found more than one public {} with id='{}'; and no one belonging to this project".format(
-                topic[:-1], id), HTTPStatus.CONFLICT)
+            raise DbException(
+                "Found more than one public {} with id='{}'; and no one belonging to this project".format(
+                    topic[:-1], id
+                ),
+                HTTPStatus.CONFLICT,
+            )
 
     def new(self, rollback, session, indata=None, kwargs=None, headers=None):
         """
@@ -161,7 +195,7 @@ class DescriptorTopic(BaseTopic):
         # _remove_envelop
         if indata:
             if "userDefinedData" in indata:
-                indata = indata['userDefinedData']
+                indata = indata["userDefinedData"]
 
         # Override descriptor with query string kwargs
         self._update_input_with_kwargs(indata, kwargs)
@@ -170,7 +204,9 @@ class DescriptorTopic(BaseTopic):
         # indata = DescriptorTopic._validate_input_new(self, indata, project_id=session["force"])
 
         content = {"_admin": {"userDefinedData": indata}}
-        self.format_on_new(content, session["project_id"], make_public=session["public"])
+        self.format_on_new(
+            content, session["project_id"], make_public=session["public"]
+        )
         _id = self.db.create(self.topic, content)
         rollback.append({"topic": self.topic, "_id": _id})
         self._send_msg("created", {"_id": _id})
@@ -194,8 +230,12 @@ class DescriptorTopic(BaseTopic):
         expected_md5 = headers.get("Content-File-MD5")
         compressed = None
         content_type = headers.get("Content-Type")
-        if content_type and "application/gzip" in content_type or "application/x-gzip" in content_type or \
-                "application/zip" in content_type:
+        if (
+            content_type
+            and "application/gzip" in content_type
+            or "application/x-gzip" in content_type
+            or "application/zip" in content_type
+        ):
             compressed = "gzip"
         filename = headers.get("Content-Filename")
         if not filename:
@@ -205,19 +245,27 @@ class DescriptorTopic(BaseTopic):
         error_text = ""
         try:
             if content_range_text:
-                content_range = content_range_text.replace("-", " ").replace("/", " ").split()
-                if content_range[0] != "bytes":  # TODO check x<y not negative < total....
+                content_range = (
+                    content_range_text.replace("-", " ").replace("/", " ").split()
+                )
+                if (
+                    content_range[0] != "bytes"
+                ):  # TODO check x<y not negative < total....
                     raise IndexError()
                 start = int(content_range[1])
                 end = int(content_range[2]) + 1
                 total = int(content_range[3])
             else:
                 start = 0
-            temp_folder = _id + "_"  # all the content is uploaded here and, if ok, it is renamed from id_ to its folder
+            temp_folder = (
+                _id + "_"
+            )  # all the content is uploaded here and, if ok, it is renamed from id_ to its folder
 
             if start:
-                if not self.fs.file_exists(temp_folder, 'dir'):
-                    raise EngineException("invalid Transaction-Id header", HTTPStatus.NOT_FOUND)
+                if not self.fs.file_exists(temp_folder, "dir"):
+                    raise EngineException(
+                        "invalid Transaction-Id header", HTTPStatus.NOT_FOUND
+                    )
             else:
                 self.fs.file_delete(temp_folder, ignore_non_exist=True)
                 self.fs.mkdir(temp_folder)
@@ -226,14 +274,18 @@ class DescriptorTopic(BaseTopic):
             storage["folder"] = _id
 
             file_path = (temp_folder, filename)
-            if self.fs.file_exists(file_path, 'file'):
+            if self.fs.file_exists(file_path, "file"):
                 file_size = self.fs.file_size(file_path)
             else:
                 file_size = 0
             if file_size != start:
-                raise EngineException("invalid Content-Range start sequence, expected '{}' but received '{}'".format(
-                    file_size, start), HTTPStatus.REQUESTED_RANGE_NOT_SATISFIABLE)
-            file_pkg = self.fs.file_open(file_path, 'a+b')
+                raise EngineException(
+                    "invalid Content-Range start sequence, expected '{}' but received '{}'".format(
+                        file_size, start
+                    ),
+                    HTTPStatus.REQUESTED_RANGE_NOT_SATISFIABLE,
+                )
+            file_pkg = self.fs.file_open(file_path, "a+b")
             if isinstance(indata, dict):
                 indata_text = yaml.safe_dump(indata, indent=4, default_flow_style=False)
                 file_pkg.write(indata_text.encode(encoding="utf-8"))
@@ -247,8 +299,12 @@ class DescriptorTopic(BaseTopic):
                     file_pkg.write(indata_text)
             if content_range_text:
                 if indata_len != end - start:
-                    raise EngineException("Mismatch between Content-Range header {}-{} and body length of {}".format(
-                        start, end - 1, indata_len), HTTPStatus.REQUESTED_RANGE_NOT_SATISFIABLE)
+                    raise EngineException(
+                        "Mismatch between Content-Range header {}-{} and body length of {}".format(
+                            start, end - 1, indata_len
+                        ),
+                        HTTPStatus.REQUESTED_RANGE_NOT_SATISFIABLE,
+                    )
                 if end != total:
                     # TODO update to UPLOADING
                     return False
@@ -265,28 +321,43 @@ class DescriptorTopic(BaseTopic):
                     raise EngineException("Error, MD5 mismatch", HTTPStatus.CONFLICT)
             file_pkg.seek(0, 0)
             if compressed == "gzip":
-                tar = tarfile.open(mode='r', fileobj=file_pkg)
+                tar = tarfile.open(mode="r", fileobj=file_pkg)
                 descriptor_file_name = None
                 for tarinfo in tar:
                     tarname = tarinfo.name
                     tarname_path = tarname.split("/")
-                    if not tarname_path[0] or ".." in tarname_path:  # if it starts with "/" it means an absolute path
-                        raise EngineException("Absolute path or '..' are not allowed for package descriptor tar.gz")
+                    if (
+                        not tarname_path[0] or ".." in tarname_path
+                    ):  # if it starts with "/" it means an absolute path
+                        raise EngineException(
+                            "Absolute path or '..' are not allowed for package descriptor tar.gz"
+                        )
                     if len(tarname_path) == 1 and not tarinfo.isdir():
-                        raise EngineException("All files must be inside a dir for package descriptor tar.gz")
-                    if tarname.endswith(".yaml") or tarname.endswith(".json") or tarname.endswith(".yml"):
+                        raise EngineException(
+                            "All files must be inside a dir for package descriptor tar.gz"
+                        )
+                    if (
+                        tarname.endswith(".yaml")
+                        or tarname.endswith(".json")
+                        or tarname.endswith(".yml")
+                    ):
                         storage["pkg-dir"] = tarname_path[0]
                         if len(tarname_path) == 2:
                             if descriptor_file_name:
                                 raise EngineException(
-                                    "Found more than one descriptor file at package descriptor tar.gz")
+                                    "Found more than one descriptor file at package descriptor tar.gz"
+                                )
                             descriptor_file_name = tarname
                 if not descriptor_file_name:
-                    raise EngineException("Not found any descriptor file at package descriptor tar.gz")
+                    raise EngineException(
+                        "Not found any descriptor file at package descriptor tar.gz"
+                    )
                 storage["descriptor"] = descriptor_file_name
                 storage["zipfile"] = filename
                 self.fs.file_extract(tar, temp_folder)
-                with self.fs.file_open((temp_folder, descriptor_file_name), "r") as descriptor_file:
+                with self.fs.file_open(
+                    (temp_folder, descriptor_file_name), "r"
+                ) as descriptor_file:
                     content = descriptor_file.read()
             else:
                 content = file_pkg.read()
@@ -310,7 +381,9 @@ class DescriptorTopic(BaseTopic):
                 self._update_input_with_kwargs(indata, kwargs)
 
             deep_update_rfc7396(current_desc, indata)
-            current_desc = self.check_conflict_on_edit(session, current_desc, indata, _id=_id)
+            current_desc = self.check_conflict_on_edit(
+                session, current_desc, indata, _id=_id
+            )
             current_desc["_admin"]["modified"] = time()
             self.db.replace(self.topic, _id, current_desc)
             self.fs.dir_rename(temp_folder, _id)
@@ -325,12 +398,19 @@ class DescriptorTopic(BaseTopic):
         except EngineException:
             raise
         except IndexError:
-            raise EngineException("invalid Content-Range header format. Expected 'bytes start-end/total'",
-                                  HTTPStatus.REQUESTED_RANGE_NOT_SATISFIABLE)
+            raise EngineException(
+                "invalid Content-Range header format. Expected 'bytes start-end/total'",
+                HTTPStatus.REQUESTED_RANGE_NOT_SATISFIABLE,
+            )
         except IOError as e:
-            raise EngineException("invalid upload transaction sequence: '{}'".format(e), HTTPStatus.BAD_REQUEST)
+            raise EngineException(
+                "invalid upload transaction sequence: '{}'".format(e),
+                HTTPStatus.BAD_REQUEST,
+            )
         except tarfile.ReadError as e:
-            raise EngineException("invalid file content {}".format(e), HTTPStatus.BAD_REQUEST)
+            raise EngineException(
+                "invalid file content {}".format(e), HTTPStatus.BAD_REQUEST
+            )
         except (ValueError, yaml.YAMLError) as e:
             raise EngineException(error_text + str(e))
         except ValidationError as e:
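
The upload handler above accepts either the whole package in a single request or a sequence of chunks described by the Content-Range header, together with Content-Filename, Content-Type and Content-File-MD5. A minimal client-side sketch of that protocol follows; the endpoint path is modelled on the packageContent link built in sol005_projection further below, while the use of the requests library, PUT as the verb and bearer-token auth are assumptions.

# Minimal sketch of a chunked package upload against the handler above.
# Assumptions: the endpoint path, PUT as the HTTP verb, bearer-token auth, and
# that Content-File-MD5 carries the hex md5 of the complete file.
import hashlib
import requests


def upload_package_content(base_url, token, pkg_id, pkg_path, chunk=1 << 20):
    with open(pkg_path, "rb") as f:
        data = f.read()
    base_headers = {
        "Authorization": "Bearer {}".format(token),
        "Content-Type": "application/gzip",
        "Content-Filename": "package.tar.gz",
        "Content-File-MD5": hashlib.md5(data).hexdigest(),
    }
    url = "{}/vnfpkgm/v1/vnf_packages/{}/package_content".format(base_url, pkg_id)
    total = len(data)
    for start in range(0, total, chunk):
        end = min(start + chunk, total)  # exclusive end of this slice
        headers = dict(base_headers)
        # the handler parses "bytes <first>-<last>/<total>" (inclusive last byte)
        headers["Content-Range"] = "bytes {}-{}/{}".format(start, end - 1, total)
        response = requests.put(url, data=data[start:end], headers=headers)
        response.raise_for_status()
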
@@ -350,33 +430,48 @@ class DescriptorTopic(BaseTopic):
         """
         accept_text = accept_zip = False
         if accept_header:
-            if 'text/plain' in accept_header or '*/*' in accept_header:
+            if "text/plain" in accept_header or "*/*" in accept_header:
                 accept_text = True
-            if 'application/zip' in accept_header or '*/*' in accept_header:
-                accept_zip = 'application/zip'
-            elif 'application/gzip' in accept_header:
-                accept_zip = 'application/gzip'
+            if "application/zip" in accept_header or "*/*" in accept_header:
+                accept_zip = "application/zip"
+            elif "application/gzip" in accept_header:
+                accept_zip = "application/gzip"
 
         if not accept_text and not accept_zip:
-            raise EngineException("provide request header 'Accept' with 'application/zip' or 'text/plain'",
-                                  http_code=HTTPStatus.NOT_ACCEPTABLE)
+            raise EngineException(
+                "provide request header 'Accept' with 'application/zip' or 'text/plain'",
+                http_code=HTTPStatus.NOT_ACCEPTABLE,
+            )
 
         content = self.show(session, _id)
         if content["_admin"]["onboardingState"] != "ONBOARDED":
-            raise EngineException("Cannot get content because this resource is not at 'ONBOARDED' state. "
-                                  "onboardingState is {}".format(content["_admin"]["onboardingState"]),
-                                  http_code=HTTPStatus.CONFLICT)
+            raise EngineException(
+                "Cannot get content because this resource is not at 'ONBOARDED' state. "
+                "onboardingState is {}".format(content["_admin"]["onboardingState"]),
+                http_code=HTTPStatus.CONFLICT,
+            )
         storage = content["_admin"]["storage"]
         if path is not None and path != "$DESCRIPTOR":  # artifacts
-            if not storage.get('pkg-dir'):
-                raise EngineException("Packages does not contains artifacts", http_code=HTTPStatus.BAD_REQUEST)
-            if self.fs.file_exists((storage['folder'], storage['pkg-dir'], *path), 'dir'):
-                folder_content = self.fs.dir_ls((storage['folder'], storage['pkg-dir'], *path))
+            if not storage.get("pkg-dir"):
+                raise EngineException(
+                    "Packages does not contains artifacts",
+                    http_code=HTTPStatus.BAD_REQUEST,
+                )
+            if self.fs.file_exists(
+                (storage["folder"], storage["pkg-dir"], *path), "dir"
+            ):
+                folder_content = self.fs.dir_ls(
+                    (storage["folder"], storage["pkg-dir"], *path)
+                )
                 return folder_content, "text/plain"
                 # TODO manage folders in http
             else:
-                return self.fs.file_open((storage['folder'], storage['pkg-dir'], *path), "rb"), \
-                    "application/octet-stream"
+                return (
+                    self.fs.file_open(
+                        (storage["folder"], storage["pkg-dir"], *path), "rb"
+                    ),
+                    "application/octet-stream",
+                )
 
         # pkgtype   accept  ZIP  TEXT    -> result
         # manyfiles         yes  X       -> zip
@@ -384,22 +479,36 @@ class DescriptorTopic(BaseTopic):
         # onefile           yes  no      -> zip
         #                   X    yes     -> text
         contain_many_files = False
-        if storage.get('pkg-dir'):
+        if storage.get("pkg-dir"):
             # check if there is more than one file in the package, ignoring checksums.txt.
-            pkg_files = self.fs.dir_ls((storage['folder'], storage['pkg-dir']))
-            if len(pkg_files) >= 3 or (len(pkg_files) == 2 and 'checksums.txt' not in pkg_files):
+            pkg_files = self.fs.dir_ls((storage["folder"], storage["pkg-dir"]))
+            if len(pkg_files) >= 3 or (
+                len(pkg_files) == 2 and "checksums.txt" not in pkg_files
+            ):
                 contain_many_files = True
         if accept_text and (not contain_many_files or path == "$DESCRIPTOR"):
-            return self.fs.file_open((storage['folder'], storage['descriptor']), "r"), "text/plain"
+            return (
+                self.fs.file_open((storage["folder"], storage["descriptor"]), "r"),
+                "text/plain",
+            )
         elif contain_many_files and not accept_zip:
-            raise EngineException("Packages that contains several files need to be retrieved with 'application/zip'"
-                                  "Accept header", http_code=HTTPStatus.NOT_ACCEPTABLE)
+            raise EngineException(
+                "Packages that contains several files need to be retrieved with 'application/zip'"
+                "Accept header",
+                http_code=HTTPStatus.NOT_ACCEPTABLE,
+            )
         else:
-            if not storage.get('zipfile'):
+            if not storage.get("zipfile"):
                 # TODO generate zipfile if not present
-                raise EngineException("Only allowed 'text/plain' Accept header for this descriptor. To be solved in "
-                                      "future versions", http_code=HTTPStatus.NOT_ACCEPTABLE)
-            return self.fs.file_open((storage['folder'], storage['zipfile']), "rb"), accept_zip
+                raise EngineException(
+                    "Only allowed 'text/plain' Accept header for this descriptor. To be solved in "
+                    "future versions",
+                    http_code=HTTPStatus.NOT_ACCEPTABLE,
+                )
+            return (
+                self.fs.file_open((storage["folder"], storage["zipfile"]), "rb"),
+                accept_zip,
+            )
 
     def _remove_yang_prefixes_from_descriptor(self, descriptor):
         new_descriptor = {}
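
On the read side, get_file picks the representation from the Accept header: text/plain returns the descriptor itself, application/zip or application/gzip returns the stored package, and an additional path selects an artifact under pkg-dir. A minimal client-side sketch, assuming the requests library, bearer-token auth, and URLs modelled on the _links added by sol005_projection:

# Minimal sketch of content retrieval driven by the Accept header handled above.
# The /vnfd and /package_content paths follow the _links built in
# sol005_projection; the auth scheme is an assumption.
import requests


def fetch_descriptor_text(base_url, token, pkg_id):
    response = requests.get(
        "{}/vnfpkgm/v1/vnf_packages/{}/vnfd".format(base_url, pkg_id),
        headers={"Authorization": "Bearer {}".format(token), "Accept": "text/plain"},
    )
    response.raise_for_status()
    return response.text


def fetch_package_zip(base_url, token, pkg_id, out_path):
    response = requests.get(
        "{}/vnfpkgm/v1/vnf_packages/{}/package_content".format(base_url, pkg_id),
        headers={
            "Authorization": "Bearer {}".format(token),
            "Accept": "application/zip",
        },
    )
    response.raise_for_status()
    with open(out_path, "wb") as f:
        f.write(response.content)
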
@@ -414,12 +523,14 @@ class DescriptorTopic(BaseTopic):
                         new_v.append(self._remove_yang_prefixes_from_descriptor(x))
                     else:
                         new_v.append(x)
-            new_descriptor[k.split(':')[-1]] = new_v
+            new_descriptor[k.split(":")[-1]] = new_v
         return new_descriptor
 
     def pyangbind_validation(self, item, data, force=False):
-        raise EngineException("Not possible to validate '{}' item".format(item),
-                              http_code=HTTPStatus.INTERNAL_SERVER_ERROR)
+        raise EngineException(
+            "Not possible to validate '{}' item".format(item),
+            http_code=HTTPStatus.INTERNAL_SERVER_ERROR,
+        )
 
     def _validate_input_edit(self, indata, content, force=False):
         # not needed to validate with pyangbind because it will be validated at check_conflict_on_edit
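
The _remove_yang_prefixes_from_descriptor helper above drops the YANG module prefix (for example "etsi-nfv-vnfd:") from every key, including keys nested inside list items. A small illustration on a made-up fragment:

# Made-up example of the prefix stripping performed above.
prefixed = {
    "etsi-nfv-vnfd:id": "example-vnf",
    "etsi-nfv-vnfd:df": [{"etsi-nfv-vnfd:id": "default-df"}],
}
# After the helper runs, the equivalent structure is:
unprefixed = {
    "id": "example-vnf",
    "df": [{"id": "default-df"}],
}
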
@@ -432,25 +543,38 @@ class DescriptorTopic(BaseTopic):
             if indata["operationalState"] in ("ENABLED", "DISABLED"):
                 indata["_admin"]["operationalState"] = indata.pop("operationalState")
             else:
-                raise EngineException("State '{}' is not a valid operational state"
-                                      .format(indata["operationalState"]),
-                                      http_code=HTTPStatus.BAD_REQUEST)
-
-        # In the case of user defined data, we need to put the data in the root of the object 
+                raise EngineException(
+                    "State '{}' is not a valid operational state".format(
+                        indata["operationalState"]
+                    ),
+                    http_code=HTTPStatus.BAD_REQUEST,
+                )
+
+        # In the case of user defined data, we need to put the data in the root of the object
         # to preserve current expected behaviour
         if "userDefinedData" in indata:
             data = indata.pop("userDefinedData")
             if type(data) == dict:
                 indata["_admin"]["userDefinedData"] = data
             else:
-                raise EngineException("userDefinedData should be an object, but is '{}' instead"
-                                      .format(type(data)),
-                                      http_code=HTTPStatus.BAD_REQUEST)
-
-        if ("operationalState" in indata["_admin"] and
-                content["_admin"]["operationalState"] == indata["_admin"]["operationalState"]):
-            raise EngineException("operationalState already {}".format(content["_admin"]["operationalState"]),
-                                  http_code=HTTPStatus.CONFLICT)
+                raise EngineException(
+                    "userDefinedData should be an object, but is '{}' instead".format(
+                        type(data)
+                    ),
+                    http_code=HTTPStatus.BAD_REQUEST,
+                )
+
+        if (
+            "operationalState" in indata["_admin"]
+            and content["_admin"]["operationalState"]
+            == indata["_admin"]["operationalState"]
+        ):
+            raise EngineException(
+                "operationalState already {}".format(
+                    content["_admin"]["operationalState"]
+                ),
+                http_code=HTTPStatus.CONFLICT,
+            )
 
         return indata
 
@@ -464,23 +588,33 @@ class VnfdTopic(DescriptorTopic):
 
     def pyangbind_validation(self, item, data, force=False):
         if self._descriptor_data_is_in_old_format(data):
-            raise EngineException("ERROR: Unsupported descriptor format. Please, use an ETSI SOL006 descriptor.",
-                                  http_code=HTTPStatus.UNPROCESSABLE_ENTITY)
+            raise EngineException(
+                "ERROR: Unsupported descriptor format. Please, use an ETSI SOL006 descriptor.",
+                http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
+            )
         try:
             myvnfd = etsi_nfv_vnfd.etsi_nfv_vnfd()
-            pybindJSONDecoder.load_ietf_json({'etsi-nfv-vnfd:vnfd': data}, None, None, obj=myvnfd,
-                                             path_helper=True, skip_unknown=force)
+            pybindJSONDecoder.load_ietf_json(
+                {"etsi-nfv-vnfd:vnfd": data},
+                None,
+                None,
+                obj=myvnfd,
+                path_helper=True,
+                skip_unknown=force,
+            )
             out = pybindJSON.dumps(myvnfd, mode="ietf")
             desc_out = self._remove_envelop(yaml.safe_load(out))
             desc_out = self._remove_yang_prefixes_from_descriptor(desc_out)
             return utils.deep_update_dict(data, desc_out)
         except Exception as e:
-            raise EngineException("Error in pyangbind validation: {}".format(str(e)),
-                                  http_code=HTTPStatus.UNPROCESSABLE_ENTITY)
+            raise EngineException(
+                "Error in pyangbind validation: {}".format(str(e)),
+                http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
+            )
 
     @staticmethod
     def _descriptor_data_is_in_old_format(data):
-        return ('vnfd-catalog' in data) or ('vnfd:vnfd-catalog' in data)
+        return ("vnfd-catalog" in data) or ("vnfd:vnfd-catalog" in data)
 
     @staticmethod
     def _remove_envelop(indata=None):
@@ -488,19 +622,21 @@ class VnfdTopic(DescriptorTopic):
             return {}
         clean_indata = indata
 
-        if clean_indata.get('etsi-nfv-vnfd:vnfd'):
-            if not isinstance(clean_indata['etsi-nfv-vnfd:vnfd'], dict):
+        if clean_indata.get("etsi-nfv-vnfd:vnfd"):
+            if not isinstance(clean_indata["etsi-nfv-vnfd:vnfd"], dict):
                 raise EngineException("'etsi-nfv-vnfd:vnfd' must be a dict")
-            clean_indata = clean_indata['etsi-nfv-vnfd:vnfd']
-        elif clean_indata.get('vnfd'):
-            if not isinstance(clean_indata['vnfd'], dict):
+            clean_indata = clean_indata["etsi-nfv-vnfd:vnfd"]
+        elif clean_indata.get("vnfd"):
+            if not isinstance(clean_indata["vnfd"], dict):
                 raise EngineException("'vnfd' must be dict")
-            clean_indata = clean_indata['vnfd']
+            clean_indata = clean_indata["vnfd"]
 
         return clean_indata
 
     def check_conflict_on_edit(self, session, final_content, edit_content, _id):
-        final_content = super().check_conflict_on_edit(session, final_content, edit_content, _id)
+        final_content = super().check_conflict_on_edit(
+            session, final_content, edit_content, _id
+        )
 
         # set type of vnfd
         contains_pdu = False
@@ -539,15 +675,19 @@ class VnfdTopic(DescriptorTopic):
         # check vnfrs using this vnfd
         _filter["vnfd-id"] = _id
         if self.db.get_list("vnfrs", _filter):
-            raise EngineException("There is at least one VNF instance using this descriptor",
-                                  http_code=HTTPStatus.CONFLICT)
+            raise EngineException(
+                "There is at least one VNF instance using this descriptor",
+                http_code=HTTPStatus.CONFLICT,
+            )
 
         # check NSD referencing this VNFD
         del _filter["vnfd-id"]
         _filter["vnfd-id"] = descriptor_id
         if self.db.get_list("nsds", _filter):
-            raise EngineException("There is at least one NS package referencing this descriptor",
-                                  http_code=HTTPStatus.CONFLICT)
+            raise EngineException(
+                "There is at least one NS package referencing this descriptor",
+                http_code=HTTPStatus.CONFLICT,
+            )
 
     def _validate_input_new(self, indata, storage_params, force=False):
         indata.pop("onboardingState", None)
@@ -579,15 +719,19 @@ class VnfdTopic(DescriptorTopic):
         if not indata.get("vdu"):
             return
         if not indata.get("mgmt-cp"):
-            raise EngineException("'mgmt-cp' is a mandatory field and it is not defined",
-                                  http_code=HTTPStatus.UNPROCESSABLE_ENTITY)
+            raise EngineException(
+                "'mgmt-cp' is a mandatory field and it is not defined",
+                http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
+            )
 
         for cp in get_iterable(indata.get("ext-cpd")):
             if cp["id"] == indata["mgmt-cp"]:
                 break
         else:
-            raise EngineException("mgmt-cp='{}' must match an existing ext-cpd".format(indata["mgmt-cp"]),
-                                  http_code=HTTPStatus.UNPROCESSABLE_ENTITY)
+            raise EngineException(
+                "mgmt-cp='{}' must match an existing ext-cpd".format(indata["mgmt-cp"]),
+                http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
+            )
 
     @staticmethod
     def validate_vdu_internal_connection_points(vdu):
@@ -595,9 +739,12 @@ class VnfdTopic(DescriptorTopic):
         for cpd in get_iterable(vdu.get("int-cpd")):
             cpd_id = cpd.get("id")
             if cpd_id and cpd_id in int_cpds:
-                raise EngineException("vdu[id='{}']:int-cpd[id='{}'] is already used by other int-cpd"
-                                      .format(vdu["id"], cpd_id),
-                                      http_code=HTTPStatus.UNPROCESSABLE_ENTITY)
+                raise EngineException(
+                    "vdu[id='{}']:int-cpd[id='{}'] is already used by other int-cpd".format(
+                        vdu["id"], cpd_id
+                    ),
+                    http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
+                )
             int_cpds.add(cpd_id)
 
     @staticmethod
@@ -611,68 +758,98 @@ class VnfdTopic(DescriptorTopic):
         for cpd in get_iterable(indata.get("ext-cpd")):
             cpd_id = cpd.get("id")
             if cpd_id and cpd_id in ext_cpds:
-                raise EngineException("ext-cpd[id='{}'] is already used by other ext-cpd".format(cpd_id),
-                                      http_code=HTTPStatus.UNPROCESSABLE_ENTITY)
+                raise EngineException(
+                    "ext-cpd[id='{}'] is already used by other ext-cpd".format(cpd_id),
+                    http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
+                )
             ext_cpds.add(cpd_id)
 
             int_cpd = cpd.get("int-cpd")
             if int_cpd:
                 if (int_cpd.get("vdu-id"), int_cpd.get("cpd")) not in all_vdus_int_cpds:
-                    raise EngineException("ext-cpd[id='{}']:int-cpd must match an existing vdu int-cpd".format(cpd_id),
-                                          http_code=HTTPStatus.UNPROCESSABLE_ENTITY)
+                    raise EngineException(
+                        "ext-cpd[id='{}']:int-cpd must match an existing vdu int-cpd".format(
+                            cpd_id
+                        ),
+                        http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
+                    )
             # TODO: Validate k8s-cluster-net points to a valid k8s-cluster:nets ?
 
     def _validate_vdu_charms_in_package(self, storage_params, indata):
         for df in indata["df"]:
-            if "lcm-operations-configuration" in df and "operate-vnf-op-config" in df["lcm-operations-configuration"]:
-                configs = df["lcm-operations-configuration"]["operate-vnf-op-config"].get("day1-2", [])
+            if (
+                "lcm-operations-configuration" in df
+                and "operate-vnf-op-config" in df["lcm-operations-configuration"]
+            ):
+                configs = df["lcm-operations-configuration"][
+                    "operate-vnf-op-config"
+                ].get("day1-2", [])
                 vdus = df.get("vdu-profile", [])
                 for vdu in vdus:
                     for config in configs:
                         if config["id"] == vdu["id"] and utils.find_in_list(
                             config.get("execution-environment-list", []),
-                            lambda ee: "juju" in ee
+                            lambda ee: "juju" in ee,
                         ):
-                            if not self._validate_package_folders(storage_params, 'charms'):
-                                raise EngineException("Charm defined in vnf[id={}] but not present in "
-                                                      "package".format(indata["id"]))
+                            if not self._validate_package_folders(
+                                storage_params, "charms"
+                            ):
+                                raise EngineException(
+                                    "Charm defined in vnf[id={}] but not present in "
+                                    "package".format(indata["id"])
+                                )
 
     def _validate_vdu_cloud_init_in_package(self, storage_params, vdu, indata):
         if not vdu.get("cloud-init-file"):
             return
-        if not self._validate_package_folders(storage_params, 'cloud_init', vdu["cloud-init-file"]):
-            raise EngineException("Cloud-init defined in vnf[id={}]:vdu[id={}] but not present in "
-                                  "package".format(indata["id"], vdu["id"]))
+        if not self._validate_package_folders(
+            storage_params, "cloud_init", vdu["cloud-init-file"]
+        ):
+            raise EngineException(
+                "Cloud-init defined in vnf[id={}]:vdu[id={}] but not present in "
+                "package".format(indata["id"], vdu["id"])
+            )
 
     def _validate_vnf_charms_in_package(self, storage_params, indata):
         # Get VNF configuration through new container
-        for deployment_flavor in indata.get('df', []):
+        for deployment_flavor in indata.get("df", []):
             if "lcm-operations-configuration" not in deployment_flavor:
                 return
-            if "operate-vnf-op-config" not in deployment_flavor["lcm-operations-configuration"]:
+            if (
+                "operate-vnf-op-config"
+                not in deployment_flavor["lcm-operations-configuration"]
+            ):
                 return
-            for day_1_2_config in deployment_flavor["lcm-operations-configuration"]["operate-vnf-op-config"]["day1-2"]:
+            for day_1_2_config in deployment_flavor["lcm-operations-configuration"][
+                "operate-vnf-op-config"
+            ]["day1-2"]:
                 if day_1_2_config["id"] == indata["id"]:
                     if utils.find_in_list(
                         day_1_2_config.get("execution-environment-list", []),
-                        lambda ee: "juju" in ee
+                        lambda ee: "juju" in ee,
                     ):
-                        if not self._validate_package_folders(storage_params, 'charms'):
-                            raise EngineException("Charm defined in vnf[id={}] but not present in "
-                                                  "package".format(indata["id"]))
+                        if not self._validate_package_folders(storage_params, "charms"):
+                            raise EngineException(
+                                "Charm defined in vnf[id={}] but not present in "
+                                "package".format(indata["id"])
+                            )
 
     def _validate_package_folders(self, storage_params, folder, file=None):
         if not storage_params or not storage_params.get("pkg-dir"):
             return False
         else:
-            if self.fs.file_exists("{}_".format(storage_params["folder"]), 'dir'):
-                f = "{}_/{}/{}".format(storage_params["folder"], storage_params["pkg-dir"], folder)
+            if self.fs.file_exists("{}_".format(storage_params["folder"]), "dir"):
+                f = "{}_/{}/{}".format(
+                    storage_params["folder"], storage_params["pkg-dir"], folder
+                )
             else:
-                f = "{}/{}/{}".format(storage_params["folder"], storage_params["pkg-dir"], folder)
+                f = "{}/{}/{}".format(
+                    storage_params["folder"], storage_params["pkg-dir"], folder
+                )
             if file:
-                return self.fs.file_exists("{}/{}".format(f, file), 'file')
+                return self.fs.file_exists("{}/{}".format(f, file), "file")
             else:
-                if self.fs.file_exists(f, 'dir'):
+                if self.fs.file_exists(f, "dir"):
                     if self.fs.dir_ls(f):
                         return True
             return False
@@ -683,8 +860,10 @@ class VnfdTopic(DescriptorTopic):
         for ivld in get_iterable(indata.get("int-virtual-link-desc")):
             ivld_id = ivld.get("id")
             if ivld_id and ivld_id in all_ivld_ids:
-                raise EngineException("Duplicated VLD id in int-virtual-link-desc[id={}]".format(ivld_id),
-                                      http_code=HTTPStatus.UNPROCESSABLE_ENTITY)
+                raise EngineException(
+                    "Duplicated VLD id in int-virtual-link-desc[id={}]".format(ivld_id),
+                    http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
+                )
             else:
                 all_ivld_ids.add(ivld_id)
 
@@ -694,16 +873,21 @@ class VnfdTopic(DescriptorTopic):
                 if int_cpd_ivld_id and int_cpd_ivld_id not in all_ivld_ids:
                     raise EngineException(
                         "vdu[id='{}']:int-cpd[id='{}']:int-virtual-link-desc='{}' must match an existing "
-                        "int-virtual-link-desc".format(vdu["id"], int_cpd["id"], int_cpd_ivld_id),
-                        http_code=HTTPStatus.UNPROCESSABLE_ENTITY)
+                        "int-virtual-link-desc".format(
+                            vdu["id"], int_cpd["id"], int_cpd_ivld_id
+                        ),
+                        http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
+                    )
 
         for df in get_iterable(indata.get("df")):
             for vlp in get_iterable(df.get("virtual-link-profile")):
                 vlp_ivld_id = vlp.get("id")
                 if vlp_ivld_id and vlp_ivld_id not in all_ivld_ids:
-                    raise EngineException("df[id='{}']:virtual-link-profile='{}' must match an existing "
-                                          "int-virtual-link-desc".format(df["id"], vlp_ivld_id),
-                                          http_code=HTTPStatus.UNPROCESSABLE_ENTITY)
+                    raise EngineException(
+                        "df[id='{}']:virtual-link-profile='{}' must match an existing "
+                        "int-virtual-link-desc".format(df["id"], vlp_ivld_id),
+                        http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
+                    )
 
     @staticmethod
     def validate_monitoring_params(indata):
@@ -712,10 +896,13 @@ class VnfdTopic(DescriptorTopic):
             for mp in get_iterable(ivld.get("monitoring-parameters")):
                 mp_id = mp.get("id")
                 if mp_id and mp_id in all_monitoring_params:
-                    raise EngineException("Duplicated monitoring-parameter id in "
-                                          "int-virtual-link-desc[id='{}']:monitoring-parameters[id='{}']"
-                                          .format(ivld["id"], mp_id),
-                                          http_code=HTTPStatus.UNPROCESSABLE_ENTITY)
+                    raise EngineException(
+                        "Duplicated monitoring-parameter id in "
+                        "int-virtual-link-desc[id='{}']:monitoring-parameters[id='{}']".format(
+                            ivld["id"], mp_id
+                        ),
+                        http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
+                    )
                 else:
                     all_monitoring_params.add(mp_id)
 
@@ -723,10 +910,13 @@ class VnfdTopic(DescriptorTopic):
             for mp in get_iterable(vdu.get("monitoring-parameter")):
                 mp_id = mp.get("id")
                 if mp_id and mp_id in all_monitoring_params:
-                    raise EngineException("Duplicated monitoring-parameter id in "
-                                          "vdu[id='{}']:monitoring-parameter[id='{}']"
-                                          .format(vdu["id"], mp_id),
-                                          http_code=HTTPStatus.UNPROCESSABLE_ENTITY)
+                    raise EngineException(
+                        "Duplicated monitoring-parameter id in "
+                        "vdu[id='{}']:monitoring-parameter[id='{}']".format(
+                            vdu["id"], mp_id
+                        ),
+                        http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
+                    )
                 else:
                     all_monitoring_params.add(mp_id)
 
@@ -734,10 +924,13 @@ class VnfdTopic(DescriptorTopic):
             for mp in get_iterable(df.get("monitoring-parameter")):
                 mp_id = mp.get("id")
                 if mp_id and mp_id in all_monitoring_params:
-                    raise EngineException("Duplicated monitoring-parameter id in "
-                                          "df[id='{}']:monitoring-parameter[id='{}']"
-                                          .format(df["id"], mp_id),
-                                          http_code=HTTPStatus.UNPROCESSABLE_ENTITY)
+                    raise EngineException(
+                        "Duplicated monitoring-parameter id in "
+                        "df[id='{}']:monitoring-parameter[id='{}']".format(
+                            df["id"], mp_id
+                        ),
+                        http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
+                    )
                 else:
                     all_monitoring_params.add(mp_id)
 
@@ -761,36 +954,66 @@ class VnfdTopic(DescriptorTopic):
                 for sp in get_iterable(sa.get("scaling-policy")):
                     for sc in get_iterable(sp.get("scaling-criteria")):
                         sc_monitoring_param = sc.get("vnf-monitoring-param-ref")
-                        if sc_monitoring_param and sc_monitoring_param not in all_monitoring_params:
-                            raise EngineException("df[id='{}']:scaling-aspect[id='{}']:scaling-policy"
-                                                  "[name='{}']:scaling-criteria[name='{}']: "
-                                                  "vnf-monitoring-param-ref='{}' not defined in any monitoring-param"
-                                                  .format(df["id"], sa["id"], sp["name"], sc["name"],
-                                                          sc_monitoring_param),
-                                                  http_code=HTTPStatus.UNPROCESSABLE_ENTITY)
+                        if (
+                            sc_monitoring_param
+                            and sc_monitoring_param not in all_monitoring_params
+                        ):
+                            raise EngineException(
+                                "df[id='{}']:scaling-aspect[id='{}']:scaling-policy"
+                                "[name='{}']:scaling-criteria[name='{}']: "
+                                "vnf-monitoring-param-ref='{}' not defined in any monitoring-param".format(
+                                    df["id"],
+                                    sa["id"],
+                                    sp["name"],
+                                    sc["name"],
+                                    sc_monitoring_param,
+                                ),
+                                http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
+                            )
 
                 for sca in get_iterable(sa.get("scaling-config-action")):
-                    if "lcm-operations-configuration" not in df \
-                        or "operate-vnf-op-config" not in df["lcm-operations-configuration"] \
+                    if (
+                        "lcm-operations-configuration" not in df
+                        or "operate-vnf-op-config"
+                        not in df["lcm-operations-configuration"]
                         or not utils.find_in_list(
-                            df["lcm-operations-configuration"]["operate-vnf-op-config"].get("day1-2", []),
-                            lambda config: config["id"] == indata["id"]):
-                        raise EngineException("'day1-2 configuration' not defined in the descriptor but it is "
-                                              "referenced by df[id='{}']:scaling-aspect[id='{}']:scaling-config-action"
-                                              .format(df["id"], sa["id"]),
-                                              http_code=HTTPStatus.UNPROCESSABLE_ENTITY)
+                            df["lcm-operations-configuration"][
+                                "operate-vnf-op-config"
+                            ].get("day1-2", []),
+                            lambda config: config["id"] == indata["id"],
+                        )
+                    ):
+                        raise EngineException(
+                            "'day1-2 configuration' not defined in the descriptor but it is "
+                            "referenced by df[id='{}']:scaling-aspect[id='{}']:scaling-config-action".format(
+                                df["id"], sa["id"]
+                            ),
+                            http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
+                        )
                     for configuration in get_iterable(
-                        df["lcm-operations-configuration"]["operate-vnf-op-config"].get("day1-2", [])
+                        df["lcm-operations-configuration"]["operate-vnf-op-config"].get(
+                            "day1-2", []
+                        )
                     ):
-                        for primitive in get_iterable(configuration.get("config-primitive")):
-                            if primitive["name"] == sca["vnf-config-primitive-name-ref"]:
+                        for primitive in get_iterable(
+                            configuration.get("config-primitive")
+                        ):
+                            if (
+                                primitive["name"]
+                                == sca["vnf-config-primitive-name-ref"]
+                            ):
                                 break
                         else:
-                            raise EngineException("df[id='{}']:scaling-aspect[id='{}']:scaling-config-action:vnf-"
-                                                  "config-primitive-name-ref='{}' does not match any "
-                                                  "day1-2 configuration:config-primitive:name"
-                                                  .format(df["id"], sa["id"], sca["vnf-config-primitive-name-ref"]),
-                                                  http_code=HTTPStatus.UNPROCESSABLE_ENTITY)
+                            raise EngineException(
+                                "df[id='{}']:scaling-aspect[id='{}']:scaling-config-action:vnf-"
+                                "config-primitive-name-ref='{}' does not match any "
+                                "day1-2 configuration:config-primitive:name".format(
+                                    df["id"],
+                                    sa["id"],
+                                    sca["vnf-config-primitive-name-ref"],
+                                ),
+                                http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
+                            )
 
     def delete_extra(self, session, _id, db_content, not_send_msg=None):
         """
@@ -813,7 +1036,9 @@ class VnfdTopic(DescriptorTopic):
         links = {}
         links["self"] = {"href": "/vnfpkgm/v1/vnf_packages/{}".format(data["_id"])}
         links["vnfd"] = {"href": "/vnfpkgm/v1/vnf_packages/{}/vnfd".format(data["_id"])}
-        links["packageContent"] = {"href": "/vnfpkgm/v1/vnf_packages/{}/package_content".format(data["_id"])}
+        links["packageContent"] = {
+            "href": "/vnfpkgm/v1/vnf_packages/{}/package_content".format(data["_id"])
+        }
         data["_links"] = links
 
         return super().sol005_projection(data)
@@ -828,26 +1053,36 @@ class NsdTopic(DescriptorTopic):
 
     def pyangbind_validation(self, item, data, force=False):
         if self._descriptor_data_is_in_old_format(data):
-            raise EngineException("ERROR: Unsupported descriptor format. Please, use an ETSI SOL006 descriptor.",
-                                  http_code=HTTPStatus.UNPROCESSABLE_ENTITY)
+            raise EngineException(
+                "ERROR: Unsupported descriptor format. Please, use an ETSI SOL006 descriptor.",
+                http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
+            )
         try:
-            nsd_vnf_profiles = data.get('df', [{}])[0].get('vnf-profile', [])
+            nsd_vnf_profiles = data.get("df", [{}])[0].get("vnf-profile", [])
             mynsd = etsi_nfv_nsd.etsi_nfv_nsd()
-            pybindJSONDecoder.load_ietf_json({'nsd': {'nsd': [data]}}, None, None, obj=mynsd,
-                                             path_helper=True, skip_unknown=force)
+            pybindJSONDecoder.load_ietf_json(
+                {"nsd": {"nsd": [data]}},
+                None,
+                None,
+                obj=mynsd,
+                path_helper=True,
+                skip_unknown=force,
+            )
             out = pybindJSON.dumps(mynsd, mode="ietf")
             desc_out = self._remove_envelop(yaml.safe_load(out))
             desc_out = self._remove_yang_prefixes_from_descriptor(desc_out)
             if nsd_vnf_profiles:
-                desc_out['df'][0]['vnf-profile'] = nsd_vnf_profiles
+                desc_out["df"][0]["vnf-profile"] = nsd_vnf_profiles
             return desc_out
         except Exception as e:
-            raise EngineException("Error in pyangbind validation: {}".format(str(e)),
-                                  http_code=HTTPStatus.UNPROCESSABLE_ENTITY)
+            raise EngineException(
+                "Error in pyangbind validation: {}".format(str(e)),
+                http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
+            )
 
     @staticmethod
     def _descriptor_data_is_in_old_format(data):
-        return ('nsd-catalog' in data) or ('nsd:nsd-catalog' in data)
+        return ("nsd-catalog" in data) or ("nsd:nsd-catalog" in data)
 
     @staticmethod
     def _remove_envelop(indata=None):
@@ -855,14 +1090,17 @@ class NsdTopic(DescriptorTopic):
             return {}
         clean_indata = indata
 
-        if clean_indata.get('nsd'):
-            clean_indata = clean_indata['nsd']
-        elif clean_indata.get('etsi-nfv-nsd:nsd'):
-            clean_indata = clean_indata['etsi-nfv-nsd:nsd']
-        if clean_indata.get('nsd'):
-            if not isinstance(clean_indata['nsd'], list) or len(clean_indata['nsd']) != 1:
+        if clean_indata.get("nsd"):
+            clean_indata = clean_indata["nsd"]
+        elif clean_indata.get("etsi-nfv-nsd:nsd"):
+            clean_indata = clean_indata["etsi-nfv-nsd:nsd"]
+        if clean_indata.get("nsd"):
+            if (
+                not isinstance(clean_indata["nsd"], list)
+                or len(clean_indata["nsd"]) != 1
+            ):
                 raise EngineException("'nsd' must be a list of only one element")
-            clean_indata = clean_indata['nsd'][0]
+            clean_indata = clean_indata["nsd"][0]
         return clean_indata
 
     def _validate_input_new(self, indata, storage_params, force=False):
@@ -891,10 +1129,12 @@ class NsdTopic(DescriptorTopic):
             for vlp in get_iterable(df.get("virtual-link-profile")):
                 if vld_id and vld_id == vlp.get("virtual-link-desc-id"):
                     if vlp.get("virtual-link-protocol-data"):
-                        raise EngineException("Error at df[id='{}']:virtual-link-profile[id='{}']:virtual-link-"
-                                              "protocol-data You cannot set a virtual-link-protocol-data "
-                                              "when mgmt-network is True"
-                                              .format(df["id"], vlp["id"]), http_code=HTTPStatus.UNPROCESSABLE_ENTITY)
+                        raise EngineException(
+                            "Error at df[id='{}']:virtual-link-profile[id='{}']:virtual-link-"
+                            "protocol-data You cannot set a virtual-link-protocol-data "
+                            "when mgmt-network is True".format(df["id"], vlp["id"]),
+                            http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
+                        )
 
     @staticmethod
     def validate_vnf_profiles_vnfd_id(indata):
@@ -903,16 +1143,20 @@ class NsdTopic(DescriptorTopic):
             for vnf_profile in get_iterable(df.get("vnf-profile")):
                 vnfd_id = vnf_profile.get("vnfd-id")
                 if vnfd_id and vnfd_id not in all_vnfd_ids:
-                    raise EngineException("Error at df[id='{}']:vnf_profile[id='{}']:vnfd-id='{}' "
-                                          "does not match any vnfd-id".format(df["id"], vnf_profile["id"], vnfd_id),
-                                          http_code=HTTPStatus.UNPROCESSABLE_ENTITY)
+                    raise EngineException(
+                        "Error at df[id='{}']:vnf_profile[id='{}']:vnfd-id='{}' "
+                        "does not match any vnfd-id".format(
+                            df["id"], vnf_profile["id"], vnfd_id
+                        ),
+                        http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
+                    )
 
     def _validate_input_edit(self, indata, content, force=False):
         # not needed to validate with pyangbind because it will be validated at check_conflict_on_edit
         """
         indata looks as follows:
-            - In the new case (conformant) 
-                {'nsdOperationalState': 'DISABLED', 'userDefinedData': {'id': 'string23', 
+            - In the new case (conformant)
+                {'nsdOperationalState': 'DISABLED', 'userDefinedData': {'id': 'string23',
                 '_id': 'c6ddc544-cede-4b94-9ebe-be07b298a3c1', 'name': 'simon46'}}
             - In the old case (backwards-compatible)
                 {'id': 'string23', '_id': 'c6ddc544-cede-4b94-9ebe-be07b298a3c1', 'name': 'simon46'}
@@ -924,24 +1168,37 @@ class NsdTopic(DescriptorTopic):
             if indata["nsdOperationalState"] in ("ENABLED", "DISABLED"):
                 indata["_admin"]["operationalState"] = indata.pop("nsdOperationalState")
             else:
-                raise EngineException("State '{}' is not a valid operational state"
-                                      .format(indata["nsdOperationalState"]),
-                                      http_code=HTTPStatus.BAD_REQUEST)
-
-        # In the case of user defined data, we need to put the data in the root of the object 
+                raise EngineException(
+                    "State '{}' is not a valid operational state".format(
+                        indata["nsdOperationalState"]
+                    ),
+                    http_code=HTTPStatus.BAD_REQUEST,
+                )
+
+        # In the case of user defined data, we need to put the data in the root of the object
         # to preserve current expected behaviour
         if "userDefinedData" in indata:
             data = indata.pop("userDefinedData")
             if type(data) == dict:
                 indata["_admin"]["userDefinedData"] = data
             else:
-                raise EngineException("userDefinedData should be an object, but is '{}' instead"
-                                      .format(type(data)),
-                                      http_code=HTTPStatus.BAD_REQUEST)
-        if ("operationalState" in indata["_admin"] and
-                content["_admin"]["operationalState"] == indata["_admin"]["operationalState"]):
-            raise EngineException("nsdOperationalState already {}".format(content["_admin"]["operationalState"]),
-                                  http_code=HTTPStatus.CONFLICT)
+                raise EngineException(
+                    "userDefinedData should be an object, but is '{}' instead".format(
+                        type(data)
+                    ),
+                    http_code=HTTPStatus.BAD_REQUEST,
+                )
+        if (
+            "operationalState" in indata["_admin"]
+            and content["_admin"]["operationalState"]
+            == indata["_admin"]["operationalState"]
+        ):
+            raise EngineException(
+                "nsdOperationalState already {}".format(
+                    content["_admin"]["operationalState"]
+                ),
+                http_code=HTTPStatus.CONFLICT,
+            )
         return indata
 
     def _check_descriptor_dependencies(self, session, descriptor):
@@ -968,8 +1225,11 @@ class NsdTopic(DescriptorTopic):
                 query_filter["id"] = vnfd_id
                 vnf_list = self.db.get_list("vnfds", query_filter)
                 if not vnf_list:
-                    raise EngineException("Descriptor error at 'vnfd-id'='{}' references a non "
-                                          "existing vnfd".format(vnfd_id), http_code=HTTPStatus.CONFLICT)
+                    raise EngineException(
+                        "Descriptor error at 'vnfd-id'='{}' references a non "
+                        "existing vnfd".format(vnfd_id),
+                        http_code=HTTPStatus.CONFLICT,
+                    )
                 vnfds_index[vnfd_id] = vnf_list[0]
         return vnfds_index
 
@@ -979,22 +1239,32 @@ class NsdTopic(DescriptorTopic):
             vnfd = vnfds_index.get(vnf_profile["vnfd-id"])
             all_vnfd_ext_cpds = set()
             for ext_cpd in get_iterable(vnfd.get("ext-cpd")):
-                if ext_cpd.get('id'):
-                    all_vnfd_ext_cpds.add(ext_cpd.get('id'))
+                if ext_cpd.get("id"):
+                    all_vnfd_ext_cpds.add(ext_cpd.get("id"))
 
-            for virtual_link in get_iterable(vnf_profile.get("virtual-link-connectivity")):
+            for virtual_link in get_iterable(
+                vnf_profile.get("virtual-link-connectivity")
+            ):
                 for vl_cpd in get_iterable(virtual_link.get("constituent-cpd-id")):
-                    vl_cpd_id = vl_cpd.get('constituent-cpd-id')
+                    vl_cpd_id = vl_cpd.get("constituent-cpd-id")
                     if vl_cpd_id and vl_cpd_id not in all_vnfd_ext_cpds:
-                        raise EngineException("Error at df[id='{}']:vnf-profile[id='{}']:virtual-link-connectivity"
-                                              "[virtual-link-profile-id='{}']:constituent-cpd-id='{}' references a "
-                                              "non existing ext-cpd:id inside vnfd '{}'"
-                                              .format(df["id"], vnf_profile["id"],
-                                                      virtual_link["virtual-link-profile-id"], vl_cpd_id, vnfd["id"]),
-                                              http_code=HTTPStatus.UNPROCESSABLE_ENTITY)
+                        raise EngineException(
+                            "Error at df[id='{}']:vnf-profile[id='{}']:virtual-link-connectivity"
+                            "[virtual-link-profile-id='{}']:constituent-cpd-id='{}' references a "
+                            "non existing ext-cpd:id inside vnfd '{}'".format(
+                                df["id"],
+                                vnf_profile["id"],
+                                virtual_link["virtual-link-profile-id"],
+                                vl_cpd_id,
+                                vnfd["id"],
+                            ),
+                            http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
+                        )
 
     def check_conflict_on_edit(self, session, final_content, edit_content, _id):
-        final_content = super().check_conflict_on_edit(session, final_content, edit_content, _id)
+        final_content = super().check_conflict_on_edit(
+            session, final_content, edit_content, _id
+        )
 
         self._check_descriptor_dependencies(session, final_content)
 
@@ -1020,15 +1290,19 @@ class NsdTopic(DescriptorTopic):
         _filter = self._get_project_filter(session)
         _filter["nsd-id"] = _id
         if self.db.get_list("nsrs", _filter):
-            raise EngineException("There is at least one NS instance using this descriptor",
-                                  http_code=HTTPStatus.CONFLICT)
+            raise EngineException(
+                "There is at least one NS instance using this descriptor",
+                http_code=HTTPStatus.CONFLICT,
+            )
 
         # check NSD referenced by NST
         del _filter["nsd-id"]
         _filter["netslice-subnet.ANYINDEX.nsd-ref"] = descriptor_id
         if self.db.get_list("nsts", _filter):
-            raise EngineException("There is at least one NetSlice Template referencing this descriptor",
-                                  http_code=HTTPStatus.CONFLICT)
+            raise EngineException(
+                "There is at least one NetSlice Template referencing this descriptor",
+                http_code=HTTPStatus.CONFLICT,
+            )
 
     def sol005_projection(self, data):
         data["nsdOnboardingState"] = data["_admin"]["onboardingState"]
@@ -1037,7 +1311,9 @@ class NsdTopic(DescriptorTopic):
 
         links = {}
         links["self"] = {"href": "/nsd/v1/ns_descriptors/{}".format(data["_id"])}
-        links["nsd_content"] = {"href": "/nsd/v1/ns_descriptors/{}/nsd_content".format(data["_id"])}
+        links["nsd_content"] = {
+            "href": "/nsd/v1/ns_descriptors/{}/nsd_content".format(data["_id"])
+        }
         data["_links"] = links
 
         return super().sol005_projection(data)
@@ -1054,14 +1330,22 @@ class NstTopic(DescriptorTopic):
     def pyangbind_validation(self, item, data, force=False):
         try:
             mynst = nst_im()
-            pybindJSONDecoder.load_ietf_json({'nst': [data]}, None, None, obj=mynst,
-                                             path_helper=True, skip_unknown=force)
+            pybindJSONDecoder.load_ietf_json(
+                {"nst": [data]},
+                None,
+                None,
+                obj=mynst,
+                path_helper=True,
+                skip_unknown=force,
+            )
             out = pybindJSON.dumps(mynst, mode="ietf")
             desc_out = self._remove_envelop(yaml.safe_load(out))
             return desc_out
         except Exception as e:
-            raise EngineException("Error in pyangbind validation: {}".format(str(e)),
-                                  http_code=HTTPStatus.UNPROCESSABLE_ENTITY)
+            raise EngineException(
+                "Error in pyangbind validation: {}".format(str(e)),
+                http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
+            )
 
     @staticmethod
     def _remove_envelop(indata=None):
@@ -1069,14 +1353,20 @@ class NstTopic(DescriptorTopic):
             return {}
         clean_indata = indata
 
-        if clean_indata.get('nst'):
-            if not isinstance(clean_indata['nst'], list) or len(clean_indata['nst']) != 1:
+        if clean_indata.get("nst"):
+            if (
+                not isinstance(clean_indata["nst"], list)
+                or len(clean_indata["nst"]) != 1
+            ):
                 raise EngineException("'nst' must be a list only one element")
-            clean_indata = clean_indata['nst'][0]
-        elif clean_indata.get('nst:nst'):
-            if not isinstance(clean_indata['nst:nst'], list) or len(clean_indata['nst:nst']) != 1:
+            clean_indata = clean_indata["nst"][0]
+        elif clean_indata.get("nst:nst"):
+            if (
+                not isinstance(clean_indata["nst:nst"], list)
+                or len(clean_indata["nst:nst"]) != 1
+            ):
                 raise EngineException("'nst:nst' must be a list only one element")
-            clean_indata = clean_indata['nst:nst'][0]
+            clean_indata = clean_indata["nst:nst"][0]
         return clean_indata
 
     def _validate_input_new(self, indata, storage_params, force=False):
@@ -1100,11 +1390,16 @@ class NstTopic(DescriptorTopic):
             filter_q = self._get_project_filter(session)
             filter_q["id"] = nsd_id
             if not self.db.get_list("nsds", filter_q):
-                raise EngineException("Descriptor error at 'netslice-subnet':'nsd-ref'='{}' references a non "
-                                      "existing nsd".format(nsd_id), http_code=HTTPStatus.CONFLICT)
+                raise EngineException(
+                    "Descriptor error at 'netslice-subnet':'nsd-ref'='{}' references a non "
+                    "existing nsd".format(nsd_id),
+                    http_code=HTTPStatus.CONFLICT,
+                )
 
     def check_conflict_on_edit(self, session, final_content, edit_content, _id):
-        final_content = super().check_conflict_on_edit(session, final_content, edit_content, _id)
+        final_content = super().check_conflict_on_edit(
+            session, final_content, edit_content, _id
+        )
 
         self._check_descriptor_dependencies(session, final_content)
         return final_content
@@ -1125,8 +1420,10 @@ class NstTopic(DescriptorTopic):
         _filter = self._get_project_filter(session)
         _filter["_admin.nst-id"] = _id
         if self.db.get_list("nsis", _filter):
-            raise EngineException("there is at least one Netslice Instance using this descriptor",
-                                  http_code=HTTPStatus.CONFLICT)
+            raise EngineException(
+                "there is at least one Netslice Instance using this descriptor",
+                http_code=HTTPStatus.CONFLICT,
+            )
 
     def sol005_projection(self, data):
         data["onboardingState"] = data["_admin"]["onboardingState"]
@@ -1172,7 +1469,10 @@ class PduTopic(BaseTopic):
         _filter = self._get_project_filter(session)
         _filter["vdur.pdu-id"] = _id
         if self.db.get_list("vnfrs", _filter):
-            raise EngineException("There is at least one VNF instance using this PDU", http_code=HTTPStatus.CONFLICT)
+            raise EngineException(
+                "There is at least one VNF instance using this PDU",
+                http_code=HTTPStatus.CONFLICT,
+            )
 
 
 class VnfPkgOpTopic(BaseTopic):
@@ -1185,16 +1485,22 @@ class VnfPkgOpTopic(BaseTopic):
         BaseTopic.__init__(self, db, fs, msg, auth)
 
     def edit(self, session, _id, indata=None, kwargs=None, content=None):
-        raise EngineException("Method 'edit' not allowed for topic '{}'".format(self.topic),
-                              HTTPStatus.METHOD_NOT_ALLOWED)
+        raise EngineException(
+            "Method 'edit' not allowed for topic '{}'".format(self.topic),
+            HTTPStatus.METHOD_NOT_ALLOWED,
+        )
 
     def delete(self, session, _id, dry_run=False):
-        raise EngineException("Method 'delete' not allowed for topic '{}'".format(self.topic),
-                              HTTPStatus.METHOD_NOT_ALLOWED)
+        raise EngineException(
+            "Method 'delete' not allowed for topic '{}'".format(self.topic),
+            HTTPStatus.METHOD_NOT_ALLOWED,
+        )
 
     def delete_list(self, session, filter_q=None):
-        raise EngineException("Method 'delete_list' not allowed for topic '{}'".format(self.topic),
-                              HTTPStatus.METHOD_NOT_ALLOWED)
+        raise EngineException(
+            "Method 'delete_list' not allowed for topic '{}'".format(self.topic),
+            HTTPStatus.METHOD_NOT_ALLOWED,
+        )
 
     def new(self, rollback, session, indata=None, kwargs=None, headers=None):
         """
@@ -1222,7 +1528,9 @@ class VnfPkgOpTopic(BaseTopic):
                 juju_bundle = kdu.get("juju-bundle")
                 break
         else:
-            raise EngineException("Not found vnfd[id='{}']:kdu[name='{}']".format(vnfpkg_id, kdu_name))
+            raise EngineException(
+                "Not found vnfd[id='{}']:kdu[name='{}']".format(vnfpkg_id, kdu_name)
+            )
         if helm_chart:
             indata["helm-chart"] = helm_chart
             match = fullmatch(r"([^/]*)/([^/]*)", helm_chart)
@@ -1232,8 +1540,11 @@ class VnfPkgOpTopic(BaseTopic):
             match = fullmatch(r"([^/]*)/([^/]*)", juju_bundle)
             repo_name = match.group(1) if match else None
         else:
-            raise EngineException("Found neither 'helm-chart' nor 'juju-bundle' in vnfd[id='{}']:kdu[name='{}']"
-                                  .format(vnfpkg_id, kdu_name))
+            raise EngineException(
+                "Found neither 'helm-chart' nor 'juju-bundle' in vnfd[id='{}']:kdu[name='{}']".format(
+                    vnfpkg_id, kdu_name
+                )
+            )
         if repo_name:
             del filter_q["_id"]
             filter_q["name"] = repo_name
@@ -1257,9 +1568,11 @@ class VnfPkgOpTopic(BaseTopic):
             "links": {
                 "self": "/osm/vnfpkgm/v1/vnfpkg_op_occs/" + vnfpkgop_id,
                 "vnfpkg": "/osm/vnfpkgm/v1/vnf_packages/" + vnfpkg_id,
-            }
+            },
         }
-        self.format_on_new(vnfpkgop_desc, session["project_id"], make_public=session["public"])
+        self.format_on_new(
+            vnfpkgop_desc, session["project_id"], make_public=session["public"]
+        )
         ctime = vnfpkgop_desc["_admin"]["created"]
         vnfpkgop_desc["statusEnteredTime"] = ctime
         vnfpkgop_desc["startTime"] = ctime
index e0c25e5..1bc9171 100644 (file)
 # limitations under the License.
 
 import logging
+
 # import yaml
-from osm_common import dbmongo, dbmemory, fslocal, fsmongo, msglocal, msgkafka, version as common_version
+from osm_common import (
+    dbmongo,
+    dbmemory,
+    fslocal,
+    fsmongo,
+    msglocal,
+    msgkafka,
+    version as common_version,
+)
 from osm_common.dbbase import DbException
 from osm_common.fsbase import FsException
 from osm_common.msgbase import MsgException
@@ -29,12 +38,24 @@ from osm_nbi.admin_topics import VimAccountTopic, WimAccountTopic, SdnTopic
 from osm_nbi.admin_topics import K8sClusterTopic, K8sRepoTopic, OsmRepoTopic
 from osm_nbi.admin_topics import VcaTopic
 from osm_nbi.admin_topics import UserTopicAuth, ProjectTopicAuth, RoleTopicAuth
-from osm_nbi.descriptor_topics import VnfdTopic, NsdTopic, PduTopic, NstTopic, VnfPkgOpTopic
-from osm_nbi.instance_topics import NsrTopic, VnfrTopic, NsLcmOpTopic, NsiTopic, NsiLcmOpTopic
+from osm_nbi.descriptor_topics import (
+    VnfdTopic,
+    NsdTopic,
+    PduTopic,
+    NstTopic,
+    VnfPkgOpTopic,
+)
+from osm_nbi.instance_topics import (
+    NsrTopic,
+    VnfrTopic,
+    NsLcmOpTopic,
+    NsiTopic,
+    NsiLcmOpTopic,
+)
 from osm_nbi.pmjobs_topics import PmJobsTopic
 from osm_nbi.subscription_topics import NslcmSubscriptionsTopic
 from base64 import b64encode
-from os import urandom   # , path
+from os import urandom  # , path
 from threading import Lock
 
 __author__ = "Alfonso Tierno <alfonso.tiernosepulveda@telefonica.com>"
@@ -57,9 +78,9 @@ class Engine(object):
         "vca": VcaTopic,
         "k8srepos": K8sRepoTopic,
         "osmrepos": OsmRepoTopic,
-        "users": UserTopicAuth,   # Valid for both internal and keystone authentication backends
-        "projects": ProjectTopicAuth,   # Valid for both internal and keystone authentication backends
-        "roles": RoleTopicAuth,   # Valid for both internal and keystone authentication backends
+        "users": UserTopicAuth,  # Valid for both internal and keystone authentication backends
+        "projects": ProjectTopicAuth,  # Valid for both internal and keystone authentication backends
+        "roles": RoleTopicAuth,  # Valid for both internal and keystone authentication backends
         "nsis": NsiTopic,
         "nsilcmops": NsiLcmOpTopic,
         "vnfpkgops": VnfPkgOpTopic,
@@ -97,8 +118,11 @@ class Engine(object):
         self.config = config
         # check right version of common
         if versiontuple(common_version) < versiontuple(min_common_version):
-            raise EngineException("Not compatible osm/common version '{}'. Needed '{}' or higher".format(
-                common_version, min_common_version))
+            raise EngineException(
+                "Not compatible osm/common version '{}'. Needed '{}' or higher".format(
+                    common_version, min_common_version
+                )
+            )
 
         try:
             if not self.db:
@@ -109,8 +133,11 @@ class Engine(object):
                     self.db = dbmemory.DbMemory()
                     self.db.db_connect(config["database"])
                 else:
-                    raise EngineException("Invalid configuration param '{}' at '[database]':'driver'".format(
-                        config["database"]["driver"]))
+                    raise EngineException(
+                        "Invalid configuration param '{}' at '[database]':'driver'".format(
+                            config["database"]["driver"]
+                        )
+                    )
             if not self.fs:
                 if config["storage"]["driver"] == "local":
                     self.fs = fslocal.FsLocal()
@@ -119,8 +146,11 @@ class Engine(object):
                     self.fs = fsmongo.FsMongo()
                     self.fs.fs_connect(config["storage"])
                 else:
-                    raise EngineException("Invalid configuration param '{}' at '[storage]':'driver'".format(
-                        config["storage"]["driver"]))
+                    raise EngineException(
+                        "Invalid configuration param '{}' at '[storage]':'driver'".format(
+                            config["storage"]["driver"]
+                        )
+                    )
             if not self.msg:
                 if config["message"]["driver"] == "local":
                     self.msg = msglocal.MsgLocal()
@@ -129,18 +159,30 @@ class Engine(object):
                     self.msg = msgkafka.MsgKafka()
                     self.msg.connect(config["message"])
                 else:
-                    raise EngineException("Invalid configuration param '{}' at '[message]':'driver'".format(
-                        config["message"]["driver"]))
+                    raise EngineException(
+                        "Invalid configuration param '{}' at '[message]':'driver'".format(
+                            config["message"]["driver"]
+                        )
+                    )
             if not self.authconn:
                 if config["authentication"]["backend"] == "keystone":
-                    self.authconn = AuthconnKeystone(config["authentication"], self.db,
-                                                     self.authenticator.role_permissions)
+                    self.authconn = AuthconnKeystone(
+                        config["authentication"],
+                        self.db,
+                        self.authenticator.role_permissions,
+                    )
                 elif config["authentication"]["backend"] == "tacacs":
-                    self.authconn = AuthconnTacacs(config["authentication"], self.db,
-                                                   self.authenticator.role_permissions)
+                    self.authconn = AuthconnTacacs(
+                        config["authentication"],
+                        self.db,
+                        self.authenticator.role_permissions,
+                    )
                 else:
-                    self.authconn = AuthconnInternal(config["authentication"], self.db,
-                                                     self.authenticator.role_permissions)
+                    self.authconn = AuthconnInternal(
+                        config["authentication"],
+                        self.db,
+                        self.authenticator.role_permissions,
+                    )
             # if not self.operations:
             #     if "resources_to_operations" in config["rbac"]:
             #         resources_to_operations_file = config["rbac"]["resources_to_operations"]
@@ -171,10 +213,15 @@ class Engine(object):
             for topic, topic_class in self.map_from_topic_to_class.items():
                 # if self.auth and topic_class in (UserTopicAuth, ProjectTopicAuth):
                 #     self.map_topic[topic] = topic_class(self.db, self.fs, self.msg, self.auth)
-                self.map_topic[topic] = topic_class(self.db, self.fs, self.msg, self.authconn)
-            
-            self.map_topic["pm_jobs"] = PmJobsTopic(self.db, config["prometheus"].get("host"),
-                                                    config["prometheus"].get("port"))
+                self.map_topic[topic] = topic_class(
+                    self.db, self.fs, self.msg, self.authconn
+                )
+
+            self.map_topic["pm_jobs"] = PmJobsTopic(
+                self.db,
+                config["prometheus"].get("host"),
+                config["prometheus"].get("port"),
+            )
         except (DbException, FsException, MsgException) as e:
             raise EngineException(str(e), http_code=e.http_code)
 
@@ -190,7 +237,9 @@ class Engine(object):
         except (DbException, FsException, MsgException) as e:
             raise EngineException(str(e), http_code=e.http_code)
 
-    def new_item(self, rollback, session, topic, indata=None, kwargs=None, headers=None):
+    def new_item(
+        self, rollback, session, topic, indata=None, kwargs=None, headers=None
+    ):
         """
        Creates a new entry in the database. For nsds and vnfds it creates an almost empty DISABLED entry,
         that must be completed with a call to method upload_content
@@ -203,7 +252,9 @@ class Engine(object):
         :return: _id: identity of the inserted data.
         """
         if topic not in self.map_topic:
-            raise EngineException("Unknown topic {}!!!".format(topic), HTTPStatus.INTERNAL_SERVER_ERROR)
+            raise EngineException(
+                "Unknown topic {}!!!".format(topic), HTTPStatus.INTERNAL_SERVER_ERROR
+            )
         with self.write_lock:
             return self.map_topic[topic].new(rollback, session, indata, kwargs, headers)
 
@@ -219,9 +270,13 @@ class Engine(object):
         :return: _id: identity of the inserted data.
         """
         if topic not in self.map_topic:
-            raise EngineException("Unknown topic {}!!!".format(topic), HTTPStatus.INTERNAL_SERVER_ERROR)
+            raise EngineException(
+                "Unknown topic {}!!!".format(topic), HTTPStatus.INTERNAL_SERVER_ERROR
+            )
         with self.write_lock:
-            return self.map_topic[topic].upload_content(session, _id, indata, kwargs, headers)
+            return self.map_topic[topic].upload_content(
+                session, _id, indata, kwargs, headers
+            )
 
     def get_item_list(self, session, topic, filter_q=None, api_req=False):
         """
@@ -233,7 +288,9 @@ class Engine(object):
        :return: The list, it can be empty if none match the filter_q.
         """
         if topic not in self.map_topic:
-            raise EngineException("Unknown topic {}!!!".format(topic), HTTPStatus.INTERNAL_SERVER_ERROR)
+            raise EngineException(
+                "Unknown topic {}!!!".format(topic), HTTPStatus.INTERNAL_SERVER_ERROR
+            )
         return self.map_topic[topic].list(session, filter_q, api_req)
 
     def get_item(self, session, topic, _id, api_req=False):
@@ -246,7 +303,9 @@ class Engine(object):
         :return: dictionary, raise exception if not found.
         """
         if topic not in self.map_topic:
-            raise EngineException("Unknown topic {}!!!".format(topic), HTTPStatus.INTERNAL_SERVER_ERROR)
+            raise EngineException(
+                "Unknown topic {}!!!".format(topic), HTTPStatus.INTERNAL_SERVER_ERROR
+            )
         return self.map_topic[topic].show(session, _id, api_req)
 
     def get_file(self, session, topic, _id, path=None, accept_header=None):
@@ -260,7 +319,9 @@ class Engine(object):
         :return: opened file plus Accept format or raises an exception
         """
         if topic not in self.map_topic:
-            raise EngineException("Unknown topic {}!!!".format(topic), HTTPStatus.INTERNAL_SERVER_ERROR)
+            raise EngineException(
+                "Unknown topic {}!!!".format(topic), HTTPStatus.INTERNAL_SERVER_ERROR
+            )
         return self.map_topic[topic].get_file(session, _id, path, accept_header)
 
     def del_item_list(self, session, topic, _filter=None):
@@ -272,7 +333,9 @@ class Engine(object):
        :return: The deleted list, it can be empty if none match the _filter.
         """
         if topic not in self.map_topic:
-            raise EngineException("Unknown topic {}!!!".format(topic), HTTPStatus.INTERNAL_SERVER_ERROR)
+            raise EngineException(
+                "Unknown topic {}!!!".format(topic), HTTPStatus.INTERNAL_SERVER_ERROR
+            )
         with self.write_lock:
             return self.map_topic[topic].delete_list(session, _filter)
 
@@ -288,7 +351,9 @@ class Engine(object):
         :return: dictionary with deleted item _id. It raises exception if not found.
         """
         if topic not in self.map_topic:
-            raise EngineException("Unknown topic {}!!!".format(topic), HTTPStatus.INTERNAL_SERVER_ERROR)
+            raise EngineException(
+                "Unknown topic {}!!!".format(topic), HTTPStatus.INTERNAL_SERVER_ERROR
+            )
         with self.write_lock:
             return self.map_topic[topic].delete(session, _id, not_send_msg=not_send_msg)
 
@@ -303,38 +368,47 @@ class Engine(object):
         :return: dictionary with edited item _id, raise exception if not found.
         """
         if topic not in self.map_topic:
-            raise EngineException("Unknown topic {}!!!".format(topic), HTTPStatus.INTERNAL_SERVER_ERROR)
+            raise EngineException(
+                "Unknown topic {}!!!".format(topic), HTTPStatus.INTERNAL_SERVER_ERROR
+            )
         with self.write_lock:
             return self.map_topic[topic].edit(session, _id, indata, kwargs)
 
     def upgrade_db(self, current_version, target_version):
         if target_version not in self.map_target_version_to_int.keys():
-            raise EngineException("Cannot upgrade to version '{}' with this version of code".format(target_version),
-                                  http_code=HTTPStatus.INTERNAL_SERVER_ERROR)
+            raise EngineException(
+                "Cannot upgrade to version '{}' with this version of code".format(
+                    target_version
+                ),
+                http_code=HTTPStatus.INTERNAL_SERVER_ERROR,
+            )
 
         if current_version == target_version:
             return
-        
+
         target_version_int = self.map_target_version_to_int[target_version]
 
         if not current_version:
             # create database version
             serial = urandom(32)
             version_data = {
-                "_id": "version",               # Always "version"
-                "version_int": 1000,            # version number
-                "version": "1.0",               # version text
-                "date": "2018-10-25",           # version date
+                "_id": "version",  # Always "version"
+                "version_int": 1000,  # version number
+                "version": "1.0",  # version text
+                "date": "2018-10-25",  # version date
                 "description": "added serial",  # changes in this version
-                'status': "ENABLED",            # ENABLED, DISABLED (migration in process), ERROR,
-                'serial': b64encode(serial)
+                "status": "ENABLED",  # ENABLED, DISABLED (migration in process), ERROR,
+                "serial": b64encode(serial),
             }
             self.db.create("admin", version_data)
             self.db.set_secret_key(serial)
             current_version = "1.0"
-            
-        if current_version in ("1.0", "1.1") and target_version_int >= self.map_target_version_to_int["1.2"]:
-            if self.config['authentication']['backend'] == "internal":
+
+        if (
+            current_version in ("1.0", "1.1")
+            and target_version_int >= self.map_target_version_to_int["1.2"]
+        ):
+            if self.config["authentication"]["backend"] == "internal":
                 self.db.del_list("roles")
 
             version_data = {
@@ -342,14 +416,14 @@ class Engine(object):
                 "version_int": 1002,
                 "version": "1.2",
                 "date": "2019-06-11",
-                "description": "set new format for roles_operations"
+                "description": "set new format for roles_operations",
             }
 
             self.db.set_one("admin", {"_id": "version"}, version_data)
             current_version = "1.2"
             # TODO add future migrations here
 
-    def init_db(self, target_version='1.0'):
+    def init_db(self, target_version="1.0"):
         """
        Init database if empty. If not empty, it checks the database version and migrates if needed
         If empty, it creates a new user admin/admin at 'users' and a new entry at 'version'
@@ -357,11 +431,15 @@ class Engine(object):
         :return: None if ok, exception if error or if the version is different.
         """
 
-        version_data = self.db.get_one("admin", {"_id": "version"}, fail_on_empty=False, fail_on_more=True)
+        version_data = self.db.get_one(
+            "admin", {"_id": "version"}, fail_on_empty=False, fail_on_more=True
+        )
         # check database status is ok
-        if version_data and version_data.get("status") != 'ENABLED':
-            raise EngineException("Wrong database status '{}'".format(
-                version_data["status"]), HTTPStatus.INTERNAL_SERVER_ERROR)
+        if version_data and version_data.get("status") != "ENABLED":
+            raise EngineException(
+                "Wrong database status '{}'".format(version_data["status"]),
+                HTTPStatus.INTERNAL_SERVER_ERROR,
+            )
 
         # check version
         db_version = None if not version_data else version_data.get("version")
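
tox.ini is among the 28 files touched by this change, so the formatting is presumably enforced by a check as well. A minimal sketch of such a gate, assuming only that Black is available on the PATH (the actual tox environment is not shown in this section):

    import subprocess
    import sys

    # "black --check" exits 0 when every file already matches the style and 1 when
    # some file would be reformatted; "--diff" prints the changes it would make.
    result = subprocess.run(
        ["black", "--check", "--diff", "osm_nbi/"], capture_output=True, text=True
    )
    if result.returncode != 0:
        sys.stdout.write(result.stdout)
        sys.exit("osm_nbi is not formatted with Black")
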
index 316e15b..2d5a929 100644 (file)
@@ -132,8 +132,12 @@ html_nsilcmop_body = """
 </form>
 """
 
-html_vnfpackage_body = """<a href="/osm/vnfpkgm/v1/vnf_packages/{id}/artifacts">Artifacts </a>"""
-html_nspackage_body = """<a href="/osm/nsd/v1/ns_descriptors/{id}/artifacts">Artifacts </a>"""
+html_vnfpackage_body = (
+    """<a href="/osm/vnfpkgm/v1/vnf_packages/{id}/artifacts">Artifacts </a>"""
+)
+html_nspackage_body = (
+    """<a href="/osm/nsd/v1/ns_descriptors/{id}/artifacts">Artifacts </a>"""
+)
 
 
 def format(data, request, response, toke_info):
@@ -144,20 +148,35 @@ def format(data, request, response, toke_info):
     :param response: cherrypy response
    :return: string with the html response
     """
-    response.headers["Content-Type"] = 'text/html'
+    response.headers["Content-Type"] = "text/html"
     if response.status == HTTPStatus.UNAUTHORIZED.value:
-        if response.headers.get("WWW-Authenticate") and request.config.get("auth.allow_basic_authentication"):
-            response.headers["WWW-Authenticate"] = "Basic" + response.headers["WWW-Authenticate"][6:]
+        if response.headers.get("WWW-Authenticate") and request.config.get(
+            "auth.allow_basic_authentication"
+        ):
+            response.headers["WWW-Authenticate"] = (
+                "Basic" + response.headers["WWW-Authenticate"][6:]
+            )
             return
         else:
             return html_auth2.format(error=data)
     if request.path_info in ("/version", "/system"):
-        return "<pre>" + yaml.safe_dump(data, explicit_start=False, indent=4, default_flow_style=False) + "</pre>"
+        return (
+            "<pre>"
+            + yaml.safe_dump(
+                data, explicit_start=False, indent=4, default_flow_style=False
+            )
+            + "</pre>"
+        )
     body = html_body.format(item=html_escape(request.path_info))
     if response.status and response.status > 202:
         # input request.path_info (URL) can contain XSS that are translated into output error detail
-        body += html_body_error.format(html_escape(
-            yaml.safe_dump(data, explicit_start=True, indent=4, default_flow_style=False)))
+        body += html_body_error.format(
+            html_escape(
+                yaml.safe_dump(
+                    data, explicit_start=True, indent=4, default_flow_style=False
+                )
+            )
+        )
     elif isinstance(data, (list, tuple)):
         if request.path_info == "/vnfpkgm/v1/vnf_packages":
             body += html_upload_body.format(request.path_info + "_content", "VNFD")
@@ -170,29 +189,42 @@ def format(data, request, response, toke_info):
                 data_id = k.pop("_id", None)
             elif isinstance(k, str):
                 data_id = k
-            body += '<p> <a href="/osm/{url}/{id}">{id}</a>: {t} </p>'.format(url=request.path_info, id=data_id,
-                                                                              t=html_escape(str(k)))
+            body += '<p> <a href="/osm/{url}/{id}">{id}</a>: {t} </p>'.format(
+                url=request.path_info, id=data_id, t=html_escape(str(k))
+            )
     elif isinstance(data, dict):
         if "Location" in response.headers:
             body += '<a href="{}"> show </a>'.format(response.headers["Location"])
         else:
-            _id = request.path_info[request.path_info.rfind("/")+1:]
-            body += '<a href="/osm/{}?METHOD=DELETE"> <img src="/osm/static/delete.png" height="25" width="25"> </a>'\
-                .format(request.path_info)
-            if request.path_info.startswith("/nslcm/v1/ns_instances_content/") or \
-                    request.path_info.startswith("/nslcm/v1/ns_instances/"):
+            _id = request.path_info[request.path_info.rfind("/") + 1 :]
+            body += '<a href="/osm/{}?METHOD=DELETE"> <img src="/osm/static/delete.png" height="25" width="25"> </a>'.format(
+                request.path_info
+            )
+            if request.path_info.startswith(
+                "/nslcm/v1/ns_instances_content/"
+            ) or request.path_info.startswith("/nslcm/v1/ns_instances/"):
                 body += html_nslcmop_body.format(id=_id)
-            elif request.path_info.startswith("/nsilcm/v1/netslice_instances_content/") or \
-                    request.path_info.startswith("/nsilcm/v1/netslice_instances/"):
+            elif request.path_info.startswith(
+                "/nsilcm/v1/netslice_instances_content/"
+            ) or request.path_info.startswith("/nsilcm/v1/netslice_instances/"):
                 body += html_nsilcmop_body.format(id=_id)
-            elif request.path_info.startswith("/vnfpkgm/v1/vnf_packages/") or \
-                    request.path_info.startswith("/vnfpkgm/v1/vnf_packages_content/"):
+            elif request.path_info.startswith(
+                "/vnfpkgm/v1/vnf_packages/"
+            ) or request.path_info.startswith("/vnfpkgm/v1/vnf_packages_content/"):
                 body += html_vnfpackage_body.format(id=_id)
-            elif request.path_info.startswith("/nsd/v1/ns_descriptors/") or \
-                    request.path_info.startswith("/nsd/v1/ns_descriptors_content/"):
+            elif request.path_info.startswith(
+                "/nsd/v1/ns_descriptors/"
+            ) or request.path_info.startswith("/nsd/v1/ns_descriptors_content/"):
                 body += html_nspackage_body.format(id=_id)
-        body += "<pre>" + html_escape(yaml.safe_dump(data, explicit_start=True, indent=4, default_flow_style=False)) + \
-                "</pre>"
+        body += (
+            "<pre>"
+            + html_escape(
+                yaml.safe_dump(
+                    data, explicit_start=True, indent=4, default_flow_style=False
+                )
+            )
+            + "</pre>"
+        )
     elif data is None:
         if request.method == "DELETE" or "METHOD=DELETE" in request.query_string:
             body += "<pre> deleted </pre>"
index 2264c69..87b186e 100644 (file)
@@ -18,15 +18,30 @@ from uuid import uuid4
 from http import HTTPStatus
 from time import time
 from copy import copy, deepcopy
-from osm_nbi.validation import validate_input, ValidationError, ns_instantiate, ns_terminate, ns_action, ns_scale,\
-    nsi_instantiate
-from osm_nbi.base_topic import BaseTopic, EngineException, get_iterable, deep_get, increment_ip_mac
+from osm_nbi.validation import (
+    validate_input,
+    ValidationError,
+    ns_instantiate,
+    ns_terminate,
+    ns_action,
+    ns_scale,
+    nsi_instantiate,
+)
+from osm_nbi.base_topic import (
+    BaseTopic,
+    EngineException,
+    get_iterable,
+    deep_get,
+    increment_ip_mac,
+)
 from yaml import safe_dump
 from osm_common.dbbase import DbException
 from osm_common.msgbase import MsgException
 from osm_common.fsbase import FsException
 from osm_nbi import utils
-from re import match  # For checking that additional parameter names are valid Jinja2 identifiers
+from re import (
+    match,
+)  # For checking that additional parameter names are valid Jinja2 identifiers
 
 __author__ = "Alfonso Tierno <alfonso.tiernosepulveda@telefonica.com>"
 
@@ -51,8 +66,12 @@ class NsrTopic(BaseTopic):
             return
         nsd_id = descriptor["nsdId"]
         if not self.get_item_list(session, "nsds", {"id": nsd_id}):
-            raise EngineException("Descriptor error at nsdId='{}' references a non exist nsd".format(nsd_id),
-                                  http_code=HTTPStatus.CONFLICT)
+            raise EngineException(
+                "Descriptor error at nsdId='{}' references a non exist nsd".format(
+                    nsd_id
+                ),
+                http_code=HTTPStatus.CONFLICT,
+            )
 
     @staticmethod
     def format_on_new(content, project_id=None, make_public=False):
@@ -72,9 +91,11 @@ class NsrTopic(BaseTopic):
             return
         nsr = db_content
         if nsr["_admin"].get("nsState") == "INSTANTIATED":
-            raise EngineException("nsr '{}' cannot be deleted because it is in 'INSTANTIATED' state. "
-                                  "Launch 'terminate' operation first; or force deletion".format(_id),
-                                  http_code=HTTPStatus.CONFLICT)
+            raise EngineException(
+                "nsr '{}' cannot be deleted because it is in 'INSTANTIATED' state. "
+                "Launch 'terminate' operation first; or force deletion".format(_id),
+                http_code=HTTPStatus.CONFLICT,
+            )
 
     def delete_extra(self, session, _id, db_content, not_send_msg=None):
         """
@@ -91,28 +112,42 @@ class NsrTopic(BaseTopic):
         self.db.del_list("vnfrs", {"nsr-id-ref": _id})
 
         # set all used pdus as free
-        self.db.set_list("pdus", {"_admin.usage.nsr_id": _id},
-                         {"_admin.usageState": "NOT_IN_USE", "_admin.usage": None})
+        self.db.set_list(
+            "pdus",
+            {"_admin.usage.nsr_id": _id},
+            {"_admin.usageState": "NOT_IN_USE", "_admin.usage": None},
+        )
 
         # Set NSD usageState
         nsr = db_content
         used_nsd_id = nsr.get("nsd-id")
         if used_nsd_id:
             # check if used by another NSR
-            nsrs_list = self.db.get_one("nsrs", {"nsd-id": used_nsd_id},
-                                        fail_on_empty=False, fail_on_more=False)
+            nsrs_list = self.db.get_one(
+                "nsrs", {"nsd-id": used_nsd_id}, fail_on_empty=False, fail_on_more=False
+            )
             if not nsrs_list:
-                self.db.set_one("nsds", {"_id": used_nsd_id}, {"_admin.usageState": "NOT_IN_USE"})
+                self.db.set_one(
+                    "nsds", {"_id": used_nsd_id}, {"_admin.usageState": "NOT_IN_USE"}
+                )
 
         # Set VNFD usageState
         used_vnfd_id_list = nsr.get("vnfd-id")
         if used_vnfd_id_list:
             for used_vnfd_id in used_vnfd_id_list:
                 # check if used by another NSR
-                nsrs_list = self.db.get_one("nsrs", {"vnfd-id": used_vnfd_id},
-                                            fail_on_empty=False, fail_on_more=False)
+                nsrs_list = self.db.get_one(
+                    "nsrs",
+                    {"vnfd-id": used_vnfd_id},
+                    fail_on_empty=False,
+                    fail_on_more=False,
+                )
                 if not nsrs_list:
-                    self.db.set_one("vnfds", {"_id": used_vnfd_id}, {"_admin.usageState": "NOT_IN_USE"})
+                    self.db.set_one(
+                        "vnfds",
+                        {"_id": used_vnfd_id},
+                        {"_admin.usageState": "NOT_IN_USE"},
+                    )
 
         # delete extra ro_nsrs used for internal RO module
         self.db.del_one("ro_nsrs", q_filter={"_id": _id}, fail_on_empty=False)
@@ -125,7 +160,9 @@ class NsrTopic(BaseTopic):
         return formated_request
 
     @staticmethod
-    def _format_additional_params(ns_request, member_vnf_index=None, vdu_id=None, kdu_name=None, descriptor=None):
+    def _format_additional_params(
+        ns_request, member_vnf_index=None, vdu_id=None, kdu_name=None, descriptor=None
+    ):
         """
         Get and format user additional params for NS or VNF
         :param ns_request: User instantiation additional parameters
@@ -139,15 +176,30 @@ class NsrTopic(BaseTopic):
             additional_params = copy(ns_request.get("additionalParamsForNs"))
             where_ = "additionalParamsForNs"
         elif ns_request.get("additionalParamsForVnf"):
-            where_ = "additionalParamsForVnf[member-vnf-index={}]".format(member_vnf_index)
-            item = next((x for x in ns_request["additionalParamsForVnf"] if x["member-vnf-index"] == member_vnf_index),
-                        None)
+            where_ = "additionalParamsForVnf[member-vnf-index={}]".format(
+                member_vnf_index
+            )
+            item = next(
+                (
+                    x
+                    for x in ns_request["additionalParamsForVnf"]
+                    if x["member-vnf-index"] == member_vnf_index
+                ),
+                None,
+            )
             if item:
                 if not vdu_id and not kdu_name:
                     other_params = item
                 additional_params = copy(item.get("additionalParams")) or {}
                 if vdu_id and item.get("additionalParamsForVdu"):
-                    item_vdu = next((x for x in item["additionalParamsForVdu"] if x["vdu_id"] == vdu_id), None)
+                    item_vdu = next(
+                        (
+                            x
+                            for x in item["additionalParamsForVdu"]
+                            if x["vdu_id"] == vdu_id
+                        ),
+                        None,
+                    )
                     other_params = item_vdu
                     if item_vdu and item_vdu.get("additionalParams"):
                         where_ += ".additionalParamsForVdu[vdu_id={}]".format(vdu_id)
@@ -155,24 +207,44 @@ class NsrTopic(BaseTopic):
                 if kdu_name:
                     additional_params = {}
                     if item.get("additionalParamsForKdu"):
-                        item_kdu = next((x for x in item["additionalParamsForKdu"] if x["kdu_name"] == kdu_name), None)
+                        item_kdu = next(
+                            (
+                                x
+                                for x in item["additionalParamsForKdu"]
+                                if x["kdu_name"] == kdu_name
+                            ),
+                            None,
+                        )
                         other_params = item_kdu
                         if item_kdu and item_kdu.get("additionalParams"):
-                            where_ += ".additionalParamsForKdu[kdu_name={}]".format(kdu_name)
+                            where_ += ".additionalParamsForKdu[kdu_name={}]".format(
+                                kdu_name
+                            )
                             additional_params = item_kdu["additionalParams"]
 
         if additional_params:
             for k, v in additional_params.items():
                 # BEGIN Check that additional parameter names are valid Jinja2 identifiers if target is not Kdu
-                if not kdu_name and not match('^[a-zA-Z_][a-zA-Z0-9_]*$', k):
-                    raise EngineException("Invalid param name at {}:{}. Must contain only alphanumeric characters "
-                                          "and underscores, and cannot start with a digit"
-                                          .format(where_, k))
+                if not kdu_name and not match("^[a-zA-Z_][a-zA-Z0-9_]*$", k):
+                    raise EngineException(
+                        "Invalid param name at {}:{}. Must contain only alphanumeric characters "
+                        "and underscores, and cannot start with a digit".format(
+                            where_, k
+                        )
+                    )
                 # END Check that additional parameter names are valid Jinja2 identifiers
                 if not isinstance(k, str):
-                    raise EngineException("Invalid param at {}:{}. Only string keys are allowed".format(where_, k))
+                    raise EngineException(
+                        "Invalid param at {}:{}. Only string keys are allowed".format(
+                            where_, k
+                        )
+                    )
                 if "." in k or "$" in k:
-                    raise EngineException("Invalid param at {}:{}. Keys must not contain dots or $".format(where_, k))
+                    raise EngineException(
+                        "Invalid param at {}:{}. Keys must not contain dots or $".format(
+                            where_, k
+                        )
+                    )
                 if isinstance(v, (dict, tuple, list)):
                     additional_params[k] = "!!yaml " + safe_dump(v)
 
@@ -182,24 +254,46 @@ class NsrTopic(BaseTopic):
                 # TODO: check for cloud-init
                 if member_vnf_index:
                     initial_primitives = []
-                    if "lcm-operations-configuration" in df \
-                       and "operate-vnf-op-config" in df["lcm-operations-configuration"]:
-                        for config in df["lcm-operations-configuration"]["operate-vnf-op-config"].get("day1-2", []):
-                            for primitive in get_iterable(config.get("initial-config-primitive")):
+                    if (
+                        "lcm-operations-configuration" in df
+                        and "operate-vnf-op-config"
+                        in df["lcm-operations-configuration"]
+                    ):
+                        for config in df["lcm-operations-configuration"][
+                            "operate-vnf-op-config"
+                        ].get("day1-2", []):
+                            for primitive in get_iterable(
+                                config.get("initial-config-primitive")
+                            ):
                                 initial_primitives.append(primitive)
                 else:
-                    initial_primitives = deep_get(descriptor, ("ns-configuration", "initial-config-primitive"))
+                    initial_primitives = deep_get(
+                        descriptor, ("ns-configuration", "initial-config-primitive")
+                    )
 
                 for initial_primitive in get_iterable(initial_primitives):
                     for param in get_iterable(initial_primitive.get("parameter")):
-                        if param["value"].startswith("<") and param["value"].endswith(">"):
-                            if param["value"] in ("<rw_mgmt_ip>", "<VDU_SCALE_INFO>", "<ns_config_info>"):
+                        if param["value"].startswith("<") and param["value"].endswith(
+                            ">"
+                        ):
+                            if param["value"] in (
+                                "<rw_mgmt_ip>",
+                                "<VDU_SCALE_INFO>",
+                                "<ns_config_info>",
+                            ):
                                 continue
-                            if not additional_params or param["value"][1:-1] not in additional_params:
-                                raise EngineException("Parameter '{}' needed for vnfd[id={}]:day1-2 configuration:"
-                                                      "initial-config-primitive[name={}] not supplied".
-                                                      format(param["value"], descriptor["id"],
-                                                             initial_primitive["name"]))
+                            if (
+                                not additional_params
+                                or param["value"][1:-1] not in additional_params
+                            ):
+                                raise EngineException(
+                                    "Parameter '{}' needed for vnfd[id={}]:day1-2 configuration:"
+                                    "initial-config-primitive[name={}] not supplied".format(
+                                        param["value"],
+                                        descriptor["id"],
+                                        initial_primitive["name"],
+                                    )
+                                )
 
         return additional_params or None, other_params or None
 
@@ -233,7 +327,9 @@ class NsrTopic(BaseTopic):
 
             step = "filling nsr from input data"
             nsr_id = str(uuid4())
-            nsr_descriptor = self._create_nsr_descriptor_from_nsd(nsd, ns_request, nsr_id, session)
+            nsr_descriptor = self._create_nsr_descriptor_from_nsd(
+                nsd, ns_request, nsr_id, session
+            )
 
             # Create VNFRs
             needed_vnfds = {}
@@ -242,7 +338,11 @@ class NsrTopic(BaseTopic):
             for vnfp in vnf_profiles:
                 vnfd_id = vnfp.get("vnfd-id")
                 vnf_index = vnfp.get("id")
-                step = "getting vnfd id='{}' constituent-vnfd='{}' from database".format(vnfd_id, vnf_index)
+                step = (
+                    "getting vnfd id='{}' constituent-vnfd='{}' from database".format(
+                        vnfd_id, vnf_index
+                    )
+                )
                 if vnfd_id not in needed_vnfds:
                     vnfd = self._get_vnfd_from_db(vnfd_id, session)
                     needed_vnfds[vnfd_id] = vnfd
@@ -250,11 +350,22 @@ class NsrTopic(BaseTopic):
                 else:
                     vnfd = needed_vnfds[vnfd_id]
 
-                step = "filling vnfr  vnfd-id='{}' constituent-vnfd='{}'".format(vnfd_id, vnf_index)
-                vnfr_descriptor = self._create_vnfr_descriptor_from_vnfd(nsd, vnfd, vnfd_id, vnf_index, nsr_descriptor,
-                                                                         ns_request, ns_k8s_namespace)
+                step = "filling vnfr  vnfd-id='{}' constituent-vnfd='{}'".format(
+                    vnfd_id, vnf_index
+                )
+                vnfr_descriptor = self._create_vnfr_descriptor_from_vnfd(
+                    nsd,
+                    vnfd,
+                    vnfd_id,
+                    vnf_index,
+                    nsr_descriptor,
+                    ns_request,
+                    ns_k8s_namespace,
+                )
 
-                step = "creating vnfr vnfd-id='{}' constituent-vnfd='{}' at database".format(vnfd_id, vnf_index)
+                step = "creating vnfr vnfd-id='{}' constituent-vnfd='{}' at database".format(
+                    vnfd_id, vnf_index
+                )
                 self._add_vnfr_to_db(vnfr_descriptor, rollback, session)
                 nsr_descriptor["constituent-vnfr-ref"].append(vnfr_descriptor["id"])
 
@@ -265,7 +376,13 @@ class NsrTopic(BaseTopic):
             self.fs.mkdir(nsr_id)
 
             return nsr_id, None
-        except (ValidationError, EngineException, DbException, MsgException, FsException) as e:
+        except (
+            ValidationError,
+            EngineException,
+            DbException,
+            MsgException,
+            FsException,
+        ) as e:
             raise type(e)("{} while '{}'".format(e, step), http_code=e.http_code)
 
     def _get_nsd_from_db(self, nsd_id, session):
@@ -281,22 +398,31 @@ class NsrTopic(BaseTopic):
         return vnfd
 
     def _add_nsr_to_db(self, nsr_descriptor, rollback, session):
-        self.format_on_new(nsr_descriptor, session["project_id"], make_public=session["public"])
+        self.format_on_new(
+            nsr_descriptor, session["project_id"], make_public=session["public"]
+        )
         self.db.create("nsrs", nsr_descriptor)
         rollback.append({"topic": "nsrs", "_id": nsr_descriptor["id"]})
 
     def _add_vnfr_to_db(self, vnfr_descriptor, rollback, session):
-        self.format_on_new(vnfr_descriptor, session["project_id"], make_public=session["public"])
+        self.format_on_new(
+            vnfr_descriptor, session["project_id"], make_public=session["public"]
+        )
         self.db.create("vnfrs", vnfr_descriptor)
         rollback.append({"topic": "vnfrs", "_id": vnfr_descriptor["id"]})
 
     def _check_nsd_operational_state(self, nsd, ns_request):
         if nsd["_admin"]["operationalState"] == "DISABLED":
-            raise EngineException("nsd with id '{}' is DISABLED, and thus cannot be used to create "
-                                  "a network service".format(ns_request["nsdId"]), http_code=HTTPStatus.CONFLICT)
+            raise EngineException(
+                "nsd with id '{}' is DISABLED, and thus cannot be used to create "
+                "a network service".format(ns_request["nsdId"]),
+                http_code=HTTPStatus.CONFLICT,
+            )
 
     def _get_ns_k8s_namespace(self, nsd, ns_request, session):
-        additional_params, _ = self._format_additional_params(ns_request, descriptor=nsd)
+        additional_params, _ = self._format_additional_params(
+            ns_request, descriptor=nsd
+        )
         # use for k8s-namespace from ns_request or additionalParamsForNs. By default, the project_id
         ns_k8s_namespace = session["project_id"][0] if session["project_id"] else None
         if ns_request and ns_request.get("k8s-namespace"):
@@ -308,7 +434,9 @@ class NsrTopic(BaseTopic):
 
     def _create_nsr_descriptor_from_nsd(self, nsd, ns_request, nsr_id, session):
         now = time()
-        additional_params, _ = self._format_additional_params(ns_request, descriptor=nsd)
+        additional_params, _ = self._format_additional_params(
+            ns_request, descriptor=nsd
+        )
 
         nsr_descriptor = {
             "name": ns_request["nsName"],
@@ -361,11 +489,19 @@ class NsrTopic(BaseTopic):
             for vnf_profile in vnf_profiles:
                 for vlc in vnf_profile.get("virtual-link-connectivity", ()):
                     for cpd in vlc.get("constituent-cpd-id", ()):
-                        all_vld_connection_point_data[vlc.get("virtual-link-profile-id")].append({
-                            "member-vnf-index-ref": cpd.get("constituent-base-element-id"),
-                            "vnfd-connection-point-ref": cpd.get("constituent-cpd-id"),
-                            "vnfd-id-ref": vnf_profile.get("vnfd-id")
-                        })
+                        all_vld_connection_point_data[
+                            vlc.get("virtual-link-profile-id")
+                        ].append(
+                            {
+                                "member-vnf-index-ref": cpd.get(
+                                    "constituent-base-element-id"
+                                ),
+                                "vnfd-connection-point-ref": cpd.get(
+                                    "constituent-cpd-id"
+                                ),
+                                "vnfd-id-ref": vnf_profile.get("vnfd-id"),
+                            }
+                        )
 
                 vnfd = self._get_vnfd_from_db(vnf_profile.get("vnfd-id"), session)
 
@@ -382,30 +518,59 @@ class NsrTopic(BaseTopic):
                         if vsd.get("id") == vdu.get("virtual-storage-desc", [[]])[0]:
                             vdu_virtual_storage = vsd
                     # Get this vdu vcpus, memory and storage info for flavor_data
-                    if vdu_virtual_compute.get("virtual-cpu", {}).get("num-virtual-cpu"):
-                        flavor_data["vcpu-count"] = vdu_virtual_compute["virtual-cpu"]["num-virtual-cpu"]
+                    if vdu_virtual_compute.get("virtual-cpu", {}).get(
+                        "num-virtual-cpu"
+                    ):
+                        flavor_data["vcpu-count"] = vdu_virtual_compute["virtual-cpu"][
+                            "num-virtual-cpu"
+                        ]
                     if vdu_virtual_compute.get("virtual-memory", {}).get("size"):
-                        flavor_data["memory-mb"] = float(vdu_virtual_compute["virtual-memory"]["size"]) * 1024.0
+                        flavor_data["memory-mb"] = (
+                            float(vdu_virtual_compute["virtual-memory"]["size"])
+                            * 1024.0
+                        )
                     if vdu_virtual_storage.get("size-of-storage"):
-                        flavor_data["storage-gb"] = vdu_virtual_storage["size-of-storage"]
+                        flavor_data["storage-gb"] = vdu_virtual_storage[
+                            "size-of-storage"
+                        ]
                     # Get this vdu EPA info for guest_epa
                     if vdu_virtual_compute.get("virtual-cpu", {}).get("cpu-quota"):
-                        guest_epa["cpu-quota"] = vdu_virtual_compute["virtual-cpu"]["cpu-quota"]
+                        guest_epa["cpu-quota"] = vdu_virtual_compute["virtual-cpu"][
+                            "cpu-quota"
+                        ]
                     if vdu_virtual_compute.get("virtual-cpu", {}).get("pinning"):
                         vcpu_pinning = vdu_virtual_compute["virtual-cpu"]["pinning"]
                         if vcpu_pinning.get("thread-policy"):
-                            guest_epa["cpu-thread-pinning-policy"] = vcpu_pinning["thread-policy"]
+                            guest_epa["cpu-thread-pinning-policy"] = vcpu_pinning[
+                                "thread-policy"
+                            ]
                         if vcpu_pinning.get("policy"):
-                            cpu_policy = "SHARED" if vcpu_pinning["policy"] == "dynamic" else "DEDICATED"
+                            cpu_policy = (
+                                "SHARED"
+                                if vcpu_pinning["policy"] == "dynamic"
+                                else "DEDICATED"
+                            )
                             guest_epa["cpu-pinning-policy"] = cpu_policy
                     if vdu_virtual_compute.get("virtual-memory", {}).get("mem-quota"):
-                        guest_epa["mem-quota"] = vdu_virtual_compute["virtual-memory"]["mem-quota"]
-                    if vdu_virtual_compute.get("virtual-memory", {}).get("mempage-size"):
-                        guest_epa["mempage-size"] = vdu_virtual_compute["virtual-memory"]["mempage-size"]
-                    if vdu_virtual_compute.get("virtual-memory", {}).get("numa-node-policy"):
-                        guest_epa["numa-node-policy"] = vdu_virtual_compute["virtual-memory"]["numa-node-policy"]
+                        guest_epa["mem-quota"] = vdu_virtual_compute["virtual-memory"][
+                            "mem-quota"
+                        ]
+                    if vdu_virtual_compute.get("virtual-memory", {}).get(
+                        "mempage-size"
+                    ):
+                        guest_epa["mempage-size"] = vdu_virtual_compute[
+                            "virtual-memory"
+                        ]["mempage-size"]
+                    if vdu_virtual_compute.get("virtual-memory", {}).get(
+                        "numa-node-policy"
+                    ):
+                        guest_epa["numa-node-policy"] = vdu_virtual_compute[
+                            "virtual-memory"
+                        ]["numa-node-policy"]
                     if vdu_virtual_storage.get("disk-io-quota"):
-                        guest_epa["disk-io-quota"] = vdu_virtual_storage["disk-io-quota"]
+                        guest_epa["disk-io-quota"] = vdu_virtual_storage[
+                            "disk-io-quota"
+                        ]
 
                     if guest_epa:
                         flavor_data["guest-epa"] = guest_epa
@@ -425,15 +590,18 @@ class NsrTopic(BaseTopic):
                         self._add_image_to_nsr(nsr_descriptor, image_data)
 
             for vld in nsr_vld:
-                vld["vnfd-connection-point-ref"] = all_vld_connection_point_data.get(vld.get("id"), [])
+                vld["vnfd-connection-point-ref"] = all_vld_connection_point_data.get(
+                    vld.get("id"), []
+                )
                 vld["name"] = vld["id"]
             nsr_descriptor["vld"] = nsr_vld
 
         return nsr_descriptor
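
For context, the all_vld_connection_point_data mapping built near the top of this hunk is what gets attached to each NS VLD just above (the vld["vnfd-connection-point-ref"] assignment). A small sketch of that aggregation pattern, assuming a dict of lists keyed by virtual-link-profile-id (the real code may initialize the container differently; the function name and sample descriptor are illustrative):

from collections import defaultdict

def collect_vld_connection_points(nsd):
    """Sketch: group constituent CPDs of every vnf-profile by virtual-link-profile-id."""
    cps_by_vld = defaultdict(list)
    for df in nsd.get("df", ()):
        for vnf_profile in df.get("vnf-profile", ()):
            for vlc in vnf_profile.get("virtual-link-connectivity", ()):
                for cpd in vlc.get("constituent-cpd-id", ()):
                    cps_by_vld[vlc.get("virtual-link-profile-id")].append(
                        {
                            "member-vnf-index-ref": cpd.get("constituent-base-element-id"),
                            "vnfd-connection-point-ref": cpd.get("constituent-cpd-id"),
                            "vnfd-id-ref": vnf_profile.get("vnfd-id"),
                        }
                    )
    return cps_by_vld

nsd = {
    "df": [{"vnf-profile": [{
        "vnfd-id": "hackfest_basic-vnf",
        "virtual-link-connectivity": [{
            "virtual-link-profile-id": "mgmtnet",
            "constituent-cpd-id": [
                {"constituent-base-element-id": "1", "constituent-cpd-id": "vnf-cp0-ext"}
            ],
        }],
    }]}]
}
print(dict(collect_vld_connection_points(nsd)))
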
 
     def _get_image_data_from_vnfd(self, vnfd, sw_image_id):
-        sw_image_desc = utils.find_in_list(vnfd.get("sw-image-desc", ()),
-                                           lambda sw: sw["id"] == sw_image_id)
+        sw_image_desc = utils.find_in_list(
+            vnfd.get("sw-image-desc", ()), lambda sw: sw["id"] == sw_image_id
+        )
         image_data = {}
         if sw_image_desc.get("image"):
             image_data["image"] = sw_image_desc["image"]
@@ -447,18 +615,34 @@ class NsrTopic(BaseTopic):
         """
         Adds image to nsr, checking first that it is not already added
         """
-        img = next((f for f in nsr_descriptor["image"] if
-                    all(f.get(k) == image_data[k] for k in image_data)), None)
+        img = next(
+            (
+                f
+                for f in nsr_descriptor["image"]
+                if all(f.get(k) == image_data[k] for k in image_data)
+            ),
+            None,
+        )
         if not img:
             image_data["id"] = str(len(nsr_descriptor["image"]))
             nsr_descriptor["image"].append(image_data)
 
-    def _create_vnfr_descriptor_from_vnfd(self, nsd, vnfd, vnfd_id, vnf_index, nsr_descriptor,
-                                          ns_request, ns_k8s_namespace):
+    def _create_vnfr_descriptor_from_vnfd(
+        self,
+        nsd,
+        vnfd,
+        vnfd_id,
+        vnf_index,
+        nsr_descriptor,
+        ns_request,
+        ns_k8s_namespace,
+    ):
         vnfr_id = str(uuid4())
         nsr_id = nsr_descriptor["id"]
         now = time()
-        additional_params, vnf_params = self._format_additional_params(ns_request, vnf_index, descriptor=vnfd)
+        additional_params, vnf_params = self._format_additional_params(
+            ns_request, vnf_index, descriptor=vnfd
+        )
 
         vnfr_descriptor = {
             "id": vnfr_id,
@@ -507,17 +691,20 @@ class NsrTopic(BaseTopic):
             all_k8s_cluster_nets_cpds = {}
             for cpd in get_iterable(vnfd.get("ext-cpd")):
                 if cpd.get("k8s-cluster-net"):
-                    all_k8s_cluster_nets_cpds[cpd.get("k8s-cluster-net")] = cpd.get("id")
+                    all_k8s_cluster_nets_cpds[cpd.get("k8s-cluster-net")] = cpd.get(
+                        "id"
+                    )
             for net in get_iterable(vnfr_descriptor["k8s-cluster"].get("nets")):
                 if net.get("id") in all_k8s_cluster_nets_cpds:
-                    net["external-connection-point-ref"] = all_k8s_cluster_nets_cpds[net.get("id")]
+                    net["external-connection-point-ref"] = all_k8s_cluster_nets_cpds[
+                        net.get("id")
+                    ]
 
         # update kdus
         for kdu in get_iterable(vnfd.get("kdu")):
-            additional_params, kdu_params = self._format_additional_params(ns_request,
-                                                                           vnf_index,
-                                                                           kdu_name=kdu["name"],
-                                                                           descriptor=vnfd)
+            additional_params, kdu_params = self._format_additional_params(
+                ns_request, vnf_index, kdu_name=kdu["name"], descriptor=vnfd
+            )
             kdu_k8s_namespace = vnf_k8s_namespace
             kdu_model = kdu_params.get("kdu_model") if kdu_params else None
             if kdu_params and kdu_params.get("k8s-namespace"):
@@ -547,15 +734,19 @@ class NsrTopic(BaseTopic):
         for vdu in vnfd.get("vdu", ()):
             vdu_mgmt_cp = []
             try:
-                configs = vnfd.get("df")[0]["lcm-operations-configuration"]["operate-vnf-op-config"]["day1-2"]
-                vdu_config = utils.find_in_list(configs, lambda config: config["id"] == vdu["id"])
+                configs = vnfd.get("df")[0]["lcm-operations-configuration"][
+                    "operate-vnf-op-config"
+                ]["day1-2"]
+                vdu_config = utils.find_in_list(
+                    configs, lambda config: config["id"] == vdu["id"]
+                )
             except Exception:
                 vdu_config = None
 
             try:
                 vdu_instantiation_level = utils.find_in_list(
                     vnfd.get("df")[0]["instantiation-level"][0]["vdu-level"],
-                    lambda a_vdu_profile: a_vdu_profile["vdu-id"] == vdu["id"]
+                    lambda a_vdu_profile: a_vdu_profile["vdu-id"] == vdu["id"],
                 )
             except Exception:
                 vdu_instantiation_level = None
@@ -563,13 +754,14 @@ class NsrTopic(BaseTopic):
             if vdu_config:
                 external_connection_ee = utils.filter_in_list(
                     vdu_config.get("execution-environment-list", []),
-                    lambda ee: "external-connection-point-ref" in ee
+                    lambda ee: "external-connection-point-ref" in ee,
                 )
                 for ee in external_connection_ee:
                     vdu_mgmt_cp.append(ee["external-connection-point-ref"])
 
             additional_params, vdu_params = self._format_additional_params(
-                ns_request, vnf_index, vdu_id=vdu["id"], descriptor=vnfd)
+                ns_request, vnf_index, vdu_id=vdu["id"], descriptor=vnfd
+            )
             vdur = {
                 "vdu-id-ref": vdu["id"],
                 # TODO      "name": ""     Name of the VDU in the VIM
@@ -578,12 +770,14 @@ class NsrTopic(BaseTopic):
                 "internal-connection-point": [],
                 "interfaces": [],
                 "additionalParams": additional_params,
-                "vdu-name": vdu["name"]
+                "vdu-name": vdu["name"],
             }
             if vdu_params and vdu_params.get("config-units"):
                 vdur["config-units"] = vdu_params["config-units"]
             if deep_get(vdu, ("supplemental-boot-data", "boot-data-drive")):
-                vdur["boot-data-drive"] = vdu["supplemental-boot-data"]["boot-data-drive"]
+                vdur["boot-data-drive"] = vdu["supplemental-boot-data"][
+                    "boot-data-drive"
+                ]
             if vdu.get("pdu-type"):
                 vdur["pdu-type"] = vdu["pdu-type"]
                 vdur["name"] = vdu["pdu-type"]
@@ -599,14 +793,20 @@ class NsrTopic(BaseTopic):
 
                 for iface in icp.get("virtual-network-interface-requirement", ()):
                     iface_fields = ("name", "mac-address")
-                    vdu_iface = {x: iface[x] for x in iface_fields if iface.get(x) is not None}
+                    vdu_iface = {
+                        x: iface[x] for x in iface_fields if iface.get(x) is not None
+                    }
 
                     vdu_iface["internal-connection-point-ref"] = vdu_icp["id"]
                     if "port-security-enabled" in icp:
-                        vdu_iface["port-security-enabled"] = icp["port-security-enabled"]
+                        vdu_iface["port-security-enabled"] = icp[
+                            "port-security-enabled"
+                        ]
 
                     if "port-security-disable-strategy" in icp:
-                        vdu_iface["port-security-disable-strategy"] = icp["port-security-disable-strategy"]
+                        vdu_iface["port-security-disable-strategy"] = icp[
+                            "port-security-disable-strategy"
+                        ]
 
                     for ext_cp in vnfd.get("ext-cpd", ()):
                         if not ext_cp.get("int-cpd"):
@@ -614,21 +814,27 @@ class NsrTopic(BaseTopic):
                         if ext_cp["int-cpd"].get("vdu-id") != vdu["id"]:
                             continue
                         if icp["id"] == ext_cp["int-cpd"].get("cpd"):
-                            vdu_iface["external-connection-point-ref"] = ext_cp.get("id")
+                            vdu_iface["external-connection-point-ref"] = ext_cp.get(
+                                "id"
+                            )
 
                             if "port-security-enabled" in ext_cp:
-                                vdu_iface["port-security-enabled"] = (
-                                    ext_cp["port-security-enabled"]
-                                )
+                                vdu_iface["port-security-enabled"] = ext_cp[
+                                    "port-security-enabled"
+                                ]
 
                             if "port-security-disable-strategy" in ext_cp:
-                                vdu_iface["port-security-disable-strategy"] = (
-                                    ext_cp["port-security-disable-strategy"]
-                                )
+                                vdu_iface["port-security-disable-strategy"] = ext_cp[
+                                    "port-security-disable-strategy"
+                                ]
 
                             break
 
-                    if vnfd_mgmt_cp and vdu_iface.get("external-connection-point-ref") == vnfd_mgmt_cp:
+                    if (
+                        vnfd_mgmt_cp
+                        and vdu_iface.get("external-connection-point-ref")
+                        == vnfd_mgmt_cp
+                    ):
                         vdu_iface["mgmt-vnf"] = True
                         vdu_iface["mgmt-interface"] = True
 
@@ -645,24 +851,40 @@ class NsrTopic(BaseTopic):
                         # TODO: Change for multiple df support
                         for df in get_iterable(nsd.get("df")):
                             for vnf_profile in get_iterable(df.get("vnf-profile")):
-                                for vlc_index, vlc in \
-                                        enumerate(get_iterable(vnf_profile.get("virtual-link-connectivity"))):
-                                    for cpd in get_iterable(vlc.get("constituent-cpd-id")):
-                                        if cpd.get("constituent-cpd-id") == iface_ext_cp:
-                                            vdu_iface["ns-vld-id"] = vlc.get("virtual-link-profile-id")
+                                for vlc_index, vlc in enumerate(
+                                    get_iterable(
+                                        vnf_profile.get("virtual-link-connectivity")
+                                    )
+                                ):
+                                    for cpd in get_iterable(
+                                        vlc.get("constituent-cpd-id")
+                                    ):
+                                        if (
+                                            cpd.get("constituent-cpd-id")
+                                            == iface_ext_cp
+                                        ):
+                                            vdu_iface["ns-vld-id"] = vlc.get(
+                                                "virtual-link-profile-id"
+                                            )
                                             # if iface type is SRIOV or PASSTHROUGH, set pci-interfaces flag to True
-                                            if vdu_iface.get("type") in ("SR-IOV", "PCI-PASSTHROUGH"):
-                                                nsr_descriptor["vld"][vlc_index]["pci-interfaces"] = True
+                                            if vdu_iface.get("type") in (
+                                                "SR-IOV",
+                                                "PCI-PASSTHROUGH",
+                                            ):
+                                                nsr_descriptor["vld"][vlc_index][
+                                                    "pci-interfaces"
+                                                ] = True
                                             break
                     elif vdu_iface.get("internal-connection-point-ref"):
                         vdu_iface["vnf-vld-id"] = icp.get("int-virtual-link-desc")
                         # TODO: store fixed IP address in the record (if it exists in the ICP)
                         # if iface type is SRIOV or PASSTHROUGH, set pci-interfaces flag to True
                         if vdu_iface.get("type") in ("SR-IOV", "PCI-PASSTHROUGH"):
-                            ivld_index = utils.find_index_in_list(vnfd.get("int-virtual-link-desc", ()),
-                                                                  lambda ivld:
-                                                                  ivld["id"] == icp.get("int-virtual-link-desc")
-                                                                  )
+                            ivld_index = utils.find_index_in_list(
+                                vnfd.get("int-virtual-link-desc", ()),
+                                lambda ivld: ivld["id"]
+                                == icp.get("int-virtual-link-desc"),
+                            )
                             vnfr_descriptor["vld"][ivld_index]["pci-interfaces"] = True
 
                     vdur["interfaces"].append(vdu_iface)
@@ -670,10 +892,11 @@ class NsrTopic(BaseTopic):
             if vdu.get("sw-image-desc"):
                 sw_image = utils.find_in_list(
                     vnfd.get("sw-image-desc", ()),
-                    lambda image: image["id"] == vdu.get("sw-image-desc"))
+                    lambda image: image["id"] == vdu.get("sw-image-desc"),
+                )
                 nsr_sw_image_data = utils.find_in_list(
                     nsr_descriptor["image"],
-                    lambda nsr_image: (nsr_image.get("image") == sw_image.get("image"))
+                    lambda nsr_image: (nsr_image.get("image") == sw_image.get("image")),
                 )
                 vdur["ns-image-id"] = nsr_sw_image_data["id"]
 
@@ -682,10 +905,13 @@ class NsrTopic(BaseTopic):
                 for alt_image_id in vdu.get("alternative-sw-image-desc", ()):
                     sw_image = utils.find_in_list(
                         vnfd.get("sw-image-desc", ()),
-                        lambda image: image["id"] == alt_image_id)
+                        lambda image: image["id"] == alt_image_id,
+                    )
                     nsr_sw_image_data = utils.find_in_list(
                         nsr_descriptor["image"],
-                        lambda nsr_image: (nsr_image.get("image") == sw_image.get("image"))
+                        lambda nsr_image: (
+                            nsr_image.get("image") == sw_image.get("image")
+                        ),
                     )
                     alt_image_ids.append(nsr_sw_image_data["id"])
                 vdur["alt-image-ids"] = alt_image_ids
@@ -693,7 +919,8 @@ class NsrTopic(BaseTopic):
             flavor_data_name = vdu["id"][:56] + "-flv"
             nsr_flavor_desc = utils.find_in_list(
                 nsr_descriptor["flavor"],
-                lambda flavor: flavor["name"] == flavor_data_name)
+                lambda flavor: flavor["name"] == flavor_data_name,
+            )
 
             if nsr_flavor_desc:
                 vdur["ns-flavor-id"] = nsr_flavor_desc["id"]
@@ -719,7 +946,9 @@ class NsrTopic(BaseTopic):
         return vnfr_descriptor
 
     def edit(self, session, _id, indata=None, kwargs=None, content=None):
-        raise EngineException("Method edit called directly", HTTPStatus.INTERNAL_SERVER_ERROR)
+        raise EngineException(
+            "Method edit called directly", HTTPStatus.INTERNAL_SERVER_ERROR
+        )
 
 
 class VnfrTopic(BaseTopic):
@@ -730,20 +959,26 @@ class VnfrTopic(BaseTopic):
         BaseTopic.__init__(self, db, fs, msg, auth)
 
     def delete(self, session, _id, dry_run=False, not_send_msg=None):
-        raise EngineException("Method delete called directly", HTTPStatus.INTERNAL_SERVER_ERROR)
+        raise EngineException(
+            "Method delete called directly", HTTPStatus.INTERNAL_SERVER_ERROR
+        )
 
     def edit(self, session, _id, indata=None, kwargs=None, content=None):
-        raise EngineException("Method edit called directly", HTTPStatus.INTERNAL_SERVER_ERROR)
+        raise EngineException(
+            "Method edit called directly", HTTPStatus.INTERNAL_SERVER_ERROR
+        )
 
     def new(self, rollback, session, indata=None, kwargs=None, headers=None):
         # Not used because vnfrs are created and deleted by NsrTopic class directly
-        raise EngineException("Method new called directly", HTTPStatus.INTERNAL_SERVER_ERROR)
+        raise EngineException(
+            "Method new called directly", HTTPStatus.INTERNAL_SERVER_ERROR
+        )
 
 
 class NsLcmOpTopic(BaseTopic):
     topic = "nslcmops"
     topic_msg = "ns"
-    operation_schema = {    # mapping between operation and jsonschema to validate
+    operation_schema = {  # mapping between operation and jsonschema to validate
         "instantiate": ns_instantiate,
         "action": ns_action,
         "scale": ns_scale,
@@ -772,42 +1007,58 @@ class NsLcmOpTopic(BaseTopic):
         nsd = nsr["nsd"]
         # check vnf_member_index
         if indata.get("vnf_member_index"):
-            indata["member_vnf_index"] = indata.pop("vnf_member_index")  # for backward compatibility
+            indata["member_vnf_index"] = indata.pop(
+                "vnf_member_index"
+            )  # for backward compatibility
         if indata.get("member_vnf_index"):
-            vnfd = self._get_vnfd_from_vnf_member_index(indata["member_vnf_index"], nsr["_id"])
+            vnfd = self._get_vnfd_from_vnf_member_index(
+                indata["member_vnf_index"], nsr["_id"]
+            )
             try:
-                configs = vnfd.get("df")[0]["lcm-operations-configuration"]["operate-vnf-op-config"]["day1-2"]
+                configs = vnfd.get("df")[0]["lcm-operations-configuration"][
+                    "operate-vnf-op-config"
+                ]["day1-2"]
             except Exception:
                 configs = []
 
             if indata.get("vdu_id"):
                 self._check_valid_vdu(vnfd, indata["vdu_id"])
                 descriptor_configuration = utils.find_in_list(
-                    configs,
-                    lambda config: config["id"] == indata["vdu_id"]
+                    configs, lambda config: config["id"] == indata["vdu_id"]
                 )
             elif indata.get("kdu_name"):
                 self._check_valid_kdu(vnfd, indata["kdu_name"])
                 descriptor_configuration = utils.find_in_list(
-                    configs,
-                    lambda config: config["id"] == indata.get("kdu_name")
+                    configs, lambda config: config["id"] == indata.get("kdu_name")
                 )
             else:
                 descriptor_configuration = utils.find_in_list(
-                    configs,
-                    lambda config: config["id"] == vnfd["id"]
+                    configs, lambda config: config["id"] == vnfd["id"]
                 )
             if descriptor_configuration is not None:
-                descriptor_configuration = descriptor_configuration.get("config-primitive")
+                descriptor_configuration = descriptor_configuration.get(
+                    "config-primitive"
+                )
         else:  # use a NSD
-            descriptor_configuration = nsd.get("ns-configuration", {}).get("config-primitive")
+            descriptor_configuration = nsd.get("ns-configuration", {}).get(
+                "config-primitive"
+            )
 
         # For k8s, allow default primitives without validating the parameters
-        if indata.get("kdu_name") and indata["primitive"] in ("upgrade", "rollback", "status", "inspect", "readme"):
+        if indata.get("kdu_name") and indata["primitive"] in (
+            "upgrade",
+            "rollback",
+            "status",
+            "inspect",
+            "readme",
+        ):
             # TODO: it should be checked that rollback can only contain revision_number
             if not indata.get("member_vnf_index"):
-                raise EngineException("Missing action parameter 'member_vnf_index' for default KDU primitive '{}'"
-                                      .format(indata["primitive"]))
+                raise EngineException(
+                    "Missing action parameter 'member_vnf_index' for default KDU primitive '{}'".format(
+                        indata["primitive"]
+                    )
+                )
             return
         # if not, check primitive
         for config_primitive in get_iterable(descriptor_configuration):
@@ -821,26 +1072,45 @@ class NsLcmOpTopic(BaseTopic):
                     if paramd["name"] in in_primitive_params_copy:
                         del in_primitive_params_copy[paramd["name"]]
                     elif not paramd.get("default-value"):
-                        raise EngineException("Needed parameter {} not provided for primitive '{}'".format(
-                            paramd["name"], indata["primitive"]))
+                        raise EngineException(
+                            "Needed parameter {} not provided for primitive '{}'".format(
+                                paramd["name"], indata["primitive"]
+                            )
+                        )
                 # check no extra primitive params are provided
                 if in_primitive_params_copy:
-                    raise EngineException("parameter/s '{}' not present at vnfd /nsd for primitive '{}'".format(
-                        list(in_primitive_params_copy.keys()), indata["primitive"]))
+                    raise EngineException(
+                        "parameter/s '{}' not present at vnfd /nsd for primitive '{}'".format(
+                            list(in_primitive_params_copy.keys()), indata["primitive"]
+                        )
+                    )
                 break
         else:
-            raise EngineException("Invalid primitive '{}' is not present at vnfd/nsd".format(indata["primitive"]))
+            raise EngineException(
+                "Invalid primitive '{}' is not present at vnfd/nsd".format(
+                    indata["primitive"]
+                )
+            )
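
The check above validates the parameters of a requested primitive against the config-primitive declared in the descriptor: every declared parameter must either be supplied or carry a default-value, no undeclared parameter may be passed, and an unknown primitive name is rejected. A standalone sketch of that validation, with EngineException replaced by a plain exception for the example:

def check_primitive_params(primitive_name, in_params, config_primitives):
    """Sketch: validate in_params against the matching config-primitive declaration."""
    for config_primitive in config_primitives or ():
        if config_primitive["name"] != primitive_name:
            continue
        remaining = dict(in_params or {})
        for paramd in config_primitive.get("parameter", ()):
            if paramd["name"] in remaining:
                del remaining[paramd["name"]]
            elif not paramd.get("default-value"):
                raise ValueError(
                    "Needed parameter {} not provided for primitive '{}'".format(
                        paramd["name"], primitive_name
                    )
                )
        if remaining:  # parameters the descriptor does not declare
            raise ValueError(
                "parameter/s '{}' not present at vnfd/nsd for primitive '{}'".format(
                    list(remaining.keys()), primitive_name
                )
            )
        return
    raise ValueError(
        "Invalid primitive '{}' is not present at vnfd/nsd".format(primitive_name)
    )

config = [{"name": "touch", "parameter": [{"name": "filename"}]}]
check_primitive_params("touch", {"filename": "/tmp/osm.txt"}, config)   # passes
# check_primitive_params("touch", {}, config)  # would raise: filename not provided
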
 
     def _check_scale_ns_operation(self, indata, nsr):
-        vnfd = self._get_vnfd_from_vnf_member_index(indata["scaleVnfData"]["scaleByStepData"]["member-vnf-index"],
-                                                    nsr["_id"])
+        vnfd = self._get_vnfd_from_vnf_member_index(
+            indata["scaleVnfData"]["scaleByStepData"]["member-vnf-index"], nsr["_id"]
+        )
         for scaling_aspect in get_iterable(vnfd.get("df", ())[0]["scaling-aspect"]):
-            if indata["scaleVnfData"]["scaleByStepData"]["scaling-group-descriptor"] == scaling_aspect["id"]:
+            if (
+                indata["scaleVnfData"]["scaleByStepData"]["scaling-group-descriptor"]
+                == scaling_aspect["id"]
+            ):
                 break
         else:
-            raise EngineException("Invalid scaleVnfData:scaleByStepData:scaling-group-descriptor '{}' is not "
-                                  "present at vnfd:scaling-aspect"
-                                  .format(indata["scaleVnfData"]["scaleByStepData"]["scaling-group-descriptor"]))
+            raise EngineException(
+                "Invalid scaleVnfData:scaleByStepData:scaling-group-descriptor '{}' is not "
+                "present at vnfd:scaling-aspect".format(
+                    indata["scaleVnfData"]["scaleByStepData"][
+                        "scaling-group-descriptor"
+                    ]
+                )
+            )
 
     def _check_instantiate_ns_operation(self, indata, nsr, session):
         vnf_member_index_to_vnfd = {}  # map between vnf_member_index to vnf descriptor.
@@ -854,33 +1124,51 @@ class NsLcmOpTopic(BaseTopic):
             if vnf_member_index_to_vnfd.get(member_vnf_index):
                 vnfd = vnf_member_index_to_vnfd[member_vnf_index]
             else:
-                vnfd = self._get_vnfd_from_vnf_member_index(member_vnf_index, nsr["_id"])
-                vnf_member_index_to_vnfd[member_vnf_index] = vnfd  # add to cache, avoiding a later lookup
+                vnfd = self._get_vnfd_from_vnf_member_index(
+                    member_vnf_index, nsr["_id"]
+                )
+                vnf_member_index_to_vnfd[
+                    member_vnf_index
+                ] = vnfd  # add to cache, avoiding a later lookup
             self._check_vnf_instantiation_params(in_vnf, vnfd)
             if in_vnf.get("vimAccountId"):
-                self._check_valid_vim_account(in_vnf["vimAccountId"], vim_accounts, session)
+                self._check_valid_vim_account(
+                    in_vnf["vimAccountId"], vim_accounts, session
+                )
 
         for in_vld in get_iterable(indata.get("vld")):
-            self._check_valid_wim_account(in_vld.get("wimAccountId"), wim_accounts, session)
+            self._check_valid_wim_account(
+                in_vld.get("wimAccountId"), wim_accounts, session
+            )
             for vldd in get_iterable(nsd.get("virtual-link-desc")):
                 if in_vld["name"] == vldd["id"]:
                     break
             else:
-                raise EngineException("Invalid parameter vld:name='{}' is not present at nsd:vld".format(
-                    in_vld["name"]))
+                raise EngineException(
+                    "Invalid parameter vld:name='{}' is not present at nsd:vld".format(
+                        in_vld["name"]
+                    )
+                )
 
     def _get_vnfd_from_vnf_member_index(self, member_vnf_index, nsr_id):
         # Obtain vnf descriptor. The vnfr is used to get the vnfd._id used for this member_vnf_index
-        vnfr = self.db.get_one("vnfrs",
-                               {"nsr-id-ref": nsr_id, "member-vnf-index-ref": member_vnf_index},
-                               fail_on_empty=False)
+        vnfr = self.db.get_one(
+            "vnfrs",
+            {"nsr-id-ref": nsr_id, "member-vnf-index-ref": member_vnf_index},
+            fail_on_empty=False,
+        )
         if not vnfr:
-            raise EngineException("Invalid parameter member_vnf_index='{}' is not one of the "
-                                  "nsd:constituent-vnfd".format(member_vnf_index))
+            raise EngineException(
+                "Invalid parameter member_vnf_index='{}' is not one of the "
+                "nsd:constituent-vnfd".format(member_vnf_index)
+            )
         vnfd = self.db.get_one("vnfds", {"_id": vnfr["vnfd-id"]}, fail_on_empty=False)
         if not vnfd:
-            raise EngineException("vnfd id={} has been deleted!. Operation cannot be performed".
-                                  format(vnfr["vnfd-id"]))
+            raise EngineException(
+                "vnfd id={} has been deleted!. Operation cannot be performed".format(
+                    vnfr["vnfd-id"]
+                )
+            )
         return vnfd
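
_get_vnfd_from_vnf_member_index resolves the VNFD in two steps: find the vnfr for this NS id and member index, then load the vnfd by the stored vnfd-id, raising if either lookup is empty. A sketch of that flow against a purely hypothetical in-memory stub; the real code goes through osm_common's db layer with get_one:

class NotFound(Exception):
    pass

def get_vnfd_from_member_index(db, nsr_id, member_vnf_index):
    """Sketch: vnfr lookup by (nsr-id-ref, member-vnf-index-ref), then vnfd by its _id."""
    vnfr = db["vnfrs"].get((nsr_id, member_vnf_index))
    if not vnfr:
        raise NotFound(
            "Invalid parameter member_vnf_index='{}' is not one of the "
            "nsd:constituent-vnfd".format(member_vnf_index)
        )
    vnfd = db["vnfds"].get(vnfr["vnfd-id"])
    if not vnfd:
        raise NotFound("vnfd id={} has been deleted".format(vnfr["vnfd-id"]))
    return vnfd

db = {
    "vnfrs": {("ns-1", "1"): {"vnfd-id": "vnfd-uuid-1"}},
    "vnfds": {"vnfd-uuid-1": {"id": "hackfest_basic-vnf"}},
}
print(get_vnfd_from_member_index(db, "ns-1", "1")["id"])  # hackfest_basic-vnf
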
 
     def _check_valid_vdu(self, vnfd, vdu_id):
@@ -888,14 +1176,22 @@ class NsLcmOpTopic(BaseTopic):
             if vdud["id"] == vdu_id:
                 return vdud
         else:
-            raise EngineException("Invalid parameter vdu_id='{}' not present at vnfd:vdu:id".format(vdu_id))
+            raise EngineException(
+                "Invalid parameter vdu_id='{}' not present at vnfd:vdu:id".format(
+                    vdu_id
+                )
+            )
 
     def _check_valid_kdu(self, vnfd, kdu_name):
         for kdud in get_iterable(vnfd.get("kdu")):
             if kdud["name"] == kdu_name:
                 return kdud
         else:
-            raise EngineException("Invalid parameter kdu_name='{}' not present at vnfd:kdu:name".format(kdu_name))
+            raise EngineException(
+                "Invalid parameter kdu_name='{}' not present at vnfd:kdu:name".format(
+                    kdu_name
+                )
+            )
 
     def _check_vnf_instantiation_params(self, in_vnf, vnfd):
         for in_vdu in get_iterable(in_vnf.get("vdu")):
@@ -906,32 +1202,47 @@ class NsLcmOpTopic(BaseTopic):
                             if volumed["id"] == volume["name"]:
                                 break
                         else:
-                            raise EngineException("Invalid parameter vnf[member-vnf-index='{}']:vdu[id='{}']:"
-                                                  "volume:name='{}' is not present at "
-                                                  "vnfd:vdu:virtual-storage-desc list".
-                                                  format(in_vnf["member-vnf-index"], in_vdu["id"],
-                                                         volume["id"]))
+                            raise EngineException(
+                                "Invalid parameter vnf[member-vnf-index='{}']:vdu[id='{}']:"
+                                "volume:name='{}' is not present at "
+                                "vnfd:vdu:virtual-storage-desc list".format(
+                                    in_vnf["member-vnf-index"],
+                                    in_vdu["id"],
+                                    volume["id"],
+                                )
+                            )
 
                     vdu_if_names = set()
                     for cpd in get_iterable(vdu.get("int-cpd")):
-                        for iface in get_iterable(cpd.get("virtual-network-interface-requirement")):
+                        for iface in get_iterable(
+                            cpd.get("virtual-network-interface-requirement")
+                        ):
                             vdu_if_names.add(iface.get("name"))
 
                     for in_iface in get_iterable(in_vdu["interface"]):
                         if in_iface["name"] in vdu_if_names:
                             break
                         else:
-                            raise EngineException("Invalid parameter vnf[member-vnf-index='{}']:vdu[id='{}']:"
-                                                  "int-cpd[id='{}'] is not present at vnfd:vdu:int-cpd"
-                                                  .format(in_vnf["member-vnf-index"], in_vdu["id"],
-                                                          in_iface["name"]))
+                            raise EngineException(
+                                "Invalid parameter vnf[member-vnf-index='{}']:vdu[id='{}']:"
+                                "int-cpd[id='{}'] is not present at vnfd:vdu:int-cpd".format(
+                                    in_vnf["member-vnf-index"],
+                                    in_vdu["id"],
+                                    in_iface["name"],
+                                )
+                            )
                     break
 
             else:
-                raise EngineException("Invalid parameter vnf[member-vnf-index='{}']:vdu[id='{}'] is not present "
-                                      "at vnfd:vdu".format(in_vnf["member-vnf-index"], in_vdu["id"]))
+                raise EngineException(
+                    "Invalid parameter vnf[member-vnf-index='{}']:vdu[id='{}'] is not present "
+                    "at vnfd:vdu".format(in_vnf["member-vnf-index"], in_vdu["id"])
+                )
 
-        vnfd_ivlds_cpds = {ivld.get("id"): set() for ivld in get_iterable(vnfd.get("int-virtual-link-desc"))}
+        vnfd_ivlds_cpds = {
+            ivld.get("id"): set()
+            for ivld in get_iterable(vnfd.get("int-virtual-link-desc"))
+        }
         for vdu in get_iterable(vnfd.get("vdu")):
             for cpd in get_iterable(vnfd.get("int-cpd")):
                 if cpd.get("int-virtual-link-desc"):
@@ -943,15 +1254,22 @@ class NsLcmOpTopic(BaseTopic):
                     if in_icp["id-ref"] in vnfd_ivlds_cpds[in_ivld.get("name")]:
                         break
                     else:
-                        raise EngineException("Invalid parameter vnf[member-vnf-index='{}']:internal-vld[name"
-                                              "='{}']:internal-connection-point[id-ref:'{}'] is not present at "
-                                              "vnfd:internal-vld:name/id:internal-connection-point"
-                                              .format(in_vnf["member-vnf-index"], in_ivld["name"],
-                                                      in_icp["id-ref"]))
+                        raise EngineException(
+                            "Invalid parameter vnf[member-vnf-index='{}']:internal-vld[name"
+                            "='{}']:internal-connection-point[id-ref:'{}'] is not present at "
+                            "vnfd:internal-vld:name/id:internal-connection-point".format(
+                                in_vnf["member-vnf-index"],
+                                in_ivld["name"],
+                                in_icp["id-ref"],
+                            )
+                        )
             else:
-                raise EngineException("Invalid parameter vnf[member-vnf-index='{}']:internal-vld:name='{}'"
-                                      " is not present at vnfd '{}'".format(in_vnf["member-vnf-index"],
-                                                                            in_ivld["name"], vnfd["id"]))
+                raise EngineException(
+                    "Invalid parameter vnf[member-vnf-index='{}']:internal-vld:name='{}'"
+                    " is not present at vnfd '{}'".format(
+                        in_vnf["member-vnf-index"], in_ivld["name"], vnfd["id"]
+                    )
+                )
 
     def _check_valid_vim_account(self, vim_account, vim_accounts, session):
         if vim_account in vim_accounts:
@@ -961,7 +1279,11 @@ class NsLcmOpTopic(BaseTopic):
             db_filter["_id"] = vim_account
             self.db.get_one("vim_accounts", db_filter)
         except Exception:
-            raise EngineException("Invalid vimAccountId='{}' not present for the project".format(vim_account))
+            raise EngineException(
+                "Invalid vimAccountId='{}' not present for the project".format(
+                    vim_account
+                )
+            )
         vim_accounts.append(vim_account)
 
     def _check_valid_wim_account(self, wim_account, wim_accounts, session):
@@ -974,10 +1296,16 @@ class NsLcmOpTopic(BaseTopic):
             db_filter["_id"] = wim_account
             self.db.get_one("wim_accounts", db_filter)
         except Exception:
-            raise EngineException("Invalid wimAccountId='{}' not present for the project".format(wim_account))
+            raise EngineException(
+                "Invalid wimAccountId='{}' not present for the project".format(
+                    wim_account
+                )
+            )
         wim_accounts.append(wim_account)
 
-    def _look_for_pdu(self, session, rollback, vnfr, vim_account, vnfr_update, vnfr_update_rollback):
+    def _look_for_pdu(
+        self, session, rollback, vnfr, vim_account, vnfr_update, vnfr_update_rollback
+    ):
         """
         Look for a free PDU in the catalog matching vdur type and interfaces. Fills vnfr.vdur with the interface
         (ip_address, ...) information.
@@ -1027,7 +1355,13 @@ class NsLcmOpTopic(BaseTopic):
             else:
                 raise EngineException(
                     "No PDU of type={} at vim_account={} found for member_vnf_index={}, vdu={} matching interface "
-                    "names".format(pdu_type, vim_account, vnfr["member-vnf-index-ref"], vdur["vdu-id-ref"]))
+                    "names".format(
+                        pdu_type,
+                        vim_account,
+                        vnfr["member-vnf-index-ref"],
+                        vdur["vdu-id-ref"],
+                    )
+                )
 
             # step 2. Update pdu
             rollback_pdu = {
@@ -1036,13 +1370,26 @@ class NsLcmOpTopic(BaseTopic):
                 "_admin.usage.nsr_id": None,
                 "_admin.usage.vdur": None,
             }
-            self.db.set_one("pdus", {"_id": pdu["_id"]},
-                            {"_admin.usageState": "IN_USE",
-                             "_admin.usage": {"vnfr_id": vnfr["_id"],
-                                              "nsr_id": vnfr["nsr-id-ref"],
-                                              "vdur": vdur["vdu-id-ref"]}
-                             })
-            rollback.append({"topic": "pdus", "_id": pdu["_id"], "operation": "set", "content": rollback_pdu})
+            self.db.set_one(
+                "pdus",
+                {"_id": pdu["_id"]},
+                {
+                    "_admin.usageState": "IN_USE",
+                    "_admin.usage": {
+                        "vnfr_id": vnfr["_id"],
+                        "nsr_id": vnfr["nsr-id-ref"],
+                        "vdur": vdur["vdu-id-ref"],
+                    },
+                },
+            )
+            rollback.append(
+                {
+                    "topic": "pdus",
+                    "_id": pdu["_id"],
+                    "operation": "set",
+                    "content": rollback_pdu,
+                }
+            )
 
             # step 3. Fill vnfr info by filling vdur
             vdu_text = "vdur.{}".format(vdur_index)
@@ -1053,31 +1400,58 @@ class NsLcmOpTopic(BaseTopic):
                     if pdu_interface["name"] == vdur_interface["name"]:
                         iface_text = vdu_text + ".interfaces.{}".format(iface_index)
                         for k, v in pdu_interface.items():
-                            if k in ("ip-address", "mac-address"):  # TODO: switch-xxxxx must be inserted
+                            if k in (
+                                "ip-address",
+                                "mac-address",
+                            ):  # TODO: switch-xxxxx must be inserted
                                 vnfr_update[iface_text + ".{}".format(k)] = v
-                                vnfr_update_rollback[iface_text + ".{}".format(k)] = vdur_interface.get(v)
+                                vnfr_update_rollback[
+                                    iface_text + ".{}".format(k)
+                                ] = vdur_interface.get(v)
                         if pdu_interface.get("ip-address"):
-                            if vdur_interface.get("mgmt-interface") or vdur_interface.get("mgmt-vnf"):
-                                vnfr_update_rollback[vdu_text + ".ip-address"] = vdur.get("ip-address")
-                                vnfr_update[vdu_text + ".ip-address"] = pdu_interface["ip-address"]
+                            if vdur_interface.get(
+                                "mgmt-interface"
+                            ) or vdur_interface.get("mgmt-vnf"):
+                                vnfr_update_rollback[
+                                    vdu_text + ".ip-address"
+                                ] = vdur.get("ip-address")
+                                vnfr_update[vdu_text + ".ip-address"] = pdu_interface[
+                                    "ip-address"
+                                ]
                             if vdur_interface.get("mgmt-vnf"):
-                                vnfr_update_rollback["ip-address"] = vnfr.get("ip-address")
+                                vnfr_update_rollback["ip-address"] = vnfr.get(
+                                    "ip-address"
+                                )
                                 vnfr_update["ip-address"] = pdu_interface["ip-address"]
-                                vnfr_update[vdu_text + ".ip-address"] = pdu_interface["ip-address"]
-                        if pdu_interface.get("vim-network-name") or pdu_interface.get("vim-network-id"):
-                            ifaces_forcing_vim_network.append({
-                                "name": vdur_interface.get("vnf-vld-id") or vdur_interface.get("ns-vld-id"),
-                                "vnf-vld-id": vdur_interface.get("vnf-vld-id"),
-                                "ns-vld-id": vdur_interface.get("ns-vld-id")})
+                                vnfr_update[vdu_text + ".ip-address"] = pdu_interface[
+                                    "ip-address"
+                                ]
+                        if pdu_interface.get("vim-network-name") or pdu_interface.get(
+                            "vim-network-id"
+                        ):
+                            ifaces_forcing_vim_network.append(
+                                {
+                                    "name": vdur_interface.get("vnf-vld-id")
+                                    or vdur_interface.get("ns-vld-id"),
+                                    "vnf-vld-id": vdur_interface.get("vnf-vld-id"),
+                                    "ns-vld-id": vdur_interface.get("ns-vld-id"),
+                                }
+                            )
                             if pdu_interface.get("vim-network-id"):
-                                ifaces_forcing_vim_network[-1]["vim-network-id"] = pdu_interface["vim-network-id"]
+                                ifaces_forcing_vim_network[-1][
+                                    "vim-network-id"
+                                ] = pdu_interface["vim-network-id"]
                             if pdu_interface.get("vim-network-name"):
-                                ifaces_forcing_vim_network[-1]["vim-network-name"] = pdu_interface["vim-network-name"]
+                                ifaces_forcing_vim_network[-1][
+                                    "vim-network-name"
+                                ] = pdu_interface["vim-network-name"]
                         break
 
         return ifaces_forcing_vim_network
 
-    def _look_for_k8scluster(self, session, rollback, vnfr, vim_account, vnfr_update, vnfr_update_rollback):
+    def _look_for_k8scluster(
+        self, session, rollback, vnfr, vim_account, vnfr_update, vnfr_update_rollback
+    ):
         """
         Look for an available k8scluster for all the kdus in the vnfd matching version and cni requirements.
         Fills vnfr.kdur with the selected k8scluster
@@ -1114,7 +1488,9 @@ class NsLcmOpTopic(BaseTopic):
             # restrict by cni
             if vnfr["k8s-cluster"].get("cni"):
                 k8s_requirements["cni"] = vnfr["k8s-cluster"]["cni"]
-                if not set(vnfr["k8s-cluster"]["cni"]).intersection(k8scluster.get("cni", ())):
+                if not set(vnfr["k8s-cluster"]["cni"]).intersection(
+                    k8scluster.get("cni", ())
+                ):
                     continue
             # restrict by version
             if vnfr["k8s-cluster"].get("version"):
@@ -1124,12 +1500,17 @@ class NsLcmOpTopic(BaseTopic):
             # restrict by number of networks
             if vnfr["k8s-cluster"].get("nets"):
                 k8s_requirements["networks"] = len(vnfr["k8s-cluster"]["nets"])
-                if not k8scluster.get("nets") or len(k8scluster["nets"]) < len(vnfr["k8s-cluster"]["nets"]):
+                if not k8scluster.get("nets") or len(k8scluster["nets"]) < len(
+                    vnfr["k8s-cluster"]["nets"]
+                ):
                     continue
             break
         else:
-            raise EngineException("No k8scluster with requirements='{}' at vim_account={} found for member_vnf_index={}"
-                                  .format(k8s_requirements, vim_account, vnfr["member-vnf-index-ref"]))
+            raise EngineException(
+                "No k8scluster with requirements='{}' at vim_account={} found for member_vnf_index={}".format(
+                    k8s_requirements, vim_account, vnfr["member-vnf-index-ref"]
+                )
+            )
 
         for kdur_index, kdur in enumerate(get_iterable(vnfr.get("kdur"))):
             # step 3. Fill vnfr info by filling kdur
@@ -1148,15 +1529,22 @@ class NsLcmOpTopic(BaseTopic):
                 else:
                     vim_net = k8scluster["nets"][k8scluster_net_list[0]]
                     k8scluster_net_list.pop(0)
-                vnfr_update_rollback["k8s-cluster.nets.{}.vim_net".format(net_index)] = None
+                vnfr_update_rollback[
+                    "k8s-cluster.nets.{}.vim_net".format(net_index)
+                ] = None
                 vnfr_update["k8s-cluster.nets.{}.vim_net".format(net_index)] = vim_net
-                if vim_net and (kdur_net.get("vnf-vld-id") or kdur_net.get("ns-vld-id")):
-                    ifaces_forcing_vim_network.append({
-                        "name": kdur_net.get("vnf-vld-id") or kdur_net.get("ns-vld-id"),
-                        "vnf-vld-id": kdur_net.get("vnf-vld-id"),
-                        "ns-vld-id": kdur_net.get("ns-vld-id"),
-                        "vim-network-name": vim_net,   # TODO can it be vim-network-id ???
-                    })
+                if vim_net and (
+                    kdur_net.get("vnf-vld-id") or kdur_net.get("ns-vld-id")
+                ):
+                    ifaces_forcing_vim_network.append(
+                        {
+                            "name": kdur_net.get("vnf-vld-id")
+                            or kdur_net.get("ns-vld-id"),
+                            "vnf-vld-id": kdur_net.get("vnf-vld-id"),
+                            "ns-vld-id": kdur_net.get("ns-vld-id"),
+                            "vim-network-name": vim_net,  # TODO can it be vim-network-id ???
+                        }
+                    )
             # TODO check that this forcing is not incompatible with other forcing
         return ifaces_forcing_vim_network
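
The selection loop above rejects a candidate k8s cluster unless it satisfies the vnfr's k8s-cluster requirements: at least one requested CNI, the requested version, and at least as many networks as requested. The CNI and network checks are visible in this hunk; the version comparison body is elided here, so the membership test below is an assumption. A minimal sketch with an illustrative function name and sample data:

def pick_k8scluster(requirements, clusters):
    """Sketch: return the first cluster matching cni/version/nets requirements, else None."""
    for cluster in clusters:
        # CNI: at least one requested CNI must be offered by the cluster
        if requirements.get("cni") and not set(requirements["cni"]).intersection(
            cluster.get("cni", ())
        ):
            continue
        # version: assumed check that the cluster's k8s version is among those requested
        if requirements.get("version") and cluster.get("k8s_version") not in requirements["version"]:
            continue
        # networks: the cluster must expose at least as many nets as requested
        if requirements.get("nets") and (
            not cluster.get("nets") or len(cluster["nets"]) < len(requirements["nets"])
        ):
            continue
        return cluster
    return None

clusters = [
    {"name": "small", "cni": ["flannel"], "k8s_version": "v1.15", "nets": {"net1": "vim-net"}},
    {"name": "big", "cni": ["calico", "flannel"], "k8s_version": "v1.17",
     "nets": {"net1": "vim-net-a", "net2": "vim-net-b"}},
]
reqs = {"cni": ["calico"], "version": ["v1.17"], "nets": ["mgmt", "data"]}
print(pick_k8scluster(reqs, clusters)["name"])  # big
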
 
@@ -1187,56 +1575,108 @@ class NsLcmOpTopic(BaseTopic):
                     for vdur_index, vdur in enumerate(vnfr["vdur"]):
                         if vdu_inst_param["id"] != vdur["vdu-id-ref"]:
                             continue
-                        for iface_inst_param in get_iterable(vdu_inst_param.get("interface")):
-                            iface_index, _ = next(i for i in enumerate(vdur["interfaces"])
-                                                  if i[1]["name"] == iface_inst_param["name"])
-                            vnfr_update_text = "vdur.{}.interfaces.{}".format(vdur_index, iface_index)
+                        for iface_inst_param in get_iterable(
+                            vdu_inst_param.get("interface")
+                        ):
+                            iface_index, _ = next(
+                                i
+                                for i in enumerate(vdur["interfaces"])
+                                if i[1]["name"] == iface_inst_param["name"]
+                            )
+                            vnfr_update_text = "vdur.{}.interfaces.{}".format(
+                                vdur_index, iface_index
+                            )
                             if iface_inst_param.get("ip-address"):
-                                vnfr_update[vnfr_update_text + ".ip-address"] = increment_ip_mac(
-                                    iface_inst_param.get("ip-address"), vdur.get("count-index", 0))
+                                vnfr_update[
+                                    vnfr_update_text + ".ip-address"
+                                ] = increment_ip_mac(
+                                    iface_inst_param.get("ip-address"),
+                                    vdur.get("count-index", 0),
+                                )
                                 vnfr_update[vnfr_update_text + ".fixed-ip"] = True
                             if iface_inst_param.get("mac-address"):
-                                vnfr_update[vnfr_update_text + ".mac-address"] = increment_ip_mac(
-                                    iface_inst_param.get("mac-address"), vdur.get("count-index", 0))
+                                vnfr_update[
+                                    vnfr_update_text + ".mac-address"
+                                ] = increment_ip_mac(
+                                    iface_inst_param.get("mac-address"),
+                                    vdur.get("count-index", 0),
+                                )
                                 vnfr_update[vnfr_update_text + ".fixed-mac"] = True
                             if iface_inst_param.get("floating-ip-required"):
-                                vnfr_update[vnfr_update_text + ".floating-ip-required"] = True
+                                vnfr_update[
+                                    vnfr_update_text + ".floating-ip-required"
+                                ] = True
                 # get vnf.internal-vld.internal-connection-point instantiation params to update vnfr.vdur.interfaces
                 # TODO update vld with the ip-profile
-                for ivld_inst_param in get_iterable(vnf_inst_params.get("internal-vld")):
-                    for icp_inst_param in get_iterable(ivld_inst_param.get("internal-connection-point")):
+                for ivld_inst_param in get_iterable(
+                    vnf_inst_params.get("internal-vld")
+                ):
+                    for icp_inst_param in get_iterable(
+                        ivld_inst_param.get("internal-connection-point")
+                    ):
                         # look for iface
                         for vdur_index, vdur in enumerate(vnfr["vdur"]):
                             for iface_index, iface in enumerate(vdur["interfaces"]):
-                                if iface.get("internal-connection-point-ref") == icp_inst_param["id-ref"]:
-                                    vnfr_update_text = "vdur.{}.interfaces.{}".format(vdur_index, iface_index)
+                                if (
+                                    iface.get("internal-connection-point-ref")
+                                    == icp_inst_param["id-ref"]
+                                ):
+                                    vnfr_update_text = "vdur.{}.interfaces.{}".format(
+                                        vdur_index, iface_index
+                                    )
                                     if icp_inst_param.get("ip-address"):
-                                        vnfr_update[vnfr_update_text + ".ip-address"] = increment_ip_mac(
-                                            icp_inst_param.get("ip-address"), vdur.get("count-index", 0))
-                                        vnfr_update[vnfr_update_text + ".fixed-ip"] = True
+                                        vnfr_update[
+                                            vnfr_update_text + ".ip-address"
+                                        ] = increment_ip_mac(
+                                            icp_inst_param.get("ip-address"),
+                                            vdur.get("count-index", 0),
+                                        )
+                                        vnfr_update[
+                                            vnfr_update_text + ".fixed-ip"
+                                        ] = True
                                     if icp_inst_param.get("mac-address"):
-                                        vnfr_update[vnfr_update_text + ".mac-address"] = increment_ip_mac(
-                                            icp_inst_param.get("mac-address"), vdur.get("count-index", 0))
-                                        vnfr_update[vnfr_update_text + ".fixed-mac"] = True
+                                        vnfr_update[
+                                            vnfr_update_text + ".mac-address"
+                                        ] = increment_ip_mac(
+                                            icp_inst_param.get("mac-address"),
+                                            vdur.get("count-index", 0),
+                                        )
+                                        vnfr_update[
+                                            vnfr_update_text + ".fixed-mac"
+                                        ] = True
                                     break
             # get ip address from instantiation parameters.vld.vnfd-connection-point-ref
             for vld_inst_param in get_iterable(indata.get("vld")):
-                for vnfcp_inst_param in get_iterable(vld_inst_param.get("vnfd-connection-point-ref")):
+                for vnfcp_inst_param in get_iterable(
+                    vld_inst_param.get("vnfd-connection-point-ref")
+                ):
                     if vnfcp_inst_param["member-vnf-index-ref"] != member_vnf_index:
                         continue
                     # look for iface
                     for vdur_index, vdur in enumerate(vnfr["vdur"]):
                         for iface_index, iface in enumerate(vdur["interfaces"]):
-                            if iface.get("external-connection-point-ref") == \
-                                    vnfcp_inst_param["vnfd-connection-point-ref"]:
-                                vnfr_update_text = "vdur.{}.interfaces.{}".format(vdur_index, iface_index)
+                            if (
+                                iface.get("external-connection-point-ref")
+                                == vnfcp_inst_param["vnfd-connection-point-ref"]
+                            ):
+                                vnfr_update_text = "vdur.{}.interfaces.{}".format(
+                                    vdur_index, iface_index
+                                )
                                 if vnfcp_inst_param.get("ip-address"):
-                                    vnfr_update[vnfr_update_text + ".ip-address"] = increment_ip_mac(
-                                        vnfcp_inst_param.get("ip-address"), vdur.get("count-index", 0))
+                                    vnfr_update[
+                                        vnfr_update_text + ".ip-address"
+                                    ] = increment_ip_mac(
+                                        vnfcp_inst_param.get("ip-address"),
+                                        vdur.get("count-index", 0),
+                                    )
                                     vnfr_update[vnfr_update_text + ".fixed-ip"] = True
                                 if vnfcp_inst_param.get("mac-address"):
-                                    vnfr_update[vnfr_update_text + ".mac-address"] = increment_ip_mac(
-                                        vnfcp_inst_param.get("mac-address"), vdur.get("count-index", 0))
+                                    vnfr_update[
+                                        vnfr_update_text + ".mac-address"
+                                    ] = increment_ip_mac(
+                                        vnfcp_inst_param.get("mac-address"),
+                                        vdur.get("count-index", 0),
+                                    )
                                     vnfr_update[vnfr_update_text + ".fixed-mac"] = True
                                 break
 
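The block above resolves per-VDU addressing from the instantiation parameters: it builds dot-notation update keys such as "vdur.0.interfaces.1.ip-address" and offsets the requested address by the VDU's count-index through increment_ip_mac. A minimal sketch of that offsetting, assuming the plain IPv4 case (the helper below is illustrative, not OSM's actual increment_ip_mac):

    import ipaddress

    def increment_ip_mac_sketch(ip_address, count_index):
        # Offset an IPv4 address by the VDU replica index (count-index)
        return str(ipaddress.IPv4Address(ip_address) + count_index)

    # A replica with count-index 2 that requested 192.168.1.10 would get
    #   increment_ip_mac_sketch("192.168.1.10", 2)  ->  "192.168.1.12"
    # stored under a key like "vdur.<vdur_index>.interfaces.<iface_index>.ip-address",
    # with the matching ".fixed-ip" key set to True.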
@@ -1248,15 +1688,24 @@ class NsLcmOpTopic(BaseTopic):
                 vnfr_update_rollback["vca-id"] = vnfr.get("vca-id")
 
             # get pdu
-            ifaces_forcing_vim_network = self._look_for_pdu(session, rollback, vnfr, vim_account, vnfr_update,
-                                                            vnfr_update_rollback)
+            ifaces_forcing_vim_network = self._look_for_pdu(
+                session, rollback, vnfr, vim_account, vnfr_update, vnfr_update_rollback
+            )
 
             # get kdus
-            ifaces_forcing_vim_network += self._look_for_k8scluster(session, rollback, vnfr, vim_account, vnfr_update,
-                                                                    vnfr_update_rollback)
+            ifaces_forcing_vim_network += self._look_for_k8scluster(
+                session, rollback, vnfr, vim_account, vnfr_update, vnfr_update_rollback
+            )
             # update database vnfr
             self.db.set_one("vnfrs", {"_id": vnfr["_id"]}, vnfr_update)
-            rollback.append({"topic": "vnfrs", "_id": vnfr["_id"], "operation": "set", "content": vnfr_update_rollback})
+            rollback.append(
+                {
+                    "topic": "vnfrs",
+                    "_id": vnfr["_id"],
+                    "operation": "set",
+                    "content": vnfr_update_rollback,
+                }
+            )
 
             # Update indata in case pdu forces to use a concrete vim-network-name
             # TODO check if the user has already inserted a vim-network-name and raise an error
@@ -1266,17 +1715,33 @@ class NsLcmOpTopic(BaseTopic):
                 if iface_info.get("ns-vld-id"):
                     if "vld" not in indata:
                         indata["vld"] = []
-                    indata["vld"].append({key: iface_info[key] for key in
-                                          ("name", "vim-network-name", "vim-network-id") if iface_info.get(key)})
+                    indata["vld"].append(
+                        {
+                            key: iface_info[key]
+                            for key in ("name", "vim-network-name", "vim-network-id")
+                            if iface_info.get(key)
+                        }
+                    )
 
                 elif iface_info.get("vnf-vld-id"):
                     if "vnf" not in indata:
                         indata["vnf"] = []
-                    indata["vnf"].append({
-                        "member-vnf-index": member_vnf_index,
-                        "internal-vld": [{key: iface_info[key] for key in
-                                          ("name", "vim-network-name", "vim-network-id") if iface_info.get(key)}]
-                    })
+                    indata["vnf"].append(
+                        {
+                            "member-vnf-index": member_vnf_index,
+                            "internal-vld": [
+                                {
+                                    key: iface_info[key]
+                                    for key in (
+                                        "name",
+                                        "vim-network-name",
+                                        "vim-network-id",
+                                    )
+                                    if iface_info.get(key)
+                                }
+                            ],
+                        }
+                    )
 
     @staticmethod
     def _create_nslcmop(nsr_id, operation, params):
@@ -1307,7 +1772,7 @@ class NsLcmOpTopic(BaseTopic):
             "links": {
                 "self": "/osm/nslcm/v1/ns_lcm_op_occs/" + _id,
                 "nsInstance": "/osm/nslcm/v1/ns_instances/" + nsr_id,
-            }
+            },
         }
         return nslcmop
 
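For reference, the "links" of the operation descriptor returned by _create_nslcmop are NBI-relative URLs composed from the generated operation id and the NS instance id; with invented ids they would look roughly like this (values are illustrative only):

    # Illustrative values; the real _id is generated inside _create_nslcmop
    _id = "11111111-2222-3333-4444-555555555555"     # ns_lcm_op_occ id
    nsr_id = "aaaaaaaa-bbbb-cccc-dddd-eeeeeeeeeeee"  # ns_instance id
    links = {
        "self": "/osm/nslcm/v1/ns_lcm_op_occs/" + _id,
        "nsInstance": "/osm/nslcm/v1/ns_instances/" + nsr_id,
    }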
@@ -1321,10 +1786,18 @@ class NsLcmOpTopic(BaseTopic):
         vims = self.db.get_list("vim_accounts", db_filter)
         vimAccounts = []
         for vim in vims:
-            vimAccounts.append(vim['_id'])
+            vimAccounts.append(vim["_id"])
         return vimAccounts
 
-    def new(self, rollback, session, indata=None, kwargs=None, headers=None, slice_object=False):
+    def new(
+        self,
+        rollback,
+        session,
+        indata=None,
+        kwargs=None,
+        headers=None,
+        slice_object=False,
+    ):
         """
         Performs a new operation over a ns
         :param rollback: list to which items created in the database are appended, in case a rollback must be done
@@ -1336,14 +1809,21 @@ class NsLcmOpTopic(BaseTopic):
         :param headers: http request headers
         :return: id of the nslcmops
         """
+
         def check_if_nsr_is_not_slice_member(session, nsr_id):
             nsis = None
             db_filter = self._get_project_filter(session)
             db_filter["_admin.nsrs-detailed-list.ANYINDEX.nsrId"] = nsr_id
-            nsis = self.db.get_one("nsis", db_filter, fail_on_empty=False, fail_on_more=False)
+            nsis = self.db.get_one(
+                "nsis", db_filter, fail_on_empty=False, fail_on_more=False
+            )
             if nsis:
-                raise EngineException("The NS instance {} cannot be terminated because is used by the slice {}".format(
-                                      nsr_id, nsis["_id"]), http_code=HTTPStatus.CONFLICT)
+                raise EngineException(
+                    "The NS instance {} cannot be terminated because it is used by the slice {}".format(
+                        nsr_id, nsis["_id"]
+                    ),
+                    http_code=HTTPStatus.CONFLICT,
+                )
 
         try:
             # Override descriptor with query string kwargs
@@ -1360,17 +1840,31 @@ class NsLcmOpTopic(BaseTopic):
             # initial checking
             if operation == "terminate" and slice_object is False:
                 check_if_nsr_is_not_slice_member(session, nsr["_id"])
-            if not nsr["_admin"].get("nsState") or nsr["_admin"]["nsState"] == "NOT_INSTANTIATED":
+            if (
+                not nsr["_admin"].get("nsState")
+                or nsr["_admin"]["nsState"] == "NOT_INSTANTIATED"
+            ):
                 if operation == "terminate" and indata.get("autoremove"):
                     # NSR must be deleted
-                    return None, None    # a none in this case is used to indicate not instantiated. It can be removed
+                    return (
+                        None,
+                        None,
+                    )  # a None return here indicates the NS is not instantiated, so the NSR can be removed
                 if operation != "instantiate":
-                    raise EngineException("ns_instance '{}' cannot be '{}' because it is not instantiated".format(
-                        nsInstanceId, operation), HTTPStatus.CONFLICT)
+                    raise EngineException(
+                        "ns_instance '{}' cannot be '{}' because it is not instantiated".format(
+                            nsInstanceId, operation
+                        ),
+                        HTTPStatus.CONFLICT,
+                    )
             else:
                 if operation == "instantiate" and not session["force"]:
-                    raise EngineException("ns_instance '{}' cannot be '{}' because it is already instantiated".format(
-                        nsInstanceId, operation), HTTPStatus.CONFLICT)
+                    raise EngineException(
+                        "ns_instance '{}' cannot be '{}' because it is already instantiated".format(
+                            nsInstanceId, operation
+                        ),
+                        HTTPStatus.CONFLICT,
+                    )
             self._check_ns_operation(session, nsr, operation, indata)
 
             if operation == "instantiate":
@@ -1378,10 +1872,14 @@ class NsLcmOpTopic(BaseTopic):
 
             nslcmop_desc = self._create_nslcmop(nsInstanceId, operation, indata)
             _id = nslcmop_desc["_id"]
-            self.format_on_new(nslcmop_desc, session["project_id"], make_public=session["public"])
+            self.format_on_new(
+                nslcmop_desc, session["project_id"], make_public=session["public"]
+            )
             if indata.get("placement-engine"):
                 # Save valid vim accounts in lcm operation descriptor
-                nslcmop_desc['operationParams']['validVimAccounts'] = self._get_enabled_vims(session)
+                nslcmop_desc["operationParams"][
+                    "validVimAccounts"
+                ] = self._get_enabled_vims(session)
             self.db.create("nslcmops", nslcmop_desc)
             rollback.append({"topic": "nslcmops", "_id": _id})
             if not slice_object:
@@ -1393,10 +1891,14 @@ class NsLcmOpTopic(BaseTopic):
         #     raise EngineException("Cannot get ns_instance '{}': {}".format(e), HTTPStatus.NOT_FOUND)
 
     def delete(self, session, _id, dry_run=False, not_send_msg=None):
-        raise EngineException("Method delete called directly", HTTPStatus.INTERNAL_SERVER_ERROR)
+        raise EngineException(
+            "Method delete called directly", HTTPStatus.INTERNAL_SERVER_ERROR
+        )
 
     def edit(self, session, _id, indata=None, kwargs=None, content=None):
-        raise EngineException("Method edit called directly", HTTPStatus.INTERNAL_SERVER_ERROR)
+        raise EngineException(
+            "Method edit called directly", HTTPStatus.INTERNAL_SERVER_ERROR
+        )
 
 
 class NsiTopic(BaseTopic):
@@ -1425,11 +1927,17 @@ class NsiTopic(BaseTopic):
         if additional_params:
             for k, v in additional_params.items():
                 if not isinstance(k, str):
-                    raise EngineException("Invalid param at additionalParamsForNsi:{}. Only string keys are allowed".
-                                          format(k))
+                    raise EngineException(
+                        "Invalid param at additionalParamsForNsi:{}. Only string keys are allowed".format(
+                            k
+                        )
+                    )
                 if "." in k or "$" in k:
-                    raise EngineException("Invalid param at additionalParamsForNsi:{}. Keys must not contain dots or $".
-                                          format(k))
+                    raise EngineException(
+                        "Invalid param at additionalParamsForNsi:{}. Keys must not contain dots or $".format(
+                            k
+                        )
+                    )
                 if isinstance(v, (dict, tuple, list)):
                     additional_params[k] = "!!yaml " + safe_dump(v)
         return additional_params
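To illustrate what _format_addional_params does with the additionalParamsForNsi values: keys must be plain strings without "." or "$", and any dict/tuple/list value is serialized and prefixed with "!!yaml", presumably so it can be re-parsed downstream. A small sketch, assuming safe_dump here is PyYAML's yaml.safe_dump:

    from yaml import safe_dump

    additional_params = {"vnf-count": 2, "routes": {"default": "10.0.0.1"}}
    for k, v in additional_params.items():
        if isinstance(v, (dict, tuple, list)):
            additional_params[k] = "!!yaml " + safe_dump(v)

    # additional_params now holds approximately:
    #   {"vnf-count": 2, "routes": "!!yaml default: 10.0.0.1\n"}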
@@ -1445,8 +1953,12 @@ class NsiTopic(BaseTopic):
             return
         nstd_id = descriptor["nst-ref"]
         if not self.get_item_list(session, "nsts", {"id": nstd_id}):
-            raise EngineException("Descriptor error at nst-ref='{}' references a non exist nstd".format(nstd_id),
-                                  http_code=HTTPStatus.CONFLICT)
+            raise EngineException(
+                "Descriptor error at nst-ref='{}' references a non-existent nstd".format(
+                    nstd_id
+                ),
+                http_code=HTTPStatus.CONFLICT,
+            )
 
     def check_conflict_on_del(self, session, _id, db_content):
         """
@@ -1460,9 +1972,11 @@ class NsiTopic(BaseTopic):
             return
         nsi = db_content
         if nsi["_admin"].get("nsiState") == "INSTANTIATED":
-            raise EngineException("nsi '{}' cannot be deleted because it is in 'INSTANTIATED' state. "
-                                  "Launch 'terminate' operation first; or force deletion".format(_id),
-                                  http_code=HTTPStatus.CONFLICT)
+            raise EngineException(
+                "nsi '{}' cannot be deleted because it is in 'INSTANTIATED' state. "
+                "Launch 'terminate' operation first; or force deletion".format(_id),
+                http_code=HTTPStatus.CONFLICT,
+            )
 
     def delete_extra(self, session, _id, db_content, not_send_msg=None):
         """
@@ -1480,14 +1994,20 @@ class NsiTopic(BaseTopic):
         for nsrs_detailed_item in nsir["_admin"]["nsrs-detailed-list"]:
             nsr_id = nsrs_detailed_item["nsrId"]
             if nsrs_detailed_item.get("shared"):
-                _filter = {"_admin.nsrs-detailed-list.ANYINDEX.shared": True,
-                           "_admin.nsrs-detailed-list.ANYINDEX.nsrId": nsr_id,
-                           "_id.ne": nsir["_id"]}
-                nsi = self.db.get_one("nsis", _filter, fail_on_empty=False, fail_on_more=False)
+                _filter = {
+                    "_admin.nsrs-detailed-list.ANYINDEX.shared": True,
+                    "_admin.nsrs-detailed-list.ANYINDEX.nsrId": nsr_id,
+                    "_id.ne": nsir["_id"],
+                }
+                nsi = self.db.get_one(
+                    "nsis", _filter, fail_on_empty=False, fail_on_more=False
+                )
                 if nsi:  # last one using nsr
                     continue
             try:
-                self.nsrTopic.delete(session, nsr_id, dry_run=False, not_send_msg=not_send_msg)
+                self.nsrTopic.delete(
+                    session, nsr_id, dry_run=False, not_send_msg=not_send_msg
+                )
             except (DbException, EngineException) as e:
                 if e.http_code == HTTPStatus.NOT_FOUND:
                     pass
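The shared-NSR check in delete_extra above leans on the dot-path filter syntax of the common database layer: as used here, the "ANYINDEX" token appears to match any element of the nsrs-detailed-list array and the ".ne" suffix a not-equal comparison, i.e. "find another NSI, different from the one being deleted, that still references this shared NSR". An illustrative filter with invented ids:

    # Interpretation of ANYINDEX / ".ne" is an assumption based on their usage here
    nsr_id = "aaaaaaaa-bbbb-cccc-dddd-eeeeeeeeeeee"          # shared NSR id
    current_nsi_id = "11111111-2222-3333-4444-555555555555"  # NSI being deleted
    _filter = {
        "_admin.nsrs-detailed-list.ANYINDEX.shared": True,
        "_admin.nsrs-detailed-list.ANYINDEX.nsrId": nsr_id,
        "_id.ne": current_nsi_id,
    }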
@@ -1501,10 +2021,18 @@ class NsiTopic(BaseTopic):
         nsir_admin = nsir.get("_admin")
         if nsir_admin and nsir_admin.get("nst-id"):
             # check if used by another NSI
-            nsis_list = self.db.get_one("nsis", {"nst-id": nsir_admin["nst-id"]},
-                                        fail_on_empty=False, fail_on_more=False)
+            nsis_list = self.db.get_one(
+                "nsis",
+                {"nst-id": nsir_admin["nst-id"]},
+                fail_on_empty=False,
+                fail_on_more=False,
+            )
             if not nsis_list:
-                self.db.set_one("nsts", {"_id": nsir_admin["nst-id"]}, {"_admin.usageState": "NOT_IN_USE"})
+                self.db.set_one(
+                    "nsts",
+                    {"_id": nsir_admin["nst-id"]},
+                    {"_admin.usageState": "NOT_IN_USE"},
+                )
 
     def new(self, rollback, session, indata=None, kwargs=None, headers=None):
         """
@@ -1528,22 +2056,30 @@ class NsiTopic(BaseTopic):
             slice_request = self._validate_input_new(slice_request, session["force"])
 
             # look for nstd
-            step = "getting nstd id='{}' from database".format(slice_request.get("nstId"))
+            step = "getting nstd id='{}' from database".format(
+                slice_request.get("nstId")
+            )
             _filter = self._get_project_filter(session)
             _filter["_id"] = slice_request["nstId"]
             nstd = self.db.get_one("nsts", _filter)
             # check NST is not disabled
             step = "checking NST operationalState"
             if nstd["_admin"]["operationalState"] == "DISABLED":
-                raise EngineException("nst with id '{}' is DISABLED, and thus cannot be used to create a netslice "
-                                      "instance".format(slice_request["nstId"]), http_code=HTTPStatus.CONFLICT)
+                raise EngineException(
+                    "nst with id '{}' is DISABLED, and thus cannot be used to create a netslice "
+                    "instance".format(slice_request["nstId"]),
+                    http_code=HTTPStatus.CONFLICT,
+                )
             del _filter["_id"]
 
             # check NSD is not disabled
             step = "checking operationalState"
             if nstd["_admin"]["operationalState"] == "DISABLED":
-                raise EngineException("nst with id '{}' is DISABLED, and thus cannot be used to create "
-                                      "a network slice".format(slice_request["nstId"]), http_code=HTTPStatus.CONFLICT)
+                raise EngineException(
+                    "nst with id '{}' is DISABLED, and thus cannot be used to create "
+                    "a network slice".format(slice_request["nstId"]),
+                    http_code=HTTPStatus.CONFLICT,
+                )
 
             nstd.pop("_admin", None)
             nstd_id = nstd.pop("_id", None)
@@ -1562,11 +2098,13 @@ class NsiTopic(BaseTopic):
                 "nsr-ref-list": [],
                 "vlr-list": [],
                 "_id": nsi_id,
-                "additionalParamsForNsi": self._format_addional_params(slice_request)
+                "additionalParamsForNsi": self._format_addional_params(slice_request),
             }
 
             step = "creating nsi at database"
-            self.format_on_new(nsi_descriptor, session["project_id"], make_public=session["public"])
+            self.format_on_new(
+                nsi_descriptor, session["project_id"], make_public=session["public"]
+            )
             nsi_descriptor["_admin"]["nsiState"] = "NOT_INSTANTIATED"
             nsi_descriptor["_admin"]["netslice-subnet"] = None
             nsi_descriptor["_admin"]["deployed"] = {}
@@ -1596,11 +2134,14 @@ class NsiTopic(BaseTopic):
             for member_ns in nstd["netslice-subnet"]:
                 nsd_id = member_ns["nsd-ref"]
                 step = "getting nstd id='{}' constituent-nsd='{}' from database".format(
-                    member_ns["nsd-ref"], member_ns["id"])
+                    member_ns["nsd-ref"], member_ns["id"]
+                )
                 if nsd_id not in needed_nsds:
                     # Obtain nsd
                     _filter["id"] = nsd_id
-                    nsd = self.db.get_one("nsds", _filter, fail_on_empty=True, fail_on_more=True)
+                    nsd = self.db.get_one(
+                        "nsds", _filter, fail_on_empty=True, fail_on_more=True
+                    )
                     del _filter["id"]
                     nsd.pop("_admin")
                     needed_nsds[nsd_id] = nsd
@@ -1610,7 +2151,8 @@ class NsiTopic(BaseTopic):
                 services.append(member_ns)
 
                 step = "filling nsir nsd-id='{}' constituent-nsd='{}' from database".format(
-                    member_ns["nsd-ref"], member_ns["id"])
+                    member_ns["nsd-ref"], member_ns["id"]
+                )
 
             # creates Network Services records (NSRs)
             step = "creating nsrs at database using NsrTopic.new()"
@@ -1623,9 +2165,13 @@ class NsiTopic(BaseTopic):
                 indata_ns = {}
                 # Is the nss shared and instantiated?
                 _filter["_admin.nsrs-detailed-list.ANYINDEX.shared"] = True
-                _filter["_admin.nsrs-detailed-list.ANYINDEX.nsd-id"] = service["nsd-ref"]
+                _filter["_admin.nsrs-detailed-list.ANYINDEX.nsd-id"] = service[
+                    "nsd-ref"
+                ]
                 _filter["_admin.nsrs-detailed-list.ANYINDEX.nss-id"] = service["id"]
-                nsi = self.db.get_one("nsis", _filter, fail_on_empty=False, fail_on_more=False)
+                nsi = self.db.get_one(
+                    "nsis", _filter, fail_on_empty=False, fail_on_more=False
+                )
                 if nsi and service.get("is-shared-nss"):
                     nsrs_detailed_list = nsi["_admin"]["nsrs-detailed-list"]
                     for nsrs_detailed_item in nsrs_detailed_list:
@@ -1642,9 +2188,11 @@ class NsiTopic(BaseTopic):
                     if service.get("instantiation-parameters"):
                         indata_ns = deepcopy(service["instantiation-parameters"])
                         # del service["instantiation-parameters"]
-                        
+
                     indata_ns["nsdId"] = service["_id"]
-                    indata_ns["nsName"] = slice_request.get("nsiName") + "." + service["id"]
+                    indata_ns["nsName"] = (
+                        slice_request.get("nsiName") + "." + service["id"]
+                    )
                     indata_ns["vimAccountId"] = slice_request.get("vimAccountId")
                     indata_ns["nsDescription"] = service["description"]
                     if slice_request.get("ssh_keys"):
@@ -1656,12 +2204,19 @@ class NsiTopic(BaseTopic):
                                 copy_ns_param = deepcopy(ns_param)
                                 del copy_ns_param["id"]
                                 indata_ns.update(copy_ns_param)
-                                break                   
+                                break
 
                     # Creates Nsr objects
-                    _id_nsr, _ = self.nsrTopic.new(rollback, session, indata_ns, kwargs, headers)
-                nsrs_item = {"nsrId": _id_nsr, "shared": service.get("is-shared-nss"), "nsd-id": service["nsd-ref"], 
-                             "nss-id": service["id"], "nslcmop_instantiate": None}
+                    _id_nsr, _ = self.nsrTopic.new(
+                        rollback, session, indata_ns, kwargs, headers
+                    )
+                nsrs_item = {
+                    "nsrId": _id_nsr,
+                    "shared": service.get("is-shared-nss"),
+                    "nsd-id": service["nsd-ref"],
+                    "nss-id": service["id"],
+                    "nslcmop_instantiate": None,
+                }
                 indata_ns["nss-id"] = service["id"]
                 nsrs_list.append(nsrs_item)
                 nsi_netslice_subnet.append(indata_ns)
@@ -1671,20 +2226,26 @@ class NsiTopic(BaseTopic):
             # Adding the nsrs list to the nsi
             nsi_descriptor["_admin"]["nsrs-detailed-list"] = nsrs_list
             nsi_descriptor["_admin"]["netslice-subnet"] = nsi_netslice_subnet
-            self.db.set_one("nsts", {"_id": slice_request["nstId"]}, {"_admin.usageState": "IN_USE"})
+            self.db.set_one(
+                "nsts", {"_id": slice_request["nstId"]}, {"_admin.usageState": "IN_USE"}
+            )
 
             # Creating the entry in the database
             self.db.create("nsis", nsi_descriptor)
             rollback.append({"topic": "nsis", "_id": nsi_id})
             return nsi_id, None
-        except Exception as e:   # TODO remove try Except, it is captured at nbi.py
-            self.logger.exception("Exception {} at NsiTopic.new()".format(e), exc_info=True)
+        except Exception as e:  # TODO remove this try/except; exceptions are already captured at nbi.py
+            self.logger.exception(
+                "Exception {} at NsiTopic.new()".format(e), exc_info=True
+            )
             raise EngineException("Error {}: {}".format(step, e))
         except ValidationError as e:
             raise EngineException(e, HTTPStatus.UNPROCESSABLE_ENTITY)
 
     def edit(self, session, _id, indata=None, kwargs=None, content=None):
-        raise EngineException("Method edit called directly", HTTPStatus.INTERNAL_SERVER_ERROR)
+        raise EngineException(
+            "Method edit called directly", HTTPStatus.INTERNAL_SERVER_ERROR
+        )
 
 
 class NsiLcmOpTopic(BaseTopic):
@@ -1692,9 +2253,9 @@ class NsiLcmOpTopic(BaseTopic):
     topic_msg = "nsi"
     operation_schema = {  # mapping between operation and jsonschema to validate
         "instantiate": nsi_instantiate,
-        "terminate": None
+        "terminate": None,
     }
-    
+
     def __init__(self, db, fs, msg, auth):
         BaseTopic.__init__(self, db, fs, msg, auth)
         self.nsi_NsLcmOpTopic = NsLcmOpTopic(self.db, self.fs, self.msg, self.auth)
@@ -1721,11 +2282,14 @@ class NsiLcmOpTopic(BaseTopic):
                         nsds[nsd_id] = self.db.get_one("nsds", _filter)
                     return nsds[nsd_id]
             else:
-                raise EngineException("Invalid parameter nstId='{}' is not one of the "
-                                      "nst:netslice-subnet".format(nstId))
+                raise EngineException(
+                    "Invalid parameter nstId='{}' is not one of the "
+                    "nst:netslice-subnet".format(nstId)
+                )
+
         if operation == "instantiate":
             # check the existence of netslice-subnet items
-            for in_nst in get_iterable(indata.get("netslice-subnet")):   
+            for in_nst in get_iterable(indata.get("netslice-subnet")):
                 check_valid_netslice_subnet_id(in_nst["id"])
 
     def _create_nsilcmop(self, session, netsliceInstanceId, operation, params):
@@ -1744,8 +2308,9 @@ class NsiLcmOpTopic(BaseTopic):
             "isCancelPending": False,
             "links": {
                 "self": "/osm/nsilcm/v1/nsi_lcm_op_occs/" + _id,
-                "netsliceInstanceId": "/osm/nsilcm/v1/netslice_instances/" + netsliceInstanceId,
-            }
+                "netsliceInstanceId": "/osm/nsilcm/v1/netslice_instances/"
+                + netsliceInstanceId,
+            },
         }
         return nsilcmop
 
@@ -1755,13 +2320,27 @@ class NsiLcmOpTopic(BaseTopic):
                 for admin_subnet_item in nsir["_admin"].get("netslice-subnet"):
                     if admin_subnet_item["nss-id"] == nst_sb_item["id"]:
                         for admin_vld_item in nsir["_admin"].get("netslice-vld"):
-                            for admin_vld_nss_cp_ref_item in admin_vld_item["nss-connection-point-ref"]:
-                                if admin_subnet_item["nss-id"] == admin_vld_nss_cp_ref_item["nss-ref"]:
-                                    if not nsr_item["nsrId"] in admin_vld_item["shared-nsrs-list"]:
-                                        admin_vld_item["shared-nsrs-list"].append(nsr_item["nsrId"])
+                            for admin_vld_nss_cp_ref_item in admin_vld_item[
+                                "nss-connection-point-ref"
+                            ]:
+                                if (
+                                    admin_subnet_item["nss-id"]
+                                    == admin_vld_nss_cp_ref_item["nss-ref"]
+                                ):
+                                    if (
+                                        nsr_item["nsrId"]
+                                        not in admin_vld_item["shared-nsrs-list"]
+                                    ):
+                                        admin_vld_item["shared-nsrs-list"].append(
+                                            nsr_item["nsrId"]
+                                        )
                                     break
         # self.db.set_one("nsis", {"_id": nsir["_id"]}, nsir)
-        self.db.set_one("nsis", {"_id": nsir["_id"]}, {"_admin.netslice-vld": nsir["_admin"].get("netslice-vld")})
+        self.db.set_one(
+            "nsis",
+            {"_id": nsir["_id"]},
+            {"_admin.netslice-vld": nsir["_admin"].get("netslice-vld")},
+        )
 
     def new(self, rollback, session, indata=None, kwargs=None, headers=None):
         """
@@ -1790,18 +2369,32 @@ class NsiLcmOpTopic(BaseTopic):
             del _filter["_id"]
 
             # initial checking
-            if not nsir["_admin"].get("nsiState") or nsir["_admin"]["nsiState"] == "NOT_INSTANTIATED":
+            if (
+                not nsir["_admin"].get("nsiState")
+                or nsir["_admin"]["nsiState"] == "NOT_INSTANTIATED"
+            ):
                 if operation == "terminate" and indata.get("autoremove"):
                     # NSIR must be deleted
-                    return None, None    # a none in this case is used to indicate not instantiated. It can be removed
+                    return (
+                        None,
+                        None,
+                    )  # a None return here indicates the NSI is not instantiated, so it can be removed
                 if operation != "instantiate":
-                    raise EngineException("netslice_instance '{}' cannot be '{}' because it is not instantiated".format(
-                        netsliceInstanceId, operation), HTTPStatus.CONFLICT)
+                    raise EngineException(
+                        "netslice_instance '{}' cannot be '{}' because it is not instantiated".format(
+                            netsliceInstanceId, operation
+                        ),
+                        HTTPStatus.CONFLICT,
+                    )
             else:
                 if operation == "instantiate" and not session["force"]:
-                    raise EngineException("netslice_instance '{}' cannot be '{}' because it is already instantiated".
-                                          format(netsliceInstanceId, operation), HTTPStatus.CONFLICT)
-            
+                    raise EngineException(
+                        "netslice_instance '{}' cannot be '{}' because it is already instantiated".format(
+                            netsliceInstanceId, operation
+                        ),
+                        HTTPStatus.CONFLICT,
+                    )
+
             # Creating all the NS_operation (nslcmop)
             # Get service list from db
             nsrs_list = nsir["_admin"]["nsrs-detailed-list"]
@@ -1812,23 +2405,41 @@ class NsiLcmOpTopic(BaseTopic):
                 if nsr_item.get("shared"):
                     _filter["_admin.nsrs-detailed-list.ANYINDEX.shared"] = True
                     _filter["_admin.nsrs-detailed-list.ANYINDEX.nsrId"] = nsr_id
-                    _filter["_admin.nsrs-detailed-list.ANYINDEX.nslcmop_instantiate.ne"] = None
+                    _filter[
+                        "_admin.nsrs-detailed-list.ANYINDEX.nslcmop_instantiate.ne"
+                    ] = None
                     _filter["_id.ne"] = netsliceInstanceId
-                    nsi = self.db.get_one("nsis", _filter, fail_on_empty=False, fail_on_more=False)
+                    nsi = self.db.get_one(
+                        "nsis", _filter, fail_on_empty=False, fail_on_more=False
+                    )
                     if operation == "terminate":
-                        _update = {"_admin.nsrs-detailed-list.{}.nslcmop_instantiate".format(index): None}
+                        _update = {
+                            "_admin.nsrs-detailed-list.{}.nslcmop_instantiate".format(
+                                index
+                            ): None
+                        }
                         self.db.set_one("nsis", {"_id": nsir["_id"]}, _update)
-                        if nsi:  # other nsi is using this nsr and it needs this nsr instantiated
+                        if (
+                            nsi
+                        ):  # other nsi is using this nsr and it needs this nsr instantiated
                             continue  # do not create nsilcmop
                     else:  # instantiate
                         # look for the first nsi fulfilling the conditions that is not the current NSIR
                         if nsi:
-                            nsi_nsr_item = next(n for n in nsi["_admin"]["nsrs-detailed-list"] if
-                                                n["nsrId"] == nsr_id and n["shared"] and
-                                                n["nslcmop_instantiate"])
+                            nsi_nsr_item = next(
+                                n
+                                for n in nsi["_admin"]["nsrs-detailed-list"]
+                                if n["nsrId"] == nsr_id
+                                and n["shared"]
+                                and n["nslcmop_instantiate"]
+                            )
                             self.add_shared_nsr_2vld(nsir, nsr_item)
                             nslcmops.append(nsi_nsr_item["nslcmop_instantiate"])
-                            _update = {"_admin.nsrs-detailed-list.{}".format(index): nsi_nsr_item}
+                            _update = {
+                                "_admin.nsrs-detailed-list.{}".format(
+                                    index
+                                ): nsi_nsr_item
+                            }
                             self.db.set_one("nsis", {"_id": nsir["_id"]}, _update)
                             # continue to not create nslcmop since nsrs is shared and nsrs was created
                             continue
@@ -1849,15 +2460,23 @@ class NsiLcmOpTopic(BaseTopic):
 
                     # Creating NS_LCM_OP with the flag slice_object=True to not trigger the service instantiation
                     # message via kafka bus
-                    nslcmop, _ = self.nsi_NsLcmOpTopic.new(rollback, session, indata_ns, None, headers,
-                                                           slice_object=True)
+                    nslcmop, _ = self.nsi_NsLcmOpTopic.new(
+                        rollback, session, indata_ns, None, headers, slice_object=True
+                    )
                     nslcmops.append(nslcmop)
                     if operation == "instantiate":
-                        _update = {"_admin.nsrs-detailed-list.{}.nslcmop_instantiate".format(index): nslcmop}
+                        _update = {
+                            "_admin.nsrs-detailed-list.{}.nslcmop_instantiate".format(
+                                index
+                            ): nslcmop
+                        }
                         self.db.set_one("nsis", {"_id": nsir["_id"]}, _update)
                 except (DbException, EngineException) as e:
                     if e.http_code == HTTPStatus.NOT_FOUND:
-                        self.logger.info(logging_prefix + "skipping NS={} because not found".format(nsr_id))
+                        self.logger.info(
+                            logging_prefix
+                            + "skipping NS={} because not found".format(nsr_id)
+                        )
                         pass
                     else:
                         raise
@@ -1866,8 +2485,12 @@ class NsiLcmOpTopic(BaseTopic):
             indata["nslcmops_ids"] = nslcmops
             self._check_nsi_operation(session, nsir, operation, indata)
 
-            nsilcmop_desc = self._create_nsilcmop(session, netsliceInstanceId, operation, indata)
-            self.format_on_new(nsilcmop_desc, session["project_id"], make_public=session["public"])
+            nsilcmop_desc = self._create_nsilcmop(
+                session, netsliceInstanceId, operation, indata
+            )
+            self.format_on_new(
+                nsilcmop_desc, session["project_id"], make_public=session["public"]
+            )
             _id = self.db.create("nsilcmops", nsilcmop_desc)
             rollback.append({"topic": "nsilcmops", "_id": _id})
             self.msg.write("nsi", operation, nsilcmop_desc)
@@ -1876,7 +2499,11 @@ class NsiLcmOpTopic(BaseTopic):
             raise EngineException(e, HTTPStatus.UNPROCESSABLE_ENTITY)
 
     def delete(self, session, _id, dry_run=False, not_send_msg=None):
-        raise EngineException("Method delete called directly", HTTPStatus.INTERNAL_SERVER_ERROR)
+        raise EngineException(
+            "Method delete called directly", HTTPStatus.INTERNAL_SERVER_ERROR
+        )
 
     def edit(self, session, _id, indata=None, kwargs=None, content=None):
-        raise EngineException("Method edit called directly", HTTPStatus.INTERNAL_SERVER_ERROR)
+        raise EngineException(
+            "Method edit called directly", HTTPStatus.INTERNAL_SERVER_ERROR
+        )
index 9b1af5f..bec0cfa 100644 (file)
@@ -39,12 +39,12 @@ from osm_nbi import version as nbi_version, version_date as nbi_version_date
 
 __author__ = "Alfonso Tierno <alfonso.tiernosepulveda@telefonica.com>"
 
-__version__ = "0.1.3"    # file version, not NBI version
+__version__ = "0.1.3"  # file version, not NBI version
 version_date = "Aug 2019"
 
-database_version = '1.2'
-auth_database_version = '1.0'
-nbi_server = None           # instance of Server class
+database_version = "1.2"
+auth_database_version = "1.0"
+nbi_server = None  # instance of Server class
 subscription_thread = None  # instance of SubscriptionThread class
 
 """
@@ -209,277 +209,350 @@ valid_url_methods = {
     # contains allowed URL and methods, and the role_permission name
     "admin": {
         "v1": {
-            "tokens": {"METHODS": ("GET", "POST", "DELETE"),
-                       "ROLE_PERMISSION": "tokens:",
-                       "<ID>": {"METHODS": ("GET", "DELETE"),
-                                "ROLE_PERMISSION": "tokens:id:"
-                                }
-                       },
-            "users": {"METHODS": ("GET", "POST"),
-                      "ROLE_PERMISSION": "users:",
-                      "<ID>": {"METHODS": ("GET", "DELETE", "PATCH"),
-                               "ROLE_PERMISSION": "users:id:"
-                               }
-                      },
-            "projects": {"METHODS": ("GET", "POST"),
-                         "ROLE_PERMISSION": "projects:",
-                         "<ID>": {"METHODS": ("GET", "DELETE", "PATCH"),
-                                  "ROLE_PERMISSION": "projects:id:"}
-                         },
-            "roles": {"METHODS": ("GET", "POST"),
-                      "ROLE_PERMISSION": "roles:",
-                      "<ID>": {"METHODS": ("GET", "DELETE", "PATCH"),
-                               "ROLE_PERMISSION": "roles:id:"
-                               }
-                      },
-            "vims": {"METHODS": ("GET", "POST"),
-                     "ROLE_PERMISSION": "vims:",
-                     "<ID>": {"METHODS": ("GET", "DELETE", "PATCH"),
-                              "ROLE_PERMISSION": "vims:id:"
-                              }
-                     },
-            "vim_accounts": {"METHODS": ("GET", "POST"),
-                             "ROLE_PERMISSION": "vim_accounts:",
-                             "<ID>": {"METHODS": ("GET", "DELETE", "PATCH"),
-                                      "ROLE_PERMISSION": "vim_accounts:id:"
-                                      }
-                             },
-            "wim_accounts": {"METHODS": ("GET", "POST"),
-                             "ROLE_PERMISSION": "wim_accounts:",
-                             "<ID>": {"METHODS": ("GET", "DELETE", "PATCH"),
-                                      "ROLE_PERMISSION": "wim_accounts:id:"
-                                      }
-                             },
-            "sdns": {"METHODS": ("GET", "POST"),
-                     "ROLE_PERMISSION": "sdn_controllers:",
-                     "<ID>": {"METHODS": ("GET", "DELETE", "PATCH"),
-                              "ROLE_PERMISSION": "sdn_controllers:id:"
-                              }
-                     },
-            "k8sclusters": {"METHODS": ("GET", "POST"),
-                            "ROLE_PERMISSION": "k8sclusters:",
-                            "<ID>": {"METHODS": ("GET", "DELETE", "PATCH"),
-                                     "ROLE_PERMISSION": "k8sclusters:id:"
-                                     }
-                            },
-            "vca": {"METHODS": ("GET", "POST"),
-                    "ROLE_PERMISSION": "vca:",
-                    "<ID>": {"METHODS": ("GET", "DELETE", "PATCH"),
-                             "ROLE_PERMISSION": "vca:id:"
-                             }
-                    },
-            "k8srepos": {"METHODS": ("GET", "POST"),
-                         "ROLE_PERMISSION": "k8srepos:",
-                         "<ID>": {"METHODS": ("GET", "DELETE"),
-                                  "ROLE_PERMISSION": "k8srepos:id:"
-                                  }
-                         },
-            "osmrepos": {"METHODS": ("GET", "POST"),
-                         "ROLE_PERMISSION": "osmrepos:",
-                         "<ID>": {"METHODS": ("GET", "DELETE", "PATCH"),
-                                  "ROLE_PERMISSION": "osmrepos:id:"
-                                  }
-                         },
-            "domains": {"METHODS": ("GET", ),
-                        "ROLE_PERMISSION": "domains:",
-                        },
+            "tokens": {
+                "METHODS": ("GET", "POST", "DELETE"),
+                "ROLE_PERMISSION": "tokens:",
+                "<ID>": {"METHODS": ("GET", "DELETE"), "ROLE_PERMISSION": "tokens:id:"},
+            },
+            "users": {
+                "METHODS": ("GET", "POST"),
+                "ROLE_PERMISSION": "users:",
+                "<ID>": {
+                    "METHODS": ("GET", "DELETE", "PATCH"),
+                    "ROLE_PERMISSION": "users:id:",
+                },
+            },
+            "projects": {
+                "METHODS": ("GET", "POST"),
+                "ROLE_PERMISSION": "projects:",
+                "<ID>": {
+                    "METHODS": ("GET", "DELETE", "PATCH"),
+                    "ROLE_PERMISSION": "projects:id:",
+                },
+            },
+            "roles": {
+                "METHODS": ("GET", "POST"),
+                "ROLE_PERMISSION": "roles:",
+                "<ID>": {
+                    "METHODS": ("GET", "DELETE", "PATCH"),
+                    "ROLE_PERMISSION": "roles:id:",
+                },
+            },
+            "vims": {
+                "METHODS": ("GET", "POST"),
+                "ROLE_PERMISSION": "vims:",
+                "<ID>": {
+                    "METHODS": ("GET", "DELETE", "PATCH"),
+                    "ROLE_PERMISSION": "vims:id:",
+                },
+            },
+            "vim_accounts": {
+                "METHODS": ("GET", "POST"),
+                "ROLE_PERMISSION": "vim_accounts:",
+                "<ID>": {
+                    "METHODS": ("GET", "DELETE", "PATCH"),
+                    "ROLE_PERMISSION": "vim_accounts:id:",
+                },
+            },
+            "wim_accounts": {
+                "METHODS": ("GET", "POST"),
+                "ROLE_PERMISSION": "wim_accounts:",
+                "<ID>": {
+                    "METHODS": ("GET", "DELETE", "PATCH"),
+                    "ROLE_PERMISSION": "wim_accounts:id:",
+                },
+            },
+            "sdns": {
+                "METHODS": ("GET", "POST"),
+                "ROLE_PERMISSION": "sdn_controllers:",
+                "<ID>": {
+                    "METHODS": ("GET", "DELETE", "PATCH"),
+                    "ROLE_PERMISSION": "sdn_controllers:id:",
+                },
+            },
+            "k8sclusters": {
+                "METHODS": ("GET", "POST"),
+                "ROLE_PERMISSION": "k8sclusters:",
+                "<ID>": {
+                    "METHODS": ("GET", "DELETE", "PATCH"),
+                    "ROLE_PERMISSION": "k8sclusters:id:",
+                },
+            },
+            "vca": {
+                "METHODS": ("GET", "POST"),
+                "ROLE_PERMISSION": "vca:",
+                "<ID>": {
+                    "METHODS": ("GET", "DELETE", "PATCH"),
+                    "ROLE_PERMISSION": "vca:id:",
+                },
+            },
+            "k8srepos": {
+                "METHODS": ("GET", "POST"),
+                "ROLE_PERMISSION": "k8srepos:",
+                "<ID>": {
+                    "METHODS": ("GET", "DELETE"),
+                    "ROLE_PERMISSION": "k8srepos:id:",
+                },
+            },
+            "osmrepos": {
+                "METHODS": ("GET", "POST"),
+                "ROLE_PERMISSION": "osmrepos:",
+                "<ID>": {
+                    "METHODS": ("GET", "DELETE", "PATCH"),
+                    "ROLE_PERMISSION": "osmrepos:id:",
+                },
+            },
+            "domains": {
+                "METHODS": ("GET",),
+                "ROLE_PERMISSION": "domains:",
+            },
         }
     },
     "pdu": {
         "v1": {
-            "pdu_descriptors": {"METHODS": ("GET", "POST"),
-                                "ROLE_PERMISSION": "pduds:",
-                                "<ID>": {"METHODS": ("GET", "POST", "DELETE", "PATCH", "PUT"),
-                                         "ROLE_PERMISSION": "pduds:id:"
-                                         }
-                                },
+            "pdu_descriptors": {
+                "METHODS": ("GET", "POST"),
+                "ROLE_PERMISSION": "pduds:",
+                "<ID>": {
+                    "METHODS": ("GET", "POST", "DELETE", "PATCH", "PUT"),
+                    "ROLE_PERMISSION": "pduds:id:",
+                },
+            },
         }
     },
     "nsd": {
         "v1": {
-            "ns_descriptors_content": {"METHODS": ("GET", "POST"),
-                                       "ROLE_PERMISSION": "nsds:",
-                                       "<ID>": {"METHODS": ("GET", "PUT", "DELETE"),
-                                                "ROLE_PERMISSION": "nsds:id:"
-                                                }
-                                       },
-            "ns_descriptors": {"METHODS": ("GET", "POST"),
-                               "ROLE_PERMISSION": "nsds:",
-                               "<ID>": {"METHODS": ("GET", "DELETE", "PATCH"),
-                                        "ROLE_PERMISSION": "nsds:id:",
-                                        "nsd_content": {"METHODS": ("GET", "PUT"),
-                                                        "ROLE_PERMISSION": "nsds:id:content:",
-                                                        },
-                                        "nsd": {"METHODS": ("GET",),  # descriptor inside package
-                                                "ROLE_PERMISSION": "nsds:id:content:"
-                                                },
-                                        "artifacts": {"METHODS": ("GET",),
-                                                      "ROLE_PERMISSION": "nsds:id:nsd_artifact:",
-                                                      "*": None,
-                                                      }
-                                        }
-                               },
-            "pnf_descriptors": {"TODO": ("GET", "POST"),
-                                "<ID>": {"TODO": ("GET", "DELETE", "PATCH"),
-                                         "pnfd_content": {"TODO": ("GET", "PUT")}
-                                         }
-                                },
-            "subscriptions": {"TODO": ("GET", "POST"),
-                              "<ID>": {"TODO": ("GET", "DELETE")}
-                              },
+            "ns_descriptors_content": {
+                "METHODS": ("GET", "POST"),
+                "ROLE_PERMISSION": "nsds:",
+                "<ID>": {
+                    "METHODS": ("GET", "PUT", "DELETE"),
+                    "ROLE_PERMISSION": "nsds:id:",
+                },
+            },
+            "ns_descriptors": {
+                "METHODS": ("GET", "POST"),
+                "ROLE_PERMISSION": "nsds:",
+                "<ID>": {
+                    "METHODS": ("GET", "DELETE", "PATCH"),
+                    "ROLE_PERMISSION": "nsds:id:",
+                    "nsd_content": {
+                        "METHODS": ("GET", "PUT"),
+                        "ROLE_PERMISSION": "nsds:id:content:",
+                    },
+                    "nsd": {
+                        "METHODS": ("GET",),  # descriptor inside package
+                        "ROLE_PERMISSION": "nsds:id:content:",
+                    },
+                    "artifacts": {
+                        "METHODS": ("GET",),
+                        "ROLE_PERMISSION": "nsds:id:nsd_artifact:",
+                        "*": None,
+                    },
+                },
+            },
+            "pnf_descriptors": {
+                "TODO": ("GET", "POST"),
+                "<ID>": {
+                    "TODO": ("GET", "DELETE", "PATCH"),
+                    "pnfd_content": {"TODO": ("GET", "PUT")},
+                },
+            },
+            "subscriptions": {
+                "TODO": ("GET", "POST"),
+                "<ID>": {"TODO": ("GET", "DELETE")},
+            },
         }
     },
     "vnfpkgm": {
         "v1": {
-            "vnf_packages_content": {"METHODS": ("GET", "POST"),
-                                     "ROLE_PERMISSION": "vnfds:",
-                                     "<ID>": {"METHODS": ("GET", "PUT", "DELETE"),
-                                              "ROLE_PERMISSION": "vnfds:id:"}
-                                     },
-            "vnf_packages": {"METHODS": ("GET", "POST"),
-                             "ROLE_PERMISSION": "vnfds:",
-                             "<ID>": {"METHODS": ("GET", "DELETE", "PATCH"),  # GET: vnfPkgInfo
-                                      "ROLE_PERMISSION": "vnfds:id:",
-                                      "package_content": {"METHODS": ("GET", "PUT"),  # package
-                                                          "ROLE_PERMISSION": "vnfds:id:",
-                                                          "upload_from_uri": {"METHODS": (),
-                                                                              "TODO": ("POST", ),
-                                                                              "ROLE_PERMISSION": "vnfds:id:upload:"
-                                                                              }
-                                                          },
-                                      "vnfd": {"METHODS": ("GET", ),  # descriptor inside package
-                                               "ROLE_PERMISSION": "vnfds:id:content:"
-                                               },
-                                      "artifacts": {"METHODS": ("GET", ),
-                                                    "ROLE_PERMISSION": "vnfds:id:vnfd_artifact:",
-                                                    "*": None,
-                                                    },
-                                      "action": {"METHODS": ("POST", ),
-                                                 "ROLE_PERMISSION": "vnfds:id:action:"
-                                                 },
-                                      }
-                             },
-            "subscriptions": {"TODO": ("GET", "POST"),
-                              "<ID>": {"TODO": ("GET", "DELETE")}
-                              },
-            "vnfpkg_op_occs": {"METHODS": ("GET", ),
-                               "ROLE_PERMISSION": "vnfds:vnfpkgops:",
-                               "<ID>": {"METHODS": ("GET", ),
-                                        "ROLE_PERMISSION": "vnfds:vnfpkgops:id:"
-                                        }
-                               },
+            "vnf_packages_content": {
+                "METHODS": ("GET", "POST"),
+                "ROLE_PERMISSION": "vnfds:",
+                "<ID>": {
+                    "METHODS": ("GET", "PUT", "DELETE"),
+                    "ROLE_PERMISSION": "vnfds:id:",
+                },
+            },
+            "vnf_packages": {
+                "METHODS": ("GET", "POST"),
+                "ROLE_PERMISSION": "vnfds:",
+                "<ID>": {
+                    "METHODS": ("GET", "DELETE", "PATCH"),  # GET: vnfPkgInfo
+                    "ROLE_PERMISSION": "vnfds:id:",
+                    "package_content": {
+                        "METHODS": ("GET", "PUT"),  # package
+                        "ROLE_PERMISSION": "vnfds:id:",
+                        "upload_from_uri": {
+                            "METHODS": (),
+                            "TODO": ("POST",),
+                            "ROLE_PERMISSION": "vnfds:id:upload:",
+                        },
+                    },
+                    "vnfd": {
+                        "METHODS": ("GET",),  # descriptor inside package
+                        "ROLE_PERMISSION": "vnfds:id:content:",
+                    },
+                    "artifacts": {
+                        "METHODS": ("GET",),
+                        "ROLE_PERMISSION": "vnfds:id:vnfd_artifact:",
+                        "*": None,
+                    },
+                    "action": {
+                        "METHODS": ("POST",),
+                        "ROLE_PERMISSION": "vnfds:id:action:",
+                    },
+                },
+            },
+            "subscriptions": {
+                "TODO": ("GET", "POST"),
+                "<ID>": {"TODO": ("GET", "DELETE")},
+            },
+            "vnfpkg_op_occs": {
+                "METHODS": ("GET",),
+                "ROLE_PERMISSION": "vnfds:vnfpkgops:",
+                "<ID>": {"METHODS": ("GET",), "ROLE_PERMISSION": "vnfds:vnfpkgops:id:"},
+            },
         }
     },
     "nslcm": {
         "v1": {
-            "ns_instances_content": {"METHODS": ("GET", "POST"),
-                                     "ROLE_PERMISSION": "ns_instances:",
-                                     "<ID>": {"METHODS": ("GET", "DELETE"),
-                                              "ROLE_PERMISSION": "ns_instances:id:"
-                                              }
-                                     },
-            "ns_instances": {"METHODS": ("GET", "POST"),
-                             "ROLE_PERMISSION": "ns_instances:",
-                             "<ID>": {"METHODS": ("GET", "DELETE"),
-                                      "ROLE_PERMISSION": "ns_instances:id:",
-                                      "scale": {"METHODS": ("POST",),
-                                                "ROLE_PERMISSION": "ns_instances:id:scale:"
-                                                },
-                                      "terminate": {"METHODS": ("POST",),
-                                                    "ROLE_PERMISSION": "ns_instances:id:terminate:"
-                                                    },
-                                      "instantiate": {"METHODS": ("POST",),
-                                                      "ROLE_PERMISSION": "ns_instances:id:instantiate:"
-                                                      },
-                                      "action": {"METHODS": ("POST",),
-                                                 "ROLE_PERMISSION": "ns_instances:id:action:"
-                                                 },
-                                      }
-                             },
-            "ns_lcm_op_occs": {"METHODS": ("GET",),
-                               "ROLE_PERMISSION": "ns_instances:opps:",
-                               "<ID>": {"METHODS": ("GET",),
-                                        "ROLE_PERMISSION": "ns_instances:opps:id:"
-                                        },
-                               },
-            "vnfrs": {"METHODS": ("GET",),
-                      "ROLE_PERMISSION": "vnf_instances:",
-                      "<ID>": {"METHODS": ("GET",),
-                               "ROLE_PERMISSION": "vnf_instances:id:"
-                               }
-                      },
-            "vnf_instances": {"METHODS": ("GET",),
-                              "ROLE_PERMISSION": "vnf_instances:",
-                              "<ID>": {"METHODS": ("GET",),
-                                       "ROLE_PERMISSION": "vnf_instances:id:"
-                                       }
-                              },
-            "subscriptions": {"METHODS": ("GET", "POST"),
-                              "ROLE_PERMISSION": "ns_subscriptions:",
-                              "<ID>": {"METHODS": ("GET", "DELETE"),
-                                       "ROLE_PERMISSION": "ns_subscriptions:id:"
-                                       }
-                              },
+            "ns_instances_content": {
+                "METHODS": ("GET", "POST"),
+                "ROLE_PERMISSION": "ns_instances:",
+                "<ID>": {
+                    "METHODS": ("GET", "DELETE"),
+                    "ROLE_PERMISSION": "ns_instances:id:",
+                },
+            },
+            "ns_instances": {
+                "METHODS": ("GET", "POST"),
+                "ROLE_PERMISSION": "ns_instances:",
+                "<ID>": {
+                    "METHODS": ("GET", "DELETE"),
+                    "ROLE_PERMISSION": "ns_instances:id:",
+                    "scale": {
+                        "METHODS": ("POST",),
+                        "ROLE_PERMISSION": "ns_instances:id:scale:",
+                    },
+                    "terminate": {
+                        "METHODS": ("POST",),
+                        "ROLE_PERMISSION": "ns_instances:id:terminate:",
+                    },
+                    "instantiate": {
+                        "METHODS": ("POST",),
+                        "ROLE_PERMISSION": "ns_instances:id:instantiate:",
+                    },
+                    "action": {
+                        "METHODS": ("POST",),
+                        "ROLE_PERMISSION": "ns_instances:id:action:",
+                    },
+                },
+            },
+            "ns_lcm_op_occs": {
+                "METHODS": ("GET",),
+                "ROLE_PERMISSION": "ns_instances:opps:",
+                "<ID>": {
+                    "METHODS": ("GET",),
+                    "ROLE_PERMISSION": "ns_instances:opps:id:",
+                },
+            },
+            "vnfrs": {
+                "METHODS": ("GET",),
+                "ROLE_PERMISSION": "vnf_instances:",
+                "<ID>": {"METHODS": ("GET",), "ROLE_PERMISSION": "vnf_instances:id:"},
+            },
+            "vnf_instances": {
+                "METHODS": ("GET",),
+                "ROLE_PERMISSION": "vnf_instances:",
+                "<ID>": {"METHODS": ("GET",), "ROLE_PERMISSION": "vnf_instances:id:"},
+            },
+            "subscriptions": {
+                "METHODS": ("GET", "POST"),
+                "ROLE_PERMISSION": "ns_subscriptions:",
+                "<ID>": {
+                    "METHODS": ("GET", "DELETE"),
+                    "ROLE_PERMISSION": "ns_subscriptions:id:",
+                },
+            },
         }
     },
     "nst": {
         "v1": {
-            "netslice_templates_content": {"METHODS": ("GET", "POST"),
-                                           "ROLE_PERMISSION": "slice_templates:",
-                                           "<ID>": {"METHODS": ("GET", "PUT", "DELETE"),
-                                                    "ROLE_PERMISSION": "slice_templates:id:", }
-                                           },
-            "netslice_templates": {"METHODS": ("GET", "POST"),
-                                   "ROLE_PERMISSION": "slice_templates:",
-                                   "<ID>": {"METHODS": ("GET", "DELETE"),
-                                            "TODO": ("PATCH",),
-                                            "ROLE_PERMISSION": "slice_templates:id:",
-                                            "nst_content": {"METHODS": ("GET", "PUT"),
-                                                            "ROLE_PERMISSION": "slice_templates:id:content:"
-                                                            },
-                                            "nst": {"METHODS": ("GET",),  # descriptor inside package
-                                                    "ROLE_PERMISSION": "slice_templates:id:content:"
-                                                    },
-                                            "artifacts": {"METHODS": ("GET",),
-                                                          "ROLE_PERMISSION": "slice_templates:id:content:",
-                                                          "*": None
-                                                          }
-                                            }
-                                   },
-            "subscriptions": {"TODO": ("GET", "POST"),
-                              "<ID>": {"TODO": ("GET", "DELETE")}
-                              },
+            "netslice_templates_content": {
+                "METHODS": ("GET", "POST"),
+                "ROLE_PERMISSION": "slice_templates:",
+                "<ID>": {
+                    "METHODS": ("GET", "PUT", "DELETE"),
+                    "ROLE_PERMISSION": "slice_templates:id:",
+                },
+            },
+            "netslice_templates": {
+                "METHODS": ("GET", "POST"),
+                "ROLE_PERMISSION": "slice_templates:",
+                "<ID>": {
+                    "METHODS": ("GET", "DELETE"),
+                    "TODO": ("PATCH",),
+                    "ROLE_PERMISSION": "slice_templates:id:",
+                    "nst_content": {
+                        "METHODS": ("GET", "PUT"),
+                        "ROLE_PERMISSION": "slice_templates:id:content:",
+                    },
+                    "nst": {
+                        "METHODS": ("GET",),  # descriptor inside package
+                        "ROLE_PERMISSION": "slice_templates:id:content:",
+                    },
+                    "artifacts": {
+                        "METHODS": ("GET",),
+                        "ROLE_PERMISSION": "slice_templates:id:content:",
+                        "*": None,
+                    },
+                },
+            },
+            "subscriptions": {
+                "TODO": ("GET", "POST"),
+                "<ID>": {"TODO": ("GET", "DELETE")},
+            },
         }
     },
     "nsilcm": {
         "v1": {
-            "netslice_instances_content": {"METHODS": ("GET", "POST"),
-                                           "ROLE_PERMISSION": "slice_instances:",
-                                           "<ID>": {"METHODS": ("GET", "DELETE"),
-                                                    "ROLE_PERMISSION": "slice_instances:id:"
-                                                    }
-                                           },
-            "netslice_instances": {"METHODS": ("GET", "POST"),
-                                   "ROLE_PERMISSION": "slice_instances:",
-                                   "<ID>": {"METHODS": ("GET", "DELETE"),
-                                            "ROLE_PERMISSION": "slice_instances:id:",
-                                            "terminate": {"METHODS": ("POST",),
-                                                          "ROLE_PERMISSION": "slice_instances:id:terminate:"
-                                                          },
-                                            "instantiate": {"METHODS": ("POST",),
-                                                            "ROLE_PERMISSION": "slice_instances:id:instantiate:"
-                                                            },
-                                            "action": {"METHODS": ("POST",),
-                                                       "ROLE_PERMISSION": "slice_instances:id:action:"
-                                                       },
-                                            }
-                                   },
-            "nsi_lcm_op_occs": {"METHODS": ("GET",),
-                                "ROLE_PERMISSION": "slice_instances:opps:",
-                                "<ID>": {"METHODS": ("GET",),
-                                         "ROLE_PERMISSION": "slice_instances:opps:id:",
-                                         },
-                                },
+            "netslice_instances_content": {
+                "METHODS": ("GET", "POST"),
+                "ROLE_PERMISSION": "slice_instances:",
+                "<ID>": {
+                    "METHODS": ("GET", "DELETE"),
+                    "ROLE_PERMISSION": "slice_instances:id:",
+                },
+            },
+            "netslice_instances": {
+                "METHODS": ("GET", "POST"),
+                "ROLE_PERMISSION": "slice_instances:",
+                "<ID>": {
+                    "METHODS": ("GET", "DELETE"),
+                    "ROLE_PERMISSION": "slice_instances:id:",
+                    "terminate": {
+                        "METHODS": ("POST",),
+                        "ROLE_PERMISSION": "slice_instances:id:terminate:",
+                    },
+                    "instantiate": {
+                        "METHODS": ("POST",),
+                        "ROLE_PERMISSION": "slice_instances:id:instantiate:",
+                    },
+                    "action": {
+                        "METHODS": ("POST",),
+                        "ROLE_PERMISSION": "slice_instances:id:action:",
+                    },
+                },
+            },
+            "nsi_lcm_op_occs": {
+                "METHODS": ("GET",),
+                "ROLE_PERMISSION": "slice_instances:opps:",
+                "<ID>": {
+                    "METHODS": ("GET",),
+                    "ROLE_PERMISSION": "slice_instances:opps:id:",
+                },
+            },
         }
     },
     "nspm": {
@@ -487,9 +560,10 @@ valid_url_methods = {
             "pm_jobs": {
                 "<ID>": {
                     "reports": {
-                        "<ID>": {"METHODS": ("GET",),
-                                 "ROLE_PERMISSION": "reports:id:",
-                                 }
+                        "<ID>": {
+                            "METHODS": ("GET",),
+                            "ROLE_PERMISSION": "reports:id:",
+                        }
                     }
                 },
             },
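
For reference, every node of this nested map couples the HTTP verbs allowed at a URL level ("METHODS", or "TODO" for not-yet-implemented ones) with a role-permission prefix ("ROLE_PERMISSION"); "<ID>" matches a concrete identifier and "*" matches anything below. The traversal itself is done by Server._check_valid_url_method further down in this file; a minimal standalone sketch of that lookup (simplified, ignoring the "*" and "TODO" markers) could look like this:

    # Minimal sketch of the lookup performed by _check_valid_url_method
    # (simplified: the "*" wildcard and "TODO" markers are ignored here).
    def resolve_permission(tree, method, *path_segments):
        node = tree
        for segment in path_segments:
            if segment in node:
                node = node[segment]
            elif "<ID>" in node:
                node = node["<ID>"]
            else:
                raise ValueError("Unexpected URL item {}".format(segment))
        if method not in node.get("METHODS", ()):
            raise ValueError("Method {} not allowed here".format(method))
        return node["ROLE_PERMISSION"] + method.lower()

    # Example (using the "nslcm" -> "v1" subtree above):
    #   resolve_permission(valid_url_methods["nslcm"]["v1"], "POST",
    #                      "ns_instances", "<id>", "action")
    #   -> "ns_instances:id:action:post"

Resolving POST /osm/nslcm/v1/ns_instances/<id>/action this way yields "ns_instances:id:action:post", which the authenticator then checks against the roles assigned to the token.
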
@@ -499,7 +573,6 @@ valid_url_methods = {
 
 
 class NbiException(Exception):
-
     def __init__(self, message, http_code=HTTPStatus.METHOD_NOT_ALLOWED):
         Exception.__init__(self, message)
         self.http_code = http_code
@@ -528,27 +601,41 @@ class Server(object):
                         cherrypy.request.headers.pop("Content-File-MD5", None)
                     elif "application/yaml" in cherrypy.request.headers["Content-Type"]:
                         error_text = "Invalid yaml format "
-                        indata = yaml.load(cherrypy.request.body, Loader=yaml.SafeLoader)
+                        indata = yaml.load(
+                            cherrypy.request.body, Loader=yaml.SafeLoader
+                        )
                         cherrypy.request.headers.pop("Content-File-MD5", None)
-                    elif "application/binary" in cherrypy.request.headers["Content-Type"] or \
-                         "application/gzip" in cherrypy.request.headers["Content-Type"] or \
-                         "application/zip" in cherrypy.request.headers["Content-Type"] or \
-                         "text/plain" in cherrypy.request.headers["Content-Type"]:
+                    elif (
+                        "application/binary" in cherrypy.request.headers["Content-Type"]
+                        or "application/gzip"
+                        in cherrypy.request.headers["Content-Type"]
+                        or "application/zip" in cherrypy.request.headers["Content-Type"]
+                        or "text/plain" in cherrypy.request.headers["Content-Type"]
+                    ):
                         indata = cherrypy.request.body  # .read()
-                    elif "multipart/form-data" in cherrypy.request.headers["Content-Type"]:
+                    elif (
+                        "multipart/form-data"
+                        in cherrypy.request.headers["Content-Type"]
+                    ):
                         if "descriptor_file" in kwargs:
                             filecontent = kwargs.pop("descriptor_file")
                             if not filecontent.file:
-                                raise NbiException("empty file or content", HTTPStatus.BAD_REQUEST)
+                                raise NbiException(
+                                    "empty file or content", HTTPStatus.BAD_REQUEST
+                                )
                             indata = filecontent.file  # .read()
                             if filecontent.content_type.value:
-                                cherrypy.request.headers["Content-Type"] = filecontent.content_type.value
+                                cherrypy.request.headers[
+                                    "Content-Type"
+                                ] = filecontent.content_type.value
                     else:
                         # raise cherrypy.HTTPError(HTTPStatus.Not_Acceptable,
                         #                          "Only 'Content-Type' of type 'application/json' or
                         # 'application/yaml' for input format are available")
                         error_text = "Invalid yaml format "
-                        indata = yaml.load(cherrypy.request.body, Loader=yaml.SafeLoader)
+                        indata = yaml.load(
+                            cherrypy.request.body, Loader=yaml.SafeLoader
+                        )
                         cherrypy.request.headers.pop("Content-File-MD5", None)
                 else:
                     error_text = "Invalid yaml format "
@@ -570,7 +657,12 @@ class Server(object):
                             kwargs[k] = yaml.load(v, Loader=yaml.SafeLoader)
                         except Exception:
                             pass
-                    elif k.endswith(".gt") or k.endswith(".lt") or k.endswith(".gte") or k.endswith(".lte"):
+                    elif (
+                        k.endswith(".gt")
+                        or k.endswith(".lt")
+                        or k.endswith(".gte")
+                        or k.endswith(".lte")
+                    ):
                         try:
                             kwargs[k] = int(v)
                         except Exception:
@@ -594,7 +686,9 @@ class Server(object):
         except (ValueError, yaml.YAMLError) as exc:
             raise NbiException(error_text + str(exc), HTTPStatus.BAD_REQUEST)
         except KeyError as exc:
-            raise NbiException("Query string error: " + str(exc), HTTPStatus.BAD_REQUEST)
+            raise NbiException(
+                "Query string error: " + str(exc), HTTPStatus.BAD_REQUEST
+            )
         except Exception as exc:
             raise NbiException(error_text + str(exc), HTTPStatus.BAD_REQUEST)
 
@@ -610,30 +704,45 @@ class Server(object):
         accept = cherrypy.request.headers.get("Accept")
         if data is None:
             if accept and "text/html" in accept:
-                return html.format(data, cherrypy.request, cherrypy.response, token_info)
+                return html.format(
+                    data, cherrypy.request, cherrypy.response, token_info
+                )
             # cherrypy.response.status = HTTPStatus.NO_CONTENT.value
             return
         elif hasattr(data, "read"):  # file object
             if _format:
                 cherrypy.response.headers["Content-Type"] = _format
             elif "b" in data.mode:  # binariy asssumig zip
-                cherrypy.response.headers["Content-Type"] = 'application/zip'
+                cherrypy.response.headers["Content-Type"] = "application/zip"
             else:
-                cherrypy.response.headers["Content-Type"] = 'text/plain'
+                cherrypy.response.headers["Content-Type"] = "text/plain"
             # TODO check that cherrypy closes the file; if not, implement the pending per-thread close handling
             return data
         if accept:
             if "text/html" in accept:
-                return html.format(data, cherrypy.request, cherrypy.response, token_info)
+                return html.format(
+                    data, cherrypy.request, cherrypy.response, token_info
+                )
             elif "application/yaml" in accept or "*/*" in accept:
                 pass
-            elif "application/json" in accept or (cherrypy.response.status and cherrypy.response.status >= 300):
-                cherrypy.response.headers["Content-Type"] = 'application/json; charset=utf-8'
+            elif "application/json" in accept or (
+                cherrypy.response.status and cherrypy.response.status >= 300
+            ):
+                cherrypy.response.headers[
+                    "Content-Type"
+                ] = "application/json; charset=utf-8"
                 a = json.dumps(data, indent=4) + "\n"
-                return a.encode("utf8") 
-        cherrypy.response.headers["Content-Type"] = 'application/yaml'
-        return yaml.safe_dump(data, explicit_start=True, indent=4, default_flow_style=False, tags=False,
-                              encoding='utf-8', allow_unicode=True)  # , canonical=True, default_style='"'
+                return a.encode("utf8")
+        cherrypy.response.headers["Content-Type"] = "application/yaml"
+        return yaml.safe_dump(
+            data,
+            explicit_start=True,
+            indent=4,
+            default_flow_style=False,
+            tags=False,
+            encoding="utf-8",
+            allow_unicode=True,
+        )  # , canonical=True, default_style='"'
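
The reformatted _format_out keeps the same content negotiation: "text/html" in the Accept header renders the HTML view, "application/json" (or any error status) produces indented UTF-8 JSON, and everything else, including the absence of an Accept header, falls back to YAML. A minimal sketch of that decision, for illustration only:

    # Minimal sketch of the Accept-based choice above, for illustration only.
    def pick_output_format(accept, status=200):
        if accept:
            if "text/html" in accept:
                return "text/html"
            if "application/yaml" in accept or "*/*" in accept:
                return "application/yaml"
            if "application/json" in accept or status >= 300:
                return "application/json; charset=utf-8"
        return "application/yaml"
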
 
     @cherrypy.expose
     def index(self, *args, **kwargs):
@@ -641,10 +750,12 @@ class Server(object):
         try:
             if cherrypy.request.method == "GET":
                 token_info = self.authenticator.authorize()
-                outdata = token_info   # Home page
+                outdata = token_info  # Home page
             else:
-                raise cherrypy.HTTPError(HTTPStatus.METHOD_NOT_ALLOWED.value,
-                                         "Method {} not allowed for tokens".format(cherrypy.request.method))
+                raise cherrypy.HTTPError(
+                    HTTPStatus.METHOD_NOT_ALLOWED.value,
+                    "Method {} not allowed for tokens".format(cherrypy.request.method),
+                )
 
             return self._format_out(outdata, token_info)
 
@@ -658,9 +769,14 @@ class Server(object):
         # TODO consider to remove and provide version using the static version file
         try:
             if cherrypy.request.method != "GET":
-                raise NbiException("Only method GET is allowed", HTTPStatus.METHOD_NOT_ALLOWED)
+                raise NbiException(
+                    "Only method GET is allowed", HTTPStatus.METHOD_NOT_ALLOWED
+                )
             elif args or kwargs:
-                raise NbiException("Invalid URL or query string for version", HTTPStatus.METHOD_NOT_ALLOWED)
+                raise NbiException(
+                    "Invalid URL or query string for version",
+                    HTTPStatus.METHOD_NOT_ALLOWED,
+                )
             # TODO include version of other modules, pick up from some kafka admin message
             osm_nbi_version = {"version": nbi_version, "date": nbi_version_date}
             return self._format_out(osm_nbi_version)
@@ -676,8 +792,13 @@ class Server(object):
     def domain(self):
         try:
             domains = {
-                "user_domain_name": cherrypy.tree.apps['/osm'].config["authentication"].get("user_domain_name"),
-                "project_domain_name": cherrypy.tree.apps['/osm'].config["authentication"].get("project_domain_name")}
+                "user_domain_name": cherrypy.tree.apps["/osm"]
+                .config["authentication"]
+                .get("user_domain_name"),
+                "project_domain_name": cherrypy.tree.apps["/osm"]
+                .config["authentication"]
+                .get("project_domain_name"),
+            }
             return self._format_out(domains)
         except NbiException as e:
             cherrypy.response.status = e.http_code.value
@@ -708,7 +829,10 @@ class Server(object):
         # self.engine.load_dbase(cherrypy.request.app.config)
         indata = self._format_in(kwargs)
         if not isinstance(indata, dict):
-            raise NbiException("Expected application/yaml or application/json Content-Type", HTTPStatus.BAD_REQUEST)
+            raise NbiException(
+                "Expected application/yaml or application/json Content-Type",
+                HTTPStatus.BAD_REQUEST,
+            )
 
         if method == "GET":
             token_info = self.authenticator.authorize()
@@ -727,8 +851,10 @@ class Server(object):
                 indata.update(kwargs)
             # This is needed to log the user when authentication fails
             cherrypy.request.login = "{}".format(indata.get("username", "-"))
-            outdata = token_info = self.authenticator.new_token(token_info, indata, cherrypy.request.remote)
-            cherrypy.session['Authorization'] = outdata["_id"]
+            outdata = token_info = self.authenticator.new_token(
+                token_info, indata, cherrypy.request.remote
+            )
+            cherrypy.session["Authorization"] = outdata["_id"]
             self._set_location_header("admin", "v1", "tokens", outdata["_id"])
             # for logging
             self._format_login(token_info)
@@ -745,23 +871,30 @@ class Server(object):
                 token_id = token_info["_id"]
             outdata = self.authenticator.del_token(token_id)
             token_info = None
-            cherrypy.session['Authorization'] = "logout"
+            cherrypy.session["Authorization"] = "logout"
             # cherrypy.response.cookie["Authorization"] = token_id
             # cherrypy.response.cookie["Authorization"]['expires'] = 0
         else:
-            raise NbiException("Method {} not allowed for token".format(method), HTTPStatus.METHOD_NOT_ALLOWED)
+            raise NbiException(
+                "Method {} not allowed for token".format(method),
+                HTTPStatus.METHOD_NOT_ALLOWED,
+            )
         return self._format_out(outdata, token_info)
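
The token handler above implements login and logout on /osm/admin/v1/tokens: POST with credentials creates a token whose "_id" is returned (and placed in the Location header), and DELETE on that id removes it. A client-side sketch, assuming a reachable NBI endpoint, test credentials and the usual Bearer scheme (none of which are part of this patch):

    # Illustrative client-side sketch of the token flow handled above; the
    # endpoint, credentials and Bearer header are assumptions, not part of the patch.
    import requests

    TOKENS = "https://nbi.example.com:9999/osm/admin/v1/tokens"  # assumed endpoint

    resp = requests.post(
        TOKENS,
        json={"username": "admin", "password": "admin"},
        headers={"Accept": "application/json"},
        verify=False,
    )
    token_id = resp.json()["_id"]  # the created token document carries its own _id

    # Subsequent requests present the token id; DELETE on it logs out
    auth = {"Authorization": "Bearer {}".format(token_id)}
    requests.delete("{}/{}".format(TOKENS, token_id), headers=auth, verify=False)
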
 
     @cherrypy.expose
     def test(self, *args, **kwargs):
-        if not cherrypy.config.get("server.enable_test") or (isinstance(cherrypy.config["server.enable_test"], str) and
-                                                             cherrypy.config["server.enable_test"].lower() == "false"):
+        if not cherrypy.config.get("server.enable_test") or (
+            isinstance(cherrypy.config["server.enable_test"], str)
+            and cherrypy.config["server.enable_test"].lower() == "false"
+        ):
             cherrypy.response.status = HTTPStatus.METHOD_NOT_ALLOWED.value
             return "test URL is disabled"
         thread_info = None
         if args and args[0] == "help":
-            return "<html><pre>\ninit\nfile/<name>  download file\ndb-clear/table\nfs-clear[/folder]\nlogin\nlogin2\n"\
-                   "sleep/<time>\nmessage/topic\n</pre></html>"
+            return (
+                "<html><pre>\ninit\nfile/<name>  download file\ndb-clear/table\nfs-clear[/folder]\nlogin\nlogin2\n"
+                "sleep/<time>\nmessage/topic\n</pre></html>"
+            )
 
         elif args and args[0] == "init":
             try:
@@ -772,10 +905,15 @@ class Server(object):
                 cherrypy.response.status = HTTPStatus.FORBIDDEN.value
                 return self._format_out("Database already initialized")
         elif args and args[0] == "file":
-            return cherrypy.lib.static.serve_file(cherrypy.tree.apps['/osm'].config["storage"]["path"] + "/" + args[1],
-                                                  "text/plain", "attachment")
+            return cherrypy.lib.static.serve_file(
+                cherrypy.tree.apps["/osm"].config["storage"]["path"] + "/" + args[1],
+                "text/plain",
+                "attachment",
+            )
         elif args and args[0] == "file2":
-            f_path = cherrypy.tree.apps['/osm'].config["storage"]["path"] + "/" + args[1]
+            f_path = (
+                cherrypy.tree.apps["/osm"].config["storage"]["path"] + "/" + args[1]
+            )
             f = open(f_path, "r")
             cherrypy.response.headers["Content-type"] = "text/plain"
             return f
@@ -793,11 +931,15 @@ class Server(object):
             return ",".join(folders) + " folders deleted\n"
         elif args and args[0] == "login":
             if not cherrypy.request.headers.get("Authorization"):
-                cherrypy.response.headers["WWW-Authenticate"] = 'Basic realm="Access to OSM site", charset="UTF-8"'
+                cherrypy.response.headers[
+                    "WWW-Authenticate"
+                ] = 'Basic realm="Access to OSM site", charset="UTF-8"'
                 cherrypy.response.status = HTTPStatus.UNAUTHORIZED.value
         elif args and args[0] == "login2":
             if not cherrypy.request.headers.get("Authorization"):
-                cherrypy.response.headers["WWW-Authenticate"] = 'Bearer realm="Access to OSM site"'
+                cherrypy.response.headers[
+                    "WWW-Authenticate"
+                ] = 'Bearer realm="Access to OSM site"'
                 cherrypy.response.status = HTTPStatus.UNAUTHORIZED.value
         elif args and args[0] == "sleep":
             sleep_time = 5
@@ -814,12 +956,12 @@ class Server(object):
             main_topic = args[1]
             return_text = "<html><pre>{} ->\n".format(main_topic)
             try:
-                if cherrypy.request.method == 'POST':
+                if cherrypy.request.method == "POST":
                     to_send = yaml.load(cherrypy.request.body, Loader=yaml.SafeLoader)
                     for k, v in to_send.items():
                         self.engine.msg.write(main_topic, k, v)
                         return_text += "  {}: {}\n".format(k, v)
-                elif cherrypy.request.method == 'GET':
+                elif cherrypy.request.method == "GET":
                     for k, v in kwargs.items():
                         v_dict = yaml.load(v, Loader=yaml.SafeLoader)
                         self.engine.msg.write(main_topic, k, v_dict)
@@ -830,20 +972,26 @@ class Server(object):
             return return_text
 
         return_text = (
-            "<html><pre>\nheaders:\n  args: {}\n".format(args) +
-            "  kwargs: {}\n".format(kwargs) +
-            "  headers: {}\n".format(cherrypy.request.headers) +
-            "  path_info: {}\n".format(cherrypy.request.path_info) +
-            "  query_string: {}\n".format(cherrypy.request.query_string) +
-            "  session: {}\n".format(cherrypy.session) +
-            "  cookie: {}\n".format(cherrypy.request.cookie) +
-            "  method: {}\n".format(cherrypy.request.method) +
-            "  session: {}\n".format(cherrypy.session.get('fieldname')) +
-            "  body:\n")
+            "<html><pre>\nheaders:\n  args: {}\n".format(args)
+            + "  kwargs: {}\n".format(kwargs)
+            + "  headers: {}\n".format(cherrypy.request.headers)
+            + "  path_info: {}\n".format(cherrypy.request.path_info)
+            + "  query_string: {}\n".format(cherrypy.request.query_string)
+            + "  session: {}\n".format(cherrypy.session)
+            + "  cookie: {}\n".format(cherrypy.request.cookie)
+            + "  method: {}\n".format(cherrypy.request.method)
+            + "  session: {}\n".format(cherrypy.session.get("fieldname"))
+            + "  body:\n"
+        )
         return_text += "    length: {}\n".format(cherrypy.request.body.length)
         if cherrypy.request.body.length:
             return_text += "    content: {}\n".format(
-                str(cherrypy.request.body.read(int(cherrypy.request.headers.get('Content-Length', 0)))))
+                str(
+                    cherrypy.request.body.read(
+                        int(cherrypy.request.headers.get("Content-Length", 0))
+                    )
+                )
+            )
         if thread_info:
             return_text += "thread: {}\n".format(thread_info)
         return_text += "</pre></html>"
@@ -852,15 +1000,20 @@ class Server(object):
     @staticmethod
     def _check_valid_url_method(method, *args):
         if len(args) < 3:
-            raise NbiException("URL must contain at least 'main_topic/version/topic'", HTTPStatus.METHOD_NOT_ALLOWED)
+            raise NbiException(
+                "URL must contain at least 'main_topic/version/topic'",
+                HTTPStatus.METHOD_NOT_ALLOWED,
+            )
 
         reference = valid_url_methods
         for arg in args:
             if arg is None:
                 break
             if not isinstance(reference, dict):
-                raise NbiException("URL contains unexpected extra items '{}'".format(arg),
-                                   HTTPStatus.METHOD_NOT_ALLOWED)
+                raise NbiException(
+                    "URL contains unexpected extra items '{}'".format(arg),
+                    HTTPStatus.METHOD_NOT_ALLOWED,
+                )
 
             if arg in reference:
                 reference = reference[arg]
@@ -872,11 +1025,19 @@ class Server(object):
                     reference = reference["*"]
                 break
             else:
-                raise NbiException("Unexpected URL item {}".format(arg), HTTPStatus.METHOD_NOT_ALLOWED)
+                raise NbiException(
+                    "Unexpected URL item {}".format(arg), HTTPStatus.METHOD_NOT_ALLOWED
+                )
         if "TODO" in reference and method in reference["TODO"]:
-            raise NbiException("Method {} not supported yet for this URL".format(method), HTTPStatus.NOT_IMPLEMENTED)
+            raise NbiException(
+                "Method {} not supported yet for this URL".format(method),
+                HTTPStatus.NOT_IMPLEMENTED,
+            )
         elif "METHODS" in reference and method not in reference["METHODS"]:
-            raise NbiException("Method {} not supported for this URL".format(method), HTTPStatus.METHOD_NOT_ALLOWED)
+            raise NbiException(
+                "Method {} not supported for this URL".format(method),
+                HTTPStatus.METHOD_NOT_ALLOWED,
+            )
         return reference["ROLE_PERMISSION"] + method.lower()
 
     @staticmethod
@@ -890,7 +1051,9 @@ class Server(object):
         :return: None
         """
         # Use cherrypy.request.base for an absolute path and make use of request.header HOST just in case behind a NAT
-        cherrypy.response.headers["Location"] = "/osm/{}/{}/{}/{}".format(main_topic, version, topic, id)
+        cherrypy.response.headers["Location"] = "/osm/{}/{}/{}/{}".format(
+            main_topic, version, topic, id
+        )
         return
 
     @staticmethod
@@ -923,18 +1086,27 @@ class Server(object):
             set_project: tuple with projects that a created element will belong to
             method: show, list, delete, write
         """
-        admin_query = {"force": False, "project_id": (token_info["project_id"], ), "username": token_info["username"],
-                       "admin": token_info["admin"], "public": None,
-                       "allow_show_user_project_role": token_info["allow_show_user_project_role"]}
+        admin_query = {
+            "force": False,
+            "project_id": (token_info["project_id"],),
+            "username": token_info["username"],
+            "admin": token_info["admin"],
+            "public": None,
+            "allow_show_user_project_role": token_info["allow_show_user_project_role"],
+        }
         if kwargs:
             # FORCE
             if "FORCE" in kwargs:
-                if kwargs["FORCE"].lower() != "false":  # if None or True set force to True
+                if (
+                    kwargs["FORCE"].lower() != "false"
+                ):  # if None or True set force to True
                     admin_query["force"] = True
                 del kwargs["FORCE"]
             # PUBLIC
             if "PUBLIC" in kwargs:
-                if kwargs["PUBLIC"].lower() != "false":  # if None or True set public to True
+                if (
+                    kwargs["PUBLIC"].lower() != "false"
+                ):  # if None or True set public to True
                     admin_query["public"] = True
                 else:
                     admin_query["public"] = False
@@ -944,25 +1116,33 @@ class Server(object):
                 behave_as = kwargs.pop("ADMIN")
                 if behave_as.lower() != "false":
                     if not token_info["admin"]:
-                        raise NbiException("Only admin projects can use 'ADMIN' query string", HTTPStatus.UNAUTHORIZED)
-                    if not behave_as or behave_as.lower() == "true":  # convert True, None to empty list
+                        raise NbiException(
+                            "Only admin projects can use 'ADMIN' query string",
+                            HTTPStatus.UNAUTHORIZED,
+                        )
+                    if (
+                        not behave_as or behave_as.lower() == "true"
+                    ):  # convert True, None to empty list
                         admin_query["project_id"] = ()
                     elif isinstance(behave_as, (list, tuple)):
                         admin_query["project_id"] = behave_as
-                    else:   # isinstance(behave_as, str)
-                        admin_query["project_id"] = (behave_as, )
+                    else:  # isinstance(behave_as, str)
+                        admin_query["project_id"] = (behave_as,)
             if "SET_PROJECT" in kwargs:
                 set_project = kwargs.pop("SET_PROJECT")
                 if not set_project:
                     admin_query["set_project"] = list(admin_query["project_id"])
                 else:
                     if isinstance(set_project, str):
-                        set_project = (set_project, )
+                        set_project = (set_project,)
                     if admin_query["project_id"]:
                         for p in set_project:
                             if p not in admin_query["project_id"]:
-                                raise NbiException("Unauthorized for 'SET_PROJECT={p}'. Try with 'ADMIN=True' or "
-                                                   "'ADMIN='{p}'".format(p=p), HTTPStatus.UNAUTHORIZED)
+                                raise NbiException(
+                                    "Unauthorized for 'SET_PROJECT={p}'. Try with 'ADMIN=True' or "
+                                    "'ADMIN='{p}'".format(p=p),
+                                    "'ADMIN={p}'".format(p=p),
+                                )
                     admin_query["set_project"] = set_project
 
             # PROJECT_READ
@@ -981,7 +1161,16 @@ class Server(object):
         return admin_query
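
_manage_admin_query turns the reserved query-string keys (FORCE, PUBLIC, ADMIN, SET_PROJECT, PROJECT_READ, PROJECT_WRITE) into the engine_session dict consumed by the engine. As an illustration, a request carrying ?FORCE=true&ADMIN=true issued by a user whose token belongs to an admin project would produce roughly the following (the username and flag values come from the token, not from this patch):

    # Illustration only: approximate engine_session produced by _manage_admin_query
    # for "?FORCE=true&ADMIN=true" from a token that belongs to an admin project.
    engine_session = {
        "force": True,                 # FORCE other than "false" enables forced operations
        "project_id": (),              # ADMIN=true widens the query to every project
        "username": "admin",
        "admin": True,
        "public": None,                # PUBLIC was not supplied
        "allow_show_user_project_role": True,
    }
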
 
     @cherrypy.expose
-    def default(self, main_topic=None, version=None, topic=None, _id=None, item=None, *args, **kwargs):
+    def default(
+        self,
+        main_topic=None,
+        version=None,
+        topic=None,
+        _id=None,
+        item=None,
+        *args,
+        **kwargs
+    ):
         token_info = None
         outdata = None
         _format = None
@@ -991,24 +1180,50 @@ class Server(object):
         engine_session = None
         try:
             if not main_topic or not version or not topic:
-                raise NbiException("URL must contain at least 'main_topic/version/topic'",
-                                   HTTPStatus.METHOD_NOT_ALLOWED)
-            if main_topic not in ("admin", "vnfpkgm", "nsd", "nslcm", "pdu", "nst", "nsilcm", "nspm"):
-                raise NbiException("URL main_topic '{}' not supported".format(main_topic),
-                                   HTTPStatus.METHOD_NOT_ALLOWED)
-            if version != 'v1':
-                raise NbiException("URL version '{}' not supported".format(version), HTTPStatus.METHOD_NOT_ALLOWED)
-
-            if kwargs and "METHOD" in kwargs and kwargs["METHOD"] in ("PUT", "POST", "DELETE", "GET", "PATCH"):
+                raise NbiException(
+                    "URL must contain at least 'main_topic/version/topic'",
+                    HTTPStatus.METHOD_NOT_ALLOWED,
+                )
+            if main_topic not in (
+                "admin",
+                "vnfpkgm",
+                "nsd",
+                "nslcm",
+                "pdu",
+                "nst",
+                "nsilcm",
+                "nspm",
+            ):
+                raise NbiException(
+                    "URL main_topic '{}' not supported".format(main_topic),
+                    HTTPStatus.METHOD_NOT_ALLOWED,
+                )
+            if version != "v1":
+                raise NbiException(
+                    "URL version '{}' not supported".format(version),
+                    HTTPStatus.METHOD_NOT_ALLOWED,
+                )
+
+            if (
+                kwargs
+                and "METHOD" in kwargs
+                and kwargs["METHOD"] in ("PUT", "POST", "DELETE", "GET", "PATCH")
+            ):
                 method = kwargs.pop("METHOD")
             else:
                 method = cherrypy.request.method
 
-            role_permission = self._check_valid_url_method(method, main_topic, version, topic, _id, item, *args)
-            query_string_operations = self._extract_query_string_operations(kwargs, method)
+            role_permission = self._check_valid_url_method(
+                method, main_topic, version, topic, _id, item, *args
+            )
+            query_string_operations = self._extract_query_string_operations(
+                kwargs, method
+            )
             if main_topic == "admin" and topic == "tokens":
                 return self.token(method, _id, kwargs)
-            token_info = self.authenticator.authorize(role_permission, query_string_operations, _id)
+            token_info = self.authenticator.authorize(
+                role_permission, query_string_operations, _id
+            )
             if main_topic == "admin" and topic == "domains":
                 return self.domain()
             engine_session = self._manage_admin_query(token_info, kwargs, method, _id)
@@ -1040,14 +1255,24 @@ class Server(object):
                     engine_topic = "nsilcmops"
             elif main_topic == "pdu":
                 engine_topic = "pdus"
-            if engine_topic == "vims":   # TODO this is for backward compatibility, it will be removed in the future
+            if (
+                engine_topic == "vims"
+            ):  # TODO this is for backward compatibility, it will be removed in the future
                 engine_topic = "vim_accounts"
 
             if topic == "subscriptions":
                 engine_topic = main_topic + "_" + topic
 
             if method == "GET":
-                if item in ("nsd_content", "package_content", "artifacts", "vnfd", "nsd", "nst", "nst_content"):
+                if item in (
+                    "nsd_content",
+                    "package_content",
+                    "artifacts",
+                    "vnfd",
+                    "nsd",
+                    "nst",
+                    "nst_content",
+                ):
                     if item in ("vnfd", "nsd", "nst"):
                         path = "$DESCRIPTOR"
                     elif args:
@@ -1056,26 +1281,51 @@ class Server(object):
                         path = ()
                     else:
                         path = None
-                    file, _format = self.engine.get_file(engine_session, engine_topic, _id, path,
-                                                         cherrypy.request.headers.get("Accept"))
+                    file, _format = self.engine.get_file(
+                        engine_session,
+                        engine_topic,
+                        _id,
+                        path,
+                        cherrypy.request.headers.get("Accept"),
+                    )
                     outdata = file
                 elif not _id:
-                    outdata = self.engine.get_item_list(engine_session, engine_topic, kwargs, api_req=True)
+                    outdata = self.engine.get_item_list(
+                        engine_session, engine_topic, kwargs, api_req=True
+                    )
                 else:
                     if item == "reports":
                         # TODO check that project_id (_id in this context) has permissions
                         _id = args[0]
-                    outdata = self.engine.get_item(engine_session, engine_topic, _id, True)
+                    outdata = self.engine.get_item(
+                        engine_session, engine_topic, _id, True
+                    )
 
             elif method == "POST":
                 cherrypy.response.status = HTTPStatus.CREATED.value
-                if topic in ("ns_descriptors_content", "vnf_packages_content", "netslice_templates_content"):
+                if topic in (
+                    "ns_descriptors_content",
+                    "vnf_packages_content",
+                    "netslice_templates_content",
+                ):
                     _id = cherrypy.request.headers.get("Transaction-Id")
                     if not _id:
-                        _id, _ = self.engine.new_item(rollback, engine_session, engine_topic, {}, None,
-                                                      cherrypy.request.headers)
-                    completed = self.engine.upload_content(engine_session, engine_topic, _id, indata, kwargs,
-                                                           cherrypy.request.headers)
+                        _id, _ = self.engine.new_item(
+                            rollback,
+                            engine_session,
+                            engine_topic,
+                            {},
+                            None,
+                            cherrypy.request.headers,
+                        )
+                    completed = self.engine.upload_content(
+                        engine_session,
+                        engine_topic,
+                        _id,
+                        indata,
+                        kwargs,
+                        cherrypy.request.headers,
+                    )
                     if completed:
                         self._set_location_header(main_topic, version, topic, _id)
                     else:
@@ -1083,53 +1333,85 @@ class Server(object):
                     outdata = {"id": _id}
                 elif topic == "ns_instances_content":
                     # creates NSR
-                    _id, _ = self.engine.new_item(rollback, engine_session, engine_topic, indata, kwargs)
+                    _id, _ = self.engine.new_item(
+                        rollback, engine_session, engine_topic, indata, kwargs
+                    )
                     # creates nslcmop
                     indata["lcmOperationType"] = "instantiate"
                     indata["nsInstanceId"] = _id
-                    nslcmop_id, _ = self.engine.new_item(rollback, engine_session, "nslcmops", indata, None)
+                    nslcmop_id, _ = self.engine.new_item(
+                        rollback, engine_session, "nslcmops", indata, None
+                    )
                     self._set_location_header(main_topic, version, topic, _id)
                     outdata = {"id": _id, "nslcmop_id": nslcmop_id}
                 elif topic == "ns_instances" and item:
                     indata["lcmOperationType"] = item
                     indata["nsInstanceId"] = _id
-                    _id, _ = self.engine.new_item(rollback, engine_session, "nslcmops", indata, kwargs)
-                    self._set_location_header(main_topic, version, "ns_lcm_op_occs", _id)
+                    _id, _ = self.engine.new_item(
+                        rollback, engine_session, "nslcmops", indata, kwargs
+                    )
+                    self._set_location_header(
+                        main_topic, version, "ns_lcm_op_occs", _id
+                    )
                     outdata = {"id": _id}
                     cherrypy.response.status = HTTPStatus.ACCEPTED.value
                 elif topic == "netslice_instances_content":
                     # creates NetSlice_Instance_record (NSIR)
-                    _id, _ = self.engine.new_item(rollback, engine_session, engine_topic, indata, kwargs)
+                    _id, _ = self.engine.new_item(
+                        rollback, engine_session, engine_topic, indata, kwargs
+                    )
                     self._set_location_header(main_topic, version, topic, _id)
                     indata["lcmOperationType"] = "instantiate"
                     indata["netsliceInstanceId"] = _id
-                    nsilcmop_id, _ = self.engine.new_item(rollback, engine_session, "nsilcmops", indata, kwargs)
+                    nsilcmop_id, _ = self.engine.new_item(
+                        rollback, engine_session, "nsilcmops", indata, kwargs
+                    )
                     outdata = {"id": _id, "nsilcmop_id": nsilcmop_id}
                 elif topic == "netslice_instances" and item:
                     indata["lcmOperationType"] = item
                     indata["netsliceInstanceId"] = _id
-                    _id, _ = self.engine.new_item(rollback, engine_session, "nsilcmops", indata, kwargs)
-                    self._set_location_header(main_topic, version, "nsi_lcm_op_occs", _id)
+                    _id, _ = self.engine.new_item(
+                        rollback, engine_session, "nsilcmops", indata, kwargs
+                    )
+                    self._set_location_header(
+                        main_topic, version, "nsi_lcm_op_occs", _id
+                    )
                     outdata = {"id": _id}
                     cherrypy.response.status = HTTPStatus.ACCEPTED.value
                 elif topic == "vnf_packages" and item == "action":
                     indata["lcmOperationType"] = item
                     indata["vnfPkgId"] = _id
-                    _id, _ = self.engine.new_item(rollback, engine_session, "vnfpkgops", indata, kwargs)
-                    self._set_location_header(main_topic, version, "vnfpkg_op_occs", _id)
+                    _id, _ = self.engine.new_item(
+                        rollback, engine_session, "vnfpkgops", indata, kwargs
+                    )
+                    self._set_location_header(
+                        main_topic, version, "vnfpkg_op_occs", _id
+                    )
                     outdata = {"id": _id}
                     cherrypy.response.status = HTTPStatus.ACCEPTED.value
                 elif topic == "subscriptions":
-                    _id, _ = self.engine.new_item(rollback, engine_session, engine_topic, indata, kwargs)
+                    _id, _ = self.engine.new_item(
+                        rollback, engine_session, engine_topic, indata, kwargs
+                    )
                     self._set_location_header(main_topic, version, topic, _id)
                     link = {}
                     link["self"] = cherrypy.response.headers["Location"]
-                    outdata = {"id": _id, "filter": indata["filter"], "callbackUri": indata["CallbackUri"],
-                               "_links": link}
+                    outdata = {
+                        "id": _id,
+                        "filter": indata["filter"],
+                        "callbackUri": indata["CallbackUri"],
+                        "_links": link,
+                    }
                     cherrypy.response.status = HTTPStatus.CREATED.value
                 else:
-                    _id, op_id = self.engine.new_item(rollback, engine_session, engine_topic, indata, kwargs,
-                                                      cherrypy.request.headers)
+                    _id, op_id = self.engine.new_item(
+                        rollback,
+                        engine_session,
+                        engine_topic,
+                        indata,
+                        kwargs,
+                        cherrypy.request.headers,
+                    )
                     self._set_location_header(main_topic, version, topic, _id)
                     outdata = {"id": _id}
                     if op_id:
@@ -1139,7 +1421,9 @@ class Server(object):
 
             elif method == "DELETE":
                 if not _id:
-                    outdata = self.engine.del_item_list(engine_session, engine_topic, kwargs)
+                    outdata = self.engine.del_item_list(
+                        engine_session, engine_topic, kwargs
+                    )
                     cherrypy.response.status = HTTPStatus.OK.value
                 else:  # len(args) > 1
                     # for NS NSI generate an operation
@@ -1148,18 +1432,25 @@ class Server(object):
                         nslcmop_desc = {
                             "lcmOperationType": "terminate",
                             "nsInstanceId": _id,
-                            "autoremove": True
+                            "autoremove": True,
                         }
-                        op_id, _ = self.engine.new_item(rollback, engine_session, "nslcmops", nslcmop_desc, kwargs)
+                        op_id, _ = self.engine.new_item(
+                            rollback, engine_session, "nslcmops", nslcmop_desc, kwargs
+                        )
                         if op_id:
                             outdata = {"_id": op_id}
-                    elif topic == "netslice_instances_content" and not engine_session["force"]:
+                    elif (
+                        topic == "netslice_instances_content"
+                        and not engine_session["force"]
+                    ):
                         nsilcmop_desc = {
                             "lcmOperationType": "terminate",
                             "netsliceInstanceId": _id,
-                            "autoremove": True
+                            "autoremove": True,
                         }
-                        op_id, _ = self.engine.new_item(rollback, engine_session, "nsilcmops", nsilcmop_desc, None)
+                        op_id, _ = self.engine.new_item(
+                            rollback, engine_session, "nsilcmops", nsilcmop_desc, None
+                        )
                         if op_id:
                             outdata = {"_id": op_id}
                     # if no deletion operation is in progress, delete directly
@@ -1167,20 +1458,37 @@ class Server(object):
                         op_id = self.engine.del_item(engine_session, engine_topic, _id)
                         if op_id:
                             outdata = {"op_id": op_id}
-                    cherrypy.response.status = HTTPStatus.ACCEPTED.value if op_id else HTTPStatus.NO_CONTENT.value
+                    cherrypy.response.status = (
+                        HTTPStatus.ACCEPTED.value
+                        if op_id
+                        else HTTPStatus.NO_CONTENT.value
+                    )
 
             elif method in ("PUT", "PATCH"):
                 op_id = None
                 if not indata and not kwargs and not engine_session.get("set_project"):
-                    raise NbiException("Nothing to update. Provide payload and/or query string",
-                                       HTTPStatus.BAD_REQUEST)
-                if item in ("nsd_content", "package_content", "nst_content") and method == "PUT":
-                    completed = self.engine.upload_content(engine_session, engine_topic, _id, indata, kwargs,
-                                                           cherrypy.request.headers)
+                    raise NbiException(
+                        "Nothing to update. Provide payload and/or query string",
+                        HTTPStatus.BAD_REQUEST,
+                    )
+                if (
+                    item in ("nsd_content", "package_content", "nst_content")
+                    and method == "PUT"
+                ):
+                    completed = self.engine.upload_content(
+                        engine_session,
+                        engine_topic,
+                        _id,
+                        indata,
+                        kwargs,
+                        cherrypy.request.headers,
+                    )
                     if not completed:
                         cherrypy.response.headers["Transaction-Id"] = id
                 else:
-                    op_id = self.engine.edit_item(engine_session, engine_topic, _id, indata, kwargs)
+                    op_id = self.engine.edit_item(
+                        engine_session, engine_topic, _id, indata, kwargs
+                    )
 
                 if op_id:
                     cherrypy.response.status = HTTPStatus.ACCEPTED.value
@@ -1189,25 +1497,45 @@ class Server(object):
                     cherrypy.response.status = HTTPStatus.NO_CONTENT.value
                     outdata = None
             else:
-                raise NbiException("Method {} not allowed".format(method), HTTPStatus.METHOD_NOT_ALLOWED)
+                raise NbiException(
+                    "Method {} not allowed".format(method),
+                    HTTPStatus.METHOD_NOT_ALLOWED,
+                )
 
             # if Role information changes, it is needed to reload the information of roles
             if topic == "roles" and method != "GET":
                 self.authenticator.load_operation_to_allowed_roles()
 
-            if topic == "projects" and method == "DELETE" \
-                    or topic in ["users", "roles"] and method in ["PUT", "PATCH", "DELETE"]:
+            if (
+                topic == "projects"
+                and method == "DELETE"
+                or topic in ["users", "roles"]
+                and method in ["PUT", "PATCH", "DELETE"]
+            ):
                 self.authenticator.remove_token_from_cache()
 
             return self._format_out(outdata, token_info, _format)
         except Exception as e:
-            if isinstance(e, (NbiException, EngineException, DbException, FsException, MsgException, AuthException,
-                              ValidationError, AuthconnException)):
+            if isinstance(
+                e,
+                (
+                    NbiException,
+                    EngineException,
+                    DbException,
+                    FsException,
+                    MsgException,
+                    AuthException,
+                    ValidationError,
+                    AuthconnException,
+                ),
+            ):
                 http_code_value = cherrypy.response.status = e.http_code.value
                 http_code_name = e.http_code.name
                 cherrypy.log("Exception {}".format(e))
             else:
-                http_code_value = cherrypy.response.status = HTTPStatus.BAD_REQUEST.value  # INTERNAL_SERVER_ERROR
+                http_code_value = (
+                    cherrypy.response.status
+                ) = HTTPStatus.BAD_REQUEST.value  # INTERNAL_SERVER_ERROR
                 cherrypy.log("CRITICAL: Exception {}".format(e), traceback=True)
                 http_code_name = HTTPStatus.BAD_REQUEST.name
             if hasattr(outdata, "close"):  # is an open file
@@ -1217,16 +1545,28 @@ class Server(object):
             for rollback_item in rollback:
                 try:
                     if rollback_item.get("operation") == "set":
-                        self.engine.db.set_one(rollback_item["topic"], {"_id": rollback_item["_id"]},
-                                               rollback_item["content"], fail_on_empty=False)
+                        self.engine.db.set_one(
+                            rollback_item["topic"],
+                            {"_id": rollback_item["_id"]},
+                            rollback_item["content"],
+                            fail_on_empty=False,
+                        )
                     elif rollback_item.get("operation") == "del_list":
-                        self.engine.db.del_list(rollback_item["topic"], rollback_item["filter"], 
-                                                fail_on_empty=False)
+                        self.engine.db.del_list(
+                            rollback_item["topic"],
+                            rollback_item["filter"],
+                            fail_on_empty=False,
+                        )
                     else:
-                        self.engine.db.del_one(rollback_item["topic"], {"_id": rollback_item["_id"]},
-                                               fail_on_empty=False)
+                        self.engine.db.del_one(
+                            rollback_item["topic"],
+                            {"_id": rollback_item["_id"]},
+                            fail_on_empty=False,
+                        )
                 except Exception as e2:
-                    rollback_error_text = "Rollback Exception {}: {}".format(rollback_item, e2)
+                    rollback_error_text = "Rollback Exception {}: {}".format(
+                        rollback_item, e2
+                    )
                     cherrypy.log(rollback_error_text)
                     error_text += ". " + rollback_error_text
             # if isinstance(e, MsgException):
@@ -1245,7 +1585,9 @@ class Server(object):
                 if method in ("PUT", "PATCH", "POST") and isinstance(outdata, dict):
                     for logging_id in ("id", "op_id", "nsilcmop_id", "nslcmop_id"):
                         if outdata.get(logging_id):
-                            cherrypy.request.login += ";{}={}".format(logging_id, outdata[logging_id][:36])
+                            cherrypy.request.login += ";{}={}".format(
+                                logging_id, outdata[logging_id][:36]
+                            )
 
 
 def _start_service():
@@ -1261,7 +1603,7 @@ def _start_service():
     # update general cherrypy configuration
     update_dict = {}
 
-    engine_config = cherrypy.tree.apps['/osm'].config
+    engine_config = cherrypy.tree.apps["/osm"].config
     for k, v in environ.items():
         if not k.startswith("OSMNBI_"):
             continue
@@ -1270,15 +1612,15 @@ def _start_service():
             continue
         try:
             # update static configuration
-            if k == 'OSMNBI_STATIC_DIR':
-                engine_config["/static"]['tools.staticdir.dir'] = v
-                engine_config["/static"]['tools.staticdir.on'] = True
-            elif k == 'OSMNBI_SOCKET_PORT' or k == 'OSMNBI_SERVER_PORT':
-                update_dict['server.socket_port'] = int(v)
-            elif k == 'OSMNBI_SOCKET_HOST' or k == 'OSMNBI_SERVER_HOST':
-                update_dict['server.socket_host'] = v
+            if k == "OSMNBI_STATIC_DIR":
+                engine_config["/static"]["tools.staticdir.dir"] = v
+                engine_config["/static"]["tools.staticdir.on"] = True
+            elif k == "OSMNBI_SOCKET_PORT" or k == "OSMNBI_SERVER_PORT":
+                update_dict["server.socket_port"] = int(v)
+            elif k == "OSMNBI_SOCKET_HOST" or k == "OSMNBI_SERVER_HOST":
+                update_dict["server.socket_host"] = v
             elif k1 in ("server", "test", "auth", "log"):
-                update_dict[k1 + '.' + k2] = v
+                update_dict[k1 + "." + k2] = v
             elif k1 in ("message", "database", "storage", "authentication"):
                 # k2 = k2.replace('_', '.')
                 if k2 in ("port", "db_port"):
@@ -1296,26 +1638,34 @@ def _start_service():
         engine_config["global"].update(update_dict)
 
     # logging cherrypy
-    log_format_simple = "%(asctime)s %(levelname)s %(name)s %(filename)s:%(lineno)s %(message)s"
-    log_formatter_simple = logging.Formatter(log_format_simple, datefmt='%Y-%m-%dT%H:%M:%S')
+    log_format_simple = (
+        "%(asctime)s %(levelname)s %(name)s %(filename)s:%(lineno)s %(message)s"
+    )
+    log_formatter_simple = logging.Formatter(
+        log_format_simple, datefmt="%Y-%m-%dT%H:%M:%S"
+    )
     logger_server = logging.getLogger("cherrypy.error")
     logger_access = logging.getLogger("cherrypy.access")
     logger_cherry = logging.getLogger("cherrypy")
     logger_nbi = logging.getLogger("nbi")
 
     if "log.file" in engine_config["global"]:
-        file_handler = logging.handlers.RotatingFileHandler(engine_config["global"]["log.file"],
-                                                            maxBytes=100e6, backupCount=9, delay=0)
+        file_handler = logging.handlers.RotatingFileHandler(
+            engine_config["global"]["log.file"], maxBytes=100e6, backupCount=9, delay=0
+        )
         file_handler.setFormatter(log_formatter_simple)
         logger_cherry.addHandler(file_handler)
         logger_nbi.addHandler(file_handler)
     # log always to standard output
-    for format_, logger in {"nbi.server %(filename)s:%(lineno)s": logger_server,
-                            "nbi.access %(filename)s:%(lineno)s": logger_access,
-                            "%(name)s %(filename)s:%(lineno)s": logger_nbi
-                            }.items():
+    for format_, logger in {
+        "nbi.server %(filename)s:%(lineno)s": logger_server,
+        "nbi.access %(filename)s:%(lineno)s": logger_access,
+        "%(name)s %(filename)s:%(lineno)s": logger_nbi,
+    }.items():
         log_format_cherry = "%(asctime)s %(levelname)s {} %(message)s".format(format_)
-        log_formatter_cherry = logging.Formatter(log_format_cherry, datefmt='%Y-%m-%dT%H:%M:%S')
+        log_formatter_cherry = logging.Formatter(
+            log_format_cherry, datefmt="%Y-%m-%dT%H:%M:%S"
+        )
         str_handler = logging.StreamHandler()
         str_handler.setFormatter(log_formatter_cherry)
         logger.addHandler(str_handler)
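
As an aside, the handler wiring being reformatted here follows a standard Python logging pattern: one shared formatter, an optional RotatingFileHandler, and an always-on StreamHandler. A minimal standalone sketch of that pattern; the file path and logger name below are illustrative and not taken from the patch:

    import logging
    import logging.handlers

    # Hypothetical values; the real code derives them from engine_config.
    log_file = "/tmp/example-nbi.log"
    formatter = logging.Formatter(
        "%(asctime)s %(levelname)s %(name)s %(filename)s:%(lineno)s %(message)s",
        datefmt="%Y-%m-%dT%H:%M:%S",
    )

    logger = logging.getLogger("example.nbi")
    logger.setLevel(logging.DEBUG)

    # Rotating file handler: roughly 100 MB per file, 9 backups kept.
    file_handler = logging.handlers.RotatingFileHandler(
        log_file, maxBytes=int(100e6), backupCount=9, delay=0
    )
    file_handler.setFormatter(formatter)
    logger.addHandler(file_handler)

    # Log to standard output as well.
    stream_handler = logging.StreamHandler()
    stream_handler.setFormatter(formatter)
    logger.addHandler(stream_handler)

    logger.info("logging configured")
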
@@ -1325,30 +1675,42 @@ def _start_service():
         logger_nbi.setLevel(engine_config["global"]["log.level"])
 
     # logging other modules
-    for k1, logname in {"message": "nbi.msg", "database": "nbi.db", "storage": "nbi.fs"}.items():
+    for k1, logname in {
+        "message": "nbi.msg",
+        "database": "nbi.db",
+        "storage": "nbi.fs",
+    }.items():
         engine_config[k1]["logger_name"] = logname
         logger_module = logging.getLogger(logname)
         if "logfile" in engine_config[k1]:
-            file_handler = logging.handlers.RotatingFileHandler(engine_config[k1]["logfile"],
-                                                                maxBytes=100e6, backupCount=9, delay=0)
+            file_handler = logging.handlers.RotatingFileHandler(
+                engine_config[k1]["logfile"], maxBytes=100e6, backupCount=9, delay=0
+            )
             file_handler.setFormatter(log_formatter_simple)
             logger_module.addHandler(file_handler)
         if "loglevel" in engine_config[k1]:
             logger_module.setLevel(engine_config[k1]["loglevel"])
     # TODO add more entries, e.g.: storage
-    cherrypy.tree.apps['/osm'].root.engine.start(engine_config)
-    cherrypy.tree.apps['/osm'].root.authenticator.start(engine_config)
-    cherrypy.tree.apps['/osm'].root.engine.init_db(target_version=database_version)
-    cherrypy.tree.apps['/osm'].root.authenticator.init_db(target_version=auth_database_version)
+    cherrypy.tree.apps["/osm"].root.engine.start(engine_config)
+    cherrypy.tree.apps["/osm"].root.authenticator.start(engine_config)
+    cherrypy.tree.apps["/osm"].root.engine.init_db(target_version=database_version)
+    cherrypy.tree.apps["/osm"].root.authenticator.init_db(
+        target_version=auth_database_version
+    )
 
     # start subscriptions thread:
-    subscription_thread = SubscriptionThread(config=engine_config, engine=nbi_server.engine)
+    subscription_thread = SubscriptionThread(
+        config=engine_config, engine=nbi_server.engine
+    )
     subscription_thread.start()
     # Do not capture except SubscriptionException
 
     backend = engine_config["authentication"]["backend"]
-    cherrypy.log.error("Starting OSM NBI Version '{} {}' with '{}' authentication backend"
-                       .format(nbi_version, nbi_version_date, backend))
+    cherrypy.log.error(
+        "Starting OSM NBI Version '{} {}' with '{}' authentication backend".format(
+            nbi_version, nbi_version_date, backend
+        )
+    )
 
 
 def _stop_service():
@@ -1360,7 +1722,7 @@ def _stop_service():
     if subscription_thread:
         subscription_thread.terminate()
     subscription_thread = None
-    cherrypy.tree.apps['/osm'].root.engine.stop()
+    cherrypy.tree.apps["/osm"].root.engine.stop()
     cherrypy.log.error("Stopping osm_nbi")
 
 
@@ -1384,21 +1746,25 @@ def nbi(config_file):
     #    'tools.auth_basic.realm': 'localhost',
     #    'tools.auth_basic.checkpassword': validate_password})
     nbi_server = Server()
-    cherrypy.engine.subscribe('start', _start_service)
-    cherrypy.engine.subscribe('stop', _stop_service)
-    cherrypy.quickstart(nbi_server, '/osm', config_file)
+    cherrypy.engine.subscribe("start", _start_service)
+    cherrypy.engine.subscribe("stop", _stop_service)
+    cherrypy.quickstart(nbi_server, "/osm", config_file)
 
 
 def usage():
-    print("""Usage: {} [options]
+    print(
+        """Usage: {} [options]
         -c|--config [configuration_file]: loads the configuration file (default: ./nbi.cfg)
         -h|--help: shows this help
-        """.format(sys.argv[0]))
+        """.format(
+            sys.argv[0]
+        )
+    )
     # --log-socket-host HOST: send logs to this host")
     # --log-socket-port PORT: send logs using this port (default: 9022)")
 
 
-if __name__ == '__main__':
+if __name__ == "__main__":
     try:
         # load parameters and configuration
         opts, args = getopt.getopt(sys.argv[1:], "hvc:", ["config=", "help"])
@@ -1420,14 +1786,24 @@ if __name__ == '__main__':
                 assert False, "Unhandled option"
         if config_file:
             if not path.isfile(config_file):
-                print("configuration file '{}' that not exist".format(config_file), file=sys.stderr)
+                print(
+                    "configuration file '{}' that not exist".format(config_file),
+                    file=sys.stderr,
+                )
                 exit(1)
         else:
-            for config_file in (__file__[:__file__.rfind(".")] + ".cfg", "./nbi.cfg", "/etc/osm/nbi.cfg"):
+            for config_file in (
+                __file__[: __file__.rfind(".")] + ".cfg",
+                "./nbi.cfg",
+                "/etc/osm/nbi.cfg",
+            ):
                 if path.isfile(config_file):
                     break
             else:
-                print("No configuration file 'nbi.cfg' found neither at local folder nor at /etc/osm/", file=sys.stderr)
+                print(
+                    "No configuration file 'nbi.cfg' found neither at local folder nor at /etc/osm/",
+                    file=sys.stderr,
+                )
                 exit(1)
         nbi(config_file)
     except getopt.GetoptError as e:
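
The configuration-file lookup just above relies on Python's for/else construct: the else branch runs only when the loop finished without a break, i.e. when none of the candidate paths existed. A small self-contained illustration of the idiom, with hypothetical paths:

    from os import path

    # Hypothetical candidate locations, checked in order of preference.
    candidate_files = ("./example.cfg", "/etc/example/example.cfg")
    for config_file in candidate_files:
        if path.isfile(config_file):
            break  # first existing file wins; the else clause is skipped
    else:
        # Reached only if no candidate matched (no break happened).
        config_file = None
        print("no configuration file found in any candidate location")

    print("selected configuration file:", config_file)
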
index 2e7ba94..7b681a1 100644 (file)
@@ -44,10 +44,7 @@ class NotificationBase:
 
     response_models = None
     # Common HTTP payload header for all notifications.
-    payload_header = {
-        "Content-Type": "application/json",
-        "Accept": "application/json"
-    }
+    payload_header = {"Content-Type": "application/json", "Accept": "application/json"}
 
     def __init__(self, db) -> None:
         """
@@ -72,13 +69,18 @@ class NotificationBase:
         :param kwargs: any keyword arguments needed for db query.
         :return: List of subscribers
         """
-        raise NotificationException("Method get_subscribers() is not implemented", http_code=HTTPStatus.NOT_IMPLEMENTED)
+        raise NotificationException(
+            "Method get_subscribers() is not implemented",
+            http_code=HTTPStatus.NOT_IMPLEMENTED,
+        )
 
     @staticmethod
     def _get_basic_auth(username: str, password: str) -> tuple:
         return aiohttp.BasicAuth(username, password)
 
-    def _decrypt_password(self, hashed: str, salt: str, schema_version: str = "1.1") -> str:
+    def _decrypt_password(
+        self, hashed: str, salt: str, schema_version: str = "1.1"
+    ) -> str:
         return self.db.decrypt(hashed, schema_version, salt=salt)
 
     def get_payload(self, meta_notification: dict) -> dict:
@@ -90,18 +92,25 @@ class NotificationBase:
         model_name = meta_notification["notificationType"]
         response_models = self.get_models()
         if not response_models or not response_models.get(model_name):
-            raise NotificationException("Response model {} is not defined.".format(model_name),
-                                        HTTPStatus.NOT_IMPLEMENTED)
+            raise NotificationException(
+                "Response model {} is not defined.".format(model_name),
+                HTTPStatus.NOT_IMPLEMENTED,
+            )
         model_keys = response_models[model_name]
         payload = dict.fromkeys(model_keys, "N/A")
         notification_keys = set(meta_notification.keys())
         for model_key in model_keys.intersection(notification_keys):
             payload[model_key] = meta_notification[model_key]
-        self.logger.debug("Payload generated for subscriber: {} for {}".format(payload["subscriptionId"],
-                                                                               payload["notificationType"]))
+        self.logger.debug(
+            "Payload generated for subscriber: {} for {}".format(
+                payload["subscriptionId"], payload["notificationType"]
+            )
+        )
         return payload
 
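
The get_payload() logic above amounts to projecting the notification dict onto a fixed set of model keys, defaulting any missing field to "N/A". A reduced sketch of that projection; the model and notification contents are made up for illustration:

    # Hypothetical model: the fields the outgoing payload must carry.
    model_keys = {"notificationType", "subscriptionId", "timestamp", "nsInstanceId"}

    meta_notification = {
        "notificationType": "NsIdentifierCreationNotification",
        "subscriptionId": "sub-0001",
        "internal_field": "ignored",  # not part of the model, so dropped
    }

    # Start with every model key set to "N/A", then copy the keys present
    # in both the model and the notification.
    payload = dict.fromkeys(model_keys, "N/A")
    for key in model_keys.intersection(meta_notification.keys()):
        payload[key] = meta_notification[key]

    print(payload)  # "timestamp" and "nsInstanceId" remain "N/A"
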
-    async def send_notifications(self, subscribers: list, loop: asyncio.AbstractEventLoop = None):
+    async def send_notifications(
+        self, subscribers: list, loop: asyncio.AbstractEventLoop = None
+    ):
         """
         Generate tasks for all notification for an event.
         :param subscribers: A list of subscribers who want to be notified for event.
@@ -111,38 +120,59 @@ class NotificationBase:
         for subscriber in subscribers:
             # Notify without auth
             if not subscriber.get("authentication"):
-                notifications.append({
-                    "headers": self.payload_header,
-                    "payload": self.get_payload(subscriber),
-                    "CallbackUri": subscriber["CallbackUri"]
-                })
+                notifications.append(
+                    {
+                        "headers": self.payload_header,
+                        "payload": self.get_payload(subscriber),
+                        "CallbackUri": subscriber["CallbackUri"],
+                    }
+                )
             elif subscriber["authentication"]["authType"] == "basic":
                 salt = subscriber["subscriptionId"]
-                hashed_password = subscriber["authentication"]["paramsBasic"]["password"]
+                hashed_password = subscriber["authentication"]["paramsBasic"][
+                    "password"
+                ]
                 password = self._decrypt_password(hashed_password, salt)
-                auth_basic = self._get_basic_auth(subscriber["authentication"]["paramsBasic"]["userName"], password)
-                notifications.append({
-                    "headers": self.payload_header,
-                    "payload": self.get_payload(subscriber),
-                    "auth_basic": auth_basic,
-                    "CallbackUri": subscriber["CallbackUri"]
-                })
+                auth_basic = self._get_basic_auth(
+                    subscriber["authentication"]["paramsBasic"]["userName"], password
+                )
+                notifications.append(
+                    {
+                        "headers": self.payload_header,
+                        "payload": self.get_payload(subscriber),
+                        "auth_basic": auth_basic,
+                        "CallbackUri": subscriber["CallbackUri"],
+                    }
+                )
             # TODO add support for AuthType OAuth and TLS after support is added in subscription.
             else:
-                self.logger.debug("Subscriber {} can not be notified. {} notification auth type is not implemented"
-                                  .format(subscriber["subscriptionId"],
-                                          subscriber["authentication"]["authType"]))
+                self.logger.debug(
+                    "Subscriber {} can not be notified. {} notification auth type is not implemented".format(
+                        subscriber["subscriptionId"],
+                        subscriber["authentication"]["authType"],
+                    )
+                )
 
         if notifications:
             tasks = []
             async with aiohttp.ClientSession(loop=loop) as session:
                 for notification in notifications:
-                    tasks.append(asyncio.ensure_future(self.send_notification(session, notification, loop=loop),
-                                                       loop=loop))
+                    tasks.append(
+                        asyncio.ensure_future(
+                            self.send_notification(session, notification, loop=loop),
+                            loop=loop,
+                        )
+                    )
                 await asyncio.gather(*tasks, loop=loop)
 
-    async def send_notification(self, session: aiohttp.ClientSession, notification: dict,
-                                loop: asyncio.AbstractEventLoop = None, retry_count: int = 5, timeout: float = 5.0):
+    async def send_notification(
+        self,
+        session: aiohttp.ClientSession,
+        notification: dict,
+        loop: asyncio.AbstractEventLoop = None,
+        retry_count: int = 5,
+        timeout: float = 5.0,
+    ):
         """
         Performs an HTTP POST request to notify the subscriber. If, for any reason, the notification is not sent
         successfully after the maximum number of retries, it is dropped.
@@ -155,14 +185,20 @@ class NotificationBase:
         backoff_delay = 1
         while retry_count > 0:
             try:
-                async with session.post(url=notification["CallbackUri"], headers=notification["headers"],
-                                        auth=notification.get("auth_basic", None),
-                                        data=json.dumps(notification["payload"]),
-                                        timeout=timeout) as resp:
+                async with session.post(
+                    url=notification["CallbackUri"],
+                    headers=notification["headers"],
+                    auth=notification.get("auth_basic", None),
+                    data=json.dumps(notification["payload"]),
+                    timeout=timeout,
+                ) as resp:
                     # self.logger.debug("Notification response: {}".format(resp.status))
                     if resp.status == HTTPStatus.NO_CONTENT:
-                        self.logger.debug("Notification sent successfully to subscriber {}"
-                                          .format(notification["payload"]["subscriptionId"]))
+                        self.logger.debug(
+                            "Notification sent successfully to subscriber {}".format(
+                                notification["payload"]["subscriptionId"]
+                            )
+                        )
                     else:
                         error_text = "Erroneous response code: {}, ".format(resp.status)
                         error_text += await resp.text()
@@ -170,12 +206,15 @@ class NotificationBase:
                 return True
             except Exception as e:
                 error_text = type(e).__name__ + ": " + str(e)
-                self.logger.debug("Unable to send notification to subscriber {}. Details: {}"
-                                  .format(notification["payload"]["subscriptionId"], error_text))
+                self.logger.debug(
+                    "Unable to send notification to subscriber {}. Details: {}".format(
+                        notification["payload"]["subscriptionId"], error_text
+                    )
+                )
                 error_detail = {
                     "error": type(e).__name__,
                     "error_text": str(e),
-                    "timestamp": time.time()
+                    "timestamp": time.time(),
                 }
                 if "error_details" in notification["payload"].keys():
                     notification["payload"]["error_details"].append(error_detail)
@@ -183,13 +222,19 @@ class NotificationBase:
                     notification["payload"]["error_details"] = [error_detail]
                 retry_count -= 1
                 backoff_delay *= 2
-                self.logger.debug("Retry Notification for subscriber: {} after backoff delay: {} seconds."
-                                  .format(notification["payload"]["subscriptionId"], backoff_delay))
+                self.logger.debug(
+                    "Retry Notification for subscriber: {} after backoff delay: {} seconds.".format(
+                        notification["payload"]["subscriptionId"], backoff_delay
+                    )
+                )
                 await asyncio.sleep(backoff_delay, loop=loop)
         # Dropping notification
-        self.logger.debug("Notification {} sent failed to subscriber:{}."
-                          .format(notification["payload"]["notificationType"],
-                                  notification["payload"]["subscriptionId"]))
+        self.logger.debug(
+            "Notification {} sent failed to subscriber:{}.".format(
+                notification["payload"]["notificationType"],
+                notification["payload"]["subscriptionId"],
+            )
+        )
         return False
 
 
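
send_notification() above wraps an HTTP POST in a retry loop with exponential backoff. A trimmed-down sketch of the same pattern, assuming a plain aiohttp session and a hypothetical callback URL; the error bookkeeping stored on the payload is omitted:

    import asyncio
    import json

    import aiohttp

    async def post_with_backoff(url, payload, retry_count=5, timeout=5.0):
        # Retry the POST, doubling the delay after each failed attempt.
        backoff_delay = 1
        async with aiohttp.ClientSession() as session:
            while retry_count > 0:
                try:
                    async with session.post(
                        url, data=json.dumps(payload), timeout=timeout
                    ) as resp:
                        resp.raise_for_status()
                        return True
                except Exception as exc:
                    print("attempt failed: {}; retrying in {}s".format(exc, backoff_delay))
                    retry_count -= 1
                    await asyncio.sleep(backoff_delay)
                    backoff_delay *= 2
        return False  # all retries exhausted; the notification is dropped

    # Usage (hypothetical endpoint):
    # asyncio.run(post_with_backoff("http://localhost:8080/callback", {"k": "v"}))
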
@@ -197,22 +242,52 @@ class NsLcmNotification(NotificationBase):
 
     # SOL005 response model for nslcm notifications
     response_models = {
-        "NsLcmOperationOccurrenceNotification": {"id", "nsInstanceId", "nsLcmOpOccId", "operation",
-                                                 "notificationType", "subscriptionId", "timestamp",
-                                                 "notificationStatus", "operationState", "isAutomaticInvocation",
-                                                 "affectedVnf", "affectedVl", "affectedVnffg", "affectedNs",
-                                                 "affectedSap", "error", "_links"},
-
-        "NsIdentifierCreationNotification": {"notificationType", "subscriptionId", "timestamp",
-                                             "nsInstanceId", "_links"},
-
-        "NsIdentifierDeletionNotification": {"notificationType", "subscriptionId", "timestamp",
-                                             "nsInstanceId", "_links"},
-
-        "NsChangeNotification": {"nsInstanceId", "nsComponentType", "nsComponentId",
-                                 "lcmOpOccIdImpactngNsComponent", "lcmOpNameImpactingNsComponent",
-                                 "lcmOpOccStatusImpactingNsComponent", "notificationType", "subscriptionId",
-                                 "timeStamp", "error", "_links"}
+        "NsLcmOperationOccurrenceNotification": {
+            "id",
+            "nsInstanceId",
+            "nsLcmOpOccId",
+            "operation",
+            "notificationType",
+            "subscriptionId",
+            "timestamp",
+            "notificationStatus",
+            "operationState",
+            "isAutomaticInvocation",
+            "affectedVnf",
+            "affectedVl",
+            "affectedVnffg",
+            "affectedNs",
+            "affectedSap",
+            "error",
+            "_links",
+        },
+        "NsIdentifierCreationNotification": {
+            "notificationType",
+            "subscriptionId",
+            "timestamp",
+            "nsInstanceId",
+            "_links",
+        },
+        "NsIdentifierDeletionNotification": {
+            "notificationType",
+            "subscriptionId",
+            "timestamp",
+            "nsInstanceId",
+            "_links",
+        },
+        "NsChangeNotification": {
+            "nsInstanceId",
+            "nsComponentType",
+            "nsComponentId",
+            "lcmOpOccIdImpactngNsComponent",
+            "lcmOpNameImpactingNsComponent",
+            "lcmOpOccStatusImpactingNsComponent",
+            "notificationType",
+            "subscriptionId",
+            "timeStamp",
+            "error",
+            "_links",
+        },
     }
 
     def __init__(self, db) -> None:
@@ -256,8 +331,14 @@ class NsLcmNotification(NotificationBase):
             subscriber.update(event_details["params"])
         return subscribers
 
-    def get_subscribers(self, nsd_id: str, ns_instance_id: str, command: str, op_state: str,
-                        event_details: dict) -> list:
+    def get_subscribers(
+        self,
+        nsd_id: str,
+        ns_instance_id: str,
+        command: str,
+        op_state: str,
+        event_details: dict,
+    ) -> list:
         """
         Queries database and returns list of subscribers.
         :param nsd_id: NSD id of an NS whose lifecycle has changed (scaled, terminated, etc.)
@@ -267,7 +348,11 @@ class NsLcmNotification(NotificationBase):
         :param event_details: dict containing raw data of the event that occurred.
         :return: List of interested subscribers for occurred event.
         """
-        filter_q = {"identifier": [nsd_id, ns_instance_id], "operationStates": ["ANY"], "operationTypes": ["ANY"]}
+        filter_q = {
+            "identifier": [nsd_id, ns_instance_id],
+            "operationStates": ["ANY"],
+            "operationTypes": ["ANY"],
+        }
         if op_state:
             filter_q["operationStates"].append(op_state)
         if command:
@@ -285,7 +370,6 @@ class NsLcmNotification(NotificationBase):
 
 
 class NsdNotification(NotificationBase):
-
     def __init__(self, db):
         """
         Constructor of the class
@@ -297,7 +381,6 @@ class NsdNotification(NotificationBase):
 
 
 class VnfdNotification(NotificationBase):
-
     def __init__(self, db):
         """
         Constructor of the class
index 3142744..f8d7714 100644 (file)
@@ -22,30 +22,49 @@ from osm_nbi.base_topic import EngineException
 __author__ = "Vijay R S <vijay.r@tataelxsi.co.in>"
 
 
-class PmJobsTopic():
+class PmJobsTopic:
     def __init__(self, db, host=None, port=None):
         self.db = db
-        self.url = 'http://{}:{}'.format(host, port)
-        self.nfvi_metric_list = ['cpu_utilization', 'average_memory_utilization', 'disk_read_ops',
-                                 'disk_write_ops', 'disk_read_bytes', 'disk_write_bytes',
-                                 'packets_dropped', 'packets_sent', 'packets_received']
+        self.url = "http://{}:{}".format(host, port)
+        self.nfvi_metric_list = [
+            "cpu_utilization",
+            "average_memory_utilization",
+            "disk_read_ops",
+            "disk_write_ops",
+            "disk_read_bytes",
+            "disk_write_bytes",
+            "packets_dropped",
+            "packets_sent",
+            "packets_received",
+        ]
 
     def _get_vnf_metric_list(self, ns_id):
         metric_list = self.nfvi_metric_list.copy()
         vnfr_desc = self.db.get_list("vnfrs", {"nsr-id-ref": ns_id})
         if not vnfr_desc:
-            raise EngineException("NS not found with id {}".format(ns_id), http_code=HTTPStatus.NOT_FOUND)
+            raise EngineException(
+                "NS not found with id {}".format(ns_id), http_code=HTTPStatus.NOT_FOUND
+            )
         else:
             for vnfr in vnfr_desc:
-                vnfd_desc = self.db.get_one("vnfds", {"_id": vnfr["vnfd-id"]}, fail_on_empty=True, fail_on_more=False)
+                vnfd_desc = self.db.get_one(
+                    "vnfds",
+                    {"_id": vnfr["vnfd-id"]},
+                    fail_on_empty=True,
+                    fail_on_more=False,
+                )
                 try:
-                    configs = vnfd_desc.get("df")[0]["lcm-operations-configuration"]["operate-vnf-op-config"]["day1-2"]
+                    configs = vnfd_desc.get("df")[0]["lcm-operations-configuration"][
+                        "operate-vnf-op-config"
+                    ]["day1-2"]
                 except Exception:
                     configs = []
 
                 for config in configs:
                     if "metrics" in config:
-                        metric_list.extend([quote(metric['name']) for metric in config["metrics"]])
+                        metric_list.extend(
+                            [quote(metric["name"]) for metric in config["metrics"]]
+                        )
         metric_list = list(set(metric_list))
         return metric_list
 
@@ -54,10 +73,17 @@ class PmJobsTopic():
             async with aiohttp.ClientSession() as session:
                 data = []
                 for metlist in metrics_list:
-                    request_url = self.url+'/api/v1/query?query=osm_'+metlist+"{ns_id='"+ns_id+"'}"
+                    request_url = (
+                        self.url
+                        + "/api/v1/query?query=osm_"
+                        + metlist
+                        + "{ns_id='"
+                        + ns_id
+                        + "'}"
+                    )
                     async with session.get(request_url) as resp:
                         resp = await resp.json()
-                        resp = resp['data']['result']
+                        resp = resp["data"]["result"]
                         if resp:
                             data.append(resp)
                 return data
@@ -68,22 +94,29 @@ class PmJobsTopic():
         metrics_list = self._get_vnf_metric_list(ns_id)
         loop = asyncio.new_event_loop()
         asyncio.set_event_loop(loop)
-        prom_metric = loop.run_until_complete(self._prom_metric_request(ns_id, metrics_list))
+        prom_metric = loop.run_until_complete(
+            self._prom_metric_request(ns_id, metrics_list)
+        )
         metric = {}
         metric_temp = []
         for index_list in prom_metric:
             for index in index_list:
-                process_metric = {'performanceValue': {'performanceValue': {}}}
-                process_metric['objectInstanceId'] = index['metric']['ns_id']
-                process_metric['performanceMetric'] = index['metric']['__name__']
-                process_metric['performanceValue']['timestamp'] = index['value'][0]
-                process_metric['performanceValue']['performanceValue']['performanceValue'] = index['value'][1]
-                process_metric['performanceValue']['performanceValue']['vnfMemberIndex'] \
-                    = index['metric']['vnf_member_index']
-                if 'vdu_name' not in index['metric']:
+                process_metric = {"performanceValue": {"performanceValue": {}}}
+                process_metric["objectInstanceId"] = index["metric"]["ns_id"]
+                process_metric["performanceMetric"] = index["metric"]["__name__"]
+                process_metric["performanceValue"]["timestamp"] = index["value"][0]
+                process_metric["performanceValue"]["performanceValue"][
+                    "performanceValue"
+                ] = index["value"][1]
+                process_metric["performanceValue"]["performanceValue"][
+                    "vnfMemberIndex"
+                ] = index["metric"]["vnf_member_index"]
+                if "vdu_name" not in index["metric"]:
                     pass
                 else:
-                    process_metric['performanceValue']['performanceValue']['vduName'] = index['metric']['vdu_name']
+                    process_metric["performanceValue"]["performanceValue"][
+                        "vduName"
+                    ] = index["metric"]["vdu_name"]
                 metric_temp.append(process_metric)
-        metric['entries'] = metric_temp
+        metric["entries"] = metric_temp
         return metric
index 7a3d52e..92c7417 100644 (file)
@@ -59,20 +59,29 @@ class CommonSubscriptions(BaseTopic):
             filter_dict["authentication"] = None  # For Items without authentication
         existing_subscriptions = self.db.get_list("subscriptions", q_filter=filter_dict)
         new_sub_pwd = None
-        if content.get("authentication") and content["authentication"].get("authType") == "basic":
+        if (
+            content.get("authentication")
+            and content["authentication"].get("authType") == "basic"
+        ):
             new_sub_pwd = content["authentication"]["paramsBasic"]["password"]
             content["authentication"]["paramsBasic"].pop("password", None)
         for existing_subscription in existing_subscriptions:
             sub_id = existing_subscription.pop("_id", None)
             existing_subscription.pop("_admin", None)
             existing_subscription.pop("schema_version", None)
-            if existing_subscription.get("authentication") and \
-                    existing_subscription["authentication"].get("authType") == "basic":
-                existing_subscription["authentication"]["paramsBasic"].pop("password", None)
+            if (
+                existing_subscription.get("authentication")
+                and existing_subscription["authentication"].get("authType") == "basic"
+            ):
+                existing_subscription["authentication"]["paramsBasic"].pop(
+                    "password", None
+                )
             # self.logger.debug(existing_subscription)
             if existing_subscription == content:
-                raise EngineException("Subscription already exists with id: {}".format(sub_id),
-                                      HTTPStatus.CONFLICT)
+                raise EngineException(
+                    "Subscription already exists with id: {}".format(sub_id),
+                    HTTPStatus.CONFLICT,
+                )
         if new_sub_pwd:
             content["authentication"]["paramsBasic"]["password"] = new_sub_pwd
         return
@@ -91,18 +100,28 @@ class CommonSubscriptions(BaseTopic):
                 if auth is None:
                     response = requests.get(url, timeout=5)
                     if response.status_code != HTTPStatus.NO_CONTENT:
-                        raise EngineException("Cannot access to the notification URL '{}',received {}: {}"
-                                              .format(url, response.status_code, response.content))
+                        raise EngineException(
+                            "Cannot access to the notification URL '{}',received {}: {}".format(
+                                url, response.status_code, response.content
+                            )
+                        )
                 elif auth["authType"] == "basic":
                     username = auth["paramsBasic"].get("userName")
                     password = auth["paramsBasic"].get("password")
                     response = requests.get(url, auth=(username, password), timeout=5)
                     if response.status_code != HTTPStatus.NO_CONTENT:
-                        raise EngineException("Cannot access to the notification URL '{}',received {}: {}"
-                                              .format(url, response.status_code, response.content))
+                        raise EngineException(
+                            "Cannot access to the notification URL '{}',received {}: {}".format(
+                                url, response.status_code, response.content
+                            )
+                        )
             except requests.exceptions.RequestException as e:
                 error_text = type(e).__name__ + ": " + str(e)
-                raise EngineException("Cannot access to the notification URL '{}': {}".format(url, error_text))
+                raise EngineException(
+                    "Cannot access to the notification URL '{}': {}".format(
+                        url, error_text
+                    )
+                )
 
         url = content["CallbackUri"]
         auth = content.get("authentication")
@@ -110,9 +129,11 @@ class CommonSubscriptions(BaseTopic):
         content["schema_version"] = schema_version = "1.1"
         if auth is not None and auth["authType"] == "basic":
             if content["authentication"]["paramsBasic"].get("password"):
-                content["authentication"]["paramsBasic"]["password"] = \
-                    self.db.encrypt(content["authentication"]["paramsBasic"]["password"],
-                                    schema_version=schema_version, salt=content["_id"])
+                content["authentication"]["paramsBasic"]["password"] = self.db.encrypt(
+                    content["authentication"]["paramsBasic"]["password"],
+                    schema_version=schema_version,
+                    salt=content["_id"],
+                )
         return None
 
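
The callback check above probes the subscriber's CallbackUri with a test GET, optionally using basic auth, and expects HTTP 204 (NO_CONTENT). A standalone sketch of that probe; the URL and credentials are hypothetical:

    from http import HTTPStatus

    import requests

    def verify_callback(url, username=None, password=None, timeout=5):
        # Use basic auth only when credentials were supplied.
        auth = (username, password) if username else None
        response = requests.get(url, auth=auth, timeout=timeout)
        if response.status_code != HTTPStatus.NO_CONTENT:
            raise RuntimeError(
                "Cannot access the notification URL '{}', received {}: {}".format(
                    url, response.status_code, response.content
                )
            )

    # Usage (hypothetical endpoint):
    # verify_callback("http://subscriber.example/notify", "user", "secret")
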
     def new(self, rollback, session, indata=None, kwargs=None, headers=None):
@@ -120,8 +141,16 @@ class CommonSubscriptions(BaseTopic):
         Uses BaseTopic.new to create entry into db
         Once entry is made into subscriptions,mapper function is invoked
         """
-        _id, op_id = BaseTopic.new(self, rollback, session, indata=indata, kwargs=kwargs, headers=headers)
-        rollback.append({"topic": "mapped_subscriptions", "operation": "del_list", "filter": {"reference": _id}})
+        _id, op_id = BaseTopic.new(
+            self, rollback, session, indata=indata, kwargs=kwargs, headers=headers
+        )
+        rollback.append(
+            {
+                "topic": "mapped_subscriptions",
+                "operation": "del_list",
+                "filter": {"reference": _id},
+            }
+        )
         self._subscription_mapper(_id, indata, table="mapped_subscriptions")
         return _id, op_id
 
@@ -145,8 +174,10 @@ class NslcmSubscriptionsTopic(CommonSubscriptions):
         :param table: table in which transformed data are inserted
         """
         formatted_data = []
-        formed_data = {"reference": data.get("_id"),
-                       "CallbackUri": data.get("CallbackUri")}
+        formed_data = {
+            "reference": data.get("_id"),
+            "CallbackUri": data.get("CallbackUri"),
+        }
         if data.get("authentication"):
             formed_data.update({"authentication": data.get("authentication")})
         if data.get("filter"):
@@ -168,27 +199,38 @@ class NslcmSubscriptionsTopic(CommonSubscriptions):
                         formatted_data.append(update_dict)
                     elif elem == "NsLcmOperationOccurrenceNotification":
                         if "operationTypes" in data["filter"].keys():
-                            update_dict["operationTypes"] = data["filter"]["operationTypes"]
+                            update_dict["operationTypes"] = data["filter"][
+                                "operationTypes"
+                            ]
                         else:
                             update_dict["operationTypes"] = "ANY"
                         if "operationStates" in data["filter"].keys():
-                            update_dict["operationStates"] = data["filter"]["operationStates"]
+                            update_dict["operationStates"] = data["filter"][
+                                "operationStates"
+                            ]
                         else:
                             update_dict["operationStates"] = "ANY"
                         formatted_data.append(update_dict)
                     elif elem == "NsChangeNotification":
                         if "nsComponentTypes" in data["filter"].keys():
-                            update_dict["nsComponentTypes"] = data["filter"]["nsComponentTypes"]
+                            update_dict["nsComponentTypes"] = data["filter"][
+                                "nsComponentTypes"
+                            ]
                         else:
                             update_dict["nsComponentTypes"] = "ANY"
                         if "lcmOpNameImpactingNsComponent" in data["filter"].keys():
-                            update_dict["lcmOpNameImpactingNsComponent"] = \
-                                data["filter"]["lcmOpNameImpactingNsComponent"]
+                            update_dict["lcmOpNameImpactingNsComponent"] = data[
+                                "filter"
+                            ]["lcmOpNameImpactingNsComponent"]
                         else:
                             update_dict["lcmOpNameImpactingNsComponent"] = "ANY"
-                        if "lcmOpOccStatusImpactingNsComponent" in data["filter"].keys():
-                            update_dict["lcmOpOccStatusImpactingNsComponent"] = \
-                                data["filter"]["lcmOpOccStatusImpactingNsComponent"]
+                        if (
+                            "lcmOpOccStatusImpactingNsComponent"
+                            in data["filter"].keys()
+                        ):
+                            update_dict["lcmOpOccStatusImpactingNsComponent"] = data[
+                                "filter"
+                            ]["lcmOpOccStatusImpactingNsComponent"]
                         else:
                             update_dict["lcmOpOccStatusImpactingNsComponent"] = "ANY"
                         formatted_data.append(update_dict)
index ec70d0c..6810ccd 100644 (file)
@@ -35,14 +35,12 @@ __author__ = "Alfonso Tierno <alfonso.tiernosepulveda@telefonica.com>"
 
 
 class SubscriptionException(Exception):
-
     def __init__(self, message, http_code=HTTPStatus.BAD_REQUEST):
         self.http_code = http_code
         Exception.__init__(self, message)
 
 
 class SubscriptionThread(threading.Thread):
-
     def __init__(self, config, engine):
         """
         Constructor of class
@@ -57,8 +55,12 @@ class SubscriptionThread(threading.Thread):
         self.engine = engine
         self.loop = None
         self.logger = logging.getLogger("nbi.subscriptions")
-        self.aiomain_task_admin = None  # asyncio task for receiving admin actions from kafka bus
-        self.aiomain_task = None  # asyncio task for receiving normal actions from kafka bus
+        self.aiomain_task_admin = (
+            None  # asyncio task for receiving admin actions from kafka bus
+        )
+        self.aiomain_task = (
+            None  # asyncio task for receiving normal actions from kafka bus
+        )
         self.internal_session = {  # used for a session to the engine methods
             "project_id": (),
             "set_project": (),
@@ -77,7 +79,9 @@ class SubscriptionThread(threading.Thread):
                 # bug 710 635. The library aiokafka does not receive anything when the topic at kafka has not been
                 # created.
                 # Before subscribe, send dummy messages
-                await self.msg.aiowrite("admin", "echo", "dummy message", loop=self.loop)
+                await self.msg.aiowrite(
+                    "admin", "echo", "dummy message", loop=self.loop
+                )
                 await self.msg.aiowrite("ns", "echo", "dummy message", loop=self.loop)
                 await self.msg.aiowrite("nsi", "echo", "dummy message", loop=self.loop)
                 if not kafka_working:
@@ -86,26 +90,44 @@ class SubscriptionThread(threading.Thread):
                 if not self.aiomain_task_admin:
                     await asyncio.sleep(10, loop=self.loop)
                     self.logger.debug("Starting admin subscription task")
-                    self.aiomain_task_admin = asyncio.ensure_future(self.msg.aioread(("admin",), loop=self.loop,
-                                                                                     group_id=False,
-                                                                                     aiocallback=self._msg_callback),
-                                                                    loop=self.loop)
+                    self.aiomain_task_admin = asyncio.ensure_future(
+                        self.msg.aioread(
+                            ("admin",),
+                            loop=self.loop,
+                            group_id=False,
+                            aiocallback=self._msg_callback,
+                        ),
+                        loop=self.loop,
+                    )
                 if not self.aiomain_task:
                     await asyncio.sleep(10, loop=self.loop)
                     self.logger.debug("Starting non-admin subscription task")
-                    self.aiomain_task = asyncio.ensure_future(self.msg.aioread(("ns", "nsi"), loop=self.loop,
-                                                                               aiocallback=self._msg_callback),
-                                                              loop=self.loop)
-                done, _ = await asyncio.wait([self.aiomain_task, self.aiomain_task_admin],
-                                             timeout=None, loop=self.loop, return_when=asyncio.FIRST_COMPLETED)
+                    self.aiomain_task = asyncio.ensure_future(
+                        self.msg.aioread(
+                            ("ns", "nsi"),
+                            loop=self.loop,
+                            aiocallback=self._msg_callback,
+                        ),
+                        loop=self.loop,
+                    )
+                done, _ = await asyncio.wait(
+                    [self.aiomain_task, self.aiomain_task_admin],
+                    timeout=None,
+                    loop=self.loop,
+                    return_when=asyncio.FIRST_COMPLETED,
+                )
                 try:
                     if self.aiomain_task_admin in done:
                         exc = self.aiomain_task_admin.exception()
-                        self.logger.error("admin subscription task exception: {}".format(exc))
+                        self.logger.error(
+                            "admin subscription task exception: {}".format(exc)
+                        )
                         self.aiomain_task_admin = None
                     if self.aiomain_task in done:
                         exc = self.aiomain_task.exception()
-                        self.logger.error("non-admin subscription task exception: {}".format(exc))
+                        self.logger.error(
+                            "non-admin subscription task exception: {}".format(exc)
+                        )
                         self.aiomain_task = None
                 except asyncio.CancelledError:
                     pass
@@ -114,7 +136,9 @@ class SubscriptionThread(threading.Thread):
                     return
                 if kafka_working:
                     # logging only first time
-                    self.logger.critical("Error accessing kafka '{}'. Retrying ...".format(e))
+                    self.logger.critical(
+                        "Error accessing kafka '{}'. Retrying ...".format(e)
+                    )
                     kafka_working = False
             await asyncio.sleep(10, loop=self.loop)
 
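
start_kafka() above keeps two long-lived read tasks alive by waiting with return_when=asyncio.FIRST_COMPLETED and recreating whichever task finished or failed. A compact sketch of that supervision pattern, with dummy workers standing in for the kafka readers:

    import asyncio
    import random

    async def worker(name):
        # Stand-in for a long-running kafka read task; it may stop at any time.
        await asyncio.sleep(random.uniform(0.1, 0.5))
        raise RuntimeError("{} stopped".format(name))

    async def supervise(rounds=3):
        task_a = task_b = None
        for _ in range(rounds):
            # Recreate any task that is not currently running.
            if task_a is None:
                task_a = asyncio.ensure_future(worker("reader-a"))
            if task_b is None:
                task_b = asyncio.ensure_future(worker("reader-b"))
            done, _ = await asyncio.wait(
                [task_a, task_b], return_when=asyncio.FIRST_COMPLETED
            )
            # Log the exception of whichever task ended and mark it for restart.
            if task_a in done:
                print("reader-a ended:", task_a.exception())
                task_a = None
            if task_b in done:
                print("reader-b ended:", task_b.exception())
                task_b = None
        for task in (task_a, task_b):
            if task:
                task.cancel()

    asyncio.run(supervise())
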
@@ -133,8 +157,11 @@ class SubscriptionThread(threading.Thread):
                     self.db = dbmemory.DbMemory()
                     self.db.db_connect(self.config["database"])
                 else:
-                    raise SubscriptionException("Invalid configuration param '{}' at '[database]':'driver'".format(
-                        self.config["database"]["driver"]))
+                    raise SubscriptionException(
+                        "Invalid configuration param '{}' at '[database]':'driver'".format(
+                            self.config["database"]["driver"]
+                        )
+                    )
             if not self.msg:
                 config_msg = self.config["message"].copy()
                 config_msg["loop"] = self.loop
@@ -145,8 +172,11 @@ class SubscriptionThread(threading.Thread):
                     self.msg = msgkafka.MsgKafka()
                     self.msg.connect(config_msg)
                 else:
-                    raise SubscriptionException("Invalid configuration param '{}' at '[message]':'driver'".format(
-                        config_msg["driver"]))
+                    raise SubscriptionException(
+                        "Invalid configuration param '{}' at '[message]':'driver'".format(
+                            config_msg["driver"]
+                        )
+                    )
             self.nslcm = NsLcmNotification(self.db)
         except (DbException, MsgException) as e:
             raise SubscriptionException(str(e), http_code=e.http_code)
@@ -155,12 +185,16 @@ class SubscriptionThread(threading.Thread):
         while not self.to_terminate:
             try:
 
-                self.loop.run_until_complete(asyncio.ensure_future(self.start_kafka(), loop=self.loop))
+                self.loop.run_until_complete(
+                    asyncio.ensure_future(self.start_kafka(), loop=self.loop)
+                )
             # except asyncio.CancelledError:
             #     break  # if cancelled it should end, breaking loop
             except Exception as e:
                 if not self.to_terminate:
-                    self.logger.exception("Exception '{}' at messaging read loop".format(e), exc_info=True)
+                    self.logger.exception(
+                        "Exception '{}' at messaging read loop".format(e), exc_info=True
+                    )
 
         self.logger.debug("Finishing")
         self._stop()
@@ -177,69 +211,125 @@ class SubscriptionThread(threading.Thread):
         msg_to_send = []
         try:
             if topic == "ns":
-                if command == "terminated" and params["operationState"] in ("COMPLETED", "PARTIALLY_COMPLETED"):
+                if command == "terminated" and params["operationState"] in (
+                    "COMPLETED",
+                    "PARTIALLY_COMPLETED",
+                ):
                     self.logger.debug("received ns terminated {}".format(params))
                     if params.get("autoremove"):
-                        self.engine.del_item(self.internal_session, "nsrs", _id=params["nsr_id"],
-                                             not_send_msg=msg_to_send)
-                        self.logger.debug("ns={} deleted from database".format(params["nsr_id"]))
+                        self.engine.del_item(
+                            self.internal_session,
+                            "nsrs",
+                            _id=params["nsr_id"],
+                            not_send_msg=msg_to_send,
+                        )
+                        self.logger.debug(
+                            "ns={} deleted from database".format(params["nsr_id"])
+                        )
                 # Check for nslcm notification
                 if isinstance(params, dict):
                     # Check availability of operationState and command
-                    if (not params.get("operationState")) or (not command) or (not params.get("operationParams")):
-                        self.logger.debug("Message can not be used for notification of nslcm")
+                    if (
+                        (not params.get("operationState"))
+                        or (not command)
+                        or (not params.get("operationParams"))
+                    ):
+                        self.logger.debug(
+                            "Message can not be used for notification of nslcm"
+                        )
                     else:
                         nsd_id = params["operationParams"].get("nsdId")
                         ns_instance_id = params["operationParams"].get("nsInstanceId")
                         # Any one among nsd_id, ns_instance_id should be present.
                         if not (nsd_id or ns_instance_id):
-                            self.logger.debug("Message can not be used for notification of nslcm")
+                            self.logger.debug(
+                                "Message can not be used for notification of nslcm"
+                            )
                         else:
                             op_state = params["operationState"]
-                            event_details = {"topic": topic, "command": command.upper(), "params": params}
-                            subscribers = self.nslcm.get_subscribers(nsd_id, ns_instance_id, command.upper(), op_state,
-                                                                     event_details)
+                            event_details = {
+                                "topic": topic,
+                                "command": command.upper(),
+                                "params": params,
+                            }
+                            subscribers = self.nslcm.get_subscribers(
+                                nsd_id,
+                                ns_instance_id,
+                                command.upper(),
+                                op_state,
+                                event_details,
+                            )
                             # self.logger.debug("subscribers list: ")
                             # self.logger.debug(subscribers)
                             if subscribers:
-                                asyncio.ensure_future(self.nslcm.send_notifications(subscribers, loop=self.loop),
-                                                      loop=self.loop)
+                                asyncio.ensure_future(
+                                    self.nslcm.send_notifications(
+                                        subscribers, loop=self.loop
+                                    ),
+                                    loop=self.loop,
+                                )
                 else:
-                    self.logger.debug("Message can not be used for notification of nslcm")
+                    self.logger.debug(
+                        "Message can not be used for notification of nslcm"
+                    )
             elif topic == "nsi":
-                if command == "terminated" and params["operationState"] in ("COMPLETED", "PARTIALLY_COMPLETED"):
+                if command == "terminated" and params["operationState"] in (
+                    "COMPLETED",
+                    "PARTIALLY_COMPLETED",
+                ):
                     self.logger.debug("received nsi terminated {}".format(params))
                     if params.get("autoremove"):
-                        self.engine.del_item(self.internal_session, "nsis", _id=params["nsir_id"],
-                                             not_send_msg=msg_to_send)
-                        self.logger.debug("nsis={} deleted from database".format(params["nsir_id"]))
+                        self.engine.del_item(
+                            self.internal_session,
+                            "nsis",
+                            _id=params["nsir_id"],
+                            not_send_msg=msg_to_send,
+                        )
+                        self.logger.debug(
+                            "nsis={} deleted from database".format(params["nsir_id"])
+                        )
             elif topic == "admin":
                 self.logger.debug("received {} {} {}".format(topic, command, params))
-                if command in ["echo", "ping"]:   # ignored commands
+                if command in ["echo", "ping"]:  # ignored commands
                     pass
                 elif command == "revoke_token":
                     if params:
                         if isinstance(params, dict) and "_id" in params:
                             tid = params.get("_id")
                             self.engine.authenticator.tokens_cache.pop(tid, None)
-                            self.logger.debug("token '{}' removed from token_cache".format(tid))
+                            self.logger.debug(
+                                "token '{}' removed from token_cache".format(tid)
+                            )
                         else:
-                            self.logger.debug("unrecognized params in command '{} {}': {}"
-                                              .format(topic, command, params))
+                            self.logger.debug(
+                                "unrecognized params in command '{} {}': {}".format(
+                                    topic, command, params
+                                )
+                            )
                     else:
                         self.engine.authenticator.tokens_cache.clear()
                         self.logger.debug("token_cache cleared")
                 else:
-                    self.logger.debug("unrecognized command '{} {}'".format(topic, command))
+                    self.logger.debug(
+                        "unrecognized command '{} {}'".format(topic, command)
+                    )
             # writing to kafka must be done with our own loop. For this reason it is not allowed Engine to do that,
             # but content to be written is stored at msg_to_send
             for msg in msg_to_send:
                 await self.msg.aiowrite(*msg, loop=self.loop)
         except (EngineException, DbException, MsgException) as e:
-            self.logger.error("Error while processing topic={} command={}: {}".format(topic, command, e))
+            self.logger.error(
+                "Error while processing topic={} command={}: {}".format(
+                    topic, command, e
+                )
+            )
         except Exception as e:
-            self.logger.exception("Exception while processing topic={} command={}: {}".format(topic, command, e),
-                                  exc_info=True)
+            self.logger.exception(
+                "Exception while processing topic={} command={}: {}".format(
+                    topic, command, e
+                ),
+                exc_info=True,
+            )
 
     def _stop(self):
         """
index 9e85358..f339354 100755 (executable)
@@ -20,6 +20,7 @@ import requests
 import json
 import logging
 import yaml
+
 # import json
 # import tarfile
 from time import sleep
@@ -37,27 +38,49 @@ version_date = "Oct 2018"
 
 def usage():
     print("Usage: ", sys.argv[0], "[options]")
-    print("      Performs system tests over running NBI. It can be used for real OSM test using option '--test-osm'")
-    print("      If this is the case env variables 'OSMNBITEST_VIM_NAME' must be supplied to create a VIM if not exist "
-          "where deployment is done")
+    print(
+        "      Performs system tests over running NBI. It can be used for real OSM test using option '--test-osm'"
+    )
+    print(
+        "      If this is the case env variables 'OSMNBITEST_VIM_NAME' must be supplied to create a VIM if not exist "
+        "where deployment is done"
+    )
     print("OPTIONS")
     print("      -h|--help: shows this help")
     print("      --insecure: Allows non trusted https NBI server")
     print("      --list: list available tests")
-    print("      --manual-check: Deployment tests stop after deployed to allow manual inspection. Only make sense with "
-          "'--test-osm'")
+    print(
+        "      --manual-check: Deployment tests stop after deployed to allow manual inspection. Only make sense with "
+        "'--test-osm'"
+    )
     print("      -p|--password PASSWORD: NBI access password. 'admin' by default")
     print("      ---project PROJECT: NBI access project. 'admin' by default")
-    print("      --test TEST[,...]: Execute only a test or a comma separated list of tests")
-    print("      --params key=val: params to the previous test. key can be vnfd-files, nsd-file, ns-name, ns-config")
-    print("      --test-osm: If missing this test is intended for NBI only, no other OSM components are expected. Use "
-          "this flag to test the system. LCM and RO components are expected to be up and running")
-    print("      --timeout TIMEOUT: General NBI timeout, by default {}s".format(timeout))
-    print("      --timeout-deploy TIMEOUT: Timeout used for getting NS deployed, by default {}s".format(timeout_deploy))
-    print("      --timeout-configure TIMEOUT: Timeout used for getting NS deployed and configured,"
-          " by default {}s".format(timeout_configure))
+    print(
+        "      --test TEST[,...]: Execute only a test or a comma separated list of tests"
+    )
+    print(
+        "      --params key=val: params to the previous test. key can be vnfd-files, nsd-file, ns-name, ns-config"
+    )
+    print(
+        "      --test-osm: If missing this test is intended for NBI only, no other OSM components are expected. Use "
+        "this flag to test the system. LCM and RO components are expected to be up and running"
+    )
+    print(
+        "      --timeout TIMEOUT: General NBI timeout, by default {}s".format(timeout)
+    )
+    print(
+        "      --timeout-deploy TIMEOUT: Timeout used for getting NS deployed, by default {}s".format(
+            timeout_deploy
+        )
+    )
+    print(
+        "      --timeout-configure TIMEOUT: Timeout used for getting NS deployed and configured,"
+        " by default {}s".format(timeout_configure)
+    )
     print("      -u|--user USERNAME: NBI access username. 'admin' by default")
-    print("      --url URL: complete NBI server URL. 'https//localhost:9999/osm' by default")
+    print(
+        "      --url URL: complete NBI server URL. 'https//localhost:9999/osm' by default"
+    )
     print("      -v|--verbose print debug information, can be used several times")
     print("      --no-verbose remove verbosity")
     print("      --version: prints current version")
@@ -68,8 +91,8 @@ def usage():
     print("      export OSMNBITEST_VIM_TENANT=vim-tenant")
     print("      export OSMNBITEST_VIM_USER=vim-user")
     print("      export OSMNBITEST_VIM_PASSWORD=vim-password")
-    print("      export OSMNBITEST_VIM_CONFIG=\"vim-config\"")
-    print("      export OSMNBITEST_NS_NAME=\"vim-config\"")
+    print('      export OSMNBITEST_VIM_CONFIG="vim-config"')
+    print('      export OSMNBITEST_NS_NAME="vim-config"')
     return
 
 
@@ -85,24 +108,66 @@ headers_zip = {"Accept": "application/zip,application/yaml"}
 headers_zip_yaml = {"Accept": "application/yaml", "Content-type": "application/zip"}
 headers_zip_json = {"Accept": "application/json", "Content-type": "application/zip"}
 headers_txt_json = {"Accept": "application/json", "Content-type": "text/plain"}
-r_headers_yaml_location_vnfd = {"Location": "/vnfpkgm/v1/vnf_packages_content/", "Content-Type": "application/yaml"}
-r_headers_yaml_location_nsd = {"Location": "/nsd/v1/ns_descriptors_content/", "Content-Type": "application/yaml"}
-r_headers_yaml_location_nst = {"Location": "/nst/v1/netslice_templates_content", "Content-Type": "application/yaml"}
-r_headers_yaml_location_nslcmop = {"Location": "nslcm/v1/ns_lcm_op_occs/", "Content-Type": "application/yaml"}
-r_headers_yaml_location_nsilcmop = {"Location": "/osm/nsilcm/v1/nsi_lcm_op_occs/", "Content-Type": "application/yaml"}
+r_headers_yaml_location_vnfd = {
+    "Location": "/vnfpkgm/v1/vnf_packages_content/",
+    "Content-Type": "application/yaml",
+}
+r_headers_yaml_location_nsd = {
+    "Location": "/nsd/v1/ns_descriptors_content/",
+    "Content-Type": "application/yaml",
+}
+r_headers_yaml_location_nst = {
+    "Location": "/nst/v1/netslice_templates_content",
+    "Content-Type": "application/yaml",
+}
+r_headers_yaml_location_nslcmop = {
+    "Location": "nslcm/v1/ns_lcm_op_occs/",
+    "Content-Type": "application/yaml",
+}
+r_headers_yaml_location_nsilcmop = {
+    "Location": "/osm/nsilcm/v1/nsi_lcm_op_occs/",
+    "Content-Type": "application/yaml",
+}
 
 # test ones authorized
 test_authorized_list = (
-    ("AU1", "Invalid vnfd id", "GET", "/vnfpkgm/v1/vnf_packages/non-existing-id",
-     headers_json, None, 404, r_header_json, "json"),
-    ("AU2", "Invalid nsd id", "GET", "/nsd/v1/ns_descriptors/non-existing-id",
-     headers_yaml, None, 404, r_header_yaml, "yaml"),
-    ("AU3", "Invalid nsd id", "DELETE", "/nsd/v1/ns_descriptors_content/non-existing-id",
-     headers_yaml, None, 404, r_header_yaml, "yaml"),
+    (
+        "AU1",
+        "Invalid vnfd id",
+        "GET",
+        "/vnfpkgm/v1/vnf_packages/non-existing-id",
+        headers_json,
+        None,
+        404,
+        r_header_json,
+        "json",
+    ),
+    (
+        "AU2",
+        "Invalid nsd id",
+        "GET",
+        "/nsd/v1/ns_descriptors/non-existing-id",
+        headers_yaml,
+        None,
+        404,
+        r_header_yaml,
+        "yaml",
+    ),
+    (
+        "AU3",
+        "Invalid nsd id",
+        "DELETE",
+        "/nsd/v1/ns_descriptors_content/non-existing-id",
+        headers_yaml,
+        None,
+        404,
+        r_header_yaml,
+        "yaml",
+    ),
 )
-timeout = 120   # general timeout
-timeout_deploy = 60*10        # timeout for NS deploying without charms
-timeout_configure = 60*20     # timeout for NS deploying and configuring
+timeout = 120  # general timeout
+timeout_deploy = 60 * 10  # timeout for NS deploying without charms
+timeout_configure = 60 * 20  # timeout for NS deploying and configuring
 
 
 class TestException(Exception):
@@ -110,7 +175,15 @@ class TestException(Exception):
 
 
 class TestRest:
-    def __init__(self, url_base, header_base=None, verify=False, user="admin", password="admin", project="admin"):
+    def __init__(
+        self,
+        url_base,
+        header_base=None,
+        verify=False,
+        user="admin",
+        password="admin",
+        project="admin",
+    ):
         self.url_base = url_base
         if header_base is None:
             self.header_base = {}
@@ -127,7 +200,7 @@ class TestRest:
         # contains ID of tests obtained from Location response header. "" key contains last obtained id
         self.last_id = ""
         self.test_name = None
-        self.step = 0   # number of subtest under test
+        self.step = 0  # number of subtest under test
         self.passed_tests = 0
         self.failed_tests = 0
 
@@ -146,8 +219,19 @@ class TestRest:
         if key in self.s.headers:
             del self.s.headers[key]
 
-    def test(self, description, method, url, headers, payload, expected_codes, expected_headers,
-             expected_payload, store_file=None, pooling=False):
+    def test(
+        self,
+        description,
+        method,
+        url,
+        headers,
+        payload,
+        expected_codes,
+        expected_headers,
+        expected_payload,
+        store_file=None,
+        pooling=False,
+    ):
         """
         Performs an http request and check http code response. Exit if different than allowed. It get the returned id
         that can be used by following test in the URL with {name} where name is the name of the test
@@ -189,7 +273,9 @@ class TestRest:
                     payload = json.dumps(payload)
 
             if not pooling:
-                test_description = "Test {}{} {} {} {}".format(self.test_name, self.step, description, method, url)
+                test_description = "Test {}{} {} {} {}".format(
+                    self.test_name, self.step, description, method, url
+                )
                 logger.warning(test_description)
                 self.step += 1
             stream = False
@@ -198,8 +284,13 @@ class TestRest:
             __retry = 0
             while True:
                 try:
-                    r = getattr(self.s, method.lower())(url, data=payload, headers=headers, verify=self.verify,
-                                                        stream=stream)
+                    r = getattr(self.s, method.lower())(
+                        url,
+                        data=payload,
+                        headers=headers,
+                        verify=self.verify,
+                        stream=stream,
+                    )
                     break
                 except requests.exceptions.ConnectionError as e:
                     if __retry == 2:
@@ -218,15 +309,21 @@ class TestRest:
                     expected_codes = (expected_codes,)
                 if r.status_code not in expected_codes:
                     raise TestException(
-                        "Got status {}. Expected {}. {}".format(r.status_code, expected_codes, r.text))
+                        "Got status {}. Expected {}. {}".format(
+                            r.status_code, expected_codes, r.text
+                        )
+                    )
 
             if expected_headers:
                 for header_key, header_val in expected_headers.items():
                     if header_key.lower() not in r.headers:
                         raise TestException("Header {} not present".format(header_key))
                     if header_val and header_val.lower() not in r.headers[header_key]:
-                        raise TestException("Header {} does not contain {} but {}".format(header_key, header_val,
-                                            r.headers[header_key]))
+                        raise TestException(
+                            "Header {} does not contain {} but {}".format(
+                                header_key, header_val, r.headers[header_key]
+                            )
+                        )
 
             if expected_payload is not None:
                 if expected_payload == 0 and len(r.content) > 0:
@@ -235,15 +332,25 @@ class TestRest:
                     try:
                         r.json()
                     except Exception as e:
-                        raise TestException("Expected json response payload, but got Exception {}".format(e))
+                        raise TestException(
+                            "Expected json response payload, but got Exception {}".format(
+                                e
+                            )
+                        )
                 elif expected_payload == "yaml":
                     try:
                         yaml.safe_load(r.text)
                     except Exception as e:
-                        raise TestException("Expected yaml response payload, but got Exception {}".format(e))
+                        raise TestException(
+                            "Expected yaml response payload, but got Exception {}".format(
+                                e
+                            )
+                        )
                 elif expected_payload in ("zip", "octet-string"):
                     if len(r.content) == 0:
-                        raise TestException("Expected some response payload, but got empty")
+                        raise TestException(
+                            "Expected some response payload, but got empty"
+                        )
                     # try:
                     #     tar = tarfile.open(None, 'r:gz', fileobj=r.raw)
                     #     for tarinfo in tar:
@@ -253,16 +360,18 @@ class TestRest:
                     #     raise TestException("Expected zip response payload, but got Exception {}".format(e))
                 elif expected_payload == "text":
                     if len(r.content) == 0:
-                        raise TestException("Expected some response payload, but got empty")
+                        raise TestException(
+                            "Expected some response payload, but got empty"
+                        )
                     # r.text
             if store_file:
-                with open(store_file, 'wb') as fd:
+                with open(store_file, "wb") as fd:
                     for chunk in r.iter_content(chunk_size=128):
                         fd.write(chunk)
 
             location = r.headers.get("Location")
             if location:
-                _id = location[location.rfind("/") + 1:]
+                _id = location[location.rfind("/") + 1 :]
                 if _id:
                     self.last_id = str(_id)
             if not pooling:
@@ -290,14 +399,27 @@ class TestRest:
             logger.error("Exception: {}".format(e))
 
     def get_autorization(self):  # user=None, password=None, project=None):
-        if self.token:  # and self.user == user and self.password == password and self.project == project:
+        if (
+            self.token
+        ):  # and self.user == user and self.password == password and self.project == project:
             return
         # self.user = user
         # self.password = password
         # self.project = project
-        r = self.test("Obtain token", "POST", "/admin/v1/tokens", headers_json,
-                      {"username": self.user, "password": self.password, "project_id": self.project},
-                      (200, 201), r_header_json, "json")
+        r = self.test(
+            "Obtain token",
+            "POST",
+            "/admin/v1/tokens",
+            headers_json,
+            {
+                "username": self.user,
+                "password": self.password,
+                "project_id": self.project,
+            },
+            (200, 201),
+            r_header_json,
+            "json",
+        )
         if not r:
             return
         response = r.json()
@@ -306,8 +428,16 @@ class TestRest:
 
     def remove_authorization(self):
         if self.token:
-            self.test("Delete token", "DELETE", "/admin/v1/tokens/{}".format(self.token), headers_json,
-                      None, (200, 201, 204), None, None)
+            self.test(
+                "Delete token",
+                "DELETE",
+                "/admin/v1/tokens/{}".format(self.token),
+                headers_json,
+                None,
+                (200, 201, 204),
+                None,
+                None,
+            )
         self.token = None
         self.unset_header("Authorization")
 
@@ -319,12 +449,21 @@ class TestRest:
             vim_name = os.environ.get("OSMNBITEST_VIM_NAME")
             if not vim_name:
                 raise TestException(
-                    "Needed to define OSMNBITEST_VIM_XXX variables to create a real VIM for deployment")
+                    "Needed to define OSMNBITEST_VIM_XXX variables to create a real VIM for deployment"
+                )
         else:
             vim_name = "fakeVim"
         # Get VIM
-        r = self.test("Get VIM ID", "GET", "/admin/v1/vim_accounts?name={}".format(vim_name), headers_json,
-                      None, 200, r_header_json, "json")
+        r = self.test(
+            "Get VIM ID",
+            "GET",
+            "/admin/v1/vim_accounts?name={}".format(vim_name),
+            headers_json,
+            None,
+            200,
+            r_header_json,
+            "json",
+        )
         if not r:
             return
         vims = r.json()
@@ -333,30 +472,52 @@ class TestRest:
         # Add VIM
         if test_osm:
             # check needed environ parameters:
-            if not os.environ.get("OSMNBITEST_VIM_URL") or not os.environ.get("OSMNBITEST_VIM_TENANT"):
-                raise TestException("Env OSMNBITEST_VIM_URL and OSMNBITEST_VIM_TENANT are needed for create a real VIM"
-                                    " to deploy on whit the --test-osm option")
-            vim_data = "{{schema_version: '1.0', name: '{}', vim_type: {}, vim_url: '{}', vim_tenant_name: '{}', "\
-                       "vim_user: {}, vim_password: {}".format(vim_name,
-                                                               os.environ.get("OSMNBITEST_VIM_TYPE", "openstack"),
-                                                               os.environ.get("OSMNBITEST_VIM_URL"),
-                                                               os.environ.get("OSMNBITEST_VIM_TENANT"),
-                                                               os.environ.get("OSMNBITEST_VIM_USER"),
-                                                               os.environ.get("OSMNBITEST_VIM_PASSWORD"))
+            if not os.environ.get("OSMNBITEST_VIM_URL") or not os.environ.get(
+                "OSMNBITEST_VIM_TENANT"
+            ):
+                raise TestException(
+                    "Env OSMNBITEST_VIM_URL and OSMNBITEST_VIM_TENANT are needed for create a real VIM"
+                    " to deploy on whit the --test-osm option"
+                )
+            vim_data = "{{schema_version: '1.0', name: '{}', vim_type: {}, vim_url: '{}', vim_tenant_name: '{}', " "vim_user: {}, vim_password: {}".format(
+                vim_name,
+                os.environ.get("OSMNBITEST_VIM_TYPE", "openstack"),
+                os.environ.get("OSMNBITEST_VIM_URL"),
+                os.environ.get("OSMNBITEST_VIM_TENANT"),
+                os.environ.get("OSMNBITEST_VIM_USER"),
+                os.environ.get("OSMNBITEST_VIM_PASSWORD"),
+            )
             if os.environ.get("OSMNBITEST_VIM_CONFIG"):
-                vim_data += " ,config: {}".format(os.environ.get("OSMNBITEST_VIM_CONFIG"))
+                vim_data += " ,config: {}".format(
+                    os.environ.get("OSMNBITEST_VIM_CONFIG")
+                )
             vim_data += "}"
         else:
-            vim_data = "{schema_version: '1.0', name: fakeVim, vim_type: openstack, vim_url: 'http://10.11.12.13/fake'"\
-                       ", vim_tenant_name: 'vimtenant', vim_user: vimuser, vim_password: vimpassword}"
-        self.test("Create VIM", "POST", "/admin/v1/vim_accounts", headers_yaml, vim_data,
-                  (201, 202), {"Location": "/admin/v1/vim_accounts/", "Content-Type": "application/yaml"}, "yaml")
+            vim_data = (
+                "{schema_version: '1.0', name: fakeVim, vim_type: openstack, vim_url: 'http://10.11.12.13/fake'"
+                ", vim_tenant_name: 'vimtenant', vim_user: vimuser, vim_password: vimpassword}"
+            )
+        self.test(
+            "Create VIM",
+            "POST",
+            "/admin/v1/vim_accounts",
+            headers_yaml,
+            vim_data,
+            (201, 202),
+            {"Location": "/admin/v1/vim_accounts/", "Content-Type": "application/yaml"},
+            "yaml",
+        )
         return self.last_id
 
     def print_results(self):
         print("\n\n\n--------------------------------------------")
-        print("TEST RESULTS: Total: {}, Passed: {}, Failed: {}".format(self.passed_tests + self.failed_tests,
-                                                                       self.passed_tests, self.failed_tests))
+        print(
+            "TEST RESULTS: Total: {}, Passed: {}, Failed: {}".format(
+                self.passed_tests + self.failed_tests,
+                self.passed_tests,
+                self.failed_tests,
+            )
+        )
         print("--------------------------------------------")
 
     def wait_until_delete(self, url_op, timeout_delete):
@@ -367,14 +528,26 @@ class TestRest:
         :return:
         """
         description = "Wait to topic being deleted"
-        test_description = "Test {}{} {} {} {}".format(self.test_name, self.step, description, "GET", url_op)
+        test_description = "Test {}{} {} {} {}".format(
+            self.test_name, self.step, description, "GET", url_op
+        )
         logger.warning(test_description)
         self.step += 1
 
         wait = timeout_delete
         while wait >= 0:
-            r = self.test(description, "GET", url_op, headers_yaml, None, (200, 404), None, r_header_yaml, "yaml",
-                          pooling=True)
+            r = self.test(
+                description,
+                "GET",
+                url_op,
+                headers_yaml,
+                None,
+                (200, 404),
+                None,
+                r_header_yaml,
+                "yaml",
+                pooling=True,
+            )
             if not r:
                 return
             if r.status_code == 404:
@@ -384,7 +557,9 @@ class TestRest:
                 wait -= 5
                 sleep(5)
         else:
-            raise TestException("Topic is not deleted after {} seconds".format(timeout_delete))
+            raise TestException(
+                "Topic is not deleted after {} seconds".format(timeout_delete)
+            )
             self.failed_tests += 1
 
     def wait_operation_ready(self, ns_nsi, opp_id, timeout, expected_fail=False):
@@ -401,26 +576,43 @@ class TestRest:
         else:
             url_op = "/nsilcm/v1/nsi_lcm_op_occs/{}".format(opp_id)
         description = "Wait to {} lcm operation complete".format(ns_nsi)
-        test_description = "Test {}{} {} {} {}".format(self.test_name, self.step, description, "GET", url_op)
+        test_description = "Test {}{} {} {} {}".format(
+            self.test_name, self.step, description, "GET", url_op
+        )
         logger.warning(test_description)
         self.step += 1
         wait = timeout
         while wait >= 0:
-            r = self.test(description, "GET", url_op, headers_json, None,
-                          200, r_header_json, "json", pooling=True)
+            r = self.test(
+                description,
+                "GET",
+                url_op,
+                headers_json,
+                None,
+                200,
+                r_header_json,
+                "json",
+                pooling=True,
+            )
             if not r:
                 return
             nslcmop = r.json()
             if "COMPLETED" in nslcmop["operationState"]:
                 if expected_fail:
-                    logger.error("NS terminate has success, expecting failing: {}".format(nslcmop["detailed-status"]))
+                    logger.error(
+                        "NS terminate has success, expecting failing: {}".format(
+                            nslcmop["detailed-status"]
+                        )
+                    )
                     self.failed_tests += 1
                 else:
                     self.passed_tests += 1
                 break
             elif "FAILED" in nslcmop["operationState"]:
                 if not expected_fail:
-                    logger.error("NS terminate has failed: {}".format(nslcmop["detailed-status"]))
+                    logger.error(
+                        "NS terminate has failed: {}".format(nslcmop["detailed-status"])
+                    )
                     self.failed_tests += 1
                 else:
                     self.passed_tests += 1
@@ -431,7 +623,9 @@ class TestRest:
             sleep(10)
         else:
             self.failed_tests += 1
-            logger.error("NS instantiate is not terminate after {} seconds".format(timeout))
+            logger.error(
+                "NS instantiate is not terminate after {} seconds".format(timeout)
+            )
             return
         print("", file=stderr)
 
@@ -444,9 +638,36 @@ class TestNonAuthorized:
         engine.set_test_name("NonAuth")
         engine.remove_authorization()
         test_not_authorized_list = (
-            ("Invalid token", "GET", "/admin/v1/users", headers_json, None, 401, r_header_json, "json"),
-            ("Invalid URL", "POST", "/admin/v1/nonexist", headers_yaml, None, 405, r_header_yaml, "yaml"),
-            ("Invalid version", "DELETE", "/admin/v2/users", headers_yaml, None, 405, r_header_yaml, "yaml"),
+            (
+                "Invalid token",
+                "GET",
+                "/admin/v1/users",
+                headers_json,
+                None,
+                401,
+                r_header_json,
+                "json",
+            ),
+            (
+                "Invalid URL",
+                "POST",
+                "/admin/v1/nonexist",
+                headers_yaml,
+                None,
+                405,
+                r_header_yaml,
+                "yaml",
+            ),
+            (
+                "Invalid version",
+                "DELETE",
+                "/admin/v2/users",
+                headers_yaml,
+                None,
+                405,
+                r_header_yaml,
+                "yaml",
+            ),
         )
         for t in test_not_authorized_list:
             engine.test(*t)
@@ -467,27 +688,74 @@ class TestUsersProjects:
 
         engine.get_autorization()
 
-        res = engine.test("Create project non admin 1", "POST", "/admin/v1/projects", headers_json, {"name": "P1"},
-                          (201, 204), {"Location": "/admin/v1/projects/", "Content-Type": "application/json"}, "json")
+        res = engine.test(
+            "Create project non admin 1",
+            "POST",
+            "/admin/v1/projects",
+            headers_json,
+            {"name": "P1"},
+            (201, 204),
+            {"Location": "/admin/v1/projects/", "Content-Type": "application/json"},
+            "json",
+        )
         p1 = engine.last_id if res else None
 
-        res = engine.test("Create project admin", "POST", "/admin/v1/projects", headers_json,
-                          {"name": "Padmin", "admin": True}, (201, 204),
-                          {"Location": "/admin/v1/projects/", "Content-Type": "application/json"}, "json")
+        res = engine.test(
+            "Create project admin",
+            "POST",
+            "/admin/v1/projects",
+            headers_json,
+            {"name": "Padmin", "admin": True},
+            (201, 204),
+            {"Location": "/admin/v1/projects/", "Content-Type": "application/json"},
+            "json",
+        )
         padmin = engine.last_id if res else None
 
-        res = engine.test("Create project bad format", "POST", "/admin/v1/projects", headers_json, {"name": 1},
-                          (400, 422), r_header_json, "json")
+        res = engine.test(
+            "Create project bad format",
+            "POST",
+            "/admin/v1/projects",
+            headers_json,
+            {"name": 1},
+            (400, 422),
+            r_header_json,
+            "json",
+        )
         pbad = engine.last_id if res else None
 
-        res = engine.test("Get project admin role", "GET", "/admin/v1/roles?name=project_admin", headers_json, {},
-                          (200), {"Content-Type": "application/json"}, "json")
+        res = engine.test(
+            "Get project admin role",
+            "GET",
+            "/admin/v1/roles?name=project_admin",
+            headers_json,
+            {},
+            (200),
+            {"Content-Type": "application/json"},
+            "json",
+        )
         rpa = res.json()[0]["_id"] if res else None
-        res = engine.test("Get project user role", "GET", "/admin/v1/roles?name=project_user", headers_json, {},
-                          (200), {"Content-Type": "application/json"}, "json")
+        res = engine.test(
+            "Get project user role",
+            "GET",
+            "/admin/v1/roles?name=project_user",
+            headers_json,
+            {},
+            (200),
+            {"Content-Type": "application/json"},
+            "json",
+        )
         rpu = res.json()[0]["_id"] if res else None
-        res = engine.test("Get system admin role", "GET", "/admin/v1/roles?name=system_admin", headers_json, {},
-                          (200), {"Content-Type": "application/json"}, "json")
+        res = engine.test(
+            "Get system admin role",
+            "GET",
+            "/admin/v1/roles?name=system_admin",
+            headers_json,
+            {},
+            (200),
+            {"Content-Type": "application/json"},
+            "json",
+        )
         rsa = res.json()[0]["_id"] if res else None
 
         data = {"username": "U1", "password": "pw1"}
@@ -495,34 +763,77 @@ class TestUsersProjects:
         data["project_role_mappings"] = [
             {"project": p1, "role": rpa},
             {"project": p2, "role": rpa},
-            {"project": padmin, "role": rpu}
+            {"project": padmin, "role": rpu},
         ]
         rc = 201
         xhd = {"Location": "/admin/v1/users/", "Content-Type": "application/json"}
-        res = engine.test("Create user with bad project and force", "POST", "/admin/v1/users?FORCE=True", headers_json,
-                          data, rc, xhd, "json")
+        res = engine.test(
+            "Create user with bad project and force",
+            "POST",
+            "/admin/v1/users?FORCE=True",
+            headers_json,
+            data,
+            rc,
+            xhd,
+            "json",
+        )
         if res:
             u1 = engine.last_id
         else:
             # User is created sometimes even though an exception is raised
-            res = engine.test("Get user U1", "GET", "/admin/v1/users?username=U1", headers_json, {},
-                              (200), {"Content-Type": "application/json"}, "json")
+            res = engine.test(
+                "Get user U1",
+                "GET",
+                "/admin/v1/users?username=U1",
+                headers_json,
+                {},
+                (200),
+                {"Content-Type": "application/json"},
+                "json",
+            )
             u1 = res.json()[0]["_id"] if res else None
 
         data = {"username": "U2", "password": "pw2"}
-        data["project_role_mappings"] = [{"project": p1, "role": rpa}, {"project": padmin, "role": rsa}]
-        res = engine.test("Create user 2", "POST", "/admin/v1/users", headers_json,
-                          data, 201, {"Location": "/admin/v1/users/", "Content-Type": "application/json"}, "json")
+        data["project_role_mappings"] = [
+            {"project": p1, "role": rpa},
+            {"project": padmin, "role": rsa},
+        ]
+        res = engine.test(
+            "Create user 2",
+            "POST",
+            "/admin/v1/users",
+            headers_json,
+            data,
+            201,
+            {"Location": "/admin/v1/users/", "Content-Type": "application/json"},
+            "json",
+        )
         u2 = engine.last_id if res else None
 
         if u1:
             ftt = "project_role_mappings"
             xpr = [{"project": p1, "role": rpa}, {"project": padmin, "role": rpu}]
             data = {ftt: xpr}
-            engine.test("Edit user U1, delete  P2 project", "PATCH", "/admin/v1/users/"+u1, headers_json,
-                        data, 204, None, None)
-            res = engine.test("Check user U1, contains the right projects", "GET", "/admin/v1/users/"+u1,
-                              headers_json, None, 200, None, json)
+            engine.test(
+                "Edit user U1, delete  P2 project",
+                "PATCH",
+                "/admin/v1/users/" + u1,
+                headers_json,
+                data,
+                204,
+                None,
+                None,
+            )
+            res = engine.test(
+                "Check user U1, contains the right projects",
+                "GET",
+                "/admin/v1/users/" + u1,
+                headers_json,
+                None,
+                200,
+                None,
+                json,
+            )
             if res:
                 rj = res.json()
                 xpr[0]["project_name"] = "P1"
@@ -537,225 +848,619 @@ class TestUsersProjects:
                     if pr not in rj[ftt]:
                         ok = False
                 if not ok:
-                    logger.error("User {} '{}' are different than expected '{}'. Edition was not done properly"
-                                 .format(ftt, rj[ftt], xpr))
+                    logger.error(
+                        "User {} '{}' are different than expected '{}'. Edition was not done properly".format(
+                            ftt, rj[ftt], xpr
+                        )
+                    )
                     engine.failed_tests += 1
 
-        p2 = None   # To prevent deletion attempts
+        p2 = None  # To prevent deletion attempts
 
         # Add a test of 'default project' for Keystone?
 
         if u2:
-            engine.test("Edit user U2, change password", "PUT", "/admin/v1/users/"+u2, headers_json,
-                        {"password": "pw2_new"}, 204, None, None)
+            engine.test(
+                "Edit user U2, change password",
+                "PUT",
+                "/admin/v1/users/" + u2,
+                headers_json,
+                {"password": "pw2_new"},
+                204,
+                None,
+                None,
+            )
 
         if p1:
-            engine.test("Change to project P1 non existing", "POST", "/admin/v1/tokens/", headers_json,
-                        {"project_id": p1}, 401, r_header_json, "json")
+            engine.test(
+                "Change to project P1 non existing",
+                "POST",
+                "/admin/v1/tokens/",
+                headers_json,
+                {"project_id": p1},
+                401,
+                r_header_json,
+                "json",
+            )
 
         if u2 and p1:
-            res = engine.test("Change to user U2 project P1", "POST", "/admin/v1/tokens", headers_json,
-                              {"username": "U2", "password": "pw2_new", "project_id": "P1"}, (200, 201),
-                              r_header_json, "json")
+            res = engine.test(
+                "Change to user U2 project P1",
+                "POST",
+                "/admin/v1/tokens",
+                headers_json,
+                {"username": "U2", "password": "pw2_new", "project_id": "P1"},
+                (200, 201),
+                r_header_json,
+                "json",
+            )
             if res:
                 rj = res.json()
                 engine.set_header({"Authorization": "Bearer {}".format(rj["id"])})
 
-                engine.test("Edit user projects non admin", "PUT", "/admin/v1/users/U1", headers_json,
-                            {"remove_project_role_mappings": [{"project": "P1", "role": None}]},
-                            401, r_header_json, "json")
-
-                res = engine.test("Add new project non admin", "POST", "/admin/v1/projects", headers_json,
-                                  {"name": "P2"}, 401, r_header_json, "json")
+                engine.test(
+                    "Edit user projects non admin",
+                    "PUT",
+                    "/admin/v1/users/U1",
+                    headers_json,
+                    {"remove_project_role_mappings": [{"project": "P1", "role": None}]},
+                    401,
+                    r_header_json,
+                    "json",
+                )
+
+                res = engine.test(
+                    "Add new project non admin",
+                    "POST",
+                    "/admin/v1/projects",
+                    headers_json,
+                    {"name": "P2"},
+                    401,
+                    r_header_json,
+                    "json",
+                )
                 if res is None or res.status_code == 201:
                     # The project has been created even though it shouldn't
-                    res = engine.test("Get project P2", "GET", "/admin/v1/projects/P2", headers_json, None,
-                                      200, r_header_json, "json")
+                    res = engine.test(
+                        "Get project P2",
+                        "GET",
+                        "/admin/v1/projects/P2",
+                        headers_json,
+                        None,
+                        200,
+                        r_header_json,
+                        "json",
+                    )
                     p2 = res.json()["_id"] if res else None
 
                 if p1:
                     data = {"username": "U3", "password": "pw3"}
                     data["project_role_mappings"] = [{"project": p1, "role": rpu}]
-                    res = engine.test("Add new user non admin", "POST", "/admin/v1/users", headers_json,
-                                      data, 401, r_header_json, "json")
+                    res = engine.test(
+                        "Add new user non admin",
+                        "POST",
+                        "/admin/v1/users",
+                        headers_json,
+                        data,
+                        401,
+                        r_header_json,
+                        "json",
+                    )
                     if res is None or res.status_code == 201:
                         # The user has been created even though it shouldn't
-                        res = engine.test("Get user U3", "GET", "/admin/v1/users/U3", headers_json, None,
-                                          200, r_header_json, "json")
+                        res = engine.test(
+                            "Get user U3",
+                            "GET",
+                            "/admin/v1/users/U3",
+                            headers_json,
+                            None,
+                            200,
+                            r_header_json,
+                            "json",
+                        )
                         u3 = res.json()["_id"] if res else None
                 else:
                     u3 = None
 
                 if padmin:
-                    res = engine.test("Change to user U2 project Padmin", "POST", "/admin/v1/tokens", headers_json,
-                                      {"project_id": "Padmin"},   # Caused a Keystone authentication error
-                                      # {"username": "U2", "password": "pw2_new", "project_id": "Padmin"},
-                                      (200, 201), r_header_json, "json")
+                    res = engine.test(
+                        "Change to user U2 project Padmin",
+                        "POST",
+                        "/admin/v1/tokens",
+                        headers_json,
+                        {
+                            "project_id": "Padmin"
+                        },  # Caused a Keystone authentication error
+                        # {"username": "U2", "password": "pw2_new", "project_id": "Padmin"},
+                        (200, 201),
+                        r_header_json,
+                        "json",
+                    )
                     if res:
                         rj = res.json()
-                        engine.set_header({"Authorization": "Bearer {}".format(rj["id"])})
-
-                        res = engine.test("Add new project admin", "POST", "/admin/v1/projects", headers_json,
-                                          {"name": "P3"}, (201, 204),
-                                          {"Location": "/admin/v1/projects/", "Content-Type": "application/json"},
-                                          "json")
+                        engine.set_header(
+                            {"Authorization": "Bearer {}".format(rj["id"])}
+                        )
+
+                        res = engine.test(
+                            "Add new project admin",
+                            "POST",
+                            "/admin/v1/projects",
+                            headers_json,
+                            {"name": "P3"},
+                            (201, 204),
+                            {
+                                "Location": "/admin/v1/projects/",
+                                "Content-Type": "application/json",
+                            },
+                            "json",
+                        )
                         p3 = engine.last_id if res else None
 
                         if p1:
                             data = {"username": "U4", "password": "pw4"}
-                            data["project_role_mappings"] = [{"project": p1, "role": rpa}]
-                            res = engine.test("Add new user admin", "POST", "/admin/v1/users", headers_json,
-                                              data, (201, 204),
-                                              {"Location": "/admin/v1/users/", "Content-Type": "application/json"},
-                                              "json")
+                            data["project_role_mappings"] = [
+                                {"project": p1, "role": rpa}
+                            ]
+                            res = engine.test(
+                                "Add new user admin",
+                                "POST",
+                                "/admin/v1/users",
+                                headers_json,
+                                data,
+                                (201, 204),
+                                {
+                                    "Location": "/admin/v1/users/",
+                                    "Content-Type": "application/json",
+                                },
+                                "json",
+                            )
                             u4 = engine.last_id if res else None
                         else:
                             u4 = None
 
                         if u4 and p3:
-                            data = {"project_role_mappings": [{"project": p3, "role": rpa}]}
-                            engine.test("Edit user projects admin", "PUT", "/admin/v1/users/U4", headers_json,
-                                        data, 204, None, None)
+                            data = {
+                                "project_role_mappings": [{"project": p3, "role": rpa}]
+                            }
+                            engine.test(
+                                "Edit user projects admin",
+                                "PUT",
+                                "/admin/v1/users/U4",
+                                headers_json,
+                                data,
+                                204,
+                                None,
+                                None,
+                            )
                             # Project is deleted even though it shouldn't - PROVISIONAL?
-                            res = engine.test("Delete project P3 conflict", "DELETE", "/admin/v1/projects/"+p3,
-                                              headers_json, None, 409, None, None)
+                            res = engine.test(
+                                "Delete project P3 conflict",
+                                "DELETE",
+                                "/admin/v1/projects/" + p3,
+                                headers_json,
+                                None,
+                                409,
+                                None,
+                                None,
+                            )
                             if res and res.status_code in (200, 204):
                                 p3 = None
                             if p3:
-                                res = engine.test("Delete project P3 forcing", "DELETE",
-                                                  "/admin/v1/projects/"+p3+"?FORCE=True", headers_json, None, 204,
-                                                  None, None)
+                                res = engine.test(
+                                    "Delete project P3 forcing",
+                                    "DELETE",
+                                    "/admin/v1/projects/" + p3 + "?FORCE=True",
+                                    headers_json,
+                                    None,
+                                    204,
+                                    None,
+                                    None,
+                                )
                                 if res and res.status_code in (200, 204):
                                     p3 = None
 
                         if u2:
-                            res = engine.test("Delete user U2. Conflict deleting own user", "DELETE",
-                                              "/admin/v1/users/"+u2, headers_json, None, 409, r_header_json, "json")
+                            res = engine.test(
+                                "Delete user U2. Conflict deleting own user",
+                                "DELETE",
+                                "/admin/v1/users/" + u2,
+                                headers_json,
+                                None,
+                                409,
+                                r_header_json,
+                                "json",
+                            )
                             if res is None or res.status_code in (200, 204):
                                 u2 = None
                         if u4:
-                            res = engine.test("Delete user U4", "DELETE", "/admin/v1/users/"+u4, headers_json, None,
-                                              204, None, None)
+                            res = engine.test(
+                                "Delete user U4",
+                                "DELETE",
+                                "/admin/v1/users/" + u4,
+                                headers_json,
+                                None,
+                                204,
+                                None,
+                                None,
+                            )
                             if res and res.status_code in (200, 204):
                                 u4 = None
                         if p3:
-                            res = engine.test("Delete project P3", "DELETE", "/admin/v1/projects/"+p3, headers_json,
-                                              None, 204, None, None)
+                            res = engine.test(
+                                "Delete project P3",
+                                "DELETE",
+                                "/admin/v1/projects/" + p3,
+                                headers_json,
+                                None,
+                                204,
+                                None,
+                                None,
+                            )
                             if res and res.status_code in (200, 204):
                                 p3 = None
 
                 if u3:
-                    res = engine.test("Delete user U3", "DELETE", "/admin/v1/users/"+u3, headers_json, None,
-                                      204, None, None)
+                    res = engine.test(
+                        "Delete user U3",
+                        "DELETE",
+                        "/admin/v1/users/" + u3,
+                        headers_json,
+                        None,
+                        204,
+                        None,
+                        None,
+                    )
                     if res:
                         u3 = None
 
         # change to admin
-        engine.remove_authorization()   # To force get authorization
+        engine.remove_authorization()  # To force get authorization
         engine.get_autorization()
         if u1:
-            engine.test("Delete user U1", "DELETE", "/admin/v1/users/"+u1, headers_json, None, 204, None, None)
+            engine.test(
+                "Delete user U1",
+                "DELETE",
+                "/admin/v1/users/" + u1,
+                headers_json,
+                None,
+                204,
+                None,
+                None,
+            )
         if u2:
-            engine.test("Delete user U2", "DELETE", "/admin/v1/users/"+u2, headers_json, None, 204, None, None)
+            engine.test(
+                "Delete user U2",
+                "DELETE",
+                "/admin/v1/users/" + u2,
+                headers_json,
+                None,
+                204,
+                None,
+                None,
+            )
         if u3:
-            engine.test("Delete user U3", "DELETE", "/admin/v1/users/"+u3, headers_json, None, 204, None, None)
+            engine.test(
+                "Delete user U3",
+                "DELETE",
+                "/admin/v1/users/" + u3,
+                headers_json,
+                None,
+                204,
+                None,
+                None,
+            )
         if u4:
-            engine.test("Delete user U4", "DELETE", "/admin/v1/users/"+u4, headers_json, None, 204, None, None)
+            engine.test(
+                "Delete user U4",
+                "DELETE",
+                "/admin/v1/users/" + u4,
+                headers_json,
+                None,
+                204,
+                None,
+                None,
+            )
         if p1:
-            engine.test("Delete project P1", "DELETE", "/admin/v1/projects/"+p1, headers_json, None, 204, None, None)
+            engine.test(
+                "Delete project P1",
+                "DELETE",
+                "/admin/v1/projects/" + p1,
+                headers_json,
+                None,
+                204,
+                None,
+                None,
+            )
         if p2:
-            engine.test("Delete project P2", "DELETE", "/admin/v1/projects/"+p2, headers_json, None, 204, None, None)
+            engine.test(
+                "Delete project P2",
+                "DELETE",
+                "/admin/v1/projects/" + p2,
+                headers_json,
+                None,
+                204,
+                None,
+                None,
+            )
         if p3:
-            engine.test("Delete project P3", "DELETE", "/admin/v1/projects/"+p3, headers_json, None, 204, None, None)
+            engine.test(
+                "Delete project P3",
+                "DELETE",
+                "/admin/v1/projects/" + p3,
+                headers_json,
+                None,
+                204,
+                None,
+                None,
+            )
         if padmin:
-            engine.test("Delete project Padmin", "DELETE", "/admin/v1/projects/"+padmin, headers_json, None, 204,
-                        None, None)
+            engine.test(
+                "Delete project Padmin",
+                "DELETE",
+                "/admin/v1/projects/" + padmin,
+                headers_json,
+                None,
+                204,
+                None,
+                None,
+            )
         if pbad:
-            engine.test("Delete bad project", "DELETE", "/admin/v1/projects/"+pbad, headers_json, None, 204,
-                        None, None)
+            engine.test(
+                "Delete bad project",
+                "DELETE",
+                "/admin/v1/projects/" + pbad,
+                headers_json,
+                None,
+                204,
+                None,
+                None,
+            )
 
         # BEGIN New Tests - Addressing Projects/Users by Name/ID
         pid1 = pid2 = None
         uid1 = uid2 = None
-        res = engine.test("Create new project P1", "POST", "/admin/v1/projects", headers_json, {"name": "P1"},
-                          201, {"Location": "/admin/v1/projects/", "Content-Type": "application/json"}, "json")
+        res = engine.test(
+            "Create new project P1",
+            "POST",
+            "/admin/v1/projects",
+            headers_json,
+            {"name": "P1"},
+            201,
+            {"Location": "/admin/v1/projects/", "Content-Type": "application/json"},
+            "json",
+        )
         if res:
             pid1 = res.json()["id"]
             # print("# pid =", pid1)
-        res = engine.test("Create new project P2", "POST", "/admin/v1/projects", headers_json, {"name": "P2"},
-                          201, {"Location": "/admin/v1/projects/", "Content-Type": "application/json"}, "json")
+        res = engine.test(
+            "Create new project P2",
+            "POST",
+            "/admin/v1/projects",
+            headers_json,
+            {"name": "P2"},
+            201,
+            {"Location": "/admin/v1/projects/", "Content-Type": "application/json"},
+            "json",
+        )
         if res:
             pid2 = res.json()["id"]
             # print("# pid =", pid2)
         data = {"username": "U1", "password": "pw1"}
         data["project_role_mappings"] = [{"project": pid1, "role": rpu}]
-        res = engine.test("Create new user U1", "POST", "/admin/v1/users", headers_json, data, 201,
-                          {"Location": "/admin/v1/users/", "Content-Type": "application/json"}, "json")
+        res = engine.test(
+            "Create new user U1",
+            "POST",
+            "/admin/v1/users",
+            headers_json,
+            data,
+            201,
+            {"Location": "/admin/v1/users/", "Content-Type": "application/json"},
+            "json",
+        )
         if res:
             uid1 = res.json()["id"]
             # print("# uid =", uid1)
         data = {"username": "U2", "password": "pw2"}
         data["project_role_mappings"] = [{"project": pid2, "role": rpu}]
-        res = engine.test("Create new user U2", "POST", "/admin/v1/users", headers_json, data, 201,
-                          {"Location": "/admin/v1/users/", "Content-Type": "application/json"}, "json")
+        res = engine.test(
+            "Create new user U2",
+            "POST",
+            "/admin/v1/users",
+            headers_json,
+            data,
+            201,
+            {"Location": "/admin/v1/users/", "Content-Type": "application/json"},
+            "json",
+        )
         if res:
             uid2 = res.json()["id"]
             # print("# uid =", uid2)
         if pid1:
-            engine.test("Get Project P1 by Name", "GET", "/admin/v1/projects/P1", headers_json, None,
-                        200, None, "json")
-            engine.test("Get Project P1 by ID", "GET", "/admin/v1/projects/"+pid1, headers_json, None,
-                        200, None, "json")
+            engine.test(
+                "Get Project P1 by Name",
+                "GET",
+                "/admin/v1/projects/P1",
+                headers_json,
+                None,
+                200,
+                None,
+                "json",
+            )
+            engine.test(
+                "Get Project P1 by ID",
+                "GET",
+                "/admin/v1/projects/" + pid1,
+                headers_json,
+                None,
+                200,
+                None,
+                "json",
+            )
         if uid1:
-            engine.test("Get User U1 by Name", "GET", "/admin/v1/users/U1", headers_json, None, 200, None, "json")
-            engine.test("Get User U1 by ID", "GET", "/admin/v1/users/"+uid1, headers_json, None, 200, None, "json")
+            engine.test(
+                "Get User U1 by Name",
+                "GET",
+                "/admin/v1/users/U1",
+                headers_json,
+                None,
+                200,
+                None,
+                "json",
+            )
+            engine.test(
+                "Get User U1 by ID",
+                "GET",
+                "/admin/v1/users/" + uid1,
+                headers_json,
+                None,
+                200,
+                None,
+                "json",
+            )
         if pid1:
-            res = engine.test("Rename Project P1 by Name", "PUT", "/admin/v1/projects/P1", headers_json,
-                              {"name": "P3"}, 204, None, None)
+            res = engine.test(
+                "Rename Project P1 by Name",
+                "PUT",
+                "/admin/v1/projects/P1",
+                headers_json,
+                {"name": "P3"},
+                204,
+                None,
+                None,
+            )
             if res:
-                engine.test("Get Project P1 by new Name", "GET", "/admin/v1/projects/P3", headers_json, None,
-                            200, None, "json")
+                engine.test(
+                    "Get Project P1 by new Name",
+                    "GET",
+                    "/admin/v1/projects/P3",
+                    headers_json,
+                    None,
+                    200,
+                    None,
+                    "json",
+                )
         if pid2:
-            res = engine.test("Rename Project P2 by ID", "PUT", "/admin/v1/projects/"+pid2, headers_json,
-                              {"name": "P4"}, 204, None, None)
+            res = engine.test(
+                "Rename Project P2 by ID",
+                "PUT",
+                "/admin/v1/projects/" + pid2,
+                headers_json,
+                {"name": "P4"},
+                204,
+                None,
+                None,
+            )
             if res:
-                engine.test("Get Project P2 by new Name", "GET", "/admin/v1/projects/P4", headers_json, None,
-                            200, None, "json")
+                engine.test(
+                    "Get Project P2 by new Name",
+                    "GET",
+                    "/admin/v1/projects/P4",
+                    headers_json,
+                    None,
+                    200,
+                    None,
+                    "json",
+                )
 
         if uid1:
-            res = engine.test("Rename User U1 by Name", "PUT", "/admin/v1/users/U1", headers_json,
-                              {"username": "U3"}, 204, None, None)
+            res = engine.test(
+                "Rename User U1 by Name",
+                "PUT",
+                "/admin/v1/users/U1",
+                headers_json,
+                {"username": "U3"},
+                204,
+                None,
+                None,
+            )
             if res:
-                engine.test("Get User U1 by new Name", "GET", "/admin/v1/users/U3", headers_json, None,
-                            200, None, "json")
+                engine.test(
+                    "Get User U1 by new Name",
+                    "GET",
+                    "/admin/v1/users/U3",
+                    headers_json,
+                    None,
+                    200,
+                    None,
+                    "json",
+                )
 
         if uid2:
-            res = engine.test("Rename User U2 by ID", "PUT", "/admin/v1/users/"+uid2, headers_json,
-                              {"username": "U4"}, 204, None, None)
+            res = engine.test(
+                "Rename User U2 by ID",
+                "PUT",
+                "/admin/v1/users/" + uid2,
+                headers_json,
+                {"username": "U4"},
+                204,
+                None,
+                None,
+            )
             if res:
-                engine.test("Get User U2 by new Name", "GET", "/admin/v1/users/U4", headers_json, None,
-                            200, None, "json")
+                engine.test(
+                    "Get User U2 by new Name",
+                    "GET",
+                    "/admin/v1/users/U4",
+                    headers_json,
+                    None,
+                    200,
+                    None,
+                    "json",
+                )
         if uid1:
-            res = engine.test("Delete User U1 by Name", "DELETE", "/admin/v1/users/U3", headers_json, None,
-                              204, None, None)
+            res = engine.test(
+                "Delete User U1 by Name",
+                "DELETE",
+                "/admin/v1/users/U3",
+                headers_json,
+                None,
+                204,
+                None,
+                None,
+            )
             if res:
                 uid1 = None
 
         if uid2:
-            res = engine.test("Delete User U2 by ID", "DELETE", "/admin/v1/users/"+uid2, headers_json, None,
-                              204, None, None)
+            res = engine.test(
+                "Delete User U2 by ID",
+                "DELETE",
+                "/admin/v1/users/" + uid2,
+                headers_json,
+                None,
+                204,
+                None,
+                None,
+            )
             if res:
                 uid2 = None
 
         if pid1:
-            res = engine.test("Delete Project P1 by Name", "DELETE", "/admin/v1/projects/P3", headers_json, None,
-                              204, None, None)
+            res = engine.test(
+                "Delete Project P1 by Name",
+                "DELETE",
+                "/admin/v1/projects/P3",
+                headers_json,
+                None,
+                204,
+                None,
+                None,
+            )
             if res:
                 pid1 = None
 
         if pid2:
-            res = engine.test("Delete Project P2 by ID", "DELETE", "/admin/v1/projects/"+pid2, headers_json, None,
-                              204, None, None)
+            res = engine.test(
+                "Delete Project P2 by ID",
+                "DELETE",
+                "/admin/v1/projects/" + pid2,
+                headers_json,
+                None,
+                204,
+                None,
+                None,
+            )
             if res:
                 pid2 = None
 
@@ -763,15 +1468,51 @@ class TestUsersProjects:
 
         # CLEANUP
         if pid1:
-            engine.test("Delete Project P1", "DELETE", "/admin/v1/projects/"+pid1, headers_json, None, 204, None, None)
+            engine.test(
+                "Delete Project P1",
+                "DELETE",
+                "/admin/v1/projects/" + pid1,
+                headers_json,
+                None,
+                204,
+                None,
+                None,
+            )
         if pid2:
-            engine.test("Delete Project P2", "DELETE", "/admin/v1/projects/"+pid2, headers_json, None, 204, None, None)
+            engine.test(
+                "Delete Project P2",
+                "DELETE",
+                "/admin/v1/projects/" + pid2,
+                headers_json,
+                None,
+                204,
+                None,
+                None,
+            )
         if uid1:
-            engine.test("Delete User U1", "DELETE", "/admin/v1/users/"+uid1, headers_json, None, 204, None, None)
+            engine.test(
+                "Delete User U1",
+                "DELETE",
+                "/admin/v1/users/" + uid1,
+                headers_json,
+                None,
+                204,
+                None,
+                None,
+            )
         if uid2:
-            engine.test("Delete User U2", "DELETE", "/admin/v1/users/"+uid2, headers_json, None, 204, None, None)
+            engine.test(
+                "Delete User U2",
+                "DELETE",
+                "/admin/v1/users/" + uid2,
+                headers_json,
+                None,
+                204,
+                None,
+                None,
+            )
 
-        engine.remove_authorization()   # To finish
+        engine.remove_authorization()  # To finish
 
 
 class TestProjectsDescriptors:
@@ -784,134 +1525,417 @@ class TestProjectsDescriptors:
         engine.get_autorization()
 
         project_admin_id = None
-        res = engine.test("Get my project Padmin", "GET", "/admin/v1/projects/{}".format(engine.project), headers_json,
-                          None, 200, r_header_json, "json")
+        res = engine.test(
+            "Get my project Padmin",
+            "GET",
+            "/admin/v1/projects/{}".format(engine.project),
+            headers_json,
+            None,
+            200,
+            r_header_json,
+            "json",
+        )
         if res:
             response = res.json()
             project_admin_id = response["_id"]
-        engine.test("Create project Padmin", "POST", "/admin/v1/projects", headers_json,
-                    {"name": "Padmin", "admin": True}, (201, 204),
-                    {"Location": "/admin/v1/projects/", "Content-Type": "application/json"}, "json")
-        engine.test("Create project P2", "POST", "/admin/v1/projects", headers_json, {"name": "P2"},
-                    (201, 204), {"Location": "/admin/v1/projects/", "Content-Type": "application/json"}, "json")
-        engine.test("Create project P3", "POST", "/admin/v1/projects", headers_json, {"name": "P3"},
-                    (201, 204), {"Location": "/admin/v1/projects/", "Content-Type": "application/json"}, "json")
-
-        engine.test("Create user U1", "POST", "/admin/v1/users", headers_json,
-                    {"username": "U1", "password": "pw1",
-                     "project_role_mappings": [{"project": "Padmin", "role": "system_admin"},
-                                               {"project": "P2", "role": "project_admin"},
-                                               {"project": "P3", "role": "project_admin"}],
-                     }, 201, {"Location": "/admin/v1/users/", "Content-Type": "application/json"}, "json")
-
-        engine.test("Onboard VNFD id1", "POST", "/vnfpkgm/v1/vnf_packages_content?id=id1", headers_yaml,
-                    TestDescriptors.vnfd_empty, 201, r_headers_yaml_location_vnfd, "yaml")
+        engine.test(
+            "Create project Padmin",
+            "POST",
+            "/admin/v1/projects",
+            headers_json,
+            {"name": "Padmin", "admin": True},
+            (201, 204),
+            {"Location": "/admin/v1/projects/", "Content-Type": "application/json"},
+            "json",
+        )
+        engine.test(
+            "Create project P2",
+            "POST",
+            "/admin/v1/projects",
+            headers_json,
+            {"name": "P2"},
+            (201, 204),
+            {"Location": "/admin/v1/projects/", "Content-Type": "application/json"},
+            "json",
+        )
+        engine.test(
+            "Create project P3",
+            "POST",
+            "/admin/v1/projects",
+            headers_json,
+            {"name": "P3"},
+            (201, 204),
+            {"Location": "/admin/v1/projects/", "Content-Type": "application/json"},
+            "json",
+        )
+
+        engine.test(
+            "Create user U1",
+            "POST",
+            "/admin/v1/users",
+            headers_json,
+            {
+                "username": "U1",
+                "password": "pw1",
+                "project_role_mappings": [
+                    {"project": "Padmin", "role": "system_admin"},
+                    {"project": "P2", "role": "project_admin"},
+                    {"project": "P3", "role": "project_admin"},
+                ],
+            },
+            201,
+            {"Location": "/admin/v1/users/", "Content-Type": "application/json"},
+            "json",
+        )
+
+        engine.test(
+            "Onboard VNFD id1",
+            "POST",
+            "/vnfpkgm/v1/vnf_packages_content?id=id1",
+            headers_yaml,
+            TestDescriptors.vnfd_empty,
+            201,
+            r_headers_yaml_location_vnfd,
+            "yaml",
+        )
         vnfd_ids.append(engine.last_id)
-        engine.test("Onboard VNFD id2 PUBLIC", "POST", "/vnfpkgm/v1/vnf_packages_content?id=id2&PUBLIC=TRUE",
-                    headers_yaml, TestDescriptors.vnfd_empty, 201, r_headers_yaml_location_vnfd, "yaml")
+        engine.test(
+            "Onboard VNFD id2 PUBLIC",
+            "POST",
+            "/vnfpkgm/v1/vnf_packages_content?id=id2&PUBLIC=TRUE",
+            headers_yaml,
+            TestDescriptors.vnfd_empty,
+            201,
+            r_headers_yaml_location_vnfd,
+            "yaml",
+        )
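+        # id2 is onboarded with PUBLIC=TRUE, so the "List VNFD public descriptors"
+        # check below expects exactly this package to be visible from project Padmin.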
         vnfd_ids.append(engine.last_id)
-        engine.test("Onboard VNFD id3", "POST", "/vnfpkgm/v1/vnf_packages_content?id=id3&PUBLIC=FALSE", headers_yaml,
-                    TestDescriptors.vnfd_empty, 201, r_headers_yaml_location_vnfd, "yaml")
+        engine.test(
+            "Onboard VNFD id3",
+            "POST",
+            "/vnfpkgm/v1/vnf_packages_content?id=id3&PUBLIC=FALSE",
+            headers_yaml,
+            TestDescriptors.vnfd_empty,
+            201,
+            r_headers_yaml_location_vnfd,
+            "yaml",
+        )
         vnfd_ids.append(engine.last_id)
 
-        res = engine.test("Get VNFD descriptors", "GET", "/vnfpkgm/v1/vnf_packages?id=id1,id2,id3",
-                          headers_json, None, 200, r_header_json, "json")
+        res = engine.test(
+            "Get VNFD descriptors",
+            "GET",
+            "/vnfpkgm/v1/vnf_packages?id=id1,id2,id3",
+            headers_json,
+            None,
+            200,
+            r_header_json,
+            "json",
+        )
         response = res.json()
         if len(response) != 3:
-            logger.error("Only 3 vnfds should be present for project admin. {} listed".format(len(response)))
+            logger.error(
+                "Only 3 vnfds should be present for project admin. {} listed".format(
+                    len(response)
+                )
+            )
             engine.failed_tests += 1
 
         # Change to other project Padmin
-        res = engine.test("Change to user U1 project Padmin", "POST", "/admin/v1/tokens", headers_json,
-                          {"username": "U1", "password": "pw1", "project_id": "Padmin"}, (200, 201),
-                          r_header_json, "json")
+        res = engine.test(
+            "Change to user U1 project Padmin",
+            "POST",
+            "/admin/v1/tokens",
+            headers_json,
+            {"username": "U1", "password": "pw1", "project_id": "Padmin"},
+            (200, 201),
+            r_header_json,
+            "json",
+        )
         if res:
             response = res.json()
             engine.set_header({"Authorization": "Bearer {}".format(response["id"])})
 
         # list vnfds
-        res = engine.test("List VNFD descriptors for Padmin", "GET", "/vnfpkgm/v1/vnf_packages",
-                          headers_json, None, 200, r_header_json, "json")
+        res = engine.test(
+            "List VNFD descriptors for Padmin",
+            "GET",
+            "/vnfpkgm/v1/vnf_packages",
+            headers_json,
+            None,
+            200,
+            r_header_json,
+            "json",
+        )
         response = res.json()
         if len(response) != 0:
-            logger.error("Only 0 vnfds should be present for project Padmin. {} listed".format(len(response)))
+            logger.error(
+                "Only 0 vnfds should be present for project Padmin. {} listed".format(
+                    len(response)
+                )
+            )
             engine.failed_tests += 1
 
         # list Public vnfds
-        res = engine.test("List VNFD public descriptors", "GET", "/vnfpkgm/v1/vnf_packages?PUBLIC=True",
-                          headers_json, None, 200, r_header_json, "json")
+        res = engine.test(
+            "List VNFD public descriptors",
+            "GET",
+            "/vnfpkgm/v1/vnf_packages?PUBLIC=True",
+            headers_json,
+            None,
+            200,
+            r_header_json,
+            "json",
+        )
         response = res.json()
         if len(response) != 1:
-            logger.error("Only 1 vnfds should be present for project Padmin. {} listed".format(len(response)))
+            logger.error(
+                "Only 1 vnfds should be present for project Padmin. {} listed".format(
+                    len(response)
+                )
+            )
             engine.failed_tests += 1
 
         # list vnfds belonging to project "admin"
-        res = engine.test("List VNFD of admin project", "GET",
-                          "/vnfpkgm/v1/vnf_packages?ADMIN={}".format(project_admin_id),
-                          headers_json, None, 200, r_header_json, "json")
+        res = engine.test(
+            "List VNFD of admin project",
+            "GET",
+            "/vnfpkgm/v1/vnf_packages?ADMIN={}".format(project_admin_id),
+            headers_json,
+            None,
+            200,
+            r_header_json,
+            "json",
+        )
         if res:
             response = res.json()
             if len(response) != 3:
-                logger.error("Only 3 vnfds should be present for project Padmin. {} listed".format(len(response)))
+                logger.error(
+                    "Only 3 vnfds should be present for project Padmin. {} listed".format(
+                        len(response)
+                    )
+                )
                 engine.failed_tests += 1
 
         # Get Public vnfds
-        engine.test("Get VNFD public descriptors", "GET", "/vnfpkgm/v1/vnf_packages/{}".format(vnfd_ids[1]),
-                    headers_json, None, 200, r_header_json, "json")
+        engine.test(
+            "Get VNFD public descriptors",
+            "GET",
+            "/vnfpkgm/v1/vnf_packages/{}".format(vnfd_ids[1]),
+            headers_json,
+            None,
+            200,
+            r_header_json,
+            "json",
+        )
         # Edit not owned vnfd
-        engine.test("Edit VNFD ", "PATCH", "/vnfpkgm/v1/vnf_packages/{}".format(vnfd_ids[0]),
-                    headers_yaml, '{name: pepe}', 404, r_header_yaml, "yaml")
+        engine.test(
+            "Edit VNFD ",
+            "PATCH",
+            "/vnfpkgm/v1/vnf_packages/{}".format(vnfd_ids[0]),
+            headers_yaml,
+            "{name: pepe}",
+            404,
+            r_header_yaml,
+            "yaml",
+        )
 
         # Add to my catalog
-        engine.test("Add VNFD id2 to my catalog", "PATCH", "/vnfpkgm/v1/vnf_packages/{}?SET_PROJECT".
-                    format(vnfd_ids[1]), headers_json, None, 204, None, 0)
+        engine.test(
+            "Add VNFD id2 to my catalog",
+            "PATCH",
+            "/vnfpkgm/v1/vnf_packages/{}?SET_PROJECT".format(vnfd_ids[1]),
+            headers_json,
+            None,
+            204,
+            None,
+            0,
+        )
 
         # Add a new vnfd
-        engine.test("Onboard VNFD id4", "POST", "/vnfpkgm/v1/vnf_packages_content?id=id4", headers_yaml,
-                    TestDescriptors.vnfd_empty, 201, r_headers_yaml_location_vnfd, "yaml")
+        engine.test(
+            "Onboard VNFD id4",
+            "POST",
+            "/vnfpkgm/v1/vnf_packages_content?id=id4",
+            headers_yaml,
+            TestDescriptors.vnfd_empty,
+            201,
+            r_headers_yaml_location_vnfd,
+            "yaml",
+        )
         vnfd_ids.append(engine.last_id)
 
         # list vnfds
-        res = engine.test("List VNFD public descriptors", "GET", "/vnfpkgm/v1/vnf_packages",
-                          headers_json, None, 200, r_header_json, "json")
+        res = engine.test(
+            "List VNFD public descriptors",
+            "GET",
+            "/vnfpkgm/v1/vnf_packages",
+            headers_json,
+            None,
+            200,
+            r_header_json,
+            "json",
+        )
         response = res.json()
         if len(response) != 2:
-            logger.error("Only 2 vnfds should be present for project Padmin. {} listed".format(len(response)))
+            logger.error(
+                "Only 2 vnfds should be present for project Padmin. {} listed".format(
+                    len(response)
+                )
+            )
             engine.failed_tests += 1
 
         if manual_check:
-            input('VNFDs have been omboarded. Perform manual check and press enter to resume')
-
-        test_rest.test("Delete VNFD id2", "DELETE", "/vnfpkgm/v1/vnf_packages/{}".format(vnfd_ids[1]),
-                       headers_yaml, None, 204, None, 0)
+            input(
+                "VNFDs have been omboarded. Perform manual check and press enter to resume"
+            )
+
+        test_rest.test(
+            "Delete VNFD id2",
+            "DELETE",
+            "/vnfpkgm/v1/vnf_packages/{}".format(vnfd_ids[1]),
+            headers_yaml,
+            None,
+            204,
+            None,
+            0,
+        )
 
         # change to admin project
-        engine.remove_authorization()   # To force get authorization
+        engine.remove_authorization()  # To force get authorization
         engine.get_autorization()
-        test_rest.test("Delete VNFD id1", "DELETE", "/vnfpkgm/v1/vnf_packages/{}".format(vnfd_ids[0]),
-                       headers_yaml, None, 204, None, 0)
-        test_rest.test("Delete VNFD id2", "DELETE", "/vnfpkgm/v1/vnf_packages/{}".format(vnfd_ids[1]),
-                       headers_yaml, None, 204, None, 0)
-        test_rest.test("Delete VNFD id3", "DELETE", "/vnfpkgm/v1/vnf_packages/{}".format(vnfd_ids[2]),
-                       headers_yaml, None, 204, None, 0)
-        test_rest.test("Delete VNFD id4", "DELETE", "/vnfpkgm/v1/vnf_packages/{}".format(vnfd_ids[3]),
-                       headers_yaml, None, 404, r_header_yaml, "yaml")
-        test_rest.test("Delete VNFD id4", "DELETE", "/vnfpkgm/v1/vnf_packages/{}?ADMIN".format(vnfd_ids[3]),
-                       headers_yaml, None, 204, None, 0)
+        test_rest.test(
+            "Delete VNFD id1",
+            "DELETE",
+            "/vnfpkgm/v1/vnf_packages/{}".format(vnfd_ids[0]),
+            headers_yaml,
+            None,
+            204,
+            None,
+            0,
+        )
+        test_rest.test(
+            "Delete VNFD id2",
+            "DELETE",
+            "/vnfpkgm/v1/vnf_packages/{}".format(vnfd_ids[1]),
+            headers_yaml,
+            None,
+            204,
+            None,
+            0,
+        )
+        test_rest.test(
+            "Delete VNFD id3",
+            "DELETE",
+            "/vnfpkgm/v1/vnf_packages/{}".format(vnfd_ids[2]),
+            headers_yaml,
+            None,
+            204,
+            None,
+            0,
+        )
+        test_rest.test(
+            "Delete VNFD id4",
+            "DELETE",
+            "/vnfpkgm/v1/vnf_packages/{}".format(vnfd_ids[3]),
+            headers_yaml,
+            None,
+            404,
+            r_header_yaml,
+            "yaml",
+        )
+        test_rest.test(
+            "Delete VNFD id4",
+            "DELETE",
+            "/vnfpkgm/v1/vnf_packages/{}?ADMIN".format(vnfd_ids[3]),
+            headers_yaml,
+            None,
+            204,
+            None,
+            0,
+        )
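+        # The ADMIN query parameter lets the admin session act on descriptors owned by
+        # other projects: the plain delete of id4 above gets 404, while the same
+        # request with ?ADMIN is expected to return 204.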
         # Get Public vnfds
-        engine.test("Get VNFD deleted id1", "GET", "/vnfpkgm/v1/vnf_packages/{}?ADMIN".format(vnfd_ids[0]),
-                    headers_json, None, 404, r_header_json, "json")
-        engine.test("Get VNFD deleted id2", "GET", "/vnfpkgm/v1/vnf_packages/{}?ADMIN".format(vnfd_ids[1]),
-                    headers_json, None, 404, r_header_json, "json")
-        engine.test("Get VNFD deleted id3", "GET", "/vnfpkgm/v1/vnf_packages/{}?ADMIN".format(vnfd_ids[2]),
-                    headers_json, None, 404, r_header_json, "json")
-        engine.test("Get VNFD deleted id4", "GET", "/vnfpkgm/v1/vnf_packages/{}?ADMIN".format(vnfd_ids[3]),
-                    headers_json, None, 404, r_header_json, "json")
+        engine.test(
+            "Get VNFD deleted id1",
+            "GET",
+            "/vnfpkgm/v1/vnf_packages/{}?ADMIN".format(vnfd_ids[0]),
+            headers_json,
+            None,
+            404,
+            r_header_json,
+            "json",
+        )
+        engine.test(
+            "Get VNFD deleted id2",
+            "GET",
+            "/vnfpkgm/v1/vnf_packages/{}?ADMIN".format(vnfd_ids[1]),
+            headers_json,
+            None,
+            404,
+            r_header_json,
+            "json",
+        )
+        engine.test(
+            "Get VNFD deleted id3",
+            "GET",
+            "/vnfpkgm/v1/vnf_packages/{}?ADMIN".format(vnfd_ids[2]),
+            headers_json,
+            None,
+            404,
+            r_header_json,
+            "json",
+        )
+        engine.test(
+            "Get VNFD deleted id4",
+            "GET",
+            "/vnfpkgm/v1/vnf_packages/{}?ADMIN".format(vnfd_ids[3]),
+            headers_json,
+            None,
+            404,
+            r_header_json,
+            "json",
+        )
 
-        engine.test("Delete user U1", "DELETE", "/admin/v1/users/U1", headers_json, None, 204, None, None)
-        engine.test("Delete project Padmin", "DELETE", "/admin/v1/projects/Padmin", headers_json, None, 204, None, None)
-        engine.test("Delete project P2", "DELETE", "/admin/v1/projects/P2", headers_json, None, 204, None, None)
-        engine.test("Delete project P3", "DELETE", "/admin/v1/projects/P3", headers_json, None, 204, None, None)
+        engine.test(
+            "Delete user U1",
+            "DELETE",
+            "/admin/v1/users/U1",
+            headers_json,
+            None,
+            204,
+            None,
+            None,
+        )
+        engine.test(
+            "Delete project Padmin",
+            "DELETE",
+            "/admin/v1/projects/Padmin",
+            headers_json,
+            None,
+            204,
+            None,
+            None,
+        )
+        engine.test(
+            "Delete project P2",
+            "DELETE",
+            "/admin/v1/projects/P2",
+            headers_json,
+            None,
+            204,
+            None,
+            None,
+        )
+        engine.test(
+            "Delete project P3",
+            "DELETE",
+            "/admin/v1/projects/P3",
+            headers_json,
+            None,
+            204,
+            None,
+            None,
+        )
 
 
 class TestFakeVim:
@@ -928,7 +1952,7 @@ class TestFakeVim:
             "vim_tenant_name": "vimTenant",
             "vim_user": "user",
             "vim_password": "password",
-            "config": {"config_param": 1}
+            "config": {"config_param": 1},
         }
         self.sdn = {
             "name": "sdn-name",
@@ -939,17 +1963,39 @@ class TestFakeVim:
             "type": "opendaylight",
             "version": "3.5.6",
             "user": "user",
-            "password": "passwd"
+            "password": "passwd",
         }
         self.port_mapping = [
-            {"compute_node": "compute node 1",
-             "ports": [{"pci": "0000:81:00.0", "switch_port": "port-2/1", "switch_mac": "52:54:00:94:21:21"},
-                       {"pci": "0000:81:00.1", "switch_port": "port-2/2", "switch_mac": "52:54:00:94:21:22"}
-                       ]},
-            {"compute_node": "compute node 2",
-             "ports": [{"pci": "0000:81:00.0", "switch_port": "port-2/3", "switch_mac": "52:54:00:94:21:23"},
-                       {"pci": "0000:81:00.1", "switch_port": "port-2/4", "switch_mac": "52:54:00:94:21:24"}
-                       ]}
+            {
+                "compute_node": "compute node 1",
+                "ports": [
+                    {
+                        "pci": "0000:81:00.0",
+                        "switch_port": "port-2/1",
+                        "switch_mac": "52:54:00:94:21:21",
+                    },
+                    {
+                        "pci": "0000:81:00.1",
+                        "switch_port": "port-2/2",
+                        "switch_mac": "52:54:00:94:21:22",
+                    },
+                ],
+            },
+            {
+                "compute_node": "compute node 2",
+                "ports": [
+                    {
+                        "pci": "0000:81:00.0",
+                        "switch_port": "port-2/3",
+                        "switch_mac": "52:54:00:94:21:23",
+                    },
+                    {
+                        "pci": "0000:81:00.1",
+                        "switch_port": "port-2/4",
+                        "switch_mac": "52:54:00:94:21:24",
+                    },
+                ],
+            },
         ]
 
     def run(self, engine, test_osm, manual_check, test_params=None):
@@ -959,28 +2005,94 @@ class TestFakeVim:
 
         engine.set_test_name("FakeVim")
         engine.get_autorization()
-        engine.test("Create VIM", "POST", "/admin/v1/vim_accounts", headers_json, self.vim, (201, 202),
-                    {"Location": "/admin/v1/vim_accounts/", "Content-Type": "application/json"}, "json")
+        engine.test(
+            "Create VIM",
+            "POST",
+            "/admin/v1/vim_accounts",
+            headers_json,
+            self.vim,
+            (201, 202),
+            {"Location": "/admin/v1/vim_accounts/", "Content-Type": "application/json"},
+            "json",
+        )
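+        # engine.last_id presumably caches the id of the resource created by the
+        # previous request, so it can be reused in the follow-up calls below.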
         vim_id = engine.last_id
-        engine.test("Create VIM without name, bad schema", "POST", "/admin/v1/vim_accounts", headers_json,
-                    vim_bad, 422, None, headers_json)
-        engine.test("Create VIM name repeated", "POST", "/admin/v1/vim_accounts", headers_json, self.vim,
-                    409, None, headers_json)
-        engine.test("Show VIMs", "GET", "/admin/v1/vim_accounts", headers_yaml, None, 200, r_header_yaml,
-                    "yaml")
-        engine.test("Show VIM", "GET", "/admin/v1/vim_accounts/{}".format(vim_id), headers_yaml, None, 200,
-                    r_header_yaml, "yaml")
+        engine.test(
+            "Create VIM without name, bad schema",
+            "POST",
+            "/admin/v1/vim_accounts",
+            headers_json,
+            vim_bad,
+            422,
+            None,
+            headers_json,
+        )
+        engine.test(
+            "Create VIM name repeated",
+            "POST",
+            "/admin/v1/vim_accounts",
+            headers_json,
+            self.vim,
+            409,
+            None,
+            headers_json,
+        )
+        engine.test(
+            "Show VIMs",
+            "GET",
+            "/admin/v1/vim_accounts",
+            headers_yaml,
+            None,
+            200,
+            r_header_yaml,
+            "yaml",
+        )
+        engine.test(
+            "Show VIM",
+            "GET",
+            "/admin/v1/vim_accounts/{}".format(vim_id),
+            headers_yaml,
+            None,
+            200,
+            r_header_yaml,
+            "yaml",
+        )
         if not test_osm:
             # delete with FORCE
-            engine.test("Delete VIM", "DELETE", "/admin/v1/vim_accounts/{}?FORCE=True".format(vim_id), headers_yaml,
-                        None, 202, None, 0)
-            engine.test("Check VIM is deleted", "GET", "/admin/v1/vim_accounts/{}".format(vim_id), headers_yaml, None,
-                        404, r_header_yaml, "yaml")
+            engine.test(
+                "Delete VIM",
+                "DELETE",
+                "/admin/v1/vim_accounts/{}?FORCE=True".format(vim_id),
+                headers_yaml,
+                None,
+                202,
+                None,
+                0,
+            )
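+            # FORCE=True presumably deletes the VIM record immediately (skipping the
+            # asynchronous clean-up), which is why this branch checks for 404 right away
+            # instead of polling with wait_until_delete() as the test_osm branch does.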
+            engine.test(
+                "Check VIM is deleted",
+                "GET",
+                "/admin/v1/vim_accounts/{}".format(vim_id),
+                headers_yaml,
+                None,
+                404,
+                r_header_yaml,
+                "yaml",
+            )
         else:
             # delete and wait until is really deleted
-            engine.test("Delete VIM", "DELETE", "/admin/v1/vim_accounts/{}".format(vim_id), headers_yaml, None, 202,
-                        None, 0)
-            engine.wait_until_delete("/admin/v1/vim_accounts/{}".format(vim_id), timeout)
+            engine.test(
+                "Delete VIM",
+                "DELETE",
+                "/admin/v1/vim_accounts/{}".format(vim_id),
+                headers_yaml,
+                None,
+                202,
+                None,
+                0,
+            )
+            engine.wait_until_delete(
+                "/admin/v1/vim_accounts/{}".format(vim_id), timeout
+            )
 
 
 class TestVIMSDN(TestFakeVim):
@@ -997,66 +2109,192 @@ class TestVIMSDN(TestFakeVim):
             "wim_url": "http://localhost:/wim",
             "user": "user",
             "password": "password",
-            "config": {"config_param": 1}
+            "config": {"config_param": 1},
         }
 
     def run(self, engine, test_osm, manual_check, test_params=None):
         engine.set_test_name("VimSdn")
         engine.get_autorization()
         # Added SDN
-        engine.test("Create SDN", "POST", "/admin/v1/sdns", headers_json, self.sdn, (201, 202),
-                    {"Location": "/admin/v1/sdns/", "Content-Type": "application/json"}, "json")
+        engine.test(
+            "Create SDN",
+            "POST",
+            "/admin/v1/sdns",
+            headers_json,
+            self.sdn,
+            (201, 202),
+            {"Location": "/admin/v1/sdns/", "Content-Type": "application/json"},
+            "json",
+        )
         sdnc_id = engine.last_id
         # sleep(5)
         # Edit SDN
-        engine.test("Edit SDN", "PATCH", "/admin/v1/sdns/{}".format(sdnc_id), headers_json, {"name": "new_sdn_name"},
-                    (202, 204), None, None)
+        engine.test(
+            "Edit SDN",
+            "PATCH",
+            "/admin/v1/sdns/{}".format(sdnc_id),
+            headers_json,
+            {"name": "new_sdn_name"},
+            (202, 204),
+            None,
+            None,
+        )
         # sleep(5)
         # VIM with SDN
         self.vim["config"]["sdn-controller"] = sdnc_id
         self.vim["config"]["sdn-port-mapping"] = self.port_mapping
-        engine.test("Create VIM", "POST", "/admin/v1/vim_accounts", headers_json, self.vim, (200, 202, 201),
-                    {"Location": "/admin/v1/vim_accounts/", "Content-Type": "application/json"}, "json"),
+        engine.test(
+            "Create VIM",
+            "POST",
+            "/admin/v1/vim_accounts",
+            headers_json,
+            self.vim,
+            (200, 202, 201),
+            {"Location": "/admin/v1/vim_accounts/", "Content-Type": "application/json"},
+            "json",
+        ),
 
         vim_id = engine.last_id
         self.port_mapping[0]["compute_node"] = "compute node XX"
-        engine.test("Edit VIM change port-mapping", "PUT", "/admin/v1/vim_accounts/{}".format(vim_id), headers_json,
-                    {"config": {"sdn-port-mapping": self.port_mapping}}, (202, 204), None, None)
-        engine.test("Edit VIM remove port-mapping", "PUT", "/admin/v1/vim_accounts/{}".format(vim_id), headers_json,
-                    {"config": {"sdn-port-mapping": None}}, (202, 204), None, None)
+        engine.test(
+            "Edit VIM change port-mapping",
+            "PUT",
+            "/admin/v1/vim_accounts/{}".format(vim_id),
+            headers_json,
+            {"config": {"sdn-port-mapping": self.port_mapping}},
+            (202, 204),
+            None,
+            None,
+        )
+        engine.test(
+            "Edit VIM remove port-mapping",
+            "PUT",
+            "/admin/v1/vim_accounts/{}".format(vim_id),
+            headers_json,
+            {"config": {"sdn-port-mapping": None}},
+            (202, 204),
+            None,
+            None,
+        )
 
-        engine.test("Create WIM", "POST", "/admin/v1/wim_accounts", headers_json, self.wim, (200, 202, 201),
-                    {"Location": "/admin/v1/wim_accounts/", "Content-Type": "application/json"}, "json"),
+        engine.test(
+            "Create WIM",
+            "POST",
+            "/admin/v1/wim_accounts",
+            headers_json,
+            self.wim,
+            (200, 202, 201),
+            {"Location": "/admin/v1/wim_accounts/", "Content-Type": "application/json"},
+            "json",
+        ),
         wim_id = engine.last_id
 
         if not test_osm:
             # delete with FORCE
-            engine.test("Delete VIM remove port-mapping", "DELETE",
-                        "/admin/v1/vim_accounts/{}?FORCE=True".format(vim_id), headers_json, None, 202, None, 0)
-            engine.test("Delete SDNC", "DELETE", "/admin/v1/sdns/{}?FORCE=True".format(sdnc_id), headers_json, None,
-                        202, None, 0)
-
-            engine.test("Delete WIM", "DELETE",
-                        "/admin/v1/wim_accounts/{}?FORCE=True".format(wim_id), headers_json, None, 202, None, 0)
-            engine.test("Check VIM is deleted", "GET", "/admin/v1/vim_accounts/{}".format(vim_id), headers_yaml,
-                        None, 404, r_header_yaml, "yaml")
-            engine.test("Check SDN is deleted", "GET", "/admin/v1/sdns/{}".format(sdnc_id), headers_yaml, None,
-                        404, r_header_yaml, "yaml")
-            engine.test("Check WIM is deleted", "GET", "/admin/v1/wim_accounts/{}".format(wim_id), headers_yaml,
-                        None, 404, r_header_yaml, "yaml")
+            engine.test(
+                "Delete VIM remove port-mapping",
+                "DELETE",
+                "/admin/v1/vim_accounts/{}?FORCE=True".format(vim_id),
+                headers_json,
+                None,
+                202,
+                None,
+                0,
+            )
+            engine.test(
+                "Delete SDNC",
+                "DELETE",
+                "/admin/v1/sdns/{}?FORCE=True".format(sdnc_id),
+                headers_json,
+                None,
+                202,
+                None,
+                0,
+            )
+
+            engine.test(
+                "Delete WIM",
+                "DELETE",
+                "/admin/v1/wim_accounts/{}?FORCE=True".format(wim_id),
+                headers_json,
+                None,
+                202,
+                None,
+                0,
+            )
+            engine.test(
+                "Check VIM is deleted",
+                "GET",
+                "/admin/v1/vim_accounts/{}".format(vim_id),
+                headers_yaml,
+                None,
+                404,
+                r_header_yaml,
+                "yaml",
+            )
+            engine.test(
+                "Check SDN is deleted",
+                "GET",
+                "/admin/v1/sdns/{}".format(sdnc_id),
+                headers_yaml,
+                None,
+                404,
+                r_header_yaml,
+                "yaml",
+            )
+            engine.test(
+                "Check WIM is deleted",
+                "GET",
+                "/admin/v1/wim_accounts/{}".format(wim_id),
+                headers_yaml,
+                None,
+                404,
+                r_header_yaml,
+                "yaml",
+            )
         else:
             if manual_check:
-                input('VIM, SDN, WIM has been deployed. Perform manual check and press enter to resume')
+                input(
+                    "VIM, SDN, WIM has been deployed. Perform manual check and press enter to resume"
+                )
             # delete and wait until is really deleted
-            engine.test("Delete VIM remove port-mapping", "DELETE", "/admin/v1/vim_accounts/{}".format(vim_id),
-                        headers_json, None, (202, 201, 204), None, 0)
-            engine.test("Delete SDN", "DELETE", "/admin/v1/sdns/{}".format(sdnc_id), headers_json, None,
-                        (202, 201, 204), None, 0)
-            engine.test("Delete VIM", "DELETE", "/admin/v1/wim_accounts/{}".format(wim_id),
-                        headers_json, None, (202, 201, 204), None, 0)
-            engine.wait_until_delete("/admin/v1/vim_accounts/{}".format(vim_id), timeout)
+            engine.test(
+                "Delete VIM remove port-mapping",
+                "DELETE",
+                "/admin/v1/vim_accounts/{}".format(vim_id),
+                headers_json,
+                None,
+                (202, 201, 204),
+                None,
+                0,
+            )
+            engine.test(
+                "Delete SDN",
+                "DELETE",
+                "/admin/v1/sdns/{}".format(sdnc_id),
+                headers_json,
+                None,
+                (202, 201, 204),
+                None,
+                0,
+            )
+            engine.test(
+                "Delete VIM",
+                "DELETE",
+                "/admin/v1/wim_accounts/{}".format(wim_id),
+                headers_json,
+                None,
+                (202, 201, 204),
+                None,
+                0,
+            )
+            engine.wait_until_delete(
+                "/admin/v1/vim_accounts/{}".format(vim_id), timeout
+            )
             engine.wait_until_delete("/admin/v1/sdns/{}".format(sdnc_id), timeout)
-            engine.wait_until_delete("/admin/v1/wim_accounts/{}".format(wim_id), timeout)
+            engine.wait_until_delete(
+                "/admin/v1/wim_accounts/{}".format(wim_id), timeout
+            )
 
 
 class TestDeploy:
@@ -1068,7 +2306,9 @@ class TestDeploy:
         self.vim_id = None
         self.ns_id = None
         self.vnfds_id = []
-        self.descriptor_url = "https://osm-download.etsi.org/ftp/osm-3.0-three/2nd-hackfest/packages/"
+        self.descriptor_url = (
+            "https://osm-download.etsi.org/ftp/osm-3.0-three/2nd-hackfest/packages/"
+        )
         self.vnfd_filenames = ("cirros_vnf.tar.gz",)
         self.nsd_filename = "cirros_2vnf_ns.tar.gz"
         self.descriptor_edit = None
@@ -1090,15 +2330,21 @@ class TestDeploy:
             if "/" in vnfd_filename:
                 vnfd_filename_path = vnfd_filename
                 if not os.path.exists(vnfd_filename_path):
-                    raise TestException("File '{}' does not exist".format(vnfd_filename_path))
+                    raise TestException(
+                        "File '{}' does not exist".format(vnfd_filename_path)
+                    )
             else:
                 vnfd_filename_path = temp_dir + vnfd_filename
                 if not os.path.exists(vnfd_filename_path):
                     with open(vnfd_filename_path, "wb") as file:
                         response = requests.get(self.descriptor_url + vnfd_filename)
                         if response.status_code >= 300:
-                            raise TestException("Error downloading descriptor from '{}': {}".format(
-                                self.descriptor_url + vnfd_filename, response.status_code))
+                            raise TestException(
+                                "Error downloading descriptor from '{}': {}".format(
+                                    self.descriptor_url + vnfd_filename,
+                                    response.status_code,
+                                )
+                            )
                         file.write(response.content)
             if vnfd_filename_path.endswith(".yaml"):
                 headers = headers_yaml
@@ -1106,40 +2352,76 @@ class TestDeploy:
                 headers = headers_zip_yaml
             if randint(0, 1) == 0:
                 # vnfd CREATE AND UPLOAD in one step:
-                engine.test("Onboard VNFD in one step", "POST",
-                            "/vnfpkgm/v1/vnf_packages_content" + self.qforce, headers, "@b" + vnfd_filename_path, 201,
-                            r_headers_yaml_location_vnfd,
-                            "yaml")
+                engine.test(
+                    "Onboard VNFD in one step",
+                    "POST",
+                    "/vnfpkgm/v1/vnf_packages_content" + self.qforce,
+                    headers,
+                    "@b" + vnfd_filename_path,
+                    201,
+                    r_headers_yaml_location_vnfd,
+                    "yaml",
+                )
                 self.vnfds_id.append(engine.last_id)
             else:
                 # vnfd CREATE AND UPLOAD ZIP
-                engine.test("Onboard VNFD step 1", "POST", "/vnfpkgm/v1/vnf_packages",
-                            headers_json, None, 201,
-                            {"Location": "/vnfpkgm/v1/vnf_packages/", "Content-Type": "application/json"}, "json")
+                engine.test(
+                    "Onboard VNFD step 1",
+                    "POST",
+                    "/vnfpkgm/v1/vnf_packages",
+                    headers_json,
+                    None,
+                    201,
+                    {
+                        "Location": "/vnfpkgm/v1/vnf_packages/",
+                        "Content-Type": "application/json",
+                    },
+                    "json",
+                )
                 self.vnfds_id.append(engine.last_id)
-                engine.test("Onboard VNFD step 2 as ZIP", "PUT",
-                            "/vnfpkgm/v1/vnf_packages/<>/package_content" + self.qforce,
-                            headers, "@b" + vnfd_filename_path, 204, None, 0)
+                engine.test(
+                    "Onboard VNFD step 2 as ZIP",
+                    "PUT",
+                    "/vnfpkgm/v1/vnf_packages/<>/package_content" + self.qforce,
+                    headers,
+                    "@b" + vnfd_filename_path,
+                    204,
+                    None,
+                    0,
+                )
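+                # The "<>" placeholder in the step 2 URL is presumably substituted by the
+                # test engine with the package id created in step 1 (engine.last_id).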
 
             if self.descriptor_edit:
                 if "vnfd{}".format(vnfd_index) in self.descriptor_edit:
                     # Modify VNFD
-                    engine.test("Edit VNFD ", "PATCH",
-                                "/vnfpkgm/v1/vnf_packages/{}".format(self.vnfds_id[-1]),
-                                headers_yaml, self.descriptor_edit["vnfd{}".format(vnfd_index)], 204, None, None)
+                    engine.test(
+                        "Edit VNFD ",
+                        "PATCH",
+                        "/vnfpkgm/v1/vnf_packages/{}".format(self.vnfds_id[-1]),
+                        headers_yaml,
+                        self.descriptor_edit["vnfd{}".format(vnfd_index)],
+                        204,
+                        None,
+                        None,
+                    )
 
         if "/" in self.nsd_filename:
             nsd_filename_path = self.nsd_filename
             if not os.path.exists(nsd_filename_path):
-                raise TestException("File '{}' does not exist".format(nsd_filename_path))
+                raise TestException(
+                    "File '{}' does not exist".format(nsd_filename_path)
+                )
         else:
             nsd_filename_path = temp_dir + self.nsd_filename
             if not os.path.exists(nsd_filename_path):
                 with open(nsd_filename_path, "wb") as file:
                     response = requests.get(self.descriptor_url + self.nsd_filename)
                     if response.status_code >= 300:
-                        raise TestException("Error downloading descriptor from '{}': {}".format(
-                            self.descriptor_url + self.nsd_filename, response.status_code))
+                        raise TestException(
+                            "Error downloading descriptor from '{}': {}".format(
+                                self.descriptor_url + self.nsd_filename,
+                                response.status_code,
+                            )
+                        )
                     file.write(response.content)
         if nsd_filename_path.endswith(".yaml"):
             headers = headers_yaml
@@ -1148,47 +2430,107 @@ class TestDeploy:
 
         if randint(0, 1) == 0:
             # nsd CREATE AND UPLOAD in one step:
-            engine.test("Onboard NSD in one step", "POST",
-                        "/nsd/v1/ns_descriptors_content" + self.qforce, headers, "@b" + nsd_filename_path, 201,
-                        r_headers_yaml_location_nsd, yaml)
+            engine.test(
+                "Onboard NSD in one step",
+                "POST",
+                "/nsd/v1/ns_descriptors_content" + self.qforce,
+                headers,
+                "@b" + nsd_filename_path,
+                201,
+                r_headers_yaml_location_nsd,
+                "yaml",
+            )
             self.nsd_id = engine.last_id
         else:
             # nsd CREATE AND UPLOAD ZIP
-            engine.test("Onboard NSD step 1", "POST", "/nsd/v1/ns_descriptors",
-                        headers_json, None, 201,
-                        {"Location": "/nsd/v1/ns_descriptors/", "Content-Type": "application/json"}, "json")
+            engine.test(
+                "Onboard NSD step 1",
+                "POST",
+                "/nsd/v1/ns_descriptors",
+                headers_json,
+                None,
+                201,
+                {
+                    "Location": "/nsd/v1/ns_descriptors/",
+                    "Content-Type": "application/json",
+                },
+                "json",
+            )
             self.nsd_id = engine.last_id
-            engine.test("Onboard NSD step 2 as ZIP", "PUT",
-                        "/nsd/v1/ns_descriptors/<>/nsd_content" + self.qforce,
-                        headers, "@b" + nsd_filename_path, 204, None, 0)
+            engine.test(
+                "Onboard NSD step 2 as ZIP",
+                "PUT",
+                "/nsd/v1/ns_descriptors/<>/nsd_content" + self.qforce,
+                headers,
+                "@b" + nsd_filename_path,
+                204,
+                None,
+                0,
+            )
 
         if self.descriptor_edit and "nsd" in self.descriptor_edit:
             # Modify NSD
-            engine.test("Edit NSD ", "PATCH",
-                        "/nsd/v1/ns_descriptors/{}".format(self.nsd_id),
-                        headers_yaml, self.descriptor_edit["nsd"], 204, None, None)
+            engine.test(
+                "Edit NSD ",
+                "PATCH",
+                "/nsd/v1/ns_descriptors/{}".format(self.nsd_id),
+                headers_yaml,
+                self.descriptor_edit["nsd"],
+                204,
+                None,
+                None,
+            )
 
     def delete_descriptors(self, engine):
         # delete descriptors
-        engine.test("Delete NSSD SOL005", "DELETE",
-                    "/nsd/v1/ns_descriptors/{}".format(self.nsd_id),
-                    headers_yaml, None, 204, None, 0)
+        engine.test(
+            "Delete NSSD SOL005",
+            "DELETE",
+            "/nsd/v1/ns_descriptors/{}".format(self.nsd_id),
+            headers_yaml,
+            None,
+            204,
+            None,
+            0,
+        )
         for vnfd_id in self.vnfds_id:
-            engine.test("Delete VNFD SOL005", "DELETE",
-                        "/vnfpkgm/v1/vnf_packages/{}".format(vnfd_id), headers_yaml, None, 204, None, 0)
+            engine.test(
+                "Delete VNFD SOL005",
+                "DELETE",
+                "/vnfpkgm/v1/vnf_packages/{}".format(vnfd_id),
+                headers_yaml,
+                None,
+                204,
+                None,
+                0,
+            )
 
     def instantiate(self, engine, ns_data):
         ns_data_text = yaml.safe_dump(ns_data, default_flow_style=True, width=256)
         # create NS Two steps
-        r = engine.test("Create NS step 1", "POST", "/nslcm/v1/ns_instances",
-                        headers_yaml, ns_data_text, (201, 202),
-                        {"Location": "nslcm/v1/ns_instances/", "Content-Type": "application/yaml"}, "yaml")
+        r = engine.test(
+            "Create NS step 1",
+            "POST",
+            "/nslcm/v1/ns_instances",
+            headers_yaml,
+            ns_data_text,
+            (201, 202),
+            {"Location": "nslcm/v1/ns_instances/", "Content-Type": "application/yaml"},
+            "yaml",
+        )
         if not r:
             return
         self.ns_id = engine.last_id
-        engine.test("Instantiate NS step 2", "POST",
-                    "/nslcm/v1/ns_instances/{}/instantiate".format(self.ns_id), headers_yaml, ns_data_text,
-                    (201, 202), r_headers_yaml_location_nslcmop, "yaml")
+        engine.test(
+            "Instantiate NS step 2",
+            "POST",
+            "/nslcm/v1/ns_instances/{}/instantiate".format(self.ns_id),
+            headers_yaml,
+            ns_data_text,
+            (201, 202),
+            r_headers_yaml_location_nslcmop,
+            "yaml",
+        )
         nslcmop_id = engine.last_id
 
         if test_osm:
@@ -1199,40 +2541,99 @@ class TestDeploy:
     def terminate(self, engine):
         # remove deployment
         if test_osm:
-            engine.test("Terminate NS", "POST", "/nslcm/v1/ns_instances/{}/terminate".format(self.ns_id), headers_yaml,
-                        None, (201, 202), r_headers_yaml_location_nslcmop, "yaml")
+            engine.test(
+                "Terminate NS",
+                "POST",
+                "/nslcm/v1/ns_instances/{}/terminate".format(self.ns_id),
+                headers_yaml,
+                None,
+                (201, 202),
+                r_headers_yaml_location_nslcmop,
+                "yaml",
+            )
             nslcmop2_id = engine.last_id
             # Wait until status is Ok
             engine.wait_operation_ready("ns", nslcmop2_id, timeout_deploy)
 
-            engine.test("Delete NS", "DELETE", "/nslcm/v1/ns_instances/{}".format(self.ns_id), headers_yaml, None,
-                        204, None, 0)
+            engine.test(
+                "Delete NS",
+                "DELETE",
+                "/nslcm/v1/ns_instances/{}".format(self.ns_id),
+                headers_yaml,
+                None,
+                204,
+                None,
+                0,
+            )
         else:
-            engine.test("Delete NS with FORCE", "DELETE", "/nslcm/v1/ns_instances/{}?FORCE=True".format(self.ns_id),
-                        headers_yaml, None, 204, None, 0)
+            engine.test(
+                "Delete NS with FORCE",
+                "DELETE",
+                "/nslcm/v1/ns_instances/{}?FORCE=True".format(self.ns_id),
+                headers_yaml,
+                None,
+                204,
+                None,
+                0,
+            )
 
         # check all it is deleted
-        engine.test("Check NS is deleted", "GET", "/nslcm/v1/ns_instances/{}".format(self.ns_id), headers_yaml, None,
-                    404, None, "yaml")
-        r = engine.test("Check NSLCMOPs are deleted", "GET",
-                        "/nslcm/v1/ns_lcm_op_occs?nsInstanceId={}".format(self.ns_id), headers_json, None,
-                        200, None, "json")
+        engine.test(
+            "Check NS is deleted",
+            "GET",
+            "/nslcm/v1/ns_instances/{}".format(self.ns_id),
+            headers_yaml,
+            None,
+            404,
+            None,
+            "yaml",
+        )
+        r = engine.test(
+            "Check NSLCMOPs are deleted",
+            "GET",
+            "/nslcm/v1/ns_lcm_op_occs?nsInstanceId={}".format(self.ns_id),
+            headers_json,
+            None,
+            200,
+            None,
+            "json",
+        )
         if not r:
             return
         nslcmops = r.json()
         if not isinstance(nslcmops, list) or nslcmops:
-            raise TestException("NS {} deleted but with ns_lcm_op_occ active: {}".format(self.ns_id, nslcmops))
-
-    def test_ns(self, engine, test_osm, commands=None, users=None, passwds=None, keys=None, timeout=0):
-
-        r = engine.test("GET VNFR IDs", "GET",
-                        "/nslcm/v1/ns_instances/{}".format(self.ns_id), headers_json, None,
-                        200, r_header_json, "json")
+            raise TestException(
+                "NS {} deleted but with ns_lcm_op_occ active: {}".format(
+                    self.ns_id, nslcmops
+                )
+            )
+
+    def test_ns(
+        self,
+        engine,
+        test_osm,
+        commands=None,
+        users=None,
+        passwds=None,
+        keys=None,
+        timeout=0,
+    ):
+
+        r = engine.test(
+            "GET VNFR IDs",
+            "GET",
+            "/nslcm/v1/ns_instances/{}".format(self.ns_id),
+            headers_json,
+            None,
+            200,
+            r_header_json,
+            "json",
+        )
         if not r:
             return
         ns_data = r.json()
 
-        vnfr_list = ns_data['constituent-vnfr-ref']
+        vnfr_list = ns_data["constituent-vnfr-ref"]
         time = 0
         _commands = commands if commands is not None else self.commands
         _users = users if users is not None else self.users
@@ -1242,9 +2643,16 @@ class TestDeploy:
 
         # vnfr_list=[d8272263-6bd3-4680-84ca-6a4be23b3f2d, 88b22e2f-994a-4b61-94fd-4a3c90de3dc4]
         for vnfr_id in vnfr_list:
-            r = engine.test("Get VNFR to get IP_ADDRESS", "GET",
-                            "/nslcm/v1/vnfrs/{}".format(vnfr_id), headers_json, None,
-                            200, r_header_json, "json")
+            r = engine.test(
+                "Get VNFR to get IP_ADDRESS",
+                "GET",
+                "/nslcm/v1/vnfrs/{}".format(vnfr_id),
+                headers_json,
+                None,
+                200,
+                r_header_json,
+                "json",
+            )
             if not r:
                 continue
             vnfr_data = r.json()
@@ -1252,16 +2660,23 @@ class TestDeploy:
             vnf_index = str(vnfr_data["member-vnf-index-ref"])
 
             ip_address = self.get_vnfr_ip(engine, vnf_index)
-            description = "Exec command='{}' at VNFR={} IP={}".format(_commands.get(vnf_index)[0], vnf_index,
-                                                                      ip_address)
+            description = "Exec command='{}' at VNFR={} IP={}".format(
+                _commands.get(vnf_index)[0], vnf_index, ip_address
+            )
             engine.step += 1
-            test_description = "{}{} {}".format(engine.test_name, engine.step, description)
+            test_description = "{}{} {}".format(
+                engine.test_name, engine.step, description
+            )
             logger.warning(test_description)
             while _timeout >= time:
-                result, message = self.do_checks([ip_address],
-                                                 vnf_index=vnfr_data["member-vnf-index-ref"],
-                                                 commands=_commands.get(vnf_index), user=_users.get(vnf_index),
-                                                 passwd=_passwds.get(vnf_index), key=_keys.get(vnf_index))
+                result, message = self.do_checks(
+                    [ip_address],
+                    vnf_index=vnfr_data["member-vnf-index-ref"],
+                    commands=_commands.get(vnf_index),
+                    user=_users.get(vnf_index),
+                    passwd=_passwds.get(vnf_index),
+                    key=_keys.get(vnf_index),
+                )
                 if result == 1:
                     engine.passed_tests += 1
                     logger.debug(message)
@@ -1279,7 +2694,9 @@ class TestDeploy:
                     logger.error(message)
             else:
                 engine.failed_tests += 1
-                logger.error("VNFR {} has not mgmt address. Check failed".format(vnf_index))
+                logger.error(
+                    "VNFR {} has no mgmt address. Check failed".format(vnf_index)
+                )
 
     def do_checks(self, ip, vnf_index, commands=[], user=None, passwd=None, key=None):
         try:
@@ -1288,8 +2705,10 @@ class TestDeploy:
             from pssh.utils import load_private_key
             from ssh2 import exceptions as ssh2Exception
         except ImportError as e:
-            logger.critical("Package <pssh> or/and <urllib3> is not installed. Please add them with 'pip3 install "
-                            "parallel-ssh urllib3': {}".format(e))
+            logger.critical(
+                "Package <pssh> and/or <urllib3> is not installed. Please add them with 'pip3 install "
+                "parallel-ssh urllib3': {}".format(e)
+            )
             return -1, "install needed packages 'pip3 install parallel-ssh urllib3'"
         urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)
         try:
@@ -1302,18 +2721,32 @@ class TestDeploy:
             else:
                 pkey = None
 
-            client = ParallelSSHClient(ip, user=user, password=passwd, pkey=pkey, proxy_host=p_host,
-                                       proxy_user=p_user, proxy_password=p_password, timeout=10, num_retries=0)
+            client = ParallelSSHClient(
+                ip,
+                user=user,
+                password=passwd,
+                pkey=pkey,
+                proxy_host=p_host,
+                proxy_user=p_user,
+                proxy_password=p_password,
+                timeout=10,
+                num_retries=0,
+            )
             for cmd in commands:
                 output = client.run_command(cmd)
                 client.join(output)
                 if output[ip[0]].exit_code:
-                    return -1, "VNFR {} command '{}' returns error: '{}'".format(ip[0], cmd,
-                                                                                 "\n".join(output[ip[0]].stderr))
+                    return -1, "VNFR {} command '{}' returns error: '{}'".format(
+                        ip[0], cmd, "\n".join(output[ip[0]].stderr)
+                    )
                 else:
                     return 1, "VNFR {} command '{}' successful".format(ip[0], cmd)
-        except (ssh2Exception.ChannelFailure, ssh2Exception.SocketDisconnectError, ssh2Exception.SocketTimeout,
-                ssh2Exception.SocketRecvError) as e:
+        except (
+            ssh2Exception.ChannelFailure,
+            ssh2Exception.SocketDisconnectError,
+            ssh2Exception.SocketTimeout,
+            ssh2Exception.SocketRecvError,
+        ) as e:
             return 0, "Timeout accessing the VNFR {}: {}".format(ip[0], str(e))
         except Exception as e:
             return -1, "ERROR checking the VNFR {}: {}".format(ip[0], str(e))
@@ -1336,8 +2769,12 @@ class TestDeploy:
 
         # create real VIM if not exist
         self.vim_id = engine.get_create_vim(test_osm)
-        ns_data = {"nsDescription": "default description", "nsName": nsname, "nsdId": self.nsd_id,
-                   "vimAccountId": self.vim_id}
+        ns_data = {
+            "nsDescription": "default description",
+            "nsName": nsname,
+            "nsdId": self.nsd_id,
+            "vimAccountId": self.vim_id,
+        }
         if self.ns_params:
             ns_data.update(self.ns_params)
         if test_params and test_params.get("ns-config"):
@@ -1348,7 +2785,9 @@ class TestDeploy:
         self.instantiate(engine, ns_data)
 
         if manual_check:
-            input('NS has been deployed. Perform manual check and press enter to resume')
+            input(
+                "NS has been deployed. Perform manual check and press enter to resume"
+            )
         if test_osm and self.commands:
             self.test_ns(engine, test_osm)
         self.additional_operations(engine, test_osm, manual_check)
@@ -1363,12 +2802,20 @@ class TestDeploy:
     def get_vnfr_ip(self, engine, vnfr_index_wanted):
         # If the IP address list has been obtained before, it has been stored in 'vnfr_ip_list'
         ip = self.vnfr_ip_list.get(vnfr_index_wanted, "")
-        if (ip):
+        if ip:
             return self.get_first_ip(ip)
-        r = engine.test("Get VNFR to get IP_ADDRESS", "GET",
-                        "/nslcm/v1/vnfrs?member-vnf-index-ref={}&nsr-id-ref={}".format(
-                            vnfr_index_wanted, self.ns_id), headers_json, None,
-                        200, r_header_json, "json")
+        r = engine.test(
+            "Get VNFR to get IP_ADDRESS",
+            "GET",
+            "/nslcm/v1/vnfrs?member-vnf-index-ref={}&nsr-id-ref={}".format(
+                vnfr_index_wanted, self.ns_id
+            ),
+            headers_json,
+            None,
+            200,
+            r_header_json,
+            "json",
+        )
         if not r:
             return ""
         vnfr_data = r.json()
@@ -1390,32 +2837,76 @@ class TestDeployHackfestCirros(TestDeploy):
         self.test_name = "CIRROS"
         self.vnfd_filenames = ("cirros_vnf.tar.gz",)
         self.nsd_filename = "cirros_2vnf_ns.tar.gz"
-        self.commands = {'1': ['ls -lrt', ], '2': ['ls -lrt', ]}
-        self.users = {'1': "cirros", '2': "cirros"}
-        self.passwords = {'1': "cubswin:)", '2': "cubswin:)"}
+        self.commands = {
+            "1": [
+                "ls -lrt",
+            ],
+            "2": [
+                "ls -lrt",
+            ],
+        }
+        self.users = {"1": "cirros", "2": "cirros"}
+        self.passwords = {"1": "cubswin:)", "2": "cubswin:)"}
 
     def terminate(self, engine):
         # Delete in one step, overriding the normal two-step flow of TestDeploy (terminate, then delete)
         if test_osm:
-            engine.test("Terminate and delete NS in one step", "DELETE", "/nslcm/v1/ns_instances_content/{}".
-                        format(self.ns_id), headers_yaml, None, 202, None, "yaml")
-
-            engine .wait_until_delete("/nslcm/v1/ns_instances/{}".format(self.ns_id), timeout_deploy)
+            engine.test(
+                "Terminate and delete NS in one step",
+                "DELETE",
+                "/nslcm/v1/ns_instances_content/{}".format(self.ns_id),
+                headers_yaml,
+                None,
+                202,
+                None,
+                "yaml",
+            )
+
+            engine.wait_until_delete(
+                "/nslcm/v1/ns_instances/{}".format(self.ns_id), timeout_deploy
+            )
         else:
-            engine.test("Delete NS with FORCE", "DELETE", "/nslcm/v1/ns_instances/{}?FORCE=True".format(self.ns_id),
-                        headers_yaml, None, 204, None, 0)
+            engine.test(
+                "Delete NS with FORCE",
+                "DELETE",
+                "/nslcm/v1/ns_instances/{}?FORCE=True".format(self.ns_id),
+                headers_yaml,
+                None,
+                204,
+                None,
+                0,
+            )
 
         # check that everything is deleted
-        engine.test("Check NS is deleted", "GET", "/nslcm/v1/ns_instances/{}".format(self.ns_id), headers_yaml, None,
-                    404, None, "yaml")
-        r = engine.test("Check NSLCMOPs are deleted", "GET",
-                        "/nslcm/v1/ns_lcm_op_occs?nsInstanceId={}".format(self.ns_id), headers_json, None,
-                        200, None, "json")
+        engine.test(
+            "Check NS is deleted",
+            "GET",
+            "/nslcm/v1/ns_instances/{}".format(self.ns_id),
+            headers_yaml,
+            None,
+            404,
+            None,
+            "yaml",
+        )
+        r = engine.test(
+            "Check NSLCMOPs are deleted",
+            "GET",
+            "/nslcm/v1/ns_lcm_op_occs?nsInstanceId={}".format(self.ns_id),
+            headers_json,
+            None,
+            200,
+            None,
+            "json",
+        )
         if not r:
             return
         nslcmops = r.json()
         if not isinstance(nslcmops, list) or nslcmops:
-            raise TestException("NS {} deleted but with ns_lcm_op_occ active: {}".format(self.ns_id, nslcmops))
+            raise TestException(
+                "NS {} deleted but with ns_lcm_op_occ active: {}".format(
+                    self.ns_id, nslcmops
+                )
+            )
 
 
 class TestDeployHackfest1(TestDeploy):
@@ -1432,7 +2923,9 @@ class TestDeployHackfest1(TestDeploy):
 
 
 class TestDeployHackfestCirrosScaling(TestDeploy):
-    description = "Load and deploy Hackfest cirros_2vnf_ns example with scaling modifications"
+    description = (
+        "Load and deploy Hackfest cirros_2vnf_ns example with scaling modifications"
+    )
 
     def __init__(self):
         super().__init__()
@@ -1442,17 +2935,14 @@ class TestDeployHackfestCirrosScaling(TestDeploy):
         # Modify VNFD to add scaling and count=2
         self.descriptor_edit = {
             "vnfd0": {
-                "vdu": {
-                    "$id: 'cirros_vnfd-VM'": {"count": 2}
-                },
-                "scaling-group-descriptor": [{
-                    "name": "scale_cirros",
-                    "max-instance-count": 2,
-                    "vdu": [{
-                        "vdu-id-ref": "cirros_vnfd-VM",
-                        "count": 2
-                    }]
-                }]
+                "vdu": {"$id: 'cirros_vnfd-VM'": {"count": 2}},
+                "scaling-group-descriptor": [
+                    {
+                        "name": "scale_cirros",
+                        "max-instance-count": 2,
+                        "vdu": [{"vdu-id-ref": "cirros_vnfd-VM", "count": 2}],
+                    }
+                ],
             }
         }
 
@@ -1460,37 +2950,64 @@ class TestDeployHackfestCirrosScaling(TestDeploy):
         if not test_osm:
             return
         # 2 perform scale out twice
-        payload = '{scaleType: SCALE_VNF, scaleVnfData: {scaleVnfType: SCALE_OUT, scaleByStepData: ' \
-                  '{scaling-group-descriptor: scale_cirros, member-vnf-index: "1"}}}'
+        payload = (
+            "{scaleType: SCALE_VNF, scaleVnfData: {scaleVnfType: SCALE_OUT, scaleByStepData: "
+            '{scaling-group-descriptor: scale_cirros, member-vnf-index: "1"}}}'
+        )
         for i in range(0, 2):
-            engine.test("Execute scale action over NS", "POST",
-                        "/nslcm/v1/ns_instances/{}/scale".format(self.ns_id), headers_yaml, payload,
-                        (201, 202), r_headers_yaml_location_nslcmop, "yaml")
+            engine.test(
+                "Execute scale action over NS",
+                "POST",
+                "/nslcm/v1/ns_instances/{}/scale".format(self.ns_id),
+                headers_yaml,
+                payload,
+                (201, 202),
+                r_headers_yaml_location_nslcmop,
+                "yaml",
+            )
             nslcmop2_scale_out = engine.last_id
             engine.wait_operation_ready("ns", nslcmop2_scale_out, timeout_deploy)
             if manual_check:
-                input('NS scale out done. Check that two more vdus are there')
+                input("NS scale out done. Check that two more vdus are there")
             # TODO check automatic
 
         # 2 perform scale in
-        payload = '{scaleType: SCALE_VNF, scaleVnfData: {scaleVnfType: SCALE_IN, scaleByStepData: ' \
-                  '{scaling-group-descriptor: scale_cirros, member-vnf-index: "1"}}}'
+        payload = (
+            "{scaleType: SCALE_VNF, scaleVnfData: {scaleVnfType: SCALE_IN, scaleByStepData: "
+            '{scaling-group-descriptor: scale_cirros, member-vnf-index: "1"}}}'
+        )
         for i in range(0, 2):
-            engine.test("Execute scale IN action over NS", "POST",
-                        "/nslcm/v1/ns_instances/{}/scale".format(self.ns_id), headers_yaml, payload,
-                        (201, 202), r_headers_yaml_location_nslcmop, "yaml")
+            engine.test(
+                "Execute scale IN action over NS",
+                "POST",
+                "/nslcm/v1/ns_instances/{}/scale".format(self.ns_id),
+                headers_yaml,
+                payload,
+                (201, 202),
+                r_headers_yaml_location_nslcmop,
+                "yaml",
+            )
             nslcmop2_scale_in = engine.last_id
             engine.wait_operation_ready("ns", nslcmop2_scale_in, timeout_deploy)
             if manual_check:
-                input('NS scale in done. Check that two less vdus are there')
+                input("NS scale in done. Check that two less vdus are there")
             # TODO check automatic
 
         # perform scale in that must fail because the limit has been reached
-        engine.test("Execute scale IN out of limit action over NS", "POST",
-                    "/nslcm/v1/ns_instances/{}/scale".format(self.ns_id), headers_yaml, payload,
-                    (201, 202), r_headers_yaml_location_nslcmop, "yaml")
+        engine.test(
+            "Execute scale IN out of limit action over NS",
+            "POST",
+            "/nslcm/v1/ns_instances/{}/scale".format(self.ns_id),
+            headers_yaml,
+            payload,
+            (201, 202),
+            r_headers_yaml_location_nslcmop,
+            "yaml",
+        )
         nslcmop2_scale_in = engine.last_id
-        engine.wait_operation_ready("ns", nslcmop2_scale_in, timeout_deploy, expected_fail=True)
+        engine.wait_operation_ready(
+            "ns", nslcmop2_scale_in, timeout_deploy, expected_fail=True
+        )
 
 
 class TestDeployIpMac(TestDeploy):
@@ -1499,13 +3016,22 @@ class TestDeployIpMac(TestDeploy):
     def __init__(self):
         super().__init__()
         self.test_name = "SetIpMac"
-        self.vnfd_filenames = ("vnfd_2vdu_set_ip_mac2.yaml", "vnfd_2vdu_set_ip_mac.yaml")
+        self.vnfd_filenames = (
+            "vnfd_2vdu_set_ip_mac2.yaml",
+            "vnfd_2vdu_set_ip_mac.yaml",
+        )
         self.nsd_filename = "scenario_2vdu_set_ip_mac.yaml"
-        self.descriptor_url = \
-            "https://osm.etsi.org/gitweb/?p=osm/RO.git;a=blob_plain;f=test/RO_tests/v3_2vdu_set_ip_mac/"
-        self.commands = {'1': ['ls -lrt', ], '2': ['ls -lrt', ]}
-        self.users = {'1': "osm", '2': "osm"}
-        self.passwords = {'1': "osm4u", '2': "osm4u"}
+        self.descriptor_url = "https://osm.etsi.org/gitweb/?p=osm/RO.git;a=blob_plain;f=test/RO_tests/v3_2vdu_set_ip_mac/"
+        self.commands = {
+            "1": [
+                "ls -lrt",
+            ],
+            "2": [
+                "ls -lrt",
+            ],
+        }
+        self.users = {"1": "osm", "2": "osm"}
+        self.passwords = {"1": "osm4u", "2": "osm4u"}
         self.timeout = 360
 
     def run(self, engine, test_osm, manual_check, test_params=None):
@@ -1517,11 +3043,14 @@ class TestDeployIpMac(TestDeploy):
                     "member-vnf-index": "1",
                     "internal-vld": [
                         {
-                            "name": "internal_vld1",   # net_internal
+                            "name": "internal_vld1",  # net_internal
                             "ip-profile": {
                                 "ip-version": "ipv4",
                                 "subnet-address": "10.9.8.0/24",
-                                "dhcp-params": {"count": 100, "start-address": "10.9.8.100"}
+                                "dhcp-params": {
+                                    "count": 100,
+                                    "start-address": "10.9.8.100",
+                                },
                             },
                             "internal-connection-point": [
                                 {
@@ -1531,11 +3060,10 @@ class TestDeployIpMac(TestDeploy):
                                 {
                                     "id-ref": "eth3",
                                     "ip-address": "10.9.8.3",
-                                }
-                            ]
+                                },
+                            ],
                         },
                     ],
-
                     "vdu": [
                         {
                             "id": "VM1",
@@ -1544,10 +3072,7 @@ class TestDeployIpMac(TestDeploy):
                                 #     "name": "iface11",
                                 #     "floating-ip-required": True,
                                 # },
-                                {
-                                    "name": "iface13",
-                                    "mac-address": "52:33:44:55:66:13"
-                                },
+                                {"name": "iface13", "mac-address": "52:33:44:55:66:13"},
                             ],
                         },
                         {
@@ -1556,16 +3081,21 @@ class TestDeployIpMac(TestDeploy):
                                 {
                                     "name": "iface21",
                                     "ip-address": "10.31.31.22",
-                                    "mac-address": "52:33:44:55:66:21"
+                                    "mac-address": "52:33:44:55:66:21",
                                 },
                             ],
                         },
-                    ]
+                    ],
                 },
             ]
         }
 
-        super().run(engine, test_osm, manual_check, test_params={"ns-config": instantiation_params})
+        super().run(
+            engine,
+            test_osm,
+            manual_check,
+            test_params={"ns-config": instantiation_params},
+        )
 
 
 class TestDeployHackfest4(TestDeploy):
@@ -1577,9 +3107,16 @@ class TestDeployHackfest4(TestDeploy):
         self.vnfd_filenames = ("hackfest_4_vnfd.tar.gz",)
         self.nsd_filename = "hackfest_4_nsd.tar.gz"
         self.uses_configuration = True
-        self.commands = {'1': ['ls -lrt', ], '2': ['ls -lrt', ]}
-        self.users = {'1': "ubuntu", '2': "ubuntu"}
-        self.passwords = {'1': "osm4u", '2': "osm4u"}
+        self.commands = {
+            "1": [
+                "ls -lrt",
+            ],
+            "2": [
+                "ls -lrt",
+            ],
+        }
+        self.users = {"1": "ubuntu", "2": "ubuntu"}
+        self.passwords = {"1": "osm4u", "2": "osm4u"}
         # Modify VNFD to add scaling
         # self.descriptor_edit = {
         #     "vnfd0": {
@@ -1634,9 +3171,12 @@ class TestDeployHackfest3Charmed(TestDeploy):
         self.vnfd_filenames = ("hackfest_3charmed_vnfd.tar.gz",)
         self.nsd_filename = "hackfest_3charmed_nsd.tar.gz"
         self.uses_configuration = True
-        self.commands = {'1': ['ls -lrt /home/ubuntu/first-touch'], '2': ['ls -lrt /home/ubuntu/first-touch']}
-        self.users = {'1': "ubuntu", '2': "ubuntu"}
-        self.passwords = {'1': "osm4u", '2': "osm4u"}
+        self.commands = {
+            "1": ["ls -lrt /home/ubuntu/first-touch"],
+            "2": ["ls -lrt /home/ubuntu/first-touch"],
+        }
+        self.users = {"1": "ubuntu", "2": "ubuntu"}
+        self.passwords = {"1": "osm4u", "2": "osm4u"}
         self.descriptor_edit = {
             "vnfd0": yaml.safe_load(
                 """
@@ -1657,7 +3197,8 @@ class TestDeployHackfest3Charmed(TestDeploy):
                         parameter:
                         -   name: filename
                             value: '/home/ubuntu/last-touch2'
-                """)
+                """
+            )
         }
 
     def additional_operations(self, engine, test_osm, manual_check):
@@ -1666,9 +3207,16 @@ class TestDeployHackfest3Charmed(TestDeploy):
         # 1 perform action
         vnfr_index_selected = "2"
         payload = '{member_vnf_index: "2", primitive: touch, primitive_params: { filename: /home/ubuntu/OSMTESTNBI }}'
-        engine.test("Exec service primitive over NS", "POST",
-                    "/nslcm/v1/ns_instances/{}/action".format(self.ns_id), headers_yaml, payload,
-                    (201, 202), r_headers_yaml_location_nslcmop, "yaml")
+        engine.test(
+            "Exec service primitive over NS",
+            "POST",
+            "/nslcm/v1/ns_instances/{}/action".format(self.ns_id),
+            headers_yaml,
+            payload,
+            (201, 202),
+            r_headers_yaml_location_nslcmop,
+            "yaml",
+        )
         nslcmop2_action = engine.last_id
         # Wait until status is Ok
         engine.wait_operation_ready("ns", nslcmop2_action, timeout_deploy)
@@ -1676,10 +3224,17 @@ class TestDeployHackfest3Charmed(TestDeploy):
         if manual_check:
             input(
                 "NS service primitive has been executed."
-                "Check that file /home/ubuntu/OSMTESTNBI is present at {}".
-                format(vnfr_ip))
+                "Check that file /home/ubuntu/OSMTESTNBI is present at {}".format(
+                    vnfr_ip
+                )
+            )
         if test_osm:
-            commands = {'1': [''], '2': ['ls -lrt /home/ubuntu/OSMTESTNBI', ]}
+            commands = {
+                "1": [""],
+                "2": [
+                    "ls -lrt /home/ubuntu/OSMTESTNBI",
+                ],
+            }
             self.test_ns(engine, test_osm, commands=commands)
 
         # # 2 perform scale out
@@ -1708,8 +3263,10 @@ class TestDeployHackfest3Charmed(TestDeploy):
 
 
 class TestDeployHackfest3Charmed2(TestDeployHackfest3Charmed):
-    description = "Load and deploy Hackfest 3charmed_ns example modified version of descriptors to have dots in " \
-                  "ids and member-vnf-index."
+    description = (
+        "Load and deploy Hackfest 3charmed_ns example with descriptors modified to have dots in "
+        "ids and member-vnf-index."
+    )
 
     def __init__(self):
         super().__init__()
@@ -1719,18 +3276,20 @@ class TestDeployHackfest3Charmed2(TestDeployHackfest3Charmed):
             "vnfd0": {
                 "vdu": {
                     "$[0]": {
-                        "interface": {"$[0]": {"external-connection-point-ref": "pdu-mgmt"}}
+                        "interface": {
+                            "$[0]": {"external-connection-point-ref": "pdu-mgmt"}
+                        }
                     },
-                    "$[1]": None
+                    "$[1]": None,
                 },
                 "vnf-configuration": None,
                 "connection-point": {
                     "$[0]": {
                         "id": "pdu-mgmt",
                         "name": "pdu-mgmt",
-                        "short-name": "pdu-mgmt"
+                        "short-name": "pdu-mgmt",
                     },
-                    "$[1]": None
+                    "$[1]": None,
                 },
                 "mgmt-interface": {"cp": "pdu-mgmt"},
                 "description": "A vnf single vdu to be used as PDU",
@@ -1741,11 +3300,10 @@ class TestDeployHackfest3Charmed2(TestDeployHackfest3Charmed):
                         "name": "pdu_internal",
                         "internal-connection-point": {"$[1]": None},
                         "short-name": "pdu_internal",
-                        "type": "ELAN"
+                        "type": "ELAN",
                     }
-                }
+                },
             },
-
             # Modify NSD accordingly
             "nsd": {
                 "constituent-vnfd": {
@@ -1766,13 +3324,13 @@ class TestDeployHackfest3Charmed2(TestDeployHackfest3Charmed):
                                 "vnfd-connection-point-ref": "pdu-mgmt",
                                 "vnfd-id-ref": "vdu-as-pdu",
                             },
-                            "$[1]": None
+                            "$[1]": None,
                         },
-                        "type": "ELAN"
+                        "type": "ELAN",
                     },
                     "$[1]": None,
-                }
-            }
+                },
+            },
         }
 
 
@@ -1782,7 +3340,10 @@ class TestDeployHackfest3Charmed3(TestDeployHackfest3Charmed):
     def __init__(self):
         super().__init__()
         self.test_name = "HACKFEST3v3-"
-        self.commands = {'1': ['ls -lrt /home/ubuntu/first-touch-1'], '2': ['ls -lrt /home/ubuntu/first-touch-2']}
+        self.commands = {
+            "1": ["ls -lrt /home/ubuntu/first-touch-1"],
+            "2": ["ls -lrt /home/ubuntu/first-touch-2"],
+        }
         self.descriptor_edit = {
             "vnfd0": yaml.load(
                 """
@@ -1834,14 +3395,25 @@ class TestDeployHackfest3Charmed3(TestDeployHackfest3Charmed):
                                 "$[0]":
                                     default-value: "<touch_filename2>"
                 """,
-                Loader=yaml.Loader)
+                Loader=yaml.Loader,
+            )
         }
         self.ns_params = {
             "additionalParamsForVnf": [
-                {"member-vnf-index": "1", "additionalParams": {"touch_filename": "/home/ubuntu/first-touch-1",
-                                                               "touch_filename2": "/home/ubuntu/second-touch-1"}},
-                {"member-vnf-index": "2", "additionalParams": {"touch_filename": "/home/ubuntu/first-touch-2",
-                                                               "touch_filename2": "/home/ubuntu/second-touch-2"}},
+                {
+                    "member-vnf-index": "1",
+                    "additionalParams": {
+                        "touch_filename": "/home/ubuntu/first-touch-1",
+                        "touch_filename2": "/home/ubuntu/second-touch-1",
+                    },
+                },
+                {
+                    "member-vnf-index": "2",
+                    "additionalParams": {
+                        "touch_filename": "/home/ubuntu/first-touch-2",
+                        "touch_filename2": "/home/ubuntu/second-touch-2",
+                    },
+                },
             ]
         }
 
@@ -1851,30 +3423,56 @@ class TestDeployHackfest3Charmed3(TestDeployHackfest3Charmed):
             return
 
         # 2 perform scale out
-        payload = '{scaleType: SCALE_VNF, scaleVnfData: {scaleVnfType: SCALE_OUT, scaleByStepData: ' \
-                  '{scaling-group-descriptor: scale_dataVM, member-vnf-index: "1"}}}'
-        engine.test("Execute scale action over NS", "POST",
-                    "/nslcm/v1/ns_instances/{}/scale".format(self.ns_id), headers_yaml, payload,
-                    (201, 202), r_headers_yaml_location_nslcmop, "yaml")
+        payload = (
+            "{scaleType: SCALE_VNF, scaleVnfData: {scaleVnfType: SCALE_OUT, scaleByStepData: "
+            '{scaling-group-descriptor: scale_dataVM, member-vnf-index: "1"}}}'
+        )
+        engine.test(
+            "Execute scale action over NS",
+            "POST",
+            "/nslcm/v1/ns_instances/{}/scale".format(self.ns_id),
+            headers_yaml,
+            payload,
+            (201, 202),
+            r_headers_yaml_location_nslcmop,
+            "yaml",
+        )
         nslcmop2_scale_out = engine.last_id
         engine.wait_operation_ready("ns", nslcmop2_scale_out, timeout_deploy)
         if manual_check:
-            input('NS scale out done. Check that file /home/ubuntu/second-touch-1 is present and new VM is created')
+            input(
+                "NS scale out done. Check that file /home/ubuntu/second-touch-1 is present and new VM is created"
+            )
         if test_osm:
-            commands = {'1': ['ls -lrt /home/ubuntu/second-touch-1', ]}
+            commands = {
+                "1": [
+                    "ls -lrt /home/ubuntu/second-touch-1",
+                ]
+            }
             self.test_ns(engine, test_osm, commands=commands)
             # TODO check automatic connection to scaled VM
 
         # 2 perform scale in
-        payload = '{scaleType: SCALE_VNF, scaleVnfData: {scaleVnfType: SCALE_IN, scaleByStepData: ' \
-                  '{scaling-group-descriptor: scale_dataVM, member-vnf-index: "1"}}}'
-        engine.test("Execute scale action over NS", "POST",
-                    "/nslcm/v1/ns_instances/{}/scale".format(self.ns_id), headers_yaml, payload,
-                    (201, 202), r_headers_yaml_location_nslcmop, "yaml")
+        payload = (
+            "{scaleType: SCALE_VNF, scaleVnfData: {scaleVnfType: SCALE_IN, scaleByStepData: "
+            '{scaling-group-descriptor: scale_dataVM, member-vnf-index: "1"}}}'
+        )
+        engine.test(
+            "Execute scale action over NS",
+            "POST",
+            "/nslcm/v1/ns_instances/{}/scale".format(self.ns_id),
+            headers_yaml,
+            payload,
+            (201, 202),
+            r_headers_yaml_location_nslcmop,
+            "yaml",
+        )
         nslcmop2_scale_in = engine.last_id
         engine.wait_operation_ready("ns", nslcmop2_scale_in, timeout_deploy)
         if manual_check:
-            input('NS scale in done. Check that file /home/ubuntu/second-touch-1 is updated and new VM is deleted')
+            input(
+                "NS scale in done. Check that file /home/ubuntu/second-touch-1 is updated and new VM is deleted"
+            )
         # TODO check automatic
 
 
@@ -1884,54 +3482,80 @@ class TestDeploySimpleCharm(TestDeploy):
     def __init__(self):
         super().__init__()
         self.test_name = "HACKFEST-SIMPLE"
-        self.descriptor_url = "https://osm-download.etsi.org/ftp/osm-4.0-four/4th-hackfest/packages/"
+        self.descriptor_url = (
+            "https://osm-download.etsi.org/ftp/osm-4.0-four/4th-hackfest/packages/"
+        )
         self.vnfd_filenames = ("hackfest_simplecharm_vnf.tar.gz",)
         self.nsd_filename = "hackfest_simplecharm_ns.tar.gz"
         self.uses_configuration = True
-        self.commands = {'1': [''], '2': ['ls -lrt /home/ubuntu/first-touch', ]}
-        self.users = {'1': "ubuntu", '2': "ubuntu"}
-        self.passwords = {'1': "osm4u", '2': "osm4u"}
+        self.commands = {
+            "1": [""],
+            "2": [
+                "ls -lrt /home/ubuntu/first-touch",
+            ],
+        }
+        self.users = {"1": "ubuntu", "2": "ubuntu"}
+        self.passwords = {"1": "osm4u", "2": "osm4u"}
 
 
 class TestDeploySimpleCharm2(TestDeploySimpleCharm):
-    description = "Deploy hackfest-4 hackfest_simplecharm example changing naming to contain dots on ids and " \
-                  "vnf-member-index"
+    description = (
+        "Deploy hackfest-4 hackfest_simplecharm example changing naming to contain dots in ids and "
+        "vnf-member-index"
+    )
 
     def __init__(self):
         super().__init__()
         self.test_name = "HACKFEST-SIMPLE2-"
         self.qforce = "?FORCE=True"
         self.descriptor_edit = {
-            "vnfd0": {
-                "id": "hackfest.simplecharm.vnf"
-            },
-
+            "vnfd0": {"id": "hackfest.simplecharm.vnf"},
             "nsd": {
                 "id": "hackfest.simplecharm.ns",
                 "constituent-vnfd": {
-                    "$[0]": {"vnfd-id-ref": "hackfest.simplecharm.vnf", "member-vnf-index": "$1"},
-                    "$[1]": {"vnfd-id-ref": "hackfest.simplecharm.vnf", "member-vnf-index": "$2"},
+                    "$[0]": {
+                        "vnfd-id-ref": "hackfest.simplecharm.vnf",
+                        "member-vnf-index": "$1",
+                    },
+                    "$[1]": {
+                        "vnfd-id-ref": "hackfest.simplecharm.vnf",
+                        "member-vnf-index": "$2",
+                    },
                 },
                 "vld": {
                     "$[0]": {
-                        "vnfd-connection-point-ref": {"$[0]": {"member-vnf-index-ref": "$1",
-                                                               "vnfd-id-ref": "hackfest.simplecharm.vnf"},
-                                                      "$[1]": {"member-vnf-index-ref": "$2",
-                                                               "vnfd-id-ref": "hackfest.simplecharm.vnf"}},
+                        "vnfd-connection-point-ref": {
+                            "$[0]": {
+                                "member-vnf-index-ref": "$1",
+                                "vnfd-id-ref": "hackfest.simplecharm.vnf",
+                            },
+                            "$[1]": {
+                                "member-vnf-index-ref": "$2",
+                                "vnfd-id-ref": "hackfest.simplecharm.vnf",
+                            },
+                        },
                     },
                     "$[1]": {
-                        "vnfd-connection-point-ref": {"$[0]": {"member-vnf-index-ref": "$1",
-                                                               "vnfd-id-ref": "hackfest.simplecharm.vnf"},
-                                                      "$[1]": {"member-vnf-index-ref": "$2",
-                                                               "vnfd-id-ref": "hackfest.simplecharm.vnf"}},
+                        "vnfd-connection-point-ref": {
+                            "$[0]": {
+                                "member-vnf-index-ref": "$1",
+                                "vnfd-id-ref": "hackfest.simplecharm.vnf",
+                            },
+                            "$[1]": {
+                                "member-vnf-index-ref": "$2",
+                                "vnfd-id-ref": "hackfest.simplecharm.vnf",
+                            },
+                        },
                     },
-                }
-            }
+                },
+            },
         }
 
 
 class TestDeploySingleVdu(TestDeployHackfest3Charmed):
-    description = "Generate a single VDU base on editing Hackfest3Charmed descriptors and deploy"
+    description = (
+        "Generate a single VDU based on editing Hackfest3Charmed descriptors and deploy"
+    )
 
     def __init__(self):
         super().__init__()
@@ -1942,18 +3566,20 @@ class TestDeploySingleVdu(TestDeployHackfest3Charmed):
             "vnfd0": {
                 "vdu": {
                     "$[0]": {
-                        "interface": {"$[0]": {"external-connection-point-ref": "pdu-mgmt"}}
+                        "interface": {
+                            "$[0]": {"external-connection-point-ref": "pdu-mgmt"}
+                        }
                     },
-                    "$[1]": None
+                    "$[1]": None,
                 },
                 "vnf-configuration": None,
                 "connection-point": {
                     "$[0]": {
                         "id": "pdu-mgmt",
                         "name": "pdu-mgmt",
-                        "short-name": "pdu-mgmt"
+                        "short-name": "pdu-mgmt",
                     },
-                    "$[1]": None
+                    "$[1]": None,
                 },
                 "mgmt-interface": {"cp": "pdu-mgmt"},
                 "description": "A vnf single vdu to be used as PDU",
@@ -1964,11 +3590,10 @@ class TestDeploySingleVdu(TestDeployHackfest3Charmed):
                         "name": "pdu_internal",
                         "internal-connection-point": {"$[1]": None},
                         "short-name": "pdu_internal",
-                        "type": "ELAN"
+                        "type": "ELAN",
                     }
-                }
+                },
             },
-
             # Modify NSD accordingly
             "nsd": {
                 "constituent-vnfd": {
@@ -1989,18 +3614,20 @@ class TestDeploySingleVdu(TestDeployHackfest3Charmed):
                                 "vnfd-connection-point-ref": "pdu-mgmt",
                                 "vnfd-id-ref": "vdu-as-pdu",
                             },
-                            "$[1]": None
+                            "$[1]": None,
                         },
-                        "type": "ELAN"
+                        "type": "ELAN",
                     },
                     "$[1]": None,
-                }
-            }
+                },
+            },
         }
 
 
 class TestDeployHnfd(TestDeployHackfest3Charmed):
-    description = "Generate a HNFD base on editing Hackfest3Charmed descriptors and deploy"
+    description = (
+        "Generate an HNFD based on editing Hackfest3Charmed descriptors and deploy"
+    )
 
     def __init__(self):
         super().__init__()
@@ -2047,9 +3674,12 @@ class TestDeployHnfd(TestDeployHackfest3Charmed):
                     "mac-address": "mac_address",
                     "vim-network-name": "pdu_internal",  # OSMNBITEST-PDU-pdu_internal
                 },
-            ]
+            ],
         }
-        self.vnfd_filenames = ("hackfest_3charmed_vnfd.tar.gz", "hackfest_3charmed_vnfd.tar.gz")
+        self.vnfd_filenames = (
+            "hackfest_3charmed_vnfd.tar.gz",
+            "hackfest_3charmed_vnfd.tar.gz",
+        )
 
         self.descriptor_edit = {
             "vnfd0": {
@@ -2062,19 +3692,21 @@ class TestDeployHnfd(TestDeployHackfest3Charmed):
                         "interface": {
                             "$[0]": {"name": "mgmt-iface"},
                             "$[1]": {"name": "pdu-iface-internal"},
-                        }
+                        },
                     }
-                }
+                },
             },
             "nsd": {
-                "constituent-vnfd": {
-                    "$[1]": {"vnfd-id-ref": "hfnd1"}
-                },
+                "constituent-vnfd": {"$[1]": {"vnfd-id-ref": "hfnd1"}},
                 "vld": {
-                    "$[0]": {"vnfd-connection-point-ref": {"$[1]": {"vnfd-id-ref": "hfnd1"}}},
-                    "$[1]": {"vnfd-connection-point-ref": {"$[1]": {"vnfd-id-ref": "hfnd1"}}}
-                }
-            }
+                    "$[0]": {
+                        "vnfd-connection-point-ref": {"$[1]": {"vnfd-id-ref": "hfnd1"}}
+                    },
+                    "$[1]": {
+                        "vnfd-connection-point-ref": {"$[1]": {"vnfd-id-ref": "hfnd1"}}
+                    },
+                },
+            },
         }
 
     def create_descriptors(self, engine):
@@ -2087,10 +3719,22 @@ class TestDeployHnfd(TestDeployHackfest3Charmed):
         # TODO get vim-network-name from vnfr.vld.name
         self.pdu_descriptor["interfaces"][1]["vim-network-name"] = "{}-{}-{}".format(
             os.environ.get("OSMNBITEST_NS_NAME", "OSMNBITEST"),
-            "PDU", self.pdu_descriptor["interfaces"][1]["vim-network-name"])
-        engine.test("Onboard PDU descriptor", "POST", "/pdu/v1/pdu_descriptors",
-                    {"Location": "/pdu/v1/pdu_descriptors/", "Content-Type": "application/yaml"}, self.pdu_descriptor,
-                    201, r_header_yaml, "yaml")
+            "PDU",
+            self.pdu_descriptor["interfaces"][1]["vim-network-name"],
+        )
+        engine.test(
+            "Onboard PDU descriptor",
+            "POST",
+            "/pdu/v1/pdu_descriptors",
+            {
+                "Location": "/pdu/v1/pdu_descriptors/",
+                "Content-Type": "application/yaml",
+            },
+            self.pdu_descriptor,
+            201,
+            r_header_yaml,
+            "yaml",
+        )
         self.pdu_id = engine.last_id
 
     def run(self, engine, test_osm, manual_check, test_params=None):
@@ -2102,26 +3746,50 @@ class TestDeployHnfd(TestDeployHackfest3Charmed):
         self.vim_id = engine.get_create_vim(test_osm)
         # instantiate PDU
         self.pduDeploy.create_descriptors(engine)
-        self.pduDeploy.instantiate(engine, {"nsDescription": "to be used as PDU", "nsName": nsname + "-PDU",
-                                            "nsdId": self.pduDeploy.nsd_id, "vimAccountId": self.vim_id})
+        self.pduDeploy.instantiate(
+            engine,
+            {
+                "nsDescription": "to be used as PDU",
+                "nsName": nsname + "-PDU",
+                "nsdId": self.pduDeploy.nsd_id,
+                "vimAccountId": self.vim_id,
+            },
+        )
         if manual_check:
-            input('VNF to be used as PDU has been deployed. Perform manual check and press enter to resume')
+            input(
+                "VNF to be used as PDU has been deployed. Perform manual check and press enter to resume"
+            )
         if test_osm:
             self.pduDeploy.test_ns(engine, test_osm)
 
         if test_osm:
-            r = engine.test("Get VNFR to obtain IP_ADDRESS", "GET",
-                            "/nslcm/v1/vnfrs?nsr-id-ref={}".format(self.pduDeploy.ns_id), headers_json, None,
-                            200, r_header_json, "json")
+            r = engine.test(
+                "Get VNFR to obtain IP_ADDRESS",
+                "GET",
+                "/nslcm/v1/vnfrs?nsr-id-ref={}".format(self.pduDeploy.ns_id),
+                headers_json,
+                None,
+                200,
+                r_header_json,
+                "json",
+            )
             if not r:
                 return
             vnfr_data = r.json()
             # print(vnfr_data)
 
-            self.pdu_interface_0["ip-address"] = vnfr_data[0]["vdur"][0]["interfaces"][0].get("ip-address")
-            self.pdu_interface_1["ip-address"] = vnfr_data[0]["vdur"][0]["interfaces"][1].get("ip-address")
-            self.pdu_interface_0["mac-address"] = vnfr_data[0]["vdur"][0]["interfaces"][0].get("mac-address")
-            self.pdu_interface_1["mac-address"] = vnfr_data[0]["vdur"][0]["interfaces"][1].get("mac-address")
+            self.pdu_interface_0["ip-address"] = vnfr_data[0]["vdur"][0]["interfaces"][
+                0
+            ].get("ip-address")
+            self.pdu_interface_1["ip-address"] = vnfr_data[0]["vdur"][0]["interfaces"][
+                1
+            ].get("ip-address")
+            self.pdu_interface_0["mac-address"] = vnfr_data[0]["vdur"][0]["interfaces"][
+                0
+            ].get("mac-address")
+            self.pdu_interface_1["mac-address"] = vnfr_data[0]["vdur"][0]["interfaces"][
+                1
+            ].get("mac-address")
             if not self.pdu_interface_0["ip-address"]:
                 raise TestException("Vnfr has not managment ip address")
         else:
@@ -2132,8 +3800,12 @@ class TestDeployHnfd(TestDeployHackfest3Charmed):
 
         self.create_descriptors(engine)
 
-        ns_data = {"nsDescription": "default description", "nsName": nsname, "nsdId": self.nsd_id,
-                   "vimAccountId": self.vim_id}
+        ns_data = {
+            "nsDescription": "default description",
+            "nsName": nsname,
+            "nsdId": self.nsd_id,
+            "vimAccountId": self.vim_id,
+        }
         if test_params and test_params.get("ns-config"):
             if isinstance(test_params["ns-config"], str):
                 ns_data.update(yaml.load(test_params["ns-config"], Loader=yaml.Loader))
@@ -2142,7 +3814,9 @@ class TestDeployHnfd(TestDeployHackfest3Charmed):
 
         self.instantiate(engine, ns_data)
         if manual_check:
-            input('NS has been deployed. Perform manual check and press enter to resume')
+            input(
+                "NS has been deployed. Perform manual check and press enter to resume"
+            )
         if test_osm:
             self.test_ns(engine, test_osm)
         self.additional_operations(engine, test_osm, manual_check)
@@ -2154,9 +3828,16 @@ class TestDeployHnfd(TestDeployHackfest3Charmed):
     def delete_descriptors(self, engine):
         super().delete_descriptors(engine)
         # delete pdu
-        engine.test("Delete PDU SOL005", "DELETE",
-                    "/pdu/v1/pdu_descriptors/{}".format(self.pdu_id),
-                    headers_yaml, None, 204, None, 0)
+        engine.test(
+            "Delete PDU SOL005",
+            "DELETE",
+            "/pdu/v1/pdu_descriptors/{}".format(self.pdu_id),
+            headers_yaml,
+            None,
+            204,
+            None,
+            0,
+        )
 
 
 class TestDescriptors:
@@ -2190,7 +3871,9 @@ class TestDescriptors:
     def __init__(self):
         self.vnfd_filename = "hackfest_3charmed_vnfd.tar.gz"
         self.nsd_filename = "hackfest_3charmed_nsd.tar.gz"
-        self.descriptor_url = "https://osm-download.etsi.org/ftp/osm-3.0-three/2nd-hackfest/packages/"
+        self.descriptor_url = (
+            "https://osm-download.etsi.org/ftp/osm-3.0-three/2nd-hackfest/packages/"
+        )
         self.vnfd_id = None
         self.nsd_id = None
 
@@ -2208,123 +3891,274 @@ class TestDescriptors:
                 with open(filename_path, "wb") as file:
                     response = requests.get(self.descriptor_url + filename)
                     if response.status_code >= 300:
-                        raise TestException("Error downloading descriptor from '{}': {}".format(
-                            self.descriptor_url + filename, response.status_code))
+                        raise TestException(
+                            "Error downloading descriptor from '{}': {}".format(
+                                self.descriptor_url + filename, response.status_code
+                            )
+                        )
                     file.write(response.content)
 
         vnfd_filename_path = temp_dir + self.vnfd_filename
         nsd_filename_path = temp_dir + self.nsd_filename
 
-        engine.test("Onboard empty VNFD in one step", "POST", "/vnfpkgm/v1/vnf_packages_content", headers_yaml,
-                    self.vnfd_empty, 201, r_headers_yaml_location_vnfd, "yaml")
+        engine.test(
+            "Onboard empty VNFD in one step",
+            "POST",
+            "/vnfpkgm/v1/vnf_packages_content",
+            headers_yaml,
+            self.vnfd_empty,
+            201,
+            r_headers_yaml_location_vnfd,
+            "yaml",
+        )
         self.vnfd_id = engine.last_id
 
         # test bug 605
-        engine.test("Upload invalid VNFD ", "PUT", "/vnfpkgm/v1/vnf_packages/{}/package_content".format(self.vnfd_id),
-                    headers_yaml, self.vnfd_prova, 422, r_header_yaml, "yaml")
-
-        engine.test("Upload VNFD {}".format(self.vnfd_filename), "PUT",
-                    "/vnfpkgm/v1/vnf_packages/{}/package_content".format(self.vnfd_id), headers_zip_yaml,
-                    "@b" + vnfd_filename_path, 204, None, 0)
-
-        queries = ["mgmt-interface.cp=mgmt", "vdu.0.interface.0.external-connection-point-ref=mgmt",
-                   "vdu.0.interface.1.internal-connection-point-ref=internal",
-                   "internal-vld.0.internal-connection-point.0.id-ref=internal",
-                   # Detection of duplicated VLD names in VNF Descriptors
-                   # URL: internal-vld=[
-                   #        {id: internal1, name: internal, type:ELAN,
-                   #            internal-connection-point: [{id-ref: mgmtVM-internal}, {id-ref: dataVM-internal}]},
-                   #        {id: internal2, name: internal, type:ELAN,
-                   #            internal-connection-point: [{id-ref: mgmtVM-internal}, {id-ref: dataVM-internal}]}
-                   #        ]
-                   "internal-vld=%5B%7Bid%3A%20internal1%2C%20name%3A%20internal%2C%20type%3A%20ELAN%2C%20"
-                   "internal-connection-point%3A%20%5B%7Bid-ref%3A%20mgmtVM-internal%7D%2C%20%7Bid-ref%3A%20"
-                   "dataVM-internal%7D%5D%7D%2C%20%7Bid%3A%20internal2%2C%20name%3A%20internal%2C%20type%3A%20"
-                   "ELAN%2C%20internal-connection-point%3A%20%5B%7Bid-ref%3A%20mgmtVM-internal%7D%2C%20%7B"
-                   "id-ref%3A%20dataVM-internal%7D%5D%7D%5D"
-                   ]
+        engine.test(
+            "Upload invalid VNFD ",
+            "PUT",
+            "/vnfpkgm/v1/vnf_packages/{}/package_content".format(self.vnfd_id),
+            headers_yaml,
+            self.vnfd_prova,
+            422,
+            r_header_yaml,
+            "yaml",
+        )
+
+        engine.test(
+            "Upload VNFD {}".format(self.vnfd_filename),
+            "PUT",
+            "/vnfpkgm/v1/vnf_packages/{}/package_content".format(self.vnfd_id),
+            headers_zip_yaml,
+            "@b" + vnfd_filename_path,
+            204,
+            None,
+            0,
+        )
+
+        queries = [
+            "mgmt-interface.cp=mgmt",
+            "vdu.0.interface.0.external-connection-point-ref=mgmt",
+            "vdu.0.interface.1.internal-connection-point-ref=internal",
+            "internal-vld.0.internal-connection-point.0.id-ref=internal",
+            # Detection of duplicated VLD names in VNF Descriptors
+            # URL: internal-vld=[
+            #        {id: internal1, name: internal, type:ELAN,
+            #            internal-connection-point: [{id-ref: mgmtVM-internal}, {id-ref: dataVM-internal}]},
+            #        {id: internal2, name: internal, type:ELAN,
+            #            internal-connection-point: [{id-ref: mgmtVM-internal}, {id-ref: dataVM-internal}]}
+            #        ]
+            "internal-vld=%5B%7Bid%3A%20internal1%2C%20name%3A%20internal%2C%20type%3A%20ELAN%2C%20"
+            "internal-connection-point%3A%20%5B%7Bid-ref%3A%20mgmtVM-internal%7D%2C%20%7Bid-ref%3A%20"
+            "dataVM-internal%7D%5D%7D%2C%20%7Bid%3A%20internal2%2C%20name%3A%20internal%2C%20type%3A%20"
+            "ELAN%2C%20internal-connection-point%3A%20%5B%7Bid-ref%3A%20mgmtVM-internal%7D%2C%20%7B"
+            "id-ref%3A%20dataVM-internal%7D%5D%7D%5D",
+        ]
         for query in queries:
-            engine.test("Upload invalid VNFD ", "PUT",
-                        "/vnfpkgm/v1/vnf_packages/{}/package_content?{}".format(self.vnfd_id, query),
-                        headers_zip_yaml, "@b" + vnfd_filename_path, 422, r_header_yaml, "yaml")
+            engine.test(
+                "Upload invalid VNFD ",
+                "PUT",
+                "/vnfpkgm/v1/vnf_packages/{}/package_content?{}".format(
+                    self.vnfd_id, query
+                ),
+                headers_zip_yaml,
+                "@b" + vnfd_filename_path,
+                422,
+                r_header_yaml,
+                "yaml",
+            )
 
         # test bug 605
-        engine.test("Upload invalid VNFD ", "PUT", "/vnfpkgm/v1/vnf_packages/{}/package_content".format(self.vnfd_id),
-                    headers_yaml, self.vnfd_prova, 422, r_header_yaml, "yaml")
+        engine.test(
+            "Upload invalid VNFD ",
+            "PUT",
+            "/vnfpkgm/v1/vnf_packages/{}/package_content".format(self.vnfd_id),
+            headers_yaml,
+            self.vnfd_prova,
+            422,
+            r_header_yaml,
+            "yaml",
+        )
 
         # get vnfd descriptor
-        engine.test("Get VNFD descriptor", "GET", "/vnfpkgm/v1/vnf_packages/{}".format(self.vnfd_id),
-                    headers_yaml, None, 200, r_header_yaml, "yaml")
+        engine.test(
+            "Get VNFD descriptor",
+            "GET",
+            "/vnfpkgm/v1/vnf_packages/{}".format(self.vnfd_id),
+            headers_yaml,
+            None,
+            200,
+            r_header_yaml,
+            "yaml",
+        )
 
         # get vnfd file descriptor
-        engine.test("Get VNFD file descriptor", "GET", "/vnfpkgm/v1/vnf_packages/{}/vnfd".format(self.vnfd_id),
-                    headers_text, None, 200, r_header_text, "text", temp_dir+"vnfd-yaml")
+        engine.test(
+            "Get VNFD file descriptor",
+            "GET",
+            "/vnfpkgm/v1/vnf_packages/{}/vnfd".format(self.vnfd_id),
+            headers_text,
+            None,
+            200,
+            r_header_text,
+            "text",
+            temp_dir + "vnfd-yaml",
+        )
         # TODO compare files: diff vnfd-yaml hackfest_3charmed_vnfd/hackfest_3charmed_vnfd.yaml
 
         # get vnfd zip file package
-        engine.test("Get VNFD zip package", "GET",
-                    "/vnfpkgm/v1/vnf_packages/{}/package_content".format(self.vnfd_id), headers_zip, None, 200,
-                    r_header_zip, "zip", temp_dir+"vnfd-zip")
+        engine.test(
+            "Get VNFD zip package",
+            "GET",
+            "/vnfpkgm/v1/vnf_packages/{}/package_content".format(self.vnfd_id),
+            headers_zip,
+            None,
+            200,
+            r_header_zip,
+            "zip",
+            temp_dir + "vnfd-zip",
+        )
         # TODO compare files: diff vnfd-zip hackfest_3charmed_vnfd.tar.gz
 
         # get vnfd artifact
-        engine.test("Get VNFD artifact package", "GET",
-                    "/vnfpkgm/v1/vnf_packages/{}/artifacts/icons/osm.png".format(self.vnfd_id), headers_zip, None, 200,
-                    r_header_octect, "octet-string", temp_dir+"vnfd-icon")
+        engine.test(
+            "Get VNFD artifact package",
+            "GET",
+            "/vnfpkgm/v1/vnf_packages/{}/artifacts/icons/osm.png".format(self.vnfd_id),
+            headers_zip,
+            None,
+            200,
+            r_header_octect,
+            "octet-string",
+            temp_dir + "vnfd-icon",
+        )
         # TODO compare files: diff vnfd-icon hackfest_3charmed_vnfd/icons/osm.png
 
         # nsd CREATE AND UPLOAD in one step:
-        engine.test("Onboard NSD in one step", "POST", "/nsd/v1/ns_descriptors_content", headers_zip_yaml,
-                    "@b" + nsd_filename_path, 201, r_headers_yaml_location_nsd, "yaml")
+        engine.test(
+            "Onboard NSD in one step",
+            "POST",
+            "/nsd/v1/ns_descriptors_content",
+            headers_zip_yaml,
+            "@b" + nsd_filename_path,
+            201,
+            r_headers_yaml_location_nsd,
+            "yaml",
+        )
         self.nsd_id = engine.last_id
 
         queries = ["vld.0.vnfd-connection-point-ref.0.vnfd-id-ref=hf"]
         for query in queries:
-            engine.test("Upload invalid NSD ", "PUT",
-                        "/nsd/v1/ns_descriptors/{}/nsd_content?{}".format(self.nsd_id, query),
-                        headers_zip_yaml, "@b" + nsd_filename_path, 422, r_header_yaml, "yaml")
+            engine.test(
+                "Upload invalid NSD ",
+                "PUT",
+                "/nsd/v1/ns_descriptors/{}/nsd_content?{}".format(self.nsd_id, query),
+                headers_zip_yaml,
+                "@b" + nsd_filename_path,
+                422,
+                r_header_yaml,
+                "yaml",
+            )
 
         # get nsd descriptor
-        engine.test("Get NSD descriptor", "GET", "/nsd/v1/ns_descriptors/{}".format(self.nsd_id), headers_yaml,
-                    None, 200, r_header_yaml, "yaml")
+        engine.test(
+            "Get NSD descriptor",
+            "GET",
+            "/nsd/v1/ns_descriptors/{}".format(self.nsd_id),
+            headers_yaml,
+            None,
+            200,
+            r_header_yaml,
+            "yaml",
+        )
 
         # get nsd file descriptor
-        engine.test("Get NSD file descriptor", "GET", "/nsd/v1/ns_descriptors/{}/nsd".format(self.nsd_id), headers_text,
-                    None, 200, r_header_text, "text", temp_dir+"nsd-yaml")
+        engine.test(
+            "Get NSD file descriptor",
+            "GET",
+            "/nsd/v1/ns_descriptors/{}/nsd".format(self.nsd_id),
+            headers_text,
+            None,
+            200,
+            r_header_text,
+            "text",
+            temp_dir + "nsd-yaml",
+        )
         # TODO compare files: diff nsd-yaml hackfest_3charmed_nsd/hackfest_3charmed_nsd.yaml
 
         # get nsd zip file package
-        engine.test("Get NSD zip package", "GET", "/nsd/v1/ns_descriptors/{}/nsd_content".format(self.nsd_id),
-                    headers_zip, None, 200, r_header_zip, "zip", temp_dir+"nsd-zip")
+        engine.test(
+            "Get NSD zip package",
+            "GET",
+            "/nsd/v1/ns_descriptors/{}/nsd_content".format(self.nsd_id),
+            headers_zip,
+            None,
+            200,
+            r_header_zip,
+            "zip",
+            temp_dir + "nsd-zip",
+        )
         # TODO compare files: diff nsd-zip hackfest_3charmed_nsd.tar.gz
 
         # get nsd artifact
-        engine.test("Get NSD artifact package", "GET",
-                    "/nsd/v1/ns_descriptors/{}/artifacts/icons/osm.png".format(self.nsd_id), headers_zip, None, 200,
-                    r_header_octect, "octet-string", temp_dir+"nsd-icon")
+        engine.test(
+            "Get NSD artifact package",
+            "GET",
+            "/nsd/v1/ns_descriptors/{}/artifacts/icons/osm.png".format(self.nsd_id),
+            headers_zip,
+            None,
+            200,
+            r_header_octect,
+            "octet-string",
+            temp_dir + "nsd-icon",
+        )
         # TODO compare files: diff nsd-icon hackfest_3charmed_nsd/icons/osm.png
 
         # vnfd DELETE
-        test_rest.test("Delete VNFD conflict", "DELETE", "/vnfpkgm/v1/vnf_packages/{}".format(self.vnfd_id),
-                       headers_yaml, None, 409, None, None)
+        test_rest.test(
+            "Delete VNFD conflict",
+            "DELETE",
+            "/vnfpkgm/v1/vnf_packages/{}".format(self.vnfd_id),
+            headers_yaml,
+            None,
+            409,
+            None,
+            None,
+        )
 
-        test_rest.test("Delete VNFD force", "DELETE", "/vnfpkgm/v1/vnf_packages/{}?FORCE=TRUE".format(self.vnfd_id),
-                       headers_yaml, None, 204, None, 0)
+        test_rest.test(
+            "Delete VNFD force",
+            "DELETE",
+            "/vnfpkgm/v1/vnf_packages/{}?FORCE=TRUE".format(self.vnfd_id),
+            headers_yaml,
+            None,
+            204,
+            None,
+            0,
+        )
 
         # nsd DELETE
-        test_rest.test("Delete NSD", "DELETE", "/nsd/v1/ns_descriptors/{}".format(self.nsd_id), headers_yaml, None, 204,
-                       None, 0)
+        test_rest.test(
+            "Delete NSD",
+            "DELETE",
+            "/nsd/v1/ns_descriptors/{}".format(self.nsd_id),
+            headers_yaml,
+            None,
+            204,
+            None,
+            0,
+        )
 
 
 class TestNetSliceTemplates:
     description = "Upload a NST to OSM"
 
     def __init__(self):
-        self.vnfd_filename = ("@./slice_shared/vnfd/slice_shared_vnfd.yaml")
-        self.vnfd_filename_middle = ("@./slice_shared/vnfd/slice_shared_middle_vnfd.yaml")
-        self.nsd_filename = ("@./slice_shared/nsd/slice_shared_nsd.yaml")
-        self.nsd_filename_middle = ("@./slice_shared/nsd/slice_shared_middle_nsd.yaml")
-        self.nst_filenames = ("@./slice_shared/slice_shared_nstd.yaml")
+        self.vnfd_filename = "@./slice_shared/vnfd/slice_shared_vnfd.yaml"
+        self.vnfd_filename_middle = "@./slice_shared/vnfd/slice_shared_middle_vnfd.yaml"
+        self.nsd_filename = "@./slice_shared/nsd/slice_shared_nsd.yaml"
+        self.nsd_filename_middle = "@./slice_shared/nsd/slice_shared_middle_nsd.yaml"
+        self.nst_filenames = "@./slice_shared/slice_shared_nstd.yaml"
 
     def run(self, engine, test_osm, manual_check, test_params=None):
         # nst CREATE
@@ -2335,53 +4169,141 @@ class TestNetSliceTemplates:
             os.makedirs(temp_dir)
 
         # Onboard VNFDs
-        engine.test("Onboard edge VNFD", "POST", "/vnfpkgm/v1/vnf_packages_content", headers_yaml,
-                    self.vnfd_filename, 201, r_headers_yaml_location_vnfd, "yaml")
+        engine.test(
+            "Onboard edge VNFD",
+            "POST",
+            "/vnfpkgm/v1/vnf_packages_content",
+            headers_yaml,
+            self.vnfd_filename,
+            201,
+            r_headers_yaml_location_vnfd,
+            "yaml",
+        )
         self.vnfd_edge_id = engine.last_id
 
-        engine.test("Onboard middle VNFD", "POST", "/vnfpkgm/v1/vnf_packages_content", headers_yaml,
-                    self.vnfd_filename_middle, 201, r_headers_yaml_location_vnfd, "yaml")
+        engine.test(
+            "Onboard middle VNFD",
+            "POST",
+            "/vnfpkgm/v1/vnf_packages_content",
+            headers_yaml,
+            self.vnfd_filename_middle,
+            201,
+            r_headers_yaml_location_vnfd,
+            "yaml",
+        )
         self.vnfd_middle_id = engine.last_id
 
         # Onboard NSDs
-        engine.test("Onboard NSD edge", "POST", "/nsd/v1/ns_descriptors_content", headers_yaml,
-                    self.nsd_filename, 201, r_headers_yaml_location_nsd, "yaml")
+        engine.test(
+            "Onboard NSD edge",
+            "POST",
+            "/nsd/v1/ns_descriptors_content",
+            headers_yaml,
+            self.nsd_filename,
+            201,
+            r_headers_yaml_location_nsd,
+            "yaml",
+        )
         self.nsd_edge_id = engine.last_id
 
-        engine.test("Onboard NSD middle", "POST", "/nsd/v1/ns_descriptors_content", headers_yaml,
-                    self.nsd_filename_middle, 201, r_headers_yaml_location_nsd, "yaml")
+        engine.test(
+            "Onboard NSD middle",
+            "POST",
+            "/nsd/v1/ns_descriptors_content",
+            headers_yaml,
+            self.nsd_filename_middle,
+            201,
+            r_headers_yaml_location_nsd,
+            "yaml",
+        )
         self.nsd_middle_id = engine.last_id
 
         # Onboard NST
-        engine.test("Onboard NST", "POST", "/nst/v1/netslice_templates_content", headers_yaml, self.nst_filenames,
-                    201, r_headers_yaml_location_nst, "yaml")
+        engine.test(
+            "Onboard NST",
+            "POST",
+            "/nst/v1/netslice_templates_content",
+            headers_yaml,
+            self.nst_filenames,
+            201,
+            r_headers_yaml_location_nst,
+            "yaml",
+        )
         nst_id = engine.last_id
 
         # nstd SHOW OSM format
-        engine.test("Show NSTD OSM format", "GET", "/nst/v1/netslice_templates/{}".format(nst_id), headers_json, None,
-                    200, r_header_json, "json")
+        engine.test(
+            "Show NSTD OSM format",
+            "GET",
+            "/nst/v1/netslice_templates/{}".format(nst_id),
+            headers_json,
+            None,
+            200,
+            r_header_json,
+            "json",
+        )
 
         # nstd DELETE
-        engine.test("Delete NSTD", "DELETE", "/nst/v1/netslice_templates/{}".format(nst_id), headers_json, None,
-                    204, None, 0)
+        engine.test(
+            "Delete NSTD",
+            "DELETE",
+            "/nst/v1/netslice_templates/{}".format(nst_id),
+            headers_json,
+            None,
+            204,
+            None,
+            0,
+        )
 
         # NSDs DELETE
-        test_rest.test("Delete NSD middle", "DELETE", "/nsd/v1/ns_descriptors/{}".format(self.nsd_middle_id),
-                       headers_json, None, 204, None, 0)
+        test_rest.test(
+            "Delete NSD middle",
+            "DELETE",
+            "/nsd/v1/ns_descriptors/{}".format(self.nsd_middle_id),
+            headers_json,
+            None,
+            204,
+            None,
+            0,
+        )
 
-        test_rest.test("Delete NSD edge", "DELETE", "/nsd/v1/ns_descriptors/{}".format(self.nsd_edge_id), headers_json,
-                       None, 204, None, 0)
+        test_rest.test(
+            "Delete NSD edge",
+            "DELETE",
+            "/nsd/v1/ns_descriptors/{}".format(self.nsd_edge_id),
+            headers_json,
+            None,
+            204,
+            None,
+            0,
+        )
 
         # VNFDs DELETE
-        test_rest.test("Delete VNFD edge", "DELETE", "/vnfpkgm/v1/vnf_packages/{}".format(self.vnfd_edge_id),
-                       headers_yaml, None, 204, None, 0)
+        test_rest.test(
+            "Delete VNFD edge",
+            "DELETE",
+            "/vnfpkgm/v1/vnf_packages/{}".format(self.vnfd_edge_id),
+            headers_yaml,
+            None,
+            204,
+            None,
+            0,
+        )
 
-        test_rest.test("Delete VNFD middle", "DELETE", "/vnfpkgm/v1/vnf_packages/{}".format(self.vnfd_middle_id),
-                       headers_yaml, None, 204, None, 0)
+        test_rest.test(
+            "Delete VNFD middle",
+            "DELETE",
+            "/vnfpkgm/v1/vnf_packages/{}".format(self.vnfd_middle_id),
+            headers_yaml,
+            None,
+            204,
+            None,
+            0,
+        )
 
 
 class TestNetSliceInstances:
-    '''
+    """
     Test procedure:
     1. Populate databases with VNFD, NSD, NST with the following scenario
        +-----------------management-----------------+
@@ -2414,38 +4336,71 @@ class TestNetSliceInstances:
         Manual check - All cleaned correctly?
         NSI-2 and NSI-3 were terminated and deleted
     14. Cleanup database
-    '''
+    """
 
     description = "Upload a NST to OSM"
 
     def __init__(self):
         self.vim_id = None
-        self.vnfd_filename = ("@./slice_shared/vnfd/slice_shared_vnfd.yaml")
-        self.vnfd_filename_middle = ("@./slice_shared/vnfd/slice_shared_middle_vnfd.yaml")
-        self.nsd_filename = ("@./slice_shared/nsd/slice_shared_nsd.yaml")
-        self.nsd_filename_middle = ("@./slice_shared/nsd/slice_shared_middle_nsd.yaml")
-        self.nst_filenames = ("@./slice_shared/slice_shared_nstd.yaml")
+        self.vnfd_filename = "@./slice_shared/vnfd/slice_shared_vnfd.yaml"
+        self.vnfd_filename_middle = "@./slice_shared/vnfd/slice_shared_middle_vnfd.yaml"
+        self.nsd_filename = "@./slice_shared/nsd/slice_shared_nsd.yaml"
+        self.nsd_filename_middle = "@./slice_shared/nsd/slice_shared_middle_nsd.yaml"
+        self.nst_filenames = "@./slice_shared/slice_shared_nstd.yaml"
 
     def create_slice(self, engine, nsi_data, name):
         ns_data_text = yaml.safe_dump(nsi_data, default_flow_style=True, width=256)
-        r = engine.test(name, "POST", "/nsilcm/v1/netslice_instances",
-                        headers_yaml, ns_data_text, (201, 202),
-                        {"Location": "nsilcm/v1/netslice_instances/", "Content-Type": "application/yaml"}, "yaml")
+        r = engine.test(
+            name,
+            "POST",
+            "/nsilcm/v1/netslice_instances",
+            headers_yaml,
+            ns_data_text,
+            (201, 202),
+            {
+                "Location": "nsilcm/v1/netslice_instances/",
+                "Content-Type": "application/yaml",
+            },
+            "yaml",
+        )
         return r
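A note on the request body built here: yaml.safe_dump(nsi_data, default_flow_style=True)
serializes the dict as a single-line YAML flow mapping with keys sorted alphabetically, so
for the NSI payloads used below the posted text looks roughly like (ids filled in at run
time):

    {nsiDescription: default, nsiName: Deploy-NSI-1, nstId: <nst-id>, vimAccountId: <vim-id>}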
 
     def instantiate_slice(self, engine, nsi_data, nsi_id, name):
         ns_data_text = yaml.safe_dump(nsi_data, default_flow_style=True, width=256)
-        engine.test(name, "POST",
-                    "/nsilcm/v1/netslice_instances/{}/instantiate".format(nsi_id), headers_yaml, ns_data_text,
-                    (201, 202), r_headers_yaml_location_nsilcmop, "yaml")
+        engine.test(
+            name,
+            "POST",
+            "/nsilcm/v1/netslice_instances/{}/instantiate".format(nsi_id),
+            headers_yaml,
+            ns_data_text,
+            (201, 202),
+            r_headers_yaml_location_nsilcmop,
+            "yaml",
+        )
 
     def terminate_slice(self, engine, nsi_id, name):
-        engine.test(name, "POST", "/nsilcm/v1/netslice_instances/{}/terminate".format(nsi_id),
-                    headers_yaml, None, (201, 202), r_headers_yaml_location_nsilcmop, "yaml")
+        engine.test(
+            name,
+            "POST",
+            "/nsilcm/v1/netslice_instances/{}/terminate".format(nsi_id),
+            headers_yaml,
+            None,
+            (201, 202),
+            r_headers_yaml_location_nsilcmop,
+            "yaml",
+        )
 
     def delete_slice(self, engine, nsi_id, name):
-        engine.test(name, "DELETE", "/nsilcm/v1/netslice_instances/{}".format(nsi_id), headers_yaml, None,
-                    204, None, 0)
+        engine.test(
+            name,
+            "DELETE",
+            "/nsilcm/v1/netslice_instances/{}".format(nsi_id),
+            headers_yaml,
+            None,
+            204,
+            None,
+            0,
+        )
 
     def run(self, engine, test_osm, manual_check, test_params=None):
         # nst CREATE
@@ -2453,39 +4408,86 @@ class TestNetSliceInstances:
         engine.get_autorization()
 
         # Onboard VNFDs
-        engine.test("Onboard edge VNFD", "POST", "/vnfpkgm/v1/vnf_packages_content", headers_yaml,
-                    self.vnfd_filename, 201, r_headers_yaml_location_vnfd, "yaml")
+        engine.test(
+            "Onboard edge VNFD",
+            "POST",
+            "/vnfpkgm/v1/vnf_packages_content",
+            headers_yaml,
+            self.vnfd_filename,
+            201,
+            r_headers_yaml_location_vnfd,
+            "yaml",
+        )
         self.vnfd_edge_id = engine.last_id
 
-        engine.test("Onboard middle VNFD", "POST", "/vnfpkgm/v1/vnf_packages_content", headers_yaml,
-                    self.vnfd_filename_middle, 201, r_headers_yaml_location_vnfd, "yaml")
+        engine.test(
+            "Onboard middle VNFD",
+            "POST",
+            "/vnfpkgm/v1/vnf_packages_content",
+            headers_yaml,
+            self.vnfd_filename_middle,
+            201,
+            r_headers_yaml_location_vnfd,
+            "yaml",
+        )
         self.vnfd_middle_id = engine.last_id
 
         # Onboard NSDs
-        engine.test("Onboard NSD edge", "POST", "/nsd/v1/ns_descriptors_content", headers_yaml,
-                    self.nsd_filename, 201, r_headers_yaml_location_nsd, "yaml")
+        engine.test(
+            "Onboard NSD edge",
+            "POST",
+            "/nsd/v1/ns_descriptors_content",
+            headers_yaml,
+            self.nsd_filename,
+            201,
+            r_headers_yaml_location_nsd,
+            "yaml",
+        )
         self.nsd_edge_id = engine.last_id
 
-        engine.test("Onboard NSD middle", "POST", "/nsd/v1/ns_descriptors_content", headers_yaml,
-                    self.nsd_filename_middle, 201, r_headers_yaml_location_nsd, "yaml")
+        engine.test(
+            "Onboard NSD middle",
+            "POST",
+            "/nsd/v1/ns_descriptors_content",
+            headers_yaml,
+            self.nsd_filename_middle,
+            201,
+            r_headers_yaml_location_nsd,
+            "yaml",
+        )
         self.nsd_middle_id = engine.last_id
 
         # Onboard NST
-        engine.test("Onboard NST", "POST", "/nst/v1/netslice_templates_content", headers_yaml, self.nst_filenames,
-                    201, r_headers_yaml_location_nst, "yaml")
+        engine.test(
+            "Onboard NST",
+            "POST",
+            "/nst/v1/netslice_templates_content",
+            headers_yaml,
+            self.nst_filenames,
+            201,
+            r_headers_yaml_location_nst,
+            "yaml",
+        )
         nst_id = engine.last_id
 
         self.vim_id = engine.get_create_vim(test_osm)
 
         # CREATE NSI-1
-        ns_data = {'nsiName': 'Deploy-NSI-1', 'vimAccountId': self.vim_id, 'nstId': nst_id, 'nsiDescription': 'default'}
+        ns_data = {
+            "nsiName": "Deploy-NSI-1",
+            "vimAccountId": self.vim_id,
+            "nstId": nst_id,
+            "nsiDescription": "default",
+        }
         r = self.create_slice(engine, ns_data, "Create NSI-1 step 1")
         if not r:
             return
         self.nsi_id1 = engine.last_id
 
         # INSTANTIATE NSI-1
-        self.instantiate_slice(engine, ns_data, self.nsi_id1, "Instantiate NSI-1 step 2")
+        self.instantiate_slice(
+            engine, ns_data, self.nsi_id1, "Instantiate NSI-1 step 2"
+        )
         nsilcmop_id1 = engine.last_id
 
         # Waiting for NSI-1
@@ -2493,14 +4495,21 @@ class TestNetSliceInstances:
             engine.wait_operation_ready("nsi", nsilcmop_id1, timeout_deploy)
 
         # CREATE NSI-2
-        ns_data = {'nsiName': 'Deploy-NSI-2', 'vimAccountId': self.vim_id, 'nstId': nst_id, 'nsiDescription': 'default'}
+        ns_data = {
+            "nsiName": "Deploy-NSI-2",
+            "vimAccountId": self.vim_id,
+            "nstId": nst_id,
+            "nsiDescription": "default",
+        }
         r = self.create_slice(engine, ns_data, "Create NSI-2 step 1")
         if not r:
             return
         self.nsi_id2 = engine.last_id
 
         # INSTANTIATE NSI-2
-        self.instantiate_slice(engine, ns_data, self.nsi_id2, "Instantiate NSI-2 step 2")
+        self.instantiate_slice(
+            engine, ns_data, self.nsi_id2, "Instantiate NSI-2 step 2"
+        )
         nsilcmop_id2 = engine.last_id
 
         # Waiting for NSI-2
@@ -2508,7 +4517,9 @@ class TestNetSliceInstances:
             engine.wait_operation_ready("nsi", nsilcmop_id2, timeout_deploy)
 
         if manual_check:
-            input('NSI-1 AND NSI-2 has been deployed. Perform manual check and press enter to resume')
+            input(
+                "NSI-1 AND NSI-2 has been deployed. Perform manual check and press enter to resume"
+            )
 
         # TERMINATE NSI-1
         if test_osm:
@@ -2522,10 +4533,17 @@ class TestNetSliceInstances:
         self.delete_slice(engine, self.nsi_id1, "Delete NS")
 
         if manual_check:
-            input('NSI-1 has been deleted. Perform manual check and press enter to resume')
+            input(
+                "NSI-1 has been deleted. Perform manual check and press enter to resume"
+            )
 
         # CREATE NSI-3
-        ns_data = {'nsiName': 'Deploy-NSI-3', 'vimAccountId': self.vim_id, 'nstId': nst_id, 'nsiDescription': 'default'}
+        ns_data = {
+            "nsiName": "Deploy-NSI-3",
+            "vimAccountId": self.vim_id,
+            "nstId": nst_id,
+            "nsiDescription": "default",
+        }
         r = self.create_slice(engine, ns_data, "Create NSI-3 step 1")
 
         if not r:
@@ -2533,7 +4551,9 @@ class TestNetSliceInstances:
         self.nsi_id3 = engine.last_id
 
         # INSTANTIATE NSI-3
-        self.instantiate_slice(engine, ns_data, self.nsi_id3, "Instantiate NSI-3 step 2")
+        self.instantiate_slice(
+            engine, ns_data, self.nsi_id3, "Instantiate NSI-3 step 2"
+        )
         nsilcmop_id3 = engine.last_id
 
         # Wait Instantiate NSI-3
@@ -2541,7 +4561,9 @@ class TestNetSliceInstances:
             engine.wait_operation_ready("nsi", nsilcmop_id3, timeout_deploy)
 
         if manual_check:
-            input('NSI-3 has been deployed. Perform manual check and press enter to resume')
+            input(
+                "NSI-3 has been deployed. Perform manual check and press enter to resume"
+            )
 
         # TERMINATE NSI-2
         if test_osm:
@@ -2550,13 +4572,13 @@ class TestNetSliceInstances:
 
             # Wait terminate NSI-2
             engine.wait_operation_ready("nsi", nsilcmop2_id, timeout_deploy)
-        
+
         # DELETE NSI-2
         self.delete_slice(engine, self.nsi_id2, "DELETE NSI-2")
 
         # TERMINATE NSI-3
         if test_osm:
-            self. terminate_slice(engine, self.nsi_id3, "Terminate NSI-3")
+            self.terminate_slice(engine, self.nsi_id3, "Terminate NSI-3")
             nsilcmop3_id = engine.last_id
 
             # Wait terminate NSI-3
@@ -2566,25 +4588,67 @@ class TestNetSliceInstances:
         self.delete_slice(engine, self.nsi_id3, "DELETE NSI-3")
 
         if manual_check:
-            input('NSI-2 and NSI-3 has been deleted. Perform manual check and press enter to resume')
+            input(
+                "NSI-2 and NSI-3 has been deleted. Perform manual check and press enter to resume"
+            )
 
         # nstd DELETE
-        engine.test("Delete NSTD", "DELETE", "/nst/v1/netslice_templates/{}".format(nst_id), headers_json, None,
-                    204, None, 0)
+        engine.test(
+            "Delete NSTD",
+            "DELETE",
+            "/nst/v1/netslice_templates/{}".format(nst_id),
+            headers_json,
+            None,
+            204,
+            None,
+            0,
+        )
 
         # NSDs DELETE
-        test_rest.test("Delete NSD middle", "DELETE", "/nsd/v1/ns_descriptors/{}".format(self.nsd_middle_id),
-                       headers_json, None, 204, None, 0)
+        test_rest.test(
+            "Delete NSD middle",
+            "DELETE",
+            "/nsd/v1/ns_descriptors/{}".format(self.nsd_middle_id),
+            headers_json,
+            None,
+            204,
+            None,
+            0,
+        )
 
-        test_rest.test("Delete NSD edge", "DELETE", "/nsd/v1/ns_descriptors/{}".format(self.nsd_edge_id), headers_json,
-                       None, 204, None, 0)
+        test_rest.test(
+            "Delete NSD edge",
+            "DELETE",
+            "/nsd/v1/ns_descriptors/{}".format(self.nsd_edge_id),
+            headers_json,
+            None,
+            204,
+            None,
+            0,
+        )
 
         # VNFDs DELETE
-        test_rest.test("Delete VNFD edge", "DELETE", "/vnfpkgm/v1/vnf_packages/{}".format(self.vnfd_edge_id),
-                       headers_yaml, None, 204, None, 0)
+        test_rest.test(
+            "Delete VNFD edge",
+            "DELETE",
+            "/vnfpkgm/v1/vnf_packages/{}".format(self.vnfd_edge_id),
+            headers_yaml,
+            None,
+            204,
+            None,
+            0,
+        )
 
-        test_rest.test("Delete VNFD middle", "DELETE", "/vnfpkgm/v1/vnf_packages/{}".format(self.vnfd_middle_id),
-                       headers_yaml, None, 204, None, 0)
+        test_rest.test(
+            "Delete VNFD middle",
+            "DELETE",
+            "/vnfpkgm/v1/vnf_packages/{}".format(self.vnfd_middle_id),
+            headers_yaml,
+            None,
+            204,
+            None,
+            0,
+        )
 
 
 class TestAuthentication:
@@ -2603,95 +4667,273 @@ class TestAuthentication:
         engine.get_autorization()
 
         # GET
-        engine.test("Get tokens", "GET", "/admin/v1/tokens", headers_json, {},
-                    (200), {"Content-Type": "application/json"}, "json")
-        engine.test("Get projects", "GET", "/admin/v1/projects", headers_json, {},
-                    (200), {"Content-Type": "application/json"}, "json")
-        engine.test("Get users", "GET", "/admin/v1/users", headers_json, {},
-                    (200), {"Content-Type": "application/json"}, "json")
-        engine.test("Get roles", "GET", "/admin/v1/roles", headers_json, {},
-                    (200), {"Content-Type": "application/json"}, "json")
-        res = engine.test("Get admin project", "GET", "/admin/v1/projects?name=admin", headers_json, {},
-                          (200), {"Content-Type": "application/json"}, "json")
+        engine.test(
+            "Get tokens",
+            "GET",
+            "/admin/v1/tokens",
+            headers_json,
+            {},
+            (200),
+            {"Content-Type": "application/json"},
+            "json",
+        )
+        engine.test(
+            "Get projects",
+            "GET",
+            "/admin/v1/projects",
+            headers_json,
+            {},
+            (200),
+            {"Content-Type": "application/json"},
+            "json",
+        )
+        engine.test(
+            "Get users",
+            "GET",
+            "/admin/v1/users",
+            headers_json,
+            {},
+            (200),
+            {"Content-Type": "application/json"},
+            "json",
+        )
+        engine.test(
+            "Get roles",
+            "GET",
+            "/admin/v1/roles",
+            headers_json,
+            {},
+            (200),
+            {"Content-Type": "application/json"},
+            "json",
+        )
+        res = engine.test(
+            "Get admin project",
+            "GET",
+            "/admin/v1/projects?name=admin",
+            headers_json,
+            {},
+            (200),
+            {"Content-Type": "application/json"},
+            "json",
+        )
         admin_project_id = res.json()[0]["_id"] if res else None
-        res = engine.test("Get project admin role", "GET", "/admin/v1/roles?name=project_admin", headers_json, {},
-                          (200), {"Content-Type": "application/json"}, "json")
+        res = engine.test(
+            "Get project admin role",
+            "GET",
+            "/admin/v1/roles?name=project_admin",
+            headers_json,
+            {},
+            (200),
+            {"Content-Type": "application/json"},
+            "json",
+        )
         project_admin_role_id = res.json()[0]["_id"] if res else None
-        res = engine.test("Get project user role", "GET", "/admin/v1/roles?name=project_user", headers_json, {},
-                          (200), {"Content-Type": "application/json"}, "json")
+        res = engine.test(
+            "Get project user role",
+            "GET",
+            "/admin/v1/roles?name=project_user",
+            headers_json,
+            {},
+            (200),
+            {"Content-Type": "application/json"},
+            "json",
+        )
         project_user_role_id = res.json()[0]["_id"] if res else None
 
         # POST
-        res = engine.test("Create test project", "POST", "/admin/v1/projects", headers_json, {"name": "test"},
-                          (201), {"Location": "/admin/v1/projects/", "Content-Type": "application/json"}, "json")
+        res = engine.test(
+            "Create test project",
+            "POST",
+            "/admin/v1/projects",
+            headers_json,
+            {"name": "test"},
+            (201),
+            {"Location": "/admin/v1/projects/", "Content-Type": "application/json"},
+            "json",
+        )
         test_project_id = engine.last_id if res else None
-        res = engine.test("Create role without permissions", "POST", "/admin/v1/roles", headers_json, {"name": "empty"},
-                          (201), {"Content-Type": "application/json"}, "json")
+        res = engine.test(
+            "Create role without permissions",
+            "POST",
+            "/admin/v1/roles",
+            headers_json,
+            {"name": "empty"},
+            (201),
+            {"Content-Type": "application/json"},
+            "json",
+        )
         empty_role_id = engine.last_id if res else None
-        res = engine.test("Create role with default permissions", "POST", "/admin/v1/roles", headers_json,
-                          {"name": "default", "permissions": {"default": True}},
-                          (201), {"Location": "/admin/v1/roles/", "Content-Type": "application/json"}, "json")
+        res = engine.test(
+            "Create role with default permissions",
+            "POST",
+            "/admin/v1/roles",
+            headers_json,
+            {"name": "default", "permissions": {"default": True}},
+            (201),
+            {"Location": "/admin/v1/roles/", "Content-Type": "application/json"},
+            "json",
+        )
         default_role_id = engine.last_id if res else None
-        res = engine.test("Create role with token permissions", "POST", "/admin/v1/roles", headers_json,
-                          {"name": "tokens", "permissions": {"tokens": True}},   # is default required ?
-                          (201), {"Location": "/admin/v1/roles/", "Content-Type": "application/json"}, "json")
+        res = engine.test(
+            "Create role with token permissions",
+            "POST",
+            "/admin/v1/roles",
+            headers_json,
+            {
+                "name": "tokens",
+                "permissions": {"tokens": True},
+            },  # is default required ?
+            (201),
+            {"Location": "/admin/v1/roles/", "Content-Type": "application/json"},
+            "json",
+        )
         token_role_id = engine.last_id if res else None
         pr = "project-role mappings"
-        res = engine.test("Create user without "+pr, "POST", "/admin/v1/users", headers_json,
-                          {"username": "empty", "password": "empty"},
-                          201, {"Content-Type": "application/json"}, "json")
+        res = engine.test(
+            "Create user without " + pr,
+            "POST",
+            "/admin/v1/users",
+            headers_json,
+            {"username": "empty", "password": "empty"},
+            201,
+            {"Content-Type": "application/json"},
+            "json",
+        )
         empty_user_id = engine.last_id if res else None
-        if admin_project_id and test_project_id and project_admin_role_id and project_user_role_id:
+        if (
+            admin_project_id
+            and test_project_id
+            and project_admin_role_id
+            and project_user_role_id
+        ):
             data = {"username": "test", "password": "test"}
             data["project_role_mappings"] = [
                 {"project": test_project_id, "role": project_admin_role_id},
-                {"project": admin_project_id, "role": project_user_role_id}
+                {"project": admin_project_id, "role": project_user_role_id},
             ]
-            res = engine.test("Create user with "+pr, "POST", "/admin/v1/users", headers_json, data,
-                              (201), {"Content-Type": "application/json"}, "json")
+            res = engine.test(
+                "Create user with " + pr,
+                "POST",
+                "/admin/v1/users",
+                headers_json,
+                data,
+                (201),
+                {"Content-Type": "application/json"},
+                "json",
+            )
             test_user_id = engine.last_id if res else None
 
         # PUT
         if test_user_id:
-            engine.test("Modify test user's password", "PUT", "/admin/v1/users/"+test_user_id, headers_json,
-                        {"password": "password"},
-                        (204), {}, 0)
-        if empty_user_id and admin_project_id and test_project_id and project_admin_role_id and project_user_role_id:
-            data = {"project_role_mappings": [
-                {"project": test_project_id, "role": project_admin_role_id},
-                {"project": admin_project_id, "role": project_user_role_id}
-            ]}
-            engine.test("Modify empty user's "+pr, "PUT", "/admin/v1/users/"+empty_user_id,
-                        headers_json,
-                        data,
-                        (204), {}, 0)
+            engine.test(
+                "Modify test user's password",
+                "PUT",
+                "/admin/v1/users/" + test_user_id,
+                headers_json,
+                {"password": "password"},
+                (204),
+                {},
+                0,
+            )
+        if (
+            empty_user_id
+            and admin_project_id
+            and test_project_id
+            and project_admin_role_id
+            and project_user_role_id
+        ):
+            data = {
+                "project_role_mappings": [
+                    {"project": test_project_id, "role": project_admin_role_id},
+                    {"project": admin_project_id, "role": project_user_role_id},
+                ]
+            }
+            engine.test(
+                "Modify empty user's " + pr,
+                "PUT",
+                "/admin/v1/users/" + empty_user_id,
+                headers_json,
+                data,
+                (204),
+                {},
+                0,
+            )
 
         # DELETE
         if empty_user_id:
-            engine.test("Delete empty user", "DELETE", "/admin/v1/users/"+empty_user_id, headers_json, {},
-                        (204), {}, 0)
+            engine.test(
+                "Delete empty user",
+                "DELETE",
+                "/admin/v1/users/" + empty_user_id,
+                headers_json,
+                {},
+                (204),
+                {},
+                0,
+            )
         if test_user_id:
-            engine.test("Delete test user", "DELETE", "/admin/v1/users/"+test_user_id, headers_json, {},
-                        (204), {}, 0)
+            engine.test(
+                "Delete test user",
+                "DELETE",
+                "/admin/v1/users/" + test_user_id,
+                headers_json,
+                {},
+                (204),
+                {},
+                0,
+            )
         if empty_role_id:
-            engine.test("Delete empty role", "DELETE", "/admin/v1/roles/"+empty_role_id, headers_json, {},
-                        (204), {}, 0)
+            engine.test(
+                "Delete empty role",
+                "DELETE",
+                "/admin/v1/roles/" + empty_role_id,
+                headers_json,
+                {},
+                (204),
+                {},
+                0,
+            )
         if default_role_id:
-            engine.test("Delete default role", "DELETE", "/admin/v1/roles/"+default_role_id, headers_json, {},
-                        (204), {}, 0)
+            engine.test(
+                "Delete default role",
+                "DELETE",
+                "/admin/v1/roles/" + default_role_id,
+                headers_json,
+                {},
+                (204),
+                {},
+                0,
+            )
         if token_role_id:
-            engine.test("Delete token role", "DELETE", "/admin/v1/roles/"+token_role_id, headers_json, {},
-                        (204), {}, 0)
+            engine.test(
+                "Delete token role",
+                "DELETE",
+                "/admin/v1/roles/" + token_role_id,
+                headers_json,
+                {},
+                (204),
+                {},
+                0,
+            )
         if test_project_id:
-            engine.test("Delete test project", "DELETE", "/admin/v1/projects/"+test_project_id, headers_json, {},
-                        (204), {}, 0)
+            engine.test(
+                "Delete test project",
+                "DELETE",
+                "/admin/v1/projects/" + test_project_id,
+                headers_json,
+                {},
+                (204),
+                {},
+                0,
+            )
 
         # END Tests
 
-        engine.remove_authorization()   # To finish
+        engine.remove_authorization()  # To finish
 
 
-class TestNbiQuotas():
+class TestNbiQuotas:
     description = "Test NBI Quotas"
 
     @staticmethod
@@ -2730,36 +4972,72 @@ class TestNbiQuotas():
         admin_token = engine.last_id
 
         # Check that test project,user do not exist
-        res1 = engine.test("Check that test project doesn't exist", "GET", "/admin/v1/projects/"+test_project,
-                           headers_json, {}, (404), {}, True)
-        res2 = engine.test("Check that test user doesn't exist", "GET", "/admin/v1/users/"+test_username,
-                           headers_json, {}, (404), {}, True)
+        res1 = engine.test(
+            "Check that test project doesn't exist",
+            "GET",
+            "/admin/v1/projects/" + test_project,
+            headers_json,
+            {},
+            (404),
+            {},
+            True,
+        )
+        res2 = engine.test(
+            "Check that test user doesn't exist",
+            "GET",
+            "/admin/v1/users/" + test_username,
+            headers_json,
+            {},
+            (404),
+            {},
+            True,
+        )
         if None in [res1, res2]:
             engine.remove_authorization()
             logger.error("Test project and/or user already exist")
             return
 
         # Create test project&user
-        res = engine.test("Create test project", "POST", "/admin/v1/projects", headers_json,
-                          {"name": test_username,
-                           "quotas": {
-                               "vnfds": 2,
-                               "nsds": 2,
-                               "nsts": 1,
-                               "pdus": 1,
-                               "nsrs": 2,
-                               "nsis": 1,
-                               "vim_accounts": 1,
-                               "wim_accounts": 1,
-                               "sdns": 1,
-                           }
-                           },
-                          (201), r_header_json, "json")
+        res = engine.test(
+            "Create test project",
+            "POST",
+            "/admin/v1/projects",
+            headers_json,
+            {
+                "name": test_username,
+                "quotas": {
+                    "vnfds": 2,
+                    "nsds": 2,
+                    "nsts": 1,
+                    "pdus": 1,
+                    "nsrs": 2,
+                    "nsis": 1,
+                    "vim_accounts": 1,
+                    "wim_accounts": 1,
+                    "sdns": 1,
+                },
+            },
+            (201),
+            r_header_json,
+            "json",
+        )
         test_project_id = engine.last_id if res else None
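With these quotas in place, the checks that follow exercise quota enforcement: once the
project already holds the configured number of resources, the next create request is
expected to fail unless FORCE is used. Roughly, as inferred from the expected status codes
below rather than from a separate specification:

    create within quota            -> 201 / 202
    create once the quota is full  -> 422
    create with ?FORCE             -> 201 / 202 (quota bypassed)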
-        res = engine.test("Create test user", "POST", "/admin/v1/users", headers_json,
-                          {"username": test_username, "password": test_password,
-                           "project_role_mappings": [{"project": test_project, "role": "project_admin"}]},
-                          (201), r_header_json, "json")
+        res = engine.test(
+            "Create test user",
+            "POST",
+            "/admin/v1/users",
+            headers_json,
+            {
+                "username": test_username,
+                "password": test_password,
+                "project_role_mappings": [
+                    {"project": test_project, "role": "project_admin"}
+                ],
+            },
+            (201),
+            r_header_json,
+            "json",
+        )
         test_user_id = engine.last_id if res else None
 
         if test_project_id and test_user_id:
@@ -2773,327 +5051,647 @@ class TestNbiQuotas():
             user_token = engine.last_id
 
             # Create test VIM
-            res = engine.test("Create test VIM", "POST", "/admin/v1/vim_accounts", headers_json,
-                              {"name": test_vim,
-                               "vim_type": "openvim",
-                               "vim_user": test_username,
-                               "vim_password": test_password,
-                               "vim_tenant_name": test_project,
-                               "vim_url": "https://0.0.0.0:0/v0.0",
-                               },
-                              (202), r_header_json, "json")
+            res = engine.test(
+                "Create test VIM",
+                "POST",
+                "/admin/v1/vim_accounts",
+                headers_json,
+                {
+                    "name": test_vim,
+                    "vim_type": "openvim",
+                    "vim_user": test_username,
+                    "vim_password": test_password,
+                    "vim_tenant_name": test_project,
+                    "vim_url": "https://0.0.0.0:0/v0.0",
+                },
+                (202),
+                r_header_json,
+                "json",
+            )
             test_vim_ids += [engine.last_id if res else None]
 
-            res = engine.test("Try to create second test VIM", "POST", "/admin/v1/vim_accounts", headers_json,
-                              {"name": test_vim + "_2",
-                               "vim_type": "openvim",
-                               "vim_user": test_username,
-                               "vim_password": test_password,
-                               "vim_tenant_name": test_project,
-                               "vim_url": "https://0.0.0.0:0/v0.0",
-                               },
-                              (422), r_header_json, "json")
+            res = engine.test(
+                "Try to create second test VIM",
+                "POST",
+                "/admin/v1/vim_accounts",
+                headers_json,
+                {
+                    "name": test_vim + "_2",
+                    "vim_type": "openvim",
+                    "vim_user": test_username,
+                    "vim_password": test_password,
+                    "vim_tenant_name": test_project,
+                    "vim_url": "https://0.0.0.0:0/v0.0",
+                },
+                (422),
+                r_header_json,
+                "json",
+            )
             test_vim_ids += [engine.last_id if res is None else None]
 
-            res = engine.test("Try to create second test VIM with FORCE",
-                              "POST", "/admin/v1/vim_accounts?FORCE", headers_json,
-                              {"name": test_vim + "_3",
-                               "vim_type": "openvim",
-                               "vim_user": test_username,
-                               "vim_password": test_password,
-                               "vim_tenant_name": test_project,
-                               "vim_url": "https://0.0.0.0:0/v0.0",
-                               },
-                              (202), r_header_json, "json")
+            res = engine.test(
+                "Try to create second test VIM with FORCE",
+                "POST",
+                "/admin/v1/vim_accounts?FORCE",
+                headers_json,
+                {
+                    "name": test_vim + "_3",
+                    "vim_type": "openvim",
+                    "vim_user": test_username,
+                    "vim_password": test_password,
+                    "vim_tenant_name": test_project,
+                    "vim_url": "https://0.0.0.0:0/v0.0",
+                },
+                (202),
+                r_header_json,
+                "json",
+            )
             test_vim_ids += [engine.last_id if res else None]
 
             if test_vim_ids[0]:
 
                 # Download descriptor files (if required)
-                test_dir = "/tmp/"+test_username+"/"
+                test_dir = "/tmp/" + test_username + "/"
                 test_url = "https://osm-download.etsi.org/ftp/osm-6.0-six/7th-hackfest/packages/"
-                vnfd_filenames = ["slice_hackfest_vnfd.tar.gz", "slice_hackfest_middle_vnfd.tar.gz"]
-                nsd_filenames = ["slice_hackfest_nsd.tar.gz", "slice_hackfest_middle_nsd.tar.gz"]
+                vnfd_filenames = [
+                    "slice_hackfest_vnfd.tar.gz",
+                    "slice_hackfest_middle_vnfd.tar.gz",
+                ]
+                nsd_filenames = [
+                    "slice_hackfest_nsd.tar.gz",
+                    "slice_hackfest_middle_nsd.tar.gz",
+                ]
                 nst_filenames = ["slice_hackfest_nstd.yaml"]
                 pdu_filenames = ["PDU_router.yaml"]
-                desc_filenames = vnfd_filenames + nsd_filenames + nst_filenames + pdu_filenames
+                desc_filenames = (
+                    vnfd_filenames + nsd_filenames + nst_filenames + pdu_filenames
+                )
                 if not os.path.exists(test_dir):
                     os.makedirs(test_dir)
                 for filename in desc_filenames:
-                    if not os.path.exists(test_dir+filename):
-                        res = requests.get(test_url+filename)
+                    if not os.path.exists(test_dir + filename):
+                        res = requests.get(test_url + filename)
                         if res.status_code < 300:
-                            with open(test_dir+filename, "wb") as file:
+                            with open(test_dir + filename, "wb") as file:
                                 file.write(res.content)
 
-                if all([os.path.exists(test_dir+p) for p in desc_filenames]):
+                if all([os.path.exists(test_dir + p) for p in desc_filenames]):
 
                     # Test VNFD Quotas
-                    res = engine.test("Create test VNFD #1", "POST", "/vnfpkgm/v1/vnf_packages_content",
-                                      headers_zip_json, "@b"+test_dir+vnfd_filenames[0],
-                                      (201), r_header_json, "json")
+                    res = engine.test(
+                        "Create test VNFD #1",
+                        "POST",
+                        "/vnfpkgm/v1/vnf_packages_content",
+                        headers_zip_json,
+                        "@b" + test_dir + vnfd_filenames[0],
+                        (201),
+                        r_header_json,
+                        "json",
+                    )
                     test_vnfd_ids += [engine.last_id if res else None]
-                    res = engine.test("Create test VNFD #2", "POST", "/vnfpkgm/v1/vnf_packages_content",
-                                      headers_zip_json, "@b"+test_dir+vnfd_filenames[1],
-                                      (201), r_header_json, "json")
+                    res = engine.test(
+                        "Create test VNFD #2",
+                        "POST",
+                        "/vnfpkgm/v1/vnf_packages_content",
+                        headers_zip_json,
+                        "@b" + test_dir + vnfd_filenames[1],
+                        (201),
+                        r_header_json,
+                        "json",
+                    )
                     test_vnfd_ids += [engine.last_id if res else None]
-                    res = engine.test("Try to create extra test VNFD", "POST",
-                                      "/vnfpkgm/v1/vnf_packages_content",
-                                      headers_zip_json, "@b"+test_dir+vnfd_filenames[0],
-                                      (422), r_header_json, "json")
+                    res = engine.test(
+                        "Try to create extra test VNFD",
+                        "POST",
+                        "/vnfpkgm/v1/vnf_packages_content",
+                        headers_zip_json,
+                        "@b" + test_dir + vnfd_filenames[0],
+                        (422),
+                        r_header_json,
+                        "json",
+                    )
                     test_vnfd_ids += [engine.last_id if res is None else None]
-                    res = engine.test("Try to create extra test VNFD with FORCE",
-                                      "POST", "/vnfpkgm/v1/vnf_packages_content?FORCE",
-                                      headers_zip_json, "@b"+test_dir+vnfd_filenames[0],
-                                      (201), r_header_json, "json")
+                    res = engine.test(
+                        "Try to create extra test VNFD with FORCE",
+                        "POST",
+                        "/vnfpkgm/v1/vnf_packages_content?FORCE",
+                        headers_zip_json,
+                        "@b" + test_dir + vnfd_filenames[0],
+                        (201),
+                        r_header_json,
+                        "json",
+                    )
                     test_vnfd_ids += [engine.last_id if res else None]
 
                     # Remove extra VNFDs to prevent further errors
                     for i in [2, 3]:
                         if test_vnfd_ids[i]:
-                            res = engine.test("Delete test VNFD #" + str(i), "DELETE",
-                                              "/vnfpkgm/v1/vnf_packages_content/"+test_vnfd_ids[i]+"?FORCE",
-                                              headers_json, {}, (204), {}, 0)
+                            res = engine.test(
+                                "Delete test VNFD #" + str(i),
+                                "DELETE",
+                                "/vnfpkgm/v1/vnf_packages_content/"
+                                + test_vnfd_ids[i]
+                                + "?FORCE",
+                                headers_json,
+                                {},
+                                (204),
+                                {},
+                                0,
+                            )
                             if res:
                                 test_vnfd_ids[i] = None
 
                     if test_vnfd_ids[0] and test_vnfd_ids[1]:
 
                         # Test NSD Quotas
-                        res = engine.test("Create test NSD #1", "POST", "/nsd/v1/ns_descriptors_content",
-                                          headers_zip_json, "@b"+test_dir+nsd_filenames[0],
-                                          (201), r_header_json, "json")
+                        res = engine.test(
+                            "Create test NSD #1",
+                            "POST",
+                            "/nsd/v1/ns_descriptors_content",
+                            headers_zip_json,
+                            "@b" + test_dir + nsd_filenames[0],
+                            (201),
+                            r_header_json,
+                            "json",
+                        )
                         test_nsd_ids += [engine.last_id if res else None]
-                        res = engine.test("Create test NSD #2", "POST", "/nsd/v1/ns_descriptors_content",
-                                          headers_zip_json, "@b"+test_dir+nsd_filenames[1],
-                                          (201), r_header_json, "json")
+                        res = engine.test(
+                            "Create test NSD #2",
+                            "POST",
+                            "/nsd/v1/ns_descriptors_content",
+                            headers_zip_json,
+                            "@b" + test_dir + nsd_filenames[1],
+                            (201),
+                            r_header_json,
+                            "json",
+                        )
                         test_nsd_ids += [engine.last_id if res else None]
-                        res = engine.test("Try to create extra test NSD", "POST", "/nsd/v1/ns_descriptors_content",
-                                          headers_zip_json, "@b"+test_dir+nsd_filenames[0],
-                                          (422), r_header_json, "json")
+                        res = engine.test(
+                            "Try to create extra test NSD",
+                            "POST",
+                            "/nsd/v1/ns_descriptors_content",
+                            headers_zip_json,
+                            "@b" + test_dir + nsd_filenames[0],
+                            (422),
+                            r_header_json,
+                            "json",
+                        )
                         test_nsd_ids += [engine.last_id if res is None else None]
-                        res = engine.test("Try to create extra test NSD with FORCE",
-                                          "POST", "/nsd/v1/ns_descriptors_content?FORCE",
-                                          headers_zip_json, "@b"+test_dir+nsd_filenames[0],
-                                          (201), r_header_json, "json")
+                        res = engine.test(
+                            "Try to create extra test NSD with FORCE",
+                            "POST",
+                            "/nsd/v1/ns_descriptors_content?FORCE",
+                            headers_zip_json,
+                            "@b" + test_dir + nsd_filenames[0],
+                            (201),
+                            r_header_json,
+                            "json",
+                        )
                         test_nsd_ids += [engine.last_id if res else None]
 
                         # Remove extra NSDs to prevent further errors
                         for i in [2, 3]:
                             if test_nsd_ids[i]:
-                                res = engine.test("Delete test NSD #" + str(i), "DELETE",
-                                                  "/nsd/v1/ns_descriptors_content/"+test_nsd_ids[i]+"?FORCE",
-                                                  headers_json, {}, (204), {}, 0)
+                                res = engine.test(
+                                    "Delete test NSD #" + str(i),
+                                    "DELETE",
+                                    "/nsd/v1/ns_descriptors_content/"
+                                    + test_nsd_ids[i]
+                                    + "?FORCE",
+                                    headers_json,
+                                    {},
+                                    (204),
+                                    {},
+                                    0,
+                                )
                                 if res:
                                     test_nsd_ids[i] = None
 
                         if test_nsd_ids[0] and test_nsd_ids[1]:
 
                             # Test NSR Quotas
-                            res = engine.test("Create test NSR #1", "POST", "/nslcm/v1/ns_instances_content",
-                                              headers_json,
-                                              {"nsName": test_username+"_1",
-                                               "nsdId": test_nsd_ids[0],
-                                               "vimAccountId": test_vim_ids[0],
-                                               },
-                                              (201), r_header_json, "json")
+                            res = engine.test(
+                                "Create test NSR #1",
+                                "POST",
+                                "/nslcm/v1/ns_instances_content",
+                                headers_json,
+                                {
+                                    "nsName": test_username + "_1",
+                                    "nsdId": test_nsd_ids[0],
+                                    "vimAccountId": test_vim_ids[0],
+                                },
+                                (201),
+                                r_header_json,
+                                "json",
+                            )
                             test_nsr_ids += [engine.last_id if res else None]
-                            res = engine.test("Create test NSR #2", "POST", "/nslcm/v1/ns_instances_content",
-                                              headers_json,
-                                              {"nsName": test_username+"_2",
-                                               "nsdId": test_nsd_ids[1],
-                                               "vimAccountId": test_vim_ids[0],
-                                               },
-                                              (201), r_header_json, "json")
+                            res = engine.test(
+                                "Create test NSR #2",
+                                "POST",
+                                "/nslcm/v1/ns_instances_content",
+                                headers_json,
+                                {
+                                    "nsName": test_username + "_2",
+                                    "nsdId": test_nsd_ids[1],
+                                    "vimAccountId": test_vim_ids[0],
+                                },
+                                (201),
+                                r_header_json,
+                                "json",
+                            )
                             test_nsr_ids += [engine.last_id if res else None]
-                            res = engine.test("Try to create extra test NSR", "POST", "/nslcm/v1/ns_instances_content",
-                                              headers_json,
-                                              {"nsName": test_username+"_3",
-                                               "nsdId": test_nsd_ids[0],
-                                               "vimAccountId": test_vim_ids[0],
-                                               },
-                                              (422), r_header_json, "json")
+                            res = engine.test(
+                                "Try to create extra test NSR",
+                                "POST",
+                                "/nslcm/v1/ns_instances_content",
+                                headers_json,
+                                {
+                                    "nsName": test_username + "_3",
+                                    "nsdId": test_nsd_ids[0],
+                                    "vimAccountId": test_vim_ids[0],
+                                },
+                                (422),
+                                r_header_json,
+                                "json",
+                            )
                             test_nsr_ids += [engine.last_id if res is None else None]
-                            res = engine.test("Try to create test NSR with FORCE", "POST",
-                                              "/nslcm/v1/ns_instances_content?FORCE", headers_json,
-                                              {"nsName": test_username+"_4",
-                                               "nsdId": test_nsd_ids[0],
-                                               "vimAccountId": test_vim_ids[0],
-                                               },
-                                              (201), r_header_json, "json")
+                            res = engine.test(
+                                "Try to create test NSR with FORCE",
+                                "POST",
+                                "/nslcm/v1/ns_instances_content?FORCE",
+                                headers_json,
+                                {
+                                    "nsName": test_username + "_4",
+                                    "nsdId": test_nsd_ids[0],
+                                    "vimAccountId": test_vim_ids[0],
+                                },
+                                (201),
+                                r_header_json,
+                                "json",
+                            )
                             test_nsr_ids += [engine.last_id if res else None]
 
                             # Test NST Quotas
-                            res = engine.test("Create test NST", "POST", "/nst/v1/netslice_templates_content",
-                                              headers_txt_json, "@b"+test_dir+nst_filenames[0],
-                                              (201), r_header_json, "json")
+                            res = engine.test(
+                                "Create test NST",
+                                "POST",
+                                "/nst/v1/netslice_templates_content",
+                                headers_txt_json,
+                                "@b" + test_dir + nst_filenames[0],
+                                (201),
+                                r_header_json,
+                                "json",
+                            )
                             test_nst_ids += [engine.last_id if res else None]
-                            res = engine.test("Try to create extra test NST", "POST",
-                                              "/nst/v1/netslice_templates_content",
-                                              headers_txt_json, "@b"+test_dir+nst_filenames[0],
-                                              (422), r_header_json, "json")
+                            res = engine.test(
+                                "Try to create extra test NST",
+                                "POST",
+                                "/nst/v1/netslice_templates_content",
+                                headers_txt_json,
+                                "@b" + test_dir + nst_filenames[0],
+                                (422),
+                                r_header_json,
+                                "json",
+                            )
                             test_nst_ids += [engine.last_id if res is None else None]
-                            res = engine.test("Try to create extra test NST with FORCE", "POST",
-                                              "/nst/v1/netslice_templates_content?FORCE",
-                                              headers_txt_json, "@b"+test_dir+nst_filenames[0],
-                                              (201), r_header_json, "json")
+                            res = engine.test(
+                                "Try to create extra test NST with FORCE",
+                                "POST",
+                                "/nst/v1/netslice_templates_content?FORCE",
+                                headers_txt_json,
+                                "@b" + test_dir + nst_filenames[0],
+                                (201),
+                                r_header_json,
+                                "json",
+                            )
                             test_nst_ids += [engine.last_id if res else None]
 
                             if test_nst_ids[0]:
                                 # Remove NSR Quota
-                                engine.set_header({"Authorization": "Bearer {}".format(admin_token)})
-                                res = engine.test("Remove NSR Quota", "PUT", "/admin/v1/projects/"+test_project_id,
-                                                  headers_json,
-                                                  {"quotas": {"nsrs": None}},
-                                                  (204), {}, 0)
-                                engine.set_header({"Authorization": "Bearer {}".format(user_token)})
+                                engine.set_header(
+                                    {"Authorization": "Bearer {}".format(admin_token)}
+                                )
+                                res = engine.test(
+                                    "Remove NSR Quota",
+                                    "PUT",
+                                    "/admin/v1/projects/" + test_project_id,
+                                    headers_json,
+                                    {"quotas": {"nsrs": None}},
+                                    (204),
+                                    {},
+                                    0,
+                                )
+                                engine.set_header(
+                                    {"Authorization": "Bearer {}".format(user_token)}
+                                )
                                 if res:
                                     # Test NSI Quotas
-                                    res = engine.test("Create test NSI", "POST",
-                                                      "/nsilcm/v1/netslice_instances_content", headers_json,
-                                                      {"nsiName": test_username,
-                                                       "nstId": test_nst_ids[0],
-                                                       "vimAccountId": test_vim_ids[0],
-                                                       },
-                                                      (201), r_header_json, "json")
+                                    res = engine.test(
+                                        "Create test NSI",
+                                        "POST",
+                                        "/nsilcm/v1/netslice_instances_content",
+                                        headers_json,
+                                        {
+                                            "nsiName": test_username,
+                                            "nstId": test_nst_ids[0],
+                                            "vimAccountId": test_vim_ids[0],
+                                        },
+                                        (201),
+                                        r_header_json,
+                                        "json",
+                                    )
                                     test_nsi_ids += [engine.last_id if res else None]
-                                    res = engine.test("Try to create extra test NSI", "POST",
-                                                      "/nsilcm/v1/netslice_instances_content", headers_json,
-                                                      {"nsiName": test_username,
-                                                       "nstId": test_nst_ids[0],
-                                                       "vimAccountId": test_vim_ids[0],
-                                                       },
-                                                      (400), r_header_json, "json")
-                                    test_nsi_ids += [engine.last_id if res is None else None]
-                                    res = engine.test("Try to create extra test NSI with FORCE", "POST",
-                                                      "/nsilcm/v1/netslice_instances_content?FORCE", headers_json,
-                                                      {"nsiName": test_username,
-                                                       "nstId": test_nst_ids[0],
-                                                       "vimAccountId": test_vim_ids[0],
-                                                       },
-                                                      (201), r_header_json, "json")
+                                    res = engine.test(
+                                        "Try to create extra test NSI",
+                                        "POST",
+                                        "/nsilcm/v1/netslice_instances_content",
+                                        headers_json,
+                                        {
+                                            "nsiName": test_username,
+                                            "nstId": test_nst_ids[0],
+                                            "vimAccountId": test_vim_ids[0],
+                                        },
+                                        (400),
+                                        r_header_json,
+                                        "json",
+                                    )
+                                    test_nsi_ids += [
+                                        engine.last_id if res is None else None
+                                    ]
+                                    res = engine.test(
+                                        "Try to create extra test NSI with FORCE",
+                                        "POST",
+                                        "/nsilcm/v1/netslice_instances_content?FORCE",
+                                        headers_json,
+                                        {
+                                            "nsiName": test_username,
+                                            "nstId": test_nst_ids[0],
+                                            "vimAccountId": test_vim_ids[0],
+                                        },
+                                        (201),
+                                        r_header_json,
+                                        "json",
+                                    )
                                     test_nsi_ids += [engine.last_id if res else None]
 
                     # Test PDU Quotas
-                    with open(test_dir+pdu_filenames[0], "rb") as file:
-                        pdu_text = re.sub(r"ip-address: *\[[^\]]*\]", "ip-address: '0.0.0.0'",
-                                          file.read().decode("utf-8"))
-                    with open(test_dir+pdu_filenames[0], "wb") as file:
+                    with open(test_dir + pdu_filenames[0], "rb") as file:
+                        pdu_text = re.sub(
+                            r"ip-address: *\[[^\]]*\]",
+                            "ip-address: '0.0.0.0'",
+                            file.read().decode("utf-8"),
+                        )
+                    with open(test_dir + pdu_filenames[0], "wb") as file:
                         file.write(pdu_text.encode("utf-8"))
-                    res = engine.test("Create test PDU", "POST", "/pdu/v1/pdu_descriptors",
-                                      headers_yaml, "@b"+test_dir+pdu_filenames[0],
-                                      (201), r_header_yaml, "yaml")
+                    res = engine.test(
+                        "Create test PDU",
+                        "POST",
+                        "/pdu/v1/pdu_descriptors",
+                        headers_yaml,
+                        "@b" + test_dir + pdu_filenames[0],
+                        (201),
+                        r_header_yaml,
+                        "yaml",
+                    )
                     test_pdu_ids += [engine.last_id if res else None]
-                    res = engine.test("Try to create extra test PDU", "POST", "/pdu/v1/pdu_descriptors",
-                                      headers_yaml, "@b"+test_dir+pdu_filenames[0],
-                                      (422), r_header_yaml, "yaml")
+                    res = engine.test(
+                        "Try to create extra test PDU",
+                        "POST",
+                        "/pdu/v1/pdu_descriptors",
+                        headers_yaml,
+                        "@b" + test_dir + pdu_filenames[0],
+                        (422),
+                        r_header_yaml,
+                        "yaml",
+                    )
                     test_pdu_ids += [engine.last_id if res is None else None]
-                    res = engine.test("Try to create extra test PDU with FORCE", "POST",
-                                      "/pdu/v1/pdu_descriptors?FORCE",
-                                      headers_yaml, "@b"+test_dir+pdu_filenames[0],
-                                      (201), r_header_yaml, "yaml")
+                    res = engine.test(
+                        "Try to create extra test PDU with FORCE",
+                        "POST",
+                        "/pdu/v1/pdu_descriptors?FORCE",
+                        headers_yaml,
+                        "@b" + test_dir + pdu_filenames[0],
+                        (201),
+                        r_header_yaml,
+                        "yaml",
+                    )
                     test_pdu_ids += [engine.last_id if res else None]
 
                     # Cleanup
                     for i, id in enumerate(test_nsi_ids):
                         if id:
-                            engine.test("Delete test NSI #"+str(i), "DELETE",
-                                        "/nsilcm/v1/netslice_instances_content/"+id+"?FORCE",
-                                        headers_json, {}, (204), {}, 0)
+                            engine.test(
+                                "Delete test NSI #" + str(i),
+                                "DELETE",
+                                "/nsilcm/v1/netslice_instances_content/"
+                                + id
+                                + "?FORCE",
+                                headers_json,
+                                {},
+                                (204),
+                                {},
+                                0,
+                            )
                     for i, id in enumerate(test_nsr_ids):
                         if id:
-                            engine.test("Delete test NSR #"+str(i), "DELETE",
-                                        "/nslcm/v1/ns_instances_content/"+id+"?FORCE",
-                                        headers_json, {}, (204), {}, 0)
+                            engine.test(
+                                "Delete test NSR #" + str(i),
+                                "DELETE",
+                                "/nslcm/v1/ns_instances_content/" + id + "?FORCE",
+                                headers_json,
+                                {},
+                                (204),
+                                {},
+                                0,
+                            )
                     for i, id in enumerate(test_nst_ids):
                         if id:
-                            engine.test("Delete test NST #"+str(i), "DELETE",
-                                        "/nst/v1/netslice_templates_content/"+id+"?FORCE",
-                                        headers_json, {}, (204), {}, 0)
+                            engine.test(
+                                "Delete test NST #" + str(i),
+                                "DELETE",
+                                "/nst/v1/netslice_templates_content/" + id + "?FORCE",
+                                headers_json,
+                                {},
+                                (204),
+                                {},
+                                0,
+                            )
                     for i, id in enumerate(test_nsd_ids):
                         if id:
-                            engine.test("Delete test NSD #"+str(i), "DELETE",
-                                        "/nsd/v1/ns_descriptors_content/"+id+"?FORCE",
-                                        headers_json, {}, (204), {}, 0)
+                            engine.test(
+                                "Delete test NSD #" + str(i),
+                                "DELETE",
+                                "/nsd/v1/ns_descriptors_content/" + id + "?FORCE",
+                                headers_json,
+                                {},
+                                (204),
+                                {},
+                                0,
+                            )
                     for i, id in enumerate(test_vnfd_ids):
                         if id:
-                            engine.test("Delete test VNFD #"+str(i), "DELETE",
-                                        "/vnfpkgm/v1/vnf_packages_content/"+id+"?FORCE",
-                                        headers_json, {}, (204), {}, 0)
+                            engine.test(
+                                "Delete test VNFD #" + str(i),
+                                "DELETE",
+                                "/vnfpkgm/v1/vnf_packages_content/" + id + "?FORCE",
+                                headers_json,
+                                {},
+                                (204),
+                                {},
+                                0,
+                            )
                     for i, id in enumerate(test_pdu_ids):
                         if id:
-                            engine.test("Delete test PDU #"+str(i), "DELETE",
-                                        "/pdu/v1/pdu_descriptors/"+id+"?FORCE",
-                                        headers_json, {}, (204), {}, 0)
+                            engine.test(
+                                "Delete test PDU #" + str(i),
+                                "DELETE",
+                                "/pdu/v1/pdu_descriptors/" + id + "?FORCE",
+                                headers_json,
+                                {},
+                                (204),
+                                {},
+                                0,
+                            )
 
                     # END Test NBI Quotas
 
             # Test WIM Quotas
-            res = engine.test("Create test WIM", "POST", "/admin/v1/wim_accounts", headers_json,
-                              {"name": test_wim,
-                               "wim_type": "onos",
-                               "wim_url": "https://0.0.0.0:0/v0.0",
-                               },
-                              (202), r_header_json, "json")
+            res = engine.test(
+                "Create test WIM",
+                "POST",
+                "/admin/v1/wim_accounts",
+                headers_json,
+                {
+                    "name": test_wim,
+                    "wim_type": "onos",
+                    "wim_url": "https://0.0.0.0:0/v0.0",
+                },
+                (202),
+                r_header_json,
+                "json",
+            )
             test_wim_ids += [engine.last_id if res else None]
-            res = engine.test("Try to create second test WIM", "POST", "/admin/v1/wim_accounts", headers_json,
-                              {"name": test_wim + "_2",
-                               "wim_type": "onos",
-                               "wim_url": "https://0.0.0.0:0/v0.0",
-                               },
-                              (422), r_header_json, "json")
+            res = engine.test(
+                "Try to create second test WIM",
+                "POST",
+                "/admin/v1/wim_accounts",
+                headers_json,
+                {
+                    "name": test_wim + "_2",
+                    "wim_type": "onos",
+                    "wim_url": "https://0.0.0.0:0/v0.0",
+                },
+                (422),
+                r_header_json,
+                "json",
+            )
             test_wim_ids += [engine.last_id if res is None else None]
-            res = engine.test("Try to create second test WIM with FORCE", "POST", "/admin/v1/wim_accounts?FORCE",
-                              headers_json,
-                              {"name": test_wim + "_3",
-                               "wim_type": "onos",
-                               "wim_url": "https://0.0.0.0:0/v0.0",
-                               },
-                              (202), r_header_json, "json")
+            res = engine.test(
+                "Try to create second test WIM with FORCE",
+                "POST",
+                "/admin/v1/wim_accounts?FORCE",
+                headers_json,
+                {
+                    "name": test_wim + "_3",
+                    "wim_type": "onos",
+                    "wim_url": "https://0.0.0.0:0/v0.0",
+                },
+                (202),
+                r_header_json,
+                "json",
+            )
             test_wim_ids += [engine.last_id if res else None]
 
             # Test SDN Quotas
-            res = engine.test("Create test SDN", "POST", "/admin/v1/sdns", headers_json,
-                              {"name": test_sdn,
-                               "type": "onos",
-                               "ip": "0.0.0.0",
-                               "port": 9999,
-                               "dpid": "00:00:00:00:00:00:00:00",
-                               },
-                              (202), r_header_json, "json")
+            res = engine.test(
+                "Create test SDN",
+                "POST",
+                "/admin/v1/sdns",
+                headers_json,
+                {
+                    "name": test_sdn,
+                    "type": "onos",
+                    "ip": "0.0.0.0",
+                    "port": 9999,
+                    "dpid": "00:00:00:00:00:00:00:00",
+                },
+                (202),
+                r_header_json,
+                "json",
+            )
             test_sdn_ids += [engine.last_id if res else None]
-            res = engine.test("Try to create second test SDN", "POST", "/admin/v1/sdns", headers_json,
-                              {"name": test_sdn + "_2",
-                               "type": "onos",
-                               "ip": "0.0.0.0",
-                               "port": 9999,
-                               "dpid": "00:00:00:00:00:00:00:00",
-                               },
-                              (422), r_header_json, "json")
+            res = engine.test(
+                "Try to create second test SDN",
+                "POST",
+                "/admin/v1/sdns",
+                headers_json,
+                {
+                    "name": test_sdn + "_2",
+                    "type": "onos",
+                    "ip": "0.0.0.0",
+                    "port": 9999,
+                    "dpid": "00:00:00:00:00:00:00:00",
+                },
+                (422),
+                r_header_json,
+                "json",
+            )
             test_sdn_ids += [engine.last_id if res is None else None]
-            res = engine.test("Try to create second test SDN with FORCE", "POST", "/admin/v1/sdns?FORCE", headers_json,
-                              {"name": test_sdn + "_3",
-                               "type": "onos",
-                               "ip": "0.0.0.0",
-                               "port": 9999,
-                               "dpid": "00:00:00:00:00:00:00:00",
-                               },
-                              (202), r_header_json, "json")
+            res = engine.test(
+                "Try to create second test SDN with FORCE",
+                "POST",
+                "/admin/v1/sdns?FORCE",
+                headers_json,
+                {
+                    "name": test_sdn + "_3",
+                    "type": "onos",
+                    "ip": "0.0.0.0",
+                    "port": 9999,
+                    "dpid": "00:00:00:00:00:00:00:00",
+                },
+                (202),
+                r_header_json,
+                "json",
+            )
             test_sdn_ids += [engine.last_id if res else None]
 
             # Cleanup
             for i, id in enumerate(test_vim_ids):
                 if id:
-                    engine.test("Delete test VIM #"+str(i), "DELETE", "/admin/v1/vim_accounts/"+id+"?FORCE",
-                                headers_json, {}, (202), {}, 0)
+                    engine.test(
+                        "Delete test VIM #" + str(i),
+                        "DELETE",
+                        "/admin/v1/vim_accounts/" + id + "?FORCE",
+                        headers_json,
+                        {},
+                        (202),
+                        {},
+                        0,
+                    )
             for i, id in enumerate(test_wim_ids):
                 if id:
-                    engine.test("Delete test WIM #"+str(i), "DELETE", "/admin/v1/wim_accounts/"+id+"?FORCE",
-                                headers_json, {}, (202), {}, 0)
+                    engine.test(
+                        "Delete test WIM #" + str(i),
+                        "DELETE",
+                        "/admin/v1/wim_accounts/" + id + "?FORCE",
+                        headers_json,
+                        {},
+                        (202),
+                        {},
+                        0,
+                    )
             for i, id in enumerate(test_sdn_ids):
                 if id:
-                    engine.test("Delete test SDN #"+str(i), "DELETE", "/admin/v1/sdns/"+id+"?FORCE",
-                                headers_json, {}, (202), {}, 0)
+                    engine.test(
+                        "Delete test SDN #" + str(i),
+                        "DELETE",
+                        "/admin/v1/sdns/" + id + "?FORCE",
+                        headers_json,
+                        {},
+                        (202),
+                        {},
+                        0,
+                    )
 
             # Release user access
             engine.remove_authorization()
@@ -3104,11 +5702,27 @@ class TestNbiQuotas():
         engine.project = admin_project
         engine.get_autorization()
         if test_user_id:
-            engine.test("Delete test user", "DELETE", "/admin/v1/users/"+test_user_id+"?FORCE",
-                        headers_json, {}, (204), {}, 0)
+            engine.test(
+                "Delete test user",
+                "DELETE",
+                "/admin/v1/users/" + test_user_id + "?FORCE",
+                headers_json,
+                {},
+                (204),
+                {},
+                0,
+            )
         if test_project_id:
-            engine.test("Delete test project", "DELETE", "/admin/v1/projects/"+test_project_id+"?FORCE",
-                        headers_json, {}, (204), {}, 0)
+            engine.test(
+                "Delete test project",
+                "DELETE",
+                "/admin/v1/projects/" + test_project_id + "?FORCE",
+                headers_json,
+                {},
+                (204),
+                {},
+                0,
+            )
         engine.remove_authorization()
 
     # END class TestNbiQuotas
@@ -3122,12 +5736,32 @@ if __name__ == "__main__":
     requests.packages.urllib3.disable_warnings()
     try:
         logging.basicConfig(format="%(levelname)s %(message)s", level=logging.ERROR)
-        logger = logging.getLogger('NBI')
+        logger = logging.getLogger("NBI")
         # load parameters and configuration
-        opts, args = getopt.getopt(sys.argv[1:], "hvu:p:",
-                                   ["url=", "user=", "password=", "help", "version", "verbose", "no-verbose",
-                                    "project=", "insecure", "timeout", "timeout-deploy", "timeout-configure",
-                                    "test=", "list", "test-osm", "manual-check", "params=", 'fail-fast'])
+        opts, args = getopt.getopt(
+            sys.argv[1:],
+            "hvu:p:",
+            [
+                "url=",
+                "user=",
+                "password=",
+                "help",
+                "version",
+                "verbose",
+                "no-verbose",
+                "project=",
+                "insecure",
+                "timeout",
+                "timeout-deploy",
+                "timeout-configure",
+                "test=",
+                "list",
+                "test-osm",
+                "manual-check",
+                "params=",
+                "fail-fast",
+            ],
+        )
         url = "https://localhost:9999/osm"
         user = password = project = "admin"
         test_osm = False
@@ -3167,7 +5801,7 @@ if __name__ == "__main__":
         for o, a in opts:
             # print("parameter:", o, a)
             if o == "--version":
-                print("test version " + __version__ + ' ' + version_date)
+                print("test version " + __version__ + " " + version_date)
                 exit()
             elif o == "--list":
                 for test, test_class in sorted(test_classes.items()):
@@ -3197,8 +5831,12 @@ if __name__ == "__main__":
             elif o == "--test":
                 for _test in a.split(","):
                     if _test not in test_classes:
-                        print("Invalid test name '{}'. Use option '--list' to show available tests".format(_test),
-                              file=sys.stderr)
+                        print(
+                            "Invalid test name '{}'. Use option '--list' to show available tests".format(
+                                _test
+                            ),
+                            file=sys.stderr,
+                        )
                         exit(1)
                     test_to_do.append(_test)
             elif o == "--params":
@@ -3233,7 +5871,9 @@ if __name__ == "__main__":
                     break
                 text_index += 1
                 test_class = test_classes[test]
-                test_class().run(test_rest, test_osm, manual_check, test_params.get(text_index))
+                test_class().run(
+                    test_rest, test_osm, manual_check, test_params.get(text_index)
+                )
         else:
             for test, test_class in sorted(test_classes.items()):
                 if fail_fast and test_rest.failed_tests:
index 6835654..d066d14 100755 (executable)
@@ -40,13 +40,17 @@ if __name__ == "__main__":
             exit(0)
 
         if len(sys.argv) != 4:
-            print("missing parameters. Type --help for more information", file=sys.stderr)
+            print(
+                "missing parameters. Type --help for more information", file=sys.stderr
+            )
             exit(1)
 
         topic, key, message = sys.argv[1:]
         host = getenv("OSMNBI_HOST", "localhost")
         port = getenv("OSMNBI_PORT", "9999")
-        url = "https://{host}:{port}/osm/test/message/{topic}".format(host=host, port=port, topic=topic)
+        url = "https://{host}:{port}/osm/test/message/{topic}".format(
+            host=host, port=port, topic=topic
+        )
         print(url)
         data = {key: message}
 
index 74528f8..734a289 100755 (executable)
@@ -43,7 +43,7 @@ test_name = "test-user"
 
 def norm(str):
     """Normalize string for checking"""
-    return ' '.join(str.strip().split()).lower()
+    return " ".join(str.strip().split()).lower()
 
 
 class TestVcaTopic(TestCase):
@@ -119,7 +119,7 @@ class TestVcaTopic(TestCase):
 
         self.db.get_list.assert_called_with(
             "vim_accounts",
-            {"vca": _id, '_admin.projects_read.cont': 'project-id'},
+            {"vca": _id, "_admin.projects_read.cont": "project-id"},
         )
         mock_check_conflict_on_del.assert_called_with(session, _id, db_content)
 
@@ -154,19 +154,18 @@ class TestVcaTopic(TestCase):
                 context.exception,
                 EngineException(
                     "There is at least one VIM account using this vca",
-                    http_code=HTTPStatus.CONFLICT
-                )
+                    http_code=HTTPStatus.CONFLICT,
+                ),
             )
 
         self.db.get_list.assert_called_with(
             "vim_accounts",
-            {"vca": _id, '_admin.projects_read.cont': 'project-id'},
+            {"vca": _id, "_admin.projects_read.cont": "project-id"},
         )
         mock_check_conflict_on_del.assert_not_called()
 
 
 class Test_ProjectTopicAuth(TestCase):
-
     @classmethod
     def setUpClass(cls):
         cls.test_name = "test-project-topic"
@@ -177,8 +176,15 @@ class Test_ProjectTopicAuth(TestCase):
         self.msg = Mock(msgbase.MsgBase())
         self.auth = Mock(authconn.Authconn(None, None, None))
         self.topic = ProjectTopicAuth(self.db, self.fs, self.msg, self.auth)
-        self.fake_session = {"username": self.test_name, "project_id": (test_pid,), "method": None,
-                             "admin": True, "force": False, "public": False, "allow_show_user_project_role": True}
+        self.fake_session = {
+            "username": self.test_name,
+            "project_id": (test_pid,),
+            "method": None,
+            "admin": True,
+            "force": False,
+            "public": False,
+            "allow_show_user_project_role": True,
+        }
         self.topic.check_quota = Mock(return_value=None)  # skip quota
 
     def test_new_project(self):
@@ -187,37 +193,64 @@ class Test_ProjectTopicAuth(TestCase):
             pid1 = str(uuid4())
             self.auth.get_project_list.return_value = []
             self.auth.create_project.return_value = pid1
-            pid2, oid = self.topic.new(rollback, self.fake_session, {"name": self.test_name, "quotas": {}})
+            pid2, oid = self.topic.new(
+                rollback, self.fake_session, {"name": self.test_name, "quotas": {}}
+            )
             self.assertEqual(len(rollback), 1, "Wrong rollback length")
             self.assertEqual(pid2, pid1, "Wrong project identifier")
             content = self.auth.create_project.call_args[0][0]
             self.assertEqual(content["name"], self.test_name, "Wrong project name")
             self.assertEqual(content["quotas"], {}, "Wrong quotas")
             self.assertIsNotNone(content["_admin"]["created"], "Wrong creation time")
-            self.assertEqual(content["_admin"]["modified"], content["_admin"]["created"], "Wrong modification time")
+            self.assertEqual(
+                content["_admin"]["modified"],
+                content["_admin"]["created"],
+                "Wrong modification time",
+            )
         with self.subTest(i=2):
             rollback = []
             with self.assertRaises(EngineException, msg="Accepted wrong quotas") as e:
-                self.topic.new(rollback, self.fake_session, {"name": "other-project-name", "quotas": {"baditems": 10}})
+                self.topic.new(
+                    rollback,
+                    self.fake_session,
+                    {"name": "other-project-name", "quotas": {"baditems": 10}},
+                )
             self.assertEqual(len(rollback), 0, "Wrong rollback length")
-            self.assertEqual(e.exception.http_code, HTTPStatus.UNPROCESSABLE_ENTITY, "Wrong HTTP status code")
-            self.assertIn("format error at 'quotas' 'additional properties are not allowed ('{}' was unexpected)'"
-                          .format("baditems"), norm(str(e.exception)), "Wrong exception text")
+            self.assertEqual(
+                e.exception.http_code,
+                HTTPStatus.UNPROCESSABLE_ENTITY,
+                "Wrong HTTP status code",
+            )
+            self.assertIn(
+                "format error at 'quotas' 'additional properties are not allowed ('{}' was unexpected)'".format(
+                    "baditems"
+                ),
+                norm(str(e.exception)),
+                "Wrong exception text",
+            )
 
     def test_edit_project(self):
         now = time()
         pid = str(uuid4())
-        proj = {"_id": pid, "name": self.test_name, "_admin": {"created": now, "modified": now}}
+        proj = {
+            "_id": pid,
+            "name": self.test_name,
+            "_admin": {"created": now, "modified": now},
+        }
         with self.subTest(i=1):
             self.auth.get_project_list.side_effect = [[proj], []]
             new_name = "new-project-name"
             quotas = {"vnfds": randint(0, 100), "nsds": randint(0, 100)}
-            self.topic.edit(self.fake_session, pid, {"name": new_name, "quotas": quotas})
+            self.topic.edit(
+                self.fake_session, pid, {"name": new_name, "quotas": quotas}
+            )
             _id, content = self.auth.update_project.call_args[0]
             self.assertEqual(_id, pid, "Wrong project identifier")
             self.assertEqual(content["_id"], pid, "Wrong project identifier")
             self.assertEqual(content["_admin"]["created"], now, "Wrong creation time")
-            self.assertGreater(content["_admin"]["modified"], now, "Wrong modification time")
+            self.assertGreater(
+                content["_admin"]["modified"], now, "Wrong modification time"
+            )
             self.assertEqual(content["name"], new_name, "Wrong project name")
             self.assertEqual(content["quotas"], quotas, "Wrong quotas")
         with self.subTest(i=2):
@@ -225,108 +258,218 @@ class Test_ProjectTopicAuth(TestCase):
             quotas = {"baditems": randint(0, 100)}
             self.auth.get_project_list.side_effect = [[proj], []]
             with self.assertRaises(EngineException, msg="Accepted wrong quotas") as e:
-                self.topic.edit(self.fake_session, pid, {"name": new_name, "quotas": quotas})
-            self.assertEqual(e.exception.http_code, HTTPStatus.UNPROCESSABLE_ENTITY, "Wrong HTTP status code")
-            self.assertIn("format error at 'quotas' 'additional properties are not allowed ('{}' was unexpected)'"
-                          .format("baditems"), norm(str(e.exception)), "Wrong exception text")
+                self.topic.edit(
+                    self.fake_session, pid, {"name": new_name, "quotas": quotas}
+                )
+            self.assertEqual(
+                e.exception.http_code,
+                HTTPStatus.UNPROCESSABLE_ENTITY,
+                "Wrong HTTP status code",
+            )
+            self.assertIn(
+                "format error at 'quotas' 'additional properties are not allowed ('{}' was unexpected)'".format(
+                    "baditems"
+                ),
+                norm(str(e.exception)),
+                "Wrong exception text",
+            )
 
     def test_conflict_on_new(self):
         with self.subTest(i=1):
             rollback = []
             pid = str(uuid4())
-            with self.assertRaises(EngineException, msg="Accepted uuid as project name") as e:
+            with self.assertRaises(
+                EngineException, msg="Accepted uuid as project name"
+            ) as e:
                 self.topic.new(rollback, self.fake_session, {"name": pid})
             self.assertEqual(len(rollback), 0, "Wrong rollback length")
-            self.assertEqual(e.exception.http_code, HTTPStatus.UNPROCESSABLE_ENTITY, "Wrong HTTP status code")
-            self.assertIn("project name '{}' cannot have an uuid format".format(pid),
-                          norm(str(e.exception)), "Wrong exception text")
+            self.assertEqual(
+                e.exception.http_code,
+                HTTPStatus.UNPROCESSABLE_ENTITY,
+                "Wrong HTTP status code",
+            )
+            self.assertIn(
+                "project name '{}' cannot have an uuid format".format(pid),
+                norm(str(e.exception)),
+                "Wrong exception text",
+            )
         with self.subTest(i=2):
             rollback = []
-            self.auth.get_project_list.return_value = [{"_id": test_pid, "name": self.test_name}]
-            with self.assertRaises(EngineException, msg="Accepted existing project name") as e:
+            self.auth.get_project_list.return_value = [
+                {"_id": test_pid, "name": self.test_name}
+            ]
+            with self.assertRaises(
+                EngineException, msg="Accepted existing project name"
+            ) as e:
                 self.topic.new(rollback, self.fake_session, {"name": self.test_name})
             self.assertEqual(len(rollback), 0, "Wrong rollback length")
-            self.assertEqual(e.exception.http_code, HTTPStatus.CONFLICT, "Wrong HTTP status code")
-            self.assertIn("project '{}' exists".format(self.test_name),
-                          norm(str(e.exception)), "Wrong exception text")
+            self.assertEqual(
+                e.exception.http_code, HTTPStatus.CONFLICT, "Wrong HTTP status code"
+            )
+            self.assertIn(
+                "project '{}' exists".format(self.test_name),
+                norm(str(e.exception)),
+                "Wrong exception text",
+            )
 
     def test_conflict_on_edit(self):
         with self.subTest(i=1):
-            self.auth.get_project_list.return_value = [{"_id": test_pid, "name": self.test_name}]
+            self.auth.get_project_list.return_value = [
+                {"_id": test_pid, "name": self.test_name}
+            ]
             new_name = str(uuid4())
-            with self.assertRaises(EngineException, msg="Accepted uuid as project name") as e:
+            with self.assertRaises(
+                EngineException, msg="Accepted uuid as project name"
+            ) as e:
                 self.topic.edit(self.fake_session, test_pid, {"name": new_name})
-            self.assertEqual(e.exception.http_code, HTTPStatus.UNPROCESSABLE_ENTITY, "Wrong HTTP status code")
-            self.assertIn("project name '{}' cannot have an uuid format".format(new_name),
-                          norm(str(e.exception)), "Wrong exception text")
+            self.assertEqual(
+                e.exception.http_code,
+                HTTPStatus.UNPROCESSABLE_ENTITY,
+                "Wrong HTTP status code",
+            )
+            self.assertIn(
+                "project name '{}' cannot have an uuid format".format(new_name),
+                norm(str(e.exception)),
+                "Wrong exception text",
+            )
         with self.subTest(i=2):
             pid = str(uuid4())
             self.auth.get_project_list.return_value = [{"_id": pid, "name": "admin"}]
-            with self.assertRaises(EngineException, msg="Accepted renaming of project 'admin'") as e:
+            with self.assertRaises(
+                EngineException, msg="Accepted renaming of project 'admin'"
+            ) as e:
                 self.topic.edit(self.fake_session, pid, {"name": "new-name"})
-            self.assertEqual(e.exception.http_code, HTTPStatus.CONFLICT, "Wrong HTTP status code")
-            self.assertIn("you cannot rename project 'admin'",
-                          norm(str(e.exception)), "Wrong exception text")
+            self.assertEqual(
+                e.exception.http_code, HTTPStatus.CONFLICT, "Wrong HTTP status code"
+            )
+            self.assertIn(
+                "you cannot rename project 'admin'",
+                norm(str(e.exception)),
+                "Wrong exception text",
+            )
         with self.subTest(i=3):
             new_name = "new-project-name"
-            self.auth.get_project_list.side_effect = [[{"_id": test_pid, "name": self.test_name}],
-                                                      [{"_id": str(uuid4()), "name": new_name}]]
-            with self.assertRaises(EngineException, msg="Accepted existing project name") as e:
+            self.auth.get_project_list.side_effect = [
+                [{"_id": test_pid, "name": self.test_name}],
+                [{"_id": str(uuid4()), "name": new_name}],
+            ]
+            with self.assertRaises(
+                EngineException, msg="Accepted existing project name"
+            ) as e:
                 self.topic.edit(self.fake_session, pid, {"name": new_name})
-            self.assertEqual(e.exception.http_code, HTTPStatus.CONFLICT, "Wrong HTTP status code")
-            self.assertIn("project '{}' is already used".format(new_name),
-                          norm(str(e.exception)), "Wrong exception text")
+            self.assertEqual(
+                e.exception.http_code, HTTPStatus.CONFLICT, "Wrong HTTP status code"
+            )
+            self.assertIn(
+                "project '{}' is already used".format(new_name),
+                norm(str(e.exception)),
+                "Wrong exception text",
+            )
 
     def test_delete_project(self):
         with self.subTest(i=1):
             pid = str(uuid4())
-            self.auth.get_project.return_value = {"_id": pid, "name": "other-project-name"}
+            self.auth.get_project.return_value = {
+                "_id": pid,
+                "name": "other-project-name",
+            }
             self.auth.delete_project.return_value = {"deleted": 1}
             self.auth.get_user_list.return_value = []
             self.db.get_list.return_value = []
             rc = self.topic.delete(self.fake_session, pid)
             self.assertEqual(rc, {"deleted": 1}, "Wrong project deletion return info")
-            self.assertEqual(self.auth.get_project.call_args[0][0], pid, "Wrong project identifier")
-            self.assertEqual(self.auth.delete_project.call_args[0][0], pid, "Wrong project identifier")
+            self.assertEqual(
+                self.auth.get_project.call_args[0][0], pid, "Wrong project identifier"
+            )
+            self.assertEqual(
+                self.auth.delete_project.call_args[0][0],
+                pid,
+                "Wrong project identifier",
+            )
 
     def test_conflict_on_del(self):
         with self.subTest(i=1):
-            self.auth.get_project.return_value = {"_id": test_pid, "name": self.test_name}
-            with self.assertRaises(EngineException, msg="Accepted deletion of own project") as e:
+            self.auth.get_project.return_value = {
+                "_id": test_pid,
+                "name": self.test_name,
+            }
+            with self.assertRaises(
+                EngineException, msg="Accepted deletion of own project"
+            ) as e:
                 self.topic.delete(self.fake_session, self.test_name)
-            self.assertEqual(e.exception.http_code, HTTPStatus.CONFLICT, "Wrong HTTP status code")
-            self.assertIn("you cannot delete your own project", norm(str(e.exception)), "Wrong exception text")
+            self.assertEqual(
+                e.exception.http_code, HTTPStatus.CONFLICT, "Wrong HTTP status code"
+            )
+            self.assertIn(
+                "you cannot delete your own project",
+                norm(str(e.exception)),
+                "Wrong exception text",
+            )
         with self.subTest(i=2):
             self.auth.get_project.return_value = {"_id": str(uuid4()), "name": "admin"}
-            with self.assertRaises(EngineException, msg="Accepted deletion of project 'admin'") as e:
+            with self.assertRaises(
+                EngineException, msg="Accepted deletion of project 'admin'"
+            ) as e:
                 self.topic.delete(self.fake_session, "admin")
-            self.assertEqual(e.exception.http_code, HTTPStatus.CONFLICT, "Wrong HTTP status code")
-            self.assertIn("you cannot delete project 'admin'", norm(str(e.exception)), "Wrong exception text")
+            self.assertEqual(
+                e.exception.http_code, HTTPStatus.CONFLICT, "Wrong HTTP status code"
+            )
+            self.assertIn(
+                "you cannot delete project 'admin'",
+                norm(str(e.exception)),
+                "Wrong exception text",
+            )
         with self.subTest(i=3):
             pid = str(uuid4())
             name = "other-project-name"
             self.auth.get_project.return_value = {"_id": pid, "name": name}
-            self.auth.get_user_list.return_value = [{"_id": str(uuid4()), "username": self.test_name,
-                                                     "project_role_mappings": [{"project": pid, "role": str(uuid4())}]}]
-            with self.assertRaises(EngineException, msg="Accepted deletion of used project") as e:
+            self.auth.get_user_list.return_value = [
+                {
+                    "_id": str(uuid4()),
+                    "username": self.test_name,
+                    "project_role_mappings": [{"project": pid, "role": str(uuid4())}],
+                }
+            ]
+            with self.assertRaises(
+                EngineException, msg="Accepted deletion of used project"
+            ) as e:
                 self.topic.delete(self.fake_session, pid)
-            self.assertEqual(e.exception.http_code, HTTPStatus.CONFLICT, "Wrong HTTP status code")
-            self.assertIn("project '{}' ({}) is being used by user '{}'".format(name, pid, self.test_name),
-                          norm(str(e.exception)), "Wrong exception text")
+            self.assertEqual(
+                e.exception.http_code, HTTPStatus.CONFLICT, "Wrong HTTP status code"
+            )
+            self.assertIn(
+                "project '{}' ({}) is being used by user '{}'".format(
+                    name, pid, self.test_name
+                ),
+                norm(str(e.exception)),
+                "Wrong exception text",
+            )
         with self.subTest(i=4):
             self.auth.get_user_list.return_value = []
-            self.db.get_list.return_value = [{"_id": str(uuid4()), "id": self.test_name,
-                                              "_admin": {"projects_read": [pid], "projects_write": []}}]
-            with self.assertRaises(EngineException, msg="Accepted deletion of used project") as e:
+            self.db.get_list.return_value = [
+                {
+                    "_id": str(uuid4()),
+                    "id": self.test_name,
+                    "_admin": {"projects_read": [pid], "projects_write": []},
+                }
+            ]
+            with self.assertRaises(
+                EngineException, msg="Accepted deletion of used project"
+            ) as e:
                 self.topic.delete(self.fake_session, pid)
-            self.assertEqual(e.exception.http_code, HTTPStatus.CONFLICT, "Wrong HTTP status code")
-            self.assertIn("project '{}' ({}) is being used by {} '{}'"
-                          .format(name, pid, "vnf descriptor", self.test_name),
-                          norm(str(e.exception)), "Wrong exception text")
+            self.assertEqual(
+                e.exception.http_code, HTTPStatus.CONFLICT, "Wrong HTTP status code"
+            )
+            self.assertIn(
+                "project '{}' ({}) is being used by {} '{}'".format(
+                    name, pid, "vnf descriptor", self.test_name
+                ),
+                norm(str(e.exception)),
+                "Wrong exception text",
+            )
 
 
 class Test_RoleTopicAuth(TestCase):
-
     @classmethod
     def setUpClass(cls):
         cls.test_name = "test-role-topic"
@@ -339,8 +482,15 @@ class Test_RoleTopicAuth(TestCase):
         self.auth = Mock(authconn.Authconn(None, None, None))
         self.auth.role_permissions = self.test_operations
         self.topic = RoleTopicAuth(self.db, self.fs, self.msg, self.auth)
-        self.fake_session = {"username": test_name, "project_id": (test_pid,), "method": None,
-                             "admin": True, "force": False, "public": False, "allow_show_user_project_role": True}
+        self.fake_session = {
+            "username": test_name,
+            "project_id": (test_pid,),
+            "method": None,
+            "admin": True,
+            "force": False,
+            "public": False,
+            "allow_show_user_project_role": True,
+        }
         self.topic.check_quota = Mock(return_value=None)  # skip quota
 
     def test_new_role(self):
@@ -351,51 +501,95 @@ class Test_RoleTopicAuth(TestCase):
             perms_out = {"default": False, "admin": False, "tokens": True}
             self.auth.get_role_list.return_value = []
             self.auth.create_role.return_value = rid1
-            rid2, oid = self.topic.new(rollback, self.fake_session, {"name": self.test_name, "permissions": perms_in})
+            rid2, oid = self.topic.new(
+                rollback,
+                self.fake_session,
+                {"name": self.test_name, "permissions": perms_in},
+            )
             self.assertEqual(len(rollback), 1, "Wrong rollback length")
             self.assertEqual(rid2, rid1, "Wrong project identifier")
             content = self.auth.create_role.call_args[0][0]
             self.assertEqual(content["name"], self.test_name, "Wrong role name")
             self.assertEqual(content["permissions"], perms_out, "Wrong permissions")
             self.assertIsNotNone(content["_admin"]["created"], "Wrong creation time")
-            self.assertEqual(content["_admin"]["modified"], content["_admin"]["created"], "Wrong modification time")
+            self.assertEqual(
+                content["_admin"]["modified"],
+                content["_admin"]["created"],
+                "Wrong modification time",
+            )
         with self.subTest(i=2):
             rollback = []
-            with self.assertRaises(EngineException, msg="Accepted wrong permissions") as e:
-                self.topic.new(rollback, self.fake_session,
-                               {"name": "other-role-name", "permissions": {"projects": True}})
+            with self.assertRaises(
+                EngineException, msg="Accepted wrong permissions"
+            ) as e:
+                self.topic.new(
+                    rollback,
+                    self.fake_session,
+                    {"name": "other-role-name", "permissions": {"projects": True}},
+                )
             self.assertEqual(len(rollback), 0, "Wrong rollback length")
-            self.assertEqual(e.exception.http_code, HTTPStatus.UNPROCESSABLE_ENTITY, "Wrong HTTP status code")
-            self.assertIn("invalid permission '{}'".format("projects"),
-                          norm(str(e.exception)), "Wrong exception text")
+            self.assertEqual(
+                e.exception.http_code,
+                HTTPStatus.UNPROCESSABLE_ENTITY,
+                "Wrong HTTP status code",
+            )
+            self.assertIn(
+                "invalid permission '{}'".format("projects"),
+                norm(str(e.exception)),
+                "Wrong exception text",
+            )
 
     def test_edit_role(self):
         now = time()
         rid = str(uuid4())
-        role = {"_id": rid, "name": self.test_name, "permissions": {"tokens": True},
-                "_admin": {"created": now, "modified": now}}
+        role = {
+            "_id": rid,
+            "name": self.test_name,
+            "permissions": {"tokens": True},
+            "_admin": {"created": now, "modified": now},
+        }
         with self.subTest(i=1):
             self.auth.get_role_list.side_effect = [[role], []]
             self.auth.get_role.return_value = role
             new_name = "new-role-name"
             perms_in = {"tokens": False, "tokens:get": True}
-            perms_out = {"default": False, "admin": False, "tokens": False, "tokens:get": True}
-            self.topic.edit(self.fake_session, rid, {"name": new_name, "permissions": perms_in})
+            perms_out = {
+                "default": False,
+                "admin": False,
+                "tokens": False,
+                "tokens:get": True,
+            }
+            self.topic.edit(
+                self.fake_session, rid, {"name": new_name, "permissions": perms_in}
+            )
             content = self.auth.update_role.call_args[0][0]
             self.assertEqual(content["_id"], rid, "Wrong role identifier")
             self.assertEqual(content["_admin"]["created"], now, "Wrong creation time")
-            self.assertGreater(content["_admin"]["modified"], now, "Wrong modification time")
+            self.assertGreater(
+                content["_admin"]["modified"], now, "Wrong modification time"
+            )
             self.assertEqual(content["name"], new_name, "Wrong role name")
             self.assertEqual(content["permissions"], perms_out, "Wrong permissions")
         with self.subTest(i=2):
             new_name = "other-role-name"
             perms_in = {"tokens": False, "tokens:post": True}
             self.auth.get_role_list.side_effect = [[role], []]
-            with self.assertRaises(EngineException, msg="Accepted wrong permissions") as e:
-                self.topic.edit(self.fake_session, rid, {"name": new_name, "permissions": perms_in})
-            self.assertEqual(e.exception.http_code, HTTPStatus.UNPROCESSABLE_ENTITY, "Wrong HTTP status code")
-            self.assertIn("invalid permission '{}'".format("tokens:post"),
-                          norm(str(e.exception)), "Wrong exception text")
+            with self.assertRaises(
+                EngineException, msg="Accepted wrong permissions"
+            ) as e:
+                self.topic.edit(
+                    self.fake_session, rid, {"name": new_name, "permissions": perms_in}
+                )
+            self.assertEqual(
+                e.exception.http_code,
+                HTTPStatus.UNPROCESSABLE_ENTITY,
+                "Wrong HTTP status code",
+            )
+            self.assertIn(
+                "invalid permission '{}'".format("tokens:post"),
+                norm(str(e.exception)),
+                "Wrong exception text",
+            )
 
     def test_delete_role(self):
         with self.subTest(i=1):
@@ -407,59 +601,123 @@ class Test_RoleTopicAuth(TestCase):
             self.auth.get_user_list.return_value = []
             rc = self.topic.delete(self.fake_session, rid)
             self.assertEqual(rc, {"deleted": 1}, "Wrong role deletion return info")
-            self.assertEqual(self.auth.get_role_list.call_args[0][0]["_id"], rid, "Wrong role identifier")
-            self.assertEqual(self.auth.get_role.call_args[0][0], rid, "Wrong role identifier")
-            self.assertEqual(self.auth.delete_role.call_args[0][0], rid, "Wrong role identifier")
+            self.assertEqual(
+                self.auth.get_role_list.call_args[0][0]["_id"],
+                rid,
+                "Wrong role identifier",
+            )
+            self.assertEqual(
+                self.auth.get_role.call_args[0][0], rid, "Wrong role identifier"
+            )
+            self.assertEqual(
+                self.auth.delete_role.call_args[0][0], rid, "Wrong role identifier"
+            )
 
     def test_conflict_on_new(self):
         with self.subTest(i=1):
             rollback = []
             rid = str(uuid4())
-            with self.assertRaises(EngineException, msg="Accepted uuid as role name") as e:
+            with self.assertRaises(
+                EngineException, msg="Accepted uuid as role name"
+            ) as e:
                 self.topic.new(rollback, self.fake_session, {"name": rid})
             self.assertEqual(len(rollback), 0, "Wrong rollback length")
-            self.assertEqual(e.exception.http_code, HTTPStatus.UNPROCESSABLE_ENTITY, "Wrong HTTP status code")
-            self.assertIn("role name '{}' cannot have an uuid format".format(rid),
-                          norm(str(e.exception)), "Wrong exception text")
+            self.assertEqual(
+                e.exception.http_code,
+                HTTPStatus.UNPROCESSABLE_ENTITY,
+                "Wrong HTTP status code",
+            )
+            self.assertIn(
+                "role name '{}' cannot have an uuid format".format(rid),
+                norm(str(e.exception)),
+                "Wrong exception text",
+            )
         with self.subTest(i=2):
             rollback = []
-            self.auth.get_role_list.return_value = [{"_id": str(uuid4()), "name": self.test_name}]
-            with self.assertRaises(EngineException, msg="Accepted existing role name") as e:
+            self.auth.get_role_list.return_value = [
+                {"_id": str(uuid4()), "name": self.test_name}
+            ]
+            with self.assertRaises(
+                EngineException, msg="Accepted existing role name"
+            ) as e:
                 self.topic.new(rollback, self.fake_session, {"name": self.test_name})
             self.assertEqual(len(rollback), 0, "Wrong rollback length")
-            self.assertEqual(e.exception.http_code, HTTPStatus.CONFLICT, "Wrong HTTP status code")
-            self.assertIn("role name '{}' exists".format(self.test_name),
-                          norm(str(e.exception)), "Wrong exception text")
+            self.assertEqual(
+                e.exception.http_code, HTTPStatus.CONFLICT, "Wrong HTTP status code"
+            )
+            self.assertIn(
+                "role name '{}' exists".format(self.test_name),
+                norm(str(e.exception)),
+                "Wrong exception text",
+            )
 
     def test_conflict_on_edit(self):
         rid = str(uuid4())
         with self.subTest(i=1):
-            self.auth.get_role_list.return_value = [{"_id": rid, "name": self.test_name, "permissions": {}}]
+            self.auth.get_role_list.return_value = [
+                {"_id": rid, "name": self.test_name, "permissions": {}}
+            ]
             new_name = str(uuid4())
-            with self.assertRaises(EngineException, msg="Accepted uuid as role name") as e:
+            with self.assertRaises(
+                EngineException, msg="Accepted uuid as role name"
+            ) as e:
                 self.topic.edit(self.fake_session, rid, {"name": new_name})
-            self.assertEqual(e.exception.http_code, HTTPStatus.UNPROCESSABLE_ENTITY, "Wrong HTTP status code")
-            self.assertIn("role name '{}' cannot have an uuid format".format(new_name),
-                          norm(str(e.exception)), "Wrong exception text")
+            self.assertEqual(
+                e.exception.http_code,
+                HTTPStatus.UNPROCESSABLE_ENTITY,
+                "Wrong HTTP status code",
+            )
+            self.assertIn(
+                "role name '{}' cannot have an uuid format".format(new_name),
+                norm(str(e.exception)),
+                "Wrong exception text",
+            )
         for i, role_name in enumerate(["system_admin", "project_admin"], start=2):
             with self.subTest(i=i):
                 rid = str(uuid4())
-                self.auth.get_role.return_value = {"_id": rid, "name": role_name, "permissions": {}}
-                with self.assertRaises(EngineException, msg="Accepted renaming of role '{}'".format(role_name)) as e:
+                self.auth.get_role.return_value = {
+                    "_id": rid,
+                    "name": role_name,
+                    "permissions": {},
+                }
+                with self.assertRaises(
+                    EngineException,
+                    msg="Accepted renaming of role '{}'".format(role_name),
+                ) as e:
                     self.topic.edit(self.fake_session, rid, {"name": "new-name"})
-                self.assertEqual(e.exception.http_code, HTTPStatus.FORBIDDEN, "Wrong HTTP status code")
-                self.assertIn("you cannot rename role '{}'".format(role_name),
-                              norm(str(e.exception)), "Wrong exception text")
-        with self.subTest(i=i+1):
+                self.assertEqual(
+                    e.exception.http_code,
+                    HTTPStatus.FORBIDDEN,
+                    "Wrong HTTP status code",
+                )
+                self.assertIn(
+                    "you cannot rename role '{}'".format(role_name),
+                    norm(str(e.exception)),
+                    "Wrong exception text",
+                )
+        with self.subTest(i=i + 1):
             new_name = "new-role-name"
-            self.auth.get_role_list.side_effect = [[{"_id": rid, "name": self.test_name, "permissions": {}}],
-                                                   [{"_id": str(uuid4()), "name": new_name, "permissions": {}}]]
-            self.auth.get_role.return_value = {"_id": rid, "name": self.test_name, "permissions": {}}
-            with self.assertRaises(EngineException, msg="Accepted existing role name") as e:
+            self.auth.get_role_list.side_effect = [
+                [{"_id": rid, "name": self.test_name, "permissions": {}}],
+                [{"_id": str(uuid4()), "name": new_name, "permissions": {}}],
+            ]
+            self.auth.get_role.return_value = {
+                "_id": rid,
+                "name": self.test_name,
+                "permissions": {},
+            }
+            with self.assertRaises(
+                EngineException, msg="Accepted existing role name"
+            ) as e:
                 self.topic.edit(self.fake_session, rid, {"name": new_name})
-            self.assertEqual(e.exception.http_code, HTTPStatus.CONFLICT, "Wrong HTTP status code")
-            self.assertIn("role name '{}' exists".format(new_name),
-                          norm(str(e.exception)), "Wrong exception text")
+            self.assertEqual(
+                e.exception.http_code, HTTPStatus.CONFLICT, "Wrong HTTP status code"
+            )
+            self.assertIn(
+                "role name '{}' exists".format(new_name),
+                norm(str(e.exception)),
+                "Wrong exception text",
+            )
 
     def test_conflict_on_del(self):
         for i, role_name in enumerate(["system_admin", "project_admin"], start=1):
@@ -468,28 +726,51 @@ class Test_RoleTopicAuth(TestCase):
                 role = {"_id": rid, "name": role_name}
                 self.auth.get_role_list.return_value = [role]
                 self.auth.get_role.return_value = role
-                with self.assertRaises(EngineException, msg="Accepted deletion of role '{}'".format(role_name)) as e:
+                with self.assertRaises(
+                    EngineException,
+                    msg="Accepted deletion of role '{}'".format(role_name),
+                ) as e:
                     self.topic.delete(self.fake_session, rid)
-                self.assertEqual(e.exception.http_code, HTTPStatus.FORBIDDEN, "Wrong HTTP status code")
-                self.assertIn("you cannot delete role '{}'".format(role_name),
-                              norm(str(e.exception)), "Wrong exception text")
-        with self.subTest(i=i+1):
+                self.assertEqual(
+                    e.exception.http_code,
+                    HTTPStatus.FORBIDDEN,
+                    "Wrong HTTP status code",
+                )
+                self.assertIn(
+                    "you cannot delete role '{}'".format(role_name),
+                    norm(str(e.exception)),
+                    "Wrong exception text",
+                )
+        with self.subTest(i=i + 1):
             rid = str(uuid4())
             name = "other-role-name"
             role = {"_id": rid, "name": name}
             self.auth.get_role_list.return_value = [role]
             self.auth.get_role.return_value = role
-            self.auth.get_user_list.return_value = [{"_id": str(uuid4()), "username": self.test_name,
-                                                     "project_role_mappings": [{"project": str(uuid4()), "role": rid}]}]
-            with self.assertRaises(EngineException, msg="Accepted deletion of used role") as e:
+            self.auth.get_user_list.return_value = [
+                {
+                    "_id": str(uuid4()),
+                    "username": self.test_name,
+                    "project_role_mappings": [{"project": str(uuid4()), "role": rid}],
+                }
+            ]
+            with self.assertRaises(
+                EngineException, msg="Accepted deletion of used role"
+            ) as e:
                 self.topic.delete(self.fake_session, rid)
-            self.assertEqual(e.exception.http_code, HTTPStatus.CONFLICT, "Wrong HTTP status code")
-            self.assertIn("role '{}' ({}) is being used by user '{}'".format(name, rid, self.test_name),
-                          norm(str(e.exception)), "Wrong exception text")
+            self.assertEqual(
+                e.exception.http_code, HTTPStatus.CONFLICT, "Wrong HTTP status code"
+            )
+            self.assertIn(
+                "role '{}' ({}) is being used by user '{}'".format(
+                    name, rid, self.test_name
+                ),
+                norm(str(e.exception)),
+                "Wrong exception text",
+            )
 
 
 class Test_UserTopicAuth(TestCase):
-
     @classmethod
     def setUpClass(cls):
         cls.test_name = "test-user-topic"
@@ -500,8 +781,15 @@ class Test_UserTopicAuth(TestCase):
         self.msg = Mock(msgbase.MsgBase())
         self.auth = Mock(authconn.Authconn(None, None, None))
         self.topic = UserTopicAuth(self.db, self.fs, self.msg, self.auth)
-        self.fake_session = {"username": test_name, "project_id": (test_pid,), "method": None,
-                             "admin": True, "force": False, "public": False, "allow_show_user_project_role": True}
+        self.fake_session = {
+            "username": test_name,
+            "project_id": (test_pid,),
+            "method": None,
+            "admin": True,
+            "force": False,
+            "public": False,
+            "allow_show_user_project_role": True,
+        }
         self.topic.check_quota = Mock(return_value=None)  # skip quota
 
     def test_new_user(self):
@@ -516,18 +804,31 @@ class Test_UserTopicAuth(TestCase):
             self.auth.get_role.return_value = {"_id": rid, "name": "some_role"}
             prms_in = [{"project": "some_project", "role": "some_role"}]
             prms_out = [{"project": pid, "role": rid}]
-            uid2, oid = self.topic.new(rollback, self.fake_session, {"username": self.test_name,
-                                                                     "password": self.test_name,
-                                                                     "project_role_mappings": prms_in
-                                                                     })
+            uid2, oid = self.topic.new(
+                rollback,
+                self.fake_session,
+                {
+                    "username": self.test_name,
+                    "password": self.test_name,
+                    "project_role_mappings": prms_in,
+                },
+            )
             self.assertEqual(len(rollback), 1, "Wrong rollback length")
             self.assertEqual(uid2, uid1, "Wrong project identifier")
             content = self.auth.create_user.call_args[0][0]
             self.assertEqual(content["username"], self.test_name, "Wrong project name")
             self.assertEqual(content["password"], self.test_name, "Wrong password")
-            self.assertEqual(content["project_role_mappings"], prms_out, "Wrong project-role mappings")
+            self.assertEqual(
+                content["project_role_mappings"],
+                prms_out,
+                "Wrong project-role mappings",
+            )
             self.assertIsNotNone(content["_admin"]["created"], "Wrong creation time")
-            self.assertEqual(content["_admin"]["modified"], content["_admin"]["created"], "Wrong modification time")
+            self.assertEqual(
+                content["_admin"]["modified"],
+                content["_admin"]["created"],
+                "Wrong modification time",
+            )
         with self.subTest(i=2):
             rollback = []
             def_rid = str(uuid4())
@@ -535,184 +836,380 @@ class Test_UserTopicAuth(TestCase):
             self.auth.get_role.return_value = def_role
             self.auth.get_role_list.return_value = [def_role]
             prms_out = [{"project": pid, "role": def_rid}]
-            uid2, oid = self.topic.new(rollback, self.fake_session, {"username": self.test_name,
-                                                                     "password": self.test_name,
-                                                                     "projects": ["some_project"]
-                                                                     })
+            uid2, oid = self.topic.new(
+                rollback,
+                self.fake_session,
+                {
+                    "username": self.test_name,
+                    "password": self.test_name,
+                    "projects": ["some_project"],
+                },
+            )
             self.assertEqual(len(rollback), 1, "Wrong rollback length")
             self.assertEqual(uid2, uid1, "Wrong project identifier")
             content = self.auth.create_user.call_args[0][0]
             self.assertEqual(content["username"], self.test_name, "Wrong project name")
             self.assertEqual(content["password"], self.test_name, "Wrong password")
-            self.assertEqual(content["project_role_mappings"], prms_out, "Wrong project-role mappings")
+            self.assertEqual(
+                content["project_role_mappings"],
+                prms_out,
+                "Wrong project-role mappings",
+            )
             self.assertIsNotNone(content["_admin"]["created"], "Wrong creation time")
-            self.assertEqual(content["_admin"]["modified"], content["_admin"]["created"], "Wrong modification time")
+            self.assertEqual(
+                content["_admin"]["modified"],
+                content["_admin"]["created"],
+                "Wrong modification time",
+            )
         with self.subTest(i=3):
             rollback = []
-            with self.assertRaises(EngineException, msg="Accepted wrong project-role mappings") as e:
-                self.topic.new(rollback, self.fake_session, {"username": "other-project-name",
-                                                             "password": "other-password",
-                                                             "project_role_mappings": [{}]
-                                                             })
+            with self.assertRaises(
+                EngineException, msg="Accepted wrong project-role mappings"
+            ) as e:
+                self.topic.new(
+                    rollback,
+                    self.fake_session,
+                    {
+                        "username": "other-project-name",
+                        "password": "other-password",
+                        "project_role_mappings": [{}],
+                    },
+                )
             self.assertEqual(len(rollback), 0, "Wrong rollback length")
-            self.assertEqual(e.exception.http_code, HTTPStatus.UNPROCESSABLE_ENTITY, "Wrong HTTP status code")
-            self.assertIn("format error at '{}' '{}'"
-                          .format("project_role_mappings:{}", "'{}' is a required property").format(0, "project"),
-                          norm(str(e.exception)), "Wrong exception text")
+            self.assertEqual(
+                e.exception.http_code,
+                HTTPStatus.UNPROCESSABLE_ENTITY,
+                "Wrong HTTP status code",
+            )
+            self.assertIn(
+                "format error at '{}' '{}'".format(
+                    "project_role_mappings:{}", "'{}' is a required property"
+                ).format(0, "project"),
+                norm(str(e.exception)),
+                "Wrong exception text",
+            )
         with self.subTest(i=4):
             rollback = []
             with self.assertRaises(EngineException, msg="Accepted wrong projects") as e:
-                self.topic.new(rollback, self.fake_session, {"username": "other-project-name",
-                                                             "password": "other-password",
-                                                             "projects": []
-                                                             })
+                self.topic.new(
+                    rollback,
+                    self.fake_session,
+                    {
+                        "username": "other-project-name",
+                        "password": "other-password",
+                        "projects": [],
+                    },
+                )
             self.assertEqual(len(rollback), 0, "Wrong rollback length")
-            self.assertEqual(e.exception.http_code, HTTPStatus.UNPROCESSABLE_ENTITY, "Wrong HTTP status code")
-            self.assertIn("format error at '{}' '{}'" .format("projects", "{} is too short").format([]),
-                          norm(str(e.exception)), "Wrong exception text")
+            self.assertEqual(
+                e.exception.http_code,
+                HTTPStatus.UNPROCESSABLE_ENTITY,
+                "Wrong HTTP status code",
+            )
+            self.assertIn(
+                "format error at '{}' '{}'".format(
+                    "projects", "{} is too short"
+                ).format([]),
+                norm(str(e.exception)),
+                "Wrong exception text",
+            )
 
     def test_edit_user(self):
         now = time()
         uid = str(uuid4())
         pid1 = str(uuid4())
         rid1 = str(uuid4())
-        prms = [{"project": pid1, "project_name": "project-1", "role": rid1, "role_name": "role-1"}]
-        user = {"_id": uid, "username": self.test_name, "project_role_mappings": prms,
-                "_admin": {"created": now, "modified": now}}
+        prms = [
+            {
+                "project": pid1,
+                "project_name": "project-1",
+                "role": rid1,
+                "role_name": "role-1",
+            }
+        ]
+        user = {
+            "_id": uid,
+            "username": self.test_name,
+            "project_role_mappings": prms,
+            "_admin": {"created": now, "modified": now},
+        }
         with self.subTest(i=1):
             self.auth.get_user_list.side_effect = [[user], []]
             self.auth.get_user.return_value = user
             pid2 = str(uuid4())
             rid2 = str(uuid4())
-            self.auth.get_project.side_effect = [{"_id": pid2, "name": "project-2"},
-                                                 {"_id": pid1, "name": "project-1"}]
-            self.auth.get_role.side_effect = [{"_id": rid2, "name": "role-2"},
-                                              {"_id": rid1, "name": "role-1"}]
+            self.auth.get_project.side_effect = [
+                {"_id": pid2, "name": "project-2"},
+                {"_id": pid1, "name": "project-1"},
+            ]
+            self.auth.get_role.side_effect = [
+                {"_id": rid2, "name": "role-2"},
+                {"_id": rid1, "name": "role-1"},
+            ]
             new_name = "new-user-name"
             new_pasw = "new-password"
             add_prms = [{"project": pid2, "role": rid2}]
             rem_prms = [{"project": pid1, "role": rid1}]
-            self.topic.edit(self.fake_session, uid, {"username": new_name, "password": new_pasw,
-                                                     "add_project_role_mappings": add_prms,
-                                                     "remove_project_role_mappings": rem_prms
-                                                     })
+            self.topic.edit(
+                self.fake_session,
+                uid,
+                {
+                    "username": new_name,
+                    "password": new_pasw,
+                    "add_project_role_mappings": add_prms,
+                    "remove_project_role_mappings": rem_prms,
+                },
+            )
             content = self.auth.update_user.call_args[0][0]
             self.assertEqual(content["_id"], uid, "Wrong user identifier")
             self.assertEqual(content["username"], new_name, "Wrong user name")
             self.assertEqual(content["password"], new_pasw, "Wrong user password")
-            self.assertEqual(content["add_project_role_mappings"], add_prms, "Wrong project-role mappings to add")
-            self.assertEqual(content["remove_project_role_mappings"], prms, "Wrong project-role mappings to remove")
+            self.assertEqual(
+                content["add_project_role_mappings"],
+                add_prms,
+                "Wrong project-role mappings to add",
+            )
+            self.assertEqual(
+                content["remove_project_role_mappings"],
+                prms,
+                "Wrong project-role mappings to remove",
+            )
         with self.subTest(i=2):
             new_name = "other-user-name"
             new_prms = [{}]
             self.auth.get_role_list.side_effect = [[user], []]
             self.auth.get_user_list.side_effect = [[user]]
-            with self.assertRaises(EngineException, msg="Accepted wrong project-role mappings") as e:
-                self.topic.edit(self.fake_session, uid, {"username": new_name, "project_role_mappings": new_prms})
-            self.assertEqual(e.exception.http_code, HTTPStatus.UNPROCESSABLE_ENTITY, "Wrong HTTP status code")
-            self.assertIn("format error at '{}' '{}'"
-                          .format("project_role_mappings:{}", "'{}' is a required property").format(0, "project"),
-                          norm(str(e.exception)), "Wrong exception text")
+            with self.assertRaises(
+                EngineException, msg="Accepted wrong project-role mappings"
+            ) as e:
+                self.topic.edit(
+                    self.fake_session,
+                    uid,
+                    {"username": new_name, "project_role_mappings": new_prms},
+                )
+            self.assertEqual(
+                e.exception.http_code,
+                HTTPStatus.UNPROCESSABLE_ENTITY,
+                "Wrong HTTP status code",
+            )
+            self.assertIn(
+                "format error at '{}' '{}'".format(
+                    "project_role_mappings:{}", "'{}' is a required property"
+                ).format(0, "project"),
+                norm(str(e.exception)),
+                "Wrong exception text",
+            )
 
     def test_delete_user(self):
         with self.subTest(i=1):
             uid = str(uuid4())
             self.fake_session["username"] = self.test_name
-            user = user = {"_id": uid, "username": "other-user-name", "project_role_mappings": []}
+            user = {
+                "_id": uid,
+                "username": "other-user-name",
+                "project_role_mappings": [],
+            }
             self.auth.get_user.return_value = user
             self.auth.delete_user.return_value = {"deleted": 1}
             rc = self.topic.delete(self.fake_session, uid)
             self.assertEqual(rc, {"deleted": 1}, "Wrong user deletion return info")
-            self.assertEqual(self.auth.get_user.call_args[0][0], uid, "Wrong user identifier")
-            self.assertEqual(self.auth.delete_user.call_args[0][0], uid, "Wrong user identifier")
+            self.assertEqual(
+                self.auth.get_user.call_args[0][0], uid, "Wrong user identifier"
+            )
+            self.assertEqual(
+                self.auth.delete_user.call_args[0][0], uid, "Wrong user identifier"
+            )
 
     def test_conflict_on_new(self):
         with self.subTest(i=1):
             rollback = []
             uid = str(uuid4())
-            with self.assertRaises(EngineException, msg="Accepted uuid as username") as e:
-                self.topic.new(rollback, self.fake_session, {"username": uid, "password": self.test_name,
-                                                             "projects": [test_pid]})
+            with self.assertRaises(
+                EngineException, msg="Accepted uuid as username"
+            ) as e:
+                self.topic.new(
+                    rollback,
+                    self.fake_session,
+                    {
+                        "username": uid,
+                        "password": self.test_name,
+                        "projects": [test_pid],
+                    },
+                )
             self.assertEqual(len(rollback), 0, "Wrong rollback length")
-            self.assertEqual(e.exception.http_code, HTTPStatus.UNPROCESSABLE_ENTITY, "Wrong HTTP status code")
-            self.assertIn("username '{}' cannot have a uuid format".format(uid),
-                          norm(str(e.exception)), "Wrong exception text")
+            self.assertEqual(
+                e.exception.http_code,
+                HTTPStatus.UNPROCESSABLE_ENTITY,
+                "Wrong HTTP status code",
+            )
+            self.assertIn(
+                "username '{}' cannot have a uuid format".format(uid),
+                norm(str(e.exception)),
+                "Wrong exception text",
+            )
         with self.subTest(i=2):
             rollback = []
-            self.auth.get_user_list.return_value = [{"_id": str(uuid4()), "username": self.test_name}]
-            with self.assertRaises(EngineException, msg="Accepted existing username") as e:
-                self.topic.new(rollback, self.fake_session, {"username": self.test_name, "password": self.test_name,
-                                                             "projects": [test_pid]})
+            self.auth.get_user_list.return_value = [
+                {"_id": str(uuid4()), "username": self.test_name}
+            ]
+            with self.assertRaises(
+                EngineException, msg="Accepted existing username"
+            ) as e:
+                self.topic.new(
+                    rollback,
+                    self.fake_session,
+                    {
+                        "username": self.test_name,
+                        "password": self.test_name,
+                        "projects": [test_pid],
+                    },
+                )
             self.assertEqual(len(rollback), 0, "Wrong rollback length")
-            self.assertEqual(e.exception.http_code, HTTPStatus.CONFLICT, "Wrong HTTP status code")
-            self.assertIn("username '{}' is already used".format(self.test_name),
-                          norm(str(e.exception)), "Wrong exception text")
+            self.assertEqual(
+                e.exception.http_code, HTTPStatus.CONFLICT, "Wrong HTTP status code"
+            )
+            self.assertIn(
+                "username '{}' is already used".format(self.test_name),
+                norm(str(e.exception)),
+                "Wrong exception text",
+            )
         with self.subTest(i=3):
             rollback = []
             self.auth.get_user_list.return_value = []
             self.auth.get_role_list.side_effect = [[], []]
-            with self.assertRaises(AuthconnNotFoundException, msg="Accepted user without default role") as e:
-                self.topic.new(rollback, self.fake_session, {"username": self.test_name, "password": self.test_name,
-                                                             "projects": [str(uuid4())]})
+            with self.assertRaises(
+                AuthconnNotFoundException, msg="Accepted user without default role"
+            ) as e:
+                self.topic.new(
+                    rollback,
+                    self.fake_session,
+                    {
+                        "username": self.test_name,
+                        "password": self.test_name,
+                        "projects": [str(uuid4())],
+                    },
+                )
             self.assertEqual(len(rollback), 0, "Wrong rollback length")
-            self.assertEqual(e.exception.http_code, HTTPStatus.NOT_FOUND, "Wrong HTTP status code")
-            self.assertIn("can't find default role for user '{}'".format(self.test_name),
-                          norm(str(e.exception)), "Wrong exception text")
+            self.assertEqual(
+                e.exception.http_code, HTTPStatus.NOT_FOUND, "Wrong HTTP status code"
+            )
+            self.assertIn(
+                "can't find default role for user '{}'".format(self.test_name),
+                norm(str(e.exception)),
+                "Wrong exception text",
+            )
 
     def test_conflict_on_edit(self):
         uid = str(uuid4())
         with self.subTest(i=1):
-            self.auth.get_user_list.return_value = [{"_id": uid, "username": self.test_name}]
+            self.auth.get_user_list.return_value = [
+                {"_id": uid, "username": self.test_name}
+            ]
             new_name = str(uuid4())
-            with self.assertRaises(EngineException, msg="Accepted uuid as username") as e:
+            with self.assertRaises(
+                EngineException, msg="Accepted uuid as username"
+            ) as e:
                 self.topic.edit(self.fake_session, uid, {"username": new_name})
-            self.assertEqual(e.exception.http_code, HTTPStatus.UNPROCESSABLE_ENTITY, "Wrong HTTP status code")
-            self.assertIn("username '{}' cannot have an uuid format".format(new_name),
-                          norm(str(e.exception)), "Wrong exception text")
+            self.assertEqual(
+                e.exception.http_code,
+                HTTPStatus.UNPROCESSABLE_ENTITY,
+                "Wrong HTTP status code",
+            )
+            self.assertIn(
+                "username '{}' cannot have an uuid format".format(new_name),
+                norm(str(e.exception)),
+                "Wrong exception text",
+            )
         with self.subTest(i=2):
-            self.auth.get_user_list.return_value = [{"_id": uid, "username": self.test_name}]
+            self.auth.get_user_list.return_value = [
+                {"_id": uid, "username": self.test_name}
+            ]
             self.auth.get_role_list.side_effect = [[], []]
-            with self.assertRaises(AuthconnNotFoundException, msg="Accepted user without default role") as e:
+            with self.assertRaises(
+                AuthconnNotFoundException, msg="Accepted user without default role"
+            ) as e:
                 self.topic.edit(self.fake_session, uid, {"projects": [str(uuid4())]})
-            self.assertEqual(e.exception.http_code, HTTPStatus.NOT_FOUND, "Wrong HTTP status code")
-            self.assertIn("can't find a default role for user '{}'".format(self.test_name),
-                          norm(str(e.exception)), "Wrong exception text")
+            self.assertEqual(
+                e.exception.http_code, HTTPStatus.NOT_FOUND, "Wrong HTTP status code"
+            )
+            self.assertIn(
+                "can't find a default role for user '{}'".format(self.test_name),
+                norm(str(e.exception)),
+                "Wrong exception text",
+            )
         with self.subTest(i=3):
             admin_uid = str(uuid4())
-            self.auth.get_user_list.return_value = [{"_id": admin_uid, "username": "admin"}]
-            with self.assertRaises(EngineException, msg="Accepted removing system_admin role from admin user") as e:
-                self.topic.edit(self.fake_session, admin_uid,
-                                {"remove_project_role_mappings": [{"project": "admin", "role": "system_admin"}]})
-            self.assertEqual(e.exception.http_code, HTTPStatus.FORBIDDEN, "Wrong HTTP status code")
-            self.assertIn("you cannot remove system_admin role from admin user",
-                          norm(str(e.exception)), "Wrong exception text")
+            self.auth.get_user_list.return_value = [
+                {"_id": admin_uid, "username": "admin"}
+            ]
+            with self.assertRaises(
+                EngineException,
+                msg="Accepted removing system_admin role from admin user",
+            ) as e:
+                self.topic.edit(
+                    self.fake_session,
+                    admin_uid,
+                    {
+                        "remove_project_role_mappings": [
+                            {"project": "admin", "role": "system_admin"}
+                        ]
+                    },
+                )
+            self.assertEqual(
+                e.exception.http_code, HTTPStatus.FORBIDDEN, "Wrong HTTP status code"
+            )
+            self.assertIn(
+                "you cannot remove system_admin role from admin user",
+                norm(str(e.exception)),
+                "Wrong exception text",
+            )
         with self.subTest(i=4):
             new_name = "new-user-name"
-            self.auth.get_user_list.side_effect = [[{"_id": uid, "name": self.test_name}],
-                                                   [{"_id": str(uuid4()), "name": new_name}]]
-            with self.assertRaises(EngineException, msg="Accepted existing username") as e:
+            self.auth.get_user_list.side_effect = [
+                [{"_id": uid, "name": self.test_name}],
+                [{"_id": str(uuid4()), "name": new_name}],
+            ]
+            with self.assertRaises(
+                EngineException, msg="Accepted existing username"
+            ) as e:
                 self.topic.edit(self.fake_session, uid, {"username": new_name})
-            self.assertEqual(e.exception.http_code, HTTPStatus.CONFLICT, "Wrong HTTP status code")
-            self.assertIn("username '{}' is already used".format(new_name),
-                          norm(str(e.exception)), "Wrong exception text")
+            self.assertEqual(
+                e.exception.http_code, HTTPStatus.CONFLICT, "Wrong HTTP status code"
+            )
+            self.assertIn(
+                "username '{}' is already used".format(new_name),
+                norm(str(e.exception)),
+                "Wrong exception text",
+            )
 
     def test_conflict_on_del(self):
         with self.subTest(i=1):
             uid = str(uuid4())
             self.fake_session["username"] = self.test_name
-            user = user = {"_id": uid, "username": self.test_name, "project_role_mappings": []}
+            user = {
+                "_id": uid,
+                "username": self.test_name,
+                "project_role_mappings": [],
+            }
             self.auth.get_user.return_value = user
-            with self.assertRaises(EngineException, msg="Accepted deletion of own user") as e:
+            with self.assertRaises(
+                EngineException, msg="Accepted deletion of own user"
+            ) as e:
                 self.topic.delete(self.fake_session, uid)
-            self.assertEqual(e.exception.http_code, HTTPStatus.CONFLICT, "Wrong HTTP status code")
-            self.assertIn("you cannot delete your own login user", norm(str(e.exception)), "Wrong exception text")
+            self.assertEqual(
+                e.exception.http_code, HTTPStatus.CONFLICT, "Wrong HTTP status code"
+            )
+            self.assertIn(
+                "you cannot delete your own login user",
+                norm(str(e.exception)),
+                "Wrong exception text",
+            )
 
 
 class Test_CommonVimWimSdn(TestCase):
-
     @classmethod
     def setUpClass(cls):
-        cls.test_name = "test-cim-topic"   # CIM = Common Infrastructure Manager
+        cls.test_name = "test-cim-topic"  # CIM = Common Infrastructure Manager
 
     def setUp(self):
         self.db = Mock(dbbase.DbBase())
@@ -725,8 +1222,15 @@ class Test_CommonVimWimSdn(TestCase):
         self.topic.topic = "wims"
         self.topic.schema_new = validation.wim_account_new_schema
         self.topic.schema_edit = validation.wim_account_edit_schema
-        self.fake_session = {"username": test_name, "project_id": (test_pid,), "method": None,
-                             "admin": True, "force": False, "public": False, "allow_show_user_project_role": True}
+        self.fake_session = {
+            "username": test_name,
+            "project_id": (test_pid,),
+            "method": None,
+            "admin": True,
+            "force": False,
+            "public": False,
+            "allow_show_user_project_role": True,
+        }
         self.topic.check_quota = Mock(return_value=None)  # skip quota
 
     def test_new_cvws(self):
@@ -736,8 +1240,11 @@ class Test_CommonVimWimSdn(TestCase):
             test_type = "fake"
             self.db.get_one.return_value = None
             self.db.create.side_effect = lambda self, content: content["_id"]
-            cid, oid = self.topic.new(rollback, self.fake_session,
-                                      {"name": self.test_name, "wim_url": test_url, "wim_type": test_type})
+            cid, oid = self.topic.new(
+                rollback,
+                self.fake_session,
+                {"name": self.test_name, "wim_url": test_url, "wim_type": test_type},
+            )
             self.assertEqual(len(rollback), 1, "Wrong rollback length")
             args = self.db.create.call_args[0]
             content = args[1]
@@ -749,20 +1256,55 @@ class Test_CommonVimWimSdn(TestCase):
             self.assertEqual(content["schema_version"], "1.11", "Wrong schema version")
             self.assertEqual(content["op_id"], oid, "Wrong operation identifier")
             self.assertIsNotNone(content["_admin"]["created"], "Wrong creation time")
-            self.assertEqual(content["_admin"]["modified"], content["_admin"]["created"], "Wrong modification time")
-            self.assertEqual(content["_admin"]["operationalState"], "PROCESSING", "Wrong operational state")
-            self.assertEqual(content["_admin"]["projects_read"], [test_pid], "Wrong read-only projects")
-            self.assertEqual(content["_admin"]["projects_write"], [test_pid], "Wrong read/write projects")
-            self.assertIsNone(content["_admin"]["current_operation"], "Wrong current operation")
-            self.assertEqual(len(content["_admin"]["operations"]), 1, "Wrong number of operations")
+            self.assertEqual(
+                content["_admin"]["modified"],
+                content["_admin"]["created"],
+                "Wrong modification time",
+            )
+            self.assertEqual(
+                content["_admin"]["operationalState"],
+                "PROCESSING",
+                "Wrong operational state",
+            )
+            self.assertEqual(
+                content["_admin"]["projects_read"],
+                [test_pid],
+                "Wrong read-only projects",
+            )
+            self.assertEqual(
+                content["_admin"]["projects_write"],
+                [test_pid],
+                "Wrong read/write projects",
+            )
+            self.assertIsNone(
+                content["_admin"]["current_operation"], "Wrong current operation"
+            )
+            self.assertEqual(
+                len(content["_admin"]["operations"]), 1, "Wrong number of operations"
+            )
             operation = content["_admin"]["operations"][0]
-            self.assertEqual(operation["lcmOperationType"], "create", "Wrong operation type")
-            self.assertEqual(operation["operationState"], "PROCESSING", "Wrong operation state")
-            self.assertGreater(operation["startTime"], content["_admin"]["created"], "Wrong operation start time")
-            self.assertGreater(operation["statusEnteredTime"], content["_admin"]["created"],
-                               "Wrong operation status enter time")
-            self.assertEqual(operation["detailed-status"], "", "Wrong operation detailed status info")
-            self.assertIsNone(operation["operationParams"], "Wrong operation parameters")
+            self.assertEqual(
+                operation["lcmOperationType"], "create", "Wrong operation type"
+            )
+            self.assertEqual(
+                operation["operationState"], "PROCESSING", "Wrong operation state"
+            )
+            self.assertGreater(
+                operation["startTime"],
+                content["_admin"]["created"],
+                "Wrong operation start time",
+            )
+            self.assertGreater(
+                operation["statusEnteredTime"],
+                content["_admin"]["created"],
+                "Wrong operation status enter time",
+            )
+            self.assertEqual(
+                operation["detailed-status"], "", "Wrong operation detailed status info"
+            )
+            self.assertIsNone(
+                operation["operationParams"], "Wrong operation parameters"
+            )
         # This test is disabled. Since Feature 8030, all WIM/SDN types are admitted
         # with self.subTest(i=2):
         #     rollback = []
@@ -781,21 +1323,46 @@ class Test_CommonVimWimSdn(TestCase):
             test_url = "http://0.0.0.0:0"
             test_type = "fake"
             self.db.get_one.return_value = {"_id": str(uuid4()), "name": self.test_name}
-            with self.assertRaises(EngineException, msg="Accepted existing CIM name") as e:
-                self.topic.new(rollback, self.fake_session,
-                               {"name": self.test_name, "wim_url": test_url, "wim_type": test_type})
+            with self.assertRaises(
+                EngineException, msg="Accepted existing CIM name"
+            ) as e:
+                self.topic.new(
+                    rollback,
+                    self.fake_session,
+                    {
+                        "name": self.test_name,
+                        "wim_url": test_url,
+                        "wim_type": test_type,
+                    },
+                )
             self.assertEqual(len(rollback), 0, "Wrong rollback length")
-            self.assertEqual(e.exception.http_code, HTTPStatus.CONFLICT, "Wrong HTTP status code")
-            self.assertIn("name '{}' already exists for {}".format(self.test_name, self.topic.topic),
-                          norm(str(e.exception)), "Wrong exception text")
+            self.assertEqual(
+                e.exception.http_code, HTTPStatus.CONFLICT, "Wrong HTTP status code"
+            )
+            self.assertIn(
+                "name '{}' already exists for {}".format(
+                    self.test_name, self.topic.topic
+                ),
+                norm(str(e.exception)),
+                "Wrong exception text",
+            )
 
     def test_edit_cvws(self):
         now = time()
         cid = str(uuid4())
         test_url = "http://0.0.0.0:0"
         test_type = "fake"
-        cvws = {"_id": cid, "name": self.test_name, "wim_url": test_url, "wim_type": test_type,
-                "_admin": {"created": now, "modified": now, "operations": [{"lcmOperationType": "create"}]}}
+        cvws = {
+            "_id": cid,
+            "name": self.test_name,
+            "wim_url": test_url,
+            "wim_type": test_type,
+            "_admin": {
+                "created": now,
+                "modified": now,
+                "operations": [{"lcmOperationType": "create"}],
+            },
+        }
         with self.subTest(i=1):
             new_name = "new-cim-name"
             new_url = "https://1.1.1.1:1"
@@ -803,7 +1370,11 @@ class Test_CommonVimWimSdn(TestCase):
             self.db.get_one.side_effect = [cvws, None]
             self.db.replace.return_value = {"updated": 1}
             # self.db.encrypt.side_effect = [b64str(), b64str()]
-            self.topic.edit(self.fake_session, cid, {"name": new_name, "wim_url": new_url, "wim_type": new_type})
+            self.topic.edit(
+                self.fake_session,
+                cid,
+                {"name": new_name, "wim_url": new_url, "wim_type": new_type},
+            )
             args = self.db.replace.call_args[0]
             content = args[2]
             self.assertEqual(args[0], self.topic.topic, "Wrong topic")
@@ -813,36 +1384,78 @@ class Test_CommonVimWimSdn(TestCase):
             self.assertEqual(content["wim_type"], new_type, "Wrong CIM type")
             self.assertEqual(content["wim_url"], new_url, "Wrong URL")
             self.assertEqual(content["_admin"]["created"], now, "Wrong creation time")
-            self.assertGreater(content["_admin"]["modified"], content["_admin"]["created"], "Wrong modification time")
-            self.assertEqual(len(content["_admin"]["operations"]), 2, "Wrong number of operations")
+            self.assertGreater(
+                content["_admin"]["modified"],
+                content["_admin"]["created"],
+                "Wrong modification time",
+            )
+            self.assertEqual(
+                len(content["_admin"]["operations"]), 2, "Wrong number of operations"
+            )
             operation = content["_admin"]["operations"][1]
-            self.assertEqual(operation["lcmOperationType"], "edit", "Wrong operation type")
-            self.assertEqual(operation["operationState"], "PROCESSING", "Wrong operation state")
-            self.assertGreater(operation["startTime"], content["_admin"]["modified"], "Wrong operation start time")
-            self.assertGreater(operation["statusEnteredTime"], content["_admin"]["modified"],
-                               "Wrong operation status enter time")
-            self.assertEqual(operation["detailed-status"], "", "Wrong operation detailed status info")
-            self.assertIsNone(operation["operationParams"], "Wrong operation parameters")
+            self.assertEqual(
+                operation["lcmOperationType"], "edit", "Wrong operation type"
+            )
+            self.assertEqual(
+                operation["operationState"], "PROCESSING", "Wrong operation state"
+            )
+            self.assertGreater(
+                operation["startTime"],
+                content["_admin"]["modified"],
+                "Wrong operation start time",
+            )
+            self.assertGreater(
+                operation["statusEnteredTime"],
+                content["_admin"]["modified"],
+                "Wrong operation status enter time",
+            )
+            self.assertEqual(
+                operation["detailed-status"], "", "Wrong operation detailed status info"
+            )
+            self.assertIsNone(
+                operation["operationParams"], "Wrong operation parameters"
+            )
         with self.subTest(i=2):
             self.db.get_one.side_effect = [cvws]
             with self.assertRaises(EngineException, msg="Accepted wrong property") as e:
-                self.topic.edit(self.fake_session, str(uuid4()), {"name": "new-name", "extra_prop": "anything"})
-            self.assertEqual(e.exception.http_code, HTTPStatus.UNPROCESSABLE_ENTITY, "Wrong HTTP status code")
-            self.assertIn("format error '{}'".format("additional properties are not allowed ('{}' was unexpected)").
-                          format("extra_prop"),
-                          norm(str(e.exception)), "Wrong exception text")
+                self.topic.edit(
+                    self.fake_session,
+                    str(uuid4()),
+                    {"name": "new-name", "extra_prop": "anything"},
+                )
+            self.assertEqual(
+                e.exception.http_code,
+                HTTPStatus.UNPROCESSABLE_ENTITY,
+                "Wrong HTTP status code",
+            )
+            self.assertIn(
+                "format error '{}'".format(
+                    "additional properties are not allowed ('{}' was unexpected)"
+                ).format("extra_prop"),
+                norm(str(e.exception)),
+                "Wrong exception text",
+            )
 
     def test_conflict_on_edit(self):
         with self.subTest(i=1):
             cid = str(uuid4())
             new_name = "new-cim-name"
-            self.db.get_one.side_effect = [{"_id": cid, "name": self.test_name},
-                                           {"_id": str(uuid4()), "name": new_name}]
-            with self.assertRaises(EngineException, msg="Accepted existing CIM name") as e:
+            self.db.get_one.side_effect = [
+                {"_id": cid, "name": self.test_name},
+                {"_id": str(uuid4()), "name": new_name},
+            ]
+            with self.assertRaises(
+                EngineException, msg="Accepted existing CIM name"
+            ) as e:
                 self.topic.edit(self.fake_session, cid, {"name": new_name})
-            self.assertEqual(e.exception.http_code, HTTPStatus.CONFLICT, "Wrong HTTP status code")
-            self.assertIn("name '{}' already exists for {}".format(new_name, self.topic.topic),
-                          norm(str(e.exception)), "Wrong exception text")
+            self.assertEqual(
+                e.exception.http_code, HTTPStatus.CONFLICT, "Wrong HTTP status code"
+            )
+            self.assertIn(
+                "name '{}' already exists for {}".format(new_name, self.topic.topic),
+                norm(str(e.exception)),
+                "Wrong exception text",
+            )
 
     def test_delete_cvws(self):
         cid = str(uuid4())
@@ -851,57 +1464,119 @@ class Test_CommonVimWimSdn(TestCase):
         cvws = {"_id": cid, "name": self.test_name}
         self.db.get_list.return_value = []
         with self.subTest(i=1):
-            cvws["_admin"] = {"projects_read": [test_pid, ro_pid, rw_pid], "projects_write": [test_pid, rw_pid]}
+            cvws["_admin"] = {
+                "projects_read": [test_pid, ro_pid, rw_pid],
+                "projects_write": [test_pid, rw_pid],
+            }
             self.db.get_one.return_value = cvws
             oid = self.topic.delete(self.fake_session, cid)
             self.assertIsNone(oid, "Wrong operation identifier")
-            self.assertEqual(self.db.get_one.call_args[0][0], self.topic.topic, "Wrong topic")
-            self.assertEqual(self.db.get_one.call_args[0][1]["_id"], cid, "Wrong CIM identifier")
-            self.assertEqual(self.db.set_one.call_args[0][0], self.topic.topic, "Wrong topic")
-            self.assertEqual(self.db.set_one.call_args[0][1]["_id"], cid, "Wrong CIM identifier")
-            self.assertEqual(self.db.set_one.call_args[1]["update_dict"], None,
-                             "Wrong read-only projects update")
-            self.assertEqual(self.db.set_one.call_args[1]["pull_list"],
-                             {"_admin.projects_read": (test_pid,), "_admin.projects_write": (test_pid,)},
-                             "Wrong read/write projects update")
+            self.assertEqual(
+                self.db.get_one.call_args[0][0], self.topic.topic, "Wrong topic"
+            )
+            self.assertEqual(
+                self.db.get_one.call_args[0][1]["_id"], cid, "Wrong CIM identifier"
+            )
+            self.assertEqual(
+                self.db.set_one.call_args[0][0], self.topic.topic, "Wrong topic"
+            )
+            self.assertEqual(
+                self.db.set_one.call_args[0][1]["_id"], cid, "Wrong CIM identifier"
+            )
+            self.assertEqual(
+                self.db.set_one.call_args[1]["update_dict"],
+                None,
+                "Wrong read-only projects update",
+            )
+            self.assertEqual(
+                self.db.set_one.call_args[1]["pull_list"],
+                {
+                    "_admin.projects_read": (test_pid,),
+                    "_admin.projects_write": (test_pid,),
+                },
+                "Wrong read/write projects update",
+            )
             self.topic._send_msg.assert_not_called()
         with self.subTest(i=2):
             now = time()
-            cvws["_admin"] = {"projects_read": [test_pid], "projects_write": [test_pid], "operations": []}
+            cvws["_admin"] = {
+                "projects_read": [test_pid],
+                "projects_write": [test_pid],
+                "operations": [],
+            }
             self.db.get_one.return_value = cvws
             oid = self.topic.delete(self.fake_session, cid)
-            self.assertEqual(oid, cid+":0", "Wrong operation identifier")
-            self.assertEqual(self.db.get_one.call_args[0][0], self.topic.topic, "Wrong topic")
-            self.assertEqual(self.db.get_one.call_args[0][1]["_id"], cid, "Wrong CIM identifier")
-            self.assertEqual(self.db.set_one.call_args[0][0], self.topic.topic, "Wrong topic")
-            self.assertEqual(self.db.set_one.call_args[0][1]["_id"], cid, "Wrong user identifier")
-            self.assertEqual(self.db.set_one.call_args[1]["update_dict"], {"_admin.to_delete": True},
-                             "Wrong _admin.to_delete update")
+            self.assertEqual(oid, cid + ":0", "Wrong operation identifier")
+            self.assertEqual(
+                self.db.get_one.call_args[0][0], self.topic.topic, "Wrong topic"
+            )
+            self.assertEqual(
+                self.db.get_one.call_args[0][1]["_id"], cid, "Wrong CIM identifier"
+            )
+            self.assertEqual(
+                self.db.set_one.call_args[0][0], self.topic.topic, "Wrong topic"
+            )
+            self.assertEqual(
+                self.db.set_one.call_args[0][1]["_id"], cid, "Wrong CIM identifier"
+            )
+            self.assertEqual(
+                self.db.set_one.call_args[1]["update_dict"],
+                {"_admin.to_delete": True},
+                "Wrong _admin.to_delete update",
+            )
             operation = self.db.set_one.call_args[1]["push"]["_admin.operations"]
-            self.assertEqual(operation["lcmOperationType"], "delete", "Wrong operation type")
-            self.assertEqual(operation["operationState"], "PROCESSING", "Wrong operation state")
-            self.assertEqual(operation["detailed-status"], "", "Wrong operation detailed status")
-            self.assertIsNone(operation["operationParams"], "Wrong operation parameters")
-            self.assertGreater(operation["startTime"], now, "Wrong operation start time")
-            self.assertGreater(operation["statusEnteredTime"], now, "Wrong operation status enter time")
-            self.topic._send_msg.assert_called_once_with("delete", {"_id": cid, "op_id": cid + ":0"}, not_send_msg=None)
+            self.assertEqual(
+                operation["lcmOperationType"], "delete", "Wrong operation type"
+            )
+            self.assertEqual(
+                operation["operationState"], "PROCESSING", "Wrong operation state"
+            )
+            self.assertEqual(
+                operation["detailed-status"], "", "Wrong operation detailed status"
+            )
+            self.assertIsNone(
+                operation["operationParams"], "Wrong operation parameters"
+            )
+            self.assertGreater(
+                operation["startTime"], now, "Wrong operation start time"
+            )
+            self.assertGreater(
+                operation["statusEnteredTime"], now, "Wrong operation status enter time"
+            )
+            self.topic._send_msg.assert_called_once_with(
+                "delete", {"_id": cid, "op_id": cid + ":0"}, not_send_msg=None
+            )
         with self.subTest(i=3):
-            cvws["_admin"] = {"projects_read": [], "projects_write": [], "operations": []}
+            cvws["_admin"] = {
+                "projects_read": [],
+                "projects_write": [],
+                "operations": [],
+            }
             self.db.get_one.return_value = cvws
             self.topic._send_msg.reset_mock()
             self.db.get_one.reset_mock()
             self.db.del_one.reset_mock()
-            self.fake_session["force"] = True   # to force deletion
-            self.fake_session["admin"] = True   # to force deletion
-            self.fake_session["project_id"] = []   # to force deletion
+            self.fake_session["force"] = True  # to force deletion
+            self.fake_session["admin"] = True  # to force deletion
+            self.fake_session["project_id"] = []  # to force deletion
             oid = self.topic.delete(self.fake_session, cid)
             self.assertIsNone(oid, "Wrong operation identifier")
-            self.assertEqual(self.db.get_one.call_args[0][0], self.topic.topic, "Wrong topic")
-            self.assertEqual(self.db.get_one.call_args[0][1]["_id"], cid, "Wrong CIM identifier")
-            self.assertEqual(self.db.del_one.call_args[0][0], self.topic.topic, "Wrong topic")
-            self.assertEqual(self.db.del_one.call_args[0][1]["_id"], cid, "Wrong CIM identifier")
-            self.topic._send_msg.assert_called_once_with("deleted", {"_id": cid, "op_id": None}, not_send_msg=None)
+            self.assertEqual(
+                self.db.get_one.call_args[0][0], self.topic.topic, "Wrong topic"
+            )
+            self.assertEqual(
+                self.db.get_one.call_args[0][1]["_id"], cid, "Wrong CIM identifier"
+            )
+            self.assertEqual(
+                self.db.del_one.call_args[0][0], self.topic.topic, "Wrong topic"
+            )
+            self.assertEqual(
+                self.db.del_one.call_args[0][1]["_id"], cid, "Wrong CIM identifier"
+            )
+            self.topic._send_msg.assert_called_once_with(
+                "deleted", {"_id": cid, "op_id": None}, not_send_msg=None
+            )
 
 
-if __name__ == '__main__':
+if __name__ == "__main__":
     unittest.main()
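
The hunks in these test modules are formatting-only; test behaviour is unchanged. As an illustration of the convention applied across the whole change (the output matches black's defaults: double-quoted strings, trailing commas, and long calls expanded to one argument per line), here is a minimal hypothetical test, not taken from the osm-nbi tree:

import unittest
from http import HTTPStatus


class ExampleStyleTest(unittest.TestCase):
    def test_status_code(self):
        # Before reformatting, this assertion would sit on a single over-long line:
        #   self.assertEqual(HTTPStatus.CONFLICT.value, 409, "Wrong HTTP status code for conflicting resources")
        self.assertEqual(
            HTTPStatus.CONFLICT.value,
            409,
            "Wrong HTTP status code for conflicting resources",
        )


if __name__ == "__main__":
    unittest.main()
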
index a3fc32c..5107680 100755 (executable)
@@ -19,13 +19,13 @@ __date__ = "2020-06-17"
 
 import unittest
 from unittest import TestCase
+
 # from unittest.mock import Mock
 # from osm_common import dbbase, fsbase, msgbase
 from osm_nbi.base_topic import BaseTopic, EngineException
 
 
 class Test_BaseTopic(TestCase):
-
     @classmethod
     def setUpClass(cls):
         cls.test_name = "test-base-topic"
@@ -45,29 +45,84 @@ class Test_BaseTopic(TestCase):
 
         test_set = (
             # (descriptor content, kwargs, expected descriptor (None=fails), message)
-            ({"a": {"none": None}}, {"a.b.num": "v"}, {"a": {"none": None, "b": {"num": "v"}}}, "create dict"),
-            ({"a": {"none": None}}, {"a.none.num": "v"}, {"a": {"none": {"num": "v"}}}, "create dict over none"),
-            ({"a": {"b": {"num": 4}}}, {"a.b.num": "v"}, {"a": {"b": {"num": "v"}}}, "replace_number"),
-            ({"a": {"b": {"num": 4}}}, {"a.b.num.c.d": "v"}, {"a": {"b": {"num": {"c": {"d": "v"}}}}},
-             "create dict over number"),
-            ({"a": {"b": {"num": 4}}}, {"a.b": "v"}, {"a": {"b": "v"}}, "replace dict with a string"),
-            ({"a": {"b": {"num": 4}}}, {"a.b": None}, {"a": {}}, "replace dict with None"),
-            ({"a": [{"b": {"num": 4}}]}, {"a.b.num": "v"}, None, "create dict over list should fail"),
-            ({"a": [{"b": {"num": 4}}]}, {"a.0.b.num": "v"}, {"a": [{"b": {"num": "v"}}]}, "set list"),
-            ({"a": [{"b": {"num": 4}}]}, {"a.3.b.num": "v"},
-             {"a": [{"b": {"num": 4}}, None, None, {"b": {"num": "v"}}]}, "expand list"),
+            (
+                {"a": {"none": None}},
+                {"a.b.num": "v"},
+                {"a": {"none": None, "b": {"num": "v"}}},
+                "create dict",
+            ),
+            (
+                {"a": {"none": None}},
+                {"a.none.num": "v"},
+                {"a": {"none": {"num": "v"}}},
+                "create dict over none",
+            ),
+            (
+                {"a": {"b": {"num": 4}}},
+                {"a.b.num": "v"},
+                {"a": {"b": {"num": "v"}}},
+                "replace_number",
+            ),
+            (
+                {"a": {"b": {"num": 4}}},
+                {"a.b.num.c.d": "v"},
+                {"a": {"b": {"num": {"c": {"d": "v"}}}}},
+                "create dict over number",
+            ),
+            (
+                {"a": {"b": {"num": 4}}},
+                {"a.b": "v"},
+                {"a": {"b": "v"}},
+                "replace dict with a string",
+            ),
+            (
+                {"a": {"b": {"num": 4}}},
+                {"a.b": None},
+                {"a": {}},
+                "replace dict with None",
+            ),
+            (
+                {"a": [{"b": {"num": 4}}]},
+                {"a.b.num": "v"},
+                None,
+                "create dict over list should fail",
+            ),
+            (
+                {"a": [{"b": {"num": 4}}]},
+                {"a.0.b.num": "v"},
+                {"a": [{"b": {"num": "v"}}]},
+                "set list",
+            ),
+            (
+                {"a": [{"b": {"num": 4}}]},
+                {"a.3.b.num": "v"},
+                {"a": [{"b": {"num": 4}}, None, None, {"b": {"num": "v"}}]},
+                "expand list",
+            ),
             ({"a": [[4]]}, {"a.0.0": "v"}, {"a": [["v"]]}, "set nested list"),
-            ({"a": [[4]]}, {"a.0.2": "v"}, {"a": [[4, None, "v"]]}, "expand nested list"),
-            ({"a": [[4]]}, {"a.2.2": "v"}, {"a": [[4], None, {"2": "v"}]}, "expand list and add number key"),
+            (
+                {"a": [[4]]},
+                {"a.0.2": "v"},
+                {"a": [[4, None, "v"]]},
+                "expand nested list",
+            ),
+            (
+                {"a": [[4]]},
+                {"a.2.2": "v"},
+                {"a": [[4], None, {"2": "v"}]},
+                "expand list and add number key",
+            ),
             ({"a": None}, {"b.c": "v"}, {"a": None, "b": {"c": "v"}}, "expand at root"),
         )
         for desc, kwargs, expected, message in test_set:
             if expected is None:
-                self.assertRaises(EngineException, BaseTopic._update_input_with_kwargs, desc, kwargs)
+                self.assertRaises(
+                    EngineException, BaseTopic._update_input_with_kwargs, desc, kwargs
+                )
             else:
                 BaseTopic._update_input_with_kwargs(desc, kwargs)
                 self.assertEqual(desc, expected, message)
 
 
-if __name__ == '__main__':
+if __name__ == "__main__":
     unittest.main()
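
The test_update_input_with_kwargs table above pins down the dotted-key semantics of BaseTopic._update_input_with_kwargs: each path segment creates a nested dict, numeric segments index into lists (padding with None when the index does not exist yet), and addressing a list with a non-numeric key raises EngineException. A short usage sketch, assuming the osm_nbi package is importable:

from osm_nbi.base_topic import BaseTopic

desc = {"a": [{"b": {"num": 4}}]}
# "a.0.b.num" addresses desc["a"][0]["b"]["num"]; the helper mutates desc in place.
BaseTopic._update_input_with_kwargs(desc, {"a.0.b.num": "v"})
assert desc == {"a": [{"b": {"num": "v"}}]}
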
index 0b1d800..b2df34f 100755 (executable)
@@ -36,13 +36,20 @@ test_name = "test-user"
 db_vnfd_content = yaml.load(db_vnfds_text, Loader=yaml.Loader)[0]
 db_nsd_content = yaml.load(db_nsds_text, Loader=yaml.Loader)[0]
 test_pid = db_vnfd_content["_admin"]["projects_read"][0]
-fake_session = {"username": test_name, "project_id": (test_pid,), "method": None,
-                "admin": True, "force": False, "public": False, "allow_show_user_project_role": True}
+fake_session = {
+    "username": test_name,
+    "project_id": (test_pid,),
+    "method": None,
+    "admin": True,
+    "force": False,
+    "public": False,
+    "allow_show_user_project_role": True,
+}
 
 
 def norm(str):
     """Normalize string for checking"""
-    return ' '.join(str.strip().split()).lower()
+    return " ".join(str.strip().split()).lower()
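# Illustrative note: norm() collapses runs of whitespace and lower-cases the text,
# e.g. norm("  Format ERROR\n 'x' ") == "format error 'x'", so exception messages can
# be compared regardless of wrapping or capitalisation.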
 
 
 def compare_desc(tc, d1, d2, k):
@@ -69,7 +76,6 @@ def compare_desc(tc, d1, d2, k):
 
 
 class Test_VnfdTopic(TestCase):
-
     @classmethod
     def setUpClass(cls):
         cls.test_name = "test-vnfd-topic"
@@ -90,11 +96,13 @@ class Test_VnfdTopic(TestCase):
         did = db_vnfd_content["_id"]
         self.fs.get_params.return_value = {}
         self.fs.file_exists.return_value = False
-        self.fs.file_open.side_effect = lambda path, mode: open("/tmp/" + str(uuid4()), "a+b")
+        self.fs.file_open.side_effect = lambda path, mode: open(
+            "/tmp/" + str(uuid4()), "a+b"
+        )
         test_vnfd = deepcopy(db_vnfd_content)
         del test_vnfd["_id"]
         del test_vnfd["_admin"]
-        with self.subTest(i=1, t='Normal Creation'):
+        with self.subTest(i=1, t="Normal Creation"):
             self.db.create.return_value = did
             rollback = []
             did2, oid = self.topic.new(rollback, fake_session, {})
@@ -107,32 +115,42 @@ class Test_VnfdTopic(TestCase):
             self.assertEqual(db_args[0], self.topic.topic, "Wrong DB topic")
             self.assertEqual(did2, did, "Wrong DB VNFD id")
             self.assertIsNotNone(db_args[1]["_admin"]["created"], "Wrong creation time")
-            self.assertEqual(db_args[1]["_admin"]["modified"], db_args[1]["_admin"]["created"],
-                             "Wrong modification time")
-            self.assertEqual(db_args[1]["_admin"]["projects_read"], [test_pid], "Wrong read-only project list")
-            self.assertEqual(db_args[1]["_admin"]["projects_write"], [test_pid], "Wrong read-write project list")
+            self.assertEqual(
+                db_args[1]["_admin"]["modified"],
+                db_args[1]["_admin"]["created"],
+                "Wrong modification time",
+            )
+            self.assertEqual(
+                db_args[1]["_admin"]["projects_read"],
+                [test_pid],
+                "Wrong read-only project list",
+            )
+            self.assertEqual(
+                db_args[1]["_admin"]["projects_write"],
+                [test_pid],
+                "Wrong read-write project list",
+            )
             tmp1 = test_vnfd["vdu"][0]["cloud-init-file"]
-            tmp2 = test_vnfd["df"][
-                0
-            ]["lcm-operations-configuration"]["operate-vnf-op-config"]["day1-2"][
-                0
-            ]["execution-environment-list"][
-                0
-            ]["juju"]
+            tmp2 = test_vnfd["df"][0]["lcm-operations-configuration"][
+                "operate-vnf-op-config"
+            ]["day1-2"][0]["execution-environment-list"][0]["juju"]
             del test_vnfd["vdu"][0]["cloud-init-file"]
-            del test_vnfd["df"][
-                0
-            ]["lcm-operations-configuration"]["operate-vnf-op-config"]["day1-2"][
-                0
-            ]["execution-environment-list"][
-                0
-            ]["juju"]
+            del test_vnfd["df"][0]["lcm-operations-configuration"][
+                "operate-vnf-op-config"
+            ]["day1-2"][0]["execution-environment-list"][0]["juju"]
             try:
-                self.db.get_one.side_effect = [{"_id": did, "_admin": deepcopy(db_vnfd_content["_admin"])}, None]
-                self.topic.upload_content(fake_session, did, test_vnfd, {}, {"Content-Type": []})
+                self.db.get_one.side_effect = [
+                    {"_id": did, "_admin": deepcopy(db_vnfd_content["_admin"])},
+                    None,
+                ]
+                self.topic.upload_content(
+                    fake_session, did, test_vnfd, {}, {"Content-Type": []}
+                )
                 msg_args = self.msg.write.call_args[0]
                 test_vnfd["_id"] = did
-                self.assertEqual(msg_args[0], self.topic.topic_msg, "Wrong message topic")
+                self.assertEqual(
+                    msg_args[0], self.topic.topic_msg, "Wrong message topic"
+                )
                 self.assertEqual(msg_args[1], "edited", "Wrong message action")
                 self.assertEqual(msg_args[2], test_vnfd, "Wrong message content")
                 db_args = self.db.get_one.mock_calls[0][1]
@@ -144,217 +162,430 @@ class Test_VnfdTopic(TestCase):
                 admin = db_args[2]["_admin"]
                 db_admin = deepcopy(db_vnfd_content["_admin"])
                 self.assertEqual(admin["type"], "vnfd", "Wrong descriptor type")
-                self.assertEqual(admin["created"], db_admin["created"], "Wrong creation time")
-                self.assertGreater(admin["modified"], db_admin["created"], "Wrong modification time")
-                self.assertEqual(admin["projects_read"], db_admin["projects_read"], "Wrong read-only project list")
-                self.assertEqual(admin["projects_write"], db_admin["projects_write"], "Wrong read-write project list")
-                self.assertEqual(admin["onboardingState"], "ONBOARDED", "Wrong onboarding state")
-                self.assertEqual(admin["operationalState"], "ENABLED", "Wrong operational state")
+                self.assertEqual(
+                    admin["created"], db_admin["created"], "Wrong creation time"
+                )
+                self.assertGreater(
+                    admin["modified"], db_admin["created"], "Wrong modification time"
+                )
+                self.assertEqual(
+                    admin["projects_read"],
+                    db_admin["projects_read"],
+                    "Wrong read-only project list",
+                )
+                self.assertEqual(
+                    admin["projects_write"],
+                    db_admin["projects_write"],
+                    "Wrong read-write project list",
+                )
+                self.assertEqual(
+                    admin["onboardingState"], "ONBOARDED", "Wrong onboarding state"
+                )
+                self.assertEqual(
+                    admin["operationalState"], "ENABLED", "Wrong operational state"
+                )
                 self.assertEqual(admin["usageState"], "NOT_IN_USE", "Wrong usage state")
                 storage = admin["storage"]
                 self.assertEqual(storage["folder"], did, "Wrong storage folder")
-                self.assertEqual(storage["descriptor"], "package", "Wrong storage descriptor")
+                self.assertEqual(
+                    storage["descriptor"], "package", "Wrong storage descriptor"
+                )
                 compare_desc(self, test_vnfd, db_args[2], "VNFD")
             finally:
                 test_vnfd["vdu"][0]["cloud-init-file"] = tmp1
-                test_vnfd["df"][
-                    0
-                ]["lcm-operations-configuration"]["operate-vnf-op-config"]["day1-2"][
-                    0
-                ]["execution-environment-list"][
-                    0
-                ]["juju"] = tmp2
-        self.db.get_one.side_effect = lambda table, filter, fail_on_empty=None, fail_on_more=None: \
-            {"_id": did, "_admin": deepcopy(db_vnfd_content["_admin"])}
-        with self.subTest(i=2, t='Check Pyangbind Validation: additional properties'):
+                test_vnfd["df"][0]["lcm-operations-configuration"][
+                    "operate-vnf-op-config"
+                ]["day1-2"][0]["execution-environment-list"][0]["juju"] = tmp2
+        self.db.get_one.side_effect = (
+            lambda table, filter, fail_on_empty=None, fail_on_more=None: {
+                "_id": did,
+                "_admin": deepcopy(db_vnfd_content["_admin"]),
+            }
+        )
+        with self.subTest(i=2, t="Check Pyangbind Validation: additional properties"):
             test_vnfd["extra-property"] = 0
             try:
-                with self.assertRaises(EngineException, msg="Accepted VNFD with an additional property") as e:
-                    self.topic.upload_content(fake_session, did, test_vnfd, {}, {"Content-Type": []})
-                self.assertEqual(e.exception.http_code, HTTPStatus.UNPROCESSABLE_ENTITY, "Wrong HTTP status code")
-                self.assertIn(norm("Error in pyangbind validation: {} ({})"
-                                   .format("json object contained a key that did not exist", "extra-property")),
-                              norm(str(e.exception)), "Wrong exception text")
+                with self.assertRaises(
+                    EngineException, msg="Accepted VNFD with an additional property"
+                ) as e:
+                    self.topic.upload_content(
+                        fake_session, did, test_vnfd, {}, {"Content-Type": []}
+                    )
+                self.assertEqual(
+                    e.exception.http_code,
+                    HTTPStatus.UNPROCESSABLE_ENTITY,
+                    "Wrong HTTP status code",
+                )
+                self.assertIn(
+                    norm(
+                        "Error in pyangbind validation: {} ({})".format(
+                            "json object contained a key that did not exist",
+                            "extra-property",
+                        )
+                    ),
+                    norm(str(e.exception)),
+                    "Wrong exception text",
+                )
             finally:
                 del test_vnfd["extra-property"]
-        with self.subTest(i=3, t='Check Pyangbind Validation: property types'):
+        with self.subTest(i=3, t="Check Pyangbind Validation: property types"):
             tmp = test_vnfd["product-name"]
             test_vnfd["product-name"] = {"key": 0}
             try:
-                with self.assertRaises(EngineException, msg="Accepted VNFD with a wrongly typed property") as e:
-                    self.topic.upload_content(fake_session, did, test_vnfd, {}, {"Content-Type": []})
-                self.assertEqual(e.exception.http_code, HTTPStatus.UNPROCESSABLE_ENTITY, "Wrong HTTP status code")
-                self.assertIn(norm("Error in pyangbind validation: {} ({})"
-                                   .format("json object contained a key that did not exist", "key")),
-                              norm(str(e.exception)), "Wrong exception text")
+                with self.assertRaises(
+                    EngineException, msg="Accepted VNFD with a wrongly typed property"
+                ) as e:
+                    self.topic.upload_content(
+                        fake_session, did, test_vnfd, {}, {"Content-Type": []}
+                    )
+                self.assertEqual(
+                    e.exception.http_code,
+                    HTTPStatus.UNPROCESSABLE_ENTITY,
+                    "Wrong HTTP status code",
+                )
+                self.assertIn(
+                    norm(
+                        "Error in pyangbind validation: {} ({})".format(
+                            "json object contained a key that did not exist", "key"
+                        )
+                    ),
+                    norm(str(e.exception)),
+                    "Wrong exception text",
+                )
             finally:
                 test_vnfd["product-name"] = tmp
-        with self.subTest(i=4, t='Check Input Validation: cloud-init'):
-            with self.assertRaises(EngineException, msg="Accepted non-existent cloud_init file") as e:
-                self.topic.upload_content(fake_session, did, test_vnfd, {}, {"Content-Type": []})
-            self.assertEqual(e.exception.http_code, HTTPStatus.BAD_REQUEST, "Wrong HTTP status code")
-            self.assertIn(norm("{} defined in vnf[id={}]:vdu[id={}] but not present in package"
-                               .format("cloud-init", test_vnfd["id"], test_vnfd["vdu"][0]["id"])),
-                          norm(str(e.exception)), "Wrong exception text")
-        with self.subTest(i=5, t='Check Input Validation: day1-2 configuration[juju]'):
+        with self.subTest(i=4, t="Check Input Validation: cloud-init"):
+            with self.assertRaises(
+                EngineException, msg="Accepted non-existent cloud_init file"
+            ) as e:
+                self.topic.upload_content(
+                    fake_session, did, test_vnfd, {}, {"Content-Type": []}
+                )
+            self.assertEqual(
+                e.exception.http_code, HTTPStatus.BAD_REQUEST, "Wrong HTTP status code"
+            )
+            self.assertIn(
+                norm(
+                    "{} defined in vnf[id={}]:vdu[id={}] but not present in package".format(
+                        "cloud-init", test_vnfd["id"], test_vnfd["vdu"][0]["id"]
+                    )
+                ),
+                norm(str(e.exception)),
+                "Wrong exception text",
+            )
+        with self.subTest(i=5, t="Check Input Validation: day1-2 configuration[juju]"):
             del test_vnfd["vdu"][0]["cloud-init-file"]
-            with self.assertRaises(EngineException, msg="Accepted non-existent charm in VNF configuration") as e:
-                self.topic.upload_content(fake_session, did, test_vnfd, {}, {"Content-Type": []})
+            with self.assertRaises(
+                EngineException, msg="Accepted non-existent charm in VNF configuration"
+            ) as e:
+                self.topic.upload_content(
+                    fake_session, did, test_vnfd, {}, {"Content-Type": []}
+                )
             print(str(e.exception))
-            self.assertEqual(e.exception.http_code, HTTPStatus.BAD_REQUEST, "Wrong HTTP status code")
-            self.assertIn(norm("{} defined in vnf[id={}] but not present in package".format("charm", test_vnfd["id"])),
-                          norm(str(e.exception)), "Wrong exception text")
-            del test_vnfd["df"][
-                0
-            ]["lcm-operations-configuration"]["operate-vnf-op-config"]["day1-2"][
-                0
-            ]["execution-environment-list"][
-                0
-            ]["juju"]
-        with self.subTest(i=6, t='Check Input Validation: mgmt-cp'):
+            self.assertEqual(
+                e.exception.http_code, HTTPStatus.BAD_REQUEST, "Wrong HTTP status code"
+            )
+            self.assertIn(
+                norm(
+                    "{} defined in vnf[id={}] but not present in package".format(
+                        "charm", test_vnfd["id"]
+                    )
+                ),
+                norm(str(e.exception)),
+                "Wrong exception text",
+            )
+            del test_vnfd["df"][0]["lcm-operations-configuration"][
+                "operate-vnf-op-config"
+            ]["day1-2"][0]["execution-environment-list"][0]["juju"]
+        with self.subTest(i=6, t="Check Input Validation: mgmt-cp"):
             tmp = test_vnfd["mgmt-cp"]
             del test_vnfd["mgmt-cp"]
             try:
-                with self.assertRaises(EngineException, msg="Accepted VNFD without management interface") as e:
-                    self.topic.upload_content(fake_session, did, test_vnfd, {}, {"Content-Type": []})
-                self.assertEqual(e.exception.http_code, HTTPStatus.UNPROCESSABLE_ENTITY, "Wrong HTTP status code")
-                self.assertIn(norm("'{}' is a mandatory field and it is not defined".format("mgmt-cp")),
-                              norm(str(e.exception)), "Wrong exception text")
+                with self.assertRaises(
+                    EngineException, msg="Accepted VNFD without management interface"
+                ) as e:
+                    self.topic.upload_content(
+                        fake_session, did, test_vnfd, {}, {"Content-Type": []}
+                    )
+                self.assertEqual(
+                    e.exception.http_code,
+                    HTTPStatus.UNPROCESSABLE_ENTITY,
+                    "Wrong HTTP status code",
+                )
+                self.assertIn(
+                    norm(
+                        "'{}' is a mandatory field and it is not defined".format(
+                            "mgmt-cp"
+                        )
+                    ),
+                    norm(str(e.exception)),
+                    "Wrong exception text",
+                )
             finally:
                 test_vnfd["mgmt-cp"] = tmp
-        with self.subTest(i=7, t='Check Input Validation: mgmt-cp connection point'):
+        with self.subTest(i=7, t="Check Input Validation: mgmt-cp connection point"):
             tmp = test_vnfd["mgmt-cp"]
             test_vnfd["mgmt-cp"] = "wrong-cp"
             try:
-                with self.assertRaises(EngineException, msg="Accepted wrong mgmt-cp connection point") as e:
-                    self.topic.upload_content(fake_session, did, test_vnfd, {}, {"Content-Type": []})
-                self.assertEqual(e.exception.http_code, HTTPStatus.UNPROCESSABLE_ENTITY, "Wrong HTTP status code")
-                self.assertIn(norm("mgmt-cp='{}' must match an existing ext-cpd".format(test_vnfd["mgmt-cp"])),
-                              norm(str(e.exception)), "Wrong exception text")
+                with self.assertRaises(
+                    EngineException, msg="Accepted wrong mgmt-cp connection point"
+                ) as e:
+                    self.topic.upload_content(
+                        fake_session, did, test_vnfd, {}, {"Content-Type": []}
+                    )
+                self.assertEqual(
+                    e.exception.http_code,
+                    HTTPStatus.UNPROCESSABLE_ENTITY,
+                    "Wrong HTTP status code",
+                )
+                self.assertIn(
+                    norm(
+                        "mgmt-cp='{}' must match an existing ext-cpd".format(
+                            test_vnfd["mgmt-cp"]
+                        )
+                    ),
+                    norm(str(e.exception)),
+                    "Wrong exception text",
+                )
             finally:
                 test_vnfd["mgmt-cp"] = tmp
-        with self.subTest(i=8, t='Check Input Validation: vdu int-cpd'):
+        with self.subTest(i=8, t="Check Input Validation: vdu int-cpd"):
             ext_cpd = test_vnfd["ext-cpd"][1]
             tmp = ext_cpd["int-cpd"]["cpd"]
             ext_cpd["int-cpd"]["cpd"] = "wrong-cpd"
             try:
-                with self.assertRaises(EngineException, msg="Accepted wrong ext-cpd internal connection point") as e:
-                    self.topic.upload_content(fake_session, did, test_vnfd, {}, {"Content-Type": []})
-                self.assertEqual(e.exception.http_code, HTTPStatus.UNPROCESSABLE_ENTITY, "Wrong HTTP status code")
-                self.assertIn(norm("ext-cpd[id='{}']:int-cpd must match an existing vdu int-cpd".format(ext_cpd["id"])),
-                              norm(str(e.exception)), "Wrong exception text")
+                with self.assertRaises(
+                    EngineException,
+                    msg="Accepted wrong ext-cpd internal connection point",
+                ) as e:
+                    self.topic.upload_content(
+                        fake_session, did, test_vnfd, {}, {"Content-Type": []}
+                    )
+                self.assertEqual(
+                    e.exception.http_code,
+                    HTTPStatus.UNPROCESSABLE_ENTITY,
+                    "Wrong HTTP status code",
+                )
+                self.assertIn(
+                    norm(
+                        "ext-cpd[id='{}']:int-cpd must match an existing vdu int-cpd".format(
+                            ext_cpd["id"]
+                        )
+                    ),
+                    norm(str(e.exception)),
+                    "Wrong exception text",
+                )
             finally:
                 ext_cpd["int-cpd"]["cpd"] = tmp
-        with self.subTest(i=9, t='Check Input Validation: Duplicated VLD'):
-            test_vnfd['int-virtual-link-desc'].insert(0, {'id': 'internal'})
+        with self.subTest(i=9, t="Check Input Validation: Duplicated VLD"):
+            test_vnfd["int-virtual-link-desc"].insert(0, {"id": "internal"})
             try:
-                with self.assertRaises(EngineException, msg="Accepted duplicated VLD name") as e:
-                    self.topic.upload_content(fake_session, did, test_vnfd, {}, {"Content-Type": []})
-                self.assertEqual(e.exception.http_code, HTTPStatus.UNPROCESSABLE_ENTITY, "Wrong HTTP status code")
+                with self.assertRaises(
+                    EngineException, msg="Accepted duplicated VLD name"
+                ) as e:
+                    self.topic.upload_content(
+                        fake_session, did, test_vnfd, {}, {"Content-Type": []}
+                    )
+                self.assertEqual(
+                    e.exception.http_code,
+                    HTTPStatus.UNPROCESSABLE_ENTITY,
+                    "Wrong HTTP status code",
+                )
                 self.assertIn(
-                    norm("identifier id '{}' is not unique".format(test_vnfd['int-virtual-link-desc'][0]["id"])),
-                    norm(str(e.exception)), "Wrong exception text")
+                    norm(
+                        "identifier id '{}' is not unique".format(
+                            test_vnfd["int-virtual-link-desc"][0]["id"]
+                        )
+                    ),
+                    norm(str(e.exception)),
+                    "Wrong exception text",
+                )
             finally:
-                del test_vnfd['int-virtual-link-desc'][0]
-        with self.subTest(i=10, t='Check Input Validation: vdu int-virtual-link-desc'):
-            vdu = test_vnfd['vdu'][0]
-            int_cpd = vdu['int-cpd'][1]
-            tmp = int_cpd['int-virtual-link-desc']
-            int_cpd['int-virtual-link-desc'] = 'non-existing-int-virtual-link-desc'
+                del test_vnfd["int-virtual-link-desc"][0]
+        with self.subTest(i=10, t="Check Input Validation: vdu int-virtual-link-desc"):
+            vdu = test_vnfd["vdu"][0]
+            int_cpd = vdu["int-cpd"][1]
+            tmp = int_cpd["int-virtual-link-desc"]
+            int_cpd["int-virtual-link-desc"] = "non-existing-int-virtual-link-desc"
             try:
-                with self.assertRaises(EngineException, msg="Accepted int-virtual-link-desc") as e:
-                    self.topic.upload_content(fake_session, did, test_vnfd, {}, {"Content-Type": []})
-                self.assertEqual(e.exception.http_code, HTTPStatus.UNPROCESSABLE_ENTITY, "Wrong HTTP status code")
-                self.assertIn(norm("vdu[id='{}']:int-cpd[id='{}']:int-virtual-link-desc='{}' must match an existing "
-                                   "int-virtual-link-desc".format(vdu["id"], int_cpd["id"],
-                                                                  int_cpd['int-virtual-link-desc'])),
-                              norm(str(e.exception)), "Wrong exception text")
+                with self.assertRaises(
+                    EngineException, msg="Accepted int-virtual-link-desc"
+                ) as e:
+                    self.topic.upload_content(
+                        fake_session, did, test_vnfd, {}, {"Content-Type": []}
+                    )
+                self.assertEqual(
+                    e.exception.http_code,
+                    HTTPStatus.UNPROCESSABLE_ENTITY,
+                    "Wrong HTTP status code",
+                )
+                self.assertIn(
+                    norm(
+                        "vdu[id='{}']:int-cpd[id='{}']:int-virtual-link-desc='{}' must match an existing "
+                        "int-virtual-link-desc".format(
+                            vdu["id"], int_cpd["id"], int_cpd["int-virtual-link-desc"]
+                        )
+                    ),
+                    norm(str(e.exception)),
+                    "Wrong exception text",
+                )
             finally:
-                int_cpd['int-virtual-link-desc'] = tmp
-        with self.subTest(i=11, t='Check Input Validation: virtual-link-profile)'):
-            fake_ivld_profile = {'id': 'fake-profile-ref', 'flavour': 'fake-flavour'}
-            df = test_vnfd['df'][0]
-            df['virtual-link-profile'] = [fake_ivld_profile]
+                int_cpd["int-virtual-link-desc"] = tmp
+        with self.subTest(i=11, t="Check Input Validation: virtual-link-profile)"):
+            fake_ivld_profile = {"id": "fake-profile-ref", "flavour": "fake-flavour"}
+            df = test_vnfd["df"][0]
+            df["virtual-link-profile"] = [fake_ivld_profile]
             try:
-                with self.assertRaises(EngineException, msg="Accepted non-existent Profile Ref") as e:
-                    self.topic.upload_content(fake_session, did, test_vnfd, {}, {"Content-Type": []})
-                self.assertEqual(e.exception.http_code, HTTPStatus.UNPROCESSABLE_ENTITY, "Wrong HTTP status code")
-                self.assertIn(norm("df[id='{}']:virtual-link-profile='{}' must match an existing "
-                                   "int-virtual-link-desc".format(df["id"], fake_ivld_profile["id"])),
-                              norm(str(e.exception)), "Wrong exception text")
+                with self.assertRaises(
+                    EngineException, msg="Accepted non-existent Profile Ref"
+                ) as e:
+                    self.topic.upload_content(
+                        fake_session, did, test_vnfd, {}, {"Content-Type": []}
+                    )
+                self.assertEqual(
+                    e.exception.http_code,
+                    HTTPStatus.UNPROCESSABLE_ENTITY,
+                    "Wrong HTTP status code",
+                )
+                self.assertIn(
+                    norm(
+                        "df[id='{}']:virtual-link-profile='{}' must match an existing "
+                        "int-virtual-link-desc".format(
+                            df["id"], fake_ivld_profile["id"]
+                        )
+                    ),
+                    norm(str(e.exception)),
+                    "Wrong exception text",
+                )
             finally:
-                del df['virtual-link-profile']
-        with self.subTest(i=12, t='Check Input Validation: scaling-criteria monitoring-param-ref'):
-            vdu = test_vnfd['vdu'][1]
-            affected_df = test_vnfd['df'][0]
-            sa = affected_df['scaling-aspect'][0]
-            sp = sa['scaling-policy'][0]
-            sc = sp['scaling-criteria'][0]
-            tmp = vdu.pop('monitoring-parameter')
+                del df["virtual-link-profile"]
+        with self.subTest(
+            i=12, t="Check Input Validation: scaling-criteria monitoring-param-ref"
+        ):
+            vdu = test_vnfd["vdu"][1]
+            affected_df = test_vnfd["df"][0]
+            sa = affected_df["scaling-aspect"][0]
+            sp = sa["scaling-policy"][0]
+            sc = sp["scaling-criteria"][0]
+            tmp = vdu.pop("monitoring-parameter")
             try:
-                with self.assertRaises(EngineException, msg="Accepted non-existent Scaling Group Policy Criteria") as e:
-                    self.topic.upload_content(fake_session, did, test_vnfd, {}, {"Content-Type": []})
-                self.assertEqual(e.exception.http_code, HTTPStatus.UNPROCESSABLE_ENTITY, "Wrong HTTP status code")
-                self.assertIn(norm("df[id='{}']:scaling-aspect[id='{}']:scaling-policy"
-                                   "[name='{}']:scaling-criteria[name='{}']: "
-                                   "vnf-monitoring-param-ref='{}' not defined in any monitoring-param"
-                                   .format(affected_df["id"], sa["id"], sp["name"],
-                                           sc["name"], sc["vnf-monitoring-param-ref"])),
-                              norm(str(e.exception)), "Wrong exception text")
+                with self.assertRaises(
+                    EngineException,
+                    msg="Accepted non-existent Scaling Group Policy Criteria",
+                ) as e:
+                    self.topic.upload_content(
+                        fake_session, did, test_vnfd, {}, {"Content-Type": []}
+                    )
+                self.assertEqual(
+                    e.exception.http_code,
+                    HTTPStatus.UNPROCESSABLE_ENTITY,
+                    "Wrong HTTP status code",
+                )
+                self.assertIn(
+                    norm(
+                        "df[id='{}']:scaling-aspect[id='{}']:scaling-policy"
+                        "[name='{}']:scaling-criteria[name='{}']: "
+                        "vnf-monitoring-param-ref='{}' not defined in any monitoring-param".format(
+                            affected_df["id"],
+                            sa["id"],
+                            sp["name"],
+                            sc["name"],
+                            sc["vnf-monitoring-param-ref"],
+                        )
+                    ),
+                    norm(str(e.exception)),
+                    "Wrong exception text",
+                )
             finally:
-                vdu['monitoring-parameter'] = tmp
-        with self.subTest(i=13, t='Check Input Validation: scaling-aspect vnf-configuration'):
-            df = test_vnfd['df'][0]
-            tmp = test_vnfd["df"][0]["lcm-operations-configuration"]["operate-vnf-op-config"]["day1-2"].pop()
+                vdu["monitoring-parameter"] = tmp
+        with self.subTest(
+            i=13, t="Check Input Validation: scaling-aspect vnf-configuration"
+        ):
+            df = test_vnfd["df"][0]
+            tmp = test_vnfd["df"][0]["lcm-operations-configuration"][
+                "operate-vnf-op-config"
+            ]["day1-2"].pop()
             try:
-                with self.assertRaises(EngineException, msg="Accepted non-existent Scaling Group VDU ID Reference") \
-                        as e:
-                    self.topic.upload_content(fake_session, did, test_vnfd, {}, {"Content-Type": []})
-                self.assertEqual(e.exception.http_code, HTTPStatus.UNPROCESSABLE_ENTITY, "Wrong HTTP status code")
-                self.assertIn(norm("'day1-2 configuration' not defined in the descriptor but it is referenced "
-                                   "by df[id='{}']:scaling-aspect[id='{}']:scaling-config-action"
-                                   .format(df["id"], df['scaling-aspect'][0]["id"])),
-                              norm(str(e.exception)), "Wrong exception text")
+                with self.assertRaises(
+                    EngineException,
+                    msg="Accepted non-existent Scaling Group VDU ID Reference",
+                ) as e:
+                    self.topic.upload_content(
+                        fake_session, did, test_vnfd, {}, {"Content-Type": []}
+                    )
+                self.assertEqual(
+                    e.exception.http_code,
+                    HTTPStatus.UNPROCESSABLE_ENTITY,
+                    "Wrong HTTP status code",
+                )
+                self.assertIn(
+                    norm(
+                        "'day1-2 configuration' not defined in the descriptor but it is referenced "
+                        "by df[id='{}']:scaling-aspect[id='{}']:scaling-config-action".format(
+                            df["id"], df["scaling-aspect"][0]["id"]
+                        )
+                    ),
+                    norm(str(e.exception)),
+                    "Wrong exception text",
+                )
             finally:
-                test_vnfd["df"][0]["lcm-operations-configuration"]["operate-vnf-op-config"]["day1-2"].append(tmp)
-        with self.subTest(i=14, t='Check Input Validation: scaling-config-action'):
-            df = test_vnfd['df'][0]
-            tmp = test_vnfd["df"][0].get(
-                "lcm-operations-configuration"
-            ).get(
-                "operate-vnf-op-config"
-            )["day1-2"][0]['config-primitive']
-            test_vnfd["df"][0].get(
-                "lcm-operations-configuration"
-            ).get(
+                test_vnfd["df"][0]["lcm-operations-configuration"][
+                    "operate-vnf-op-config"
+                ]["day1-2"].append(tmp)
+        with self.subTest(i=14, t="Check Input Validation: scaling-config-action"):
+            df = test_vnfd["df"][0]
+            tmp = (
+                test_vnfd["df"][0]
+                .get("lcm-operations-configuration")
+                .get("operate-vnf-op-config")["day1-2"][0]["config-primitive"]
+            )
+            test_vnfd["df"][0].get("lcm-operations-configuration").get(
                 "operate-vnf-op-config"
-            )["day1-2"][0]['config-primitive'] = [{'name': 'wrong-primitive'}]
+            )["day1-2"][0]["config-primitive"] = [{"name": "wrong-primitive"}]
             try:
-                with self.assertRaises(EngineException,
-                                       msg="Accepted non-existent Scaling Group VDU ID Reference") as e:
-                    self.topic.upload_content(fake_session, did, test_vnfd, {}, {"Content-Type": []})
-                self.assertEqual(e.exception.http_code, HTTPStatus.UNPROCESSABLE_ENTITY, "Wrong HTTP status code")
-                self.assertIn(norm("df[id='{}']:scaling-aspect[id='{}']:scaling-config-action:vnf-"
-                                   "config-primitive-name-ref='{}' does not match any "
-                                   "day1-2 configuration:config-primitive:name"
-                                   .format(df["id"], df['scaling-aspect'][0]["id"],
-                                           sa['scaling-config-action'][0]['vnf-config-primitive-name-ref'])),
-                              norm(str(e.exception)), "Wrong exception text")
+                with self.assertRaises(
+                    EngineException,
+                    msg="Accepted non-existent Scaling Group VDU ID Reference",
+                ) as e:
+                    self.topic.upload_content(
+                        fake_session, did, test_vnfd, {}, {"Content-Type": []}
+                    )
+                self.assertEqual(
+                    e.exception.http_code,
+                    HTTPStatus.UNPROCESSABLE_ENTITY,
+                    "Wrong HTTP status code",
+                )
+                self.assertIn(
+                    norm(
+                        "df[id='{}']:scaling-aspect[id='{}']:scaling-config-action:vnf-"
+                        "config-primitive-name-ref='{}' does not match any "
+                        "day1-2 configuration:config-primitive:name".format(
+                            df["id"],
+                            df["scaling-aspect"][0]["id"],
+                            sa["scaling-config-action"][0][
+                                "vnf-config-primitive-name-ref"
+                            ],
+                        )
+                    ),
+                    norm(str(e.exception)),
+                    "Wrong exception text",
+                )
             finally:
-                test_vnfd["df"][0].get(
-                    "lcm-operations-configuration"
-                ).get(
+                test_vnfd["df"][0].get("lcm-operations-configuration").get(
                     "operate-vnf-op-config"
-                )["day1-2"][0]['config-primitive'] = tmp
-        with self.subTest(i=15, t='Check Input Validation: everything right'):
+                )["day1-2"][0]["config-primitive"] = tmp
+        with self.subTest(i=15, t="Check Input Validation: everything right"):
             test_vnfd["id"] = "fake-vnfd-id"
-            test_vnfd["df"][0].get(
-                "lcm-operations-configuration"
-            ).get(
+            test_vnfd["df"][0].get("lcm-operations-configuration").get(
                 "operate-vnf-op-config"
             )["day1-2"][0]["id"] = "fake-vnfd-id"
-            self.db.get_one.side_effect = [{"_id": did, "_admin": deepcopy(db_vnfd_content["_admin"])}, None]
-            rc = self.topic.upload_content(fake_session, did, test_vnfd, {}, {"Content-Type": []})
+            self.db.get_one.side_effect = [
+                {"_id": did, "_admin": deepcopy(db_vnfd_content["_admin"])},
+                None,
+            ]
+            rc = self.topic.upload_content(
+                fake_session, did, test_vnfd, {}, {"Content-Type": []}
+            )
             self.assertTrue(rc, "Input Validation: Unexpected failure")
         return
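# Note: the call pattern exercised throughout this test, with the signature taken from
# the invocations above and a VnfdTopic wired to the mocked db/fs/msg back-ends from
# setUp, is
#     topic.upload_content(session, descriptor_id, indata, kwargs, headers)
# It returns a truthy value when the descriptor passes pyangbind validation and the
# cross-reference checks shown above, and raises EngineException (carrying an HTTP
# status code) otherwise.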
 
@@ -363,12 +594,10 @@ class Test_VnfdTopic(TestCase):
         did = vnfd_content["_id"]
         self.fs.file_exists.return_value = True
         self.fs.dir_ls.return_value = True
-        with self.subTest(i=1, t='Normal Edition'):
+        with self.subTest(i=1, t="Normal Edition"):
             now = time()
             self.db.get_one.side_effect = [deepcopy(vnfd_content), None]
-            data = {
-                "product-name": "new-vnfd-name"
-            }
+            data = {"product-name": "new-vnfd-name"}
             self.topic.edit(fake_session, did, data)
             db_args = self.db.replace.call_args[0]
             msg_args = self.msg.write.call_args[0]
@@ -378,36 +607,68 @@ class Test_VnfdTopic(TestCase):
             self.assertEqual(msg_args[2], data, "Wrong message content")
             self.assertEqual(db_args[0], self.topic.topic, "Wrong DB topic")
             self.assertEqual(db_args[1], did, "Wrong DB ID")
-            self.assertEqual(db_args[2]["_admin"]["created"], vnfd_content["_admin"]["created"],
-                             "Wrong creation time")
-            self.assertGreater(db_args[2]["_admin"]["modified"], now,
-                               "Wrong modification time")
-            self.assertEqual(db_args[2]["_admin"]["projects_read"], vnfd_content["_admin"]["projects_read"],
-                             "Wrong read-only project list")
-            self.assertEqual(db_args[2]["_admin"]["projects_write"], vnfd_content["_admin"]["projects_write"],
-                             "Wrong read-write project list")
-            self.assertEqual(db_args[2]["product-name"], data["product-name"], "Wrong VNFD Name")
-        with self.subTest(i=2, t='Conflict on Edit'):
+            self.assertEqual(
+                db_args[2]["_admin"]["created"],
+                vnfd_content["_admin"]["created"],
+                "Wrong creation time",
+            )
+            self.assertGreater(
+                db_args[2]["_admin"]["modified"], now, "Wrong modification time"
+            )
+            self.assertEqual(
+                db_args[2]["_admin"]["projects_read"],
+                vnfd_content["_admin"]["projects_read"],
+                "Wrong read-only project list",
+            )
+            self.assertEqual(
+                db_args[2]["_admin"]["projects_write"],
+                vnfd_content["_admin"]["projects_write"],
+                "Wrong read-write project list",
+            )
+            self.assertEqual(
+                db_args[2]["product-name"], data["product-name"], "Wrong VNFD Name"
+            )
+        with self.subTest(i=2, t="Conflict on Edit"):
             data = {"id": "hackfest3charmed-vnf", "product-name": "new-vnfd-name"}
-            self.db.get_one.side_effect = [deepcopy(vnfd_content), {"_id": str(uuid4()), "id": data["id"]}]
-            with self.assertRaises(EngineException, msg="Accepted existing VNFD ID") as e:
+            self.db.get_one.side_effect = [
+                deepcopy(vnfd_content),
+                {"_id": str(uuid4()), "id": data["id"]},
+            ]
+            with self.assertRaises(
+                EngineException, msg="Accepted existing VNFD ID"
+            ) as e:
                 self.topic.edit(fake_session, did, data)
-            self.assertEqual(e.exception.http_code, HTTPStatus.CONFLICT, "Wrong HTTP status code")
-            self.assertIn(norm("{} with id '{}' already exists for this project".format("vnfd", data["id"])),
-                          norm(str(e.exception)), "Wrong exception text")
-        with self.subTest(i=3, t='Check Envelope'):
+            self.assertEqual(
+                e.exception.http_code, HTTPStatus.CONFLICT, "Wrong HTTP status code"
+            )
+            self.assertIn(
+                norm(
+                    "{} with id '{}' already exists for this project".format(
+                        "vnfd", data["id"]
+                    )
+                ),
+                norm(str(e.exception)),
+                "Wrong exception text",
+            )
+        with self.subTest(i=3, t="Check Envelope"):
             data = {"vnfd": [{"id": "new-vnfd-id-1", "product-name": "new-vnfd-name"}]}
-            with self.assertRaises(EngineException, msg="Accepted VNFD with wrong envelope") as e:
+            with self.assertRaises(
+                EngineException, msg="Accepted VNFD with wrong envelope"
+            ) as e:
                 self.topic.edit(fake_session, did, data, content=vnfd_content)
-            self.assertEqual(e.exception.http_code, HTTPStatus.BAD_REQUEST, "Wrong HTTP status code")
-            self.assertIn("'vnfd' must be dict", norm(str(e.exception)), "Wrong exception text")
+            self.assertEqual(
+                e.exception.http_code, HTTPStatus.BAD_REQUEST, "Wrong HTTP status code"
+            )
+            self.assertIn(
+                "'vnfd' must be dict", norm(str(e.exception)), "Wrong exception text"
+            )
         return
 
     def test_delete_vnfd(self):
         did = db_vnfd_content["_id"]
         self.db.get_one.return_value = db_vnfd_content
         p_id = db_vnfd_content["_admin"]["projects_read"][0]
-        with self.subTest(i=1, t='Normal Deletion'):
+        with self.subTest(i=1, t="Normal Deletion"):
             self.db.get_list.return_value = []
             self.db.del_one.return_value = {"deleted": 1}
             self.topic.delete(fake_session, did)
@@ -418,7 +679,11 @@ class Test_VnfdTopic(TestCase):
             self.assertEqual(msg_args[2], {"_id": did}, "Wrong message content")
             self.assertEqual(db_args[0], self.topic.topic, "Wrong DB topic")
             self.assertEqual(db_args[1]["_id"], did, "Wrong DB ID")
-            self.assertEqual(db_args[1]["_admin.projects_write.cont"], [p_id, 'ANY'], "Wrong DB filter")
+            self.assertEqual(
+                db_args[1]["_admin.projects_write.cont"],
+                [p_id, "ANY"],
+                "Wrong DB filter",
+            )
             db_g1_args = self.db.get_one.call_args[0]
             self.assertEqual(db_g1_args[0], self.topic.topic, "Wrong DB topic")
             self.assertEqual(db_g1_args[1]["_id"], did, "Wrong DB VNFD ID")
@@ -426,35 +691,61 @@ class Test_VnfdTopic(TestCase):
             self.assertEqual(db_gl_calls[0][0][0], "vnfrs", "Wrong DB topic")
             # self.assertEqual(db_gl_calls[0][0][1]["vnfd-id"], did, "Wrong DB VNFD ID")   # Filter changed after call
             self.assertEqual(db_gl_calls[1][0][0], "nsds", "Wrong DB topic")
-            self.assertEqual(db_gl_calls[1][0][1]["vnfd-id"], db_vnfd_content["id"],
-                             "Wrong DB NSD vnfd-id")
+            self.assertEqual(
+                db_gl_calls[1][0][1]["vnfd-id"],
+                db_vnfd_content["id"],
+                "Wrong DB NSD vnfd-id",
+            )
 
             self.db.set_one.assert_not_called()
             fs_del_calls = self.fs.file_delete.call_args_list
             self.assertEqual(fs_del_calls[0][0][0], did, "Wrong FS file id")
-            self.assertEqual(fs_del_calls[1][0][0], did + '_', "Wrong FS folder id")
-        with self.subTest(i=2, t='Conflict on Delete - VNFD in use by VNFR'):
+            self.assertEqual(fs_del_calls[1][0][0], did + "_", "Wrong FS folder id")
+        with self.subTest(i=2, t="Conflict on Delete - VNFD in use by VNFR"):
             self.db.get_list.return_value = [{"_id": str(uuid4()), "name": "fake-vnfr"}]
-            with self.assertRaises(EngineException, msg="Accepted VNFD in use by VNFR") as e:
+            with self.assertRaises(
+                EngineException, msg="Accepted VNFD in use by VNFR"
+            ) as e:
                 self.topic.delete(fake_session, did)
-            self.assertEqual(e.exception.http_code, HTTPStatus.CONFLICT, "Wrong HTTP status code")
-            self.assertIn("there is at least one vnf instance using this descriptor", norm(str(e.exception)),
-                          "Wrong exception text")
-        with self.subTest(i=3, t='Conflict on Delete - VNFD in use by NSD'):
-            self.db.get_list.side_effect = [[], [{"_id": str(uuid4()), "name": "fake-nsd"}]]
-            with self.assertRaises(EngineException, msg="Accepted VNFD in use by NSD") as e:
+            self.assertEqual(
+                e.exception.http_code, HTTPStatus.CONFLICT, "Wrong HTTP status code"
+            )
+            self.assertIn(
+                "there is at least one vnf instance using this descriptor",
+                norm(str(e.exception)),
+                "Wrong exception text",
+            )
+        with self.subTest(i=3, t="Conflict on Delete - VNFD in use by NSD"):
+            self.db.get_list.side_effect = [
+                [],
+                [{"_id": str(uuid4()), "name": "fake-nsd"}],
+            ]
+            with self.assertRaises(
+                EngineException, msg="Accepted VNFD in use by NSD"
+            ) as e:
                 self.topic.delete(fake_session, did)
-            self.assertEqual(e.exception.http_code, HTTPStatus.CONFLICT, "Wrong HTTP status code")
-            self.assertIn("there is at least one ns package referencing this descriptor", norm(str(e.exception)),
-                          "Wrong exception text")
-        with self.subTest(i=4, t='Non-existent VNFD'):
+            self.assertEqual(
+                e.exception.http_code, HTTPStatus.CONFLICT, "Wrong HTTP status code"
+            )
+            self.assertIn(
+                "there is at least one ns package referencing this descriptor",
+                norm(str(e.exception)),
+                "Wrong exception text",
+            )
+        with self.subTest(i=4, t="Non-existent VNFD"):
             excp_msg = "Not found any {} with filter='{}'".format("VNFD", {"_id": did})
             self.db.get_one.side_effect = DbException(excp_msg, HTTPStatus.NOT_FOUND)
-            with self.assertRaises(DbException, msg="Accepted non-existent VNFD ID") as e:
+            with self.assertRaises(
+                DbException, msg="Accepted non-existent VNFD ID"
+            ) as e:
                 self.topic.delete(fake_session, did)
-            self.assertEqual(e.exception.http_code, HTTPStatus.NOT_FOUND, "Wrong HTTP status code")
-            self.assertIn(norm(excp_msg), norm(str(e.exception)), "Wrong exception text")
-        with self.subTest(i=5, t='No delete because referenced by other project'):
+            self.assertEqual(
+                e.exception.http_code, HTTPStatus.NOT_FOUND, "Wrong HTTP status code"
+            )
+            self.assertIn(
+                norm(excp_msg), norm(str(e.exception)), "Wrong exception text"
+            )
+        with self.subTest(i=5, t="No delete because referenced by other project"):
             db_vnfd_content["_admin"]["projects_read"].append("other_project")
             self.db.get_one = Mock(return_value=db_vnfd_content)
             self.db.get_list = Mock(return_value=[])
@@ -471,11 +762,17 @@ class Test_VnfdTopic(TestCase):
             db_s1_args = self.db.set_one.call_args
             self.assertEqual(db_s1_args[0][0], self.topic.topic, "Wrong DB topic")
             self.assertEqual(db_s1_args[0][1]["_id"], did, "Wrong DB ID")
-            self.assertIn(p_id, db_s1_args[0][1]["_admin.projects_write.cont"], "Wrong DB filter")
-            self.assertIsNone(db_s1_args[1]["update_dict"], "Wrong DB update dictionary")
-            self.assertEqual(db_s1_args[1]["pull_list"],
-                             {"_admin.projects_read": (p_id,), "_admin.projects_write": (p_id,)},
-                             "Wrong DB pull_list dictionary")
+            self.assertIn(
+                p_id, db_s1_args[0][1]["_admin.projects_write.cont"], "Wrong DB filter"
+            )
+            self.assertIsNone(
+                db_s1_args[1]["update_dict"], "Wrong DB update dictionary"
+            )
+            self.assertEqual(
+                db_s1_args[1]["pull_list"],
+                {"_admin.projects_read": (p_id,), "_admin.projects_write": (p_id,)},
+                "Wrong DB pull_list dictionary",
+            )
             self.fs.file_delete.assert_not_called()
         return
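
The deletion tests above all follow the same pattern: drive the mocked DB into a given state, call the topic method inside subTest(), and assert on the raised exception and its http_code. For reference, a minimal self-contained sketch of that pattern using only the standard library — FakeTopic and FakeConflict are hypothetical stand-ins for illustration, not osm_nbi classes:

    from http import HTTPStatus
    from unittest import TestCase
    from unittest.mock import Mock


    class FakeConflict(Exception):
        """Hypothetical stand-in for EngineException: carries an HTTP status code."""
        def __init__(self, message, http_code):
            super().__init__(message)
            self.http_code = http_code


    class FakeTopic:
        """Hypothetical stand-in for a descriptor topic backed by a DB object."""
        def __init__(self, db):
            self.db = db

        def delete(self, _id):
            if self.db.get_list("vnfrs", {"vnfd-id": _id}):
                raise FakeConflict("descriptor in use", HTTPStatus.CONFLICT)
            return self.db.del_one("vnfds", {"_id": _id})


    class FakeTopicTest(TestCase):
        def test_delete(self):
            db = Mock()
            topic = FakeTopic(db)
            with self.subTest(i=1, t="Normal Deletion"):
                db.get_list.return_value = []  # no instance uses the descriptor
                db.del_one.return_value = {"deleted": 1}
                self.assertEqual(topic.delete("some-id"), {"deleted": 1})
            with self.subTest(i=2, t="Conflict on Delete"):
                db.get_list.return_value = [{"_id": "fake-vnfr"}]  # one instance in use
                with self.assertRaises(FakeConflict) as e:
                    topic.delete("some-id")
                self.assertEqual(e.exception.http_code, HTTPStatus.CONFLICT)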
 
@@ -483,69 +780,128 @@ class Test_VnfdTopic(TestCase):
         indata = deepcopy(db_vnfd_content)
         self.topic.validate_mgmt_interface_connection_point(indata)
 
-    def test_validate_mgmt_interface_connection_point_when_missing_connection_point(self):
+    def test_validate_mgmt_interface_connection_point_when_missing_connection_point(
+        self,
+    ):
         indata = deepcopy(db_vnfd_content)
-        indata['ext-cpd'] = []
+        indata["ext-cpd"] = []
         with self.assertRaises(EngineException) as e:
             self.topic.validate_mgmt_interface_connection_point(indata)
-        self.assertEqual(e.exception.http_code, HTTPStatus.UNPROCESSABLE_ENTITY, "Wrong HTTP status code")
-        self.assertIn(norm("mgmt-cp='{}' must match an existing ext-cpd"
-                           .format(indata["mgmt-cp"])),
-                      norm(str(e.exception)), "Wrong exception text")
+        self.assertEqual(
+            e.exception.http_code,
+            HTTPStatus.UNPROCESSABLE_ENTITY,
+            "Wrong HTTP status code",
+        )
+        self.assertIn(
+            norm(
+                "mgmt-cp='{}' must match an existing ext-cpd".format(indata["mgmt-cp"])
+            ),
+            norm(str(e.exception)),
+            "Wrong exception text",
+        )
 
     def test_validate_mgmt_interface_connection_point_when_missing_mgmt_cp(self):
         indata = deepcopy(db_vnfd_content)
-        indata.pop('mgmt-cp')
+        indata.pop("mgmt-cp")
         with self.assertRaises(EngineException) as e:
             self.topic.validate_mgmt_interface_connection_point(indata)
-        self.assertEqual(e.exception.http_code, HTTPStatus.UNPROCESSABLE_ENTITY, "Wrong HTTP status code")
-        self.assertIn(norm("'mgmt-cp' is a mandatory field and it is not defined"),
-                      norm(str(e.exception)), "Wrong exception text")
+        self.assertEqual(
+            e.exception.http_code,
+            HTTPStatus.UNPROCESSABLE_ENTITY,
+            "Wrong HTTP status code",
+        )
+        self.assertIn(
+            norm("'mgmt-cp' is a mandatory field and it is not defined"),
+            norm(str(e.exception)),
+            "Wrong exception text",
+        )
 
     def test_validate_vdu_internal_connection_points_on_valid_descriptor(self):
         indata = db_vnfd_content
-        vdu = indata['vdu'][0]
+        vdu = indata["vdu"][0]
         self.topic.validate_vdu_internal_connection_points(vdu)
 
     def test_validate_external_connection_points_on_valid_descriptor(self):
         indata = db_vnfd_content
         self.topic.validate_external_connection_points(indata)
 
-    def test_validate_external_connection_points_when_missing_internal_connection_point(self):
+    def test_validate_external_connection_points_when_missing_internal_connection_point(
+        self,
+    ):
         indata = deepcopy(db_vnfd_content)
-        vdu = indata['vdu'][0]
-        vdu.pop('int-cpd')
+        vdu = indata["vdu"][0]
+        vdu.pop("int-cpd")
         affected_ext_cpd = indata["ext-cpd"][0]
         with self.assertRaises(EngineException) as e:
             self.topic.validate_external_connection_points(indata)
-        self.assertEqual(e.exception.http_code, HTTPStatus.UNPROCESSABLE_ENTITY, "Wrong HTTP status code")
-        self.assertIn(norm("ext-cpd[id='{}']:int-cpd must match an existing vdu int-cpd"
-                           .format(affected_ext_cpd["id"])),
-                      norm(str(e.exception)), "Wrong exception text")
+        self.assertEqual(
+            e.exception.http_code,
+            HTTPStatus.UNPROCESSABLE_ENTITY,
+            "Wrong HTTP status code",
+        )
+        self.assertIn(
+            norm(
+                "ext-cpd[id='{}']:int-cpd must match an existing vdu int-cpd".format(
+                    affected_ext_cpd["id"]
+                )
+            ),
+            norm(str(e.exception)),
+            "Wrong exception text",
+        )
 
-    def test_validate_vdu_internal_connection_points_on_duplicated_internal_connection_point(self):
+    def test_validate_vdu_internal_connection_points_on_duplicated_internal_connection_point(
+        self,
+    ):
         indata = deepcopy(db_vnfd_content)
-        vdu = indata['vdu'][0]
-        duplicated_cpd = {'id': 'vnf-mgmt', 'order': 3,
-                          'virtual-network-interface-requirement': [{'name': 'duplicated'}]}
-        vdu['int-cpd'].insert(0, duplicated_cpd)
+        vdu = indata["vdu"][0]
+        duplicated_cpd = {
+            "id": "vnf-mgmt",
+            "order": 3,
+            "virtual-network-interface-requirement": [{"name": "duplicated"}],
+        }
+        vdu["int-cpd"].insert(0, duplicated_cpd)
         with self.assertRaises(EngineException) as e:
             self.topic.validate_vdu_internal_connection_points(vdu)
-        self.assertEqual(e.exception.http_code, HTTPStatus.UNPROCESSABLE_ENTITY, "Wrong HTTP status code")
-        self.assertIn(norm("vdu[id='{}']:int-cpd[id='{}'] is already used by other int-cpd"
-                           .format(vdu["id"], duplicated_cpd["id"])),
-                      norm(str(e.exception)), "Wrong exception text")
+        self.assertEqual(
+            e.exception.http_code,
+            HTTPStatus.UNPROCESSABLE_ENTITY,
+            "Wrong HTTP status code",
+        )
+        self.assertIn(
+            norm(
+                "vdu[id='{}']:int-cpd[id='{}'] is already used by other int-cpd".format(
+                    vdu["id"], duplicated_cpd["id"]
+                )
+            ),
+            norm(str(e.exception)),
+            "Wrong exception text",
+        )
 
-    def test_validate_external_connection_points_on_duplicated_external_connection_point(self):
+    def test_validate_external_connection_points_on_duplicated_external_connection_point(
+        self,
+    ):
         indata = deepcopy(db_vnfd_content)
-        duplicated_cpd = {'id': 'vnf-mgmt-ext', 'int-cpd': {'vdu-id': 'dataVM', 'cpd': 'vnf-data'}}
-        indata['ext-cpd'].insert(0, duplicated_cpd)
+        duplicated_cpd = {
+            "id": "vnf-mgmt-ext",
+            "int-cpd": {"vdu-id": "dataVM", "cpd": "vnf-data"},
+        }
+        indata["ext-cpd"].insert(0, duplicated_cpd)
         with self.assertRaises(EngineException) as e:
             self.topic.validate_external_connection_points(indata)
-        self.assertEqual(e.exception.http_code, HTTPStatus.UNPROCESSABLE_ENTITY, "Wrong HTTP status code")
-        self.assertIn(norm("ext-cpd[id='{}'] is already used by other ext-cpd"
-                           .format(duplicated_cpd["id"])),
-                      norm(str(e.exception)), "Wrong exception text")
+        self.assertEqual(
+            e.exception.http_code,
+            HTTPStatus.UNPROCESSABLE_ENTITY,
+            "Wrong HTTP status code",
+        )
+        self.assertIn(
+            norm(
+                "ext-cpd[id='{}'] is already used by other ext-cpd".format(
+                    duplicated_cpd["id"]
+                )
+            ),
+            norm(str(e.exception)),
+            "Wrong exception text",
+        )
 
     def test_validate_internal_virtual_links_on_valid_descriptor(self):
         indata = db_vnfd_content
@@ -553,39 +909,72 @@ class Test_VnfdTopic(TestCase):
 
     def test_validate_internal_virtual_links_on_duplicated_ivld(self):
         indata = deepcopy(db_vnfd_content)
-        duplicated_vld = {'id': 'internal'}
-        indata['int-virtual-link-desc'].insert(0, duplicated_vld)
+        duplicated_vld = {"id": "internal"}
+        indata["int-virtual-link-desc"].insert(0, duplicated_vld)
         with self.assertRaises(EngineException) as e:
             self.topic.validate_internal_virtual_links(indata)
-        self.assertEqual(e.exception.http_code, HTTPStatus.UNPROCESSABLE_ENTITY, "Wrong HTTP status code")
-        self.assertIn(norm("Duplicated VLD id in int-virtual-link-desc[id={}]"
-                           .format(duplicated_vld["id"])),
-                      norm(str(e.exception)), "Wrong exception text")
+        self.assertEqual(
+            e.exception.http_code,
+            HTTPStatus.UNPROCESSABLE_ENTITY,
+            "Wrong HTTP status code",
+        )
+        self.assertIn(
+            norm(
+                "Duplicated VLD id in int-virtual-link-desc[id={}]".format(
+                    duplicated_vld["id"]
+                )
+            ),
+            norm(str(e.exception)),
+            "Wrong exception text",
+        )
 
-    def test_validate_internal_virtual_links_when_missing_ivld_on_connection_point(self):
+    def test_validate_internal_virtual_links_when_missing_ivld_on_connection_point(
+        self,
+    ):
         indata = deepcopy(db_vnfd_content)
-        vdu = indata['vdu'][0]
-        affected_int_cpd = vdu['int-cpd'][0]
-        affected_int_cpd['int-virtual-link-desc'] = 'non-existing-int-virtual-link-desc'
+        vdu = indata["vdu"][0]
+        affected_int_cpd = vdu["int-cpd"][0]
+        affected_int_cpd["int-virtual-link-desc"] = "non-existing-int-virtual-link-desc"
         with self.assertRaises(EngineException) as e:
             self.topic.validate_internal_virtual_links(indata)
-        self.assertEqual(e.exception.http_code, HTTPStatus.UNPROCESSABLE_ENTITY, "Wrong HTTP status code")
-        self.assertIn(norm("vdu[id='{}']:int-cpd[id='{}']:int-virtual-link-desc='{}' must match an existing "
-                           "int-virtual-link-desc".format(vdu["id"], affected_int_cpd["id"],
-                                                          affected_int_cpd['int-virtual-link-desc'])),
-                      norm(str(e.exception)), "Wrong exception text")
+        self.assertEqual(
+            e.exception.http_code,
+            HTTPStatus.UNPROCESSABLE_ENTITY,
+            "Wrong HTTP status code",
+        )
+        self.assertIn(
+            norm(
+                "vdu[id='{}']:int-cpd[id='{}']:int-virtual-link-desc='{}' must match an existing "
+                "int-virtual-link-desc".format(
+                    vdu["id"],
+                    affected_int_cpd["id"],
+                    affected_int_cpd["int-virtual-link-desc"],
+                )
+            ),
+            norm(str(e.exception)),
+            "Wrong exception text",
+        )
 
     def test_validate_internal_virtual_links_when_missing_ivld_on_profile(self):
         indata = deepcopy(db_vnfd_content)
-        affected_ivld_profile = {'id': 'non-existing-int-virtual-link-desc'}
-        df = indata['df'][0]
-        df['virtual-link-profile'] = [affected_ivld_profile]
+        affected_ivld_profile = {"id": "non-existing-int-virtual-link-desc"}
+        df = indata["df"][0]
+        df["virtual-link-profile"] = [affected_ivld_profile]
         with self.assertRaises(EngineException) as e:
             self.topic.validate_internal_virtual_links(indata)
-        self.assertEqual(e.exception.http_code, HTTPStatus.UNPROCESSABLE_ENTITY, "Wrong HTTP status code")
-        self.assertIn(norm("df[id='{}']:virtual-link-profile='{}' must match an existing "
-                           "int-virtual-link-desc".format(df["id"], affected_ivld_profile["id"])),
-                      norm(str(e.exception)), "Wrong exception text")
+        self.assertEqual(
+            e.exception.http_code,
+            HTTPStatus.UNPROCESSABLE_ENTITY,
+            "Wrong HTTP status code",
+        )
+        self.assertIn(
+            norm(
+                "df[id='{}']:virtual-link-profile='{}' must match an existing "
+                "int-virtual-link-desc".format(df["id"], affected_ivld_profile["id"])
+            ),
+            norm(str(e.exception)),
+            "Wrong exception text",
+        )
 
     def test_validate_monitoring_params_on_valid_descriptor(self):
         indata = db_vnfd_content
@@ -593,42 +982,80 @@ class Test_VnfdTopic(TestCase):
 
     def test_validate_monitoring_params_on_duplicated_ivld_monitoring_param(self):
         indata = deepcopy(db_vnfd_content)
-        duplicated_mp = {'id': 'cpu', 'name': 'cpu', 'performance_metric': 'cpu'}
-        affected_ivld = indata['int-virtual-link-desc'][0]
-        affected_ivld['monitoring-parameters'] = [duplicated_mp, duplicated_mp]
+        duplicated_mp = {"id": "cpu", "name": "cpu", "performance_metric": "cpu"}
+        affected_ivld = indata["int-virtual-link-desc"][0]
+        affected_ivld["monitoring-parameters"] = [duplicated_mp, duplicated_mp]
         with self.assertRaises(EngineException) as e:
             self.topic.validate_monitoring_params(indata)
-        self.assertEqual(e.exception.http_code, HTTPStatus.UNPROCESSABLE_ENTITY, "Wrong HTTP status code")
-        self.assertIn(norm("Duplicated monitoring-parameter id in "
-                           "int-virtual-link-desc[id='{}']:monitoring-parameters[id='{}']"
-                           .format(affected_ivld["id"], duplicated_mp["id"])),
-                      norm(str(e.exception)), "Wrong exception text")
+        self.assertEqual(
+            e.exception.http_code,
+            HTTPStatus.UNPROCESSABLE_ENTITY,
+            "Wrong HTTP status code",
+        )
+        self.assertIn(
+            norm(
+                "Duplicated monitoring-parameter id in "
+                "int-virtual-link-desc[id='{}']:monitoring-parameters[id='{}']".format(
+                    affected_ivld["id"], duplicated_mp["id"]
+                )
+            ),
+            norm(str(e.exception)),
+            "Wrong exception text",
+        )
 
     def test_validate_monitoring_params_on_duplicated_vdu_monitoring_param(self):
         indata = deepcopy(db_vnfd_content)
-        duplicated_mp = {'id': 'dataVM_cpu_util', 'name': 'dataVM_cpu_util', 'performance_metric': 'cpu'}
-        affected_vdu = indata['vdu'][1]
-        affected_vdu['monitoring-parameter'].insert(0, duplicated_mp)
+        duplicated_mp = {
+            "id": "dataVM_cpu_util",
+            "name": "dataVM_cpu_util",
+            "performance_metric": "cpu",
+        }
+        affected_vdu = indata["vdu"][1]
+        affected_vdu["monitoring-parameter"].insert(0, duplicated_mp)
         with self.assertRaises(EngineException) as e:
             self.topic.validate_monitoring_params(indata)
-        self.assertEqual(e.exception.http_code, HTTPStatus.UNPROCESSABLE_ENTITY, "Wrong HTTP status code")
-        self.assertIn(norm("Duplicated monitoring-parameter id in "
-                           "vdu[id='{}']:monitoring-parameter[id='{}']"
-                           .format(affected_vdu["id"], duplicated_mp["id"])),
-                      norm(str(e.exception)), "Wrong exception text")
+        self.assertEqual(
+            e.exception.http_code,
+            HTTPStatus.UNPROCESSABLE_ENTITY,
+            "Wrong HTTP status code",
+        )
+        self.assertIn(
+            norm(
+                "Duplicated monitoring-parameter id in "
+                "vdu[id='{}']:monitoring-parameter[id='{}']".format(
+                    affected_vdu["id"], duplicated_mp["id"]
+                )
+            ),
+            norm(str(e.exception)),
+            "Wrong exception text",
+        )
 
     def test_validate_monitoring_params_on_duplicated_df_monitoring_param(self):
         indata = deepcopy(db_vnfd_content)
-        duplicated_mp = {'id': 'memory', 'name': 'memory', 'performance_metric': 'memory'}
-        affected_df = indata['df'][0]
-        affected_df['monitoring-parameter'] = [duplicated_mp, duplicated_mp]
+        duplicated_mp = {
+            "id": "memory",
+            "name": "memory",
+            "performance_metric": "memory",
+        }
+        affected_df = indata["df"][0]
+        affected_df["monitoring-parameter"] = [duplicated_mp, duplicated_mp]
         with self.assertRaises(EngineException) as e:
             self.topic.validate_monitoring_params(indata)
-        self.assertEqual(e.exception.http_code, HTTPStatus.UNPROCESSABLE_ENTITY, "Wrong HTTP status code")
-        self.assertIn(norm("Duplicated monitoring-parameter id in "
-                           "df[id='{}']:monitoring-parameter[id='{}']"
-                           .format(affected_df["id"], duplicated_mp["id"])),
-                      norm(str(e.exception)), "Wrong exception text")
+        self.assertEqual(
+            e.exception.http_code,
+            HTTPStatus.UNPROCESSABLE_ENTITY,
+            "Wrong HTTP status code",
+        )
+        self.assertIn(
+            norm(
+                "Duplicated monitoring-parameter id in "
+                "df[id='{}']:monitoring-parameter[id='{}']".format(
+                    affected_df["id"], duplicated_mp["id"]
+                )
+            ),
+            norm(str(e.exception)),
+            "Wrong exception text",
+        )
 
     def test_validate_scaling_group_descriptor_on_valid_descriptor(self):
         indata = db_vnfd_content
@@ -636,53 +1063,93 @@ class Test_VnfdTopic(TestCase):
 
     def test_validate_scaling_group_descriptor_when_missing_monitoring_param(self):
         indata = deepcopy(db_vnfd_content)
-        vdu = indata['vdu'][1]
-        affected_df = indata['df'][0]
-        affected_sa = affected_df['scaling-aspect'][0]
-        affected_sp = affected_sa['scaling-policy'][0]
-        affected_sc = affected_sp['scaling-criteria'][0]
-        vdu.pop('monitoring-parameter')
+        vdu = indata["vdu"][1]
+        affected_df = indata["df"][0]
+        affected_sa = affected_df["scaling-aspect"][0]
+        affected_sp = affected_sa["scaling-policy"][0]
+        affected_sc = affected_sp["scaling-criteria"][0]
+        vdu.pop("monitoring-parameter")
         with self.assertRaises(EngineException) as e:
             self.topic.validate_scaling_group_descriptor(indata)
-        self.assertEqual(e.exception.http_code, HTTPStatus.UNPROCESSABLE_ENTITY, "Wrong HTTP status code")
-        self.assertIn(norm("df[id='{}']:scaling-aspect[id='{}']:scaling-policy"
-                           "[name='{}']:scaling-criteria[name='{}']: "
-                           "vnf-monitoring-param-ref='{}' not defined in any monitoring-param"
-                           .format(affected_df["id"], affected_sa["id"], affected_sp["name"], affected_sc["name"],
-                                   affected_sc["vnf-monitoring-param-ref"])),
-                      norm(str(e.exception)), "Wrong exception text")
+        self.assertEqual(
+            e.exception.http_code,
+            HTTPStatus.UNPROCESSABLE_ENTITY,
+            "Wrong HTTP status code",
+        )
+        self.assertIn(
+            norm(
+                "df[id='{}']:scaling-aspect[id='{}']:scaling-policy"
+                "[name='{}']:scaling-criteria[name='{}']: "
+                "vnf-monitoring-param-ref='{}' not defined in any monitoring-param".format(
+                    affected_df["id"],
+                    affected_sa["id"],
+                    affected_sp["name"],
+                    affected_sc["name"],
+                    affected_sc["vnf-monitoring-param-ref"],
+                )
+            ),
+            norm(str(e.exception)),
+            "Wrong exception text",
+        )
 
     def test_validate_scaling_group_descriptor_when_missing_vnf_configuration(self):
         indata = deepcopy(db_vnfd_content)
-        df = indata['df'][0]
-        affected_sa = df['scaling-aspect'][0]
-        indata["df"][0]["lcm-operations-configuration"]["operate-vnf-op-config"]["day1-2"].pop()
+        df = indata["df"][0]
+        affected_sa = df["scaling-aspect"][0]
+        indata["df"][0]["lcm-operations-configuration"]["operate-vnf-op-config"][
+            "day1-2"
+        ].pop()
         with self.assertRaises(EngineException) as e:
             self.topic.validate_scaling_group_descriptor(indata)
-        self.assertEqual(e.exception.http_code, HTTPStatus.UNPROCESSABLE_ENTITY, "Wrong HTTP status code")
-        self.assertIn(norm("'day1-2 configuration' not defined in the descriptor but it is referenced "
-                           "by df[id='{}']:scaling-aspect[id='{}']:scaling-config-action"
-                           .format(df["id"], affected_sa["id"])),
-                      norm(str(e.exception)), "Wrong exception text")
+        self.assertEqual(
+            e.exception.http_code,
+            HTTPStatus.UNPROCESSABLE_ENTITY,
+            "Wrong HTTP status code",
+        )
+        self.assertIn(
+            norm(
+                "'day1-2 configuration' not defined in the descriptor but it is referenced "
+                "by df[id='{}']:scaling-aspect[id='{}']:scaling-config-action".format(
+                    df["id"], affected_sa["id"]
+                )
+            ),
+            norm(str(e.exception)),
+            "Wrong exception text",
+        )
 
-    def test_validate_scaling_group_descriptor_when_missing_scaling_config_action_primitive(self):
+    def test_validate_scaling_group_descriptor_when_missing_scaling_config_action_primitive(
+        self,
+    ):
         indata = deepcopy(db_vnfd_content)
-        df = indata['df'][0]
-        affected_sa = df['scaling-aspect'][0]
-        affected_sca_primitive = affected_sa['scaling-config-action'][0]['vnf-config-primitive-name-ref']
-        df["lcm-operations-configuration"]["operate-vnf-op-config"]["day1-2"][0]['config-primitive'] = []
+        df = indata["df"][0]
+        affected_sa = df["scaling-aspect"][0]
+        affected_sca_primitive = affected_sa["scaling-config-action"][0][
+            "vnf-config-primitive-name-ref"
+        ]
+        df["lcm-operations-configuration"]["operate-vnf-op-config"]["day1-2"][0][
+            "config-primitive"
+        ] = []
         with self.assertRaises(EngineException) as e:
             self.topic.validate_scaling_group_descriptor(indata)
-        self.assertEqual(e.exception.http_code, HTTPStatus.UNPROCESSABLE_ENTITY, "Wrong HTTP status code")
-        self.assertIn(norm("df[id='{}']:scaling-aspect[id='{}']:scaling-config-action:vnf-"
-                           "config-primitive-name-ref='{}' does not match any "
-                           "day1-2 configuration:config-primitive:name"
-                           .format(df["id"], affected_sa["id"], affected_sca_primitive)),
-                      norm(str(e.exception)), "Wrong exception text")
+        self.assertEqual(
+            e.exception.http_code,
+            HTTPStatus.UNPROCESSABLE_ENTITY,
+            "Wrong HTTP status code",
+        )
+        self.assertIn(
+            norm(
+                "df[id='{}']:scaling-aspect[id='{}']:scaling-config-action:vnf-"
+                "config-primitive-name-ref='{}' does not match any "
+                "day1-2 configuration:config-primitive:name".format(
+                    df["id"], affected_sa["id"], affected_sca_primitive
+                )
+            ),
+            norm(str(e.exception)),
+            "Wrong exception text",
+        )
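
The validate_* tests above all check the same contract: the validator raises an engine exception with HTTP 422 UNPROCESSABLE_ENTITY and a message that the test matches via norm()/assertIn. A rough, free-standing sketch of that contract for the mgmt-cp check, reusing the error texts quoted in the tests — SketchEngineException and the function below are illustrative stand-ins, not the actual method in osm_nbi/descriptor_topics.py:

    from http import HTTPStatus


    class SketchEngineException(Exception):
        """Hypothetical stand-in for EngineException, carrying an HTTP status code."""
        def __init__(self, message, http_code=HTTPStatus.BAD_REQUEST):
            super().__init__(message)
            self.http_code = http_code


    def validate_mgmt_interface_connection_point(indata):
        # Mirrors the two error texts asserted in the tests above.
        if "mgmt-cp" not in indata:
            raise SketchEngineException(
                "'mgmt-cp' is a mandatory field and it is not defined",
                http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
            )
        if not any(
            cpd.get("id") == indata["mgmt-cp"] for cpd in indata.get("ext-cpd", [])
        ):
            raise SketchEngineException(
                "mgmt-cp='{}' must match an existing ext-cpd".format(indata["mgmt-cp"]),
                http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
            )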
 
 
 class Test_NsdTopic(TestCase):
-
     @classmethod
     def setUpClass(cls):
         cls.test_name = "test-nsd-topic"
@@ -703,11 +1170,13 @@ class Test_NsdTopic(TestCase):
         did = db_nsd_content["_id"]
         self.fs.get_params.return_value = {}
         self.fs.file_exists.return_value = False
-        self.fs.file_open.side_effect = lambda path, mode: open("/tmp/" + str(uuid4()), "a+b")
+        self.fs.file_open.side_effect = lambda path, mode: open(
+            "/tmp/" + str(uuid4()), "a+b"
+        )
         test_nsd = deepcopy(db_nsd_content)
         del test_nsd["_id"]
         del test_nsd["_admin"]
-        with self.subTest(i=1, t='Normal Creation'):
+        with self.subTest(i=1, t="Normal Creation"):
             self.db.create.return_value = did
             rollback = []
             did2, oid = self.topic.new(rollback, fake_session, {})
@@ -720,17 +1189,35 @@ class Test_NsdTopic(TestCase):
             self.assertEqual(db_args[0], self.topic.topic, "Wrong DB topic")
             self.assertEqual(did2, did, "Wrong DB NSD id")
             self.assertIsNotNone(db_args[1]["_admin"]["created"], "Wrong creation time")
-            self.assertEqual(db_args[1]["_admin"]["modified"], db_args[1]["_admin"]["created"],
-                             "Wrong modification time")
-            self.assertEqual(db_args[1]["_admin"]["projects_read"], [test_pid], "Wrong read-only project list")
-            self.assertEqual(db_args[1]["_admin"]["projects_write"], [test_pid], "Wrong read-write project list")
+            self.assertEqual(
+                db_args[1]["_admin"]["modified"],
+                db_args[1]["_admin"]["created"],
+                "Wrong modification time",
+            )
+            self.assertEqual(
+                db_args[1]["_admin"]["projects_read"],
+                [test_pid],
+                "Wrong read-only project list",
+            )
+            self.assertEqual(
+                db_args[1]["_admin"]["projects_write"],
+                [test_pid],
+                "Wrong read-write project list",
+            )
             try:
-                self.db.get_one.side_effect = [{"_id": did, "_admin": db_nsd_content["_admin"]}, None]
+                self.db.get_one.side_effect = [
+                    {"_id": did, "_admin": db_nsd_content["_admin"]},
+                    None,
+                ]
                 self.db.get_list.return_value = [db_vnfd_content]
-                self.topic.upload_content(fake_session, did, test_nsd, {}, {"Content-Type": []})
+                self.topic.upload_content(
+                    fake_session, did, test_nsd, {}, {"Content-Type": []}
+                )
                 msg_args = self.msg.write.call_args[0]
                 test_nsd["_id"] = did
-                self.assertEqual(msg_args[0], self.topic.topic_msg, "Wrong message topic")
+                self.assertEqual(
+                    msg_args[0], self.topic.topic_msg, "Wrong message topic"
+                )
                 self.assertEqual(msg_args[1], "edited", "Wrong message action")
                 self.assertEqual(msg_args[2], test_nsd, "Wrong message content")
                 db_args = self.db.get_one.mock_calls[0][1]
@@ -741,101 +1228,220 @@ class Test_NsdTopic(TestCase):
                 self.assertEqual(db_args[1], did, "Wrong DB NSD id")
                 admin = db_args[2]["_admin"]
                 db_admin = db_nsd_content["_admin"]
-                self.assertEqual(admin["created"], db_admin["created"], "Wrong creation time")
-                self.assertGreater(admin["modified"], db_admin["created"], "Wrong modification time")
-                self.assertEqual(admin["projects_read"], db_admin["projects_read"], "Wrong read-only project list")
-                self.assertEqual(admin["projects_write"], db_admin["projects_write"], "Wrong read-write project list")
-                self.assertEqual(admin["onboardingState"], "ONBOARDED", "Wrong onboarding state")
-                self.assertEqual(admin["operationalState"], "ENABLED", "Wrong operational state")
+                self.assertEqual(
+                    admin["created"], db_admin["created"], "Wrong creation time"
+                )
+                self.assertGreater(
+                    admin["modified"], db_admin["created"], "Wrong modification time"
+                )
+                self.assertEqual(
+                    admin["projects_read"],
+                    db_admin["projects_read"],
+                    "Wrong read-only project list",
+                )
+                self.assertEqual(
+                    admin["projects_write"],
+                    db_admin["projects_write"],
+                    "Wrong read-write project list",
+                )
+                self.assertEqual(
+                    admin["onboardingState"], "ONBOARDED", "Wrong onboarding state"
+                )
+                self.assertEqual(
+                    admin["operationalState"], "ENABLED", "Wrong operational state"
+                )
                 self.assertEqual(admin["usageState"], "NOT_IN_USE", "Wrong usage state")
                 storage = admin["storage"]
                 self.assertEqual(storage["folder"], did, "Wrong storage folder")
-                self.assertEqual(storage["descriptor"], "package", "Wrong storage descriptor")
+                self.assertEqual(
+                    storage["descriptor"], "package", "Wrong storage descriptor"
+                )
                 compare_desc(self, test_nsd, db_args[2], "NSD")
             finally:
                 pass
-        self.db.get_one.side_effect = lambda table, filter, fail_on_empty=None, fail_on_more=None: \
-            {"_id": did, "_admin": db_nsd_content["_admin"]}
-        with self.subTest(i=2, t='Check Pyangbind Validation: required properties'):
+        self.db.get_one.side_effect = (
+            lambda table, filter, fail_on_empty=None, fail_on_more=None: {
+                "_id": did,
+                "_admin": db_nsd_content["_admin"],
+            }
+        )
+        with self.subTest(i=2, t="Check Pyangbind Validation: required properties"):
             tmp = test_nsd["id"]
             del test_nsd["id"]
             try:
-                with self.assertRaises(EngineException, msg="Accepted NSD with a missing required property") as e:
-                    self.topic.upload_content(fake_session, did, test_nsd, {}, {"Content-Type": []})
-                self.assertEqual(e.exception.http_code, HTTPStatus.UNPROCESSABLE_ENTITY, "Wrong HTTP status code")
-                self.assertIn(norm("Error in pyangbind validation: '{}'".format("id")),
-                              norm(str(e.exception)), "Wrong exception text")
+                with self.assertRaises(
+                    EngineException, msg="Accepted NSD with a missing required property"
+                ) as e:
+                    self.topic.upload_content(
+                        fake_session, did, test_nsd, {}, {"Content-Type": []}
+                    )
+                self.assertEqual(
+                    e.exception.http_code,
+                    HTTPStatus.UNPROCESSABLE_ENTITY,
+                    "Wrong HTTP status code",
+                )
+                self.assertIn(
+                    norm("Error in pyangbind validation: '{}'".format("id")),
+                    norm(str(e.exception)),
+                    "Wrong exception text",
+                )
             finally:
                 test_nsd["id"] = tmp
-        with self.subTest(i=3, t='Check Pyangbind Validation: additional properties'):
+        with self.subTest(i=3, t="Check Pyangbind Validation: additional properties"):
             test_nsd["extra-property"] = 0
             try:
-                with self.assertRaises(EngineException, msg="Accepted NSD with an additional property") as e:
-                    self.topic.upload_content(fake_session, did, test_nsd, {}, {"Content-Type": []})
-                self.assertEqual(e.exception.http_code, HTTPStatus.UNPROCESSABLE_ENTITY, "Wrong HTTP status code")
-                self.assertIn(norm("Error in pyangbind validation: {} ({})"
-                                   .format("json object contained a key that did not exist", "extra-property")),
-                              norm(str(e.exception)), "Wrong exception text")
+                with self.assertRaises(
+                    EngineException, msg="Accepted NSD with an additional property"
+                ) as e:
+                    self.topic.upload_content(
+                        fake_session, did, test_nsd, {}, {"Content-Type": []}
+                    )
+                self.assertEqual(
+                    e.exception.http_code,
+                    HTTPStatus.UNPROCESSABLE_ENTITY,
+                    "Wrong HTTP status code",
+                )
+                self.assertIn(
+                    norm(
+                        "Error in pyangbind validation: {} ({})".format(
+                            "json object contained a key that did not exist",
+                            "extra-property",
+                        )
+                    ),
+                    norm(str(e.exception)),
+                    "Wrong exception text",
+                )
             finally:
                 del test_nsd["extra-property"]
-        with self.subTest(i=4, t='Check Pyangbind Validation: property types'):
+        with self.subTest(i=4, t="Check Pyangbind Validation: property types"):
             tmp = test_nsd["designer"]
             test_nsd["designer"] = {"key": 0}
             try:
-                with self.assertRaises(EngineException, msg="Accepted NSD with a wrongly typed property") as e:
-                    self.topic.upload_content(fake_session, did, test_nsd, {}, {"Content-Type": []})
-                self.assertEqual(e.exception.http_code, HTTPStatus.UNPROCESSABLE_ENTITY, "Wrong HTTP status code")
-                self.assertIn(norm("Error in pyangbind validation: {} ({})"
-                                   .format("json object contained a key that did not exist", "key")),
-                              norm(str(e.exception)), "Wrong exception text")
+                with self.assertRaises(
+                    EngineException, msg="Accepted NSD with a wrongly typed property"
+                ) as e:
+                    self.topic.upload_content(
+                        fake_session, did, test_nsd, {}, {"Content-Type": []}
+                    )
+                self.assertEqual(
+                    e.exception.http_code,
+                    HTTPStatus.UNPROCESSABLE_ENTITY,
+                    "Wrong HTTP status code",
+                )
+                self.assertIn(
+                    norm(
+                        "Error in pyangbind validation: {} ({})".format(
+                            "json object contained a key that did not exist", "key"
+                        )
+                    ),
+                    norm(str(e.exception)),
+                    "Wrong exception text",
+                )
             finally:
                 test_nsd["designer"] = tmp
-        with self.subTest(i=5, t='Check Input Validation: mgmt-network+virtual-link-protocol-data'):
-            df = test_nsd['df'][0]
-            mgmt_profile = {'id': 'id', 'virtual-link-desc-id': 'mgmt',
-                            'virtual-link-protocol-data': {'associated-layer-protocol': 'ipv4'}}
-            df['virtual-link-profile'] = [mgmt_profile]
+        with self.subTest(
+            i=5, t="Check Input Validation: mgmt-network+virtual-link-protocol-data"
+        ):
+            df = test_nsd["df"][0]
+            mgmt_profile = {
+                "id": "id",
+                "virtual-link-desc-id": "mgmt",
+                "virtual-link-protocol-data": {"associated-layer-protocol": "ipv4"},
+            }
+            df["virtual-link-profile"] = [mgmt_profile]
             try:
-                with self.assertRaises(EngineException, msg="Accepted VLD with mgmt-network+ip-profile") as e:
-                    self.topic.upload_content(fake_session, did, test_nsd, {}, {"Content-Type": []})
-                self.assertEqual(e.exception.http_code, HTTPStatus.UNPROCESSABLE_ENTITY, "Wrong HTTP status code")
-                self.assertIn(norm("Error at df[id='{}']:virtual-link-profile[id='{}']:virtual-link-protocol-data"
-                                   " You cannot set a virtual-link-protocol-data when mgmt-network is True"
-                                   .format(df["id"], mgmt_profile["id"])),
-                              norm(str(e.exception)), "Wrong exception text")
+                with self.assertRaises(
+                    EngineException, msg="Accepted VLD with mgmt-network+ip-profile"
+                ) as e:
+                    self.topic.upload_content(
+                        fake_session, did, test_nsd, {}, {"Content-Type": []}
+                    )
+                self.assertEqual(
+                    e.exception.http_code,
+                    HTTPStatus.UNPROCESSABLE_ENTITY,
+                    "Wrong HTTP status code",
+                )
+                self.assertIn(
+                    norm(
+                        "Error at df[id='{}']:virtual-link-profile[id='{}']:virtual-link-protocol-data"
+                        " You cannot set a virtual-link-protocol-data when mgmt-network is True".format(
+                            df["id"], mgmt_profile["id"]
+                        )
+                    ),
+                    norm(str(e.exception)),
+                    "Wrong exception text",
+                )
             finally:
-                del df['virtual-link-profile']
-        with self.subTest(i=6, t='Check Descriptor Dependencies: vnfd-id[]'):
-            self.db.get_one.side_effect = [{"_id": did, "_admin": db_nsd_content["_admin"]}, None]
+                del df["virtual-link-profile"]
+        with self.subTest(i=6, t="Check Descriptor Dependencies: vnfd-id[]"):
+            self.db.get_one.side_effect = [
+                {"_id": did, "_admin": db_nsd_content["_admin"]},
+                None,
+            ]
             self.db.get_list.return_value = []
             try:
-                with self.assertRaises(EngineException, msg="Accepted wrong VNFD ID reference") as e:
-                    self.topic.upload_content(fake_session, did, test_nsd, {}, {"Content-Type": []})
-                self.assertEqual(e.exception.http_code, HTTPStatus.CONFLICT, "Wrong HTTP status code")
-                self.assertIn(norm("'vnfd-id'='{}' references a non existing vnfd".format(test_nsd['vnfd-id'][0])),
-                              norm(str(e.exception)), "Wrong exception text")
+                with self.assertRaises(
+                    EngineException, msg="Accepted wrong VNFD ID reference"
+                ) as e:
+                    self.topic.upload_content(
+                        fake_session, did, test_nsd, {}, {"Content-Type": []}
+                    )
+                self.assertEqual(
+                    e.exception.http_code, HTTPStatus.CONFLICT, "Wrong HTTP status code"
+                )
+                self.assertIn(
+                    norm(
+                        "'vnfd-id'='{}' references a non existing vnfd".format(
+                            test_nsd["vnfd-id"][0]
+                        )
+                    ),
+                    norm(str(e.exception)),
+                    "Wrong exception text",
+                )
             finally:
                 pass
-        with self.subTest(i=7, t='Check Descriptor Dependencies: '
-                                 'vld[vnfd-connection-point-ref][vnfd-connection-point-ref]'):
+        with self.subTest(
+            i=7,
+            t="Check Descriptor Dependencies: "
+            "vld[vnfd-connection-point-ref][vnfd-connection-point-ref]",
+        ):
             vnfd_descriptor = deepcopy(db_vnfd_content)
-            df = test_nsd['df'][0]
-            affected_vnf_profile = df['vnf-profile'][0]
-            affected_virtual_link = affected_vnf_profile['virtual-link-connectivity'][1]
-            affected_cpd = vnfd_descriptor['ext-cpd'].pop()
-            self.db.get_one.side_effect = [{"_id": did, "_admin": db_nsd_content["_admin"]}, None]
+            df = test_nsd["df"][0]
+            affected_vnf_profile = df["vnf-profile"][0]
+            affected_virtual_link = affected_vnf_profile["virtual-link-connectivity"][1]
+            affected_cpd = vnfd_descriptor["ext-cpd"].pop()
+            self.db.get_one.side_effect = [
+                {"_id": did, "_admin": db_nsd_content["_admin"]},
+                None,
+            ]
             self.db.get_list.return_value = [vnfd_descriptor]
             try:
-                with self.assertRaises(EngineException, msg="Accepted wrong VLD CP reference") as e:
-                    self.topic.upload_content(fake_session, did, test_nsd, {}, {"Content-Type": []})
-                self.assertEqual(e.exception.http_code, HTTPStatus.UNPROCESSABLE_ENTITY, "Wrong HTTP status code")
-                self.assertIn(norm("Error at df[id='{}']:vnf-profile[id='{}']:virtual-link-connectivity"
-                                   "[virtual-link-profile-id='{}']:constituent-cpd-id='{}' references a "
-                                   "non existing ext-cpd:id inside vnfd '{}'"
-                                   .format(df["id"], affected_vnf_profile["id"],
-                                           affected_virtual_link["virtual-link-profile-id"], affected_cpd["id"],
-                                           vnfd_descriptor["id"])),
-                              norm(str(e.exception)), "Wrong exception text")
+                with self.assertRaises(
+                    EngineException, msg="Accepted wrong VLD CP reference"
+                ) as e:
+                    self.topic.upload_content(
+                        fake_session, did, test_nsd, {}, {"Content-Type": []}
+                    )
+                self.assertEqual(
+                    e.exception.http_code,
+                    HTTPStatus.UNPROCESSABLE_ENTITY,
+                    "Wrong HTTP status code",
+                )
+                self.assertIn(
+                    norm(
+                        "Error at df[id='{}']:vnf-profile[id='{}']:virtual-link-connectivity"
+                        "[virtual-link-profile-id='{}']:constituent-cpd-id='{}' references a "
+                        "non existing ext-cpd:id inside vnfd '{}'".format(
+                            df["id"],
+                            affected_vnf_profile["id"],
+                            affected_virtual_link["virtual-link-profile-id"],
+                            affected_cpd["id"],
+                            vnfd_descriptor["id"],
+                        )
+                    ),
+                    norm(str(e.exception)),
+                    "Wrong exception text",
+                )
             finally:
                 pass
         return
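
The NSD creation test above uses both flavours of Mock.side_effect: a list, to return one prepared document per successive db.get_one() call, and a callable, to rebuild the same "already onboarded" document on every call. A small standalone illustration of the difference — the table name and the "uuid-1" value are made up for the example:

    from unittest.mock import Mock

    db_seq = Mock()
    db_seq.get_one.side_effect = [{"_id": "uuid-1"}, None]  # one item per call, in order
    assert db_seq.get_one("nsds", {"_id": "uuid-1"}) == {"_id": "uuid-1"}
    assert db_seq.get_one("nsds", {"id": "fake-nsd-id"}) is None
    # a third call would raise StopIteration

    db_any = Mock()
    db_any.get_one.side_effect = (
        lambda table, filter, fail_on_empty=None, fail_on_more=None: {
            "_id": "uuid-1",
            "_admin": {"onboardingState": "CREATED"},
        }
    )
    # every call, however many, returns the freshly computed document
    doc = db_any.get_one("nsds", {"_id": "uuid-1"})
    assert doc["_admin"]["onboardingState"] == "CREATED"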
@@ -845,7 +1451,7 @@ class Test_NsdTopic(TestCase):
         did = nsd_content["_id"]
         self.fs.file_exists.return_value = True
         self.fs.dir_ls.return_value = True
-        with self.subTest(i=1, t='Normal Edition'):
+        with self.subTest(i=1, t="Normal Edition"):
             now = time()
             self.db.get_one.side_effect = [deepcopy(nsd_content), None]
             self.db.get_list.return_value = [db_vnfd_content]
@@ -859,37 +1465,70 @@ class Test_NsdTopic(TestCase):
             self.assertEqual(msg_args[2], data, "Wrong message content")
             self.assertEqual(db_args[0], self.topic.topic, "Wrong DB topic")
             self.assertEqual(db_args[1], did, "Wrong DB ID")
-            self.assertEqual(db_args[2]["_admin"]["created"], nsd_content["_admin"]["created"],
-                             "Wrong creation time")
-            self.assertGreater(db_args[2]["_admin"]["modified"], now, "Wrong modification time")
-            self.assertEqual(db_args[2]["_admin"]["projects_read"], nsd_content["_admin"]["projects_read"],
-                             "Wrong read-only project list")
-            self.assertEqual(db_args[2]["_admin"]["projects_write"], nsd_content["_admin"]["projects_write"],
-                             "Wrong read-write project list")
+            self.assertEqual(
+                db_args[2]["_admin"]["created"],
+                nsd_content["_admin"]["created"],
+                "Wrong creation time",
+            )
+            self.assertGreater(
+                db_args[2]["_admin"]["modified"], now, "Wrong modification time"
+            )
+            self.assertEqual(
+                db_args[2]["_admin"]["projects_read"],
+                nsd_content["_admin"]["projects_read"],
+                "Wrong read-only project list",
+            )
+            self.assertEqual(
+                db_args[2]["_admin"]["projects_write"],
+                nsd_content["_admin"]["projects_write"],
+                "Wrong read-write project list",
+            )
             self.assertEqual(db_args[2]["id"], data["id"], "Wrong NSD ID")
             self.assertEqual(db_args[2]["name"], data["name"], "Wrong NSD Name")
-        with self.subTest(i=2, t='Conflict on Edit'):
+        with self.subTest(i=2, t="Conflict on Edit"):
             data = {"id": "fake-nsd-id", "name": "new-nsd-name"}
-            self.db.get_one.side_effect = [nsd_content, {"_id": str(uuid4()), "id": data["id"]}]
-            with self.assertRaises(EngineException, msg="Accepted existing NSD ID") as e:
+            self.db.get_one.side_effect = [
+                nsd_content,
+                {"_id": str(uuid4()), "id": data["id"]},
+            ]
+            with self.assertRaises(
+                EngineException, msg="Accepted existing NSD ID"
+            ) as e:
                 self.topic.edit(fake_session, did, data)
-            self.assertEqual(e.exception.http_code, HTTPStatus.CONFLICT, "Wrong HTTP status code")
-            self.assertIn(norm("{} with id '{}' already exists for this project".format("nsd", data["id"])),
-                          norm(str(e.exception)), "Wrong exception text")
-        with self.subTest(i=3, t='Check Envelope'):
+            self.assertEqual(
+                e.exception.http_code, HTTPStatus.CONFLICT, "Wrong HTTP status code"
+            )
+            self.assertIn(
+                norm(
+                    "{} with id '{}' already exists for this project".format(
+                        "nsd", data["id"]
+                    )
+                ),
+                norm(str(e.exception)),
+                "Wrong exception text",
+            )
+        with self.subTest(i=3, t="Check Envelope"):
             data = {"nsd": {"nsd": {"id": "new-nsd-id", "name": "new-nsd-name"}}}
             self.db.get_one.side_effect = [nsd_content, None]
-            with self.assertRaises(EngineException, msg="Accepted NSD with wrong envelope") as e:
+            with self.assertRaises(
+                EngineException, msg="Accepted NSD with wrong envelope"
+            ) as e:
                 self.topic.edit(fake_session, did, data, content=nsd_content)
-            self.assertEqual(e.exception.http_code, HTTPStatus.BAD_REQUEST, "Wrong HTTP status code")
-            self.assertIn("'nsd' must be a list of only one element", norm(str(e.exception)), "Wrong exception text")
+            self.assertEqual(
+                e.exception.http_code, HTTPStatus.BAD_REQUEST, "Wrong HTTP status code"
+            )
+            self.assertIn(
+                "'nsd' must be a list of only one element",
+                norm(str(e.exception)),
+                "Wrong exception text",
+            )
         return
 
     def test_delete_nsd(self):
         did = db_nsd_content["_id"]
         self.db.get_one.return_value = db_nsd_content
         p_id = db_nsd_content["_admin"]["projects_read"][0]
-        with self.subTest(i=1, t='Normal Deletion'):
+        with self.subTest(i=1, t="Normal Deletion"):
             self.db.get_list.return_value = []
             self.db.del_one.return_value = {"deleted": 1}
             self.topic.delete(fake_session, did)
@@ -900,7 +1539,11 @@ class Test_NsdTopic(TestCase):
             self.assertEqual(msg_args[2], {"_id": did}, "Wrong message content")
             self.assertEqual(db_args[0], self.topic.topic, "Wrong DB topic")
             self.assertEqual(db_args[1]["_id"], did, "Wrong DB ID")
-            self.assertEqual(db_args[1]["_admin.projects_write.cont"], [p_id, 'ANY'], "Wrong DB filter")
+            self.assertEqual(
+                db_args[1]["_admin.projects_write.cont"],
+                [p_id, "ANY"],
+                "Wrong DB filter",
+            )
             db_g1_args = self.db.get_one.call_args[0]
             self.assertEqual(db_g1_args[0], self.topic.topic, "Wrong DB topic")
             self.assertEqual(db_g1_args[1]["_id"], did, "Wrong DB NSD ID")
@@ -908,34 +1551,60 @@ class Test_NsdTopic(TestCase):
             self.assertEqual(db_gl_calls[0][0][0], "nsrs", "Wrong DB topic")
             # self.assertEqual(db_gl_calls[0][0][1]["nsd-id"], did, "Wrong DB NSD ID")   # Filter changed after call
             self.assertEqual(db_gl_calls[1][0][0], "nsts", "Wrong DB topic")
-            self.assertEqual(db_gl_calls[1][0][1]["netslice-subnet.ANYINDEX.nsd-ref"], db_nsd_content["id"],
-                             "Wrong DB NSD netslice-subnet nsd-ref")
+            self.assertEqual(
+                db_gl_calls[1][0][1]["netslice-subnet.ANYINDEX.nsd-ref"],
+                db_nsd_content["id"],
+                "Wrong DB NSD netslice-subnet nsd-ref",
+            )
             self.db.set_one.assert_not_called()
             fs_del_calls = self.fs.file_delete.call_args_list
             self.assertEqual(fs_del_calls[0][0][0], did, "Wrong FS file id")
-            self.assertEqual(fs_del_calls[1][0][0], did + '_', "Wrong FS folder id")
-        with self.subTest(i=2, t='Conflict on Delete - NSD in use by nsr'):
+            self.assertEqual(fs_del_calls[1][0][0], did + "_", "Wrong FS folder id")
+        with self.subTest(i=2, t="Conflict on Delete - NSD in use by nsr"):
             self.db.get_list.return_value = [{"_id": str(uuid4()), "name": "fake-nsr"}]
-            with self.assertRaises(EngineException, msg="Accepted NSD in use by NSR") as e:
+            with self.assertRaises(
+                EngineException, msg="Accepted NSD in use by NSR"
+            ) as e:
                 self.topic.delete(fake_session, did)
-            self.assertEqual(e.exception.http_code, HTTPStatus.CONFLICT, "Wrong HTTP status code")
-            self.assertIn("there is at least one ns instance using this descriptor", norm(str(e.exception)),
-                          "Wrong exception text")
-        with self.subTest(i=3, t='Conflict on Delete - NSD in use by NST'):
-            self.db.get_list.side_effect = [[], [{"_id": str(uuid4()), "name": "fake-nst"}]]
-            with self.assertRaises(EngineException, msg="Accepted NSD in use by NST") as e:
+            self.assertEqual(
+                e.exception.http_code, HTTPStatus.CONFLICT, "Wrong HTTP status code"
+            )
+            self.assertIn(
+                "there is at least one ns instance using this descriptor",
+                norm(str(e.exception)),
+                "Wrong exception text",
+            )
+        with self.subTest(i=3, t="Conflict on Delete - NSD in use by NST"):
+            self.db.get_list.side_effect = [
+                [],
+                [{"_id": str(uuid4()), "name": "fake-nst"}],
+            ]
+            with self.assertRaises(
+                EngineException, msg="Accepted NSD in use by NST"
+            ) as e:
                 self.topic.delete(fake_session, did)
-            self.assertEqual(e.exception.http_code, HTTPStatus.CONFLICT, "Wrong HTTP status code")
-            self.assertIn("there is at least one netslice template referencing this descriptor", norm(str(e.exception)),
-                          "Wrong exception text")
-        with self.subTest(i=4, t='Non-existent NSD'):
+            self.assertEqual(
+                e.exception.http_code, HTTPStatus.CONFLICT, "Wrong HTTP status code"
+            )
+            self.assertIn(
+                "there is at least one netslice template referencing this descriptor",
+                norm(str(e.exception)),
+                "Wrong exception text",
+            )
+        with self.subTest(i=4, t="Non-existent NSD"):
             excp_msg = "Not found any {} with filter='{}'".format("NSD", {"_id": did})
             self.db.get_one.side_effect = DbException(excp_msg, HTTPStatus.NOT_FOUND)
-            with self.assertRaises(DbException, msg="Accepted non-existent NSD ID") as e:
+            with self.assertRaises(
+                DbException, msg="Accepted non-existent NSD ID"
+            ) as e:
                 self.topic.delete(fake_session, did)
-            self.assertEqual(e.exception.http_code, HTTPStatus.NOT_FOUND, "Wrong HTTP status code")
-            self.assertIn(norm(excp_msg), norm(str(e.exception)), "Wrong exception text")
-        with self.subTest(i=5, t='No delete because referenced by other project'):
+            self.assertEqual(
+                e.exception.http_code, HTTPStatus.NOT_FOUND, "Wrong HTTP status code"
+            )
+            self.assertIn(
+                norm(excp_msg), norm(str(e.exception)), "Wrong exception text"
+            )
+        with self.subTest(i=5, t="No delete because referenced by other project"):
             db_nsd_content["_admin"]["projects_read"].append("other_project")
             self.db.get_one = Mock(return_value=db_nsd_content)
             self.db.get_list = Mock(return_value=[])
@@ -952,33 +1621,60 @@ class Test_NsdTopic(TestCase):
             db_s1_args = self.db.set_one.call_args
             self.assertEqual(db_s1_args[0][0], self.topic.topic, "Wrong DB topic")
             self.assertEqual(db_s1_args[0][1]["_id"], did, "Wrong DB ID")
-            self.assertIn(p_id, db_s1_args[0][1]["_admin.projects_write.cont"], "Wrong DB filter")
-            self.assertIsNone(db_s1_args[1]["update_dict"], "Wrong DB update dictionary")
-            self.assertEqual(db_s1_args[1]["pull_list"],
-                             {"_admin.projects_read": (p_id,), "_admin.projects_write": (p_id,)},
-                             "Wrong DB pull_list dictionary")
+            self.assertIn(
+                p_id, db_s1_args[0][1]["_admin.projects_write.cont"], "Wrong DB filter"
+            )
+            self.assertIsNone(
+                db_s1_args[1]["update_dict"], "Wrong DB update dictionary"
+            )
+            self.assertEqual(
+                db_s1_args[1]["pull_list"],
+                {"_admin.projects_read": (p_id,), "_admin.projects_write": (p_id,)},
+                "Wrong DB pull_list dictionary",
+            )
             self.fs.file_delete.assert_not_called()
         return
 
-    def test_validate_vld_mgmt_network_with_virtual_link_protocol_data_on_valid_descriptor(self):
+    def test_validate_vld_mgmt_network_with_virtual_link_protocol_data_on_valid_descriptor(
+        self,
+    ):
         indata = deepcopy(db_nsd_content)
-        vld = indata['virtual-link-desc'][0]
-        self.topic.validate_vld_mgmt_network_with_virtual_link_protocol_data(vld, indata)
+        vld = indata["virtual-link-desc"][0]
+        self.topic.validate_vld_mgmt_network_with_virtual_link_protocol_data(
+            vld, indata
+        )
 
-    def test_validate_vld_mgmt_network_with_virtual_link_protocol_data_when_both_defined(self):
+    def test_validate_vld_mgmt_network_with_virtual_link_protocol_data_when_both_defined(
+        self,
+    ):
         indata = deepcopy(db_nsd_content)
-        vld = indata['virtual-link-desc'][0]
-        df = indata['df'][0]
-        affected_vlp = {'id': 'id', 'virtual-link-desc-id': 'mgmt',
-                        'virtual-link-protocol-data': {'associated-layer-protocol': 'ipv4'}}
-        df['virtual-link-profile'] = [affected_vlp]
+        vld = indata["virtual-link-desc"][0]
+        df = indata["df"][0]
+        affected_vlp = {
+            "id": "id",
+            "virtual-link-desc-id": "mgmt",
+            "virtual-link-protocol-data": {"associated-layer-protocol": "ipv4"},
+        }
+        df["virtual-link-profile"] = [affected_vlp]
         with self.assertRaises(EngineException) as e:
-            self.topic.validate_vld_mgmt_network_with_virtual_link_protocol_data(vld, indata)
-        self.assertEqual(e.exception.http_code, HTTPStatus.UNPROCESSABLE_ENTITY, "Wrong HTTP status code")
-        self.assertIn(norm("Error at df[id='{}']:virtual-link-profile[id='{}']:virtual-link-protocol-data"
-                           " You cannot set a virtual-link-protocol-data when mgmt-network is True"
-                           .format(df["id"], affected_vlp["id"])),
-                      norm(str(e.exception)), "Wrong exception text")
+            self.topic.validate_vld_mgmt_network_with_virtual_link_protocol_data(
+                vld, indata
+            )
+        self.assertEqual(
+            e.exception.http_code,
+            HTTPStatus.UNPROCESSABLE_ENTITY,
+            "Wrong HTTP status code",
+        )
+        self.assertIn(
+            norm(
+                "Error at df[id='{}']:virtual-link-profile[id='{}']:virtual-link-protocol-data"
+                " You cannot set a virtual-link-protocol-data when mgmt-network is True".format(
+                    df["id"], affected_vlp["id"]
+                )
+            ),
+            norm(str(e.exception)),
+            "Wrong exception text",
+        )
 
     def test_validate_vnf_profiles_vnfd_id_on_valid_descriptor(self):
         indata = deepcopy(db_nsd_content)
@@ -986,58 +1682,99 @@ class Test_NsdTopic(TestCase):
 
     def test_validate_vnf_profiles_vnfd_id_when_missing_vnfd(self):
         indata = deepcopy(db_nsd_content)
-        df = indata['df'][0]
-        affected_vnf_profile = df['vnf-profile'][0]
-        indata['vnfd-id'] = ['non-existing-vnfd']
+        df = indata["df"][0]
+        affected_vnf_profile = df["vnf-profile"][0]
+        indata["vnfd-id"] = ["non-existing-vnfd"]
         with self.assertRaises(EngineException) as e:
             self.topic.validate_vnf_profiles_vnfd_id(indata)
-        self.assertEqual(e.exception.http_code, HTTPStatus.UNPROCESSABLE_ENTITY, "Wrong HTTP status code")
-        self.assertIn(norm("Error at df[id='{}']:vnf_profile[id='{}']:vnfd-id='{}' "
-                           "does not match any vnfd-id"
-                           .format(df["id"], affected_vnf_profile["id"], affected_vnf_profile['vnfd-id'])),
-                      norm(str(e.exception)), "Wrong exception text")
+        self.assertEqual(
+            e.exception.http_code,
+            HTTPStatus.UNPROCESSABLE_ENTITY,
+            "Wrong HTTP status code",
+        )
+        self.assertIn(
+            norm(
+                "Error at df[id='{}']:vnf_profile[id='{}']:vnfd-id='{}' "
+                "does not match any vnfd-id".format(
+                    df["id"],
+                    affected_vnf_profile["id"],
+                    affected_vnf_profile["vnfd-id"],
+                )
+            ),
+            norm(str(e.exception)),
+            "Wrong exception text",
+        )
 
-    def test_validate_df_vnf_profiles_constituent_connection_points_on_valid_descriptor(self):
+    def test_validate_df_vnf_profiles_constituent_connection_points_on_valid_descriptor(
+        self,
+    ):
         nsd_descriptor = deepcopy(db_nsd_content)
         vnfd_descriptor = deepcopy(db_vnfd_content)
-        df = nsd_descriptor['df'][0]
-        vnfds_index = {vnfd_descriptor['id']: vnfd_descriptor}
-        self.topic.validate_df_vnf_profiles_constituent_connection_points(df, vnfds_index)
+        df = nsd_descriptor["df"][0]
+        vnfds_index = {vnfd_descriptor["id"]: vnfd_descriptor}
+        self.topic.validate_df_vnf_profiles_constituent_connection_points(
+            df, vnfds_index
+        )
 
-    def test_validate_df_vnf_profiles_constituent_connection_points_when_missing_connection_point(self):
+    def test_validate_df_vnf_profiles_constituent_connection_points_when_missing_connection_point(
+        self,
+    ):
         nsd_descriptor = deepcopy(db_nsd_content)
         vnfd_descriptor = deepcopy(db_vnfd_content)
-        df = nsd_descriptor['df'][0]
-        affected_vnf_profile = df['vnf-profile'][0]
-        affected_virtual_link = affected_vnf_profile['virtual-link-connectivity'][1]
-        vnfds_index = {vnfd_descriptor['id']: vnfd_descriptor}
-        affected_cpd = vnfd_descriptor['ext-cpd'].pop()
+        df = nsd_descriptor["df"][0]
+        affected_vnf_profile = df["vnf-profile"][0]
+        affected_virtual_link = affected_vnf_profile["virtual-link-connectivity"][1]
+        vnfds_index = {vnfd_descriptor["id"]: vnfd_descriptor}
+        affected_cpd = vnfd_descriptor["ext-cpd"].pop()
         with self.assertRaises(EngineException) as e:
-            self.topic.validate_df_vnf_profiles_constituent_connection_points(df, vnfds_index)
-        self.assertEqual(e.exception.http_code, HTTPStatus.UNPROCESSABLE_ENTITY, "Wrong HTTP status code")
-        self.assertIn(norm("Error at df[id='{}']:vnf-profile[id='{}']:virtual-link-connectivity"
-                           "[virtual-link-profile-id='{}']:constituent-cpd-id='{}' references a "
-                           "non existing ext-cpd:id inside vnfd '{}'"
-                           .format(df["id"], affected_vnf_profile["id"],
-                                   affected_virtual_link["virtual-link-profile-id"], affected_cpd["id"],
-                                   vnfd_descriptor["id"])),
-                      norm(str(e.exception)), "Wrong exception text")
+            self.topic.validate_df_vnf_profiles_constituent_connection_points(
+                df, vnfds_index
+            )
+        self.assertEqual(
+            e.exception.http_code,
+            HTTPStatus.UNPROCESSABLE_ENTITY,
+            "Wrong HTTP status code",
+        )
+        self.assertIn(
+            norm(
+                "Error at df[id='{}']:vnf-profile[id='{}']:virtual-link-connectivity"
+                "[virtual-link-profile-id='{}']:constituent-cpd-id='{}' references a "
+                "non existing ext-cpd:id inside vnfd '{}'".format(
+                    df["id"],
+                    affected_vnf_profile["id"],
+                    affected_virtual_link["virtual-link-profile-id"],
+                    affected_cpd["id"],
+                    vnfd_descriptor["id"],
+                )
+            ),
+            norm(str(e.exception)),
+            "Wrong exception text",
+        )
 
     def test_check_conflict_on_edit_when_missing_constituent_vnfd_id(self):
         nsd_descriptor = deepcopy(db_nsd_content)
-        invalid_vnfd_id = 'invalid-vnfd-id'
-        nsd_descriptor['id'] = 'invalid-vnfd-id-ns'
-        nsd_descriptor['vnfd-id'][0] = invalid_vnfd_id
-        nsd_descriptor['df'][0]['vnf-profile'][0]['vnfd-id'] = invalid_vnfd_id
-        nsd_descriptor['df'][0]['vnf-profile'][1]['vnfd-id'] = invalid_vnfd_id
+        invalid_vnfd_id = "invalid-vnfd-id"
+        nsd_descriptor["id"] = "invalid-vnfd-id-ns"
+        nsd_descriptor["vnfd-id"][0] = invalid_vnfd_id
+        nsd_descriptor["df"][0]["vnf-profile"][0]["vnfd-id"] = invalid_vnfd_id
+        nsd_descriptor["df"][0]["vnf-profile"][1]["vnfd-id"] = invalid_vnfd_id
         with self.assertRaises(EngineException) as e:
             self.db.get_list.return_value = []
-            nsd_descriptor = self.topic.check_conflict_on_edit(fake_session, nsd_descriptor, [], 'id')
-        self.assertEqual(e.exception.http_code, HTTPStatus.CONFLICT, "Wrong HTTP status code")
-        self.assertIn(norm("Descriptor error at 'vnfd-id'='{}' references a non "
-                           "existing vnfd".format(invalid_vnfd_id)),
-                      norm(str(e.exception)), "Wrong exception text")
+            nsd_descriptor = self.topic.check_conflict_on_edit(
+                fake_session, nsd_descriptor, [], "id"
+            )
+        self.assertEqual(
+            e.exception.http_code, HTTPStatus.CONFLICT, "Wrong HTTP status code"
+        )
+        self.assertIn(
+            norm(
+                "Descriptor error at 'vnfd-id'='{}' references a non "
+                "existing vnfd".format(invalid_vnfd_id)
+            ),
+            norm(str(e.exception)),
+            "Wrong exception text",
+        )
 
 
-if __name__ == '__main__':
+if __name__ == "__main__":
     unittest.main()
index bff7775..5e86ae5 100644 (file)
@@ -16,7 +16,7 @@
 ##
 
 import unittest
-from unittest.mock import Mock, mock_open   # patch, MagicMock
+from unittest.mock import Mock, mock_open  # patch, MagicMock
 from osm_common.dbbase import DbException
 from osm_nbi.engine import EngineException
 from osm_common.dbmemory import DbMemory
@@ -24,14 +24,18 @@ from osm_common.fsbase import FsBase
 from osm_common.msgbase import MsgBase
 from http import HTTPStatus
 from osm_nbi.instance_topics import NsLcmOpTopic, NsrTopic
-from osm_nbi.tests.test_db_descriptors import db_vim_accounts_text, db_nsds_text, db_vnfds_text, db_nsrs_text,\
-    db_vnfrs_text
+from osm_nbi.tests.test_db_descriptors import (
+    db_vim_accounts_text,
+    db_nsds_text,
+    db_vnfds_text,
+    db_nsrs_text,
+    db_vnfrs_text,
+)
 from copy import deepcopy
 import yaml
 
 
 class TestNsLcmOpTopic(unittest.TestCase):
-
     def setUp(self):
         self.db = DbMemory()
         self.fs = Mock(FsBase())
@@ -42,7 +46,9 @@ class TestNsLcmOpTopic(unittest.TestCase):
         self.nslcmop_topic = NsLcmOpTopic(self.db, self.fs, self.msg, None)
         self.nslcmop_topic.check_quota = Mock(return_value=None)  # skip quota
 
-        self.db.create_list("vim_accounts", yaml.load(db_vim_accounts_text, Loader=yaml.Loader))
+        self.db.create_list(
+            "vim_accounts", yaml.load(db_vim_accounts_text, Loader=yaml.Loader)
+        )
         self.db.create_list("nsds", yaml.load(db_nsds_text, Loader=yaml.Loader))
         self.db.create_list("vnfds", yaml.load(db_vnfds_text, Loader=yaml.Loader))
         self.db.create_list("vnfrs", yaml.load(db_vnfrs_text, Loader=yaml.Loader))
@@ -59,64 +65,137 @@ class TestNsLcmOpTopic(unittest.TestCase):
         self.vim_id = self.vim["_id"]
 
     def test_create_instantiate(self):
-        session = {"force": False, "admin": False, "public": False, "project_id": [self.nsr_project], "method": "write"}
+        session = {
+            "force": False,
+            "admin": False,
+            "public": False,
+            "project_id": [self.nsr_project],
+            "method": "write",
+        }
         indata = {
             "nsdId": self.nsd_id,
             "nsInstanceId": self.nsr_id,
             "nsName": "name",
             "vimAccountId": self.vim_id,
-            "additionalParamsForVnf": [{"member-vnf-index": "1", "additionalParams": {"touch_filename": "file"}},
-                                       {"member-vnf-index": "2", "additionalParams": {"touch_filename": "file"}}],
-            "vnf": [{"member-vnf-index": "1",
-                     "vdu": [{"id": "dataVM", "interface": [{"name": "dataVM-eth0",
-                                                             "ip-address": "10.11.12.13",
-                                                             "floating-ip-required": True}]
-                              }],
-                     "internal-vld": [{"name": "internal", "vim-network-id": "vim-net-id"}]
-                     }],
+            "additionalParamsForVnf": [
+                {
+                    "member-vnf-index": "1",
+                    "additionalParams": {"touch_filename": "file"},
+                },
+                {
+                    "member-vnf-index": "2",
+                    "additionalParams": {"touch_filename": "file"},
+                },
+            ],
+            "vnf": [
+                {
+                    "member-vnf-index": "1",
+                    "vdu": [
+                        {
+                            "id": "dataVM",
+                            "interface": [
+                                {
+                                    "name": "dataVM-eth0",
+                                    "ip-address": "10.11.12.13",
+                                    "floating-ip-required": True,
+                                }
+                            ],
+                        }
+                    ],
+                    "internal-vld": [
+                        {"name": "internal", "vim-network-id": "vim-net-id"}
+                    ],
+                }
+            ],
             "lcmOperationType": "instantiate",
-
         }
         rollback = []
         headers = {}
 
-        nslcmop_id, _ = self.nslcmop_topic.new(rollback, session, indata=deepcopy(indata), kwargs=None, headers=headers)
+        nslcmop_id, _ = self.nslcmop_topic.new(
+            rollback, session, indata=deepcopy(indata), kwargs=None, headers=headers
+        )
 
         # check nslcmop is created at database
-        self.assertEqual(self.db.create.call_count, 1, "database create not called, or called more than once")
+        self.assertEqual(
+            self.db.create.call_count,
+            1,
+            "database create not called, or called more than once",
+        )
         _call = self.db.create.call_args_list[0]
-        self.assertEqual(_call[0][0], "nslcmops", "must be create a nslcmops entry at database")
+        self.assertEqual(
+            _call[0][0], "nslcmops", "must create an nslcmops entry at database"
+        )
 
         created_nslcmop = _call[0][1]
-        self.assertEqual(nslcmop_id, created_nslcmop["_id"], "mismatch between return id and database '_id'")
-        self.assertEqual(self.nsr_id, created_nslcmop["nsInstanceId"], "bad reference id from nslcmop to nsr")
-        self.assertTrue(created_nslcmop["_admin"].get("projects_read"),
-                        "Database record must contain '_amdin.projects_read'")
-        self.assertIn("created", created_nslcmop["_admin"], "Database record must contain '_admin.created'")
-        self.assertTrue(created_nslcmop["lcmOperationType"] == "instantiate",
-                        "Database record must contain 'lcmOperationType=instantiate'")
+        self.assertEqual(
+            nslcmop_id,
+            created_nslcmop["_id"],
+            "mismatch between return id and database '_id'",
+        )
+        self.assertEqual(
+            self.nsr_id,
+            created_nslcmop["nsInstanceId"],
+            "bad reference id from nslcmop to nsr",
+        )
+        self.assertTrue(
+            created_nslcmop["_admin"].get("projects_read"),
+            "Database record must contain '_amdin.projects_read'",
+        )
+        self.assertIn(
+            "created",
+            created_nslcmop["_admin"],
+            "Database record must contain '_admin.created'",
+        )
+        self.assertTrue(
+            created_nslcmop["lcmOperationType"] == "instantiate",
+            "Database record must contain 'lcmOperationType=instantiate'",
+        )
 
-        self.assertEqual(len(rollback), len(self.db.set_one.call_args_list) + 1,
-                         "rollback mismatch with created/set items at database")
+        self.assertEqual(
+            len(rollback),
+            len(self.db.set_one.call_args_list) + 1,
+            "rollback mismatch with created/set items at database",
+        )
 
         # test parameters with error
         bad_id = "88d90b0c-faff-4b9f-bccd-aaaaaaaaaaaa"
         test_set = (
-            ("nsr not found", {"nsInstanceId": bad_id}, DbException, HTTPStatus.NOT_FOUND, ("not found", bad_id)),
+            (
+                "nsr not found",
+                {"nsInstanceId": bad_id},
+                DbException,
+                HTTPStatus.NOT_FOUND,
+                ("not found", bad_id),
+            ),
             # TODO add "nsd"
             # ({"vimAccountId": bad_id}, DbException, HTTPStatus.NOT_FOUND, ("not found", bad_id)),  # TODO add "vim"
-            ("bad member-vnf-index", {"vnf.0.member-vnf-index": "k"}, EngineException, HTTPStatus.BAD_REQUEST,
-             ("k",)),
+            (
+                "bad member-vnf-index",
+                {"vnf.0.member-vnf-index": "k"},
+                EngineException,
+                HTTPStatus.BAD_REQUEST,
+                ("k",),
+            ),
         )
         for message, kwargs_, expect_exc, expect_code, expect_text_list in test_set:
             with self.assertRaises(expect_exc, msg=message) as e:
-                self.nslcmop_topic.new(rollback, session, indata=deepcopy(indata), kwargs=kwargs_, headers=headers)
+                self.nslcmop_topic.new(
+                    rollback,
+                    session,
+                    indata=deepcopy(indata),
+                    kwargs=kwargs_,
+                    headers=headers,
+                )
             if expect_code:
                 self.assertTrue(e.exception.http_code == expect_code)
             if expect_text_list:
                 for expect_text in expect_text_list:
-                    self.assertIn(expect_text, str(e.exception).lower(),
-                                  "Expected '{}' at exception text".format(expect_text))
+                    self.assertIn(
+                        expect_text,
+                        str(e.exception).lower(),
+                        "Expected '{}' at exception text".format(expect_text),
+                    )
 
     def test_check_ns_operation_action(self):
         nsrs = self.db.get_list("nsrs")[0]
@@ -126,7 +205,7 @@ class TestNsLcmOpTopic(unittest.TestCase):
             "member_vnf_index": "1",
             "vdu_id": None,
             "primitive": "touch",
-            "primitive_params": {"filename": "file"}
+            "primitive_params": {"filename": "file"},
         }
 
         self.nslcmop_topic._check_ns_operation(session, nsrs, "action", indata)
@@ -136,14 +215,18 @@ class TestNsLcmOpTopic(unittest.TestCase):
                 continue
             indata_copy[k] = "non_existing"
             with self.assertRaises(EngineException) as exc_manager:
-                self.nslcmop_topic._check_ns_operation(session, nsrs, "action", indata_copy)
+                self.nslcmop_topic._check_ns_operation(
+                    session, nsrs, "action", indata_copy
+                )
             exc = exc_manager.exception
-            self.assertEqual(exc.http_code, HTTPStatus.BAD_REQUEST, "Engine exception bad http_code with {}".
-                             format(indata_copy))
+            self.assertEqual(
+                exc.http_code,
+                HTTPStatus.BAD_REQUEST,
+                "Engine exception bad http_code with {}".format(indata_copy),
+            )
 
 
 class TestNsrTopic(unittest.TestCase):
-
     def setUp(self):
         self.db = DbMemory()
         self.fs = Mock(FsBase())
@@ -154,7 +237,9 @@ class TestNsrTopic(unittest.TestCase):
         self.nsr_topic = NsrTopic(self.db, self.fs, self.msg, None)
         self.nsr_topic.check_quota = Mock(return_value=None)  # skip quota
 
-        self.db.create_list("vim_accounts", yaml.load(db_vim_accounts_text, Loader=yaml.Loader))
+        self.db.create_list(
+            "vim_accounts", yaml.load(db_vim_accounts_text, Loader=yaml.Loader)
+        )
         self.db.create_list("nsds", yaml.load(db_nsds_text, Loader=yaml.Loader))
         self.db.create_list("vnfds", yaml.load(db_vnfds_text, Loader=yaml.Loader))
         self.db.create = Mock(return_value="created_id")
@@ -166,20 +251,34 @@ class TestNsrTopic(unittest.TestCase):
         self.vim_id = self.vim["_id"]
 
     def test_create(self):
-        session = {"force": False, "admin": False, "public": False, "project_id": [self.nsd_project], "method": "write"}
+        session = {
+            "force": False,
+            "admin": False,
+            "public": False,
+            "project_id": [self.nsd_project],
+            "method": "write",
+        }
         indata = {
             "nsdId": self.nsd_id,
             "nsName": "name",
             "vimAccountId": self.vim_id,
             "additionalParamsForVnf": [
-                {"member-vnf-index": "hackfest_vnf1", "additionalParams": {"touch_filename": "file"}},
-                {"member-vnf-index": "hackfest_vnf2", "additionalParams": {"touch_filename": "file"}}
-            ]
+                {
+                    "member-vnf-index": "hackfest_vnf1",
+                    "additionalParams": {"touch_filename": "file"},
+                },
+                {
+                    "member-vnf-index": "hackfest_vnf2",
+                    "additionalParams": {"touch_filename": "file"},
+                },
+            ],
         }
         rollback = []
         headers = {}
 
-        self.nsr_topic.new(rollback, session, indata=indata, kwargs=None, headers=headers)
+        self.nsr_topic.new(
+            rollback, session, indata=indata, kwargs=None, headers=headers
+        )
 
         # check vnfrs and nsrs created in whatever order
         created_vnfrs = []
@@ -190,50 +289,97 @@ class TestNsrTopic(unittest.TestCase):
             created_item = _call[0][1]
             if _call[0][0] == "vnfrs":
                 created_vnfrs.append(created_item)
-                self.assertIn("member-vnf-index-ref", created_item,
-                              "Created item must contain member-vnf-index-ref section")
+                self.assertIn(
+                    "member-vnf-index-ref",
+                    created_item,
+                    "Created item must contain member-vnf-index-ref section",
+                )
                 if nsr_id:
-                    self.assertEqual(nsr_id, created_item["nsr-id-ref"], "bad reference id from vnfr to nsr")
+                    self.assertEqual(
+                        nsr_id,
+                        created_item["nsr-id-ref"],
+                        "bad reference id from vnfr to nsr",
+                    )
                 else:
                     nsr_id = created_item["nsr-id-ref"]
 
             elif _call[0][0] == "nsrs":
                 created_nsrs.append(created_item)
                 if nsr_id:
-                    self.assertEqual(nsr_id, created_item["_id"], "bad reference id from vnfr to nsr")
+                    self.assertEqual(
+                        nsr_id, created_item["_id"], "bad reference id from vnfr to nsr"
+                    )
                 else:
                     nsr_id = created_item["_id"]
             else:
-                assert True, "created an unknown record {} at database".format(_call[0][0])
-
-            self.assertTrue(created_item["_admin"].get("projects_read"),
-                            "Database record must contain '_amdin.projects_read'")
-            self.assertIn("created", created_item["_admin"], "Database record must contain '_admin.created'")
-            self.assertTrue(created_item["_admin"]["nsState"] == "NOT_INSTANTIATED",
-                            "Database record must contain '_admin.nstate=NOT INSTANTIATE'")
-
-        self.assertEqual(len(created_vnfrs), 2, "created a mismatch number of vnfr at database")
-        self.assertEqual(len(created_nsrs), 1, "Only one nsrs must be created at database")
-        self.assertEqual(len(rollback), len(created_vnfrs) + 1, "rollback mismatch with created items at database")
+                assert False, "created an unknown record {} at database".format(
+                    _call[0][0]
+                )
+
+            self.assertTrue(
+                created_item["_admin"].get("projects_read"),
+                "Database record must contain '_amdin.projects_read'",
+            )
+            self.assertIn(
+                "created",
+                created_item["_admin"],
+                "Database record must contain '_admin.created'",
+            )
+            self.assertTrue(
+                created_item["_admin"]["nsState"] == "NOT_INSTANTIATED",
+                "Database record must contain '_admin.nstate=NOT INSTANTIATE'",
+            )
+
+        self.assertEqual(
+            len(created_vnfrs), 2, "created a mismatched number of vnfrs at database"
+        )
+        self.assertEqual(
+            len(created_nsrs), 1, "Only one nsrs must be created at database"
+        )
+        self.assertEqual(
+            len(rollback),
+            len(created_vnfrs) + 1,
+            "rollback mismatch with created items at database",
+        )
 
         # test parameters with error
         bad_id = "88d90b0c-faff-4b9f-bccd-aaaaaaaaaaaa"
         test_set = (
             # TODO add "nsd"
-            ("nsd not found", {"nsdId": bad_id}, DbException, HTTPStatus.NOT_FOUND, ("not found", bad_id)),
+            (
+                "nsd not found",
+                {"nsdId": bad_id},
+                DbException,
+                HTTPStatus.NOT_FOUND,
+                ("not found", bad_id),
+            ),
             # ({"vimAccountId": bad_id}, DbException, HTTPStatus.NOT_FOUND, ("not found", bad_id)),  # TODO add "vim"
-            ("additional params not supply", {"additionalParamsForVnf.0.member-vnf-index": "k"}, EngineException,
-             HTTPStatus.BAD_REQUEST, None),
+            (
+                "additional params not supply",
+                {"additionalParamsForVnf.0.member-vnf-index": "k"},
+                EngineException,
+                HTTPStatus.BAD_REQUEST,
+                None,
+            ),
         )
         for message, kwargs_, expect_exc, expect_code, expect_text_list in test_set:
             with self.assertRaises(expect_exc, msg=message) as e:
-                self.nsr_topic.new(rollback, session, indata=deepcopy(indata), kwargs=kwargs_, headers=headers)
+                self.nsr_topic.new(
+                    rollback,
+                    session,
+                    indata=deepcopy(indata),
+                    kwargs=kwargs_,
+                    headers=headers,
+                )
             if expect_code:
                 self.assertTrue(e.exception.http_code == expect_code)
             if expect_text_list:
                 for expect_text in expect_text_list:
-                    self.assertIn(expect_text, str(e.exception).lower(),
-                                  "Expected '{}' at exception text".format(expect_text))
+                    self.assertIn(
+                        expect_text,
+                        str(e.exception).lower(),
+                        "Expected '{}' at exception text".format(expect_text),
+                    )
 
     def test_delete_ns(self):
         self.db.create_list("nsrs", yaml.load(db_nsrs_text, Loader=yaml.Loader))
@@ -243,10 +389,28 @@ class TestNsrTopic(unittest.TestCase):
         p_id = self.nsd_project
         p_other = "other_p"
 
-        session = {"force": False, "admin": False, "public": None, "project_id": [p_id], "method": "delete"}
-        session2 = {"force": False, "admin": False, "public": None, "project_id": [p_other], "method": "delete"}
-        session_force = {"force": True, "admin": True, "public": None, "project_id": [], "method": "delete"}
-        with self.subTest(i=1, t='Normal Deletion'):
+        session = {
+            "force": False,
+            "admin": False,
+            "public": None,
+            "project_id": [p_id],
+            "method": "delete",
+        }
+        session2 = {
+            "force": False,
+            "admin": False,
+            "public": None,
+            "project_id": [p_other],
+            "method": "delete",
+        }
+        session_force = {
+            "force": True,
+            "admin": True,
+            "public": None,
+            "project_id": [],
+            "method": "delete",
+        }
+        with self.subTest(i=1, t="Normal Deletion"):
             self.db.del_one = Mock()
             self.db.set_one = Mock()
             self.nsr_topic.delete(session, self.nsr_id)
@@ -254,19 +418,30 @@ class TestNsrTopic(unittest.TestCase):
             db_args_ro_nsrs = self.db.del_one.call_args_list[1][0]
             db_args = self.db.del_one.call_args_list[0][0]
             msg_args = self.msg.write.call_args[0]
-            self.assertEqual(msg_args[0], self.nsr_topic.topic_msg, "Wrong message topic")
+            self.assertEqual(
+                msg_args[0], self.nsr_topic.topic_msg, "Wrong message topic"
+            )
             self.assertEqual(msg_args[1], "deleted", "Wrong message action")
             self.assertEqual(msg_args[2], {"_id": self.nsr_id}, "Wrong message content")
             self.assertEqual(db_args_ro_nsrs[0], "ro_nsrs", "Wrong DB topic")
             self.assertEqual(db_args[0], self.nsr_topic.topic, "Wrong DB topic")
             self.assertEqual(db_args[1]["_id"], self.nsr_id, "Wrong DB ID")
-            self.assertEqual(db_args[1]["_admin.projects_read.cont"], [p_id], "Wrong DB filter")
+            self.assertEqual(
+                db_args[1]["_admin.projects_read.cont"], [p_id], "Wrong DB filter"
+            )
             self.db.set_one.assert_not_called()
             fs_del_calls = self.fs.file_delete.call_args_list
             self.assertEqual(fs_del_calls[0][0][0], self.nsr_id, "Wrong FS file id")
-        with self.subTest(i=2, t='No delete because referenced by other project'):
-            self.db_set_one("nsrs", {"_id": self.nsr_id}, update_dict=None, push={"_admin.projects_read": p_other,
-                                                                                  "_admin.projects_write": p_other})
+        with self.subTest(i=2, t="No delete because referenced by other project"):
+            self.db_set_one(
+                "nsrs",
+                {"_id": self.nsr_id},
+                update_dict=None,
+                push={
+                    "_admin.projects_read": p_other,
+                    "_admin.projects_write": p_other,
+                },
+            )
             self.db.del_one.reset_mock()
             self.db.set_one.reset_mock()
             self.msg.write.reset_mock()
@@ -278,12 +453,16 @@ class TestNsrTopic(unittest.TestCase):
             db_s1_args = self.db.set_one.call_args
             self.assertEqual(db_s1_args[0][0], self.nsr_topic.topic, "Wrong DB topic")
             self.assertEqual(db_s1_args[0][1]["_id"], self.nsr_id, "Wrong DB ID")
-            self.assertIsNone(db_s1_args[1]["update_dict"], "Wrong DB update dictionary")
-            self.assertEqual(db_s1_args[1]["pull_list"],
-                             {"_admin.projects_read": [p_other], "_admin.projects_write": [p_other]},
-                             "Wrong DB pull_list dictionary")
+            self.assertIsNone(
+                db_s1_args[1]["update_dict"], "Wrong DB update dictionary"
+            )
+            self.assertEqual(
+                db_s1_args[1]["pull_list"],
+                {"_admin.projects_read": [p_other], "_admin.projects_write": [p_other]},
+                "Wrong DB pull_list dictionary",
+            )
             self.fs.file_delete.assert_not_called()
-        with self.subTest(i=4, t='Delete with force and admin'):
+        with self.subTest(i=4, t="Delete with force and admin"):
             self.db.del_one.reset_mock()
             self.db.set_one.reset_mock()
             self.msg.write.reset_mock()
@@ -293,7 +472,9 @@ class TestNsrTopic(unittest.TestCase):
             db_args_ro_nsrs = self.db.del_one.call_args_list[1][0]
             db_args = self.db.del_one.call_args_list[0][0]
             msg_args = self.msg.write.call_args[0]
-            self.assertEqual(msg_args[0], self.nsr_topic.topic_msg, "Wrong message topic")
+            self.assertEqual(
+                msg_args[0], self.nsr_topic.topic_msg, "Wrong message topic"
+            )
             self.assertEqual(msg_args[1], "deleted", "Wrong message action")
             self.assertEqual(msg_args[2], {"_id": self.nsr_id}, "Wrong message content")
             self.assertEqual(db_args_ro_nsrs[0], "ro_nsrs", "Wrong DB topic")
@@ -302,29 +483,44 @@ class TestNsrTopic(unittest.TestCase):
             self.db.set_one.assert_not_called()
             fs_del_calls = self.fs.file_delete.call_args_list
             self.assertEqual(fs_del_calls[0][0][0], self.nsr_id, "Wrong FS file id")
-        with self.subTest(i=3, t='Conflict on Delete - NS in INSTANTIATED state'):
-            self.db_set_one("nsrs", {"_id": self.nsr_id}, {"_admin.nsState": "INSTANTIATED"},
-                            pull={"_admin.projects_read": p_other, "_admin.projects_write": p_other})
+        with self.subTest(i=3, t="Conflict on Delete - NS in INSTANTIATED state"):
+            self.db_set_one(
+                "nsrs",
+                {"_id": self.nsr_id},
+                {"_admin.nsState": "INSTANTIATED"},
+                pull={
+                    "_admin.projects_read": p_other,
+                    "_admin.projects_write": p_other,
+                },
+            )
             self.db.del_one.reset_mock()
             self.db.set_one.reset_mock()
             self.msg.write.reset_mock()
             self.fs.file_delete.reset_mock()
 
-            with self.assertRaises(EngineException, msg="Accepted NSR with nsState INSTANTIATED") as e:
+            with self.assertRaises(
+                EngineException, msg="Accepted NSR with nsState INSTANTIATED"
+            ) as e:
                 self.nsr_topic.delete(session, self.nsr_id)
-            self.assertEqual(e.exception.http_code, HTTPStatus.CONFLICT, "Wrong HTTP status code")
+            self.assertEqual(
+                e.exception.http_code, HTTPStatus.CONFLICT, "Wrong HTTP status code"
+            )
             self.assertIn("INSTANTIATED", str(e.exception), "Wrong exception text")
         # TODO with self.subTest(i=3, t='Conflict on Delete - NS in use by NSI'):
 
-        with self.subTest(i=4, t='Non-existent NS'):
+        with self.subTest(i=4, t="Non-existent NS"):
             self.db.del_one.reset_mock()
             self.db.set_one.reset_mock()
             self.msg.write.reset_mock()
             self.fs.file_delete.reset_mock()
             excp_msg = "Not found"
-            with self.assertRaises(DbException, msg="Accepted non-existent NSD ID") as e:
+            with self.assertRaises(
+                DbException, msg="Accepted non-existent NSD ID"
+            ) as e:
                 self.nsr_topic.delete(session2, "other_id")
-            self.assertEqual(e.exception.http_code, HTTPStatus.NOT_FOUND, "Wrong HTTP status code")
+            self.assertEqual(
+                e.exception.http_code, HTTPStatus.NOT_FOUND, "Wrong HTTP status code"
+            )
             self.assertIn(excp_msg, str(e.exception), "Wrong exception text")
             self.assertIn("other_id", str(e.exception), "Wrong exception text")
         return
index 84bf048..231818b 100644 (file)
@@ -23,12 +23,23 @@ from http import HTTPStatus
 from osm_nbi.engine import EngineException
 from osm_common.dbmemory import DbMemory
 from osm_nbi.pmjobs_topics import PmJobsTopic
-from osm_nbi.tests.test_db_descriptors import db_nsds_text, db_vnfds_text, db_nsrs_text, db_vnfrs_text
-from osm_nbi.tests.pmjob_mocks.response import show_res, prom_res, cpu_utilization, users, load, empty
+from osm_nbi.tests.test_db_descriptors import (
+    db_nsds_text,
+    db_vnfds_text,
+    db_nsrs_text,
+    db_vnfrs_text,
+)
+from osm_nbi.tests.pmjob_mocks.response import (
+    show_res,
+    prom_res,
+    cpu_utilization,
+    users,
+    load,
+    empty,
+)
 
 
 class PmJobsTopicTest(asynctest.TestCase):
-
     def setUp(self):
         self.db = DbMemory()
         self.pmjobs_topic = PmJobsTopic(self.db, host="prometheus", port=9091)
@@ -40,22 +51,39 @@ class PmJobsTopicTest(asynctest.TestCase):
         self.nsr_id = self.nsr["_id"]
         project_id = self.nsr["_admin"]["projects_write"]
         """metric_check_list contains the vnf metric name used in descriptor i.e users,load"""
-        self.metric_check_list = ['cpu_utilization', 'average_memory_utilization', 'disk_read_ops',
-                                  'disk_write_ops', 'disk_read_bytes', 'disk_write_bytes',
-                                  'packets_dropped', 'packets_sent', 'packets_received', 'users', 'load']
-        self.session = {"username": "admin", "project_id": project_id, "method": None,
-                        "admin": True, "force": False, "public": False, "allow_show_user_project_role": True}
+        self.metric_check_list = [
+            "cpu_utilization",
+            "average_memory_utilization",
+            "disk_read_ops",
+            "disk_write_ops",
+            "disk_read_bytes",
+            "disk_write_bytes",
+            "packets_dropped",
+            "packets_sent",
+            "packets_received",
+            "users",
+            "load",
+        ]
+        self.session = {
+            "username": "admin",
+            "project_id": project_id,
+            "method": None,
+            "admin": True,
+            "force": False,
+            "public": False,
+            "allow_show_user_project_role": True,
+        }
 
     def set_get_mock_res(self, mock_res, ns_id, metric_list):
         site = "http://prometheus:9091/api/v1/query?query=osm_metric_name{ns_id='nsr'}"
-        site = re.sub(r'nsr', ns_id, site)
+        site = re.sub(r"nsr", ns_id, site)
         for metric in metric_list:
-            endpoint = re.sub(r'metric_name', metric, site)
-            if metric == 'cpu_utilization':
+            endpoint = re.sub(r"metric_name", metric, site)
+            if metric == "cpu_utilization":
                 response = yaml.load(cpu_utilization, Loader=yaml.Loader)
-            elif metric == 'users':
+            elif metric == "users":
                 response = yaml.load(users, Loader=yaml.Loader)
-            elif metric == 'load':
+            elif metric == "load":
                 response = yaml.load(load, Loader=yaml.Loader)
             else:
                 response = yaml.load(empty, Loader=yaml.Loader)
@@ -66,11 +94,17 @@ class PmJobsTopicTest(asynctest.TestCase):
             prom_response = yaml.load(prom_res, Loader=yaml.Loader)
             with aioresponses() as mock_res:
                 self.set_get_mock_res(mock_res, self.nsr_id, self.metric_check_list)
-                result = await self.pmjobs_topic._prom_metric_request(self.nsr_id, self.metric_check_list)
+                result = await self.pmjobs_topic._prom_metric_request(
+                    self.nsr_id, self.metric_check_list
+                )
             self.assertCountEqual(result, prom_response, "Metric Data is valid")
         with self.subTest("Test case2 failed in test_prom"):
-            with self.assertRaises(EngineException, msg="Prometheus not reachable") as e:
-                await self.pmjobs_topic._prom_metric_request(self.nsr_id, self.metric_check_list)
+            with self.assertRaises(
+                EngineException, msg="Prometheus not reachable"
+            ) as e:
+                await self.pmjobs_topic._prom_metric_request(
+                    self.nsr_id, self.metric_check_list
+                )
             self.assertIn("Connection to ", str(e.exception), "Wrong exception text")
 
     def test_show(self):
@@ -79,7 +113,7 @@ class PmJobsTopicTest(asynctest.TestCase):
             with aioresponses() as mock_res:
                 self.set_get_mock_res(mock_res, self.nsr_id, self.metric_check_list)
                 result = self.pmjobs_topic.show(self.session, self.nsr_id)
-            self.assertEqual(len(result['entries']), 1, "Number of metrics returned")
+            self.assertEqual(len(result["entries"]), 1, "Number of metrics returned")
             self.assertCountEqual(result, show_response, "Response is valid")
         with self.subTest("Test case2 failed in test_show"):
             wrong_ns_id = "88d90b0c-faff-4bbc-cccc-aaaaaaaaaaaa"
@@ -87,6 +121,13 @@ class PmJobsTopicTest(asynctest.TestCase):
                 self.set_get_mock_res(mock_res, wrong_ns_id, self.metric_check_list)
                 with self.assertRaises(EngineException, msg="ns not found") as e:
                     self.pmjobs_topic.show(self.session, wrong_ns_id)
-                self.assertEqual(e.exception.http_code, HTTPStatus.NOT_FOUND, "Wrong HTTP status code")
-                self.assertIn("NS not found with id {}".format(wrong_ns_id), str(e.exception),
-                              "Wrong exception text")
+                self.assertEqual(
+                    e.exception.http_code,
+                    HTTPStatus.NOT_FOUND,
+                    "Wrong HTTP status code",
+                )
+                self.assertIn(
+                    "NS not found with id {}".format(wrong_ns_id),
+                    str(e.exception),
+                    "Wrong exception text",
+                )
index d256801..dfd7302 100755 (executable)
@@ -41,8 +41,11 @@ def usage():
 if __name__ == "__main__":
     try:
         # load parameters and configuration
-        opts, args = getopt.getopt(sys.argv[1:], "hvu:s:f:t:",
-                                   ["url=", "help", "version", "verbose", "file=", "chunk-size=", "token="])
+        opts, args = getopt.getopt(
+            sys.argv[1:],
+            "hvu:s:f:t:",
+            ["url=", "help", "version", "verbose", "file=", "chunk-size=", "token="],
+        )
         url = None
         chunk_size = 500
         pkg_file = None
@@ -51,7 +54,7 @@ if __name__ == "__main__":
 
         for o, a in opts:
             if o == "--version":
-                print("upload version " + __version__ + ' ' + version_date)
+                print("upload version " + __version__ + " " + version_date)
                 sys.exit()
             elif o in ("-v", "--verbose"):
                 verbose += 1
@@ -72,7 +75,7 @@ if __name__ == "__main__":
         index = 0
         transaction_id = None
         file_md5 = md5()
-        with open(pkg_file, 'rb') as f:
+        with open(pkg_file, "rb") as f:
             headers = {
                 "Content-type": "application/gzip",
                 "Content-Filename": basename(pkg_file),
@@ -87,10 +90,12 @@ if __name__ == "__main__":
                 #            "chunk_size": chunk_size}
                 if transaction_id:
                     headers["Transaction-Id"] = transaction_id
-                if index+len(chunk_data) == total_size:
+                if index + len(chunk_data) == total_size:
                     headers["Content-File-MD5"] = file_md5.hexdigest()
                 #    payload["id"] = transaction_id
-                headers["Content-range"] = "bytes {}-{}/{}".format(index, index+len(chunk_data)-1, total_size)
+                headers["Content-range"] = "bytes {}-{}/{}".format(
+                    index, index + len(chunk_data) - 1, total_size
+                )
                 # refers to rfc2616:  https://www.w3.org/Protocols/rfc2616/rfc2616-sec14.html
                 if verbose:
                     print("TX chunk Headers: {}".format(headers))
index 43fd84d..153086b 100644 (file)
@@ -16,7 +16,7 @@
 from jsonschema import validate as js_v, exceptions as js_e
 from http import HTTPStatus
 from copy import deepcopy
-from uuid import UUID   # To test for valid UUID
+from uuid import UUID  # To test for valid UUID
 
 __author__ = "Alfonso Tierno <alfonso.tiernosepulveda@telefonica.com>"
 __version__ = "0.1"
 Validator of input data using JSON schemas for those items that do not contain an OSM yang information model
 
 # Basis schemas
 patern_name = "^[ -~]+$"
-shortname_schema = {"type": "string", "minLength": 1, "maxLength": 60, "pattern": "^[^,;()\\.\\$'\"]+$"}
+shortname_schema = {
+    "type": "string",
+    "minLength": 1,
+    "maxLength": 60,
+    "pattern": "^[^,;()\\.\\$'\"]+$",
+}
 passwd_schema = {"type": "string", "minLength": 1, "maxLength": 60}
-name_schema = {"type": "string", "minLength": 1, "maxLength": 255, "pattern": "^[^,;()'\"]+$"}
+name_schema = {
+    "type": "string",
+    "minLength": 1,
+    "maxLength": 255,
+    "pattern": "^[^,;()'\"]+$",
+}
 string_schema = {"type": "string", "minLength": 1, "maxLength": 255}
-xml_text_schema = {"type": "string", "minLength": 1, "maxLength": 1000, "pattern": "^[^']+$"}
-description_schema = {"type": ["string", "null"], "maxLength": 255, "pattern": "^[^'\"]+$"}
-long_description_schema = {"type": ["string", "null"], "maxLength": 3000, "pattern": "^[^'\"]+$"}
+xml_text_schema = {
+    "type": "string",
+    "minLength": 1,
+    "maxLength": 1000,
+    "pattern": "^[^']+$",
+}
+description_schema = {
+    "type": ["string", "null"],
+    "maxLength": 255,
+    "pattern": "^[^'\"]+$",
+}
+long_description_schema = {
+    "type": ["string", "null"],
+    "maxLength": 3000,
+    "pattern": "^[^'\"]+$",
+}
 id_schema_fake = {"type": "string", "minLength": 2, "maxLength": 36}
 bool_schema = {"type": "boolean"}
 null_schema = {"type": "null"}
 # "pattern": "^[a-fA-F0-9]{8}-[a-fA-F0-9]{4}-[a-fA-F0-9]{4}-[a-fA-F0-9]{4}-[a-fA-F0-9]{12}$"
-id_schema = {"type": "string", "pattern": "^[a-fA-F0-9]{8}(-[a-fA-F0-9]{4}){3}-[a-fA-F0-9]{12}$"}
-time_schema = {"type": "string", "pattern": "^[0-9]{4}-[0-1][0-9]-[0-3][0-9]T[0-2][0-9]([0-5]:){2}"}
-pci_schema = {"type": "string", "pattern": "^[0-9a-fA-F]{4}(:[0-9a-fA-F]{2}){2}\\.[0-9a-fA-F]$"}
+id_schema = {
+    "type": "string",
+    "pattern": "^[a-fA-F0-9]{8}(-[a-fA-F0-9]{4}){3}-[a-fA-F0-9]{12}$",
+}
+time_schema = {
+    "type": "string",
+    "pattern": "^[0-9]{4}-[0-1][0-9]-[0-3][0-9]T[0-2][0-9]([0-5]:){2}",
+}
+pci_schema = {
+    "type": "string",
+    "pattern": "^[0-9a-fA-F]{4}(:[0-9a-fA-F]{2}){2}\\.[0-9a-fA-F]$",
+}
 # allows [] for wildcards. For that reason huge length limit is set
 pci_extended_schema = {"type": "string", "pattern": "^[0-9a-fA-F.:-\\[\\]]{12,40}$"}
 http_schema = {"type": "string", "pattern": "^(https?|http)://[^'\"=]+$"}
@@ -52,27 +84,34 @@ integer1_schema = {"type": "integer", "minimum": 1}
 path_schema = {"type": "string", "pattern": "^(\\.){0,2}(/[^/\"':{}\\(\\)]+)+$"}
 vlan_schema = {"type": "integer", "minimum": 1, "maximum": 4095}
 vlan1000_schema = {"type": "integer", "minimum": 1000, "maximum": 4095}
-mac_schema = {"type": "string",
-              "pattern": "^[0-9a-fA-F][02468aceACE](:[0-9a-fA-F]{2}){5}$"}  # must be unicast: LSB bit of MSB byte ==0
+mac_schema = {
+    "type": "string",
+    "pattern": "^[0-9a-fA-F][02468aceACE](:[0-9a-fA-F]{2}){5}$",
+}  # must be unicast: LSB bit of MSB byte ==0
 dpid_Schema = {"type": "string", "pattern": "^[0-9a-fA-F]{2}(:[0-9a-fA-F]{2}){7}$"}
 # mac_schema={"type":"string", "pattern":"^([0-9a-fA-F]{2}:){5}[0-9a-fA-F]{2}$"}
-ip_schema = {"type": "string",
-             "pattern": "^((25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\\.){3}(25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)$"}
-ip_prefix_schema = {"type": "string",
-                    "pattern": "^((25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\\.){3}"
-                               "(25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)/(30|[12]?[0-9])$"}
+ip_schema = {
+    "type": "string",
+    "pattern": "^((25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\\.){3}(25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)$",
+}
+ip_prefix_schema = {
+    "type": "string",
+    "pattern": "^((25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\\.){3}"
+    "(25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)/(30|[12]?[0-9])$",
+}
 port_schema = {"type": "integer", "minimum": 1, "maximum": 65534}
 object_schema = {"type": "object"}
 schema_version_2 = {"type": "integer", "minimum": 2, "maximum": 2}
 # schema_version_string={"type":"string","enum": ["0.1", "2", "0.2", "3", "0.3"]}
-log_level_schema = {"type": "string", "enum": ["DEBUG", "INFO", "WARNING", "ERROR", "CRITICAL"]}
+log_level_schema = {
+    "type": "string",
+    "enum": ["DEBUG", "INFO", "WARNING", "ERROR", "CRITICAL"],
+}
 checksum_schema = {"type": "string", "pattern": "^[0-9a-fA-F]{32}$"}
 size_schema = {"type": "integer", "minimum": 1, "maximum": 100}
 array_edition_schema = {
     "type": "object",
-    "patternProperties": {
-        "^\\$": {}
-    },
+    "patternProperties": {"^\\$": {}},
     "additionalProperties": False,
     "minProperties": 1,
 }
@@ -104,8 +143,8 @@ ns_instantiate_vdu = {
                     "vim-volume-id": name_schema,
                 },
                 "required": ["name", "vim-volume-id"],
-                "additionalProperties": False
-            }
+                "additionalProperties": False,
+            },
         },
         "interface": {
             "type": "array",
@@ -119,12 +158,12 @@ ns_instantiate_vdu = {
                     "floating-ip-required": bool_schema,
                 },
                 "required": ["name"],
-                "additionalProperties": False
-            }
-        }
+                "additionalProperties": False,
+            },
+        },
     },
     "required": ["id"],
-    "additionalProperties": False
+    "additionalProperties": False,
 }
 
 ip_profile_dns_schema = {
@@ -136,8 +175,8 @@ ip_profile_dns_schema = {
             "address": ip_schema,
         },
         "required": ["address"],
-        "additionalProperties": False
-    }
+        "additionalProperties": False,
+    },
 }
 
 ip_profile_dhcp_schema = {
@@ -145,7 +184,7 @@ ip_profile_dhcp_schema = {
     "properties": {
         "enabled": {"type": "boolean"},
         "count": integer1_schema,
-        "start-address": ip_schema
+        "start-address": ip_schema,
     },
     "additionalProperties": False,
 }
@@ -160,7 +199,7 @@ ip_profile_schema = {
         "gateway-address": ip_schema,
         "dns-server": ip_profile_dns_schema,
         "dhcp-params": ip_profile_dhcp_schema,
-    }
+    },
 }
 
 ip_profile_update_schema = {
@@ -172,10 +211,9 @@ ip_profile_update_schema = {
         "subnet-address": {"oneOf": [null_schema, ip_prefix_schema]},
         "gateway-address": {"oneOf": [null_schema, ip_schema]},
         "dns-server": {"oneOf": [null_schema, ip_profile_dns_schema]},
-
         "dhcp-params": {"oneOf": [null_schema, ip_profile_dhcp_schema]},
     },
-    "additionalProperties": False
+    "additionalProperties": False,
 }
 
 provider_network_schema = {
@@ -195,12 +233,12 @@ provider_network_schema = {
                     "mac_address": mac_schema,
                     "vlan": vlan_schema,
                 },
-                "additionalProperties": True
-            }
+                "additionalProperties": True,
+            },
         },
         "network-type": shortname_schema,
     },
-    "additionalProperties": True
+    "additionalProperties": True,
 }
 
 ns_instantiate_internal_vld = {
@@ -225,13 +263,13 @@ ns_instantiate_internal_vld = {
                 },
                 "required": ["id-ref"],
                 "minProperties": 2,
-                "additionalProperties": False
+                "additionalProperties": False,
             },
-        }
+        },
     },
     "required": ["name"],
     "minProperties": 2,
-    "additionalProperties": False
+    "additionalProperties": False,
 }
 
 additional_params_for_vnf = {
@@ -250,7 +288,7 @@ additional_params_for_vnf = {
                     "properties": {
                         "vdu_id": name_schema,
                         "additionalParams": object_schema,
-                        "config-units": integer1_schema,   # number of configuration units of this vdu, by default 1
+                        "config-units": integer1_schema,  # number of configuration units of this vdu, by default 1
                     },
                     "required": ["vdu_id"],
                     "minProperties": 2,
@@ -266,7 +304,7 @@ additional_params_for_vnf = {
                         "additionalParams": object_schema,
                         "kdu_model": name_schema,
                         "k8s-namespace": name_schema,
-                        "config-units": integer1_schema,    # number of configuration units of this knf, by default 1
+                        "config-units": integer1_schema,  # number of configuration units of this knf, by default 1
                     },
                     "required": ["kdu_name"],
                     "minProperties": 2,
@@ -276,8 +314,8 @@ additional_params_for_vnf = {
         },
         "required": ["member-vnf-index"],
         "minProperties": 2,
-        "additionalProperties": False
-    }
+        "additionalProperties": False,
+    },
 }
 
 ns_instantiate = {
@@ -298,7 +336,7 @@ ns_instantiate = {
         "placement-constraints": object_schema,
         "additionalParamsForNs": object_schema,
         "additionalParamsForVnf": additional_params_for_vnf,
-        "config-units": integer1_schema,    # number of configuration units of this ns, by default 1
+        "config-units": integer1_schema,  # number of configuration units of this ns, by default 1
         "k8s-namespace": name_schema,
         "ssh_keys": {"type": "array", "items": {"type": "string"}},
         "timeout_ns_deploy": integer1_schema,
@@ -321,13 +359,13 @@ ns_instantiate = {
                     "internal-vld": {
                         "type": "array",
                         "minItems": 1,
-                        "items": ns_instantiate_internal_vld
-                    }
+                        "items": ns_instantiate_internal_vld,
+                    },
                 },
                 "required": ["member-vnf-index"],
                 "minProperties": 2,
-                "additionalProperties": False
-            }
+                "additionalProperties": False,
+            },
         },
         "vld": {
             "type": "array",
@@ -353,19 +391,22 @@ ns_instantiate = {
                                 "ip-address": ip_schema,
                                 # "mac-address": mac_schema,
                             },
-                            "required": ["member-vnf-index-ref", "vnfd-connection-point-ref"],
+                            "required": [
+                                "member-vnf-index-ref",
+                                "vnfd-connection-point-ref",
+                            ],
                             "minProperties": 3,
-                            "additionalProperties": False
+                            "additionalProperties": False,
                         },
-                    }
+                    },
                 },
                 "required": ["name"],
-                "additionalProperties": False
-            }
+                "additionalProperties": False,
+            },
         },
     },
     "required": ["nsName", "nsdId", "vimAccountId"],
-    "additionalProperties": False
+    "additionalProperties": False,
 }
 
 ns_terminate = {
@@ -380,10 +421,10 @@ ns_terminate = {
         "skip_terminate_primitives": bool_schema,
         "netsliceInstanceId": id_schema,
     },
-    "additionalProperties": False
+    "additionalProperties": False,
 }
 
-ns_action = {   # TODO for the moment it is only contemplated the vnfd primitive execution
+ns_action = {  # TODO for the moment it is only contemplated the vnfd primitive execution
     "title": "ns action input schema",
     "$schema": "http://json-schema.org/draft-04/schema#",
     "type": "object",
@@ -399,10 +440,10 @@ ns_action = {   # TODO for the moment it is only contemplated the vnfd primitive
         "timeout_ns_action": integer1_schema,
         "primitive_params": {"type": "object"},
     },
-    "required": ["primitive", "primitive_params"],   # TODO add member_vnf_index
-    "additionalProperties": False
+    "required": ["primitive", "primitive_params"],  # TODO add member_vnf_index
+    "additionalProperties": False,
 }
-ns_scale = {   # TODO for the moment it is only VDU-scaling
+ns_scale = {  # TODO for the moment it is only VDU-scaling
     "title": "ns scale input schema",
     "$schema": "http://json-schema.org/draft-04/schema#",
     "type": "object",
@@ -415,7 +456,7 @@ ns_scale = {   # TODO for the moment it is only VDU-scaling
             "type": "object",
             "properties": {
                 "vnfInstanceId": name_schema,
-                "scaleVnfType": {"enum": ["SCALE_OUT", 'SCALE_IN']},
+                "scaleVnfType": {"enum": ["SCALE_OUT", "SCALE_IN"]},
                 "scaleByStepData": {
                     "type": "object",
                     "properties": {
@@ -424,16 +465,16 @@ ns_scale = {   # TODO for the moment it is only VDU-scaling
                         "scaling-policy": name_schema,
                     },
                     "required": ["scaling-group-descriptor", "member-vnf-index"],
-                    "additionalProperties": False
+                    "additionalProperties": False,
                 },
             },
             "required": ["scaleVnfType", "scaleByStepData"],  # vnfInstanceId
-            "additionalProperties": False
+            "additionalProperties": False,
         },
         "scaleTime": time_schema,
     },
     "required": ["scaleType", "scaleVnfData"],
-    "additionalProperties": False
+    "additionalProperties": False,
 }
 
 
@@ -458,9 +499,9 @@ vim_account_edit_schema = {
         "vim_user": shortname_schema,
         "vim_password": passwd_schema,
         "vca": id_schema,
-        "config": {"type": "object"}
+        "config": {"type": "object"},
     },
-    "additionalProperties": False
+    "additionalProperties": False,
 }
 
 vim_account_new_schema = {
@@ -482,10 +523,17 @@ vim_account_new_schema = {
         "vim_user": shortname_schema,
         "vim_password": passwd_schema,
         "vca": id_schema,
-        "config": {"type": "object"}
+        "config": {"type": "object"},
     },
-    "required": ["name", "vim_url", "vim_type", "vim_user", "vim_password", "vim_tenant_name"],
-    "additionalProperties": False
+    "required": [
+        "name",
+        "vim_url",
+        "vim_type",
+        "vim_user",
+        "vim_password",
+        "vim_tenant_name",
+    ],
+    "additionalProperties": False,
 }
 
 wim_type = shortname_schema  # {"enum": ["ietfl2vpn", "onos", "odl", "dynpac", "fake"]}
@@ -502,9 +550,9 @@ wim_account_edit_schema = {
         "wim_url": description_schema,
         "user": shortname_schema,
         "password": passwd_schema,
-        "config": {"type": "object"}
+        "config": {"type": "object"},
     },
-    "additionalProperties": False
+    "additionalProperties": False,
 }
 
 wim_account_new_schema = {
@@ -523,13 +571,11 @@ wim_account_new_schema = {
         "password": passwd_schema,
         "config": {
             "type": "object",
-            "patternProperties": {
-                ".": {"not": {"type": "null"}}
-            }
-        }
+            "patternProperties": {".": {"not": {"type": "null"}}},
+        },
     },
     "required": ["name", "wim_url", "wim_type"],
-    "additionalProperties": False
+    "additionalProperties": False,
 }
 
 sdn_properties = {
@@ -551,8 +597,8 @@ sdn_new_schema = {
     "$schema": "http://json-schema.org/draft-04/schema#",
     "type": "object",
     "properties": sdn_properties,
-    "required": ["name", 'type'],
-    "additionalProperties": False
+    "required": ["name", "type"],
+    "additionalProperties": False,
 }
 sdn_edit_schema = {
     "title": "sdn controller update information schema",
@@ -560,7 +606,7 @@ sdn_edit_schema = {
     "type": "object",
     "properties": sdn_properties,
     # "required": ["name", "port", 'ip', 'dpid', 'type'],
-    "additionalProperties": False
+    "additionalProperties": False,
 }
 sdn_port_mapping_schema = {
     "$schema": "http://json-schema.org/draft-04/schema#",
@@ -577,14 +623,14 @@ sdn_port_mapping_schema = {
                     "properties": {
                         "pci": pci_extended_schema,
                         "switch_port": shortname_schema,
-                        "switch_mac": mac_schema
+                        "switch_mac": mac_schema,
                     },
-                    "required": ["pci"]
-                }
-            }
+                    "required": ["pci"],
+                },
+            },
         },
-        "required": ["compute_node", "ports"]
-    }
+        "required": ["compute_node", "ports"],
+    },
 }
 sdn_external_port_schema = {
     "$schema": "http://json-schema.org/draft-04/schema#",
@@ -593,9 +639,9 @@ sdn_external_port_schema = {
     "properties": {
         "port": {"type": "string", "minLength": 1, "maxLength": 60},
         "vlan": vlan_schema,
-        "mac": mac_schema
+        "mac": mac_schema,
     },
-    "required": ["port"]
+    "required": ["port"],
 }
 
 # K8s Clusters
@@ -605,7 +651,7 @@ k8scluster_nets_schema = {
     "type": "object",
     "patternProperties": {".": {"oneOf": [name_schema, null_schema]}},
     "minProperties": 1,
-    "additionalProperties": False
+    "additionalProperties": False,
 }
 k8scluster_new_schema = {
     "title": "k8scluster creation input schema",
@@ -625,7 +671,7 @@ k8scluster_new_schema = {
         "cni": nameshort_list_schema,
     },
     "required": ["name", "credentials", "vim_account", "k8s_version", "nets"],
-    "additionalProperties": False
+    "additionalProperties": False,
 }
 k8scluster_edit_schema = {
     "title": "vim_account edit input schema",
@@ -642,7 +688,7 @@ k8scluster_edit_schema = {
         "namespace": name_schema,
         "cni": nameshort_list_schema,
     },
-    "additionalProperties": False
+    "additionalProperties": False,
 }
 
 # VCA
@@ -713,14 +759,14 @@ k8srepo_new_schema = {
     "type": "object",
     "properties": k8srepo_properties,
     "required": ["name", "type", "url"],
-    "additionalProperties": False
+    "additionalProperties": False,
 }
 k8srepo_edit_schema = {
     "title": "vim_account edit input schema",
     "$schema": "http://json-schema.org/draft-04/schema#",
     "type": "object",
     "properties": k8srepo_properties,
-    "additionalProperties": False
+    "additionalProperties": False,
 }
 
 # OSM Repos
@@ -739,14 +785,14 @@ osmrepo_new_schema = {
     "type": "object",
     "properties": osmrepo_properties,
     "required": ["name", "type", "url"],
-    "additionalProperties": False
+    "additionalProperties": False,
 }
 osmrepo_edit_schema = {
     "title": "osm repo edit input schema",
     "$schema": "http://json-schema.org/draft-04/schema#",
     "type": "object",
     "properties": osmrepo_properties,
-    "additionalProperties": False
+    "additionalProperties": False,
 }
 
 # PDUs
@@ -755,7 +801,7 @@ pdu_interface = {
     "properties": {
         "name": shortname_schema,
         "mgmt": bool_schema,
-        "type": {"enum": ["overlay", 'underlay']},
+        "type": {"enum": ["overlay", "underlay"]},
         "ip-address": ip_schema,
         # TODO, add user, password, ssh-key
         "mac-address": mac_schema,
@@ -768,7 +814,7 @@ pdu_interface = {
         # "switch-vlan": vlan_schema,
     },
     "required": ["name", "mgmt", "ip-address"],
-    "additionalProperties": False
+    "additionalProperties": False,
 }
 pdu_new_schema = {
     "title": "pdu creation input schema",
@@ -781,14 +827,10 @@ pdu_new_schema = {
         "shared": bool_schema,
         "vims": nameshort_list_schema,
         "vim_accounts": nameshort_list_schema,
-        "interfaces": {
-            "type": "array",
-            "items": pdu_interface,
-            "minItems": 1
-        }
+        "interfaces": {"type": "array", "items": pdu_interface, "minItems": 1},
     },
     "required": ["name", "type", "interfaces"],
-    "additionalProperties": False
+    "additionalProperties": False,
 }
 pdu_edit_schema = {
     "title": "pdu edit input schema",
@@ -801,17 +843,15 @@ pdu_edit_schema = {
         "shared": bool_schema,
         "vims": {"oneOf": [array_edition_schema, nameshort_list_schema]},
         "vim_accounts": {"oneOf": [array_edition_schema, nameshort_list_schema]},
-        "interfaces": {"oneOf": [
-            array_edition_schema,
-            {
-                "type": "array",
-                "items": pdu_interface,
-                "minItems": 1
-            }
-        ]}
+        "interfaces": {
+            "oneOf": [
+                array_edition_schema,
+                {"type": "array", "items": pdu_interface, "minItems": 1},
+            ]
+        },
     },
     "additionalProperties": False,
-    "minProperties": 1
+    "minProperties": 1,
 }
 
 # VNF PKG OPERATIONS
@@ -826,8 +866,14 @@ vnfpkgop_new_schema = {
         "primitive": name_schema,
         "primitive_params": {"type": "object"},
     },
-    "required": ["lcmOperationType", "vnfPkgId", "kdu_name", "primitive", "primitive_params"],
-    "additionalProperties": False
+    "required": [
+        "lcmOperationType",
+        "vnfPkgId",
+        "kdu_name",
+        "primitive",
+        "primitive_params",
+    ],
+    "additionalProperties": False,
 }
 
 # USERS
@@ -837,14 +883,11 @@ project_role_mappings = {
     "type": "array",
     "items": {
         "type": "object",
-        "properties": {
-            "project": shortname_schema,
-            "role": shortname_schema
-        },
+        "properties": {"project": shortname_schema, "role": shortname_schema},
         "required": ["project", "role"],
-        "additionalProperties": False
+        "additionalProperties": False,
     },
-    "minItems": 1
+    "minItems": 1,
 }
 project_role_mappings_optional = {
     "title": "list of projects/roles or projects only",
@@ -852,14 +895,11 @@ project_role_mappings_optional = {
     "type": "array",
     "items": {
         "type": "object",
-        "properties": {
-            "project": shortname_schema,
-            "role": shortname_schema
-        },
+        "properties": {"project": shortname_schema, "role": shortname_schema},
         "required": ["project"],
-        "additionalProperties": False
+        "additionalProperties": False,
     },
-    "minItems": 1
+    "minItems": 1,
 }
 user_new_schema = {
     "$schema": "http://json-schema.org/draft-04/schema#",
@@ -873,7 +913,7 @@ user_new_schema = {
         "project_role_mappings": project_role_mappings,
     },
     "required": ["username", "password"],
-    "additionalProperties": False
+    "additionalProperties": False,
 }
 user_edit_schema = {
     "$schema": "http://json-schema.org/draft-04/schema#",
@@ -881,24 +921,33 @@ user_edit_schema = {
     "type": "object",
     "properties": {
         "password": passwd_schema,
-        "username": shortname_schema,     # To allow User Name modification
-        "projects": {
-            "oneOf": [
-                nameshort_list_schema,
-                array_edition_schema
-            ]
-        },
+        "username": shortname_schema,  # To allow User Name modification
+        "projects": {"oneOf": [nameshort_list_schema, array_edition_schema]},
         "project_role_mappings": project_role_mappings,
         "add_project_role_mappings": project_role_mappings,
         "remove_project_role_mappings": project_role_mappings_optional,
     },
     "minProperties": 1,
-    "additionalProperties": False
+    "additionalProperties": False,
 }
 
 # PROJECTS
-topics_with_quota = ["vnfds", "nsds", "slice_templates", "pduds", "ns_instances", "slice_instances", "vim_accounts",
-                     "wim_accounts", "sdn_controllers", "k8sclusters", "vca", "k8srepos", "osmrepos", "ns_subscriptions"]
+topics_with_quota = [
+    "vnfds",
+    "nsds",
+    "slice_templates",
+    "pduds",
+    "ns_instances",
+    "slice_instances",
+    "vim_accounts",
+    "wim_accounts",
+    "sdn_controllers",
+    "k8sclusters",
+    "vca",
+    "k8srepos",
+    "osmrepos",
+    "ns_subscriptions",
+]
 project_new_schema = {
     "$schema": "http://json-schema.org/draft-04/schema#",
     "title": "New project schema for administrators",
@@ -910,11 +959,11 @@ project_new_schema = {
         "quotas": {
             "type": "object",
             "properties": {topic: integer0_schema for topic in topics_with_quota},
-            "additionalProperties": False
+            "additionalProperties": False,
         },
     },
     "required": ["name"],
-    "additionalProperties": False
+    "additionalProperties": False,
 }
 project_edit_schema = {
     "$schema": "http://json-schema.org/draft-04/schema#",
@@ -922,15 +971,18 @@ project_edit_schema = {
     "type": "object",
     "properties": {
         "admin": bool_schema,
-        "name": shortname_schema,     # To allow Project Name modification
+        "name": shortname_schema,  # To allow Project Name modification
         "quotas": {
             "type": "object",
-            "properties": {topic: {"oneOf": [integer0_schema, null_schema]} for topic in topics_with_quota},
-            "additionalProperties": False
+            "properties": {
+                topic: {"oneOf": [integer0_schema, null_schema]}
+                for topic in topics_with_quota
+            },
+            "additionalProperties": False,
         },
     },
     "additionalProperties": False,
-    "minProperties": 1
+    "minProperties": 1,
 }
 
 # ROLES
@@ -946,10 +998,10 @@ roles_new_schema = {
                 ".": bool_schema,
             },
             # "minProperties": 1,
-        }
+        },
     },
     "required": ["name"],
-    "additionalProperties": False
+    "additionalProperties": False,
 }
 roles_edit_schema = {
     "$schema": "http://json-schema.org/draft-04/schema#",
@@ -959,16 +1011,12 @@ roles_edit_schema = {
         "name": shortname_schema,
         "permissions": {
             "type": "object",
-            "patternProperties": {
-                ".": {
-                    "oneOf": [bool_schema, null_schema]
-                }
-            },
+            "patternProperties": {".": {"oneOf": [bool_schema, null_schema]}},
             # "minProperties": 1,
-        }
+        },
     },
     "additionalProperties": False,
-    "minProperties": 1
+    "minProperties": 1,
 }
 
 # GLOBAL SCHEMAS
@@ -1009,7 +1057,7 @@ nsi_vld_instantiate = {
         "ip-profile": object_schema,
     },
     "required": ["name"],
-    "additionalProperties": False
+    "additionalProperties": False,
 }
 
 nsi_instantiate = {
@@ -1030,25 +1078,17 @@ nsi_instantiate = {
         "netslice-subnet": {
             "type": "array",
             "minItems": 1,
-            "items": nsi_subnet_instantiate
-        },
-        "netslice-vld": {
-            "type": "array",
-            "minItems": 1,
-            "items": nsi_vld_instantiate
+            "items": nsi_subnet_instantiate,
         },
+        "netslice-vld": {"type": "array", "minItems": 1, "items": nsi_vld_instantiate},
     },
     "required": ["nsiName", "nstId", "vimAccountId"],
-    "additionalProperties": False
-}
-
-nsi_action = {
-
+    "additionalProperties": False,
 }
 
-nsi_terminate = {
+nsi_action = {}
 
-}
+nsi_terminate = {}
 
 nsinstancesubscriptionfilter_schema = {
     "title": "instance identifier schema",
@@ -1072,42 +1112,65 @@ nslcmsub_schema = {
         "notificationTypes": {
             "type": "array",
             "items": {
-                "enum": ['NsLcmOperationOccurrenceNotification', 'NsChangeNotification',
-                         'NsIdentifierCreationNotification', 'NsIdentifierDeletionNotification']
-            }
+                "enum": [
+                    "NsLcmOperationOccurrenceNotification",
+                    "NsChangeNotification",
+                    "NsIdentifierCreationNotification",
+                    "NsIdentifierDeletionNotification",
+                ]
+            },
         },
         "operationTypes": {
             "type": "array",
-            "items": {
-                "enum": ['INSTANTIATE', 'SCALE', 'TERMINATE', 'UPDATE', 'HEAL']
-            }
+            "items": {"enum": ["INSTANTIATE", "SCALE", "TERMINATE", "UPDATE", "HEAL"]},
         },
         "operationStates": {
             "type": "array",
             "items": {
-                "enum": ['PROCESSING', 'COMPLETED', 'PARTIALLY_COMPLETED', 'FAILED',
-                         'FAILED_TEMP', 'ROLLING_BACK', 'ROLLED_BACK']
-            }
-        },
-        "nsComponentTypes": {
-            "type": "array",
-            "items": {
-                "enum": ['VNF', 'NS', 'PNF']
-            }
+                "enum": [
+                    "PROCESSING",
+                    "COMPLETED",
+                    "PARTIALLY_COMPLETED",
+                    "FAILED",
+                    "FAILED_TEMP",
+                    "ROLLING_BACK",
+                    "ROLLED_BACK",
+                ]
+            },
         },
+        "nsComponentTypes": {"type": "array", "items": {"enum": ["VNF", "NS", "PNF"]}},
         "lcmOpNameImpactingNsComponent": {
             "type": "array",
             "items": {
-                "enum": ['VNF_INSTANTIATE', 'VNF_SCALE', 'VNF_SCALE_TO_LEVEL', 'VNF_CHANGE_FLAVOUR',
-                         'VNF_TERMINATE', 'VNF_HEAL', 'VNF_OPERATE', 'VNF_CHANGE_EXT_CONN', 'VNF_MODIFY_INFO',
-                         'NS_INSTANTIATE', 'NS_SCALE', 'NS_UPDATE', 'NS_TERMINATE', 'NS_HEAL']
-            }
+                "enum": [
+                    "VNF_INSTANTIATE",
+                    "VNF_SCALE",
+                    "VNF_SCALE_TO_LEVEL",
+                    "VNF_CHANGE_FLAVOUR",
+                    "VNF_TERMINATE",
+                    "VNF_HEAL",
+                    "VNF_OPERATE",
+                    "VNF_CHANGE_EXT_CONN",
+                    "VNF_MODIFY_INFO",
+                    "NS_INSTANTIATE",
+                    "NS_SCALE",
+                    "NS_UPDATE",
+                    "NS_TERMINATE",
+                    "NS_HEAL",
+                ]
+            },
         },
         "lcmOpOccStatusImpactingNsComponent": {
             "type": "array",
             "items": {
-                "enum": ['START', 'COMPLETED', 'PARTIALLY_COMPLETED', 'FAILED', 'ROLLED_BACK']
-            }
+                "enum": [
+                    "START",
+                    "COMPLETED",
+                    "PARTIALLY_COMPLETED",
+                    "FAILED",
+                    "ROLLED_BACK",
+                ]
+            },
         },
     },
     "allOf": [
@@ -1124,14 +1187,12 @@ nslcmsub_schema = {
                     {"required": ["operationTypes"]},
                     {"required": ["operationStates"]},
                 ]
-            } 
+            },
         },
         {
             "if": {
                 "properties": {
-                    "notificationTypes": {
-                        "contains": {"const": "NsChangeNotification"}
-                    }
+                    "notificationTypes": {"contains": {"const": "NsChangeNotification"}}
                 },
             },
             "then": {
@@ -1140,9 +1201,9 @@ nslcmsub_schema = {
                     {"required": ["lcmOpNameImpactingNsComponent"]},
                     {"required": ["lcmOpOccStatusImpactingNsComponent"]},
                 ]
-            }
-        }
-    ]
+            },
+        },
+    ],
 }
 
 authentication_schema = {
@@ -1168,7 +1229,7 @@ subscription = {
     "properties": {
         "filter": nslcmsub_schema,
         "CallbackUri": description_schema,
-        "authentication": authentication_schema
+        "authentication": authentication_schema,
     },
     "required": ["CallbackUri"],
 }
@@ -1198,7 +1259,10 @@ def validate_input(indata, schema_to_use):
             error_pos = ""
         raise ValidationError("Format error {} '{}' ".format(error_pos, e.message))
     except js_e.SchemaError:
-        raise ValidationError("Bad json schema {}".format(schema_to_use), http_code=HTTPStatus.INTERNAL_SERVER_ERROR)
+        raise ValidationError(
+            "Bad json schema {}".format(schema_to_use),
+            http_code=HTTPStatus.INTERNAL_SERVER_ERROR,
+        )
 
 
 def is_valid_uuid(x):
diff --git a/setup.py b/setup.py
index ad987ac..c6f0bc6 100644 (file)
--- a/setup.py
+++ b/setup.py
@@ -24,24 +24,26 @@ _name = "osm_nbi"
 here = os.path.abspath(os.path.dirname(__file__))
 # with open(os.path.join(here, 'osm_nbi/html_public/version')) as version_file:
 #     VERSION = version_file.readline().strip()
-with open(os.path.join(here, 'README.rst')) as readme_file:
+with open(os.path.join(here, "README.rst")) as readme_file:
     README = readme_file.read()
 
 setup(
     name=_name,
-    description='OSM North Bound Interface',
+    description="OSM North Bound Interface",
     long_description=README,
-    version_command=('git describe --match v* --tags --long --dirty', 'pep440-git-full'),
+    version_command=(
+        "git describe --match v* --tags --long --dirty",
+        "pep440-git-full",
+    ),
     # version=VERSION,
     # python_requires='>3.5.0',
-    author='ETSI OSM',
-    author_email='osmsupport@etsi.org',
-    maintainer='ETSI OSM',
-    maintainer_email='osmsupport@etsi.org',
-    url='https://osm.etsi.org/gitweb/?p=osm/NBI.git;a=summary',
-    license='Apache 2.0',
-
+    author="ETSI OSM",
+    author_email="osmsupport@etsi.org",
+    maintainer="ETSI OSM",
+    maintainer_email="osmsupport@etsi.org",
+    url="https://osm.etsi.org/gitweb/?p=osm/NBI.git;a=summary",
+    license="Apache 2.0",
     packages=find_packages(exclude=["temp", "local"]),
     include_package_data=True,
-    setup_requires=['setuptools-version-command'],
+    setup_requires=["setuptools-version-command"],
 )
diff --git a/tox.ini b/tox.ini
index a0fef5a..5cb66e7 100644 (file)
--- a/tox.ini
+++ b/tox.ini
@@ -33,6 +33,7 @@ deps = black
 skip_install = true
 commands =
         - black --check --diff osm_nbi/
+        - black --check --diff setup.py
 
 
 #######################################################################################
@@ -120,4 +121,5 @@ exclude =
         test_mznmodels.py
 max-line-length = 120
 show-source = True
-builtins = _
\ No newline at end of file
+builtins = _
+