Reformat NBI to standardized format
[osm/NBI.git] / osm_nbi / descriptor_topics.py
index c533054..6bf437d 100644
 import tarfile
 import yaml
 import json
+import importlib
+import copy
+
 # import logging
 from hashlib import md5
 from osm_common.dbbase import DbException, deep_update_rfc7396
 from http import HTTPStatus
-from validation import ValidationError, pdu_new_schema, pdu_edit_schema
-from base_topic import BaseTopic, EngineException, get_iterable
-from osm_im.vnfd import vnfd as vnfd_im
-from osm_im.nsd import nsd as nsd_im
+from time import time
+from uuid import uuid4
+from re import fullmatch
+from osm_nbi.validation import (
+    ValidationError,
+    pdu_new_schema,
+    pdu_edit_schema,
+    validate_input,
+    vnfpkgop_new_schema,
+)
+from osm_nbi.base_topic import BaseTopic, EngineException, get_iterable
+
+etsi_nfv_vnfd = importlib.import_module("osm_im.etsi-nfv-vnfd")
+etsi_nfv_nsd = importlib.import_module("osm_im.etsi-nfv-nsd")
 from osm_im.nst import nst as nst_im
 from pyangbind.lib.serialise import pybindJSONDecoder
 import pyangbind.lib.pybindJSON as pybindJSON
+from osm_nbi import utils
 
 __author__ = "Alfonso Tierno <alfonso.tiernosepulveda@telefonica.com>"
 
 
 class DescriptorTopic(BaseTopic):
-
-    def __init__(self, db, fs, msg):
-        BaseTopic.__init__(self, db, fs, msg)
+    def __init__(self, db, fs, msg, auth):
+        BaseTopic.__init__(self, db, fs, msg, auth)
 
     def check_conflict_on_edit(self, session, final_content, edit_content, _id):
-        super().check_conflict_on_edit(session, final_content, edit_content, _id)
+        final_content = super().check_conflict_on_edit(
+            session, final_content, edit_content, _id
+        )
+
+        def _check_unique_id_name(descriptor, position=""):
+            for desc_key, desc_item in descriptor.items():
+                if isinstance(desc_item, list) and desc_item:
+                    used_ids = []
+                    desc_item_id = None
+                    for index, list_item in enumerate(desc_item):
+                        if isinstance(list_item, dict):
+                            _check_unique_id_name(
+                                list_item, "{}.{}[{}]".format(position, desc_key, index)
+                            )
+                            # Base case
+                            if index == 0 and (
+                                list_item.get("id") or list_item.get("name")
+                            ):
+                                desc_item_id = "id" if list_item.get("id") else "name"
+                            if desc_item_id and list_item.get(desc_item_id):
+                                if list_item[desc_item_id] in used_ids:
+                                    position = "{}.{}[{}]".format(
+                                        position, desc_key, index
+                                    )
+                                    raise EngineException(
+                                        "Error: identifier {} '{}' is not unique and repeats at '{}'".format(
+                                            desc_item_id,
+                                            list_item[desc_item_id],
+                                            position,
+                                        ),
+                                        HTTPStatus.UNPROCESSABLE_ENTITY,
+                                    )
+                                used_ids.append(list_item[desc_item_id])
+
+        _check_unique_id_name(final_content)
         # 1. validate again with pyangbind
         # 1.1. remove internal keys
         internal_keys = {}
@@ -45,25 +92,34 @@ class DescriptorTopic(BaseTopic):
             if k in final_content:
                 internal_keys[k] = final_content.pop(k)
         storage_params = internal_keys["_admin"].get("storage")
-        serialized = self._validate_input_new(final_content, storage_params, session["force"])
+        serialized = self._validate_input_new(
+            final_content, storage_params, session["force"]
+        )
+
         # 1.2. modify final_content with a serialized version
-        final_content.clear()
-        final_content.update(serialized)
+        final_content = copy.deepcopy(serialized)
         # 1.3. restore internal keys
         for k, v in internal_keys.items():
             final_content[k] = v
-
         if session["force"]:
-            return
+            return final_content
+
         # 2. check that this id is not present
         if "id" in edit_content:
             _filter = self._get_project_filter(session)
+
             _filter["id"] = final_content["id"]
             _filter["_id.neq"] = _id
+
             if self.db.get_one(self.topic, _filter, fail_on_empty=False):
-                raise EngineException("{} with id '{}' already exists for this project".format(self.topic[:-1],
-                                                                                               final_content["id"]),
-                                      HTTPStatus.CONFLICT)
+                raise EngineException(
+                    "{} with id '{}' already exists for this project".format(
+                        self.topic[:-1], final_content["id"]
+                    ),
+                    HTTPStatus.CONFLICT,
+                )
+
+        return final_content
 
     @staticmethod
     def format_on_new(content, project_id=None, make_public=False):
@@ -72,7 +128,15 @@ class DescriptorTopic(BaseTopic):
         content["_admin"]["operationalState"] = "DISABLED"
         content["_admin"]["usageState"] = "NOT_IN_USE"
 
-    def delete_extra(self, session, _id):
+    def delete_extra(self, session, _id, db_content, not_send_msg=None):
+        """
+        Deletes file system storage associated with the descriptor
+        :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
+        :param _id: server internal id
+        :param db_content: The database content of the descriptor
+        :param not_send_msg: To not send message (False) or store content (list) instead
+        :return: None if ok or raises EngineException with the problem
+        """
         self.fs.file_delete(_id, ignore_non_exist=True)
         self.fs.file_delete(_id + "_", ignore_non_exist=True)  # remove temp folder
 
@@ -85,20 +149,31 @@ class DescriptorTopic(BaseTopic):
         if len(desc_list) == 1:
             return desc_list[0]
         elif len(desc_list) > 1:
-            raise DbException("Found more than one {} with id='{}' belonging to this project".format(topic[:-1], id),
-                              HTTPStatus.CONFLICT)
+            raise DbException(
+                "Found more than one {} with id='{}' belonging to this project".format(
+                    topic[:-1], id
+                ),
+                HTTPStatus.CONFLICT,
+            )
 
         # not found any: try to find public
         _filter = BaseTopic._get_project_filter(session)
         _filter["id"] = id
         desc_list = db.get_list(topic, _filter)
         if not desc_list:
-            raise DbException("Not found any {} with id='{}'".format(topic[:-1], id), HTTPStatus.NOT_FOUND)
+            raise DbException(
+                "Not found any {} with id='{}'".format(topic[:-1], id),
+                HTTPStatus.NOT_FOUND,
+            )
         elif len(desc_list) == 1:
             return desc_list[0]
         else:
-            raise DbException("Found more than one public {} with id='{}'; and no one belonging to this project".format(
-                topic[:-1], id), HTTPStatus.CONFLICT)
+            raise DbException(
+                "Found more than one public {} with id='{}'; and no one belonging to this project".format(
+                    topic[:-1], id
+                ),
+                HTTPStatus.CONFLICT,
+            )
 
     def new(self, rollback, session, indata=None, kwargs=None, headers=None):
         """
@@ -110,28 +185,32 @@ class DescriptorTopic(BaseTopic):
         :param indata: data to be inserted
         :param kwargs: used to override the indata descriptor
         :param headers: http request headers
-        :return: _id: identity of the inserted data.
+        :return: _id, None: identity of the inserted data; and None as there is not any operation
         """
 
-        try:
-            # _remove_envelop
-            if indata:
-                if "userDefinedData" in indata:
-                    indata = indata['userDefinedData']
-
-            # Override descriptor with query string kwargs
-            self._update_input_with_kwargs(indata, kwargs)
-            # uncomment when this method is implemented.
-            # Avoid override in this case as the target is userDefinedData, but not vnfd,nsd descriptors
-            # indata = DescriptorTopic._validate_input_new(self, indata, project_id=session["force"])
-
-            content = {"_admin": {"userDefinedData": indata}}
-            self.format_on_new(content, session["project_id"], make_public=session["public"])
-            _id = self.db.create(self.topic, content)
-            rollback.append({"topic": self.topic, "_id": _id})
-            return _id
-        except ValidationError as e:
-            raise EngineException(e, HTTPStatus.UNPROCESSABLE_ENTITY)
+        # No needed to capture exceptions
+        # Check Quota
+        self.check_quota(session)
+
+        # _remove_envelop
+        if indata:
+            if "userDefinedData" in indata:
+                indata = indata["userDefinedData"]
+
+        # Override descriptor with query string kwargs
+        self._update_input_with_kwargs(indata, kwargs)
+        # uncomment when this method is implemented.
+        # Avoid override in this case as the target is userDefinedData, but not vnfd,nsd descriptors
+        # indata = DescriptorTopic._validate_input_new(self, indata, project_id=session["force"])
+
+        content = {"_admin": {"userDefinedData": indata}}
+        self.format_on_new(
+            content, session["project_id"], make_public=session["public"]
+        )
+        _id = self.db.create(self.topic, content)
+        rollback.append({"topic": self.topic, "_id": _id})
+        self._send_msg("created", {"_id": _id})
+        return _id, None
 
     def upload_content(self, session, _id, indata, kwargs, headers):
         """
@@ -151,8 +230,12 @@ class DescriptorTopic(BaseTopic):
         expected_md5 = headers.get("Content-File-MD5")
         compressed = None
         content_type = headers.get("Content-Type")
-        if content_type and "application/gzip" in content_type or "application/x-gzip" in content_type or \
-                "application/zip" in content_type:
+        if (
+            content_type
+            and "application/gzip" in content_type
+            or "application/x-gzip" in content_type
+            or "application/zip" in content_type
+        ):
             compressed = "gzip"
         filename = headers.get("Content-Filename")
         if not filename:
@@ -162,19 +245,27 @@ class DescriptorTopic(BaseTopic):
         error_text = ""
         try:
             if content_range_text:
-                content_range = content_range_text.replace("-", " ").replace("/", " ").split()
-                if content_range[0] != "bytes":  # TODO check x<y not negative < total....
+                content_range = (
+                    content_range_text.replace("-", " ").replace("/", " ").split()
+                )
+                if (
+                    content_range[0] != "bytes"
+                ):  # TODO check x<y not negative < total....
                     raise IndexError()
                 start = int(content_range[1])
                 end = int(content_range[2]) + 1
                 total = int(content_range[3])
             else:
                 start = 0
-            temp_folder = _id + "_"  # all the content is upload here and if ok, it is rename from id_ to is folder
+            temp_folder = (
+                _id + "_"
+            )  # all the content is upload here and if ok, it is rename from id_ to is folder
 
             if start:
-                if not self.fs.file_exists(temp_folder, 'dir'):
-                    raise EngineException("invalid Transaction-Id header", HTTPStatus.NOT_FOUND)
+                if not self.fs.file_exists(temp_folder, "dir"):
+                    raise EngineException(
+                        "invalid Transaction-Id header", HTTPStatus.NOT_FOUND
+                    )
             else:
                 self.fs.file_delete(temp_folder, ignore_non_exist=True)
                 self.fs.mkdir(temp_folder)
@@ -183,14 +274,18 @@ class DescriptorTopic(BaseTopic):
             storage["folder"] = _id
 
             file_path = (temp_folder, filename)
-            if self.fs.file_exists(file_path, 'file'):
+            if self.fs.file_exists(file_path, "file"):
                 file_size = self.fs.file_size(file_path)
             else:
                 file_size = 0
             if file_size != start:
-                raise EngineException("invalid Content-Range start sequence, expected '{}' but received '{}'".format(
-                    file_size, start), HTTPStatus.REQUESTED_RANGE_NOT_SATISFIABLE)
-            file_pkg = self.fs.file_open(file_path, 'a+b')
+                raise EngineException(
+                    "invalid Content-Range start sequence, expected '{}' but received '{}'".format(
+                        file_size, start
+                    ),
+                    HTTPStatus.REQUESTED_RANGE_NOT_SATISFIABLE,
+                )
+            file_pkg = self.fs.file_open(file_path, "a+b")
             if isinstance(indata, dict):
                 indata_text = yaml.safe_dump(indata, indent=4, default_flow_style=False)
                 file_pkg.write(indata_text.encode(encoding="utf-8"))
@@ -203,9 +298,13 @@ class DescriptorTopic(BaseTopic):
                         break
                     file_pkg.write(indata_text)
             if content_range_text:
-                if indata_len != end-start:
-                    raise EngineException("Mismatch between Content-Range header {}-{} and body length of {}".format(
-                        start, end-1, indata_len), HTTPStatus.REQUESTED_RANGE_NOT_SATISFIABLE)
+                if indata_len != end - start:
+                    raise EngineException(
+                        "Mismatch between Content-Range header {}-{} and body length of {}".format(
+                            start, end - 1, indata_len
+                        ),
+                        HTTPStatus.REQUESTED_RANGE_NOT_SATISFIABLE,
+                    )
                 if end != total:
                     # TODO update to UPLOADING
                     return False
@@ -222,28 +321,43 @@ class DescriptorTopic(BaseTopic):
                     raise EngineException("Error, MD5 mismatch", HTTPStatus.CONFLICT)
             file_pkg.seek(0, 0)
             if compressed == "gzip":
-                tar = tarfile.open(mode='r', fileobj=file_pkg)
+                tar = tarfile.open(mode="r", fileobj=file_pkg)
                 descriptor_file_name = None
                 for tarinfo in tar:
                     tarname = tarinfo.name
                     tarname_path = tarname.split("/")
-                    if not tarname_path[0] or ".." in tarname_path:  # if start with "/" means absolute path
-                        raise EngineException("Absolute path or '..' are not allowed for package descriptor tar.gz")
+                    if (
+                        not tarname_path[0] or ".." in tarname_path
+                    ):  # if start with "/" means absolute path
+                        raise EngineException(
+                            "Absolute path or '..' are not allowed for package descriptor tar.gz"
+                        )
                     if len(tarname_path) == 1 and not tarinfo.isdir():
-                        raise EngineException("All files must be inside a dir for package descriptor tar.gz")
-                    if tarname.endswith(".yaml") or tarname.endswith(".json") or tarname.endswith(".yml"):
+                        raise EngineException(
+                            "All files must be inside a dir for package descriptor tar.gz"
+                        )
+                    if (
+                        tarname.endswith(".yaml")
+                        or tarname.endswith(".json")
+                        or tarname.endswith(".yml")
+                    ):
                         storage["pkg-dir"] = tarname_path[0]
                         if len(tarname_path) == 2:
                             if descriptor_file_name:
                                 raise EngineException(
-                                    "Found more than one descriptor file at package descriptor tar.gz")
+                                    "Found more than one descriptor file at package descriptor tar.gz"
+                                )
                             descriptor_file_name = tarname
                 if not descriptor_file_name:
-                    raise EngineException("Not found any descriptor file at package descriptor tar.gz")
+                    raise EngineException(
+                        "Not found any descriptor file at package descriptor tar.gz"
+                    )
                 storage["descriptor"] = descriptor_file_name
                 storage["zipfile"] = filename
                 self.fs.file_extract(tar, temp_folder)
-                with self.fs.file_open((temp_folder, descriptor_file_name), "r") as descriptor_file:
+                with self.fs.file_open(
+                    (temp_folder, descriptor_file_name), "r"
+                ) as descriptor_file:
                     content = descriptor_file.read()
             else:
                 content = file_pkg.read()
@@ -254,7 +368,7 @@ class DescriptorTopic(BaseTopic):
                 indata = json.load(content)
             else:
                 error_text = "Invalid yaml format "
-                indata = yaml.load(content)
+                indata = yaml.load(content, Loader=yaml.SafeLoader)
 
             current_desc["_admin"]["storage"] = storage
             current_desc["_admin"]["onboardingState"] = "ONBOARDED"
@@ -265,16 +379,17 @@ class DescriptorTopic(BaseTopic):
             # Override descriptor with query string kwargs
             if kwargs:
                 self._update_input_with_kwargs(indata, kwargs)
-            # it will call overrides method at VnfdTopic or NsdTopic
-            # indata = self._validate_input_edit(indata, force=session["force"])
 
             deep_update_rfc7396(current_desc, indata)
-            self.check_conflict_on_edit(session, current_desc, indata, _id=_id)
+            current_desc = self.check_conflict_on_edit(
+                session, current_desc, indata, _id=_id
+            )
+            current_desc["_admin"]["modified"] = time()
             self.db.replace(self.topic, _id, current_desc)
             self.fs.dir_rename(temp_folder, _id)
 
             indata["_id"] = _id
-            self._send_msg("created", indata)
+            self._send_msg("edited", indata)
 
             # TODO if descriptor has changed because kwargs update content and remove cached zip
             # TODO if zip is not present creates one
@@ -283,12 +398,19 @@ class DescriptorTopic(BaseTopic):
         except EngineException:
             raise
         except IndexError:
-            raise EngineException("invalid Content-Range header format. Expected 'bytes start-end/total'",
-                                  HTTPStatus.REQUESTED_RANGE_NOT_SATISFIABLE)
+            raise EngineException(
+                "invalid Content-Range header format. Expected 'bytes start-end/total'",
+                HTTPStatus.REQUESTED_RANGE_NOT_SATISFIABLE,
+            )
         except IOError as e:
-            raise EngineException("invalid upload transaction sequence: '{}'".format(e), HTTPStatus.BAD_REQUEST)
+            raise EngineException(
+                "invalid upload transaction sequence: '{}'".format(e),
+                HTTPStatus.BAD_REQUEST,
+            )
         except tarfile.ReadError as e:
-            raise EngineException("invalid file content {}".format(e), HTTPStatus.BAD_REQUEST)
+            raise EngineException(
+                "invalid file content {}".format(e), HTTPStatus.BAD_REQUEST
+            )
         except (ValueError, yaml.YAMLError) as e:
             raise EngineException(error_text + str(e))
         except ValidationError as e:
@@ -308,109 +430,213 @@ class DescriptorTopic(BaseTopic):
         """
         accept_text = accept_zip = False
         if accept_header:
-            if 'text/plain' in accept_header or '*/*' in accept_header:
+            if "text/plain" in accept_header or "*/*" in accept_header:
                 accept_text = True
-            if 'application/zip' in accept_header or '*/*' in accept_header:
-                accept_zip = 'application/zip'
-            elif 'application/gzip' in accept_header:
-                accept_zip = 'application/gzip'
+            if "application/zip" in accept_header or "*/*" in accept_header:
+                accept_zip = "application/zip"
+            elif "application/gzip" in accept_header:
+                accept_zip = "application/gzip"
 
         if not accept_text and not accept_zip:
-            raise EngineException("provide request header 'Accept' with 'application/zip' or 'text/plain'",
-                                  http_code=HTTPStatus.NOT_ACCEPTABLE)
+            raise EngineException(
+                "provide request header 'Accept' with 'application/zip' or 'text/plain'",
+                http_code=HTTPStatus.NOT_ACCEPTABLE,
+            )
 
         content = self.show(session, _id)
         if content["_admin"]["onboardingState"] != "ONBOARDED":
-            raise EngineException("Cannot get content because this resource is not at 'ONBOARDED' state. "
-                                  "onboardingState is {}".format(content["_admin"]["onboardingState"]),
-                                  http_code=HTTPStatus.CONFLICT)
+            raise EngineException(
+                "Cannot get content because this resource is not at 'ONBOARDED' state. "
+                "onboardingState is {}".format(content["_admin"]["onboardingState"]),
+                http_code=HTTPStatus.CONFLICT,
+            )
         storage = content["_admin"]["storage"]
-        if path is not None and path != "$DESCRIPTOR":   # artifacts
-            if not storage.get('pkg-dir'):
-                raise EngineException("Packages does not contains artifacts", http_code=HTTPStatus.BAD_REQUEST)
-            if self.fs.file_exists((storage['folder'], storage['pkg-dir'], *path), 'dir'):
-                folder_content = self.fs.dir_ls((storage['folder'], storage['pkg-dir'], *path))
+        if path is not None and path != "$DESCRIPTOR":  # artifacts
+            if not storage.get("pkg-dir"):
+                raise EngineException(
+                    "Packages does not contains artifacts",
+                    http_code=HTTPStatus.BAD_REQUEST,
+                )
+            if self.fs.file_exists(
+                (storage["folder"], storage["pkg-dir"], *path), "dir"
+            ):
+                folder_content = self.fs.dir_ls(
+                    (storage["folder"], storage["pkg-dir"], *path)
+                )
                 return folder_content, "text/plain"
                 # TODO manage folders in http
             else:
-                return self.fs.file_open((storage['folder'], storage['pkg-dir'], *path), "rb"),\
-                    "application/octet-stream"
+                return (
+                    self.fs.file_open(
+                        (storage["folder"], storage["pkg-dir"], *path), "rb"
+                    ),
+                    "application/octet-stream",
+                )
 
         # pkgtype   accept  ZIP  TEXT    -> result
         # manyfiles         yes  X       -> zip
         #                   no   yes     -> error
         # onefile           yes  no      -> zip
         #                   X    yes     -> text
-
-        if accept_text and (not storage.get('pkg-dir') or path == "$DESCRIPTOR"):
-            return self.fs.file_open((storage['folder'], storage['descriptor']), "r"), "text/plain"
-        elif storage.get('pkg-dir') and not accept_zip:
-            raise EngineException("Packages that contains several files need to be retrieved with 'application/zip'"
-                                  "Accept header", http_code=HTTPStatus.NOT_ACCEPTABLE)
+        contain_many_files = False
+        if storage.get("pkg-dir"):
+            # check if there are more than one file in the package, ignoring checksums.txt.
+            pkg_files = self.fs.dir_ls((storage["folder"], storage["pkg-dir"]))
+            if len(pkg_files) >= 3 or (
+                len(pkg_files) == 2 and "checksums.txt" not in pkg_files
+            ):
+                contain_many_files = True
+        if accept_text and (not contain_many_files or path == "$DESCRIPTOR"):
+            return (
+                self.fs.file_open((storage["folder"], storage["descriptor"]), "r"),
+                "text/plain",
+            )
+        elif contain_many_files and not accept_zip:
+            raise EngineException(
+                "Packages that contains several files need to be retrieved with 'application/zip'"
+                "Accept header",
+                http_code=HTTPStatus.NOT_ACCEPTABLE,
+            )
         else:
-            if not storage.get('zipfile'):
+            if not storage.get("zipfile"):
                 # TODO generate zipfile if not present
-                raise EngineException("Only allowed 'text/plain' Accept header for this descriptor. To be solved in "
-                                      "future versions", http_code=HTTPStatus.NOT_ACCEPTABLE)
-            return self.fs.file_open((storage['folder'], storage['zipfile']), "rb"), accept_zip
+                raise EngineException(
+                    "Only allowed 'text/plain' Accept header for this descriptor. To be solved in "
+                    "future versions",
+                    http_code=HTTPStatus.NOT_ACCEPTABLE,
+                )
+            return (
+                self.fs.file_open((storage["folder"], storage["zipfile"]), "rb"),
+                accept_zip,
+            )
+
+    def _remove_yang_prefixes_from_descriptor(self, descriptor):
+        new_descriptor = {}
+        for k, v in descriptor.items():
+            new_v = v
+            if isinstance(v, dict):
+                new_v = self._remove_yang_prefixes_from_descriptor(v)
+            elif isinstance(v, list):
+                new_v = list()
+                for x in v:
+                    if isinstance(x, dict):
+                        new_v.append(self._remove_yang_prefixes_from_descriptor(x))
+                    else:
+                        new_v.append(x)
+            new_descriptor[k.split(":")[-1]] = new_v
+        return new_descriptor
 
     def pyangbind_validation(self, item, data, force=False):
 
     def pyangbind_validation(self, item, data, force=False):
-        try:
-            if item == "vnfds":
-                myvnfd = vnfd_im()
-                pybindJSONDecoder.load_ietf_json({'vnfd:vnfd-catalog': {'vnfd': [data]}}, None, None, obj=myvnfd,
-                                                 path_helper=True, skip_unknown=force)
-                out = pybindJSON.dumps(myvnfd, mode="ietf")
-            elif item == "nsds":
-                mynsd = nsd_im()
-                pybindJSONDecoder.load_ietf_json({'nsd:nsd-catalog': {'nsd': [data]}}, None, None, obj=mynsd,
-                                                 path_helper=True, skip_unknown=force)
-                out = pybindJSON.dumps(mynsd, mode="ietf")
-            elif item == "nsts":
-                mynst = nst_im()
-                pybindJSONDecoder.load_ietf_json({'nst': [data]}, None, None, obj=mynst,
-                                                 path_helper=True, skip_unknown=force)
-                out = pybindJSON.dumps(mynst, mode="ietf")
-            else:
-                raise EngineException("Not possible to validate '{}' item".format(item),
-                                      http_code=HTTPStatus.INTERNAL_SERVER_ERROR)
+        raise EngineException(
+            "Not possible to validate '{}' item".format(item),
+            http_code=HTTPStatus.INTERNAL_SERVER_ERROR,
+        )
 
 
-            desc_out = self._remove_envelop(yaml.safe_load(out))
-            return desc_out
+    def _validate_input_edit(self, indata, content, force=False):
+        # not needed to validate with pyangbind becuase it will be validated at check_conflict_on_edit
+        if "_id" in indata:
+            indata.pop("_id")
+        if "_admin" not in indata:
+            indata["_admin"] = {}
+
+        if "operationalState" in indata:
+            if indata["operationalState"] in ("ENABLED", "DISABLED"):
+                indata["_admin"]["operationalState"] = indata.pop("operationalState")
+            else:
+                raise EngineException(
+                    "State '{}' is not a valid operational state".format(
+                        indata["operationalState"]
+                    ),
+                    http_code=HTTPStatus.BAD_REQUEST,
+                )
+
+        # In the case of user defined data, we need to put the data in the root of the object
+        # to preserve current expected behaviour
+        if "userDefinedData" in indata:
+            data = indata.pop("userDefinedData")
+            if type(data) == dict:
+                indata["_admin"]["userDefinedData"] = data
+            else:
+                raise EngineException(
+                    "userDefinedData should be an object, but is '{}' instead".format(
+                        type(data)
+                    ),
+                    http_code=HTTPStatus.BAD_REQUEST,
+                )
+
+        if (
+            "operationalState" in indata["_admin"]
+            and content["_admin"]["operationalState"]
+            == indata["_admin"]["operationalState"]
+        ):
+            raise EngineException(
+                "operationalState already {}".format(
+                    content["_admin"]["operationalState"]
+                ),
+                http_code=HTTPStatus.CONFLICT,
+            )
 
 
-        except Exception as e:
-            raise EngineException("Error in pyangbind validation: {}".format(str(e)),
-                                  http_code=HTTPStatus.UNPROCESSABLE_ENTITY)
+        return indata


 class VnfdTopic(DescriptorTopic):
     topic = "vnfds"
     topic_msg = "vnfd"
 
-    def __init__(self, db, fs, msg):
-        DescriptorTopic.__init__(self, db, fs, msg)
+    def __init__(self, db, fs, msg, auth):
+        DescriptorTopic.__init__(self, db, fs, msg, auth)
+
+    def pyangbind_validation(self, item, data, force=False):
+        if self._descriptor_data_is_in_old_format(data):
+            raise EngineException(
+                "ERROR: Unsupported descriptor format. Please, use an ETSI SOL006 descriptor.",
+                http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
+            )
+        try:
+            myvnfd = etsi_nfv_vnfd.etsi_nfv_vnfd()
+            pybindJSONDecoder.load_ietf_json(
+                {"etsi-nfv-vnfd:vnfd": data},
+                None,
+                None,
+                obj=myvnfd,
+                path_helper=True,
+                skip_unknown=force,
+            )
+            out = pybindJSON.dumps(myvnfd, mode="ietf")
+            desc_out = self._remove_envelop(yaml.safe_load(out))
+            desc_out = self._remove_yang_prefixes_from_descriptor(desc_out)
+            return utils.deep_update_dict(data, desc_out)
+        except Exception as e:
+            raise EngineException(
+                "Error in pyangbind validation: {}".format(str(e)),
+                http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
+            )
+
+    @staticmethod
+    def _descriptor_data_is_in_old_format(data):
+        return ("vnfd-catalog" in data) or ("vnfd:vnfd-catalog" in data)
 
     @staticmethod
     def _remove_envelop(indata=None):
         if not indata:
             return {}
         clean_indata = indata
-        if clean_indata.get('vnfd:vnfd-catalog'):
-            clean_indata = clean_indata['vnfd:vnfd-catalog']
-        elif clean_indata.get('vnfd-catalog'):
-            clean_indata = clean_indata['vnfd-catalog']
-        if clean_indata.get('vnfd'):
-            if not isinstance(clean_indata['vnfd'], list) or len(clean_indata['vnfd']) != 1:
-                raise EngineException("'vnfd' must be a list of only one element")
-            clean_indata = clean_indata['vnfd'][0]
-        elif clean_indata.get('vnfd:vnfd'):
-            if not isinstance(clean_indata['vnfd:vnfd'], list) or len(clean_indata['vnfd:vnfd']) != 1:
-                raise EngineException("'vnfd:vnfd' must be a list of only one element")
-            clean_indata = clean_indata['vnfd:vnfd'][0]
+
+        if clean_indata.get("etsi-nfv-vnfd:vnfd"):
+            if not isinstance(clean_indata["etsi-nfv-vnfd:vnfd"], dict):
+                raise EngineException("'etsi-nfv-vnfd:vnfd' must be a dict")
+            clean_indata = clean_indata["etsi-nfv-vnfd:vnfd"]
+        elif clean_indata.get("vnfd"):
+            if not isinstance(clean_indata["vnfd"], dict):
+                raise EngineException("'vnfd' must be dict")
+            clean_indata = clean_indata["vnfd"]
+
         return clean_indata
 
     def check_conflict_on_edit(self, session, final_content, edit_content, _id):
         return clean_indata
 
     def check_conflict_on_edit(self, session, final_content, edit_content, _id):
-        super().check_conflict_on_edit(session, final_content, edit_content, _id)
+        final_content = super().check_conflict_on_edit(
+            session, final_content, edit_content, _id
+        )
 
         # set type of vnfd
         contains_pdu = False
 
         # set type of vnfd
         contains_pdu = False
@@ -425,224 +651,438 @@ class VnfdTopic(DescriptorTopic):
         elif contains_vdu:
             final_content["_admin"]["type"] = "vnfd"
         # if neither vud nor pdu do not fill type
         elif contains_vdu:
             final_content["_admin"]["type"] = "vnfd"
         # if neither vud nor pdu do not fill type
+        return final_content
 
 
-    def check_conflict_on_del(self, session, _id):
+    def check_conflict_on_del(self, session, _id, db_content):
         """
         Check that there is not any NSD that uses this VNFD. Only NSDs belonging to this project are considered. Note
         that VNFD can be public and be used by NSD of other projects. Also check there are not deployments, or vnfr
         that uses this vnfd
         :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
-        :param _id: vnfd inernal id
+        :param _id: vnfd internal id
+        :param db_content: The database content of the _id.
         :return: None or raises EngineException with the conflict
         """
         if session["force"]:
             return
-        descriptor = self.db.get_one("vnfds", {"_id": _id})
+        descriptor = db_content
         descriptor_id = descriptor.get("id")
         if not descriptor_id:  # empty vnfd not uploaded
             return
 
         _filter = self._get_project_filter(session)
+
         # check vnfrs using this vnfd
         _filter["vnfd-id"] = _id
         if self.db.get_list("vnfrs", _filter):
-            raise EngineException("There is some VNFR that depends on this VNFD", http_code=HTTPStatus.CONFLICT)
+            raise EngineException(
+                "There is at least one VNF instance using this descriptor",
+                http_code=HTTPStatus.CONFLICT,
+            )
+
+        # check NSD referencing this VNFD
         del _filter["vnfd-id"]
-        # check NSD using this VNFD
-        _filter["constituent-vnfd.ANYINDEX.vnfd-id-ref"] = descriptor_id
+        _filter["vnfd-id"] = descriptor_id
         if self.db.get_list("nsds", _filter):
-            raise EngineException("There is at least a NSD that depends on this VNFD", http_code=HTTPStatus.CONFLICT)
+            raise EngineException(
+                "There is at least one NS package referencing this descriptor",
+                http_code=HTTPStatus.CONFLICT,
+            )
 
     def _validate_input_new(self, indata, storage_params, force=False):
+        indata.pop("onboardingState", None)
+        indata.pop("operationalState", None)
+        indata.pop("usageState", None)
+        indata.pop("links", None)
+
         indata = self.pyangbind_validation("vnfds", indata, force)
         # Cross references validation in the descriptor
-        if indata.get("vdu"):
-            if not indata.get("mgmt-interface"):
-                raise EngineException("'mgmt-interface' is a mandatory field and it is not defined",
-                                      http_code=HTTPStatus.UNPROCESSABLE_ENTITY)
-            if indata["mgmt-interface"].get("cp"):
-                for cp in get_iterable(indata.get("connection-point")):
-                    if cp["name"] == indata["mgmt-interface"]["cp"]:
-                        break
-                else:
-                    raise EngineException("mgmt-interface:cp='{}' must match an existing connection-point"
-                                          .format(indata["mgmt-interface"]["cp"]),
-                                          http_code=HTTPStatus.UNPROCESSABLE_ENTITY)
+
+        self.validate_mgmt_interface_connection_point(indata)
 
         for vdu in get_iterable(indata.get("vdu")):
-            for interface in get_iterable(vdu.get("interface")):
-                if interface.get("external-connection-point-ref"):
-                    for cp in get_iterable(indata.get("connection-point")):
-                        if cp["name"] == interface["external-connection-point-ref"]:
-                            break
-                    else:
-                        raise EngineException("vdu[id='{}']:interface[name='{}']:external-connection-point-ref='{}' "
-                                              "must match an existing connection-point"
-                                              .format(vdu["id"], interface["name"],
-                                                      interface["external-connection-point-ref"]),
-                                              http_code=HTTPStatus.UNPROCESSABLE_ENTITY)
-
-                elif interface.get("internal-connection-point-ref"):
-                    for internal_cp in get_iterable(vdu.get("internal-connection-point")):
-                        if interface["internal-connection-point-ref"] == internal_cp.get("id"):
-                            break
-                    else:
-                        raise EngineException("vdu[id='{}']:interface[name='{}']:internal-connection-point-ref='{}' "
-                                              "must match an existing vdu:internal-connection-point"
-                                              .format(vdu["id"], interface["name"],
-                                                      interface["internal-connection-point-ref"]),
-                                              http_code=HTTPStatus.UNPROCESSABLE_ENTITY)
-            # Validate that if descriptor contains charms, artifacts _admin.storage."pkg-dir" is not none
-            if vdu.get("vdu-configuration"):
-                if vdu["vdu-configuration"].get("juju"):
-                    if not self._validate_package_folders(storage_params, 'charms'):
-                        raise EngineException("Charm defined in vnf[id={}]:vdu[id={}] but not present in "
-                                              "package".format(indata["id"], vdu["id"]))
-            # Validate that if descriptor contains cloud-init, artifacts _admin.storage."pkg-dir" is not none
-            if vdu.get("cloud-init-file"):
-                if not self._validate_package_folders(storage_params, 'cloud_init', vdu["cloud-init-file"]):
-                    raise EngineException("Cloud-init defined in vnf[id={}]:vdu[id={}] but not present in "
-                                          "package".format(indata["id"], vdu["id"]))
-        # Validate that if descriptor contains charms, artifacts _admin.storage."pkg-dir" is not none
-        if indata.get("vnf-configuration"):
-            if indata["vnf-configuration"].get("juju"):
-                if not self._validate_package_folders(storage_params, 'charms'):
-                    raise EngineException("Charm defined in vnf[id={}] but not present in "
-                                          "package".format(indata["id"]))
-        vld_names = []  # For detection of duplicated VLD names
-        for ivld in get_iterable(indata.get("internal-vld")):
-            # BEGIN Detection of duplicated VLD names
-            ivld_name = ivld["name"]
-            if ivld_name in vld_names:
-                raise EngineException("Duplicated VLD name '{}' in vnfd[id={}]:internal-vld[id={}]"
-                                      .format(ivld["name"], indata["id"], ivld["id"]),
-                                      http_code=HTTPStatus.UNPROCESSABLE_ENTITY)
-            else:
-                vld_names.append(ivld_name)
-            # END Detection of duplicated VLD names
-            for icp in get_iterable(ivld.get("internal-connection-point")):
-                icp_mark = False
-                for vdu in get_iterable(indata.get("vdu")):
-                    for internal_cp in get_iterable(vdu.get("internal-connection-point")):
-                        if icp["id-ref"] == internal_cp["id"]:
-                            icp_mark = True
-                            break
-                    if icp_mark:
-                        break
-                else:
-                    raise EngineException("internal-vld[id='{}']:internal-connection-point='{}' must match an existing "
-                                          "vdu:internal-connection-point".format(ivld["id"], icp["id-ref"]),
-                                          http_code=HTTPStatus.UNPROCESSABLE_ENTITY)
-            if ivld.get("ip-profile-ref"):
-                for ip_prof in get_iterable(indata.get("ip-profiles")):
-                    if ip_prof["name"] == get_iterable(ivld.get("ip-profile-ref")):
-                        break
-                else:
-                    raise EngineException("internal-vld[id='{}']:ip-profile-ref='{}' does not exist".format(
-                        ivld["id"], ivld["ip-profile-ref"]),
-                        http_code=HTTPStatus.UNPROCESSABLE_ENTITY)
-        for mp in get_iterable(indata.get("monitoring-param")):
-            if mp.get("vdu-monitoring-param"):
-                mp_vmp_mark = False
-                for vdu in get_iterable(indata.get("vdu")):
-                    for vmp in get_iterable(vdu.get("monitoring-param")):
-                        if vmp["id"] == mp["vdu-monitoring-param"].get("vdu-monitoring-param-ref") and vdu["id"] ==\
-                                mp["vdu-monitoring-param"]["vdu-ref"]:
-                            mp_vmp_mark = True
-                            break
-                    if mp_vmp_mark:
-                        break
-                else:
-                    raise EngineException("monitoring-param:vdu-monitoring-param:vdu-monitoring-param-ref='{}' not "
-                                          "defined at vdu[id='{}'] or vdu does not exist"
-                                          .format(mp["vdu-monitoring-param"]["vdu-monitoring-param-ref"],
-                                                  mp["vdu-monitoring-param"]["vdu-ref"]),
-                                          http_code=HTTPStatus.UNPROCESSABLE_ENTITY)
-            elif mp.get("vdu-metric"):
-                mp_vm_mark = False
-                for vdu in get_iterable(indata.get("vdu")):
-                    if vdu.get("vdu-configuration"):
-                        for metric in get_iterable(vdu["vdu-configuration"].get("metrics")):
-                            if metric["name"] == mp["vdu-metric"]["vdu-metric-name-ref"] and vdu["id"] == \
-                                    mp["vdu-metric"]["vdu-ref"]:
-                                mp_vm_mark = True
-                                break
-                        if mp_vm_mark:
-                            break
-                else:
-                    raise EngineException("monitoring-param:vdu-metric:vdu-metric-name-ref='{}' not defined at "
-                                          "vdu[id='{}'] or vdu does not exist"
-                                          .format(mp["vdu-metric"]["vdu-metric-name-ref"],
-                                                  mp["vdu-metric"]["vdu-ref"]),
-                                          http_code=HTTPStatus.UNPROCESSABLE_ENTITY)
-
-        for sgd in get_iterable(indata.get("scaling-group-descriptor")):
-            for sp in get_iterable(sgd.get("scaling-policy")):
-                for sc in get_iterable(sp.get("scaling-criteria")):
-                    for mp in get_iterable(indata.get("monitoring-param")):
-                        if mp["id"] == get_iterable(sc.get("vnf-monitoring-param-ref")):
-                            break
-                    else:
-                        raise EngineException("scaling-group-descriptor[name='{}']:scaling-criteria[name='{}']:"
-                                              "vnf-monitoring-param-ref='{}' not defined in any monitoring-param"
-                                              .format(sgd["name"], sc["name"], sc["vnf-monitoring-param-ref"]),
-                                              http_code=HTTPStatus.UNPROCESSABLE_ENTITY)
-            for sgd_vdu in get_iterable(sgd.get("vdu")):
-                sgd_vdu_mark = False
-                for vdu in get_iterable(indata.get("vdu")):
-                    if vdu["id"] == sgd_vdu["vdu-id-ref"]:
-                        sgd_vdu_mark = True
-                        break
-                if sgd_vdu_mark:
-                    break
-            else:
-                raise EngineException("scaling-group-descriptor[name='{}']:vdu-id-ref={} does not match any vdu"
-                                      .format(sgd["name"], sgd_vdu["vdu-id-ref"]),
-                                      http_code=HTTPStatus.UNPROCESSABLE_ENTITY)
-            for sca in get_iterable(sgd.get("scaling-config-action")):
-                if not indata.get("vnf-configuration"):
-                    raise EngineException("'vnf-configuration' not defined in the descriptor but it is referenced by "
-                                          "scaling-group-descriptor[name='{}']:scaling-config-action"
-                                          .format(sgd["name"]),
-                                          http_code=HTTPStatus.UNPROCESSABLE_ENTITY)
-                for primitive in get_iterable(indata["vnf-configuration"].get("config-primitive")):
-                    if primitive["name"] == sca["vnf-config-primitive-name-ref"]:
-                        break
-                else:
-                    raise EngineException("scaling-group-descriptor[name='{}']:scaling-config-action:vnf-config-"
-                                          "primitive-name-ref='{}' does not match any "
-                                          "vnf-configuration:config-primitive:name"
-                                          .format(sgd["name"], sca["vnf-config-primitive-name-ref"]),
-                                          http_code=HTTPStatus.UNPROCESSABLE_ENTITY)
-        return indata
+            self.validate_vdu_internal_connection_points(vdu)
+            self._validate_vdu_cloud_init_in_package(storage_params, vdu, indata)
+        self._validate_vdu_charms_in_package(storage_params, indata)
+
+        self._validate_vnf_charms_in_package(storage_params, indata)
+
+        self.validate_external_connection_points(indata)
+        self.validate_internal_virtual_links(indata)
+        self.validate_monitoring_params(indata)
+        self.validate_scaling_group_descriptor(indata)
 
 
-    def _validate_input_edit(self, indata, force=False):
-        # not needed to validate with pyangbind becuase it will be validated at check_conflict_on_edit
         return indata
 
+    @staticmethod
+    def validate_mgmt_interface_connection_point(indata):
+        if not indata.get("vdu"):
+            return
+        if not indata.get("mgmt-cp"):
+            raise EngineException(
+                "'mgmt-cp' is a mandatory field and it is not defined",
+                http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
+            )
+
+        for cp in get_iterable(indata.get("ext-cpd")):
+            if cp["id"] == indata["mgmt-cp"]:
+                break
+        else:
+            raise EngineException(
+                "mgmt-cp='{}' must match an existing ext-cpd".format(indata["mgmt-cp"]),
+                http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
+            )
+
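A minimal sketch of what this check enforces, assuming these static validators live on the VNFD topic class (called VnfdTopic here; its definition falls outside this hunk) and using invented descriptor values:

good = {"vdu": [{"id": "vdu-a"}], "mgmt-cp": "vnf-mgmt-ext",
        "ext-cpd": [{"id": "vnf-mgmt-ext"}]}
bad = {"vdu": [{"id": "vdu-a"}], "mgmt-cp": "no-such-cp",
       "ext-cpd": [{"id": "vnf-mgmt-ext"}]}
VnfdTopic.validate_mgmt_interface_connection_point(good)  # returns silently
VnfdTopic.validate_mgmt_interface_connection_point(bad)   # raises EngineException (HTTP 422)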
+    @staticmethod
+    def validate_vdu_internal_connection_points(vdu):
+        int_cpds = set()
+        for cpd in get_iterable(vdu.get("int-cpd")):
+            cpd_id = cpd.get("id")
+            if cpd_id and cpd_id in int_cpds:
+                raise EngineException(
+                    "vdu[id='{}']:int-cpd[id='{}'] is already used by other int-cpd".format(
+                        vdu["id"], cpd_id
+                    ),
+                    http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
+                )
+            int_cpds.add(cpd_id)
+
+    @staticmethod
+    def validate_external_connection_points(indata):
+        all_vdus_int_cpds = set()
+        for vdu in get_iterable(indata.get("vdu")):
+            for int_cpd in get_iterable(vdu.get("int-cpd")):
+                all_vdus_int_cpds.add((vdu.get("id"), int_cpd.get("id")))
+
+        ext_cpds = set()
+        for cpd in get_iterable(indata.get("ext-cpd")):
+            cpd_id = cpd.get("id")
+            if cpd_id and cpd_id in ext_cpds:
+                raise EngineException(
+                    "ext-cpd[id='{}'] is already used by other ext-cpd".format(cpd_id),
+                    http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
+                )
+            ext_cpds.add(cpd_id)
+
+            int_cpd = cpd.get("int-cpd")
+            if int_cpd:
+                if (int_cpd.get("vdu-id"), int_cpd.get("cpd")) not in all_vdus_int_cpds:
+                    raise EngineException(
+                        "ext-cpd[id='{}']:int-cpd must match an existing vdu int-cpd".format(
+                            cpd_id
+                        ),
+                        http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
+                    )
+            # TODO: Validate k8s-cluster-net points to a valid k8s-cluster:nets ?
+
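For illustration (invented fragment, same VnfdTopic assumption as above): an ext-cpd is accepted only if its id is unique and its optional int-cpd reference resolves to a (vdu-id, cpd) pair declared under some vdu.

indata = {
    "vdu": [{"id": "vdu-a", "int-cpd": [{"id": "eth0"}]}],
    "ext-cpd": [{"id": "vnf-ext", "int-cpd": {"vdu-id": "vdu-a", "cpd": "eth0"}}],
}
VnfdTopic.validate_external_connection_points(indata)  # passes; an unknown pair would raise 422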
+    def _validate_vdu_charms_in_package(self, storage_params, indata):
+        for df in indata["df"]:
+            if (
+                "lcm-operations-configuration" in df
+                and "operate-vnf-op-config" in df["lcm-operations-configuration"]
+            ):
+                configs = df["lcm-operations-configuration"][
+                    "operate-vnf-op-config"
+                ].get("day1-2", [])
+                vdus = df.get("vdu-profile", [])
+                for vdu in vdus:
+                    for config in configs:
+                        if config["id"] == vdu["id"] and utils.find_in_list(
+                            config.get("execution-environment-list", []),
+                            lambda ee: "juju" in ee,
+                        ):
+                            if not self._validate_package_folders(
+                                storage_params, "charms"
+                            ):
+                                raise EngineException(
+                                    "Charm defined in vnf[id={}] but not present in "
+                                    "package".format(indata["id"])
+                                )
+
+    def _validate_vdu_cloud_init_in_package(self, storage_params, vdu, indata):
+        if not vdu.get("cloud-init-file"):
+            return
+        if not self._validate_package_folders(
+            storage_params, "cloud_init", vdu["cloud-init-file"]
+        ):
+            raise EngineException(
+                "Cloud-init defined in vnf[id={}]:vdu[id={}] but not present in "
+                "package".format(indata["id"], vdu["id"])
+            )
+
+    def _validate_vnf_charms_in_package(self, storage_params, indata):
+        # Get VNF configuration through new container
+        for deployment_flavor in indata.get("df", []):
+            if "lcm-operations-configuration" not in deployment_flavor:
+                return
+            if (
+                "operate-vnf-op-config"
+                not in deployment_flavor["lcm-operations-configuration"]
+            ):
+                return
+            for day_1_2_config in deployment_flavor["lcm-operations-configuration"][
+                "operate-vnf-op-config"
+            ]["day1-2"]:
+                if day_1_2_config["id"] == indata["id"]:
+                    if utils.find_in_list(
+                        day_1_2_config.get("execution-environment-list", []),
+                        lambda ee: "juju" in ee,
+                    ):
+                        if not self._validate_package_folders(storage_params, "charms"):
+                            raise EngineException(
+                                "Charm defined in vnf[id={}] but not present in "
+                                "package".format(indata["id"])
+                            )
+
     def _validate_package_folders(self, storage_params, folder, file=None):
         if not storage_params or not storage_params.get("pkg-dir"):
             return False
         else:
-            if self.fs.file_exists("{}_".format(storage_params["folder"]), 'dir'):
-                f = "{}_/{}/{}".format(storage_params["folder"], storage_params["pkg-dir"], folder)
+            if self.fs.file_exists("{}_".format(storage_params["folder"]), "dir"):
+                f = "{}_/{}/{}".format(
+                    storage_params["folder"], storage_params["pkg-dir"], folder
+                )
             else:
-                f = "{}/{}/{}".format(storage_params["folder"], storage_params["pkg-dir"], folder)
+                f = "{}/{}/{}".format(
+                    storage_params["folder"], storage_params["pkg-dir"], folder
+                )
             if file:
-                return self.fs.file_exists("{}/{}".format(f, file), 'file')
+                return self.fs.file_exists("{}/{}".format(f, file), "file")
             else:
-                if self.fs.file_exists(f, 'dir'):
+                if self.fs.file_exists(f, "dir"):
                     if self.fs.dir_ls(f):
                         return True
             return False
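A sketch of the package layout this helper probes, with invented storage metadata:

storage_params = {"folder": "d41d8cd9", "pkg-dir": "my_vnf_pkg"}  # taken from _admin.storage
# Probed in the NBI file-system backend under "d41d8cd9_/my_vnf_pkg/" when that
# revision folder exists, otherwise under "d41d8cd9/my_vnf_pkg/":
#   charms/                       -> must exist and be a non-empty directory
#   cloud_init/<cloud-init-file>  -> the file named by vdu.cloud-init-file must exist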
 
+    @staticmethod
+    def validate_internal_virtual_links(indata):
+        all_ivld_ids = set()
+        for ivld in get_iterable(indata.get("int-virtual-link-desc")):
+            ivld_id = ivld.get("id")
+            if ivld_id and ivld_id in all_ivld_ids:
+                raise EngineException(
+                    "Duplicated VLD id in int-virtual-link-desc[id={}]".format(ivld_id),
+                    http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
+                )
+            else:
+                all_ivld_ids.add(ivld_id)
+
+        for vdu in get_iterable(indata.get("vdu")):
+            for int_cpd in get_iterable(vdu.get("int-cpd")):
+                int_cpd_ivld_id = int_cpd.get("int-virtual-link-desc")
+                if int_cpd_ivld_id and int_cpd_ivld_id not in all_ivld_ids:
+                    raise EngineException(
+                        "vdu[id='{}']:int-cpd[id='{}']:int-virtual-link-desc='{}' must match an existing "
+                        "int-virtual-link-desc".format(
+                            vdu["id"], int_cpd["id"], int_cpd_ivld_id
+                        ),
+                        http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
+                    )
+
+        for df in get_iterable(indata.get("df")):
+            for vlp in get_iterable(df.get("virtual-link-profile")):
+                vlp_ivld_id = vlp.get("id")
+                if vlp_ivld_id and vlp_ivld_id not in all_ivld_ids:
+                    raise EngineException(
+                        "df[id='{}']:virtual-link-profile='{}' must match an existing "
+                        "int-virtual-link-desc".format(df["id"], vlp_ivld_id),
+                        http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
+                    )
+
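Sketch (invented fragment, VnfdTopic assumed as above): every vdu int-cpd and df virtual-link-profile must point at a declared int-virtual-link-desc id.

indata = {
    "int-virtual-link-desc": [{"id": "internal"}],
    "vdu": [{"id": "vdu-a",
             "int-cpd": [{"id": "eth0", "int-virtual-link-desc": "internal"}]}],
    "df": [{"id": "default-df", "virtual-link-profile": [{"id": "internal"}]}],
}
VnfdTopic.validate_internal_virtual_links(indata)  # passes; a dangling reference raises 422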
+    @staticmethod
+    def validate_monitoring_params(indata):
+        all_monitoring_params = set()
+        for ivld in get_iterable(indata.get("int-virtual-link-desc")):
+            for mp in get_iterable(ivld.get("monitoring-parameters")):
+                mp_id = mp.get("id")
+                if mp_id and mp_id in all_monitoring_params:
+                    raise EngineException(
+                        "Duplicated monitoring-parameter id in "
+                        "int-virtual-link-desc[id='{}']:monitoring-parameters[id='{}']".format(
+                            ivld["id"], mp_id
+                        ),
+                        http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
+                    )
+                else:
+                    all_monitoring_params.add(mp_id)
+
+        for vdu in get_iterable(indata.get("vdu")):
+            for mp in get_iterable(vdu.get("monitoring-parameter")):
+                mp_id = mp.get("id")
+                if mp_id and mp_id in all_monitoring_params:
+                    raise EngineException(
+                        "Duplicated monitoring-parameter id in "
+                        "vdu[id='{}']:monitoring-parameter[id='{}']".format(
+                            vdu["id"], mp_id
+                        ),
+                        http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
+                    )
+                else:
+                    all_monitoring_params.add(mp_id)
+
+        for df in get_iterable(indata.get("df")):
+            for mp in get_iterable(df.get("monitoring-parameter")):
+                mp_id = mp.get("id")
+                if mp_id and mp_id in all_monitoring_params:
+                    raise EngineException(
+                        "Duplicated monitoring-parameter id in "
+                        "df[id='{}']:monitoring-parameter[id='{}']".format(
+                            df["id"], mp_id
+                        ),
+                        http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
+                    )
+                else:
+                    all_monitoring_params.add(mp_id)
+
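Sketch (invented fragment): monitoring-parameter ids must be unique across the int-virtual-link-desc, vdu and df scopes, so the duplicate below is rejected.

indata = {
    "vdu": [{"id": "vdu-a", "monitoring-parameter": [{"id": "cpu_util"}]}],
    "df": [{"id": "default-df", "monitoring-parameter": [{"id": "cpu_util"}]}],
}
VnfdTopic.validate_monitoring_params(indata)  # raises EngineException (422): duplicated id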
+    @staticmethod
+    def validate_scaling_group_descriptor(indata):
+        all_monitoring_params = set()
+        for ivld in get_iterable(indata.get("int-virtual-link-desc")):
+            for mp in get_iterable(ivld.get("monitoring-parameters")):
+                all_monitoring_params.add(mp.get("id"))
+
+        for vdu in get_iterable(indata.get("vdu")):
+            for mp in get_iterable(vdu.get("monitoring-parameter")):
+                all_monitoring_params.add(mp.get("id"))
+
+        for df in get_iterable(indata.get("df")):
+            for mp in get_iterable(df.get("monitoring-parameter")):
+                all_monitoring_params.add(mp.get("id"))
+
+        for df in get_iterable(indata.get("df")):
+            for sa in get_iterable(df.get("scaling-aspect")):
+                for sp in get_iterable(sa.get("scaling-policy")):
+                    for sc in get_iterable(sp.get("scaling-criteria")):
+                        sc_monitoring_param = sc.get("vnf-monitoring-param-ref")
+                        if (
+                            sc_monitoring_param
+                            and sc_monitoring_param not in all_monitoring_params
+                        ):
+                            raise EngineException(
+                                "df[id='{}']:scaling-aspect[id='{}']:scaling-policy"
+                                "[name='{}']:scaling-criteria[name='{}']: "
+                                "vnf-monitoring-param-ref='{}' not defined in any monitoring-param".format(
+                                    df["id"],
+                                    sa["id"],
+                                    sp["name"],
+                                    sc["name"],
+                                    sc_monitoring_param,
+                                ),
+                                http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
+                            )
+
+                for sca in get_iterable(sa.get("scaling-config-action")):
+                    if (
+                        "lcm-operations-configuration" not in df
+                        or "operate-vnf-op-config"
+                        not in df["lcm-operations-configuration"]
+                        or not utils.find_in_list(
+                            df["lcm-operations-configuration"][
+                                "operate-vnf-op-config"
+                            ].get("day1-2", []),
+                            lambda config: config["id"] == indata["id"],
+                        )
+                    ):
+                        raise EngineException(
+                            "'day1-2 configuration' not defined in the descriptor but it is "
+                            "referenced by df[id='{}']:scaling-aspect[id='{}']:scaling-config-action".format(
+                                df["id"], sa["id"]
+                            ),
+                            http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
+                        )
+                    for configuration in get_iterable(
+                        df["lcm-operations-configuration"]["operate-vnf-op-config"].get(
+                            "day1-2", []
+                        )
+                    ):
+                        for primitive in get_iterable(
+                            configuration.get("config-primitive")
+                        ):
+                            if (
+                                primitive["name"]
+                                == sca["vnf-config-primitive-name-ref"]
+                            ):
+                                break
+                        else:
+                            raise EngineException(
+                                "df[id='{}']:scaling-aspect[id='{}']:scaling-config-action:vnf-"
+                                "config-primitive-name-ref='{}' does not match any "
+                                "day1-2 configuration:config-primitive:name".format(
+                                    df["id"],
+                                    sa["id"],
+                                    sca["vnf-config-primitive-name-ref"],
+                                ),
+                                http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
+                            )
+
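Sketch of a df fragment that satisfies both cross-references checked here (all ids invented): the scaling criteria point at a declared monitoring parameter, and the scaling-config-action names a config-primitive of the day1-2 entry matching the VNFD id.

df = {
    "id": "default-df",
    "monitoring-parameter": [{"id": "cpu_util"}],
    "scaling-aspect": [{
        "id": "cpu-scale",
        "scaling-policy": [{"name": "cpu-policy",
                            "scaling-criteria": [{"name": "cpu-high",
                                                  "vnf-monitoring-param-ref": "cpu_util"}]}],
        "scaling-config-action": [{"vnf-config-primitive-name-ref": "touch"}],
    }],
    "lcm-operations-configuration": {"operate-vnf-op-config": {
        "day1-2": [{"id": "my-vnf", "config-primitive": [{"name": "touch"}]}]}},
}
VnfdTopic.validate_scaling_group_descriptor({"id": "my-vnf", "df": [df]})  # passes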
+    def delete_extra(self, session, _id, db_content, not_send_msg=None):
+        """
+        Deletes associated file system storage (via super).
+        Deletes associated vnfpkgops from the database.
+        :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
+        :param _id: server internal id
+        :param db_content: The database content of the descriptor
+        :return: None
+        :raises: FsException in case of error while deleting associated storage
+        """
+        super().delete_extra(session, _id, db_content, not_send_msg)
+        self.db.del_list("vnfpkgops", {"vnfPkgId": _id})
+
+    def sol005_projection(self, data):
+        data["onboardingState"] = data["_admin"]["onboardingState"]
+        data["operationalState"] = data["_admin"]["operationalState"]
+        data["usageState"] = data["_admin"]["usageState"]
+
+        links = {}
+        links["self"] = {"href": "/vnfpkgm/v1/vnf_packages/{}".format(data["_id"])}
+        links["vnfd"] = {"href": "/vnfpkgm/v1/vnf_packages/{}/vnfd".format(data["_id"])}
+        links["packageContent"] = {
+            "href": "/vnfpkgm/v1/vnf_packages/{}/package_content".format(data["_id"])
+        }
+        data["_links"] = links
+
+        return super().sol005_projection(data)
+
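As a sketch, for a package whose internal _id were "aaaa-bbbb" (invented), the projection lifts the _admin states and adds SOL005 links roughly as follows:

# data["onboardingState"]  <- _admin.onboardingState   (e.g. "ONBOARDED")
# data["operationalState"] <- _admin.operationalState  (e.g. "ENABLED")
# data["usageState"]       <- _admin.usageState        (e.g. "NOT_IN_USE")
# data["_links"] == {
#     "self":           {"href": "/vnfpkgm/v1/vnf_packages/aaaa-bbbb"},
#     "vnfd":           {"href": "/vnfpkgm/v1/vnf_packages/aaaa-bbbb/vnfd"},
#     "packageContent": {"href": "/vnfpkgm/v1/vnf_packages/aaaa-bbbb/package_content"},
# }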
 
 class NsdTopic(DescriptorTopic):
     topic = "nsds"
     topic_msg = "nsd"
 
 
-    def __init__(self, db, fs, msg):
-        DescriptorTopic.__init__(self, db, fs, msg)
+    def __init__(self, db, fs, msg, auth):
+        DescriptorTopic.__init__(self, db, fs, msg, auth)
+
+    def pyangbind_validation(self, item, data, force=False):
+        if self._descriptor_data_is_in_old_format(data):
+            raise EngineException(
+                "ERROR: Unsupported descriptor format. Please, use an ETSI SOL006 descriptor.",
+                http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
+            )
+        try:
+            nsd_vnf_profiles = data.get("df", [{}])[0].get("vnf-profile", [])
+            mynsd = etsi_nfv_nsd.etsi_nfv_nsd()
+            pybindJSONDecoder.load_ietf_json(
+                {"nsd": {"nsd": [data]}},
+                None,
+                None,
+                obj=mynsd,
+                path_helper=True,
+                skip_unknown=force,
+            )
+            out = pybindJSON.dumps(mynsd, mode="ietf")
+            desc_out = self._remove_envelop(yaml.safe_load(out))
+            desc_out = self._remove_yang_prefixes_from_descriptor(desc_out)
+            if nsd_vnf_profiles:
+                desc_out["df"][0]["vnf-profile"] = nsd_vnf_profiles
+            return desc_out
+        except Exception as e:
+            raise EngineException(
+                "Error in pyangbind validation: {}".format(str(e)),
+                http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
+            )
+
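A rough sketch of the descriptor shape handled here (values invented; the etsi-nfv-nsd model may still require further mandatory leaves): the data arrives already unwrapped by _remove_envelop, is re-wrapped as {"nsd": {"nsd": [data]}} for the decoder, and df[0]["vnf-profile"] is saved up front and reinstated on the output.

data = {
    "id": "my-ns",
    "version": "1.0",
    "vnfd-id": ["my-vnf"],
    "virtual-link-desc": [{"id": "mgmtnet"}],
    "df": [{"id": "default-df",
            "vnf-profile": [{"id": "1", "vnfd-id": "my-vnf"}]}],
}
# topic.pyangbind_validation("nsds", data) -> normalized descriptor, with the
# vnf-profile list carried over unchanged from the input.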
+    @staticmethod
+    def _descriptor_data_is_in_old_format(data):
+        return ("nsd-catalog" in data) or ("nsd:nsd-catalog" in data)
 
     @staticmethod
     def _remove_envelop(indata=None):
 
@@ -650,49 +1090,115 @@ class NsdTopic(DescriptorTopic):
             return {}
         clean_indata = indata
 
-        if clean_indata.get('nsd:nsd-catalog'):
-            clean_indata = clean_indata['nsd:nsd-catalog']
-        elif clean_indata.get('nsd-catalog'):
-            clean_indata = clean_indata['nsd-catalog']
-        if clean_indata.get('nsd'):
-            if not isinstance(clean_indata['nsd'], list) or len(clean_indata['nsd']) != 1:
+        if clean_indata.get("nsd"):
+            clean_indata = clean_indata["nsd"]
+        elif clean_indata.get("etsi-nfv-nsd:nsd"):
+            clean_indata = clean_indata["etsi-nfv-nsd:nsd"]
+        if clean_indata.get("nsd"):
+            if (
+                not isinstance(clean_indata["nsd"], list)
+                or len(clean_indata["nsd"]) != 1
+            ):
                 raise EngineException("'nsd' must be a list of only one element")
                 raise EngineException("'nsd' must be a list of only one element")
-            clean_indata = clean_indata['nsd'][0]
-        elif clean_indata.get('nsd:nsd'):
-            if not isinstance(clean_indata['nsd:nsd'], list) or len(clean_indata['nsd:nsd']) != 1:
-                raise EngineException("'nsd:nsd' must be a list of only one element")
-            clean_indata = clean_indata['nsd:nsd'][0]
+            clean_indata = clean_indata["nsd"][0]
         return clean_indata
 
     def _validate_input_new(self, indata, storage_params, force=False):
+        indata.pop("nsdOnboardingState", None)
+        indata.pop("nsdOperationalState", None)
+        indata.pop("nsdUsageState", None)
+
+        indata.pop("links", None)
+
         indata = self.pyangbind_validation("nsds", indata, force)
         # Cross references validation in the descriptor
         # TODO validate that if it contains cloud-init-file or charms, artifacts _admin.storage."pkg-dir" is not none
-        for vld in get_iterable(indata.get("vld")):
-            if vld.get("mgmt-network") and vld.get("ip-profile-ref"):
-                raise EngineException("Error at vld[id='{}']:ip-profile-ref"
-                                      " You cannot set an ip-profile when mgmt-network is True"
-                                      .format(vld["id"]), http_code=HTTPStatus.UNPROCESSABLE_ENTITY)
-            for vnfd_cp in get_iterable(vld.get("vnfd-connection-point-ref")):
-                for constituent_vnfd in get_iterable(indata.get("constituent-vnfd")):
-                    if vnfd_cp["member-vnf-index-ref"] == constituent_vnfd["member-vnf-index"]:
-                        if vnfd_cp.get("vnfd-id-ref") and vnfd_cp["vnfd-id-ref"] != constituent_vnfd["vnfd-id-ref"]:
-                            raise EngineException("Error at vld[id='{}']:vnfd-connection-point-ref[vnfd-id-ref='{}'] "
-                                                  "does not match constituent-vnfd[member-vnf-index='{}']:vnfd-id-ref"
-                                                  " '{}'".format(vld["id"], vnfd_cp["vnfd-id-ref"],
-                                                                 constituent_vnfd["member-vnf-index"],
-                                                                 constituent_vnfd["vnfd-id-ref"]),
-                                                  http_code=HTTPStatus.UNPROCESSABLE_ENTITY)
-                        break
-                else:
-                    raise EngineException("Error at vld[id='{}']:vnfd-connection-point-ref[member-vnf-index-ref='{}'] "
-                                          "does not match any constituent-vnfd:member-vnf-index"
-                                          .format(vld["id"], vnfd_cp["member-vnf-index-ref"]),
-                                          http_code=HTTPStatus.UNPROCESSABLE_ENTITY)
+        for vld in get_iterable(indata.get("virtual-link-desc")):
+            self.validate_vld_mgmt_network_with_virtual_link_protocol_data(vld, indata)
+
+        self.validate_vnf_profiles_vnfd_id(indata)
+
         return indata
 
-    def _validate_input_edit(self, indata, force=False):
+    @staticmethod
+    def validate_vld_mgmt_network_with_virtual_link_protocol_data(vld, indata):
+        if not vld.get("mgmt-network"):
+            return
+        vld_id = vld.get("id")
+        for df in get_iterable(indata.get("df")):
+            for vlp in get_iterable(df.get("virtual-link-profile")):
+                if vld_id and vld_id == vlp.get("virtual-link-desc-id"):
+                    if vlp.get("virtual-link-protocol-data"):
+                        raise EngineException(
+                            "Error at df[id='{}']:virtual-link-profile[id='{}']:virtual-link-"
+                            "protocol-data You cannot set a virtual-link-protocol-data "
+                            "when mgmt-network is True".format(df["id"], vlp["id"]),
+                            http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
+                        )
+
+    @staticmethod
+    def validate_vnf_profiles_vnfd_id(indata):
+        all_vnfd_ids = set(get_iterable(indata.get("vnfd-id")))
+        for df in get_iterable(indata.get("df")):
+            for vnf_profile in get_iterable(df.get("vnf-profile")):
+                vnfd_id = vnf_profile.get("vnfd-id")
+                if vnfd_id and vnfd_id not in all_vnfd_ids:
+                    raise EngineException(
+                        "Error at df[id='{}']:vnf_profile[id='{}']:vnfd-id='{}' "
+                        "does not match any vnfd-id".format(
+                            df["id"], vnf_profile["id"], vnfd_id
+                        ),
+                        http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
+                    )
+
+    def _validate_input_edit(self, indata, content, force=False):
         # not needed to validate with pyangbind because it will be validated at check_conflict_on_edit
+        """
+        indata looks as follows:
+            - In the new case (conformant)
+                {'nsdOperationalState': 'DISABLED', 'userDefinedData': {'id': 'string23',
+                '_id': 'c6ddc544-cede-4b94-9ebe-be07b298a3c1', 'name': 'simon46'}}
+            - In the old case (backwards-compatible)
+                {'id': 'string23', '_id': 'c6ddc544-cede-4b94-9ebe-be07b298a3c1', 'name': 'simon46'}
+        """
+        if "_admin" not in indata:
+            indata["_admin"] = {}
+
+        if "nsdOperationalState" in indata:
+            if indata["nsdOperationalState"] in ("ENABLED", "DISABLED"):
+                indata["_admin"]["operationalState"] = indata.pop("nsdOperationalState")
+            else:
+                raise EngineException(
+                    "State '{}' is not a valid operational state".format(
+                        indata["nsdOperationalState"]
+                    ),
+                    http_code=HTTPStatus.BAD_REQUEST,
+                )
+
+        # In the case of user defined data, we need to put the data in the root of the object
+        # to preserve current expected behaviour
+        if "userDefinedData" in indata:
+            data = indata.pop("userDefinedData")
+            if type(data) == dict:
+                indata["_admin"]["userDefinedData"] = data
+            else:
+                raise EngineException(
+                    "userDefinedData should be an object, but is '{}' instead".format(
+                        type(data)
+                    ),
+                    http_code=HTTPStatus.BAD_REQUEST,
+                )
+        if (
+            "operationalState" in indata["_admin"]
+            and content["_admin"]["operationalState"]
+            == indata["_admin"]["operationalState"]
+        ):
+            raise EngineException(
+                "nsdOperationalState already {}".format(
+                    content["_admin"]["operationalState"]
+                ),
+                http_code=HTTPStatus.CONFLICT,
+            )
         return indata
 
     def _check_descriptor_dependencies(self, session, descriptor):
@@ -705,69 +1211,141 @@ class NsdTopic(DescriptorTopic):
         """
         if session["force"]:
             return
         """
         if session["force"]:
             return
-        member_vnfd_index = {}
-        if descriptor.get("constituent-vnfd") and not session["force"]:
-            for vnf in descriptor["constituent-vnfd"]:
-                vnfd_id = vnf["vnfd-id-ref"]
-                filter_q = self._get_project_filter(session)
-                filter_q["id"] = vnfd_id
-                vnf_list = self.db.get_list("vnfds", filter_q)
-                if not vnf_list:
-                    raise EngineException("Descriptor error at 'constituent-vnfd':'vnfd-id-ref'='{}' references a non "
-                                          "existing vnfd".format(vnfd_id), http_code=HTTPStatus.CONFLICT)
-                # elif len(vnf_list) > 1:
-                #     raise EngineException("More than one vnfd found for id='{}'".format(vnfd_id),
-                #                           http_code=HTTPStatus.CONFLICT)
-                member_vnfd_index[vnf["member-vnf-index"]] = vnf_list[0]
+        vnfds_index = self._get_descriptor_constituent_vnfds_index(session, descriptor)
 
         # Cross references validation in the descriptor and vnfd connection point validation
-        for vld in get_iterable(descriptor.get("vld")):
-            for referenced_vnfd_cp in get_iterable(vld.get("vnfd-connection-point-ref")):
-                # look if this vnfd contains this connection point
-                vnfd = member_vnfd_index.get(referenced_vnfd_cp["member-vnf-index-ref"])
-                if not vnfd:
-                    raise EngineException("Error at vld[id='{}']:vnfd-connection-point-ref[member-vnf-index-ref='{}'] "
-                                          "does not match any constituent-vnfd:member-vnf-index"
-                                          .format(vld["id"], referenced_vnfd_cp["member-vnf-index-ref"]),
-                                          http_code=HTTPStatus.UNPROCESSABLE_ENTITY)
-                for vnfd_cp in get_iterable(vnfd.get("connection-point")):
-                    if referenced_vnfd_cp.get("vnfd-connection-point-ref") == vnfd_cp["name"]:
-                        break
-                else:
+        for df in get_iterable(descriptor.get("df")):
+            self.validate_df_vnf_profiles_constituent_connection_points(df, vnfds_index)
+
+    def _get_descriptor_constituent_vnfds_index(self, session, descriptor):
+        vnfds_index = {}
+        if descriptor.get("vnfd-id") and not session["force"]:
+            for vnfd_id in get_iterable(descriptor.get("vnfd-id")):
+                query_filter = self._get_project_filter(session)
+                query_filter["id"] = vnfd_id
+                vnf_list = self.db.get_list("vnfds", query_filter)
+                if not vnf_list:
                     raise EngineException(
-                        "Error at vld[id='{}']:vnfd-connection-point-ref[member-vnf-index-ref='{}']:vnfd-"
-                        "connection-point-ref='{}' references a non existing conection-point:name inside vnfd '{}'"
-                        .format(vld["id"], referenced_vnfd_cp["member-vnf-index-ref"],
-                                referenced_vnfd_cp["vnfd-connection-point-ref"], vnfd["id"]),
-                        http_code=HTTPStatus.UNPROCESSABLE_ENTITY)
+                        "Descriptor error at 'vnfd-id'='{}' references a non "
+                        "existing vnfd".format(vnfd_id),
+                        http_code=HTTPStatus.CONFLICT,
+                    )
+                vnfds_index[vnfd_id] = vnf_list[0]
+        return vnfds_index
+
+    @staticmethod
+    def validate_df_vnf_profiles_constituent_connection_points(df, vnfds_index):
+        for vnf_profile in get_iterable(df.get("vnf-profile")):
+            vnfd = vnfds_index.get(vnf_profile["vnfd-id"])
+            all_vnfd_ext_cpds = set()
+            for ext_cpd in get_iterable(vnfd.get("ext-cpd")):
+                if ext_cpd.get("id"):
+                    all_vnfd_ext_cpds.add(ext_cpd.get("id"))
+
+            for virtual_link in get_iterable(
+                vnf_profile.get("virtual-link-connectivity")
+            ):
+                for vl_cpd in get_iterable(virtual_link.get("constituent-cpd-id")):
+                    vl_cpd_id = vl_cpd.get("constituent-cpd-id")
+                    if vl_cpd_id and vl_cpd_id not in all_vnfd_ext_cpds:
+                        raise EngineException(
+                            "Error at df[id='{}']:vnf-profile[id='{}']:virtual-link-connectivity"
+                            "[virtual-link-profile-id='{}']:constituent-cpd-id='{}' references a "
+                            "non existing ext-cpd:id inside vnfd '{}'".format(
+                                df["id"],
+                                vnf_profile["id"],
+                                virtual_link["virtual-link-profile-id"],
+                                vl_cpd_id,
+                                vnfd["id"],
+                            ),
+                            http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
+                        )
 
     def check_conflict_on_edit(self, session, final_content, edit_content, _id):
-        super().check_conflict_on_edit(session, final_content, edit_content, _id)
+        final_content = super().check_conflict_on_edit(
+            session, final_content, edit_content, _id
+        )
 
         self._check_descriptor_dependencies(session, final_content)
 
 
-    def check_conflict_on_del(self, session, _id):
+        return final_content
+
+    def check_conflict_on_del(self, session, _id, db_content):
         """
         Check that there is not any NSR that uses this NSD. Only NSRs belonging to this project are considered. Note
         that NSD can be public and be used by other projects.
         :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
         """
         Check that there is not any NSR that uses this NSD. Only NSRs belonging to this project are considered. Note
         that NSD can be public and be used by other projects.
         :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
-        :param _id: vnfd inernal id
+        :param _id: nsd internal id
+        :param db_content: The database content of the _id
         :return: None or raises EngineException with the conflict
         """
         if session["force"]:
             return
+        descriptor = db_content
+        descriptor_id = descriptor.get("id")
+        if not descriptor_id:  # empty nsd not uploaded
+            return
+
+        # check NSD used by NS
         _filter = self._get_project_filter(session)
-        _filter["nsdId"] = _id
+        _filter["nsd-id"] = _id
         if self.db.get_list("nsrs", _filter):
-            raise EngineException("There is some NSR that depends on this NSD", http_code=HTTPStatus.CONFLICT)
+            raise EngineException(
+                "There is at least one NS instance using this descriptor",
+                http_code=HTTPStatus.CONFLICT,
+            )
+
+        # check NSD referenced by NST
+        del _filter["nsd-id"]
+        _filter["netslice-subnet.ANYINDEX.nsd-ref"] = descriptor_id
+        if self.db.get_list("nsts", _filter):
+            raise EngineException(
+                "There is at least one NetSlice Template referencing this descriptor",
+                http_code=HTTPStatus.CONFLICT,
+            )
+
+    def sol005_projection(self, data):
+        data["nsdOnboardingState"] = data["_admin"]["onboardingState"]
+        data["nsdOperationalState"] = data["_admin"]["operationalState"]
+        data["nsdUsageState"] = data["_admin"]["usageState"]
+
+        links = {}
+        links["self"] = {"href": "/nsd/v1/ns_descriptors/{}".format(data["_id"])}
+        links["nsd_content"] = {
+            "href": "/nsd/v1/ns_descriptors/{}/nsd_content".format(data["_id"])
+        }
+        data["_links"] = links
+
+        return super().sol005_projection(data)
 
 
 class NstTopic(DescriptorTopic):
     topic = "nsts"
     topic_msg = "nst"
+    quota_name = "slice_templates"
+
+    def __init__(self, db, fs, msg, auth):
+        DescriptorTopic.__init__(self, db, fs, msg, auth)
 
 
-    def __init__(self, db, fs, msg):
-        DescriptorTopic.__init__(self, db, fs, msg)
+    def pyangbind_validation(self, item, data, force=False):
+        try:
+            mynst = nst_im()
+            pybindJSONDecoder.load_ietf_json(
+                {"nst": [data]},
+                None,
+                None,
+                obj=mynst,
+                path_helper=True,
+                skip_unknown=force,
+            )
+            out = pybindJSON.dumps(mynst, mode="ietf")
+            desc_out = self._remove_envelop(yaml.safe_load(out))
+            return desc_out
+        except Exception as e:
+            raise EngineException(
+                "Error in pyangbind validation: {}".format(str(e)),
+                http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
+            )
 
     @staticmethod
     def _remove_envelop(indata=None):
 
@@ -775,21 +1353,26 @@ class NstTopic(DescriptorTopic):
             return {}
         clean_indata = indata
 
-        if clean_indata.get('nst'):
-            if not isinstance(clean_indata['nst'], list) or len(clean_indata['nst']) != 1:
+        if clean_indata.get("nst"):
+            if (
+                not isinstance(clean_indata["nst"], list)
+                or len(clean_indata["nst"]) != 1
+            ):
                 raise EngineException("'nst' must be a list only one element")
                 raise EngineException("'nst' must be a list only one element")
-            clean_indata = clean_indata['nst'][0]
-        elif clean_indata.get('nst:nst'):
-            if not isinstance(clean_indata['nst:nst'], list) or len(clean_indata['nst:nst']) != 1:
+            clean_indata = clean_indata["nst"][0]
+        elif clean_indata.get("nst:nst"):
+            if (
+                not isinstance(clean_indata["nst:nst"], list)
+                or len(clean_indata["nst:nst"]) != 1
+            ):
                 raise EngineException("'nst:nst' must be a list only one element")
                 raise EngineException("'nst:nst' must be a list only one element")
-            clean_indata = clean_indata['nst:nst'][0]
+            clean_indata = clean_indata["nst:nst"][0]
         return clean_indata
 
         return clean_indata
 
-    def _validate_input_edit(self, indata, force=False):
-        # TODO validate with pyangbind, serialize
-        return indata
-
     def _validate_input_new(self, indata, storage_params, force=False):
     def _validate_input_new(self, indata, storage_params, force=False):
+        indata.pop("onboardingState", None)
+        indata.pop("operationalState", None)
+        indata.pop("usageState", None)
         indata = self.pyangbind_validation("nsts", indata, force)
         return indata.copy()
 
@@ -807,20 +1390,27 @@ class NstTopic(DescriptorTopic):
             filter_q = self._get_project_filter(session)
             filter_q["id"] = nsd_id
             if not self.db.get_list("nsds", filter_q):
-                raise EngineException("Descriptor error at 'netslice-subnet':'nsd-ref'='{}' references a non "
-                                      "existing nsd".format(nsd_id), http_code=HTTPStatus.CONFLICT)
+                raise EngineException(
+                    "Descriptor error at 'netslice-subnet':'nsd-ref'='{}' references a non "
+                    "existing nsd".format(nsd_id),
+                    http_code=HTTPStatus.CONFLICT,
+                )
 
     def check_conflict_on_edit(self, session, final_content, edit_content, _id):
 
     def check_conflict_on_edit(self, session, final_content, edit_content, _id):
-        super().check_conflict_on_edit(session, final_content, edit_content, _id)
+        final_content = super().check_conflict_on_edit(
+            session, final_content, edit_content, _id
+        )
 
         self._check_descriptor_dependencies(session, final_content)
+        return final_content
 
 
-    def check_conflict_on_del(self, session, _id):
+    def check_conflict_on_del(self, session, _id, db_content):
         """
         Check that there is not any NSIR that uses this NST. Only NSIRs belonging to this project are considered. Note
         that NST can be public and be used by other projects.
         :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
         :param _id: nst internal id
         """
         Check that there is not any NSIR that uses this NST. Only NSIRs belonging to this project are considered. Note
         that NST can be public and be used by other projects.
         :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
         :param _id: nst internal id
+        :param db_content: The database content of the _id.
         :return: None or raises EngineException with the conflict
         """
         # TODO: Check this method
@@ -828,26 +1418,35 @@ class NstTopic(DescriptorTopic):
             return
         # Get Network Slice Template from Database
         _filter = self._get_project_filter(session)
-        _filter["_id"] = _id
-        nst = self.db.get_one("nsts", _filter)
-        
-        # Search NSIs using NST via nst-ref
-        _filter = self._get_project_filter(session)
-        _filter["nst-ref"] = nst["id"]
-        nsis_list = self.db.get_list("nsis", _filter)
-        for nsi_item in nsis_list:
-            if nsi_item["_admin"].get("nsiState") != "TERMINATED":
-                raise EngineException("There is some NSIS that depends on this NST", http_code=HTTPStatus.CONFLICT)
+        _filter["_admin.nst-id"] = _id
+        if self.db.get_list("nsis", _filter):
+            raise EngineException(
+                "there is at least one Netslice Instance using this descriptor",
+                http_code=HTTPStatus.CONFLICT,
+            )
+
+    def sol005_projection(self, data):
+        data["onboardingState"] = data["_admin"]["onboardingState"]
+        data["operationalState"] = data["_admin"]["operationalState"]
+        data["usageState"] = data["_admin"]["usageState"]
+
+        links = {}
+        links["self"] = {"href": "/nst/v1/netslice_templates/{}".format(data["_id"])}
+        links["nst"] = {"href": "/nst/v1/netslice_templates/{}/nst".format(data["_id"])}
+        data["_links"] = links
+
+        return super().sol005_projection(data)
 
 
 class PduTopic(BaseTopic):
     topic = "pdus"
     topic_msg = "pdu"
+    quota_name = "pduds"
     schema_new = pdu_new_schema
     schema_edit = pdu_edit_schema
 
-    def __init__(self, db, fs, msg):
-        BaseTopic.__init__(self, db, fs, msg)
+    def __init__(self, db, fs, msg, auth):
+        BaseTopic.__init__(self, db, fs, msg, auth)
 
     @staticmethod
     def format_on_new(content, project_id=None, make_public=False):
 
@@ -856,10 +1455,128 @@ class PduTopic(BaseTopic):
         content["_admin"]["operationalState"] = "ENABLED"
         content["_admin"]["usageState"] = "NOT_IN_USE"
 
         content["_admin"]["operationalState"] = "ENABLED"
         content["_admin"]["usageState"] = "NOT_IN_USE"
 
-    def check_conflict_on_del(self, session, _id):
+    def check_conflict_on_del(self, session, _id, db_content):
+        """
+        Check that there is not any vnfr that uses this PDU
+        :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
+        :param _id: pdu internal id
+        :param db_content: The database content of the _id.
+        :return: None or raises EngineException with the conflict
+        """
         if session["force"]:
             return
-        # TODO Is it needed to check descriptors _admin.project_read/project_write??
-        _filter = {"vdur.pdu-id": _id}
+
+        _filter = self._get_project_filter(session)
+        _filter["vdur.pdu-id"] = _id
         if self.db.get_list("vnfrs", _filter):
         if self.db.get_list("vnfrs", _filter):
-            raise EngineException("There is some NSR that uses this PDU", http_code=HTTPStatus.CONFLICT)
+            raise EngineException(
+                "There is at least one VNF instance using this PDU",
+                http_code=HTTPStatus.CONFLICT,
+            )
+
+
+class VnfPkgOpTopic(BaseTopic):
+    topic = "vnfpkgops"
+    topic_msg = "vnfd"
+    schema_new = vnfpkgop_new_schema
+    schema_edit = None
+
+    def __init__(self, db, fs, msg, auth):
+        BaseTopic.__init__(self, db, fs, msg, auth)
+
+    def edit(self, session, _id, indata=None, kwargs=None, content=None):
+        raise EngineException(
+            "Method 'edit' not allowed for topic '{}'".format(self.topic),
+            HTTPStatus.METHOD_NOT_ALLOWED,
+        )
+
+    def delete(self, session, _id, dry_run=False):
+        raise EngineException(
+            "Method 'delete' not allowed for topic '{}'".format(self.topic),
+            HTTPStatus.METHOD_NOT_ALLOWED,
+        )
+
+    def delete_list(self, session, filter_q=None):
+        raise EngineException(
+            "Method 'delete_list' not allowed for topic '{}'".format(self.topic),
+            HTTPStatus.METHOD_NOT_ALLOWED,
+        )
+
+    def new(self, rollback, session, indata=None, kwargs=None, headers=None):
+        """
+        Creates a new entry into database.
+        :param rollback: list to append created items at database in case a rollback has to be done
+        :param session: contains "username", "admin", "force", "public", "project_id", "set_project"
+        :param indata: data to be inserted
+        :param kwargs: used to override the indata descriptor
+        :param headers: http request headers
+        :return: _id, op_id:
+            _id: identity of the inserted data.
+             op_id: None
+        """
+        self._update_input_with_kwargs(indata, kwargs)
+        validate_input(indata, self.schema_new)
+        vnfpkg_id = indata["vnfPkgId"]
+        filter_q = BaseTopic._get_project_filter(session)
+        filter_q["_id"] = vnfpkg_id
+        vnfd = self.db.get_one("vnfds", filter_q)
+        operation = indata["lcmOperationType"]
+        kdu_name = indata["kdu_name"]
+        for kdu in vnfd.get("kdu", []):
+            if kdu["name"] == kdu_name:
+                helm_chart = kdu.get("helm-chart")
+                juju_bundle = kdu.get("juju-bundle")
+                break
+        else:
+            raise EngineException(
+                "Not found vnfd[id='{}']:kdu[name='{}']".format(vnfpkg_id, kdu_name)
+            )
+        if helm_chart:
+            indata["helm-chart"] = helm_chart
+            match = fullmatch(r"([^/]*)/([^/]*)", helm_chart)
+            repo_name = match.group(1) if match else None
+        elif juju_bundle:
+            indata["juju-bundle"] = juju_bundle
+            match = fullmatch(r"([^/]*)/([^/]*)", juju_bundle)
+            repo_name = match.group(1) if match else None
+        else:
+            raise EngineException(
+                "Found neither 'helm-chart' nor 'juju-bundle' in vnfd[id='{}']:kdu[name='{}']".format(
+                    vnfpkg_id, kdu_name
+                )
+            )
+        if repo_name:
+            del filter_q["_id"]
+            filter_q["name"] = repo_name
+            repo = self.db.get_one("k8srepos", filter_q)
+            k8srepo_id = repo.get("_id")
+            k8srepo_url = repo.get("url")
+        else:
+            k8srepo_id = None
+            k8srepo_url = None
+        indata["k8srepoId"] = k8srepo_id
+        indata["k8srepo_url"] = k8srepo_url
+        vnfpkgop_id = str(uuid4())
+        vnfpkgop_desc = {
+            "_id": vnfpkgop_id,
+            "operationState": "PROCESSING",
+            "vnfPkgId": vnfpkg_id,
+            "lcmOperationType": operation,
+            "isAutomaticInvocation": False,
+            "isCancelPending": False,
+            "operationParams": indata,
+            "links": {
+                "self": "/osm/vnfpkgm/v1/vnfpkg_op_occs/" + vnfpkgop_id,
+                "vnfpkg": "/osm/vnfpkgm/v1/vnf_packages/" + vnfpkg_id,
+            },
+        }
+        self.format_on_new(
+            vnfpkgop_desc, session["project_id"], make_public=session["public"]
+        )
+        ctime = vnfpkgop_desc["_admin"]["created"]
+        vnfpkgop_desc["statusEnteredTime"] = ctime
+        vnfpkgop_desc["startTime"] = ctime
+        self.db.create(self.topic, vnfpkgop_desc)
+        rollback.append({"topic": self.topic, "_id": vnfpkgop_id})
+        self.msg.write(self.topic_msg, operation, vnfpkgop_desc)
+        return vnfpkgop_id, None
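For orientation, a sketch of the request body this handler consumes (ids, KDU and chart names invented; the complete required field set is defined by vnfpkgop_new_schema):

indata = {
    "vnfPkgId": "3d1f0c8a-0000-4000-8000-000000000000",  # _id of an onboarded VNF package
    "lcmOperationType": "upgrade",                        # stored verbatim in the op record
    "kdu_name": "ldap",                                   # must match an entry in vnfd["kdu"]
}
# If kdu "ldap" carries helm-chart "stable/openldap", new() derives repo "stable",
# resolves it in the "k8srepos" collection, persists a vnfpkgop document with
# operationState "PROCESSING", and publishes the operation on the "vnfd" message topic.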